Restore building with chronicles_sinks=json

Zahary Karadjov 2019-06-24 18:13:48 +03:00 committed by zah
parent 7d174b548a
commit 31baa77742
6 changed files with 29 additions and 26 deletions

View File

@@ -99,8 +99,8 @@ proc init*(T: type BeaconNode, conf: BeaconNodeConf): Future[BeaconNode] {.async
   else:
     try:
       result.networkMetadata = Json.loadFile(conf.network, NetworkMetadata)
-    except:
-      fail "Failed to load network metadata: ", getCurrentExceptionMsg()
+    except SerializationError as err:
+      fail "Failed to load network metadata: \n", err.formatMsg(conf.network)
   var metadataErrorMsg = ""
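The change above swaps a bare except for a typed SerializationError handler, so the failure message carries the parser's position details instead of a generic exception string. A minimal standalone sketch of the same pattern, assuming nim-json-serialization's Json.loadFile and the formatMsg helper on SerializationError; MyConfig and network.json are hypothetical stand-ins for NetworkMetadata and conf.network:

import serialization, json_serialization

type
  MyConfig = object
    networkId: string

try:
  let cfg = Json.loadFile("network.json", MyConfig)
  echo cfg.networkId
except SerializationError as err:
  # formatMsg prefixes the given file name and reports where decoding failed
  echo "Failed to load network metadata:\n", err.formatMsg("network.json")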

View File

@@ -144,18 +144,6 @@ else:
   const
     networkKeyFilename = "privkey.protobuf"
 
-  proc writeValue*(writer: var JsonWriter, value: PeerID) {.inline.} =
-    writer.writeValue value.pretty
-
-  proc readValue*(reader: var JsonReader, value: var PeerID) {.inline.} =
-    value = PeerID.init reader.readValue(string)
-
-  proc writeValue*(writer: var JsonWriter, value: MultiAddress) {.inline.} =
-    writer.writeValue $value
-
-  proc readValue*(reader: var JsonReader, value: var MultiAddress) {.inline.} =
-    value = MultiAddress.init reader.readValue(string)
-
   proc init*(T: type BootstrapAddr, str: string): T =
     Json.decode(str, PeerInfo)

View File

@@ -1,12 +1,12 @@
 import
-  options, macros, algorithm, tables,
+  macros, algorithm, tables,
   std_shims/[macros_shim, tables_shims], chronos, chronicles,
   libp2p/daemon/daemonapi, faststreams/output_stream, serialization,
-  eth/p2p/p2p_protocol_dsl,
-  ssz
+  json_serialization/std/options, eth/p2p/p2p_protocol_dsl,
+  libp2p_json_serialization, ssz
 
 export
-  daemonapi, p2pProtocol
+  daemonapi, p2pProtocol, libp2p_json_serialization
 
 type
   Eth2Node* = ref object of RootObj
@@ -506,7 +506,7 @@ proc p2pProtocolBackendImpl*(p: P2PProtocol): Backend =
       try:
         debug "INCOMING CONNECTION", `peerVar`
         `await` `handshakeProcName`(`peerVar`, `streamVar`)
-        debug "HANSHAKE COMPLETED", `peerVar`
+        debug "HANDSHAKE COMPLETED", `peerVar`
       except SerializationError as err:
         debug "Failed to decode message",
               err = err.formatMsg("<msg>"),
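Two related changes in this file: the plain options import becomes json_serialization/std/options, which provides JSON writeValue/readValue for Option[T], and the new libp2p_json_serialization module (added below) is imported and re-exported so every user of this backend sees the PeerID/MultiAddress serializers. A hedged sketch of why the options swap matters; the values are illustrative only:

import options, json_serialization, json_serialization/std/options

# Option[T] only passes through the JSON writer once the std/options shim
# supplies writeValue/readValue for it.
echo Json.encode(some(42))                    # expected: 42
echo Json.decode("null", Option[int]).isNone  # expected: true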

View File

@@ -0,0 +1,15 @@
+import libp2p/daemon/daemonapi, json_serialization
+
+export json_serialization
+
+proc writeValue*(writer: var JsonWriter, value: PeerID) {.inline.} =
+  writer.writeValue value.pretty
+
+proc readValue*(reader: var JsonReader, value: var PeerID) {.inline.} =
+  value = PeerID.init reader.readValue(string)
+
+proc writeValue*(writer: var JsonWriter, value: MultiAddress) {.inline.} =
+  writer.writeValue $value
+
+proc readValue*(reader: var JsonReader, value: var MultiAddress) {.inline.} =
+  value = MultiAddress.init reader.readValue(string)
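This new module is the heart of the fix: chronicles' JSON sink serializes logged properties through json_serialization, so attaching a peer to a debug statement needs these PeerID/MultiAddress overloads in scope (they are moved here from the module edited above). A hedged usage sketch; the base58 peer id is just an example value:

import libp2p/daemon/daemonapi, json_serialization, libp2p_json_serialization

let id = PeerID.init "QmYyQSo1c1Ym7orWxLYvCrM2EmxFTANf8wXmmE7DWjhx5N"
let encoded = Json.encode(id)                           # a JSON string holding id.pretty
echo Json.decode(encoded, PeerID).pretty == id.pretty   # expected: true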

View File

@@ -2,10 +2,10 @@ import
   tables, deques, options, algorithm, std_shims/[macros_shim, tables_shims],
   ranges/ptr_arith, chronos, chronicles, serialization, faststreams/input_stream,
   eth/async_utils, eth/p2p/p2p_protocol_dsl, libp2p/daemon/daemonapi,
-  ssz
+  libp2p_json_serialization, ssz
 
 export
-  daemonapi, p2pProtocol, serialization, ssz
+  daemonapi, p2pProtocol, serialization, ssz, libp2p_json_serialization
 
 const
   # Compression nibble
View File

@@ -71,12 +71,12 @@ suite "Simple serialization":
       ser.len() == 3
       SSZ.decode(ser, v.type) == v
 
-  SSZ.roundripTest [1, 2, 3]
-  SSZ.roundripTest @[1, 2, 3]
-  SSZ.roundripTest SigKey.random().getKey()
-  SSZ.roundripTest BeaconBlock(
+  SSZ.roundtripTest [1, 2, 3]
+  SSZ.roundtripTest @[1, 2, 3]
+  SSZ.roundtripTest SigKey.random().getKey()
+  SSZ.roundtripTest BeaconBlock(
     slot: 42.Slot, signature: sign(SigKey.random(), 0'u64, ""))
-  SSZ.roundripTest BeaconState(slot: 42.Slot)
+  SSZ.roundtripTest BeaconState(slot: 42.Slot)
 
 # suite "Tree hashing":
 #   # TODO The test values are taken from an earlier version of SSZ and have
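The last hunk only corrects the roundripTest -> roundtripTest spelling at the call sites. For orientation, a minimal sketch of the kind of check such a template performs, using only the serialization framework's generic encode/decode entry points; the real template is defined earlier in the test file and is not shown in this diff:

template roundtripCheck(Format: type, value: auto) =
  # serialize, deserialize into the same type, and require equality
  let ser = Format.encode(value)
  doAssert Format.decode(ser, type(value)) == value

# e.g. roundtripCheck(SSZ, @[1, 2, 3])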