Restore building with chronicles_sinks=json

parent 7d174b548a
commit 31baa77742
@@ -99,8 +99,8 @@ proc init*(T: type BeaconNode, conf: BeaconNodeConf): Future[BeaconNode] {.async
   else:
     try:
       result.networkMetadata = Json.loadFile(conf.network, NetworkMetadata)
-    except:
-      fail "Failed to load network metadata: ", getCurrentExceptionMsg()
+    except SerializationError as err:
+      fail "Failed to load network metadata: \n", err.formatMsg(conf.network)

   var metadataErrorMsg = ""

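The narrower except clause is the substance of this hunk: SerializationError carries enough detail for formatMsg to point at the offending file, whereas a bare `except:` with getCurrentExceptionMsg() loses that context. A minimal standalone sketch of the same pattern, not taken from the commit (the Config type and file name are invented):

import serialization, json_serialization

type
  Config = object   # hypothetical stand-in for NetworkMetadata
    networkId: int

try:
  let cfg = Json.loadFile("network.json", Config)
  echo "loaded network ", cfg.networkId
except SerializationError as err:
  # formatMsg prefixes the error with the file name (and position info when
  # available), which getCurrentExceptionMsg() alone would not include.
  echo "Failed to load network metadata:\n", err.formatMsg("network.json")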
@@ -144,18 +144,6 @@ else:
 const
   networkKeyFilename = "privkey.protobuf"

-proc writeValue*(writer: var JsonWriter, value: PeerID) {.inline.} =
-  writer.writeValue value.pretty
-
-proc readValue*(reader: var JsonReader, value: var PeerID) {.inline.} =
-  value = PeerID.init reader.readValue(string)
-
-proc writeValue*(writer: var JsonWriter, value: MultiAddress) {.inline.} =
-  writer.writeValue $value
-
-proc readValue*(reader: var JsonReader, value: var MultiAddress) {.inline.} =
-  value = MultiAddress.init reader.readValue(string)
-
 proc init*(T: type BootstrapAddr, str: string): T =
   Json.decode(str, PeerInfo)

@@ -1,12 +1,12 @@
 import
-  options, macros, algorithm, tables,
+  macros, algorithm, tables,
   std_shims/[macros_shim, tables_shims], chronos, chronicles,
   libp2p/daemon/daemonapi, faststreams/output_stream, serialization,
-  eth/p2p/p2p_protocol_dsl,
-  ssz
+  json_serialization/std/options, eth/p2p/p2p_protocol_dsl,
+  libp2p_json_serialization, ssz

 export
-  daemonapi, p2pProtocol
+  daemonapi, p2pProtocol, libp2p_json_serialization

 type
   Eth2Node* = ref object of RootObj
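One detail of the import change above: the stdlib `options` import is dropped in favour of `json_serialization/std/options`, which provides the `writeValue`/`readValue` overloads for `Option[T]` needed once values flow through json_serialization (for example via the JSON log sink). A hedged sketch of what that enables; the Sample type is invented, and the exact JSON emitted for a `none` value may vary between library versions:

import std/options, json_serialization
import json_serialization/std/options as json_options

type
  Sample = object          # illustrative only, not part of the commit
    note: Option[string]

# With the Option[T] serializers in scope, optional fields encode and decode
# like any other field.
let encoded = Json.encode(Sample(note: some "hello"))
echo encoded
echo Json.decode(encoded, Sample).note.get   # -> hello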
@@ -506,7 +506,7 @@ proc p2pProtocolBackendImpl*(p: P2PProtocol): Backend =
       try:
         debug "INCOMING CONNECTION", `peerVar`
         `await` `handshakeProcName`(`peerVar`, `streamVar`)
-        debug "HANSHAKE COMPLETED", `peerVar`
+        debug "HANDSHAKE COMPLETED", `peerVar`
       except SerializationError as err:
         debug "Failed to decode message",
               err = err.formatMsg("<msg>"),
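These `debug` statements are what ties the hunk to the commit title: with `-d:chronicles_sinks=json`, chronicles renders each logged property through json_serialization, so every type that appears as a log field needs a `writeValue` overload in scope, which is what the new libp2p_json_serialization module provides for PeerID and MultiAddress. A small sketch, assuming that module is on the import path:

# Build with the JSON sink selected, e.g.:
#   nim c -d:chronicles_sinks=json this_file.nim
import chronicles, libp2p/daemon/daemonapi, libp2p_json_serialization

proc onConnection(peer: PeerID) =
  # Under the JSON sink this property is serialized via the PeerID
  # writeValue overload; without it, the log statement fails to compile.
  debug "INCOMING CONNECTION", peer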
@@ -0,0 +1,15 @@
+import libp2p/daemon/daemonapi, json_serialization
+export json_serialization
+
+proc writeValue*(writer: var JsonWriter, value: PeerID) {.inline.} =
+  writer.writeValue value.pretty
+
+proc readValue*(reader: var JsonReader, value: var PeerID) {.inline.} =
+  value = PeerID.init reader.readValue(string)
+
+proc writeValue*(writer: var JsonWriter, value: MultiAddress) {.inline.} =
+  writer.writeValue $value
+
+proc readValue*(reader: var JsonReader, value: var MultiAddress) {.inline.} =
+  value = MultiAddress.init reader.readValue(string)
+
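For illustration, once these overloads are in scope, PeerID and MultiAddress fields inside ordinary objects round-trip through Json like any other field. The NodeRecord type below is invented, and the import path for libp2p_json_serialization depends on where the module sits in your project:

import json_serialization, libp2p/daemon/daemonapi, libp2p_json_serialization

type
  NodeRecord = object            # hypothetical, not part of the commit
    nickname: string
    addresses: seq[MultiAddress]

proc roundTrip(r: NodeRecord): NodeRecord =
  # MultiAddress values are written as their textual form ($value) and read
  # back with MultiAddress.init, per the overloads above.
  Json.decode(Json.encode(r), NodeRecord)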
@@ -2,10 +2,10 @@ import
   tables, deques, options, algorithm, std_shims/[macros_shim, tables_shims],
   ranges/ptr_arith, chronos, chronicles, serialization, faststreams/input_stream,
   eth/async_utils, eth/p2p/p2p_protocol_dsl, libp2p/daemon/daemonapi,
-  ssz
+  libp2p_json_serialization, ssz

 export
-  daemonapi, p2pProtocol, serialization, ssz
+  daemonapi, p2pProtocol, serialization, ssz, libp2p_json_serialization

 const
   # Compression nibble
@@ -71,12 +71,12 @@ suite "Simple serialization":
       ser.len() == 3
       SSZ.decode(ser, v.type) == v

-  SSZ.roundripTest [1, 2, 3]
-  SSZ.roundripTest @[1, 2, 3]
-  SSZ.roundripTest SigKey.random().getKey()
-  SSZ.roundripTest BeaconBlock(
+  SSZ.roundtripTest [1, 2, 3]
+  SSZ.roundtripTest @[1, 2, 3]
+  SSZ.roundtripTest SigKey.random().getKey()
+  SSZ.roundtripTest BeaconBlock(
     slot: 42.Slot, signature: sign(SigKey.random(), 0'u64, ""))
-  SSZ.roundripTest BeaconState(slot: 42.Slot)
+  SSZ.roundtripTest BeaconState(slot: 42.Slot)

 # suite "Tree hashing":
 #   # TODO The test values are taken from an earlier version of SSZ and have
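For readers who have not seen the helper being renamed here: judging by the context lines (`ser.len() == 3`, `SSZ.decode(ser, v.type) == v`), `roundtripTest` encodes a value, optionally checks the encoded length, and decodes it back. A minimal sketch of a helper in that spirit, not the suite's actual definition:

import unittest, serialization

# Hypothetical stand-in for the test suite's roundtripTest template.
template roundtripCheck(Format: typedesc, v: untyped) =
  test "round-trip " & $typeof(v):
    let ser = Format.encode(v)
    check Format.decode(ser, v.type) == v

# Usage mirrors the calls in the diff, e.g.:
#   SSZ.roundtripCheck [1, 2, 3]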