Add new portal beacon chain lc content encoding tests (#1636)
parent d0e46d9075 · commit 89b3e679e2
@@ -33,21 +33,6 @@ type
   BlockDataTable* = Table[string, BlockData]

-proc toString(v: IoErrorCode): string =
-  try: ioErrorMsg(v)
-  except Exception as e: raiseAssert e.msg
-
-proc readJsonType*(dataFile: string, T: type): Result[T, string] =
-  let data = ? readAllFile(dataFile).mapErr(toString)
-
-  let decoded =
-    try:
-      Json.decode(data, T)
-    except SerializationError as e:
-      return err("Failed decoding json data-file: " & e.msg)
-
-  ok(decoded)
-
 iterator blockHashes*(blockData: BlockDataTable): BlockHash =
   for k, v in blockData:
     var blockHash: BlockHash
@@ -189,6 +174,29 @@ proc getGenesisHeader*(id: NetworkId = MainNet): BlockHeader =
   except RlpError:
     raise (ref Defect)(msg: "Genesis should be valid")

+# Reading JSON Portal content and content keys
+
+type
+  JsonPortalContent* = object
+    content_key*: string
+    content_value*: string
+
+  JsonPortalContentTable* = OrderedTable[string, JsonPortalContent]
+
+proc toString(v: IoErrorCode): string =
+  try: ioErrorMsg(v)
+  except Exception as e: raiseAssert e.msg
+
+proc readJsonType*(dataFile: string, T: type): Result[T, string] =
+  let data = ? readAllFile(dataFile).mapErr(toString)
+
+  let decoded =
+    try:
+      Json.decode(data, T)
+    except SerializationError as e:
+      return err("Failed decoding json data-file: " & e.msg)
+
+  ok(decoded)
+
 # Writing JSON history data
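The relocated `readJsonType` helper is what the new beacon light client tests use to load their vectors. A minimal usage sketch, with a hypothetical file name:

  let res = readJsonType("bootstrap.json", JsonPortalContentTable)
  if res.isOk():
    for k, v in res.get():
      echo k, ": ", v.content_key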
@@ -51,12 +51,12 @@ type
   # this causes them also to be included in a request, which makes perhaps less
   # sense?
   LightClientFinalityUpdateKey* = object
-    optimisticSlot: uint64 ## slot of attested header of the update
-    finalizedSlot: uint64 ## slot of finalized header of the update
+    optimisticSlot*: uint64 ## slot of attested header of the update
+    finalizedSlot*: uint64 ## slot of finalized header of the update

   # TODO: Same remark as for `LightClientFinalityUpdateKey`
   LightClientOptimisticUpdateKey* = object
-    optimisticSlot: uint64 ## slot of attested header of the update
+    optimisticSlot*: uint64 ## slot of attested header of the update

   ContentKey* = object
     case contentType*: ContentType
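Exporting these fields with `*` is what allows the new encoding tests, which live in a separate module, to construct and read the keys directly. A minimal sketch, assuming hypothetical slot values:

  let key = LightClientFinalityUpdateKey(
    optimisticSlot: 1000'u64,  # slot of the attested header
    finalizedSlot: 960'u64)    # slot of the finalized header
  doAssert key.optimisticSlot == 1000'u64  # now readable across modules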
@@ -8,13 +8,187 @@
 {.used.}

 import
-  unittest2,
+  unittest2, stew/byteutils, stew/io2, stew/results,
+  beacon_chain/networking/network_metadata,
   beacon_chain/spec/forks,
   beacon_chain/spec/datatypes/altair,
+  ../../eth_data/[history_data_ssz_e2s, history_data_json_store],
   ../../network/beacon_light_client/beacon_light_client_content,
   "."/[light_client_test_data, beacon_light_client_test_helpers]

+suite "Beacon Light Client Content Encodings - Mainnet":
+  # These test vectors are generated by eth_data_exporter. The content is taken
+  # from mainnet and encoded as it would be transmitted on Portal Network,
+  # including also the content key.
+  const testVectorDir =
+    "./vendor/portal-spec-tests/tests/mainnet/beacon_chain/light_client/"
+
+  let
+    metadata = getMetadataForNetwork("mainnet")
+    genesisState =
+      try:
+        template genesisData(): auto = metadata.genesisData
+        newClone(readSszForkedHashedBeaconState(
+          metadata.cfg,
+          genesisData.toOpenArray(genesisData.low, genesisData.high)))
+      except CatchableError as err:
+        raiseAssert "Invalid baked-in state: " & err.msg
+    genesis_validators_root =
+      getStateField(genesisState[], genesis_validators_root)
+    forkDigests = newClone ForkDigests.init(metadata.cfg, genesis_validators_root)
+
+  test "LightClientBootstrap":
+    const file = testVectorDir & "bootstrap.json"
+    let res = readJsonType(file, JsonPortalContentTable)
+    check res.isOk()
+    let content = res.get()
+
+    for k, v in content:
+      let
+        contentKeyEncoded = v.content_key.hexToSeqByte()
+        contentValueEncoded = v.content_value.hexToSeqByte()
+
+      # Decode content and content key
+      let
+        contentKey = decodeSsz(contentKeyEncoded, ContentKey)
+        contentValue = decodeLightClientBootstrapForked(
+          forkDigests[], contentValueEncoded)
+      check:
+        contentKey.isOk()
+        contentValue.isOk()
+
+      let bootstrap = contentValue.value()
+      let key = contentKey.value()
+
+      withForkyObject(bootstrap):
+        when lcDataFork > LightClientDataFork.None:
+          let blockRoot = hash_tree_root(forkyObject.header.beacon)
+          check blockRoot == key.lightClientBootstrapKey.blockHash
+
+      # re-encode content and content key
+      let encoded = encodeForkedLightClientObject(
+        bootstrap, forkDigests.capella)
+
+      check encoded == contentValueEncoded
+      check encode(key).asSeq() == contentKeyEncoded
+
+  test "LightClientUpdates":
+    const file = testVectorDir & "updates.json"
+    let res = readJsonType(file, JsonPortalContentTable)
+    check res.isOk()
+    let content = res.get()
+
+    for k, v in content:
+      let
+        contentKeyEncoded = v.content_key.hexToSeqByte()
+        contentValueEncoded = v.content_value.hexToSeqByte()
+
+      # Decode content and content key
+      let
+        contentKey = decodeSsz(contentKeyEncoded, ContentKey)
+        contentValue = decodeLightClientUpdatesByRange(
+          forkDigests[], contentValueEncoded)
+      check:
+        contentKey.isOk()
+        contentValue.isOk()
+
+      let updates = contentValue.value()
+      let key = contentKey.value()
+
+      check key.lightClientUpdateKey.count == uint64(updates.len())
+
+      for i, update in updates:
+        withForkyObject(update):
+          when lcDataFork > LightClientDataFork.None:
+            check forkyObject.finalized_header.beacon.slot div
+              (SLOTS_PER_EPOCH * EPOCHS_PER_SYNC_COMMITTEE_PERIOD) ==
+              key.lightClientUpdateKey.startPeriod + uint64(i)
+
+      # re-encode content and content key
+      let encoded = encodeLightClientUpdatesForked(
+        forkDigests.capella, updates.asSeq())
+
+      check encoded == contentValueEncoded
+      check encode(key).asSeq() == contentKeyEncoded
+
+  test "LightClientFinalityUpdate":
+    const file = testVectorDir & "finality_update.json"
+    let res = readJsonType(file, JsonPortalContentTable)
+    check res.isOk()
+    let content = res.get()
+
+    for k, v in content:
+      let
+        contentKeyEncoded = v.content_key.hexToSeqByte()
+        contentValueEncoded = v.content_value.hexToSeqByte()
+
+      # Decode content and content key
+      let
+        contentKey = decodeSsz(contentKeyEncoded, ContentKey)
+        contentValue = decodeLightClientFinalityUpdateForked(
+          forkDigests[], contentValueEncoded)
+
+      check:
+        contentKey.isOk()
+        contentValue.isOk()
+
+      let update = contentValue.value()
+      let key = contentKey.value()
+      withForkyObject(update):
+        when lcDataFork > LightClientDataFork.None:
+          let attestedSlot = forkyObject.attested_header.beacon.slot
+          let finalizedSlot = forkyObject.finalized_header.beacon.slot
+
+          check:
+            attestedSlot == key.lightClientFinalityUpdateKey.optimisticSlot
+            finalizedSlot == key.lightClientFinalityUpdateKey.finalizedSlot
+
+      # re-encode content and content key
+      let encoded = encodeForkedLightClientObject(update, forkDigests.capella)
+
+      check encoded == contentValueEncoded
+      check encode(key).asSeq() == contentKeyEncoded
+
+  test "LightClientOptimisticUpdate":
+    const file = testVectorDir & "optimistic_update.json"
+    let res = readJsonType(file, JsonPortalContentTable)
+    check res.isOk()
+    let content = res.get()
+
+    for k, v in content:
+      let
+        contentKeyEncoded = v.content_key.hexToSeqByte()
+        contentValueEncoded = v.content_value.hexToSeqByte()
+
+      # Decode content and content key
+      let
+        contentKey = decodeSsz(contentKeyEncoded, ContentKey)
+        contentValue = decodeLightClientOptimisticUpdateForked(
+          forkDigests[], contentValueEncoded)
+
+      check:
+        contentKey.isOk()
+        contentValue.isOk()
+
+      let update = contentValue.value()
+      let key = contentKey.value()
+      withForkyObject(update):
+        when lcDataFork > LightClientDataFork.None:
+          let attestedSlot = forkyObject.attested_header.beacon.slot
+
+          check:
+            attestedSlot == key.lightClientOptimisticUpdateKey.optimisticSlot
+
+      # re-encode content and content key
+      let encoded = encodeForkedLightClientObject(update, forkDigests.capella)
+
+      check encoded == contentValueEncoded
+      check encode(key).asSeq() == contentKeyEncoded
+
 suite "Beacon Light Client Content Encodings":
+  # TODO: These tests are less useful now and should instead be altered to
+  # use the consensus test vectors to simply test if encoding / decoding works
+  # fine for the different forks.
   let forkDigests = testForkDigests

   test "LightClientBootstrap":
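Each test above loads a JSON file whose entries follow the `JsonPortalContent` layout, keyed by a stringified number such as a slot. A minimal sketch of that shape, with a hypothetical key and the hex payloads elided:

  var vectors: JsonPortalContentTable
  vectors["1000"] = JsonPortalContent(
    content_key: "0x...",    # hex-encoded SSZ content key
    content_value: "0x...")  # hex-encoded content value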
@@ -17,13 +17,6 @@ import
   ../../../network/history/[history_content, history_network, accumulator],
   ../../test_history_util

-type
-  JsonPortalContent* = object
-    content_key*: string
-    content_value*: string
-
-  JsonPortalContentTable* = Table[string, JsonPortalContent]
-
 suite "History Content Encodings":
   test "HeaderWithProof Building and Encoding":
     const
@@ -525,7 +525,7 @@ when isMainModule:
           content_key: encodedContentKey.asSeq().to0xHex(),
           content_value: encodedContent.to0xHex())

-        contentTable[blockNumber] = portalContent
+        contentTable[$blockNumber] = portalContent
       else:
         # TODO: Deal with writing post merge headers
         error "Not a pre merge header"
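The shared `JsonPortalContentTable` is now keyed by `string` rather than `uint64` (see the type move above), so numeric keys are stringified with Nim's standard `$` operator:

  let blockNumber = 1000'u64   # hypothetical value
  doAssert $blockNumber == "1000"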
@@ -95,7 +95,7 @@ proc exportLCBootstrapUpdate*(
       )

       var contentTable: JsonPortalContentTable
-      contentTable[slot.uint64] = portalContent
+      contentTable[$slot] = portalContent

       writePortalContentToJson(fh, contentTable)
@@ -140,7 +140,7 @@ proc exportLCUpdates*(
       let
         slot = forkyObject.attested_header.beacon.slot
         period = forkyObject.attested_header.beacon.slot.sync_committee_period
-        contentKey = encode(updateContentKey(period.uint64, uint64(1)))
+        contentKey = encode(updateContentKey(period.uint64, count))
         forkDigest = forkDigestAtEpoch(
           forkDigests[], epoch(forkyObject.attested_header.beacon.slot), cfg)
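The exported content key now carries the real number of updates (`count`) instead of a hard-coded `uint64(1)`, matching the invariant asserted by the new `LightClientUpdates` test:

  # From the test suite above: the key's count must equal the decoded updates.
  check key.lightClientUpdateKey.count == uint64(updates.len())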
@@ -156,7 +156,7 @@ proc exportLCUpdates*(
       )

       var contentTable: JsonPortalContentTable
-      contentTable[slot.uint64] = portalContent
+      contentTable[$slot] = portalContent

       writePortalContentToJson(fh, contentTable)
     else:
@@ -218,7 +218,7 @@ proc exportLCFinalityUpdate*(
     )

     var contentTable: JsonPortalContentTable
-    contentTable[optimisticSlot.uint64] = portalContent
+    contentTable[$optimisticSlot] = portalContent

     writePortalContentToJson(fh, contentTable)
@@ -275,6 +275,6 @@ proc exportLCOptimisticUpdate*(
     )

     var contentTable: JsonPortalContentTable
-    contentTable[slot.uint64] = portalContent
+    contentTable[$slot] = portalContent

     writePortalContentToJson(fh, contentTable)
@@ -11,15 +11,11 @@ import
   std/[strutils, os],
   chronicles,
   stew/io2,
+  faststreams,
   json_serialization, json_serialization/std/tables,
-  faststreams
+  ../../eth_data/history_data_json_store

-type
-  JsonPortalContent* = object
-    content_key*: string
-    content_value*: string
-
-  JsonPortalContentTable* = OrderedTable[uint64, JsonPortalContent]
+export history_data_json_store

 proc writePortalContentToJson*(
     fh: OutputStreamHandle, content: JsonPortalContentTable) =
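Because Nim's `export` statement re-exports the named module's public symbols, importers of this helper now see `JsonPortalContent` and `JsonPortalContentTable` without importing `history_data_json_store` themselves. A minimal sketch, with a hypothetical consumer module:

  # consumer.nim (hypothetical)
  import exporter_helpers  # this module, which does `export history_data_json_store`

  var table: JsonPortalContentTable   # visible through the re-export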
vendor/portal-spec-tests (vendored submodule)
@@ -1 +1 @@
-Subproject commit df86ab856782f5e24b983b7ec85c2b811d27bc31
+Subproject commit 529764e1df46f99899127b75097d5162e9ed7ed0