From 49a199dbc38713dd545ee51c8b624c4d1e010b42 Mon Sep 17 00:00:00 2001 From: Kim De Mey Date: Tue, 19 Mar 2024 16:45:32 +0100 Subject: [PATCH] Add beacon state historical_roots to fluffy (#2085) - Add historical_roots in the binary for proof verification pre-capella - Add command to export historical_roots to file in eth_data_exporter tool --- fluffy/fluffy.nim | 5 +- fluffy/network_metadata.nim | 34 +++++++++----- .../mainnet/test_history_content.nim | 12 +---- fluffy/tools/content_verifier.nim | 7 +-- fluffy/tools/eth_data_exporter.nim | 4 ++ .../eth_data_exporter/cl_data_exporter.nim | 47 +++++++++++++++++-- .../tools/eth_data_exporter/exporter_conf.nim | 3 ++ .../portal_bridge/portal_bridge_history.nim | 7 +-- vendor/portal-spec-tests | 2 +- 9 files changed, 80 insertions(+), 41 deletions(-) diff --git a/fluffy/fluffy.nim b/fluffy/fluffy.nim index c694ca2f3..7439ee1f0 100644 --- a/fluffy/fluffy.nim +++ b/fluffy/fluffy.nim @@ -215,10 +215,7 @@ proc run(config: PortalConf) {.raises: [CatchableError].} = ) else: # Get it from binary file containing SSZ encoded accumulator - try: - SSZ.decode(finishedAccumulator, FinishedAccumulator) - except SszError as err: - raiseAssert "Invalid baked-in accumulator: " & err.msg + loadAccumulator() historyNetwork = if Network.history in config.networks: diff --git a/fluffy/network_metadata.nim b/fluffy/network_metadata.nim index 2b2ccba80..a1aad77aa 100644 --- a/fluffy/network_metadata.nim +++ b/fluffy/network_metadata.nim @@ -7,12 +7,17 @@ {.push raises: [].} -import std/[sequtils, strutils, os, macros], stew/results, chronos/timer +import + std/[sequtils, strutils, os, macros], + results, + stew/io2, + chronos/timer, + beacon_chain/spec/forks, + ./network/history/accumulator proc loadBootstrapNodes(path: string): seq[string] {.raises: [IOError].} = # Read a list of ENR URIs from a file containing a flat list of entries. # If the file can't be read, this will raise. This is intentionally. 
- splitLines(readFile(path)).filterIt(it.startsWith("enr:")).mapIt(it.strip()) proc loadCompileTimeBootstrapNodes(path: string): seq[string] = @@ -23,14 +28,6 @@ proc loadCompileTimeBootstrapNodes(path: string): seq[string] = except IOError as err: macros.error "Failed to load bootstrap nodes metadata at '" & path & "': " & err.msg -# Need to use std/io readFile because: -# https://github.com/status-im/nim-stew/issues/145 -proc loadEncodedAccumulator(path: string): string = - try: - return readFile(path).string - except IOError as err: - macros.error "Failed to read finished accumulator at '" & path & "': " & err.msg - const # TODO: Change this from our local repo to an eth-client repo if/when this # gets created for the Portal networks. @@ -55,10 +52,25 @@ const portalNetworksDir / "testnet0" / "bootstrap_nodes.txt" ) - finishedAccumulator* = loadEncodedAccumulator( + finishedAccumulatorSSZ* = slurp( portalTestDir / "mainnet" / "history" / "accumulator" / "finished_accumulator.ssz" ) + historicalRootsSSZ* = + slurp(portalTestDir / "mainnet" / "beacon_chain" / "historical_roots.ssz") + +func loadAccumulator*(): FinishedAccumulator = + try: + SSZ.decode(finishedAccumulatorSSZ, FinishedAccumulator) + except SerializationError as err: + raiseAssert "Invalid baked-in accumulator: " & err.msg + +func loadHistoricalRoots*(): HashList[Eth2Digest, Limit HISTORICAL_ROOTS_LIMIT] = + try: + SSZ.decode(historicalRootsSSZ, HashList[Eth2Digest, Limit HISTORICAL_ROOTS_LIMIT]) + except SerializationError as err: + raiseAssert "Invalid baked-in historical_roots: " & err.msg + type # TODO: I guess we could use the nimbus ChainConfig but: # - Only need some of the values right now diff --git a/fluffy/tests/portal_spec_tests/mainnet/test_history_content.nim b/fluffy/tests/portal_spec_tests/mainnet/test_history_content.nim index a847b3163..bff82725b 100644 --- a/fluffy/tests/portal_spec_tests/mainnet/test_history_content.nim +++ 
b/fluffy/tests/portal_spec_tests/mainnet/test_history_content.nim @@ -35,11 +35,7 @@ suite "History Content Encodings": raiseAssert "Invalid epoch accumulator file: " & accumulatorFile blockHeadersWithProof = buildHeadersWithProof(blockHeaders, epochAccumulator).valueOr: raiseAssert "Could not build headers with proof" - accumulator = - try: - SSZ.decode(finishedAccumulator, FinishedAccumulator) - except SszError as err: - raiseAssert "Invalid baked-in accumulator: " & err.msg + accumulator = loadAccumulator() let res = readJsonType(headersWithProofFile, JsonPortalContentTable) check res.isOk() @@ -85,11 +81,7 @@ suite "History Content Encodings": const dataFile = "./vendor/portal-spec-tests/tests/mainnet/history/headers_with_proof/14764013.json" - let accumulator = - try: - SSZ.decode(finishedAccumulator, FinishedAccumulator) - except SszError as err: - raiseAssert "Invalid baked-in accumulator: " & err.msg + let accumulator = loadAccumulator() let res = readJsonType(dataFile, JsonPortalContentTable) check res.isOk() diff --git a/fluffy/tools/content_verifier.nim b/fluffy/tools/content_verifier.nim index e3e5a42c9..6fa4090c1 100644 --- a/fluffy/tools/content_verifier.nim +++ b/fluffy/tools/content_verifier.nim @@ -38,12 +38,7 @@ type ContentVerifierConf* = object .}: uint16 proc checkAccumulators(client: RpcClient) {.async.} = - let accumulator = - # Get it from binary file containing SSZ encoded accumulator - try: - SSZ.decode(finishedAccumulator, FinishedAccumulator) - except SszError as err: - raiseAssert "Invalid baked-in accumulator: " & err.msg + let accumulator = loadAccumulator() for i, hash in accumulator.historicalEpochs: let root = Digest(data: hash) diff --git a/fluffy/tools/eth_data_exporter.nim b/fluffy/tools/eth_data_exporter.nim index c440e4368..8f2b9f089 100644 --- a/fluffy/tools/eth_data_exporter.nim +++ b/fluffy/tools/eth_data_exporter.nim @@ -688,3 +688,7 @@ when isMainModule: waitFor exportLCOptimisticUpdate( config.restUrl, string 
config.dataDir, cfg, forkDigests ) + of BeaconCmd.exportHistoricalRoots: + waitFor exportHistoricalRoots( + config.restUrl, string config.dataDir, cfg, forkDigests + ) diff --git a/fluffy/tools/eth_data_exporter/cl_data_exporter.nim b/fluffy/tools/eth_data_exporter/cl_data_exporter.nim index be03a7532..23136ebda 100644 --- a/fluffy/tools/eth_data_exporter/cl_data_exporter.nim +++ b/fluffy/tools/eth_data_exporter/cl_data_exporter.nim @@ -8,19 +8,22 @@ {.push raises: [].} import + std/os, chronicles, chronos, - stew/byteutils, + stew/[byteutils, io2], eth/async_utils, beacon_chain/networking/network_metadata, beacon_chain/spec/eth2_apis/rest_beacon_client, beacon_chain/beacon_clock, ../../network/beacon/beacon_content, ./exporter_common export beacon_clock -const restRequestsTimeout = 30.seconds +const + largeRequestsTimeout = 60.seconds # Downloading large items such as states. + restRequestsTimeout = 30.seconds proc getBeaconData*(): (RuntimeConfig, ref ForkDigests, BeaconClock) = let @@ -255,3 +258,41 @@ proc exportLCOptimisticUpdate*( contentTable[$slot] = portalContent writePortalContentToJson(fh, contentTable) + +proc exportHistoricalRoots*( + restUrl: string, dataDir: string, cfg: RuntimeConfig, forkDigests: ref ForkDigests +) {.async.} = + let file = dataDir / "historical_roots.ssz" + if isFile(file): + notice "Not downloading historical_roots, file already exists", file + quit 1 + + let client = RestClientRef.new(restUrl).valueOr: + error "Cannot connect to server", error + quit 1 + + let state = + try: + notice "Downloading beacon state" + awaitWithTimeout( + client.getStateV2(StateIdent.init(StateIdentType.Finalized), cfg), + largeRequestsTimeout, + ): + error "Attempt to download beacon state timed out" + quit 1 + except CatchableError as exc: + error "Unable to download beacon state", error = exc.msg + quit 1 + + if state == nil: + error "No beacon state found" + quit 1 + + let historical_roots =
getStateField(state[], historical_roots) + + let res = io2.writeFile(file, SSZ.encode(historical_roots)) + if res.isErr(): + error "Failed writing historical_roots to file", file, error = ioErrorMsg(res.error) + quit 1 + else: + notice "Succesfully wrote historical_roots to file", file diff --git a/fluffy/tools/eth_data_exporter/exporter_conf.nim b/fluffy/tools/eth_data_exporter/exporter_conf.nim index 8a652ac07..dfd7da1fb 100644 --- a/fluffy/tools/eth_data_exporter/exporter_conf.nim +++ b/fluffy/tools/eth_data_exporter/exporter_conf.nim @@ -69,6 +69,7 @@ type exportLCUpdates = "Export Light Client Updates" exportLCFinalityUpdate = "Export Light Client Finality Update" exportLCOptimisticUpdate = "Export Light Client Optimistic Update" + exportHistoricalRoots = "Export historical roots from the beacon state (SSZ format)" ExporterConf* = object logLevel* {. @@ -207,6 +208,8 @@ type discard of exportLCOptimisticUpdate: discard + of exportHistoricalRoots: + discard proc parseCmdArg*(T: type Web3Url, p: string): T {.raises: [ValueError].} = let diff --git a/fluffy/tools/portal_bridge/portal_bridge_history.nim b/fluffy/tools/portal_bridge/portal_bridge_history.nim index 317910b55..9bf875303 100644 --- a/fluffy/tools/portal_bridge/portal_bridge_history.nim +++ b/fluffy/tools/portal_bridge/portal_bridge_history.nim @@ -346,12 +346,7 @@ proc runBackfillLoop( ) {.async: (raises: [CancelledError]).} = let rng = newRng() - accumulator = - try: - SSZ.decode(finishedAccumulator, FinishedAccumulator) - except SerializationError as err: - raiseAssert "Invalid baked-in accumulator: " & err.msg - + accumulator = loadAccumulator() while true: let # Grab a random era1 to backfill diff --git a/vendor/portal-spec-tests b/vendor/portal-spec-tests index 5c484f383..1b3ad4a94 160000 --- a/vendor/portal-spec-tests +++ b/vendor/portal-spec-tests @@ -1 +1 @@ -Subproject commit 5c484f38325ede9c4cb212892c57324029aa367c +Subproject commit 1b3ad4a94c4282023428792694f375d30fac84d6