Add beacon state historical_roots to fluffy (#2085)

- Add historical_roots to the binary for pre-Capella proof
verification
- Add a command to the eth_data_exporter tool to export
historical_roots to a file
Kim De Mey 2024-03-19 16:45:32 +01:00 committed by GitHub
parent a147ff7553
commit 49a199dbc3
9 changed files with 80 additions and 41 deletions
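
Below, the SSZ-encoded historical_roots list is baked into the binary
next to the existing master accumulator, and both are decoded through
small helpers instead of inline try/except blocks at each call site. A
minimal sketch of a consumer, assuming the module name of the metadata
file edited below; loadAccumulator and loadHistoricalRoots are the
helpers this commit adds:

    import ./network_metadata # assumed module name for the file defining the helpers

    let
      accumulator = loadAccumulator()          # FinishedAccumulator, decoded from the baked-in SSZ
      historicalRoots = loadHistoricalRoots()  # HashList[Eth2Digest, Limit HISTORICAL_ROOTS_LIMIT]

    # Both helpers raiseAssert on a corrupt baked-in blob, so callers can
    # treat the returned values as always valid.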


@@ -215,10 +215,7 @@ proc run(config: PortalConf) {.raises: [CatchableError].} =
)
else:
# Get it from binary file containing SSZ encoded accumulator
try:
SSZ.decode(finishedAccumulator, FinishedAccumulator)
except SszError as err:
raiseAssert "Invalid baked-in accumulator: " & err.msg
loadAccumulator()
historyNetwork =
if Network.history in config.networks:


@@ -7,12 +7,17 @@
{.push raises: [].}
import std/[sequtils, strutils, os, macros], stew/results, chronos/timer
import
std/[sequtils, strutils, os, macros],
results,
stew/io2,
chronos/timer,
beacon_chain/spec/forks,
./network/history/accumulator
proc loadBootstrapNodes(path: string): seq[string] {.raises: [IOError].} =
# Read a list of ENR URIs from a file containing a flat list of entries.
# If the file can't be read, this will raise. This is intentional.
splitLines(readFile(path)).filterIt(it.startsWith("enr:")).mapIt(it.strip())
proc loadCompileTimeBootstrapNodes(path: string): seq[string] =
@@ -23,14 +28,6 @@ proc loadCompileTimeBootstrapNodes(path: string): seq[string] =
except IOError as err:
macros.error "Failed to load bootstrap nodes metadata at '" & path & "': " & err.msg
# Need to use std/io readFile because:
# https://github.com/status-im/nim-stew/issues/145
proc loadEncodedAccumulator(path: string): string =
try:
return readFile(path).string
except IOError as err:
macros.error "Failed to read finished accumulator at '" & path & "': " & err.msg
const
# TODO: Change this from our local repo to an eth-client repo if/when this
# gets created for the Portal networks.
@@ -55,10 +52,25 @@ const
portalNetworksDir / "testnet0" / "bootstrap_nodes.txt"
)
finishedAccumulator* = loadEncodedAccumulator(
finishedAccumulatorSSZ* = slurp(
portalTestDir / "mainnet" / "history" / "accumulator" / "finished_accumulator.ssz"
)
historicalRootsSSZ* =
slurp(portalTestDir / "mainnet" / "beacon_chain" / "historical_roots.ssz")
func loadAccumulator*(): FinishedAccumulator =
try:
SSZ.decode(finishedAccumulatorSSZ, FinishedAccumulator)
except SerializationError as err:
raiseAssert "Invalid baked-in accumulator: " & err.msg
func loadHistoricalRoots*(): HashList[Eth2Digest, Limit HISTORICAL_ROOTS_LIMIT] =
try:
SSZ.decode(historicalRootsSSZ, HashList[Eth2Digest, Limit HISTORICAL_ROOTS_LIMIT])
except SerializationError as err:
raiseAssert "Invalid baked-in historical_roots: " & err.msg
type
# TODO: I guess we could use the nimbus ChainConfig but:
# - Only need some of the values right now
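
A hedged sketch, not part of this commit, of what the baked-in list is
for: each historical_roots entry is the hash_tree_root of a
HistoricalBatch covering SLOTS_PER_HISTORICAL_ROOT slots, so a
pre-Capella block root can be anchored to the batch its slot falls in
(slot here is a hypothetical input):

    let
      roots = loadHistoricalRoots()
      batchIndex = slot div SLOTS_PER_HISTORICAL_ROOT
    doAssert batchIndex < uint64(roots.len)
    # A Merkle proof for the block root is then verified against
    # roots[batchIndex]; the proof machinery itself is separate.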


@@ -35,11 +35,7 @@ suite "History Content Encodings":
raiseAssert "Invalid epoch accumulator file: " & accumulatorFile
blockHeadersWithProof = buildHeadersWithProof(blockHeaders, epochAccumulator).valueOr:
raiseAssert "Could not build headers with proof"
accumulator =
try:
SSZ.decode(finishedAccumulator, FinishedAccumulator)
except SszError as err:
raiseAssert "Invalid baked-in accumulator: " & err.msg
accumulator = loadAccumulator()
let res = readJsonType(headersWithProofFile, JsonPortalContentTable)
check res.isOk()
@@ -85,11 +81,7 @@ suite "History Content Encodings":
const dataFile =
"./vendor/portal-spec-tests/tests/mainnet/history/headers_with_proof/14764013.json"
let accumulator =
try:
SSZ.decode(finishedAccumulator, FinishedAccumulator)
except SszError as err:
raiseAssert "Invalid baked-in accumulator: " & err.msg
let accumulator = loadAccumulator()
let res = readJsonType(dataFile, JsonPortalContentTable)
check res.isOk()


@@ -38,12 +38,7 @@ type ContentVerifierConf* = object
.}: uint16
proc checkAccumulators(client: RpcClient) {.async.} =
let accumulator =
# Get it from binary file containing SSZ encoded accumulator
try:
SSZ.decode(finishedAccumulator, FinishedAccumulator)
except SszError as err:
raiseAssert "Invalid baked-in accumulator: " & err.msg
let accumulator = loadAccumulator()
for i, hash in accumulator.historicalEpochs:
let root = Digest(data: hash)


@@ -688,3 +688,7 @@ when isMainModule:
waitFor exportLCOptimisticUpdate(
config.restUrl, string config.dataDir, cfg, forkDigests
)
of BeaconCmd.exportHistoricalRoots:
waitFor exportHistoricalRoots(
config.restUrl, string config.dataDir, cfg, forkDigests
)
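
A hypothetical invocation of the new command (the subcommand layout is
assumed from the BeaconCmd enum and the confutils-style ExporterConf
fields restUrl and dataDir; actual flag names may differ):

    ./eth_data_exporter beacon exportHistoricalRoots \
      --rest-url=http://127.0.0.1:5052 --data-dir=./data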


@@ -8,19 +8,22 @@
{.push raises: [].}
import
std/os,
chronicles,
chronos,
stew/byteutils,
stew/[byteutils, io2],
eth/async_utils,
beacon_chain/networking/network_metadata,
beacon_chain/spec/eth2_apis/rest_beacon_client,
beacon_chain/beacon_clock,
../../network/beacon/beacon_content,
./exporter_common
export beacon_clock
const restRequestsTimeout = 30.seconds
const
largeRequestsTimeout = 60.seconds # Downloading large items such as states.
restRequestsTimeout = 30.seconds
proc getBeaconData*(): (RuntimeConfig, ref ForkDigests, BeaconClock) =
let
@@ -255,3 +258,41 @@ proc exportLCOptimisticUpdate*(
contentTable[$slot] = portalContent
writePortalContentToJson(fh, contentTable)
proc exportHistoricalRoots*(
restUrl: string, dataDir: string, cfg: RuntimeConfig, forkDigests: ref ForkDigests
) {.async.} =
let file = dataDir / "historical_roots.ssz"
if isFile(file):
notice "Not downloading historical_roots, file already exists", file
quit 1
let client = RestClientRef.new(restUrl).valueOr:
error "Cannot connect to server", error
quit 1
let state =
try:
notice "Downloading beacon state"
awaitWithTimeout(
client.getStateV2(StateIdent.init(StateIdentType.Finalized), cfg),
largeRequestsTimeout,
):
error "Attempt to download beacon state timed out"
quit 1
except CatchableError as exc:
error "Unable to download beacon state", error = exc.msg
quit 1
if state == nil:
error "No beacon state found"
quit 1
let historical_roots = getStateField(state[], historical_roots)
let res = io2.writeFile(file, SSZ.encode(historical_roots))
if res.isErr():
error "Failed writing historical_roots to file", file, error = ioErrorMsg(res.error)
quit 1
else:
notice "Succesfully wrote historical_roots to file", file


@@ -69,6 +69,7 @@ type
exportLCUpdates = "Export Light Client Updates"
exportLCFinalityUpdate = "Export Light Client Finality Update"
exportLCOptimisticUpdate = "Export Light Client Optimistic Update"
exportHistoricalRoots = "Export historical roots from the beacon state (SSZ format)"
ExporterConf* = object
logLevel* {.
@@ -207,6 +208,8 @@ type
discard
of exportLCOptimisticUpdate:
discard
of exportHistoricalRoots:
discard
proc parseCmdArg*(T: type Web3Url, p: string): T {.raises: [ValueError].} =
let


@@ -346,12 +346,7 @@ proc runBackfillLoop(
) {.async: (raises: [CancelledError]).} =
let
rng = newRng()
accumulator =
try:
SSZ.decode(finishedAccumulator, FinishedAccumulator)
except SerializationError as err:
raiseAssert "Invalid baked-in accumulator: " & err.msg
accumulator = loadAccumulator()
while true:
let
# Grab a random era1 to backfill

@@ -1 +1 @@
Subproject commit 5c484f38325ede9c4cb212892c57324029aa367c
Subproject commit 1b3ad4a94c4282023428792694f375d30fac84d6