Add beacon state historical_roots to fluffy (#2085)
- Add historical_roots to the binary for pre-Capella proof verification
- Add a command to the eth_data_exporter tool to export historical_roots to a file
commit 49a199dbc3 (parent a147ff7553)
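As a hedged sketch (not part of the diff): after this change the fluffy binary bakes in both the finished header accumulator and the beacon state's historical_roots, exposed through two loaders. The module path below is an assumption; the loaders are the ones added in the network_metadata hunks further down.

import ./network_metadata  # assumed module path for the new loaders

let
  accumulator = loadAccumulator()          # FinishedAccumulator, embedded via slurp
  historicalRoots = loadHistoricalRoots()  # HashList[Eth2Digest, Limit HISTORICAL_ROOTS_LIMIT]

echo "historical epochs: ", accumulator.historicalEpochs.len
echo "historical roots: ", historicalRoots.len
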
@@ -215,10 +215,7 @@ proc run(config: PortalConf) {.raises: [CatchableError].} =
         )
       else:
         # Get it from binary file containing SSZ encoded accumulator
-        try:
-          SSZ.decode(finishedAccumulator, FinishedAccumulator)
-        except SszError as err:
-          raiseAssert "Invalid baked-in accumulator: " & err.msg
+        loadAccumulator()

     historyNetwork =
       if Network.history in config.networks:

@@ -7,12 +7,17 @@

 {.push raises: [].}

-import std/[sequtils, strutils, os, macros], stew/results, chronos/timer
+import
+  std/[sequtils, strutils, os, macros],
+  results,
+  stew/io2,
+  chronos/timer,
+  beacon_chain/spec/forks,
+  ./network/history/accumulator

 proc loadBootstrapNodes(path: string): seq[string] {.raises: [IOError].} =
   # Read a list of ENR URIs from a file containing a flat list of entries.
   # If the file can't be read, this will raise. This is intentionally.
   splitLines(readFile(path)).filterIt(it.startsWith("enr:")).mapIt(it.strip())

 proc loadCompileTimeBootstrapNodes(path: string): seq[string] =

@@ -23,14 +28,6 @@ proc loadCompileTimeBootstrapNodes(path: string): seq[string] =
   except IOError as err:
     macros.error "Failed to load bootstrap nodes metadata at '" & path & "': " & err.msg

-# Need to use std/io readFile because:
-# https://github.com/status-im/nim-stew/issues/145
-proc loadEncodedAccumulator(path: string): string =
-  try:
-    return readFile(path).string
-  except IOError as err:
-    macros.error "Failed to read finished accumulator at '" & path & "': " & err.msg
-
 const
   # TODO: Change this from our local repo to an eth-client repo if/when this
   # gets created for the Portal networks.

@@ -55,10 +52,25 @@ const
     portalNetworksDir / "testnet0" / "bootstrap_nodes.txt"
   )

-  finishedAccumulator* = loadEncodedAccumulator(
+  finishedAccumulatorSSZ* = slurp(
     portalTestDir / "mainnet" / "history" / "accumulator" / "finished_accumulator.ssz"
   )

+  historicalRootsSSZ* =
+    slurp(portalTestDir / "mainnet" / "beacon_chain" / "historical_roots.ssz")
+
+func loadAccumulator*(): FinishedAccumulator =
+  try:
+    SSZ.decode(finishedAccumulatorSSZ, FinishedAccumulator)
+  except SerializationError as err:
+    raiseAssert "Invalid baked-in accumulator: " & err.msg
+
+func loadHistoricalRoots*(): HashList[Eth2Digest, Limit HISTORICAL_ROOTS_LIMIT] =
+  try:
+    SSZ.decode(historicalRootsSSZ, HashList[Eth2Digest, Limit HISTORICAL_ROOTS_LIMIT])
+  except SerializationError as err:
+    raiseAssert "Invalid baked-in historical_roots: " & err.msg
+
 type
   # TODO: I guess we could use the nimbus ChainConfig but:
   # - Only need some of the values right now

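To make the pre-Capella proof-verification use concrete: each historical_roots entry is the hash_tree_root of one HistoricalBatch covering SLOTS_PER_HISTORICAL_ROOT beacon slots, so a proof for a pre-Capella header is anchored at the entry for its slot. A hedged illustration with placeholder values; the Merkle proof check itself is not part of this diff.

const SLOTS_PER_HISTORICAL_ROOT = 8192'u64  # mainnet preset value

let
  historicalRoots = loadHistoricalRoots()
  slot = 4_700_013'u64                        # placeholder pre-Capella beacon slot
  index = slot div SLOTS_PER_HISTORICAL_ROOT

doAssert index < uint64(historicalRoots.len)
# A header proof for `slot` would then be verified against historicalRoots[index].
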
@@ -35,11 +35,7 @@ suite "History Content Encodings":
         raiseAssert "Invalid epoch accumulator file: " & accumulatorFile
       blockHeadersWithProof = buildHeadersWithProof(blockHeaders, epochAccumulator).valueOr:
         raiseAssert "Could not build headers with proof"
-      accumulator =
-        try:
-          SSZ.decode(finishedAccumulator, FinishedAccumulator)
-        except SszError as err:
-          raiseAssert "Invalid baked-in accumulator: " & err.msg
+      accumulator = loadAccumulator()

     let res = readJsonType(headersWithProofFile, JsonPortalContentTable)
     check res.isOk()

@@ -85,11 +81,7 @@ suite "History Content Encodings":
     const dataFile =
       "./vendor/portal-spec-tests/tests/mainnet/history/headers_with_proof/14764013.json"

-    let accumulator =
-      try:
-        SSZ.decode(finishedAccumulator, FinishedAccumulator)
-      except SszError as err:
-        raiseAssert "Invalid baked-in accumulator: " & err.msg
+    let accumulator = loadAccumulator()

     let res = readJsonType(dataFile, JsonPortalContentTable)
     check res.isOk()

@@ -38,12 +38,7 @@ type ContentVerifierConf* = object
   .}: uint16

 proc checkAccumulators(client: RpcClient) {.async.} =
-  let accumulator =
-    # Get it from binary file containing SSZ encoded accumulator
-    try:
-      SSZ.decode(finishedAccumulator, FinishedAccumulator)
-    except SszError as err:
-      raiseAssert "Invalid baked-in accumulator: " & err.msg
+  let accumulator = loadAccumulator()

   for i, hash in accumulator.historicalEpochs:
     let root = Digest(data: hash)

@@ -688,3 +688,7 @@ when isMainModule:
       waitFor exportLCOptimisticUpdate(
         config.restUrl, string config.dataDir, cfg, forkDigests
       )
+    of BeaconCmd.exportHistoricalRoots:
+      waitFor exportHistoricalRoots(
+        config.restUrl, string config.dataDir, cfg, forkDigests
+      )

@@ -8,19 +8,22 @@
 {.push raises: [].}

 import
+  std/os,
   chronicles,
   chronos,
-  stew/byteutils,
+  stew/[byteutils, io2],
   eth/async_utils,
   beacon_chain/networking/network_metadata,
   beacon_chain/spec/eth2_apis/rest_beacon_client,
   beacon_chain/beacon_clock,
   ../../network/beacon/beacon_content,
   ./exporter_common

 export beacon_clock

-const restRequestsTimeout = 30.seconds
+const
+  largeRequestsTimeout = 60.seconds # Downloading large items such as states.
+  restRequestsTimeout = 30.seconds

 proc getBeaconData*(): (RuntimeConfig, ref ForkDigests, BeaconClock) =
   let

@@ -255,3 +258,41 @@ proc exportLCOptimisticUpdate*(
     contentTable[$slot] = portalContent

     writePortalContentToJson(fh, contentTable)
+
+proc exportHistoricalRoots*(
+    restUrl: string, dataDir: string, cfg: RuntimeConfig, forkDigests: ref ForkDigests
+) {.async.} =
+  let file = dataDir / "historical_roots.ssz"
+  if isFile(file):
+    notice "Not downloading historical_roots, file already exists", file
+    quit 1
+
+  let client = RestClientRef.new(restUrl).valueOr:
+    error "Cannot connect to server", error
+    quit 1
+
+  let state =
+    try:
+      notice "Downloading beacon state"
+      awaitWithTimeout(
+        client.getStateV2(StateIdent.init(StateIdentType.Finalized), cfg),
+        largeRequestsTimeout,
+      ):
+        error "Attempt to download beacon state timed out"
+        quit 1
+    except CatchableError as exc:
+      error "Unable to download beacon state", error = exc.msg
+      quit 1
+
+  if state == nil:
+    error "No beacon state found"
+    quit 1
+
+  let historical_roots = getStateField(state[], historical_roots)
+
+  let res = io2.writeFile(file, SSZ.encode(historical_roots))
+  if res.isErr():
+    error "Failed writing historical_roots to file", file, error = ioErrorMsg(res.error)
+    quit 1
+  else:
+    notice "Succesfully wrote historical_roots to file", file

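For completeness, a hedged sketch of driving the new exporter proc directly rather than through the eth_data_exporter CLI; getBeaconData appears in this file's earlier hunk, and the REST URL and data directory are placeholder values.

import chronos  # for waitFor

let (cfg, forkDigests, _) = getBeaconData()
waitFor exportHistoricalRoots(
  "http://127.0.0.1:5052", "./data", cfg, forkDigests
)
# Writes ./data/historical_roots.ssz; if the file already exists, the proc
# logs a notice and quits, per the implementation above.
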
@@ -69,6 +69,7 @@ type
     exportLCUpdates = "Export Light Client Updates"
     exportLCFinalityUpdate = "Export Light Client Finality Update"
     exportLCOptimisticUpdate = "Export Light Client Optimistic Update"
+    exportHistoricalRoots = "Export historical roots from the beacon state (SSZ format)"

   ExporterConf* = object
     logLevel* {.

@@ -207,6 +208,8 @@ type
         discard
       of exportLCOptimisticUpdate:
         discard
+      of exportHistoricalRoots:
+        discard

 proc parseCmdArg*(T: type Web3Url, p: string): T {.raises: [ValueError].} =
   let

@@ -346,12 +346,7 @@ proc runBackfillLoop(
 ) {.async: (raises: [CancelledError]).} =
   let
     rng = newRng()
-    accumulator =
-      try:
-        SSZ.decode(finishedAccumulator, FinishedAccumulator)
-      except SerializationError as err:
-        raiseAssert "Invalid baked-in accumulator: " & err.msg
+    accumulator = loadAccumulator()

   while true:
     let
       # Grab a random era1 to backfill

@@ -1 +1 @@
-Subproject commit 5c484f38325ede9c4cb212892c57324029aa367c
+Subproject commit 1b3ad4a94c4282023428792694f375d30fac84d6