diff --git a/fluffy/network/history/accumulator.nim b/fluffy/network/history/accumulator.nim index 8286be8d8..602e5d07d 100644 --- a/fluffy/network/history/accumulator.nim +++ b/fluffy/network/history/accumulator.nim @@ -122,7 +122,7 @@ func getEpochIndex*(header: BlockHeader): uint64 = ## Get the index for the historical epochs getEpochIndex(blockNumber) -func getHeaderRecordIndex(blockNumber: uint64, epochIndex: uint64): uint64 = +func getHeaderRecordIndex*(blockNumber: uint64, epochIndex: uint64): uint64 = ## Get the relative header index for the epoch accumulator uint64(blockNumber - epochIndex * epochSize) diff --git a/fluffy/tools/eth_data_exporter.nim b/fluffy/tools/eth_data_exporter.nim index 888df07af..dfd2aea76 100644 --- a/fluffy/tools/eth_data_exporter.nim +++ b/fluffy/tools/eth_data_exporter.nim @@ -37,7 +37,7 @@ {.push raises: [].} import - std/[json, typetraits, strutils, strformat, os], + std/[json, typetraits, strutils, strformat, os, uri], confutils, stew/[byteutils, io2], json_serialization, @@ -64,10 +64,19 @@ proc defaultDataDir*(): string = getHomeDir() / dataDir +type + Web3UrlKind* = enum + HttpUrl, WsUrl + + Web3Url* = object + kind*: Web3UrlKind + url*: string + const defaultDataDirDesc = defaultDataDir() defaultBlockFileName = "eth-block-data" defaultAccumulatorFileName = "mainnet-master-accumulator.ssz" + defaultWeb3Url = Web3Url(kind: HttpUrl, url: "http://127.0.0.1:8545") type ExporterCmd* = enum @@ -85,6 +94,8 @@ type "Print the root hash of the master accumulator and of all historical epoch accumulators. 
Requires data generated by exportAccumulatorData command" exportHeaderRange = "Export block headers from an Ethereum JSON RPC Execution endpoint to *.e2s files (unlimited amount)" + exportHeadersWithProof = + "Export block headers with proof from *.e2s headers file and epochAccumulator files" StorageMode* = enum Json, Db @@ -100,6 +111,10 @@ type defaultValue: defaultDataDir() defaultValueDesc: $defaultDataDirDesc name: "data-dir" .}: OutDir + web3Url* {. + desc: "Execution layer JSON-RPC API URL" + defaultValue: defaultWeb3Url + name: "web3-url" .}: Web3Url case cmd* {. command defaultValue: exportBlockData .}: ExporterCmd @@ -169,6 +184,32 @@ type endBlockNumber* {. desc: "Number of the last block header to be exported" name: "end-block" .}: uint64 + of exportHeadersWithProof: + startBlockNumber2* {. + desc: "Number of the first block header to be exported" + name: "start-block" .}: uint64 + endBlockNumber2* {. + desc: "Number of the last block header to be exported" + name: "end-block" .}: uint64 + +proc parseCmdArg*( + T: type Web3Url, p: string): T {.raises: [ConfigurationError].} = + let + url = parseUri(p) + normalizedScheme = url.scheme.toLowerAscii() + + if (normalizedScheme == "http" or normalizedScheme == "https"): + Web3Url(kind: HttpUrl, url: p) + elif (normalizedScheme == "ws" or normalizedScheme == "wss"): + Web3Url(kind: WsUrl, url: p) + else: + raise newException( + ConfigurationError, + "The Web3 URL must specify one of following protocols: http/https/ws/wss" + ) + +proc completeCmdArg*(T: type Web3Url, val: string): seq[string] = + return @[] proc parseCmdArg*(T: type StorageMode, p: string): T {.raises: [ConfigurationError].} = @@ -319,6 +360,34 @@ proc exportBlocks(config: ExporterConf, client: RpcClient) = else: writeBlocksToDb(config, client) +proc newRpcClient(web3Url: Web3Url): RpcClient = + # TODO: I don't like this API. I think the creation of the RPC clients should + # already include the URL. 
And then an optional connect may be necessary + # depending on the protocol. + let client: RpcClient = + case web3Url.kind + of HttpUrl: + newRpcHttpClient() + of WsUrl: + newRpcWebSocketClient() + + client + +proc connectRpcClient( + client: RpcClient, web3Url: Web3Url): + Future[Result[void, string]] {.async.} = + case web3Url.kind + of HttpUrl: + try: + await RpcHttpClient(client).connect(web3Url.url) + except CatchableError as e: + return err(e.msg) + of WsUrl: + try: + await RpcWebSocketClient(client).connect(web3Url.url) + except CatchableError as e: + return err(e.msg) + when isMainModule: {.pop.} let config = ExporterConf.load() @@ -334,19 +403,14 @@ when isMainModule: dir = dataDir, error = ioErrorMsg(res.error) quit 1 - var client: RpcClient - try: - let c = newRpcWebSocketClient() - # TODO: Hardcoded to the default geth ws address. This should become - # a configurable cli option - waitFor c.connect("ws://127.0.0.1:8546") - client = c - except CatchableError as e: - fatal "Error while connecting to data provider", error = e.msg - quit 1 - case config.cmd of ExporterCmd.exportBlockData: + let client = newRpcClient(config.web3Url) + let connectRes = waitFor client.connectRpcClient(config.web3Url) + if connectRes.isErr(): + fatal "Failed connecting to JSON-RPC client", error = connectRes.error + quit 1 + if (config.endBlock < config.startBlock): fatal "Initial block number should be smaller than end block number", startBlock = config.startBlock, @@ -359,6 +423,12 @@ when isMainModule: waitFor client.close() of ExporterCmd.exportEpochHeaders: + let client = newRpcClient(config.web3Url) + let connectRes = waitFor client.connectRpcClient(config.web3Url) + if connectRes.isErr(): + fatal "Failed connecting to JSON-RPC client", error = connectRes.error + quit 1 + proc exportEpochHeaders(file: string, epoch: uint64): Result[void, string] = # Downloading headers from JSON RPC endpoint info "Requesting epoch headers", epoch @@ -547,6 +617,12 @@ when isMainModule: 
echo &"{i.uint64:05} 0x{root.toHex()}" of ExporterCmd.exportHeaderRange: + let client = newRpcClient(config.web3Url) + let connectRes = waitFor client.connectRpcClient(config.web3Url) + if connectRes.isErr(): + fatal "Failed connecting to JSON-RPC client", error = connectRes.error + quit 1 + let startBlockNumber = config.startBlockNumber endBlockNumber = config.endBlockNumber @@ -585,3 +661,89 @@ when isMainModule: if res.isErr(): fatal "Failed exporting headers", error = res.error quit 1 + + of ExporterCmd.exportHeadersWithProof: + let + startBlockNumber = config.startBlockNumber2 + endBlockNumber = config.endBlockNumber2 + + if (endBlockNumber < startBlockNumber): + fatal "Start block number should be smaller than end block number", + startBlockNumber, endBlockNumber + quit 1 + + type + JsonPortalContent = object + content_key*: string + content_value*: string + + JsonPortalContentTable = OrderedTable[uint64, JsonPortalContent] + + proc writePortalContentToJson( + fh: OutputStreamHandle, content: JsonPortalContentTable) = + try: + var writer = JsonWriter[DefaultFlavor].init(fh.s, pretty = true) + writer.writeValue(content) + except IOError as e: + fatal "Error occured while writing to file", error = e.msg + quit 1 + + let file = &"mainnet-headersWithProof-{startBlockNumber:05}-{endBlockNumber:05}.json" + let fh = createAndOpenFile(string config.dataDir, file) + + var contentTable: JsonPortalContentTable + for blockNumber in startBlockNumber..endBlockNumber: + let + epochIndex = getEpochIndex(blockNumber) + epochHeadersFile = + dataDir / &"mainnet-headers-epoch-{epochIndex:05}.e2s" + epochAccumulatorFile = + dataDir / &"mainnet-epoch-accumulator-{epochIndex:05}.ssz" + + let res = readBlockHeaders(epochHeadersFile) + if res.isErr(): + error "Could not read headers epoch file", error = res.error + quit 1 + + let blockHeaders = res.get() + + let epochAccumulatorRes = readEpochAccumulatorCached(epochAccumulatorFile) + if epochAccumulatorRes.isErr(): + error "Could not 
read epoch accumulator file", error = epochAccumulatorRes.error
+        quit 1
+
+      let epochAccumulator = epochAccumulatorRes.get()
+
+      let headerIndex = getHeaderRecordIndex(blockNumber, epochIndex)
+      let header = blockHeaders[headerIndex]
+      if header.isPreMerge():
+        let headerWithProof = buildHeaderWithProof(header, epochAccumulator)
+        if headerWithProof.isErr:
+          error "Error building proof", error = headerWithProof.error
+          quit 1
+
+        let
+          content = headerWithProof.get()
+          contentKey = ContentKey(
+            contentType: blockHeader,
+            blockHeaderKey: BlockKey(blockHash: header.blockHash()))
+          encodedContentKey = history_content.encode(contentKey)
+          encodedContent = SSZ.encode(content)
+
+        let portalContent = JsonPortalContent(
+          content_key: encodedContentKey.asSeq().to0xHex(),
+          content_value: encodedContent.to0xHex())
+
+        contentTable[blockNumber] = portalContent
+      else:
+        # TODO: Deal with writing post merge headers
+        error "Not a pre merge header"
+        quit 1
+
+    writePortalContentToJson(fh, contentTable)
+
+    try:
+      fh.close()
+    except IOError as e:
+      fatal "Error occurred while closing file", error = e.msg
+      quit 1