Adjust to the newly specified SSZ types for Block Body and Receipts (#1147)

Kim De Mey 2022-07-01 21:51:51 +02:00 committed by GitHub
parent 134fe26997
commit 33015ea4a3
8 changed files with 460 additions and 325 deletions

View File

@@ -10,13 +10,15 @@
{.push raises: [Defect].}
import
std/options,
std/[options, math],
nimcrypto/[sha2, hash], stew/byteutils, stint,
ssz_serialization,
../../common/common_types
export ssz_serialization, common_types, hash
## Types and calls for history network content keys
type
ContentType* = enum
blockHeader = 0x00
@@ -103,3 +105,27 @@ func `$`*(x: ContentKey): string =
res.add(")")
res
## Types for history network content
const
MAX_TRANSACTION_LENGTH* = 2^24 # ~= 16 million
MAX_TRANSACTION_COUNT* = 2^14 # ~= 16k
MAX_RECEIPT_LENGTH* = 2^27 # ~= 134 million
MAX_HEADER_LENGTH = 2^13 # = 8192
MAX_ENCODED_UNCLES_LENGTH* = MAX_HEADER_LENGTH * 2^4 # = 2**17 ~= 131k
type
## Types for content
# TODO: Using `init` on these lists appears to fail because of the constants
# that are used? Strange.
TransactionByteList* = List[byte, MAX_TRANSACTION_LENGTH] # RLP data
Transactions* = List[TransactionByteList, MAX_TRANSACTION_COUNT]
Uncles* = List[byte, MAX_ENCODED_UNCLES_LENGTH] # RLP data
BlockBodySSZ* = object
transactions*: Transactions
uncles*: Uncles
ReceiptByteList* = List[byte, MAX_RECEIPT_LENGTH] # RLP data
ReceiptsSSZ* = List[ReceiptByteList, MAX_TRANSACTION_COUNT]
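A minimal sketch of how these containers round-trip through SSZ, built only on the `add`, `SSZ.encode` and `SSZ.decode` calls used elsewhere in this change; the byte payloads are placeholders rather than real RLP-encoded transactions:
var transactions: Transactions
# `add` returns a bool signalling whether the list limit was respected.
discard transactions.add(TransactionByteList(@[byte 0xc0]))
let body = BlockBodySSZ(
  transactions: transactions,
  uncles: Uncles(@[byte 0xc0])) # 0xc0 is the RLP encoding of an empty list
let encoded = SSZ.encode(body)
# SSZ.decode raises SszError on malformed input, so real callers wrap it.
let decoded = SSZ.decode(encoded, BlockBodySSZ)
doAssert SSZ.encode(decoded) == encoded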

View File

@@ -9,13 +9,13 @@
import
std/options,
stew/results, chronos, chronicles,
eth/[common/eth_types, rlp],
stew/results, chronos, chronicles, nimcrypto/[keccak, hash],
eth/[common/eth_types, rlp, trie, trie/db],
eth/p2p/discoveryv5/[protocol, enr],
../../content_db,
../../../nimbus/[utils, constants],
../../../nimbus/constants,
../wire/[portal_protocol, portal_stream, portal_protocol_config],
./history_content
"."/[history_content, accumulator]
logScope:
topics = "portal_hist"
@@ -23,7 +23,6 @@ logScope:
const
historyProtocolId* = [byte 0x50, 0x0B]
# TODO: Extract common parts from the different networks
type
HistoryNetwork* = ref object
portalProtocol*: PortalProtocol
@@ -34,7 +33,7 @@ type
func setStreamTransport*(n: HistoryNetwork, transport: UtpDiscv5Protocol) =
setTransport(n.portalProtocol.stream, transport)
proc toContentIdHandler(contentKey: ByteList): Option[ContentId] =
func toContentIdHandler(contentKey: ByteList): Option[ContentId] =
some(toContentId(contentKey))
func encodeKey(k: ContentKey): (ByteList, ContentId) =
@@ -61,115 +60,230 @@ func getEncodedKeyForContent(
return encodeKey(contentKey)
proc getContentFromBytes(bytes: openArray[byte], T: type): Result[T, string] =
var rlp = rlpFromBytes(bytes)
func decodeRlp*(bytes: openArray[byte], T: type): Result[T, string] =
try:
let content = rlp.read(T)
ok[T](content)
ok(rlp.decode(bytes, T))
except RlpError as e:
err(e.msg)
proc validateHeaderBytes*(
bytes: openArray[byte], hash: BlockHash): Option[BlockHeader] =
## Calls to go from SSZ decoded types to RLP fully decoded types
let headerResult = getContentFromBytes(bytes, BlockHeader)
func fromPortalBlockBody(
T: type BlockBody, body: BlockBodySSZ): Result[T, string] =
## Get the fully decoded BlockBody from the SSZ-decoded `BlockBodySSZ`.
try:
var transactions: seq[Transaction]
for tx in body.transactions:
transactions.add(rlp.decode(tx.asSeq(), Transaction))
if headerResult.isErr():
error "Failed to decode header ", msg = headerResult.error()
return none(BlockHeader)
let uncles = rlp.decode(body.uncles.asSeq(), seq[BlockHeader])
let header = headerResult.unsafeGet()
ok(BlockBody(transactions: transactions, uncles: uncles))
except RlpError as e:
err("RLP decoding failed: " & e.msg)
func fromReceipts(
T: type seq[Receipt], receipts: ReceiptsSSZ): Result[T, string] =
## Get the fully decoded seq[Receipt] from the SSZ-decoded `ReceiptsSSZ`.
try:
var res: seq[Receipt]
for receipt in receipts:
res.add(rlp.decode(receipt.asSeq(), Receipt))
ok(res)
except RlpError as e:
err("RLP decoding failed: " & e.msg)
## Calls to encode Block types to the SSZ types.
func fromBlockBody(T: type BlockBodySSZ, body: BlockBody): T =
var transactions: Transactions
for tx in body.transactions:
discard transactions.add(TransactionByteList(rlp.encode(tx)))
let uncles = Uncles(rlp.encode(body.uncles))
BlockBodySSZ(transactions: transactions, uncles: uncles)
func fromReceipts(T: type ReceiptsSSZ, receipts: seq[Receipt]): T =
var receiptsSSZ: ReceiptsSSZ
for receipt in receipts:
discard receiptsSSZ.add(ReceiptByteList(rlp.encode(receipt)))
receiptsSSZ
func encode*(blockBody: BlockBody): seq[byte] =
let portalBlockBody = BlockBodySSZ.fromBlockBody(blockBody)
SSZ.encode(portalBlockBody)
func encode*(receipts: seq[Receipt]): seq[byte] =
let portalReceipts = ReceiptsSSZ.fromReceipts(receipts)
SSZ.encode(portalReceipts)
## Calls and helper calls to do validation of block header, body and receipts
# TODO: Failures on validation and perhaps deserialisation should be punished
# if/when peer scoring/banning is added.
proc calcRootHash(items: Transactions | ReceiptsSSZ): Hash256 =
var tr = initHexaryTrie(newMemoryDB())
for i, t in items:
try:
tr.put(rlp.encode(i), t.asSeq())
except RlpError as e:
# TODO: Investigate this RlpError as it doesn't sound like this is
# something that can actually occur.
raiseAssert(e.msg)
return tr.rootHash
template calcTxsRoot*(transactions: Transactions): Hash256 =
calcRootHash(transactions)
template calcReceiptsRoot*(receipts: ReceiptsSSZ): Hash256 =
calcRootHash(receipts)
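For intuition, a hedged sketch (assuming `stew/byteutils` is available for `toHex`): an empty `Transactions` list hashes to the well-known empty-trie root, which is also the txRoot and receiptRoot carried by the empty blocks in the test data further down in this diff.
var emptyTxs: Transactions
# Empty hexary trie root: keccak256 of the RLP-encoded empty node.
doAssert calcTxsRoot(emptyTxs).data.toHex() ==
  "56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421"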
func validateBlockHeaderBytes*(
bytes: openArray[byte], hash: BlockHash): Result[BlockHeader, string] =
let header = ? decodeRlp(bytes, BlockHeader)
if not (header.blockHash() == hash):
# TODO: Header with a different hash than expected, maybe we should punish the
# peer which sent us this?
return none(BlockHeader)
err("Block header hash does not match")
else:
ok(header)
return some(header)
proc validateBlockBody(
body: BlockBodySSZ, txsRoot, ommersHash: KeccakHash):
Result[void, string] =
## Validate the block body against the txRoot and ommersHash from the header.
let calculatedOmmersHash = keccak256.digest(body.uncles.asSeq())
if calculatedOmmersHash != ommersHash:
return err("Invalid ommers hash")
proc validateExpectedBody(
bb: BlockBody,
txRoot: KeccakHash,
ommersHash: KeccakHash): Result[void, string] =
try:
let calculatedTxRoot = calcTxRoot(bb.transactions)
let calculatedOmmersHash = rlpHash(bb.uncles)
let calculatedTxsRoot = calcTxsRoot(body.transactions)
if calculatedTxsRoot != txsRoot:
return err("Invalid transactions root")
if calculatedTxRoot != txRoot:
return err("Unexpected transaction root")
elif calculatedOmmersHash != ommersHash:
return err("Unexpected ommers hash")
else:
return ok()
except RlpError as e:
return err(e.msg)
ok()
proc validateBodyBytes*(
proc validateBlockBodyBytes*(
bytes: openArray[byte], txRoot, ommersHash: KeccakHash):
Result[BlockBody, string] =
## Fully decode the SSZ Block Body and validate it against the header.
let body =
try:
SSZ.decode(bytes, BlockBodySSZ)
except SszError as e:
return err("Failed to decode block body" & e.msg)
? validateBlockBody(body, txRoot, ommersHash)
BlockBody.fromPortalBlockBody(body)
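A hedged round-trip sketch tying the encode and validate calls together; `header` and `body` are assumed to be a matching BlockHeader/BlockBody pair obtained elsewhere:
let bodyBytes = encode(body)  # BlockBody -> SSZ-encoded Portal block body
let validated = validateBlockBodyBytes(
  bodyBytes, header.txRoot, header.ommersHash)
doAssert validated.isOk()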
proc validateReceipts(
receipts: ReceiptsSSZ, receiptsRoot: KeccakHash): Result[void, string] =
let calculatedReceiptsRoot = calcReceiptsRoot(receipts)
if calculatedReceiptsRoot != receiptsRoot:
return err("Unexpected receipt root")
else:
return ok()
proc validateReceiptsBytes*(
bytes: openArray[byte],
txRoot: KeccakHash,
ommersHash: KeccakHash):Option[BlockBody] =
receiptsRoot: KeccakHash): Result[seq[Receipt], string] =
## Fully decode the SSZ Receipts and validate them against the receipts root from the header.
let receipts =
try:
SSZ.decode(bytes, ReceiptsSSZ)
except SszError as e:
return err("Failed to decode receipts" & e.msg)
let bodyResult = getContentFromBytes(bytes, BlockBody)
? validateReceipts(receipts, receiptsRoot)
if bodyResult.isErr():
error "Failed to decode block body", msg = bodyResult.error()
return none(BlockBody)
seq[Receipt].fromReceipts(receipts)
let blockBody = bodyResult.unsafeGet()
## ContentDB getters for specific history network types
let expectedResult = validateExpectedBody(blockBody, txRoot, ommersHash)
proc getSszDecoded(
db: ContentDB, contentId: ContentID,
T: type auto): Option[T] =
let res = db.get(contentId)
if res.isSome():
try:
some(SSZ.decode(res.get(), T))
except SszError as e:
raiseAssert("Stored data should always be serialized correctly: " & e.msg)
else:
none(T)
if expectedResult.isErr():
error "Failed to validate if block body matches header",
msg = expectedResult.error()
proc get(db: ContentDB, T: type BlockHeader, contentId: ContentID): Option[T] =
let contentFromDB = db.get(contentId)
if contentFromDB.isSome():
let res = decodeRlp(contentFromDB.get(), T)
if res.isErr():
raiseAssert(res.error)
else:
some(res.get())
else:
none(T)
# we got block body (bundle of transactions and uncles) which do not match
# header. For now just ignore it, but maybe we should penalize peer
# sending us such data?
return none(BlockBody)
proc get(db: ContentDB, T: type BlockBody, contentId: ContentID): Option[T] =
let contentFromDB = db.getSszDecoded(contentId, BlockBodySSZ)
if contentFromDB.isSome():
let res = T.fromPortalBlockBody(contentFromDB.get())
if res.isErr():
raiseAssert(res.error)
else:
some(res.get())
else:
none(T)
return some(blockBody)
proc get(db: ContentDB, T: type seq[Receipt], contentId: ContentID): Option[T] =
let contentFromDB = db.getSszDecoded(contentId, ReceiptsSSZ)
if contentFromDB.isSome():
let res = T.fromReceipts(contentFromDB.get())
if res.isErr():
raiseAssert(res.error)
else:
some(res.get())
else:
none(T)
proc getContentFromDb(
h: HistoryNetwork, T: type, contentId: ContentId): Option[T] =
if h.portalProtocol.inRange(contentId):
let contentFromDB = h.contentDB.get(contentId)
if contentFromDB.isSome():
var rlp = rlpFromBytes(contentFromDB.unsafeGet())
try:
let content = rlp.read(T)
return some(content)
except CatchableError as e:
# Content in db should always have valid formatting, so this should not
# happen
raiseAssert(e.msg)
else:
return none(T)
h.contentDB.get(T, contentId)
else:
return none(T)
none(T)
## Public API to get the history network specific types, either from database
## or through a lookup on the Portal Network
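A hedged usage sketch of the getters defined below, as seen from a caller inside an async proc; `hn`, `chainId` and `blockHash` are placeholder names:
let blockOpt = await hn.getBlock(chainId, blockHash)
if blockOpt.isSome():
  let (header, body) = blockOpt.get()
  # Receipts validation needs the header, so it is passed along explicitly.
  let receiptsOpt = await hn.getReceipts(chainId, blockHash, header)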
proc getBlockHeader*(
h: HistoryNetwork, chainId: uint16, hash: BlockHash):
Future[Option[BlockHeader]] {.async.} =
let (keyEncoded, contentId) = getEncodedKeyForContent(blockHeader, chainId, hash)
let (keyEncoded, contentId) =
getEncodedKeyForContent(blockHeader, chainId, hash)
let maybeHeaderFromDb = h.getContentFromDb(BlockHeader, contentId)
if maybeHeaderFromDb.isSome():
let headerFromDb = h.getContentFromDb(BlockHeader, contentId)
if headerFromDb.isSome():
info "Fetched block header from database", hash
return maybeHeaderFromDb
return headerFromDb
let maybeHeaderContent = await h.portalProtocol.contentLookup(keyEncoded, contentId)
if maybeHeaderContent.isNone():
let headerContentLookup =
await h.portalProtocol.contentLookup(keyEncoded, contentId)
if headerContentLookup.isNone():
warn "Failed fetching block header from the network", hash
return none(BlockHeader)
let headerContent = maybeHeaderContent.unsafeGet()
let headerContent = headerContentLookup.unsafeGet()
let maybeHeader = validateHeaderBytes(headerContent.content, hash)
if maybeHeader.isSome():
let res = validateBlockHeaderBytes(headerContent.content, hash)
# TODO: If the validation fails, a new request could be done.
if res.isOk():
info "Fetched block header from the network", hash
# Content is valid, we can propagate it to interested peers
h.portalProtocol.triggerPoke(
@@ -180,38 +294,39 @@ proc getBlockHeader*(
h.portalProtocol.storeContent(contentId, headerContent.content)
return maybeHeader
return some(res.get())
else:
return none(BlockHeader)
proc getBlockBody*(
h: HistoryNetwork,
chainId: uint16,
hash: BlockHash,
header: BlockHeader):Future[Option[BlockBody]] {.async.} =
let
(keyEncoded, contentId) = getEncodedKeyForContent(blockBody, chainId, hash)
bodyFromDb = h.getContentFromDb(BlockBody, contentId)
let (keyEncoded, contentId) = getEncodedKeyForContent(blockBody, chainId, hash)
let maybeBodyFromDb = h.getContentFromDb(BlockBody, contentId)
if maybeBodyFromDb.isSome():
if bodyFromDb.isSome():
info "Fetched block body from database", hash
return some[BlockBody](maybeBodyFromDb.unsafeGet())
return some(bodyFromDb.unsafeGet())
let maybeBodyContent = await h.portalProtocol.contentLookup(keyEncoded, contentId)
if maybeBodyContent.isNone():
let bodyContentLookup =
await h.portalProtocol.contentLookup(keyEncoded, contentId)
if bodyContentLookup.isNone():
warn "Failed fetching block body from the network", hash
return none(BlockBody)
let bodyContent = maybeBodyContent.unsafeGet()
let bodyContent = bodyContentLookup.unsafeGet()
let maybeBody = validateBodyBytes(bodyContent.content, header.txRoot, header.ommersHash)
if maybeBody.isNone():
let res = validateBlockBodyBytes(
bodyContent.content, header.txRoot, header.ommersHash)
if res.isErr():
return none(BlockBody)
info "Fetched block body from the network", hash
let blockBody = maybeBody.unsafeGet()
let blockBody = res.get()
# body is valid, propagate it to interested peers
h.portalProtocol.triggerPoke(
@@ -227,96 +342,54 @@ proc getBlockBody*(
proc getBlock*(
h: HistoryNetwork, chainId: uint16, hash: BlockHash):
Future[Option[Block]] {.async.} =
let maybeHeader = await h.getBlockHeader(chainId, hash)
if maybeHeader.isNone():
# we do not have header for given hash,so we would not be able to validate
# that received body really belong to it
let headerOpt = await h.getBlockHeader(chainId, hash)
if headerOpt.isNone():
# Cannot validate block without header.
return none(Block)
let header = maybeHeader.unsafeGet()
let header = headerOpt.unsafeGet()
let maybeBody = await h.getBlockBody(chainId, hash, header)
let bodyOpt = await h.getBlockBody(chainId, hash, header)
if maybeBody.isNone():
if bodyOpt.isNone():
return none(Block)
let body = maybeBody.unsafeGet()
let body = bodyOpt.unsafeGet()
return some[Block]((header, body))
proc validateExpectedReceipts(
receipts: seq[Receipt],
receiptRoot: KeccakHash): Result[void, string] =
try:
let calculatedReceiptRoot = calcReceiptRoot(receipts)
if calculatedReceiptRoot != receiptRoot:
return err("Unexpected receipt root")
else:
return ok()
except RlpError as e:
return err(e.msg)
proc validateReceiptsBytes*(
bytes: openArray[byte],
receiptRoot: KeccakHash): Option[seq[Receipt]] =
let receiptResult = getContentFromBytes(bytes, seq[Receipt])
if receiptResult.isErr():
error "Failed to decode receipts", msg = receiptResult.error()
return none(seq[Receipt])
let receipts = receiptResult.unsafeGet()
let expectedReceiptsResult = validateExpectedReceipts(receipts, receiptRoot)
if expectedReceiptsResult.isErr():
error "Failed to validate if receipts matches header",
msg = expectedReceiptsResult.error()
# we got receipts which do not match
# header. For now just ignore it, but maybe we should penalize peer
# sending us such data?
return none(seq[Receipt])
return some(receipts)
proc getReceipts*(
h: HistoryNetwork,
chainId: uint16,
hash: BlockHash,
header: BlockHeader): Future[Option[seq[Receipt]]] {.async.} =
# header does not have any receipts, return early and do not save empty bytes
# into the database
if header.receiptRoot == BLANK_ROOT_HASH:
# The header has no receipts, return early with empty receipts
return some(newSeq[Receipt]())
let (keyEncoded, contentId) = getEncodedKeyForContent(receipts, chainId, hash)
let maybeReceiptsFromDb = h.getContentFromDb(seq[Receipt], contentId)
let receiptsFromDb = h.getContentFromDb(seq[Receipt], contentId)
if maybeReceiptsFromDb.isSome():
if receiptsFromDb.isSome():
info "Fetched receipts from database", hash
return some(maybeReceiptsFromDb.unsafeGet())
return some(receiptsFromDb.unsafeGet())
let maybeReceiptsContent = await h.portalProtocol.contentLookup(keyEncoded, contentId)
if maybeReceiptsContent.isNone():
let receiptsContentLookup =
await h.portalProtocol.contentLookup(keyEncoded, contentId)
if receiptsContentLookup.isNone():
warn "Failed fetching receipts from the network", hash
return none[seq[Receipt]]()
let receiptsContent = maybeReceiptsContent.unsafeGet()
let receiptsContent = receiptsContentLookup.unsafeGet()
let maybeReceipts = validateReceiptsBytes(receiptsContent.content, header.receiptRoot)
if maybeReceipts.isNone():
let res = validateReceiptsBytes(receiptsContent.content, header.receiptRoot)
if res.isErr():
return none[seq[Receipt]]()
info "Fetched receipts from the network", hash
let receipts = maybeReceipts.unsafeGet()
let receipts = res.get()
# receipts are valid, propagate them to interested peers
h.portalProtocol.triggerPoke(
@@ -329,6 +402,22 @@ proc getReceipts*(
return some(receipts)
func validateEpochAccumulator(bytes: openArray[byte]): bool =
# For now just validate by checking if de-serialization works
try:
discard SSZ.decode(bytes, EpochAccumulator)
true
except SszError:
false
func validateMasterAccumulator(bytes: openArray[byte]): bool =
# For now just validate by checking if de-serialization works
try:
discard SSZ.decode(bytes, Accumulator)
true
except SszError:
false
proc validateContent(content: openArray[byte], contentKey: ByteList): bool =
let keyOpt = contentKey.decode()
@@ -339,7 +428,7 @@ proc validateContent(content: openArray[byte], contentKey: ByteList): bool =
case key.contentType:
of blockHeader:
validateHeaderBytes(content, key.blockHeaderKey.blockHash).isSome()
validateBlockHeaderBytes(content, key.blockHeaderKey.blockHash).isOk()
of blockBody:
true
# TODO: Need to get the header from the db or the network for this. Or how
@@ -347,9 +436,9 @@ proc validateContent(content: openArray[byte], contentKey: ByteList): bool =
of receipts:
true
of epochAccumulator:
true
validateEpochAccumulator(content)
of masterAccumulator:
true
validateMasterAccumulator(content)
proc new*(
T: type HistoryNetwork,

View File

@@ -27,11 +27,13 @@ export results, tables
type
BlockData* = object
rlp: string
header*: string
body*: string
receipts*: string
# TODO:
# uint64, but then it expects a string for some reason.
# Fix in nim-json-serialization or should I overload something here?
number: int
number*: int
BlockDataTable* = Table[string, BlockData]
@@ -64,62 +66,47 @@ func readBlockData(
Result[seq[(ContentKey, seq[byte])], string] =
var res: seq[(ContentKey, seq[byte])]
var rlp =
try:
rlpFromHex(blockData.rlp)
except ValueError as e:
return err("Invalid hex for rlp block data, number " &
$blockData.number & ": " & e.msg)
var blockHash: BlockHash
try:
blockHash.data = hexToByteArray[sizeof(BlockHash)](hash)
except ValueError as e:
return err("Invalid hex for blockhash, number " &
$blockData.number & ": " & e.msg)
# Data is formatted as it gets stored and sent over the
# network. I.e. [header, [txs, uncles], receipts]
if rlp.enterList():
var blockHash: BlockHash
try:
blockHash.data = hexToByteArray[sizeof(BlockHash)](hash)
except ValueError as e:
return err("Invalid hex for blockhash, number " &
$blockData.number & ": " & e.msg)
let contentKeyType =
BlockKey(chainId: 1'u16, blockHash: blockHash)
let contentKeyType =
BlockKey(chainId: 1'u16, blockHash: blockHash)
try:
# If wanted the hash for the corresponding header can be verified
if verify:
if keccak256.digest(blockData.header.hexToSeqByte()) != blockHash:
return err("Data is not matching hash, number " & $blockData.number)
try:
# If wanted the hash for the corresponding header can be verified
if verify:
if keccak256.digest(rlp.rawData()) != blockHash:
return err("Data is not matching hash, number " & $blockData.number)
block:
let contentKey = ContentKey(
contentType: blockHeader,
blockHeaderKey: contentKeyType)
block:
let contentKey = ContentKey(
contentType: blockHeader,
blockHeaderKey: contentKeyType)
res.add((contentKey, blockData.header.hexToSeqByte()))
res.add((contentKey, @(rlp.rawData())))
rlp.skipElem()
block:
let contentKey = ContentKey(
contentType: blockBody,
blockBodyKey: contentKeyType)
block:
let contentKey = ContentKey(
contentType: blockBody,
blockBodyKey: contentKeyType)
res.add((contentKey, blockData.body.hexToSeqByte()))
res.add((contentKey, @(rlp.rawData())))
rlp.skipElem()
block:
let contentKey = ContentKey(
contentType: receipts,
receiptsKey: contentKeyType)
block:
let contentKey = ContentKey(
contentType: receipts,
receiptsKey: contentKeyType)
res.add((contentKey, blockData.receipts.hexToSeqByte()))
res.add((contentKey, @(rlp.rawData())))
rlp.skipElem()
except ValueError as e:
return err("Invalid hex data, number " & $blockData.number & ": " & e.msg)
except RlpError as e:
return err("Invalid rlp data, number " & $blockData.number & ": " & e.msg)
ok(res)
else:
err("Item is not a valid rlp list, number " & $blockData.number)
ok(res)
iterator blocks*(
blockData: BlockDataTable, verify = false): seq[(ContentKey, seq[byte])] =
@@ -134,18 +121,15 @@ iterator blocks*(
func readBlockHeader*(blockData: BlockData): Result[BlockHeader, string] =
var rlp =
try:
rlpFromHex(blockData.rlp)
rlpFromHex(blockData.header)
except ValueError as e:
return err("Invalid hex for rlp block data, number " &
$blockData.number & ": " & e.msg)
if rlp.enterList():
try:
return ok(rlp.read(BlockHeader))
except RlpError as e:
return err("Invalid header, number " & $blockData.number & ": " & e.msg)
else:
return err("Item is not a valid rlp list, number " & $blockData.number)
try:
return ok(rlp.read(BlockHeader))
except RlpError as e:
return err("Invalid header, number " & $blockData.number & ": " & e.msg)
proc getGenesisHeader*(id: NetworkId = MainNet): BlockHeader =
let params =

File diff suppressed because one or more lines are too long

View File

@@ -1,10 +1,14 @@
{
"0x88e96d4537bea4d9c05d12549907b32561d3bf31f45aae734cdc119f13406cb6": {
"rlp": "0xf90218f90211a0d4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d493479405a56e2d52c817161883f50c441c3228cfe54d9fa0d67e4d450343046425ae4271474353857ab860dbc0a1dde64b41b5cd3a532bf3a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421b90100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008503ff80000001821388808455ba422499476574682f76312e302e302f6c696e75782f676f312e342e32a0969b900de27b6ac6a67742365dd65f55a0526c41fd18e1b16f1a1215c2e66f5988539bd4979fef1ec4c2c0c0c0",
"header": "0xf90211a0d4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d493479405a56e2d52c817161883f50c441c3228cfe54d9fa0d67e4d450343046425ae4271474353857ab860dbc0a1dde64b41b5cd3a532bf3a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421b90100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008503ff80000001821388808455ba422499476574682f76312e302e302f6c696e75782f676f312e342e32a0969b900de27b6ac6a67742365dd65f55a0526c41fd18e1b16f1a1215c2e66f5988539bd4979fef1ec4",
"body": "0x0800000008000000c0",
"receipts": "0x",
"number": 1
},
"0xb495a1d7e6663152ae92708da4843337b958146015a2802f4193a410044698c9": {
"rlp": "0xf9021ff90218a088e96d4537bea4d9c05d12549907b32561d3bf31f45aae734cdc119f13406cb6a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d4934794dd2f1e6e498202e86d8f5442af596580a4f03c2ca04943d941637411107494da9ec8bc04359d731bfd08b72b4d0edcbd4cd2ecb341a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421b90100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008503ff00100002821388808455ba4241a0476574682f76312e302e302d30636463373634372f6c696e75782f676f312e34a02f0790c5aa31ab94195e1f6443d645af5b75c46c04fbf9911711198a0ce8fdda88b853fa261a86aa9ec2c0c0c0",
"header": "0xf90218a088e96d4537bea4d9c05d12549907b32561d3bf31f45aae734cdc119f13406cb6a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d4934794dd2f1e6e498202e86d8f5442af596580a4f03c2ca04943d941637411107494da9ec8bc04359d731bfd08b72b4d0edcbd4cd2ecb341a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421b90100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008503ff00100002821388808455ba4241a0476574682f76312e302e302d30636463373634372f6c696e75782f676f312e34a02f0790c5aa31ab94195e1f6443d645af5b75c46c04fbf9911711198a0ce8fdda88b853fa261a86aa9e",
"body": "0x0800000008000000c0",
"receipts": "0x",
"number": 2
}
}

File diff suppressed because one or more lines are too long

View File

@@ -8,14 +8,17 @@
# Tool to download chain history data from a local node and save it to a json
# file or a sqlite database.
# In case of json:
# Data of each block is rlp encoded list of:
# [blockHeader, [block_transactions, block_uncles], block_receipts]
# Block data is stored as it gets transmitted over the wire and as defined here:
# https://github.com/ethereum/portal-network-specs/blob/master/history-network.md#content-keys-and-values
#
# Json file has following format:
# {
# "hexEncodedBlockHash: {
# "rlp": "hex of rlp encoded list [blockHeader, [block_transactions, block_uncles], block_receipts]",
# "number": "block number"
# },
# "header": "the rlp encoded block header as a hex string"
# "body": "the SSZ encoded container of transactions and uncles as a hex string"
# "receipts: "The SSZ encoded list of the receipts as a hex string"
# "number": "block number"
# },
# ...,
# ...,
# }
@@ -25,6 +28,11 @@
# Such format enables queries to quickly find content in range of some node
# which makes it possible to offer content to nodes in bulk.
#
# When using geth as the client to download receipts from, be aware that you
# will have to set the number of blocks for which to maintain the transaction
# index to unlimited if you want access to all transactions/receipts,
# e.g.: `./build/bin/geth --ws --txlookuplimit=0`
#
{.push raises: [Defect].}
@@ -41,6 +49,9 @@ import
../../premix/downloader,
../network/history/history_content
# Need to be selective due to the `Block` type conflict from downloader
from ../network/history/history_network import encode
proc defaultDataDir*(): string =
let dataDir = when defined(windows):
"AppData" / "Roaming" / "EthData"
@@ -89,7 +100,9 @@ type
name: "storage-mode" .}: StorageMode
DataRecord = object
rlp: string
header: string
body: string
receipts: string
number: uint64
proc parseCmdArg*(T: type StorageMode, p: TaintedString): T
@@ -105,11 +118,15 @@ proc parseCmdArg*(T: type StorageMode, p: TaintedString): T
proc completeCmdArg*(T: type StorageMode, val: TaintedString): seq[string] =
return @[]
proc writeBlock(writer: var JsonWriter, blck: Block) {.raises: [IOError, Defect].} =
proc writeBlock(writer: var JsonWriter, blck: Block)
{.raises: [IOError, Defect].} =
let
enc = rlp.encodeList(blck.header, blck.body, blck.receipts)
asHex = to0xHex(enc)
dataRecord = DataRecord(rlp: asHex, number: cast[uint64](blck.header.blockNumber))
dataRecord = DataRecord(
header: rlp.encode(blck.header).to0xHex(),
body: encode(blck.body).to0xHex(),
receipts: encode(blck.receipts).to0xHex(),
number: blck.header.blockNumber.truncate(uint64))
headerHash = to0xHex(rlpHash(blck.header).data)
writer.writeField(headerHash, dataRecord)
@@ -157,7 +174,7 @@ proc writeToJson(config: ExporterConf, client: RpcClient) =
let fh = createAndOpenFile(config)
try:
var writer = JsonWriter[DefaultFlavor].init(fh.s)
var writer = JsonWriter[DefaultFlavor].init(fh.s, pretty = true)
writer.beginRecord()
for i in config.initialBlock..config.endBlock:
let blck = downloadBlock(i, client)
@@ -185,18 +202,23 @@ proc writeToDb(config: ExporterConf, client: RpcClient) =
blck = downloadBlock(i, client)
blockHash = blck.header.blockHash()
contentKeyType = BlockKey(chainId: 1, blockHash: blockHash)
headerKey = encode(ContentKey(contentType: blockHeader, blockHeaderKey: contentKeyType))
bodyKey = encode(ContentKey(contentType: blockBody, blockBodyKey: contentKeyType))
receiptsKey = encode(ContentKey(contentType: receipts, receiptsKey: contentKeyType))
headerKey = encode(ContentKey(
contentType: blockHeader, blockHeaderKey: contentKeyType))
bodyKey = encode(ContentKey(
contentType: blockBody, blockBodyKey: contentKeyType))
receiptsKey = encode(
ContentKey(contentType: receipts, receiptsKey: contentKeyType))
db.put(headerKey.toContentId(), headerKey.asSeq(), rlp.encode[BlockHeader](blck.header))
db.put(headerKey.toContentId(), headerKey.asSeq(), rlp.encode(blck.header))
# No need to seed empty stuff into database
# No need to seed empty lists into database
if len(blck.body.transactions) > 0 or len(blck.body.uncles) > 0:
db.put(bodyKey.toContentId(), bodyKey.asSeq(), rlp.encode[BlockBody](blck.body))
let body = encode(blck.body)
db.put(bodyKey.toContentId(), bodyKey.asSeq(), body)
if len(blck.receipts) > 0:
db.put(receiptsKey.toContentId(), receiptsKey.asSeq(), rlp.encode[seq[Receipt]](blck.receipts))
let receipts = encode(blck.receipts)
db.put(receiptsKey.toContentId(), receiptsKey.asSeq(), receipts)
info "Data successfuly written to db"

View File

@@ -62,12 +62,12 @@ task test_rocksdb, "Run rocksdb tests":
test "tests/db", "test_kvstore_rocksdb", "-d:chronicles_log_level=ERROR -d:unittest2DisableParamFiltering"
task fluffy, "Build fluffy":
buildBinary "fluffy", "fluffy/", "-d:chronicles_log_level=TRACE -d:chronosStrictException"
buildBinary "fluffy", "fluffy/", "-d:chronicles_log_level=TRACE -d:chronosStrictException -d:PREFER_BLST_SHA256=false"
task fluffy_tools, "Build fluffy tools":
buildBinary "portalcli", "fluffy/tools/", "-d:chronicles_log_level=TRACE -d:chronosStrictException"
buildBinary "blockwalk", "fluffy/tools/", "-d:chronicles_log_level=TRACE -d:chronosStrictException"
buildBinary "eth_data_exporter", "fluffy/tools/", "-d:chronicles_log_level=TRACE -d:chronosStrictException"
buildBinary "eth_data_exporter", "fluffy/tools/", "-d:chronicles_log_level=TRACE -d:chronosStrictException -d:PREFER_BLST_SHA256=false"
task utp_test_app, "Build uTP test app":
buildBinary "utp_test_app", "fluffy/tools/utp_testing/", "-d:chronicles_log_level=TRACE -d:chronosStrictException"