diff --git a/fluffy/database/content_db_migrate_deprecated.nim b/fluffy/database/content_db_migrate_deprecated.nim
new file mode 100644
index 000000000..e4cf5d1db
--- /dev/null
+++ b/fluffy/database/content_db_migrate_deprecated.nim
@@ -0,0 +1,269 @@
+# Fluffy
+# Copyright (c) 2025 Status Research & Development GmbH
+# Licensed and distributed under either of
+#  * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
+#  * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
+# at your option. This file may not be copied, modified, or distributed except according to those terms.
+
+{.push raises: [].}
+
+import
+  chronicles,
+  metrics,
+  stint,
+  results,
+  stew/ptrops,
+  sqlite3_abi,
+  eth/db/kvstore,
+  eth/db/kvstore_sqlite3,
+  eth/common/[headers_rlp, blocks_rlp, receipts_rlp, transactions_rlp],
+  ../network/history/history_content,
+  ../network/history/history_type_conversions,
+  ../network/state/state_utils,
+  ../network/state/state_content,
+  ../network/history/content/content_values_deprecated
+
+export kvstore_sqlite3
+
+type
+  ContentPair = tuple[contentKey: array[32, byte], contentItem: seq[byte]]
+
+  ContentDBDeprecated* = ref object
+    backend: SqStoreRef
+    kv: KvStoreRef
+    selectAllStmt: SqliteStmt[NoParams, ContentPair]
+    deleteBatchStmt: SqliteStmt[NoParams, void]
+    updateBatchStmt: SqliteStmt[NoParams, void]
+
+func isWithoutProofImpl(content: openArray[byte]): bool =
+  let headerWithProof = decodeSsz(content, BlockHeaderWithProofDeprecated).valueOr:
+    # Leave all other content as it is
+    return false
+
+  if headerWithProof.proof.proofType ==
+      BlockHeaderProofType.historicalHashesAccumulatorProof:
+    false
+  elif headerWithProof.proof.proofType == BlockHeaderProofType.none:
+    true
+  else:
+    false
+
+func isWithoutProof*(
+    ctx: SqliteContext, n: cint, v: SqliteValue
+) {.cdecl, gcsafe, raises: [].} =
+  doAssert(n == 1)
+
+  let
+    ptrs = makeUncheckedArray(v)
+    blob1Len = sqlite3_value_bytes(ptrs[][0])
+
+  if isWithoutProofImpl(makeOpenArray(sqlite3_value_blob(ptrs[][0]), byte, blob1Len)):
+    ctx.sqlite3_result_int(cint 1)
+  else:
+    ctx.sqlite3_result_int(cint 0)
+
+func isWithInvalidEncodingImpl(content: openArray[byte]): bool =
+  let headerWithProof = decodeSsz(content, BlockHeaderWithProofDeprecated).valueOr:
+    # Leave all other content as it is
+    return false
+
+  if headerWithProof.proof.proofType ==
+      BlockHeaderProofType.historicalHashesAccumulatorProof: true else: false
+
+func isWithInvalidEncoding*(
+    ctx: SqliteContext, n: cint, v: SqliteValue
+) {.cdecl, gcsafe, raises: [].} =
+  doAssert(n == 1)
+
+  let
+    ptrs = makeUncheckedArray(v)
+    blobLen = sqlite3_value_bytes(ptrs[][0])
+
+  if isWithInvalidEncodingImpl(
+    makeOpenArray(sqlite3_value_blob(ptrs[][0]), byte, blobLen)
+  ):
+    ctx.sqlite3_result_int(cint 1)
+  else:
+    ctx.sqlite3_result_int(cint 0)
+
+func adjustContentImpl(a: openArray[byte]): seq[byte] =
+  let headerWithProof = decodeSsz(a, BlockHeaderWithProofDeprecated).valueOr:
+    raiseAssert("Should not occur as decoding check is already done")
+
+  let accumulatorProof = headerWithProof.proof.historicalHashesAccumulatorProof
+  let adjustedContent = BlockHeaderWithProof(
+    header: headerWithProof.header,
+    proof: ByteList[MAX_HEADER_PROOF_LENGTH].init(SSZ.encode(accumulatorProof)),
+  )
+
+  SSZ.encode(adjustedContent)
+
+func adjustContent*(
+    ctx: SqliteContext, n: cint, v: SqliteValue
+) {.cdecl, gcsafe, raises: [].} =
+  doAssert(n == 1)
+
+  let
+    ptrs = makeUncheckedArray(v)
+    blobLen = sqlite3_value_bytes(ptrs[][0])
+
+    bytes =
+      adjustContentImpl(makeOpenArray(sqlite3_value_blob(ptrs[][0]), byte, blobLen))
+
+  sqlite3_result_blob(ctx, baseAddr bytes, cint bytes.len, SQLITE_TRANSIENT)
+
+template expectDb(x: auto): untyped =
+  # There's no meaningful error handling implemented for a corrupt database or
+  # full disk - this requires manual intervention, so we'll panic for now
+  x.expect("working database (disk broken/full?)")
+
+## Public calls to create the database and run the migration statements.
+
+proc new*(
+    T: type ContentDBDeprecated,
+    path: string,
+    inMemory = false,
+    manualCheckpoint = false,
+): ContentDBDeprecated =
+  let db =
+    if inMemory:
+      SqStoreRef.init("", "fluffy-test", inMemory = true).expect(
+        "working database (out of memory?)"
+      )
+    else:
+      SqStoreRef.init(path, "fluffy", manualCheckpoint = false).expectDb()
+
+  db.createCustomFunction("isWithoutProof", 1, isWithoutProof).expect(
+    "Custom function isWithoutProof creation OK"
+  )
+
+  db.createCustomFunction("isWithInvalidEncoding", 1, isWithInvalidEncoding).expect(
+    "Custom function isWithInvalidEncoding creation OK"
+  )
+
+  db.createCustomFunction("adjustContent", 1, adjustContent).expect(
+    "Custom function adjustContent creation OK"
+  )
+
+  let selectAllStmt =
+    db.prepareStmt("SELECT key, value FROM kvstore", NoParams, ContentPair)[]
+
+  let deleteBatchStmt = db.prepareStmt(
+    "DELETE FROM kvstore WHERE key IN (SELECT key FROM kvstore WHERE isWithoutProof(value) == 1)",
+    NoParams, void,
+  )[]
+
+  let updateBatchStmt = db.prepareStmt(
+    "UPDATE kvstore SET value = adjustContent(value) WHERE key IN (SELECT key FROM kvstore WHERE isWithInvalidEncoding(value) == 1)",
+    NoParams, void,
+  )[]
+
+  let kvStore = kvStore db.openKvStore().expectDb()
+
+  let contentDb = ContentDBDeprecated(
+    kv: kvStore,
+    backend: db,
+    selectAllStmt: selectAllStmt,
+    deleteBatchStmt: deleteBatchStmt,
+    updateBatchStmt: updateBatchStmt,
+  )
+
+  contentDb
+
+template disposeSafe(s: untyped): untyped =
+  if distinctBase(s) != nil:
+    s.dispose()
+    s = typeof(s)(nil)
+
+proc close*(db: ContentDBDeprecated) =
+  db.selectAllStmt.disposeSafe()
+  db.deleteBatchStmt.disposeSafe()
+  db.updateBatchStmt.disposeSafe()
+  discard db.kv.close()
+
+proc deleteAllHeadersWithoutProof*(db: ContentDBDeprecated) =
+  notice "ContentDB migration: deleting all headers without proof"
+  db.deleteBatchStmt.exec().expectDb()
+  notice "ContentDB migration done"
+
+proc updateAllHeadersWithInvalidEncoding*(db: ContentDBDeprecated) =
+  notice "ContentDB migration: updating all headers with invalid encoding"
+  db.updateBatchStmt.exec().expectDb()
+  notice "ContentDB migration done"
+
+proc iterateAllAndCountTypes*(db: ContentDBDeprecated) =
+  ## Ugly debugging call to print out count of content types in case of issues.
+  var
+    contentPair: ContentPair
+    contentTotal = 0
+    contentOldHeaders = 0
+    contentNewHeaders = 0
+    contentBodies = 0
+    contentReceipts = 0
+    contentAccount = 0
+    contentContract = 0
+    contentCode = 0
+    contentOther = 0
+
+  notice "ContentDB type count: iterating over all content"
+  for e in db.selectAllStmt.exec(contentPair):
+    contentTotal.inc()
+    block:
+      let res = decodeSsz(contentPair.contentItem, BlockHeaderWithProofDeprecated)
+      if res.isOk():
+        if decodeRlp(res.value().header.asSeq(), Header).isOk():
+          contentOldHeaders.inc()
+          continue
+    block:
+      let res = decodeSsz(contentPair.contentItem, BlockHeaderWithProof)
+      if res.isOk():
+        if decodeRlp(res.value().header.asSeq(), Header).isOk():
+          contentNewHeaders.inc()
+          continue
+
+    block:
+      let res = decodeSsz(contentPair.contentItem, PortalReceipts)
+      if res.isOk():
+        if fromPortalReceipts(seq[Receipt], res.value()).isOk():
+          contentReceipts.inc()
+          continue
+
+    block:
+      let res = decodeSsz(contentPair.contentItem, PortalBlockBodyShanghai)
+      if res.isOk():
+        if fromPortalBlockBody(BlockBody, res.value()).isOk():
+          contentBodies.inc()
+          continue
+
+    block:
+      let res = decodeSsz(contentPair.contentItem, PortalBlockBodyLegacy)
+      if res.isOk():
+        if fromPortalBlockBody(BlockBody, res.value()).isOk():
+          contentBodies.inc()
+          continue
+
+    block:
+      let res = decodeSsz(contentPair.contentItem, AccountTrieNodeRetrieval)
+      if res.isOk():
+        if rlpDecodeAccountTrieNode(res.value().node).isOk():
+          contentAccount.inc()
+          continue
+
+    block:
+      let res = decodeSsz(contentPair.contentItem, ContractTrieNodeRetrieval)
+      if res.isOk():
+        if rlpDecodeContractTrieNode(res.value().node).isOk():
+          contentContract.inc()
+          continue
+
+    block:
+      let res = decodeSsz(contentPair.contentItem, ContractCodeRetrieval)
+      if res.isOk():
+        contentCode.inc()
+        continue
+
+    contentOther.inc()
+
+  notice "ContentDB type count done: ",
+    contentTotal, contentOldHeaders, contentNewHeaders, contentReceipts, contentBodies,
+    contentAccount, contentContract, contentCode, contentOther
diff --git a/fluffy/network/history/content/content_values.nim b/fluffy/network/history/content/content_values.nim
index 311ad8e07..a2f93d294 100644
--- a/fluffy/network/history/content/content_values.nim
+++ b/fluffy/network/history/content/content_values.nim
@@ -26,25 +26,15 @@ const
   MAX_WITHDRAWALS_COUNT = MAX_WITHDRAWALS_PER_PAYLOAD
   MAX_EPHEMERAL_HEADER_PAYLOAD = 256
+  MAX_HEADER_PROOF_LENGTH* = 1024
 
 type
   ## BlockHeader types
   HistoricalHashesAccumulatorProof* = array[15, Digest]
 
-  BlockHeaderProofType* = enum
-    none = 0x00 # An SSZ Union None
-    historicalHashesAccumulatorProof = 0x01
-
-  BlockHeaderProof* = object
-    case proofType*: BlockHeaderProofType
-    of none:
-      discard
-    of historicalHashesAccumulatorProof:
-      historicalHashesAccumulatorProof*: HistoricalHashesAccumulatorProof
-
   BlockHeaderWithProof* = object
     header*: ByteList[MAX_HEADER_LENGTH] # RLP data
-    proof*: BlockHeaderProof
+    proof*: ByteList[MAX_HEADER_PROOF_LENGTH]
 
   ## Ephemeral BlockHeader list
   EphemeralBlockHeaderList* =
@@ -73,11 +63,3 @@ type
   ## Receipts types
   ReceiptByteList* = ByteList[MAX_RECEIPT_LENGTH] # RLP data
   PortalReceipts* = List[ReceiptByteList, MAX_TRANSACTION_COUNT]
-
-func init*(T: type BlockHeaderProof, proof: HistoricalHashesAccumulatorProof): T =
-  BlockHeaderProof(
-    proofType: historicalHashesAccumulatorProof, historicalHashesAccumulatorProof: proof
-  )
-
-func init*(T: type BlockHeaderProof): T =
-  BlockHeaderProof(proofType: none)
diff --git a/fluffy/network/history/content/content_values_deprecated.nim b/fluffy/network/history/content/content_values_deprecated.nim
new file mode 100644
index 000000000..96155135f
--- /dev/null
+++ b/fluffy/network/history/content/content_values_deprecated.nim
@@ -0,0 +1,33 @@
+# fluffy
+# Copyright (c) 2025 Status Research & Development GmbH
+# Licensed and distributed under either of
+#  * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
+#  * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
+# at your option. This file may not be copied, modified, or distributed except according to those terms.
+
+{.push raises: [].}
+
+import std/math, nimcrypto/hash, ssz_serialization
+
+export ssz_serialization, hash
+
+const MAX_HEADER_LENGTH = 2 ^ 11 # = 2048
+
+type
+  ## BlockHeader types
+  HistoricalHashesAccumulatorProof* = array[15, Digest]
+
+  BlockHeaderProofType* = enum
+    none = 0x00 # An SSZ Union None
+    historicalHashesAccumulatorProof = 0x01
+
+  BlockHeaderProof* = object
+    case proofType*: BlockHeaderProofType
+    of none:
+      discard
+    of historicalHashesAccumulatorProof:
+      historicalHashesAccumulatorProof*: HistoricalHashesAccumulatorProof
+
+  BlockHeaderWithProofDeprecated* = object
+    header*: ByteList[MAX_HEADER_LENGTH] # RLP data
+    proof*: BlockHeaderProof
diff --git a/fluffy/network/history/history_validation.nim b/fluffy/network/history/history_validation.nim
index aa7d76b36..85cd6806c 100644
--- a/fluffy/network/history/history_validation.nim
+++ b/fluffy/network/history/history_validation.nim
@@ -50,20 +50,23 @@ func validateHeaderBytes*(
   ok(header)
 
 func verifyBlockHeaderProof*(
-    a: FinishedHistoricalHashesAccumulator, header: Header, proof: BlockHeaderProof
+    a: FinishedHistoricalHashesAccumulator,
+    header: Header,
+    proof: ByteList[MAX_HEADER_PROOF_LENGTH],
 ): Result[void, string] =
-  case proof.proofType
-  of BlockHeaderProofType.historicalHashesAccumulatorProof:
-    a.verifyAccumulatorProof(header, proof.historicalHashesAccumulatorProof)
-  of BlockHeaderProofType.none:
-    if header.isPreMerge():
-      err("Pre merge header requires HistoricalHashesAccumulatorProof")
-    else:
-      # TODO:
-      # Add verification post merge based on historical_roots & historical_summaries
-      # Lets for now no longer accept other headers without a proof and the most
-      # recent ones are now a different type.
-      err("Post merge header proofs not yet activated")
+  let timestamp = Moment.init(header.timestamp.int64, Second)
+
+  if isShanghai(chainConfig, timestamp):
+    # TODO: Add verification post merge based on historical_summaries
+    err("Shanghai block verification not implemented")
+  elif isPoSBlock(chainConfig, header.number):
+    # TODO: Add verification post merge based on historical_roots
+    err("PoS block verification not implemented")
+  else:
+    let accumulatorProof = decodeSsz(proof.asSeq(), HistoricalHashesAccumulatorProof).valueOr:
+      return err("Failed decoding accumulator proof: " & error)
+
+    a.verifyAccumulatorProof(header, accumulatorProof)
 
 func validateCanonicalHeaderBytes*(
     bytes: openArray[byte], id: uint64 | Hash32, a: FinishedHistoricalHashesAccumulator
diff --git a/fluffy/network/history/validation/historical_hashes_accumulator.nim b/fluffy/network/history/validation/historical_hashes_accumulator.nim
index a6979aece..0b9c07086 100644
--- a/fluffy/network/history/validation/historical_hashes_accumulator.nim
+++ b/fluffy/network/history/validation/historical_hashes_accumulator.nim
@@ -1,5 +1,5 @@
 # Nimbus
-# Copyright (c) 2022-2024 Status Research & Development GmbH
+# Copyright (c) 2022-2025 Status Research & Development GmbH
 # Licensed and distributed under either of
 #  * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
 #  * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
@@ -198,7 +198,7 @@ func buildHeaderWithProof*(
   ok(
     BlockHeaderWithProof(
-      header: ByteList[2048].init(rlp.encode(header)),
-      proof: BlockHeaderProof.init(proof),
+      header: ByteList[MAX_HEADER_LENGTH].init(rlp.encode(header)),
+      proof: ByteList[MAX_HEADER_PROOF_LENGTH].init(SSZ.encode(proof)),
     )
   )
diff --git a/fluffy/tests/rpc_tests/test_portal_rpc_client.nim b/fluffy/tests/rpc_tests/test_portal_rpc_client.nim
index bb8b9a6b9..466d16f09 100644
--- a/fluffy/tests/rpc_tests/test_portal_rpc_client.nim
+++ b/fluffy/tests/rpc_tests/test_portal_rpc_client.nim
@@ -1,5 +1,5 @@
 # Nimbus - Portal Network
-# Copyright (c) 2021-2024 Status Research & Development GmbH
+# Copyright (c) 2021-2025 Status Research & Development GmbH
 # Licensed and distributed under either of
 #  * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
 #  * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
@@ -63,7 +63,8 @@ proc store*(hn: HistoryNode, blockHash: Hash32, blockHeader: Header) =
   let
     headerRlp = rlp.encode(blockHeader)
     blockHeaderWithProof = BlockHeaderWithProof(
-      header: ByteList[2048].init(headerRlp), proof: BlockHeaderProof.init()
+      header: ByteList[MAX_HEADER_LENGTH].init(headerRlp),
+      proof: ByteList[MAX_HEADER_PROOF_LENGTH].init(@[]),
     )
     contentKeyBytes = blockHeaderContentKey(blockHash).encode()
     contentId = history_content.toContentId(contentKeyBytes)
diff --git a/fluffy/tests/state_network_tests/state_test_helpers.nim b/fluffy/tests/state_network_tests/state_test_helpers.nim
index 86e4b2869..45fa2db05 100644
--- a/fluffy/tests/state_network_tests/state_test_helpers.nim
+++ b/fluffy/tests/state_network_tests/state_test_helpers.nim
@@ -154,7 +154,8 @@ proc mockStateRootLookup*(
     blockHeader = Header(stateRoot: stateRoot)
     headerRlp = rlp.encode(blockHeader)
     blockHeaderWithProof = BlockHeaderWithProof(
-      header: ByteList[2048].init(headerRlp), proof: BlockHeaderProof.init()
+      header: ByteList[MAX_HEADER_LENGTH].init(headerRlp),
+      proof: ByteList[MAX_HEADER_PROOF_LENGTH].init(@[]),
     )
     contentKeyBytes = blockHeaderContentKey(blockNumOrHash).encode()
     contentId = history_content.toContentId(contentKeyBytes)
diff --git a/fluffy/tools/fcli_db.nim b/fluffy/tools/fcli_db.nim
index 93c99f6a8..eed872027 100644
--- a/fluffy/tools/fcli_db.nim
+++ b/fluffy/tools/fcli_db.nim
@@ -1,12 +1,18 @@
 # Fluffy
-# Copyright (c) 2023-2024 Status Research & Development GmbH
+# Copyright (c) 2023-2025 Status Research & Development GmbH
 # Licensed and distributed under either of
 #  * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
 #  * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
 # at your option. This file may not be copied, modified, or distributed except according to those terms.
 
 import
-  chronicles, confutils, stint, eth/common/keys, ../database/content_db, ./benchmark
+  chronicles,
+  confutils,
+  stint,
+  eth/common/keys,
+  ../database/content_db,
+  ../database/content_db_migrate_deprecated,
+  ./benchmark
 
 when defined(posix):
   import system/ansi_c
@@ -29,6 +35,7 @@ type
     generate = "Generate random content into the database, for testing purposes."
    prune = "Prune the ContentDb in case of resizing or selecting a different local id"
    validate = "Validate all the content in the ContentDb"
+    migrate = "Migrate the ContentDb to the new HeaderWithProof format"
 
   DbConf = object
     databaseDir* {.
@@ -60,6 +67,22 @@ type
       .}: bool
     of DbCmd.validate:
       discard
+    of DbCmd.migrate:
+      deleteNoProof* {.
+        desc: "Delete old HeaderWithProof content without proof",
+        defaultValue: true,
+        name: "delete-no-proof"
+      .}: bool
+      updateWithProof* {.
+        desc: "Update old HeaderWithProof pre-merge content with proof",
+        defaultValue: true,
+        name: "update-with-proof"
+      .}: bool
+      checkTypes* {.
+        desc: "Iterate and count all content types",
+        defaultValue: false,
+        name: "debug-check-types"
+      .}: bool
 
 const maxDbSize = 4_000_000_000'u64
 
@@ -153,6 +176,21 @@ proc cmdPrune(conf: DbConf) =
   notice "Functionality not yet implemented"
   quit QuitSuccess
 
+proc cmdMigrate(conf: DbConf) =
+  let db = ContentDBDeprecated.new(conf.databaseDir.string)
+
+  if conf.checkTypes:
+    db.iterateAllAndCountTypes()
+
+  if conf.deleteNoProof:
+    db.deleteAllHeadersWithoutProof()
+
+  if conf.updateWithProof:
+    db.updateAllHeadersWithInvalidEncoding()
+
+  if conf.checkTypes and (conf.deleteNoProof or conf.updateWithProof):
+    db.iterateAllAndCountTypes()
+
 proc controlCHook() {.noconv.} =
   notice "Shutting down after having received SIGINT."
   quit QuitSuccess
@@ -177,3 +215,5 @@ when isMainModule:
     cmdPrune(conf)
   of DbCmd.validate:
     notice "Functionality not yet implemented"
+  of DbCmd.migrate:
+    cmdMigrate(conf)
diff --git a/vendor/portal-spec-tests b/vendor/portal-spec-tests
index 9d48845c2..6f2a2a8a1 160000
--- a/vendor/portal-spec-tests
+++ b/vendor/portal-spec-tests
@@ -1 +1 @@
-Subproject commit 9d48845c2f01ccd95e9d4d6ffe4eb2b231b7cc40
+Subproject commit 6f2a2a8a14225c2acabfd2d2707c5c75edbb6b46