Jordan Hrycaj 221e6c9e2f
Unified database frontend integration (#1670)
* Nimbus folder environment update

details:
* Integrated `CoreDbRef` for the sources in the `nimbus` sub-folder.
* The `nimbus` program does not compile yet as it needs the updates
  in the parallel `stateless` sub-folder.

* Stateless environment update

details:
* Integrated `CoreDbRef` for the sources in the `stateless` sub-folder.
* The `nimbus` program compiles now.

* Premix environment update

details:
* Integrated `CoreDbRef` for the sources in the `premix` sub-folder.

* Fluffy environment update

details:
* Integrated `CoreDbRef` for the sources in the `fluffy` sub-folder.

* Tools environment update

details:
* Integrated `CoreDbRef` for the sources in the `tools` sub-folder.

* Nodocker environment update

details:
* Integrated `CoreDbRef` for the sources in the
  `hive_integration/nodocker` sub-folder.

* Tests environment update

details:
* Integrated `CoreDbRef` for the sources in the `tests` sub-folder.
* The unit tests compile and run cleanly now.

* Generalise `CoreDbRef` to any `select_backend` supported database

why:
  The generalisation was simply missed while working around a compiler
  oddity that was tied to the rocksdb backend used for testing (see the
  illustrative sketch below).
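
example:
  A purely illustrative sketch of a compile-time backend switch in the
  spirit of `select_backend`. The define name, the backend module names
  and the constructor shown here are assumptions, not the actual module
  contents:

    const dbBackendName {.strdefine.} = "rocksdb"

    when dbBackendName == "rocksdb":
      import rocksdb_backend as backend
    elif dbBackendName == "sqlite":
      import sqlite_backend as backend
    else:
      {.error: "unsupported database backend".}

    proc newPersistentCoreDbRef(path: string): CoreDbRef =
      # dispatch to whatever backend the build selected above
      backend.newCoreDbRef(path)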

* Suppress compiler warning for `newChainDB()`

why:
  The compiler raised a warning for this function, which must be wrapped
  so that any `CatchableError` is re-raised as a `Defect` (see the sketch
  below).
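
example:
  A minimal sketch of the wrapping pattern. The parameter list and the
  persistent constructor call are assumptions, for illustration only:

    proc newChainDB(path: string): CoreDbRef {.raises: [Defect].} =
      try:
        newCoreDbRef(LegacyDbPersistent, path)
      except CatchableError as e:
        # re-raise anything recoverable as a fatal error
        raise newException(Defect, "newChainDB: " & e.msg)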

* Split off persistent `CoreDbRef` constructor into separate file

why:
  This allows compiling a memory-only database version without linking
  the backend library.

* Use memory `CoreDbRef` database by default

detail:
  The persistent DB constructor needs to import `db/core_db/persistent`.

why:
  Most tests use the memory DB anyway. This avoids linking `-lrocksdb` or
  any other backend library by default (see the usage sketch below).
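
example:
  A usage sketch for the default versus persistent choice. The relative
  import paths and the persistent constructor arguments are assumptions:

    import ../../nimbus/db/core_db              # in-memory flavours only
    # import ../../nimbus/db/core_db/persistent # needed for an on-disk DB

    let memDb = newCoreDbRef LegacyDbMemory     # default, no `-lrocksdb`
    # let dskDb = newCoreDbRef(LegacyDbPersistent, "data")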

* Fix `toLegacyBackend()` availability check

why:
  The check got garbled after the memory/persistent split.

* Clarify raw access to MPT for snap sync handler

why:
  Logically, `kvt` is not the raw access layer for the hexary trie
  (although this happens to hold for the legacy database).
2023-08-04 12:10:09 +01:00


# Nimbus
# Copyright (c) 2021 Status Research & Development GmbH
# Licensed under either of
# * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE))
# * MIT license ([LICENSE-MIT](LICENSE-MIT))
# at your option.
# This file may not be copied, modified, or distributed except according to
# those terms.
import
  std/[os, json, strutils, times],
  stew/byteutils,
  chronicles,
  ../../../nimbus/core/block_import,
  ../../../nimbus/common,
  ../sim_utils,
  ./extract_consensus_data

proc processChainData(cd: ChainData): TestStatus =
  ## Replay the RLP encoded blocks of `cd` into a fresh in-memory database
  ## and compare the resulting canonical head hash against the expected one.
  let
    networkId = NetworkId(cd.params.config.chainId)
    com = CommonRef.new(newCoreDbRef LegacyDbMemory,
      pruneTrie = false,
      networkId,
      cd.params
    )

  com.initializeEmptyDb()

  for bytes in cd.blocksRlp:
    # Ignore the return value here because good blocks may be
    # interleaved with bad blocks.
    discard importRlpBlock(bytes, com, "consensus_sim")

  let head = com.db.getCanonicalHead()
  let blockHash = "0x" & head.blockHash.data.toHex
  if blockHash == cd.lastBlockHash:
    TestStatus.OK
  else:
    trace "block hash not equal",
      got=blockHash,
      number=head.blockNumber,
      expected=cd.lastBlockHash
    TestStatus.Failed

proc main() =
  ## Walk the BlockchainTests fixtures, run each test unit through the
  ## block import simulator, and print the accumulated statistics.
  const basePath = "tests" / "fixtures" / "eth_tests" / "BlockchainTests"
  var stat: SimStat
  let start = getTime()

  for fileName in walkDirRec(basePath):
    if not fileName.endsWith(".json"):
      continue

    let n = json.parseFile(fileName)
    for name, unit in n:
      if "loopMul" in name:
        inc stat.skipped
        continue

      let cd = extractChainData(unit)
      let status = processChainData(cd)
      stat.inc(name, status)

  let elpd = getTime() - start
  print(stat, elpd, "consensus")

main()