# Nimbus
# Copyright (c) 2023-2024 Status Research & Development GmbH
# Licensed under either of
#  * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or
#    http://www.apache.org/licenses/LICENSE-2.0)
#  * MIT license ([LICENSE-MIT](LICENSE-MIT) or
#    http://opensource.org/licenses/MIT)
# at your option. This file may not be copied, modified, or
# distributed except according to those terms.

## Testing `CoreDB` wrapper implementation
import
|
2024-03-20 07:35:38 +00:00
|
|
|
std/[os, strformat, strutils],
|
2023-10-03 11:56:13 +00:00
|
|
|
chronicles,
|
|
|
|
eth/common,
|
|
|
|
results,
|
|
|
|
unittest2,
|
2024-02-15 02:57:05 +00:00
|
|
|
../nimbus/db/core_db/persistent,
|
|
|
|
../nimbus/db/ledger,
|
|
|
|
../nimbus/core/chain,
|
2023-10-03 11:56:13 +00:00
|
|
|
./replay/pp,
|
2023-12-12 17:47:41 +00:00
|
|
|
./test_coredb/[coredb_test_xx, test_chainsync, test_helpers]
|
2023-10-03 11:56:13 +00:00
|
|
|
|
|
|
|
const
  # If `true`, this compile time option set up `unittest2` for manual parsing
  unittest2DisableParamFiltering {.booldefine.} = false

  # Directory search lists used by `findFilePath()` to locate test data:
  # candidate bases relative to the current working directory, repository
  # folder names, and sub-folders holding the capture/replay files.
  baseDir = [".", "..", ".."/"..", $DirSep]
  repoDir = [".", "tests", "nimbus-eth1-blobs"]
  subDir = ["replay", "test_coredb", "custom-network", "main-era1"]

  # Reference file for finding some database directory base
  sampleDirRefFile = "coredb_test_xx.nim"

  # Fallback database backend and ledger flavour when neither the caller
  # nor the capture specs request a specific one.
  dbTypeDefault = AristoDbMemory
  ldgTypeDefault = LedgerCache
let
  # Standard test sample
  # NOTE(review): `mainTest0m`/`mainTest2r` are declared in
  # `coredb_test_xx` — presumably a small in-memory sample and a larger
  # persistent one; confirm against that module.
  memorySampleDefault = mainTest0m
  persistentSampleDefault = mainTest2r
# ------------------------------------------------------------------------------
|
|
|
|
# Helpers
|
|
|
|
# ------------------------------------------------------------------------------
|
|
|
|
|
2024-02-12 19:37:00 +00:00
|
|
|
when unittest2DisableParamFiltering:
  import algorithm

  # Filter out local options and pass on the rest to `unittest2`
  proc cmdLineConfig(): tuple[samples: seq[CaptureSpecs]] {.used.} =
    ## This helper allows to pass additional command line options to the
    ## unit test.
    ##
    ## Example:
    ## ::
    ##   nim c -r ...\
    ##    -d:unittest2DisableParamFiltering \
    ##    ./tests/test_coredb.nim \
    ##       --output-level=VERBOSE \
    ##       --sample=goerli-lp,goerli-ar
    ## or
    ## ::
    ##   nim c ... -d:unittest2DisableParamFiltering ./tests/test_coredb.nim
    ##   ./tests/test_coredb.out --output-level=VERBOSE --sample=goerli-ar
    ##   ...
    ##
    ## At the moment, only the `--sample=` additional option is provided.
    ##
    # Define sample list from the command line (if any)
    const optPfx = "--sample=" # Custom option with sample list

    # Print a usage hint plus the list of known sample names, then abort.
    proc parseError(s = "") =
      let msg = if 0 < s.len: "Unsupported \"" & optPfx & "\" list item: " & s
                else: "Empty \"" & optPfx & " list"
      echo "*** ", getAppFilename().splitFile.name, ": ", msg
      echo "    Available: ", allSamples.mapIt(it.name).sorted.join(" ")
      quit(99)

    var other: seq[string] # Options for manual parsing by `unittest2`

    for arg in commandLineParams():
      # Split off `--sample=a,b,c` arguments; anything else is forwarded.
      if optPfx.len <= arg.len and arg[0 ..< optPfx.len] == optPfx:
        for w in arg[optPfx.len ..< arg.len].split(",").mapIt(it.strip):
          block findSample:
            # Case-insensitive lookup against the known sample catalogue
            for sample in allSamples:
              if w.cmpIgnoreCase(sample.name) == 0:
                result.samples.add sample
                break findSample
            w.parseError()
        if result.samples.len == 0:
          parseError()
      else:
        other.add arg

    # Setup `unittest2` with the remaining (non-local) options
    other.parseParameters

else:
  # Kill the compilation process iff the directive `cmdLineConfig()` is used
  template cmdLineConfig(): untyped {.used.} =
    {.error: "cmdLineConfig() needs compiler option "&
      " -d:unittest2DisableParamFiltering".}
2023-10-03 11:56:13 +00:00
|
|
|
proc findFilePath(
    file: string;
    baseDir: openArray[string] = baseDir;
    repoDir: openArray[string] = repoDir;
    subDir: openArray[string] = subDir;
      ): Result[string,void] =
  ## Locate `file` by probing the cartesian product of the given base,
  ## repository and sub-directory search lists. Thin forwarder to the
  ## shared `findFilePathHelper()` from `test_helpers`.
  result = findFilePathHelper(file, baseDir, repoDir, subDir)
2023-10-03 11:56:13 +00:00
|
|
|
|
|
|
|
proc getTmpDir(sampleDir = sampleDirRefFile): string =
  ## Return the directory that contains the sample reference file; used as
  ## the base for scratch database directories.
  let refFile = sampleDir.findFilePath.value
  splitFile(refFile).dir
2024-02-12 19:37:00 +00:00
|
|
|
|
2023-10-03 11:56:13 +00:00
|
|
|
proc flushDbDir(s: string) =
  ## Best-effort cleanup of a scratch database directory. Removes the
  ## `<s>/nimbus` data directory if it holds a `data` folder, then removes
  ## `<s>` itself — but only when it ended up empty. All removal errors are
  ## swallowed (typically stale file locks on Windows).
  if s.len == 0:
    return

  let dataDir = s / "nimbus"
  if (dataDir / "data").dirExists:
    # Typically under Windows: there might be stale file locks.
    try:
      dataDir.removeDir
    except CatchableError:
      discard

  # Only remove the top-level directory when nothing is left inside it.
  var topIsEmpty = true
  for _ in s.walkDir:
    topIsEmpty = false
    break
  if topIsEmpty:
    try:
      s.removeDir
    except CatchableError:
      discard
|
|
|
|
# ----------------
|
|
|
|
|
|
|
|
proc setTraceLevel {.used.} =
  ## Raise chronicles logging to TRACE — a no-op unless the binary was
  ## built with `-d:chronicles_runtime_filtering` and logging enabled.
  when defined(chronicles_runtime_filtering) and loggingEnabled:
    setLogLevel(LogLevel.TRACE)
  else:
    discard
2023-12-12 19:12:56 +00:00
|
|
|
proc setDebugLevel {.used.} =
  ## Raise chronicles logging to DEBUG — a no-op unless the binary was
  ## built with `-d:chronicles_runtime_filtering` and logging enabled.
  when defined(chronicles_runtime_filtering) and loggingEnabled:
    setLogLevel(LogLevel.DEBUG)
  else:
    discard
2023-10-03 11:56:13 +00:00
|
|
|
proc setErrorLevel {.used.} =
  ## Restrict chronicles logging to ERROR — a no-op unless the binary was
  ## built with `-d:chronicles_runtime_filtering` and logging enabled.
  when defined(chronicles_runtime_filtering) and loggingEnabled:
    setLogLevel(LogLevel.ERROR)
  else:
    discard
|
|
|
# ------------------------------------------------------------------------------
|
|
|
|
# Private functions
|
|
|
|
# ------------------------------------------------------------------------------
|
|
|
|
|
2023-12-12 19:12:56 +00:00
|
|
|
proc initRunnerDB(
    path: string;
    specs: CaptureSpecs;
    dbType: CoreDbType;
    ldgType: LedgerType;
      ): CommonRef =
  ## Create and initialise a `CommonRef` backed by the requested `CoreDb`
  ## flavour for running a test sample.
  ##
  ## * `path`    -- on-disk location, only used by the RocksDB backend
  ## * `specs`   -- capture sample descriptor; provides network parameters
  ## * `dbType`  -- backend selector (memory, rocks, or void only)
  ## * `ldgType` -- ledger implementation to attach
  ##
  ## Raises an assertion defect for any other `dbType` value and when a
  ## custom genesis file cannot be loaded.
  let coreDB =
    # Resolve for static `dbType`
    case dbType:
    of AristoDbMemory: AristoDbMemory.newCoreDbRef()
    of AristoDbRocks: AristoDbRocks.newCoreDbRef path
    of AristoDbVoid: AristoDbVoid.newCoreDbRef()
    else: raiseAssert "Oops"

  # Manual debug aid: flip `when false` to `when true` to trace API calls
  # while the database is being initialised.
  when false: # or true:
    setDebugLevel()
    coreDB.trackLegaApi = true
    coreDB.trackNewApi = true

  var
    params: NetworkParams
    networkId: NetworkId
  if specs.builtIn:
    # Built-in network: derive parameters from the well-known network id.
    networkId = specs.network
    params = networkId.networkParams()
  else:
    # Custom network: load genesis/config from file and take the chain id.
    doAssert specs.genesis.findFilePath.value.loadNetworkParams(params)
    networkId = params.config.chainId.NetworkId

  result = CommonRef.new(
    db = coreDB,
    networkId = networkId,
    params = params,
    ldgType = ldgType)

  result.initializeEmptyDb

  # Quieten logging and disable any API tracking left over from debugging
  # so individual tests can opt in selectively.
  setErrorLevel()
  coreDB.trackLegaApi = false
  coreDB.trackNewApi = false
  coreDB.trackLedgerApi =false
2023-10-03 11:56:13 +00:00
|
|
|
# ------------------------------------------------------------------------------
|
|
|
|
# Test Runners: accounts and accounts storages
|
|
|
|
# ------------------------------------------------------------------------------
|
|
|
|
|
2023-11-08 12:18:32 +00:00
|
|
|
proc chainSyncRunner(
    noisy = true;
    capture = memorySampleDefault;
    dbType = CoreDbType(0);
    ldgType = ldgTypeDefault;
    profilingOk = false;
    finalDiskCleanUpOk = true;
    enaLoggingOk = false;
    lastOneExtraOk = true;
      ) =
  ## Test backend database and ledger via a chain-sync replay.
  ##
  ## * `noisy`              -- verbose output / enable API tracking
  ## * `capture`            -- replay sample descriptor
  ## * `dbType`             -- backend override; `CoreDbType(0)` means
  ##                           "use `capture.dbType` or the default"
  ## * `ldgType`            -- ledger implementation
  ## * `profilingOk`        -- print profiling stats on teardown
  ## * `finalDiskCleanUpOk` -- flush persistent db files afterwards
  ## * `enaLoggingOk`, `lastOneExtraOk` -- forwarded to `test_chainSync()`
  let
    # e.g. "mainnet00000-00127.era1" -> "mainnet"
    fileInfo = capture.files[0]
                      .splitFile.name.split(".")[0]
                      .strip(leading=false, chars={'0'..'9'})
    # NOTE: uses the module-level `baseDir` search list; the local
    # `baseDir` below shadows it only for subsequent bindings.
    filePaths = capture.files.mapIt(it.findFilePath(baseDir,repoDir).value)
    baseDir = getTmpDir() / capture.dbName & "-chain-sync"
    dbDir = baseDir / "tmp"
    numBlocks = capture.numBlocks
    numBlocksInfo = if numBlocks == high(int): "all" else: $numBlocks

    dbType = block:
      # Decreasing priority: dbType, capture.dbType, dbTypeDefault
      var effDbType = dbTypeDefault
      if dbType != CoreDbType(0):
        effDbType = dbType
      elif capture.dbType != CoreDbType(0):
        effDbType = capture.dbType
      effDbType

    persistent = dbType in CoreDbPersistentTypes

  defer:
    if persistent: baseDir.flushDbDir

  suite &"CoreDB and LedgerRef API on {fileInfo}, {dbType}, {ldgType}":

    test &"Ledger API {ldgType}, {numBlocksInfo} blocks":
      let
        com = initRunnerDB(dbDir, capture, dbType, ldgType)
      defer:
        com.db.finish(flush = finalDiskCleanUpOk)
        if profilingOk: noisy.test_chainSyncProfilingPrint numBlocks
        if persistent and finalDiskCleanUpOk: dbDir.flushDbDir

      if noisy:
        # Fix: this statement was duplicated in the original code.
        com.db.trackNewApi = true
        com.db.trackLedgerApi = true

      check noisy.test_chainSync(filePaths, com, numBlocks,
        lastOneExtra=lastOneExtraOk, enaLogging=enaLoggingOk)
2024-05-22 13:41:14 +00:00
|
|
|
|
|
|
|
proc persistentSyncPreLoadAndResumeRunner(
    noisy = true;
    capture = persistentSampleDefault;
    dbType = CoreDbType(0);
    ldgType = ldgTypeDefault;
    profilingOk = false;
    finalDiskCleanUpOk = true;
    enaLoggingOk = false;
    lastOneExtraOk = true;
      ) =
  ## Test backend database and ledger in two stages: pre-load the first
  ## part of the sample into a persistent database, then re-open the same
  ## database and resume syncing with the remainder.
  ##
  ## Parameters mirror `chainSyncRunner()`; `dbType` must resolve to a
  ## persistent backend (asserted below).
  let
    # e.g. "mainnet00000-00127.era1" -> "mainnet"
    fileInfo = capture.files[0]
                      .splitFile.name.split(".")[0]
                      .strip(leading=false, chars={'0'..'9'})
    # NOTE: uses the module-level `baseDir` search list; the local
    # `baseDir` below shadows it only for subsequent bindings.
    filePaths = capture.files.mapIt(it.findFilePath(baseDir,repoDir).value)
    baseDir = getTmpDir() / capture.dbName & "-chain-sync"
    dbDir = baseDir / "tmp"

    dbType = block:
      # Decreasing priority: dbType, capture.dbType, dbTypeDefault
      var effDbType = dbTypeDefault
      if dbType != CoreDbType(0):
        effDbType = dbType
      elif capture.dbType != CoreDbType(0):
        effDbType = capture.dbType
      effDbType

  # Resume only makes sense with a database that survives re-opening.
  doAssert dbType in CoreDbPersistentTypes
  defer: baseDir.flushDbDir

  let
    firstPart = min(capture.numBlocks div 2, 200_000)
    secndPart = capture.numBlocks
    secndPartInfo = if secndPart == high(int): "all" else: $secndPart

  suite &"CoreDB pre-load and resume test ..{firstPart}..{secndPartInfo}":

    test "Populate db by initial sample parts":
      let
        com = initRunnerDB(dbDir, capture, dbType, ldgType)
      defer:
        # Keep the db files on disk so the next test can resume from them.
        com.db.finish(flush = finalDiskCleanUpOk)
        if profilingOk: noisy.test_chainSyncProfilingPrint firstPart

      if noisy:
        # Fix: this statement was duplicated in the original code.
        com.db.trackNewApi = true
        com.db.trackLedgerApi = true

      check noisy.test_chainSync(filePaths, com, firstPart,
        lastOneExtra=lastOneExtraOk, enaLogging=enaLoggingOk)

    test "Continue with rest of sample":
      let
        com = initRunnerDB(dbDir, capture, dbType, ldgType)
      defer:
        com.db.finish(flush = finalDiskCleanUpOk)
        if profilingOk: noisy.test_chainSyncProfilingPrint secndPart
        if finalDiskCleanUpOk: dbDir.flushDbDir

      if noisy:
        # Fix: this statement was duplicated in the original code.
        com.db.trackNewApi = true
        com.db.trackLedgerApi = true

      check noisy.test_chainSync(filePaths, com, secndPart,
        lastOneExtra=lastOneExtraOk, enaLogging=enaLoggingOk)
2023-10-03 11:56:13 +00:00
|
|
|
# ------------------------------------------------------------------------------
|
|
|
|
# Main function(s)
|
|
|
|
# ------------------------------------------------------------------------------
|
|
|
|
|
|
|
|
proc coreDbMain*(noisy = defined(debug)) =
  ## Entry point running every `CoreDB` test suite of this module.
  chainSyncRunner(noisy)
  persistentSyncPreLoadAndResumeRunner(noisy)
|
|
|
when isMainModule:
  import
    std/times
  const
    noisy = defined(debug) or true
  var
    sampleList: seq[CaptureSpecs]

  setErrorLevel()

  # Smoke-run the full suite quietly first.
  when true:
    false.coreDbMain()

  # This one uses the readily available dump: `bulkTest0` and some huge replay
  # dumps `bulkTest2`, `bulkTest3`, .. from the `nimbus-eth1-blobs` package.
  # For specs see `tests/test_coredb/bulk_test_xx.nim`.

  # NOTE(review): `cmdLineConfig()` is the compile-killing template unless
  # built with `-d:unittest2DisableParamFiltering` — see its declaration.
  sampleList = cmdLineConfig().samples
  if sampleList.len == 0:
    sampleList = @[memorySampleDefault]

  when true: # and false:
    # Run each requested sample, accumulating wall-clock profiling state.
    var state: (Duration, int)
    for n,capture in sampleList:
      noisy.profileSection("@sample #" & $n, state):
        noisy.chainSyncRunner(
          capture = capture,
          #profilingOk = true,
          #finalDiskCleanUpOk = false,
        )

    noisy.say "***", "total: ", state[0].pp, " sections: ", state[1]
|
|
# ------------------------------------------------------------------------------
|
|
|
|
# End
|
|
|
|
# ------------------------------------------------------------------------------
|