Re-adjust canonical head to parent of block to be inserted (#726)

* Re-adjust canonical head to parent of block to be inserted

why:
  of the failing tests that remain to be solved, 30 will succeed if the
  canonical database chain head is cleverly adjusted -- yes, it looks
  like a hack, indeed.

details:
  at the moment, this hack works for the non-hive tests only and is
  triggered by a boolean argument passed to the chain.persistBlocks()
  method.

* Use parent instead of canonical head for block to be inserted

why:
  side chains typically need to be inserted somewhere before the
  canonical head.

details:
  the previous _hack_ was unnecessary and has been removed; it was
  inspired by a verification step in persistBlocks() which explicitly
  referenced the canonical head (which now may or may not refer to the
  newly inserted header). a short stand-alone sketch of the new
  parent-header lookup follows below.

* remove unnecessary code + comment
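
The effect of the final change can be pictured with a small, self-contained
Nim sketch. This is not nimbus code: `Store`, `Header` and `anchorFor` are
invented stand-ins for the `c.db.getCanonicalHead()` and
`c.db.getBlockHeader(parentHash)` calls touched in the last diff below; the
sketch only shows why a side-chain block must be anchored on its own
parent's state root rather than on the canonical head.

  # minimal sketch, not nimbus code -- all names are illustrative stand-ins
  import std/tables

  type
    Hash = string                    # stand-in for a 32-byte block hash
    Header = object
      hash, parentHash, stateRoot: Hash

    Store = object
      headers: Table[Hash, Header]   # all known headers, main and side chains
      canonicalHead: Hash            # hash of the current canonical tip

  proc getCanonicalHead(s: Store): Header =
    ## old anchoring: always start from the canonical tip
    s.headers[s.canonicalHead]

  proc anchorFor(s: Store; toInsert: Header): Header =
    ## new anchoring: start from the parent of the block being inserted,
    ## which for a side chain is usually *not* the canonical head
    s.headers[toInsert.parentHash]

  when isMainModule:
    var s = Store(canonicalHead: "c2")
    for h in [Header(hash: "c1", parentHash: "c0", stateRoot: "root-c1"),
              Header(hash: "c2", parentHash: "c1", stateRoot: "root-c2")]:
      s.headers[h.hash] = h

    # a side-chain block branching off c1, i.e. before the canonical head c2
    let side = Header(hash: "s2", parentHash: "c1", stateRoot: "root-s2")

    echo "old anchor (canonical head): ", s.getCanonicalHead().stateRoot # root-c2
    echo "new anchor (block's parent): ", s.anchorFor(side).stateRoot    # root-c1

With the old lookup the VM state for `side` would have been seeded from
`root-c2` even though `side` does not build on `c2`; looking up the parent
header yields the correct `root-c1`.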
Jordan Hrycaj 2021-06-22 17:52:31 +01:00 committed by GitHub
parent 2269d16c4c
commit 2d6bf34175
3 changed files with 33 additions and 16 deletions

View File

@@ -8,8 +8,8 @@
 # those terms.
 
 import
-  std/[os, parseopt, json],
-  eth/[common, p2p, trie/db], stew/byteutils,
+  std/[os, parseopt, strformat, json],
+  eth/[common, trie/db], stew/byteutils,
   ../../../nimbus/db/db_chain,
   ../../../nimbus/[genesis, config, conf_utils],
   ../sim_utils
@@ -42,7 +42,13 @@ proc main() =
     else:
       paramStr(1)
 
+  if not caseFolder.dirExists:
+    # Handy early error message and stop directive
+    let progname = getAppFilename().extractFilename
+    quit(&"*** {progname}: Not a case folder: {caseFolder}")
+
   runTest("Consensus", caseFolder):
+    # Variable `fileName` is injected by `runTest()`
     let node = parseFile(fileName)
     processNode(fileName, node["chainfile"].getStr,
                 node["lastblockhash"].getStr, testStatusIMPL)

View File

@@ -19,7 +19,7 @@ type
   EthHeader = object
     header: BlockHeader
 
-proc importRlpBlock*(importFile: string, chainDB: BaseChainDB): bool =
+proc importRlpBlock*(importFile: string; chainDB: BaseChainDB): bool =
   let res = io2.readAllBytes(importFile)
   if res.isErr:
     error "failed to import", fileName = importFile

View File

@@ -1,7 +1,17 @@
-import ../db/db_chain, eth/common, chronicles, ../vm_state,
-  stint, nimcrypto,
-  ../utils, eth/trie/db, ./executor, ../chain_config, ../genesis, ../utils,
-  stew/endians2, ./validate, ./validate/epoch_hash_cache
+import
+  ../chain_config,
+  ../db/db_chain,
+  ../genesis,
+  ../utils,
+  ../vm_state,
+  ./executor,
+  ./validate,
+  ./validate/epoch_hash_cache,
+  chronicles,
+  eth/[common, trie/db],
+  nimcrypto,
+  stew/endians2,
+  stint
 
 when not defined(release):
   import ../tracer
@@ -130,7 +140,8 @@ method getAncestorHeader*(c: Chain, h: BlockHeader, output: var BlockHeader, ski
 method getBlockBody*(c: Chain, blockHash: KeccakHash): BlockBodyRef =
   result = nil
 
-method persistBlocks*(c: Chain, headers: openarray[BlockHeader], bodies: openarray[BlockBody]): ValidationResult {.gcsafe.} =
+method persistBlocks*(c: Chain; headers: openarray[BlockHeader];
+                      bodies: openarray[BlockBody]): ValidationResult {.gcsafe.} =
   # Run the VM here
   if headers.len != bodies.len:
     debug "Number of headers not matching number of bodies"
@@ -140,11 +151,15 @@ method persistBlocks*(c: Chain, headers: openarray[BlockHeader], bodies: openarr
   let transaction = c.db.db.beginTransaction()
   defer: transaction.dispose()
 
-  trace "Persisting blocks", fromBlock = headers[0].blockNumber, toBlock = headers[^1].blockNumber
+  trace "Persisting blocks",
+    fromBlock = headers[0].blockNumber,
+    toBlock = headers[^1].blockNumber
+
   for i in 0 ..< headers.len:
-    let head = c.db.getCanonicalHead()
-    let vmState = newBaseVMState(head.stateRoot, headers[i], c.db)
-    let validationResult = processBlock(c.db, headers[i], bodies[i], vmState)
+    let
+      head = c.db.getBlockHeader(headers[i].parentHash)
+      vmState = newBaseVMState(head.stateRoot, headers[i], c.db)
+      validationResult = processBlock(c.db, headers[i], bodies[i], vmState)
 
     when not defined(release):
       if validationResult == ValidationResult.Error and
@@ -167,10 +182,6 @@ method persistBlocks*(c: Chain, headers: openarray[BlockHeader], bodies: openarr
         return ValidationResult.Error
 
     discard c.db.persistHeaderToDb(headers[i])
-    if c.db.getCanonicalHead().blockHash != headers[i].blockHash:
-      debug "Stored block header hash doesn't match declared hash"
-      return ValidationResult.Error
-
     discard c.db.persistTransactions(headers[i].blockNumber, bodies[i].transactions)
     discard c.db.persistReceipts(vmState.receipts)