Add desc validator to fc unit tests (#2899)

* Kludge: fix `eip4844` import in `validate`

why:
  Compiling `validate` needs `blscurve` imported either here or by the importing module.

* Separate out `FC` descriptor into a separate file

why:
  Needed for external descriptor access (e.g. for debugging)

* Debugging toolkit for `FC`

* Verify chain descriptor after changing state
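
The last two items combine in the unit tests: after each state change the chain
descriptor is re-verified. A condensed, hypothetical excerpt of that pattern is
sketched below; `env.newCom`, `newForkedChain`, `importBlock`, `forkChoice` and the
`blk1`/`blk2` fixtures are the helpers already present in the test module.

  import ./test_forked_chain/chain_debug

  test "newBase == oldBase":
    const info = "newBase == oldBase"
    let com = env.newCom()
    var chain = newForkedChain(com, com.genesisHeader)
    check chain.importBlock(blk1).isOk
    check chain.importBlock(blk2).isOk
    # verify the `ForkedChainRef` invariants after changing state
    check chain.validate info & " (1)"
    check chain.forkChoice(blk2.blockHash, blk1.blockHash).isOk
    check chain.validate info & " (2)"
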
Jordan Hrycaj 2024-12-02 17:49:53 +00:00 committed by GitHub
parent 3bf0920a16
commit 9da3f29dff
GPG Key ID: B5690EEEBB952194
5 changed files with 315 additions and 40 deletions


@@ -18,45 +18,15 @@ import
../../evm/types,
../../evm/state,
../validate,
../executor/process_block
../executor/process_block,
./forked_chain/chain_desc
export
BlockDesc,
ForkedChainRef,
common,
core_db
type
CursorDesc = object
forkJunction: BlockNumber
hash: Hash32
BlockDesc* = object
blk*: Block
receipts*: seq[Receipt]
BaseDesc = object
hash: Hash32
header: Header
CanonicalDesc = object
## Designate some `header` entry on a `CursorDesc` sub-chain named
## `cursorDesc` identified by `cursorHash == cursorDesc.hash`.
cursorHash: Hash32
header: Header
ForkedChainRef* = ref object
stagingTx: CoreDbTxRef
db: CoreDbRef
com: CommonRef
blocks: Table[Hash32, BlockDesc]
txRecords: Table[Hash32, (Hash32, uint64)]
baseHash: Hash32
baseHeader: Header
cursorHash: Hash32
cursorHeader: Header
cursorHeads: seq[CursorDesc]
extraValidation: bool
baseDistance: uint64
const
BaseDistance = 128
@@ -487,8 +457,7 @@ proc init*(
baseHash: baseHash,
cursorHeader: baseHeader,
extraValidation: extraValidation,
baseDistance: baseDistance,
txRecords: initTable[Hash32, (Hash32, uint64)]())
baseDistance: baseDistance)
proc newForkedChain*(com: CommonRef,
baseHeader: Header,
@@ -506,9 +475,7 @@ proc newForkedChain*(com: CommonRef,
baseHash : baseHash,
cursorHeader: baseHeader,
extraValidation: extraValidation,
baseDistance: baseDistance,
txRecords: initTable[Hash32, (Hash32, uint64)]()
)
baseDistance: baseDistance)
# update global syncStart
com.syncStart = baseHeader.number
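
Aside on the two constructor hunks above: the explicit `txRecords: initTable[...]()`
field initialiser can be dropped because, since Nim 0.20, a default-initialised
`Table` is already an empty, usable table. A minimal stand-alone illustration (not
part of this diff):

  import std/tables

  type Demo = object
    txRecords: Table[string, int]

  var d = Demo()                 # `txRecords` starts out empty but usable
  d.txRecords["tx"] = 1
  doAssert d.txRecords.len == 1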


@@ -0,0 +1,57 @@
# Nimbus
# Copyright (c) 2024 Status Research & Development GmbH
# Licensed under either of
# * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or
# http://www.apache.org/licenses/LICENSE-2.0)
# * MIT license ([LICENSE-MIT](LICENSE-MIT) or
# http://opensource.org/licenses/MIT)
# at your option. This file may not be copied, modified, or distributed except
# according to those terms.
{.push raises: [].}
import
std/tables,
../../../common,
../../../db/core_db
type
CursorDesc* = object
forkJunction*: BlockNumber
hash*: Hash32
BlockDesc* = object
blk*: Block
receipts*: seq[Receipt]
BaseDesc* = object
hash*: Hash32
header*: Header
CanonicalDesc* = object
## Designate some `header` entry on a `CursorDesc` sub-chain named
## `cursorDesc` identified by `cursorHash == cursorDesc.hash`.
cursorHash*: Hash32
header*: Header
ForkedChainRef* = ref object
stagingTx*: CoreDbTxRef
db*: CoreDbRef
com*: CommonRef
blocks*: Table[Hash32, BlockDesc]
txRecords: Table[Hash32, (Hash32, uint64)]
baseHash*: Hash32
baseHeader*: Header
cursorHash*: Hash32
cursorHeader*: Header
cursorHeads*: seq[CursorDesc]
extraValidation*: bool
baseDistance*: uint64
# ----------------
func txRecords*(c: ForkedChainRef): var Table[Hash32, (Hash32, uint64)] =
## Avoid clash with `forked_chain.txRecords()`
c.txRecords
# End
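
Splitting the descriptor into its own module is what makes the fields above reachable
from external tooling (e.g. the debug module at the end of this commit). A hypothetical
sketch of such access; the relative import path depends on where the caller lives:

  import ../../nimbus/core/chain/forked_chain/chain_desc

  proc dumpChainState(c: ForkedChainRef) =
    # fields that used to be private to `forked_chain`
    echo "base=", c.baseHeader.number, " cursor=", c.cursorHeader.number
    for ch in c.cursorHeads:
      echo "  cursor head fork junction: ", ch.forkJunction
    # `txRecords` itself stays private -- use the exported accessor
    echo "tx records: ", c.txRecords.len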


@@ -12,6 +12,7 @@
import
std/[sequtils, sets, strformat],
pkg/blscurve, # Kludge: needed to compile `eip4844` -- sometimes :)
../db/ledger,
../common/common,
../transaction/call_types,


@@ -9,12 +9,14 @@
# according to those terms.
import
pkg/chronicles,
pkg/unittest2,
../nimbus/common,
../nimbus/config,
../nimbus/utils/utils,
../nimbus/core/chain/forked_chain,
../nimbus/db/ledger,
unittest2
./test_forked_chain/chain_debug
const
genesisFile = "tests/customgenesis/cancun123.json"
@@ -128,6 +130,7 @@ proc forkedChainMain*() =
B7 = cc.makeBlk(7, B6)
test "newBase == oldBase":
const info = "newBase == oldBase"
let com = env.newCom()
var chain = newForkedChain(com, com.genesisHeader)
@@ -139,15 +142,18 @@ proc forkedChainMain*() =
check chain.importBlock(blk2).isOk
check chain.importBlock(blk3).isOk
check chain.validate info & " (1)"
# no parent
check chain.importBlock(blk5).isErr
check com.headHash == genesisHash
check chain.latestHash == blk3.blockHash
check chain.validate info & " (2)"
# finalized > head -> error
check chain.forkChoice(blk1.blockHash, blk3.blockHash).isErr
check chain.validate info & " (3)"
# blk4 is not part of chain
check chain.forkChoice(blk4.blockHash, blk2.blockHash).isErr
@@ -162,17 +168,21 @@ proc forkedChainMain*() =
check chain.forkChoice(blk2.blockHash, blk1.blockHash).isOk
check com.headHash == blk2.blockHash
check chain.latestHash == blk2.blockHash
check chain.validate info & " (7)"
# finalized == head -> ok
check chain.forkChoice(blk2.blockHash, blk2.blockHash).isOk
check com.headHash == blk2.blockHash
check chain.latestHash == blk2.blockHash
check chain.validate info & " (8)"
# no baggage written
check com.wdWritten(blk1) == 0
check com.wdWritten(blk2) == 0
check chain.validate info & " (9)"
test "newBase == cursor":
const info = "newBase == cursor"
let com = env.newCom()
var chain = newForkedChain(com, com.genesisHeader, baseDistance = 3)
@@ -185,9 +195,11 @@ proc forkedChainMain*() =
check chain.importBlock(blk7).isOk
check chain.importBlock(blk4).isOk
check chain.validate info & " (1)"
# newbase == cursor
check chain.forkChoice(blk7.blockHash, blk6.blockHash).isOk
check chain.validate info & " (2)"
check com.headHash == blk7.blockHash
check chain.latestHash == blk7.blockHash
@@ -198,8 +210,10 @@ proc forkedChainMain*() =
check com.wdWritten(blk4) == 4
# make sure aristo did not wipe out baggage
check com.wdWritten(blk3) == 3
check chain.validate info & " (9)"
test "newBase between oldBase and cursor":
const info = "newBase between oldBase and cursor"
let com = env.newCom()
var chain = newForkedChain(com, com.genesisHeader, baseDistance = 3)
@@ -210,8 +224,10 @@ proc forkedChainMain*() =
check chain.importBlock(blk5).isOk
check chain.importBlock(blk6).isOk
check chain.importBlock(blk7).isOk
check chain.validate info & " (1)"
check chain.forkChoice(blk7.blockHash, blk6.blockHash).isOk
check chain.validate info & " (2)"
check com.headHash == blk7.blockHash
check chain.latestHash == blk7.blockHash
@@ -223,8 +239,10 @@ proc forkedChainMain*() =
check com.wdWritten(blk4) == 4
# make sure aristo did not wipe out baggage
check com.wdWritten(blk3) == 3
check chain.validate info & " (9)"
test "newBase == oldBase, fork and keep on that fork":
const info = "newBase == oldBase, fork .."
let com = env.newCom()
var chain = newForkedChain(com, com.genesisHeader)
@@ -240,13 +258,16 @@ proc forkedChainMain*() =
check chain.importBlock(B5).isOk
check chain.importBlock(B6).isOk
check chain.importBlock(B7).isOk
check chain.validate info & " (1)"
check chain.forkChoice(B7.blockHash, B5.blockHash).isOk
check com.headHash == B7.blockHash
check chain.latestHash == B7.blockHash
check chain.validate info & " (9)"
test "newBase == cursor, fork and keep on that fork":
const info = "newBase == cursor, fork .."
let com = env.newCom()
var chain = newForkedChain(com, com.genesisHeader, baseDistance = 3)
@@ -264,13 +285,17 @@ proc forkedChainMain*() =
check chain.importBlock(B7).isOk
check chain.importBlock(B4).isOk
check chain.validate info & " (1)"
check chain.forkChoice(B7.blockHash, B6.blockHash).isOk
check chain.validate info & " (2)"
check com.headHash == B7.blockHash
check chain.latestHash == B7.blockHash
check chain.validate info & " (9)"
test "newBase between oldBase and cursor, fork and keep on that fork":
const info = "newBase between oldBase .."
let com = env.newCom()
var chain = newForkedChain(com, com.genesisHeader, baseDistance = 3)
@@ -286,13 +311,17 @@ proc forkedChainMain*() =
check chain.importBlock(B5).isOk
check chain.importBlock(B6).isOk
check chain.importBlock(B7).isOk
check chain.validate info & " (1)"
check chain.forkChoice(B7.blockHash, B5.blockHash).isOk
check chain.validate info & " (2)"
check com.headHash == B7.blockHash
check chain.latestHash == B7.blockHash
check chain.validate info & " (9)"
test "newBase == oldBase, fork and return to old chain":
const info = "newBase == oldBase, fork .."
let com = env.newCom()
var chain = newForkedChain(com, com.genesisHeader)
@@ -308,13 +337,17 @@ proc forkedChainMain*() =
check chain.importBlock(B5).isOk
check chain.importBlock(B6).isOk
check chain.importBlock(B7).isOk
check chain.validate info & " (1)"
check chain.forkChoice(blk7.blockHash, blk5.blockHash).isOk
check chain.validate info & " (2)"
check com.headHash == blk7.blockHash
check chain.latestHash == blk7.blockHash
check chain.validate info & " (9)"
test "newBase == cursor, fork and return to old chain":
const info = "newBase == cursor, fork .."
let com = env.newCom()
var chain = newForkedChain(com, com.genesisHeader, baseDistance = 3)
@@ -332,13 +365,17 @@ proc forkedChainMain*() =
check chain.importBlock(B7).isOk
check chain.importBlock(blk4).isOk
check chain.validate info & " (1)"
check chain.forkChoice(blk7.blockHash, blk5.blockHash).isOk
check chain.validate info & " (2)"
check com.headHash == blk7.blockHash
check chain.latestHash == blk7.blockHash
check chain.validate info & " (9)"
test "newBase between oldBase and cursor, fork and return to old chain, switch to new chain":
const info = "newBase between oldBase and .."
let com = env.newCom()
var chain = newForkedChain(com, com.genesisHeader, baseDistance = 3)
@@ -356,13 +393,17 @@ proc forkedChainMain*() =
check chain.importBlock(B7).isOk
check chain.importBlock(blk4).isOk
check chain.validate info & " (1)"
check chain.forkChoice(B7.blockHash, B5.blockHash).isOk
check chain.validate info & " (2)"
check com.headHash == B7.blockHash
check chain.latestHash == B7.blockHash
check chain.validate info & " (9)"
test "newBase between oldBase and cursor, fork and return to old chain":
const info = "newBase between oldBase and .."
let com = env.newCom()
var chain = newForkedChain(com, com.genesisHeader, baseDistance = 3)
@@ -378,13 +419,17 @@ proc forkedChainMain*() =
check chain.importBlock(B5).isOk
check chain.importBlock(B6).isOk
check chain.importBlock(B7).isOk
check chain.validate info & " (1)"
check chain.forkChoice(blk7.blockHash, blk5.blockHash).isOk
check chain.validate info & " (2)"
check com.headHash == blk7.blockHash
check chain.latestHash == blk7.blockHash
check chain.validate info & " (9)"
test "headerByNumber":
const info = "headerByNumber"
let com = env.newCom()
var chain = newForkedChain(com, com.genesisHeader, baseDistance = 3)
@@ -400,8 +445,10 @@ proc forkedChainMain*() =
check chain.importBlock(B5).isOk
check chain.importBlock(B6).isOk
check chain.importBlock(B7).isOk
check chain.validate info & " (1)"
check chain.forkChoice(blk7.blockHash, blk5.blockHash).isOk
check chain.validate info & " (2)"
# cursor
check chain.headerByNumber(8).isErr
@@ -419,8 +466,10 @@ proc forkedChainMain*() =
# from cache
check chain.headerByNumber(5).expect("OK").number == 5
check chain.headerByNumber(5).expect("OK").blockHash == blk5.blockHash
check chain.validate info & " (9)"
test "Import after Replay Segment":
const info = "Import after Replay Segment"
let com = env.newCom()
var chain = newForkedChain(com, com.genesisHeader, baseDistance = 3)
@@ -429,12 +478,15 @@ proc forkedChainMain*() =
check chain.importBlock(blk3).isOk
check chain.importBlock(blk4).isOk
check chain.importBlock(blk5).isOk
check chain.validate info & " (1)"
chain.replaySegment(blk2.header.blockHash)
chain.replaySegment(blk5.header.blockHash)
check chain.validate info & " (2)"
check chain.importBlock(blk6).isOk
check chain.importBlock(blk7).isOk
check chain.validate info & " (9)"
when isMainModule:
forkedChainMain()


@@ -0,0 +1,198 @@
# Nimbus
# Copyright (c) 2024 Status Research & Development GmbH
# Licensed under either of
# * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or
# http://www.apache.org/licenses/LICENSE-2.0)
# * MIT license ([LICENSE-MIT](LICENSE-MIT) or
# http://opensource.org/licenses/MIT)
# at your option. This file may not be copied, modified, or distributed except
# according to those terms.
## Test and verifier toolkit for `ForkedChainRef`
{.push raises: [].}
import
std/[algorithm, sequtils, sets, strutils, tables],
pkg/chronicles,
pkg/stew/interval_set,
../../nimbus/common,
../../nimbus/sync/beacon/worker/helpers,
../../nimbus/core/chain/forked_chain/chain_desc
logScope: topics = "forked-chain"
# ------------------------------------------------------------------------------
# Private
# ------------------------------------------------------------------------------
func header(h: Hash32; c: ForkedChainRef): Header =
c.blocks.withValue(h, val):
return val.blk.header
func cmp(c: ForkedChainRef; _: type CursorDesc): auto =
return func(x,y: CursorDesc): int =
result = cmp(x.forkJunction, y.forkJunction)
if result == 0:
result = cmp(x.hash.header(c).number, y.hash.header(c).number)
func cmp(c: ForkedChainRef; _: type seq[Hash32]): auto =
return func(x,y: seq[Hash32]): int =
result = cmp(x[0].header(c).number, y[0].header(c).number)
if result == 0:
result = cmp(x[^1].header(c).number, y[^1].header(c).number)
# ----------------
func baseChains(c: ForkedChainRef): seq[seq[Hash32]] =
# find leafs
var leafs = c.blocks.pairs.toSeq.mapIt((it[0],it[1].blk.header)).toTable
for w in c.blocks.values:
leafs.del w.blk.header.parentHash
# Assemble separate chain per leaf
for (k,v) in leafs.pairs:
var
q = @[k]
w = v.parentHash
while true:
c.blocks.withValue(w, val):
q.add w
w = val.blk.header.parentHash
do:
break
result.add q.reversed
func baseChainsSorted(c: ForkedChainRef): seq[seq[Hash32]] =
c.baseChains.sorted(c.cmp seq[Hash32])
# ----------------
func cnStr(q: openArray[Hash32]; c: ForkedChainRef): string =
let (a,b) = (q[0].header(c).number, q[^1].header(c).number)
result = a.bnStr
if a != b:
result &= "<<" & b.bnStr
func ppImpl[T: Block|Header](q: openArray[T]): string =
func number(b: Block): BlockNumber = b.header.number
let bns = IntervalSetRef[BlockNumber,uint64].init()
for w in q:
discard bns.merge(w.number,w.number)
let (a,b) = (bns.total, q.len.uint64 - bns.total)
"{" & bns.increasing.toSeq.mapIt($it).join(",") & "}[#" & $a & "+" & $b & "]"
# ------------------------------------------------------------------------------
# Public pretty printers
# ------------------------------------------------------------------------------
# Pretty printers
func pp*(n: BlockNumber): string = n.bnStr
func pp*(h: Header): string = h.bnStr
func pp*(b: Block): string = b.bnStr
func pp*(h: Hash32): string = h.short
func pp*(d: BaseDesc): string = d.header.pp
func pp*(q: openArray[Block]): string = q.ppImpl
func pp*(q: openArray[Header]): string = q.ppImpl
func pp*(rc: Result[Header,string]): string =
if rc.isOk: rc.value.pp else: "err(" & rc.error & ")"
# --------------------
func pp*(h: Hash32; c: ForkedChainRef): string =
c.blocks.withValue(h, val) do:
return val.blk.header.pp
if h == c.baseHash:
return c.baseHeader.pp
h.short
func pp*(d: CanonicalDesc; c: ForkedChainRef): string =
"(" & d.cursorHash.header(c).number.pp & "," & d.header.pp & ")"
func pp*(d: CursorDesc; c: ForkedChainRef): string =
let (a,b) = (d.forkJunction, d.hash.header(c).number)
result = a.bnStr
if a != b:
result &= ".." & (if b == 0: d.hash.pp else: b.pp)
func pp*(q: openArray[CursorDesc]; c: ForkedChainRef): string =
"{" & q.sorted(c.cmp CursorDesc).mapIt(it.pp(c)).join(",") & "}"
func pp*(c: ForkedChainRef): string =
"(" & c.baseHeader.pp &
",{" & c.baseChainsSorted.mapIt(it.cnStr(c)).join(",") & "}" &
"," & c.cursorHeader.pp &
"," & c.cursorHeads.pp(c) &
"," & (if c.extraValidation: "t" else: "f") &
"," & $c.baseDistance &
")"
# ------------------------------------------------------------------------------
# Public object validators
# ------------------------------------------------------------------------------
func validate*(c: ForkedChainRef): Result[void,string] =
if c.cursorHeader.number < c.baseHeader.number:
return err("cursor block number too low")
# Empty descriptor (mainly used with unit tests)
if c.cursorHash == c.baseHash and
c.blocks.len == 0 and
c.cursorHeads.len == 0:
return ok()
# `cursorHeader` must be in the `c.blocks[]` table but `base` must not
if not c.blocks.hasKey(c.cursorHash):
return err("cursor must be in blocks[] table: " & c.cursorHeader.pp)
if c.blocks.hasKey(c.baseHash):
return err("base must not be in blocks[] table: " & c.baseHeader.pp)
# Base chains must range inside `(base,cursor]`, rooted on `base`
var bcHeads: HashSet[Hash32]
for chain in c.baseChains:
if chain[0].header(c).parentHash != c.baseHash:
return err("unbased chain: " & chain.cnStr(c))
bcHeads.incl chain[^1]
# Cursor heads must refer to items of `c.blocks[]`
for ch in c.cursorHeads:
if not c.blocks.hasKey(ch.hash):
return err("stray cursor head: " & ch.pp(c))
if ch.forkJunction <= c.baseHeader.number:
return err("cursor head junction too small: " & ch.pp(c))
# Get fork junction header
var h = ch.hash.header(c)
while ch.forkJunction < h.number:
c.blocks.withValue(h.parentHash, val):
h = val.blk.header
do:
return err("inconsistent/broken cursor chain " & ch.pp(c))
# Now: `ch.forkJunction == h.number`, check parent
if h.parentHash != c.baseHash and not c.blocks.hasKey(h.parentHash):
return err("unaligned junction of cursor chain " & ch.pp(c))
# Check cursor heads against assembled chain heads
if ch.hash notin bcHeads:
return err("stale or dup cursor chain " & ch.pp(c))
bcHeads.excl ch.hash
# Each chain must have exactly one cursor head
if bcHeads.len != 0:
return err("missing cursor chain for head " & bcHeads.toSeq[0].pp(c))
ok()
proc validate*(c: ForkedChainRef; info: static[string]): bool {.discardable.} =
let rc = c.validate()
if rc.isOk:
return true
error info & ": invalid desc", error=rc.error, c=c.pp
# ------------------------------------------------------------------------------
# End
# ------------------------------------------------------------------------------
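
A hypothetical example of driving the toolkit above from ad-hoc debugging code
(import paths as used by the test module):

  import
    ../nimbus/core/chain/forked_chain,
    ./test_forked_chain/chain_debug

  proc inspect(chain: ForkedChainRef) =
    # one-line dump: base header, per-fork block ranges, cursor, cursor heads
    debugEcho chain.pp
    # Result-returning variant, e.g. for asserting in scripts ...
    doAssert chain.validate().isOk
    # ... or the logging variant used by the unit tests
    chain.validate "inspect"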