Compare commits

...

3 Commits

Author  SHA1        Message                         Date
jangko  73a0f7caeb  Write baggage to database       2024-06-24 12:04:59 +07:00
jangko  7c679a8ea9  Do not force base update        2024-06-24 11:12:37 +07:00
jangko  d40529ebef  Avoid reloading parent header   2024-06-24 09:53:35 +07:00
1 changed file with 126 additions and 48 deletions


@@ -18,31 +18,37 @@ import
   ../executor/process_block
 
 type
+  HeadDesc = object
+    number: BlockNumber
+    hash: Hash256
+
+  BlockDesc = object
+    blk: EthBlock
+    receipts: seq[Receipt]
+
   ForkedChain* = object
     stagingTx: CoreDbTxRef
     db: CoreDbRef
     com: CommonRef
-    blocks: Table[Hash256, EthBlock]
-    head: Hash256
-    base: Hash256
-    headBlockNumber: BlockNumber
+    blocks: Table[Hash256, BlockDesc]
+    headHash: Hash256
+    baseHash: Hash256
+    baseHeader: BlockHeader
+    headHeader: BlockHeader
+    heads: seq[HeadDesc]
 
 # ------------------------------------------------------------------------------
 # Private
 # ------------------------------------------------------------------------------
 
-proc getVmState(c: ForkedChain,
-                header: BlockHeader): Result[BaseVMState, string] =
-  let vmState = BaseVMState()
-  if not vmState.init(header, c.com):
-    return err("Could not initialise VMState")
-  ok(vmState)
-
-proc processBlock(c: ForkedChain, blk: EthBlock): Result[void, string] =
+proc processBlock(c: ForkedChain,
+                  parent: BlockHeader,
+                  blk: EthBlock): Result[seq[Receipt], string] =
   template header(): BlockHeader =
     blk.header
 
-  let vmState = ?c.getVmState(header)
+  let vmState = BaseVMState()
+  vmState.init(parent, header, c.com)
 
   c.com.hardForkTransition(header)
   ?c.com.validateHeaderAndKinship(blk, vmState.parent, checkSealOK = false)
@@ -54,6 +60,8 @@ proc processBlock(c: ForkedChain, blk: EthBlock): Result[void, string] =
     skipUncles = true,
   )
 
+  # We still need to write header to database
+  # because validateUncles still need it
   let blockHash = header.blockHash()
   if not c.db.persistHeader(
         blockHash,
@@ -61,55 +69,102 @@ proc processBlock(c: ForkedChain, blk: EthBlock): Result[void, string] =
         c.com.startOfHistory):
     return err("Could not persist header")
 
-  ok()
+  ok(move(vmState.receipts))
 
-proc updateHead(c: var ForkedChain, blk: EthBlock) =
+proc updateHeads(c: var ForkedChain,
+                 hash: Hash256,
+                 header: BlockHeader) =
+  for i in 0..<c.heads.len:
+    if c.heads[i].hash == header.parentHash:
+      c.heads[i] = HeadDesc(
+        hash: hash,
+        number: header.number,
+      )
+      return
+
+  c.heads.add HeadDesc(
+    hash: hash,
+    number: header.number,
+  )
+
+proc updateHead(c: var ForkedChain,
+                blk: EthBlock,
+                receipts: sink seq[Receipt]) =
   template header(): BlockHeader =
     blk.header
 
-  c.head = header.blockHash
-  c.headBlockNumber = header.number
-  c.blocks[c.head] = blk
+  c.headHeader = header
+  c.headHash = header.blockHash
+  c.blocks[c.headHash] = BlockDesc(
+    blk: blk,
+    receipts: move(receipts)
+  )
+
+  c.updateHeads(c.headHash, header)
 
 proc validatePotentialHead(c: var ForkedChain,
+                           parent: BlockHeader,
                            blk: EthBlock,
                            updateHead: bool = true) =
   let dbTx = c.db.newTransaction()
   defer:
     dbTx.dispose()
 
-  let res = c.processBlock(blk)
+  var res = c.processBlock(parent, blk)
   if res.isErr:
     dbTx.rollback()
     return
 
   dbTx.commit()
   if updateHead:
-    c.updateHead(blk)
+    c.updateHead(blk, move(res.value))
 
 proc replaySegment(c: var ForkedChain,
-                   head: Hash256): BlockNumber =
+                   head: Hash256) =
   var
     prevHash = head
     chain = newSeq[EthBlock]()
 
-  while prevHash != c.base:
-    chain.add c.blocks[prevHash]
+  while prevHash != c.baseHash:
+    chain.add c.blocks[prevHash].blk
     prevHash = chain[^1].header.parentHash
 
   c.stagingTx.rollback()
   c.stagingTx = c.db.newTransaction()
+  c.headHeader = c.baseHeader
   for i in countdown(chain.high, chain.low):
-    c.validatePotentialHead(chain[i], updateHead = false)
+    c.validatePotentialHead(c.headHeader, chain[i], updateHead = false)
+    c.headHeader = chain[i].header
 
-  chain[^1].header.number
+proc writeBaggage(c: var ForkedChain, blockHash: Hash256) =
+  var prevHash = blockHash
+  while prevHash != c.baseHash:
+    let blk = c.blocks[prevHash]
+    c.db.persistTransactions(blk.blk.header.number, blk.blk.transactions)
+    c.db.persistReceipts(blk.receipts)
+    discard c.db.persistUncles(blk.blk.uncles)
+    if blk.blk.withdrawals.isSome:
+      c.db.persistWithdrawals(blk.blk.withdrawals.get)
+    prevHash = blk.blk.header.parentHash
 
 proc updateBase(c: var ForkedChain,
-                head: Hash256, headBlockNumber: BlockNumber) =
-  c.base = head
-  c.head = head
-  c.headBlockNumber = headBlockNumber
-  c.blocks.clear()
+                newBaseHash: Hash256, newBaseHeader: BlockHeader) =
+  # remove obsolete chains
+  for i in 0..<c.heads.len:
+    if c.heads[i].number <= c.baseHeader.number:
+      var prevHash = c.heads[i].hash
+      while prevHash != c.baseHash:
+        c.blocks.withValue(prevHash, val) do:
+          let rmHash = prevHash
+          prevHash = val.blk.header.parentHash
+          c.blocks.del(rmHash)
+        do:
+          # older chain segment have been deleted
+          # by previous head
+          break
+      c.heads.del(i)
+
+  c.baseHeader = newBaseHeader
+  c.baseHash = newBaseHash
 
 # ------------------------------------------------------------------------------
 # Public functions
@@ -119,23 +174,24 @@ proc initForkedChain*(com: CommonRef): ForkedChain =
   result.com = com
   result.db = com.db
   result.stagingTx = com.db.newTransaction()
-  let head = com.db.getCanonicalHead()
-  let headHash = head.blockHash
-  result.head = headHash
-  result.base = headHash
+  result.baseHeader = com.db.getCanonicalHead()
+  let headHash = result.baseHeader.blockHash
+  result.headHash = headHash
+  result.baseHash = headHash
+  result.headHeader = result.baseHeader
 
 proc addBlock*(c: var ForkedChain, blk: EthBlock) =
   template header(): BlockHeader =
     blk.header
 
-  if header.parentHash == c.head:
-    c.validatePotentialHead(blk)
+  if header.parentHash == c.headHash:
+    c.validatePotentialHead(c.headHeader, blk)
     return
 
-  if header.parentHash == c.base:
+  if header.parentHash == c.baseHash:
     c.stagingTx.rollback()
     c.stagingTx = c.db.newTransaction()
-    c.validatePotentialHead(blk)
+    c.validatePotentialHead(c.baseHeader, blk)
     return
 
   if header.parentHash notin c.blocks:
@@ -143,35 +199,57 @@ proc addBlock*(c: var ForkedChain, blk: EthBlock) =
     # there is no hope the descendant is valid
     return
 
-  discard c.replaySegment(header.parentHash)
-  c.validatePotentialHead(blk)
+  c.replaySegment(header.parentHash)
+  c.validatePotentialHead(c.headHeader, blk)
 
 proc finalizeSegment*(c: var ForkedChain,
-                      finalized: Hash256): Result[void, string] =
-  if finalized == c.head:
+                      finalizedHash: Hash256): Result[void, string] =
+  if finalizedHash == c.headHash:
+    c.writeBaggage(finalizedHash)
     # the current segment is canonical chain
     c.stagingTx.commit()
 
     # Save and record the block number before the last saved block state.
-    c.db.persistent(c.headBlockNumber).isOkOr:
+    c.db.persistent(c.headHeader.number).isOkOr:
       return err("Failed to save state: " & $$error)
 
-    c.updateBase(finalized, c.headBlockNumber)
     c.stagingTx = c.db.newTransaction()
+    c.updateBase(finalizedHash, c.headHeader)
     return ok()
 
-  if finalized notin c.blocks:
+  var
+    newBaseHash: Hash256
+    newBaseHeader: BlockHeader
+
+  c.blocks.withValue(finalizedHash, val) do:
+    if c.headHeader.number <= 128:
+      if val.blk.header.number < c.headHeader.number:
+        newBaseHash = finalizedHash
+        newBaseHeader = val.blk.header
+      else:
+        newBaseHash = c.headHash
+        newBaseHeader = c.headHeader
+    elif val.blk.header.number < c.headHeader.number - 128:
+      newBaseHash = finalizedHash
+      newBaseHeader = val.blk.header
+    else:
+      newBaseHash = c.headHash
+      newBaseHeader = c.headHeader
+  do:
     return err("Finalized head not in segments list")
 
   c.stagingTx.rollback()
   c.stagingTx = c.db.newTransaction()
 
-  let headBlockNumber = c.replaySegment(finalized)
+  c.replaySegment(newBaseHash)
+  c.writeBaggage(newBaseHash)
   c.stagingTx.commit()
-  c.db.persistent(headBlockNumber).isOkOr:
+  c.db.persistent(newBaseHeader.number).isOkOr:
     return err("Failed to save state: " & $$error)
 
-  c.updateBase(finalized, headBlockNumber)
   c.stagingTx = c.db.newTransaction()
+  c.updateBase(newBaseHash, newBaseHeader)
   ok()
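
For reviewers, a minimal sketch of how the reworked API above might be driven after these three commits. It only relies on the procs visible in this diff (initForkedChain, addBlock, finalizeSegment); the import paths, the CommonRef value and the block source are placeholders, not part of the change.

# Hedged usage sketch; assumes the surrounding nimbus-eth1 modules.
# Only initForkedChain/addBlock/finalizeSegment come from the diff above;
# everything else here (paths, com, blocks, finalizedHash) is illustrative.
import
  eth/common,        # EthBlock, Hash256 (path assumed)
  ./forked_chain     # module changed in this compare (path assumed)

proc importSegment(com: CommonRef,
                   blocks: openArray[EthBlock],
                   finalizedHash: Hash256) =
  # Base and head both start at the current canonical head.
  var chain = initForkedChain(com)

  # Stage blocks in memory; the parent header is taken from the cached
  # head/blocks table instead of being reloaded from the database.
  for blk in blocks:
    chain.addBlock(blk)

  # Persist the finalized segment: writeBaggage stores transactions,
  # receipts, uncles and withdrawals, then updateBase moves the base
  # and prunes branches that became obsolete.
  chain.finalizeSegment(finalizedHash).isOkOr:
    echo "finalizeSegment failed: ", error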