simplify unviable head pruning (#3528)
Also note a bug that potentially prevents states from being pruned correctly.
commit 13fafe3a40 (parent fd1ffd62dd)
@@ -15,8 +15,8 @@ export chronicles, forks
 type
   BlockRef* = ref object
-    ## Node in object graph guaranteed to lead back to tail block, and to have
-    ## a corresponding entry in database.
+    ## Node in object graph guaranteed to lead back to finalized head, and to
+    ## have a corresponding entry in database.
     ##
     ## All blocks identified by a `BlockRef` are valid per the state transition
     ## rules and that at some point were candidates for head selection. The
@@ -31,7 +31,7 @@ type
     ## Root that can be used to retrieve block data from database

     parent*: BlockRef ##\
-      ## Not nil, except for the tail
+      ## Not nil, except for the finalized head

   BlockSlot* = object
     ## Unique identifier for a particular fork and time in the block chain -
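For orientation, a self-contained toy sketch of the invariant the updated doc comments describe (illustrative names, not the project's types): following `parent` links from any in-memory block ends at the finalized head, the only block whose parent is nil.

# Toy sketch, not project code: models the "leads back to finalized head,
# parent is nil only there" invariant from the doc comments above.
type
  ToyBlock = ref object
    slot: int
    parent: ToyBlock

func finalizedHeadOf(blck: ToyBlock): ToyBlock =
  ## Follow parent links until reaching the block whose parent is nil;
  ## under the invariant, that block is the finalized head.
  var cur = blck
  while cur.parent != nil:
    cur = cur.parent
  cur

when isMainModule:
  let finalized = ToyBlock(slot: 0)   # parent left nil, as for the finalized head
  let head = ToyBlock(slot: 2, parent: ToyBlock(slot: 1, parent: finalized))
  doAssert finalizedHeadOf(head) == finalized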
@@ -1251,8 +1251,16 @@ proc pruneBlocksDAG(dag: ChainDAGRef) =
       continue

     var cur = head.atSlot()
-    while not cur.blck.isAncestorOf(dag.finalizedHead.blck):
+    # The block whose parent is nil is the `BlockRef` that's part of the
+    # canonical chain but has now been finalized - in theory there could be
+    # states at empty slot iff the fork had epoch-long gaps where the epoch
+    # transition was not on the canonical chain - these will not properly get
+    # cleaned up by the current logic - but they should also be rare
+    # TODO clean up the above as well
+    doAssert dag.finalizedHead.blck.parent == nil,
+      "finalizedHead parent should have been pruned from memory already"
+
+    while cur.blck.parent != nil:
       # TODO: should we move that disk I/O to `onSlotEnd`
       dag.delState(cur.toBlockSlotId().expect("not nil"))
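As a hedged aside, the loop condition removed above amounted to an ancestry walk; a rough sketch of what such a check computes follows (illustrative only, not the project's exact `isAncestorOf`). Given the invariant that only the finalized head has a nil parent, checking `cur.blck.parent != nil` stops the pruning loop at the same point without this walk.

# Toy sketch, not project code: what an ancestry check along parent links does.
type
  ToyBlock = ref object
    slot: int
    parent: ToyBlock

func isAncestorOf(a, b: ToyBlock): bool =
  ## True if `a` lies on the parent chain leading back from `b`.
  var cur = b
  while cur != nil and cur.slot > a.slot:
    cur = cur.parent
  cur == a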
@@ -1263,8 +1271,6 @@ proc pruneBlocksDAG(dag: ChainDAGRef) =
         dag.forkBlocks.excl(KeyedBlockRef.init(cur.blck))
         dag.db.delBlock(cur.blck.root)

-      if cur.blck.parent.isNil:
-        break
       cur = cur.parentOrSlot

     dag.heads.del(n)
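A self-contained toy sketch of the pruning walk itself (illustrative; `parentOrSlot`, `delState` and `delBlock` in the real code carry more semantics than shown here): stepping one slot at a time rather than one block at a time is what lets per-slot states stored at empty slots be deleted along the way.

# Toy sketch, not project code: walk an unviable branch back to the finalized
# head, deleting a state per visited slot and a block where one was proposed.
type
  ToyBlock = ref object
    slot: int
    parent: ToyBlock

  ToyBlockSlot = object
    blck: ToyBlock   # most recent block at or before `slot`
    slot: int

func parentOrSlot(bs: ToyBlockSlot): ToyBlockSlot =
  ## Step one slot back, switching to the parent block once the slot the
  ## current block was proposed at has been visited.
  if bs.slot == bs.blck.slot:
    ToyBlockSlot(blck: bs.blck.parent, slot: bs.slot - 1)
  else:
    ToyBlockSlot(blck: bs.blck, slot: bs.slot - 1)

proc pruneUnviable(head: ToyBlockSlot) =
  var cur = head
  while cur.blck.parent != nil:
    echo "delState at slot ", cur.slot        # one state per visited slot
    if cur.slot == cur.blck.slot:
      echo "delBlock at slot ", cur.slot      # only where a block was proposed
    cur = cur.parentOrSlot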
@@ -474,6 +474,15 @@ suite "chain DAG finalization tests" & preset():
       dag2.finalizedHead.slot == dag.finalizedHead.slot
       getStateRoot(dag2.headState) == getStateRoot(dag.headState)

+    # No canonical block data should be pruned by the removal of the fork
+    for i in Slot(0)..dag2.head.slot:
+      let bids = dag.getBlockIdAtSlot(i).expect("found it")
+      if bids.isProposed:
+        check: dag2.getForkedBlock(bids.bid).isSome
+
+    # The unviable block should have been pruned however
+    check: dag2.getForkedBlock(lateBlock.root).isNone
+
   test "orphaned epoch block" & preset():
     let prestate = (ref ForkedHashedBeaconState)(kind: BeaconStateFork.Phase0)
     for i in 0 ..< SLOTS_PER_EPOCH: