# beacon_chain
# Copyright (c) 2018-2020 Status Research & Development GmbH
# Licensed and distributed under either of
#   * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
#   * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
# at your option. This file may not be copied, modified, or distributed except according to those terms.
|
|
|
|
|
2019-11-14 10:47:55 +00:00
|
|
|
{.used.}
|
|
|
|
|
2019-02-28 21:21:29 +00:00
|
|
|
import
|
2020-05-20 04:57:39 +00:00
|
|
|
options, sequtils, unittest,
|
2020-05-05 09:18:44 +00:00
|
|
|
./testutil, ./testblockutil,
|
2020-07-30 19:18:17 +00:00
|
|
|
../beacon_chain/spec/[datatypes, digest, state_transition, presets],
|
|
|
|
../beacon_chain/[beacon_node_types, ssz],
|
2020-07-31 14:49:06 +00:00
|
|
|
../beacon_chain/block_pools/[chain_dag, quarantine, clearance]
|
2019-02-28 21:21:29 +00:00
|
|
|
|
2020-06-12 16:43:20 +00:00
|
|
|
when isMainModule:
  # Workaround: without importing chronicles here, an unrelated compile
  # error appears when building this module standalone.
  import chronicles
|
|
|
|
|
2020-03-10 04:00:19 +00:00
|
|
|
suiteReport "BlockRef and helpers" & preset():
  timedTest "isAncestorOf sanity" & preset():
    # A tiny three-block linear chain: root <- mid <- tip
    let
      root = BlockRef(slot: Slot(0))
      mid = BlockRef(slot: Slot(1), parent: root)
      tip = BlockRef(slot: Slot(2), parent: mid)

    check:
      # every block is its own ancestor
      root.isAncestorOf(root)
      root.isAncestorOf(mid)
      root.isAncestorOf(tip)
      mid.isAncestorOf(mid)
      mid.isAncestorOf(tip)

      # ancestry is not symmetric
      not tip.isAncestorOf(root)
      not tip.isAncestorOf(mid)
      not mid.isAncestorOf(root)

  timedTest "getAncestorAt sanity" & preset():
    # Linear chain with a gap: slot 3 has no block
    let
      b0 = BlockRef(slot: Slot(0))
      b1 = BlockRef(slot: Slot(1), parent: b0)
      b2 = BlockRef(slot: Slot(2), parent: b1)
      b4 = BlockRef(slot: Slot(4), parent: b2)

    check:
      b0.getAncestorAt(Slot(0)) == b0
      b0.getAncestorAt(Slot(1)) == b0

      b1.getAncestorAt(Slot(0)) == b0
      b1.getAncestorAt(Slot(1)) == b1

      b4.getAncestorAt(Slot(0)) == b0
      b4.getAncestorAt(Slot(1)) == b1
      b4.getAncestorAt(Slot(2)) == b2
      # slot 3 is empty, so the ancestor "at" it is the slot-2 block
      b4.getAncestorAt(Slot(3)) == b2
      b4.getAncestorAt(Slot(4)) == b4
|
2020-03-10 04:00:19 +00:00
|
|
|
suiteReport "BlockSlot and helpers" & preset():
  timedTest "atSlot sanity" & preset():
    let
      b0 = BlockRef(slot: Slot(0))
      b1 = BlockRef(slot: Slot(1), parent: b0)
      b2 = BlockRef(slot: Slot(2), parent: b1)
      b4 = BlockRef(slot: Slot(4), parent: b2)

    check:
      b0.atSlot(Slot(0)).blck == b0
      # atSlot walks back to the same ancestor regardless of start block
      b0.atSlot(Slot(0)) == b1.atSlot(Slot(0))
      b1.atSlot(Slot(1)).blck == b1

      b4.atSlot(Slot(0)).blck == b0

  timedTest "parent sanity" & preset():
    # BlockSlot pairs a block with a (possibly later, empty) slot;
    # names below encode (block, slot): b0s1 is block b0 viewed at slot 1
    let
      b0 = BlockRef(slot: Slot(0))
      b0s0 = BlockSlot(blck: b0, slot: Slot(0))
      b0s1 = BlockSlot(blck: b0, slot: Slot(1))
      b2 = BlockRef(slot: Slot(2), parent: b0)
      b2s2 = BlockSlot(blck: b2, slot: Slot(2))
      b2s4 = BlockSlot(blck: b2, slot: Slot(4))

    check:
      # stepping past genesis yields a nil block
      b0s0.parent == BlockSlot(blck: nil, slot: Slot(0))
      b0s1.parent == b0s0
      b2s2.parent == b0s1
      # parent of an empty-slot view first steps back one slot ...
      b2s4.parent == BlockSlot(blck: b2, slot: Slot(3))
      # ... and only reaches the block's own slot after repeated steps
      b2s4.parent.parent == b2s2
|
|
|
|
|
2020-05-05 09:18:44 +00:00
|
|
|
suiteReport "Block pool processing" & preset():
  setup:
    # Fresh database, DAG and quarantine per test; b1/b2 form a short chain
    # on top of the tail block
    var
      db = makeTestDB(SLOTS_PER_EPOCH)
      dag = init(ChainDAGRef, defaultRuntimePreset, db)
      quarantine = QuarantineRef()
      stateData = newClone(dag.loadTailState())
      cache = StateCache()
      b1 = addTestBlock(stateData.data, dag.tail.root, cache)
      b1Root = hash_tree_root(b1.message)
      b2 = addTestBlock(stateData.data, b1Root, cache)
      b2Root {.used.} = hash_tree_root(b2.message)

  timedTest "getRef returns nil for missing blocks":
    check:
      dag.getRef(default Eth2Digest) == nil

  timedTest "loadTailState gets genesis block on first load" & preset():
    let
      b0 = dag.get(dag.tail.root)

    check:
      b0.isSome()

  timedTest "Simple block add&get" & preset():
    let
      b1Add = dag.addRawBlock(quarantine, b1, nil)
      b1Get = dag.get(b1.root)

    check:
      b1Get.isSome()
      b1Get.get().refs.root == b1Root
      b1Add[].root == b1Get.get().refs.root
      dag.heads.len == 1
      dag.heads[0] == b1Add[]

    let
      b2Add = dag.addRawBlock(quarantine, b2, nil)
      b2Get = dag.get(b2.root)

    check:
      b2Get.isSome()
      b2Get.get().refs.root == b2.root
      b2Add[].root == b2Get.get().refs.root
      dag.heads.len == 1
      dag.heads[0] == b2Add[]

    # Advance one slot without a block so the chain has a gap at slot 3
    check:
      process_slots(stateData.data, stateData.data.data.slot + 1)

    let
      b4 = addTestBlock(stateData.data, b2.root, cache)
      b4Add = dag.addRawBlock(quarantine, b4, nil)

    check:
      b4Add[].parent == b2Add[]

    dag.updateHead(b4Add[])

    var blocks: array[3, BlockRef]

    check:
      dag.getBlockRange(Slot(0), 1, blocks.toOpenArray(0, 0)) == 0
      blocks[0..<1] == [dag.tail]

      dag.getBlockRange(Slot(0), 1, blocks.toOpenArray(0, 1)) == 0
      blocks[0..<2] == [dag.tail, b1Add[]]

      dag.getBlockRange(Slot(0), 2, blocks.toOpenArray(0, 1)) == 0
      blocks[0..<2] == [dag.tail, b2Add[]]

      dag.getBlockRange(Slot(0), 3, blocks.toOpenArray(0, 1)) == 1
      blocks[0..<2] == [nil, dag.tail] # block 3 is missing!

      dag.getBlockRange(Slot(2), 2, blocks.toOpenArray(0, 1)) == 0
      blocks[0..<2] == [b2Add[], b4Add[]] # block 3 is missing!

      # empty length
      dag.getBlockRange(Slot(2), 2, blocks.toOpenArray(0, -1)) == 0

      # No blocks in sight
      dag.getBlockRange(Slot(5), 1, blocks.toOpenArray(0, 1)) == 2

      # No blocks in sight either due to gaps
      dag.getBlockRange(Slot(3), 2, blocks.toOpenArray(0, 1)) == 2
      blocks[0..<2] == [BlockRef nil, nil] # block 3 is missing!

  timedTest "Reverse order block add & get" & preset():
    # b2 arrives before its parent b1: it must be rejected and b1 requested
    let missing = dag.addRawBlock(quarantine, b2, nil)
    check: missing.error == MissingParent

    check:
      dag.get(b2.root).isNone() # Unresolved, shouldn't show up
      FetchRecord(root: b1.root) in quarantine.checkMissing()

    let status = dag.addRawBlock(quarantine, b1, nil)

    check: status.isOk

    let
      b1Get = dag.get(b1.root)
      b2Get = dag.get(b2.root)

    check:
      b1Get.isSome()
      b2Get.isSome()

      b2Get.get().refs.parent == b1Get.get().refs

    dag.updateHead(b2Get.get().refs)

    # The heads structure should have been updated to contain only the new
    # b2 head
    check:
      dag.heads.mapIt(it) == @[b2Get.get().refs]

    # check that init also reloads block graph
    var
      dag2 = init(ChainDAGRef, defaultRuntimePreset, db)

    check:
      # ensure we loaded the correct head state
      dag2.head.root == b2Root
      hash_tree_root(dag2.headState.data.data) == b2.message.state_root
      dag2.get(b1Root).isSome()
      dag2.get(b2Root).isSome()
      dag2.heads.len == 1
      dag2.heads[0].root == b2Root

  timedTest "Adding the same block twice returns a Duplicate error" & preset():
    let
      b10 = dag.addRawBlock(quarantine, b1, nil)
      b11 = dag.addRawBlock(quarantine, b1, nil)

    check:
      b11.error == Duplicate
      not b10[].isNil

  timedTest "updateHead updates head and headState" & preset():
    let
      b1Add = dag.addRawBlock(quarantine, b1, nil)

    dag.updateHead(b1Add[])

    check:
      dag.head == b1Add[]
      dag.headState.data.data.slot == b1Add[].slot

  timedTest "updateStateData sanity" & preset():
    let
      b1Add = dag.addRawBlock(quarantine, b1, nil)
      b2Add = dag.addRawBlock(quarantine, b2, nil)
      bs1 = BlockSlot(blck: b1Add[], slot: b1.message.slot)
      bs1_3 = b1Add[].atSlot(3.Slot)
      bs2_3 = b2Add[].atSlot(3.Slot)

    var tmpState = assignClone(dag.headState)

    # move to specific block
    dag.updateStateData(tmpState[], bs1)

    check:
      tmpState.blck == b1Add[]
      tmpState.data.data.slot == bs1.slot

    # advance to a later (empty) slot on the same block
    dag.updateStateData(tmpState[], bs1_3) # skip slots

    check:
      tmpState.blck == b1Add[]
      tmpState.data.data.slot == bs1_3.slot

    # Move back slots, but not blocks
    dag.updateStateData(tmpState[], bs1_3.parent())
    check:
      tmpState.blck == b1Add[]
      tmpState.data.data.slot == bs1_3.parent().slot

    # Move to different block and slot
    dag.updateStateData(tmpState[], bs2_3)
    check:
      tmpState.blck == b2Add[]
      tmpState.data.data.slot == bs2_3.slot

    # Move back slot and block
    dag.updateStateData(tmpState[], bs1)
    check:
      tmpState.blck == b1Add[]
      tmpState.data.data.slot == bs1.slot

    # Move back to genesis
    dag.updateStateData(tmpState[], bs1.parent())
    check:
      tmpState.blck == b1Add[].parent
      tmpState.data.data.slot == bs1.parent.slot
|
|
|
|
|
2020-07-30 19:18:17 +00:00
|
|
|
suiteReport "chain DAG finalization tests" & preset():
  setup:
    var
      db = makeTestDB(SLOTS_PER_EPOCH)
      dag = init(ChainDAGRef, defaultRuntimePreset, db)
      quarantine = QuarantineRef()
      cache = StateCache()

  timedTest "prune heads on finalization" & preset():
    # Create a fork that will not be taken
    var
      blck = makeTestBlock(dag.headState.data, dag.head.root, cache)
      tmpState = assignClone(dag.headState.data)
    check:
      process_slots(
        tmpState[], tmpState.data.slot + (5 * SLOTS_PER_EPOCH).uint64)

    # a block built on the soon-to-be-finalized head, added only at the end
    let lateBlock = makeTestBlock(tmpState[], dag.head.root, cache)
    block:
      let status = dag.addRawBlock(quarantine, blck, nil)
      check: status.isOk()

    for i in 0 ..< (SLOTS_PER_EPOCH * 6):
      if i == 1:
        # There are 2 heads now because of the fork at slot 1
        check:
          dag.heads.len == 2

      blck = makeTestBlock(
        dag.headState.data, dag.head.root, cache,
        attestations = makeFullAttestations(
          dag.headState.data.data, dag.head.root,
          dag.headState.data.data.slot, cache, {}))
      let added = dag.addRawBlock(quarantine, blck, nil)
      check: added.isOk()
      dag.updateHead(added[])

    # finalization should have pruned the fork head
    check:
      dag.heads.len() == 1

    block:
      # The late block is a block whose parent was finalized long ago and thus
      # is no longer a viable head candidate
      let status = dag.addRawBlock(quarantine, lateBlock, nil)
      check: status.error == Unviable

    let
      dag2 = init(ChainDAGRef, defaultRuntimePreset, db)

    # check that the state reloaded from database resembles what we had before
    check:
      dag2.tail.root == dag.tail.root
      dag2.head.root == dag.head.root
      dag2.finalizedHead.blck.root == dag.finalizedHead.blck.root
      dag2.finalizedHead.slot == dag.finalizedHead.slot
      hash_tree_root(dag2.headState.data.data) ==
        hash_tree_root(dag.headState.data.data)

  # timedTest "init with gaps" & preset():
  #   var cache = StateCache()
  #   for i in 0 ..< (SLOTS_PER_EPOCH * 6 - 2):
  #     var
  #       blck = makeTestBlock(
  #         dag.headState.data, pool.head.blck.root, cache,
  #         attestations = makeFullAttestations(
  #           dag.headState.data.data, pool.head.blck.root,
  #           dag.headState.data.data.slot, cache, {}))
  #
  #     let added = dag.addRawBlock(quarantine, hash_tree_root(blck.message), blck) do (validBlock: BlockRef):
  #       discard
  #     check: added.isOk()
  #     dag.updateHead(added[])
  #
  #   # Advance past epoch so that the epoch transition is gapped
  #   check:
  #     process_slots(
  #       dag.headState.data, Slot(SLOTS_PER_EPOCH * 6 + 2) )
  #
  #   var blck = makeTestBlock(
  #     dag.headState.data, pool.head.blck.root, cache,
  #     attestations = makeFullAttestations(
  #       dag.headState.data.data, pool.head.blck.root,
  #       dag.headState.data.data.slot, cache, {}))
  #
  #   let added = dag.addRawBlock(quarantine, hash_tree_root(blck.message), blck) do (validBlock: BlockRef):
  #     discard
  #   check: added.isOk()
  #   dag.updateHead(added[])
  #
  #   let
  #     pool2 = BlockPool.init(db)
  #
  #   # check that the state reloaded from database resembles what we had before
  #   check:
  #     pool2.dag.tail.root == dag.tail.root
  #     pool2.dag.head.blck.root == dag.head.blck.root
  #     pool2.dag.finalizedHead.blck.root == dag.finalizedHead.blck.root
  #     pool2.dag.finalizedHead.slot == dag.finalizedHead.slot
  #     hash_tree_root(pool2.headState.data.data) ==
  #       hash_tree_root(dag.headState.data.data)