# beacon_chain
# Copyright (c) 2018-2020 Status Research & Development GmbH
# Licensed and distributed under either of
# * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
# at your option. This file may not be copied, modified, or distributed except according to those terms.
{.used.}
|
|
|
|
|
2019-02-28 21:21:29 +00:00
|
|
|
import
|
2020-05-20 04:57:39 +00:00
|
|
|
options, sequtils, unittest,
|
2020-05-05 09:18:44 +00:00
|
|
|
./testutil, ./testblockutil,
|
2020-08-11 19:39:53 +00:00
|
|
|
../beacon_chain/spec/[datatypes, digest, helpers, state_transition, presets],
|
2020-07-30 19:18:17 +00:00
|
|
|
../beacon_chain/[beacon_node_types, ssz],
|
2020-07-31 14:49:06 +00:00
|
|
|
../beacon_chain/block_pools/[chain_dag, quarantine, clearance]
|
2019-02-28 21:21:29 +00:00
|
|
|
|
2020-06-12 16:43:20 +00:00
|
|
|
when isMainModule:
  # Imported only when compiled as the main program; the original note says
  # omitting it triggers a compile error in that configuration.
  import chronicles # or some random compile error happens...
2020-08-26 15:23:34 +00:00
|
|
|
template wrappedTimedTest(name: string, body: untyped) =
  ## Run `body` as a `timedTest` but inside a proc.
  ##
  ## The `check` macro takes a copy of whatever it's checking, on the stack!
  ## This leads to stack overflow when checks run at module scope.
  ## We can mitigate that by wrapping checks in a proc, whose stack frame is
  ## sized and reclaimed per call.
  block: # Symbol namespacing
    proc wrappedTest() =
      timedTest name:
        body
    wrappedTest()
2020-03-10 04:00:19 +00:00
|
|
|
suiteReport "BlockRef and helpers" & preset():
  wrappedTimedTest "isAncestorOf sanity" & preset():
    # Tiny linear chain s0 <- s1 <- s2 built directly from BlockRef values
    let
      s0 = BlockRef(slot: Slot(0))
      s1 = BlockRef(slot: Slot(1), parent: s0)
      s2 = BlockRef(slot: Slot(2), parent: s1)

    check:
      # A block counts as its own ancestor
      s0.isAncestorOf(s0)
      s0.isAncestorOf(s1)
      s0.isAncestorOf(s2)
      s1.isAncestorOf(s1)
      s1.isAncestorOf(s2)

      # Ancestry is directional - descendants are not ancestors
      not s2.isAncestorOf(s0)
      not s2.isAncestorOf(s1)
      not s1.isAncestorOf(s0)

  wrappedTimedTest "get_ancestor sanity" & preset():
    # Chain with a gap at slot 3: s0 <- s1 <- s2 <- s4
    let
      s0 = BlockRef(slot: Slot(0))
      s1 = BlockRef(slot: Slot(1), parent: s0)
      s2 = BlockRef(slot: Slot(2), parent: s1)
      s4 = BlockRef(slot: Slot(4), parent: s2)

    check:
      s0.get_ancestor(Slot(0)) == s0
      s0.get_ancestor(Slot(1)) == s0

      s1.get_ancestor(Slot(0)) == s0
      s1.get_ancestor(Slot(1)) == s1

      s4.get_ancestor(Slot(0)) == s0
      s4.get_ancestor(Slot(1)) == s1
      s4.get_ancestor(Slot(2)) == s2
      # Slot 3 is empty, so its ancestor is the block at slot 2
      s4.get_ancestor(Slot(3)) == s2
      s4.get_ancestor(Slot(4)) == s4

  wrappedTimedTest "epochAncestor sanity" & preset():
    # Build a two-epoch-long chain with a block in every slot
    let
      s0 = BlockRef(slot: Slot(0))
    var cur = s0
    for i in 1..SLOTS_PER_EPOCH * 2:
      cur = BlockRef(slot: Slot(i), parent: cur)

    let ancestor = cur.epochAncestor(cur.slot.epoch)

    check:
      ancestor.slot.epoch == cur.slot.epoch
      ancestor.blck != cur # should have selected a parent

      ancestor.blck.epochAncestor(cur.slot.epoch) == ancestor
      ancestor.blck.epochAncestor(ancestor.blck.slot.epoch) != ancestor
2020-03-10 04:00:19 +00:00
|
|
|
suiteReport "BlockSlot and helpers" & preset():
  wrappedTimedTest "atSlot sanity" & preset():
    # Chain with a gap at slot 3: s0 <- s1 <- s2 <- s4
    let
      s0 = BlockRef(slot: Slot(0))
      s1 = BlockRef(slot: Slot(1), parent: s0)
      s2 = BlockRef(slot: Slot(2), parent: s1)
      s4 = BlockRef(slot: Slot(4), parent: s2)

    check:
      s0.atSlot(Slot(0)).blck == s0
      # Same (block, slot) pair regardless of which descendant we start from
      s0.atSlot(Slot(0)) == s1.atSlot(Slot(0))
      s1.atSlot(Slot(1)).blck == s1

      s4.atSlot(Slot(0)).blck == s0

  wrappedTimedTest "parent sanity" & preset():
    # BlockSlot pairs at and between blocks; s2's parent skips slot 1
    let
      s0 = BlockRef(slot: Slot(0))
      s00 = BlockSlot(blck: s0, slot: Slot(0))
      s01 = BlockSlot(blck: s0, slot: Slot(1))
      s2 = BlockRef(slot: Slot(2), parent: s0)
      s22 = BlockSlot(blck: s2, slot: Slot(2))
      s24 = BlockSlot(blck: s2, slot: Slot(4))

    check:
      # Genesis parent is the nil block at slot 0
      s00.parent == BlockSlot(blck: nil, slot: Slot(0))
      s01.parent == s00
      s22.parent == s01
      # Walking back an empty slot keeps the same block, decrements the slot
      s24.parent == BlockSlot(blck: s2, slot: Slot(3))
      s24.parent.parent == s22
2020-05-05 09:18:44 +00:00
|
|
|
suiteReport "Block pool processing" & preset():
  setup:
    # Fresh DB, DAG and two chained test blocks (b1 <- b2) for every test
    var
      db = makeTestDB(SLOTS_PER_EPOCH)
      dag = init(ChainDAGRef, defaultRuntimePreset, db)
      quarantine = QuarantineRef()
      stateData = newClone(dag.headState)
      cache = StateCache()
      b1 = addTestBlock(stateData.data, dag.tail.root, cache)
      b1Root = hash_tree_root(b1.message)
      b2 = addTestBlock(stateData.data, b1Root, cache)
      b2Root {.used.} = hash_tree_root(b2.message)

  wrappedTimedTest "getRef returns nil for missing blocks":
    check:
      dag.getRef(default Eth2Digest) == nil

  wrappedTimedTest "loadTailState gets genesis block on first load" & preset():
    let
      b0 = dag.get(dag.tail.root)

    check:
      b0.isSome()

  wrappedTimedTest "Simple block add&get" & preset():
    let
      b1Add = dag.addRawBlock(quarantine, b1, nil)
      b1Get = dag.get(b1.root)

    check:
      b1Get.isSome()
      b1Get.get().refs.root == b1Root
      b1Add[].root == b1Get.get().refs.root
      dag.heads.len == 1
      dag.heads[0] == b1Add[]

    let
      b2Add = dag.addRawBlock(quarantine, b2, nil)
      b2Get = dag.get(b2.root)

    check:
      b2Get.isSome()
      b2Get.get().refs.root == b2.root
      b2Add[].root == b2Get.get().refs.root
      dag.heads.len == 1
      dag.heads[0] == b2Add[]
      # b1 and b2 are in the same epoch, so they share an EpochRef
      not b1Add[].findEpochRef(b1Add[].slot.epoch).isNil
      b1Add[].findEpochRef(b1Add[].slot.epoch) ==
        b2Add[].findEpochRef(b2Add[].slot.epoch)
      b1Add[].findEpochRef(b1Add[].slot.epoch + 1).isNil

    # Skip one slot to get a gap
    check:
      process_slots(stateData.data, stateData.data.data.slot + 1)

    let
      b4 = addTestBlock(stateData.data, b2.root, cache)
      b4Add = dag.addRawBlock(quarantine, b4, nil)

    check:
      b4Add[].parent == b2Add[]

    dag.updateHead(b4Add[], quarantine)

    var blocks: array[3, BlockRef]

    check:
      dag.getBlockRange(Slot(0), 1, blocks.toOpenArray(0, 0)) == 0
      blocks[0..<1] == [dag.tail]

      dag.getBlockRange(Slot(0), 1, blocks.toOpenArray(0, 1)) == 0
      blocks[0..<2] == [dag.tail, b1Add[]]

      dag.getBlockRange(Slot(0), 2, blocks.toOpenArray(0, 1)) == 0
      blocks[0..<2] == [dag.tail, b2Add[]]

      dag.getBlockRange(Slot(0), 3, blocks.toOpenArray(0, 1)) == 1
      blocks[1..<2] == [dag.tail] # block 3 is missing!

      dag.getBlockRange(Slot(2), 2, blocks.toOpenArray(0, 1)) == 0
      blocks[0..<2] == [b2Add[], b4Add[]] # block 3 is missing!

      # empty length
      dag.getBlockRange(Slot(2), 2, blocks.toOpenArray(0, -1)) == 0

      # No blocks in sight
      dag.getBlockRange(Slot(5), 1, blocks.toOpenArray(0, 1)) == 2

      # No blocks in sight either due to gaps
      dag.getBlockRange(Slot(3), 2, blocks.toOpenArray(0, 1)) == 2
      blocks[2..<2].len == 0

  wrappedTimedTest "Reverse order block add & get" & preset():
    # Adding b2 before its parent b1 must fail and queue b1 as missing
    let missing = dag.addRawBlock(quarantine, b2, nil)
    check: missing.error == MissingParent

    check:
      dag.get(b2.root).isNone() # Unresolved, shouldn't show up
      FetchRecord(root: b1.root) in quarantine.checkMissing()

    let status = dag.addRawBlock(quarantine, b1, nil)

    check: status.isOk

    let
      b1Get = dag.get(b1.root)
      b2Get = dag.get(b2.root)

    check:
      b1Get.isSome()
      b2Get.isSome()

      b2Get.get().refs.parent == b1Get.get().refs

    dag.updateHead(b2Get.get().refs, quarantine)

    # The heads structure should have been updated to contain only the new
    # b2 head
    check:
      dag.heads.mapIt(it) == @[b2Get.get().refs]

    # check that init also reloads block graph
    var
      dag2 = init(ChainDAGRef, defaultRuntimePreset, db)

    check:
      # ensure we loaded the correct head state
      dag2.head.root == b2Root
      hash_tree_root(dag2.headState.data.data) == b2.message.state_root
      dag2.get(b1Root).isSome()
      dag2.get(b2Root).isSome()
      dag2.heads.len == 1
      dag2.heads[0].root == b2Root

  wrappedTimedTest "Adding the same block twice returns a Duplicate error" & preset():
    let
      b10 = dag.addRawBlock(quarantine, b1, nil)
      b11 = dag.addRawBlock(quarantine, b1, nil)

    check:
      b11.error == Duplicate
      not b10[].isNil

  wrappedTimedTest "updateHead updates head and headState" & preset():
    let
      b1Add = dag.addRawBlock(quarantine, b1, nil)

    dag.updateHead(b1Add[], quarantine)

    check:
      dag.head == b1Add[]
      dag.headState.data.data.slot == b1Add[].slot

  wrappedTimedTest "updateStateData sanity" & preset():
    let
      b1Add = dag.addRawBlock(quarantine, b1, nil)
      b2Add = dag.addRawBlock(quarantine, b2, nil)
      bs1 = BlockSlot(blck: b1Add[], slot: b1.message.slot)
      bs1_3 = b1Add[].atSlot(3.Slot)
      bs2_3 = b2Add[].atSlot(3.Slot)

    var tmpState = assignClone(dag.headState)

    # move to specific block
    var cache = StateCache()
    dag.updateStateData(tmpState[], bs1, cache)

    check:
      tmpState.blck == b1Add[]
      tmpState.data.data.slot == bs1.slot

    # Skip slots
    dag.updateStateData(tmpState[], bs1_3, cache) # skip slots

    check:
      tmpState.blck == b1Add[]
      tmpState.data.data.slot == bs1_3.slot

    # Move back slots, but not blocks
    dag.updateStateData(tmpState[], bs1_3.parent(), cache)
    check:
      tmpState.blck == b1Add[]
      tmpState.data.data.slot == bs1_3.parent().slot

    # Move to different block and slot
    dag.updateStateData(tmpState[], bs2_3, cache)
    check:
      tmpState.blck == b2Add[]
      tmpState.data.data.slot == bs2_3.slot

    # Move back slot and block
    dag.updateStateData(tmpState[], bs1, cache)
    check:
      tmpState.blck == b1Add[]
      tmpState.data.data.slot == bs1.slot

    # Move back to genesis
    dag.updateStateData(tmpState[], bs1.parent(), cache)
    check:
      tmpState.blck == b1Add[].parent
      tmpState.data.data.slot == bs1.parent.slot
2020-07-30 19:18:17 +00:00
|
|
|
suiteReport "chain DAG finalization tests" & preset():
  setup:
    var
      db = makeTestDB(SLOTS_PER_EPOCH)
      dag = init(ChainDAGRef, defaultRuntimePreset, db)
      quarantine = QuarantineRef()
      cache = StateCache()

  wrappedTimedTest "prune heads on finalization" & preset():
    # Create a fork that will not be taken
    var
      blck = makeTestBlock(dag.headState.data, dag.head.root, cache)
      tmpState = assignClone(dag.headState.data)
    check:
      process_slots(
        tmpState[], tmpState.data.slot + (5 * SLOTS_PER_EPOCH).uint64)

    # Block built on a state far in the future - added last, below
    let lateBlock = addTestBlock(tmpState[], dag.head.root, cache)
    block:
      let status = dag.addRawBlock(quarantine, blck, nil)
      check: status.isOk()

    assign(tmpState[], dag.headState.data)

    # Drive the chain forward 6 epochs so finalization kicks in
    for i in 0 ..< (SLOTS_PER_EPOCH * 6):
      if i == 1:
        # There are 2 heads now because of the fork at slot 1
        check:
          dag.heads.len == 2

      blck = addTestBlock(
        tmpState[], dag.head.root, cache,
        attestations = makeFullAttestations(
          tmpState[].data, dag.head.root, tmpState[].data.slot, cache, {}))
      let added = dag.addRawBlock(quarantine, blck, nil)
      check: added.isOk()
      dag.updateHead(added[], quarantine)

    check:
      dag.heads.len() == 1

    let headER = dag.heads[0].findEpochRef(dag.heads[0].slot.epoch)

    check:
      # Epochrefs should share validator key set when the validator set is
      # stable
      not headER.isNil
      not dag.heads[0].findEpochRef(dag.heads[0].slot.epoch - 1).isNil
      headER !=
        dag.heads[0].findEpochRef(dag.heads[0].slot.epoch - 1)
      headER.validator_key_store[1] ==
        dag.heads[0].findEpochRef(dag.heads[0].slot.epoch - 1).validator_key_store[1]

    block:
      var cur = dag.heads[0]
      while cur != nil:
        if cur.slot < dag.finalizedHead.blck.slot:
          # Cache should be cleaned on finalization
          check: cur.epochRefs.len == 0
        else:
          # EpochRef validator keystores should back-propagate to all previous
          # epochs
          for e in cur.epochRefs:
            check (addr headER.validator_keys) == (addr e.validator_keys)
        cur = cur.parent

    block:
      # The late block is a block whose parent was finalized long ago and thus
      # is no longer a viable head candidate
      let status = dag.addRawBlock(quarantine, lateBlock, nil)
      check: status.error == Unviable

    let
      dag2 = init(ChainDAGRef, defaultRuntimePreset, db)

    # check that the state reloaded from database resembles what we had before
    check:
      dag2.tail.root == dag.tail.root
      dag2.head.root == dag.head.root
      dag2.finalizedHead.blck.root == dag.finalizedHead.blck.root
      dag2.finalizedHead.slot == dag.finalizedHead.slot
      hash_tree_root(dag2.headState.data.data) ==
        hash_tree_root(dag.headState.data.data)

  wrappedTimedTest "orphaned epoch block" & preset():
    # Snapshot the state just before the last block of the epoch so we can
    # later build a competing block on the head's parent
    var prestate = (ref HashedBeaconState)()
    for i in 0 ..< SLOTS_PER_EPOCH:
      if i == SLOTS_PER_EPOCH - 1:
        assign(prestate[], dag.headState.data)

      let blck = makeTestBlock(
        dag.headState.data, dag.head.root, cache)
      let added = dag.addRawBlock(quarantine, blck, nil)
      check: added.isOk()
      dag.updateHead(added[], quarantine)

    check:
      dag.heads.len() == 1

    advance_slot(prestate[], {}, cache)

    # create another block, orphaning the head
    let blck = makeTestBlock(
      prestate[], dag.head.parent.root, cache)

    # Add block, but don't update head
    let added = dag.addRawBlock(quarantine, blck, nil)
    check: added.isOk()

    var
      dag2 = init(ChainDAGRef, defaultRuntimePreset, db)

    # check that we can apply the block after the orphaning
    let added2 = dag2.addRawBlock(quarantine, blck, nil)
    check: added2.isOk()
suiteReport "chain DAG finalization tests" & preset():
  setup:
    var
      db = makeTestDB(SLOTS_PER_EPOCH)
      dag = init(ChainDAGRef, defaultRuntimePreset, db)
      quarantine = QuarantineRef()
      cache = StateCache()

  # Use wrappedTimedTest like the rest of the file: `check` copies its
  # argument on the stack, and wrapping the test body in a proc avoids the
  # resulting stack-overflow risk.
  wrappedTimedTest "init with gaps" & preset():
    # Fill almost 6 epochs with blocks, finalizing as we go
    for blck in makeTestBlocks(
        dag.headState.data, dag.head.root, cache, int(SLOTS_PER_EPOCH * 6 - 2),
        true):
      let added = dag.addRawBlock(quarantine, blck, nil)
      check: added.isOk()
      dag.updateHead(added[], quarantine)

    # Advance past epoch so that the epoch transition is gapped
    check:
      process_slots(
        dag.headState.data, Slot(SLOTS_PER_EPOCH * 6 + 2) )

    var blck = makeTestBlock(
      dag.headState.data, dag.head.root, cache,
      attestations = makeFullAttestations(
        dag.headState.data.data, dag.head.root,
        dag.headState.data.data.slot, cache, {}))

    let added = dag.addRawBlock(quarantine, blck, nil)
    check: added.isOk()
    dag.updateHead(added[], quarantine)

    let
      dag2 = init(ChainDAGRef, defaultRuntimePreset, db)

    # check that the state reloaded from database resembles what we had before
    check:
      dag2.tail.root == dag.tail.root
      dag2.head.root == dag.head.root
      dag2.finalizedHead.blck.root == dag.finalizedHead.blck.root
      dag2.finalizedHead.slot == dag.finalizedHead.slot
      hash_tree_root(dag2.headState.data.data) ==
        hash_tree_root(dag.headState.data.data)