nimbus-eth1/tests/test_lru_cache.nim
Jordan Hrycaj cfe955c962
Feature/implement poa processing (#748)
* re-shuffled Clique functions

why:
  Due to the port from the go sources, the interface logic is not optimal
  for Nimbus. The main visible function is currently snapshot(), and most
  of the work of producing this function's result has been moved to a
  sub-directory.

* run eip-225 Clique test against p2p/chain.persistBlocks()

why:
  Previously, loading the test block chains was fudged, with the sole
  purpose of filling the database. As it is now clear how Nimbus works on
  Goerli, the same can be achieved with a more realistic scenario.

details:
  Eventually these tests will be a precursor to the replay tests for the
  Goerli chain, supporting a TDD approach with simpler cases.

* fix exception annotations for executor module

why:
  needed for exception tracking

details:
  The main annoyance is the vmState methods (in state.nim), which can
  potentially throw a base-level Exception (a proc would only throw a
  CatchableError).
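  As a rough illustration of what the annotations buy (plain Nim, nothing
  from the nimbus sources): a proc carrying a raises list can only
  propagate the listed exception types, so the compiler flags any call
  that might leak a bare Exception.

    proc fetchValue(ok: bool): int {.raises: [CatchableError].} =
      # Hypothetical example proc. Raising ValueError is fine here since it
      # derives from CatchableError; anything that could leak a bare
      # Exception would be rejected against the raises list above.
      if not ok:
        raise newException(ValueError, "no value")
      42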

* split p2p/chain into sub-modules and fix exception annotations

why:
  make space for implementing PoA stuff

* provide over-loadable Clique PRNG

why:
  A PRNG is provided for generating reproducible number sequences. The
  functions which employ the PRNG to generate time slots were ported from
  the go implementation. They are currently unused.
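  A hedged sketch of the idea (hypothetical names, not the Clique API):
  the PRNG is kept as a replaceable closure, so tests can inject a seeded
  generator and get a reproducible time-slot sequence.

    import random

    type
      PrngFn = proc(max: int): int {.gcsafe.}
      SlotTimer = object
        prng: PrngFn                 # over-loadable generator

    proc newSlotTimer(seed: int64): SlotTimer =
      var rng = initRand(seed)
      result.prng = proc(max: int): int =
        rng.rand(max)

    var st = newSlotTimer(42)
    let pick = st.prng               # same seed => same delay sequence
    echo pick(100)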

* implement trusted signer assembly in p2p/chain.persistBlocks()

details:
  * PoA processing moved there at the end of a transaction. Currently,
    there is no action (e.g. transaction rollback) if this fails.
  * The unit tests with staged blocks work OK. In particular, there should
    also be tests with to-be-rejected blocks.
  * TODO: 1. Optimise throughput/cache handling; 2. Verify headers

* fix statement cast in pool.nim

* added table features to LRU cache

why:
  Clique uses the LRU cache with a mixture of volatile in-memory items
  and database checkpoints for hard synchronisation. For performance,
  Clique needs more table-like features.

details:
  First-key, last-key, and key-query functions were added, as well as an
  efficient random delete. A key-item pair iterator was also added for
  debugging.
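  A rough usage sketch of the new calls, based on the createTestCache()
  helper from the unit test below (the comments reflect the intended
  semantics rather than a spec):

    var cache = createTestCache()        # int key -> string value ($key)
    discard cache.getItem(1)
    discard cache.getItem(2)
    discard cache.getItem(3)

    doAssert cache.firstKey == 1         # head of the LRU order
    doAssert cache.lastKey == 3          # most recently fetched key
    doAssert cache.hasKey(2)             # key query
    doAssert cache.delItem(2)            # delete at an arbitrary position
    for key, item in cache.keyItemPairs: # key-item pair iterator (debugging)
      echo key, " -> ", item.value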

* re-factored LRU snapshot caching

why:
  Caching was sub-optimal (aka bonkers) in that it skipped the in-memory
  caches in many cases and so mostly rebuilt the snapshot from the last
  on-disk checkpoint.

details:
  The LRU snapshot toValue() handler has been moved into the module
  clique_snapshot, because toValue() is not supposed to see the whole LRU
  cache database. So there must be a higher layer working with the whole
  LRU cache and the on-disk checkpoint database.
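  For illustration, with the generic test types from test_lru_cache.nim
  (the Clique-specific key/value types are not shown): the toValue()
  handler handed to initCache() only computes a single entry and never
  sees the cache, so combining the in-memory cache with the on-disk
  checkpoints has to happen in the caller, i.e. clique_snapshot.

    var
      toKey: LruKey[int,int] =
        proc(arg: int): int = arg
      toValue: LruValue[int,string,int] =
        proc(arg: int): Result[string,int] =
          ok($arg)                 # computes one value, sees no cache state
      cache: LruCache[int,int,string,int]
    cache.initCache(toKey, toValue, 10)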

also:
  some clean up

todo:
  The code still assumes that the block headers are valid in themselves.
  This is particularly important when an epoch header (aka re-sync header)
  is processed, as it must contain the PoA result of all previous headers.

  So blocks need to be verified when they come in, before being used for
  PoA processing.

* fix some snapshot cache fringe cases

why:
  Empty sequences must not be indexed in the clique_snapshot module.
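  A minimal illustration of the fringe case (hypothetical code, not the
  clique_snapshot module): indexing sq[0] or sq[^1] on an empty sequence
  raises an index error, so a length check must come first.

    proc lastOrDefault(sq: seq[int]; default = 0): int =
      if 0 < sq.len:
        sq[^1]                     # safe: sequence is non-empty
      else:
        default

    doAssert lastOrDefault(@[]) == 0
    doAssert lastOrDefault(@[1, 2, 3]) == 3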
2021-07-14 16:13:27 +01:00


# Nimbus
# Copyright (c) 2018-2019 Status Research & Development GmbH
# Licensed under either of
# * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or
# http://www.apache.org/licenses/LICENSE-2.0)
# * MIT license ([LICENSE-MIT](LICENSE-MIT) or
# http://opensource.org/licenses/MIT)
# at your option. This file may not be copied, modified, or distributed except
# according to those terms.

import
  ../nimbus/utils/lru_cache,
  eth/rlp,
  sequtils,
  strformat,
  tables,
  unittest2

const
  cacheLimit = 10
  keyList = [
    185, 208, 53, 54, 196, 189, 187, 117, 94, 29, 6, 173, 207, 45, 31,
    208, 127, 106, 117, 49, 40, 171, 6, 94, 84, 60, 125, 87, 168, 183,
    200, 155, 34, 27, 67, 107, 108, 223, 249, 4, 113, 9, 205, 100, 77,
    224, 19, 196, 14, 83, 145, 154, 95, 56, 236, 97, 115, 140, 134, 97,
    153, 167, 23, 17, 182, 116, 253, 32, 108, 148, 135, 169, 178, 124, 147,
    231, 236, 174, 211, 247, 22, 118, 144, 224, 68, 124, 200, 92, 63, 183,
    56, 107, 45, 180, 113, 233, 59, 246, 29, 212, 172, 161, 183, 207, 189,
    56, 198, 130, 62, 28, 53, 122]

# Debugging output
proc say(noisy = false; pfx = "***"; args: varargs[string, `$`]) =
  if noisy:
    var outText = pfx & " "
    for a in args.items:
      outText &= a
    echo outText

# Walk the cache and check that each item's `prv` link points back to the
# key seen just before it.
proc verifyBackLinks[T,K,V,E](lru: var LruCache[T,K,V,E]) =
  var
    index = 0
    prvKey: K
  for key, item in lru.keyItemPairs:
    if 0 < index:
      doAssert prvKey == item.prv
    index.inc
    prvKey = key

proc toKeyList[T,K,V,E](lru: var LruCache[T,K,V,E]): seq[K] =
  lru.verifyBackLinks
  toSeq(lru.keyItemPairs).mapIt(it[0])

proc toValueList[T,K,V,E](lru: var LruCache[T,K,V,E]): seq[V] =
  lru.verifyBackLinks
  toSeq(lru.keyItemPairs).mapIt(it[1].value)

# The test cache maps an int key to its string representation.
proc createTestCache: LruCache[int,int,string,int] =
  var
    getKey: LruKey[int,int] =
      proc(x: int): int = x

    getValue: LruValue[int,string,int] =
      proc(x: int): Result[string,int] = ok($x)

    cache: LruCache[int,int,string,int]

  # Create LRU cache
  cache.initCache(getKey, getValue, cacheLimit)

  result = cache

# Fill the cache from keyList, folding the keys modulo 13 so that some of
# them repeat and get re-scheduled.
proc filledTestCache(noisy: bool): LruCache[int,int,string,int] =
  var
    cache = createTestCache()
    lastQ: seq[int]
  for w in keyList:
    var
      key = w mod 13
      reSched = cache.hasKey(key)
      value = cache.getItem(key)
      queue = cache.toKeyList
      values = cache.toValueList
    if reSched:
      noisy.say ">>>", &"rotate {value} => {queue}"
    else:
      noisy.say "+++", &"append {value} => {queue}"
    doAssert queue.mapIt($it) == values
    doAssert key == cache.lastKey
  result = cache

# ---

proc doFillUpTest(noisy: bool) =
  discard filledTestCache(noisy)

proc doDeepCopyTest(noisy: bool) =
  proc say(a: varargs[string]) =
    say(noisy = noisy, args = a)

  var
    c1 = filledTestCache(false)
    c2 = c1
  doAssert c1 == c2

  # Updating c1 must leave the value-copied c2 untouched.
  discard c1.getItem(77)
  say &"c1Specs: {c1.maxLen} {c1.firstKey} {c1.lastKey} ..."
  say &"c2Specs: {c2.maxLen} {c2.firstKey} {c2.lastKey} ..."

  doAssert c1 != c2
  doAssert toSeq(c1.keyItemPairs) != toSeq(c2.keyItemPairs)

proc doSerialiserTest(noisy: bool) =
  proc say(a: varargs[string]) =
    say(noisy = noisy, args = a)

  var
    c1 = filledTestCache(false)
    s1 = rlp.encode(c1.data)
    c2 = createTestCache()

  say &"serialised[{s1.len}]: {s1}"

  c2.clearCache
  doAssert c1 != c2

  # Loading the RLP blob must reproduce the original cache.
  c2.data = s1.decode(type c2.data)
  doAssert c1 == c2

  say &"c2Specs: {c2.maxLen} {c2.firstKey} {c2.lastKey} ..."
  doAssert s1 == rlp.encode(c2.data)

proc doSerialiseSingleEntry(noisy: bool) =
  proc say(a: varargs[string]) =
    say(noisy = noisy, args = a)

  var
    c1 = createTestCache()
    value = c1.getItem(77)
    queue = c1.toKeyList
    values = c1.toValueList

  say &"c1: append {value} => {queue}"

  var
    s1 = rlp.encode(c1.data)
    c2 = createTestCache()

  say &"serialised[{s1.len}]: {s1}"

  c2.clearCache
  doAssert c1 != c2

  c2.data = s1.decode(type c2.data)
  doAssert c1 == c2

  say &"c2Specs: {c2.maxLen} {c2.firstKey} {c2.lastKey} ..."
  doAssert s1 == rlp.encode(c2.data)

proc doRandomDeleteTest(noisy: bool) =
  proc say(a: varargs[string]) =
    say(noisy = noisy, args = a)

  var
    c1 = filledTestCache(false)
    sq = toSeq(c1.keyItemPairs).mapIt(it[0])
    s0 = sq
    inx = 5
    key = sq[inx]

  # Deleting from the middle of the LRU queue must keep the remaining
  # items and their back links intact.
  sq.delete(inx, inx)
  say &"sq: {s0} <off sq[{inx}]({key})> {sq}"

  doAssert c1.delItem(key)

  doAssert sq == toSeq(c1.keyItemPairs).mapIt(it[0])
  c1.verifyBackLinks

proc lruCacheMain*(noisy = defined(debug)) =
  suite "LRU Cache":

    test "Fill Up":
      doFillUpTest(noisy)

    test "Deep Copy Semantics":
      doDeepCopyTest(noisy)

    test "Rlp Serialise & Load":
      doSerialiserTest(noisy)

    test "Rlp Single Entry Test":
      doSerialiseSingleEntry(noisy)

    test "Random Delete":
      doRandomDeleteTest(noisy)

when isMainModule:
  lruCacheMain()

# End