Add a first, simple, content network test (#760)
* Add a first, simple, content network test
* Use sqlite as nimbus_db_backend for fluffy tests
* Fix backend selection for sqlite
This commit is contained in:
parent cfe955c962
commit e37bafd47e
@@ -10,7 +10,8 @@
 {.push raises: [Defect].}
 
 import
-  nimcrypto/[sha2, hash], stew/objects, eth/ssz/ssz_serialization
+  nimcrypto/[sha2, hash], stew/objects,
+  eth/ssz/ssz_serialization, eth/trie/[hexary, db]
 
 export ssz_serialization
 
@@ -47,8 +48,6 @@ template toSszType*(x: ContentType): uint8 =
   uint8(x)
 
 template toSszType*(x: auto): auto =
-  mixin toSszType
-
   x
 
 func fromSszBytes*(T: type ContentType, data: openArray[byte]):
@@ -70,6 +69,18 @@ func toContentId*(contentKey: ContentKey): ContentId =
 
 type
   ContentStorage* = object
+    # TODO: Quick implementation for now where we just use HexaryTrie, current
+    # idea is to move in here a more direct storage of the trie nodes, but have
+    # an `ContentProvider` "interface" that could provide the trie nodes via
+    # this direct storage, via the HexaryTrie (for full nodes), or also without
+    # storage, via json rpc client requesting data from a full eth1 client.
+    trie*: HexaryTrie
 
-func getContent*(storage: ContentStorage, key: ContentKey): Option[seq[byte]] =
-  discard
+proc getContent*(storage: ContentStorage, key: ContentKey): Option[seq[byte]] =
+  if storage.trie.db == nil: # TODO: for now...
+    return none(seq[byte])
+  let val = storage.trie.db.get(key.nodeHash.asSeq())
+  if val.len > 0:
+    some(val)
+  else:
+    none(seq[byte])
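Aside: a minimal usage sketch of the new getContent, assuming the ContentKey/ContentStorage definitions above are in scope; `trie` (an already populated HexaryTrie) and `nodeKey` (a 32-byte trie node hash) are hypothetical placeholders, not part of the diff.

# Usage sketch only; `trie` and `nodeKey` are hypothetical placeholders.
let
  storage = ContentStorage(trie: trie)
  contentKey = ContentKey(
    networkId: 0'u16,
    contentType: ContentType.Account,
    nodeHash: List[byte, 32](nodeKey))

let content = storage.getContent(contentKey)
if content.isSome():
  echo "trie node found: ", content.get().len, " bytes"
else:
  echo "trie node not in local storage"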
@@ -11,7 +11,8 @@ import ../../test_macro
 
 import
   ./test_portal_encoding,
-  ./test_portal
+  ./test_portal,
+  ./test_content_network
 
 cliBuilder:
   import
@@ -0,0 +1,45 @@
+{
+  "config": {
+    "chainId": 7,
+    "homesteadBlock": 0,
+    "eip150Block": 0,
+    "eip158Block": 0
+  },
+  "genesis": {
+    "coinbase": "0x0000000000000000000000000000000000000000",
+    "difficulty": "0x20000",
+    "extraData": "0x0000000000000000000000000000000000000000000000000000000000000000658bdf435d810c91414ec09147daa6db624063790000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
+    "gasLimit": "0x2fefd8",
+    "nonce": "0x0000000000000000",
+    "timestamp": "0x1234",
+    "alloc": {
+      "cf49fda3be353c69b41ed96333cd24302da4556f": {
+        "balance": "0x123450000000000000000"
+      },
+      "0161e041aad467a890839d5b08b138c1e6373072": {
+        "balance": "0x123450000000000000000"
+      },
+      "87da6a8c6e9eff15d703fc2773e32f6af8dbe301": {
+        "balance": "0x123450000000000000000"
+      },
+      "b97de4b8c857e4f6bc354f226dc3249aaee49209": {
+        "balance": "0x123450000000000000000"
+      },
+      "c5065c9eeebe6df2c2284d046bfc906501846c51": {
+        "balance": "0x123450000000000000000"
+      },
+      "0000000000000000000000000000000000000314": {
+        "balance": "0x0",
+        "code": "0x60606040526000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff168063a223e05d1461006a578063abd1a0cf1461008d578063abfced1d146100d4578063e05c914a14610110578063e6768b451461014c575b610000565b346100005761007761019d565b6040518082815260200191505060405180910390f35b34610000576100be600480803573ffffffffffffffffffffffffffffffffffffffff169060200190919050506101a3565b6040518082815260200191505060405180910390f35b346100005761010e600480803573ffffffffffffffffffffffffffffffffffffffff169060200190919080359060200190919050506101ed565b005b346100005761014a600480803590602001909190803573ffffffffffffffffffffffffffffffffffffffff16906020019091905050610236565b005b346100005761017960048080359060200190919080359060200190919080359060200190919050506103c4565b60405180848152602001838152602001828152602001935050505060405180910390f35b60005481565b6000600160008373ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019081526020016000205490505b919050565b80600160008473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff168152602001908152602001600020819055505b5050565b7f6031a8d62d7c95988fa262657cd92107d90ed96e08d8f867d32f26edfe85502260405180905060405180910390a17f47e2689743f14e97f7dcfa5eec10ba1dff02f83b3d1d4b9c07b206cbbda66450826040518082815260200191505060405180910390a1817fa48a6b249a5084126c3da369fbc9b16827ead8cb5cdc094b717d3f1dcd995e2960405180905060405180910390a27f7890603b316f3509577afd111710f9ebeefa15e12f72347d9dffd0d65ae3bade81604051808273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390a18073ffffffffffffffffffffffffffffffffffffffff167f7efef9ea3f60ddc038e50cccec621f86a0195894dc0520482abf8b5c6b659e4160405180905060405180910390a28181604051808381526020018273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019250505060405180910390a05b5050565b6000600060008585859250925092505b935093509390505600a165627a7a72305820aaf842d0d0c35c45622c5263cbb54813d2974d3999c8c38551d7c613ea2bc1170029",
+        "storage": {
+          "0x0000000000000000000000000000000000000000000000000000000000000000": "0x1234",
+          "0x6661e9d6d8b923d5bbaab1b96e1dd51ff6ea2a93520fdc9eb75d059238b8c5e9": "0x01"
+        }
+      },
+      "0000000000000000000000000000000000000315": {
+        "balance": "0x9999999999999999999999999999999",
+        "code": "0x60606040526000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff168063ef2769ca1461003e575b610000565b3461000057610078600480803573ffffffffffffffffffffffffffffffffffffffff1690602001909190803590602001909190505061007a565b005b8173ffffffffffffffffffffffffffffffffffffffff166108fc829081150290604051809050600060405180830381858888f1935050505015610106578173ffffffffffffffffffffffffffffffffffffffff167f30a3c50752f2552dcc2b93f5b96866280816a986c0c0408cb6778b9fa198288f826040518082815260200191505060405180910390a25b5b50505600a165627a7a72305820637991fabcc8abad4294bf2bb615db78fbec4edff1635a2647d3894e2daf6a610029"
+      }
+    }
+  }
+}
@@ -0,0 +1,72 @@
+# Nimbus - Portal Network
+# Copyright (c) 2021 Status Research & Development GmbH
+# Licensed and distributed under either of
+#  * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
+#  * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
+# at your option. This file may not be copied, modified, or distributed except according to those terms.
+
+import
+  std/os,
+  testutils/unittests,
+  eth/[keys, trie/db, trie/hexary, ssz/ssz_serialization],
+  ../../nimbus/[genesis, chain_config, db/db_chain],
+  ../network/portal_protocol, ../content,
+  ./test_helpers
+
+proc genesisToTrie(filePath: string): HexaryTrie =
+  # TODO: Doing our best here with API that exists, to be improved.
+  var cg: CustomGenesis
+  if not loadCustomGenesis(filePath, cg):
+    quit(1)
+
+  var chainDB = newBaseChainDB(
+    newMemoryDb(),
+    pruneTrie = false
+  )
+  # TODO: Can't provide this at the `newBaseChainDB` call, need to adjust API
+  chainDB.config = cg.config
+  # TODO: this actually also creates a HexaryTrie and AccountStateDB, which we
+  # could skip
+  let header = toBlock(cg.genesis, chainDB)
+  # Trie exists already in flat db, but need to provide the root
+  initHexaryTrie(chainDB.db, header.stateRoot, chainDB.pruneTrie)
+
+procSuite "Content Network":
+  let rng = newRng()
+
+  asyncTest "Test Share Full State":
+    let
+      node1 = initDiscoveryNode(
+        rng, PrivateKey.random(rng[]), localAddress(20302))
+      node2 = initDiscoveryNode(
+        rng, PrivateKey.random(rng[]), localAddress(20303))
+
+      proto1 = PortalProtocol.new(node1)
+      proto2 = PortalProtocol.new(node2)
+
+    let trie =
+      genesisToTrie("fluffy" / "tests" / "custom_genesis" / "chainid7.json")
+
+    proto1.contentStorage = ContentStorage(trie: trie)
+
+    var keys: seq[seq[byte]]
+    for k, v in trie.replicate:
+      keys.add(k)
+
+    for key in keys:
+      let
+        contentKey = ContentKey(
+          networkId: 0'u16,
+          contentType: content.ContentType.Account,
+          nodeHash: List[byte, 32](key))
+
+      let foundContent = await proto2.findContent(proto1.baseProtocol.localNode,
+        contentKey)
+
+      check:
+        foundContent.isOk()
+        foundContent.get().payload.len() != 0
+        foundContent.get().enrs.len() == 0
+
+      let hash = hexary.keccak(foundContent.get().payload.asSeq())
+      check hash.data == key
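Aside: the test's final check relies on the invariant that `trie.replicate` yields trie nodes keyed by the keccak hash of their encoding. A small standalone sketch of that invariant, using the same eth/trie APIs imported above (module layout assumed, not part of the diff):

import eth/trie/[hexary, db]

var
  memDb = newMemoryDB()
  trie = initHexaryTrie(memDb)

# Store something so the trie has at least one node.
trie.put(@[1'u8, 2, 3], @[4'u8, 5, 6])

# Each (key, value) pair yielded by replicate is a trie node stored under the
# keccak hash of its encoding, which is what keccak(payload) == key checks.
for k, v in trie.replicate:
  doAssert k == @(hexary.keccak(v).data)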
@@ -0,0 +1,36 @@
+# Nimbus - Portal Network
+# Copyright (c) 2021 Status Research & Development GmbH
+# Licensed and distributed under either of
+#  * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
+#  * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
+# at your option. This file may not be copied, modified, or distributed except according to those terms.
+
+import
+  stew/shims/net,
+  eth/keys,
+  eth/p2p/discoveryv5/[enr, node, routing_table],
+  eth/p2p/discoveryv5/protocol as discv5_protocol
+
+proc localAddress*(port: int): Address =
+  Address(ip: ValidIpAddress.init("127.0.0.1"), port: Port(port))
+
+proc initDiscoveryNode*(rng: ref BrHmacDrbgContext,
+    privKey: PrivateKey,
+    address: Address,
+    bootstrapRecords: openarray[Record] = [],
+    localEnrFields: openarray[(string, seq[byte])] = [],
+    previousRecord = none[enr.Record]()): discv5_protocol.Protocol =
+  # set bucketIpLimit to allow bucket split
+  let tableIpLimits = TableIpLimits(tableIpLimit: 1000, bucketIpLimit: 24)
+
+  result = newProtocol(privKey,
+    some(address.ip),
+    some(address.port), some(address.port),
+    bindPort = address.port,
+    bootstrapRecords = bootstrapRecords,
+    localEnrFields = localEnrFields,
+    previousRecord = previousRecord,
+    tableIpLimits = tableIpLimits,
+    rng = rng)
+
+  result.open()
@@ -9,34 +9,10 @@
 
 import
   chronos, testutils/unittests, stew/shims/net,
-  eth/keys, # for rng
-  eth/p2p/discoveryv5/[enr, node, routing_table],
+  eth/keys, eth/p2p/discoveryv5/routing_table,
   eth/p2p/discoveryv5/protocol as discv5_protocol,
-  ../network/portal_protocol
-
-proc localAddress(port: int): node.Address =
-  node.Address(ip: ValidIpAddress.init("127.0.0.1"), port: Port(port))
-
-proc initDiscoveryNode(rng: ref BrHmacDrbgContext, privKey: PrivateKey,
-                       address: node.Address,
-                       bootstrapRecords: openarray[Record] = [],
-                       localEnrFields: openarray[(string, seq[byte])] = [],
-                       previousRecord = none[enr.Record]()):
-                       discv5_protocol.Protocol =
-  # set bucketIpLimit to allow bucket split
-  let tableIpLimits = TableIpLimits(tableIpLimit: 1000, bucketIpLimit: 24)
-
-  result = newProtocol(privKey,
-    some(address.ip),
-    some(address.port), some(address.port),
-    bindPort = address.port,
-    bootstrapRecords = bootstrapRecords,
-    localEnrFields = localEnrFields,
-    previousRecord = previousRecord,
-    tableIpLimits = tableIpLimits,
-    rng = rng)
-
-  result.open()
-
+  ../network/portal_protocol,
+  ./test_helpers
 
 proc random(T: type UInt256, rng: var BrHmacDrbgContext): T =
   var key: UInt256
@@ -29,7 +29,7 @@ proc buildBinary(name: string, srcDir = "./", params = "", lang = "c") =
     extra_params &= " " & paramStr(i)
   exec "nim " & lang & " --out:build/" & name & " " & extra_params & " " & srcDir & name & ".nim"
 
-proc test(path: string, name: string, lang = "c") =
+proc test(path: string, name: string, params = "", lang = "c") =
   # Verify stack usage is kept low by setting 750k stack limit in tests.
   const stackLimitKiB = 750
   when not defined(windows):
@@ -42,11 +42,11 @@ proc test(path: string, name: string, lang = "c") =
   const (buildOption, runPrefix) =
     (" -d:windowsNoSetStack --passL:-Wl,--stack," & $(stackLimitKiB * 1024), "")
 
-  buildBinary name, (path & "/"), "-d:chronicles_log_level=ERROR" & buildOption
+  buildBinary name, (path & "/"), params & buildOption
   exec runPrefix & "build/" & name
 
 task test, "Run tests":
-  test "tests", "all_tests"
+  test "tests", "all_tests", "-d:chronicles_log_level=ERROR"
 
 task nimbus, "Build Nimbus":
   buildBinary "nimbus", "nimbus/", "-d:chronicles_log_level=TRACE"
@@ -58,4 +58,4 @@ task portalcli, "Build portalcli":
   buildBinary "portalcli", "fluffy/network/", "-d:chronicles_log_level=TRACE"
 
 task testfluffy, "Run fluffy tests":
-  test "fluffy/tests", "all_fluffy_tests"
+  test "fluffy/tests", "all_fluffy_tests", "-d:chronicles_log_level=ERROR -d:nimbus_db_backend=sqlite"
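Aside: with the new `params` argument, the `testfluffy` task ends up composing roughly the compile command sketched below (illustrative only; the platform-specific stack-limit option from `buildOption` is omitted).

# Standalone sketch mirroring how `test`/`buildBinary` compose the command.
let
  path = "fluffy/tests"
  name = "all_fluffy_tests"
  params = "-d:chronicles_log_level=ERROR -d:nimbus_db_backend=sqlite"

echo "nim c --out:build/" & name & " " & params & " " & path & "/" & name & ".nim"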
@@ -38,9 +38,10 @@ proc del*(db: ChainDB, key: openArray[byte]) =
   db.kv.del(key).expect("working database")
 
 when dbBackend == sqlite:
-  import eth/db/kvstore_sqlite as database_backend
+  import eth/db/kvstore_sqlite3 as database_backend
   proc newChainDB*(path: string): ChainDB =
-    ChainDB(kv: kvStore SqKvStore.init(path, "nimbus").tryGet())
+    let db = SqStoreRef.init(path, "nimbus").expect("working database")
+    ChainDB(kv: kvStore db.openKvStore().expect("working database"))
 elif dbBackend == rocksdb:
   import eth/db/kvstore_rocksdb as database_backend
   proc newChainDB*(path: string): ChainDB =
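Aside: a minimal, hypothetical sketch of how a `-d:nimbus_db_backend=sqlite` define can drive this kind of compile-time backend selection; the actual `dbBackend` constant in nimbus may be derived differently.

# Hypothetical sketch, not the actual nimbus select_backend code: a string
# define chooses which kvstore module gets compiled in.
const nimbus_db_backend {.strdefine.} = "rocksdb"

when nimbus_db_backend == "sqlite":
  echo "sqlite kvstore backend selected"
elif nimbus_db_backend == "rocksdb":
  echo "rocksdb kvstore backend selected"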