Create memory-leak detecting test suite (#226)
* adding tracker for streamstore
* adding tracker tests
* Sets up tracker helper functions and closes streams in testnode.nim
* Deploying checksuite for memory leak tracker checking.
* Successfully deploys checksuite and asyncchecksuite.
* Fix leak in testpor.nim
* Fixes leaked storestream in testnetwork.nim
* Fixes integration tests
* Cleanup
* cleanup comment by Mark

Co-authored-by: benbierens <thatbenbierens@gmail.com>
This commit is contained in:
parent
e47b38af11
commit
bd594c9aaf
@@ -29,6 +29,9 @@ export stores, blocktype, manifest, chronos
 logScope:
   topics = "codex storestream"
 
+const
+  StoreStreamTrackerName* = "StoreStream"
+
 type
   # Make SeekableStream from a sequence of blocks stored in Manifest
   # (only original file data - see StoreStream.size)
@@ -37,6 +40,12 @@ type
     manifest*: Manifest # List of block CIDs
     pad*: bool          # Pad last block to manifest.blockSize?
 
+method initStream*(s: StoreStream) =
+  if s.objName.len == 0:
+    s.objName = StoreStreamTrackerName
+
+  procCall SeekableStream(s).initStream()
+
 proc new*(
   T: type StoreStream,
   store: BlockStore,
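For orientation: the stream base class keeps open/close counts in a tracker named after the stream's `objName`, so once `initStream` sets the name above, any StoreStream that is opened but never closed shows up under `StoreStreamTrackerName`. A minimal sketch of querying that tracker directly — the `getTracker`, `isLeaked` and `dump` calls are the same ones used by the new test helpers further down in this diff (storestream re-exports chronos, so no extra import is needed); the proc name is illustrative only:

import pkg/codex/streams/storestream

proc reportStoreStreamLeaks() =
  # Look up the tracker that StoreStream instances register themselves under.
  let tracker = getTracker(StoreStreamTrackerName)
  if isNil(tracker):
    echo "no StoreStream has been created yet"
  elif tracker.isLeaked():
    # dump() summarises how many streams were opened vs. closed.
    echo tracker.dump()
  else:
    echo "all StoreStreams were closed"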
@@ -0,0 +1,27 @@
+import ./helpers
+
+## Unit testing suite that calls checkTrackers in teardown to check for memory leaks using chronos trackers.
+template checksuite*(name, body) =
+  suite name:
+    multisetup()
+
+    teardown:
+      checkTrackers()
+
+    # Avoids GcUnsafe2 warnings with chronos
+    # Copied from asynctest/templates.nim
+    let suiteproc = proc =
+      body
+
+    suiteproc()
+
+template asyncchecksuite*(name, body) =
+  suite name:
+    asyncmultisetup()
+
+    teardown:
+      checkTrackers()
+
+    body
+
+export helpers
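To illustrate how a test module adopts these templates (the suite below is hypothetical, but it mirrors the conversions in the rest of this diff): a synchronous suite becomes `checksuite`, an async one `asyncchecksuite`, and any stream opened during setup must be closed in `teardown`, otherwise the `checkTrackers()` call installed by the template fails the test. A rough sketch, assuming the usual codex test imports:

import pkg/asynctest
import pkg/questionable/results
import pkg/codex/stores
import pkg/codex/manifest
import pkg/codex/streams/storestream
import ../helpers  # re-exports checktest, so checksuite/asyncchecksuite are in scope

asyncchecksuite "Example leak-checked suite":
  var
    store: BlockStore
    manifest: Manifest
    stream: StoreStream

  setup:
    store = CacheStore.new()
    manifest = Manifest.new(blockSize = 10).tryGet()
    stream = StoreStream.new(store, manifest)

  teardown:
    # Without this close the StoreStream tracker reports a leak and
    # the teardown added by asyncchecksuite fails the test.
    await stream.close()

  test "stream is opened and closed cleanly":
    check not stream.isNil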
@@ -20,7 +20,7 @@ import ../../helpers/mockdiscovery
 import ../../helpers
 import ../../examples
 
-suite "Block Advertising and Discovery":
+asyncchecksuite "Block Advertising and Discovery":
   let chunker = RandomChunker.new(Rng.instance(), size = 4096, chunkSize = 256)
 
   var
@@ -172,7 +172,7 @@ suite "Block Advertising and Discovery":
 
     await engine.stop()
 
-suite "E2E - Multiple Nodes Discovery":
+asyncchecksuite "E2E - Multiple Nodes Discovery":
   let chunker = RandomChunker.new(Rng.instance(), size = 4096, chunkSize = 256)
 
   var
@@ -19,7 +19,7 @@ import ../../helpers/mockdiscovery
 import ../../helpers
 import ../../examples
 
-suite "Test Discovery Engine":
+asyncchecksuite "Test Discovery Engine":
   let chunker = RandomChunker.new(Rng.instance(), size = 4096, chunkSize = 256)
 
   var
@@ -18,7 +18,7 @@ import pkg/codex/blocktype as bt
 import ../../examples
 import ../../helpers
 
-suite "NetworkStore engine - 2 nodes":
+asyncchecksuite "NetworkStore engine - 2 nodes":
   let
     chunker1 = RandomChunker.new(Rng.instance(), size = 2048, chunkSize = 256)
     chunker2 = RandomChunker.new(Rng.instance(), size = 2048, chunkSize = 256)
@@ -180,7 +180,7 @@ suite "NetworkStore engine - 2 nodes":
 
     check eventually wallet.balance(channel, Asset) > 0
 
-suite "NetworkStore - multiple nodes":
+asyncchecksuite "NetworkStore - multiple nodes":
   let
     chunker = RandomChunker.new(Rng.instance(), size = 4096, chunkSize = 256)
 
@@ -20,7 +20,7 @@ import pkg/codex/utils/asyncheapqueue
 import ../../helpers
 import ../../examples
 
-suite "NetworkStore engine basic":
+asyncchecksuite "NetworkStore engine basic":
   var
     rng: Rng
     seckey: PrivateKey
@@ -125,7 +125,7 @@ suite "NetworkStore engine basic":
 
     await done.wait(100.millis)
 
-suite "NetworkStore engine handlers":
+asyncchecksuite "NetworkStore engine handlers":
   var
     rng: Rng
     seckey: PrivateKey
@@ -352,7 +352,7 @@ suite "NetworkStore engine handlers":
     check cid in peerCtx.peerHave
     check peerCtx.blocks[cid].price == price
 
-suite "Task Handler":
+asyncchecksuite "Task Handler":
   var
     rng: Rng
     seckey: PrivateKey
@@ -2,8 +2,9 @@ import std/unittest
 
 import pkg/codex/stores
 import ../../examples
+import ../../helpers
 
-suite "engine payments":
+checksuite "engine payments":
 
   let address = EthAddress.example
   let amount = 42.u256
@@ -2,9 +2,10 @@ import pkg/asynctest
 import pkg/chronos
 import pkg/stew/byteutils
 import ../../examples
+import ../../helpers
 import pkg/codex/stores
 
-suite "account protobuf messages":
+checksuite "account protobuf messages":
 
   let account = Account(address: EthAddress.example)
   let message = AccountMessage.init(account)
@@ -20,7 +21,7 @@ suite "account protobuf messages":
     incorrect.address.del(0)
     check Account.init(incorrect).isNone
 
-suite "channel update messages":
+checksuite "channel update messages":
 
   let state = SignedState.example
   let update = StateChannelUpdate.init(state)
@@ -4,8 +4,9 @@ import pkg/libp2p
 
 import pkg/codex/blockexchange/protobuf/presence
 import ../../examples
+import ../../helpers
 
-suite "block presence protobuf messages":
+checksuite "block presence protobuf messages":
 
   let cid = Cid.example
   let price = UInt256.example
@@ -14,7 +14,7 @@ import pkg/codex/blockexchange
 import ../helpers
 import ../examples
 
-suite "Network - Handlers":
+asyncchecksuite "Network - Handlers":
   let
     rng = Rng.instance()
     seckey = PrivateKey.random(rng[]).tryGet()
@@ -137,7 +137,7 @@ suite "Network - Handlers":
 
     await done.wait(100.millis)
 
-suite "Network - Senders":
+asyncchecksuite "Network - Senders":
   let
     chunker = RandomChunker.new(Rng.instance(), size = 1024, chunkSize = 256)
 
@@ -260,7 +260,7 @@ suite "Network - Senders":
     await network1.sendPayment(switch2.peerInfo.peerId, payment)
     await done.wait(500.millis)
 
-suite "Network - Test Limits":
+asyncchecksuite "Network - Test Limits":
   var
     switch1, switch2: Switch
     network1, network2: BlockExcNetwork
@@ -8,9 +8,10 @@ import pkg/codex/blockexchange/peers
 import pkg/codex/blockexchange/protobuf/blockexc
 import pkg/codex/blockexchange/protobuf/presence
 
+import ../helpers
 import ../examples
 
-suite "Peer Context Store":
+checksuite "Peer Context Store":
   var
     store: PeerCtxStore
     peerCtx: BlockExcPeerCtx
@@ -30,7 +31,7 @@ suite "Peer Context Store":
   test "Should get peer":
     check store.get(peerCtx.id) == peerCtx
 
-suite "Peer Context Store Peer Selection":
+checksuite "Peer Context Store Peer Selection":
   var
     store: PeerCtxStore
     peerCtxs: seq[BlockExcPeerCtx]
@@ -9,7 +9,9 @@ import pkg/stew/byteutils
 import pkg/codex/blocktype as bt
 import pkg/codex/blockexchange
 
-suite "Pending Blocks":
+import ../helpers
+
+checksuite "Pending Blocks":
   test "Should add want handle":
     let
       pendingBlocks = PendingBlocksManager.new()
@@ -10,8 +10,9 @@ import ./helpers/nodeutils
 import ./helpers/randomchunker
 import ./helpers/mockdiscovery
 import ./helpers/eventually
+import ../checktest
 
-export randomchunker, nodeutils, mockdiscovery, eventually
+export randomchunker, nodeutils, mockdiscovery, eventually, checktest
 
 # NOTE: The meaning of equality for blocks
 # is changed here, because blocks are now `ref`
@@ -3,6 +3,9 @@ import pkg/questionable
 import pkg/questionable/results
 
 import pkg/codex/sales/reservations
+import ../helpers
+
+export checktest
 
 proc allAvailabilities*(r: Reservations): Future[seq[Availability]] {.async.} =
   var ret: seq[Availability] = @[]
@@ -6,9 +6,9 @@ import pkg/codex/sales/states/cancelled
 import pkg/codex/sales/states/failed
 import pkg/codex/sales/states/filled
 import ../../examples
+import ../../helpers
 
-suite "sales state 'downloading'":
-
+checksuite "sales state 'downloading'":
   let request = StorageRequest.example
   let slotIndex = (request.ask.slots div 2).u256
   var state: SaleDownloading
@@ -8,8 +8,9 @@ import pkg/codex/sales/states/errored
 import pkg/codex/sales/states/finished
 import ../../helpers/mockmarket
 import ../../examples
+import ../../helpers
 
-suite "sales state 'filled'":
+checksuite "sales state 'filled'":
 
   let request = StorageRequest.example
   let slotIndex = (request.ask.slots div 2).u256
@@ -6,8 +6,9 @@ import pkg/codex/sales/states/cancelled
 import pkg/codex/sales/states/failed
 import pkg/codex/sales/states/filled
 import ../../examples
+import ../../helpers
 
-suite "sales state 'filling'":
+checksuite "sales state 'filling'":
 
   let request = StorageRequest.example
   let slotIndex = (request.ask.slots div 2).u256
@@ -5,8 +5,9 @@ import pkg/codex/sales/states/finished
 import pkg/codex/sales/states/cancelled
 import pkg/codex/sales/states/failed
 import ../../examples
+import ../../helpers
 
-suite "sales state 'finished'":
+checksuite "sales state 'finished'":
 
   let request = StorageRequest.example
   var state: SaleFinished
@@ -6,8 +6,9 @@ import pkg/codex/sales/states/cancelled
 import pkg/codex/sales/states/failed
 import pkg/codex/sales/states/filled
 import ../../examples
+import ../../helpers
 
-suite "sales state 'proving'":
+checksuite "sales state 'proving'":
 
   let request = StorageRequest.example
   let slotIndex = (request.ask.slots div 2).u256
@@ -10,8 +10,9 @@ import pkg/codex/sales/states/finished
 import pkg/codex/sales/states/failed
 import ../../helpers/mockmarket
 import ../../examples
+import ../../helpers
 
-suite "sales state 'unknown'":
+checksuite "sales state 'unknown'":
 
   let request = StorageRequest.example
   let slotIndex = (request.ask.slots div 2).u256
@@ -13,8 +13,7 @@ import pkg/codex/sales
 import ../examples
 import ./helpers
 
-suite "Reservations module":
-
+asyncchecksuite "Reservations module":
   var
     repo: RepoStore
     repoDs: Datastore
@@ -21,8 +21,7 @@ import ../helpers/eventually
 import ../examples
 import ./helpers
 
-suite "Sales":
-
+asyncchecksuite "Sales":
   let proof = exampleProof()
 
   var availability: Availability
@@ -42,8 +42,7 @@ method onError*(state: MockErrorState, err: ref CatchableError): ?State =
 method run*(state: MockErrorState, machine: Machine): Future[?State] {.async.} =
   raise newException(ValueError, "failure")
 
-suite "Sales agent":
-
+asyncchecksuite "Sales agent":
   var request = StorageRequest(
     ask: StorageAsk(
       slots: 4,
@@ -23,7 +23,7 @@ const
   BlockSize = 31 * 64
   DataSetSize = BlockSize * 100
 
-suite "Storage Proofs Network":
+asyncchecksuite "Storage Proofs Network":
   let
     hostAddr = ca.Address.example
     blocks = toSeq([1, 5, 10, 14, 20, 12, 22]) # TODO: maybe make them random
@@ -43,10 +43,11 @@ suite "Storage Proofs Network":
     spk: st.PublicKey
     porMsg: PorMessage
     cid: Cid
+    porStream: StoreStream
     por: PoR
     tags: seq[Tag]
 
-  setupAll:
+  setup:
     chunker = RandomChunker.new(Rng.instance(), size = DataSetSize, chunkSize = BlockSize)
     store = CacheStore.new(cacheSize = DataSetSize, chunkSize = BlockSize)
     manifest = Manifest.new(blockSize = BlockSize).tryGet()
@@ -61,8 +62,9 @@ suite "Storage Proofs Network":
       (await store.putBlock(blk)).tryGet()
 
     cid = manifest.cid.tryGet()
+    porStream = StoreStream.new(store, manifest)
     por = await PoR.init(
-      StoreStream.new(store, manifest),
+      porStream,
       ssk, spk,
       BlockSize)
 
@@ -70,7 +72,6 @@ suite "Storage Proofs Network":
     tags = blocks.mapIt(
      Tag(idx: it, tag: porMsg.authenticators[it]))
 
-  setup:
     switch1 = newStandardSwitch()
     switch2 = newStandardSwitch()
 
@@ -89,6 +90,7 @@ suite "Storage Proofs Network":
   teardown:
     await switch1.stop()
     await switch2.stop()
+    await close(porStream)
 
   test "Should upload to host":
     var
@@ -19,13 +19,15 @@ const
   SectorsPerBlock = BlockSize div SectorSize
   DataSetSize = BlockSize * 100
 
-suite "BLS PoR":
+asyncchecksuite "BLS PoR":
   var
     chunker: RandomChunker
     manifest: Manifest
     store: BlockStore
     ssk: st.SecretKey
     spk: st.PublicKey
+    porStream: StoreStream
+    proofStream: StoreStream
 
   setup:
     chunker = RandomChunker.new(Rng.instance(), size = DataSetSize, chunkSize = BlockSize)
@@ -33,6 +35,9 @@ suite "BLS PoR":
     manifest = Manifest.new(blockSize = BlockSize).tryGet()
     (spk, ssk) = st.keyGen()
 
+    porStream = StoreStream.new(store, manifest)
+    proofStream = StoreStream.new(store, manifest)
+
     while (
       let chunk = await chunker.getBytes();
       chunk.len > 0):
@@ -41,41 +46,43 @@ suite "BLS PoR":
       manifest.add(blk.cid)
       (await store.putBlock(blk)).tryGet()
 
-  test "Test PoR without corruption":
-    let
-      por = await PoR.init(
-        StoreStream.new(store, manifest),
+  teardown:
+    await close(porStream)
+    await close(proofStream)
+
+  proc createPor(): Future[PoR] =
+    return PoR.init(
+      porStream,
       ssk,
       spk,
       BlockSize)
-      q = generateQuery(por.tau, 22)
-      proof = await generateProof(
-        StoreStream.new(store, manifest),
+
+  proc createProof(por: PoR, q: seq[QElement]): Future[Proof] =
+    return generateProof(
+      proofStream,
       q,
       por.authenticators,
       SectorsPerBlock)
 
+  test "Test PoR without corruption":
+    let
+      por = await createPor()
+      q = generateQuery(por.tau, 22)
+      proof = await createProof(por, q)
+
     check por.verifyProof(q, proof.mu, proof.sigma)
 
   test "Test PoR with corruption - query: 22, corrupted blocks: 300, bytes: 10":
     let
-      por = await PoR.init(
-        StoreStream.new(store, manifest),
-        ssk,
-        spk,
-        BlockSize)
+      por = await createPor()
      pos = await store.corruptBlocks(manifest, 30, 10)
       q = generateQuery(por.tau, 22)
-      proof = await generateProof(
-        StoreStream.new(store, manifest),
-        q,
-        por.authenticators,
-        SectorsPerBlock)
+      proof = await createProof(por, q)
 
     check pos.len == 30
     check not por.verifyProof(q, proof.mu, proof.sigma)
 
-suite "Test Serialization":
+asyncchecksuite "Test Serialization":
   var
     chunker: RandomChunker
     manifest: Manifest
@@ -85,8 +92,10 @@ suite "Test Serialization":
     por: PoR
     q: seq[QElement]
     proof: Proof
+    porStream: StoreStream
+    proofStream: StoreStream
 
-  setupAll:
+  setup:
     chunker = RandomChunker.new(Rng.instance(), size = DataSetSize, chunkSize = BlockSize)
     store = CacheStore.new(cacheSize = DataSetSize, chunkSize = BlockSize)
     manifest = Manifest.new(blockSize = BlockSize).tryGet()
@@ -100,18 +109,24 @@ suite "Test Serialization":
       (await store.putBlock(blk)).tryGet()
 
     (spk, ssk) = st.keyGen()
+    porStream = StoreStream.new(store, manifest)
     por = await PoR.init(
-      StoreStream.new(store, manifest),
+      porStream,
       ssk,
       spk,
       BlockSize)
     q = generateQuery(por.tau, 22)
+    proofStream = StoreStream.new(store, manifest)
     proof = await generateProof(
-      StoreStream.new(store, manifest),
+      proofStream,
       q,
       por.authenticators,
       SectorsPerBlock)
 
+  teardown:
+    await close(porStream)
+    await close(proofStream)
+
   test "Serialize Public Key":
     var
       spkMessage = spk.toMessage()
@@ -15,7 +15,7 @@ const
   BlockSize = 31 * 64
   DataSetSize = BlockSize * 100
 
-suite "Test PoR store":
+asyncchecksuite "Test PoR store":
   let
     blocks = toSeq([1, 5, 10, 14, 20, 12, 22]) # TODO: maybe make them random
 
@@ -27,12 +27,13 @@ suite "Test PoR store":
     spk: st.PublicKey
     repoDir: string
     stpstore: st.StpStore
+    porStream: StoreStream
     por: PoR
     porMsg: PorMessage
     cid: Cid
     tags: seq[Tag]
 
-  setupAll:
+  setup:
     chunker = RandomChunker.new(Rng.instance(), size = DataSetSize, chunkSize = BlockSize)
     store = CacheStore.new(cacheSize = DataSetSize, chunkSize = BlockSize)
     manifest = Manifest.new(blockSize = BlockSize).tryGet()
@@ -47,8 +48,9 @@ suite "Test PoR store":
       (await store.putBlock(blk)).tryGet()
 
     cid = manifest.cid.tryGet()
+    porStream = StoreStream.new(store, manifest)
     por = await PoR.init(
-      StoreStream.new(store, manifest),
+      porStream,
       ssk, spk,
       BlockSize)
 
@@ -60,7 +62,8 @@ suite "Test PoR store":
     createDir(repoDir)
     stpstore = st.StpStore.init(repoDir)
 
-  teardownAll:
+  teardown:
+    await close(porStream)
     removeDir(repoDir)
 
   test "Should store Storage Proofs":
@@ -68,6 +71,7 @@ suite "Test PoR store":
     check fileExists(stpstore.stpPath(cid) / "por")
 
   test "Should retrieve Storage Proofs":
+    discard await stpstore.store(por.toMessage(), cid)
     check (await stpstore.retrieve(cid)).tryGet() == porMsg
 
   test "Should store tags":
@@ -76,4 +80,5 @@ suite "Test PoR store":
     check fileExists(stpstore.stpPath(cid) / $t.idx )
 
   test "Should retrieve tags":
+    discard await stpstore.store(tags, cid)
     check (await stpstore.retrieve(cid, blocks)).tryGet() == tags
@@ -23,7 +23,7 @@ proc commonBlockStoreTests*(name: string,
   before: Before = nil,
   after: After = nil) =
 
-  suite name & " Store Common":
+  asyncchecksuite name & " Store Common":
     var
       newBlock, newBlock1, newBlock2, newBlock3: Block
       store: BlockStore
@@ -12,7 +12,7 @@ import ./commonstoretests
 
 import ../helpers
 
-suite "Cache Store":
+checksuite "Cache Store":
   var
     newBlock, newBlock1, newBlock2, newBlock3: Block
     store: CacheStore
@@ -38,7 +38,7 @@ proc createManifestCid(): ?!Cid =
   let cid = ? Cid.init(version, codec, hash).mapFailure
   return success cid
 
-suite "KeyUtils":
+checksuite "KeyUtils":
   test "makePrefixKey should create block key":
     let length = 6
     let cid = Cid.example
@@ -15,6 +15,7 @@ import pkg/codex/blocktype as bt
 import pkg/codex/stores/repostore
 import pkg/codex/clock
 
+import ../helpers
 import ../helpers/mocktimer
 import ../helpers/mockrepostore
 import ../helpers/mockclock
@@ -22,7 +23,7 @@ import ../examples
 
 import codex/stores/maintenance
 
-suite "BlockMaintainer":
+checksuite "BlockMaintainer":
   var mockRepoStore: MockRepoStore
   var interval: Duration
   var mockTimer: MockTimer
@@ -22,7 +22,7 @@ import ../helpers
 import ../helpers/mockclock
 import ./commonstoretests
 
-suite "Test RepoStore start/stop":
+checksuite "Test RepoStore start/stop":
 
   var
     repoDs: Datastore
@@ -55,7 +55,7 @@ suite "Test RepoStore start/stop":
     await repo.stop()
     check not repo.started
 
-suite "RepoStore":
+asyncchecksuite "RepoStore":
   var
     repoDs: Datastore
     metaDs: Datastore
@@ -5,6 +5,8 @@ import pkg/stew/results
 import pkg/codex/utils/asyncheapqueue
 import pkg/codex/rng
 
+import ./helpers
+
 type
   Task* = tuple[name: string, priority: int]
 
@@ -21,7 +23,7 @@ proc toSortedSeq[T](h: AsyncHeapQueue[T], queueType = QueueType.Min): seq[T] =
   while tmp.len > 0:
     result.add(popNoWait(tmp).tryGet())
 
-suite "Synchronous tests":
+checksuite "Synchronous tests":
   test "Test pushNoWait - Min":
     var heap = newAsyncHeapQueue[int]()
     let data = [1, 3, 5, 7, 9, 2, 4, 6, 8, 0]
@@ -127,8 +129,7 @@ suite "Synchronous tests":
     heap.clear()
     check heap.len == 0
 
-suite "Asynchronous Tests":
-
+asyncchecksuite "Asynchronous Tests":
   test "Test push":
     var heap = newAsyncHeapQueue[int]()
     let data = [1, 3, 5, 7, 9, 2, 4, 6, 8, 0]
@@ -5,7 +5,9 @@ import pkg/chronicles
 import pkg/chronos
 import pkg/libp2p
 
-suite "Chunking":
+import ./helpers
+
+asyncchecksuite "Chunking":
   test "should return proper size chunks":
     var offset = 0
     let contents = [1.byte, 2, 3, 4, 5, 6, 7, 8, 9, 0]
@@ -1,8 +1,9 @@
 import std/unittest
 
 import codex/clock
+import ./helpers
 
-suite "Clock":
+checksuite "Clock":
   proc testConversion(seconds: SecondsSince1970) =
     let asBytes = seconds.toBytes
 
@@ -13,8 +13,7 @@ import pkg/codex/rng
 
 import ./helpers
 
-suite "Erasure encode/decode":
-
+asyncchecksuite "Erasure encode/decode":
   const BlockSize = 1024
   const dataSetSize = BlockSize * 123 # weird geometry
 
@@ -12,7 +12,7 @@ import pkg/codex/manifest
 
 import ./helpers
 
-suite "Manifest":
+checksuite "Manifest":
   test "Should produce valid tree hash checksum":
     var manifest = Manifest.new(
       blocks = @[
@@ -20,7 +20,7 @@ import pkg/codex/blocktype as bt
 
 import ./helpers
 
-suite "Test Node":
+asyncchecksuite "Test Node":
   let
     (path, _, _) = instantiationInfo(-2, fullPaths = true) # get this file's name
 
@@ -62,6 +62,8 @@ suite "Test Node":
     var
       data: seq[byte]
 
+    defer: await stream.close()
+
     while not stream.atEof:
       var
        buf = newSeq[byte](oddChunkSize)
@@ -154,8 +156,8 @@ suite "Test Node":
       manifestBlock = (await localStore.getBlock(manifestCid)).tryGet()
       localManifest = Manifest.decode(manifestBlock).tryGet()
 
-    let
-      data = await retrieve(manifestCid)
+    let data = await retrieve(manifestCid)
 
     check:
+      data.len == localManifest.originalBytes
       data.len == original.len
@@ -168,6 +170,7 @@ suite "Test Node":
 
     (await localStore.putBlock(blk)).tryGet()
     let stream = (await node.retrieve(blk.cid)).tryGet()
+    defer: await stream.close()
 
     var data = newSeq[byte](testString.len)
     await stream.readExactly(addr data[0], data.len)
@@ -6,8 +6,9 @@ import ./helpers/mockmarket
 import ./helpers/mockclock
 import ./helpers/eventually
 import ./examples
+import ./helpers
 
-suite "Proving":
+asyncchecksuite "Proving":
 
   var proving: Proving
   var market: MockMarket
@@ -13,9 +13,9 @@ import ./helpers/mockmarket
 import ./helpers/mockclock
 import ./helpers/eventually
 import ./examples
+import ./helpers
 
-suite "Purchasing":
-
+asyncchecksuite "Purchasing":
   var purchasing: Purchasing
   var market: MockMarket
   var clock: MockClock
@@ -119,7 +119,7 @@ suite "Purchasing":
     await purchase.wait()
     check market.withdrawn == @[request.id]
 
-suite "Purchasing state machine":
+checksuite "Purchasing state machine":
 
   var purchasing: Purchasing
   var market: MockMarket
@@ -10,7 +10,7 @@ import pkg/codex/stores
 import pkg/codex/manifest
 import pkg/codex/blocktype as bt
 
-suite "StoreStream":
+asyncchecksuite "StoreStream":
   var
     manifest: Manifest
     store: BlockStore
@@ -37,6 +37,9 @@ suite "StoreStream":
     [byte 90, 91, 92, 93, 94, 95, 96, 97, 98, 99],
   ]
 
+  teardown:
+    await stream.close()
+
   setup:
     store = CacheStore.new()
     manifest = Manifest.new(blockSize = 10).tryGet()
@@ -2,8 +2,9 @@ import std/times
 import std/unittest
 
 import codex/systemclock
+import ./helpers
 
-suite "SystemClock":
+checksuite "SystemClock":
   test "Should get now":
     let clock = SystemClock.new()
 
@@ -7,9 +7,9 @@ import ./helpers/mockmarket
 import ./helpers/mockclock
 import ./helpers/eventually
 import ./examples
+import ./helpers
 
-suite "validation":
-
+asyncchecksuite "validation":
   let period = 10
   let timeout = 5
   let maxSlots = 100
@@ -4,6 +4,7 @@ import pkg/chronos
 import pkg/upraises
 import codex/utils/asyncstatemachine
 import ../helpers/eventually
+import ../helpers
 
 type
   State1 = ref object of State
@@ -59,7 +60,7 @@ method onError(state: State4, error: ref CatchableError): ?State =
   inc errors[3]
   some State(State2.new())
 
-suite "async state machines":
+asyncchecksuite "async state machines":
   var machine: Machine
 
   proc moveToNextStateEvent(state: State): ?State =
@@ -3,12 +3,12 @@ import std/os
 import pkg/libp2p
 import pkg/questionable/results
 import codex/utils/keyutils
+import ../helpers
 
 when defined(windows):
   import stew/windows/acl
 
-suite "keyutils":
-
+checksuite "keyutils":
   let path = getTempDir() / "CodexTest"
 
   setup:
@@ -1,8 +1,8 @@
 import std/unittest
 import codex/utils/optionalcast
+import ../helpers
 
-suite "optional casts":
-
+checksuite "optional casts":
   test "casting value to same type works":
     check 42 as int == some 42
 
@@ -12,8 +12,9 @@ import pkg/asynctest
 
 import codex/utils/timer
 import ../helpers/eventually
+import ../helpers
 
-suite "Timer":
+asyncchecksuite "Timer":
   var timer1: Timer
   var timer2: Timer
   var output: string
@@ -4,7 +4,6 @@ import codex/contracts/clock
 import ../ethertest
 
 ethersuite "On-Chain Clock":
-
   var clock: OnChainClock
 
   setup:
@@ -4,6 +4,7 @@ import codex/contracts/deployment
 import codex/conf
 import codex/contracts
 
+import ../checktest
 
 type MockProvider = ref object of Provider
   chainId*: UInt256
@@ -17,8 +18,7 @@ proc configFactory(): CodexConf =
 proc configFactory(marketplace: Option[EthAddress]): CodexConf =
   CodexConf(cmd: noCommand, nat: ValidIpAddress.init("127.0.0.1"), discoveryIp: ValidIpAddress.init(IPv4_any()), metricsAddress: ValidIpAddress.init("127.0.0.1"), marketplaceAddress: marketplace)
 
-suite "Deployment":
-
+asyncchecksuite "Deployment":
   let provider = MockProvider()
 
   test "uses conf value as priority":
@@ -2,39 +2,20 @@ import std/json
 import pkg/asynctest
 import pkg/ethers
 
-# Allow multiple setups and teardowns in a test suite
-template multisetup =
-
-  var setups: seq[proc: Future[void] {.gcsafe.}]
-  var teardowns: seq[proc: Future[void] {.gcsafe.}]
-
-  setup:
-    for setup in setups:
-      await setup()
-
-  teardown:
-    for teardown in teardowns:
-      await teardown()
-
-  template setup(setupBody) {.inject.} =
-    setups.add(proc {.async.} = setupBody)
-
-  template teardown(teardownBody) {.inject.} =
-    teardowns.insert(proc {.async.} = teardownBody)
+import ./helpers
+import ./checktest
 
+## Unit testing suite that sets up an Ethereum testing environment.
+## Injects a `provider` instance, and a list of `accounts`.
+## Calls the `evm_snapshot` and `evm_revert` methods to ensure that any
+## changes to the blockchain do not persist.
 template ethersuite*(name, body) =
-  suite name:
+  asyncchecksuite name:
 
     var provider {.inject, used.}: JsonRpcProvider
     var accounts {.inject, used.}: seq[Address]
     var snapshot: JsonNode
 
     multisetup()
 
     setup:
       provider = JsonRpcProvider.new("ws://localhost:8545")
       snapshot = await send(provider, "evm_snapshot")
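For orientation, a consumer of `ethersuite` reads like any other suite: `provider` and `accounts` are injected, a snapshot is taken before each test and reverted afterwards, and the leak check from `asyncchecksuite` runs in teardown. A minimal hypothetical example (it assumes a local Ethereum node on ws://localhost:8545, which the template hard-codes):

import pkg/asynctest
import ./ethertest

ethersuite "Ethereum environment":
  test "injects accounts from the local node":
    # provider and accounts are declared by the ethersuite template
    check accounts.len > 0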
@@ -0,0 +1,4 @@
+import helpers/multisetup
+import helpers/trackers
+
+export multisetup, trackers
@@ -0,0 +1,38 @@
+import pkg/chronos
+
+# Allow multiple setups and teardowns in a test suite
+template asyncmultisetup* =
+  var setups: seq[proc: Future[void] {.gcsafe.}]
+  var teardowns: seq[proc: Future[void] {.gcsafe.}]
+
+  setup:
+    for setup in setups:
+      await setup()
+
+  teardown:
+    for teardown in teardowns:
+      await teardown()
+
+  template setup(setupBody) {.inject.} =
+    setups.add(proc {.async.} = setupBody)
+
+  template teardown(teardownBody) {.inject.} =
+    teardowns.insert(proc {.async.} = teardownBody)
+
+template multisetup* =
+  var setups: seq[proc() {.gcsafe.}]
+  var teardowns: seq[proc() {.gcsafe.}]
+
+  setup:
+    for setup in setups:
+      setup()
+
+  teardown:
+    for teardown in teardowns:
+      teardown()
+
+  template setup(setupBody) {.inject.} =
+    setups.add(proc = setupBody)
+
+  template teardown(teardownBody) {.inject.} =
+    teardowns.insert(proc = teardownBody)
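The point of these templates is that a wrapping template (such as `ethersuite`) and the test body can each register their own setup and teardown without overwriting each other: setups run in registration order (`add`), teardowns in reverse registration order (`insert` at the front). A small illustrative sketch, assuming the helper is importable from the test's directory (path hypothetical):

import std/unittest
import ./multisetup   # path assumed; provides multisetup()

suite "layered setup and teardown":
  multisetup()

  var order: seq[string]

  setup:
    order.add "first setup"     # registered first, runs first

  setup:
    order.add "second setup"    # registered second, runs second

  teardown:
    order.add "first teardown"  # inserted first, ends up running last

  teardown:
    order.add "second teardown" # inserted at the front, runs first

  test "setups run in registration order":
    check order == @["first setup", "second setup"]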
@@ -0,0 +1,30 @@
+import pkg/codex/streams/storestream
+import std/unittest
+
+# From libp2p/tests/helpers
+const trackerNames = [
+    StoreStreamTrackerName
+  ]
+
+iterator testTrackers*(extras: openArray[string] = []): TrackerBase =
+  for name in trackerNames:
+    let t = getTracker(name)
+    if not isNil(t): yield t
+  for name in extras:
+    let t = getTracker(name)
+    if not isNil(t): yield t
+
+proc checkTracker*(name: string) =
+  var tracker = getTracker(name)
+  if tracker.isLeaked():
+    checkpoint tracker.dump()
+    fail()
+
+proc checkTrackers*() =
+  for tracker in testTrackers():
+    if tracker.isLeaked():
+      checkpoint tracker.dump()
+      fail()
+  try:
+    GC_fullCollect()
+  except: discard
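Beyond the automatic check installed by the suite templates, these helpers can also be used directly: `checkTracker` verifies a single tracker by name, and `testTrackers` accepts extra tracker names so future resources can be covered without editing `trackerNames`. A small hypothetical example (the "ExampleStream" name does not exist in this diff; it only illustrates the extras parameter, and the import path for the helper is assumed):

import std/unittest
import ./trackers   # path assumed; the helper module added above

suite "manual tracker checks":
  teardown:
    # Walk the built-in tracker list plus a hypothetical extra one.
    for tracker in testTrackers(["ExampleStream"]):
      if tracker.isLeaked():
        checkpoint tracker.dump()
        fail()

  test "all known trackers are clean":
    checkTrackers()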
@@ -14,7 +14,6 @@ import ./twonodes
 # to enable custom logging levels for specific topics like: debug2 = "INFO; TRACE: marketplace"
 
 twonodessuite "Integration tests", debug1 = false, debug2 = false:
-
   setup:
     # Our Hardhat configuration does use automine, which means that time tracked by `provider.currentTime()` is not
     # advanced until blocks are mined and that happens only when transaction is submitted.
@@ -6,7 +6,6 @@ import ../ethertest
 import ./nodes
 
 ethersuite "Node block expiration tests":
-
   var node: NodeProcess
   var baseurl: string
 
@@ -14,7 +14,6 @@ logScope:
   topics = "test proofs"
 
 twonodessuite "Proving integration test", debug1=false, debug2=false:
-
   let validatorDir = getTempDir() / "CodexValidator"
 
   var marketplace: Marketplace
@@ -14,7 +14,6 @@ template twonodessuite*(name: string, debug1, debug2: bool | string, body) =
   twonodessuite(name, $debug1, $debug2, body)
 
 template twonodessuite*(name: string, debug1, debug2: string, body) =
 
   ethersuite name:
-
     var node1 {.inject, used.}: NodeProcess