Create memory-leak detecting test suite (#226)
* adding tracker for streamstore
* adding tracker tests
* Sets up tracker helper functions and closes streams in testnode.nim
* Deploying checksuite for memory leak tracker checking.
* Successfully deploys checksuite and asyncchecksuite.
* Fix leak in testpor.nim
* Fixes leaked storestream in testnetwork.nim
* Fixes integration tests
* Cleanup
* cleanup comment by Mark

---------

Co-authored-by: benbierens <thatbenbierens@gmail.com>
This commit is contained in:
parent e47b38af11
commit bd594c9aaf
@@ -29,6 +29,9 @@ export stores, blocktype, manifest, chronos
 logScope:
   topics = "codex storestream"

+const
+  StoreStreamTrackerName* = "StoreStream"
+
 type
   # Make SeekableStream from a sequence of blocks stored in Manifest
   # (only original file data - see StoreStream.size)

@@ -37,6 +40,12 @@ type
     manifest*: Manifest # List of block CIDs
     pad*: bool # Pad last block to manifest.blockSize?

+method initStream*(s: StoreStream) =
+  if s.objName.len == 0:
+    s.objName = StoreStreamTrackerName
+
+  procCall SeekableStream(s).initStream()
+
 proc new*(
   T: type StoreStream,
   store: BlockStore,
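The change above registers every StoreStream with a chronos tracker under StoreStreamTrackerName, so a stream that is never closed shows up as a leak. A minimal sketch of how that surfaces, assuming a store and manifest that are already populated (getTracker, isLeaked and dump are the chronos tracker API also used by the new test helpers further down):

import pkg/chronos
import pkg/codex/stores
import pkg/codex/manifest
import pkg/codex/streams/storestream

proc demoLeakCheck(store: BlockStore, manifest: Manifest) {.async.} =
  let stream = StoreStream.new(store, manifest)
  # ... read from the stream ...
  await stream.close()  # skipping this close is exactly the kind of leak the tracker reports

  let tracker = getTracker(StoreStreamTrackerName)
  if not isNil(tracker) and tracker.isLeaked():
    echo tracker.dump()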
@@ -0,0 +1,27 @@
+import ./helpers
+
+## Unit testing suite that calls checkTrackers in teardown to check for memory leaks using chronos trackers.
+template checksuite*(name, body) =
+  suite name:
+    multisetup()
+
+    teardown:
+      checkTrackers()
+
+    # Avoids GcUnsafe2 warnings with chronos
+    # Copied from asynctest/templates.nim
+    let suiteproc = proc =
+      body
+
+    suiteproc()
+
+template asyncchecksuite*(name, body) =
+  suite name:
+    asyncmultisetup()
+
+    teardown:
+      checkTrackers()
+
+    body
+
+export helpers
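For context, this is roughly how a test module picks the new templates up — a sketch only, with an illustrative layout (the test files in this commit reach checktest through their local helpers module, which re-exports it):

import pkg/chronos
import pkg/asynctest   # provides suite/test and async-aware setup/teardown
import ./helpers       # re-exports checktest, so checksuite/asyncchecksuite are in scope

asyncchecksuite "my async suite":
  test "completes async work without leaking tracked resources":
    await sleepAsync(1.millis)
    check true

checkTrackers() then runs in the suite's teardown and fails the run if any registered tracker still reports live objects.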
@@ -20,7 +20,7 @@ import ../../helpers/mockdiscovery
 import ../../helpers
 import ../../examples

-suite "Block Advertising and Discovery":
+asyncchecksuite "Block Advertising and Discovery":
   let chunker = RandomChunker.new(Rng.instance(), size = 4096, chunkSize = 256)

   var

@@ -172,7 +172,7 @@ suite "Block Advertising and Discovery":

     await engine.stop()

-suite "E2E - Multiple Nodes Discovery":
+asyncchecksuite "E2E - Multiple Nodes Discovery":
   let chunker = RandomChunker.new(Rng.instance(), size = 4096, chunkSize = 256)

   var

@@ -19,7 +19,7 @@ import ../../helpers/mockdiscovery
 import ../../helpers
 import ../../examples

-suite "Test Discovery Engine":
+asyncchecksuite "Test Discovery Engine":
   let chunker = RandomChunker.new(Rng.instance(), size = 4096, chunkSize = 256)

   var

@@ -18,7 +18,7 @@ import pkg/codex/blocktype as bt
 import ../../examples
 import ../../helpers

-suite "NetworkStore engine - 2 nodes":
+asyncchecksuite "NetworkStore engine - 2 nodes":
   let
     chunker1 = RandomChunker.new(Rng.instance(), size = 2048, chunkSize = 256)
     chunker2 = RandomChunker.new(Rng.instance(), size = 2048, chunkSize = 256)

@@ -180,7 +180,7 @@ suite "NetworkStore engine - 2 nodes":

     check eventually wallet.balance(channel, Asset) > 0

-suite "NetworkStore - multiple nodes":
+asyncchecksuite "NetworkStore - multiple nodes":
   let
     chunker = RandomChunker.new(Rng.instance(), size = 4096, chunkSize = 256)

@@ -20,7 +20,7 @@ import pkg/codex/utils/asyncheapqueue
 import ../../helpers
 import ../../examples

-suite "NetworkStore engine basic":
+asyncchecksuite "NetworkStore engine basic":
   var
     rng: Rng
     seckey: PrivateKey

@@ -125,7 +125,7 @@ suite "NetworkStore engine basic":

     await done.wait(100.millis)

-suite "NetworkStore engine handlers":
+asyncchecksuite "NetworkStore engine handlers":
   var
     rng: Rng
     seckey: PrivateKey

@@ -352,7 +352,7 @@ suite "NetworkStore engine handlers":
     check cid in peerCtx.peerHave
     check peerCtx.blocks[cid].price == price

-suite "Task Handler":
+asyncchecksuite "Task Handler":
   var
     rng: Rng
     seckey: PrivateKey
@@ -2,8 +2,9 @@ import std/unittest

 import pkg/codex/stores
 import ../../examples
+import ../../helpers

-suite "engine payments":
+checksuite "engine payments":

   let address = EthAddress.example
   let amount = 42.u256

@@ -2,9 +2,10 @@ import pkg/asynctest
 import pkg/chronos
 import pkg/stew/byteutils
 import ../../examples
+import ../../helpers
 import pkg/codex/stores

-suite "account protobuf messages":
+checksuite "account protobuf messages":

   let account = Account(address: EthAddress.example)
   let message = AccountMessage.init(account)

@@ -20,7 +21,7 @@ suite "account protobuf messages":
     incorrect.address.del(0)
     check Account.init(incorrect).isNone

-suite "channel update messages":
+checksuite "channel update messages":

   let state = SignedState.example
   let update = StateChannelUpdate.init(state)

@@ -4,8 +4,9 @@ import pkg/libp2p

 import pkg/codex/blockexchange/protobuf/presence
 import ../../examples
+import ../../helpers

-suite "block presence protobuf messages":
+checksuite "block presence protobuf messages":

   let cid = Cid.example
   let price = UInt256.example
@@ -14,7 +14,7 @@ import pkg/codex/blockexchange
 import ../helpers
 import ../examples

-suite "Network - Handlers":
+asyncchecksuite "Network - Handlers":
   let
     rng = Rng.instance()
     seckey = PrivateKey.random(rng[]).tryGet()

@@ -137,7 +137,7 @@ suite "Network - Handlers":

     await done.wait(100.millis)

-suite "Network - Senders":
+asyncchecksuite "Network - Senders":
   let
     chunker = RandomChunker.new(Rng.instance(), size = 1024, chunkSize = 256)

@@ -260,7 +260,7 @@ suite "Network - Senders":
     await network1.sendPayment(switch2.peerInfo.peerId, payment)
     await done.wait(500.millis)

-suite "Network - Test Limits":
+asyncchecksuite "Network - Test Limits":
   var
     switch1, switch2: Switch
     network1, network2: BlockExcNetwork
@@ -8,9 +8,10 @@ import pkg/codex/blockexchange/peers
 import pkg/codex/blockexchange/protobuf/blockexc
 import pkg/codex/blockexchange/protobuf/presence

+import ../helpers
 import ../examples

-suite "Peer Context Store":
+checksuite "Peer Context Store":
   var
     store: PeerCtxStore
     peerCtx: BlockExcPeerCtx

@@ -30,7 +31,7 @@ suite "Peer Context Store":
   test "Should get peer":
     check store.get(peerCtx.id) == peerCtx

-suite "Peer Context Store Peer Selection":
+checksuite "Peer Context Store Peer Selection":
   var
     store: PeerCtxStore
     peerCtxs: seq[BlockExcPeerCtx]

@@ -9,7 +9,9 @@ import pkg/stew/byteutils
 import pkg/codex/blocktype as bt
 import pkg/codex/blockexchange

-suite "Pending Blocks":
+import ../helpers
+
+checksuite "Pending Blocks":
   test "Should add want handle":
     let
       pendingBlocks = PendingBlocksManager.new()
@@ -10,8 +10,9 @@ import ./helpers/nodeutils
 import ./helpers/randomchunker
 import ./helpers/mockdiscovery
 import ./helpers/eventually
+import ../checktest

-export randomchunker, nodeutils, mockdiscovery, eventually
+export randomchunker, nodeutils, mockdiscovery, eventually, checktest

 # NOTE: The meaning of equality for blocks
 # is changed here, because blocks are now `ref`

@@ -3,6 +3,9 @@ import pkg/questionable
 import pkg/questionable/results

 import pkg/codex/sales/reservations
+import ../helpers

+export checktest
+
 proc allAvailabilities*(r: Reservations): Future[seq[Availability]] {.async.} =
   var ret: seq[Availability] = @[]
@@ -6,9 +6,9 @@ import pkg/codex/sales/states/cancelled
 import pkg/codex/sales/states/failed
 import pkg/codex/sales/states/filled
 import ../../examples
+import ../../helpers

-suite "sales state 'downloading'":
-
+checksuite "sales state 'downloading'":
   let request = StorageRequest.example
   let slotIndex = (request.ask.slots div 2).u256
   var state: SaleDownloading

@@ -8,8 +8,9 @@ import pkg/codex/sales/states/errored
 import pkg/codex/sales/states/finished
 import ../../helpers/mockmarket
 import ../../examples
+import ../../helpers

-suite "sales state 'filled'":
+checksuite "sales state 'filled'":

   let request = StorageRequest.example
   let slotIndex = (request.ask.slots div 2).u256

@@ -6,8 +6,9 @@ import pkg/codex/sales/states/cancelled
 import pkg/codex/sales/states/failed
 import pkg/codex/sales/states/filled
 import ../../examples
+import ../../helpers

-suite "sales state 'filling'":
+checksuite "sales state 'filling'":

   let request = StorageRequest.example
   let slotIndex = (request.ask.slots div 2).u256

@@ -5,8 +5,9 @@ import pkg/codex/sales/states/finished
 import pkg/codex/sales/states/cancelled
 import pkg/codex/sales/states/failed
 import ../../examples
+import ../../helpers

-suite "sales state 'finished'":
+checksuite "sales state 'finished'":

   let request = StorageRequest.example
   var state: SaleFinished

@@ -6,8 +6,9 @@ import pkg/codex/sales/states/cancelled
 import pkg/codex/sales/states/failed
 import pkg/codex/sales/states/filled
 import ../../examples
+import ../../helpers

-suite "sales state 'proving'":
+checksuite "sales state 'proving'":

   let request = StorageRequest.example
   let slotIndex = (request.ask.slots div 2).u256

@@ -10,8 +10,9 @@ import pkg/codex/sales/states/finished
 import pkg/codex/sales/states/failed
 import ../../helpers/mockmarket
 import ../../examples
+import ../../helpers

-suite "sales state 'unknown'":
+checksuite "sales state 'unknown'":

   let request = StorageRequest.example
   let slotIndex = (request.ask.slots div 2).u256
@@ -13,8 +13,7 @@ import pkg/codex/sales
 import ../examples
 import ./helpers

-suite "Reservations module":
-
+asyncchecksuite "Reservations module":
   var
     repo: RepoStore
     repoDs: Datastore

@@ -21,8 +21,7 @@ import ../helpers/eventually
 import ../examples
 import ./helpers

-suite "Sales":
-
+asyncchecksuite "Sales":
   let proof = exampleProof()

   var availability: Availability

@@ -42,8 +42,7 @@ method onError*(state: MockErrorState, err: ref CatchableError): ?State =
 method run*(state: MockErrorState, machine: Machine): Future[?State] {.async.} =
   raise newException(ValueError, "failure")

-suite "Sales agent":
-
+asyncchecksuite "Sales agent":
   var request = StorageRequest(
     ask: StorageAsk(
       slots: 4,
@@ -23,7 +23,7 @@ const
   BlockSize = 31 * 64
   DataSetSize = BlockSize * 100

-suite "Storage Proofs Network":
+asyncchecksuite "Storage Proofs Network":
   let
     hostAddr = ca.Address.example
     blocks = toSeq([1, 5, 10, 14, 20, 12, 22]) # TODO: maybe make them random

@@ -43,10 +43,11 @@ suite "Storage Proofs Network":
     spk: st.PublicKey
     porMsg: PorMessage
     cid: Cid
+    porStream: StoreStream
     por: PoR
     tags: seq[Tag]

-  setupAll:
+  setup:
     chunker = RandomChunker.new(Rng.instance(), size = DataSetSize, chunkSize = BlockSize)
     store = CacheStore.new(cacheSize = DataSetSize, chunkSize = BlockSize)
     manifest = Manifest.new(blockSize = BlockSize).tryGet()

@@ -61,16 +62,16 @@ suite "Storage Proofs Network":
       (await store.putBlock(blk)).tryGet()

     cid = manifest.cid.tryGet()
+    porStream = StoreStream.new(store, manifest)
     por = await PoR.init(
-      StoreStream.new(store, manifest),
+      porStream,
       ssk, spk,
       BlockSize)

     porMsg = por.toMessage()
     tags = blocks.mapIt(
-      Tag(idx: it, tag: porMsg.authenticators[it]) )
+      Tag(idx: it, tag: porMsg.authenticators[it]))

-  setup:
     switch1 = newStandardSwitch()
     switch2 = newStandardSwitch()

@@ -89,6 +90,7 @@ suite "Storage Proofs Network":
   teardown:
     await switch1.stop()
     await switch2.stop()
+    await close(porStream)

   test "Should upload to host":
     var
@@ -19,13 +19,15 @@ const
   SectorsPerBlock = BlockSize div SectorSize
   DataSetSize = BlockSize * 100

-suite "BLS PoR":
+asyncchecksuite "BLS PoR":
   var
     chunker: RandomChunker
     manifest: Manifest
     store: BlockStore
     ssk: st.SecretKey
     spk: st.PublicKey
+    porStream: StoreStream
+    proofStream: StoreStream

   setup:
     chunker = RandomChunker.new(Rng.instance(), size = DataSetSize, chunkSize = BlockSize)

@@ -33,6 +35,9 @@ suite "BLS PoR":
     manifest = Manifest.new(blockSize = BlockSize).tryGet()
     (spk, ssk) = st.keyGen()

+    porStream = StoreStream.new(store, manifest)
+    proofStream = StoreStream.new(store, manifest)
+
     while (
       let chunk = await chunker.getBytes();
       chunk.len > 0):

@@ -41,41 +46,43 @@ suite "BLS PoR":
       manifest.add(blk.cid)
       (await store.putBlock(blk)).tryGet()

-  test "Test PoR without corruption":
-    let
-      por = await PoR.init(
-        StoreStream.new(store, manifest),
+  teardown:
+    await close(porStream)
+    await close(proofStream)
+
+  proc createPor(): Future[PoR] =
+    return PoR.init(
+      porStream,
       ssk,
       spk,
       BlockSize)
-      q = generateQuery(por.tau, 22)
-      proof = await generateProof(
-        StoreStream.new(store, manifest),
+
+  proc createProof(por: PoR, q: seq[QElement]): Future[Proof] =
+    return generateProof(
+      proofStream,
       q,
       por.authenticators,
       SectorsPerBlock)

+  test "Test PoR without corruption":
+    let
+      por = await createPor()
+      q = generateQuery(por.tau, 22)
+      proof = await createProof(por, q)
+
     check por.verifyProof(q, proof.mu, proof.sigma)

   test "Test PoR with corruption - query: 22, corrupted blocks: 300, bytes: 10":
     let
-      por = await PoR.init(
-        StoreStream.new(store, manifest),
-        ssk,
-        spk,
-        BlockSize)
+      por = await createPor()
       pos = await store.corruptBlocks(manifest, 30, 10)
       q = generateQuery(por.tau, 22)
-      proof = await generateProof(
-        StoreStream.new(store, manifest),
-        q,
-        por.authenticators,
-        SectorsPerBlock)
+      proof = await createProof(por, q)

     check pos.len == 30
     check not por.verifyProof(q, proof.mu, proof.sigma)

-suite "Test Serialization":
+asyncchecksuite "Test Serialization":
   var
     chunker: RandomChunker
     manifest: Manifest

@@ -85,8 +92,10 @@ suite "Test Serialization":
     por: PoR
     q: seq[QElement]
     proof: Proof
+    porStream: StoreStream
+    proofStream: StoreStream

-  setupAll:
+  setup:
     chunker = RandomChunker.new(Rng.instance(), size = DataSetSize, chunkSize = BlockSize)
     store = CacheStore.new(cacheSize = DataSetSize, chunkSize = BlockSize)
     manifest = Manifest.new(blockSize = BlockSize).tryGet()

@@ -100,18 +109,24 @@ suite "Test Serialization":
     (await store.putBlock(blk)).tryGet()

     (spk, ssk) = st.keyGen()
+    porStream = StoreStream.new(store, manifest)
     por = await PoR.init(
-      StoreStream.new(store, manifest),
+      porStream,
       ssk,
       spk,
       BlockSize)
     q = generateQuery(por.tau, 22)
+    proofStream = StoreStream.new(store, manifest)
     proof = await generateProof(
-      StoreStream.new(store, manifest),
+      proofStream,
       q,
       por.authenticators,
       SectorsPerBlock)

+  teardown:
+    await close(porStream)
+    await close(proofStream)
+
   test "Serialize Public Key":
     var
       spkMessage = spk.toMessage()
@@ -15,7 +15,7 @@ const
   BlockSize = 31 * 64
   DataSetSize = BlockSize * 100

-suite "Test PoR store":
+asyncchecksuite "Test PoR store":
   let
     blocks = toSeq([1, 5, 10, 14, 20, 12, 22]) # TODO: maybe make them random

@@ -27,12 +27,13 @@ suite "Test PoR store":
     spk: st.PublicKey
     repoDir: string
     stpstore: st.StpStore
+    porStream: StoreStream
     por: PoR
     porMsg: PorMessage
     cid: Cid
     tags: seq[Tag]

-  setupAll:
+  setup:
     chunker = RandomChunker.new(Rng.instance(), size = DataSetSize, chunkSize = BlockSize)
     store = CacheStore.new(cacheSize = DataSetSize, chunkSize = BlockSize)
     manifest = Manifest.new(blockSize = BlockSize).tryGet()

@@ -47,8 +48,9 @@ suite "Test PoR store":
       (await store.putBlock(blk)).tryGet()

     cid = manifest.cid.tryGet()
+    porStream = StoreStream.new(store, manifest)
     por = await PoR.init(
-      StoreStream.new(store, manifest),
+      porStream,
       ssk, spk,
       BlockSize)

@@ -60,7 +62,8 @@ suite "Test PoR store":
     createDir(repoDir)
     stpstore = st.StpStore.init(repoDir)

-  teardownAll:
+  teardown:
+    await close(porStream)
     removeDir(repoDir)

   test "Should store Storage Proofs":

@@ -68,6 +71,7 @@ suite "Test PoR store":
     check fileExists(stpstore.stpPath(cid) / "por")

   test "Should retrieve Storage Proofs":
+    discard await stpstore.store(por.toMessage(), cid)
     check (await stpstore.retrieve(cid)).tryGet() == porMsg

   test "Should store tags":

@@ -76,4 +80,5 @@ suite "Test PoR store":
       check fileExists(stpstore.stpPath(cid) / $t.idx )

   test "Should retrieve tags":
+    discard await stpstore.store(tags, cid)
     check (await stpstore.retrieve(cid, blocks)).tryGet() == tags
@@ -23,7 +23,7 @@ proc commonBlockStoreTests*(name: string,
   before: Before = nil,
   after: After = nil) =

-  suite name & " Store Common":
+  asyncchecksuite name & " Store Common":
    var
      newBlock, newBlock1, newBlock2, newBlock3: Block
      store: BlockStore

@@ -12,7 +12,7 @@ import ./commonstoretests

 import ../helpers

-suite "Cache Store":
+checksuite "Cache Store":
   var
     newBlock, newBlock1, newBlock2, newBlock3: Block
     store: CacheStore

@@ -38,7 +38,7 @@ proc createManifestCid(): ?!Cid =
   let cid = ? Cid.init(version, codec, hash).mapFailure
   return success cid

-suite "KeyUtils":
+checksuite "KeyUtils":
   test "makePrefixKey should create block key":
     let length = 6
     let cid = Cid.example
@@ -15,6 +15,7 @@ import pkg/codex/blocktype as bt
 import pkg/codex/stores/repostore
 import pkg/codex/clock

+import ../helpers
 import ../helpers/mocktimer
 import ../helpers/mockrepostore
 import ../helpers/mockclock

@@ -22,7 +23,7 @@ import ../examples

 import codex/stores/maintenance

-suite "BlockMaintainer":
+checksuite "BlockMaintainer":
   var mockRepoStore: MockRepoStore
   var interval: Duration
   var mockTimer: MockTimer

@@ -22,7 +22,7 @@ import ../helpers
 import ../helpers/mockclock
 import ./commonstoretests

-suite "Test RepoStore start/stop":
+checksuite "Test RepoStore start/stop":

   var
     repoDs: Datastore

@@ -55,7 +55,7 @@ suite "Test RepoStore start/stop":
     await repo.stop()
     check not repo.started

-suite "RepoStore":
+asyncchecksuite "RepoStore":
   var
     repoDs: Datastore
     metaDs: Datastore
@@ -5,6 +5,8 @@ import pkg/stew/results
 import pkg/codex/utils/asyncheapqueue
 import pkg/codex/rng

+import ./helpers
+
 type
   Task* = tuple[name: string, priority: int]

@@ -21,7 +23,7 @@ proc toSortedSeq[T](h: AsyncHeapQueue[T], queueType = QueueType.Min): seq[T] =
   while tmp.len > 0:
     result.add(popNoWait(tmp).tryGet())

-suite "Synchronous tests":
+checksuite "Synchronous tests":
   test "Test pushNoWait - Min":
     var heap = newAsyncHeapQueue[int]()
     let data = [1, 3, 5, 7, 9, 2, 4, 6, 8, 0]

@@ -127,8 +129,7 @@ suite "Synchronous tests":
     heap.clear()
     check heap.len == 0

-suite "Asynchronous Tests":
-
+asyncchecksuite "Asynchronous Tests":
   test "Test push":
     var heap = newAsyncHeapQueue[int]()
     let data = [1, 3, 5, 7, 9, 2, 4, 6, 8, 0]
@@ -5,7 +5,9 @@ import pkg/chronicles
 import pkg/chronos
 import pkg/libp2p

-suite "Chunking":
+import ./helpers
+
+asyncchecksuite "Chunking":
   test "should return proper size chunks":
     var offset = 0
     let contents = [1.byte, 2, 3, 4, 5, 6, 7, 8, 9, 0]

@@ -1,8 +1,9 @@
 import std/unittest

 import codex/clock
+import ./helpers

-suite "Clock":
+checksuite "Clock":
   proc testConversion(seconds: SecondsSince1970) =
     let asBytes = seconds.toBytes

@@ -13,8 +13,7 @@ import pkg/codex/rng

 import ./helpers

-suite "Erasure encode/decode":
-
+asyncchecksuite "Erasure encode/decode":
   const BlockSize = 1024
   const dataSetSize = BlockSize * 123 # weird geometry

@@ -12,7 +12,7 @@ import pkg/codex/manifest

 import ./helpers

-suite "Manifest":
+checksuite "Manifest":
   test "Should produce valid tree hash checksum":
     var manifest = Manifest.new(
       blocks = @[
@@ -20,7 +20,7 @@ import pkg/codex/blocktype as bt

 import ./helpers

-suite "Test Node":
+asyncchecksuite "Test Node":
   let
     (path, _, _) = instantiationInfo(-2, fullPaths = true) # get this file's name

@@ -62,6 +62,8 @@ suite "Test Node":
     var
       data: seq[byte]

+    defer: await stream.close()
+
     while not stream.atEof:
       var
         buf = newSeq[byte](oddChunkSize)

@@ -154,8 +156,8 @@ suite "Test Node":
       manifestBlock = (await localStore.getBlock(manifestCid)).tryGet()
       localManifest = Manifest.decode(manifestBlock).tryGet()

-    let
-      data = await retrieve(manifestCid)
+    let data = await retrieve(manifestCid)
+
     check:
       data.len == localManifest.originalBytes
       data.len == original.len

@@ -168,6 +170,7 @@ suite "Test Node":

     (await localStore.putBlock(blk)).tryGet()
     let stream = (await node.retrieve(blk.cid)).tryGet()
+    defer: await stream.close()

     var data = newSeq[byte](testString.len)
     await stream.readExactly(addr data[0], data.len)
@@ -6,8 +6,9 @@ import ./helpers/mockmarket
 import ./helpers/mockclock
 import ./helpers/eventually
 import ./examples
+import ./helpers

-suite "Proving":
+asyncchecksuite "Proving":

   var proving: Proving
   var market: MockMarket

@@ -13,9 +13,9 @@ import ./helpers/mockmarket
 import ./helpers/mockclock
 import ./helpers/eventually
 import ./examples
+import ./helpers

-suite "Purchasing":
-
+asyncchecksuite "Purchasing":
   var purchasing: Purchasing
   var market: MockMarket
   var clock: MockClock

@@ -119,7 +119,7 @@ suite "Purchasing":
     await purchase.wait()
     check market.withdrawn == @[request.id]

-suite "Purchasing state machine":
+checksuite "Purchasing state machine":

   var purchasing: Purchasing
   var market: MockMarket
@@ -10,7 +10,7 @@ import pkg/codex/stores
 import pkg/codex/manifest
 import pkg/codex/blocktype as bt

-suite "StoreStream":
+asyncchecksuite "StoreStream":
   var
     manifest: Manifest
     store: BlockStore

@@ -37,6 +37,9 @@ suite "StoreStream":
       [byte 90, 91, 92, 93, 94, 95, 96, 97, 98, 99],
     ]

+  teardown:
+    await stream.close()
+
   setup:
     store = CacheStore.new()
     manifest = Manifest.new(blockSize = 10).tryGet()

@@ -2,8 +2,9 @@ import std/times
 import std/unittest

 import codex/systemclock
+import ./helpers

-suite "SystemClock":
+checksuite "SystemClock":
   test "Should get now":
     let clock = SystemClock.new()

@@ -7,9 +7,9 @@ import ./helpers/mockmarket
 import ./helpers/mockclock
 import ./helpers/eventually
 import ./examples
+import ./helpers

-suite "validation":
-
+asyncchecksuite "validation":
   let period = 10
   let timeout = 5
   let maxSlots = 100

@@ -4,6 +4,7 @@ import pkg/chronos
 import pkg/upraises
 import codex/utils/asyncstatemachine
 import ../helpers/eventually
+import ../helpers

 type
   State1 = ref object of State

@@ -59,7 +60,7 @@ method onError(state: State4, error: ref CatchableError): ?State =
   inc errors[3]
   some State(State2.new())

-suite "async state machines":
+asyncchecksuite "async state machines":
   var machine: Machine

   proc moveToNextStateEvent(state: State): ?State =
@@ -3,12 +3,12 @@ import std/os
 import pkg/libp2p
 import pkg/questionable/results
 import codex/utils/keyutils
+import ../helpers

 when defined(windows):
   import stew/windows/acl

-suite "keyutils":
-
+checksuite "keyutils":
   let path = getTempDir() / "CodexTest"

   setup:

@@ -1,8 +1,8 @@
 import std/unittest
 import codex/utils/optionalcast
+import ../helpers

-suite "optional casts":
-
+checksuite "optional casts":
   test "casting value to same type works":
     check 42 as int == some 42

@@ -12,8 +12,9 @@ import pkg/asynctest

 import codex/utils/timer
 import ../helpers/eventually
+import ../helpers

-suite "Timer":
+asyncchecksuite "Timer":
   var timer1: Timer
   var timer2: Timer
   var output: string
@@ -4,7 +4,6 @@ import codex/contracts/clock
 import ../ethertest

 ethersuite "On-Chain Clock":
-
   var clock: OnChainClock

   setup:

@@ -4,6 +4,7 @@ import codex/contracts/deployment
 import codex/conf
 import codex/contracts

+import ../checktest

 type MockProvider = ref object of Provider
   chainId*: UInt256

@@ -17,8 +18,7 @@ proc configFactory(): CodexConf =
 proc configFactory(marketplace: Option[EthAddress]): CodexConf =
   CodexConf(cmd: noCommand, nat: ValidIpAddress.init("127.0.0.1"), discoveryIp: ValidIpAddress.init(IPv4_any()), metricsAddress: ValidIpAddress.init("127.0.0.1"), marketplaceAddress: marketplace)

-suite "Deployment":
-
+asyncchecksuite "Deployment":
   let provider = MockProvider()

   test "uses conf value as priority":
@@ -2,39 +2,20 @@ import std/json
 import pkg/asynctest
 import pkg/ethers

-# Allow multiple setups and teardowns in a test suite
-template multisetup =
-
-  var setups: seq[proc: Future[void] {.gcsafe.}]
-  var teardowns: seq[proc: Future[void] {.gcsafe.}]
-
-  setup:
-    for setup in setups:
-      await setup()
-
-  teardown:
-    for teardown in teardowns:
-      await teardown()
-
-  template setup(setupBody) {.inject.} =
-    setups.add(proc {.async.} = setupBody)
-
-  template teardown(teardownBody) {.inject.} =
-    teardowns.insert(proc {.async.} = teardownBody)
+import ./helpers
+import ./checktest

 ## Unit testing suite that sets up an Ethereum testing environment.
 ## Injects a `provider` instance, and a list of `accounts`.
 ## Calls the `evm_snapshot` and `evm_revert` methods to ensure that any
 ## changes to the blockchain do not persist.
 template ethersuite*(name, body) =
-  suite name:
+  asyncchecksuite name:

     var provider {.inject, used.}: JsonRpcProvider
     var accounts {.inject, used.}: seq[Address]
     var snapshot: JsonNode

-    multisetup()
-
     setup:
       provider = JsonRpcProvider.new("ws://localhost:8545")
       snapshot = await send(provider, "evm_snapshot")
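With ethersuite now built on asyncchecksuite, an Ethereum-backed test keeps its previous shape while also getting the leak check in teardown. A hedged sketch, with an illustrative suite and test name, assuming a local node listening on ws://localhost:8545 as the template expects:

import pkg/asynctest
import pkg/ethers
import ./ethertest   # illustrative path, relative to the tests directory

ethersuite "example on-chain suite":
  # `provider` and `accounts` are injected by the template
  test "has at least one funded account":
    check accounts.len > 0
    discard await provider.getBalance(accounts[0])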
@@ -0,0 +1,4 @@
+import helpers/multisetup
+import helpers/trackers
+
+export multisetup, trackers
@@ -0,0 +1,38 @@
+import pkg/chronos
+
+# Allow multiple setups and teardowns in a test suite
+template asyncmultisetup* =
+  var setups: seq[proc: Future[void] {.gcsafe.}]
+  var teardowns: seq[proc: Future[void] {.gcsafe.}]
+
+  setup:
+    for setup in setups:
+      await setup()
+
+  teardown:
+    for teardown in teardowns:
+      await teardown()
+
+  template setup(setupBody) {.inject.} =
+    setups.add(proc {.async.} = setupBody)
+
+  template teardown(teardownBody) {.inject.} =
+    teardowns.insert(proc {.async.} = teardownBody)
+
+template multisetup* =
+  var setups: seq[proc() {.gcsafe.}]
+  var teardowns: seq[proc() {.gcsafe.}]
+
+  setup:
+    for setup in setups:
+      setup()
+
+  teardown:
+    for teardown in teardowns:
+      teardown()
+
+  template setup(setupBody) {.inject.} =
+    setups.add(proc = setupBody)
+
+  template teardown(teardownBody) {.inject.} =
+    teardowns.insert(proc = teardownBody)
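These templates let one suite declare several independent setup/teardown pairs that all run around every test — which is what allows checksuite/asyncchecksuite to add their own teardown without clobbering the test's. A small illustrative sketch (names and values are made up):

import std/unittest
import ./helpers   # pulls in multisetup via the new tests helpers module

suite "two independent fixtures":
  multisetup()

  var a, b: int

  setup:
    a = 1

  setup:
    b = 2   # both setup blocks run before each test

  teardown:
    b = 0

  teardown:
    a = 0   # teardowns are inserted at the front, so they run in reverse declaration order

  test "sees both fixtures":
    check a == 1 and b == 2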
@@ -0,0 +1,30 @@
+import pkg/codex/streams/storestream
+import std/unittest
+
+# From lip2p/tests/helpers
+const trackerNames = [
+    StoreStreamTrackerName
+  ]
+
+iterator testTrackers*(extras: openArray[string] = []): TrackerBase =
+  for name in trackerNames:
+    let t = getTracker(name)
+    if not isNil(t): yield t
+  for name in extras:
+    let t = getTracker(name)
+    if not isNil(t): yield t
+
+proc checkTracker*(name: string) =
+  var tracker = getTracker(name)
+  if tracker.isLeaked():
+    checkpoint tracker.dump()
+    fail()
+
+proc checkTrackers*() =
+  for tracker in testTrackers():
+    if tracker.isLeaked():
+      checkpoint tracker.dump()
+      fail()
+  try:
+    GC_fullCollect()
+  except: discard
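A single tracker can also be asserted on directly inside a test. A hedged sketch (suite and test names are illustrative; tryGet comes from questionable/results, and the helpers module re-exports checktest and trackers):

import pkg/asynctest
import pkg/questionable/results
import pkg/codex/stores
import pkg/codex/manifest
import pkg/codex/streams/storestream
import ./helpers

asyncchecksuite "storestream cleanup":
  test "closing the stream keeps the tracker clean":
    let
      store = CacheStore.new()
      manifest = Manifest.new(blockSize = 10).tryGet()
      stream = StoreStream.new(store, manifest)

    await stream.close()
    checkTracker(StoreStreamTrackerName)   # fails the test if a StoreStream is still live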
@@ -14,7 +14,6 @@ import ./twonodes
 # to enable custom logging levels for specific topics like: debug2 = "INFO; TRACE: marketplace"

 twonodessuite "Integration tests", debug1 = false, debug2 = false:
-
   setup:
     # Our Hardhat configuration does use automine, which means that time tracked by `provider.currentTime()` is not
     # advanced until blocks are mined and that happens only when transaction is submitted.

@@ -6,7 +6,6 @@ import ../ethertest
 import ./nodes

 ethersuite "Node block expiration tests":
-
   var node: NodeProcess
   var baseurl: string

@@ -14,7 +14,6 @@ logScope:
   topics = "test proofs"

 twonodessuite "Proving integration test", debug1=false, debug2=false:
-
   let validatorDir = getTempDir() / "CodexValidator"

   var marketplace: Marketplace

@@ -14,7 +14,6 @@ template twonodessuite*(name: string, debug1, debug2: bool | string, body) =
   twonodessuite(name, $debug1, $debug2, body)

 template twonodessuite*(name: string, debug1, debug2: string, body) =

   ethersuite name:
-
     var node1 {.inject, used.}: NodeProcess