nim-codex/tests/examples.nim
Eric d70ab59004
refactor: multinode integration test refactor (#662)
* refactor multi node test suite

Refactor the multinode test suite into the marketplace test suite.

- An arbitrary number of nodes can be started with each test: clients, providers, and validators
- Hardhat can also be started locally with each test, usually for the purpose of saving and inspecting its log file.
- Log files for all nodes can be persisted on disk, with configuration at the test level
- Log files, if persisted (as specified in the test), are uploaded as a CI artifact
- Node config is specified at the test level instead of the suite level (see the sketch after this list)
- Node/Hardhat process starting/stopping is now async, and runs much faster
- Per-node config includes:
  - simulating proof failures
  - logging to file
  - log level
  - log topics
  - storage quota
  - debug (print logs to stdout)
- Tests find the next available ports when starting nodes, as closing ports on Windows can lag
- Hardhat is no longer required to be running prior to starting the integration tests (as long as Hardhat is configured to run in the tests).
  - If Hardhat is already running, a snapshot will be taken and reverted before and after each test, respectively.
  - If Hardhat is not already running but is configured to run at the test level, a Hardhat process will be spawned and torn down before and after each test, respectively.
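
For illustration, a rough sketch of what a per-test node configuration can look like under the refactored suite. The builder names below (`NodeConfigs`, `CodexConfig`, `HardhatConfig`, `.nodes`, `.withLogFile`, `.withLogTopics`, `.simulateProofFailuresFor`, `.debug`) are assumptions based on this description and may not match the code exactly:

```nim
# Sketch only: builder names and exact shapes are assumptions.
marketplacesuite "Example marketplace suite":

  test "example test with per-test node config", NodeConfigs(
    # Start a local Hardhat process for this test, mainly so its log file can be inspected.
    hardhat: HardhatConfig().withLogFile(),

    clients: CodexConfig()
      .nodes(1)
      .withLogFile()                        # persist this node's log; uploaded as a CI artifact
      .withLogTopics("node", "purchases"),  # narrow the log topics

    providers: CodexConfig()
      .nodes(2)
      .simulateProofFailuresFor(providerIdx = 0, failEveryNProofs = 3)
      .debug()                              # also print this node's logs to stdout
  ):
    discard # test body: request storage, wait for slots to be filled, etc.
```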

* additional logging for debug purposes

* address PR feedback

- fix spelling
- revert change from catching ProviderError to SignerError -- this should be handled more consistently in the Market abstraction, and will be handled in another PR.
- remove method label from raiseAssert
- remove unused import

* Use API instead of command exec to test for free port

Use the chronos `createStreamServer` API to test for a free port by binding a localhost address and port. Use `ServerFlags.ReuseAddr` to enable reuse of the same IP/port across multiple test runs.
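
A minimal sketch of the free-port check using chronos; the helper name `nextFreePort` and the exact error handling are assumptions, while `createStreamServer`, `ServerFlags.ReuseAddr`, and `TransportOsError` are existing chronos APIs:

```nim
import pkg/chronos

proc nextFreePort(startPort: int): Future[int] {.async.} =
  ## Probe ports starting at `startPort` and return the first one that binds.
  proc handler(server: StreamServer, transp: StreamTransport) {.async.} =
    await transp.closeWait()

  var port = startPort
  while true:
    let address = initTAddress("127.0.0.1", Port(port))
    try:
      # ReuseAddr allows rebinding the same IP/port across repeated test runs,
      # which matters on Windows where closed ports can linger.
      let server = createStreamServer(address, handler,
                                      flags = {ServerFlags.ReuseAddr})
      await server.closeWait()
      return port
    except TransportOsError:
      inc port # port is taken; try the next one
```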

* clean up

* remove upraises annotations from tests

* Update tests to work with updated erasure coding slot sizes

* update dataset size, nodes, tolerance to match valid ec params

Integration tests now have valid dataset sizes (blocks), tolerances, and numbers of nodes, to work with valid EC params. These values are validated when storage is requested.

Print the REST API failure message (via doAssert) when a REST API call fails (e.g. the REST API may validate some EC params).

All integration tests pass when the async `clock.now` changes are reverted.
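
A sketch of the doAssert idea, using std/httpclient directly; the endpoint path, port, and request body fields are assumptions for illustration:

```nim
import std/httpclient

let client = newHttpClient()
let cid = "zb2rhheVmk3bLks5MgzTqyznLu1zqGH5jrfTA1eAZXrjx7Vob" # example CID
let requestBody = """{"duration": "36000", "reward": "84", "nodes": 3, "tolerance": 1}"""
let response = client.post(
  "http://127.0.0.1:8080/api/codex/v1/storage/request/" & cid, body = requestBody)

# doAssert with a message: if the API rejects the request (e.g. because the EC
# params are invalid), its response body is printed alongside the failed assertion.
doAssert response.status == "200 OK",
  "expected 200 OK, got " & response.status & ", body: " & response.body
```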

* don't use async clock for now

* fix workflow

* move integration logs upload to reusable

---------

Co-authored-by: Dmitriy Ryajov <dryajov@gmail.com>
2024-02-19 04:55:39 +00:00

import std/random
import std/sequtils
import std/times
import std/typetraits

import pkg/codex/contracts/requests
import pkg/codex/rng
import pkg/codex/contracts/proofs
import pkg/codex/sales/slotqueue
import pkg/codex/stores
import pkg/codex/units
import pkg/chronos
import pkg/stew/byteutils
import pkg/stint

import ./codex/helpers/randomchunker

export randomchunker
export units

proc exampleString*(length: int): string =
  let chars = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
  result = newString(length) # Create a new empty string with a given length
  for i in 0..<length:
    result[i] = chars[rand(chars.len-1)] # Generate a random index and set the string's character

proc example*[T: SomeInteger](_: type T): T =
  rand(T)

proc example*[T,N](_: type array[N, T]): array[N, T] =
  for item in result.mitems:
    item = T.example

proc example*[T](_: type seq[T]): seq[T] =
  let length = uint8.example.int
  newSeqWith(length, T.example)

proc example*(_: type UInt256): UInt256 =
  UInt256.fromBytes(array[32, byte].example)

proc example*[T: distinct](_: type T): T =
  type baseType = T.distinctBase
  T(baseType.example)

proc example*(_: type StorageRequest): StorageRequest =
  StorageRequest(
    client: Address.example,
    ask: StorageAsk(
      slots: 4,
      slotSize: (1 * 1024 * 1024 * 1024).u256, # 1 Gigabyte
      duration: (10 * 60 * 60).u256, # 10 hours
      collateral: 200.u256,
      proofProbability: 4.u256, # require a proof roughly once every 4 periods
      reward: 84.u256,
      maxSlotLoss: 2 # 2 slots can be freed without data considered to be lost
    ),
    content: StorageContent(
      cid: "zb2rhheVmk3bLks5MgzTqyznLu1zqGH5jrfTA1eAZXrjx7Vob",
      merkleRoot: array[32, byte].example
    ),
    expiry: (getTime() + 1.hours).toUnix.u256,
    nonce: Nonce.example
  )

proc example*(_: type Slot): Slot =
  let request = StorageRequest.example
  let slotIndex = rand(request.ask.slots.int).u256
  Slot(request: request, slotIndex: slotIndex)

proc example*(_: type SlotQueueItem): SlotQueueItem =
  let request = StorageRequest.example
  let slot = Slot.example
  SlotQueueItem.init(request, slot.slotIndex.truncate(uint16))

proc example(_: type G1Point): G1Point =
  G1Point(x: UInt256.example, y: UInt256.example)

proc example(_: type G2Point): G2Point =
  G2Point(
    x: [UInt256.example, UInt256.example],
    y: [UInt256.example, UInt256.example]
  )

proc example*(_: type Groth16Proof): Groth16Proof =
  Groth16Proof(
    a: G1Point.example,
    b: G2Point.example,
    c: G1Point.example
  )

proc example*(_: type RandomChunker, blocks: int): Future[string] {.async.} =
  # doAssert blocks >= 3, "must be more than 3 blocks"
  let rng = Rng.instance()
  let chunker = RandomChunker.new(
    rng, size = DefaultBlockSize * blocks.NBytes, chunkSize = DefaultBlockSize)
  var data: seq[byte]
  while (let moar = await chunker.getBytes(); moar != []):
    data.add moar
  return byteutils.toHex(data)

proc example*(_: type RandomChunker): Future[string] {.async.} =
  await RandomChunker.example(3)