nim-codex/tests/dagger/testchunking.nim
Dmitriy Ryajov fbe161a073
Node setup (#32)
* set up basic nim node

* adding http utils

* adding confutils

* rough rest api proto

* adding missing deps

* turn tls emulation off

* adding toml serialization

* wip

* adding missing deps

* make sure to clean old state in teardown

* adding file upload rest endpoint

* renaming blockexchange to networkstore

* updating nim-presto

* updating libp2p

* wip adding streaming upload

* reworked chunking

* bump to latest unstable

* adding asyncfutures stream

* make streamable

* deleting unused files

* reworking stores api

* use new stores api

* rework blockset and remove blockstream

* don't return option from constructor

* rework chunker

* wip implement upload

* fix tests

* move unrelated logic to engine

* don't print entire message

* logging

* basic encode/decode to/from dag-pb

* add basic upload/download support

* fix tests

* renaming blockset to manifest

* don't pass config to node

* remove config and use new manifest

* wip: make endpoints more reliable

* wip: adding node tests

* include correct manifest test

* removing asyncfutures

* proper chunking of files

* simplify stream reading

* test with encoding/decoding with many blocks

* add block storing tests

* adding retrieval test

* add logging

* tidy up chunker

* tidy up manifest and node

* use default chunk size

* fix tests

* fix tests

* make sure Eof is set properly

* wip

* minor cleanup

* add file utils

* cleanup config

* split out DaggerServer and "main"

* remove events since they are not used

* add broadcast method to network peer

* add and wire localstore

* use localstore in the node

* wip

* logging

* move file utils

* use the constant

* updating deps

* fix memstore

* use latest libp2p unstable

* fix tests

* rework block streaming

* don't fail storing if the block already exists

* add helper info endpoint

* correct comment

* rename localstore to fsstore

* fix tests

* remove unused tests

* add test to retrieve one block

* move some test files around

* consolidate setup

* Update dagger/blockexchange/engine.nim

Co-authored-by: Tanguy <tanguy@status.im>

* typo

* better block path handling

* don't inherit rootobj

* remove useless template

* Update tests/dagger/blockexc/testblockexc.nim

Co-authored-by: markspanbroek <mark@spanbroek.net>

* use isMainModule

* use proper flag for started/stopped

* cleanup optional use

* wrap in isMainModule

* use `cancelAndAwait`

* remove unused imports

* wip

* don't use optional

* use functional error api

* rework store tests and add fs tests

* Block.new() to Block.init()

* don't use optional for engine blocks

* use result instead of optional for getBlock

* remove unused imports

* move stopping servers to `shutdown`

* use result instead of optional

* rework with results

* fix tests

* use waitFor in signal handlers

* error helper

* use `?` and mapFailure where possible

* remove unnecessary `=?`

* improve empty cid digest initialization

Co-authored-by: Tanguy <tanguy@status.im>
Co-authored-by: markspanbroek <mark@spanbroek.net>
2022-01-10 09:32:56 -06:00


import pkg/asynctest
import pkg/stew/byteutils
import pkg/dagger/chunker
import pkg/chronicles
import pkg/chronos
import pkg/libp2p
suite "Chunking":
test "should return proper size chunks":
var offset = 0
let contents = [1.byte, 2, 3, 4, 5, 6, 7, 8, 9, 0]
proc reader(data: ChunkBuffer, len: int): Future[int]
{.gcsafe, async, raises: [Defect].} =
if offset >= contents.len:
return 0
copyMem(data, unsafeAddr contents[offset], len)
offset += 2
return len
let chunker = Chunker.new(
reader = reader,
chunkSize = 2)
check:
(await chunker.getBytes()) == [1.byte, 2]
(await chunker.getBytes()) == [3.byte, 4]
(await chunker.getBytes()) == [5.byte, 6]
(await chunker.getBytes()) == [7.byte, 8]
(await chunker.getBytes()) == [9.byte, 0]
(await chunker.getBytes()) == []
test "should chunk LPStream":
var offset = 0
let stream = BufferStream.new()
let chunker = LPStreamChunker.new(
stream = stream,
chunkSize = 2)
proc writer() {.async.} =
for d in [@[1.byte, 2, 3, 4], @[5.byte, 6, 7, 8], @[9.byte, 0]]:
await stream.pushData(d)
await stream.pushEof()
await stream.close()
let writerFut = writer()
check:
(await chunker.getBytes()) == [1.byte, 2]
(await chunker.getBytes()) == [3.byte, 4]
(await chunker.getBytes()) == [5.byte, 6]
(await chunker.getBytes()) == [7.byte, 8]
(await chunker.getBytes()) == [9.byte, 0]
(await chunker.getBytes()) == []
await writerFut
test "should chunk file":
let
(path, _, _) = instantiationInfo(-2, fullPaths = true) # get this file's name
file = open(path)
fileChunker = FileChunker.new(file = file, chunkSize = 256)
var data: seq[byte]
while true:
let buff = await fileChunker.getBytes()
if buff.len <= 0:
break
check buff.len <= fileChunker.chunkSize
data.add(buff)
check string.fromBytes(data) == readFile(path)
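
  # Illustrative sketch, not part of the original suite: a hedged example of a
  # reader callback that respects the requested length and never copies past
  # the end of its backing buffer. It assumes the same Chunker API exercised
  # above, where the callback fills `data` with at most `len` bytes and
  # returns 0 to signal end of data.
  test "should chunk with a length-respecting reader (sketch)":
    var pos = 0
    let source = [1.byte, 2, 3, 4, 5, 6]

    proc boundedReader(data: ChunkBuffer, len: int): Future[int]
      {.gcsafe, async, raises: [Defect].} =

      # copy only as many bytes as are left in the backing buffer
      let count = min(len, source.len - pos)
      if count <= 0:
        return 0

      copyMem(data, unsafeAddr source[pos], count)
      pos += count
      return count

    let chunker = Chunker.new(
      reader = boundedReader,
      chunkSize = 2)

    check:
      (await chunker.getBytes()) == [1.byte, 2]
      (await chunker.getBytes()) == [3.byte, 4]
      (await chunker.getBytes()) == [5.byte, 6]
      (await chunker.getBytes()) == []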