nim-dagger/tests/codex/testchunking.nim
Eric 27f585eb6f
feat: create logging proxy (#653)
* implement a logging proxy

The logging proxy:
- removes the need to import chronicles (and to `export chronicles except toJson`),
- removes the need to override `writeValue` or import nim-json-serialization elsewhere in the codebase, allowing sole use of utils/json for de/serialization,
- and handles json formatting correctly in chronicles json sinks (see the sketch below).
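A minimal sketch of the intended usage; the module path, `formatIt`, and the `Peer` type are assumptions based on this commit message, not verified API:

import pkg/codex/logutils  # instead of: import pkg/chronicles

type Peer = object
  id: string

# One declaration controls how a Peer renders in log output, so no
# chronicles import or `writeValue` override is needed at the call site.
logutils.formatIt(Peer): it.id

proc connect(peer: Peer) =
  trace "connecting to peer", peer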

* Rename logging -> logutils to avoid ambiguity with common names

* clean up

* add setProperty for JsonRecord, remove nim-json-serialization conflict

* Allow specifying textlines and json format separately

When no LogFormat is specified, the formatting applies to both the textlines and json sinks.

When a LogFormat is specified, the formatting applies only to that sink. Both forms are sketched below.
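A sketch of both forms, assuming the `formatIt`/`LogFormat` names from this commit message and a hypothetical `BlockAddress` type:

import pkg/codex/logutils

type BlockAddress = object
  cid: string

# No LogFormat given: the formatting applies to textlines and json alike.
logutils.formatIt(BlockAddress): it.cid

# LogFormat given: the formatting applies to the textlines sink only.
logutils.formatIt(LogFormat.textLines, BlockAddress): "block:" & it.cid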

* remove unneeded uses of std/json

Only utils/json needs to be imported now instead of std/json, as sketched below.
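An illustration of the import swap described above (the exact module path is assumed):

# import std/json              # before
import pkg/codex/utils/json    # after: one shared de/serialization module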

* move serialization from rest/json to utils/json so it can be shared

* fix NoColors ambiguity

The ambiguity was causing unit tests to fail on Windows.

* Remove nre usage to fix Windows error

Windows builds were failing with `could not load: pcre64.dll`. Instead of fixing that error, the pcre usage was removed altogether :) (a sketch of the idea follows).
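A hypothetical illustration of the idea; the actual replacement code is not shown in this commit message. nre binds to the PCRE shared library, which Windows failed to load, while std/strutils is pure Nim:

# import std/nre               # before: requires pcre64.dll at runtime
import std/strutils            # after: no dynamic library needed

# A simple prefix check often suffices where a regex was used:
doAssert "level=TRACE msg=start".startsWith("level=")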

* Add logutils module doc

* Shorten logutils.formatIt for `NBytes`

Separate json and textlines formatIt definitions were not needed; they could be combined into a single formatIt, as sketched below.
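A before/after sketch, under the same `formatIt` assumptions as above:

# before: one declaration per sink
# logutils.formatIt(LogFormat.textLines, NBytes): $it
# logutils.formatIt(LogFormat.json, NBytes): $it

# after: one declaration covering both sinks
logutils.formatIt(NBytes): $it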

* remove debug integration test config

Removes the debug output and json logformat that had been enabled for integration test logs.

* Use ## module doc to support docgen
2023-12-19 22:12:47 +00:00

import pkg/asynctest
import pkg/stew/byteutils
import pkg/codex/chunker
import pkg/codex/logutils
import pkg/chronos

import ./helpers

asyncchecksuite "Chunking":
  test "should return proper size chunks":
    var offset = 0
    let contents = [1.byte, 2, 3, 4, 5, 6, 7, 8, 9, 0]

    proc reader(data: ChunkBuffer, len: int): Future[int]
        {.gcsafe, async, raises: [Defect].} =
      let read = min(contents.len - offset, len)
      if read == 0:
        return 0

      copyMem(data, unsafeAddr contents[offset], read)
      offset += read
      return read

    let chunker = Chunker.new(
      reader = reader,
      chunkSize = 2'nb)

    check:
      (await chunker.getBytes()) == [1.byte, 2]
      (await chunker.getBytes()) == [3.byte, 4]
      (await chunker.getBytes()) == [5.byte, 6]
      (await chunker.getBytes()) == [7.byte, 8]
      (await chunker.getBytes()) == [9.byte, 0]
      (await chunker.getBytes()) == []
      chunker.offset == offset

  test "should chunk LPStream":
    let stream = BufferStream.new()
    let chunker = LPStreamChunker.new(
      stream = stream,
      chunkSize = 2'nb)

    proc writer() {.async.} =
      for d in [@[1.byte, 2, 3, 4], @[5.byte, 6, 7, 8], @[9.byte, 0]]:
        await stream.pushData(d)
      await stream.pushEof()
      await stream.close()

    let writerFut = writer()
    check:
      (await chunker.getBytes()) == [1.byte, 2]
      (await chunker.getBytes()) == [3.byte, 4]
      (await chunker.getBytes()) == [5.byte, 6]
      (await chunker.getBytes()) == [7.byte, 8]
      (await chunker.getBytes()) == [9.byte, 0]
      (await chunker.getBytes()) == []
      chunker.offset == 10

    await writerFut

  test "should chunk file":
    let
      (path, _, _) = instantiationInfo(-2, fullPaths = true) # get this file's name
      file = open(path)
      fileChunker = FileChunker.new(file = file, chunkSize = 256'nb, pad = false)

    var data: seq[byte]
    while true:
      let buff = await fileChunker.getBytes()
      if buff.len <= 0:
        break

      check buff.len <= fileChunker.chunkSize.int
      data.add(buff)

    check:
      string.fromBytes(data) == readFile(path)
      fileChunker.offset == data.len
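A usage sketch built on the same API the tests above exercise; the file path is hypothetical:

import pkg/chronos
import pkg/codex/chunker

proc dumpChunks(path: string) {.async.} =
  let
    file = open(path)
    chunker = FileChunker.new(file = file, chunkSize = 256'nb, pad = false)
  while true:
    let buff = await chunker.getBytes()  # empty seq signals end of file
    if buff.len <= 0:
      break
    echo "read chunk of ", buff.len, " bytes"
  file.close()

waitFor dumpChunks("path/to/some/file")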