import pkg/stew/byteutils
import pkg/codex/chunker
import pkg/codex/logutils
import pkg/chronos

import ../asynctest
import ./helpers

# Trying to use a CancelledError or LPStreamError value for toRaise
# will produce a compilation error:
# Error: only a 'ref object' can be raised
# This is because they are not ref objects but plain objects:
# CancelledError* = object of FutureError
# LPStreamError* = object of LPError
type CrashingStreamWrapper* = ref object of LPStream
  toRaise*: proc(): void {.gcsafe, raises: [CancelledError, LPStreamError].}
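  # toRaise is invoked from readOnce below, letting each test inject the
  # exception the stream should raise.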

method readOnce*(
    self: CrashingStreamWrapper, pbytes: pointer, nbytes: int
): Future[int] {.gcsafe, async: (raises: [CancelledError, LPStreamError]).} =
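  # Delegate to the injected callback; whatever it raises propagates to the
  # caller as if the stream itself had failed. If it returns normally,
  # readOnce reports zero bytes read.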
  self.toRaise()

asyncchecksuite "Chunking":
  test "should return proper size chunks":
    var offset = 0
    let contents = [1.byte, 2, 3, 4, 5, 6, 7, 8, 9, 0]
    proc reader(
        data: ChunkBuffer, len: int
    ): Future[int] {.gcsafe, async, raises: [Defect].} =
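      # Serve `contents` to the chunker in slices of at most `len` bytes,
      # advancing the captured `offset` as we go.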
      let read = min(contents.len - offset, len)
      if read == 0:
        return 0

      copyMem(data, unsafeAddr contents[offset], read)
      offset += read
      return read

    let chunker = Chunker.new(reader = reader, chunkSize = 2'nb)

    check:
      (await chunker.getBytes()) == [1.byte, 2]
      (await chunker.getBytes()) == [3.byte, 4]
      (await chunker.getBytes()) == [5.byte, 6]
      (await chunker.getBytes()) == [7.byte, 8]
      (await chunker.getBytes()) == [9.byte, 0]
      (await chunker.getBytes()) == []
      chunker.offset == offset

  test "should chunk LPStream":
    let stream = BufferStream.new()
    let chunker = LPStreamChunker.new(stream = stream, chunkSize = 2'nb)

    proc writer() {.async.} =
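      # Push the source bytes in uneven batches, then signal EOF so the
      # chunker's final read yields an empty chunk.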
      for d in [@[1.byte, 2, 3, 4], @[5.byte, 6, 7, 8], @[9.byte, 0]]:
        await stream.pushData(d)
      await stream.pushEof()
      await stream.close()

    let writerFut = writer()

    check:
      (await chunker.getBytes()) == [1.byte, 2]
      (await chunker.getBytes()) == [3.byte, 4]
      (await chunker.getBytes()) == [5.byte, 6]
      (await chunker.getBytes()) == [7.byte, 8]
      (await chunker.getBytes()) == [9.byte, 0]
      (await chunker.getBytes()) == []
      chunker.offset == 10

    await writerFut

  test "should chunk file":
    let
      path = currentSourcePath()
      file = open(path)
      fileChunker = FileChunker.new(file = file, chunkSize = 256'nb, pad = false)
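    # pad = false: the trailing chunk may be shorter than chunkSize, which the
    # loop below tolerates by only checking an upper bound on chunk length.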

    var data: seq[byte]
    while true:
      let buff = await fileChunker.getBytes()
      if buff.len <= 0:
        break

      check buff.len <= fileChunker.chunkSize.int
      data.add(buff)

    check:
      string.fromBytes(data) == readFile(path)
      fileChunker.offset == data.len

  proc raiseStreamException(exc: ref CancelledError | ref LPStreamError) {.async.} =
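    # Wire the given exception into the crashing stream and read once; the
    # chunker is expected to surface it, except for LPStreamEOFError, which it
    # treats as a normal end of stream (see the tests below).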
    let stream = CrashingStreamWrapper.new()
    let chunker = LPStreamChunker.new(stream = stream, chunkSize = 2'nb)

    stream.toRaise = proc(): void {.raises: [CancelledError, LPStreamError].} =
      raise exc
    discard (await chunker.getBytes())

  test "stream should forward LPStreamError":
    expect LPStreamError:
      await raiseStreamException(newException(LPStreamError, "test error"))

  test "stream should catch LPStreamEOFError":
    await raiseStreamException(newException(LPStreamEOFError, "test error"))

  test "stream should forward CancelledError":
    expect CancelledError:
      await raiseStreamException(newException(CancelledError, "test error"))