2022-05-19 19:56:03 +00:00
|
|
|
## Nim-Codex
|
2021-02-26 00:23:22 +00:00
|
|
|
## Copyright (c) 2021 Status Research & Development GmbH
|
|
|
|
## Licensed under either of
|
|
|
|
## * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE))
|
|
|
|
## * MIT license ([LICENSE-MIT](LICENSE-MIT))
|
|
|
|
## at your option.
|
|
|
|
## This file may not be copied, modified, or distributed except according to
|
|
|
|
## those terms.
|
|
|
|
|
2021-08-30 19:25:20 +00:00
|
|
|
# TODO: This is super inefficient and needs a rewrite, but it'll do for now
|
|
|
|
|
2022-03-18 22:17:51 +00:00
|
|
|
import pkg/upraises
|
|
|
|
|
|
|
|
push: {.upraises: [].}
|
2021-02-26 00:23:22 +00:00
|
|
|
|
2021-08-30 19:25:20 +00:00
|
|
|
import pkg/questionable
|
|
|
|
import pkg/questionable/results
|
2022-01-10 15:32:56 +00:00
|
|
|
import pkg/chronos
|
|
|
|
import pkg/libp2p except shuffle
|
2021-08-30 19:25:20 +00:00
|
|
|
|
2021-02-26 00:23:22 +00:00
|
|
|
import ./blocktype
|
feat: create logging proxy (#663)
* implement a logging proxy
The logging proxy:
- prevents the need to import chronicles (as well as export except toJson),
- prevents the need to override `writeValue` or use or import nim-json-seralization elsewhere in the codebase, allowing for sole use of utils/json for de/serialization,
- and handles json formatting correctly in chronicles json sinks
* Rename logging -> logutils to avoid ambiguity with common names
* clean up
* add setProperty for JsonRecord, remove nim-json-serialization conflict
* Allow specifying textlines and json format separately
Not specifying a LogFormat will apply the formatting to both textlines and json sinks.
Specifying a LogFormat will apply the formatting to only that sink.
* remove unneeded usages of std/json
We only need to import utils/json instead of std/json
* move serialization from rest/json to utils/json so it can be shared
* fix NoColors ambiguity
Was causing unit tests to fail on Windows.
* Remove nre usage to fix Windows error
Windows was erroring with `could not load: pcre64.dll`. Instead of fixing that error, remove the pcre usage :)
* Add logutils module doc
* Shorten logutils.formatIt for `NBytes`
Both json and textlines formatIt were not needed, and could be combined into one formatIt
* remove debug integration test config
debug output and logformat of json for integration test logs
* Use ## module doc to support docgen
* bump nim-poseidon2 to export fromBytes
Before the changes in this branch, fromBytes was likely being resolved by nim-stew, or other dependency. With the changes in this branch, that dependency was removed and fromBytes could no longer be resolved. By exporting fromBytes from nim-poseidon, the correct resolution is now happening.
* fixes to get compiling after rebasing master
* Add support for Result types being logged using formatIt
2024-01-23 07:35:03 +00:00
|
|
|
import ./logutils
|
2021-02-26 00:23:22 +00:00
|
|
|
|
|
|
|
export blocktype
|
|
|
|
|
|
|
|
const
  # One chunk maps onto one block, so the chunker's default matches the
  # block size from ./blocktype.
  DefaultChunkSize* = DefaultBlockSize

type
  # default reader type
  ChunkBuffer* = ptr UncheckedArray[byte]

  # Reader callback: fills `data` with at most `len` bytes and returns the
  # number of bytes actually produced; a result <= 0 signals end of input
  # (see `getBytes`).
  Reader* = proc(data: ChunkBuffer, len: int): Future[int] {.gcsafe, raises: [Defect].}

  # Reader that splits input data into fixed-size chunks
  Chunker* = ref object
    reader*: Reader      # Procedure called to actually read the data
    offset*: int         # Bytes read so far (position in the stream)
    chunkSize*: NBytes   # Size of each chunk
    pad*: bool           # Pad last chunk to chunkSize?

  # Both specializations share the Chunker representation; they differ only
  # in how their `reader` closure is built (see the `new` overloads below).
  FileChunker* = Chunker
  LPStreamChunker* = Chunker
|
|
|
|
|
|
|
|
proc getBytes*(c: Chunker): Future[seq[byte]] {.async.} =
  ## Fetch the next chunk of bytes from the instantiated chunker.
  ##
  ## Returns an empty seq once the underlying reader is exhausted.
  ## When `c.pad` is false a final short read is trimmed to its
  ## actual length; otherwise the chunk keeps its full `chunkSize`.
  ##
  var chunk = newSeq[byte](c.chunkSize.int)
  let bytesRead = await c.reader(cast[ChunkBuffer](addr chunk[0]), chunk.len)

  if bytesRead <= 0:
    return @[]

  c.offset += bytesRead

  # Trim the trailing chunk unless padding was requested.
  if not c.pad and bytesRead < chunk.len:
    chunk.setLen(bytesRead)

  return move chunk
|
2021-02-26 00:23:22 +00:00
|
|
|
|
2023-07-06 23:23:27 +00:00
|
|
|
proc new*(
    T: type Chunker,
    reader: Reader,
    chunkSize = DefaultChunkSize,
    pad = true
): Chunker =
  ## Construct a Chunker that pulls data through `reader`,
  ## producing `chunkSize`-sized chunks, optionally padding
  ## the final one (`pad`).
  ##
  result = Chunker(
    reader: reader,
    offset: 0,
    chunkSize: chunkSize,
    pad: pad)
|
2021-02-26 00:23:22 +00:00
|
|
|
|
2022-01-10 15:32:56 +00:00
|
|
|
proc new*(
    T: type LPStreamChunker,
    stream: LPStream,
    chunkSize = DefaultChunkSize,
    pad = true
): LPStreamChunker =
  ## create an LPStream chunker that reads fixed-size chunks
  ## from `stream`
  ##

  # Reader closure handed to the generic Chunker: fills `data` with up to
  # `len` bytes from the libp2p stream.
  proc reader(data: ChunkBuffer, len: int): Future[int]
      {.gcsafe, async, raises: [Defect].} =
    var res = 0
    try:
      # readOnce may deliver fewer bytes than requested, so keep reading
      # until the buffer is full or the stream ends.
      while res < len:
        res += await stream.readOnce(addr data[res], len - res)
    except LPStreamEOFError as exc:
      # End of stream is expected: fall through and return the partial count.
      trace "LPStreamChunker stream Eof", exc = exc.msg
    except CancelledError as error:
      # Propagate cancellation untouched — it must not be swallowed below.
      raise error
    except LPStreamError as error:
      error "LPStream error", err = error.msg
      raise error
    except CatchableError as exc:
      # Anything else is unexpected here; escalate to a Defect.
      error "CatchableError exception", exc = exc.msg
      raise newException(Defect, exc.msg)

    return res

  LPStreamChunker.new(
    reader = reader,
    chunkSize = chunkSize,
    pad = pad)
|
2021-02-26 00:23:22 +00:00
|
|
|
|
2022-01-10 15:32:56 +00:00
|
|
|
proc new*(
    T: type FileChunker,
    file: File,
    chunkSize = DefaultChunkSize,
    pad = true
): FileChunker =
  ## create the default File chunker
  ##

  # Reader closure handed to the generic Chunker: fills `data` with up to
  # `len` bytes from `file`.
  proc reader(data: ChunkBuffer, len: int): Future[int]
      {.gcsafe, async, raises: [Defect].} =
    var total = 0
    try:
      # readBuffer may return fewer bytes than requested; loop until the
      # buffer is full or EOF (res <= 0) is hit.
      while total < len:
        let res = file.readBuffer(addr data[total], len - total)
        if res <= 0:
          break

        total += res
    except IOError as exc:
      # Best effort: log and return whatever was read so far.
      trace "Exception reading file", exc = exc.msg
    except CancelledError as error:
      # Propagate cancellation untouched — it must not be swallowed below.
      raise error
    except CatchableError as exc:
      # Anything else is unexpected here; escalate to a Defect.
      error "CatchableError exception", exc = exc.msg
      raise newException(Defect, exc.msg)

    return total

  FileChunker.new(
    reader = reader,
    chunkSize = chunkSize,
    pad = pad)
|