nim-codex/dagger/chunker.nim

## Nim-Dagger
## Copyright (c) 2021 Status Research & Development GmbH
## Licensed under either of
## * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE))
## * MIT license ([LICENSE-MIT](LICENSE-MIT))
## at your option.
## This file may not be copied, modified, or distributed except according to
## those terms.

# TODO: This is super inefficient and merits a rewrite, but it'll do for now

import std/sequtils
import ./p2p/rng
import ./blocktype
export blocktype

const
  DefaultChunkSize*: int64 = 1024 * 256 # 256 KiB per chunk by default

type
  # default reader type
  Reader* = proc(data: var openArray[byte], offset: Natural = 0): int {.gcsafe, closure.}

  ChunkerType* {.pure.} = enum
    SizedChunker
    RabinChunker

  Chunker* = ref object of RootObj
    reader*: Reader
    size*: Natural
    pos*: Natural
    case kind*: ChunkerType:
    of SizedChunker:
      chunkSize*: Natural
      pad*: bool # pad last block if less than size
    of RabinChunker:
      discard

proc getBytes*(c: Chunker): seq[byte] =
  ## returns the next chunk of bytes from
  ## the instantiated chunker
  ##

  if c.pos >= c.size:
    return

  var bytes = newSeq[byte](c.chunkSize)
  let read = c.reader(bytes, c.pos)
  c.pos += read

  # truncate the last chunk to the bytes actually read,
  # unless padding was requested
  if not c.pad and bytes.len != read:
    bytes.setLen(read)

  return bytes

iterator items*(c: Chunker): seq[byte] =
  ## iterate over the chunker's chunks until
  ## the underlying reader is exhausted
  ##
  while true:
    let chunk = c.getBytes()
    if chunk.len <= 0:
      break

    yield chunk
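
# Illustrative consumption sketch (not part of the original module): the
# iterator above repeatedly calls `getBytes` until it returns an empty seq,
# so callers typically just loop over the chunker. `process` below is a
# hypothetical callback standing in for whatever the caller does with a chunk.
#
#   for chunk in chunker:
#     process(chunk)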

proc new*(
  T: type Chunker,
  kind = ChunkerType.SizedChunker,
  reader: Reader,
  size: Natural,
  chunkSize = DefaultChunkSize,
  pad = false): T =
  ## create a new chunker of the given kind,
  ## backed by the supplied reader
  ##

  var chunker = Chunker(
    kind: kind,
    reader: reader,
    size: size)

  if kind == ChunkerType.SizedChunker:
    chunker.pad = pad
    chunker.chunkSize = chunkSize

  return chunker

proc newRandomChunker*(
  rng: Rng,
  size: int64,
  kind = ChunkerType.SizedChunker,
  chunkSize = DefaultChunkSize,
  pad = false): Chunker =
  ## create a chunker that produces
  ## random data
  ##

  proc reader(data: var openArray[byte], offset: Natural = 0): int =
    var alpha = toSeq(byte('A')..byte('z'))

    var read = 0
    while read <= data.high:
      rng.shuffle(alpha)
      for a in alpha:
        if read > data.high:
          break

        data[read] = a
        read.inc

    return read

  Chunker.new(
    kind = ChunkerType.SizedChunker,
    reader = reader,
    size = size,
    pad = pad,
    chunkSize = chunkSize)
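
# Minimal usage sketch (illustrative, not part of the original module):
# produce ~1 MiB of random data in default-sized chunks. How an Rng is
# obtained depends on ./p2p/rng; `rng` below stands for whatever instance
# that module provides.
#
#   let chunker = newRandomChunker(rng, size = 1024 * 1024)
#   for chunk in chunker:
#     doAssert chunk.len > 0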

proc newFileChunker*(
  file: File,
  kind = ChunkerType.SizedChunker,
  chunkSize = DefaultChunkSize,
  pad = false): Chunker =
  ## create the default File chunker
  ##

  proc reader(data: var openArray[byte], offset: Natural = 0): int =
    # the file is read sequentially from its current position;
    # the offset parameter is unused here
    return file.readBytes(data, 0, data.len)

  Chunker.new(
    kind = ChunkerType.SizedChunker,
    reader = reader,
    size = file.getFileSize(),
    pad = pad,
    chunkSize = chunkSize)
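
# Minimal end-to-end sketch (not part of the original module): split a file
# into fixed-size chunks and report their sizes. The file name "input.bin"
# and the 4 KiB chunk size are placeholders.
when isMainModule:
  let file = open("input.bin")
  defer: file.close()

  let chunker = newFileChunker(file, chunkSize = 4096)
  for chunk in chunker:
    echo "read chunk of ", chunk.len, " bytes"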