import pkg/chronos

import pkg/questionable/results

import pkg/codex/[
  streams,
  stores,
  indexingstrategy,
  manifest,
  blocktype as bt]

import ../asynctest
import ./examples
import ./helpers

asyncchecksuite "StoreStream":
  var
    manifest: Manifest
    store: BlockStore
    stream: StoreStream

  ## Returns true when the first `size` bytes of `buf` hold the
  ## consecutive values `start`, `start + 1`, ..., `start + size - 1`.
  proc sequentialBytes(buf: seq[byte], size: int, start: int): bool =
    result = true
    for offset in 0 ..< size:
      if int(buf[offset]) != start + offset:
        return false

  let
    # 100 bytes of strictly increasing data: 10 chunks of `chunkSize` bytes,
    # so every test below knows the exact value expected at each position.
    data = [byte 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
            10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
            20, 21, 22, 23, 24, 25, 26, 27, 28, 29,
            30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
            40, 41, 42, 43, 44, 45, 46, 47, 48, 49,
            50, 51, 52, 53, 54, 55, 56, 57, 58, 59,
            60, 61, 62, 63, 64, 65, 66, 67, 68, 69,
            70, 71, 72, 73, 74, 75, 76, 77, 78, 79,
            80, 81, 82, 83, 84, 85, 86, 87, 88, 89,
            90, 91, 92, 93, 94, 95, 96, 97, 98, 99]
    chunkSize = 10

  teardown:
    await stream.close()

  setup:
    # Fresh in-memory store + manifest + stream per test, so each test
    # starts reading from offset 0.
    store = CacheStore.new()
    manifest = await storeDataGetManifest(
      store, MockChunker.new(dataset = data, chunkSize = chunkSize))
    stream = StoreStream.new(store, manifest)

  test "Read all blocks < blockSize":
    var
      buf = newSeq[byte](8)
      n = 0

    while not stream.atEof:
      let bytesRead = await stream.readOnce(addr buf[0], buf.len)

      # 100 bytes in 8-byte reads: twelve full reads, then a final 4-byte one.
      if not stream.atEof:
        check bytesRead == 8
      else:
        check bytesRead == 4

      check sequentialBytes(buf, bytesRead, n)
      n += bytesRead

  test "Read all blocks == blockSize":
    var
      buf = newSeq[byte](10)
      n = 0

    while not stream.atEof:
      let bytesRead = await stream.readOnce(addr buf[0], buf.len)

      # Buffer size matches the chunk size exactly, so every read is full.
      check bytesRead == 10
      check sequentialBytes(buf, bytesRead, n)
      n += bytesRead

  test "Read all blocks > blockSize":
    var
      buf = newSeq[byte](11)
      n = 0

    while not stream.atEof:
      let bytesRead = await stream.readOnce(addr buf[0], buf.len)

      # 100 bytes in 11-byte reads: nine full reads, then a final 1-byte one.
      if not stream.atEof:
        check bytesRead == 11
      else:
        check bytesRead == 1

      check sequentialBytes(buf, bytesRead, n)
      n += bytesRead

  test "Read exact bytes within block boundary":
    var buf = newSeq[byte](5)

    await stream.readExactly(addr buf[0], 5)
    check sequentialBytes(buf, 5, 0)

  test "Read exact bytes outside of block boundary":
    var buf = newSeq[byte](15)

    # 15 bytes spans one full 10-byte block plus part of the next.
    await stream.readExactly(addr buf[0], 15)
    check sequentialBytes(buf, 15, 0)
suite "StoreStream - Size Tests":

  var stream: StoreStream

  teardown:
    await stream.close()

  test "Should return dataset size as stream size":
    # Plain (unprotected) manifest: stream size equals the dataset size.
    let manifest = Manifest.new(
      treeCid = Cid.example,
      datasetSize = 80.NBytes,
      blockSize = 10.NBytes
    )

    stream = StoreStream.new(CacheStore.new(), manifest)

    check stream.size == 80

  test "Should not count parity/padding bytes as part of stream size":
    # Erasure-protected manifest: `datasetSize` includes parity blocks, but
    # the stream must expose only the original (pre-encoding) byte count.
    let protectedManifest = Manifest.new(
      treeCid = Cid.example,
      datasetSize = 120.NBytes, # size including parity bytes
      blockSize = 10.NBytes,
      version = CIDv1,
      hcodec = Sha256HashCodec,
      codec = BlockCodec,
      ecK = 2,
      ecM = 1,
      originalTreeCid = Cid.example,
      originalDatasetSize = 80.NBytes, # size without parity bytes
      strategy = StrategyType.SteppedStrategy
    )

    stream = StoreStream.new(CacheStore.new(), protectedManifest)

    check stream.size == 80
|