2022-04-05 19:12:59 +00:00
|
|
|
import std/sequtils
|
2023-11-14 16:53:06 +00:00
|
|
|
import std/sugar
|
2022-04-05 19:12:59 +00:00
|
|
|
|
2024-01-10 21:25:57 +00:00
|
|
|
import pkg/asynctest/chronos/unittest
|
2022-04-05 19:12:59 +00:00
|
|
|
import pkg/chronos
|
2023-08-21 02:51:04 +00:00
|
|
|
import pkg/datastore
|
2022-04-05 19:12:59 +00:00
|
|
|
import pkg/questionable/results
|
|
|
|
|
2022-05-19 19:56:03 +00:00
|
|
|
import pkg/codex/erasure
|
|
|
|
import pkg/codex/manifest
|
|
|
|
import pkg/codex/stores
|
|
|
|
import pkg/codex/blocktype as bt
|
|
|
|
import pkg/codex/rng
|
2023-11-14 16:53:06 +00:00
|
|
|
import pkg/codex/utils
|
2022-04-05 19:12:59 +00:00
|
|
|
|
|
|
|
import ./helpers
|
|
|
|
|
2024-01-08 22:52:46 +00:00
|
|
|
suite "Erasure encode/decode":
  # 123 blocks of 1 KiB. 123 is deliberately not a multiple of the
  # `buffers` (K) values used below, so the encoder's rounding/padding
  # paths are exercised ("weird geometry").
  const BlockSize = 1024'nb
  const dataSetSize = BlockSize * 123 # weird geometry

  # Shared fixtures, (re)initialized in `setup` before each test.
  var rng: Rng            # RNG used to pick which blocks to drop
  var chunker: Chunker    # yields `dataSetSize` bytes in `BlockSize` chunks
  var manifest: Manifest  # manifest of the original (unencoded) dataset
  var store: BlockStore   # holds both data and parity blocks
  var erasure: Erasure    # erasure coder under test
|
|
|
|
|
|
|
|
  setup:
    # Fresh, fully in-memory state for every test. Order matters:
    # the chunker needs `rng`, the store needs the datastores, and the
    # manifest is produced by ingesting the chunker's data into the store.
    let
      repoDs = SQLiteDatastore.new(Memory).tryGet()
      metaDs = SQLiteDatastore.new(Memory).tryGet()
    rng = Rng.instance()
    chunker = RandomChunker.new(rng, size = dataSetSize, chunkSize = BlockSize)
    store = RepoStore.new(repoDs, metaDs)
    erasure = Erasure.new(store, leoEncoderProvider, leoDecoderProvider)
    # Ingest the random data and keep its manifest; tests delete blocks
    # from `store` and then try to recover them via decode.
    manifest = await storeDataGetManifest(store, chunker)
|
2022-04-05 19:12:59 +00:00
|
|
|
|
2024-01-10 21:25:57 +00:00
|
|
|
proc encode(
|
|
|
|
buffers, parity: int): Future[Manifest] {.async:
|
|
|
|
(handleException: true).} =
|
2022-04-05 19:12:59 +00:00
|
|
|
let
|
|
|
|
encoded = (await erasure.encode(
|
|
|
|
manifest,
|
2024-01-11 16:45:23 +00:00
|
|
|
buffers.Natural,
|
|
|
|
parity.Natural)).tryGet()
|
2022-04-05 19:12:59 +00:00
|
|
|
|
|
|
|
check:
|
2023-11-14 12:02:17 +00:00
|
|
|
encoded.blocksCount mod (buffers + parity) == 0
|
2023-11-14 16:53:06 +00:00
|
|
|
encoded.rounded == roundUp(manifest.blocksCount, buffers)
|
2022-04-05 19:12:59 +00:00
|
|
|
encoded.steps == encoded.rounded div buffers
|
|
|
|
|
2022-05-10 12:10:17 +00:00
|
|
|
return encoded
|
|
|
|
|
2022-11-14 13:50:00 +00:00
|
|
|
test "Should tolerate losing M data blocks in a single random column":
|
2022-05-10 12:10:17 +00:00
|
|
|
const
|
|
|
|
buffers = 20
|
|
|
|
parity = 10
|
|
|
|
|
|
|
|
let encoded = await encode(buffers, parity)
|
|
|
|
|
2022-04-05 19:12:59 +00:00
|
|
|
var
|
2023-11-14 16:53:06 +00:00
|
|
|
column = rng.rand((encoded.blocksCount div encoded.steps) - 1) # random column
|
2023-11-14 12:02:17 +00:00
|
|
|
dropped: seq[int]
|
2022-04-05 19:12:59 +00:00
|
|
|
|
2023-03-10 07:02:54 +00:00
|
|
|
for _ in 0..<encoded.ecM:
|
2023-11-14 12:02:17 +00:00
|
|
|
dropped.add(column)
|
|
|
|
(await store.delBlock(encoded.treeCid, column)).tryGet()
|
|
|
|
(await store.delBlock(manifest.treeCid, column)).tryGet()
|
2023-11-14 16:53:06 +00:00
|
|
|
column = (column + encoded.steps) mod encoded.blocksCount # wrap around
|
2022-04-05 19:12:59 +00:00
|
|
|
|
|
|
|
var
|
|
|
|
decoded = (await erasure.decode(encoded)).tryGet()
|
|
|
|
|
|
|
|
check:
|
2023-11-14 12:02:17 +00:00
|
|
|
decoded.treeCid == manifest.treeCid
|
|
|
|
decoded.treeCid == encoded.originalTreeCid
|
|
|
|
decoded.blocksCount == encoded.originalBlocksCount
|
2022-04-05 19:12:59 +00:00
|
|
|
|
|
|
|
for d in dropped:
|
2023-11-14 16:53:06 +00:00
|
|
|
if d < manifest.blocksCount: # we don't support returning parity blocks yet
|
|
|
|
let present = await store.hasBlock(manifest.treeCid, d)
|
|
|
|
check present.tryGet()
|
2022-04-05 19:12:59 +00:00
|
|
|
|
2022-11-14 13:50:00 +00:00
|
|
|
test "Should not tolerate losing more than M data blocks in a single random column":
|
2022-04-05 19:12:59 +00:00
|
|
|
const
|
|
|
|
buffers = 20
|
|
|
|
parity = 10
|
|
|
|
|
2022-05-10 12:10:17 +00:00
|
|
|
let encoded = await encode(buffers, parity)
|
2022-04-05 19:12:59 +00:00
|
|
|
|
|
|
|
var
|
2023-11-14 16:53:06 +00:00
|
|
|
column = rng.rand((encoded.blocksCount div encoded.steps) - 1) # random column
|
2023-11-14 12:02:17 +00:00
|
|
|
dropped: seq[int]
|
2022-04-05 19:12:59 +00:00
|
|
|
|
2023-03-10 07:02:54 +00:00
|
|
|
for _ in 0..<encoded.ecM + 1:
|
2023-11-14 12:02:17 +00:00
|
|
|
dropped.add(column)
|
|
|
|
(await store.delBlock(encoded.treeCid, column)).tryGet()
|
|
|
|
(await store.delBlock(manifest.treeCid, column)).tryGet()
|
2023-11-14 16:53:06 +00:00
|
|
|
column = (column + encoded.steps) mod encoded.blocksCount # wrap around
|
2022-04-05 19:12:59 +00:00
|
|
|
|
|
|
|
var
|
|
|
|
decoded: Manifest
|
|
|
|
|
|
|
|
expect ResultFailure:
|
|
|
|
decoded = (await erasure.decode(encoded)).tryGet()
|
|
|
|
|
|
|
|
for d in dropped:
|
2023-11-14 12:02:17 +00:00
|
|
|
let present = await store.hasBlock(manifest.treeCid, d)
|
2022-07-28 00:39:17 +00:00
|
|
|
check not present.tryGet()
|
2022-04-05 19:12:59 +00:00
|
|
|
|
2022-11-14 13:50:00 +00:00
|
|
|
test "Should tolerate losing M data blocks in M random columns":
|
2022-04-05 19:12:59 +00:00
|
|
|
const
|
|
|
|
buffers = 20
|
|
|
|
parity = 10
|
|
|
|
|
2022-05-10 12:10:17 +00:00
|
|
|
let encoded = await encode(buffers, parity)
|
2022-04-05 19:12:59 +00:00
|
|
|
|
|
|
|
var
|
|
|
|
blocks: seq[int]
|
|
|
|
offset = 0
|
|
|
|
|
|
|
|
while offset < encoded.steps - 1:
|
|
|
|
let
|
2023-11-14 12:02:17 +00:00
|
|
|
blockIdx = toSeq(countup(offset, encoded.blocksCount - 1, encoded.steps))
|
2022-04-05 19:12:59 +00:00
|
|
|
|
2023-03-10 07:02:54 +00:00
|
|
|
for _ in 0..<encoded.ecM:
|
2022-04-05 19:12:59 +00:00
|
|
|
blocks.add(rng.sample(blockIdx, blocks))
|
|
|
|
offset.inc
|
|
|
|
|
|
|
|
for idx in blocks:
|
2023-11-14 12:02:17 +00:00
|
|
|
(await store.delBlock(encoded.treeCid, idx)).tryGet()
|
|
|
|
(await store.delBlock(manifest.treeCid, idx)).tryGet()
|
|
|
|
discard
|
2022-04-05 19:12:59 +00:00
|
|
|
|
2022-05-10 12:10:17 +00:00
|
|
|
discard (await erasure.decode(encoded)).tryGet()
|
2022-04-05 19:12:59 +00:00
|
|
|
|
2023-11-14 12:02:17 +00:00
|
|
|
for d in 0..<manifest.blocksCount:
|
|
|
|
let present = await store.hasBlock(manifest.treeCid, d)
|
2022-07-28 00:39:17 +00:00
|
|
|
check present.tryGet()
|
2022-04-05 19:12:59 +00:00
|
|
|
|
2022-11-14 13:50:00 +00:00
|
|
|
test "Should not tolerate losing more than M data blocks in M random columns":
|
2022-04-05 19:12:59 +00:00
|
|
|
const
|
|
|
|
buffers = 20
|
|
|
|
parity = 10
|
|
|
|
|
2022-05-10 12:10:17 +00:00
|
|
|
let encoded = await encode(buffers, parity)
|
2022-04-05 19:12:59 +00:00
|
|
|
|
|
|
|
var
|
|
|
|
blocks: seq[int]
|
|
|
|
offset = 0
|
|
|
|
|
2023-11-14 16:53:06 +00:00
|
|
|
while offset < encoded.steps:
|
2022-04-05 19:12:59 +00:00
|
|
|
let
|
2023-11-14 12:02:17 +00:00
|
|
|
blockIdx = toSeq(countup(offset, encoded.blocksCount - 1, encoded.steps))
|
2022-04-05 19:12:59 +00:00
|
|
|
|
2023-03-10 07:02:54 +00:00
|
|
|
for _ in 0..<encoded.ecM + 1: # NOTE: the +1
|
2022-04-07 23:08:43 +00:00
|
|
|
var idx: int
|
|
|
|
while true:
|
|
|
|
idx = rng.sample(blockIdx, blocks)
|
2023-11-14 12:02:17 +00:00
|
|
|
let blk = (await store.getBlock(encoded.treeCid, idx)).tryGet()
|
|
|
|
if not blk.isEmpty:
|
2022-04-07 23:08:43 +00:00
|
|
|
break
|
|
|
|
|
|
|
|
blocks.add(idx)
|
2022-04-05 19:12:59 +00:00
|
|
|
offset.inc
|
|
|
|
|
|
|
|
for idx in blocks:
|
2023-11-14 12:02:17 +00:00
|
|
|
(await store.delBlock(encoded.treeCid, idx)).tryGet()
|
|
|
|
(await store.delBlock(manifest.treeCid, idx)).tryGet()
|
|
|
|
discard
|
2022-04-05 19:12:59 +00:00
|
|
|
|
|
|
|
var
|
|
|
|
decoded: Manifest
|
|
|
|
|
|
|
|
expect ResultFailure:
|
|
|
|
decoded = (await erasure.decode(encoded)).tryGet()
|
|
|
|
|
2022-11-14 13:50:00 +00:00
|
|
|
test "Should tolerate losing M (a.k.a row) contiguous data blocks":
|
2022-04-05 19:12:59 +00:00
|
|
|
const
|
|
|
|
buffers = 20
|
|
|
|
parity = 10
|
|
|
|
|
2022-05-10 12:10:17 +00:00
|
|
|
let encoded = await encode(buffers, parity)
|
2022-04-05 19:12:59 +00:00
|
|
|
|
2023-11-14 16:53:06 +00:00
|
|
|
# loose M original (systematic) symbols/blocks
|
|
|
|
for b in 0..<(encoded.steps * encoded.ecM):
|
2023-11-14 12:02:17 +00:00
|
|
|
(await store.delBlock(encoded.treeCid, b)).tryGet()
|
|
|
|
(await store.delBlock(manifest.treeCid, b)).tryGet()
|
2022-04-05 19:12:59 +00:00
|
|
|
|
2022-05-10 12:10:17 +00:00
|
|
|
discard (await erasure.decode(encoded)).tryGet()
|
2022-04-05 19:12:59 +00:00
|
|
|
|
2023-11-14 12:02:17 +00:00
|
|
|
for d in 0..<manifest.blocksCount:
|
|
|
|
let present = await store.hasBlock(manifest.treeCid, d)
|
2022-07-28 00:39:17 +00:00
|
|
|
check present.tryGet()
|
2022-04-05 19:12:59 +00:00
|
|
|
|
2022-11-14 13:50:00 +00:00
|
|
|
test "Should tolerate losing M (a.k.a row) contiguous parity blocks":
|
2022-04-05 19:12:59 +00:00
|
|
|
const
|
|
|
|
buffers = 20
|
|
|
|
parity = 10
|
|
|
|
|
2023-11-14 16:53:06 +00:00
|
|
|
let
|
|
|
|
encoded = await encode(buffers, parity)
|
|
|
|
blocks = collect:
|
|
|
|
for i in 0..encoded.blocksCount:
|
|
|
|
i
|
2022-04-05 19:12:59 +00:00
|
|
|
|
2023-11-14 16:53:06 +00:00
|
|
|
# loose M parity (all!) symbols/blocks from the dataset
|
|
|
|
for b in blocks[^(encoded.steps * encoded.ecM)..^1]:
|
2023-11-14 12:02:17 +00:00
|
|
|
(await store.delBlock(encoded.treeCid, b)).tryGet()
|
|
|
|
(await store.delBlock(manifest.treeCid, b)).tryGet()
|
2022-04-05 19:12:59 +00:00
|
|
|
|
2022-05-10 12:10:17 +00:00
|
|
|
discard (await erasure.decode(encoded)).tryGet()
|
2022-04-05 19:12:59 +00:00
|
|
|
|
2023-11-14 12:02:17 +00:00
|
|
|
for d in 0..<manifest.blocksCount:
|
|
|
|
let present = await store.hasBlock(manifest.treeCid, d)
|
2022-07-28 00:39:17 +00:00
|
|
|
check present.tryGet()
|
2022-05-10 11:50:22 +00:00
|
|
|
|
|
|
|
test "handles edge case of 0 parity blocks":
|
|
|
|
const
|
|
|
|
buffers = 20
|
|
|
|
parity = 0
|
|
|
|
|
2022-05-10 12:10:17 +00:00
|
|
|
let encoded = await encode(buffers, parity)
|
2022-05-10 11:50:22 +00:00
|
|
|
|
2022-05-10 12:10:17 +00:00
|
|
|
discard (await erasure.decode(encoded)).tryGet()
|