diff --git a/codex/blocktype.nim b/codex/blocktype.nim
index 8e1d82e7..ce04644f 100644
--- a/codex/blocktype.nim
+++ b/codex/blocktype.nim
@@ -22,7 +22,9 @@ import pkg/questionable/results
 import ./errors
 
 const
-  BlockSize* = 31 * 64 * 4 # block size
+  # Size of blocks for storage / network exchange,
+  # should be divisible by 31 for PoR and by 64 for Leopard ECC
+  BlockSize* = 31 * 64 * 33
 
 type
   Block* = ref object of RootObj
diff --git a/codex/node.nim b/codex/node.nim
index 324e4285..b2d02238 100644
--- a/codex/node.nim
+++ b/codex/node.nim
@@ -138,7 +138,7 @@ proc retrieve*(
       except CatchableError as exc:
         trace "Exception prefetching blocks", exc = exc.msg
     #
-    asyncSpawn prefetchBlocks()
+    # asyncSpawn prefetchBlocks() - temporarily commented out
     #
     # Retrieve all blocks of the dataset sequentially from the local store or network
     return LPStream(StoreStream.new(node.blockStore, manifest, pad = false)).success
diff --git a/tests/codex/testnode.nim b/tests/codex/testnode.nim
index fb14d8c6..6f172faa 100644
--- a/tests/codex/testnode.nim
+++ b/tests/codex/testnode.nim
@@ -128,7 +128,7 @@ suite "Test Node":
     let
       stream = BufferStream.new()
       storeFut = node.store(stream)
-      oddChunkSize = math.trunc(BlockSize/1.618).int # Let's check that node.store can correctly rechunk these odd chunks
+      oddChunkSize = math.trunc(BlockSize/3.14).int # Let's check that node.store can correctly rechunk these odd chunks
       oddChunker = FileChunker.new(file = file, chunkSize = oddChunkSize, pad = false) # TODO: doesn't work with pad=tue
     var
       original: seq[byte]
diff --git a/tests/testIntegration.nim b/tests/testIntegration.nim
index 6af170c5..7da27efb 100644
--- a/tests/testIntegration.nim
+++ b/tests/testIntegration.nim
@@ -80,7 +80,7 @@ ethersuite "Integration tests":
 
   test "nodes negotiate contracts on the marketplace":
     proc sell =
-      let json = %*{"size": "0x1F00", "duration": "0x200", "minPrice": "0x300"}
+      let json = %*{"size": "0xFFFFF", "duration": "0x200", "minPrice": "0x300"}
       discard client.post(baseurl2 & "/sales/availability", $json)
 
     proc available: JsonNode =