Changes required for reasonable upload/download speed (#265)
Increase blocksize from ~8KiB to ~64KiB and remove useless unbounded prefetching on download.

* increased upload/download speed on my Win10 Zen3 box to 50/150 MB/s (with FSStore)
* made manifest files 8x smaller
* two more changes in the tests to make them pass

Closes #263
parent 106b04340a
commit 6e6f40016c
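The numbers quoted in the commit message follow from the block size constant changed below. A small standalone Nim sketch of the arithmetic (not part of the commit; OldBlockSize/NewBlockSize are illustrative names, not identifiers from the repo):

# Back-of-the-envelope check of the sizes quoted in the commit message.
const
  OldBlockSize = 31 * 64 * 4    # 7936 bytes, just under 8 KiB
  NewBlockSize = 31 * 64 * 33   # 65472 bytes, just under 64 KiB

echo OldBlockSize, " ", NewBlockSize
echo NewBlockSize div OldBlockSize  # 8 -> roughly 8x fewer blocks per dataset,
                                    # consistent with the "8x smaller" manifests above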
@@ -22,7 +22,9 @@ import pkg/questionable/results
 import ./errors
 
 const
-  BlockSize* = 31 * 64 * 4 # block size
+  # Size of blocks for storage / network exchange,
+  # should be divisible by 31 for PoR and by 64 for Leopard ECC
+  BlockSize* = 31 * 64 * 33
 
 type
   Block* = ref object of RootObj
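The comment added above states two alignment requirements: divisible by 31 for PoR and by 64 for Leopard ECC. A compile-time guard along these lines (illustrative only, not present in the repo) would make the constraint self-enforcing:

# Illustrative compile-time check for the constraints described in the new comment.
const BlockSize = 31 * 64 * 33

static:
  doAssert BlockSize mod 31 == 0, "BlockSize must be divisible by 31 (PoR)"
  doAssert BlockSize mod 64 == 0, "BlockSize must be divisible by 64 (Leopard ECC)"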
@@ -138,7 +138,7 @@ proc retrieve*(
       except CatchableError as exc:
         trace "Exception prefetching blocks", exc = exc.msg
     #
-    asyncSpawn prefetchBlocks()
+    # asyncSpawn prefetchBlocks() - temporarily commented out
     #
     # Retrieve all blocks of the dataset sequentially from the local store or network
     return LPStream(StoreStream.new(node.blockStore, manifest, pad = false)).success
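For context: the removed call was a fire-and-forget prefetch of every block in the manifest, while the StoreStream returned below fetches blocks as the reader consumes them. A minimal chronos-based sketch contrasting the two access patterns (hypothetical code, not taken from the repo; getBlock is a stand-in for the block store):

# Hypothetical sketch: unbounded prefetching vs. sequential, demand-driven reads.
import std/sequtils
import pkg/chronos

proc getBlock(i: int): Future[int] {.async.} =
  await sleepAsync(1.milliseconds)   # pretend each block takes a little while
  return i

proc prefetchAll(n: int) {.async.} =
  # unbounded: every block requested at once, regardless of how fast the
  # consumer actually reads -- the pattern this commit removes
  discard await allFinished(toSeq(0 ..< n).mapIt(getBlock(it)))

proc readSequentially(n: int) {.async.} =
  # bounded: one block in flight at a time, as a streaming reader does
  for i in 0 ..< n:
    discard await getBlock(i)

when isMainModule:
  waitFor readSequentially(8)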
@@ -128,7 +128,7 @@ suite "Test Node":
     let
       stream = BufferStream.new()
       storeFut = node.store(stream)
-      oddChunkSize = math.trunc(BlockSize/1.618).int # Let's check that node.store can correctly rechunk these odd chunks
+      oddChunkSize = math.trunc(BlockSize/3.14).int # Let's check that node.store can correctly rechunk these odd chunks
       oddChunker = FileChunker.new(file = file, chunkSize = oddChunkSize, pad = false) # TODO: doesn't work with pad=tue
     var
       original: seq[byte]
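Evaluating the new expression outside the test (illustrative only) shows what the chunker is now fed: a chunk size that is neither a multiple nor a divisor of the 65472-byte BlockSize, so node.store still has to rechunk, as the test comment relies on.

# Standalone evaluation of the expression used in the test above.
import std/math

const BlockSize = 31 * 64 * 33        # 65472, the value set in this commit

let oddChunkSize = math.trunc(BlockSize/3.14).int
echo oddChunkSize                     # 20850
echo BlockSize mod oddChunkSize       # 2922 -> chunks do not line up with block boundaries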
@@ -80,7 +80,7 @@ ethersuite "Integration tests":
 
   test "nodes negotiate contracts on the marketplace":
     proc sell =
-      let json = %*{"size": "0x1F00", "duration": "0x200", "minPrice": "0x300"}
+      let json = %*{"size": "0xFFFFF", "duration": "0x200", "minPrice": "0x300"}
       discard client.post(baseurl2 & "/sales/availability", $json)
 
     proc available: JsonNode =
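The size bump in the availability JSON appears to track the block-size change: 0x1F00 is 7936 bytes, exactly the old 31 * 64 * 4 block size, while 0xFFFFF is about 1 MiB, comfortably above the new 64 KiB blocks. A quick arithmetic check (illustrative only, not part of the test):

# Relate the hex sizes used in the integration test to the block sizes above.
echo 0x1F00                        # 7936  = 31 * 64 * 4, the old BlockSize
echo 0xFFFFF                       # 1048575, roughly 1 MiB
echo 0xFFFFF div (31 * 64 * 33)    # 16 -> the new availability spans 16 of the 64 KiB blocks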