Write sampling CDF to CSV file
Signed-off-by: Csaba Kiraly <csaba.kiraly@gmail.com>
This commit is contained in:
parent
6cd3f9af53
commit
f799a11341
16
das.nim
16
das.nim
|
@@ -1,5 +1,5 @@
|
||||||
import
|
import
|
||||||
std/[random, math],
|
std/[random, math, strformat],
|
||||||
chronicles,
|
chronicles,
|
||||||
chronos,
|
chronos,
|
||||||
libp2pdht/discv5/crypto as dhtcrypto,
|
libp2pdht/discv5/crypto as dhtcrypto,
|
||||||
|
@@ -94,7 +94,7 @@ proc sample(s: Slice[int], len: int): seq[int] =
|
||||||
|
|
||||||
when isMainModule:
|
when isMainModule:
|
||||||
proc main() {.async.} =
|
proc main() {.async.} =
|
||||||
let
|
var
|
||||||
nodecount = 100
|
nodecount = 100
|
||||||
delay_pernode = 10 # in millisec
|
delay_pernode = 10 # in millisec
|
||||||
blocksize = 256
|
blocksize = 256
|
||||||
|
@@ -106,6 +106,7 @@ when isMainModule:
|
||||||
delay_init = 60.minutes
|
delay_init = 60.minutes
|
||||||
upload_timeout = 4.seconds
|
upload_timeout = 4.seconds
|
||||||
sampling_delay = 4.seconds
|
sampling_delay = 4.seconds
|
||||||
|
filename: string
|
||||||
assert(log2(blocksize.float).ceil.int <= segmentsize * 8 )
|
assert(log2(blocksize.float).ceil.int <= segmentsize * 8 )
|
||||||
assert(samplesize <= blocksize)
|
assert(samplesize <= blocksize)
|
||||||
|
|
||||||
|
@@ -186,6 +187,7 @@ when isMainModule:
|
||||||
info "sample", by = n.localNode, pass, cnt = passcount, time
|
info "sample", by = n.localNode, pass, cnt = passcount, time
|
||||||
return (pass, passcount, time)
|
return (pass, passcount, time)
|
||||||
|
|
||||||
|
proc sampleDAMany() {.async.} =
|
||||||
# all nodes start sampling in parallel
|
# all nodes start sampling in parallel
|
||||||
var samplings = newSeq[Future[(bool, int, Duration)]]()
|
var samplings = newSeq[Future[(bool, int, Duration)]]()
|
||||||
for n in 1 ..< nodecount:
|
for n in 1 ..< nodecount:
|
||||||
|
@@ -193,6 +195,9 @@ when isMainModule:
|
||||||
await allFutures(samplings)
|
await allFutures(samplings)
|
||||||
|
|
||||||
# print statistics
|
# print statistics
|
||||||
|
let csvFile = open(fmt"{filename}.csv", fmWrite)
|
||||||
|
defer: csvFile.close()
|
||||||
|
|
||||||
var
|
var
|
||||||
passed = 0
|
passed = 0
|
||||||
for f in samplings:
|
for f in samplings:
|
||||||
|
@@ -200,10 +205,17 @@ when isMainModule:
|
||||||
let (pass, passcount, time) = await f
|
let (pass, passcount, time) = await f
|
||||||
passed += pass.int
|
passed += pass.int
|
||||||
debug "sampleStats", pass, cnt = passcount, time
|
debug "sampleStats", pass, cnt = passcount, time
|
||||||
|
if pass:
|
||||||
|
csvFile.writeLine(time.milliseconds)
|
||||||
|
else:
|
||||||
|
csvFile.writeLine("100000") # using large value here as Gnuplot has issues with NaN
|
||||||
else:
|
else:
|
||||||
error "This should not happen!"
|
error "This should not happen!"
|
||||||
info "sampleStats", passed, total = samplings.len, ratio = passed/samplings.len
|
info "sampleStats", passed, total = samplings.len, ratio = passed/samplings.len
|
||||||
|
|
||||||
|
filename = fmt"n{nodecount},dpn{delay_pernode},dinit{delay_init},bs{blocksize},ss{samplesize},sthr{samplethreshold}"
|
||||||
|
await sampleDAMany()
|
||||||
|
|
||||||
waitfor main()
|
waitfor main()
|
||||||
|
|
||||||
# proc teardownAll() =
|
# proc teardownAll() =
|
||||||
|
|
Loading…
Reference in New Issue