adding scenario configs

Signed-off-by: Csaba Kiraly <csaba.kiraly@gmail.com>
Csaba Kiraly 2023-04-12 20:24:38 +02:00
parent d97b323674
commit 20487a2c53
GPG Key ID: 0FE274EE8C95166E (no known key found for this signature in database)
2 changed files with 193 additions and 0 deletions

config_failuremodel.py (new file, 96 lines)

@@ -0,0 +1,96 @@
"""Example configuration file
This file illustrates how to define options and simulation parameter ranges.
It also defines the traversal order of the simulation space. As the file
extension suggests, configuration is pure python code, allowing complex
setups. Use at your own risk.
To use this example, run
python3 study.py config_failuremodel
Otherwise copy it and modify as needed. The default traversal order defined
in the nested loop of nextShape() is good for most cases, but customizable
if needed.
"""
import logging
import itertools
import numpy as np
from DAS.shape import Shape
dumpXML = 1
# save progress vectors to XML
saveProgress = 1
# plot progress for each run to PNG
plotProgress = 1
visualization = 1
logLevel = logging.INFO
# number of parallel workers. -1: all cores; 1: sequential
# for more details, see joblib.Parallel
numJobs = 1
# distribute rows/columns evenly between validators (True)
# or generate them using local randomness (False)
evenLineDistribution = False
# Number of simulation runs with the same parameters for statistical relevance
runs = range(1)
# Number of beacon nodes in the simulation
numberNodes = [1000]
failureModels = ["random", "sequential", "MEP", "MEP+1", "DEP", "DEP+1", "MREP", "MREP-1"]
# Percentage of block not released by producer
failureRates = [50]
# Block size in one dimension in segments. Block is blockSizes * blockSizes segments.
blockSizes = [256]
# Per-topic mesh neighborhood size
netDegrees = [4]
gsChannelFailureRate = 0
gsEdgeFailureRate = 0
# number of rows and columns a validator is interested in
chis = [1,2,3]
# ratio of class1 nodes (see below for parameters per class)
class1ratios = [0.8]
# Number of validators per beacon node
validatorsPerNode1 = [1]
validatorsPerNode2 = [100]
# Set uplink bandwidth, in segments (~560 bytes) per timestep (50 ms, see stepDuration below)
# 1 Mbps ~= 1e6 / 20 / 8 / 560 ~= 11
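# e.g. (illustrative, applying the formula above): bwUplinksProd = 200 below ~= 200 * 560 * 8 * 20 ~= 17.9 Mbps; bwUplinks1 = 10 ~= 0.9 Mbps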
bwUplinksProd = [200]
bwUplinks1 = [10]
bwUplinks2 = [200]
# Step duration in milliseconds (a typical RTT is about 100 ms)
stepDuration = 50
# Segment size in bytes (with proof)
segmentSize = 560
# Set to True if you want your run to be deterministic, False if not
deterministic = True
# If your run is deterministic you can set the random seed here. It is ignored otherwise.
randomSeed = "DAS"
# Number of steps without progress to stop simulation
steps4StopCondition = 10
def nextShape():
    for run, fm, fr, class1ratio, chi, vpn1, vpn2, blockSize, nn, netDegree, bwUplinkProd, bwUplink1, bwUplink2 in itertools.product(
            runs, failureModels, failureRates, class1ratios, chis, validatorsPerNode1, validatorsPerNode2, blockSizes, numberNodes, netDegrees, bwUplinksProd, bwUplinks1, bwUplinks2):
        # Network degree has to be an even number
        if netDegree % 2 == 0:
            shape = Shape(blockSize, nn, fm, fr, class1ratio, chi, vpn1, vpn2, netDegree, bwUplinkProd, bwUplink1, bwUplink2, run)
            yield shape
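The docstring above notes that nextShape() fixes the traversal order of the parameter space. study.py is not part of this commit, so the snippet below is only an illustrative sketch of how the generator could be consumed, not how the study script actually uses it. With the lists in this file (1 run, 8 failure models, 3 chi values, everything else a single value, and netDegree = 4 passing the even-degree check), it would yield 24 shapes.

# illustrative sketch, not part of the committed file
import config_failuremodel as cfg

shapes = list(cfg.nextShape())
print("simulations to run:", len(shapes))   # 1 run * 8 failure models * 3 chis = 24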

config_scenario1.py (new file, 97 lines)

@@ -0,0 +1,97 @@
"""Example configuration file
This file illustrates how to define options and simulation parameter ranges.
It also defines the traversal order of the simulation space. As the file
extension suggests, configuration is pure python code, allowing complex
setups. Use at your own risk.
To use this example, run
python3 study.py config_scenario1
Otherwise copy it and modify as needed. The default traversal order defined
in the nested loop of nextShape() is good for most cases, but customizable
if needed.
"""
import logging
import itertools
import numpy as np
from DAS.shape import Shape
dumpXML = 1
# save progress vectors to XML
saveProgress = 1
# plot progress for each run to PNG
plotProgress = 1
visualization = 1
logLevel = logging.INFO
# number of parallel workers. -1: all cores; 1: sequential
# for more details, see joblib.Parallel
numJobs = 1
# distribute rows/columns evenly between validators (True)
# or generate them using local randomness (False)
evenLineDistribution = False
# Number of simulation runs with the same parameters for statistical relevance
runs = range(1)
# Number of beacon nodes in the simulation
numberNodes = [8000]
#failureModels = ["random", "sequential", "MEP", "MEP+1", "DEP", "DEP+1", "MREP", "MREP-1"]
failureModels = ["random"]
# Percentage of block not released by producer
failureRates = [0]
# Block size in one dimension in segments. Block is blockSizes * blockSizes segments.
blockSizes = [512]
# Per-topic mesh neighborhood size
netDegrees = [4]
gsChannelFailureRate = 0
gsEdgeFailureRate = 0
# number of rows and columns a validator is interested in
chis = [2]
# ratio of class1 nodes (see below for parameters per class)
class1ratios = [0.8]
# Number of validators per beacon node
validatorsPerNode1 = [1]
validatorsPerNode2 = [500]
# Set uplink bandwidth, in segments (~560 bytes) per timestep (50 ms, see stepDuration below)
# 1 Mbps ~= 1e6 / 20 / 8 / 560 ~= 11
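# e.g. (illustrative, applying the formula above): bwUplinksProd = 1000 below ~= 1000 * 560 * 8 * 20 ~= 89.6 Mbps; bwUplinks1 = 50 ~= 4.5 Mbps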
bwUplinksProd = [1000]
bwUplinks1 = [50]
bwUplinks2 = [1000]
# Step duration in milliseconds (a typical RTT is about 100 ms)
stepDuration = 50
# Segment size in bytes (with proof)
segmentSize = 560
# Set to True if you want your run to be deterministic, False if not
deterministic = True
# If your run is deterministic you can set the random seed here. It is ignored otherwise.
randomSeed = "DAS"
# Number of steps without progress to stop simulation
steps4StopCondition = 4
def nextShape():
    for run, fm, fr, class1ratio, chi, vpn1, vpn2, blockSize, nn, netDegree, bwUplinkProd, bwUplink1, bwUplink2 in itertools.product(
            runs, failureModels, failureRates, class1ratios, chis, validatorsPerNode1, validatorsPerNode2, blockSizes, numberNodes, netDegrees, bwUplinksProd, bwUplinks1, bwUplinks2):
        # Network degree has to be an even number
        if netDegree % 2 == 0:
            shape = Shape(blockSize, nn, fm, fr, class1ratio, chi, vpn1, vpn2, netDegree, bwUplinkProd, bwUplink1, bwUplink2, run)
            yield shape
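The docstring also says the traversal order in nextShape() is customizable. A hedged sketch of one way to do that is below; it is not part of this commit, reuses only names already defined in this file, and the extra bandwidth condition is an arbitrary example filter, not a recommended setting.

# illustrative sketch of a customized traversal; the extra filter is an arbitrary example
def nextShapeCustom():
    for run, fm, fr, class1ratio, chi, vpn1, vpn2, blockSize, nn, netDegree, bwUplinkProd, bwUplink1, bwUplink2 in itertools.product(
            runs, failureModels, failureRates, class1ratios, chis, validatorsPerNode1, validatorsPerNode2, blockSizes, numberNodes, netDegrees, bwUplinksProd, bwUplinks1, bwUplinks2):
        # keep the even-degree constraint and additionally skip combinations where
        # class-1 nodes would have more uplink than the block producer
        if netDegree % 2 == 0 and bwUplink1 <= bwUplinkProd:
            yield Shape(blockSize, nn, fm, fr, class1ratio, chi, vpn1, vpn2, netDegree, bwUplinkProd, bwUplink1, bwUplink2, run)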