From cb0a3ea1bac38810d065a6d5e6346ffa6ca71671 Mon Sep 17 00:00:00 2001
From: Csaba Kiraly
Date: Mon, 27 Mar 2023 23:27:49 +0200
Subject: [PATCH 1/5] fixup: avoid warning on mean if empty

Signed-off-by: Csaba Kiraly
---
 DAS/observer.py | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/DAS/observer.py b/DAS/observer.py
index 1bdf4d0..18c0461 100644
--- a/DAS/observer.py
+++ b/DAS/observer.py
@@ -70,6 +70,8 @@ class Observer:
     def getTrafficStats(self, validators):
         def maxOrNan(l):
             return np.max(l) if l else np.NaN
+        def meanOrNan(l):
+            return np.mean(l) if l else np.NaN

         trafficStats = {}
         for cl in range(0,3):
@@ -77,9 +79,9 @@ class Observer:
             Rx = [v.statsRxInSlot for v in validators if v.nodeClass == cl]
             RxDup = [v.statsRxDupInSlot for v in validators if v.nodeClass == cl]
             trafficStats[cl] = {
-                "Tx": {"mean": np.mean(Tx), "max": maxOrNan(Tx)},
-                "Rx": {"mean": np.mean(Rx), "max": maxOrNan(Rx)},
-                "RxDup": {"mean": np.mean(RxDup), "max": maxOrNan(RxDup)},
+                "Tx": {"mean": meanOrNan(Tx), "max": maxOrNan(Tx)},
+                "Rx": {"mean": meanOrNan(Rx), "max": maxOrNan(Rx)},
+                "RxDup": {"mean": meanOrNan(RxDup), "max": maxOrNan(RxDup)},
             }

         return trafficStats

From 98db10f7a6a095036416df4f991bbdd816127e17 Mon Sep 17 00:00:00 2001
From: Csaba Kiraly
Date: Thu, 30 Mar 2023 00:01:06 +0200
Subject: [PATCH 2/5] Add more documentation

Signed-off-by: Csaba Kiraly
---
 DAS/observer.py | 12 ++++++++++++
 DAS/results.py  |  1 +
 2 files changed, 13 insertions(+)

diff --git a/DAS/observer.py b/DAS/observer.py
index 18c0461..235ed60 100644
--- a/DAS/observer.py
+++ b/DAS/observer.py
@@ -58,6 +58,17 @@ class Observer:
         return (arrived, expected, ready, validated)

     def getProgress(self, validators):
+        """Calculate current simulation progress with different metrics.
+
+        Returns:
+        - missingSamples: overall number of sample instances missing in nodes.
+          Samples are counted on both rows and columns, so intersections of interest are counted twice.
+        - sampleProgress: the above, expressed as a progress ratio
+        - nodeProgress: ratio of nodes that have all the segments they are interested in
+        - validatorProgress: same as above, but as a vpn-weighted average, i.e. counted per validator,
+          where a validator is counted only if all validators of its supporting node see all segments of interest
+        TODO: add a real per-validator progress counter
+        """
         arrived, expected, ready, validated = self.checkStatus(validators)
         missingSamples = expected - arrived
         sampleProgress = arrived / expected
@@ -68,6 +79,7 @@ class Observer:
         return missingSamples, sampleProgress, nodeProgress, validatorProgress

     def getTrafficStats(self, validators):
+        """Summary statistics of traffic measurements in a timestep."""
         def maxOrNan(l):
             return np.max(l) if l else np.NaN
         def meanOrNan(l):
diff --git a/DAS/results.py b/DAS/results.py
index 61f4f04..c5687a5 100644
--- a/DAS/results.py
+++ b/DAS/results.py
@@ -28,6 +28,7 @@ class Result:
         self.tta = -1

     def addMetric(self, name, metric):
+        """Generic function to add a metric to the results."""
         self.metrics[name] = metric

     def dump(self, execID):

From 9800161ac91576a63ce99121c454c2361400d9ed Mon Sep 17 00:00:00 2001
From: Leonardo Bautista-Gomez
Date: Mon, 27 Mar 2023 15:29:39 +0200
Subject: [PATCH 3/5] Switch from time steps to milliseconds. Rebase to traffic progress.

---
 DAS/results.py    | 15 ++++++++-------
 DAS/simulator.py  | 13 +++++++------
 DAS/visualizer.py |  6 ++++--
 config_example.py | 15 +++++++++------
 study.py          |  4 ++--
 5 files changed, 30 insertions(+), 23 deletions(-)

diff --git a/DAS/results.py b/DAS/results.py
index c5687a5..48512cd 100644
--- a/DAS/results.py
+++ b/DAS/results.py
@@ -7,22 +7,23 @@ from dicttoxml import dicttoxml
 class Result:
     """This class stores and process/store the results of a simulation."""

-    def __init__(self, shape):
+    def __init__(self, shape, execID):
         """It initializes the instance with a specific shape."""
         self.shape = shape
+        self.execID = execID
         self.blockAvailable = -1
         self.tta = -1
         self.missingVector = []
         self.metrics = {}

-    def populate(self, shape, missingVector):
+    def populate(self, shape, config, missingVector):
         """It populates part of the result data inside a vector."""
         self.shape = shape
         self.missingVector = missingVector
         missingSamples = missingVector[-1]
         if missingSamples == 0:
             self.blockAvailable = 1
-            self.tta = len(missingVector)
+            self.tta = len(missingVector) * (1000/config.stepDuration)
         else:
             self.blockAvailable = 0
             self.tta = -1
@@ -31,12 +32,12 @@ class Result:
         """Generic function to add a metric to the results."""
         self.metrics[name] = metric

-    def dump(self, execID):
+    def dump(self):
         """It dumps the results of the simulation in an XML file."""
         if not os.path.exists("results"):
             os.makedirs("results")
-        if not os.path.exists("results/"+execID):
-            os.makedirs("results/"+execID)
+        if not os.path.exists("results/"+self.execID):
+            os.makedirs("results/"+self.execID)
         resd1 = self.shape.__dict__
         resd2 = self.__dict__.copy()
         resd2.pop("shape")
@@ -44,6 +45,6 @@ class Result:
         resXml = dicttoxml(resd1)
         xmlstr = minidom.parseString(resXml)
         xmlPretty = xmlstr.toprettyxml()
-        filePath = "results/"+execID+"/"+str(self.shape)+".xml"
+        filePath = "results/"+self.execID+"/"+str(self.shape)+".xml"
         with open(filePath, "w") as f:
             f.write(xmlPretty)
diff --git a/DAS/simulator.py b/DAS/simulator.py
index 677a260..4acf3f5 100644
--- a/DAS/simulator.py
+++ b/DAS/simulator.py
@@ -13,12 +13,13 @@ from DAS.validator import *
 class Simulator:
     """This class implements the main DAS simulator."""

-    def __init__(self, shape, config):
+    def __init__(self, shape, config, execID):
         """It initializes the simulation with a set of parameters (shape)."""
         self.shape = shape
         self.config = config
         self.format = {"entity": "Simulator"}
-        self.result = Result(self.shape)
+        self.execID = execID
+        self.result = Result(self.shape, self.execID)
         self.validators = []
         self.logger = []
         self.logLevel = config.logLevel
@@ -192,14 +193,14 @@ class Simulator:

             # log TX and RX statistics
             trafficStats = self.glob.getTrafficStats(self.validators)
-            self.logger.debug("step %d: %s" %
+            self.logger.debug("step %d: %s" %
                 (steps, trafficStats), extra=self.format)
             for i in range(0,self.shape.numberNodes):
                 self.validators[i].updateStats()
             trafficStatsVector.append(trafficStats)

             missingSamples, sampleProgress, nodeProgress, validatorProgress = self.glob.getProgress(self.validators)
-            self.logger.debug("step %d, arrived %0.02f %%, ready %0.02f %%, validated %0.02f %%"
+            self.logger.debug("step %d, arrived %0.02f %%, ready %0.02f %%, validated %0.02f %%"
                 % (steps, sampleProgress*100, nodeProgress*100, validatorProgress*100), extra=self.format)

             cnS = "samples received"
@@ -231,7 +232,7 @@ class Simulator:
                 missingVector.append(missingSamples)
                 break
             elif missingSamples == 0:
-                #self.logger.info("The entire block is available at step %d, with failure rate %d !" % (steps, self.shape.failureRate), extra=self.format)
+                self.logger.debug("The entire block is available at step %d, with failure rate %d !" % (steps, self.shape.failureRate), extra=self.format)
                 missingVector.append(missingSamples)
                 break
             else:
@@ -240,6 +241,6 @@ class Simulator:
         progress = pd.DataFrame(progressVector)
         if self.config.saveProgress:
             self.result.addMetric("progress", progress.to_dict(orient='list'))
-        self.result.populate(self.shape, missingVector)
+        self.result.populate(self.shape, self.config, missingVector)

         return self.result
diff --git a/DAS/visualizer.py b/DAS/visualizer.py
index 8afd006..db4b2d5 100644
--- a/DAS/visualizer.py
+++ b/DAS/visualizer.py
@@ -36,7 +36,7 @@ class Visualizer:
             bwUplinkProd = int(root.find('bwUplinkProd').text)
             bwUplink1 = int(root.find('bwUplink1').text)
             bwUplink2 = int(root.find('bwUplink2').text)
-            tta = int(root.find('tta').text)
+            tta = float(root.find('tta').text)

             # Loop over all possible combinations of of the parameters minus two
             for combination in combinations(self.parameters, len(self.parameters)-2):
@@ -120,7 +120,7 @@ class Visualizer:
            hist, xedges, yedges = np.histogram2d(data[key][labels[0]], data[key][labels[1]], bins=(len(xlabels), len(ylabels)), weights=data[key]['ttas'])
            hist = hist.T
            fig, ax = plt.subplots(figsize=(10, 6))
-           sns.heatmap(hist, xticklabels=xlabels, yticklabels=ylabels, cmap='Purples', cbar_kws={'label': 'Time to block availability'}, linecolor='black', linewidths=0.3, annot=True, fmt=".2f", ax=ax)
+           sns.heatmap(hist, xticklabels=xlabels, yticklabels=ylabels, cmap='Purples', cbar_kws={'label': 'Time to block availability (ms)'}, linecolor='black', linewidths=0.3, annot=True, fmt=".2f", ax=ax)
            plt.xlabel(self.formatLabel(labels[0]))
            plt.ylabel(self.formatLabel(labels[1]))
            filename = ""
@@ -131,6 +131,8 @@ class Visualizer:
                filename += f"{key[paramValueCnt]}"
                formattedTitle = self.formatTitle(key[paramValueCnt])
                title += formattedTitle
+               if (paramValueCnt+1) % 5 == 0:
+                   title += "\n"
                paramValueCnt += 1
            title_obj = plt.title(title)
            font_size = 16 * fig.get_size_inches()[0] / 10
diff --git a/config_example.py b/config_example.py
index 5b1a396..3ff5ae8 100644
--- a/config_example.py
+++ b/config_example.py
@@ -27,14 +27,14 @@ logLevel = logging.INFO

 # number of parallel workers. -1: all cores; 1: sequential
 # for more details, see joblib.Parallel
-numJobs = 3
+numJobs = -1

 # distribute rows/columns evenly between validators (True)
 # or generate it using local randomness (False)
 evenLineDistribution = True

 # Number of simulation runs with the same parameters for statistical relevance
-runs = range(10)
+runs = range(2)

 # Number of validators
 numberNodes = range(256, 513, 128)
@@ -49,14 +49,14 @@ blockSizes = range(32,65,16)
 netDegrees = range(6, 9, 2)

 # number of rows and columns a validator is interested in
-chis = range(1, 5, 2)
+chis = range(2, 5, 2)

 # ratio of class1 nodes (see below for parameters per class)
-class1ratios = np.arange(0, 1, .2)
+class1ratios = [0.8, 0.9]

 # Number of validators per beacon node
 validatorsPerNode1 = [1]
-validatorsPerNode2 = [2, 4, 8, 16, 32]
+validatorsPerNode2 = [500]

 # Set uplink bandwidth. In segments (~560 bytes) per timestep (50ms?)
 # 1 Mbps ~= 1e6 / 20 / 8 / 560 ~= 11
@@ -64,8 +64,11 @@ bwUplinksProd = [2200]
 bwUplinks1 = [110]
 bwUplinks2 = [2200]

+# Step duration in milliseconds (Classic RTT is about 100ms)
+stepDuration = 50
+
 # Set to True if you want your run to be deterministic, False if not
-deterministic = False
+deterministic = True

 # If your run is deterministic you can decide the random seed. This is ignore otherwise.
 randomSeed = "DAS"
diff --git a/study.py b/study.py
index fde8099..74dba63 100644
--- a/study.py
+++ b/study.py
@@ -28,7 +28,7 @@ def runOnce(config, shape, execID):
     shape.setSeed(config.randomSeed+"-"+str(shape))
     random.seed(shape.randomSeed)

-    sim = Simulator(shape, config)
+    sim = Simulator(shape, config, execID)
     sim.initLogger()
     sim.initValidators()
     sim.initNetwork()
@@ -36,7 +36,7 @@ def runOnce(config, shape, execID):
     sim.logger.info("Shape: %s ... Block Available: %d in %d steps" % (str(sim.shape.__dict__), result.blockAvailable, len(result.missingVector)), extra=sim.format)

     if config.dumpXML:
-        result.dump(execID)
+        result.dump()

     return result

From 41e83991591eab9d9d8ea2e787c0064628c10cd0 Mon Sep 17 00:00:00 2001
From: Leonardo Bautista-Gomez
Date: Mon, 27 Mar 2023 21:24:25 +0200
Subject: [PATCH 4/5] Add Tx and Rx stats to results. Rebase to traffic progress.

---
 DAS/results.py | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)

diff --git a/DAS/results.py b/DAS/results.py
index 48512cd..469810c 100644
--- a/DAS/results.py
+++ b/DAS/results.py
@@ -16,10 +16,16 @@ class Result:
         self.missingVector = []
         self.metrics = {}

-    def populate(self, shape, config, missingVector):
+    def populate(self, shape, config, missingVector, bandwidthVector):
         """It populates part of the result data inside a vector."""
         self.shape = shape
         self.missingVector = missingVector
+        self.proTx = bandwidthVector[0]
+        self.proRx = bandwidthVector[1]
+        self.aveTx = bandwidthVector[2]
+        self.maxTx = bandwidthVector[3]
+        self.aveRx = bandwidthVector[4]
+        self.maxRx = bandwidthVector[5]
         missingSamples = missingVector[-1]
         if missingSamples == 0:
             self.blockAvailable = 1

From b5390b9f1b8d777f8df75a1174fe757e759255a8 Mon Sep 17 00:00:00 2001
From: Leonardo Bautista-Gomez
Date: Wed, 29 Mar 2023 17:01:28 +0200
Subject: [PATCH 5/5] Remove traffic stats. Rebase to traffic progress.

---
 DAS/results.py | 8 +-------
 1 file changed, 1 insertion(+), 7 deletions(-)

diff --git a/DAS/results.py b/DAS/results.py
index 469810c..48512cd 100644
--- a/DAS/results.py
+++ b/DAS/results.py
@@ -16,16 +16,10 @@ class Result:
         self.missingVector = []
         self.metrics = {}

-    def populate(self, shape, config, missingVector, bandwidthVector):
+    def populate(self, shape, config, missingVector):
         """It populates part of the result data inside a vector."""
         self.shape = shape
         self.missingVector = missingVector
-        self.proTx = bandwidthVector[0]
-        self.proRx = bandwidthVector[1]
-        self.aveTx = bandwidthVector[2]
-        self.maxTx = bandwidthVector[3]
-        self.aveRx = bandwidthVector[4]
-        self.maxRx = bandwidthVector[5]
         missingSamples = missingVector[-1]
         if missingSamples == 0:
             self.blockAvailable = 1