das-research/DAS/simulator.py

114 lines
4.1 KiB
Python
Raw Normal View History

2022-11-30 15:28:27 +01:00
#!/bin/python
import networkx as nx
2022-11-30 15:28:27 +01:00
import logging
from datetime import datetime
from DAS.tools import *
from DAS.observer import *
from DAS.validator import *
class Simulator:
    """Driver for one Data Availability Sampling (DAS) simulation run.

    Builds a set of validators (one of which is the block proposer),
    wires them into per-row / per-column random gossip graphs, then
    iterates send/receive/restore rounds until the block is fully
    available or progress stalls.
    """

    # Class-level constants shared by every simulation instance.
    proposerID = 0              # index of the proposing validator
    logLevel = logging.INFO     # default verbosity for the "DAS" logger

    def __init__(self, config):
        """Store the run configuration and initialize per-instance state.

        config: configuration object; the methods below read at least
        `deterministic`, `numberValidators` and `blockSize` from it.
        """
        self.config = config
        # Per-instance mutable state. (Originally these were shared,
        # mutable class attributes — a classic Python pitfall where two
        # Simulator instances would alias the same lists.)
        self.validators = []
        self.glob = []
        self.logger = []
        # Set via resetFailureRate(); initialized so run()'s final log
        # line never hits an AttributeError if it was never set.
        self.failureRate = None
        self.format = {"entity": "Simulator"}

    def initValidators(self):
        """Create the global observer and the full validator set.

        Validator 0 (the proposer) builds the block and hands the golden
        copy to the observer; all others just log their row/column IDs.
        """
        if not self.config.deterministic:
            # Non-deterministic runs reseed the PRNG from the wall clock.
            random.seed(datetime.now())
        self.glob = Observer(self.logger, self.config)
        self.glob.reset()
        self.validators = []
        for i in range(self.config.numberValidators):
            # Second argument is the "am I the proposer?" flag (0/1).
            val = Validator(i, int(i == self.proposerID), self.logger, self.config)
            if i == self.proposerID:
                val.initBlock()
                self.glob.setGoldenData(val.block)
            else:
                val.logIDs()
            self.validators.append(val)

    def initNetwork(self, d=6):
        """Connect validators into d-regular gossip graphs per channel.

        Each block row and each block column forms one channel; every
        channel gets an independent random d-regular graph whose edges
        become symmetric neighbor links between the member validators.

        d: target node degree of each channel graph.
           NOTE(review): nx.random_regular_graph requires d < n and
           n*d even for a channel of n members — TODO confirm channel
           sizes always satisfy this.
        """
        rowChannels = [[] for _ in range(self.config.blockSize)]
        columnChannels = [[] for _ in range(self.config.blockSize)]
        # Bucket validators by the row/column IDs they custody.
        for v in self.validators:
            for id in v.rowIDs:
                rowChannels[id].append(v)
            for id in v.columnIDs:
                columnChannels[id].append(v)
        for id in range(self.config.blockSize):
            # Row channel: random regular graph -> symmetric neighbor links.
            G = nx.random_regular_graph(d, len(rowChannels[id]))
            if not nx.is_connected(G):
                self.logger.error("graph not connected for row %d !" % id, extra=self.format)
            for u, v in G.edges:
                val1 = rowChannels[id][u]
                val2 = rowChannels[id][v]
                val1.rowNeighbors[id].append(val2)
                val2.rowNeighbors[id].append(val1)
            # Column channel: same construction.
            G = nx.random_regular_graph(d, len(columnChannels[id]))
            if not nx.is_connected(G):
                self.logger.error("graph not connected for column %d !" % id, extra=self.format)
            for u, v in G.edges:
                val1 = columnChannels[id][u]
                val2 = columnChannels[id][v]
                val1.columnNeighbors[id].append(val2)
                val2.columnNeighbors[id].append(val1)

    def initLogger(self):
        """Attach a stream handler with the project's custom formatter
        to the shared "DAS" logger and keep a reference on self."""
        logger = logging.getLogger("DAS")
        logger.setLevel(self.logLevel)
        ch = logging.StreamHandler()
        ch.setLevel(self.logLevel)
        ch.setFormatter(CustomFormatter())
        logger.addHandler(ch)
        self.logger = logger

    def resetFailureRate(self, failureRate):
        """Record the failure rate used for this run (reported by run())."""
        self.failureRate = failureRate

    def run(self):
        """Execute the dissemination loop.

        Returns 0 if the whole block became available, 1 if a round
        made no progress (the block cannot be recovered).
        """
        self.glob.checkRowsColumns(self.validators)
        self.validators[self.proposerID].broadcastBlock()
        arrived, expected = self.glob.checkStatus(self.validators)
        missingSamples = expected - arrived
        steps = 0
        while(missingSamples > 0):
            # Remember last round's deficit to detect a stalled run.
            oldMissingSamples = missingSamples
            # Phase 1: everyone (except the proposer) drains its inbox.
            for i in range(1, self.config.numberValidators):
                self.validators[i].receiveRowsColumns()
            # Phase 2: restore what can be erasure-decoded, then forward.
            for i in range(1, self.config.numberValidators):
                self.validators[i].restoreRows()
                self.validators[i].restoreColumns()
                self.validators[i].sendRows()
                self.validators[i].sendColumns()
                self.validators[i].logRows()
                self.validators[i].logColumns()
            arrived, expected = self.glob.checkStatus(self.validators)
            missingSamples = expected - arrived
            missingRate = missingSamples*100/expected
            self.logger.info("step %d, missing %d of %d (%0.02f %%)" % (steps, missingSamples, expected, missingRate), extra=self.format)
            if missingSamples == oldMissingSamples:
                # No progress this round: dissemination is stuck.
                break
            elif missingSamples == 0:
                break
            else:
                steps += 1
        if missingSamples == 0:
            self.logger.debug("The entire block is available at step %d, with failure rate %d !" % (steps, self.failureRate), extra=self.format)
            return 0
        else:
            self.logger.debug("The block cannot be recovered, failure rate %d!" % self.failureRate, extra=self.format)
            return 1