diff --git a/DAS/block.py b/DAS/block.py
index 0ec24a4..76379d8 100644
--- a/DAS/block.py
+++ b/DAS/block.py
@@ -16,12 +16,31 @@ class Block:
     def fill(self):
         self.data.setall(1)
 
+    def merge(self, merged):
+        self.data |= merged.data
+
     def getColumn(self, columnID):
         return self.data[columnID::self.blockSize]
 
+    def mergeColumn(self, columnID, column):
+        self.data[columnID::self.blockSize] |= column
+
+    def repairColumn(self, id):
+        success = self.data[id::self.blockSize].count(1)
+        if success >= self.blockSize/2:
+            self.data[id::self.blockSize] = 1
+
     def getRow(self, rowID):
         return self.data[rowID*self.blockSize:(rowID+1)*self.blockSize]
 
+    def mergeRow(self, rowID, row):
+        self.data[rowID*self.blockSize:(rowID+1)*self.blockSize] |= row
+
+    def repairRow(self, id):
+        success = self.data[id*self.blockSize:(id+1)*self.blockSize].count(1)
+        if success >= self.blockSize/2:
+            self.data[id*self.blockSize:(id+1)*self.blockSize] = 1
+
     def print(self):
         dash = "-" * (self.blockSize+2)
         print(dash)
diff --git a/DAS/observer.py b/DAS/observer.py
index cbbbf8d..f905af3 100644
--- a/DAS/observer.py
+++ b/DAS/observer.py
@@ -50,3 +50,12 @@ class Observer:
         self.logger.debug("There are %d missing samples in the network" % zeros, extra=self.format)
         return zeros
 
+    def checkStatus(self, validators):
+        arrived = 0
+        expected = 0
+        for val in validators:
+            if val.proposer == 0:
+                (a, e) = val.checkStatus()
+                arrived += a
+                expected += e
+        return (arrived, expected)
diff --git a/DAS/requeriments.txt b/DAS/requeriments.txt
deleted file mode 100644
index df2eb0d..0000000
--- a/DAS/requeriments.txt
+++ /dev/null
@@ -1 +0,0 @@
-bitarray==2.6.0
diff --git a/DAS/requirements.txt b/DAS/requirements.txt
new file mode 100644
index 0000000..d0bb457
--- /dev/null
+++ b/DAS/requirements.txt
@@ -0,0 +1,3 @@
+bitarray==2.6.0
+DAS==0.28.7
+networkx==3.0
diff --git a/DAS/simulator.py b/DAS/simulator.py
index a7f2b64..fcfeb2e 100644
--- a/DAS/simulator.py
+++ b/DAS/simulator.py
@@ -1,5 +1,6 @@
 #!/bin/python
 
+import networkx as nx
 import logging
 from datetime import datetime
 from DAS.tools import *
@@ -41,6 +42,33 @@ class Simulator:
             val.logIDs()
             self.validators.append(val)
 
+    def initNetwork(self, d=6):
+        rowChannels = [[] for i in range(self.blockSize)]
+        columnChannels = [[] for i in range(self.blockSize)]
+        for v in self.validators:
+            for id in v.rowIDs:
+                rowChannels[id].append(v)
+            for id in v.columnIDs:
+                columnChannels[id].append(v)
+
+        for id in range(self.blockSize):
+            G = nx.random_regular_graph(d, len(rowChannels[id]))
+            if not nx.is_connected(G):
+                self.logger.error("graph not connected for row %d !" % id, extra=self.format)
+            for u, v in G.edges:
+                val1=rowChannels[id][u]
+                val2=rowChannels[id][v]
+                val1.rowNeighbors[id].append(val2)
+                val2.rowNeighbors[id].append(val1)
+            G = nx.random_regular_graph(d, len(columnChannels[id]))
+            if not nx.is_connected(G):
+                self.logger.error("graph not connected for column %d !" % id, extra=self.format)
+            for u, v in G.edges:
+                val1=columnChannels[id][u]
+                val2=columnChannels[id][v]
+                val1.columnNeighbors[id].append(val2)
+                val2.columnNeighbors[id].append(val1)
+
     def initLogger(self):
         logger = logging.getLogger("DAS")
         logger.setLevel(self.logLevel)
@@ -55,26 +83,26 @@ class Simulator:
 
     def run(self):
         self.glob.checkRowsColumns(self.validators)
-        self.validators[self.proposerID].broadcastBlock(self.glob.broadcasted)
-        missingSamples = self.glob.checkBroadcasted()
+        self.validators[self.proposerID].broadcastBlock()
+        arrived, expected = self.glob.checkStatus(self.validators)
+        missingSamples = expected - arrived
         self.steps = 0
         while(missingSamples > 0):
             oldMissingSamples = missingSamples
-            self.logger.debug("Step %d:" % self.steps, extra=self.format)
             for i in range(1,self.numberValidators):
-                self.validators[i].receiveRowsColumns(self.glob.broadcasted)
-                #Rows
+                self.validators[i].receiveRowsColumns()
+            for i in range(1,self.numberValidators):
                 self.validators[i].restoreRows()
-                self.validators[i].sendRows(self.glob.broadcasted)
-                self.validators[i].logRows()
-                self.validators[i].logColumns()
-                # Columns
                 self.validators[i].restoreColumns()
-                self.validators[i].sendColumns(self.glob.broadcasted)
+                self.validators[i].sendRows()
+                self.validators[i].sendColumns()
                 self.validators[i].logRows()
                 self.validators[i].logColumns()
-            missingSamples = self.glob.checkBroadcasted()
+            arrived, expected = self.glob.checkStatus(self.validators)
+            missingSamples = expected - arrived
+            missingRate = missingSamples*100/expected
+            self.logger.info("step %d, missing %d of %d (%0.02f %%)" % (self.steps, missingSamples, expected, missingRate), extra=self.format)
             if missingSamples == oldMissingSamples:
                 break
             elif missingSamples == 0:
diff --git a/DAS/validator.py b/DAS/validator.py
index d6c9684..eaeb0a0 100644
--- a/DAS/validator.py
+++ b/DAS/validator.py
@@ -1,6 +1,8 @@
 #!/bin/python3
 
 import random
+import collections
+import logging
 from DAS.block import *
 from bitarray import bitarray
 from bitarray.util import zeros
@@ -11,11 +13,6 @@ class Validator:
     chi = 0
     format = {}
     blocksize = 0
-    block = []
-    rowIDs = []
-    columnIDs = []
-    rows = []
-    columns = []
     proposer = 0
     failureRate = 0
     logger = []
@@ -25,6 +22,8 @@
         self.ID = ID
         self.format = {"entity": "Val "+str(self.ID)}
         self.blockSize = blockSize
+        self.block = Block(blockSize)
+        self.receivedBlock = Block(blockSize)
         self.proposer = proposer
         self.failureRate = failureRate
         self.logger = logger
@@ -34,12 +33,18 @@
             self.logger.error("Chi has to be smaller than %d" % blockSize, extra=self.format)
         else:
             self.chi = chi
-            self.rowIDs = []
-            self.columnIDs = []
-            if deterministic:
-                random.seed(self.ID)
-            self.rowIDs = random.sample(range(self.blockSize), self.chi)
-            self.columnIDs = random.sample(range(self.blockSize), self.chi)
+            if proposer:
+                self.rowIDs = range(blockSize)
+                self.columnIDs = range(blockSize)
+            else:
+                self.rowIDs = []
+                self.columnIDs = []
+                if deterministic:
+                    random.seed(self.ID)
+                self.rowIDs = random.sample(range(self.blockSize), self.chi)
+                self.columnIDs = random.sample(range(self.blockSize), self.chi)
+        self.rowNeighbors = collections.defaultdict(list)
+        self.columnNeighbors = collections.defaultdict(list)
 
     def logIDs(self):
         if self.proposer == 1:
@@ -54,87 +59,115 @@
         self.block.fill()
         #self.block.print()
 
-    def broadcastBlock(self, broadcasted):
+    def broadcastBlock(self):
         if self.proposer == 0:
             self.logger.error("I am NOT a block proposer", extra=self.format)
         else:
             self.logger.debug("Broadcasting my block...", extra=self.format)
-            tempBlock = self.block
             order = [i for i in range(self.blockSize * self.blockSize)]
             random.shuffle(order)
            while(order):
                 i = order.pop()
-                if (random.randint(0,99) > self.failureRate):
-                    broadcasted.data[i] = self.block.data[i]
+                if (random.randint(0,99) >= self.failureRate):
+                    self.block.data[i] = 1
+                else:
+                    self.block.data[i] = 0
+            nbFailures = self.block.data.count(0)
+            measuredFailureRate = nbFailures * 100 / (self.blockSize * self.blockSize)
+            self.logger.info("Number of failures: %d (%0.02f %%)", nbFailures, measuredFailureRate, extra=self.format)
             #broadcasted.print()
+            for id in range(self.blockSize):
+                self.sendColumn(id)
+            for id in range(self.blockSize):
+                self.sendRow(id)
 
-    def getColumn(self, columnID, broadcasted):
-        column = broadcasted.getColumn(columnID)
-        self.columns.append(column)
+    def getColumn(self, index):
+        return self.block.getColumn(index)
 
-    def getRow(self, rowID, broadcasted):
-        row = broadcasted.getRow(rowID)
-        self.rows.append(row)
+    def getRow(self, index):
+        return self.block.getRow(index)
 
-    def receiveRowsColumns(self, broadcasted):
-        self.rows = []
-        self.columns = []
+    def receiveColumn(self, id, column):
+        if id in self.columnIDs:
+            self.receivedBlock.mergeColumn(id, column)
+        else:
+            pass
+
+    def receiveRow(self, id, row):
+        if id in self.rowIDs:
+            self.receivedBlock.mergeRow(id, row)
+        else:
+            pass
+
+
+    def receiveRowsColumns(self):
         if self.proposer == 1:
             self.logger.error("I am a block proposer", extra=self.format)
         else:
             self.logger.debug("Receiving the data...", extra=self.format)
-            for r in self.rowIDs:
-                self.getRow(r, broadcasted)
-            for c in self.columnIDs:
-                self.getColumn(c, broadcasted)
+            #self.logger.debug("%s -> %s", self.block.data, self.receivedBlock.data, extra=self.format)
 
-    def sendColumn(self, c, columnID, broadcasted):
-        broadcasted.data[columnID::self.blockSize] |= self.columns[c]
+            self.block.merge(self.receivedBlock)
 
-    def sendRow(self, r, rowID, broadcasted):
-        broadcasted.data[rowID*self.blockSize:(rowID+1)*self.blockSize] |= self.rows[r]
+    def sendColumn(self, columnID):
+        line = self.getColumn(columnID)
+        if line.any():
+            self.logger.debug("col %d -> %s", columnID, self.columnNeighbors[columnID] , extra=self.format)
+            for n in self.columnNeighbors[columnID]:
+                n.receiveColumn(columnID, line)
 
-    def sendRows(self, broadcasted):
+    def sendRow(self, rowID):
+        line = self.getRow(rowID)
+        if line.any():
+            self.logger.debug("row %d -> %s", rowID, self.rowNeighbors[rowID], extra=self.format)
+            for n in self.rowNeighbors[rowID]:
+                n.receiveRow(rowID, line)
+
+    def sendRows(self):
         if self.proposer == 1:
             self.logger.error("I am a block proposer", extra=self.format)
         else:
             self.logger.debug("Sending restored rows...", extra=self.format)
-            for r in range(len(self.rowIDs)):
-                self.sendRow(r, self.rowIDs[r], broadcasted)
+            for r in self.rowIDs:
+                self.sendRow(r)
 
-    def sendColumns(self, broadcasted):
+    def sendColumns(self):
        if self.proposer == 1:
             self.logger.error("I am a block proposer", extra=self.format)
         else:
             self.logger.debug("Sending restored columns...", extra=self.format)
-            for c in range(len(self.columnIDs)):
-                self.sendColumn(c, self.columnIDs[c], broadcasted)
+            for c in self.columnIDs:
+                self.sendColumn(c)
 
     def logRows(self):
-        self.logger.debug("Rows: "+str(self.rows), extra=self.format)
+        if self.logger.isEnabledFor(logging.DEBUG):
+            for id in self.rowIDs:
+                self.logger.debug("Row %d: %s", id, self.getRow(id), extra=self.format)
 
     def logColumns(self):
-        self.logger.debug("Columns: "+str(self.columns), extra=self.format)
+        if self.logger.isEnabledFor(logging.DEBUG):
+            for id in self.columnIDs:
+                self.logger.debug("Column %d: %s", id, self.getColumn(id), extra=self.format)
 
     def restoreRows(self):
-        for rid in range(len(self.rows)):
-            row = self.rows[rid]
-            success = row.count(1)
-
-            if success >= len(row)/2:
-                self.rows[rid].setall(1)
-                self.logger.debug("%d samples restored in row %d" % (len(row)-success, self.rowIDs[rid]), extra=self.format )
-            else:
-                self.logger.debug("Row %d cannot be restored" % (self.rowIDs[rid]), extra=self.format)
+        for id in self.rowIDs:
+            self.block.repairRow(id)
 
     def restoreColumns(self):
-        for cid in range(len(self.columns)):
-            column = self.columns[cid]
-            success = column.count(1)
-            if success >= len(column)/2:
-                self.columns[cid].setall(1)
-                self.logger.debug("%d samples restored in column %d" % (len(column)-success, self.columnIDs[cid]), extra=self.format)
-            else:
-                self.logger.debug("Column %d cannot be restored" % (self.columnIDs[cid]), extra=self.format)
+        for id in self.columnIDs:
+            self.block.repairColumn(id)
 
+    def checkStatus(self):
+        arrived = 0
+        expected = 0
+        for id in self.columnIDs:
+            line = self.getColumn(id)
+            arrived += line.count(1)
+            expected += len(line)
+        for id in self.rowIDs:
+            line = self.getRow(id)
+            arrived += line.count(1)
+            expected += len(line)
+        self.logger.debug("status: %d / %d", arrived, expected, extra=self.format)
+        return (arrived, expected)
diff --git a/study.py b/study.py
index fd88312..04578de 100644
--- a/study.py
+++ b/study.py
@@ -21,6 +21,7 @@ def study():
         result = 0
         for i in range(maxTries):
             sim.initValidators()
+            sim.initNetwork()
            result += sim.run()
             simCnt += 1
         frRange.append(fr)