add per-node state and per-channel network

Signed-off-by: Csaba Kiraly <csaba.kiraly@gmail.com>
Csaba Kiraly 2022-12-20 11:13:54 +01:00
parent db61300c46
commit 7a5f67ff31
4 changed files with 126 additions and 63 deletions


@@ -50,3 +50,12 @@ class Observer:
self.logger.debug("There are %d missing samples in the network" % zeros, extra=self.format)
return zeros
def checkStatus(self, validators):
arrived = 0
expected = 0
for val in validators:
if val.proposer == 0:
(a, e) = val.checkStatus()
arrived += a
expected += e
return (arrived, expected)
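With per-node state, the observer measures global progress by polling each node instead of inspecting a shared broadcast bitmap. A minimal, self-contained sketch of this aggregation, using a hypothetical Node stand-in rather than the simulator's Validator class:

# Hypothetical stand-in for a validator reporting its own sample counts.
class Node:
    def __init__(self, arrived, expected, proposer=0):
        self.proposer = proposer
        self._arrived = arrived
        self._expected = expected

    def checkStatus(self):
        return (self._arrived, self._expected)

def check_status(nodes):
    arrived = expected = 0
    for n in nodes:
        if n.proposer == 0:  # the proposer already holds the full block
            a, e = n.checkStatus()
            arrived += a
            expected += e
    return (arrived, expected)

nodes = [Node(0, 0, proposer=1), Node(100, 128), Node(90, 128)]
print(check_status(nodes))  # (190, 256) -> 66 samples still missing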


@@ -1,5 +1,6 @@
#!/bin/python
import networkx as nx
import logging
from datetime import datetime
from DAS.tools import *
@@ -41,6 +42,33 @@ class Simulator:
val.logIDs()
self.validators.append(val)
def initNetwork(self, d=6):
rowChannels = [[] for i in range(self.blockSize)]
columnChannels = [[] for i in range(self.blockSize)]
for v in self.validators:
for id in v.rowIDs:
rowChannels[id].append(v)
for id in v.columnIDs:
columnChannels[id].append(v)
for id in range(self.blockSize):
G = nx.random_regular_graph(d, len(rowChannels[id]))
if not nx.is_connected(G):
self.logger.error("graph not connected for row %d !" % id, extra=self.format)
for u, v in G.edges:
val1=rowChannels[id][u]
val2=rowChannels[id][v]
val1.rowNeighbors[id].append(val2)
val2.rowNeighbors[id].append(val1)
G = nx.random_regular_graph(d, len(columnChannels[id]))
if not nx.is_connected(G):
self.logger.error("graph not connected for column %d !" % id, extra=self.format)
for u, v in G.edges:
val1=columnChannels[id][u]
val2=columnChannels[id][v]
val1.columnNeighbors[id].append(val2)
val2.columnNeighbors[id].append(val1)
def initLogger(self):
logger = logging.getLogger("DAS")
logger.setLevel(self.logLevel)
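In effect, initNetwork turns every row and every column into its own gossip channel: the validators assigned to that row or column are wired together by a random d-regular graph, and each edge becomes a pair of neighbor entries. A small hypothetical illustration of the same construction (channel ids and node names are made up):

import collections
import networkx as nx

# Hypothetical miniature of the per-channel topology: every channel id gets
# its own random d-regular graph over its subscribers, and each edge becomes
# a bidirectional neighbor link.
d = 2
subscribers = {0: ["A", "B", "C", "D"], 1: ["B", "C", "D", "E"]}
neighbors = {ch: collections.defaultdict(list) for ch in subscribers}

for ch, nodes in subscribers.items():
    G = nx.random_regular_graph(d, len(nodes))
    if not nx.is_connected(G):
        print("graph not connected for channel %d" % ch)
    for u, v in G.edges:
        neighbors[ch][nodes[u]].append(nodes[v])
        neighbors[ch][nodes[v]].append(nodes[u])

print(neighbors[0]["A"])  # e.g. ['B', 'D']: A's neighbors on channel 0

The is_connected check matters because a node stranded in a disconnected component of a row graph can only obtain that row's samples indirectly, through its column channels.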
@@ -55,26 +83,26 @@ class Simulator:
def run(self):
self.glob.checkRowsColumns(self.validators)
self.validators[self.proposerID].broadcastBlock(self.glob.broadcasted)
missingSamples = self.glob.checkBroadcasted()
self.validators[self.proposerID].broadcastBlock()
arrived, expected = self.glob.checkStatus(self.validators)
missingSamples = expected - arrived
self.steps = 0
while(missingSamples > 0):
oldMissingSamples = missingSamples
self.logger.debug("Step %d:" % self.steps, extra=self.format)
self.logger.info("Step %d:" % self.steps, extra=self.format)
for i in range(1,self.numberValidators):
self.validators[i].receiveRowsColumns()
for i in range(1,self.numberValidators):
self.validators[i].receiveRowsColumns(self.glob.broadcasted)
#Rows
self.validators[i].restoreRows()
self.validators[i].sendRows(self.glob.broadcasted)
self.validators[i].logRows()
self.validators[i].logColumns()
# Columns
self.validators[i].restoreColumns()
self.validators[i].sendColumns(self.glob.broadcasted)
self.validators[i].sendRows()
self.validators[i].sendColumns()
self.validators[i].logRows()
self.validators[i].logColumns()
missingSamples = self.glob.checkBroadcasted()
arrived, expected = self.glob.checkStatus(self.validators)
missingSamples = expected - arrived
self.logger.info("step %d, missing %d of %d" % (self.steps, missingSamples, expected), extra=self.format)
if missingSamples == oldMissingSamples:
break
elif missingSamples == 0:
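The loop now alternates rounds of receive, repair and send until either every expected sample has arrived or a round makes no progress. A condensed, hypothetical restatement of that control flow (single helper function, method names taken from the Validator changes below; not the file's exact code):

# Hypothetical skeleton of the dissemination loop after this change;
# `nodes` are the non-proposer validators, `check_status` aggregates as above.
def run_rounds(nodes, proposer, check_status, max_steps=1000):
    proposer.broadcastBlock()            # seed the row/column channels once
    arrived, expected = check_status(nodes)
    missing = expected - arrived
    step = 0
    while missing > 0 and step < max_steps:
        old_missing = missing
        for n in nodes:
            n.receiveRowsColumns()       # merge samples buffered by neighbors
        for n in nodes:
            n.restoreRows()              # erasure-repair lines (>=50% known in the old inline rule)
            n.restoreColumns()
            n.sendRows()                 # push repaired lines to channel neighbors
            n.sendColumns()
        arrived, expected = check_status(nodes)
        missing = expected - arrived
        if missing == old_missing:       # no progress this round: give up
            break
        step += 1
    return missing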


@@ -1,6 +1,7 @@
#!/bin/python3
import random
import collections
from DAS.block import *
from bitarray import bitarray
from bitarray.util import zeros
@@ -11,11 +12,6 @@ class Validator:
chi = 0
format = {}
blocksize = 0
block = []
rowIDs = []
columnIDs = []
rows = []
columns = []
proposer = 0
failureRate = 0
logger = []
@@ -25,6 +21,8 @@ class Validator:
self.ID = ID
self.format = {"entity": "Val "+str(self.ID)}
self.blockSize = blockSize
self.block = Block(blockSize)
self.receivedBlock = Block(blockSize)
self.proposer = proposer
self.failureRate = failureRate
self.logger = logger
@@ -34,12 +32,18 @@ class Validator:
self.logger.error("Chi has to be smaller than %d" % blockSize, extra=self.format)
else:
self.chi = chi
self.rowIDs = []
self.columnIDs = []
if deterministic:
random.seed(self.ID)
self.rowIDs = random.sample(range(self.blockSize), self.chi)
self.columnIDs = random.sample(range(self.blockSize), self.chi)
if proposer:
self.rowIDs = range(blockSize)
self.columnIDs = range(blockSize)
else:
self.rowIDs = []
self.columnIDs = []
if deterministic:
random.seed(self.ID)
self.rowIDs = random.sample(range(self.blockSize), self.chi)
self.columnIDs = random.sample(range(self.blockSize), self.chi)
self.rowNeighbors = collections.defaultdict(list)
self.columnNeighbors = collections.defaultdict(list)
def logIDs(self):
if self.proposer == 1:
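The removed class-level attributes (block, rowIDs, rows, ...) were shared across all Validator instances in Python, which is exactly what "per-node state" fixes: everything is now assigned in __init__. Two details worth calling out, shown in a small hypothetical sketch: the proposer custodies every row and column, and the neighbor maps are per-channel defaultdicts, so a list springs into existence the first time a channel id is touched:

import collections
import random

class NodeState:  # illustrative only, not the simulator's Validator class
    def __init__(self, ID, blockSize, chi, proposer=False):
        if proposer:
            self.rowIDs = range(blockSize)      # proposer holds the whole block
            self.columnIDs = range(blockSize)
        else:
            random.seed(ID)                     # deterministic custody assignment
            self.rowIDs = random.sample(range(blockSize), chi)
            self.columnIDs = random.sample(range(blockSize), chi)
        # one neighbor list per row/column channel, created lazily on first use
        self.rowNeighbors = collections.defaultdict(list)
        self.columnNeighbors = collections.defaultdict(list)

a, b = NodeState(1, 8, 2), NodeState(2, 8, 2)
a.rowNeighbors[3].append(b)
print(len(a.rowNeighbors[3]), len(b.rowNeighbors[3]))  # 1 0 -> state is per instance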
@@ -54,61 +58,82 @@ class Validator:
self.block.fill()
#self.block.print()
def broadcastBlock(self, broadcasted):
def broadcastBlock(self):
if self.proposer == 0:
self.logger.error("I am NOT a block proposer", extra=self.format)
else:
self.logger.debug("Broadcasting my block...", extra=self.format)
tempBlock = self.block
order = [i for i in range(self.blockSize * self.blockSize)]
random.shuffle(order)
while(order):
i = order.pop()
if (random.randint(0,99) > self.failureRate):
broadcasted.data[i] = self.block.data[i]
self.block.data[i] = 1
else:
self.block.data[i] = 0
#broadcasted.print()
for id in range(self.blockSize):
self.sendColumn(id, id)
for id in range(self.blockSize):
self.sendRow(id, id)
def getColumn(self, columnID, broadcasted):
column = broadcasted.getColumn(columnID)
self.columns.append(column)
def getColumn(self, index):
return self.block.getColumn(index)
def getRow(self, rowID, broadcasted):
row = broadcasted.getRow(rowID)
self.rows.append(row)
def getRow(self, index):
return self.block.getRow(index)
def receiveRowsColumns(self, broadcasted):
self.rows = []
self.columns = []
def receiveColumn(self, id, column):
if id in self.columnIDs:
self.receivedBlock.mergeColumn(id, column)
else:
pass
def receiveRow(self, id, row):
if id in self.rowIDs:
self.receivedBlock.mergeRow(id, row)
else:
pass
def receiveRowsColumns(self):
if self.proposer == 1:
self.logger.error("I am a block proposer", extra=self.format)
else:
self.logger.debug("Receiving the data...", extra=self.format)
for r in self.rowIDs:
self.getRow(r, broadcasted)
for c in self.columnIDs:
self.getColumn(c, broadcasted)
#self.logger.debug("%s -> %s", self.block.data, self.receivedBlock.data, extra=self.format)
def sendColumn(self, c, columnID, broadcasted):
broadcasted.data[columnID::self.blockSize] |= self.columns[c]
self.block.merge(self.receivedBlock)
def sendRow(self, r, rowID, broadcasted):
broadcasted.data[rowID*self.blockSize:(rowID+1)*self.blockSize] |= self.rows[r]
def sendColumn(self, c, columnID):
line = self.getColumn(columnID)
if line.any():
self.logger.debug("col %d -> %s", columnID, self.columnNeighbors[columnID] , extra=self.format)
for n in self.columnNeighbors[columnID]:
n.receiveColumn(columnID, line)
def sendRows(self, broadcasted):
def sendRow(self, r, rowID):
line = self.getRow(rowID)
if line.any():
self.logger.debug("row %d -> %s", rowID, self.rowNeighbors[rowID], extra=self.format)
for n in self.rowNeighbors[rowID]:
n.receiveRow(rowID, line)
def sendRows(self):
if self.proposer == 1:
self.logger.error("I am a block proposer", extra=self.format)
else:
self.logger.debug("Sending restored rows...", extra=self.format)
for r in range(len(self.rowIDs)):
self.sendRow(r, self.rowIDs[r], broadcasted)
self.sendRow(r, self.rowIDs[r])
def sendColumns(self, broadcasted):
def sendColumns(self):
if self.proposer == 1:
self.logger.error("I am a block proposer", extra=self.format)
else:
self.logger.debug("Sending restored columns...", extra=self.format)
for c in range(len(self.columnIDs)):
self.sendColumn(c, self.columnIDs[c], broadcasted)
self.sendColumn(c, self.columnIDs[c])
def logRows(self):
self.logger.debug("Rows: "+str(self.rows), extra=self.format)
@@ -117,24 +142,24 @@ class Validator:
self.logger.debug("Columns: "+str(self.columns), extra=self.format)
def restoreRows(self):
for rid in range(len(self.rows)):
row = self.rows[rid]
success = row.count(1)
if success >= len(row)/2:
self.rows[rid].setall(1)
self.logger.debug("%d samples restored in row %d" % (len(row)-success, self.rowIDs[rid]), extra=self.format )
else:
self.logger.debug("Row %d cannot be restored" % (self.rowIDs[rid]), extra=self.format)
for id in self.rowIDs:
self.block.repairRow(id)
def restoreColumns(self):
for cid in range(len(self.columns)):
column = self.columns[cid]
success = column.count(1)
if success >= len(column)/2:
self.columns[cid].setall(1)
self.logger.debug("%d samples restored in column %d" % (len(column)-success, self.columnIDs[cid]), extra=self.format)
else:
self.logger.debug("Column %d cannot be restored" % (self.columnIDs[cid]), extra=self.format)
for id in self.columnIDs:
self.block.repairColumn(id)
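restoreRows and restoreColumns now delegate to Block.repairRow / repairColumn from the DAS.block module; the rule in the removed inline code was that a line with at least half of its samples present can be restored in full. A hypothetical one-function sketch of that 50% threshold (the real repair lives in DAS.block):

from bitarray import bitarray

def repair_line(line):
    # assumed rule, mirroring the removed inline code: a line that is at least
    # 50% available can be fully recovered by erasure decoding
    if line.count(1) >= len(line) / 2:
        line.setall(1)
    return line

print(repair_line(bitarray('1100')))  # bitarray('1111')
print(repair_line(bitarray('1000')))  # unchanged: not enough samples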
def checkStatus(self):
arrived = 0
expected = 0
for id in self.columnIDs:
line = self.getColumn(id)
arrived += line.count(1)
expected += len(line)
for id in self.rowIDs:
line = self.getRow(id)
arrived += line.count(1)
expected += len(line)
self.logger.debug("status: %d / %d", arrived, expected, extra=self.format)
return (arrived, expected)
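Each node now reports progress over exactly the rows and columns it custodies, counting arrived samples against the expected total; the Observer sums these pairs across nodes (first hunk above). A tiny hypothetical equivalent of the per-node count:

from bitarray import bitarray

def count_status(lines):
    # lines: the bit vectors of all custodied rows and columns of one node
    arrived = sum(line.count(1) for line in lines)
    expected = sum(len(line) for line in lines)
    return (arrived, expected)

print(count_status([bitarray('1100'), bitarray('1111')]))  # (6, 8)

Note that samples at row/column intersections are counted once per line, so the resulting ratio measures line completeness rather than distinct samples.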


@@ -21,6 +21,7 @@ def study():
result = 0
for i in range(maxTries):
sim.initValidators()
sim.initNetwork()
result += sim.run()
simCnt += 1
frRange.append(fr)
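Since the topology now lives in per-node neighbor lists, the driver has to rebuild it after every initValidators(); the per-try sequence is initValidators, then initNetwork, then run. A hypothetical helper mirroring that sequence (sim is an already-constructed Simulator instance):

def one_try(sim):
    sim.initValidators()   # fresh per-node state: block, custody, empty neighbor maps
    sim.initNetwork(d=6)   # wire random d-regular row/column channels over those nodes
    return sim.run()       # rounds of receive/repair/send until done or stalled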