Merge pull request #6 from status-im/develop

First version of the simulator
This commit is contained in:
Leo 2022-12-14 18:58:32 +01:00 committed by GitHub
commit a9112c418e
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
10 changed files with 407 additions and 1 deletions

2
.gitignore vendored Normal file
View File

@ -0,0 +1,2 @@
*.swp
*.pyc

1
DAS/__init__.py Normal file
View File

@ -0,0 +1 @@
from DAS.simulator import *

34
DAS/block.py Normal file
View File

@ -0,0 +1,34 @@
#!/bin/python3
import random
from bitarray import bitarray
from bitarray.util import zeros
class Block:
    """A square data blob of blockSize*blockSize samples, backed by a flat bitarray.

    A set bit means the corresponding sample is available; a cleared bit
    means it is missing.
    """
    blockSize = 0
    data = bitarray()

    def __init__(self, size):
        """Allocate a size*size block with every sample initially missing (0)."""
        self.blockSize = size
        self.data = zeros(size * size)

    def fill(self):
        """Mark every sample of the block as available."""
        self.data.setall(1)

    def getColumn(self, columnID):
        """Return column `columnID` as a strided bitarray slice."""
        return self.data[columnID::self.blockSize]

    def getRow(self, rowID):
        """Return row `rowID` as a contiguous bitarray slice."""
        start = rowID * self.blockSize
        return self.data[start:start + self.blockSize]

    def print(self):
        """Pretty-print the block as a bordered grid of 0/1 characters."""
        border = "-" * (self.blockSize + 2)
        print(border)
        for r in range(self.blockSize):
            cells = "".join("%i" % self.data[(r * self.blockSize) + c]
                            for c in range(self.blockSize))
            print("|" + cells + "|")
        print(border)

52
DAS/observer.py Normal file
View File

@ -0,0 +1,52 @@
#!/bin/python3
from DAS.block import *
class Observer:
    """Omniscient, global view of the network used to track block dissemination."""
    block = []
    blockSize = 0
    rows = []
    columns = []
    goldenData = []
    broadcasted = []
    logger = []

    def __init__(self, blockSize, logger):
        """Remember the block geometry and the logger to report through."""
        self.format = {"entity": "Observer"}
        self.blockSize = blockSize
        self.logger = logger

    def reset(self):
        """Clear all per-run state ahead of a fresh simulation."""
        samples = self.blockSize * self.blockSize
        self.block = [0] * samples
        self.goldenData = [0] * samples
        self.rows = [0] * self.blockSize
        self.columns = [0] * self.blockSize
        self.broadcasted = Block(self.blockSize)

    def checkRowsColumns(self, validators):
        """Tally validators per row/column and warn if any line is uncovered."""
        for v in validators:
            if v.proposer == 0:
                for rowID in v.rowIDs:
                    self.rows[rowID] += 1
                for columnID in v.columnIDs:
                    self.columns[columnID] += 1
        for i in range(self.blockSize):
            self.logger.debug("Row/Column %d have %d and %d validators assigned." % (i, self.rows[i], self.columns[i]), extra=self.format)
            if self.rows[i] == 0 or self.columns[i] == 0:
                self.logger.warning("There is a row/column that has not been assigned", extra=self.format)

    def setGoldenData(self, block):
        """Snapshot the proposer's block as the ground truth for this run."""
        for i in range(self.blockSize * self.blockSize):
            self.goldenData[i] = block.data[i]

    def checkBroadcasted(self):
        """Return the number of samples still missing from the broadcast block."""
        missing = sum(1 for i in range(self.blockSize * self.blockSize)
                      if self.broadcasted.data[i] == 0)
        if missing > 0:
            self.logger.debug("There are %d missing samples in the network" % missing, extra=self.format)
        return missing

1
DAS/requeriments.txt Normal file
View File

@ -0,0 +1 @@
bitarray==2.6.0

91
DAS/simulator.py Normal file
View File

@ -0,0 +1,91 @@
#!/bin/python
import logging
from datetime import datetime
from DAS.tools import *
from DAS.observer import *
from DAS.validator import *
class Simulator:
    """Drives one DAS experiment: builds the validator set, then runs
    dissemination rounds until the block is available or progress stalls."""
    chi = 8                      # rows/columns custodied per validator
    blockSize = 256              # block is blockSize x blockSize samples
    numberValidators = 8192
    failureRate = 0              # percent of samples dropped on broadcast
    proposerID = 0
    logLevel = logging.INFO
    deterministic = 0            # truthy => validators seed RNG with their ID
    validators = []
    glob = []
    logger = []
    format = {}
    steps = 0

    def __init__(self, failureRate):
        """Create a simulator with the given per-sample failure rate (percent)."""
        self.failureRate = failureRate
        self.format = {"entity": "Simulator"}
        self.steps = 0

    def initValidators(self):
        """(Re)build the global observer and the validator population."""
        if not self.deterministic:
            # Seed from OS entropy. The previous random.seed(datetime.now())
            # raises TypeError on Python 3.11+, where seeding with types other
            # than int/float/str/bytes/bytearray/None was removed.
            random.seed()
        self.glob = Observer(self.blockSize, self.logger)
        self.glob.reset()
        self.validators = []
        for i in range(self.numberValidators):
            # Exactly one validator (the proposerID) acts as block proposer.
            proposer = 1 if i == self.proposerID else 0
            val = Validator(i, self.chi, self.blockSize, proposer, self.failureRate, self.deterministic, self.logger)
            if i == self.proposerID:
                val.initBlock()
                self.glob.setGoldenData(val.block)
            else:
                val.logIDs()
            self.validators.append(val)

    def initLogger(self):
        """Attach a colored stream handler to the shared "DAS" logger."""
        logger = logging.getLogger("DAS")
        logger.setLevel(self.logLevel)
        ch = logging.StreamHandler()
        ch.setLevel(self.logLevel)
        ch.setFormatter(CustomFormatter())
        logger.addHandler(ch)
        self.logger = logger

    def resetFailureRate(self, failureRate):
        """Change the failure rate used by the next initValidators() call."""
        self.failureRate = failureRate

    def run(self):
        """Run dissemination rounds; return 0 on full availability, 1 on failure.

        Each round every non-proposer validator pulls its rows/columns,
        attempts restoration, and pushes the restored lines back. The loop
        stops when nothing is missing or when a round makes no progress.
        """
        self.glob.checkRowsColumns(self.validators)
        self.validators[self.proposerID].broadcastBlock(self.glob.broadcasted)
        missingSamples = self.glob.checkBroadcasted()
        self.steps = 0
        while missingSamples > 0:
            oldMissingSamples = missingSamples
            self.logger.debug("Step %d:" % self.steps, extra=self.format)
            for i in range(1, self.numberValidators):
                self.validators[i].receiveRowsColumns(self.glob.broadcasted)
                # Rows
                self.validators[i].restoreRows()
                self.validators[i].sendRows(self.glob.broadcasted)
                self.validators[i].logRows()
                self.validators[i].logColumns()
                # Columns
                self.validators[i].restoreColumns()
                self.validators[i].sendColumns(self.glob.broadcasted)
                self.validators[i].logRows()
                self.validators[i].logColumns()
            missingSamples = self.glob.checkBroadcasted()
            if missingSamples == oldMissingSamples:
                break        # no progress this round: block unrecoverable
            elif missingSamples == 0:
                break        # done; do not count an extra step
            else:
                self.steps += 1
        if missingSamples == 0:
            self.logger.debug("The entire block is available at step %d, with failure rate %d !" % (self.steps, self.failureRate), extra=self.format)
            return 0
        else:
            self.logger.debug("The block cannot be recovered, failure rate %d!" % self.failureRate, extra=self.format)
            return 1

27
DAS/tools.py Normal file
View File

@ -0,0 +1,27 @@
#!/bin/python3
import logging
class CustomFormatter(logging.Formatter):
    """Log formatter that colors the entire line according to its severity."""
    blue = "\x1b[34;20m"
    grey = "\x1b[38;20m"
    yellow = "\x1b[33;20m"
    red = "\x1b[31;20m"
    bold_red = "\x1b[31;1m"
    reset = "\x1b[0m"
    # NOTE: this string attribute is shadowed by the format() method defined
    # below; it is only read while the class body is being evaluated (when
    # FORMATS is built), so the shadowing is harmless but easy to trip over.
    format = "%(levelname)s : %(entity)s : %(message)s"
    # Per-level format strings: same layout, different ANSI color wrapper.
    FORMATS = {
        logging.DEBUG: grey + format + reset,
        logging.INFO: blue + format + reset,
        logging.WARNING: yellow + format + reset,
        logging.ERROR: red + format + reset,
        logging.CRITICAL: bold_red + format + reset,
    }

    def format(self, record):
        """Format `record` using the colored layout matching its level."""
        colored_fmt = self.FORMATS.get(record.levelno)
        return logging.Formatter(colored_fmt).format(record)

140
DAS/validator.py Normal file
View File

@ -0,0 +1,140 @@
#!/bin/python3
import random
from DAS.block import *
from bitarray import bitarray
from bitarray.util import zeros
class Validator:
    """A DAS validator that samples, restores and re-broadcasts rows/columns."""
    ID = 0
    chi = 0                # number of rows and columns this validator custodies
    format = {}
    blocksize = 0          # NOTE: unused; instances use self.blockSize (set in __init__)
    block = []
    rowIDs = []
    columnIDs = []
    rows = []
    columns = []
    proposer = 0           # 1 if this validator is the block proposer
    failureRate = 0        # percent chance a broadcast sample is dropped
    logger = []

    def __init__(self, ID, chi, blockSize, proposer, failureRate, deterministic, logger):
        """Set up identity, logging, and the sampled row/column assignment.

        When `deterministic` is truthy the RNG is seeded with the validator ID
        so that assignments are reproducible across runs. An out-of-range chi
        is reported and leaves the validator with no assignment.
        """
        self.ID = ID
        self.format = {"entity": "Val "+str(self.ID)}
        self.blockSize = blockSize
        self.proposer = proposer
        self.failureRate = failureRate
        self.logger = logger
        if chi < 1:
            self.logger.error("Chi has to be greater than 0", extra=self.format)
        elif chi > blockSize:
            self.logger.error("Chi has to be smaller than %d" % blockSize, extra=self.format)
        else:
            self.chi = chi
            self.rowIDs = []
            self.columnIDs = []
            if deterministic:
                random.seed(self.ID)
            self.rowIDs = random.sample(range(self.blockSize), self.chi)
            self.columnIDs = random.sample(range(self.blockSize), self.chi)

    def logIDs(self):
        """Log this validator's row/column assignment (proposers have none)."""
        if self.proposer == 1:
            # BUGFIX: the original did `"I am a block proposer."% self.ID`,
            # which raises TypeError (no placeholder in the string), and it
            # omitted extra=self.format, which the log format requires.
            self.logger.warning("I am a block proposer.", extra=self.format)
        else:
            self.logger.debug("Selected rows: "+str(self.rowIDs), extra=self.format)
            self.logger.debug("Selected columns: "+str(self.columnIDs), extra=self.format)

    def initBlock(self):
        """Create and fully populate this proposer's block."""
        self.logger.debug("I am a block proposer.", extra=self.format)
        self.block = Block(self.blockSize)
        self.block.fill()
        #self.block.print()

    def broadcastBlock(self, broadcasted):
        """Push the proposer's block into the network sample by sample.

        Samples are sent in random order and each one is dropped with
        probability failureRate percent.
        """
        if self.proposer == 0:
            self.logger.error("I am NOT a block proposer", extra=self.format)
        else:
            self.logger.debug("Broadcasting my block...", extra=self.format)
            order = [i for i in range(self.blockSize * self.blockSize)]
            random.shuffle(order)
            while order:
                i = order.pop()
                if random.randint(0, 99) > self.failureRate:
                    broadcasted.data[i] = self.block.data[i]
            #broadcasted.print()

    def getColumn(self, columnID, broadcasted):
        """Fetch one custodied column from the broadcast block."""
        column = broadcasted.getColumn(columnID)
        self.columns.append(column)

    def getRow(self, rowID, broadcasted):
        """Fetch one custodied row from the broadcast block."""
        row = broadcasted.getRow(rowID)
        self.rows.append(row)

    def receiveRowsColumns(self, broadcasted):
        """Refresh local copies of all custodied rows and columns."""
        self.rows = []
        self.columns = []
        if self.proposer == 1:
            self.logger.error("I am a block proposer", extra=self.format)
        else:
            self.logger.debug("Receiving the data...", extra=self.format)
            for r in self.rowIDs:
                self.getRow(r, broadcasted)
            for c in self.columnIDs:
                self.getColumn(c, broadcasted)

    def sendColumn(self, c, columnID, broadcasted):
        """OR local column `c` into the broadcast block at `columnID`."""
        broadcasted.data[columnID::self.blockSize] |= self.columns[c]

    def sendRow(self, r, rowID, broadcasted):
        """OR local row `r` into the broadcast block at `rowID`."""
        broadcasted.data[rowID*self.blockSize:(rowID+1)*self.blockSize] |= self.rows[r]

    def sendRows(self, broadcasted):
        """Publish all (possibly restored) custodied rows."""
        if self.proposer == 1:
            self.logger.error("I am a block proposer", extra=self.format)
        else:
            self.logger.debug("Sending restored rows...", extra=self.format)
            for r in range(len(self.rowIDs)):
                self.sendRow(r, self.rowIDs[r], broadcasted)

    def sendColumns(self, broadcasted):
        """Publish all (possibly restored) custodied columns."""
        if self.proposer == 1:
            self.logger.error("I am a block proposer", extra=self.format)
        else:
            self.logger.debug("Sending restored columns...", extra=self.format)
            for c in range(len(self.columnIDs)):
                self.sendColumn(c, self.columnIDs[c], broadcasted)

    def logRows(self):
        """Debug-log the local row copies."""
        self.logger.debug("Rows: "+str(self.rows), extra=self.format)

    def logColumns(self):
        """Debug-log the local column copies."""
        self.logger.debug("Columns: "+str(self.columns), extra=self.format)

    def restoreRows(self):
        """Restore each row that has at least half of its samples.

        Models erasure coding: >= 50% of a line suffices to rebuild it, so a
        qualifying row is simply set to all-ones.
        """
        for rid in range(len(self.rows)):
            row = self.rows[rid]
            success = row.count(1)
            if success >= len(row)/2:
                self.rows[rid].setall(1)
                self.logger.debug("%d samples restored in row %d" % (len(row)-success, self.rowIDs[rid]), extra=self.format )
            else:
                self.logger.debug("Row %d cannot be restored" % (self.rowIDs[rid]), extra=self.format)

    def restoreColumns(self):
        """Restore each column that has at least half of its samples."""
        for cid in range(len(self.columns)):
            column = self.columns[cid]
            success = column.count(1)
            if success >= len(column)/2:
                self.columns[cid].setall(1)
                self.logger.debug("%d samples restored in column %d" % (len(column)-success, self.columnIDs[cid]), extra=self.format)
            else:
                self.logger.debug("Column %d cannot be restored" % (self.columnIDs[cid]), extra=self.format)

View File

@ -1,3 +1,26 @@
# DAS Research
## (Collaboration between Codex and the EF)
This repository hosts all the research on DAS for the collaboration between Codex and the EF.
## Prepare the environment
* Clone the DAS repository (if not done yet) and go into the das-research directory
```
$ git clone https://github.com/status-im/das-research.git
$ cd das-research
```
* Create a virtual environment and install the requirements
```
$ python3 -m venv myenv
$ source myenv/bin/activate
$ pip3 install -r DAS/requeriments.txt
```
## Run the simulator
```
$ python3 study.py
```

35
study.py Normal file
View File

@ -0,0 +1,35 @@
#! /bin/python3
import time
from DAS import *
def study():
    """Sweep the sample failure rate and report the DAS success percentage."""
    sim = Simulator(0)
    sim.initLogger()
    maxTries = 10
    step = 20
    frRange = []
    resultRange = []
    simCnt = 0
    sim.logger.info("Starting simulations:", extra=sim.format)
    start = time.time()
    for fr in range(0, 100, step):
        if fr % 10 == 0:
            sim.logger.info("Failure rate %d %% ..." % fr, extra=sim.format)
        sim.resetFailureRate(fr)
        # Count failed runs out of maxTries at this failure rate.
        failures = 0
        for _ in range(maxTries):
            sim.initValidators()
            failures += sim.run()
            simCnt += 1
        frRange.append(fr)
        resultRange.append((maxTries - failures) * 100 / maxTries)
    end = time.time()
    sim.logger.info("A total of %d simulations ran in %d seconds" % (simCnt, end - start), extra=sim.format)
    for fr, pct in zip(frRange, resultRange):
        sim.logger.info("For failure rate of %d we got %d %% success rate in DAS!" % (fr, pct), extra=sim.format)

study()