mirror of https://github.com/logos-storage/das-research.git
synced 2026-01-07 15:43:08 +00:00
make the changes on study level
Signed-off-by: Arunima Chaudhuri <arunimachaudhuri2020@gmail.com>
This commit is contained in:
parent
2aa0074163
commit
04004ed1fb

DAS/simulator.py

@@ -9,9 +9,6 @@ from DAS.tools import *
 from DAS.results import *
 from DAS.observer import *
 from DAS.node import *
-import os
-import pickle
-import uuid

 class Simulator:
     """This class implements the main DAS simulator."""
@@ -276,13 +273,6 @@ class Simulator:
         trafficStatsVector = []
         malicious_nodes_not_added_count = 0
         steps = 0
-        unique_run_id = str(uuid.uuid4())
-        backup_folder = f"results/{self.execID}/backup"
-        if not os.path.exists(backup_folder):
-            os.makedirs(backup_folder)
-        backup_file = os.path.join(backup_folder, f"simulation_data_{unique_run_id}.pkl")
-        with open(backup_file, 'ab') as f:
-            pickle.dump(self.shape.__dict__, f)

         while(True):
             missingVector.append(missingSamples)
@@ -363,8 +353,6 @@ class Simulator:
                 break
             steps += 1

-        with open(backup_file, 'ab') as f:
-            pickle.dump("completed", f)

         for i in range(0,self.shape.numberNodes):
             if not self.validators[i].amIaddedToQueue :

study.py (15 changed lines)
@@ -5,6 +5,9 @@ import importlib
 import subprocess
 from joblib import Parallel, delayed
 from DAS import *
+import os
+import pickle
+import uuid

 # Parallel execution:
 # The code currently uses 'joblib' to execute on multiple cores. For other options such as 'ray', see
@@ -29,6 +32,14 @@ def runOnce(config, shape, execID):
     shape.setSeed(config.randomSeed+"-"+str(shape))
     random.seed(shape.randomSeed)

+    unique_run_id = str(uuid.uuid4())
+    backup_folder = f"results/{execID}/backup"
+    if not os.path.exists(backup_folder):
+        os.makedirs(backup_folder)
+    backup_file = os.path.join(backup_folder, f"simulation_data_{unique_run_id}.pkl")
+    with open(backup_file, 'ab') as f:
+        pickle.dump(shape.__dict__, f)
+
     sim = Simulator(shape, config, execID)
     sim.initLogger()
     sim.initValidators()
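
Aside: runOnce is executed in parallel through joblib (see the comment near the top of study.py), so several workers can evaluate the os.path.exists check for results/{execID}/backup at the same moment. Below is a minimal sketch of a race-tolerant equivalent; the ensure_backup_folder helper is illustrative only and not part of this commit or the repo:

import os

def ensure_backup_folder(execID):
    # exist_ok=True avoids the exists()/makedirs() race when several
    # joblib workers create results/{execID}/backup concurrently.
    backup_folder = f"results/{execID}/backup"
    os.makedirs(backup_folder, exist_ok=True)
    return backup_folder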
@@ -43,6 +54,9 @@ def runOnce(config, shape, execID):
     visual = Visualizor(execID, config, [result])
     visual.plotAll()

+    with open(backup_file, 'ab') as f:
+        pickle.dump("completed", f)
+
     return result


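Taken together, the two runOnce hunks above define the backup layout for each run: shape.__dict__ is pickled to results/{execID}/backup/simulation_data_<unique_run_id>.pkl before the Simulator is constructed, and the string "completed" is appended once the result has been plotted. A minimal sketch of how such a file could be read back to tell finished runs from interrupted ones (both helper names and the reading loop are illustrative, not code from this repo):

import pickle

def read_backup(backup_file):
    # Each pickle.dump() appended in 'ab' mode becomes one record here.
    records = []
    with open(backup_file, 'rb') as f:
        while True:
            try:
                records.append(pickle.load(f))
            except EOFError:
                break
    return records

def run_completed(backup_file):
    # A run finished cleanly when the last appended record is "completed".
    records = read_backup(backup_file)
    return bool(records) and records[-1] == "completed"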
@@ -98,7 +112,6 @@ def start_simulation(execID, completed_files, completed_shapes, incomplete_files
         subprocess.run(["git", "diff"], stdout=f)
     with open(dir+"/git.describe", 'w') as f:
         subprocess.run(["git", "describe", "--always"], stdout=f)
-    subprocess.run(["cp", sys.argv[1], dir+"/"])

     logger.info("Starting simulations:", extra=format)
     start = time.time()