Implemented state saving and completion check using pickle files

Signed-off-by: Arunima Chaudhuri <arunimachaudhuri2020@gmail.com>
Arunima Chaudhuri 2024-03-24 21:00:18 +00:00
parent 1303229026
commit 705ab8a962
2 changed files with 84 additions and 0 deletions
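The mechanism behind the commit is a simple append-only pickle pattern: the simulator appends one pickled snapshot per step to a per-run backup file, then appends the string "completed" as a sentinel once the run finishes; the completion check unpickles objects from such a file until EOF and inspects the last one. Below is a minimal, self-contained sketch of that pattern, for orientation only; the helper names append_snapshot / run_finished and the demo path are illustrative and not part of the diff.

import os
import pickle


def append_snapshot(backup_file, snapshot):
    # Append one pickled object; 'ab' keeps earlier dumps in the file intact.
    os.makedirs(os.path.dirname(backup_file), exist_ok=True)
    with open(backup_file, 'ab') as f:
        pickle.dump(snapshot, f)


def run_finished(backup_file):
    # Unpickle objects one by one until EOF; the run counts as complete
    # only if the last object stored is the "completed" sentinel.
    if not os.path.exists(backup_file):
        return False
    items = []
    with open(backup_file, 'rb') as f:
        while True:
            try:
                items.append(pickle.load(f))
            except EOFError:
                break
    return bool(items) and items[-1] == "completed"


if __name__ == "__main__":
    path = "results/demo/backup/simulation_data_demo.pkl"  # illustrative path
    for step in range(3):
        append_snapshot(path, {"step": step})
    append_snapshot(path, "completed")
    print(run_finished(path))  # -> True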

View File

@@ -9,6 +9,9 @@ from DAS.tools import *
from DAS.results import *
from DAS.observer import *
from DAS.node import *
import os
import pickle
import uuid

class Simulator:
    """This class implements the main DAS simulator."""
@@ -273,7 +276,9 @@ class Simulator:
        trafficStatsVector = []
        malicious_nodes_not_added_count = 0
        steps = 0
        unique_run_id = str(uuid.uuid4())
        while(True):
            vectors_data = []
            missingVector.append(missingSamples)
            self.logger.debug("Expected Samples: %d" % expected, extra=self.format)
            self.logger.debug("Missing Samples: %d" % missingSamples, extra=self.format)
@@ -350,7 +355,43 @@ class Simulator:
                self.logger.debug("The entire block is available at step %d, with failure rate %d !" % (steps, self.shape.failureRate), extra=self.format)
                missingVector.append(missingSamples)
                break

            # Snapshot per-validator state for this step and append it to the
            # per-run backup file, so an interrupted run can be restarted later.
            for i in range(0, self.shape.numberNodes):
                validator_data = {
                    'validator_ID': self.validators[i].ID,
                    'rowIDs': list(self.validators[i].rowIDs),
                    'columnIDs': list(self.validators[i].columnIDs),
                    'amImalicious': self.validators[i].amImalicious,
                    'amIaddedToQueue': self.validators[i].amIaddedToQueue,
                    'msgSentCount': self.validators[i].msgSentCount,
                    'msgRecvCount': self.validators[i].msgRecvCount,
                    'sampleSentCount': self.validators[i].sampleSentCount,
                    'sampleRecvCount': self.validators[i].sampleRecvCount,
                    'restoreRowCount': self.validators[i].restoreRowCount,
                    'restoreColumnCount': self.validators[i].restoreColumnCount,
                    'repairedSampleCount': self.validators[i].repairedSampleCount,
                    'rowNeighbors': list(self.validators[i].rowNeighbors),
                    'columnNeighbors': list(self.validators[i].columnNeighbors)
                }
                vectors_data.append(validator_data)
            vectors_data += (progressVector, missingVector)

            backup_folder = f"results/{self.execID}/backup"
            if not os.path.exists(backup_folder):
                os.makedirs(backup_folder)
            backup_file = os.path.join(backup_folder, f"simulation_data_{unique_run_id}.pkl")
            with open(backup_file, 'ab') as f:  # append binary: one pickled snapshot per step
                pickle.dump(vectors_data, f)

            steps += 1

        # After the loop exits, append a "completed" sentinel so a later restart
        # can tell a finished run from an interrupted one.
        backup_folder = f"results/{self.execID}/backup"
        if not os.path.exists(backup_folder):
            os.makedirs(backup_folder)
        backup_file = os.path.join(backup_folder, f"simulation_data_{unique_run_id}.pkl")
        with open(backup_file, 'ab') as f:  # Open in append binary mode
            pickle.dump("completed", f)

        for i in range(0,self.shape.numberNodes):
            if not self.validators[i].amIaddedToQueue :

View File

@@ -45,7 +45,50 @@ def runOnce(config, shape, execID):
    return result

def check_simulation_completion(state_file):
    backup_dir = os.path.join(os.path.dirname(state_file), "backup")
    if not os.path.exists(backup_dir):
        return False

    all_completed = True
    for filename in sorted(os.listdir(backup_dir), reverse=True):  # iterate in reverse order
        if not filename.endswith(".pkl"):
            continue
        full_path = os.path.join(backup_dir, filename)
        try:
            with open(full_path, 'rb') as f:
                items = []
                while True:
                    try:
                        item = pickle.load(f)
                        items.append(item)  # load all items
                    except EOFError:  # reached end of file
                        break
                # An empty or truncated file counts as incomplete.
                if not items or items[-1] != "completed":
                    all_completed = False
                    break  # no need to continue checking other files
        except (OSError, pickle.UnpicklingError) as e:
            print(f"Error loading state from {full_path}: {e}")
            all_completed = False  # treat errors as incomplete
            break  # no need to continue checking other files
    return all_completed

def study():
    restart_path = None
    for arg in sys.argv[1:]:
        if arg.startswith("--restart="):
            restart_path = arg[len("--restart="):]

    if restart_path:
        execID = restart_path.split("/")[1]
        state_file = f"results/{execID}/backup"
        print(check_simulation_completion(state_file))
        sys.exit(0)

    if len(sys.argv) < 2:
        print("You need to pass a configuration file in parameter")
        exit(1)