mirror of
https://github.com/logos-blockchain/logos-blockchain-simulations.git
synced 2026-02-18 12:13:09 +00:00
remove mixnet-analysis
This commit is contained in:
parent
619c24cbb2
commit
befe73853b
2
mixnet-analysis/.gitignore
vendored
2
mixnet-analysis/.gitignore
vendored
@ -1,2 +0,0 @@
|
||||
.venv/
|
||||
*.csv
|
||||
@ -1,42 +0,0 @@
|
||||
import argparse
|
||||
import os
|
||||
|
||||
import pandas as pd
|
||||
|
||||
|
||||
def aggregate(path: str):
    """Collect per-paramset results under ``path`` into one aggregated.csv.

    Every directory containing a ``paramset.csv`` is expected to also hold
    ``data_msg_count_means.csv`` and ``latency_stats.csv``; their first-row
    values are merged into the paramset row.  The combined table is sorted
    by (paramset, queue_type) and written to ``<path>/aggregated.csv``.

    Args:
        path: root directory to walk recursively.

    Raises:
        FileNotFoundError: if a paramset directory lacks one of the
            companion CSV files.
    """
    dataframes = []
    for root, _dirs, files in os.walk(path):
        # Guard clause instead of nesting: skip non-result directories.
        if "paramset.csv" not in files:
            continue
        df = pd.read_csv(os.path.join(root, "paramset.csv"))

        # Validate explicitly rather than with `assert`, which is stripped
        # when Python runs with -O.
        if "data_msg_count_means.csv" not in files:
            raise FileNotFoundError(f"data_msg_count_means.csv missing in {root}")
        mean_df = pd.read_csv(os.path.join(root, "data_msg_count_means.csv"))
        df["mean_data_msg_count"] = mean_df["mean"].values[0]

        if "latency_stats.csv" not in files:
            raise FileNotFoundError(f"latency_stats.csv missing in {root}")
        latency_df = pd.read_csv(os.path.join(root, "latency_stats.csv"))
        # Copy the single-row latency summary into like-named columns.
        for stat in ("min", "median", "mean", "std", "max"):
            df[f"latency_{stat}"] = latency_df[stat].values[0]

        dataframes.append(df)
        print(f"Processed {root}")

    # Nothing to write if no paramset directories were found.
    if dataframes:
        df = pd.concat(dataframes).sort_values(by=["paramset", "queue_type"])
        outpath = os.path.join(path, "aggregated.csv")
        df.to_csv(outpath, index=False)
        print(f"Saved {outpath}")
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # CLI entry point: aggregate all paramset results found under `path`.
    cli = argparse.ArgumentParser(
        description="Aggregate the results of all paramsets of an experiment"
    )
    cli.add_argument("path", type=str, help="dir path")
    aggregate(cli.parse_args().path)
|
||||
@ -1,59 +0,0 @@
|
||||
import argparse
|
||||
import os
|
||||
|
||||
import pandas as pd
|
||||
|
||||
|
||||
def analyze(path: str) -> pd.DataFrame:
    """Summarize the per-row medians of a CSV (first column excluded).

    Reads the CSV at ``path``, computes the median across all columns but
    the first for each row, and returns a one-row DataFrame with the
    min/median/mean/std/max of those medians.
    """
    row_medians = pd.read_csv(path).iloc[:, 1:].median(axis=1)
    summary = {
        "min": row_medians.min(),
        "median": row_medians.median(),
        "mean": row_medians.mean(),
        "std": row_medians.std(),
        "max": row_medians.max(),
    }
    return pd.DataFrame(summary, index=pd.Series([0]))
|
||||
|
||||
|
||||
def save_means_stats(means: list, outpath: str):
    """Write the mean and sample std of ``means`` as a one-row CSV at ``outpath``."""
    means_series = pd.Series(means)
    summary = pd.DataFrame(
        {"mean": means_series.mean(), "std": means_series.std()},
        index=pd.Series([0]),
    )
    summary.to_csv(outpath, index=False)
    print(f"Saved {outpath}")
|
||||
|
||||
|
||||
def analyze_all(path: str):
    """Walk ``path`` and compute data-message-count statistics per paramset.

    Assumes os.walk's default top-down, depth-first ordering: a
    ``paramset_*`` directory is visited before its children, and all of
    its children are visited before the next ``paramset_*`` sibling.
    ``means`` therefore accumulates values for the paramset whose
    directory was seen most recently, and is flushed either when the next
    paramset directory appears or after the walk completes.
    """
    means = []           # per-iteration mean message counts of the current paramset
    means_outpath = ""   # output file of the current paramset ("" until one is seen)
    for root, dirs, files in os.walk(path):
        if os.path.basename(root).startswith("paramset_"):
            # Entering a new paramset directory: flush the previous one, if any.
            if len(means) > 0:
                assert means_outpath != ""
                save_means_stats(means, means_outpath)
                means = []
            means_outpath = os.path.join(root, "data_msg_count_means.csv")
        elif "data_msg_counts_stats.csv" in files:
            # Stats already computed for this iteration; just collect the mean.
            stats = pd.read_csv(os.path.join(root, "data_msg_counts_stats.csv"))
            means.append(stats["mean"].values[0])
        elif "data_msg_counts.csv" in files:
            # Raw counts only: compute the stats, persist them, collect the mean.
            stats = analyze(os.path.join(root, "data_msg_counts.csv"))
            means.append(stats["mean"].values[0])
            outpath = os.path.join(root, "data_msg_counts_stats.csv")
            stats.to_csv(outpath, index=False)
            print(f"Saved {outpath}")

    # Flush the last paramset encountered by the walk.
    if len(means) > 0:
        assert means_outpath != ""
        save_means_stats(means, means_outpath)
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # CLI entry point: analyze all message-count data found under `path`.
    cli = argparse.ArgumentParser(description="Analyze message count data")
    cli.add_argument("path", type=str, help="dir path")
    analyze_all(cli.parse_args().path)
|
||||
@ -1,46 +0,0 @@
|
||||
import argparse
|
||||
import os
|
||||
|
||||
import pandas as pd
|
||||
|
||||
|
||||
def save_latency_stats(latencies: list, outpath: str):
    """Write a five-number summary of ``latencies`` as a one-row CSV at ``outpath``."""
    latency_series = pd.Series(latencies)
    summary = pd.DataFrame(
        {
            "min": latency_series.min(),
            "median": latency_series.median(),
            "mean": latency_series.mean(),
            "std": latency_series.std(),
            "max": latency_series.max(),
        },
        index=pd.Series([0]),
    )
    summary.to_csv(outpath, index=False)
    print(f"Saved {outpath}")
|
||||
|
||||
|
||||
def aggregate(path: str):
    """Walk ``path`` and aggregate per-iteration latencies for each paramset.

    Assumes os.walk's default top-down, depth-first ordering: a
    ``paramset_*`` directory is visited before its children, and all of
    its children come before the next ``paramset_*`` sibling.  Latencies
    gathered from the children are flushed to the paramset's
    latency_stats.csv either when the next paramset directory is reached
    or after the walk completes.
    """
    latencies = []          # latency samples of the paramset seen most recently
    latencies_outpath = ""  # output file of that paramset ("" until one is seen)
    for root, dirs, files in os.walk(path):
        if os.path.basename(root).startswith("paramset_"):
            # Entering a new paramset directory: flush the previous one, if any.
            if len(latencies) > 0:
                assert latencies_outpath != ""
                save_latency_stats(latencies, latencies_outpath)
                latencies = []
            latencies_outpath = os.path.join(root, "latency_stats.csv")
        elif "latency.csv" in files:
            # Iteration directory: collect all of its latency samples.
            df = pd.read_csv(os.path.join(root, "latency.csv"))
            latencies.extend(df["latency"].to_list())

    # Flush the last paramset encountered by the walk.
    if len(latencies) > 0:
        assert latencies_outpath != ""
        save_latency_stats(latencies, latencies_outpath)
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # CLI entry point: aggregate all latency samples found under `path`.
    cli = argparse.ArgumentParser(description="Aggregate latencies")
    cli.add_argument("path", type=str, help="dir path")
    aggregate(cli.parse_args().path)
|
||||
@ -1,7 +0,0 @@
|
||||
{
|
||||
"venvPath": ".",
|
||||
"venv": ".venv",
|
||||
"typeCheckingMode": "basic",
|
||||
"reportMissingTypeStubs": "none",
|
||||
"ignore": ["reportMissingTypeStubs"]
|
||||
}
|
||||
@ -1 +0,0 @@
|
||||
pandas==2.2.2
|
||||
Loading…
x
Reference in New Issue
Block a user