GossipSub shadow simulation scripts

This commit is contained in:
ufarooqstatus 2023-09-28 01:10:36 +05:00
parent 65a5e5e6f0
commit 212933a4af
4 changed files with 230 additions and 8 deletions

View File

@ -10,9 +10,12 @@ nimble install -dy
cd shadow
# the default shadow.yml will start 5k nodes, you might want to change that by removing
# lines and setting PEERS to the number of instances
./run.sh
# the output is a "latencies" file, or you can find each host output in the
# data.shadow folder
./run.sh <runs> <nodes>
# the first parameter <runs> tells the number of simulation runs, and second parameter <nodes> tells the
# number of nodes in simulation, for example ./run.sh 2 3000
# each run produces a latenciesX and a shadowlogX file, where X is the run number
# the run script (run.sh) uses awk to summarize the latenciesX and shadowlogX files
# you can use the plotter tool to extract useful metrics & generate a graph
cd ../tools

View File

@ -1,8 +1,33 @@
#!/bin/sh
# run.sh — build the simulation binary, patch shadow.yaml for the requested
# network size, then launch <runs> Shadow simulations and print a summary of
# each run via the two awk helper scripts.
#
# Usage: ./run.sh <runs> <nodes>
#   <runs>  number of simulation runs
#   <nodes> number of nodes to simulate
set -e

if [ "$#" -ne 2 ]; then
  echo "Usage: $0 <runs> <nodes>" >&2
  exit 1
fi

runs="$1"   # number of simulation runs
nodes="$2"  # number of nodes to simulate
shadow_file="shadow.yaml"

# Truncate shadow.yaml right after the 'environment:' line, update the PEERS
# environment variable, then append one host entry per additional peer.
# NOTE(review): assumes the peer1/*client_host template lives above the
# 'environment:' line in shadow.yaml — confirm if the template changes.
sed -i '/environment:/q' "$shadow_file"
sed -E -i "s/\"PEERS\": \"[0-9]+\"/\"PEERS\": \"$nodes\"/" "$shadow_file"
counter=2
while [ "$counter" -le "$nodes" ]; do
  echo " peer$counter: *client_host" >> "$shadow_file"
  counter=$((counter + 1))
done

# Clean artifacts from previous runs and build the simulation binary once.
# (Previously the script compiled and ran the simulation an extra time before
# shadow.yaml was patched; that stale pre-run has been removed.)
rm -f shadowlog* latencies* main && rm -rf shadow.data/
nim c -d:chronicles_colors=None --threads:on -d:metrics -d:libp2p_network_protocols_metrics -d:release main

for i in $(seq "$runs"); do
  echo "Running for turn $i"
  # Each run's raw log and extracted latency/bandwidth lines are kept per-run.
  shadow shadow.yaml > "shadowlog$i" && grep -rne 'milliseconds\|BW' shadow.data/ > "latencies$i"
  rm -rf shadow.data/
done

for i in $(seq "$runs"); do
  echo "Summary for turn $i"
  awk -f summary_latency.awk "latencies$i"
  awk -f summary_shadowlog.awk "shadowlog$i"
done

View File

@ -0,0 +1,49 @@
# summary_latency.awk — parse a latenciesX file produced by run.sh and print a
# results summary: Max/Avg network latency (overall and per published message)
# plus a hop-by-hop spread count of message receives.
# usage: awk -f summary_latency.awk latenciesX
BEGIN {
    FS = " "               # default column separator
    network_size = 0
    max_nw_lat = sum_nw_lat = 0
    matched_rows = 0       # rows carrying an actual latency sample
    hop_lat = 100          # per-hop latency (ms); must be consistent with shadow.yaml
}
{
    # Keep only rows whose third field is purely numeric (an rx latency);
    # other captured lines (e.g. BW rows) are skipped.
    clean_int = $3
    gsub(/[^0-9]/, "", clean_int);
    if ($3 == clean_int){
        matched_rows++
        sum_nw_lat += $NF
        if (max_nw_lat < $NF) {max_nw_lat = $NF}
        if (split($1, arr, "peer|/main|:.*:")) {
            #$3 = rx_latency, arr[4] = publish_time, arr[2] = peerID
            lat_arr[arr[4], $3]++;
            msg_arr[arr[4]] = 1;   #we maintain set of messages identified by their publish time
            if (network_size < arr[2]) {network_size = arr[2]}
        }
    }
}
END {
    # BUGFIX: average over the rows that passed the filter, not NR — NR counts
    # every input line (including non-latency rows), which deflated the average.
    avg_nw_lat = (matched_rows > 0) ? sum_nw_lat / matched_rows : 0
    print "Total Nodes : ", network_size, "Total Messages Published : ", length(msg_arr),
        "Network Latency\t MAX : ", max_nw_lat, "\tAverage : ", avg_nw_lat
    print " Message ID \t Avg Latency \t Messages Received"
    for (value in msg_arr) {
        sum_rx_msgs = 0;
        latency = 0;
        for (key in lat_arr) {
            split(key, parts, SUBSEP);
            if (parts[1] == value) {
                sum_rx_msgs = sum_rx_msgs + lat_arr[key];            #total receives / message
                latency = latency + (lat_arr[key] * parts[2])        #latency-weighted receive count
                spread[ int((parts[2]) / hop_lat) ] = lat_arr[key]   #hop-by-hop spread count of messages
            }
        }
        print value, "\t", latency/sum_rx_msgs, "\t ", sum_rx_msgs, "spread is",
            spread[1], spread[2], spread[3], spread[4], spread[5], spread[6], spread[7]
        delete spread
    }
}

View File

@ -0,0 +1,145 @@
# summary_shadowlog.awk — parse a shadowlogX file produced by run.sh and print
# per-node traffic statistics (bytes in/out with min/max/avg/stddev) plus
# aggregated packet/byte counters split by local/remote and in/out direction.
# usage: awk -f summary_shadowlog.awk shadowlogX
BEGIN {
    FS = " ";       #column separator
    fg_index = 7    #flags start index in $10
    flag_size = 12  #size of flags (counters per direction group)
    # offsets into $10 for each direction group
    local_in = 0    #inbound-localhost-counters
    local_out = 1   #outbound-localhost-counters
    remote_in = 2   #inbound-remote-counters
    remote_out = 3  #outbound-remote-counters
}
{
    if ($9 == "[node]") {
        #$5: peer info, $10: traffic stats, we need to split
        peerlist[$5] = 1    #list for all peers
        if (split($10, arr, ",|;")) {
            #arr[2]: received bytes, arr[3]: transferred bytes
            if (arr[2] > 0) {sum_rx[$5] += arr[2]}    #bytes received
            if (arr[3] > 0) {sum_tx[$5] += arr[3]}    #bytes transferred
            # Counter layout per group (relative to idx): 0 pkts, 1 bytes,
            # 2 ctrl pkts, 3 ctrl hdr bytes, 6 data pkts, 7 data hdr bytes,
            # 8 data bytes.
            #inbound-localhost-counters
            idx = fg_index + (flag_size * local_in)
            local_in_pkt[$5]            += arr[idx]
            local_in_bytes[$5]          += arr[idx+1]
            local_in_ctrl_pkt[$5]       += arr[idx+2]
            local_in_ctrl_hdr_bytes[$5] += arr[idx+3]
            local_in_data_pkt[$5]       += arr[idx+6]
            local_in_data_hdr_bytes[$5] += arr[idx+7]
            local_in_data_bytes[$5]     += arr[idx+8]
            #outbound-localhost-counters
            idx = fg_index + (flag_size * local_out)
            local_out_pkt[$5]            += arr[idx]
            local_out_bytes[$5]          += arr[idx+1]
            local_out_ctrl_pkt[$5]       += arr[idx+2]
            local_out_ctrl_hdr_bytes[$5] += arr[idx+3]
            local_out_data_pkt[$5]       += arr[idx+6]
            local_out_data_hdr_bytes[$5] += arr[idx+7]
            local_out_data_bytes[$5]     += arr[idx+8]
            #inbound-remote-counters
            idx = fg_index + (flag_size * remote_in)
            remote_in_pkt[$5]            += arr[idx]
            remote_in_bytes[$5]          += arr[idx+1]
            remote_in_ctrl_pkt[$5]       += arr[idx+2]
            remote_in_ctrl_hdr_bytes[$5] += arr[idx+3]
            remote_in_data_pkt[$5]       += arr[idx+6]
            remote_in_data_hdr_bytes[$5] += arr[idx+7]
            remote_in_data_bytes[$5]     += arr[idx+8]
            #outbound-remote-counters
            idx = fg_index + (flag_size * remote_out)
            remote_out_pkt[$5]            += arr[idx]
            remote_out_bytes[$5]          += arr[idx+1]
            remote_out_ctrl_pkt[$5]       += arr[idx+2]
            remote_out_ctrl_hdr_bytes[$5] += arr[idx+3]
            remote_out_data_pkt[$5]       += arr[idx+6]
            remote_out_data_hdr_bytes[$5] += arr[idx+7]
            remote_out_data_bytes[$5]     += arr[idx+8]
        }
    }
}
END {
    nw_size = length(peerlist)
    min_in = max_in = min_out = max_out = sum_in = sum_out = avg_in = avg_out = 0
    for (value in peerlist) {    #node specific tx/rx stats (bytes)
        sum_in += sum_rx[value]
        sum_out += sum_tx[value]
        if (sum_rx[value] < min_in || min_in == 0) min_in = sum_rx[value]
        if (sum_tx[value] < min_out || min_out == 0) min_out = sum_tx[value]
        if (sum_rx[value] > max_in) max_in = sum_rx[value]
        if (sum_tx[value] > max_out) max_out = sum_tx[value]
    }
    avg_in = sum_in/nw_size
    avg_out = sum_out/nw_size
    for (value in peerlist) {
        sum_sq_in += (sum_rx[value] - avg_in) ^ 2    #for stddev
        sum_sq_out += (sum_tx[value] - avg_out) ^ 2
        sum_local_in_pkt += local_in_pkt[value]
        sum_local_in_bytes += local_in_bytes[value]
        sum_local_in_ctrl_pkt += local_in_ctrl_pkt[value]
        sum_local_in_ctrl_hdr_bytes += local_in_ctrl_hdr_bytes[value]
        sum_local_in_data_pkt += local_in_data_pkt[value]
        sum_local_in_data_hdr_bytes += local_in_data_hdr_bytes[value]
        sum_local_in_data_bytes += local_in_data_bytes[value]
        sum_local_out_pkt += local_out_pkt[value]
        sum_local_out_bytes += local_out_bytes[value]
        sum_local_out_ctrl_pkt += local_out_ctrl_pkt[value]
        sum_local_out_ctrl_hdr_bytes += local_out_ctrl_hdr_bytes[value]
        sum_local_out_data_pkt += local_out_data_pkt[value]
        sum_local_out_data_hdr_bytes += local_out_data_hdr_bytes[value]
        sum_local_out_data_bytes += local_out_data_bytes[value]
        # BUGFIX: the misspelled accumulator 'sum_romote_in_bytes' is renamed
        # to 'sum_remote_in_bytes'; more importantly, the final print used
        # 'sum_romote_out_bytes' while accumulation went into
        # 'sum_remote_out_bytes', so Remote OUT bytes always printed empty.
        sum_remote_in_pkt += remote_in_pkt[value]
        sum_remote_in_bytes += remote_in_bytes[value]
        sum_remote_in_ctrl_pkt += remote_in_ctrl_pkt[value]
        sum_remote_in_ctrl_hdr_bytes += remote_in_ctrl_hdr_bytes[value]
        sum_remote_in_data_pkt += remote_in_data_pkt[value]
        sum_remote_in_data_hdr_bytes += remote_in_data_hdr_bytes[value]
        sum_remote_in_data_bytes += remote_in_data_bytes[value]
        sum_remote_out_pkt += remote_out_pkt[value]
        sum_remote_out_bytes += remote_out_bytes[value]
        sum_remote_out_ctrl_pkt += remote_out_ctrl_pkt[value]
        sum_remote_out_ctrl_hdr_bytes += remote_out_ctrl_hdr_bytes[value]
        sum_remote_out_data_pkt += remote_out_data_pkt[value]
        sum_remote_out_data_hdr_bytes += remote_out_data_hdr_bytes[value]
        sum_remote_out_data_bytes += remote_out_data_bytes[value]
    }
    print "\nTotal Bytes Received : ", sum_in, "Total Bytes Transferred : ", sum_out
    print "Per Node Pkt Receives : min, max, avg, stddev = ", min_in, max_in, avg_in, sqrt(sum_sq_in/nw_size)
    print "Per Node Pkt Transfers: min, max, avg, stddev = ", min_out, max_out, avg_out, sqrt(sum_sq_out/nw_size)
    print "Details..."
    #print "Local IN pkt: ", sum_local_in_pkt, "Bytes : ", sum_local_in_bytes, "ctrlPkt: ", sum_local_in_ctrl_pkt, "ctrlHdrBytes: ", sum_local_in_ctrl_hdr_bytes,
    #    "DataPkt: ", sum_local_in_data_pkt, "DataHdrBytes: ", sum_local_in_data_hdr_bytes, "DataBytes", sum_local_in_data_bytes
    #print "Local OUT pkt: ", sum_local_out_pkt, "Bytes : ", sum_local_out_bytes, "ctrlPkt: ", sum_local_out_ctrl_pkt, "ctrlHdrBytes: ", sum_local_out_ctrl_hdr_bytes,
    #    "DataPkt: ", sum_local_out_data_pkt, "DataHdrBytes: ", sum_local_out_data_hdr_bytes, "DataBytes", sum_local_out_data_bytes
    print "Remote IN pkt: ", sum_remote_in_pkt, "Bytes : ", sum_remote_in_bytes, "ctrlPkt: ", sum_remote_in_ctrl_pkt, "ctrlHdrBytes: ", sum_remote_in_ctrl_hdr_bytes,
        "DataPkt: ", sum_remote_in_data_pkt, "DataHdrBytes: ", sum_remote_in_data_hdr_bytes, "DataBytes", sum_remote_in_data_bytes
    print "Remote OUT pkt: ", sum_remote_out_pkt, "Bytes : ", sum_remote_out_bytes, "ctrlPkt: ", sum_remote_out_ctrl_pkt, "ctrlHdrBytes: ", sum_remote_out_ctrl_hdr_bytes,
        "DataPkt: ", sum_remote_out_data_pkt, "DataHdrBytes: ", sum_remote_out_data_hdr_bytes, "DataBytes", sum_remote_out_data_bytes
}