
first commit #687

Open · wants to merge 1 commit into base: master
2 changes: 1 addition & 1 deletion exercises/basic/Makefile
@@ -1,5 +1,5 @@
# SPDX-License-Identifier: Apache-2.0
BMV2_SWITCH_EXE = simple_switch_grpc
TOPO = pod-topo/topology.json
# TOPO = triangle-topo/topology.json

include ../../utils/Makefile
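
The TOPO variable selects which topology description the exercise harness (presumably the included ../../utils/Makefile) loads when starting Mininet. A minimal sketch for inspecting the selected file, assuming it follows the common tutorial layout with "hosts", "switches", and "links" keys (the exact schema is not shown in this diff):

#!/usr/bin/env python3
# Sketch: inspect the topology selected by TOPO in the Makefile.
# Assumes the JSON uses "hosts", "switches", and "links" keys; adjust if the
# actual pod-topo/topology.json differs.
import json

with open('pod-topo/topology.json') as f:
    topo = json.load(f)

print('hosts:   ', sorted(topo.get('hosts', {})))
print('switches:', sorted(topo.get('switches', {})))
print('links:   ', len(topo.get('links', [])))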
Binary file added exercises/basic/analysis/fct_cdf.png
89 changes: 89 additions & 0 deletions exercises/basic/analysis/flow_completion.statistics
@@ -0,0 +1,89 @@
all_99.9th_fct_ms=14578.498400000004
all_99.9th_fct_ns=14578498400.000004
all_99th_fct_ms=13664.98400000001
all_99th_fct_ns=13664984000.00001
all_flows_completed_fraction=1.0
all_mean_fct_ms=2532.5430555555554
all_mean_fct_ns=2532543055.5555553
all_median_fct_ms=692.7
all_median_fct_ns=692700000.0
all_num_finished_flows=72
all_num_flows=72
all_num_unfinished_flows=0
general_flow_size_bytes_mean=1573375.0
general_flow_size_bytes_std=3608652.9321029196
general_num_flows=72
general_num_unique_sources=72
general_num_unique_targets=72
geq_100KB_99.9th_fct_ms=14619.956800000007
geq_100KB_99.9th_fct_ns=14619956800.000008
geq_100KB_99th_fct_ms=14079.567999999997
geq_100KB_99th_fct_ns=14079567999.999998
geq_100KB_flows_completed_fraction=1.0
geq_100KB_mean_fct_ms=4162.816279069768
geq_100KB_mean_fct_ns=4162816279.0697675
geq_100KB_median_fct_ms=2875.9
geq_100KB_median_fct_ns=2875900000.0
geq_100KB_num_finished_flows=43
geq_100KB_num_flows=43
geq_100KB_num_unfinished_flows=0
geq_10MB_99.9th_fct_ms=14678.5704
geq_10MB_99.9th_fct_ns=14678570400.0
geq_10MB_99th_fct_ms=14665.704
geq_10MB_99th_fct_ns=14665704000.0
geq_10MB_flows_completed_fraction=1.0
geq_10MB_mean_fct_ms=13965.2
geq_10MB_mean_fct_ns=13965200000.0
geq_10MB_median_fct_ms=13965.2
geq_10MB_median_fct_ns=13965200000.0
geq_10MB_num_finished_flows=2
geq_10MB_num_flows=2
geq_10MB_num_unfinished_flows=0
geq_1MB_99.9th_fct_ms=14651.408000000005
geq_1MB_99.9th_fct_ns=14651408000.000006
geq_1MB_99th_fct_ms=14394.080000000002
geq_1MB_99th_fct_ns=14394080000.000002
geq_1MB_flows_completed_fraction=1.0
geq_1MB_mean_fct_ms=7226.92380952381
geq_1MB_mean_fct_ns=7226923809.523809
geq_1MB_median_fct_ms=6346.5
geq_1MB_median_fct_ns=6346500000.0
geq_1MB_num_finished_flows=21
geq_1MB_num_flows=21
geq_1MB_num_unfinished_flows=0
geq_2.4349MB_99.9th_fct_ms=14661.415200000001
geq_2.4349MB_99.9th_fct_ns=14661415200.000002
geq_2.4349MB_99th_fct_ms=14494.151999999998
geq_2.4349MB_99th_fct_ns=14494151999.999998
geq_2.4349MB_flows_completed_fraction=1.0
geq_2.4349MB_mean_fct_ms=9018.371428571429
geq_2.4349MB_mean_fct_ns=9018371428.571428
geq_2.4349MB_median_fct_ms=9785.05
geq_2.4349MB_median_fct_ns=9785050000.0
geq_2.4349MB_num_finished_flows=14
geq_2.4349MB_num_flows=14
geq_2.4349MB_num_unfinished_flows=0
less_100KB_99.9th_fct_ms=460.2460000000001
less_100KB_99.9th_fct_ns=460246000.0000001
less_100KB_99th_fct_ms=439.95999999999987
less_100KB_99th_fct_ns=439959999.9999999
less_100KB_flows_completed_fraction=1.0
less_100KB_mean_fct_ms=115.24137931034483
less_100KB_mean_fct_ns=115241379.31034483
less_100KB_median_fct_ms=14.8
less_100KB_median_fct_ns=14800000.0
less_100KB_num_finished_flows=29
less_100KB_num_flows=29
less_100KB_num_unfinished_flows=0
less_2.4349MB_99.9th_fct_ms=4257.393900000002
less_2.4349MB_99.9th_fct_ns=4257393900.000002
less_2.4349MB_99th_fct_ms=4063.839
less_2.4349MB_99th_fct_ns=4063839000.0
less_2.4349MB_flows_completed_fraction=1.0
less_2.4349MB_mean_fct_ms=966.998275862069
less_2.4349MB_mean_fct_ns=966998275.862069
less_2.4349MB_median_fct_ms=422.25
less_2.4349MB_median_fct_ns=422250000.0
less_2.4349MB_num_finished_flows=58
less_2.4349MB_num_flows=58
less_2.4349MB_num_unfinished_flows=0
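
The .statistics files use a flat key=value format, one metric per line, as written by analyze_fct.py below. A minimal sketch for loading one back into a dict of floats (the file path is an assumption):

# Sketch: parse a key=value .statistics file into a dict of floats.
def load_statistics(path):
    stats = {}
    with open(path) as f:
        for line in f:
            key, _, value = line.strip().partition('=')
            if key:
                stats[key] = float(value)
    return stats

stats = load_statistics('analysis/flow_completion.statistics')
print(stats['all_mean_fct_ms'])  # 2532.54... per the file above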
89 changes: 89 additions & 0 deletions exercises/basic/analysis/flow_completion_ct.statistics
@@ -0,0 +1,89 @@
all_99.9th_fct_ms=14276.2466
all_99.9th_fct_ns=14276246600.0
all_99th_fct_ms=14036.366000000002
all_99th_fct_ns=14036366000.000002
all_flows_completed_fraction=1.0
all_mean_fct_ms=2498.5416666666665
all_mean_fct_ns=2498541666.6666665
all_median_fct_ms=625.15
all_median_fct_ns=625150000.0
all_num_finished_flows=72
all_num_flows=72
all_num_unfinished_flows=0
general_flow_size_bytes_mean=1573375.0
general_flow_size_bytes_std=3608652.9321029196
general_num_flows=72
general_num_unique_sources=72
general_num_unique_targets=72
geq_100KB_99.9th_fct_ms=14287.133200000002
geq_100KB_99.9th_fct_ns=14287133200.000002
geq_100KB_99th_fct_ms=14145.232
geq_100KB_99th_fct_ns=14145232000.0
geq_100KB_flows_completed_fraction=1.0
geq_100KB_mean_fct_ms=4109.076744186046
geq_100KB_mean_fct_ns=4109076744.1860466
geq_100KB_median_fct_ms=2603.9
geq_100KB_median_fct_ns=2603900000.0
geq_100KB_num_finished_flows=43
geq_100KB_num_flows=43
geq_100KB_num_unfinished_flows=0
geq_10MB_99.9th_fct_ms=14302.5246
geq_10MB_99.9th_fct_ns=14302524600.0
geq_10MB_99th_fct_ms=14299.146
geq_10MB_99th_fct_ns=14299146000.0
geq_10MB_flows_completed_fraction=1.0
geq_10MB_mean_fct_ms=14115.2
geq_10MB_mean_fct_ns=14115200000.0
geq_10MB_median_fct_ms=14115.2
geq_10MB_median_fct_ns=14115200000.0
geq_10MB_num_finished_flows=2
geq_10MB_num_flows=2
geq_10MB_num_unfinished_flows=0
geq_1MB_99.9th_fct_ms=14295.392000000002
geq_1MB_99.9th_fct_ns=14295392000.000002
geq_1MB_99th_fct_ms=14227.82
geq_1MB_99th_fct_ns=14227820000.0
geq_1MB_flows_completed_fraction=1.0
geq_1MB_mean_fct_ms=7281.7
geq_1MB_mean_fct_ns=7281700000.0
geq_1MB_median_fct_ms=6908.6
geq_1MB_median_fct_ns=6908600000.0
geq_1MB_num_finished_flows=21
geq_1MB_num_flows=21
geq_1MB_num_unfinished_flows=0
geq_2.4349MB_99.9th_fct_ms=14298.0198
geq_2.4349MB_99.9th_fct_ns=14298019800.0
geq_2.4349MB_99th_fct_ms=14254.098
geq_2.4349MB_99th_fct_ns=14254098000.0
geq_2.4349MB_flows_completed_fraction=1.0
geq_2.4349MB_mean_fct_ms=9170.507142857143
geq_2.4349MB_mean_fct_ns=9170507142.857143
geq_2.4349MB_median_fct_ms=9693.1
geq_2.4349MB_median_fct_ns=9693100000.0
geq_2.4349MB_num_finished_flows=14
geq_2.4349MB_num_flows=14
geq_2.4349MB_num_unfinished_flows=0
less_100KB_99.9th_fct_ms=349.8824000000001
less_100KB_99.9th_fct_ns=349882400.00000006
less_100KB_99th_fct_ms=342.52399999999994
less_100KB_99th_fct_ns=342523999.99999994
less_100KB_flows_completed_fraction=1.0
less_100KB_mean_fct_ms=110.50689655172414
less_100KB_mean_fct_ns=110506896.55172414
less_100KB_median_fct_ms=28.8
less_100KB_median_fct_ns=28800000.0
less_100KB_num_finished_flows=29
less_100KB_num_flows=29
less_100KB_num_unfinished_flows=0
less_2.4349MB_99.9th_fct_ms=4804.555900000004
less_2.4349MB_99.9th_fct_ns=4804555900.000004
less_2.4349MB_99th_fct_ms=4388.359
less_2.4349MB_99th_fct_ns=4388359000.0
less_2.4349MB_flows_completed_fraction=1.0
less_2.4349MB_mean_fct_ms=888.0672413793104
less_2.4349MB_mean_fct_ns=888067241.3793104
less_2.4349MB_median_fct_ms=355.9
less_2.4349MB_median_fct_ns=355900000.0
less_2.4349MB_num_finished_flows=58
less_2.4349MB_num_flows=58
less_2.4349MB_num_unfinished_flows=0
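
flow_completion_ct.statistics carries the same keys as flow_completion.statistics, so the two runs can be compared metric by metric. A sketch, reusing the load_statistics helper from above:

# Sketch: compare the two runs key by key (reuses load_statistics from above).
base = load_statistics('analysis/flow_completion.statistics')
ct = load_statistics('analysis/flow_completion_ct.statistics')

for key in sorted(base):
    if key.endswith('_fct_ms') and base[key]:
        delta_pct = 100.0 * (ct[key] - base[key]) / base[key]
        print(f"{key}: {base[key]:.1f} -> {ct[key]:.1f} ms ({delta_pct:+.1f}%)")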
60 changes: 60 additions & 0 deletions exercises/basic/analysis/throughput.statistics
@@ -0,0 +1,60 @@
all_num_flows=72
all_throughput_0.1th_Mbps=1.53414
all_throughput_1th_Mbps=1.7514
all_throughput_99.9th_Mbps=36.16870000000002
all_throughput_99th_Mbps=29.587000000000064
all_throughput_mean_Mbps=8.248888888888887
all_throughput_median_Mbps=6.0600000000000005
all_throughput_std_Mbps=6.908011965897834
general_flow_size_bytes_mean=1574295.608888889
general_num_flows=72
general_num_unique_sources=72
general_num_unique_targets=72
general_throughput_mbps_mean=8.248888888888887
geq_100KB_num_flows=43
geq_100KB_throughput_0.1th_Mbps=2.15588
geq_100KB_throughput_1th_Mbps=2.2088
geq_100KB_throughput_99.9th_Mbps=26.553800000000006
geq_100KB_throughput_99th_Mbps=26.137999999999998
geq_100KB_throughput_mean_Mbps=7.761860465116279
geq_100KB_throughput_median_Mbps=5.83
geq_100KB_throughput_std_Mbps=5.875536692285266
geq_10MB_num_flows=2
geq_10MB_throughput_1th_Mbps=25.511
geq_10MB_throughput_99.9th_Mbps=26.5989
geq_10MB_throughput_99th_Mbps=26.589000000000002
geq_10MB_throughput_mean_Mbps=26.05
geq_10MB_throughput_median_Mbps=26.05
geq_10MB_throughput_std_Mbps=0.5500000000000007
geq_1MB_num_flows=21
geq_1MB_throughput_0.1th_Mbps=4.469
geq_1MB_throughput_1th_Mbps=4.55
geq_1MB_throughput_99.9th_Mbps=26.578000000000007
geq_1MB_throughput_99th_Mbps=26.380000000000003
geq_1MB_throughput_mean_Mbps=11.404285714285717
geq_1MB_throughput_median_Mbps=8.86
geq_1MB_throughput_std_Mbps=6.559180096430877
geq_2.4349MB_num_flows=14
geq_2.4349MB_throughput_0.1th_Mbps=4.48249
geq_2.4349MB_throughput_1th_Mbps=4.6849
geq_2.4349MB_throughput_99.9th_Mbps=26.585700000000003
geq_2.4349MB_throughput_99th_Mbps=26.457
geq_2.4349MB_throughput_mean_Mbps=13.345714285714289
geq_2.4349MB_throughput_median_Mbps=12.25
geq_2.4349MB_throughput_std_Mbps=7.180166844744442
less_100KB_num_flows=29
less_100KB_throughput_0.1th_Mbps=1.51952
less_100KB_throughput_1th_Mbps=1.6052
less_100KB_throughput_99.9th_Mbps=36.564000000000014
less_100KB_throughput_99th_Mbps=33.539999999999985
less_100KB_throughput_mean_Mbps=8.971034482758622
less_100KB_throughput_median_Mbps=6.3
less_100KB_throughput_std_Mbps=8.149718567100884
less_2.4349MB_num_flows=58
less_2.4349MB_throughput_0.1th_Mbps=1.52938
less_2.4349MB_throughput_1th_Mbps=1.7038
less_2.4349MB_throughput_99.9th_Mbps=36.21600000000006
less_2.4349MB_throughput_99th_Mbps=30.059999999999995
less_2.4349MB_throughput_mean_Mbps=7.018620689655173
less_2.4349MB_throughput_median_Mbps=5.205
less_2.4349MB_throughput_std_Mbps=6.245888150388073
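
The percentile keys (0.1th, 1th, 99th, 99.9th) follow numpy's percentile convention, matching the analysis script below. A sketch of how such entries can be reproduced from per-flow throughput samples in Mbps (the sample values here are illustrative, not the run's actual data):

import numpy as np

# Sketch: reproduce the throughput summary keys from per-flow Mbps samples.
throughputs_mbps = [1.6, 5.8, 6.3, 8.9, 12.25, 26.05]  # illustrative values

summary = {
    'throughput_0.1th_Mbps': np.percentile(throughputs_mbps, 0.1),
    'throughput_1th_Mbps': np.percentile(throughputs_mbps, 1),
    'throughput_99th_Mbps': np.percentile(throughputs_mbps, 99),
    'throughput_99.9th_Mbps': np.percentile(throughputs_mbps, 99.9),
    'throughput_mean_Mbps': np.mean(throughputs_mbps),
    'throughput_median_Mbps': np.median(throughputs_mbps),
    'throughput_std_Mbps': np.std(throughputs_mbps),
}
for key, value in sorted(summary.items()):
    print(f"{key}={value}")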
Binary file added exercises/basic/analysis/throughput_boxplot.png
Binary file added exercises/basic/analysis/throughput_cdf.png
150 changes: 150 additions & 0 deletions exercises/basic/analyze_fct.py
@@ -0,0 +1,150 @@
#!/usr/bin/env python3
import os

import numpy as np
import pandas as pd
import matplotlib.pyplot as plt

##################################
# Setup
#

print("Flow Completion Time Analysis Tool v0.01")

# Create analysis folder if it doesn't exist
analysis_folder_path = 'analysis'
if not os.path.exists(analysis_folder_path):
    os.makedirs(analysis_folder_path)

##################################
# Analyze flow completion
#
def analyze_flow_completion(csv_file_path):
    print(f"Analyzing flow completion times from: {csv_file_path}")

    # Read the CSV file using pandas
    df = pd.read_csv(csv_file_path)

    # Extract relevant columns
    flow_ids = df['flow_id'].values
    source_ids = df['sender'].values
    target_ids = df['receiver'].values
    flow_sizes = df['flow_size'].values
    durations = df['duration'].values  # These are already in seconds
    progress = df['progress'].values

    # Determine completion status (flows with >= 99.9% progress count as completed)
    completed = progress >= 99.9

    print("Calculating statistics...")

    statistics = {
        'general_num_flows': len(flow_ids),
        'general_num_unique_sources': len(set(source_ids)),
        'general_num_unique_targets': len(set(target_ids)),
        'general_flow_size_bytes_mean': np.mean(flow_sizes),
        'general_flow_size_bytes_std': np.std(flow_sizes)
    }

    # Define flow size ranges (-1 means unbounded on that side)
    range_low = [-1, -1, -1, 100000, 2434900, 1000000, 10000000]
    range_high = [-1, 100000, 2434900, -1, -1, -1, -1]
    range_name = ["all", "less_100KB", "less_2.4349MB", "geq_100KB", "geq_2.4349MB", "geq_1MB", "geq_10MB"]
    range_completed_duration = [[], [], [], [], [], [], []]
    range_num_finished_flows = [0, 0, 0, 0, 0, 0, 0]
    range_num_unfinished_flows = [0, 0, 0, 0, 0, 0, 0]
    range_low_eq = [0, 0, 0, 1, 1, 1, 1]   # 1: lower bound is inclusive (>=)
    range_high_eq = [0, 0, 0, 1, 1, 1, 1]  # 1: upper bound is inclusive (<=)

    # Go over all flows and assign each to every size range it falls into
    for i in range(len(flow_ids)):
        for j in range(len(range_name)):
            if (
                (range_low[j] == -1 or (range_low_eq[j] == 0 and flow_sizes[i] > range_low[j]) or (range_low_eq[j] == 1 and flow_sizes[i] >= range_low[j])) and
                (range_high[j] == -1 or (range_high_eq[j] == 0 and flow_sizes[i] < range_high[j]) or (range_high_eq[j] == 1 and flow_sizes[i] <= range_high[j]))
            ):
                if completed[i]:
                    range_num_finished_flows[j] += 1
                    range_completed_duration[j].append(durations[i] * 1000000000)  # Convert seconds to nanoseconds
                else:
                    range_num_unfinished_flows[j] += 1

    # Per-range statistics
    for j in range(len(range_name)):
        statistics[range_name[j] + '_num_flows'] = range_num_finished_flows[j] + range_num_unfinished_flows[j]
        print(range_name[j] + '_num_flows', range_num_finished_flows[j] + range_num_unfinished_flows[j])

        statistics[range_name[j] + '_num_finished_flows'] = range_num_finished_flows[j]
        print(range_name[j] + '_num_finished_flows', range_num_finished_flows[j])

        statistics[range_name[j] + '_num_unfinished_flows'] = range_num_unfinished_flows[j]
        print(range_name[j] + '_num_unfinished_flows', range_num_unfinished_flows[j])

        total = range_num_finished_flows[j] + range_num_unfinished_flows[j]
        if range_num_finished_flows[j] != 0:
            statistics[range_name[j] + '_flows_completed_fraction'] = float(range_num_finished_flows[j]) / float(total)

            # Durations are stored in nanoseconds in the statistics
            statistics[range_name[j] + '_mean_fct_ns'] = np.mean(range_completed_duration[j])
            print(range_name[j] + '_mean_fct_ns', np.mean(range_completed_duration[j]))

            statistics[range_name[j] + '_median_fct_ns'] = np.median(range_completed_duration[j])
            statistics[range_name[j] + '_99th_fct_ns'] = np.percentile(range_completed_duration[j], 99)
            statistics[range_name[j] + '_99.9th_fct_ns'] = np.percentile(range_completed_duration[j], 99.9)

            # Convert to milliseconds for display purposes
            statistics[range_name[j] + '_mean_fct_ms'] = statistics[range_name[j] + '_mean_fct_ns'] / 1000000
            statistics[range_name[j] + '_median_fct_ms'] = statistics[range_name[j] + '_median_fct_ns'] / 1000000
            statistics[range_name[j] + '_99th_fct_ms'] = statistics[range_name[j] + '_99th_fct_ns'] / 1000000
            statistics[range_name[j] + '_99.9th_fct_ms'] = statistics[range_name[j] + '_99.9th_fct_ns'] / 1000000
        else:
            statistics[range_name[j] + '_flows_completed_fraction'] = 0

    # Print the original duration values for comparison with other analyses
    print("\nOriginal duration values (seconds) for comparison:")
    print(f"All flows - Mean duration: {np.mean(durations):.4f} seconds")
    print(f"Small flows (<100KB) - Mean duration: {np.mean(durations[flow_sizes < 100000]):.4f} seconds")
    print(f"Medium flows (>100KB, <2.4349MB) - Mean duration: {np.mean(durations[(flow_sizes > 100000) & (flow_sizes < 2434900)]):.4f} seconds")
    print(f"Large flows (>1MB) - Mean duration: {np.mean(durations[flow_sizes > 1000000]):.4f} seconds")

    # Create CDF plot for flow completion times
    plt.figure(figsize=(10, 6))
    for j in range(len(range_name)):
        if range_num_finished_flows[j] > 0:
            sorted_fct = np.sort(range_completed_duration[j]) / 1000000  # Convert ns to ms for readability
            cdf = np.arange(1, len(sorted_fct) + 1) / len(sorted_fct)
            plt.plot(sorted_fct, cdf, label=range_name[j])

    plt.xlabel('Flow Completion Time (ms)')
    plt.ylabel('CDF')
    plt.title('CDF of Flow Completion Times by Flow Size Category')
    plt.grid(True, linestyle='--', linewidth=0.5)
    plt.legend()
    plt.savefig(f'{analysis_folder_path}/fct_cdf.png')

    # Write raw results
    print('Writing to result file flow_completion.statistics...')
    with open(analysis_folder_path + '/flow_completion.statistics', 'w+') as outfile:
        for key, value in sorted(statistics.items()):
            outfile.write(str(key) + "=" + str(value) + "\n")

    # Print summary statistics for comparison
    print("\nSummary of Flow Completion Times:")
    print("Flow Size Category       | Mean (sec)  | Median (sec) | 99th (sec)")
    print("------------------------|-------------|--------------|------------")
    for j in range(len(range_name)):
        if range_num_finished_flows[j] > 0:
            mean_sec = statistics[range_name[j] + '_mean_fct_ns'] / 1e9
            median_sec = statistics[range_name[j] + '_median_fct_ns'] / 1e9
            p99_sec = statistics[range_name[j] + '_99th_fct_ns'] / 1e9
            print(f"{range_name[j].ljust(24)} | {mean_sec:11.4f} | {median_sec:12.4f} | {p99_sec:10.4f}")

if __name__ == "__main__":
    # Path to the CSV file produced by the measurement step
    csv_file_path = "flow_analysis.csv"

    # Analyze flow completion
    analyze_flow_completion(csv_file_path)
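
analyze_fct.py reads a flow_analysis.csv with flow_id, sender, receiver, flow_size (bytes), duration (seconds), and progress (percent) columns. A minimal smoke-test input, with made-up values:

# Sketch: generate a minimal flow_analysis.csv for smoke-testing analyze_fct.py.
# Column names match what the script reads; the rows are made-up values.
import pandas as pd

pd.DataFrame({
    'flow_id': [0, 1, 2],
    'sender': ['h1', 'h2', 'h3'],
    'receiver': ['h2', 'h3', 'h1'],
    'flow_size': [50000, 2000000, 15000000],   # bytes
    'duration': [0.015, 6.3, 14.0],            # seconds
    'progress': [100.0, 100.0, 99.95],         # percent; >= 99.9 counts as finished
}).to_csv('flow_analysis.csv', index=False)
# Then run: python3 analyze_fct.py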