Skip to content

Commit

Permalink
Include statistics metrics
Browse files Browse the repository at this point in the history
  • Loading branch information
enriquetomasmb committed Nov 19, 2024
1 parent 8ba351a commit 7662a11
Show file tree
Hide file tree
Showing 3 changed files with 54 additions and 0 deletions.
3 changes: 3 additions & 0 deletions nebula/frontend/app.py
Original file line number Diff line number Diff line change
Expand Up @@ -757,6 +757,9 @@ def stop_scenario(scenario_name):
ScenarioManagement.stop_participants()
ScenarioManagement.stop_blockchain()
scenario_set_status_to_finished(scenario_name)
# Generate statistics for the scenario
path = Utils.check_path(settings.log_dir, scenario_name)
ScenarioManagement.generate_statistics(path)


def stop_all_scenarios():
Expand Down
49 changes: 49 additions & 0 deletions nebula/scenarios.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
from datetime import datetime

import docker
import tensorboard_reducer as tbr

from nebula.addons.blockchain.blockchain_deployer import BlockchainDeployer
from nebula.addons.topologymanager import TopologyManager
Expand Down Expand Up @@ -1034,3 +1035,51 @@ def scenario_finished(self, timeout_seconds):
return False

time.sleep(5)

@classmethod
def generate_statistics(cls, path):
    """Reduce the TensorBoard event logs of a finished scenario into summary statistics.

    Loads every participant's event directory under ``<path>/metrics/``,
    reduces the recorded scalars across runs (mean, min, max, median, std,
    var) with tensorboard-reducer, and writes the results both as reduced
    TensorBoard event files and as a CSV.

    Args:
        path: Scenario log directory containing a ``metrics/`` subdirectory.

    Returns:
        bool: True on success, False if no inputs were found or an error occurred.
    """
    try:
        logging.info(f"Generating statistics for scenario {path}")

        # One event directory per participant under <path>/metrics/.
        # Exclude previously generated "reduced-data*" outputs so that a
        # re-run does not consume its own results as inputs.
        input_event_dirs = sorted(
            d
            for d in glob.glob(os.path.join(path, "metrics", "*"))
            if not os.path.basename(d).startswith("reduced-data")
        )
        if not input_event_dirs:
            logging.warning(f"No TensorBoard event directories found in {os.path.join(path, 'metrics')}")
            return False

        # Where to write reduced TB events
        tb_events_output_dir = os.path.join(path, "metrics", "reduced-data")
        csv_out_path = os.path.join(path, "metrics", "reduced-data-as.csv")
        # Whether to abort or overwrite when csv_out_path already exists
        overwrite = False
        reduce_ops = ("mean", "min", "max", "median", "std", "var")

        # Keep the first value when a step appears more than once in a run
        handle_dup_steps = "keep-first"
        # Do not require all runs to share exactly the same recorded steps
        strict_steps = False

        events_dict = tbr.load_tb_events(
            input_event_dirs, handle_dup_steps=handle_dup_steps, strict_steps=strict_steps
        )

        # Number of recorded tags, e.g. 3 if loss, MAE and R^2 were logged
        n_scalars = len(events_dict)
        n_steps, n_events = next(iter(events_dict.values())).shape

        logging.info(f"Loaded {n_events} TensorBoard runs with {n_scalars} scalars and {n_steps} steps each")
        logging.info(f"Events dict keys: {events_dict.keys()}")

        reduced_events = tbr.reduce_events(events_dict, reduce_ops)

        for op in reduce_ops:
            logging.info(f"Writing '{op}' reduction to '{tb_events_output_dir}-{op}'")

        tbr.write_tb_events(reduced_events, tb_events_output_dir, overwrite)

        logging.info(f"Writing results to '{csv_out_path}'")

        tbr.write_data_file(reduced_events, csv_out_path, overwrite)

        logging.info("Reduction complete")
        # Explicit success value: the except path returns False, so the
        # happy path must not fall through returning None.
        return True

    except Exception as e:
        logging.exception(f"Error generating statistics: {e}")
        return False
2 changes: 2 additions & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -124,6 +124,8 @@ frontend = [
"setuptools==74.1.2",
"tensorboard==2.17.1",
"tensorboardx==2.6.2.2",
"tensorboard-reducer==0.3.1",
"torch==2.4.1",
"uvicorn==0.30.6",
"web3==6.20.0",
"wheel==0.44.0",
Expand Down

0 comments on commit 7662a11

Please sign in to comment.