From 77f03815e5682adceb055e29140840d196feb338 Mon Sep 17 00:00:00 2001
From: Tom French <15848336+TomAFrench@users.noreply.github.com>
Date: Wed, 20 Mar 2024 16:00:01 +0000
Subject: [PATCH] chore: fix up benchmarking scripts (#4601)

# Description

## Problem\*

Resolves

## Summary\*

This PR gets our criterion benchmarks working again after the various changes we made to the repo structure. I've also extended them to benchmark proving times.

I'm going to follow up with changes to run these benchmarks in CI.

## Additional Context

## Documentation\*

Check one:

- [x] No documentation needed.
- [ ] Documentation included in this PR.
- [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR.

# PR Checklist\*

- [x] I have tested the changes locally.
- [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings.
---
 tooling/nargo_cli/benches/criterion.rs | 24 +++++++++++++++++-------
 tooling/nargo_cli/benches/utils.rs     | 13 +++++++++----
 2 files changed, 26 insertions(+), 11 deletions(-)

diff --git a/tooling/nargo_cli/benches/criterion.rs b/tooling/nargo_cli/benches/criterion.rs
index a7b094fd7aa..9f67bcffd6e 100644
--- a/tooling/nargo_cli/benches/criterion.rs
+++ b/tooling/nargo_cli/benches/criterion.rs
@@ -1,9 +1,10 @@
 //! Select representative tests to bench with criterion
 use assert_cmd::prelude::{CommandCargoExt, OutputAssertExt};
 use criterion::{criterion_group, criterion_main, Criterion};
+
 use paste::paste;
 use pprof::criterion::{Output, PProfProfiler};
-use std::process::Command;
+use std::{process::Command, time::Duration};
 include!("./utils.rs");
 
 macro_rules! criterion_command {
@@ -15,9 +16,11 @@ macro_rules! criterion_command {
                 let mut cmd = Command::cargo_bin("nargo").unwrap();
                 cmd.arg("--program-dir").arg(&test_program_dir);
                 cmd.arg($command_string);
+                cmd.arg("--force");
 
-                c.bench_function(&format!("{}_{}", test_program_dir.file_name().unwrap().to_str().unwrap(), $command_string), |b| {
-                    b.iter(|| cmd.assert())
+                let benchmark_name = format!("{}_{}", test_program_dir.file_name().unwrap().to_str().unwrap(), $command_string);
+                c.bench_function(&benchmark_name, |b| {
+                    b.iter(|| cmd.assert().success())
                 });
             }
         }
@@ -25,9 +28,16 @@ macro_rules! criterion_command {
     };
 }
 criterion_command!(execution, "execute");
+criterion_command!(prove, "prove");
+
+criterion_group! {
+    name = execution_benches;
+    config = Criterion::default().sample_size(20).measurement_time(Duration::from_secs(20)).with_profiler(PProfProfiler::new(100, Output::Flamegraph(None)));
+    targets = criterion_selected_tests_execution
+}
 criterion_group! {
-    name = benches;
-    config = Criterion::default().sample_size(20).with_profiler(PProfProfiler::new(100, Output::Flamegraph(None)));
-    targets = criterion_selected_tests_execution
+    name = prove_benches;
+    config = Criterion::default().sample_size(10).measurement_time(Duration::from_secs(20)).with_profiler(PProfProfiler::new(100, Output::Flamegraph(None)));
+    targets = criterion_selected_tests_prove
 }
-criterion_main!(benches);
+criterion_main!(execution_benches, prove_benches);
diff --git a/tooling/nargo_cli/benches/utils.rs b/tooling/nargo_cli/benches/utils.rs
index 52a6b718c44..47968f7e898 100644
--- a/tooling/nargo_cli/benches/utils.rs
+++ b/tooling/nargo_cli/benches/utils.rs
@@ -4,11 +4,16 @@ use std::path::PathBuf;
 fn get_selected_tests() -> Vec<PathBuf> {
     let manifest_dir = match std::env::var("CARGO_MANIFEST_DIR") {
         Ok(dir) => PathBuf::from(dir),
-        Err(_) => std::env::current_dir().unwrap().join("crates").join("nargo_cli"),
+        Err(_) => std::env::current_dir().unwrap(),
     };
-    let test_dir = manifest_dir.join("tests").join("execution_success");
+    let test_dir = manifest_dir
+        .parent()
+        .unwrap()
+        .parent()
+        .unwrap()
+        .join("test_programs")
+        .join("execution_success");
 
-    let selected_tests =
-        vec!["8_integration", "sha256_blocks", "struct", "eddsa", "regression", "regression_2099"];
+    let selected_tests = vec!["struct", "eddsa", "regression"];
     selected_tests.into_iter().map(|t| test_dir.join(t)).collect()
 }
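
For reviewers unfamiliar with the macro, `criterion_command!(prove, "prove")` roughly expands (via `paste!`) to a benchmark function along the lines of the sketch below. This is an approximation inferred from the hunks above, not the verbatim generated code; the exact function signature and the loop over test programs are assumptions.

```rust
// Approximate expansion of `criterion_command!(prove, "prove")`; a sketch only.
// `get_selected_tests()` comes from the included utils.rs.
fn criterion_selected_tests_prove(c: &mut Criterion) {
    for test_program_dir in get_selected_tests() {
        // Build a `nargo prove --force` invocation rooted at the test program.
        let mut cmd = Command::cargo_bin("nargo").unwrap();
        cmd.arg("--program-dir").arg(&test_program_dir);
        cmd.arg("prove");
        cmd.arg("--force");

        // Benchmark name such as "eddsa_prove"; asserting success keeps silent
        // failures out of the timing results.
        let benchmark_name =
            format!("{}_{}", test_program_dir.file_name().unwrap().to_str().unwrap(), "prove");
        c.bench_function(&benchmark_name, |b| b.iter(|| cmd.assert().success()));
    }
}
```

Assuming the bench targets are registered in the crate's Cargo.toml, something like `cargo bench -p nargo_cli` should run both the execution and prove groups locally.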