chore: fix up benchmarking scripts (#4601)
# Description

## Problem\*

Resolves <!-- Link to GitHub Issue -->

## Summary\*

This PR gets our criterion benchmarks working again after the various
changes we made to the repo structure. I've also changed them to bench
proving times.

I'm going to follow up with changes to run this in CI.
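For anyone reproducing the numbers locally: these benchmarks should be runnable with `cargo bench` from within `tooling/nargo_cli`, assuming the crate's `Cargo.toml` registers `benches/criterion.rs` as a `[[bench]]` target with `harness = false` (which criterion requires).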

## Additional Context



## Documentation\*

Check one:
- [x] No documentation needed.
- [ ] Documentation included in this PR.
- [ ] **[Exceptional Case]** Documentation to be submitted in a separate
PR.

# PR Checklist\*

- [x] I have tested the changes locally.
- [x] I have formatted the changes with [Prettier](https://prettier.io/)
and/or `cargo fmt` on default settings.
TomAFrench committed Apr 3, 2024
1 parent 7360726 commit ee02a29
Showing 2 changed files with 26 additions and 11 deletions.
tooling/nargo_cli/benches/criterion.rs (17 additions, 7 deletions)

```diff
@@ -1,9 +1,10 @@
 //! Select representative tests to bench with criterion
 use assert_cmd::prelude::{CommandCargoExt, OutputAssertExt};
 use criterion::{criterion_group, criterion_main, Criterion};
+
 use paste::paste;
 use pprof::criterion::{Output, PProfProfiler};
-use std::process::Command;
+use std::{process::Command, time::Duration};
 include!("./utils.rs");
 
 macro_rules! criterion_command {
@@ -15,19 +16,28 @@ macro_rules! criterion_command {
                 let mut cmd = Command::cargo_bin("nargo").unwrap();
                 cmd.arg("--program-dir").arg(&test_program_dir);
                 cmd.arg($command_string);
+                cmd.arg("--force");
 
-                c.bench_function(&format!("{}_{}", test_program_dir.file_name().unwrap().to_str().unwrap(), $command_string), |b| {
-                    b.iter(|| cmd.assert())
+                let benchmark_name = format!("{}_{}", test_program_dir.file_name().unwrap().to_str().unwrap(), $command_string);
+                c.bench_function(&benchmark_name, |b| {
+                    b.iter(|| cmd.assert().success())
                 });
             }
         }
     }
     };
 }
 criterion_command!(execution, "execute");
+criterion_command!(prove, "prove");
+
+criterion_group! {
+    name = execution_benches;
+    config = Criterion::default().sample_size(20).measurement_time(Duration::from_secs(20)).with_profiler(PProfProfiler::new(100, Output::Flamegraph(None)));
+    targets = criterion_selected_tests_execution
+}
 criterion_group! {
-    name = benches;
-    config = Criterion::default().sample_size(20).with_profiler(PProfProfiler::new(100, Output::Flamegraph(None)));
-    targets = criterion_selected_tests_execution
+    name = prove_benches;
+    config = Criterion::default().sample_size(10).measurement_time(Duration::from_secs(20)).with_profiler(PProfProfiler::new(100, Output::Flamegraph(None)));
+    targets = criterion_selected_tests_prove
 }
-criterion_main!(benches);
+criterion_main!(execution_benches, prove_benches);
```
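For context on the group configuration above, here is a minimal, self-contained sketch, not taken from this PR, of how two differently configured criterion groups feed into `criterion_main!`. The `noop_bench` target and `sketch_benches` name are placeholders; the real targets are the `criterion_selected_tests_*` functions the macro generates, and `PProfProfiler` requires the `pprof` crate with its `criterion` feature enabled.

```rust
use std::time::Duration;

use criterion::{criterion_group, criterion_main, Criterion};
use pprof::criterion::{Output, PProfProfiler};

// Placeholder benchmark target; the PR's macro generates functions like
// `criterion_selected_tests_execution` with this same signature.
fn noop_bench(c: &mut Criterion) {
    c.bench_function("noop", |b| b.iter(|| std::hint::black_box(1 + 1)));
}

criterion_group! {
    name = sketch_benches;
    // Few samples plus a long measurement window suit slow, whole-process
    // benchmarks; the profiler emits a flamegraph per benchmark run.
    config = Criterion::default()
        .sample_size(10)
        .measurement_time(Duration::from_secs(20))
        .with_profiler(PProfProfiler::new(100, Output::Flamegraph(None)));
    targets = noop_bench
}
criterion_main!(sketch_benches);
```

Keeping execution and proving in separate groups lets each pick its own settings: proving a circuit is far slower than executing it, which is presumably why `prove_benches` samples 10 runs where `execution_benches` samples 20.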
tooling/nargo_cli/benches/utils.rs (9 additions, 4 deletions)

```diff
@@ -4,11 +4,16 @@ use std::path::PathBuf;
 fn get_selected_tests() -> Vec<PathBuf> {
     let manifest_dir = match std::env::var("CARGO_MANIFEST_DIR") {
         Ok(dir) => PathBuf::from(dir),
-        Err(_) => std::env::current_dir().unwrap().join("crates").join("nargo_cli"),
+        Err(_) => std::env::current_dir().unwrap(),
     };
-    let test_dir = manifest_dir.join("tests").join("execution_success");
+    let test_dir = manifest_dir
+        .parent()
+        .unwrap()
+        .parent()
+        .unwrap()
+        .join("test_programs")
+        .join("execution_success");
 
-    let selected_tests =
-        vec!["8_integration", "sha256_blocks", "struct", "eddsa", "regression", "regression_2099"];
+    let selected_tests = vec!["struct", "eddsa", "regression"];
     selected_tests.into_iter().map(|t| test_dir.join(t)).collect()
 }
```
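To make the new path logic concrete: when `CARGO_MANIFEST_DIR` is set it points at the crate directory, `tooling/nargo_cli`, so two `.parent()` hops reach the repository root before descending into `test_programs/execution_success`. A small sketch with hypothetical paths (the helper name `resolve_test_dir` is ours, not the PR's):

```rust
use std::path::{Path, PathBuf};

// Mirrors the PR's path logic: climb from <repo>/tooling/nargo_cli
// up to <repo>, then descend into test_programs/execution_success.
fn resolve_test_dir(manifest_dir: &Path) -> PathBuf {
    manifest_dir
        .parent() // <repo>/tooling
        .unwrap()
        .parent() // <repo>
        .unwrap()
        .join("test_programs")
        .join("execution_success")
}

fn main() {
    let manifest_dir = PathBuf::from("/repo/tooling/nargo_cli");
    assert_eq!(
        resolve_test_dir(&manifest_dir),
        PathBuf::from("/repo/test_programs/execution_success")
    );
}
```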
