chore: add benchmarks for serializing a dummy program (#4813)
# Description

## Problem\*

Resolves <!-- Link to GitHub Issue -->

## Summary\*

This PR adds some benchmarks for serializing a (not particularly
realistic) ACIR program, to give a sense of how serialization scales
with the number of opcodes in a program. As part of this I've fixed
some issues in the existing benchmarks which prevented good flamegraphs
from being generated on Linux.

I've also smoothed out the experience of creating flamegraphs with new
scripts:

1. run `sudo ./scripts/benchmark_start.sh`
2. run `cargo bench --bench serialization -- --profile-time=10`
3. run `sudo ./scripts/benchmark_stop.sh`

The flamegraphs for all of the serialization benchmarks will then be
stored in `target/criterion/<benchmark_name>/profile`.
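
As a convenience, the three steps above can be combined into a single wrapper script. A minimal sketch (the script below and its name are hypothetical, not part of this PR):

```bash
#!/bin/bash
# Hypothetical wrapper around the profiling workflow described above:
# relax the kernel's profiling restriction, run the serialization
# benchmarks under the profiler, then restore the restriction on exit
# even if the benchmark run fails.
set -e
sudo ./scripts/benchmark_start.sh
trap 'sudo ./scripts/benchmark_stop.sh' EXIT
cargo bench --bench serialization -- --profile-time=10
```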

The start and stop scripts relax and then restore the kernel's
`perf_event_paranoid` setting; relaxing it improves the quality of
profiling data at the expense of security.

## Additional Context



## Documentation\*

Check one:
- [x] No documentation needed.
- [ ] Documentation included in this PR.
- [ ] **[For Experimental Features]** Documentation to be submitted in a
separate PR.

# PR Checklist\*

- [x] I have tested the changes locally.
- [x] I have formatted the changes with [Prettier](https://prettier.io/)
and/or `cargo fmt` on default settings.
TomAFrench authored Apr 17, 2024
1 parent 6cc105e commit 5f02129
Showing 7 changed files with 171 additions and 8 deletions.
28 changes: 26 additions & 2 deletions Cargo.lock

Some generated files are not rendered by default.

8 changes: 8 additions & 0 deletions Cargo.toml
@@ -104,6 +104,14 @@ chumsky = { git = "https://github.com/jfecher/chumsky", rev = "ad9d312", default-features = false, features = [
"ahash",
"std",
] }

# Benchmarking
criterion = "0.5.0"
# Note that using the "frame-pointer" feature breaks flamegraphs on linux
# https://github.com/tikv/pprof-rs/pull/172
pprof = { version = "0.13", features = ["flamegraph", "criterion"] }


dirs = "4"
serde = { version = "1.0.136", features = ["derive"] }
serde_json = "1.0"
6 changes: 6 additions & 0 deletions acvm-repo/acir/Cargo.toml
@@ -28,8 +28,14 @@ strum_macros = "0.24"
serde-reflection = "0.3.6"
serde-generate = "0.25.1"
fxhash.workspace = true
criterion.workspace = true
pprof.workspace = true

[features]
default = ["bn254"]
bn254 = ["acir_field/bn254", "brillig/bn254"]
bls12_381 = ["acir_field/bls12_381", "brillig/bls12_381"]

[[bench]]
name = "serialization"
harness = false
123 changes: 123 additions & 0 deletions acvm-repo/acir/benches/serialization.rs
@@ -0,0 +1,123 @@
use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion, Throughput};
use std::{collections::BTreeSet, time::Duration};

use acir::{
    circuit::{Circuit, ExpressionWidth, Opcode, Program, PublicInputs},
    native_types::{Expression, Witness},
    FieldElement,
};

use pprof::criterion::{Output, PProfProfiler};

const SIZES: [usize; 9] = [10, 50, 100, 500, 1000, 5000, 10000, 50000, 100000];

fn sample_program(num_opcodes: usize) -> Program {
    let assert_zero_opcodes: Vec<Opcode> = (0..num_opcodes)
        .map(|i| {
            Opcode::AssertZero(Expression {
                mul_terms: vec![(
                    FieldElement::from(2 * i),
                    Witness(i as u32),
                    Witness(i as u32 + 10),
                )],
                linear_combinations: vec![
                    (FieldElement::from(2 * i), Witness(i as u32)),
                    (FieldElement::from(3 * i), Witness(i as u32 + 1)),
                ],
                q_c: FieldElement::from(i),
            })
        })
        .collect();

    Program {
        functions: vec![Circuit {
            current_witness_index: 4000,
            opcodes: assert_zero_opcodes.to_vec(),
            expression_width: ExpressionWidth::Bounded { width: 3 },
            private_parameters: BTreeSet::from([Witness(1), Witness(2), Witness(3), Witness(4)]),
            public_parameters: PublicInputs(BTreeSet::from([Witness(5)])),
            return_values: PublicInputs(BTreeSet::from([Witness(6)])),
            assert_messages: Vec::new(),
            recursive: false,
        }],
    }
}

fn bench_serialization(c: &mut Criterion) {
    let mut group = c.benchmark_group("serialize_program");
    for size in SIZES.iter() {
        let program = sample_program(*size);

        group.throughput(Throughput::Elements(*size as u64));
        group.bench_with_input(BenchmarkId::from_parameter(size), &program, |b, program| {
            b.iter(|| Program::serialize_program(program));
        });
    }
    group.finish();

    let mut group = c.benchmark_group("serialize_program_json");
    for size in SIZES.iter() {
        let program = sample_program(*size);

        group.throughput(Throughput::Elements(*size as u64));
        group.bench_with_input(BenchmarkId::from_parameter(size), &program, |b, program| {
            b.iter(|| {
                let mut bytes = Vec::new();
                let mut serializer = serde_json::Serializer::new(&mut bytes);
                Program::serialize_program_base64(program, &mut serializer)
            });
        });
    }
    group.finish();
}

fn bench_deserialization(c: &mut Criterion) {
    let mut group = c.benchmark_group("deserialize_program");
    for size in SIZES.iter() {
        let program = sample_program(*size);
        let serialized_program = Program::serialize_program(&program);

        group.throughput(Throughput::Elements(*size as u64));
        group.bench_with_input(
            BenchmarkId::from_parameter(size),
            &serialized_program,
            |b, program| {
                b.iter(|| Program::deserialize_program(program));
            },
        );
    }
    group.finish();

    let mut group = c.benchmark_group("deserialize_program_json");
    for size in SIZES.iter() {
        let program = sample_program(*size);

        let serialized_program = {
            let mut bytes = Vec::new();
            let mut serializer = serde_json::Serializer::new(&mut bytes);
            Program::serialize_program_base64(&program, &mut serializer).expect("should succeed");
            bytes
        };

        group.throughput(Throughput::Elements(*size as u64));
        group.bench_with_input(
            BenchmarkId::from_parameter(size),
            &serialized_program,
            |b, program| {
                b.iter(|| {
                    let mut deserializer = serde_json::Deserializer::from_slice(program);
                    Program::deserialize_program_base64(&mut deserializer)
                });
            },
        );
    }
    group.finish();
}

criterion_group!(
    name = benches;
    config = Criterion::default().sample_size(40).measurement_time(Duration::from_secs(20)).with_profiler(PProfProfiler::new(100, Output::Flamegraph(None)));
    targets = bench_serialization, bench_deserialization
);

criterion_main!(benches);
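
Once a profiling run completes, pprof writes an SVG flamegraph into each benchmark's `profile` directory under `target/criterion`. A quick way to locate them all (assuming pprof's default `flamegraph.svg` file name):

```bash
# List every flamegraph produced by the profiling run
find target/criterion -path '*/profile/*' -name 'flamegraph.svg'
```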
3 changes: 3 additions & 0 deletions scripts/benchmark_start.sh
@@ -0,0 +1,3 @@
#!/bin/bash

echo -1 | sudo tee /proc/sys/kernel/perf_event_paranoid
3 changes: 3 additions & 0 deletions scripts/benchmark_stop.sh
@@ -0,0 +1,3 @@
#!/bin/bash

echo 4 | sudo tee /proc/sys/kernel/perf_event_paranoid
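
For reference, the restriction level that these scripts toggle can be inspected at any time; `-1` permits unprivileged access to all perf events, while larger values are progressively more restrictive:

```bash
# Print the current perf_event_paranoid level
cat /proc/sys/kernel/perf_event_paranoid
```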
8 changes: 2 additions & 6 deletions tooling/nargo_cli/Cargo.toml
@@ -72,13 +72,9 @@ assert_cmd = "2.0.8"
 assert_fs = "1.0.10"
 predicates = "2.1.5"
 fm.workspace = true
-criterion = "0.5.0"
+criterion.workspace = true
+pprof.workspace = true
 paste = "1.0.14"
-pprof = { version = "0.12", features = [
-    "flamegraph",
-    "frame-pointer",
-    "criterion",
-] }
 iai = "0.1.1"
 test-binary = "3.0.2"

