Combine benchmarks to speed up cargo bench build times (#7722)

I've been using the new Criterion benchmarks and I noticed that they
take a _long_ time to build before the benchmarks can run. It turns out
`cargo bench` was building 3 separate benchmarking binaries, with most of
Nu's functionality compiled into each one.
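
For context, this is the benchmark configuration that was causing the problem (taken from the `Cargo.toml` diff below); each `[[bench]]` entry produces its own benchmark binary:

```toml
# Before: three [[bench]] targets, so three benchmark binaries get compiled,
# each linking in most of Nu's crates.
[[bench]]
name = "encoder_benchmark"
harness = false

[[bench]]
name = "eval_benchmark"
harness = false

[[bench]]
name = "parser_benchmark"
harness = false
```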

As a simple temporary fix, I've moved all the benchmarks into a single
file so that we only build 1 binary.
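
After this change, `Cargo.toml` declares a single `[[bench]]` target, and the individual `*_benchmarks` functions are all registered in one `criterion_group!` in the new `benches/benchmarks.rs` (both shown in the diff below):

```toml
# After: one [[bench]] target, so `cargo bench` builds just one binary.
[[bench]]
name = "benchmarks"
harness = false
```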

### Future work

It would be nice to split the unrelated benchmarks out into modules, but
when I tried that, a separate binary still got built for each one. I
suspect Criterion's macros are doing something funny with module or file
names. I've left a FIXME in the code to investigate this further.
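
One possible direction for that split (just a sketch, nothing in this PR depends on it): Cargo treats every top-level `.rs` file in `benches/` as a separate bench target, but a subdirectory containing a `main.rs` is treated as a single target, so the groups could live as plain modules under a hypothetical `benches/benchmarks/` directory:

```rust
// benches/benchmarks/main.rs -- hypothetical single entry point; Cargo would
// treat the whole benches/benchmarks/ directory as one bench target.
use criterion::{criterion_group, criterion_main};

// Submodules sit next to main.rs and are not picked up as separate targets.
// The *_benchmarks functions would need to be `pub` in their modules.
mod encoding; // benches/benchmarks/encoding.rs
mod eval;     // benches/benchmarks/eval.rs
mod parsing;  // benches/benchmarks/parsing.rs

criterion_group!(
    benches,
    parsing::parser_benchmarks,
    eval::eval_benchmarks,
    encoding::encoding_benchmarks,
    encoding::decoding_benchmarks
);
criterion_main!(benches);
```

This layout is untested here; Cargo should infer `benches/benchmarks/main.rs` as the path for a bench target named `benchmarks`, or `path` can be set explicitly in the `[[bench]]` section.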
Reilly Wood 2023-01-10 17:51:25 -08:00 committed by GitHub
parent 5664ee7bda
commit 9a274128ce
5 changed files with 188 additions and 161 deletions

Cargo.toml

@@ -146,13 +146,5 @@ path = "src/main.rs"
 # Run all benchmarks with `cargo bench`
 # Run individual benchmarks like `cargo bench -- <regex>` e.g. `cargo bench -- parse`
 [[bench]]
-name = "encoder_benchmark"
-harness = false
-
-[[bench]]
-name = "eval_benchmark"
-harness = false
-
-[[bench]]
-name = "parser_benchmark"
+name = "benchmarks"
 harness = false

benches/benchmarks.rs (new file)

@@ -0,0 +1,187 @@
use criterion::{criterion_group, criterion_main, BatchSize, Criterion};
use nu_cli::eval_source;
use nu_parser::parse;
use nu_plugin::{EncodingType, PluginResponse};
use nu_protocol::{PipelineData, Span, Value};
use nu_utils::{get_default_config, get_default_env};
// FIXME: All benchmarks live in this 1 file to speed up build times when benchmarking.
// When the *_benchmarks functions were in different files, `cargo bench` would build
// an executable for every single one - incredibly slowly. Would be nice to figure out
// a way to split things up again.
fn parser_benchmarks(c: &mut Criterion) {
let mut engine_state = nu_command::create_default_context();
// parsing config.nu breaks without PWD set
engine_state.add_env_var(
"PWD".into(),
Value::string("/some/dir".to_string(), Span::test_data()),
);
let default_env = get_default_env().as_bytes();
c.bench_function("parse_default_env_file", |b| {
b.iter_batched(
|| nu_protocol::engine::StateWorkingSet::new(&engine_state),
|mut working_set| parse(&mut working_set, None, default_env, false, &[]),
BatchSize::SmallInput,
)
});
let default_config = get_default_config().as_bytes();
c.bench_function("parse_default_config_file", |b| {
b.iter_batched(
|| nu_protocol::engine::StateWorkingSet::new(&engine_state),
|mut working_set| parse(&mut working_set, None, default_config, false, &[]),
BatchSize::SmallInput,
)
});
c.bench_function("eval default_env.nu", |b| {
b.iter(|| {
let mut engine_state = nu_command::create_default_context();
let mut stack = nu_protocol::engine::Stack::new();
eval_source(
&mut engine_state,
&mut stack,
get_default_env().as_bytes(),
"default_env.nu",
PipelineData::empty(),
)
})
});
c.bench_function("eval default_config.nu", |b| {
b.iter(|| {
let mut engine_state = nu_command::create_default_context();
// parsing config.nu breaks without PWD set
engine_state.add_env_var(
"PWD".into(),
Value::string("/some/dir".to_string(), Span::test_data()),
);
let mut stack = nu_protocol::engine::Stack::new();
eval_source(
&mut engine_state,
&mut stack,
get_default_config().as_bytes(),
"default_config.nu",
PipelineData::empty(),
)
})
});
}
fn eval_benchmarks(c: &mut Criterion) {
c.bench_function("eval default_env.nu", |b| {
b.iter(|| {
let mut engine_state = nu_command::create_default_context();
let mut stack = nu_protocol::engine::Stack::new();
eval_source(
&mut engine_state,
&mut stack,
get_default_env().as_bytes(),
"default_env.nu",
PipelineData::empty(),
)
})
});
c.bench_function("eval default_config.nu", |b| {
b.iter(|| {
let mut engine_state = nu_command::create_default_context();
// parsing config.nu breaks without PWD set
engine_state.add_env_var(
"PWD".into(),
Value::string("/some/dir".to_string(), Span::test_data()),
);
let mut stack = nu_protocol::engine::Stack::new();
eval_source(
&mut engine_state,
&mut stack,
get_default_config().as_bytes(),
"default_config.nu",
PipelineData::empty(),
)
})
});
}
// generate a new table data with `row_cnt` rows, `col_cnt` columns.
fn encoding_test_data(row_cnt: usize, col_cnt: usize) -> Value {
let columns: Vec<String> = (0..col_cnt).map(|x| format!("col_{x}")).collect();
let vals: Vec<Value> = (0..col_cnt as i64).map(Value::test_int).collect();
Value::List {
vals: (0..row_cnt)
.map(|_| Value::test_record(columns.clone(), vals.clone()))
.collect(),
span: Span::test_data(),
}
}
fn encoding_benchmarks(c: &mut Criterion) {
let mut group = c.benchmark_group("Encoding");
let test_cnt_pairs = [
(100, 5),
(100, 10),
(100, 15),
(1000, 5),
(1000, 10),
(1000, 15),
(10000, 5),
(10000, 10),
(10000, 15),
];
for (row_cnt, col_cnt) in test_cnt_pairs.into_iter() {
for fmt in ["json", "msgpack"] {
group.bench_function(&format!("{fmt} encode {row_cnt} * {col_cnt}"), |b| {
let mut res = vec![];
let test_data =
PluginResponse::Value(Box::new(encoding_test_data(row_cnt, col_cnt)));
let encoder = EncodingType::try_from_bytes(fmt.as_bytes()).unwrap();
b.iter(|| encoder.encode_response(&test_data, &mut res))
});
}
}
group.finish();
}
fn decoding_benchmarks(c: &mut Criterion) {
let mut group = c.benchmark_group("Decoding");
let test_cnt_pairs = [
(100, 5),
(100, 10),
(100, 15),
(1000, 5),
(1000, 10),
(1000, 15),
(10000, 5),
(10000, 10),
(10000, 15),
];
for (row_cnt, col_cnt) in test_cnt_pairs.into_iter() {
for fmt in ["json", "msgpack"] {
group.bench_function(&format!("{fmt} decode for {row_cnt} * {col_cnt}"), |b| {
let mut res = vec![];
let test_data =
PluginResponse::Value(Box::new(encoding_test_data(row_cnt, col_cnt)));
let encoder = EncodingType::try_from_bytes(fmt.as_bytes()).unwrap();
encoder.encode_response(&test_data, &mut res).unwrap();
let mut binary_data = std::io::Cursor::new(res);
b.iter(|| {
binary_data.set_position(0);
encoder.decode_response(&mut binary_data)
})
});
}
}
group.finish();
}
criterion_group!(
benches,
parser_benchmarks,
eval_benchmarks,
encoding_benchmarks,
decoding_benchmarks
);
criterion_main!(benches);

benches/encoder_benchmark.rs (deleted)

@@ -1,76 +0,0 @@
use criterion::{criterion_group, criterion_main, Criterion};
use nu_plugin::{EncodingType, PluginResponse};
use nu_protocol::{Span, Value};
// generate a new table data with `row_cnt` rows, `col_cnt` columns.
fn new_test_data(row_cnt: usize, col_cnt: usize) -> Value {
let columns: Vec<String> = (0..col_cnt).map(|x| format!("col_{x}")).collect();
let vals: Vec<Value> = (0..col_cnt as i64).map(Value::test_int).collect();
Value::List {
vals: (0..row_cnt)
.map(|_| Value::test_record(columns.clone(), vals.clone()))
.collect(),
span: Span::test_data(),
}
}
fn bench_encoding(c: &mut Criterion) {
let mut group = c.benchmark_group("Encoding");
let test_cnt_pairs = [
(100, 5),
(100, 10),
(100, 15),
(1000, 5),
(1000, 10),
(1000, 15),
(10000, 5),
(10000, 10),
(10000, 15),
];
for (row_cnt, col_cnt) in test_cnt_pairs.into_iter() {
for fmt in ["json", "msgpack"] {
group.bench_function(&format!("{fmt} encode {row_cnt} * {col_cnt}"), |b| {
let mut res = vec![];
let test_data = PluginResponse::Value(Box::new(new_test_data(row_cnt, col_cnt)));
let encoder = EncodingType::try_from_bytes(fmt.as_bytes()).unwrap();
b.iter(|| encoder.encode_response(&test_data, &mut res))
});
}
}
group.finish();
}
fn bench_decoding(c: &mut Criterion) {
let mut group = c.benchmark_group("Decoding");
let test_cnt_pairs = [
(100, 5),
(100, 10),
(100, 15),
(1000, 5),
(1000, 10),
(1000, 15),
(10000, 5),
(10000, 10),
(10000, 15),
];
for (row_cnt, col_cnt) in test_cnt_pairs.into_iter() {
for fmt in ["json", "msgpack"] {
group.bench_function(&format!("{fmt} decode for {row_cnt} * {col_cnt}"), |b| {
let mut res = vec![];
let test_data = PluginResponse::Value(Box::new(new_test_data(row_cnt, col_cnt)));
let encoder = EncodingType::try_from_bytes(fmt.as_bytes()).unwrap();
encoder.encode_response(&test_data, &mut res).unwrap();
let mut binary_data = std::io::Cursor::new(res);
b.iter(|| {
binary_data.set_position(0);
encoder.decode_response(&mut binary_data)
})
});
}
}
group.finish();
}
criterion_group!(benches, bench_encoding, bench_decoding);
criterion_main!(benches);

benches/eval_benchmark.rs (deleted)

@@ -1,42 +0,0 @@
use criterion::{criterion_group, criterion_main, Criterion};
use nu_cli::eval_source;
use nu_protocol::{PipelineData, Span, Value};
use nu_utils::{get_default_config, get_default_env};
fn criterion_benchmark(c: &mut Criterion) {
c.bench_function("eval default_env.nu", |b| {
b.iter(|| {
let mut engine_state = nu_command::create_default_context();
let mut stack = nu_protocol::engine::Stack::new();
eval_source(
&mut engine_state,
&mut stack,
get_default_env().as_bytes(),
"default_env.nu",
PipelineData::empty(),
)
})
});
c.bench_function("eval default_config.nu", |b| {
b.iter(|| {
let mut engine_state = nu_command::create_default_context();
// parsing config.nu breaks without PWD set
engine_state.add_env_var(
"PWD".into(),
Value::string("/some/dir".to_string(), Span::test_data()),
);
let mut stack = nu_protocol::engine::Stack::new();
eval_source(
&mut engine_state,
&mut stack,
get_default_config().as_bytes(),
"default_config.nu",
PipelineData::empty(),
)
})
});
}
criterion_group!(benches, criterion_benchmark);
criterion_main!(benches);

benches/parser_benchmark.rs (deleted)

@@ -1,34 +0,0 @@
use criterion::{criterion_group, criterion_main, BatchSize, Criterion};
use nu_parser::parse;
use nu_protocol::{Span, Value};
use nu_utils::{get_default_config, get_default_env};
fn criterion_benchmark(c: &mut Criterion) {
let mut engine_state = nu_command::create_default_context();
// parsing config.nu breaks without PWD set
engine_state.add_env_var(
"PWD".into(),
Value::string("/some/dir".to_string(), Span::test_data()),
);
let default_env = get_default_env().as_bytes();
c.bench_function("parse_default_env_file", |b| {
b.iter_batched(
|| nu_protocol::engine::StateWorkingSet::new(&engine_state),
|mut working_set| parse(&mut working_set, None, default_env, false, &[]),
BatchSize::SmallInput,
)
});
let default_config = get_default_config().as_bytes();
c.bench_function("parse_default_config_file", |b| {
b.iter_batched(
|| nu_protocol::engine::StateWorkingSet::new(&engine_state),
|mut working_set| parse(&mut working_set, None, default_config, false, &[]),
BatchSize::SmallInput,
)
});
}
criterion_group!(benches, criterion_benchmark);
criterion_main!(benches);