mirror of https://github.com/rust-lang/rust-clippy (synced 2024-11-27 15:11:30 +00:00)
Add SPEEDTEST
parent 15ed281699 · commit d69c4f553c
2 changed files with 64 additions and 6 deletions
book/src/development/speedtest.md (new file, 24 additions)

@@ -0,0 +1,24 @@
# Speedtest

`SPEEDTEST` is the tool we use to measure a lint's performance; it works by executing the same test several times.

It's useful for measuring changes to current lints and deciding whether a change affects performance too much.
`SPEEDTEST` is controlled by the `SPEEDTEST` (and `SPEEDTEST_*`) environment variables.

## Checking Speedtest

To do a simple speed test of a lint (e.g. `allow_attributes`), use this command:

```sh
$ SPEEDTEST=ui TESTNAME="allow_attributes" cargo uitest -- --nocapture
```

This will run all `ui` tests (`SPEEDTEST=ui`) whose names start with `allow_attributes`. By default, `SPEEDTEST`
iterates each test 1000 times, but you can change this with `SPEEDTEST_ITERATIONS`:

```sh
$ SPEEDTEST=toml SPEEDTEST_ITERATIONS=100 TESTNAME="semicolon_block" cargo uitest -- --nocapture
```

> **WARNING**: Be sure to add `-- --nocapture` at the end of the command. If you leave it off
> (e.g. `SPEEDTEST=ui TESTNAME="let_underscore_untyped" cargo uitest`), the average test time
> will not be shown.
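As a usage illustration of how these variables combine in practice, here is a sketch of a before/after comparison one might run when touching a lint. The branch names are hypothetical; only `SPEEDTEST`, `SPEEDTEST_ITERATIONS`, `TESTNAME`, and `cargo uitest` come from the document above.

```sh
# Baseline timing on the unmodified branch (branch names are hypothetical).
git checkout master
SPEEDTEST=ui SPEEDTEST_ITERATIONS=500 TESTNAME="allow_attributes" cargo uitest -- --nocapture

# Same speedtest on the branch that changes the lint, for comparison.
git checkout my-lint-change
SPEEDTEST=ui SPEEDTEST_ITERATIONS=500 TESTNAME="allow_attributes" cargo uitest -- --nocapture
```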
@@ -213,12 +213,46 @@ fn run_ui_cargo() {

 fn main() {
     set_var("CLIPPY_DISABLE_DOCS_LINKS", "true");
-    run_ui();
-    run_ui_toml();
-    run_ui_cargo();
-    run_internal_tests();
-    rustfix_coverage_known_exceptions_accuracy();
-    ui_cargo_toml_metadata();
+    // The SPEEDTEST_* env variables can be used to check Clippy's performance on your PR. It runs the
+    // affected test 1000 times and gets the average.
+    if let Ok(speedtest) = std::env::var("SPEEDTEST") {
+        println!("----------- STARTING SPEEDTEST -----------");
+        let f;
+        match speedtest.as_str() {
+            "ui" => f = run_ui as fn(),
+            "cargo" => f = run_ui_cargo as fn(),
+            "toml" => f = run_ui_toml as fn(),
+            "internal" => f = run_internal_tests as fn(),
+            "rustfix-coverage-known-exceptions-accuracy" => f = rustfix_coverage_known_exceptions_accuracy as fn(),
+            "ui-cargo-toml-metadata" => f = ui_cargo_toml_metadata as fn(),
+            _ => panic!("unknown speedtest: {speedtest} || accepted speedtests are: [ui, cargo, toml, internal]"),
+        }
+
+        let iterations;
+        if let Ok(iterations_str) = std::env::var("SPEEDTEST_ITERATIONS") {
+            iterations = iterations_str
+                .parse::<u64>()
+                .unwrap_or_else(|_| panic!("Couldn't parse `{}`, please use a valid u64", iterations_str));
+        } else {
+            iterations = 1000;
+        }
+
+        let mut sum = 0;
+        for _ in 0..iterations {
+            let start = std::time::Instant::now();
+            f();
+            sum += start.elapsed().as_millis();
+        }
+        println!("average {} time: {} millis.", speedtest.to_uppercase(), sum / u128::from(iterations));
+    } else {
+        run_ui();
+        run_ui_toml();
+        run_ui_cargo();
+        run_internal_tests();
+        rustfix_coverage_known_exceptions_accuracy();
+        ui_cargo_toml_metadata();
+    }
 }

 const RUSTFIX_COVERAGE_KNOWN_EXCEPTIONS: &[&str] = &[
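For readers skimming the diff, the core of the change is a dispatch-on-name plus a time-and-average loop. Below is a minimal, self-contained sketch of that pattern, assuming placeholder suite functions; the `*_stub` names and the two-arm match are illustrative only and are not Clippy's actual test harness.

```rust
use std::time::Instant;

// Placeholder suite entry points standing in for run_ui, run_ui_toml, etc.
// They do no real work in this sketch.
fn run_ui_stub() {}
fn run_toml_stub() {}

fn main() {
    // Select which suite to time via SPEEDTEST, as the diff does (default here is "ui").
    let speedtest = std::env::var("SPEEDTEST").unwrap_or_else(|_| String::from("ui"));
    let f: fn() = match speedtest.as_str() {
        "ui" => run_ui_stub,
        "toml" => run_toml_stub,
        other => panic!("unknown speedtest: {other}"),
    };

    // Iteration count, overridable via SPEEDTEST_ITERATIONS (the diff defaults to 1000).
    let iterations: u64 = std::env::var("SPEEDTEST_ITERATIONS")
        .ok()
        .map(|s| s.parse().expect("SPEEDTEST_ITERATIONS must be a valid u64"))
        .unwrap_or(1000);

    // Time the selected function `iterations` times and report the mean in milliseconds.
    let mut sum_millis: u128 = 0;
    for _ in 0..iterations {
        let start = Instant::now();
        f();
        sum_millis += start.elapsed().as_millis();
    }
    println!(
        "average {} time: {} millis.",
        speedtest.to_uppercase(),
        sum_millis / u128::from(iterations)
    );
}
```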