mirror of
https://github.com/nushell/nushell
synced 2024-12-29 14:33:13 +00:00
2a3805c164
# Description Uses "normal" module `std/<submodule>/mod.nu` instead of renaming the files (as requested in #13842). # User-Facing Changes No user-facing changes other than in `view files` results. Imports remain the same after this PR. # Tests + Formatting - 🟢 `toolkit fmt` - 🟢 `toolkit clippy` - 🟢 `toolkit test` - 🟢 `toolkit test stdlib` Also manually confirmed that it does not interfere with nupm, since we did have a conflict at one point (and it's not possible to test here). # Performance Tests ## Linux ### Nushell Startup - No config ```nu bench --pretty -n 200 { <path_to>/nu -c "exit" } ``` | Release | Startup Time | | --- | --- | | 0.98.0 | 22ms 730µs 768ns +/- 1ms 515µs 942ns | This commit | 9ms 312µs 68ns +/- 709µs 378ns | Yesterday's nightly | 9ms 230µs 953ns +/- 9ms 67µs 689ns ### Nushell Startup - Load full standard library Measures relative impact of a full `use std *`, which isn't recommended, but worth tracking. ```nu bench --pretty -n 200 { <path_to>/nu -c "use std *; exit" } ``` | Release | Startup Time | | --- | --- | | 0.98.0 | 23ms 10µs 636ns +/- 1ms 277µs 854ns | This commit | 26ms 922µs 769ns +/- 562µs 538ns | Yesterday's nightly | 28ms 133µs 95ns +/- 761µs 943ns | `deprecated_dirs` removal PR * | 23ms 610µs 333ns +/- 369µs 436ns \* Current increase is partially due to double-loading `dirs` with removal warning in older version. # After Submitting Still TODO - Update standard library doc
71 lines
No EOL
2.5 KiB
Text
71 lines
No EOL
2.5 KiB
Text
# run a piece of `nushell` code multiple times and measure the time of execution.
#
# this command returns a benchmark report of the following form:
# ```
# record<
#     mean: duration
#     min: duration
#     max: duration
#     std: duration
#     times: list<duration>
# >
# ```
#
# > **Note**
# > `std bench --pretty` will return a `string`.
#
# # Examples
#     measure the performance of simple addition
#     > std bench { 1 + 2 } -n 10 | table -e
#     ╭───────┬────────────────────╮
#     │ mean  │ 4µs 956ns          │
#     │ min   │ 2µs 966ns          │
#     │ max   │ 19µs 402ns         │
#     │ std   │ 4µs 831ns          │
#     │       │ ╭───┬────────────╮ │
#     │ times │ │ 0 │ 19µs 402ns │ │
#     │       │ │ 1 │ 4µs 322ns  │ │
#     │       │ │ 2 │ 3µs 352ns  │ │
#     │       │ │ 3 │ 2µs 966ns  │ │
#     │       │ │ 4 │ 3µs        │ │
#     │       │ │ 5 │ 3µs 86ns   │ │
#     │       │ │ 6 │ 3µs 84ns   │ │
#     │       │ │ 7 │ 3µs 604ns  │ │
#     │       │ │ 8 │ 3µs 98ns   │ │
#     │       │ │ 9 │ 3µs 653ns  │ │
#     │       │ ╰───┴────────────╯ │
#     ╰───────┴────────────────────╯
#
#     get a pretty benchmark report
#     > std bench { 1 + 2 } --pretty
#     3µs 125ns +/- 2µs 408ns
export def main [
    code: closure  # the piece of `nushell` code to measure the performance of
    --rounds (-n): int = 50  # the number of benchmark rounds (hopefully the more rounds the less variance)
    --verbose (-v)  # be more verbose (namely prints the progress)
    --pretty  # shows the results in human-readable format: "<mean> +/- <stddev>"
] {
    # run the closure $rounds times, collecting each wall-clock duration
    # as a float amount of nanoseconds (so the math commands below work)
    let times = (
        seq 1 $rounds | each {|i|
            # `\r` keeps the progress counter on a single line
            if $verbose { print -n $"($i) / ($rounds)\r" }
            timeit { do $code } | into int | into float
        }
    )

    # finish the progress line with a trailing newline
    if $verbose { print $"($rounds) / ($rounds)" }

    # aggregate statistics, converted back to real `duration` values
    let report = {
        mean: ($times | math avg | from ns)
        min: ($times | math min | from ns)
        max: ($times | math max | from ns)
        std: ($times | math stddev | from ns)
        times: ($times | each { from ns })
    }

    if $pretty {
        $"($report.mean) +/- ($report.std)"
    } else {
        $report
    }
}
|
|
|
|
# turn a raw number of nanoseconds (as produced by `timeit | into int`)
# into a proper `duration` value
def "from ns" [] {
    $"($in)ns" | into duration
}