Mirror of https://github.com/nushell/nushell, synced 2024-12-26 13:03:07 +00:00

Commit ccc1509645 — Merge branch 'main' of https://github.com/nushell/nushell into patch/bump-tabled-to-0.17.0

278 changed files with 6190 additions and 3832 deletions
.github/workflows/ci.yml (vendored) — 31 lines changed

@@ -162,3 +162,34 @@ jobs:
           else
             echo "no changes in working directory";
           fi
+
+  build-wasm:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4.1.7
+
+      - name: Setup Rust toolchain and cache
+        uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
+
+      - name: Add wasm32-unknown-unknown target
+        run: rustup target add wasm32-unknown-unknown
+
+      - run: cargo build -p nu-cmd-base --no-default-features --target wasm32-unknown-unknown
+      - run: cargo build -p nu-cmd-extra --no-default-features --target wasm32-unknown-unknown
+      - run: cargo build -p nu-cmd-lang --no-default-features --target wasm32-unknown-unknown
+      - run: cargo build -p nu-color-config --no-default-features --target wasm32-unknown-unknown
+      - run: cargo build -p nu-command --no-default-features --target wasm32-unknown-unknown
+      - run: cargo build -p nu-derive-value --no-default-features --target wasm32-unknown-unknown
+      - run: cargo build -p nu-engine --no-default-features --target wasm32-unknown-unknown
+      - run: cargo build -p nu-glob --no-default-features --target wasm32-unknown-unknown
+      - run: cargo build -p nu-json --no-default-features --target wasm32-unknown-unknown
+      - run: cargo build -p nu-parser --no-default-features --target wasm32-unknown-unknown
+      - run: cargo build -p nu-path --no-default-features --target wasm32-unknown-unknown
+      - run: cargo build -p nu-pretty-hex --no-default-features --target wasm32-unknown-unknown
+      - run: cargo build -p nu-protocol --no-default-features --target wasm32-unknown-unknown
+      - run: cargo build -p nu-std --no-default-features --target wasm32-unknown-unknown
+      - run: cargo build -p nu-system --no-default-features --target wasm32-unknown-unknown
+      - run: cargo build -p nu-table --no-default-features --target wasm32-unknown-unknown
+      - run: cargo build -p nu-term-grid --no-default-features --target wasm32-unknown-unknown
+      - run: cargo build -p nu-utils --no-default-features --target wasm32-unknown-unknown
+      - run: cargo build -p nuon --no-default-features --target wasm32-unknown-unknown
.github/workflows/typos.yml (vendored) — 2 lines changed

@@ -10,4 +10,4 @@ jobs:
         uses: actions/checkout@v4.1.7

       - name: Check spelling
-        uses: crate-ci/typos@v1.27.3
+        uses: crate-ci/typos@v1.28.2
Cargo.lock (generated) — 1477 lines changed. File diff suppressed because it is too large.
Cargo.toml — 35 lines changed

@@ -10,7 +10,7 @@ homepage = "https://www.nushell.sh"
 license = "MIT"
 name = "nu"
 repository = "https://github.com/nushell/nushell"
-rust-version = "1.80.1"
+rust-version = "1.81.0"
 version = "0.100.1"

 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

@@ -92,7 +92,7 @@ filetime = "0.2"
 fuzzy-matcher = "0.3"
 heck = "0.5.0"
 human-date-parser = "0.2.0"
-indexmap = "2.6"
+indexmap = "2.7"
 indicatif = "0.17"
 interprocess = "2.2.0"
 is_executable = "1.0"

@@ -106,11 +106,11 @@ lsp-server = "0.7.5"
 lsp-types = { version = "0.95.0", features = ["proposed"] }
 mach2 = "0.4"
 md5 = { version = "0.10", package = "md-5" }
-miette = "7.2"
+miette = "7.3"
 mime = "0.3.17"
 mime_guess = "2.0"
 mockito = { version = "1.6", default-features = false }
-multipart-rs = "0.1.11"
+multipart-rs = "0.1.13"
 native-tls = "0.2"
 nix = { version = "0.29", default-features = false }
 notify-debouncer-full = { version = "0.3", default-features = false }

@@ -127,13 +127,14 @@ pretty_assertions = "1.4"
 print-positions = "0.6"
 proc-macro-error = { version = "1.0", default-features = false }
 proc-macro2 = "1.0"
-procfs = "0.16.0"
+procfs = "0.17.0"
 pwd = "1.3"
 quick-xml = "0.37.0"
 quickcheck = "1.0"
 quickcheck_macros = "1.0"
 quote = "1.0"
 rand = "0.8"
+getrandom = "0.2" # pick same version that rand requires
 rand_chacha = "0.3.1"
 ratatui = "0.26"
 rayon = "1.10"

@@ -142,10 +143,11 @@ regex = "1.9.5"
 rmp = "0.8"
 rmp-serde = "1.3"
 ropey = "1.6.1"
-roxmltree = "0.19"
+roxmltree = "0.20"
 rstest = { version = "0.23", default-features = false }
 rusqlite = "0.31"
 rust-embed = "8.5.0"
+scopeguard = { version = "1.2.0" }
 serde = { version = "1.0" }
 serde_json = "1.0"
 serde_urlencoded = "0.7.1"

@@ -157,13 +159,13 @@ sysinfo = "0.32"
 tabled = { version = "0.17.0", default-features = false }
 tempfile = "3.14"
 terminal_size = "0.4"
-titlecase = "2.0"
+titlecase = "3.0"
 toml = "0.8"
 trash = "5.2"
 umask = "2.1"
 unicode-segmentation = "1.12"
-unicode-width = "0.1"
-ureq = { version = "2.10", default-features = false }
+unicode-width = "0.2"
+ureq = { version = "2.12", default-features = false }
 url = "2.2"
 uu_cp = "0.0.28"
 uu_mkdir = "0.0.28"

@@ -176,7 +178,7 @@ uucore = "0.0.28"
 uuid = "1.11.0"
 v_htmlescape = "0.15.0"
 wax = "0.6"
-which = "6.0.0"
+which = "7.0.0"
 windows = "0.56"
 windows-sys = "0.48"
 winreg = "0.52"

@@ -249,13 +251,18 @@ tempfile = { workspace = true }

 [features]
 plugin = [
-    "nu-plugin-engine",
-    "nu-cmd-plugin",
-    "nu-cli/plugin",
-    "nu-parser/plugin",
-    "nu-command/plugin",
-    "nu-protocol/plugin",
-    "nu-engine/plugin",
+    # crates
+    "nu-cmd-plugin",
+    "nu-plugin-engine",
+
+    # features
+    "nu-cli/plugin",
+    "nu-cmd-lang/plugin",
+    "nu-command/plugin",
+    "nu-engine/plugin",
+    "nu-parser/plugin",
+    "nu-protocol/plugin",
 ]

 default = [

@@ -314,7 +321,7 @@ bench = false
 # To use a development version of a dependency please use a global override here
 # changing versions in each sub-crate of the workspace is tedious
 [patch.crates-io]
-# reedline = { git = "https://github.com/nushell/reedline", branch = "main" }
+reedline = { git = "https://github.com/nushell/reedline", branch = "main" }
 # nu-ansi-term = {git = "https://github.com/nushell/nu-ansi-term.git", branch = "main"}

 # Run all benchmarks with `cargo bench`
@@ -58,7 +58,7 @@ For details about which platforms the Nushell team actively supports, see [our p

 ## Configuration

-The default configurations can be found at [sample_config](crates/nu-utils/src/sample_config)
+The default configurations can be found at [sample_config](crates/nu-utils/src/default_files)
 which are the configuration files one gets when they startup Nushell for the first time.

 It sets all of the default configuration to run Nushell. From here one can
@@ -19,11 +19,11 @@ tempfile = { workspace = true }

 [dependencies]
 nu-cmd-base = { path = "../nu-cmd-base", version = "0.100.1" }
-nu-engine = { path = "../nu-engine", version = "0.100.1" }
+nu-engine = { path = "../nu-engine", version = "0.100.1", features = ["os"] }
 nu-path = { path = "../nu-path", version = "0.100.1" }
 nu-parser = { path = "../nu-parser", version = "0.100.1" }
 nu-plugin-engine = { path = "../nu-plugin-engine", version = "0.100.1", optional = true }
-nu-protocol = { path = "../nu-protocol", version = "0.100.1" }
+nu-protocol = { path = "../nu-protocol", version = "0.100.1", features = ["os"] }
 nu-utils = { path = "../nu-utils", version = "0.100.1" }
 nu-color-config = { path = "../nu-color-config", version = "0.100.1" }
 nu-ansi-term = { workspace = true }
@@ -41,8 +41,7 @@ impl CommandCompletion {
     ) -> HashMap<String, SemanticSuggestion> {
         let mut suggs = HashMap::new();

-        // os agnostic way to get the PATH env var
-        let paths = working_set.permanent_state.get_env_var_insensitive("path");
+        let paths = working_set.permanent_state.get_path_env_var();

         if let Some(paths) = paths {
             if let Ok(paths) = paths.as_list() {
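The change above swaps a case-insensitive lookup of the literal name "path" for a single platform-aware helper on the engine state. A minimal sketch of how such a lookup can be consumed, assuming get_path_env_var returns an optional list Value as the call site above implies (the signature and return type are assumptions, not confirmed by this diff):

    // Sketch only: collect PATH entries via the platform-aware helper.
    // Assumes EngineState::get_path_env_var() -> Option<&Value> holding a list.
    fn path_entries(engine_state: &nu_protocol::engine::EngineState) -> Vec<String> {
        engine_state
            .get_path_env_var()
            .and_then(|paths| paths.as_list().ok())
            .map(|paths| paths.iter().filter_map(|p| p.coerce_string().ok()).collect())
            .unwrap_or_default()
    }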
@@ -297,7 +297,7 @@ impl NuCompleter {
                         let mut completer =
                             OperatorCompletion::new(pipeline_element.expr.clone());

-                        return self.process_completion(
+                        let operator_suggestion = self.process_completion(
                             &mut completer,
                             &working_set,
                             prefix,
@@ -305,6 +305,9 @@ impl NuCompleter {
                             fake_offset,
                             pos,
                         );
+                        if !operator_suggestion.is_empty() {
+                            return operator_suggestion;
+                        }
                     }
                 }
             }
@@ -1,13 +1,12 @@
 use crate::completions::{
-    completer::map_value_completions, Completer, CompletionOptions, MatchAlgorithm,
-    SemanticSuggestion,
+    completer::map_value_completions, Completer, CompletionOptions, SemanticSuggestion,
 };
 use nu_engine::eval_call;
 use nu_protocol::{
     ast::{Argument, Call, Expr, Expression},
     debugger::WithoutDebug,
     engine::{Stack, StateWorkingSet},
-    CompletionSort, DeclId, PipelineData, Span, Type, Value,
+    DeclId, PipelineData, Span, Type, Value,
 };
 use std::collections::HashMap;

@@ -68,6 +67,7 @@ impl Completer for CustomCompletion {
         );

         let mut custom_completion_options = None;
+        let mut should_sort = true;

         // Parse result
         let suggestions = result

@@ -85,10 +85,9 @@ impl Completer for CustomCompletion {
                 let options = val.get("options");

                 if let Some(Value::Record { val: options, .. }) = &options {
-                    let should_sort = options
-                        .get("sort")
-                        .and_then(|val| val.as_bool().ok())
-                        .unwrap_or(false);
+                    if let Some(sort) = options.get("sort").and_then(|val| val.as_bool().ok()) {
+                        should_sort = sort;
+                    }

                     custom_completion_options = Some(CompletionOptions {
                         case_sensitive: options

@@ -98,20 +97,16 @@ impl Completer for CustomCompletion {
                         positional: options
                             .get("positional")
                             .and_then(|val| val.as_bool().ok())
-                            .unwrap_or(true),
+                            .unwrap_or(completion_options.positional),
                         match_algorithm: match options.get("completion_algorithm") {
                             Some(option) => option
                                 .coerce_string()
                                 .ok()
                                 .and_then(|option| option.try_into().ok())
-                                .unwrap_or(MatchAlgorithm::Prefix),
+                                .unwrap_or(completion_options.match_algorithm),
                             None => completion_options.match_algorithm,
                         },
-                        sort: if should_sort {
-                            CompletionSort::Alphabetical
-                        } else {
-                            CompletionSort::Smart
-                        },
+                        sort: completion_options.sort,
                     });
                 }

@@ -124,9 +119,17 @@ impl Completer for CustomCompletion {

         let options = custom_completion_options.unwrap_or(completion_options.clone());
         let mut matcher = NuMatcher::new(String::from_utf8_lossy(prefix), options);

+        if should_sort {
             for sugg in suggestions {
                 matcher.add_semantic_suggestion(sugg);
             }
             matcher.results()
+        } else {
+            suggestions
+                .into_iter()
+                .filter(|sugg| matcher.matches(&sugg.suggestion.value))
+                .collect()
+        }
     }
 }
@@ -60,10 +60,6 @@ impl Completer for OperatorCompletion {
             ("bit-shr", "Bitwise shift right"),
             ("in", "Is a member of (doesn't use regex)"),
             ("not-in", "Is not a member of (doesn't use regex)"),
-            (
-                "++",
-                "Appends two lists, a list and a value, two strings, or two binary values",
-            ),
         ],
         Expr::String(_) => vec![
             ("=~", "Contains regex match"),

@@ -72,7 +68,7 @@ impl Completer for OperatorCompletion {
             ("not-like", "Does not contain regex match"),
             (
                 "++",
-                "Appends two lists, a list and a value, two strings, or two binary values",
+                "Concatenates two lists, two strings, or two binary values",
             ),
             ("in", "Is a member of (doesn't use regex)"),
             ("not-in", "Is not a member of (doesn't use regex)"),

@@ -95,10 +91,6 @@ impl Completer for OperatorCompletion {
             ("**", "Power of"),
             ("in", "Is a member of (doesn't use regex)"),
             ("not-in", "Is not a member of (doesn't use regex)"),
-            (
-                "++",
-                "Appends two lists, a list and a value, two strings, or two binary values",
-            ),
         ],
         Expr::Bool(_) => vec![
             (

@@ -113,15 +105,11 @@ impl Completer for OperatorCompletion {
             ("not", "Negates a value or expression"),
             ("in", "Is a member of (doesn't use regex)"),
             ("not-in", "Is not a member of (doesn't use regex)"),
-            (
-                "++",
-                "Appends two lists, a list and a value, two strings, or two binary values",
-            ),
         ],
         Expr::FullCellPath(path) => match path.head.expr {
             Expr::List(_) => vec![(
                 "++",
-                "Appends two lists, a list and a value, two strings, or two binary values",
+                "Concatenates two lists, two strings, or two binary values",
             )],
             Expr::Var(id) => get_variable_completions(id, working_set),
             _ => vec![],

@@ -161,7 +149,7 @@ pub fn get_variable_completions<'a>(
         Type::List(_) | Type::String | Type::Binary => vec![
             (
                 "++=",
-                "Appends a list, a value, a string, or a binary value to a variable.",
+                "Concatenates two lists, two strings, or two binary values",
             ),
             ("=", "Assigns a value to a variable."),
         ],
@@ -16,7 +16,7 @@ use crate::{
 use crossterm::cursor::SetCursorStyle;
 use log::{error, trace, warn};
 use miette::{ErrReport, IntoDiagnostic, Result};
-use nu_cmd_base::{hook::eval_hook, util::get_editor};
+use nu_cmd_base::util::get_editor;
 use nu_color_config::StyleComputer;
 #[allow(deprecated)]
 use nu_engine::{convert_env_values, current_dir_str, env_to_strings};

@@ -313,20 +313,26 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
     perf!("reset signals", start_time, use_color);

     start_time = std::time::Instant::now();
-    // Right before we start our prompt and take input from the user,
-    // fire the "pre_prompt" hook
-    if let Some(hook) = engine_state.get_config().hooks.pre_prompt.clone() {
-        if let Err(err) = eval_hook(engine_state, &mut stack, None, vec![], &hook, "pre_prompt") {
-            report_shell_error(engine_state, &err);
-        }
+    // Right before we start our prompt and take input from the user, fire the "pre_prompt" hook
+    if let Err(err) = hook::eval_hooks(
+        engine_state,
+        &mut stack,
+        vec![],
+        &engine_state.get_config().hooks.pre_prompt.clone(),
+        "pre_prompt",
+    ) {
+        report_shell_error(engine_state, &err);
     }
     perf!("pre-prompt hook", start_time, use_color);

     start_time = std::time::Instant::now();
     // Next, check all the environment variables they ask for
     // fire the "env_change" hook
-    let env_change = engine_state.get_config().hooks.env_change.clone();
-    if let Err(error) = hook::eval_env_change_hook(env_change, engine_state, &mut stack) {
+    if let Err(error) = hook::eval_env_change_hook(
+        &engine_state.get_config().hooks.env_change.clone(),
+        engine_state,
+        &mut stack,
+    ) {
         report_shell_error(engine_state, &error)
     }
     perf!("env-change hook", start_time, use_color);

@@ -511,18 +517,17 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {

         // Right before we start running the code the user gave us, fire the `pre_execution`
         // hook
-        if let Some(hook) = config.hooks.pre_execution.clone() {
+        {
             // Set the REPL buffer to the current command for the "pre_execution" hook
             let mut repl = engine_state.repl_state.lock().expect("repl state mutex");
             repl.buffer = repl_cmd_line_text.to_string();
             drop(repl);

-            if let Err(err) = eval_hook(
+            if let Err(err) = hook::eval_hooks(
                 engine_state,
                 &mut stack,
-                None,
                 vec![],
-                &hook,
+                &engine_state.get_config().hooks.pre_execution.clone(),
                 "pre_execution",
             ) {
                 report_shell_error(engine_state, &err);
@@ -144,8 +144,6 @@ impl Highlighter for NuHighlighter {
                     }
                     FlatShape::Flag => add_colored_token(&shape.1, next_token),
                     FlatShape::Pipe => add_colored_token(&shape.1, next_token),
-                    FlatShape::And => add_colored_token(&shape.1, next_token),
-                    FlatShape::Or => add_colored_token(&shape.1, next_token),
                     FlatShape::Redirection => add_colored_token(&shape.1, next_token),
                     FlatShape::Custom(..) => add_colored_token(&shape.1, next_token),
                     FlatShape::MatchPattern => add_colored_token(&shape.1, next_token),
@@ -1,3 +1,5 @@
+#![allow(clippy::byte_char_slices)]
+
 use nu_cmd_base::hook::eval_hook;
 use nu_engine::{eval_block, eval_block_with_early_return};
 use nu_parser::{lex, parse, unescape_unquote_string, Token, TokenContents};
@@ -88,6 +88,27 @@ fn completer_strings_with_options() -> NuCompleter {
     NuCompleter::new(Arc::new(engine), Arc::new(stack))
 }

+#[fixture]
+fn completer_strings_no_sort() -> NuCompleter {
+    // Create a new engine
+    let (_, _, mut engine, mut stack) = new_engine();
+    let command = r#"
+        def animals [] {
+            {
+                completions: ["zzzfoo", "foo", "not matched", "abcfoo" ],
+                options: {
+                    completion_algorithm: "fuzzy",
+                    sort: false,
+                }
+            }
+        }
+        def my-command [animal: string@animals] { print $animal }"#;
+    assert!(support::merge_input(command.as_bytes(), &mut engine, &mut stack).is_ok());
+
+    // Instantiate a new completer
+    NuCompleter::new(Arc::new(engine), Arc::new(stack))
+}
+
 #[fixture]
 fn custom_completer() -> NuCompleter {
     // Create a new engine

@@ -210,6 +231,13 @@ fn customcompletions_case_insensitive(mut completer_strings_with_options: NuComp
     match_suggestions(&expected, &suggestions);
 }

+#[rstest]
+fn customcompletions_no_sort(mut completer_strings_no_sort: NuCompleter) {
+    let suggestions = completer_strings_no_sort.complete("my-command foo", 14);
+    let expected: Vec<String> = vec!["zzzfoo".into(), "foo".into(), "abcfoo".into()];
+    match_suggestions(&expected, &suggestions);
+}
+
 #[test]
 fn dotnu_completions() {
     // Create a new engine

@@ -329,6 +357,39 @@ fn file_completions() {
     // Match the results
     match_suggestions(&expected_paths, &suggestions);

+    // Test completions for the current folder even with parts before the autocomplet
+    let target_dir = format!("cp somefile.txt {dir_str}{MAIN_SEPARATOR}");
+    let suggestions = completer.complete(&target_dir, target_dir.len());
+
+    // Create the expected values
+    let expected_paths: Vec<String> = vec![
+        folder(dir.join("another")),
+        file(dir.join("custom_completion.nu")),
+        folder(dir.join("directory_completion")),
+        file(dir.join("nushell")),
+        folder(dir.join("test_a")),
+        folder(dir.join("test_b")),
+        file(dir.join(".hidden_file")),
+        folder(dir.join(".hidden_folder")),
+    ];
+
+    #[cfg(windows)]
+    {
+        let separator = '/';
+        let target_dir = format!("cp somefile.txt {dir_str}{separator}");
+        let slash_suggestions = completer.complete(&target_dir, target_dir.len());
+
+        let expected_slash_paths: Vec<String> = expected_paths
+            .iter()
+            .map(|s| s.replace('\\', "/"))
+            .collect();
+
+        match_suggestions(&expected_slash_paths, &slash_suggestions);
+    }
+
+    // Match the results
+    match_suggestions(&expected_paths, &suggestions);
+
     // Test completions for a file
     let target_dir = format!("cp {}", folder(dir.join("another")));
     let suggestions = completer.complete(&target_dir, target_dir.len());

@@ -363,6 +424,75 @@ fn file_completions() {
     match_suggestions(&expected_paths, &suggestions);
 }

+#[test]
+fn custom_command_rest_any_args_file_completions() {
+    // Create a new engine
+    let (dir, dir_str, mut engine, mut stack) = new_engine();
+    let command = r#"def list [ ...args: any ] {}"#;
+    assert!(support::merge_input(command.as_bytes(), &mut engine, &mut stack).is_ok());
+
+    // Instantiate a new completer
+    let mut completer = NuCompleter::new(Arc::new(engine), Arc::new(stack));
+
+    // Test completions for the current folder
+    let target_dir = format!("list {dir_str}{MAIN_SEPARATOR}");
+    let suggestions = completer.complete(&target_dir, target_dir.len());
+
+    // Create the expected values
+    let expected_paths: Vec<String> = vec![
+        folder(dir.join("another")),
+        file(dir.join("custom_completion.nu")),
+        folder(dir.join("directory_completion")),
+        file(dir.join("nushell")),
+        folder(dir.join("test_a")),
+        folder(dir.join("test_b")),
+        file(dir.join(".hidden_file")),
+        folder(dir.join(".hidden_folder")),
+    ];
+
+    // Match the results
+    match_suggestions(&expected_paths, &suggestions);
+
+    // Test completions for the current folder even with parts before the autocomplet
+    let target_dir = format!("list somefile.txt {dir_str}{MAIN_SEPARATOR}");
+    let suggestions = completer.complete(&target_dir, target_dir.len());
+
+    // Create the expected values
+    let expected_paths: Vec<String> = vec![
+        folder(dir.join("another")),
+        file(dir.join("custom_completion.nu")),
+        folder(dir.join("directory_completion")),
+        file(dir.join("nushell")),
+        folder(dir.join("test_a")),
+        folder(dir.join("test_b")),
+        file(dir.join(".hidden_file")),
+        folder(dir.join(".hidden_folder")),
+    ];
+
+    // Match the results
+    match_suggestions(&expected_paths, &suggestions);
+
+    // Test completions for a file
+    let target_dir = format!("list {}", folder(dir.join("another")));
+    let suggestions = completer.complete(&target_dir, target_dir.len());
+
+    // Create the expected values
+    let expected_paths: Vec<String> = vec![file(dir.join("another").join("newfile"))];
+
+    // Match the results
+    match_suggestions(&expected_paths, &suggestions);
+
+    // Test completions for hidden files
+    let target_dir = format!("list {}", file(dir.join(".hidden_folder").join(".")));
+    let suggestions = completer.complete(&target_dir, target_dir.len());
+
+    let expected_paths: Vec<String> =
+        vec![file(dir.join(".hidden_folder").join(".hidden_subfile"))];
+
+    // Match the results
+    match_suggestions(&expected_paths, &suggestions);
+}
+
 #[cfg(windows)]
 #[test]
 fn file_completions_with_mixed_separators() {

@@ -1629,13 +1759,3 @@ fn alias_offset_bug_7754() {
     // This crashes before PR #7756
     let _suggestions = completer.complete("ll -a | c", 9);
 }
-
-#[test]
-fn get_path_env_var_8003() {
-    // Create a new engine
-    let (_, _, engine, _) = new_engine();
-    // Get the path env var in a platform agnostic way
-    let the_path = engine.get_path_env_var();
-    // Make sure it's not empty
-    assert!(the_path.is_some());
-}
@@ -13,10 +13,10 @@ version = "0.100.1"
 workspace = true

 [dependencies]
-nu-engine = { path = "../nu-engine", version = "0.100.1" }
+nu-engine = { path = "../nu-engine", version = "0.100.1", default-features = false }
 nu-parser = { path = "../nu-parser", version = "0.100.1" }
 nu-path = { path = "../nu-path", version = "0.100.1" }
-nu-protocol = { path = "../nu-protocol", version = "0.100.1" }
+nu-protocol = { path = "../nu-protocol", version = "0.100.1", default-features = false }

 indexmap = { workspace = true }
 miette = { workspace = true }
@@ -7,46 +7,52 @@ use nu_protocol::{
     engine::{Closure, EngineState, Stack, StateWorkingSet},
     PipelineData, PositionalArg, ShellError, Span, Type, Value, VarId,
 };
-use std::sync::Arc;
+use std::{collections::HashMap, sync::Arc};

 pub fn eval_env_change_hook(
-    env_change_hook: Option<Value>,
+    env_change_hook: &HashMap<String, Vec<Value>>,
     engine_state: &mut EngineState,
     stack: &mut Stack,
 ) -> Result<(), ShellError> {
-    if let Some(hook) = env_change_hook {
-        match hook {
-            Value::Record { val, .. } => {
-                for (env_name, hook_value) in &*val {
-                    let before = engine_state.previous_env_vars.get(env_name);
-                    let after = stack.get_env_var(engine_state, env_name);
-                    if before != after {
-                        let before = before.cloned().unwrap_or_default();
-                        let after = after.cloned().unwrap_or_default();
-                        eval_hook(
-                            engine_state,
-                            stack,
-                            None,
-                            vec![("$before".into(), before), ("$after".into(), after.clone())],
-                            hook_value,
-                            "env_change",
-                        )?;
-
-                        Arc::make_mut(&mut engine_state.previous_env_vars)
-                            .insert(env_name.clone(), after);
-                    }
-                }
-            }
-            x => {
-                return Err(ShellError::TypeMismatch {
-                    err_message: "record for the 'env_change' hook".to_string(),
-                    span: x.span(),
-                });
-            }
-        }
-    }
+    for (env, hooks) in env_change_hook {
+        let before = engine_state.previous_env_vars.get(env);
+        let after = stack.get_env_var(engine_state, env);
+        if before != after {
+            let before = before.cloned().unwrap_or_default();
+            let after = after.cloned().unwrap_or_default();
+
+            eval_hooks(
+                engine_state,
+                stack,
+                vec![("$before".into(), before), ("$after".into(), after.clone())],
+                hooks,
+                "env_change",
+            )?;
+
+            Arc::make_mut(&mut engine_state.previous_env_vars).insert(env.clone(), after);
+        }
+    }
+
+    Ok(())
+}
+
+pub fn eval_hooks(
+    engine_state: &mut EngineState,
+    stack: &mut Stack,
+    arguments: Vec<(String, Value)>,
+    hooks: &[Value],
+    hook_name: &str,
+) -> Result<(), ShellError> {
+    for hook in hooks {
+        eval_hook(
+            engine_state,
+            stack,
+            None,
+            arguments.clone(),
+            hook,
+            &format!("{hook_name} list, recursive"),
+        )?;
+    }

     Ok(())
 }

@@ -127,16 +133,7 @@ pub fn eval_hook(
             }
         }
         Value::List { vals, .. } => {
-            for val in vals {
-                eval_hook(
-                    engine_state,
-                    stack,
-                    None,
-                    arguments.clone(),
-                    val,
-                    &format!("{hook_name} list, recursive"),
-                )?;
-            }
+            eval_hooks(engine_state, stack, arguments, vals, hook_name)?;
         }
         Value::Record { val, .. } => {
             // Hooks can optionally be a record in this form:
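Taken together with the REPL changes earlier in this diff, hooks are now evaluated through a list-based helper rather than per-value dispatch. A minimal usage sketch built only from the names shown above (the hook config fields are assumed to be plain lists of hook values, as the REPL call sites suggest):

    // Sketch: fire every configured pre_prompt hook with no extra arguments,
    // mirroring the call added in the REPL loop above.
    use nu_protocol::engine::{EngineState, Stack};
    use nu_protocol::ShellError;

    fn fire_pre_prompt_hooks(
        engine_state: &mut EngineState,
        stack: &mut Stack,
    ) -> Result<(), ShellError> {
        let hooks = engine_state.get_config().hooks.pre_prompt.clone();
        eval_hooks(engine_state, stack, vec![], &hooks, "pre_prompt")
    }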
@@ -17,12 +17,12 @@ workspace = true

 [dependencies]
 nu-cmd-base = { path = "../nu-cmd-base", version = "0.100.1" }
-nu-engine = { path = "../nu-engine", version = "0.100.1" }
+nu-engine = { path = "../nu-engine", version = "0.100.1", default-features = false }
 nu-json = { version = "0.100.1", path = "../nu-json" }
 nu-parser = { path = "../nu-parser", version = "0.100.1" }
 nu-pretty-hex = { version = "0.100.1", path = "../nu-pretty-hex" }
-nu-protocol = { path = "../nu-protocol", version = "0.100.1" }
-nu-utils = { path = "../nu-utils", version = "0.100.1" }
+nu-protocol = { path = "../nu-protocol", version = "0.100.1", default-features = false }
+nu-utils = { path = "../nu-utils", version = "0.100.1", default-features = false }

 # Potential dependencies for extras
 heck = { workspace = true }
@@ -203,7 +203,7 @@ pub fn action(input: &Value, _args: &Arguments, span: Span) -> Value {
             Value::string(raw_string.trim(), span)
         }
         Value::Int { val, .. } => convert_to_smallest_number_type(*val, span),
-        Value::Filesize { val, .. } => convert_to_smallest_number_type(*val, span),
+        Value::Filesize { val, .. } => convert_to_smallest_number_type(val.get(), span),
         Value::Duration { val, .. } => convert_to_smallest_number_type(*val, span),
         Value::String { val, .. } => {
             let raw_bytes = val.as_bytes();
@@ -66,7 +66,7 @@ fn action(input: &Value, _args: &CellPathOnlyArgs, span: Span) -> Value {
     match input {
         Value::Float { val, .. } => fmt_it_64(*val, span),
         Value::Int { val, .. } => fmt_it(*val, span),
-        Value::Filesize { val, .. } => fmt_it(*val, span),
+        Value::Filesize { val, .. } => fmt_it(val.get(), span),
         // Propagate errors by explicitly matching them before the final case.
         Value::Error { .. } => input.clone(),
         other => Value::error(
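Both call sites above change because Value::Filesize now carries a filesize newtype rather than a bare i64, so the inner byte count is read with .get(). A hedged sketch of the pattern (the .get() accessor and its i64 return type are inferred from the call sites, not stated elsewhere in this diff):

    // Sketch only: read the underlying integer out of numeric Values.
    use nu_protocol::Value;

    fn raw_int(value: &Value) -> Option<i64> {
        match value {
            Value::Int { val, .. } => Some(*val),
            Value::Filesize { val, .. } => Some(val.get()), // assumed accessor on the newtype
            _ => None,
        }
    }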
@@ -25,7 +25,7 @@ impl Command for EachWhile {
             )])
             .required(
                 "closure",
-                SyntaxShape::Closure(Some(vec![SyntaxShape::Any, SyntaxShape::Int])),
+                SyntaxShape::Closure(Some(vec![SyntaxShape::Any])),
                 "the closure to run",
             )
             .category(Category::Filters)
@@ -2,4 +2,4 @@ mod from;
 mod to;

 pub(crate) use from::url::FromUrl;
-pub(crate) use to::html::ToHtml;
+pub use to::html::ToHtml;
@@ -9,6 +9,7 @@ mod strings;
 pub use bits::{
     Bits, BitsAnd, BitsInto, BitsNot, BitsOr, BitsRol, BitsRor, BitsShl, BitsShr, BitsXor,
 };
+pub use formats::ToHtml;
 pub use math::{MathArcCos, MathArcCosH, MathArcSin, MathArcSinH, MathArcTan, MathArcTanH};
 pub use math::{MathCos, MathCosH, MathSin, MathSinH, MathTan, MathTanH};
 pub use math::{MathExp, MathLn};

@@ -54,7 +55,8 @@ pub fn add_extra_command_context(mut engine_state: EngineState) -> EngineState {
         strings::str_::case::StrTitleCase
     );

-    bind_command!(formats::ToHtml, formats::FromUrl);
+    bind_command!(ToHtml, formats::FromUrl);

     // Bits
     bind_command! {
         Bits,
@@ -15,10 +15,10 @@ bench = false
 workspace = true

 [dependencies]
-nu-engine = { path = "../nu-engine", version = "0.100.1" }
+nu-engine = { path = "../nu-engine", version = "0.100.1", default-features = false }
 nu-parser = { path = "../nu-parser", version = "0.100.1" }
-nu-protocol = { path = "../nu-protocol", version = "0.100.1" }
-nu-utils = { path = "../nu-utils", version = "0.100.1" }
+nu-protocol = { path = "../nu-protocol", version = "0.100.1", default-features = false }
+nu-utils = { path = "../nu-utils", version = "0.100.1", default-features = false }

 itertools = { workspace = true }
 shadow-rs = { version = "0.36", default-features = false }

@@ -27,6 +27,17 @@ shadow-rs = { version = "0.36", default-features = false }
 shadow-rs = { version = "0.36", default-features = false }

 [features]
+default = ["os"]
+os = [
+    "nu-engine/os",
+    "nu-protocol/os",
+    "nu-utils/os",
+]
+plugin = [
+    "nu-protocol/plugin",
+    "os",
+]
+
 mimalloc = []
 trash-support = []
 sqlite = []
@@ -169,6 +169,7 @@ fn run(
     let origin = match stream.source() {
         ByteStreamSource::Read(_) => "unknown",
         ByteStreamSource::File(_) => "file",
+        #[cfg(feature = "os")]
         ByteStreamSource::Child(_) => "external",
     };
@@ -1,9 +1,8 @@
 use nu_engine::{command_prelude::*, get_eval_block_with_early_return, redirect_env};
-use nu_protocol::{
-    engine::Closure,
-    process::{ChildPipe, ChildProcess},
-    ByteStream, ByteStreamSource, OutDest,
-};
+#[cfg(feature = "os")]
+use nu_protocol::process::{ChildPipe, ChildProcess};
+use nu_protocol::{engine::Closure, ByteStream, ByteStreamSource, OutDest};
 use std::{
     io::{Cursor, Read},
     thread,

@@ -69,6 +68,33 @@ impl Command for Do {
         let block: Closure = call.req(engine_state, caller_stack, 0)?;
         let rest: Vec<Value> = call.rest(engine_state, caller_stack, 1)?;
         let ignore_all_errors = call.has_flag(engine_state, caller_stack, "ignore-errors")?;

+        if call.has_flag(engine_state, caller_stack, "ignore-shell-errors")? {
+            nu_protocol::report_shell_warning(
+                engine_state,
+                &ShellError::GenericError {
+                    error: "Deprecated option".into(),
+                    msg: "`--ignore-shell-errors` is deprecated and will be removed in 0.102.0."
+                        .into(),
+                    span: Some(call.head),
+                    help: Some("Please use the `--ignore-errors(-i)`".into()),
+                    inner: vec![],
+                },
+            );
+        }
+        if call.has_flag(engine_state, caller_stack, "ignore-program-errors")? {
+            nu_protocol::report_shell_warning(
+                engine_state,
+                &ShellError::GenericError {
+                    error: "Deprecated option".into(),
+                    msg: "`--ignore-program-errors` is deprecated and will be removed in 0.102.0."
+                        .into(),
+                    span: Some(call.head),
+                    help: Some("Please use the `--ignore-errors(-i)`".into()),
+                    inner: vec![],
+                },
+            );
+        }
         let ignore_shell_errors = ignore_all_errors
             || call.has_flag(engine_state, caller_stack, "ignore-shell-errors")?;
         let ignore_program_errors = ignore_all_errors

@@ -92,6 +118,13 @@ impl Command for Do {
         match result {
             Ok(PipelineData::ByteStream(stream, metadata)) if capture_errors => {
                 let span = stream.span();
+                #[cfg(not(feature = "os"))]
+                return Err(ShellError::DisabledOsSupport {
+                    msg: "Cannot create a thread to receive stdout message.".to_string(),
+                    span: Some(span),
+                });
+
+                #[cfg(feature = "os")]
                 match stream.into_child() {
                     Ok(mut child) => {
                         // Use a thread to receive stdout message.

@@ -169,6 +202,7 @@ impl Command for Do {
                     OutDest::Pipe | OutDest::PipeSeparate | OutDest::Value
                 ) =>
             {
+                #[cfg(feature = "os")]
                 if let ByteStreamSource::Child(child) = stream.source_mut() {
                     child.ignore_error(true);
                 }

@@ -208,16 +242,6 @@ impl Command for Do {
                 example: r#"do --ignore-errors { thisisnotarealcommand }"#,
                 result: None,
             },
-            Example {
-                description: "Run the closure and ignore shell errors",
-                example: r#"do --ignore-shell-errors { thisisnotarealcommand }"#,
-                result: None,
-            },
-            Example {
-                description: "Run the closure and ignore external program errors",
-                example: r#"do --ignore-program-errors { nu --commands 'exit 1' }; echo "I'll still run""#,
-                result: None,
-            },
             Example {
                 description: "Abort the pipeline if a program returns a non-zero exit code",
                 example: r#"do --capture-errors { nu --commands 'exit 1' } | myscarycommand"#,
@@ -35,6 +35,7 @@ impl Command for Ignore {
         mut input: PipelineData,
     ) -> Result<PipelineData, ShellError> {
         if let PipelineData::ByteStream(stream, _) = &mut input {
+            #[cfg(feature = "os")]
            if let ByteStreamSource::Child(child) = stream.source_mut() {
                 child.ignore_error(true);
             }
@@ -107,11 +107,7 @@ fn run_catch(

     if let Some(catch) = catch {
         stack.set_last_error(&error);
-        let fancy_errors = match engine_state.get_config().error_style {
-            nu_protocol::ErrorStyle::Fancy => true,
-            nu_protocol::ErrorStyle::Plain => false,
-        };
-        let error = error.into_value(span, fancy_errors);
+        let error = error.into_value(&StateWorkingSet::new(engine_state), span);
         let block = engine_state.get_block(catch.block_id);
         // Put the error value in the positional closure var
         if let Some(var) = block.signature.get_positional(0) {
@@ -116,6 +116,11 @@ pub fn version(engine_state: &EngineState, span: Span) -> Result<PipelineData, S
         Value::string(features_enabled().join(", "), span),
     );

+    #[cfg(not(feature = "plugin"))]
+    let _ = engine_state;
+
+    #[cfg(feature = "plugin")]
+    {
     // Get a list of plugin names and versions if present
     let installed_plugins = engine_state
         .plugins()

@@ -134,6 +139,7 @@ pub fn version(engine_state: &EngineState, span: Span) -> Result<PipelineData, S
         "installed_plugins",
         Value::string(installed_plugins.join(", "), span),
     );
+    }

     Ok(Value::record(record, span).into_pipeline_data())
 }
@@ -1,3 +1,4 @@
+#![cfg_attr(not(feature = "os"), allow(unused))]
 #![doc = include_str!("../README.md")]
 mod core_commands;
 mod default_context;
@@ -135,18 +135,24 @@ pub(crate) fn get_plugin_dirs(
     engine_state: &EngineState,
     stack: &Stack,
 ) -> impl Iterator<Item = String> {
-    // Get the NU_PLUGIN_DIRS constant or env var
+    // Get the NU_PLUGIN_DIRS from the constant and/or env var
     let working_set = StateWorkingSet::new(engine_state);
-    let value = working_set
+    let dirs_from_const = working_set
         .find_variable(b"$NU_PLUGIN_DIRS")
         .and_then(|var_id| working_set.get_constant(var_id).ok())
-        .or_else(|| stack.get_env_var(engine_state, "NU_PLUGIN_DIRS"))
-        .cloned(); // TODO: avoid this clone
-
-    // Get all of the strings in the list, if possible
-    value
+        .cloned() // TODO: avoid this clone
         .into_iter()
         .flat_map(|value| value.into_list().ok())
         .flatten()
-        .flat_map(|list_item| list_item.coerce_into_string().ok())
+        .flat_map(|list_item| list_item.coerce_into_string().ok());
+
+    let dirs_from_env = stack
+        .get_env_var(engine_state, "NU_PLUGIN_DIRS")
+        .cloned() // TODO: avoid this clone
+        .into_iter()
+        .flat_map(|value| value.into_list().ok())
+        .flatten()
+        .flat_map(|list_item| list_item.coerce_into_string().ok());
+
+    dirs_from_const.chain(dirs_from_env)
 }
@@ -14,8 +14,8 @@ bench = false
 workspace = true

 [dependencies]
-nu-protocol = { path = "../nu-protocol", version = "0.100.1" }
-nu-engine = { path = "../nu-engine", version = "0.100.1" }
+nu-protocol = { path = "../nu-protocol", version = "0.100.1", default-features = false }
+nu-engine = { path = "../nu-engine", version = "0.100.1", default-features = false }
 nu-json = { path = "../nu-json", version = "0.100.1" }
 nu-ansi-term = { workspace = true }
@@ -5,7 +5,6 @@ use nu_protocol::{Config, Value};
 // The default colors for shapes, used when there is no config for them.
 pub fn default_shape_color(shape: &str) -> Style {
     match shape {
-        "shape_and" => Style::new().fg(Color::Purple).bold(),
         "shape_binary" => Style::new().fg(Color::Purple).bold(),
         "shape_block" => Style::new().fg(Color::Blue).bold(),
         "shape_bool" => Style::new().fg(Color::LightCyan),

@@ -30,7 +29,6 @@ pub fn default_shape_color(shape: &str) -> Style {
         "shape_match_pattern" => Style::new().fg(Color::Green),
         "shape_nothing" => Style::new().fg(Color::LightCyan),
         "shape_operator" => Style::new().fg(Color::Yellow),
-        "shape_or" => Style::new().fg(Color::Purple).bold(),
         "shape_pipe" => Style::new().fg(Color::Purple).bold(),
         "shape_range" => Style::new().fg(Color::Yellow).bold(),
         "shape_raw_string" => Style::new().fg(Color::LightMagenta).bold(),
@@ -18,17 +18,17 @@ workspace = true
 [dependencies]
 nu-cmd-base = { path = "../nu-cmd-base", version = "0.100.1" }
 nu-color-config = { path = "../nu-color-config", version = "0.100.1" }
-nu-engine = { path = "../nu-engine", version = "0.100.1" }
+nu-engine = { path = "../nu-engine", version = "0.100.1", default-features = false }
 nu-glob = { path = "../nu-glob", version = "0.100.1" }
 nu-json = { path = "../nu-json", version = "0.100.1" }
 nu-parser = { path = "../nu-parser", version = "0.100.1" }
 nu-path = { path = "../nu-path", version = "0.100.1" }
 nu-pretty-hex = { path = "../nu-pretty-hex", version = "0.100.1" }
-nu-protocol = { path = "../nu-protocol", version = "0.100.1" }
+nu-protocol = { path = "../nu-protocol", version = "0.100.1", default-features = false }
 nu-system = { path = "../nu-system", version = "0.100.1" }
 nu-table = { path = "../nu-table", version = "0.100.1" }
 nu-term-grid = { path = "../nu-term-grid", version = "0.100.1" }
-nu-utils = { path = "../nu-utils", version = "0.100.1" }
+nu-utils = { path = "../nu-utils", version = "0.100.1", default-features = false }
 nu-ansi-term = { workspace = true }
 nuon = { path = "../nuon", version = "0.100.1" }
 
@@ -43,7 +43,7 @@ chardetng = { workspace = true }
 chrono = { workspace = true, features = ["std", "unstable-locales", "clock"], default-features = false }
 chrono-humanize = { workspace = true }
 chrono-tz = { workspace = true }
-crossterm = { workspace = true }
+crossterm = { workspace = true, optional = true }
 csv = { workspace = true }
 dialoguer = { workspace = true, default-features = false, features = ["fuzzy-select"] }
 digest = { workspace = true, default-features = false }
@@ -61,24 +61,26 @@ lscolors = { workspace = true, default-features = false, features = ["nu-ansi-term"] }
 md5 = { workspace = true }
 mime = { workspace = true }
 mime_guess = { workspace = true }
-multipart-rs = { workspace = true }
-native-tls = { workspace = true }
-notify-debouncer-full = { workspace = true, default-features = false }
+multipart-rs = { workspace = true, optional = true }
+native-tls = { workspace = true, optional = true }
+notify-debouncer-full = { workspace = true, default-features = false, optional = true }
 num-format = { workspace = true }
 num-traits = { workspace = true }
 oem_cp = { workspace = true }
-open = { workspace = true }
-os_pipe = { workspace = true }
+open = { workspace = true, optional = true }
+os_pipe = { workspace = true, optional = true }
 pathdiff = { workspace = true }
 percent-encoding = { workspace = true }
 print-positions = { workspace = true }
 quick-xml = { workspace = true }
-rand = { workspace = true }
+rand = { workspace = true, optional = true }
+getrandom = { workspace = true, optional = true }
 rayon = { workspace = true }
 regex = { workspace = true }
 roxmltree = { workspace = true }
 rusqlite = { workspace = true, features = ["bundled", "backup", "chrono"], optional = true }
 rmp = { workspace = true }
+scopeguard = { workspace = true }
 serde = { workspace = true, features = ["derive"] }
 serde_json = { workspace = true, features = ["preserve_order"] }
 serde_urlencoded = { workspace = true }
@@ -86,30 +88,29 @@ serde_yaml = { workspace = true }
 sha2 = { workspace = true }
 sysinfo = { workspace = true }
 tabled = { workspace = true, features = ["ansi"], default-features = false }
-terminal_size = { workspace = true }
 titlecase = { workspace = true }
 toml = { workspace = true, features = ["preserve_order"] }
 unicode-segmentation = { workspace = true }
-ureq = { workspace = true, default-features = false, features = ["charset", "gzip", "json", "native-tls"] }
+ureq = { workspace = true, default-features = false, features = ["charset", "gzip", "json"] }
 url = { workspace = true }
-uu_cp = { workspace = true }
-uu_mkdir = { workspace = true }
-uu_mktemp = { workspace = true }
-uu_mv = { workspace = true }
-uu_touch = { workspace = true }
-uu_uname = { workspace = true }
-uu_whoami = { workspace = true }
-uuid = { workspace = true, features = ["v4"] }
+uu_cp = { workspace = true, optional = true }
+uu_mkdir = { workspace = true, optional = true }
+uu_mktemp = { workspace = true, optional = true }
+uu_mv = { workspace = true, optional = true }
+uu_touch = { workspace = true, optional = true }
+uu_uname = { workspace = true, optional = true }
+uu_whoami = { workspace = true, optional = true }
+uuid = { workspace = true, features = ["v4"], optional = true }
 v_htmlescape = { workspace = true }
 wax = { workspace = true }
-which = { workspace = true }
+which = { workspace = true, optional = true }
 unicode-width = { workspace = true }
 data-encoding = { version = "2.6.0", features = ["alloc"] }
 
 [target.'cfg(windows)'.dependencies]
 winreg = { workspace = true }
 
-[target.'cfg(not(windows))'.dependencies]
+[target.'cfg(all(not(windows), not(target_arch = "wasm32")))'.dependencies]
 uucore = { workspace = true, features = ["mode"] }
 
 [target.'cfg(unix)'.dependencies]
@@ -135,7 +136,53 @@ features = [
 workspace = true
 
 [features]
-plugin = ["nu-parser/plugin"]
+default = ["os"]
+os = [
+    # include other features
+    "js",
+    "network",
+    "nu-protocol/os",
+    "nu-utils/os",
+
+    # os-dependant dependencies
+    "crossterm",
+    "notify-debouncer-full",
+    "open",
+    "os_pipe",
+    "uu_cp",
+    "uu_mkdir",
+    "uu_mktemp",
+    "uu_mv",
+    "uu_touch",
+    "uu_uname",
+    "uu_whoami",
+    "which",
+]
+
+# The dependencies listed below need 'getrandom'.
+# They work with JS (usually with wasm-bindgen) or regular OS support.
+# Hence they are also put under the 'os' feature to avoid repetition.
+js = [
+    "getrandom",
+    "getrandom/js",
+    "rand",
+    "uuid",
+]
+
+# These dependencies require networking capabilities, especially the http
+# interface requires openssl which is not easy to embed into wasm,
+# using rustls could solve this issue.
+network = [
+    "multipart-rs",
+    "native-tls",
+    "ureq/native-tls",
+    "uuid",
+]
+
+plugin = [
+    "nu-parser/plugin",
+    "os",
+]
 sqlite = ["rusqlite"]
 trash-support = ["trash"]
 
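The feature list above splits the crate into `os`, `js`, and `network` capability groups, with `os` enabled by default. As a rough sketch that is not part of this commit (the consumer crate and its feature choices are assumptions for illustration only), a downstream Cargo.toml that wants to drop OS-specific commands but keep networking might select features like this:

    # hypothetical downstream Cargo.toml, for illustration only
    [dependencies]
    # disable the default "os" feature group, opt back into "network" only
    nu-command = { version = "0.100.1", default-features = false, features = ["network"] }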
@@ -1,4 +1,4 @@
-use nu_engine::{command_prelude::*, get_eval_expression};
+use nu_engine::command_prelude::*;
 
 #[derive(Clone)]
 pub struct BytesBuild;
@@ -49,8 +49,7 @@ impl Command for BytesBuild {
         _input: PipelineData,
     ) -> Result<PipelineData, ShellError> {
         let mut output = vec![];
-        let eval_expression = get_eval_expression(engine_state);
-        for val in call.rest_iter_flattened(engine_state, stack, eval_expression, 0)? {
+        for val in call.rest::<Value>(engine_state, stack, 0)? {
             let val_span = val.span();
             match val {
                 Value::Binary { mut val, .. } => output.append(&mut val),
@@ -1,5 +1,5 @@
 use chrono::{DateTime, FixedOffset};
-use nu_protocol::{ShellError, Span, Value};
+use nu_protocol::{Filesize, ShellError, Span, Value};
 use std::hash::{Hash, Hasher};
 
 /// A subset of [`Value`], which is hashable.
@@ -30,7 +30,7 @@ pub enum HashableValue {
         span: Span,
     },
     Filesize {
-        val: i64,
+        val: Filesize,
         span: Span,
     },
     Duration {
@@ -198,7 +198,10 @@ mod test {
             (Value::int(1, span), HashableValue::Int { val: 1, span }),
             (
                 Value::filesize(1, span),
-                HashableValue::Filesize { val: 1, span },
+                HashableValue::Filesize {
+                    val: 1.into(),
+                    span,
+                },
             ),
             (
                 Value::duration(1, span),
@@ -167,7 +167,7 @@ fn fill(
 fn action(input: &Value, args: &Arguments, span: Span) -> Value {
     match input {
         Value::Int { val, .. } => fill_int(*val, args, span),
-        Value::Filesize { val, .. } => fill_int(*val, args, span),
+        Value::Filesize { val, .. } => fill_int(val.get(), args, span),
         Value::Float { val, .. } => fill_float(*val, args, span),
         Value::String { val, .. } => fill_string(val, args, span),
         // Propagate errors by explicitly matching them before the final case.
@@ -147,7 +147,7 @@ pub fn action(input: &Value, _args: &Arguments, span: Span) -> Value {
         Value::Binary { .. } => input.clone(),
         Value::Int { val, .. } => Value::binary(val.to_ne_bytes().to_vec(), span),
         Value::Float { val, .. } => Value::binary(val.to_ne_bytes().to_vec(), span),
-        Value::Filesize { val, .. } => Value::binary(val.to_ne_bytes().to_vec(), span),
+        Value::Filesize { val, .. } => Value::binary(val.get().to_ne_bytes().to_vec(), span),
         Value::String { val, .. } => Value::binary(val.as_bytes().to_vec(), span),
         Value::Bool { val, .. } => Value::binary(i64::from(*val).to_ne_bytes().to_vec(), span),
         Value::Duration { val, .. } => Value::binary(val.to_ne_bytes().to_vec(), span),
@@ -253,7 +253,7 @@ fn action(input: &Value, args: &Arguments, span: Span) -> Value {
                 convert_int(input, span, radix)
             }
         }
-        Value::Filesize { val, .. } => Value::int(*val, span),
+        Value::Filesize { val, .. } => Value::int(val.get(), span),
         Value::Float { val, .. } => Value::int(
             {
                 if radix == 10 {
@@ -99,6 +99,11 @@ impl Command for SubCommand {
                     "timezone" => Value::test_string("+02:00"),
                 })),
             },
+            Example {
+                description: "convert date components to table columns",
+                example: "2020-04-12T22:10:57+02:00 | into record | transpose | transpose -r",
+                result: None,
+            },
         ]
     }
 }
@@ -359,7 +359,6 @@ fn nu_value_to_sqlite_type(val: &Value) -> Result<&'static str, ShellError> {
         | Type::Custom(_)
         | Type::Error
         | Type::List(_)
-        | Type::ListStream
         | Type::Range
         | Type::Record(_)
         | Type::Signature
@@ -421,7 +421,7 @@ pub fn value_to_sql(value: Value) -> Result<Box<dyn rusqlite::ToSql>, ShellError> {
         Value::Bool { val, .. } => Box::new(val),
         Value::Int { val, .. } => Box::new(val),
         Value::Float { val, .. } => Box::new(val),
-        Value::Filesize { val, .. } => Box::new(val),
+        Value::Filesize { val, .. } => Box::new(val.get()),
         Value::Duration { val, .. } => Box::new(val),
         Value::Date { val, .. } => Box::new(val),
         Value::String { val, .. } => Box::new(val),
@@ -1,6 +1,7 @@
 use crate::date::utils::parse_date_from_string;
 use chrono::{DateTime, Datelike, FixedOffset, Local, Timelike};
 use nu_engine::command_prelude::*;
+use nu_protocol::{report_parse_warning, ParseWarning};
 
 #[derive(Clone)]
 pub struct SubCommand;
@@ -17,7 +18,7 @@ impl Command for SubCommand {
                 (Type::String, Type::record()),
             ])
             .allow_variants_without_examples(true) // https://github.com/nushell/nushell/issues/7032
-            .category(Category::Date)
+            .category(Category::Deprecated)
     }
 
     fn description(&self) -> &str {
@@ -35,6 +36,17 @@ impl Command for SubCommand {
         call: &Call,
         input: PipelineData,
     ) -> Result<PipelineData, ShellError> {
+        let head = call.head;
+        report_parse_warning(
+            &StateWorkingSet::new(engine_state),
+            &ParseWarning::DeprecatedWarning {
+                old_command: "date to-record".into(),
+                new_suggestion: "see `into record` command examples".into(),
+                span: head,
+                url: "`help into record`".into(),
+            },
+        );
+
         let head = call.head;
         // This doesn't match explicit nulls
         if matches!(input, PipelineData::Empty) {
@@ -1,6 +1,7 @@
 use crate::date::utils::parse_date_from_string;
 use chrono::{DateTime, Datelike, FixedOffset, Local, Timelike};
 use nu_engine::command_prelude::*;
+use nu_protocol::{report_parse_warning, ParseWarning};
 
 #[derive(Clone)]
 pub struct SubCommand;
@@ -17,7 +18,7 @@ impl Command for SubCommand {
                 (Type::String, Type::table()),
             ])
             .allow_variants_without_examples(true) // https://github.com/nushell/nushell/issues/7032
-            .category(Category::Date)
+            .category(Category::Deprecated)
     }
 
     fn description(&self) -> &str {
@@ -36,6 +37,16 @@ impl Command for SubCommand {
         input: PipelineData,
     ) -> Result<PipelineData, ShellError> {
         let head = call.head;
+        report_parse_warning(
+            &StateWorkingSet::new(engine_state),
+            &ParseWarning::DeprecatedWarning {
+                old_command: "date to-table".into(),
+                new_suggestion: "see `into record` command examples".into(),
+                span: head,
+                url: "`help into record`".into(),
+            },
+        );
+
         // This doesn't match explicit nulls
         if matches!(input, PipelineData::Empty) {
             return Err(ShellError::PipelineEmpty { dst_span: head });
@@ -177,4 +177,9 @@ fn get_thread_id() -> u64 {
     {
         nix::sys::pthread::pthread_self() as u64
     }
+    #[cfg(target_arch = "wasm32")]
+    {
+        // wasm doesn't have any threads accessible, so we return 0 as a fallback
+        0
+    }
 }
@@ -1,6 +1,6 @@
 use super::inspect_table;
 use nu_engine::command_prelude::*;
-use terminal_size::{terminal_size, Height, Width};
+use nu_utils::terminal_size;
 
 #[derive(Clone)]
 pub struct Inspect;
@@ -38,12 +38,9 @@ impl Command for Inspect {
         let original_input = input_val.clone();
         let description = input_val.get_type().to_string();
 
-        let (cols, _rows) = match terminal_size() {
-            Some((w, h)) => (Width(w.0), Height(h.0)),
-            None => (Width(0), Height(0)),
-        };
+        let (cols, _rows) = terminal_size().unwrap_or((0, 0));
 
-        let table = inspect_table::build_table(input_val, description, cols.0 as usize);
+        let table = inspect_table::build_table(input_val, description, cols as usize);
 
         // Note that this is printed to stderr. The reason for this is so it doesn't disrupt the regular nushell
         // tabular output. If we printed to stdout, nushell would get confused with two outputs.
@@ -1,4 +1,5 @@
-use nu_engine::{command_prelude::*, get_eval_block, get_eval_expression_with_input};
+use nu_engine::{command_prelude::*, ClosureEvalOnce};
+use nu_protocol::engine::Closure;
 use std::time::Instant;
 
 #[derive(Clone)]
@@ -10,16 +11,18 @@ impl Command for TimeIt {
     }
 
     fn description(&self) -> &str {
-        "Time the running time of a block."
+        "Time how long it takes a closure to run."
+    }
+
+    fn extra_description(&self) -> &str {
+        "Any pipeline input given to this command is passed to the closure. Note that streaming inputs may affect timing results, and it is recommended to add a `collect` command before this if the input is a stream.
+
+This command will bubble up any errors encountered when running the closure. The return pipeline of the closure is collected into a value and then discarded."
     }
 
     fn signature(&self) -> nu_protocol::Signature {
         Signature::build("timeit")
-            .required(
-                "command",
-                SyntaxShape::OneOf(vec![SyntaxShape::Block, SyntaxShape::Expression]),
-                "The command or block to run.",
-            )
+            .required("command", SyntaxShape::Closure(None), "The closure to run.")
             .input_output_types(vec![
                 (Type::Any, Type::Duration),
                 (Type::Nothing, Type::Duration),
@@ -46,51 +49,38 @@ impl Command for TimeIt {
         // reset outdest, so the command can write to stdout and stderr.
         let stack = &mut stack.push_redirection(None, None);
 
-        let command_to_run = call.positional_nth(stack, 0);
+        let closure: Closure = call.req(engine_state, stack, 0)?;
+        let closure = ClosureEvalOnce::new_preserve_out_dest(engine_state, stack, closure);
 
         // Get the start time after all other computation has been done.
         let start_time = Instant::now();
+        closure.run_with_input(input)?.into_value(call.head)?;
+        let time = start_time.elapsed();
 
-        if let Some(command_to_run) = command_to_run {
-            if let Some(block_id) = command_to_run.as_block() {
-                let eval_block = get_eval_block(engine_state);
-                let block = engine_state.get_block(block_id);
-                eval_block(engine_state, stack, block, input)?
-            } else {
-                let eval_expression_with_input = get_eval_expression_with_input(engine_state);
-                let expression = &command_to_run.clone();
-                eval_expression_with_input(engine_state, stack, expression, input)?
-            }
-        } else {
-            PipelineData::empty()
-        }
-        .into_value(call.head)?;
-
-        let end_time = Instant::now();
-
-        let output = Value::duration(
-            end_time.saturating_duration_since(start_time).as_nanos() as i64,
-            call.head,
-        );
-
+        let output = Value::duration(time.as_nanos() as i64, call.head);
         Ok(output.into_pipeline_data())
     }
 
     fn examples(&self) -> Vec<Example> {
         vec![
             Example {
-                description: "Times a command within a closure",
+                description: "Time a closure containing one command",
                 example: "timeit { sleep 500ms }",
                 result: None,
             },
             Example {
-                description: "Times a command using an existing input",
-                example: "http get https://www.nushell.sh/book/ | timeit { split chars }",
+                description: "Time a closure with an input value",
+                example: "'A really long string' | timeit { split chars }",
                 result: None,
            },
            Example {
-                description: "Times a command invocation",
-                example: "timeit ls -la",
+                description: "Time a closure with an input stream",
+                example: "open some_file.txt | collect | timeit { split chars }",
+                result: None,
+            },
+            Example {
+                description: "Time a closure containing a pipeline",
+                example: "timeit { open some_file.txt | split chars }",
                 result: None,
            },
        ]
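As a brief usage note that is not part of the diff: with the closure-based `timeit` described above, a streaming input can be collected up front so that only the closure body is timed, matching the behaviour the new `extra_description` documents. The file name and the pipeline inside the closure below are assumptions chosen only for illustration:

    # collect the stream first, then time only the closure
    open some_file.txt | collect | timeit { lines | length }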
@@ -27,6 +27,10 @@ pub fn add_shell_command_context(mut engine_state: EngineState) -> EngineState {
     }
 
     // Filters
+    #[cfg(feature = "rand")]
+    bind_command! {
+        Shuffle
+    }
     bind_command! {
         All,
         Any,
@@ -64,6 +68,7 @@ pub fn add_shell_command_context(mut engine_state: EngineState) -> EngineState {
         Length,
         Lines,
         ParEach,
+        ChunkBy,
         Prepend,
         Range,
         Reduce,
@@ -71,7 +76,6 @@ pub fn add_shell_command_context(mut engine_state: EngineState) -> EngineState {
         Rename,
         Reverse,
         Select,
-        Shuffle,
         Skip,
         SkipUntil,
         SkipWhile,
@@ -102,6 +106,7 @@ pub fn add_shell_command_context(mut engine_state: EngineState) -> EngineState {
     bind_command! {
         Path,
         PathBasename,
+        PathSelf,
         PathDirname,
         PathExists,
         PathExpand,
@@ -113,6 +118,7 @@ pub fn add_shell_command_context(mut engine_state: EngineState) -> EngineState {
     };
 
     // System
+    #[cfg(feature = "os")]
     bind_command! {
         Complete,
         External,
@@ -160,10 +166,12 @@ pub fn add_shell_command_context(mut engine_state: EngineState) -> EngineState {
         ViewSpan,
     };
 
-    #[cfg(windows)]
+    #[cfg(all(feature = "os", windows))]
     bind_command! { RegistryQuery }
 
-    #[cfg(any(
+    #[cfg(all(
+        feature = "os",
+        any(
             target_os = "android",
             target_os = "linux",
             target_os = "freebsd",
@@ -171,6 +179,7 @@ pub fn add_shell_command_context(mut engine_state: EngineState) -> EngineState {
             target_os = "openbsd",
             target_os = "macos",
             target_os = "windows"
+        )
     ))]
     bind_command! { Ps };
 
@@ -218,6 +227,7 @@ pub fn add_shell_command_context(mut engine_state: EngineState) -> EngineState {
     };
 
     // FileSystem
+    #[cfg(feature = "os")]
     bind_command! {
         Cd,
         Ls,
@@ -236,6 +246,7 @@ pub fn add_shell_command_context(mut engine_state: EngineState) -> EngineState {
     };
 
     // Platform
+    #[cfg(feature = "os")]
     bind_command! {
         Ansi,
         AnsiLink,
@@ -248,11 +259,13 @@ pub fn add_shell_command_context(mut engine_state: EngineState) -> EngineState {
         IsTerminal,
         Kill,
         Sleep,
+        Term,
         TermSize,
+        TermQuery,
         Whoami,
     };
 
-    #[cfg(unix)]
+    #[cfg(all(unix, feature = "os"))]
     bind_command! { ULimit };
 
     // Date
@@ -377,6 +390,7 @@ pub fn add_shell_command_context(mut engine_state: EngineState) -> EngineState {
     }
 
     // Network
+    #[cfg(feature = "network")]
     bind_command! {
         Http,
         HttpDelete,
@@ -386,6 +400,9 @@ pub fn add_shell_command_context(mut engine_state: EngineState) -> EngineState {
         HttpPost,
         HttpPut,
         HttpOptions,
+        Port,
+    }
+    bind_command! {
         Url,
         UrlBuildQuery,
         UrlSplitQuery,
@@ -393,10 +410,10 @@ pub fn add_shell_command_context(mut engine_state: EngineState) -> EngineState {
         UrlEncode,
         UrlJoin,
         UrlParse,
-        Port,
     }
 
     // Random
+    #[cfg(feature = "rand")]
     bind_command! {
         Random,
         RandomBool,
crates/nu-command/src/env/config/config_.rs (80 changed lines)

@@ -1,4 +1,6 @@
-use nu_engine::{command_prelude::*, get_full_help};
+use nu_cmd_base::util::get_editor;
+use nu_engine::{command_prelude::*, env_to_strings, get_full_help};
+use nu_system::ForegroundChild;
 
 #[derive(Clone)]
 pub struct ConfigMeta;
@@ -36,3 +38,79 @@ impl Command for ConfigMeta {
         vec!["options", "setup"]
     }
 }
+
+#[cfg(not(feature = "os"))]
+pub(super) fn start_editor(
+    _: &'static str,
+    _: &EngineState,
+    _: &mut Stack,
+    call: &Call,
+) -> Result<PipelineData, ShellError> {
+    Err(ShellError::DisabledOsSupport {
+        msg: "Running external commands is not available without OS support.".to_string(),
+        span: Some(call.head),
+    })
+}
+
+#[cfg(feature = "os")]
+pub(super) fn start_editor(
+    config_path: &'static str,
+    engine_state: &EngineState,
+    stack: &mut Stack,
+    call: &Call,
+) -> Result<PipelineData, ShellError> {
+    // Find the editor executable.
+    let (editor_name, editor_args) = get_editor(engine_state, stack, call.head)?;
+    let paths = nu_engine::env::path_str(engine_state, stack, call.head)?;
+    let cwd = engine_state.cwd(Some(stack))?;
+    let editor_executable =
+        crate::which(&editor_name, &paths, cwd.as_ref()).ok_or(ShellError::ExternalCommand {
+            label: format!("`{editor_name}` not found"),
+            help: "Failed to find the editor executable".into(),
+            span: call.head,
+        })?;
+
+    let Some(config_path) = engine_state.get_config_path(config_path) else {
+        return Err(ShellError::GenericError {
+            error: format!("Could not find $nu.{config_path}"),
+            msg: format!("Could not find $nu.{config_path}"),
+            span: None,
+            help: None,
+            inner: vec![],
+        });
+    };
+    let config_path = config_path.to_string_lossy().to_string();
+
+    // Create the command.
+    let mut command = std::process::Command::new(editor_executable);
+
+    // Configure PWD.
+    command.current_dir(cwd);
+
+    // Configure environment variables.
+    let envs = env_to_strings(engine_state, stack)?;
+    command.env_clear();
+    command.envs(envs);
+
+    // Configure args.
+    command.arg(config_path);
+    command.args(editor_args);
+
+    // Spawn the child process. On Unix, also put the child process to
+    // foreground if we're in an interactive session.
+    #[cfg(windows)]
+    let child = ForegroundChild::spawn(command)?;
+    #[cfg(unix)]
+    let child = ForegroundChild::spawn(
+        command,
+        engine_state.is_interactive,
+        &engine_state.pipeline_externals_state,
+    )?;
+
+    // Wrap the output into a `PipelineData::ByteStream`.
+    let child = nu_protocol::process::ChildProcess::new(child, None, false, call.head)?;
+    Ok(PipelineData::ByteStream(
+        ByteStream::child(child, call.head),
+        None,
+    ))
+}
crates/nu-command/src/env/config/config_env.rs (61 changed lines)

@@ -1,7 +1,4 @@
-use nu_cmd_base::util::get_editor;
-use nu_engine::{command_prelude::*, env_to_strings};
-use nu_protocol::{process::ChildProcess, ByteStream};
-use nu_system::ForegroundChild;
+use nu_engine::command_prelude::*;
 
 #[derive(Clone)]
 pub struct ConfigEnv;
@@ -81,60 +78,6 @@ impl Command for ConfigEnv {
             return Ok(Value::string(nu_utils::get_sample_env(), head).into_pipeline_data());
         }
 
-        // Find the editor executable.
-        let (editor_name, editor_args) = get_editor(engine_state, stack, call.head)?;
-        let paths = nu_engine::env::path_str(engine_state, stack, call.head)?;
-        let cwd = engine_state.cwd(Some(stack))?;
-        let editor_executable = crate::which(&editor_name, &paths, cwd.as_ref()).ok_or(
-            ShellError::ExternalCommand {
-                label: format!("`{editor_name}` not found"),
-                help: "Failed to find the editor executable".into(),
-                span: call.head,
-            },
-        )?;
-
-        let Some(env_path) = engine_state.get_config_path("env-path") else {
-            return Err(ShellError::GenericError {
-                error: "Could not find $nu.env-path".into(),
-                msg: "Could not find $nu.env-path".into(),
-                span: None,
-                help: None,
-                inner: vec![],
-            });
-        };
-        let env_path = env_path.to_string_lossy().to_string();
-
-        // Create the command.
-        let mut command = std::process::Command::new(editor_executable);
-
-        // Configure PWD.
-        command.current_dir(cwd);
-
-        // Configure environment variables.
-        let envs = env_to_strings(engine_state, stack)?;
-        command.env_clear();
-        command.envs(envs);
-
-        // Configure args.
-        command.arg(env_path);
-        command.args(editor_args);
-
-        // Spawn the child process. On Unix, also put the child process to
-        // foreground if we're in an interactive session.
-        #[cfg(windows)]
-        let child = ForegroundChild::spawn(command)?;
-        #[cfg(unix)]
-        let child = ForegroundChild::spawn(
-            command,
-            engine_state.is_interactive,
-            &engine_state.pipeline_externals_state,
-        )?;
-
-        // Wrap the output into a `PipelineData::ByteStream`.
-        let child = ChildProcess::new(child, None, false, call.head)?;
-        Ok(PipelineData::ByteStream(
-            ByteStream::child(child, call.head),
-            None,
-        ))
+        super::config_::start_editor("env-path", engine_state, stack, call)
     }
 }
crates/nu-command/src/env/config/config_nu.rs (61 changed lines)

@@ -1,7 +1,4 @@
-use nu_cmd_base::util::get_editor;
-use nu_engine::{command_prelude::*, env_to_strings};
-use nu_protocol::{process::ChildProcess, ByteStream};
-use nu_system::ForegroundChild;
+use nu_engine::command_prelude::*;
 
 #[derive(Clone)]
 pub struct ConfigNu;
@@ -83,60 +80,6 @@ impl Command for ConfigNu {
             return Ok(Value::string(nu_utils::get_sample_config(), head).into_pipeline_data());
         }
 
-        // Find the editor executable.
-        let (editor_name, editor_args) = get_editor(engine_state, stack, call.head)?;
-        let paths = nu_engine::env::path_str(engine_state, stack, call.head)?;
-        let cwd = engine_state.cwd(Some(stack))?;
-        let editor_executable = crate::which(&editor_name, &paths, cwd.as_ref()).ok_or(
-            ShellError::ExternalCommand {
-                label: format!("`{editor_name}` not found"),
-                help: "Failed to find the editor executable".into(),
-                span: call.head,
-            },
-        )?;
-
-        let Some(config_path) = engine_state.get_config_path("config-path") else {
-            return Err(ShellError::GenericError {
-                error: "Could not find $nu.config-path".into(),
-                msg: "Could not find $nu.config-path".into(),
-                span: None,
-                help: None,
-                inner: vec![],
-            });
-        };
-        let config_path = config_path.to_string_lossy().to_string();
-
-        // Create the command.
-        let mut command = std::process::Command::new(editor_executable);
-
-        // Configure PWD.
-        command.current_dir(cwd);
-
-        // Configure environment variables.
-        let envs = env_to_strings(engine_state, stack)?;
-        command.env_clear();
-        command.envs(envs);
-
-        // Configure args.
-        command.arg(config_path);
-        command.args(editor_args);
-
-        // Spawn the child process. On Unix, also put the child process to
-        // foreground if we're in an interactive session.
-        #[cfg(windows)]
-        let child = ForegroundChild::spawn(command)?;
-        #[cfg(unix)]
-        let child = ForegroundChild::spawn(
-            command,
-            engine_state.is_interactive,
-            &engine_state.pipeline_externals_state,
-        )?;
-
-        // Wrap the output into a `PipelineData::ByteStream`.
-        let child = ChildProcess::new(child, None, false, call.head)?;
-        Ok(PipelineData::ByteStream(
-            ByteStream::child(child, call.head),
-            None,
-        ))
+        super::config_::start_editor("config-path", engine_state, stack, call)
     }
 }
@@ -103,3 +103,9 @@ fn is_root_impl() -> bool {
 
     elevated
 }
+
+#[cfg(target_arch = "wasm32")]
+fn is_root_impl() -> bool {
+    // in wasm we don't have a user system, so technically we are never root
+    false
+}
@@ -1,4 +1,3 @@
-use super::util::get_rest_for_glob_pattern;
 use crate::{DirBuilder, DirInfo, FileInfo};
 #[allow(deprecated)]
 use nu_engine::{command_prelude::*, current_dir};
@@ -13,8 +12,8 @@ pub struct Du;
 #[derive(Deserialize, Clone, Debug)]
 pub struct DuArgs {
     path: Option<Spanned<NuGlob>>,
-    all: bool,
     deref: bool,
+    long: bool,
     exclude: Option<Spanned<NuGlob>>,
     #[serde(rename = "max-depth")]
     max_depth: Option<Spanned<i64>>,
@@ -50,6 +49,11 @@ impl Command for Du {
                 "Dereference symlinks to their targets for size",
                 Some('r'),
             )
+            .switch(
+                "long",
+                "Get underlying directories and files for each entry",
+                Some('l'),
+            )
             .named(
                 "exclude",
                 SyntaxShape::GlobPattern,
@@ -95,13 +99,13 @@ impl Command for Du {
                 });
             }
         }
-        let all = call.has_flag(engine_state, stack, "all")?;
         let deref = call.has_flag(engine_state, stack, "deref")?;
+        let long = call.has_flag(engine_state, stack, "long")?;
         let exclude = call.get_flag(engine_state, stack, "exclude")?;
         #[allow(deprecated)]
         let current_dir = current_dir(engine_state, stack)?;
 
-        let paths = get_rest_for_glob_pattern(engine_state, stack, call, 0)?;
+        let paths = call.rest::<Spanned<NuGlob>>(engine_state, stack, 0)?;
         let paths = if !call.has_positional_args(stack, 0) {
             None
         } else {
@@ -112,8 +116,8 @@ impl Command for Du {
             None => {
                 let args = DuArgs {
                     path: None,
-                    all,
                     deref,
+                    long,
                     exclude,
                     max_depth,
                     min_size,
@@ -128,8 +132,8 @@ impl Command for Du {
                 for p in paths {
                     let args = DuArgs {
                         path: Some(p),
-                        all,
                         deref,
+                        long,
                         exclude: exclude.clone(),
                         max_depth,
                         min_size,
@@ -175,7 +179,6 @@ fn du_for_one_pattern(
             })
         })?;
 
-    let include_files = args.all;
     let mut paths = match args.path {
         Some(p) => nu_engine::glob_from(&p, current_dir, span, None),
         // The * pattern should never fail.
@@ -189,17 +192,10 @@ fn du_for_one_pattern(
             None,
         ),
     }
-    .map(|f| f.1)?
-    .filter(move |p| {
-        if include_files {
-            true
-        } else {
-            matches!(p, Ok(f) if f.is_dir())
-        }
-    });
+    .map(|f| f.1)?;
 
-    let all = args.all;
     let deref = args.deref;
+    let long = args.long;
     let max_depth = args.max_depth.map(|f| f.item as u64);
     let min_size = args.min_size.map(|f| f.item as u64);
 
@@ -208,7 +204,7 @@ fn du_for_one_pattern(
         min: min_size,
         deref,
         exclude,
-        all,
+        long,
     };
 
     let mut output: Vec<Value> = vec![];
@@ -217,7 +213,7 @@ fn du_for_one_pattern(
             Ok(a) => {
                 if a.is_dir() {
                     output.push(DirInfo::new(a, &params, max_depth, span, signals)?.into());
-                } else if let Ok(v) = FileInfo::new(a, deref, span) {
+                } else if let Ok(v) = FileInfo::new(a, deref, span, params.long) {
                     output.push(v.into());
                 }
             }
@@ -1,5 +1,5 @@
 use nu_engine::command_prelude::*;
-use nu_protocol::Signals;
+use nu_protocol::{ListStream, Signals};
 use wax::{Glob as WaxGlob, WalkBehavior, WalkEntry};
 
 #[derive(Clone)]
@@ -223,6 +223,7 @@ impl Command for Glob {
                     ..Default::default()
                 },
             )
+            .into_owned()
             .not(np)
             .map_err(|err| ShellError::GenericError {
                 error: "error with glob's not pattern".into(),
@@ -249,6 +250,7 @@ impl Command for Glob {
                     ..Default::default()
                 },
             )
+            .into_owned()
             .flatten();
             glob_to_value(
                 engine_state.signals(),
@@ -258,11 +260,9 @@ impl Command for Glob {
                 no_symlinks,
                 span,
             )
-        }?;
+        };
 
-        Ok(result
-            .into_iter()
-            .into_pipeline_data(span, engine_state.signals().clone()))
+        Ok(result.into_pipeline_data(span, engine_state.signals().clone()))
     }
 }
 
@@ -281,29 +281,33 @@ fn convert_patterns(columns: &[Value]) -> Result<Vec<String>, ShellError> {
     Ok(res)
 }
 
-fn glob_to_value<'a>(
+fn glob_to_value(
     signals: &Signals,
-    glob_results: impl Iterator<Item = WalkEntry<'a>>,
+    glob_results: impl Iterator<Item = WalkEntry<'static>> + Send + 'static,
     no_dirs: bool,
     no_files: bool,
     no_symlinks: bool,
     span: Span,
-) -> Result<Vec<Value>, ShellError> {
-    let mut result: Vec<Value> = Vec::new();
-    for entry in glob_results {
-        signals.check(span)?;
+) -> ListStream {
+    let map_signals = signals.clone();
+    let result = glob_results.filter_map(move |entry| {
+        if let Err(err) = map_signals.check(span) {
+            return Some(Value::error(err, span));
+        };
         let file_type = entry.file_type();
 
         if !(no_dirs && file_type.is_dir()
             || no_files && file_type.is_file()
            || no_symlinks && file_type.is_symlink())
        {
-            result.push(Value::string(
+            Some(Value::string(
                 entry.into_path().to_string_lossy().to_string(),
                 span,
-            ));
-        }
+            ))
+        } else {
+            None
         }
+    });
 
-    Ok(result)
+    ListStream::new(result, span, signals.clone())
 }
@@ -1,4 +1,3 @@
-use super::util::get_rest_for_glob_pattern;
 use crate::{DirBuilder, DirInfo};
 use chrono::{DateTime, Local, LocalResult, TimeZone, Utc};
 use nu_engine::glob_from;
@@ -114,7 +113,7 @@ impl Command for Ls {
             call_span,
         };
 
-        let pattern_arg = get_rest_for_glob_pattern(engine_state, stack, call, 0)?;
+        let pattern_arg = call.rest::<Spanned<NuGlob>>(engine_state, stack, 0)?;
         let input_pattern_arg = if !call.has_positional_args(stack, 0) {
             None
         } else {
@@ -1,7 +1,6 @@
-use super::util::get_rest_for_glob_pattern;
 #[allow(deprecated)]
 use nu_engine::{command_prelude::*, current_dir, get_eval_block};
-use nu_protocol::{ast, ByteStream, DataSource, NuGlob, PipelineMetadata};
+use nu_protocol::{ast, DataSource, NuGlob, PipelineMetadata};
 use std::path::Path;
 
 #[cfg(feature = "sqlite")]
@@ -53,7 +52,7 @@ impl Command for Open {
         let call_span = call.head;
         #[allow(deprecated)]
         let cwd = current_dir(engine_state, stack)?;
-        let mut paths = get_rest_for_glob_pattern(engine_state, stack, call, 0)?;
+        let mut paths = call.rest::<Spanned<NuGlob>>(engine_state, stack, 0)?;
         let eval_block = get_eval_block(engine_state);
 
         if paths.is_empty() && !call.has_positional_args(stack, 0) {
@@ -1,4 +1,4 @@
-use super::util::{get_rest_for_glob_pattern, try_interaction};
+use super::util::try_interaction;
 #[allow(deprecated)]
 use nu_engine::{command_prelude::*, env::current_dir};
 use nu_glob::MatchOptions;
@@ -118,7 +118,7 @@ fn rm(
     let interactive = call.has_flag(engine_state, stack, "interactive")?;
     let interactive_once = call.has_flag(engine_state, stack, "interactive-once")? && !interactive;
 
-    let mut paths = get_rest_for_glob_pattern(engine_state, stack, call, 0)?;
+    let mut paths = call.rest::<Spanned<NuGlob>>(engine_state, stack, 0)?;
 
     if paths.is_empty() {
         return Err(ShellError::MissingParameter {
@@ -102,6 +102,7 @@ impl Command for Save {
                     ByteStreamSource::File(source) => {
                         stream_to_file(source, size, signals, file, span, progress)?;
                     }
+                    #[cfg(feature = "os")]
                     ByteStreamSource::Child(mut child) => {
                         fn write_or_consume_stderr(
                             stderr: ChildPipe,
@@ -2,11 +2,8 @@ use filetime::FileTime;
 use nu_engine::command_prelude::*;
 use nu_path::expand_path_with;
 use nu_protocol::NuGlob;
-
 use std::{fs::OpenOptions, time::SystemTime};
 
-use super::util::get_rest_for_glob_pattern;
-
 #[derive(Clone)]
 pub struct Touch;
 
@@ -72,8 +69,7 @@ impl Command for Touch {
         let no_follow_symlinks: bool = call.has_flag(engine_state, stack, "no-deref")?;
         let reference: Option<Spanned<String>> = call.get_flag(engine_state, stack, "reference")?;
         let no_create: bool = call.has_flag(engine_state, stack, "no-create")?;
-        let files: Vec<Spanned<NuGlob>> = get_rest_for_glob_pattern(engine_state, stack, call, 0)?;
-
+        let files = call.rest::<Spanned<NuGlob>>(engine_state, stack, 0)?;
 
         let cwd = engine_state.cwd(Some(stack))?;
 
@@ -1,6 +1,6 @@
-use super::util::get_rest_for_glob_pattern;
 #[allow(deprecated)]
 use nu_engine::{command_prelude::*, current_dir};
+use nu_protocol::NuGlob;
 use std::path::PathBuf;
 use uu_cp::{BackupMode, CopyMode, UpdateMode};
 
@@ -156,7 +156,7 @@ impl Command for UCp {
             target_os = "macos"
         )))]
         let reflink_mode = uu_cp::ReflinkMode::Never;
-        let mut paths = get_rest_for_glob_pattern(engine_state, stack, call, 0)?;
+        let mut paths = call.rest::<Spanned<NuGlob>>(engine_state, stack, 0)?;
         if paths.is_empty() {
             return Err(ShellError::GenericError {
                 error: "Missing file operand".into(),
@@ -1,12 +1,10 @@
 #[allow(deprecated)]
 use nu_engine::{command_prelude::*, current_dir};
+use nu_protocol::NuGlob;
 use uu_mkdir::mkdir;
 #[cfg(not(windows))]
 use uucore::mode;
 
-use super::util::get_rest_for_glob_pattern;
-
 #[derive(Clone)]
 pub struct UMkdir;
 
@@ -61,7 +59,8 @@ impl Command for UMkdir {
     ) -> Result<PipelineData, ShellError> {
         #[allow(deprecated)]
         let cwd = current_dir(engine_state, stack)?;
-        let mut directories = get_rest_for_glob_pattern(engine_state, stack, call, 0)?
+        let mut directories = call
+            .rest::<Spanned<NuGlob>>(engine_state, stack, 0)?
             .into_iter()
             .map(|dir| nu_path::expand_path_with(dir.item.as_ref(), &cwd, dir.item.is_expand()))
             .peekable();
@@ -1,4 +1,3 @@
-use super::util::get_rest_for_glob_pattern;
 #[allow(deprecated)]
 use nu_engine::{command_prelude::*, current_dir};
 use nu_path::expand_path_with;
@@ -100,7 +99,7 @@ impl Command for UMv {
 
         #[allow(deprecated)]
         let cwd = current_dir(engine_state, stack)?;
-        let mut paths = get_rest_for_glob_pattern(engine_state, stack, call, 0)?;
+        let mut paths = call.rest::<Spanned<NuGlob>>(engine_state, stack, 0)?;
         if paths.is_empty() {
             return Err(ShellError::GenericError {
                 error: "Missing file operand".into(),
@@ -1,6 +1,4 @@
 use dialoguer::Input;
-use nu_engine::{command_prelude::*, get_eval_expression};
-use nu_protocol::{FromValue, NuGlob};
 use std::{
     error::Error,
     path::{Path, PathBuf},
@@ -89,22 +87,3 @@ pub fn is_older(src: &Path, dst: &Path) -> Option<bool> {
         Some(src_ctime <= dst_ctime)
     }
 }
-
-/// Get rest arguments from given `call`, starts with `starting_pos`.
-///
-/// It's similar to `call.rest`, except that it always returns NuGlob.
-pub fn get_rest_for_glob_pattern(
-    engine_state: &EngineState,
-    stack: &mut Stack,
-    call: &Call,
-    starting_pos: usize,
-) -> Result<Vec<Spanned<NuGlob>>, ShellError> {
-    let eval_expression = get_eval_expression(engine_state);
-
-    call.rest_iter_flattened(engine_state, stack, eval_expression, starting_pos)?
-        .into_iter()
-        // This used to be much more complex, but I think `FromValue` should be able to handle the
-        // nuance here.
-        .map(FromValue::from_value)
-        .collect()
-}
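
Not part of the diff above: a minimal standalone sketch of why a generic, typed rest-argument accessor can replace the removed get_rest_for_glob_pattern helper. The per-type conversion lives in a conversion trait impl, so one generic function covers every argument type; the Value and Glob types here are toy stand-ins, not the nu-protocol types.

// Toy sketch: a generic `rest` collector driven by TryFrom, standing in for
// `call.rest::<Spanned<NuGlob>>(...)`. All names here are illustrative.
#[derive(Debug, Clone)]
enum Value {
    String(String),
    Int(i64),
}

#[derive(Debug)]
struct Glob(String);

impl TryFrom<Value> for Glob {
    type Error = String;
    fn try_from(v: Value) -> Result<Self, Self::Error> {
        match v {
            Value::String(s) => Ok(Glob(s)),
            other => Err(format!("expected string, got {other:?}")),
        }
    }
}

// Collect every positional argument from `start` onwards, converting each one.
fn rest<T: TryFrom<Value, Error = String>>(args: &[Value], start: usize) -> Result<Vec<T>, String> {
    args.iter().skip(start).cloned().map(T::try_from).collect()
}

fn main() -> Result<(), String> {
    let args = vec![Value::String("*.rs".into()), Value::String("src/**".into())];
    let globs: Vec<Glob> = rest(&args, 0)?;
    println!("{globs:?}");
    Ok(())
}
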
@@ -1,19 +1,10 @@
-use std::io::ErrorKind;
-use std::path::PathBuf;
-
 use chrono::{DateTime, FixedOffset};
 use filetime::FileTime;
+use nu_engine::command_prelude::*;
-use nu_engine::CallExt;
 use nu_path::expand_path_with;
-use nu_protocol::engine::{Call, Command, EngineState, Stack};
+use nu_protocol::NuGlob;
-use nu_protocol::{
+use std::{io::ErrorKind, path::PathBuf};
-    Category, Example, NuGlob, PipelineData, ShellError, Signature, Spanned, SyntaxShape, Type,
+use uu_touch::{error::TouchError, ChangeTimes, InputFile, Options, Source};
-};
-use uu_touch::error::TouchError;
-use uu_touch::{ChangeTimes, InputFile, Options, Source};

-use super::util::get_rest_for_glob_pattern;
-
 #[derive(Clone)]
 pub struct UTouch;
@@ -24,7 +15,7 @@ impl Command for UTouch {
     }

     fn search_terms(&self) -> Vec<&str> {
-        vec!["create", "file"]
+        vec!["create", "file", "coreutils"]
     }

     fn signature(&self) -> Signature {
@@ -91,8 +82,7 @@ impl Command for UTouch {
         let change_atime: bool = call.has_flag(engine_state, stack, "access")?;
         let no_create: bool = call.has_flag(engine_state, stack, "no-create")?;
         let no_deref: bool = call.has_flag(engine_state, stack, "no-dereference")?;
-        let file_globs: Vec<Spanned<NuGlob>> =
-            get_rest_for_glob_pattern(engine_state, stack, call, 0)?;
+        let file_globs = call.rest::<Spanned<NuGlob>>(engine_state, stack, 0)?;
         let cwd = engine_state.cwd(Some(stack))?;

         if file_globs.is_empty() {
@@ -14,7 +14,7 @@ impl Command for All {
             .input_output_types(vec![(Type::List(Box::new(Type::Any)), Type::Bool)])
             .required(
                 "predicate",
-                SyntaxShape::Closure(Some(vec![SyntaxShape::Any, SyntaxShape::Int])),
+                SyntaxShape::Closure(Some(vec![SyntaxShape::Any])),
                 "A closure that must evaluate to a boolean.",
             )
             .category(Category::Filters)

@@ -14,7 +14,7 @@ impl Command for Any {
             .input_output_types(vec![(Type::List(Box::new(Type::Any)), Type::Bool)])
             .required(
                 "predicate",
-                SyntaxShape::Closure(Some(vec![SyntaxShape::Any, SyntaxShape::Int])),
+                SyntaxShape::Closure(Some(vec![SyntaxShape::Any])),
                 "A closure that must evaluate to a boolean.",
             )
             .category(Category::Filters)
256
crates/nu-command/src/filters/chunk_by.rs
Normal file
@@ -0,0 +1,256 @@
+use super::utils::chain_error_with_input;
+use nu_engine::{command_prelude::*, ClosureEval};
+use nu_protocol::engine::Closure;
+use nu_protocol::Signals;
+
+#[derive(Clone)]
+pub struct ChunkBy;
+
+impl Command for ChunkBy {
+    fn name(&self) -> &str {
+        "chunk-by"
+    }
+
+    fn signature(&self) -> Signature {
+        Signature::build("chunk-by")
+            .input_output_types(vec![
+                (
+                    Type::List(Box::new(Type::Any)),
+                    Type::list(Type::list(Type::Any)),
+                ),
+                (Type::Range, Type::list(Type::list(Type::Any))),
+            ])
+            .required(
+                "closure",
+                SyntaxShape::Closure(Some(vec![SyntaxShape::Any])),
+                "The closure to run.",
+            )
+            .category(Category::Filters)
+    }
+
+    fn description(&self) -> &str {
+        r#"Divides a sequence into sub-sequences based on a closure."#
+    }
+
+    fn extra_description(&self) -> &str {
+        r#"chunk-by applies the given closure to each value of the input list, and groups
+consecutive elements that share the same closure result value into lists."#
+    }
+
+    fn run(
+        &self,
+        engine_state: &EngineState,
+        stack: &mut Stack,
+        call: &Call,
+        input: PipelineData,
+    ) -> Result<PipelineData, ShellError> {
+        chunk_by(engine_state, stack, call, input)
+    }
+
+    fn examples(&self) -> Vec<Example> {
+        vec![
+            Example {
+                description: "Chunk data into runs of larger than zero or not.",
+                example: "[1, 3, -2, -2, 0, 1, 2] | chunk-by {|it| $it >= 0 }",
+                result: Some(Value::test_list(vec![
+                    Value::test_list(vec![Value::test_int(1), Value::test_int(3)]),
+                    Value::test_list(vec![Value::test_int(-2), Value::test_int(-2)]),
+                    Value::test_list(vec![
+                        Value::test_int(0),
+                        Value::test_int(1),
+                        Value::test_int(2),
+                    ]),
+                ])),
+            },
+            Example {
+                description: "Identify repetitions in a string",
+                example: r#"[a b b c c c] | chunk-by { |it| $it }"#,
+                result: Some(Value::test_list(vec![
+                    Value::test_list(vec![Value::test_string("a")]),
+                    Value::test_list(vec![Value::test_string("b"), Value::test_string("b")]),
+                    Value::test_list(vec![
+                        Value::test_string("c"),
+                        Value::test_string("c"),
+                        Value::test_string("c"),
+                    ]),
+                ])),
+            },
+            Example {
+                description: "Chunk values of range by predicate",
+                example: r#"(0..8) | chunk-by { |it| $it // 3 }"#,
+                result: Some(Value::test_list(vec![
+                    Value::test_list(vec![
+                        Value::test_int(0),
+                        Value::test_int(1),
+                        Value::test_int(2),
+                    ]),
+                    Value::test_list(vec![
+                        Value::test_int(3),
+                        Value::test_int(4),
+                        Value::test_int(5),
+                    ]),
+                    Value::test_list(vec![
+                        Value::test_int(6),
+                        Value::test_int(7),
+                        Value::test_int(8),
+                    ]),
+                ])),
+            },
+        ]
+    }
+}
+
+struct Chunk<I, T, F, K> {
+    iterator: I,
+    last_value: Option<(T, K)>,
+    closure: F,
+    done: bool,
+    signals: Signals,
+}
+
+impl<I, T, F, K> Chunk<I, T, F, K>
+where
+    I: Iterator<Item = T>,
+    F: FnMut(&T) -> K,
+    K: PartialEq,
+{
+    fn inner_iterator_next(&mut self) -> Option<I::Item> {
+        if self.signals.interrupted() {
+            self.done = true;
+            return None;
+        }
+        self.iterator.next()
+    }
+}
+
+impl<I, T, F, K> Iterator for Chunk<I, T, F, K>
+where
+    I: Iterator<Item = T>,
+    F: FnMut(&T) -> K,
+    K: PartialEq,
+{
+    type Item = Vec<T>;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        if self.done {
+            return None;
+        }
+
+        let (head, head_key) = match self.last_value.take() {
+            None => {
+                let head = self.inner_iterator_next()?;
+
+                let key = (self.closure)(&head);
+
+                (head, key)
+            }
+
+            Some((value, key)) => (value, key),
+        };
+
+        let mut result = vec![head];
+
+        loop {
+            match self.inner_iterator_next() {
+                None => {
+                    self.done = true;
+                    return Some(result);
+                }
+                Some(value) => {
+                    let value_key = (self.closure)(&value);
+
+                    if value_key == head_key {
+                        result.push(value);
+                    } else {
+                        self.last_value = Some((value, value_key));
+                        return Some(result);
+                    }
+                }
+            }
+        }
+    }
+}
+
+/// An iterator with the semantics of the chunk_by operation.
+fn chunk_iter_by<I, T, F, K>(iterator: I, signals: Signals, closure: F) -> Chunk<I, T, F, K>
+where
+    I: Iterator<Item = T>,
+    F: FnMut(&T) -> K,
+    K: PartialEq,
+{
+    Chunk {
+        closure,
+        iterator,
+        last_value: None,
+        done: false,
+        signals,
+    }
+}
+
+pub fn chunk_by(
+    engine_state: &EngineState,
+    stack: &mut Stack,
+    call: &Call,
+    input: PipelineData,
+) -> Result<PipelineData, ShellError> {
+    let head = call.head;
+    let closure: Closure = call.req(engine_state, stack, 0)?;
+
+    let metadata = input.metadata();
+
+    match input {
+        PipelineData::Empty => Ok(PipelineData::Empty),
+        PipelineData::Value(Value::Range { .. }, ..)
+        | PipelineData::Value(Value::List { .. }, ..)
+        | PipelineData::ListStream(..) => {
+            let closure = ClosureEval::new(engine_state, stack, closure);
+
+            let result = chunk_value_stream(
+                input.into_iter(),
+                closure,
+                head,
+                engine_state.signals().clone(),
+            );
+
+            Ok(result.into_pipeline_data(head, engine_state.signals().clone()))
+        }
+
+        PipelineData::ByteStream(..) | PipelineData::Value(..) => {
+            Err(input.unsupported_input_error("list", head))
+        }
+    }
+    .map(|data| data.set_metadata(metadata))
+}
+
+fn chunk_value_stream<I>(
+    iterator: I,
+    mut closure: ClosureEval,
+    head: Span,
+    signals: Signals,
+) -> impl Iterator<Item = Value> + 'static + Send
+where
+    I: Iterator<Item = Value> + 'static + Send,
+{
+    chunk_iter_by(iterator, signals, move |value| {
+        match closure.run_with_value(value.clone()) {
+            Ok(data) => data.into_value(head).unwrap_or_else(|error| {
+                Value::error(chain_error_with_input(error, value.is_error(), head), head)
+            }),
+
+            Err(error) => Value::error(chain_error_with_input(error, value.is_error(), head), head),
+        }
+    })
+    .map(move |it| Value::list(it, head))
+}
+
+#[cfg(test)]
+mod test {
+    use super::*;
+
+    #[test]
+    fn test_examples() {
+        use crate::test_examples;
+
+        test_examples(ChunkBy {})
+    }
+}
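
Not part of the diff: a standalone sketch of the grouping rule the new chunk-by command implements, i.e. apply a key closure to each element and start a new chunk whenever the key differs from the previous element's key. It uses plain vectors instead of nushell values.

// Minimal sketch of chunk-by's consecutive-grouping semantics.
fn chunk_by_key<T, K: PartialEq>(items: Vec<T>, mut key: impl FnMut(&T) -> K) -> Vec<Vec<T>> {
    let mut chunks: Vec<Vec<T>> = Vec::new();
    let mut last_key: Option<K> = None;
    for item in items {
        let k = key(&item);
        let start_new = match &last_key {
            Some(prev) => *prev != k,
            None => true,
        };
        if start_new {
            chunks.push(Vec::new());
        }
        // `start_new` guarantees `chunks` is non-empty here.
        chunks.last_mut().expect("chunk exists").push(item);
        last_key = Some(k);
    }
    chunks
}

fn main() {
    // Mirrors the `[1, 3, -2, -2, 0, 1, 2] | chunk-by {|it| $it >= 0 }` example.
    let chunks = chunk_by_key(vec![1, 3, -2, -2, 0, 1, 2], |n| *n >= 0);
    assert_eq!(chunks, vec![vec![1, 3], vec![-2, -2], vec![0, 1, 2]]);
    println!("{chunks:?}");
}
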
@@ -29,7 +29,7 @@ impl Command for DropColumn {
     }

     fn search_terms(&self) -> Vec<&str> {
-        vec!["delete"]
+        vec!["delete", "remove"]
     }

     fn run(

@@ -26,7 +26,7 @@ impl Command for Drop {
     }

     fn search_terms(&self) -> Vec<&str> {
-        vec!["delete"]
+        vec!["delete", "remove"]
     }

     fn examples(&self) -> Vec<Example> {

@@ -32,7 +32,7 @@ impl Command for DropNth {
     }

     fn search_terms(&self) -> Vec<&str> {
-        vec!["delete"]
+        vec!["delete", "remove", "index"]
     }

     fn examples(&self) -> Vec<Example> {

@@ -30,7 +30,7 @@ a variable. On the other hand, the "row condition" syntax is not supported."#
             ])
             .required(
                 "closure",
-                SyntaxShape::Closure(Some(vec![SyntaxShape::Any, SyntaxShape::Int])),
+                SyntaxShape::Closure(Some(vec![SyntaxShape::Any])),
                 "Predicate closure.",
             )
             .category(Category::Filters)
@@ -129,6 +129,8 @@ fn insert(
     let replacement: Value = call.req(engine_state, stack, 1)?;

     match input {
+        // Propagate errors in the pipeline
+        PipelineData::Value(Value::Error { error, .. }, ..) => Err(*error),
         PipelineData::Value(mut value, metadata) => {
             if let Value::Closure { val, .. } = replacement {
                 match (cell_path.members.first(), &mut value) {
@@ -19,6 +19,7 @@ impl Command for Length {
             .input_output_types(vec![
                 (Type::List(Box::new(Type::Any)), Type::Int),
                 (Type::Binary, Type::Int),
+                (Type::Nothing, Type::Int),
             ])
             .category(Category::Filters)
     }
@@ -54,6 +55,11 @@ impl Command for Length {
                 example: "0x[01 02] | length",
                 result: Some(Value::test_int(2)),
             },
+            Example {
+                description: "Count the length a null value",
+                example: "null | length",
+                result: Some(Value::test_int(0)),
+            },
         ]
     }
 }
@@ -61,23 +67,19 @@ impl Command for Length {
 fn length_row(call: &Call, input: PipelineData) -> Result<PipelineData, ShellError> {
     let span = input.span().unwrap_or(call.head);
     match input {
-        PipelineData::Value(Value::Nothing { .. }, ..) => {
+        PipelineData::Empty | PipelineData::Value(Value::Nothing { .. }, ..) => {
             Ok(Value::int(0, call.head).into_pipeline_data())
         }
-        // I added this here because input_output_type() wasn't catching a record
-        // being sent in as input from echo. e.g. "echo {a:1 b:2} | length"
-        PipelineData::Value(Value::Record { .. }, ..) => {
-            Err(ShellError::OnlySupportsThisInputType {
-                exp_input_type: "list, and table".into(),
-                wrong_type: "record".into(),
-                dst_span: call.head,
-                src_span: span,
-            })
-        }
         PipelineData::Value(Value::Binary { val, .. }, ..) => {
             Ok(Value::int(val.len() as i64, call.head).into_pipeline_data())
         }
-        PipelineData::ByteStream(stream, _) if stream.type_().is_binary_coercible() => {
+        PipelineData::Value(Value::List { vals, .. }, ..) => {
+            Ok(Value::int(vals.len() as i64, call.head).into_pipeline_data())
+        }
+        PipelineData::ListStream(stream, ..) => {
+            Ok(Value::int(stream.into_iter().count() as i64, call.head).into_pipeline_data())
+        }
+        PipelineData::ByteStream(stream, ..) if stream.type_().is_binary_coercible() => {
             Ok(Value::int(
                 match stream.reader() {
                     Some(r) => r.bytes().count() as i64,
@@ -87,17 +89,12 @@ fn length_row(call: &Call, input: PipelineData) -> Result<PipelineData, ShellErr
             )
             .into_pipeline_data())
         }
-        _ => {
-            let mut count: i64 = 0;
-            // Check for and propagate errors
-            for value in input.into_iter() {
-                if let Value::Error { error, .. } = value {
-                    return Err(*error);
-                }
-                count += 1
-            }
-            Ok(Value::int(count, call.head).into_pipeline_data())
-        }
+        _ => Err(ShellError::OnlySupportsThisInputType {
+            exp_input_type: "list, table, binary, and nothing".into(),
+            wrong_type: input.get_type().to_string(),
+            dst_span: call.head,
+            src_span: span,
+        }),
     }
 }

@@ -120,6 +120,8 @@ repeating this process with row 1, and so on."#
             PipelineData::Value(Value::Record { val: inp, .. }, ..),
             Value::Record { val: to_merge, .. },
         ) => Ok(Value::record(do_merge(inp, &to_merge), head).into_pipeline_data()),
+        // Propagate errors in the pipeline
+        (PipelineData::Value(Value::Error { error, .. }, ..), _) => Err(*error.clone()),
         (PipelineData::Value(val, ..), ..) => {
             // Only point the "value originates here" arrow at the merge value
             // if it was generated from a block. Otherwise, point at the pipeline value. -Leon 2022-10-27
@@ -1,6 +1,7 @@
 mod all;
 mod any;
 mod append;
+mod chunk_by;
 mod chunks;
 mod columns;
 mod compact;
@@ -36,6 +37,7 @@ mod reject;
 mod rename;
 mod reverse;
 mod select;
+#[cfg(feature = "rand")]
 mod shuffle;
 mod skip;
 mod sort;
@@ -58,6 +60,7 @@ mod zip;
 pub use all::All;
 pub use any::Any;
 pub use append::Append;
+pub use chunk_by::ChunkBy;
 pub use chunks::Chunks;
 pub use columns::Columns;
 pub use compact::Compact;
@@ -93,6 +96,7 @@ pub use reject::Reject;
 pub use rename::Rename;
 pub use reverse::Reverse;
 pub use select::Select;
+#[cfg(feature = "rand")]
 pub use shuffle::Shuffle;
 pub use skip::*;
 pub use sort::Sort;
@@ -38,7 +38,7 @@ impl Command for ParEach {
             )
             .required(
                 "closure",
-                SyntaxShape::Closure(Some(vec![SyntaxShape::Any, SyntaxShape::Int])),
+                SyntaxShape::Closure(Some(vec![SyntaxShape::Any])),
                 "The closure to run.",
             )
             .allow_variants_without_examples(true)
@@ -24,11 +24,7 @@ impl Command for Reduce {
             )
             .required(
                 "closure",
-                SyntaxShape::Closure(Some(vec![
-                    SyntaxShape::Any,
-                    SyntaxShape::Any,
-                    SyntaxShape::Int,
-                ])),
+                SyntaxShape::Closure(Some(vec![SyntaxShape::Any, SyntaxShape::Any])),
                 "Reducing function.",
             )
             .allow_variants_without_examples(true)
@@ -88,6 +84,15 @@ impl Command for Reduce {
                 "Concatenate a string with itself, using a range to determine the number of times.",
                 result: Some(Value::test_string("StrStrStr")),
             },
+            Example {
+                example: r#"[{a: 1} {b: 2} {c: 3}] | reduce {|it| merge $it}"#,
+                description: "Merge multiple records together, making use of the fact that the accumulated value is also supplied as pipeline input to the closure.",
+                result: Some(Value::test_record(record!(
+                    "a" => Value::test_int(1),
+                    "b" => Value::test_int(2),
+                    "c" => Value::test_int(3),
+                ))),
+            }
         ]
     }

@@ -135,8 +140,8 @@ mod test {

     #[test]
     fn test_examples() {
-        use crate::test_examples;
+        use crate::{test_examples_with_commands, Merge};

-        test_examples(Reduce {})
+        test_examples_with_commands(Reduce {}, &[&Merge])
     }
 }
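
Not part of the diff: a rough Rust analogue of the new `reduce` example `[{a: 1} {b: 2} {c: 3}] | reduce {|it| merge $it}`, showing the same idea of folding each record into the accumulator (the accumulator plays the role of the closure's pipeline input). Plain BTreeMaps stand in for nushell records.

use std::collections::BTreeMap;

fn main() {
    let records: Vec<BTreeMap<&str, i64>> = vec![
        BTreeMap::from([("a", 1)]),
        BTreeMap::from([("b", 2)]),
        BTreeMap::from([("c", 3)]),
    ];

    let merged = records.into_iter().fold(BTreeMap::new(), |mut acc, record| {
        // merge semantics: keys from the current record overwrite the accumulator.
        acc.extend(record);
        acc
    });

    assert_eq!(merged, BTreeMap::from([("a", 1), ("b", 2), ("c", 3)]));
    println!("{merged:?}");
}
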
@@ -206,7 +206,6 @@ fn select(
     let columns = new_columns;

     let input = if !unique_rows.is_empty() {
-        // let skip = call.has_flag(engine_state, stack, "skip")?;
         let metadata = input.metadata();
         let pipeline_iter: PipelineIterator = input.into_iter();

@@ -231,9 +230,9 @@ fn select(
             Value::List {
                 vals: input_vals, ..
             } => {
-                let mut output = vec![];
-                let mut columns_with_value = Vec::new();
-                for input_val in input_vals {
+                Ok(input_vals
+                    .into_iter()
+                    .map(move |input_val| {
                         if !columns.is_empty() {
                             let mut record = Record::new();
                             for path in &columns {
@@ -241,23 +240,17 @@ fn select(
                                 match input_val.clone().follow_cell_path(&path.members, false) {
                                     Ok(fetcher) => {
                                         record.push(path.to_column_name(), fetcher);
-                                        if !columns_with_value.contains(&path) {
-                                            columns_with_value.push(path);
-                                        }
-                                    }
-                                    Err(e) => {
-                                        return Err(e);
                                     }
+                                    Err(e) => return Value::error(e, call_span),
                                 }
                             }

-                            output.push(Value::record(record, span))
+                            Value::record(record, span)
                         } else {
-                            output.push(input_val)
+                            input_val.clone()
                         }
-                }
-                Ok(output.into_iter().into_pipeline_data_with_metadata(
+                    })
+                    .into_pipeline_data_with_metadata(
                     call_span,
                     engine_state.signals().clone(),
                     metadata,
@@ -286,9 +279,8 @@ fn select(
             }
         }
         PipelineData::ListStream(stream, metadata, ..) => {
-            let mut values = vec![];
-            for x in stream {
+            Ok(stream
+                .map(move |x| {
                     if !columns.is_empty() {
                         let mut record = Record::new();
                         for path in &columns {
@@ -297,16 +289,15 @@ fn select(
                             Ok(value) => {
                                 record.push(path.to_column_name(), value);
                             }
-                            Err(e) => return Err(e),
+                            Err(e) => return Value::error(e, call_span),
                         }
                     }
-                    values.push(Value::record(record, call_span));
+                    Value::record(record, call_span)
                 } else {
-                    values.push(x);
+                    x
                 }
-            }
-            Ok(values.into_pipeline_data_with_metadata(
+                })
+                .into_pipeline_data_with_metadata(
                 call_span,
                 engine_state.signals().clone(),
                 metadata,
@@ -20,7 +20,7 @@ impl Command for SkipUntil {
             ])
             .required(
                 "predicate",
-                SyntaxShape::Closure(Some(vec![SyntaxShape::Any, SyntaxShape::Int])),
+                SyntaxShape::Closure(Some(vec![SyntaxShape::Any])),
                 "The predicate that skipped element must not match.",
             )
             .category(Category::Filters)

@@ -20,7 +20,7 @@ impl Command for SkipWhile {
             ])
             .required(
                 "predicate",
-                SyntaxShape::Closure(Some(vec![SyntaxShape::Any, SyntaxShape::Int])),
+                SyntaxShape::Closure(Some(vec![SyntaxShape::Any])),
                 "The predicate that skipped element must match.",
             )
             .category(Category::Filters)

@@ -17,7 +17,7 @@ impl Command for TakeUntil {
             )])
             .required(
                 "predicate",
-                SyntaxShape::Closure(Some(vec![SyntaxShape::Any, SyntaxShape::Int])),
+                SyntaxShape::Closure(Some(vec![SyntaxShape::Any])),
                 "The predicate that element(s) must not match.",
             )
             .category(Category::Filters)

@@ -20,7 +20,7 @@ impl Command for TakeWhile {
             ])
             .required(
                 "predicate",
-                SyntaxShape::Closure(Some(vec![SyntaxShape::Any, SyntaxShape::Int])),
+                SyntaxShape::Closure(Some(vec![SyntaxShape::Any])),
                 "The predicate that element(s) must match.",
             )
             .category(Category::Filters)
@@ -1,7 +1,9 @@
 use nu_engine::{command_prelude::*, get_eval_block_with_early_return};
+#[cfg(feature = "os")]
+use nu_protocol::process::ChildPipe;
 use nu_protocol::{
-    byte_stream::copy_with_signals, engine::Closure, process::ChildPipe, report_shell_error,
-    ByteStream, ByteStreamSource, OutDest, PipelineMetadata, Signals,
+    byte_stream::copy_with_signals, engine::Closure, report_shell_error, ByteStream,
+    ByteStreamSource, OutDest, PipelineMetadata, Signals,
 };
 use std::{
     io::{self, Read, Write},
@@ -152,6 +154,7 @@ use it in your pipeline."#
                     metadata,
                 ))
             }
+            #[cfg(feature = "os")]
             ByteStreamSource::Child(mut child) => {
                 let stderr_thread = if use_stderr {
                     let stderr_thread = if let Some(stderr) = child.stderr.take() {
@@ -454,6 +457,7 @@ fn copy(src: impl Read, dest: impl Write, info: &StreamInfo) -> Result<(), Shell
     Ok(())
 }

+#[cfg(feature = "os")]
 fn copy_pipe(pipe: ChildPipe, dest: impl Write, info: &StreamInfo) -> Result<(), ShellError> {
     match pipe {
         ChildPipe::Pipe(pipe) => copy(pipe, dest, info),
@@ -477,6 +481,7 @@ fn copy_on_thread(
         .map_err(|e| e.into_spanned(span).into())
 }

+#[cfg(feature = "os")]
 fn copy_pipe_on_thread(
     pipe: ChildPipe,
     dest: impl Write + Send + 'static,
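
Not part of the diff: the tee.rs hunks above repeatedly add `#[cfg(feature = "os")]`. A minimal illustration of that gating pattern, with a made-up helper name; the point is that the helper and every call site are gated on the same feature, so builds without it (for example wasm targets) never see the child-process plumbing.

// Both the helper and its callers carry the same cfg attribute.
#[cfg(feature = "os")]
fn copy_from_child_pipe() -> &'static str {
    "copied from a child process pipe"
}

fn main() {
    #[cfg(feature = "os")]
    println!("{}", copy_from_child_pipe());

    #[cfg(not(feature = "os"))]
    println!("built without the `os` feature; child-process plumbing compiled out");
}
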
@@ -204,12 +204,45 @@ fn from_csv(

 #[cfg(test)]
 mod test {
+    use nu_cmd_lang::eval_pipeline_without_terminal_expression;
+
     use super::*;
+
+    use crate::{Metadata, MetadataSet};

     #[test]
     fn test_examples() {
         use crate::test_examples;

         test_examples(FromCsv {})
     }
+
+    #[test]
+    fn test_content_type_metadata() {
+        let mut engine_state = Box::new(EngineState::new());
+        let delta = {
+            let mut working_set = StateWorkingSet::new(&engine_state);
+
+            working_set.add_decl(Box::new(FromCsv {}));
+            working_set.add_decl(Box::new(Metadata {}));
+            working_set.add_decl(Box::new(MetadataSet {}));
+
+            working_set.render()
+        };
+
+        engine_state
+            .merge_delta(delta)
+            .expect("Error merging delta");
+
+        let cmd = r#""a,b\n1,2" | metadata set --content-type 'text/csv' --datasource-ls | from csv | metadata | $in"#;
+        let result = eval_pipeline_without_terminal_expression(
+            cmd,
+            std::env::temp_dir().as_ref(),
+            &mut engine_state,
+        );
+        assert_eq!(
+            Value::test_record(record!("source" => Value::test_string("ls"))),
+            result.expect("There should be a result")
+        )
+    }
 }
@@ -93,9 +93,10 @@ pub(super) fn from_delimited_data(
     input: PipelineData,
     name: Span,
 ) -> Result<PipelineData, ShellError> {
+    let metadata = input.metadata().map(|md| md.with_content_type(None));
     match input {
         PipelineData::Empty => Ok(PipelineData::Empty),
-        PipelineData::Value(value, metadata) => {
+        PipelineData::Value(value, ..) => {
             let string = value.into_string()?;
             let byte_stream = ByteStream::read_string(string, name, Signals::empty());
             Ok(PipelineData::ListStream(
@@ -109,7 +110,7 @@ pub(super) fn from_delimited_data(
             dst_span: name,
             src_span: list_stream.span(),
         }),
-        PipelineData::ByteStream(byte_stream, metadata) => Ok(PipelineData::ListStream(
+        PipelineData::ByteStream(byte_stream, ..) => Ok(PipelineData::ListStream(
             from_delimited_stream(config, byte_stream, name)?,
             metadata,
         )),
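
Not part of the diff: the same metadata handling appears in most of the `from *` hunks in this commit. The converter keeps the incoming pipeline metadata (for example the data source) but clears its content type, since the output is no longer CSV/JSON/etc. text. A rough standalone sketch of that `input.metadata().map(|md| md.with_content_type(None))` idiom, with toy types standing in for the real nu-protocol ones:

// Toy stand-in for PipelineMetadata, illustrating the clear-content-type idiom.
#[derive(Clone, Debug, PartialEq)]
struct Metadata {
    data_source: Option<String>,
    content_type: Option<String>,
}

impl Metadata {
    // Return a copy with the content type replaced (here: cleared).
    fn with_content_type(self, content_type: Option<String>) -> Self {
        Metadata { content_type, ..self }
    }
}

fn main() {
    let input_metadata = Some(Metadata {
        data_source: Some("ls".into()),
        content_type: Some("text/csv".into()),
    });

    // The conversion consumes text input, so the content type no longer applies,
    // but the data source is still worth propagating to the output.
    let output_metadata = input_metadata.map(|md| md.with_content_type(None));

    assert_eq!(
        output_metadata,
        Some(Metadata {
            data_source: Some("ls".into()),
            content_type: None,
        })
    );
    println!("{output_metadata:?}");
}
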
@@ -70,13 +70,13 @@ impl Command for FromJson {
         let span = call.head;

         let strict = call.has_flag(engine_state, stack, "strict")?;
+        let metadata = input.metadata().map(|md| md.with_content_type(None));

         // TODO: turn this into a structured underline of the nu_json error
         if call.has_flag(engine_state, stack, "objects")? {
             // Return a stream of JSON values, one for each non-empty line
             match input {
-                PipelineData::Value(Value::String { val, .. }, metadata) => {
-                    Ok(PipelineData::ListStream(
+                PipelineData::Value(Value::String { val, .. }, ..) => Ok(PipelineData::ListStream(
                     read_json_lines(
                         Cursor::new(val),
                         span,
@@ -84,9 +84,8 @@ impl Command for FromJson {
                         engine_state.signals().clone(),
                     ),
                     metadata,
-                    ))
-                }
-                PipelineData::ByteStream(stream, metadata)
+                )),
+                PipelineData::ByteStream(stream, ..)
                     if stream.type_() != ByteStreamType::Binary =>
                 {
                     if let Some(reader) = stream.reader() {
@@ -107,7 +106,7 @@ impl Command for FromJson {
             }
         } else {
             // Return a single JSON value
-            let (string_input, span, metadata) = input.collect_string_strict(span)?;
+            let (string_input, span, ..) = input.collect_string_strict(span)?;

             if string_input.is_empty() {
                 return Ok(Value::nothing(span).into_pipeline_data());
@@ -267,6 +266,10 @@ fn convert_string_to_value_strict(string_input: &str, span: Span) -> Result<Valu

 #[cfg(test)]
 mod test {
+    use nu_cmd_lang::eval_pipeline_without_terminal_expression;
+
+    use crate::{Metadata, MetadataSet};
+
     use super::*;

     #[test]
@@ -275,4 +278,33 @@ mod test {

         test_examples(FromJson {})
     }
+
+    #[test]
+    fn test_content_type_metadata() {
+        let mut engine_state = Box::new(EngineState::new());
+        let delta = {
+            let mut working_set = StateWorkingSet::new(&engine_state);
+
+            working_set.add_decl(Box::new(FromJson {}));
+            working_set.add_decl(Box::new(Metadata {}));
+            working_set.add_decl(Box::new(MetadataSet {}));
+
+            working_set.render()
+        };
+
+        engine_state
+            .merge_delta(delta)
+            .expect("Error merging delta");
+
+        let cmd = r#"'{"a":1,"b":2}' | metadata set --content-type 'application/json' --datasource-ls | from json | metadata | $in"#;
+        let result = eval_pipeline_without_terminal_expression(
+            cmd,
+            std::env::temp_dir().as_ref(),
+            &mut engine_state,
+        );
+        assert_eq!(
+            Value::test_record(record!("source" => Value::test_string("ls"))),
+            result.expect("There should be a result")
+        )
+    }
 }
@@ -113,7 +113,8 @@ MessagePack: https://msgpack.org/
             objects,
             signals: engine_state.signals().clone(),
         };
-        match input {
+        let metadata = input.metadata().map(|md| md.with_content_type(None));
+        let out = match input {
             // Deserialize from a byte buffer
             PipelineData::Value(Value::Binary { val: bytes, .. }, _) => {
                 read_msgpack(Cursor::new(bytes), opts)
@@ -136,7 +137,8 @@ MessagePack: https://msgpack.org/
                 dst_span: call.head,
                 src_span: input.span().unwrap_or(call.head),
             }),
-        }
+        };
+        out.map(|pd| pd.set_metadata(metadata))
     }
 }

@@ -510,6 +512,10 @@ fn assert_eof(input: &mut impl io::Read, span: Span) -> Result<(), ShellError> {

 #[cfg(test)]
 mod test {
+    use nu_cmd_lang::eval_pipeline_without_terminal_expression;
+
+    use crate::{Metadata, MetadataSet, ToMsgpack};
+
     use super::*;

     #[test]
@@ -518,4 +524,34 @@ mod test {

         test_examples(FromMsgpack {})
     }
+
+    #[test]
+    fn test_content_type_metadata() {
+        let mut engine_state = Box::new(EngineState::new());
+        let delta = {
+            let mut working_set = StateWorkingSet::new(&engine_state);
+
+            working_set.add_decl(Box::new(ToMsgpack {}));
+            working_set.add_decl(Box::new(FromMsgpack {}));
+            working_set.add_decl(Box::new(Metadata {}));
+            working_set.add_decl(Box::new(MetadataSet {}));
+
+            working_set.render()
+        };
+
+        engine_state
+            .merge_delta(delta)
+            .expect("Error merging delta");
+
+        let cmd = r#"{a: 1 b: 2} | to msgpack | metadata set --datasource-ls | from msgpack | metadata | $in"#;
+        let result = eval_pipeline_without_terminal_expression(
+            cmd,
+            std::env::temp_dir().as_ref(),
+            &mut engine_state,
+        );
+        assert_eq!(
+            Value::test_record(record!("source" => Value::test_string("ls"))),
+            result.expect("There should be a result")
+        )
+    }
 }
@@ -43,7 +43,8 @@ impl Command for FromMsgpackz {
             objects,
             signals: engine_state.signals().clone(),
         };
-        match input {
+        let metadata = input.metadata().map(|md| md.with_content_type(None));
+        let out = match input {
             // Deserialize from a byte buffer
             PipelineData::Value(Value::Binary { val: bytes, .. }, _) => {
                 let reader = brotli::Decompressor::new(Cursor::new(bytes), BUFFER_SIZE);
@@ -68,6 +69,7 @@ impl Command for FromMsgpackz {
                 dst_span: call.head,
                 src_span: span,
             }),
-        }
+        };
+        out.map(|pd| pd.set_metadata(metadata))
     }
 }
@@ -49,7 +49,8 @@ impl Command for FromNuon {
         let (string_input, _span, metadata) = input.collect_string_strict(head)?;

         match nuon::from_nuon(&string_input, Some(head)) {
-            Ok(result) => Ok(result.into_pipeline_data_with_metadata(metadata)),
+            Ok(result) => Ok(result
+                .into_pipeline_data_with_metadata(metadata.map(|md| md.with_content_type(None)))),
             Err(err) => Err(ShellError::GenericError {
                 error: "error when loading nuon text".into(),
                 msg: "could not load nuon text".into(),
@@ -63,6 +64,10 @@ impl Command for FromNuon {

 #[cfg(test)]
 mod test {
+    use nu_cmd_lang::eval_pipeline_without_terminal_expression;
+
+    use crate::{Metadata, MetadataSet};
+
     use super::*;

     #[test]
@@ -71,4 +76,33 @@ mod test {

         test_examples(FromNuon {})
     }
+
+    #[test]
+    fn test_content_type_metadata() {
+        let mut engine_state = Box::new(EngineState::new());
+        let delta = {
+            let mut working_set = StateWorkingSet::new(&engine_state);
+
+            working_set.add_decl(Box::new(FromNuon {}));
+            working_set.add_decl(Box::new(Metadata {}));
+            working_set.add_decl(Box::new(MetadataSet {}));
+
+            working_set.render()
+        };
+
+        engine_state
+            .merge_delta(delta)
+            .expect("Error merging delta");
+
+        let cmd = r#"'[[a, b]; [1, 2]]' | metadata set --content-type 'application/x-nuon' --datasource-ls | from nuon | metadata | $in"#;
+        let result = eval_pipeline_without_terminal_expression(
+            cmd,
+            std::env::temp_dir().as_ref(),
+            &mut engine_state,
+        );
+        assert_eq!(
+            Value::test_record(record!("source" => Value::test_string("ls"))),
+            result.expect("There should be a result")
+        )
+    }
 }
@@ -46,7 +46,8 @@ impl Command for FromOds {
             vec![]
         };

-        from_ods(input, head, sel_sheets)
+        let metadata = input.metadata().map(|md| md.with_content_type(None));
+        from_ods(input, head, sel_sheets).map(|pd| pd.set_metadata(metadata))
     }

     fn examples(&self) -> Vec<Example> {
@@ -29,7 +29,8 @@ impl Command for FromToml {
         let span = call.head;
         let (mut string_input, span, metadata) = input.collect_string_strict(span)?;
         string_input.push('\n');
-        Ok(convert_string_to_value(string_input, span)?.into_pipeline_data_with_metadata(metadata))
+        Ok(convert_string_to_value(string_input, span)?
+            .into_pipeline_data_with_metadata(metadata.map(|md| md.with_content_type(None))))
     }

     fn examples(&self) -> Vec<Example> {
@@ -144,8 +145,11 @@ pub fn convert_string_to_value(string_input: String, span: Span) -> Result<Value

 #[cfg(test)]
 mod tests {
+    use crate::{Metadata, MetadataSet};
+
     use super::*;
     use chrono::TimeZone;
+    use nu_cmd_lang::eval_pipeline_without_terminal_expression;
     use toml::value::Datetime;

     #[test]
@@ -331,4 +335,33 @@ mod tests {

         assert_eq!(result, reference_date);
     }
+
+    #[test]
+    fn test_content_type_metadata() {
+        let mut engine_state = Box::new(EngineState::new());
+        let delta = {
+            let mut working_set = StateWorkingSet::new(&engine_state);
+
+            working_set.add_decl(Box::new(FromToml {}));
+            working_set.add_decl(Box::new(Metadata {}));
+            working_set.add_decl(Box::new(MetadataSet {}));
+
+            working_set.render()
+        };
+
+        engine_state
+            .merge_delta(delta)
+            .expect("Error merging delta");
+
+        let cmd = r#""[a]\nb = 1\nc = 1" | metadata set --content-type 'text/x-toml' --datasource-ls | from toml | metadata | $in"#;
+        let result = eval_pipeline_without_terminal_expression(
+            cmd,
+            std::env::temp_dir().as_ref(),
+            &mut engine_state,
+        );
+        assert_eq!(
+            Value::test_record(record!("source" => Value::test_string("ls"))),
+            result.expect("There should be a result")
+        )
+    }
 }
@@ -165,6 +165,10 @@ fn from_tsv(

 #[cfg(test)]
 mod test {
+    use nu_cmd_lang::eval_pipeline_without_terminal_expression;
+
+    use crate::{Metadata, MetadataSet};
+
     use super::*;

     #[test]
@@ -173,4 +177,33 @@ mod test {

         test_examples(FromTsv {})
     }
+
+    #[test]
+    fn test_content_type_metadata() {
+        let mut engine_state = Box::new(EngineState::new());
+        let delta = {
+            let mut working_set = StateWorkingSet::new(&engine_state);
+
+            working_set.add_decl(Box::new(FromTsv {}));
+            working_set.add_decl(Box::new(Metadata {}));
+            working_set.add_decl(Box::new(MetadataSet {}));
+
+            working_set.render()
+        };
+
+        engine_state
+            .merge_delta(delta)
+            .expect("Error merging delta");
+
+        let cmd = r#""a\tb\n1\t2" | metadata set --content-type 'text/tab-separated-values' --datasource-ls | from tsv | metadata | $in"#;
+        let result = eval_pipeline_without_terminal_expression(
+            cmd,
+            std::env::temp_dir().as_ref(),
+            &mut engine_state,
+        );
+        assert_eq!(
+            Value::test_record(record!("source" => Value::test_string("ls"))),
+            result.expect("There should be a result")
+        )
+    }
 }
@@ -47,7 +47,8 @@ impl Command for FromXlsx {
             vec![]
         };

-        from_xlsx(input, head, sel_sheets)
+        let metadata = input.metadata().map(|md| md.with_content_type(None));
+        from_xlsx(input, head, sel_sheets).map(|pd| pd.set_metadata(metadata))
     }

     fn examples(&self) -> Vec<Example> {
@@ -206,7 +206,9 @@ fn from_xml(input: PipelineData, info: &ParsingInfo) -> Result<PipelineData, She
     let (concat_string, span, metadata) = input.collect_string_strict(info.span)?;

     match from_xml_string_to_value(&concat_string, info) {
-        Ok(x) => Ok(x.into_pipeline_data_with_metadata(metadata)),
+        Ok(x) => {
+            Ok(x.into_pipeline_data_with_metadata(metadata.map(|md| md.with_content_type(None))))
+        }
         Err(err) => Err(process_xml_parse_error(err, span)),
     }
 }
@@ -322,10 +324,14 @@ fn make_cant_convert_error(help: impl Into<String>, span: Span) -> ShellError {

 #[cfg(test)]
 mod tests {
+    use crate::Metadata;
+    use crate::MetadataSet;
+
     use super::*;

     use indexmap::indexmap;
     use indexmap::IndexMap;
+    use nu_cmd_lang::eval_pipeline_without_terminal_expression;

     fn string(input: impl Into<String>) -> Value {
         Value::test_string(input)
@@ -480,4 +486,36 @@ mod tests {

         test_examples(FromXml {})
     }
+
+    #[test]
+    fn test_content_type_metadata() {
+        let mut engine_state = Box::new(EngineState::new());
+        let delta = {
+            let mut working_set = StateWorkingSet::new(&engine_state);
+
+            working_set.add_decl(Box::new(FromXml {}));
+            working_set.add_decl(Box::new(Metadata {}));
+            working_set.add_decl(Box::new(MetadataSet {}));
+
+            working_set.render()
+        };
+
+        engine_state
+            .merge_delta(delta)
+            .expect("Error merging delta");
+
+        let cmd = r#"'<?xml version="1.0" encoding="UTF-8"?>
+<note>
+<remember>Event</remember>
+</note>' | metadata set --content-type 'application/xml' --datasource-ls | from xml | metadata | $in"#;
+        let result = eval_pipeline_without_terminal_expression(
+            cmd,
+            std::env::temp_dir().as_ref(),
+            &mut engine_state,
+        );
+        assert_eq!(
+            Value::test_record(record!("source" => Value::test_string("ls"))),
+            result.expect("There should be a result")
+        )
+    }
 }
@@ -235,14 +235,19 @@ fn from_yaml(input: PipelineData, head: Span) -> Result<PipelineData, ShellError
     let (concat_string, span, metadata) = input.collect_string_strict(head)?;

     match from_yaml_string_to_value(&concat_string, head, span) {
-        Ok(x) => Ok(x.into_pipeline_data_with_metadata(metadata)),
+        Ok(x) => {
+            Ok(x.into_pipeline_data_with_metadata(metadata.map(|md| md.with_content_type(None))))
+        }
         Err(other) => Err(other),
     }
 }

 #[cfg(test)]
 mod test {
+    use crate::{Metadata, MetadataSet};
+
     use super::*;
+    use nu_cmd_lang::eval_pipeline_without_terminal_expression;
     use nu_protocol::Config;

     #[test]
@@ -395,4 +400,33 @@ mod test {
             assert!(result.ok().unwrap() == test_case.expected.ok().unwrap());
         }
     }
+
+    #[test]
+    fn test_content_type_metadata() {
+        let mut engine_state = Box::new(EngineState::new());
+        let delta = {
+            let mut working_set = StateWorkingSet::new(&engine_state);
+
+            working_set.add_decl(Box::new(FromYaml {}));
+            working_set.add_decl(Box::new(Metadata {}));
+            working_set.add_decl(Box::new(MetadataSet {}));
+
+            working_set.render()
+        };
+
+        engine_state
+            .merge_delta(delta)
+            .expect("Error merging delta");
+
+        let cmd = r#""a: 1\nb: 2" | metadata set --content-type 'application/yaml' --datasource-ls | from yaml | metadata | $in"#;
+        let result = eval_pipeline_without_terminal_expression(
+            cmd,
+            std::env::temp_dir().as_ref(),
+            &mut engine_state,
+        );
+        assert_eq!(
+            Value::test_record(record!("source" => Value::test_string("ls"))),
+            result.expect("There should be a result")
+        )
+    }
 }
@@ -109,7 +109,7 @@ pub fn value_to_json_value(v: &Value) -> Result<nu_json::Value, ShellError> {
     let span = v.span();
     Ok(match v {
         Value::Bool { val, .. } => nu_json::Value::Bool(*val),
-        Value::Filesize { val, .. } => nu_json::Value::I64(*val),
+        Value::Filesize { val, .. } => nu_json::Value::I64(val.get()),
         Value::Duration { val, .. } => nu_json::Value::I64(*val),
         Value::Date { val, .. } => nu_json::Value::String(val.to_string()),
         Value::Float { val, .. } => nu_json::Value::F64(*val),

@@ -168,7 +168,7 @@ pub(crate) fn write_value(
             mp::write_f64(out, *val).err_span(span)?;
         }
         Value::Filesize { val, .. } => {
-            mp::write_sint(out, *val).err_span(span)?;
+            mp::write_sint(out, val.get()).err_span(span)?;
         }
         Value::Duration { val, .. } => {
             mp::write_sint(out, *val).err_span(span)?;

@@ -47,7 +47,7 @@ fn helper(engine_state: &EngineState, v: &Value) -> Result<toml::Value, ShellErr
     Ok(match &v {
         Value::Bool { val, .. } => toml::Value::Boolean(*val),
         Value::Int { val, .. } => toml::Value::Integer(*val),
-        Value::Filesize { val, .. } => toml::Value::Integer(*val),
+        Value::Filesize { val, .. } => toml::Value::Integer(val.get()),
         Value::Duration { val, .. } => toml::Value::String(val.to_string()),
         Value::Date { val, .. } => toml::Value::Datetime(to_toml_datetime(val)),
         Value::Range { .. } => toml::Value::String("<Range>".to_string()),

@@ -44,7 +44,9 @@ pub fn value_to_yaml_value(v: &Value) -> Result<serde_yaml::Value, ShellError> {
     Ok(match &v {
         Value::Bool { val, .. } => serde_yaml::Value::Bool(*val),
         Value::Int { val, .. } => serde_yaml::Value::Number(serde_yaml::Number::from(*val)),
-        Value::Filesize { val, .. } => serde_yaml::Value::Number(serde_yaml::Number::from(*val)),
+        Value::Filesize { val, .. } => {
+            serde_yaml::Value::Number(serde_yaml::Number::from(val.get()))
+        }
         Value::Duration { val, .. } => serde_yaml::Value::String(val.to_string()),
        Value::Date { val, .. } => serde_yaml::Value::String(val.to_string()),
         Value::Range { .. } => serde_yaml::Value::Null,
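
Not part of the diff: the four `to *` hunks above all make the same mechanical change, reading the filesize's inner integer through an accessor instead of dereferencing. A toy sketch of that kind of newtype wrapper; the real type lives in nu-protocol and is more elaborate.

// Toy stand-in for a Filesize-like newtype: the raw i64 is private, so callers
// go through `get()` rather than `*val`, which is the change in the serializers above.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct Filesize(i64);

impl Filesize {
    fn new(bytes: i64) -> Self {
        Filesize(bytes)
    }

    fn get(&self) -> i64 {
        self.0
    }
}

fn main() {
    let size = Filesize::new(4096);
    // A serializer would now write `size.get()` rather than dereferencing.
    let as_number: i64 = size.get();
    assert_eq!(as_number, 4096);
    println!("{size:?} -> {as_number}");
}
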
@@ -87,21 +87,10 @@ pub fn help_commands(
             name.push_str(&r.item);
         }

-        let output = engine_state
-            .get_decls_sorted(false)
-            .into_iter()
-            .filter_map(|(_, decl_id)| {
-                let decl = engine_state.get_decl(decl_id);
-                (decl.name() == name).then_some(decl)
-            })
-            .map(|cmd| get_full_help(cmd, engine_state, stack))
-            .collect::<Vec<String>>();
-
-        if !output.is_empty() {
-            Ok(
-                Value::string(output.join("======================\n\n"), call.head)
-                    .into_pipeline_data(),
-            )
+        if let Some(decl) = engine_state.find_decl(name.as_bytes(), &[]) {
+            let cmd = engine_state.get_decl(decl);
+            let help_text = get_full_help(cmd, engine_state, stack);
+            Ok(Value::string(help_text, call.head).into_pipeline_data())
         } else {
             Err(ShellError::CommandNotFound {
                 span: Span::merge_many(rest.iter().map(|s| s.span)),

@@ -107,21 +107,10 @@ pub fn help_externs(
             name.push_str(&r.item);
         }

-        let output = engine_state
-            .get_decls_sorted(false)
-            .into_iter()
-            .filter_map(|(_, decl_id)| {
-                let decl = engine_state.get_decl(decl_id);
-                (decl.name() == name).then_some(decl)
-            })
-            .map(|cmd| get_full_help(cmd, engine_state, stack))
-            .collect::<Vec<String>>();
-
-        if !output.is_empty() {
-            Ok(
-                Value::string(output.join("======================\n\n"), call.head)
-                    .into_pipeline_data(),
-            )
+        if let Some(decl) = engine_state.find_decl(name.as_bytes(), &[]) {
+            let cmd = engine_state.get_decl(decl);
+            let help_text = get_full_help(cmd, engine_state, stack);
+            Ok(Value::string(help_text, call.head).into_pipeline_data())
         } else {
             Err(ShellError::CommandNotFound {
                 span: Span::merge_many(rest.iter().map(|s| s.span)),
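
Not part of the diff: the two help hunks above replace a scan over every declaration with a direct name lookup. A minimal sketch of that shape of change, using a plain HashMap in place of the engine's declaration index (names here are illustrative, not the real EngineState API):

use std::collections::HashMap;

// Ask the index for the name directly and fall back to a "not found" error,
// instead of filtering the full declaration list.
fn full_help(commands: &HashMap<String, String>, name: &str) -> Result<String, String> {
    match commands.get(name) {
        Some(description) => Ok(format!("{name}: {description}")),
        None => Err(format!("command not found: {name}")),
    }
}

fn main() {
    let mut commands = HashMap::new();
    commands.insert("length".to_string(), "Count the number of items.".to_string());

    println!("{:?}", full_help(&commands, "length"));
    println!("{:?}", full_help(&commands, "does-not-exist"));
}
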
Some files were not shown because too many files have changed in this diff.