mirror of https://github.com/nushell/nushell (synced 2025-01-12 21:29:07 +00:00)

Merge branch 'continue-PWD-per-drive' of github.com:PegasusPlusUS/nushell into continue-PWD-per-drive

Commit 628497d0e3: 45 changed files with 1227 additions and 996 deletions
Cargo.lock (generated), 564 changed lines: file diff suppressed because it is too large.
@@ -25,7 +25,7 @@ impl Command for EachWhile {
)])
.required(
"closure",
SyntaxShape::Closure(Some(vec![SyntaxShape::Any, SyntaxShape::Int])),
SyntaxShape::Closure(Some(vec![SyntaxShape::Any])),
"the closure to run",
)
.category(Category::Filters)
@@ -144,18 +144,24 @@ pub(crate) fn get_plugin_dirs(
engine_state: &EngineState,
stack: &Stack,
) -> impl Iterator<Item = String> {
// Get the NU_PLUGIN_DIRS constant or env var
// Get the NU_PLUGIN_DIRS from the constant and/or env var
let working_set = StateWorkingSet::new(engine_state);
let value = working_set
let dirs_from_const = working_set
.find_variable(b"$NU_PLUGIN_DIRS")
.and_then(|var_id| working_set.get_constant(var_id).ok())
.or_else(|| stack.get_env_var(engine_state, "NU_PLUGIN_DIRS"))
.cloned(); // TODO: avoid this clone

// Get all of the strings in the list, if possible
value
.cloned() // TODO: avoid this clone
.into_iter()
.flat_map(|value| value.into_list().ok())
.flatten()
.flat_map(|list_item| list_item.coerce_into_string().ok())
.flat_map(|list_item| list_item.coerce_into_string().ok());

let dirs_from_env = stack
.get_env_var(engine_state, "NU_PLUGIN_DIRS")
.cloned() // TODO: avoid this clone
.into_iter()
.flat_map(|value| value.into_list().ok())
.flatten()
.flat_map(|list_item| list_item.coerce_into_string().ok());

dirs_from_const.chain(dirs_from_env)
}
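For reference, a minimal Nushell sketch of the lookup order this change produces; the directory names below are placeholders, not part of the diff:

    const NU_PLUGIN_DIRS = [($nu.default-config-dir | path join 'plugins')]
    $env.NU_PLUGIN_DIRS = ['/opt/extra-plugins']   # hypothetical additional location
    plugin add nu_plugin_example                   # a relative name is looked up in the constant entries first, then the env var entries (illustrative)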
@@ -1,4 +1,5 @@
use nu_engine::{command_prelude::*, get_eval_block, get_eval_expression_with_input};
use nu_engine::{command_prelude::*, ClosureEvalOnce};
use nu_protocol::engine::Closure;
use std::time::Instant;

#[derive(Clone)]
@@ -10,16 +11,18 @@ impl Command for TimeIt {
}

fn description(&self) -> &str {
"Time the running time of a block."
"Time how long it takes a closure to run."
}

fn extra_description(&self) -> &str {
"Any pipeline input given to this command is passed to the closure. Note that streaming inputs may affect timing results, and it is recommended to add a `collect` command before this if the input is a stream.

This command will bubble up any errors encountered when running the closure. The return pipeline of the closure is collected into a value and then discarded."
}

fn signature(&self) -> nu_protocol::Signature {
Signature::build("timeit")
.required(
"command",
SyntaxShape::OneOf(vec![SyntaxShape::Block, SyntaxShape::Expression]),
"The command or block to run.",
)
.required("command", SyntaxShape::Closure(None), "The closure to run.")
.input_output_types(vec![
(Type::Any, Type::Duration),
(Type::Nothing, Type::Duration),
@@ -46,51 +49,38 @@ impl Command for TimeIt {
// reset outdest, so the command can write to stdout and stderr.
let stack = &mut stack.push_redirection(None, None);

let command_to_run = call.positional_nth(stack, 0);
let closure: Closure = call.req(engine_state, stack, 0)?;
let closure = ClosureEvalOnce::new_preserve_out_dest(engine_state, stack, closure);

// Get the start time after all other computation has been done.
let start_time = Instant::now();
closure.run_with_input(input)?.into_value(call.head)?;
let time = start_time.elapsed();

if let Some(command_to_run) = command_to_run {
if let Some(block_id) = command_to_run.as_block() {
let eval_block = get_eval_block(engine_state);
let block = engine_state.get_block(block_id);
eval_block(engine_state, stack, block, input)?
} else {
let eval_expression_with_input = get_eval_expression_with_input(engine_state);
let expression = &command_to_run.clone();
eval_expression_with_input(engine_state, stack, expression, input)?
}
} else {
PipelineData::empty()
}
.into_value(call.head)?;

let end_time = Instant::now();

let output = Value::duration(
end_time.saturating_duration_since(start_time).as_nanos() as i64,
call.head,
);

let output = Value::duration(time.as_nanos() as i64, call.head);
Ok(output.into_pipeline_data())
}

fn examples(&self) -> Vec<Example> {
vec![
Example {
description: "Times a command within a closure",
description: "Time a closure containing one command",
example: "timeit { sleep 500ms }",
result: None,
},
Example {
description: "Times a command using an existing input",
example: "http get https://www.nushell.sh/book/ | timeit { split chars }",
description: "Time a closure with an input value",
example: "'A really long string' | timeit { split chars }",
result: None,
},
Example {
description: "Times a command invocation",
example: "timeit ls -la",
description: "Time a closure with an input stream",
example: "open some_file.txt | collect | timeit { split chars }",
result: None,
},
Example {
description: "Time a closure containing a pipeline",
example: "timeit { open some_file.txt | split chars }",
result: None,
},
]
@@ -12,8 +12,8 @@ pub struct Du;
#[derive(Deserialize, Clone, Debug)]
pub struct DuArgs {
path: Option<Spanned<NuGlob>>,
all: bool,
deref: bool,
long: bool,
exclude: Option<Spanned<NuGlob>>,
#[serde(rename = "max-depth")]
max_depth: Option<Spanned<i64>>,
@@ -49,6 +49,11 @@ impl Command for Du {
"Dereference symlinks to their targets for size",
Some('r'),
)
.switch(
"long",
"Get underlying directories and files for each entry",
Some('l'),
)
.named(
"exclude",
SyntaxShape::GlobPattern,
@@ -94,8 +99,8 @@ impl Command for Du {
});
}
}
let all = call.has_flag(engine_state, stack, "all")?;
let deref = call.has_flag(engine_state, stack, "deref")?;
let long = call.has_flag(engine_state, stack, "long")?;
let exclude = call.get_flag(engine_state, stack, "exclude")?;
#[allow(deprecated)]
let current_dir = current_dir(engine_state, stack)?;
@@ -111,8 +116,8 @@ impl Command for Du {
None => {
let args = DuArgs {
path: None,
all,
deref,
long,
exclude,
max_depth,
min_size,
@@ -127,8 +132,8 @@ impl Command for Du {
for p in paths {
let args = DuArgs {
path: Some(p),
all,
deref,
long,
exclude: exclude.clone(),
max_depth,
min_size,
@@ -174,7 +179,6 @@ fn du_for_one_pattern(
})
})?;

let include_files = args.all;
let mut paths = match args.path {
Some(p) => nu_engine::glob_from(&p, current_dir, span, None),
// The * pattern should never fail.
@@ -188,17 +192,10 @@ fn du_for_one_pattern(
None,
),
}
.map(|f| f.1)?
.filter(move |p| {
if include_files {
true
} else {
matches!(p, Ok(f) if f.is_dir())
}
});
.map(|f| f.1)?;

let all = args.all;
let deref = args.deref;
let long = args.long;
let max_depth = args.max_depth.map(|f| f.item as u64);
let min_size = args.min_size.map(|f| f.item as u64);

@@ -207,7 +204,7 @@ fn du_for_one_pattern(
min: min_size,
deref,
exclude,
all,
long,
};

let mut output: Vec<Value> = vec![];
@@ -216,7 +213,7 @@ fn du_for_one_pattern(
Ok(a) => {
if a.is_dir() {
output.push(DirInfo::new(a, &params, max_depth, span, signals)?.into());
} else if let Ok(v) = FileInfo::new(a, deref, span) {
} else if let Ok(v) = FileInfo::new(a, deref, span, params.long) {
output.push(v.into());
}
}
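As a quick illustration of the new --long switch (matching the du test added further down in this diff):

    du          # columns: path, apparent, physical
    du --long   # additionally reports the directories and files columns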
@@ -14,7 +14,7 @@ impl Command for All {
.input_output_types(vec![(Type::List(Box::new(Type::Any)), Type::Bool)])
.required(
"predicate",
SyntaxShape::Closure(Some(vec![SyntaxShape::Any, SyntaxShape::Int])),
SyntaxShape::Closure(Some(vec![SyntaxShape::Any])),
"A closure that must evaluate to a boolean.",
)
.category(Category::Filters)
@@ -14,7 +14,7 @@ impl Command for Any {
.input_output_types(vec![(Type::List(Box::new(Type::Any)), Type::Bool)])
.required(
"predicate",
SyntaxShape::Closure(Some(vec![SyntaxShape::Any, SyntaxShape::Int])),
SyntaxShape::Closure(Some(vec![SyntaxShape::Any])),
"A closure that must evaluate to a boolean.",
)
.category(Category::Filters)
@@ -30,7 +30,7 @@ a variable. On the other hand, the "row condition" syntax is not supported."#
])
.required(
"closure",
SyntaxShape::Closure(Some(vec![SyntaxShape::Any, SyntaxShape::Int])),
SyntaxShape::Closure(Some(vec![SyntaxShape::Any])),
"Predicate closure.",
)
.category(Category::Filters)
@@ -38,7 +38,7 @@ impl Command for ParEach {
)
.required(
"closure",
SyntaxShape::Closure(Some(vec![SyntaxShape::Any, SyntaxShape::Int])),
SyntaxShape::Closure(Some(vec![SyntaxShape::Any])),
"The closure to run.",
)
.allow_variants_without_examples(true)
@@ -24,11 +24,7 @@ impl Command for Reduce {
)
.required(
"closure",
SyntaxShape::Closure(Some(vec![
SyntaxShape::Any,
SyntaxShape::Any,
SyntaxShape::Int,
])),
SyntaxShape::Closure(Some(vec![SyntaxShape::Any, SyntaxShape::Any])),
"Reducing function.",
)
.allow_variants_without_examples(true)
@@ -88,6 +84,15 @@ impl Command for Reduce {
"Concatenate a string with itself, using a range to determine the number of times.",
result: Some(Value::test_string("StrStrStr")),
},
Example {
example: r#"[{a: 1} {b: 2} {c: 3}] | reduce {|it| merge $it}"#,
description: "Merge multiple records together, making use of the fact that the accumulated value is also supplied as pipeline input to the closure.",
result: Some(Value::test_record(record!(
"a" => Value::test_int(1),
"b" => Value::test_int(2),
"c" => Value::test_int(3),
))),
}
]
}

@@ -135,8 +140,8 @@ mod test {

#[test]
fn test_examples() {
use crate::test_examples;
use crate::{test_examples_with_commands, Merge};

test_examples(Reduce {})
test_examples_with_commands(Reduce {}, &[&Merge])
}
}
@@ -20,7 +20,7 @@ impl Command for SkipUntil {
])
.required(
"predicate",
SyntaxShape::Closure(Some(vec![SyntaxShape::Any, SyntaxShape::Int])),
SyntaxShape::Closure(Some(vec![SyntaxShape::Any])),
"The predicate that skipped element must not match.",
)
.category(Category::Filters)
@@ -20,7 +20,7 @@ impl Command for SkipWhile {
])
.required(
"predicate",
SyntaxShape::Closure(Some(vec![SyntaxShape::Any, SyntaxShape::Int])),
SyntaxShape::Closure(Some(vec![SyntaxShape::Any])),
"The predicate that skipped element must match.",
)
.category(Category::Filters)
@@ -17,7 +17,7 @@ impl Command for TakeUntil {
)])
.required(
"predicate",
SyntaxShape::Closure(Some(vec![SyntaxShape::Any, SyntaxShape::Int])),
SyntaxShape::Closure(Some(vec![SyntaxShape::Any])),
"The predicate that element(s) must not match.",
)
.category(Category::Filters)
@@ -20,7 +20,7 @@ impl Command for TakeWhile {
])
.required(
"predicate",
SyntaxShape::Closure(Some(vec![SyntaxShape::Any, SyntaxShape::Int])),
SyntaxShape::Closure(Some(vec![SyntaxShape::Any])),
"The predicate that element(s) must match.",
)
.category(Category::Filters)
@@ -87,21 +87,10 @@ pub fn help_commands(
name.push_str(&r.item);
}

let output = engine_state
.get_decls_sorted(false)
.into_iter()
.filter_map(|(_, decl_id)| {
let decl = engine_state.get_decl(decl_id);
(decl.name() == name).then_some(decl)
})
.map(|cmd| get_full_help(cmd, engine_state, stack))
.collect::<Vec<String>>();

if !output.is_empty() {
Ok(
Value::string(output.join("======================\n\n"), call.head)
.into_pipeline_data(),
)
if let Some(decl) = engine_state.find_decl(name.as_bytes(), &[]) {
let cmd = engine_state.get_decl(decl);
let help_text = get_full_help(cmd, engine_state, stack);
Ok(Value::string(help_text, call.head).into_pipeline_data())
} else {
Err(ShellError::CommandNotFound {
span: Span::merge_many(rest.iter().map(|s| s.span)),
@@ -107,21 +107,10 @@ pub fn help_externs(
name.push_str(&r.item);
}

let output = engine_state
.get_decls_sorted(false)
.into_iter()
.filter_map(|(_, decl_id)| {
let decl = engine_state.get_decl(decl_id);
(decl.name() == name).then_some(decl)
})
.map(|cmd| get_full_help(cmd, engine_state, stack))
.collect::<Vec<String>>();

if !output.is_empty() {
Ok(
Value::string(output.join("======================\n\n"), call.head)
.into_pipeline_data(),
)
if let Some(decl) = engine_state.find_decl(name.as_bytes(), &[]) {
let cmd = engine_state.get_decl(decl);
let help_text = get_full_help(cmd, engine_state, stack);
Ok(Value::string(help_text, call.head).into_pipeline_data())
} else {
Err(ShellError::CommandNotFound {
span: Span::merge_many(rest.iter().map(|s| s.span)),
@@ -9,7 +9,7 @@ pub struct DirBuilder {
pub min: Option<u64>,
pub deref: bool,
pub exclude: Option<Pattern>,
pub all: bool,
pub long: bool,
}

impl DirBuilder {
@@ -18,14 +18,14 @@ impl DirBuilder {
min: Option<u64>,
deref: bool,
exclude: Option<Pattern>,
all: bool,
long: bool,
) -> DirBuilder {
DirBuilder {
tag,
min,
deref,
exclude,
all,
long,
}
}
}
@@ -39,6 +39,7 @@ pub struct DirInfo {
blocks: u64,
path: PathBuf,
tag: Span,
long: bool,
}

#[derive(Debug, Clone)]
@@ -47,10 +48,16 @@ pub struct FileInfo {
size: u64,
blocks: Option<u64>,
tag: Span,
long: bool,
}

impl FileInfo {
pub fn new(path: impl Into<PathBuf>, deref: bool, tag: Span) -> Result<Self, ShellError> {
pub fn new(
path: impl Into<PathBuf>,
deref: bool,
tag: Span,
long: bool,
) -> Result<Self, ShellError> {
let path = path.into();
let m = if deref {
std::fs::metadata(&path)
@@ -67,6 +74,7 @@ impl FileInfo {
blocks: block_size,
size: d.len(),
tag,
long,
})
}
Err(e) => Err(e.into()),
@@ -92,6 +100,7 @@ impl DirInfo {
blocks: 0,
tag: params.tag,
path,
long: params.long,
};

match std::fs::metadata(&s.path) {
@@ -154,13 +163,13 @@ impl DirInfo {
.as_ref()
.map_or(true, |x| !x.matches_path(&f));
if include {
match FileInfo::new(f, params.deref, self.tag) {
match FileInfo::new(f, params.deref, self.tag, self.long) {
Ok(file) => {
let inc = params.min.map_or(true, |s| file.size >= s);
if inc {
self.size += file.size;
self.blocks += file.blocks.unwrap_or(0);
if params.all {
if params.long {
self.files.push(file);
}
}
@@ -197,16 +206,27 @@ impl From<DirInfo> for Value {
// })
// }

Value::record(
record! {
"path" => Value::string(d.path.display().to_string(), d.tag),
"apparent" => Value::filesize(d.size as i64, d.tag),
"physical" => Value::filesize(d.blocks as i64, d.tag),
"directories" => value_from_vec(d.dirs, d.tag),
"files" => value_from_vec(d.files, d.tag)
},
d.tag,
)
if d.long {
Value::record(
record! {
"path" => Value::string(d.path.display().to_string(), d.tag),
"apparent" => Value::filesize(d.size as i64, d.tag),
"physical" => Value::filesize(d.blocks as i64, d.tag),
"directories" => value_from_vec(d.dirs, d.tag),
"files" => value_from_vec(d.files, d.tag)
},
d.tag,
)
} else {
Value::record(
record! {
"path" => Value::string(d.path.display().to_string(), d.tag),
"apparent" => Value::filesize(d.size as i64, d.tag),
"physical" => Value::filesize(d.blocks as i64, d.tag),
},
d.tag,
)
}
}
}

@@ -215,16 +235,27 @@ impl From<FileInfo> for Value {
// cols.push("errors".into());
// vals.push(Value::nothing(Span::unknown()));

Value::record(
record! {
"path" => Value::string(f.path.display().to_string(), f.tag),
"apparent" => Value::filesize(f.size as i64, f.tag),
"physical" => Value::filesize(f.blocks.unwrap_or(0) as i64, f.tag),
"directories" => Value::nothing(Span::unknown()),
"files" => Value::nothing(Span::unknown()),
},
f.tag,
)
if f.long {
Value::record(
record! {
"path" => Value::string(f.path.display().to_string(), f.tag),
"apparent" => Value::filesize(f.size as i64, f.tag),
"physical" => Value::filesize(f.blocks.unwrap_or(0) as i64, f.tag),
"directories" => Value::nothing(Span::unknown()),
"files" => Value::nothing(Span::unknown()),
},
f.tag,
)
} else {
Value::record(
record! {
"path" => Value::string(f.path.display().to_string(), f.tag),
"apparent" => Value::filesize(f.size as i64, f.tag),
"physical" => Value::filesize(f.blocks.unwrap_or(0) as i64, f.tag),
},
f.tag,
)
}
}
}
@@ -125,6 +125,7 @@ impl Command for Table {
let val = Value::list(supported_table_modes(), Span::test_data());
return Ok(val.into_pipeline_data());
}
#[cfg(feature = "os")]
let cwd = engine_state.cwd(Some(stack))?;
let cfg = parse_table_config(call, engine_state, stack)?;
let input = CmdInput::new(engine_state, stack, call, input);
@@ -135,7 +136,12 @@ impl Command for Table {
let _ = nu_utils::enable_vt_processing();
}

handle_table_command(input, cfg, cwd)
handle_table_command(
input,
cfg,
#[cfg(feature = "os")]
cwd,
)
}

fn examples(&self) -> Vec<Example> {
@@ -367,7 +373,7 @@ impl<'a> CmdInput<'a> {
fn handle_table_command(
mut input: CmdInput<'_>,
cfg: TableConfig,
cwd: nu_path::PathBuf<Absolute>,
#[cfg(feature = "os")] cwd: nu_path::PathBuf<Absolute>,
) -> Result<PipelineData, ShellError> {
let span = input.data.span().unwrap_or(input.call.head);
match input.data {
@@ -390,11 +396,25 @@ fn handle_table_command(
let stream = ListStream::new(vals.into_iter(), span, signals);
input.data = PipelineData::Empty;

handle_row_stream(input, cfg, stream, metadata, cwd)
handle_row_stream(
input,
cfg,
stream,
metadata,
#[cfg(feature = "os")]
cwd,
)
}
PipelineData::ListStream(stream, metadata) => {
input.data = PipelineData::Empty;
handle_row_stream(input, cfg, stream, metadata, cwd)
handle_row_stream(
input,
cfg,
stream,
metadata,
#[cfg(feature = "os")]
cwd,
)
}
PipelineData::Value(Value::Record { val, .. }, ..) => {
input.data = PipelineData::Empty;
@@ -414,7 +434,14 @@ fn handle_table_command(
let stream =
ListStream::new(val.into_range_iter(span, Signals::empty()), span, signals);
input.data = PipelineData::Empty;
handle_row_stream(input, cfg, stream, metadata, cwd)
handle_row_stream(
input,
cfg,
stream,
metadata,
#[cfg(feature = "os")]
cwd,
)
}
x => Ok(x),
}
@@ -606,7 +633,7 @@ fn handle_row_stream(
cfg: TableConfig,
stream: ListStream,
metadata: Option<PipelineMetadata>,
cwd: nu_path::PathBuf<Absolute>,
#[cfg(feature = "os")] cwd: nu_path::PathBuf<Absolute>,
) -> Result<PipelineData, ShellError> {
let stream = match metadata.as_ref() {
// First, `ls` sources:
@@ -636,9 +663,14 @@ fn handle_row_stream(
if let Some(value) = record.to_mut().get_mut("name") {
let span = value.span();
if let Value::String { val, .. } = value {
if let Some(val) =
render_path_name(val, &config, &ls_colors, cwd.clone(), span)
{
if let Some(val) = render_path_name(
val,
&config,
&ls_colors,
#[cfg(feature = "os")]
cwd.clone(),
span,
) {
*value = val;
}
}
@@ -1031,14 +1063,18 @@ fn render_path_name(
path: &str,
config: &Config,
ls_colors: &LsColors,
cwd: nu_path::PathBuf<Absolute>,
#[cfg(feature = "os")] cwd: nu_path::PathBuf<Absolute>,
span: Span,
) -> Option<Value> {
if !config.ls.use_ls_colors {
return None;
}

#[cfg(feature = "os")]
let fullpath = cwd.join(path);
#[cfg(not(feature = "os"))]
let fullpath = path;

let stripped_path = nu_utils::strip_ansi_unlikely(path);
let metadata = std::fs::symlink_metadata(fullpath);
let has_metadata = metadata.is_ok();
|
|||
|
||||
#[test]
|
||||
fn timeit_show_stdout() {
|
||||
let actual = nu!("let t = timeit nu --testbin cococo abcdefg");
|
||||
let actual = nu!("let t = timeit { nu --testbin cococo abcdefg }");
|
||||
assert_eq!(actual.out, "abcdefg");
|
||||
}
|
||||
|
||||
|
|
|
@@ -100,3 +100,17 @@ fn du_with_multiple_path() {
let actual = nu!(cwd: "tests/fixtures", "du ...[] | length");
assert_eq!(actual.out, "0");
}

#[test]
fn test_du_output_columns() {
let actual = nu!(
cwd: "tests/fixtures/formats",
"du -m 1 | columns | str join ','"
);
assert_eq!(actual.out, "path,apparent,physical");
let actual = nu!(
cwd: "tests/fixtures/formats",
"du -m 1 -l | columns | str join ','"
);
assert_eq!(actual.out, "path,apparent,physical,directories,files");
}
@@ -140,6 +140,15 @@ fn match_constant_7() {
assert_eq!(actual.out, "success");
}

#[test]
fn match_constant_8() {
let actual =
nu!(r#"match "foo" { r#'foo'# => { print "success" }, _ => { print "failure" } }"#);
// Make sure we don't see any of these values in the output
// As we do not auto-print loops anymore
assert_eq!(actual.out, "success");
}

#[test]
fn match_null() {
let actual = nu!(r#"match null { null => { print "success"}, _ => { print "failure" }}"#);
@@ -88,6 +88,29 @@ impl ClosureEval {
}
}

pub fn new_preserve_out_dest(
engine_state: &EngineState,
stack: &Stack,
closure: Closure,
) -> Self {
let engine_state = engine_state.clone();
let stack = stack.captures_to_stack_preserve_out_dest(closure.captures);
let block = engine_state.get_block(closure.block_id).clone();
let env_vars = stack.env_vars.clone();
let env_hidden = stack.env_hidden.clone();
let eval = get_eval_block_with_early_return(&engine_state);

Self {
engine_state,
stack,
block,
arg_index: 0,
env_vars,
env_hidden,
eval,
}
}

/// Sets whether to enable debugging when evaluating the closure.
///
/// By default, this is controlled by the [`EngineState`] used to create this [`ClosureEval`].
@@ -189,6 +212,22 @@ impl<'a> ClosureEvalOnce<'a> {
}
}

pub fn new_preserve_out_dest(
engine_state: &'a EngineState,
stack: &Stack,
closure: Closure,
) -> Self {
let block = engine_state.get_block(closure.block_id);
let eval = get_eval_block_with_early_return(engine_state);
Self {
engine_state,
stack: stack.captures_to_stack_preserve_out_dest(closure.captures),
block,
arg_index: 0,
eval,
}
}

/// Sets whether to enable debugging when evaluating the closure.
///
/// By default, this is controlled by the [`EngineState`] used to create this [`ClosureEvalOnce`].
@@ -20,12 +20,11 @@ pub(crate) fn compile_call(

// Check if this call has --help - if so, just redirect to `help`
if call.named_iter().any(|(name, _, _)| name.item == "help") {
return compile_help(
working_set,
builder,
decl.name().into_spanned(call.head),
io_reg,
);
let name = working_set
.find_decl_name(call.decl_id) // check for name in scope
.and_then(|name| std::str::from_utf8(name).ok())
.unwrap_or(decl.name()); // fall back to decl's name
return compile_help(working_set, builder, name.into_spanned(call.head), io_reg);
}

// Try to figure out if this is a keyword call like `if`, and handle those specially
@@ -33,14 +33,20 @@ pub fn convert_env_values(engine_state: &mut EngineState, stack: &Stack) -> Resu
let env_vars = engine_state.render_env_vars();

for (name, val) in env_vars {
match get_converted_value(engine_state, stack, name, val, "from_string") {
ConversionResult::Ok(v) => {
let _ = new_scope.insert(name.to_string(), v);
}
ConversionResult::ConversionError(e) => error = error.or(Some(e)),
ConversionResult::CellPathError => {
let _ = new_scope.insert(name.to_string(), val.clone());
if let Value::String { .. } = val {
// Only run from_string on string values
match get_converted_value(engine_state, stack, name, val, "from_string") {
ConversionResult::Ok(v) => {
let _ = new_scope.insert(name.to_string(), v);
}
ConversionResult::ConversionError(e) => error = error.or(Some(e)),
ConversionResult::CellPathError => {
let _ = new_scope.insert(name.to_string(), val.clone());
}
}
} else {
// Skip values that are already converted (not a string)
let _ = new_scope.insert(name.to_string(), val.clone());
}
}
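A small Nushell sketch of the behavior this guards, using a hypothetical FOO variable: only a FOO that is still a string at startup is run through from_string, while an already-converted (non-string) value is inserted unchanged.

    $env.ENV_CONVERSIONS = {
        "FOO": {
            from_string: { |s| $s | split row (char esep) }
            to_string: { |v| $v | str join (char esep) }
        }
    }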
@@ -56,11 +56,12 @@ impl<'e, 's> ScopeData<'e, 's> {
let var_type = Value::string(var.ty.to_string(), span);
let is_const = Value::bool(var.const_val.is_some(), span);

let var_value = if let Ok(val) = self.stack.get_var(**var_id, span) {
val
} else {
Value::nothing(span)
};
let var_value = self
.stack
.get_var(**var_id, span)
.ok()
.or(var.const_val.clone())
.unwrap_or(Value::nothing(span));

let var_id_val = Value::int(var_id.get() as i64, span);
@@ -51,7 +51,7 @@ impl BlockKind {
}

// A baseline token is terminated if it's not nested inside of a paired
// delimiter and the next character is one of: `|`, `;`, `#` or any
// delimiter and the next character is one of: `|`, `;` or any
// whitespace.
fn is_item_terminator(
block_level: &[BlockKind],
@@ -115,6 +115,7 @@ pub fn lex_item(
// character (whitespace, `|`, `;` or `#`) is encountered, the baseline
// token is done.
// - Otherwise, accumulate the character into the current baseline token.
let mut previous_char = None;
while let Some(c) = input.get(*curr_offset) {
let c = *c;

@@ -147,11 +148,9 @@ pub fn lex_item(
// Also need to check to make sure we aren't escaped
quote_start = None;
}
} else if c == b'#' {
if is_item_terminator(&block_level, c, additional_whitespace, special_tokens) {
break;
}
in_comment = true;
} else if c == b'#' && !in_comment {
// To start a comment, it either needs to be the first character of the token or prefixed with a space.
in_comment = previous_char.map(|pc| pc == b' ').unwrap_or(true);
} else if c == b'\n' || c == b'\r' {
in_comment = false;
if is_item_terminator(&block_level, c, additional_whitespace, special_tokens) {
@@ -254,6 +253,7 @@ pub fn lex_item(
}

*curr_offset += 1;
previous_char = Some(c);
}

let span = Span::new(span_offset + token_start, span_offset + *curr_offset);
@@ -1532,7 +1532,9 @@ pub fn parse_int(working_set: &mut StateWorkingSet, span: Span) -> Expression {
span: Span,
radix: u32,
) -> Expression {
if let Ok(num) = i64::from_str_radix(token, radix) {
// Parse as a u64, then cast to i64, otherwise, for numbers like "0xffffffffffffffef",
// you'll get `Error parsing hex string: number too large to fit in target type`.
if let Ok(num) = u64::from_str_radix(token, radix).map(|val| val as i64) {
Expression::new(working_set, Expr::Int(num), span, Type::Int)
} else {
working_set.error(ParseError::InvalidLiteral(
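A worked example of the literal this enables (illustrative, following two's-complement wrap-around of the u64 value):

    0xffffffffffffffef
    # before: Error parsing hex string: number too large to fit in target type
    # after:  parsed as u64 (18446744073709551599), cast to i64, yielding -17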
@@ -159,6 +159,29 @@ fn lex_comment() {
);
}

#[test]
fn lex_not_comment_needs_space_in_front_of_hashtag() {
let file = b"1..10 | each {echo test#testing }";

let output = lex(file, 0, &[], &[], false);

assert!(output.1.is_none());
}

#[test]
fn lex_comment_with_space_in_front_of_hashtag() {
let file = b"1..10 | each {echo test #testing }";

let output = lex(file, 0, &[], &[], false);

assert!(output.1.is_some());
assert!(matches!(
output.1.unwrap(),
ParseError::UnexpectedEof(missing_token, span) if missing_token == "}"
&& span == Span::new(33, 34)
));
}

#[test]
fn lex_is_incomplete() {
let file = b"let x = 300 | ;";
@@ -65,6 +65,8 @@ impl<'a> ConfigErrors<'a> {
});
}

// We'll probably need this again in the future so allow dead code for now
#[allow(dead_code)]
pub fn deprecated_option(&mut self, path: &ConfigPath, suggestion: &'static str, span: Span) {
self.error(ConfigError::Deprecated {
path: path.to_string(),
@@ -156,10 +156,6 @@ impl UpdateFromValue for Config {
"filesize" => self.filesize.update(val, path, errors),
"explore" => self.explore.update(val, path, errors),
"color_config" => self.color_config.update(val, path, errors),
"use_grid_icons" => {
// TODO: delete it after 0.99
errors.deprecated_option(path, "use `grid -i`", val.span());
}
"footer_mode" => self.footer_mode.update(val, path, errors),
"float_precision" => self.float_precision.update(val, path, errors),
"use_ansi_coloring" => self.use_ansi_coloring.update(val, path, errors),
@@ -128,7 +128,7 @@ impl Matcher for Pattern {
false
}
}
Expr::String(x) => {
Expr::String(x) | Expr::RawString(x) => {
if let Value::String { val, .. } = &value {
x == val
} else {
@@ -459,21 +459,48 @@ impl<'a> StateWorkingSet<'a> {
}

// check overlay in perma
for overlay_frame in self
.permanent_state
.active_overlays(&removed_overlays)
.rev()
{
visibility.append(&overlay_frame.visibility);
self.permanent_state.find_decl(name, &removed_overlays)
}

pub fn find_decl_name(&self, decl_id: DeclId) -> Option<&[u8]> {
let mut removed_overlays = vec![];

let mut visibility: Visibility = Visibility::new();

for scope_frame in self.delta.scope.iter().rev() {
if self.search_predecls {
for (name, id) in scope_frame.predecls.iter() {
if id == &decl_id {
return Some(name);
}
}
}

// check overlay in delta
for overlay_frame in scope_frame.active_overlays(&mut removed_overlays).rev() {
visibility.append(&overlay_frame.visibility);

if self.search_predecls {
for (name, id) in overlay_frame.predecls.iter() {
if id == &decl_id {
return Some(name);
}
}
}

if let Some(decl_id) = overlay_frame.get_decl(name) {
if visibility.is_decl_id_visible(&decl_id) {
return Some(decl_id);
for (name, id) in overlay_frame.decls.iter() {
if id == &decl_id {
return Some(name);
}
}
}
}
}

None
// check overlay in perma
self.permanent_state
.find_decl_name(decl_id, &removed_overlays)
}

pub fn find_module(&self, name: &[u8]) -> Option<ModuleId> {
@@ -101,8 +101,9 @@ export def intersperse [ # -> list<any>
# Returns a list of intermediate steps performed by `reduce`
# (`fold`). It takes two arguments, an initial value to seed the
# initial state and a closure that takes two arguments, the first
# being the internal state and the second the list element in the
# current iteration.
# being the list element in the current iteration and the second
# the internal state.
# The internal state is also provided as pipeline input.
#
# # Example
# ```
@@ -123,7 +124,8 @@ export def scan [ # -> list<any>
--noinit(-n) # remove the initial value from the result
] {
reduce --fold [$init] {|it, acc|
$acc ++ [(do $fn ($acc | last) $it)]
let acc_last = $acc | last
$acc ++ [($acc_last | do $fn $it $acc_last)]
}
| if $noinit {
$in | skip
@@ -49,7 +49,10 @@ def iter_scan [] {
let scanned = ([1 2 3] | iter scan 0 {|x, y| $x + $y})
assert equal $scanned [0, 1, 3, 6]

let scanned = ([a b c d] | iter scan "" {|x, y| [$x, $y] | str join} -n)
let scanned = ([a b c d] | iter scan "" {|it, acc| [$acc, $it] | str join} -n)
assert equal $scanned ["a" "ab" "abc" "abcd"]

let scanned = ([a b c d] | iter scan "" {|it, acc| append $it | str join} -n)
assert equal $scanned ["a" "ab" "abc" "abcd"]
}
@@ -17,11 +17,6 @@ $env.PROMPT_COMMAND = $env.PROMPT_COMMAND? | default {||
$path_segment | str replace --all (char path_sep) $"($separator_color)(char path_sep)($path_color)"
}

$env.PROMPT_INDICATOR = $env.PROMPT_INDICATOR? | default "> "
$env.PROMPT_INDICATOR_VI_NORMAL = $env.PROMPT_INDICATOR_VI_NORMAL? | default "> "
$env.PROMPT_INDICATOR_VI_INSERT = $env.PROMPT_INDICATOR_VI_INSERT? | default ": "
$env.PROMPT_MULTILINE_INDICATOR = $env.PROMPT_MULTILINE_INDICATOR? | default "::: "

$env.PROMPT_COMMAND_RIGHT = $env.PROMPT_COMMAND_RIGHT? | default {||
# create a right prompt in magenta with green separators and am/pm underlined
let time_segment = ([
@@ -39,19 +34,3 @@ $env.PROMPT_COMMAND_RIGHT = $env.PROMPT_COMMAND_RIGHT? | default {||

([$last_exit_code, (char space), $time_segment] | str join)
}

$env.ENV_CONVERSIONS = {
"PATH": {
from_string: { |s| $s | split row (char esep) | path expand --no-symlink }
to_string: { |v| $v | path expand --no-symlink | str join (char esep) }
}
}

$env.NU_LIB_DIRS = $env.NU_LIB_DIRS? | default [
($nu.default-config-dir | path join 'scripts') # add <nushell-config-dir>/scripts
($nu.data-dir | path join 'completions') # default home for nushell completions
]

$env.NU_PLUGIN_DIRS = $env.NU_PLUGIN_DIRS | default [
($nu.default-config-dir | path join 'plugins') # add <nushell-config-dir>/plugins
]
@@ -785,3 +785,145 @@ $env.config.explore = {
},
selected_cell: { bg: light_blue },
}

# ---------------------------------------------------------------------------------------
# Environment Variables
# ---------------------------------------------------------------------------------------

# In addition to the $env.config record, a number of other environment variables
# also affect Nushell's behavior:

# PROMPT_*
# --------
# Prompt configuration
# PROMPT_ variables accept either a string or a closure that returns a string

# PROMPT_COMMAND
# --------------
# Defines the primary prompt. Note that the PROMPT_INDICATOR (below) is appended to this value.
# Simple example - Static string:
$env.PROMPT_COMMAND = "Nushell"
# Simple example - Dynamic closure displaying the path:
$env.PROMPT_COMMAND = {|| pwd}

# PROMPT_COMMAND_RIGHT
# --------------------
# Defines a prompt which will appear right-aligned in the terminal
$env.PROMPT_COMMAND_RIGHT = {|| date now | format date "%d-%a %r" }

# PROMPT_INDICATOR*
# -----------------
# The prompt indicators are environmental variables that represent
# the state of the prompt. The specified character(s) will appear
# immediately following the PROMPT_COMMAND

# When in Emacs mode (default):
$env.PROMPT_INDICATOR = "> "

# When in normal vi mode:
$env.PROMPT_INDICATOR_VI_NORMAL = "> "
# When in vi insert-mode:
$env.PROMPT_INDICATOR_VI_INSERT = ": "

# When a commandline extends across multiple lines:
$env.PROMPT_MULTILINE_INDICATOR = "::: "

# TRANSIENT_PROMPT_*
# ------------------
# Allows a different prompt to be shown after a command has been executed. This
# can be useful if you have a 2-line prompt. Instead of each previously-entered
# command taking up at least 2 lines, the transient prompt can condense it to a
# shorter version. The following example shows a rocket emoji before each
# previously-entered command:
$env.TRANSIENT_PROMPT_COMMAND = "🚀 "
$env.TRANSIENT_PROMPT_INDICATOR = ""
$env.TRANSIENT_PROMPT_INDICATOR_VI_INSERT = ""
$env.TRANSIENT_PROMPT_INDICATOR_VI_NORMAL = ""
# Tip: Removing the transient multiline indicator and right-prompt can simplify
# copying from the terminal
$env.TRANSIENT_PROMPT_MULTILINE_INDICATOR = ""
$env.TRANSIENT_PROMPT_COMMAND_RIGHT = ""

# ENV_CONVERSIONS
# ---------------
# Certain variables, such as those containing multiple paths, are often stored as a
# colon-separated string in other shells. Nushell can convert these automatically to a
# more convenient Nushell list. The ENV_CONVERSIONS variable specifies how environment
# variables are:
# - converted from a string to a value on Nushell startup (from_string)
# - converted from a value back to a string when running external commands (to_string)
#
# Note: The OS Path variable is automatically converted before env.nu loads, so it can
# be treated as a list in this file.
#
# Note: Environment variables are not case-sensitive, so the following will work
# for both Windows and Unix-like platforms.
#
# By default, the internal conversion looks something like the following, so there
# is no need to add this in your actual env.nu:
$env.ENV_CONVERSIONS = {
"Path": {
from_string: { |s| $s | split row (char esep) | path expand --no-symlink }
to_string: { |v| $v | path expand --no-symlink | str join (char esep) }
}
}

# Here's an example that converts the XDG_DATA_DIRS variable to and from a list:
$env.ENV_CONVERSIONS = $env.ENV_CONVERSIONS | merge {
"XDG_DATA_DIRS": {
from_string: { |s| $s | split row (char esep) | path expand --no-symlink }
to_string: { |v| $v | path expand --no-symlink | str join (char esep) }
}
}
#
# Other common directory-lists for conversion: TERMINFO_DIRS.
# Note that other variable conversions take place after `config.nu` is loaded.

# NU_LIB_DIRS
# -----------
# Directories in this constant are searched by the
# `use` and `source` commands.
#
# By default, the `scripts` subdirectory of the default configuration
# directory is included:
const NU_LIB_DIRS = [
($nu.default-config-dir | path join 'scripts') # add <nushell-config-dir>/scripts
($nu.data-dir | path join 'completions') # default home for nushell completions
]
# You can replace (override) or append to this list by shadowing the constant
const NU_LIB_DIRS = $NU_LIB_DIRS ++ [($nu.default-config-dir | path join 'modules')]

# An environment variable version of this also exists. It is searched after the constant.
$env.NU_LIB_DIRS ++= [ ($nu.data-dir | path join "nu_scripts") ]

# NU_PLUGIN_DIRS
# --------------
# Directories to search for plugin binaries when calling add.

# By default, the `plugins` subdirectory of the default configuration
# directory is included:
const NU_PLUGIN_DIRS = [
($nu.default-config-dir | path join 'plugins') # add <nushell-config-dir>/plugins
]
# You can replace (override) or append to this list by shadowing the constant
const NU_PLUGIN_DIRS = $NU_PLUGIN_DIRS ++ [($nu.default-config-dir | path join 'plugins')]

# As with NU_LIB_DIRS, an $env.NU_PLUGIN_DIRS is searched after the constant version

# Appending to the OS path is a common configuration task.
# Because of the previous ENV_CONVERSIONS (performed internally
# before your config.nu loads), the path variable is a list that can
# be appended to using, for example:
$env.path ++= "~/.local/bin"

# Or prepend using
$env.path = "~/.local/bin" ++ $env.path

# The `path add` function from the Standard Library also provides
# a convenience method for prepending to the path:
use std/util "path add"
path add "~/.local/bin"
path add ($env.CARGO_HOME | path join "bin")

# You can remove duplicate directories from the path using:
$env.PATH = ($env.PATH | uniq)
@@ -1,140 +1,9 @@
# Sample Nushell Environment Config File
#
# Environment variables are usually configured in `env.nu`. Nushell
# sets sensible defaults for many environment variables, so the user's
# `env.nu` only needs to override these defaults if desired.
#
# This file serves as simple "in-shell" documentation for these
# settings, or you can view a more complete discussion online at:
# https://nushell.sh/book/configuration
#
# You can pretty-print and page this file using:
# config env --sample | nu-highlight | less -R
# Previously, environment variables were typically configured in `env.nu`.
# In general, most configuration can and should be performed in `config.nu`
# or one of the autoload directories.

# PROMPT_*
# --------
# Prompt configuration
# PROMPT_ variables accept either a string or a closure that returns a string

# PROMPT_COMMAND
# --------------
# Defines the primary prompt. Note that the PROMPT_INDICATOR (below) is appended to this value.
# Simple example - Static string:
$env.PROMPT_COMMAND = "Nushell"
# Simple example - Dynamic closure displaying the path:
$env.PROMPT_COMMAND = {|| pwd}

# PROMPT_COMMAND_RIGHT
# --------------------
# Defines a prompt which will appear right-aligned in the terminal
$env.PROMPT_COMMAND_RIGHT = {|| date now | format date "%d-%a %r" }

# PROMPT_INDICATOR*
# -----------------
# The prompt indicators are environmental variables that represent
# the state of the prompt. The specified character(s) will appear
# immediately following the PROMPT_COMMAND

# When in Emacs mode (default):
$env.PROMPT_INDICATOR = "> "

# When in normal vi mode:
$env.PROMPT_INDICATOR_VI_NORMAL = "> "
# When in vi insert-mode:
$env.PROMPT_INDICATOR_VI_INSERT = ": "

# When a commandline extends across multiple lines:
$env.PROMPT_MULTILINE_INDICATOR = "::: "

# TRANSIENT_PROMPT_*
# ------------------
# Allows a different prompt to be shown after a command has been executed. This
# can be useful if you have a 2-line prompt. Instead of each previously-entered
# command taking up at least 2 lines, the transient prompt can condense it to a
# shorter version. The following example shows a rocket emoji before each
# previously-entered command:
$env.TRANSIENT_PROMPT_COMMAND = "🚀 "
$env.TRANSIENT_PROMPT_INDICATOR = ""
$env.TRANSIENT_PROMPT_INDICATOR_VI_INSERT = ""
$env.TRANSIENT_PROMPT_INDICATOR_VI_NORMAL = ""
# Tip: Removing the transient multiline indicator and right-prompt can simplify
# copying from the terminal
$env.TRANSIENT_PROMPT_MULTILINE_INDICATOR = ""
$env.TRANSIENT_PROMPT_COMMAND_RIGHT = ""

# ENV_CONVERSIONS
# ---------------
# Certain variables, such as those containing multiple paths, are often stored as a
# colon-separated string in other shells. Nushell can convert these automatically to a
# more convenient Nushell list. The ENV_CONVERSIONS variable specifies how environment
# variables are:
# - converted from a string to a value on Nushell startup (from_string)
# - converted from a value back to a string when running external commands (to_string)
#
# Note: The OS Path variable is automatically converted before env.nu loads, so it can
# be treated as a list in this file.
#
# Note: Environment variables are not case-sensitive, so the following will work
# for both Windows and Unix-like platforms.
#
# By default, the internal conversion looks something like the following, so there
# is no need to add this in your actual env.nu:
$env.ENV_CONVERSIONS = {
"Path": {
from_string: { |s| $s | split row (char esep) | path expand --no-symlink }
to_string: { |v| $v | path expand --no-symlink | str join (char esep) }
}
}

# Here's an example that converts the XDG_DATA_DIRS variable to and from a list:
$env.ENV_CONVERSIONS = $env.ENV_CONVERSIONS | merge {
"XDG_DATA_DIRS": {
from_string: { |s| $s | split row (char esep) | path expand --no-symlink }
to_string: { |v| $v | path expand --no-symlink | str join (char esep) }
}
}
#
# Other common directory-lists for conversion: TERMINFO_DIRS.
# Note that other variable conversions take place after `config.nu` is loaded.

# NU_LIB_DIRS
# -----------
# Directories in this environment variable are searched by the
# `use` and `source` commands.
#
# By default, the `scripts` subdirectory of the default configuration
# directory is included:
$env.NU_LIB_DIRS = [
($nu.default-config-dir | path join 'scripts') # add <nushell-config-dir>/scripts
($nu.data-dir | path join 'completions') # default home for nushell completions
]
# You can replace (override) or append to this list:
$env.NU_LIB_DIRS ++= ($nu.default-config-dir | path join 'modules')

# NU_PLUGIN_DIRS
# --------------
# Directories to search for plugin binaries when calling register.

# By default, the `plugins` subdirectory of the default configuration
# directory is included:
$env.NU_PLUGIN_DIRS = [
($nu.default-config-dir | path join 'plugins') # add <nushell-config-dir>/plugins
]

# Appending to the OS path is a common configuration task.
# Because of the previous ENV_CONVERSIONS (performed internally
# before your env.nu loads), the path variable is a list that can
# be appended to using, for example:
$env.path ++= "~/.local/bin"

# Or prepend using
$env.path = "~/.local/bin" ++ $env.path

# The `path add` function from the Standard Library also provides
# a convenience method for prepending to the path:
use std/util "path add"
path add "~/.local/bin"
path add ($env.CARGO_HOME | path join "bin")

# You can remove duplicate directories from the path using:
$env.PATH = ($env.PATH | uniq)
# To pretty-print the in-shell documentation for Nushell's various configuration
# settings, you can run:
config nu --sample | nu-highlight | less -R
@@ -1,17 +1,18 @@
# env.nu
#
# This file is typically used to add or override environment variables.
# Previously, environment variables were typically configured in `env.nu`.
# In general, most configuration can and should be performed in `config.nu`
# or one of the autoload directories.
#
# This file is generated for backwards compatibility for now.
# It is loaded before config.nu and login.nu
#
# See https://www.nushell.sh/book/configuration.html
#
# This file is loaded before config.nu and login.nu
# To pretty-print a sample of the configuration settings, run:
# config nu --sample | nu-highlight | less -R
#
# You can open this file in your default editor using:
# config env
#
# To pretty-print a sample env.nu with documentation, run:
# config env --sample | nu-highlight | less -R
#
# To pretty-print the default environment values, run:
# To pretty-print the default env.nu, run:
# config env --default | nu-highlight | less -R
#
# You can remove these comments if you want or leave
@ -27,8 +27,8 @@ impl PluginCommand for LazyJoin {
|
|||
fn signature(&self) -> Signature {
|
||||
Signature::build(self.name())
|
||||
.required("other", SyntaxShape::Any, "LazyFrame to join with")
|
||||
.required("left_on", SyntaxShape::Any, "Left column(s) to join on")
|
||||
.required("right_on", SyntaxShape::Any, "Right column(s) to join on")
|
||||
.optional("left_on", SyntaxShape::Any, "Left column(s) to join on")
|
||||
.optional("right_on", SyntaxShape::Any, "Right column(s) to join on")
|
||||
.switch(
|
||||
"inner",
|
||||
"inner joining between lazyframes (default)",
|
||||
|
@ -54,8 +54,8 @@ impl PluginCommand for LazyJoin {
|
|||
vec![
|
||||
Example {
|
||||
description: "Join two lazy dataframes",
|
||||
example: r#"let df_a = ([[a b c];[1 "a" 0] [2 "b" 1] [1 "c" 2] [1 "c" 3]] | polars into-lazy);
|
||||
let df_b = ([["foo" "bar" "ham"];[1 "a" "let"] [2 "c" "var"] [3 "c" "const"]] | polars into-lazy);
|
||||
example: r#"let df_a = ([[a b c];[1 "a" 0] [2 "b" 1] [1 "c" 2] [1 "c" 3]] | polars into-lazy)
|
||||
let df_b = ([["foo" "bar" "ham"];[1 "a" "let"] [2 "c" "var"] [3 "c" "const"]] | polars into-lazy)
|
||||
$df_a | polars join $df_b a foo | polars collect"#,
|
||||
result: Some(
|
||||
NuDataFrame::try_from_columns(
|
||||
|
@ -114,8 +114,8 @@ impl PluginCommand for LazyJoin {
|
|||
},
|
||||
Example {
|
||||
description: "Join one eager dataframe with a lazy dataframe",
|
||||
example: r#"let df_a = ([[a b c];[1 "a" 0] [2 "b" 1] [1 "c" 2] [1 "c" 3]] | polars into-df);
|
||||
let df_b = ([["foo" "bar" "ham"];[1 "a" "let"] [2 "c" "var"] [3 "c" "const"]] | polars into-lazy);
|
||||
example: r#"let df_a = ([[a b c];[1 "a" 0] [2 "b" 1] [1 "c" 2] [1 "c" 3]] | polars into-df)
|
||||
let df_b = ([["foo" "bar" "ham"];[1 "a" "let"] [2 "c" "var"] [3 "c" "const"]] | polars into-lazy)
|
||||
$df_a | polars join $df_b a foo"#,
|
||||
result: Some(
|
||||
NuDataFrame::try_from_columns(
|
||||
|
@ -172,6 +172,43 @@ impl PluginCommand for LazyJoin {
|
|||
.into_value(Span::test_data()),
|
||||
),
|
||||
},
|
||||
Example {
|
||||
description: "Join one eager dataframe with another using a cross join",
|
||||
example: r#"let tokens = [[monopoly_token]; [hat] [shoe] [boat]] | polars into-df
|
||||
let players = [[name, cash]; [Alice, 78] [Bob, 135]] | polars into-df
|
||||
$players | polars select (polars col name) | polars join --cross $tokens | polars collect"#,
|
||||
result: Some(
|
||||
NuDataFrame::try_from_columns(
|
||||
vec![
|
||||
Column::new(
|
||||
"name".to_string(),
|
||||
vec![
|
||||
Value::test_string("Alice"),
|
||||
Value::test_string("Alice"),
|
||||
Value::test_string("Alice"),
|
||||
Value::test_string("Bob"),
|
||||
Value::test_string("Bob"),
|
||||
Value::test_string("Bob"),
|
||||
],
|
||||
),
|
||||
Column::new(
|
||||
"monopoly_token".to_string(),
|
||||
vec![
|
||||
Value::test_string("hat"),
|
||||
Value::test_string("shoe"),
|
||||
Value::test_string("boat"),
|
||||
Value::test_string("hat"),
|
||||
Value::test_string("shoe"),
|
||||
Value::test_string("boat"),
|
||||
],
|
||||
),
|
||||
],
|
||||
None,
|
||||
)
|
||||
.expect("simple df for test should not fail")
|
||||
.into_value(Span::test_data()),
|
||||
),
|
||||
},
|
||||
]
|
||||
}
|
||||
|
||||
|
@ -200,11 +237,21 @@ impl PluginCommand for LazyJoin {
|
|||
let other = NuLazyFrame::try_from_value_coerce(plugin, &other)?;
let other = other.to_polars();
|
let left_on: Value = call.req(1)?;
let left_on = NuExpression::extract_exprs(plugin, left_on)?;
let left_on_opt: Option<Value> = call.opt(1)?;
let left_on = match left_on_opt {
Some(left_on_value) if left || left_on_opt.is_some() => {
NuExpression::extract_exprs(plugin, left_on_value)?
}
_ => vec![],
};
|
let right_on: Value = call.req(2)?;
let right_on = NuExpression::extract_exprs(plugin, right_on)?;
let right_on_opt: Option<Value> = call.opt(2)?;
let right_on = match right_on_opt {
Some(right_on_value) if full || right_on_opt.is_some() => {
NuExpression::extract_exprs(plugin, right_on_value)?
}
_ => vec![],
};
|
if left_on.len() != right_on.len() {
let right_on: Value = call.req(2)?;
|
@ -232,16 +279,25 @@ impl PluginCommand for LazyJoin {
|
|||
let lazy = NuLazyFrame::try_from_value_coerce(plugin, &value)?;
let from_eager = lazy.from_eager;
let lazy = lazy.to_polars();
|
let lazy = lazy
.join_builder()
.with(other)
.left_on(left_on)
.right_on(right_on)
.how(how)
.force_parallel(true)
.suffix(suffix)
.finish();
let lazy = if cross {
lazy.join_builder()
.with(other)
.left_on(vec![])
.right_on(vec![])
.how(how)
.force_parallel(true)
.suffix(suffix)
.finish()
} else {
lazy.join_builder()
.with(other)
.left_on(left_on)
.right_on(right_on)
.how(how)
.force_parallel(true)
.suffix(suffix)
.finish()
};
|
let lazy = NuLazyFrame::new(from_eager, lazy);
lazy.to_pipeline_data(plugin, engine, call.head)
|
|
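For reference, the cross branch above maps onto the polars JoinBuilder directly; a minimal standalone sketch, assuming plain polars LazyFrames outside the plugin (the function name cross_join_sketch is illustrative only):

use polars::prelude::*;

// A cross join pairs every left row with every right row, so no key columns
// are needed; this mirrors the `--cross` branch above, which passes empty
// `left_on`/`right_on` vectors to the builder.
fn cross_join_sketch(a: LazyFrame, b: LazyFrame) -> LazyFrame {
    a.join_builder()
        .with(b)
        .left_on(Vec::<Expr>::new())
        .right_on(Vec::<Expr>::new())
        .how(JoinType::Cross)
        .force_parallel(true)
        .finish()
}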
@ -223,57 +223,30 @@ pub fn insert_value(
|
|||
inner: vec![],
})
}
}
// Checking that the type for the value is the same
// for the previous value in the column
else if col_val.values.is_empty() {
if let Some(schema) = maybe_schema {
if let Some(field) = schema.schema.get_field(&key) {
col_val.column_type = Some(field.dtype().clone());
}
}
|
} else {
let current_data_type = value_to_data_type(&value);
if col_val.column_type.is_none() {
col_val.column_type = Some(value_to_data_type(&value));
col_val.column_type = value_to_data_type(&value);
} else if let Some(current_data_type) = current_data_type {
if col_val.column_type.as_ref() != Some(&current_data_type) {
col_val.column_type = Some(DataType::Object("Value", None));
}
}
col_val.values.push(value);
Ok(())
} else {
let prev_value = &col_val.values[col_val.values.len() - 1];
|
match (&prev_value, &value) {
(Value::Int { .. }, Value::Int { .. })
| (Value::Float { .. }, Value::Float { .. })
| (Value::String { .. }, Value::String { .. })
| (Value::Bool { .. }, Value::Bool { .. })
| (Value::Date { .. }, Value::Date { .. })
| (Value::Filesize { .. }, Value::Filesize { .. })
| (Value::Binary { .. }, Value::Binary { .. })
| (Value::Duration { .. }, Value::Duration { .. }) => col_val.values.push(value),
(_, Value::Nothing { .. }) => col_val.values.push(value),
(Value::List { .. }, _) => {
col_val.column_type = Some(value_to_data_type(&value));
col_val.values.push(value);
}
_ => {
col_val.column_type = Some(DataType::Object("Value", None));
col_val.values.push(value);
}
}
Ok(())
}
}
|
fn value_to_data_type(value: &Value) -> DataType {
fn value_to_data_type(value: &Value) -> Option<DataType> {
match &value {
Value::Int { .. } => DataType::Int64,
Value::Float { .. } => DataType::Float64,
Value::String { .. } => DataType::String,
Value::Bool { .. } => DataType::Boolean,
Value::Date { .. } => DataType::Date,
Value::Duration { .. } => DataType::Duration(TimeUnit::Nanoseconds),
Value::Filesize { .. } => DataType::Int64,
Value::Binary { .. } => DataType::Binary,
Value::Int { .. } => Some(DataType::Int64),
Value::Float { .. } => Some(DataType::Float64),
Value::String { .. } => Some(DataType::String),
Value::Bool { .. } => Some(DataType::Boolean),
Value::Date { .. } => Some(DataType::Date),
Value::Duration { .. } => Some(DataType::Duration(TimeUnit::Nanoseconds)),
Value::Filesize { .. } => Some(DataType::Int64),
Value::Binary { .. } => Some(DataType::Binary),
Value::List { vals, .. } => {
// We need to determine the type inside of the list.
// Since Value::List does not have any kind of
|
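Returning Option<DataType> lets values with no usable type (for example Nothing) defer the decision instead of forcing the column to Object straight away. A minimal sketch of that inference idea, assuming the polars DataType shown above (the helper name infer_column_type is illustrative, not part of this patch):

use polars::prelude::DataType;

// Fold per-value type guesses into a single column type: the first concrete
// type wins, a conflicting concrete type demotes the column to Object, and
// guesses of None (e.g. from Nothing values) are simply skipped.
fn infer_column_type(guesses: impl IntoIterator<Item = Option<DataType>>) -> Option<DataType> {
    let mut column_type: Option<DataType> = None;
    for guess in guesses.into_iter().flatten() {
        match &column_type {
            None => column_type = Some(guess),
            Some(current) if *current != guess => {
                return Some(DataType::Object("Value", None));
            }
            _ => {}
        }
    }
    column_type
}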
@ -286,243 +259,247 @@ fn value_to_data_type(value: &Value) -> DataType {
|
|||
.filter(|v| !matches!(v, Value::Nothing { .. }))
.map(value_to_data_type)
.nth(1)
.flatten()
.unwrap_or(DataType::Object("Value", None));
|
DataType::List(Box::new(list_type))
Some(DataType::List(Box::new(list_type)))
}
_ => DataType::Object("Value", None),
_ => None,
}
}
|
fn typed_column_to_series(name: PlSmallStr, column: TypedColumn) -> Result<Series, ShellError> {
if let Some(column_type) = &column.column_type {
match column_type {
DataType::Float32 => {
let series_values: Result<Vec<_>, _> = column
.values
.iter()
.map(|v| {
value_to_option(v, |v| match v {
Value::Float { val, .. } => Ok(*val as f32),
Value::Int { val, .. } => Ok(*val as f32),
x => Err(ShellError::GenericError {
error: "Error converting to f32".into(),
msg: "".into(),
span: None,
help: Some(format!("Unexpected type: {x:?}")),
inner: vec![],
}),
})
let column_type = &column
.column_type
.clone()
.unwrap_or(DataType::Object("Value", None));
match column_type {
DataType::Float32 => {
let series_values: Result<Vec<_>, _> = column
.values
.iter()
.map(|v| {
value_to_option(v, |v| match v {
Value::Float { val, .. } => Ok(*val as f32),
Value::Int { val, .. } => Ok(*val as f32),
x => Err(ShellError::GenericError {
error: "Error converting to f32".into(),
msg: "".into(),
span: None,
help: Some(format!("Unexpected type: {x:?}")),
inner: vec![],
}),
})
.collect();
Ok(Series::new(name, series_values?))
}
DataType::Float64 => {
let series_values: Result<Vec<_>, _> = column
.values
.iter()
.map(|v| {
value_to_option(v, |v| match v {
Value::Float { val, .. } => Ok(*val),
Value::Int { val, .. } => Ok(*val as f64),
x => Err(ShellError::GenericError {
error: "Error converting to f64".into(),
msg: "".into(),
span: None,
help: Some(format!("Unexpected type: {x:?}")),
inner: vec![],
}),
})
})
.collect();
Ok(Series::new(name, series_values?))
}
DataType::Float64 => {
let series_values: Result<Vec<_>, _> = column
.values
.iter()
.map(|v| {
value_to_option(v, |v| match v {
Value::Float { val, .. } => Ok(*val),
Value::Int { val, .. } => Ok(*val as f64),
x => Err(ShellError::GenericError {
error: "Error converting to f64".into(),
msg: "".into(),
span: None,
help: Some(format!("Unexpected type: {x:?}")),
inner: vec![],
}),
})
.collect();
Ok(Series::new(name, series_values?))
}
DataType::UInt8 => {
let series_values: Result<Vec<_>, _> = column
.values
.iter()
.map(|v| value_to_option(v, |v| value_to_int(v).map(|v| v as u8)))
.collect();
Ok(Series::new(name, series_values?))
}
DataType::UInt16 => {
let series_values: Result<Vec<_>, _> = column
.values
.iter()
.map(|v| value_to_option(v, |v| value_to_int(v).map(|v| v as u16)))
.collect();
Ok(Series::new(name, series_values?))
}
DataType::UInt32 => {
let series_values: Result<Vec<_>, _> = column
.values
.iter()
.map(|v| value_to_option(v, |v| value_to_int(v).map(|v| v as u32)))
.collect();
Ok(Series::new(name, series_values?))
}
DataType::UInt64 => {
let series_values: Result<Vec<_>, _> = column
.values
.iter()
.map(|v| value_to_option(v, |v| value_to_int(v).map(|v| v as u64)))
.collect();
Ok(Series::new(name, series_values?))
}
DataType::Int8 => {
let series_values: Result<Vec<_>, _> = column
.values
.iter()
.map(|v| value_to_option(v, |v| value_to_int(v).map(|v| v as i8)))
.collect();
Ok(Series::new(name, series_values?))
}
DataType::Int16 => {
let series_values: Result<Vec<_>, _> = column
.values
.iter()
.map(|v| value_to_option(v, |v| value_to_int(v).map(|v| v as i16)))
.collect();
Ok(Series::new(name, series_values?))
}
DataType::Int32 => {
let series_values: Result<Vec<_>, _> = column
.values
.iter()
.map(|v| value_to_option(v, |v| value_to_int(v).map(|v| v as i32)))
.collect();
Ok(Series::new(name, series_values?))
}
DataType::Int64 => {
let series_values: Result<Vec<_>, _> = column
.values
.iter()
.map(|v| value_to_option(v, value_to_int))
.collect();
Ok(Series::new(name, series_values?))
}
DataType::Boolean => {
let series_values: Result<Vec<_>, _> = column
.values
.iter()
.map(|v| value_to_option(v, |v| v.as_bool()))
.collect();
Ok(Series::new(name, series_values?))
}
DataType::String => {
let series_values: Result<Vec<_>, _> = column
.values
.iter()
.map(|v| value_to_option(v, |v| v.coerce_string()))
.collect();
Ok(Series::new(name, series_values?))
}
DataType::Binary | DataType::BinaryOffset => {
let series_values: Result<Vec<_>, _> =
column.values.iter().map(|v| v.coerce_binary()).collect();
Ok(Series::new(name, series_values?))
}
DataType::Object(_, _) => value_to_series(name, &column.values),
DataType::Duration(time_unit) => {
let series_values: Result<Vec<_>, _> = column
.values
.iter()
.map(|v| {
value_to_option(v, |v| {
v.as_duration().map(|v| nanos_from_timeunit(v, *time_unit))
})
})
.collect();
Ok(Series::new(name, series_values?))
}
DataType::UInt8 => {
let series_values: Result<Vec<_>, _> = column
.values
.iter()
.map(|v| value_to_option(v, |v| value_to_int(v).map(|v| v as u8)))
.collect();
Ok(Series::new(name, series_values?))
}
DataType::UInt16 => {
let series_values: Result<Vec<_>, _> = column
.values
.iter()
.map(|v| value_to_option(v, |v| value_to_int(v).map(|v| v as u16)))
.collect();
Ok(Series::new(name, series_values?))
}
DataType::UInt32 => {
let series_values: Result<Vec<_>, _> = column
.values
.iter()
.map(|v| value_to_option(v, |v| value_to_int(v).map(|v| v as u32)))
.collect();
Ok(Series::new(name, series_values?))
}
DataType::UInt64 => {
let series_values: Result<Vec<_>, _> = column
.values
.iter()
.map(|v| value_to_option(v, |v| value_to_int(v).map(|v| v as u64)))
.collect();
Ok(Series::new(name, series_values?))
}
DataType::Int8 => {
let series_values: Result<Vec<_>, _> = column
.values
.iter()
.map(|v| value_to_option(v, |v| value_to_int(v).map(|v| v as i8)))
.collect();
Ok(Series::new(name, series_values?))
}
DataType::Int16 => {
let series_values: Result<Vec<_>, _> = column
.values
.iter()
.map(|v| value_to_option(v, |v| value_to_int(v).map(|v| v as i16)))
.collect();
Ok(Series::new(name, series_values?))
}
DataType::Int32 => {
let series_values: Result<Vec<_>, _> = column
.values
.iter()
.map(|v| value_to_option(v, |v| value_to_int(v).map(|v| v as i32)))
.collect();
Ok(Series::new(name, series_values?))
}
DataType::Int64 => {
let series_values: Result<Vec<_>, _> = column
.values
.iter()
.map(|v| value_to_option(v, value_to_int))
.collect();
Ok(Series::new(name, series_values?))
}
DataType::Boolean => {
let series_values: Result<Vec<_>, _> = column
.values
.iter()
.map(|v| value_to_option(v, |v| v.as_bool()))
.collect();
Ok(Series::new(name, series_values?))
}
DataType::String => {
let series_values: Result<Vec<_>, _> = column
.values
.iter()
.map(|v| value_to_option(v, |v| v.coerce_string()))
.collect();
Ok(Series::new(name, series_values?))
}
DataType::Binary | DataType::BinaryOffset => {
let series_values: Result<Vec<_>, _> =
column.values.iter().map(|v| v.coerce_binary()).collect();
Ok(Series::new(name, series_values?))
}
DataType::Object(_, _) => value_to_series(name, &column.values),
DataType::Duration(time_unit) => {
let series_values: Result<Vec<_>, _> = column
.values
.iter()
.map(|v| {
value_to_option(v, |v| {
v.as_duration().map(|v| nanos_from_timeunit(v, *time_unit))
})
.collect();
Ok(Series::new(name, series_values?))
}
DataType::List(list_type) => {
match input_type_list_to_series(&name, list_type.as_ref(), &column.values) {
Ok(series) => Ok(series),
Err(_) => {
// An error case will occur when there are lists of mixed types.
// If this happens, fallback to object list
input_type_list_to_series(
&name,
&DataType::Object("unknown", None),
&column.values,
)
}
})
.collect();
Ok(Series::new(name, series_values?))
}
DataType::List(list_type) => {
match input_type_list_to_series(&name, list_type.as_ref(), &column.values) {
Ok(series) => Ok(series),
Err(_) => {
// An error case will occur when there are lists of mixed types.
// If this happens, fallback to object list
input_type_list_to_series(
&name,
&DataType::Object("unknown", None),
&column.values,
)
}
}
DataType::Date => {
let it = column.values.iter().map(|v| {
}
DataType::Date => {
let it = column.values.iter().map(|v| {
if let Value::Date { val, .. } = &v {
Some(val.timestamp_nanos_opt().unwrap_or_default())
} else {
None
}
});
|
let res: DatetimeChunked = ChunkedArray::<Int64Type>::from_iter_options(name, it)
.into_datetime(TimeUnit::Nanoseconds, None);
|
Ok(res.into_series())
}
DataType::Datetime(tu, maybe_tz) => {
let dates = column
.values
.iter()
.map(|v| {
if let Value::Date { val, .. } = &v {
Some(val.timestamp_nanos_opt().unwrap_or_default())
} else {
None
}
});
|
let res: DatetimeChunked = ChunkedArray::<Int64Type>::from_iter_options(name, it)
.into_datetime(TimeUnit::Nanoseconds, None);
|
Ok(res.into_series())
}
DataType::Datetime(tu, maybe_tz) => {
let dates = column
.values
.iter()
.map(|v| {
if let Value::Date { val, .. } = &v {
// If there is a timezone specified, make sure
// the value is converted to it
Ok(maybe_tz
.as_ref()
.map(|tz| tz.parse::<Tz>().map(|tz| val.with_timezone(&tz)))
.transpose()
.map_err(|e| ShellError::GenericError {
error: "Error parsing timezone".into(),
msg: "".into(),
span: None,
help: Some(e.to_string()),
inner: vec![],
})?
.and_then(|dt| dt.timestamp_nanos_opt())
.map(|nanos| nanos_from_timeunit(nanos, *tu)))
} else {
Ok(None)
}
})
.collect::<Result<Vec<Option<i64>>, ShellError>>()?;
|
let res: DatetimeChunked =
ChunkedArray::<Int64Type>::from_iter_options(name, dates.into_iter())
.into_datetime(*tu, maybe_tz.clone());
|
Ok(res.into_series())
}
DataType::Struct(fields) => {
let schema = Some(NuSchema::new(Schema::from_iter(fields.clone())));
// let mut structs: Vec<Series> = Vec::new();
let mut structs: HashMap<PlSmallStr, Series> = HashMap::new();
|
for v in column.values.iter() {
let mut column_values: ColumnMap = IndexMap::new();
let record = v.as_record()?;
insert_record(&mut column_values, record.clone(), &schema)?;
let df = from_parsed_columns(column_values)?;
for name in df.df.get_column_names() {
let series = df
.df
.column(name)
// If there is a timezone specified, make sure
// the value is converted to it
Ok(maybe_tz
.as_ref()
.map(|tz| tz.parse::<Tz>().map(|tz| val.with_timezone(&tz)))
.transpose()
.map_err(|e| ShellError::GenericError {
error: format!(
"Error creating struct, could not get column name {name}: {e}"
),
error: "Error parsing timezone".into(),
msg: "".into(),
span: None,
help: None,
help: Some(e.to_string()),
inner: vec![],
})?
.as_materialized_series();
.and_then(|dt| dt.timestamp_nanos_opt())
.map(|nanos| nanos_from_timeunit(nanos, *tu)))
} else {
Ok(None)
}
})
.collect::<Result<Vec<Option<i64>>, ShellError>>()?;
|
if let Some(v) = structs.get_mut(name) {
let _ = v.append(series)
let res: DatetimeChunked =
ChunkedArray::<Int64Type>::from_iter_options(name, dates.into_iter())
.into_datetime(*tu, maybe_tz.clone());
|
Ok(res.into_series())
}
DataType::Struct(fields) => {
let schema = Some(NuSchema::new(Schema::from_iter(fields.clone())));
// let mut structs: Vec<Series> = Vec::new();
let mut structs: HashMap<PlSmallStr, Series> = HashMap::new();
|
for v in column.values.iter() {
let mut column_values: ColumnMap = IndexMap::new();
let record = v.as_record()?;
insert_record(&mut column_values, record.clone(), &schema)?;
let df = from_parsed_columns(column_values)?;
for name in df.df.get_column_names() {
let series = df
.df
.column(name)
.map_err(|e| ShellError::GenericError {
error: format!(
"Error creating struct, could not get column name {name}: {e}"
),
msg: "".into(),
span: None,
help: None,
inner: vec![],
})?
.as_materialized_series();
|
if let Some(v) = structs.get_mut(name) {
let _ = v.append(series)
.map_err(|e| ShellError::GenericError {
error: format!("Error creating struct, could not append to series for col {name}: {e}"),
msg: "".into(),
|
@ -530,44 +507,32 @@ fn typed_column_to_series(name: PlSmallStr, column: TypedColumn) -> Result<Serie
|
|||
help: None,
inner: vec![],
})?;
} else {
structs.insert(name.clone(), series.to_owned());
}
} else {
structs.insert(name.clone(), series.to_owned());
}
}
|
let structs: Vec<Series> = structs.into_values().collect();
|
let chunked = StructChunked::from_series(
column.name().to_owned(),
structs.len(),
structs.iter(),
)
.map_err(|e| ShellError::GenericError {
error: format!("Error creating struct: {e}"),
msg: "".into(),
span: None,
help: None,
inner: vec![],
})?;
Ok(chunked.into_series())
}
_ => Err(ShellError::GenericError {
error: format!("Error creating dataframe: Unsupported type: {column_type:?}"),
msg: "".into(),
span: None,
help: None,
inner: vec![],
}),
|
let structs: Vec<Series> = structs.into_values().collect();
|
let chunked =
StructChunked::from_series(column.name().to_owned(), structs.len(), structs.iter())
.map_err(|e| ShellError::GenericError {
error: format!("Error creating struct: {e}"),
msg: "".into(),
span: None,
help: None,
inner: vec![],
})?;
Ok(chunked.into_series())
}
} else {
Err(ShellError::GenericError {
error: "Passed a type column with no type".into(),
_ => Err(ShellError::GenericError {
error: format!("Error creating dataframe: Unsupported type: {column_type:?}"),
msg: "".into(),
span: None,
help: None,
inner: vec![],
})
}),
}
}
|
|
|
@ -20,7 +20,7 @@ nu-plugin = { path = "../nu-plugin", version = "0.100.1" }
|
|||
nu-protocol = { path = "../nu-protocol", version = "0.100.1" }
|
gjson = "0.8"
scraper = { default-features = false, version = "0.21" }
scraper = { default-features = false, version = "0.22" }
sxd-document = "0.3"
sxd-xpath = "0.4"
webpage = { version = "2.0.1", features = ["serde"] }
|
|
70
src/main.rs
70
src/main.rs
|
@ -24,8 +24,8 @@ use nu_cli::gather_parent_env_vars;
|
|||
use nu_lsp::LanguageServer;
use nu_path::canonicalize_with;
use nu_protocol::{
engine::EngineState, report_shell_error, ByteStream, Config, IntoValue, PipelineData,
ShellError, Span, Spanned, Value,
engine::EngineState, record, report_shell_error, ByteStream, Config, IntoValue, PipelineData,
ShellError, Span, Spanned, Type, Value,
};
use nu_std::load_standard_library;
use nu_utils::perf;
|
@ -147,22 +147,43 @@ fn main() -> Result<()> {
|
|||
|
let mut default_nu_lib_dirs_path = nushell_config_path.clone();
default_nu_lib_dirs_path.push("scripts");
engine_state.add_env_var(
"NU_LIB_DIRS".to_string(),
// env.NU_LIB_DIRS to be replaced by constant (below) - Eventual deprecation
// but an empty list for now to allow older code to work
engine_state.add_env_var("NU_LIB_DIRS".to_string(), Value::test_list(vec![]));
|
let mut working_set = nu_protocol::engine::StateWorkingSet::new(&engine_state);
let var_id = working_set.add_variable(
b"$NU_LIB_DIRS".into(),
Span::unknown(),
Type::List(Box::new(Type::String)),
false,
);
working_set.set_variable_const_val(
var_id,
Value::test_list(vec![
Value::test_string(default_nu_lib_dirs_path.to_string_lossy()),
Value::test_string(default_nushell_completions_path.to_string_lossy()),
]),
);
engine_state.merge_delta(working_set.render())?;
|
let mut default_nu_plugin_dirs_path = nushell_config_path;
default_nu_plugin_dirs_path.push("plugins");
engine_state.add_env_var(
"NU_PLUGIN_DIRS".to_string(),
engine_state.add_env_var("NU_PLUGIN_DIRS".to_string(), Value::test_list(vec![]));
let mut working_set = nu_protocol::engine::StateWorkingSet::new(&engine_state);
let var_id = working_set.add_variable(
b"$NU_PLUGIN_DIRS".into(),
Span::unknown(),
Type::List(Box::new(Type::String)),
false,
);
working_set.set_variable_const_val(
var_id,
Value::test_list(vec![Value::test_string(
default_nu_plugin_dirs_path.to_string_lossy(),
)]),
);
engine_state.merge_delta(working_set.render())?;
// End: Default NU_LIB_DIRS, NU_PLUGIN_DIRS
|
// This is the real secret sauce to having an in-memory sqlite db. You must
|
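Both NU_LIB_DIRS and NU_PLUGIN_DIRS are registered through the same pattern; condensed into one place as a sketch (names follow the diff above, the empty default list is illustrative):

// Register a list<string> constant on the engine: declare the variable in a
// working set, pin its value, then merge the delta back into the engine state.
let mut working_set = nu_protocol::engine::StateWorkingSet::new(&engine_state);
let var_id = working_set.add_variable(
    b"$NU_PLUGIN_DIRS".into(),
    Span::unknown(),
    Type::List(Box::new(Type::String)),
    false,
);
working_set.set_variable_const_val(var_id, Value::test_list(vec![]));
engine_state.merge_delta(working_set.render())?;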
@ -264,6 +285,11 @@ fn main() -> Result<()> {
|
|||
);
perf!("$env.config setup", start_time, use_color);
|
engine_state.add_env_var(
"ENV_CONVERSIONS".to_string(),
Value::test_record(record! {}),
);
|
start_time = std::time::Instant::now();
if let Some(include_path) = &parsed_nu_cli_args.include_path {
let span = include_path.span;
|
@ -273,7 +299,15 @@ fn main() -> Result<()> {
|
|||
.map(|x| Value::string(x.trim().to_string(), span))
.collect();
|
engine_state.add_env_var("NU_LIB_DIRS".into(), Value::list(vals, span));
let mut working_set = nu_protocol::engine::StateWorkingSet::new(&engine_state);
let var_id = working_set.add_variable(
b"$NU_LIB_DIRS".into(),
span,
Type::List(Box::new(Type::String)),
false,
);
working_set.set_variable_const_val(var_id, Value::list(vals, span));
engine_state.merge_delta(working_set.render())?;
}
perf!("NU_LIB_DIRS setup", start_time, use_color);
|
|
@ -286,9 +320,29 @@ fn main() -> Result<()> {
|
|||
"NU_VERSION".to_string(),
Value::string(env!("CARGO_PKG_VERSION"), Span::unknown()),
);
|
// Add SHLVL if interactive
if engine_state.is_interactive {
engine_state.add_env_var("PROMPT_INDICATOR".to_string(), Value::test_string("> "));
engine_state.add_env_var(
"PROMPT_INDICATOR_VI_NORMAL".to_string(),
Value::test_string("> "),
);
engine_state.add_env_var(
"PROMPT_INDICATOR_VI_INSERT".to_string(),
Value::test_string(": "),
);
engine_state.add_env_var(
"PROMPT_MULTILINE_INDICATOR".to_string(),
Value::test_string("::: "),
);
engine_state.add_env_var(
"TRANSIENT_PROMPT_MULTILINE_INDICATOR".to_string(),
Value::test_string(""),
);
engine_state.add_env_var(
"TRANSIENT_PROMPT_COMMAND_RIGHT".to_string(),
Value::test_string(""),
);
let mut shlvl = engine_state
.get_env_var("SHLVL")
.map(|x| x.as_str().unwrap_or("0").parse::<i64>().unwrap_or(0))
|
|
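The SHLVL hunk above is cut off by the diff context; the usual shape of such handling (an assumed sketch, not taken from this patch) is to parse the existing value, increment it, and re-export it:

// Hypothetical continuation: bump SHLVL by one for this interactive shell
// and write it back as an environment variable.
let shlvl = engine_state
    .get_env_var("SHLVL")
    .map(|x| x.as_str().unwrap_or("0").parse::<i64>().unwrap_or(0))
    .unwrap_or(0)
    + 1;
engine_state.add_env_var(
    "SHLVL".to_string(),
    Value::string(shlvl.to_string(), Span::unknown()),
);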
@ -90,12 +90,19 @@ fn help_works_with_missing_requirements() -> TestResult {
|
|||
run_test(r#"each --help | lines | length"#, "72")
}
|
#[test]
fn scope_variable() -> TestResult {
run_test(
r#"let x = 3; scope variables | where name == "$x" | get type.0"#,
"int",
)
#[rstest]
#[case("let x = 3", "$x", "int", "3")]
#[case("const x = 3", "$x", "int", "3")]
fn scope_variable(
#[case] var_decl: &str,
#[case] exp_name: &str,
#[case] exp_type: &str,
#[case] exp_value: &str,
) -> TestResult {
let get_var_info =
format!(r#"{var_decl}; scope variables | where name == "{exp_name}" | first"#);
run_test(&format!(r#"{get_var_info} | get type"#), exp_type)?;
run_test(&format!(r#"{get_var_info} | get value"#), exp_value)
}
|
#[rstest]
|
|
@ -17,13 +17,33 @@ fn shorthand_env_3() -> TestResult {
|
|||
}
|
#[test]
fn default_nu_lib_dirs_type() {
fn default_nu_lib_dirs_env_type() {
// Previously, this was a list<string>
// While we are transitioning to const NU_LIB_DIRS
// the env version will be empty, and thus a
// list<any>
let actual = nu!("$env.NU_LIB_DIRS | describe");
assert_eq!(actual.out, "list<any>");
}
|
#[test]
fn default_nu_lib_dirs_type() {
let actual = nu!("$NU_LIB_DIRS | describe");
assert_eq!(actual.out, "list<string>");
}
|
#[test]
fn default_nu_plugin_dirs_type() {
fn default_nu_plugin_dirs_env_type() {
// Previously, this was a list<string>
// While we are transitioning to const NU_PLUGIN_DIRS
// the env version will be empty, and thus a
// list<any>
let actual = nu!("$env.NU_PLUGIN_DIRS | describe");
assert_eq!(actual.out, "list<any>");
}
|
#[test]
fn default_nu_plugin_dirs_type() {
let actual = nu!("$NU_PLUGIN_DIRS | describe");
assert_eq!(actual.out, "list<string>");
}
|
|
@ -169,6 +169,41 @@ fn comment_skipping_in_pipeline_3() -> TestResult {
|
|||
)
}
|
#[test]
fn still_string_if_hashtag_is_middle_of_string() -> TestResult {
run_test(r#"echo test#testing"#, "test#testing")
}
|
#[test]
fn non_comment_hashtag_in_comment_does_not_stop_comment() -> TestResult {
run_test(r#"# command_bar_text: { fg: '#C4C9C6' },"#, "")
}
|
#[test]
fn non_comment_hashtag_in_comment_does_not_stop_comment_in_block() -> TestResult {
run_test(
r#"{
explore: {
# command_bar_text: { fg: '#C4C9C6' },
}
} | get explore | is-empty"#,
"true",
)
}
|
#[test]
fn still_string_if_hashtag_is_middle_of_string_inside_each() -> TestResult {
run_test(
r#"1..1 | each {echo test#testing } | get 0"#,
"test#testing",
)
}
|
#[test]
fn still_string_if_hashtag_is_middle_of_string_inside_each_also_with_dot() -> TestResult {
run_test(r#"1..1 | each {echo '.#testing' } | get 0"#, ".#testing")
}
|
#[test]
fn bad_var_name() -> TestResult {
fail_test(r#"let $"foo bar" = 4"#, "can't contain")
|
@ -282,6 +317,11 @@ fn raw_string_with_equals() -> TestResult {
|
|||
)
}
|
#[test]
fn raw_string_with_hashtag() -> TestResult {
run_test(r#"r##' one # two '##"#, "one # two")
}
|
#[test]
fn list_quotes_with_equals() -> TestResult {
run_test(
|
|