Merge branch 'main' into continue-PWD-per-drive

PegasusPlusUS, 2024-12-20 14:52:59 -08:00, committed by GitHub
commit e623224abf
GPG key ID: B5690EEEBB952194 (no known key found for this signature in database)
68 changed files with 1667 additions and 340 deletions


@ -10,4 +10,4 @@ jobs:
uses: actions/checkout@v4.1.7
- name: Check spelling
uses: crate-ci/typos@v1.28.2
uses: crate-ci/typos@v1.28.4

Cargo.lock (generated)

@ -2458,9 +2458,9 @@ checksum = "7655c9839580ee829dfacba1d1278c2b7883e50a277ff7541299489d6bdfdc45"
[[package]]
name = "is_debug"
version = "1.0.1"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "06d198e9919d9822d5f7083ba8530e04de87841eaf21ead9af8f2304efd57c89"
checksum = "e8ea828c9d6638a5bd3d8b14e37502b4d56cae910ccf8a5b7f51c7a0eb1d0508"
[[package]]
name = "is_executable"
@ -3737,6 +3737,7 @@ dependencies = [
"nix 0.29.0",
"num-format",
"serde",
"serde_json",
"strip-ansi-escapes",
"sys-locale",
"unicase",
@ -6293,9 +6294,9 @@ dependencies = [
[[package]]
name = "shadow-rs"
version = "0.36.0"
version = "0.37.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "58cfcd0643497a9f780502063aecbcc4a3212cbe4948fd25ee8fd179c2cf9a18"
checksum = "974eb8222c62a8588bc0f02794dd1ba5b60b3ec88b58e050729d0907ed6af610"
dependencies = [
"const_format",
"is_debug",


@ -176,7 +176,6 @@ pub fn complete_item(
) -> Vec<FileSuggestion> {
let cleaned_partial = surround_remove(partial);
let isdir = cleaned_partial.ends_with(is_separator);
#[cfg(windows)]
let cleaned_partial =
if let Some(absolute_path) = expand_pwd(stack, engine_state, Path::new(&cleaned_partial)) {
if let Some(abs_path_str) = absolute_path.as_path().to_str() {


@ -21,10 +21,10 @@ nu-protocol = { path = "../nu-protocol", version = "0.100.1", default-features =
nu-utils = { path = "../nu-utils", version = "0.100.1", default-features = false }
itertools = { workspace = true }
shadow-rs = { version = "0.36", default-features = false }
shadow-rs = { version = "0.37", default-features = false }
[build-dependencies]
shadow-rs = { version = "0.36", default-features = false }
shadow-rs = { version = "0.37", default-features = false }
[features]
default = ["os"]
@ -42,4 +42,4 @@ mimalloc = []
trash-support = []
sqlite = []
static-link-openssl = []
system-clipboard = []
system-clipboard = []


@ -1,12 +1,13 @@
use std::process::Command;
fn main() -> shadow_rs::SdResult<()> {
fn main() {
// Look up the current Git commit ourselves instead of relying on shadow_rs,
// because shadow_rs does it in a really slow-to-compile way (it builds libgit2)
let hash = get_git_hash().unwrap_or_default();
println!("cargo:rustc-env=NU_COMMIT_HASH={hash}");
shadow_rs::new()
shadow_rs::ShadowBuilder::builder()
.build()
.expect("shadow builder build should success");
}
fn get_git_hash() -> Option<String> {


@ -1,7 +1,7 @@
use nu_cmd_base::input_handler::{operate, CmdArgument};
use nu_engine::command_prelude::*;
pub struct Arguments {
struct Arguments {
cell_paths: Option<Vec<CellPath>>,
compact: bool,
}
@ -142,7 +142,7 @@ fn into_binary(
}
}
pub fn action(input: &Value, _args: &Arguments, span: Span) -> Value {
fn action(input: &Value, _args: &Arguments, span: Span) -> Value {
let value = match input {
Value::Binary { .. } => input.clone(),
Value::Int { val, .. } => Value::binary(val.to_ne_bytes().to_vec(), span),


@ -116,7 +116,7 @@ impl Command for SubCommand {
}
}
pub fn action(input: &Value, _args: &CellPathOnlyArgs, span: Span) -> Value {
fn action(input: &Value, _args: &CellPathOnlyArgs, span: Span) -> Value {
let value_span = input.span();
match input {
Value::Filesize { .. } => input.clone(),


@ -10,6 +10,7 @@ mod metadata_set;
mod profile;
mod timeit;
mod view;
mod view_blocks;
mod view_files;
mod view_ir;
mod view_source;
@ -27,6 +28,7 @@ pub use metadata_set::MetadataSet;
pub use profile::DebugProfile;
pub use timeit::TimeIt;
pub use view::View;
pub use view_blocks::ViewBlocks;
pub use view_files::ViewFiles;
pub use view_ir::ViewIr;
pub use view_source::ViewSource;


@ -0,0 +1,71 @@
use nu_engine::command_prelude::*;
#[derive(Clone)]
pub struct ViewBlocks;
impl Command for ViewBlocks {
fn name(&self) -> &str {
"view blocks"
}
fn description(&self) -> &str {
"View the blocks registered in nushell's EngineState memory."
}
fn extra_description(&self) -> &str {
"These are blocks parsed and loaded at runtime as well as any blocks that accumulate in the repl."
}
fn signature(&self) -> nu_protocol::Signature {
Signature::build("view blocks")
.input_output_types(vec![(
Type::Nothing,
Type::Table(
[
("block_id".into(), Type::Int),
("content".into(), Type::String),
("start".into(), Type::Int),
("end".into(), Type::Int),
]
.into(),
),
)])
.category(Category::Debug)
}
fn run(
&self,
engine_state: &EngineState,
_stack: &mut Stack,
call: &Call,
_input: PipelineData,
) -> Result<PipelineData, ShellError> {
let mut records = vec![];
for block_id in 0..engine_state.num_blocks() {
let block = engine_state.get_block(nu_protocol::BlockId::new(block_id));
if let Some(span) = block.span {
let contents_bytes = engine_state.get_span_contents(span);
let contents_string = String::from_utf8_lossy(contents_bytes);
let cur_rec = record! {
"block_id" => Value::int(block_id as i64, span),
"content" => Value::string(contents_string.trim().to_string(), span),
"start" => Value::int(span.start as i64, span),
"end" => Value::int(span.end as i64, span),
};
records.push(Value::record(cur_rec, span));
}
}
Ok(Value::list(records, call.head).into_pipeline_data())
}
fn examples(&self) -> Vec<Example> {
vec![Example {
description: "View the blocks registered in Nushell's EngineState memory",
example: r#"view blocks"#,
result: None,
}]
}
}
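
The `view blocks` command above pairs with the `view source` change later in this commit, which now also accepts a block id as an integer argument. A hedged usage sketch (the id 42 is a placeholder, not taken from the diff):

```nu
# List the blocks registered in the engine state, with their ids and spans
view blocks

# View the source of one block by its id
# (42 is a placeholder; use a block_id reported by `view blocks`)
view source 42
```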


@ -33,6 +33,34 @@ impl Command for ViewSource {
let arg_span = arg.span();
let source = match arg {
Value::Int { val, .. } => {
if let Some(block) =
engine_state.try_get_block(nu_protocol::BlockId::new(val as usize))
{
if let Some(span) = block.span {
let contents = engine_state.get_span_contents(span);
Ok(Value::string(String::from_utf8_lossy(contents), call.head)
.into_pipeline_data())
} else {
Err(ShellError::GenericError {
error: "Cannot view int value".to_string(),
msg: "the block does not have a viewable span".to_string(),
span: Some(arg_span),
help: None,
inner: vec![],
})
}
} else {
Err(ShellError::GenericError {
error: format!("Block Id {} does not exist", arg.coerce_into_string()?),
msg: "this number does not correspond to a block".to_string(),
span: Some(arg_span),
help: None,
inner: vec![],
})
}
}
Value::String { val, .. } => {
if let Some(decl_id) = engine_state.find_decl(val.as_bytes(), &[]) {
// arg is a command
@ -130,7 +158,7 @@ impl Command for ViewSource {
Ok(Value::string(final_contents, call.head).into_pipeline_data())
} else {
Err(ShellError::GenericError {
error: "Cannot view value".to_string(),
error: "Cannot view string value".to_string(),
msg: "the command does not have a viewable block span".to_string(),
span: Some(arg_span),
help: None,
@ -139,7 +167,7 @@ impl Command for ViewSource {
}
} else {
Err(ShellError::GenericError {
error: "Cannot view value".to_string(),
error: "Cannot view string decl value".to_string(),
msg: "the command does not have a viewable block".to_string(),
span: Some(arg_span),
help: None,
@ -155,7 +183,7 @@ impl Command for ViewSource {
.into_pipeline_data())
} else {
Err(ShellError::GenericError {
error: "Cannot view value".to_string(),
error: "Cannot view string module value".to_string(),
msg: "the module does not have a viewable block".to_string(),
span: Some(arg_span),
help: None,
@ -164,7 +192,7 @@ impl Command for ViewSource {
}
} else {
Err(ShellError::GenericError {
error: "Cannot view value".to_string(),
error: "Cannot view string value".to_string(),
msg: "this name does not correspond to a viewable value".to_string(),
span: Some(arg_span),
help: None,


@ -61,6 +61,7 @@ pub fn add_shell_command_context(mut engine_state: EngineState) -> EngineState {
SplitBy,
Take,
Merge,
MergeDeep,
Move,
TakeWhile,
TakeUntil,
@ -160,6 +161,7 @@ pub fn add_shell_command_context(mut engine_state: EngineState) -> EngineState {
MetadataSet,
TimeIt,
View,
ViewBlocks,
ViewFiles,
ViewIr,
ViewSource,
@ -349,6 +351,7 @@ pub fn add_shell_command_context(mut engine_state: EngineState) -> EngineState {
WithEnv,
ConfigNu,
ConfigEnv,
ConfigFlatten,
ConfigMeta,
ConfigReset,
};


@ -18,8 +18,8 @@ impl Command for ConfigEnv {
Some('d'),
)
.switch(
"sample",
"Print a commented, sample `env.nu` file instead.",
"doc",
"Print a commented `env.nu` with documentation instead.",
Some('s'),
)
// TODO: Signature narrower than what run actually supports theoretically
@ -37,8 +37,8 @@ impl Command for ConfigEnv {
result: None,
},
Example {
description: "pretty-print a commented, sample `env.nu` that explains common settings",
example: "config env --sample | nu-highlight,",
description: "pretty-print a commented `env.nu` that explains common settings",
example: "config env --doc | nu-highlight,",
result: None,
},
Example {
@ -57,13 +57,13 @@ impl Command for ConfigEnv {
_input: PipelineData,
) -> Result<PipelineData, ShellError> {
let default_flag = call.has_flag(engine_state, stack, "default")?;
let sample_flag = call.has_flag(engine_state, stack, "sample")?;
if default_flag && sample_flag {
let doc_flag = call.has_flag(engine_state, stack, "doc")?;
if default_flag && doc_flag {
return Err(ShellError::IncompatibleParameters {
left_message: "can't use `--default` at the same time".into(),
left_span: call.get_flag_span(stack, "default").expect("has flag"),
right_message: "because of `--sample`".into(),
right_span: call.get_flag_span(stack, "sample").expect("has flag"),
right_message: "because of `--doc`".into(),
right_span: call.get_flag_span(stack, "doc").expect("has flag"),
});
}
// `--default` flag handling
@ -72,10 +72,10 @@ impl Command for ConfigEnv {
return Ok(Value::string(nu_utils::get_default_env(), head).into_pipeline_data());
}
// `--sample` flag handling
if sample_flag {
// `--doc` flag handling
if doc_flag {
let head = call.head;
return Ok(Value::string(nu_utils::get_sample_env(), head).into_pipeline_data());
return Ok(Value::string(nu_utils::get_doc_env(), head).into_pipeline_data());
}
super::config_::start_editor("env-path", engine_state, stack, call)


@ -0,0 +1,195 @@
use nu_engine::command_prelude::*;
use nu_utils::JsonFlattener; // Ensure this import is present
#[derive(Clone)]
pub struct ConfigFlatten;
impl Command for ConfigFlatten {
fn name(&self) -> &str {
"config flatten"
}
fn signature(&self) -> Signature {
Signature::build(self.name())
.category(Category::Debug)
.input_output_types(vec![(Type::Nothing, Type::record())])
}
fn description(&self) -> &str {
"Show the current configuration in a flattened form."
}
fn examples(&self) -> Vec<Example> {
vec![Example {
description: "Show the current configuration in a flattened form",
example: "config flatten",
result: None,
}]
}
fn run(
&self,
engine_state: &EngineState,
_stack: &mut Stack,
call: &Call,
_input: PipelineData,
) -> Result<PipelineData, ShellError> {
// Get the Config instance from the EngineState
let config = engine_state.get_config();
// Serialize the Config instance to JSON
let serialized_config =
serde_json::to_value(&**config).map_err(|err| ShellError::GenericError {
error: format!("Failed to serialize config to json: {err}"),
msg: "".into(),
span: Some(call.head),
help: None,
inner: vec![],
})?;
// Create a JsonFlattener instance with appropriate arguments
let flattener = JsonFlattener {
separator: ".",
alt_array_flattening: false,
preserve_arrays: true,
};
// Flatten the JSON value
let flattened_config_str = flattener.flatten(&serialized_config).to_string();
let flattened_values =
convert_string_to_value(&flattened_config_str, engine_state, call.head)?;
Ok(flattened_values.into_pipeline_data())
}
}
// From here below is taken from `from json`. Would be nice to have a nu-utils-value crate that could be shared
fn convert_string_to_value(
string_input: &str,
engine_state: &EngineState,
span: Span,
) -> Result<Value, ShellError> {
match nu_json::from_str(string_input) {
Ok(value) => Ok(convert_nujson_to_value(None, value, engine_state, span)),
Err(x) => match x {
nu_json::Error::Syntax(_, row, col) => {
let label = x.to_string();
let label_span = convert_row_column_to_span(row, col, string_input);
Err(ShellError::GenericError {
error: "Error while parsing JSON text".into(),
msg: "error parsing JSON text".into(),
span: Some(span),
help: None,
inner: vec![ShellError::OutsideSpannedLabeledError {
src: string_input.into(),
error: "Error while parsing JSON text".into(),
msg: label,
span: label_span,
}],
})
}
x => Err(ShellError::CantConvert {
to_type: format!("structured json data ({x})"),
from_type: "string".into(),
span,
help: None,
}),
},
}
}
fn convert_nujson_to_value(
key: Option<String>,
value: nu_json::Value,
engine_state: &EngineState,
span: Span,
) -> Value {
match value {
nu_json::Value::Array(array) => Value::list(
array
.into_iter()
.map(|x| convert_nujson_to_value(key.clone(), x, engine_state, span))
.collect(),
span,
),
nu_json::Value::Bool(b) => Value::bool(b, span),
nu_json::Value::F64(f) => Value::float(f, span),
nu_json::Value::I64(i) => {
if let Some(closure_str) = expand_closure(key.clone(), i, engine_state) {
Value::string(closure_str, span)
} else {
Value::int(i, span)
}
}
nu_json::Value::Null => Value::nothing(span),
nu_json::Value::Object(k) => Value::record(
k.into_iter()
.map(|(k, v)| {
let mut key = k.clone();
// Keep .Closure.val and .block_id as part of the key during conversion to value
let value = convert_nujson_to_value(Some(key.clone()), v, engine_state, span);
// Replace .Closure.val and .block_id from the key after the conversion
if key.contains(".Closure.val") || key.contains(".block_id") {
key = key.replace(".Closure.val", "").replace(".block_id", "");
}
(key, value)
})
.collect(),
span,
),
nu_json::Value::U64(u) => {
if u > i64::MAX as u64 {
Value::error(
ShellError::CantConvert {
to_type: "i64 sized integer".into(),
from_type: "value larger than i64".into(),
span,
help: None,
},
span,
)
} else if let Some(closure_str) = expand_closure(key.clone(), u as i64, engine_state) {
Value::string(closure_str, span)
} else {
Value::int(u as i64, span)
}
}
nu_json::Value::String(s) => Value::string(s, span),
}
}
// If the block_id is a real block id, then it should expand into the closure contents, otherwise return None
fn expand_closure(
key: Option<String>,
block_id: i64,
engine_state: &EngineState,
) -> Option<String> {
match key {
Some(key) if key.contains(".Closure.val") || key.contains(".block_id") => engine_state
.try_get_block(nu_protocol::BlockId::new(block_id as usize))
.and_then(|block| block.span)
.map(|span| {
let contents = engine_state.get_span_contents(span);
String::from_utf8_lossy(contents).to_string()
}),
_ => None,
}
}
// Converts row+column to a Span, assuming bytes (1-based rows)
fn convert_row_column_to_span(row: usize, col: usize, contents: &str) -> Span {
let mut cur_row = 1;
let mut cur_col = 1;
for (offset, curr_byte) in contents.bytes().enumerate() {
if curr_byte == b'\n' {
cur_row += 1;
cur_col = 1;
}
if cur_row >= row && cur_col >= col {
return Span::new(offset, offset);
} else {
cur_col += 1;
}
}
Span::new(contents.len(), contents.len())
}
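
Since `config flatten` emits a single record whose keys use the `.` separator configured above, it can be post-processed with ordinary table commands. An illustrative sketch (the filter pattern is an assumption, not part of the diff):

```nu
# Turn the flattened config record into a key/value table and
# keep only the history-related settings
config flatten | transpose key value | where key =~ "history"
```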


@ -18,11 +18,10 @@ impl Command for ConfigNu {
Some('d'),
)
.switch(
"sample",
"Print a commented, sample `config.nu` file instead.",
"doc",
"Print a commented `config.nu` with documentation instead.",
Some('s'),
)
// TODO: Signature narrower than what run actually supports theoretically
}
fn description(&self) -> &str {
@ -37,8 +36,8 @@ impl Command for ConfigNu {
result: None,
},
Example {
description: "pretty-print a commented, sample `config.nu` that explains common settings",
example: "config nu --sample | nu-highlight",
description: "pretty-print a commented `config.nu` that explains common settings",
example: "config nu --doc | nu-highlight",
result: None,
},
Example {
@ -58,13 +57,13 @@ impl Command for ConfigNu {
_input: PipelineData,
) -> Result<PipelineData, ShellError> {
let default_flag = call.has_flag(engine_state, stack, "default")?;
let sample_flag = call.has_flag(engine_state, stack, "sample")?;
if default_flag && sample_flag {
let doc_flag = call.has_flag(engine_state, stack, "doc")?;
if default_flag && doc_flag {
return Err(ShellError::IncompatibleParameters {
left_message: "can't use `--default` at the same time".into(),
left_span: call.get_flag_span(stack, "default").expect("has flag"),
right_message: "because of `--sample`".into(),
right_span: call.get_flag_span(stack, "sample").expect("has flag"),
right_message: "because of `--doc`".into(),
right_span: call.get_flag_span(stack, "doc").expect("has flag"),
});
}
@ -74,10 +73,10 @@ impl Command for ConfigNu {
return Ok(Value::string(nu_utils::get_default_config(), head).into_pipeline_data());
}
// `--sample` flag handling
if sample_flag {
// `--doc` flag handling
if doc_flag {
let head = call.head;
return Ok(Value::string(nu_utils::get_sample_config(), head).into_pipeline_data());
return Ok(Value::string(nu_utils::get_doc_config(), head).into_pipeline_data());
}
super::config_::start_editor("config-path", engine_state, stack, call)


@ -1,8 +1,11 @@
mod config_;
mod config_env;
mod config_flatten;
mod config_nu;
mod config_reset;
pub use config_::ConfigMeta;
pub use config_env::ConfigEnv;
pub use config_flatten::ConfigFlatten;
pub use config_nu::ConfigNu;
pub use config_reset::ConfigReset;


@ -5,6 +5,7 @@ mod source_env;
mod with_env;
pub use config::ConfigEnv;
pub use config::ConfigFlatten;
pub use config::ConfigMeta;
pub use config::ConfigNu;
pub use config::ConfigReset;


@ -39,11 +39,6 @@ impl Command for Du {
SyntaxShape::OneOf(vec![SyntaxShape::GlobPattern, SyntaxShape::String]),
"Starting directory.",
)
.switch(
"all",
"Output file sizes as well as directory sizes",
Some('a'),
)
.switch(
"deref",
"Dereference symlinks to their targets for size",


@ -0,0 +1,174 @@
use nu_engine::command_prelude::*;
#[derive(Copy, Clone)]
pub(crate) enum MergeStrategy {
/// Key-value pairs present in lhs and rhs are overwritten by values in rhs
Shallow,
/// Records are merged recursively, otherwise same behavior as shallow
Deep(ListMerge),
}
#[derive(Copy, Clone)]
pub(crate) enum ListMerge {
/// Lists in lhs are overwritten by lists in rhs
Overwrite,
/// Lists of records are merged element-wise, other lists are overwritten by rhs
Elementwise,
/// All lists are concatenated together, lhs ++ rhs
Append,
/// All lists are concatenated together, rhs ++ lhs
Prepend,
}
/// Test whether a value is a list of records.
///
/// This includes tables and non-tables.
fn is_list_of_records(val: &Value) -> bool {
match val {
list @ Value::List { .. } if matches!(list.get_type(), Type::Table { .. }) => true,
// we want to include lists of records, but not lists of mixed types
Value::List { vals, .. } => vals
.iter()
.map(Value::get_type)
.all(|val| matches!(val, Type::Record { .. })),
_ => false,
}
}
/// Typecheck a merge operation.
///
/// Ensures that both arguments are records, tables, or lists of non-matching records.
pub(crate) fn typecheck_merge(lhs: &Value, rhs: &Value, head: Span) -> Result<(), ShellError> {
match (lhs.get_type(), rhs.get_type()) {
(Type::Record { .. }, Type::Record { .. }) => Ok(()),
(_, _) if is_list_of_records(lhs) && is_list_of_records(rhs) => Ok(()),
_ => Err(ShellError::PipelineMismatch {
exp_input_type: "input and argument to be both record or both table".to_string(),
dst_span: head,
src_span: lhs.span(),
}),
}
}
pub(crate) fn do_merge(
lhs: Value,
rhs: Value,
strategy: MergeStrategy,
span: Span,
) -> Result<Value, ShellError> {
match (strategy, lhs, rhs) {
// Propagate errors
(_, Value::Error { error, .. }, _) | (_, _, Value::Error { error, .. }) => Err(*error),
// Shallow merge records
(
MergeStrategy::Shallow,
Value::Record { val: lhs, .. },
Value::Record { val: rhs, .. },
) => Ok(Value::record(
merge_records(lhs.into_owned(), rhs.into_owned(), strategy, span)?,
span,
)),
// Deep merge records
(
MergeStrategy::Deep(_),
Value::Record { val: lhs, .. },
Value::Record { val: rhs, .. },
) => Ok(Value::record(
merge_records(lhs.into_owned(), rhs.into_owned(), strategy, span)?,
span,
)),
// Merge lists by appending
(
MergeStrategy::Deep(ListMerge::Append),
Value::List { vals: lhs, .. },
Value::List { vals: rhs, .. },
) => Ok(Value::list(lhs.into_iter().chain(rhs).collect(), span)),
// Merge lists by prepending
(
MergeStrategy::Deep(ListMerge::Prepend),
Value::List { vals: lhs, .. },
Value::List { vals: rhs, .. },
) => Ok(Value::list(rhs.into_iter().chain(lhs).collect(), span)),
// Merge lists of records elementwise (tables and non-tables)
// Match on shallow since this might be a top-level table
(
MergeStrategy::Shallow | MergeStrategy::Deep(ListMerge::Elementwise),
lhs_list @ Value::List { .. },
rhs_list @ Value::List { .. },
) if is_list_of_records(&lhs_list) && is_list_of_records(&rhs_list) => {
let lhs = lhs_list
.into_list()
.expect("Value matched as list above, but is not a list");
let rhs = rhs_list
.into_list()
.expect("Value matched as list above, but is not a list");
Ok(Value::list(merge_tables(lhs, rhs, strategy, span)?, span))
}
// Use rhs value (shallow record merge, overwrite list merge, and general scalar merge)
(_, _, val) => Ok(val),
}
}
/// Merge right-hand table into left-hand table, element-wise
///
/// For example:
/// lhs = [{a: 12, b: 34}]
/// rhs = [{a: 56, c: 78}]
/// output = [{a: 56, b: 34, c: 78}]
fn merge_tables(
lhs: Vec<Value>,
rhs: Vec<Value>,
strategy: MergeStrategy,
span: Span,
) -> Result<Vec<Value>, ShellError> {
let mut table_iter = rhs.into_iter();
lhs.into_iter()
.map(move |inp| match (inp.into_record(), table_iter.next()) {
(Ok(rec), Some(to_merge)) => match to_merge.into_record() {
Ok(to_merge) => Ok(Value::record(
merge_records(rec.to_owned(), to_merge.to_owned(), strategy, span)?,
span,
)),
Err(error) => Ok(Value::error(error, span)),
},
(Ok(rec), None) => Ok(Value::record(rec, span)),
(Err(error), _) => Ok(Value::error(error, span)),
})
.collect()
}
fn merge_records(
mut lhs: Record,
rhs: Record,
strategy: MergeStrategy,
span: Span,
) -> Result<Record, ShellError> {
match strategy {
MergeStrategy::Shallow => {
for (col, rval) in rhs.into_iter() {
lhs.insert(col, rval);
}
}
strategy => {
for (col, rval) in rhs.into_iter() {
// in order to both avoid cloning (possibly nested) record values and maintain the ordering of record keys, we can swap a temporary value into the source record.
// if we were to remove the value, the ordering would be messed up as we might not insert back into the original index
// it's okay to swap a temporary value in, since we know it will be replaced by the end of the function call
//
// use an error here instead of something like null so if this somehow makes it into the output, the bug will be immediately obvious
let failed_error = ShellError::NushellFailed {
msg: "Merge failed to properly replace internal temporary value".to_owned(),
};
let value = match lhs.insert(&col, Value::error(failed_error, span)) {
Some(lval) => do_merge(lval, rval, strategy, span)?,
None => rval,
};
lhs.insert(col, value);
}
}
}
Ok(lhs)
}
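
For orientation, the `MergeStrategy` and `ListMerge` variants above correspond to shell-level behavior roughly as follows (illustrative sketch based on the `merge` and `merge deep` commands in this commit):

```nu
# Shallow merge: top-level values from the right-hand side win
{a: 1, b: {c: 2}} | merge {b: {d: 3}}        # => {a: 1, b: {d: 3}}

# Deep merge: nested records are merged recursively
{a: 1, b: {c: 2}} | merge deep {b: {d: 3}}   # => {a: 1, b: {c: 2, d: 3}}
```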


@ -0,0 +1,157 @@
use super::common::{do_merge, typecheck_merge, ListMerge, MergeStrategy};
use nu_engine::command_prelude::*;
#[derive(Clone)]
pub struct MergeDeep;
impl Command for MergeDeep {
fn name(&self) -> &str {
"merge deep"
}
fn description(&self) -> &str {
"Merge the input with a record or table, recursively merging values in matching columns."
}
fn extra_description(&self) -> &str {
r#"The way that key-value pairs which exist in both the input and the argument are merged depends on their types.
Scalar values (like numbers and strings) in the input are overwritten by the corresponding value from the argument.
Records in the input are merged similarly to the merge command, but recursing rather than overwriting inner records.
The way lists and tables are merged is controlled by the `--strategy` flag:
- table: Merges tables element-wise, similarly to the merge command. Non-table lists are overwritten.
- overwrite: Lists and tables are overwritten with their corresponding value from the argument, similarly to scalars.
- append: Lists and tables in the input are appended with the corresponding list from the argument.
- prepend: Lists and tables in the input are prepended with the corresponding list from the argument."#
}
fn signature(&self) -> nu_protocol::Signature {
Signature::build("merge deep")
.input_output_types(vec![
(Type::record(), Type::record()),
(Type::table(), Type::table()),
])
.required(
"value",
SyntaxShape::OneOf(vec![
SyntaxShape::Record(vec![]),
SyntaxShape::Table(vec![]),
SyntaxShape::List(SyntaxShape::Any.into()),
]),
"The new value to merge with.",
)
.category(Category::Filters)
.named("strategy", SyntaxShape::String, "The list merging strategy to use. One of: table (default), overwrite, append, prepend", Some('s'))
}
fn examples(&self) -> Vec<Example> {
vec![
Example {
example: "{a: 1, b: {c: 2, d: 3}} | merge deep {b: {d: 4, e: 5}}",
description: "Merge two records recursively",
result: Some(Value::test_record(record! {
"a" => Value::test_int(1),
"b" => Value::test_record(record! {
"c" => Value::test_int(2),
"d" => Value::test_int(4),
"e" => Value::test_int(5),
})
})),
},
Example {
example: r#"[{columnA: 0, columnB: [{B1: 1}]}] | merge deep [{columnB: [{B2: 2}]}]"#,
description: "Merge two tables",
result: Some(Value::test_list(vec![Value::test_record(record! {
"columnA" => Value::test_int(0),
"columnB" => Value::test_list(vec![
Value::test_record(record! {
"B1" => Value::test_int(1),
"B2" => Value::test_int(2),
})
]),
})])),
},
Example {
example: r#"{inner: [{a: 1}, {b: 2}]} | merge deep {inner: [{c: 3}]}"#,
description: "Merge two records and their inner tables",
result: Some(Value::test_record(record! {
"inner" => Value::test_list(vec![
Value::test_record(record! {
"a" => Value::test_int(1),
"c" => Value::test_int(3),
}),
Value::test_record(record! {
"b" => Value::test_int(2),
})
])
})),
},
Example {
example: r#"{inner: [{a: 1}, {b: 2}]} | merge deep {inner: [{c: 3}]} --strategy=append"#,
description: "Merge two records, appending their inner tables",
result: Some(Value::test_record(record! {
"inner" => Value::test_list(vec![
Value::test_record(record! {
"a" => Value::test_int(1),
}),
Value::test_record(record! {
"b" => Value::test_int(2),
}),
Value::test_record(record! {
"c" => Value::test_int(3),
}),
])
})),
},
]
}
fn run(
&self,
engine_state: &EngineState,
stack: &mut Stack,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let head = call.head;
let merge_value: Value = call.req(engine_state, stack, 0)?;
let strategy_flag: Option<String> = call.get_flag(engine_state, stack, "strategy")?;
let metadata = input.metadata();
// collect input before typechecking, so tables are detected as such
let input_span = input.span().unwrap_or(head);
let input = input.into_value(input_span)?;
let strategy = match strategy_flag.as_deref() {
None | Some("table") => MergeStrategy::Deep(ListMerge::Elementwise),
Some("append") => MergeStrategy::Deep(ListMerge::Append),
Some("prepend") => MergeStrategy::Deep(ListMerge::Prepend),
Some("overwrite") => MergeStrategy::Deep(ListMerge::Overwrite),
Some(_) => {
return Err(ShellError::IncorrectValue {
msg: "The list merging strategy must be one of: table, overwrite, append, prepend".to_string(),
val_span: call.get_flag_span(stack, "strategy").unwrap_or(head),
call_span: head,
})
}
};
typecheck_merge(&input, &merge_value, head)?;
let merged = do_merge(input, merge_value, strategy, head)?;
Ok(merged.into_pipeline_data_with_metadata(metadata))
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test_examples() {
use crate::test_examples;
test_examples(MergeDeep {})
}
}


@ -1,3 +1,4 @@
use super::common::{do_merge, typecheck_merge, MergeStrategy};
use nu_engine::command_prelude::*;
#[derive(Clone)]
@ -28,8 +29,10 @@ repeating this process with row 1, and so on."#
])
.required(
"value",
// Both this and `update` should have a shape more like <record> | <table> than just <any>. -Leon 2022-10-27
SyntaxShape::Any,
SyntaxShape::OneOf(vec![
SyntaxShape::Record(vec![]),
SyntaxShape::Table(vec![]),
]),
"The new value to merge with.",
)
.category(Category::Filters)
@ -89,74 +92,17 @@ repeating this process with row 1, and so on."#
let merge_value: Value = call.req(engine_state, stack, 0)?;
let metadata = input.metadata();
match (&input, merge_value) {
// table (list of records)
(
PipelineData::Value(Value::List { .. }, ..) | PipelineData::ListStream { .. },
Value::List { vals, .. },
) => {
let mut table_iter = vals.into_iter();
// collect input before typechecking, so tables are detected as such
let input_span = input.span().unwrap_or(head);
let input = input.into_value(input_span)?;
let res =
input
.into_iter()
.map(move |inp| match (inp.as_record(), table_iter.next()) {
(Ok(inp), Some(to_merge)) => match to_merge.as_record() {
Ok(to_merge) => Value::record(do_merge(inp, to_merge), head),
Err(error) => Value::error(error, head),
},
(_, None) => inp,
(Err(error), _) => Value::error(error, head),
});
typecheck_merge(&input, &merge_value, head)?;
Ok(res.into_pipeline_data_with_metadata(
head,
engine_state.signals().clone(),
metadata,
))
}
// record
(
PipelineData::Value(Value::Record { val: inp, .. }, ..),
Value::Record { val: to_merge, .. },
) => Ok(Value::record(do_merge(inp, &to_merge), head).into_pipeline_data()),
// Propagate errors in the pipeline
(PipelineData::Value(Value::Error { error, .. }, ..), _) => Err(*error.clone()),
(PipelineData::Value(val, ..), ..) => {
// Only point the "value originates here" arrow at the merge value
// if it was generated from a block. Otherwise, point at the pipeline value. -Leon 2022-10-27
let span = if val.span() == Span::test_data() {
Span::new(head.start, head.start)
} else {
val.span()
};
Err(ShellError::PipelineMismatch {
exp_input_type: "input, and argument, to be both record or both table"
.to_string(),
dst_span: head,
src_span: span,
})
}
_ => Err(ShellError::PipelineMismatch {
exp_input_type: "input, and argument, to be both record or both table".to_string(),
dst_span: head,
src_span: Span::new(head.start, head.start),
}),
}
let merged = do_merge(input, merge_value, MergeStrategy::Shallow, head)?;
Ok(merged.into_pipeline_data_with_metadata(metadata))
}
}
// TODO: rewrite to mutate the input record
fn do_merge(input_record: &Record, to_merge_record: &Record) -> Record {
let mut result = input_record.clone();
for (col, val) in to_merge_record {
result.insert(col, val.clone());
}
result
}
#[cfg(test)]
mod test {
use super::*;


@ -0,0 +1,6 @@
mod common;
pub mod deep;
pub mod merge_;
pub use deep::MergeDeep;
pub use merge_::Merge;


@ -87,6 +87,7 @@ pub use last::Last;
pub use length::Length;
pub use lines::Lines;
pub use merge::Merge;
pub use merge::MergeDeep;
pub use move_::Move;
pub use par_each::ParEach;
pub use prepend::Prepend;


@ -14,7 +14,7 @@ impl Command for SplitBy {
Signature::build("split-by")
.input_output_types(vec![(Type::record(), Type::record())])
.optional("splitter", SyntaxShape::Any, "The splitter value to use.")
.category(Category::Filters)
.category(Category::Deprecated)
}
fn description(&self) -> &str {


@ -204,12 +204,45 @@ fn from_csv(
#[cfg(test)]
mod test {
use nu_cmd_lang::eval_pipeline_without_terminal_expression;
use super::*;
use crate::{Metadata, MetadataSet};
#[test]
fn test_examples() {
use crate::test_examples;
test_examples(FromCsv {})
}
#[test]
fn test_content_type_metadata() {
let mut engine_state = Box::new(EngineState::new());
let delta = {
let mut working_set = StateWorkingSet::new(&engine_state);
working_set.add_decl(Box::new(FromCsv {}));
working_set.add_decl(Box::new(Metadata {}));
working_set.add_decl(Box::new(MetadataSet {}));
working_set.render()
};
engine_state
.merge_delta(delta)
.expect("Error merging delta");
let cmd = r#""a,b\n1,2" | metadata set --content-type 'text/csv' --datasource-ls | from csv | metadata | $in"#;
let result = eval_pipeline_without_terminal_expression(
cmd,
std::env::temp_dir().as_ref(),
&mut engine_state,
);
assert_eq!(
Value::test_record(record!("source" => Value::test_string("ls"))),
result.expect("There should be a result")
)
}
}


@ -93,9 +93,10 @@ pub(super) fn from_delimited_data(
input: PipelineData,
name: Span,
) -> Result<PipelineData, ShellError> {
let metadata = input.metadata().map(|md| md.with_content_type(None));
match input {
PipelineData::Empty => Ok(PipelineData::Empty),
PipelineData::Value(value, metadata) => {
PipelineData::Value(value, ..) => {
let string = value.into_string()?;
let byte_stream = ByteStream::read_string(string, name, Signals::empty());
Ok(PipelineData::ListStream(
@ -109,7 +110,7 @@ pub(super) fn from_delimited_data(
dst_span: name,
src_span: list_stream.span(),
}),
PipelineData::ByteStream(byte_stream, metadata) => Ok(PipelineData::ListStream(
PipelineData::ByteStream(byte_stream, ..) => Ok(PipelineData::ListStream(
from_delimited_stream(config, byte_stream, name)?,
metadata,
)),


@ -70,23 +70,22 @@ impl Command for FromJson {
let span = call.head;
let strict = call.has_flag(engine_state, stack, "strict")?;
let metadata = input.metadata().map(|md| md.with_content_type(None));
// TODO: turn this into a structured underline of the nu_json error
if call.has_flag(engine_state, stack, "objects")? {
// Return a stream of JSON values, one for each non-empty line
match input {
PipelineData::Value(Value::String { val, .. }, metadata) => {
Ok(PipelineData::ListStream(
read_json_lines(
Cursor::new(val),
span,
strict,
engine_state.signals().clone(),
),
metadata,
))
}
PipelineData::ByteStream(stream, metadata)
PipelineData::Value(Value::String { val, .. }, ..) => Ok(PipelineData::ListStream(
read_json_lines(
Cursor::new(val),
span,
strict,
engine_state.signals().clone(),
),
metadata,
)),
PipelineData::ByteStream(stream, ..)
if stream.type_() != ByteStreamType::Binary =>
{
if let Some(reader) = stream.reader() {
@ -107,7 +106,7 @@ impl Command for FromJson {
}
} else {
// Return a single JSON value
let (string_input, span, metadata) = input.collect_string_strict(span)?;
let (string_input, span, ..) = input.collect_string_strict(span)?;
if string_input.is_empty() {
return Ok(Value::nothing(span).into_pipeline_data());
@ -267,6 +266,10 @@ fn convert_string_to_value_strict(string_input: &str, span: Span) -> Result<Valu
#[cfg(test)]
mod test {
use nu_cmd_lang::eval_pipeline_without_terminal_expression;
use crate::{Metadata, MetadataSet};
use super::*;
#[test]
@ -275,4 +278,33 @@ mod test {
test_examples(FromJson {})
}
#[test]
fn test_content_type_metadata() {
let mut engine_state = Box::new(EngineState::new());
let delta = {
let mut working_set = StateWorkingSet::new(&engine_state);
working_set.add_decl(Box::new(FromJson {}));
working_set.add_decl(Box::new(Metadata {}));
working_set.add_decl(Box::new(MetadataSet {}));
working_set.render()
};
engine_state
.merge_delta(delta)
.expect("Error merging delta");
let cmd = r#"'{"a":1,"b":2}' | metadata set --content-type 'application/json' --datasource-ls | from json | metadata | $in"#;
let result = eval_pipeline_without_terminal_expression(
cmd,
std::env::temp_dir().as_ref(),
&mut engine_state,
);
assert_eq!(
Value::test_record(record!("source" => Value::test_string("ls"))),
result.expect("There should be a result")
)
}
}


@ -113,7 +113,8 @@ MessagePack: https://msgpack.org/
objects,
signals: engine_state.signals().clone(),
};
match input {
let metadata = input.metadata().map(|md| md.with_content_type(None));
let out = match input {
// Deserialize from a byte buffer
PipelineData::Value(Value::Binary { val: bytes, .. }, _) => {
read_msgpack(Cursor::new(bytes), opts)
@ -136,7 +137,8 @@ MessagePack: https://msgpack.org/
dst_span: call.head,
src_span: input.span().unwrap_or(call.head),
}),
}
};
out.map(|pd| pd.set_metadata(metadata))
}
}
@ -510,6 +512,10 @@ fn assert_eof(input: &mut impl io::Read, span: Span) -> Result<(), ShellError> {
#[cfg(test)]
mod test {
use nu_cmd_lang::eval_pipeline_without_terminal_expression;
use crate::{Metadata, MetadataSet, ToMsgpack};
use super::*;
#[test]
@ -518,4 +524,34 @@ mod test {
test_examples(FromMsgpack {})
}
#[test]
fn test_content_type_metadata() {
let mut engine_state = Box::new(EngineState::new());
let delta = {
let mut working_set = StateWorkingSet::new(&engine_state);
working_set.add_decl(Box::new(ToMsgpack {}));
working_set.add_decl(Box::new(FromMsgpack {}));
working_set.add_decl(Box::new(Metadata {}));
working_set.add_decl(Box::new(MetadataSet {}));
working_set.render()
};
engine_state
.merge_delta(delta)
.expect("Error merging delta");
let cmd = r#"{a: 1 b: 2} | to msgpack | metadata set --datasource-ls | from msgpack | metadata | $in"#;
let result = eval_pipeline_without_terminal_expression(
cmd,
std::env::temp_dir().as_ref(),
&mut engine_state,
);
assert_eq!(
Value::test_record(record!("source" => Value::test_string("ls"))),
result.expect("There should be a result")
)
}
}


@ -43,7 +43,8 @@ impl Command for FromMsgpackz {
objects,
signals: engine_state.signals().clone(),
};
match input {
let metadata = input.metadata().map(|md| md.with_content_type(None));
let out = match input {
// Deserialize from a byte buffer
PipelineData::Value(Value::Binary { val: bytes, .. }, _) => {
let reader = brotli::Decompressor::new(Cursor::new(bytes), BUFFER_SIZE);
@ -68,6 +69,7 @@ impl Command for FromMsgpackz {
dst_span: call.head,
src_span: span,
}),
}
};
out.map(|pd| pd.set_metadata(metadata))
}
}


@ -49,7 +49,8 @@ impl Command for FromNuon {
let (string_input, _span, metadata) = input.collect_string_strict(head)?;
match nuon::from_nuon(&string_input, Some(head)) {
Ok(result) => Ok(result.into_pipeline_data_with_metadata(metadata)),
Ok(result) => Ok(result
.into_pipeline_data_with_metadata(metadata.map(|md| md.with_content_type(None)))),
Err(err) => Err(ShellError::GenericError {
error: "error when loading nuon text".into(),
msg: "could not load nuon text".into(),
@ -63,6 +64,10 @@ impl Command for FromNuon {
#[cfg(test)]
mod test {
use nu_cmd_lang::eval_pipeline_without_terminal_expression;
use crate::{Metadata, MetadataSet};
use super::*;
#[test]
@ -71,4 +76,33 @@ mod test {
test_examples(FromNuon {})
}
#[test]
fn test_content_type_metadata() {
let mut engine_state = Box::new(EngineState::new());
let delta = {
let mut working_set = StateWorkingSet::new(&engine_state);
working_set.add_decl(Box::new(FromNuon {}));
working_set.add_decl(Box::new(Metadata {}));
working_set.add_decl(Box::new(MetadataSet {}));
working_set.render()
};
engine_state
.merge_delta(delta)
.expect("Error merging delta");
let cmd = r#"'[[a, b]; [1, 2]]' | metadata set --content-type 'application/x-nuon' --datasource-ls | from nuon | metadata | $in"#;
let result = eval_pipeline_without_terminal_expression(
cmd,
std::env::temp_dir().as_ref(),
&mut engine_state,
);
assert_eq!(
Value::test_record(record!("source" => Value::test_string("ls"))),
result.expect("There should be a result")
)
}
}


@ -46,7 +46,8 @@ impl Command for FromOds {
vec![]
};
from_ods(input, head, sel_sheets)
let metadata = input.metadata().map(|md| md.with_content_type(None));
from_ods(input, head, sel_sheets).map(|pd| pd.set_metadata(metadata))
}
fn examples(&self) -> Vec<Example> {


@ -29,7 +29,8 @@ impl Command for FromToml {
let span = call.head;
let (mut string_input, span, metadata) = input.collect_string_strict(span)?;
string_input.push('\n');
Ok(convert_string_to_value(string_input, span)?.into_pipeline_data_with_metadata(metadata))
Ok(convert_string_to_value(string_input, span)?
.into_pipeline_data_with_metadata(metadata.map(|md| md.with_content_type(None))))
}
fn examples(&self) -> Vec<Example> {
@ -144,8 +145,11 @@ pub fn convert_string_to_value(string_input: String, span: Span) -> Result<Value
#[cfg(test)]
mod tests {
use crate::{Metadata, MetadataSet};
use super::*;
use chrono::TimeZone;
use nu_cmd_lang::eval_pipeline_without_terminal_expression;
use toml::value::Datetime;
#[test]
@ -331,4 +335,33 @@ mod tests {
assert_eq!(result, reference_date);
}
#[test]
fn test_content_type_metadata() {
let mut engine_state = Box::new(EngineState::new());
let delta = {
let mut working_set = StateWorkingSet::new(&engine_state);
working_set.add_decl(Box::new(FromToml {}));
working_set.add_decl(Box::new(Metadata {}));
working_set.add_decl(Box::new(MetadataSet {}));
working_set.render()
};
engine_state
.merge_delta(delta)
.expect("Error merging delta");
let cmd = r#""[a]\nb = 1\nc = 1" | metadata set --content-type 'text/x-toml' --datasource-ls | from toml | metadata | $in"#;
let result = eval_pipeline_without_terminal_expression(
cmd,
std::env::temp_dir().as_ref(),
&mut engine_state,
);
assert_eq!(
Value::test_record(record!("source" => Value::test_string("ls"))),
result.expect("There should be a result")
)
}
}


@ -165,6 +165,10 @@ fn from_tsv(
#[cfg(test)]
mod test {
use nu_cmd_lang::eval_pipeline_without_terminal_expression;
use crate::{Metadata, MetadataSet};
use super::*;
#[test]
@ -173,4 +177,33 @@ mod test {
test_examples(FromTsv {})
}
#[test]
fn test_content_type_metadata() {
let mut engine_state = Box::new(EngineState::new());
let delta = {
let mut working_set = StateWorkingSet::new(&engine_state);
working_set.add_decl(Box::new(FromTsv {}));
working_set.add_decl(Box::new(Metadata {}));
working_set.add_decl(Box::new(MetadataSet {}));
working_set.render()
};
engine_state
.merge_delta(delta)
.expect("Error merging delta");
let cmd = r#""a\tb\n1\t2" | metadata set --content-type 'text/tab-separated-values' --datasource-ls | from tsv | metadata | $in"#;
let result = eval_pipeline_without_terminal_expression(
cmd,
std::env::temp_dir().as_ref(),
&mut engine_state,
);
assert_eq!(
Value::test_record(record!("source" => Value::test_string("ls"))),
result.expect("There should be a result")
)
}
}


@ -47,7 +47,8 @@ impl Command for FromXlsx {
vec![]
};
from_xlsx(input, head, sel_sheets)
let metadata = input.metadata().map(|md| md.with_content_type(None));
from_xlsx(input, head, sel_sheets).map(|pd| pd.set_metadata(metadata))
}
fn examples(&self) -> Vec<Example> {


@ -206,7 +206,9 @@ fn from_xml(input: PipelineData, info: &ParsingInfo) -> Result<PipelineData, She
let (concat_string, span, metadata) = input.collect_string_strict(info.span)?;
match from_xml_string_to_value(&concat_string, info) {
Ok(x) => Ok(x.into_pipeline_data_with_metadata(metadata)),
Ok(x) => {
Ok(x.into_pipeline_data_with_metadata(metadata.map(|md| md.with_content_type(None))))
}
Err(err) => Err(process_xml_parse_error(err, span)),
}
}
@ -322,10 +324,14 @@ fn make_cant_convert_error(help: impl Into<String>, span: Span) -> ShellError {
#[cfg(test)]
mod tests {
use crate::Metadata;
use crate::MetadataSet;
use super::*;
use indexmap::indexmap;
use indexmap::IndexMap;
use nu_cmd_lang::eval_pipeline_without_terminal_expression;
fn string(input: impl Into<String>) -> Value {
Value::test_string(input)
@ -480,4 +486,36 @@ mod tests {
test_examples(FromXml {})
}
#[test]
fn test_content_type_metadata() {
let mut engine_state = Box::new(EngineState::new());
let delta = {
let mut working_set = StateWorkingSet::new(&engine_state);
working_set.add_decl(Box::new(FromXml {}));
working_set.add_decl(Box::new(Metadata {}));
working_set.add_decl(Box::new(MetadataSet {}));
working_set.render()
};
engine_state
.merge_delta(delta)
.expect("Error merging delta");
let cmd = r#"'<?xml version="1.0" encoding="UTF-8"?>
<note>
<remember>Event</remember>
</note>' | metadata set --content-type 'application/xml' --datasource-ls | from xml | metadata | $in"#;
let result = eval_pipeline_without_terminal_expression(
cmd,
std::env::temp_dir().as_ref(),
&mut engine_state,
);
assert_eq!(
Value::test_record(record!("source" => Value::test_string("ls"))),
result.expect("There should be a result")
)
}
}


@ -235,14 +235,19 @@ fn from_yaml(input: PipelineData, head: Span) -> Result<PipelineData, ShellError
let (concat_string, span, metadata) = input.collect_string_strict(head)?;
match from_yaml_string_to_value(&concat_string, head, span) {
Ok(x) => Ok(x.into_pipeline_data_with_metadata(metadata)),
Ok(x) => {
Ok(x.into_pipeline_data_with_metadata(metadata.map(|md| md.with_content_type(None))))
}
Err(other) => Err(other),
}
}
#[cfg(test)]
mod test {
use crate::{Metadata, MetadataSet};
use super::*;
use nu_cmd_lang::eval_pipeline_without_terminal_expression;
use nu_protocol::Config;
#[test]
@ -395,4 +400,33 @@ mod test {
assert!(result.ok().unwrap() == test_case.expected.ok().unwrap());
}
}
#[test]
fn test_content_type_metadata() {
let mut engine_state = Box::new(EngineState::new());
let delta = {
let mut working_set = StateWorkingSet::new(&engine_state);
working_set.add_decl(Box::new(FromYaml {}));
working_set.add_decl(Box::new(Metadata {}));
working_set.add_decl(Box::new(MetadataSet {}));
working_set.render()
};
engine_state
.merge_delta(delta)
.expect("Error merging delta");
let cmd = r#""a: 1\nb: 2" | metadata set --content-type 'application/yaml' --datasource-ls | from yaml | metadata | $in"#;
let result = eval_pipeline_without_terminal_expression(
cmd,
std::env::temp_dir().as_ref(),
&mut engine_state,
);
assert_eq!(
Value::test_record(record!("source" => Value::test_string("ls"))),
result.expect("There should be a result")
)
}
}


@ -4,7 +4,7 @@ use nu_cmd_base::input_handler::{operate, CmdArgument};
use nu_engine::command_prelude::*;
use nu_protocol::Config;
pub struct Arguments {
struct Arguments {
cell_paths: Option<Vec<CellPath>>,
config: Arc<Config>,
}


@ -0,0 +1,144 @@
use nu_test_support::nu;
#[test]
fn table_strategy_table() {
assert_eq!(
nu!(
"{} | merge deep {} | to nuon",
"{inner: [{a: 1}, {b: 2}]}",
"{inner: [{c: 3}]}"
)
.out,
"{inner: [{a: 1, c: 3}, {b: 2}]}"
)
}
#[test]
fn table_strategy_list() {
assert_eq!(
nu!(
"{} | merge deep {} | to nuon",
"{a: [1, 2, 3]}",
"{a: [4, 5, 6]}"
)
.out,
"{a: [4, 5, 6]}"
)
}
#[test]
fn overwrite_strategy_table() {
assert_eq!(
nu!(
"{} | merge deep --strategy=overwrite {} | to nuon",
"{inner: [{a: 1}, {b: 2}]}",
"{inner: [[c]; [3]]}"
)
.out,
"{inner: [[c]; [3]]}"
)
}
#[test]
fn overwrite_strategy_list() {
assert_eq!(
nu!(
"{} | merge deep --strategy=overwrite {} | to nuon",
"{a: [1, 2, 3]}",
"{a: [4, 5, 6]}"
)
.out,
"{a: [4, 5, 6]}"
)
}
#[test]
fn append_strategy_table() {
assert_eq!(
nu!(
"{} | merge deep --strategy=append {} | to nuon",
"{inner: [{a: 1}, {b: 2}]}",
"{inner: [{c: 3}]}"
)
.out,
"{inner: [{a: 1}, {b: 2}, {c: 3}]}"
)
}
#[test]
fn append_strategy_list() {
assert_eq!(
nu!(
"{} | merge deep --strategy=append {} | to nuon",
"{inner: [1, 2, 3]}",
"{inner: [4, 5, 6]}"
)
.out,
"{inner: [1, 2, 3, 4, 5, 6]}"
)
}
#[test]
fn prepend_strategy_table() {
assert_eq!(
nu!(
"{} | merge deep --strategy=prepend {} | to nuon",
"{inner: [{a: 1}, {b: 2}]}",
"{inner: [{c: 3}]}"
)
.out,
"{inner: [{c: 3}, {a: 1}, {b: 2}]}"
)
}
#[test]
fn prepend_strategy_list() {
assert_eq!(
nu!(
"{} | merge deep --strategy=prepend {} | to nuon",
"{inner: [1, 2, 3]}",
"{inner: [4, 5, 6]}"
)
.out,
"{inner: [4, 5, 6, 1, 2, 3]}"
)
}
#[test]
fn record_nested_with_overwrite() {
assert_eq!(
nu!(
"{} | merge deep {} | to nuon",
"{a: {b: {c: {d: 123, e: 456}}}}",
"{a: {b: {c: {e: 654, f: 789}}}}"
)
.out,
"{a: {b: {c: {d: 123, e: 654, f: 789}}}}"
)
}
#[test]
fn single_row_table() {
assert_eq!(
nu!(
"{} | merge deep {} | to nuon",
"[[a]; [{foo: [1, 2, 3]}]]",
"[[a]; [{bar: [4, 5, 6]}]]"
)
.out,
"[[a]; [{foo: [1, 2, 3], bar: [4, 5, 6]}]]"
)
}
#[test]
fn multi_row_table() {
assert_eq!(
nu!(
"{} | merge deep {} | to nuon ",
"[[a b]; [{inner: {foo: abc}} {inner: {baz: ghi}}]]",
"[[a b]; [{inner: {bar: def}} {inner: {qux: jkl}}]]"
)
.out,
"[[a, b]; [{inner: {foo: abc, bar: def}}, {inner: {baz: ghi, qux: jkl}}]]"
)
}


@ -66,6 +66,7 @@ mod ls;
mod match_;
mod math;
mod merge;
mod merge_deep;
mod mktemp;
mod move_;
mod mut_;


@ -145,7 +145,5 @@ fn http_delete_timeout() {
#[cfg(not(target_os = "windows"))]
assert!(&actual.err.contains("timed out reading response"));
#[cfg(target_os = "windows")]
assert!(&actual
.err
.contains("did not properly respond after a period of time"));
assert!(&actual.err.contains(super::WINDOWS_ERROR_TIMEOUT_SLOW_LINK));
}


@ -339,7 +339,5 @@ fn http_get_timeout() {
#[cfg(not(target_os = "windows"))]
assert!(&actual.err.contains("timed out reading response"));
#[cfg(target_os = "windows")]
assert!(&actual
.err
.contains("did not properly respond after a period of time"));
assert!(&actual.err.contains(super::WINDOWS_ERROR_TIMEOUT_SLOW_LINK));
}


@ -5,3 +5,14 @@ mod options;
mod patch;
mod post;
mod put;
/// String representation of the Windows error code for timeouts on slow links.
///
/// Use this constant in tests instead of matching partial error message content,
/// such as `"did not properly respond after a period of time"`, which can vary by language.
/// The specific string `"(os error 10060)"` is consistent across all locales, as it represents
/// the raw error code rather than localized text.
///
/// For more details, see the [Microsoft docs](https://learn.microsoft.com/en-us/troubleshoot/windows-client/networking/10060-connection-timed-out-with-proxy-server).
#[cfg(all(test, windows))]
const WINDOWS_ERROR_TIMEOUT_SLOW_LINK: &str = "(os error 10060)";


@ -64,7 +64,5 @@ fn http_options_timeout() {
#[cfg(not(target_os = "windows"))]
assert!(&actual.err.contains("timed out reading response"));
#[cfg(target_os = "windows")]
assert!(&actual
.err
.contains("did not properly respond after a period of time"));
assert!(&actual.err.contains(super::WINDOWS_ERROR_TIMEOUT_SLOW_LINK));
}


@ -189,7 +189,5 @@ fn http_patch_timeout() {
#[cfg(not(target_os = "windows"))]
assert!(&actual.err.contains("timed out reading response"));
#[cfg(target_os = "windows")]
assert!(&actual
.err
.contains("did not properly respond after a period of time"));
assert!(&actual.err.contains(super::WINDOWS_ERROR_TIMEOUT_SLOW_LINK));
}


@ -303,7 +303,5 @@ fn http_post_timeout() {
#[cfg(not(target_os = "windows"))]
assert!(&actual.err.contains("timed out reading response"));
#[cfg(target_os = "windows")]
assert!(&actual
.err
.contains("did not properly respond after a period of time"));
assert!(&actual.err.contains(super::WINDOWS_ERROR_TIMEOUT_SLOW_LINK));
}


@ -189,7 +189,5 @@ fn http_put_timeout() {
#[cfg(not(target_os = "windows"))]
assert!(&actual.err.contains("timed out reading response"));
#[cfg(target_os = "windows")]
assert!(&actual
.err
.contains("did not properly respond after a period of time"));
assert!(&actual.err.contains(super::WINDOWS_ERROR_TIMEOUT_SLOW_LINK));
}


@ -4,6 +4,7 @@ mod exists;
mod expand;
mod join;
mod parse;
mod self_;
mod split;
mod type_;


@ -0,0 +1,64 @@
use std::path::Path;
use itertools::Itertools;
use nu_test_support::{fs::Stub, nu, playground::Playground};
#[test]
fn self_path_const() {
Playground::setup("path_self_const", |dirs, sandbox| {
sandbox
.within("scripts")
.with_files(&[Stub::FileWithContentToBeTrimmed(
"foo.nu",
r#"
export const paths = {
self: (path self),
dir: (path self .),
sibling: (path self sibling),
parent_dir: (path self ..),
cousin: (path self ../cousin),
}
"#,
)]);
let actual = nu!(cwd: dirs.test(), r#"use scripts/foo.nu; $foo.paths | values | str join (char nul)"#);
let (self_, dir, sibling, parent_dir, cousin) = actual
.out
.split("\0")
.collect_tuple()
.expect("should have 5 NUL separated paths");
let mut pathbuf = dirs.test().to_path_buf();
pathbuf.push("scripts");
assert_eq!(pathbuf, Path::new(dir));
pathbuf.push("foo.nu");
assert_eq!(pathbuf, Path::new(self_));
pathbuf.pop();
pathbuf.push("sibling");
assert_eq!(pathbuf, Path::new(sibling));
pathbuf.pop();
pathbuf.pop();
assert_eq!(pathbuf, Path::new(parent_dir));
pathbuf.push("cousin");
assert_eq!(pathbuf, Path::new(cousin));
})
}
#[test]
fn self_path_runtime() {
let actual = nu!("path self");
assert!(!actual.status.success());
assert!(actual.err.contains("can only run during parse-time"));
}
#[test]
fn self_path_repl() {
let actual = nu!("const foo = path self; $foo");
assert!(!actual.status.success());
assert!(actual.err.contains("nu::shell::file_not_found"));
}


@ -1,6 +1,6 @@
use std::{borrow::Cow, fs::File, sync::Arc};
use nu_path::AbsolutePathBuf;
use nu_path::{expand_path_with, AbsolutePathBuf};
use nu_protocol::{
ast::{Bits, Block, Boolean, CellPath, Comparison, Math, Operator},
debugger::DebugContext,
@ -15,7 +15,7 @@ use nu_protocol::{
};
use nu_utils::IgnoreCaseExt;
use crate::{eval::is_automatic_env_var, eval_block_with_early_return, redirect_env};
use crate::{eval::is_automatic_env_var, eval_block_with_early_return};
/// Evaluate the compiled representation of a [`Block`].
pub fn eval_ir_block<D: DebugContext>(
@ -872,7 +872,6 @@ fn literal_value(
} else {
let cwd = ctx.engine_state.cwd(Some(ctx.stack))?;
let path = expand_path_with(ctx.stack, ctx.engine_state, path, cwd, true);
Value::string(path.to_string_lossy(), span)
}
}
@ -892,7 +891,6 @@ fn literal_value(
.map(AbsolutePathBuf::into_std_path_buf)
.unwrap_or_default();
let path = expand_path_with(ctx.stack, ctx.engine_state, path, cwd, true);
Value::string(path.to_string_lossy(), span)
}
}
@ -1491,3 +1489,26 @@ fn eval_iterate(
eval_iterate(ctx, dst, stream, end_index)
}
}
/// Redirect environment from the callee stack to the caller stack
fn redirect_env(engine_state: &EngineState, caller_stack: &mut Stack, callee_stack: &Stack) {
// TODO: make this more efficient
// Grab all environment variables from the callee
let caller_env_vars = caller_stack.get_env_var_names(engine_state);
// remove env vars that are present in the caller but not in the callee
// (the callee hid them)
for var in caller_env_vars.iter() {
if !callee_stack.has_env_var(engine_state, var) {
caller_stack.remove_env_var(engine_state, var);
}
}
// add new env vars from callee to caller
for (var, value) in callee_stack.get_stack_env_vars() {
caller_stack.add_env_var(var, value);
}
// set config to callee config, to capture any updates to that
caller_stack.config.clone_from(&callee_stack.config);
}


@ -51,7 +51,7 @@ impl BlockKind {
}
// A baseline token is terminated if it's not nested inside of a paired
// delimiter and the next character is one of: `|`, `;` or any
// delimiter and the next character is one of: `|`, `;`, `#` or any
// whitespace.
fn is_item_terminator(
block_level: &[BlockKind],
@ -115,7 +115,6 @@ pub fn lex_item(
// character (whitespace, `|`, `;` or `#`) is encountered, the baseline
// token is done.
// - Otherwise, accumulate the character into the current baseline token.
let mut previous_char = None;
while let Some(c) = input.get(*curr_offset) {
let c = *c;
@ -148,9 +147,11 @@ pub fn lex_item(
// Also need to check to make sure we aren't escaped
quote_start = None;
}
} else if c == b'#' && !in_comment {
// To start a comment, It either need to be the first character of the token or prefixed with space.
in_comment = previous_char.map(|pc| pc == b' ').unwrap_or(true);
} else if c == b'#' {
if is_item_terminator(&block_level, c, additional_whitespace, special_tokens) {
break;
}
in_comment = true;
} else if c == b'\n' || c == b'\r' {
in_comment = false;
if is_item_terminator(&block_level, c, additional_whitespace, special_tokens) {
@ -253,7 +254,6 @@ pub fn lex_item(
}
*curr_offset += 1;
previous_char = Some(c);
}
let span = Span::new(span_offset + token_start, span_offset + *curr_offset);
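
The net effect of dropping the `previous_char` tracking above is that `#` once again starts a comment even when it is glued to the preceding bareword, which is also why the old lexer tests below are deleted. A hedged illustration of the new behavior:

```nu
# After this change, everything from `#` onward is a comment,
# so this prints just `test`; previously `test#testing` was a single bareword.
echo test#testing
```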


@ -159,29 +159,6 @@ fn lex_comment() {
);
}
#[test]
fn lex_not_comment_needs_space_in_front_of_hashtag() {
let file = b"1..10 | each {echo test#testing }";
let output = lex(file, 0, &[], &[], false);
assert!(output.1.is_none());
}
#[test]
fn lex_comment_with_space_in_front_of_hashtag() {
let file = b"1..10 | each {echo test #testing }";
let output = lex(file, 0, &[], &[], false);
assert!(output.1.is_some());
assert!(matches!(
output.1.unwrap(),
ParseError::UnexpectedEof(missing_token, span) if missing_token == "}"
&& span == Span::new(33, 34)
));
}
#[test]
fn lex_is_incomplete() {
let file = b"let x = 300 | ;";


@ -3,12 +3,14 @@ use std/dt [datetime-diff, pretty-print-duration]
# Print a banner for nushell with information about the project
export def banner [] {
let dt = (datetime-diff (date now) 2019-05-10T09:59:12-07:00)
let ver = (version)
let banner_msg = $"(ansi green) __ ,(ansi reset)
(ansi green) .--\(\)°'.' (ansi reset)Welcome to (ansi green)Nushell(ansi reset),
(ansi green)'|, . ,' (ansi reset)based on the (ansi green)nu(ansi reset) language,
(ansi green) !_-\(_\\ (ansi reset)where all data is structured!
Version: (ansi green)($ver.version) \(($ver.build_os)\)
Please join our (ansi purple)Discord(ansi reset) community at (ansi purple)https://discord.gg/NtAbbGn(ansi reset)
Our (ansi green_bold)GitHub(ansi reset) repository is at (ansi green_bold)https://github.com/nushell/nushell(ansi reset)
Our (ansi green)Documentation(ansi reset) is located at (ansi green)https://nushell.sh(ansi reset)

View file

@ -3,5 +3,5 @@ use std/assert
#[test]
def banner [] {
use std/core
assert ((core banner | lines | length) == 15)
assert ((core banner | lines | length) == 16)
}

View file

@ -24,6 +24,7 @@ log = { workspace = true }
num-format = { workspace = true }
strip-ansi-escapes = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
sys-locale = "0.3"
unicase = "2.8.0"
@ -38,4 +39,4 @@ crossterm_winapi = "0.9"
nix = { workspace = true, default-features = false, features = ["user", "fs"] }
[lints]
workspace = true
workspace = true

View file

@ -9,7 +9,7 @@
* During a startup where the user specifies an alternative `env.nu` via `nu --env-config <path>`
* During a `nu -c <commandstring>` or `nu <script>` startup so that `ENV_CONVERSIONS` is properly handled for Windows.
* Is *not* loaded when running with an explicit `nu --no-config-file (-n)`.
* Is not commented - Comments are in `sample_env.nu`.
* Is not commented - Comments are in `doc_env.nu`.
* Should be optimized for fastest load times.
* Can be introspected via `config env --default | nu-highlight`
@ -27,7 +27,7 @@ Counterpart to `default_env.nu`.
* `nu -n/--no-config-file`
* `nu -c "ls"`
* `nu <script.nu>`
* Is not commented - Comments are in `sample_config.nu`.
* Is not commented - Comments are in `doc_config.nu`.
* Should be optimized for fastest load times. Whenever possible, values should be set via nu-protocol::config
* Exception: `color_config` values are currently set in this file so that users can introspect the values
* TODO: Implement defaults for `color_config` in nu-protocol::config and remove from `default_config.nu`
@ -37,24 +37,24 @@ Counterpart to `default_env.nu`.
$env.config = {}
```
## `sample_env.nu`
## `doc_env.nu`
* A commented file documenting the most common environment variables that a user might configure in `env.nu`
* For convenient in-shell access - Can be pretty-printed via `config env --sample | nu-highlight`
* For convenient in-shell access - Can be pretty-printed via `config env --doc | nu-highlight`
* Since this file is for documentation only, include actual Nushell code without comments so that it can be pretty-printed
* No optimization necessary - Not intended for use other than documentation.
* Consider replacing `config env --sample` with `help env.nu` at some point.
* Consider replacing `config env --doc` with `help env.nu` at some point.
* Uses a mix of default values (explained) as well as other examples that users might want in their own `env.nu`
## `sample_config.nu`
## `doc_config.nu`
Counterpart to `sample_env.nu`.
Counterpart to `doc_env.nu`.
* A commented file documenting the most common environment variables that a user might configure in `config.nu`
* For convenient in-shell access - Can be pretty-printed via `config nu --sample | nu-highlight`
* For convenient in-shell access - Can be pretty-printed via `config nu --doc | nu-highlight`
* Since this file is for documentation only, include actual Nushell code without comments so that it can be pretty-printed
* No optimization necessary - Not intended for use other than documentation.
* Consider replacing `config nu --sample` with `help config.nu` at some point.
* Consider replacing `config nu --doc` with `help config.nu` at some point.
* Uses a mix of default values (explained) as well as other examples that users might want in their own `config.nu`
## `scaffold_env.nu`
@ -70,7 +70,3 @@ Counterpart to `scaffold_env.nu`.
* This file is used *one-time* (typically) at **first** startup
* If the `$nu.default-config-path` directory does not exist, the directory is created and then both `scaffold_env.nu` and `scaffold_config.nu` are written to it
* Contains only commented lines explaining the purpose of the file to the user, along with information on the `config nu` command.
## `sample_login.nu`
This file is not used by any Nushell code. Of course, if the user has a `login.nu`, then it will be evaluated during startup of a login shell.

View file

@ -20,6 +20,8 @@ $env.config.color_config = {
row_index: green_bold
record: white
list: white
closure: green_bold
glob: cyan_bold
block: white
hints: dark_gray
search_result: { bg: red fg: white }
@ -58,4 +60,9 @@ $env.config.color_config = {
shape_variable: purple
shape_vardecl: purple
shape_raw_string: light_purple
shape_garbage: {
fg: white
bg: red
attr: b
}
}

View file

@ -3,7 +3,7 @@
#
# version = "0.100.1"
$env.PROMPT_COMMAND = $env.PROMPT_COMMAND? | default {||
$env.PROMPT_COMMAND = $env.PROMPT_COMMAND? | default {||
let dir = match (do -i { $env.PWD | path relative-to $nu.home-path }) {
null => $env.PWD
'' => '~'
@ -17,7 +17,7 @@ $env.PROMPT_COMMAND = $env.PROMPT_COMMAND? | default {||
$path_segment | str replace --all (char path_sep) $"($separator_color)(char path_sep)($path_color)"
}
$env.PROMPT_COMMAND_RIGHT = $env.PROMPT_COMMAND_RIGHT? | default {||
$env.PROMPT_COMMAND_RIGHT = $env.PROMPT_COMMAND_RIGHT? | default {||
# create a right prompt in magenta with green separators and am/pm underlined
let time_segment = ([
(ansi reset)

View file

@ -1,6 +1,9 @@
# Nushell Config File
# Nushell Config File Documentation
#
# version = "0.99.2"
# Warning: This file is intended for documentation purposes only and
# is not intended to be used as an actual configuration file as-is.
#
# version = "0.100.1"
#
# A `config.nu` file is used to override default Nushell settings,
# define (or import) custom commands, or run any other startup tasks.
@ -15,7 +18,7 @@
# https://nushell.sh/book/configuration
#
# You can pretty-print and page this file using:
# config nu --sample | nu-highlight | less -R
# config nu --doc | nu-highlight | less -R
# $env.config
# -----------
@ -86,14 +89,14 @@ $env.config.recursion_limit = 50
# ---------------------------
# edit_mode (string) "vi" or "emacs" sets the editing behavior of Reedline
edit_mode: "emacs"
$env.config.edit_mode = "emacs"
# Command that will be used to edit the current line buffer with Ctrl+O.
# If unset, uses $env.VISUAL and then $env.EDITOR
#
# Tip: Set to "editor" to use the default editor on Unix platforms using
# the Alternatives system or equivalent
buffer_editor: "editor"
$env.config.buffer_editor = "editor"
# cursor_shape_* (string)
# -----------------------
@ -120,7 +123,7 @@ $env.config.completions.algorithm = "prefix"
$env.config.completions.sort = "smart"
# case_sensitive (bool): true/false to enable/disable case-sensitive completions
$env.config.completions.case_sensitive = false
$env.config.completions.case_sensitive = false
# quick (bool):
# true: auto-select the completion when only one remains
@ -132,7 +135,7 @@ $env.config.completions.quick = true
# false: Do not partially complete
# Partial Example: If a directory contains only files named "forage", "food", and "forest",
# then typing "ls " and pressing <Tab> will partially complete the first two
# letters, "f" and "o". If the directory also includes a file named "faster",
# letters, "f" and "o". If the directory also includes a file named "faster",
# then only "f" would be partially completed.
$env.config.completions.partial = true
@ -145,7 +148,7 @@ $env.config.completions.use_ls_colors = true
# completions.external.*: Settings related to completing external commands
# and additional completers
# external.exnable (bool)
# external.enable (bool)
# true: search for external commands on the Path
# false: disabling might be desired for performance if your path includes
# directories on a slower filesystem
@ -156,16 +159,16 @@ $env.config.completions.external.enable = true
$env.config.completions.external.max_results = 50
# completer (closure with a |spans| parameter): A command to call for *argument* completions
# to commands (internal or external).
# to commands (internal or external).
#
# The |spans| parameter is a list of strings representing the tokens (spans)
# on the current commandline. It is always a list of at least two strings - The
# on the current commandline. It is always a list of at least two strings - The
# command being completed plus the first argument of that command ("" if no argument has
# been partially typed yet), and additional strings for additional arguments beyond
# the first.
#
# This setting is usually set to a closure which will call a third-party completion system, such
# as Carapace.
# as Carapace.
#
# Note: The following is an over-simplified completer command that will call Carapace if it
# is installed. Please use the official Carapace completer, which can be generated automatically
@ -206,8 +209,8 @@ $env.config.shell_integration.osc9_9 = false
# osc8 (bool):
# When true, the `ls` command will generate clickable links that can be launched in another
# application by the terminal.
# Note: This setting replaces the now deprecated `ls.show_clickable_links`
$env.config.shell.integration.osc8: true
# Note: This setting replaces the now deprecated `ls.clickable_links`
$env.config.shell_integration.osc8 = true
# Deprecated
# $env.config.ls.clickable_links = true
@ -229,13 +232,13 @@ $env.config.shell_integration.osc633 = true
# reset_application_mode (bool):
# true/false to enable/disable sending ESC[?1l to the terminal
# This sequence is commonly used to keep cursor key modes in sync between the local
# This sequence is commonly used to keep cursor key modes in sync between the local
# terminal and a remote SSH host.
$env.config.shell_integration.reset_application_mode = true
# bracketed_paste (bool):
# true/false to enable/disable the bracketed-paste feature, which allows multiple-lines
# to be pasted into Nushell at once without immediate execution. When disabled,
# to be pasted into Nushell at once without immediate execution. When disabled,
# each pasted line is executed as it is received.
# Note that bracketed paste is not currently supported on the Windows version of
# Nushell.
@ -266,7 +269,7 @@ $env.config.display_errors.exit_code = false
# display_errors.termination_signal (bool):
# true/false to enable/disable displaying a Nushell error when a child process is
# terminated via any signal
# terminated via any signal
$env.config.display_errors.termination_signal = true
# -------------
@ -282,7 +285,7 @@ $env.config.display_errors.termination_signal = true
$env.config.footer_mode = 25
# table.*
# table_mode (string):
# table_mode (string):
# One of: "default", "basic", "compact", "compact_double", "heavy", "light", "none", "reinforced",
# "rounded", "thin", "with_love", "psql", "markdown", "dots", "restructured", "ascii_rounded",
# or "basic_compact"
@ -332,7 +335,7 @@ $env.config.table.header_on_separator = false
# If set to an int, all tables will be abbreviated to only show the first <n> and last <n> rows
# If set to `null`, all table rows will be displayed
# Can be overridden by passing a table to `| table --abbreviated/-a`
$env.config.table.abbreviated_row_count
$env.config.table.abbreviated_row_count = null
# footer_inheritance (bool): Footer behavior in nested tables
# true: If a nested table is long enough on its own to display a footer (per `footer_mode` above),
@ -344,7 +347,7 @@ $env.config.table.footer_inheritance = false
# Datetime Display
# ----------------
# datetime_format.* (string or nothing):
# Format strings that will be used for datetime values.
# Format strings that will be used for datetime values.
# When set to `null`, the default behavior is to "humanize" the value (e.g., "now" or "a day ago")
# datetime_format.table (string or nothing):
@ -389,7 +392,7 @@ $env.config.float_precision = 2
# ls.use_ls_colors (bool):
# true: The `ls` command will apply the $env.LS_COLORS standard to filenames
# false: Filenames in the `ls` table will use the color_config for strings
$env.config.ls = true
$env.config.ls.use_ls_colors = true
# Hooks
# -----
@ -402,12 +405,23 @@ $env.config.ls = true
# WARNING: A malformed display_output hook can suppress all Nushell output to the terminal.
# It can be reset by assigning an empty string as below:
$env.config.hooks.pre_prompt = [] # Before each prompt is displayed
$env.config.hooks.pre_execution = [] # After <enter> is pressed; before the commandline
# is executed
$env.config.hooks.env_change = [] # When a specified environment variable changes
$env.config.hooks.display_output = "" # Before Nushell output is displayed in the terminal
$env.config.hooks.command_not_found = [] # When a command is not found
# Before each prompt is displayed
$env.config.hooks.pre_prompt = []
# After <enter> is pressed; before the commandline is executed
$env.config.hooks.pre_execution = []
# When a specified environment variable changes
$env.config.hooks.env_change = {
# Example: Run if the PWD environment is different since the last REPL input
PWD: [{|before, after| null }]
}
# Before Nushell output is displayed in the terminal
$env.config.hooks.display_output = "if (term size).columns >= 100 { table -e } else { table }"
# When a command is not found
$env.config.hooks.command_not_found = []
# The env_change hook accepts a record with environment variable names as keys, and a list
# of hooks to run when that variable changes
$env.config.hooks.env_change = {}
# -----------
# Keybindings
@ -462,7 +476,9 @@ $env.config.menus ++= [
type: {
layout: description
columns: 4
col_width: 20 # Optional value. If missing all the screen width is used to calculate column width
# col_width is an optional value. If missing, the entire screen width is used to
# calculate the column width
col_width: 20
col_padding: 2
selection_rows: 4
description_rows: 10
@ -475,33 +491,25 @@ $env.config.menus ++= [
}
]
# ---------------
# Plugin behavior
# ---------------
# Per-plugin configuration. See https://www.nushell.sh/contributor-book/plugins.html#configuration.
plugins: {}
$env.config.plugins
$env.config.plugin_gc
$env.config.plugin_gc.default
$env.config.plugin_gc.default.enabled
$env.config.plugin_gc.default.stop_after
$env.config.plugin_gc.plugins
plugin_gc: {
# Configuration for plugin garbage collection
default: {
enabled: true # true to enable stopping of inactive plugins
stop_after: 10sec # how long to wait after a plugin is inactive to stop it
}
plugins: {
# alternate configuration for specific plugins, by name, for example:
#
# gstat: {
# enabled: false
# }
}
}
# Per-plugin configuration. See https://www.nushell.sh/contributor-book/plugins.html#plugin-configuration
$env.config.plugins = {}
# Plugin garbage collection configuration
# $env.config.plugin_gc.*
# enabled (bool): true/false to enable/disable stopping inactive plugins
$env.config.plugin_gc.default.enabled = true
# stop_after (duration): How long to wait after a plugin is inactive before stopping it
$env.config.plugin_gc.default.stop_after = 10sec
# plugins (record): Alternate garbage collection configuration per-plugin.
$env.config.plugin_gc.plugins = {
# gstat: {
# enabled: false
# }
}
# -------------------------------------
# Themes/Colors and Syntax Highlighting
@ -532,12 +540,12 @@ use std/config dark-theme
$env.config.color_config = (dark-theme)
# Or, individual color settings can be configured or overridden.
#
#
# Values can be one of:
# - A color name such as "red" (see `ansi -l` for a list)
# - A color RGB value in the form of "#C4C9C6"
# - A record including:
# * `fg` (color)
# * `fg` (color)
# * `bg` (color)
# * `attr`: a string with one or more of:
# - 'n': normal
@ -547,7 +555,7 @@ $env.config.color_config = (dark-theme)
# - 'i': italics
# - 'd': dimmed
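#
# For example, any of the following forms would be accepted (illustrative
# values, not defaults):
$env.config.color_config.string = "red"
$env.config.color_config.int = "#C4C9C6"
$env.config.color_config.bool = { fg: white bg: red attr: b }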
# foreground, background, and cursor colors are not handled by Nushell, but can be used by
# foreground, background, and cursor colors are not handled by Nushell, but can be used by
# custom-commands such as `theme` from the nu_scripts repository. That `theme` command can be
# used to set the terminal foreground, background, and cursor colors.
$env.config.color_config.foreground
@ -557,7 +565,7 @@ $env.config.color_config.cursor
# -------------------------------------------------------------------------------------------------
# shape_: Applies syntax highlighting based on the "shape" (inferred or declared type) of an
# element on the commandline. Nushell's parser can identify shapes based on many criteria, often
# as the commandline is being typed.
# as the commandline is being typed.
# shape_string: Can appear as a single-or-quoted value, a bareword string, the key of a record,
# an argument which has been declared as a string, and other parsed strings.
@ -733,7 +741,7 @@ $env.config.color_config.custom # Custom value (often from a plugin)
$env.config.color_config.nothing # Not used, since a null is not displayed
$env.config.color_config.date # datetime value
$env.config.color_config.filesize # filesize value
$env.config.color_config.list # Not currently used. Lists are displayed using their
$env.config.color_config.list # Not currently used. Lists are displayed using their
# members' styles
$env.config.color_config.record # Not currently used. Records are displayed using their
# member's styles
@ -828,7 +836,7 @@ $env.PROMPT_INDICATOR_VI_INSERT = ": "
# When a commandline extends across multiple lines:
$env.PROMPT_MULTILINE_INDICATOR = "::: "
# TRANSIENT_PROMPT_*
# TRANSIENT_PROMPT_*
# ------------------
# Allows a different prompt to be shown after a command has been executed. This
# can be useful if you have a 2-line prompt. Instead of each previously-entered
@ -855,10 +863,10 @@ $env.TRANSIENT_PROMPT_COMMAND_RIGHT = ""
#
# Note: The OS Path variable is automatically converted before env.nu loads, so it can
# be treated as a list in this file.
#
#
# Note: Environment variables are not case-sensitive, so the following will work
# for both Windows and Unix-like platforms.
#
#
# By default, the internal conversion looks something like the following, so there
# is no need to add this in your actual env.nu:
$env.ENV_CONVERSIONS = {
@ -912,12 +920,12 @@ const NU_PLUGIN_DIRS = $NU_PLUGIN_DIRS ++ [($nu.default-config-dir | path join '
# Appending to the OS path is a common configuration task.
# Because of the previous ENV_CONVERSIONS (performed internally
# before your config.nu loads), the path variable is a list that can
# before your config.nu loads), the path variable is a list that can
# be appended to using, for example:
$env.path ++= "~/.local/bin"
$env.PATH ++= [ "~/.local/bin" ]
# Or prepend using
$env.path = "~/.local/bin" ++ $env.path
$env.PATH = [ "~/.local/bin" ] ++ $env.PATH
# The `path add` function from the Standard Library also provides
# a convenience method for prepending to the path:
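# For example (a sketch that assumes `path add` is exported from `std/util`):
use std/util "path add"
path add "~/.local/bin"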

View file

@ -1,4 +1,6 @@
# Sample Nushell Environment Config File
# Nushell Environment Config File Documentation
#
# version = "0.100.1"
#
# Previously, environment variables were typically configured in `env.nu`.
# In general, most configuration can and should be performed in `config.nu`
@ -6,4 +8,4 @@
# To pretty-print the in-shell documentation for Nushell's various configuration
# settings, you can run:
config nu --sample | nu-highlight | less -R
config nu --doc | nu-highlight | less -R

View file

@ -1,9 +0,0 @@
# Example Nushell Loginshell Config File
# - has to be as login.nu in the default config directory
# - will be sourced after config.nu and env.nu in case of nushell started as login shell
# just as an example for overwriting of an environment variable of env.nu
$env.PROMPT_INDICATOR = {|| "(LS)> " }
# Similar to env-path and config-path there is a variable containing the path to login.nu
echo $nu.loginshell-path

View file

@ -1,19 +1,18 @@
# config.nu
#
# Installed by:
# version = "0.100.1"
#
# This file is used to override default Nushell settings, define
# (or import) custom commands, or run any other startup tasks.
# See https://www.nushell.sh/book/configuration.html
#
# This file is loaded after env.nu and before login.nu
#
#
# You can open this file in your default editor using:
# config nu
#
# To pretty-print a sample config.nu with documentation, run:
# config nu --sample | nu-highlight | less -R
#
# To pretty-print the default configuration values, run:
# config nu --default | nu-highlight | less -R
# See `help config nu` for more options
#
# You can remove these comments if you want or leave
# them for future reference.

View file

@ -1,5 +1,8 @@
# env.nu
#
# Installed by:
# version = "0.100.1"
#
# Previously, environment variables were typically configured in `env.nu`.
# In general, most configuration can and should be performed in `config.nu`
# or one of the autoload directories.
@ -9,11 +12,7 @@
#
# See https://www.nushell.sh/book/configuration.html
#
# To pretty-print a sample of the configuration settings, run:
# config nu --sample | nu-highlight | less -R
#
# To pretty-print the default env.nu, run:
# config env --default | nu-highlight | less -R
# Also see `help config env` for more options.
#
# You can remove these comments if you want or leave
# them for future reference.

View file

@ -0,0 +1,259 @@
use serde_json::{json, Map, Value as SerdeValue};
/// JsonFlattener is the main driver when flattening JSON
/// # Examples
/// ```
/// use nu_utils;
///
/// let flattener = nu_utils::JsonFlattener { ..Default::default() };
/// ```
pub struct JsonFlattener<'a> {
/// Alternate separator used between keys when flattening
/// # Examples
/// ```
/// use nu_utils;
/// let flattener = nu_utils::JsonFlattener { separator: "_", ..Default::default()};
/// ```
pub separator: &'a str,
/// Opinionated flattening format that places values in an array if the object is nested inside an array
/// # Examples
/// ```
/// use nu_utils;
/// let flattener = nu_utils::JsonFlattener { alt_array_flattening: true, ..Default::default()};
/// ```
pub alt_array_flattening: bool,
/// Completely flatten JSON and keep array structure in the key when flattening
/// # Examples
/// ```
/// use nu_utils;
/// let flattener = nu_utils::JsonFlattener { preserve_arrays: true, ..Default::default()};
/// ```
pub preserve_arrays: bool,
}
impl<'a> Default for JsonFlattener<'a> {
fn default() -> Self {
JsonFlattener {
separator: ".",
alt_array_flattening: false,
preserve_arrays: false,
}
}
}
/// This implementation defines the core usage for the `JsonFlattener` structure.
/// # Examples
/// ```
/// use nu_utils;
/// use serde_json::json;
///
/// let flattener = nu_utils::JsonFlattener::new();
/// let example = json!({
/// "a": {
/// "b": "c"
/// }
/// });
///
/// let flattened_example = flattener.flatten(&example);
/// ```
impl<'a> JsonFlattener<'a> {
/// Returns a flattener with the default arguments
/// # Examples
/// ```
/// use nu_utils;
///
/// let flattener = nu_utils::JsonFlattener::new();
/// ```
pub fn new() -> Self {
JsonFlattener {
..Default::default()
}
}
/// Flattens JSON variants into a JSON object
///
/// # Arguments
///
/// * `json` - A serde_json Value to flatten
///
/// # Examples
/// ```
/// use nu_utils;
/// use serde_json::json;
///
/// let flattener = nu_utils::JsonFlattener::new();
/// let example = json!({
/// "name": "John Doe",
/// "age": 43,
/// "address": {
/// "street": "10 Downing Street",
/// "city": "London"
/// },
/// "phones": [
/// "+44 1234567",
/// "+44 2345678"
/// ]
/// });
///
/// let flattened_example = flattener.flatten(&example);
/// ```
pub fn flatten(&self, json: &SerdeValue) -> SerdeValue {
let mut flattened_val = Map::<String, SerdeValue>::new();
match json {
SerdeValue::Array(obj_arr) => {
self.flatten_array(&mut flattened_val, &"".to_string(), obj_arr)
}
SerdeValue::Object(obj_val) => {
self.flatten_object(&mut flattened_val, None, obj_val, false)
}
_ => self.flatten_value(&mut flattened_val, &"".to_string(), json, false),
}
SerdeValue::Object(flattened_val)
}
fn flatten_object(
&self,
builder: &mut Map<String, SerdeValue>,
identifier: Option<&String>,
obj: &Map<String, SerdeValue>,
arr: bool,
) {
for (k, v) in obj {
let expanded_identifier = identifier.map_or_else(
|| k.clone(),
|identifier| format!("{identifier}{}{k}", self.separator),
);
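// Skip the `span.start` / `span.end` metadata keys; they record source
// positions rather than data worth keeping in the flattened output.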
if expanded_identifier.contains("span.start")
|| expanded_identifier.contains("span.end")
{
continue;
}
let expanded_identifier = self.filter_known_keys(&expanded_identifier);
match v {
SerdeValue::Object(obj_val) => {
self.flatten_object(builder, Some(&expanded_identifier), obj_val, arr)
}
SerdeValue::Array(obj_arr) => {
self.flatten_array(builder, &expanded_identifier, obj_arr)
}
_ => self.flatten_value(builder, &expanded_identifier, v, arr),
}
}
}
fn flatten_array(
&self,
builder: &mut Map<String, SerdeValue>,
identifier: &String,
obj: &[SerdeValue],
) {
for (k, v) in obj.iter().enumerate() {
let with_key = format!("{identifier}{}{k}", self.separator);
if with_key.contains("span.start") || with_key.contains("span.end") {
continue;
}
let with_key = self.filter_known_keys(&with_key);
match v {
SerdeValue::Object(obj_val) => self.flatten_object(
builder,
Some(if self.preserve_arrays {
&with_key
} else {
identifier
}),
obj_val,
self.alt_array_flattening,
),
SerdeValue::Array(obj_arr) => self.flatten_array(
builder,
if self.preserve_arrays {
&with_key
} else {
identifier
},
obj_arr,
),
_ => self.flatten_value(
builder,
if self.preserve_arrays {
&with_key
} else {
identifier
},
v,
self.alt_array_flattening,
),
}
}
}
fn flatten_value(
&self,
builder: &mut Map<String, SerdeValue>,
identifier: &String,
obj: &SerdeValue,
arr: bool,
) {
if let Some(v) = builder.get_mut(identifier) {
if let Some(arr) = v.as_array_mut() {
arr.push(obj.clone());
} else {
let new_val = json!(vec![v, obj]);
builder.remove(identifier);
builder.insert(identifier.to_string(), new_val);
}
} else {
builder.insert(
identifier.to_string(),
if arr {
json!(vec![obj.clone()])
} else {
obj.clone()
},
);
}
}
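/// Strip the serialized Nushell `Value` wrappers (e.g. `.String.val`, `.Record.val`)
/// from a flattened key, and normalize a few known table-config keys, so that
/// a key such as `a.Record.val.b.String.val` becomes `a.b`.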
fn filter_known_keys(&self, key: &str) -> String {
let mut filtered_key = key.to_string();
if filtered_key.contains(".String.val") {
filtered_key = filtered_key.replace(".String.val", "");
}
if filtered_key.contains(".Record.val") {
filtered_key = filtered_key.replace(".Record.val", "");
}
if filtered_key.contains(".List.vals") {
filtered_key = filtered_key.replace(".List.vals", "");
}
if filtered_key.contains(".Int.val") {
filtered_key = filtered_key.replace(".Int.val", "");
}
if filtered_key.contains(".Bool.val") {
filtered_key = filtered_key.replace(".Bool.val", "");
}
if filtered_key.contains(".Truncate.suffix") {
filtered_key = filtered_key.replace(".Truncate.suffix", ".truncating_suffix");
}
if filtered_key.contains(".RowCount") {
filtered_key = filtered_key.replace(".RowCount", "");
}
if filtered_key.contains(".Wrap.try_to_keep_words") {
filtered_key =
filtered_key.replace(".Wrap.try_to_keep_words", ".wrapping_try_keep_words");
}
// For now, let's skip replacing these because they tell us which
// numbers are closures and blocks which is useful for extracting the content
// if filtered_key.contains(".Closure.val") {
// filtered_key = filtered_key.replace(".Closure.val", "");
// }
// if filtered_key.contains(".block_id") {
// filtered_key = filtered_key.replace(".block_id", "");
// }
filtered_key
}
}

View file

@ -3,6 +3,7 @@ mod casing;
mod deansi;
pub mod emoji;
pub mod filesystem;
pub mod flatten_json;
pub mod locale;
mod quoting;
mod shared_cow;
@ -10,8 +11,8 @@ pub mod utils;
pub use locale::get_system_locale;
pub use utils::{
enable_vt_processing, get_default_config, get_default_env, get_ls_colors, get_sample_config,
get_sample_env, get_scaffold_config, get_scaffold_env, stderr_write_all_and_flush,
enable_vt_processing, get_default_config, get_default_env, get_doc_config, get_doc_env,
get_ls_colors, get_scaffold_config, get_scaffold_env, stderr_write_all_and_flush,
stdout_write_all_and_flush, terminal_size,
};
@ -20,6 +21,7 @@ pub use deansi::{
strip_ansi_likely, strip_ansi_string_likely, strip_ansi_string_unlikely, strip_ansi_unlikely,
};
pub use emoji::contains_emoji;
pub use flatten_json::JsonFlattener;
pub use quoting::{escape_quote_string, needs_quoting};
pub use shared_cow::SharedCow;

View file

@ -94,8 +94,8 @@ pub fn get_scaffold_env() -> &'static str {
include_str!("default_files/scaffold_env.nu")
}
pub fn get_sample_env() -> &'static str {
include_str!("default_files/sample_env.nu")
pub fn get_doc_env() -> &'static str {
include_str!("default_files/doc_env.nu")
}
pub fn get_default_config() -> &'static str {
@ -106,8 +106,8 @@ pub fn get_scaffold_config() -> &'static str {
include_str!("default_files/scaffold_config.nu")
}
pub fn get_sample_config() -> &'static str {
include_str!("default_files/sample_config.nu")
pub fn get_doc_config() -> &'static str {
include_str!("default_files/doc_config.nu")
}
pub fn get_ls_colors(lscolors_env_string: Option<String>) -> LsColors {

View file

@ -30,9 +30,13 @@ impl PluginCommand for ProfileDF {
}
fn description(&self) -> &str {
"Profile a lazy dataframe. This will run the query and return a record containing the materialized DataFrame and a DataFrame that contains profiling information of each node that is executed.
"Profile a lazy dataframe."
}
The units of the timings are microseconds."
fn extra_description(&self) -> &str {
r#"This will run the query and return a record containing the materialized DataFrame and a DataFrame that contains profiling information of each node that is executed.
The units of the timings are microseconds."#
}
fn examples(&self) -> Vec<Example> {

View file

@ -8,7 +8,10 @@ use nu_protocol::{
Category, Example, LabeledError, PipelineData, ShellError, Signature, Span, SyntaxShape, Type,
Value,
};
use polars::prelude::{Expr, JoinType};
use polars::{
df,
prelude::{Expr, JoinCoalesce, JoinType},
};
#[derive(Clone)]
pub struct LazyJoin;
@ -37,6 +40,7 @@ impl PluginCommand for LazyJoin {
.switch("left", "left join between lazyframes", Some('l'))
.switch("full", "full join between lazyframes", Some('f'))
.switch("cross", "cross join between lazyframes", Some('c'))
.switch("coalesce-columns", "Sets the join coalesce strategy to colesce columns. Most useful when used with --full, which will not otherwise coalesce.", None)
.named(
"suffix",
SyntaxShape::String,
@ -172,6 +176,24 @@ impl PluginCommand for LazyJoin {
.into_value(Span::test_data()),
),
},
Example {
description: "Perform a full join of two dataframes and coalesce columns",
example: r#"let table1 = [[A B]; ["common" "common"] ["table1" "only"]] | polars into-df
let table2 = [[A C]; ["common" "common"] ["table2" "only"]] | polars into-df
$table1 | polars join -f $table2 --coalesce-columns A A"#,
result: Some(
NuDataFrame::new(
false,
df!(
"A" => [Some("common"), Some("table2"), Some("table1")],
"B" => [Some("common"), None, Some("only")],
"C" => [Some("common"), Some("only"), None]
)
.expect("Should have created a DataFrame"),
)
.into_value(Span::test_data()),
),
},
Example {
description: "Join one eager dataframe with another using a cross join",
example: r#"let tokens = [[monopoly_token]; [hat] [shoe] [boat]] | polars into-df
@ -279,9 +301,17 @@ impl PluginCommand for LazyJoin {
let lazy = NuLazyFrame::try_from_value_coerce(plugin, &value)?;
let from_eager = lazy.from_eager;
let lazy = lazy.to_polars();
let coalesce = if call.has_flag("coalesce-columns")? {
JoinCoalesce::CoalesceColumns
} else {
JoinCoalesce::default()
};
let lazy = if cross {
lazy.join_builder()
.with(other)
.coalesce(coalesce)
.left_on(vec![])
.right_on(vec![])
.how(how)
@ -291,6 +321,7 @@ impl PluginCommand for LazyJoin {
} else {
lazy.join_builder()
.with(other)
.coalesce(coalesce)
.left_on(left_on)
.right_on(right_on)
.how(how)

View file

@ -182,7 +182,6 @@ pub(crate) fn run_repl(
) -> Result<(), miette::ErrReport> {
trace!("run_repl");
let mut stack = Stack::new();
let start_time = std::time::Instant::now();
if parsed_nu_cli_args.no_config_file.is_none() {

View file

@ -169,41 +169,6 @@ fn comment_skipping_in_pipeline_3() -> TestResult {
)
}
#[test]
fn still_string_if_hashtag_is_middle_of_string() -> TestResult {
run_test(r#"echo test#testing"#, "test#testing")
}
#[test]
fn non_comment_hashtag_in_comment_does_not_stop_comment() -> TestResult {
run_test(r#"# command_bar_text: { fg: '#C4C9C6' },"#, "")
}
#[test]
fn non_comment_hashtag_in_comment_does_not_stop_comment_in_block() -> TestResult {
run_test(
r#"{
explore: {
# command_bar_text: { fg: '#C4C9C6' },
}
} | get explore | is-empty"#,
"true",
)
}
#[test]
fn still_string_if_hashtag_is_middle_of_string_inside_each() -> TestResult {
run_test(
r#"1..1 | each {echo test#testing } | get 0"#,
"test#testing",
)
}
#[test]
fn still_string_if_hashtag_is_middle_of_string_inside_each_also_with_dot() -> TestResult {
run_test(r#"1..1 | each {echo '.#testing' } | get 0"#, ".#testing")
}
#[test]
fn bad_var_name() -> TestResult {
fail_test(r#"let $"foo bar" = 4"#, "can't contain")
@ -317,11 +282,6 @@ fn raw_string_with_equals() -> TestResult {
)
}
#[test]
fn raw_string_with_hashtag() -> TestResult {
run_test(r#"r##' one # two '##"#, "one # two")
}
#[test]
fn list_quotes_with_equals() -> TestResult {
run_test(