Move to using clippy (#1142)

* Clippy fixes

* Finish converting to use clippy

* fix warnings in new master

* fix windows

* fix windows

Co-authored-by: Artem Vorotnikov <artem@vorotnikov.me>
Jonathan Turner 2019-12-31 20:36:08 +13:00 committed by GitHub
parent 8093612cac
commit 72838cc083
93 changed files with 754 additions and 852 deletions

View file

@@ -45,9 +45,15 @@ steps:
   - bash: RUSTFLAGS="-D warnings" cargo test --all --features=stable
     condition: eq(variables['style'], 'unflagged')
     displayName: Run tests
+  - bash: RUSTFLAGS="-D warnings" cargo clippy --all --features=stable
+    condition: eq(variables['style'], 'unflagged')
+    displayName: Check clippy lints
   - bash: NUSHELL_ENABLE_ALL_FLAGS=1 RUSTFLAGS="-D warnings" cargo test --all --features=stable
     condition: eq(variables['style'], 'canary')
     displayName: Run tests
+  - bash: NUSHELL_ENABLE_ALL_FLAGS=1 RUSTFLAGS="-D warnings" cargo clippy --all --features=stable
+    condition: eq(variables['style'], 'canary')
+    displayName: Check clippy lints
   - bash: cargo fmt --all -- --check
     condition: eq(variables['style'], 'fmt')
     displayName: Lint
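
Note: the two added steps gate CI on clippy for both the unflagged and canary configurations; with RUSTFLAGS="-D warnings" lints are reported as errors, so a single clippy warning now fails the build. A minimal, hypothetical illustration of code that compiles but that such a step would reject (not from the nushell tree; clippy's len_zero lint is one example of what the gate catches):

    // Hypothetical example: clippy's `len_zero` lint flags the comparison below,
    // and with warnings denied the new pipeline step fails the build.
    fn is_blank(items: &[String]) -> bool {
        items.len() == 0 // clippy suggests `items.is_empty()` instead
    }

    fn main() {
        assert!(is_blank(&[]));
    }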

View file

@@ -31,7 +31,7 @@ fn get_cargo_workspace(manifest_dir: &str) -> Option<&Path> {
         let manifest: Manifest = serde_json::from_slice(&output.stdout).unwrap();
         let path = Box::leak(Box::new(PathBuf::from(manifest.workspace_root)));
         workspaces.insert(manifest_dir.to_string(), path.as_path());
-        workspaces.get(manifest_dir).map(|w| *w)
+        workspaces.get(manifest_dir).cloned()
     }
 }
@@ -47,7 +47,7 @@ pub fn build() -> Result<(), Box<dyn std::error::Error>> {
     let all_on = env::var("NUSHELL_ENABLE_ALL_FLAGS").is_ok();
     let flags: HashSet<String> = env::var("NUSHELL_ENABLE_FLAGS")
-        .map(|s| s.split(",").map(|s| s.to_string()).collect())
+        .map(|s| s.split(',').map(|s| s.to_string()).collect())
        .unwrap_or_else(|_| HashSet::new());
    if all_on && !flags.is_empty() {
@@ -72,7 +72,7 @@ pub fn build() -> Result<(), Box<dyn std::error::Error>> {
    let toml: HashMap<String, Feature> = toml::from_str(&std::fs::read_to_string(path)?)?;
    for (key, value) in toml.iter() {
-        if value.enabled == true || all_on || flags.contains(key) {
+        if value.enabled || all_on || flags.contains(key) {
            println!("cargo:rustc-cfg={}", key);
        }
    }
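
Both hunks above are idiom-level fixes clippy suggests: copying out of an Option with `.map(|w| *w)` becomes `.cloned()` (the map_clone lint), and a one-character string pattern like `split(",")` becomes a char pattern (the single_char_pattern lint). A small self-contained sketch of the two idioms, using made-up data rather than the build-script types:

    use std::collections::HashSet;

    fn main() {
        // char pattern instead of a one-character &str pattern
        let flags: HashSet<String> = "stable,extra"
            .split(',')
            .map(|s| s.to_string())
            .collect();

        // `.cloned()` instead of `.map(|s| s.clone())`
        let first = flags.iter().next().cloned();
        println!("{:?} {:?}", flags, first);
    }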

View file

@@ -85,11 +85,9 @@ impl From<ParseError> for ShellError {
     fn from(error: ParseError) -> ShellError {
         match error.reason {
             ParseErrorReason::Eof { expected, span } => ShellError::unexpected_eof(expected, span),
-            ParseErrorReason::ExtraTokens { actual } => {
-                ShellError::type_error("nothing", actual.clone())
-            }
+            ParseErrorReason::ExtraTokens { actual } => ShellError::type_error("nothing", actual),
             ParseErrorReason::Mismatch { actual, expected } => {
-                ShellError::type_error(expected, actual.clone())
+                ShellError::type_error(expected, actual)
             }
             ParseErrorReason::ArgumentError { command, error } => {
                 ShellError::argument_error(command, error)
@@ -146,7 +144,7 @@ pub struct ShellError {
     cause: Option<Box<ShellError>>,
 }
-/// `PrettyDebug` is for internal debugging. For user-facing debugging, [to_diagnostic](ShellError::to_diagnostic)
+/// `PrettyDebug` is for internal debugging. For user-facing debugging, [into_diagnostic](ShellError::into_diagnostic)
 /// is used, which prints an error, highlighting spans.
 impl PrettyDebug for ShellError {
     fn pretty(&self) -> DebugDocBuilder {
@@ -169,7 +167,7 @@ impl PrettyDebug for ShellError {
                 + b::space()
                 + b::description("actual:")
                 + b::space()
-                + b::option(actual.item.as_ref().map(|actual| b::description(actual))),
+                + b::option(actual.item.as_ref().map(b::description)),
                 ")",
             )
         }
@@ -388,13 +386,13 @@ impl ShellError {
                 // TODO: Get span of EOF
                 let diagnostic = Diagnostic::new(
                     Severity::Error,
-                    format!("Parse Error: Unexpected end of line"),
+                    "Parse Error: Unexpected end of line".to_string(),
                 );
                 ShellError::diagnostic(diagnostic)
             }
             nom::Err::Failure(span) | nom::Err::Error(span) => {
-                let diagnostic = Diagnostic::new(Severity::Error, format!("Parse Error"))
+                let diagnostic = Diagnostic::new(Severity::Error, "Parse Error".to_string())
                     .with_label(Label::new_primary(Span::from(span.0)));
                 ShellError::diagnostic(diagnostic)
@@ -406,7 +404,7 @@ impl ShellError {
         ProximateShellError::Diagnostic(ShellDiagnostic { diagnostic }).start()
     }
-    pub fn to_diagnostic(self) -> Diagnostic<Span> {
+    pub fn into_diagnostic(self) -> Diagnostic<Span> {
         match self.error {
             ProximateShellError::MissingValue { span, reason } => {
                 let mut d = Diagnostic::new(
@@ -426,7 +424,7 @@ impl ShellError {
             } => match error {
                 ArgumentError::InvalidExternalWord => Diagnostic::new(
                     Severity::Error,
-                    format!("Invalid bare word for Nu command (did you intend to invoke an external command?)"))
+                    "Invalid bare word for Nu command (did you intend to invoke an external command?)".to_string())
                 .with_label(Label::new_primary(command.span)),
                 ArgumentError::MissingMandatoryFlag(name) => Diagnostic::new(
                     Severity::Error,
@@ -483,7 +481,7 @@ impl ShellError {
             ProximateShellError::UnexpectedEof {
                 expected, span
-            } => Diagnostic::new(Severity::Error, format!("Unexpected end of input"))
+            } => Diagnostic::new(Severity::Error, "Unexpected end of input".to_string())
                 .with_label(Label::new_primary(span).with_message(format!("Expected {}", expected))),
             ProximateShellError::RangeError {
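
Two patterns repeat throughout this file: `format!` calls with no interpolation become plain `.to_string()` (clippy's useless_format lint), and `to_diagnostic`, which consumes `self`, is renamed `into_diagnostic` to match the convention that `into_` methods take ownership (the naming family covered by clippy's wrong_self_convention lint). A hypothetical sketch of both, not the actual ShellError API:

    // Hypothetical type illustrating the naming and string-building conventions.
    struct Report {
        text: String,
    }

    impl Report {
        // Consumes self, hence the `into_` prefix.
        fn into_message(self) -> String {
            self.text
        }
    }

    fn main() {
        // A fixed string is built with `.to_string()`, not `format!("Parse Error")`.
        let r = Report { text: "Parse Error".to_string() };
        println!("{}", r.into_message());
    }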

View file

@@ -50,7 +50,7 @@ impl PrettyDebug for ExternalCommand {
             + b::preceded(
                 b::space(),
                 b::intersperse(
-                    self.args.iter().map(|a| b::primitive(format!("{}", a.arg))),
+                    self.args.iter().map(|a| b::primitive(a.arg.to_string())),
                     b::space(),
                 ),
             ),

View file

@@ -107,7 +107,7 @@ impl SignatureRegistry for TestRegistry {
         self.signatures.contains_key(name)
     }
     fn get(&self, name: &str) -> Option<Signature> {
-        self.signatures.get(name).map(|sig| sig.clone())
+        self.signatures.get(name).cloned()
     }
 }
@@ -159,7 +159,7 @@ fn inner_string_span(span: Span) -> Span {
 }
 pub fn print_err(err: ShellError, source: &Text) {
-    let diag = err.to_diagnostic();
+    let diag = err.into_diagnostic();
     let writer = termcolor::StandardStream::stderr(termcolor::ColorChoice::Auto);
     let mut source = source.to_string();

View file

@@ -295,7 +295,7 @@ impl ColorSyntax for ExternalExpressionShape {
         };
         token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes));
-        return ExternalExpressionResult::Processed;
+        ExternalExpressionResult::Processed
     }
 }

View file

@@ -24,16 +24,14 @@ impl PrettyDebugWithSource for NamedValue {
     }
 }
-#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]
+#[derive(Debug, Default, Clone, Eq, PartialEq, Serialize, Deserialize)]
 pub struct NamedArguments {
     pub named: IndexMap<String, NamedValue>,
 }
 impl NamedArguments {
     pub fn new() -> NamedArguments {
-        NamedArguments {
-            named: IndexMap::new(),
-        }
+        Default::default()
     }
     pub fn iter(&self) -> impl Iterator<Item = (&String, &NamedValue)> {
@@ -47,7 +45,7 @@ impl NamedArguments {
         trace!("Inserting switch -- {} = {:?}", name, switch);
         match switch {
-            None => self.named.insert(name.into(), NamedValue::AbsentSwitch),
+            None => self.named.insert(name, NamedValue::AbsentSwitch),
             Some(flag) => self.named.insert(
                 name,
                 NamedValue::PresentSwitch(Tag {
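
Deriving Default and letting `new()` delegate to it is the shape clippy's new_without_default lint pushes toward, and it removes the hand-written field initialization. A minimal sketch with a hypothetical struct (the real type uses an IndexMap):

    use std::collections::HashMap;

    #[derive(Debug, Default)]
    struct NamedArgs {
        named: HashMap<String, String>,
    }

    impl NamedArgs {
        fn new() -> NamedArgs {
            // Same as NamedArgs { named: HashMap::new() }
            Default::default()
        }
    }

    fn main() {
        let args = NamedArgs::new();
        println!("{} named arguments", args.named.len());
    }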

View file

@@ -111,15 +111,12 @@ impl ExpandSyntax for KeywordShape {
     ) -> Result<Self::Output, ParseError> {
         let atom = expand_atom(token_nodes, "keyword", context, ExpansionRule::new())?;
-        match &atom.unspanned {
-            UnspannedAtomicToken::Word { text } => {
-                let word = text.slice(context.source());
-                if word == self.keyword {
-                    return Ok(atom.span);
-                }
-            }
-            _ => {}
+        if let UnspannedAtomicToken::Word { text } = &atom.unspanned {
+            let word = text.slice(context.source());
+            if word == self.keyword {
+                return Ok(atom.span);
+            }
         }
         Err(ParseError::mismatch(self.keyword, atom.spanned_type_name()))
@@ -338,17 +335,14 @@ impl ExpandSyntax for IdentifierShape {
     ) -> Result<Self::Output, ParseError> {
         let atom = expand_atom(token_nodes, "identifier", context, ExpansionRule::new())?;
-        match atom.unspanned {
-            UnspannedAtomicToken::Word { text } => {
-                let body = text.slice(context.source());
-                if is_id(body) {
-                    return Ok(Identifier {
-                        body: body.to_string(),
-                        span: text,
-                    });
-                }
-            }
-            _ => {}
+        if let UnspannedAtomicToken::Word { text } = atom.unspanned {
+            let body = text.slice(context.source());
+            if is_id(body) {
+                return Ok(Identifier {
+                    body: body.to_string(),
+                    span: text,
+                });
+            }
         }
         Err(ParseError::mismatch("identifier", atom.spanned_type_name()))
@@ -359,7 +353,7 @@ fn is_id(input: &str) -> bool {
     let source = nu_source::nom_input(input);
     match crate::parse::parser::ident(source) {
         Err(_) => false,
-        Ok((input, _)) => input.fragment.len() == 0,
+        Ok((input, _)) => input.fragment.is_empty(),
     }
 }
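
The KeywordShape and IdentifierShape hunks collapse a `match` whose only other arm is `_ => {}` into `if let`, which is the rewrite clippy's single_match lint suggests. A minimal, hypothetical example of the same transformation:

    // Hypothetical helper: a single-arm match rewritten as `if let`.
    fn first_char(word: Option<&str>) -> Option<char> {
        // Before: match word { Some(text) => { ... } _ => {} }
        if let Some(text) = word {
            if !text.is_empty() {
                return text.chars().next();
            }
        }
        None
    }

    fn main() {
        assert_eq!(first_char(Some("clippy")), Some('c'));
        assert_eq!(first_char(None), None);
    }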

View file

@@ -732,9 +732,9 @@ impl ExpandSyntax for CommandHeadShape {
         });
         match node {
-            Ok(expr) => return Ok(expr),
+            Ok(expr) => Ok(expr),
             Err(_) => match expand_expr(&AnyExpressionShape, token_nodes, context) {
-                Ok(expr) => return Ok(CommandSignature::Expression(expr)),
+                Ok(expr) => Ok(CommandSignature::Expression(expr)),
                 Err(_) => Err(token_nodes.peek_non_ws().type_error("command head3")),
             },
         }
@@ -834,7 +834,7 @@ impl FallibleColorSyntax for InternalCommandHeadShape {
         let node = peeked_head.commit();
-        let _expr = match node {
+        match node {
             TokenNode::Token(Token {
                 unspanned: UnspannedToken::Bare,
                 span,
@@ -904,8 +904,8 @@ impl<'token> SingleError<'token> {
     }
 }
-fn parse_single_node<'a, 'b, T>(
-    token_nodes: &'b mut TokensIterator<'a>,
+fn parse_single_node<T>(
+    token_nodes: &mut TokensIterator<'_>,
     expected: &'static str,
     callback: impl FnOnce(UnspannedToken, Span, SingleError) -> Result<T, ParseError>,
 ) -> Result<T, ParseError> {
@@ -926,8 +926,8 @@ fn parse_single_node<'a, 'b, T>(
     })
 }
-fn parse_single_node_skipping_ws<'a, 'b, T>(
-    token_nodes: &'b mut TokensIterator<'a>,
+fn parse_single_node_skipping_ws<T>(
+    token_nodes: &mut TokensIterator<'_>,
     expected: &'static str,
     callback: impl FnOnce(UnspannedToken, Span, SingleError) -> Result<T, ShellError>,
 ) -> Result<T, ShellError> {
@@ -982,7 +982,7 @@ impl FallibleColorSyntax for WhitespaceShape {
         let node = peeked.commit();
-        let _ = match node {
+        match node {
             TokenNode::Whitespace(span) => {
                 token_nodes.color_shape(FlatShape::Whitespace.spanned(*span))
             }

View file

@@ -39,21 +39,17 @@ impl FallibleColorSyntax for AnyBlockShape {
         // is it just a block?
         let block = block.node.as_block();
-        match block {
-            // If so, color it as a block
-            Some((children, spans)) => {
-                token_nodes.child(children, context.source.clone(), |token_nodes| {
-                    color_syntax_with(
-                        &DelimitedShape,
-                        &(Delimiter::Brace, spans.0, spans.1),
-                        token_nodes,
-                        context,
-                    );
-                });
+        if let Some((children, spans)) = block {
+            token_nodes.child(children, context.source.clone(), |token_nodes| {
+                color_syntax_with(
+                    &DelimitedShape,
+                    &(Delimiter::Brace, spans.0, spans.1),
+                    token_nodes,
+                    context,
+                );
+            });
             return Ok(());
-            }
-            _ => {}
         }
         // Otherwise, look for a shorthand block. If none found, fail
@@ -76,16 +72,13 @@ impl ExpandExpression for AnyBlockShape {
         // is it just a block?
         let block = block.node.as_block();
-        match block {
-            Some((block, _tags)) => {
-                let mut iterator =
-                    TokensIterator::new(&block.item, block.span, context.source.clone(), false);
-                let exprs = expand_syntax(&ExpressionListShape, &mut iterator, context)?.exprs;
-                return Ok(hir::RawExpression::Block(exprs.item).into_expr(block.span));
-            }
-            _ => {}
+        if let Some((block, _tags)) = block {
+            let mut iterator =
+                TokensIterator::new(&block.item, block.span, context.source.clone(), false);
+            let exprs = expand_syntax(&ExpressionListShape, &mut iterator, context)?.exprs;
+            return Ok(hir::RawExpression::Block(exprs.item).into_expr(block.span));
         }
         expand_syntax(&ShorthandBlock, token_nodes, context)
@@ -169,30 +162,20 @@ impl FallibleColorSyntax for ShorthandPath {
         token_nodes.atomic(|token_nodes| {
             let variable = color_fallible_syntax(&VariablePathShape, token_nodes, context);
-            match variable {
-                Ok(_) => {
-                    // if it's a variable path, that's the head part
-                    return Ok(());
-                }
-                Err(_) => {
-                    // otherwise, we'll try to find a member path
-                }
+            if variable.is_ok() {
+                // if it's a variable path, that's the head part
+                return Ok(());
             }
+            // otherwise, we'll try to find a member path
             // look for a member (`<member>` -> `$it.<member>`)
             color_fallible_syntax(&MemberShape, token_nodes, context)?;
             // Now that we've synthesized the head, of the path, proceed to expand the tail of the path
             // like any other path.
-            let tail = color_fallible_syntax(&PathTailShape, token_nodes, context);
-            match tail {
-                Ok(_) => {}
-                Err(_) => {
-                    // It's ok if there's no path tail; a single member is sufficient
-                }
-            }
+            // It's ok if there's no path tail; a single member is sufficient
+            let _ = color_fallible_syntax(&PathTailShape, token_nodes, context);
             Ok(())
         })
@@ -212,9 +195,8 @@ impl ExpandExpression for ShorthandPath {
         // if it's a variable path, that's the head part
         let path = expand_expr(&VariablePathShape, token_nodes, context);
-        match path {
-            Ok(path) => return Ok(path),
-            Err(_) => {}
+        if let Ok(path) = path {
+            return Ok(path);
         }
         // Synthesize the head of the shorthand path (`<member>` -> `$it.<member>`)
@@ -225,7 +207,7 @@ impl ExpandExpression for ShorthandPath {
         let tail = expand_syntax(&PathTailShape, token_nodes, context);
         match tail {
-            Err(_) => return Ok(head),
+            Err(_) => Ok(head),
             Ok(PathTailSyntax { tail, .. }) => {
                 // For each member that `PathTailShape` expanded, join it onto the existing expression
                 // to form a new path

View file

@@ -111,13 +111,9 @@ pub(crate) fn continue_coloring_expression(
         // Check to see whether there's any continuation after the head expression
         let result = color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context);
-        match result {
-            Err(_) => {
-                // We already saw one continuation, so just return
-                return Ok(());
-            }
-            Ok(_) => {}
+        if result.is_err() {
+            // We already saw one continuation, so just return
+            return Ok(());
         }
     }
 }
@@ -138,19 +134,17 @@ impl ExpandExpression for AnyExpressionStartShape {
         let atom = expand_atom(token_nodes, "expression", context, ExpansionRule::new())?;
         match atom.unspanned {
-            UnspannedAtomicToken::Size { number, unit } => {
-                return Ok(hir::Expression::size(
-                    number.to_number(context.source),
-                    unit.item,
-                    Tag {
-                        span: atom.span,
-                        anchor: None,
-                    },
-                ))
-            }
+            UnspannedAtomicToken::Size { number, unit } => Ok(hir::Expression::size(
+                number.to_number(context.source),
+                unit.item,
+                Tag {
+                    span: atom.span,
+                    anchor: None,
+                },
+            )),
             UnspannedAtomicToken::SquareDelimited { nodes, .. } => {
-                expand_delimited_square(&nodes, atom.span.into(), context)
+                expand_delimited_square(&nodes, atom.span, context)
             }
             UnspannedAtomicToken::Word { .. } => {
@@ -158,11 +152,9 @@ impl ExpandExpression for AnyExpressionStartShape {
                 Ok(hir::Expression::bare(atom.span.until_option(end)))
             }
-            other => {
-                return other
-                    .into_atomic_token(atom.span)
-                    .into_hir(context, "expression")
-            }
+            other => other
+                .into_atomic_token(atom.span)
+                .to_hir(context, "expression"),
         }
     }
 }
@@ -208,7 +200,7 @@ impl FallibleColorSyntax for AnyExpressionStartShape {
             UnspannedAtomicToken::Size { number, unit } => token_nodes.color_shape(
                 FlatShape::Size {
                     number: number.span(),
-                    unit: unit.span.into(),
+                    unit: unit.span,
                 }
                 .spanned(atom.span),
             ),
@@ -218,7 +210,7 @@ impl FallibleColorSyntax for AnyExpressionStartShape {
                     (&nodes[..]).spanned(atom.span),
                     context.source.clone(),
                     |tokens| {
-                        color_delimited_square(spans, tokens, atom.span.into(), context);
+                        color_delimited_square(spans, tokens, atom.span, context);
                     },
                 );
             }
@@ -257,13 +249,13 @@ impl FallibleColorSyntax for BareTailShape {
             let word =
                 color_fallible_syntax_with(&BareShape, &FlatShape::Word, token_nodes, context);
-            match word {
+            if word.is_ok() {
                 // if a word was found, continue
-                Ok(_) => continue,
-                // if a word wasn't found, try to find a dot
-                Err(_) => {}
+                continue;
             }
+            // if a word wasn't found, try to find a dot
             // try to find a dot
             let dot = color_fallible_syntax_with(
                 &ColorableDotShape,

View file

@@ -148,7 +148,7 @@ impl<'tokens> Deref for AtomicToken<'tokens> {
 }
 impl<'tokens> AtomicToken<'tokens> {
-    pub fn into_hir(
+    pub fn to_hir(
         &self,
         context: &ExpandContext,
         expected: &'static str,
@@ -198,59 +198,49 @@ impl<'tokens> AtomicToken<'tokens> {
     pub(crate) fn color_tokens(&self, shapes: &mut Vec<Spanned<FlatShape>>) {
         match &self.unspanned {
             UnspannedAtomicToken::Eof { .. } => {}
-            UnspannedAtomicToken::Error { .. } => {
-                return shapes.push(FlatShape::Error.spanned(self.span))
-            }
+            UnspannedAtomicToken::Error { .. } => shapes.push(FlatShape::Error.spanned(self.span)),
             UnspannedAtomicToken::CompareOperator { .. } => {
-                return shapes.push(FlatShape::CompareOperator.spanned(self.span));
+                shapes.push(FlatShape::CompareOperator.spanned(self.span))
             }
             UnspannedAtomicToken::ShorthandFlag { .. } => {
-                return shapes.push(FlatShape::ShorthandFlag.spanned(self.span));
+                shapes.push(FlatShape::ShorthandFlag.spanned(self.span))
             }
             UnspannedAtomicToken::Whitespace { .. } => {
-                return shapes.push(FlatShape::Whitespace.spanned(self.span));
+                shapes.push(FlatShape::Whitespace.spanned(self.span))
             }
             UnspannedAtomicToken::Number {
                 number: RawNumber::Decimal(_),
-            } => {
-                return shapes.push(FlatShape::Decimal.spanned(self.span));
-            }
+            } => shapes.push(FlatShape::Decimal.spanned(self.span)),
             UnspannedAtomicToken::Number {
                 number: RawNumber::Int(_),
-            } => {
-                return shapes.push(FlatShape::Int.spanned(self.span));
-            }
-            UnspannedAtomicToken::Size { number, unit } => {
-                return shapes.push(
-                    FlatShape::Size {
-                        number: number.span(),
-                        unit: unit.span,
-                    }
-                    .spanned(self.span),
-                );
-            }
+            } => shapes.push(FlatShape::Int.spanned(self.span)),
+            UnspannedAtomicToken::Size { number, unit } => shapes.push(
+                FlatShape::Size {
+                    number: number.span(),
+                    unit: unit.span,
+                }
+                .spanned(self.span),
+            ),
             UnspannedAtomicToken::String { .. } => {
-                return shapes.push(FlatShape::String.spanned(self.span))
+                shapes.push(FlatShape::String.spanned(self.span))
             }
             UnspannedAtomicToken::ItVariable { .. } => {
-                return shapes.push(FlatShape::ItVariable.spanned(self.span))
+                shapes.push(FlatShape::ItVariable.spanned(self.span))
             }
             UnspannedAtomicToken::Variable { .. } => {
-                return shapes.push(FlatShape::Variable.spanned(self.span))
+                shapes.push(FlatShape::Variable.spanned(self.span))
             }
             UnspannedAtomicToken::ExternalCommand { .. } => {
-                return shapes.push(FlatShape::ExternalCommand.spanned(self.span));
+                shapes.push(FlatShape::ExternalCommand.spanned(self.span))
             }
             UnspannedAtomicToken::ExternalWord { .. } => {
-                return shapes.push(FlatShape::ExternalWord.spanned(self.span))
+                shapes.push(FlatShape::ExternalWord.spanned(self.span))
            }
            UnspannedAtomicToken::GlobPattern { .. } => {
-                return shapes.push(FlatShape::GlobPattern.spanned(self.span))
+                shapes.push(FlatShape::GlobPattern.spanned(self.span))
            }
-            UnspannedAtomicToken::Word { .. } => {
-                return shapes.push(FlatShape::Word.spanned(self.span))
-            }
-            _ => return shapes.push(FlatShape::Error.spanned(self.span)),
+            UnspannedAtomicToken::Word { .. } => shapes.push(FlatShape::Word.spanned(self.span)),
+            _ => shapes.push(FlatShape::Error.spanned(self.span)),
        }
    }
 }
@@ -524,14 +514,13 @@ fn expand_atom_inner<'me, 'content>(
     rule: ExpansionRule,
 ) -> Result<AtomicToken<'content>, ParseError> {
     if token_nodes.at_end() {
-        match rule.allow_eof {
-            true => {
-                return Ok(UnspannedAtomicToken::Eof {
-                    span: Span::unknown(),
-                }
-                .into_atomic_token(Span::unknown()))
-            }
-            false => return Err(ParseError::unexpected_eof("anything", Span::unknown())),
+        if rule.allow_eof {
+            return Ok(UnspannedAtomicToken::Eof {
+                span: Span::unknown(),
+            }
+            .into_atomic_token(Span::unknown()));
+        } else {
+            return Err(ParseError::unexpected_eof("anything", Span::unknown()));
         }
     }
@@ -540,9 +529,8 @@ fn expand_atom_inner<'me, 'content>(
     // If treat_size_as_word, don't try to parse the head of the token stream
     // as a size.
-    match rule.treat_size_as_word {
-        true => {}
-        false => match expand_syntax(&UnitShape, token_nodes, context) {
+    if !rule.treat_size_as_word {
+        match expand_syntax(&UnitShape, token_nodes, context) {
            // If the head of the stream isn't a valid unit, we'll try to parse
            // it again next as a word
            Err(_) => {}
@@ -552,31 +540,28 @@ fn expand_atom_inner<'me, 'content>(
                unit: (number, unit),
                span,
            }) => return Ok(UnspannedAtomicToken::Size { number, unit }.into_atomic_token(span)),
-        },
+        }
     }
-    match rule.separate_members {
-        false => {}
-        true => {
-            let mut next = token_nodes.peek_any();
-            match next.node {
-                Some(token) if token.is_word() => {
-                    next.commit();
-                    return Ok(UnspannedAtomicToken::Word { text: token.span() }
-                        .into_atomic_token(token.span()));
-                }
-                Some(token) if token.is_int() => {
-                    next.commit();
-                    return Ok(UnspannedAtomicToken::Number {
-                        number: RawNumber::Int(token.span()),
-                    }
-                    .into_atomic_token(token.span()));
-                }
-                _ => {}
-            }
+    if rule.separate_members {
+        let mut next = token_nodes.peek_any();
+        match next.node {
+            Some(token) if token.is_word() => {
+                next.commit();
+                return Ok(UnspannedAtomicToken::Word { text: token.span() }
+                    .into_atomic_token(token.span()));
+            }
+            Some(token) if token.is_int() => {
+                next.commit();
+                return Ok(UnspannedAtomicToken::Number {
+                    number: RawNumber::Int(token.span()),
+                }
+                .into_atomic_token(token.span()));
+            }
+            _ => {}
         }
     }
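
The expand_atom_inner hunks replace `match` over the boolean rule fields (`allow_eof`, `treat_size_as_word`, `separate_members`) with ordinary `if`/`if !` branches, which is what clippy's match_bool lint recommends. A tiny hypothetical sketch of the same rewrite:

    // Hypothetical function: matching on a bool replaced by a plain if/else.
    fn describe(allow_eof: bool) -> &'static str {
        // Before: match allow_eof { true => "eof ok", false => "eof is an error" }
        if allow_eof {
            "eof ok"
        } else {
            "eof is an error"
        }
    }

    fn main() {
        println!("{}", describe(true));
        println!("{}", describe(false));
    }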

View file

@@ -6,7 +6,7 @@ use nu_errors::ParseError;
 use nu_source::{Span, SpannedItem, Tag};
 pub fn expand_delimited_square(
-    children: &Vec<TokenNode>,
+    children: &[TokenNode],
     span: Span,
     context: &ExpandContext,
 ) -> Result<hir::Expression, ParseError> {

View file

@@ -74,15 +74,15 @@ impl ExpandExpression for FilePathShape {
             | UnspannedAtomicToken::ExternalWord { text: body }
             | UnspannedAtomicToken::String { body } => {
                 let path = expand_file_path(body.slice(context.source), context);
-                return Ok(hir::Expression::file_path(path, atom.span));
+                Ok(hir::Expression::file_path(path, atom.span))
             }
             UnspannedAtomicToken::Number { .. } | UnspannedAtomicToken::Size { .. } => {
                 let path = atom.span.slice(context.source);
-                return Ok(hir::Expression::file_path(path, atom.span));
+                Ok(hir::Expression::file_path(path, atom.span))
             }
-            _ => return atom.into_hir(context, "file path"),
+            _ => atom.to_hir(context, "file path"),
         }
     }
 }

View file

@@ -120,27 +120,21 @@ impl ColorSyntax for ExpressionListShape {
             }
         } else {
             // Try to color the head of the stream as an expression
-            match color_fallible_syntax(&AnyExpressionShape, token_nodes, context) {
+            if color_fallible_syntax(&AnyExpressionShape, token_nodes, context).is_err() {
                 // If no expression was found, switch to backoff coloring mode
-                Err(_) => {
-                    backoff = true;
-                    continue;
-                }
-                Ok(_) => {}
+                backoff = true;
+                continue;
             }
             // If an expression was found, consume a space
-            match color_fallible_syntax(&SpaceShape, token_nodes, context) {
-                Err(_) => {
-                    // If no space was found, we're either at the end or there's an error.
-                    // Either way, switch to backoff coloring mode. If we're at the end
-                    // it won't have any consequences.
-                    backoff = true;
-                }
-                Ok(_) => {
-                    // Otherwise, move on to the next expression
-                }
+            if color_fallible_syntax(&SpaceShape, token_nodes, context).is_err() {
+                // If no space was found, we're either at the end or there's an error.
+                // Either way, switch to backoff coloring mode. If we're at the end
+                // it won't have any consequences.
+                backoff = true;
             }
+            // Otherwise, move on to the next expression
         }
     }
 }

View file

@@ -68,9 +68,9 @@ impl ExpandExpression for PatternShape {
             | UnspannedAtomicToken::ExternalWord { text: body }
             | UnspannedAtomicToken::GlobPattern { pattern: body } => {
                 let path = expand_file_path(body.slice(context.source), context);
-                return Ok(hir::Expression::pattern(path.to_string_lossy(), atom.span));
+                Ok(hir::Expression::pattern(path.to_string_lossy(), atom.span))
             }
-            _ => return atom.into_hir(context, "pattern"),
+            _ => atom.to_hir(context, "pattern"),
         }
     }
 }

View file

@@ -41,9 +41,8 @@ impl ExpandExpression for VariablePathShape {
         let mut tail: Vec<PathMember> = vec![];
         loop {
-            match DotShape.skip(token_nodes, context) {
-                Err(_) => break,
-                Ok(_) => {}
+            if DotShape.skip(token_nodes, context).is_err() {
+                break;
             }
             let member = expand_syntax(&MemberShape, token_nodes, context)?;
@@ -77,17 +76,16 @@ impl FallibleColorSyntax for VariablePathShape {
         loop {
             // look for a dot at the head of a stream
-            let dot = color_fallible_syntax_with(
+            if color_fallible_syntax_with(
                 &ColorableDotShape,
                 &FlatShape::Dot,
                 token_nodes,
                 context,
-            );
-            // if there's no dot, we're done
-            match dot {
-                Err(_) => break,
-                Ok(_) => {}
+            )
+            .is_err()
+            {
+                // if there's no dot, we're done
+                break;
             }
             // otherwise, look for a member, and if you don't find one, fail
@@ -125,9 +123,8 @@ impl FallibleColorSyntax for PathTailShape {
             context,
         );
-        match result {
-            Err(_) => return Ok(()),
-            Ok(_) => {}
+        if result.is_err() {
+            return Ok(());
         }
         // If we've seen a dot but not a member, fail
@@ -170,9 +167,8 @@ impl ExpandSyntax for PathTailShape {
         let mut tail: Vec<PathMember> = vec![];
         loop {
-            match DotShape.skip(token_nodes, context) {
-                Err(_) => break,
-                Ok(_) => {}
+            if DotShape.skip(token_nodes, context).is_err() {
+                break;
             }
             let member = expand_syntax(&MemberShape, token_nodes, context)?;
@@ -649,13 +645,12 @@ impl FallibleColorSyntax for MemberShape {
         let bare =
             color_fallible_syntax_with(&BareShape, &FlatShape::BareMember, token_nodes, context);
-        match bare {
-            Ok(_) => return Ok(()),
-            Err(_) => {
-                // If we don't have a bare word, we'll look for a string
-            }
+        if bare.is_ok() {
+            return Ok(());
         }
+        // If we don't have a bare word, we'll look for a string
         // Look for a string token. If we don't find one, fail
         color_fallible_syntax_with(&StringShape, &FlatShape::StringMember, token_nodes, context)
     }
@@ -696,7 +691,7 @@ impl ExpandSyntax for IntMemberShape {
         let int = BigInt::from_str(text.slice(context.source));
         match int {
-            Ok(int) => return Ok(Member::Int(int, text)),
+            Ok(int) => Ok(Member::Int(int, text)),
             Err(_) => Err(ParseError::mismatch("integer member", "word".spanned(text))),
         }
     }

View file

@@ -35,7 +35,7 @@ pub enum FlatShape {
 }
 impl FlatShape {
-    pub fn from(token: &TokenNode, source: &Text, shapes: &mut Vec<Spanned<FlatShape>>) -> () {
+    pub fn from(token: &TokenNode, source: &Text, shapes: &mut Vec<Spanned<FlatShape>>) {
         match token {
             TokenNode::Token(token) => match token.unspanned {
                 UnspannedToken::Number(RawNumber::Int(_)) => {
@@ -84,7 +84,7 @@ impl FlatShape {
             }
             TokenNode::Pipeline(pipeline) => {
                 for part in &pipeline.parts {
-                    if let Some(_) = part.pipe {
+                    if part.pipe.is_some() {
                         shapes.push(FlatShape::Pipe.spanned(part.span()));
                     }
                 }

View file

@@ -5,7 +5,6 @@ use self::debug::{ColorTracer, ExpandTracer};
 use crate::hir::syntax_shape::FlatShape;
 use crate::hir::Expression;
 use crate::TokenNode;
-#[allow(unused)]
 use getset::{Getters, MutGetters};
 use nu_errors::{ParseError, ShellError};
 use nu_protocol::SpannedTypeName;
@@ -102,7 +101,7 @@ impl<'content, 'me> Peeked<'content, 'me> {
     }
     pub fn type_error(&self, expected: &'static str) -> ParseError {
-        peek_error(&self.node, self.iterator.eof_span(), expected)
+        peek_error(self.node, self.iterator.eof_span(), expected)
     }
 }
@@ -130,11 +129,11 @@ impl<'content, 'me> PeekedNode<'content, 'me> {
     pub fn rollback(self) {}
     pub fn type_error(&self, expected: &'static str) -> ParseError {
-        peek_error(&Some(self.node), self.iterator.eof_span(), expected)
+        peek_error(Some(self.node), self.iterator.eof_span(), expected)
     }
 }
-pub fn peek_error(node: &Option<&TokenNode>, eof_span: Span, expected: &'static str) -> ParseError {
+pub fn peek_error(node: Option<&TokenNode>, eof_span: Span, expected: &'static str) -> ParseError {
     match node {
         None => ParseError::unexpected_eof(expected, eof_span),
         Some(node) => ParseError::mismatch(expected, node.spanned_type_name()),
@@ -158,7 +157,7 @@ impl<'content> TokensIterator<'content> {
                 shapes: vec![],
             },
             color_tracer: ColorTracer::new(source.clone()),
-            expand_tracer: ExpandTracer::new(source.clone()),
+            expand_tracer: ExpandTracer::new(source),
         }
     }
@@ -174,6 +173,10 @@ impl<'content> TokensIterator<'content> {
         self.state.tokens.len()
     }
+    pub fn is_empty(&self) -> bool {
+        self.len() == 0
+    }
+
     pub fn spanned<T>(
         &mut self,
         block: impl FnOnce(&mut TokensIterator<'content>) -> T,
@@ -233,7 +236,7 @@ impl<'content> TokensIterator<'content> {
         let mut color_tracer = ColorTracer::new(source.clone());
         std::mem::swap(&mut color_tracer, &mut self.color_tracer);
-        let mut expand_tracer = ExpandTracer::new(source.clone());
+        let mut expand_tracer = ExpandTracer::new(source);
         std::mem::swap(&mut expand_tracer, &mut self.expand_tracer);
         let mut iterator = TokensIterator {
@@ -409,7 +412,7 @@ impl<'content> TokensIterator<'content> {
         let value = block(checkpoint.iterator)?;
         checkpoint.commit();
-        return Ok(value);
+        Ok(value)
     }
     /// Use a checkpoint when you need to peek more than one token ahead, but can't be sure
@@ -437,7 +440,7 @@ impl<'content> TokensIterator<'content> {
         let value = block(checkpoint.iterator)?;
         checkpoint.commit();
-        return Ok(value);
+        Ok(value)
     }
     /// Use a checkpoint when you need to peek more than one token ahead, but can't be sure
@@ -474,7 +477,7 @@ impl<'content> TokensIterator<'content> {
         checkpoint.commit();
         std::mem::swap(&mut self.state.shapes, &mut shapes);
-        return (Ok(value), shapes);
+        (Ok(value), shapes)
     }
     fn eof_span(&self) -> Span {
@@ -583,12 +586,12 @@ impl<'content> TokensIterator<'content> {
         let peeked = peeked.not_eof(expected);
         match peeked {
-            Err(err) => return Err(err),
+            Err(err) => Err(err),
             Ok(peeked) => match block(peeked.node) {
-                Err(err) => return Err(err),
+                Err(err) => Err(err),
                 Ok(val) => {
                     peeked.commit();
-                    return Ok(val);
+                    Ok(val)
                 }
             },
         }
@@ -658,10 +661,7 @@ fn peek<'content, 'me>(
     }
 }
-fn peek_pos<'content, 'me>(
-    iterator: &'me TokensIterator<'content>,
-    skip_ws: bool,
-) -> Option<usize> {
+fn peek_pos(iterator: &TokensIterator<'_>, skip_ws: bool) -> Option<usize> {
     let state = iterator.state();
     let mut to = state.index;

View file

@@ -24,13 +24,11 @@ pub(crate) fn debug_tokens(state: &TokensIteratorState, source: &str) -> Vec<Deb
             out.push(DebugIteratorToken::Cursor);
         }
+        let msg = token.debug(source).to_string();
         if state.seen.contains(&i) {
-            out.push(DebugIteratorToken::Seen(format!("{}", token.debug(source))));
+            out.push(DebugIteratorToken::Seen(msg));
         } else {
-            out.push(DebugIteratorToken::Unseen(format!(
-                "{}",
-                token.debug(source)
-            )));
+            out.push(DebugIteratorToken::Unseen(msg));
         }
     }

View file

@@ -49,7 +49,7 @@ pub struct ColorFrame {
 impl ColorFrame {
     fn colored_leaf_description(&self, f: &mut impl io::Write) -> io::Result<()> {
         if self.has_only_error_descendents() {
-            if self.children.len() == 0 {
+            if self.children.is_empty() {
                 write!(
                     f,
                     "{}",
@@ -109,14 +109,10 @@ impl ColorFrame {
     fn any_child_shape(&self, predicate: impl Fn(Spanned<FlatShape>) -> bool) -> bool {
         for item in &self.children {
-            match item {
-                FrameChild::Shape(shape) => {
-                    if predicate(*shape) {
-                        return true;
-                    }
-                }
-                _ => {}
+            if let FrameChild::Shape(shape) = item {
+                if predicate(*shape) {
+                    return true;
+                }
             }
         }
@@ -125,14 +121,10 @@ impl ColorFrame {
     fn any_child_frame(&self, predicate: impl Fn(&ColorFrame) -> bool) -> bool {
         for item in &self.children {
-            match item {
-                FrameChild::Frame(frame) => {
-                    if predicate(frame) {
-                        return true;
-                    }
-                }
-                _ => {}
+            if let FrameChild::Frame(frame) = item {
+                if predicate(frame) {
+                    return true;
+                }
             }
         }
@@ -148,7 +140,7 @@ impl ColorFrame {
     }
     fn has_only_error_descendents(&self) -> bool {
-        if self.children.len() == 0 {
+        if self.children.is_empty() {
             // if this frame has no children at all, it has only error descendents if this frame
             // is an error
             self.error.is_some()
@@ -259,7 +251,7 @@ impl ColorTracer {
         let result = self.frame_stack.pop().expect("Can't pop root tracer frame");
-        if self.frame_stack.len() == 0 {
+        if self.frame_stack.is_empty() {
             panic!("Can't pop root tracer frame {:#?}", self);
         }

View file

@@ -19,7 +19,7 @@ impl FrameChild {
     fn get_error_leaf(&self) -> Option<&'static str> {
         match self {
             FrameChild::Frame(frame) if frame.error.is_some() => {
-                if frame.children.len() == 0 {
+                if frame.children.is_empty() {
                     Some(frame.description)
                 } else {
                     None
@@ -33,12 +33,12 @@ impl FrameChild {
         match self {
             FrameChild::Expr(expr) => TreeChild::OkExpr(expr.clone(), text.clone()),
             FrameChild::Result(result) => {
-                let result = format!("{}", result.display());
+                let result = result.display();
                 TreeChild::OkNonExpr(result)
             }
             FrameChild::Frame(frame) => {
                 if frame.error.is_some() {
-                    if frame.children.len() == 0 {
+                    if frame.children.is_empty() {
                         TreeChild::ErrorLeaf(vec![frame.description])
                     } else {
                         TreeChild::ErrorFrame(frame.to_tree_frame(text), text.clone())
@@ -67,7 +67,7 @@ impl ExprFrame {
             if let Some(error_leaf) = child.get_error_leaf() {
                 errors.push(error_leaf);
                 continue;
-            } else if errors.len() > 0 {
+            } else if !errors.is_empty() {
                 children.push(TreeChild::ErrorLeaf(errors));
                 errors = vec![];
             }
@@ -75,7 +75,7 @@ impl ExprFrame {
             children.push(child.to_tree_child(text));
         }
-        if errors.len() > 0 {
+        if !errors.is_empty() {
             children.push(TreeChild::ErrorLeaf(errors));
         }
@@ -115,22 +115,20 @@ impl TreeFrame {
             write!(f, " -> ")?;
             self.children[0].leaf_description(f)
-        } else {
-            if self.error.is_some() {
-                if self.children.len() == 0 {
-                    write!(
-                        f,
-                        "{}",
-                        Color::White.bold().on(Color::Red).paint(self.description)
-                    )
-                } else {
-                    write!(f, "{}", Color::Red.normal().paint(self.description))
-                }
-            } else if self.has_descendent_green() {
-                write!(f, "{}", Color::Green.normal().paint(self.description))
+        } else if self.error.is_some() {
+            if self.children.is_empty() {
+                write!(
+                    f,
+                    "{}",
+                    Color::White.bold().on(Color::Red).paint(self.description)
+                )
             } else {
-                write!(f, "{}", Color::Yellow.bold().paint(self.description))
+                write!(f, "{}", Color::Red.normal().paint(self.description))
             }
+        } else if self.has_descendent_green() {
+            write!(f, "{}", Color::Green.normal().paint(self.description))
+        } else {
+            write!(f, "{}", Color::Yellow.bold().paint(self.description))
         }
     }
@@ -143,14 +141,10 @@ impl TreeFrame {
     fn any_child_frame(&self, predicate: impl Fn(&TreeFrame) -> bool) -> bool {
         for item in &self.children {
-            match item {
-                TreeChild::OkFrame(frame, ..) => {
-                    if predicate(frame) {
-                        return true;
-                    }
-                }
-                _ => {}
+            if let TreeChild::OkFrame(frame, ..) = item {
+                if predicate(frame) {
+                    return true;
+                }
             }
         }
@@ -209,7 +203,7 @@ impl TreeChild {
                 Color::White
                     .bold()
                     .on(Color::Green)
-                    .paint(format!("{}", result))
+                    .paint(result.to_string())
             ),
             TreeChild::ErrorLeaf(desc) => {
@@ -260,12 +254,7 @@ pub struct ExpandTracer {
 impl ExpandTracer {
     pub fn print(&self, source: Text) -> PrintTracer {
-        let root = self
-            .frame_stack
-            .iter()
-            .nth(0)
-            .unwrap()
-            .to_tree_frame(&source);
+        let root = self.frame_stack.get(0).unwrap().to_tree_frame(&source);
         PrintTracer { root, source }
     }
@@ -292,7 +281,7 @@ impl ExpandTracer {
     fn pop_frame(&mut self) -> ExprFrame {
         let result = self.frame_stack.pop().expect("Can't pop root tracer frame");
-        if self.frame_stack.len() == 0 {
+        if self.frame_stack.is_empty() {
             panic!("Can't pop root tracer frame");
         }

View file

@@ -1,3 +1,5 @@
+#![allow(clippy::large_enum_variant, clippy::type_complexity)]
+
 pub mod commands;
 pub mod hir;
 pub mod parse;

View file

@@ -30,7 +30,7 @@ impl PrettyDebugWithSource for CallNode {
 impl CallNode {
     pub fn new(head: Box<TokenNode>, children: Vec<TokenNode>) -> CallNode {
-        if children.len() == 0 {
+        if children.is_empty() {
             CallNode {
                 head,
                 children: None,

View file

@@ -26,7 +26,7 @@ impl language_reporting::ReportingFiles for Files {
     }
     fn file_name(&self, _file: Self::FileId) -> FileName {
-        FileName::Verbatim(format!("shell"))
+        FileName::Verbatim("shell".to_string())
     }
     fn byte_index(&self, _file: Self::FileId, _line: usize, _column: usize) -> Option<usize> {
@@ -143,9 +143,7 @@ impl language_reporting::ReportingFiles for Files {
     fn source(&self, span: Self::Span) -> Option<String> {
         trace!("source(tag={:?}) snippet={:?}", span, self.snippet);
-        if span.start() > span.end() {
-            return None;
-        } else if span.end() > self.snippet.len() {
+        if span.start() > span.end() || span.end() > self.snippet.len() {
             return None;
         }
         Some(span.slice(&self.snippet).to_string())

View file

@@ -22,12 +22,12 @@ impl PrettyDebug for CompareOperator {
 }
 impl CompareOperator {
-    pub fn print(&self) -> String {
+    pub fn print(self) -> String {
         self.as_str().to_string()
     }
-    pub fn as_str(&self) -> &str {
-        match *self {
+    pub fn as_str(self) -> &'static str {
+        match self {
             CompareOperator::Equal => "==",
             CompareOperator::NotEqual => "!=",
             CompareOperator::LessThan => "<",
@@ -76,12 +76,12 @@ impl PrettyDebug for EvaluationOperator {
 }
 impl EvaluationOperator {
-    pub fn print(&self) -> String {
+    pub fn print(self) -> String {
         self.as_str().to_string()
     }
-    pub fn as_str(&self) -> &str {
-        match *self {
+    pub fn as_str(self) -> &'static str {
+        match self {
             EvaluationOperator::Dot => ".",
             EvaluationOperator::DotDot => "..",
         }
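
For small Copy enums like these operators, clippy's trivially_copy_pass_by_ref lint prefers taking `self` by value over `&self`, and returning `&'static str` makes the result independent of the borrow. A hypothetical sketch of the same signature change:

    // Hypothetical Copy enum whose methods take `self` by value.
    #[derive(Clone, Copy)]
    enum Op {
        Equal,
        NotEqual,
    }

    impl Op {
        fn as_str(self) -> &'static str {
            match self {
                Op::Equal => "==",
                Op::NotEqual => "!=",
            }
        }
    }

    fn main() {
        for op in [Op::Equal, Op::NotEqual] {
            println!("{}", op.as_str());
        }
    }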

View file

@@ -233,13 +233,9 @@ pub fn raw_number(input: NomSpan) -> IResult<NomSpan, RawNumber> {
     let dotdot_result = dotdot(input);
-    match dotdot_result {
+    if let Ok((dotdot_input, _)) = dotdot_result {
         // If we see a `..` immediately after an integer, it's a range, not a decimal
-        Ok((dotdot_input, _)) => {
-            return Ok((input, RawNumber::int(Span::new(start, input.offset))))
-        }
-        Err(_) => {}
+        return Ok((input, RawNumber::int(Span::new(start, input.offset))));
     }
     let dot: IResult<NomSpan, NomSpan, (NomSpan, nom::error::ErrorKind)> = tag(".")(input);
@@ -434,10 +430,8 @@ enum SawSpecial {
 fn start_file_char(input: NomSpan) -> IResult<NomSpan, BitFlags<SawSpecial>> {
     let path_sep_result = special_file_char(input);
-    match path_sep_result {
-        Ok((input, special)) => return Ok((input, special)),
-        Err(_) => {}
+    if let Ok((input, special)) = path_sep_result {
+        return Ok((input, special));
     }
     start_filename(input).map(|(input, output)| (input, BitFlags::empty()))
@@ -447,9 +441,8 @@ fn start_file_char(input: NomSpan) -> IResult<NomSpan, BitFlags<SawSpecial>> {
 fn continue_file_char(input: NomSpan) -> IResult<NomSpan, BitFlags<SawSpecial>> {
     let path_sep_result = special_file_char(input);
-    match path_sep_result {
-        Ok((input, special)) => return Ok((input, special)),
-        Err(_) => {}
+    if let Ok((input, special)) = path_sep_result {
+        return Ok((input, special));
     }
     matches(is_file_char)(input).map(|(input, _)| (input, BitFlags::empty()))
@@ -457,9 +450,8 @@ fn continue_file_char(input: NomSpan) -> IResult<NomSpan, BitFlags<SawSpecial>>
 #[tracable_parser]
 fn special_file_char(input: NomSpan) -> IResult<NomSpan, BitFlags<SawSpecial>> {
-    match matches(is_path_separator)(input) {
-        Ok((input, _)) => return Ok((input, BitFlags::empty() | SawSpecial::PathSeparator)),
-        Err(_) => {}
+    if let Ok((input, _)) = matches(is_path_separator)(input) {
+        return Ok((input, BitFlags::empty() | SawSpecial::PathSeparator));
     }
     let (input, _) = matches(is_glob_specific_char)(input)?;
@@ -667,9 +659,13 @@ pub fn spaced_token_list(input: NomSpan) -> IResult<NomSpan, Spanned<Vec<TokenNo
     let mut out = vec![];
-    pre_ws.map(|pre_ws| out.extend(pre_ws));
+    if let Some(pre_ws) = pre_ws {
+        out.extend(pre_ws)
+    }
     out.extend(items.item);
-    post_ws.map(|post_ws| out.extend(post_ws));
+    if let Some(post_ws) = post_ws {
+        out.extend(post_ws)
+    }
     Ok((input, out.spanned(Span::new(start, end))))
 }
@@ -917,7 +913,7 @@ fn parse_int<T>(frag: &str, neg: Option<T>) -> i64 {
     match neg {
         None => int,
-        Some(_) => int * -1,
+        Some(_) => -int,
     }
 }
@@ -1104,42 +1100,63 @@ mod tests {
     }
     #[test]
-    fn test_operator() {
+    fn test_gt_operator() {
         equal_tokens! {
             <nodes>
             ">" -> b::token_list(vec![b::op(">")])
         }
+    }
+    #[test]
+    fn test_gte_operator() {
         equal_tokens! {
             <nodes>
             ">=" -> b::token_list(vec![b::op(">=")])
         }
+    }
+    #[test]
+    fn test_lt_operator() {
         equal_tokens! {
             <nodes>
             "<" -> b::token_list(vec![b::op("<")])
         }
+    }
+    #[test]
+    fn test_lte_operator() {
         equal_tokens! {
             <nodes>
             "<=" -> b::token_list(vec![b::op("<=")])
         }
+    }
+    #[test]
+    fn test_eq_operator() {
         equal_tokens! {
             <nodes>
             "==" -> b::token_list(vec![b::op("==")])
         }
+    }
+    #[test]
+    fn test_ne_operator() {
         equal_tokens! {
             <nodes>
             "!=" -> b::token_list(vec![b::op("!=")])
         }
+    }
+    #[test]
+    fn test_sim_operator() {
         equal_tokens! {
             <nodes>
             "=~" -> b::token_list(vec![b::op("=~")])
         }
+    }
+    #[test]
+    fn test_nsim_operator() {
         equal_tokens! {
             <nodes>
             "!~" -> b::token_list(vec![b::op("!~")])
@@ -1396,37 +1413,58 @@ mod tests {
             <nodes>
             "git add ." -> b::token_list(vec![b::bare("git"), b::sp(), b::bare("add"), b::sp(), b::bare(".")])
         }
+    }
+    #[test]
+    fn test_smoke_single_command_open() {
         equal_tokens! {
             <nodes>
             "open Cargo.toml" -> b::token_list(vec![b::bare("open"), b::sp(), b::bare("Cargo"), b::dot(), b::bare("toml")])
         }
+    }
+    #[test]
+    fn test_smoke_single_command_select() {
         equal_tokens! {
             <nodes>
             "select package.version" -> b::token_list(vec![b::bare("select"), b::sp(), b::bare("package"), b::dot(), b::bare("version")])
         }
+    }
+    #[test]
+    fn test_smoke_single_command_it() {
         equal_tokens! {
             <nodes>
             "echo $it" -> b::token_list(vec![b::bare("echo"), b::sp(), b::var("it")])
         }
+    }
+    #[test]
+    fn test_smoke_single_command_open_raw() {
         equal_tokens! {
             <nodes>
             "open Cargo.toml --raw" -> b::token_list(vec![b::bare("open"), b::sp(), b::bare("Cargo"), b::dot(), b::bare("toml"), b::sp(), b::flag("raw")])
         }
+    }
+    #[test]
+    fn test_smoke_single_command_open_r() {
         equal_tokens! {
             <nodes>
             "open Cargo.toml -r" -> b::token_list(vec![b::bare("open"), b::sp(), b::bare("Cargo"), b::dot(), b::bare("toml"), b::sp(), b::shorthand("r")])
         }
+    }
+    #[test]
+    fn test_smoke_single_command_config() {
         equal_tokens! {
             <nodes>
             "config --set tabs 2" -> b::token_list(vec![b::bare("config"), b::sp(), b::flag("set"), b::sp(), b::bare("tabs"), b::sp(), b::int(2)])
         }
+    }
+    #[test]
+    fn test_smoke_single_command_inc() {
         equal_tokens! {
             <nodes>
             "inc --patch package.version" -> b::token_list(

View file

@ -349,7 +349,7 @@ pub enum Delimiter {
} }
impl Delimiter { impl Delimiter {
pub(crate) fn open(&self) -> &'static str { pub(crate) fn open(self) -> &'static str {
match self { match self {
Delimiter::Paren => "(", Delimiter::Paren => "(",
Delimiter::Brace => "{", Delimiter::Brace => "{",
@ -357,7 +357,7 @@ impl Delimiter {
} }
} }
pub(crate) fn close(&self) -> &'static str { pub(crate) fn close(self) -> &'static str {
match self { match self {
Delimiter::Paren => ")", Delimiter::Paren => ")",
Delimiter::Brace => "}", Delimiter::Brace => "}",
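The `open(&self)` to `open(self)` change above is clippy's `trivially_copy_pass_by_ref` lint: for a small `Copy` enum, taking `self` by value is as cheap as a reference and reads more simply. A minimal sketch of the same pattern, using an illustrative `Bracket` enum that is not part of this commit:

```rust
// Illustrative sketch of clippy::trivially_copy_pass_by_ref, not code from this commit.
#[derive(Clone, Copy)]
enum Bracket {
    Round,
    Square,
}

impl Bracket {
    // `self` is one byte and Copy, so passing it by value avoids a pointless indirection.
    fn open(self) -> &'static str {
        match self {
            Bracket::Round => "(",
            Bracket::Square => "[",
        }
    }

    fn close(self) -> &'static str {
        match self {
            Bracket::Round => ")",
            Bracket::Square => "]",
        }
    }
}

fn main() {
    let b = Bracket::Square;
    println!("{}{}", b.open(), b.close()); // prints []
}
```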
View file
@ -9,17 +9,15 @@ use bigdecimal::BigDecimal;
use nu_source::{Span, Spanned, SpannedItem}; use nu_source::{Span, Spanned, SpannedItem};
use num_bigint::BigInt; use num_bigint::BigInt;
#[derive(Default)]
pub struct TokenTreeBuilder { pub struct TokenTreeBuilder {
pos: usize, pos: usize,
output: String, output: String,
} }
impl TokenTreeBuilder { impl TokenTreeBuilder {
pub fn new() -> TokenTreeBuilder { pub fn new() -> Self {
TokenTreeBuilder { Default::default()
pos: 0,
output: String::new(),
}
} }
} }
@ -319,7 +317,7 @@ impl TokenTreeBuilder {
} }
pub fn spanned_call(input: Vec<TokenNode>, span: impl Into<Span>) -> Spanned<CallNode> { pub fn spanned_call(input: Vec<TokenNode>, span: impl Into<Span>) -> Spanned<CallNode> {
if input.len() == 0 { if input.is_empty() {
panic!("BUG: spanned call (TODO)") panic!("BUG: spanned call (TODO)")
} }
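Two lints are at work in this hunk: the hand-rolled constructor is replaced by `#[derive(Default)]` with `new()` delegating to `Default::default()`, and `len() == 0` becomes `is_empty()` (clippy's `len_zero`). A small self-contained sketch with a hypothetical `Builder` struct:

```rust
// Illustrative sketch, not code from this commit.
#[derive(Default)]
struct Builder {
    pos: usize,
    output: String,
}

impl Builder {
    // Equivalent to Builder { pos: 0, output: String::new() }, but stays in
    // sync automatically if fields are added later.
    fn new() -> Self {
        Default::default()
    }
}

fn main() {
    let b = Builder::new();
    // clippy::len_zero: is_empty() states the intent directly.
    assert!(b.output.is_empty());
    assert_eq!(b.pos, 0);
}
```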
View file
@ -40,8 +40,8 @@ fn convert_number_to_u64(number: &Number) -> u64 {
} }
impl Unit { impl Unit {
pub fn as_str(&self) -> &str { pub fn as_str(self) -> &'static str {
match *self { match self {
Unit::Byte => "B", Unit::Byte => "B",
Unit::Kilobyte => "KB", Unit::Kilobyte => "KB",
Unit::Megabyte => "MB", Unit::Megabyte => "MB",
@ -58,10 +58,10 @@ impl Unit {
} }
} }
pub fn compute(&self, size: &Number) -> UntaggedValue { pub fn compute(self, size: &Number) -> UntaggedValue {
let size = size.clone(); let size = size.clone();
match &self { match self {
Unit::Byte => number(size), Unit::Byte => number(size),
Unit::Kilobyte => number(size * 1024), Unit::Kilobyte => number(size * 1024),
Unit::Megabyte => number(size * 1024 * 1024), Unit::Megabyte => number(size * 1024 * 1024),
View file
@ -134,7 +134,7 @@ pub fn parse_command_tail(
trace!(target: "nu::parse", "Constructed positional={:?} named={:?}", positional, named); trace!(target: "nu::parse", "Constructed positional={:?} named={:?}", positional, named);
let positional = if positional.len() == 0 { let positional = if positional.is_empty() {
None None
} else { } else {
Some(positional) Some(positional)
@ -204,7 +204,7 @@ impl ColorSyntax for CommandTailShape {
fn insert_flag( fn insert_flag(
token_nodes: &mut TokensIterator, token_nodes: &mut TokensIterator,
syntax_type: &SyntaxShape, syntax_type: SyntaxShape,
args: &mut ColoringArgs, args: &mut ColoringArgs,
flag: Flag, flag: Flag,
pos: usize, pos: usize,
@ -226,7 +226,7 @@ impl ColorSyntax for CommandTailShape {
// If the part after a mandatory flag isn't present, that's ok, but we // If the part after a mandatory flag isn't present, that's ok, but we
// should roll back any whitespace we chomped // should roll back any whitespace we chomped
color_fallible_syntax(syntax_type, token_nodes, context)?; color_fallible_syntax(&syntax_type, token_nodes, context)?;
Ok(()) Ok(())
}); });
@ -243,9 +243,10 @@ impl ColorSyntax for CommandTailShape {
match &kind.0 { match &kind.0 {
NamedType::Switch => { NamedType::Switch => {
match token_nodes.extract(|t| t.as_flag(name, context.source())) { if let Some((pos, flag)) =
Some((pos, flag)) => args.insert(pos, vec![flag.color()]), token_nodes.extract(|t| t.as_flag(name, context.source()))
None => {} {
args.insert(pos, vec![flag.color()])
} }
} }
NamedType::Mandatory(syntax_type) => { NamedType::Mandatory(syntax_type) => {
@ -260,7 +261,7 @@ impl ColorSyntax for CommandTailShape {
// The mandatory flag didn't exist at all, so there's nothing to color // The mandatory flag didn't exist at all, so there's nothing to color
} }
Ok((pos, flag)) => { Ok((pos, flag)) => {
insert_flag(token_nodes, syntax_type, &mut args, flag, pos, context) insert_flag(token_nodes, *syntax_type, &mut args, flag, pos, context)
} }
} }
} }
@ -270,7 +271,7 @@ impl ColorSyntax for CommandTailShape {
// The optional flag didn't exist at all, so there's nothing to color // The optional flag didn't exist at all, so there's nothing to color
} }
Ok(Some((pos, flag))) => { Ok(Some((pos, flag))) => {
insert_flag(token_nodes, syntax_type, &mut args, flag, pos, context) insert_flag(token_nodes, *syntax_type, &mut args, flag, pos, context)
} }
Ok(None) => { Ok(None) => {
View file
@ -24,9 +24,9 @@ pub trait Plugin {
} }
pub fn serve_plugin(plugin: &mut dyn Plugin) { pub fn serve_plugin(plugin: &mut dyn Plugin) {
let args = std::env::args(); let mut args = std::env::args();
if args.len() > 1 { if args.len() > 1 {
let input = args.skip(1).next(); let input = args.nth(1);
let input = match input { let input = match input {
Some(arg) => std::fs::read_to_string(arg), Some(arg) => std::fs::read_to_string(arg),
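`args.skip(1).next()` becoming `args.nth(1)` is clippy's `iter_skip_next` lint; note that `nth` advances the iterator, so the binding has to be `mut`. A minimal sketch:

```rust
// Illustrative sketch of clippy::iter_skip_next, not code from this commit.
fn main() {
    // `nth` needs a mutable iterator because it consumes elements.
    let mut args = std::env::args();
    // Index 1 is the first real argument; index 0 is the binary name.
    match args.nth(1) {
        Some(arg) => println!("first argument: {}", arg),
        None => println!("no arguments given"),
    }
}
```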
View file
@ -13,7 +13,7 @@ pub struct PluginTest<'a, T: Plugin> {
impl<'a, T: Plugin> PluginTest<'a, T> { impl<'a, T: Plugin> PluginTest<'a, T> {
pub fn for_plugin(plugin: &'a mut T) -> Self { pub fn for_plugin(plugin: &'a mut T) -> Self {
PluginTest { PluginTest {
plugin: plugin, plugin,
call_info: CallStub::new().create(), call_info: CallStub::new().create(),
input: UntaggedValue::nothing().into_value(Tag::unknown()), input: UntaggedValue::nothing().into_value(Tag::unknown()),
} }
@ -99,17 +99,16 @@ impl<'a, T: Plugin> PluginTest<'a, T> {
pub fn plugin<T: Plugin>(plugin: &mut T) -> PluginTest<T> { pub fn plugin<T: Plugin>(plugin: &mut T) -> PluginTest<T> {
PluginTest::for_plugin(plugin) PluginTest::for_plugin(plugin)
} }
#[derive(Default)]
pub struct CallStub { pub struct CallStub {
positionals: Vec<Value>, positionals: Vec<Value>,
flags: IndexMap<String, Value>, flags: IndexMap<String, Value>,
} }
impl CallStub { impl CallStub {
pub fn new() -> CallStub { pub fn new() -> Self {
CallStub { Default::default()
positionals: vec![],
flags: indexmap::IndexMap::new(),
}
} }
pub fn with_named_parameter(&mut self, name: &str, value: Value) -> &mut Self { pub fn with_named_parameter(&mut self, name: &str, value: Value) -> &mut Self {
@ -127,7 +126,7 @@ impl CallStub {
pub fn with_parameter(&mut self, name: &str) -> &mut Self { pub fn with_parameter(&mut self, name: &str) -> &mut Self {
let fields: Vec<Value> = name let fields: Vec<Value> = name
.split(".") .split('.')
.map(|s| UntaggedValue::string(s.to_string()).into_value(Tag::unknown())) .map(|s| UntaggedValue::string(s.to_string()).into_value(Tag::unknown()))
.collect(); .collect();
@ -188,7 +187,7 @@ pub mod value {
pub fn structured_sample_record(key: &str, value: &str) -> Value { pub fn structured_sample_record(key: &str, value: &str) -> Value {
let mut record = TaggedDictBuilder::new(Tag::unknown()); let mut record = TaggedDictBuilder::new(Tag::unknown());
record.insert_untagged(key.clone(), UntaggedValue::string(value)); record.insert_untagged(key, UntaggedValue::string(value));
record.into_value() record.into_value()
} }
@ -196,16 +195,13 @@ pub mod value {
UntaggedValue::string(value).into_value(Tag::unknown()) UntaggedValue::string(value).into_value(Tag::unknown())
} }
pub fn table(list: &Vec<Value>) -> Value { pub fn table(list: &[Value]) -> Value {
UntaggedValue::table(list).into_untagged_value() UntaggedValue::table(list).into_untagged_value()
} }
pub fn column_path(paths: &Vec<Value>) -> Value { pub fn column_path(paths: &[Value]) -> Value {
UntaggedValue::Primitive(Primitive::ColumnPath( UntaggedValue::Primitive(Primitive::ColumnPath(
table(&paths.iter().cloned().collect()) table(&paths.to_vec()).as_column_path().unwrap().item,
.as_column_path()
.unwrap()
.item,
)) ))
.into_untagged_value() .into_untagged_value()
} }
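The signature changes here follow clippy's `ptr_arg` lint (`&Vec<T>` parameters become `&[T]`, which every `&Vec<T>` coerces to) and `single_char_pattern` (`split(".")` becomes `split('.')`). A short sketch with an illustrative helper:

```rust
// Illustrative sketch of clippy::ptr_arg and clippy::single_char_pattern,
// not code from this commit.
fn join_segments(parts: &[String]) -> String {
    parts.join("/")
}

fn main() {
    // A char pattern is cheaper than a one-character &str pattern.
    let fields: Vec<String> = "package.version"
        .split('.')
        .map(|s| s.to_string())
        .collect();

    // &Vec<String> coerces to &[String] at the call site, so callers don't change.
    println!("{}", join_segments(&fields)); // package/version
}
```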
View file
@ -30,14 +30,14 @@ impl EvaluatedArgs {
pub fn nth(&self, pos: usize) -> Option<&Value> { pub fn nth(&self, pos: usize) -> Option<&Value> {
match &self.positional { match &self.positional {
None => None, None => None,
Some(array) => array.iter().nth(pos), Some(array) => array.get(pos),
} }
} }
pub fn expect_nth(&self, pos: usize) -> Result<&Value, ShellError> { pub fn expect_nth(&self, pos: usize) -> Result<&Value, ShellError> {
match &self.positional { match &self.positional {
None => Err(ShellError::unimplemented("Better error: expect_nth")), None => Err(ShellError::unimplemented("Better error: expect_nth")),
Some(array) => match array.iter().nth(pos) { Some(array) => match array.get(pos) {
None => Err(ShellError::unimplemented("Better error: expect_nth")), None => Err(ShellError::unimplemented("Better error: expect_nth")),
Some(item) => Ok(item), Some(item) => Ok(item),
}, },
@ -51,6 +51,10 @@ impl EvaluatedArgs {
} }
} }
pub fn is_empty(&self) -> bool {
self.len() == 0
}
pub fn has(&self, name: &str) -> bool { pub fn has(&self, name: &str) -> bool {
match &self.named { match &self.named {
None => false, None => false,
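`array.iter().nth(pos)` on a slice is clippy's `iter_nth` lint; `array.get(pos)` already returns `Option<&T>` without constructing an iterator. A compact sketch:

```rust
// Illustrative sketch of clippy::iter_nth, not code from this commit.
fn nth_positional(positional: &[String], pos: usize) -> Option<&String> {
    // get() is the direct, bounds-checked lookup.
    positional.get(pos)
}

fn main() {
    let args = vec!["open".to_string(), "Cargo.toml".to_string()];
    assert_eq!(nth_positional(&args, 1).map(String::as_str), Some("Cargo.toml"));
    assert_eq!(nth_positional(&args, 5), None);
}
```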
View file
@ -1,3 +1,5 @@
#![allow(clippy::should_implement_trait)]
#[derive(Debug)] #[derive(Debug)]
pub enum MaybeOwned<'a, T> { pub enum MaybeOwned<'a, T> {
Owned(T), Owned(T),
View file
@ -278,7 +278,7 @@ impl<'a> PrettyDebug for DebugEntry<'a> {
fn pretty(&self) -> DebugDocBuilder { fn pretty(&self) -> DebugDocBuilder {
(b::key(match self.key { (b::key(match self.key {
Column::String(string) => string.clone(), Column::String(string) => string.clone(),
Column::Value => format!("<value>"), Column::Value => "<value>".to_string(),
}) + b::delimit("(", self.value.pretty(), ")").into_kind()) }) + b::delimit("(", self.value.pretty(), ")").into_kind())
} }
} }
@ -346,12 +346,12 @@ where
None => { None => {
self.values.insert(key, { self.values.insert(key, {
let mut group = G::new(); let mut group = G::new();
group.merge(value.into()); group.merge(value);
group group
}); });
} }
Some(group) => { Some(group) => {
group.merge(value.into()); group.merge(value);
} }
} }
} }
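`format!("<value>")` with nothing to interpolate is clippy's `useless_format` lint, and `merge(value.into())` drops a redundant `.into()` on a value that already has the target type; both simplify to the plain expression. A tiny sketch of the `format!` case:

```rust
// Illustrative sketch of clippy::useless_format, not code from this commit.
fn label(key: Option<&str>) -> String {
    match key {
        // Interpolation: format! earns its keep.
        Some(name) => format!("column {}", name),
        // No interpolation: a plain to_string() says the same thing.
        None => "<value>".to_string(),
    }
}

fn main() {
    assert_eq!(label(Some("size")), "column size");
    assert_eq!(label(None), "<value>");
}
```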
View file
@ -46,12 +46,7 @@ impl UntaggedValue {
pub fn data_descriptors(&self) -> Vec<String> { pub fn data_descriptors(&self) -> Vec<String> {
match self { match self {
UntaggedValue::Primitive(_) => vec![], UntaggedValue::Primitive(_) => vec![],
UntaggedValue::Row(columns) => columns UntaggedValue::Row(columns) => columns.entries.keys().map(|x| x.to_string()).collect(),
.entries
.keys()
.into_iter()
.map(|x| x.to_string())
.collect(),
UntaggedValue::Block(_) => vec![], UntaggedValue::Block(_) => vec![],
UntaggedValue::Table(_) => vec![], UntaggedValue::Table(_) => vec![],
UntaggedValue::Error(_) => vec![], UntaggedValue::Error(_) => vec![],
@ -116,7 +111,7 @@ impl UntaggedValue {
UntaggedValue::Row(entries.into()) UntaggedValue::Row(entries.into())
} }
pub fn table(list: &Vec<Value>) -> UntaggedValue { pub fn table(list: &[Value]) -> UntaggedValue {
UntaggedValue::Table(list.to_vec()) UntaggedValue::Table(list.to_vec())
} }
@ -227,9 +222,7 @@ impl Value {
pub fn as_path(&self) -> Result<PathBuf, ShellError> { pub fn as_path(&self) -> Result<PathBuf, ShellError> {
match &self.value { match &self.value {
UntaggedValue::Primitive(Primitive::Path(path)) => Ok(path.clone()), UntaggedValue::Primitive(Primitive::Path(path)) => Ok(path.clone()),
UntaggedValue::Primitive(Primitive::String(path_str)) => { UntaggedValue::Primitive(Primitive::String(path_str)) => Ok(PathBuf::from(&path_str)),
Ok(PathBuf::from(&path_str).clone())
}
_ => Err(ShellError::type_error("Path", self.spanned_type_name())), _ => Err(ShellError::type_error("Path", self.spanned_type_name())),
} }
} }
View file
@ -69,7 +69,7 @@ impl PrettyDebug for ColumnPath {
impl HasFallibleSpan for ColumnPath { impl HasFallibleSpan for ColumnPath {
fn maybe_span(&self) -> Option<Span> { fn maybe_span(&self) -> Option<Span> {
if self.members.len() == 0 { if self.members.is_empty() {
None None
} else { } else {
Some(span_for_spanned_list(self.members.iter().map(|m| m.span))) Some(span_for_spanned_list(self.members.iter().map(|m| m.span)))
@ -98,7 +98,7 @@ pub fn did_you_mean(obj_source: &Value, field_tried: &PathMember) -> Option<Vec<
let mut possible_matches: Vec<_> = possibilities let mut possible_matches: Vec<_> = possibilities
.into_iter() .into_iter()
.map(|x| { .map(|x| {
let word = x.clone(); let word = x;
let distance = natural::distance::levenshtein_distance(&word, &field_tried); let distance = natural::distance::levenshtein_distance(&word, &field_tried);
(distance, word) (distance, word)
View file
@ -15,6 +15,7 @@ pub struct Dictionary {
pub entries: IndexMap<String, Value>, pub entries: IndexMap<String, Value>,
} }
#[allow(clippy::derive_hash_xor_eq)]
impl Hash for Dictionary { impl Hash for Dictionary {
fn hash<H: Hasher>(&self, state: &mut H) { fn hash<H: Hasher>(&self, state: &mut H) {
let mut entries = self.entries.clone(); let mut entries = self.entries.clone();
@ -105,7 +106,7 @@ impl From<IndexMap<String, Value>> for Dictionary {
} }
impl Dictionary { impl Dictionary {
pub fn get_data(&self, desc: &String) -> MaybeOwned<'_, Value> { pub fn get_data(&self, desc: &str) -> MaybeOwned<'_, Value> {
match self.entries.get(desc) { match self.entries.get(desc) {
Some(v) => MaybeOwned::Borrowed(v), Some(v) => MaybeOwned::Borrowed(v),
None => MaybeOwned::Owned( None => MaybeOwned::Owned(
View file
@ -108,7 +108,7 @@ pub fn format_primitive(primitive: &Primitive, field_name: Option<&String>) -> S
match byte.get_unit() { match byte.get_unit() {
byte_unit::ByteUnit::B => format!("{} B ", byte.get_value()), byte_unit::ByteUnit::B => format!("{} B ", byte.get_value()),
_ => byte.format(1).to_string(), _ => byte.format(1),
} }
} }
Primitive::Duration(sec) => format_duration(*sec), Primitive::Duration(sec) => format_duration(*sec),
@ -150,7 +150,7 @@ pub fn format_primitive(primitive: &Primitive, field_name: Option<&String>) -> S
} }
.to_owned(), .to_owned(),
Primitive::Binary(_) => "<binary>".to_owned(), Primitive::Binary(_) => "<binary>".to_owned(),
Primitive::Date(d) => d.humanize().to_string(), Primitive::Date(d) => d.humanize(),
} }
} }
View file
@ -10,14 +10,14 @@ pub enum RangeInclusion {
} }
impl RangeInclusion { impl RangeInclusion {
pub fn debug_left_bracket(&self) -> DebugDocBuilder { pub fn debug_left_bracket(self) -> DebugDocBuilder {
b::delimiter(match self { b::delimiter(match self {
RangeInclusion::Exclusive => "(", RangeInclusion::Exclusive => "(",
RangeInclusion::Inclusive => "[", RangeInclusion::Inclusive => "[",
}) })
} }
pub fn debug_right_bracket(&self) -> DebugDocBuilder { pub fn debug_right_bracket(self) -> DebugDocBuilder {
b::delimiter(match self { b::delimiter(match self {
RangeInclusion::Exclusive => ")", RangeInclusion::Exclusive => ")",
RangeInclusion::Inclusive => "]", RangeInclusion::Inclusive => "]",
View file
@ -9,7 +9,7 @@ where
serde::Serialize::serialize( serde::Serialize::serialize(
&big_decimal &big_decimal
.to_f64() .to_f64()
.ok_or(serde::ser::Error::custom("expected a f64-sized bignum"))?, .ok_or_else(|| serde::ser::Error::custom("expected a f64-sized bignum"))?,
serializer, serializer,
) )
} }
@ -20,5 +20,5 @@ where
{ {
let x: f64 = serde::Deserialize::deserialize(deserializer)?; let x: f64 = serde::Deserialize::deserialize(deserializer)?;
Ok(BigDecimal::from_f64(x) Ok(BigDecimal::from_f64(x)
.ok_or(serde::de::Error::custom("expected a f64-sized bigdecimal"))?) .ok_or_else(|| serde::de::Error::custom("expected a f64-sized bigdecimal"))?)
} }
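`ok_or(expr)` evaluates `expr` even when the value is `Some`; clippy's `or_fun_call` lint moves any allocating error construction into `ok_or_else` so it only runs on the failure path. A minimal sketch with a hypothetical parser:

```rust
// Illustrative sketch of clippy::or_fun_call, not code from this commit.
fn parse_port(input: &str) -> Result<u16, String> {
    input
        .parse::<u16>()
        .ok()
        // The closure, and therefore the String allocation, only runs on error.
        .ok_or_else(|| format!("'{}' is not a valid port", input))
}

fn main() {
    assert_eq!(parse_port("8080"), Ok(8080));
    assert!(parse_port("nope").is_err());
}
```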
View file
@ -9,7 +9,7 @@ where
serde::Serialize::serialize( serde::Serialize::serialize(
&big_int &big_int
.to_i64() .to_i64()
.ok_or(serde::ser::Error::custom("expected a i64-sized bignum"))?, .ok_or_else(|| serde::ser::Error::custom("expected a i64-sized bignum"))?,
serializer, serializer,
) )
} }
@ -19,5 +19,6 @@ where
D: serde::Deserializer<'de>, D: serde::Deserializer<'de>,
{ {
let x: i64 = serde::Deserialize::deserialize(deserializer)?; let x: i64 = serde::Deserialize::deserialize(deserializer)?;
Ok(BigInt::from_i64(x).ok_or(serde::de::Error::custom("expected a i64-sized bignum"))?) Ok(BigInt::from_i64(x)
.ok_or_else(|| serde::de::Error::custom("expected a i64-sized bignum"))?)
} }
View file
@ -488,6 +488,7 @@ fn hash_doc<H: std::hash::Hasher>(doc: &PrettyDebugDoc, state: &mut H) {
} }
} }
#[allow(clippy::derive_hash_xor_eq)]
impl std::hash::Hash for DebugDoc { impl std::hash::Hash for DebugDoc {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) { fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
hash_doc(&self.inner, state); hash_doc(&self.inner, state);
View file
@ -133,7 +133,7 @@ impl DisplayPath for str {
impl DisplayPath for &str { impl DisplayPath for &str {
fn display_path(&self) -> String { fn display_path(&self) -> String {
self.to_string() (*self).to_string()
} }
} }
@ -145,7 +145,7 @@ impl DisplayPath for String {
impl DisplayPath for &String { impl DisplayPath for &String {
fn display_path(&self) -> String { fn display_path(&self) -> String {
self.to_string() (*self).to_string()
} }
} }
pub enum Stub<'a> { pub enum Stub<'a> {
@ -196,7 +196,7 @@ pub fn create_file_at(full_path: impl AsRef<Path>) -> Result<(), std::io::Error>
panic!(format!("{:?} exists", parent.display())); panic!(format!("{:?} exists", parent.display()));
} }
std::fs::write(full_path, "fake data".as_bytes()) std::fs::write(full_path, b"fake data")
} }
pub fn copy_file_to(source: &str, destination: &str) { pub fn copy_file_to(source: &str, destination: &str) {
View file
@ -34,7 +34,7 @@ macro_rules! nu {
.spawn() .spawn()
{ {
Ok(child) => child, Ok(child) => child,
Err(why) => panic!("Can't run test {}", why.description()), Err(why) => panic!("Can't run test {}", why.to_string()),
}; };
let stdin = process.stdin.as_mut().expect("couldn't open stdin"); let stdin = process.stdin.as_mut().expect("couldn't open stdin");
View file
@ -22,7 +22,7 @@ pub struct Dirs {
impl Dirs { impl Dirs {
pub fn formats(&self) -> PathBuf { pub fn formats(&self) -> PathBuf {
PathBuf::from(self.fixtures.join("formats")) self.fixtures.join("formats")
} }
} }
@ -47,7 +47,7 @@ impl Playground {
std::fs::create_dir(PathBuf::from(&nuplay_dir)).expect("can not create directory"); std::fs::create_dir(PathBuf::from(&nuplay_dir)).expect("can not create directory");
let mut playground = Playground { let mut playground = Playground {
root: root, root,
tests: topic.to_string(), tests: topic.to_string(),
cwd: nuplay_dir, cwd: nuplay_dir,
}; };
@ -63,21 +63,29 @@ impl Playground {
.expect("Couldn't find the fixtures directory") .expect("Couldn't find the fixtures directory")
.join("tests/fixtures"); .join("tests/fixtures");
let fixtures = dunce::canonicalize(fixtures.clone()).expect(&format!( let fixtures = dunce::canonicalize(fixtures.clone()).unwrap_or_else(|e| {
"Couldn't canonicalize fixtures path {}", panic!(
fixtures.display() "Couldn't canonicalize fixtures path {}: {:?}",
)); fixtures.display(),
e
)
});
let test = let test = dunce::canonicalize(playground_root.join(topic)).unwrap_or_else(|e| {
dunce::canonicalize(PathBuf::from(playground_root.join(topic))).expect(&format!( panic!(
"Couldn't canonicalize test path {}", "Couldn't canonicalize test path {}: {:?}",
playground_root.join(topic).display() playground_root.join(topic).display(),
)); e
)
});
let root = dunce::canonicalize(playground_root).expect(&format!( let root = dunce::canonicalize(playground_root).unwrap_or_else(|e| {
"Couldn't canonicalize tests root path {}", panic!(
playground_root.display() "Couldn't canonicalize tests root path {}: {:?}",
)); playground_root.display(),
e
)
});
let dirs = Dirs { let dirs = Dirs {
root, root,
@ -119,8 +127,7 @@ impl Playground {
path.push(file_name); path.push(file_name);
std::fs::write(PathBuf::from(path), contents.as_bytes()) std::fs::write(path, contents.as_bytes()).expect("can not create file");
.expect("can not create file");
}) })
.for_each(drop); .for_each(drop);
self.back_to_playground(); self.back_to_playground();
@ -136,17 +143,14 @@ impl Playground {
pub fn glob_vec(pattern: &str) -> Vec<PathBuf> { pub fn glob_vec(pattern: &str) -> Vec<PathBuf> {
let glob = glob(pattern); let glob = glob(pattern);
match glob { glob.expect("invalid pattern")
Ok(paths) => paths .map(|path| {
.map(|path| { if let Ok(path) = path {
if let Ok(path) = path { path
path } else {
} else { unreachable!()
unreachable!() }
} })
}) .collect()
.collect(),
Err(_) => panic!("Invalid pattern."),
}
} }
} }
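The `expect(&format!(..))` calls become `unwrap_or_else(|e| panic!(..))`, which is clippy's `expect_fun_call` lint: the message is only formatted when the call actually fails, and the underlying error can be included in it. A small sketch:

```rust
// Illustrative sketch of clippy::expect_fun_call, not code from this commit.
use std::path::PathBuf;

fn canonical(path: &str) -> PathBuf {
    std::fs::canonicalize(path)
        // The panic message, including the io::Error, is built only on failure.
        .unwrap_or_else(|e| panic!("Couldn't canonicalize {}: {:?}", path, e))
}

fn main() {
    // Using the current directory keeps the example self-contained.
    println!("{}", canonical(".").display());
}
```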
View file
@ -9,7 +9,7 @@ use num_traits::cast::ToPrimitive;
pub trait ValueExt { pub trait ValueExt {
fn into_parts(self) -> (UntaggedValue, Tag); fn into_parts(self) -> (UntaggedValue, Tag);
fn get_data(&self, desc: &String) -> MaybeOwned<'_, Value>; fn get_data(&self, desc: &str) -> MaybeOwned<'_, Value>;
fn get_data_by_key(&self, name: Spanned<&str>) -> Option<Value>; fn get_data_by_key(&self, name: Spanned<&str>) -> Option<Value>;
fn get_data_by_member(&self, name: &PathMember) -> Result<Value, ShellError>; fn get_data_by_member(&self, name: &PathMember) -> Result<Value, ShellError>;
fn get_data_by_column_path( fn get_data_by_column_path(
@ -43,7 +43,7 @@ impl ValueExt for Value {
(self.value, self.tag) (self.value, self.tag)
} }
fn get_data(&self, desc: &String) -> MaybeOwned<'_, Value> { fn get_data(&self, desc: &str) -> MaybeOwned<'_, Value> {
get_data(self, desc) get_data(self, desc)
} }
@ -163,14 +163,9 @@ pub fn get_data_by_member(value: &Value, name: &PathMember) -> Result<Value, She
) )
})?; })?;
match get_data_by_index(value, index.spanned(value.tag.span)) { get_data_by_index(value, index.spanned(value.tag.span)).ok_or_else(|| {
Some(v) => Ok(v.clone()), ShellError::range_error(0..(l.len()), &int.spanned(name.span), "indexing")
None => Err(ShellError::range_error( })
0..(l.len()),
&int.spanned(name.span),
"indexing",
)),
}
} }
} }
} }
@ -193,7 +188,7 @@ pub fn get_data_by_column_path(
match value { match value {
Ok(v) => current = v.clone(), Ok(v) => current = v.clone(),
Err(e) => return Err(callback((&current.clone(), &p.clone(), e))), Err(e) => return Err(callback((&current, &p.clone(), e))),
} }
} }
@ -210,10 +205,9 @@ pub fn insert_data_at_path(value: &Value, path: &str, new_value: Value) -> Optio
if split_path.len() == 1 { if split_path.len() == 1 {
// Special case for inserting at the top level // Special case for inserting at the top level
current.entries.insert( current
path.to_string(), .entries
new_value.value.clone().into_value(&value.tag), .insert(path.to_string(), new_value.value.into_value(&value.tag));
);
return Some(new_obj); return Some(new_obj);
} }
@ -275,7 +269,7 @@ pub fn insert_data_at_member(
) )
})?; })?;
insert_data_at_index(array, int.tagged(member.span), new_value.clone())?; insert_data_at_index(array, int.tagged(member.span), new_value)?;
Ok(()) Ok(())
} }
}, },
@ -428,7 +422,7 @@ fn insert_data_at_index(
} }
} }
pub fn get_data<'value>(value: &'value Value, desc: &String) -> MaybeOwned<'value, Value> { pub fn get_data<'value>(value: &'value Value, desc: &str) -> MaybeOwned<'value, Value> {
match &value.value { match &value.value {
UntaggedValue::Primitive(_) => MaybeOwned::Borrowed(value), UntaggedValue::Primitive(_) => MaybeOwned::Borrowed(value),
UntaggedValue::Row(o) => o.get_data(desc), UntaggedValue::Row(o) => o.get_data(desc),
View file
@ -23,11 +23,8 @@ impl Plugin for BinaryView {
fn sink(&mut self, call_info: CallInfo, input: Vec<Value>) { fn sink(&mut self, call_info: CallInfo, input: Vec<Value>) {
for v in input { for v in input {
let value_anchor = v.anchor(); let value_anchor = v.anchor();
match &v.value { if let UntaggedValue::Primitive(Primitive::Binary(b)) = &v.value {
UntaggedValue::Primitive(Primitive::Binary(b)) => { let _ = view_binary(&b, value_anchor.as_ref(), call_info.args.has("lores"));
let _ = view_binary(&b, value_anchor.as_ref(), call_info.args.has("lores"));
}
_ => {}
} }
} }
} }
@ -39,12 +36,9 @@ fn view_binary(
lores_mode: bool, lores_mode: bool,
) -> Result<(), Box<dyn std::error::Error>> { ) -> Result<(), Box<dyn std::error::Error>> {
if b.len() > 3 { if b.len() > 3 {
match (b[0], b[1], b[2]) { if let (0x4e, 0x45, 0x53) = (b[0], b[1], b[2]) {
(0x4e, 0x45, 0x53) => { view_contents_interactive(b, source, lores_mode)?;
view_contents_interactive(b, source, lores_mode)?; return Ok(());
return Ok(());
}
_ => {}
} }
} }
view_contents(b, source, lores_mode)?; view_contents(b, source, lores_mode)?;
@ -155,16 +149,13 @@ impl RenderContext {
} }
} }
if prev_count > 0 { if prev_count > 0 {
match (prev_fg, prev_bg) { if let (Some(c), Some(d)) = (prev_fg, prev_bg) {
(Some(c), Some(d)) => { print!(
print!( "{}",
"{}", ansi_term::Colour::RGB(c.0, c.1, c.2)
ansi_term::Colour::RGB(c.0, c.1, c.2) .on(ansi_term::Colour::RGB(d.0, d.1, d.2,))
.on(ansi_term::Colour::RGB(d.0, d.1, d.2,)) .paint((0..prev_count).map(|_| "").collect::<String>())
.paint((0..prev_count).map(|_| "").collect::<String>()) );
);
}
_ => {}
} }
} }
outln!("{}", Attribute::Reset); outln!("{}", Attribute::Reset);
@ -407,11 +398,11 @@ pub fn view_contents_interactive(
if rawkey.is_pressed(rawkey::KeyCode::Escape) { if rawkey.is_pressed(rawkey::KeyCode::Escape) {
break 'gameloop; break 'gameloop;
} else { } else {
for i in 0..buttons.len() { for (idx, button) in buttons.iter().enumerate() {
if rawkey.is_pressed(buttons[i]) { if rawkey.is_pressed(*button) {
nes.press_button(0, i as u8); nes.press_button(0, idx as u8);
} else { } else {
nes.release_button(0, i as u8); nes.release_button(0, idx as u8);
} }
} }
} }
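This file collects two loop/match lints: a `match` with one interesting arm and `_ => {}` becomes `if let` (clippy's `single_match`), and `for i in 0..buttons.len()` becomes iteration with `enumerate()` (`needless_range_loop`). A self-contained sketch:

```rust
// Illustrative sketch of clippy::single_match and clippy::needless_range_loop,
// not code from this commit.
fn main() {
    let header: [u8; 4] = [0x4e, 0x45, 0x53, 0x1a];

    // single_match: only one pattern did anything, so if let is clearer.
    if let (0x4e, 0x45, 0x53) = (header[0], header[1], header[2]) {
        println!("looks like an NES ROM header");
    }

    // needless_range_loop: iterate the slice and keep the index with enumerate().
    for (idx, byte) in header.iter().enumerate() {
        println!("byte {} = {:#04x}", idx, byte);
    }
}
```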
View file
@ -100,18 +100,18 @@ async fn fetch_helper(path: &Value, has_raw: bool, row: Value) -> ReturnValue {
} else { } else {
// If the extension could not be determined via mimetype, try to use the path // If the extension could not be determined via mimetype, try to use the path
// extension. Some file types do not declare their mimetypes (such as bson files). // extension. Some file types do not declare their mimetypes (such as bson files).
file_extension.or(path_str.split('.').last().map(String::from)) file_extension.or_else(|| path_str.split('.').last().map(String::from))
}; };
let tagged_contents = contents.retag(&contents_tag); let tagged_contents = contents.retag(&contents_tag);
if let Some(extension) = file_extension { if let Some(extension) = file_extension {
return Ok(ReturnSuccess::Action(CommandAction::AutoConvert( Ok(ReturnSuccess::Action(CommandAction::AutoConvert(
tagged_contents, tagged_contents,
extension, extension,
))); )))
} else { } else {
return ReturnSuccess::value(tagged_contents); ReturnSuccess::value(tagged_contents)
} }
} }
View file
@ -16,6 +16,7 @@ pub enum SemVerAction {
Patch, Patch,
} }
#[derive(Default)]
pub struct Inc { pub struct Inc {
pub field: Option<Tagged<ColumnPath>>, pub field: Option<Tagged<ColumnPath>>,
pub error: Option<String>, pub error: Option<String>,
@ -23,12 +24,8 @@ pub struct Inc {
} }
impl Inc { impl Inc {
pub fn new() -> Inc { pub fn new() -> Self {
Inc { Default::default()
field: None,
error: None,
action: None,
}
} }
fn apply(&self, input: &str) -> Result<UntaggedValue, ShellError> { fn apply(&self, input: &str) -> Result<UntaggedValue, ShellError> {
@ -122,12 +119,11 @@ impl Inc {
); );
let got = replace_for?; let got = replace_for?;
let replacement = self.inc(got.clone())?; let replacement = self.inc(got)?;
match value.replace_data_at_column_path( match value
&f, .replace_data_at_column_path(&f, replacement.value.into_untagged_value())
replacement.value.clone().into_untagged_value(), {
) {
Some(v) => Ok(v), Some(v) => Ok(v),
None => Err(ShellError::labeled_error( None => Err(ShellError::labeled_error(
"inc could not find field to replace", "inc could not find field to replace",
View file
@ -56,7 +56,7 @@ mod integration {
plugin(&mut Inc::new()) plugin(&mut Inc::new())
.args(CallStub::new().with_parameter("package.version").create()) .args(CallStub::new().with_parameter("package.version").create())
.setup(|plugin, _| { .setup(|plugin, _| {
plugin.expect_field(column_path(&vec![string("package"), string("version")])) plugin.expect_field(column_path(&[string("package"), string("version")]))
}); });
} }
View file
@ -12,6 +12,7 @@ struct Match {
} }
impl Match { impl Match {
#[allow(clippy::trivial_regex)]
fn new() -> Self { fn new() -> Self {
Match { Match {
column: String::new(), column: String::new(),
View file
@ -61,15 +61,12 @@ impl Post {
file => Some(file.clone()), file => Some(file.clone()),
}; };
self.user = call_info self.user = call_info.args.get("user").map(|x| x.as_string().unwrap());
.args
.get("user")
.map(|x| x.as_string().unwrap().to_string());
self.password = call_info self.password = call_info
.args .args
.get("password") .get("password")
.map(|x| x.as_string().unwrap().to_string()); .map(|x| x.as_string().unwrap());
self.headers = get_headers(&call_info)?; self.headers = get_headers(&call_info)?;
@ -166,18 +163,18 @@ async fn post_helper(
} else { } else {
// If the extension could not be determined via mimetype, try to use the path // If the extension could not be determined via mimetype, try to use the path
// extension. Some file types do not declare their mimetypes (such as bson files). // extension. Some file types do not declare their mimetypes (such as bson files).
file_extension.or(path_str.split('.').last().map(String::from)) file_extension.or_else(|| path_str.split('.').last().map(String::from))
}; };
let tagged_contents = contents.into_value(&contents_tag); let tagged_contents = contents.into_value(&contents_tag);
if let Some(extension) = file_extension { if let Some(extension) = file_extension {
return Ok(ReturnSuccess::Action(CommandAction::AutoConvert( Ok(ReturnSuccess::Action(CommandAction::AutoConvert(
tagged_contents, tagged_contents,
extension, extension,
))); )))
} else { } else {
return ReturnSuccess::value(tagged_contents); ReturnSuccess::value(tagged_contents)
} }
} }
@ -468,7 +465,7 @@ pub fn value_to_json_value(v: &Value) -> Result<serde_json::Value, ShellError> {
}) })
} }
fn json_list(input: &Vec<Value>) -> Result<Vec<serde_json::Value>, ShellError> { fn json_list(input: &[Value]) -> Result<Vec<serde_json::Value>, ShellError> {
let mut out = vec![]; let mut out = vec![];
for value in input { for value in input {
@ -482,20 +479,22 @@ fn get_headers(call_info: &CallInfo) -> Result<Vec<HeaderKind>, ShellError> {
let mut headers = vec![]; let mut headers = vec![];
match extract_header_value(&call_info, "content-type") { match extract_header_value(&call_info, "content-type") {
Ok(h) => match h { Ok(h) => {
Some(ct) => headers.push(HeaderKind::ContentType(ct)), if let Some(ct) = h {
None => {} headers.push(HeaderKind::ContentType(ct))
}, }
}
Err(e) => { Err(e) => {
return Err(e); return Err(e);
} }
}; };
match extract_header_value(&call_info, "content-length") { match extract_header_value(&call_info, "content-length") {
Ok(h) => match h { Ok(h) => {
Some(cl) => headers.push(HeaderKind::ContentLength(cl)), if let Some(cl) = h {
None => {} headers.push(HeaderKind::ContentLength(cl))
}, }
}
Err(e) => { Err(e) => {
return Err(e); return Err(e);
} }
View file
@ -81,8 +81,8 @@ impl Plugin for Str {
}) = args.get("find-replace") }) = args.get("find-replace")
{ {
self.for_replace(ReplaceAction::FindAndReplace( self.for_replace(ReplaceAction::FindAndReplace(
arguments.get(0).unwrap().as_string()?.to_string(), arguments.get(0).unwrap().as_string()?,
arguments.get(1).unwrap().as_string()?.to_string(), arguments.get(1).unwrap().as_string()?,
)); ));
} }
} }
View file
@ -72,7 +72,7 @@ mod integration {
CallStub::new() CallStub::new()
.with_named_parameter( .with_named_parameter(
"find-replace", "find-replace",
table(&vec![string(&search_argument), string(&replace_argument)]), table(&[string(&search_argument), string(&replace_argument)]),
) )
.create(), .create(),
) )
@ -91,7 +91,7 @@ mod integration {
.create(), .create(),
) )
.setup(|plugin, _| { .setup(|plugin, _| {
plugin.expect_field(column_path(&vec![string("package"), string("description")])) plugin.expect_field(column_path(&[string("package"), string("description")]))
}); });
} }
@ -194,7 +194,7 @@ mod integration {
.with_parameter("staff") .with_parameter("staff")
.with_named_parameter( .with_named_parameter(
"find-replace", "find-replace",
table(&vec![string("kittens"), string("jotandrehuda")]), table(&[string("kittens"), string("jotandrehuda")]),
) )
.create(), .create(),
) )
@ -353,7 +353,7 @@ mod integration {
CallStub::new() CallStub::new()
.with_named_parameter( .with_named_parameter(
"find-replace", "find-replace",
table(&vec![string("kittens"), string("jotandrehuda")]), table(&[string("kittens"), string("jotandrehuda")]),
) )
.create(), .create(),
) )
View file
@ -20,6 +20,7 @@ pub enum ReplaceAction {
FindAndReplace(String, String), FindAndReplace(String, String),
} }
#[derive(Default)]
pub struct Str { pub struct Str {
pub field: Option<Tagged<ColumnPath>>, pub field: Option<Tagged<ColumnPath>>,
pub error: Option<String>, pub error: Option<String>,
@ -27,12 +28,8 @@ pub struct Str {
} }
impl Str { impl Str {
pub fn new() -> Str { pub fn new() -> Self {
Str { Default::default()
field: None,
error: None,
action: None,
}
} }
fn apply(&self, input: &str) -> Result<UntaggedValue, ShellError> { fn apply(&self, input: &str) -> Result<UntaggedValue, ShellError> {
@ -174,12 +171,11 @@ impl Str {
); );
let got = replace_for?; let got = replace_for?;
let replacement = self.strutils(got.clone())?; let replacement = self.strutils(got)?;
match value.replace_data_at_column_path( match value
&f, .replace_data_at_column_path(&f, replacement.value.into_untagged_value())
replacement.value.clone().into_untagged_value(), {
) {
Some(v) => Ok(v), Some(v) => Ok(v),
None => Err(ShellError::labeled_error( None => Err(ShellError::labeled_error(
"str could not find field to replace", "str could not find field to replace",
View file
@ -38,7 +38,7 @@ impl Plugin for TextView {
} }
fn paint_textview( fn paint_textview(
draw_commands: &Vec<DrawCommand>, draw_commands: &[DrawCommand],
starting_row: usize, starting_row: usize,
use_color_buffer: bool, use_color_buffer: bool,
) -> usize { ) -> usize {
@ -148,8 +148,8 @@ fn scroll_view_lines_if_needed(draw_commands: Vec<DrawCommand>, use_color_buffer
loop { loop {
if let Some(ev) = sync_stdin.next() { if let Some(ev) = sync_stdin.next() {
match ev { if let InputEvent::Keyboard(k) = ev {
InputEvent::Keyboard(k) => match k { match k {
KeyEvent::Esc => { KeyEvent::Esc => {
break; break;
} }
@ -187,8 +187,7 @@ fn scroll_view_lines_if_needed(draw_commands: Vec<DrawCommand>, use_color_buffer
paint_textview(&draw_commands, starting_row, use_color_buffer); paint_textview(&draw_commands, starting_row, use_color_buffer);
} }
_ => {} _ => {}
}, }
_ => {}
} }
} }
@ -220,71 +219,67 @@ fn scroll_view(s: &str) {
fn view_text_value(value: &Value) { fn view_text_value(value: &Value) {
let value_anchor = value.anchor(); let value_anchor = value.anchor();
match &value.value { if let UntaggedValue::Primitive(Primitive::String(ref s)) = &value.value {
UntaggedValue::Primitive(Primitive::String(ref s)) => { if let Some(source) = value_anchor {
if let Some(source) = value_anchor { let extension: Option<String> = match source {
let extension: Option<String> = match source { AnchorLocation::File(file) => {
AnchorLocation::File(file) => { let path = Path::new(&file);
let path = Path::new(&file); path.extension().map(|x| x.to_string_lossy().to_string())
path.extension().map(|x| x.to_string_lossy().to_string()) }
} AnchorLocation::Url(url) => {
AnchorLocation::Url(url) => { let url = url::Url::parse(&url);
let url = url::Url::parse(&url); if let Ok(url) = url {
if let Ok(url) = url { if let Some(mut segments) = url.path_segments() {
let url = url.clone(); if let Some(file) = segments.next_back() {
if let Some(mut segments) = url.path_segments() { let path = Path::new(file);
if let Some(file) = segments.next_back() { path.extension().map(|x| x.to_string_lossy().to_string())
let path = Path::new(file);
path.extension().map(|x| x.to_string_lossy().to_string())
} else {
None
}
} else { } else {
None None
} }
} else { } else {
None None
} }
} else {
None
} }
//FIXME: this probably isn't correct }
AnchorLocation::Source(_source) => None, //FIXME: this probably isn't correct
}; AnchorLocation::Source(_source) => None,
};
match extension { match extension {
Some(extension) => { Some(extension) => {
// Load these once at the start of your program // Load these once at the start of your program
let ps: SyntaxSet = let ps: SyntaxSet =
syntect::dumps::from_binary(include_bytes!("assets/syntaxes.bin")); syntect::dumps::from_binary(include_bytes!("assets/syntaxes.bin"));
if let Some(syntax) = ps.find_syntax_by_extension(&extension) { if let Some(syntax) = ps.find_syntax_by_extension(&extension) {
let ts: ThemeSet = let ts: ThemeSet =
syntect::dumps::from_binary(include_bytes!("assets/themes.bin")); syntect::dumps::from_binary(include_bytes!("assets/themes.bin"));
let mut h = HighlightLines::new(syntax, &ts.themes["OneHalfDark"]); let mut h = HighlightLines::new(syntax, &ts.themes["OneHalfDark"]);
let mut v = vec![]; let mut v = vec![];
for line in s.lines() { for line in s.lines() {
let ranges: Vec<(Style, &str)> = h.highlight(line, &ps); let ranges: Vec<(Style, &str)> = h.highlight(line, &ps);
for range in ranges { for range in ranges {
v.push(DrawCommand::DrawString(range.0, range.1.to_string())); v.push(DrawCommand::DrawString(range.0, range.1.to_string()));
}
v.push(DrawCommand::NextLine);
} }
scroll_view_lines_if_needed(v, true);
} else { v.push(DrawCommand::NextLine);
scroll_view(s);
} }
} scroll_view_lines_if_needed(v, true);
_ => { } else {
scroll_view(s); scroll_view(s);
} }
} }
} else { _ => {
scroll_view(s); scroll_view(s);
}
} }
} else {
scroll_view(s);
} }
_ => {}
} }
} }
View file
@ -87,7 +87,7 @@ impl Plugin for TreeViewer {
} }
fn sink(&mut self, _call_info: CallInfo, input: Vec<Value>) { fn sink(&mut self, _call_info: CallInfo, input: Vec<Value>) {
if input.len() > 0 { if !input.is_empty() {
for i in input.iter() { for i in input.iter() {
let view = TreeView::from_value(&i); let view = TreeView::from_value(&i);
let _ = view.render_view(); let _ = view.render_view();
View file
@ -642,7 +642,7 @@ pub fn classify_pipeline(
} }
pub fn print_err(err: ShellError, host: &dyn Host, source: &Text) { pub fn print_err(err: ShellError, host: &dyn Host, source: &Text) {
let diag = err.to_diagnostic(); let diag = err.into_diagnostic();
let writer = host.err_termcolor(); let writer = host.err_termcolor();
let mut source = source.to_string(); let mut source = source.to_string();
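Renaming `to_diagnostic` to `into_diagnostic` follows clippy's `wrong_self_convention`: a conversion that consumes `self` should be named `into_*`, while `to_*` implies a borrowing, non-consuming conversion. A sketch with illustrative types, not the real `ShellError`:

```rust
// Illustrative sketch of clippy::wrong_self_convention, not code from this commit.
struct Error {
    message: String,
}

struct Diagnostic {
    message: String,
    severity: &'static str,
}

impl Error {
    // Consumes self, so the conventional name is into_diagnostic.
    fn into_diagnostic(self) -> Diagnostic {
        Diagnostic {
            message: self.message,
            severity: "error",
        }
    }
}

fn main() {
    let err = Error { message: "unexpected end of input".to_string() };
    let diag = err.into_diagnostic();
    println!("[{}] {}", diag.severity, diag.message);
}
```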
View file
@ -116,7 +116,7 @@ pub(crate) use echo::Echo;
pub(crate) use edit::Edit; pub(crate) use edit::Edit;
pub(crate) use enter::Enter; pub(crate) use enter::Enter;
pub(crate) use env::Env; pub(crate) use env::Env;
#[allow(unused)] #[allow(unused_imports)]
pub(crate) use evaluate_by::EvaluateBy; pub(crate) use evaluate_by::EvaluateBy;
pub(crate) use exit::Exit; pub(crate) use exit::Exit;
pub(crate) use first::First; pub(crate) use first::First;
@ -144,7 +144,7 @@ pub(crate) use insert::Insert;
pub(crate) use last::Last; pub(crate) use last::Last;
pub(crate) use lines::Lines; pub(crate) use lines::Lines;
pub(crate) use ls::Ls; pub(crate) use ls::Ls;
#[allow(unused)] #[allow(unused_imports)]
pub(crate) use map_max_by::MapMaxBy; pub(crate) use map_max_by::MapMaxBy;
pub(crate) use mkdir::Mkdir; pub(crate) use mkdir::Mkdir;
pub(crate) use mv::Move; pub(crate) use mv::Move;
@ -158,7 +158,7 @@ pub(crate) use prepend::Prepend;
pub(crate) use prev::Previous; pub(crate) use prev::Previous;
pub(crate) use pwd::Pwd; pub(crate) use pwd::Pwd;
pub(crate) use range::Range; pub(crate) use range::Range;
#[allow(unused)] #[allow(unused_imports)]
pub(crate) use reduce_by::ReduceBy; pub(crate) use reduce_by::ReduceBy;
pub(crate) use reject::Reject; pub(crate) use reject::Reject;
pub(crate) use reverse::Reverse; pub(crate) use reverse::Reverse;
@ -172,7 +172,7 @@ pub(crate) use sort_by::SortBy;
pub(crate) use split_by::SplitBy; pub(crate) use split_by::SplitBy;
pub(crate) use split_column::SplitColumn; pub(crate) use split_column::SplitColumn;
pub(crate) use split_row::SplitRow; pub(crate) use split_row::SplitRow;
#[allow(unused)] #[allow(unused_imports)]
pub(crate) use t_sort_by::TSortBy; pub(crate) use t_sort_by::TSortBy;
pub(crate) use table::Table; pub(crate) use table::Table;
pub(crate) use tags::Tags; pub(crate) use tags::Tags;
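`#[allow(unused)]` covers the whole `unused` lint group; narrowing it to `#[allow(unused_imports)]` keeps the other unused-code warnings active for those items. A one-line sketch:

```rust
// Illustrative sketch, not code from this commit: allow only the specific lint.
#[allow(unused_imports)]
use std::collections::HashMap;

fn main() {
    println!("import kept for a conditionally compiled code path");
}
```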
View file
@ -58,7 +58,7 @@ pub mod clipboard {
let mut clip_context: ClipboardContext = ClipboardProvider::new().unwrap(); let mut clip_context: ClipboardContext = ClipboardProvider::new().unwrap();
let mut new_copy_data = String::new(); let mut new_copy_data = String::new();
if input.len() > 0 { if !input.is_empty() {
let mut first = true; let mut first = true;
for i in input.iter() { for i in input.iter() {
if !first { if !first {
View file
@ -192,7 +192,7 @@ impl CommandArgs {
let (input, args) = args.split(); let (input, args) = args.split();
let name_tag = args.call_info.name_tag; let name_tag = args.call_info.name_tag;
let mut deserializer = ConfigDeserializer::from_call_info(call_info.clone()); let mut deserializer = ConfigDeserializer::from_call_info(call_info);
Ok(RunnableRawArgs { Ok(RunnableRawArgs {
args: T::deserialize(&mut deserializer)?, args: T::deserialize(&mut deserializer)?,
@ -556,7 +556,6 @@ impl WholeStreamCommand for FnFilterCommand {
} = args; } = args;
let host: Arc<Mutex<dyn Host>> = host.clone(); let host: Arc<Mutex<dyn Host>> = host.clone();
let shell_manager = shell_manager.clone();
let registry: CommandRegistry = registry.clone(); let registry: CommandRegistry = registry.clone();
let func = self.func; let func = self.func;
View file
@ -111,7 +111,7 @@ pub fn evaluate(
} => { } => {
let data: Vec<_> = data let data: Vec<_> = data
.into_iter() .into_iter()
.map(|x| evaluate_with(x.clone(), tag.clone()).unwrap()) .map(|x| evaluate_with(x, tag.clone()).unwrap())
.collect(); .collect();
UntaggedValue::Table(data).into_value(&tag) UntaggedValue::Table(data).into_value(&tag)
} }
@ -124,7 +124,7 @@ pub fn evaluate(
}) })
.collect(); .collect();
UntaggedValue::Table(datasets.clone()).into_value(&tag) UntaggedValue::Table(datasets).into_value(&tag)
} }
_ => UntaggedValue::Table(vec![]).into_value(&tag), _ => UntaggedValue::Table(vec![]).into_value(&tag),
}; };
@ -156,7 +156,7 @@ mod tests {
UntaggedValue::row(entries).into_untagged_value() UntaggedValue::row(entries).into_untagged_value()
} }
fn table(list: &Vec<Value>) -> Value { fn table(list: &[Value]) -> Value {
UntaggedValue::table(list).into_untagged_value() UntaggedValue::table(list).into_untagged_value()
} }
@ -233,10 +233,10 @@ mod tests {
fn evaluates_the_tables() { fn evaluates_the_tables() {
assert_eq!( assert_eq!(
evaluate(&nu_releases_sorted_by_date(), None, Tag::unknown()).unwrap(), evaluate(&nu_releases_sorted_by_date(), None, Tag::unknown()).unwrap(),
table(&vec![table(&vec![ table(&[table(&[
table(&vec![int(1), int(1), int(1)]), table(&[int(1), int(1), int(1)]),
table(&vec![int(1), int(1), int(1)]), table(&[int(1), int(1), int(1)]),
table(&vec![int(1), int(1), int(1)]), table(&[int(1), int(1), int(1)]),
]),]) ]),])
); );
} }
@ -247,10 +247,10 @@ mod tests {
assert_eq!( assert_eq!(
evaluate(&nu_releases_sorted_by_date(), Some(eval), Tag::unknown()).unwrap(), evaluate(&nu_releases_sorted_by_date(), Some(eval), Tag::unknown()).unwrap(),
table(&vec![table(&vec![ table(&[table(&[
table(&vec![string("AR"), string("JT"), string("YK")]), table(&[string("AR"), string("JT"), string("YK")]),
table(&vec![string("AR"), string("YK"), string("JT")]), table(&[string("AR"), string("YK"), string("JT")]),
table(&vec![string("YK"), string("JT"), string("AR")]), table(&[string("YK"), string("JT"), string("AR")]),
]),]) ]),])
); );
} }
View file
@ -55,13 +55,10 @@ impl PerItemCommand for Format {
output.push_str(s); output.push_str(s);
} }
FormatCommand::Column(c) => { FormatCommand::Column(c) => {
match dict.entries.get(c) { if let Some(c) = dict.entries.get(c) {
Some(c) => output output.push_str(&value::format_leaf(c.borrow()).plain_string(100_000))
.push_str(&value::format_leaf(c.borrow()).plain_string(100_000)),
None => {
// This column doesn't match, so don't emit anything
}
} }
// That column doesn't match, so don't emit anything
} }
} }
} }
View file
@ -102,7 +102,7 @@ fn convert_bson_value_to_nu_value(v: &Bson, tag: impl Into<Tag>) -> Result<Value
); );
collected.insert_value( collected.insert_value(
"$scope".to_string(), "$scope".to_string(),
convert_bson_value_to_nu_value(&Bson::Document(doc.to_owned()), tag.clone())?, convert_bson_value_to_nu_value(&Bson::Document(doc.to_owned()), tag)?,
); );
collected.into_value() collected.into_value()
} }
View file
@ -183,7 +183,7 @@ fn parse_separated_columns<'a>(
let headers = (1..=num_columns) let headers = (1..=num_columns)
.map(|i| format!("Column{}", i)) .map(|i| format!("Column{}", i))
.collect::<Vec<String>>(); .collect::<Vec<String>>();
collect(headers, ls.iter().map(|s| s.as_ref()), separator) collect(headers, ls.into_iter(), separator)
}; };
match headers { match headers {
View file
@ -52,7 +52,7 @@ fn from_node_to_value<'a, 'd>(n: &roxmltree::Node<'a, 'd>, tag: impl Into<Tag>)
.collect(); .collect();
let mut collected = TaggedDictBuilder::new(tag); let mut collected = TaggedDictBuilder::new(tag);
collected.insert_untagged(name.clone(), UntaggedValue::Table(children_values)); collected.insert_untagged(name, UntaggedValue::Table(children_values));
collected.into_value() collected.into_value()
} else if n.is_comment() { } else if n.is_comment() {
@ -145,7 +145,7 @@ mod tests {
UntaggedValue::row(entries).into_untagged_value() UntaggedValue::row(entries).into_untagged_value()
} }
fn table(list: &Vec<Value>) -> Value { fn table(list: &[Value]) -> Value {
UntaggedValue::table(list).into_untagged_value() UntaggedValue::table(list).into_untagged_value()
} }
@ -160,7 +160,7 @@ mod tests {
assert_eq!( assert_eq!(
parse(source), parse(source),
row(indexmap! { row(indexmap! {
"nu".into() => table(&vec![]) "nu".into() => table(&[])
}) })
); );
} }
@ -172,7 +172,7 @@ mod tests {
assert_eq!( assert_eq!(
parse(source), parse(source),
row(indexmap! { row(indexmap! {
"nu".into() => table(&vec![string("La era de los tres caballeros")]) "nu".into() => table(&[string("La era de los tres caballeros")])
}) })
); );
} }
@ -189,10 +189,10 @@ mod tests {
assert_eq!( assert_eq!(
parse(source), parse(source),
row(indexmap! { row(indexmap! {
"nu".into() => table(&vec![ "nu".into() => table(&[
row(indexmap! {"dev".into() => table(&vec![string("Andrés")])}), row(indexmap! {"dev".into() => table(&[string("Andrés")])}),
row(indexmap! {"dev".into() => table(&vec![string("Jonathan")])}), row(indexmap! {"dev".into() => table(&[string("Jonathan")])}),
row(indexmap! {"dev".into() => table(&vec![string("Yehuda")])}) row(indexmap! {"dev".into() => table(&[string("Yehuda")])})
]) ])
}) })
); );
View file
@ -128,7 +128,7 @@ mod tests {
UntaggedValue::row(entries).into_untagged_value() UntaggedValue::row(entries).into_untagged_value()
} }
fn table(list: &Vec<Value>) -> Value { fn table(list: &[Value]) -> Value {
UntaggedValue::table(list).into_untagged_value() UntaggedValue::table(list).into_untagged_value()
} }
@ -171,17 +171,17 @@ mod tests {
assert_eq!( assert_eq!(
group(&for_key, nu_releases_commiters(), Tag::unknown()).unwrap(), group(&for_key, nu_releases_commiters(), Tag::unknown()).unwrap(),
row(indexmap! { row(indexmap! {
"August 23-2019".into() => table(&vec![ "August 23-2019".into() => table(&[
row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")}), row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")}),
row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("August 23-2019")}), row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("August 23-2019")}),
row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")}) row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")})
]), ]),
"October 10-2019".into() => table(&vec![ "October 10-2019".into() => table(&[
row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")}), row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")}),
row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("October 10-2019")}), row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("October 10-2019")}),
row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("October 10-2019")}) row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("October 10-2019")})
]), ]),
"Sept 24-2019".into() => table(&vec![ "Sept 24-2019".into() => table(&[
row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("Sept 24-2019")}), row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("Sept 24-2019")}),
row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("Sept 24-2019")}), row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("Sept 24-2019")}),
row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("Sept 24-2019")}) row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("Sept 24-2019")})
@ -197,17 +197,17 @@ mod tests {
assert_eq!( assert_eq!(
group(&for_key, nu_releases_commiters(), Tag::unknown()).unwrap(), group(&for_key, nu_releases_commiters(), Tag::unknown()).unwrap(),
row(indexmap! { row(indexmap! {
"EC".into() => table(&vec![ "EC".into() => table(&[
row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")}), row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")}),
row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("Sept 24-2019")}), row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("Sept 24-2019")}),
row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("October 10-2019")}) row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("October 10-2019")})
]), ]),
"NZ".into() => table(&vec![ "NZ".into() => table(&[
row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("August 23-2019")}), row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("August 23-2019")}),
row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("October 10-2019")}), row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("October 10-2019")}),
row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("Sept 24-2019")}) row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("Sept 24-2019")})
]), ]),
"US".into() => table(&vec![ "US".into() => table(&[
row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")}), row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")}),
row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("Sept 24-2019")}), row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("Sept 24-2019")}),
row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")}), row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")}),
View file
@ -114,7 +114,7 @@ macro_rules! command {
$($extract)* { $($extract)* {
use std::convert::TryInto; use std::convert::TryInto;
$args.get(stringify!($param_name)).clone().try_into()? $args.get(stringify!($param_name)).try_into()?
} }
} }
); );
@ -164,7 +164,7 @@ macro_rules! command {
$($extract)* { $($extract)* {
use std::convert::TryInto; use std::convert::TryInto;
$args.get(stringify!($param_name)).clone().try_into()? $args.get(stringify!($param_name)).try_into()?
} }
} }
); );
@ -214,7 +214,7 @@ macro_rules! command {
$($extract)* { $($extract)* {
use std::convert::TryInto; use std::convert::TryInto;
$args.get(stringify!($param_name)).clone().try_into()? $args.get(stringify!($param_name)).try_into()?
} }
} }
); );
View file
@ -65,7 +65,7 @@ fn nth(
.iter() .iter()
.any(|requested| requested.item == idx as u64) .any(|requested| requested.item == idx as u64)
{ {
result.push_back(ReturnSuccess::value(item.clone())); result.push_back(ReturnSuccess::value(item));
} }
result result
View file
@ -41,13 +41,13 @@ impl WholeStreamCommand for Range {
fn range( fn range(
RangeArgs { area }: RangeArgs, RangeArgs { area }: RangeArgs,
RunnableContext { input, name: _, .. }: RunnableContext, RunnableContext { input, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> { ) -> Result<OutputStream, ShellError> {
let range = area.item; let range = area.item;
let (from, _) = range.from; let (from, _) = range.from;
let (to, _) = range.to; let (to, _) = range.to;
return Ok(OutputStream::from_input( Ok(OutputStream::from_input(
input.values.skip(*from).take(*to - *from + 1), input.values.skip(*from).take(*to - *from + 1),
)); ))
} }
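Dropping the trailing `return Ok(..);` in favor of a final expression is clippy's `needless_return`: the last expression of a function body is already its value. A compact sketch of the same skip/take range logic with plain integers:

```rust
// Illustrative sketch of clippy::needless_return, not code from this commit.
fn take_range(values: Vec<i32>, from: usize, to: usize) -> Vec<i32> {
    // No `return` needed: this expression is the function's value.
    values.into_iter().skip(from).take(to - from + 1).collect()
}

fn main() {
    assert_eq!(take_range(vec![10, 20, 30, 40, 50], 1, 3), vec![20, 30, 40]);
}
```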
View file
@ -180,7 +180,7 @@ mod tests {
UntaggedValue::row(entries).into_untagged_value() UntaggedValue::row(entries).into_untagged_value()
} }
fn table(list: &Vec<Value>) -> Value { fn table(list: &[Value]) -> Value {
UntaggedValue::table(list).into_untagged_value() UntaggedValue::table(list).into_untagged_value()
} }
@ -254,7 +254,7 @@ mod tests {
Some(String::from("sum")), Some(String::from("sum")),
Tag::unknown() Tag::unknown()
), ),
Ok(table(&vec![table(&vec![int(3), int(3), int(3)])])) Ok(table(&[table(&[int(3), int(3), int(3)])]))
); );
} }
} }
View file
@ -167,7 +167,7 @@ mod tests {
UntaggedValue::row(entries).into_untagged_value() UntaggedValue::row(entries).into_untagged_value()
} }
fn table(list: &Vec<Value>) -> Value { fn table(list: &[Value]) -> Value {
UntaggedValue::table(list).into_untagged_value() UntaggedValue::table(list).into_untagged_value()
} }
@ -216,35 +216,35 @@ mod tests {
split(&for_key, &nu_releases_grouped_by_date(), Tag::unknown()).unwrap(), split(&for_key, &nu_releases_grouped_by_date(), Tag::unknown()).unwrap(),
UntaggedValue::row(indexmap! { UntaggedValue::row(indexmap! {
"EC".into() => row(indexmap! { "EC".into() => row(indexmap! {
"August 23-2019".into() => table(&vec![ "August 23-2019".into() => table(&[
row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")}) row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")})
]), ]),
"Sept 24-2019".into() => table(&vec![ "Sept 24-2019".into() => table(&[
row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("Sept 24-2019")}) row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("Sept 24-2019")})
]), ]),
"October 10-2019".into() => table(&vec![ "October 10-2019".into() => table(&[
row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("October 10-2019")}) row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("October 10-2019")})
]) ])
}), }),
"NZ".into() => row(indexmap! { "NZ".into() => row(indexmap! {
"August 23-2019".into() => table(&vec![ "August 23-2019".into() => table(&[
row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("August 23-2019")}) row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("August 23-2019")})
]), ]),
"Sept 24-2019".into() => table(&vec![ "Sept 24-2019".into() => table(&[
row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("Sept 24-2019")}) row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("Sept 24-2019")})
]), ]),
"October 10-2019".into() => table(&vec![ "October 10-2019".into() => table(&[
row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("October 10-2019")}) row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("October 10-2019")})
]) ])
}), }),
"US".into() => row(indexmap! { "US".into() => row(indexmap! {
"August 23-2019".into() => table(&vec![ "August 23-2019".into() => table(&[
row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")}) row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")})
]), ]),
"Sept 24-2019".into() => table(&vec![ "Sept 24-2019".into() => table(&[
row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("Sept 24-2019")}) row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("Sept 24-2019")})
]), ]),
"October 10-2019".into() => table(&vec![ "October 10-2019".into() => table(&[
row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")}) row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")})
]) ])
}) })
@ -257,13 +257,13 @@ mod tests {
let for_key = String::from("country").tagged_unknown(); let for_key = String::from("country").tagged_unknown();
let nu_releases = row(indexmap! { let nu_releases = row(indexmap! {
"August 23-2019".into() => table(&vec![ "August 23-2019".into() => table(&[
row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")}) row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")})
]), ]),
"Sept 24-2019".into() => table(&vec![ "Sept 24-2019".into() => table(&[
row(indexmap!{"name".into() => UntaggedValue::string("JT").into_value(Tag::from(Span::new(5,10))), "date".into() => string("Sept 24-2019")}) row(indexmap!{"name".into() => UntaggedValue::string("JT").into_value(Tag::from(Span::new(5,10))), "date".into() => string("Sept 24-2019")})
]), ]),
"October 10-2019".into() => table(&vec![ "October 10-2019".into() => table(&[
row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")}) row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")})
]) ])
}); });

View file

@ -122,7 +122,7 @@ pub fn columns_sorted(
value: UntaggedValue::Primitive(Primitive::Date(d)), value: UntaggedValue::Primitive(Primitive::Date(d)),
.. ..
} => format!("{}", d.format("%B %d-%Y")), } => format!("{}", d.format("%B %d-%Y")),
_ => k.as_string().unwrap().to_string(), _ => k.as_string().unwrap(),
}) })
.collect(); .collect();
@ -181,10 +181,7 @@ pub fn t_sort(
Some(Value { Some(Value {
value: UntaggedValue::Row(dict), value: UntaggedValue::Row(dict),
.. ..
}) => dict }) => dict.get_data_by_key(label.borrow_spanned()).unwrap(),
.get_data_by_key(label.borrow_spanned())
.unwrap()
.clone(),
_ => UntaggedValue::Table(vec![]).into_value(&origin_tag), _ => UntaggedValue::Table(vec![]).into_value(&origin_tag),
}) })
.collect() .collect()
@ -223,7 +220,7 @@ mod tests {
UntaggedValue::row(entries).into_untagged_value() UntaggedValue::row(entries).into_untagged_value()
} }
fn table(list: &Vec<Value>) -> Value { fn table(list: &[Value]) -> Value {
UntaggedValue::table(list).into_untagged_value() UntaggedValue::table(list).into_untagged_value()
} }
@ -275,9 +272,9 @@ mod tests {
Tag::unknown() Tag::unknown()
), ),
vec![ vec![
format!("August 23-2019").tagged_unknown(), "August 23-2019".to_string().tagged_unknown(),
format!("September 24-2019").tagged_unknown(), "September 24-2019".to_string().tagged_unknown(),
format!("October 10-2019").tagged_unknown() "October 10-2019".to_string().tagged_unknown()
] ]
) )
} }
@ -294,8 +291,8 @@ mod tests {
Tag::unknown() Tag::unknown()
) )
.unwrap(), .unwrap(),
table(&vec![table(&vec![ table(&[table(&[
table(&vec![ table(&[
row( row(
indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")} indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")}
), ),
@ -306,7 +303,7 @@ mod tests {
indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")} indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")}
) )
]), ]),
table(&vec![ table(&[
row( row(
indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("September 24-2019")} indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("September 24-2019")}
), ),
@ -317,7 +314,7 @@ mod tests {
indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("September 24-2019")} indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("September 24-2019")}
) )
]), ]),
table(&vec![ table(&[
row( row(
indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")} indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")}
), ),
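
Besides the slice parameters, this hunk drops a redundant .to_string() and .clone() and replaces format!("literal") with a plain conversion, which is the clippy::useless_format rewrite. A tiny sketch of that last change:

// clippy::useless_format: format! with no arguments is just a String conversion.
fn main() {
    let date = "August 23-2019".to_string(); // rather than format!("August 23-2019")
    assert_eq!(date.len(), 14);
}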

View file

@ -132,10 +132,10 @@ fn to_string_tagged_value(v: &Value) -> Result<String, ShellError> {
let tmp = format!("{}", b); let tmp = format!("{}", b);
Ok(tmp) Ok(tmp)
} }
UntaggedValue::Primitive(Primitive::Boolean(_)) => Ok(v.as_string()?.to_string()), UntaggedValue::Primitive(Primitive::Boolean(_)) => Ok(v.as_string()?),
UntaggedValue::Primitive(Primitive::Decimal(_)) => Ok(v.as_string()?.to_string()), UntaggedValue::Primitive(Primitive::Decimal(_)) => Ok(v.as_string()?),
UntaggedValue::Primitive(Primitive::Int(_)) => Ok(v.as_string()?.to_string()), UntaggedValue::Primitive(Primitive::Int(_)) => Ok(v.as_string()?),
UntaggedValue::Primitive(Primitive::Path(_)) => Ok(v.as_string()?.to_string()), UntaggedValue::Primitive(Primitive::Path(_)) => Ok(v.as_string()?),
UntaggedValue::Table(_) => Ok(String::from("[Table]")), UntaggedValue::Table(_) => Ok(String::from("[Table]")),
UntaggedValue::Row(_) => Ok(String::from("[Row]")), UntaggedValue::Row(_) => Ok(String::from("[Row]")),
UntaggedValue::Primitive(Primitive::Line(s)) => Ok(s.to_string()), UntaggedValue::Primitive(Primitive::Line(s)) => Ok(s.to_string()),
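
as_string() already yields an owned String on these arms, so the extra .to_string() was a no-op conversion. A hedged sketch of the shape of the fix, with a made-up parse-based helper standing in for Nu's as_string:

// Forwarding a String-producing Result needs no extra conversion on the Ok value.
fn as_string(v: &str) -> Result<String, std::num::ParseIntError> {
    let n: i64 = v.parse()?;
    Ok(n.to_string())
}

fn to_csv_field(v: &str) -> Result<String, std::num::ParseIntError> {
    as_string(v) // previously Ok(as_string(v)?.to_string())
}

fn main() {
    assert_eq!(to_csv_field("42").unwrap(), "42");
}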

View file

@ -30,18 +30,17 @@ impl PerItemCommand for Where {
_raw_args: &RawCommandArgs, _raw_args: &RawCommandArgs,
input: Value, input: Value,
) -> Result<OutputStream, ShellError> { ) -> Result<OutputStream, ShellError> {
let input_clone = input.clone();
let condition = call_info.args.expect_nth(0)?; let condition = call_info.args.expect_nth(0)?;
let stream = match condition { let stream = match condition {
Value { Value {
value: UntaggedValue::Block(block), value: UntaggedValue::Block(block),
.. ..
} => { } => {
let result = block.invoke(&Scope::new(input_clone.clone())); let result = block.invoke(&Scope::new(input.clone()));
match result { match result {
Ok(v) => { Ok(v) => {
if v.is_true() { if v.is_true() {
VecDeque::from(vec![Ok(ReturnSuccess::Value(input_clone))]) VecDeque::from(vec![Ok(ReturnSuccess::Value(input))])
} else { } else {
VecDeque::new() VecDeque::new()
} }
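
The where command previously cloned the whole input up front and then cloned that copy again; clippy's redundant-clone guidance is to clone once at the point of use and move the original into the result. A toy version of the same flow (Value and evaluate are invented here):

#[derive(Clone, Debug, PartialEq)]
struct Value(i64);

// Stands in for block.invoke(&Scope::new(...)): it consumes its argument.
fn evaluate(scope_value: Value) -> bool {
    scope_value.0 > 0
}

fn filter(input: Value) -> Option<Value> {
    // One clone feeds the predicate; the original is still available to return.
    if evaluate(input.clone()) {
        Some(input)
    } else {
        None
    }
}

fn main() {
    assert_eq!(filter(Value(3)), Some(Value(3)));
    assert_eq!(filter(Value(-1)), None);
}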

View file

@ -214,7 +214,7 @@ mod tests {
UntaggedValue::row(entries).into_untagged_value() UntaggedValue::row(entries).into_untagged_value()
} }
fn table(list: &Vec<Value>) -> Value { fn table(list: &[Value]) -> Value {
UntaggedValue::table(list).into_untagged_value() UntaggedValue::table(list).into_untagged_value()
} }
@ -224,30 +224,26 @@ mod tests {
move |(_obj_source, _column_path_tried, _err)| ShellError::unimplemented(reason) move |(_obj_source, _column_path_tried, _err)| ShellError::unimplemented(reason)
} }
fn column_path(paths: &Vec<Value>) -> Tagged<ColumnPathValue> { fn column_path(paths: &[Value]) -> Tagged<ColumnPathValue> {
as_column_path(&table(&paths.iter().cloned().collect())).unwrap() as_column_path(&table(paths)).unwrap()
} }
#[test] #[test]
fn gets_matching_field_from_a_row() { fn gets_matching_field_from_a_row() {
let row = UntaggedValue::row(indexmap! { let row = UntaggedValue::row(indexmap! {
"amigos".into() => table(&vec![string("andres"),string("jonathan"),string("yehuda")]) "amigos".into() => table(&[string("andres"),string("jonathan"),string("yehuda")])
}) })
.into_untagged_value(); .into_untagged_value();
assert_eq!( assert_eq!(
row.get_data_by_key("amigos".spanned_unknown()).unwrap(), row.get_data_by_key("amigos".spanned_unknown()).unwrap(),
table(&vec![ table(&[string("andres"), string("jonathan"), string("yehuda")])
string("andres"),
string("jonathan"),
string("yehuda")
])
); );
} }
#[test] #[test]
fn gets_matching_field_from_nested_rows_inside_a_row() { fn gets_matching_field_from_nested_rows_inside_a_row() {
let field_path = column_path(&vec![string("package"), string("version")]); let field_path = column_path(&[string("package"), string("version")]);
let (version, tag) = string("0.4.0").into_parts(); let (version, tag) = string("0.4.0").into_parts();
@ -270,7 +266,7 @@ mod tests {
#[test] #[test]
fn gets_first_matching_field_from_rows_with_same_field_inside_a_table() { fn gets_first_matching_field_from_rows_with_same_field_inside_a_table() {
let field_path = column_path(&vec![string("package"), string("authors"), string("name")]); let field_path = column_path(&[string("package"), string("authors"), string("name")]);
let (_, tag) = string("Andrés N. Robalino").into_parts(); let (_, tag) = string("Andrés N. Robalino").into_parts();
@ -278,7 +274,7 @@ mod tests {
"package".into() => row(indexmap! { "package".into() => row(indexmap! {
"name".into() => string("nu"), "name".into() => string("nu"),
"version".into() => string("0.4.0"), "version".into() => string("0.4.0"),
"authors".into() => table(&vec![ "authors".into() => table(&[
row(indexmap!{"name".into() => string("Andrés N. Robalino")}), row(indexmap!{"name".into() => string("Andrés N. Robalino")}),
row(indexmap!{"name".into() => string("Jonathan Turner")}), row(indexmap!{"name".into() => string("Jonathan Turner")}),
row(indexmap!{"name".into() => string("Yehuda Katz")}) row(indexmap!{"name".into() => string("Yehuda Katz")})
@ -294,7 +290,7 @@ mod tests {
Box::new(error_callback("package.authors.name")) Box::new(error_callback("package.authors.name"))
) )
.unwrap(), .unwrap(),
table(&vec![ table(&[
string("Andrés N. Robalino"), string("Andrés N. Robalino"),
string("Jonathan Turner"), string("Jonathan Turner"),
string("Yehuda Katz") string("Yehuda Katz")
@ -304,7 +300,7 @@ mod tests {
#[test] #[test]
fn column_path_that_contains_just_a_number_gets_a_row_from_a_table() { fn column_path_that_contains_just_a_number_gets_a_row_from_a_table() {
let field_path = column_path(&vec![string("package"), string("authors"), int(0)]); let field_path = column_path(&[string("package"), string("authors"), int(0)]);
let (_, tag) = string("Andrés N. Robalino").into_parts(); let (_, tag) = string("Andrés N. Robalino").into_parts();
@ -312,7 +308,7 @@ mod tests {
"package".into() => row(indexmap! { "package".into() => row(indexmap! {
"name".into() => string("nu"), "name".into() => string("nu"),
"version".into() => string("0.4.0"), "version".into() => string("0.4.0"),
"authors".into() => table(&vec![ "authors".into() => table(&[
row(indexmap!{"name".into() => string("Andrés N. Robalino")}), row(indexmap!{"name".into() => string("Andrés N. Robalino")}),
row(indexmap!{"name".into() => string("Jonathan Turner")}), row(indexmap!{"name".into() => string("Jonathan Turner")}),
row(indexmap!{"name".into() => string("Yehuda Katz")}) row(indexmap!{"name".into() => string("Yehuda Katz")})
@ -333,7 +329,7 @@ mod tests {
#[test] #[test]
fn column_path_that_contains_just_a_number_gets_a_row_from_a_row() { fn column_path_that_contains_just_a_number_gets_a_row_from_a_row() {
let field_path = column_path(&vec![string("package"), string("authors"), string("0")]); let field_path = column_path(&[string("package"), string("authors"), string("0")]);
let (_, tag) = string("Andrés N. Robalino").into_parts(); let (_, tag) = string("Andrés N. Robalino").into_parts();
@ -365,10 +361,10 @@ mod tests {
#[test] #[test]
fn replaces_matching_field_from_a_row() { fn replaces_matching_field_from_a_row() {
let field_path = column_path(&vec![string("amigos")]); let field_path = column_path(&[string("amigos")]);
let sample = UntaggedValue::row(indexmap! { let sample = UntaggedValue::row(indexmap! {
"amigos".into() => table(&vec![ "amigos".into() => table(&[
string("andres"), string("andres"),
string("jonathan"), string("jonathan"),
string("yehuda"), string("yehuda"),
@ -387,7 +383,7 @@ mod tests {
#[test] #[test]
fn replaces_matching_field_from_nested_rows_inside_a_row() { fn replaces_matching_field_from_nested_rows_inside_a_row() {
let field_path = column_path(&vec![ let field_path = column_path(&[
string("package"), string("package"),
string("authors"), string("authors"),
string("los.3.caballeros"), string("los.3.caballeros"),
@ -396,14 +392,14 @@ mod tests {
let sample = UntaggedValue::row(indexmap! { let sample = UntaggedValue::row(indexmap! {
"package".into() => row(indexmap! { "package".into() => row(indexmap! {
"authors".into() => row(indexmap! { "authors".into() => row(indexmap! {
"los.3.mosqueteros".into() => table(&vec![string("andres::yehuda::jonathan")]), "los.3.mosqueteros".into() => table(&[string("andres::yehuda::jonathan")]),
"los.3.amigos".into() => table(&vec![string("andres::yehuda::jonathan")]), "los.3.amigos".into() => table(&[string("andres::yehuda::jonathan")]),
"los.3.caballeros".into() => table(&vec![string("andres::yehuda::jonathan")]) "los.3.caballeros".into() => table(&[string("andres::yehuda::jonathan")])
}) })
}) })
}); });
let replacement = table(&vec![string("yehuda::jonathan::andres")]); let replacement = table(&[string("yehuda::jonathan::andres")]);
let tag = replacement.tag.clone(); let tag = replacement.tag.clone();
let actual = sample let actual = sample
@ -416,15 +412,15 @@ mod tests {
UntaggedValue::row(indexmap! { UntaggedValue::row(indexmap! {
"package".into() => row(indexmap! { "package".into() => row(indexmap! {
"authors".into() => row(indexmap! { "authors".into() => row(indexmap! {
"los.3.mosqueteros".into() => table(&vec![string("andres::yehuda::jonathan")]), "los.3.mosqueteros".into() => table(&[string("andres::yehuda::jonathan")]),
"los.3.amigos".into() => table(&vec![string("andres::yehuda::jonathan")]), "los.3.amigos".into() => table(&[string("andres::yehuda::jonathan")]),
"los.3.caballeros".into() => replacement.clone()})})}) "los.3.caballeros".into() => replacement})})})
.into_value(tag) .into_value(tag)
); );
} }
#[test] #[test]
fn replaces_matching_field_from_rows_inside_a_table() { fn replaces_matching_field_from_rows_inside_a_table() {
let field_path = column_path(&vec![ let field_path = column_path(&[
string("shell_policy"), string("shell_policy"),
string("releases"), string("releases"),
string("nu.version.arepa"), string("nu.version.arepa"),
@ -432,7 +428,7 @@ mod tests {
let sample = UntaggedValue::row(indexmap! { let sample = UntaggedValue::row(indexmap! {
"shell_policy".into() => row(indexmap! { "shell_policy".into() => row(indexmap! {
"releases".into() => table(&vec![ "releases".into() => table(&[
row(indexmap! { row(indexmap! {
"nu.version.arepa".into() => row(indexmap! { "nu.version.arepa".into() => row(indexmap! {
"code".into() => string("0.4.0"), "tag_line".into() => string("GitHub-era") "code".into() => string("0.4.0"), "tag_line".into() => string("GitHub-era")
@ -467,7 +463,7 @@ mod tests {
actual, actual,
UntaggedValue::row(indexmap! { UntaggedValue::row(indexmap! {
"shell_policy".into() => row(indexmap! { "shell_policy".into() => row(indexmap! {
"releases".into() => table(&vec![ "releases".into() => table(&[
row(indexmap! { row(indexmap! {
"nu.version.arepa".into() => replacement "nu.version.arepa".into() => replacement
}), }),

View file

@ -168,7 +168,7 @@ impl PrettyDebug for FormatInlineShape {
(b::primitive(format!("{}", byte.get_value())) + b::space() + b::kind("B")) (b::primitive(format!("{}", byte.get_value())) + b::space() + b::kind("B"))
.group() .group()
} }
_ => b::primitive(byte.format(1).to_string()), _ => b::primitive(byte.format(1)),
} }
} }
InlineShape::String(string) => b::primitive(string), InlineShape::String(string) => b::primitive(string),

View file

@ -22,7 +22,7 @@ pub fn date_from_str(s: Tagged<&str>) -> Result<UntaggedValue, ShellError> {
} }
pub fn compare_values( pub fn compare_values(
operator: &CompareOperator, operator: CompareOperator,
left: &UntaggedValue, left: &UntaggedValue,
right: &UntaggedValue, right: &UntaggedValue,
) -> Result<bool, (&'static str, &'static str)> { ) -> Result<bool, (&'static str, &'static str)> {

View file

@ -403,7 +403,7 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut ConfigDeserializer<'de> {
Value { Value {
value: UntaggedValue::Primitive(Primitive::Path(p)), value: UntaggedValue::Primitive(Primitive::Path(p)),
.. ..
} => visit::<Tagged<PathBuf>, _>(p.clone().tagged(tag), name, fields, visitor), } => visit::<Tagged<PathBuf>, _>(p.tagged(tag), name, fields, visitor),
Value { Value {
value: UntaggedValue::Primitive(Primitive::Int(int)), value: UntaggedValue::Primitive(Primitive::Int(int)),
.. ..

View file

@ -40,7 +40,7 @@ pub(crate) fn evaluate_baseline_expr(
trace!("left={:?} right={:?}", left.value, right.value); trace!("left={:?} right={:?}", left.value, right.value);
match apply_operator(&**binary.op(), &left, &right) { match apply_operator(**binary.op(), &left, &right) {
Ok(result) => Ok(result.into_value(tag)), Ok(result) => Ok(result.into_value(tag)),
Err((left_type, right_type)) => Err(ShellError::coerce_error( Err((left_type, right_type)) => Err(ShellError::coerce_error(
left_type.spanned(binary.left().span), left_type.spanned(binary.left().span),
@ -120,7 +120,7 @@ pub(crate) fn evaluate_baseline_expr(
}; };
} }
Ok(item.value.clone().into_value(tag)) Ok(item.value.into_value(tag))
} }
RawExpression::Boolean(_boolean) => unimplemented!(), RawExpression::Boolean(_boolean) => unimplemented!(),
} }

View file

@ -4,11 +4,11 @@ use nu_protocol::{Primitive, ShellTypeName, UntaggedValue, Value};
use std::ops::Not; use std::ops::Not;
pub fn apply_operator( pub fn apply_operator(
op: &CompareOperator, op: CompareOperator,
left: &Value, left: &Value,
right: &Value, right: &Value,
) -> Result<UntaggedValue, (&'static str, &'static str)> { ) -> Result<UntaggedValue, (&'static str, &'static str)> {
match *op { match op {
CompareOperator::Equal CompareOperator::Equal
| CompareOperator::NotEqual | CompareOperator::NotEqual
| CompareOperator::LessThan | CompareOperator::LessThan
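
compare_values and apply_operator now take CompareOperator by value: the operator is a small Copy enum, so clippy::trivially_copy_pass_by_ref prefers a plain move over a reference plus dereference, and the match no longer needs *op. A self-contained sketch with an invented two-variant Op:

#[derive(Clone, Copy)]
enum Op {
    Equal,
    NotEqual,
}

// Small Copy types are as cheap to pass by value as by reference, and simpler to use.
fn apply(op: Op, left: i64, right: i64) -> bool {
    match op {
        Op::Equal => left == right,
        Op::NotEqual => left != right,
    }
}

fn main() {
    assert!(apply(Op::Equal, 1, 1));
    assert!(apply(Op::NotEqual, 1, 2));
}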

View file

@ -153,6 +153,7 @@ fn values_to_entries(values: &[Value], headers: &mut Vec<String>, starting_idx:
entries entries
} }
#[allow(clippy::ptr_arg)]
fn max_per_column(headers: &[String], entries: &Entries, values_len: usize) -> Vec<usize> { fn max_per_column(headers: &[String], entries: &Entries, values_len: usize) -> Vec<usize> {
let mut max_per_column = vec![]; let mut max_per_column = vec![];
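
Here the signature keeps &Entries and silences clippy::ptr_arg locally instead, presumably because the alias is shared elsewhere. A guess at what that trade-off looks like in isolation (the Entries alias below is invented, not Nu's real definition):

// When a shared alias is itself a Vec, changing every caller may not be worth it,
// so the lint is allowed on just this function.
type Entries = Vec<Vec<String>>;

#[allow(clippy::ptr_arg)]
fn max_per_column(entries: &Entries) -> usize {
    entries.iter().map(|row| row.len()).max().unwrap_or(0)
}

fn main() {
    let entries: Entries = vec![vec!["a".into()], vec!["b".into(), "c".into()]];
    assert_eq!(max_per_column(&entries), 2);
}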

View file

@ -38,7 +38,7 @@ impl Plugin for TextView {
} }
fn paint_textview( fn paint_textview(
draw_commands: &Vec<DrawCommand>, draw_commands: &[DrawCommand],
starting_row: usize, starting_row: usize,
use_color_buffer: bool, use_color_buffer: bool,
) -> usize { ) -> usize {
@ -148,8 +148,8 @@ fn scroll_view_lines_if_needed(draw_commands: Vec<DrawCommand>, use_color_buffer
loop { loop {
if let Some(ev) = sync_stdin.next() { if let Some(ev) = sync_stdin.next() {
match ev { if let InputEvent::Keyboard(k) = ev {
InputEvent::Keyboard(k) => match k { match k {
KeyEvent::Esc => { KeyEvent::Esc => {
break; break;
} }
@ -187,8 +187,7 @@ fn scroll_view_lines_if_needed(draw_commands: Vec<DrawCommand>, use_color_buffer
paint_textview(&draw_commands, starting_row, use_color_buffer); paint_textview(&draw_commands, starting_row, use_color_buffer);
} }
_ => {} _ => {}
}, }
_ => {}
} }
} }
@ -220,73 +219,67 @@ fn scroll_view(s: &str) {
fn view_text_value(value: &Value) { fn view_text_value(value: &Value) {
let value_anchor = value.anchor(); let value_anchor = value.anchor();
match &value.value { if let UntaggedValue::Primitive(Primitive::String(ref s)) = &value.value {
UntaggedValue::Primitive(Primitive::String(ref s)) => { if let Some(source) = value_anchor {
if let Some(source) = value_anchor { let extension: Option<String> = match source {
let extension: Option<String> = match source { AnchorLocation::File(file) => {
AnchorLocation::File(file) => { let path = Path::new(&file);
let path = Path::new(&file); path.extension().map(|x| x.to_string_lossy().to_string())
path.extension().map(|x| x.to_string_lossy().to_string()) }
} AnchorLocation::Url(url) => {
AnchorLocation::Url(url) => { let url = url::Url::parse(&url);
let url = url::Url::parse(&url); if let Ok(url) = url {
if let Ok(url) = url { if let Some(mut segments) = url.path_segments() {
let url = url.clone(); if let Some(file) = segments.next_back() {
if let Some(mut segments) = url.path_segments() { let path = Path::new(file);
if let Some(file) = segments.next_back() { path.extension().map(|x| x.to_string_lossy().to_string())
let path = Path::new(file);
path.extension().map(|x| x.to_string_lossy().to_string())
} else {
None
}
} else { } else {
None None
} }
} else { } else {
None None
} }
} else {
None
} }
//FIXME: this probably isn't correct }
AnchorLocation::Source(_source) => None, //FIXME: this probably isn't correct
}; AnchorLocation::Source(_source) => None,
};
match extension { match extension {
Some(extension) => { Some(extension) => {
// Load these once at the start of your program // Load these once at the start of your program
let ps: SyntaxSet = syntect::dumps::from_binary(include_bytes!( let ps: SyntaxSet =
"../../assets/syntaxes.bin" syntect::dumps::from_binary(include_bytes!("../../assets/syntaxes.bin"));
));
if let Some(syntax) = ps.find_syntax_by_extension(&extension) { if let Some(syntax) = ps.find_syntax_by_extension(&extension) {
let ts: ThemeSet = syntect::dumps::from_binary(include_bytes!( let ts: ThemeSet =
"../../assets/themes.bin" syntect::dumps::from_binary(include_bytes!("../../assets/themes.bin"));
)); let mut h = HighlightLines::new(syntax, &ts.themes["OneHalfDark"]);
let mut h = HighlightLines::new(syntax, &ts.themes["OneHalfDark"]);
let mut v = vec![]; let mut v = vec![];
for line in s.lines() { for line in s.lines() {
let ranges: Vec<(Style, &str)> = h.highlight(line, &ps); let ranges: Vec<(Style, &str)> = h.highlight(line, &ps);
for range in ranges { for range in ranges {
v.push(DrawCommand::DrawString(range.0, range.1.to_string())); v.push(DrawCommand::DrawString(range.0, range.1.to_string()));
}
v.push(DrawCommand::NextLine);
} }
scroll_view_lines_if_needed(v, true);
} else { v.push(DrawCommand::NextLine);
scroll_view(s);
} }
} scroll_view_lines_if_needed(v, true);
_ => { } else {
scroll_view(s); scroll_view(s);
} }
} }
} else { _ => {
scroll_view(s); scroll_view(s);
}
} }
} else {
scroll_view(s);
} }
_ => {}
} }
} }
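
The textview changes flatten match expressions whose only other branch was _ => {} into if let, which is the clippy::single_match rewrite, alongside the same &Vec-to-slice change on paint_textview. A minimal example of the match-to-if-let step, with an invented InputEvent:

enum InputEvent {
    Keyboard(char),
    Mouse,
}

// clippy::single_match: one useful arm plus a do-nothing wildcard reads better as if let.
fn handle(ev: InputEvent, pressed: &mut Vec<char>) {
    if let InputEvent::Keyboard(k) = ev {
        pressed.push(k);
    }
}

fn main() {
    let mut pressed = Vec::new();
    handle(InputEvent::Keyboard('q'), &mut pressed);
    handle(InputEvent::Mouse, &mut pressed);
    assert_eq!(pressed, vec!['q']);
}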

View file

@ -1,3 +1,5 @@
#![allow(clippy::module_inception)]
pub(crate) mod completer; pub(crate) mod completer;
pub(crate) mod filesystem_shell; pub(crate) mod filesystem_shell;
pub(crate) mod help_shell; pub(crate) mod help_shell;
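
clippy::module_inception fires when a module declares a submodule with its own name (for example a shell module containing shell.rs); the commit keeps the existing layout and opts out at the module root. A compact, self-contained illustration of the pattern being allowed:

// Sketch of the layout the lint objects to; the attribute keeps it as-is.
#[allow(clippy::module_inception)]
mod shell {
    mod shell {
        pub fn name() -> &'static str {
            "filesystem"
        }
    }

    pub fn active() -> &'static str {
        shell::name()
    }
}

fn main() {
    assert_eq!(shell::active(), "filesystem");
}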

View file

@ -97,56 +97,45 @@ impl NuCompleter {
let replace_string = (replace_pos..pos).map(|_| " ").collect::<String>(); let replace_string = (replace_pos..pos).map(|_| " ").collect::<String>();
line_copy.replace_range(replace_pos..pos, &replace_string); line_copy.replace_range(replace_pos..pos, &replace_string);
match nu_parser::parse(&line_copy) { if let Ok(val) = nu_parser::parse(&line_copy) {
Ok(val) => { let source = Text::from(line);
let source = Text::from(line); let pipeline_list = vec![val.clone()];
let pipeline_list = vec![val.clone()]; let mut iterator =
let mut iterator = nu_parser::TokensIterator::all(&pipeline_list, source.clone(), val.span());
nu_parser::TokensIterator::all(&pipeline_list, source.clone(), val.span());
let expand_context = nu_parser::ExpandContext { let expand_context = nu_parser::ExpandContext {
homedir: None, homedir: None,
registry: Box::new(self.commands.clone()), registry: Box::new(self.commands.clone()),
source: &source, source: &source,
}; };
let result = nu_parser::expand_syntax( let result =
&nu_parser::PipelineShape, nu_parser::expand_syntax(&nu_parser::PipelineShape, &mut iterator, &expand_context);
&mut iterator,
&expand_context,
);
if let Ok(result) = result { if let Ok(result) = result {
for command in result.commands.list { for command in result.commands.list {
match command { if let nu_parser::ClassifiedCommand::Internal(nu_parser::InternalCommand {
nu_parser::ClassifiedCommand::Internal( args,
nu_parser::InternalCommand { args, .. }, ..
) => { }) = command
if replace_pos >= args.span.start() {
&& replace_pos <= args.span.end() if replace_pos >= args.span.start() && replace_pos <= args.span.end() {
{ if let Some(named) = args.named {
if let Some(named) = args.named { for (name, _) in named.iter() {
for (name, _) in named.iter() { let full_flag = format!("--{}", name);
let full_flag = format!("--{}", name);
if full_flag.starts_with(&substring) { if full_flag.starts_with(&substring) {
matching_arguments.push( matching_arguments.push(rustyline::completion::Pair {
rustyline::completion::Pair { display: full_flag.clone(),
display: full_flag.clone(), replacement: full_flag,
replacement: full_flag, });
},
);
}
}
} }
} }
} }
_ => {}
} }
} }
} }
} }
_ => {}
} }
matching_arguments matching_arguments
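
The completer rewrite does the same match-to-if-let collapse, this time binding a struct variant's field directly in the pattern instead of nesting a second match. A reduced sketch of pulling a field out of the one interesting variant (the Command enum here is invented):

enum Command {
    Internal { args: Vec<String> },
    External,
}

// Destructure the variant of interest in the if-let pattern itself.
fn flags_of(command: Command) -> Vec<String> {
    let mut flags = Vec::new();
    if let Command::Internal { args } = command {
        for name in args {
            flags.push(format!("--{}", name));
        }
    }
    flags
}

fn main() {
    let cmd = Command::Internal { args: vec!["help".to_string()] };
    assert_eq!(flags_of(cmd), vec!["--help".to_string()]);
    assert!(flags_of(Command::External).is_empty());
}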

View file

@ -212,7 +212,7 @@ impl Shell for FilesystemShell {
return Err(ShellError::labeled_error( return Err(ShellError::labeled_error(
"Can not change to directory", "Can not change to directory",
"is not a directory", "is not a directory",
v.tag().clone(), v.tag(),
)); ));
} }
@ -222,7 +222,7 @@ impl Shell for FilesystemShell {
return Err(ShellError::labeled_error( return Err(ShellError::labeled_error(
"Can not change to directory", "Can not change to directory",
"directory not found", "directory not found",
v.tag().clone(), v.tag(),
)) ))
} }
} }
@ -753,80 +753,78 @@ impl Shell for FilesystemShell {
new_dst.push(fragment); new_dst.push(fragment);
} }
Ok((PathBuf::from(&source_file), PathBuf::from(new_dst))) Ok((PathBuf::from(&source_file), new_dst))
}; };
let sources = sources.paths_applying_with(strategy)?; let sources = sources.paths_applying_with(strategy)?;
for (ref src, ref dst) in sources { for (ref src, ref dst) in sources {
if src.is_dir() { if src.is_dir() && !dst.exists() {
if !dst.exists() { match std::fs::create_dir_all(dst) {
match std::fs::create_dir_all(dst) {
Err(e) => {
return Err(ShellError::labeled_error(
format!(
"Rename {:?} to {:?} aborted. {:}",
entry_file_name,
destination_file_name,
e.to_string(),
),
format!(
"Rename {:?} to {:?} aborted. {:}",
entry_file_name,
destination_file_name,
e.to_string(),
),
name_tag,
));
}
Ok(o) => o,
}
}
}
if src.is_file() {
match std::fs::copy(&src, &dst) {
Err(e) => { Err(e) => {
return Err(ShellError::labeled_error( return Err(ShellError::labeled_error(
format!( format!(
"Rename {:?} to {:?} aborted. {:}", "Rename {:?} to {:?} aborted. {:}",
src, entry_file_name,
destination_file_name, destination_file_name,
e.to_string(), e.to_string(),
), ),
format!( format!(
"Rename {:?} to {:?} aborted. {:}", "Rename {:?} to {:?} aborted. {:}",
src, entry_file_name,
destination_file_name, destination_file_name,
e.to_string(), e.to_string(),
), ),
name_tag, name_tag,
)); ));
} }
Ok(_) => match std::fs::remove_file(&src) { Ok(o) => o,
Err(e) => { }
return Err(ShellError::labeled_error(
format!(
"Rename {:?} to {:?} aborted. {:}",
entry_file_name,
destination_file_name,
e.to_string(),
),
format!(
"Rename {:?} to {:?} aborted. {:}",
entry_file_name,
destination_file_name,
e.to_string(),
),
name_tag,
));
}
Ok(o) => o,
},
};
} }
} }
if src.is_file() {
match std::fs::copy(&src, &dst) {
Err(e) => {
return Err(ShellError::labeled_error(
format!(
"Rename {:?} to {:?} aborted. {:}",
src,
destination_file_name,
e.to_string(),
),
format!(
"Rename {:?} to {:?} aborted. {:}",
src,
destination_file_name,
e.to_string(),
),
name_tag,
));
}
Ok(_) => match std::fs::remove_file(&src) {
Err(e) => {
return Err(ShellError::labeled_error(
format!(
"Rename {:?} to {:?} aborted. {:}",
entry_file_name,
destination_file_name,
e.to_string(),
),
format!(
"Rename {:?} to {:?} aborted. {:}",
entry_file_name,
destination_file_name,
e.to_string(),
),
name_tag,
));
}
Ok(o) => o,
},
};
}
match std::fs::remove_dir_all(entry) { match std::fs::remove_dir_all(entry) {
Err(e) => { Err(e) => {
return Err(ShellError::labeled_error( return Err(ShellError::labeled_error(
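
Alongside dropping needless .clone() and PathBuf::from calls, the filesystem shell folds if src.is_dir() { if !dst.exists() { ... } } into a single condition, the clippy::collapsible_if rewrite. A small sketch of that step:

use std::path::Path;

// clippy::collapsible_if: nested ifs with no else collapse into one && condition.
fn needs_create(src: &Path, dst: &Path) -> bool {
    src.is_dir() && !dst.exists()
}

fn main() {
    println!("{}", needs_create(Path::new("."), Path::new("no-such-dir-here")));
}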

View file

@ -97,7 +97,7 @@ impl HelpShell {
} }
} }
x => { x => {
cmds.push_back(x.clone()); cmds.push_back(x);
} }
} }
@ -129,7 +129,7 @@ impl Shell for HelpShell {
fn set_path(&mut self, path: String) { fn set_path(&mut self, path: String) {
let _ = std::env::set_current_dir(&path); let _ = std::env::set_current_dir(&path);
self.path = path.clone(); self.path = path;
} }
fn ls( fn ls(

View file

@ -74,7 +74,7 @@ impl Highlighter for Helper {
Ok(v) => v, Ok(v) => v,
}; };
let tokens = vec![TokenNode::Pipeline(pipeline.clone())]; let tokens = vec![TokenNode::Pipeline(pipeline)];
let mut tokens = TokensIterator::all(&tokens[..], Text::from(line), v.span()); let mut tokens = TokensIterator::all(&tokens[..], Text::from(line), v.span());
let text = Text::from(line); let text = Text::from(line);

View file

@ -61,7 +61,7 @@ impl ValueShell {
} }
} }
x => { x => {
shell_entries.push_back(x.clone()); shell_entries.push_back(x);
} }
} }
@ -222,7 +222,7 @@ impl Shell for ValueShell {
fn set_path(&mut self, path: String) { fn set_path(&mut self, path: String) {
self.last_path = self.path.clone(); self.last_path = self.path.clone();
self.path = path.clone(); self.path = path;
} }
fn complete( fn complete(

View file

@ -201,15 +201,12 @@ mod tests {
sdx.push("fixtures"); sdx.push("fixtures");
sdx.push("formats"); sdx.push("formats");
match dunce::canonicalize(sdx) { dunce::canonicalize(sdx).expect("Wrong path")
Ok(path) => path,
Err(_) => panic!("Wrong path."),
}
} }
fn structured_sample_record(key: &str, value: &str) -> Value { fn structured_sample_record(key: &str, value: &str) -> Value {
let mut record = TaggedDictBuilder::new(Tag::unknown()); let mut record = TaggedDictBuilder::new(Tag::unknown());
record.insert_untagged(key.clone(), UntaggedValue::string(value)); record.insert_untagged(key, UntaggedValue::string(value));
record.into_value() record.into_value()
} }
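
The test helper replaces an Ok-or-panic match with expect, keeping the same panic message. The same simplification sketched with std's canonicalize standing in for the dunce call used in the repository:

use std::fs;
use std::path::PathBuf;

fn fixtures_dir() -> PathBuf {
    // Was: match fs::canonicalize(".") { Ok(path) => path, Err(_) => panic!("Wrong path.") }
    fs::canonicalize(".").expect("Wrong path")
}

fn main() {
    println!("{}", fixtures_dir().display());
}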

View file

@ -85,7 +85,7 @@ mod pipeline {
); );
assert!( assert!(
!actual.contains("~"), !actual.contains('~'),
format!("'{}' should not contain ~", actual) format!("'{}' should not contain ~", actual)
); );
} }
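
The assertion now searches for a char rather than a one-character string, which is the clippy::single_char_pattern fix. In isolation:

// clippy::single_char_pattern: a one-character pattern is cheaper as a char.
fn main() {
    let actual = "/home/user/nu";
    assert!(!actual.contains('~')); // rather than actual.contains("~")
}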