Mirror of https://github.com/nushell/nushell
Fix painting
parent 6af4dafd87
commit cbab97174e
11 changed files with 133 additions and 312 deletions
@@ -9,7 +9,6 @@ edition = "2018"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
-#rustyline = "4.1.0"
rustyline = "5.0.0"
sysinfo = "0.8.5"
chrono = { version = "0.4.6", features = ["serde"] }

@@ -59,7 +58,7 @@ ctrlc = "3.1.3"
ptree = "0.2"
clipboard = "0.5"
reqwest = "0.9"
-roxmltree = "0.6.0"
+roxmltree = "0.6.1"
pretty = "0.5.2"
nom_locate = { git = "https://github.com/wycats/nom_locate.git", branch = "nom5" }
derive_more = "0.15.0"
@@ -12,8 +12,8 @@ use crate::context::Context;
crate use crate::errors::ShellError;
use crate::evaluate::Scope;
use crate::parser::parse2::span::Spanned;
-use crate::parser::parse2::{PipelineElement, TokenNode};
use crate::parser::registry;
+use crate::parser::{Pipeline, PipelineElement, TokenNode};

use crate::git::current_branch;
use crate::object::Value;

@@ -379,7 +379,9 @@ fn classify_pipeline(
) -> Result<ClassifiedPipeline, ShellError> {
let pipeline = pipeline.as_pipeline()?;

-let commands: Result<Vec<_>, ShellError> = pipeline
+let Pipeline { parts, .. } = pipeline;
+
+let commands: Result<Vec<_>, ShellError> = parts
.iter()
.map(|item| classify_command(&item, context, &source))
.collect();
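The classify_pipeline hunk above is the caller-side effect of the new Pipeline token: instead of treating the parsed value as a plain list, it destructures the Pipeline into its parts and classifies each one, collecting everything into a single Result. A minimal, self-contained sketch of that destructure-and-collect pattern, using hypothetical stand-in types (String parts, a toy ClassifiedCommand) rather than nushell's real ones:

// Hypothetical stand-in types; nushell's real Pipeline and classified types are richer.
#[derive(Debug)]
struct Pipeline {
    parts: Vec<String>,
    post_ws: Option<String>,
}

#[derive(Debug)]
struct ClassifiedCommand(String);

fn classify_command(item: &str) -> Result<ClassifiedCommand, String> {
    if item.is_empty() {
        Err("empty command".to_string())
    } else {
        Ok(ClassifiedCommand(item.to_string()))
    }
}

fn classify_pipeline(pipeline: Pipeline) -> Result<Vec<ClassifiedCommand>, String> {
    // Destructure once; `..` ignores the fields classification does not need.
    let Pipeline { parts, .. } = pipeline;
    // Collecting into Result<Vec<_>, _> stops at the first command that fails.
    parts.iter().map(|item| classify_command(item)).collect()
}

fn main() {
    let pipeline = Pipeline {
        parts: vec!["ls".to_string(), "first 2".to_string()],
        post_ws: None,
    };
    println!("{:?}", classify_pipeline(pipeline));
}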
src/parser.rs (247 lines changed)
@@ -11,6 +11,7 @@ crate use parse2::files::Files;
crate use parse2::flag::Flag;
crate use parse2::operator::Operator;
crate use parse2::parser::{nom_input, pipeline};
+crate use parse2::pipeline::{Pipeline, PipelineElement};
crate use parse2::span::{Span, Spanned};
crate use parse2::text::Text;
crate use parse2::token_tree::TokenNode;
@@ -27,249 +28,3 @@ pub fn parse(input: &str) -> Result<TokenNode, ShellError> {
Err(err) => Err(ShellError::parse_error(err)),
}
}

-// #[allow(unused)]
-// pub fn parse_module(input: &str) -> Result<Module, ShellError> {
-// let _ = pretty_env_logger::try_init();

-// let parser = ModuleParser::new();
-// let tokens = Lexer::new(input, false);

-// trace!(
-// "Tokens: {:?}",
-// tokens.clone().collect::<Result<Vec<_>, _>>()
-// );

-// match parser.parse(tokens) {
-// Ok(val) => Ok(val),
-// Err(err) => Err(ShellError::parse_error(err)),
-// }
-// }

-// #[cfg(test)]
-// mod tests {
-// use super::*;
-// use crate::parser::ast::Pipeline;
-// use pretty_assertions::assert_eq;

-// fn assert_parse(source: &str, expected: Pipeline) {
-// let parsed = match parse(source) {
-// Ok(p) => p,
-// Err(ShellError::Diagnostic(diag)) => {
-// use language_reporting::termcolor;

-// let writer = termcolor::StandardStream::stdout(termcolor::ColorChoice::Auto);
-// let files = crate::parser::span::Files::new(source.to_string());

-// language_reporting::emit(
-// &mut writer.lock(),
-// &files,
-// &diag.diagnostic,
-// &language_reporting::DefaultConfig,
-// )
-// .unwrap();

-// panic!("Test failed")
-// }
-// Err(err) => panic!("Something went wrong during parse: {:#?}", err),
-// };

-// let printed = parsed.print();

-// assert_eq!(parsed, expected);
-// assert_eq!(printed, source);

-// let span = expected.span;

-// let expected_module = ModuleBuilder::spanned_items(
-// vec![Spanned::from_item(RawItem::Expression(expected), span)],
-// span.start,
-// span.end,
-// );

-// assert_parse_module(source, expected_module);
-// }

-// fn assert_parse_module(source: &str, expected: Module) {
-// let parsed = match parse_module(source) {
-// Ok(p) => p,
-// Err(ShellError::Diagnostic(diag)) => {
-// use language_reporting::termcolor;

-// let writer = termcolor::StandardStream::stdout(termcolor::ColorChoice::Auto);
-// let files = crate::parser::span::Files::new(source.to_string());

-// language_reporting::emit(
-// &mut writer.lock(),
-// &files,
-// &diag.diagnostic,
-// &language_reporting::DefaultConfig,
-// )
-// .unwrap();

-// panic!("Test failed")
-// }
-// Err(err) => panic!("Something went wrong during parse: {:#?}", err),
-// };

-// let printed = parsed.print();

-// assert_eq!(parsed, expected);
-// assert_eq!(printed, source);
-// }

-// macro_rules! commands {
-// ( $( ( $name:tt $( $command:ident ( $arg:expr ) )* ) )|* ) => {{
-// use $crate::parser::ast::{Expression, ExpressionBuilder};
-// let mut builder = crate::parser::ast::ExpressionBuilder::new();

-// builder.pipeline(vec![
-// $(
-// (command!($name $($command($arg))*) as (&dyn Fn(&mut ExpressionBuilder) -> Expression))
-// ),*
-// ])
-// }}
-// }

-// macro_rules! command {
-// ($name:ident) => {
-// &|b: &mut $crate::parser::ast::ExpressionBuilder| b.call((
-// &|b: &mut $crate::parser::ast::ExpressionBuilder| b.bare(stringify!($name)),
-// vec![]
-// ))
-// };

-// ($name:ident $( $command:ident ( $body:expr ) )*) => {{
-// use $crate::parser::ast::{Expression, ExpressionBuilder};
-// &|b: &mut ExpressionBuilder| b.call((
-// (&|b: &mut ExpressionBuilder| b.bare(stringify!($name))) as (&dyn Fn(&mut ExpressionBuilder) -> Expression),
-// vec![$( (&|b: &mut ExpressionBuilder| b.$command($body)) as &dyn Fn(&mut ExpressionBuilder) -> Expression ),* ]))

-// }};

-// ($name:ident $( $command:ident ( $body:expr ) )*) => {
-// &|b: &mut $crate::parser::ast::ExpressionBuilder| b.call(|b| b.bare(stringify!($name)), vec![ $( |b| b.$command($body) ),* ])
-// };

-// ($name:tt $( $command:ident ( $body:expr ) )*) => {
-// &|b: &mut $crate::parser::ast::ExpressionBuilder| b.call((&|b| b.bare($name), vec![ $( &|b| b.$command($body) ),* ]))
-// };
-// }

-// #[test]
-// fn parse_simple_command() {
-// assert_parse("ls", commands![(ls)]);
-// }

-// #[test]
-// fn parse_command_with_args() {
-// assert_parse(
-// r#"open Cargo.toml | select package.authors | split-row " ""#,
-// commands![
-// (open bare("Cargo.toml"))
-// | (select bare("package.authors"))
-// | ("split-row" string(" "))
-// ],
-// );

-// assert_parse(r#"git add ."#, commands![("git" bare("add") bare("."))]);

-// assert_parse(
-// "open Cargo.toml | select package.version | echo $it",
-// commands![
-// (open bare("Cargo.toml"))
-// | (select bare("package.version"))
-// | (echo var("it"))
-// ],
-// );

-// assert_parse(
-// "open Cargo.toml --raw",
-// commands![(open bare("Cargo.toml") flag("raw"))],
-// );

-// assert_parse(
-// "open Cargo.toml -r",
-// commands![(open bare("Cargo.toml") shorthand("r"))],
-// );

-// assert_parse(
-// "open Cargo.toml | from-toml | to-toml",
-// commands![(open bare("Cargo.toml")) | ("from-toml") | ("to-toml")],
-// );

-// assert_parse(
-// r#"config --get "ignore dups" | format-list"#,
-// commands![(config flag("get") string("ignore dups")) | ("format-list")],
-// );

-// assert_parse(
-// "open Cargo.toml | from-toml | select dependencies | column serde",
-// commands![
-// (open bare("Cargo.toml"))
-// | ("from-toml")
-// | (select bare("dependencies"))
-// | (column bare("serde"))
-// ],
-// );

-// assert_parse(
-// "config --set tabs 2",
-// commands![(config flag("set") bare("tabs") int(2))],
-// );

-// assert_parse(
-// r#"ls | skip 1 | first 2 | select "file name" | rm $it"#,
-// commands![
-// (ls)
-// | (skip int(1))
-// | (first int(2))
-// | (select string("file name"))
-// | (rm var("it"))
-// ],
-// );

-// assert_parse(
-// r#"git branch --merged | split-row "`n" | where $it != "* master""#,
-// commands![
-// // TODO: Handle escapes correctly. Should we do ` escape because of paths?
-// (git bare("branch") flag("merged")) | ("split-row" string("`n")) | (where binary((&|b| b.var("it"), &|b| b.op("!="), &|b| b.string("* master"))))
-// ],
-// );

-// assert_parse(
-// r#"open input2.json | from-json | select glossary.GlossDiv.GlossList.GlossEntry.GlossDef.GlossSeeAlso | where $it > "GML""#,
-// commands![
-// (open bare("input2.json"))
-// | ("from-json")
-// | (select bare("glossary.GlossDiv.GlossList.GlossEntry.GlossDef.GlossSeeAlso"))
-// | (where binary((&|b| b.var("it"), &|b| b.op(">"), &|b| b.string("GML"))))
-// ]
-// );

-// assert_parse(
-// r"cd ..\.cargo\",
-// commands![
-// (cd bare(r"..\.cargo\"))
-// ],
-// );

-// assert_parse(
-// "ls | where size < 1KB",
-// commands![
-// (ls) | (where binary((&|b| b.bare("size"), &|b| b.op("<"), &|b| b.unit((1, "KB")))))
-// ],
-// );

-// assert_parse(
-// "ls | where { $it.size > 100 }",
-// commands![
-// (ls) | (where block(&|b| b.binary((&|b| b.path((&|b| b.var("it"), vec!["size"])), &|b| b.op(">"), &|b| b.int(100)))))
-// ],
-// )
-// }

-// use crate::parser::ast::{ModuleBuilder, RawItem};
-// use crate::parser::lexer::Spanned;

-// }
@@ -524,6 +524,7 @@ impl Call {
#[derive(new, Debug, Eq, PartialEq, Clone)]
pub struct Pipeline {
crate commands: Vec<Expression>,
+crate trailing: Span,
crate span: Span,
}

@@ -98,7 +98,10 @@ pub fn baseline_parse_next_expr(
vec![string],
);
let path = hir::RawExpression::Path(Box::new(path));
-Spanned { item: path, span: first.span }
+Spanned {
+item: path,
+span: first.span,
+}
}
Spanned {
item: hir::RawExpression::Literal(hir::Literal::String(inner)),

@@ -114,7 +117,10 @@ pub fn baseline_parse_next_expr(
vec![string],
);
let path = hir::RawExpression::Path(Box::new(path));
-Spanned { item: path, span: first.span }
+Spanned {
+item: path,
+span: first.span,
+}
}
Spanned {
item: hir::RawExpression::Variable(..),

@@ -157,6 +163,7 @@ pub fn baseline_parse_semantic_token(
TokenNode::Whitespace(_span) => unreachable!(),
TokenNode::Error(error) => Err(*error.item.clone()),
TokenNode::Path(_path) => unimplemented!(),
+TokenNode::EOF(_span) => unimplemented!(),
}
}

@@ -3,6 +3,7 @@ crate mod files;
crate mod flag;
crate mod operator;
crate mod parser;
+crate mod pipeline;
crate mod span;
crate mod text;
crate mod token_tree;

@@ -10,5 +11,3 @@ crate mod token_tree_builder;
crate mod tokens;
crate mod unit;
crate mod util;

-crate use token_tree::{PipelineElement, TokenNode};
@@ -1,8 +1,8 @@
#![allow(unused)]

use crate::parser::parse2::{
-call_node::*, flag::*, operator::*, span::*, token_tree::*, token_tree_builder::*, tokens::*,
-unit::*,
+call_node::*, flag::*, operator::*, pipeline::*, span::*, token_tree::*, token_tree_builder::*,
+tokens::*, unit::*,
};
use nom;
use nom::branch::*;

@@ -429,34 +429,63 @@ pub fn node(input: NomSpan) -> IResult<NomSpan, TokenNode> {
)
}

pub fn eof(input: NomSpan) -> IResult<NomSpan, TokenNode> {
if input.input_len() == 0 {
Ok((input, TokenNode::EOF(Span::from(input))))
} else {
Err(Err::Error(error_position!(
input,
nom::error::ErrorKind::Eof
)))
}
}

pub fn pipeline(input: NomSpan) -> IResult<NomSpan, TokenNode> {
trace_step(input, "pipeline", |input| {
let start = input.offset;
let (input, head) = tuple((raw_call, opt(space1)))(input)?;
let (input, head) = opt(tuple((raw_call, opt(space1), opt(tag("|")))))(input)?;
let (input, items) = trace_step(
input,
"many0",
many0(tuple((tag("|"), opt(space1), raw_call, opt(space1)))),
many0(tuple((opt(space1), raw_call, opt(space1), opt(tag("|"))))),
)?;

let (input, tail) = tuple((opt(space1), eof))(input)?;
let end = input.offset;

Ok((
input,
TokenTreeBuilder::spanned_pipeline(make_call_list(head, items), (start, end)),
TokenTreeBuilder::spanned_pipeline(
(make_call_list(head, items), tail.0.map(Span::from)),
(start, end),
),
))
})
}

fn make_call_list(
head: (Spanned<CallNode>, Option<NomSpan>),
tail: Vec<(NomSpan, Option<NomSpan>, Spanned<CallNode>, Option<NomSpan>)>,
head: Option<(Spanned<CallNode>, Option<NomSpan>, Option<NomSpan>)>,
items: Vec<(
Option<NomSpan>,
Spanned<CallNode>,
Option<NomSpan>,
Option<NomSpan>,
)>,
) -> Vec<PipelineElement> {
let mut out = vec![];
let el = PipelineElement::new(None, head.0, head.1.map(Span::from));
out.push(el);

for (pipe, ws1, call, ws2) in tail {
let el = PipelineElement::new(ws1.map(Span::from), call, ws2.map(Span::from));
if let Some(head) = head {
let el = PipelineElement::new(None, head.0, head.1.map(Span::from), head.2.map(Span::from));
out.push(el);
}

for (ws1, call, ws2, pipe) in items {
let el = PipelineElement::new(
ws1.map(Span::from),
call,
ws2.map(Span::from),
pipe.map(Span::from),
);
out.push(el);
}

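The parser change above moves the pipe from a prefix of the next call to an optional suffix of the previous one, and finishes the pipeline with an explicit eof parser, so a trailing "|" or trailing whitespace still produces a token tree for the highlighter. A rough standalone sketch of that grammar shape, written with nom 7 combinators over plain &str rather than the project's NomSpan and trace_step machinery (all names below are illustrative only, and the "calls" are just single bare words):

use nom::bytes::complete::tag;
use nom::character::complete::{alphanumeric1, space0};
use nom::combinator::{eof, opt};
use nom::multi::many0;
use nom::sequence::tuple;
use nom::IResult;

// One element: optional leading spaces, a bare word, optional trailing
// spaces, and an optional trailing "|" that belongs to this element.
fn element(input: &str) -> IResult<&str, &str> {
    let (input, (_, word, _, _)) = tuple((space0, alphanumeric1, space0, opt(tag("|"))))(input)?;
    Ok((input, word))
}

fn pipeline(input: &str) -> IResult<&str, Vec<&str>> {
    let (input, words) = many0(element)(input)?;
    // Consume any trailing whitespace, then require end of input.
    let (input, _) = tuple((space0, eof))(input)?;
    Ok((input, words))
}

fn main() {
    // A trailing pipe or trailing whitespace no longer makes the line fail to parse.
    assert_eq!(pipeline("ls | first "), Ok(("", vec!["ls", "first"])));
    assert_eq!(pipeline("ls |"), Ok(("", vec!["ls"])));
    assert_eq!(pipeline(""), Ok(("", vec![])));
    println!("ok");
}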
src/parser/parse2/pipeline.rs (new file, 18 lines)
@@ -0,0 +1,18 @@
use crate::parser::{CallNode, Span, Spanned};
use derive_new::new;
use getset::Getters;

#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, new)]
pub struct Pipeline {
crate parts: Vec<PipelineElement>,
crate post_ws: Option<Span>,
}

#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)]
pub struct PipelineElement {
pub pre_ws: Option<Span>,
#[get = "crate"]
call: Spanned<CallNode>,
pub post_ws: Option<Span>,
pub post_pipe: Option<Span>,
}
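The reason the new Pipeline keeps post_ws, and each PipelineElement now records pre_ws, post_ws, and post_pipe, is that the highlighter has to re-emit every byte of the user's input, just recolored. A simplified, self-contained sketch of that round-trip idea with hypothetical Span and PipelineElement stand-ins (no actual coloring):

// Hypothetical, simplified stand-ins for the real Span / PipelineElement above.
#[derive(Debug, Clone, Copy)]
struct Span {
    start: usize,
    end: usize,
}

impl Span {
    fn slice<'a>(&self, line: &'a str) -> &'a str {
        &line[self.start..self.end]
    }
}

#[derive(Debug)]
struct PipelineElement {
    pre_ws: Option<Span>,
    call: Span,
    post_ws: Option<Span>,
    post_pipe: Option<Span>,
}

// Walk the recorded spans in order; a real highlighter would wrap each slice
// in a color instead of copying it verbatim.
fn repaint(line: &str, parts: &[PipelineElement], post_ws: Option<Span>) -> String {
    let mut out = String::new();
    for el in parts {
        if let Some(ws) = el.pre_ws {
            out.push_str(ws.slice(line));
        }
        out.push_str(el.call.slice(line));
        if let Some(ws) = el.post_ws {
            out.push_str(ws.slice(line));
        }
        if let Some(pipe) = el.post_pipe {
            out.push_str(pipe.slice(line));
        }
    }
    if let Some(ws) = post_ws {
        out.push_str(ws.slice(line));
    }
    out
}

fn main() {
    let line = "ls | first 2 ";
    let parts = vec![
        PipelineElement {
            pre_ws: None,
            call: Span { start: 0, end: 2 },
            post_ws: Some(Span { start: 2, end: 3 }),
            post_pipe: Some(Span { start: 3, end: 4 }),
        },
        PipelineElement {
            pre_ws: Some(Span { start: 4, end: 5 }),
            call: Span { start: 5, end: 12 },
            post_ws: None,
            post_pipe: None,
        },
    ];
    let post_ws = Some(Span { start: 12, end: 13 });
    assert_eq!(repaint(line, &parts, post_ws), line);
    println!("round-trips: {:?}", repaint(line, &parts, post_ws));
}

The assert in main checks exactly that property: concatenating the recorded slices reproduces the original input, so styling can never drop or duplicate characters.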
@@ -1,9 +1,8 @@
use crate::errors::ShellError;
-use crate::parser::parse2::{call_node::*, flag::*, operator::*, span::*, tokens::*};
+use crate::parser::parse2::{call_node::*, flag::*, operator::*, pipeline::*, span::*, tokens::*};
use crate::Text;
use derive_new::new;
use enum_utils::FromStr;
use getset::Getters;

#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd)]
pub enum TokenNode {

@@ -11,7 +10,7 @@ pub enum TokenNode {
#[allow(unused)]
Call(Spanned<CallNode>),
Delimited(Spanned<DelimitedNode>),
-Pipeline(Spanned<Vec<PipelineElement>>),
+Pipeline(Spanned<Pipeline>),
Operator(Spanned<Operator>),
Flag(Spanned<Flag>),
Identifier(Span),

@@ -19,6 +18,7 @@ pub enum TokenNode {
#[allow(unused)]
Error(Spanned<Box<ShellError>>),
Path(Spanned<PathNode>),
+EOF(Span),
}

impl TokenNode {

@@ -34,6 +34,7 @@ impl TokenNode {
TokenNode::Whitespace(s) => *s,
TokenNode::Error(s) => s.span,
TokenNode::Path(s) => s.span,
+TokenNode::EOF(s) => *s,
}
}

@@ -66,7 +67,7 @@ impl TokenNode {
}
}

-pub fn as_pipeline(&self) -> Result<Vec<PipelineElement>, ShellError> {
+pub fn as_pipeline(&self) -> Result<Pipeline, ShellError> {
match self {
TokenNode::Pipeline(Spanned { item, .. }) => Ok(item.clone()),
_ => Err(ShellError::string("unimplemented")),

@@ -92,11 +93,3 @@ pub struct PathNode {
head: Box<TokenNode>,
tail: Vec<TokenNode>,
}

-#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)]
-pub struct PipelineElement {
-pub pre_ws: Option<Span>,
-#[get = "crate"]
-call: Spanned<CallNode>,
-pub post_ws: Option<Span>,
-}
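The as_pipeline change above is the API pivot for the rest of the commit: callers now receive the whole Pipeline, so the trailing-whitespace span travels together with the parts instead of being lost. A tiny sketch of that accessor shape, with simplified stand-in types rather than the real Spanned values:

// Simplified stand-ins; the real TokenNode variants carry Spanned values.
#[derive(Debug, Clone)]
struct Pipeline {
    parts: Vec<String>,
    post_ws: Option<String>,
}

#[derive(Debug, Clone)]
enum TokenNode {
    Pipeline(Pipeline),
    Whitespace(String),
}

impl TokenNode {
    // Callers get the whole Pipeline, so trailing state travels with the parts.
    fn as_pipeline(&self) -> Result<Pipeline, String> {
        match self {
            TokenNode::Pipeline(pipeline) => Ok(pipeline.clone()),
            _ => Err("unimplemented".to_string()),
        }
    }
}

fn main() {
    let node = TokenNode::Pipeline(Pipeline {
        parts: vec!["ls".to_string()],
        post_ws: Some(" ".to_string()),
    });
    let other = TokenNode::Whitespace(" ".to_string());
    println!("{:?}", node.as_pipeline());
    println!("{:?}", other.as_pipeline());
}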
@@ -3,10 +3,9 @@ use crate::prelude::*;

use crate::parser::parse2::flag::{Flag, FlagKind};
use crate::parser::parse2::operator::Operator;
+use crate::parser::parse2::pipeline::{Pipeline, PipelineElement};
use crate::parser::parse2::span::{Span, Spanned};
-use crate::parser::parse2::token_tree::{
-DelimitedNode, Delimiter, PathNode, PipelineElement, TokenNode,
-};
+use crate::parser::parse2::token_tree::{DelimitedNode, Delimiter, PathNode, TokenNode};
use crate::parser::parse2::tokens::{RawToken, Token};
use crate::parser::parse2::unit::Unit;
use crate::parser::CallNode;

@@ -47,7 +46,7 @@ impl TokenTreeBuilder {

let mut out: Vec<PipelineElement> = vec![];

-let mut input = input.into_iter();
+let mut input = input.into_iter().peekable();
let (pre, call, post) = input
.next()
.expect("A pipeline must contain at least one element");

@@ -55,33 +54,48 @@
let pre_span = pre.map(|pre| b.consume(&pre));
let call = call(b);
let post_span = post.map(|post| b.consume(&post));
let pipe = input.peek().map(|_| Span::from(b.consume("|")));
out.push(PipelineElement::new(
pre_span.map(Span::from),
call,
post_span.map(Span::from),
pipe,
));

for (pre, call, post) in input {
b.consume("|");
let pre_span = pre.map(|pre| b.consume(&pre));
let call = call(b);
let post_span = post.map(|post| b.consume(&post));
loop {
match input.next() {
None => break,
Some((pre, call, post)) => {
let pre_span = pre.map(|pre| b.consume(&pre));
let call = call(b);
let post_span = post.map(|post| b.consume(&post));

out.push(PipelineElement::new(
pre_span.map(Span::from),
call,
post_span.map(Span::from),
));
let pipe = input.peek().map(|_| Span::from(b.consume("|")));

out.push(PipelineElement::new(
pre_span.map(Span::from),
call,
post_span.map(Span::from),
pipe,
));
}
}
}

let end = b.pos;

TokenTreeBuilder::spanned_pipeline(out, (start, end))
TokenTreeBuilder::spanned_pipeline((out, None), (start, end))
})
}

pub fn spanned_pipeline(input: Vec<PipelineElement>, span: impl Into<Span>) -> TokenNode {
TokenNode::Pipeline(Spanned::from_item(input, span))
pub fn spanned_pipeline(
input: (Vec<PipelineElement>, Option<Span>),
span: impl Into<Span>,
) -> TokenNode {
TokenNode::Pipeline(Spanned::from_item(
Pipeline::new(input.0, input.1.into()),
span,
))
}

pub fn op(input: impl Into<Operator>) -> CurriedToken {
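In the builder change above, whether an element owns a trailing "|" is decided by peeking at the iterator before the separator is consumed. The same look-ahead pattern in a self-contained form (the names and the string joining here are purely illustrative, not the builder's real types):

// Minimal sketch of the peek-to-attach-the-separator pattern: while building
// elements, look ahead with Peekable to decide whether the current element
// should carry the trailing "|".
fn join_with_owned_pipes(words: &[&str]) -> Vec<String> {
    let mut out = Vec::new();
    let mut iter = words.iter().peekable();
    while let Some(word) = iter.next() {
        let mut element = word.to_string();
        if iter.peek().is_some() {
            // The separator belongs to the element before it.
            element.push_str(" |");
        }
        out.push(element);
    }
    out
}

fn main() {
    assert_eq!(
        join_with_owned_pipes(&["ls", "first 2"]),
        vec!["ls |".to_string(), "first 2".to_string()]
    );
    println!("ok");
}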
@@ -2,7 +2,7 @@ use crate::parser::nom_input
use crate::parser::parse2::span::Spanned;
use crate::parser::parse2::token_tree::TokenNode;
use crate::parser::parse2::tokens::RawToken;
-use crate::parser::parse2::PipelineElement;
+use crate::parser::{Pipeline, PipelineElement};
use crate::prelude::*;
use crate::shell::completer::NuCompleter;
use ansi_term::Color;

@@ -61,34 +61,31 @@ impl Highlighter for Helper {
let tokens = crate::parser::pipeline(nom_input(line));

match tokens {
Err(_) => Cow::Borrowed(line),
Err(e) => {
println!("error: {:?}", e);
Cow::Borrowed(line)
}
Ok((_rest, v)) => {
let mut out = String::new();
let tokens = match v.as_pipeline() {
let pipeline = match v.as_pipeline() {
Err(_) => return Cow::Borrowed(line),
Ok(v) => v,
};

let mut iter = tokens.into_iter();

// match iter.next() {
// None => return Cow::Owned(out),
// Some(v) => out.push_str(v.span().slice(line)),
// };
let mut first = true;
let Pipeline { parts, post_ws } = pipeline;
let mut iter = parts.into_iter();

loop {
match iter.next() {
None => return Cow::Owned(out),
Some(token) => {
let styled = paint_pipeline_element(&token, line);

if !first {
out.push_str("|");
} else {
first = false;
None => {
if let Some(ws) = post_ws {
out.push_str(ws.slice(line));
}

return Cow::Owned(out);
}
Some(token) => {
let styled = paint_pipeline_element(&token, line);
out.push_str(&styled.to_string());
}
}

@@ -133,6 +130,7 @@ fn paint_token_node(token_node: &TokenNode, line: &str) -> String {
item: RawToken::Bare,
..
}) => Color::Green.normal().paint(token_node.span().slice(line)),
+TokenNode::EOF(_) => return format!(""),
};

styled.to_string()

@@ -144,6 +142,7 @@ fn paint_pipeline_element(pipeline_element: &PipelineElement, line: &str) -> Str
if let Some(ws) = pipeline_element.pre_ws {
styled.push_str(&Color::White.normal().paint(ws.slice(line)));
}

styled.push_str(&paint_token_node(pipeline_element.call().head(), line));

if let Some(children) = pipeline_element.call().children() {

@@ -151,10 +150,15 @@ fn paint_pipeline_element(pipeline_element: &PipelineElement, line: &str) -> Str
styled.push_str(&paint_token_node(child, line));
}
}

if let Some(ws) = pipeline_element.post_ws {
styled.push_str(&Color::White.normal().paint(ws.slice(line)));
}

+if let Some(_) = pipeline_element.post_pipe {
+styled.push_str(&Color::Purple.paint("|"));
+}
+
styled.to_string()
}

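The highlighter above rebuilds the line by slicing each recorded span back out of the input and styling it, and the new post_pipe span is what lets the "|" itself be painted (purple in the diff). A minimal sketch of that painting step using the ansi_term crate the file already imports; the pieces and color choices below are made up for illustration, with the spans already reduced to plain &str slices:

use ansi_term::Color;

// Append each piece wrapped in its color; the concatenation is still the
// original text, only with ANSI styling around each slice.
fn paint_line(pieces: &[(&str, Color)]) -> String {
    let mut out = String::new();
    for &(text, color) in pieces {
        out.push_str(&color.normal().paint(text).to_string());
    }
    out
}

fn main() {
    // Hypothetical coloring for "ls | first 2": bare words green, the pipe
    // purple, whitespace white, mirroring paint_pipeline_element above.
    let styled = paint_line(&[
        ("ls", Color::Green),
        (" ", Color::White),
        ("|", Color::Purple),
        (" ", Color::White),
        ("first 2", Color::Green),
    ]);
    println!("{}", styled);
}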