Finish the job of moving shapes into the stream

This commit should finish the `coloring_in_tokens` feature, which moves
the shape accumulator into the token stream. This allows rollbacks of
the token stream to also roll back any shapes that were added.
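
As a rough sketch of that rollback behavior (simplified, standalone types; the real `Checkpoint` in the diff below also restores the iterator's `index` and `seen` set when it is dropped uncommitted):

// Minimal sketch of shape rollback. `Stream`, `Checkpoint`, and the stand-in
// `FlatShape` here are illustrative, not the crate's actual types.
#[derive(Debug, Clone, Copy)]
struct FlatShape;

struct Stream {
    shapes: Vec<FlatShape>,
}

struct Checkpoint<'a> {
    stream: &'a mut Stream,
    shape_start: usize,
    committed: bool,
}

impl<'a> Checkpoint<'a> {
    fn commit(mut self) {
        // Marking the checkpoint committed means Drop leaves the shapes alone.
        self.committed = true;
    }
}

impl<'a> Drop for Checkpoint<'a> {
    fn drop(&mut self) {
        if !self.committed {
            // Rolling back the stream also rolls back any shapes added since
            // the checkpoint was taken.
            self.stream.shapes.truncate(self.shape_start);
        }
    }
}

impl Stream {
    fn checkpoint(&mut self) -> Checkpoint<'_> {
        let shape_start = self.shapes.len();
        Checkpoint {
            stream: self,
            shape_start,
            committed: false,
        }
    }
}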

This commit also adds a much nicer syntax highlighter trace, which shows
all of the paths the highlighter took to arrive at a particular coloring
output. The change is fairly substantial, but it makes the coloring flow
much easier to follow. I intend to give the normal parser a similar
tracing view.
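
A loose sketch of the tracing idea, using illustrative names; the actual `Tracer` added here records the shapes emitted inside each frame, distinguishes success, failure, and EOF frames, and renders the result as a tree via `ptree`:

// Illustrative frame-stack tracer: each coloring rule opens a named frame,
// nested attempts become children, and each frame records whether it succeeded.
struct Frame {
    description: &'static str,
    children: Vec<Frame>,
    succeeded: bool,
}

struct Tracer {
    stack: Vec<Frame>,
    finished: Vec<Frame>,
}

impl Tracer {
    fn new() -> Tracer {
        Tracer { stack: vec![], finished: vec![] }
    }

    // Open a frame before trying a shape.
    fn start(&mut self, description: &'static str) {
        self.stack.push(Frame { description, children: vec![], succeeded: false });
    }

    // Close the current frame and attach it to its parent (or the root list).
    fn finish(&mut self, succeeded: bool) {
        let mut frame = self.stack.pop().expect("unbalanced trace frames");
        frame.succeeded = succeeded;
        match self.stack.last_mut() {
            Some(parent) => parent.children.push(frame),
            None => self.finished.push(frame),
        }
    }

    // Print every path the highlighter took and whether it panned out.
    fn print(frames: &[Frame], depth: usize) {
        for frame in frames {
            let status = if frame.succeeded { "ok" } else { "failed" };
            println!("{}{} ({})", "  ".repeat(depth), frame.description, status);
            Tracer::print(&frame.children, depth + 1);
        }
    }
}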

In general, this change also fleshes out the concept of "atomic" token
stream operations.
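
Building on the checkpoint sketch above, an "atomic" operation runs a block against the stream and keeps its effects only if the block succeeds. The commit's own code goes through a `Checkpoint` (which also restores the token index), but the shape-rollback portion looks roughly like this:

impl Stream {
    // Run `block` atomically: keep the shapes it pushed only on success.
    fn atomic<T, E>(
        &mut self,
        block: impl FnOnce(&mut Stream) -> Result<T, E>,
    ) -> Result<T, E> {
        let shape_start = self.shapes.len();

        match block(self) {
            Ok(value) => Ok(value),
            Err(err) => {
                // On failure, roll the shape accumulator back to its prior length.
                self.shapes.truncate(shape_start);
                Err(err)
            }
        }
    }
}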

A good next step would be to try to make the parser more
error-correcting, using the coloring infrastructure. A follow-up step
would involve merging the parser and highlighter shapes themselves.
Yehuda Katz 2019-10-21 08:18:43 -07:00
parent 82b24d9beb
commit 6a7c00eaef
22 changed files with 888 additions and 361 deletions


@@ -84,7 +84,7 @@ heim = {version = "0.0.8", optional = true }
 battery = {version = "0.7.4", optional = true }
 rawkey = {version = "0.1.2", optional = true }
 clipboard = {version = "0.5", optional = true }
-ptree = {version = "0.2", optional = true }
+ptree = {version = "0.2" }
 image = { version = "0.22.2", default_features = false, features = ["png_codec", "jpeg"], optional = true }
 
 [features]
@@ -95,7 +95,7 @@ binaryview = ["image", "crossterm"]
 sys = ["heim", "battery"]
 ps = ["heim"]
 # trace = ["nom-tracable/trace"]
-all = ["raw-key", "textview", "binaryview", "sys", "ps", "clipboard", "ptree"]
+all = ["raw-key", "textview", "binaryview", "sys", "ps", "clipboard"]
 
 [dependencies.rusqlite]
 version = "0.20.0"


@@ -73,9 +73,7 @@ pub fn interactive_fuzzy_search(lines: &Vec<&str>, max_results: usize) -> Select
                     searchinput.pop();
                     selected = 0;
                 }
-                _ => {
-                    // println!("OTHER InputEvent: {:?}", k);
-                }
+                _ => {}
             },
             _ => {}
         }


@@ -3,9 +3,6 @@ use log::LevelFilter;
 use std::error::Error;
 
 fn main() -> Result<(), Box<dyn Error>> {
-    #[cfg(feature1)]
-    println!("feature1 is enabled");
-
     let matches = App::new("nushell")
         .version(clap::crate_version!())
         .arg(


@@ -14,7 +14,6 @@ pub(crate) use parse::files::Files;
 pub(crate) use parse::flag::{Flag, FlagKind};
 pub(crate) use parse::operator::Operator;
 pub(crate) use parse::parser::{nom_input, pipeline};
-pub(crate) use parse::pipeline::{Pipeline, PipelineElement};
 pub(crate) use parse::text::Text;
 pub(crate) use parse::token_tree::{DelimitedNode, Delimiter, TokenNode};
 pub(crate) use parse::tokens::{RawNumber, RawToken};


@@ -61,6 +61,10 @@ impl ColorSyntax for ExternalTokensShape
     type Info = ();
     type Input = ();
 
+    fn name(&self) -> &'static str {
+        "ExternalTokensShape"
+    }
+
     fn color_syntax<'a, 'b>(
         &self,
         _input: &(),
@@ -192,6 +196,10 @@ impl ColorSyntax for ExternalExpression
     type Info = ExternalExpressionResult;
     type Input = ();
 
+    fn name(&self) -> &'static str {
+        "ExternalExpression"
+    }
+
     fn color_syntax<'a, 'b>(
         &self,
         _input: &(),
@@ -212,7 +220,7 @@ impl ColorSyntax for ExternalExpression
             Ok(atom) => atom,
         };
 
-        atom.color_tokens(token_nodes.mut_shapes());
+        token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes));
         return ExternalExpressionResult::Processed;
     }
 }


@ -11,16 +11,15 @@ use crate::parser::hir::expand_external_tokens::ExternalTokensShape;
use crate::parser::hir::syntax_shape::block::AnyBlockShape; use crate::parser::hir::syntax_shape::block::AnyBlockShape;
use crate::parser::hir::tokens_iterator::Peeked; use crate::parser::hir::tokens_iterator::Peeked;
use crate::parser::parse_command::{parse_command_tail, CommandTailShape}; use crate::parser::parse_command::{parse_command_tail, CommandTailShape};
use crate::parser::PipelineElement;
use crate::parser::{ use crate::parser::{
hir, hir,
hir::{debug_tokens, TokensIterator}, hir::{debug_tokens, TokensIterator},
Operator, Pipeline, RawToken, TokenNode, Operator, RawToken, TokenNode,
}; };
use crate::prelude::*; use crate::prelude::*;
use derive_new::new; use derive_new::new;
use getset::Getters; use getset::Getters;
use log::{self, log_enabled, trace}; use log::{self, trace};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
@ -41,6 +40,11 @@ pub(crate) use self::expression::variable_path::{
pub(crate) use self::expression::{continue_expression, AnyExpressionShape}; pub(crate) use self::expression::{continue_expression, AnyExpressionShape};
pub(crate) use self::flat_shape::FlatShape; pub(crate) use self::flat_shape::FlatShape;
#[cfg(not(coloring_in_tokens))]
use crate::parser::parse::pipeline::Pipeline;
#[cfg(not(coloring_in_tokens))]
use log::log_enabled;
#[derive(Debug, Copy, Clone, Serialize, Deserialize)] #[derive(Debug, Copy, Clone, Serialize, Deserialize)]
pub enum SyntaxShape { pub enum SyntaxShape {
Any, Any,
@ -110,6 +114,10 @@ impl FallibleColorSyntax for SyntaxShape {
type Info = (); type Info = ();
type Input = (); type Input = ();
fn name(&self) -> &'static str {
"SyntaxShape"
}
fn color_syntax<'a, 'b>( fn color_syntax<'a, 'b>(
&self, &self,
_input: &(), _input: &(),
@ -241,6 +249,8 @@ pub trait FallibleColorSyntax: std::fmt::Debug + Copy {
type Info; type Info;
type Input; type Input;
fn name(&self) -> &'static str;
fn color_syntax<'a, 'b>( fn color_syntax<'a, 'b>(
&self, &self,
input: &Self::Input, input: &Self::Input,
@ -282,6 +292,8 @@ pub trait ColorSyntax: std::fmt::Debug + Copy {
type Info; type Info;
type Input; type Input;
fn name(&self) -> &'static str;
fn color_syntax<'a, 'b>( fn color_syntax<'a, 'b>(
&self, &self,
input: &Self::Input, input: &Self::Input,
@ -290,24 +302,6 @@ pub trait ColorSyntax: std::fmt::Debug + Copy {
) -> Self::Info; ) -> Self::Info;
} }
// impl<T> ColorSyntax for T
// where
// T: FallibleColorSyntax,
// {
// type Info = Result<T::Info, ShellError>;
// type Input = T::Input;
// fn color_syntax<'a, 'b>(
// &self,
// input: &Self::Input,
// token_nodes: &'b mut TokensIterator<'a>,
// context: &ExpandContext,
// shapes: &mut Vec<Spanned<FlatShape>>,
// ) -> Result<T::Info, ShellError> {
// FallibleColorSyntax::color_syntax(self, input, token_nodes, context, shapes)
// }
// }
pub(crate) trait ExpandSyntax: std::fmt::Debug + Copy { pub(crate) trait ExpandSyntax: std::fmt::Debug + Copy {
type Output: std::fmt::Debug; type Output: std::fmt::Debug;
@ -323,18 +317,18 @@ pub(crate) fn expand_syntax<'a, 'b, T: ExpandSyntax>(
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext, context: &ExpandContext,
) -> Result<T::Output, ShellError> { ) -> Result<T::Output, ShellError> {
trace!(target: "nu::expand_syntax", "before {} :: {:?}", std::any::type_name::<T>(), debug_tokens(token_nodes, context.source)); trace!(target: "nu::expand_syntax", "before {} :: {:?}", std::any::type_name::<T>(), debug_tokens(token_nodes.state(), context.source));
let result = shape.expand_syntax(token_nodes, context); let result = shape.expand_syntax(token_nodes, context);
match result { match result {
Err(err) => { Err(err) => {
trace!(target: "nu::expand_syntax", "error :: {} :: {:?}", err, debug_tokens(token_nodes, context.source)); trace!(target: "nu::expand_syntax", "error :: {} :: {:?}", err, debug_tokens(token_nodes.state(), context.source));
Err(err) Err(err)
} }
Ok(result) => { Ok(result) => {
trace!(target: "nu::expand_syntax", "ok :: {:?} :: {:?}", result, debug_tokens(token_nodes, context.source)); trace!(target: "nu::expand_syntax", "ok :: {:?} :: {:?}", result, debug_tokens(token_nodes.state(), context.source));
Ok(result) Ok(result)
} }
} }
@ -347,12 +341,12 @@ pub fn color_syntax<'a, 'b, T: ColorSyntax<Info = U, Input = ()>, U>(
context: &ExpandContext, context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>, shapes: &mut Vec<Spanned<FlatShape>>,
) -> ((), U) { ) -> ((), U) {
trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::<T>(), debug_tokens(token_nodes, context.source)); trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::<T>(), debug_tokens(token_nodes.state(), context.source));
let len = shapes.len(); let len = shapes.len();
let result = shape.color_syntax(&(), token_nodes, context, shapes); let result = shape.color_syntax(&(), token_nodes, context, shapes);
trace!(target: "nu::color_syntax", "ok :: {:?}", debug_tokens(token_nodes, context.source)); trace!(target: "nu::color_syntax", "ok :: {:?}", debug_tokens(token_nodes.state(), context.source));
if log_enabled!(target: "nu::color_syntax", log::Level::Trace) { if log_enabled!(target: "nu::color_syntax", log::Level::Trace) {
trace!(target: "nu::color_syntax", "after {}", std::any::type_name::<T>()); trace!(target: "nu::color_syntax", "after {}", std::any::type_name::<T>());
@ -375,26 +369,12 @@ pub fn color_syntax<'a, 'b, T: ColorSyntax<Info = U, Input = ()>, U>(
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext, context: &ExpandContext,
) -> ((), U) { ) -> ((), U) {
trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::<T>(), debug_tokens(token_nodes, context.source)); (
(),
let len = token_nodes.shapes().len(); token_nodes.color_frame(shape.name(), |token_nodes| {
let result = shape.color_syntax(&(), token_nodes, context); shape.color_syntax(&(), token_nodes, context)
}),
trace!(target: "nu::color_syntax", "ok :: {:?}", debug_tokens(token_nodes, context.source)); )
if log_enabled!(target: "nu::color_syntax", log::Level::Trace) {
trace!(target: "nu::color_syntax", "after {}", std::any::type_name::<T>());
if len < token_nodes.shapes().len() {
for i in len..(token_nodes.shapes().len()) {
trace!(target: "nu::color_syntax", "new shape :: {:?}", token_nodes.shapes()[i]);
}
} else {
trace!(target: "nu::color_syntax", "no new shapes");
}
}
((), result)
} }
#[cfg(not(coloring_in_tokens))] #[cfg(not(coloring_in_tokens))]
@ -404,7 +384,7 @@ pub fn color_fallible_syntax<'a, 'b, T: FallibleColorSyntax<Info = U, Input = ()
context: &ExpandContext, context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>, shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<U, ShellError> { ) -> Result<U, ShellError> {
trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::<T>(), debug_tokens(token_nodes, context.source)); trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::<T>(), debug_tokens(token_nodes.state(), context.source));
if token_nodes.at_end() { if token_nodes.at_end() {
trace!(target: "nu::color_syntax", "at eof"); trace!(target: "nu::color_syntax", "at eof");
@ -414,7 +394,7 @@ pub fn color_fallible_syntax<'a, 'b, T: FallibleColorSyntax<Info = U, Input = ()
let len = shapes.len(); let len = shapes.len();
let result = shape.color_syntax(&(), token_nodes, context, shapes); let result = shape.color_syntax(&(), token_nodes, context, shapes);
trace!(target: "nu::color_syntax", "ok :: {:?}", debug_tokens(token_nodes, context.source)); trace!(target: "nu::color_syntax", "ok :: {:?}", debug_tokens(token_nodes.state(), context.source));
if log_enabled!(target: "nu::color_syntax", log::Level::Trace) { if log_enabled!(target: "nu::color_syntax", log::Level::Trace) {
trace!(target: "nu::color_syntax", "after {}", std::any::type_name::<T>()); trace!(target: "nu::color_syntax", "after {}", std::any::type_name::<T>());
@ -437,31 +417,9 @@ pub fn color_fallible_syntax<'a, 'b, T: FallibleColorSyntax<Info = U, Input = ()
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext, context: &ExpandContext,
) -> Result<U, ShellError> { ) -> Result<U, ShellError> {
trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::<T>(), debug_tokens(token_nodes, context.source)); token_nodes.color_fallible_frame(shape.name(), |token_nodes| {
shape.color_syntax(&(), token_nodes, context)
if token_nodes.at_end() { })
trace!(target: "nu::color_syntax", "at eof");
return Err(ShellError::unexpected_eof("coloring", Tag::unknown()));
}
let len = token_nodes.shapes().len();
let result = shape.color_syntax(&(), token_nodes, context);
trace!(target: "nu::color_syntax", "ok :: {:?}", debug_tokens(token_nodes, context.source));
if log_enabled!(target: "nu::color_syntax", log::Level::Trace) {
trace!(target: "nu::color_syntax", "after {}", std::any::type_name::<T>());
if len < token_nodes.shapes().len() {
for i in len..(token_nodes.shapes().len()) {
trace!(target: "nu::color_syntax", "new shape :: {:?}", token_nodes.shapes()[i]);
}
} else {
trace!(target: "nu::color_syntax", "no new shapes");
}
}
result
} }
#[cfg(not(coloring_in_tokens))] #[cfg(not(coloring_in_tokens))]
@ -472,12 +430,12 @@ pub fn color_syntax_with<'a, 'b, T: ColorSyntax<Info = U, Input = I>, U, I>(
context: &ExpandContext, context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>, shapes: &mut Vec<Spanned<FlatShape>>,
) -> ((), U) { ) -> ((), U) {
trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::<T>(), debug_tokens(token_nodes, context.source)); trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::<T>(), debug_tokens(token_nodes.state(), context.source));
let len = shapes.len(); let len = shapes.len();
let result = shape.color_syntax(input, token_nodes, context, shapes); let result = shape.color_syntax(input, token_nodes, context, shapes);
trace!(target: "nu::color_syntax", "ok :: {:?}", debug_tokens(token_nodes, context.source)); trace!(target: "nu::color_syntax", "ok :: {:?}", debug_tokens(token_nodes.state(), context.source));
if log_enabled!(target: "nu::color_syntax", log::Level::Trace) { if log_enabled!(target: "nu::color_syntax", log::Level::Trace) {
trace!(target: "nu::color_syntax", "after {}", std::any::type_name::<T>()); trace!(target: "nu::color_syntax", "after {}", std::any::type_name::<T>());
@ -501,26 +459,12 @@ pub fn color_syntax_with<'a, 'b, T: ColorSyntax<Info = U, Input = I>, U, I>(
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext, context: &ExpandContext,
) -> ((), U) { ) -> ((), U) {
trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::<T>(), debug_tokens(token_nodes, context.source)); (
(),
let len = token_nodes.shapes().len(); token_nodes.color_frame(shape.name(), |token_nodes| {
let result = shape.color_syntax(input, token_nodes, context); shape.color_syntax(input, token_nodes, context)
}),
trace!(target: "nu::color_syntax", "ok :: {:?}", debug_tokens(token_nodes, context.source)); )
if log_enabled!(target: "nu::color_syntax", log::Level::Trace) {
trace!(target: "nu::color_syntax", "after {}", std::any::type_name::<T>());
if len < token_nodes.shapes().len() {
for i in len..(token_nodes.shapes().len()) {
trace!(target: "nu::color_syntax", "new shape :: {:?}", token_nodes.shapes()[i]);
}
} else {
trace!(target: "nu::color_syntax", "no new shapes");
}
}
((), result)
} }
#[cfg(not(coloring_in_tokens))] #[cfg(not(coloring_in_tokens))]
@ -531,31 +475,9 @@ pub fn color_fallible_syntax_with<'a, 'b, T: FallibleColorSyntax<Info = U, Input
context: &ExpandContext, context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>, shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<U, ShellError> { ) -> Result<U, ShellError> {
trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::<T>(), debug_tokens(token_nodes, context.source)); token_nodes.color_fallible_frame(std::any::type_name::<T>(), |token_nodes| {
shape.color_syntax(input, token_nodes, context, shapes)
if token_nodes.at_end() { })
trace!(target: "nu::color_syntax", "at eof");
return Err(ShellError::unexpected_eof("coloring", Tag::unknown()));
}
let len = shapes.len();
let result = shape.color_syntax(input, token_nodes, context, shapes);
trace!(target: "nu::color_syntax", "ok :: {:?}", debug_tokens(token_nodes, context.source));
if log_enabled!(target: "nu::color_syntax", log::Level::Trace) {
trace!(target: "nu::color_syntax", "after {}", std::any::type_name::<T>());
if len < shapes.len() {
for i in len..(shapes.len()) {
trace!(target: "nu::color_syntax", "new shape :: {:?}", shapes[i]);
}
} else {
trace!(target: "nu::color_syntax", "no new shapes");
}
}
result
} }
#[cfg(coloring_in_tokens)] #[cfg(coloring_in_tokens)]
@ -565,31 +487,9 @@ pub fn color_fallible_syntax_with<'a, 'b, T: FallibleColorSyntax<Info = U, Input
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext, context: &ExpandContext,
) -> Result<U, ShellError> { ) -> Result<U, ShellError> {
trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::<T>(), debug_tokens(token_nodes, context.source)); token_nodes.color_fallible_frame(shape.name(), |token_nodes| {
shape.color_syntax(input, token_nodes, context)
if token_nodes.at_end() { })
trace!(target: "nu::color_syntax", "at eof");
return Err(ShellError::unexpected_eof("coloring", Tag::unknown()));
}
let len = token_nodes.shapes().len();
let result = shape.color_syntax(input, token_nodes, context);
trace!(target: "nu::color_syntax", "ok :: {:?}", debug_tokens(token_nodes, context.source));
if log_enabled!(target: "nu::color_syntax", log::Level::Trace) {
trace!(target: "nu::color_syntax", "after {}", std::any::type_name::<T>());
if len < token_nodes.shapes().len() {
for i in len..(token_nodes.shapes().len()) {
trace!(target: "nu::color_syntax", "new shape :: {:?}", token_nodes.shapes()[i]);
}
} else {
trace!(target: "nu::color_syntax", "no new shapes");
}
}
result
} }
pub(crate) fn expand_expr<'a, 'b, T: ExpandExpression>( pub(crate) fn expand_expr<'a, 'b, T: ExpandExpression>(
@ -597,18 +497,18 @@ pub(crate) fn expand_expr<'a, 'b, T: ExpandExpression>(
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext, context: &ExpandContext,
) -> Result<hir::Expression, ShellError> { ) -> Result<hir::Expression, ShellError> {
trace!(target: "nu::expand_syntax", "before {} :: {:?}", std::any::type_name::<T>(), debug_tokens(token_nodes, context.source)); trace!(target: "nu::expand_syntax", "before {} :: {:?}", std::any::type_name::<T>(), debug_tokens(token_nodes.state(), context.source));
let result = shape.expand_syntax(token_nodes, context); let result = shape.expand_syntax(token_nodes, context);
match result { match result {
Err(err) => { Err(err) => {
trace!(target: "nu::expand_syntax", "error :: {} :: {:?}", err, debug_tokens(token_nodes, context.source)); trace!(target: "nu::expand_syntax", "error :: {} :: {:?}", err, debug_tokens(token_nodes.state(), context.source));
Err(err) Err(err)
} }
Ok(result) => { Ok(result) => {
trace!(target: "nu::expand_syntax", "ok :: {:?} :: {:?}", result, debug_tokens(token_nodes, context.source)); trace!(target: "nu::expand_syntax", "ok :: {:?} :: {:?}", result, debug_tokens(token_nodes.state(), context.source));
Ok(result) Ok(result)
} }
} }
@ -738,7 +638,7 @@ impl FallibleColorSyntax for BareShape {
_context: &ExpandContext, _context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>, shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> { ) -> Result<(), ShellError> {
token_nodes.peek_any_token(|token| match token { token_nodes.peek_any_token("word", |token| match token {
// If it's a bare token, color it // If it's a bare token, color it
TokenNode::Token(Spanned { TokenNode::Token(Spanned {
item: RawToken::Bare, item: RawToken::Bare,
@ -759,21 +659,22 @@ impl FallibleColorSyntax for BareShape {
type Info = (); type Info = ();
type Input = FlatShape; type Input = FlatShape;
fn name(&self) -> &'static str {
"BareShape"
}
fn color_syntax<'a, 'b>( fn color_syntax<'a, 'b>(
&self, &self,
input: &FlatShape, input: &FlatShape,
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
_context: &ExpandContext, _context: &ExpandContext,
) -> Result<(), ShellError> { ) -> Result<(), ShellError> {
let span = token_nodes.peek_any_token(|token| match token { let span = token_nodes.peek_any_token("word", |token| match token {
// If it's a bare token, color it // If it's a bare token, color it
TokenNode::Token(Spanned { TokenNode::Token(Spanned {
item: RawToken::Bare, item: RawToken::Bare,
span, span,
}) => { }) => Ok(span),
// token_nodes.color_shape((*input).spanned(*span));
Ok(span)
}
// otherwise, fail // otherwise, fail
other => Err(ShellError::type_error("word", other.tagged_type_name())), other => Err(ShellError::type_error("word", other.tagged_type_name())),
@ -872,7 +773,8 @@ impl FallibleColorSyntax for PipelineShape {
shapes: &mut Vec<Spanned<FlatShape>>, shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> { ) -> Result<(), ShellError> {
// Make sure we're looking at a pipeline // Make sure we're looking at a pipeline
let Pipeline { parts, .. } = token_nodes.peek_any_token(|node| node.as_pipeline())?; let Pipeline { parts, .. } =
token_nodes.peek_any_token("pipeline", |node| node.as_pipeline())?;
// Enumerate the pipeline parts // Enumerate the pipeline parts
for part in parts { for part in parts {
@ -898,6 +800,10 @@ impl FallibleColorSyntax for PipelineShape {
type Info = (); type Info = ();
type Input = (); type Input = ();
fn name(&self) -> &'static str {
"PipelineShape"
}
fn color_syntax<'a, 'b>( fn color_syntax<'a, 'b>(
&self, &self,
_input: &(), _input: &(),
@ -905,7 +811,9 @@ impl FallibleColorSyntax for PipelineShape {
context: &ExpandContext, context: &ExpandContext,
) -> Result<(), ShellError> { ) -> Result<(), ShellError> {
// Make sure we're looking at a pipeline // Make sure we're looking at a pipeline
let Pipeline { parts, .. } = token_nodes.peek_any_token(|node| node.as_pipeline())?; let pipeline = token_nodes.peek_any_token("pipeline", |node| node.as_pipeline())?;
let parts = &pipeline.parts[..];
// Enumerate the pipeline parts // Enumerate the pipeline parts
for part in parts { for part in parts {
@ -914,40 +822,77 @@ impl FallibleColorSyntax for PipelineShape {
token_nodes.color_shape(FlatShape::Pipe.spanned(pipe)) token_nodes.color_shape(FlatShape::Pipe.spanned(pipe))
} }
// Create a new iterator containing the tokens in the pipeline part to color let tokens: Spanned<&[TokenNode]> = (&part.item.tokens[..]).spanned(part.span);
let mut token_nodes = TokensIterator::new(&part.tokens.item, part.span, false);
color_syntax(&MaybeSpaceShape, &mut token_nodes, context); token_nodes.child(tokens, move |token_nodes| {
color_syntax(&CommandShape, &mut token_nodes, context); color_syntax(&MaybeSpaceShape, token_nodes, context);
color_syntax(&CommandShape, token_nodes, context);
});
} }
Ok(()) Ok(())
} }
} }
#[cfg(coloring_in_tokens)]
impl ExpandSyntax for PipelineShape { impl ExpandSyntax for PipelineShape {
type Output = ClassifiedPipeline; type Output = ClassifiedPipeline;
fn expand_syntax<'a, 'b>( fn expand_syntax<'content, 'me>(
&self, &self,
iterator: &'b mut TokensIterator<'a>, iterator: &'me mut TokensIterator<'content>,
context: &ExpandContext, context: &ExpandContext,
) -> Result<Self::Output, ShellError> { ) -> Result<Self::Output, ShellError> {
let source = context.source; let source = context.source;
let peeked = iterator.peek_any().not_eof("pipeline")?; let peeked = iterator.peek_any().not_eof("pipeline")?;
let pipeline = peeked.node.as_pipeline()?; let pipeline = peeked.commit().as_pipeline()?;
peeked.commit();
let Pipeline { parts, .. } = pipeline; let parts = &pipeline.parts[..];
let commands: Result<Vec<_>, ShellError> = parts let mut out = vec![];
.iter()
.map(|item| classify_command(item, context, &source))
.collect();
Ok(ClassifiedPipeline { for part in parts {
commands: commands?, let tokens: Spanned<&[TokenNode]> = (&part.item.tokens[..]).spanned(part.span);
})
let classified = iterator.child(tokens, move |token_nodes| {
classify_command(token_nodes, context, &source)
})?;
out.push(classified);
}
Ok(ClassifiedPipeline { commands: out })
}
}
#[cfg(not(coloring_in_tokens))]
impl ExpandSyntax for PipelineShape {
type Output = ClassifiedPipeline;
fn expand_syntax<'content, 'me>(
&self,
iterator: &'me mut TokensIterator<'content>,
context: &ExpandContext,
) -> Result<Self::Output, ShellError> {
let source = context.source;
let peeked = iterator.peek_any().not_eof("pipeline")?;
let pipeline = peeked.commit().as_pipeline()?;
let parts = &pipeline.parts[..];
let mut out = vec![];
for part in parts {
let tokens: Spanned<&[TokenNode]> = (&part.item.tokens[..]).spanned(part.span);
let classified = iterator.child(tokens, move |token_nodes| {
classify_command(token_nodes, context, &source)
})?;
out.push(classified);
}
Ok(ClassifiedPipeline { commands: out })
} }
} }
@ -1018,6 +963,10 @@ impl FallibleColorSyntax for CommandHeadShape {
type Info = CommandHeadKind; type Info = CommandHeadKind;
type Input = (); type Input = ();
fn name(&self) -> &'static str {
"CommandHeadShape"
}
fn color_syntax<'a, 'b>( fn color_syntax<'a, 'b>(
&self, &self,
_input: &(), _input: &(),
@ -1215,6 +1164,10 @@ impl FallibleColorSyntax for InternalCommandHeadShape {
type Info = (); type Info = ();
type Input = (); type Input = ();
fn name(&self) -> &'static str {
"InternalCommandHeadShape"
}
fn color_syntax<'a, 'b>( fn color_syntax<'a, 'b>(
&self, &self,
_input: &(), _input: &(),
@ -1299,7 +1252,7 @@ fn parse_single_node<'a, 'b, T>(
expected: &'static str, expected: &'static str,
callback: impl FnOnce(RawToken, Span, SingleError) -> Result<T, ShellError>, callback: impl FnOnce(RawToken, Span, SingleError) -> Result<T, ShellError>,
) -> Result<T, ShellError> { ) -> Result<T, ShellError> {
token_nodes.peek_any_token(|node| match node { token_nodes.peek_any_token(expected, |node| match node {
TokenNode::Token(token) => callback( TokenNode::Token(token) => callback(
token.item, token.item,
token.span, token.span,
@ -1377,6 +1330,10 @@ impl FallibleColorSyntax for WhitespaceShape {
type Info = (); type Info = ();
type Input = (); type Input = ();
fn name(&self) -> &'static str {
"WhitespaceShape"
}
fn color_syntax<'a, 'b>( fn color_syntax<'a, 'b>(
&self, &self,
_input: &(), _input: &(),
@ -1502,6 +1459,10 @@ impl ColorSyntax for MaybeSpaceShape {
type Info = (); type Info = ();
type Input = (); type Input = ();
fn name(&self) -> &'static str {
"MaybeSpaceShape"
}
fn color_syntax<'a, 'b>( fn color_syntax<'a, 'b>(
&self, &self,
_input: &(), _input: &(),
@ -1559,6 +1520,10 @@ impl FallibleColorSyntax for SpaceShape {
type Info = (); type Info = ();
type Input = (); type Input = ();
fn name(&self) -> &'static str {
"SpaceShape"
}
fn color_syntax<'a, 'b>( fn color_syntax<'a, 'b>(
&self, &self,
_input: &(), _input: &(),
@ -1618,17 +1583,15 @@ fn expand_variable(span: Span, token_span: Span, source: &Text) -> hir::Expressi
} }
fn classify_command( fn classify_command(
command: &Spanned<PipelineElement>, mut iterator: &mut TokensIterator,
context: &ExpandContext, context: &ExpandContext,
source: &Text, source: &Text,
) -> Result<ClassifiedCommand, ShellError> { ) -> Result<ClassifiedCommand, ShellError> {
let mut iterator = TokensIterator::new(&command.tokens.item, command.span, true);
let head = CommandHeadShape.expand_syntax(&mut iterator, &context)?; let head = CommandHeadShape.expand_syntax(&mut iterator, &context)?;
match &head { match &head {
CommandSignature::Expression(_) => Err(ShellError::syntax_error( CommandSignature::Expression(_) => Err(ShellError::syntax_error(
"Unexpected expression in command position".tagged(command.span), "Unexpected expression in command position".tagged(iterator.whole_span()),
)), )),
// If the command starts with `^`, treat it as an external command no matter what // If the command starts with `^`, treat it as an external command no matter what
@ -1710,6 +1673,10 @@ impl ColorSyntax for CommandShape {
type Info = (); type Info = ();
type Input = (); type Input = ();
fn name(&self) -> &'static str {
"CommandShape"
}
fn color_syntax<'a, 'b>( fn color_syntax<'a, 'b>(
&self, &self,
_input: &(), _input: &(),


@ -66,6 +66,10 @@ impl FallibleColorSyntax for AnyBlockShape {
type Info = (); type Info = ();
type Input = (); type Input = ();
fn name(&self) -> &'static str {
"AnyBlockShape"
}
fn color_syntax<'a, 'b>( fn color_syntax<'a, 'b>(
&self, &self,
_input: &(), _input: &(),
@ -85,13 +89,14 @@ impl FallibleColorSyntax for AnyBlockShape {
match block { match block {
// If so, color it as a block // If so, color it as a block
Some((children, spans)) => { Some((children, spans)) => {
let mut token_nodes = TokensIterator::new(children.item, context.span, false); token_nodes.child(children, |token_nodes| {
color_syntax_with( color_syntax_with(
&DelimitedShape, &DelimitedShape,
&(Delimiter::Brace, spans.0, spans.1), &(Delimiter::Brace, spans.0, spans.1),
&mut token_nodes, token_nodes,
context, context,
); );
});
return Ok(()); return Ok(());
} }
@ -169,6 +174,10 @@ impl FallibleColorSyntax for ShorthandBlock {
type Info = (); type Info = ();
type Input = (); type Input = ();
fn name(&self) -> &'static str {
"ShorthandBlock"
}
fn color_syntax<'a, 'b>( fn color_syntax<'a, 'b>(
&self, &self,
_input: &(), _input: &(),
@ -264,6 +273,10 @@ impl FallibleColorSyntax for ShorthandPath {
type Info = (); type Info = ();
type Input = (); type Input = ();
fn name(&self) -> &'static str {
"ShorthandPath"
}
fn color_syntax<'a, 'b>( fn color_syntax<'a, 'b>(
&self, &self,
_input: &(), _input: &(),


@ -69,6 +69,10 @@ impl FallibleColorSyntax for AnyExpressionShape {
type Info = (); type Info = ();
type Input = (); type Input = ();
fn name(&self) -> &'static str {
"AnyExpressionShape"
}
fn color_syntax<'a, 'b>( fn color_syntax<'a, 'b>(
&self, &self,
_input: &(), _input: &(),
@ -267,6 +271,10 @@ impl FallibleColorSyntax for AnyExpressionStartShape {
type Info = (); type Info = ();
type Input = (); type Input = ();
fn name(&self) -> &'static str {
"AnyExpressionStartShape"
}
fn color_syntax<'a, 'b>( fn color_syntax<'a, 'b>(
&self, &self,
_input: &(), _input: &(),
@ -315,7 +323,7 @@ impl FallibleColorSyntax for AnyExpressionStartShape {
token_nodes.color_shape(FlatShape::Word.spanned(atom.span)); token_nodes.color_shape(FlatShape::Word.spanned(atom.span));
} }
_ => atom.color_tokens(token_nodes.mut_shapes()), _ => token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes)),
} }
Ok(()) Ok(())
@ -387,13 +395,17 @@ impl FallibleColorSyntax for BareTailShape {
type Info = (); type Info = ();
type Input = (); type Input = ();
fn name(&self) -> &'static str {
"BareTailShape"
}
fn color_syntax<'a, 'b>( fn color_syntax<'a, 'b>(
&self, &self,
_input: &(), _input: &(),
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext, context: &ExpandContext,
) -> Result<(), ShellError> { ) -> Result<(), ShellError> {
let len = token_nodes.shapes().len(); let len = token_nodes.state().shapes().len();
loop { loop {
let word = let word =
@ -422,7 +434,7 @@ impl FallibleColorSyntax for BareTailShape {
} }
} }
if token_nodes.shapes().len() > len { if token_nodes.state().shapes().len() > len {
Ok(()) Ok(())
} else { } else {
Err(ShellError::syntax_error( Err(ShellError::syntax_error(


@@ -66,6 +66,11 @@ impl ColorSyntax for DelimitedShape
 impl ColorSyntax for DelimitedShape {
     type Info = ();
     type Input = (Delimiter, Span, Span);
+
+    fn name(&self) -> &'static str {
+        "DelimitedShape"
+    }
+
     fn color_syntax<'a, 'b>(
         &self,
         (delimiter, open, close): &(Delimiter, Span, Span),


@ -52,6 +52,10 @@ impl FallibleColorSyntax for FilePathShape {
type Info = (); type Info = ();
type Input = (); type Input = ();
fn name(&self) -> &'static str {
"FilePathShape"
}
fn color_syntax<'a, 'b>( fn color_syntax<'a, 'b>(
&self, &self,
_input: &(), _input: &(),
@ -78,7 +82,7 @@ impl FallibleColorSyntax for FilePathShape {
token_nodes.color_shape(FlatShape::Path.spanned(atom.span)); token_nodes.color_shape(FlatShape::Path.spanned(atom.span));
} }
_ => atom.color_tokens(token_nodes.mut_shapes()), _ => token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes)),
} }
Ok(()) Ok(())


@ -121,6 +121,10 @@ impl ColorSyntax for ExpressionListShape {
type Info = (); type Info = ();
type Input = (); type Input = ();
fn name(&self) -> &'static str {
"ExpressionListShape"
}
/// The intent of this method is to fully color an expression list shape infallibly. /// The intent of this method is to fully color an expression list shape infallibly.
/// This means that if we can't expand a token into an expression, we fall back to /// This means that if we can't expand a token into an expression, we fall back to
/// a simpler coloring strategy. /// a simpler coloring strategy.
@ -148,12 +152,12 @@ impl ColorSyntax for ExpressionListShape {
} }
if backoff { if backoff {
let len = token_nodes.shapes().len(); let len = token_nodes.state().shapes().len();
// If we previously encountered a parsing error, use backoff coloring mode // If we previously encountered a parsing error, use backoff coloring mode
color_syntax(&SimplestExpression, token_nodes, context); color_syntax(&SimplestExpression, token_nodes, context);
if len == token_nodes.shapes().len() && !token_nodes.at_end() { if len == token_nodes.state().shapes().len() && !token_nodes.at_end() {
// This should never happen, but if it does, a panic is better than an infinite loop // This should never happen, but if it does, a panic is better than an infinite loop
panic!("Unexpected tokens left that couldn't be colored even with SimplestExpression") panic!("Unexpected tokens left that couldn't be colored even with SimplestExpression")
} }
@ -222,6 +226,10 @@ impl ColorSyntax for BackoffColoringMode {
type Info = (); type Info = ();
type Input = (); type Input = ();
fn name(&self) -> &'static str {
"BackoffColoringMode"
}
fn color_syntax<'a, 'b>( fn color_syntax<'a, 'b>(
&self, &self,
_input: &Self::Input, _input: &Self::Input,
@ -233,12 +241,12 @@ impl ColorSyntax for BackoffColoringMode {
break; break;
} }
let len = token_nodes.shapes().len(); let len = token_nodes.state().shapes().len();
color_syntax(&SimplestExpression, token_nodes, context); color_syntax(&SimplestExpression, token_nodes, context);
if len == token_nodes.shapes().len() && !token_nodes.at_end() { if len == token_nodes.state().shapes().len() && !token_nodes.at_end() {
// This shouldn't happen, but if it does, a panic is better than an infinite loop // This shouldn't happen, but if it does, a panic is better than an infinite loop
panic!("SimplestExpression failed to consume any tokens, but it's not at the end. This is unexpected\n== token nodes==\n{:#?}\n\n== shapes ==\n{:#?}", token_nodes, token_nodes.shapes()); panic!("SimplestExpression failed to consume any tokens, but it's not at the end. This is unexpected\n== token nodes==\n{:#?}\n\n== shapes ==\n{:#?}", token_nodes, token_nodes.state().shapes());
} }
} }
} }
@ -281,6 +289,10 @@ impl ColorSyntax for SimplestExpression {
type Info = (); type Info = ();
type Input = (); type Input = ();
fn name(&self) -> &'static str {
"SimplestExpression"
}
fn color_syntax<'a, 'b>( fn color_syntax<'a, 'b>(
&self, &self,
_input: &(), _input: &(),
@ -296,7 +308,7 @@ impl ColorSyntax for SimplestExpression {
match atom { match atom {
Err(_) => {} Err(_) => {}
Ok(atom) => atom.color_tokens(token_nodes.mut_shapes()), Ok(atom) => token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes)),
} }
} }
} }


@ -79,6 +79,10 @@ impl FallibleColorSyntax for NumberShape {
type Info = (); type Info = ();
type Input = (); type Input = ();
fn name(&self) -> &'static str {
"NumberShape"
}
fn color_syntax<'a, 'b>( fn color_syntax<'a, 'b>(
&self, &self,
_input: &(), _input: &(),
@ -97,7 +101,7 @@ impl FallibleColorSyntax for NumberShape {
Spanned { item: Ok(atom), .. } => atom, Spanned { item: Ok(atom), .. } => atom,
}; };
atom.color_tokens(token_nodes.mut_shapes()); token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes));
Ok(()) Ok(())
} }
@ -171,6 +175,10 @@ impl FallibleColorSyntax for IntShape {
type Info = (); type Info = ();
type Input = (); type Input = ();
fn name(&self) -> &'static str {
"IntShape"
}
fn color_syntax<'a, 'b>( fn color_syntax<'a, 'b>(
&self, &self,
_input: &(), _input: &(),
@ -189,7 +197,7 @@ impl FallibleColorSyntax for IntShape {
Spanned { item: Ok(atom), .. } => atom, Spanned { item: Ok(atom), .. } => atom,
}; };
atom.color_tokens(token_nodes.mut_shapes()); token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes));
Ok(()) Ok(())
} }


@@ -41,6 +41,10 @@ impl FallibleColorSyntax for PatternShape
     type Info = ();
     type Input = ();
 
+    fn name(&self) -> &'static str {
+        "PatternShape"
+    }
+
     fn color_syntax<'a, 'b>(
         &self,
         _input: &(),


@ -45,6 +45,10 @@ impl FallibleColorSyntax for StringShape {
type Info = (); type Info = ();
type Input = FlatShape; type Input = FlatShape;
fn name(&self) -> &'static str {
"StringShape"
}
fn color_syntax<'a, 'b>( fn color_syntax<'a, 'b>(
&self, &self,
input: &FlatShape, input: &FlatShape,
@ -63,7 +67,7 @@ impl FallibleColorSyntax for StringShape {
item: AtomicToken::String { .. }, item: AtomicToken::String { .. },
span, span,
} => token_nodes.color_shape((*input).spanned(span)), } => token_nodes.color_shape((*input).spanned(span)),
other => other.color_tokens(token_nodes.mut_shapes()), atom => token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes)),
} }
Ok(()) Ok(())


@ -90,6 +90,10 @@ impl FallibleColorSyntax for VariablePathShape {
type Info = (); type Info = ();
type Input = (); type Input = ();
fn name(&self) -> &'static str {
"VariablePathShape"
}
fn color_syntax<'a, 'b>( fn color_syntax<'a, 'b>(
&self, &self,
_input: &(), _input: &(),
@ -166,6 +170,10 @@ impl FallibleColorSyntax for PathTailShape {
type Info = (); type Info = ();
type Input = (); type Input = ();
fn name(&self) -> &'static str {
"PathTailShape"
}
fn color_syntax<'a, 'b>( fn color_syntax<'a, 'b>(
&self, &self,
_input: &(), _input: &(),
@ -334,6 +342,10 @@ impl FallibleColorSyntax for ExpressionContinuationShape {
type Info = ContinuationInfo; type Info = ContinuationInfo;
type Input = (); type Input = ();
fn name(&self) -> &'static str {
"ExpressionContinuationShape"
}
fn color_syntax<'a, 'b>( fn color_syntax<'a, 'b>(
&self, &self,
_input: &(), _input: &(),
@ -446,6 +458,10 @@ impl FallibleColorSyntax for VariableShape {
type Info = (); type Info = ();
type Input = (); type Input = ();
fn name(&self) -> &'static str {
"VariableShape"
}
fn color_syntax<'a, 'b>( fn color_syntax<'a, 'b>(
&self, &self,
_input: &(), _input: &(),
@ -658,6 +674,10 @@ impl FallibleColorSyntax for ColumnPathShape {
type Info = (); type Info = ();
type Input = (); type Input = ();
fn name(&self) -> &'static str {
"ColumnPathShape"
}
fn color_syntax<'a, 'b>( fn color_syntax<'a, 'b>(
&self, &self,
_input: &(), _input: &(),
@ -758,6 +778,10 @@ impl FallibleColorSyntax for MemberShape {
type Info = (); type Info = ();
type Input = (); type Input = ();
fn name(&self) -> &'static str {
"MemberShape"
}
fn color_syntax<'a, 'b>( fn color_syntax<'a, 'b>(
&self, &self,
_input: &(), _input: &(),
@ -843,6 +867,10 @@ impl FallibleColorSyntax for ColorableDotShape {
type Info = (); type Info = ();
type Input = FlatShape; type Input = FlatShape;
fn name(&self) -> &'static str {
"ColorableDotShape"
}
fn color_syntax<'a, 'b>( fn color_syntax<'a, 'b>(
&self, &self,
input: &FlatShape, input: &FlatShape,
@ -953,6 +981,10 @@ impl FallibleColorSyntax for InfixShape {
type Info = (); type Info = ();
type Input = (); type Input = ();
fn name(&self) -> &'static str {
"InfixShape"
}
fn color_syntax<'a, 'b>( fn color_syntax<'a, 'b>(
&self, &self,
_input: &(), _input: &(),
@ -971,10 +1003,7 @@ impl FallibleColorSyntax for InfixShape {
|token, token_span, _| { |token, token_span, _| {
match token { match token {
// If it's an operator (and not `.`), it's a match // If it's an operator (and not `.`), it's a match
RawToken::Operator(operator) if operator != Operator::Dot => { RawToken::Operator(operator) if operator != Operator::Dot => Ok(token_span),
// token_nodes.color_shape(FlatShape::Operator.spanned(token_span));
Ok(token_span)
}
// Otherwise, it's not a match // Otherwise, it's not a match
_ => Err(ShellError::type_error( _ => Err(ShellError::type_error(


@ -1,25 +1,37 @@
pub(crate) mod debug; pub(crate) mod debug;
use self::debug::Tracer;
use crate::errors::ShellError; use crate::errors::ShellError;
#[cfg(coloring_in_tokens)] #[cfg(coloring_in_tokens)]
use crate::parser::hir::syntax_shape::FlatShape; use crate::parser::hir::syntax_shape::FlatShape;
use crate::parser::TokenNode; use crate::parser::TokenNode;
use crate::prelude::*;
use crate::{Span, Spanned, SpannedItem}; use crate::{Span, Spanned, SpannedItem};
#[allow(unused)] #[allow(unused)]
use getset::Getters; use getset::{Getters, MutGetters};
#[derive(Getters, Debug)] #[derive(Getters, Debug)]
pub struct TokensIterator<'content> { pub struct TokensIteratorState<'content> {
tokens: &'content [TokenNode], tokens: &'content [TokenNode],
span: Span, span: Span,
skip_ws: bool, skip_ws: bool,
index: usize, index: usize,
seen: indexmap::IndexSet<usize>, seen: indexmap::IndexSet<usize>,
#[cfg(coloring_in_tokens)] #[cfg(coloring_in_tokens)]
#[get = "pub"] #[cfg_attr(coloring_in_tokens, get = "pub")]
shapes: Vec<Spanned<FlatShape>>, shapes: Vec<Spanned<FlatShape>>,
} }
#[derive(Getters, MutGetters, Debug)]
pub struct TokensIterator<'content> {
#[get = "pub"]
#[get_mut = "pub"]
state: TokensIteratorState<'content>,
#[get = "pub"]
#[get_mut = "pub"]
tracer: Tracer,
}
#[derive(Debug)] #[derive(Debug)]
pub struct Checkpoint<'content, 'me> { pub struct Checkpoint<'content, 'me> {
pub(crate) iterator: &'me mut TokensIterator<'content>, pub(crate) iterator: &'me mut TokensIterator<'content>,
@ -39,10 +51,12 @@ impl<'content, 'me> Checkpoint<'content, 'me> {
impl<'content, 'me> std::ops::Drop for Checkpoint<'content, 'me> { impl<'content, 'me> std::ops::Drop for Checkpoint<'content, 'me> {
fn drop(&mut self) { fn drop(&mut self) {
if !self.committed { if !self.committed {
self.iterator.index = self.index; let state = &mut self.iterator.state;
self.iterator.seen = self.seen.clone();
state.index = self.index;
state.seen = self.seen.clone();
#[cfg(coloring_in_tokens)] #[cfg(coloring_in_tokens)]
self.iterator.shapes.truncate(self.shape_start); state.shapes.truncate(self.shape_start);
} }
} }
} }
@ -138,13 +152,16 @@ impl<'content> TokensIterator<'content> {
skip_ws: bool, skip_ws: bool,
) -> TokensIterator<'content> { ) -> TokensIterator<'content> {
TokensIterator { TokensIterator {
tokens: items, state: TokensIteratorState {
span, tokens: items,
skip_ws, span,
index: 0, skip_ws,
seen: indexmap::IndexSet::new(), index: 0,
#[cfg(coloring_in_tokens)] seen: indexmap::IndexSet::new(),
shapes: vec![], #[cfg(coloring_in_tokens)]
shapes: vec![],
},
tracer: Tracer::new(),
} }
} }
@ -153,7 +170,7 @@ impl<'content> TokensIterator<'content> {
} }
pub fn len(&self) -> usize { pub fn len(&self) -> usize {
self.tokens.len() self.state.tokens.len()
} }
pub fn spanned<T>( pub fn spanned<T>(
@ -171,35 +188,146 @@ impl<'content> TokensIterator<'content> {
#[cfg(coloring_in_tokens)] #[cfg(coloring_in_tokens)]
pub fn color_shape(&mut self, shape: Spanned<FlatShape>) { pub fn color_shape(&mut self, shape: Spanned<FlatShape>) {
self.shapes.push(shape); self.with_tracer(|_, tracer| tracer.add_shape(shape));
self.state.shapes.push(shape);
} }
#[cfg(coloring_in_tokens)] #[cfg(coloring_in_tokens)]
pub fn mut_shapes(&mut self) -> &mut Vec<Spanned<FlatShape>> { pub fn mutate_shapes(&mut self, block: impl FnOnce(&mut Vec<Spanned<FlatShape>>)) {
&mut self.shapes let new_shapes: Vec<Spanned<FlatShape>> = {
let shapes = &mut self.state.shapes;
let len = shapes.len();
block(shapes);
(len..(shapes.len())).map(|i| shapes[i]).collect()
};
self.with_tracer(|_, tracer| {
for shape in new_shapes {
tracer.add_shape(shape)
}
});
} }
#[cfg(coloring_in_tokens)] #[cfg(coloring_in_tokens)]
pub fn child<T>( pub fn silently_mutate_shapes(&mut self, block: impl FnOnce(&mut Vec<Spanned<FlatShape>>)) {
&mut self, let shapes = &mut self.state.shapes;
tokens: Spanned<&'content [TokenNode]>, block(shapes);
block: impl FnOnce(&mut TokensIterator) -> T, }
#[cfg(coloring_in_tokens)]
pub fn sort_shapes(&mut self) {
// This is pretty dubious, but it works. We should look into a better algorithm that doesn't end up requiring
// this solution.
self.state
.shapes
.sort_by(|a, b| a.span.start().cmp(&b.span.start()));
}
#[cfg(coloring_in_tokens)]
pub fn child<'me, T>(
&'me mut self,
tokens: Spanned<&'me [TokenNode]>,
block: impl FnOnce(&mut TokensIterator<'me>) -> T,
) -> T { ) -> T {
let mut shapes = vec![]; let mut shapes = vec![];
std::mem::swap(&mut shapes, &mut self.shapes); std::mem::swap(&mut shapes, &mut self.state.shapes);
let mut tracer = Tracer::new();
std::mem::swap(&mut tracer, &mut self.tracer);
let mut iterator = TokensIterator { let mut iterator = TokensIterator {
tokens: tokens.item, state: TokensIteratorState {
span: tokens.span, tokens: tokens.item,
skip_ws: false, span: tokens.span,
index: 0, skip_ws: false,
seen: indexmap::IndexSet::new(), index: 0,
shapes, seen: indexmap::IndexSet::new(),
shapes,
},
tracer,
}; };
let result = block(&mut iterator); let result = block(&mut iterator);
std::mem::swap(&mut iterator.shapes, &mut self.shapes); std::mem::swap(&mut iterator.state.shapes, &mut self.state.shapes);
std::mem::swap(&mut iterator.tracer, &mut self.tracer);
result
}
#[cfg(not(coloring_in_tokens))]
pub fn child<'me, T>(
&'me mut self,
tokens: Spanned<&'me [TokenNode]>,
block: impl FnOnce(&mut TokensIterator<'me>) -> T,
) -> T {
let mut tracer = Tracer::new();
std::mem::swap(&mut tracer, &mut self.tracer);
let mut iterator = TokensIterator {
state: TokensIteratorState {
tokens: tokens.item,
span: tokens.span,
skip_ws: false,
index: 0,
seen: indexmap::IndexSet::new(),
},
tracer,
};
let result = block(&mut iterator);
std::mem::swap(&mut iterator.tracer, &mut self.tracer);
result
}
pub fn with_tracer(&mut self, block: impl FnOnce(&mut TokensIteratorState, &mut Tracer)) {
let state = &mut self.state;
let tracer = &mut self.tracer;
block(state, tracer)
}
#[cfg(coloring_in_tokens)]
pub fn color_frame<T>(
&mut self,
desc: &'static str,
block: impl FnOnce(&mut TokensIterator) -> T,
) -> T {
self.with_tracer(|_, tracer| tracer.start(desc));
let result = block(self);
self.with_tracer(|_, tracer| {
tracer.success();
});
result
}
pub fn color_fallible_frame<T>(
&mut self,
desc: &'static str,
block: impl FnOnce(&mut TokensIterator) -> Result<T, ShellError>,
) -> Result<T, ShellError> {
self.with_tracer(|_, tracer| tracer.start(desc));
if self.at_end() {
self.with_tracer(|_, tracer| tracer.eof_frame());
return Err(ShellError::unexpected_eof("coloring", Tag::unknown()));
}
let result = block(self);
self.with_tracer(|_, tracer| match &result {
Ok(_) => {
tracer.success();
}
Err(err) => tracer.failed(err),
});
result result
} }
@ -207,10 +335,12 @@ impl<'content> TokensIterator<'content> {
/// Use a checkpoint when you need to peek more than one token ahead, but can't be sure /// Use a checkpoint when you need to peek more than one token ahead, but can't be sure
/// that you'll succeed. /// that you'll succeed.
pub fn checkpoint<'me>(&'me mut self) -> Checkpoint<'content, 'me> { pub fn checkpoint<'me>(&'me mut self) -> Checkpoint<'content, 'me> {
let index = self.index; let state = &mut self.state;
let index = state.index;
#[cfg(coloring_in_tokens)] #[cfg(coloring_in_tokens)]
let shape_start = self.shapes.len(); let shape_start = state.shapes.len();
let seen = self.seen.clone(); let seen = state.seen.clone();
Checkpoint { Checkpoint {
iterator: self, iterator: self,
@ -228,10 +358,12 @@ impl<'content> TokensIterator<'content> {
&'me mut self, &'me mut self,
block: impl FnOnce(&mut TokensIterator<'content>) -> Result<T, ShellError>, block: impl FnOnce(&mut TokensIterator<'content>) -> Result<T, ShellError>,
) -> Result<T, ShellError> { ) -> Result<T, ShellError> {
let index = self.index; let state = &mut self.state;
let index = state.index;
#[cfg(coloring_in_tokens)] #[cfg(coloring_in_tokens)]
let shape_start = self.shapes.len(); let shape_start = state.shapes.len();
let seen = self.seen.clone(); let seen = state.seen.clone();
let checkpoint = Checkpoint { let checkpoint = Checkpoint {
iterator: self, iterator: self,
@ -255,11 +387,11 @@ impl<'content> TokensIterator<'content> {
&'me mut self, &'me mut self,
block: impl FnOnce(&mut TokensIterator<'content>) -> Result<T, ShellError>, block: impl FnOnce(&mut TokensIterator<'content>) -> Result<T, ShellError>,
) -> (Result<T, ShellError>, Vec<Spanned<FlatShape>>) { ) -> (Result<T, ShellError>, Vec<Spanned<FlatShape>>) {
let index = self.index; let index = self.state.index;
let mut shapes = vec![]; let mut shapes = vec![];
let seen = self.seen.clone(); let seen = self.state.seen.clone();
std::mem::swap(&mut self.shapes, &mut shapes); std::mem::swap(&mut self.state.shapes, &mut shapes);
let checkpoint = Checkpoint { let checkpoint = Checkpoint {
iterator: self, iterator: self,
@ -274,7 +406,7 @@ impl<'content> TokensIterator<'content> {
let value = match value { let value = match value {
Err(err) => { Err(err) => {
drop(checkpoint); drop(checkpoint);
std::mem::swap(&mut self.shapes, &mut shapes); std::mem::swap(&mut self.state.shapes, &mut shapes);
return (Err(err), vec![]); return (Err(err), vec![]);
} }
@ -282,12 +414,12 @@ impl<'content> TokensIterator<'content> {
}; };
checkpoint.commit(); checkpoint.commit();
std::mem::swap(&mut self.shapes, &mut shapes); std::mem::swap(&mut self.state.shapes, &mut shapes);
return (Ok(value), shapes); return (Ok(value), shapes);
} }
fn eof_span(&self) -> Span { fn eof_span(&self) -> Span {
Span::new(self.span.end(), self.span.end()) Span::new(self.state.span.end(), self.state.span.end())
} }
pub fn typed_span_at_cursor(&mut self) -> Spanned<&'static str> { pub fn typed_span_at_cursor(&mut self) -> Spanned<&'static str> {
@ -299,6 +431,10 @@ impl<'content> TokensIterator<'content> {
} }
} }
pub fn whole_span(&self) -> Span {
self.state.span
}
pub fn span_at_cursor(&mut self) -> Span { pub fn span_at_cursor(&mut self) -> Span {
let next = self.peek_any(); let next = self.peek_any();
@ -309,11 +445,11 @@ impl<'content> TokensIterator<'content> {
} }
pub fn remove(&mut self, position: usize) { pub fn remove(&mut self, position: usize) {
self.seen.insert(position); self.state.seen.insert(position);
} }
pub fn at_end(&self) -> bool { pub fn at_end(&self) -> bool {
peek(self, self.skip_ws).is_none() peek(self, self.state.skip_ws).is_none()
} }
pub fn at_end_possible_ws(&self) -> bool { pub fn at_end_possible_ws(&self) -> bool {
@ -321,13 +457,15 @@ impl<'content> TokensIterator<'content> {
} }
pub fn advance(&mut self) { pub fn advance(&mut self) {
self.seen.insert(self.index); self.state.seen.insert(self.state.index);
self.index += 1; self.state.index += 1;
} }
pub fn extract<T>(&mut self, f: impl Fn(&TokenNode) -> Option<T>) -> Option<(usize, T)> { pub fn extract<T>(&mut self, f: impl Fn(&TokenNode) -> Option<T>) -> Option<(usize, T)> {
for (i, item) in self.tokens.iter().enumerate() { let state = &mut self.state;
if self.seen.contains(&i) {
for (i, item) in state.tokens.iter().enumerate() {
if state.seen.contains(&i) {
continue; continue;
} }
@ -336,7 +474,7 @@ impl<'content> TokensIterator<'content> {
continue; continue;
} }
Some(value) => { Some(value) => {
self.seen.insert(i); state.seen.insert(i);
return Some((i, value)); return Some((i, value));
} }
} }
@ -346,22 +484,26 @@ impl<'content> TokensIterator<'content> {
} }
pub fn move_to(&mut self, pos: usize) { pub fn move_to(&mut self, pos: usize) {
self.index = pos; self.state.index = pos;
} }
pub fn restart(&mut self) { pub fn restart(&mut self) {
self.index = 0; self.state.index = 0;
} }
pub fn clone(&self) -> TokensIterator<'content> { pub fn clone(&self) -> TokensIterator<'content> {
let state = &self.state;
TokensIterator { TokensIterator {
tokens: self.tokens, state: TokensIteratorState {
span: self.span, tokens: state.tokens,
index: self.index, span: state.span,
seen: self.seen.clone(), index: state.index,
skip_ws: self.skip_ws, seen: state.seen.clone(),
#[cfg(coloring_in_tokens)] skip_ws: state.skip_ws,
shapes: self.shapes.clone(), #[cfg(coloring_in_tokens)]
shapes: state.shapes.clone(),
},
tracer: self.tracer.clone(),
} }
} }
@ -384,10 +526,11 @@ impl<'content> TokensIterator<'content> {
// Peek the next token, including whitespace, but not EOF // Peek the next token, including whitespace, but not EOF
pub fn peek_any_token<'me, T>( pub fn peek_any_token<'me, T>(
&'me mut self, &'me mut self,
expected: &'static str,
block: impl FnOnce(&'content TokenNode) -> Result<T, ShellError>, block: impl FnOnce(&'content TokenNode) -> Result<T, ShellError>,
) -> Result<T, ShellError> { ) -> Result<T, ShellError> {
let peeked = start_next(self, false); let peeked = start_next(self, false);
let peeked = peeked.not_eof("invariant"); let peeked = peeked.not_eof(expected);
match peeked { match peeked {
Err(err) => return Err(err), Err(err) => return Err(err),
@ -403,10 +546,10 @@ impl<'content> TokensIterator<'content> {
fn commit(&mut self, from: usize, to: usize) { fn commit(&mut self, from: usize, to: usize) {
for index in from..to { for index in from..to {
self.seen.insert(index); self.state.seen.insert(index);
} }
self.index = to; self.state.index = to;
} }
pub fn pos(&self, skip_ws: bool) -> Option<usize> { pub fn pos(&self, skip_ws: bool) -> Option<usize> {
@ -424,7 +567,7 @@ impl<'content> Iterator for TokensIterator<'content> {
type Item = &'content TokenNode; type Item = &'content TokenNode;
fn next(&mut self) -> Option<&'content TokenNode> { fn next(&mut self) -> Option<&'content TokenNode> {
next(self, self.skip_ws) next(self, self.state.skip_ws)
} }
} }
@ -432,23 +575,25 @@ fn peek<'content, 'me>(
iterator: &'me TokensIterator<'content>, iterator: &'me TokensIterator<'content>,
skip_ws: bool, skip_ws: bool,
) -> Option<&'me TokenNode> { ) -> Option<&'me TokenNode> {
let mut to = iterator.index; let state = iterator.state();
let mut to = state.index;
loop { loop {
if to >= iterator.tokens.len() { if to >= state.tokens.len() {
return None; return None;
} }
if iterator.seen.contains(&to) { if state.seen.contains(&to) {
to += 1; to += 1;
continue; continue;
} }
if to >= iterator.tokens.len() { if to >= state.tokens.len() {
return None; return None;
} }
let node = &iterator.tokens[to]; let node = &state.tokens[to];
match node { match node {
TokenNode::Whitespace(_) if skip_ws => { TokenNode::Whitespace(_) if skip_ws => {
@ -465,23 +610,25 @@ fn peek_pos<'content, 'me>(
iterator: &'me TokensIterator<'content>, iterator: &'me TokensIterator<'content>,
skip_ws: bool, skip_ws: bool,
) -> Option<usize> { ) -> Option<usize> {
let mut to = iterator.index; let state = iterator.state();
let mut to = state.index;
loop { loop {
if to >= iterator.tokens.len() { if to >= state.tokens.len() {
return None; return None;
} }
if iterator.seen.contains(&to) { if state.seen.contains(&to) {
to += 1; to += 1;
continue; continue;
} }
if to >= iterator.tokens.len() { if to >= state.tokens.len() {
return None; return None;
} }
let node = &iterator.tokens[to]; let node = &state.tokens[to];
match node { match node {
TokenNode::Whitespace(_) if skip_ws => { TokenNode::Whitespace(_) if skip_ws => {
@ -496,11 +643,13 @@ fn start_next<'content, 'me>(
iterator: &'me mut TokensIterator<'content>, iterator: &'me mut TokensIterator<'content>,
skip_ws: bool, skip_ws: bool,
) -> Peeked<'content, 'me> { ) -> Peeked<'content, 'me> {
let from = iterator.index; let state = iterator.state();
let mut to = iterator.index;
let from = state.index;
let mut to = state.index;
loop { loop {
if to >= iterator.tokens.len() { if to >= state.tokens.len() {
return Peeked { return Peeked {
node: None, node: None,
iterator, iterator,
@ -509,12 +658,12 @@ fn start_next<'content, 'me>(
}; };
} }
if iterator.seen.contains(&to) { if state.seen.contains(&to) {
to += 1; to += 1;
continue; continue;
} }
if to >= iterator.tokens.len() { if to >= state.tokens.len() {
return Peeked { return Peeked {
node: None, node: None,
iterator, iterator,
@ -523,7 +672,7 @@ fn start_next<'content, 'me>(
}; };
} }
let node = &iterator.tokens[to]; let node = &state.tokens[to];
match node { match node {
TokenNode::Whitespace(_) if skip_ws => { TokenNode::Whitespace(_) if skip_ws => {
@ -547,20 +696,20 @@ fn next<'me, 'content>(
skip_ws: bool, skip_ws: bool,
) -> Option<&'content TokenNode> { ) -> Option<&'content TokenNode> {
loop { loop {
if iterator.index >= iterator.tokens.len() { if iterator.state().index >= iterator.state().tokens.len() {
return None; return None;
} }
if iterator.seen.contains(&iterator.index) { if iterator.state().seen.contains(&iterator.state().index) {
iterator.advance(); iterator.advance();
continue; continue;
} }
if iterator.index >= iterator.tokens.len() { if iterator.state().index >= iterator.state().tokens.len() {
return None; return None;
} }
match &iterator.tokens[iterator.index] { match &iterator.state().tokens[iterator.state().index] {
TokenNode::Whitespace(_) if skip_ws => { TokenNode::Whitespace(_) if skip_ws => {
iterator.advance(); iterator.advance();
} }

View file

@ -1,5 +1,13 @@
use crate::parser::hir::tokens_iterator::TokensIterator; use crate::errors::ShellError;
use crate::parser::hir::syntax_shape::FlatShape;
use crate::parser::hir::tokens_iterator::TokensIteratorState;
use crate::prelude::*;
use crate::traits::ToDebug; use crate::traits::ToDebug;
use ansi_term::Color;
use log::trace;
use ptree::*;
use std::borrow::Cow;
use std::io;
#[derive(Debug)] #[derive(Debug)]
pub(crate) enum DebugIteratorToken { pub(crate) enum DebugIteratorToken {
@ -8,15 +16,15 @@ pub(crate) enum DebugIteratorToken {
Cursor, Cursor,
} }
pub(crate) fn debug_tokens(iterator: &TokensIterator, source: &str) -> Vec<DebugIteratorToken> { pub(crate) fn debug_tokens(state: &TokensIteratorState, source: &str) -> Vec<DebugIteratorToken> {
let mut out = vec![]; let mut out = vec![];
for (i, token) in iterator.tokens.iter().enumerate() { for (i, token) in state.tokens.iter().enumerate() {
if iterator.index == i { if state.index == i {
out.push(DebugIteratorToken::Cursor); out.push(DebugIteratorToken::Cursor);
} }
if iterator.seen.contains(&i) { if state.seen.contains(&i) {
out.push(DebugIteratorToken::Seen(format!("{}", token.debug(source)))); out.push(DebugIteratorToken::Seen(format!("{}", token.debug(source))));
} else { } else {
out.push(DebugIteratorToken::Unseen(format!( out.push(DebugIteratorToken::Unseen(format!(
@ -28,3 +36,344 @@ pub(crate) fn debug_tokens(iterator: &TokensIterator, source: &str) -> Vec<Debug
out out
} }
#[derive(Debug, Clone)]
pub enum FrameChild {
#[allow(unused)]
Shape(Spanned<FlatShape>),
Frame(ColorFrame),
}
impl FrameChild {
fn colored_leaf_description(&self, text: &Text, f: &mut impl io::Write) -> io::Result<()> {
match self {
FrameChild::Shape(shape) => write!(
f,
"{} {:?}",
Color::White
.bold()
.on(Color::Green)
.paint(format!("{:?}", shape.item)),
shape.span.slice(text)
),
FrameChild::Frame(frame) => frame.colored_leaf_description(f),
}
}
fn into_tree_child(self, text: &Text) -> TreeChild {
match self {
FrameChild::Shape(shape) => TreeChild::Shape(shape, text.clone()),
FrameChild::Frame(frame) => TreeChild::Frame(frame, text.clone()),
}
}
}
#[derive(Debug, Clone)]
pub struct ColorFrame {
description: &'static str,
children: Vec<FrameChild>,
error: Option<ShellError>,
}
impl ColorFrame {
fn colored_leaf_description(&self, f: &mut impl io::Write) -> io::Result<()> {
if self.has_only_error_descendents() {
if self.children.len() == 0 {
write!(
f,
"{}",
Color::White.bold().on(Color::Red).paint(self.description)
)
} else {
write!(f, "{}", Color::Red.normal().paint(self.description))
}
} else if self.has_descendent_shapes() {
write!(f, "{}", Color::Green.normal().paint(self.description))
} else {
write!(f, "{}", Color::Yellow.bold().paint(self.description))
}
}
fn colored_description(&self, text: &Text, f: &mut impl io::Write) -> io::Result<()> {
if self.children.len() == 1 {
let child = &self.children[0];
self.colored_leaf_description(f)?;
write!(f, " -> ")?;
child.colored_leaf_description(text, f)
} else {
self.colored_leaf_description(f)
}
}
fn children_for_formatting(&self, text: &Text) -> Vec<TreeChild> {
if self.children.len() == 1 {
let child = &self.children[0];
match child {
FrameChild::Shape(_) => vec![],
FrameChild::Frame(frame) => frame.tree_children(text),
}
} else {
self.tree_children(text)
}
}
fn tree_children(&self, text: &Text) -> Vec<TreeChild> {
self.children
.clone()
.into_iter()
.map(|c| c.into_tree_child(text))
.collect()
}
#[allow(unused)]
fn add_shape(&mut self, shape: Spanned<FlatShape>) {
self.children.push(FrameChild::Shape(shape))
}
fn has_child_shapes(&self) -> bool {
self.any_child_shape(|_| true)
}
fn any_child_shape(&self, predicate: impl Fn(Spanned<FlatShape>) -> bool) -> bool {
for item in &self.children {
match item {
FrameChild::Shape(shape) => {
if predicate(*shape) {
return true;
}
}
_ => {}
}
}
false
}
fn any_child_frame(&self, predicate: impl Fn(&ColorFrame) -> bool) -> bool {
for item in &self.children {
match item {
FrameChild::Frame(frame) => {
if predicate(frame) {
return true;
}
}
_ => {}
}
}
false
}
fn has_descendent_shapes(&self) -> bool {
if self.has_child_shapes() {
true
} else {
self.any_child_frame(|frame| frame.has_descendent_shapes())
}
}
fn has_only_error_descendents(&self) -> bool {
if self.children.len() == 0 {
// if this frame has no children at all, it has only error descendents if this frame
// is an error
self.error.is_some()
} else {
// otherwise, it has only error descendents if all of its children terminate in an
// error (transitively)
let mut seen_error = false;
for child in &self.children {
match child {
// if this frame has at least one child shape, this frame has non-error descendents
FrameChild::Shape(_) => return false,
FrameChild::Frame(frame) => {
// if the child frame bottoms out in errors, note that we saw an error;
// otherwise this frame has a non-error descendent
if frame.has_only_error_descendents() {
seen_error = true;
} else {
return false;
}
}
}
}
seen_error
}
}
}
#[derive(Debug, Clone)]
pub enum TreeChild {
Shape(Spanned<FlatShape>, Text),
Frame(ColorFrame, Text),
}
impl TreeChild {
fn colored_leaf_description(&self, f: &mut impl io::Write) -> io::Result<()> {
match self {
TreeChild::Shape(shape, text) => write!(
f,
"{} {:?}",
Color::White
.bold()
.on(Color::Green)
.paint(format!("{:?}", shape.item)),
shape.span.slice(text)
),
TreeChild::Frame(frame, _) => frame.colored_leaf_description(f),
}
}
}
impl TreeItem for TreeChild {
type Child = TreeChild;
fn write_self<W: io::Write>(&self, f: &mut W, _style: &Style) -> io::Result<()> {
match self {
shape @ TreeChild::Shape(..) => shape.colored_leaf_description(f),
TreeChild::Frame(frame, text) => frame.colored_description(text, f),
}
}
fn children(&self) -> Cow<[Self::Child]> {
match self {
TreeChild::Shape(..) => Cow::Borrowed(&[]),
TreeChild::Frame(frame, text) => Cow::Owned(frame.children_for_formatting(text)),
}
}
}
#[derive(Debug, Clone)]
pub struct Tracer {
frame_stack: Vec<ColorFrame>,
}
impl Tracer {
pub fn print(self, source: Text) -> PrintTracer {
PrintTracer {
tracer: self,
source,
}
}
pub fn new() -> Tracer {
let root = ColorFrame {
description: "Trace",
children: vec![],
error: None,
};
Tracer {
frame_stack: vec![root],
}
}
fn current_frame(&mut self) -> &mut ColorFrame {
let frames = &mut self.frame_stack;
let last = frames.len() - 1;
&mut frames[last]
}
fn pop_frame(&mut self) -> ColorFrame {
let result = self.frame_stack.pop().expect("Can't pop root tracer frame");
if self.frame_stack.len() == 0 {
panic!("Can't pop root tracer frame");
}
self.debug();
result
}
pub fn start(&mut self, description: &'static str) {
let frame = ColorFrame {
description,
children: vec![],
error: None,
};
self.frame_stack.push(frame);
self.debug();
}
pub fn eof_frame(&mut self) {
let current = self.pop_frame();
self.current_frame()
.children
.push(FrameChild::Frame(current));
}
#[allow(unused)]
pub fn finish(&mut self) {
loop {
if self.frame_stack.len() == 1 {
break;
}
let frame = self.pop_frame();
self.current_frame().children.push(FrameChild::Frame(frame));
}
}
#[allow(unused)]
pub fn add_shape(&mut self, shape: Spanned<FlatShape>) {
self.current_frame().add_shape(shape);
}
pub fn success(&mut self) {
let current = self.pop_frame();
self.current_frame()
.children
.push(FrameChild::Frame(current));
}
pub fn failed(&mut self, error: &ShellError) {
let mut current = self.pop_frame();
current.error = Some(error.clone());
self.current_frame()
.children
.push(FrameChild::Frame(current));
}
fn debug(&self) {
trace!(target: "nu::color_syntax",
"frames = {:?}",
self.frame_stack
.iter()
.map(|f| f.description)
.collect::<Vec<_>>()
);
trace!(target: "nu::color_syntax", "{:#?}", self);
}
}
#[derive(Debug, Clone)]
pub struct PrintTracer {
tracer: Tracer,
source: Text,
}
impl TreeItem for PrintTracer {
type Child = TreeChild;
fn write_self<W: io::Write>(&self, f: &mut W, style: &Style) -> io::Result<()> {
write!(f, "{}", style.paint("Color Trace"))
}
fn children(&self) -> Cow<[Self::Child]> {
Cow::Owned(vec![TreeChild::Frame(
self.tracer.frame_stack[0].clone(),
self.source.clone(),
)])
}
}

View file

@ -310,15 +310,6 @@ pub fn bare(input: NomSpan) -> IResult<NomSpan, TokenNode> {
let next_char = &input.fragment.chars().nth(0); let next_char = &input.fragment.chars().nth(0);
let prev_char = last.fragment.chars().nth(0); let prev_char = last.fragment.chars().nth(0);
// if let (Some(prev), Some(next)) = (prev_char, next_char) {
// if prev == '.' && is_member_start(*next) {
// return Err(nom::Err::Error(nom::error::make_error(
// input,
// nom::error::ErrorKind::TakeWhile1,
// )));
// }
// }
if let Some(next_char) = next_char { if let Some(next_char) = next_char {
if is_external_word_char(*next_char) || is_glob_specific_char(*next_char) { if is_external_word_char(*next_char) || is_glob_specific_char(*next_char) {
return Err(nom::Err::Error(nom::error::make_error( return Err(nom::Err::Error(nom::error::make_error(

View file

@ -5,8 +5,9 @@ use derive_new::new;
use getset::Getters; use getset::Getters;
use std::fmt; use std::fmt;
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, new)] #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Getters, new)]
pub struct Pipeline { pub struct Pipeline {
#[get = "pub"]
pub(crate) parts: Vec<Spanned<PipelineElement>>, pub(crate) parts: Vec<Spanned<PipelineElement>>,
// pub(crate) post_ws: Option<Tag>, // pub(crate) post_ws: Option<Tag>,
} }
@ -24,6 +25,7 @@ impl ToDebug for Pipeline {
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)] #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)]
pub struct PipelineElement { pub struct PipelineElement {
pub pipe: Option<Span>, pub pipe: Option<Span>,
#[get = "pub"]
pub tokens: Spanned<Vec<TokenNode>>, pub tokens: Spanned<Vec<TokenNode>>,
} }

View file

@ -90,11 +90,11 @@ pub fn parse_command_tail(
let mut positional = vec![]; let mut positional = vec![];
for arg in &config.positional { for arg in &config.positional {
trace!("Processing positional {:?}", arg); trace!(target: "nu::parse", "Processing positional {:?}", arg);
match arg { match arg {
PositionalType::Mandatory(..) => { PositionalType::Mandatory(..) => {
if tail.at_end() { if tail.at_end_possible_ws() {
return Err(ShellError::argument_error( return Err(ShellError::argument_error(
config.name.clone(), config.name.clone(),
ArgumentError::MissingMandatoryPositional(arg.name().to_string()), ArgumentError::MissingMandatoryPositional(arg.name().to_string()),
@ -107,7 +107,7 @@ pub fn parse_command_tail(
} }
PositionalType::Optional(..) => { PositionalType::Optional(..) => {
if tail.at_end() { if tail.at_end_possible_ws() {
break; break;
} }
} }
@ -138,7 +138,7 @@ pub fn parse_command_tail(
trace_remaining("after rest", tail.clone(), context.source()); trace_remaining("after rest", tail.clone(), context.source());
trace!("Constructed positional={:?} named={:?}", positional, named); trace!(target: "nu::parse", "Constructed positional={:?} named={:?}", positional, named);
let positional = if positional.len() == 0 { let positional = if positional.len() == 0 {
None None
@ -154,7 +154,7 @@ pub fn parse_command_tail(
Some(named) Some(named)
}; };
trace!("Normalized positional={:?} named={:?}", positional, named); trace!(target: "nu::parse", "Normalized positional={:?} named={:?}", positional, named);
Ok(Some((positional, named))) Ok(Some((positional, named)))
} }
@ -391,6 +391,10 @@ impl ColorSyntax for CommandTailShape {
type Info = (); type Info = ();
type Input = Signature; type Input = Signature;
fn name(&self) -> &'static str {
"CommandTailShape"
}
fn color_syntax<'a, 'b>( fn color_syntax<'a, 'b>(
&self, &self,
signature: &Signature, signature: &Signature,
@ -427,10 +431,7 @@ impl ColorSyntax for CommandTailShape {
token_nodes.move_to(pos); token_nodes.move_to(pos);
if token_nodes.at_end() { if token_nodes.at_end() {
// args.insert(pos, shapes);
// token_nodes.restart();
return Ok(()); return Ok(());
// continue;
} }
// We still want to color the flag even if the following tokens don't match, so don't // We still want to color the flag even if the following tokens don't match, so don't
@ -465,10 +466,7 @@ impl ColorSyntax for CommandTailShape {
token_nodes.move_to(pos); token_nodes.move_to(pos);
if token_nodes.at_end() { if token_nodes.at_end() {
// args.insert(pos, shapes);
// token_nodes.restart();
return Ok(()); return Ok(());
// continue;
} }
// We still want to color the flag even if the following tokens don't match, so don't // We still want to color the flag even if the following tokens don't match, so don't
@ -573,16 +571,14 @@ impl ColorSyntax for CommandTailShape {
} }
} }
args.spread_shapes(token_nodes.mut_shapes()); token_nodes.silently_mutate_shapes(|shapes| args.spread_shapes(shapes));
// Consume any remaining tokens with backoff coloring mode // Consume any remaining tokens with backoff coloring mode
color_syntax(&BackoffColoringMode, token_nodes, context); color_syntax(&BackoffColoringMode, token_nodes, context);
// This is pretty dubious, but it works. We should look into a better algorithm that doesn't end up requiring // This is pretty dubious, but it works. We should look into a better algorithm that doesn't end up requiring
// this solution. // this solution.
token_nodes token_nodes.sort_shapes()
.mut_shapes()
.sort_by(|a, b| a.span.start().cmp(&b.span.start()));
} }
} }
@ -633,7 +629,7 @@ fn extract_optional(
pub fn trace_remaining(desc: &'static str, tail: hir::TokensIterator<'_>, source: &Text) { pub fn trace_remaining(desc: &'static str, tail: hir::TokensIterator<'_>, source: &Text) {
trace!( trace!(
target: "nu::expand_args", target: "nu::parse",
"{} = {:?}", "{} = {:?}",
desc, desc,
itertools::join( itertools::join(

View file

@ -5,7 +5,7 @@ use crate::parser::nom_input;
use crate::parser::parse::token_tree::TokenNode; use crate::parser::parse::token_tree::TokenNode;
use crate::{Span, Spanned, SpannedItem, Tag, Tagged, Text}; use crate::{Span, Spanned, SpannedItem, Tag, Tagged, Text};
use ansi_term::Color; use ansi_term::Color;
use log::trace; use log::{log_enabled, trace};
use rustyline::completion::Completer; use rustyline::completion::Completer;
use rustyline::error::ReadlineError; use rustyline::error::ReadlineError;
use rustyline::highlight::Highlighter; use rustyline::highlight::Highlighter;
@ -34,23 +34,6 @@ impl Completer for Helper {
} }
} }
/*
impl Completer for Helper {
type Candidate = rustyline::completion::Pair;
fn complete(
&self,
line: &str,
pos: usize,
ctx: &rustyline::Context<'_>,
) -> Result<(usize, Vec<rustyline::completion::Pair>), ReadlineError> {
let result = self.helper.complete(line, pos, ctx);
result.map(|(x, y)| (x, y.iter().map(|z| z.into()).collect()))
}
}
*/
impl Hinter for Helper { impl Hinter for Helper {
fn hint(&self, line: &str, pos: usize, ctx: &rustyline::Context<'_>) -> Option<String> { fn hint(&self, line: &str, pos: usize, ctx: &rustyline::Context<'_>) -> Option<String> {
self.context.shell_manager.hint(line, pos, ctx) self.context.shell_manager.hint(line, pos, ctx)
@ -103,14 +86,18 @@ impl Highlighter for Helper {
let shapes = { let shapes = {
// We just constructed a token list that only contains a pipeline, so it can't fail // We just constructed a token list that only contains a pipeline, so it can't fail
color_fallible_syntax(&PipelineShape, &mut tokens, &expand_context).unwrap(); color_fallible_syntax(&PipelineShape, &mut tokens, &expand_context).unwrap();
tokens.with_tracer(|_, tracer| tracer.finish());
tokens.shapes() tokens.state().shapes()
}; };
trace!(target: "nu::shapes", trace!(target: "nu::color_syntax", "{:#?}", tokens.tracer());
"SHAPES :: {:?}",
shapes.iter().map(|shape| shape.item).collect::<Vec<_>>() if log_enabled!(target: "nu::color_syntax", log::Level::Trace) {
); println!("");
ptree::print_tree(&tokens.tracer().clone().print(Text::from(line))).unwrap();
println!("");
}
for shape in shapes { for shape in shapes {
let styled = paint_flat_shape(&shape, line); let styled = paint_flat_shape(&shape, line);
@ -118,18 +105,6 @@ impl Highlighter for Helper {
} }
Cow::Owned(out) Cow::Owned(out)
// loop {
// match iter.next() {
// None => {
// return Cow::Owned(out);
// }
// Some(token) => {
// let styled = paint_pipeline_element(&token, line);
// out.push_str(&styled.to_string());
// }
// }
// }
} }
} }
} }
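The helper.rs change above gates the pretty tree dump behind log_enabled!, so the ptree rendering only happens when trace logging is active for the nu::color_syntax target. Below is a generic sketch of that gating pattern; it assumes env_logger as the logging backend purely for illustration (nu's actual logger setup is not part of this diff).

use log::{log_enabled, trace, Level};

// Stands in for building and printing the ptree color trace.
fn expensive_debug_tree() -> String {
    "Color Trace\n└─ pipeline -> shape".to_string()
}

fn main() {
    // Assumed backend for the example; run with RUST_LOG=nu::color_syntax=trace.
    env_logger::init();

    trace!(target: "nu::color_syntax", "highlighting one input line");

    // Only do the expensive diagnostic work when someone asked for it.
    if log_enabled!(target: "nu::color_syntax", Level::Trace) {
        println!();
        println!("{}", expensive_debug_tree());
        println!();
    }
}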

View file

@ -246,13 +246,18 @@ fn it_arg_works_with_many_inputs_to_external_command() {
let (stdout, stderr) = nu_combined!( let (stdout, stderr) = nu_combined!(
cwd: dirs.test(), h::pipeline( cwd: dirs.test(), h::pipeline(
r#" r#"
echo file1 file2 echo hello world
| split-row " " | split-row " "
| cat $it | ^echo $it
"# "#
)); ));
assert_eq!("text and more text", stdout); #[cfg(windows)]
assert_eq!("hello world", stdout);
#[cfg(not(windows))]
assert_eq!("helloworld", stdout);
assert!(!stderr.contains("No such file or directory")); assert!(!stderr.contains("No such file or directory"));
}) })
} }