Merge pull request #831 from nushell/coloring_in_tokens

Start moving coloring into the token stream
Yehuda Katz, 2019-10-14 18:31:21 -07:00, committed by GitHub
commit d1ebc55ed7
15 changed files with 1808 additions and 10 deletions


@ -2,3 +2,12 @@
description = "Adding hints based upon error states in the syntax highlighter"
enabled = false
[coloring_in_tokens]
description = "Move coloring into the TokensIterator so they can be atomic with the rest of the iterator"
reason = """
This is laying the groundwork for merging coloring and parsing. It also makes token_nodes.atomic() naturally
work with coloring, which is pretty useful on its own.
"""
enabled = false
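
The core of the change, in miniature: once colored shapes live on the TokensIterator itself, coloring no longer has to thread a separate shapes vector through every call, and anything that rewinds the iterator can later rewind the coloring with it. Below is a minimal, self-contained sketch of the first half of that design — shapes accumulating on the iterator and being read back by a consumer. The types here (the two FlatShape variants, the stripped-down TokensIterator) are toy stand-ins for illustration, not nushell's real API.

// Toy model: the iterator owns the colored shapes instead of threading a
// separate `&mut Vec<Spanned<FlatShape>>` through every color_syntax call.
#[derive(Debug, Clone, Copy)]
enum FlatShape {
    Word,
    Whitespace,
}

struct TokensIterator {
    shapes: Vec<(FlatShape, std::ops::Range<usize>)>,
}

impl TokensIterator {
    fn color_shape(&mut self, shape: FlatShape, span: std::ops::Range<usize>) {
        // Each coloring pass just pushes onto the iterator's own vector.
        self.shapes.push((shape, span));
    }

    fn shapes(&self) -> &[(FlatShape, std::ops::Range<usize>)] {
        &self.shapes
    }
}

fn main() {
    let mut tokens = TokensIterator { shapes: vec![] };
    // A coloring pass records shapes as it walks the tokens...
    tokens.color_shape(FlatShape::Word, 0..4);
    tokens.color_shape(FlatShape::Whitespace, 4..5);
    // ...and a consumer such as the syntax highlighter reads them back afterwards.
    println!("{:?}", tokens.shapes());
}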


@ -1,10 +1,12 @@
use crate::errors::ShellError;
#[cfg(not(coloring_in_tokens))]
use crate::parser::hir::syntax_shape::FlatShape;
use crate::parser::{
hir::syntax_shape::{
color_syntax, expand_atom, AtomicToken, ColorSyntax, ExpandContext, ExpansionRule,
MaybeSpaceShape,
},
FlatShape, TokenNode, TokensIterator,
TokenNode, TokensIterator,
};
use crate::{Span, Spanned, Text};
@ -28,6 +30,7 @@ pub fn expand_external_tokens(
#[derive(Debug, Copy, Clone)]
pub struct ExternalTokensShape;
#[cfg(not(coloring_in_tokens))]
impl ColorSyntax for ExternalTokensShape {
type Info = ();
type Input = ();
@ -53,6 +56,31 @@ impl ColorSyntax for ExternalTokensShape {
}
}
#[cfg(coloring_in_tokens)]
impl ColorSyntax for ExternalTokensShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
) -> Self::Info {
loop {
// Allow a space
color_syntax(&MaybeSpaceShape, token_nodes, context);
// Process an external expression. External expressions are mostly words, with a
// few exceptions (like $variables and path expansion rules)
match color_syntax(&ExternalExpression, token_nodes, context).1 {
ExternalExpressionResult::Eof => break,
ExternalExpressionResult::Processed => continue,
}
}
}
}
pub fn expand_next_expression(
token_nodes: &mut TokensIterator<'_>,
) -> Result<Option<Span>, ShellError> {
@ -128,6 +156,7 @@ enum ExternalExpressionResult {
#[derive(Debug, Copy, Clone)]
struct ExternalExpression;
#[cfg(not(coloring_in_tokens))]
impl ColorSyntax for ExternalExpression {
type Info = ExternalExpressionResult;
type Input = ();
@ -157,3 +186,33 @@ impl ColorSyntax for ExternalExpression {
return ExternalExpressionResult::Processed;
}
}
#[cfg(coloring_in_tokens)]
impl ColorSyntax for ExternalExpression {
type Info = ExternalExpressionResult;
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
) -> ExternalExpressionResult {
let atom = match expand_atom(
token_nodes,
"external word",
context,
ExpansionRule::permissive(),
) {
Err(_) => unreachable!("TODO: separate infallible expand_atom"),
Ok(Spanned {
item: AtomicToken::Eof { .. },
..
}) => return ExternalExpressionResult::Eof,
Ok(atom) => atom,
};
atom.color_tokens(token_nodes.mut_shapes());
return ExternalExpressionResult::Processed;
}
}


@ -55,6 +55,7 @@ pub enum SyntaxShape {
Block,
}
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for SyntaxShape {
type Info = ();
type Input = ();
@ -104,6 +105,39 @@ impl FallibleColorSyntax for SyntaxShape {
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for SyntaxShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
) -> Result<(), ShellError> {
match self {
SyntaxShape::Any => color_fallible_syntax(&AnyExpressionShape, token_nodes, context),
SyntaxShape::List => {
color_syntax(&ExpressionListShape, token_nodes, context);
Ok(())
}
SyntaxShape::Int => color_fallible_syntax(&IntShape, token_nodes, context),
SyntaxShape::String => {
color_fallible_syntax_with(&StringShape, &FlatShape::String, token_nodes, context)
}
SyntaxShape::Member => color_fallible_syntax(&MemberShape, token_nodes, context),
SyntaxShape::ColumnPath => {
color_fallible_syntax(&ColumnPathShape, token_nodes, context)
}
SyntaxShape::Number => color_fallible_syntax(&NumberShape, token_nodes, context),
SyntaxShape::Path => color_fallible_syntax(&FilePathShape, token_nodes, context),
SyntaxShape::Pattern => color_fallible_syntax(&PatternShape, token_nodes, context),
SyntaxShape::Block => color_fallible_syntax(&AnyBlockShape, token_nodes, context),
}
}
}
impl ExpandExpression for SyntaxShape {
fn expand_expr<'a, 'b>(
&self,
@ -202,6 +236,20 @@ pub trait ExpandExpression: std::fmt::Debug + Copy {
) -> Result<hir::Expression, ShellError>;
}
#[cfg(coloring_in_tokens)]
pub trait FallibleColorSyntax: std::fmt::Debug + Copy {
type Info;
type Input;
fn color_syntax<'a, 'b>(
&self,
input: &Self::Input,
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
) -> Result<Self::Info, ShellError>;
}
#[cfg(not(coloring_in_tokens))]
pub trait FallibleColorSyntax: std::fmt::Debug + Copy {
type Info;
type Input;
@ -215,6 +263,7 @@ pub trait FallibleColorSyntax: std::fmt::Debug + Copy {
) -> Result<Self::Info, ShellError>;
}
#[cfg(not(coloring_in_tokens))]
pub trait ColorSyntax: std::fmt::Debug + Copy {
type Info;
type Input;
@ -228,6 +277,19 @@ pub trait ColorSyntax: std::fmt::Debug + Copy {
) -> Self::Info;
}
#[cfg(coloring_in_tokens)]
pub trait ColorSyntax: std::fmt::Debug + Copy {
type Info;
type Input;
fn color_syntax<'a, 'b>(
&self,
input: &Self::Input,
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
) -> Self::Info;
}
// impl<T> ColorSyntax for T
// where
// T: FallibleColorSyntax,
@ -278,6 +340,7 @@ pub(crate) fn expand_syntax<'a, 'b, T: ExpandSyntax>(
}
}
#[cfg(not(coloring_in_tokens))]
pub fn color_syntax<'a, 'b, T: ColorSyntax<Info = U, Input = ()>, U>(
shape: &T,
token_nodes: &'b mut TokensIterator<'a>,
@ -306,6 +369,35 @@ pub fn color_syntax<'a, 'b, T: ColorSyntax<Info = U, Input = ()>, U>(
((), result)
}
#[cfg(coloring_in_tokens)]
pub fn color_syntax<'a, 'b, T: ColorSyntax<Info = U, Input = ()>, U>(
shape: &T,
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
) -> ((), U) {
trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::<T>(), debug_tokens(token_nodes, context.source));
let len = token_nodes.shapes().len();
let result = shape.color_syntax(&(), token_nodes, context);
trace!(target: "nu::color_syntax", "ok :: {:?}", debug_tokens(token_nodes, context.source));
if log_enabled!(target: "nu::color_syntax", log::Level::Trace) {
trace!(target: "nu::color_syntax", "after {}", std::any::type_name::<T>());
if len < token_nodes.shapes().len() {
for i in len..(token_nodes.shapes().len()) {
trace!(target: "nu::color_syntax", "new shape :: {:?}", token_nodes.shapes()[i]);
}
} else {
trace!(target: "nu::color_syntax", "no new shapes");
}
}
((), result)
}
#[cfg(not(coloring_in_tokens))]
pub fn color_fallible_syntax<'a, 'b, T: FallibleColorSyntax<Info = U, Input = ()>, U>(
shape: &T,
token_nodes: &'b mut TokensIterator<'a>,
@ -339,6 +431,40 @@ pub fn color_fallible_syntax<'a, 'b, T: FallibleColorSyntax<Info = U, Input = ()
result
}
#[cfg(coloring_in_tokens)]
pub fn color_fallible_syntax<'a, 'b, T: FallibleColorSyntax<Info = U, Input = ()>, U>(
shape: &T,
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
) -> Result<U, ShellError> {
trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::<T>(), debug_tokens(token_nodes, context.source));
if token_nodes.at_end() {
trace!(target: "nu::color_syntax", "at eof");
return Err(ShellError::unexpected_eof("coloring", Tag::unknown()));
}
let len = token_nodes.shapes().len();
let result = shape.color_syntax(&(), token_nodes, context);
trace!(target: "nu::color_syntax", "ok :: {:?}", debug_tokens(token_nodes, context.source));
if log_enabled!(target: "nu::color_syntax", log::Level::Trace) {
trace!(target: "nu::color_syntax", "after {}", std::any::type_name::<T>());
if len < token_nodes.shapes().len() {
for i in len..(token_nodes.shapes().len()) {
trace!(target: "nu::color_syntax", "new shape :: {:?}", token_nodes.shapes()[i]);
}
} else {
trace!(target: "nu::color_syntax", "no new shapes");
}
}
result
}
#[cfg(not(coloring_in_tokens))]
pub fn color_syntax_with<'a, 'b, T: ColorSyntax<Info = U, Input = I>, U, I>(
shape: &T,
input: &I,
@ -368,6 +494,36 @@ pub fn color_syntax_with<'a, 'b, T: ColorSyntax<Info = U, Input = I>, U, I>(
((), result)
}
#[cfg(coloring_in_tokens)]
pub fn color_syntax_with<'a, 'b, T: ColorSyntax<Info = U, Input = I>, U, I>(
shape: &T,
input: &I,
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
) -> ((), U) {
trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::<T>(), debug_tokens(token_nodes, context.source));
let len = token_nodes.shapes().len();
let result = shape.color_syntax(input, token_nodes, context);
trace!(target: "nu::color_syntax", "ok :: {:?}", debug_tokens(token_nodes, context.source));
if log_enabled!(target: "nu::color_syntax", log::Level::Trace) {
trace!(target: "nu::color_syntax", "after {}", std::any::type_name::<T>());
if len < token_nodes.shapes().len() {
for i in len..(token_nodes.shapes().len()) {
trace!(target: "nu::color_syntax", "new shape :: {:?}", token_nodes.shapes()[i]);
}
} else {
trace!(target: "nu::color_syntax", "no new shapes");
}
}
((), result)
}
#[cfg(not(coloring_in_tokens))]
pub fn color_fallible_syntax_with<'a, 'b, T: FallibleColorSyntax<Info = U, Input = I>, U, I>(
shape: &T,
input: &I,
@ -402,6 +558,40 @@ pub fn color_fallible_syntax_with<'a, 'b, T: FallibleColorSyntax<Info = U, Input
result
}
#[cfg(coloring_in_tokens)]
pub fn color_fallible_syntax_with<'a, 'b, T: FallibleColorSyntax<Info = U, Input = I>, U, I>(
shape: &T,
input: &I,
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
) -> Result<U, ShellError> {
trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::<T>(), debug_tokens(token_nodes, context.source));
if token_nodes.at_end() {
trace!(target: "nu::color_syntax", "at eof");
return Err(ShellError::unexpected_eof("coloring", Tag::unknown()));
}
let len = token_nodes.shapes().len();
let result = shape.color_syntax(input, token_nodes, context);
trace!(target: "nu::color_syntax", "ok :: {:?}", debug_tokens(token_nodes, context.source));
if log_enabled!(target: "nu::color_syntax", log::Level::Trace) {
trace!(target: "nu::color_syntax", "after {}", std::any::type_name::<T>());
if len < token_nodes.shapes().len() {
for i in len..(token_nodes.shapes().len()) {
trace!(target: "nu::color_syntax", "new shape :: {:?}", token_nodes.shapes()[i]);
}
} else {
trace!(target: "nu::color_syntax", "no new shapes");
}
}
result
}
pub(crate) fn expand_expr<'a, 'b, T: ExpandExpression>(
shape: &T,
token_nodes: &'b mut TokensIterator<'a>,
@ -536,6 +726,7 @@ impl ExpandSyntax for BarePathShape {
#[derive(Debug, Copy, Clone)]
pub struct BareShape;
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for BareShape {
type Info = ();
type Input = FlatShape;
@ -563,6 +754,37 @@ impl FallibleColorSyntax for BareShape {
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for BareShape {
type Info = ();
type Input = FlatShape;
fn color_syntax<'a, 'b>(
&self,
input: &FlatShape,
token_nodes: &'b mut TokensIterator<'a>,
_context: &ExpandContext,
) -> Result<(), ShellError> {
let span = token_nodes.peek_any_token(|token| match token {
// If it's a bare token, color it
TokenNode::Token(Spanned {
item: RawToken::Bare,
span,
}) => {
// token_nodes.color_shape((*input).spanned(*span));
Ok(span)
}
// otherwise, fail
other => Err(ShellError::type_error("word", other.tagged_type_name())),
})?;
token_nodes.color_shape((*input).spanned(*span));
Ok(())
}
}
impl ExpandSyntax for BareShape {
type Output = Spanned<String>;
@ -636,6 +858,7 @@ impl CommandSignature {
#[derive(Debug, Copy, Clone)]
pub struct PipelineShape;
#[cfg(not(coloring_in_tokens))]
// The failure mode is if the head of the token stream is not a pipeline
impl FallibleColorSyntax for PipelineShape {
type Info = ();
@ -669,6 +892,39 @@ impl FallibleColorSyntax for PipelineShape {
}
}
#[cfg(coloring_in_tokens)]
// The failure mode is if the head of the token stream is not a pipeline
impl FallibleColorSyntax for PipelineShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
) -> Result<(), ShellError> {
// Make sure we're looking at a pipeline
let Pipeline { parts, .. } = token_nodes.peek_any_token(|node| node.as_pipeline())?;
// Enumerate the pipeline parts
for part in parts {
// If the pipeline part has a prefix `|`, emit a pipe to color
if let Some(pipe) = part.pipe {
token_nodes.color_shape(FlatShape::Pipe.spanned(pipe))
}
// Create a new iterator containing the tokens in the pipeline part to color
let mut token_nodes = TokensIterator::new(&part.tokens.item, part.span, false);
color_syntax(&MaybeSpaceShape, &mut token_nodes, context);
color_syntax(&CommandShape, &mut token_nodes, context);
}
Ok(())
}
}
impl ExpandSyntax for PipelineShape {
type Output = ClassifiedPipeline;
fn expand_syntax<'a, 'b>(
@ -703,6 +959,7 @@ pub enum CommandHeadKind {
#[derive(Debug, Copy, Clone)]
pub struct CommandHeadShape;
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for CommandHeadShape {
type Info = CommandHeadKind;
type Input = ();
@ -756,6 +1013,59 @@ impl FallibleColorSyntax for CommandHeadShape {
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for CommandHeadShape {
type Info = CommandHeadKind;
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
) -> Result<CommandHeadKind, ShellError> {
// If we don't ultimately find a token, roll back
token_nodes.atomic(|token_nodes| {
// First, take a look at the next token
let atom = expand_atom(
token_nodes,
"command head",
context,
ExpansionRule::permissive(),
)?;
match atom.item {
// If the head is an explicit external command (^cmd), color it as an external command
AtomicToken::ExternalCommand { command } => {
token_nodes.color_shape(FlatShape::ExternalCommand.spanned(command));
Ok(CommandHeadKind::External)
}
// If the head is a word, it depends on whether it matches a registered internal command
AtomicToken::Word { text } => {
let name = text.slice(context.source);
if context.registry.has(name) {
// If the registry has the command, color it as an internal command
token_nodes.color_shape(FlatShape::InternalCommand.spanned(text));
let command = context.registry.expect_command(name);
Ok(CommandHeadKind::Internal(command.signature()))
} else {
// Otherwise, color it as an external command
token_nodes.color_shape(FlatShape::ExternalCommand.spanned(text));
Ok(CommandHeadKind::External)
}
}
// Otherwise, we're not actually looking at a command
_ => Err(ShellError::syntax_error(
"No command at the head".tagged(atom.span),
)),
}
})
}
}
impl ExpandSyntax for CommandHeadShape {
type Output = CommandSignature;
@ -861,6 +1171,7 @@ impl ExpandSyntax for ClassifiedCommandShape {
#[derive(Debug, Copy, Clone)]
pub struct InternalCommandHeadShape;
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for InternalCommandHeadShape {
type Info = ();
type Input = ();
@ -899,6 +1210,44 @@ impl FallibleColorSyntax for InternalCommandHeadShape {
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for InternalCommandHeadShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
_context: &ExpandContext,
) -> Result<(), ShellError> {
let peeked_head = token_nodes.peek_non_ws().not_eof("command head4");
let peeked_head = match peeked_head {
Err(_) => return Ok(()),
Ok(peeked_head) => peeked_head,
};
let node = peeked_head.commit();
let _expr = match node {
TokenNode::Token(Spanned {
item: RawToken::Bare,
span,
}) => token_nodes.color_shape(FlatShape::Word.spanned(*span)),
TokenNode::Token(Spanned {
item: RawToken::String(_inner_tag),
span,
}) => token_nodes.color_shape(FlatShape::String.spanned(*span)),
_node => token_nodes.color_shape(FlatShape::Error.spanned(node.span())),
};
Ok(())
}
}
impl ExpandExpression for InternalCommandHeadShape {
fn expand_expr(
&self,
@ -992,6 +1341,7 @@ fn parse_single_node_skipping_ws<'a, 'b, T>(
#[derive(Debug, Copy, Clone)]
pub struct WhitespaceShape;
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for WhitespaceShape {
type Info = ();
type Input = ();
@ -1022,6 +1372,38 @@ impl FallibleColorSyntax for WhitespaceShape {
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for WhitespaceShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
_context: &ExpandContext,
) -> Result<(), ShellError> {
let peeked = token_nodes.peek_any().not_eof("whitespace");
let peeked = match peeked {
Err(_) => return Ok(()),
Ok(peeked) => peeked,
};
let node = peeked.commit();
let _ = match node {
TokenNode::Whitespace(span) => {
token_nodes.color_shape(FlatShape::Whitespace.spanned(*span))
}
_other => return Ok(()),
};
Ok(())
}
}
impl ExpandSyntax for WhitespaceShape {
type Output = Span;
@ -1089,6 +1471,7 @@ pub struct MaybeSpacedExpression<T: ExpandExpression> {
#[derive(Debug, Copy, Clone)]
pub struct MaybeSpaceShape;
#[cfg(not(coloring_in_tokens))]
impl ColorSyntax for MaybeSpaceShape {
type Info = ();
type Input = ();
@ -1114,9 +1497,35 @@ impl ColorSyntax for MaybeSpaceShape {
}
}
#[cfg(coloring_in_tokens)]
impl ColorSyntax for MaybeSpaceShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
_context: &ExpandContext,
) -> Self::Info {
let peeked = token_nodes.peek_any().not_eof("whitespace");
let peeked = match peeked {
Err(_) => return,
Ok(peeked) => peeked,
};
if let TokenNode::Whitespace(span) = peeked.node {
peeked.commit();
token_nodes.color_shape(FlatShape::Whitespace.spanned(*span));
}
}
}
#[derive(Debug, Copy, Clone)]
pub struct SpaceShape;
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for SpaceShape {
type Info = ();
type Input = ();
@ -1145,6 +1554,34 @@ impl FallibleColorSyntax for SpaceShape {
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for SpaceShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
_context: &ExpandContext,
) -> Result<(), ShellError> {
let peeked = token_nodes.peek_any().not_eof("whitespace")?;
match peeked.node {
TokenNode::Whitespace(span) => {
peeked.commit();
token_nodes.color_shape(FlatShape::Whitespace.spanned(*span));
Ok(())
}
other => Err(ShellError::type_error(
"whitespace",
other.tagged_type_name(),
)),
}
}
}
impl<T: ExpandExpression> ExpandExpression for MaybeSpacedExpression<T> {
fn expand_expr<'a, 'b>(
&self,
@ -1237,6 +1674,7 @@ fn classify_command(
#[derive(Debug, Copy, Clone)]
pub struct CommandShape;
#[cfg(not(coloring_in_tokens))]
impl ColorSyntax for CommandShape {
type Info = ();
type Input = ();
@ -1266,3 +1704,33 @@ impl ColorSyntax for CommandShape {
};
}
}
#[cfg(coloring_in_tokens)]
impl ColorSyntax for CommandShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
) {
let kind = color_fallible_syntax(&CommandHeadShape, token_nodes, context);
match kind {
Err(_) => {
// We didn't find a command, so we'll have to fall back to parsing this pipeline part
// as a blob of undifferentiated expressions
color_syntax(&ExpressionListShape, token_nodes, context);
}
Ok(CommandHeadKind::External) => {
color_syntax(&ExternalTokensShape, token_nodes, context);
}
Ok(CommandHeadKind::Internal(signature)) => {
color_syntax_with(&CommandTailShape, &signature, token_nodes, context);
}
};
}
}


@ -1,11 +1,12 @@
use crate::errors::ShellError;
#[cfg(not(coloring_in_tokens))]
use crate::parser::hir::syntax_shape::FlatShape;
use crate::parser::{
hir,
hir::syntax_shape::{
color_fallible_syntax, color_syntax_with, continue_expression, expand_expr, expand_syntax,
DelimitedShape, ExpandContext, ExpandExpression, ExpressionContinuationShape,
ExpressionListShape, FallibleColorSyntax, FlatShape, MemberShape, PathTailShape,
VariablePathShape,
ExpressionListShape, FallibleColorSyntax, MemberShape, PathTailShape, VariablePathShape,
},
hir::tokens_iterator::TokensIterator,
parse::token_tree::Delimiter,
@ -16,6 +17,7 @@ use crate::{Span, Spanned, SpannedItem};
#[derive(Debug, Copy, Clone)]
pub struct AnyBlockShape;
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for AnyBlockShape {
type Info = ();
type Input = ();
@ -59,6 +61,48 @@ impl FallibleColorSyntax for AnyBlockShape {
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for AnyBlockShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
) -> Result<(), ShellError> {
let block = token_nodes.peek_non_ws().not_eof("block");
let block = match block {
Err(_) => return Ok(()),
Ok(block) => block,
};
// is it just a block?
let block = block.node.as_block();
match block {
// If so, color it as a block
Some((children, spans)) => {
let mut token_nodes = TokensIterator::new(children.item, context.span, false);
color_syntax_with(
&DelimitedShape,
&(Delimiter::Brace, spans.0, spans.1),
&mut token_nodes,
context,
);
return Ok(());
}
_ => {}
}
// Otherwise, look for a shorthand block. If none found, fail
color_fallible_syntax(&ShorthandBlock, token_nodes, context)
}
}
impl ExpandExpression for AnyBlockShape {
fn expand_expr<'a, 'b>(
&self,
@ -88,6 +132,7 @@ impl ExpandExpression for AnyBlockShape {
#[derive(Debug, Copy, Clone)]
pub struct ShorthandBlock;
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for ShorthandBlock {
type Info = ();
type Input = ();
@ -119,6 +164,36 @@ impl FallibleColorSyntax for ShorthandBlock {
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for ShorthandBlock {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
) -> Result<(), ShellError> {
// Try to find a shorthand head. If none found, fail
color_fallible_syntax(&ShorthandPath, token_nodes, context)?;
loop {
// Check to see whether there's any continuation after the head expression
let result = color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context);
match result {
// if no continuation was found, we're done
Err(_) => break,
// if a continuation was found, look for another one
Ok(_) => continue,
}
}
Ok(())
}
}
impl ExpandExpression for ShorthandBlock {
fn expand_expr<'a, 'b>(
&self,
@ -139,6 +214,7 @@ impl ExpandExpression for ShorthandBlock {
#[derive(Debug, Copy, Clone)]
pub struct ShorthandPath;
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for ShorthandPath {
type Info = ();
type Input = ();
@ -183,6 +259,50 @@ impl FallibleColorSyntax for ShorthandPath {
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for ShorthandPath {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
) -> Result<(), ShellError> {
token_nodes.atomic(|token_nodes| {
let variable = color_fallible_syntax(&VariablePathShape, token_nodes, context);
match variable {
Ok(_) => {
// if it's a variable path, that's the head part
return Ok(());
}
Err(_) => {
// otherwise, we'll try to find a member path
}
}
// look for a member (`<member>` -> `$it.<member>`)
color_fallible_syntax(&MemberShape, token_nodes, context)?;
// Now that we've synthesized the head of the path, proceed to expand the tail of the path
// like any other path.
let tail = color_fallible_syntax(&PathTailShape, token_nodes, context);
match tail {
Ok(_) => {}
Err(_) => {
// It's ok if there's no path tail; a single member is sufficient
}
}
Ok(())
})
}
}
impl ExpandExpression for ShorthandPath {
fn expand_expr<'a, 'b>(
&self,
@ -223,6 +343,52 @@ impl ExpandExpression for ShorthandPath {
#[derive(Debug, Copy, Clone)]
pub struct ShorthandHeadShape;
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for ShorthandHeadShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
_context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> {
// A shorthand path must not be at EOF
let peeked = token_nodes.peek_non_ws().not_eof("shorthand path")?;
match peeked.node {
// If the head of a shorthand path is a bare token, it expands to `$it.bare`
TokenNode::Token(Spanned {
item: RawToken::Bare,
span,
}) => {
peeked.commit();
shapes.push(FlatShape::BareMember.spanned(*span));
Ok(())
}
// If the head of a shorthand path is a string, it expands to `$it."some string"`
TokenNode::Token(Spanned {
item: RawToken::String(_),
span: outer,
}) => {
peeked.commit();
shapes.push(FlatShape::StringMember.spanned(*outer));
Ok(())
}
other => Err(ShellError::type_error(
"shorthand head",
other.tagged_type_name(),
)),
}
}
}
#[cfg(coloring_in_tokens)]
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for ShorthandHeadShape {
type Info = ();
type Input = ();


@ -37,6 +37,7 @@ impl ExpandExpression for AnyExpressionShape {
}
}
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for AnyExpressionShape {
type Info = ();
type Input = ();
@ -63,6 +64,32 @@ impl FallibleColorSyntax for AnyExpressionShape {
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for AnyExpressionShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
) -> Result<(), ShellError> {
// Look for an expression at the cursor
color_fallible_syntax(&AnyExpressionStartShape, token_nodes, context)?;
match continue_coloring_expression(token_nodes, context) {
Err(_) => {
// it's fine for there to be no continuation
}
Ok(()) => {}
}
Ok(())
}
}
pub(crate) fn continue_expression(
mut head: hir::Expression,
token_nodes: &mut TokensIterator<'_>,
@ -91,6 +118,7 @@ pub(crate) fn continue_expression(
}
}
#[cfg(not(coloring_in_tokens))]
pub(crate) fn continue_coloring_expression(
token_nodes: &mut TokensIterator<'_>,
context: &ExpandContext,
@ -115,6 +143,29 @@ pub(crate) fn continue_coloring_expression(
}
}
#[cfg(coloring_in_tokens)]
pub(crate) fn continue_coloring_expression(
token_nodes: &mut TokensIterator<'_>,
context: &ExpandContext,
) -> Result<(), ShellError> {
// if there's not even one expression continuation, fail
color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context)?;
loop {
// Check to see whether there's any continuation after the head expression
let result = color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context);
match result {
Err(_) => {
// We already saw one continuation, so just return
return Ok(());
}
Ok(_) => {}
}
}
}
#[derive(Debug, Copy, Clone)]
pub struct AnyExpressionStartShape;
@ -152,6 +203,7 @@ impl ExpandExpression for AnyExpressionStartShape {
}
}
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for AnyExpressionStartShape {
type Info = ();
type Input = ();
@ -210,9 +262,70 @@ impl FallibleColorSyntax for AnyExpressionStartShape {
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for AnyExpressionStartShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
) -> Result<(), ShellError> {
let atom = token_nodes.spanned(|token_nodes| {
expand_atom(
token_nodes,
"expression",
context,
ExpansionRule::permissive(),
)
});
let atom = match atom {
Spanned {
item: Err(_err),
span,
} => {
token_nodes.color_shape(FlatShape::Error.spanned(span));
return Ok(());
}
Spanned {
item: Ok(value), ..
} => value,
};
match atom.item {
AtomicToken::Size { number, unit } => token_nodes.color_shape(
FlatShape::Size {
number: number.span.into(),
unit: unit.span.into(),
}
.spanned(atom.span),
),
AtomicToken::SquareDelimited { nodes, spans } => {
token_nodes.child((&nodes[..]).spanned(atom.span), |tokens| {
color_delimited_square(spans, tokens, atom.span.into(), context);
});
}
AtomicToken::Word { .. } | AtomicToken::Dot { .. } => {
token_nodes.color_shape(FlatShape::Word.spanned(atom.span));
}
_ => atom.color_tokens(token_nodes.mut_shapes()),
}
Ok(())
}
}
#[derive(Debug, Copy, Clone)]
pub struct BareTailShape;
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for BareTailShape {
type Info = ();
type Input = ();
@ -269,6 +382,56 @@ impl FallibleColorSyntax for BareTailShape {
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for BareTailShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
) -> Result<(), ShellError> {
let len = token_nodes.shapes().len();
loop {
let word =
color_fallible_syntax_with(&BareShape, &FlatShape::Word, token_nodes, context);
match word {
// if a word was found, continue
Ok(_) => continue,
// if a word wasn't found, try to find a dot
Err(_) => {}
}
// try to find a dot
let dot = color_fallible_syntax_with(
&ColorableDotShape,
&FlatShape::Word,
token_nodes,
context,
);
match dot {
// if a dot was found, try to find another word
Ok(_) => continue,
// otherwise, we're done
Err(_) => break,
}
}
if token_nodes.shapes().len() > len {
Ok(())
} else {
Err(ShellError::syntax_error(
"No tokens matched BareTailShape".tagged_unknown(),
))
}
}
}
impl ExpandSyntax for BareTailShape {
type Output = Option<Span>;


@ -16,6 +16,7 @@ pub fn expand_delimited_square(
Ok(hir::Expression::list(list?, Tag { span, anchor: None }))
}
#[cfg(not(coloring_in_tokens))]
pub fn color_delimited_square(
(open, close): (Span, Span),
children: &Vec<TokenNode>,
@ -29,9 +30,22 @@ pub fn color_delimited_square(
shapes.push(FlatShape::CloseDelimiter(Delimiter::Square).spanned(close));
}
#[cfg(coloring_in_tokens)]
pub fn color_delimited_square(
(open, close): (Span, Span),
token_nodes: &mut TokensIterator,
_span: Span,
context: &ExpandContext,
) {
token_nodes.color_shape(FlatShape::OpenDelimiter(Delimiter::Square).spanned(open));
let _list = color_syntax(&ExpressionListShape, token_nodes, context);
token_nodes.color_shape(FlatShape::CloseDelimiter(Delimiter::Square).spanned(close));
}
#[derive(Debug, Copy, Clone)]
pub struct DelimitedShape;
#[cfg(not(coloring_in_tokens))]
impl ColorSyntax for DelimitedShape {
type Info = ();
type Input = (Delimiter, Span, Span);
@ -47,3 +61,19 @@ impl ColorSyntax for DelimitedShape {
shapes.push(FlatShape::CloseDelimiter(*delimiter).spanned(*close));
}
}
#[cfg(coloring_in_tokens)]
impl ColorSyntax for DelimitedShape {
type Info = ();
type Input = (Delimiter, Span, Span);
fn color_syntax<'a, 'b>(
&self,
(delimiter, open, close): &(Delimiter, Span, Span),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
) -> Self::Info {
token_nodes.color_shape(FlatShape::OpenDelimiter(*delimiter).spanned(*open));
color_syntax(&ExpressionListShape, token_nodes, context);
token_nodes.color_shape(FlatShape::CloseDelimiter(*delimiter).spanned(*close));
}
}


@ -8,6 +8,7 @@ use crate::prelude::*;
#[derive(Debug, Copy, Clone)]
pub struct FilePathShape;
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for FilePathShape {
type Info = ();
type Input = ();
@ -46,6 +47,44 @@ impl FallibleColorSyntax for FilePathShape {
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for FilePathShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
) -> Result<(), ShellError> {
let atom = expand_atom(
token_nodes,
"file path",
context,
ExpansionRule::permissive(),
);
let atom = match atom {
Err(_) => return Ok(()),
Ok(atom) => atom,
};
match atom.item {
AtomicToken::Word { .. }
| AtomicToken::String { .. }
| AtomicToken::Number { .. }
| AtomicToken::Size { .. } => {
token_nodes.color_shape(FlatShape::Path.spanned(atom.span));
}
_ => atom.color_tokens(token_nodes.mut_shapes()),
}
Ok(())
}
}
impl ExpandExpression for FilePathShape {
fn expand_expr<'a, 'b>(
&self,


@ -1,4 +1,6 @@
use crate::errors::ShellError;
#[cfg(not(coloring_in_tokens))]
use crate::parser::hir::syntax_shape::FlatShape;
use crate::parser::{
hir,
hir::syntax_shape::{
@ -7,8 +9,8 @@ use crate::parser::{
MaybeSpaceShape, SpaceShape,
},
hir::TokensIterator,
FlatShape,
};
#[cfg(not(coloring_in_tokens))]
use crate::Spanned;
#[derive(Debug, Copy, Clone)]
@ -44,6 +46,7 @@ impl ExpandSyntax for ExpressionListShape {
}
}
#[cfg(not(coloring_in_tokens))]
impl ColorSyntax for ExpressionListShape {
type Info = ();
type Input = ();
@ -113,10 +116,80 @@ impl ColorSyntax for ExpressionListShape {
}
}
#[cfg(coloring_in_tokens)]
impl ColorSyntax for ExpressionListShape {
type Info = ();
type Input = ();
/// The intent of this method is to fully color an expression list shape infallibly.
/// This means that if we can't expand a token into an expression, we fall back to
/// a simpler coloring strategy.
///
/// This would apply to something like `where x >`, which includes an incomplete
/// binary operator. Since we will fail to process it as a binary operator, we'll
/// fall back to a simpler coloring and move on.
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
) {
// Tracks whether we've hit a parsing error; once we have, we continue with simpler
// coloring ("backoff coloring mode")
let mut backoff = false;
// Consume any leading whitespace
color_syntax(&MaybeSpaceShape, token_nodes, context);
loop {
// If we reached the very end of the token stream, we're done
if token_nodes.at_end() {
return;
}
if backoff {
let len = token_nodes.shapes().len();
// If we previously encountered a parsing error, use backoff coloring mode
color_syntax(&SimplestExpression, token_nodes, context);
if len == token_nodes.shapes().len() && !token_nodes.at_end() {
// This should never happen, but if it does, a panic is better than an infinite loop
panic!("Unexpected tokens left that couldn't be colored even with SimplestExpression")
}
} else {
// Try to color the head of the stream as an expression
match color_fallible_syntax(&AnyExpressionShape, token_nodes, context) {
// If no expression was found, switch to backoff coloring mode
Err(_) => {
backoff = true;
continue;
}
Ok(_) => {}
}
// If an expression was found, consume a space
match color_fallible_syntax(&SpaceShape, token_nodes, context) {
Err(_) => {
// If no space was found, we're either at the end or there's an error.
// Either way, switch to backoff coloring mode. If we're at the end
// it won't have any consequences.
backoff = true;
}
Ok(_) => {
// Otherwise, move on to the next expression
}
}
}
}
}
}
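
To make the strategy in the doc comment above concrete, here is a self-contained sketch of the backoff loop. Tokens, color_expression, and color_simplest are illustrative stand-ins rather than nushell's real TokensIterator and shapes, and whitespace handling is omitted; the shape of the control flow is the point.

// Sketch: color whole expressions while that succeeds; after the first failure,
// fall back to coloring one token at a time ("backoff coloring mode").
struct Tokens {
    items: Vec<&'static str>,
    index: usize,
    shapes: Vec<(&'static str, &'static str)>, // (shape name, token text)
}

impl Tokens {
    fn at_end(&self) -> bool {
        self.index >= self.items.len()
    }
}

// Fallible: only recognizes single alphabetic words as "expressions".
fn color_expression(tokens: &mut Tokens) -> Result<(), ()> {
    let token = tokens.items[tokens.index];
    if token.chars().all(|c| c.is_alphabetic()) {
        tokens.shapes.push(("Word", token));
        tokens.index += 1;
        Ok(())
    } else {
        Err(())
    }
}

// Infallible: colors whatever is at the head of the stream as an error shape.
fn color_simplest(tokens: &mut Tokens) {
    let token = tokens.items[tokens.index];
    tokens.shapes.push(("Error", token));
    tokens.index += 1;
}

fn color_expression_list(tokens: &mut Tokens) {
    let mut backoff = false;
    while !tokens.at_end() {
        if backoff {
            let before = tokens.shapes.len();
            color_simplest(tokens);
            // The real code panics at this point: an infallible colorer that
            // makes no progress would otherwise loop forever.
            assert!(tokens.shapes.len() > before);
        } else if color_expression(tokens).is_err() {
            // Parsing failed; switch to backoff coloring for the rest.
            backoff = true;
        }
    }
}

fn main() {
    // Mirrors the `where x >` example: the dangling operator can't be parsed
    // as an expression, so it gets the fallback coloring.
    let mut tokens = Tokens {
        items: vec!["where", "x", ">"],
        index: 0,
        shapes: vec![],
    };
    color_expression_list(&mut tokens);
    println!("{:?}", tokens.shapes);
}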
/// BackoffColoringMode consumes all of the remaining tokens in an infallible way
#[derive(Debug, Copy, Clone)]
pub struct BackoffColoringMode;
#[cfg(not(coloring_in_tokens))]
impl ColorSyntax for BackoffColoringMode {
type Info = ();
type Input = ();
@ -144,12 +217,40 @@ impl ColorSyntax for BackoffColoringMode {
}
}
#[cfg(coloring_in_tokens)]
impl ColorSyntax for BackoffColoringMode {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &Self::Input,
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
) -> Self::Info {
loop {
if token_nodes.at_end() {
break;
}
let len = token_nodes.shapes().len();
color_syntax(&SimplestExpression, token_nodes, context);
if len == token_nodes.shapes().len() && !token_nodes.at_end() {
// This shouldn't happen, but if it does, a panic is better than an infinite loop
panic!("SimplestExpression failed to consume any tokens, but it's not at the end. This is unexpected\n== token nodes==\n{:#?}\n\n== shapes ==\n{:#?}", token_nodes, token_nodes.shapes());
}
}
}
}
/// The point of `SimplestExpression` is to serve as an infallible base case for coloring.
/// As a last ditch effort, if we can't find any way to parse the head of the stream as an
/// expression, fall back to simple coloring.
#[derive(Debug, Copy, Clone)]
pub struct SimplestExpression;
#[cfg(not(coloring_in_tokens))]
impl ColorSyntax for SimplestExpression {
type Info = ();
type Input = ();
@ -174,3 +275,28 @@ impl ColorSyntax for SimplestExpression {
}
}
}
#[cfg(coloring_in_tokens)]
impl ColorSyntax for SimplestExpression {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
) {
let atom = expand_atom(
token_nodes,
"any token",
context,
ExpansionRule::permissive(),
);
match atom {
Err(_) => {}
Ok(atom) => atom.color_tokens(token_nodes.mut_shapes()),
}
}
}


@ -44,6 +44,7 @@ impl ExpandExpression for NumberShape {
}
}
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for NumberShape {
type Info = ();
type Input = ();
@ -73,6 +74,35 @@ impl FallibleColorSyntax for NumberShape {
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for NumberShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
) -> Result<(), ShellError> {
let atom = token_nodes.spanned(|token_nodes| {
expand_atom(token_nodes, "number", context, ExpansionRule::permissive())
});
let atom = match atom {
Spanned { item: Err(_), span } => {
token_nodes.color_shape(FlatShape::Error.spanned(span));
return Ok(());
}
Spanned { item: Ok(atom), .. } => atom,
};
atom.color_tokens(token_nodes.mut_shapes());
Ok(())
}
}
#[derive(Debug, Copy, Clone)]
pub struct IntShape;
@ -106,6 +136,7 @@ impl ExpandExpression for IntShape {
}
}
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for IntShape {
type Info = ();
type Input = ();
@ -134,3 +165,32 @@ impl FallibleColorSyntax for IntShape {
Ok(())
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for IntShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
) -> Result<(), ShellError> {
let atom = token_nodes.spanned(|token_nodes| {
expand_atom(token_nodes, "integer", context, ExpansionRule::permissive())
});
let atom = match atom {
Spanned { item: Err(_), span } => {
token_nodes.color_shape(FlatShape::Error.spanned(span));
return Ok(());
}
Spanned { item: Ok(atom), .. } => atom,
};
atom.color_tokens(token_nodes.mut_shapes());
Ok(())
}
}


@ -9,6 +9,7 @@ use crate::prelude::*;
#[derive(Debug, Copy, Clone)]
pub struct PatternShape;
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for PatternShape {
type Info = ();
type Input = ();
@ -35,6 +36,32 @@ impl FallibleColorSyntax for PatternShape {
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for PatternShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
) -> Result<(), ShellError> {
token_nodes.atomic(|token_nodes| {
let atom = expand_atom(token_nodes, "pattern", context, ExpansionRule::permissive())?;
match &atom.item {
AtomicToken::GlobPattern { .. } | AtomicToken::Word { .. } => {
token_nodes.color_shape(FlatShape::GlobPattern.spanned(atom.span));
Ok(())
}
_ => Err(ShellError::type_error("pattern", atom.tagged_type_name())),
}
})
}
}
impl ExpandExpression for PatternShape {
fn expand_expr<'a, 'b>(
&self,


@ -9,6 +9,7 @@ use crate::prelude::*;
#[derive(Debug, Copy, Clone)]
pub struct StringShape;
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for StringShape {
type Info = ();
type Input = FlatShape;
@ -39,6 +40,36 @@ impl FallibleColorSyntax for StringShape {
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for StringShape {
type Info = ();
type Input = FlatShape;
fn color_syntax<'a, 'b>(
&self,
input: &FlatShape,
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
) -> Result<(), ShellError> {
let atom = expand_atom(token_nodes, "string", context, ExpansionRule::permissive());
let atom = match atom {
Err(_) => return Ok(()),
Ok(atom) => atom,
};
match atom {
Spanned {
item: AtomicToken::String { .. },
span,
} => token_nodes.color_shape((*input).spanned(span)),
other => other.color_tokens(token_nodes.mut_shapes()),
}
Ok(())
}
}
impl ExpandExpression for StringShape {
fn expand_expr<'a, 'b>(
&self,


@ -44,6 +44,7 @@ impl ExpandExpression for VariablePathShape {
}
}
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for VariablePathShape {
type Info = ();
type Input = ();
@ -84,9 +85,49 @@ impl FallibleColorSyntax for VariablePathShape {
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for VariablePathShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
) -> Result<(), ShellError> {
token_nodes.atomic(|token_nodes| {
// If the head of the token stream is not a variable, fail
color_fallible_syntax(&VariableShape, token_nodes, context)?;
loop {
// look for a dot at the head of a stream
let dot = color_fallible_syntax_with(
&ColorableDotShape,
&FlatShape::Dot,
token_nodes,
context,
);
// if there's no dot, we're done
match dot {
Err(_) => break,
Ok(_) => {}
}
// otherwise, look for a member, and if you don't find one, fail
color_fallible_syntax(&MemberShape, token_nodes, context)?;
}
Ok(())
})
}
}
#[derive(Debug, Copy, Clone)]
pub struct PathTailShape;
#[cfg(not(coloring_in_tokens))]
/// The failure mode of `PathTailShape` is a dot followed by a non-member
impl FallibleColorSyntax for PathTailShape {
type Info = ();
@ -119,6 +160,37 @@ impl FallibleColorSyntax for PathTailShape {
}
}
#[cfg(coloring_in_tokens)]
/// The failure mode of `PathTailShape` is a dot followed by a non-member
impl FallibleColorSyntax for PathTailShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
) -> Result<(), ShellError> {
token_nodes.atomic(|token_nodes| loop {
let result = color_fallible_syntax_with(
&ColorableDotShape,
&FlatShape::Dot,
token_nodes,
context,
);
match result {
Err(_) => return Ok(()),
Ok(_) => {}
}
// If we've seen a dot but not a member, fail
color_fallible_syntax(&MemberShape, token_nodes, context)?;
})
}
}
impl ExpandSyntax for PathTailShape {
type Output = (Vec<Spanned<String>>, Span);
fn expand_syntax<'a, 'b>(
@ -204,6 +276,7 @@ pub enum ContinuationInfo {
Infix,
}
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for ExpressionContinuationShape {
type Info = ContinuationInfo;
type Input = ();
@ -256,6 +329,51 @@ impl FallibleColorSyntax for ExpressionContinuationShape {
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for ExpressionContinuationShape {
type Info = ContinuationInfo;
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
) -> Result<ContinuationInfo, ShellError> {
token_nodes.atomic(|token_nodes| {
// Try to expand a `.`
let dot = color_fallible_syntax_with(
&ColorableDotShape,
&FlatShape::Dot,
token_nodes,
context,
);
match dot {
Ok(_) => {
// we found a dot, so let's keep looking for a member; if no member was found, fail
color_fallible_syntax(&MemberShape, token_nodes, context)?;
Ok(ContinuationInfo::Dot)
}
Err(_) => {
let result = token_nodes.atomic(|token_nodes| {
// we didn't find a dot, so let's see if we're looking at an infix. If not found, fail
color_fallible_syntax(&InfixShape, token_nodes, context)?;
// now that we've seen an infix shape, look for any expression. If not found, fail
color_fallible_syntax(&AnyExpressionShape, token_nodes, context)?;
Ok(ContinuationInfo::Infix)
})?;
Ok(result)
}
}
})
}
}
#[derive(Debug, Copy, Clone)]
pub struct VariableShape;
@ -285,6 +403,7 @@ impl ExpandExpression for VariableShape {
}
}
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for VariableShape {
type Info = ();
type Input = ();
@ -322,6 +441,43 @@ impl FallibleColorSyntax for VariableShape {
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for VariableShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
) -> Result<(), ShellError> {
let atom = expand_atom(
token_nodes,
"variable",
context,
ExpansionRule::permissive(),
);
let atom = match atom {
Err(err) => return Err(err),
Ok(atom) => atom,
};
match &atom.item {
AtomicToken::Variable { .. } => {
token_nodes.color_shape(FlatShape::Variable.spanned(atom.span));
Ok(())
}
AtomicToken::ItVariable { .. } => {
token_nodes.color_shape(FlatShape::ItVariable.spanned(atom.span));
Ok(())
}
_ => Err(ShellError::type_error("variable", atom.tagged_type_name())),
}
}
}
#[derive(Debug, Clone, Copy)]
pub enum Member {
String(/* outer */ Span, /* inner */ Span),
@ -447,6 +603,7 @@ pub fn expand_column_path<'a, 'b>(
#[derive(Debug, Copy, Clone)]
pub struct ColumnPathShape;
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for ColumnPathShape {
type Info = ();
type Input = ();
@ -496,6 +653,53 @@ impl FallibleColorSyntax for ColumnPathShape {
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for ColumnPathShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
) -> Result<(), ShellError> {
// If there's not even one member shape, fail
color_fallible_syntax(&MemberShape, token_nodes, context)?;
loop {
let checkpoint = token_nodes.checkpoint();
match color_fallible_syntax_with(
&ColorableDotShape,
&FlatShape::Dot,
checkpoint.iterator,
context,
) {
Err(_) => {
// we already saw at least one member shape, so return successfully
return Ok(());
}
Ok(_) => {
match color_fallible_syntax(&MemberShape, checkpoint.iterator, context) {
Err(_) => {
// we saw a dot but no member after it (we already matched at least one member),
// so don't commit the dot and return successfully
return Ok(());
}
Ok(_) => {
// we saw a dot and a member, so commit it and continue on
checkpoint.commit();
}
}
}
}
}
}
}
impl ExpandSyntax for ColumnPathShape {
type Output = Tagged<Vec<Member>>;
@ -511,6 +715,7 @@ impl ExpandSyntax for ColumnPathShape {
#[derive(Debug, Copy, Clone)]
pub struct MemberShape;
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for MemberShape {
type Info = ();
type Input = ();
@ -548,6 +753,32 @@ impl FallibleColorSyntax for MemberShape {
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for MemberShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
) -> Result<(), ShellError> {
let bare =
color_fallible_syntax_with(&BareShape, &FlatShape::BareMember, token_nodes, context);
match bare {
Ok(_) => return Ok(()),
Err(_) => {
// If we don't have a bare word, we'll look for a string
}
}
// Look for a string token. If we don't find one, fail
color_fallible_syntax_with(&StringShape, &FlatShape::StringMember, token_nodes, context)
}
}
impl ExpandSyntax for MemberShape {
type Output = Member;
@ -581,6 +812,7 @@ pub struct DotShape;
#[derive(Debug, Copy, Clone)]
pub struct ColorableDotShape;
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for ColorableDotShape {
type Info = ();
type Input = FlatShape;
@ -606,6 +838,31 @@ impl FallibleColorSyntax for ColorableDotShape {
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for ColorableDotShape {
type Info = ();
type Input = FlatShape;
fn color_syntax<'a, 'b>(
&self,
input: &FlatShape,
token_nodes: &'b mut TokensIterator<'a>,
_context: &ExpandContext,
) -> Result<(), ShellError> {
let peeked = token_nodes.peek_any().not_eof("dot")?;
match peeked.node {
node if node.is_dot() => {
peeked.commit();
token_nodes.color_shape((*input).spanned(node.span()));
Ok(())
}
other => Err(ShellError::type_error("dot", other.tagged_type_name())),
}
}
}
impl SkipSyntax for DotShape {
fn skip<'a, 'b>(
&self,
@ -643,6 +900,7 @@ impl ExpandSyntax for DotShape {
#[derive(Debug, Copy, Clone)]
pub struct InfixShape;
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for InfixShape {
type Info = ();
type Input = ();
@ -690,6 +948,55 @@ impl FallibleColorSyntax for InfixShape {
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for InfixShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
) -> Result<(), ShellError> {
let checkpoint = token_nodes.checkpoint();
// An infix operator must be prefixed by whitespace. If no whitespace was found, fail
color_fallible_syntax(&WhitespaceShape, checkpoint.iterator, context)?;
// Parse the next TokenNode after the whitespace
let operator_span = parse_single_node(
checkpoint.iterator,
"infix operator",
|token, token_span, _| {
match token {
// If it's an operator (and not `.`), it's a match
RawToken::Operator(operator) if operator != Operator::Dot => {
// token_nodes.color_shape(FlatShape::Operator.spanned(token_span));
Ok(token_span)
}
// Otherwise, it's not a match
_ => Err(ShellError::type_error(
"infix operator",
token.type_name().tagged(token_span),
)),
}
},
)?;
checkpoint
.iterator
.color_shape(FlatShape::Operator.spanned(operator_span));
// An infix operator must be followed by whitespace. If no whitespace was found, fail
color_fallible_syntax(&WhitespaceShape, checkpoint.iterator, context)?;
checkpoint.commit();
Ok(())
}
}
impl ExpandSyntax for InfixShape {
type Output = (Span, Spanned<Operator>, Span);


@ -1,16 +1,23 @@
pub(crate) mod debug;
use crate::errors::ShellError;
#[cfg(coloring_in_tokens)]
use crate::parser::hir::syntax_shape::FlatShape;
use crate::parser::TokenNode;
use crate::{Span, Spanned, SpannedItem};
#[allow(unused)]
use getset::Getters;
#[derive(Debug)]
#[derive(Getters, Debug)]
pub struct TokensIterator<'content> {
tokens: &'content [TokenNode],
span: Span,
skip_ws: bool,
index: usize,
seen: indexmap::IndexSet<usize>,
#[cfg(coloring_in_tokens)]
#[get = "pub"]
shapes: Vec<Spanned<FlatShape>>,
}
#[derive(Debug)]
@ -18,6 +25,8 @@ pub struct Checkpoint<'content, 'me> {
pub(crate) iterator: &'me mut TokensIterator<'content>,
index: usize,
seen: indexmap::IndexSet<usize>,
#[cfg(coloring_in_tokens)]
shape_start: usize,
committed: bool,
}
@ -32,6 +41,8 @@ impl<'content, 'me> std::ops::Drop for Checkpoint<'content, 'me> {
if !self.committed {
self.iterator.index = self.index;
self.iterator.seen = self.seen.clone();
#[cfg(coloring_in_tokens)]
self.iterator.shapes.truncate(self.shape_start);
}
}
}
@ -132,6 +143,8 @@ impl<'content> TokensIterator<'content> {
skip_ws,
index: 0,
seen: indexmap::IndexSet::new(),
#[cfg(coloring_in_tokens)]
shapes: vec![],
}
}
@ -156,10 +169,47 @@ impl<'content> TokensIterator<'content> {
result.spanned(start.until(end))
}
#[cfg(coloring_in_tokens)]
pub fn color_shape(&mut self, shape: Spanned<FlatShape>) {
self.shapes.push(shape);
}
#[cfg(coloring_in_tokens)]
pub fn mut_shapes(&mut self) -> &mut Vec<Spanned<FlatShape>> {
&mut self.shapes
}
#[cfg(coloring_in_tokens)]
pub fn child<T>(
&mut self,
tokens: Spanned<&'content [TokenNode]>,
block: impl FnOnce(&mut TokensIterator) -> T,
) -> T {
let mut shapes = vec![];
std::mem::swap(&mut shapes, &mut self.shapes);
let mut iterator = TokensIterator {
tokens: tokens.item,
span: tokens.span,
skip_ws: false,
index: 0,
seen: indexmap::IndexSet::new(),
shapes,
};
let result = block(&mut iterator);
std::mem::swap(&mut iterator.shapes, &mut self.shapes);
result
}
/// Use a checkpoint when you need to peek more than one token ahead, but can't be sure
/// that you'll succeed.
pub fn checkpoint<'me>(&'me mut self) -> Checkpoint<'content, 'me> {
let index = self.index;
#[cfg(coloring_in_tokens)]
let shape_start = self.shapes.len();
let seen = self.seen.clone();
Checkpoint {
@ -167,6 +217,8 @@ impl<'content> TokensIterator<'content> {
index,
seen,
committed: false,
#[cfg(coloring_in_tokens)]
shape_start,
}
}
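
A compact model of what a checkpoint buys once shapes live on the iterator: rolling back a failed block restores both the token position and any shapes colored inside it. The Cursor type and the closure-based atomic helper below are illustrative stand-ins; the real Checkpoint also restores the seen set and performs its rollback in Drop.

// Sketch: an atomic block remembers the index and the shapes length, and
// restores both if the block fails.
struct Cursor {
    index: usize,
    shapes: Vec<&'static str>,
}

impl Cursor {
    fn atomic<T, E>(
        &mut self,
        block: impl FnOnce(&mut Cursor) -> Result<T, E>,
    ) -> Result<T, E> {
        let index = self.index;
        let shape_start = self.shapes.len();
        match block(self) {
            Ok(value) => Ok(value),
            Err(err) => {
                // Roll back position and truncate any shapes colored in the block.
                self.index = index;
                self.shapes.truncate(shape_start);
                Err(err)
            }
        }
    }
}

fn main() {
    let mut tokens = Cursor { index: 0, shapes: vec![] };
    let result: Result<(), &str> = tokens.atomic(|t| {
        t.shapes.push("Dot");
        t.index += 1;
        Err("no member after the dot")
    });
    // The failed block colored a shape, but the atomic wrapper rolled it back.
    assert!(result.is_err());
    assert_eq!(tokens.shapes.len(), 0);
    assert_eq!(tokens.index, 0);
}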
@ -177,6 +229,8 @@ impl<'content> TokensIterator<'content> {
block: impl FnOnce(&mut TokensIterator<'content>) -> Result<T, ShellError>,
) -> Result<T, ShellError> {
let index = self.index;
#[cfg(coloring_in_tokens)]
let shape_start = self.shapes.len();
let seen = self.seen.clone();
let checkpoint = Checkpoint {
@ -184,6 +238,8 @@ impl<'content> TokensIterator<'content> {
index,
seen,
committed: false,
#[cfg(coloring_in_tokens)]
shape_start,
};
let value = block(checkpoint.iterator)?;
@ -192,6 +248,44 @@ impl<'content> TokensIterator<'content> {
return Ok(value);
}
#[cfg(coloring_in_tokens)]
/// Like `atomic`, but any shapes colored inside the block are returned separately instead
/// of being added to this iterator; on failure, the iterator is rolled back and no shapes
/// are returned.
pub fn atomic_returning_shapes<'me, T>(
&'me mut self,
block: impl FnOnce(&mut TokensIterator<'content>) -> Result<T, ShellError>,
) -> (Result<T, ShellError>, Vec<Spanned<FlatShape>>) {
let index = self.index;
let mut shapes = vec![];
let seen = self.seen.clone();
std::mem::swap(&mut self.shapes, &mut shapes);
let checkpoint = Checkpoint {
iterator: self,
index,
seen,
committed: false,
shape_start: 0,
};
let value = block(checkpoint.iterator);
let value = match value {
Err(err) => {
drop(checkpoint);
std::mem::swap(&mut self.shapes, &mut shapes);
return (Err(err), vec![]);
}
Ok(value) => value,
};
checkpoint.commit();
std::mem::swap(&mut self.shapes, &mut shapes);
return (Ok(value), shapes);
}
fn eof_span(&self) -> Span {
Span::new(self.span.end(), self.span.end())
}
@ -266,6 +360,8 @@ impl<'content> TokensIterator<'content> {
index: self.index,
seen: self.seen.clone(),
skip_ws: self.skip_ws,
#[cfg(coloring_in_tokens)]
shapes: self.shapes.clone(),
}
}


@ -189,6 +189,7 @@ impl ColoringArgs {
#[derive(Debug, Copy, Clone)]
pub struct CommandTailShape;
#[cfg(not(coloring_in_tokens))]
impl ColorSyntax for CommandTailShape {
type Info = ();
type Input = Signature;
@ -385,6 +386,206 @@ impl ColorSyntax for CommandTailShape {
}
}
#[cfg(coloring_in_tokens)]
impl ColorSyntax for CommandTailShape {
type Info = ();
type Input = Signature;
fn color_syntax<'a, 'b>(
&self,
signature: &Signature,
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
) -> Self::Info {
let mut args = ColoringArgs::new(token_nodes.len());
trace_remaining("nodes", token_nodes.clone(), context.source());
for (name, kind) in &signature.named {
trace!(target: "nu::color_syntax", "looking for {} : {:?}", name, kind);
match kind {
NamedType::Switch => {
match token_nodes.extract(|t| t.as_flag(name, context.source())) {
Some((pos, flag)) => args.insert(pos, vec![flag.color()]),
None => {}
}
}
NamedType::Mandatory(syntax_type) => {
match extract_mandatory(
signature,
name,
token_nodes,
context.source(),
Span::unknown(),
) {
Err(_) => {
// The mandatory flag didn't exist at all, so there's nothing to color
}
Ok((pos, flag)) => {
let (_, shapes) = token_nodes.atomic_returning_shapes(|token_nodes| {
token_nodes.color_shape(flag.color());
token_nodes.move_to(pos);
if token_nodes.at_end() {
// args.insert(pos, shapes);
// token_nodes.restart();
return Ok(());
// continue;
}
// We still want to color the flag even if the following tokens don't match, so don't
// propagate the error to the parent atomic block if it fails
let _ = token_nodes.atomic(|token_nodes| {
// We can live with unmatched syntax after a mandatory flag
color_syntax(&MaybeSpaceShape, token_nodes, context);
// If the part after a mandatory flag isn't present, that's ok, but we
// should roll back any whitespace we chomped
color_fallible_syntax(syntax_type, token_nodes, context)?;
Ok(())
});
Ok(())
});
args.insert(pos, shapes);
token_nodes.restart();
}
}
}
NamedType::Optional(syntax_type) => {
match extract_optional(name, token_nodes, context.source()) {
Err(_) => {
// The optional flag didn't exist at all, so there's nothing to color
}
Ok(Some((pos, flag))) => {
let (_, shapes) = token_nodes.atomic_returning_shapes(|token_nodes| {
token_nodes.color_shape(flag.color());
token_nodes.move_to(pos);
if token_nodes.at_end() {
// args.insert(pos, shapes);
// token_nodes.restart();
return Ok(());
// continue;
}
// We still want to color the flag even if the following tokens don't match, so don't
// propagate the error to the parent atomic block if it fails
let _ = token_nodes.atomic(|token_nodes| {
// We can live with unmatched syntax after an optional flag
color_syntax(&MaybeSpaceShape, token_nodes, context);
// If the part after an optional flag isn't present, that's ok, but we
// should roll back any whitespace we chomped
color_fallible_syntax(syntax_type, token_nodes, context)?;
Ok(())
});
Ok(())
});
args.insert(pos, shapes);
token_nodes.restart();
}
Ok(None) => {
token_nodes.restart();
}
}
}
};
}
trace_remaining("after named", token_nodes.clone(), context.source());
for arg in &signature.positional {
trace!("Processing positional {:?}", arg);
match arg {
PositionalType::Mandatory(..) => {
if token_nodes.at_end() {
break;
}
}
PositionalType::Optional(..) => {
if token_nodes.at_end() {
break;
}
}
}
let pos = token_nodes.pos(false);
match pos {
None => break,
Some(pos) => {
// We can live with an unmatched positional argument. Hopefully it will be
// matched by a future token
let (_, shapes) = token_nodes.atomic_returning_shapes(|token_nodes| {
color_syntax(&MaybeSpaceShape, token_nodes, context);
// If no match, we should roll back any whitespace we chomped
color_fallible_syntax(&arg.syntax_type(), token_nodes, context)?;
Ok(())
});
args.insert(pos, shapes);
}
}
}
trace_remaining("after positional", token_nodes.clone(), context.source());
if let Some(syntax_type) = signature.rest_positional {
loop {
if token_nodes.at_end_possible_ws() {
break;
}
let pos = token_nodes.pos(false);
match pos {
None => break,
Some(pos) => {
// If any arguments don't match, we'll fall back to backoff coloring mode
let (result, shapes) = token_nodes.atomic_returning_shapes(|token_nodes| {
color_syntax(&MaybeSpaceShape, token_nodes, context);
// If no match, we should roll back any whitespace we chomped
color_fallible_syntax(&syntax_type, token_nodes, context)?;
Ok(())
});
args.insert(pos, shapes);
match result {
Err(_) => break,
Ok(_) => continue,
}
}
}
}
}
args.spread_shapes(token_nodes.mut_shapes());
// Consume any remaining tokens with backoff coloring mode
color_syntax(&BackoffColoringMode, token_nodes, context);
// This is pretty dubious, but it works. We should look into a better algorithm that doesn't end up requiring
// this solution.
token_nodes
.mut_shapes()
.sort_by(|a, b| a.span.start().cmp(&b.span.start()));
}
}
fn extract_switch(name: &str, tokens: &mut hir::TokensIterator<'_>, source: &Text) -> Option<Flag> {
tokens
.extract(|t| t.as_flag(name, source))


@ -85,11 +85,27 @@ impl Highlighter for Helper {
let expand_context = self
.context
.expand_context(&text, Span::new(0, line.len() - 1));
let mut shapes = vec![];
// We just constructed a token list that only contains a pipeline, so it can't fail
color_fallible_syntax(&PipelineShape, &mut tokens, &expand_context, &mut shapes)
#[cfg(not(coloring_in_tokens))]
let shapes = {
let mut shapes = vec![];
color_fallible_syntax(
&PipelineShape,
&mut tokens,
&expand_context,
&mut shapes,
)
.unwrap();
shapes
};
#[cfg(coloring_in_tokens)]
let shapes = {
// We just constructed a token list that only contains a pipeline, so it can't fail
color_fallible_syntax(&PipelineShape, &mut tokens, &expand_context).unwrap();
tokens.shapes()
};
trace!(target: "nu::shapes",
"SHAPES :: {:?}",
@ -97,7 +113,7 @@ impl Highlighter for Helper {
);
for shape in shapes {
let styled = paint_flat_shape(shape, line);
let styled = paint_flat_shape(&shape, line);
out.push_str(&styled);
}
@ -135,7 +151,7 @@ fn vec_tag<T>(input: Vec<Tagged<T>>) -> Option<Tag> {
})
}
fn paint_flat_shape(flat_shape: Spanned<FlatShape>, line: &str) -> String {
fn paint_flat_shape(flat_shape: &Spanned<FlatShape>, line: &str) -> String {
let style = match &flat_shape.item {
FlatShape::OpenDelimiter(_) => Color::White.normal(),
FlatShape::CloseDelimiter(_) => Color::White.normal(),