move tt-iter into tt crate

Author: Lukas Wirth
Date:   2024-06-24 14:47:12 +02:00
parent db056b4a69
commit dc39e87b79
10 changed files with 336 additions and 323 deletions
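In short: the `TtIter` token-tree cursor leaves `mbe`'s private `tt_iter` module and becomes the public `tt::iter::TtIter`, so `mbe` now imports it from the `tt` crate instead of declaring its own. A minimal sketch of the import change, as it would look inside the `mbe` crate:

    // before: the iterator was a private module of mbe
    // use crate::tt_iter::TtIter;

    // after: the iterator is public API of the tt crate
    use tt::iter::TtIter;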

Cargo.lock (generated, 1 addition)
View file

@@ -2252,6 +2252,7 @@ dependencies = [
name = "tt"
version = "0.0.0"
dependencies = [
"arrayvec",
"smol_str",
"stdx",
"text-size",

View file

@@ -1,16 +1,23 @@
use hir::InFile;
use syntax::{AstNode, TextRange};
use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
use crate::{adjusted_display_range, Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: incoherent-impl
//
// This diagnostic is triggered if the target type of an impl is from a foreign crate.
pub(crate) fn incoherent_impl(ctx: &DiagnosticsContext<'_>, d: &hir::IncoherentImpl) -> Diagnostic {
Diagnostic::new_with_syntax_node_ptr(
ctx,
let display_range = adjusted_display_range(ctx, InFile::new(d.file_id, d.impl_), &|node| {
Some(TextRange::new(
node.syntax().text_range().start(),
node.self_ty()?.syntax().text_range().end(),
))
});
Diagnostic::new(
DiagnosticCode::RustcHardError("E0210"),
"cannot define inherent `impl` for foreign type".to_owned(),
InFile::new(d.file_id, d.impl_.into()),
display_range,
)
}
@@ -23,7 +30,7 @@ mod change_case {
check_diagnostics(
r#"
impl bool {}
//^^^^^^^^^^^^ error: cannot define inherent `impl` for foreign type
//^^^^^^^^^ error: cannot define inherent `impl` for foreign type
"#,
);
}
@@ -60,7 +67,7 @@ impl foo::S {
pub struct S;
//- /main.rs crate:main deps:foo
impl foo::S { #[rustc_allow_incoherent_impl] fn func(self) {} }
//^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: cannot define inherent `impl` for foreign type
//^^^^^^^^^^^ error: cannot define inherent `impl` for foreign type
"#,
);
check_diagnostics(
@@ -70,7 +77,7 @@ pub struct S;
pub struct S;
//- /main.rs crate:main deps:foo
impl foo::S { fn func(self) {} }
//^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: cannot define inherent `impl` for foreign type
//^^^^^^^^^^^ error: cannot define inherent `impl` for foreign type
"#,
);
}

View file

@@ -64,12 +64,12 @@ use std::{rc::Rc, sync::Arc};
use smallvec::{smallvec, SmallVec};
use span::{Edition, Span};
use syntax::SmolStr;
use tt::DelimSpan;
use tt::{iter::TtIter, DelimSpan};
use crate::{
expander::{Binding, Bindings, ExpandResult, Fragment},
expect_fragment,
parser::{MetaVarKind, Op, RepeatKind, Separator},
tt_iter::TtIter,
ExpandError, MetaTemplate, ValueResult,
};
@@ -406,7 +406,7 @@ fn match_loop_inner<'t>(
if item.sep.is_some() && !item.sep_matched {
let sep = item.sep.as_ref().unwrap();
let mut fork = src.clone();
if fork.expect_separator(sep) {
if expect_separator(&mut fork, sep) {
// HACK: here we use `meta_result` to pass `TtIter` back to caller because
// it might have been advanced multiple times. `ValueResult` is
// insignificant.
@@ -746,7 +746,7 @@ fn match_meta_var(
) -> ExpandResult<Option<Fragment>> {
let fragment = match kind {
MetaVarKind::Path => {
return input.expect_fragment(parser::PrefixEntryPoint::Path, edition).map(|it| {
return expect_fragment(input, parser::PrefixEntryPoint::Path, edition).map(|it| {
it.map(|it| tt::TokenTree::subtree_or_wrap(it, delim_span)).map(Fragment::Path)
});
}
@@ -765,7 +765,7 @@ fn match_meta_var(
}
_ => {}
};
return input.expect_fragment(parser::PrefixEntryPoint::Expr, edition).map(|tt| {
return expect_fragment(input, parser::PrefixEntryPoint::Expr, edition).map(|tt| {
tt.map(|tt| match tt {
tt::TokenTree::Leaf(leaf) => tt::Subtree {
delimiter: tt::Delimiter::invisible_spanned(*leaf.span()),
@@ -787,14 +787,13 @@ fn match_meta_var(
.expect_ident()
.map(|ident| tt::Leaf::from(ident.clone()).into())
.map_err(|()| ExpandError::binding_error("expected ident")),
MetaVarKind::Tt => input
.expect_tt()
.map_err(|()| ExpandError::binding_error("expected token tree")),
MetaVarKind::Lifetime => input
.expect_lifetime()
MetaVarKind::Tt => {
expect_tt(input).map_err(|()| ExpandError::binding_error("expected token tree"))
}
MetaVarKind::Lifetime => expect_lifetime(input)
.map_err(|()| ExpandError::binding_error("expected lifetime")),
MetaVarKind::Literal => {
let neg = input.eat_char('-');
let neg = eat_char(input, '-');
input
.expect_literal()
.map(|literal| {
@@ -822,7 +821,7 @@ fn match_meta_var(
MetaVarKind::Item => parser::PrefixEntryPoint::Item,
MetaVarKind::Vis => parser::PrefixEntryPoint::Vis,
};
input.expect_fragment(fragment, edition).map(|it| it.map(Fragment::Tokens))
expect_fragment(input, fragment, edition).map(|it| it.map(Fragment::Tokens))
}
fn collect_vars(collector_fun: &mut impl FnMut(SmolStr), pattern: &MetaTemplate) {
@@ -905,86 +904,84 @@ impl<'a> Iterator for OpDelimitedIter<'a> {
}
}
impl TtIter<'_, Span> {
fn expect_separator(&mut self, separator: &Separator) -> bool {
let mut fork = self.clone();
let ok = match separator {
Separator::Ident(lhs) => match fork.expect_ident_or_underscore() {
Ok(rhs) => rhs.text == lhs.text,
Err(_) => false,
fn expect_separator<S: Copy>(iter: &mut TtIter<'_, S>, separator: &Separator) -> bool {
let mut fork = iter.clone();
let ok = match separator {
Separator::Ident(lhs) => match fork.expect_ident_or_underscore() {
Ok(rhs) => rhs.text == lhs.text,
Err(_) => false,
},
Separator::Literal(lhs) => match fork.expect_literal() {
Ok(rhs) => match rhs {
tt::Leaf::Literal(rhs) => rhs.text == lhs.text,
tt::Leaf::Ident(rhs) => rhs.text == lhs.text,
tt::Leaf::Punct(_) => false,
},
Separator::Literal(lhs) => match fork.expect_literal() {
Ok(rhs) => match rhs {
tt::Leaf::Literal(rhs) => rhs.text == lhs.text,
tt::Leaf::Ident(rhs) => rhs.text == lhs.text,
tt::Leaf::Punct(_) => false,
},
Err(_) => false,
},
Separator::Puncts(lhs) => match fork.expect_glued_punct() {
Ok(rhs) => {
let lhs = lhs.iter().map(|it| it.char);
let rhs = rhs.iter().map(|it| it.char);
lhs.eq(rhs)
}
Err(_) => false,
},
};
if ok {
*self = fork;
}
ok
}
fn expect_tt(&mut self) -> Result<tt::TokenTree<Span>, ()> {
if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) = self.peek_n(0) {
if punct.char == '\'' {
self.expect_lifetime()
} else {
let puncts = self.expect_glued_punct()?;
let delimiter = tt::Delimiter {
open: puncts.first().unwrap().span,
close: puncts.last().unwrap().span,
kind: tt::DelimiterKind::Invisible,
};
let token_trees = puncts.into_iter().map(|p| tt::Leaf::Punct(p).into()).collect();
Ok(tt::TokenTree::Subtree(tt::Subtree { delimiter, token_trees }))
Err(_) => false,
},
Separator::Puncts(lhs) => match fork.expect_glued_punct() {
Ok(rhs) => {
let lhs = lhs.iter().map(|it| it.char);
let rhs = rhs.iter().map(|it| it.char);
lhs.eq(rhs)
}
Err(_) => false,
},
};
if ok {
*iter = fork;
}
ok
}
fn expect_tt<S: Copy>(iter: &mut TtIter<'_, S>) -> Result<tt::TokenTree<S>, ()> {
if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) = iter.peek_n(0) {
if punct.char == '\'' {
expect_lifetime(iter)
} else {
self.next().ok_or(()).cloned()
}
}
fn expect_lifetime(&mut self) -> Result<tt::TokenTree<Span>, ()> {
let punct = self.expect_single_punct()?;
if punct.char != '\'' {
return Err(());
}
let ident = self.expect_ident_or_underscore()?;
Ok(tt::Subtree {
delimiter: tt::Delimiter {
open: punct.span,
close: ident.span,
let puncts = iter.expect_glued_punct()?;
let delimiter = tt::Delimiter {
open: puncts.first().unwrap().span,
close: puncts.last().unwrap().span,
kind: tt::DelimiterKind::Invisible,
},
token_trees: Box::new([
tt::Leaf::Punct(*punct).into(),
tt::Leaf::Ident(ident.clone()).into(),
]),
}
.into())
}
fn eat_char(&mut self, c: char) -> Option<tt::TokenTree<Span>> {
let mut fork = self.clone();
match fork.expect_char(c) {
Ok(_) => {
let tt = self.next().cloned();
*self = fork;
tt
}
Err(_) => None,
};
let token_trees = puncts.into_iter().map(|p| tt::Leaf::Punct(p).into()).collect();
Ok(tt::TokenTree::Subtree(tt::Subtree { delimiter, token_trees }))
}
} else {
iter.next().ok_or(()).cloned()
}
}
fn expect_lifetime<S: Copy>(iter: &mut TtIter<'_, S>) -> Result<tt::TokenTree<S>, ()> {
let punct = iter.expect_single_punct()?;
if punct.char != '\'' {
return Err(());
}
let ident = iter.expect_ident_or_underscore()?;
Ok(tt::Subtree {
delimiter: tt::Delimiter {
open: punct.span,
close: ident.span,
kind: tt::DelimiterKind::Invisible,
},
token_trees: Box::new([
tt::Leaf::Punct(*punct).into(),
tt::Leaf::Ident(ident.clone()).into(),
]),
}
.into())
}
fn eat_char<S: Copy>(iter: &mut TtIter<'_, S>, c: char) -> Option<tt::TokenTree<S>> {
let mut fork = iter.clone();
match fork.expect_char(c) {
Ok(_) => {
let tt = iter.next().cloned();
*iter = fork;
tt
}
Err(_) => None,
}
}
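The helpers above (`expect_separator`, `expect_tt`, `expect_lifetime`, `eat_char`) used to be inherent methods on `TtIter<'_, Span>`; they are now free functions generic over any `S: Copy` span type and take the iterator as their first argument. A hedged sketch of how such a call reads inside this module (the wrapper function is made up for illustration):

    // Consume a leading `-` if present, e.g. in front of a negative literal.
    fn eat_negative_sign<S: Copy>(input: &mut TtIter<'_, S>) -> bool {
        eat_char(input, '-').is_some()
    }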

View file

@@ -12,20 +12,17 @@ mod expander;
mod parser;
mod syntax_bridge;
mod to_parser_input;
mod tt_iter;
#[cfg(test)]
mod benchmark;
use span::{Edition, Span, SyntaxContextId};
use stdx::impl_from;
use tt::iter::TtIter;
use std::fmt;
use crate::{
parser::{MetaTemplate, MetaVarKind, Op},
tt_iter::TtIter,
};
use crate::parser::{MetaTemplate, MetaVarKind, Op};
// FIXME: we probably should re-think `token_tree_to_syntax_node` interfaces
pub use ::parser::TopEntryPoint;
@@ -247,6 +244,10 @@ impl DeclarativeMacro {
self.err.as_deref()
}
pub fn num_rules(&self) -> usize {
self.rules.len()
}
pub fn expand(
&self,
tt: &tt::Subtree<Span>,
@@ -361,3 +362,60 @@ impl<T: Default, E> From<Result<T, E>> for ValueResult<T, E> {
result.map_or_else(Self::only_err, Self::ok)
}
}
fn expect_fragment<S: Copy + fmt::Debug>(
tt_iter: &mut TtIter<'_, S>,
entry_point: ::parser::PrefixEntryPoint,
edition: ::parser::Edition,
) -> ExpandResult<Option<tt::TokenTree<S>>> {
use ::parser;
let buffer = tt::buffer::TokenBuffer::from_tokens(tt_iter.as_slice());
let parser_input = to_parser_input::to_parser_input(&buffer);
let tree_traversal = entry_point.parse(&parser_input, edition);
let mut cursor = buffer.begin();
let mut error = false;
for step in tree_traversal.iter() {
match step {
parser::Step::Token { kind, mut n_input_tokens } => {
if kind == ::parser::SyntaxKind::LIFETIME_IDENT {
n_input_tokens = 2;
}
for _ in 0..n_input_tokens {
cursor = cursor.bump_subtree();
}
}
parser::Step::FloatSplit { .. } => {
// FIXME: We need to split the tree properly here, but mutating the token trees
// in the buffer is somewhat tricky to pull off.
cursor = cursor.bump_subtree();
}
parser::Step::Enter { .. } | parser::Step::Exit => (),
parser::Step::Error { .. } => error = true,
}
}
let err = if error || !cursor.is_root() {
Some(ExpandError::binding_error(format!("expected {entry_point:?}")))
} else {
None
};
let mut curr = buffer.begin();
let mut res = vec![];
while curr != cursor {
let Some(token) = curr.token_tree() else { break };
res.push(token.cloned());
curr = curr.bump();
}
*tt_iter = TtIter::new_iter(tt_iter.as_slice()[res.len()..].iter());
let res = match &*res {
[] | [_] => res.pop(),
[first, ..] => Some(tt::TokenTree::Subtree(tt::Subtree {
delimiter: Delimiter::invisible_spanned(first.first_span()),
token_trees: res.into_boxed_slice(),
})),
};
ExpandResult { value: res, err }
}
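`expect_fragment` likewise moves to the crate root as a free function: it runs the given parser `PrefixEntryPoint` over the iterator's remaining tokens, advances the iterator past whatever the parser consumed, and returns the consumed tokens (wrapped in an invisible-delimiter subtree when more than one was taken). A hedged call sketch mirroring the call sites above (the wrapper function is hypothetical):

    // Parse one expression prefix out of `iter`, advancing it past the consumed tokens.
    fn parse_expr_prefix(
        iter: &mut TtIter<'_, Span>,
        edition: ::parser::Edition,
    ) -> ExpandResult<Option<tt::TokenTree<Span>>> {
        expect_fragment(iter, ::parser::PrefixEntryPoint::Expr, edition)
    }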

View file

@@ -6,8 +6,9 @@ use std::sync::Arc;
use arrayvec::ArrayVec;
use span::{Edition, Span, SyntaxContextId};
use syntax::SmolStr;
use tt::iter::TtIter;
use crate::{tt_iter::TtIter, ParseError};
use crate::ParseError;
/// Consider
///

View file

@@ -11,9 +11,12 @@ use syntax::{
SyntaxKind::*,
SyntaxNode, SyntaxToken, SyntaxTreeBuilder, TextRange, TextSize, WalkEvent, T,
};
use tt::buffer::{Cursor, TokenBuffer};
use tt::{
buffer::{Cursor, TokenBuffer},
iter::TtIter,
};
use crate::{to_parser_input::to_parser_input, tt_iter::TtIter};
use crate::to_parser_input::to_parser_input;
#[cfg(test)]
mod tests;
@@ -213,7 +216,7 @@
let mut res = Vec::new();
while iter.peek_n(0).is_some() {
let expanded = iter.expect_fragment(parser::PrefixEntryPoint::Expr, edition);
let expanded = crate::expect_fragment(&mut iter, parser::PrefixEntryPoint::Expr, edition);
res.push(match expanded.value {
None => break,

View file

@@ -1,217 +0,0 @@
//! A "Parser" structure for token trees. We use this when parsing a declarative
//! macro definition into a list of patterns and templates.
use core::fmt;
use arrayvec::ArrayVec;
use syntax::SyntaxKind;
use crate::{to_parser_input::to_parser_input, ExpandError, ExpandResult};
#[derive(Debug, Clone)]
pub(crate) struct TtIter<'a, S> {
pub(crate) inner: std::slice::Iter<'a, tt::TokenTree<S>>,
}
impl<'a, S: Copy> TtIter<'a, S> {
pub(crate) fn new(subtree: &'a tt::Subtree<S>) -> TtIter<'a, S> {
TtIter { inner: subtree.token_trees.iter() }
}
pub(crate) fn expect_char(&mut self, char: char) -> Result<(), ()> {
match self.next() {
Some(&tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: c, .. }))) if c == char => {
Ok(())
}
_ => Err(()),
}
}
pub(crate) fn expect_any_char(&mut self, chars: &[char]) -> Result<(), ()> {
match self.next() {
Some(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: c, .. })))
if chars.contains(c) =>
{
Ok(())
}
_ => Err(()),
}
}
pub(crate) fn expect_subtree(&mut self) -> Result<&'a tt::Subtree<S>, ()> {
match self.next() {
Some(tt::TokenTree::Subtree(it)) => Ok(it),
_ => Err(()),
}
}
pub(crate) fn expect_leaf(&mut self) -> Result<&'a tt::Leaf<S>, ()> {
match self.next() {
Some(tt::TokenTree::Leaf(it)) => Ok(it),
_ => Err(()),
}
}
pub(crate) fn expect_dollar(&mut self) -> Result<(), ()> {
match self.expect_leaf()? {
tt::Leaf::Punct(tt::Punct { char: '$', .. }) => Ok(()),
_ => Err(()),
}
}
pub(crate) fn expect_ident(&mut self) -> Result<&'a tt::Ident<S>, ()> {
match self.expect_leaf()? {
tt::Leaf::Ident(it) if it.text != "_" => Ok(it),
_ => Err(()),
}
}
pub(crate) fn expect_ident_or_underscore(&mut self) -> Result<&'a tt::Ident<S>, ()> {
match self.expect_leaf()? {
tt::Leaf::Ident(it) => Ok(it),
_ => Err(()),
}
}
pub(crate) fn expect_literal(&mut self) -> Result<&'a tt::Leaf<S>, ()> {
let it = self.expect_leaf()?;
match it {
tt::Leaf::Literal(_) => Ok(it),
tt::Leaf::Ident(ident) if ident.text == "true" || ident.text == "false" => Ok(it),
_ => Err(()),
}
}
pub(crate) fn expect_single_punct(&mut self) -> Result<&'a tt::Punct<S>, ()> {
match self.expect_leaf()? {
tt::Leaf::Punct(it) => Ok(it),
_ => Err(()),
}
}
/// Returns consecutive `Punct`s that can be glued together.
///
/// This method currently may return a single quotation, which is part of lifetime ident and
/// conceptually not a punct in the context of mbe. Callers should handle this.
pub(crate) fn expect_glued_punct(&mut self) -> Result<ArrayVec<tt::Punct<S>, 3>, ()> {
let tt::TokenTree::Leaf(tt::Leaf::Punct(first)) = self.next().ok_or(())?.clone() else {
return Err(());
};
let mut res = ArrayVec::new();
if first.spacing == tt::Spacing::Alone {
res.push(first);
return Ok(res);
}
let (second, third) = match (self.peek_n(0), self.peek_n(1)) {
(
Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p2))),
Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p3))),
) if p2.spacing == tt::Spacing::Joint => (p2, Some(p3)),
(Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p2))), _) => (p2, None),
_ => {
res.push(first);
return Ok(res);
}
};
match (first.char, second.char, third.map(|it| it.char)) {
('.', '.', Some('.' | '=')) | ('<', '<', Some('=')) | ('>', '>', Some('=')) => {
let _ = self.next().unwrap();
let _ = self.next().unwrap();
res.push(first);
res.push(*second);
res.push(*third.unwrap());
}
('-' | '!' | '*' | '/' | '&' | '%' | '^' | '+' | '<' | '=' | '>' | '|', '=', _)
| ('-' | '=' | '>', '>', _)
| ('<', '-', _)
| (':', ':', _)
| ('.', '.', _)
| ('&', '&', _)
| ('<', '<', _)
| ('|', '|', _) => {
let _ = self.next().unwrap();
res.push(first);
res.push(*second);
}
_ => res.push(first),
}
Ok(res)
}
pub(crate) fn peek_n(&self, n: usize) -> Option<&'a tt::TokenTree<S>> {
self.inner.as_slice().get(n)
}
}
impl<'a, S: Copy + fmt::Debug> TtIter<'a, S> {
pub(crate) fn expect_fragment(
&mut self,
entry_point: parser::PrefixEntryPoint,
edition: parser::Edition,
) -> ExpandResult<Option<tt::TokenTree<S>>> {
let buffer = tt::buffer::TokenBuffer::from_tokens(self.inner.as_slice());
let parser_input = to_parser_input(&buffer);
let tree_traversal = entry_point.parse(&parser_input, edition);
let mut cursor = buffer.begin();
let mut error = false;
for step in tree_traversal.iter() {
match step {
parser::Step::Token { kind, mut n_input_tokens } => {
if kind == SyntaxKind::LIFETIME_IDENT {
n_input_tokens = 2;
}
for _ in 0..n_input_tokens {
cursor = cursor.bump_subtree();
}
}
parser::Step::FloatSplit { .. } => {
// FIXME: We need to split the tree properly here, but mutating the token trees
// in the buffer is somewhat tricky to pull off.
cursor = cursor.bump_subtree();
}
parser::Step::Enter { .. } | parser::Step::Exit => (),
parser::Step::Error { .. } => error = true,
}
}
let err = if error || !cursor.is_root() {
Some(ExpandError::binding_error(format!("expected {entry_point:?}")))
} else {
None
};
let mut curr = buffer.begin();
let mut res = vec![];
while curr != cursor {
let Some(token) = curr.token_tree() else { break };
res.push(token.cloned());
curr = curr.bump();
}
self.inner = self.inner.as_slice()[res.len()..].iter();
let res = match &*res {
[] | [_] => res.pop(),
[first, ..] => Some(tt::TokenTree::Subtree(tt::Subtree {
delimiter: tt::Delimiter::invisible_spanned(first.first_span()),
token_trees: res.into_boxed_slice(),
})),
};
ExpandResult { value: res, err }
}
}
impl<'a, S> Iterator for TtIter<'a, S> {
type Item = &'a tt::TokenTree<S>;
fn next(&mut self) -> Option<Self::Item> {
self.inner.next()
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.inner.size_hint()
}
}
impl<S> std::iter::ExactSizeIterator for TtIter<'_, S> {}

View file

@@ -12,6 +12,7 @@ rust-version.workspace = true
doctest = false
[dependencies]
arrayvec.workspace = true
smol_str.workspace = true
text-size.workspace = true

crates/tt/src/iter.rs (new file, 161 additions)
View file

@@ -0,0 +1,161 @@
//! A "Parser" structure for token trees. We use this when parsing a declarative
//! macro definition into a list of patterns and templates.
use arrayvec::ArrayVec;
use crate::{Ident, Leaf, Punct, Spacing, Subtree, TokenTree};
#[derive(Debug, Clone)]
pub struct TtIter<'a, S> {
inner: std::slice::Iter<'a, TokenTree<S>>,
}
impl<'a, S: Copy> TtIter<'a, S> {
pub fn new(subtree: &'a Subtree<S>) -> TtIter<'a, S> {
TtIter { inner: subtree.token_trees.iter() }
}
pub fn new_iter(iter: std::slice::Iter<'a, TokenTree<S>>) -> TtIter<'a, S> {
TtIter { inner: iter }
}
pub fn expect_char(&mut self, char: char) -> Result<(), ()> {
match self.next() {
Some(&TokenTree::Leaf(Leaf::Punct(Punct { char: c, .. }))) if c == char => Ok(()),
_ => Err(()),
}
}
pub fn expect_any_char(&mut self, chars: &[char]) -> Result<(), ()> {
match self.next() {
Some(TokenTree::Leaf(Leaf::Punct(Punct { char: c, .. }))) if chars.contains(c) => {
Ok(())
}
_ => Err(()),
}
}
pub fn expect_subtree(&mut self) -> Result<&'a Subtree<S>, ()> {
match self.next() {
Some(TokenTree::Subtree(it)) => Ok(it),
_ => Err(()),
}
}
pub fn expect_leaf(&mut self) -> Result<&'a Leaf<S>, ()> {
match self.next() {
Some(TokenTree::Leaf(it)) => Ok(it),
_ => Err(()),
}
}
pub fn expect_dollar(&mut self) -> Result<(), ()> {
match self.expect_leaf()? {
Leaf::Punct(Punct { char: '$', .. }) => Ok(()),
_ => Err(()),
}
}
pub fn expect_ident(&mut self) -> Result<&'a Ident<S>, ()> {
match self.expect_leaf()? {
Leaf::Ident(it) if it.text != "_" => Ok(it),
_ => Err(()),
}
}
pub fn expect_ident_or_underscore(&mut self) -> Result<&'a Ident<S>, ()> {
match self.expect_leaf()? {
Leaf::Ident(it) => Ok(it),
_ => Err(()),
}
}
pub fn expect_literal(&mut self) -> Result<&'a Leaf<S>, ()> {
let it = self.expect_leaf()?;
match it {
Leaf::Literal(_) => Ok(it),
Leaf::Ident(ident) if ident.text == "true" || ident.text == "false" => Ok(it),
_ => Err(()),
}
}
pub fn expect_single_punct(&mut self) -> Result<&'a Punct<S>, ()> {
match self.expect_leaf()? {
Leaf::Punct(it) => Ok(it),
_ => Err(()),
}
}
/// Returns consecutive `Punct`s that can be glued together.
///
/// This method currently may return a single quotation, which is part of lifetime ident and
/// conceptually not a punct in the context of mbe. Callers should handle this.
pub fn expect_glued_punct(&mut self) -> Result<ArrayVec<Punct<S>, 3>, ()> {
let TokenTree::Leaf(Leaf::Punct(first)) = self.next().ok_or(())?.clone() else {
return Err(());
};
let mut res = ArrayVec::new();
if first.spacing == Spacing::Alone {
res.push(first);
return Ok(res);
}
let (second, third) = match (self.peek_n(0), self.peek_n(1)) {
(Some(TokenTree::Leaf(Leaf::Punct(p2))), Some(TokenTree::Leaf(Leaf::Punct(p3))))
if p2.spacing == Spacing::Joint =>
{
(p2, Some(p3))
}
(Some(TokenTree::Leaf(Leaf::Punct(p2))), _) => (p2, None),
_ => {
res.push(first);
return Ok(res);
}
};
match (first.char, second.char, third.map(|it| it.char)) {
('.', '.', Some('.' | '=')) | ('<', '<', Some('=')) | ('>', '>', Some('=')) => {
let _ = self.next().unwrap();
let _ = self.next().unwrap();
res.push(first);
res.push(*second);
res.push(*third.unwrap());
}
('-' | '!' | '*' | '/' | '&' | '%' | '^' | '+' | '<' | '=' | '>' | '|', '=', _)
| ('-' | '=' | '>', '>', _)
| ('<', '-', _)
| (':', ':', _)
| ('.', '.', _)
| ('&', '&', _)
| ('<', '<', _)
| ('|', '|', _) => {
let _ = self.next().unwrap();
res.push(first);
res.push(*second);
}
_ => res.push(first),
}
Ok(res)
}
pub fn peek_n(&self, n: usize) -> Option<&'a TokenTree<S>> {
self.inner.as_slice().get(n)
}
pub fn as_slice(&self) -> &'a [TokenTree<S>] {
self.inner.as_slice()
}
}
impl<'a, S> Iterator for TtIter<'a, S> {
type Item = &'a TokenTree<S>;
fn next(&mut self) -> Option<Self::Item> {
self.inner.next()
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.inner.size_hint()
}
}
impl<S> std::iter::ExactSizeIterator for TtIter<'_, S> {}
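With the iterator now public in `tt`, consumers no longer need to go through `mbe` to walk token trees. A small usage sketch, assuming a crate that depends on `tt` (the function name is made up):

    use tt::iter::TtIter;

    // Return the text of the subtree's leading identifier, if any.
    fn leading_ident<S: Copy>(subtree: &tt::Subtree<S>) -> Option<String> {
        let mut iter = TtIter::new(subtree);
        // expect_ident rejects a bare `_`; use expect_ident_or_underscore to accept it.
        iter.expect_ident().ok().map(|ident| ident.text.to_string())
    }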

View file

@@ -4,6 +4,9 @@
#![warn(rust_2018_idioms, unused_lifetimes)]
pub mod buffer;
pub mod iter;
use std::fmt;
use stdx::impl_from;
@@ -365,8 +368,6 @@ impl<S> Subtree<S> {
}
}
pub mod buffer;
pub fn pretty<S>(tkns: &[TokenTree<S>]) -> String {
fn tokentree_to_text<S>(tkn: &TokenTree<S>) -> String {
match tkn {