internal: Use basic NonEmptyVec in mbe::syntax_bridge

This commit is contained in:
Lukas Wirth 2022-01-02 03:25:47 +01:00
parent a0e0e4575b
commit 65a1538dd1
3 changed files with 98 additions and 51 deletions

View file

@ -1,6 +1,7 @@
//! Conversions between [`SyntaxNode`] and [`tt::TokenTree`]. //! Conversions between [`SyntaxNode`] and [`tt::TokenTree`].
use rustc_hash::{FxHashMap, FxHashSet}; use rustc_hash::{FxHashMap, FxHashSet};
use stdx::non_empty_vec::NonEmptyVec;
use syntax::{ use syntax::{
ast::{self, make::tokens::doc_comment}, ast::{self, make::tokens::doc_comment},
AstToken, Parse, PreorderWithTokens, SmolStr, SyntaxElement, SyntaxKind, AstToken, Parse, PreorderWithTokens, SmolStr, SyntaxElement, SyntaxKind,
@ -141,25 +142,26 @@ fn convert_tokens<C: TokenConvertor>(conv: &mut C) -> tt::Subtree {
idx: !0, idx: !0,
open_range: TextRange::empty(TextSize::of('.')), open_range: TextRange::empty(TextSize::of('.')),
}; };
let mut stack = vec![entry]; let mut stack = NonEmptyVec::new(entry);
loop { loop {
let entry = stack.last_mut().unwrap(); let StackEntry { subtree, .. } = stack.last_mut();
let result = &mut entry.subtree.token_trees; let result = &mut subtree.token_trees;
let (token, range) = match conv.bump() { let (token, range) = match conv.bump() {
Some(it) => it, Some(it) => it,
None => break, None => break,
}; };
let k: SyntaxKind = token.kind(&conv); let kind = token.kind(&conv);
if k == COMMENT { if kind == COMMENT {
if let Some(tokens) = conv.convert_doc_comment(&token) { if let Some(tokens) = conv.convert_doc_comment(&token) {
// FIXME: There has to be a better way to do this // FIXME: There has to be a better way to do this
// Add the comments token id to the converted doc string // Add the comments token id to the converted doc string
let id = conv.id_alloc().alloc(range); let id = conv.id_alloc().alloc(range);
result.extend(tokens.into_iter().map(|mut tt| { result.extend(tokens.into_iter().map(|mut tt| {
if let tt::TokenTree::Subtree(sub) = &mut tt { if let tt::TokenTree::Subtree(sub) = &mut tt {
if let tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) = &mut sub.token_trees[2] if let Some(tt::TokenTree::Leaf(tt::Leaf::Literal(lit))) =
sub.token_trees.get_mut(2)
{ {
lit.id = id lit.id = id
} }
@ -169,26 +171,26 @@ fn convert_tokens<C: TokenConvertor>(conv: &mut C) -> tt::Subtree {
} }
continue; continue;
} }
let tt = if kind.is_punct() && kind != UNDERSCORE {
result.push(if k.is_punct() && k != UNDERSCORE {
assert_eq!(range.len(), TextSize::of('.')); assert_eq!(range.len(), TextSize::of('.'));
if let Some(delim) = entry.subtree.delimiter { if let Some(delim) = subtree.delimiter {
let expected = match delim.kind { let expected = match delim.kind {
tt::DelimiterKind::Parenthesis => T![')'], tt::DelimiterKind::Parenthesis => T![')'],
tt::DelimiterKind::Brace => T!['}'], tt::DelimiterKind::Brace => T!['}'],
tt::DelimiterKind::Bracket => T![']'], tt::DelimiterKind::Bracket => T![']'],
}; };
if k == expected { if kind == expected {
let entry = stack.pop().unwrap(); if let Some(entry) = stack.pop() {
conv.id_alloc().close_delim(entry.idx, Some(range)); conv.id_alloc().close_delim(entry.idx, Some(range));
stack.last_mut().unwrap().subtree.token_trees.push(entry.subtree.into()); stack.last_mut().subtree.token_trees.push(entry.subtree.into());
}
continue; continue;
} }
} }
let delim = match k { let delim = match kind {
T!['('] => Some(tt::DelimiterKind::Parenthesis), T!['('] => Some(tt::DelimiterKind::Parenthesis),
T!['{'] => Some(tt::DelimiterKind::Brace), T!['{'] => Some(tt::DelimiterKind::Brace),
T!['['] => Some(tt::DelimiterKind::Bracket), T!['['] => Some(tt::DelimiterKind::Bracket),
@ -201,36 +203,35 @@ fn convert_tokens<C: TokenConvertor>(conv: &mut C) -> tt::Subtree {
subtree.delimiter = Some(tt::Delimiter { id, kind }); subtree.delimiter = Some(tt::Delimiter { id, kind });
stack.push(StackEntry { subtree, idx, open_range: range }); stack.push(StackEntry { subtree, idx, open_range: range });
continue; continue;
} else {
let spacing = match conv.peek() {
Some(next)
if next.kind(&conv).is_trivia()
|| next.kind(&conv) == T!['[']
|| next.kind(&conv) == T!['{']
|| next.kind(&conv) == T!['('] =>
{
tt::Spacing::Alone
}
Some(next) if next.kind(&conv).is_punct() && next.kind(&conv) != UNDERSCORE => {
tt::Spacing::Joint
}
_ => tt::Spacing::Alone,
};
let char = match token.to_char(&conv) {
Some(c) => c,
None => {
panic!("Token from lexer must be single char: token = {:#?}", token);
}
};
tt::Leaf::from(tt::Punct { char, spacing, id: conv.id_alloc().alloc(range) }).into()
} }
let spacing = match conv.peek().map(|next| next.kind(&conv)) {
Some(kind)
if !kind.is_trivia()
&& kind.is_punct()
&& kind != T!['[']
&& kind != T!['{']
&& kind != T!['(']
&& kind != UNDERSCORE =>
{
tt::Spacing::Joint
}
_ => tt::Spacing::Alone,
};
let char = match token.to_char(&conv) {
Some(c) => c,
None => {
panic!("Token from lexer must be single char: token = {:#?}", token);
}
};
tt::Leaf::from(tt::Punct { char, spacing, id: conv.id_alloc().alloc(range) }).into()
} else { } else {
macro_rules! make_leaf { macro_rules! make_leaf {
($i:ident) => { ($i:ident) => {
tt::$i { id: conv.id_alloc().alloc(range), text: token.to_text(conv) }.into() tt::$i { id: conv.id_alloc().alloc(range), text: token.to_text(conv) }.into()
}; };
} }
let leaf: tt::Leaf = match k { let leaf: tt::Leaf = match kind {
T![true] | T![false] => make_leaf!(Ident), T![true] | T![false] => make_leaf!(Ident),
IDENT => make_leaf!(Ident), IDENT => make_leaf!(Ident),
UNDERSCORE => make_leaf!(Ident), UNDERSCORE => make_leaf!(Ident),
@ -258,15 +259,15 @@ fn convert_tokens<C: TokenConvertor>(conv: &mut C) -> tt::Subtree {
}; };
leaf.into() leaf.into()
}); };
result.push(tt);
} }
// If we get here, we've consumed all input tokens. // If we get here, we've consumed all input tokens.
// We might have more than one subtree in the stack, if the delimiters are improperly balanced. // We might have more than one subtree in the stack, if the delimiters are improperly balanced.
// Merge them so we're left with one. // Merge them so we're left with one.
while stack.len() > 1 { while let Some(entry) = stack.pop() {
let entry = stack.pop().unwrap(); let parent = stack.last_mut();
let parent = stack.last_mut().unwrap();
conv.id_alloc().close_delim(entry.idx, None); conv.id_alloc().close_delim(entry.idx, None);
let leaf: tt::Leaf = tt::Punct { let leaf: tt::Leaf = tt::Punct {
@ -283,13 +284,12 @@ fn convert_tokens<C: TokenConvertor>(conv: &mut C) -> tt::Subtree {
parent.subtree.token_trees.extend(entry.subtree.token_trees); parent.subtree.token_trees.extend(entry.subtree.token_trees);
} }
let subtree = stack.pop().unwrap().subtree; let subtree = stack.into_first().subtree;
if subtree.token_trees.len() == 1 { if let [tt::TokenTree::Subtree(first)] = &*subtree.token_trees {
if let tt::TokenTree::Subtree(first) = &subtree.token_trees[0] { first.clone()
return first.clone(); } else {
} subtree
} }
subtree
} }
/// Returns the textual content of a doc comment block as a quoted string /// Returns the textual content of a doc comment block as a quoted string
@ -320,7 +320,8 @@ fn convert_doc_comment(token: &syntax::SyntaxToken) -> Option<Vec<tt::TokenTree>
let meta_tkns = vec![mk_ident("doc"), mk_punct('='), mk_doc_literal(&comment)]; let meta_tkns = vec![mk_ident("doc"), mk_punct('='), mk_doc_literal(&comment)];
// Make `#![]` // Make `#![]`
let mut token_trees = vec![mk_punct('#')]; let mut token_trees = Vec::with_capacity(3);
token_trees.push(mk_punct('#'));
if let ast::CommentPlacement::Inner = doc { if let ast::CommentPlacement::Inner = doc {
token_trees.push(mk_punct('!')); token_trees.push(mk_punct('!'));
} }
@ -439,8 +440,8 @@ impl<'a> SrcToken<RawConvertor<'a>> for usize {
impl<'a> TokenConvertor for RawConvertor<'a> { impl<'a> TokenConvertor for RawConvertor<'a> {
type Token = usize; type Token = usize;
fn convert_doc_comment(&self, token: &usize) -> Option<Vec<tt::TokenTree>> { fn convert_doc_comment(&self, &token: &usize) -> Option<Vec<tt::TokenTree>> {
let text = self.lexed.text(*token); let text = self.lexed.text(token);
convert_doc_comment(&doc_comment(text)) convert_doc_comment(&doc_comment(text))
} }
@ -568,9 +569,9 @@ impl TokenConvertor for Convertor<'_> {
} }
self.current = Self::next_token(&mut self.preorder, self.censor); self.current = Self::next_token(&mut self.preorder, self.censor);
let token = if curr.kind().is_punct() { let token = if curr.kind().is_punct() {
self.punct_offset = Some((curr.clone(), 0.into()));
let range = curr.text_range(); let range = curr.text_range();
let range = TextRange::at(range.start(), TextSize::of('.')); let range = TextRange::at(range.start(), TextSize::of('.'));
self.punct_offset = Some((curr.clone(), 0.into()));
(SynToken::Punch(curr, 0.into()), range) (SynToken::Punch(curr, 0.into()), range)
} else { } else {
self.punct_offset = None; self.punct_offset = None;

View file

@ -5,6 +5,7 @@ use std::{cmp::Ordering, ops, time::Instant};
mod macros; mod macros;
pub mod process; pub mod process;
pub mod panic_context; pub mod panic_context;
pub mod non_empty_vec;
pub use always_assert::{always, never}; pub use always_assert::{always, never};

View file

@ -0,0 +1,45 @@
//! A [`Vec`] that is guaranteed to at least contain one element.

pub struct NonEmptyVec<T>(Vec<T>);

impl<T> NonEmptyVec<T> {
    /// Creates a `NonEmptyVec` whose single element is `initial`.
    #[inline]
    pub fn new(initial: T) -> Self {
        NonEmptyVec(vec![initial])
    }

    /// Returns a mutable reference to the last element.
    ///
    /// Infallible: the vector always contains at least one element.
    #[inline]
    pub fn last_mut(&mut self) -> &mut T {
        self.0.last_mut().expect("NonEmptyVec is never empty")
    }

    /// Removes and returns the last element, or `None` if removing it
    /// would leave the vector empty — the final element can never be
    /// popped, preserving the non-empty invariant.
    #[inline]
    pub fn pop(&mut self) -> Option<T> {
        if self.0.len() <= 1 {
            None
        } else {
            self.0.pop()
        }
    }

    /// Appends `value` to the back of the vector.
    #[inline]
    pub fn push(&mut self, value: T) {
        self.0.push(value)
    }

    /// Returns the number of elements; always at least 1.
    #[inline]
    pub fn len(&self) -> usize {
        self.0.len()
    }

    /// Consumes the vector and returns its *first* element.
    ///
    /// Bug fix: the previous implementation used `Vec::pop`, which removes
    /// the *last* element, so any vector holding more than one element
    /// returned the wrong value.
    #[inline]
    pub fn into_first(self) -> T {
        self.0.into_iter().next().expect("NonEmptyVec is never empty")
    }
}