4029: Fix various proc-macro bugs r=matklad a=edwin0cheng

This PR does the following things:

1. Fixed #4001 by splitting the `LIFETIME` lexer token into two mbe tokens. This is because rustc's token stream expects a lifetime to be a combination of a punct and an ident, while RA's `tt::TokenTree` previously treated it as a single `Ident` (see the first sketch below).
2. Fixed #4003 by skipping `proc-macro`s during completion. This is because we currently have no `AstNode` for `proc-macro`; we would need to redesign how to implement `HasSource` for `proc-macro` (see the second sketch below).
3. Fixed a bug in how empty `TokenStream`s are merged in `proc-macro-srv`, so that no `L_DOLLAR` or `R_DOLLAR` tokens are emitted accidentally (see the third sketch below).
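
A minimal, self-contained sketch of the first fix, using stand-in types rather than rust-analyzer's actual `tt` crate (`Token` and `split_lifetime` below are hypothetical): a lifetime such as `'a` becomes a joint apostrophe punct followed by an ident, which is the shape rustc's token stream expects.

```rust
/// Stand-in for a proc-macro token (not the real `tt::TokenTree`).
#[derive(Debug, PartialEq)]
enum Token {
    /// `joint` means "glued to the next token", like `tt::Spacing::Joint`.
    Punct { ch: char, joint: bool },
    Ident(String),
}

/// Split a lexer lifetime token like `'a` into punct + ident.
fn split_lifetime(lifetime: &str) -> Vec<Token> {
    assert!(lifetime.starts_with('\''), "a lifetime starts with an apostrophe");
    vec![
        // The apostrophe is emitted as a *joint* punct so the two halves
        // are still recognizable as one lifetime downstream.
        Token::Punct { ch: '\'', joint: true },
        Token::Ident(lifetime[1..].to_string()),
    ]
}

fn main() {
    assert_eq!(
        split_lifetime("'a"),
        vec![Token::Punct { ch: '\'', joint: true }, Token::Ident("a".into())]
    );
}
```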

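The second fix reduces to an early return. A sketch with a hypothetical `MacroKind` enum standing in for `hir_expand::MacroDefKind`: completion bails out for proc-macros because rendering a completion item needs the macro's source AST node, which proc-macros do not carry yet.

```rust
/// Hypothetical stand-in for `hir_expand::MacroDefKind`.
#[derive(Debug)]
enum MacroKind {
    Declarative,
    BuiltIn,
    CustomDerive, // the proc-macro case
}

/// Collect a completion label, skipping proc-macros entirely.
fn add_macro_completion(kind: &MacroKind, name: &str, acc: &mut Vec<String>) {
    // Proc-macros have no AST node (and hence no source) yet, so asking
    // for their source during rendering would fail; skip them for now.
    if let MacroKind::CustomDerive = kind {
        return;
    }
    acc.push(format!("{}!", name));
}

fn main() {
    let mut acc = Vec::new();
    add_macro_completion(&MacroKind::Declarative, "vec", &mut acc);
    add_macro_completion(&MacroKind::CustomDerive, "MyDerive", &mut acc);
    assert_eq!(acc, vec!["vec!".to_string()]);
}
```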

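And a sketch of the third fix, again with stand-in types: when one `TokenStream` is extended with another, a subtree that has *no* delimiter (the representation of an empty or transparent stream) is spliced in flat instead of being pushed as a nested subtree, so no implicit-delimiter tokens (`L_DOLLAR`/`R_DOLLAR`) leak into the merged stream later.

```rust
/// Stand-in for `tt::TokenTree`; `delimiter: None` models an invisible group.
#[derive(Debug, PartialEq)]
enum TokenTree {
    Leaf(String),
    Subtree { delimiter: Option<char>, token_trees: Vec<TokenTree> },
}

/// Extend `dst` with `src`, flattening delimiter-less subtrees.
fn extend_flat(dst: &mut Vec<TokenTree>, src: Vec<TokenTree>) {
    for tt in src {
        match tt {
            // An invisible subtree is spliced in flat, so it can never be
            // re-expanded into L_DOLLAR / R_DOLLAR delimiter tokens.
            TokenTree::Subtree { delimiter: None, token_trees } => {
                dst.extend(token_trees);
            }
            other => dst.push(other),
        }
    }
}

fn main() {
    let mut dst = vec![TokenTree::Leaf("a".into())];
    let src = vec![TokenTree::Subtree {
        delimiter: None,
        token_trees: vec![TokenTree::Leaf("b".into()), TokenTree::Leaf("c".into())],
    }];
    extend_flat(&mut dst, src);
    assert_eq!(dst.len(), 3); // a, b, c (flattened, not nested)
}
```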
Co-authored-by: Edwin Cheng <edwin0cheng@gmail.com>
Committed by bors[bot] on 2020-04-18 17:56:54 +00:00 (commit 98819d8919)
7 changed files with 117 additions and 17 deletions


@@ -759,6 +759,17 @@ impl MacroDef {
     pub fn name(self, db: &dyn HirDatabase) -> Option<Name> {
         self.source(db).value.name().map(|it| it.as_name())
     }
+
+    /// Indicate it is a proc-macro
+    pub fn is_proc_macro(&self) -> bool {
+        match self.id.kind {
+            hir_expand::MacroDefKind::Declarative => false,
+            hir_expand::MacroDefKind::BuiltIn(_) => false,
+            hir_expand::MacroDefKind::BuiltInDerive(_) => false,
+            hir_expand::MacroDefKind::BuiltInEager(_) => false,
+            hir_expand::MacroDefKind::CustomDerive(_) => true,
+        }
+    }
 }
 
 /// Invariant: `inner.as_assoc_item(db).is_some()`


@@ -156,6 +156,12 @@ impl Completions {
         name: Option<String>,
         macro_: hir::MacroDef,
     ) {
+        // FIXME: Currently proc-macro do not have ast-node,
+        // such that it does not have source
+        if macro_.is_proc_macro() {
+            return;
+        }
+
         let name = match name {
             Some(it) => it,
             None => return,


@@ -202,6 +202,13 @@ impl<'a> TtIter<'a> {
     }
 
     pub(crate) fn expect_tt(&mut self) -> Result<tt::TokenTree, ()> {
+        match self.peek_n(0) {
+            Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) if punct.char == '\'' => {
+                return self.expect_lifetime();
+            }
+            _ => (),
+        }
+
         let tt = self.next().ok_or_else(|| ())?.clone();
         let punct = match tt {
             tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if punct.spacing == tt::Spacing::Joint => {
@@ -255,13 +262,21 @@ impl<'a> TtIter<'a> {
         }
     }
 
-    pub(crate) fn expect_lifetime(&mut self) -> Result<&tt::Ident, ()> {
-        let ident = self.expect_ident()?;
-        // check if it start from "`"
-        if !ident.text.starts_with('\'') {
+    pub(crate) fn expect_lifetime(&mut self) -> Result<tt::TokenTree, ()> {
+        let punct = self.expect_punct()?;
+        if punct.char != '\'' {
             return Err(());
         }
-        Ok(ident)
+        let ident = self.expect_ident()?;
+        Ok(tt::Subtree {
+            delimiter: None,
+            token_trees: vec![
+                tt::Leaf::Punct(punct.clone()).into(),
+                tt::Leaf::Ident(ident.clone()).into(),
+            ],
+        }
+        .into())
     }
 
     pub(crate) fn expect_fragment(
@@ -274,7 +289,10 @@ impl<'a> TtIter<'a> {
         }
 
         impl<'a> TreeSink for OffsetTokenSink<'a> {
-            fn token(&mut self, _kind: SyntaxKind, n_tokens: u8) {
+            fn token(&mut self, kind: SyntaxKind, mut n_tokens: u8) {
+                if kind == SyntaxKind::LIFETIME {
+                    n_tokens = 2;
+                }
                 for _ in 0..n_tokens {
                     self.cursor = self.cursor.bump_subtree();
                 }
@@ -286,7 +304,7 @@ impl<'a> TtIter<'a> {
             }
         }
 
-        let buffer = TokenBuffer::new(self.inner.as_slice());
+        let buffer = TokenBuffer::new(&self.inner.as_slice());
         let mut src = SubtreeTokenSource::new(&buffer);
         let mut sink = OffsetTokenSink { cursor: buffer.begin(), error: false };
@@ -422,7 +440,7 @@ fn match_meta_var(kind: &str, input: &mut TtIter) -> ExpandResult<Option<Fragmen
         "tt" => input.expect_tt().map(Some).map_err(|()| err!()),
         "lifetime" => input
             .expect_lifetime()
-            .map(|ident| Some(tt::Leaf::Ident(ident.clone()).into()))
+            .map(|tt| Some(tt))
             .map_err(|()| err!("expected lifetime")),
         "literal" => input
             .expect_literal()


@@ -50,6 +50,26 @@ impl<'a> SubtreeTokenSource<'a> {
     }
 
     fn get(&self, pos: usize) -> Ref<Option<TtToken>> {
+        fn is_lifetime(c: Cursor) -> Option<(Cursor, SmolStr)> {
+            let tkn = c.token_tree();
+
+            if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) = tkn {
+                if punct.char == '\'' {
+                    let next = c.bump();
+                    if let Some(tt::TokenTree::Leaf(tt::Leaf::Ident(ident))) = next.token_tree() {
+                        let res_cursor = next.bump();
+                        let text = SmolStr::new("'".to_string() + &ident.to_string());
+
+                        return Some((res_cursor, text));
+                    } else {
+                        panic!("Next token must be ident : {:#?}", next.token_tree());
+                    }
+                }
+            }
+
+            None
+        }
+
         if pos < self.cached.borrow().len() {
             return Ref::map(self.cached.borrow(), |c| &c[pos]);
         }
@@ -63,6 +83,12 @@ impl<'a> SubtreeTokenSource<'a> {
                     continue;
                 }
 
+                if let Some((curr, text)) = is_lifetime(cursor) {
+                    cached.push(Some(TtToken { kind: LIFETIME, is_joint_to_next: false, text }));
+                    self.cached_cursor.set(curr);
+                    continue;
+                }
+
                 match cursor.token_tree() {
                     Some(tt::TokenTree::Leaf(leaf)) => {
                         cached.push(Some(convert_leaf(&leaf)));
@@ -152,7 +178,11 @@ fn convert_ident(ident: &tt::Ident) -> TtToken {
 }
 
 fn convert_punct(p: tt::Punct) -> TtToken {
-    let kind = SyntaxKind::from_char(p.char).unwrap();
+    let kind = match SyntaxKind::from_char(p.char) {
+        None => panic!("{:#?} is not a valid punct", p),
+        Some(kind) => kind,
+    };
+
     let text = {
         let mut buf = [0u8; 4];
         let s: &str = p.char.encode_utf8(&mut buf);


@ -271,7 +271,7 @@ struct RawConvertor<'a> {
inner: std::slice::Iter<'a, RawToken>,
}
trait SrcToken {
trait SrcToken: std::fmt::Debug {
fn kind(&self) -> SyntaxKind;
fn to_char(&self) -> Option<char>;
@@ -361,8 +361,12 @@ trait TokenConvertor {
                     Some(next) if next.kind().is_punct() => tt::Spacing::Joint,
                     _ => tt::Spacing::Alone,
                 };
-                let char = token.to_char().expect("Token from lexer must be single char");
+                let char = match token.to_char() {
+                    Some(c) => c,
+                    None => {
+                        panic!("Token from lexer must be single char: token = {:#?}", token);
+                    }
+                };
                 tt::Leaf::from(tt::Punct { char, spacing, id: self.id_alloc().alloc(range) }).into()
             }
         } else {
@@ -373,9 +377,28 @@ trait TokenConvertor {
             }
 
             let leaf: tt::Leaf = match k {
                 T![true] | T![false] => make_leaf!(Literal),
-                IDENT | LIFETIME => make_leaf!(Ident),
+                IDENT => make_leaf!(Ident),
                 k if k.is_keyword() => make_leaf!(Ident),
                 k if k.is_literal() => make_leaf!(Literal),
+                LIFETIME => {
+                    let char_unit = TextUnit::from_usize(1);
+                    let r = TextRange::offset_len(range.start(), char_unit);
+                    let apostrophe = tt::Leaf::from(tt::Punct {
+                        char: '\'',
+                        spacing: tt::Spacing::Joint,
+                        id: self.id_alloc().alloc(r),
+                    });
+                    result.push(apostrophe.into());
+
+                    let r =
+                        TextRange::offset_len(range.start() + char_unit, range.len() - char_unit);
+                    let ident = tt::Leaf::from(tt::Ident {
+                        text: SmolStr::new(&token.to_text()[1..]),
+                        id: self.id_alloc().alloc(r),
+                    });
+                    result.push(ident.into());
+                    return;
+                }
                 _ => return,
             };
@@ -455,6 +478,7 @@ impl Convertor {
     }
 }
 
+#[derive(Debug)]
 enum SynToken {
     Ordiniary(SyntaxToken),
     Punch(SyntaxToken, TextUnit),
@@ -592,11 +616,14 @@ fn delim_to_str(d: Option<tt::DelimiterKind>, closing: bool) -> SmolStr {
 }
 
 impl<'a> TreeSink for TtTreeSink<'a> {
-    fn token(&mut self, kind: SyntaxKind, n_tokens: u8) {
+    fn token(&mut self, kind: SyntaxKind, mut n_tokens: u8) {
         if kind == L_DOLLAR || kind == R_DOLLAR {
             self.cursor = self.cursor.bump_subtree();
             return;
         }
+        if kind == LIFETIME {
+            n_tokens = 2;
+        }
+
         let mut last = self.cursor;
         for _ in 0..n_tokens {


@@ -76,7 +76,16 @@ impl Extend<TokenTree> for TokenStream {
 impl Extend<TokenStream> for TokenStream {
     fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
         for item in streams {
-            self.subtree.token_trees.extend(&mut item.into_iter())
+            for tkn in item {
+                match tkn {
+                    tt::TokenTree::Subtree(subtree) if subtree.delimiter.is_none() => {
+                        self.subtree.token_trees.extend(subtree.token_trees);
+                    }
+                    _ => {
+                        self.subtree.token_trees.push(tkn);
+                    }
+                }
+            }
         }
     }
 }


@@ -25,8 +25,7 @@ SUBTREE $
   SUBTREE () 4294967295
     IDENT feature 4294967295
     PUNCH = [alone] 4294967295
-    SUBTREE $
-      LITERAL "cargo-clippy" 0
+    LITERAL "cargo-clippy" 0
     PUNCH , [alone] 4294967295
     IDENT allow 4294967295
     SUBTREE () 4294967295