Only strip derive attributes when preparing macro input

This commit is contained in:
Lukas Wirth 2021-09-19 18:30:29 +02:00
parent e458f66214
commit a6dde501df
7 changed files with 87 additions and 61 deletions

4
Cargo.lock generated
View file

@ -1286,9 +1286,9 @@ checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b"
[[package]]
name = "rowan"
version = "0.13.2"
version = "0.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4a938f42b9c73aeece236481f37adb3debb7dfe3ae347cd6a45b5797d9ce4250"
checksum = "86f050538a65de83ae021294fb50d57f71fb4530fe79af755fc4d4cd61082c01"
dependencies = [
"countme",
"hashbrown",

View file

@ -3,13 +3,13 @@
use std::sync::Arc;
use base_db::{salsa, SourceDatabase};
use itertools::Itertools;
use limit::Limit;
use mbe::{syntax_node_to_token_tree, ExpandError, ExpandResult};
use rustc_hash::FxHashSet;
use syntax::{
algo::diff,
ast::{self, AttrsOwner, NameOwner},
AstNode, GreenNode, Parse, SyntaxNode, SyntaxToken, TextRange, T,
AstNode, GreenNode, Parse, SyntaxNode, SyntaxToken, T,
};
use crate::{
@ -151,7 +151,7 @@ pub fn expand_speculative(
// Build the subtree and token mapping for the speculative args
let censor = censor_for_macro_input(&loc, &speculative_args);
let (mut tt, spec_args_tmap) =
mbe::syntax_node_to_token_tree_censored(&speculative_args, censor);
mbe::syntax_node_to_token_tree_censored(&speculative_args, &censor);
let (attr_arg, token_id) = match loc.kind {
MacroCallKind::Attr { invoc_attr_index, .. } => {
@ -305,7 +305,7 @@ fn macro_arg(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(tt::Subtree,
let node = SyntaxNode::new_root(arg);
let censor = censor_for_macro_input(&loc, &node);
let (mut tt, tmap) = mbe::syntax_node_to_token_tree_censored(&node, censor);
let (mut tt, tmap) = mbe::syntax_node_to_token_tree_censored(&node, &censor);
if loc.def.is_proc_macro() {
// proc macros expect their inputs without parentheses, MBEs expect it with them included
@ -315,24 +315,26 @@ fn macro_arg(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(tt::Subtree,
Some(Arc::new((tt, tmap)))
}
fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> Option<TextRange> {
match loc.kind {
MacroCallKind::FnLike { .. } => None,
MacroCallKind::Derive { derive_attr_index, .. } => match ast::Item::cast(node.clone()) {
Some(item) => item
fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<SyntaxNode> {
(|| {
let censor = match loc.kind {
MacroCallKind::FnLike { .. } => return None,
MacroCallKind::Derive { derive_attr_index, .. } => ast::Item::cast(node.clone())?
.attrs()
.map(|attr| attr.syntax().text_range())
.take(derive_attr_index as usize + 1)
.fold1(TextRange::cover),
None => None,
},
MacroCallKind::Attr { invoc_attr_index, .. } => match ast::Item::cast(node.clone()) {
Some(item) => {
item.attrs().nth(invoc_attr_index as usize).map(|attr| attr.syntax().text_range())
}
None => None,
},
}
.filter(|attr| attr.simple_name().as_deref() == Some("derive"))
.map(|it| it.syntax().clone())
.collect(),
MacroCallKind::Attr { invoc_attr_index, .. } => ast::Item::cast(node.clone())?
.attrs()
.nth(invoc_attr_index as usize)
.map(|attr| attr.syntax().clone())
.into_iter()
.collect(),
};
Some(censor)
})()
.unwrap_or_default()
}
fn macro_arg_text(db: &dyn AstDatabase, id: MacroCallId) -> Option<GreenNode> {

View file

@ -1,14 +1,13 @@
//! Conversions between [`SyntaxNode`] and [`tt::TokenTree`].
use std::iter;
use parser::{ParseError, TreeSink};
use rustc_hash::FxHashMap;
use rustc_hash::{FxHashMap, FxHashSet};
use syntax::{
ast::{self, make::tokens::doc_comment},
tokenize, AstToken, Parse, SmolStr, SyntaxKind,
tokenize, AstToken, Parse, PreorderWithTokens, SmolStr, SyntaxElement, SyntaxKind,
SyntaxKind::*,
SyntaxNode, SyntaxToken, SyntaxTreeBuilder, TextRange, TextSize, Token as RawToken, T,
SyntaxNode, SyntaxToken, SyntaxTreeBuilder, TextRange, TextSize, Token as RawToken, WalkEvent,
T,
};
use tt::buffer::{Cursor, TokenBuffer};
@ -19,14 +18,14 @@ use crate::{
/// Convert the syntax node to a `TokenTree` (what macro
/// will consume).
pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> (tt::Subtree, TokenMap) {
syntax_node_to_token_tree_censored(node, None)
syntax_node_to_token_tree_censored(node, &Default::default())
}
/// Convert the syntax node to a `TokenTree` (what macro will consume)
/// with the censored range excluded.
pub fn syntax_node_to_token_tree_censored(
node: &SyntaxNode,
censor: Option<TextRange>,
censor: &FxHashSet<SyntaxNode>,
) -> (tt::Subtree, TokenMap) {
let global_offset = node.text_range().start();
let mut c = Convertor::new(node, global_offset, censor);
@ -424,8 +423,6 @@ impl<'a> SrcToken for (&'a RawToken, &'a str) {
}
}
impl RawConvertor<'_> {}
impl<'a> TokenConvertor for RawConvertor<'a> {
type Token = (&'a RawToken, &'a str);
@ -455,30 +452,51 @@ impl<'a> TokenConvertor for RawConvertor<'a> {
}
}
struct Convertor {
struct Convertor<'c> {
id_alloc: TokenIdAlloc,
current: Option<SyntaxToken>,
censor: Option<TextRange>,
preorder: PreorderWithTokens,
censor: &'c FxHashSet<SyntaxNode>,
range: TextRange,
punct_offset: Option<(SyntaxToken, TextSize)>,
}
impl Convertor {
fn new(node: &SyntaxNode, global_offset: TextSize, censor: Option<TextRange>) -> Convertor {
let first = node.first_token();
let current = match censor {
Some(censor) => iter::successors(first, |token| token.next_token())
.find(|token| !censor.contains_range(token.text_range())),
None => first,
};
// Converts a syntax tree into tokens while skipping ("censoring") the subtrees
// of the nodes in `censor` — used so derive-attribute macros don't see the
// attributes that invoked them. NOTE(review): this text appears inside a
// rendered commit diff; indentation was stripped by the web view.
impl<'c> Convertor<'c> {
// Builds a `Convertor` over `node`, pre-positioned on the first
// non-censored token.
//
// * `node` — root of the subtree to convert.
// * `global_offset` — base offset fed to the `TokenIdAlloc` for id/range
//   bookkeeping.
// * `censor` — set of syntax nodes whose entire subtrees must be omitted
//   from the produced token stream.
fn new(
node: &SyntaxNode,
global_offset: TextSize,
censor: &'c FxHashSet<SyntaxNode>,
) -> Convertor<'c> {
// Remember the full range up front; `range` later bounds token emission.
let range = node.text_range();
let mut preorder = node.preorder_with_tokens();
// Advance past any censored prefix so `current` starts on a real token.
let first = Self::next_token(&mut preorder, censor);
Convertor {
id_alloc: { TokenIdAlloc { map: TokenMap::default(), global_offset, next_id: 0 } },
current: first,
preorder,
range,
censor,
punct_offset: None,
}
}
// Returns the next token from the preorder walk, skipping the whole
// subtree of any node contained in `censor`. Returns `None` when the
// traversal is exhausted.
fn next_token(
preorder: &mut PreorderWithTokens,
censor: &FxHashSet<SyntaxNode>,
) -> Option<SyntaxToken> {
while let Some(ev) = preorder.next() {
// Only `Enter` events matter; `Leave` events are traversal bookkeeping.
let ele = match ev {
WalkEvent::Enter(ele) => ele,
_ => continue,
};
match ele {
// First token encountered wins.
SyntaxElement::Token(t) => return Some(t),
// Censored node: drop it and everything beneath it.
SyntaxElement::Node(node) if censor.contains(&node) => preorder.skip_subtree(),
// Ordinary node: descend into it normally.
SyntaxElement::Node(_) => (),
}
}
None
}
}
#[derive(Debug)]
@ -511,7 +529,7 @@ impl SrcToken for SynToken {
}
}
impl TokenConvertor for Convertor {
impl TokenConvertor for Convertor<'_> {
type Token = SynToken;
fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>> {
convert_doc_comment(token.token())
@ -532,11 +550,7 @@ impl TokenConvertor for Convertor {
if !&self.range.contains_range(curr.text_range()) {
return None;
}
self.current = match self.censor {
Some(censor) => iter::successors(curr.next_token(), |token| token.next_token())
.find(|token| !censor.contains_range(token.text_range())),
None => curr.next_token(),
};
self.current = Self::next_token(&mut self.preorder, self.censor);
let token = if curr.kind().is_punct() {
let range = curr.text_range();
let range = TextRange::at(range.start(), TextSize::of('.'));

View file

@ -1,7 +1,7 @@
mod expand;
mod rule;
use std::fmt::Write;
use std::{fmt::Write, iter};
use syntax::{ast, AstNode, NodeOrToken, SyntaxNode, WalkEvent};
use test_utils::assert_eq_text;
@ -252,27 +252,36 @@ struct Struct {
let item = source_file.items().next().unwrap();
let attr = item.attrs().nth(1).unwrap();
let (tt, _) =
syntax_node_to_token_tree_censored(item.syntax(), Some(attr.syntax().text_range()));
let (tt, _) = syntax_node_to_token_tree_censored(
item.syntax(),
&iter::once(attr.syntax().clone()).collect(),
);
expect_test::expect![[r##"# [attr0] # [attr2] struct Struct {field : ()}"##]]
.assert_eq(&tt.to_string());
let source = r##"
#[attr0]
#[derive(Derive0)]
#[attr1]
#[derive(Derive1)]
#[attr2]
#[derive(Derive2)]
#[attr3]
struct Struct {
field: ()
}
"##;
let source_file = ast::SourceFile::parse(source).ok().unwrap();
let item = source_file.items().next().unwrap();
let attr = item.attrs().nth(1).unwrap();
let derive_attr_index = 3;
let censor = item
.attrs()
.take(derive_attr_index as usize + 1)
.filter(|attr| attr.simple_name().as_deref() == Some("derive"))
.map(|it| it.syntax().clone())
.collect();
let (tt, _) = syntax_node_to_token_tree_censored(
item.syntax(),
Some(attr.syntax().text_range().cover_offset(0.into())),
);
expect_test::expect![[r##"# [derive (Derive2)] struct Struct {field : ()}"##]]
let (tt, _) = syntax_node_to_token_tree_censored(item.syntax(), &censor);
expect_test::expect![[r##"# [attr0] # [attr1] # [attr2] # [derive (Derive2)] # [attr3] struct Struct {field : ()}"##]]
.assert_eq(&tt.to_string());
}

View file

@ -12,7 +12,7 @@ doctest = false
[dependencies]
cov-mark = "2.0.0-pre.1"
itertools = "0.10.0"
rowan = "0.13.0"
rowan = "0.14.0"
rustc_lexer = { version = "725.0.0", package = "rustc-ap-rustc_lexer" }
rustc-hash = "1.1.0"
once_cell = "1.3.1"

View file

@ -52,8 +52,8 @@ pub use crate::{
ptr::{AstPtr, SyntaxNodePtr},
syntax_error::SyntaxError,
syntax_node::{
SyntaxElement, SyntaxElementChildren, SyntaxNode, SyntaxNodeChildren, SyntaxToken,
SyntaxTreeBuilder,
PreorderWithTokens, SyntaxElement, SyntaxElementChildren, SyntaxNode, SyntaxNodeChildren,
SyntaxToken, SyntaxTreeBuilder,
},
token_text::TokenText,
};

View file

@ -31,6 +31,7 @@ pub type SyntaxToken = rowan::SyntaxToken<RustLanguage>;
pub type SyntaxElement = rowan::SyntaxElement<RustLanguage>;
pub type SyntaxNodeChildren = rowan::SyntaxNodeChildren<RustLanguage>;
pub type SyntaxElementChildren = rowan::SyntaxElementChildren<RustLanguage>;
pub type PreorderWithTokens = rowan::api::PreorderWithTokens<RustLanguage>;
#[derive(Default)]
pub struct SyntaxTreeBuilder {