Mirror of https://github.com/rust-lang/rust-analyzer, synced 2024-12-26 04:53:34 +00:00
Only strip derive attributes when preparing macro input
parent e458f66214
commit a6dde501df
7 changed files with 87 additions and 61 deletions
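
The behavioral change in brief: the old code censored a single covering `TextRange` over every attribute up to and including the invoking derive attribute, which also stripped unrelated attributes such as `#[attr0]`; the new code collects only the `#[derive]` attributes themselves, as syntax nodes, and skips exactly those during token-tree conversion. A minimal standalone model of the new selection policy, using plain strings in place of rust-analyzer's AST types (`censor_indices` is a hypothetical name):

// Minimal model of the new censoring policy. `attrs` stands in for
// `item.attrs()`; only `derive` attributes among the first
// `derive_attr_index + 1` are censored, everything else is kept.
fn censor_indices(attrs: &[&str], derive_attr_index: usize) -> Vec<usize> {
    attrs
        .iter()
        .take(derive_attr_index + 1)
        .enumerate()
        .filter(|&(_, name)| *name == "derive")
        .map(|(i, _)| i)
        .collect()
}

fn main() {
    // #[attr0] #[derive(Derive0)] #[attr1] #[derive(Derive1)] struct S;
    let attrs = ["attr0", "derive", "attr1", "derive"];
    // Expanding the derive at index 3 now strips only the two derives;
    // #[attr0] and #[attr1] stay in the macro input.
    assert_eq!(censor_indices(&attrs, 3), vec![1, 3]);
}
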
Cargo.lock (generated, 4 changes)
@@ -1286,9 +1286,9 @@ checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b"
 
 [[package]]
 name = "rowan"
-version = "0.13.2"
+version = "0.14.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4a938f42b9c73aeece236481f37adb3debb7dfe3ae347cd6a45b5797d9ce4250"
+checksum = "86f050538a65de83ae021294fb50d57f71fb4530fe79af755fc4d4cd61082c01"
 dependencies = [
  "countme",
  "hashbrown",
crates/hir_expand/src/db.rs
@@ -3,13 +3,13 @@
 use std::sync::Arc;
 
 use base_db::{salsa, SourceDatabase};
-use itertools::Itertools;
 use limit::Limit;
 use mbe::{syntax_node_to_token_tree, ExpandError, ExpandResult};
+use rustc_hash::FxHashSet;
 use syntax::{
     algo::diff,
     ast::{self, AttrsOwner, NameOwner},
-    AstNode, GreenNode, Parse, SyntaxNode, SyntaxToken, TextRange, T,
+    AstNode, GreenNode, Parse, SyntaxNode, SyntaxToken, T,
 };
 
 use crate::{
@@ -151,7 +151,7 @@ pub fn expand_speculative(
     // Build the subtree and token mapping for the speculative args
     let censor = censor_for_macro_input(&loc, &speculative_args);
     let (mut tt, spec_args_tmap) =
-        mbe::syntax_node_to_token_tree_censored(&speculative_args, censor);
+        mbe::syntax_node_to_token_tree_censored(&speculative_args, &censor);
 
     let (attr_arg, token_id) = match loc.kind {
         MacroCallKind::Attr { invoc_attr_index, .. } => {
@@ -305,7 +305,7 @@ fn macro_arg(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(tt::Subtree,
 
     let node = SyntaxNode::new_root(arg);
     let censor = censor_for_macro_input(&loc, &node);
-    let (mut tt, tmap) = mbe::syntax_node_to_token_tree_censored(&node, censor);
+    let (mut tt, tmap) = mbe::syntax_node_to_token_tree_censored(&node, &censor);
 
     if loc.def.is_proc_macro() {
         // proc macros expect their inputs without parentheses, MBEs expect it with them included
@@ -315,24 +315,26 @@ fn macro_arg(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(tt::Subtree,
     Some(Arc::new((tt, tmap)))
 }
 
-fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> Option<TextRange> {
-    match loc.kind {
-        MacroCallKind::FnLike { .. } => None,
-        MacroCallKind::Derive { derive_attr_index, .. } => match ast::Item::cast(node.clone()) {
-            Some(item) => item
-                .attrs()
-                .map(|attr| attr.syntax().text_range())
-                .take(derive_attr_index as usize + 1)
-                .fold1(TextRange::cover),
-            None => None,
-        },
-        MacroCallKind::Attr { invoc_attr_index, .. } => match ast::Item::cast(node.clone()) {
-            Some(item) => {
-                item.attrs().nth(invoc_attr_index as usize).map(|attr| attr.syntax().text_range())
-            }
-            None => None,
-        },
-    }
+fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<SyntaxNode> {
+    (|| {
+        let censor = match loc.kind {
+            MacroCallKind::FnLike { .. } => return None,
+            MacroCallKind::Derive { derive_attr_index, .. } => ast::Item::cast(node.clone())?
+                .attrs()
+                .take(derive_attr_index as usize + 1)
+                .filter(|attr| attr.simple_name().as_deref() == Some("derive"))
+                .map(|it| it.syntax().clone())
+                .collect(),
+            MacroCallKind::Attr { invoc_attr_index, .. } => ast::Item::cast(node.clone())?
+                .attrs()
+                .nth(invoc_attr_index as usize)
+                .map(|attr| attr.syntax().clone())
+                .into_iter()
+                .collect(),
+        };
+        Some(censor)
+    })()
+    .unwrap_or_default()
 }
 
 fn macro_arg_text(db: &dyn AstDatabase, id: MacroCallId) -> Option<GreenNode> {
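
Since stable Rust has no try blocks, the rewritten `censor_for_macro_input` wraps its body in an immediately invoked closure so that `?` can bail out of the `ast::Item::cast(..)?` calls, and `unwrap_or_default()` collapses every `None` path into an empty censor set. A self-contained sketch of that idiom, with a hypothetical string-based `censor` in place of the real AST logic:

use std::collections::HashSet;

// Sketch of the immediately-invoked-closure idiom used above: inside the
// closure `?` can short-circuit, and `unwrap_or_default()` turns every
// `None` path into an empty set.
fn censor(item: Option<&str>) -> HashSet<String> {
    (|| {
        let item = item?; // like `ast::Item::cast(node.clone())?`
        Some(item.split(',').map(str::to_owned).collect())
    })()
    .unwrap_or_default()
}

fn main() {
    assert_eq!(censor(None), HashSet::new());
    assert!(censor(Some("a,b")).contains("a"));
}
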
crates/mbe/src/syntax_bridge.rs
@@ -1,14 +1,13 @@
 //! Conversions between [`SyntaxNode`] and [`tt::TokenTree`].
 
-use std::iter;
-
 use parser::{ParseError, TreeSink};
-use rustc_hash::FxHashMap;
+use rustc_hash::{FxHashMap, FxHashSet};
 use syntax::{
     ast::{self, make::tokens::doc_comment},
-    tokenize, AstToken, Parse, SmolStr, SyntaxKind,
+    tokenize, AstToken, Parse, PreorderWithTokens, SmolStr, SyntaxElement, SyntaxKind,
     SyntaxKind::*,
-    SyntaxNode, SyntaxToken, SyntaxTreeBuilder, TextRange, TextSize, Token as RawToken, T,
+    SyntaxNode, SyntaxToken, SyntaxTreeBuilder, TextRange, TextSize, Token as RawToken, WalkEvent,
+    T,
 };
 use tt::buffer::{Cursor, TokenBuffer};
 
@@ -19,14 +18,14 @@ use crate::{
 /// Convert the syntax node to a `TokenTree` (what macro
 /// will consume).
 pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> (tt::Subtree, TokenMap) {
-    syntax_node_to_token_tree_censored(node, None)
+    syntax_node_to_token_tree_censored(node, &Default::default())
 }
 
 /// Convert the syntax node to a `TokenTree` (what macro will consume)
 /// with the censored range excluded.
 pub fn syntax_node_to_token_tree_censored(
     node: &SyntaxNode,
-    censor: Option<TextRange>,
+    censor: &FxHashSet<SyntaxNode>,
 ) -> (tt::Subtree, TokenMap) {
     let global_offset = node.text_range().start();
     let mut c = Convertor::new(node, global_offset, censor);
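
The uncensored entry point now delegates by borrowing a temporary, empty, defaulted set. A tiny standalone demonstration of that `&Default::default()` pattern, with std's `HashSet` standing in for `FxHashSet` and a hypothetical `convert` function in place of the real conversion:

use std::collections::HashSet;

// Keeps every token that is not in the censor set.
fn convert(tokens: &[&str], censor: &HashSet<&str>) -> Vec<String> {
    tokens.iter().filter(|t| !censor.contains(*t)).map(|t| t.to_string()).collect()
}

fn main() {
    // Censor nothing by borrowing a temporary empty set, exactly like
    // the `&Default::default()` argument above.
    let all = convert(&["fn", "main"], &Default::default());
    assert_eq!(all, ["fn", "main"]);
}
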
@@ -424,8 +423,6 @@ impl<'a> SrcToken for (&'a RawToken, &'a str) {
     }
 }
 
-impl RawConvertor<'_> {}
-
 impl<'a> TokenConvertor for RawConvertor<'a> {
     type Token = (&'a RawToken, &'a str);
 
@@ -455,30 +452,51 @@ impl<'a> TokenConvertor for RawConvertor<'a> {
     }
 }
 
-struct Convertor {
+struct Convertor<'c> {
     id_alloc: TokenIdAlloc,
     current: Option<SyntaxToken>,
-    censor: Option<TextRange>,
+    preorder: PreorderWithTokens,
+    censor: &'c FxHashSet<SyntaxNode>,
     range: TextRange,
     punct_offset: Option<(SyntaxToken, TextSize)>,
 }
 
-impl Convertor {
-    fn new(node: &SyntaxNode, global_offset: TextSize, censor: Option<TextRange>) -> Convertor {
-        let first = node.first_token();
-        let current = match censor {
-            Some(censor) => iter::successors(first, |token| token.next_token())
-                .find(|token| !censor.contains_range(token.text_range())),
-            None => first,
-        };
+impl<'c> Convertor<'c> {
+    fn new(
+        node: &SyntaxNode,
+        global_offset: TextSize,
+        censor: &'c FxHashSet<SyntaxNode>,
+    ) -> Convertor<'c> {
+        let range = node.text_range();
+        let mut preorder = node.preorder_with_tokens();
+        let first = Self::next_token(&mut preorder, censor);
         Convertor {
             id_alloc: { TokenIdAlloc { map: TokenMap::default(), global_offset, next_id: 0 } },
-            current,
-            range: node.text_range(),
+            current: first,
+            preorder,
+            range,
             censor,
             punct_offset: None,
         }
     }
+
+    fn next_token(
+        preorder: &mut PreorderWithTokens,
+        censor: &FxHashSet<SyntaxNode>,
+    ) -> Option<SyntaxToken> {
+        while let Some(ev) = preorder.next() {
+            let ele = match ev {
+                WalkEvent::Enter(ele) => ele,
+                _ => continue,
+            };
+            match ele {
+                SyntaxElement::Token(t) => return Some(t),
+                SyntaxElement::Node(node) if censor.contains(&node) => preorder.skip_subtree(),
+                SyntaxElement::Node(_) => (),
+            }
+        }
+        None
+    }
 }
 
 #[derive(Debug)]
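
The new `next_token` helper replaces the old linear `next_token()` chain, which had to range-check every token against a censored span, with a preorder walk that calls `skip_subtree` on censored nodes, so their tokens are never visited at all. A toy model of that traversal contract, recursive where rowan's cursor is iterative, with hypothetical `Tree`/`collect_tokens` names:

// Toy model of the traversal behind `next_token`: walk the tree in
// preorder, yield leaf tokens, and never descend into a censored node
// (the `preorder.skip_subtree()` call in the real code).
enum Tree {
    Token(&'static str),
    Node { id: u32, children: Vec<Tree> },
}

fn collect_tokens(tree: &Tree, censor: &[u32], out: &mut Vec<&'static str>) {
    match tree {
        Tree::Token(text) => out.push(*text),
        Tree::Node { id, children } => {
            if censor.contains(id) {
                return; // skip_subtree: a censored node contributes nothing
            }
            for child in children {
                collect_tokens(child, censor, out);
            }
        }
    }
}

fn main() {
    // Item-ish tree: node 1 is a censored attribute, node 2 the item body.
    let tree = Tree::Node {
        id: 0,
        children: vec![
            Tree::Node { id: 1, children: vec![Tree::Token("#"), Tree::Token("[attr]")] },
            Tree::Node { id: 2, children: vec![Tree::Token("struct"), Tree::Token("S")] },
        ],
    };
    let mut out = Vec::new();
    collect_tokens(&tree, &[1], &mut out);
    assert_eq!(out, ["struct", "S"]);
}
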
@@ -511,7 +529,7 @@ impl SrcToken for SynToken {
     }
 }
 
-impl TokenConvertor for Convertor {
+impl TokenConvertor for Convertor<'_> {
     type Token = SynToken;
     fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>> {
         convert_doc_comment(token.token())
@@ -532,11 +550,7 @@ impl TokenConvertor for Convertor {
         if !&self.range.contains_range(curr.text_range()) {
             return None;
         }
-        self.current = match self.censor {
-            Some(censor) => iter::successors(curr.next_token(), |token| token.next_token())
-                .find(|token| !censor.contains_range(token.text_range())),
-            None => curr.next_token(),
-        };
+        self.current = Self::next_token(&mut self.preorder, self.censor);
         let token = if curr.kind().is_punct() {
             let range = curr.text_range();
             let range = TextRange::at(range.start(), TextSize::of('.'));
crates/mbe/src/tests.rs
@@ -1,7 +1,7 @@
 mod expand;
 mod rule;
 
-use std::fmt::Write;
+use std::{fmt::Write, iter};
 
 use syntax::{ast, AstNode, NodeOrToken, SyntaxNode, WalkEvent};
 use test_utils::assert_eq_text;
@@ -252,27 +252,36 @@ struct Struct {
     let item = source_file.items().next().unwrap();
     let attr = item.attrs().nth(1).unwrap();
 
-    let (tt, _) =
-        syntax_node_to_token_tree_censored(item.syntax(), Some(attr.syntax().text_range()));
+    let (tt, _) = syntax_node_to_token_tree_censored(
+        item.syntax(),
+        &iter::once(attr.syntax().clone()).collect(),
+    );
     expect_test::expect![[r##"# [attr0] # [attr2] struct Struct {field : ()}"##]]
         .assert_eq(&tt.to_string());
 
     let source = r##"
+#[attr0]
 #[derive(Derive0)]
+#[attr1]
 #[derive(Derive1)]
+#[attr2]
 #[derive(Derive2)]
+#[attr3]
 struct Struct {
     field: ()
 }
 "##;
     let source_file = ast::SourceFile::parse(source).ok().unwrap();
     let item = source_file.items().next().unwrap();
-    let attr = item.attrs().nth(1).unwrap();
+    let derive_attr_index = 3;
+    let censor = item
+        .attrs()
+        .take(derive_attr_index as usize + 1)
+        .filter(|attr| attr.simple_name().as_deref() == Some("derive"))
+        .map(|it| it.syntax().clone())
+        .collect();
 
-    let (tt, _) = syntax_node_to_token_tree_censored(
-        item.syntax(),
-        Some(attr.syntax().text_range().cover_offset(0.into())),
-    );
-    expect_test::expect![[r##"# [derive (Derive2)] struct Struct {field : ()}"##]]
+    let (tt, _) = syntax_node_to_token_tree_censored(item.syntax(), &censor);
+    expect_test::expect![[r##"# [attr0] # [attr1] # [attr2] # [derive (Derive2)] # [attr3] struct Struct {field : ()}"##]]
         .assert_eq(&tt.to_string());
 }
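
With attributes numbered from zero, `derive_attr_index = 3` makes `take(4)` select `#[attr0]`, `#[derive(Derive0)]`, `#[attr1]`, and `#[derive(Derive1)]`; the derive filter then censors only the two derives, so the expected output keeps every plain attribute plus the later `#[derive(Derive2)]` and `#[attr3]`.
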
crates/syntax/Cargo.toml
@@ -12,7 +12,7 @@ doctest = false
 [dependencies]
 cov-mark = "2.0.0-pre.1"
 itertools = "0.10.0"
-rowan = "0.13.0"
+rowan = "0.14.0"
 rustc_lexer = { version = "725.0.0", package = "rustc-ap-rustc_lexer" }
 rustc-hash = "1.1.0"
 once_cell = "1.3.1"
crates/syntax/src/lib.rs
@@ -52,8 +52,8 @@ pub use crate::{
     ptr::{AstPtr, SyntaxNodePtr},
     syntax_error::SyntaxError,
     syntax_node::{
-        SyntaxElement, SyntaxElementChildren, SyntaxNode, SyntaxNodeChildren, SyntaxToken,
-        SyntaxTreeBuilder,
+        PreorderWithTokens, SyntaxElement, SyntaxElementChildren, SyntaxNode, SyntaxNodeChildren,
+        SyntaxToken, SyntaxTreeBuilder,
     },
     token_text::TokenText,
 };
crates/syntax/src/syntax_node.rs
@@ -31,6 +31,7 @@ pub type SyntaxToken = rowan::SyntaxToken<RustLanguage>;
 pub type SyntaxElement = rowan::SyntaxElement<RustLanguage>;
 pub type SyntaxNodeChildren = rowan::SyntaxNodeChildren<RustLanguage>;
 pub type SyntaxElementChildren = rowan::SyntaxElementChildren<RustLanguage>;
+pub type PreorderWithTokens = rowan::api::PreorderWithTokens<RustLanguage>;
 
 #[derive(Default)]
 pub struct SyntaxTreeBuilder {