mirror of https://github.com/rust-lang/rust-analyzer (synced 2024-12-26 04:53:34 +00:00)

commit cff209f152 (parent 212e82fd41)
WIP: Actually fix up syntax errors in attribute macro input

6 changed files with 112 additions and 34 deletions
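Note: the new `fixup` module itself is not part of this view, but the call
sites below pin down its interface: `fixup::fixup_syntax(&node)` returns a
value with `replace` and `append` maps keyed by syntax node, whose entries are
lists of synthetic tokens to splice into the resulting token tree. A minimal
sketch of that shape, assuming the struct name `SyntaxFixups` and the stub
body, which are not shown in this commit view:

    use rustc_hash::FxHashMap;
    use syntax::SyntaxNode;

    // `SyntheticToken` is `(SyntaxKind, SmolStr)`, as defined in `mbe` below.
    use mbe::SyntheticToken;

    #[derive(Debug, Default)]
    pub struct SyntaxFixups {
        // Nodes whose subtrees are skipped and substituted by these tokens;
        // an empty list reproduces the old "censor" behavior.
        pub replace: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
        // Tokens emitted right after a node's subtree has been traversed.
        pub append: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
    }

    // Assumed signature, matching the `fixup::fixup_syntax(&node)` calls below.
    pub fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
        let fixups = SyntaxFixups::default();
        // Walk `node` here, recording replacements/appends for broken syntax.
        let _ = node;
        fixups
    }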
@@ -345,6 +345,7 @@ impl base_db::ProcMacroExpander for IdentityWhenValidProcMacroExpander {
         if parse.errors().is_empty() {
             Ok(subtree.clone())
         } else {
+            eprintln!("parse errors: {:?}", parse.errors());
             use tt::{Delimiter, DelimiterKind, Ident, Leaf, Literal, Punct, TokenTree};
             let mut subtree = Subtree::default();
             subtree.token_trees.push(TokenTree::Leaf(
@@ -86,6 +86,10 @@ fn foo() { bar.; blub }
         expect![[r##"
 #[proc_macros::identity_when_valid]
 fn foo() { bar.; blub }
-"##]],
+
+fn foo() {
+    bar.;
+    blub
+}"##]],
     );
 }
@@ -5,8 +5,8 @@ use std::sync::Arc;
 use base_db::{salsa, SourceDatabase};
 use either::Either;
 use limit::Limit;
-use mbe::{syntax_node_to_token_tree, ExpandError, ExpandResult};
-use rustc_hash::FxHashSet;
+use mbe::{syntax_node_to_token_tree, ExpandError, ExpandResult, SyntheticToken};
+use rustc_hash::{FxHashMap, FxHashSet};
 use syntax::{
     algo::diff,
     ast::{self, HasAttrs, HasDocComments},
@@ -14,7 +14,7 @@ use syntax::{
 };

 use crate::{
-    ast_id_map::AstIdMap, hygiene::HygieneFrame, BuiltinAttrExpander, BuiltinDeriveExpander,
+    ast_id_map::AstIdMap, fixup, hygiene::HygieneFrame, BuiltinAttrExpander, BuiltinDeriveExpander,
     BuiltinFnLikeExpander, ExpandTo, HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind,
     MacroCallLoc, MacroDefId, MacroDefKind, MacroFile, ProcMacroExpander,
 };
@@ -146,8 +146,10 @@ pub fn expand_speculative(

     // Build the subtree and token mapping for the speculative args
     let censor = censor_for_macro_input(&loc, &speculative_args);
+    let mut fixups = fixup::fixup_syntax(&speculative_args);
+    fixups.replace.extend(censor.into_iter().map(|node| (node, Vec::new())));
     let (mut tt, spec_args_tmap) =
-        mbe::syntax_node_to_token_tree_censored(&speculative_args, &censor);
+        mbe::syntax_node_to_token_tree_censored(&speculative_args, fixups.replace, fixups.append);

     let (attr_arg, token_id) = match loc.kind {
         MacroCallKind::Attr { invoc_attr_index, .. } => {
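The `extend` call above folds the old censor set into the new `replace` map:
mapping a node to an empty token list means "skip this subtree and emit
nothing", which is exactly what censoring used to do. A hedged sketch of that
equivalence (the helper name `censor_to_replace` is hypothetical):

    use rustc_hash::{FxHashMap, FxHashSet};
    use syntax::SyntaxNode;
    use mbe::SyntheticToken;

    // Hypothetical helper: translate a censor set into the new `replace` map.
    fn censor_to_replace(
        censor: FxHashSet<SyntaxNode>,
    ) -> FxHashMap<SyntaxNode, Vec<SyntheticToken>> {
        censor.into_iter().map(|node| (node, Vec::new())).collect()
    }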
@@ -294,8 +296,17 @@ fn macro_arg(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(tt::Subtree,
     let loc = db.lookup_intern_macro_call(id);

     let node = SyntaxNode::new_root(arg);
+    eprintln!("input text:\n{node}");
+    eprintln!("input syntax:\n{node:#?}");
     let censor = censor_for_macro_input(&loc, &node);
-    let (mut tt, tmap) = mbe::syntax_node_to_token_tree_censored(&node, &censor);
+    // TODO only fixup for attribute macro input
+    let mut fixups = fixup::fixup_syntax(&node);
+    fixups.replace.extend(censor.into_iter().map(|node| (node, Vec::new())));
+    eprintln!("fixups: {fixups:?}");
+    let (mut tt, tmap) =
+        mbe::syntax_node_to_token_tree_censored(&node, fixups.replace, fixups.append);
+
+    eprintln!("fixed-up input: {}", tt);

     if loc.def.is_proc_macro() {
         // proc macros expect their inputs without parentheses, MBEs expect it with them included
@@ -15,6 +15,7 @@ pub mod proc_macro;
 pub mod quote;
 pub mod eager;
 pub mod mod_path;
+mod fixup;

 pub use mbe::{ExpandError, ExpandResult, Origin};

@@ -30,7 +30,7 @@ pub use tt::{Delimiter, DelimiterKind, Punct};
 pub use crate::{
     syntax_bridge::{
         parse_exprs_with_sep, parse_to_token_tree, syntax_node_to_token_tree,
-        syntax_node_to_token_tree_censored, token_tree_to_syntax_node,
+        syntax_node_to_token_tree_censored, token_tree_to_syntax_node, SyntheticToken,
     },
     token_map::TokenMap,
 };
@@ -15,22 +15,26 @@ use crate::{to_parser_input::to_parser_input, tt_iter::TtIter, TokenMap};
 /// Convert the syntax node to a `TokenTree` (what macro
 /// will consume).
 pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> (tt::Subtree, TokenMap) {
-    syntax_node_to_token_tree_censored(node, &Default::default())
+    syntax_node_to_token_tree_censored(node, Default::default(), Default::default())
 }

+// TODO rename
 /// Convert the syntax node to a `TokenTree` (what macro will consume)
 /// with the censored range excluded.
 pub fn syntax_node_to_token_tree_censored(
     node: &SyntaxNode,
-    censor: &FxHashSet<SyntaxNode>,
+    replace: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+    append: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
 ) -> (tt::Subtree, TokenMap) {
     let global_offset = node.text_range().start();
-    let mut c = Convertor::new(node, global_offset, censor);
+    let mut c = Convertor::new(node, global_offset, replace, append);
     let subtree = convert_tokens(&mut c);
     c.id_alloc.map.shrink_to_fit();
     (subtree, c.id_alloc.map)
 }

+pub type SyntheticToken = (SyntaxKind, SmolStr);
+
 // The following items are what `rustc` macro can be parsed into :
 // link: https://github.com/rust-lang/rust/blob/9ebf47851a357faa4cd97f4b1dc7835f6376e639/src/libsyntax/ext/expand.rs#L141
 // * Expr(P<ast::Expr>) -> token_tree_to_expr
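A small usage sketch of the new entry point (the binding names and the
`SyntaxKind::SEMICOLON` fixup are illustrative, not from this commit):
censoring becomes an empty `replace` entry, while `append` splices synthetic
tokens in right after a node's subtree:

    use rustc_hash::FxHashMap;
    use syntax::{SmolStr, SyntaxKind, SyntaxNode};
    use mbe::SyntheticToken;

    fn example(root: &SyntaxNode, censored: SyntaxNode, stmt: SyntaxNode) -> tt::Subtree {
        let mut replace: FxHashMap<SyntaxNode, Vec<SyntheticToken>> = FxHashMap::default();
        let mut append: FxHashMap<SyntaxNode, Vec<SyntheticToken>> = FxHashMap::default();
        // Old censor behavior: drop `censored`'s subtree entirely.
        replace.insert(censored, Vec::new());
        // New fixup behavior: emit a synthetic `;` right after `stmt`'s subtree.
        append.insert(stmt, vec![(SyntaxKind::SEMICOLON, SmolStr::new(";"))]);
        let (subtree, _token_map) = mbe::syntax_node_to_token_tree_censored(root, replace, append);
        subtree
    }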
@@ -465,86 +469,124 @@ impl<'a> TokenConvertor for RawConvertor<'a> {
     }
 }

-struct Convertor<'c> {
+struct Convertor {
     id_alloc: TokenIdAlloc,
     current: Option<SyntaxToken>,
+    current_synthetic: Vec<SyntheticToken>,
     preorder: PreorderWithTokens,
-    censor: &'c FxHashSet<SyntaxNode>,
+    replace: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+    append: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
     range: TextRange,
     punct_offset: Option<(SyntaxToken, TextSize)>,
 }

-impl<'c> Convertor<'c> {
+impl Convertor {
     fn new(
         node: &SyntaxNode,
         global_offset: TextSize,
-        censor: &'c FxHashSet<SyntaxNode>,
-    ) -> Convertor<'c> {
+        replace: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+        append: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+    ) -> Convertor {
         let range = node.text_range();
         let mut preorder = node.preorder_with_tokens();
-        let first = Self::next_token(&mut preorder, censor);
+        let (first, synthetic) = Self::next_token(&mut preorder, &replace, &append);
         Convertor {
             id_alloc: { TokenIdAlloc { map: TokenMap::default(), global_offset, next_id: 0 } },
             current: first,
+            current_synthetic: synthetic,
             preorder,
             range,
-            censor,
+            replace,
+            append,
             punct_offset: None,
         }
     }

     fn next_token(
         preorder: &mut PreorderWithTokens,
-        censor: &FxHashSet<SyntaxNode>,
-    ) -> Option<SyntaxToken> {
+        replace: &FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+        append: &FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+    ) -> (Option<SyntaxToken>, Vec<SyntheticToken>) {
         while let Some(ev) = preorder.next() {
             let ele = match ev {
                 WalkEvent::Enter(ele) => ele,
+                WalkEvent::Leave(SyntaxElement::Node(node)) => {
+                    if let Some(v) = append.get(&node) {
+                        eprintln!("after {:?}, appending {:?}", node, v);
+                        if !v.is_empty() {
+                            let mut reversed = v.clone();
+                            reversed.reverse();
+                            return (None, reversed);
+                        }
+                    }
+                    continue;
+                }
                 _ => continue,
             };
             match ele {
-                SyntaxElement::Token(t) => return Some(t),
-                SyntaxElement::Node(node) if censor.contains(&node) => preorder.skip_subtree(),
-                SyntaxElement::Node(_) => (),
+                SyntaxElement::Token(t) => return (Some(t), Vec::new()),
+                SyntaxElement::Node(node) => {
+                    if let Some(v) = replace.get(&node) {
+                        preorder.skip_subtree();
+                        eprintln!("replacing {:?} by {:?}", node, v);
+                        if !v.is_empty() {
+                            let mut reversed = v.clone();
+                            reversed.reverse();
+                            return (None, reversed);
+                        }
+                    }
+                }
             }
         }
-        None
+        (None, Vec::new())
     }
 }

 #[derive(Debug)]
 enum SynToken {
     Ordinary(SyntaxToken),
+    // FIXME is this supposed to be `Punct`?
     Punch(SyntaxToken, TextSize),
+    Synthetic(SyntheticToken),
 }

 impl SynToken {
-    fn token(&self) -> &SyntaxToken {
+    fn token(&self) -> Option<&SyntaxToken> {
         match self {
-            SynToken::Ordinary(it) | SynToken::Punch(it, _) => it,
+            SynToken::Ordinary(it) | SynToken::Punch(it, _) => Some(it),
+            SynToken::Synthetic(_) => None,
         }
     }
 }

-impl<'a> SrcToken<Convertor<'a>> for SynToken {
-    fn kind(&self, _ctx: &Convertor<'a>) -> SyntaxKind {
-        self.token().kind()
+impl SrcToken<Convertor> for SynToken {
+    fn kind(&self, _ctx: &Convertor) -> SyntaxKind {
+        match self {
+            SynToken::Ordinary(token) => token.kind(),
+            SynToken::Punch(token, _) => token.kind(),
+            SynToken::Synthetic((kind, _)) => *kind,
+        }
     }
-    fn to_char(&self, _ctx: &Convertor<'a>) -> Option<char> {
+    fn to_char(&self, _ctx: &Convertor) -> Option<char> {
         match self {
             SynToken::Ordinary(_) => None,
             SynToken::Punch(it, i) => it.text().chars().nth((*i).into()),
+            SynToken::Synthetic(_) => None,
         }
     }
-    fn to_text(&self, _ctx: &Convertor<'a>) -> SmolStr {
-        self.token().text().into()
+    fn to_text(&self, _ctx: &Convertor) -> SmolStr {
+        match self {
+            SynToken::Ordinary(token) => token.text().into(),
+            SynToken::Punch(token, _) => token.text().into(),
+            SynToken::Synthetic((_, text)) => text.clone(),
+        }
     }
 }

-impl TokenConvertor for Convertor<'_> {
+impl TokenConvertor for Convertor {
     type Token = SynToken;
     fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>> {
-        convert_doc_comment(token.token())
+        convert_doc_comment(token.token()?)
     }

     fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
@@ -558,11 +600,25 @@ impl TokenConvertor for Convertor<'_> {
             }
         }

+        if let Some(synth_token) = self.current_synthetic.pop() {
+            if self.current_synthetic.is_empty() {
+                let (new_current, new_synth) =
+                    Self::next_token(&mut self.preorder, &self.replace, &self.append);
+                self.current = new_current;
+                self.current_synthetic = new_synth;
+            }
+            // TODO fix range?
+            return Some((SynToken::Synthetic(synth_token), self.range));
+        }
+
         let curr = self.current.clone()?;
         if !&self.range.contains_range(curr.text_range()) {
             return None;
         }
-        self.current = Self::next_token(&mut self.preorder, self.censor);
+        let (new_current, new_synth) =
+            Self::next_token(&mut self.preorder, &self.replace, &self.append);
+        self.current = new_current;
+        self.current_synthetic = new_synth;
         let token = if curr.kind().is_punct() {
             self.punct_offset = Some((curr.clone(), 0.into()));
             let range = curr.text_range();
@@ -585,6 +641,11 @@ impl TokenConvertor for Convertor<'_> {
             }
         }

+        if let Some(synth_token) = self.current_synthetic.last() {
+            // TODO fix range?
+            return Some(SynToken::Synthetic(synth_token.clone()));
+        }
+
         let curr = self.current.clone()?;
         if !self.range.contains_range(curr.text_range()) {
             return None;