mirror of https://github.com/rust-lang/rust-analyzer
synced 2025-01-27 20:35:09 +00:00

Remove Delimiter::DUMMY_INVISIBLE

This commit is contained in:
parent 2c6ce480e3
commit 7b804552a5

20 changed files with 170 additions and 106 deletions
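In short: the span-less constants tt::Delimiter::DUMMY_INVISIBLE and tt::DelimSpan::DUMMY are removed, and every code path that used to fall back on them must now supply a real span, usually the macro call site, threaded through as a new parameter (see the extra span/call_site arguments to syntax_node_to_token_tree, fixup_syntax, parse_exprs_with_sep, and friends below). Two replacement patterns recur throughout the diff: building a fresh invisible delimiter from a known span with Delimiter::invisible_spanned(span), or flipping only the delimiter kind while keeping the spans a subtree already carries. A minimal illustrative sketch of both patterns, using simplified stand-in types rather than the real tt crate definitions (a bare u32 plays the span; the real types are generic over a Span trait and carry more data):

// Simplified stand-ins for the `tt` crate types; illustrative only.
#[allow(dead_code)]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum DelimiterKind {
    Invisible,
    Parenthesis,
}

#[derive(Clone, Copy, Debug)]
struct Delimiter<S> {
    open: S,
    close: S,
    kind: DelimiterKind,
}

impl<S: Copy> Delimiter<S> {
    // The replacement for the removed `DUMMY_INVISIBLE` constant: an
    // invisible delimiter must now be anchored to a concrete span.
    const fn invisible_spanned(span: S) -> Self {
        Delimiter { open: span, close: span, kind: DelimiterKind::Invisible }
    }
}

#[derive(Debug)]
struct Subtree<S> {
    delimiter: Delimiter<S>,
    token_trees: Vec<S>,
}

fn main() {
    let call_site: u32 = 42;

    // Pattern 1: build the delimiter from a span that is at hand.
    let mut tt = Subtree {
        delimiter: Delimiter::invisible_spanned(call_site),
        token_trees: vec![1, 2, 3],
    };

    // Pattern 2: only change the kind, keeping the spans the subtree
    // already carries (used e.g. for proc-macro inputs in this diff).
    tt.delimiter.kind = DelimiterKind::Invisible;

    println!("{tt:?}");
}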
@@ -1,6 +1,6 @@
 use arbitrary::{Arbitrary, Unstructured};
 use expect_test::{expect, Expect};
-use mbe::{syntax_node_to_token_tree, DummyTestSpanMap};
+use mbe::{syntax_node_to_token_tree, DummyTestSpanMap, DUMMY};
 use syntax::{ast, AstNode};
 
 use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr};
@@ -8,7 +8,7 @@ use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr};
 fn assert_parse_result(input: &str, expected: CfgExpr) {
     let source_file = ast::SourceFile::parse(input).ok().unwrap();
     let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-    let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap);
+    let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap, DUMMY);
     let cfg = CfgExpr::parse(&tt);
     assert_eq!(cfg, expected);
 }
@@ -16,7 +16,7 @@ fn assert_parse_result(input: &str, expected: CfgExpr) {
 fn check_dnf(input: &str, expect: Expect) {
     let source_file = ast::SourceFile::parse(input).ok().unwrap();
     let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-    let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap);
+    let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap, DUMMY);
     let cfg = CfgExpr::parse(&tt);
     let actual = format!("#![cfg({})]", DnfExpr::new(cfg));
     expect.assert_eq(&actual);
@@ -25,7 +25,7 @@ fn check_dnf(input: &str, expect: Expect) {
 fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
     let source_file = ast::SourceFile::parse(input).ok().unwrap();
     let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-    let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap);
+    let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap, DUMMY);
     let cfg = CfgExpr::parse(&tt);
     let dnf = DnfExpr::new(cfg);
     let why_inactive = dnf.why_inactive(opts).unwrap().to_string();
@@ -36,7 +36,7 @@ fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
 fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) {
     let source_file = ast::SourceFile::parse(input).ok().unwrap();
     let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-    let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap);
+    let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap, DUMMY);
     let cfg = CfgExpr::parse(&tt);
     let dnf = DnfExpr::new(cfg);
     let hints = dnf.compute_enable_hints(opts).map(|diff| diff.to_string()).collect::<Vec<_>>();

@@ -1,19 +1,23 @@
 //! This module contains tests for doc-expression parsing.
 //! Currently, it tests `#[doc(hidden)]` and `#[doc(alias)]`.
 
+use triomphe::Arc;
+
 use base_db::FileId;
-use hir_expand::span_map::{RealSpanMap, SpanMapRef};
+use hir_expand::span_map::{RealSpanMap, SpanMap};
 use mbe::syntax_node_to_token_tree;
-use syntax::{ast, AstNode};
+use syntax::{ast, AstNode, TextRange};
 
 use crate::attr::{DocAtom, DocExpr};
 
 fn assert_parse_result(input: &str, expected: DocExpr) {
     let source_file = ast::SourceFile::parse(input).ok().unwrap();
     let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
+    let map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(FileId::from_raw(0))));
     let tt = syntax_node_to_token_tree(
         tt.syntax(),
-        SpanMapRef::RealSpanMap(&RealSpanMap::absolute(FileId::from_raw(0))),
+        map.as_ref(),
+        map.span_for_range(TextRange::empty(0.into())),
     );
     let cfg = DocExpr::parse(&tt);
     assert_eq!(cfg, expected);

@@ -1350,7 +1350,7 @@ fn attr_macro_as_call_id(
     let arg = match macro_attr.input.as_deref() {
         Some(AttrInput::TokenTree(tt)) => {
             let mut tt = tt.as_ref().clone();
-            tt.delimiter = tt::Delimiter::DUMMY_INVISIBLE;
+            tt.delimiter = tt::Delimiter::invisible_spanned(macro_attr.span);
             Some(tt)
         }
 
@@ -206,6 +206,7 @@ impl Attr {
         id: AttrId,
     ) -> Option<Attr> {
         let path = Interned::new(ModPath::from_src(db, ast.path()?, span_map)?);
+        let span = span_map.span_for_range(ast.syntax().text_range());
         let input = if let Some(ast::Expr::Literal(lit)) = ast.expr() {
             let value = match lit.kind() {
                 ast::LiteralKind::String(string) => string.value()?.into(),
@@ -213,12 +214,12 @@ impl Attr {
             };
             Some(Interned::new(AttrInput::Literal(value)))
         } else if let Some(tt) = ast.token_tree() {
-            let tree = syntax_node_to_token_tree(tt.syntax(), span_map);
+            let tree = syntax_node_to_token_tree(tt.syntax(), span_map, span);
             Some(Interned::new(AttrInput::TokenTree(Box::new(tree))))
         } else {
             None
         };
-        Some(Attr { id, path, input, span: span_map.span_for_range(ast.syntax().text_range()) })
+        Some(Attr { id, path, input, span })
     }
 
     fn from_tt(db: &dyn ExpandDatabase, tt: &tt::Subtree, id: AttrId) -> Option<Attr> {

@@ -101,7 +101,12 @@ fn derive_attr_expand(
         MacroCallKind::Attr { attr_args: Some(attr_args), .. } if loc.def.is_attribute_derive() => {
             attr_args
         }
-        _ => return ExpandResult::ok(tt::Subtree::empty(tt::DelimSpan::DUMMY)),
+        _ => {
+            return ExpandResult::ok(tt::Subtree::empty(tt::DelimSpan {
+                open: loc.call_site,
+                close: loc.call_site,
+            }))
+        }
     };
     pseudo_derive_attr_expansion(tt, derives, loc.call_site)
 }

@@ -246,7 +246,7 @@ fn parse_adt(
             match this {
                 Some(it) => {
                     param_type_set.insert(it.as_name());
-                    mbe::syntax_node_to_token_tree(it.syntax(), tm)
+                    mbe::syntax_node_to_token_tree(it.syntax(), tm, call_site)
                 }
                 None => {
                     tt::Subtree::empty(::tt::DelimSpan { open: call_site, close: call_site })
@@ -254,15 +254,15 @@ fn parse_adt(
                 }
             };
             let bounds = match &param {
-                ast::TypeOrConstParam::Type(it) => {
-                    it.type_bound_list().map(|it| mbe::syntax_node_to_token_tree(it.syntax(), tm))
-                }
+                ast::TypeOrConstParam::Type(it) => it
+                    .type_bound_list()
+                    .map(|it| mbe::syntax_node_to_token_tree(it.syntax(), tm, call_site)),
                 ast::TypeOrConstParam::Const(_) => None,
             };
             let ty = if let ast::TypeOrConstParam::Const(param) = param {
                 let ty = param
                     .ty()
-                    .map(|ty| mbe::syntax_node_to_token_tree(ty.syntax(), tm))
+                    .map(|ty| mbe::syntax_node_to_token_tree(ty.syntax(), tm, call_site))
                     .unwrap_or_else(|| {
                         tt::Subtree::empty(::tt::DelimSpan { open: call_site, close: call_site })
                     });
@@ -298,7 +298,7 @@ fn parse_adt(
             let name = p.path()?.qualifier()?.as_single_name_ref()?.as_name();
             param_type_set.contains(&name).then_some(p)
         })
        .map(|it| mbe::syntax_node_to_token_tree(it.syntax(), tm, call_site))
        .collect();
     let name_token = name_to_token(tm, name)?;
     Ok(BasicAdtInfo { name: name_token, shape, param_types, associated_types })

@@ -201,7 +201,7 @@ fn assert_expand(
     tt: &tt::Subtree,
     span: Span,
 ) -> ExpandResult<tt::Subtree> {
-    let args = parse_exprs_with_sep(tt, ',');
+    let args = parse_exprs_with_sep(tt, ',', span);
     let dollar_crate = tt::Ident { text: SmolStr::new_inline("$crate"), span };
     let expanded = match &*args {
         [cond, panic_args @ ..] => {

@@ -72,7 +72,7 @@ impl DeclarativeMacroExpander {
             });
         match self.mac.err() {
             Some(e) => ExpandResult::new(
-                tt::Subtree::empty(tt::DelimSpan::DUMMY),
+                tt::Subtree::empty(tt::DelimSpan { open: loc.call_site, close: loc.call_site }),
                 ExpandError::other(format!("invalid macro definition: {e}")),
             ),
             None => self
@@ -108,7 +108,7 @@ impl DeclarativeMacroExpander {
             });
         match self.mac.err() {
             Some(e) => ExpandResult::new(
-                tt::Subtree::empty(tt::DelimSpan::DUMMY),
+                tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
                 ExpandError::other(format!("invalid macro definition: {e}")),
             ),
             None => self.mac.expand(&tt, |_| (), new_meta_vars, call_site).map_err(Into::into),
@@ -244,12 +244,13 @@ pub fn expand_speculative(
 
     // Build the subtree and token mapping for the speculative args
     let (mut tt, undo_info) = match loc.kind {
-        MacroCallKind::FnLike { .. } => {
-            (mbe::syntax_node_to_token_tree(speculative_args, span_map), SyntaxFixupUndoInfo::NONE)
-        }
+        MacroCallKind::FnLike { .. } => (
+            mbe::syntax_node_to_token_tree(speculative_args, span_map, loc.call_site),
+            SyntaxFixupUndoInfo::NONE,
+        ),
         MacroCallKind::Derive { .. } | MacroCallKind::Attr { .. } => {
             let censor = censor_for_macro_input(&loc, speculative_args);
-            let mut fixups = fixup::fixup_syntax(span_map, speculative_args);
+            let mut fixups = fixup::fixup_syntax(span_map, speculative_args, loc.call_site);
             fixups.append.retain(|it, _| match it {
                 syntax::NodeOrToken::Node(it) => !censor.contains(it),
                 syntax::NodeOrToken::Token(_) => true,
@@ -261,6 +262,7 @@ pub fn expand_speculative(
                 span_map,
                 fixups.append,
                 fixups.remove,
+                loc.call_site,
             ),
             fixups.undo_info,
         )
@@ -282,8 +284,9 @@ pub fn expand_speculative(
             }?;
             match attr.token_tree() {
                 Some(token_tree) => {
-                    let mut tree = syntax_node_to_token_tree(token_tree.syntax(), span_map);
-                    tree.delimiter = tt::Delimiter::DUMMY_INVISIBLE;
+                    let mut tree =
+                        syntax_node_to_token_tree(token_tree.syntax(), span_map, loc.call_site);
+                    tree.delimiter = tt::Delimiter::invisible_spanned(loc.call_site);
 
                     Some(tree)
                 }
@@ -297,7 +300,7 @@ pub fn expand_speculative(
     // Otherwise the expand query will fetch the non speculative attribute args and pass those instead.
     let mut speculative_expansion = match loc.def.kind {
         MacroDefKind::ProcMacro(expander, ..) => {
-            tt.delimiter = tt::Delimiter::DUMMY_INVISIBLE;
+            tt.delimiter = tt::Delimiter::invisible_spanned(loc.call_site);
             let call_site = loc.span(db);
             expander.expand(
                 db,
@@ -473,12 +476,13 @@ fn macro_arg(
         MacroCallKind::Attr { ast_id, .. } => ast_id.to_ptr(db).to_node(&root).syntax().clone(),
     };
     let (mut tt, undo_info) = match loc.kind {
-        MacroCallKind::FnLike { .. } => {
-            (mbe::syntax_node_to_token_tree(&syntax, map.as_ref()), SyntaxFixupUndoInfo::NONE)
-        }
+        MacroCallKind::FnLike { .. } => (
+            mbe::syntax_node_to_token_tree(&syntax, map.as_ref(), loc.call_site),
+            SyntaxFixupUndoInfo::NONE,
+        ),
         MacroCallKind::Derive { .. } | MacroCallKind::Attr { .. } => {
             let censor = censor_for_macro_input(&loc, &syntax);
-            let mut fixups = fixup::fixup_syntax(map.as_ref(), &syntax);
+            let mut fixups = fixup::fixup_syntax(map.as_ref(), &syntax, loc.call_site);
             fixups.append.retain(|it, _| match it {
                 syntax::NodeOrToken::Node(it) => !censor.contains(it),
                 syntax::NodeOrToken::Token(_) => true,
@@ -490,6 +494,7 @@ fn macro_arg(
                 map.as_ref(),
                 fixups.append.clone(),
                 fixups.remove.clone(),
+                loc.call_site,
             );
             reverse_fixups(&mut tt, &fixups.undo_info);
         }
@@ -499,6 +504,7 @@ fn macro_arg(
                     map,
                     fixups.append,
                     fixups.remove,
+                    loc.call_site,
                 ),
                 fixups.undo_info,
             )
@@ -507,7 +513,7 @@ fn macro_arg(
 
     if loc.def.is_proc_macro() {
         // proc macros expect their inputs without parentheses, MBEs expect it with them included
-        tt.delimiter = tt::Delimiter::DUMMY_INVISIBLE;
+        tt.delimiter.kind = tt::DelimiterKind::Invisible;
     }
 
     if matches!(loc.def.kind, MacroDefKind::BuiltInEager(..)) {
@@ -611,7 +617,11 @@ fn decl_macro_expander(
         ast::Macro::MacroRules(macro_rules) => (
             match macro_rules.token_tree() {
                 Some(arg) => {
-                    let tt = mbe::syntax_node_to_token_tree(arg.syntax(), map.as_ref());
+                    let tt = mbe::syntax_node_to_token_tree(
+                        arg.syntax(),
+                        map.as_ref(),
+                        map.span_for_range(macro_rules.macro_rules_token().unwrap().text_range()),
+                    );
                     let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021, new_meta_vars);
                     mac
                 }
@@ -625,7 +635,11 @@ fn decl_macro_expander(
         ast::Macro::MacroDef(macro_def) => (
             match macro_def.body() {
                 Some(arg) => {
-                    let tt = mbe::syntax_node_to_token_tree(arg.syntax(), map.as_ref());
+                    let tt = mbe::syntax_node_to_token_tree(
+                        arg.syntax(),
+                        map.as_ref(),
+                        map.span_for_range(macro_def.macro_token().unwrap().text_range()),
+                    );
                     let mac = mbe::DeclarativeMacro::parse_macro2(&tt, is_2021, new_meta_vars);
                     mac
                 }
@@ -677,7 +691,7 @@ fn macro_expand(
     let Some((macro_arg, undo_info)) = value else {
         return ExpandResult {
             value: Arc::new(tt::Subtree {
-                delimiter: tt::Delimiter::DUMMY_INVISIBLE,
+                delimiter: tt::Delimiter::invisible_spanned(loc.call_site),
                 token_trees: Vec::new(),
             }),
             // FIXME: We should make sure to enforce an invariant that invalid macro
@@ -736,7 +750,7 @@ fn macro_expand(
     // Skip checking token tree limit for include! macro call
     if !loc.def.is_include() {
         // Set a hard limit for the expanded tt
-        if let Err(value) = check_tt_count(&tt) {
+        if let Err(value) = check_tt_count(&tt, loc.call_site) {
             return value;
         }
     }
@@ -749,7 +763,7 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<A
     let Some((macro_arg, undo_info)) = db.macro_arg(id).value else {
         return ExpandResult {
             value: Arc::new(tt::Subtree {
-                delimiter: tt::Delimiter::DUMMY_INVISIBLE,
+                delimiter: tt::Delimiter::invisible_spanned(loc.call_site),
                 token_trees: Vec::new(),
             }),
            // FIXME: We should make sure to enforce an invariant that invalid macro
@@ -783,7 +797,7 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<A
     );
 
     // Set a hard limit for the expanded tt
-    if let Err(value) = check_tt_count(&tt) {
+    if let Err(value) = check_tt_count(&tt, loc.call_site) {
         return value;
     }
 
@@ -806,12 +820,12 @@ fn token_tree_to_syntax_node(
     mbe::token_tree_to_syntax_node(tt, entry_point)
 }
 
-fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<Arc<tt::Subtree>>> {
+fn check_tt_count(tt: &tt::Subtree, call_site: Span) -> Result<(), ExpandResult<Arc<tt::Subtree>>> {
     let count = tt.count();
     if TOKEN_LIMIT.check(count).is_err() {
         Err(ExpandResult {
             value: Arc::new(tt::Subtree {
-                delimiter: tt::Delimiter::DUMMY_INVISIBLE,
+                delimiter: tt::Delimiter::invisible_spanned(call_site),
                 token_trees: vec![],
             }),
             err: Some(ExpandError::other(format!(

@@ -82,9 +82,9 @@ pub fn expand_eager_macro_input(
         return ExpandResult { value: None, err };
     };
 
-    let mut subtree = mbe::syntax_node_to_token_tree(&expanded_eager_input, arg_map);
+    let mut subtree = mbe::syntax_node_to_token_tree(&expanded_eager_input, arg_map, call_site);
 
-    subtree.delimiter = crate::tt::Delimiter::DUMMY_INVISIBLE;
+    subtree.delimiter.kind = crate::tt::DelimiterKind::Invisible;
 
     let loc = MacroCallLoc {
         def,

@@ -4,7 +4,7 @@
 use la_arena::RawIdx;
 use rustc_hash::{FxHashMap, FxHashSet};
 use smallvec::SmallVec;
-use span::{ErasedFileAstId, FileId, SpanAnchor, SpanData};
+use span::{ErasedFileAstId, FileId, Span, SpanAnchor, SpanData};
 use stdx::never;
 use syntax::{
     ast::{self, AstNode, HasLoopBody},
@@ -48,7 +48,11 @@ const FIXUP_DUMMY_AST_ID: ErasedFileAstId = ErasedFileAstId::from_raw(RawIdx::fr
 const FIXUP_DUMMY_RANGE: TextRange = TextRange::empty(TextSize::new(0));
 const FIXUP_DUMMY_RANGE_END: TextSize = TextSize::new(!0);
 
-pub(crate) fn fixup_syntax(span_map: SpanMapRef<'_>, node: &SyntaxNode) -> SyntaxFixups {
+pub(crate) fn fixup_syntax(
+    span_map: SpanMapRef<'_>,
+    node: &SyntaxNode,
+    call_site: Span,
+) -> SyntaxFixups {
     let mut append = FxHashMap::<SyntaxElement, _>::default();
     let mut remove = FxHashSet::<SyntaxNode>::default();
     let mut preorder = node.preorder();
@@ -69,7 +73,7 @@ pub(crate) fn fixup_syntax(span_map: SpanMapRef<'_>, node: &SyntaxNode) -> Synta
         if can_handle_error(&node) && has_error_to_handle(&node) {
             remove.insert(node.clone().into());
             // the node contains an error node, we have to completely replace it by something valid
-            let original_tree = mbe::syntax_node_to_token_tree(&node, span_map);
+            let original_tree = mbe::syntax_node_to_token_tree(&node, span_map, call_site);
             let idx = original.len() as u32;
             original.push(original_tree);
             let replacement = Leaf::Ident(Ident {
@@ -358,6 +362,7 @@ fn reverse_fixups_(tt: &mut Subtree, undo_info: &[Subtree]) {
 mod tests {
     use base_db::FileId;
     use expect_test::{expect, Expect};
+    use syntax::TextRange;
     use triomphe::Arc;
 
     use crate::{
@@ -395,12 +400,17 @@ mod tests {
     fn check(ra_fixture: &str, mut expect: Expect) {
         let parsed = syntax::SourceFile::parse(ra_fixture);
         let span_map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(FileId::from_raw(0))));
-        let fixups = super::fixup_syntax(span_map.as_ref(), &parsed.syntax_node());
+        let fixups = super::fixup_syntax(
+            span_map.as_ref(),
+            &parsed.syntax_node(),
+            span_map.span_for_range(TextRange::empty(0.into())),
+        );
         let mut tt = mbe::syntax_node_to_token_tree_modified(
             &parsed.syntax_node(),
             span_map.as_ref(),
             fixups.append,
             fixups.remove,
+            span_map.span_for_range(TextRange::empty(0.into())),
         );
 
         let actual = format!("{tt}\n");
@@ -420,8 +430,11 @@ mod tests {
 
         // the fixed-up + reversed version should be equivalent to the original input
         // modulo token IDs and `Punct`s' spacing.
-        let original_as_tt =
-            mbe::syntax_node_to_token_tree(&parsed.syntax_node(), span_map.as_ref());
+        let original_as_tt = mbe::syntax_node_to_token_tree(
+            &parsed.syntax_node(),
+            span_map.as_ref(),
+            span_map.span_for_range(TextRange::empty(0.into())),
+        );
         assert!(
             check_subtree_eq(&tt, &original_as_tt),
             "different token tree:\n{tt:?}\n\n{original_as_tt:?}"

@@ -719,7 +719,7 @@ impl ExpansionInfo {
         let (macro_arg, _) = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| {
             (
                 Arc::new(tt::Subtree {
-                    delimiter: tt::Delimiter::DUMMY_INVISIBLE,
+                    delimiter: tt::Delimiter::invisible_spanned(loc.call_site),
                     token_trees: Vec::new(),
                 }),
                 SyntaxFixupUndoInfo::NONE,

@@ -67,8 +67,11 @@ fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree<DummyTestSpanData>
         .filter_map(ast::MacroRules::cast)
         .map(|rule| {
             let id = rule.name().unwrap().to_string();
-            let def_tt =
-                syntax_node_to_token_tree(rule.token_tree().unwrap().syntax(), DummyTestSpanMap);
+            let def_tt = syntax_node_to_token_tree(
+                rule.token_tree().unwrap().syntax(),
+                DummyTestSpanMap,
+                DUMMY,
+            );
             (id, def_tt)
         })
         .collect()

@@ -56,7 +56,10 @@ pub(crate) fn expand_rules<S: Span>(
         ExpandResult { value, err: match_.err.or(transcribe_err) }
     } else {
         ExpandResult::new(
-            tt::Subtree { delimiter: tt::Delimiter::DUMMY_INVISIBLE, token_trees: vec![] },
+            tt::Subtree {
+                delimiter: tt::Delimiter::invisible_spanned(call_site),
+                token_trees: vec![],
+            },
             ExpandError::NoMatchingRule,
         )
     }
@@ -128,6 +131,7 @@ enum Binding<S> {
 
 #[derive(Debug, Clone, PartialEq, Eq)]
 enum Fragment<S> {
+    Empty,
     /// token fragments are just copy-pasted into the output
     Tokens(tt::TokenTree<S>),
     /// Expr ast fragments are surrounded with `()` on insertion to preserve

@@ -63,7 +63,7 @@ use std::rc::Rc;
 
 use smallvec::{smallvec, SmallVec};
 use syntax::SmolStr;
-use tt::Span;
+use tt::{DelimSpan, Span};
 
 use crate::{
     expander::{Binding, Bindings, ExpandResult, Fragment},
@@ -74,11 +74,7 @@ use crate::{
 
 impl<S: Span> Bindings<S> {
     fn push_optional(&mut self, name: &SmolStr) {
-        // FIXME: Do we have a better way to represent an empty token ?
-        // Insert an empty subtree for empty token
-        let tt =
-            tt::Subtree { delimiter: tt::Delimiter::DUMMY_INVISIBLE, token_trees: vec![] }.into();
-        self.inner.insert(name.clone(), Binding::Fragment(Fragment::Tokens(tt)));
+        self.inner.insert(name.clone(), Binding::Fragment(Fragment::Empty));
     }
 
     fn push_empty(&mut self, name: &SmolStr) {
@@ -387,6 +383,7 @@ fn match_loop_inner<'t, S: Span>(
     eof_items: &mut SmallVec<[MatchState<'t, S>; 1]>,
     error_items: &mut SmallVec<[MatchState<'t, S>; 1]>,
     is_2021: bool,
+    delim_span: tt::DelimSpan<S>,
 ) {
     macro_rules! try_push {
         ($items: expr, $it:expr) => {
@@ -474,7 +471,7 @@ fn match_loop_inner<'t, S: Span>(
                 cur_items.push(new_item);
             }
             cur_items.push(MatchState {
-                dot: tokens.iter_delimited(None),
+                dot: tokens.iter_delimited(delim_span),
                 stack: Default::default(),
                 up: Some(Box::new(item)),
                 sep: separator.clone(),
@@ -489,7 +486,7 @@ fn match_loop_inner<'t, S: Span>(
             if let Ok(subtree) = src.clone().expect_subtree() {
                 if subtree.delimiter.kind == delimiter.kind {
                     item.stack.push(item.dot);
-                    item.dot = tokens.iter_delimited(Some(*delimiter));
+                    item.dot = tokens.iter_delimited_with(*delimiter);
                     cur_items.push(item);
                 }
             }
@@ -497,7 +494,7 @@ fn match_loop_inner<'t, S: Span>(
         OpDelimited::Op(Op::Var { kind, name, .. }) => {
             if let &Some(kind) = kind {
                 let mut fork = src.clone();
-                let match_res = match_meta_var(kind, &mut fork, is_2021);
+                let match_res = match_meta_var(kind, &mut fork, is_2021, delim_span);
                 match match_res.err {
                     None => {
                         // Some meta variables are optional (e.g. vis)
@@ -611,6 +608,7 @@ fn match_loop_inner<'t, S: Span>(
 }
 
 fn match_loop<S: Span>(pattern: &MetaTemplate<S>, src: &tt::Subtree<S>, is_2021: bool) -> Match<S> {
+    let span = src.delimiter.delim_span();
     let mut src = TtIter::new(src);
     let mut stack: SmallVec<[TtIter<'_, S>; 1]> = SmallVec::new();
     let mut res = Match::default();
@@ -619,7 +617,7 @@ fn match_loop<S: Span>(pattern: &MetaTemplate<S>, src: &tt::Subtree<S>, is_2021:
     let mut bindings_builder = BindingsBuilder::default();
 
     let mut cur_items = smallvec![MatchState {
-        dot: pattern.iter_delimited(None),
+        dot: pattern.iter_delimited(span),
         stack: Default::default(),
         up: None,
         sep: None,
@@ -650,6 +648,7 @@ fn match_loop<S: Span>(pattern: &MetaTemplate<S>, src: &tt::Subtree<S>, is_2021:
             &mut eof_items,
             &mut error_items,
             is_2021,
+            span,
         );
         stdx::always!(cur_items.is_empty());
 
@@ -763,12 +762,13 @@ fn match_meta_var<S: Span>(
     kind: MetaVarKind,
     input: &mut TtIter<'_, S>,
     is_2021: bool,
+    delim_span: DelimSpan<S>,
 ) -> ExpandResult<Option<Fragment<S>>> {
     let fragment = match kind {
         MetaVarKind::Path => {
-            return input
-                .expect_fragment(parser::PrefixEntryPoint::Path)
-                .map(|it| it.map(tt::TokenTree::subtree_or_wrap).map(Fragment::Path));
+            return input.expect_fragment(parser::PrefixEntryPoint::Path).map(|it| {
+                it.map(|it| tt::TokenTree::subtree_or_wrap(it, delim_span)).map(Fragment::Path)
+            });
         }
         MetaVarKind::Ty => parser::PrefixEntryPoint::Ty,
         MetaVarKind::Pat if is_2021 => parser::PrefixEntryPoint::PatTop,
@@ -860,11 +860,14 @@ fn collect_vars<S: Span>(collector_fun: &mut impl FnMut(SmolStr), pattern: &Meta
     }
 }
 impl<S: Span> MetaTemplate<S> {
-    fn iter_delimited(&self, delimited: Option<tt::Delimiter<S>>) -> OpDelimitedIter<'_, S> {
+    fn iter_delimited_with(&self, delimiter: tt::Delimiter<S>) -> OpDelimitedIter<'_, S> {
+        OpDelimitedIter { inner: &self.0, idx: 0, delimited: delimiter }
+    }
+    fn iter_delimited(&self, span: tt::DelimSpan<S>) -> OpDelimitedIter<'_, S> {
         OpDelimitedIter {
             inner: &self.0,
             idx: 0,
-            delimited: delimited.unwrap_or(tt::Delimiter::DUMMY_INVISIBLE),
+            delimited: tt::Delimiter::invisible_delim_spanned(span),
         }
     }
 }

@@ -59,12 +59,12 @@ impl<S: Span> Bindings<S> {
                     token_trees: token_trees.clone(),
                 };
                 Ok(match f {
-                    Fragment::Tokens(_) => unreachable!(),
+                    Fragment::Tokens(_) | Fragment::Empty => unreachable!(),
                     Fragment::Expr(_) => Fragment::Expr,
                     Fragment::Path(_) => Fragment::Path,
                 }(subtree))
             }
-            Binding::Fragment(it @ Fragment::Tokens(_)) => Ok(it.clone()),
+            Binding::Fragment(it @ (Fragment::Tokens(_) | Fragment::Empty)) => Ok(it.clone()),
             // emit some reasonable default expansion for missing bindings,
             // this gives better recovery than emitting the `$fragment-name` verbatim
             Binding::Missing(it) => Ok({
@@ -87,10 +87,7 @@ impl<S: Span> Bindings<S> {
                 })),
                 // FIXME: Meta and Item should get proper defaults
                 MetaVarKind::Meta | MetaVarKind::Item | MetaVarKind::Tt | MetaVarKind::Vis => {
-                    Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree {
-                        delimiter: tt::Delimiter::DUMMY_INVISIBLE,
-                        token_trees: vec![],
-                    }))
+                    Fragment::Empty
                 }
                 MetaVarKind::Path
                 | MetaVarKind::Ty
@@ -351,7 +348,7 @@ fn expand_var<S: Span>(
             // ```
             // We just treat it a normal tokens
             let tt = tt::Subtree {
-                delimiter: tt::Delimiter::DUMMY_INVISIBLE,
+                delimiter: tt::Delimiter::invisible_spanned(id),
                 token_trees: vec![
                     tt::Leaf::from(tt::Punct { char: '$', spacing: tt::Spacing::Alone, span: id })
                         .into(),
@@ -422,7 +419,7 @@ fn expand_repeat<S: Span>(
             continue;
         }
 
-        t.delimiter = tt::Delimiter::DUMMY_INVISIBLE;
+        t.delimiter.kind = tt::DelimiterKind::Invisible;
         push_subtree(&mut buf, t);
 
         if let Some(sep) = separator {
@@ -456,7 +453,11 @@ fn expand_repeat<S: Span>(
 
     // Check if it is a single token subtree without any delimiter
     // e.g {Delimiter:None> ['>'] /Delimiter:None>}
-    let tt = tt::Subtree { delimiter: tt::Delimiter::DUMMY_INVISIBLE, token_trees: buf }.into();
+    let tt = tt::Subtree {
+        delimiter: tt::Delimiter::invisible_spanned(ctx.call_site),
+        token_trees: buf,
+    }
+    .into();
 
     if RepeatKind::OneOrMore == kind && counter == 0 {
         return ExpandResult {
@@ -479,6 +480,7 @@ fn push_fragment<S: Span>(
         }
         Fragment::Path(tt) => fix_up_and_push_path_tt(ctx, buf, tt),
         Fragment::Tokens(tt) => buf.push(tt),
+        Fragment::Empty => (),
     }
 }

@@ -78,13 +78,14 @@ pub(crate) mod dummy_test_span_utils {
 pub fn syntax_node_to_token_tree<Ctx, SpanMap>(
     node: &SyntaxNode,
     map: SpanMap,
+    span: SpanData<Ctx>,
 ) -> tt::Subtree<SpanData<Ctx>>
 where
     SpanData<Ctx>: Span,
     Ctx: SyntaxContext,
     SpanMap: SpanMapper<SpanData<Ctx>>,
 {
-    let mut c = Converter::new(node, map, Default::default(), Default::default());
+    let mut c = Converter::new(node, map, Default::default(), Default::default(), span);
     convert_tokens(&mut c)
 }
 
@@ -96,13 +97,14 @@ pub fn syntax_node_to_token_tree_modified<Ctx, SpanMap>(
     map: SpanMap,
     append: FxHashMap<SyntaxElement, Vec<tt::Leaf<SpanData<Ctx>>>>,
     remove: FxHashSet<SyntaxNode>,
+    call_site: SpanData<Ctx>,
 ) -> tt::Subtree<SpanData<Ctx>>
 where
     SpanMap: SpanMapper<SpanData<Ctx>>,
     SpanData<Ctx>: Span,
     Ctx: SyntaxContext,
 {
-    let mut c = Converter::new(node, map, append, remove);
+    let mut c = Converter::new(node, map, append, remove, call_site);
     convert_tokens(&mut c)
 }
 
@@ -187,7 +189,11 @@ where
 }
 
 /// Split token tree with separate expr: $($e:expr)SEP*
-pub fn parse_exprs_with_sep<S: Span>(tt: &tt::Subtree<S>, sep: char) -> Vec<tt::Subtree<S>> {
+pub fn parse_exprs_with_sep<S: Span>(
+    tt: &tt::Subtree<S>,
+    sep: char,
+    span: S,
+) -> Vec<tt::Subtree<S>> {
     if tt.token_trees.is_empty() {
         return Vec::new();
     }
@@ -200,7 +206,7 @@ pub fn parse_exprs_with_sep<S: Span>(tt: &tt::Subtree<S>, sep: char) -> Vec<tt::
 
         res.push(match expanded.value {
             None => break,
-            Some(tt) => tt.subtree_or_wrap(),
+            Some(tt) => tt.subtree_or_wrap(tt::DelimSpan { open: span, close: span }),
         });
 
         let mut fork = iter.clone();
@@ -212,7 +218,7 @@ pub fn parse_exprs_with_sep<S: Span>(tt: &tt::Subtree<S>, sep: char) -> Vec<tt::
 
     if iter.peek_n(0).is_some() {
         res.push(tt::Subtree {
-            delimiter: tt::Delimiter::DUMMY_INVISIBLE,
+            delimiter: tt::Delimiter::invisible_spanned(span),
             token_trees: iter.cloned().collect(),
         });
     }
@@ -225,7 +231,10 @@ where
     C: TokenConverter<S>,
     S: Span,
 {
-    let entry = tt::Subtree { delimiter: tt::Delimiter::DUMMY_INVISIBLE, token_trees: vec![] };
+    let entry = tt::Subtree {
+        delimiter: tt::Delimiter::invisible_spanned(conv.call_site()),
+        token_trees: vec![],
+    };
     let mut stack = NonEmptyVec::new(entry);
 
     while let Some((token, abs_range)) = conv.bump() {
@@ -490,6 +499,8 @@ trait TokenConverter<S>: Sized {
     fn peek(&self) -> Option<Self::Token>;
 
     fn span_for(&self, range: TextRange) -> S;
+
+    fn call_site(&self) -> S;
 }
 
 impl<S, Ctx> SrcToken<RawConverter<'_, Ctx>, S> for usize {
@@ -557,6 +568,10 @@ where
     fn span_for(&self, range: TextRange) -> SpanData<Ctx> {
         SpanData { range, anchor: self.anchor, ctx: self.ctx }
     }
+
+    fn call_site(&self) -> SpanData<Ctx> {
+        SpanData { range: TextRange::empty(0.into()), anchor: self.anchor, ctx: self.ctx }
+    }
 }
 
 impl<S> TokenConverter<S> for StaticRawConverter<'_, S>
@@ -592,6 +607,10 @@ where
     fn span_for(&self, _: TextRange) -> S {
         self.span
     }
+
+    fn call_site(&self) -> S {
+        self.span
+    }
 }
 
 struct Converter<SpanMap, S> {
@@ -604,6 +623,7 @@ struct Converter<SpanMap, S> {
     map: SpanMap,
     append: FxHashMap<SyntaxElement, Vec<tt::Leaf<S>>>,
     remove: FxHashSet<SyntaxNode>,
+    call_site: S,
 }
 
 impl<SpanMap, S> Converter<SpanMap, S> {
@@ -612,6 +632,7 @@ impl<SpanMap, S> Converter<SpanMap, S> {
         map: SpanMap,
         append: FxHashMap<SyntaxElement, Vec<tt::Leaf<S>>>,
         remove: FxHashSet<SyntaxNode>,
+        call_site: S,
     ) -> Self {
         let mut this = Converter {
             current: None,
@@ -621,6 +642,7 @@ impl<SpanMap, S> Converter<SpanMap, S> {
             map,
             append,
             remove,
+            call_site,
             current_leafs: vec![],
         };
         let first = this.next_token();
@@ -780,6 +802,9 @@ where
     fn span_for(&self, range: TextRange) -> S {
         self.map.span_for(range)
     }
+
+    fn call_site(&self) -> S {
+        self.call_site
+    }
 }
 
 struct TtTreeSink<'a, Ctx>

@@ -7,11 +7,11 @@ use tt::{
     Leaf, Punct, Spacing,
 };
 
-use crate::{syntax_node_to_token_tree, DummyTestSpanData, DummyTestSpanMap};
+use crate::{syntax_node_to_token_tree, DummyTestSpanData, DummyTestSpanMap, DUMMY};
 
 fn check_punct_spacing(fixture: &str) {
     let source_file = ast::SourceFile::parse(fixture).ok().unwrap();
-    let subtree = syntax_node_to_token_tree(source_file.syntax(), DummyTestSpanMap);
+    let subtree = syntax_node_to_token_tree(source_file.syntax(), DummyTestSpanMap, DUMMY);
     let mut annotations: HashMap<_, _> = extract_annotations(fixture)
         .into_iter()
         .map(|(range, annotation)| {

@@ -176,10 +176,10 @@ impl<'a, S: Span> TtIter<'a, S> {
         }
 
         self.inner = self.inner.as_slice()[res.len()..].iter();
-        let res = match res.len() {
-            0 | 1 => res.pop(),
-            _ => Some(tt::TokenTree::Subtree(tt::Subtree {
-                delimiter: tt::Delimiter::DUMMY_INVISIBLE,
+        let res = match &*res {
+            [] | [_] => res.pop(),
+            [first, ..] => Some(tt::TokenTree::Subtree(tt::Subtree {
+                delimiter: tt::Delimiter::invisible_spanned(first.first_span()),
                 token_trees: res,
             })),
         };

@@ -209,7 +209,7 @@ mod tests {
     use super::*;
 
     use cfg::CfgExpr;
-    use mbe::{syntax_node_to_token_tree, DummyTestSpanMap};
+    use mbe::{syntax_node_to_token_tree, DummyTestSpanMap, DUMMY};
     use syntax::{
         ast::{self, AstNode},
         SmolStr,
@@ -219,7 +219,7 @@ mod tests {
         let cfg_expr = {
             let source_file = ast::SourceFile::parse(cfg).ok().unwrap();
             let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-            let tt = syntax_node_to_token_tree(tt.syntax(), &DummyTestSpanMap);
+            let tt = syntax_node_to_token_tree(tt.syntax(), &DummyTestSpanMap, DUMMY);
             CfgExpr::parse(&tt)
         };
 
@@ -54,11 +54,12 @@ impl<S: Span> TokenTree<S> {
         })
     }
 
-    pub fn subtree_or_wrap(self) -> Subtree<S> {
+    pub fn subtree_or_wrap(self, span: DelimSpan<S>) -> Subtree<S> {
         match self {
-            TokenTree::Leaf(_) => {
-                Subtree { delimiter: Delimiter::DUMMY_INVISIBLE, token_trees: vec![self] }
-            }
+            TokenTree::Leaf(_) => Subtree {
+                delimiter: Delimiter::invisible_delim_spanned(span),
+                token_trees: vec![self],
+            },
             TokenTree::Subtree(s) => s,
         }
     }
@@ -120,12 +121,6 @@ pub struct DelimSpan<S> {
     pub close: S,
 }
 
-impl<S: Span> DelimSpan<S> {
-    // FIXME should not exist
-    #[allow(deprecated)]
-    pub const DUMMY: Self = Self { open: S::DUMMY, close: S::DUMMY };
-}
-
 #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
 pub struct Delimiter<S> {
     pub open: S,
@@ -134,11 +129,6 @@ pub struct Delimiter<S> {
 }
 
 impl<S: Span> Delimiter<S> {
-    // FIXME should not exist
-    #[allow(deprecated)]
-    pub const DUMMY_INVISIBLE: Self =
-        Self { open: S::DUMMY, close: S::DUMMY, kind: DelimiterKind::Invisible };
-
     pub const fn invisible_spanned(span: S) -> Self {
         Delimiter { open: span, close: span, kind: DelimiterKind::Invisible }
     }
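The final hunks show where the removed constants lived and what callers use instead: DelimSpan::DUMMY becomes an explicit tt::DelimSpan { open: call_site, close: call_site }, and TokenTree::subtree_or_wrap now asks its caller for the DelimSpan to use when it must wrap a lone leaf in a synthetic invisible subtree. A rough sketch of that new calling convention, again with simplified stand-in types rather than the real tt API (the stand-in reduces trees to their spans just to keep the example runnable):

// Stand-in types; `u32` plays the span. Only the changed calling
// convention of `subtree_or_wrap` is the point here.
#[derive(Clone, Copy, Debug)]
struct DelimSpan<S> {
    open: S,
    close: S,
}

#[allow(dead_code)]
#[derive(Debug)]
enum TokenTree<S> {
    Leaf(S),         // a single token, reduced here to just its span
    Subtree(Vec<S>), // a delimited group, reduced here to its token spans
}

impl<S: Copy> TokenTree<S> {
    // Before: `subtree_or_wrap(self)` wrapped a lone leaf using the dummy
    // invisible delimiter. Now the caller supplies the spans for the
    // synthetic wrapper.
    fn subtree_or_wrap(self, span: DelimSpan<S>) -> Vec<S> {
        match self {
            TokenTree::Leaf(s) => vec![span.open, s, span.close],
            TokenTree::Subtree(s) => s,
        }
    }
}

fn main() {
    let call_site = 7u32;
    // Mirrors call sites in the diff such as
    // `tt.subtree_or_wrap(tt::DelimSpan { open: span, close: span })`.
    let wrapped = TokenTree::Leaf(1)
        .subtree_or_wrap(DelimSpan { open: call_site, close: call_site });
    println!("{wrapped:?}"); // [7, 1, 7]
}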