Mirror of https://github.com/rust-lang/rust-analyzer, synced 2025-01-14 14:13:58 +00:00
Re-introduce option for macro_arg to prevent calling macros with empty inputs
This commit is contained in:
parent a2a3fecae3
commit 4ea5d7f6a0
7 changed files with 56 additions and 28 deletions
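In short, the `macro_arg` query now returns `Option<Arc<…>>` instead of always materializing an empty subtree, and each caller decides how to handle a missing argument. Below is a minimal, self-contained sketch of that pattern, not the actual rust-analyzer code: the names `Subtree`, `macro_arg`, and `expand` here are illustrative stand-ins.

use std::sync::Arc;

// Stand-in for tt::Subtree; the real type holds token trees and a delimiter.
#[derive(Debug, Default, Clone, PartialEq)]
struct Subtree {
    token_trees: Vec<String>,
}

// Hypothetical query: `None` means "this macro call has no usable input",
// instead of silently handing back an empty subtree as before.
fn macro_arg(arg_text: Option<&str>) -> Option<Arc<Subtree>> {
    let text = arg_text?; // short-circuit, mirroring `db.macro_arg_text(id)?`
    Some(Arc::new(Subtree {
        token_trees: text.split_whitespace().map(str::to_owned).collect(),
    }))
}

// Callers that must still produce a value fall back to an empty subtree
// plus an "invalid token tree" error, rather than expanding with empty input.
fn expand(arg_text: Option<&str>) -> (Arc<Subtree>, Option<String>) {
    let Some(arg) = macro_arg(arg_text) else {
        return (Arc::new(Subtree::default()), Some("invalid token tree".to_owned()));
    };
    (arg, None)
}

fn main() {
    assert!(expand(Some("a b c")).1.is_none());
    assert_eq!(expand(None).1.as_deref(), Some("invalid token tree"));
}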
@@ -33,7 +33,7 @@ m!(&k");
 "#,
         expect![[r#"
 macro_rules! m { ($i:literal) => {}; }
-/* error: Failed to lower macro args to token tree */"#]],
+/* error: invalid token tree */"#]],
     );
 }

@@ -98,7 +98,7 @@ macro_rules! m1 { ($x:ident) => { ($x } }
 macro_rules! m2 { ($x:ident) => {} }

 /* error: invalid macro definition: expected subtree */
-/* error: Failed to lower macro args to token tree */
+/* error: invalid token tree */
 "#]],
     )
 }

@@ -183,13 +183,14 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
         let range: Range<usize> = range.into();

         if show_token_ids {
-            let (tree, map, _) = &*arg;
-            let tt_range = call.token_tree().unwrap().syntax().text_range();
-            let mut ranges = Vec::new();
-            extract_id_ranges(&mut ranges, map, tree);
-            for (range, id) in ranges {
-                let idx = (tt_range.start() + range.end()).into();
-                text_edits.push((idx..idx, format!("#{}", id.0)));
+            if let Some((tree, map, _)) = arg.as_deref() {
+                let tt_range = call.token_tree().unwrap().syntax().text_range();
+                let mut ranges = Vec::new();
+                extract_id_ranges(&mut ranges, map, tree);
+                for (range, id) in ranges {
+                    let idx = (tt_range.start() + range.end()).into();
+                    text_edits.push((idx..idx, format!("#{}", id.0)));
+                }
             }
             text_edits.push((range.start..range.start, "// ".into()));
             call.to_string().match_indices('\n').for_each(|(offset, _)| {

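For context on the new `arg.as_deref()` call above: `Option<Arc<T>>::as_deref()` borrows through the `Arc`, giving an `Option<&T>` that can be destructured without cloning. A small self-contained illustration follows; the tuple type is an illustrative stand-in, not rust-analyzer's actual `(tt::Subtree, mbe::TokenMap, …)` triple.

use std::sync::Arc;

fn main() {
    // `arg` plays the role of the Option-returning macro_arg query result.
    let arg: Option<Arc<(String, Vec<u32>, ())>> =
        Some(Arc::new(("tree".to_owned(), vec![1, 2, 3], ())));

    // `as_deref` turns Option<Arc<T>> into Option<&T>, so the tuple can be
    // matched by reference, exactly like the `if let` in the diff above.
    if let Some((tree, map, _)) = arg.as_deref() {
        println!("{tree}: {} ids", map.len());
    }
}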
@@ -109,7 +109,7 @@ fn typing_inside_a_macro_should_not_invalidate_def_map() {
 }

 #[test]
-fn typing_inside_a_function_should_not_invalidate_expansions() {
+fn typing_inside_a_function_should_not_invalidate_item_expansions() {
     let (mut db, pos) = TestDB::with_position(
         r#"
 //- /lib.rs

@@ -161,7 +161,7 @@ m!(Z);
         let n_recalculated_item_trees = events.iter().filter(|it| it.contains("item_tree")).count();
         assert_eq!(n_recalculated_item_trees, 1);
         let n_reparsed_macros =
-            events.iter().filter(|it| it.contains("parse_macro_expansion")).count();
+            events.iter().filter(|it| it.contains("parse_macro_expansion(")).count();
         assert_eq!(n_reparsed_macros, 0);
     }
 }

@@ -120,7 +120,7 @@ pub trait ExpandDatabase: SourceDatabase {
     fn macro_arg(
         &self,
         id: MacroCallId,
-    ) -> Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>;
+    ) -> Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>>;
     /// Extracts syntax node, corresponding to a macro call. That's a firewall
     /// query, only typing in the macro call itself changes the returned
     /// subtree.

@@ -318,17 +318,8 @@ fn parse_macro_expansion(
 fn macro_arg(
     db: &dyn ExpandDatabase,
     id: MacroCallId,
-) -> Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)> {
-    let Some(arg) = db.macro_arg_text(id) else {
-        return Arc::new((
-            tt::Subtree {
-                delimiter: tt::Delimiter::UNSPECIFIED,
-                token_trees: Vec::new(),
-            },
-            Default::default(),
-            Default::default())
-        );
-    };
+) -> Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>> {
+    let arg = db.macro_arg_text(id)?;
     let loc = db.lookup_intern_macro_call(id);

     let node = SyntaxNode::new_root(arg);

@@ -347,7 +338,7 @@ fn macro_arg(
         // proc macros expect their inputs without parentheses, MBEs expect it with them included
         tt.delimiter = tt::Delimiter::unspecified();
     }
-    Arc::new((tt, tmap, fixups.undo_info))
+    Some(Arc::new((tt, tmap, fixups.undo_info)))
 }

 fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<SyntaxNode> {

@@ -472,7 +463,20 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
             }
         }
     };
-    let macro_arg = db.macro_arg(id);
+    let Some(macro_arg) = db.macro_arg(id) else {
+        return ExpandResult {
+            value: Arc::new(
+                tt::Subtree {
+                    delimiter: tt::Delimiter::UNSPECIFIED,
+                    token_trees: Vec::new(),
+                },
+            ),
+            err: Some(ExpandError::Other(
+                "invalid token tree"
+                    .into(),
+            )),
+        };
+    };
     let ExpandResult { value: mut tt, err } = expander.expand(db, id, &macro_arg.0);
     // Set a hard limit for the expanded tt
     let count = tt.count();

@@ -508,7 +512,18 @@ fn parse_macro_expansion_error(

 fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<tt::Subtree> {
     let loc: MacroCallLoc = db.lookup_intern_macro_call(id);
-    let macro_arg = db.macro_arg(id);
+    let Some(macro_arg) = db.macro_arg(id) else {
+        return ExpandResult {
+            value: tt::Subtree {
+                delimiter: tt::Delimiter::UNSPECIFIED,
+                token_trees: Vec::new(),
+            },
+            err: Some(ExpandError::Other(
+                "invalid token tree"
+                    .into(),
+            )),
+        };
+    };

     let expander = match loc.def.kind {
         MacroDefKind::ProcMacro(expander, ..) => expander,

@@ -201,7 +201,13 @@ fn make_hygiene_info(

     let macro_def = db.macro_def(loc.def).ok()?;
     let (_, exp_map) = db.parse_macro_expansion(macro_file).value;
-    let macro_arg = db.macro_arg(macro_file.macro_call_id);
+    let macro_arg = db.macro_arg(macro_file.macro_call_id).unwrap_or_else(|| {
+        Arc::new((
+            tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() },
+            Default::default(),
+            Default::default(),
+        ))
+    });

     Some(HygieneInfo {
         file: macro_file,

@@ -258,7 +258,13 @@ impl HirFileId {

         let macro_def = db.macro_def(loc.def).ok()?;
         let (parse, exp_map) = db.parse_macro_expansion(macro_file).value;
-        let macro_arg = db.macro_arg(macro_file.macro_call_id);
+        let macro_arg = db.macro_arg(macro_file.macro_call_id).unwrap_or_else(|| {
+            Arc::new((
+                tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() },
+                Default::default(),
+                Default::default(),
+            ))
+        });

         let def = loc.def.ast_id().left().and_then(|id| {
             let def_tt = match id.to_node(db) {