diff --git a/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs b/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs index 3f17f64f2a..b232651db9 100644 --- a/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs +++ b/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs @@ -235,10 +235,10 @@ macro_rules! format_args { fn main() { /* error: no rule matches input tokens */; - /* error: no rule matches input tokens */; - /* error: no rule matches input tokens */; - /* error: no rule matches input tokens */::core::fmt::Arguments::new_v1(&["", ], &[::core::fmt::ArgumentV1::new(&(), ::core::fmt::Display::fmt), ]); - /* error: no rule matches input tokens */; + /* error: expected expression */; + /* error: expected expression, expected COMMA */; + /* error: expected expression */::core::fmt::Arguments::new_v1(&["", ], &[::core::fmt::ArgumentV1::new(&(), ::core::fmt::Display::fmt), ]); + /* error: expected expression, expected R_PAREN */; ::core::fmt::Arguments::new_v1(&["", ], &[::core::fmt::ArgumentV1::new(&(5), ::core::fmt::Display::fmt), ]); } "##]], @@ -364,7 +364,7 @@ macro_rules! format_args { fn main() { let _ = - /* error: no rule matches input tokens *//* parse error: expected field name or number */ + /* error: expected field name or number *//* parse error: expected field name or number */ ::core::fmt::Arguments::new_v1(&["", " ", ], &[::core::fmt::ArgumentV1::new(&(a.), ::core::fmt::Display::fmt), ::core::fmt::ArgumentV1::new(&(), ::core::fmt::Debug::fmt), ]); } "##]], diff --git a/crates/hir-def/src/macro_expansion_tests/mbe.rs b/crates/hir-def/src/macro_expansion_tests/mbe.rs index 553ffe3d0b..b26f986758 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe.rs @@ -98,6 +98,42 @@ fn#19 main#20(#21)#21 {#22 "##]], ); } + +#[test] +fn token_mapping_eager() { + check( + r#" +#[rustc_builtin_macro] +#[macro_export] +macro_rules! format_args {} + +macro_rules! identity { + ($expr:expr) => { $expr }; +} + +fn main(foo: ()) { + format_args/*+tokenids*/!("{} {} {}", format_args!("{}", 0), foo, identity!(10), "bar") +} + +"#, + expect![[r##" +#[rustc_builtin_macro] +#[macro_export] +macro_rules! format_args {} + +macro_rules! 
identity {
+    ($expr:expr) => { $expr };
+}
+
+fn main(foo: ()) {
+    // format_args/*+tokenids*/!("{} {} {}"#1,#3 format_args!("{}", 0#10),#12 foo#13,#14 identity!(10#18),#21 "bar"#22)
+::core#4294967295::fmt#4294967295::Arguments#4294967295::new_v1#4294967295(&#4294967295[#4294967295""#4294967295,#4294967295 " "#4294967295,#4294967295 " "#4294967295,#4294967295 ]#4294967295,#4294967295 &#4294967295[::core#4294967295::fmt#4294967295::ArgumentV1#4294967295::new#4294967295(&#4294967295(::core#4294967295::fmt#4294967295::Arguments#4294967295::new_v1#4294967295(&#4294967295[#4294967295""#4294967295,#4294967295 ]#4294967295,#4294967295 &#4294967295[::core#4294967295::fmt#4294967295::ArgumentV1#4294967295::new#4294967295(&#4294967295(#42949672950#10)#4294967295,#4294967295 ::core#4294967295::fmt#4294967295::Display#4294967295::fmt#4294967295)#4294967295,#4294967295 ]#4294967295)#4294967295)#4294967295,#4294967295 ::core#4294967295::fmt#4294967295::Display#4294967295::fmt#4294967295)#4294967295,#4294967295 ::core#4294967295::fmt#4294967295::ArgumentV1#4294967295::new#4294967295(&#4294967295(#4294967295foo#13)#4294967295,#4294967295 ::core#4294967295::fmt#4294967295::Display#4294967295::fmt#4294967295)#4294967295,#4294967295 ::core#4294967295::fmt#4294967295::ArgumentV1#4294967295::new#4294967295(&#4294967295(#429496729510#18)#4294967295,#4294967295 ::core#4294967295::fmt#4294967295::Display#4294967295::fmt#4294967295)#4294967295,#4294967295 ]#4294967295)#4294967295
+}
+
+"##]],
+    );
+}
+
 #[test]
 fn float_field_access_macro_input() {
     check(
diff --git a/crates/hir-def/src/macro_expansion_tests/mod.rs b/crates/hir-def/src/macro_expansion_tests/mod.rs
index 1c15c1b7f0..7a87e61c69 100644
--- a/crates/hir-def/src/macro_expansion_tests/mod.rs
+++ b/crates/hir-def/src/macro_expansion_tests/mod.rs
@@ -187,7 +187,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
         let range: Range<usize> = range.into();
 
         if show_token_ids {
-            if let Some((tree, map, _)) = arg.as_deref() {
+            if let Some((tree, map, _)) = arg.value.as_deref() {
                 let tt_range = call.token_tree().unwrap().syntax().text_range();
                 let mut ranges = Vec::new();
                 extract_id_ranges(&mut ranges, map, tree);
diff --git a/crates/hir-expand/src/builtin_fn_macro.rs b/crates/hir-expand/src/builtin_fn_macro.rs
index 7370f07a52..95c6baf42d 100644
--- a/crates/hir-expand/src/builtin_fn_macro.rs
+++ b/crates/hir-expand/src/builtin_fn_macro.rs
@@ -692,7 +692,7 @@ pub(crate) fn include_arg_to_tt(
     arg_id: MacroCallId,
 ) -> Result<(triomphe::Arc<(::tt::Subtree<::tt::TokenId>, TokenMap)>, FileId), ExpandError> {
     let loc = db.lookup_intern_macro_call(arg_id);
-    let Some(EagerCallInfo { arg, arg_id: Some(arg_id), .. }) = loc.eager.as_deref() else {
+    let Some(EagerCallInfo { arg, arg_id, .. }) = loc.eager.as_deref() else {
         panic!("include_arg_to_tt called on non include macro call: {:?}", &loc.eager);
     };
     let path = parse_string(&arg.0)?;
diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs
index 39df60562a..309c0930d1 100644
--- a/crates/hir-expand/src/db.rs
+++ b/crates/hir-expand/src/db.rs
@@ -3,7 +3,7 @@
 use base_db::{salsa, CrateId, Edition, SourceDatabase};
 use either::Either;
 use limit::Limit;
-use mbe::syntax_node_to_token_tree;
+use mbe::{syntax_node_to_token_tree, ValueResult};
 use rustc_hash::FxHashSet;
 use syntax::{
     ast::{self, HasAttrs, HasDocComments},
@@ -124,16 +124,21 @@ pub trait ExpandDatabase: SourceDatabase {
     fn macro_arg(
         &self,
         id: MacroCallId,
-    ) -> Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>>;
+    ) -> ValueResult<
+        Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>>,
+        Arc<Box<[SyntaxError]>>,
+    >;
     /// Extracts syntax node, corresponding to a macro call. That's a firewall
    /// query, only typing in the macro call itself changes the returned
    /// subtree.
-    fn macro_arg_text(&self, id: MacroCallId) -> Option<GreenNode>;
-    /// Gets the expander for this macro. This compiles declarative macros, and
-    /// just fetches procedural ones.
-    // FIXME: Rename this
+    fn macro_arg_node(
+        &self,
+        id: MacroCallId,
+    ) -> ValueResult<Option<GreenNode>, Arc<Box<[SyntaxError]>>>;
+    /// Fetches the expander for this macro.
     #[salsa::transparent]
-    fn macro_def(&self, id: MacroDefId) -> TokenExpander;
+    fn macro_expander(&self, id: MacroDefId) -> TokenExpander;
+    /// Fetches (and compiles) the expander of this decl macro.
     fn decl_macro_expander(
         &self,
         def_crate: CrateId,
@@ -335,14 +340,20 @@ fn parse_macro_expansion_error(
 fn macro_arg(
     db: &dyn ExpandDatabase,
     id: MacroCallId,
-) -> Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>> {
+) -> ValueResult<
+    Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>>,
+    Arc<Box<[SyntaxError]>>,
+> {
     let loc = db.lookup_intern_macro_call(id);
 
     if let Some(EagerCallInfo { arg, arg_id: _, error: _ }) = loc.eager.as_deref() {
-        return Some(Arc::new((arg.0.clone(), arg.1.clone(), Default::default())));
+        return ValueResult::ok(Some(Arc::new((arg.0.clone(), arg.1.clone(), Default::default()))));
     }
 
-    let arg = db.macro_arg_text(id)?;
+    let ValueResult { value, err } = db.macro_arg_node(id);
+    let Some(arg) = value else {
+        return ValueResult { value: None, err };
+    };
 
     let node = SyntaxNode::new_root(arg);
     let censor = censor_for_macro_input(&loc, &node);
@@ -360,7 +371,11 @@ fn macro_arg(
         // proc macros expect their inputs without parentheses, MBEs expect it with them included
         tt.delimiter = tt::Delimiter::unspecified();
     }
-    Some(Arc::new((tt, tmap, fixups.undo_info)))
+    let val = Some(Arc::new((tt, tmap, fixups.undo_info)));
+    match err {
+        Some(err) => ValueResult::new(val, err),
+        None => ValueResult::ok(val),
+    }
 }
 
 /// Certain macro calls expect some nodes in the input to be preprocessed away, namely:
@@ -402,9 +417,44 @@ fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<SyntaxNode> {
 
-fn macro_arg_text(db: &dyn ExpandDatabase, id: MacroCallId) -> Option<GreenNode> {
+fn macro_arg_node(
+    db: &dyn ExpandDatabase,
+    id: MacroCallId,
+) -> ValueResult<Option<GreenNode>, Arc<Box<[SyntaxError]>>> {
+    let err = || -> Arc<Box<[SyntaxError]>> {
+        Arc::new(Box::new([SyntaxError::new_at_offset(
+            "invalid macro call".to_owned(),
+            syntax::TextSize::from(0),
+        )]))
+    };
     let loc = db.lookup_intern_macro_call(id);
-    let arg = loc.kind.arg(db)?.value;
+    let arg = if let MacroDefKind::BuiltInEager(..) = loc.def.kind {
+        let res = if let Some(EagerCallInfo { arg, .. }) = loc.eager.as_deref() {
+            Some(mbe::token_tree_to_syntax_node(&arg.0, mbe::TopEntryPoint::Expr).0)
+        } else {
+            loc.kind
+                .arg(db)
+                .and_then(|arg| ast::TokenTree::cast(arg.value))
+                .map(|tt| tt.reparse_as_expr().to_syntax())
+        };
+
+        match res {
+            Some(res) if res.errors().is_empty() => res.syntax_node(),
+            Some(res) => {
+                return ValueResult::new(
+                    Some(res.syntax_node().green().into()),
+                    // Box::<[_]>::from(res.errors()), not stable yet
+                    Arc::new(res.errors().to_vec().into_boxed_slice()),
+                );
+            }
+            None => return ValueResult::only_err(err()),
+        }
+    } else {
+        match loc.kind.arg(db) {
+            Some(res) => res.value,
+            None => return ValueResult::only_err(err()),
+        }
+    };
     if matches!(loc.kind, MacroCallKind::FnLike { .. }) {
         let first = arg.first_child_or_token().map_or(T![.], |it| it.kind());
         let last = arg.last_child_or_token().map_or(T![.], |it| it.kind());
@@ -419,20 +469,13 @@ fn macro_arg_text(db: &dyn ExpandDatabase, id: MacroCallId) -> Option<GreenNode> {
             // Some day, we'll have explicit recursion counters for all
             // recursive things, at which point this code might be removed.
             cov_mark::hit!(issue9358_bad_macro_stack_overflow);
-            return None;
+            return ValueResult::only_err(Arc::new(Box::new([SyntaxError::new(
+                "unbalanced token tree".to_owned(),
+                arg.text_range(),
+            )])));
         }
     }
-    if let Some(EagerCallInfo { arg, .. }) = loc.eager.as_deref() {
-        Some(
-            mbe::token_tree_to_syntax_node(&arg.0, mbe::TopEntryPoint::Expr)
-                .0
-                .syntax_node()
-                .green()
-                .into(),
-        )
-    } else {
-        Some(arg.green().into())
-    }
+    ValueResult::ok(Some(arg.green().into()))
 }
 
 fn decl_macro_expander(
@@ -474,7 +517,7 @@ fn decl_macro_expander(
     Arc::new(DeclarativeMacroExpander { mac, def_site_token_map })
 }
 
-fn macro_def(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander {
+fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander {
     match id.kind {
         MacroDefKind::Declarative(ast_id) => {
             TokenExpander::DeclarativeMacro(db.decl_macro_expander(id.krate, ast_id))
@@ -491,20 +534,10 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> {
         MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(id),
         MacroDefKind::BuiltInDerive(expander, ..) => {
-            let arg = db.macro_arg_text(id).unwrap();
+            let arg = db.macro_arg_node(id).value.unwrap();
 
             let node = SyntaxNode::new_root(arg);
             let censor = censor_for_macro_input(&loc, &node);
@@ -520,10 +553,13 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> {
         _ => {
-            let Some(macro_arg) = db.macro_arg(id) else {
+            let ValueResult { value, err } = db.macro_arg(id);
+            let Some(macro_arg) = value else {
                 return ExpandResult {
                     value: Arc::new(tt::Subtree {
                         delimiter: tt::Delimiter::UNSPECIFIED,
@@ -534,18 +570,43 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> {
                 MacroDefKind::Declarative(id) => {
                     db.decl_macro_expander(loc.def.krate, id).expand(arg.clone())
                 }
                 MacroDefKind::BuiltIn(it, _) => it.expand(db, id, &arg).map_err(Into::into),
+                // This might look a bit odd, but we do not expand the inputs to eager macros here.
+                // Eager macro inputs are expanded, well, eagerly when we collect the macro calls.
+                // That kind of expansion uses the ast id map of the eager macro's input though, which
+                // goes through the HirFileId machinery. As eager macro inputs are assigned a macro
+                // file id, that query will end up going through here again, whereas we just want to
+                // inspect the raw input. As such we just return the input subtree here.
+                MacroDefKind::BuiltInEager(..) if loc.eager.is_none() => {
+                    let mut arg = arg.clone();
+                    fixup::reverse_fixups(&mut arg, arg_tm, undo_info);
+
+                    return ExpandResult {
+                        value: Arc::new(arg),
+                        err: err.map(|err| {
+                            let mut buf = String::new();
+                            for err in &**err {
+                                use std::fmt::Write;
+                                _ = write!(buf, "{}, ", err);
+                            }
+                            buf.pop();
+                            buf.pop();
+                            ExpandError::other(buf)
+                        }),
+                    };
+                }
                 MacroDefKind::BuiltInEager(it, _) => it.expand(db, id, &arg).map_err(Into::into),
                 MacroDefKind::BuiltInAttr(it, _) => it.expand(db, id, &arg),
                 _ => unreachable!(),
             };
             fixup::reverse_fixups(&mut res.value, arg_tm, undo_info);
-            (res, None)
+            res
         }
     };
 
@@ -559,24 +620,23 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> {
 
 fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> {
     let loc = db.lookup_intern_macro_call(id);
-    let Some(macro_arg) = db.macro_arg(id) else {
+    let Some(macro_arg) = db.macro_arg(id).value else {
         return ExpandResult {
             value: Arc::new(tt::Subtree {
                 delimiter: tt::Delimiter::UNSPECIFIED,
                 token_trees: Vec::new(),
             }),
+            // FIXME: We should make sure to enforce an invariant that invalid macro
+            // calls do not reach this call path!
             err: Some(ExpandError::other("invalid token tree")),
         };
     };
+    let (arg_tt, arg_tm, undo_info) = &*macro_arg;
 
     let expander = match loc.def.kind {
diff --git a/crates/hir-expand/src/eager.rs b/crates/hir-expand/src/eager.rs
index e6f1edfb60..876813eab5 100644
--- a/crates/hir-expand/src/eager.rs
+++ b/crates/hir-expand/src/eager.rs
@@ -19,7 +19,8 @@
 //!
 //! See the full discussion :
 use base_db::CrateId;
-use syntax::{ted, Parse, SyntaxNode};
+use rustc_hash::FxHashMap;
+use syntax::{ted, Parse, SyntaxNode, TextRange, TextSize, WalkEvent};
 use triomphe::Arc;
 
 use crate::{
@@ -38,18 +39,8 @@ pub fn expand_eager_macro_input(
     def: MacroDefId,
     resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
 ) -> Result<ExpandResult<Option<MacroCallId>>, UnresolvedMacro> {
-    assert!(matches!(def.kind, MacroDefKind::BuiltInEager(..)));
-    let token_tree = macro_call.value.token_tree();
-
-    let Some(token_tree) = token_tree else {
-        return Ok(ExpandResult {
-            value: None,
-            err: Some(ExpandError::other("invalid token tree")),
-        });
-    };
-    let (parsed_args, arg_token_map) = mbe::syntax_node_to_token_tree(token_tree.syntax());
-
     let ast_map = db.ast_id_map(macro_call.file_id);
+    // the expansion which the ast id map is built upon has no whitespace, so the offsets are wrong, as macro_call is from the token tree that has whitespace!
     let call_id = InFile::new(macro_call.file_id, ast_map.ast_id(&macro_call.value));
     let expand_to = ExpandTo::from_call_site(&macro_call.value);
@@ -60,52 +51,69 @@ pub fn expand_eager_macro_input(
     let arg_id = db.intern_macro_call(MacroCallLoc {
         def,
         krate,
-        eager: Some(Box::new(EagerCallInfo {
-            arg: Arc::new((parsed_args, arg_token_map)),
-            arg_id: None,
-            error: None,
-        })),
+        eager: None,
         kind: MacroCallKind::FnLike { ast_id: call_id, expand_to: ExpandTo::Expr },
     });
+    let ExpandResult { value: (arg_exp, arg_exp_map), err: parse_err } =
+        db.parse_macro_expansion(arg_id.as_macro_file());
+    // we need this map here as the expansion of the eager input fake file loses whitespace ...
+    let mut ws_mapping = FxHashMap::default();
+    if let Some((_, tm, _)) = db.macro_arg(arg_id).value.as_deref() {
+        ws_mapping.extend(tm.entries().filter_map(|(id, range)| {
+            Some((arg_exp_map.first_range_by_token(id, syntax::SyntaxKind::TOMBSTONE)?, range))
+        }));
+    }
 
     let ExpandResult { value: expanded_eager_input, err } = {
-        let arg_as_expr = match db.macro_arg_text(arg_id) {
-            Some(it) => it,
-            None => {
-                return Ok(ExpandResult {
-                    value: None,
-                    err: Some(ExpandError::other("invalid token tree")),
-                })
-            }
-        };
-
         eager_macro_recur(
             db,
             &Hygiene::new(db, macro_call.file_id),
-            InFile::new(arg_id.as_file(), SyntaxNode::new_root(arg_as_expr)),
+            InFile::new(arg_id.as_file(), arg_exp.syntax_node()),
             krate,
             resolver,
         )?
     };
+    let err = parse_err.or(err);
 
-    let Some(expanded_eager_input) = expanded_eager_input else {
+    let Some((expanded_eager_input, mapping)) = expanded_eager_input else {
         return Ok(ExpandResult { value: None, err });
     };
 
-    // FIXME: This token map is pointless, it points into the expanded eager syntax node, but that
-    // node doesn't exist outside this function so we can't use this tokenmap.
-    // Ideally we'd need to patch the tokenmap of the pre-expanded input and then put that here
-    // or even better, forego expanding into a SyntaxNode altogether and instead construct a subtree
-    // in place! But that is kind of difficult.
-    let (mut subtree, _token_map) = mbe::syntax_node_to_token_tree(&expanded_eager_input);
-    let token_map = Default::default();
+
+    let og_tmap = mbe::syntax_node_to_token_map(
+        macro_call.value.token_tree().expect("macro_arg_text succeeded").syntax(),
+    );
+
+    let (mut subtree, expanded_eager_input_token_map) =
+        mbe::syntax_node_to_token_tree(&expanded_eager_input);
+
+    // The tokenmap and ids of subtree point into the expanded syntax node, but that is inaccessible from the outside
+    // so we need to remap them to the original input of the eager macro.
+    subtree.visit_ids(&|id| {
+        // Note: we discard all token ids of braces and the like here, but that's not too bad and only a temporary fix
+
+        if let Some(range) =
+            expanded_eager_input_token_map.first_range_by_token(id, syntax::SyntaxKind::TOMBSTONE)
+        {
+            // remap from expanded eager input to eager input expansion
+            if let Some(og_range) = mapping.get(&range) {
+                // remap from eager input expansion to original eager input
+                if let Some(&og_range) = ws_mapping.get(og_range) {
+                    if let Some(og_token) = og_tmap.token_by_range(og_range) {
+                        return og_token;
+                    }
+                }
+            }
+        }
+        tt::TokenId::UNSPECIFIED
+    });
     subtree.delimiter = crate::tt::Delimiter::unspecified();
 
     let loc = MacroCallLoc {
         def,
         krate,
         eager: Some(Box::new(EagerCallInfo {
-            arg: Arc::new((subtree, token_map)),
-            arg_id: Some(arg_id),
+            arg: Arc::new((subtree, og_tmap)),
+            arg_id,
             error: err.clone(),
         })),
         kind: MacroCallKind::FnLike { ast_id: call_id, expand_to },
@@ -119,19 +127,16 @@ fn lazy_expand(
     def: &MacroDefId,
     macro_call: InFile<ast::MacroCall>,
     krate: CrateId,
-) -> ExpandResult<InFile<Parse<SyntaxNode>>> {
+) -> ExpandResult<(InFile<Parse<SyntaxNode>>, Arc<TokenMap>)> {
     let ast_id = db.ast_id_map(macro_call.file_id).ast_id(&macro_call.value);
 
     let expand_to = ExpandTo::from_call_site(&macro_call.value);
-    let id = def.as_lazy_macro(
-        db,
-        krate,
-        MacroCallKind::FnLike { ast_id: macro_call.with_value(ast_id), expand_to },
-    );
-
+    let ast_id = macro_call.with_value(ast_id);
+    let id = def.as_lazy_macro(db, krate, MacroCallKind::FnLike { ast_id, expand_to });
     let macro_file = id.as_macro_file();
 
-    db.parse_macro_expansion(macro_file).map(|parse| InFile::new(macro_file.into(), parse.0))
+    db.parse_macro_expansion(macro_file)
+        .map(|parse| (InFile::new(macro_file.into(), parse.0), parse.1))
 }
 
 fn eager_macro_recur(
@@ -140,18 +145,43 @@ fn eager_macro_recur(
     curr: InFile<SyntaxNode>,
     krate: CrateId,
     macro_resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
-) -> Result<ExpandResult<Option<SyntaxNode>>, UnresolvedMacro> {
+) -> Result<ExpandResult<Option<(SyntaxNode, FxHashMap<TextRange, TextRange>)>>, UnresolvedMacro> {
     let original = curr.value.clone_for_update();
+    let mut mapping = FxHashMap::default();
 
-    let children = original.descendants().filter_map(ast::MacroCall::cast);
     let mut replacements = Vec::new();
 
     // Note: We only report a single error inside of eager expansions
     let mut error = None;
+    let mut offset = 0i32;
+    let apply_offset = |it: TextSize, offset: i32| {
+        TextSize::from(u32::try_from(offset + u32::from(it) as i32).unwrap_or_default())
+    };
+    let mut children = original.preorder_with_tokens();
 
     // Collect replacement
-    for child in children {
-        let def = match child.path().and_then(|path| ModPath::from_src(db, path, hygiene)) {
+    while let Some(child) = children.next() {
+        let WalkEvent::Enter(child) = child else { continue };
+        let call = match child {
+            syntax::NodeOrToken::Node(node) => match ast::MacroCall::cast(node) {
+                Some(it) => {
+                    children.skip_subtree();
+                    it
+                }
+                None => continue,
+            },
+            syntax::NodeOrToken::Token(t) => {
+                mapping.insert(
+                    TextRange::new(
+                        apply_offset(t.text_range().start(), offset),
apply_offset(t.text_range().end(), offset), + ), + t.text_range(), + ); + continue; + } + }; + let def = match call.path().and_then(|path| ModPath::from_src(db, path, hygiene)) { Some(path) => macro_resolver(path.clone()).ok_or(UnresolvedMacro { path })?, None => { error = Some(ExpandError::other("malformed macro invocation")); @@ -163,7 +193,7 @@ fn eager_macro_recur( let ExpandResult { value, err } = match expand_eager_macro_input( db, krate, - curr.with_value(child.clone()), + curr.with_value(call.clone()), def, macro_resolver, ) { @@ -171,9 +201,22 @@ fn eager_macro_recur( Err(err) => return Err(err), }; match value { - Some(call) => { + Some(call_id) => { let ExpandResult { value, err: err2 } = - db.parse_macro_expansion(call.as_macro_file()); + db.parse_macro_expansion(call_id.as_macro_file()); + + let call_tt_start = + call.token_tree().unwrap().syntax().text_range().start(); + let call_start = apply_offset(call.syntax().text_range().start(), offset); + if let Some((_, arg_map, _)) = db.macro_arg(call_id).value.as_deref() { + mapping.extend(arg_map.entries().filter_map(|(tid, range)| { + value + .1 + .first_range_by_token(tid, syntax::SyntaxKind::TOMBSTONE) + .map(|r| (r + call_start, range + call_tt_start)) + })); + }; + ExpandResult { value: Some(value.0.syntax_node().clone_for_update()), err: err.or(err2), @@ -187,36 +230,61 @@ fn eager_macro_recur( | MacroDefKind::BuiltInAttr(..) | MacroDefKind::BuiltInDerive(..) | MacroDefKind::ProcMacro(..) => { - let ExpandResult { value, err } = - lazy_expand(db, &def, curr.with_value(child.clone()), krate); + let ExpandResult { value: (parse, tm), err } = + lazy_expand(db, &def, curr.with_value(call.clone()), krate); + let decl_mac = if let MacroDefKind::Declarative(ast_id) = def.kind { + Some(db.decl_macro_expander(def.krate, ast_id)) + } else { + None + }; // replace macro inside - let hygiene = Hygiene::new(db, value.file_id); + let hygiene = Hygiene::new(db, parse.file_id); let ExpandResult { value, err: error } = eager_macro_recur( db, &hygiene, // FIXME: We discard parse errors here - value.map(|it| it.syntax_node()), + parse.as_ref().map(|it| it.syntax_node()), krate, macro_resolver, )?; let err = err.or(error); - ExpandResult { value, err } + + let call_tt_start = call.token_tree().unwrap().syntax().text_range().start(); + let call_start = apply_offset(call.syntax().text_range().start(), offset); + if let Some((_tt, arg_map, _)) = parse + .file_id + .macro_file() + .and_then(|id| db.macro_arg(id.macro_call_id).value) + .as_deref() + { + mapping.extend(arg_map.entries().filter_map(|(tid, range)| { + tm.first_range_by_token( + decl_mac.as_ref().map(|it| it.map_id_down(tid)).unwrap_or(tid), + syntax::SyntaxKind::TOMBSTONE, + ) + .map(|r| (r + call_start, range + call_tt_start)) + })); + }; + // FIXME: Do we need to re-use _m here? 
+                ExpandResult { value: value.map(|(n, _m)| n), err }
             }
         };
         if err.is_some() {
            error = err;
         }
         // check if the whole original syntax is replaced
-        if child.syntax() == &original {
-            return Ok(ExpandResult { value, err: error });
+        if call.syntax() == &original {
+            return Ok(ExpandResult { value: value.zip(Some(mapping)), err: error });
         }
 
         if let Some(insert) = value {
-            replacements.push((child, insert));
+            offset += u32::from(insert.text_range().len()) as i32
+                - u32::from(call.syntax().text_range().len()) as i32;
+            replacements.push((call, insert));
         }
     }
 
     replacements.into_iter().rev().for_each(|(old, new)| ted::replace(old.syntax(), new));
-    Ok(ExpandResult { value: Some(original), err: error })
+    Ok(ExpandResult { value: Some((original, mapping)), err: error })
 }
diff --git a/crates/hir-expand/src/fixup.rs b/crates/hir-expand/src/fixup.rs
index af9d2d78dd..e6e8d8c029 100644
--- a/crates/hir-expand/src/fixup.rs
+++ b/crates/hir-expand/src/fixup.rs
@@ -26,7 +26,7 @@ pub(crate) struct SyntaxFixups {
 /// This is the information needed to reverse the fixups.
 #[derive(Debug, Default, PartialEq, Eq)]
 pub struct SyntaxFixupUndoInfo {
-    original: Vec<Subtree>,
+    original: Box<[Subtree]>,
 }
 
 const EMPTY_ID: SyntheticTokenId = SyntheticTokenId(!0);
@@ -272,7 +272,7 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
         replace,
         token_map,
         next_id,
-        undo_info: SyntaxFixupUndoInfo { original },
+        undo_info: SyntaxFixupUndoInfo { original: original.into_boxed_slice() },
     }
 }
 
diff --git a/crates/hir-expand/src/hygiene.rs b/crates/hir-expand/src/hygiene.rs
index 934c9a6924..54e74d50c8 100644
--- a/crates/hir-expand/src/hygiene.rs
+++ b/crates/hir-expand/src/hygiene.rs
@@ -194,9 +194,9 @@ fn make_hygiene_info(
         _ => None,
     });
 
-    let macro_def = db.macro_def(loc.def);
+    let macro_def = db.macro_expander(loc.def);
     let (_, exp_map) = db.parse_macro_expansion(macro_file).value;
-    let macro_arg = db.macro_arg(macro_file.macro_call_id).unwrap_or_else(|| {
+    let macro_arg = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| {
         Arc::new((
             tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() },
             Default::default(),
diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs
index 60a3d1fa6b..9ed6c31ddd 100644
--- a/crates/hir-expand/src/lib.rs
+++ b/crates/hir-expand/src/lib.rs
@@ -127,7 +127,8 @@ impl_intern_key!(MacroCallId);
 pub struct MacroCallLoc {
     pub def: MacroDefId,
     pub(crate) krate: CrateId,
-    /// Some if `def` is a builtin eager macro.
+    /// Some if this is a macro call for an eager macro. Note that this is `None`
+    /// for the eager input macro file.
     eager: Option<Box<EagerCallInfo>>,
     pub kind: MacroCallKind,
 }
@@ -152,12 +153,10 @@ pub enum MacroDefKind {
 
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 struct EagerCallInfo {
-    /// NOTE: This can be *either* the expansion result, *or* the argument to the eager macro!
+    /// The expanded argument of the eager macro.
     arg: Arc<(tt::Subtree, TokenMap)>,
     /// Call id of the eager macro's input file (this is the macro file for its fully expanded input).
-    /// If this is none, `arg` contains the pre-expanded input, otherwise arg contains the
-    /// post-expanded input.
-    arg_id: Option<MacroCallId>,
+    arg_id: MacroCallId,
     error: Option<ExpandError>,
 }
 
@@ -222,11 +221,7 @@ impl HirFileId {
                 HirFileIdRepr::FileId(id) => break id,
                 HirFileIdRepr::MacroFile(MacroFile { macro_call_id }) => {
                     let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_call_id);
-                    let is_include_expansion = loc.def.is_include()
-                        && matches!(
-                            loc.eager.as_deref(),
-                            Some(EagerCallInfo { arg_id: Some(_), .. })
-                        );
+                    let is_include_expansion = loc.def.is_include() && loc.eager.is_some();
                     file_id = match is_include_expansion.then(|| db.include_expand(macro_call_id)) {
                         Some(Ok((_, file))) => file.into(),
                         _ => loc.kind.file_id(),
@@ -325,7 +320,7 @@ impl HirFileId {
         match self.macro_file() {
             Some(macro_file) => {
                 let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
-                matches!(loc.eager.as_deref(), Some(EagerCallInfo { .. }))
+                matches!(loc.def.kind, MacroDefKind::BuiltInEager(..))
             }
             _ => false,
         }
@@ -709,9 +704,7 @@ impl ExpansionInfo {
         let loc = db.lookup_intern_macro_call(call_id);
 
         // Special case: map tokens from `include!` expansions to the included file
-        if loc.def.is_include()
-            && matches!(loc.eager.as_deref(), Some(EagerCallInfo { arg_id: Some(_), .. }))
-        {
+        if loc.def.is_include() {
             if let Ok((tt_and_map, file_id)) = db.include_expand(call_id) {
                 let range = tt_and_map.1.first_range_by_token(token_id, token.value.kind())?;
                 let source = db.parse(file_id);
@@ -761,22 +754,17 @@ impl ExpansionInfo {
 
         let arg_tt = loc.kind.arg(db)?;
 
-        let macro_def = db.macro_def(loc.def);
+        let macro_def = db.macro_expander(loc.def);
         let (parse, exp_map) = db.parse_macro_expansion(macro_file).value;
         let expanded = InMacroFile { file_id: macro_file, value: parse.syntax_node() };
 
-        let macro_arg = db
-            .macro_arg(match loc.eager.as_deref() {
-                Some(&EagerCallInfo { arg_id: Some(_), ..
}) => return None, - _ => macro_file.macro_call_id, - }) - .unwrap_or_else(|| { - Arc::new(( - tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() }, - Default::default(), - Default::default(), - )) - }); + let macro_arg = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| { + Arc::new(( + tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() }, + Default::default(), + Default::default(), + )) + }); let def = loc.def.ast_id().left().and_then(|id| { let def_tt = match id.to_node(db) { diff --git a/crates/hir/src/db.rs b/crates/hir/src/db.rs index fa2deb7215..f3a0608944 100644 --- a/crates/hir/src/db.rs +++ b/crates/hir/src/db.rs @@ -6,7 +6,7 @@ pub use hir_def::db::*; pub use hir_expand::db::{ AstIdMapQuery, DeclMacroExpanderQuery, ExpandDatabase, ExpandDatabaseStorage, - ExpandProcMacroQuery, HygieneFrameQuery, InternMacroCallQuery, MacroArgTextQuery, + ExpandProcMacroQuery, HygieneFrameQuery, InternMacroCallQuery, MacroArgNodeQuery, MacroExpandQuery, ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery, }; pub use hir_ty::db::*; diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index d09963d4d3..f8d9398ae2 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -698,7 +698,7 @@ impl Module { fn emit_macro_def_diagnostics(db: &dyn HirDatabase, acc: &mut Vec, m: Macro) { let id = macro_id_to_def_id(db.upcast(), m.id); - if let hir_expand::db::TokenExpander::DeclarativeMacro(expander) = db.macro_def(id) { + if let hir_expand::db::TokenExpander::DeclarativeMacro(expander) = db.macro_expander(id) { if let Some(e) = expander.mac.err() { let Some(ast) = id.ast_id().left() else { never!("declarative expander for non decl-macro: {:?}", e); diff --git a/crates/ide-db/src/apply_change.rs b/crates/ide-db/src/apply_change.rs index 7ba0bd73ec..a0b05c87ae 100644 --- a/crates/ide-db/src/apply_change.rs +++ b/crates/ide-db/src/apply_change.rs @@ -99,7 +99,7 @@ impl RootDatabase { hir::db::AstIdMapQuery hir::db::ParseMacroExpansionQuery hir::db::InternMacroCallQuery - hir::db::MacroArgTextQuery + hir::db::MacroArgNodeQuery hir::db::DeclMacroExpanderQuery hir::db::MacroExpandQuery hir::db::ExpandProcMacroQuery diff --git a/crates/ide-db/src/lib.rs b/crates/ide-db/src/lib.rs index feced7e4bb..f27ed485d8 100644 --- a/crates/ide-db/src/lib.rs +++ b/crates/ide-db/src/lib.rs @@ -200,7 +200,7 @@ impl RootDatabase { hir_db::AstIdMapQuery // hir_db::ParseMacroExpansionQuery // hir_db::InternMacroCallQuery - hir_db::MacroArgTextQuery + hir_db::MacroArgNodeQuery hir_db::DeclMacroExpanderQuery // hir_db::MacroExpandQuery hir_db::ExpandProcMacroQuery diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html b/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html index d26018b361..f4f164aa1d 100644 --- a/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html +++ b/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html @@ -130,7 +130,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd println!("Hello {:+}!", 5); println!("{:#x}!", 27); println!("Hello {:05}!", 5); - println!("Hello {:05}!", -5); + println!("Hello {:05}!", -5); println!("{:#010x}!", 27); println!("Hello {0} is {1:.5}", "x", 0.01); println!("Hello {1} is {2:.0$}", 5, "x", 0.01); @@ -161,18 +161,18 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd let _ = "\x28\x28\x00\x63\n"; let _ = b"\x28\x28\x00\x63\n"; - let _ = r"\\"; + let backslash = r"\\"; println!("{\x41}", A = 92); 
println!("{ничоси}", ничоси = 92); - println!("{:x?} {} ", thingy, n2); + println!("{:x?} {} ", thingy, n2); panic!("{}", 0); panic!("more {}", 1); assert!(true, "{}", 1); assert!(true, "{} asdasd", 1); toho!("{}fmt", 0); asm!("mov eax, {0}"); - format_args!(concat!("{}"), "{}"); - format_args!("{}", format_args!("{}", 0)); + format_args!(concat!("{}"), "{}"); + format_args!("{} {} {} {} {} {}", backslash, format_args!("{}", 0), foo, "bar", toho!(), backslash); } \ No newline at end of file diff --git a/crates/ide/src/syntax_highlighting/tests.rs b/crates/ide/src/syntax_highlighting/tests.rs index 497992f684..1ee451a06d 100644 --- a/crates/ide/src/syntax_highlighting/tests.rs +++ b/crates/ide/src/syntax_highlighting/tests.rs @@ -507,7 +507,7 @@ fn main() { let _ = "\x28\x28\x00\x63\n"; let _ = b"\x28\x28\x00\x63\n"; - let _ = r"\\"; + let backslash = r"\\"; println!("{\x41}", A = 92); println!("{ничоси}", ничоси = 92); @@ -520,7 +520,7 @@ fn main() { toho!("{}fmt", 0); asm!("mov eax, {0}"); format_args!(concat!("{}"), "{}"); - format_args!("{}", format_args!("{}", 0)); + format_args!("{} {} {} {} {} {}", backslash, format_args!("{}", 0), foo, "bar", toho!(), backslash); }"#, expect_file!["./test_data/highlight_strings.html"], false, diff --git a/crates/mbe/src/token_map.rs b/crates/mbe/src/token_map.rs index c923e7a69a..9b2df89f9c 100644 --- a/crates/mbe/src/token_map.rs +++ b/crates/mbe/src/token_map.rs @@ -110,4 +110,11 @@ impl TokenMap { // FIXME: This could be accidentally quadratic self.entries.remove(idx); } + + pub fn entries(&self) -> impl Iterator + '_ { + self.entries.iter().filter_map(|&(tid, tr)| match tr { + TokenTextRange::Token(range) => Some((tid, range)), + TokenTextRange::Delimiter(_) => None, + }) + } } diff --git a/crates/syntax/src/lib.rs b/crates/syntax/src/lib.rs index efbf879664..bed240a6d7 100644 --- a/crates/syntax/src/lib.rs +++ b/crates/syntax/src/lib.rs @@ -171,6 +171,109 @@ impl SourceFile { } } +impl ast::TokenTree { + pub fn reparse_as_expr(self) -> Parse { + let tokens = self.syntax().descendants_with_tokens().filter_map(NodeOrToken::into_token); + + let mut parser_input = parser::Input::default(); + let mut was_joint = false; + for t in tokens { + let kind = t.kind(); + if kind.is_trivia() { + was_joint = false + } else { + if kind == SyntaxKind::IDENT { + let token_text = t.text(); + let contextual_kw = SyntaxKind::from_contextual_keyword(token_text) + .unwrap_or(SyntaxKind::IDENT); + parser_input.push_ident(contextual_kw); + } else { + if was_joint { + parser_input.was_joint(); + } + parser_input.push(kind); + // Tag the token as joint if it is float with a fractional part + // we use this jointness to inform the parser about what token split + // event to emit when we encounter a float literal in a field access + if kind == SyntaxKind::FLOAT_NUMBER && !t.text().ends_with('.') { + parser_input.was_joint(); + } + } + was_joint = true; + } + } + + let parser_output = parser::TopEntryPoint::Expr.parse(&parser_input); + + let mut tokens = + self.syntax().descendants_with_tokens().filter_map(NodeOrToken::into_token); + let mut text = String::new(); + let mut pos = TextSize::from(0); + let mut builder = SyntaxTreeBuilder::default(); + for event in parser_output.iter() { + match event { + parser::Step::Token { kind, n_input_tokens } => { + let mut token = tokens.next().unwrap(); + while token.kind().is_trivia() { + let text = token.text(); + pos += TextSize::from(text.len() as u32); + builder.token(token.kind(), text); + + token = tokens.next().unwrap(); + } + 
+                    text.push_str(token.text());
+                    for _ in 1..n_input_tokens {
+                        let token = tokens.next().unwrap();
+                        text.push_str(token.text());
+                    }
+
+                    pos += TextSize::from(text.len() as u32);
+                    builder.token(kind, &text);
+                    text.clear();
+                }
+                parser::Step::FloatSplit { ends_in_dot: has_pseudo_dot } => {
+                    let token = tokens.next().unwrap();
+                    let text = token.text();
+
+                    match text.split_once('.') {
+                        Some((left, right)) => {
+                            assert!(!left.is_empty());
+                            builder.start_node(SyntaxKind::NAME_REF);
+                            builder.token(SyntaxKind::INT_NUMBER, left);
+                            builder.finish_node();
+
+                            // here we move the exit up, the original exit has been deleted in the process
+                            builder.finish_node();
+
+                            builder.token(SyntaxKind::DOT, ".");
+
+                            if has_pseudo_dot {
+                                assert!(right.is_empty(), "{left}.{right}");
+                            } else {
+                                builder.start_node(SyntaxKind::NAME_REF);
+                                builder.token(SyntaxKind::INT_NUMBER, right);
+                                builder.finish_node();
+
+                                // the parser creates an unbalanced start node, so we are required to close it here
+                                builder.finish_node();
+                            }
+                        }
+                        None => unreachable!(),
+                    }
+                    pos += TextSize::from(text.len() as u32);
+                }
+                parser::Step::Enter { kind } => builder.start_node(kind),
+                parser::Step::Exit => builder.finish_node(),
+                parser::Step::Error { msg } => builder.error(msg.to_owned(), pos),
+            }
+        }
+
+        let (green, errors) = builder.finish_raw();
+
+        Parse { green, errors: Arc::new(errors), _ty: PhantomData }
+    }
+}
+
 /// Matches a `SyntaxNode` against an `ast` type.
 ///
 /// # Example:
diff --git a/crates/tt/src/lib.rs b/crates/tt/src/lib.rs
index c2ebf03746..1b8d4ba42a 100644
--- a/crates/tt/src/lib.rs
+++ b/crates/tt/src/lib.rs
@@ -68,6 +68,21 @@ pub mod token_id {
             Self::Subtree(Subtree { delimiter: Delimiter::unspecified(), token_trees: vec![] })
         }
     }
+
+    impl Subtree {
+        pub fn visit_ids(&mut self, f: &impl Fn(TokenId) -> TokenId) {
+            self.delimiter.open = f(self.delimiter.open);
+            self.delimiter.close = f(self.delimiter.close);
+            self.token_trees.iter_mut().for_each(|tt| match tt {
+                crate::TokenTree::Leaf(leaf) => match leaf {
+                    crate::Leaf::Literal(it) => it.span = f(it.span),
+                    crate::Leaf::Punct(it) => it.span = f(it.span),
+                    crate::Leaf::Ident(it) => it.span = f(it.span),
+                },
+                crate::TokenTree::Subtree(s) => s.visit_ids(f),
+            })
+        }
+    }
 }
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
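
Notes for reviewers (not part of the patch):

A note on `ValueResult`: the patch threads syntax errors through the `macro_arg`/`macro_arg_node` queries instead of silently dropping invalid inputs. The call sites above (`ValueResult::ok`, `ValueResult::new`, `ValueResult::only_err`, and destructuring `ValueResult { value, err }`) imply a shape roughly like the sketch below; the actual definition lives in the `mbe` crate per the import in `db.rs`, so the bodies here are inferred from usage rather than quoted.

```rust
// Minimal sketch of a ValueResult-like type, inferred from the call sites in
// this patch (not the actual mbe definition): a value that is always present
// plus an optional error, so a query can return a partial result and its
// diagnostics at the same time.
pub struct ValueResult<V, E> {
    pub value: V,
    pub err: Option<E>,
}

impl<V, E> ValueResult<V, E> {
    pub fn ok(value: V) -> Self {
        Self { value, err: None }
    }
    pub fn new(value: V, err: E) -> Self {
        Self { value, err: Some(err) }
    }
}

impl<V: Default, E> ValueResult<V, E> {
    // Used by macro_arg_node when there is no usable input at all.
    pub fn only_err(err: E) -> Self {
        Self { value: V::default(), err: Some(err) }
    }
}
```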
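The new `Subtree::visit_ids` helper in `crates/tt` is what makes the eager-input remapping in `eager.rs` possible: it rewrites every token id in a subtree, delimiters included, through a caller-supplied function. A hypothetical standalone use, assuming the `token_id` module alias that hir-expand refers to as `crate::tt`:

```rust
use std::collections::HashMap;

// Hypothetical helper (not part of the patch): rewrite ids through a
// translation table, falling back to UNSPECIFIED for unknown ids, which is
// the same fallback expand_eager_macro_input uses when a range cannot be
// traced back to the original input.
fn remap_ids(subtree: &mut tt::Subtree, table: &HashMap<tt::TokenId, tt::TokenId>) {
    subtree.visit_ids(&|id| table.get(&id).copied().unwrap_or(tt::TokenId::UNSPECIFIED));
}
```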
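The signed `offset` bookkeeping in `eager_macro_recur` exists because in-place replacements change the length of the tree while the mapping records ranges against both the original and the rewritten text: each replacement shifts everything after it by `new_len - old_len`. A toy model of the same arithmetic, with hypothetical names:

```rust
// Toy model (hypothetical names) of eager_macro_recur's offset bookkeeping:
// `offset` accumulates how much earlier replacements grew or shrank the text,
// so a position taken from the original tree sits at `pos + offset` in the
// rewritten tree, saturating at 0 like the patch's unwrap_or_default().
fn apply_offset(pos: u32, offset: i32) -> u32 {
    u32::try_from(pos as i32 + offset).unwrap_or_default()
}

fn main() {
    let mut offset: i32 = 0;
    // A 10-byte macro call was replaced by a 25-byte expansion:
    offset += 25 - 10;
    // so a token that started at byte 40 of the original now starts at 55.
    assert_eq!(apply_offset(40, offset), 55);
    // A shrinking replacement can push early positions to the floor of 0.
    assert_eq!(apply_offset(40, -60), 0);
}
```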
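Finally, `TokenTree::reparse_as_expr` is what lets `macro_arg_node` treat an eager macro's raw token tree as an expression without round-tripping through a fresh source file, and, unlike the old path, it keeps the parse errors. A hedged usage sketch; `reparse_first_macro_arg` and the flow around it are mine, while the calls (`token_tree()`, `reparse_as_expr()`, `errors()`, `to_syntax()`, `syntax_node()`) all appear in the patch:

```rust
use syntax::{ast, AstNode, SourceFile};

// Hypothetical usage: reparse the first macro call's token tree as an
// expression and surface the recovered syntax errors instead of dropping
// them, mirroring what macro_arg_node now does for eager macros.
fn reparse_first_macro_arg(text: &str) -> Option<syntax::SyntaxNode> {
    let file = SourceFile::parse(text).tree();
    let call = file.syntax().descendants().find_map(ast::MacroCall::cast)?;
    let parse = call.token_tree()?.reparse_as_expr();
    for err in parse.errors().iter() {
        eprintln!("reparse error: {err}"); // errors survive the reparse
    }
    Some(parse.to_syntax().syntax_node())
}
```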