Auto merge of #18131 - ChayimFriedman2:macro-expand-dollar-crate, r=Veykril

fix: Get rid of `$crate` in expansions shown to the user

Be it "Expand Macro Recursively", "Inline macro", or a few other things.

We replace it with the crate name, as it always should have been.

Probably fixes some issues, but I don't know what they are.
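For example (taken from the `inline_macro` test added below): with a dependency `a` that exports

    #[macro_export]
    macro_rules! m {
        () => { $crate::Foo };
    }

inlining `a::m!()` inside crate `b` now inserts `a::Foo;` rather than the unresolvable `$crate::Foo;`; inside `a` itself it becomes `crate::Foo;`.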
bors 2024-09-18 20:17:21 +00:00
commit 990c48cb0d
15 changed files with 396 additions and 64 deletions

View file

@@ -21,6 +21,7 @@ pub mod span_map;
 mod cfg_process;
 mod fixup;
+mod prettify_macro_expansion_;

 use attrs::collect_attrs;
 use rustc_hash::FxHashMap;
@@ -51,11 +52,13 @@ use crate::{
     span_map::{ExpansionSpanMap, SpanMap},
 };

-pub use crate::files::{AstId, ErasedAstId, FileRange, InFile, InMacroFile, InRealFile};
+pub use crate::{
+    files::{AstId, ErasedAstId, FileRange, InFile, InMacroFile, InRealFile},
+    prettify_macro_expansion_::prettify_macro_expansion,
+};
 pub use mbe::{DeclarativeMacro, ValueResult};
 pub use span::{HirFileId, MacroCallId, MacroFileId};
-pub use syntax_bridge::insert_whitespace_into_node;

 pub mod tt {
     pub use span::Span;

View file

@@ -0,0 +1,60 @@
+//! Pretty printing of macros output.
+use base_db::CrateId;
+use rustc_hash::FxHashMap;
+use syntax::NodeOrToken;
+use syntax::{ast::make, SyntaxNode};
+
+use crate::{db::ExpandDatabase, span_map::ExpansionSpanMap};
+
+/// Inserts whitespace and replaces `$crate` in macro expansions.
+#[expect(deprecated)]
+pub fn prettify_macro_expansion(
+    db: &dyn ExpandDatabase,
+    syn: SyntaxNode,
+    span_map: &ExpansionSpanMap,
+    target_crate_id: CrateId,
+) -> SyntaxNode {
+    let crate_graph = db.crate_graph();
+    let target_crate = &crate_graph[target_crate_id];
+    let mut syntax_ctx_id_to_dollar_crate_replacement = FxHashMap::default();
+    syntax_bridge::prettify_macro_expansion::prettify_macro_expansion(syn, &mut |dollar_crate| {
+        let ctx = span_map.span_at(dollar_crate.text_range().start()).ctx;
+        let replacement =
+            syntax_ctx_id_to_dollar_crate_replacement.entry(ctx).or_insert_with(|| {
+                let ctx_data = db.lookup_intern_syntax_context(ctx);
+                let macro_call_id =
+                    ctx_data.outer_expn.expect("`$crate` cannot come from `SyntaxContextId::ROOT`");
+                let macro_call = db.lookup_intern_macro_call(macro_call_id);
+                let macro_def_crate = macro_call.def.krate;
+                // First, if this is the same crate as the macro, nothing will work but `crate`.
+                // If not, if the target crate has the macro's crate as a dependency, using the dependency name
+                // will work in inserted code and match the user's expectation.
+                // If not, the crate's display name is what the dependency name is likely to be once such a
+                // dependency is inserted, and it is also understandable to the user.
+                // Lastly, if nothing else is found, resort to leaving `$crate`.
+                if target_crate_id == macro_def_crate {
+                    make::tokens::crate_kw()
+                } else if let Some(dep) =
+                    target_crate.dependencies.iter().find(|dep| dep.crate_id == macro_def_crate)
+                {
+                    make::tokens::ident(&dep.name)
+                } else if let Some(crate_name) = &crate_graph[macro_def_crate].display_name {
+                    make::tokens::ident(crate_name.crate_name())
+                } else {
+                    return dollar_crate.clone();
+                }
+            });
+        if replacement.text() == "$crate" {
+            // The parent may have many children, and looking for the token may yield incorrect results.
+            return dollar_crate.clone();
+        }
+        // We need to `clone_subtree()` but rowan doesn't provide such an operation for tokens.
+        let parent = replacement.parent().unwrap().clone_subtree().clone_for_update();
+        parent
+            .children_with_tokens()
+            .filter_map(NodeOrToken::into_token)
+            .find(|it| it.kind() == replacement.kind())
+            .unwrap()
+    })
+}
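The call sites added later in this diff all follow the same shape; roughly (a sketch, with `sema`, `node`, and `krate` standing in for whatever the caller has in scope):

    // Only macro expansions have an expansion span map.
    let macro_file = sema.hir_file_for(&node).macro_file()?;
    let span_map = sema.db.expansion_span_map(macro_file);
    let prettified = prettify_macro_expansion(sema.db, node.clone(), &span_map, krate.into());

i.e. resolve the node's macro file, fetch that expansion's span map, and pass the crate from whose point of view `$crate` should be rendered.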

View file

@@ -136,8 +136,8 @@ pub use {
         },
         hygiene::{marks_rev, SyntaxContextExt},
         inert_attr_macro::AttributeTemplate,
-        insert_whitespace_into_node,
         name::Name,
+        prettify_macro_expansion,
         proc_macro::{ProcMacros, ProcMacrosBuilder},
         tt, ExpandResult, HirFileId, HirFileIdExt, MacroFileId, MacroFileIdExt,
     },

View file

@@ -2,14 +2,18 @@ use std::collections::BTreeSet;

 use ast::make;
 use either::Either;
-use hir::{db::HirDatabase, sym, FileRange, PathResolution, Semantics, TypeInfo};
+use hir::{
+    db::{ExpandDatabase, HirDatabase},
+    sym, FileRange, PathResolution, Semantics, TypeInfo,
+};
 use ide_db::{
+    base_db::CrateId,
     defs::Definition,
     imports::insert_use::remove_path_if_in_use_stmt,
     path_transform::PathTransform,
     search::{FileReference, FileReferenceNode, SearchScope},
     source_change::SourceChangeBuilder,
-    syntax_helpers::{insert_whitespace_into_node::insert_ws_into, node_ext::expr_as_name_ref},
+    syntax_helpers::{node_ext::expr_as_name_ref, prettify_macro_expansion},
     EditionedFileId, RootDatabase,
 };
 use itertools::{izip, Itertools};
@@ -102,12 +106,13 @@ pub(crate) fn inline_into_callers(acc: &mut Assists, ctx: &AssistContext<'_>) ->
     let mut remove_def = true;
     let mut inline_refs_for_file = |file_id, refs: Vec<FileReference>| {
         builder.edit_file(file_id);
+        let call_krate = ctx.sema.file_to_module_def(file_id).map(|it| it.krate());
         let count = refs.len();
         // The collects are required as we are otherwise iterating while mutating 🙅‍♀️🙅‍♂️
         let (name_refs, name_refs_use) = split_refs_and_uses(builder, refs, Some);
         let call_infos: Vec<_> = name_refs
             .into_iter()
-            .filter_map(CallInfo::from_name_ref)
+            .filter_map(|it| CallInfo::from_name_ref(it, call_krate?.into()))
             // FIXME: do not handle callsites in macros' parameters, because
             // directly inlining into macros may cause errors.
             .filter(|call_info| !ctx.sema.hir_file_for(call_info.node.syntax()).is_macro())
@@ -185,7 +190,10 @@ pub(super) fn split_refs_and_uses<T: ast::AstNode>(
 // ```
 pub(crate) fn inline_call(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
     let name_ref: ast::NameRef = ctx.find_node_at_offset()?;
-    let call_info = CallInfo::from_name_ref(name_ref.clone())?;
+    let call_info = CallInfo::from_name_ref(
+        name_ref.clone(),
+        ctx.sema.file_to_module_def(ctx.file_id())?.krate().into(),
+    )?;
     let (function, label) = match &call_info.node {
         ast::CallableExpr::Call(call) => {
             let path = match call.expr()? {
@@ -243,10 +251,11 @@ struct CallInfo {
     node: ast::CallableExpr,
     arguments: Vec<ast::Expr>,
     generic_arg_list: Option<ast::GenericArgList>,
+    krate: CrateId,
 }

 impl CallInfo {
-    fn from_name_ref(name_ref: ast::NameRef) -> Option<CallInfo> {
+    fn from_name_ref(name_ref: ast::NameRef, krate: CrateId) -> Option<CallInfo> {
         let parent = name_ref.syntax().parent()?;
         if let Some(call) = ast::MethodCallExpr::cast(parent.clone()) {
             let receiver = call.receiver()?;
@@ -256,6 +265,7 @@ impl CallInfo {
                 generic_arg_list: call.generic_arg_list(),
                 node: ast::CallableExpr::MethodCall(call),
                 arguments,
+                krate,
             })
         } else if let Some(segment) = ast::PathSegment::cast(parent) {
             let path = segment.syntax().parent().and_then(ast::Path::cast)?;
@@ -266,6 +276,7 @@ impl CallInfo {
                 arguments: call.arg_list()?.args().collect(),
                 node: ast::CallableExpr::Call(call),
                 generic_arg_list: segment.generic_arg_list(),
+                krate,
             })
         } else {
             None
@@ -307,11 +318,15 @@ fn inline(
     function: hir::Function,
     fn_body: &ast::BlockExpr,
     params: &[(ast::Pat, Option<ast::Type>, hir::Param)],
-    CallInfo { node, arguments, generic_arg_list }: &CallInfo,
+    CallInfo { node, arguments, generic_arg_list, krate }: &CallInfo,
 ) -> ast::Expr {
-    let mut body = if sema.hir_file_for(fn_body.syntax()).is_macro() {
+    let file_id = sema.hir_file_for(fn_body.syntax());
+    let mut body = if let Some(macro_file) = file_id.macro_file() {
         cov_mark::hit!(inline_call_defined_in_macro);
-        if let Some(body) = ast::BlockExpr::cast(insert_ws_into(fn_body.syntax().clone())) {
+        let span_map = sema.db.expansion_span_map(macro_file);
+        let body_prettified =
+            prettify_macro_expansion(sema.db, fn_body.syntax().clone(), &span_map, *krate);
+        if let Some(body) = ast::BlockExpr::cast(body_prettified) {
             body
         } else {
             fn_body.clone_for_update()
@@ -420,8 +435,16 @@ fn inline(
         let mut insert_let_stmt = || {
             let param_ty = param_ty.clone().map(|param_ty| {
-                if sema.hir_file_for(param_ty.syntax()).is_macro() {
-                    ast::Type::cast(insert_ws_into(param_ty.syntax().clone())).unwrap_or(param_ty)
+                let file_id = sema.hir_file_for(param_ty.syntax());
+                if let Some(macro_file) = file_id.macro_file() {
+                    let span_map = sema.db.expansion_span_map(macro_file);
+                    let param_ty_prettified = prettify_macro_expansion(
+                        sema.db,
+                        param_ty.syntax().clone(),
+                        &span_map,
+                        *krate,
+                    );
+                    ast::Type::cast(param_ty_prettified).unwrap_or(param_ty)
                 } else {
                     param_ty
                 }

View file

@@ -1,4 +1,5 @@
-use ide_db::syntax_helpers::insert_whitespace_into_node::insert_ws_into;
+use hir::db::ExpandDatabase;
+use ide_db::syntax_helpers::prettify_macro_expansion;
 use syntax::ast::{self, AstNode};

 use crate::{AssistContext, AssistId, AssistKind, Assists};
@@ -36,7 +37,15 @@ use crate::{AssistContext, AssistId, AssistKind, Assists};
 // ```
 pub(crate) fn inline_macro(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
     let unexpanded = ctx.find_node_at_offset::<ast::MacroCall>()?;
-    let expanded = insert_ws_into(ctx.sema.expand(&unexpanded)?.clone_for_update());
+    let macro_call = ctx.sema.to_def(&unexpanded)?;
+    let expanded = ctx.sema.parse_or_expand(macro_call.as_file());
+    let span_map = ctx.sema.db.expansion_span_map(macro_call.as_macro_file());
+    let expanded = prettify_macro_expansion(
+        ctx.db(),
+        expanded,
+        &span_map,
+        ctx.sema.file_to_module_def(ctx.file_id())?.krate().into(),
+    );
     let text_range = unexpanded.syntax().text_range();

     acc.add(
@@ -295,6 +304,75 @@ fn main() {
     }
 };
 }
+"#,
+        );
+    }
+
+    #[test]
+    fn dollar_crate() {
+        check_assist(
+            inline_macro,
+            r#"
+pub struct Foo;
+#[macro_export]
+macro_rules! m {
+    () => { $crate::Foo };
+}
+fn bar() {
+    m$0!();
+}
+"#,
+            r#"
+pub struct Foo;
+#[macro_export]
+macro_rules! m {
+    () => { $crate::Foo };
+}
+fn bar() {
+    crate::Foo;
+}
+"#,
+        );
+        check_assist(
+            inline_macro,
+            r#"
+//- /a.rs crate:a
+pub struct Foo;
+#[macro_export]
+macro_rules! m {
+    () => { $crate::Foo };
+}
+//- /b.rs crate:b deps:a
+fn bar() {
+    a::m$0!();
+}
+"#,
+            r#"
+fn bar() {
+    a::Foo;
+}
+"#,
+        );
+        check_assist(
+            inline_macro,
+            r#"
+//- /a.rs crate:a
+pub struct Foo;
+#[macro_export]
+macro_rules! m {
+    () => { $crate::Foo };
+}
+//- /b.rs crate:b deps:a
+pub use a::m;
+//- /c.rs crate:c deps:b
+fn bar() {
+    b::m$0!();
+}
+"#,
+            r#"
+fn bar() {
+    a::Foo;
+}
 "#,
         );
     }

View file

@@ -1,10 +1,13 @@
 //! Assorted functions shared by several assists.

 pub(crate) use gen_trait_fn_body::gen_trait_fn_body;
-use hir::{db::HirDatabase, HasAttrs as HirHasAttrs, HirDisplay, InFile, Semantics};
+use hir::{
+    db::{ExpandDatabase, HirDatabase},
+    HasAttrs as HirHasAttrs, HirDisplay, InFile, Semantics,
+};
 use ide_db::{
     famous_defs::FamousDefs, path_transform::PathTransform,
-    syntax_helpers::insert_whitespace_into_node::insert_ws_into, RootDatabase,
+    syntax_helpers::prettify_macro_expansion, RootDatabase,
 };
 use stdx::format_to;
 use syntax::{
@@ -178,10 +181,15 @@ pub fn add_trait_assoc_items_to_impl(
     let new_indent_level = IndentLevel::from_node(impl_.syntax()) + 1;
     let items = original_items.iter().map(|InFile { file_id, value: original_item }| {
         let cloned_item = {
-            if file_id.is_macro() {
-                if let Some(formatted) =
-                    ast::AssocItem::cast(insert_ws_into(original_item.syntax().clone()))
-                {
+            if let Some(macro_file) = file_id.macro_file() {
+                let span_map = sema.db.expansion_span_map(macro_file);
+                let item_prettified = prettify_macro_expansion(
+                    sema.db,
+                    original_item.syntax().clone(),
+                    &span_map,
+                    target_scope.krate().into(),
+                );
+                if let Some(formatted) = ast::AssocItem::cast(item_prettified) {
                     return formatted;
                 } else {
                     stdx::never!("formatted `AssocItem` could not be cast back to `AssocItem`");

View file

@@ -31,10 +31,10 @@
 //! }
 //! ```
-use hir::{HasAttrs, Name};
+use hir::{db::ExpandDatabase, HasAttrs, MacroFileId, Name};
 use ide_db::{
     documentation::HasDocs, path_transform::PathTransform,
-    syntax_helpers::insert_whitespace_into_node, traits::get_missing_assoc_items, SymbolKind,
+    syntax_helpers::prettify_macro_expansion, traits::get_missing_assoc_items, SymbolKind,
 };
 use syntax::{
     ast::{self, edit_in_place::AttrsOwnerEdit, make, HasGenericArgs, HasTypeBounds},
@@ -227,7 +227,8 @@ fn add_function_impl_(
         if let Some(transformed_fn) =
             get_transformed_fn(ctx, source.value, impl_def, async_sugaring)
         {
-            let function_decl = function_declaration(&transformed_fn, source.file_id.is_macro());
+            let function_decl =
+                function_declaration(ctx, &transformed_fn, source.file_id.macro_file());
             match ctx.config.snippet_cap {
                 Some(cap) => {
                     let snippet = format!("{function_decl} {{\n    $0\n}}");
@@ -432,7 +433,8 @@ fn add_const_impl(
         _ => unreachable!(),
     };

-    let label = make_const_compl_syntax(&transformed_const, source.file_id.is_macro());
+    let label =
+        make_const_compl_syntax(ctx, &transformed_const, source.file_id.macro_file());
     let replacement = format!("{label} ");

     let mut item =
@@ -456,9 +458,14 @@ fn add_const_impl(
     }
 }

-fn make_const_compl_syntax(const_: &ast::Const, needs_whitespace: bool) -> SmolStr {
-    let const_ = if needs_whitespace {
-        insert_whitespace_into_node::insert_ws_into(const_.syntax().clone())
+fn make_const_compl_syntax(
+    ctx: &CompletionContext<'_>,
+    const_: &ast::Const,
+    macro_file: Option<MacroFileId>,
+) -> SmolStr {
+    let const_ = if let Some(macro_file) = macro_file {
+        let span_map = ctx.db.expansion_span_map(macro_file);
+        prettify_macro_expansion(ctx.db, const_.syntax().clone(), &span_map, ctx.krate.into())
     } else {
         const_.syntax().clone()
     };
@@ -479,9 +486,14 @@ fn make_const_compl_syntax(const_: &ast::Const, needs_whitespace: bool) -> SmolS
     format_smolstr!("{} =", syntax.trim_end())
 }

-fn function_declaration(node: &ast::Fn, needs_whitespace: bool) -> String {
-    let node = if needs_whitespace {
-        insert_whitespace_into_node::insert_ws_into(node.syntax().clone())
+fn function_declaration(
+    ctx: &CompletionContext<'_>,
+    node: &ast::Fn,
+    macro_file: Option<MacroFileId>,
+) -> String {
+    let node = if let Some(macro_file) = macro_file {
+        let span_map = ctx.db.expansion_span_map(macro_file);
+        prettify_macro_expansion(ctx.db, node.syntax().clone(), &span_map, ctx.krate.into())
     } else {
         node.syntax().clone()
     };

View file

@@ -36,7 +36,7 @@ pub mod generated {
 pub mod syntax_helpers {
     pub mod format_string;
     pub mod format_string_exprs;
-    pub use hir::insert_whitespace_into_node;
+    pub use hir::prettify_macro_expansion;
     pub mod node_ext;
     pub mod suggest_name;

View file

@@ -1,9 +1,10 @@
+use hir::db::ExpandDatabase;
 use hir::{InFile, MacroFileIdExt, Semantics};
+use ide_db::base_db::CrateId;
 use ide_db::{
-    helpers::pick_best_token, syntax_helpers::insert_whitespace_into_node::insert_ws_into, FileId,
-    RootDatabase,
+    helpers::pick_best_token, syntax_helpers::prettify_macro_expansion, FileId, RootDatabase,
 };
-use span::Edition;
+use span::{Edition, SpanMap, SyntaxContextId, TextRange, TextSize};
 use syntax::{ast, ted, AstNode, NodeOrToken, SyntaxKind, SyntaxNode, T};

 use crate::FilePosition;
@@ -27,6 +28,7 @@ pub struct ExpandedMacro {
 pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<ExpandedMacro> {
     let sema = Semantics::new(db);
     let file = sema.parse_guess_edition(position.file_id);
+    let krate = sema.file_to_module_def(position.file_id)?.krate().into();

     let tok = pick_best_token(file.syntax().token_at_offset(position.offset), |kind| match kind {
         SyntaxKind::IDENT => 1,
@@ -61,8 +63,17 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<
                 .take_while(|it| it != &token)
                 .filter(|it| it.kind() == T![,])
                 .count();
-            let expansion =
-                format(db, SyntaxKind::MACRO_ITEMS, position.file_id, expansions.get(idx).cloned()?);
+            let expansion = expansions.get(idx)?.clone();
+            let expansion_file_id = sema.hir_file_for(&expansion).macro_file()?;
+            let expansion_span_map = db.expansion_span_map(expansion_file_id);
+            let expansion = format(
+                db,
+                SyntaxKind::MACRO_ITEMS,
+                position.file_id,
+                expansion,
+                &expansion_span_map,
+                krate,
+            );
             Some(ExpandedMacro { name, expansion })
         });
@@ -71,6 +82,7 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<
     }

     let mut anc = tok.parent_ancestors();
+    let mut span_map = SpanMap::empty();
     let (name, expanded, kind) = loop {
         let node = anc.next()?;
@@ -85,7 +97,7 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<
                     .unwrap_or(Edition::CURRENT),
                 )
                 .to_string(),
-                expand_macro_recur(&sema, &item)?,
+                expand_macro_recur(&sema, &item, &mut span_map, TextSize::new(0))?,
                 SyntaxKind::MACRO_ITEMS,
             );
         }
@@ -95,14 +107,23 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<
             name.push('!');
             let syntax_kind =
                 mac.syntax().parent().map(|it| it.kind()).unwrap_or(SyntaxKind::MACRO_ITEMS);
-            break (name, expand_macro_recur(&sema, &ast::Item::MacroCall(mac))?, syntax_kind);
+            break (
+                name,
+                expand_macro_recur(
+                    &sema,
+                    &ast::Item::MacroCall(mac),
+                    &mut span_map,
+                    TextSize::new(0),
+                )?,
+                syntax_kind,
+            );
         }
     };

     // FIXME:
     // macro expansion may lose all white space information
     // But we hope someday we can use ra_fmt for that
-    let expansion = format(db, kind, position.file_id, expanded);
+    let expansion = format(db, kind, position.file_id, expanded, &span_map, krate);
     Some(ExpandedMacro { name, expansion })
 }
@@ -110,6 +131,8 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<
 fn expand_macro_recur(
     sema: &Semantics<'_, RootDatabase>,
     macro_call: &ast::Item,
+    result_span_map: &mut SpanMap<SyntaxContextId>,
+    offset_in_original_node: TextSize,
 ) -> Option<SyntaxNode> {
     let expanded = match macro_call {
         item @ ast::Item::MacroCall(macro_call) => sema
@@ -118,29 +141,54 @@ fn expand_macro_recur(
             .clone_for_update(),
         item => sema.expand_attr_macro(item)?.clone_for_update(),
     };
-    expand(sema, expanded)
+    let file_id =
+        sema.hir_file_for(&expanded).macro_file().expect("expansion must produce a macro file");
+    let expansion_span_map = sema.db.expansion_span_map(file_id);
+    result_span_map.merge(
+        TextRange::at(offset_in_original_node, macro_call.syntax().text_range().len()),
+        expanded.text_range().len(),
+        &expansion_span_map,
+    );
+    Some(expand(sema, expanded, result_span_map, offset_in_original_node))
 }

-fn expand(sema: &Semantics<'_, RootDatabase>, expanded: SyntaxNode) -> Option<SyntaxNode> {
+fn expand(
+    sema: &Semantics<'_, RootDatabase>,
+    expanded: SyntaxNode,
+    result_span_map: &mut SpanMap<SyntaxContextId>,
+    offset_in_original_node: TextSize,
+) -> SyntaxNode {
     let children = expanded.descendants().filter_map(ast::Item::cast);
     let mut replacements = Vec::new();
     for child in children {
-        if let Some(new_node) = expand_macro_recur(sema, &child) {
+        if let Some(new_node) = expand_macro_recur(
+            sema,
+            &child,
+            result_span_map,
+            offset_in_original_node + child.syntax().text_range().start(),
+        ) {
             // check if the whole original syntax is replaced
             if expanded == *child.syntax() {
-                return Some(new_node);
+                return new_node;
             }
             replacements.push((child, new_node));
         }
     }
     replacements.into_iter().rev().for_each(|(old, new)| ted::replace(old.syntax(), new));
-    Some(expanded)
+    expanded
 }

-fn format(db: &RootDatabase, kind: SyntaxKind, file_id: FileId, expanded: SyntaxNode) -> String {
-    let expansion = insert_ws_into(expanded).to_string();
+fn format(
+    db: &RootDatabase,
+    kind: SyntaxKind,
+    file_id: FileId,
+    expanded: SyntaxNode,
+    span_map: &SpanMap<SyntaxContextId>,
+    krate: CrateId,
+) -> String {
+    let expansion = prettify_macro_expansion(db, expanded, span_map, krate).to_string();
     _format(db, kind, file_id, &expansion).unwrap_or(expansion)
 }
@@ -498,7 +546,7 @@ struct Foo {}
 "#,
             expect![[r#"
                 Clone
-                impl < >$crate::clone::Clone for Foo< >where {
+                impl < >core::clone::Clone for Foo< >where {
                     fn clone(&self) -> Self {
                         match self {
                             Foo{}
@@ -524,7 +572,7 @@ struct Foo {}
 "#,
             expect![[r#"
                 Copy
-                impl < >$crate::marker::Copy for Foo< >where{}"#]],
+                impl < >core::marker::Copy for Foo< >where{}"#]],
         );
     }
@@ -539,7 +587,7 @@ struct Foo {}
 "#,
             expect![[r#"
                 Copy
-                impl < >$crate::marker::Copy for Foo< >where{}"#]],
+                impl < >core::marker::Copy for Foo< >where{}"#]],
         );
         check(
             r#"
@@ -550,7 +598,7 @@ struct Foo {}
 "#,
             expect![[r#"
                 Clone
-                impl < >$crate::clone::Clone for Foo< >where {
+                impl < >core::clone::Clone for Foo< >where {
                     fn clone(&self) -> Self {
                         match self {
                             Foo{}
@@ -563,4 +611,44 @@ struct Foo {}
                 }"#]],
         );
     }
+
+    #[test]
+    fn dollar_crate() {
+        check(
+            r#"
+//- /a.rs crate:a
+pub struct Foo;
+#[macro_export]
+macro_rules! m {
+    ( $i:ident ) => { $crate::Foo; $crate::Foo; $i::Foo; };
+}
+//- /b.rs crate:b deps:a
+pub struct Foo;
+#[macro_export]
+macro_rules! m {
+    () => { a::m!($crate); $crate::Foo; $crate::Foo; };
+}
+//- /c.rs crate:c deps:b,a
+pub struct Foo;
+#[macro_export]
+macro_rules! m {
+    () => { b::m!(); $crate::Foo; $crate::Foo; };
+}
+fn bar() {
+    m$0!();
+}
+"#,
+            expect![[r#"
+                m!
+                a::Foo;
+                a::Foo;
+                b::Foo;
+                ;
+                b::Foo;
+                b::Foo;
+                ;
+                crate::Foo;
+                crate::Foo;"#]],
+        );
+    }
 }

View file

@@ -3,9 +3,9 @@ use std::{mem, ops::Not};

 use either::Either;
 use hir::{
-    Adt, AsAssocItem, AsExternAssocItem, CaptureKind, HasCrate, HasSource, HirDisplay, Layout,
-    LayoutError, MethodViolationCode, Name, ObjectSafetyViolation, Semantics, Trait, Type,
-    TypeInfo,
+    db::ExpandDatabase, Adt, AsAssocItem, AsExternAssocItem, CaptureKind, HasCrate, HasSource,
+    HirDisplay, Layout, LayoutError, MethodViolationCode, Name, ObjectSafetyViolation, Semantics,
+    Trait, Type, TypeInfo,
 };
 use ide_db::{
     base_db::SourceDatabase,
@@ -13,7 +13,7 @@ use ide_db::{
     documentation::HasDocs,
     famous_defs::FamousDefs,
     generated::lints::{CLIPPY_LINTS, DEFAULT_LINTS, FEATURES},
-    syntax_helpers::insert_whitespace_into_node,
+    syntax_helpers::prettify_macro_expansion,
     RootDatabase,
 };
 use itertools::Itertools;
@@ -476,8 +476,9 @@ pub(super) fn definition(
             Err(_) => {
                 let source = it.source(db)?;
                 let mut body = source.value.body()?.syntax().clone();
-                if source.file_id.is_macro() {
-                    body = insert_whitespace_into_node::insert_ws_into(body);
+                if let Some(macro_file) = source.file_id.macro_file() {
+                    let span_map = db.expansion_span_map(macro_file);
+                    body = prettify_macro_expansion(db, body, &span_map, it.krate(db).into());
                 }
                 Some(body.to_string())
             }
@@ -486,8 +487,9 @@ pub(super) fn definition(
         Definition::Static(it) => {
             let source = it.source(db)?;
             let mut body = source.value.body()?.syntax().clone();
-            if source.file_id.is_macro() {
-                body = insert_whitespace_into_node::insert_ws_into(body);
+            if let Some(macro_file) = source.file_id.macro_file() {
+                let span_map = db.expansion_span_map(macro_file);
+                body = prettify_macro_expansion(db, body, &span_map, it.krate(db).into());
             }
             Some(body.to_string())
         }

View file

@@ -5,11 +5,11 @@
 use expect_test::expect;
 use span::{Edition, EditionedFileId, ErasedFileAstId, FileId, Span, SpanAnchor, SyntaxContextId};
 use stdx::format_to;
-use syntax_bridge::insert_whitespace_into_node::insert_ws_into;
 use tt::{TextRange, TextSize};

 use crate::DeclarativeMacro;

+#[expect(deprecated)]
 fn check_(
     def_edition: Edition,
     call_edition: Edition,
@@ -60,7 +60,14 @@ fn check_(
         format_to!(expect_res, "{:#?}\n\n", res.value.0);
     }
     let (node, _) = syntax_bridge::token_tree_to_syntax_node(&res.value.0, parse, def_edition);
-    format_to!(expect_res, "{}", insert_ws_into(node.syntax_node()));
+    format_to!(
+        expect_res,
+        "{}",
+        syntax_bridge::prettify_macro_expansion::prettify_macro_expansion(
+            node.syntax_node(),
+            &mut |it| it.clone()
+        )
+    );
     expect.assert_eq(&expect_res);
 }

View file

@@ -104,6 +104,31 @@ where
     pub fn iter(&self) -> impl Iterator<Item = (TextSize, SpanData<S>)> + '_ {
         self.spans.iter().copied()
     }
+
+    /// Merges this span map with another span map, where `other` is inserted at (and replaces) `other_range`.
+    ///
+    /// The length of the replacement node needs to be `other_size`.
+    pub fn merge(&mut self, other_range: TextRange, other_size: TextSize, other: &SpanMap<S>) {
+        self.spans.retain_mut(|(offset, _)| {
+            if other_range.contains(*offset) {
+                false
+            } else {
+                if *offset >= other_range.end() {
+                    *offset += other_size;
+                    *offset -= other_range.len();
+                }
+                true
+            }
+        });
+        self.spans
+            .extend(other.spans.iter().map(|&(offset, span)| (offset + other_range.start(), span)));
+        self.spans.sort_unstable_by_key(|&(offset, _)| offset);
+        // Matched arm info is no longer correct once we have multiple macros.
+        self.matched_arm = None;
+    }
 }

 #[derive(PartialEq, Eq, Hash, Debug)]
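For reference, the only caller added in this PR is `expand_macro_recur` (shown earlier), which splices each nested expansion's span map into the accumulated one:

    result_span_map.merge(
        TextRange::at(offset_in_original_node, macro_call.syntax().text_range().len()),
        expanded.text_range().len(),
        &expansion_span_map,
    );

Spans covering the replaced macro call are dropped, offsets after it are shifted by the size difference (`other_size - other_range.len()`), and the nested map's offsets are rebased onto `other_range.start()`.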

View file

@@ -17,7 +17,7 @@ use tt::{
     token_to_literal,
 };

-pub mod insert_whitespace_into_node;
+pub mod prettify_macro_expansion;
 mod to_parser_input;
 pub use to_parser_input::to_parser_input;

 // FIXME: we probably should re-think `token_tree_to_syntax_node` interfaces

View file

@@ -8,10 +8,19 @@ use syntax::{
 };

 /// Renders a [`SyntaxNode`] with whitespace inserted between tokens that require them.
-pub fn insert_ws_into(syn: SyntaxNode) -> SyntaxNode {
+///
+/// This is an internal API that is only exported because `mbe` needs it for tests and cannot depend
+/// on `hir-expand`. For any purpose other than tests, you are supposed to use the `prettify_macro_expansion`
+/// from `hir-expand` that handles `$crate` for you.
+#[deprecated = "use `hir_expand::prettify_macro_expansion()` instead"]
+pub fn prettify_macro_expansion(
+    syn: SyntaxNode,
+    dollar_crate_replacement: &mut dyn FnMut(&SyntaxToken) -> SyntaxToken,
+) -> SyntaxNode {
     let mut indent = 0;
     let mut last: Option<SyntaxKind> = None;
     let mut mods = Vec::new();
+    let mut dollar_crate_replacements = Vec::new();
     let syn = syn.clone_subtree().clone_for_update();

     let before = Position::before;
@@ -49,6 +58,9 @@ pub fn insert_ws_into(syn: SyntaxNode) -> SyntaxNode {
             }
             _ => continue,
         };
+        if token.kind() == SyntaxKind::IDENT && token.text() == "$crate" {
+            dollar_crate_replacements.push((token.clone(), dollar_crate_replacement(&token)));
+        }
         let tok = &token;

         let is_next = |f: fn(SyntaxKind) -> bool, default| -> bool {
@@ -120,6 +132,9 @@ pub fn insert_ws_into(syn: SyntaxNode) -> SyntaxNode {
     for (pos, insert) in mods {
         ted::insert(pos, insert);
     }
+    for (old, new) in dollar_crate_replacements {
+        ted::replace(old, new);
+    }
     if let Some(it) = syn.last_token().filter(|it| it.kind() == SyntaxKind::WHITESPACE) {
         ted::remove(it);
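The `dollar_crate_replacement` callback decides what each `$crate` token becomes. As a minimal sketch of the contract (this identity closure is what the `mbe` test above passes; real replacements come from the `hir-expand` wrapper, and `node` here is just a placeholder `SyntaxNode`):

    // Keep `$crate` untouched: only insert whitespace.
    let pretty = prettify_macro_expansion(node, &mut |dollar_crate| dollar_crate.clone());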

View file

@@ -1162,7 +1162,7 @@ pub mod tokens {

     pub(super) static SOURCE_FILE: LazyLock<Parse<SourceFile>> = LazyLock::new(|| {
         SourceFile::parse(
-            "const C: <()>::Item = ( true && true , true || true , 1 != 1, 2 == 2, 3 < 3, 4 <= 4, 5 > 5, 6 >= 6, !true, *p, &p , &mut p, async { let _ @ [] })\n;\n\nimpl A for B where: {}", Edition::CURRENT,
+            "use crate::foo; const C: <()>::Item = ( true && true , true || true , 1 != 1, 2 == 2, 3 < 3, 4 <= 4, 5 > 5, 6 >= 6, !true, *p, &p , &mut p, async { let _ @ [] })\n;\n\nimpl A for B where: {}", Edition::CURRENT,
         )
     });
@@ -1188,6 +1188,17 @@ pub mod tokens {
             .unwrap()
     }

+    pub fn crate_kw() -> SyntaxToken {
+        SOURCE_FILE
+            .tree()
+            .syntax()
+            .clone_for_update()
+            .descendants_with_tokens()
+            .filter_map(|it| it.into_token())
+            .find(|it| it.kind() == CRATE_KW)
+            .unwrap()
+    }
+
     pub fn whitespace(text: &str) -> SyntaxToken {
         assert!(text.trim().is_empty());
         let sf = SourceFile::parse(text, Edition::CURRENT).ok().unwrap();