Try caching macro calls more aggressively

Lukas Wirth 2024-04-03 15:23:56 +02:00
parent f28f15ac6e
commit 97b58f2846
11 changed files with 382 additions and 300 deletions
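In outline, the change caches macro-call expansions at two levels: body lowering now records each macro call's MacroFileId in BodySourceMap::expansions, and child_by_source exposes those mappings through a new keys::MACRO_CALL dyn-map key plus a ToDef impl for ast::MacroCall, so Semantics can resolve a call to its expansion from the cache before falling back to a fresh expansion. A minimal, self-contained sketch of that cache-then-fallback pattern (all names below are illustrative stand-ins, not rust-analyzer's real types):

use std::collections::HashMap;

// Stand-ins for the real ids; illustrative only.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct MacroCallPtr(u32);
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct MacroFileId(u32);

// Populated once while lowering a body, analogous to `BodySourceMap::expansions`.
#[derive(Default)]
struct SourceMap {
    expansions: HashMap<MacroCallPtr, MacroFileId>,
}

// Expensive fallback, analogous to re-resolving and re-expanding the macro call.
fn expand_uncached(call: MacroCallPtr) -> Option<MacroFileId> {
    Some(MacroFileId(call.0 + 1000))
}

// Cached-first lookup: prefer the mapping recorded during lowering, only
// fall back to the expensive path when the call was never cached.
fn expansion_of(sm: &SourceMap, call: MacroCallPtr) -> Option<MacroFileId> {
    sm.expansions.get(&call).copied().or_else(|| expand_uncached(call))
}

fn main() {
    let mut sm = SourceMap::default();
    sm.expansions.insert(MacroCallPtr(1), MacroFileId(42));
    assert_eq!(expansion_of(&sm, MacroCallPtr(1)), Some(MacroFileId(42))); // cache hit
    assert_eq!(expansion_of(&sm, MacroCallPtr(2)), Some(MacroFileId(1002))); // fallback
}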

View file

@@ -395,6 +395,12 @@ impl BodySourceMap {
        self.expr_map.get(&src).copied()
    }
+    pub fn expansions(
+        &self,
+    ) -> impl Iterator<Item = (&InFile<AstPtr<ast::MacroCall>>, &MacroFileId)> {
+        self.expansions.iter()
+    }
    pub fn implicit_format_args(
        &self,
        node: InFile<&ast::FormatArgsExpr>,

View file

@@ -12,6 +12,7 @@ use intern::Interned;
use rustc_hash::FxHashMap;
use smallvec::SmallVec;
use span::AstIdMap;
+use stdx::never;
use syntax::{
    ast::{
        self, ArrayExprKind, AstChildren, BlockExpr, HasArgList, HasAttrs, HasLoopBody, HasName,
@@ -480,7 +481,8 @@ impl ExprCollector<'_> {
        } else if e.const_token().is_some() {
            Mutability::Shared
        } else {
-            unreachable!("parser only remaps to raw_token() if matching mutability token follows")
+            never!("parser only remaps to raw_token() if matching mutability token follows");
+            Mutability::Shared
        }
    } else {
        Mutability::from_mutable(e.mut_token().is_some())
@@ -1006,9 +1008,9 @@ impl ExprCollector<'_> {
    Some((mark, expansion)) => {
        // Keep collecting even with expansion errors so we can provide completions and
        // other services in incomplete macro expressions.
-        self.source_map
-            .expansions
-            .insert(macro_call_ptr, self.expander.current_file_id().macro_file().unwrap());
+        if let Some(macro_file) = self.expander.current_file_id().macro_file() {
+            self.source_map.expansions.insert(macro_call_ptr, macro_file);
+        }
        let prev_ast_id_map = mem::replace(
            &mut self.ast_id_map,
            self.db.ast_id_map(self.expander.current_file_id()),

View file

@ -6,7 +6,7 @@
use either::Either; use either::Either;
use hir_expand::{attrs::collect_attrs, HirFileId}; use hir_expand::{attrs::collect_attrs, HirFileId};
-use syntax::ast;
+use syntax::{ast, AstPtr};
use crate::{ use crate::{
db::DefDatabase, db::DefDatabase,
@ -38,7 +38,7 @@ impl ChildBySource for TraitId {
data.attribute_calls().filter(|(ast_id, _)| ast_id.file_id == file_id).for_each( data.attribute_calls().filter(|(ast_id, _)| ast_id.file_id == file_id).for_each(
|(ast_id, call_id)| { |(ast_id, call_id)| {
-res[keys::ATTR_MACRO_CALL].insert(ast_id.to_node(db.upcast()), call_id);
+res[keys::ATTR_MACRO_CALL].insert(ast_id.to_ptr(db.upcast()), call_id);
}, },
); );
data.items.iter().for_each(|&(_, item)| { data.items.iter().for_each(|&(_, item)| {
@ -50,9 +50,10 @@ impl ChildBySource for TraitId {
impl ChildBySource for ImplId { impl ChildBySource for ImplId {
fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) { fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) {
let data = db.impl_data(*self); let data = db.impl_data(*self);
// FIXME: Macro calls
data.attribute_calls().filter(|(ast_id, _)| ast_id.file_id == file_id).for_each( data.attribute_calls().filter(|(ast_id, _)| ast_id.file_id == file_id).for_each(
|(ast_id, call_id)| { |(ast_id, call_id)| {
-res[keys::ATTR_MACRO_CALL].insert(ast_id.to_node(db.upcast()), call_id);
+res[keys::ATTR_MACRO_CALL].insert(ast_id.to_ptr(db.upcast()), call_id);
}, },
); );
data.items.iter().for_each(|&item| { data.items.iter().for_each(|&item| {
@ -80,7 +81,7 @@ impl ChildBySource for ItemScope {
.for_each(|konst| insert_item_loc(db, res, file_id, konst, keys::CONST)); .for_each(|konst| insert_item_loc(db, res, file_id, konst, keys::CONST));
self.attr_macro_invocs().filter(|(id, _)| id.file_id == file_id).for_each( self.attr_macro_invocs().filter(|(id, _)| id.file_id == file_id).for_each(
|(ast_id, call_id)| { |(ast_id, call_id)| {
-res[keys::ATTR_MACRO_CALL].insert(ast_id.to_node(db.upcast()), call_id);
+res[keys::ATTR_MACRO_CALL].insert(ast_id.to_ptr(db.upcast()), call_id);
}, },
); );
self.legacy_macros().for_each(|(_, ids)| { self.legacy_macros().for_each(|(_, ids)| {
@ -88,7 +89,7 @@ impl ChildBySource for ItemScope {
if let MacroId::MacroRulesId(id) = id { if let MacroId::MacroRulesId(id) = id {
let loc = id.lookup(db); let loc = id.lookup(db);
if loc.id.file_id() == file_id { if loc.id.file_id() == file_id {
-res[keys::MACRO_RULES].insert(loc.source(db).value, id);
+res[keys::MACRO_RULES].insert(loc.ast_ptr(db).value, id);
} }
} }
}) })
@ -100,12 +101,18 @@ impl ChildBySource for ItemScope {
if let Some((_, Either::Left(attr))) = if let Some((_, Either::Left(attr))) =
collect_attrs(&adt).nth(attr_id.ast_index()) collect_attrs(&adt).nth(attr_id.ast_index())
{ {
-res[keys::DERIVE_MACRO_CALL].insert(attr, (attr_id, call_id, calls.into()));
+res[keys::DERIVE_MACRO_CALL]
+    .insert(AstPtr::new(&attr), (attr_id, call_id, calls.into()));
} }
}); });
}, },
); );
self.iter_macro_invoc().filter(|(id, _)| id.file_id == file_id).for_each(
|(ast_id, &call)| {
let ast = ast_id.to_ptr(db.upcast());
res[keys::MACRO_CALL].insert(ast, call);
},
);
fn add_module_def( fn add_module_def(
db: &dyn DefDatabase, db: &dyn DefDatabase,
map: &mut DynMap, map: &mut DynMap,
@ -155,8 +162,8 @@ impl ChildBySource for VariantId {
for (local_id, source) in arena_map.value.iter() { for (local_id, source) in arena_map.value.iter() {
let id = FieldId { parent, local_id }; let id = FieldId { parent, local_id };
match source.clone() { match source.clone() {
-Either::Left(source) => res[keys::TUPLE_FIELD].insert(source, id),
-Either::Right(source) => res[keys::RECORD_FIELD].insert(source, id),
+Either::Left(source) => res[keys::TUPLE_FIELD].insert(AstPtr::new(&source), id),
+Either::Right(source) => res[keys::RECORD_FIELD].insert(AstPtr::new(&source), id),
} }
} }
} }
@ -171,29 +178,30 @@ impl ChildBySource for EnumId {
let tree = loc.id.item_tree(db); let tree = loc.id.item_tree(db);
let ast_id_map = db.ast_id_map(loc.id.file_id()); let ast_id_map = db.ast_id_map(loc.id.file_id());
-let root = db.parse_or_expand(loc.id.file_id());
db.enum_data(*self).variants.iter().for_each(|&(variant, _)| {
-    res[keys::ENUM_VARIANT].insert(
-        ast_id_map.get(tree[variant.lookup(db).id.value].ast_id).to_node(&root),
-        variant,
-    );
+    res[keys::ENUM_VARIANT]
+        .insert(ast_id_map.get(tree[variant.lookup(db).id.value].ast_id), variant);
}); });
} }
} }
impl ChildBySource for DefWithBodyId { impl ChildBySource for DefWithBodyId {
fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) { fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) {
-let body = db.body(*self);
+let (body, sm) = db.body_with_source_map(*self);
if let &DefWithBodyId::VariantId(v) = self { if let &DefWithBodyId::VariantId(v) = self {
VariantId::EnumVariantId(v).child_by_source_to(db, res, file_id) VariantId::EnumVariantId(v).child_by_source_to(db, res, file_id)
} }
sm.expansions().filter(|(ast, _)| ast.file_id == file_id).for_each(|(ast, &exp_id)| {
res[keys::MACRO_CALL].insert(ast.value, exp_id.macro_call_id);
});
for (block, def_map) in body.blocks(db) { for (block, def_map) in body.blocks(db) {
// All block expressions are merged into the same map, because they logically all add // All block expressions are merged into the same map, because they logically all add
// inner items to the containing `DefWithBodyId`. // inner items to the containing `DefWithBodyId`.
def_map[DefMap::ROOT].scope.child_by_source_to(db, res, file_id); def_map[DefMap::ROOT].scope.child_by_source_to(db, res, file_id);
-res[keys::BLOCK].insert(block.lookup(db).ast_id.to_node(db.upcast()), block);
+res[keys::BLOCK].insert(block.lookup(db).ast_id.to_ptr(db.upcast()), block);
} }
} }
} }
@ -220,13 +228,17 @@ impl ChildBySource for GenericDefId {
{ {
let id = TypeOrConstParamId { parent: *self, local_id }; let id = TypeOrConstParamId { parent: *self, local_id };
match ast_param { match ast_param {
-ast::TypeOrConstParam::Type(a) => res[keys::TYPE_PARAM].insert(a, id),
-ast::TypeOrConstParam::Const(a) => res[keys::CONST_PARAM].insert(a, id),
+ast::TypeOrConstParam::Type(a) => {
+    res[keys::TYPE_PARAM].insert(AstPtr::new(&a), id)
+}
+ast::TypeOrConstParam::Const(a) => {
+    res[keys::CONST_PARAM].insert(AstPtr::new(&a), id)
+}
} }
} }
for (local_id, ast_param) in lts_idx_iter.zip(generic_params_list.lifetime_params()) { for (local_id, ast_param) in lts_idx_iter.zip(generic_params_list.lifetime_params()) {
let id = LifetimeParamId { parent: *self, local_id }; let id = LifetimeParamId { parent: *self, local_id };
-res[keys::LIFETIME_PARAM].insert(ast_param, id);
+res[keys::LIFETIME_PARAM].insert(AstPtr::new(&ast_param), id);
} }
} }
} }
@ -246,7 +258,7 @@ fn insert_item_loc<ID, N, Data>(
{ {
let loc = id.lookup(db); let loc = id.lookup(db);
if loc.item_tree_id().file_id() == file_id { if loc.item_tree_id().file_id() == file_id {
-res[key].insert(loc.source(db).value, id)
+res[key].insert(loc.ast_ptr(db).value, id)
} }
} }

View file

@ -13,7 +13,7 @@ use crate::{
TraitId, TypeAliasId, TypeOrConstParamId, UnionId, UseId, TraitId, TypeAliasId, TypeOrConstParamId, UnionId, UseId,
}; };
-pub type Key<K, V> = crate::dyn_map::Key<K, V, AstPtrPolicy<K, V>>;
+pub type Key<K, V> = crate::dyn_map::Key<AstPtr<K>, V, AstPtrPolicy<K, V>>;
pub const BLOCK: Key<ast::BlockExpr, BlockId> = Key::new(); pub const BLOCK: Key<ast::BlockExpr, BlockId> = Key::new();
pub const FUNCTION: Key<ast::Fn, FunctionId> = Key::new(); pub const FUNCTION: Key<ast::Fn, FunctionId> = Key::new();
@ -39,6 +39,7 @@ pub const LIFETIME_PARAM: Key<ast::LifetimeParam, LifetimeParamId> = Key::new();
pub const MACRO_RULES: Key<ast::MacroRules, MacroRulesId> = Key::new(); pub const MACRO_RULES: Key<ast::MacroRules, MacroRulesId> = Key::new();
pub const MACRO2: Key<ast::MacroDef, Macro2Id> = Key::new(); pub const MACRO2: Key<ast::MacroDef, Macro2Id> = Key::new();
pub const PROC_MACRO: Key<ast::Fn, ProcMacroId> = Key::new(); pub const PROC_MACRO: Key<ast::Fn, ProcMacroId> = Key::new();
pub const MACRO_CALL: Key<ast::MacroCall, MacroCallId> = Key::new();
pub const ATTR_MACRO_CALL: Key<ast::Item, MacroCallId> = Key::new(); pub const ATTR_MACRO_CALL: Key<ast::Item, MacroCallId> = Key::new();
pub const DERIVE_MACRO_CALL: Key<ast::Attr, (AttrId, MacroCallId, Box<[Option<MacroCallId>]>)> = pub const DERIVE_MACRO_CALL: Key<ast::Attr, (AttrId, MacroCallId, Box<[Option<MacroCallId>]>)> =
Key::new(); Key::new();
@ -54,18 +55,16 @@ pub struct AstPtrPolicy<AST, ID> {
} }
impl<AST: AstNode + 'static, ID: 'static> Policy for AstPtrPolicy<AST, ID> { impl<AST: AstNode + 'static, ID: 'static> Policy for AstPtrPolicy<AST, ID> {
-type K = AST;
+type K = AstPtr<AST>;
type V = ID; type V = ID;
-fn insert(map: &mut DynMap, key: AST, value: ID) {
-    let key = AstPtr::new(&key);
+fn insert(map: &mut DynMap, key: AstPtr<AST>, value: ID) {
map.map map.map
.entry::<FxHashMap<AstPtr<AST>, ID>>() .entry::<FxHashMap<AstPtr<AST>, ID>>()
.or_insert_with(Default::default) .or_insert_with(Default::default)
.insert(key, value); .insert(key, value);
} }
-fn get<'a>(map: &'a DynMap, key: &AST) -> Option<&'a ID> {
-    let key = AstPtr::new(key);
-    map.map.get::<FxHashMap<AstPtr<AST>, ID>>()?.get(&key)
+fn get<'a>(map: &'a DynMap, key: &AstPtr<AST>) -> Option<&'a ID> {
+    map.map.get::<FxHashMap<AstPtr<AST>, ID>>()?.get(key)
} }
fn is_empty(map: &DynMap) -> bool { fn is_empty(map: &DynMap) -> bool {
map.map.get::<FxHashMap<AstPtr<AST>, ID>>().map_or(true, |it| it.is_empty()) map.map.get::<FxHashMap<AstPtr<AST>, ID>>().map_or(true, |it| it.is_empty())

View file

@@ -67,6 +67,10 @@ impl BuiltinFnLikeExpander {
        let span = span_with_def_site_ctxt(db, span, id);
        self.expander()(db, id, tt, span)
    }
+
+    pub fn is_asm(&self) -> bool {
+        matches!(self, Self::Asm | Self::GlobalAsm)
+    }
}

impl EagerExpander {

View file

@ -1,6 +1,4 @@
//! Things to wrap other things in file ids. //! Things to wrap other things in file ids.
use std::iter;
use either::Either; use either::Either;
use span::{ use span::{
AstIdNode, ErasedFileAstId, FileAstId, FileId, FileRange, HirFileId, HirFileIdRepr, AstIdNode, ErasedFileAstId, FileAstId, FileId, FileRange, HirFileId, HirFileIdRepr,
@ -150,27 +148,16 @@ impl<FileId: Copy, N: AstNode> InFileWrapper<FileId, N> {
} }
} }
impl<FileId: Copy, N: AstNode> InFileWrapper<FileId, &N> {
// unfortunately `syntax` collides with the impl above, because `&_` is fundamental
pub fn syntax_ref(&self) -> InFileWrapper<FileId, &SyntaxNode> {
self.with_value(self.value.syntax())
}
}
// region:specific impls // region:specific impls
impl InFile<&SyntaxNode> { impl InFile<&SyntaxNode> {
/// Traverse up macro calls and skips the macro invocation node
pub fn ancestors_with_macros(
self,
db: &dyn db::ExpandDatabase,
) -> impl Iterator<Item = InFile<SyntaxNode>> + '_ {
let succ = move |node: &InFile<SyntaxNode>| match node.value.parent() {
Some(parent) => Some(node.with_value(parent)),
None => db
.lookup_intern_macro_call(node.file_id.macro_file()?.macro_call_id)
.to_node_item(db)
.syntax()
.cloned()
.map(|node| node.parent())
.transpose(),
};
iter::successors(succ(&self.cloned()), succ)
}
/// Falls back to the macro call range if the node cannot be mapped up fully. /// Falls back to the macro call range if the node cannot be mapped up fully.
/// ///
/// For attributes and derives, this will point back to the attribute only. /// For attributes and derives, this will point back to the attribute only.

View file

@ -47,7 +47,7 @@ use crate::{
builtin_attr_macro::BuiltinAttrExpander, builtin_attr_macro::BuiltinAttrExpander,
builtin_derive_macro::BuiltinDeriveExpander, builtin_derive_macro::BuiltinDeriveExpander,
builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander}, builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander},
-db::{ExpandDatabase, TokenExpander},
+db::ExpandDatabase,
mod_path::ModPath, mod_path::ModPath,
proc_macro::{CustomProcMacroExpander, ProcMacroKind}, proc_macro::{CustomProcMacroExpander, ProcMacroKind},
span_map::{ExpansionSpanMap, SpanMap}, span_map::{ExpansionSpanMap, SpanMap},
@ -253,9 +253,6 @@ pub trait HirFileIdExt {
/// If this is a macro call, returns the syntax node of the very first macro call this file resides in. /// If this is a macro call, returns the syntax node of the very first macro call this file resides in.
fn original_call_node(self, db: &dyn ExpandDatabase) -> Option<InRealFile<SyntaxNode>>; fn original_call_node(self, db: &dyn ExpandDatabase) -> Option<InRealFile<SyntaxNode>>;
/// Return expansion information if it is a macro-expansion file
fn expansion_info(self, db: &dyn ExpandDatabase) -> Option<ExpansionInfo>;
fn as_builtin_derive_attr_node(&self, db: &dyn ExpandDatabase) -> Option<InFile<ast::Attr>>; fn as_builtin_derive_attr_node(&self, db: &dyn ExpandDatabase) -> Option<InFile<ast::Attr>>;
} }
@ -309,11 +306,6 @@ impl HirFileIdExt for HirFileId {
} }
} }
/// Return expansion information if it is a macro-expansion file
fn expansion_info(self, db: &dyn ExpandDatabase) -> Option<ExpansionInfo> {
Some(ExpansionInfo::new(db, self.macro_file()?))
}
fn as_builtin_derive_attr_node(&self, db: &dyn ExpandDatabase) -> Option<InFile<ast::Attr>> { fn as_builtin_derive_attr_node(&self, db: &dyn ExpandDatabase) -> Option<InFile<ast::Attr>> {
let macro_file = self.macro_file()?; let macro_file = self.macro_file()?;
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
@ -417,8 +409,10 @@ impl MacroFileIdExt for MacroFileId {
} }
fn is_attr_macro(&self, db: &dyn ExpandDatabase) -> bool { fn is_attr_macro(&self, db: &dyn ExpandDatabase) -> bool {
-let loc = db.lookup_intern_macro_call(self.macro_call_id);
-matches!(loc.kind, MacroCallKind::Attr { .. })
+matches!(
+    db.lookup_intern_macro_call(self.macro_call_id).def.kind,
+    MacroDefKind::BuiltInAttr(..) | MacroDefKind::ProcMacro(_, ProcMacroKind::Attr, _)
+)
} }
fn is_derive_attr_pseudo_expansion(&self, db: &dyn ExpandDatabase) -> bool { fn is_derive_attr_pseudo_expansion(&self, db: &dyn ExpandDatabase) -> bool {
@ -703,16 +697,12 @@ impl MacroCallKind {
// simpler function calls if the map is only used once // simpler function calls if the map is only used once
#[derive(Clone, Debug, PartialEq, Eq)] #[derive(Clone, Debug, PartialEq, Eq)]
pub struct ExpansionInfo { pub struct ExpansionInfo {
-pub expanded: InMacroFile<SyntaxNode>,
+expanded: InMacroFile<SyntaxNode>,
/// The argument TokenTree or item for attributes /// The argument TokenTree or item for attributes
arg: InFile<Option<SyntaxNode>>, arg: InFile<Option<SyntaxNode>>,
-/// The `macro_rules!` or attribute input.
-attr_input_or_mac_def: Option<InFile<ast::TokenTree>>,
-macro_def: TokenExpander,
-macro_arg: Arc<tt::Subtree>,
-pub exp_map: Arc<ExpansionSpanMap>,
+exp_map: Arc<ExpansionSpanMap>,
arg_map: SpanMap,
+loc: MacroCallLoc,
} }
impl ExpansionInfo { impl ExpansionInfo {
@ -720,14 +710,21 @@ impl ExpansionInfo {
self.expanded.clone() self.expanded.clone()
} }
-pub fn call_node(&self) -> Option<InFile<SyntaxNode>> {
-    Some(self.arg.with_value(self.arg.value.as_ref()?.parent()?))
+pub fn call_node(&self) -> InFile<Option<SyntaxNode>> {
+    self.arg.with_value(self.arg.value.as_ref().and_then(SyntaxNode::parent))
} }
pub fn call_file(&self) -> HirFileId { pub fn call_file(&self) -> HirFileId {
self.arg.file_id self.arg.file_id
} }
pub fn is_attr(&self) -> bool {
matches!(
self.loc.def.kind,
MacroDefKind::BuiltInAttr(..) | MacroDefKind::ProcMacro(_, ProcMacroKind::Attr, _)
)
}
/// Maps the passed in file range down into a macro expansion if it is the input to a macro call. /// Maps the passed in file range down into a macro expansion if it is the input to a macro call.
/// ///
/// Note this does a linear search through the entire backing vector of the spanmap. /// Note this does a linear search through the entire backing vector of the spanmap.
@ -812,49 +809,15 @@ impl ExpansionInfo {
} }
pub fn new(db: &dyn ExpandDatabase, macro_file: MacroFileId) -> ExpansionInfo { pub fn new(db: &dyn ExpandDatabase, macro_file: MacroFileId) -> ExpansionInfo {
-let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
let arg_tt = loc.kind.arg(db); let arg_tt = loc.kind.arg(db);
let arg_map = db.span_map(arg_tt.file_id); let arg_map = db.span_map(arg_tt.file_id);
let macro_def = db.macro_expander(loc.def);
let (parse, exp_map) = db.parse_macro_expansion(macro_file).value; let (parse, exp_map) = db.parse_macro_expansion(macro_file).value;
let expanded = InMacroFile { file_id: macro_file, value: parse.syntax_node() }; let expanded = InMacroFile { file_id: macro_file, value: parse.syntax_node() };
-let (macro_arg, _, _) =
-    db.macro_arg_considering_derives(macro_file.macro_call_id, &loc.kind);
+ExpansionInfo { expanded, loc, arg: arg_tt, exp_map, arg_map }
let def = loc.def.ast_id().left().and_then(|id| {
let def_tt = match id.to_node(db) {
ast::Macro::MacroRules(mac) => mac.token_tree()?,
ast::Macro::MacroDef(_) if matches!(macro_def, TokenExpander::BuiltInAttr(_)) => {
return None
}
ast::Macro::MacroDef(mac) => mac.body()?,
};
Some(InFile::new(id.file_id, def_tt))
});
let attr_input_or_mac_def = def.or_else(|| match loc.kind {
MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
// FIXME: handle `cfg_attr`
let tt = collect_attrs(&ast_id.to_node(db))
.nth(invoc_attr_index.ast_index())
.and_then(|x| Either::left(x.1))?
.token_tree()?;
Some(InFile::new(ast_id.file_id, tt))
}
_ => None,
});
ExpansionInfo {
expanded,
arg: arg_tt,
attr_input_or_mac_def,
macro_arg,
macro_def,
exp_map,
arg_map,
}
} }
} }

View file

@ -19,8 +19,8 @@ use hir_def::{
AsMacroCall, DefWithBodyId, FunctionId, MacroId, TraitId, VariantId, AsMacroCall, DefWithBodyId, FunctionId, MacroId, TraitId, VariantId,
}; };
use hir_expand::{ use hir_expand::{
-attrs::collect_attrs, db::ExpandDatabase, files::InRealFile, name::AsName, ExpansionInfo,
-InMacroFile, MacroCallId, MacroFileId, MacroFileIdExt,
+attrs::collect_attrs, db::ExpandDatabase, files::InRealFile, name::AsName, InMacroFile,
+MacroCallId, MacroFileId, MacroFileIdExt,
}; };
use itertools::Itertools; use itertools::Itertools;
use rustc_hash::{FxHashMap, FxHashSet}; use rustc_hash::{FxHashMap, FxHashSet};
@ -129,12 +129,9 @@ pub struct Semantics<'db, DB> {
pub struct SemanticsImpl<'db> { pub struct SemanticsImpl<'db> {
pub db: &'db dyn HirDatabase, pub db: &'db dyn HirDatabase,
-s2d_cache: RefCell<SourceToDefCache>,
+s2d_cache: RefCell<(SourceToDefCache, FxHashMap<MacroFileId, hir_expand::ExpansionInfo>)>,
/// Rootnode to HirFileId cache /// Rootnode to HirFileId cache
root_to_file_cache: RefCell<FxHashMap<SyntaxNode, HirFileId>>, root_to_file_cache: RefCell<FxHashMap<SyntaxNode, HirFileId>>,
// These 2 caches are mainly useful for semantic highlighting as nothing else descends a lot of tokens
// So we might wanna move them out into something specific for semantic highlighting
expansion_info_cache: RefCell<FxHashMap<MacroFileId, ExpansionInfo>>,
/// MacroCall to its expansion's MacroFileId cache /// MacroCall to its expansion's MacroFileId cache
macro_call_cache: RefCell<FxHashMap<InFile<ast::MacroCall>, MacroFileId>>, macro_call_cache: RefCell<FxHashMap<InFile<ast::MacroCall>, MacroFileId>>,
} }
@ -295,7 +292,6 @@ impl<'db> SemanticsImpl<'db> {
db, db,
s2d_cache: Default::default(), s2d_cache: Default::default(),
root_to_file_cache: Default::default(), root_to_file_cache: Default::default(),
expansion_info_cache: Default::default(),
macro_call_cache: Default::default(), macro_call_cache: Default::default(),
} }
} }
@ -314,7 +310,16 @@ impl<'db> SemanticsImpl<'db> {
pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> { pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
let sa = self.analyze_no_infer(macro_call.syntax())?; let sa = self.analyze_no_infer(macro_call.syntax())?;
let file_id = sa.expand(self.db, InFile::new(sa.file_id, macro_call))?;
let macro_call = InFile::new(sa.file_id, macro_call);
let file_id = if let Some(call) =
<ast::MacroCall as crate::semantics::ToDef>::to_def(self, macro_call)
{
call.as_macro_file()
} else {
sa.expand(self.db, macro_call)?
};
let node = self.parse_or_expand(file_id.into()); let node = self.parse_or_expand(file_id.into());
Some(node) Some(node)
} }
@ -322,7 +327,7 @@ impl<'db> SemanticsImpl<'db> {
/// If `item` has an attribute macro attached to it, expands it. /// If `item` has an attribute macro attached to it, expands it.
pub fn expand_attr_macro(&self, item: &ast::Item) -> Option<SyntaxNode> { pub fn expand_attr_macro(&self, item: &ast::Item) -> Option<SyntaxNode> {
let src = self.wrap_node_infile(item.clone()); let src = self.wrap_node_infile(item.clone());
-let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(src))?;
+let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(src.as_ref()))?;
Some(self.parse_or_expand(macro_call_id.as_file())) Some(self.parse_or_expand(macro_call_id.as_file()))
} }
@ -341,9 +346,7 @@ impl<'db> SemanticsImpl<'db> {
Some( Some(
calls calls
.into_iter() .into_iter()
-.map(|call| {
-    macro_call_to_macro_id(ctx, self.db.upcast(), call?).map(|id| Macro { id })
-})
+.map(|call| macro_call_to_macro_id(ctx, call?).map(|id| Macro { id }))
.collect(), .collect(),
) )
}) })
@ -403,7 +406,7 @@ impl<'db> SemanticsImpl<'db> {
pub fn is_attr_macro_call(&self, item: &ast::Item) -> bool { pub fn is_attr_macro_call(&self, item: &ast::Item) -> bool {
let file_id = self.find_file(item.syntax()).file_id; let file_id = self.find_file(item.syntax()).file_id;
-let src = InFile::new(file_id, item.clone());
+let src = InFile::new(file_id, item);
self.with_ctx(|ctx| ctx.item_to_macro_call(src).is_some()) self.with_ctx(|ctx| ctx.item_to_macro_call(src).is_some())
} }
@ -453,7 +456,7 @@ impl<'db> SemanticsImpl<'db> {
token_to_map: SyntaxToken, token_to_map: SyntaxToken,
) -> Option<(SyntaxNode, SyntaxToken)> { ) -> Option<(SyntaxNode, SyntaxToken)> {
let macro_call = self.wrap_node_infile(actual_macro_call.clone()); let macro_call = self.wrap_node_infile(actual_macro_call.clone());
-let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(macro_call))?;
+let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(macro_call.as_ref()))?;
hir_expand::db::expand_speculative( hir_expand::db::expand_speculative(
self.db.upcast(), self.db.upcast(),
macro_call_id, macro_call_id,
@ -705,8 +708,6 @@ impl<'db> SemanticsImpl<'db> {
let parent = token.parent()?; let parent = token.parent()?;
let file_id = self.find_file(&parent).file_id.file_id()?; let file_id = self.find_file(&parent).file_id.file_id()?;
let mut cache = self.expansion_info_cache.borrow_mut();
// iterate related crates and find all include! invocations that include_file_id matches // iterate related crates and find all include! invocations that include_file_id matches
for (invoc, _) in self for (invoc, _) in self
.db .db
@ -716,18 +717,31 @@ impl<'db> SemanticsImpl<'db> {
.filter(|&(_, include_file_id)| include_file_id == file_id) .filter(|&(_, include_file_id)| include_file_id == file_id)
{ {
let macro_file = invoc.as_macro_file(); let macro_file = invoc.as_macro_file();
-let expansion_info = cache.entry(macro_file).or_insert_with(|| {
-    let exp_info = macro_file.expansion_info(self.db.upcast());
-    let InMacroFile { file_id, value } = exp_info.expanded();
-    self.cache(value, file_id.into());
-    exp_info
-});
+let expansion_info = {
+    self.with_ctx(|ctx| {
+        ctx.expansion_info_cache
+            .entry(macro_file)
+            .or_insert_with(|| {
+                let exp_info = macro_file.expansion_info(self.db.upcast());
+                let InMacroFile { file_id, value } = exp_info.expanded();
+                if let InFile { file_id, value: Some(value) } = exp_info.call_node() {
+                    self.cache(value.ancestors().last().unwrap(), file_id);
+                }
+                self.cache(value, file_id.into());
+                exp_info
+            })
+            .clone()
+    })
+};
// FIXME: uncached parse // FIXME: uncached parse
// Create the source analyzer for the macro call scope // Create the source analyzer for the macro call scope
-let Some(sa) = self.analyze_no_infer(&self.parse_or_expand(expansion_info.call_file()))
+let Some(sa) = expansion_info
+    .call_node()
+    .value
+    .and_then(|it| self.analyze_no_infer(&it.ancestors().last().unwrap()))
else { else {
continue; continue;
}; };
@ -785,23 +799,27 @@ impl<'db> SemanticsImpl<'db> {
} }
}; };
-let mut cache = self.expansion_info_cache.borrow_mut();
-let mut mcache = self.macro_call_cache.borrow_mut();
+let mut m_cache = self.macro_call_cache.borrow_mut();
let def_map = sa.resolver.def_map(); let def_map = sa.resolver.def_map();
let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![(file_id, smallvec![token])]; let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![(file_id, smallvec![token])];
-let mut process_expansion_for_token = |stack: &mut Vec<_>, macro_file| {
-    let exp_info = cache.entry(macro_file).or_insert_with(|| {
-        let exp_info = macro_file.expansion_info(self.db.upcast());
-        let InMacroFile { file_id, value } = exp_info.expanded();
-        self.cache(value, file_id.into());
-        exp_info
-    });
-    let InMacroFile { file_id, value: mapped_tokens } = exp_info.map_range_down(span)?;
-    let mapped_tokens: SmallVec<[_; 2]> = mapped_tokens.collect();
+let process_expansion_for_token = |stack: &mut Vec<_>, macro_file| {
+    let InMacroFile { file_id, value: mapped_tokens } = self.with_ctx(|ctx| {
+        Some(
+            ctx.expansion_info_cache
+                .entry(macro_file)
+                .or_insert_with(|| {
+                    let exp_info = macro_file.expansion_info(self.db.upcast());
+                    let InMacroFile { file_id, value } = exp_info.expanded();
+                    self.cache(value, file_id.into());
+                    exp_info
+                })
+                .map_range_down(span)?
+                .map(SmallVec::<[_; 2]>::from_iter),
+        )
+    })?;
// we have found a mapping for the token if the vec is non-empty // we have found a mapping for the token if the vec is non-empty
let res = mapped_tokens.is_empty().not().then_some(()); let res = mapped_tokens.is_empty().not().then_some(());
@ -818,10 +836,7 @@ impl<'db> SemanticsImpl<'db> {
token.parent_ancestors().filter_map(ast::Item::cast).find_map(|item| { token.parent_ancestors().filter_map(ast::Item::cast).find_map(|item| {
// Don't force populate the dyn cache for items that don't have an attribute anyways // Don't force populate the dyn cache for items that don't have an attribute anyways
item.attrs().next()?; item.attrs().next()?;
-Some((
-    ctx.item_to_macro_call(InFile::new(file_id, item.clone()))?,
-    item,
-))
+Some((ctx.item_to_macro_call(InFile::new(file_id, &item))?, item))
}) })
}); });
if let Some((call_id, item)) = containing_attribute_macro_call { if let Some((call_id, item)) = containing_attribute_macro_call {
@ -874,13 +889,20 @@ impl<'db> SemanticsImpl<'db> {
return None; return None;
} }
let macro_call = tt.syntax().parent().and_then(ast::MacroCall::cast)?; let macro_call = tt.syntax().parent().and_then(ast::MacroCall::cast)?;
-let mcall: hir_expand::files::InFileWrapper<HirFileId, ast::MacroCall> =
-    InFile::new(file_id, macro_call);
-let file_id = match mcache.get(&mcall) {
+let mcall = InFile::new(file_id, macro_call);
+let file_id = match m_cache.get(&mcall) {
    Some(&it) => it,
    None => {
-        let it = sa.expand(self.db, mcall.as_ref())?;
-        mcache.insert(mcall, it);
+        let it = if let Some(call) =
+            <ast::MacroCall as crate::semantics::ToDef>::to_def(
+                self,
+                mcall.as_ref(),
+            ) {
+            call.as_macro_file()
+        } else {
+            sa.expand(self.db, mcall.as_ref())?
+        };
+        m_cache.insert(mcall, it);
        it
    }
};
@ -1056,16 +1078,19 @@ impl<'db> SemanticsImpl<'db> {
node: SyntaxNode, node: SyntaxNode,
) -> impl Iterator<Item = SyntaxNode> + Clone + '_ { ) -> impl Iterator<Item = SyntaxNode> + Clone + '_ {
let node = self.find_file(&node); let node = self.find_file(&node);
let db = self.db.upcast();
iter::successors(Some(node.cloned()), move |&InFile { file_id, ref value }| { iter::successors(Some(node.cloned()), move |&InFile { file_id, ref value }| {
match value.parent() { match value.parent() {
Some(parent) => Some(InFile::new(file_id, parent)), Some(parent) => Some(InFile::new(file_id, parent)),
None => { None => {
-let call_node = file_id.macro_file()?.call_node(db);
-// cache the node
-// FIXME: uncached parse
-self.parse_or_expand(call_node.file_id);
-Some(call_node)
+let macro_file = file_id.macro_file()?;
+self.with_ctx(|ctx| {
+    let expansion_info = ctx
+        .expansion_info_cache
+        .entry(macro_file)
+        .or_insert_with(|| macro_file.expansion_info(self.db.upcast()));
+    expansion_info.call_node().transpose()
+})
} }
} }
}) })
@ -1090,7 +1115,7 @@ impl<'db> SemanticsImpl<'db> {
.find(|tp| tp.lifetime().as_ref().map(|lt| lt.text()).as_ref() == Some(&text)) .find(|tp| tp.lifetime().as_ref().map(|lt| lt.text()).as_ref() == Some(&text))
})?; })?;
let src = self.wrap_node_infile(lifetime_param); let src = self.wrap_node_infile(lifetime_param);
-ToDef::to_def(self, src)
+ToDef::to_def(self, src.as_ref())
} }
pub fn resolve_label(&self, lifetime: &ast::Lifetime) -> Option<Label> { pub fn resolve_label(&self, lifetime: &ast::Lifetime) -> Option<Label> {
@ -1112,7 +1137,7 @@ impl<'db> SemanticsImpl<'db> {
}) })
})?; })?;
let src = self.wrap_node_infile(label); let src = self.wrap_node_infile(label);
-ToDef::to_def(self, src)
+ToDef::to_def(self, src.as_ref())
} }
pub fn resolve_type(&self, ty: &ast::Type) -> Option<Type> { pub fn resolve_type(&self, ty: &ast::Type) -> Option<Type> {
@ -1308,8 +1333,8 @@ impl<'db> SemanticsImpl<'db> {
pub fn resolve_attr_macro_call(&self, item: &ast::Item) -> Option<Macro> { pub fn resolve_attr_macro_call(&self, item: &ast::Item) -> Option<Macro> {
let item_in_file = self.wrap_node_infile(item.clone()); let item_in_file = self.wrap_node_infile(item.clone());
let id = self.with_ctx(|ctx| { let id = self.with_ctx(|ctx| {
-let macro_call_id = ctx.item_to_macro_call(item_in_file)?;
-macro_call_to_macro_id(ctx, self.db.upcast(), macro_call_id)
+let macro_call_id = ctx.item_to_macro_call(item_in_file.as_ref())?;
+macro_call_to_macro_id(ctx, macro_call_id)
})?; })?;
Some(Macro { id }) Some(Macro { id })
} }
@ -1339,13 +1364,13 @@ impl<'db> SemanticsImpl<'db> {
} }
fn with_ctx<F: FnOnce(&mut SourceToDefCtx<'_, '_>) -> T, T>(&self, f: F) -> T { fn with_ctx<F: FnOnce(&mut SourceToDefCtx<'_, '_>) -> T, T>(&self, f: F) -> T {
-let mut cache = self.s2d_cache.borrow_mut();
-let mut ctx = SourceToDefCtx { db: self.db, dynmap_cache: &mut cache };
+let (dynmap_cache, expansion_info_cache) = &mut *self.s2d_cache.borrow_mut();
+let mut ctx = SourceToDefCtx { db: self.db, dynmap_cache, expansion_info_cache };
f(&mut ctx) f(&mut ctx)
} }
pub fn to_def<T: ToDef>(&self, src: &T) -> Option<T::Def> { pub fn to_def<T: ToDef>(&self, src: &T) -> Option<T::Def> {
-let src = self.find_file(src.syntax()).with_value(src).cloned();
+let src = self.find_file(src.syntax()).with_value(src);
T::to_def(self, src) T::to_def(self, src)
} }
@ -1613,27 +1638,57 @@ impl<'db> SemanticsImpl<'db> {
fn macro_call_to_macro_id( fn macro_call_to_macro_id(
ctx: &mut SourceToDefCtx<'_, '_>, ctx: &mut SourceToDefCtx<'_, '_>,
db: &dyn ExpandDatabase,
macro_call_id: MacroCallId, macro_call_id: MacroCallId,
) -> Option<MacroId> { ) -> Option<MacroId> {
use span::HirFileIdRepr;
let db: &dyn ExpandDatabase = ctx.db.upcast();
let loc = db.lookup_intern_macro_call(macro_call_id); let loc = db.lookup_intern_macro_call(macro_call_id);
match loc.def.ast_id() { match loc.def.ast_id() {
-Either::Left(it) => ctx.macro_to_def(InFile::new(it.file_id, it.to_node(db))),
-Either::Right(it) => ctx.proc_macro_to_def(InFile::new(it.file_id, it.to_node(db))),
+Either::Left(it) => {
+    let node = match it.file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
it.to_ptr(db).to_node(&db.parse(file_id).syntax_node())
}
HirFileIdRepr::MacroFile(macro_file) => {
let expansion_info = ctx
.expansion_info_cache
.entry(macro_file)
.or_insert_with(|| macro_file.expansion_info(ctx.db.upcast()));
it.to_ptr(db).to_node(&expansion_info.expanded().value)
}
};
ctx.macro_to_def(InFile::new(it.file_id, &node))
}
Either::Right(it) => {
let node = match it.file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
it.to_ptr(db).to_node(&db.parse(file_id).syntax_node())
}
HirFileIdRepr::MacroFile(macro_file) => {
let expansion_info = ctx
.expansion_info_cache
.entry(macro_file)
.or_insert_with(|| macro_file.expansion_info(ctx.db.upcast()));
it.to_ptr(db).to_node(&expansion_info.expanded().value)
}
};
ctx.proc_macro_to_def(InFile::new(it.file_id, &node))
}
} }
} }
pub trait ToDef: AstNode + Clone { pub trait ToDef: AstNode + Clone {
type Def; type Def;
-fn to_def(sema: &SemanticsImpl<'_>, src: InFile<Self>) -> Option<Self::Def>;
+fn to_def(sema: &SemanticsImpl<'_>, src: InFile<&Self>) -> Option<Self::Def>;
} }
macro_rules! to_def_impls { macro_rules! to_def_impls {
($(($def:path, $ast:path, $meth:ident)),* ,) => {$( ($(($def:path, $ast:path, $meth:ident)),* ,) => {$(
impl ToDef for $ast { impl ToDef for $ast {
type Def = $def; type Def = $def;
-fn to_def(sema: &SemanticsImpl<'_>, src: InFile<Self>) -> Option<Self::Def> {
+fn to_def(sema: &SemanticsImpl<'_>, src: InFile<&Self>) -> Option<Self::Def> {
sema.with_ctx(|ctx| ctx.$meth(src)).map(<$def>::from) sema.with_ctx(|ctx| ctx.$meth(src)).map(<$def>::from)
} }
} }
@ -1666,6 +1721,7 @@ to_def_impls![
(crate::Label, ast::Label, label_to_def), (crate::Label, ast::Label, label_to_def),
(crate::Adt, ast::Adt, adt_to_def), (crate::Adt, ast::Adt, adt_to_def),
(crate::ExternCrateDecl, ast::ExternCrate, extern_crate_to_def), (crate::ExternCrateDecl, ast::ExternCrate, extern_crate_to_def),
(MacroCallId, ast::MacroCall, macro_call_to_macro_call),
]; ];
fn find_root(node: &SyntaxNode) -> SyntaxNode { fn find_root(node: &SyntaxNode) -> SyntaxNode {
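The semantics.rs changes above fold the per-MacroFileId ExpansionInfo cache into the SourceToDefCtx state behind with_ctx, so token descent and source-to-def lookups share one memoized map. The caching idiom they lean on is HashMap::entry(..).or_insert_with(..), which computes a value once per key and reuses it afterwards; a tiny stand-alone sketch of that idiom (illustrative names only, not rust-analyzer API):

use std::collections::HashMap;

// The closure runs only on the first lookup for a given key; later lookups reuse the stored value.
fn cached_double(cache: &mut HashMap<u32, usize>, key: u32) -> usize {
    *cache.entry(key).or_insert_with(|| {
        println!("computing for {key}"); // happens once per key
        (key as usize) * 2
    })
}

fn main() {
    let mut cache = HashMap::new();
    assert_eq!(cached_double(&mut cache, 3), 6); // computes
    assert_eq!(cached_double(&mut cache, 3), 6); // cache hit, no recompute
}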

View file

@ -98,22 +98,26 @@ use hir_def::{
FieldId, FunctionId, GenericDefId, GenericParamId, ImplId, LifetimeParamId, MacroId, ModuleId, FieldId, FunctionId, GenericDefId, GenericParamId, ImplId, LifetimeParamId, MacroId, ModuleId,
StaticId, StructId, TraitAliasId, TraitId, TypeAliasId, TypeParamId, UnionId, UseId, VariantId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId, TypeParamId, UnionId, UseId, VariantId,
}; };
-use hir_expand::{attrs::AttrId, name::AsName, HirFileId, HirFileIdExt, MacroCallId};
+use hir_expand::{
+    attrs::AttrId, name::AsName, ExpansionInfo, HirFileId, HirFileIdExt, MacroCallId,
+};
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use smallvec::SmallVec; use smallvec::SmallVec;
use span::MacroFileId;
use stdx::impl_from; use stdx::impl_from;
use syntax::{ use syntax::{
ast::{self, HasName}, ast::{self, HasName},
-AstNode, SyntaxNode,
+AstNode, AstPtr, SyntaxNode,
}; };
use crate::{db::HirDatabase, InFile}; use crate::{db::HirDatabase, InFile};
pub(super) type SourceToDefCache = FxHashMap<(ChildContainer, HirFileId), DynMap>; pub(super) type SourceToDefCache = FxHashMap<(ChildContainer, HirFileId), DynMap>;
-pub(super) struct SourceToDefCtx<'a, 'b> {
-    pub(super) db: &'b dyn HirDatabase,
-    pub(super) dynmap_cache: &'a mut SourceToDefCache,
+pub(super) struct SourceToDefCtx<'a, 'dyn_cache> {
+    pub(super) db: &'a dyn HirDatabase,
+    pub(super) dynmap_cache: &'dyn_cache mut SourceToDefCache,
pub(super) expansion_info_cache: &'a mut FxHashMap<MacroFileId, ExpansionInfo>,
} }
impl SourceToDefCtx<'_, '_> { impl SourceToDefCtx<'_, '_> {
@ -135,19 +139,21 @@ impl SourceToDefCtx<'_, '_> {
mods mods
} }
-pub(super) fn module_to_def(&mut self, src: InFile<ast::Module>) -> Option<ModuleId> {
+pub(super) fn module_to_def(&mut self, src: InFile<&ast::Module>) -> Option<ModuleId> {
let _p = tracing::span!(tracing::Level::INFO, "module_to_def").entered(); let _p = tracing::span!(tracing::Level::INFO, "module_to_def").entered();
-let parent_declaration = src
-    .syntax()
-    .ancestors_with_macros(self.db.upcast())
-    .find_map(|it| it.map(Either::<ast::Module, ast::BlockExpr>::cast).transpose())
+let parent_declaration = self
+    .ancestors_with_macros(src.syntax_ref(), |_, ancestor| {
+        ancestor.map(Either::<ast::Module, ast::BlockExpr>::cast).transpose()
+    })
.map(|it| it.transpose()); .map(|it| it.transpose());
let parent_module = match parent_declaration { let parent_module = match parent_declaration {
Some(Either::Right(parent_block)) => self Some(Either::Right(parent_block)) => self
-.block_to_def(parent_block)
+.block_to_def(parent_block.as_ref())
.map(|block| self.db.block_def_map(block).root_module_id()), .map(|block| self.db.block_def_map(block).root_module_id()),
-Some(Either::Left(parent_declaration)) => self.module_to_def(parent_declaration),
+Some(Either::Left(parent_declaration)) => {
+    self.module_to_def(parent_declaration.as_ref())
+}
None => { None => {
let file_id = src.file_id.original_file(self.db.upcast()); let file_id = src.file_id.original_file(self.db.upcast());
self.file_to_def(file_id).first().copied() self.file_to_def(file_id).first().copied()
@@ -160,73 +166,79 @@ impl SourceToDefCtx<'_, '_> {
    Some(def_map.module_id(child_id))
}
-pub(super) fn source_file_to_def(&self, src: InFile<ast::SourceFile>) -> Option<ModuleId> {
+pub(super) fn source_file_to_def(&self, src: InFile<&ast::SourceFile>) -> Option<ModuleId> {
    let _p = tracing::span!(tracing::Level::INFO, "source_file_to_def").entered();
    let file_id = src.file_id.original_file(self.db.upcast());
    self.file_to_def(file_id).first().copied()
}
-pub(super) fn trait_to_def(&mut self, src: InFile<ast::Trait>) -> Option<TraitId> {
+pub(super) fn trait_to_def(&mut self, src: InFile<&ast::Trait>) -> Option<TraitId> {
    self.to_def(src, keys::TRAIT)
}
pub(super) fn trait_alias_to_def(
    &mut self,
-    src: InFile<ast::TraitAlias>,
+    src: InFile<&ast::TraitAlias>,
) -> Option<TraitAliasId> {
    self.to_def(src, keys::TRAIT_ALIAS)
}
-pub(super) fn impl_to_def(&mut self, src: InFile<ast::Impl>) -> Option<ImplId> {
+pub(super) fn impl_to_def(&mut self, src: InFile<&ast::Impl>) -> Option<ImplId> {
    self.to_def(src, keys::IMPL)
}
-pub(super) fn fn_to_def(&mut self, src: InFile<ast::Fn>) -> Option<FunctionId> {
+pub(super) fn fn_to_def(&mut self, src: InFile<&ast::Fn>) -> Option<FunctionId> {
    self.to_def(src, keys::FUNCTION)
}
-pub(super) fn struct_to_def(&mut self, src: InFile<ast::Struct>) -> Option<StructId> {
+pub(super) fn struct_to_def(&mut self, src: InFile<&ast::Struct>) -> Option<StructId> {
    self.to_def(src, keys::STRUCT)
}
-pub(super) fn enum_to_def(&mut self, src: InFile<ast::Enum>) -> Option<EnumId> {
+pub(super) fn enum_to_def(&mut self, src: InFile<&ast::Enum>) -> Option<EnumId> {
    self.to_def(src, keys::ENUM)
}
-pub(super) fn union_to_def(&mut self, src: InFile<ast::Union>) -> Option<UnionId> {
+pub(super) fn union_to_def(&mut self, src: InFile<&ast::Union>) -> Option<UnionId> {
    self.to_def(src, keys::UNION)
}
-pub(super) fn static_to_def(&mut self, src: InFile<ast::Static>) -> Option<StaticId> {
+pub(super) fn static_to_def(&mut self, src: InFile<&ast::Static>) -> Option<StaticId> {
    self.to_def(src, keys::STATIC)
}
-pub(super) fn const_to_def(&mut self, src: InFile<ast::Const>) -> Option<ConstId> {
+pub(super) fn const_to_def(&mut self, src: InFile<&ast::Const>) -> Option<ConstId> {
    self.to_def(src, keys::CONST)
}
-pub(super) fn type_alias_to_def(&mut self, src: InFile<ast::TypeAlias>) -> Option<TypeAliasId> {
+pub(super) fn type_alias_to_def(
+    &mut self,
+    src: InFile<&ast::TypeAlias>,
+) -> Option<TypeAliasId> {
    self.to_def(src, keys::TYPE_ALIAS)
}
-pub(super) fn record_field_to_def(&mut self, src: InFile<ast::RecordField>) -> Option<FieldId> {
+pub(super) fn record_field_to_def(
+    &mut self,
+    src: InFile<&ast::RecordField>,
+) -> Option<FieldId> {
    self.to_def(src, keys::RECORD_FIELD)
}
-pub(super) fn tuple_field_to_def(&mut self, src: InFile<ast::TupleField>) -> Option<FieldId> {
+pub(super) fn tuple_field_to_def(&mut self, src: InFile<&ast::TupleField>) -> Option<FieldId> {
    self.to_def(src, keys::TUPLE_FIELD)
}
-pub(super) fn block_to_def(&mut self, src: InFile<ast::BlockExpr>) -> Option<BlockId> {
+pub(super) fn block_to_def(&mut self, src: InFile<&ast::BlockExpr>) -> Option<BlockId> {
    self.to_def(src, keys::BLOCK)
}
pub(super) fn enum_variant_to_def(
    &mut self,
-    src: InFile<ast::Variant>,
+    src: InFile<&ast::Variant>,
) -> Option<EnumVariantId> {
    self.to_def(src, keys::ENUM_VARIANT)
}
pub(super) fn extern_crate_to_def(
    &mut self,
-    src: InFile<ast::ExternCrate>,
+    src: InFile<&ast::ExternCrate>,
) -> Option<ExternCrateId> {
    self.to_def(src, keys::EXTERN_CRATE)
}
#[allow(dead_code)]
-pub(super) fn use_to_def(&mut self, src: InFile<ast::Use>) -> Option<UseId> {
+pub(super) fn use_to_def(&mut self, src: InFile<&ast::Use>) -> Option<UseId> {
    self.to_def(src, keys::USE)
}
pub(super) fn adt_to_def(
    &mut self,
-    InFile { file_id, value }: InFile<ast::Adt>,
+    InFile { file_id, value }: InFile<&ast::Adt>,
) -> Option<AdtId> {
    match value {
        ast::Adt::Enum(it) => self.enum_to_def(InFile::new(file_id, it)).map(AdtId::EnumId),
@ -238,11 +250,11 @@ impl SourceToDefCtx<'_, '_> {
} }
pub(super) fn bind_pat_to_def( pub(super) fn bind_pat_to_def(
&mut self, &mut self,
src: InFile<ast::IdentPat>, src: InFile<&ast::IdentPat>,
) -> Option<(DefWithBodyId, BindingId)> { ) -> Option<(DefWithBodyId, BindingId)> {
let container = self.find_pat_or_label_container(src.syntax())?; let container = self.find_pat_or_label_container(src.syntax_ref())?;
let (body, source_map) = self.db.body_with_source_map(container); let (body, source_map) = self.db.body_with_source_map(container);
let src = src.map(ast::Pat::from); let src = src.cloned().map(ast::Pat::from);
let pat_id = source_map.node_pat(src.as_ref())?; let pat_id = source_map.node_pat(src.as_ref())?;
// the pattern could resolve to a constant, verify that that is not the case // the pattern could resolve to a constant, verify that that is not the case
if let crate::Pat::Bind { id, .. } = body[pat_id] { if let crate::Pat::Bind { id, .. } = body[pat_id] {
@ -253,25 +265,33 @@ impl SourceToDefCtx<'_, '_> {
} }
pub(super) fn self_param_to_def( pub(super) fn self_param_to_def(
&mut self, &mut self,
-src: InFile<ast::SelfParam>,
+src: InFile<&ast::SelfParam>,
) -> Option<(DefWithBodyId, BindingId)> {
-    let container = self.find_pat_or_label_container(src.syntax())?;
+    let container = self.find_pat_or_label_container(src.syntax_ref())?;
    let body = self.db.body(container);
    Some((container, body.self_param?))
}
pub(super) fn label_to_def(
    &mut self,
-    src: InFile<ast::Label>,
+    src: InFile<&ast::Label>,
) -> Option<(DefWithBodyId, LabelId)> {
-    let container = self.find_pat_or_label_container(src.syntax())?;
+    let container = self.find_pat_or_label_container(src.syntax_ref())?;
    let (_body, source_map) = self.db.body_with_source_map(container);
-    let label_id = source_map.node_label(src.as_ref())?;
+    let label_id = source_map.node_label(src)?;
Some((container, label_id)) Some((container, label_id))
} }
-pub(super) fn item_to_macro_call(&mut self, src: InFile<ast::Item>) -> Option<MacroCallId> {
-    let map = self.dyn_map(src.as_ref())?;
-    map[keys::ATTR_MACRO_CALL].get(&src.value).copied()
+pub(super) fn item_to_macro_call(&mut self, src: InFile<&ast::Item>) -> Option<MacroCallId> {
+    let map = self.dyn_map(src)?;
+    map[keys::ATTR_MACRO_CALL].get(&AstPtr::new(src.value)).copied()
}
pub(super) fn macro_call_to_macro_call(
&mut self,
src: InFile<&ast::MacroCall>,
) -> Option<MacroCallId> {
let map = self.dyn_map(src)?;
map[keys::MACRO_CALL].get(&AstPtr::new(src.value)).copied()
} }
/// (AttrId, derive attribute call id, derive call ids) /// (AttrId, derive attribute call id, derive call ids)
@ -282,7 +302,7 @@ impl SourceToDefCtx<'_, '_> {
) -> Option<(AttrId, MacroCallId, &[Option<MacroCallId>])> { ) -> Option<(AttrId, MacroCallId, &[Option<MacroCallId>])> {
let map = self.dyn_map(item)?; let map = self.dyn_map(item)?;
map[keys::DERIVE_MACRO_CALL] map[keys::DERIVE_MACRO_CALL]
-.get(&src.value)
+.get(&AstPtr::new(&src.value))
.map(|&(attr_id, call_id, ref ids)| (attr_id, call_id, &**ids)) .map(|&(attr_id, call_id, ref ids)| (attr_id, call_id, &**ids))
} }
@ -292,10 +312,10 @@ impl SourceToDefCtx<'_, '_> {
fn to_def<Ast: AstNode + 'static, ID: Copy + 'static>( fn to_def<Ast: AstNode + 'static, ID: Copy + 'static>(
&mut self, &mut self,
-src: InFile<Ast>,
+src: InFile<&Ast>,
key: Key<Ast, ID>,
) -> Option<ID> {
-    self.dyn_map(src.as_ref())?[key].get(&src.value).copied()
+    self.dyn_map(src)?[key].get(&AstPtr::new(src.value)).copied()
} }
fn dyn_map<Ast: AstNode + 'static>(&mut self, src: InFile<&Ast>) -> Option<&DynMap> { fn dyn_map<Ast: AstNode + 'static>(&mut self, src: InFile<&Ast>) -> Option<&DynMap> {
@ -310,33 +330,42 @@ impl SourceToDefCtx<'_, '_> {
.or_insert_with(|| container.child_by_source(db, file_id)) .or_insert_with(|| container.child_by_source(db, file_id))
} }
-pub(super) fn type_param_to_def(&mut self, src: InFile<ast::TypeParam>) -> Option<TypeParamId> {
-    let container: ChildContainer = self.find_generic_param_container(src.syntax())?.into();
+pub(super) fn type_param_to_def(
+    &mut self,
+    src: InFile<&ast::TypeParam>,
+) -> Option<TypeParamId> {
+    let container: ChildContainer = self.find_generic_param_container(src.syntax_ref())?.into();
    let dyn_map = self.cache_for(container, src.file_id);
-    dyn_map[keys::TYPE_PARAM].get(&src.value).copied().map(TypeParamId::from_unchecked)
+    dyn_map[keys::TYPE_PARAM]
+        .get(&AstPtr::new(src.value))
+        .copied()
+        .map(TypeParamId::from_unchecked)
}
pub(super) fn lifetime_param_to_def(
    &mut self,
-    src: InFile<ast::LifetimeParam>,
+    src: InFile<&ast::LifetimeParam>,
) -> Option<LifetimeParamId> {
-    let container: ChildContainer = self.find_generic_param_container(src.syntax())?.into();
+    let container: ChildContainer = self.find_generic_param_container(src.syntax_ref())?.into();
    let dyn_map = self.cache_for(container, src.file_id);
-    dyn_map[keys::LIFETIME_PARAM].get(&src.value).copied()
+    dyn_map[keys::LIFETIME_PARAM].get(&AstPtr::new(src.value)).copied()
}
pub(super) fn const_param_to_def(
    &mut self,
-    src: InFile<ast::ConstParam>,
+    src: InFile<&ast::ConstParam>,
) -> Option<ConstParamId> {
-    let container: ChildContainer = self.find_generic_param_container(src.syntax())?.into();
+    let container: ChildContainer = self.find_generic_param_container(src.syntax_ref())?.into();
    let dyn_map = self.cache_for(container, src.file_id);
-    dyn_map[keys::CONST_PARAM].get(&src.value).copied().map(ConstParamId::from_unchecked)
+    dyn_map[keys::CONST_PARAM]
+        .get(&AstPtr::new(src.value))
+        .copied()
+        .map(ConstParamId::from_unchecked)
}
pub(super) fn generic_param_to_def(
    &mut self,
-    InFile { file_id, value }: InFile<ast::GenericParam>,
+    InFile { file_id, value }: InFile<&ast::GenericParam>,
) -> Option<GenericParamId> { ) -> Option<GenericParamId> {
match value { match value {
ast::GenericParam::ConstParam(it) => { ast::GenericParam::ConstParam(it) => {
@ -351,34 +380,111 @@ impl SourceToDefCtx<'_, '_> {
} }
} }
-pub(super) fn macro_to_def(&mut self, src: InFile<ast::Macro>) -> Option<MacroId> {
-    self.dyn_map(src.as_ref()).and_then(|it| match &src.value {
+pub(super) fn macro_to_def(&mut self, src: InFile<&ast::Macro>) -> Option<MacroId> {
+    self.dyn_map(src).and_then(|it| match src.value {
        ast::Macro::MacroRules(value) => {
-            it[keys::MACRO_RULES].get(value).copied().map(MacroId::from)
+            it[keys::MACRO_RULES].get(&AstPtr::new(value)).copied().map(MacroId::from)
        }
-        ast::Macro::MacroDef(value) => it[keys::MACRO2].get(value).copied().map(MacroId::from),
+        ast::Macro::MacroDef(value) => {
+            it[keys::MACRO2].get(&AstPtr::new(value)).copied().map(MacroId::from)
+        }
    })
}
-pub(super) fn proc_macro_to_def(&mut self, src: InFile<ast::Fn>) -> Option<MacroId> {
-    self.dyn_map(src.as_ref())
-        .and_then(|it| it[keys::PROC_MACRO].get(&src.value).copied().map(MacroId::from))
+pub(super) fn proc_macro_to_def(&mut self, src: InFile<&ast::Fn>) -> Option<MacroId> {
+    self.dyn_map(src).and_then(|it| {
+        it[keys::PROC_MACRO].get(&AstPtr::new(src.value)).copied().map(MacroId::from)
+    })
}
pub(super) fn find_container(&mut self, src: InFile<&SyntaxNode>) -> Option<ChildContainer> { pub(super) fn find_container(&mut self, src: InFile<&SyntaxNode>) -> Option<ChildContainer> {
-for container in src.ancestors_with_macros(self.db.upcast()) {
-    if let Some(res) = self.container_to_def(container) {
-        return Some(res);
-    }
-}
+let def =
+    self.ancestors_with_macros(src, |this, container| this.container_to_def(container));
+if let Some(def) = def {
+    return Some(def);
+}
let def = self.file_to_def(src.file_id.original_file(self.db.upcast())).first().copied()?; let def = self.file_to_def(src.file_id.original_file(self.db.upcast())).first().copied()?;
Some(def.into()) Some(def.into())
} }
/// Skips the attributed item that caused the macro invocation we are climbing up
fn ancestors_with_macros<T>(
&mut self,
node: InFile<&SyntaxNode>,
mut cb: impl FnMut(&mut Self, InFile<SyntaxNode>) -> Option<T>,
) -> Option<T> {
use hir_expand::MacroFileIdExt;
let parent = |this: &mut Self, node: InFile<&SyntaxNode>| match node.value.parent() {
Some(parent) => Some(node.with_value(parent)),
None => {
let macro_file = node.file_id.macro_file()?;
let expansion_info = this
.expansion_info_cache
.entry(macro_file)
.or_insert_with(|| macro_file.expansion_info(this.db.upcast()));
expansion_info.call_node().map(|node| node?.parent()).transpose()
}
};
let mut node = node.cloned();
while let Some(parent) = parent(self, node.as_ref()) {
if let Some(res) = cb(self, parent.clone()) {
return Some(res);
}
node = parent;
}
None
}
fn find_generic_param_container(&mut self, src: InFile<&SyntaxNode>) -> Option<GenericDefId> {
self.ancestors_with_macros(src, |this, InFile { file_id, value }| {
let item = ast::Item::cast(value)?;
match &item {
ast::Item::Fn(it) => this.fn_to_def(InFile::new(file_id, it)).map(Into::into),
ast::Item::Struct(it) => {
this.struct_to_def(InFile::new(file_id, it)).map(Into::into)
}
ast::Item::Enum(it) => this.enum_to_def(InFile::new(file_id, it)).map(Into::into),
ast::Item::Trait(it) => this.trait_to_def(InFile::new(file_id, it)).map(Into::into),
ast::Item::TraitAlias(it) => {
this.trait_alias_to_def(InFile::new(file_id, it)).map(Into::into)
}
ast::Item::TypeAlias(it) => {
this.type_alias_to_def(InFile::new(file_id, it)).map(Into::into)
}
ast::Item::Impl(it) => this.impl_to_def(InFile::new(file_id, it)).map(Into::into),
_ => None,
}
})
}
fn find_pat_or_label_container(&mut self, src: InFile<&SyntaxNode>) -> Option<DefWithBodyId> {
self.ancestors_with_macros(src, |this, InFile { file_id, value }| {
let item = match ast::Item::cast(value.clone()) {
Some(it) => it,
None => {
let variant = ast::Variant::cast(value.clone())?;
return this
.enum_variant_to_def(InFile::new(file_id, &variant))
.map(Into::into);
}
};
match &item {
ast::Item::Fn(it) => this.fn_to_def(InFile::new(file_id, it)).map(Into::into),
ast::Item::Const(it) => this.const_to_def(InFile::new(file_id, it)).map(Into::into),
ast::Item::Static(it) => {
this.static_to_def(InFile::new(file_id, it)).map(Into::into)
}
_ => None,
}
})
}
fn container_to_def(&mut self, container: InFile<SyntaxNode>) -> Option<ChildContainer> { fn container_to_def(&mut self, container: InFile<SyntaxNode>) -> Option<ChildContainer> {
let cont = if let Some(item) = ast::Item::cast(container.value.clone()) { let cont = if let Some(item) = ast::Item::cast(container.value.clone()) {
-match item {
+match &item {
ast::Item::Module(it) => self.module_to_def(container.with_value(it))?.into(), ast::Item::Module(it) => self.module_to_def(container.with_value(it))?.into(),
ast::Item::Trait(it) => self.trait_to_def(container.with_value(it))?.into(), ast::Item::Trait(it) => self.trait_to_def(container.with_value(it))?.into(),
ast::Item::TraitAlias(it) => { ast::Item::TraitAlias(it) => {
@ -413,63 +519,11 @@ impl SourceToDefCtx<'_, '_> {
} }
} else { } else {
let it = ast::Variant::cast(container.value)?; let it = ast::Variant::cast(container.value)?;
let def = self.enum_variant_to_def(InFile::new(container.file_id, it))?; let def = self.enum_variant_to_def(InFile::new(container.file_id, &it))?;
DefWithBodyId::from(def).into() DefWithBodyId::from(def).into()
}; };
Some(cont) Some(cont)
} }
fn find_generic_param_container(&mut self, src: InFile<&SyntaxNode>) -> Option<GenericDefId> {
let ancestors = src.ancestors_with_macros(self.db.upcast());
for InFile { file_id, value } in ancestors {
let item = match ast::Item::cast(value) {
Some(it) => it,
None => continue,
};
let res: GenericDefId = match item {
ast::Item::Fn(it) => self.fn_to_def(InFile::new(file_id, it))?.into(),
ast::Item::Struct(it) => self.struct_to_def(InFile::new(file_id, it))?.into(),
ast::Item::Union(it) => self.union_to_def(InFile::new(file_id, it))?.into(),
ast::Item::Enum(it) => self.enum_to_def(InFile::new(file_id, it))?.into(),
ast::Item::Trait(it) => self.trait_to_def(InFile::new(file_id, it))?.into(),
ast::Item::TraitAlias(it) => {
self.trait_alias_to_def(InFile::new(file_id, it))?.into()
}
ast::Item::TypeAlias(it) => {
self.type_alias_to_def(InFile::new(file_id, it))?.into()
}
ast::Item::Impl(it) => self.impl_to_def(InFile::new(file_id, it))?.into(),
_ => continue,
};
return Some(res);
}
None
}
fn find_pat_or_label_container(&mut self, src: InFile<&SyntaxNode>) -> Option<DefWithBodyId> {
let ancestors = src.ancestors_with_macros(self.db.upcast());
for InFile { file_id, value } in ancestors {
let item = match ast::Item::cast(value.clone()) {
Some(it) => it,
None => {
if let Some(variant) = ast::Variant::cast(value.clone()) {
return self
.enum_variant_to_def(InFile::new(file_id, variant))
.map(Into::into);
}
continue;
}
};
let res: DefWithBodyId = match item {
ast::Item::Const(it) => self.const_to_def(InFile::new(file_id, it))?.into(),
ast::Item::Static(it) => self.static_to_def(InFile::new(file_id, it))?.into(),
ast::Item::Fn(it) => self.fn_to_def(InFile::new(file_id, it))?.into(),
_ => continue,
};
return Some(res);
}
None
}
} }
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
@ -501,6 +555,7 @@ impl_from! {
impl ChildContainer { impl ChildContainer {
fn child_by_source(self, db: &dyn HirDatabase, file_id: HirFileId) -> DynMap { fn child_by_source(self, db: &dyn HirDatabase, file_id: HirFileId) -> DynMap {
let _p = tracing::span!(tracing::Level::INFO, "ChildContainer::child_by_source").entered();
let db = db.upcast(); let db = db.upcast();
match self { match self {
ChildContainer::DefWithBodyId(it) => it.child_by_source(db, file_id), ChildContainer::DefWithBodyId(it) => it.child_by_source(db, file_id),

View file

@ -24,11 +24,12 @@ use hir_def::{
LocalFieldId, Lookup, ModuleDefId, TraitId, VariantId, LocalFieldId, Lookup, ModuleDefId, TraitId, VariantId,
}; };
use hir_expand::{ use hir_expand::{
builtin_fn_macro::BuiltinFnLikeExpander,
mod_path::path, mod_path::path,
name,
name::{AsName, Name},
HirFileId, InFile, InMacroFile, MacroFileId, MacroFileIdExt, HirFileId, InFile, InMacroFile, MacroFileId, MacroFileIdExt,
{
name,
name::{AsName, Name},
},
}; };
use hir_ty::{ use hir_ty::{
diagnostics::{ diagnostics::{
@ -822,6 +823,8 @@ impl SourceAnalyzer {
macro_call: InFile<&ast::MacroCall>, macro_call: InFile<&ast::MacroCall>,
) -> Option<MacroFileId> { ) -> Option<MacroFileId> {
let krate = self.resolver.krate(); let krate = self.resolver.krate();
// FIXME: This causes us to parse, generally this is the wrong approach for resolving a
// macro call to a macro call id!
let macro_call_id = macro_call.as_call_id(db.upcast(), krate, |path| { let macro_call_id = macro_call.as_call_id(db.upcast(), krate, |path| {
self.resolver.resolve_path_as_macro_def(db.upcast(), &path, Some(MacroSubNs::Bang)) self.resolver.resolve_path_as_macro_def(db.upcast(), &path, Some(MacroSubNs::Bang))
})?; })?;
@ -851,13 +854,8 @@ impl SourceAnalyzer {
hir_def::MacroId::MacroRulesId(it) => it.lookup(db.upcast()).expander, hir_def::MacroId::MacroRulesId(it) => it.lookup(db.upcast()).expander,
_ => hir_def::MacroExpander::Declarative, _ => hir_def::MacroExpander::Declarative,
}; };
-match ex {
-    hir_def::MacroExpander::BuiltIn(e)
-        if e == BuiltinFnLikeExpander::Asm || e == BuiltinFnLikeExpander::GlobalAsm =>
-    {
-        return true
-    }
-    _ => (),
-}
+if matches!(ex, hir_def::MacroExpander::BuiltIn(ex) if ex.is_asm()) {
+    return true;
+}
} }
let macro_expr = match macro_call let macro_expr = match macro_call

View file

@ -74,7 +74,7 @@ fn integrated_highlighting_benchmark() {
host.apply_change(change); host.apply_change(change);
} }
let _g = crate::tracing::hprof::init("*>20"); let _g = crate::tracing::hprof::init("*>10");
{ {
let _it = stdx::timeit("after change"); let _it = stdx::timeit("after change");