diff --git a/crates/hir-def/src/body.rs b/crates/hir-def/src/body.rs
index d2f4d7b7e5..a796ef33c6 100644
--- a/crates/hir-def/src/body.rs
+++ b/crates/hir-def/src/body.rs
@@ -395,6 +395,12 @@ impl BodySourceMap {
         self.expr_map.get(&src).copied()
     }

+    pub fn expansions(
+        &self,
+    ) -> impl Iterator<Item = (&InFile<AstPtr<ast::MacroCall>>, &MacroFileId)> {
+        self.expansions.iter()
+    }
+
     pub fn implicit_format_args(
         &self,
         node: InFile<&ast::FormatArgsExpr>,
diff --git a/crates/hir-def/src/body/lower.rs b/crates/hir-def/src/body/lower.rs
index c6d9ba6cfe..5a5a8d302b 100644
--- a/crates/hir-def/src/body/lower.rs
+++ b/crates/hir-def/src/body/lower.rs
@@ -12,6 +12,7 @@ use intern::Interned;
 use rustc_hash::FxHashMap;
 use smallvec::SmallVec;
 use span::AstIdMap;
+use stdx::never;
 use syntax::{
     ast::{
         self, ArrayExprKind, AstChildren, BlockExpr, HasArgList, HasAttrs, HasLoopBody, HasName,
@@ -480,7 +481,8 @@ impl ExprCollector<'_> {
             } else if e.const_token().is_some() {
                 Mutability::Shared
             } else {
-                unreachable!("parser only remaps to raw_token() if matching mutability token follows")
+                never!("parser only remaps to raw_token() if matching mutability token follows");
+                Mutability::Shared
             }
         } else {
             Mutability::from_mutable(e.mut_token().is_some())
@@ -1006,9 +1008,9 @@ impl ExprCollector<'_> {
             Some((mark, expansion)) => {
                 // Keep collecting even with expansion errors so we can provide completions and
                 // other services in incomplete macro expressions.
-                self.source_map
-                    .expansions
-                    .insert(macro_call_ptr, self.expander.current_file_id().macro_file().unwrap());
+                if let Some(macro_file) = self.expander.current_file_id().macro_file() {
+                    self.source_map.expansions.insert(macro_call_ptr, macro_file);
+                }
                 let prev_ast_id_map = mem::replace(
                     &mut self.ast_id_map,
                     self.db.ast_id_map(self.expander.current_file_id()),
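The `lower.rs` hunk above swaps a hard `unreachable!` for `stdx::never!` plus a fallback value, so a surprising parser state degrades into a logged error instead of crashing the IDE. A minimal, self-contained sketch of that pattern (not rust-analyzer's actual `stdx::never!`, which can also panic in debug builds):

```rust
// Illustrative stand-in for `stdx::never!`: report the "impossible" case and
// let the caller continue with a conservative default instead of panicking.
macro_rules! never {
    ($($msg:tt)*) => {
        eprintln!($($msg)*)
    };
}

#[derive(Debug, Clone, Copy)]
enum Mutability {
    Shared,
    Mut,
}

fn raw_ptr_mutability(has_mut: bool, has_const: bool) -> Mutability {
    if has_mut {
        Mutability::Mut
    } else if has_const {
        Mutability::Shared
    } else {
        // Previously `unreachable!(..)`; now we log and fall back.
        never!("parser only remaps to raw_token() if matching mutability token follows");
        Mutability::Shared
    }
}

fn main() {
    println!("{:?}", raw_ptr_mutability(false, false));
}
```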
diff --git a/crates/hir-def/src/child_by_source.rs b/crates/hir-def/src/child_by_source.rs
index 0b41984bdd..106109eb18 100644
--- a/crates/hir-def/src/child_by_source.rs
+++ b/crates/hir-def/src/child_by_source.rs
@@ -6,7 +6,7 @@

 use either::Either;
 use hir_expand::{attrs::collect_attrs, HirFileId};
-use syntax::ast;
+use syntax::{ast, AstPtr};

 use crate::{
     db::DefDatabase,
@@ -38,7 +38,7 @@ impl ChildBySource for TraitId {

         data.attribute_calls().filter(|(ast_id, _)| ast_id.file_id == file_id).for_each(
             |(ast_id, call_id)| {
-                res[keys::ATTR_MACRO_CALL].insert(ast_id.to_node(db.upcast()), call_id);
+                res[keys::ATTR_MACRO_CALL].insert(ast_id.to_ptr(db.upcast()), call_id);
             },
         );
         data.items.iter().for_each(|&(_, item)| {
@@ -50,9 +50,10 @@ impl ChildBySource for TraitId {
 impl ChildBySource for ImplId {
     fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) {
         let data = db.impl_data(*self);
+        // FIXME: Macro calls
         data.attribute_calls().filter(|(ast_id, _)| ast_id.file_id == file_id).for_each(
             |(ast_id, call_id)| {
-                res[keys::ATTR_MACRO_CALL].insert(ast_id.to_node(db.upcast()), call_id);
+                res[keys::ATTR_MACRO_CALL].insert(ast_id.to_ptr(db.upcast()), call_id);
             },
         );
         data.items.iter().for_each(|&item| {
@@ -80,7 +81,7 @@ impl ChildBySource for ItemScope {
             .for_each(|konst| insert_item_loc(db, res, file_id, konst, keys::CONST));
         self.attr_macro_invocs().filter(|(id, _)| id.file_id == file_id).for_each(
             |(ast_id, call_id)| {
-                res[keys::ATTR_MACRO_CALL].insert(ast_id.to_node(db.upcast()), call_id);
+                res[keys::ATTR_MACRO_CALL].insert(ast_id.to_ptr(db.upcast()), call_id);
             },
         );
         self.legacy_macros().for_each(|(_, ids)| {
@@ -88,7 +89,7 @@ impl ChildBySource for ItemScope {
                 if let MacroId::MacroRulesId(id) = id {
                     let loc = id.lookup(db);
                     if loc.id.file_id() == file_id {
-                        res[keys::MACRO_RULES].insert(loc.source(db).value, id);
+                        res[keys::MACRO_RULES].insert(loc.ast_ptr(db).value, id);
                     }
                 }
             })
@@ -100,12 +101,18 @@
                     if let Some((_, Either::Left(attr))) =
                         collect_attrs(&adt).nth(attr_id.ast_index())
                     {
-                        res[keys::DERIVE_MACRO_CALL].insert(attr, (attr_id, call_id, calls.into()));
+                        res[keys::DERIVE_MACRO_CALL]
+                            .insert(AstPtr::new(&attr), (attr_id, call_id, calls.into()));
                     }
                 });
             },
         );
-
+        self.iter_macro_invoc().filter(|(id, _)| id.file_id == file_id).for_each(
+            |(ast_id, &call)| {
+                let ast = ast_id.to_ptr(db.upcast());
+                res[keys::MACRO_CALL].insert(ast, call);
+            },
+        );
         fn add_module_def(
             db: &dyn DefDatabase,
             map: &mut DynMap,
@@ -155,8 +162,8 @@ impl ChildBySource for VariantId {
         for (local_id, source) in arena_map.value.iter() {
             let id = FieldId { parent, local_id };
             match source.clone() {
-                Either::Left(source) => res[keys::TUPLE_FIELD].insert(source, id),
-                Either::Right(source) => res[keys::RECORD_FIELD].insert(source, id),
+                Either::Left(source) => res[keys::TUPLE_FIELD].insert(AstPtr::new(&source), id),
+                Either::Right(source) => res[keys::RECORD_FIELD].insert(AstPtr::new(&source), id),
             }
         }
     }
@@ -171,29 +178,30 @@ impl ChildBySource for EnumId {
         let tree = loc.id.item_tree(db);
         let ast_id_map = db.ast_id_map(loc.id.file_id());
-        let root = db.parse_or_expand(loc.id.file_id());

         db.enum_data(*self).variants.iter().for_each(|&(variant, _)| {
-            res[keys::ENUM_VARIANT].insert(
-                ast_id_map.get(tree[variant.lookup(db).id.value].ast_id).to_node(&root),
-                variant,
-            );
+            res[keys::ENUM_VARIANT]
+                .insert(ast_id_map.get(tree[variant.lookup(db).id.value].ast_id), variant);
         });
     }
 }

 impl ChildBySource for DefWithBodyId {
     fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) {
-        let body = db.body(*self);
+        let (body, sm) = db.body_with_source_map(*self);

         if let &DefWithBodyId::VariantId(v) = self {
             VariantId::EnumVariantId(v).child_by_source_to(db, res, file_id)
         }

+        sm.expansions().filter(|(ast, _)| ast.file_id == file_id).for_each(|(ast, &exp_id)| {
+            res[keys::MACRO_CALL].insert(ast.value, exp_id.macro_call_id);
+        });
+
         for (block, def_map) in body.blocks(db) {
             // All block expressions are merged into the same map, because they logically all add
             // inner items to the containing `DefWithBodyId`.
             def_map[DefMap::ROOT].scope.child_by_source_to(db, res, file_id);
-            res[keys::BLOCK].insert(block.lookup(db).ast_id.to_node(db.upcast()), block);
+            res[keys::BLOCK].insert(block.lookup(db).ast_id.to_ptr(db.upcast()), block);
         }
     }
 }
@@ -220,13 +228,17 @@ impl ChildBySource for GenericDefId {
         {
             let id = TypeOrConstParamId { parent: *self, local_id };
             match ast_param {
-                ast::TypeOrConstParam::Type(a) => res[keys::TYPE_PARAM].insert(a, id),
-                ast::TypeOrConstParam::Const(a) => res[keys::CONST_PARAM].insert(a, id),
+                ast::TypeOrConstParam::Type(a) => {
+                    res[keys::TYPE_PARAM].insert(AstPtr::new(&a), id)
+                }
+                ast::TypeOrConstParam::Const(a) => {
+                    res[keys::CONST_PARAM].insert(AstPtr::new(&a), id)
+                }
             }
         }
         for (local_id, ast_param) in lts_idx_iter.zip(generic_params_list.lifetime_params()) {
             let id = LifetimeParamId { parent: *self, local_id };
-            res[keys::LIFETIME_PARAM].insert(ast_param, id);
+            res[keys::LIFETIME_PARAM].insert(AstPtr::new(&ast_param), id);
         }
     }
 }
@@ -246,7 +258,7 @@ fn insert_item_loc(
     {
         let loc = id.lookup(db);
         if loc.item_tree_id().file_id() == file_id {
-            res[key].insert(loc.source(db).value, id)
+            res[key].insert(loc.ast_ptr(db).value, id)
         }
     }
diff --git a/crates/hir-def/src/dyn_map/keys.rs b/crates/hir-def/src/dyn_map/keys.rs
index f83ab1e1a0..9d330a7bf1 100644
--- a/crates/hir-def/src/dyn_map/keys.rs
+++ b/crates/hir-def/src/dyn_map/keys.rs
@@ -13,7 +13,7 @@ use crate::{
     TraitId, TypeAliasId, TypeOrConstParamId, UnionId, UseId,
 };

-pub type Key<K, V> = crate::dyn_map::Key<K, V, AstPtrPolicy<K, V>>;
+pub type Key<K, V> = crate::dyn_map::Key<AstPtr<K>, V, AstPtrPolicy<K, V>>;

 pub const BLOCK: Key<ast::BlockExpr, BlockId> = Key::new();
 pub const FUNCTION: Key<ast::Fn, FunctionId> = Key::new();
@@ -39,6 +39,7 @@ pub const LIFETIME_PARAM: Key<ast::LifetimeParam, LifetimeParamId> = Key::new();
 pub const MACRO_RULES: Key<ast::MacroRules, MacroRulesId> = Key::new();
 pub const MACRO2: Key<ast::MacroDef, Macro2Id> = Key::new();
 pub const PROC_MACRO: Key<ast::Fn, ProcMacroId> = Key::new();
+pub const MACRO_CALL: Key<ast::MacroCall, MacroCallId> = Key::new();
 pub const ATTR_MACRO_CALL: Key<ast::Item, MacroCallId> = Key::new();
 pub const DERIVE_MACRO_CALL: Key<ast::Attr, (AttrId, MacroCallId, Box<[Option<MacroCallId>]>)> =
     Key::new();
@@ -54,18 +55,16 @@ pub struct AstPtrPolicy<AST, ID> {
 }

 impl<AST: AstNode + 'static, ID: 'static> Policy for AstPtrPolicy<AST, ID> {
-    type K = AST;
+    type K = AstPtr<AST>;
     type V = ID;
-    fn insert(map: &mut DynMap, key: AST, value: ID) {
-        let key = AstPtr::new(&key);
+    fn insert(map: &mut DynMap, key: AstPtr<AST>, value: ID) {
         map.map
             .entry::<FxHashMap<AstPtr<AST>, ID>>()
             .or_insert_with(Default::default)
             .insert(key, value);
     }
-    fn get<'a>(map: &'a DynMap, key: &AST) -> Option<&'a ID> {
-        let key = AstPtr::new(key);
-        map.map.get::<FxHashMap<AstPtr<AST>, ID>>()?.get(&key)
+    fn get<'a>(map: &'a DynMap, key: &AstPtr<AST>) -> Option<&'a ID> {
+        map.map.get::<FxHashMap<AstPtr<AST>, ID>>()?.get(key)
     }
     fn is_empty(map: &DynMap) -> bool {
         map.map.get::<FxHashMap<AstPtr<AST>, ID>>().map_or(true, |it| it.is_empty())
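The `keys.rs` and `child_by_source.rs` hunks switch the dyn-map keys from owned AST nodes to `AstPtr<K>`, so entries can be recorded straight from an `AstId` via `to_ptr` without materializing or keeping alive the parsed tree. A self-contained toy version of that idea, with a plain byte range standing in for `AstPtr` (nothing here is rust-analyzer's real API):

```rust
use std::collections::HashMap;

/// Toy stand-in for `AstPtr<N>`: just the node's byte range, which is cheap
/// to copy and hash and does not own any syntax tree.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct NodePtr {
    start: u32,
    end: u32,
}

/// Toy stand-in for a parsed AST node.
struct Node {
    start: u32,
    end: u32,
}

impl Node {
    fn ptr(&self) -> NodePtr {
        NodePtr { start: self.start, end: self.end }
    }
}

fn main() {
    // Analogous to `res[keys::MACRO_CALL].insert(ast_id.to_ptr(db), call_id)`:
    // the map is keyed by the pointer, not by the node.
    let mut macro_calls: HashMap<NodePtr, u32> = HashMap::new();

    let node = Node { start: 10, end: 42 };
    macro_calls.insert(node.ptr(), 1);
    drop(node); // the tree can go away; the key stays valid

    // A later lookup only needs a pointer rebuilt from the same range.
    assert_eq!(macro_calls.get(&NodePtr { start: 10, end: 42 }), Some(&1));
}
```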
diff --git a/crates/hir-expand/src/builtin_fn_macro.rs b/crates/hir-expand/src/builtin_fn_macro.rs
index ba96ab6cc2..02fd431e4e 100644
--- a/crates/hir-expand/src/builtin_fn_macro.rs
+++ b/crates/hir-expand/src/builtin_fn_macro.rs
@@ -67,6 +67,10 @@ impl BuiltinFnLikeExpander {
         let span = span_with_def_site_ctxt(db, span, id);
         self.expander()(db, id, tt, span)
     }
+
+    pub fn is_asm(&self) -> bool {
+        matches!(self, Self::Asm | Self::GlobalAsm)
+    }
 }

 impl EagerExpander {
diff --git a/crates/hir-expand/src/files.rs b/crates/hir-expand/src/files.rs
index 1ba85c5c7e..743fac50f4 100644
--- a/crates/hir-expand/src/files.rs
+++ b/crates/hir-expand/src/files.rs
@@ -1,6 +1,4 @@
 //! Things to wrap other things in file ids.
-use std::iter;
-
 use either::Either;
 use span::{
     AstIdNode, ErasedFileAstId, FileAstId, FileId, FileRange, HirFileId, HirFileIdRepr,
@@ -150,27 +148,16 @@ impl<FileKind: Copy, N: AstNode> InFileWrapper<FileKind, N> {
     }
 }

+impl<FileKind: Copy, N: AstNode> InFileWrapper<FileKind, &N> {
+    // unfortunately `syntax` collides with the impl above, because `&_` is fundamental
+    pub fn syntax_ref(&self) -> InFileWrapper<FileKind, &SyntaxNode> {
+        self.with_value(self.value.syntax())
+    }
+}
+
 // region:specific impls

 impl InFile<&SyntaxNode> {
-    /// Traverse up macro calls and skips the macro invocation node
-    pub fn ancestors_with_macros(
-        self,
-        db: &dyn db::ExpandDatabase,
-    ) -> impl Iterator<Item = InFile<SyntaxNode>> + '_ {
-        let succ = move |node: &InFile<SyntaxNode>| match node.value.parent() {
-            Some(parent) => Some(node.with_value(parent)),
-            None => db
-                .lookup_intern_macro_call(node.file_id.macro_file()?.macro_call_id)
-                .to_node_item(db)
-                .syntax()
-                .cloned()
-                .map(|node| node.parent())
-                .transpose(),
-        };
-        iter::successors(succ(&self.cloned()), succ)
-    }
-
     /// Falls back to the macro call range if the node cannot be mapped up fully.
     ///
     /// For attributes and derives, this will point back to the attribute only.
diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs
index b34649d972..131625a96a 100644
--- a/crates/hir-expand/src/lib.rs
+++ b/crates/hir-expand/src/lib.rs
@@ -47,7 +47,7 @@ use crate::{
     builtin_attr_macro::BuiltinAttrExpander,
     builtin_derive_macro::BuiltinDeriveExpander,
     builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander},
-    db::{ExpandDatabase, TokenExpander},
+    db::ExpandDatabase,
     mod_path::ModPath,
     proc_macro::{CustomProcMacroExpander, ProcMacroKind},
     span_map::{ExpansionSpanMap, SpanMap},
@@ -253,9 +253,6 @@ pub trait HirFileIdExt {
     /// If this is a macro call, returns the syntax node of the very first macro call this file resides in.
     fn original_call_node(self, db: &dyn ExpandDatabase) -> Option<InRealFile<SyntaxNode>>;

-    /// Return expansion information if it is a macro-expansion file
-    fn expansion_info(self, db: &dyn ExpandDatabase) -> Option<ExpansionInfo>;
-
     fn as_builtin_derive_attr_node(&self, db: &dyn ExpandDatabase) -> Option<InFile<ast::Attr>>;
 }
@@ -309,11 +306,6 @@ impl HirFileIdExt for HirFileId {
         }
     }

-    /// Return expansion information if it is a macro-expansion file
-    fn expansion_info(self, db: &dyn ExpandDatabase) -> Option<ExpansionInfo> {
-        Some(ExpansionInfo::new(db, self.macro_file()?))
-    }
-
     fn as_builtin_derive_attr_node(&self, db: &dyn ExpandDatabase) -> Option<InFile<ast::Attr>> {
         let macro_file = self.macro_file()?;
         let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
@@ -417,8 +409,10 @@ impl MacroFileIdExt for MacroFileId {
     }

     fn is_attr_macro(&self, db: &dyn ExpandDatabase) -> bool {
-        let loc = db.lookup_intern_macro_call(self.macro_call_id);
-        matches!(loc.kind, MacroCallKind::Attr { .. })
+        matches!(
+            db.lookup_intern_macro_call(self.macro_call_id).def.kind,
+            MacroDefKind::BuiltInAttr(..) | MacroDefKind::ProcMacro(_, ProcMacroKind::Attr, _)
+        )
     }

     fn is_derive_attr_pseudo_expansion(&self, db: &dyn ExpandDatabase) -> bool {
@@ -703,16 +697,12 @@ impl MacroCallKind {
 // simpler function calls if the map is only used once
 #[derive(Clone, Debug, PartialEq, Eq)]
 pub struct ExpansionInfo {
-    pub expanded: InMacroFile<SyntaxNode>,
+    expanded: InMacroFile<SyntaxNode>,
     /// The argument TokenTree or item for attributes
     arg: InFile<Option<SyntaxNode>>,
-    /// The `macro_rules!` or attribute input.
-    attr_input_or_mac_def: Option<InFile<ast::TokenTree>>,
-
-    macro_def: TokenExpander,
-    macro_arg: Arc<tt::Subtree>,
-    pub exp_map: Arc<ExpansionSpanMap>,
+    exp_map: Arc<ExpansionSpanMap>,
     arg_map: SpanMap,
+    loc: MacroCallLoc,
 }

 impl ExpansionInfo {
@@ -720,14 +710,21 @@ impl ExpansionInfo {
         self.expanded.clone()
     }

-    pub fn call_node(&self) -> Option<InFile<SyntaxNode>> {
-        Some(self.arg.with_value(self.arg.value.as_ref()?.parent()?))
+    pub fn call_node(&self) -> InFile<Option<SyntaxNode>> {
+        self.arg.with_value(self.arg.value.as_ref().and_then(SyntaxNode::parent))
     }

     pub fn call_file(&self) -> HirFileId {
         self.arg.file_id
     }

+    pub fn is_attr(&self) -> bool {
+        matches!(
+            self.loc.def.kind,
+            MacroDefKind::BuiltInAttr(..) | MacroDefKind::ProcMacro(_, ProcMacroKind::Attr, _)
+        )
+    }
+
     /// Maps the passed in file range down into a macro expansion if it is the input to a macro call.
     ///
     /// Note this does a linear search through the entire backing vector of the spanmap.
@@ -812,49 +809,15 @@ impl ExpansionInfo {
     }

     pub fn new(db: &dyn ExpandDatabase, macro_file: MacroFileId) -> ExpansionInfo {
-        let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+        let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
         let arg_tt = loc.kind.arg(db);
         let arg_map = db.span_map(arg_tt.file_id);
-        let macro_def = db.macro_expander(loc.def);
         let (parse, exp_map) = db.parse_macro_expansion(macro_file).value;
         let expanded = InMacroFile { file_id: macro_file, value: parse.syntax_node() };
-        let (macro_arg, _, _) =
-            db.macro_arg_considering_derives(macro_file.macro_call_id, &loc.kind);
-
-        let def = loc.def.ast_id().left().and_then(|id| {
-            let def_tt = match id.to_node(db) {
-                ast::Macro::MacroRules(mac) => mac.token_tree()?,
-                ast::Macro::MacroDef(_) if matches!(macro_def, TokenExpander::BuiltInAttr(_)) => {
-                    return None
-                }
-                ast::Macro::MacroDef(mac) => mac.body()?,
-            };
-            Some(InFile::new(id.file_id, def_tt))
-        });
-        let attr_input_or_mac_def = def.or_else(|| match loc.kind {
-            MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
-                // FIXME: handle `cfg_attr`
-                let tt = collect_attrs(&ast_id.to_node(db))
-                    .nth(invoc_attr_index.ast_index())
-                    .and_then(|x| Either::left(x.1))?
-                    .token_tree()?;
-                Some(InFile::new(ast_id.file_id, tt))
-            }
-            _ => None,
-        });
-
-        ExpansionInfo {
-            expanded,
-            arg: arg_tt,
-            attr_input_or_mac_def,
-            macro_arg,
-            macro_def,
-            exp_map,
-            arg_map,
-        }
+        ExpansionInfo { expanded, loc, arg: arg_tt, exp_map, arg_map }
     }
 }
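`call_node()` above now returns `InFile<Option<SyntaxNode>>` rather than `Option<InFile<SyntaxNode>>`, and the semantics.rs hunks below pair it with `.transpose()` (or `.value`) to get back to an `Option` when needed. A standalone sketch of that wrapper-with-optional-value shape, using a hypothetical `InFileish` type in place of hir-expand's real `InFile`:

```rust
/// Hypothetical miniature of `InFile`: a value tagged with the file it lives in.
#[derive(Debug, PartialEq)]
struct InFileish<T> {
    file_id: u32,
    value: T,
}

impl<T> InFileish<T> {
    fn with_value<U>(&self, value: U) -> InFileish<U> {
        InFileish { file_id: self.file_id, value }
    }
}

impl<T> InFileish<Option<T>> {
    /// Turn "a file position that may lack a node" into "maybe a node in a file".
    fn transpose(self) -> Option<InFileish<T>> {
        let file_id = self.file_id;
        self.value.map(|value| InFileish { file_id, value })
    }
}

fn main() {
    let arg = InFileish { file_id: 0, value: Some("macro_call_node") };
    let call_node: InFileish<Option<&str>> = arg.with_value(arg.value);
    assert_eq!(call_node.transpose(), Some(InFileish { file_id: 0, value: "macro_call_node" }));

    let missing: InFileish<Option<&str>> = InFileish { file_id: 1, value: None };
    assert_eq!(missing.transpose(), None);
}
```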
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index 0cde3f000a..1eab509b7b 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -19,8 +19,8 @@ use hir_def::{
     AsMacroCall, DefWithBodyId, FunctionId, MacroId, TraitId, VariantId,
 };
 use hir_expand::{
-    attrs::collect_attrs, db::ExpandDatabase, files::InRealFile, name::AsName, ExpansionInfo,
-    InMacroFile, MacroCallId, MacroFileId, MacroFileIdExt,
+    attrs::collect_attrs, db::ExpandDatabase, files::InRealFile, name::AsName, InMacroFile,
+    MacroCallId, MacroFileId, MacroFileIdExt,
 };
 use itertools::Itertools;
 use rustc_hash::{FxHashMap, FxHashSet};
@@ -129,12 +129,9 @@ pub struct Semantics<'db, DB> {

 pub struct SemanticsImpl<'db> {
     pub db: &'db dyn HirDatabase,
-    s2d_cache: RefCell<SourceToDefCache>,
+    s2d_cache: RefCell<(SourceToDefCache, FxHashMap<MacroFileId, hir_expand::ExpansionInfo>)>,
     /// Rootnode to HirFileId cache
     root_to_file_cache: RefCell<FxHashMap<SyntaxNode, HirFileId>>,
-    // These 2 caches are mainly useful for semantic highlighting as nothing else descends a lot of tokens
-    // So we might wanna move them out into something specific for semantic highlighting
-    expansion_info_cache: RefCell<FxHashMap<MacroFileId, ExpansionInfo>>,
     /// MacroCall to its expansion's MacroFileId cache
     macro_call_cache: RefCell<FxHashMap<InFile<ast::MacroCall>, MacroFileId>>,
 }
@@ -295,7 +292,6 @@ impl<'db> SemanticsImpl<'db> {
             db,
             s2d_cache: Default::default(),
             root_to_file_cache: Default::default(),
-            expansion_info_cache: Default::default(),
             macro_call_cache: Default::default(),
         }
     }
@@ -314,7 +310,16 @@ impl<'db> SemanticsImpl<'db> {

     pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
         let sa = self.analyze_no_infer(macro_call.syntax())?;
-        let file_id = sa.expand(self.db, InFile::new(sa.file_id, macro_call))?;
+
+        let macro_call = InFile::new(sa.file_id, macro_call);
+        let file_id = if let Some(call) =
+            <ast::MacroCall as crate::semantics::ToDef>::to_def(self, macro_call)
+        {
+            call.as_macro_file()
+        } else {
+            sa.expand(self.db, macro_call)?
+        };
+
         let node = self.parse_or_expand(file_id.into());
         Some(node)
     }
@@ -322,7 +327,7 @@ impl<'db> SemanticsImpl<'db> {

     /// If `item` has an attribute macro attached to it, expands it.
     pub fn expand_attr_macro(&self, item: &ast::Item) -> Option<SyntaxNode> {
         let src = self.wrap_node_infile(item.clone());
-        let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(src))?;
+        let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(src.as_ref()))?;
         Some(self.parse_or_expand(macro_call_id.as_file()))
     }
@@ -341,9 +346,7 @@ impl<'db> SemanticsImpl<'db> {
             Some(
                 calls
                     .into_iter()
-                    .map(|call| {
-                        macro_call_to_macro_id(ctx, self.db.upcast(), call?).map(|id| Macro { id })
-                    })
+                    .map(|call| macro_call_to_macro_id(ctx, call?).map(|id| Macro { id }))
                     .collect(),
             )
         })
     })
@@ -403,7 +406,7 @@ impl<'db> SemanticsImpl<'db> {

     pub fn is_attr_macro_call(&self, item: &ast::Item) -> bool {
         let file_id = self.find_file(item.syntax()).file_id;
-        let src = InFile::new(file_id, item.clone());
+        let src = InFile::new(file_id, item);
         self.with_ctx(|ctx| ctx.item_to_macro_call(src).is_some())
     }
@@ -453,7 +456,7 @@ impl<'db> SemanticsImpl<'db> {
         token_to_map: SyntaxToken,
     ) -> Option<(SyntaxNode, SyntaxToken)> {
         let macro_call = self.wrap_node_infile(actual_macro_call.clone());
-        let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(macro_call))?;
+        let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(macro_call.as_ref()))?;
         hir_expand::db::expand_speculative(
             self.db.upcast(),
             macro_call_id,
@@ -705,8 +708,6 @@ impl<'db> SemanticsImpl<'db> {
         let parent = token.parent()?;
         let file_id = self.find_file(&parent).file_id.file_id()?;

-        let mut cache = self.expansion_info_cache.borrow_mut();
-
         // iterate related crates and find all include! invocations that include_file_id matches
         for (invoc, _) in self
             .db
@@ -716,18 +717,31 @@ impl<'db> SemanticsImpl<'db> {
             .filter(|&(_, include_file_id)| include_file_id == file_id)
         {
             let macro_file = invoc.as_macro_file();
-            let expansion_info = cache.entry(macro_file).or_insert_with(|| {
-                let exp_info = macro_file.expansion_info(self.db.upcast());
+            let expansion_info = {
+                self.with_ctx(|ctx| {
+                    ctx.expansion_info_cache
+                        .entry(macro_file)
+                        .or_insert_with(|| {
+                            let exp_info = macro_file.expansion_info(self.db.upcast());

-                let InMacroFile { file_id, value } = exp_info.expanded();
-                self.cache(value, file_id.into());
+                            let InMacroFile { file_id, value } = exp_info.expanded();
+                            if let InFile { file_id, value: Some(value) } = exp_info.call_node() {
+                                self.cache(value.ancestors().last().unwrap(), file_id);
+                            }
+                            self.cache(value, file_id.into());

-                exp_info
-            });
+                            exp_info
+                        })
+                        .clone()
+                })
+            };

             // FIXME: uncached parse
             // Create the source analyzer for the macro call scope
-            let Some(sa) = self.analyze_no_infer(&self.parse_or_expand(expansion_info.call_file()))
+            let Some(sa) = expansion_info
+                .call_node()
+                .value
+                .and_then(|it| self.analyze_no_infer(&it.ancestors().last().unwrap()))
             else {
                 continue;
             };
@@ -785,23 +799,27 @@ impl<'db> SemanticsImpl<'db> {
                 }
             };

-        let mut cache = self.expansion_info_cache.borrow_mut();
-        let mut mcache = self.macro_call_cache.borrow_mut();
+        let mut m_cache = self.macro_call_cache.borrow_mut();
         let def_map = sa.resolver.def_map();

         let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![(file_id, smallvec![token])];

-        let mut process_expansion_for_token = |stack: &mut Vec<_>, macro_file| {
-            let exp_info = cache.entry(macro_file).or_insert_with(|| {
-                let exp_info = macro_file.expansion_info(self.db.upcast());
+        let process_expansion_for_token = |stack: &mut Vec<_>, macro_file| {
+            let InMacroFile { file_id, value: mapped_tokens } = self.with_ctx(|ctx| {
+                Some(
+                    ctx.expansion_info_cache
+                        .entry(macro_file)
+                        .or_insert_with(|| {
+                            let exp_info = macro_file.expansion_info(self.db.upcast());

-                let InMacroFile { file_id, value } = exp_info.expanded();
-                self.cache(value, file_id.into());
+                            let InMacroFile { file_id, value } = exp_info.expanded();
+                            self.cache(value, file_id.into());

-                exp_info
-            });
-
-            let InMacroFile { file_id, value: mapped_tokens } = exp_info.map_range_down(span)?;
-            let mapped_tokens: SmallVec<[_; 2]> = mapped_tokens.collect();
+                            exp_info
+                        })
+                        .map_range_down(span)?
+                        .map(SmallVec::<[_; 2]>::from_iter),
+                )
+            })?;

             // we have found a mapping for the token if the vec is non-empty
             let res = mapped_tokens.is_empty().not().then_some(());
@@ -818,10 +836,7 @@ impl<'db> SemanticsImpl<'db> {
                     token.parent_ancestors().filter_map(ast::Item::cast).find_map(|item| {
                         // Don't force populate the dyn cache for items that don't have an attribute anyways
                         item.attrs().next()?;
-                        Some((
-                            ctx.item_to_macro_call(InFile::new(file_id, item.clone()))?,
-                            item,
-                        ))
+                        Some((ctx.item_to_macro_call(InFile::new(file_id, &item))?, item))
                     })
                 });
                 if let Some((call_id, item)) = containing_attribute_macro_call {
@@ -874,13 +889,20 @@ impl<'db> SemanticsImpl<'db> {
                         return None;
                     }
                     let macro_call = tt.syntax().parent().and_then(ast::MacroCall::cast)?;
-                    let mcall: hir_expand::files::InFileWrapper<HirFileId, ast::MacroCall> =
-                        InFile::new(file_id, macro_call);
-                    let file_id = match mcache.get(&mcall) {
+                    let mcall = InFile::new(file_id, macro_call);
+                    let file_id = match m_cache.get(&mcall) {
                         Some(&it) => it,
                         None => {
-                            let it = sa.expand(self.db, mcall.as_ref())?;
-                            mcache.insert(mcall, it);
+                            let it = if let Some(call) =
+                                <ast::MacroCall as crate::semantics::ToDef>::to_def(
+                                    self,
+                                    mcall.as_ref(),
+                                ) {
+                                call.as_macro_file()
+                            } else {
+                                sa.expand(self.db, mcall.as_ref())?
+                            };
+                            m_cache.insert(mcall, it);
                             it
                         }
                     };
@@ -1056,16 +1078,19 @@ impl<'db> SemanticsImpl<'db> {
         node: SyntaxNode,
     ) -> impl Iterator<Item = InFile<SyntaxNode>> + Clone + '_ {
         let node = self.find_file(&node);
-        let db = self.db.upcast();
         iter::successors(Some(node.cloned()), move |&InFile { file_id, ref value }| {
             match value.parent() {
                 Some(parent) => Some(InFile::new(file_id, parent)),
                 None => {
-                    let call_node = file_id.macro_file()?.call_node(db);
-                    // cache the node
-                    // FIXME: uncached parse
-                    self.parse_or_expand(call_node.file_id);
-                    Some(call_node)
+                    let macro_file = file_id.macro_file()?;
+
+                    self.with_ctx(|ctx| {
+                        let expansion_info = ctx
+                            .expansion_info_cache
+                            .entry(macro_file)
+                            .or_insert_with(|| macro_file.expansion_info(self.db.upcast()));
+                        expansion_info.call_node().transpose()
+                    })
                 }
             }
         })
@@ -1090,7 +1115,7 @@ impl<'db> SemanticsImpl<'db> {
                 .find(|tp| tp.lifetime().as_ref().map(|lt| lt.text()).as_ref() == Some(&text))
         })?;
         let src = self.wrap_node_infile(lifetime_param);
-        ToDef::to_def(self, src)
+        ToDef::to_def(self, src.as_ref())
     }

     pub fn resolve_label(&self, lifetime: &ast::Lifetime) -> Option
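Across these semantics.rs hunks, the separate `expansion_info_cache: RefCell<...>` is folded into the state handed out by `with_ctx`, and each caller now fills it lazily with `entry(..).or_insert_with(..)`, cloning or mapping the cached value out so the borrow ends inside the closure. A self-contained sketch of that caching shape, with plain `u32`/`String` stand-ins for `MacroFileId`/`ExpansionInfo` (not the actual rust-analyzer types):

```rust
use std::cell::RefCell;
use std::collections::HashMap;

// Illustrative only: one RefCell-backed context holds the cache, and a
// `with_ctx`-style helper funnels every access through a single borrow.
struct Ctx {
    expansion_info_cache: HashMap<u32, String>, // MacroFileId -> ExpansionInfo stand-ins
}

struct Sema {
    ctx: RefCell<Ctx>,
}

impl Sema {
    fn with_ctx<T>(&self, f: impl FnOnce(&mut Ctx) -> T) -> T {
        f(&mut self.ctx.borrow_mut())
    }

    fn expansion_info(&self, macro_file: u32) -> String {
        self.with_ctx(|ctx| {
            ctx.expansion_info_cache
                .entry(macro_file)
                .or_insert_with(|| {
                    println!("computing expansion info for {macro_file}");
                    format!("expansion-of-{macro_file}")
                })
                .clone() // hand out an owned copy so the borrow ends here
        })
    }
}

fn main() {
    let sema = Sema { ctx: RefCell::new(Ctx { expansion_info_cache: HashMap::new() }) };
    assert_eq!(sema.expansion_info(7), "expansion-of-7");
    // Second call hits the cache; "computing..." is printed only once.
    assert_eq!(sema.expansion_info(7), "expansion-of-7");
}
```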