diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index 18f6015843..9f03c6467b 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -2,7 +2,7 @@
 
 mod source_to_def;
 
-use std::{cell::RefCell, fmt, iter::successors};
+use std::{cell::RefCell, fmt};
 
 use base_db::{FileId, FileRange};
 use hir_def::{
@@ -14,6 +14,7 @@ use hir_expand::{name::AsName, ExpansionInfo};
 use hir_ty::{associated_type_shorthand_candidates, Interner};
 use itertools::Itertools;
 use rustc_hash::{FxHashMap, FxHashSet};
+use smallvec::{smallvec, SmallVec};
 use syntax::{
     algo::find_node_at_offset,
     ast::{self, GenericParamsOwner, LoopBodyOwner},
@@ -165,7 +166,13 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
         self.imp.speculative_expand(actual_macro_call, speculative_args, token_to_map)
     }
 
+    // FIXME: Rename to descend_into_macros_single
     pub fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken {
+        self.imp.descend_into_macros(token).pop().unwrap()
+    }
+
+    // FIXME: Rename to descend_into_macros
+    pub fn descend_into_macros_many(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
         self.imp.descend_into_macros(token)
     }
 
@@ -174,7 +181,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
         node: &SyntaxNode,
         offset: TextSize,
     ) -> Option<N> {
-        self.imp.descend_node_at_offset(node, offset).find_map(N::cast)
+        self.imp.descend_node_at_offset(node, offset).flatten().find_map(N::cast)
     }
 
     pub fn hir_file_for(&self, syntax_node: &SyntaxNode) -> HirFileId {
@@ -228,7 +235,17 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
             return Some(it);
         }
 
-        self.imp.descend_node_at_offset(node, offset).find_map(N::cast)
+        self.imp.descend_node_at_offset(node, offset).flatten().find_map(N::cast)
+    }
+
+    /// Find an AstNode by offset inside SyntaxNode, if it is inside *MacroCall*,
+    /// descend it and find again
+    pub fn find_nodes_at_offset_with_descend<'slf, N: AstNode + 'slf>(
+        &'slf self,
+        node: &SyntaxNode,
+        offset: TextSize,
+    ) -> impl Iterator<Item = N> + 'slf {
+        self.imp.descend_node_at_offset(node, offset).filter_map(|mut it| it.find_map(N::cast))
     }
 
     pub fn resolve_lifetime_param(&self, lifetime: &ast::Lifetime) -> Option<LifetimeParam> {
@@ -440,87 +457,93 @@ impl<'db> SemanticsImpl<'db> {
         )
     }
 
-    fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken {
+    fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
         let _p = profile::span("descend_into_macros");
         let parent = match token.parent() {
             Some(it) => it,
-            None => return token,
+            None => return smallvec![token],
         };
         let sa = self.analyze(&parent);
-
-        let token = successors(Some(InFile::new(sa.file_id, token)), |token| {
+        let mut queue = vec![InFile::new(sa.file_id, token)];
+        let mut cache = self.expansion_info_cache.borrow_mut();
+        let mut res = smallvec![];
+        while let Some(token) = queue.pop() {
             self.db.unwind_if_cancelled();
-            for node in token.value.ancestors() {
-                match_ast! {
-                    match node {
-                        ast::MacroCall(macro_call) => {
-                            let tt = macro_call.token_tree()?;
-                            let l_delim = match tt.left_delimiter_token() {
-                                Some(it) => it.text_range().end(),
-                                None => tt.syntax().text_range().start()
-                            };
-                            let r_delim = match tt.right_delimiter_token() {
-                                Some(it) => it.text_range().start(),
-                                None => tt.syntax().text_range().end()
-                            };
-                            if !TextRange::new(l_delim, r_delim).contains_range(token.value.text_range()) {
-                                return None;
-                            }
-                            let file_id = sa.expand(self.db, token.with_value(&macro_call))?;
-                            let token = self
-                                .expansion_info_cache
-                                .borrow_mut()
-                                .entry(file_id)
-                                .or_insert_with(|| file_id.expansion_info(self.db.upcast()))
-                                .as_ref()?
-                                .map_token_down(self.db.upcast(), None, token.as_ref())?;
-
-                            if let Some(parent) = token.value.parent() {
-                                self.cache(find_root(&parent), token.file_id);
-                            }
-
-                            return Some(token);
-                        },
-                        ast::Item(item) => {
-                            if let Some(call_id) = self.with_ctx(|ctx| ctx.item_to_macro_call(token.with_value(item.clone()))) {
-                                let file_id = call_id.as_file();
-                                let token = self
-                                    .expansion_info_cache
-                                    .borrow_mut()
+            let was_not_remapped = (|| {
+                for node in token.value.ancestors() {
+                    match_ast! {
+                        match node {
+                            ast::MacroCall(macro_call) => {
+                                let tt = macro_call.token_tree()?;
+                                let l_delim = match tt.left_delimiter_token() {
+                                    Some(it) => it.text_range().end(),
+                                    None => tt.syntax().text_range().start()
+                                };
+                                let r_delim = match tt.right_delimiter_token() {
+                                    Some(it) => it.text_range().start(),
+                                    None => tt.syntax().text_range().end()
+                                };
+                                if !TextRange::new(l_delim, r_delim).contains_range(token.value.text_range()) {
+                                    return None;
+                                }
+                                let file_id = sa.expand(self.db, token.with_value(&macro_call))?;
+                                let tokens = cache
                                     .entry(file_id)
                                     .or_insert_with(|| file_id.expansion_info(self.db.upcast()))
                                     .as_ref()?
-                                    .map_token_down(self.db.upcast(), Some(item), token.as_ref())?;
+                                    .map_token_down(self.db.upcast(), None, token.as_ref())?;
 
-                                if let Some(parent) = token.value.parent() {
-                                    self.cache(find_root(&parent), token.file_id);
+                                let len = queue.len();
+                                queue.extend(tokens.inspect(|token| {
+                                    if let Some(parent) = token.value.parent() {
+                                        self.cache(find_root(&parent), token.file_id);
+                                    }
+                                }));
+                                return (queue.len() != len).then(|| ());
+                            },
+                            ast::Item(item) => {
+                                if let Some(call_id) = self.with_ctx(|ctx| ctx.item_to_macro_call(token.with_value(item.clone()))) {
+                                    let file_id = call_id.as_file();
+                                    let tokens = cache
+                                        .entry(file_id)
+                                        .or_insert_with(|| file_id.expansion_info(self.db.upcast()))
+                                        .as_ref()?
+                                        .map_token_down(self.db.upcast(), Some(item), token.as_ref())?;
+
+                                    let len = queue.len();
+                                    queue.extend(tokens.inspect(|token| {
+                                        if let Some(parent) = token.value.parent() {
+                                            self.cache(find_root(&parent), token.file_id);
+                                        }
+                                    }));
+                                    return (queue.len() != len).then(|| ());
                                 }
-
-                                return Some(token);
-                            }
-                        },
-                        _ => {}
+                            },
+                            _ => {}
+                        }
                     }
                 }
+                None
+            })().is_none();
+            if was_not_remapped {
+                res.push(token.value)
             }
-
-            None
-        })
-        .last()
-        .unwrap();
-        token.value
+        }
+        res
     }
 
+    // Note this return type is deliberate as [`find_nodes_at_offset_with_descend`] wants to stop
+    // traversing the inner iterator when it finds a node.
     fn descend_node_at_offset(
         &self,
         node: &SyntaxNode,
         offset: TextSize,
-    ) -> impl Iterator<Item = SyntaxNode> + '_ {
+    ) -> impl Iterator<Item = impl Iterator<Item = SyntaxNode> + '_> + '_ {
         // Handle macro token cases
         node.token_at_offset(offset)
-            .map(|token| self.descend_into_macros(token))
-            .map(|it| self.token_ancestors_with_macros(it))
+            .map(move |token| self.descend_into_macros(token))
+            .map(|it| it.into_iter().map(move |it| self.token_ancestors_with_macros(it)))
             .flatten()
     }
 
diff --git a/crates/hir_expand/src/db.rs b/crates/hir_expand/src/db.rs
index fc1dc048ef..2fc8468faf 100644
--- a/crates/hir_expand/src/db.rs
+++ b/crates/hir_expand/src/db.rs
@@ -163,7 +163,7 @@ pub fn expand_speculative(
         mbe::token_tree_to_syntax_node(&speculative_expansion.value, fragment_kind).ok()?;
 
     let token_id = macro_def.map_id_down(token_id);
-    let range = tmap_2.range_by_token(token_id, token_to_map.kind())?;
+    let range = tmap_2.first_range_by_token(token_id, token_to_map.kind())?;
     let token = node.syntax_node().covering_element(range).into_token()?;
     Some((node.syntax_node(), token))
 }
diff --git a/crates/hir_expand/src/hygiene.rs b/crates/hir_expand/src/hygiene.rs
index 5deb59ae31..cac484a325 100644
--- a/crates/hir_expand/src/hygiene.rs
+++ b/crates/hir_expand/src/hygiene.rs
@@ -171,7 +171,7 @@ impl HygieneInfo {
             },
         };
 
-        let range = token_map.range_by_token(token_id, SyntaxKind::IDENT)?;
+        let range = token_map.first_range_by_token(token_id, SyntaxKind::IDENT)?;
         Some((tt.with_value(range + tt.value), origin))
     }
 }
diff --git a/crates/hir_expand/src/lib.rs b/crates/hir_expand/src/lib.rs
index a12dd07426..3bbbb5722f 100644
--- a/crates/hir_expand/src/lib.rs
+++ b/crates/hir_expand/src/lib.rs
@@ -368,7 +368,7 @@ impl ExpansionInfo {
         db: &dyn db::AstDatabase,
         item: Option<ast::Item>,
         token: InFile<&SyntaxToken>,
-    ) -> Option<InFile<SyntaxToken>> {
+    ) -> Option<impl Iterator<Item = InFile<SyntaxToken>> + '_> {
         assert_eq!(token.file_id, self.arg.file_id);
         let token_id = if let Some(item) = item {
             let call_id = match self.expanded.file_id.0 {
@@ -411,11 +411,12 @@ impl ExpansionInfo {
             }
         };
 
-        let range = self.exp_map.range_by_token(token_id, token.value.kind())?;
+        let tokens = self
+            .exp_map
+            .ranges_by_token(token_id, token.value.kind())
+            .flat_map(move |range| self.expanded.value.covering_element(range).into_token());
 
-        let token = self.expanded.value.covering_element(range).into_token()?;
-
-        Some(self.expanded.with_value(token))
+        Some(tokens.map(move |token| self.expanded.with_value(token)))
     }
 
     pub fn map_token_up(
@@ -453,7 +454,7 @@ impl ExpansionInfo {
             },
         };
 
-        let range = token_map.range_by_token(token_id, token.value.kind())?;
+        let range = token_map.first_range_by_token(token_id, token.value.kind())?;
         let token =
             tt.value.covering_element(range + tt.value.text_range().start()).into_token()?;
         Some((tt.with_value(token), origin))
diff --git a/crates/ide/src/highlight_related.rs b/crates/ide/src/highlight_related.rs
index 4fc2d35538..67ad263fa2 100644
--- a/crates/ide/src/highlight_related.rs
+++ b/crates/ide/src/highlight_related.rs
@@ -6,6 +6,7 @@ use ide_db::{
     search::{FileReference, ReferenceAccess, SearchScope},
     RootDatabase,
 };
+use rustc_hash::FxHashSet;
 use syntax::{
     ast::{self, LoopBodyOwner},
     match_ast, AstNode, SyntaxNode, SyntaxToken, TextRange, TextSize, T,
@@ -13,6 +14,7 @@ use syntax::{
 
 use crate::{display::TryToNav, references, NavigationTarget};
 
+#[derive(PartialEq, Eq, Hash)]
 pub struct HighlightedRange {
     pub range: TextRange,
     pub access: Option<ReferenceAccess>,
@@ -70,7 +72,7 @@ fn highlight_references(
     syntax: &SyntaxNode,
     FilePosition { offset, file_id }: FilePosition,
 ) -> Option<Vec<HighlightedRange>> {
-    let defs = find_defs(sema, syntax, offset)?;
+    let defs = find_defs(sema, syntax, offset);
     let usages = defs
         .iter()
         .flat_map(|&d| {
@@ -99,7 +101,12 @@ fn highlight_references(
         })
     });
 
-    Some(declarations.chain(usages).collect())
+    let res: FxHashSet<_> = declarations.chain(usages).collect();
+    if res.is_empty() {
+        None
+    } else {
+        Some(res.into_iter().collect())
+    }
 }
 
 fn highlight_exit_points(
@@ -270,29 +277,40 @@ fn find_defs(
     sema: &Semantics<RootDatabase>,
     syntax: &SyntaxNode,
     offset: TextSize,
-) -> Option<Vec<Definition>> {
-    let defs = match sema.find_node_at_offset_with_descend(syntax, offset)? {
-        ast::NameLike::NameRef(name_ref) => match NameRefClass::classify(sema, &name_ref)? {
-            NameRefClass::Definition(def) => vec![def],
-            NameRefClass::FieldShorthand { local_ref, field_ref } => {
-                vec![Definition::Local(local_ref), Definition::Field(field_ref)]
-            }
-        },
-        ast::NameLike::Name(name) => match NameClass::classify(sema, &name)? {
-            NameClass::Definition(it) | NameClass::ConstReference(it) => vec![it],
-            NameClass::PatFieldShorthand { local_def, field_ref } => {
-                vec![Definition::Local(local_def), Definition::Field(field_ref)]
-            }
-        },
-        ast::NameLike::Lifetime(lifetime) => NameRefClass::classify_lifetime(sema, &lifetime)
-            .and_then(|class| match class {
-                NameRefClass::Definition(it) => Some(it),
-                _ => None,
+) -> FxHashSet<Definition> {
+    sema.find_nodes_at_offset_with_descend(syntax, offset)
+        .flat_map(|name_like| {
+            Some(match name_like {
+                ast::NameLike::NameRef(name_ref) => {
+                    match NameRefClass::classify(sema, &name_ref)? {
+                        NameRefClass::Definition(def) => vec![def],
+                        NameRefClass::FieldShorthand { local_ref, field_ref } => {
+                            vec![Definition::Local(local_ref), Definition::Field(field_ref)]
+                        }
+                    }
+                }
+                ast::NameLike::Name(name) => match NameClass::classify(sema, &name)? {
+                    NameClass::Definition(it) | NameClass::ConstReference(it) => vec![it],
+                    NameClass::PatFieldShorthand { local_def, field_ref } => {
+                        vec![Definition::Local(local_def), Definition::Field(field_ref)]
+                    }
+                },
+                ast::NameLike::Lifetime(lifetime) => {
+                    NameRefClass::classify_lifetime(sema, &lifetime)
+                        .and_then(|class| match class {
+                            NameRefClass::Definition(it) => Some(it),
+                            _ => None,
+                        })
+                        .or_else(|| {
+                            NameClass::classify_lifetime(sema, &lifetime)
+                                .and_then(NameClass::defined)
+                        })
+                        .map(|it| vec![it])?
+                }
             })
-            .or_else(|| NameClass::classify_lifetime(sema, &lifetime).and_then(NameClass::defined))
-            .map(|it| vec![it])?,
-    };
-    Some(defs)
+        })
+        .flatten()
+        .collect()
 }
 
 #[cfg(test)]
@@ -392,6 +410,45 @@ fn foo() {
         );
     }
 
+    #[test]
+    fn test_multi_macro_usage() {
+        check(
+            r#"
+macro_rules! foo {
+    ($ident:ident) => {
+        fn $ident() -> $ident { loop {} }
+        struct $ident;
+    }
+}
+
+foo!(bar$0);
+  // ^^^
+fn foo() {
+    let bar: bar = bar();
+          // ^^^
+                // ^^^
+}
+"#,
+        );
+        check(
+            r#"
+macro_rules! foo {
+    ($ident:ident) => {
+        fn $ident() -> $ident { loop {} }
+        struct $ident;
+    }
+}
+
+foo!(bar);
+  // ^^^
+fn foo() {
+    let bar: bar$0 = bar();
+          // ^^^
+}
+"#,
+        );
+    }
+
     #[test]
     fn test_hl_yield_points() {
         check(
@@ -813,7 +870,6 @@ fn function(field: u32) {
   //^^^^^ Struct
     { field$0 }
   //^^^^^ read
-  //^^^^^ read
 }
 "#,
         );
diff --git a/crates/ide_db/src/defs.rs b/crates/ide_db/src/defs.rs
index 28d68c6d3e..719f424fd2 100644
--- a/crates/ide_db/src/defs.rs
+++ b/crates/ide_db/src/defs.rs
@@ -17,7 +17,7 @@ use syntax::{
 
 use crate::RootDatabase;
 
 // FIXME: a more precise name would probably be `Symbol`?
-#[derive(Debug, PartialEq, Eq, Copy, Clone)]
+#[derive(Debug, PartialEq, Eq, Copy, Clone, Hash)]
 pub enum Definition {
     Macro(MacroDef),
     Field(Field),
diff --git a/crates/ide_db/src/search.rs b/crates/ide_db/src/search.rs
index 431a36d8a2..855675be42 100644
--- a/crates/ide_db/src/search.rs
+++ b/crates/ide_db/src/search.rs
@@ -61,7 +61,7 @@ pub struct FileReference {
     pub access: Option<ReferenceAccess>,
 }
 
-#[derive(Debug, Copy, Clone, PartialEq)]
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
 pub enum ReferenceAccess {
     Read,
     Write,
@@ -393,7 +393,7 @@ impl<'a> FindUsages<'a> {
                 continue;
             }
 
-            if let Some(name) = sema.find_node_at_offset_with_descend(&tree, offset) {
+            for name in sema.find_nodes_at_offset_with_descend(&tree, offset) {
                 if match name {
                     ast::NameLike::NameRef(name_ref) => self.found_name_ref(&name_ref, sink),
                     ast::NameLike::Name(name) => self.found_name(&name, sink),
@@ -410,9 +410,7 @@ impl<'a> FindUsages<'a> {
                 continue;
             }
 
-            if let Some(ast::NameLike::NameRef(name_ref)) =
-                sema.find_node_at_offset_with_descend(&tree, offset)
-            {
+            for name_ref in sema.find_nodes_at_offset_with_descend(&tree, offset) {
                 if self.found_self_ty_name_ref(self_ty, &name_ref, sink) {
                     return;
                 }
diff --git a/crates/mbe/src/tests/expand.rs b/crates/mbe/src/tests/expand.rs
index c788e427e7..c8d06eebb7 100644
--- a/crates/mbe/src/tests/expand.rs
+++ b/crates/mbe/src/tests/expand.rs
@@ -58,8 +58,9 @@ macro_rules! foobar {
     let (node, token_map) = token_tree_to_syntax_node(&expanded, FragmentKind::Items).unwrap();
     let content = node.syntax_node().to_string();
 
-    let get_text =
-        |id, kind| -> String { content[token_map.range_by_token(id, kind).unwrap()].to_string() };
+    let get_text = |id, kind| -> String {
+        content[token_map.first_range_by_token(id, kind).unwrap()].to_string()
+    };
 
     assert_eq!(expanded.token_trees.len(), 4);
     // {($e:ident) => { fn $e() {} }}
diff --git a/crates/mbe/src/token_map.rs b/crates/mbe/src/token_map.rs
index ff0c106cf2..9053526d20 100644
--- a/crates/mbe/src/token_map.rs
+++ b/crates/mbe/src/token_map.rs
@@ -46,9 +46,23 @@ impl TokenMap {
         Some(token_id)
     }
 
-    pub fn range_by_token(&self, token_id: tt::TokenId, kind: SyntaxKind) -> Option<TextRange> {
-        let &(_, range) = self.entries.iter().find(|(tid, _)| *tid == token_id)?;
-        range.by_kind(kind)
+    pub fn ranges_by_token(
+        &self,
+        token_id: tt::TokenId,
+        kind: SyntaxKind,
+    ) -> impl Iterator<Item = TextRange> + '_ {
+        self.entries
+            .iter()
+            .filter(move |&&(tid, _)| tid == token_id)
+            .filter_map(move |(_, range)| range.by_kind(kind))
+    }
+
+    pub fn first_range_by_token(
+        &self,
+        token_id: tt::TokenId,
+        kind: SyntaxKind,
+    ) -> Option<TextRange> {
+        self.ranges_by_token(token_id, kind).next()
    }
 
     pub(crate) fn shrink_to_fit(&mut self) {
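
Reviewer note, not part of the patch: a minimal, self-contained sketch of the one-to-many lookup this change introduces in `TokenMap`. The `TokenId` and `TextRange` structs below are simplified stand-ins for `tt::TokenId` and `text_size::TextRange`, and the `SyntaxKind` filtering is omitted. The point is only the shape of the API: because a macro token such as `$ident` can be pasted several times into the expansion, `ranges_by_token` returns an iterator, and the old single-result behaviour (`first_range_by_token`) reduces to `.next()`.

```rust
// Toy model of the TokenMap change: one token id may map to several ranges
// in the expanded text (e.g. `$ident` pasted into both a `fn` and a `struct`).

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct TokenId(u32); // stand-in for tt::TokenId

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct TextRange {
    start: u32,
    end: u32,
} // stand-in for text_size::TextRange

struct TokenMap {
    entries: Vec<(TokenId, TextRange)>,
}

impl TokenMap {
    /// All ranges recorded for `token_id`, in insertion order.
    fn ranges_by_token(&self, token_id: TokenId) -> impl Iterator<Item = TextRange> + '_ {
        self.entries.iter().filter(move |&&(tid, _)| tid == token_id).map(|&(_, range)| range)
    }

    /// The old single-result lookup, now just the first hit.
    fn first_range_by_token(&self, token_id: TokenId) -> Option<TextRange> {
        self.ranges_by_token(token_id).next()
    }
}

fn main() {
    // `$ident` (token id 1) was pasted twice by the expansion.
    let map = TokenMap {
        entries: vec![
            (TokenId(0), TextRange { start: 0, end: 3 }),
            (TokenId(1), TextRange { start: 7, end: 10 }),
            (TokenId(1), TextRange { start: 18, end: 21 }),
        ],
    };
    assert_eq!(map.ranges_by_token(TokenId(1)).count(), 2);
    assert_eq!(map.first_range_by_token(TokenId(1)), Some(TextRange { start: 7, end: 10 }));
    assert_eq!(map.first_range_by_token(TokenId(2)), None);
}
```

This is the same design choice the patch applies one level up: `ExpansionInfo::map_token_down` and `Semantics::descend_into_macros` become one-to-many (iterator / `SmallVec`), while call sites that only care about a single result, such as `expand_speculative` and `HygieneInfo`, switch to the `first_*` convenience wrapper.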