Auto merge of #12001 - Veykril:refs, r=Veykril

fix: Do reference search on all downmapped tokens with the same kind only

cc https://github.com/rust-lang/rust-analyzer/issues/11668
This commit is contained in:
bors 2022-04-15 17:43:24 +00:00
commit a912f2a9f6
3 changed files with 100 additions and 59 deletions

View file

@ -208,6 +208,16 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
self.imp.descend_into_macros(token) self.imp.descend_into_macros(token)
} }
/// Descend the token into macro calls to all its mapped counterparts.
///
/// Returns the original non-descended token if none of the mapped counterparts have the same syntax kind.
pub fn descend_into_macros_with_same_kind(
&self,
token: SyntaxToken,
) -> SmallVec<[SyntaxToken; 1]> {
self.imp.descend_into_macros_with_same_kind(token)
}
/// Maps a node down by mapping its first and last token down. /// Maps a node down by mapping its first and last token down.
pub fn descend_node_into_attributes<N: AstNode>(&self, node: N) -> SmallVec<[N; 1]> { pub fn descend_node_into_attributes<N: AstNode>(&self, node: N) -> SmallVec<[N; 1]> {
self.imp.descend_node_into_attributes(node) self.imp.descend_node_into_attributes(node)
@ -599,25 +609,19 @@ impl<'db> SemanticsImpl<'db> {
}; };
if first == last { if first == last {
self.descend_into_macros_impl( self.descend_into_macros_impl(first, &mut |InFile { value, .. }| {
first, if let Some(node) = value.ancestors().find_map(N::cast) {
&mut |InFile { value, .. }| { res.push(node)
if let Some(node) = value.ancestors().find_map(N::cast) { }
res.push(node) false
} });
},
false,
);
} else { } else {
// Descend first and last token, then zip them to look for the node they belong to // Descend first and last token, then zip them to look for the node they belong to
let mut scratch: SmallVec<[_; 1]> = smallvec![]; let mut scratch: SmallVec<[_; 1]> = smallvec![];
self.descend_into_macros_impl( self.descend_into_macros_impl(first, &mut |token| {
first, scratch.push(token);
&mut |token| { false
scratch.push(token); });
},
false,
);
let mut scratch = scratch.into_iter(); let mut scratch = scratch.into_iter();
self.descend_into_macros_impl( self.descend_into_macros_impl(
@ -638,8 +642,8 @@ impl<'db> SemanticsImpl<'db> {
} }
} }
} }
false
}, },
false,
); );
} }
res res
@ -647,21 +651,41 @@ impl<'db> SemanticsImpl<'db> {
fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> { fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
let mut res = smallvec![]; let mut res = smallvec![];
self.descend_into_macros_impl(token, &mut |InFile { value, .. }| res.push(value), false); self.descend_into_macros_impl(token, &mut |InFile { value, .. }| {
res.push(value);
false
});
res
}
fn descend_into_macros_with_same_kind(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
let kind = token.kind();
let mut res = smallvec![];
self.descend_into_macros_impl(token.clone(), &mut |InFile { value, .. }| {
if value.kind() == kind {
res.push(value);
}
false
});
if res.is_empty() {
res.push(token);
}
res res
} }
fn descend_into_macros_single(&self, token: SyntaxToken) -> SyntaxToken { fn descend_into_macros_single(&self, token: SyntaxToken) -> SyntaxToken {
let mut res = token.clone(); let mut res = token.clone();
self.descend_into_macros_impl(token, &mut |InFile { value, .. }| res = value, true); self.descend_into_macros_impl(token, &mut |InFile { value, .. }| {
res = value;
true
});
res res
} }
fn descend_into_macros_impl( fn descend_into_macros_impl(
&self, &self,
token: SyntaxToken, token: SyntaxToken,
f: &mut dyn FnMut(InFile<SyntaxToken>), f: &mut dyn FnMut(InFile<SyntaxToken>) -> bool,
single: bool,
) { ) {
let _p = profile::span("descend_into_macros"); let _p = profile::span("descend_into_macros");
let parent = match token.parent() { let parent = match token.parent() {
@ -688,16 +712,11 @@ impl<'db> SemanticsImpl<'db> {
self.cache(value, file_id); self.cache(value, file_id);
} }
let mut mapped_tokens = let mapped_tokens = expansion_info.map_token_down(self.db.upcast(), item, token)?;
expansion_info.map_token_down(self.db.upcast(), item, token)?;
let len = stack.len(); let len = stack.len();
// requeue the tokens we got from mapping our current token down // requeue the tokens we got from mapping our current token down
if single { stack.extend(mapped_tokens);
stack.extend(mapped_tokens.next());
} else {
stack.extend(mapped_tokens);
}
// if the length changed we have found a mapping for the token // if the length changed we have found a mapping for the token
(stack.len() != len).then(|| ()) (stack.len() != len).then(|| ())
}; };
@ -787,8 +806,8 @@ impl<'db> SemanticsImpl<'db> {
})() })()
.is_none(); .is_none();
if was_not_remapped { if was_not_remapped && f(token) {
f(token) break;
} }
} }
} }

View file

@ -20,7 +20,9 @@ use rustc_hash::FxHashMap;
use syntax::{ use syntax::{
algo::find_node_at_offset, algo::find_node_at_offset,
ast::{self, HasName}, ast::{self, HasName},
match_ast, AstNode, SyntaxNode, TextRange, TextSize, T, match_ast, AstNode,
SyntaxKind::*,
SyntaxNode, TextRange, TextSize, T,
}; };
use crate::{FilePosition, NavigationTarget, TryToNav}; use crate::{FilePosition, NavigationTarget, TryToNav};
@ -104,7 +106,7 @@ pub(crate) fn find_all_refs(
} }
None => { None => {
let search = make_searcher(false); let search = make_searcher(false);
Some(find_defs(sema, &syntax, position.offset).into_iter().map(search).collect()) Some(find_defs(sema, &syntax, position.offset)?.into_iter().map(search).collect())
} }
} }
} }
@ -113,31 +115,47 @@ pub(crate) fn find_defs<'a>(
sema: &'a Semantics<RootDatabase>, sema: &'a Semantics<RootDatabase>,
syntax: &SyntaxNode, syntax: &SyntaxNode,
offset: TextSize, offset: TextSize,
) -> impl Iterator<Item = Definition> + 'a { ) -> Option<impl Iterator<Item = Definition> + 'a> {
sema.find_nodes_at_offset_with_descend(syntax, offset).filter_map(move |name_like| { let token = syntax.token_at_offset(offset).find(|t| {
let def = match name_like { matches!(
ast::NameLike::NameRef(name_ref) => match NameRefClass::classify(sema, &name_ref)? { t.kind(),
NameRefClass::Definition(def) => def, IDENT | INT_NUMBER | LIFETIME_IDENT | T![self] | T![super] | T![crate] | T![Self]
NameRefClass::FieldShorthand { local_ref, field_ref: _ } => { )
Definition::Local(local_ref) });
} token.map(|token| {
}, sema.descend_into_macros_with_same_kind(token)
ast::NameLike::Name(name) => match NameClass::classify(sema, &name)? { .into_iter()
NameClass::Definition(it) | NameClass::ConstReference(it) => it, .filter_map(|it| ast::NameLike::cast(it.parent()?))
NameClass::PatFieldShorthand { local_def, field_ref: _ } => { .filter_map(move |name_like| {
Definition::Local(local_def) let def = match name_like {
} ast::NameLike::NameRef(name_ref) => {
}, match NameRefClass::classify(sema, &name_ref)? {
ast::NameLike::Lifetime(lifetime) => NameRefClass::classify_lifetime(sema, &lifetime) NameRefClass::Definition(def) => def,
.and_then(|class| match class { NameRefClass::FieldShorthand { local_ref, field_ref: _ } => {
NameRefClass::Definition(it) => Some(it), Definition::Local(local_ref)
_ => None, }
}) }
.or_else(|| { }
NameClass::classify_lifetime(sema, &lifetime).and_then(NameClass::defined) ast::NameLike::Name(name) => match NameClass::classify(sema, &name)? {
})?, NameClass::Definition(it) | NameClass::ConstReference(it) => it,
}; NameClass::PatFieldShorthand { local_def, field_ref: _ } => {
Some(def) Definition::Local(local_def)
}
},
ast::NameLike::Lifetime(lifetime) => {
NameRefClass::classify_lifetime(sema, &lifetime)
.and_then(|class| match class {
NameRefClass::Definition(it) => Some(it),
_ => None,
})
.or_else(|| {
NameClass::classify_lifetime(sema, &lifetime)
.and_then(NameClass::defined)
})?
}
};
Some(def)
})
}) })
} }

View file

@ -226,7 +226,11 @@ fn find_related_tests(
search_scope: Option<SearchScope>, search_scope: Option<SearchScope>,
tests: &mut FxHashSet<Runnable>, tests: &mut FxHashSet<Runnable>,
) { ) {
let defs = references::find_defs(sema, syntax, position.offset); // FIXME: why is this using references::find_defs, this should use ide_db::search
let defs = match references::find_defs(sema, syntax, position.offset) {
Some(defs) => defs,
None => return,
};
for def in defs { for def in defs {
let defs = def let defs = def
.usages(sema) .usages(sema)