Auto merge of #12001 - Veykril:refs, r=Veykril
fix: Do reference search on all downmapped tokens with the same kind only

cc https://github.com/rust-lang/rust-analyzer/issues/11668
commit a912f2a9f6

3 changed files with 100 additions and 59 deletions
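The gist of the change: reference search now takes the token at the cursor, descends it into macro expansions via the new descend_into_macros_with_same_kind, keeps only the mapped tokens whose syntax kind matches the original, and falls back to the original token when nothing matches. A standalone sketch of that filtering rule follows (an editorial illustration with made-up Token and Kind types, not rust-analyzer's SyntaxToken/SyntaxKind):

    // Sketch only: hypothetical stand-ins for syntax::SyntaxToken / SyntaxKind.
    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    enum Kind {
        Ident,
        IntNumber,
    }

    #[derive(Clone, PartialEq, Eq, Debug)]
    struct Token {
        kind: Kind,
        text: String,
    }

    // Keep only mapped tokens whose kind matches the original; if none match,
    // fall back to the original token (mirrors descend_into_macros_with_same_kind).
    fn same_kind_or_original(original: Token, mapped: Vec<Token>) -> Vec<Token> {
        let kind = original.kind;
        let same_kind: Vec<Token> = mapped.into_iter().filter(|t| t.kind == kind).collect();
        if same_kind.is_empty() {
            vec![original]
        } else {
            same_kind
        }
    }

    fn main() {
        let original = Token { kind: Kind::Ident, text: "foo".into() };
        let mapped = vec![
            Token { kind: Kind::Ident, text: "foo".into() },
            Token { kind: Kind::IntNumber, text: "42".into() },
        ];
        // The identifier survives; the number produced by the expansion does not.
        println!("{:?}", same_kind_or_original(original, mapped));
    }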
@@ -208,6 +208,16 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
         self.imp.descend_into_macros(token)
     }
 
+    /// Descend the token into macrocalls to all its mapped counterparts.
+    ///
+    /// Returns the original non descended token if none of the mapped counterparts have the same syntax kind.
+    pub fn descend_into_macros_with_same_kind(
+        &self,
+        token: SyntaxToken,
+    ) -> SmallVec<[SyntaxToken; 1]> {
+        self.imp.descend_into_macros_with_same_kind(token)
+    }
+
     /// Maps a node down by mapping its first and last token down.
     pub fn descend_node_into_attributes<N: AstNode>(&self, node: N) -> SmallVec<[N; 1]> {
         self.imp.descend_node_into_attributes(node)
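A hypothetical call site for the new public method (not part of this commit), modeled on the reference-search change further down: descend the token, then try to read each resulting token's parent as a name-like node. It assumes the hir, ide_db and syntax crates and only compiles inside a rust-analyzer workspace; the function name is made up.

    use hir::Semantics;
    use ide_db::RootDatabase;
    use syntax::{ast, AstNode, SyntaxToken};

    // Sketch only: collect the NameLike nodes reachable from a token after
    // descending it into macros, keeping only same-kind counterparts.
    fn name_likes_for_token(
        sema: &Semantics<RootDatabase>,
        token: SyntaxToken,
    ) -> Vec<ast::NameLike> {
        sema.descend_into_macros_with_same_kind(token)
            .into_iter()
            .filter_map(|tok| tok.parent().and_then(ast::NameLike::cast))
            .collect()
    }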
@@ -599,25 +609,19 @@ impl<'db> SemanticsImpl<'db> {
         };
 
         if first == last {
-            self.descend_into_macros_impl(
-                first,
-                &mut |InFile { value, .. }| {
-                    if let Some(node) = value.ancestors().find_map(N::cast) {
-                        res.push(node)
-                    }
-                },
-                false,
-            );
+            self.descend_into_macros_impl(first, &mut |InFile { value, .. }| {
+                if let Some(node) = value.ancestors().find_map(N::cast) {
+                    res.push(node)
+                }
+                false
+            });
         } else {
             // Descend first and last token, then zip them to look for the node they belong to
             let mut scratch: SmallVec<[_; 1]> = smallvec![];
-            self.descend_into_macros_impl(
-                first,
-                &mut |token| {
-                    scratch.push(token);
-                },
-                false,
-            );
+            self.descend_into_macros_impl(first, &mut |token| {
+                scratch.push(token);
+                false
+            });
 
             let mut scratch = scratch.into_iter();
             self.descend_into_macros_impl(

@@ -638,8 +642,8 @@ impl<'db> SemanticsImpl<'db> {
                             }
                         }
                     }
+                    false
                 },
-                false,
             );
         }
         res
@@ -647,21 +651,41 @@ impl<'db> SemanticsImpl<'db> {
 
     fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
         let mut res = smallvec![];
-        self.descend_into_macros_impl(token, &mut |InFile { value, .. }| res.push(value), false);
+        self.descend_into_macros_impl(token, &mut |InFile { value, .. }| {
+            res.push(value);
+            false
+        });
         res
     }
 
+    fn descend_into_macros_with_same_kind(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
+        let kind = token.kind();
+        let mut res = smallvec![];
+        self.descend_into_macros_impl(token.clone(), &mut |InFile { value, .. }| {
+            if value.kind() == kind {
+                res.push(value);
+            }
+            false
+        });
+        if res.is_empty() {
+            res.push(token);
+        }
+        res
+    }
+
     fn descend_into_macros_single(&self, token: SyntaxToken) -> SyntaxToken {
         let mut res = token.clone();
-        self.descend_into_macros_impl(token, &mut |InFile { value, .. }| res = value, true);
+        self.descend_into_macros_impl(token, &mut |InFile { value, .. }| {
+            res = value;
+            true
+        });
         res
     }
 
     fn descend_into_macros_impl(
         &self,
         token: SyntaxToken,
-        f: &mut dyn FnMut(InFile<SyntaxToken>),
-        single: bool,
+        f: &mut dyn FnMut(InFile<SyntaxToken>) -> bool,
     ) {
         let _p = profile::span("descend_into_macros");
         let parent = match token.parent() {
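The single: bool flag of descend_into_macros_impl is gone: the callback itself now returns a bool, where true means "stop after this token" (what descend_into_macros_single does) and false means "keep descending" (a later hunk breaks out of the remapping loop when the callback returns true). A standalone sketch of that callback-driven early exit, with made-up names:

    // Sketch only: a traversal whose callback returns `true` to stop early,
    // replacing a separate `single: bool` parameter.
    fn visit_until(items: &[i32], f: &mut dyn FnMut(i32) -> bool) {
        for &item in items {
            if f(item) {
                break;
            }
        }
    }

    fn main() {
        // "Single" behaviour: stop after the first item by returning `true`.
        let mut first = None;
        visit_until(&[1, 2, 3], &mut |x| {
            first = Some(x);
            true
        });

        // "All" behaviour: visit everything by always returning `false`.
        let mut all = Vec::new();
        visit_until(&[1, 2, 3], &mut |x| {
            all.push(x);
            false
        });

        assert_eq!(first, Some(1));
        assert_eq!(all, vec![1, 2, 3]);
    }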
@@ -688,16 +712,11 @@ impl<'db> SemanticsImpl<'db> {
                     self.cache(value, file_id);
                 }
 
-                let mut mapped_tokens =
-                    expansion_info.map_token_down(self.db.upcast(), item, token)?;
+                let mapped_tokens = expansion_info.map_token_down(self.db.upcast(), item, token)?;
                 let len = stack.len();
 
                 // requeue the tokens we got from mapping our current token down
-                if single {
-                    stack.extend(mapped_tokens.next());
-                } else {
-                    stack.extend(mapped_tokens);
-                }
+                stack.extend(mapped_tokens);
                 // if the length changed we have found a mapping for the token
                 (stack.len() != len).then(|| ())
             };
@@ -787,8 +806,8 @@ impl<'db> SemanticsImpl<'db> {
             })()
             .is_none();
 
-            if was_not_remapped {
-                f(token)
+            if was_not_remapped && f(token) {
+                break;
             }
         }
     }
@@ -20,7 +20,9 @@ use rustc_hash::FxHashMap;
 use syntax::{
     algo::find_node_at_offset,
     ast::{self, HasName},
-    match_ast, AstNode, SyntaxNode, TextRange, TextSize, T,
+    match_ast, AstNode,
+    SyntaxKind::*,
+    SyntaxNode, TextRange, TextSize, T,
 };
 
 use crate::{FilePosition, NavigationTarget, TryToNav};
@@ -104,7 +106,7 @@ pub(crate) fn find_all_refs(
         }
         None => {
             let search = make_searcher(false);
-            Some(find_defs(sema, &syntax, position.offset).into_iter().map(search).collect())
+            Some(find_defs(sema, &syntax, position.offset)?.into_iter().map(search).collect())
         }
     }
 }
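Since find_all_refs already returns an Option, the added ? simply turns "no searchable token at this offset" into None for the whole query instead of an empty result. A minimal standalone sketch of that propagation, with made-up functions and types:

    // Sketch only: `?` forwards the None case of an Option-returning helper.
    fn find_defs(offset: usize) -> Option<Vec<&'static str>> {
        // Hypothetical rule standing in for "no searchable token at the offset".
        if offset == 0 {
            None
        } else {
            Some(vec!["some definition"])
        }
    }

    fn find_all_refs(offset: usize) -> Option<Vec<String>> {
        // Bails out with None when find_defs finds nothing to search for.
        Some(find_defs(offset)?.into_iter().map(|d| format!("refs of {d}")).collect())
    }

    fn main() {
        assert!(find_all_refs(0).is_none());
        assert_eq!(find_all_refs(5), Some(vec!["refs of some definition".to_string()]));
    }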
@@ -113,31 +115,47 @@ pub(crate) fn find_defs<'a>(
     sema: &'a Semantics<RootDatabase>,
     syntax: &SyntaxNode,
     offset: TextSize,
-) -> impl Iterator<Item = Definition> + 'a {
-    sema.find_nodes_at_offset_with_descend(syntax, offset).filter_map(move |name_like| {
-        let def = match name_like {
-            ast::NameLike::NameRef(name_ref) => match NameRefClass::classify(sema, &name_ref)? {
-                NameRefClass::Definition(def) => def,
-                NameRefClass::FieldShorthand { local_ref, field_ref: _ } => {
-                    Definition::Local(local_ref)
-                }
-            },
-            ast::NameLike::Name(name) => match NameClass::classify(sema, &name)? {
-                NameClass::Definition(it) | NameClass::ConstReference(it) => it,
-                NameClass::PatFieldShorthand { local_def, field_ref: _ } => {
-                    Definition::Local(local_def)
-                }
-            },
-            ast::NameLike::Lifetime(lifetime) => NameRefClass::classify_lifetime(sema, &lifetime)
-                .and_then(|class| match class {
-                    NameRefClass::Definition(it) => Some(it),
-                    _ => None,
-                })
-                .or_else(|| {
-                    NameClass::classify_lifetime(sema, &lifetime).and_then(NameClass::defined)
-                })?,
-        };
-        Some(def)
-    })
+) -> Option<impl Iterator<Item = Definition> + 'a> {
+    let token = syntax.token_at_offset(offset).find(|t| {
+        matches!(
+            t.kind(),
+            IDENT | INT_NUMBER | LIFETIME_IDENT | T![self] | T![super] | T![crate] | T![Self]
+        )
+    });
+    token.map(|token| {
+        sema.descend_into_macros_with_same_kind(token)
+            .into_iter()
+            .filter_map(|it| ast::NameLike::cast(it.parent()?))
+            .filter_map(move |name_like| {
+                let def = match name_like {
+                    ast::NameLike::NameRef(name_ref) => {
+                        match NameRefClass::classify(sema, &name_ref)? {
+                            NameRefClass::Definition(def) => def,
+                            NameRefClass::FieldShorthand { local_ref, field_ref: _ } => {
+                                Definition::Local(local_ref)
+                            }
+                        }
+                    }
+                    ast::NameLike::Name(name) => match NameClass::classify(sema, &name)? {
+                        NameClass::Definition(it) | NameClass::ConstReference(it) => it,
+                        NameClass::PatFieldShorthand { local_def, field_ref: _ } => {
+                            Definition::Local(local_def)
+                        }
+                    },
+                    ast::NameLike::Lifetime(lifetime) => {
+                        NameRefClass::classify_lifetime(sema, &lifetime)
+                            .and_then(|class| match class {
+                                NameRefClass::Definition(it) => Some(it),
+                                _ => None,
+                            })
+                            .or_else(|| {
+                                NameClass::classify_lifetime(sema, &lifetime)
+                                    .and_then(NameClass::defined)
+                            })?
+                    }
+                };
+                Some(def)
+            })
+    })
 }
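For reference, the token-kind gate used in the new find_defs, pulled out on its own: only identifiers, integer literals (presumably for tuple-field indices like x.0), lifetime identifiers and the self/super/crate/Self keywords are treated as anchors for reference search before descending into macros. The helper name below is made up; the kinds and the T! macro come from the syntax crate.

    use syntax::{SyntaxKind, SyntaxKind::*, T};

    // Sketch only: the predicate from the hunk above, extracted into a
    // hypothetical helper for readability.
    fn is_reference_search_anchor(kind: SyntaxKind) -> bool {
        matches!(
            kind,
            IDENT | INT_NUMBER | LIFETIME_IDENT | T![self] | T![super] | T![crate] | T![Self]
        )
    }

    fn main() {
        // An identifier qualifies; a punctuation token such as `,` does not.
        assert!(is_reference_search_anchor(IDENT));
        assert!(!is_reference_search_anchor(COMMA));
    }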
@@ -226,7 +226,11 @@ fn find_related_tests(
     search_scope: Option<SearchScope>,
     tests: &mut FxHashSet<Runnable>,
 ) {
-    let defs = references::find_defs(sema, syntax, position.offset);
+    // FIXME: why is this using references::find_defs, this should use ide_db::search
+    let defs = match references::find_defs(sema, syntax, position.offset) {
+        Some(defs) => defs,
+        None => return,
+    };
     for def in defs {
         let defs = def
             .usages(sema)