Mirror of https://github.com/rust-lang/rust-analyzer, synced 2024-11-14 00:47:18 +00:00
Auto merge of #15466 - Veykril:prep-inline-fmt-args, r=Veykril
internal: Add offset param to token descending API

The offset is unused for now, as we can't map by spans yet, but it will be required for https://github.com/rust-lang/rust-analyzer/issues/11260 to work once the token map has been changed to record spans.
Commit e69b96bd40
21 changed files with 185 additions and 143 deletions
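The shape of the change is uniform across the diff below: every token-descending entry point on `Semantics` gains a trailing `TextSize` offset, and call sites with no meaningful cursor position pass `0.into()`. A minimal, self-contained sketch of the before/after API shape; `Semantics`, `SyntaxToken`, and `TextSize` here are simplified stand-ins, not the real rust-analyzer definitions:

```rust
// Stand-in types only; the real ones live in the syntax and hir crates.
#[derive(Clone, Copy)]
struct TextSize(u32);

impl From<u32> for TextSize {
    fn from(raw: u32) -> Self {
        TextSize(raw)
    }
}

#[derive(Clone)]
struct SyntaxToken;

struct Semantics;

impl Semantics {
    // Before: fn descend_into_macros_single(&self, token: SyntaxToken) -> SyntaxToken
    // After: the absolute offset the request originated at is threaded through,
    // even though it stays unused until the token map records spans.
    fn descend_into_macros_single(&self, token: SyntaxToken, _offset: TextSize) -> SyntaxToken {
        token
    }
}

fn main() {
    let (sema, token) = (Semantics, SyntaxToken);
    // Call sites without a real cursor position pass a zero offset.
    let _descended = sema.descend_into_macros_single(token, 0.into());
}
```
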
@@ -37,7 +37,7 @@ use either::Either;
 use syntax::{
     algo::{self, skip_trivia_token},
     ast::{self, AstNode, HasDocComments},
-    AstPtr, Direction, SyntaxNode, SyntaxNodePtr, SyntaxToken,
+    AstPtr, Direction, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextSize,
 };
 
 use crate::{

@@ -642,6 +642,8 @@ impl ExpansionInfo {
         db: &dyn db::ExpandDatabase,
         item: Option<ast::Item>,
         token: InFile<&SyntaxToken>,
+        // FIXME: use this for range mapping, so that we can resolve inline format args
+        _relative_token_offset: Option<TextSize>,
     ) -> Option<impl Iterator<Item = InFile<SyntaxToken>> + '_> {
         assert_eq!(token.file_id, self.arg.file_id);
         let token_id_in_attr_input = if let Some(item) = item {

@@ -1051,16 +1053,6 @@ impl InFile<SyntaxToken> {
             }
         }
     }
-
-    pub fn ancestors_with_macros(
-        self,
-        db: &dyn db::ExpandDatabase,
-    ) -> impl Iterator<Item = InFile<SyntaxNode>> + '_ {
-        self.value.parent().into_iter().flat_map({
-            let file_id = self.file_id;
-            move |parent| InFile::new(file_id, &parent).ancestors_with_macros(db)
-        })
-    }
 }
 
 #[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]

@@ -170,6 +170,8 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
         self.imp.is_derive_annotated(item)
     }
 
+    /// Expand the macro call with a different token tree, mapping the `token_to_map` down into the
+    /// expansion. `token_to_map` should be a token from the `speculative args` node.
     pub fn speculative_expand(
         &self,
         actual_macro_call: &ast::MacroCall,

@@ -179,6 +181,8 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
         self.imp.speculative_expand(actual_macro_call, speculative_args, token_to_map)
     }
 
+    /// Expand the macro call with a different item as the input, mapping the `token_to_map` down into the
+    /// expansion. `token_to_map` should be a token from the `speculative args` node.
     pub fn speculative_expand_attr_macro(
         &self,
         actual_macro_call: &ast::Item,

@@ -201,14 +205,22 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
         )
     }
 
-    /// Descend the token into macrocalls to its first mapped counterpart.
-    pub fn descend_into_macros_single(&self, token: SyntaxToken) -> SyntaxToken {
-        self.imp.descend_into_macros_single(token)
+    /// Descend the token into its macro call if it is part of one, returning the token in the
+    /// expansion that it is associated with. If `offset` points into the token's range, it will
+    /// be considered for the mapping in case of inline format args.
+    pub fn descend_into_macros_single(&self, token: SyntaxToken, offset: TextSize) -> SyntaxToken {
+        self.imp.descend_into_macros_single(token, offset)
     }
 
-    /// Descend the token into macrocalls to all its mapped counterparts.
-    pub fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
-        self.imp.descend_into_macros(token)
+    /// Descend the token into its macro call if it is part of one, returning the tokens in the
+    /// expansion that it is associated with. If `offset` points into the token's range, it will
+    /// be considered for the mapping in case of inline format args.
+    pub fn descend_into_macros(
+        &self,
+        token: SyntaxToken,
+        offset: TextSize,
+    ) -> SmallVec<[SyntaxToken; 1]> {
+        self.imp.descend_into_macros(token, offset)
     }
 
     /// Descend the token into macrocalls to all its mapped counterparts that have the same text as the input token.

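Why the offset matters, in one concrete example. With inline format args (the motivation behind issue #11260, referenced in the commit message), the identifier lives inside a single string-literal token, so the token alone is too coarse a key for down-mapping; the cursor offset disambiguates which sub-range is being asked about. A sketch of the scenario as plain user code, not part of this diff:

```rust
fn main() {
    let x = 92;
    // A request at the offset of `x` inside the literal should resolve to the
    // local `x`, even though `"x = {x}"` is lexed as one string token.
    println!("x = {x}");
}
```
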
@@ -217,12 +229,17 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
     pub fn descend_into_macros_with_same_text(
         &self,
         token: SyntaxToken,
+        offset: TextSize,
     ) -> SmallVec<[SyntaxToken; 1]> {
-        self.imp.descend_into_macros_with_same_text(token)
+        self.imp.descend_into_macros_with_same_text(token, offset)
     }
 
-    pub fn descend_into_macros_with_kind_preference(&self, token: SyntaxToken) -> SyntaxToken {
-        self.imp.descend_into_macros_with_kind_preference(token)
+    pub fn descend_into_macros_with_kind_preference(
+        &self,
+        token: SyntaxToken,
+        offset: TextSize,
+    ) -> SyntaxToken {
+        self.imp.descend_into_macros_with_kind_preference(token, offset)
     }
 
     /// Maps a node down by mapping its first and last token down.

@@ -665,7 +682,7 @@ impl<'db> SemanticsImpl<'db> {
         };
 
         if first == last {
-            self.descend_into_macros_impl(first, &mut |InFile { value, .. }| {
+            self.descend_into_macros_impl(first, 0.into(), &mut |InFile { value, .. }| {
                 if let Some(node) = value.parent_ancestors().find_map(N::cast) {
                     res.push(node)
                 }

@@ -674,7 +691,7 @@ impl<'db> SemanticsImpl<'db> {
         } else {
             // Descend first and last token, then zip them to look for the node they belong to
             let mut scratch: SmallVec<[_; 1]> = smallvec![];
-            self.descend_into_macros_impl(first, &mut |token| {
+            self.descend_into_macros_impl(first, 0.into(), &mut |token| {
                 scratch.push(token);
                 false
             });

@@ -682,6 +699,7 @@ impl<'db> SemanticsImpl<'db> {
             let mut scratch = scratch.into_iter();
             self.descend_into_macros_impl(
                 last,
+                0.into(),
                 &mut |InFile { value: last, file_id: last_fid }| {
                     if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() {
                         if first_fid == last_fid {

@@ -705,19 +723,27 @@ impl<'db> SemanticsImpl<'db> {
         res
     }
 
-    fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
+    fn descend_into_macros(
+        &self,
+        token: SyntaxToken,
+        offset: TextSize,
+    ) -> SmallVec<[SyntaxToken; 1]> {
         let mut res = smallvec![];
-        self.descend_into_macros_impl(token, &mut |InFile { value, .. }| {
+        self.descend_into_macros_impl(token, offset, &mut |InFile { value, .. }| {
             res.push(value);
             false
         });
         res
     }
 
-    fn descend_into_macros_with_same_text(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
+    fn descend_into_macros_with_same_text(
+        &self,
+        token: SyntaxToken,
+        offset: TextSize,
+    ) -> SmallVec<[SyntaxToken; 1]> {
         let text = token.text();
         let mut res = smallvec![];
-        self.descend_into_macros_impl(token.clone(), &mut |InFile { value, .. }| {
+        self.descend_into_macros_impl(token.clone(), offset, &mut |InFile { value, .. }| {
             if value.text() == text {
                 res.push(value);
             }

@@ -729,7 +755,11 @@ impl<'db> SemanticsImpl<'db> {
         res
     }
 
-    fn descend_into_macros_with_kind_preference(&self, token: SyntaxToken) -> SyntaxToken {
+    fn descend_into_macros_with_kind_preference(
+        &self,
+        token: SyntaxToken,
+        offset: TextSize,
+    ) -> SyntaxToken {
         let fetch_kind = |token: &SyntaxToken| match token.parent() {
             Some(node) => match node.kind() {
                 kind @ (SyntaxKind::NAME | SyntaxKind::NAME_REF) => {

@@ -741,7 +771,7 @@ impl<'db> SemanticsImpl<'db> {
         };
         let preferred_kind = fetch_kind(&token);
         let mut res = None;
-        self.descend_into_macros_impl(token.clone(), &mut |InFile { value, .. }| {
+        self.descend_into_macros_impl(token.clone(), offset, &mut |InFile { value, .. }| {
             if fetch_kind(&value) == preferred_kind {
                 res = Some(value);
                 true

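The selection policy in these two hunks can be read in isolation: among the descended candidates, the first one whose parent kind matches the original token's kind wins, and the original token is the fallback. A self-contained sketch of that policy; `Kind` and `pick_with_kind_preference` are hypothetical names, not the real `SyntaxKind` machinery:

```rust
#[derive(Clone, Copy, PartialEq, Debug)]
enum Kind {
    Name,
    NameRef,
    Other,
}

// Prefer a candidate of the same kind as the original; fall back to the original.
fn pick_with_kind_preference(original: Kind, candidates: &[Kind]) -> Kind {
    candidates.iter().copied().find(|&kind| kind == original).unwrap_or(original)
}

fn main() {
    assert_eq!(
        pick_with_kind_preference(Kind::NameRef, &[Kind::Other, Kind::NameRef]),
        Kind::NameRef
    );
    assert_eq!(pick_with_kind_preference(Kind::Name, &[Kind::Other]), Kind::Name);
}
```
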
@@ -755,9 +785,9 @@ impl<'db> SemanticsImpl<'db> {
         res.unwrap_or(token)
     }
 
-    fn descend_into_macros_single(&self, token: SyntaxToken) -> SyntaxToken {
+    fn descend_into_macros_single(&self, token: SyntaxToken, offset: TextSize) -> SyntaxToken {
         let mut res = token.clone();
-        self.descend_into_macros_impl(token, &mut |InFile { value, .. }| {
+        self.descend_into_macros_impl(token, offset, &mut |InFile { value, .. }| {
             res = value;
             true
         });

@@ -767,9 +797,13 @@ impl<'db> SemanticsImpl<'db> {
     fn descend_into_macros_impl(
         &self,
         token: SyntaxToken,
+        // FIXME: We might want this to be Option<TextSize> to be able to opt out of subrange
+        // mapping, specifically for node downmapping
+        offset: TextSize,
         f: &mut dyn FnMut(InFile<SyntaxToken>) -> bool,
     ) {
         let _p = profile::span("descend_into_macros");
+        let relative_token_offset = token.text_range().start().checked_sub(offset);
         let parent = match token.parent() {
             Some(it) => it,
             None => return,

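A note on the arithmetic above: `TextSize` is the u32-backed offset type from the `text-size` crate (re-exported through rust-analyzer's `syntax` crate), and its `checked_sub` returns `None` on underflow rather than panicking, which is what makes a plain `TextSize` workable as the parameter type despite the FIXME. A minimal illustration, assuming `text-size` as a dependency:

```rust
use text_size::TextSize;

fn main() {
    let token_start = TextSize::from(10);
    let cursor = TextSize::from(14);

    // In-range subtraction yields a relative offset...
    assert_eq!(cursor.checked_sub(token_start), Some(TextSize::from(4)));
    // ...and underflow yields None instead of panicking.
    assert_eq!(token_start.checked_sub(cursor), None);
}
```
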
@@ -796,7 +830,12 @@ impl<'db> SemanticsImpl<'db> {
                     self.cache(value, file_id);
                 }
 
-                let mapped_tokens = expansion_info.map_token_down(self.db.upcast(), item, token)?;
+                let mapped_tokens = expansion_info.map_token_down(
+                    self.db.upcast(),
+                    item,
+                    token,
+                    relative_token_offset,
+                )?;
                 let len = stack.len();
 
                 // requeue the tokens we got from mapping our current token down

@@ -943,7 +982,7 @@ impl<'db> SemanticsImpl<'db> {
         offset: TextSize,
     ) -> impl Iterator<Item = impl Iterator<Item = SyntaxNode> + '_> + '_ {
         node.token_at_offset(offset)
-            .map(move |token| self.descend_into_macros(token))
+            .map(move |token| self.descend_into_macros(token, offset))
            .map(|descendants| {
                descendants.into_iter().map(move |it| self.token_ancestors_with_macros(it))
            })

@@ -48,7 +48,7 @@ pub(crate) fn extract_expressions_from_format_string(
     let tt = fmt_string.syntax().parent().and_then(ast::TokenTree::cast)?;
 
     let expanded_t = ast::String::cast(
-        ctx.sema.descend_into_macros_with_kind_preference(fmt_string.syntax().clone()),
+        ctx.sema.descend_into_macros_with_kind_preference(fmt_string.syntax().clone(), 0.into()),
     )?;
     if !is_format_string(&expanded_t) {
         return None;

@@ -750,7 +750,7 @@ impl FunctionBody {
                 .descendants_with_tokens()
                 .filter_map(SyntaxElement::into_token)
                 .filter(|it| matches!(it.kind(), SyntaxKind::IDENT | T![self]))
-                .flat_map(|t| sema.descend_into_macros(t))
+                .flat_map(|t| sema.descend_into_macros(t, 0.into()))
                 .for_each(|t| add_name_if_local(t.parent().and_then(ast::NameRef::cast)));
         }
     }

@@ -117,7 +117,7 @@ pub fn get_definition(
     sema: &Semantics<'_, RootDatabase>,
     token: SyntaxToken,
 ) -> Option<Definition> {
-    for token in sema.descend_into_macros(token) {
+    for token in sema.descend_into_macros(token, 0.into()) {
         let def = IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops);
         if let Some(&[x]) = def.as_deref() {
             return Some(x);

@@ -456,14 +456,14 @@ impl<'a> FindUsages<'a> {
                 it.text().trim_start_matches("r#") == name
             })
             .into_iter()
-            .flat_map(|token| {
+            .flat_map(move |token| {
                 // FIXME: There should be optimization potential here
                 // Currently we try to descend everything we find which
                 // means we call `Semantics::descend_into_macros` on
                 // every textual hit. That function is notoriously
                 // expensive even for things that do not get down mapped
                 // into macros.
-                sema.descend_into_macros(token).into_iter().filter_map(|it| it.parent())
+                sema.descend_into_macros(token, offset).into_iter().filter_map(|it| it.parent())
             })
     };
 

@@ -74,18 +74,20 @@ pub(crate) fn incoming_calls(
     Some(calls.into_items())
 }
 
-pub(crate) fn outgoing_calls(db: &RootDatabase, position: FilePosition) -> Option<Vec<CallItem>> {
+pub(crate) fn outgoing_calls(
+    db: &RootDatabase,
+    FilePosition { file_id, offset }: FilePosition,
+) -> Option<Vec<CallItem>> {
     let sema = Semantics::new(db);
-    let file_id = position.file_id;
     let file = sema.parse(file_id);
     let file = file.syntax();
-    let token = pick_best_token(file.token_at_offset(position.offset), |kind| match kind {
+    let token = pick_best_token(file.token_at_offset(offset), |kind| match kind {
         IDENT => 1,
         _ => 0,
     })?;
     let mut calls = CallLocations::default();
 
-    sema.descend_into_macros(token)
+    sema.descend_into_macros(token, offset)
         .into_iter()
         .filter_map(|it| it.parent_ancestors().nth(1).and_then(ast::Item::cast))
         .filter_map(|item| match item {

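Several `ide` entry points in this diff also switch from a `position: FilePosition` parameter plus field projections to destructuring the struct directly in the parameter list; `goto_declaration` and `highlight_related` below additionally keep the whole value with an `@` binding. A self-contained illustration of both forms, using a simplified stand-in struct rather than the real `ide_db` type:

```rust
// Simplified stand-in for ide_db's FilePosition (field types reduced to u32).
#[derive(Clone, Copy, Debug, PartialEq)]
struct FilePosition {
    file_id: u32,
    offset: u32,
}

// Destructuring in the parameter list: the body uses `file_id`/`offset` directly.
fn outgoing_calls(FilePosition { file_id, offset }: FilePosition) -> (u32, u32) {
    (file_id, offset)
}

// An `@` binding keeps the whole value in scope alongside the destructured field.
fn highlight_related(pos @ FilePosition { offset, .. }: FilePosition) -> (FilePosition, u32) {
    (pos, offset)
}

fn main() {
    let pos = FilePosition { file_id: 1, offset: 42 };
    assert_eq!(outgoing_calls(pos), (1, 42));
    assert_eq!(highlight_related(pos), (pos, 42));
}
```
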
@@ -131,19 +131,19 @@ pub(crate) fn remove_links(markdown: &str) -> String {
 // |===
 pub(crate) fn external_docs(
     db: &RootDatabase,
-    position: &FilePosition,
+    FilePosition { file_id, offset }: FilePosition,
     target_dir: Option<&OsStr>,
     sysroot: Option<&OsStr>,
 ) -> Option<DocumentationLinks> {
     let sema = &Semantics::new(db);
-    let file = sema.parse(position.file_id).syntax().clone();
-    let token = pick_best_token(file.token_at_offset(position.offset), |kind| match kind {
+    let file = sema.parse(file_id).syntax().clone();
+    let token = pick_best_token(file.token_at_offset(offset), |kind| match kind {
         IDENT | INT_NUMBER | T![self] => 3,
         T!['('] | T![')'] => 2,
         kind if kind.is_trivia() => 0,
         _ => 1,
     })?;
-    let token = sema.descend_into_macros_single(token);
+    let token = sema.descend_into_macros_single(token, offset);
 
     let node = token.parent()?;
     let definition = match_ast! {

@@ -285,7 +285,7 @@ impl DocCommentToken {
         let original_start = doc_token.text_range().start();
         let relative_comment_offset = offset - original_start - prefix_len;
 
-        sema.descend_into_macros(doc_token).into_iter().find_map(|t| {
+        sema.descend_into_macros(doc_token, offset).into_iter().find_map(|t| {
             let (node, descended_prefix_len) = match_ast! {
                 match t {
                     ast::Comment(comment) => (t.parent()?, TextSize::try_from(comment.prefix().len()).ok()?),

@@ -40,28 +40,33 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<
     // struct Bar;
     // ```
 
-    let derive = sema.descend_into_macros(tok.clone()).into_iter().find_map(|descended| {
-        let hir_file = sema.hir_file_for(&descended.parent()?);
-        if !hir_file.is_derive_attr_pseudo_expansion(db) {
-            return None;
-        }
+    let derive =
+        sema.descend_into_macros(tok.clone(), 0.into()).into_iter().find_map(|descended| {
+            let hir_file = sema.hir_file_for(&descended.parent()?);
+            if !hir_file.is_derive_attr_pseudo_expansion(db) {
+                return None;
+            }
 
-        let name = descended.parent_ancestors().filter_map(ast::Path::cast).last()?.to_string();
-        // up map out of the #[derive] expansion
-        let token = hir::InFile::new(hir_file, descended).upmap(db)?.value;
-        let attr = token.parent_ancestors().find_map(ast::Attr::cast)?;
-        let expansions = sema.expand_derive_macro(&attr)?;
-        let idx = attr
-            .token_tree()?
-            .token_trees_and_tokens()
-            .filter_map(NodeOrToken::into_token)
-            .take_while(|it| it != &token)
-            .filter(|it| it.kind() == T![,])
-            .count();
-        let expansion =
-            format(db, SyntaxKind::MACRO_ITEMS, position.file_id, expansions.get(idx).cloned()?);
-        Some(ExpandedMacro { name, expansion })
-    });
+            let name = descended.parent_ancestors().filter_map(ast::Path::cast).last()?.to_string();
+            // up map out of the #[derive] expansion
+            let token = hir::InFile::new(hir_file, descended).upmap(db)?.value;
+            let attr = token.parent_ancestors().find_map(ast::Attr::cast)?;
+            let expansions = sema.expand_derive_macro(&attr)?;
+            let idx = attr
+                .token_tree()?
+                .token_trees_and_tokens()
+                .filter_map(NodeOrToken::into_token)
+                .take_while(|it| it != &token)
+                .filter(|it| it.kind() == T![,])
+                .count();
+            let expansion = format(
+                db,
+                SyntaxKind::MACRO_ITEMS,
+                position.file_id,
+                expansions.get(idx).cloned()?,
+            );
+            Some(ExpandedMacro { name, expansion })
+        });
 
     if derive.is_some() {
         return derive;

@@ -17,8 +17,6 @@ use crate::FileRange;
 // Extends or shrinks the current selection to the encompassing syntactic construct
 // (expression, statement, item, module, etc). It works with multiple cursors.
 //
-// This is a standard LSP feature and not a protocol extension.
-//
 // |===
 // | Editor | Shortcut
 //

@@ -142,8 +140,10 @@ fn extend_tokens_from_range(
 
     // compute original mapped token range
     let extended = {
-        let fst_expanded = sema.descend_into_macros_single(first_token.clone());
-        let lst_expanded = sema.descend_into_macros_single(last_token.clone());
+        let fst_expanded =
+            sema.descend_into_macros_single(first_token.clone(), original_range.start());
+        let lst_expanded =
+            sema.descend_into_macros_single(last_token.clone(), original_range.end());
         let mut lca =
             algo::least_common_ancestor(&fst_expanded.parent()?, &lst_expanded.parent()?)?;
         lca = shallowest_node(&lca);

@@ -154,13 +154,16 @@ fn extend_tokens_from_range(
     };
 
     // Compute parent node range
-    let validate = |token: &SyntaxToken| -> bool {
-        let expanded = sema.descend_into_macros_single(token.clone());
-        let parent = match expanded.parent() {
-            Some(it) => it,
-            None => return false,
-        };
-        algo::least_common_ancestor(&extended, &parent).as_ref() == Some(&extended)
+    let validate = |offset: TextSize| {
+        let extended = &extended;
+        move |token: &SyntaxToken| -> bool {
+            let expanded = sema.descend_into_macros_single(token.clone(), offset);
+            let parent = match expanded.parent() {
+                Some(it) => it,
+                None => return false,
+            };
+            algo::least_common_ancestor(extended, &parent).as_ref() == Some(extended)
+        }
     };
 
     // Find the first and last text range under expanded parent

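`validate` turns from a single predicate into a closure factory here: calling it fixes an offset and returns a fresh `move` predicate, so the two `take_while` sites in the next hunk can each supply a different end of `original_range`. A self-contained sketch of the same borrowing pattern, with stand-in types and a `contains` check standing in for the least-common-ancestor test:

```rust
fn main() {
    let extended = vec![1usize, 2, 3];

    // The outer closure reborrows `extended` and fixes `offset`; the inner
    // `move` closure is the per-token predicate handed to `take_while`.
    let validate = |offset: usize| {
        let extended = &extended;
        move |token: &usize| -> bool { extended.contains(&(*token + offset)) }
    };

    let tokens = [0usize, 1, 2, 9];
    // Two call sites, each with its own offset, mirroring the start/end uses.
    let from_start: Vec<usize> = tokens.into_iter().take_while(validate(1)).collect();
    let from_end: Vec<usize> = tokens.into_iter().take_while(validate(2)).collect();
    assert_eq!(from_start, [0, 1, 2]);
    assert_eq!(from_end, [0, 1]);
}
```
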
@@ -168,14 +171,14 @@ fn extend_tokens_from_range(
         let token = token.prev_token()?;
         skip_trivia_token(token, Direction::Prev)
     })
-    .take_while(validate)
+    .take_while(validate(original_range.start()))
     .last()?;
 
     let last = successors(Some(last_token), |token| {
         let token = token.next_token()?;
         skip_trivia_token(token, Direction::Next)
     })
-    .take_while(validate)
+    .take_while(validate(original_range.end()))
     .last()?;
 
     let range = first.text_range().cover(last.text_range());

@@ -20,16 +20,16 @@ use crate::{
 // - fields in patterns will navigate to the field declaration of the struct, union or variant
 pub(crate) fn goto_declaration(
     db: &RootDatabase,
-    position: FilePosition,
+    position @ FilePosition { file_id, offset }: FilePosition,
 ) -> Option<RangeInfo<Vec<NavigationTarget>>> {
     let sema = Semantics::new(db);
-    let file = sema.parse(position.file_id).syntax().clone();
+    let file = sema.parse(file_id).syntax().clone();
     let original_token = file
-        .token_at_offset(position.offset)
+        .token_at_offset(offset)
         .find(|it| matches!(it.kind(), IDENT | T![self] | T![super] | T![crate] | T![Self]))?;
     let range = original_token.text_range();
     let info: Vec<NavigationTarget> = sema
-        .descend_into_macros(original_token)
+        .descend_into_macros(original_token, offset)
         .iter()
         .filter_map(|token| {
             let parent = token.parent()?;

@@ -29,45 +29,39 @@ use syntax::{ast, AstNode, AstToken, SyntaxKind::*, SyntaxToken, TextRange, T};
 // image::https://user-images.githubusercontent.com/48062697/113065563-025fbe00-91b1-11eb-83e4-a5a703610b23.gif[]
 pub(crate) fn goto_definition(
     db: &RootDatabase,
-    position: FilePosition,
+    FilePosition { file_id, offset }: FilePosition,
 ) -> Option<RangeInfo<Vec<NavigationTarget>>> {
     let sema = &Semantics::new(db);
-    let file = sema.parse(position.file_id).syntax().clone();
-    let original_token =
-        pick_best_token(file.token_at_offset(position.offset), |kind| match kind {
-            IDENT
-            | INT_NUMBER
-            | LIFETIME_IDENT
-            | T![self]
-            | T![super]
-            | T![crate]
-            | T![Self]
-            | COMMENT => 4,
-            // index and prefix ops
-            T!['['] | T![']'] | T![?] | T![*] | T![-] | T![!] => 3,
-            kind if kind.is_keyword() => 2,
-            T!['('] | T![')'] => 2,
-            kind if kind.is_trivia() => 0,
-            _ => 1,
-        })?;
+    let file = sema.parse(file_id).syntax().clone();
+    let original_token = pick_best_token(file.token_at_offset(offset), |kind| match kind {
+        IDENT
+        | INT_NUMBER
+        | LIFETIME_IDENT
+        | T![self]
+        | T![super]
+        | T![crate]
+        | T![Self]
+        | COMMENT => 4,
+        // index and prefix ops
+        T!['['] | T![']'] | T![?] | T![*] | T![-] | T![!] => 3,
+        kind if kind.is_keyword() => 2,
+        T!['('] | T![')'] => 2,
+        kind if kind.is_trivia() => 0,
+        _ => 1,
+    })?;
     if let Some(doc_comment) = token_as_doc_comment(&original_token) {
-        return doc_comment.get_definition_with_descend_at(
-            sema,
-            position.offset,
-            |def, _, link_range| {
-                let nav = def.try_to_nav(db)?;
-                Some(RangeInfo::new(link_range, vec![nav]))
-            },
-        );
+        return doc_comment.get_definition_with_descend_at(sema, offset, |def, _, link_range| {
+            let nav = def.try_to_nav(db)?;
+            Some(RangeInfo::new(link_range, vec![nav]))
+        });
     }
     let navs = sema
-        .descend_into_macros(original_token.clone())
+        .descend_into_macros(original_token.clone(), offset)
         .into_iter()
         .filter_map(|token| {
             let parent = token.parent()?;
             if let Some(tt) = ast::TokenTree::cast(parent) {
-                if let Some(x) = try_lookup_include_path(sema, tt, token.clone(), position.file_id)
-                {
+                if let Some(x) = try_lookup_include_path(sema, tt, token.clone(), file_id) {
                     return Some(vec![x]);
                 }
             }

@@ -22,20 +22,19 @@ use crate::{FilePosition, NavigationTarget, RangeInfo, TryToNav};
 // image::https://user-images.githubusercontent.com/48062697/113065566-02f85480-91b1-11eb-9288-aaad8abd8841.gif[]
 pub(crate) fn goto_implementation(
     db: &RootDatabase,
-    position: FilePosition,
+    FilePosition { file_id, offset }: FilePosition,
 ) -> Option<RangeInfo<Vec<NavigationTarget>>> {
     let sema = Semantics::new(db);
-    let source_file = sema.parse(position.file_id);
+    let source_file = sema.parse(file_id);
     let syntax = source_file.syntax().clone();
 
-    let original_token =
-        pick_best_token(syntax.token_at_offset(position.offset), |kind| match kind {
-            IDENT | T![self] | INT_NUMBER => 1,
-            _ => 0,
-        })?;
+    let original_token = pick_best_token(syntax.token_at_offset(offset), |kind| match kind {
+        IDENT | T![self] | INT_NUMBER => 1,
+        _ => 0,
+    })?;
     let range = original_token.text_range();
     let navs =
-        sema.descend_into_macros(original_token)
+        sema.descend_into_macros(original_token, offset)
             .into_iter()
             .filter_map(|token| token.parent().and_then(ast::NameLike::cast))
             .filter_map(|node| match &node {

@@ -16,13 +16,13 @@ use crate::{FilePosition, NavigationTarget, RangeInfo, TryToNav};
 // image::https://user-images.githubusercontent.com/48062697/113020657-b560f500-917a-11eb-9007-0f809733a338.gif[]
 pub(crate) fn goto_type_definition(
     db: &RootDatabase,
-    position: FilePosition,
+    FilePosition { file_id, offset }: FilePosition,
 ) -> Option<RangeInfo<Vec<NavigationTarget>>> {
     let sema = hir::Semantics::new(db);
 
-    let file: ast::SourceFile = sema.parse(position.file_id);
+    let file: ast::SourceFile = sema.parse(file_id);
     let token: SyntaxToken =
-        pick_best_token(file.syntax().token_at_offset(position.offset), |kind| match kind {
+        pick_best_token(file.syntax().token_at_offset(offset), |kind| match kind {
             IDENT | INT_NUMBER | T![self] => 2,
             kind if kind.is_trivia() => 0,
             _ => 1,

@@ -37,7 +37,7 @@ pub(crate) fn goto_type_definition(
         }
     };
     let range = token.text_range();
-    sema.descend_into_macros(token)
+    sema.descend_into_macros(token, offset)
         .into_iter()
         .filter_map(|token| {
             let ty = sema

@@ -15,6 +15,7 @@ use syntax::{
     SyntaxKind::{self, IDENT, INT_NUMBER},
     SyntaxNode, SyntaxToken, TextRange, T,
 };
+use text_edit::TextSize;
 
 use crate::{navigation_target::ToNav, references, NavigationTarget, TryToNav};
 

@@ -51,7 +52,7 @@ pub struct HighlightRelatedConfig {
 pub(crate) fn highlight_related(
     sema: &Semantics<'_, RootDatabase>,
     config: HighlightRelatedConfig,
-    FilePosition { offset, file_id }: FilePosition,
+    pos @ FilePosition { offset, file_id }: FilePosition,
 ) -> Option<Vec<HighlightedRange>> {
     let _p = profile::span("highlight_related");
     let syntax = sema.parse(file_id).syntax().clone();

@@ -79,7 +80,7 @@ pub(crate) fn highlight_related(
         }
         T![|] if config.closure_captures => highlight_closure_captures(sema, token, file_id),
         T![move] if config.closure_captures => highlight_closure_captures(sema, token, file_id),
-        _ if config.references => highlight_references(sema, &syntax, token, file_id),
+        _ if config.references => highlight_references(sema, &syntax, token, pos),
         _ => None,
     }
 }

@@ -129,9 +130,9 @@ fn highlight_references(
     sema: &Semantics<'_, RootDatabase>,
     node: &SyntaxNode,
     token: SyntaxToken,
-    file_id: FileId,
+    FilePosition { file_id, offset }: FilePosition,
 ) -> Option<Vec<HighlightedRange>> {
-    let defs = find_defs(sema, token.clone());
+    let defs = find_defs(sema, token.clone(), offset);
     let usages = defs
         .iter()
         .filter_map(|&d| {

@@ -455,8 +456,12 @@ fn cover_range(r0: Option<TextRange>, r1: Option<TextRange>) -> Option<TextRange
     }
 }
 
-fn find_defs(sema: &Semantics<'_, RootDatabase>, token: SyntaxToken) -> FxHashSet<Definition> {
-    sema.descend_into_macros(token)
+fn find_defs(
+    sema: &Semantics<'_, RootDatabase>,
+    token: SyntaxToken,
+    offset: TextSize,
+) -> FxHashSet<Definition> {
+    sema.descend_into_macros(token, offset)
         .into_iter()
         .filter_map(|token| IdentClass::classify_token(sema, &token))
         .map(IdentClass::definitions_no_ops)

@@ -162,9 +162,9 @@ fn hover_simple(
     // prefer descending the same token kind in attribute expansions, in normal macros text
     // equivalency is more important
     let descended = if in_attr {
-        [sema.descend_into_macros_with_kind_preference(original_token.clone())].into()
+        [sema.descend_into_macros_with_kind_preference(original_token.clone(), offset)].into()
     } else {
-        sema.descend_into_macros_with_same_text(original_token.clone())
+        sema.descend_into_macros_with_same_text(original_token.clone(), offset)
     };
     let descended = || descended.iter();
 

@@ -484,7 +484,7 @@ impl Analysis {
         sysroot: Option<&OsStr>,
     ) -> Cancellable<doc_links::DocumentationLinks> {
         self.with_db(|db| {
-            doc_links::external_docs(db, &position, target_dir, sysroot).unwrap_or_default()
+            doc_links::external_docs(db, position, target_dir, sysroot).unwrap_or_default()
         })
     }
 

@@ -99,7 +99,7 @@ pub(crate) fn moniker(
         });
     }
     let navs = sema
-        .descend_into_macros(original_token.clone())
+        .descend_into_macros(original_token.clone(), offset)
         .into_iter()
         .filter_map(|token| {
             IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops).map(|it| {

@@ -126,7 +126,7 @@ pub(crate) fn find_defs<'a>(
         )
     });
     token.map(|token| {
-        sema.descend_into_macros_with_same_text(token)
+        sema.descend_into_macros_with_same_text(token, offset)
             .into_iter()
             .filter_map(|it| ast::NameLike::cast(it.parent()?))
             .filter_map(move |name_like| {

@@ -67,17 +67,20 @@ impl SignatureHelp {
 }
 
 /// Computes parameter information for the given position.
-pub(crate) fn signature_help(db: &RootDatabase, position: FilePosition) -> Option<SignatureHelp> {
+pub(crate) fn signature_help(
+    db: &RootDatabase,
+    FilePosition { file_id, offset }: FilePosition,
+) -> Option<SignatureHelp> {
     let sema = Semantics::new(db);
-    let file = sema.parse(position.file_id);
+    let file = sema.parse(file_id);
     let file = file.syntax();
     let token = file
-        .token_at_offset(position.offset)
+        .token_at_offset(offset)
         .left_biased()
         // if the cursor is sandwiched between two space tokens and the call is unclosed
         // this prevents us from leaving the CallExpression
         .and_then(|tok| algo::skip_trivia_token(tok, Direction::Prev))?;
-    let token = sema.descend_into_macros_single(token);
+    let token = sema.descend_into_macros_single(token, offset);
 
     for node in token.parent_ancestors() {
         match_ast! {

@@ -395,10 +395,10 @@ fn traverse(
             NodeOrToken::Token(token) if token.kind() != COMMENT => {
                 let token = match attr_or_derive_item {
                     Some(AttrOrDerive::Attr(_)) => {
-                        sema.descend_into_macros_with_kind_preference(token)
+                        sema.descend_into_macros_with_kind_preference(token, 0.into())
                     }
                     Some(AttrOrDerive::Derive(_)) | None => {
-                        sema.descend_into_macros_single(token)
+                        sema.descend_into_macros_single(token, 0.into())
                     }
                 };
                 match token.parent().and_then(ast::NameLike::cast) {