Mirror of https://github.com/rust-lang/rust-analyzer, synced 2024-11-15 09:27:27 +00:00
Support goto_def for intra-doc-links in macro invocations
parent d99adc5738
commit 22c6f0a8a5
3 changed files with 82 additions and 75 deletions
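
The scenario this commit adds support for, sketched as a hypothetical example (not taken from the repository): the doc comment, and the intra-doc link inside it, reach the documented item only through a macro invocation, so resolving the link requires descending into the macro expansion.

macro_rules! declare {
    ($(#[$attr:meta])* $name:ident) => {
        $(#[$attr])*
        pub struct $name;
    };
}

pub struct Foo;

declare! {
    /// Links to [`Foo`]; goto-definition on this link now resolves,
    /// even though the doc comment is only attached to an item inside
    /// the macro expansion.
    Bar
}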
@@ -19,7 +19,12 @@ use ide_db::{
     helpers::pick_best_token,
     RootDatabase,
 };
-use syntax::{ast, match_ast, AstNode, SyntaxKind::*, SyntaxNode, TextRange, T};
+use syntax::{
+    ast::{self, IsString},
+    match_ast, AstNode, AstToken,
+    SyntaxKind::*,
+    SyntaxNode, SyntaxToken, TextRange, TextSize, T,
+};
 
 use crate::{
     doc_links::intra_doc_links::{parse_intra_doc_link, strip_prefixes_suffixes},
@@ -220,6 +225,66 @@ pub(crate) fn doc_attributes(
     }
 }
 
+pub(crate) struct DocCommentToken {
+    doc_token: SyntaxToken,
+    prefix_len: TextSize,
+}
+
+pub(crate) fn token_as_doc_comment(doc_token: &SyntaxToken) -> Option<DocCommentToken> {
+    (match_ast! {
+        match doc_token {
+            ast::Comment(comment) => TextSize::try_from(comment.prefix().len()).ok(),
+            ast::String(string) => doc_token.ancestors().find_map(ast::Attr::cast)
+                .filter(|attr| attr.simple_name().as_deref() == Some("doc")).and_then(|_| string.open_quote_text_range().map(|it| it.len())),
+            _ => None,
+        }
+    }).map(|prefix_len| DocCommentToken { prefix_len, doc_token: doc_token.clone() })
+}
+
+impl DocCommentToken {
+    pub(crate) fn get_definition_with_descend_at<T>(
+        self,
+        sema: &Semantics<RootDatabase>,
+        offset: TextSize,
+        // Definition, CommentOwner, range of intra doc link in original file
+        mut cb: impl FnMut(Definition, SyntaxNode, TextRange) -> Option<T>,
+    ) -> Option<T> {
+        let DocCommentToken { prefix_len, doc_token } = self;
+        // offset relative to the comments contents
+        let original_start = doc_token.text_range().start();
+        let relative_comment_offset = offset - original_start - prefix_len;
+
+        sema.descend_into_macros_many(doc_token.clone()).into_iter().find_map(|t| {
+            let (node, descended_prefix_len) = match_ast! {
+                match t {
+                    ast::Comment(comment) => (t.parent()?, TextSize::try_from(comment.prefix().len()).ok()?),
+                    ast::String(string) => (t.ancestors().skip_while(|n| n.kind() != ATTR).nth(1)?, string.open_quote_text_range()?.len()),
+                    _ => return None,
+                }
+            };
+            let token_start = t.text_range().start();
+            let abs_in_expansion_offset = token_start + relative_comment_offset + descended_prefix_len;
+
+            let (attributes, def) = doc_attributes(sema, &node)?;
+            let (docs, doc_mapping) = attributes.docs_with_rangemap(sema.db)?;
+            let (in_expansion_range, link, ns) =
+                extract_definitions_from_docs(&docs).into_iter().find_map(|(range, link, ns)| {
+                    let mapped = doc_mapping.map(range)?;
+                    (mapped.value.contains(abs_in_expansion_offset)).then(|| (mapped.value, link, ns))
+                })?;
+            // get the relative range to the doc/attribute in the expansion
+            let in_expansion_relative_range = in_expansion_range - descended_prefix_len - token_start;
+            // Apply relative range to the original input comment
+            let absolute_range = in_expansion_relative_range + original_start + prefix_len;
+            let def = match resolve_doc_path_for_def(sema.db, def, &link, ns)? {
+                Either::Left(it) => Definition::ModuleDef(it),
+                Either::Right(it) => Definition::Macro(it),
+            };
+            cb(def, node, absolute_range)
+        })
+    }
+}
+
 fn broken_link_clone_cb<'a, 'b>(link: BrokenLink<'a>) -> Option<(CowStr<'b>, CowStr<'b>)> {
     // These allocations are actually unnecessary but the lifetimes on BrokenLinkCallback are wrong
     // this is fixed in the repo but not on the crates.io release yet
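
To make the offset bookkeeping in get_definition_with_descend_at above concrete, here is a small self-contained sketch with made-up offsets (plain u32 standing in for TextSize; all numbers are assumed for illustration): the cursor offset inside an original /// [Foo] comment is translated into the expanded #[doc = " [Foo]"] string, and a link range found in the expansion is translated back into the original file.

fn main() {
    // Original file: the doc comment token `/// [Foo]` starts at offset 100 (assumed).
    let original_start = 100u32;
    let prefix_len = 3; // "///"
    let offset = 105; // cursor on the `F` of `[Foo]`
    // Offset relative to the comment's contents " [Foo]".
    let relative_comment_offset = offset - original_start - prefix_len;
    assert_eq!(relative_comment_offset, 2);

    // In the expansion the same text lives in a `#[doc = " [Foo]"]` string literal;
    // assume the string token starts at offset 40 and its opening quote is 1 byte.
    let token_start = 40;
    let descended_prefix_len = 1;
    let abs_in_expansion_offset = token_start + relative_comment_offset + descended_prefix_len;
    assert_eq!(abs_in_expansion_offset, 43); // points at the `F` inside the string

    // A link range 42..47 found in the expansion maps back to the original file
    // by undoing the same two adjustments.
    let map_back = |o: u32| o - descended_prefix_len - token_start + original_start + prefix_len;
    assert_eq!((map_back(42), map_back(47)), (104, 109)); // `[Foo]` in the original comment
}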
@@ -1,11 +1,9 @@
 use std::convert::TryInto;
 
 use crate::{
-    display::TryToNav,
-    doc_links::{doc_attributes, extract_definitions_from_docs, resolve_doc_path_for_def},
-    FilePosition, NavigationTarget, RangeInfo,
+    display::TryToNav, doc_links::token_as_doc_comment, FilePosition, NavigationTarget, RangeInfo,
 };
-use hir::{AsAssocItem, InFile, ModuleDef, Semantics};
+use hir::{AsAssocItem, ModuleDef, Semantics};
 use ide_db::{
     base_db::{AnchoredPath, FileId, FileLoader},
     defs::Definition,
@@ -30,7 +28,7 @@ pub(crate) fn goto_definition(
     db: &RootDatabase,
     position: FilePosition,
 ) -> Option<RangeInfo<Vec<NavigationTarget>>> {
-    let sema = Semantics::new(db);
+    let sema = &Semantics::new(db);
     let file = sema.parse(position.file_id).syntax().clone();
     let original_token =
         pick_best_token(file.token_at_offset(position.offset), |kind| match kind {
@@ -38,18 +36,11 @@ pub(crate) fn goto_definition(
             kind if kind.is_trivia() => 0,
             _ => 1,
         })?;
-    if let Some(_) = ast::Comment::cast(original_token.clone()) {
-        let parent = original_token.parent()?;
-        let (attributes, def) = doc_attributes(&sema, &parent)?;
-        let (docs, doc_mapping) = attributes.docs_with_rangemap(db)?;
-        let (_, link, ns) =
-            extract_definitions_from_docs(&docs).into_iter().find(|&(range, ..)| {
-                doc_mapping.map(range).map_or(false, |InFile { file_id, value: range }| {
-                    file_id == position.file_id.into() && range.contains(position.offset)
-                })
-            })?;
-        let nav = resolve_doc_path_for_def(db, def, &link, ns)?.try_to_nav(db)?;
-        return Some(RangeInfo::new(original_token.text_range(), vec![nav]));
+    if let Some(doc_comment) = token_as_doc_comment(&original_token) {
+        return doc_comment.get_definition_with_descend_at(sema, position.offset, |def, _, _| {
+            let nav = def.try_to_nav(db)?;
+            Some(RangeInfo::new(original_token.text_range(), vec![nav]))
+        });
     }
     let navs = sema
         .descend_into_macros_many(original_token.clone())
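
A regression test for the new goto_definition path could look roughly like the following, using the fixture conventions of the crate's existing goto_definition tests ($0 marks the cursor position, the //^^^ annotation the expected navigation target). This is an illustrative sketch under those assumptions, not a test taken from this commit.

#[test]
fn goto_def_for_intra_doc_link_inside_macro_invocation() {
    check(
        r#"
macro_rules! id {
    ($($tt:tt)*) => { $($tt)* };
}
fn foo() {}
 //^^^
id! {
    /// A link to [`foo$0`].
    fn bar() {}
}
"#,
    );
}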
@@ -1,4 +1,4 @@
-use std::{convert::TryFrom, iter};
+use std::iter;
 
 use either::Either;
 use hir::{AsAssocItem, HasAttrs, HasSource, HirDisplay, Semantics, TypeInfo};
@@ -14,20 +14,13 @@ use ide_db::{
 use itertools::Itertools;
 use stdx::format_to;
 use syntax::{
-    algo,
-    ast::{self, IsString},
-    display::fn_as_proc_macro_label,
-    match_ast, AstNode, AstToken, Direction,
-    SyntaxKind::*,
-    SyntaxNode, SyntaxToken, TextSize, T,
+    algo, ast, display::fn_as_proc_macro_label, match_ast, AstNode, Direction, SyntaxKind::*,
+    SyntaxNode, SyntaxToken, T,
 };
 
 use crate::{
     display::{macro_label, TryToNav},
-    doc_links::{
-        doc_attributes, extract_definitions_from_docs, remove_links, resolve_doc_path_for_def,
-        rewrite_links,
-    },
+    doc_links::{remove_links, rewrite_links, token_as_doc_comment},
     markdown_remove::remove_markdown,
     markup::Markup,
     runnables::{runnable_fn, runnable_mod},
@@ -118,57 +111,15 @@ pub(crate) fn hover(
             _ => 1,
         })?;
 
-    let descended = sema.descend_into_macros_many(original_token.clone());
-    // magic intra doc link handling
-    // FIXME: Lift this out to some other place, goto def wants this as well
-    let comment_prefix_len = match_ast! {
-        match original_token {
-            ast::Comment(comment) => TextSize::try_from(comment.prefix().len()).ok(),
-            ast::String(string) => original_token.ancestors().find_map(ast::Attr::cast)
-                .filter(|attr| attr.simple_name().as_deref() == Some("doc")).and_then(|_| string.open_quote_text_range().map(|it| it.len())),
-            _ => None,
-        }
-    };
-    if let Some(prefix_len) = comment_prefix_len {
+    if let Some(doc_comment) = token_as_doc_comment(&original_token) {
         cov_mark::hit!(no_highlight_on_comment_hover);
-
-        // offset relative to the comments contents
-        let original_start = original_token.text_range().start();
-        let relative_comment_offset = offset - original_start - prefix_len;
-
-        return descended.iter().find_map(|t| {
-            let (node, descended_prefix_len) = match_ast! {
-                match t {
-                    ast::Comment(comment) => (t.parent()?, TextSize::try_from(comment.prefix().len()).ok()?),
-                    ast::String(string) => (t.ancestors().skip_while(|n| n.kind() != ATTR).nth(1)?, string.open_quote_text_range()?.len()),
-                    _ => return None,
-                }
-            };
-            let token_start = t.text_range().start();
-            let abs_in_expansion_offset = token_start + relative_comment_offset + descended_prefix_len;
-
-            let (attributes, def) = doc_attributes(sema, &node)?;
-            let (docs, doc_mapping) = attributes.docs_with_rangemap(sema.db)?;
-            let (in_expansion_range, link, ns) = extract_definitions_from_docs(&docs).into_iter().find_map(
-                |(range, link, ns)| {
-                    let mapped = doc_mapping.map(range)?;
-                    (mapped.value.contains(abs_in_expansion_offset))
-                        .then(|| (mapped.value, link, ns))
-                },
-            )?;
-            // get the relative range to the doc/attribute in the expansion
-            let in_expansion_relative_range = in_expansion_range - descended_prefix_len - token_start;
-            // Apply relative range to the original input comment
-            let absolute_range = in_expansion_relative_range + original_start + prefix_len;
-            let def = match resolve_doc_path_for_def(sema.db, def, &link, ns)? {
-                Either::Left(it) => Definition::ModuleDef(it),
-                Either::Right(it) => Definition::Macro(it),
-            };
+        return doc_comment.get_definition_with_descend_at(sema, offset, |def, node, range| {
             let res = hover_for_definition(sema, file_id, def, &node, config)?;
-            Some(RangeInfo::new(absolute_range, res))
+            Some(RangeInfo::new(range, res))
         });
     }
 
+    let descended = sema.descend_into_macros_many(original_token.clone());
     // attributes, require special machinery as they are mere ident tokens
 
     // FIXME: Definition should include known lints and the like instead of having this special case here