mirror of https://github.com/rust-lang/rust-analyzer
synced 2025-01-12 13:18:47 +00:00

commit 48bebeaa32 (parent f2775ac2e9)

    support goto definition and find references

4 changed files with 163 additions and 45 deletions
.gitignore (vendored) +2

@@ -11,3 +11,5 @@ generated_assists.adoc
 generated_features.adoc
 generated_diagnostic.adoc
 .DS_Store
+/out/
+/dump.lsif
crates/ide/src/lib.rs

@@ -87,7 +87,7 @@ pub use crate::{
     references::ReferenceSearchResult,
     rename::RenameError,
     runnables::{Runnable, RunnableKind, TestId},
-    static_index::{StaticIndex, StaticIndexedFile, TokenStaticData, TokenId},
+    static_index::{StaticIndex, StaticIndexedFile, TokenId, TokenStaticData},
     syntax_highlighting::{
         tags::{Highlight, HlMod, HlMods, HlOperator, HlPunct, HlTag},
         HlRange,
crates/ide/src/static_index.rs

@@ -3,15 +3,17 @@
 use std::collections::HashMap;
 
+use hir::Semantics;
 use hir::{db::HirDatabase, Crate, Module};
-use ide_db::base_db::{FileId, SourceDatabaseExt};
-use ide_db::RootDatabase;
+use ide_db::base_db::{FileId, FileRange, SourceDatabaseExt};
 use ide_db::defs::Definition;
+use ide_db::RootDatabase;
 use rustc_hash::FxHashSet;
-use syntax::TextRange;
 use syntax::{AstNode, SyntaxKind::*, T};
+use syntax::{SyntaxToken, TextRange};
 
-use crate::hover::{get_definition_of_token, hover_for_definition};
+use crate::display::TryToNav;
+use crate::hover::hover_for_definition;
 use crate::{Analysis, Cancellable, Fold, HoverConfig, HoverDocFormat, HoverResult};
 
 /// A static representation of fully analyzed source code.
@@ -25,8 +27,15 @@ pub struct StaticIndex<'a> {
     def_map: HashMap<Definition, TokenId>,
 }
 
+pub struct ReferenceData {
+    pub range: FileRange,
+    pub is_definition: bool,
+}
+
 pub struct TokenStaticData {
     pub hover: Option<HoverResult>,
+    pub definition: Option<FileRange>,
+    pub references: Vec<ReferenceData>,
 }
 
 #[derive(Clone, Copy, PartialEq, Eq, Hash)]
@@ -42,14 +51,16 @@ impl TokenStore {
         id
     }
 
+    pub fn get_mut(&mut self, id: TokenId) -> Option<&mut TokenStaticData> {
+        self.0.get_mut(id.0)
+    }
+
     pub fn get(&self, id: TokenId) -> Option<&TokenStaticData> {
         self.0.get(id.0)
     }
 
-    pub fn iter(self) -> impl Iterator<Item=(TokenId, TokenStaticData)> {
-        self.0.into_iter().enumerate().map(|(i, x)| {
-            (TokenId(i), x)
-        })
-    }
+    pub fn iter(self) -> impl Iterator<Item = (TokenId, TokenStaticData)> {
+        self.0.into_iter().enumerate().map(|(i, x)| (TokenId(i), x))
+    }
 }
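
The new get_mut accessor is what lets the indexing loop (further down) append ReferenceData to a token after it has been interned, while iter consumes the store by value. A minimal sketch of a consumer, assuming a store already built by StaticIndex::compute; count_references is illustrative, not part of the commit:

    // `TokenStore` and `TokenStaticData` as defined in static_index.rs;
    // this helper is hypothetical.
    fn count_references(store: TokenStore) -> usize {
        // `iter` takes `self` by value and yields (TokenId, TokenStaticData).
        store.iter().map(|(_id, data)| data.references.len()).sum()
    }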
@@ -84,26 +95,15 @@ impl StaticIndex<'_> {
         });
         let hover_config =
             HoverConfig { links_in_hover: true, documentation: Some(HoverDocFormat::Markdown) };
-        let tokens = tokens
-            .filter(|token| match token.kind() {
-                IDENT
-                | INT_NUMBER
-                | LIFETIME_IDENT
-                | T![self]
-                | T![super]
-                | T![crate] => true,
-                _ => false,
-            });
-        let mut result = StaticIndexedFile {
-            file_id,
-            folds,
-            tokens: vec![],
-        };
+        let tokens = tokens.filter(|token| match token.kind() {
+            IDENT | INT_NUMBER | LIFETIME_IDENT | T![self] | T![super] | T![crate] => true,
+            _ => false,
+        });
+        let mut result = StaticIndexedFile { file_id, folds, tokens: vec![] };
         for token in tokens {
             let range = token.text_range();
             let node = token.parent().unwrap();
-            let def = get_definition_of_token(self.db, &sema, &sema.descend_into_macros(token), file_id, range.start(), &mut None);
-            let def = if let Some(x) = def {
+            let def = if let Some(x) = get_definition(&sema, token.clone()) {
                 x
             } else {
                 continue;
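
Only identifier-like tokens survive the filter above. INT_NUMBER is presumably kept for tuple-field accesses, where the "name" of the field is an integer token; an illustrative example (not from the commit):

    // The `0` in `p.0` is an INT_NUMBER token, yet it names the first
    // field of the tuple struct, so it can have a definition and
    // references like any identifier.
    struct Pair(i32, i32);
    fn first(p: Pair) -> i32 {
        p.0
    }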
@@ -112,18 +112,34 @@ impl StaticIndex<'_> {
                 *x
             } else {
                 let x = self.tokens.insert(TokenStaticData {
-                    hover: hover_for_definition(self.db, file_id, &sema, def, node, &hover_config),
+                    hover: hover_for_definition(&sema, file_id, def, &node, &hover_config),
+                    definition: def
+                        .try_to_nav(self.db)
+                        .map(|x| FileRange { file_id: x.file_id, range: x.focus_or_full_range() }),
+                    references: vec![],
                 });
                 self.def_map.insert(def, x);
                 x
             };
+            let token = self.tokens.get_mut(id).unwrap();
+            token.references.push(ReferenceData {
+                range: FileRange { range, file_id },
+                is_definition: if let Some(x) = def.try_to_nav(self.db) {
+                    x.file_id == file_id && x.focus_or_full_range() == range
+                } else {
+                    false
+                },
+            });
             result.tokens.push((range, id));
         }
         self.files.push(result);
         Ok(())
     }
 
-    pub fn compute<'a>(db: &'a RootDatabase, analysis: &'a Analysis) -> Cancellable<StaticIndex<'a>> {
+    pub fn compute<'a>(
+        db: &'a RootDatabase,
+        analysis: &'a Analysis,
+    ) -> Cancellable<StaticIndex<'a>> {
         let work = all_modules(db).into_iter().filter(|module| {
             let file_id = module.definition_source(db).file_id.original_file(db);
             let source_root = db.file_source_root(file_id);
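
With this hunk, every token occurrence records its own FileRange plus whether it coincides with its definition's focus range, and all occurrences of one Definition share a TokenId through def_map. A sketch of how a caller might walk the finished index; dump_index and the println format are assumptions, while the types and fields come from this commit:

    use ide::{Analysis, Cancellable, RootDatabase, StaticIndex, StaticIndexedFile};

    // Hypothetical consumer of the index.
    fn dump_index(db: &RootDatabase, analysis: &Analysis) -> Cancellable<()> {
        let index = StaticIndex::compute(db, analysis)?;
        for StaticIndexedFile { file_id, tokens, .. } in &index.files {
            for (range, id) in tokens {
                // Each (range, id) pair is one occurrence of a token in this
                // file; the TokenStaticData behind `id` holds its hover text,
                // definition site, and accumulated references.
                if let Some(token) = index.tokens.get(*id) {
                    println!("{:?} {:?}: {} reference(s)", file_id, range, token.references.len());
                }
            }
        }
        Ok(())
    }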
@@ -133,7 +149,8 @@ impl StaticIndex<'_> {
         let mut this = StaticIndex {
             files: vec![],
             tokens: Default::default(),
-            analysis, db,
+            analysis,
+            db,
             def_map: Default::default(),
         };
         let mut visited_files = FxHashSet::default();
@@ -150,3 +167,15 @@ impl StaticIndex<'_> {
         Ok(this)
     }
 }
+
+fn get_definition(sema: &Semantics<RootDatabase>, token: SyntaxToken) -> Option<Definition> {
+    for token in sema.descend_into_macros_many(token) {
+        let def = Definition::from_token(&sema, &token);
+        if let [x] = def.as_slice() {
+            return Some(*x);
+        } else {
+            continue;
+        };
+    }
+    None
+}
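
The new get_definition helper accepts a definition only when it is unambiguous: descend_into_macros_many can map one token to several expansion sites, and Definition::from_token can report several candidates, so the [x] slice pattern insists on exactly one. A standalone illustration of that pattern (names are hypothetical):

    // A single-element slice pattern matches exactly one candidate;
    // zero or many candidates fall through to `None`.
    fn unambiguous(candidates: &[u32]) -> Option<u32> {
        if let [only] = candidates {
            Some(*only)
        } else {
            None
        }
    }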
crates/rust-analyzer/src/cli/lsif.rs

@@ -4,7 +4,10 @@ use std::collections::HashMap;
 use std::env;
 use std::time::Instant;
 
-use ide::{Analysis, Cancellable, RootDatabase, StaticIndex, StaticIndexedFile, TokenId, TokenStaticData};
+use ide::{
+    Analysis, Cancellable, FileId, FileRange, RootDatabase, StaticIndex, StaticIndexedFile,
+    TokenId, TokenStaticData,
+};
 use ide_db::LineIndexDatabase;
 
 use ide_db::base_db::salsa::{self, ParallelDatabase};
@@ -31,6 +34,8 @@ impl<DB: ParallelDatabase> Clone for Snap<salsa::Snapshot<DB>> {
 struct LsifManager<'a> {
     count: i32,
     token_map: HashMap<TokenId, Id>,
+    range_map: HashMap<FileRange, Id>,
+    file_map: HashMap<FileId, Id>,
     analysis: &'a Analysis,
     db: &'a RootDatabase,
     vfs: &'a Vfs,
@@ -50,6 +55,8 @@ impl LsifManager<'_> {
         LsifManager {
             count: 0,
             token_map: HashMap::default(),
+            range_map: HashMap::default(),
+            file_map: HashMap::default(),
             analysis,
             db,
             vfs,
@@ -68,9 +75,54 @@ impl LsifManager<'_> {
         println!("{}", data);
     }
 
-    fn add_token(&mut self, id: TokenId, token: TokenStaticData) {
+    fn get_token_id(&mut self, id: TokenId) -> Id {
+        if let Some(x) = self.token_map.get(&id) {
+            return *x;
+        }
         let result_set_id = self.add(Element::Vertex(Vertex::ResultSet(ResultSet { key: None })));
         self.token_map.insert(id, result_set_id);
+        result_set_id
+    }
+
+    fn get_range_id(&mut self, id: FileRange) -> Cancellable<Id> {
+        if let Some(x) = self.range_map.get(&id) {
+            return Ok(*x);
+        }
+        let file_id = id.file_id;
+        let doc_id = self.get_file_id(file_id);
+        let line_index = self.db.line_index(file_id);
+        let line_index = LineIndex {
+            index: line_index.clone(),
+            encoding: OffsetEncoding::Utf16,
+            endings: LineEndings::Unix,
+        };
+        let range_id = self.add(Element::Vertex(Vertex::Range {
+            range: to_proto::range(&line_index, id.range),
+            tag: None,
+        }));
+        self.add(Element::Edge(Edge::Contains(EdgeDataMultiIn {
+            in_vs: vec![range_id.into()],
+            out_v: doc_id.into(),
+        })));
+        Ok(range_id)
+    }
+
+    fn get_file_id(&mut self, id: FileId) -> Id {
+        if let Some(x) = self.file_map.get(&id) {
+            return *x;
+        }
+        let path = self.vfs.file_path(id);
+        let path = path.as_path().unwrap();
+        let doc_id = self.add(Element::Vertex(Vertex::Document(Document {
+            language_id: "rust".to_string(),
+            uri: lsp_types::Url::from_file_path(path).unwrap(),
+        })));
+        self.file_map.insert(id, doc_id);
+        doc_id
+    }
+
+    fn add_token(&mut self, id: TokenId, token: TokenStaticData) -> Cancellable<()> {
+        let result_set_id = self.get_token_id(id);
         if let Some(hover) = token.hover {
             let hover_id = self.add(Element::Vertex(Vertex::HoverResult {
                 result: Hover {
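
get_token_id, get_range_id, and get_file_id all share one idiom: look the key up in its map, otherwise emit the vertex exactly once and cache the resulting Id, so repeated mentions of a token, range, or file never duplicate vertices in the output graph. A generic sketch of the idiom, with all names illustrative:

    use std::collections::HashMap;

    // Emit-once cache: `emit` runs at most once per key, and every later
    // lookup reuses the id produced by the first call.
    fn get_or_emit<K: std::hash::Hash + Eq>(
        map: &mut HashMap<K, u64>,
        key: K,
        emit: impl FnOnce() -> u64,
    ) -> u64 {
        if let Some(id) = map.get(&key) {
            return *id;
        }
        let id = emit();
        map.insert(key, id);
        id
    }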
@@ -83,16 +135,50 @@ impl LsifManager<'_> {
                 out_v: result_set_id.into(),
             })));
         }
+        if let Some(def) = token.definition {
+            let result_id = self.add(Element::Vertex(Vertex::DefinitionResult));
+            let def_vertex = self.get_range_id(def)?;
+            self.add(Element::Edge(Edge::Item(Item {
+                document: (*self.file_map.get(&def.file_id).unwrap()).into(),
+                property: None,
+                edge_data: EdgeDataMultiIn {
+                    in_vs: vec![def_vertex.into()],
+                    out_v: result_id.into(),
+                },
+            })));
+            self.add(Element::Edge(Edge::Definition(EdgeData {
+                in_v: result_id.into(),
+                out_v: result_set_id.into(),
+            })));
+        }
+        if !token.references.is_empty() {
+            let result_id = self.add(Element::Vertex(Vertex::ReferenceResult));
+            self.add(Element::Edge(Edge::References(EdgeData {
+                in_v: result_id.into(),
+                out_v: result_set_id.into(),
+            })));
+            for x in token.references {
+                let vertex = *self.range_map.get(&x.range).unwrap();
+                self.add(Element::Edge(Edge::Item(Item {
+                    document: (*self.file_map.get(&x.range.file_id).unwrap()).into(),
+                    property: Some(if x.is_definition {
+                        ItemKind::Definitions
+                    } else {
+                        ItemKind::References
+                    }),
+                    edge_data: EdgeDataMultiIn {
+                        in_vs: vec![vertex.into()],
+                        out_v: result_id.into(),
+                    },
+                })));
+            }
+        }
+        Ok(())
     }
 
     fn add_file(&mut self, file: StaticIndexedFile) -> Cancellable<()> {
-        let StaticIndexedFile { file_id, tokens, folds} = file;
-        let path = self.vfs.file_path(file_id);
-        let path = path.as_path().unwrap();
-        let doc_id = self.add(Element::Vertex(Vertex::Document(Document {
-            language_id: "rust".to_string(),
-            uri: lsp_types::Url::from_file_path(path).unwrap(),
-        })));
+        let StaticIndexedFile { file_id, tokens, folds } = file;
+        let doc_id = self.get_file_id(file_id);
         let text = self.analysis.file_text(file_id)?;
         let line_index = self.db.line_index(file_id);
         let line_index = LineIndex {

@@ -116,7 +202,8 @@ impl LsifManager<'_> {
             range: to_proto::range(&line_index, range),
             tag: None,
         }));
-        let result_set_id = *self.token_map.get(&id).expect("token map doesn't contain id");
+        self.range_map.insert(FileRange { file_id, range }, range_id);
+        let result_set_id = self.get_token_id(id);
         self.add(Element::Edge(Edge::Next(EdgeData {
             in_v: result_set_id.into(),
             out_v: range_id.into(),

@@ -161,12 +248,12 @@ impl flags::Lsif {
             position_encoding: Encoding::Utf16,
             tool_info: None,
         })));
-        for (id, token) in si.tokens.iter() {
-            lsif.add_token(id, token);
-        }
         for file in si.files {
             lsif.add_file(file)?;
         }
+        for (id, token) in si.tokens.iter() {
+            lsif.add_token(id, token)?;
+        }
         eprintln!("Generating LSIF finished in {:?}", now.elapsed());
         Ok(())
     }
 }
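
Note the loop order in the last hunk: add_file now runs before add_token. add_file populates range_map (and file_map) as it emits Range vertices, and add_token resolves every ReferenceData.range through range_map with unwrap, so the files must be indexed first; add_token also became fallible (Cancellable<()>), hence the new ?. Simplified shape of the driver after this commit:

    // Simplified from the hunk above: files first, then tokens.
    for file in si.files {
        lsif.add_file(file)?; // populates range_map and file_map
    }
    for (id, token) in si.tokens.iter() {
        lsif.add_token(id, token)?; // reads range_map via unwrap
    }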