From 48bebeaa32d1e0eb53336b80f14d8695f3cdd30a Mon Sep 17 00:00:00 2001
From: hamidreza kalbasi
Date: Thu, 23 Sep 2021 16:28:21 +0330
Subject: [PATCH] support goto definition and find references

---
 .gitignore                           |   2 +
 crates/ide/src/lib.rs                |   2 +-
 crates/ide/src/static_index.rs       |  89 ++++++++++++++-------
 crates/rust-analyzer/src/cli/lsif.rs | 115 +++++++++++++++++++++++----
 4 files changed, 163 insertions(+), 45 deletions(-)

diff --git a/.gitignore b/.gitignore
index 7e097c0158..f3e3cab1d6 100644
--- a/.gitignore
+++ b/.gitignore
@@ -11,3 +11,5 @@ generated_assists.adoc
 generated_features.adoc
 generated_diagnostic.adoc
 .DS_Store
+/out/
+/dump.lsif
diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs
index d50680ce14..dbfa99bdf2 100644
--- a/crates/ide/src/lib.rs
+++ b/crates/ide/src/lib.rs
@@ -87,7 +87,7 @@ pub use crate::{
     references::ReferenceSearchResult,
     rename::RenameError,
     runnables::{Runnable, RunnableKind, TestId},
-    static_index::{StaticIndex, StaticIndexedFile, TokenStaticData, TokenId},
+    static_index::{StaticIndex, StaticIndexedFile, TokenId, TokenStaticData},
     syntax_highlighting::{
         tags::{Highlight, HlMod, HlMods, HlOperator, HlPunct, HlTag},
         HlRange,
diff --git a/crates/ide/src/static_index.rs b/crates/ide/src/static_index.rs
index bd71177990..55a6710fcf 100644
--- a/crates/ide/src/static_index.rs
+++ b/crates/ide/src/static_index.rs
@@ -3,15 +3,17 @@
 
 use std::collections::HashMap;
 
+use hir::Semantics;
 use hir::{db::HirDatabase, Crate, Module};
-use ide_db::base_db::{FileId, SourceDatabaseExt};
-use ide_db::RootDatabase;
+use ide_db::base_db::{FileId, FileRange, SourceDatabaseExt};
 use ide_db::defs::Definition;
+use ide_db::RootDatabase;
 use rustc_hash::FxHashSet;
-use syntax::TextRange;
 use syntax::{AstNode, SyntaxKind::*, T};
+use syntax::{SyntaxToken, TextRange};
 
-use crate::hover::{get_definition_of_token, hover_for_definition};
+use crate::display::TryToNav;
+use crate::hover::hover_for_definition;
 use crate::{Analysis, Cancellable, Fold, HoverConfig, HoverDocFormat, HoverResult};
 
 /// A static representation of fully analyzed source code.
@@ -25,8 +27,15 @@ pub struct StaticIndex<'a> {
     def_map: HashMap<Definition, TokenId>,
 }
 
+pub struct ReferenceData {
+    pub range: FileRange,
+    pub is_definition: bool,
+}
+
 pub struct TokenStaticData {
     pub hover: Option<HoverResult>,
+    pub definition: Option<FileRange>,
+    pub references: Vec<ReferenceData>,
 }
 
 #[derive(Clone, Copy, PartialEq, Eq, Hash)]
@@ -42,14 +51,16 @@ impl TokenStore {
         id
     }
 
+    pub fn get_mut(&mut self, id: TokenId) -> Option<&mut TokenStaticData> {
+        self.0.get_mut(id.0)
+    }
+
     pub fn get(&self, id: TokenId) -> Option<&TokenStaticData> {
         self.0.get(id.0)
     }
-
-    pub fn iter(self) -> impl Iterator<Item = (TokenId, TokenStaticData)> {
-        self.0.into_iter().enumerate().map(|(i, x)| {
-            (TokenId(i), x)
-        })
+
+    pub fn iter(self) -> impl Iterator<Item = (TokenId, TokenStaticData)> {
+        self.0.into_iter().enumerate().map(|(i, x)| (TokenId(i), x))
     }
 }
 
@@ -84,26 +95,15 @@ impl StaticIndex<'_> {
         });
         let hover_config =
             HoverConfig { links_in_hover: true, documentation: Some(HoverDocFormat::Markdown) };
-        let tokens = tokens
-            .filter(|token| match token.kind() {
-                IDENT
-                | INT_NUMBER
-                | LIFETIME_IDENT
-                | T![self]
-                | T![super]
-                | T![crate] => true,
-                _ => false,
-            });
-        let mut result = StaticIndexedFile {
-            file_id,
-            folds,
-            tokens: vec![],
-        };
+        let tokens = tokens.filter(|token| match token.kind() {
+            IDENT | INT_NUMBER | LIFETIME_IDENT | T![self] | T![super] | T![crate] => true,
+            _ => false,
+        });
+        let mut result = StaticIndexedFile { file_id, folds, tokens: vec![] };
         for token in tokens {
             let range = token.text_range();
             let node = token.parent().unwrap();
-            let def = get_definition_of_token(self.db, &sema, &sema.descend_into_macros(token), file_id, range.start(), &mut None);
-            let def = if let Some(x) = def {
+            let def = if let Some(x) = get_definition(&sema, token.clone()) {
                 x
             } else {
                 continue;
@@ -112,18 +112,34 @@ impl StaticIndex<'_> {
                 *x
             } else {
                 let x = self.tokens.insert(TokenStaticData {
-                    hover: hover_for_definition(self.db, file_id, &sema, def, node, &hover_config),
+                    hover: hover_for_definition(&sema, file_id, def, &node, &hover_config),
+                    definition: def
+                        .try_to_nav(self.db)
+                        .map(|x| FileRange { file_id: x.file_id, range: x.focus_or_full_range() }),
+                    references: vec![],
                 });
                 self.def_map.insert(def, x);
                 x
             };
+            let token = self.tokens.get_mut(id).unwrap();
+            token.references.push(ReferenceData {
+                range: FileRange { range, file_id },
+                is_definition: if let Some(x) = def.try_to_nav(self.db) {
+                    x.file_id == file_id && x.focus_or_full_range() == range
+                } else {
+                    false
+                },
+            });
             result.tokens.push((range, id));
         }
         self.files.push(result);
         Ok(())
     }
-
-    pub fn compute<'a>(db: &'a RootDatabase, analysis: &'a Analysis) -> Cancellable<StaticIndex<'a>> {
+
+    pub fn compute<'a>(
+        db: &'a RootDatabase,
+        analysis: &'a Analysis,
+    ) -> Cancellable<StaticIndex<'a>> {
         let work = all_modules(db).into_iter().filter(|module| {
             let file_id = module.definition_source(db).file_id.original_file(db);
             let source_root = db.file_source_root(file_id);
@@ -133,7 +149,8 @@ impl StaticIndex<'_> {
         let mut this = StaticIndex {
             files: vec![],
             tokens: Default::default(),
-            analysis, db,
+            analysis,
+            db,
             def_map: Default::default(),
         };
         let mut visited_files = FxHashSet::default();
@@ -150,3 +167,15 @@ impl StaticIndex<'_> {
         Ok(this)
     }
 }
+
+fn get_definition(sema: &Semantics<RootDatabase>, token: SyntaxToken) -> Option<Definition> {
+    for token in sema.descend_into_macros_many(token) {
+        let def = Definition::from_token(&sema, &token);
+        if let [x] = def.as_slice() {
+            return Some(*x);
+        } else {
+            continue;
+        };
+    }
+    None
+}
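Note on consuming the new index data: the sketch below walks a computed StaticIndex and prints the definition and reference ranges that this patch now records per token. It is a minimal illustration only, assuming an already-loaded RootDatabase/Analysis pair (obtained however the caller normally loads a workspace); the helper name dump_index is hypothetical and not part of the patch.

    use ide::{Analysis, Cancellable, RootDatabase, StaticIndex};

    // Hypothetical helper: iterate the index produced by StaticIndex::compute
    // and show the per-token data introduced by this change.
    fn dump_index(db: &RootDatabase, analysis: &Analysis) -> Cancellable<()> {
        let si = StaticIndex::compute(db, analysis)?;
        for (_id, token) in si.tokens.iter() {
            // `definition` is the resolved target range, if the token resolves.
            if let Some(def) = &token.definition {
                eprintln!("defined at {:?} in {:?}", def.range, def.file_id);
            }
            // `references` lists every occurrence seen while indexing,
            // flagged as a definition site or a plain use.
            for r in &token.references {
                eprintln!("  occurrence {:?} (is_definition: {})", r.range, r.is_definition);
            }
        }
        Ok(())
    }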
diff --git a/crates/rust-analyzer/src/cli/lsif.rs b/crates/rust-analyzer/src/cli/lsif.rs
index 509842516a..f7be8374ca 100644
--- a/crates/rust-analyzer/src/cli/lsif.rs
+++ b/crates/rust-analyzer/src/cli/lsif.rs
@@ -4,7 +4,10 @@
 use std::collections::HashMap;
 use std::env;
 use std::time::Instant;
 
-use ide::{Analysis, Cancellable, RootDatabase, StaticIndex, StaticIndexedFile, TokenId, TokenStaticData};
+use ide::{
+    Analysis, Cancellable, FileId, FileRange, RootDatabase, StaticIndex, StaticIndexedFile,
+    TokenId, TokenStaticData,
+};
 use ide_db::LineIndexDatabase;
 use ide_db::base_db::salsa::{self, ParallelDatabase};
@@ -31,6 +34,8 @@ impl<DB: ParallelDatabase> Clone for Snap<salsa::Snapshot<DB>> {
 struct LsifManager<'a> {
     count: i32,
     token_map: HashMap<TokenId, Id>,
+    range_map: HashMap<FileRange, Id>,
+    file_map: HashMap<FileId, Id>,
     analysis: &'a Analysis,
     db: &'a RootDatabase,
     vfs: &'a Vfs,
@@ -50,12 +55,14 @@ impl LsifManager<'_> {
         LsifManager {
             count: 0,
             token_map: HashMap::default(),
+            range_map: HashMap::default(),
+            file_map: HashMap::default(),
             analysis,
             db,
             vfs,
         }
     }
-
+
     fn add(&mut self, data: Element) -> Id {
         let id = Id(self.count);
         self.emit(&serde_json::to_string(&Entry { id: id.into(), data }).unwrap());
@@ -68,9 +75,54 @@ impl LsifManager<'_> {
         println!("{}", data);
     }
 
-    fn add_token(&mut self, id: TokenId, token: TokenStaticData) {
+    fn get_token_id(&mut self, id: TokenId) -> Id {
+        if let Some(x) = self.token_map.get(&id) {
+            return *x;
+        }
         let result_set_id = self.add(Element::Vertex(Vertex::ResultSet(ResultSet { key: None })));
         self.token_map.insert(id, result_set_id);
+        result_set_id
+    }
+
+    fn get_range_id(&mut self, id: FileRange) -> Cancellable<Id> {
+        if let Some(x) = self.range_map.get(&id) {
+            return Ok(*x);
+        }
+        let file_id = id.file_id;
+        let doc_id = self.get_file_id(file_id);
+        let line_index = self.db.line_index(file_id);
+        let line_index = LineIndex {
+            index: line_index.clone(),
+            encoding: OffsetEncoding::Utf16,
+            endings: LineEndings::Unix,
+        };
+        let range_id = self.add(Element::Vertex(Vertex::Range {
+            range: to_proto::range(&line_index, id.range),
+            tag: None,
+        }));
+        self.add(Element::Edge(Edge::Contains(EdgeDataMultiIn {
+            in_vs: vec![range_id.into()],
+            out_v: doc_id.into(),
+        })));
+        Ok(range_id)
+    }
+
+    fn get_file_id(&mut self, id: FileId) -> Id {
+        if let Some(x) = self.file_map.get(&id) {
+            return *x;
+        }
+        let path = self.vfs.file_path(id);
+        let path = path.as_path().unwrap();
+        let doc_id = self.add(Element::Vertex(Vertex::Document(Document {
+            language_id: "rust".to_string(),
+            uri: lsp_types::Url::from_file_path(path).unwrap(),
+        })));
+        self.file_map.insert(id, doc_id);
+        doc_id
+    }
+
+    fn add_token(&mut self, id: TokenId, token: TokenStaticData) -> Cancellable<()> {
+        let result_set_id = self.get_token_id(id);
         if let Some(hover) = token.hover {
             let hover_id = self.add(Element::Vertex(Vertex::HoverResult {
                 result: Hover {
@@ -83,16 +135,50 @@ impl LsifManager<'_> {
                 out_v: result_set_id.into(),
             })));
         }
+        if let Some(def) = token.definition {
+            let result_id = self.add(Element::Vertex(Vertex::DefinitionResult));
+            let def_vertex = self.get_range_id(def)?;
+            self.add(Element::Edge(Edge::Item(Item {
+                document: (*self.file_map.get(&def.file_id).unwrap()).into(),
+                property: None,
+                edge_data: EdgeDataMultiIn {
+                    in_vs: vec![def_vertex.into()],
+                    out_v: result_id.into(),
+                },
+            })));
+            self.add(Element::Edge(Edge::Definition(EdgeData {
+                in_v: result_id.into(),
+                out_v: result_set_id.into(),
+            })));
+        }
+        if !token.references.is_empty() {
+            let result_id = self.add(Element::Vertex(Vertex::ReferenceResult));
+            self.add(Element::Edge(Edge::References(EdgeData {
+                in_v: result_id.into(),
+                out_v: result_set_id.into(),
+            })));
+            for x in token.references {
+                let vertex = *self.range_map.get(&x.range).unwrap();
+                self.add(Element::Edge(Edge::Item(Item {
+                    document: (*self.file_map.get(&x.range.file_id).unwrap()).into(),
+                    property: Some(if x.is_definition {
+                        ItemKind::Definitions
+                    } else {
+                        ItemKind::References
+                    }),
+                    edge_data: EdgeDataMultiIn {
+                        in_vs: vec![vertex.into()],
+                        out_v: result_id.into(),
+                    },
+                })));
+            }
+        }
+        Ok(())
     }
 
     fn add_file(&mut self, file: StaticIndexedFile) -> Cancellable<()> {
-        let StaticIndexedFile { file_id, tokens, folds} = file;
-        let path = self.vfs.file_path(file_id);
-        let path = path.as_path().unwrap();
-        let doc_id = self.add(Element::Vertex(Vertex::Document(Document {
-            language_id: "rust".to_string(),
-            uri: lsp_types::Url::from_file_path(path).unwrap(),
-        })));
+        let StaticIndexedFile { file_id, tokens, folds } = file;
+        let doc_id = self.get_file_id(file_id);
         let text = self.analysis.file_text(file_id)?;
         let line_index = self.db.line_index(file_id);
         let line_index = LineIndex {
@@ -116,7 +202,8 @@ impl LsifManager<'_> {
                 range: to_proto::range(&line_index, range),
                 tag: None,
             }));
-            let result_set_id = *self.token_map.get(&id).expect("token map doesn't contain id");
+            self.range_map.insert(FileRange { file_id, range }, range_id);
+            let result_set_id = self.get_token_id(id);
             self.add(Element::Edge(Edge::Next(EdgeData {
                 in_v: result_set_id.into(),
                 out_v: range_id.into(),
@@ -161,12 +248,12 @@ impl flags::Lsif {
             position_encoding: Encoding::Utf16,
             tool_info: None,
         })));
-        for (id, token) in si.tokens.iter() {
-            lsif.add_token(id, token);
-        }
         for file in si.files {
             lsif.add_file(file)?;
         }
+        for (id, token) in si.tokens.iter() {
+            lsif.add_token(id, token)?;
+        }
         eprintln!("Generating LSIF finished in {:?}", now.elapsed());
         Ok(())
     }
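A note on the emission order changed in the last hunk: add_file now runs before add_token because add_token's reference items look up range vertices in range_map, and range_map is only filled while add_file emits the per-token Range vertices. The get_file_id / get_range_id / get_token_id helpers all follow the same memoize-on-first-use pattern, so a file, range, or token vertex is emitted only once no matter how many times it is needed. The sketch below is a toy, self-contained illustration of that pattern; the IdAllocator type is invented for the example and is not the LsifManager API.

    use std::collections::HashMap;
    use std::hash::Hash;

    // Toy id allocator mirroring the get_*_id helpers: emit a vertex the first
    // time a key is seen, and hand back the recorded id on every later call.
    struct IdAllocator<K> {
        next: i32,
        map: HashMap<K, i32>,
    }

    impl<K: Hash + Eq> IdAllocator<K> {
        fn new() -> Self {
            IdAllocator { next: 0, map: HashMap::new() }
        }

        fn get_or_emit(&mut self, key: K) -> i32 {
            if let Some(&id) = self.map.get(&key) {
                return id;
            }
            let id = self.next;
            self.next += 1;
            // A real implementation would emit the vertex JSON line here.
            self.map.insert(key, id);
            id
        }
    }

    fn main() {
        let mut files = IdAllocator::new();
        assert_eq!(files.get_or_emit("lib.rs"), 0);
        assert_eq!(files.get_or_emit("main.rs"), 1);
        assert_eq!(files.get_or_emit("lib.rs"), 0); // reused, not re-emitted
    }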