10423: Internal: refactor for mdbook plugin r=Veykril a=HKalbasi

This PR upstreams the changes I made for the mdbook plugin: it adds inlay hints to `StaticIndex` and adjusts some functions to work around crate privacy.

Aside from this, is it okay if I bring the plugin in-tree? It is a simple binary crate. I feel it will be better maintained here and become resistant to API changes.

Co-authored-by: hamidreza kalbasi <hamidrezakalbasi@protonmail.com>
This commit is contained in:
bors[bot] 2021-10-12 18:35:03 +00:00 committed by GitHub
commit f185d1c533
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
3 changed files with 41 additions and 9 deletions

View file

@ -14,11 +14,14 @@ use syntax::{SyntaxToken, TextRange};
use crate::display::TryToNav; use crate::display::TryToNav;
use crate::hover::hover_for_definition; use crate::hover::hover_for_definition;
use crate::{Analysis, Fold, HoverConfig, HoverDocFormat, HoverResult}; use crate::{
Analysis, Fold, HoverConfig, HoverDocFormat, HoverResult, InlayHint, InlayHintsConfig,
};
/// A static representation of fully analyzed source code. /// A static representation of fully analyzed source code.
/// ///
/// The intended use-case is powering read-only code browsers and emitting LSIF /// The intended use-case is powering read-only code browsers and emitting LSIF
#[derive(Debug)]
pub struct StaticIndex<'a> { pub struct StaticIndex<'a> {
pub files: Vec<StaticIndexedFile>, pub files: Vec<StaticIndexedFile>,
pub tokens: TokenStore, pub tokens: TokenStore,
@ -27,21 +30,29 @@ pub struct StaticIndex<'a> {
def_map: HashMap<Definition, TokenId>, def_map: HashMap<Definition, TokenId>,
} }
#[derive(Debug)]
pub struct ReferenceData { pub struct ReferenceData {
pub range: FileRange, pub range: FileRange,
pub is_definition: bool, pub is_definition: bool,
} }
#[derive(Debug)]
pub struct TokenStaticData { pub struct TokenStaticData {
pub hover: Option<HoverResult>, pub hover: Option<HoverResult>,
pub definition: Option<FileRange>, pub definition: Option<FileRange>,
pub references: Vec<ReferenceData>, pub references: Vec<ReferenceData>,
} }
#[derive(Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct TokenId(usize); pub struct TokenId(usize);
#[derive(Default)] impl TokenId {
pub fn raw(self) -> usize {
self.0
}
}
#[derive(Default, Debug)]
pub struct TokenStore(Vec<TokenStaticData>); pub struct TokenStore(Vec<TokenStaticData>);
impl TokenStore { impl TokenStore {
@ -64,9 +75,11 @@ impl TokenStore {
} }
} }
#[derive(Debug)]
pub struct StaticIndexedFile { pub struct StaticIndexedFile {
pub file_id: FileId, pub file_id: FileId,
pub folds: Vec<Fold>, pub folds: Vec<Fold>,
pub inlay_hints: Vec<InlayHint>,
pub tokens: Vec<(TextRange, TokenId)>, pub tokens: Vec<(TextRange, TokenId)>,
} }
@ -86,6 +99,18 @@ fn all_modules(db: &dyn HirDatabase) -> Vec<Module> {
impl StaticIndex<'_> { impl StaticIndex<'_> {
fn add_file(&mut self, file_id: FileId) { fn add_file(&mut self, file_id: FileId) {
let folds = self.analysis.folding_ranges(file_id).unwrap(); let folds = self.analysis.folding_ranges(file_id).unwrap();
let inlay_hints = self
.analysis
.inlay_hints(
&InlayHintsConfig {
type_hints: true,
parameter_hints: true,
chaining_hints: true,
max_length: Some(25),
},
file_id,
)
.unwrap();
// hovers // hovers
let sema = hir::Semantics::new(self.db); let sema = hir::Semantics::new(self.db);
let tokens_or_nodes = sema.parse(file_id).syntax().clone(); let tokens_or_nodes = sema.parse(file_id).syntax().clone();
@ -99,7 +124,7 @@ impl StaticIndex<'_> {
IDENT | INT_NUMBER | LIFETIME_IDENT | T![self] | T![super] | T![crate] => true, IDENT | INT_NUMBER | LIFETIME_IDENT | T![self] | T![super] | T![crate] => true,
_ => false, _ => false,
}); });
let mut result = StaticIndexedFile { file_id, folds, tokens: vec![] }; let mut result = StaticIndexedFile { file_id, inlay_hints, folds, tokens: vec![] };
for token in tokens { for token in tokens {
let range = token.text_range(); let range = token.text_range();
let node = token.parent().unwrap(); let node = token.parent().unwrap();
@ -133,7 +158,8 @@ impl StaticIndex<'_> {
self.files.push(result); self.files.push(result);
} }
pub fn compute<'a>(db: &'a RootDatabase, analysis: &'a Analysis) -> StaticIndex<'a> { pub fn compute<'a>(analysis: &'a Analysis) -> StaticIndex<'a> {
let db = &*analysis.db;
let work = all_modules(db).into_iter().filter(|module| { let work = all_modules(db).into_iter().filter(|module| {
let file_id = module.definition_source(db).file_id.original_file(db); let file_id = module.definition_source(db).file_id.original_file(db);
let source_root = db.file_source_root(file_id); let source_root = db.file_source_root(file_id);
@ -181,7 +207,7 @@ mod tests {
fn check_all_ranges(ra_fixture: &str) { fn check_all_ranges(ra_fixture: &str) {
let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture); let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture);
let s = StaticIndex::compute(&*analysis.db, &analysis); let s = StaticIndex::compute(&analysis);
let mut range_set: HashSet<_> = ranges.iter().map(|x| x.0).collect(); let mut range_set: HashSet<_> = ranges.iter().map(|x| x.0).collect();
for f in s.files { for f in s.files {
for (range, _) in f.tokens { for (range, _) in f.tokens {
@ -199,7 +225,7 @@ mod tests {
fn check_definitions(ra_fixture: &str) { fn check_definitions(ra_fixture: &str) {
let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture); let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture);
let s = StaticIndex::compute(&*analysis.db, &analysis); let s = StaticIndex::compute(&analysis);
let mut range_set: HashSet<_> = ranges.iter().map(|x| x.0).collect(); let mut range_set: HashSet<_> = ranges.iter().map(|x| x.0).collect();
for (_, t) in s.tokens.iter() { for (_, t) in s.tokens.iter() {
if let Some(x) = t.definition { if let Some(x) = t.definition {

View file

@ -186,7 +186,7 @@ impl LsifManager<'_> {
} }
fn add_file(&mut self, file: StaticIndexedFile) { fn add_file(&mut self, file: StaticIndexedFile) {
let StaticIndexedFile { file_id, tokens, folds } = file; let StaticIndexedFile { file_id, tokens, folds, .. } = file;
let doc_id = self.get_file_id(file_id); let doc_id = self.get_file_id(file_id);
let text = self.analysis.file_text(file_id).unwrap(); let text = self.analysis.file_text(file_id).unwrap();
let line_index = self.db.line_index(file_id); let line_index = self.db.line_index(file_id);
@ -247,7 +247,7 @@ impl flags::Lsif {
let db = host.raw_database(); let db = host.raw_database();
let analysis = host.analysis(); let analysis = host.analysis();
let si = StaticIndex::compute(db, &analysis); let si = StaticIndex::compute(&analysis);
let mut lsif = LsifManager::new(&analysis, db, &vfs); let mut lsif = LsifManager::new(&analysis, db, &vfs);
lsif.add_vertex(lsif::Vertex::MetaData(lsif::MetaData { lsif.add_vertex(lsif::Vertex::MetaData(lsif::MetaData {

View file

@ -25,6 +25,12 @@ impl VfsPath {
VfsPath(VfsPathRepr::VirtualPath(VirtualPath(path))) VfsPath(VfsPathRepr::VirtualPath(VirtualPath(path)))
} }
/// Create a path from string. Input should be a string representation of
/// an absolute path inside filesystem
pub fn new_real_path(path: String) -> VfsPath {
VfsPath::from(AbsPathBuf::assert(path.into()))
}
/// Returns the `AbsPath` representation of `self` if `self` is on the file system. /// Returns the `AbsPath` representation of `self` if `self` is on the file system.
pub fn as_path(&self) -> Option<&AbsPath> { pub fn as_path(&self) -> Option<&AbsPath> {
match &self.0 { match &self.0 {