247: Hir r=matklad a=matklad

This doesn't achive anything new, just a big refactoring. 

The main change is that Descriptors are now called `hir`, and live in a separate crate.

Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>
This commit is contained in:
bors[bot] 2018-11-28 01:10:58 +00:00
commit 95c0c8f398
30 changed files with 1250 additions and 1061 deletions

34
Cargo.lock generated
View file

@ -604,10 +604,11 @@ name = "ra_analysis"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"fst 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "fst 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
"id-arena 1.0.2 (git+https://github.com/fitzgen/id-arena/?rev=43ecd67)",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", "parking_lot 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)",
"ra_db 0.1.0",
"ra_editor 0.1.0", "ra_editor 0.1.0",
"ra_hir 0.1.0",
"ra_syntax 0.1.0", "ra_syntax 0.1.0",
"rayon 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)", "rayon 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
"relative-path 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "relative-path 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
@ -628,6 +629,21 @@ dependencies = [
"tools 0.1.0", "tools 0.1.0",
] ]
[[package]]
name = "ra_db"
version = "0.1.0"
dependencies = [
"id-arena 1.0.2 (git+https://github.com/fitzgen/id-arena/?rev=43ecd67)",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)",
"ra_editor 0.1.0",
"ra_syntax 0.1.0",
"relative-path 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"salsa 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
"test_utils 0.1.0",
]
[[package]] [[package]]
name = "ra_editor" name = "ra_editor"
version = "0.1.0" version = "0.1.0"
@ -640,6 +656,22 @@ dependencies = [
"test_utils 0.1.0", "test_utils 0.1.0",
] ]
[[package]]
name = "ra_hir"
version = "0.1.0"
dependencies = [
"id-arena 1.0.2 (git+https://github.com/fitzgen/id-arena/?rev=43ecd67)",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)",
"ra_db 0.1.0",
"ra_editor 0.1.0",
"ra_syntax 0.1.0",
"relative-path 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"salsa 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
"test_utils 0.1.0",
]
[[package]] [[package]]
name = "ra_lsp_server" name = "ra_lsp_server"
version = "0.1.0" version = "0.1.0"

View file

@ -12,7 +12,8 @@ fst = "0.3.1"
salsa = "0.8.0" salsa = "0.8.0"
rustc-hash = "1.0" rustc-hash = "1.0"
parking_lot = "0.6.4" parking_lot = "0.6.4"
id-arena = { git = "https://github.com/fitzgen/id-arena/", rev = "43ecd67" }
ra_syntax = { path = "../ra_syntax" } ra_syntax = { path = "../ra_syntax" }
ra_editor = { path = "../ra_editor" } ra_editor = { path = "../ra_editor" }
ra_db = { path = "../ra_db" }
hir = { path = "../ra_hir", package = "ra_hir" }
test_utils = { path = "../test_utils" } test_utils = { path = "../test_utils" }

View file

@ -7,13 +7,11 @@ use ra_syntax::{
AstNode, AtomEdit, AstNode, AtomEdit,
SyntaxNodeRef, SyntaxNodeRef,
}; };
use ra_db::SyntaxDatabase;
use rustc_hash::{FxHashMap}; use rustc_hash::{FxHashMap};
use crate::{ use crate::{
db::{self, SyntaxDatabase}, db,
descriptors::{
module::{ModuleDescriptor}
},
Cancelable, FilePosition Cancelable, FilePosition
}; };
@ -31,14 +29,14 @@ pub(crate) fn completions(
db: &db::RootDatabase, db: &db::RootDatabase,
position: FilePosition, position: FilePosition,
) -> Cancelable<Option<Vec<CompletionItem>>> { ) -> Cancelable<Option<Vec<CompletionItem>>> {
let original_file = db.file_syntax(position.file_id); let original_file = db.source_file(position.file_id);
// Insert a fake ident to get a valid parse tree // Insert a fake ident to get a valid parse tree
let file = { let file = {
let edit = AtomEdit::insert(position.offset, "intellijRulezz".to_string()); let edit = AtomEdit::insert(position.offset, "intellijRulezz".to_string());
original_file.reparse(&edit) original_file.reparse(&edit)
}; };
let module = ctry!(ModuleDescriptor::guess_from_position(db, position)?); let module = ctry!(hir::Module::guess_from_position(db, position)?);
let mut res = Vec::new(); let mut res = Vec::new();
let mut has_completions = false; let mut has_completions = false;

View file

@ -6,23 +6,23 @@ use ra_syntax::{
ast::{self, LoopBodyOwner}, ast::{self, LoopBodyOwner},
SyntaxKind::*, SyntaxKind::*,
}; };
use hir::{
self,
FnScopes,
Def,
Path,
};
use crate::{ use crate::{
db::RootDatabase, db::RootDatabase,
completion::CompletionItem, completion::CompletionItem,
descriptors::{
module::{ModuleDescriptor},
function::FnScopes,
Def,
Path,
},
Cancelable Cancelable
}; };
pub(super) fn completions( pub(super) fn completions(
acc: &mut Vec<CompletionItem>, acc: &mut Vec<CompletionItem>,
db: &RootDatabase, db: &RootDatabase,
module: &ModuleDescriptor, module: &hir::Module,
file: &SourceFileNode, file: &SourceFileNode,
name_ref: ast::NameRef, name_ref: ast::NameRef,
) -> Cancelable<()> { ) -> Cancelable<()> {
@ -150,7 +150,7 @@ fn complete_fn(name_ref: ast::NameRef, scopes: &FnScopes, acc: &mut Vec<Completi
fn complete_path( fn complete_path(
acc: &mut Vec<CompletionItem>, acc: &mut Vec<CompletionItem>,
db: &RootDatabase, db: &RootDatabase,
module: &ModuleDescriptor, module: &hir::Module,
mut path: Path, mut path: Path,
) -> Cancelable<()> { ) -> Cancelable<()> {
if path.segments.is_empty() { if path.segments.is_empty() {

View file

@ -1,17 +1,12 @@
use std::sync::Arc; use std::sync::Arc;
#[cfg(test)] #[cfg(test)]
use parking_lot::Mutex; use parking_lot::Mutex;
use ra_editor::LineIndex;
use ra_syntax::{SourceFileNode, SyntaxNode};
use salsa::{self, Database}; use salsa::{self, Database};
use ra_db::{LocationIntener, BaseDatabase};
use hir::{self, DefId, DefLoc, FnId, SourceItemId};
use crate::{ use crate::{
db, symbol_index,
descriptors,
symbol_index::SymbolIndex,
syntax_ptr::SyntaxPtr,
loc2id::{IdMaps, IdDatabase},
Cancelable, Canceled, FileId,
}; };
#[derive(Debug)] #[derive(Debug)]
@ -22,7 +17,13 @@ pub(crate) struct RootDatabase {
events: (), events: (),
runtime: salsa::Runtime<RootDatabase>, runtime: salsa::Runtime<RootDatabase>,
id_maps: IdMaps, id_maps: Arc<IdMaps>,
}
#[derive(Debug, Default)]
struct IdMaps {
fns: LocationIntener<SourceItemId, FnId>,
defs: LocationIntener<DefLoc, DefId>,
} }
impl salsa::Database for RootDatabase { impl salsa::Database for RootDatabase {
@ -47,26 +48,18 @@ impl Default for RootDatabase {
let mut db = RootDatabase { let mut db = RootDatabase {
events: Default::default(), events: Default::default(),
runtime: salsa::Runtime::default(), runtime: salsa::Runtime::default(),
id_maps: IdMaps::default(), id_maps: Default::default(),
}; };
db.query_mut(crate::input::SourceRootQuery) db.query_mut(ra_db::SourceRootQuery)
.set(crate::input::WORKSPACE, Default::default()); .set(ra_db::WORKSPACE, Default::default());
db.query_mut(crate::input::CrateGraphQuery) db.query_mut(ra_db::CrateGraphQuery)
.set((), Default::default()); .set((), Default::default());
db.query_mut(crate::input::LibrariesQuery) db.query_mut(ra_db::LibrariesQuery)
.set((), Default::default()); .set((), Default::default());
db db
} }
} }
pub(crate) fn check_canceled(db: &impl salsa::Database) -> Cancelable<()> {
if db.salsa_runtime().is_current_revision_canceled() {
Err(Canceled)
} else {
Ok(())
}
}
impl salsa::ParallelDatabase for RootDatabase { impl salsa::ParallelDatabase for RootDatabase {
fn snapshot(&self) -> salsa::Snapshot<RootDatabase> { fn snapshot(&self) -> salsa::Snapshot<RootDatabase> {
salsa::Snapshot::new(RootDatabase { salsa::Snapshot::new(RootDatabase {
@ -77,9 +70,17 @@ impl salsa::ParallelDatabase for RootDatabase {
} }
} }
impl IdDatabase for RootDatabase { impl BaseDatabase for RootDatabase {}
fn id_maps(&self) -> &IdMaps {
&self.id_maps impl AsRef<LocationIntener<DefLoc, DefId>> for RootDatabase {
fn as_ref(&self) -> &LocationIntener<DefLoc, DefId> {
&self.id_maps.defs
}
}
impl AsRef<LocationIntener<hir::SourceItemId, FnId>> for RootDatabase {
fn as_ref(&self) -> &LocationIntener<hir::SourceItemId, FnId> {
&self.id_maps.fns
} }
} }
@ -108,63 +109,30 @@ impl RootDatabase {
salsa::database_storage! { salsa::database_storage! {
pub(crate) struct RootDatabaseStorage for RootDatabase { pub(crate) struct RootDatabaseStorage for RootDatabase {
impl crate::input::FilesDatabase { impl ra_db::FilesDatabase {
fn file_text() for crate::input::FileTextQuery; fn file_text() for ra_db::FileTextQuery;
fn file_source_root() for crate::input::FileSourceRootQuery; fn file_source_root() for ra_db::FileSourceRootQuery;
fn source_root() for crate::input::SourceRootQuery; fn source_root() for ra_db::SourceRootQuery;
fn libraries() for crate::input::LibrariesQuery; fn libraries() for ra_db::LibrariesQuery;
fn library_symbols() for crate::input::LibrarySymbolsQuery; fn crate_graph() for ra_db::CrateGraphQuery;
fn crate_graph() for crate::input::CrateGraphQuery;
} }
impl SyntaxDatabase { impl ra_db::SyntaxDatabase {
fn file_syntax() for FileSyntaxQuery; fn source_file() for ra_db::SourceFileQuery;
fn file_lines() for FileLinesQuery; fn file_lines() for ra_db::FileLinesQuery;
fn file_symbols() for FileSymbolsQuery;
fn resolve_syntax_ptr() for ResolveSyntaxPtrQuery;
} }
impl descriptors::DescriptorDatabase { impl symbol_index::SymbolsDatabase {
fn module_tree() for descriptors::ModuleTreeQuery; fn file_symbols() for symbol_index::FileSymbolsQuery;
fn fn_scopes() for descriptors::FnScopesQuery; fn library_symbols() for symbol_index::LibrarySymbolsQuery;
fn _file_items() for descriptors::FileItemsQuery; }
fn _file_item() for descriptors::FileItemQuery; impl hir::db::HirDatabase {
fn _input_module_items() for descriptors::InputModuleItemsQuery; fn module_tree() for hir::db::ModuleTreeQuery;
fn _item_map() for descriptors::ItemMapQuery; fn fn_scopes() for hir::db::FnScopesQuery;
fn _fn_syntax() for descriptors::FnSyntaxQuery; fn file_items() for hir::db::SourceFileItemsQuery;
fn _submodules() for descriptors::SubmodulesQuery; fn file_item() for hir::db::FileItemQuery;
fn input_module_items() for hir::db::InputModuleItemsQuery;
fn item_map() for hir::db::ItemMapQuery;
fn fn_syntax() for hir::db::FnSyntaxQuery;
fn submodules() for hir::db::SubmodulesQuery;
} }
} }
} }
salsa::query_group! {
pub(crate) trait SyntaxDatabase: crate::input::FilesDatabase {
fn file_syntax(file_id: FileId) -> SourceFileNode {
type FileSyntaxQuery;
}
fn file_lines(file_id: FileId) -> Arc<LineIndex> {
type FileLinesQuery;
}
fn file_symbols(file_id: FileId) -> Cancelable<Arc<SymbolIndex>> {
type FileSymbolsQuery;
}
fn resolve_syntax_ptr(ptr: SyntaxPtr) -> SyntaxNode {
type ResolveSyntaxPtrQuery;
// Don't retain syntax trees in memory
storage dependencies;
use fn crate::syntax_ptr::resolve_syntax_ptr;
}
}
}
fn file_syntax(db: &impl SyntaxDatabase, file_id: FileId) -> SourceFileNode {
let text = db.file_text(file_id);
SourceFileNode::parse(&*text)
}
fn file_lines(db: &impl SyntaxDatabase, file_id: FileId) -> Arc<LineIndex> {
let text = db.file_text(file_id);
Arc::new(LineIndex::new(&*text))
}
fn file_symbols(db: &impl SyntaxDatabase, file_id: FileId) -> Cancelable<Arc<SymbolIndex>> {
db::check_canceled(db)?;
let syntax = db.file_syntax(file_id);
Ok(Arc::new(SymbolIndex::for_file(file_id, syntax)))
}

View file

@ -1,21 +0,0 @@
use std::sync::Arc;
use ra_syntax::ast::{AstNode, FnDef, FnDefNode};
use crate::descriptors::{
function::{FnId, FnScopes},
DescriptorDatabase,
};
/// Resolve `FnId` to the corresponding `SyntaxNode`
pub(crate) fn fn_syntax(db: &impl DescriptorDatabase, fn_id: FnId) -> FnDefNode {
let ptr = db.id_maps().fn_ptr(fn_id);
let syntax = db.resolve_syntax_ptr(ptr);
FnDef::cast(syntax.borrowed()).unwrap().owned()
}
pub(crate) fn fn_scopes(db: &impl DescriptorDatabase, fn_id: FnId) -> Arc<FnScopes> {
let syntax = db._fn_syntax(fn_id);
let res = FnScopes::new(syntax.borrowed());
Arc::new(res)
}

View file

@ -1,137 +0,0 @@
pub(crate) mod function;
pub(crate) mod module;
mod path;
use std::sync::Arc;
use ra_syntax::{
ast::{self, FnDefNode, AstNode},
TextRange, SyntaxNode,
};
use crate::{
FileId,
db::SyntaxDatabase,
descriptors::function::{resolve_local_name, FnId, FnScopes},
descriptors::module::{
ModuleId, ModuleTree, ModuleSource, ModuleDescriptor,
nameres::{ItemMap, InputModuleItems, FileItems}
},
input::SourceRootId,
loc2id::{IdDatabase, DefId, DefLoc},
syntax_ptr::LocalSyntaxPtr,
Cancelable,
};
pub(crate) use self::path::{Path, PathKind};
pub(crate) use self::module::nameres::FileItemId;
salsa::query_group! {
pub(crate) trait DescriptorDatabase: SyntaxDatabase + IdDatabase {
fn fn_scopes(fn_id: FnId) -> Arc<FnScopes> {
type FnScopesQuery;
use fn function::imp::fn_scopes;
}
fn _file_items(file_id: FileId) -> Arc<FileItems> {
type FileItemsQuery;
storage dependencies;
use fn module::nameres::file_items;
}
fn _file_item(file_id: FileId, file_item_id: FileItemId) -> SyntaxNode {
type FileItemQuery;
storage dependencies;
use fn module::nameres::file_item;
}
fn _input_module_items(source_root_id: SourceRootId, module_id: ModuleId) -> Cancelable<Arc<InputModuleItems>> {
type InputModuleItemsQuery;
use fn module::nameres::input_module_items;
}
fn _item_map(source_root_id: SourceRootId) -> Cancelable<Arc<ItemMap>> {
type ItemMapQuery;
use fn module::nameres::item_map;
}
fn _module_tree(source_root_id: SourceRootId) -> Cancelable<Arc<ModuleTree>> {
type ModuleTreeQuery;
use fn module::imp::module_tree;
}
fn _fn_syntax(fn_id: FnId) -> FnDefNode {
type FnSyntaxQuery;
// Don't retain syntax trees in memory
storage dependencies;
use fn function::imp::fn_syntax;
}
fn _submodules(source: ModuleSource) -> Cancelable<Arc<Vec<module::imp::Submodule>>> {
type SubmodulesQuery;
use fn module::imp::submodules;
}
}
}
pub(crate) enum Def {
Module(ModuleDescriptor),
Item,
}
impl DefId {
pub(crate) fn resolve(self, db: &impl DescriptorDatabase) -> Cancelable<Def> {
let loc = db.id_maps().def_loc(self);
let res = match loc {
DefLoc::Module { id, source_root } => {
let descr = ModuleDescriptor::new(db, source_root, id)?;
Def::Module(descr)
}
DefLoc::Item { .. } => Def::Item,
};
Ok(res)
}
}
#[derive(Debug)]
pub struct ReferenceDescriptor {
pub range: TextRange,
pub name: String,
}
#[derive(Debug)]
pub struct DeclarationDescriptor<'a> {
pat: ast::BindPat<'a>,
pub range: TextRange,
}
impl<'a> DeclarationDescriptor<'a> {
pub fn new(pat: ast::BindPat) -> DeclarationDescriptor {
let range = pat.syntax().range();
DeclarationDescriptor { pat, range }
}
pub fn find_all_refs(&self) -> Vec<ReferenceDescriptor> {
let name_ptr = LocalSyntaxPtr::new(self.pat.syntax());
let fn_def = match self.pat.syntax().ancestors().find_map(ast::FnDef::cast) {
Some(def) => def,
None => return Default::default(),
};
let fn_scopes = FnScopes::new(fn_def);
let refs: Vec<_> = fn_def
.syntax()
.descendants()
.filter_map(ast::NameRef::cast)
.filter(|name_ref| match resolve_local_name(*name_ref, &fn_scopes) {
None => false,
Some(entry) => entry.ptr() == name_ptr,
})
.map(|name_ref| ReferenceDescriptor {
name: name_ref.syntax().text().to_string(),
range: name_ref.syntax().range(),
})
.collect();
refs
}
}

View file

@ -1,94 +1,33 @@
use std::{ use std::{
fmt, fmt,
hash::{Hash, Hasher},
sync::Arc, sync::Arc,
}; };
use ra_editor::{self, find_node_at_offset, FileSymbol, LineIndex, LocalEdit}; use ra_editor::{self, find_node_at_offset, FileSymbol, LineIndex, LocalEdit};
use ra_syntax::{ use ra_syntax::{
ast::{self, ArgListOwner, Expr, NameOwner}, ast::{self, ArgListOwner, Expr, NameOwner},
AstNode, SourceFileNode, SmolStr, AstNode, SourceFileNode,
SyntaxKind::*, SyntaxKind::*,
SyntaxNodeRef, TextRange, TextUnit, SyntaxNodeRef, TextRange, TextUnit,
}; };
use ra_db::{FilesDatabase, SourceRoot, SourceRootId, WORKSPACE, SyntaxDatabase, SourceFileQuery};
use rayon::prelude::*; use rayon::prelude::*;
use relative_path::RelativePath;
use rustc_hash::FxHashSet; use rustc_hash::FxHashSet;
use salsa::{Database, ParallelDatabase}; use salsa::{Database, ParallelDatabase};
use hir::{
self,
FnSignatureInfo,
Problem,
};
use crate::{ use crate::{
completion::{completions, CompletionItem}, completion::{completions, CompletionItem},
db::{self, FileSyntaxQuery, SyntaxDatabase}, db,
descriptors::{ symbol_index::{SymbolIndex, SymbolsDatabase},
function::{FnDescriptor, FnId}, AnalysisChange, Cancelable, CrateId, Diagnostic, FileId,
module::{ModuleDescriptor, Problem},
DeclarationDescriptor, DescriptorDatabase,
},
input::{FilesDatabase, SourceRoot, SourceRootId, WORKSPACE},
symbol_index::SymbolIndex,
AnalysisChange, Cancelable, CrateGraph, CrateId, Diagnostic, FileId, FileResolver,
FileSystemEdit, FilePosition, Query, SourceChange, SourceFileNodeEdit, FileSystemEdit, FilePosition, Query, SourceChange, SourceFileNodeEdit,
}; };
#[derive(Clone, Debug)]
pub(crate) struct FileResolverImp {
inner: Arc<FileResolver>,
}
impl PartialEq for FileResolverImp {
fn eq(&self, other: &FileResolverImp) -> bool {
self.inner() == other.inner()
}
}
impl Eq for FileResolverImp {}
impl Hash for FileResolverImp {
fn hash<H: Hasher>(&self, hasher: &mut H) {
self.inner().hash(hasher);
}
}
impl FileResolverImp {
pub(crate) fn new(inner: Arc<FileResolver>) -> FileResolverImp {
FileResolverImp { inner }
}
pub(crate) fn file_stem(&self, file_id: FileId) -> String {
self.inner.file_stem(file_id)
}
pub(crate) fn resolve(&self, file_id: FileId, path: &RelativePath) -> Option<FileId> {
self.inner.resolve(file_id, path)
}
pub(crate) fn debug_path(&self, file_id: FileId) -> Option<std::path::PathBuf> {
self.inner.debug_path(file_id)
}
fn inner(&self) -> *const FileResolver {
&*self.inner
}
}
impl Default for FileResolverImp {
fn default() -> FileResolverImp {
#[derive(Debug)]
struct DummyResolver;
impl FileResolver for DummyResolver {
fn file_stem(&self, _file_: FileId) -> String {
panic!("file resolver not set")
}
fn resolve(
&self,
_file_id: FileId,
_path: &::relative_path::RelativePath,
) -> Option<FileId> {
panic!("file resolver not set")
}
}
FileResolverImp {
inner: Arc::new(DummyResolver),
}
}
}
#[derive(Debug, Default)] #[derive(Debug, Default)]
pub(crate) struct AnalysisHostImpl { pub(crate) struct AnalysisHostImpl {
db: db::RootDatabase, db: db::RootDatabase,
@ -105,7 +44,7 @@ impl AnalysisHostImpl {
for (file_id, text) in change.files_changed { for (file_id, text) in change.files_changed {
self.db self.db
.query_mut(crate::input::FileTextQuery) .query_mut(ra_db::FileTextQuery)
.set(file_id, Arc::new(text)) .set(file_id, Arc::new(text))
} }
if !(change.files_added.is_empty() && change.files_removed.is_empty()) { if !(change.files_added.is_empty() && change.files_removed.is_empty()) {
@ -115,22 +54,22 @@ impl AnalysisHostImpl {
let mut source_root = SourceRoot::clone(&self.db.source_root(WORKSPACE)); let mut source_root = SourceRoot::clone(&self.db.source_root(WORKSPACE));
for (file_id, text) in change.files_added { for (file_id, text) in change.files_added {
self.db self.db
.query_mut(crate::input::FileTextQuery) .query_mut(ra_db::FileTextQuery)
.set(file_id, Arc::new(text)); .set(file_id, Arc::new(text));
self.db self.db
.query_mut(crate::input::FileSourceRootQuery) .query_mut(ra_db::FileSourceRootQuery)
.set(file_id, crate::input::WORKSPACE); .set(file_id, ra_db::WORKSPACE);
source_root.files.insert(file_id); source_root.files.insert(file_id);
} }
for file_id in change.files_removed { for file_id in change.files_removed {
self.db self.db
.query_mut(crate::input::FileTextQuery) .query_mut(ra_db::FileTextQuery)
.set(file_id, Arc::new(String::new())); .set(file_id, Arc::new(String::new()));
source_root.files.remove(&file_id); source_root.files.remove(&file_id);
} }
source_root.file_resolver = file_resolver; source_root.file_resolver = file_resolver;
self.db self.db
.query_mut(crate::input::SourceRootQuery) .query_mut(ra_db::SourceRootQuery)
.set(WORKSPACE, Arc::new(source_root)) .set(WORKSPACE, Arc::new(source_root))
} }
if !change.libraries_added.is_empty() { if !change.libraries_added.is_empty() {
@ -147,10 +86,10 @@ impl AnalysisHostImpl {
library.file_resolver.debug_path(file_id) library.file_resolver.debug_path(file_id)
); );
self.db self.db
.query_mut(crate::input::FileSourceRootQuery) .query_mut(ra_db::FileSourceRootQuery)
.set_constant(file_id, source_root_id); .set_constant(file_id, source_root_id);
self.db self.db
.query_mut(crate::input::FileTextQuery) .query_mut(ra_db::FileTextQuery)
.set_constant(file_id, Arc::new(text)); .set_constant(file_id, Arc::new(text));
} }
let source_root = SourceRoot { let source_root = SourceRoot {
@ -158,19 +97,19 @@ impl AnalysisHostImpl {
file_resolver: library.file_resolver, file_resolver: library.file_resolver,
}; };
self.db self.db
.query_mut(crate::input::SourceRootQuery) .query_mut(ra_db::SourceRootQuery)
.set(source_root_id, Arc::new(source_root)); .set(source_root_id, Arc::new(source_root));
self.db self.db
.query_mut(crate::input::LibrarySymbolsQuery) .query_mut(crate::symbol_index::LibrarySymbolsQuery)
.set(source_root_id, Arc::new(library.symbol_index)); .set(source_root_id, Arc::new(library.symbol_index));
} }
self.db self.db
.query_mut(crate::input::LibrariesQuery) .query_mut(ra_db::LibrariesQuery)
.set((), Arc::new(libraries)); .set((), Arc::new(libraries));
} }
if let Some(crate_graph) = change.crate_graph { if let Some(crate_graph) = change.crate_graph {
self.db self.db
.query_mut(crate::input::CrateGraphQuery) .query_mut(ra_db::CrateGraphQuery)
.set((), Arc::new(crate_graph)) .set((), Arc::new(crate_graph))
} }
} }
@ -189,7 +128,7 @@ impl fmt::Debug for AnalysisImpl {
impl AnalysisImpl { impl AnalysisImpl {
pub fn file_syntax(&self, file_id: FileId) -> SourceFileNode { pub fn file_syntax(&self, file_id: FileId) -> SourceFileNode {
self.db.file_syntax(file_id) self.db.source_file(file_id)
} }
pub fn file_line_index(&self, file_id: FileId) -> Arc<LineIndex> { pub fn file_line_index(&self, file_id: FileId) -> Arc<LineIndex> {
self.db.file_lines(file_id) self.db.file_lines(file_id)
@ -220,14 +159,14 @@ impl AnalysisImpl {
.collect() .collect()
}; };
self.db self.db
.query(FileSyntaxQuery) .query(SourceFileQuery)
.sweep(salsa::SweepStrategy::default().discard_values()); .sweep(salsa::SweepStrategy::default().discard_values());
Ok(query.search(&buf)) Ok(query.search(&buf))
} }
/// This return `Vec`: a module may be included from several places. We /// This return `Vec`: a module may be included from several places. We
/// don't handle this case yet though, so the Vec has length at most one. /// don't handle this case yet though, so the Vec has length at most one.
pub fn parent_module(&self, position: FilePosition) -> Cancelable<Vec<(FileId, FileSymbol)>> { pub fn parent_module(&self, position: FilePosition) -> Cancelable<Vec<(FileId, FileSymbol)>> {
let descr = match ModuleDescriptor::guess_from_position(&*self.db, position)? { let descr = match hir::Module::guess_from_position(&*self.db, position)? {
None => return Ok(Vec::new()), None => return Ok(Vec::new()),
Some(it) => it, Some(it) => it,
}; };
@ -246,7 +185,7 @@ impl AnalysisImpl {
} }
/// Returns `Vec` for the same reason as `parent_module` /// Returns `Vec` for the same reason as `parent_module`
pub fn crate_for(&self, file_id: FileId) -> Cancelable<Vec<CrateId>> { pub fn crate_for(&self, file_id: FileId) -> Cancelable<Vec<CrateId>> {
let descr = match ModuleDescriptor::guess_from_file_id(&*self.db, file_id)? { let descr = match hir::Module::guess_from_file_id(&*self.db, file_id)? {
None => return Ok(Vec::new()), None => return Ok(Vec::new()),
Some(it) => it, Some(it) => it,
}; };
@ -261,7 +200,7 @@ impl AnalysisImpl {
Ok(crate_id.into_iter().collect()) Ok(crate_id.into_iter().collect())
} }
pub fn crate_root(&self, crate_id: CrateId) -> FileId { pub fn crate_root(&self, crate_id: CrateId) -> FileId {
self.db.crate_graph().crate_roots[&crate_id] self.db.crate_graph().crate_root(crate_id)
} }
pub fn completions(&self, position: FilePosition) -> Cancelable<Option<Vec<CompletionItem>>> { pub fn completions(&self, position: FilePosition) -> Cancelable<Option<Vec<CompletionItem>>> {
completions(&self.db, position) completions(&self.db, position)
@ -270,33 +209,36 @@ impl AnalysisImpl {
&self, &self,
position: FilePosition, position: FilePosition,
) -> Cancelable<Vec<(FileId, FileSymbol)>> { ) -> Cancelable<Vec<(FileId, FileSymbol)>> {
let file = self.db.file_syntax(position.file_id); let file = self.db.source_file(position.file_id);
let syntax = file.syntax(); let syntax = file.syntax();
if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(syntax, position.offset) { if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(syntax, position.offset) {
// First try to resolve the symbol locally if let Some(fn_descr) =
return if let Some((name, range)) = hir::Function::guess_for_name_ref(&*self.db, position.file_id, name_ref)
resolve_local_name(&self.db, position.file_id, name_ref)
{ {
let mut vec = vec![]; let scope = fn_descr.scope(&*self.db);
vec.push(( // First try to resolve the symbol locally
position.file_id, return if let Some(entry) = scope.resolve_local_name(name_ref) {
FileSymbol { let mut vec = vec![];
name, vec.push((
node_range: range, position.file_id,
kind: NAME, FileSymbol {
}, name: entry.name().clone(),
)); node_range: entry.ptr().range(),
Ok(vec) kind: NAME,
} else { },
// If that fails try the index based approach. ));
self.index_resolve(name_ref) Ok(vec)
}; } else {
// If that fails try the index based approach.
self.index_resolve(name_ref)
};
}
} }
if let Some(name) = find_node_at_offset::<ast::Name>(syntax, position.offset) { if let Some(name) = find_node_at_offset::<ast::Name>(syntax, position.offset) {
if let Some(module) = name.syntax().parent().and_then(ast::Module::cast) { if let Some(module) = name.syntax().parent().and_then(ast::Module::cast) {
if module.has_semi() { if module.has_semi() {
let parent_module = let parent_module =
ModuleDescriptor::guess_from_file_id(&*self.db, position.file_id)?; hir::Module::guess_from_file_id(&*self.db, position.file_id)?;
let child_name = module.name(); let child_name = module.name();
match (parent_module, child_name) { match (parent_module, child_name) {
(Some(parent_module), Some(child_name)) => { (Some(parent_module), Some(child_name)) => {
@ -319,32 +261,42 @@ impl AnalysisImpl {
} }
pub fn find_all_refs(&self, position: FilePosition) -> Vec<(FileId, TextRange)> { pub fn find_all_refs(&self, position: FilePosition) -> Vec<(FileId, TextRange)> {
let file = self.db.file_syntax(position.file_id); let file = self.db.source_file(position.file_id);
let syntax = file.syntax();
// Find the binding associated with the offset // Find the binding associated with the offset
let maybe_binding = let (binding, descr) = match find_binding(&self.db, &file, position) {
find_node_at_offset::<ast::BindPat>(syntax, position.offset).or_else(|| {
let name_ref = find_node_at_offset::<ast::NameRef>(syntax, position.offset)?;
let resolved = resolve_local_name(&self.db, position.file_id, name_ref)?;
find_node_at_offset::<ast::BindPat>(syntax, resolved.1.end())
});
let binding = match maybe_binding {
None => return Vec::new(), None => return Vec::new(),
Some(it) => it, Some(it) => it,
}; };
let decl = DeclarationDescriptor::new(binding); let mut ret = vec![(position.file_id, binding.syntax().range())];
let mut ret = vec![(position.file_id, decl.range)];
ret.extend( ret.extend(
decl.find_all_refs() descr
.scope(&*self.db)
.find_all_refs(binding)
.into_iter() .into_iter()
.map(|ref_desc| (position.file_id, ref_desc.range)), .map(|ref_desc| (position.file_id, ref_desc.range)),
); );
ret return ret;
fn find_binding<'a>(
db: &db::RootDatabase,
source_file: &'a SourceFileNode,
position: FilePosition,
) -> Option<(ast::BindPat<'a>, hir::Function)> {
let syntax = source_file.syntax();
if let Some(binding) = find_node_at_offset::<ast::BindPat>(syntax, position.offset) {
let descr = hir::Function::guess_for_bind_pat(db, position.file_id, binding)?;
return Some((binding, descr));
};
let name_ref = find_node_at_offset::<ast::NameRef>(syntax, position.offset)?;
let descr = hir::Function::guess_for_name_ref(db, position.file_id, name_ref)?;
let scope = descr.scope(db);
let resolved = scope.resolve_local_name(name_ref)?;
let resolved = resolved.ptr().resolve(source_file);
let binding = find_node_at_offset::<ast::BindPat>(syntax, resolved.range().end())?;
Some((binding, descr))
}
} }
pub fn doc_comment_for( pub fn doc_comment_for(
@ -352,13 +304,13 @@ impl AnalysisImpl {
file_id: FileId, file_id: FileId,
symbol: FileSymbol, symbol: FileSymbol,
) -> Cancelable<Option<String>> { ) -> Cancelable<Option<String>> {
let file = self.db.file_syntax(file_id); let file = self.db.source_file(file_id);
Ok(symbol.docs(&file)) Ok(symbol.docs(&file))
} }
pub fn diagnostics(&self, file_id: FileId) -> Cancelable<Vec<Diagnostic>> { pub fn diagnostics(&self, file_id: FileId) -> Cancelable<Vec<Diagnostic>> {
let syntax = self.db.file_syntax(file_id); let syntax = self.db.source_file(file_id);
let mut res = ra_editor::diagnostics(&syntax) let mut res = ra_editor::diagnostics(&syntax)
.into_iter() .into_iter()
@ -368,7 +320,7 @@ impl AnalysisImpl {
fix: None, fix: None,
}) })
.collect::<Vec<_>>(); .collect::<Vec<_>>();
if let Some(m) = ModuleDescriptor::guess_from_file_id(&*self.db, file_id)? { if let Some(m) = hir::Module::guess_from_file_id(&*self.db, file_id)? {
for (name_node, problem) in m.problems(&*self.db) { for (name_node, problem) in m.problems(&*self.db) {
let diag = match problem { let diag = match problem {
Problem::UnresolvedModule { candidate } => { Problem::UnresolvedModule { candidate } => {
@ -445,8 +397,8 @@ impl AnalysisImpl {
pub fn resolve_callable( pub fn resolve_callable(
&self, &self,
position: FilePosition, position: FilePosition,
) -> Cancelable<Option<(FnDescriptor, Option<usize>)>> { ) -> Cancelable<Option<(FnSignatureInfo, Option<usize>)>> {
let file = self.db.file_syntax(position.file_id); let file = self.db.source_file(position.file_id);
let syntax = file.syntax(); let syntax = file.syntax();
// Find the calling expression and it's NameRef // Find the calling expression and it's NameRef
@ -455,11 +407,12 @@ impl AnalysisImpl {
// Resolve the function's NameRef (NOTE: this isn't entirely accurate). // Resolve the function's NameRef (NOTE: this isn't entirely accurate).
let file_symbols = self.index_resolve(name_ref)?; let file_symbols = self.index_resolve(name_ref)?;
for (fn_fiel_id, fs) in file_symbols { for (fn_file_id, fs) in file_symbols {
if fs.kind == FN_DEF { if fs.kind == FN_DEF {
let fn_file = self.db.file_syntax(fn_fiel_id); let fn_file = self.db.source_file(fn_file_id);
if let Some(fn_def) = find_node_at_offset(fn_file.syntax(), fs.node_range.start()) { if let Some(fn_def) = find_node_at_offset(fn_file.syntax(), fs.node_range.start()) {
if let Some(descriptor) = FnDescriptor::new(fn_def) { let descr = hir::Function::guess_from_source(&*self.db, fn_file_id, fn_def);
if let Some(descriptor) = descr.signature_info(&*self.db) {
// If we have a calling expression let's find which argument we are on // If we have a calling expression let's find which argument we are on
let mut current_parameter = None; let mut current_parameter = None;
@ -532,16 +485,6 @@ impl SourceChange {
} }
} }
impl CrateGraph {
fn crate_id_for_crate_root(&self, file_id: FileId) -> Option<CrateId> {
let (&crate_id, _) = self
.crate_roots
.iter()
.find(|(_crate_id, &root_id)| root_id == file_id)?;
Some(crate_id)
}
}
enum FnCallNode<'a> { enum FnCallNode<'a> {
CallExpr(ast::CallExpr<'a>), CallExpr(ast::CallExpr<'a>),
MethodCallExpr(ast::MethodCallExpr<'a>), MethodCallExpr(ast::MethodCallExpr<'a>),
@ -580,16 +523,3 @@ impl<'a> FnCallNode<'a> {
} }
} }
} }
fn resolve_local_name(
db: &db::RootDatabase,
file_id: FileId,
name_ref: ast::NameRef,
) -> Option<(SmolStr, TextRange)> {
let fn_def = name_ref.syntax().ancestors().find_map(ast::FnDef::cast)?;
let fn_id = FnId::get(db, file_id, fn_def);
let scopes = db.fn_scopes(fn_id);
let scope_entry = crate::descriptors::function::resolve_local_name(name_ref, &scopes)?;
let syntax = db.resolve_syntax_ptr(scope_entry.ptr().into_global(file_id));
Some((scope_entry.name().clone(), syntax.range()))
}

View file

@ -18,49 +18,36 @@ macro_rules! ctry {
}; };
} }
mod arena;
mod db; mod db;
mod loc2id;
mod input;
mod imp; mod imp;
mod completion; mod completion;
mod descriptors;
mod symbol_index; mod symbol_index;
mod syntax_ptr;
pub mod mock_analysis; pub mod mock_analysis;
use std::{fmt, sync::Arc}; use std::{fmt, sync::Arc};
use ra_syntax::{AtomEdit, SourceFileNode, TextRange, TextUnit}; use ra_syntax::{AtomEdit, SourceFileNode, TextRange, TextUnit};
use ra_db::FileResolverImp;
use rayon::prelude::*; use rayon::prelude::*;
use relative_path::RelativePathBuf; use relative_path::RelativePathBuf;
use crate::{ use crate::{
imp::{AnalysisHostImpl, AnalysisImpl, FileResolverImp}, imp::{AnalysisHostImpl, AnalysisImpl},
symbol_index::SymbolIndex, symbol_index::SymbolIndex,
}; };
pub use crate::{ pub use crate::{
completion::CompletionItem, completion::CompletionItem,
descriptors::function::FnDescriptor,
input::{CrateGraph, CrateId, FileId, FileResolver},
}; };
pub use ra_editor::{ pub use ra_editor::{
FileSymbol, Fold, FoldKind, HighlightedRange, LineIndex, Runnable, RunnableKind, StructureNode, FileSymbol, Fold, FoldKind, HighlightedRange, LineIndex, Runnable, RunnableKind, StructureNode,
}; };
pub use hir::FnSignatureInfo;
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)] pub use ra_db::{
pub struct Canceled; Canceled, Cancelable, FilePosition,
CrateGraph, CrateId, FileId, FileResolver
pub type Cancelable<T> = Result<T, Canceled>; };
impl std::fmt::Display for Canceled {
fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
fmt.write_str("Canceled")
}
}
impl std::error::Error for Canceled {}
#[derive(Default)] #[derive(Default)]
pub struct AnalysisChange { pub struct AnalysisChange {
@ -130,12 +117,6 @@ impl AnalysisHost {
} }
} }
#[derive(Clone, Copy, Debug)]
pub struct FilePosition {
pub file_id: FileId,
pub offset: TextUnit,
}
#[derive(Debug)] #[derive(Debug)]
pub struct SourceChange { pub struct SourceChange {
pub label: String, pub label: String,
@ -305,7 +286,7 @@ impl Analysis {
pub fn resolve_callable( pub fn resolve_callable(
&self, &self,
position: FilePosition, position: FilePosition,
) -> Cancelable<Option<(FnDescriptor, Option<usize>)>> { ) -> Cancelable<Option<(FnSignatureInfo, Option<usize>)>> {
self.imp.resolve_callable(position) self.imp.resolve_callable(position)
} }
} }
@ -336,3 +317,112 @@ fn analysis_is_send() {
fn is_send<T: Send>() {} fn is_send<T: Send>() {}
is_send::<Analysis>(); is_send::<Analysis>();
} }
//TODO: move to hir
// Smoke tests for hir name resolution (`ItemMap`). They live here rather than
// in ra_hir because they need the mock analysis infrastructure of this crate.
#[cfg(test)]
mod hir_namres_tests {
use std::sync::Arc;
use ra_db::FilesDatabase;
use ra_syntax::SmolStr;
use hir::{self, db::HirDatabase};
use crate::{
AnalysisChange,
mock_analysis::{MockAnalysis, analysis_and_position},
};
// Builds the `ItemMap` for the source root containing the `<|>` marker in
// `fixture`, and returns it together with the id of the module at the marker.
fn item_map(fixture: &str) -> (Arc<hir::ItemMap>, hir::ModuleId) {
let (analysis, pos) = analysis_and_position(fixture);
let db = analysis.imp.db;
let source_root = db.file_source_root(pos.file_id);
let descr = hir::Module::guess_from_position(&*db, pos)
.unwrap()
.unwrap();
let module_id = descr.module_id;
(db.item_map(source_root).unwrap(), module_id)
}
#[test]
fn test_item_map() {
// `use crate::foo::bar::Baz;` in the marker module should resolve to the
// struct declared in /foo/bar.rs.
let (item_map, module_id) = item_map(
"
//- /lib.rs
mod foo;
use crate::foo::bar::Baz;
<|>
//- /foo/mod.rs
pub mod bar;
//- /foo/bar.rs
pub struct Baz;
",
);
let name = SmolStr::from("Baz");
let resolution = &item_map.per_module[&module_id].items[&name];
assert!(resolution.def_id.is_some());
}
#[test]
fn typing_inside_a_function_should_not_invalidate_item_map() {
let mock_analysis = MockAnalysis::with_files(
"
//- /lib.rs
mod foo;
use crate::foo::bar::Baz;
fn foo() -> i32 {
1 + 1
}
//- /foo/mod.rs
pub mod bar;
//- /foo/bar.rs
pub struct Baz;
",
);
let file_id = mock_analysis.id_of("/lib.rs");
let mut host = mock_analysis.analysis_host();
let source_root = host.analysis().imp.db.file_source_root(file_id);
{
// First computation: the item_map query must actually execute.
let db = host.analysis().imp.db;
let events = db.log_executed(|| {
db.item_map(source_root).unwrap();
});
assert!(format!("{:?}", events).contains("item_map"))
}
// Edit only a function body; item-level structure is unchanged.
let mut change = AnalysisChange::new();
change.change_file(
file_id,
"
mod foo;
use crate::foo::bar::Baz;
fn foo() -> i32 { 92 }
"
.to_string(),
);
host.apply_change(change);
{
// After the edit, item_map must NOT be recomputed.
// NOTE(review): this negative check looks for "_item_map" while the
// positive check above looks for "item_map" — confirm the executed
// query's debug name actually contains the underscore-prefixed form.
let db = host.analysis().imp.db;
let events = db.log_executed(|| {
db.item_map(source_root).unwrap();
});
assert!(
!format!("{:?}", events).contains("_item_map"),
"{:#?}",
events
)
}
}
}

View file

@ -1,141 +0,0 @@
use parking_lot::Mutex;
use std::{
hash::Hash,
sync::Arc,
};
use rustc_hash::FxHashMap;
use crate::{
FileId,
descriptors::FileItemId,
descriptors::module::ModuleId,
syntax_ptr::SyntaxPtr,
input::SourceRootId,
};
/// There are two principal ways to refer to things:
/// - by their location (module in foo/bar/baz.rs at line 42)
/// - by their numeric id (module `ModuleId(42)`)
///
/// The first one is more powerful (you can actually find the thing in question
/// by location), but the second one is so much more compact.
///
/// `Loc2IdMap` allows us to have our cake and eat it too: by maintaining a
/// bidirectional mapping between positional and numeric ids, we can use the
/// compact representation which still allows us to get the actual item
#[derive(Debug)]
pub(crate) struct Loc2IdMap<L, ID>
where
ID: NumericId,
L: Clone + Eq + Hash,
{
loc2id: FxHashMap<L, ID>,
id2loc: FxHashMap<ID, L>,
}
impl<L, ID> Default for Loc2IdMap<L, ID>
where
ID: NumericId,
L: Clone + Eq + Hash,
{
fn default() -> Self {
Loc2IdMap {
loc2id: FxHashMap::default(),
id2loc: FxHashMap::default(),
}
}
}
impl<L, ID> Loc2IdMap<L, ID>
where
ID: NumericId,
L: Clone + Eq + Hash,
{
pub fn loc2id(&mut self, loc: &L) -> ID {
match self.loc2id.get(loc) {
Some(id) => return id.clone(),
None => (),
}
let id = self.loc2id.len();
assert!(id < u32::max_value() as usize);
let id = ID::from_u32(id as u32);
self.loc2id.insert(loc.clone(), id.clone());
self.id2loc.insert(id.clone(), loc.clone());
id
}
pub fn id2loc(&self, id: ID) -> L {
self.id2loc[&id].clone()
}
}
pub(crate) trait NumericId: Clone + Eq + Hash {
fn from_u32(id: u32) -> Self;
fn to_u32(self) -> u32;
}
macro_rules! impl_numeric_id {
($id:ident) => {
impl NumericId for $id {
fn from_u32(id: u32) -> Self {
$id(id)
}
fn to_u32(self) -> u32 {
self.0
}
}
};
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub(crate) struct FnId(u32);
impl_numeric_id!(FnId);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub(crate) struct DefId(u32);
impl_numeric_id!(DefId);
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub(crate) enum DefLoc {
Module {
id: ModuleId,
source_root: SourceRootId,
},
Item {
file_id: FileId,
id: FileItemId,
},
}
pub(crate) trait IdDatabase: salsa::Database {
fn id_maps(&self) -> &IdMaps;
}
#[derive(Debug, Default, Clone)]
pub(crate) struct IdMaps {
inner: Arc<IdMapsInner>,
}
impl IdMaps {
pub(crate) fn fn_id(&self, ptr: SyntaxPtr) -> FnId {
self.inner.fns.lock().loc2id(&ptr)
}
pub(crate) fn fn_ptr(&self, fn_id: FnId) -> SyntaxPtr {
self.inner.fns.lock().id2loc(fn_id)
}
pub(crate) fn def_id(&self, loc: DefLoc) -> DefId {
self.inner.defs.lock().loc2id(&loc)
}
pub(crate) fn def_loc(&self, def_id: DefId) -> DefLoc {
self.inner.defs.lock().id2loc(def_id)
}
}
#[derive(Debug, Default)]
struct IdMapsInner {
fns: Mutex<Loc2IdMap<SyntaxPtr, FnId>>,
defs: Mutex<Loc2IdMap<DefLoc, DefId>>,
}

View file

@ -4,14 +4,36 @@ use std::{
}; };
use fst::{self, Streamer}; use fst::{self, Streamer};
use ra_editor::{file_symbols, FileSymbol}; use ra_editor::{self, FileSymbol};
use ra_syntax::{ use ra_syntax::{
SourceFileNode, SourceFileNode,
SyntaxKind::{self, *}, SyntaxKind::{self, *},
}; };
use ra_db::{SyntaxDatabase, SourceRootId};
use rayon::prelude::*; use rayon::prelude::*;
use crate::{FileId, Query}; use crate::{
Cancelable,
FileId, Query,
};
salsa::query_group! {
pub(crate) trait SymbolsDatabase: SyntaxDatabase {
fn file_symbols(file_id: FileId) -> Cancelable<Arc<SymbolIndex>> {
type FileSymbolsQuery;
}
fn library_symbols(id: SourceRootId) -> Arc<SymbolIndex> {
type LibrarySymbolsQuery;
storage input;
}
}
}
fn file_symbols(db: &impl SyntaxDatabase, file_id: FileId) -> Cancelable<Arc<SymbolIndex>> {
db.check_canceled()?;
let syntax = db.source_file(file_id);
Ok(Arc::new(SymbolIndex::for_file(file_id, syntax)))
}
#[derive(Default, Debug)] #[derive(Default, Debug)]
pub(crate) struct SymbolIndex { pub(crate) struct SymbolIndex {
@ -39,7 +61,7 @@ impl SymbolIndex {
) -> SymbolIndex { ) -> SymbolIndex {
let mut symbols = files let mut symbols = files
.flat_map(|(file_id, file)| { .flat_map(|(file_id, file)| {
file_symbols(&file) ra_editor::file_symbols(&file)
.into_iter() .into_iter()
.map(move |symbol| (symbol.name.as_str().to_lowercase(), (file_id, symbol))) .map(move |symbol| (symbol.name.as_str().to_lowercase(), (file_id, symbol)))
.collect::<Vec<_>>() .collect::<Vec<_>>()

View file

@ -10,10 +10,10 @@ use test_utils::assert_eq_dbg;
use ra_analysis::{ use ra_analysis::{
mock_analysis::{analysis_and_position, single_file, single_file_with_position, MockAnalysis}, mock_analysis::{analysis_and_position, single_file, single_file_with_position, MockAnalysis},
AnalysisChange, CrateGraph, FileId, FnDescriptor, AnalysisChange, CrateGraph, FileId, FnSignatureInfo,
}; };
fn get_signature(text: &str) -> (FnDescriptor, Option<usize>) { fn get_signature(text: &str) -> (FnSignatureInfo, Option<usize>) {
let (analysis, position) = single_file_with_position(text); let (analysis, position) = single_file_with_position(text);
analysis.resolve_callable(position).unwrap().unwrap() analysis.resolve_callable(position).unwrap().unwrap()
} }
@ -126,7 +126,7 @@ fn test_resolve_crate_root() {
let mut host = mock.analysis_host(); let mut host = mock.analysis_host();
assert!(host.analysis().crate_for(mod_file).unwrap().is_empty()); assert!(host.analysis().crate_for(mod_file).unwrap().is_empty());
let mut crate_graph = CrateGraph::new(); let mut crate_graph = CrateGraph::default();
let crate_id = crate_graph.add_crate_root(root_file); let crate_id = crate_graph.add_crate_root(root_file);
let mut change = AnalysisChange::new(); let mut change = AnalysisChange::new();
change.set_crate_graph(crate_graph); change.set_crate_graph(crate_graph);

16
crates/ra_db/Cargo.toml Normal file
View file

@ -0,0 +1,16 @@
[package]
edition = "2018"
name = "ra_db"
version = "0.1.0"
authors = ["Aleksey Kladov <aleksey.kladov@gmail.com>"]
[dependencies]
log = "0.4.5"
relative-path = "0.4.0"
salsa = "0.8.0"
rustc-hash = "1.0"
parking_lot = "0.6.4"
id-arena = { git = "https://github.com/fitzgen/id-arena/", rev = "43ecd67" }
ra_syntax = { path = "../ra_syntax" }
ra_editor = { path = "../ra_editor" }
test_utils = { path = "../test_utils" }

View file

@ -0,0 +1,76 @@
use std::{
sync::Arc,
hash::{Hash, Hasher},
fmt,
};
use relative_path::RelativePath;
use crate::input::FileId;
/// Object-safe facade over the host's file-system layout.
///
/// Implementors map `FileId`s back to path-like information; `debug_path`
/// is best-effort and defaults to `None`.
pub trait FileResolver: fmt::Debug + Send + Sync + 'static {
    /// Returns the file name of `file_id` without its extension.
    fn file_stem(&self, file_id: FileId) -> String;
    /// Resolves `path` relative to the file `file_id`, if the target exists.
    fn resolve(&self, file_id: FileId, path: &RelativePath) -> Option<FileId>;
    /// Best-effort real path for diagnostics; `None` by default.
    /// (Fixed parameter-name typo: was `_1file_id`.)
    fn debug_path(&self, _file_id: FileId) -> Option<std::path::PathBuf> {
        None
    }
}
/// Shared, cheaply-clonable handle around a `FileResolver` trait object.
#[derive(Clone, Debug)]
pub struct FileResolverImp {
    inner: Arc<FileResolver>,
}

// Equality and hashing are by identity of the underlying trait object
// (`inner()` returns a raw pointer), not by any structural comparison:
// two handles are equal iff they wrap the very same resolver instance.
impl PartialEq for FileResolverImp {
    fn eq(&self, other: &FileResolverImp) -> bool {
        self.inner() == other.inner()
    }
}

impl Eq for FileResolverImp {}

impl Hash for FileResolverImp {
    fn hash<H: Hasher>(&self, hasher: &mut H) {
        self.inner().hash(hasher);
    }
}
impl FileResolverImp {
    /// Wraps a concrete resolver in the shared handle.
    pub fn new(inner: Arc<FileResolver>) -> FileResolverImp {
        FileResolverImp { inner }
    }
    /// Returns the file name of `file_id` without its extension.
    pub fn file_stem(&self, file_id: FileId) -> String {
        self.inner.file_stem(file_id)
    }
    /// Resolves `path` relative to `file_id`, if the target file exists.
    pub fn resolve(&self, file_id: FileId, path: &RelativePath) -> Option<FileId> {
        self.inner.resolve(file_id, path)
    }
    /// Best-effort real path for debugging/diagnostics.
    pub fn debug_path(&self, file_id: FileId) -> Option<std::path::PathBuf> {
        self.inner.debug_path(file_id)
    }
    /// Raw pointer to the trait object; used for identity-based eq/hash above.
    fn inner(&self) -> *const FileResolver {
        &*self.inner
    }
}
impl Default for FileResolverImp {
    /// The default resolver panics on every call: it is a placeholder for the
    /// window before the host supplies a real `FileResolver`. Any use of it
    /// is a logic error, hence the loud `panic!` rather than a dummy answer.
    fn default() -> FileResolverImp {
        #[derive(Debug)]
        struct DummyResolver;
        impl FileResolver for DummyResolver {
            fn file_stem(&self, _file_: FileId) -> String {
                panic!("file resolver not set")
            }
            fn resolve(
                &self,
                _file_id: FileId,
                _path: &::relative_path::RelativePath,
            ) -> Option<FileId> {
                panic!("file resolver not set")
            }
        }
        FileResolverImp {
            inner: Arc::new(DummyResolver),
        }
    }
}

View file

@ -1,11 +1,10 @@
use std::{fmt, sync::Arc}; use std::sync::Arc;
use relative_path::RelativePath;
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use rustc_hash::FxHashSet; use rustc_hash::FxHashSet;
use salsa; use salsa;
use crate::{symbol_index::SymbolIndex, FileResolverImp}; use crate::file_resolver::FileResolverImp;
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct FileId(pub u32); pub struct FileId(pub u32);
@ -19,8 +18,8 @@ pub struct CrateGraph {
} }
impl CrateGraph { impl CrateGraph {
pub fn new() -> CrateGraph { pub fn crate_root(&self, crate_id: CrateId) -> FileId {
CrateGraph::default() self.crate_roots[&crate_id]
} }
pub fn add_crate_root(&mut self, file_id: FileId) -> CrateId { pub fn add_crate_root(&mut self, file_id: FileId) -> CrateId {
let crate_id = CrateId(self.crate_roots.len() as u32); let crate_id = CrateId(self.crate_roots.len() as u32);
@ -28,18 +27,17 @@ impl CrateGraph {
assert!(prev.is_none()); assert!(prev.is_none());
crate_id crate_id
} }
} pub fn crate_id_for_crate_root(&self, file_id: FileId) -> Option<CrateId> {
let (&crate_id, _) = self
pub trait FileResolver: fmt::Debug + Send + Sync + 'static { .crate_roots
fn file_stem(&self, file_id: FileId) -> String; .iter()
fn resolve(&self, file_id: FileId, path: &RelativePath) -> Option<FileId>; .find(|(_crate_id, &root_id)| root_id == file_id)?;
fn debug_path(&self, _file_id: FileId) -> Option<std::path::PathBuf> { Some(crate_id)
None
} }
} }
salsa::query_group! { salsa::query_group! {
pub(crate) trait FilesDatabase: salsa::Database { pub trait FilesDatabase: salsa::Database {
fn file_text(file_id: FileId) -> Arc<String> { fn file_text(file_id: FileId) -> Arc<String> {
type FileTextQuery; type FileTextQuery;
storage input; storage input;
@ -56,10 +54,6 @@ salsa::query_group! {
type LibrariesQuery; type LibrariesQuery;
storage input; storage input;
} }
fn library_symbols(id: SourceRootId) -> Arc<SymbolIndex> {
type LibrarySymbolsQuery;
storage input;
}
fn crate_graph() -> Arc<CrateGraph> { fn crate_graph() -> Arc<CrateGraph> {
type CrateGraphQuery; type CrateGraphQuery;
storage input; storage input;
@ -68,12 +62,12 @@ salsa::query_group! {
} }
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub(crate) struct SourceRootId(pub(crate) u32); pub struct SourceRootId(pub u32);
#[derive(Default, Clone, Debug, PartialEq, Eq)] #[derive(Default, Clone, Debug, PartialEq, Eq)]
pub(crate) struct SourceRoot { pub struct SourceRoot {
pub(crate) file_resolver: FileResolverImp, pub file_resolver: FileResolverImp,
pub(crate) files: FxHashSet<FileId>, pub files: FxHashSet<FileId>,
} }
pub(crate) const WORKSPACE: SourceRootId = SourceRootId(0); pub const WORKSPACE: SourceRootId = SourceRootId(0);

89
crates/ra_db/src/lib.rs Normal file
View file

@ -0,0 +1,89 @@
//! ra_db defines basic database traits. Concrete DB is defined by ra_analysis.
extern crate ra_editor;
extern crate ra_syntax;
extern crate relative_path;
extern crate rustc_hash;
extern crate salsa;
mod syntax_ptr;
mod file_resolver;
mod input;
mod loc2id;
use std::sync::Arc;
use ra_editor::LineIndex;
use ra_syntax::{TextUnit, SourceFileNode};
/// Marker error signalling that the current analysis request was canceled
/// because the underlying inputs changed mid-computation.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct Canceled;

/// Result alias for queries that may be canceled in flight.
pub type Cancelable<T> = Result<T, Canceled>;

impl std::fmt::Display for Canceled {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "Canceled")
    }
}

impl std::error::Error for Canceled {}
pub use crate::{
syntax_ptr::LocalSyntaxPtr,
file_resolver::{FileResolver, FileResolverImp},
input::{
FilesDatabase, FileId, CrateId, SourceRoot, SourceRootId, CrateGraph, WORKSPACE,
FileTextQuery, FileSourceRootQuery, SourceRootQuery, LibrariesQuery, CrateGraphQuery,
},
loc2id::{LocationIntener, NumericId},
};
/// Implements this crate's `NumericId` trait for a newtype over `u32`
/// (e.g. `struct FnId(u32);`). Exported so downstream crates (ra_hir etc.)
/// can define their own compact id types.
#[macro_export]
macro_rules! impl_numeric_id {
    ($id:ident) => {
        impl $crate::NumericId for $id {
            fn from_u32(id: u32) -> Self {
                $id(id)
            }
            fn to_u32(self) -> u32 {
                self.0
            }
        }
    };
}
/// Base trait for all analysis databases: adds cancellation checking on top
/// of `salsa::Database`.
pub trait BaseDatabase: salsa::Database {
    /// Returns `Err(Canceled)` if salsa has marked the current revision as
    /// canceled (i.e. new input arrived). Long-running queries call this
    /// periodically to bail out early instead of computing stale results.
    fn check_canceled(&self) -> Cancelable<()> {
        if self.salsa_runtime().is_current_revision_canceled() {
            Err(Canceled)
        } else {
            Ok(())
        }
    }
}
salsa::query_group! {
    // Queries derived from raw file text: parsing and line indexing.
    // (Plain `//` comments are used here: they are stripped by the lexer and
    // therefore invisible to the salsa macro.)
    pub trait SyntaxDatabase: crate::input::FilesDatabase + BaseDatabase {
        // Parsed syntax tree for a file; implemented by `source_file` below.
        fn source_file(file_id: FileId) -> SourceFileNode {
            type SourceFileQuery;
        }
        // Line/column <-> offset conversion table; implemented by `file_lines`.
        fn file_lines(file_id: FileId) -> Arc<LineIndex> {
            type FileLinesQuery;
        }
    }
}
/// Salsa query implementation: parses the text of `file_id` into a syntax tree.
fn source_file(db: &impl SyntaxDatabase, file_id: FileId) -> SourceFileNode {
    SourceFileNode::parse(db.file_text(file_id).as_str())
}
/// Salsa query implementation: builds the line index for `file_id`.
fn file_lines(db: &impl SyntaxDatabase, file_id: FileId) -> Arc<LineIndex> {
    let text = db.file_text(file_id);
    let index = LineIndex::new(text.as_str());
    Arc::new(index)
}
/// A position in a specific file: the file plus an offset into its text.
#[derive(Clone, Copy, Debug)]
pub struct FilePosition {
    /// The file this position points into.
    pub file_id: FileId,
    /// Offset (in `TextUnit`s) from the start of the file.
    pub offset: TextUnit,
}

100
crates/ra_db/src/loc2id.rs Normal file
View file

@ -0,0 +1,100 @@
use parking_lot::Mutex;
use std::hash::Hash;
use rustc_hash::FxHashMap;
/// There are two principal ways to refer to things:
/// - by their location (module in foo/bar/baz.rs at line 42)
/// - by their numeric id (module `ModuleId(42)`)
///
/// The first one is more powerful (you can actually find the thing in question
/// by location), but the second one is so much more compact.
///
/// `Loc2IdMap` allows us to have our cake and eat it too: by maintaining a
/// bidirectional mapping between positional and numeric ids, we can use the
/// compact representation which still allows us to get the actual item.
#[derive(Debug)]
struct Loc2IdMap<LOC, ID>
where
    ID: NumericId,
    LOC: Clone + Eq + Hash,
{
    // location -> id: interns a location into a fresh or existing id.
    loc2id: FxHashMap<LOC, ID>,
    // id -> location: the inverse direction, for resolving an id back.
    id2loc: FxHashMap<ID, LOC>,
}
impl<LOC, ID> Default for Loc2IdMap<LOC, ID>
where
    ID: NumericId,
    LOC: Clone + Eq + Hash,
{
    /// Creates an empty bidirectional map.
    fn default() -> Self {
        Self {
            loc2id: Default::default(),
            id2loc: Default::default(),
        }
    }
}
impl<LOC, ID> Loc2IdMap<LOC, ID>
where
    ID: NumericId,
    LOC: Clone + Eq + Hash,
{
    /// Interns `loc`: returns the existing id for this location, or allocates
    /// the next sequential id and records the mapping in both directions.
    pub fn loc2id(&mut self, loc: &LOC) -> ID {
        // Idiomatic early-return instead of `match` with an empty `None` arm.
        if let Some(id) = self.loc2id.get(loc) {
            return id.clone();
        }
        let id = self.loc2id.len();
        // Ids are stored as u32; guard against overflowing the id space.
        assert!(id < u32::max_value() as usize);
        let id = ID::from_u32(id as u32);
        self.loc2id.insert(loc.clone(), id.clone());
        self.id2loc.insert(id.clone(), loc.clone());
        id
    }

    /// Resolves an id back to the location it was interned from.
    ///
    /// Panics if `id` was not produced by `loc2id` on this map.
    pub fn id2loc(&self, id: ID) -> LOC {
        self.id2loc[&id].clone()
    }
}
/// A compact id type backed by a `u32`, convertible to and from the raw value.
/// Implemented for newtypes via the `impl_numeric_id!` macro.
pub trait NumericId: Clone + Eq + Hash {
    fn from_u32(id: u32) -> Self;
    fn to_u32(self) -> u32;
}
/// Thread-safe interner from locations to numeric ids (and back).
///
/// Wraps a `Loc2IdMap` in a mutex so interning works behind `&self`.
/// NOTE(review): "Intener" looks like a typo for "Interner", but the name is
/// publicly re-exported, so it is kept as-is here.
#[derive(Debug)]
pub struct LocationIntener<LOC, ID>
where
    ID: NumericId,
    LOC: Clone + Eq + Hash,
{
    map: Mutex<Loc2IdMap<LOC, ID>>,
}

impl<LOC, ID> Default for LocationIntener<LOC, ID>
where
    ID: NumericId,
    LOC: Clone + Eq + Hash,
{
    fn default() -> Self {
        LocationIntener {
            map: Default::default(),
        }
    }
}
impl<LOC, ID> LocationIntener<LOC, ID>
where
    ID: NumericId,
    LOC: Clone + Eq + Hash,
{
    /// Interns `loc`, returning its stable numeric id.
    pub fn loc2id(&self, loc: &LOC) -> ID {
        self.map.lock().loc2id(loc)
    }
    /// Resolves `id` back to its location; panics on ids this map never issued.
    pub fn id2loc(&self, id: ID) -> LOC {
        self.map.lock().id2loc(id)
    }
}

View file

@ -1,49 +1,21 @@
use ra_syntax::{SourceFileNode, SyntaxKind, SyntaxNode, SyntaxNodeRef, TextRange}; use ra_syntax::{SourceFileNode, SyntaxKind, SyntaxNode, SyntaxNodeRef, TextRange};
use crate::db::SyntaxDatabase;
use crate::FileId;
pub(crate) fn resolve_syntax_ptr(db: &impl SyntaxDatabase, ptr: SyntaxPtr) -> SyntaxNode {
let syntax = db.file_syntax(ptr.file_id);
ptr.local.resolve(&syntax)
}
/// SyntaxPtr is a cheap `Copy` id which identifies a particular syntax node,
/// without retaining syntax tree in memory. You need to explicitly `resolve`
/// `SyntaxPtr` to get a `SyntaxNode`
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub(crate) struct SyntaxPtr {
file_id: FileId,
local: LocalSyntaxPtr,
}
impl SyntaxPtr {
pub(crate) fn new(file_id: FileId, node: SyntaxNodeRef) -> SyntaxPtr {
let local = LocalSyntaxPtr::new(node);
SyntaxPtr { file_id, local }
}
pub(crate) fn file_id(self) -> FileId {
self.file_id
}
}
/// A pionter to a syntax node inside a file. /// A pionter to a syntax node inside a file.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub(crate) struct LocalSyntaxPtr { pub struct LocalSyntaxPtr {
range: TextRange, range: TextRange,
kind: SyntaxKind, kind: SyntaxKind,
} }
impl LocalSyntaxPtr { impl LocalSyntaxPtr {
pub(crate) fn new(node: SyntaxNodeRef) -> LocalSyntaxPtr { pub fn new(node: SyntaxNodeRef) -> LocalSyntaxPtr {
LocalSyntaxPtr { LocalSyntaxPtr {
range: node.range(), range: node.range(),
kind: node.kind(), kind: node.kind(),
} }
} }
pub(crate) fn resolve(self, file: &SourceFileNode) -> SyntaxNode { pub fn resolve(self, file: &SourceFileNode) -> SyntaxNode {
let mut curr = file.syntax(); let mut curr = file.syntax();
loop { loop {
if curr.range() == self.range && curr.kind() == self.kind { if curr.range() == self.range && curr.kind() == self.kind {
@ -56,11 +28,8 @@ impl LocalSyntaxPtr {
} }
} }
pub(crate) fn into_global(self, file_id: FileId) -> SyntaxPtr { pub fn range(self) -> TextRange {
SyntaxPtr { self.range
file_id,
local: self,
}
} }
} }

17
crates/ra_hir/Cargo.toml Normal file
View file

@ -0,0 +1,17 @@
[package]
edition = "2018"
name = "ra_hir"
version = "0.1.0"
authors = ["Aleksey Kladov <aleksey.kladov@gmail.com>"]
[dependencies]
log = "0.4.5"
relative-path = "0.4.0"
salsa = "0.8.0"
rustc-hash = "1.0"
parking_lot = "0.6.4"
id-arena = { git = "https://github.com/fitzgen/id-arena/", rev = "43ecd67" }
ra_syntax = { path = "../ra_syntax" }
ra_editor = { path = "../ra_editor" }
ra_db = { path = "../ra_db" }
test_utils = { path = "../test_utils" }

View file

@ -8,7 +8,7 @@ use std::{
marker::PhantomData, marker::PhantomData,
}; };
pub(crate) struct Id<T> { pub struct Id<T> {
idx: u32, idx: u32,
_ty: PhantomData<fn() -> T>, _ty: PhantomData<fn() -> T>,
} }

66
crates/ra_hir/src/db.rs Normal file
View file

@ -0,0 +1,66 @@
use std::sync::Arc;
use ra_syntax::{
SyntaxNode,
ast::FnDefNode,
};
use ra_db::{SourceRootId, LocationIntener, SyntaxDatabase, FileId, Cancelable};
use crate::{
DefLoc, DefId, FnId,
SourceFileItems, SourceItemId,
query_definitions,
FnScopes,
module::{ModuleId, ModuleTree, ModuleSource,
nameres::{ItemMap, InputModuleItems}},
};
salsa::query_group! {
    // The core HIR database: syntax-level queries plus the id interners for
    // functions and definitions (accessed via the `AsRef<LocationIntener<..>>`
    // bounds). Plain `//` comments are used so the salsa macro never sees them.
    pub trait HirDatabase: SyntaxDatabase
        + AsRef<LocationIntener<DefLoc, DefId>>
        + AsRef<LocationIntener<SourceItemId, FnId>>
    {
        // Scope tree (local bindings) of a single function.
        fn fn_scopes(fn_id: FnId) -> Arc<FnScopes> {
            type FnScopesQuery;
            use fn query_definitions::fn_scopes;
        }
        // Syntax of a single function, re-parsed on demand.
        fn fn_syntax(fn_id: FnId) -> FnDefNode {
            type FnSyntaxQuery;
            // Don't retain syntax trees in memory
            storage dependencies;
            use fn query_definitions::fn_syntax;
        }
        // Stable numbering of the top-level items of a file.
        fn file_items(file_id: FileId) -> Arc<SourceFileItems> {
            type SourceFileItemsQuery;
            storage dependencies;
            use fn query_definitions::file_items;
        }
        // Syntax node of one numbered item.
        fn file_item(source_item_id: SourceItemId) -> SyntaxNode {
            type FileItemQuery;
            storage dependencies;
            use fn query_definitions::file_item;
        }
        // Submodules declared by a module's source.
        fn submodules(source: ModuleSource) -> Cancelable<Arc<Vec<crate::module::imp::Submodule>>> {
            type SubmodulesQuery;
            use fn query_definitions::submodules;
        }
        // Per-module declared and imported items: input to name resolution.
        fn input_module_items(source_root_id: SourceRootId, module_id: ModuleId) -> Cancelable<Arc<InputModuleItems>> {
            type InputModuleItemsQuery;
            use fn query_definitions::input_module_items;
        }
        // Fully resolved item map for a whole source root.
        fn item_map(source_root_id: SourceRootId) -> Cancelable<Arc<ItemMap>> {
            type ItemMapQuery;
            use fn query_definitions::item_map;
        }
        // Module hierarchy of a source root.
        fn module_tree(source_root_id: SourceRootId) -> Cancelable<Arc<ModuleTree>> {
            type ModuleTreeQuery;
            use fn crate::module::imp::module_tree;
        }
    }
}

View file

@ -1,30 +1,83 @@
pub(super) mod imp;
mod scope; mod scope;
use std::cmp::{max, min}; use std::{
cmp::{max, min},
sync::Arc,
};
use ra_syntax::{ use ra_syntax::{
TextRange, TextUnit, SyntaxNodeRef,
ast::{self, AstNode, DocCommentsOwner, NameOwner}, ast::{self, AstNode, DocCommentsOwner, NameOwner},
TextRange, TextUnit,
}; };
use ra_db::FileId;
use crate::{ use crate::{
syntax_ptr::SyntaxPtr, FileId, FnId, HirDatabase, SourceItemId,
loc2id::IdDatabase,
}; };
pub(crate) use self::scope::{resolve_local_name, FnScopes}; pub use self::scope::FnScopes;
pub(crate) use crate::loc2id::FnId;
impl FnId { impl FnId {
pub(crate) fn get(db: &impl IdDatabase, file_id: FileId, fn_def: ast::FnDef) -> FnId { pub fn get(db: &impl HirDatabase, file_id: FileId, fn_def: ast::FnDef) -> FnId {
let ptr = SyntaxPtr::new(file_id, fn_def.syntax()); let file_items = db.file_items(file_id);
db.id_maps().fn_id(ptr) let item_id = file_items.id_of(fn_def.syntax());
let item_id = SourceItemId { file_id, item_id };
FnId::from_loc(db, &item_id)
}
}
pub struct Function {
fn_id: FnId,
}
impl Function {
pub fn guess_from_source(
db: &impl HirDatabase,
file_id: FileId,
fn_def: ast::FnDef,
) -> Function {
let fn_id = FnId::get(db, file_id, fn_def);
Function { fn_id }
}
pub fn guess_for_name_ref(
db: &impl HirDatabase,
file_id: FileId,
name_ref: ast::NameRef,
) -> Option<Function> {
Function::guess_for_node(db, file_id, name_ref.syntax())
}
pub fn guess_for_bind_pat(
db: &impl HirDatabase,
file_id: FileId,
bind_pat: ast::BindPat,
) -> Option<Function> {
Function::guess_for_node(db, file_id, bind_pat.syntax())
}
fn guess_for_node(
db: &impl HirDatabase,
file_id: FileId,
node: SyntaxNodeRef,
) -> Option<Function> {
let fn_def = node.ancestors().find_map(ast::FnDef::cast)?;
let res = Function::guess_from_source(db, file_id, fn_def);
Some(res)
}
pub fn scope(&self, db: &impl HirDatabase) -> Arc<FnScopes> {
db.fn_scopes(self.fn_id)
}
pub fn signature_info(&self, db: &impl HirDatabase) -> Option<FnSignatureInfo> {
let syntax = db.fn_syntax(self.fn_id);
FnSignatureInfo::new(syntax.borrowed())
} }
} }
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct FnDescriptor { pub struct FnSignatureInfo {
pub name: String, pub name: String,
pub label: String, pub label: String,
pub ret_type: Option<String>, pub ret_type: Option<String>,
@ -32,8 +85,8 @@ pub struct FnDescriptor {
pub doc: Option<String>, pub doc: Option<String>,
} }
impl FnDescriptor { impl FnSignatureInfo {
pub fn new(node: ast::FnDef) -> Option<Self> { fn new(node: ast::FnDef) -> Option<Self> {
let name = node.name()?.text().to_string(); let name = node.name()?.text().to_string();
let mut doc = None; let mut doc = None;
@ -52,7 +105,7 @@ impl FnDescriptor {
node.syntax().text().to_string() node.syntax().text().to_string()
}; };
if let Some((comment_range, docs)) = FnDescriptor::extract_doc_comments(node) { if let Some((comment_range, docs)) = FnSignatureInfo::extract_doc_comments(node) {
let comment_range = comment_range let comment_range = comment_range
.checked_sub(node.syntax().range().start()) .checked_sub(node.syntax().range().start())
.unwrap(); .unwrap();
@ -84,10 +137,10 @@ impl FnDescriptor {
} }
} }
let params = FnDescriptor::param_list(node); let params = FnSignatureInfo::param_list(node);
let ret_type = node.ret_type().map(|r| r.syntax().text().to_string()); let ret_type = node.ret_type().map(|r| r.syntax().text().to_string());
Some(FnDescriptor { Some(FnSignatureInfo {
name, name,
ret_type, ret_type,
params, params,

View file

@ -1,13 +1,13 @@
use rustc_hash::{FxHashMap, FxHashSet}; use rustc_hash::{FxHashMap, FxHashSet};
use ra_syntax::{ use ra_syntax::{
AstNode, SmolStr, SyntaxNodeRef, TextRange,
algo::generate, algo::generate,
ast::{self, ArgListOwner, LoopBodyOwner, NameOwner}, ast::{self, ArgListOwner, LoopBodyOwner, NameOwner},
AstNode, SmolStr, SyntaxNodeRef,
}; };
use ra_db::LocalSyntaxPtr;
use crate::{ use crate::{
syntax_ptr::LocalSyntaxPtr,
arena::{Arena, Id}, arena::{Arena, Id},
}; };
@ -15,7 +15,7 @@ pub(crate) type ScopeId = Id<ScopeData>;
#[derive(Debug, PartialEq, Eq)] #[derive(Debug, PartialEq, Eq)]
pub struct FnScopes { pub struct FnScopes {
pub(crate) self_param: Option<LocalSyntaxPtr>, pub self_param: Option<LocalSyntaxPtr>,
scopes: Arena<ScopeData>, scopes: Arena<ScopeData>,
scope_for: FxHashMap<LocalSyntaxPtr, ScopeId>, scope_for: FxHashMap<LocalSyntaxPtr, ScopeId>,
} }
@ -27,13 +27,13 @@ pub struct ScopeEntry {
} }
#[derive(Debug, PartialEq, Eq)] #[derive(Debug, PartialEq, Eq)]
pub(crate) struct ScopeData { pub struct ScopeData {
parent: Option<ScopeId>, parent: Option<ScopeId>,
entries: Vec<ScopeEntry>, entries: Vec<ScopeEntry>,
} }
impl FnScopes { impl FnScopes {
pub(crate) fn new(fn_def: ast::FnDef) -> FnScopes { pub fn new(fn_def: ast::FnDef) -> FnScopes {
let mut scopes = FnScopes { let mut scopes = FnScopes {
self_param: fn_def self_param: fn_def
.param_list() .param_list()
@ -49,7 +49,7 @@ impl FnScopes {
} }
scopes scopes
} }
pub(crate) fn entries(&self, scope: ScopeId) -> &[ScopeEntry] { pub fn entries(&self, scope: ScopeId) -> &[ScopeEntry] {
&self.scopes[scope].entries &self.scopes[scope].entries
} }
pub fn scope_chain<'a>(&'a self, node: SyntaxNodeRef) -> impl Iterator<Item = ScopeId> + 'a { pub fn scope_chain<'a>(&'a self, node: SyntaxNodeRef) -> impl Iterator<Item = ScopeId> + 'a {
@ -57,6 +57,37 @@ impl FnScopes {
self.scopes[scope].parent self.scopes[scope].parent
}) })
} }
pub fn resolve_local_name<'a>(&'a self, name_ref: ast::NameRef) -> Option<&'a ScopeEntry> {
let mut shadowed = FxHashSet::default();
let ret = self
.scope_chain(name_ref.syntax())
.flat_map(|scope| self.entries(scope).iter())
.filter(|entry| shadowed.insert(entry.name()))
.filter(|entry| entry.name() == &name_ref.text())
.nth(0);
ret
}
pub fn find_all_refs(&self, pat: ast::BindPat) -> Vec<ReferenceDescriptor> {
let fn_def = pat.syntax().ancestors().find_map(ast::FnDef::cast).unwrap();
let name_ptr = LocalSyntaxPtr::new(pat.syntax());
let refs: Vec<_> = fn_def
.syntax()
.descendants()
.filter_map(ast::NameRef::cast)
.filter(|name_ref| match self.resolve_local_name(*name_ref) {
None => false,
Some(entry) => entry.ptr() == name_ptr,
})
.map(|name_ref| ReferenceDescriptor {
name: name_ref.syntax().text().to_string(),
range: name_ref.syntax().range(),
})
.collect();
refs
}
fn root_scope(&mut self) -> ScopeId { fn root_scope(&mut self) -> ScopeId {
self.scopes.alloc(ScopeData { self.scopes.alloc(ScopeData {
parent: None, parent: None,
@ -104,10 +135,10 @@ impl ScopeEntry {
}; };
Some(res) Some(res)
} }
pub(crate) fn name(&self) -> &SmolStr { pub fn name(&self) -> &SmolStr {
&self.name &self.name
} }
pub(crate) fn ptr(&self) -> LocalSyntaxPtr { pub fn ptr(&self) -> LocalSyntaxPtr {
self.ptr self.ptr
} }
} }
@ -249,18 +280,10 @@ fn compute_expr_scopes(expr: ast::Expr, scopes: &mut FnScopes, scope: ScopeId) {
} }
} }
pub fn resolve_local_name<'a>( #[derive(Debug)]
name_ref: ast::NameRef, pub struct ReferenceDescriptor {
scopes: &'a FnScopes, pub range: TextRange,
) -> Option<&'a ScopeEntry> { pub name: String,
let mut shadowed = FxHashSet::default();
let ret = scopes
.scope_chain(name_ref.syntax())
.flat_map(|scope| scopes.entries(scope).iter())
.filter(|entry| shadowed.insert(entry.name()))
.filter(|entry| entry.name() == &name_ref.text())
.nth(0);
ret
} }
#[cfg(test)] #[cfg(test)]
@ -376,7 +399,7 @@ mod tests {
let scopes = FnScopes::new(fn_def); let scopes = FnScopes::new(fn_def);
let local_name_entry = resolve_local_name(name_ref, &scopes).unwrap(); let local_name_entry = scopes.resolve_local_name(name_ref).unwrap();
let local_name = local_name_entry.ptr().resolve(&file); let local_name = local_name_entry.ptr().resolve(&file);
let expected_name = let expected_name =
find_node_at_offset::<ast::Name>(file.syntax(), expected_offset.into()).unwrap(); find_node_at_offset::<ast::Name>(file.syntax(), expected_offset.into()).unwrap();

139
crates/ra_hir/src/lib.rs Normal file
View file

@ -0,0 +1,139 @@
//! HIR (previously known as descriptors) provides high-level, object-oriented
//! access to Rust code.
//!
//! The principal difference between HIR and syntax trees is that HIR is bound
//! to a particular crate instance. That is, it has cfg flags and features
//! applied. So, the relation between syntax and HIR is many-to-one.
/// `ctry!` is to `Cancelable<Option<T>>` roughly what `?` is to `Option<T>`:
/// if the expression is `None`, the enclosing function returns `Ok(None)`
/// early; otherwise the macro yields the inner value.
macro_rules! ctry {
    ($expr:expr) => {
        match $expr {
            None => return Ok(None),
            Some(it) => it,
        }
    };
}
pub mod db;
mod query_definitions;
mod function;
mod module;
mod path;
mod arena;
use std::ops::Index;
use ra_syntax::{SyntaxNodeRef, SyntaxNode};
use ra_db::{LocationIntener, SourceRootId, FileId, Cancelable};
use crate::{
db::HirDatabase,
arena::{Arena, Id},
};
pub use self::{
path::{Path, PathKind},
module::{Module, ModuleId, Problem, nameres::ItemMap},
function::{Function, FnScopes},
};
pub use self::function::FnSignatureInfo;
/// Interned id of a function: a cheap `Copy` handle suitable for use as a
/// salsa key. The location it stands for lives in a `LocationIntener`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct FnId(u32);
ra_db::impl_numeric_id!(FnId);
impl FnId {
    /// Returns the id assigned by the interner for `loc`.
    pub fn from_loc(
        db: &impl AsRef<LocationIntener<SourceItemId, FnId>>,
        loc: &SourceItemId,
    ) -> FnId {
        db.as_ref().loc2id(loc)
    }
    /// Looks the id back up in the interner, yielding its `SourceItemId`.
    pub fn loc(self, db: &impl AsRef<LocationIntener<SourceItemId, FnId>>) -> SourceItemId {
        db.as_ref().id2loc(self)
    }
}
/// Interned id of a definition (module or item); see `DefLoc` for the
/// location a `DefId` stands for.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct DefId(u32);
ra_db::impl_numeric_id!(DefId);
/// The "location" a `DefId` is interned from: either a module inside a
/// particular source root, or an item addressed by its stable source id.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub enum DefLoc {
    Module {
        id: ModuleId,
        source_root: SourceRootId,
    },
    Item {
        source_item_id: SourceItemId,
    },
}
impl DefId {
    /// Looks up the `DefLoc` this id was interned from.
    pub fn loc(self, db: &impl AsRef<LocationIntener<DefLoc, DefId>>) -> DefLoc {
        db.as_ref().id2loc(self)
    }
}
impl DefLoc {
    /// Interns this location, returning its `DefId`.
    pub fn id(&self, db: &impl AsRef<LocationIntener<DefLoc, DefId>>) -> DefId {
        // `self` is already `&DefLoc`; the original `loc2id(&self)` passed
        // `&&DefLoc` and relied on auto-deref coercion for no benefit.
        db.as_ref().loc2id(self)
    }
}
/// A resolved definition: either a full `Module`, or — for now — an opaque
/// placeholder for any other kind of item.
// `Debug` added for consistency with the other public types in this module
// (`Module` itself derives `Debug`), so resolution results are printable.
#[derive(Debug)]
pub enum Def {
    Module(Module),
    Item,
}
impl DefId {
    /// Resolves the id back into a full `Def`.
    ///
    /// Module locations are expanded into a `Module` via `Module::new`
    /// (which can be cancelled, hence `Cancelable`); item locations are
    /// currently returned as the opaque `Def::Item`.
    pub fn resolve(self, db: &impl HirDatabase) -> Cancelable<Def> {
        let loc = self.loc(db);
        let res = match loc {
            DefLoc::Module { id, source_root } => {
                let descr = Module::new(db, source_root, id)?;
                Def::Module(descr)
            }
            DefLoc::Item { .. } => Def::Item,
        };
        Ok(res)
    }
}
/// Identifier of item within a specific file. This is stable over reparses, so
/// it's OK to use it as a salsa key/value.
pub(crate) type SourceFileItemId = Id<SyntaxNode>;
/// A file-qualified item id: the file plus the item's `SourceFileItemId`
/// within that file's `SourceFileItems`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct SourceItemId {
    file_id: FileId,
    item_id: SourceFileItemId,
}
/// Maps item's `SyntaxNode`s to `SourceFileItemId` and back.
#[derive(Debug, PartialEq, Eq, Default)]
pub struct SourceFileItems {
    // The arena's allocation index doubles as the item id.
    arena: Arena<SyntaxNode>,
}
impl SourceFileItems {
    /// Registers `item`, returning its freshly allocated id.
    fn alloc(&mut self, item: SyntaxNode) -> SourceFileItemId {
        self.arena.alloc(item)
    }
    /// Finds the id previously allocated for `item` by a linear scan.
    ///
    /// Panics if `item` was never registered; the message includes the node
    /// so the offending query is diagnosable (the original bare `.unwrap()`
    /// panicked with no context).
    pub fn id_of(&self, item: SyntaxNodeRef) -> SourceFileItemId {
        let (id, _item) = self
            .arena
            .iter()
            .find(|(_id, i)| i.borrowed() == item)
            .unwrap_or_else(|| panic!("Can't find {:?} in SourceFileItems", item));
        id
    }
}
impl Index<SourceFileItemId> for SourceFileItems {
    type Output = SyntaxNode;
    /// `items[id]` returns the syntax node the id was allocated for.
    fn index(&self, idx: SourceFileItemId) -> &SyntaxNode {
        &self.arena[idx]
    }
}

View file

@ -6,21 +6,19 @@ use ra_syntax::{
}; };
use relative_path::RelativePathBuf; use relative_path::RelativePathBuf;
use rustc_hash::{FxHashMap, FxHashSet}; use rustc_hash::{FxHashMap, FxHashSet};
use ra_db::{SourceRoot, SourceRootId, FileResolverImp, Cancelable, FileId,};
use crate::{ use crate::{
db, HirDatabase,
descriptors::DescriptorDatabase,
input::{SourceRoot, SourceRootId},
Cancelable, FileId, FileResolverImp,
}; };
use super::{ use super::{
LinkData, LinkId, ModuleData, ModuleId, ModuleSource, ModuleSourceNode, LinkData, LinkId, ModuleData, ModuleId, ModuleSource,
ModuleTree, Problem, ModuleTree, Problem,
}; };
#[derive(Clone, Hash, PartialEq, Eq, Debug)] #[derive(Clone, Hash, PartialEq, Eq, Debug)]
pub(crate) enum Submodule { pub enum Submodule {
Declaration(SmolStr), Declaration(SmolStr),
Definition(SmolStr, ModuleSource), Definition(SmolStr, ModuleSource),
} }
@ -34,39 +32,6 @@ impl Submodule {
} }
} }
pub(crate) fn submodules(
db: &impl DescriptorDatabase,
source: ModuleSource,
) -> Cancelable<Arc<Vec<Submodule>>> {
db::check_canceled(db)?;
let file_id = source.file_id();
let submodules = match source.resolve(db) {
ModuleSourceNode::SourceFile(it) => collect_submodules(file_id, it.borrowed()),
ModuleSourceNode::Module(it) => it
.borrowed()
.item_list()
.map(|it| collect_submodules(file_id, it))
.unwrap_or_else(Vec::new),
};
return Ok(Arc::new(submodules));
fn collect_submodules<'a>(
file_id: FileId,
root: impl ast::ModuleItemOwner<'a>,
) -> Vec<Submodule> {
modules(root)
.map(|(name, m)| {
if m.has_semi() {
Submodule::Declaration(name)
} else {
let src = ModuleSource::new_inline(file_id, m);
Submodule::Definition(name, src)
}
})
.collect()
}
}
pub(crate) fn modules<'a>( pub(crate) fn modules<'a>(
root: impl ast::ModuleItemOwner<'a>, root: impl ast::ModuleItemOwner<'a>,
) -> impl Iterator<Item = (SmolStr, ast::Module<'a>)> { ) -> impl Iterator<Item = (SmolStr, ast::Module<'a>)> {
@ -82,16 +47,16 @@ pub(crate) fn modules<'a>(
} }
pub(crate) fn module_tree( pub(crate) fn module_tree(
db: &impl DescriptorDatabase, db: &impl HirDatabase,
source_root: SourceRootId, source_root: SourceRootId,
) -> Cancelable<Arc<ModuleTree>> { ) -> Cancelable<Arc<ModuleTree>> {
db::check_canceled(db)?; db.check_canceled()?;
let res = create_module_tree(db, source_root)?; let res = create_module_tree(db, source_root)?;
Ok(Arc::new(res)) Ok(Arc::new(res))
} }
fn create_module_tree<'a>( fn create_module_tree<'a>(
db: &impl DescriptorDatabase, db: &impl HirDatabase,
source_root: SourceRootId, source_root: SourceRootId,
) -> Cancelable<ModuleTree> { ) -> Cancelable<ModuleTree> {
let mut tree = ModuleTree::default(); let mut tree = ModuleTree::default();
@ -121,7 +86,7 @@ fn create_module_tree<'a>(
} }
fn build_subtree( fn build_subtree(
db: &impl DescriptorDatabase, db: &impl HirDatabase,
source_root: &SourceRoot, source_root: &SourceRoot,
tree: &mut ModuleTree, tree: &mut ModuleTree,
visited: &mut FxHashSet<ModuleSource>, visited: &mut FxHashSet<ModuleSource>,
@ -135,7 +100,7 @@ fn build_subtree(
parent, parent,
children: Vec::new(), children: Vec::new(),
}); });
for sub in db._submodules(source)?.iter() { for sub in db.submodules(source)?.iter() {
let link = tree.push_link(LinkData { let link = tree.push_link(LinkData {
name: sub.name().clone(), name: sub.name().clone(),
owner: id, owner: id,

View file

@ -10,65 +10,64 @@ use ra_syntax::{
ast::{self, AstNode, NameOwner}, ast::{self, AstNode, NameOwner},
SmolStr, SyntaxNode, SmolStr, SyntaxNode,
}; };
use ra_db::{SourceRootId, FileId, FilePosition, Cancelable};
use relative_path::RelativePathBuf; use relative_path::RelativePathBuf;
use crate::{ use crate::{
db::SyntaxDatabase, syntax_ptr::SyntaxPtr, FileId, FilePosition, Cancelable, DefLoc, DefId, Path, PathKind, HirDatabase, SourceItemId,
descriptors::{Path, PathKind, DescriptorDatabase},
input::SourceRootId,
arena::{Arena, Id}, arena::{Arena, Id},
loc2id::{DefLoc, DefId},
}; };
pub(crate) use self::nameres::ModuleScope; pub use self::nameres::ModuleScope;
/// `ModuleDescriptor` is API entry point to get all the information /// `Module` is API entry point to get all the information
/// about a particular module. /// about a particular module.
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub(crate) struct ModuleDescriptor { pub struct Module {
tree: Arc<ModuleTree>, tree: Arc<ModuleTree>,
source_root_id: SourceRootId, source_root_id: SourceRootId,
module_id: ModuleId, //TODO: make private
pub module_id: ModuleId,
} }
impl ModuleDescriptor { impl Module {
/// Lookup `ModuleDescriptor` by `FileId`. Note that this is inherently /// Lookup `Module` by `FileId`. Note that this is inherently
/// lossy transformation: in general, a single source might correspond to /// lossy transformation: in general, a single source might correspond to
/// several modules. /// several modules.
pub fn guess_from_file_id( pub fn guess_from_file_id(
db: &impl DescriptorDatabase, db: &impl HirDatabase,
file_id: FileId, file_id: FileId,
) -> Cancelable<Option<ModuleDescriptor>> { ) -> Cancelable<Option<Module>> {
ModuleDescriptor::guess_from_source(db, file_id, ModuleSource::SourceFile(file_id)) Module::guess_from_source(db, file_id, ModuleSource::SourceFile(file_id))
} }
/// Lookup `ModuleDescriptor` by position in the source code. Note that this /// Lookup `Module` by position in the source code. Note that this
/// is inherently lossy transformation: in general, a single source might /// is inherently lossy transformation: in general, a single source might
/// correspond to several modules. /// correspond to several modules.
pub fn guess_from_position( pub fn guess_from_position(
db: &impl DescriptorDatabase, db: &impl HirDatabase,
position: FilePosition, position: FilePosition,
) -> Cancelable<Option<ModuleDescriptor>> { ) -> Cancelable<Option<Module>> {
let file = db.file_syntax(position.file_id); let file = db.source_file(position.file_id);
let module_source = match find_node_at_offset::<ast::Module>(file.syntax(), position.offset) let module_source = match find_node_at_offset::<ast::Module>(file.syntax(), position.offset)
{ {
Some(m) if !m.has_semi() => ModuleSource::new_inline(position.file_id, m), Some(m) if !m.has_semi() => ModuleSource::new_inline(db, position.file_id, m),
_ => ModuleSource::SourceFile(position.file_id), _ => ModuleSource::SourceFile(position.file_id),
}; };
ModuleDescriptor::guess_from_source(db, position.file_id, module_source) Module::guess_from_source(db, position.file_id, module_source)
} }
fn guess_from_source( fn guess_from_source(
db: &impl DescriptorDatabase, db: &impl HirDatabase,
file_id: FileId, file_id: FileId,
module_source: ModuleSource, module_source: ModuleSource,
) -> Cancelable<Option<ModuleDescriptor>> { ) -> Cancelable<Option<Module>> {
let source_root_id = db.file_source_root(file_id); let source_root_id = db.file_source_root(file_id);
let module_tree = db._module_tree(source_root_id)?; let module_tree = db.module_tree(source_root_id)?;
let res = match module_tree.any_module_for_source(module_source) { let res = match module_tree.any_module_for_source(module_source) {
None => None, None => None,
Some(module_id) => Some(ModuleDescriptor { Some(module_id) => Some(Module {
tree: module_tree, tree: module_tree,
source_root_id, source_root_id,
module_id, module_id,
@ -78,12 +77,12 @@ impl ModuleDescriptor {
} }
pub(super) fn new( pub(super) fn new(
db: &impl DescriptorDatabase, db: &impl HirDatabase,
source_root_id: SourceRootId, source_root_id: SourceRootId,
module_id: ModuleId, module_id: ModuleId,
) -> Cancelable<ModuleDescriptor> { ) -> Cancelable<Module> {
let module_tree = db._module_tree(source_root_id)?; let module_tree = db.module_tree(source_root_id)?;
let res = ModuleDescriptor { let res = Module {
tree: module_tree, tree: module_tree,
source_root_id, source_root_id,
module_id, module_id,
@ -93,10 +92,7 @@ impl ModuleDescriptor {
/// Returns `mod foo;` or `mod foo {}` node whihc declared this module. /// Returns `mod foo;` or `mod foo {}` node whihc declared this module.
/// Returns `None` for the root module /// Returns `None` for the root module
pub fn parent_link_source( pub fn parent_link_source(&self, db: &impl HirDatabase) -> Option<(FileId, ast::ModuleNode)> {
&self,
db: &impl DescriptorDatabase,
) -> Option<(FileId, ast::ModuleNode)> {
let link = self.module_id.parent_link(&self.tree)?; let link = self.module_id.parent_link(&self.tree)?;
let file_id = link.owner(&self.tree).source(&self.tree).file_id(); let file_id = link.owner(&self.tree).source(&self.tree).file_id();
let src = link.bind_source(&self.tree, db); let src = link.bind_source(&self.tree, db);
@ -108,18 +104,18 @@ impl ModuleDescriptor {
} }
/// Parent module. Returns `None` if this is a root module. /// Parent module. Returns `None` if this is a root module.
pub fn parent(&self) -> Option<ModuleDescriptor> { pub fn parent(&self) -> Option<Module> {
let parent_id = self.module_id.parent(&self.tree)?; let parent_id = self.module_id.parent(&self.tree)?;
Some(ModuleDescriptor { Some(Module {
module_id: parent_id, module_id: parent_id,
..self.clone() ..self.clone()
}) })
} }
/// The root of the tree this module is part of /// The root of the tree this module is part of
pub fn crate_root(&self) -> ModuleDescriptor { pub fn crate_root(&self) -> Module {
let root_id = self.module_id.crate_root(&self.tree); let root_id = self.module_id.crate_root(&self.tree);
ModuleDescriptor { Module {
module_id: root_id, module_id: root_id,
..self.clone() ..self.clone()
} }
@ -132,35 +128,31 @@ impl ModuleDescriptor {
Some(link.name(&self.tree)) Some(link.name(&self.tree))
} }
pub fn def_id(&self, db: &impl DescriptorDatabase) -> DefId { pub fn def_id(&self, db: &impl HirDatabase) -> DefId {
let def_loc = DefLoc::Module { let def_loc = DefLoc::Module {
id: self.module_id, id: self.module_id,
source_root: self.source_root_id, source_root: self.source_root_id,
}; };
db.id_maps().def_id(def_loc) def_loc.id(db)
} }
/// Finds a child module with the specified name. /// Finds a child module with the specified name.
pub fn child(&self, name: &str) -> Option<ModuleDescriptor> { pub fn child(&self, name: &str) -> Option<Module> {
let child_id = self.module_id.child(&self.tree, name)?; let child_id = self.module_id.child(&self.tree, name)?;
Some(ModuleDescriptor { Some(Module {
module_id: child_id, module_id: child_id,
..self.clone() ..self.clone()
}) })
} }
/// Returns a `ModuleScope`: a set of items, visible in this module. /// Returns a `ModuleScope`: a set of items, visible in this module.
pub(crate) fn scope(&self, db: &impl DescriptorDatabase) -> Cancelable<ModuleScope> { pub fn scope(&self, db: &impl HirDatabase) -> Cancelable<ModuleScope> {
let item_map = db._item_map(self.source_root_id)?; let item_map = db.item_map(self.source_root_id)?;
let res = item_map.per_module[&self.module_id].clone(); let res = item_map.per_module[&self.module_id].clone();
Ok(res) Ok(res)
} }
pub(crate) fn resolve_path( pub fn resolve_path(&self, db: &impl HirDatabase, path: Path) -> Cancelable<Option<DefId>> {
&self,
db: &impl DescriptorDatabase,
path: Path,
) -> Cancelable<Option<DefId>> {
let mut curr = match path.kind { let mut curr = match path.kind {
PathKind::Crate => self.crate_root(), PathKind::Crate => self.crate_root(),
PathKind::Self_ | PathKind::Plain => self.clone(), PathKind::Self_ | PathKind::Plain => self.clone(),
@ -170,8 +162,8 @@ impl ModuleDescriptor {
let segments = path.segments; let segments = path.segments;
for name in segments.iter() { for name in segments.iter() {
let module = match db.id_maps().def_loc(curr) { let module = match curr.loc(db) {
DefLoc::Module { id, source_root } => ModuleDescriptor::new(db, source_root, id)?, DefLoc::Module { id, source_root } => Module::new(db, source_root, id)?,
_ => return Ok(None), _ => return Ok(None),
}; };
let scope = module.scope(db)?; let scope = module.scope(db)?;
@ -180,7 +172,7 @@ impl ModuleDescriptor {
Ok(Some(curr)) Ok(Some(curr))
} }
pub fn problems(&self, db: &impl DescriptorDatabase) -> Vec<(SyntaxNode, Problem)> { pub fn problems(&self, db: &impl HirDatabase) -> Vec<(SyntaxNode, Problem)> {
self.module_id.problems(&self.tree, db) self.module_id.problems(&self.tree, db)
} }
} }
@ -193,13 +185,13 @@ impl ModuleDescriptor {
/// (which can have multiple parents) to the precise world of modules (which /// (which can have multiple parents) to the precise world of modules (which
/// always have one parent). /// always have one parent).
#[derive(Default, Debug, PartialEq, Eq)] #[derive(Default, Debug, PartialEq, Eq)]
pub(crate) struct ModuleTree { pub struct ModuleTree {
mods: Arena<ModuleData>, mods: Arena<ModuleData>,
links: Arena<LinkData>, links: Arena<LinkData>,
} }
impl ModuleTree { impl ModuleTree {
fn modules<'a>(&'a self) -> impl Iterator<Item = ModuleId> + 'a { pub(crate) fn modules<'a>(&'a self) -> impl Iterator<Item = ModuleId> + 'a {
self.mods.iter().map(|(id, _)| id) self.mods.iter().map(|(id, _)| id)
} }
@ -219,20 +211,19 @@ impl ModuleTree {
/// `ModuleSource` is the syntax tree element that produced this module: /// `ModuleSource` is the syntax tree element that produced this module:
/// either a file, or an inlinde module. /// either a file, or an inlinde module.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub(crate) enum ModuleSource { pub enum ModuleSource {
SourceFile(FileId), SourceFile(FileId),
#[allow(dead_code)] Module(SourceItemId),
Module(SyntaxPtr),
} }
/// An owned syntax node for a module. Unlike `ModuleSource`, /// An owned syntax node for a module. Unlike `ModuleSource`,
/// this holds onto the AST for the whole file. /// this holds onto the AST for the whole file.
enum ModuleSourceNode { pub enum ModuleSourceNode {
SourceFile(ast::SourceFileNode), SourceFile(ast::SourceFileNode),
Module(ast::ModuleNode), Module(ast::ModuleNode),
} }
pub(crate) type ModuleId = Id<ModuleData>; pub type ModuleId = Id<ModuleData>;
type LinkId = Id<LinkData>; type LinkId = Id<LinkData>;
#[derive(Clone, Debug, Hash, PartialEq, Eq)] #[derive(Clone, Debug, Hash, PartialEq, Eq)]
@ -247,7 +238,7 @@ pub enum Problem {
} }
impl ModuleId { impl ModuleId {
fn source(self, tree: &ModuleTree) -> ModuleSource { pub(crate) fn source(self, tree: &ModuleTree) -> ModuleSource {
tree.mods[self].source tree.mods[self].source
} }
fn parent_link(self, tree: &ModuleTree) -> Option<LinkId> { fn parent_link(self, tree: &ModuleTree) -> Option<LinkId> {
@ -277,7 +268,7 @@ impl ModuleId {
Some((link.name.clone(), module)) Some((link.name.clone(), module))
}) })
} }
fn problems(self, tree: &ModuleTree, db: &impl SyntaxDatabase) -> Vec<(SyntaxNode, Problem)> { fn problems(self, tree: &ModuleTree, db: &impl HirDatabase) -> Vec<(SyntaxNode, Problem)> {
tree.mods[self] tree.mods[self]
.children .children
.iter() .iter()
@ -298,7 +289,7 @@ impl LinkId {
fn name(self, tree: &ModuleTree) -> SmolStr { fn name(self, tree: &ModuleTree) -> SmolStr {
tree.links[self].name.clone() tree.links[self].name.clone()
} }
fn bind_source<'a>(self, tree: &ModuleTree, db: &impl SyntaxDatabase) -> ast::ModuleNode { fn bind_source<'a>(self, tree: &ModuleTree, db: &impl HirDatabase) -> ast::ModuleNode {
let owner = self.owner(tree); let owner = self.owner(tree);
match owner.source(tree).resolve(db) { match owner.source(tree).resolve(db) {
ModuleSourceNode::SourceFile(root) => { ModuleSourceNode::SourceFile(root) => {
@ -314,41 +305,47 @@ impl LinkId {
} }
#[derive(Debug, PartialEq, Eq, Hash)] #[derive(Debug, PartialEq, Eq, Hash)]
pub(crate) struct ModuleData { pub struct ModuleData {
source: ModuleSource, source: ModuleSource,
parent: Option<LinkId>, parent: Option<LinkId>,
children: Vec<LinkId>, children: Vec<LinkId>,
} }
impl ModuleSource { impl ModuleSource {
fn new_inline(file_id: FileId, module: ast::Module) -> ModuleSource { pub(crate) fn new_inline(
db: &impl HirDatabase,
file_id: FileId,
module: ast::Module,
) -> ModuleSource {
assert!(!module.has_semi()); assert!(!module.has_semi());
let ptr = SyntaxPtr::new(file_id, module.syntax()); let items = db.file_items(file_id);
ModuleSource::Module(ptr) let item_id = items.id_of(module.syntax());
let id = SourceItemId { file_id, item_id };
ModuleSource::Module(id)
} }
pub(crate) fn as_file(self) -> Option<FileId> { pub fn as_file(self) -> Option<FileId> {
match self { match self {
ModuleSource::SourceFile(f) => Some(f), ModuleSource::SourceFile(f) => Some(f),
ModuleSource::Module(..) => None, ModuleSource::Module(..) => None,
} }
} }
pub(crate) fn file_id(self) -> FileId { pub fn file_id(self) -> FileId {
match self { match self {
ModuleSource::SourceFile(f) => f, ModuleSource::SourceFile(f) => f,
ModuleSource::Module(ptr) => ptr.file_id(), ModuleSource::Module(source_item_id) => source_item_id.file_id,
} }
} }
fn resolve(self, db: &impl SyntaxDatabase) -> ModuleSourceNode { pub fn resolve(self, db: &impl HirDatabase) -> ModuleSourceNode {
match self { match self {
ModuleSource::SourceFile(file_id) => { ModuleSource::SourceFile(file_id) => {
let syntax = db.file_syntax(file_id); let syntax = db.source_file(file_id);
ModuleSourceNode::SourceFile(syntax.ast().owned()) ModuleSourceNode::SourceFile(syntax.ast().owned())
} }
ModuleSource::Module(ptr) => { ModuleSource::Module(item_id) => {
let syntax = db.resolve_syntax_ptr(ptr); let syntax = db.file_item(item_id);
let syntax = syntax.borrowed(); let syntax = syntax.borrowed();
let module = ast::Module::cast(syntax).unwrap(); let module = ast::Module::cast(syntax).unwrap();
ModuleSourceNode::Module(module.owned()) ModuleSourceNode::Module(module.owned())

View file

@ -16,101 +16,42 @@
//! structure itself is modified. //! structure itself is modified.
use std::{ use std::{
sync::Arc, sync::Arc,
time::Instant,
ops::Index,
}; };
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use ra_syntax::{ use ra_syntax::{
SyntaxNode, SyntaxNodeRef, TextRange, TextRange,
SmolStr, SyntaxKind::{self, *}, SmolStr, SyntaxKind::{self, *},
ast::{self, ModuleItemOwner, AstNode} ast::{self, AstNode}
}; };
use ra_db::SourceRootId;
use crate::{ use crate::{
Cancelable, FileId, Cancelable, FileId,
loc2id::{DefId, DefLoc}, DefId, DefLoc,
descriptors::{ SourceItemId, SourceFileItemId, SourceFileItems,
Path, PathKind, Path, PathKind,
DescriptorDatabase, HirDatabase,
module::{ModuleId, ModuleTree, ModuleSourceNode}, module::{ModuleId, ModuleTree},
},
input::SourceRootId,
arena::{Arena, Id}
}; };
/// Identifier of item within a specific file. This is stable over reparses, so
/// it's OK to use it as a salsa key/value.
pub(crate) type FileItemId = Id<SyntaxNode>;
/// Maps item's `SyntaxNode`s to `FileItemId` and back.
#[derive(Debug, PartialEq, Eq, Default)]
pub(crate) struct FileItems {
arena: Arena<SyntaxNode>,
}
impl FileItems {
fn alloc(&mut self, item: SyntaxNode) -> FileItemId {
self.arena.alloc(item)
}
fn id_of(&self, item: SyntaxNodeRef) -> FileItemId {
let (id, _item) = self
.arena
.iter()
.find(|(_id, i)| i.borrowed() == item)
.unwrap();
id
}
}
impl Index<FileItemId> for FileItems {
type Output = SyntaxNode;
fn index(&self, idx: FileItemId) -> &SyntaxNode {
&self.arena[idx]
}
}
pub(crate) fn file_items(db: &impl DescriptorDatabase, file_id: FileId) -> Arc<FileItems> {
let source_file = db.file_syntax(file_id);
let source_file = source_file.borrowed();
let mut res = FileItems::default();
source_file
.syntax()
.descendants()
.filter_map(ast::ModuleItem::cast)
.map(|it| it.syntax().owned())
.for_each(|it| {
res.alloc(it);
});
Arc::new(res)
}
pub(crate) fn file_item(
db: &impl DescriptorDatabase,
file_id: FileId,
file_item_id: FileItemId,
) -> SyntaxNode {
db._file_items(file_id)[file_item_id].clone()
}
/// Item map is the result of the name resolution. Item map contains, for each /// Item map is the result of the name resolution. Item map contains, for each
/// module, the set of visible items. /// module, the set of visible items.
#[derive(Default, Debug, PartialEq, Eq)] #[derive(Default, Debug, PartialEq, Eq)]
pub(crate) struct ItemMap { pub struct ItemMap {
pub(crate) per_module: FxHashMap<ModuleId, ModuleScope>, pub per_module: FxHashMap<ModuleId, ModuleScope>,
} }
#[derive(Debug, Default, PartialEq, Eq, Clone)] #[derive(Debug, Default, PartialEq, Eq, Clone)]
pub(crate) struct ModuleScope { pub struct ModuleScope {
items: FxHashMap<SmolStr, Resolution>, pub items: FxHashMap<SmolStr, Resolution>,
} }
impl ModuleScope { impl ModuleScope {
pub(crate) fn entries<'a>(&'a self) -> impl Iterator<Item = (&'a SmolStr, &Resolution)> + 'a { pub fn entries<'a>(&'a self) -> impl Iterator<Item = (&'a SmolStr, &Resolution)> + 'a {
self.items.iter() self.items.iter()
} }
pub(crate) fn get(&self, name: &SmolStr) -> Option<&Resolution> { pub fn get(&self, name: &SmolStr) -> Option<&Resolution> {
self.items.get(name) self.items.get(name)
} }
} }
@ -122,14 +63,14 @@ impl ModuleScope {
/// recomputing name res: if `InputModuleItems` are the same, we can avoid /// recomputing name res: if `InputModuleItems` are the same, we can avoid
/// running name resolution. /// running name resolution.
#[derive(Debug, Default, PartialEq, Eq)] #[derive(Debug, Default, PartialEq, Eq)]
pub(crate) struct InputModuleItems { pub struct InputModuleItems {
items: Vec<ModuleItem>, items: Vec<ModuleItem>,
imports: Vec<Import>, imports: Vec<Import>,
} }
#[derive(Debug, PartialEq, Eq)] #[derive(Debug, PartialEq, Eq)]
struct ModuleItem { struct ModuleItem {
id: FileItemId, id: SourceFileItemId,
name: SmolStr, name: SmolStr,
kind: SyntaxKind, kind: SyntaxKind,
vis: Vis, vis: Vis,
@ -148,14 +89,18 @@ struct Import {
} }
#[derive(Debug, Clone, Copy, PartialEq, Eq)] #[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) struct NamedImport { pub struct NamedImport {
file_item_id: FileItemId, pub file_item_id: SourceFileItemId,
relative_range: TextRange, pub relative_range: TextRange,
} }
impl NamedImport { impl NamedImport {
pub(crate) fn range(&self, db: &impl DescriptorDatabase, file_id: FileId) -> TextRange { pub fn range(&self, db: &impl HirDatabase, file_id: FileId) -> TextRange {
let syntax = db._file_item(file_id, self.file_item_id); let source_item_id = SourceItemId {
file_id,
item_id: self.file_item_id,
};
let syntax = db.file_item(source_item_id);
let offset = syntax.borrowed().range().start(); let offset = syntax.borrowed().range().start();
self.relative_range + offset self.relative_range + offset
} }
@ -167,66 +112,14 @@ enum ImportKind {
Named(NamedImport), Named(NamedImport),
} }
pub(crate) fn input_module_items(
db: &impl DescriptorDatabase,
source_root: SourceRootId,
module_id: ModuleId,
) -> Cancelable<Arc<InputModuleItems>> {
let module_tree = db._module_tree(source_root)?;
let source = module_id.source(&module_tree);
let file_items = db._file_items(source.file_id());
let res = match source.resolve(db) {
ModuleSourceNode::SourceFile(it) => {
let items = it.borrowed().items();
InputModuleItems::new(&file_items, items)
}
ModuleSourceNode::Module(it) => {
let items = it
.borrowed()
.item_list()
.into_iter()
.flat_map(|it| it.items());
InputModuleItems::new(&file_items, items)
}
};
Ok(Arc::new(res))
}
pub(crate) fn item_map(
db: &impl DescriptorDatabase,
source_root: SourceRootId,
) -> Cancelable<Arc<ItemMap>> {
let start = Instant::now();
let module_tree = db._module_tree(source_root)?;
let input = module_tree
.modules()
.map(|id| {
let items = db._input_module_items(source_root, id)?;
Ok((id, items))
})
.collect::<Cancelable<FxHashMap<_, _>>>()?;
let mut resolver = Resolver {
db: db,
input: &input,
source_root,
module_tree,
result: ItemMap::default(),
};
resolver.resolve()?;
let res = resolver.result;
let elapsed = start.elapsed();
log::info!("item_map: {:?}", elapsed);
Ok(Arc::new(res))
}
/// Resolution is basically `DefId` atm, but it should account for stuff like /// Resolution is basically `DefId` atm, but it should account for stuff like
/// multiple namespaces, ambiguity and errors. /// multiple namespaces, ambiguity and errors.
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) struct Resolution { pub struct Resolution {
/// None for unresolved /// None for unresolved
pub(crate) def_id: Option<DefId>, pub def_id: Option<DefId>,
/// ident by whitch this is imported into local scope. /// ident by whitch this is imported into local scope.
pub(crate) import: Option<NamedImport>, pub import: Option<NamedImport>,
} }
// #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] // #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
@ -242,8 +135,8 @@ pub(crate) struct Resolution {
// } // }
impl InputModuleItems { impl InputModuleItems {
fn new<'a>( pub(crate) fn new<'a>(
file_items: &FileItems, file_items: &SourceFileItems,
items: impl Iterator<Item = ast::ModuleItem<'a>>, items: impl Iterator<Item = ast::ModuleItem<'a>>,
) -> InputModuleItems { ) -> InputModuleItems {
let mut res = InputModuleItems::default(); let mut res = InputModuleItems::default();
@ -253,7 +146,7 @@ impl InputModuleItems {
res res
} }
fn add_item(&mut self, file_items: &FileItems, item: ast::ModuleItem) -> Option<()> { fn add_item(&mut self, file_items: &SourceFileItems, item: ast::ModuleItem) -> Option<()> {
match item { match item {
ast::ModuleItem::StructDef(it) => self.items.push(ModuleItem::new(file_items, it)?), ast::ModuleItem::StructDef(it) => self.items.push(ModuleItem::new(file_items, it)?),
ast::ModuleItem::EnumDef(it) => self.items.push(ModuleItem::new(file_items, it)?), ast::ModuleItem::EnumDef(it) => self.items.push(ModuleItem::new(file_items, it)?),
@ -274,7 +167,7 @@ impl InputModuleItems {
Some(()) Some(())
} }
fn add_use_item(&mut self, file_items: &FileItems, item: ast::UseItem) { fn add_use_item(&mut self, file_items: &SourceFileItems, item: ast::UseItem) {
let file_item_id = file_items.id_of(item.syntax()); let file_item_id = file_items.id_of(item.syntax());
let start_offset = item.syntax().range().start(); let start_offset = item.syntax().range().start();
Path::expand_use_item(item, |path, range| { Path::expand_use_item(item, |path, range| {
@ -291,7 +184,7 @@ impl InputModuleItems {
} }
impl ModuleItem { impl ModuleItem {
fn new<'a>(file_items: &FileItems, item: impl ast::NameOwner<'a>) -> Option<ModuleItem> { fn new<'a>(file_items: &SourceFileItems, item: impl ast::NameOwner<'a>) -> Option<ModuleItem> {
let name = item.name()?.text(); let name = item.name()?.text();
let kind = item.syntax().kind(); let kind = item.syntax().kind();
let vis = Vis::Other; let vis = Vis::Other;
@ -306,28 +199,28 @@ impl ModuleItem {
} }
} }
struct Resolver<'a, DB> { pub(crate) struct Resolver<'a, DB> {
db: &'a DB, pub db: &'a DB,
input: &'a FxHashMap<ModuleId, Arc<InputModuleItems>>, pub input: &'a FxHashMap<ModuleId, Arc<InputModuleItems>>,
source_root: SourceRootId, pub source_root: SourceRootId,
module_tree: Arc<ModuleTree>, pub module_tree: Arc<ModuleTree>,
result: ItemMap, pub result: ItemMap,
} }
impl<'a, DB> Resolver<'a, DB> impl<'a, DB> Resolver<'a, DB>
where where
DB: DescriptorDatabase, DB: HirDatabase,
{ {
fn resolve(&mut self) -> Cancelable<()> { pub(crate) fn resolve(mut self) -> Cancelable<ItemMap> {
for (&module_id, items) in self.input.iter() { for (&module_id, items) in self.input.iter() {
self.populate_module(module_id, items) self.populate_module(module_id, items)
} }
for &module_id in self.input.keys() { for &module_id in self.input.keys() {
crate::db::check_canceled(self.db)?; self.db.check_canceled()?;
self.resolve_imports(module_id); self.resolve_imports(module_id);
} }
Ok(()) Ok(self.result)
} }
fn populate_module(&mut self, module_id: ModuleId, input: &InputModuleItems) { fn populate_module(&mut self, module_id: ModuleId, input: &InputModuleItems) {
@ -355,10 +248,12 @@ where
continue; continue;
} }
let def_loc = DefLoc::Item { let def_loc = DefLoc::Item {
file_id, source_item_id: SourceItemId {
id: item.id, file_id,
item_id: item.id,
},
}; };
let def_id = self.db.id_maps().def_id(def_loc); let def_id = def_loc.id(self.db);
let resolution = Resolution { let resolution = Resolution {
def_id: Some(def_id), def_id: Some(def_id),
import: None, import: None,
@ -371,7 +266,7 @@ where
id: mod_id, id: mod_id,
source_root: self.source_root, source_root: self.source_root,
}; };
let def_id = self.db.id_maps().def_id(def_loc); let def_id = def_loc.id(self.db);
let resolution = Resolution { let resolution = Resolution {
def_id: Some(def_id), def_id: Some(def_id),
import: None, import: None,
@ -420,7 +315,7 @@ where
}; };
if !is_last { if !is_last {
curr = match self.db.id_maps().def_loc(def_id) { curr = match def_id.loc(self.db) {
DefLoc::Module { id, .. } => id, DefLoc::Module { id, .. } => id,
_ => return, _ => return,
} }
@ -441,109 +336,3 @@ where
f(module_items) f(module_items)
} }
} }
#[cfg(test)]
mod tests {
use crate::{
AnalysisChange,
mock_analysis::{MockAnalysis, analysis_and_position},
descriptors::{DescriptorDatabase, module::ModuleDescriptor},
input::FilesDatabase,
};
use super::*;
fn item_map(fixture: &str) -> (Arc<ItemMap>, ModuleId) {
let (analysis, pos) = analysis_and_position(fixture);
let db = analysis.imp.db;
let source_root = db.file_source_root(pos.file_id);
let descr = ModuleDescriptor::guess_from_position(&*db, pos)
.unwrap()
.unwrap();
let module_id = descr.module_id;
(db._item_map(source_root).unwrap(), module_id)
}
#[test]
fn test_item_map() {
let (item_map, module_id) = item_map(
"
//- /lib.rs
mod foo;
use crate::foo::bar::Baz;
<|>
//- /foo/mod.rs
pub mod bar;
//- /foo/bar.rs
pub struct Baz;
",
);
let name = SmolStr::from("Baz");
let resolution = &item_map.per_module[&module_id].items[&name];
assert!(resolution.def_id.is_some());
}
#[test]
fn typing_inside_a_function_should_not_invalidate_item_map() {
let mock_analysis = MockAnalysis::with_files(
"
//- /lib.rs
mod foo;
use crate::foo::bar::Baz;
fn foo() -> i32 {
1 + 1
}
//- /foo/mod.rs
pub mod bar;
//- /foo/bar.rs
pub struct Baz;
",
);
let file_id = mock_analysis.id_of("/lib.rs");
let mut host = mock_analysis.analysis_host();
let source_root = host.analysis().imp.db.file_source_root(file_id);
{
let db = host.analysis().imp.db;
let events = db.log_executed(|| {
db._item_map(source_root).unwrap();
});
assert!(format!("{:?}", events).contains("_item_map"))
}
let mut change = AnalysisChange::new();
change.change_file(
file_id,
"
mod foo;
use crate::foo::bar::Baz;
fn foo() -> i32 { 92 }
"
.to_string(),
);
host.apply_change(change);
{
let db = host.analysis().imp.db;
let events = db.log_executed(|| {
db._item_map(source_root).unwrap();
});
assert!(
!format!("{:?}", events).contains("_item_map"),
"{:#?}",
events
)
}
}
}

View file

@ -1,13 +1,13 @@
use ra_syntax::{SmolStr, ast, AstNode, TextRange}; use ra_syntax::{SmolStr, ast, AstNode, TextRange};
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) struct Path { pub struct Path {
pub(crate) kind: PathKind, pub kind: PathKind,
pub(crate) segments: Vec<SmolStr>, pub segments: Vec<SmolStr>,
} }
#[derive(Debug, Clone, Copy, PartialEq, Eq)] #[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) enum PathKind { pub enum PathKind {
Plain, Plain,
Self_, Self_,
Super, Super,
@ -16,14 +16,14 @@ pub(crate) enum PathKind {
impl Path { impl Path {
/// Calls `cb` with all paths, represented by this use item. /// Calls `cb` with all paths, represented by this use item.
pub(crate) fn expand_use_item(item: ast::UseItem, mut cb: impl FnMut(Path, Option<TextRange>)) { pub fn expand_use_item(item: ast::UseItem, mut cb: impl FnMut(Path, Option<TextRange>)) {
if let Some(tree) = item.use_tree() { if let Some(tree) = item.use_tree() {
expand_use_tree(None, tree, &mut cb); expand_use_tree(None, tree, &mut cb);
} }
} }
/// Converts an `ast::Path` to `Path`. Works with use trees. /// Converts an `ast::Path` to `Path`. Works with use trees.
pub(crate) fn from_ast(mut path: ast::Path) -> Option<Path> { pub fn from_ast(mut path: ast::Path) -> Option<Path> {
let mut kind = PathKind::Plain; let mut kind = PathKind::Plain;
let mut segments = Vec::new(); let mut segments = Vec::new();
loop { loop {
@ -64,7 +64,7 @@ impl Path {
} }
/// `true` is this path is a single identifier, like `foo` /// `true` is this path is a single identifier, like `foo`
pub(crate) fn is_ident(&self) -> bool { pub fn is_ident(&self) -> bool {
self.kind == PathKind::Plain && self.segments.len() == 1 self.kind == PathKind::Plain && self.segments.len() == 1
} }
} }

View file

@ -0,0 +1,154 @@
use std::{
sync::Arc,
time::Instant,
};
use rustc_hash::FxHashMap;
use ra_syntax::{
AstNode, SyntaxNode, SmolStr,
ast::{self, FnDef, FnDefNode, NameOwner, ModuleItemOwner}
};
use ra_db::{SourceRootId, FileId, Cancelable,};
use crate::{
FnId,
SourceFileItems, SourceItemId,
db::HirDatabase,
function::FnScopes,
module::{
ModuleSource, ModuleSourceNode, ModuleId,
imp::Submodule,
nameres::{InputModuleItems, ItemMap, Resolver},
},
};
/// Resolve `FnId` to the corresponding `SyntaxNode`
pub(super) fn fn_syntax(db: &impl HirDatabase, fn_id: FnId) -> FnDefNode {
let item_id = fn_id.loc(db);
let syntax = db.file_item(item_id);
FnDef::cast(syntax.borrowed()).unwrap().owned()
}
pub(super) fn fn_scopes(db: &impl HirDatabase, fn_id: FnId) -> Arc<FnScopes> {
let syntax = db.fn_syntax(fn_id);
let res = FnScopes::new(syntax.borrowed());
Arc::new(res)
}
pub(super) fn file_items(db: &impl HirDatabase, file_id: FileId) -> Arc<SourceFileItems> {
let source_file = db.source_file(file_id);
let source_file = source_file.borrowed();
let mut res = SourceFileItems::default();
source_file
.syntax()
.descendants()
.filter_map(ast::ModuleItem::cast)
.map(|it| it.syntax().owned())
.for_each(|it| {
res.alloc(it);
});
Arc::new(res)
}
pub(super) fn file_item(db: &impl HirDatabase, source_item_id: SourceItemId) -> SyntaxNode {
db.file_items(source_item_id.file_id)[source_item_id.item_id].clone()
}
pub(crate) fn submodules(
db: &impl HirDatabase,
source: ModuleSource,
) -> Cancelable<Arc<Vec<Submodule>>> {
db.check_canceled()?;
let file_id = source.file_id();
let submodules = match source.resolve(db) {
ModuleSourceNode::SourceFile(it) => collect_submodules(db, file_id, it.borrowed()),
ModuleSourceNode::Module(it) => it
.borrowed()
.item_list()
.map(|it| collect_submodules(db, file_id, it))
.unwrap_or_else(Vec::new),
};
return Ok(Arc::new(submodules));
fn collect_submodules<'a>(
db: &impl HirDatabase,
file_id: FileId,
root: impl ast::ModuleItemOwner<'a>,
) -> Vec<Submodule> {
modules(root)
.map(|(name, m)| {
if m.has_semi() {
Submodule::Declaration(name)
} else {
let src = ModuleSource::new_inline(db, file_id, m);
Submodule::Definition(name, src)
}
})
.collect()
}
}
pub(crate) fn modules<'a>(
root: impl ast::ModuleItemOwner<'a>,
) -> impl Iterator<Item = (SmolStr, ast::Module<'a>)> {
root.items()
.filter_map(|item| match item {
ast::ModuleItem::Module(m) => Some(m),
_ => None,
})
.filter_map(|module| {
let name = module.name()?.text();
Some((name, module))
})
}
pub(super) fn input_module_items(
db: &impl HirDatabase,
source_root: SourceRootId,
module_id: ModuleId,
) -> Cancelable<Arc<InputModuleItems>> {
let module_tree = db.module_tree(source_root)?;
let source = module_id.source(&module_tree);
let file_items = db.file_items(source.file_id());
let res = match source.resolve(db) {
ModuleSourceNode::SourceFile(it) => {
let items = it.borrowed().items();
InputModuleItems::new(&file_items, items)
}
ModuleSourceNode::Module(it) => {
let items = it
.borrowed()
.item_list()
.into_iter()
.flat_map(|it| it.items());
InputModuleItems::new(&file_items, items)
}
};
Ok(Arc::new(res))
}
pub(super) fn item_map(
db: &impl HirDatabase,
source_root: SourceRootId,
) -> Cancelable<Arc<ItemMap>> {
let start = Instant::now();
let module_tree = db.module_tree(source_root)?;
let input = module_tree
.modules()
.map(|id| {
let items = db.input_module_items(source_root, id)?;
Ok((id, items))
})
.collect::<Cancelable<FxHashMap<_, _>>>()?;
let resolver = Resolver {
db: db,
input: &input,
source_root,
module_tree,
result: ItemMap::default(),
};
let res = resolver.resolve()?;
let elapsed = start.elapsed();
log::info!("item_map: {:?}", elapsed);
Ok(Arc::new(res))
}

View file

@ -140,7 +140,7 @@ impl ServerWorldState {
Ok(file_id) Ok(file_id)
} }
pub fn set_workspaces(&mut self, ws: Vec<CargoWorkspace>) { pub fn set_workspaces(&mut self, ws: Vec<CargoWorkspace>) {
let mut crate_graph = CrateGraph::new(); let mut crate_graph = CrateGraph::default();
ws.iter() ws.iter()
.flat_map(|ws| { .flat_map(|ws| {
ws.packages() ws.packages()