reuse AnalysisHost in batch analysis

Aleksey Kladov 2019-06-15 16:29:23 +03:00
parent 41c56c8a0d
commit b0be4207d0
6 changed files with 89 additions and 108 deletions

Cargo.lock

@@ -1023,6 +1023,7 @@ dependencies = [
  "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "ra_db 0.1.0",
  "ra_hir 0.1.0",
+ "ra_ide_api 0.1.0",
  "ra_project_model 0.1.0",
  "ra_syntax 0.1.0",
  "ra_vfs 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)",

crates/ra_batch/Cargo.toml

@@ -11,6 +11,7 @@ rustc-hash = "1.0"
 ra_vfs = "0.2.0"
 ra_syntax = { path = "../ra_syntax" }
 ra_db = { path = "../ra_db" }
+ra_ide_api = { path = "../ra_ide_api" }
 ra_hir = { path = "../ra_hir" }
 ra_project_model = { path = "../ra_project_model" }

crates/ra_batch/src/lib.rs

@@ -1,36 +1,19 @@
 mod vfs_filter;
 
-use std::{sync::Arc, path::Path, collections::HashSet, error::Error};
+use std::{path::Path, collections::HashSet, error::Error};
 
 use rustc_hash::FxHashMap;
 
 use ra_db::{
-    CrateGraph, FileId, SourceRoot, SourceRootId, SourceDatabase, salsa::{self, Database},
+    CrateGraph, FileId, SourceRootId,
 };
-use ra_hir::db;
+use ra_ide_api::{AnalysisHost, AnalysisChange};
 use ra_project_model::ProjectWorkspace;
 use ra_vfs::{Vfs, VfsChange};
 use vfs_filter::IncludeRustFiles;
 
 type Result<T> = std::result::Result<T, Box<dyn Error + Send + Sync>>;
 
-#[salsa::database(
-    ra_db::SourceDatabaseStorage,
-    db::AstDatabaseStorage,
-    db::DefDatabaseStorage,
-    db::HirDatabaseStorage
-)]
-#[derive(Debug)]
-pub struct BatchDatabase {
-    runtime: salsa::Runtime<BatchDatabase>,
-}
-
-impl salsa::Database for BatchDatabase {
-    fn salsa_runtime(&self) -> &salsa::Runtime<BatchDatabase> {
-        &self.runtime
-    }
-}
-
 fn vfs_file_to_id(f: ra_vfs::VfsFile) -> FileId {
     FileId(f.0)
 }
@@ -38,16 +21,35 @@ fn vfs_root_to_id(r: ra_vfs::VfsRoot) -> SourceRootId {
     SourceRootId(r.0)
 }
 
-impl BatchDatabase {
-    pub fn load(crate_graph: CrateGraph, vfs: &mut Vfs) -> BatchDatabase {
-        let mut db = BatchDatabase { runtime: salsa::Runtime::default() };
-        let lru_cap = std::env::var("RA_LRU_CAP")
-            .ok()
-            .and_then(|it| it.parse::<usize>().ok())
-            .unwrap_or(ra_db::DEFAULT_LRU_CAP);
-        db.query_mut(ra_db::ParseQuery).set_lru_capacity(lru_cap);
-        db.query_mut(ra_hir::db::ParseMacroQuery).set_lru_capacity(lru_cap);
-        db.set_crate_graph(Arc::new(crate_graph));
+pub fn load_cargo(root: &Path) -> Result<(AnalysisHost, Vec<SourceRootId>)> {
+    let root = std::env::current_dir()?.join(root);
+    let ws = ProjectWorkspace::discover(root.as_ref())?;
+    let mut roots = Vec::new();
+    roots.push(IncludeRustFiles::member(root.clone()));
+    roots.extend(IncludeRustFiles::from_roots(ws.to_roots()));
+    let (mut vfs, roots) = Vfs::new(roots);
+    let crate_graph = ws.to_crate_graph(&mut |path: &Path| {
+        let vfs_file = vfs.load(path);
+        log::debug!("vfs file {:?} -> {:?}", path, vfs_file);
+        vfs_file.map(vfs_file_to_id)
+    });
+    log::debug!("crate graph: {:?}", crate_graph);
+
+    let local_roots = roots
+        .into_iter()
+        .filter(|r| vfs.root2path(*r).starts_with(&root))
+        .map(vfs_root_to_id)
+        .collect();
+
+    let host = load(root.as_path(), crate_graph, &mut vfs);
+    Ok((host, local_roots))
+}
+
+pub fn load(project_root: &Path, crate_graph: CrateGraph, vfs: &mut Vfs) -> AnalysisHost {
+    let lru_cap = std::env::var("RA_LRU_CAP").ok().and_then(|it| it.parse::<usize>().ok());
+    let mut host = AnalysisHost::new(lru_cap);
+    let mut analysis_change = AnalysisChange::new();
+    analysis_change.set_crate_graph(crate_graph);
 
     // wait until Vfs has loaded all roots
     let receiver = vfs.task_receiver().clone();
@@ -58,22 +60,21 @@ impl BatchDatabase {
         for change in vfs.commit_changes() {
             match change {
                 VfsChange::AddRoot { root, files } => {
+                    let is_local = vfs.root2path(root).starts_with(&project_root);
                     let source_root_id = vfs_root_to_id(root);
                     log::debug!(
                         "loaded source root {:?} with path {:?}",
                         source_root_id,
                         vfs.root2path(root)
                     );
+                    analysis_change.add_root(source_root_id, is_local);
+
                     let mut file_map = FxHashMap::default();
                     for (vfs_file, path, text) in files {
                         let file_id = vfs_file_to_id(vfs_file);
-                        db.set_file_text(file_id, text);
-                        db.set_file_relative_path(file_id, path.clone());
-                        db.set_file_source_root(file_id, source_root_id);
+                        analysis_change.add_file(source_root_id, file_id, path.clone(), text);
                         file_map.insert(path, file_id);
                     }
-                    let source_root = SourceRoot { files: file_map };
-                    db.set_source_root(source_root_id, Arc::new(source_root));
                     roots_loaded.insert(source_root_id);
                     if roots_loaded.len() == vfs.n_roots() {
                         done = true;
@@ -91,33 +92,8 @@ impl BatchDatabase {
             }
         }
 
-        db
-    }
-
-    pub fn load_cargo(root: impl AsRef<Path>) -> Result<(BatchDatabase, Vec<SourceRootId>)> {
-        let root = std::env::current_dir()?.join(root);
-        let ws = ProjectWorkspace::discover(root.as_ref())?;
-        let mut roots = Vec::new();
-        roots.push(IncludeRustFiles::member(root.clone()));
-        roots.extend(IncludeRustFiles::from_roots(ws.to_roots()));
-        let (mut vfs, roots) = Vfs::new(roots);
-        let mut load = |path: &Path| {
-            let vfs_file = vfs.load(path);
-            log::debug!("vfs file {:?} -> {:?}", path, vfs_file);
-            vfs_file.map(vfs_file_to_id)
-        };
-        let crate_graph = ws.to_crate_graph(&mut load);
-        log::debug!("crate graph: {:?}", crate_graph);
-
-        let local_roots = roots
-            .into_iter()
-            .filter(|r| vfs.root2path(*r).starts_with(&root))
-            .map(vfs_root_to_id)
-            .collect();
-
-        let db = BatchDatabase::load(crate_graph, &mut vfs);
-        Ok((db, local_roots))
-    }
+    host.apply_change(analysis_change);
+    host
 }
 
 #[cfg(test)]
@@ -128,10 +104,10 @@ mod tests {
     #[test]
     fn test_loading_rust_analyzer() {
         let path = Path::new(env!("CARGO_MANIFEST_DIR")).parent().unwrap().parent().unwrap();
-        let (db, roots) = BatchDatabase::load_cargo(path).unwrap();
+        let (host, roots) = load_cargo(path).unwrap();
         let mut n_crates = 0;
        for root in roots {
-            for _krate in Crate::source_root_crates(&db, root) {
+            for _krate in Crate::source_root_crates(host.raw_database(), root) {
                 n_crates += 1;
             }
         }
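
For reference, a minimal sketch of how a consumer could drive the new ra_batch API after this change. The standalone binary, the "." path, and the error boilerplate are illustrative; load_cargo, raw_database and Crate::source_root_crates are the calls shown in the diffs:

    use std::path::Path;

    use ra_hir::Crate;

    fn main() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
        // Discover the cargo workspace, load it through Vfs, and get back a
        // populated AnalysisHost plus the ids of the local source roots.
        let (host, local_roots) = ra_batch::load_cargo(Path::new("."))?;

        // raw_database() (added to AnalysisHost below) exposes the underlying
        // hir database, so batch tools can run hir queries directly.
        let db = host.raw_database();
        let mut n_crates = 0;
        for root in local_roots {
            for _krate in Crate::source_root_crates(db, root) {
                n_crates += 1;
            }
        }
        println!("crates in workspace: {}", n_crates);
        Ok(())
    }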

crates/ra_cli/src/analysis_stats.rs

@@ -1,7 +1,6 @@
 use std::{collections::HashSet, time::Instant, fmt::Write};
 
 use ra_db::SourceDatabase;
-use ra_batch::BatchDatabase;
 use ra_hir::{Crate, ModuleDef, Ty, ImplItem, HasSource};
 use ra_syntax::AstNode;
@@ -9,16 +8,17 @@ use crate::Result;
 pub fn run(verbose: bool, path: &str, only: Option<&str>) -> Result<()> {
     let db_load_time = Instant::now();
-    let (db, roots) = BatchDatabase::load_cargo(path)?;
+    let (host, roots) = ra_batch::load_cargo(path.as_ref())?;
+    let db = host.raw_database();
     println!("Database loaded, {} roots, {:?}", roots.len(), db_load_time.elapsed());
     let analysis_time = Instant::now();
     let mut num_crates = 0;
     let mut visited_modules = HashSet::new();
     let mut visit_queue = Vec::new();
     for root in roots {
-        for krate in Crate::source_root_crates(&db, root) {
+        for krate in Crate::source_root_crates(db, root) {
             num_crates += 1;
-            let module = krate.root_module(&db).expect("crate in source root without root module");
+            let module = krate.root_module(db).expect("crate in source root without root module");
             visit_queue.push(module);
         }
     }
@@ -27,17 +27,17 @@ pub fn run(verbose: bool, path: &str, only: Option<&str>) -> Result<()> {
     let mut funcs = Vec::new();
     while let Some(module) = visit_queue.pop() {
         if visited_modules.insert(module) {
-            visit_queue.extend(module.children(&db));
-            for decl in module.declarations(&db) {
+            visit_queue.extend(module.children(db));
+            for decl in module.declarations(db) {
                 num_decls += 1;
                 if let ModuleDef::Function(f) = decl {
                     funcs.push(f);
                 }
             }
-            for impl_block in module.impl_blocks(&db) {
-                for item in impl_block.items(&db) {
+            for impl_block in module.impl_blocks(db) {
+                for item in impl_block.items(db) {
                     num_decls += 1;
                     if let ImplItem::Method(f) = item {
                         funcs.push(f);
@@ -61,11 +61,11 @@ pub fn run(verbose: bool, path: &str, only: Option<&str>) -> Result<()> {
     let mut num_exprs_unknown = 0;
     let mut num_exprs_partially_unknown = 0;
     for f in funcs {
-        let name = f.name(&db);
+        let name = f.name(db);
         let mut msg = format!("processing: {}", name);
         if verbose {
-            let src = f.source(&db);
-            let original_file = src.file_id.original_file(&db);
+            let src = f.source(db);
+            let original_file = src.file_id.original_file(db);
             let path = db.file_relative_path(original_file);
             let syntax_range = src.ast.syntax().range();
             write!(msg, " ({:?} {})", path, syntax_range).unwrap();
@@ -76,8 +76,8 @@ pub fn run(verbose: bool, path: &str, only: Option<&str>) -> Result<()> {
                 continue;
             }
         }
-        let body = f.body(&db);
-        let inference_result = f.infer(&db);
+        let body = f.body(db);
+        let inference_result = f.infer(db);
         for (expr_id, _) in body.exprs() {
             let ty = &inference_result[expr_id];
             num_exprs += 1;

crates/ra_ide_api/src/lib.rs

@@ -276,6 +276,9 @@ impl AnalysisHost {
     pub fn collect_garbage(&mut self) {
         self.db.collect_garbage();
     }
+
+    pub fn raw_database(&self) -> &impl hir::db::HirDatabase {
+        &self.db
+    }
 }
 
 /// Analysis is a snapshot of a world state at a moment in time. It is the main
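
Returning &impl hir::db::HirDatabase keeps the concrete database type private to ra_ide_api while still giving callers access to the hir query methods; the updated ra_batch test and ra_cli analysis_stats above use exactly this accessor in place of the removed standalone BatchDatabase.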

crates/ra_lsp_server/src/main.rs

@@ -17,7 +17,7 @@ fn main() -> Result<()> {
         Err(_) => ra_prof::Filter::disabled(),
     });
     log::info!("lifecycle: server started");
-    match ::std::panic::catch_unwind(main_inner) {
+    match std::panic::catch_unwind(main_inner) {
         Ok(res) => {
             log::info!("lifecycle: terminating process with {:?}", res);
             res