Mirror of https://github.com/rust-lang/rust-analyzer (synced 2024-12-26 21:13:37 +00:00)
reuse AnalysisHost in batch analysis
parent 41c56c8a0d
commit b0be4207d0
6 changed files with 89 additions and 108 deletions
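In short: ra_batch drops its hand-rolled salsa database (BatchDatabase) and reuses ra_ide_api's AnalysisHost instead. load_cargo becomes a free function returning (AnalysisHost, Vec<SourceRootId>), and a new AnalysisHost::raw_database accessor hands batch consumers the underlying database. A minimal sketch of the resulting API, pieced together from the test and the ra_cli call site in this diff (not a verbatim excerpt):

    use std::path::Path;

    use ra_batch::load_cargo;
    use ra_hir::Crate;

    type Result<T> = std::result::Result<T, Box<dyn std::error::Error + Send + Sync>>;

    fn count_crates(path: &Path) -> Result<usize> {
        // load_cargo now builds an AnalysisHost instead of a BatchDatabase.
        let (host, roots) = load_cargo(path)?;
        let mut n_crates = 0;
        for root in roots {
            // raw_database() is the accessor added to AnalysisHost in this commit.
            for _krate in Crate::source_root_crates(host.raw_database(), root) {
                n_crates += 1;
            }
        }
        Ok(n_crates)
    }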
Cargo.lock (generated)
@@ -1023,6 +1023,7 @@ dependencies = [
  "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "ra_db 0.1.0",
  "ra_hir 0.1.0",
+ "ra_ide_api 0.1.0",
  "ra_project_model 0.1.0",
  "ra_syntax 0.1.0",
  "ra_vfs 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)",
crates/ra_batch/Cargo.toml
@@ -11,6 +11,7 @@ rustc-hash = "1.0"
 ra_vfs = "0.2.0"
 ra_syntax = { path = "../ra_syntax" }
 ra_db = { path = "../ra_db" }
+ra_ide_api = { path = "../ra_ide_api" }
 ra_hir = { path = "../ra_hir" }
 ra_project_model = { path = "../ra_project_model" }
crates/ra_batch/src/lib.rs
@@ -1,36 +1,19 @@
 mod vfs_filter;
 
-use std::{sync::Arc, path::Path, collections::HashSet, error::Error};
+use std::{path::Path, collections::HashSet, error::Error};
 
 use rustc_hash::FxHashMap;
 
 use ra_db::{
-    CrateGraph, FileId, SourceRoot, SourceRootId, SourceDatabase, salsa::{self, Database},
+    CrateGraph, FileId, SourceRootId,
 };
-use ra_hir::db;
+use ra_ide_api::{AnalysisHost, AnalysisChange};
 use ra_project_model::ProjectWorkspace;
 use ra_vfs::{Vfs, VfsChange};
 use vfs_filter::IncludeRustFiles;
 
 type Result<T> = std::result::Result<T, Box<dyn Error + Send + Sync>>;
 
-#[salsa::database(
-    ra_db::SourceDatabaseStorage,
-    db::AstDatabaseStorage,
-    db::DefDatabaseStorage,
-    db::HirDatabaseStorage
-)]
-#[derive(Debug)]
-pub struct BatchDatabase {
-    runtime: salsa::Runtime<BatchDatabase>,
-}
-
-impl salsa::Database for BatchDatabase {
-    fn salsa_runtime(&self) -> &salsa::Runtime<BatchDatabase> {
-        &self.runtime
-    }
-}
-
 fn vfs_file_to_id(f: ra_vfs::VfsFile) -> FileId {
     FileId(f.0)
 }
@@ -38,86 +21,79 @@ fn vfs_root_to_id(r: ra_vfs::VfsRoot) -> SourceRootId {
     SourceRootId(r.0)
 }
 
-impl BatchDatabase {
-    pub fn load(crate_graph: CrateGraph, vfs: &mut Vfs) -> BatchDatabase {
-        let mut db = BatchDatabase { runtime: salsa::Runtime::default() };
-        let lru_cap = std::env::var("RA_LRU_CAP")
-            .ok()
-            .and_then(|it| it.parse::<usize>().ok())
-            .unwrap_or(ra_db::DEFAULT_LRU_CAP);
-        db.query_mut(ra_db::ParseQuery).set_lru_capacity(lru_cap);
-        db.query_mut(ra_hir::db::ParseMacroQuery).set_lru_capacity(lru_cap);
-        db.set_crate_graph(Arc::new(crate_graph));
+pub fn load_cargo(root: &Path) -> Result<(AnalysisHost, Vec<SourceRootId>)> {
+    let root = std::env::current_dir()?.join(root);
+    let ws = ProjectWorkspace::discover(root.as_ref())?;
+    let mut roots = Vec::new();
+    roots.push(IncludeRustFiles::member(root.clone()));
+    roots.extend(IncludeRustFiles::from_roots(ws.to_roots()));
+    let (mut vfs, roots) = Vfs::new(roots);
+    let crate_graph = ws.to_crate_graph(&mut |path: &Path| {
+        let vfs_file = vfs.load(path);
+        log::debug!("vfs file {:?} -> {:?}", path, vfs_file);
+        vfs_file.map(vfs_file_to_id)
+    });
+    log::debug!("crate graph: {:?}", crate_graph);
 
-        // wait until Vfs has loaded all roots
-        let receiver = vfs.task_receiver().clone();
-        let mut roots_loaded = HashSet::new();
-        for task in receiver {
-            vfs.handle_task(task);
-            let mut done = false;
-            for change in vfs.commit_changes() {
-                match change {
-                    VfsChange::AddRoot { root, files } => {
-                        let source_root_id = vfs_root_to_id(root);
-                        log::debug!(
-                            "loaded source root {:?} with path {:?}",
-                            source_root_id,
-                            vfs.root2path(root)
-                        );
-                        let mut file_map = FxHashMap::default();
-                        for (vfs_file, path, text) in files {
-                            let file_id = vfs_file_to_id(vfs_file);
-                            db.set_file_text(file_id, text);
-                            db.set_file_relative_path(file_id, path.clone());
-                            db.set_file_source_root(file_id, source_root_id);
-                            file_map.insert(path, file_id);
-                        }
-                        let source_root = SourceRoot { files: file_map };
-                        db.set_source_root(source_root_id, Arc::new(source_root));
-                        roots_loaded.insert(source_root_id);
-                        if roots_loaded.len() == vfs.n_roots() {
-                            done = true;
-                        }
-                    }
-                    VfsChange::AddFile { .. }
-                    | VfsChange::RemoveFile { .. }
-                    | VfsChange::ChangeFile { .. } => {
-                        // We just need the first scan, so just ignore these
-                    }
-                }
-            }
-            if done {
-                break;
-            }
-        }
-
-        db
-    }
-
-    pub fn load_cargo(root: impl AsRef<Path>) -> Result<(BatchDatabase, Vec<SourceRootId>)> {
-        let root = std::env::current_dir()?.join(root);
-        let ws = ProjectWorkspace::discover(root.as_ref())?;
-        let mut roots = Vec::new();
-        roots.push(IncludeRustFiles::member(root.clone()));
-        roots.extend(IncludeRustFiles::from_roots(ws.to_roots()));
-        let (mut vfs, roots) = Vfs::new(roots);
-        let mut load = |path: &Path| {
-            let vfs_file = vfs.load(path);
-            log::debug!("vfs file {:?} -> {:?}", path, vfs_file);
-            vfs_file.map(vfs_file_to_id)
-        };
-        let crate_graph = ws.to_crate_graph(&mut load);
-        log::debug!("crate graph: {:?}", crate_graph);
-
-        let local_roots = roots
-            .into_iter()
-            .filter(|r| vfs.root2path(*r).starts_with(&root))
-            .map(vfs_root_to_id)
-            .collect();
-
-        let db = BatchDatabase::load(crate_graph, &mut vfs);
-        Ok((db, local_roots))
-    }
+    let local_roots = roots
+        .into_iter()
+        .filter(|r| vfs.root2path(*r).starts_with(&root))
+        .map(vfs_root_to_id)
+        .collect();
+
+    let host = load(root.as_path(), crate_graph, &mut vfs);
+    Ok((host, local_roots))
+}
+
+pub fn load(project_root: &Path, crate_graph: CrateGraph, vfs: &mut Vfs) -> AnalysisHost {
+    let lru_cap = std::env::var("RA_LRU_CAP").ok().and_then(|it| it.parse::<usize>().ok());
+    let mut host = AnalysisHost::new(lru_cap);
+    let mut analysis_change = AnalysisChange::new();
+    analysis_change.set_crate_graph(crate_graph);
+
+    // wait until Vfs has loaded all roots
+    let receiver = vfs.task_receiver().clone();
+    let mut roots_loaded = HashSet::new();
+    for task in receiver {
+        vfs.handle_task(task);
+        let mut done = false;
+        for change in vfs.commit_changes() {
+            match change {
+                VfsChange::AddRoot { root, files } => {
+                    let is_local = vfs.root2path(root).starts_with(&project_root);
+                    let source_root_id = vfs_root_to_id(root);
+                    log::debug!(
+                        "loaded source root {:?} with path {:?}",
+                        source_root_id,
+                        vfs.root2path(root)
+                    );
+                    analysis_change.add_root(source_root_id, is_local);
+
+                    let mut file_map = FxHashMap::default();
+                    for (vfs_file, path, text) in files {
+                        let file_id = vfs_file_to_id(vfs_file);
+                        analysis_change.add_file(source_root_id, file_id, path.clone(), text);
+                        file_map.insert(path, file_id);
+                    }
+                    roots_loaded.insert(source_root_id);
+                    if roots_loaded.len() == vfs.n_roots() {
+                        done = true;
+                    }
+                }
+                VfsChange::AddFile { .. }
+                | VfsChange::RemoveFile { .. }
+                | VfsChange::ChangeFile { .. } => {
+                    // We just need the first scan, so just ignore these
+                }
+            }
+        }
+        if done {
+            break;
+        }
+    }
+
+    host.apply_change(analysis_change);
+    host
 }
 
 #[cfg(test)]
@@ -128,10 +104,10 @@ mod tests {
     #[test]
     fn test_loading_rust_analyzer() {
         let path = Path::new(env!("CARGO_MANIFEST_DIR")).parent().unwrap().parent().unwrap();
-        let (db, roots) = BatchDatabase::load_cargo(path).unwrap();
+        let (host, roots) = load_cargo(path).unwrap();
         let mut n_crates = 0;
         for root in roots {
-            for _krate in Crate::source_root_crates(&db, root) {
+            for _krate in Crate::source_root_crates(host.raw_database(), root) {
                 n_crates += 1;
             }
         }
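One behavioral detail in the rewrite above: the old BatchDatabase::load parsed RA_LRU_CAP and applied ra_db::DEFAULT_LRU_CAP itself via query_mut, while the new code only parses the variable to an Option<usize> and lets AnalysisHost::new fill in its own default. A sketch of the new flow (the helper name is illustrative):

    fn make_host() -> ra_ide_api::AnalysisHost {
        // None means "use the host's default LRU capacity"; the default no
        // longer needs to be known at this call site.
        let lru_cap = std::env::var("RA_LRU_CAP").ok().and_then(|it| it.parse::<usize>().ok());
        ra_ide_api::AnalysisHost::new(lru_cap)
    }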
crates/ra_cli/src/analysis_stats.rs
@@ -1,7 +1,6 @@
 use std::{collections::HashSet, time::Instant, fmt::Write};
 
 use ra_db::SourceDatabase;
-use ra_batch::BatchDatabase;
 use ra_hir::{Crate, ModuleDef, Ty, ImplItem, HasSource};
 use ra_syntax::AstNode;
 
@@ -9,16 +8,17 @@ use crate::Result;
 
 pub fn run(verbose: bool, path: &str, only: Option<&str>) -> Result<()> {
     let db_load_time = Instant::now();
-    let (db, roots) = BatchDatabase::load_cargo(path)?;
+    let (host, roots) = ra_batch::load_cargo(path.as_ref())?;
+    let db = host.raw_database();
     println!("Database loaded, {} roots, {:?}", roots.len(), db_load_time.elapsed());
     let analysis_time = Instant::now();
     let mut num_crates = 0;
     let mut visited_modules = HashSet::new();
     let mut visit_queue = Vec::new();
     for root in roots {
-        for krate in Crate::source_root_crates(&db, root) {
+        for krate in Crate::source_root_crates(db, root) {
             num_crates += 1;
-            let module = krate.root_module(&db).expect("crate in source root without root module");
+            let module = krate.root_module(db).expect("crate in source root without root module");
             visit_queue.push(module);
         }
     }
@@ -27,17 +27,17 @@ pub fn run(verbose: bool, path: &str, only: Option<&str>) -> Result<()> {
     let mut funcs = Vec::new();
     while let Some(module) = visit_queue.pop() {
         if visited_modules.insert(module) {
-            visit_queue.extend(module.children(&db));
+            visit_queue.extend(module.children(db));
 
-            for decl in module.declarations(&db) {
+            for decl in module.declarations(db) {
                 num_decls += 1;
                 if let ModuleDef::Function(f) = decl {
                     funcs.push(f);
                 }
             }
 
-            for impl_block in module.impl_blocks(&db) {
-                for item in impl_block.items(&db) {
+            for impl_block in module.impl_blocks(db) {
+                for item in impl_block.items(db) {
                     num_decls += 1;
                     if let ImplItem::Method(f) = item {
                         funcs.push(f);
@@ -61,11 +61,11 @@ pub fn run(verbose: bool, path: &str, only: Option<&str>) -> Result<()> {
     let mut num_exprs_unknown = 0;
     let mut num_exprs_partially_unknown = 0;
     for f in funcs {
-        let name = f.name(&db);
+        let name = f.name(db);
         let mut msg = format!("processing: {}", name);
         if verbose {
-            let src = f.source(&db);
-            let original_file = src.file_id.original_file(&db);
+            let src = f.source(db);
+            let original_file = src.file_id.original_file(db);
             let path = db.file_relative_path(original_file);
             let syntax_range = src.ast.syntax().range();
             write!(msg, " ({:?} {})", path, syntax_range).unwrap();
@@ -76,8 +76,8 @@ pub fn run(verbose: bool, path: &str, only: Option<&str>) -> Result<()> {
                 continue;
             }
         }
-        let body = f.body(&db);
-        let inference_result = f.infer(&db);
+        let body = f.body(db);
+        let inference_result = f.infer(db);
         for (expr_id, _) in body.exprs() {
             let ty = &inference_result[expr_id];
             num_exprs += 1;
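The repetitive &db -> db edits above all follow from a single type change: db used to be an owned BatchDatabase, whereas host.raw_database() yields a reference (&impl HirDatabase) that is passed along as-is. A self-contained illustration of why the extra borrow has to go (all types here are stand-ins):

    trait Database {}
    struct Db;
    impl Database for Db {}

    fn query(_db: &impl Database) {}

    // Owned database: call sites must borrow it.
    fn with_owned(db: Db) {
        query(&db);
    }

    // Borrowed database (the shape raw_database() returns): pass it through.
    // `query(&db)` would hand over a &&Db, and &Db does not implement
    // Database, so the generic bound would not be satisfied.
    fn with_borrowed(db: &Db) {
        query(db);
    }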
crates/ra_ide_api/src/lib.rs
@@ -276,6 +276,9 @@ impl AnalysisHost {
     pub fn collect_garbage(&mut self) {
         self.db.collect_garbage();
     }
+    pub fn raw_database(&self) -> &impl hir::db::HirDatabase {
+        &self.db
+    }
 }
 
 /// Analysis is a snapshot of a world state at a moment in time. It is the main
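Design note on the accessor above: raw_database returns &impl hir::db::HirDatabase rather than the concrete database type, so callers can run queries while ra_ide_api keeps its internal database private. A stand-alone sketch of the same pattern (Host, RootDatabase, and query are illustrative stand-ins, not the real API):

    pub trait Database {
        fn query(&self) -> u32;
    }

    struct RootDatabase(u32);

    impl Database for RootDatabase {
        fn query(&self) -> u32 {
            self.0
        }
    }

    pub struct Host {
        // The concrete database type stays private to this module.
        db: RootDatabase,
    }

    impl Host {
        // Callers can run queries but can never name RootDatabase.
        pub fn raw_database(&self) -> &impl Database {
            &self.db
        }
    }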
crates/ra_lsp_server/src/main.rs
@@ -17,7 +17,7 @@ fn main() -> Result<()> {
         Err(_) => ra_prof::Filter::disabled(),
     });
     log::info!("lifecycle: server started");
-    match ::std::panic::catch_unwind(main_inner) {
+    match std::panic::catch_unwind(main_inner) {
         Ok(res) => {
             log::info!("lifecycle: terminating process with {:?}", res);
             res