Mirror of https://github.com/rust-lang/rust-analyzer, synced 2024-12-27 05:23:24 +00:00
Auto merge of #16706 - Veykril:load-cargo-ide-db, r=Veykril
internal: Remove load-cargo dependency on ide

This lightens up the dep tree for projects using r-a as a library that do not need the ide crate itself.
This commit is contained in: b3b9b53df8
18 changed files with 70 additions and 61 deletions
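For crates that consume rust-analyzer as a library, the visible effect of this change is that the load-cargo entry points now return an ide_db::RootDatabase instead of an ide::AnalysisHost. A minimal sketch of the new flow, based on the signatures in the diff below; the project path, the config values, and the printed summary are illustrative only, and the LoadCargoConfig fields follow the existing struct as used elsewhere in this diff:

    use ide_db::base_db::SourceDatabase;
    use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice};
    use project_model::CargoConfig;

    fn main() -> anyhow::Result<()> {
        let cargo_config = CargoConfig::default();
        let load_config = LoadCargoConfig {
            load_out_dirs_from_check: false,
            with_proc_macro_server: ProcMacroServerChoice::None,
            prefill_caches: false,
        };
        // load_workspace_at now hands back the database directly, so the ide
        // crate is not pulled in just to inspect the loaded workspace.
        let (db, _vfs, _proc_macro) = load_workspace_at(
            std::path::Path::new("./some-project"), // placeholder path
            &cargo_config,
            &load_config,
            &|_| {},
        )?;
        let n_crates = db.crate_graph().iter().count();
        println!("loaded {n_crates} crates");
        Ok(())
    }
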
Cargo.lock (generated, 3 changed lines)

@@ -636,7 +636,6 @@ dependencies = [
  "arrayvec",
  "cfg",
  "cov-mark",
- "crossbeam-channel",
  "dot",
  "either",
  "expect-test",

@@ -713,6 +712,7 @@ dependencies = [
  "arrayvec",
  "base-db",
  "cov-mark",
+ "crossbeam-channel",
  "either",
  "expect-test",
  "fst",

@@ -951,7 +951,6 @@ dependencies = [
  "anyhow",
  "crossbeam-channel",
  "hir-expand",
- "ide",
  "ide-db",
  "itertools",
  "proc-macro-api",

@@ -13,6 +13,7 @@ doctest = false
 
 [dependencies]
 cov-mark = "2.0.0-pre.1"
+crossbeam-channel = "0.5.5"
 tracing.workspace = true
 rayon.workspace = true
 fst = { version = "0.4.7", default-features = false }

@@ -52,4 +53,4 @@ test-fixture.workspace = true
 sourcegen.workspace = true
 
 [lints]
-workspace = true
+workspace = true

@@ -15,6 +15,7 @@ pub mod helpers;
 pub mod items_locator;
 pub mod label;
 pub mod path_transform;
+pub mod prime_caches;
 pub mod rename;
 pub mod rust_doc;
 pub mod search;

@@ -7,16 +7,15 @@ mod topologic_sort;
 use std::time::Duration;
 
 use hir::db::DefDatabase;
-use ide_db::{
+
+use crate::{
     base_db::{
         salsa::{Database, ParallelDatabase, Snapshot},
         Cancelled, CrateGraph, CrateId, SourceDatabase, SourceDatabaseExt,
     },
-    FxHashSet, FxIndexMap,
+    FxHashSet, FxIndexMap, RootDatabase,
 };
-
-use crate::RootDatabase;
 
 /// We're indexing many crates.
 #[derive(Debug)]
 pub struct ParallelPrimeCachesProgress {

@@ -28,7 +27,7 @@ pub struct ParallelPrimeCachesProgress {
     pub crates_done: usize,
 }
 
-pub(crate) fn parallel_prime_caches(
+pub fn parallel_prime_caches(
     db: &RootDatabase,
     num_worker_threads: u8,
     cb: &(dyn Fn(ParallelPrimeCachesProgress) + Sync),

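Since parallel_prime_caches is now public in ide-db, cache priming can be driven from a bare RootDatabase without constructing an ide::AnalysisHost. A short sketch against the signature above; the worker count and the logging are illustrative:

    use ide_db::{prime_caches, RootDatabase};

    // `db` would come from load-cargo or any other place that builds a RootDatabase.
    fn prime(db: &RootDatabase) {
        prime_caches::parallel_prime_caches(db, 4, &|progress| {
            // The progress struct exposes counters such as `crates_done` (see above).
            eprintln!("primed {} crates so far", progress.crates_done);
        });
    }
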
@@ -1,7 +1,7 @@
 //! helper data structure to schedule work for parallel prime caches.
 use std::{collections::VecDeque, hash::Hash};
 
-use ide_db::FxHashMap;
+use crate::FxHashMap;
 
 pub(crate) struct TopologicSortIterBuilder<T> {
     nodes: FxHashMap<T, Entry<T>>,

@@ -13,7 +13,6 @@ doctest = false
 
 [dependencies]
 cov-mark = "2.0.0-pre.1"
-crossbeam-channel = "0.5.5"
 arrayvec.workspace = true
 either.workspace = true
 itertools.workspace = true

@@ -56,4 +55,4 @@ test-fixture.workspace = true
 in-rust-tree = ["ide-assists/in-rust-tree", "ide-diagnostics/in-rust-tree"]
 
 [lints]
-workspace = true
+workspace = true

@@ -17,7 +17,6 @@ mod fixture;
 
 mod markup;
 mod navigation_target;
-mod prime_caches;
 
 mod annotations;
 mod call_hierarchy;

@@ -68,7 +67,7 @@ use ide_db::{
         salsa::{self, ParallelDatabase},
         CrateOrigin, Env, FileLoader, FileSet, SourceDatabase, VfsPath,
     },
-    symbol_index, FxHashMap, FxIndexSet, LineIndexDatabase,
+    prime_caches, symbol_index, FxHashMap, FxIndexSet, LineIndexDatabase,
 };
 use syntax::SourceFile;
 use triomphe::Arc;

@@ -100,7 +99,6 @@ pub use crate::{
     },
     move_item::Direction,
     navigation_target::{NavigationTarget, TryToNav, UpmappingResult},
-    prime_caches::ParallelPrimeCachesProgress,
     references::ReferenceSearchResult,
     rename::RenameError,
     runnables::{Runnable, RunnableKind, TestId},

@@ -127,6 +125,7 @@ pub use ide_db::{
     documentation::Documentation,
     label::Label,
     line_index::{LineCol, LineIndex},
+    prime_caches::ParallelPrimeCachesProgress,
     search::{ReferenceCategory, SearchScope},
     source_change::{FileSystemEdit, SnippetEdit, SourceChange},
     symbol_index::Query,

@@ -165,6 +164,10 @@ impl AnalysisHost {
         AnalysisHost { db: RootDatabase::new(lru_capacity) }
     }
 
+    pub fn with_database(db: RootDatabase) -> AnalysisHost {
+        AnalysisHost { db }
+    }
+
     pub fn update_lru_capacity(&mut self, lru_capacity: Option<usize>) {
         self.db.update_base_query_lru_capacities(lru_capacity);
     }

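The new AnalysisHost::with_database constructor above is the bridge for callers that still want the ide layer: they can load a RootDatabase through load-cargo and wrap it only once Analysis is actually needed, which is exactly what the CLI changes further down do. A hedged sketch of that pattern (the helper function is illustrative, not part of the PR):

    use ide::AnalysisHost;
    use ide_db::RootDatabase;

    // Wrap an already-built database once ide-level queries are required.
    fn into_host(db: RootDatabase) -> AnalysisHost {
        let host = AnalysisHost::with_database(db);
        // From here, host.analysis() gives the usual ide entry point and
        // host.raw_database() hands back the underlying RootDatabase.
        host
    }
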
@@ -1,6 +1,8 @@
 //! This module generates [moniker](https://microsoft.github.io/language-server-protocol/specifications/lsif/0.6.0/specification/#exportsImports)
 //! for LSIF and LSP.
 
+use core::fmt;
+
 use hir::{Adt, AsAssocItem, AssocItemContainer, Crate, DescendPreference, MacroKind, Semantics};
 use ide_db::{
     base_db::{CrateOrigin, FilePosition, LangCrateOrigin},

@@ -93,9 +95,10 @@ pub struct MonikerIdentifier {
     pub description: Vec<MonikerDescriptor>,
 }
 
-impl ToString for MonikerIdentifier {
-    fn to_string(&self) -> String {
-        format!("{}::{}", self.crate_name, self.description.iter().map(|x| &x.name).join("::"))
+impl fmt::Display for MonikerIdentifier {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.write_str(&self.crate_name)?;
+        f.write_fmt(format_args!("::{}", self.description.iter().map(|x| &x.name).join("::")))
     }
 }
 

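The moniker change above replaces a hand-written ToString impl with fmt::Display, which is the idiomatic direction: the standard library's blanket impl of ToString for Display types still provides to_string(), while formatting can write straight into the formatter without building an intermediate String. A generic illustration of the same pattern (the type here is made up, not from the PR):

    use std::fmt;

    struct Qualified {
        krate: String,
        path: Vec<String>,
    }

    impl fmt::Display for Qualified {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            f.write_str(&self.krate)?;
            for segment in &self.path {
                write!(f, "::{segment}")?;
            }
            Ok(())
        }
    }

    fn main() {
        let q = Qualified { krate: "ide_db".into(), path: vec!["prime_caches".into()] };
        // to_string() still works, via the blanket ToString impl for Display types.
        assert_eq!(q.to_string(), "ide_db::prime_caches");
        println!("{q}");
    }
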
@@ -16,16 +16,16 @@ crossbeam-channel.workspace = true
 itertools.workspace = true
 tracing.workspace = true
 
-ide.workspace = true
+# workspace deps
+
+hir-expand.workspace = true
 ide-db.workspace = true
 proc-macro-api.workspace = true
 project-model.workspace = true
-tt.workspace = true
-vfs.workspace = true
-vfs-notify.workspace = true
 span.workspace = true
-
-hir-expand.workspace = true
+tt.workspace = true
+vfs-notify.workspace = true
+vfs.workspace = true
 
 [lints]
-workspace = true
+workspace = true

@@ -9,10 +9,9 @@ use hir_expand::proc_macro::{
     ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind, ProcMacroLoadResult,
     ProcMacros,
 };
-use ide::{AnalysisHost, SourceRoot};
 use ide_db::{
-    base_db::{CrateGraph, Env},
-    Change, FxHashMap,
+    base_db::{CrateGraph, Env, SourceRoot},
+    prime_caches, Change, FxHashMap, RootDatabase,
 };
 use itertools::Itertools;
 use proc_macro_api::{MacroDylib, ProcMacroServer};

@@ -38,7 +37,7 @@ pub fn load_workspace_at(
     cargo_config: &CargoConfig,
     load_config: &LoadCargoConfig,
     progress: &dyn Fn(String),
-) -> anyhow::Result<(AnalysisHost, vfs::Vfs, Option<ProcMacroServer>)> {
+) -> anyhow::Result<(RootDatabase, vfs::Vfs, Option<ProcMacroServer>)> {
     let root = AbsPathBuf::assert(std::env::current_dir()?.join(root));
     let root = ProjectManifest::discover_single(&root)?;
     let mut workspace = ProjectWorkspace::load(root, cargo_config, progress)?;

@@ -55,7 +54,7 @@ pub fn load_workspace(
     ws: ProjectWorkspace,
     extra_env: &FxHashMap<String, String>,
     load_config: &LoadCargoConfig,
-) -> anyhow::Result<(AnalysisHost, vfs::Vfs, Option<ProcMacroServer>)> {
+) -> anyhow::Result<(RootDatabase, vfs::Vfs, Option<ProcMacroServer>)> {
     let (sender, receiver) = unbounded();
     let mut vfs = vfs::Vfs::default();
     let mut loader = {

@@ -113,7 +112,7 @@ pub fn load_workspace(
         version: 0,
     });
 
-    let host = load_crate_graph(
+    let db = load_crate_graph(
         &ws,
         crate_graph,
         proc_macros,

@@ -123,9 +122,9 @@ pub fn load_workspace(
     );
 
     if load_config.prefill_caches {
-        host.analysis().parallel_prime_caches(1, |_| {})?;
+        prime_caches::parallel_prime_caches(&db, 1, &|_| ());
     }
-    Ok((host, vfs, proc_macro_server.ok()))
+    Ok((db, vfs, proc_macro_server.ok()))
 }
 
 #[derive(Default)]

@@ -308,16 +307,16 @@ fn load_crate_graph(
     source_root_config: SourceRootConfig,
     vfs: &mut vfs::Vfs,
     receiver: &Receiver<vfs::loader::Message>,
-) -> AnalysisHost {
+) -> RootDatabase {
     let (ProjectWorkspace::Cargo { toolchain, target_layout, .. }
     | ProjectWorkspace::Json { toolchain, target_layout, .. }
     | ProjectWorkspace::DetachedFiles { toolchain, target_layout, .. }) = ws;
 
     let lru_cap = std::env::var("RA_LRU_CAP").ok().and_then(|it| it.parse::<usize>().ok());
-    let mut host = AnalysisHost::new(lru_cap);
+    let mut db = RootDatabase::new(lru_cap);
     let mut analysis_change = Change::new();
 
-    host.raw_database_mut().enable_proc_attr_macros();
+    db.enable_proc_attr_macros();
 
     // wait until Vfs has loaded all roots
     for task in receiver {

@@ -352,8 +351,8 @@ fn load_crate_graph(
         .set_target_data_layouts(iter::repeat(target_layout.clone()).take(num_crates).collect());
     analysis_change.set_toolchains(iter::repeat(toolchain.clone()).take(num_crates).collect());
 
-    host.apply_change(analysis_change);
-    host
+    db.apply_change(analysis_change);
+    db
 }
 
 fn expander_to_proc_macro(

@@ -407,10 +406,10 @@ mod tests {
             with_proc_macro_server: ProcMacroServerChoice::None,
             prefill_caches: false,
         };
-        let (host, _vfs, _proc_macro) =
+        let (db, _vfs, _proc_macro) =
             load_workspace_at(path, &cargo_config, &load_cargo_config, &|_| {}).unwrap();
 
-        let n_crates = host.raw_database().crate_graph().iter().count();
+        let n_crates = db.crate_graph().iter().count();
         // RA has quite a few crates, but the exact count doesn't matter
         assert!(n_crates > 20);
     }

@@ -16,8 +16,8 @@ use hir_def::{
 };
 use hir_ty::{Interner, Substitution, TyExt, TypeFlags};
 use ide::{
-    Analysis, AnnotationConfig, DiagnosticsConfig, InlayFieldsToResolve, InlayHintsConfig, LineCol,
-    RootDatabase,
+    Analysis, AnalysisHost, AnnotationConfig, DiagnosticsConfig, InlayFieldsToResolve,
+    InlayHintsConfig, LineCol, RootDatabase,
 };
 use ide_db::{
     base_db::{

@@ -90,9 +90,8 @@ impl flags::AnalysisStats {
             Some(build_scripts_sw.elapsed())
         };
 
-        let (host, vfs, _proc_macro) =
+        let (db, vfs, _proc_macro) =
             load_workspace(workspace.clone(), &cargo_config.extra_env, &load_cargo_config)?;
-        let db = host.raw_database();
         eprint!("{:<20} {}", "Database loaded:", db_load_sw.elapsed());
         eprint!(" (metadata {metadata_time}");
         if let Some(build_scripts_time) = build_scripts_time {

@@ -100,6 +99,9 @@ impl flags::AnalysisStats {
         }
         eprintln!(")");
 
+        let host = AnalysisHost::with_database(db);
+        let db = host.raw_database();
+
         let mut analysis_sw = self.stop_watch();
 
         let mut krates = Crate::all(db);

@@ -5,7 +5,7 @@ use project_model::{CargoConfig, RustLibSource};
 use rustc_hash::FxHashSet;
 
 use hir::{db::HirDatabase, Crate, HirFileIdExt, Module};
-use ide::{AssistResolveStrategy, DiagnosticsConfig, Severity};
+use ide::{AnalysisHost, AssistResolveStrategy, DiagnosticsConfig, Severity};
 use ide_db::base_db::SourceDatabaseExt;
 use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice};
 

@@ -26,8 +26,9 @@ impl flags::Diagnostics {
             with_proc_macro_server,
             prefill_caches: false,
         };
-        let (host, _vfs, _proc_macro) =
+        let (db, _vfs, _proc_macro) =
             load_workspace_at(&self.path, &cargo_config, &load_cargo_config, &|_| {})?;
+        let host = AnalysisHost::with_database(db);
         let db = host.raw_database();
         let analysis = host.analysis();
 

@@ -4,8 +4,8 @@ use std::env;
 use std::time::Instant;
 
 use ide::{
-    Analysis, FileId, FileRange, MonikerKind, PackageInformation, RootDatabase, StaticIndex,
-    StaticIndexedFile, TokenId, TokenStaticData,
+    Analysis, AnalysisHost, FileId, FileRange, MonikerKind, PackageInformation, RootDatabase,
+    StaticIndex, StaticIndexedFile, TokenId, TokenStaticData,
 };
 use ide_db::{
     base_db::salsa::{self, ParallelDatabase},

@@ -300,8 +300,9 @@ impl flags::Lsif {
 
         let workspace = ProjectWorkspace::load(manifest, &cargo_config, no_progress)?;
 
-        let (host, vfs, _proc_macro) =
+        let (db, vfs, _proc_macro) =
             load_workspace(workspace, &cargo_config.extra_env, &load_cargo_config)?;
+        let host = AnalysisHost::with_database(db);
         let db = host.raw_database();
         let analysis = host.analysis();
 

@@ -20,9 +20,8 @@ impl flags::RunTests {
             with_proc_macro_server: ProcMacroServerChoice::Sysroot,
             prefill_caches: false,
         };
-        let (host, _vfs, _proc_macro) =
+        let (ref db, _vfs, _proc_macro) =
             load_workspace_at(&self.path, &cargo_config, &load_cargo_config, &|_| {})?;
-        let db = host.raw_database();
 
         let tests = all_modules(db)
             .into_iter()

@@ -87,8 +87,9 @@ impl Tester {
             with_proc_macro_server: ProcMacroServerChoice::Sysroot,
             prefill_caches: false,
         };
-        let (host, _vfs, _proc_macro) =
+        let (db, _vfs, _proc_macro) =
             load_workspace(workspace, &cargo_config.extra_env, &load_cargo_config)?;
+        let host = AnalysisHost::with_database(db);
         let db = host.raw_database();
         let krates = Crate::all(db);
         let root_crate = krates.iter().cloned().find(|krate| krate.origin(db).is_local()).unwrap();

@@ -3,7 +3,7 @@
 use std::{path::PathBuf, time::Instant};
 
 use ide::{
-    LineCol, MonikerDescriptorKind, MonikerResult, StaticIndex, StaticIndexedFile,
+    AnalysisHost, LineCol, MonikerDescriptorKind, MonikerResult, StaticIndex, StaticIndexedFile,
     SymbolInformationKind, TextRange, TokenId,
 };
 use ide_db::LineIndexDatabase;

@@ -42,12 +42,13 @@ impl flags::Scip {
             config.update(json)?;
         }
         let cargo_config = config.cargo();
-        let (host, vfs, _) = load_workspace_at(
+        let (db, vfs, _) = load_workspace_at(
             root.as_path().as_ref(),
             &cargo_config,
             &load_cargo_config,
             &no_progress,
         )?;
+        let host = AnalysisHost::with_database(db);
         let db = host.raw_database();
         let analysis = host.analysis();
 

@@ -324,7 +325,7 @@ fn moniker_to_symbol(moniker: &MonikerResult) -> scip_types::Symbol {
 #[cfg(test)]
 mod test {
     use super::*;
-    use ide::{AnalysisHost, FilePosition, TextSize};
+    use ide::{FilePosition, TextSize};
     use scip::symbol::format_symbol;
     use test_fixture::ChangeFixture;
 

@@ -17,13 +17,12 @@ impl flags::Ssr {
             with_proc_macro_server: ProcMacroServerChoice::Sysroot,
             prefill_caches: false,
         };
-        let (host, vfs, _proc_macro) = load_workspace_at(
+        let (ref db, vfs, _proc_macro) = load_workspace_at(
             &std::env::current_dir()?,
             &cargo_config,
             &load_cargo_config,
             &|_| {},
         )?;
-        let db = host.raw_database();
         let mut match_finder = MatchFinder::at_first_file(db)?;
         for rule in self.rule {
             match_finder.add_rule(rule)?;

@@ -54,13 +53,12 @@ impl flags::Search {
             with_proc_macro_server: ProcMacroServerChoice::Sysroot,
             prefill_caches: false,
         };
-        let (host, _vfs, _proc_macro) = load_workspace_at(
+        let (ref db, _vfs, _proc_macro) = load_workspace_at(
             &std::env::current_dir()?,
             &cargo_config,
             &load_cargo_config,
             &|_| {},
         )?;
-        let db = host.raw_database();
         let mut match_finder = MatchFinder::at_first_file(db)?;
         for pattern in self.pattern {
             match_finder.add_search_pattern(pattern)?;

@@ -11,7 +11,7 @@
 //! which you can use to paste the command in terminal and add `--release` manually.
 
 use hir::Change;
-use ide::{CallableSnippets, CompletionConfig, FilePosition, TextSize};
+use ide::{AnalysisHost, CallableSnippets, CompletionConfig, FilePosition, TextSize};
 use ide_db::{
     imports::insert_use::{ImportGranularity, InsertUseConfig},
     SnippetCap,

@@ -43,10 +43,11 @@ fn integrated_highlighting_benchmark() {
         prefill_caches: false,
     };
 
-    let (mut host, vfs, _proc_macro) = {
+    let (db, vfs, _proc_macro) = {
         let _it = stdx::timeit("workspace loading");
         load_workspace_at(&workspace_to_load, &cargo_config, &load_cargo_config, &|_| {}).unwrap()
     };
+    let mut host = AnalysisHost::with_database(db);
 
     let file_id = {
         let file = workspace_to_load.join(file);

@@ -99,10 +100,11 @@ fn integrated_completion_benchmark() {
         prefill_caches: true,
     };
 
-    let (mut host, vfs, _proc_macro) = {
+    let (db, vfs, _proc_macro) = {
         let _it = stdx::timeit("workspace loading");
         load_workspace_at(&workspace_to_load, &cargo_config, &load_cargo_config, &|_| {}).unwrap()
     };
+    let mut host = AnalysisHost::with_database(db);
 
     let file_id = {
         let file = workspace_to_load.join(file);