Mirror of https://github.com/rust-lang/rust-analyzer (synced 2024-12-27 05:23:24 +00:00)

Simplify FileDelegate

commit fcb88832de
parent 000eed1da8

36 changed files with 78 additions and 86 deletions

@@ -7,7 +7,7 @@ use salsa::Durability;
 use triomphe::Arc;
 use vfs::FileId;
 
-use crate::{CrateGraph, SourceDatabaseExt, SourceDatabaseExt2, SourceRoot, SourceRootId};
+use crate::{CrateGraph, SourceDatabaseFileInputExt, SourceRoot, SourceRootDatabase, SourceRootId};
 
 /// Encapsulate a bunch of raw `.set` calls on the database.
 #[derive(Default)]
@@ -50,7 +50,7 @@ impl FileChange {
         self.crate_graph = Some(graph);
     }
 
-    pub fn apply(self, db: &mut dyn SourceDatabaseExt) {
+    pub fn apply(self, db: &mut dyn SourceRootDatabase) {
         let _p = tracing::info_span!("FileChange::apply").entered();
         if let Some(roots) = self.roots {
             for (idx, root) in roots.into_iter().enumerate() {

@@ -47,8 +47,6 @@ pub const DEFAULT_PARSE_LRU_CAP: u16 = 128;
 pub const DEFAULT_BORROWCK_LRU_CAP: u16 = 2024;
 
 pub trait FileLoader {
-    /// Text of the file.
-    fn file_text(&self, file_id: FileId) -> Arc<str>;
     fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId>;
     /// Crates whose root's source root is the same as the source root of `file_id`
     fn relevant_crates(&self, file_id: FileId) -> Arc<[CrateId]>;
@@ -58,6 +56,13 @@ pub trait FileLoader {
 /// model. Everything else in rust-analyzer is derived from these queries.
 #[salsa::query_group(SourceDatabaseStorage)]
 pub trait SourceDatabase: FileLoader + std::fmt::Debug {
+    #[salsa::input]
+    fn compressed_file_text(&self, file_id: FileId) -> Arc<[u8]>;
+
+    /// Text of the file.
+    #[salsa::lru]
+    fn file_text(&self, file_id: FileId) -> Arc<str>;
+
     /// Parses the file into the syntax tree.
     #[salsa::lru]
     fn parse(&self, file_id: EditionedFileId) -> Parse<ast::SourceFile>;
@@ -99,16 +104,18 @@ fn parse_errors(db: &dyn SourceDatabase, file_id: EditionedFileId) -> Option<Arc
     }
 }
 
+fn file_text(db: &dyn SourceDatabase, file_id: FileId) -> Arc<str> {
+    let bytes = db.compressed_file_text(file_id);
+    let bytes =
+        lz4_flex::decompress_size_prepended(&bytes).expect("lz4 decompression should not fail");
+    let text = std::str::from_utf8(&bytes).expect("file contents should be valid UTF-8");
+    Arc::from(text)
+}
+
 /// We don't want to give HIR knowledge of source roots, hence we extract these
 /// methods into a separate DB.
-#[salsa::query_group(SourceDatabaseExtStorage)]
-pub trait SourceDatabaseExt: SourceDatabase {
-    #[salsa::input]
-    fn compressed_file_text(&self, file_id: FileId) -> Arc<[u8]>;
-
-    #[salsa::lru]
-    fn file_text(&self, file_id: FileId) -> Arc<str>;
-
+#[salsa::query_group(SourceRootDatabaseStorage)]
+pub trait SourceRootDatabase: SourceDatabase {
     /// Path to a file, relative to the root of its source root.
     /// Source root of the file.
     #[salsa::input]
@@ -121,15 +128,7 @@ pub trait SourceDatabaseExt: SourceDatabase {
     fn source_root_crates(&self, id: SourceRootId) -> Arc<[CrateId]>;
 }
 
-fn file_text(db: &dyn SourceDatabaseExt, file_id: FileId) -> Arc<str> {
-    let bytes = db.compressed_file_text(file_id);
-    let bytes =
-        lz4_flex::decompress_size_prepended(&bytes).expect("lz4 decompression should not fail");
-    let text = std::str::from_utf8(&bytes).expect("file contents should be valid UTF-8");
-    Arc::from(text)
-}
-
-pub trait SourceDatabaseExt2 {
+pub trait SourceDatabaseFileInputExt {
     fn set_file_text(&mut self, file_id: FileId, text: &str) {
         self.set_file_text_with_durability(file_id, text, Durability::LOW);
     }
@@ -142,7 +141,7 @@ pub trait SourceDatabaseExt2 {
     );
 }
 
-impl<Db: ?Sized + SourceDatabaseExt> SourceDatabaseExt2 for Db {
+impl<Db: ?Sized + SourceRootDatabase> SourceDatabaseFileInputExt for Db {
     fn set_file_text_with_durability(
         &mut self,
         file_id: FileId,
@@ -159,7 +158,7 @@ impl<Db: ?Sized + SourceDatabaseExt> SourceDatabaseExt2 for Db {
     }
 }
 
-fn source_root_crates(db: &dyn SourceDatabaseExt, id: SourceRootId) -> Arc<[CrateId]> {
+fn source_root_crates(db: &dyn SourceRootDatabase, id: SourceRootId) -> Arc<[CrateId]> {
     let graph = db.crate_graph();
     let mut crates = graph
         .iter()
@@ -173,13 +172,12 @@ fn source_root_crates(db: &dyn SourceDatabaseExt, id: SourceRootId) -> Arc<[Crat
     crates.into_iter().collect()
 }
 
-/// Silly workaround for cyclic deps between the traits
+// FIXME: Would be nice to get rid of this somehow
+/// Silly workaround for cyclic deps due to the SourceRootDatabase and SourceDatabase split
+/// regarding FileLoader
 pub struct FileLoaderDelegate<T>(pub T);
 
-impl<T: SourceDatabaseExt> FileLoader for FileLoaderDelegate<&'_ T> {
-    fn file_text(&self, file_id: FileId) -> Arc<str> {
-        SourceDatabaseExt::file_text(self.0, file_id)
-    }
+impl<T: SourceRootDatabase> FileLoader for FileLoaderDelegate<&'_ T> {
     fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
         // FIXME: this *somehow* should be platform agnostic...
         let source_root = self.0.file_source_root(path.anchor);
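
Aside (not part of the diff): the net effect of the base-db change above is that file text is stored lz4-compressed behind the compressed_file_text salsa input and decompressed on demand by the file_text query, so FileLoader implementors no longer provide file_text themselves. Below is a minimal standalone sketch of that storage scheme, assuming only the lz4_flex crate; the sample text, the compression on the write path, and the main wrapper are illustrative, not taken from the commit.

fn main() {
    let text = "fn main() { println!(\"hello\"); }";

    // Roughly what the write path would store as the compressed_file_text input:
    // the uncompressed length is prepended so the reader can size its buffer.
    let compressed: Vec<u8> = lz4_flex::compress_prepend_size(text.as_bytes());

    // Roughly what the file_text query does on read.
    let bytes = lz4_flex::decompress_size_prepended(&compressed)
        .expect("lz4 decompression should not fail");
    let restored = std::str::from_utf8(&bytes).expect("file contents should be valid UTF-8");

    assert_eq!(restored, text);
}
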
@@ -1,4 +1,4 @@
-use base_db::{SourceDatabase, SourceDatabaseExt2 as _};
+use base_db::{SourceDatabase, SourceDatabaseFileInputExt as _};
 use test_fixture::WithFixture;
 
 use crate::{db::DefDatabase, nameres::tests::TestDB, AdtId, ModuleDefId};

@@ -19,7 +19,7 @@ use crate::{
 };
 
 #[salsa::database(
-    base_db::SourceDatabaseExtStorage,
+    base_db::SourceRootDatabaseStorage,
     base_db::SourceDatabaseStorage,
     hir_expand::db::ExpandDatabaseStorage,
     crate::db::InternDatabaseStorage,
@@ -69,9 +69,6 @@ impl fmt::Debug for TestDB {
 impl panic::RefUnwindSafe for TestDB {}
 
 impl FileLoader for TestDB {
-    fn file_text(&self, file_id: FileId) -> Arc<str> {
-        FileLoaderDelegate(self).file_text(file_id)
-    }
     fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
         FileLoaderDelegate(self).resolve_path(path)
     }

@@ -1,7 +1,7 @@
 //! Defines a unit of change that can applied to the database to get the next
 //! state. Changes are transactional.
 use base_db::{
-    salsa::Durability, CrateGraph, CrateId, FileChange, SourceDatabaseExt, SourceRoot,
+    salsa::Durability, CrateGraph, CrateId, FileChange, SourceRoot, SourceRootDatabase,
     TargetLayoutLoadResult, Version,
 };
 use la_arena::RawIdx;
@@ -23,7 +23,7 @@ impl ChangeWithProcMacros {
         Self::default()
     }
 
-    pub fn apply(self, db: &mut (impl ExpandDatabase + SourceDatabaseExt)) {
+    pub fn apply(self, db: &mut (impl ExpandDatabase + SourceRootDatabase)) {
         self.source_change.apply(db);
         if let Some(proc_macros) = self.proc_macros {
             db.set_proc_macros_with_durability(Arc::new(proc_macros), Durability::HIGH);

@@ -176,7 +176,12 @@ impl ExpandErrorKind {
            &ExpandErrorKind::MissingProcMacroExpander(def_crate) => {
                match db.proc_macros().get_error_for_crate(def_crate) {
                    Some((e, hard_err)) => (e.to_owned(), hard_err),
-                   None => ("missing expander".to_owned(), true),
+                   None => (
+                       format!(
+                           "internal error: proc-macro map is missing error entry for crate {def_crate:?}"
+                       ),
+                       true,
+                   ),
                }
            }
            ExpandErrorKind::MacroDefinition => {

@@ -15,7 +15,7 @@ use test_utils::extract_annotations;
 use triomphe::Arc;
 
 #[salsa::database(
-    base_db::SourceDatabaseExtStorage,
+    base_db::SourceRootDatabaseStorage,
     base_db::SourceDatabaseStorage,
     hir_expand::db::ExpandDatabaseStorage,
     hir_def::db::InternDatabaseStorage,
@@ -75,9 +75,6 @@ impl salsa::ParallelDatabase for TestDB {
 impl panic::RefUnwindSafe for TestDB {}
 
 impl FileLoader for TestDB {
-    fn file_text(&self, file_id: FileId) -> Arc<str> {
-        FileLoaderDelegate(self).file_text(file_id)
-    }
     fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
         FileLoaderDelegate(self).resolve_path(path)
     }

@@ -12,7 +12,7 @@ mod traits;
 
 use std::env;
 
-use base_db::SourceDatabaseExt2 as _;
+use base_db::SourceDatabaseFileInputExt as _;
 use expect_test::Expect;
 use hir_def::{
     body::{Body, BodySourceMap, SyntheticSyntax},

@@ -1,4 +1,4 @@
-use base_db::SourceDatabaseExt2 as _;
+use base_db::SourceDatabaseFileInputExt as _;
 use test_fixture::WithFixture;
 
 use crate::{db::HirDatabase, test_db::TestDB};

@@ -3,7 +3,7 @@ mod generated;
 use expect_test::expect;
 use hir::{FileRange, Semantics};
 use ide_db::{
-    base_db::SourceDatabaseExt,
+    base_db::{SourceDatabase, SourceRootDatabase},
     imports::insert_use::{ImportGranularity, InsertUseConfig},
     source_change::FileSystemEdit,
     EditionedFileId, RootDatabase, SnippetCap,

@@ -4,7 +4,7 @@ use std::iter;
 
 use hir::{HirFileIdExt, Module};
 use ide_db::{
-    base_db::{SourceDatabaseExt, VfsPath},
+    base_db::{SourceRootDatabase, VfsPath},
     FxHashSet, RootDatabase, SymbolKind,
 };
 use stdx::IsNoneOr;

@@ -23,10 +23,10 @@ mod type_pos;
 mod use_tree;
 mod visibility;
 
+use base_db::SourceDatabase;
 use expect_test::Expect;
 use hir::PrefixKind;
 use ide_db::{
-    base_db::FileLoader,
     imports::insert_use::{ImportGranularity, InsertUseConfig},
     FilePosition, RootDatabase, SnippetCap,
 };

@@ -2,7 +2,7 @@
 
 use std::collections::VecDeque;
 
-use base_db::SourceDatabaseExt;
+use base_db::SourceRootDatabase;
 use hir::{Crate, DescendPreference, ItemInNs, ModuleDef, Name, Semantics};
 use span::FileId;
 use syntax::{

@@ -74,7 +74,7 @@ pub type FilePosition = FilePositionWrapper<FileId>;
 pub type FileRange = FileRangeWrapper<FileId>;
 
 #[salsa::database(
-    base_db::SourceDatabaseExtStorage,
+    base_db::SourceRootDatabaseStorage,
     base_db::SourceDatabaseStorage,
     hir::db::ExpandDatabaseStorage,
     hir::db::DefDatabaseStorage,
@@ -125,9 +125,6 @@ impl Upcast<dyn HirDatabase> for RootDatabase {
 }
 
 impl FileLoader for RootDatabase {
-    fn file_text(&self, file_id: FileId) -> Arc<str> {
-        FileLoaderDelegate(self).file_text(file_id)
-    }
     fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
         FileLoaderDelegate(self).resolve_path(path)
     }

@@ -11,7 +11,7 @@ use hir::db::DefDatabase;
 use crate::{
     base_db::{
         salsa::{Database, ParallelDatabase, Snapshot},
-        Cancelled, CrateId, SourceDatabase, SourceDatabaseExt,
+        Cancelled, CrateId, SourceDatabase, SourceRootDatabase,
     },
     FxIndexMap, RootDatabase,
 };

@@ -6,7 +6,7 @@
 
 use std::mem;
 
-use base_db::{salsa::Database, SourceDatabase, SourceDatabaseExt};
+use base_db::{salsa::Database, SourceDatabase, SourceRootDatabase};
 use hir::{
     sym, AsAssocItem, DefWithBody, DescendPreference, FileRange, HasAttrs, HasSource, HirFileIdExt,
     InFile, InRealFile, ModuleSource, PathResolution, Semantics, Visibility,

@@ -29,7 +29,7 @@ use std::{
 
 use base_db::{
     salsa::{self, ParallelDatabase},
-    SourceDatabaseExt, SourceRootId, Upcast,
+    SourceRootDatabase, SourceRootId, Upcast,
 };
 use fst::{raw::IndexedValue, Automaton, Streamer};
 use hir::{
@@ -100,7 +100,7 @@ impl Query {
 }
 
 #[salsa::query_group(SymbolsDatabaseStorage)]
-pub trait SymbolsDatabase: HirDatabase + SourceDatabaseExt + Upcast<dyn HirDatabase> {
+pub trait SymbolsDatabase: HirDatabase + SourceRootDatabase + Upcast<dyn HirDatabase> {
     /// The symbol index for a given module. These modules should only be in source roots that
     /// are inside local_roots.
     fn module_symbols(&self, module: Module) -> Arc<SymbolIndex>;

@@ -4,7 +4,7 @@ use std::iter;
 
 use hir::{db::DefDatabase, DefMap, InFile, ModuleSource};
 use ide_db::{
-    base_db::{FileLoader, SourceDatabaseExt},
+    base_db::{FileLoader, SourceDatabase, SourceRootDatabase},
     source_change::SourceChange,
     FileId, FileRange, LineIndexDatabase,
 };
@@ -47,7 +47,7 @@ pub(crate) fn unlinked_file(
        //
        // Only show this diagnostic on the first three characters of
        // the file, to avoid overwhelming the user during startup.
-       range = FileLoader::file_text(ctx.sema.db, file_id)
+       range = SourceDatabase::file_text(ctx.sema.db, file_id)
            .char_indices()
            .take(3)
            .last()
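
Aside (not part of the diff): the hunk above only changes which trait the file_text query is reached through; the range computation itself is unchanged. A standalone sketch of what that iterator chain computes, with a hypothetical first_three_chars_range helper standing in for the inline expression:

fn first_three_chars_range(text: &str) -> std::ops::Range<usize> {
    // Take at most the first three characters and end the range after the
    // last of them, respecting UTF-8 character boundaries.
    let end = text
        .char_indices()
        .take(3)
        .last()
        .map_or(0, |(i, c)| i + c.len_utf8());
    0..end
}

fn main() {
    assert_eq!(first_three_chars_range("héllo"), 0..4); // 'h' (1 byte) + 'é' (2 bytes) + 'l' (1 byte)
    assert_eq!(first_three_chars_range(""), 0..0);
}
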
@@ -1,7 +1,7 @@
 #![allow(clippy::print_stderr)]
 
 use ide_db::{
-    assists::AssistResolveStrategy, base_db::SourceDatabaseExt, LineIndexDatabase, RootDatabase,
+    assists::AssistResolveStrategy, base_db::SourceDatabase, LineIndexDatabase, RootDatabase,
 };
 use itertools::Itertools;
 use stdx::trim_indent;

@@ -84,7 +84,7 @@ pub use crate::{errors::SsrError, from_comment::ssr_from_comment, matching::Matc
 
 use crate::{errors::bail, matching::MatchFailureReason};
 use hir::{FileRange, Semantics};
-use ide_db::{EditionedFileId, FileId, FxHashMap, RootDatabase};
+use ide_db::{base_db::SourceDatabase, EditionedFileId, FileId, FxHashMap, RootDatabase};
 use resolving::ResolvedRule;
 use syntax::{ast, AstNode, SyntaxNode, TextRange};
 use text_edit::TextEdit;
@@ -141,7 +141,7 @@ impl<'db> MatchFinder<'db> {
 
     /// Constructs an instance using the start of the first file in `db` as the lookup context.
     pub fn at_first_file(db: &'db ide_db::RootDatabase) -> Result<MatchFinder<'db>, SsrError> {
-        use ide_db::base_db::SourceDatabaseExt;
+        use ide_db::base_db::SourceRootDatabase;
         use ide_db::symbol_index::SymbolsDatabase;
         if let Some(first_file_id) =
             db.local_roots().iter().next().and_then(|root| db.source_root(*root).iter().next())
@@ -172,7 +172,6 @@ impl<'db> MatchFinder<'db> {
 
     /// Finds matches for all added rules and returns edits for all found matches.
     pub fn edits(&self) -> FxHashMap<FileId, TextEdit> {
-        use ide_db::base_db::SourceDatabaseExt;
         let mut matches_by_file = FxHashMap::default();
         for m in self.matches().matches {
             matches_by_file
@@ -228,7 +227,6 @@ impl<'db> MatchFinder<'db> {
         file_id: EditionedFileId,
         snippet: &str,
     ) -> Vec<MatchDebugInfo> {
-        use ide_db::base_db::SourceDatabaseExt;
         let file = self.sema.parse(file_id);
         let mut res = Vec::new();
         let file_text = self.sema.db.file_text(file_id.into());

@@ -156,7 +156,7 @@ impl MatchFinder<'_> {
     fn search_files_do(&self, mut callback: impl FnMut(FileId)) {
         if self.restrict_ranges.is_empty() {
             // Unrestricted search.
-            use ide_db::base_db::SourceDatabaseExt;
+            use ide_db::base_db::SourceRootDatabase;
             use ide_db::symbol_index::SymbolsDatabase;
             for &root in self.sema.db.local_roots().iter() {
                 let sr = self.sema.db.source_root(root);

@@ -1,7 +1,7 @@
 use expect_test::{expect, Expect};
 use hir::{FilePosition, FileRange};
 use ide_db::{
-    base_db::{salsa::Durability, SourceDatabaseExt},
+    base_db::{salsa::Durability, SourceDatabase},
     EditionedFileId, FxHashSet,
 };
 use test_utils::RangeOrOffset;

@@ -10,7 +10,7 @@ use hir::{
     Semantics,
 };
 use ide_db::{
-    base_db::{AnchoredPath, FileLoader},
+    base_db::{AnchoredPath, FileLoader, SourceDatabase},
     defs::{Definition, IdentClass},
     helpers::pick_best_token,
     RootDatabase, SymbolKind,

@@ -1,5 +1,5 @@
 use expect_test::{expect, Expect};
-use ide_db::{base_db::FileLoader, FileRange};
+use ide_db::{base_db::SourceDatabase, FileRange};
 use syntax::TextRange;
 
 use crate::{

@@ -1,5 +1,5 @@
 use hir::Semantics;
-use ide_db::{base_db::SourceDatabaseExt, FilePosition, LineIndexDatabase, RootDatabase};
+use ide_db::{base_db::SourceRootDatabase, FilePosition, LineIndexDatabase, RootDatabase};
 use std::{fmt::Write, time::Instant};
 use syntax::{algo::ancestors_at_offset, ast, AstNode, TextRange};
 

@@ -65,7 +65,7 @@ use hir::{sym, ChangeWithProcMacros};
 use ide_db::{
     base_db::{
         salsa::{self, ParallelDatabase},
-        CrateOrigin, Env, FileLoader, FileSet, SourceDatabase, SourceDatabaseExt, VfsPath,
+        CrateOrigin, Env, FileLoader, FileSet, SourceDatabase, SourceRootDatabase, VfsPath,
     },
     prime_caches, symbol_index, FxHashMap, FxIndexSet, LineIndexDatabase,
 };
@@ -286,7 +286,7 @@ impl Analysis {
 
     /// Gets the text of the source file.
     pub fn file_text(&self, file_id: FileId) -> Cancellable<Arc<str>> {
-        self.with_db(|db| SourceDatabaseExt::file_text(db, file_id))
+        self.with_db(|db| SourceDatabase::file_text(db, file_id))
     }
 
     /// Gets the syntax tree of the file.

@@ -3,7 +3,7 @@
 
 use hir::{db::HirDatabase, Crate, HirFileIdExt, Module, Semantics};
 use ide_db::{
-    base_db::SourceDatabaseExt, defs::Definition, documentation::Documentation,
+    base_db::SourceRootDatabase, defs::Definition, documentation::Documentation,
     famous_defs::FamousDefs, helpers::get_definition, FileId, FileRange, FxHashMap, FxHashSet,
     RootDatabase,
 };

@@ -1,6 +1,6 @@
 use dot::{Id, LabelText};
 use ide_db::{
-    base_db::{CrateGraph, CrateId, Dependency, SourceDatabase, SourceDatabaseExt},
+    base_db::{CrateGraph, CrateId, Dependency, SourceDatabase, SourceRootDatabase},
     FxHashSet, RootDatabase,
 };
 use triomphe::Arc;

@@ -1349,13 +1349,14 @@ fn add_target_crate_root(
     );
     if let TargetKind::Lib { is_proc_macro: true } = kind {
         let proc_macro = match build_data.as_ref().map(|it| it.proc_macro_dylib_path.as_ref()) {
-            Some(it) => it.cloned().map(|path| Ok((cargo_name.to_owned(), path))),
-            None => Some(Err("proc-macro crate is missing its build data".to_owned())),
+            Some(it) => match it {
+                Some(path) => Ok((cargo_name.to_owned(), path.clone())),
+                None => Err("proc-macro crate build data is missing dylib path".to_owned()),
+            },
+            None => Err("proc-macro crate is missing its build data".to_owned()),
         };
-        if let Some(proc_macro) = proc_macro {
-            proc_macros.insert(crate_id, proc_macro);
-        }
+        proc_macros.insert(crate_id, proc_macro);
     }
 
     crate_id
 }
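
Aside (not part of the diff): the hunk above flattens the old Option<Result<..>> value into a plain Result. Previously an entry was only inserted when the match produced Some(..), so a proc-macro crate whose build data lacked a dylib path was skipped silently; now every case produces a Result that is always recorded, with distinct messages for missing build data and a missing dylib path. A standalone sketch of that control flow using hypothetical stand-in types (classify, the path string, and main are illustrative, not from the commit):

fn classify(build_data: Option<Option<&str>>, cargo_name: &str) -> Result<(String, String), String> {
    match build_data {
        // Build data is present; it may or may not carry a dylib path.
        Some(dylib_path) => match dylib_path {
            Some(path) => Ok((cargo_name.to_owned(), path.to_owned())),
            None => Err("proc-macro crate build data is missing dylib path".to_owned()),
        },
        // No build data at all.
        None => Err("proc-macro crate is missing its build data".to_owned()),
    }
}

fn main() {
    assert!(classify(Some(Some("/path/to/libmacros.so")), "macros").is_ok());
    assert!(classify(Some(None), "macros").is_err());
    assert!(classify(None, "macros").is_err());
}
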
@@ -23,7 +23,7 @@ use ide::{
 use ide_db::{
     base_db::{
         salsa::{self, debug::DebugQueryTable, ParallelDatabase},
-        SourceDatabase, SourceDatabaseExt,
+        SourceDatabase, SourceRootDatabase,
     },
     EditionedFileId, LineIndexDatabase, SnippetCap,
 };

@@ -6,7 +6,7 @@ use rustc_hash::FxHashSet;
 
 use hir::{db::HirDatabase, Crate, HirFileIdExt, Module};
 use ide::{AnalysisHost, AssistResolveStrategy, Diagnostic, DiagnosticsConfig, Severity};
-use ide_db::{base_db::SourceDatabaseExt, LineIndexDatabase};
+use ide_db::{base_db::SourceRootDatabase, LineIndexDatabase};
 use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice};
 
 use crate::cli::flags;

@@ -2,7 +2,7 @@
 
 use hir::{Crate, Module};
 use hir_ty::db::HirDatabase;
-use ide_db::{base_db::SourceDatabaseExt, LineIndexDatabase};
+use ide_db::{base_db::SourceRootDatabase, LineIndexDatabase};
 use profile::StopWatch;
 use project_model::{CargoConfig, RustLibSource};
 use syntax::TextRange;

@@ -1,7 +1,7 @@
 //! Applies structured search replace rules from the command line.
 
 use anyhow::Context;
-use ide_db::EditionedFileId;
+use ide_db::{base_db::SourceDatabase, EditionedFileId};
 use ide_ssr::MatchFinder;
 use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice};
 use project_model::{CargoConfig, RustLibSource};
@@ -10,7 +10,6 @@ use crate::cli::flags;
 
 impl flags::Ssr {
     pub fn run(self) -> anyhow::Result<()> {
-        use ide_db::base_db::SourceDatabaseExt;
         let cargo_config =
             CargoConfig { sysroot: Some(RustLibSource::Discover), ..Default::default() };
         let load_cargo_config = LoadCargoConfig {
@@ -46,7 +45,7 @@ impl flags::Search {
     /// `debug_snippet`. This is intended for debugging and probably isn't in it's current form useful
     /// for much else.
     pub fn run(self) -> anyhow::Result<()> {
-        use ide_db::base_db::SourceDatabaseExt;
+        use ide_db::base_db::SourceRootDatabase;
         use ide_db::symbol_index::SymbolsDatabase;
         let cargo_config = CargoConfig::default();
         let load_cargo_config = LoadCargoConfig {

@@ -9,7 +9,7 @@ use crossbeam_channel::{unbounded, Receiver, Sender};
 use flycheck::{project_json, FlycheckHandle};
 use hir::ChangeWithProcMacros;
 use ide::{Analysis, AnalysisHost, Cancellable, FileId, SourceRootId};
-use ide_db::base_db::{CrateId, ProcMacroPaths, SourceDatabaseExt};
+use ide_db::base_db::{CrateId, ProcMacroPaths, SourceDatabase, SourceRootDatabase};
 use itertools::Itertools;
 use load_cargo::SourceRootConfig;
 use lsp_types::{SemanticTokens, Url};

@@ -10,7 +10,7 @@ use std::{
 use always_assert::always;
 use crossbeam_channel::{select, Receiver};
 use flycheck::project_json;
-use ide_db::base_db::{SourceDatabase, SourceDatabaseExt, VfsPath};
+use ide_db::base_db::{SourceDatabase, SourceRootDatabase, VfsPath};
 use lsp_server::{Connection, Notification, Request};
 use lsp_types::{notification::Notification as _, TextDocumentIdentifier};
 use stdx::thread::ThreadIntent;

@@ -3,7 +3,7 @@ use std::{iter, mem, str::FromStr, sync};
 
 use base_db::{
     CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, Env, FileChange,
-    FileSet, LangCrateOrigin, SourceDatabaseExt, SourceRoot, Version, VfsPath,
+    FileSet, LangCrateOrigin, SourceRootDatabase, SourceRoot, Version, VfsPath,
 };
 use cfg::CfgOptions;
 use hir_expand::{
@@ -26,7 +26,7 @@ use tt::{Leaf, Subtree, TokenTree};
 
 pub const WORKSPACE: base_db::SourceRootId = base_db::SourceRootId(0);
 
-pub trait WithFixture: Default + ExpandDatabase + SourceDatabaseExt + 'static {
+pub trait WithFixture: Default + ExpandDatabase + SourceRootDatabase + 'static {
     #[track_caller]
     fn with_single_file(ra_fixture: &str) -> (Self, EditionedFileId) {
         let fixture = ChangeFixture::parse(ra_fixture);
@@ -101,7 +101,7 @@ pub trait WithFixture: Default + ExpandDatabase + SourceDatabaseExt + 'static {
     }
 }
 
-impl<DB: ExpandDatabase + SourceDatabaseExt + Default + 'static> WithFixture for DB {}
+impl<DB: ExpandDatabase + SourceRootDatabase + Default + 'static> WithFixture for DB {}
 
 pub struct ChangeFixture {
     pub file_position: Option<(EditionedFileId, RangeOrOffset)>,