Auto merge of #17794 - Veykril:source-db-simplify, r=Veykril

internal: Newtype `ErasedFileAstId`

Having it wrap `la_arena::Idx` made it quite annoying to use.
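For context, the core of the change as it appears in the `span` crate diff below: `ErasedFileAstId` becomes a plain `u32` newtype with `const` `from_raw`/`into_raw` accessors, instead of an alias for `la_arena::Idx<syntax::SyntaxNodePtr>`. A minimal, self-contained sketch of that shape (the `main` is only illustrative and not part of the commit):

```rust
use std::fmt;

// A u32 newtype replaces the `la_arena::Idx<SyntaxNodePtr>` alias, so callers
// no longer need `RawIdx` conversions to construct or inspect an ast id.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct ErasedFileAstId(u32);

impl ErasedFileAstId {
    pub const fn into_raw(self) -> u32 {
        self.0
    }
    pub const fn from_raw(raw: u32) -> Self {
        Self(raw)
    }
}

impl fmt::Debug for ErasedFileAstId {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.0.fmt(f)
    }
}

fn main() {
    // Before this change call sites wrote
    // `ErasedFileAstId::from_raw(RawIdx::from(0))`; now a plain u32 suffices.
    let root = ErasedFileAstId::from_raw(0);
    assert_eq!(root.into_raw(), 0);
    println!("root ast id: {root:?}");
}
```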
Committed by bors on 2024-08-05 11:48:00 +00:00
Commit 3760540ace
47 changed files with 129 additions and 124 deletions

Cargo.lock (generated)
View file

@ -1268,6 +1268,7 @@ name = "paths"
version = "0.0.0"
dependencies = [
"camino",
"serde",
]
[[package]]
@ -1330,14 +1331,12 @@ dependencies = [
"base-db",
"indexmap",
"intern",
"la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"paths",
"rustc-hash",
"serde",
"serde_json",
"span",
"stdx",
"text-size",
"tracing",
"tt",
]

View file

@ -7,7 +7,7 @@ use salsa::Durability;
use triomphe::Arc;
use vfs::FileId;
use crate::{CrateGraph, SourceDatabaseExt, SourceDatabaseExt2, SourceRoot, SourceRootId};
use crate::{CrateGraph, SourceDatabaseFileInputExt, SourceRoot, SourceRootDatabase, SourceRootId};
/// Encapsulate a bunch of raw `.set` calls on the database.
#[derive(Default)]
@ -50,7 +50,7 @@ impl FileChange {
self.crate_graph = Some(graph);
}
pub fn apply(self, db: &mut dyn SourceDatabaseExt) {
pub fn apply(self, db: &mut dyn SourceRootDatabase) {
let _p = tracing::info_span!("FileChange::apply").entered();
if let Some(roots) = self.roots {
for (idx, root) in roots.into_iter().enumerate() {

View file

@ -1,5 +1,5 @@
//! base_db defines basic database traits. The concrete DB is defined by ide.
// FIXME: Rename this crate, base db is non descriptive
mod change;
mod input;
@ -47,8 +47,6 @@ pub const DEFAULT_PARSE_LRU_CAP: u16 = 128;
pub const DEFAULT_BORROWCK_LRU_CAP: u16 = 2024;
pub trait FileLoader {
/// Text of the file.
fn file_text(&self, file_id: FileId) -> Arc<str>;
fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId>;
/// Crates whose root's source root is the same as the source root of `file_id`
fn relevant_crates(&self, file_id: FileId) -> Arc<[CrateId]>;
@ -58,6 +56,13 @@ pub trait FileLoader {
/// model. Everything else in rust-analyzer is derived from these queries.
#[salsa::query_group(SourceDatabaseStorage)]
pub trait SourceDatabase: FileLoader + std::fmt::Debug {
#[salsa::input]
fn compressed_file_text(&self, file_id: FileId) -> Arc<[u8]>;
/// Text of the file.
#[salsa::lru]
fn file_text(&self, file_id: FileId) -> Arc<str>;
/// Parses the file into the syntax tree.
#[salsa::lru]
fn parse(&self, file_id: EditionedFileId) -> Parse<ast::SourceFile>;
@ -99,16 +104,18 @@ fn parse_errors(db: &dyn SourceDatabase, file_id: EditionedFileId) -> Option<Arc
}
}
fn file_text(db: &dyn SourceDatabase, file_id: FileId) -> Arc<str> {
let bytes = db.compressed_file_text(file_id);
let bytes =
lz4_flex::decompress_size_prepended(&bytes).expect("lz4 decompression should not fail");
let text = std::str::from_utf8(&bytes).expect("file contents should be valid UTF-8");
Arc::from(text)
}
/// We don't want to give HIR knowledge of source roots, hence we extract these
/// methods into a separate DB.
#[salsa::query_group(SourceDatabaseExtStorage)]
pub trait SourceDatabaseExt: SourceDatabase {
#[salsa::input]
fn compressed_file_text(&self, file_id: FileId) -> Arc<[u8]>;
#[salsa::lru]
fn file_text(&self, file_id: FileId) -> Arc<str>;
#[salsa::query_group(SourceRootDatabaseStorage)]
pub trait SourceRootDatabase: SourceDatabase {
/// Path to a file, relative to the root of its source root.
/// Source root of the file.
#[salsa::input]
@ -121,15 +128,7 @@ pub trait SourceDatabaseExt: SourceDatabase {
fn source_root_crates(&self, id: SourceRootId) -> Arc<[CrateId]>;
}
fn file_text(db: &dyn SourceDatabaseExt, file_id: FileId) -> Arc<str> {
let bytes = db.compressed_file_text(file_id);
let bytes =
lz4_flex::decompress_size_prepended(&bytes).expect("lz4 decompression should not fail");
let text = std::str::from_utf8(&bytes).expect("file contents should be valid UTF-8");
Arc::from(text)
}
pub trait SourceDatabaseExt2 {
pub trait SourceDatabaseFileInputExt {
fn set_file_text(&mut self, file_id: FileId, text: &str) {
self.set_file_text_with_durability(file_id, text, Durability::LOW);
}
@ -142,7 +141,7 @@ pub trait SourceDatabaseExt2 {
);
}
impl<Db: ?Sized + SourceDatabaseExt> SourceDatabaseExt2 for Db {
impl<Db: ?Sized + SourceRootDatabase> SourceDatabaseFileInputExt for Db {
fn set_file_text_with_durability(
&mut self,
file_id: FileId,
@ -159,7 +158,7 @@ impl<Db: ?Sized + SourceDatabaseExt> SourceDatabaseExt2 for Db {
}
}
fn source_root_crates(db: &dyn SourceDatabaseExt, id: SourceRootId) -> Arc<[CrateId]> {
fn source_root_crates(db: &dyn SourceRootDatabase, id: SourceRootId) -> Arc<[CrateId]> {
let graph = db.crate_graph();
let mut crates = graph
.iter()
@ -173,13 +172,12 @@ fn source_root_crates(db: &dyn SourceDatabaseExt, id: SourceRootId) -> Arc<[Crat
crates.into_iter().collect()
}
/// Silly workaround for cyclic deps between the traits
// FIXME: Would be nice to get rid of this somehow
/// Silly workaround for cyclic deps due to the SourceRootDatabase and SourceDatabase split
/// regarding FileLoader
pub struct FileLoaderDelegate<T>(pub T);
impl<T: SourceDatabaseExt> FileLoader for FileLoaderDelegate<&'_ T> {
fn file_text(&self, file_id: FileId) -> Arc<str> {
SourceDatabaseExt::file_text(self.0, file_id)
}
impl<T: SourceRootDatabase> FileLoader for FileLoaderDelegate<&'_ T> {
fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
// FIXME: this *somehow* should be platform agnostic...
let source_root = self.0.file_source_root(path.anchor);

View file

@ -1,4 +1,4 @@
use base_db::{SourceDatabase, SourceDatabaseExt2 as _};
use base_db::{SourceDatabase, SourceDatabaseFileInputExt as _};
use test_fixture::WithFixture;
use crate::{db::DefDatabase, nameres::tests::TestDB, AdtId, ModuleDefId};

View file

@ -19,7 +19,7 @@ use crate::{
};
#[salsa::database(
base_db::SourceDatabaseExtStorage,
base_db::SourceRootDatabaseStorage,
base_db::SourceDatabaseStorage,
hir_expand::db::ExpandDatabaseStorage,
crate::db::InternDatabaseStorage,
@ -69,9 +69,6 @@ impl fmt::Debug for TestDB {
impl panic::RefUnwindSafe for TestDB {}
impl FileLoader for TestDB {
fn file_text(&self, file_id: FileId) -> Arc<str> {
FileLoaderDelegate(self).file_text(file_id)
}
fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
FileLoaderDelegate(self).resolve_path(path)
}

View file

@ -1,7 +1,7 @@
//! Defines a unit of change that can applied to the database to get the next
//! state. Changes are transactional.
use base_db::{
salsa::Durability, CrateGraph, CrateId, FileChange, SourceDatabaseExt, SourceRoot,
salsa::Durability, CrateGraph, CrateId, FileChange, SourceRoot, SourceRootDatabase,
TargetLayoutLoadResult, Version,
};
use la_arena::RawIdx;
@ -23,7 +23,7 @@ impl ChangeWithProcMacros {
Self::default()
}
pub fn apply(self, db: &mut (impl ExpandDatabase + SourceDatabaseExt)) {
pub fn apply(self, db: &mut (impl ExpandDatabase + SourceRootDatabase)) {
self.source_change.apply(db);
if let Some(proc_macros) = self.proc_macros {
db.set_proc_macros_with_durability(Arc::new(proc_macros), Durability::HIGH);

View file

@ -176,7 +176,12 @@ impl ExpandErrorKind {
&ExpandErrorKind::MissingProcMacroExpander(def_crate) => {
match db.proc_macros().get_error_for_crate(def_crate) {
Some((e, hard_err)) => (e.to_owned(), hard_err),
None => ("missing expander".to_owned(), true),
None => (
format!(
"internal error: proc-macro map is missing error entry for crate {def_crate:?}"
),
true,
),
}
}
ExpandErrorKind::MacroDefinition => {

View file

@ -15,7 +15,7 @@ use test_utils::extract_annotations;
use triomphe::Arc;
#[salsa::database(
base_db::SourceDatabaseExtStorage,
base_db::SourceRootDatabaseStorage,
base_db::SourceDatabaseStorage,
hir_expand::db::ExpandDatabaseStorage,
hir_def::db::InternDatabaseStorage,
@ -75,9 +75,6 @@ impl salsa::ParallelDatabase for TestDB {
impl panic::RefUnwindSafe for TestDB {}
impl FileLoader for TestDB {
fn file_text(&self, file_id: FileId) -> Arc<str> {
FileLoaderDelegate(self).file_text(file_id)
}
fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
FileLoaderDelegate(self).resolve_path(path)
}

View file

@ -12,7 +12,7 @@ mod traits;
use std::env;
use base_db::SourceDatabaseExt2 as _;
use base_db::SourceDatabaseFileInputExt as _;
use expect_test::Expect;
use hir_def::{
body::{Body, BodySourceMap, SyntheticSyntax},

View file

@ -1,4 +1,4 @@
use base_db::SourceDatabaseExt2 as _;
use base_db::SourceDatabaseFileInputExt as _;
use test_fixture::WithFixture;
use crate::{db::HirDatabase, test_db::TestDB};

View file

@ -3,7 +3,7 @@ mod generated;
use expect_test::expect;
use hir::{FileRange, Semantics};
use ide_db::{
base_db::SourceDatabaseExt,
base_db::{SourceDatabase, SourceRootDatabase},
imports::insert_use::{ImportGranularity, InsertUseConfig},
source_change::FileSystemEdit,
EditionedFileId, RootDatabase, SnippetCap,

View file

@ -4,7 +4,7 @@ use std::iter;
use hir::{HirFileIdExt, Module};
use ide_db::{
base_db::{SourceDatabaseExt, VfsPath},
base_db::{SourceRootDatabase, VfsPath},
FxHashSet, RootDatabase, SymbolKind,
};
use stdx::IsNoneOr;

View file

@ -23,10 +23,10 @@ mod type_pos;
mod use_tree;
mod visibility;
use base_db::SourceDatabase;
use expect_test::Expect;
use hir::PrefixKind;
use ide_db::{
base_db::FileLoader,
imports::insert_use::{ImportGranularity, InsertUseConfig},
FilePosition, RootDatabase, SnippetCap,
};

View file

@ -2,7 +2,7 @@
use std::collections::VecDeque;
use base_db::SourceDatabaseExt;
use base_db::SourceRootDatabase;
use hir::{Crate, DescendPreference, ItemInNs, ModuleDef, Name, Semantics};
use span::FileId;
use syntax::{

View file

@ -74,7 +74,7 @@ pub type FilePosition = FilePositionWrapper<FileId>;
pub type FileRange = FileRangeWrapper<FileId>;
#[salsa::database(
base_db::SourceDatabaseExtStorage,
base_db::SourceRootDatabaseStorage,
base_db::SourceDatabaseStorage,
hir::db::ExpandDatabaseStorage,
hir::db::DefDatabaseStorage,
@ -125,9 +125,6 @@ impl Upcast<dyn HirDatabase> for RootDatabase {
}
impl FileLoader for RootDatabase {
fn file_text(&self, file_id: FileId) -> Arc<str> {
FileLoaderDelegate(self).file_text(file_id)
}
fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
FileLoaderDelegate(self).resolve_path(path)
}

View file

@ -11,7 +11,7 @@ use hir::db::DefDatabase;
use crate::{
base_db::{
salsa::{Database, ParallelDatabase, Snapshot},
Cancelled, CrateId, SourceDatabase, SourceDatabaseExt,
Cancelled, CrateId, SourceDatabase, SourceRootDatabase,
},
FxIndexMap, RootDatabase,
};

View file

@ -6,7 +6,7 @@
use std::mem;
use base_db::{salsa::Database, SourceDatabase, SourceDatabaseExt};
use base_db::{salsa::Database, SourceDatabase, SourceRootDatabase};
use hir::{
sym, AsAssocItem, DefWithBody, DescendPreference, FileRange, HasAttrs, HasSource, HirFileIdExt,
InFile, InRealFile, ModuleSource, PathResolution, Semantics, Visibility,

View file

@ -29,7 +29,7 @@ use std::{
use base_db::{
salsa::{self, ParallelDatabase},
SourceDatabaseExt, SourceRootId, Upcast,
SourceRootDatabase, SourceRootId, Upcast,
};
use fst::{raw::IndexedValue, Automaton, Streamer};
use hir::{
@ -100,7 +100,7 @@ impl Query {
}
#[salsa::query_group(SymbolsDatabaseStorage)]
pub trait SymbolsDatabase: HirDatabase + SourceDatabaseExt + Upcast<dyn HirDatabase> {
pub trait SymbolsDatabase: HirDatabase + SourceRootDatabase + Upcast<dyn HirDatabase> {
/// The symbol index for a given module. These modules should only be in source roots that
/// are inside local_roots.
fn module_symbols(&self, module: Module) -> Arc<SymbolIndex>;

View file

@ -4,7 +4,7 @@ use std::iter;
use hir::{db::DefDatabase, DefMap, InFile, ModuleSource};
use ide_db::{
base_db::{FileLoader, SourceDatabaseExt},
base_db::{FileLoader, SourceDatabase, SourceRootDatabase},
source_change::SourceChange,
FileId, FileRange, LineIndexDatabase,
};
@ -47,7 +47,7 @@ pub(crate) fn unlinked_file(
//
// Only show this diagnostic on the first three characters of
// the file, to avoid overwhelming the user during startup.
range = FileLoader::file_text(ctx.sema.db, file_id)
range = SourceDatabase::file_text(ctx.sema.db, file_id)
.char_indices()
.take(3)
.last()

View file

@ -1,7 +1,7 @@
#![allow(clippy::print_stderr)]
use ide_db::{
assists::AssistResolveStrategy, base_db::SourceDatabaseExt, LineIndexDatabase, RootDatabase,
assists::AssistResolveStrategy, base_db::SourceDatabase, LineIndexDatabase, RootDatabase,
};
use itertools::Itertools;
use stdx::trim_indent;

View file

@ -84,7 +84,7 @@ pub use crate::{errors::SsrError, from_comment::ssr_from_comment, matching::Matc
use crate::{errors::bail, matching::MatchFailureReason};
use hir::{FileRange, Semantics};
use ide_db::{EditionedFileId, FileId, FxHashMap, RootDatabase};
use ide_db::{base_db::SourceDatabase, EditionedFileId, FileId, FxHashMap, RootDatabase};
use resolving::ResolvedRule;
use syntax::{ast, AstNode, SyntaxNode, TextRange};
use text_edit::TextEdit;
@ -141,7 +141,7 @@ impl<'db> MatchFinder<'db> {
/// Constructs an instance using the start of the first file in `db` as the lookup context.
pub fn at_first_file(db: &'db ide_db::RootDatabase) -> Result<MatchFinder<'db>, SsrError> {
use ide_db::base_db::SourceDatabaseExt;
use ide_db::base_db::SourceRootDatabase;
use ide_db::symbol_index::SymbolsDatabase;
if let Some(first_file_id) =
db.local_roots().iter().next().and_then(|root| db.source_root(*root).iter().next())
@ -172,7 +172,6 @@ impl<'db> MatchFinder<'db> {
/// Finds matches for all added rules and returns edits for all found matches.
pub fn edits(&self) -> FxHashMap<FileId, TextEdit> {
use ide_db::base_db::SourceDatabaseExt;
let mut matches_by_file = FxHashMap::default();
for m in self.matches().matches {
matches_by_file
@ -228,7 +227,6 @@ impl<'db> MatchFinder<'db> {
file_id: EditionedFileId,
snippet: &str,
) -> Vec<MatchDebugInfo> {
use ide_db::base_db::SourceDatabaseExt;
let file = self.sema.parse(file_id);
let mut res = Vec::new();
let file_text = self.sema.db.file_text(file_id.into());

View file

@ -156,7 +156,7 @@ impl MatchFinder<'_> {
fn search_files_do(&self, mut callback: impl FnMut(FileId)) {
if self.restrict_ranges.is_empty() {
// Unrestricted search.
use ide_db::base_db::SourceDatabaseExt;
use ide_db::base_db::SourceRootDatabase;
use ide_db::symbol_index::SymbolsDatabase;
for &root in self.sema.db.local_roots().iter() {
let sr = self.sema.db.source_root(root);

View file

@ -1,7 +1,7 @@
use expect_test::{expect, Expect};
use hir::{FilePosition, FileRange};
use ide_db::{
base_db::{salsa::Durability, SourceDatabaseExt},
base_db::{salsa::Durability, SourceDatabase},
EditionedFileId, FxHashSet,
};
use test_utils::RangeOrOffset;

View file

@ -10,7 +10,7 @@ use hir::{
Semantics,
};
use ide_db::{
base_db::{AnchoredPath, FileLoader},
base_db::{AnchoredPath, FileLoader, SourceDatabase},
defs::{Definition, IdentClass},
helpers::pick_best_token,
RootDatabase, SymbolKind,

View file

@ -1,5 +1,5 @@
use expect_test::{expect, Expect};
use ide_db::{base_db::FileLoader, FileRange};
use ide_db::{base_db::SourceDatabase, FileRange};
use syntax::TextRange;
use crate::{

View file

@ -1,5 +1,5 @@
use hir::Semantics;
use ide_db::{base_db::SourceDatabaseExt, FilePosition, LineIndexDatabase, RootDatabase};
use ide_db::{base_db::SourceRootDatabase, FilePosition, LineIndexDatabase, RootDatabase};
use std::{fmt::Write, time::Instant};
use syntax::{algo::ancestors_at_offset, ast, AstNode, TextRange};

View file

@ -65,7 +65,7 @@ use hir::{sym, ChangeWithProcMacros};
use ide_db::{
base_db::{
salsa::{self, ParallelDatabase},
CrateOrigin, Env, FileLoader, FileSet, SourceDatabase, SourceDatabaseExt, VfsPath,
CrateOrigin, Env, FileLoader, FileSet, SourceDatabase, SourceRootDatabase, VfsPath,
},
prime_caches, symbol_index, FxHashMap, FxIndexSet, LineIndexDatabase,
};
@ -286,7 +286,7 @@ impl Analysis {
/// Gets the text of the source file.
pub fn file_text(&self, file_id: FileId) -> Cancellable<Arc<str>> {
self.with_db(|db| SourceDatabaseExt::file_text(db, file_id))
self.with_db(|db| SourceDatabase::file_text(db, file_id))
}
/// Gets the syntax tree of the file.

View file

@ -3,7 +3,7 @@
use hir::{db::HirDatabase, Crate, HirFileIdExt, Module, Semantics};
use ide_db::{
base_db::SourceDatabaseExt, defs::Definition, documentation::Documentation,
base_db::SourceRootDatabase, defs::Definition, documentation::Documentation,
famous_defs::FamousDefs, helpers::get_definition, FileId, FileRange, FxHashMap, FxHashSet,
RootDatabase,
};

View file

@ -1,6 +1,6 @@
use dot::{Id, LabelText};
use ide_db::{
base_db::{CrateGraph, CrateId, Dependency, SourceDatabase, SourceDatabaseExt},
base_db::{CrateGraph, CrateId, Dependency, SourceDatabase, SourceRootDatabase},
FxHashSet, RootDatabase,
};
use triomphe::Arc;

View file

@ -13,13 +13,10 @@ doctest = false
[dependencies]
camino.workspace = true
# Adding this dep sadly puts a lot of rust-analyzer crates after the
# serde-derive crate. Even though we don't activate the derive feature here,
# someone else in the crate graph certainly does!
# serde.workspace = true
serde = { workspace = true, optional = true }
[features]
serde1 = ["camino/serde1"]
serde1 = ["camino/serde1", "dep:serde"]
[lints]
workspace = true

View file

@ -22,12 +22,11 @@ indexmap.workspace = true
paths = { workspace = true, features = ["serde1"] }
tt.workspace = true
stdx.workspace = true
text-size.workspace = true
span.workspace = true
# Ideally this crate would not depend on salsa things, but we need span information here which wraps
# InternIds for the syntax context
span.workspace = true
# only here due to the `Env` newtype :/
base-db.workspace = true
la-arena.workspace = true
intern.workspace = true
[lints]

View file

@ -158,9 +158,7 @@ type ProtocolWrite<W: Write> = for<'o, 'msg> fn(out: &'o mut W, msg: &'msg str)
#[cfg(test)]
mod tests {
use intern::{sym, Symbol};
use la_arena::RawIdx;
use span::{ErasedFileAstId, Span, SpanAnchor, SyntaxContextId};
use text_size::{TextRange, TextSize};
use span::{ErasedFileAstId, Span, SpanAnchor, SyntaxContextId, TextRange, TextSize};
use tt::{Delimiter, DelimiterKind, Ident, Leaf, Literal, Punct, Spacing, Subtree, TokenTree};
use super::*;
@ -171,7 +169,7 @@ mod tests {
span::FileId::from_raw(0xe4e4e),
span::Edition::CURRENT,
),
ast_id: ErasedFileAstId::from_raw(RawIdx::from(0)),
ast_id: ErasedFileAstId::from_raw(0),
};
let token_trees = Box::new([

View file

@ -38,11 +38,9 @@
use std::collections::VecDeque;
use intern::Symbol;
use la_arena::RawIdx;
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use span::{EditionedFileId, ErasedFileAstId, Span, SpanAnchor, SyntaxContextId};
use text_size::TextRange;
use span::{EditionedFileId, ErasedFileAstId, Span, SpanAnchor, SyntaxContextId, TextRange};
use crate::msg::{ENCODE_CLOSE_SPAN_VERSION, EXTENDED_LEAF_DATA};
@ -54,7 +52,7 @@ pub fn serialize_span_data_index_map(map: &SpanDataIndexMap) -> Vec<u32> {
.flat_map(|span| {
[
span.anchor.file_id.as_u32(),
span.anchor.ast_id.into_raw().into_u32(),
span.anchor.ast_id.into_raw(),
span.range.start().into(),
span.range.end().into(),
span.ctx.into_u32(),
@ -71,7 +69,7 @@ pub fn deserialize_span_data_index_map(map: &[u32]) -> SpanDataIndexMap {
Span {
anchor: SpanAnchor {
file_id: EditionedFileId::from_raw(file_id),
ast_id: ErasedFileAstId::from_raw(RawIdx::from_u32(ast_id)),
ast_id: ErasedFileAstId::from_raw(ast_id),
},
range: TextRange::new(start.into(), end.into()),
ctx: SyntaxContextId::from_u32(e),

View file

@ -479,7 +479,7 @@ mod tests {
range: TextRange::empty(TextSize::new(0)),
anchor: span::SpanAnchor {
file_id: EditionedFileId::current_edition(FileId::from_raw(0)),
ast_id: span::ErasedFileAstId::from_raw(0.into()),
ast_id: span::ErasedFileAstId::from_raw(0),
},
ctx: SyntaxContextId::ROOT,
};
@ -515,7 +515,7 @@ mod tests {
range: TextRange::empty(TextSize::new(0)),
anchor: span::SpanAnchor {
file_id: EditionedFileId::current_edition(FileId::from_raw(0)),
ast_id: span::ErasedFileAstId::from_raw(0.into()),
ast_id: span::ErasedFileAstId::from_raw(0),
},
ctx: SyntaxContextId::ROOT,
};

View file

@ -69,7 +69,7 @@ fn assert_expand_impl(
range: TextRange::new(0.into(), 150.into()),
anchor: SpanAnchor {
file_id: EditionedFileId::current_edition(FileId::from_raw(41)),
ast_id: ErasedFileAstId::from_raw(From::from(1)),
ast_id: ErasedFileAstId::from_raw(1),
},
ctx: SyntaxContextId::ROOT,
};
@ -77,7 +77,7 @@ fn assert_expand_impl(
range: TextRange::new(0.into(), 100.into()),
anchor: SpanAnchor {
file_id: EditionedFileId::current_edition(FileId::from_raw(42)),
ast_id: ErasedFileAstId::from_raw(From::from(2)),
ast_id: ErasedFileAstId::from_raw(2),
},
ctx: SyntaxContextId::ROOT,
};

View file

@ -1349,12 +1349,13 @@ fn add_target_crate_root(
);
if let TargetKind::Lib { is_proc_macro: true } = kind {
let proc_macro = match build_data.as_ref().map(|it| it.proc_macro_dylib_path.as_ref()) {
Some(it) => it.cloned().map(|path| Ok((cargo_name.to_owned(), path))),
None => Some(Err("proc-macro crate is missing its build data".to_owned())),
Some(it) => match it {
Some(path) => Ok((cargo_name.to_owned(), path.clone())),
None => Err("proc-macro crate build data is missing dylib path".to_owned()),
},
None => Err("proc-macro crate is missing its build data".to_owned()),
};
if let Some(proc_macro) = proc_macro {
proc_macros.insert(crate_id, proc_macro);
}
proc_macros.insert(crate_id, proc_macro);
}
crate_id

View file

@ -23,7 +23,7 @@ use ide::{
use ide_db::{
base_db::{
salsa::{self, debug::DebugQueryTable, ParallelDatabase},
SourceDatabase, SourceDatabaseExt,
SourceDatabase, SourceRootDatabase,
},
EditionedFileId, LineIndexDatabase, SnippetCap,
};

View file

@ -6,7 +6,7 @@ use rustc_hash::FxHashSet;
use hir::{db::HirDatabase, Crate, HirFileIdExt, Module};
use ide::{AnalysisHost, AssistResolveStrategy, Diagnostic, DiagnosticsConfig, Severity};
use ide_db::{base_db::SourceDatabaseExt, LineIndexDatabase};
use ide_db::{base_db::SourceRootDatabase, LineIndexDatabase};
use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice};
use crate::cli::flags;

View file

@ -2,7 +2,7 @@
use hir::{Crate, Module};
use hir_ty::db::HirDatabase;
use ide_db::{base_db::SourceDatabaseExt, LineIndexDatabase};
use ide_db::{base_db::SourceRootDatabase, LineIndexDatabase};
use profile::StopWatch;
use project_model::{CargoConfig, RustLibSource};
use syntax::TextRange;

View file

@ -1,7 +1,7 @@
//! Applies structured search replace rules from the command line.
use anyhow::Context;
use ide_db::EditionedFileId;
use ide_db::{base_db::SourceDatabase, EditionedFileId};
use ide_ssr::MatchFinder;
use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice};
use project_model::{CargoConfig, RustLibSource};
@ -10,7 +10,6 @@ use crate::cli::flags;
impl flags::Ssr {
pub fn run(self) -> anyhow::Result<()> {
use ide_db::base_db::SourceDatabaseExt;
let cargo_config =
CargoConfig { sysroot: Some(RustLibSource::Discover), ..Default::default() };
let load_cargo_config = LoadCargoConfig {
@ -46,7 +45,7 @@ impl flags::Search {
/// `debug_snippet`. This is intended for debugging and probably isn't in it's current form useful
/// for much else.
pub fn run(self) -> anyhow::Result<()> {
use ide_db::base_db::SourceDatabaseExt;
use ide_db::base_db::SourceRootDatabase;
use ide_db::symbol_index::SymbolsDatabase;
let cargo_config = CargoConfig::default();
let load_cargo_config = LoadCargoConfig {

View file

@ -9,7 +9,7 @@ use crossbeam_channel::{unbounded, Receiver, Sender};
use flycheck::{project_json, FlycheckHandle};
use hir::ChangeWithProcMacros;
use ide::{Analysis, AnalysisHost, Cancellable, FileId, SourceRootId};
use ide_db::base_db::{CrateId, ProcMacroPaths, SourceDatabaseExt};
use ide_db::base_db::{CrateId, ProcMacroPaths, SourceDatabase, SourceRootDatabase};
use itertools::Itertools;
use load_cargo::SourceRootConfig;
use lsp_types::{SemanticTokens, Url};

View file

@ -10,7 +10,7 @@ use std::{
use always_assert::always;
use crossbeam_channel::{select, Receiver};
use flycheck::project_json;
use ide_db::base_db::{SourceDatabase, SourceDatabaseExt, VfsPath};
use ide_db::base_db::{SourceDatabase, SourceRootDatabase, VfsPath};
use lsp_server::{Connection, Notification, Request};
use lsp_types::{notification::Notification as _, TextDocumentIdentifier};
use stdx::thread::ThreadIntent;

View file

@ -1084,7 +1084,6 @@ fn resolve_proc_macro() {
let sysroot = project_model::Sysroot::discover(
&AbsPathBuf::assert_utf8(std::env::current_dir().unwrap()),
&Default::default(),
false,
);
let proc_macro_server_path = sysroot.discover_proc_macro_srv().unwrap();
@ -1125,7 +1124,6 @@ edition = "2021"
proc-macro = true
//- /bar/src/lib.rs
extern crate proc_macro;
use proc_macro::{Delimiter, Group, Ident, Span, TokenStream, TokenTree};
macro_rules! t {
($n:literal) => {

View file

@ -18,7 +18,28 @@ use syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr};
/// See crates\hir-expand\src\ast_id_map.rs
/// This is a type erased FileAstId.
pub type ErasedFileAstId = la_arena::Idx<syntax::SyntaxNodePtr>;
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct ErasedFileAstId(u32);
impl ErasedFileAstId {
pub const fn into_raw(self) -> u32 {
self.0
}
pub const fn from_raw(u32: u32) -> Self {
Self(u32)
}
}
impl fmt::Display for ErasedFileAstId {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.0.fmt(f)
}
}
impl fmt::Debug for ErasedFileAstId {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.0.fmt(f)
}
}
/// `AstId` points to an AST node in a specific file.
pub struct FileAstId<N: AstIdNode> {
@ -47,7 +68,7 @@ impl<N: AstIdNode> Hash for FileAstId<N> {
impl<N: AstIdNode> fmt::Debug for FileAstId<N> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "FileAstId::<{}>({})", type_name::<N>(), self.raw.into_raw())
write!(f, "FileAstId::<{}>({})", type_name::<N>(), self.raw)
}
}
@ -176,7 +197,10 @@ impl AstIdMap {
let ptr = ptr.syntax_node_ptr();
let hash = hash_ptr(&ptr);
match self.map.raw_entry().from_hash(hash, |&idx| self.arena[idx] == ptr) {
Some((&raw, &())) => FileAstId { raw, covariant: PhantomData },
Some((&raw, &())) => FileAstId {
raw: ErasedFileAstId(raw.into_raw().into_u32()),
covariant: PhantomData,
},
None => panic!(
"Can't find {:?} in AstIdMap:\n{:?}",
ptr,
@ -186,18 +210,19 @@ impl AstIdMap {
}
pub fn get<N: AstIdNode>(&self, id: FileAstId<N>) -> AstPtr<N> {
AstPtr::try_from_raw(self.arena[id.raw]).unwrap()
AstPtr::try_from_raw(self.arena[Idx::from_raw(RawIdx::from_u32(id.raw.into_raw()))])
.unwrap()
}
pub fn get_erased(&self, id: ErasedFileAstId) -> SyntaxNodePtr {
self.arena[id]
self.arena[Idx::from_raw(RawIdx::from_u32(id.into_raw()))]
}
fn erased_ast_id(&self, item: &SyntaxNode) -> ErasedFileAstId {
let ptr = SyntaxNodePtr::new(item);
let hash = hash_ptr(&ptr);
match self.map.raw_entry().from_hash(hash, |&idx| self.arena[idx] == ptr) {
Some((&idx, &())) => idx,
Some((&idx, &())) => ErasedFileAstId(idx.into_raw().into_u32()),
None => panic!(
"Can't find {:?} in AstIdMap:\n{:?}",
item,
@ -207,7 +232,7 @@ impl AstIdMap {
}
fn alloc(&mut self, item: &SyntaxNode) -> ErasedFileAstId {
self.arena.alloc(SyntaxNodePtr::new(item))
ErasedFileAstId(self.arena.alloc(SyntaxNodePtr::new(item)).into_raw().into_u32())
}
}

View file

@ -21,15 +21,14 @@ pub use vfs::FileId;
/// The root ast id always points to the encompassing file, using this in spans is discouraged as
/// any range relative to it will be effectively absolute, ruining the entire point of anchored
/// relative text ranges.
pub const ROOT_ERASED_FILE_AST_ID: ErasedFileAstId =
la_arena::Idx::from_raw(la_arena::RawIdx::from_u32(0));
pub const ROOT_ERASED_FILE_AST_ID: ErasedFileAstId = ErasedFileAstId::from_raw(0);
/// FileId used as the span for syntax node fixups. Any Span containing this file id is to be
/// considered fake.
pub const FIXUP_ERASED_FILE_AST_ID_MARKER: ErasedFileAstId =
// we pick the second to last for this in case we every consider making this a NonMaxU32, this
// we pick the second to last for this in case we ever consider making this a NonMaxU32, this
// is required to be stable for the proc-macro-server
la_arena::Idx::from_raw(la_arena::RawIdx::from_u32(!0 - 1));
ErasedFileAstId::from_raw(!0 - 1);
pub type Span = SpanData<SyntaxContextId>;

View file

@ -119,7 +119,7 @@ impl fmt::Display for RealSpanMap {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
writeln!(f, "RealSpanMap({:?}):", self.file_id)?;
for span in self.pairs.iter() {
writeln!(f, "{}: {}", u32::from(span.0), span.1.into_raw().into_u32())?;
writeln!(f, "{}: {}", u32::from(span.0), span.1.into_raw())?;
}
Ok(())
}

View file

@ -3,7 +3,7 @@ use std::{iter, mem, str::FromStr, sync};
use base_db::{
CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, Env, FileChange,
FileSet, LangCrateOrigin, SourceDatabaseExt, SourceRoot, Version, VfsPath,
FileSet, LangCrateOrigin, SourceRoot, SourceRootDatabase, Version, VfsPath,
};
use cfg::CfgOptions;
use hir_expand::{
@ -26,7 +26,7 @@ use tt::{Leaf, Subtree, TokenTree};
pub const WORKSPACE: base_db::SourceRootId = base_db::SourceRootId(0);
pub trait WithFixture: Default + ExpandDatabase + SourceDatabaseExt + 'static {
pub trait WithFixture: Default + ExpandDatabase + SourceRootDatabase + 'static {
#[track_caller]
fn with_single_file(ra_fixture: &str) -> (Self, EditionedFileId) {
let fixture = ChangeFixture::parse(ra_fixture);
@ -101,7 +101,7 @@ pub trait WithFixture: Default + ExpandDatabase + SourceDatabaseExt + 'static {
}
}
impl<DB: ExpandDatabase + SourceDatabaseExt + Default + 'static> WithFixture for DB {}
impl<DB: ExpandDatabase + SourceRootDatabase + Default + 'static> WithFixture for DB {}
pub struct ChangeFixture {
pub file_position: Option<(EditionedFileId, RangeOrOffset)>,