Make use of NoHash hashing for FileId and CrateId

Lukas Wirth 2022-08-25 20:31:02 +02:00
parent 8dcf4c70c4
commit d025c5d8d6
22 changed files with 189 additions and 72 deletions
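FileId and CrateId are newtype wrappers around u32, so feeding them through FxHash does more mixing work than these keys need. This commit adds an identity-style hasher to stdx (crates/stdx/src/hash.rs, shown near the end of the diff) together with NoHashHashMap/NoHashHashSet aliases, and switches maps and sets keyed by these IDs over to the new types. As a rough sketch of the intent — hypothetical helpers, not part of the diff, assuming a crate that depends on the stdx and vfs crates:

use stdx::hash::{NoHashHashMap, NoHashHashSet};
use vfs::FileId;

// Hypothetical helpers illustrating the swap: NoHashHashMap/NoHashHashSet are
// drop-in replacements for FxHashMap/FxHashSet when the key is an integer
// newtype such as FileId; the "hash" is simply the inner u32 itself.
fn line_counts(files: &[(FileId, String)]) -> NoHashHashMap<FileId, usize> {
    let mut counts = NoHashHashMap::default();
    for (file_id, text) in files {
        counts.insert(*file_id, text.lines().count());
    }
    counts
}

fn touched_files(edited: &[FileId]) -> NoHashHashSet<FileId> {
    edited.iter().copied().collect()
}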

Cargo.lock

@@ -704,6 +704,7 @@ dependencies = [
"ide-db",
"itertools",
"parser",
"stdx",
"syntax",
"test-utils",
"text-edit",
@@ -1921,6 +1922,7 @@ dependencies = [
"indexmap",
"paths",
"rustc-hash",
"stdx",
]
[[package]]


@@ -9,10 +9,11 @@
use std::{fmt, ops, panic::RefUnwindSafe, str::FromStr, sync::Arc};
use cfg::CfgOptions;
use rustc_hash::{FxHashMap, FxHashSet};
use rustc_hash::FxHashMap;
use stdx::hash::{NoHashHashMap, NoHashHashSet};
use syntax::SmolStr;
use tt::Subtree;
use vfs::{file_set::FileSet, FileId, VfsPath};
use vfs::{file_set::FileSet, AnchoredPath, FileId, VfsPath};
/// Files are grouped into source roots. A source root is a directory on the
/// file systems which is watched for changes. Typically it corresponds to a
@@ -31,22 +32,30 @@ pub struct SourceRoot {
/// Libraries are considered mostly immutable, this assumption is used to
/// optimize salsa's query structure
pub is_library: bool,
pub(crate) file_set: FileSet,
file_set: FileSet,
}
impl SourceRoot {
pub fn new_local(file_set: FileSet) -> SourceRoot {
SourceRoot { is_library: false, file_set }
}
pub fn new_library(file_set: FileSet) -> SourceRoot {
SourceRoot { is_library: true, file_set }
}
pub fn path_for_file(&self, file: &FileId) -> Option<&VfsPath> {
self.file_set.path_for_file(file)
}
pub fn file_for_path(&self, path: &VfsPath) -> Option<&FileId> {
self.file_set.file_for_path(path)
}
pub fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
self.file_set.resolve_path(path)
}
pub fn iter(&self) -> impl Iterator<Item = FileId> + '_ {
self.file_set.iter()
}
@@ -72,12 +81,19 @@ impl SourceRoot {
/// <https://github.com/rust-lang/rust-analyzer/blob/master/docs/dev/architecture.md#serialization>
#[derive(Debug, Clone, Default /* Serialize, Deserialize */)]
pub struct CrateGraph {
arena: FxHashMap<CrateId, CrateData>,
arena: NoHashHashMap<CrateId, CrateData>,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub struct CrateId(pub u32);
impl stdx::hash::NoHashHashable for CrateId {}
impl std::hash::Hash for CrateId {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
self.0.hash(state);
}
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct CrateName(SmolStr);
@@ -342,7 +358,7 @@ impl CrateGraph {
// Check if adding a dep from `from` to `to` creates a cycle. To figure
// that out, look for a path in the *opposite* direction, from `to` to
// `from`.
if let Some(path) = self.find_path(&mut FxHashSet::default(), dep.crate_id, from) {
if let Some(path) = self.find_path(&mut NoHashHashSet::default(), dep.crate_id, from) {
let path = path.into_iter().map(|it| (it, self[it].display_name.clone())).collect();
let err = CyclicDependenciesError { path };
assert!(err.from().0 == from && err.to().0 == dep.crate_id);
@@ -365,7 +381,7 @@ impl CrateGraph {
/// including the crate itself.
pub fn transitive_deps(&self, of: CrateId) -> impl Iterator<Item = CrateId> {
let mut worklist = vec![of];
let mut deps = FxHashSet::default();
let mut deps = NoHashHashSet::default();
while let Some(krate) = worklist.pop() {
if !deps.insert(krate) {
@@ -382,10 +398,10 @@ impl CrateGraph {
/// including the crate itself.
pub fn transitive_rev_deps(&self, of: CrateId) -> impl Iterator<Item = CrateId> {
let mut worklist = vec![of];
let mut rev_deps = FxHashSet::default();
let mut rev_deps = NoHashHashSet::default();
rev_deps.insert(of);
let mut inverted_graph = FxHashMap::<_, Vec<_>>::default();
let mut inverted_graph = NoHashHashMap::<_, Vec<_>>::default();
self.arena.iter().for_each(|(&krate, data)| {
data.dependencies
.iter()
@@ -409,7 +425,7 @@ impl CrateGraph {
/// come before the crate itself).
pub fn crates_in_topological_order(&self) -> Vec<CrateId> {
let mut res = Vec::new();
let mut visited = FxHashSet::default();
let mut visited = NoHashHashSet::default();
for krate in self.arena.keys().copied() {
go(self, &mut visited, &mut res, krate);
@@ -419,7 +435,7 @@ impl CrateGraph {
fn go(
graph: &CrateGraph,
visited: &mut FxHashSet<CrateId>,
visited: &mut NoHashHashSet<CrateId>,
res: &mut Vec<CrateId>,
source: CrateId,
) {
@@ -459,7 +475,7 @@ impl CrateGraph {
fn find_path(
&self,
visited: &mut FxHashSet<CrateId>,
visited: &mut NoHashHashSet<CrateId>,
from: CrateId,
to: CrateId,
) -> Option<Vec<CrateId>> {


@@ -8,7 +8,7 @@ pub mod fixture;
use std::{panic, sync::Arc};
use rustc_hash::FxHashSet;
use stdx::hash::NoHashHashSet;
use syntax::{ast, Parse, SourceFile, TextRange, TextSize};
pub use crate::{
@@ -58,7 +58,7 @@ pub trait FileLoader {
/// Text of the file.
fn file_text(&self, file_id: FileId) -> Arc<String>;
fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId>;
fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>>;
fn relevant_crates(&self, file_id: FileId) -> Arc<NoHashHashSet<CrateId>>;
}
/// Database which stores all significant input facts: source code and project
@@ -94,10 +94,10 @@ pub trait SourceDatabaseExt: SourceDatabase {
#[salsa::input]
fn source_root(&self, id: SourceRootId) -> Arc<SourceRoot>;
fn source_root_crates(&self, id: SourceRootId) -> Arc<FxHashSet<CrateId>>;
fn source_root_crates(&self, id: SourceRootId) -> Arc<NoHashHashSet<CrateId>>;
}
fn source_root_crates(db: &dyn SourceDatabaseExt, id: SourceRootId) -> Arc<FxHashSet<CrateId>> {
fn source_root_crates(db: &dyn SourceDatabaseExt, id: SourceRootId) -> Arc<NoHashHashSet<CrateId>> {
let graph = db.crate_graph();
let res = graph
.iter()
@@ -120,10 +120,10 @@ impl<T: SourceDatabaseExt> FileLoader for FileLoaderDelegate<&'_ T> {
// FIXME: this *somehow* should be platform agnostic...
let source_root = self.0.file_source_root(path.anchor);
let source_root = self.0.source_root(source_root);
source_root.file_set.resolve_path(path)
source_root.resolve_path(path)
}
fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> {
fn relevant_crates(&self, file_id: FileId) -> Arc<NoHashHashSet<CrateId>> {
let _p = profile::span("relevant_crates");
let source_root = self.0.file_source_root(file_id);
self.0.source_root_crates(source_root)


@@ -10,7 +10,7 @@ use base_db::{
SourceDatabase, Upcast,
};
use hir_expand::{db::AstDatabase, InFile};
use rustc_hash::FxHashSet;
use stdx::hash::NoHashHashSet;
use syntax::{algo, ast, AstNode};
use crate::{
@@ -76,7 +76,7 @@ impl FileLoader for TestDB {
fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
FileLoaderDelegate(self).resolve_path(path)
}
fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> {
fn relevant_crates(&self, file_id: FileId) -> Arc<NoHashHashSet<CrateId>> {
FileLoaderDelegate(self).relevant_crates(file_id)
}
}


@@ -10,7 +10,7 @@ use base_db::{
};
use hir_def::{db::DefDatabase, ModuleId};
use hir_expand::db::AstDatabase;
use rustc_hash::{FxHashMap, FxHashSet};
use stdx::hash::{NoHashHashMap, NoHashHashSet};
use syntax::TextRange;
use test_utils::extract_annotations;
@@ -80,7 +80,7 @@ impl FileLoader for TestDB {
fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
FileLoaderDelegate(self).resolve_path(path)
}
fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> {
fn relevant_crates(&self, file_id: FileId) -> Arc<NoHashHashSet<CrateId>> {
FileLoaderDelegate(self).relevant_crates(file_id)
}
}
@@ -102,7 +102,7 @@ impl TestDB {
self.module_for_file_opt(file_id).unwrap()
}
pub(crate) fn extract_annotations(&self) -> FxHashMap<FileId, Vec<(TextRange, String)>> {
pub(crate) fn extract_annotations(&self) -> NoHashHashMap<FileId, Vec<(TextRange, String)>> {
let mut files = Vec::new();
let crate_graph = self.crate_graph();
for krate in crate_graph.iter() {


@@ -52,6 +52,7 @@ use hir::{
db::{AstDatabase, DefDatabase, HirDatabase},
symbols::FileSymbolKind,
};
use stdx::hash::NoHashHashSet;
use crate::{line_index::LineIndex, symbol_index::SymbolsDatabase};
pub use rustc_hash::{FxHashMap, FxHashSet, FxHasher};
@@ -118,7 +119,7 @@ impl FileLoader for RootDatabase {
fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
FileLoaderDelegate(self).resolve_path(path)
}
fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> {
fn relevant_crates(&self, file_id: FileId) -> Arc<NoHashHashSet<CrateId>> {
FileLoaderDelegate(self).relevant_crates(file_id)
}
}


@@ -2,7 +2,7 @@
//! representation.
use std::{iter, mem};
use rustc_hash::FxHashMap;
use stdx::hash::NoHashHashMap;
use syntax::{TextRange, TextSize};
#[derive(Clone, Debug, PartialEq, Eq)]
@@ -10,7 +10,7 @@ pub struct LineIndex {
/// Offset the the beginning of each line, zero-based
pub(crate) newlines: Vec<TextSize>,
/// List of non-ASCII characters on each line
pub(crate) utf16_lines: FxHashMap<u32, Vec<Utf16Char>>,
pub(crate) utf16_lines: NoHashHashMap<u32, Vec<Utf16Char>>,
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
@@ -55,7 +55,7 @@ impl Utf16Char {
impl LineIndex {
pub fn new(text: &str) -> LineIndex {
let mut utf16_lines = FxHashMap::default();
let mut utf16_lines = NoHashHashMap::default();
let mut utf16_chars = Vec::new();
let mut newlines = vec![0.into()];


@@ -9,7 +9,7 @@ use std::{mem, sync::Arc};
use base_db::{FileId, FileRange, SourceDatabase, SourceDatabaseExt};
use hir::{DefWithBody, HasAttrs, HasSource, InFile, ModuleSource, Semantics, Visibility};
use once_cell::unsync::Lazy;
use rustc_hash::FxHashMap;
use stdx::hash::NoHashHashMap;
use syntax::{ast, match_ast, AstNode, TextRange, TextSize};
use crate::{
@@ -20,7 +20,7 @@ use crate::{
#[derive(Debug, Default, Clone)]
pub struct UsageSearchResult {
pub references: FxHashMap<FileId, Vec<FileReference>>,
pub references: NoHashHashMap<FileId, Vec<FileReference>>,
}
impl UsageSearchResult {
@@ -45,7 +45,7 @@ impl UsageSearchResult {
impl IntoIterator for UsageSearchResult {
type Item = (FileId, Vec<FileReference>);
type IntoIter = <FxHashMap<FileId, Vec<FileReference>> as IntoIterator>::IntoIter;
type IntoIter = <NoHashHashMap<FileId, Vec<FileReference>> as IntoIterator>::IntoIter;
fn into_iter(self) -> Self::IntoIter {
self.references.into_iter()
@@ -78,17 +78,17 @@ pub enum ReferenceCategory {
/// e.g. for things like local variables.
#[derive(Clone, Debug)]
pub struct SearchScope {
entries: FxHashMap<FileId, Option<TextRange>>,
entries: NoHashHashMap<FileId, Option<TextRange>>,
}
impl SearchScope {
fn new(entries: FxHashMap<FileId, Option<TextRange>>) -> SearchScope {
fn new(entries: NoHashHashMap<FileId, Option<TextRange>>) -> SearchScope {
SearchScope { entries }
}
/// Build a search scope spanning the entire crate graph of files.
fn crate_graph(db: &RootDatabase) -> SearchScope {
let mut entries = FxHashMap::default();
let mut entries = NoHashHashMap::default();
let graph = db.crate_graph();
for krate in graph.iter() {
@@ -102,7 +102,7 @@ impl SearchScope {
/// Build a search scope spanning all the reverse dependencies of the given crate.
fn reverse_dependencies(db: &RootDatabase, of: hir::Crate) -> SearchScope {
let mut entries = FxHashMap::default();
let mut entries = NoHashHashMap::default();
for rev_dep in of.transitive_reverse_dependencies(db) {
let root_file = rev_dep.root_file(db);
let source_root_id = db.file_source_root(root_file);
@@ -117,14 +117,12 @@ impl SearchScope {
let root_file = of.root_file(db);
let source_root_id = db.file_source_root(root_file);
let source_root = db.source_root(source_root_id);
SearchScope {
entries: source_root.iter().map(|id| (id, None)).collect::<FxHashMap<_, _>>(),
}
SearchScope { entries: source_root.iter().map(|id| (id, None)).collect() }
}
/// Build a search scope spanning the given module and all its submodules.
fn module_and_children(db: &RootDatabase, module: hir::Module) -> SearchScope {
let mut entries = FxHashMap::default();
let mut entries = NoHashHashMap::default();
let (file_id, range) = {
let InFile { file_id, value } = module.definition_source(db);
@@ -157,7 +155,7 @@ impl SearchScope {
/// Build an empty search scope.
pub fn empty() -> SearchScope {
SearchScope::new(FxHashMap::default())
SearchScope::new(NoHashHashMap::default())
}
/// Build a empty search scope spanning the given file.


@@ -6,8 +6,7 @@
use std::{collections::hash_map::Entry, iter, mem};
use base_db::{AnchoredPathBuf, FileId};
use rustc_hash::FxHashMap;
use stdx::never;
use stdx::{hash::NoHashHashMap, never};
use syntax::{algo, AstNode, SyntaxNode, SyntaxNodePtr, TextRange, TextSize};
use text_edit::{TextEdit, TextEditBuilder};
@@ -15,7 +14,7 @@ use crate::SnippetCap;
#[derive(Default, Debug, Clone)]
pub struct SourceChange {
pub source_file_edits: FxHashMap<FileId, TextEdit>,
pub source_file_edits: NoHashHashMap<FileId, TextEdit>,
pub file_system_edits: Vec<FileSystemEdit>,
pub is_snippet: bool,
}
@@ -24,7 +23,7 @@ impl SourceChange {
/// Creates a new SourceChange with the given label
/// from the edits.
pub fn from_edits(
source_file_edits: FxHashMap<FileId, TextEdit>,
source_file_edits: NoHashHashMap<FileId, TextEdit>,
file_system_edits: Vec<FileSystemEdit>,
) -> Self {
SourceChange { source_file_edits, file_system_edits, is_snippet: false }
@@ -78,8 +77,8 @@ impl Extend<FileSystemEdit> for SourceChange {
}
}
impl From<FxHashMap<FileId, TextEdit>> for SourceChange {
fn from(source_file_edits: FxHashMap<FileId, TextEdit>) -> SourceChange {
impl From<NoHashHashMap<FileId, TextEdit>> for SourceChange {
fn from(source_file_edits: NoHashHashMap<FileId, TextEdit>) -> SourceChange {
SourceChange { source_file_edits, file_system_edits: Vec::new(), is_snippet: false }
}
}


@@ -20,6 +20,7 @@ parser = { path = "../parser", version = "0.0.0" }
syntax = { path = "../syntax", version = "0.0.0" }
ide-db = { path = "../ide-db", version = "0.0.0" }
hir = { path = "../hir", version = "0.0.0" }
stdx = { path = "../stdx", version = "0.0.0" }
[dev-dependencies]
test-utils = { path = "../test-utils" }


@@ -86,11 +86,9 @@ pub use crate::{errors::SsrError, from_comment::ssr_from_comment, matching::Matc
use crate::{errors::bail, matching::MatchFailureReason};
use hir::Semantics;
use ide_db::{
base_db::{FileId, FilePosition, FileRange},
FxHashMap,
};
use ide_db::base_db::{FileId, FilePosition, FileRange};
use resolving::ResolvedRule;
use stdx::hash::NoHashHashMap;
use syntax::{ast, AstNode, SyntaxNode, TextRange};
use text_edit::TextEdit;
@@ -170,9 +168,9 @@ impl<'db> MatchFinder<'db> {
}
/// Finds matches for all added rules and returns edits for all found matches.
pub fn edits(&self) -> FxHashMap<FileId, TextEdit> {
pub fn edits(&self) -> NoHashHashMap<FileId, TextEdit> {
use ide_db::base_db::SourceDatabaseExt;
let mut matches_by_file = FxHashMap::default();
let mut matches_by_file = NoHashHashMap::default();
for m in self.matches().matches {
matches_by_file
.entry(m.range.file_id)


@@ -12,8 +12,9 @@ use ide_db::{
salsa::{Database, ParallelDatabase, Snapshot},
Cancelled, CrateGraph, CrateId, SourceDatabase, SourceDatabaseExt,
},
FxHashSet, FxIndexMap,
FxIndexMap,
};
use stdx::hash::NoHashHashSet;
use crate::RootDatabase;
@@ -141,7 +142,7 @@ pub(crate) fn parallel_prime_caches(
}
}
fn compute_crates_to_prime(db: &RootDatabase, graph: &CrateGraph) -> FxHashSet<CrateId> {
fn compute_crates_to_prime(db: &RootDatabase, graph: &CrateGraph) -> NoHashHashSet<CrateId> {
// We're only interested in the workspace crates and the `ImportMap`s of their direct
// dependencies, though in practice the latter also compute the `DefMap`s.
// We don't prime transitive dependencies because they're generally not visible in


@@ -14,8 +14,9 @@ use ide_db::{
base_db::FileId,
defs::{Definition, NameClass, NameRefClass},
search::{ReferenceCategory, SearchScope, UsageSearchResult},
FxHashMap, RootDatabase,
RootDatabase,
};
use stdx::hash::NoHashHashMap;
use syntax::{
algo::find_node_at_offset,
ast::{self, HasName},
@@ -29,7 +30,7 @@ use crate::{FilePosition, NavigationTarget, TryToNav};
#[derive(Debug, Clone)]
pub struct ReferenceSearchResult {
pub declaration: Option<Declaration>,
pub references: FxHashMap<FileId, Vec<(TextRange, Option<ReferenceCategory>)>>,
pub references: NoHashHashMap<FileId, Vec<(TextRange, Option<ReferenceCategory>)>>,
}
#[derive(Debug, Clone)]


@@ -3,8 +3,9 @@ use std::sync::Arc;
use dot::{Id, LabelText};
use ide_db::{
base_db::{CrateGraph, CrateId, Dependency, SourceDatabase, SourceDatabaseExt},
FxHashSet, RootDatabase,
RootDatabase,
};
use stdx::hash::NoHashHashSet;
// Feature: View Crate Graph
//
@@ -41,7 +42,7 @@ pub(crate) fn view_crate_graph(db: &RootDatabase, full: bool) -> Result<String,
struct DotCrateGraph {
graph: Arc<CrateGraph>,
crates_to_render: FxHashSet<CrateId>,
crates_to_render: NoHashHashSet<CrateId>,
}
type Edge<'a> = (CrateId, &'a Dependency);


@@ -13,7 +13,7 @@ use cfg::{CfgDiff, CfgOptions};
use paths::{AbsPath, AbsPathBuf};
use rustc_hash::{FxHashMap, FxHashSet};
use semver::Version;
use stdx::always;
use stdx::{always, hash::NoHashHashMap};
use crate::{
build_scripts::BuildScriptOutput,
@@ -471,7 +471,7 @@ fn project_json_to_crate_graph(
.map(|sysroot| sysroot_to_crate_graph(&mut crate_graph, sysroot, rustc_cfg.clone(), load));
let mut cfg_cache: FxHashMap<&str, Vec<CfgFlag>> = FxHashMap::default();
let crates: FxHashMap<CrateId, CrateId> = project
let crates: NoHashHashMap<CrateId, CrateId> = project
.crates()
.filter_map(|(crate_id, krate)| {
let file_path = &krate.root_module;


@@ -4,11 +4,12 @@ pub(crate) mod to_proto;
use std::{mem, sync::Arc};
use ide::FileId;
use rustc_hash::{FxHashMap, FxHashSet};
use ide_db::FxHashMap;
use stdx::hash::{NoHashHashMap, NoHashHashSet};
use crate::lsp_ext;
pub(crate) type CheckFixes = Arc<FxHashMap<usize, FxHashMap<FileId, Vec<Fix>>>>;
pub(crate) type CheckFixes = Arc<NoHashHashMap<usize, NoHashHashMap<FileId, Vec<Fix>>>>;
#[derive(Debug, Default, Clone)]
pub struct DiagnosticsMapConfig {
@@ -19,12 +20,12 @@ pub struct DiagnosticsMapConfig {
#[derive(Debug, Default, Clone)]
pub(crate) struct DiagnosticCollection {
// FIXME: should be FxHashMap<FileId, Vec<ra_id::Diagnostic>>
pub(crate) native: FxHashMap<FileId, Vec<lsp_types::Diagnostic>>,
// FIXME: should be NoHashHashMap<FileId, Vec<ra_id::Diagnostic>>
pub(crate) native: NoHashHashMap<FileId, Vec<lsp_types::Diagnostic>>,
// FIXME: should be Vec<flycheck::Diagnostic>
pub(crate) check: FxHashMap<usize, FxHashMap<FileId, Vec<lsp_types::Diagnostic>>>,
pub(crate) check: NoHashHashMap<usize, NoHashHashMap<FileId, Vec<lsp_types::Diagnostic>>>,
pub(crate) check_fixes: CheckFixes,
changes: FxHashSet<FileId>,
changes: NoHashHashSet<FileId>,
}
#[derive(Debug, Clone)]
@@ -105,7 +106,7 @@ impl DiagnosticCollection {
native.chain(check)
}
pub(crate) fn take_changes(&mut self) -> Option<FxHashSet<FileId>> {
pub(crate) fn take_changes(&mut self) -> Option<NoHashHashSet<FileId>> {
if self.changes.is_empty() {
return None;
}


@@ -14,6 +14,7 @@ use parking_lot::{Mutex, RwLock};
use proc_macro_api::ProcMacroServer;
use project_model::{CargoWorkspace, ProjectWorkspace, Target, WorkspaceBuildScripts};
use rustc_hash::FxHashMap;
use stdx::hash::NoHashHashMap;
use vfs::AnchoredPathBuf;
use crate::{
@@ -67,7 +68,7 @@ pub(crate) struct GlobalState {
pub(crate) flycheck_sender: Sender<flycheck::Message>,
pub(crate) flycheck_receiver: Receiver<flycheck::Message>,
pub(crate) vfs: Arc<RwLock<(vfs::Vfs, FxHashMap<FileId, LineEndings>)>>,
pub(crate) vfs: Arc<RwLock<(vfs::Vfs, NoHashHashMap<FileId, LineEndings>)>>,
pub(crate) vfs_config_version: u32,
pub(crate) vfs_progress_config_version: u32,
pub(crate) vfs_progress_n_total: usize,
@@ -113,7 +114,7 @@ pub(crate) struct GlobalStateSnapshot {
pub(crate) check_fixes: CheckFixes,
mem_docs: MemDocs,
pub(crate) semantic_tokens_cache: Arc<Mutex<FxHashMap<Url, SemanticTokens>>>,
vfs: Arc<RwLock<(vfs::Vfs, FxHashMap<FileId, LineEndings>)>>,
vfs: Arc<RwLock<(vfs::Vfs, NoHashHashMap<FileId, LineEndings>)>>,
pub(crate) workspaces: Arc<Vec<ProjectWorkspace>>,
}
@@ -157,7 +158,7 @@ impl GlobalState {
flycheck_sender,
flycheck_receiver,
vfs: Arc::new(RwLock::new((vfs::Vfs::default(), FxHashMap::default()))),
vfs: Arc::new(RwLock::new((vfs::Vfs::default(), NoHashHashMap::default()))),
vfs_config_version: 0,
vfs_progress_config_version: 0,
vfs_progress_n_total: 0,

crates/stdx/src/hash.rs

@@ -0,0 +1,87 @@
use std::{
hash::{BuildHasher, Hasher},
marker::PhantomData,
};
pub type NoHashHashMap<K, V> = std::collections::HashMap<K, V, NoHashHasherBuilder<K>>;
pub type NoHashHashSet<K> = std::collections::HashSet<K, NoHashHasherBuilder<K>>;
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct NoHashHasherBuilder<T>(PhantomData<T>);
impl<T> Default for NoHashHasherBuilder<T> {
fn default() -> Self {
Self(Default::default())
}
}
pub trait NoHashHashable {}
impl NoHashHashable for usize {}
impl NoHashHashable for u32 {}
pub struct NoHashHasher(u64);
impl<T: NoHashHashable> BuildHasher for NoHashHasherBuilder<T> {
type Hasher = NoHashHasher;
fn build_hasher(&self) -> Self::Hasher {
NoHashHasher(0)
}
}
impl Hasher for NoHashHasher {
fn finish(&self) -> u64 {
self.0
}
fn write(&mut self, _: &[u8]) {
unimplemented!("NoHashHasher should only be used for hashing primitive integers")
}
fn write_u8(&mut self, i: u8) {
self.0 = i as u64;
}
fn write_u16(&mut self, i: u16) {
self.0 = i as u64;
}
fn write_u32(&mut self, i: u32) {
self.0 = i as u64;
}
fn write_u64(&mut self, i: u64) {
self.0 = i as u64;
}
fn write_u128(&mut self, i: u128) {
self.0 = i as u64;
}
fn write_usize(&mut self, i: usize) {
self.0 = i as u64;
}
fn write_i8(&mut self, i: i8) {
self.0 = i as u64;
}
fn write_i16(&mut self, i: i16) {
self.0 = i as u64;
}
fn write_i32(&mut self, i: i32) {
self.0 = i as u64;
}
fn write_i64(&mut self, i: i64) {
self.0 = i as u64;
}
fn write_i128(&mut self, i: i128) {
self.0 = i as u64;
}
fn write_isize(&mut self, i: isize) {
self.0 = i as u64;
}
}
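Note that NoHashHasher just keeps the last integer written to it (and byte-slice writes panic via unimplemented!), so it is only meant for keys whose Hash impl performs a single integer write — which is exactly what the manual Hash impls for CrateId (earlier in this diff) and FileId (below) guarantee. A minimal sketch of opting another ID newtype into these maps; LocalDefId here is a hypothetical example, not part of this commit:

use stdx::hash::{NoHashHashable, NoHashHashMap};

// Hypothetical ID newtype, mirroring the FileId/CrateId pattern in this commit.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct LocalDefId(pub u32);

// Opt in by marking the type NoHashHashable and forwarding Hash to the inner
// u32, so the hasher receives exactly one write_u32 call per key.
impl NoHashHashable for LocalDefId {}
impl std::hash::Hash for LocalDefId {
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        self.0.hash(state);
    }
}

fn demo() {
    let mut map: NoHashHashMap<LocalDefId, &str> = NoHashHashMap::default();
    map.insert(LocalDefId(0), "zero");
    assert_eq!(map.get(&LocalDefId(0)), Some(&"zero"));
}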


@@ -7,6 +7,7 @@ use std::{cmp::Ordering, ops, time::Instant};
use std::{io as sio, iter};
mod macros;
pub mod hash;
pub mod process;
pub mod panic_context;
pub mod non_empty_vec;


@@ -12,6 +12,7 @@ doctest = false
[dependencies]
rustc-hash = "1.1.0"
fst = "0.4.7"
indexmap = "1.9.1"
paths = { path = "../paths", version = "0.0.0" }
indexmap = "1.9.1"
stdx = { path = "../stdx", version = "0.0.0" }


@@ -6,6 +6,7 @@ use std::fmt;
use fst::{IntoStreamer, Streamer};
use rustc_hash::FxHashMap;
use stdx::hash::NoHashHashMap;
use crate::{AnchoredPath, FileId, Vfs, VfsPath};
@@ -13,7 +14,7 @@ use crate::{AnchoredPath, FileId, Vfs, VfsPath};
#[derive(Default, Clone, Eq, PartialEq)]
pub struct FileSet {
files: FxHashMap<VfsPath, FileId>,
paths: FxHashMap<FileId, VfsPath>,
paths: NoHashHashMap<FileId, VfsPath>,
}
impl FileSet {


@@ -59,9 +59,16 @@ pub use paths::{AbsPath, AbsPathBuf};
/// Handle to a file in [`Vfs`]
///
/// Most functions in rust-analyzer use this when they need to refer to a file.
#[derive(Copy, Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)]
#[derive(Copy, Clone, Debug, Ord, PartialOrd, Eq, PartialEq)]
pub struct FileId(pub u32);
impl stdx::hash::NoHashHashable for FileId {}
impl std::hash::Hash for FileId {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
self.0.hash(state);
}
}
/// Storage for all files read by rust-analyzer.
///
/// For more information see the [crate-level](crate) documentation.