6784: Introduce anchored_path r=matklad a=matklad

bors r+
🤖

Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>
commit 99118eeccd
bors[bot], committed by GitHub, 2020-12-09 16:07:33 +00:00
16 changed files with 130 additions and 72 deletions

@@ -18,7 +18,7 @@ pub use crate::{
     },
 };
 pub use salsa;
-pub use vfs::{file_set::FileSet, FileId, VfsPath};
+pub use vfs::{file_set::FileSet, AnchoredPath, AnchoredPathBuf, FileId, VfsPath};
 
 #[macro_export]
 macro_rules! impl_intern_key {
@@ -91,12 +91,7 @@ pub const DEFAULT_LRU_CAP: usize = 128;
 pub trait FileLoader {
     /// Text of the file.
     fn file_text(&self, file_id: FileId) -> Arc<String>;
-    /// Note that we intentionally accept a `&str` and not a `&Path` here. This
-    /// method exists to handle `#[path = "/some/path.rs"] mod foo;` and such,
-    /// so the input is guaranteed to be utf-8 string. One might be tempted to
-    /// introduce some kind of "utf-8 path with / separators", but that's a bad idea. Behold
-    /// `#[path = "C://no/way"]`
-    fn resolve_path(&self, anchor: FileId, path: &str) -> Option<FileId>;
+    fn resolve_path(&self, path: AnchoredPath) -> Option<FileId>;
     fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>>;
 }
@@ -155,11 +150,11 @@ impl<T: SourceDatabaseExt> FileLoader for FileLoaderDelegate<&'_ T> {
     fn file_text(&self, file_id: FileId) -> Arc<String> {
         SourceDatabaseExt::file_text(self.0, file_id)
     }
-    fn resolve_path(&self, anchor: FileId, path: &str) -> Option<FileId> {
+    fn resolve_path(&self, path: AnchoredPath) -> Option<FileId> {
         // FIXME: this *somehow* should be platform agnostic...
-        let source_root = self.0.file_source_root(anchor);
+        let source_root = self.0.file_source_root(path.anchor);
         let source_root = self.0.source_root(source_root);
-        source_root.file_set.resolve_path(anchor, path)
+        source_root.file_set.resolve_path(path)
     }
     fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> {
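
The change to `FileLoader::resolve_path` above is purely a signature change: the anchor `FileId` and the relative path now travel together as one value. A minimal self-contained sketch of the before/after call shape (stub types and a dummy loader for illustration, not the real `base_db` API):

```rust
// Stand-in types that mirror the shape of this commit's `FileId` / `AnchoredPath`.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct FileId(u32);

#[derive(Clone, Copy, Debug)]
struct AnchoredPath<'a> {
    anchor: FileId,
    path: &'a str,
}

trait FileLoader {
    // Old shape: fn resolve_path(&self, anchor: FileId, path: &str) -> Option<FileId>;
    fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId>;
}

struct DummyLoader;

impl FileLoader for DummyLoader {
    fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
        // A real implementation consults the anchor's file set; this stub just
        // pretends a sibling file always exists.
        Some(FileId(path.anchor.0 + 1))
    }
}

fn main() {
    let db = DummyLoader;
    let anchor = FileId(0);
    // Before: db.resolve_path(anchor, "foo.rs")
    // After: the anchor and the relative path are bundled into one value.
    assert_eq!(db.resolve_path(AnchoredPath { anchor, path: "foo.rs" }), Some(FileId(1)));
}
```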

@@ -1,5 +1,5 @@
 //! This module resolves `mod foo;` declaration to file.
-use base_db::FileId;
+use base_db::{AnchoredPath, FileId};
 use hir_expand::name::Name;
 use syntax::SmolStr;
 use test_utils::mark;
@@ -77,7 +77,8 @@ impl ModDir {
         };
         for candidate in candidate_files.iter() {
-            if let Some(file_id) = db.resolve_path(file_id, candidate.as_str()) {
+            let path = AnchoredPath { anchor: file_id, path: candidate.as_str() };
+            if let Some(file_id) = db.resolve_path(path) {
                 let is_mod_rs = candidate.ends_with("mod.rs");
                 let (dir_path, root_non_dir_owner) = if is_mod_rs || attr_path.is_some() {

@@ -5,8 +5,8 @@ use std::{
     sync::{Arc, Mutex},
 };
 
-use base_db::SourceDatabase;
 use base_db::{salsa, CrateId, FileId, FileLoader, FileLoaderDelegate, Upcast};
+use base_db::{AnchoredPath, SourceDatabase};
 use hir_expand::db::AstDatabase;
 use hir_expand::diagnostics::Diagnostic;
 use hir_expand::diagnostics::DiagnosticSinkBuilder;
@@ -63,8 +63,8 @@ impl FileLoader for TestDB {
     fn file_text(&self, file_id: FileId) -> Arc<String> {
         FileLoaderDelegate(self).file_text(file_id)
     }
-    fn resolve_path(&self, anchor: FileId, path: &str) -> Option<FileId> {
-        FileLoaderDelegate(self).resolve_path(anchor, path)
+    fn resolve_path(&self, path: AnchoredPath) -> Option<FileId> {
+        FileLoaderDelegate(self).resolve_path(path)
     }
     fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> {
         FileLoaderDelegate(self).relevant_crates(file_id)

@@ -4,7 +4,7 @@ use crate::{
     MacroDefId, MacroDefKind, TextSize,
 };
 
-use base_db::FileId;
+use base_db::{AnchoredPath, FileId};
 use either::Either;
 use mbe::{parse_to_token_tree, ExpandResult};
 use parser::FragmentKind;
@@ -324,7 +324,8 @@ fn relative_file(
     allow_recursion: bool,
 ) -> Option<FileId> {
     let call_site = call_id.as_file().original_file(db);
-    let res = db.resolve_path(call_site, path)?;
+    let path = AnchoredPath { anchor: call_site, path };
+    let res = db.resolve_path(path)?;
     // Prevent include itself
     if res == call_site && !allow_recursion {
         None

@@ -5,7 +5,7 @@ use std::{
     sync::{Arc, Mutex},
 };
 
-use base_db::{salsa, CrateId, FileId, FileLoader, FileLoaderDelegate};
+use base_db::{salsa, AnchoredPath, CrateId, FileId, FileLoader, FileLoaderDelegate};
 use rustc_hash::FxHashSet;
 
 #[salsa::database(
@@ -40,8 +40,8 @@ impl FileLoader for TestDB {
     fn file_text(&self, file_id: FileId) -> Arc<String> {
         FileLoaderDelegate(self).file_text(file_id)
     }
-    fn resolve_path(&self, anchor: FileId, path: &str) -> Option<FileId> {
-        FileLoaderDelegate(self).resolve_path(anchor, path)
+    fn resolve_path(&self, path: AnchoredPath) -> Option<FileId> {
+        FileLoaderDelegate(self).resolve_path(path)
     }
     fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> {
         FileLoaderDelegate(self).relevant_crates(file_id)

@@ -5,7 +5,9 @@ use std::{
     sync::{Arc, Mutex},
 };
 
-use base_db::{salsa, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast};
+use base_db::{
+    salsa, AnchoredPath, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast,
+};
 use hir_def::{db::DefDatabase, ModuleId};
 use hir_expand::db::AstDatabase;
 use rustc_hash::{FxHashMap, FxHashSet};
@@ -67,8 +69,8 @@ impl FileLoader for TestDB {
     fn file_text(&self, file_id: FileId) -> Arc<String> {
         FileLoaderDelegate(self).file_text(file_id)
     }
-    fn resolve_path(&self, anchor: FileId, path: &str) -> Option<FileId> {
-        FileLoaderDelegate(self).resolve_path(anchor, path)
+    fn resolve_path(&self, path: AnchoredPath) -> Option<FileId> {
+        FileLoaderDelegate(self).resolve_path(path)
     }
     fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> {
         FileLoaderDelegate(self).relevant_crates(file_id)

@@ -610,10 +610,12 @@ fn test_fn() {
                 source_file_edits: [],
                 file_system_edits: [
                     CreateFile {
+                        dst: AnchoredPathBuf {
                             anchor: FileId(
                                 0,
                             ),
-                            dst: "foo.rs",
+                            path: "foo.rs",
+                        },
                     },
                 ],
                 is_snippet: false,

@@ -8,7 +8,7 @@ use hir::{
     },
     HasSource, HirDisplay, Semantics, VariantDef,
 };
-use ide_db::base_db::FileId;
+use ide_db::base_db::{AnchoredPathBuf, FileId};
 use ide_db::{
     source_change::{FileSystemEdit, SourceFileEdit},
     RootDatabase,
@@ -36,8 +36,10 @@ impl DiagnosticWithFix for UnresolvedModule {
         Some(Fix::new(
             "Create module",
             FileSystemEdit::CreateFile {
+                dst: AnchoredPathBuf {
                     anchor: self.file.original_file(sema.db),
-                    dst: self.candidate.clone(),
+                    path: self.candidate.clone(),
+                },
             }
             .into(),
             unresolved_module.syntax().text_range(),

@@ -6,7 +6,7 @@ use std::{
 };
 
 use hir::{Module, ModuleDef, ModuleSource, Semantics};
-use ide_db::base_db::{FileRange, SourceDatabaseExt};
+use ide_db::base_db::{AnchoredPathBuf, FileRange, SourceDatabaseExt};
 use ide_db::{
     defs::{Definition, NameClass, NameRefClass},
     RootDatabase,
@@ -182,12 +182,13 @@ fn rename_mod(
     match src.value {
         ModuleSource::SourceFile(..) => {
             // mod is defined in path/to/dir/mod.rs
-            let dst = if module.is_mod_rs(sema.db) {
+            let path = if module.is_mod_rs(sema.db) {
                 format!("../{}/mod.rs", new_name)
             } else {
                 format!("{}.rs", new_name)
             };
-            let move_file = FileSystemEdit::MoveFile { src: file_id, anchor: file_id, dst };
+            let dst = AnchoredPathBuf { anchor: file_id, path };
+            let move_file = FileSystemEdit::MoveFile { src: file_id, dst };
            file_system_edits.push(move_file);
        }
        ModuleSource::Module(..) => {}
@@ -771,10 +772,12 @@ mod foo<|>;
                    src: FileId(
                        2,
                    ),
+                   dst: AnchoredPathBuf {
                        anchor: FileId(
                            2,
                        ),
-                       dst: "foo2.rs",
+                       path: "foo2.rs",
+                   },
                },
            ],
            is_snippet: false,
@@ -837,10 +840,12 @@ use crate::foo<|>::FooContent;
                    src: FileId(
                        1,
                    ),
+                   dst: AnchoredPathBuf {
                        anchor: FileId(
                            1,
                        ),
-                       dst: "quux.rs",
+                       path: "quux.rs",
+                   },
                },
            ],
            is_snippet: false,
@@ -884,10 +889,12 @@ mod fo<|>o;
                    src: FileId(
                        1,
                    ),
+                   dst: AnchoredPathBuf {
                        anchor: FileId(
                            1,
                        ),
-                       dst: "../foo2/mod.rs",
+                       path: "../foo2/mod.rs",
+                   },
                },
            ],
            is_snippet: false,
@@ -932,10 +939,12 @@ mod outer { mod fo<|>o; }
                    src: FileId(
                        1,
                    ),
+                   dst: AnchoredPathBuf {
                        anchor: FileId(
                            1,
                        ),
-                       dst: "bar.rs",
+                       path: "bar.rs",
+                   },
                },
            ],
            is_snippet: false,
@@ -1016,10 +1025,12 @@ pub mod foo<|>;
                    src: FileId(
                        2,
                    ),
+                   dst: AnchoredPathBuf {
                        anchor: FileId(
                            2,
                        ),
-                       dst: "foo2.rs",
+                       path: "foo2.rs",
+                   },
                },
            ],
            is_snippet: false,

@@ -19,8 +19,8 @@ use std::{fmt, sync::Arc};
 use base_db::{
     salsa::{self, Durability},
-    Canceled, CheckCanceled, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase,
-    Upcast,
+    AnchoredPath, Canceled, CheckCanceled, CrateId, FileId, FileLoader, FileLoaderDelegate,
+    SourceDatabase, Upcast,
 };
 use hir::db::{AstDatabase, DefDatabase, HirDatabase};
 use rustc_hash::FxHashSet;
@@ -72,8 +72,8 @@ impl FileLoader for RootDatabase {
     fn file_text(&self, file_id: FileId) -> Arc<String> {
         FileLoaderDelegate(self).file_text(file_id)
     }
-    fn resolve_path(&self, anchor: FileId, path: &str) -> Option<FileId> {
-        FileLoaderDelegate(self).resolve_path(anchor, path)
+    fn resolve_path(&self, path: AnchoredPath) -> Option<FileId> {
+        FileLoaderDelegate(self).resolve_path(path)
     }
     fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> {
         FileLoaderDelegate(self).relevant_crates(file_id)

@@ -3,7 +3,7 @@
 //!
 //! It can be viewed as a dual for `AnalysisChange`.
 
-use base_db::FileId;
+use base_db::{AnchoredPathBuf, FileId};
 use text_edit::TextEdit;
 
 #[derive(Default, Debug, Clone)]
@@ -44,8 +44,8 @@ impl From<Vec<SourceFileEdit>> for SourceChange {
 #[derive(Debug, Clone)]
 pub enum FileSystemEdit {
-    CreateFile { anchor: FileId, dst: String },
-    MoveFile { src: FileId, anchor: FileId, dst: String },
+    CreateFile { dst: AnchoredPathBuf },
+    MoveFile { src: FileId, dst: AnchoredPathBuf },
 }
 
 impl From<FileSystemEdit> for SourceChange {

@@ -13,6 +13,7 @@ use lsp_types::{SemanticTokens, Url};
 use parking_lot::{Mutex, RwLock};
 use project_model::{CargoWorkspace, ProcMacroClient, ProjectWorkspace, Target};
 use rustc_hash::FxHashMap;
+use vfs::AnchoredPathBuf;
 
 use crate::{
     config::Config,
@@ -268,10 +269,10 @@ impl GlobalStateSnapshot {
         Some(self.mem_docs.get(&path)?.version)
     }
 
-    pub(crate) fn anchored_path(&self, file_id: FileId, path: &str) -> Url {
-        let mut base = self.vfs.read().0.file_path(file_id);
+    pub(crate) fn anchored_path(&self, path: &AnchoredPathBuf) -> Url {
+        let mut base = self.vfs.read().0.file_path(path.anchor);
         base.pop();
-        let path = base.join(path).unwrap();
+        let path = base.join(&path.path).unwrap();
         let path = path.as_path().unwrap();
         url_from_abs_path(&path)
     }

@@ -628,17 +628,17 @@ pub(crate) fn resource_op(
     file_system_edit: FileSystemEdit,
 ) -> lsp_types::ResourceOp {
     match file_system_edit {
-        FileSystemEdit::CreateFile { anchor, dst } => {
-            let uri = snap.anchored_path(anchor, &dst);
+        FileSystemEdit::CreateFile { dst } => {
+            let uri = snap.anchored_path(&dst);
             lsp_types::ResourceOp::Create(lsp_types::CreateFile {
                 uri,
                 options: None,
                 annotation: None,
             })
         }
-        FileSystemEdit::MoveFile { src, anchor, dst } => {
+        FileSystemEdit::MoveFile { src, dst } => {
             let old_uri = snap.file_id_to_url(src);
-            let new_uri = snap.anchored_path(anchor, &dst);
+            let new_uri = snap.anchored_path(&dst);
             lsp_types::ResourceOp::Rename(lsp_types::RenameFile {
                 old_uri,
                 new_uri,

@@ -0,0 +1,39 @@
+//! Analysis-level representation of file-system paths.
+//!
+//! The primary goal of this is to losslessly represent paths like
+//!
+//! ```
+//! #[path = "./bar.rs"]
+//! mod foo;
+//! ```
+//!
+//! The first approach one might reach for is to use `PathBuf`. The problem here
+//! is that `PathBuf` depends on the host platform (Windows or Linux), but
+//! rust-analyzer should be able to process `#[path = r"C:\bar.rs"]` on Unix.
+//!
+//! The second attempt is to use a `String`. This also fails, however. Consider a
+//! hypothetical scenario where rust-analyzer operates in a
+//! networked/distributed mode: there is one global instance of rust-analyzer,
+//! which processes requests from different machines. Now, the semantics of
+//! `#[path = "/abs/path.rs"]` actually depend on which file system we are on!
+//! That is, even absolute paths exist only relative to a file system!
+//!
+//! A more realistic scenario here is the virtual VFS paths we use for testing. More
+//! generally, there can be separate "universes" of VFS paths.
+//!
+//! That's why we use an anchored representation -- each path carries information
+//! about the file it originates from. We can then fetch the file-system/"universe"
+//! information from the anchor.
+use crate::FileId;
+
+#[derive(Clone, PartialEq, Eq, Debug)]
+pub struct AnchoredPathBuf {
+    pub anchor: FileId,
+    pub path: String,
+}
+
+#[derive(Clone, Copy, PartialEq, Eq, Debug)]
+pub struct AnchoredPath<'a> {
+    pub anchor: FileId,
+    pub path: &'a str,
+}
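
The two structs above are deliberately minimal: `AnchoredPathBuf` is the owned form stored in long-lived values such as file-system edits, while `AnchoredPath` is the cheap, copyable view passed down to resolution. A self-contained sketch of how the two relate; the `as_path` helper is illustrative only and not part of this commit, which reads `.anchor` and `.path` directly:

```rust
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct FileId(u32); // stand-in for vfs::FileId

/// Owned form: stored in edits such as `FileSystemEdit::MoveFile`.
#[derive(Clone, PartialEq, Eq, Debug)]
struct AnchoredPathBuf {
    anchor: FileId,
    path: String,
}

/// Borrowed form: cheap to copy, handed to `resolve_path`.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct AnchoredPath<'a> {
    anchor: FileId,
    path: &'a str,
}

impl AnchoredPathBuf {
    /// Hypothetical convenience; the real code simply reads the public fields.
    fn as_path(&self) -> AnchoredPath<'_> {
        AnchoredPath { anchor: self.anchor, path: &self.path }
    }
}

fn main() {
    // The anchor says which "universe" of paths "../foo2/mod.rs" is relative to.
    let dst = AnchoredPathBuf { anchor: FileId(7), path: "../foo2/mod.rs".to_string() };
    let view = dst.as_path();
    assert_eq!(view, AnchoredPath { anchor: FileId(7), path: "../foo2/mod.rs" });
}
```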

@@ -7,7 +7,7 @@ use std::fmt;
 use fst::{IntoStreamer, Streamer};
 use rustc_hash::FxHashMap;
 
-use crate::{FileId, Vfs, VfsPath};
+use crate::{AnchoredPath, FileId, Vfs, VfsPath};
 
 #[derive(Default, Clone, Eq, PartialEq)]
 pub struct FileSet {
@@ -19,10 +19,10 @@ impl FileSet {
     pub fn len(&self) -> usize {
         self.files.len()
     }
-    pub fn resolve_path(&self, anchor: FileId, path: &str) -> Option<FileId> {
-        let mut base = self.paths[&anchor].clone();
+    pub fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
+        let mut base = self.paths[&path.anchor].clone();
         base.pop();
-        let path = base.join(path)?;
+        let path = base.join(path.path)?;
         self.files.get(&path).copied()
     }
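
The resolution logic itself is unchanged: look up the anchor's own path, drop its file name, then join the relative path. A toy version with plain strings (the real code goes through `VfsPath` and a path interner, so treat this purely as an illustration of the shape):

```rust
use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct FileId(u32);

#[derive(Clone, Copy, Debug)]
struct AnchoredPath<'a> {
    anchor: FileId,
    path: &'a str,
}

/// Toy file set keyed by plain `/`-separated strings.
struct FileSet {
    paths: HashMap<FileId, String>,
    files: HashMap<String, FileId>,
}

impl FileSet {
    fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
        // Start from the anchor, pop its file name, then join the relative path.
        let mut base: Vec<&str> = self.paths.get(&path.anchor)?.split('/').collect();
        base.pop();
        for segment in path.path.split('/') {
            match segment {
                "" | "." => {}
                ".." => { base.pop()?; }
                _ => base.push(segment),
            }
        }
        self.files.get(&base.join("/")).copied()
    }
}

fn main() {
    let mut set = FileSet { paths: HashMap::new(), files: HashMap::new() };
    for (id, p) in [(0, "/src/lib.rs"), (1, "/src/foo.rs"), (2, "/src/bar/mod.rs")] {
        set.paths.insert(FileId(id), p.to_string());
        set.files.insert(p.to_string(), FileId(id));
    }
    // `mod foo;` anchored at /src/lib.rs resolves to /src/foo.rs ...
    assert_eq!(set.resolve_path(AnchoredPath { anchor: FileId(0), path: "foo.rs" }), Some(FileId(1)));
    // ... and a `#[path = "../lib.rs"]` anchored at /src/bar/mod.rs resolves back to /src/lib.rs.
    assert_eq!(set.resolve_path(AnchoredPath { anchor: FileId(2), path: "../lib.rs" }), Some(FileId(0)));
}
```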

@@ -36,6 +36,7 @@
 //! have a single `FileSet` which unions the two sources.
 mod vfs_path;
 mod path_interner;
+mod anchored_path;
 pub mod file_set;
 pub mod loader;
@@ -43,7 +44,10 @@ use std::{fmt, mem};
 use crate::path_interner::PathInterner;
 
-pub use crate::vfs_path::VfsPath;
+pub use crate::{
+    anchored_path::{AnchoredPath, AnchoredPathBuf},
+    vfs_path::VfsPath,
+};
 pub use paths::{AbsPath, AbsPathBuf};
 
 #[derive(Copy, Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)]