4976: New VFS r=matklad a=matklad

Still a draft, but mostly working already. 

Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>
This commit is contained in:
bors[bot] 2020-06-23 15:53:56 +00:00 committed by GitHub
commit 0c12c4f960
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
46 changed files with 1028 additions and 1001 deletions

50
Cargo.lock generated
View file

@ -354,9 +354,9 @@ checksum = "5f2a4a2034423744d2cc7ca2068453168dcdb82c438419e639a26bd87839c674"
[[package]] [[package]]
name = "fsevent" name = "fsevent"
version = "0.4.0" version = "2.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5ab7d1bd1bd33cc98b0889831b72da23c0aa4df9cec7e0702f46ecea04b35db6" checksum = "97f347202c95c98805c216f9e1df210e8ebaec9fdb2365700a43c10797a35e63"
dependencies = [ dependencies = [
"bitflags", "bitflags",
"fsevent-sys", "fsevent-sys",
@ -364,9 +364,9 @@ dependencies = [
[[package]] [[package]]
name = "fsevent-sys" name = "fsevent-sys"
version = "2.0.1" version = "3.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f41b048a94555da0f42f1d632e2e19510084fb8e303b0daa2816e733fb3644a0" checksum = "77a29c77f1ca394c3e73a9a5d24cfcabb734682d9634fc398f2204a63c994120"
dependencies = [ dependencies = [
"libc", "libc",
] ]
@ -483,9 +483,9 @@ dependencies = [
[[package]] [[package]]
name = "inotify" name = "inotify"
version = "0.7.1" version = "0.8.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4816c66d2c8ae673df83366c18341538f234a26d65a9ecea5c348b453ac1d02f" checksum = "46dd0a94b393c730779ccfd2a872b67b1eb67be3fc33082e733bdb38b5fde4d4"
dependencies = [ dependencies = [
"bitflags", "bitflags",
"inotify-sys", "inotify-sys",
@ -766,11 +766,13 @@ dependencies = [
[[package]] [[package]]
name = "notify" name = "notify"
version = "4.0.15" version = "5.0.0-pre.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "80ae4a7688d1fab81c5bf19c64fc8db920be8d519ce6336ed4e7efe024724dbd" checksum = "77d03607cf88b4b160ba0e9ed425fff3cee3b55ac813f0c685b3a3772da37d0e"
dependencies = [ dependencies = [
"anymap",
"bitflags", "bitflags",
"crossbeam-channel",
"filetime", "filetime",
"fsevent", "fsevent",
"fsevent-sys", "fsevent-sys",
@ -952,7 +954,9 @@ dependencies = [
"relative-path", "relative-path",
"rustc-hash", "rustc-hash",
"salsa", "salsa",
"stdx",
"test_utils", "test_utils",
"vfs",
] ]
[[package]] [[package]]
@ -1232,22 +1236,6 @@ dependencies = [
"smol_str", "smol_str",
] ]
[[package]]
name = "ra_vfs"
version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cbf31a173fc77ec59c27cf39af6baa137b40f4dbd45a8b3eccb1b2e4cfc922c1"
dependencies = [
"crossbeam-channel",
"jod-thread",
"log",
"notify",
"parking_lot",
"relative-path",
"rustc-hash",
"walkdir",
]
[[package]] [[package]]
name = "rand" name = "rand"
version = "0.7.3" version = "0.7.3"
@ -1405,7 +1393,6 @@ dependencies = [
"ra_syntax", "ra_syntax",
"ra_text_edit", "ra_text_edit",
"ra_tt", "ra_tt",
"ra_vfs",
"rand", "rand",
"rustc-hash", "rustc-hash",
"serde", "serde",
@ -1414,6 +1401,8 @@ dependencies = [
"tempfile", "tempfile",
"test_utils", "test_utils",
"threadpool", "threadpool",
"vfs",
"vfs-notify",
"winapi 0.3.8", "winapi 0.3.8",
] ]
@ -1763,12 +1752,23 @@ dependencies = [
[[package]] [[package]]
name = "vfs" name = "vfs"
version = "0.1.0" version = "0.1.0"
dependencies = [
"paths",
"rustc-hash",
]
[[package]]
name = "vfs-notify"
version = "0.1.0"
dependencies = [ dependencies = [
"crossbeam-channel", "crossbeam-channel",
"globset", "globset",
"jod-thread", "jod-thread",
"log",
"notify",
"paths", "paths",
"rustc-hash", "rustc-hash",
"vfs",
"walkdir", "walkdir",
] ]

View file

@ -2,7 +2,7 @@
//! relative paths. //! relative paths.
use std::{ use std::{
convert::{TryFrom, TryInto}, convert::{TryFrom, TryInto},
io, ops, ops,
path::{Component, Path, PathBuf}, path::{Component, Path, PathBuf},
}; };
@ -46,9 +46,6 @@ impl TryFrom<&str> for AbsPathBuf {
} }
impl AbsPathBuf { impl AbsPathBuf {
pub fn canonicalized(path: &Path) -> io::Result<AbsPathBuf> {
path.canonicalize().map(|it| AbsPathBuf::try_from(it).unwrap())
}
pub fn as_path(&self) -> &AbsPath { pub fn as_path(&self) -> &AbsPath {
AbsPath::new_unchecked(self.0.as_path()) AbsPath::new_unchecked(self.0.as_path())
} }

View file

@ -1,10 +1,8 @@
mod generated; mod generated;
use std::sync::Arc;
use hir::Semantics; use hir::Semantics;
use ra_db::{fixture::WithFixture, FileId, FileRange, SourceDatabaseExt}; use ra_db::{fixture::WithFixture, FileId, FileRange, SourceDatabaseExt};
use ra_ide_db::{symbol_index::SymbolsDatabase, RootDatabase}; use ra_ide_db::RootDatabase;
use ra_syntax::TextRange; use ra_syntax::TextRange;
use test_utils::{ use test_utils::{
assert_eq_text, extract_offset, extract_range, extract_range_or_offset, RangeOrOffset, assert_eq_text, extract_offset, extract_range, extract_range_or_offset, RangeOrOffset,
@ -13,11 +11,7 @@ use test_utils::{
use crate::{handlers::Handler, Assist, AssistConfig, AssistContext, Assists}; use crate::{handlers::Handler, Assist, AssistConfig, AssistContext, Assists};
pub(crate) fn with_single_file(text: &str) -> (RootDatabase, FileId) { pub(crate) fn with_single_file(text: &str) -> (RootDatabase, FileId) {
let (mut db, file_id) = RootDatabase::with_single_file(text); RootDatabase::with_single_file(text)
// FIXME: ideally, this should be done by the above `RootDatabase::with_single_file`,
// but it looks like this might need specialization? :(
db.set_local_roots(Arc::new(vec![db.file_source_root(file_id)]));
(db, file_id)
} }
pub(crate) fn check_assist(assist: Handler, ra_fixture_before: &str, ra_fixture_after: &str) { pub(crate) fn check_assist(assist: Handler, ra_fixture_before: &str, ra_fixture_after: &str) {
@ -72,8 +66,7 @@ enum ExpectedResult<'a> {
fn check(handler: Handler, before: &str, expected: ExpectedResult) { fn check(handler: Handler, before: &str, expected: ExpectedResult) {
let (text_without_caret, file_with_caret_id, range_or_offset, db) = if before.contains("//-") { let (text_without_caret, file_with_caret_id, range_or_offset, db) = if before.contains("//-") {
let (mut db, position) = RootDatabase::with_position(before); let (db, position) = RootDatabase::with_position(before);
db.set_local_roots(Arc::new(vec![db.file_source_root(position.file_id)]));
( (
db.file_text(position.file_id).as_ref().to_owned(), db.file_text(position.file_id).as_ref().to_owned(),
position.file_id, position.file_id,

View file

@ -17,3 +17,5 @@ ra_cfg = { path = "../ra_cfg" }
ra_prof = { path = "../ra_prof" } ra_prof = { path = "../ra_prof" }
ra_tt = { path = "../ra_tt" } ra_tt = { path = "../ra_tt" }
test_utils = { path = "../test_utils" } test_utils = { path = "../test_utils" }
vfs = { path = "../vfs" }
stdx = { path = "../stdx" }

View file

@ -57,17 +57,16 @@
//! fn insert_source_code_here() {} //! fn insert_source_code_here() {}
//! " //! "
//! ``` //! ```
use std::{str::FromStr, sync::Arc};
use std::str::FromStr;
use std::sync::Arc;
use ra_cfg::CfgOptions; use ra_cfg::CfgOptions;
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use test_utils::{extract_offset, parse_fixture, parse_single_fixture, FixtureMeta, CURSOR_MARKER}; use test_utils::{extract_offset, parse_fixture, parse_single_fixture, FixtureMeta, CURSOR_MARKER};
use vfs::{file_set::FileSet, VfsPath};
use crate::{ use crate::{
input::CrateName, CrateGraph, CrateId, Edition, Env, FileId, FilePosition, RelativePathBuf, input::CrateName, CrateGraph, CrateId, Edition, Env, FileId, FilePosition, SourceDatabaseExt,
SourceDatabaseExt, SourceRoot, SourceRootId, SourceRoot, SourceRootId,
}; };
pub const WORKSPACE: SourceRootId = SourceRootId(0); pub const WORKSPACE: SourceRootId = SourceRootId(0);
@ -105,10 +104,10 @@ impl<DB: SourceDatabaseExt + Default + 'static> WithFixture for DB {}
fn with_single_file(db: &mut dyn SourceDatabaseExt, ra_fixture: &str) -> FileId { fn with_single_file(db: &mut dyn SourceDatabaseExt, ra_fixture: &str) -> FileId {
let file_id = FileId(0); let file_id = FileId(0);
let rel_path: RelativePathBuf = "/main.rs".into(); let mut file_set = vfs::file_set::FileSet::default();
file_set.insert(file_id, vfs::VfsPath::new_virtual_path("/main.rs".to_string()));
let mut source_root = SourceRoot::new_local(); let source_root = SourceRoot::new_local(file_set);
source_root.insert_file(rel_path.clone(), file_id);
let fixture = parse_single_fixture(ra_fixture); let fixture = parse_single_fixture(ra_fixture);
@ -128,7 +127,6 @@ fn with_single_file(db: &mut dyn SourceDatabaseExt, ra_fixture: &str) -> FileId
meta.cfg, meta.cfg,
meta.env, meta.env,
Default::default(), Default::default(),
Default::default(),
); );
crate_graph crate_graph
} else { } else {
@ -140,13 +138,11 @@ fn with_single_file(db: &mut dyn SourceDatabaseExt, ra_fixture: &str) -> FileId
CfgOptions::default(), CfgOptions::default(),
Env::default(), Env::default(),
Default::default(), Default::default(),
Default::default(),
); );
crate_graph crate_graph
}; };
db.set_file_text(file_id, Arc::new(ra_fixture.to_string())); db.set_file_text(file_id, Arc::new(ra_fixture.to_string()));
db.set_file_relative_path(file_id, rel_path);
db.set_file_source_root(file_id, WORKSPACE); db.set_file_source_root(file_id, WORKSPACE);
db.set_source_root(WORKSPACE, Arc::new(source_root)); db.set_source_root(WORKSPACE, Arc::new(source_root));
db.set_crate_graph(Arc::new(crate_graph)); db.set_crate_graph(Arc::new(crate_graph));
@ -162,7 +158,7 @@ fn with_files(db: &mut dyn SourceDatabaseExt, fixture: &str) -> Option<FilePosit
let mut crate_deps = Vec::new(); let mut crate_deps = Vec::new();
let mut default_crate_root: Option<FileId> = None; let mut default_crate_root: Option<FileId> = None;
let mut source_root = SourceRoot::new_local(); let mut file_set = FileSet::default();
let mut source_root_id = WORKSPACE; let mut source_root_id = WORKSPACE;
let mut source_root_prefix = "/".to_string(); let mut source_root_prefix = "/".to_string();
let mut file_id = FileId(0); let mut file_id = FileId(0);
@ -172,8 +168,8 @@ fn with_files(db: &mut dyn SourceDatabaseExt, fixture: &str) -> Option<FilePosit
for entry in fixture.iter() { for entry in fixture.iter() {
let meta = match ParsedMeta::from(&entry.meta) { let meta = match ParsedMeta::from(&entry.meta) {
ParsedMeta::Root { path } => { ParsedMeta::Root { path } => {
let source_root = std::mem::replace(&mut source_root, SourceRoot::new_local()); let file_set = std::mem::replace(&mut file_set, FileSet::default());
db.set_source_root(source_root_id, Arc::new(source_root)); db.set_source_root(source_root_id, Arc::new(SourceRoot::new_local(file_set)));
source_root_id.0 += 1; source_root_id.0 += 1;
source_root_prefix = path; source_root_prefix = path;
continue; continue;
@ -190,7 +186,6 @@ fn with_files(db: &mut dyn SourceDatabaseExt, fixture: &str) -> Option<FilePosit
meta.cfg, meta.cfg,
meta.env, meta.env,
Default::default(), Default::default(),
Default::default(),
); );
let prev = crates.insert(krate.clone(), crate_id); let prev = crates.insert(krate.clone(), crate_id);
assert!(prev.is_none()); assert!(prev.is_none());
@ -212,9 +207,9 @@ fn with_files(db: &mut dyn SourceDatabaseExt, fixture: &str) -> Option<FilePosit
}; };
db.set_file_text(file_id, Arc::new(text)); db.set_file_text(file_id, Arc::new(text));
db.set_file_relative_path(file_id, meta.path.clone().into());
db.set_file_source_root(file_id, source_root_id); db.set_file_source_root(file_id, source_root_id);
source_root.insert_file(meta.path.into(), file_id); let path = VfsPath::new_virtual_path(meta.path);
file_set.insert(file_id, path.into());
file_id.0 += 1; file_id.0 += 1;
} }
@ -228,7 +223,6 @@ fn with_files(db: &mut dyn SourceDatabaseExt, fixture: &str) -> Option<FilePosit
CfgOptions::default(), CfgOptions::default(),
Env::default(), Env::default(),
Default::default(), Default::default(),
Default::default(),
); );
} else { } else {
for (from, to) in crate_deps { for (from, to) in crate_deps {
@ -238,7 +232,7 @@ fn with_files(db: &mut dyn SourceDatabaseExt, fixture: &str) -> Option<FilePosit
} }
} }
db.set_source_root(source_root_id, Arc::new(source_root)); db.set_source_root(source_root_id, Arc::new(SourceRoot::new_local(file_set)));
db.set_crate_graph(Arc::new(crate_graph)); db.set_crate_graph(Arc::new(crate_graph));
file_position file_position

View file

@ -6,27 +6,15 @@
//! actual IO. See `vfs` and `project_model` in the `rust-analyzer` crate for how //! actual IO. See `vfs` and `project_model` in the `rust-analyzer` crate for how
//! actual IO is done and lowered to input. //! actual IO is done and lowered to input.
use std::{ use std::{fmt, ops, str::FromStr, sync::Arc};
fmt, ops,
path::{Path, PathBuf},
str::FromStr,
sync::Arc,
};
use ra_cfg::CfgOptions; use ra_cfg::CfgOptions;
use ra_syntax::SmolStr; use ra_syntax::SmolStr;
use ra_tt::TokenExpander; use ra_tt::TokenExpander;
use rustc_hash::{FxHashMap, FxHashSet}; use rustc_hash::{FxHashMap, FxHashSet};
use vfs::file_set::FileSet;
use crate::{RelativePath, RelativePathBuf}; pub use vfs::FileId;
/// `FileId` is an integer which uniquely identifies a file. File paths are
/// messy and system-dependent, so most of the code should work directly with
/// `FileId`, without inspecting the path. The mapping between `FileId` and path
/// and `SourceRoot` is constant. A file rename is represented as a pair of
/// deletion/creation.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct FileId(pub u32);
/// Files are grouped into source roots. A source root is a directory on the /// Files are grouped into source roots. A source root is a directory on the
/// file systems which is watched for changes. Typically it corresponds to a /// file systems which is watched for changes. Typically it corresponds to a
@ -45,27 +33,18 @@ pub struct SourceRoot {
/// Libraries are considered mostly immutable, this assumption is used to /// Libraries are considered mostly immutable, this assumption is used to
/// optimize salsa's query structure /// optimize salsa's query structure
pub is_library: bool, pub is_library: bool,
files: FxHashMap<RelativePathBuf, FileId>, pub(crate) file_set: FileSet,
} }
impl SourceRoot { impl SourceRoot {
pub fn new_local() -> SourceRoot { pub fn new_local(file_set: FileSet) -> SourceRoot {
SourceRoot { is_library: false, files: Default::default() } SourceRoot { is_library: false, file_set }
} }
pub fn new_library() -> SourceRoot { pub fn new_library(file_set: FileSet) -> SourceRoot {
SourceRoot { is_library: true, files: Default::default() } SourceRoot { is_library: true, file_set }
} }
pub fn insert_file(&mut self, path: RelativePathBuf, file_id: FileId) { pub fn iter(&self) -> impl Iterator<Item = FileId> + '_ {
self.files.insert(path, file_id); self.file_set.iter()
}
pub fn remove_file(&mut self, path: &RelativePath) {
self.files.remove(path);
}
pub fn walk(&self) -> impl Iterator<Item = FileId> + '_ {
self.files.values().copied()
}
pub fn file_by_relative_path(&self, path: &RelativePath) -> Option<FileId> {
self.files.get(path).copied()
} }
} }
@ -141,7 +120,6 @@ pub struct CrateData {
pub display_name: Option<CrateName>, pub display_name: Option<CrateName>,
pub cfg_options: CfgOptions, pub cfg_options: CfgOptions,
pub env: Env, pub env: Env,
pub extern_source: ExternSource,
pub dependencies: Vec<Dependency>, pub dependencies: Vec<Dependency>,
pub proc_macro: Vec<ProcMacro>, pub proc_macro: Vec<ProcMacro>,
} }
@ -152,22 +130,11 @@ pub enum Edition {
Edition2015, Edition2015,
} }
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct ExternSourceId(pub u32);
#[derive(Default, Debug, Clone, PartialEq, Eq)] #[derive(Default, Debug, Clone, PartialEq, Eq)]
pub struct Env { pub struct Env {
entries: FxHashMap<String, String>, entries: FxHashMap<String, String>,
} }
// FIXME: Redesign vfs for solve the following limitation ?
// Note: Some env variables (e.g. OUT_DIR) are located outside of the
// crate. We store a map to allow remap it to ExternSourceId
#[derive(Default, Debug, Clone, PartialEq, Eq)]
pub struct ExternSource {
extern_paths: FxHashMap<PathBuf, ExternSourceId>,
}
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
pub struct Dependency { pub struct Dependency {
pub crate_id: CrateId, pub crate_id: CrateId,
@ -182,7 +149,6 @@ impl CrateGraph {
display_name: Option<CrateName>, display_name: Option<CrateName>,
cfg_options: CfgOptions, cfg_options: CfgOptions,
env: Env, env: Env,
extern_source: ExternSource,
proc_macro: Vec<(SmolStr, Arc<dyn ra_tt::TokenExpander>)>, proc_macro: Vec<(SmolStr, Arc<dyn ra_tt::TokenExpander>)>,
) -> CrateId { ) -> CrateId {
let proc_macro = let proc_macro =
@ -194,7 +160,6 @@ impl CrateGraph {
display_name, display_name,
cfg_options, cfg_options,
env, env,
extern_source,
proc_macro, proc_macro,
dependencies: Vec::new(), dependencies: Vec::new(),
}; };
@ -334,20 +299,6 @@ impl Env {
} }
} }
impl ExternSource {
pub fn extern_path(&self, path: &Path) -> Option<(ExternSourceId, RelativePathBuf)> {
self.extern_paths.iter().find_map(|(root_path, id)| {
let rel_path = path.strip_prefix(root_path).ok()?;
let rel_path = RelativePathBuf::from_path(rel_path).ok()?;
Some((*id, rel_path))
})
}
pub fn set_extern_path(&mut self, root_path: &Path, root: ExternSourceId) {
self.extern_paths.insert(root_path.to_path_buf(), root);
}
}
#[derive(Debug)] #[derive(Debug)]
pub struct ParseEditionError { pub struct ParseEditionError {
invalid_input: String, invalid_input: String,
@ -378,7 +329,6 @@ mod tests {
CfgOptions::default(), CfgOptions::default(),
Env::default(), Env::default(),
Default::default(), Default::default(),
Default::default(),
); );
let crate2 = graph.add_crate_root( let crate2 = graph.add_crate_root(
FileId(2u32), FileId(2u32),
@ -387,7 +337,6 @@ mod tests {
CfgOptions::default(), CfgOptions::default(),
Env::default(), Env::default(),
Default::default(), Default::default(),
Default::default(),
); );
let crate3 = graph.add_crate_root( let crate3 = graph.add_crate_root(
FileId(3u32), FileId(3u32),
@ -396,7 +345,6 @@ mod tests {
CfgOptions::default(), CfgOptions::default(),
Env::default(), Env::default(),
Default::default(), Default::default(),
Default::default(),
); );
assert!(graph.add_dep(crate1, CrateName::new("crate2").unwrap(), crate2).is_ok()); assert!(graph.add_dep(crate1, CrateName::new("crate2").unwrap(), crate2).is_ok());
assert!(graph.add_dep(crate2, CrateName::new("crate3").unwrap(), crate3).is_ok()); assert!(graph.add_dep(crate2, CrateName::new("crate3").unwrap(), crate3).is_ok());
@ -413,7 +361,6 @@ mod tests {
CfgOptions::default(), CfgOptions::default(),
Env::default(), Env::default(),
Default::default(), Default::default(),
Default::default(),
); );
let crate2 = graph.add_crate_root( let crate2 = graph.add_crate_root(
FileId(2u32), FileId(2u32),
@ -422,7 +369,6 @@ mod tests {
CfgOptions::default(), CfgOptions::default(),
Env::default(), Env::default(),
Default::default(), Default::default(),
Default::default(),
); );
assert!(graph.add_dep(crate1, CrateName::new("crate2").unwrap(), crate2).is_ok()); assert!(graph.add_dep(crate1, CrateName::new("crate2").unwrap(), crate2).is_ok());
assert!(graph.add_dep(crate2, CrateName::new("crate2").unwrap(), crate2).is_err()); assert!(graph.add_dep(crate2, CrateName::new("crate2").unwrap(), crate2).is_err());
@ -438,7 +384,6 @@ mod tests {
CfgOptions::default(), CfgOptions::default(),
Env::default(), Env::default(),
Default::default(), Default::default(),
Default::default(),
); );
let crate2 = graph.add_crate_root( let crate2 = graph.add_crate_root(
FileId(2u32), FileId(2u32),
@ -447,7 +392,6 @@ mod tests {
CfgOptions::default(), CfgOptions::default(),
Env::default(), Env::default(),
Default::default(), Default::default(),
Default::default(),
); );
let crate3 = graph.add_crate_root( let crate3 = graph.add_crate_root(
FileId(3u32), FileId(3u32),
@ -456,7 +400,6 @@ mod tests {
CfgOptions::default(), CfgOptions::default(),
Env::default(), Env::default(),
Default::default(), Default::default(),
Default::default(),
); );
assert!(graph.add_dep(crate1, CrateName::new("crate2").unwrap(), crate2).is_ok()); assert!(graph.add_dep(crate1, CrateName::new("crate2").unwrap(), crate2).is_ok());
assert!(graph.add_dep(crate2, CrateName::new("crate3").unwrap(), crate3).is_ok()); assert!(graph.add_dep(crate2, CrateName::new("crate3").unwrap(), crate3).is_ok());
@ -472,7 +415,6 @@ mod tests {
CfgOptions::default(), CfgOptions::default(),
Env::default(), Env::default(),
Default::default(), Default::default(),
Default::default(),
); );
let crate2 = graph.add_crate_root( let crate2 = graph.add_crate_root(
FileId(2u32), FileId(2u32),
@ -481,7 +423,6 @@ mod tests {
CfgOptions::default(), CfgOptions::default(),
Env::default(), Env::default(),
Default::default(), Default::default(),
Default::default(),
); );
assert!(graph assert!(graph
.add_dep(crate1, CrateName::normalize_dashes("crate-name-with-dashes"), crate2) .add_dep(crate1, CrateName::normalize_dashes("crate-name-with-dashes"), crate2)

View file

@ -12,12 +12,13 @@ use rustc_hash::FxHashSet;
pub use crate::{ pub use crate::{
cancellation::Canceled, cancellation::Canceled,
input::{ input::{
CrateData, CrateGraph, CrateId, CrateName, Dependency, Edition, Env, ExternSource, CrateData, CrateGraph, CrateId, CrateName, Dependency, Edition, Env, FileId, ProcMacroId,
ExternSourceId, FileId, ProcMacroId, SourceRoot, SourceRootId, SourceRoot, SourceRootId,
}, },
}; };
pub use relative_path::{RelativePath, RelativePathBuf}; pub use relative_path::{RelativePath, RelativePathBuf};
pub use salsa; pub use salsa;
pub use vfs::{file_set::FileSet, AbsPathBuf, VfsPath};
#[macro_export] #[macro_export]
macro_rules! impl_intern_key { macro_rules! impl_intern_key {
@ -125,8 +126,6 @@ pub trait SourceDatabaseExt: SourceDatabase {
#[salsa::input] #[salsa::input]
fn file_text(&self, file_id: FileId) -> Arc<String>; fn file_text(&self, file_id: FileId) -> Arc<String>;
/// Path to a file, relative to the root of its source root. /// Path to a file, relative to the root of its source root.
#[salsa::input]
fn file_relative_path(&self, file_id: FileId) -> RelativePathBuf;
/// Source root of the file. /// Source root of the file.
#[salsa::input] #[salsa::input]
fn file_source_root(&self, file_id: FileId) -> SourceRootId; fn file_source_root(&self, file_id: FileId) -> SourceRootId;
@ -161,24 +160,9 @@ impl<T: SourceDatabaseExt> FileLoader for FileLoaderDelegate<&'_ T> {
} }
fn resolve_path(&self, anchor: FileId, path: &str) -> Option<FileId> { fn resolve_path(&self, anchor: FileId, path: &str) -> Option<FileId> {
// FIXME: this *somehow* should be platform agnostic... // FIXME: this *somehow* should be platform agnostic...
if std::path::Path::new(path).is_absolute() { let source_root = self.0.file_source_root(anchor);
let krate = *self.relevant_crates(anchor).iter().next()?; let source_root = self.0.source_root(source_root);
let (extern_source_id, relative_file) = source_root.file_set.resolve_path(anchor, path)
self.0.crate_graph()[krate].extern_source.extern_path(path.as_ref())?;
let source_root = self.0.source_root(SourceRootId(extern_source_id.0));
source_root.file_by_relative_path(&relative_file)
} else {
let rel_path = {
let mut rel_path = self.0.file_relative_path(anchor);
assert!(rel_path.pop());
rel_path.push(path);
rel_path.normalize()
};
let source_root = self.0.file_source_root(anchor);
let source_root = self.0.source_root(source_root);
source_root.file_by_relative_path(&rel_path)
}
} }
fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> { fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> {

View file

@ -2,7 +2,7 @@
use either::Either; use either::Either;
use hir_def::{ use hir_def::{
nameres::ModuleSource, nameres::{ModuleOrigin, ModuleSource},
src::{HasChildSource, HasSource as _}, src::{HasChildSource, HasSource as _},
Lookup, VariantId, Lookup, VariantId,
}; };
@ -29,6 +29,14 @@ impl Module {
def_map[self.id.local_id].definition_source(db.upcast()) def_map[self.id.local_id].definition_source(db.upcast())
} }
pub fn is_mod_rs(self, db: &dyn HirDatabase) -> bool {
let def_map = db.crate_def_map(self.id.krate);
match def_map[self.id.local_id].origin {
ModuleOrigin::File { is_mod_rs, .. } => is_mod_rs,
_ => false,
}
}
/// Returns a node which declares this module, either a `mod foo;` or a `mod foo {}`. /// Returns a node which declares this module, either a `mod foo;` or a `mod foo {}`.
/// `None` for the crate root. /// `None` for the crate root.
pub fn declaration_source(self, db: &dyn HirDatabase) -> Option<InFile<ast::Module>> { pub fn declaration_source(self, db: &dyn HirDatabase) -> Option<InFile<ast::Module>> {

View file

@ -104,6 +104,7 @@ pub enum ModuleOrigin {
}, },
/// Note that non-inline modules, by definition, live inside non-macro file. /// Note that non-inline modules, by definition, live inside non-macro file.
File { File {
is_mod_rs: bool,
declaration: AstId<ast::Module>, declaration: AstId<ast::Module>,
definition: FileId, definition: FileId,
}, },

View file

@ -777,11 +777,11 @@ impl ModCollector<'_, '_> {
name, name,
path_attr, path_attr,
) { ) {
Ok((file_id, mod_dir)) => { Ok((file_id, is_mod_rs, mod_dir)) => {
let module_id = self.push_child_module( let module_id = self.push_child_module(
name.clone(), name.clone(),
ast_id, ast_id,
Some(file_id), Some((file_id, is_mod_rs)),
&visibility, &visibility,
); );
let raw_items = self.def_collector.db.raw_items(file_id.into()); let raw_items = self.def_collector.db.raw_items(file_id.into());
@ -814,7 +814,7 @@ impl ModCollector<'_, '_> {
&mut self, &mut self,
name: Name, name: Name,
declaration: AstId<ast::Module>, declaration: AstId<ast::Module>,
definition: Option<FileId>, definition: Option<(FileId, bool)>,
visibility: &crate::visibility::RawVisibility, visibility: &crate::visibility::RawVisibility,
) -> LocalModuleId { ) -> LocalModuleId {
let vis = self let vis = self
@ -827,7 +827,9 @@ impl ModCollector<'_, '_> {
modules[res].parent = Some(self.module_id); modules[res].parent = Some(self.module_id);
modules[res].origin = match definition { modules[res].origin = match definition {
None => ModuleOrigin::Inline { definition: declaration }, None => ModuleOrigin::Inline { definition: declaration },
Some(definition) => ModuleOrigin::File { declaration, definition }, Some((definition, is_mod_rs)) => {
ModuleOrigin::File { declaration, definition, is_mod_rs }
}
}; };
for (name, mac) in modules[self.module_id].scope.collect_legacy_macros() { for (name, mac) in modules[self.module_id].scope.collect_legacy_macros() {
modules[res].scope.define_legacy_macro(name, mac) modules[res].scope.define_legacy_macro(name, mac)

View file

@ -44,7 +44,7 @@ impl ModDir {
file_id: HirFileId, file_id: HirFileId,
name: &Name, name: &Name,
attr_path: Option<&SmolStr>, attr_path: Option<&SmolStr>,
) -> Result<(FileId, ModDir), String> { ) -> Result<(FileId, bool, ModDir), String> {
let file_id = file_id.original_file(db.upcast()); let file_id = file_id.original_file(db.upcast());
let mut candidate_files = Vec::new(); let mut candidate_files = Vec::new();
@ -64,11 +64,12 @@ impl ModDir {
if let Some(file_id) = db.resolve_path(file_id, candidate.as_str()) { if let Some(file_id) = db.resolve_path(file_id, candidate.as_str()) {
let mut root_non_dir_owner = false; let mut root_non_dir_owner = false;
let mut mod_path = RelativePathBuf::new(); let mut mod_path = RelativePathBuf::new();
if !(candidate.ends_with("mod.rs") || attr_path.is_some()) { let is_mod_rs = candidate.ends_with("mod.rs");
if !(is_mod_rs || attr_path.is_some()) {
root_non_dir_owner = true; root_non_dir_owner = true;
mod_path.push(&name.to_string()); mod_path.push(&name.to_string());
} }
return Ok((file_id, ModDir { path: mod_path, root_non_dir_owner })); return Ok((file_id, is_mod_rs, ModDir { path: mod_path, root_non_dir_owner }));
} }
} }
Err(candidate_files.remove(0)) Err(candidate_files.remove(0))

View file

@ -47,7 +47,7 @@ use std::sync::Arc;
use ra_cfg::CfgOptions; use ra_cfg::CfgOptions;
use ra_db::{ use ra_db::{
salsa::{self, ParallelDatabase}, salsa::{self, ParallelDatabase},
CheckCanceled, Env, FileLoader, SourceDatabase, CheckCanceled, Env, FileLoader, FileSet, SourceDatabase, VfsPath,
}; };
use ra_ide_db::{ use ra_ide_db::{
symbol_index::{self, FileSymbol}, symbol_index::{self, FileSymbol},
@ -78,7 +78,8 @@ pub use crate::{
pub use hir::Documentation; pub use hir::Documentation;
pub use ra_assists::{Assist, AssistConfig, AssistId, ResolvedAssist}; pub use ra_assists::{Assist, AssistConfig, AssistId, ResolvedAssist};
pub use ra_db::{ pub use ra_db::{
Canceled, CrateGraph, CrateId, Edition, FileId, FilePosition, FileRange, SourceRootId, Canceled, CrateGraph, CrateId, Edition, FileId, FilePosition, FileRange, SourceRoot,
SourceRootId,
}; };
pub use ra_ide_db::{ pub use ra_ide_db::{
change::AnalysisChange, change::AnalysisChange,
@ -212,11 +213,14 @@ impl Analysis {
// `AnalysisHost` for creating a fully-featured analysis. // `AnalysisHost` for creating a fully-featured analysis.
pub fn from_single_file(text: String) -> (Analysis, FileId) { pub fn from_single_file(text: String) -> (Analysis, FileId) {
let mut host = AnalysisHost::default(); let mut host = AnalysisHost::default();
let source_root = SourceRootId(0);
let mut change = AnalysisChange::new();
change.add_root(source_root, true);
let mut crate_graph = CrateGraph::default();
let file_id = FileId(0); let file_id = FileId(0);
let mut file_set = FileSet::default();
file_set.insert(file_id, VfsPath::new_virtual_path("/main.rs".to_string()));
let source_root = SourceRoot::new_local(file_set);
let mut change = AnalysisChange::new();
change.set_roots(vec![source_root]);
let mut crate_graph = CrateGraph::default();
// FIXME: cfg options // FIXME: cfg options
// Default to enable test for single file. // Default to enable test for single file.
let mut cfg_options = CfgOptions::default(); let mut cfg_options = CfgOptions::default();
@ -228,9 +232,8 @@ impl Analysis {
cfg_options, cfg_options,
Env::default(), Env::default(),
Default::default(), Default::default(),
Default::default(),
); );
change.add_file(source_root, file_id, "main.rs".into(), Arc::new(text)); change.change_file(file_id, Some(Arc::new(text)));
change.set_crate_graph(crate_graph); change.set_crate_graph(crate_graph);
host.apply_change(change); host.apply_change(change);
(host.analysis(), file_id) (host.analysis(), file_id)

View file

@ -1,15 +1,12 @@
//! FIXME: write short doc here //! FIXME: write short doc here
use std::{str::FromStr, sync::Arc};
use std::str::FromStr;
use std::sync::Arc;
use ra_cfg::CfgOptions; use ra_cfg::CfgOptions;
use ra_db::{CrateName, Env}; use ra_db::{CrateName, Env, FileSet, SourceRoot, VfsPath};
use test_utils::{extract_offset, extract_range, parse_fixture, FixtureEntry, CURSOR_MARKER}; use test_utils::{extract_offset, extract_range, parse_fixture, FixtureEntry, CURSOR_MARKER};
use crate::{ use crate::{
Analysis, AnalysisChange, AnalysisHost, CrateGraph, Edition, FileId, FilePosition, FileRange, Analysis, AnalysisChange, AnalysisHost, CrateGraph, Edition, FileId, FilePosition, FileRange,
SourceRootId,
}; };
#[derive(Debug)] #[derive(Debug)]
@ -159,9 +156,8 @@ impl MockAnalysis {
} }
pub fn analysis_host(self) -> AnalysisHost { pub fn analysis_host(self) -> AnalysisHost {
let mut host = AnalysisHost::default(); let mut host = AnalysisHost::default();
let source_root = SourceRootId(0);
let mut change = AnalysisChange::new(); let mut change = AnalysisChange::new();
change.add_root(source_root, true); let mut file_set = FileSet::default();
let mut crate_graph = CrateGraph::default(); let mut crate_graph = CrateGraph::default();
let mut root_crate = None; let mut root_crate = None;
for (i, data) in self.files.into_iter().enumerate() { for (i, data) in self.files.into_iter().enumerate() {
@ -179,7 +175,6 @@ impl MockAnalysis {
cfg_options, cfg_options,
env, env,
Default::default(), Default::default(),
Default::default(),
)); ));
} else if path.ends_with("/lib.rs") { } else if path.ends_with("/lib.rs") {
let base = &path[..path.len() - "/lib.rs".len()]; let base = &path[..path.len() - "/lib.rs".len()];
@ -191,7 +186,6 @@ impl MockAnalysis {
cfg_options, cfg_options,
env, env,
Default::default(), Default::default(),
Default::default(),
); );
if let Some(root_crate) = root_crate { if let Some(root_crate) = root_crate {
crate_graph crate_graph
@ -199,9 +193,12 @@ impl MockAnalysis {
.unwrap(); .unwrap();
} }
} }
change.add_file(source_root, file_id, path.into(), Arc::new(data.content().to_owned())); let path = VfsPath::new_virtual_path(path.to_string());
file_set.insert(file_id, path);
change.change_file(file_id, Some(Arc::new(data.content().to_owned())));
} }
change.set_crate_graph(crate_graph); change.set_crate_graph(crate_graph);
change.set_roots(vec![SourceRoot::new_local(file_set)]);
host.apply_change(change); host.apply_change(change);
host host
} }

View file

@ -145,7 +145,6 @@ mod tests {
CfgOptions::default(), CfgOptions::default(),
Env::default(), Env::default(),
Default::default(), Default::default(),
Default::default(),
); );
let mut change = AnalysisChange::new(); let mut change = AnalysisChange::new();
change.set_crate_graph(crate_graph); change.set_crate_graph(crate_graph);

View file

@ -1,7 +1,7 @@
//! FIXME: write short doc here //! FIXME: write short doc here
use hir::{Module, ModuleDef, ModuleSource, Semantics}; use hir::{Module, ModuleDef, ModuleSource, Semantics};
use ra_db::{RelativePathBuf, SourceDatabaseExt}; use ra_db::SourceDatabaseExt;
use ra_ide_db::{ use ra_ide_db::{
defs::{classify_name, classify_name_ref, Definition, NameClass, NameRefClass}, defs::{classify_name, classify_name_ref, Definition, NameClass, NameRefClass},
RootDatabase, RootDatabase,
@ -109,9 +109,8 @@ fn rename_mod(
let file_id = src.file_id.original_file(db); let file_id = src.file_id.original_file(db);
match src.value { match src.value {
ModuleSource::SourceFile(..) => { ModuleSource::SourceFile(..) => {
let mod_path: RelativePathBuf = db.file_relative_path(file_id);
// mod is defined in path/to/dir/mod.rs // mod is defined in path/to/dir/mod.rs
let dst = if mod_path.file_stem() == Some("mod") { let dst = if module.is_mod_rs(db) {
format!("../{}/mod.rs", new_name) format!("../{}/mod.rs", new_name)
} else { } else {
format!("{}.rs", new_name) format!("{}.rs", new_name)

View file

@ -41,7 +41,7 @@ pub fn parse_search_replace(
match_finder.add_rule(rule); match_finder.add_rule(rule);
for &root in db.local_roots().iter() { for &root in db.local_roots().iter() {
let sr = db.source_root(root); let sr = db.source_root(root);
for file_id in sr.walk() { for file_id in sr.iter() {
if let Some(edit) = match_finder.edits_for_file(file_id) { if let Some(edit) = match_finder.edits_for_file(file_id) {
edits.push(SourceFileEdit { file_id, edit }); edits.push(SourceFileEdit { file_id, edit });
} }

View file

@ -9,26 +9,22 @@ use ra_db::{
SourceRootId, SourceRootId,
}; };
use ra_prof::{memory_usage, profile, Bytes}; use ra_prof::{memory_usage, profile, Bytes};
use rustc_hash::FxHashMap; use rustc_hash::FxHashSet;
use crate::{symbol_index::SymbolsDatabase, RootDatabase}; use crate::{symbol_index::SymbolsDatabase, RootDatabase};
#[derive(Default)] #[derive(Default)]
pub struct AnalysisChange { pub struct AnalysisChange {
new_roots: Vec<(SourceRootId, bool)>, roots: Option<Vec<SourceRoot>>,
roots_changed: FxHashMap<SourceRootId, RootChange>, files_changed: Vec<(FileId, Option<Arc<String>>)>,
files_changed: Vec<(FileId, Arc<String>)>,
crate_graph: Option<CrateGraph>, crate_graph: Option<CrateGraph>,
} }
impl fmt::Debug for AnalysisChange { impl fmt::Debug for AnalysisChange {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
let mut d = fmt.debug_struct("AnalysisChange"); let mut d = fmt.debug_struct("AnalysisChange");
if !self.new_roots.is_empty() { if let Some(roots) = &self.roots {
d.field("new_roots", &self.new_roots); d.field("roots", roots);
}
if !self.roots_changed.is_empty() {
d.field("roots_changed", &self.roots_changed);
} }
if !self.files_changed.is_empty() { if !self.files_changed.is_empty() {
d.field("files_changed", &self.files_changed.len()); d.field("files_changed", &self.files_changed.len());
@ -45,30 +41,14 @@ impl AnalysisChange {
AnalysisChange::default() AnalysisChange::default()
} }
pub fn add_root(&mut self, root_id: SourceRootId, is_local: bool) { pub fn set_roots(&mut self, roots: Vec<SourceRoot>) {
self.new_roots.push((root_id, is_local)); self.roots = Some(roots);
} }
pub fn add_file( pub fn change_file(&mut self, file_id: FileId, new_text: Option<Arc<String>>) {
&mut self,
root_id: SourceRootId,
file_id: FileId,
path: RelativePathBuf,
text: Arc<String>,
) {
let file = AddFile { file_id, path, text };
self.roots_changed.entry(root_id).or_default().added.push(file);
}
pub fn change_file(&mut self, file_id: FileId, new_text: Arc<String>) {
self.files_changed.push((file_id, new_text)) self.files_changed.push((file_id, new_text))
} }
pub fn remove_file(&mut self, root_id: SourceRootId, file_id: FileId, path: RelativePathBuf) {
let file = RemoveFile { file_id, path };
self.roots_changed.entry(root_id).or_default().removed.push(file);
}
pub fn set_crate_graph(&mut self, graph: CrateGraph) { pub fn set_crate_graph(&mut self, graph: CrateGraph) {
self.crate_graph = Some(graph); self.crate_graph = Some(graph);
} }
@ -114,31 +94,32 @@ impl RootDatabase {
let _p = profile("RootDatabase::apply_change"); let _p = profile("RootDatabase::apply_change");
self.request_cancellation(); self.request_cancellation();
log::info!("apply_change {:?}", change); log::info!("apply_change {:?}", change);
if !change.new_roots.is_empty() { if let Some(roots) = change.roots {
let mut local_roots = Vec::clone(&self.local_roots()); let mut local_roots = FxHashSet::default();
let mut libraries = Vec::clone(&self.library_roots()); let mut library_roots = FxHashSet::default();
for (root_id, is_local) in change.new_roots { for (idx, root) in roots.into_iter().enumerate() {
let root = let root_id = SourceRootId(idx as u32);
if is_local { SourceRoot::new_local() } else { SourceRoot::new_library() };
let durability = durability(&root); let durability = durability(&root);
self.set_source_root_with_durability(root_id, Arc::new(root), durability); if root.is_library {
if is_local { library_roots.insert(root_id);
local_roots.push(root_id);
} else { } else {
libraries.push(root_id) local_roots.insert(root_id);
} }
for file_id in root.iter() {
self.set_file_source_root_with_durability(file_id, root_id, durability);
}
self.set_source_root_with_durability(root_id, Arc::new(root), durability);
} }
self.set_local_roots_with_durability(Arc::new(local_roots), Durability::HIGH); self.set_local_roots_with_durability(Arc::new(local_roots), Durability::HIGH);
self.set_library_roots_with_durability(Arc::new(libraries), Durability::HIGH); self.set_library_roots_with_durability(Arc::new(library_roots), Durability::HIGH);
} }
for (root_id, root_change) in change.roots_changed {
self.apply_root_change(root_id, root_change);
}
for (file_id, text) in change.files_changed { for (file_id, text) in change.files_changed {
let source_root_id = self.file_source_root(file_id); let source_root_id = self.file_source_root(file_id);
let source_root = self.source_root(source_root_id); let source_root = self.source_root(source_root_id);
let durability = durability(&source_root); let durability = durability(&source_root);
// XXX: can't actually remove the file, just reset the text
let text = text.unwrap_or_default();
self.set_file_text_with_durability(file_id, text, durability) self.set_file_text_with_durability(file_id, text, durability)
} }
if let Some(crate_graph) = change.crate_graph { if let Some(crate_graph) = change.crate_graph {
@ -146,26 +127,6 @@ impl RootDatabase {
} }
} }
fn apply_root_change(&mut self, root_id: SourceRootId, root_change: RootChange) {
let mut source_root = SourceRoot::clone(&self.source_root(root_id));
let durability = durability(&source_root);
for add_file in root_change.added {
self.set_file_text_with_durability(add_file.file_id, add_file.text, durability);
self.set_file_relative_path_with_durability(
add_file.file_id,
add_file.path.clone(),
durability,
);
self.set_file_source_root_with_durability(add_file.file_id, root_id, durability);
source_root.insert_file(add_file.path, add_file.file_id);
}
for remove_file in root_change.removed {
self.set_file_text_with_durability(remove_file.file_id, Default::default(), durability);
source_root.remove_file(&remove_file.path);
}
self.set_source_root_with_durability(root_id, Arc::new(source_root), durability);
}
pub fn maybe_collect_garbage(&mut self) { pub fn maybe_collect_garbage(&mut self) {
if cfg!(feature = "wasm") { if cfg!(feature = "wasm") {
return; return;

View file

@ -157,14 +157,14 @@ impl Definition {
if let Some(Visibility::Public) = vis { if let Some(Visibility::Public) = vis {
let source_root_id = db.file_source_root(file_id); let source_root_id = db.file_source_root(file_id);
let source_root = db.source_root(source_root_id); let source_root = db.source_root(source_root_id);
let mut res = source_root.walk().map(|id| (id, None)).collect::<FxHashMap<_, _>>(); let mut res = source_root.iter().map(|id| (id, None)).collect::<FxHashMap<_, _>>();
let krate = module.krate(); let krate = module.krate();
for rev_dep in krate.reverse_dependencies(db) { for rev_dep in krate.reverse_dependencies(db) {
let root_file = rev_dep.root_file(db); let root_file = rev_dep.root_file(db);
let source_root_id = db.file_source_root(root_file); let source_root_id = db.file_source_root(root_file);
let source_root = db.source_root(source_root_id); let source_root = db.source_root(source_root_id);
res.extend(source_root.walk().map(|id| (id, None))); res.extend(source_root.iter().map(|id| (id, None)));
} }
return SearchScope::new(res); return SearchScope::new(res);
} }

View file

@ -42,7 +42,7 @@ use ra_syntax::{
SyntaxNode, SyntaxNodePtr, TextRange, WalkEvent, SyntaxNode, SyntaxNodePtr, TextRange, WalkEvent,
}; };
use rayon::prelude::*; use rayon::prelude::*;
use rustc_hash::FxHashMap; use rustc_hash::{FxHashMap, FxHashSet};
use crate::RootDatabase; use crate::RootDatabase;
@ -93,11 +93,11 @@ pub trait SymbolsDatabase: hir::db::HirDatabase + SourceDatabaseExt + ParallelDa
/// The set of "local" (that is, from the current workspace) roots. /// The set of "local" (that is, from the current workspace) roots.
/// Files in local roots are assumed to change frequently. /// Files in local roots are assumed to change frequently.
#[salsa::input] #[salsa::input]
fn local_roots(&self) -> Arc<Vec<SourceRootId>>; fn local_roots(&self) -> Arc<FxHashSet<SourceRootId>>;
/// The set of roots for crates.io libraries. /// The set of roots for crates.io libraries.
/// Files in libraries are assumed to never change. /// Files in libraries are assumed to never change.
#[salsa::input] #[salsa::input]
fn library_roots(&self) -> Arc<Vec<SourceRootId>>; fn library_roots(&self) -> Arc<FxHashSet<SourceRootId>>;
} }
fn library_symbols( fn library_symbols(
@ -111,7 +111,7 @@ fn library_symbols(
.map(|&root_id| { .map(|&root_id| {
let root = db.source_root(root_id); let root = db.source_root(root_id);
let files = root let files = root
.walk() .iter()
.map(|it| (it, SourceDatabaseExt::file_text(db, it))) .map(|it| (it, SourceDatabaseExt::file_text(db, it)))
.collect::<Vec<_>>(); .collect::<Vec<_>>();
let symbol_index = SymbolIndex::for_files( let symbol_index = SymbolIndex::for_files(
@ -175,7 +175,7 @@ pub fn world_symbols(db: &RootDatabase, query: Query) -> Vec<FileSymbol> {
let mut files = Vec::new(); let mut files = Vec::new();
for &root in db.local_roots().iter() { for &root in db.local_roots().iter() {
let sr = db.source_root(root); let sr = db.source_root(root);
files.extend(sr.walk()) files.extend(sr.iter())
} }
let snap = Snap(db.snapshot()); let snap = Snap(db.snapshot());

View file

@ -13,7 +13,7 @@ use std::{
use anyhow::{bail, Context, Result}; use anyhow::{bail, Context, Result};
use ra_cfg::CfgOptions; use ra_cfg::CfgOptions;
use ra_db::{CrateGraph, CrateName, Edition, Env, ExternSource, ExternSourceId, FileId}; use ra_db::{CrateGraph, CrateName, Edition, Env, FileId};
use rustc_hash::{FxHashMap, FxHashSet}; use rustc_hash::{FxHashMap, FxHashSet};
use serde_json::from_reader; use serde_json::from_reader;
@ -246,7 +246,6 @@ impl ProjectWorkspace {
pub fn to_crate_graph( pub fn to_crate_graph(
&self, &self,
target: Option<&str>, target: Option<&str>,
extern_source_roots: &FxHashMap<PathBuf, ExternSourceId>,
proc_macro_client: &ProcMacroClient, proc_macro_client: &ProcMacroClient,
load: &mut dyn FnMut(&Path) -> Option<FileId>, load: &mut dyn FnMut(&Path) -> Option<FileId>,
) -> CrateGraph { ) -> CrateGraph {
@ -280,15 +279,11 @@ impl ProjectWorkspace {
}; };
let mut env = Env::default(); let mut env = Env::default();
let mut extern_source = ExternSource::default();
if let Some(out_dir) = &krate.out_dir { if let Some(out_dir) = &krate.out_dir {
// NOTE: cargo and rustc seem to hide non-UTF-8 strings from env! and option_env!() // NOTE: cargo and rustc seem to hide non-UTF-8 strings from env! and option_env!()
if let Some(out_dir) = out_dir.to_str().map(|s| s.to_owned()) { if let Some(out_dir) = out_dir.to_str().map(|s| s.to_owned()) {
env.set("OUT_DIR", out_dir); env.set("OUT_DIR", out_dir);
} }
if let Some(&extern_source_id) = extern_source_roots.get(out_dir) {
extern_source.set_extern_path(&out_dir, extern_source_id);
}
} }
let proc_macro = krate let proc_macro = krate
.proc_macro_dylib_path .proc_macro_dylib_path
@ -304,7 +299,6 @@ impl ProjectWorkspace {
None, None,
cfg_options, cfg_options,
env, env,
extern_source,
proc_macro.unwrap_or_default(), proc_macro.unwrap_or_default(),
), ),
)) ))
@ -341,7 +335,6 @@ impl ProjectWorkspace {
let file_id = load(&sysroot[krate].root)?; let file_id = load(&sysroot[krate].root)?;
let env = Env::default(); let env = Env::default();
let extern_source = ExternSource::default();
let proc_macro = vec![]; let proc_macro = vec![];
let crate_name = CrateName::new(&sysroot[krate].name) let crate_name = CrateName::new(&sysroot[krate].name)
.expect("Sysroot crate names should not contain dashes"); .expect("Sysroot crate names should not contain dashes");
@ -352,7 +345,6 @@ impl ProjectWorkspace {
Some(crate_name), Some(crate_name),
cfg_options.clone(), cfg_options.clone(),
env, env,
extern_source,
proc_macro, proc_macro,
); );
Some((krate, crate_id)) Some((krate, crate_id))
@ -409,15 +401,11 @@ impl ProjectWorkspace {
opts opts
}; };
let mut env = Env::default(); let mut env = Env::default();
let mut extern_source = ExternSource::default();
if let Some(out_dir) = &cargo[pkg].out_dir { if let Some(out_dir) = &cargo[pkg].out_dir {
// NOTE: cargo and rustc seem to hide non-UTF-8 strings from env! and option_env!() // NOTE: cargo and rustc seem to hide non-UTF-8 strings from env! and option_env!()
if let Some(out_dir) = out_dir.to_str().map(|s| s.to_owned()) { if let Some(out_dir) = out_dir.to_str().map(|s| s.to_owned()) {
env.set("OUT_DIR", out_dir); env.set("OUT_DIR", out_dir);
} }
if let Some(&extern_source_id) = extern_source_roots.get(out_dir) {
extern_source.set_extern_path(&out_dir, extern_source_id);
}
} }
let proc_macro = cargo[pkg] let proc_macro = cargo[pkg]
.proc_macro_dylib_path .proc_macro_dylib_path
@ -431,7 +419,6 @@ impl ProjectWorkspace {
Some(CrateName::normalize_dashes(&cargo[pkg].name)), Some(CrateName::normalize_dashes(&cargo[pkg].name)),
cfg_options, cfg_options,
env, env,
extern_source,
proc_macro.clone(), proc_macro.clone(),
); );
if cargo[tgt].kind == TargetKind::Lib { if cargo[tgt].kind == TargetKind::Lib {

View file

@ -38,7 +38,8 @@ ra_prof = { path = "../ra_prof" }
ra_project_model = { path = "../ra_project_model" } ra_project_model = { path = "../ra_project_model" }
ra_syntax = { path = "../ra_syntax" } ra_syntax = { path = "../ra_syntax" }
ra_text_edit = { path = "../ra_text_edit" } ra_text_edit = { path = "../ra_text_edit" }
ra_vfs = "0.6.0" vfs = { path = "../vfs" }
vfs-notify = { path = "../vfs-notify" }
ra_cfg = { path = "../ra_cfg"} ra_cfg = { path = "../ra_cfg"}
# This should only be used in CLI # This should only be used in CLI

View file

@ -1,5 +1,7 @@
//! See `CargoTargetSpec` //! See `CargoTargetSpec`
use std::path::PathBuf;
use ra_cfg::CfgExpr; use ra_cfg::CfgExpr;
use ra_ide::{FileId, RunnableKind, TestId}; use ra_ide::{FileId, RunnableKind, TestId};
use ra_project_model::{self, TargetKind}; use ra_project_model::{self, TargetKind};
@ -12,6 +14,7 @@ use crate::{global_state::GlobalStateSnapshot, Result};
/// build/test/run the target. /// build/test/run the target.
#[derive(Clone)] #[derive(Clone)]
pub(crate) struct CargoTargetSpec { pub(crate) struct CargoTargetSpec {
pub(crate) workspace_root: PathBuf,
pub(crate) package: String, pub(crate) package: String,
pub(crate) target: String, pub(crate) target: String,
pub(crate) target_kind: TargetKind, pub(crate) target_kind: TargetKind,
@ -101,6 +104,7 @@ impl CargoTargetSpec {
None => return Ok(None), None => return Ok(None),
}; };
let res = CargoTargetSpec { let res = CargoTargetSpec {
workspace_root: cargo_ws.workspace_root().to_path_buf(),
package: cargo_ws.package_flag(&cargo_ws[cargo_ws[target].package]), package: cargo_ws.package_flag(&cargo_ws[cargo_ws[target].package]),
target: cargo_ws[target].name.clone(), target: cargo_ws[target].name.clone(),
target_kind: cargo_ws[target].kind, target_kind: cargo_ws[target].kind,

View file

@ -1,6 +1,7 @@
//! Benchmark operations like highlighting or goto definition. //! Benchmark operations like highlighting or goto definition.
use std::{ use std::{
convert::TryFrom,
path::{Path, PathBuf}, path::{Path, PathBuf},
str::FromStr, str::FromStr,
sync::Arc, sync::Arc,
@ -10,7 +11,7 @@ use std::{
use anyhow::{format_err, Result}; use anyhow::{format_err, Result};
use ra_db::{ use ra_db::{
salsa::{Database, Durability}, salsa::{Database, Durability},
FileId, SourceDatabaseExt, AbsPathBuf, FileId,
}; };
use ra_ide::{Analysis, AnalysisChange, AnalysisHost, CompletionConfig, FilePosition, LineCol}; use ra_ide::{Analysis, AnalysisChange, AnalysisHost, CompletionConfig, FilePosition, LineCol};
@ -53,8 +54,7 @@ pub fn analysis_bench(
let start = Instant::now(); let start = Instant::now();
eprint!("loading: "); eprint!("loading: ");
let (mut host, roots) = load_cargo(path, load_output_dirs, with_proc_macro)?; let (mut host, vfs) = load_cargo(path, load_output_dirs, with_proc_macro)?;
let db = host.raw_database();
eprintln!("{:?}\n", start.elapsed()); eprintln!("{:?}\n", start.elapsed());
let file_id = { let file_id = {
@ -62,22 +62,9 @@ pub fn analysis_bench(
BenchWhat::Highlight { path } => path, BenchWhat::Highlight { path } => path,
BenchWhat::Complete(pos) | BenchWhat::GotoDef(pos) => &pos.path, BenchWhat::Complete(pos) | BenchWhat::GotoDef(pos) => &pos.path,
}; };
let path = std::env::current_dir()?.join(path).canonicalize()?; let path = AbsPathBuf::try_from(path.clone()).unwrap();
roots let path = path.into();
.iter() vfs.file_id(&path).ok_or_else(|| format_err!("Can't find {}", path))?
.find_map(|(source_root_id, project_root)| {
if project_root.is_member() {
for file_id in db.source_root(*source_root_id).walk() {
let rel_path = db.file_relative_path(file_id);
let abs_path = rel_path.to_path(project_root.path());
if abs_path == path {
return Some(file_id);
}
}
}
None
})
.ok_or_else(|| format_err!("Can't find {}", path.display()))?
}; };
match &what { match &what {
@ -149,7 +136,7 @@ fn do_work<F: Fn(&Analysis) -> T, T>(host: &mut AnalysisHost, file_id: FileId, w
let mut text = host.analysis().file_text(file_id).unwrap().to_string(); let mut text = host.analysis().file_text(file_id).unwrap().to_string();
text.push_str("\n/* Hello world */\n"); text.push_str("\n/* Hello world */\n");
let mut change = AnalysisChange::new(); let mut change = AnalysisChange::new();
change.change_file(file_id, Arc::new(text)); change.change_file(file_id, Some(Arc::new(text)));
host.apply_change(change); host.apply_change(change);
} }
work(&host.analysis()); work(&host.analysis());

View file

@ -28,26 +28,14 @@ pub fn analysis_stats(
with_proc_macro: bool, with_proc_macro: bool,
) -> Result<()> { ) -> Result<()> {
let db_load_time = Instant::now(); let db_load_time = Instant::now();
let (mut host, roots) = load_cargo(path, load_output_dirs, with_proc_macro)?; let (mut host, vfs) = load_cargo(path, load_output_dirs, with_proc_macro)?;
let db = host.raw_database(); let db = host.raw_database();
println!("Database loaded, {} roots, {:?}", roots.len(), db_load_time.elapsed()); println!("Database loaded {:?}", db_load_time.elapsed());
let analysis_time = Instant::now(); let analysis_time = Instant::now();
let mut num_crates = 0; let mut num_crates = 0;
let mut visited_modules = HashSet::new(); let mut visited_modules = HashSet::new();
let mut visit_queue = Vec::new(); let mut visit_queue = Vec::new();
let members =
roots
.into_iter()
.filter_map(|(source_root_id, project_root)| {
if with_deps || project_root.is_member() {
Some(source_root_id)
} else {
None
}
})
.collect::<HashSet<_>>();
let mut krates = Crate::all(db); let mut krates = Crate::all(db);
if randomize { if randomize {
krates.shuffle(&mut thread_rng()); krates.shuffle(&mut thread_rng());
@ -55,7 +43,10 @@ pub fn analysis_stats(
for krate in krates { for krate in krates {
let module = krate.root_module(db).expect("crate without root module"); let module = krate.root_module(db).expect("crate without root module");
let file_id = module.definition_source(db).file_id; let file_id = module.definition_source(db).file_id;
if members.contains(&db.file_source_root(file_id.original_file(db))) { let file_id = file_id.original_file(db);
let source_root = db.file_source_root(file_id);
let source_root = db.source_root(source_root);
if !source_root.is_library || with_deps {
num_crates += 1; num_crates += 1;
visit_queue.push(module); visit_queue.push(module);
} }
@ -128,7 +119,7 @@ pub fn analysis_stats(
if verbosity.is_verbose() { if verbosity.is_verbose() {
let src = f.source(db); let src = f.source(db);
let original_file = src.file_id.original_file(db); let original_file = src.file_id.original_file(db);
let path = db.file_relative_path(original_file); let path = vfs.file_path(original_file);
let syntax_range = src.value.syntax().text_range(); let syntax_range = src.value.syntax().text_range();
format_to!(msg, " ({:?} {:?})", path, syntax_range); format_to!(msg, " ({:?} {:?})", path, syntax_range);
} }
@ -196,7 +187,7 @@ pub fn analysis_stats(
let root = db.parse_or_expand(src.file_id).unwrap(); let root = db.parse_or_expand(src.file_id).unwrap();
let node = src.map(|e| e.to_node(&root).syntax().clone()); let node = src.map(|e| e.to_node(&root).syntax().clone());
let original_range = original_range(db, node.as_ref()); let original_range = original_range(db, node.as_ref());
let path = db.file_relative_path(original_range.file_id); let path = vfs.file_path(original_range.file_id);
let line_index = let line_index =
host.analysis().file_line_index(original_range.file_id).unwrap(); host.analysis().file_line_index(original_range.file_id).unwrap();
let text_range = original_range.range; let text_range = original_range.range;

View file

@ -2,68 +2,57 @@
//! code if any errors are found. //! code if any errors are found.
use anyhow::anyhow; use anyhow::anyhow;
use hir::Crate;
use ra_db::SourceDatabaseExt; use ra_db::SourceDatabaseExt;
use ra_ide::Severity; use ra_ide::Severity;
use std::{collections::HashSet, path::Path}; use std::{collections::HashSet, path::Path};
use crate::cli::{load_cargo::load_cargo, Result}; use crate::cli::{load_cargo::load_cargo, Result};
use hir::Semantics;
pub fn diagnostics( pub fn diagnostics(
path: &Path, path: &Path,
load_output_dirs: bool, load_output_dirs: bool,
with_proc_macro: bool, with_proc_macro: bool,
all: bool, _all: bool,
) -> Result<()> { ) -> Result<()> {
let (host, roots) = load_cargo(path, load_output_dirs, with_proc_macro)?; let (host, _vfs) = load_cargo(path, load_output_dirs, with_proc_macro)?;
let db = host.raw_database(); let db = host.raw_database();
let analysis = host.analysis(); let analysis = host.analysis();
let semantics = Semantics::new(db);
let members = roots
.into_iter()
.filter_map(|(source_root_id, project_root)| {
// filter out dependencies
if project_root.is_member() {
Some(source_root_id)
} else {
None
}
})
.collect::<HashSet<_>>();
let mut found_error = false; let mut found_error = false;
let mut visited_files = HashSet::new(); let mut visited_files = HashSet::new();
for source_root_id in members {
for file_id in db.source_root(source_root_id).walk() {
// Filter out files which are not actually modules (unless `--all` flag is
// passed). In the rust-analyzer repository this filters out the parser test files.
if semantics.to_module_def(file_id).is_some() || all {
if !visited_files.contains(&file_id) {
let crate_name = if let Some(module) = semantics.to_module_def(file_id) {
if let Some(name) = module.krate().display_name(db) {
format!("{}", name)
} else {
String::from("unknown")
}
} else {
String::from("unknown")
};
println!(
"processing crate: {}, module: {}",
crate_name,
db.file_relative_path(file_id)
);
for diagnostic in analysis.diagnostics(file_id).unwrap() {
if matches!(diagnostic.severity, Severity::Error) {
found_error = true;
}
println!("{:?}", diagnostic); let mut work = Vec::new();
} let krates = Crate::all(db);
for krate in krates {
let module = krate.root_module(db).expect("crate without root module");
let file_id = module.definition_source(db).file_id;
let file_id = file_id.original_file(db);
let source_root = db.file_source_root(file_id);
let source_root = db.source_root(source_root);
if !source_root.is_library {
work.push(module);
}
}
visited_files.insert(file_id); for module in work {
let file_id = module.definition_source(db).file_id.original_file(db);
if !visited_files.contains(&file_id) {
let crate_name = if let Some(name) = module.krate().display_name(db) {
format!("{}", name)
} else {
String::from("unknown")
};
println!("processing crate: {}, module: {}", crate_name, _vfs.file_path(file_id));
for diagnostic in analysis.diagnostics(file_id).unwrap() {
if matches!(diagnostic.severity, Severity::Error) {
found_error = true;
} }
println!("{:?}", diagnostic);
} }
visited_files.insert(file_id);
} }
} }

View file

@ -1,32 +1,21 @@
//! Loads a Cargo project into a static instance of analysis, without support //! Loads a Cargo project into a static instance of analysis, without support
//! for incorporating changes. //! for incorporating changes.
use std::{convert::TryFrom, path::Path, sync::Arc};
use std::path::{Path, PathBuf};
use anyhow::Result; use anyhow::Result;
use crossbeam_channel::{unbounded, Receiver}; use crossbeam_channel::{unbounded, Receiver};
use ra_db::{ExternSourceId, FileId, SourceRootId}; use ra_db::{AbsPathBuf, CrateGraph};
use ra_ide::{AnalysisChange, AnalysisHost}; use ra_ide::{AnalysisChange, AnalysisHost};
use ra_project_model::{ use ra_project_model::{CargoConfig, ProcMacroClient, ProjectManifest, ProjectWorkspace};
CargoConfig, PackageRoot, ProcMacroClient, ProjectManifest, ProjectWorkspace, use vfs::loader::Handle;
};
use ra_vfs::{RootEntry, Vfs, VfsChange, VfsTask, Watch};
use rustc_hash::{FxHashMap, FxHashSet};
use crate::vfs_glob::RustPackageFilterBuilder; use crate::global_state::{ProjectFolders, SourceRootConfig};
fn vfs_file_to_id(f: ra_vfs::VfsFile) -> FileId {
FileId(f.0)
}
fn vfs_root_to_id(r: ra_vfs::VfsRoot) -> SourceRootId {
SourceRootId(r.0)
}
pub fn load_cargo( pub fn load_cargo(
root: &Path, root: &Path,
load_out_dirs_from_check: bool, load_out_dirs_from_check: bool,
with_proc_macro: bool, with_proc_macro: bool,
) -> Result<(AnalysisHost, FxHashMap<SourceRootId, PackageRoot>)> { ) -> Result<(AnalysisHost, vfs::Vfs)> {
let root = std::env::current_dir()?.join(root); let root = std::env::current_dir()?.join(root);
let root = ProjectManifest::discover_single(&root)?; let root = ProjectManifest::discover_single(&root)?;
let ws = ProjectWorkspace::load( let ws = ProjectWorkspace::load(
@ -35,123 +24,74 @@ pub fn load_cargo(
true, true,
)?; )?;
let mut extern_dirs = FxHashSet::default();
let (sender, receiver) = unbounded(); let (sender, receiver) = unbounded();
let sender = Box::new(move |t| sender.send(t).unwrap()); let mut vfs = vfs::Vfs::default();
let mut loader = {
let loader =
vfs_notify::LoaderHandle::spawn(Box::new(move |msg| sender.send(msg).unwrap()));
Box::new(loader)
};
let mut roots = Vec::new(); let proc_macro_client = if with_proc_macro {
let project_roots = ws.to_roots();
for root in &project_roots {
roots.push(RootEntry::new(
root.path().to_owned(),
RustPackageFilterBuilder::default().set_member(root.is_member()).into_vfs_filter(),
));
if let Some(out_dir) = root.out_dir() {
extern_dirs.insert(out_dir.to_path_buf());
roots.push(RootEntry::new(
out_dir.to_owned(),
RustPackageFilterBuilder::default().set_member(root.is_member()).into_vfs_filter(),
))
}
}
let (mut vfs, roots) = Vfs::new(roots, sender, Watch(false));
let source_roots = roots
.into_iter()
.map(|vfs_root| {
let source_root_id = vfs_root_to_id(vfs_root);
let project_root = project_roots
.iter()
.find(|it| it.path() == vfs.root2path(vfs_root))
.unwrap()
.clone();
(source_root_id, project_root)
})
.collect::<FxHashMap<_, _>>();
let proc_macro_client = if !with_proc_macro {
ProcMacroClient::dummy()
} else {
let path = std::env::current_exe()?; let path = std::env::current_exe()?;
ProcMacroClient::extern_process(path, &["proc-macro"]).unwrap() ProcMacroClient::extern_process(path, &["proc-macro"]).unwrap()
} else {
ProcMacroClient::dummy()
}; };
let host = load(&source_roots, ws, &mut vfs, receiver, extern_dirs, &proc_macro_client);
Ok((host, source_roots)) let crate_graph = ws.to_crate_graph(None, &proc_macro_client, &mut |path: &Path| {
let path = AbsPathBuf::try_from(path.to_path_buf()).unwrap();
let contents = loader.load_sync(&path);
let path = vfs::VfsPath::from(path);
vfs.set_file_contents(path.clone(), contents);
vfs.file_id(&path)
});
let project_folders = ProjectFolders::new(&[ws]);
loader.set_config(vfs::loader::Config { load: project_folders.load, watch: vec![] });
log::debug!("crate graph: {:?}", crate_graph);
let host = load(crate_graph, project_folders.source_root_config, &mut vfs, &receiver);
Ok((host, vfs))
} }
pub(crate) fn load( pub(crate) fn load(
source_roots: &FxHashMap<SourceRootId, PackageRoot>, crate_graph: CrateGraph,
ws: ProjectWorkspace, source_root_config: SourceRootConfig,
vfs: &mut Vfs, vfs: &mut vfs::Vfs,
receiver: Receiver<VfsTask>, receiver: &Receiver<vfs::loader::Message>,
extern_dirs: FxHashSet<PathBuf>,
proc_macro_client: &ProcMacroClient,
) -> AnalysisHost { ) -> AnalysisHost {
let lru_cap = std::env::var("RA_LRU_CAP").ok().and_then(|it| it.parse::<usize>().ok()); let lru_cap = std::env::var("RA_LRU_CAP").ok().and_then(|it| it.parse::<usize>().ok());
let mut host = AnalysisHost::new(lru_cap); let mut host = AnalysisHost::new(lru_cap);
let mut analysis_change = AnalysisChange::new(); let mut analysis_change = AnalysisChange::new();
// wait until Vfs has loaded all roots // wait until Vfs has loaded all roots
let mut roots_loaded = FxHashSet::default();
let mut extern_source_roots = FxHashMap::default();
for task in receiver { for task in receiver {
vfs.handle_task(task); match task {
let mut done = false; vfs::loader::Message::Progress { n_entries_done, n_entries_total } => {
for change in vfs.commit_changes() { if n_entries_done == n_entries_total {
match change { break;
VfsChange::AddRoot { root, files } => {
let source_root_id = vfs_root_to_id(root);
let is_local = source_roots[&source_root_id].is_member();
log::debug!(
"loaded source root {:?} with path {:?}",
source_root_id,
vfs.root2path(root)
);
analysis_change.add_root(source_root_id, is_local);
let vfs_root_path = vfs.root2path(root);
if extern_dirs.contains(&vfs_root_path) {
extern_source_roots.insert(vfs_root_path, ExternSourceId(root.0));
}
let mut file_map = FxHashMap::default();
for (vfs_file, path, text) in files {
let file_id = vfs_file_to_id(vfs_file);
analysis_change.add_file(source_root_id, file_id, path.clone(), text);
file_map.insert(path, file_id);
}
roots_loaded.insert(source_root_id);
if roots_loaded.len() == vfs.n_roots() {
done = true;
}
} }
VfsChange::AddFile { root, file, path, text } => { }
let source_root_id = vfs_root_to_id(root); vfs::loader::Message::Loaded { files } => {
let file_id = vfs_file_to_id(file); for (path, contents) in files {
analysis_change.add_file(source_root_id, file_id, path, text); vfs.set_file_contents(path.into(), contents)
}
VfsChange::RemoveFile { .. } | VfsChange::ChangeFile { .. } => {
// We just need the first scan, so just ignore these
} }
} }
} }
if done { }
break; let changes = vfs.take_changes();
for file in changes {
if file.exists() {
let contents = vfs.file_contents(file.file_id).to_vec();
if let Ok(text) = String::from_utf8(contents) {
analysis_change.change_file(file.file_id, Some(Arc::new(text)))
}
} }
} }
let source_roots = source_root_config.partition(&vfs);
analysis_change.set_roots(source_roots);
let crate_graph =
ws.to_crate_graph(None, &extern_source_roots, proc_macro_client, &mut |path: &Path| {
// Some path from metadata will be non canonicalized, e.g. /foo/../bar/lib.rs
let path = path.canonicalize().ok()?;
let vfs_file = vfs.load(&path);
log::debug!("vfs file {:?} -> {:?}", path, vfs_file);
vfs_file.map(vfs_file_to_id)
});
log::debug!("crate graph: {:?}", crate_graph);
analysis_change.set_crate_graph(crate_graph); analysis_change.set_crate_graph(crate_graph);
host.apply_change(analysis_change); host.apply_change(analysis_change);
@ -167,7 +107,7 @@ mod tests {
#[test] #[test]
fn test_loading_rust_analyzer() { fn test_loading_rust_analyzer() {
let path = Path::new(env!("CARGO_MANIFEST_DIR")).parent().unwrap().parent().unwrap(); let path = Path::new(env!("CARGO_MANIFEST_DIR")).parent().unwrap().parent().unwrap();
let (host, _roots) = load_cargo(path, false, false).unwrap(); let (host, _vfs) = load_cargo(path, false, false).unwrap();
let n_crates = Crate::all(host.raw_database()).len(); let n_crates = Crate::all(host.raw_database()).len();
// RA has quite a few crates, but the exact count doesn't matter // RA has quite a few crates, but the exact count doesn't matter
assert!(n_crates > 20); assert!(n_crates > 20);

View file

@ -1,10 +1,22 @@
//! Conversion lsp_types types to rust-analyzer specific ones. //! Conversion lsp_types types to rust-analyzer specific ones.
use std::convert::TryFrom;
use ra_db::{FileId, FilePosition, FileRange}; use ra_db::{FileId, FilePosition, FileRange};
use ra_ide::{LineCol, LineIndex}; use ra_ide::{LineCol, LineIndex};
use ra_syntax::{TextRange, TextSize}; use ra_syntax::{TextRange, TextSize};
use vfs::AbsPathBuf;
use crate::{global_state::GlobalStateSnapshot, Result}; use crate::{global_state::GlobalStateSnapshot, Result};
pub(crate) fn abs_path(url: &lsp_types::Url) -> Result<AbsPathBuf> {
let path = url.to_file_path().map_err(|()| "url is not a file")?;
Ok(AbsPathBuf::try_from(path).unwrap())
}
pub(crate) fn vfs_path(url: &lsp_types::Url) -> Result<vfs::VfsPath> {
abs_path(url).map(vfs::VfsPath::from)
}
pub(crate) fn offset(line_index: &LineIndex, position: lsp_types::Position) -> TextSize { pub(crate) fn offset(line_index: &LineIndex, position: lsp_types::Position) -> TextSize {
let line_col = LineCol { line: position.line as u32, col_utf16: position.character as u32 }; let line_col = LineCol { line: position.line as u32, col_utf16: position.character as u32 };
line_index.offset(line_col) line_index.offset(line_col)

View file

@ -3,30 +3,28 @@
//! //!
//! Each tick provides an immutable snapshot of the state as `WorldSnapshot`. //! Each tick provides an immutable snapshot of the state as `WorldSnapshot`.
use std::{ use std::{convert::TryFrom, path::Path, sync::Arc};
path::{Path, PathBuf},
sync::Arc,
};
use crossbeam_channel::{unbounded, Receiver}; use crossbeam_channel::{unbounded, Receiver};
use lsp_types::Url; use lsp_types::Url;
use parking_lot::RwLock; use parking_lot::RwLock;
use ra_db::{CrateId, SourceRoot, VfsPath};
use ra_flycheck::{Flycheck, FlycheckConfig}; use ra_flycheck::{Flycheck, FlycheckConfig};
use ra_ide::{Analysis, AnalysisChange, AnalysisHost, CrateGraph, FileId, SourceRootId}; use ra_ide::{Analysis, AnalysisChange, AnalysisHost, CrateGraph, FileId};
use ra_project_model::{CargoWorkspace, ProcMacroClient, ProjectWorkspace, Target}; use ra_project_model::{CargoWorkspace, ProcMacroClient, ProjectWorkspace, Target};
use ra_vfs::{LineEndings, RootEntry, Vfs, VfsChange, VfsFile, VfsTask, Watch};
use stdx::format_to; use stdx::format_to;
use vfs::{file_set::FileSetConfig, loader::Handle, AbsPathBuf};
use crate::{ use crate::{
config::{Config, FilesWatcher}, config::{Config, FilesWatcher},
diagnostics::{CheckFixes, DiagnosticCollection}, diagnostics::{CheckFixes, DiagnosticCollection},
from_proto,
line_endings::LineEndings,
main_loop::request_metrics::{LatestRequests, RequestMetrics}, main_loop::request_metrics::{LatestRequests, RequestMetrics},
to_proto::url_from_abs_path, to_proto::url_from_abs_path,
vfs_glob::{Glob, RustPackageFilterBuilder}, Result,
LspError, Result,
}; };
use ra_db::{CrateId, ExternSourceId}; use rustc_hash::FxHashMap;
use rustc_hash::{FxHashMap, FxHashSet};
fn create_flycheck(workspaces: &[ProjectWorkspace], config: &FlycheckConfig) -> Option<Flycheck> { fn create_flycheck(workspaces: &[ProjectWorkspace], config: &FlycheckConfig) -> Option<Flycheck> {
// FIXME: Figure out the multi-workspace situation // FIXME: Figure out the multi-workspace situation
@ -50,15 +48,16 @@ fn create_flycheck(workspaces: &[ProjectWorkspace], config: &FlycheckConfig) ->
#[derive(Debug)] #[derive(Debug)]
pub struct GlobalState { pub struct GlobalState {
pub config: Config, pub config: Config,
pub local_roots: Vec<PathBuf>,
pub workspaces: Arc<Vec<ProjectWorkspace>>, pub workspaces: Arc<Vec<ProjectWorkspace>>,
pub analysis_host: AnalysisHost, pub analysis_host: AnalysisHost,
pub vfs: Arc<RwLock<Vfs>>, pub loader: Box<dyn vfs::loader::Handle>,
pub task_receiver: Receiver<VfsTask>, pub task_receiver: Receiver<vfs::loader::Message>,
pub flycheck: Option<Flycheck>, pub flycheck: Option<Flycheck>,
pub diagnostics: DiagnosticCollection, pub diagnostics: DiagnosticCollection,
pub proc_macro_client: ProcMacroClient, pub proc_macro_client: ProcMacroClient,
pub(crate) vfs: Arc<RwLock<(vfs::Vfs, FxHashMap<FileId, LineEndings>)>>,
pub(crate) latest_requests: Arc<RwLock<LatestRequests>>, pub(crate) latest_requests: Arc<RwLock<LatestRequests>>,
source_root_config: SourceRootConfig,
} }
/// An immutable snapshot of the world's state at a point in time. /// An immutable snapshot of the world's state at a point in time.
@ -68,62 +67,21 @@ pub struct GlobalStateSnapshot {
pub analysis: Analysis, pub analysis: Analysis,
pub check_fixes: CheckFixes, pub check_fixes: CheckFixes,
pub(crate) latest_requests: Arc<RwLock<LatestRequests>>, pub(crate) latest_requests: Arc<RwLock<LatestRequests>>,
vfs: Arc<RwLock<Vfs>>, vfs: Arc<RwLock<(vfs::Vfs, FxHashMap<FileId, LineEndings>)>>,
} }
impl GlobalState { impl GlobalState {
pub fn new( pub fn new(
workspaces: Vec<ProjectWorkspace>, workspaces: Vec<ProjectWorkspace>,
lru_capacity: Option<usize>, lru_capacity: Option<usize>,
exclude_globs: &[Glob],
config: Config, config: Config,
) -> GlobalState { ) -> GlobalState {
let mut change = AnalysisChange::new(); let mut change = AnalysisChange::new();
let mut extern_dirs: FxHashSet<PathBuf> = FxHashSet::default(); let project_folders = ProjectFolders::new(&workspaces);
let mut local_roots = Vec::new(); let (task_sender, task_receiver) = unbounded::<vfs::loader::Message>();
let roots: Vec<_> = { let mut vfs = vfs::Vfs::default();
let create_filter = |is_member| {
RustPackageFilterBuilder::default()
.set_member(is_member)
.exclude(exclude_globs.iter().cloned())
.into_vfs_filter()
};
let mut roots = Vec::new();
for root in workspaces.iter().flat_map(ProjectWorkspace::to_roots) {
let path = root.path().to_owned();
if root.is_member() {
local_roots.push(path.clone());
}
roots.push(RootEntry::new(path, create_filter(root.is_member())));
if let Some(out_dir) = root.out_dir() {
extern_dirs.insert(out_dir.to_path_buf());
roots.push(RootEntry::new(
out_dir.to_path_buf(),
create_filter(root.is_member()),
))
}
}
roots
};
let (task_sender, task_receiver) = unbounded();
let task_sender = Box::new(move |t| task_sender.send(t).unwrap());
let watch = Watch(matches!(config.files.watcher, FilesWatcher::Notify));
let (mut vfs, vfs_roots) = Vfs::new(roots, task_sender, watch);
let mut extern_source_roots = FxHashMap::default();
for r in vfs_roots {
let vfs_root_path = vfs.root2path(r);
let is_local = local_roots.iter().any(|it| vfs_root_path.starts_with(it));
change.add_root(SourceRootId(r.0), is_local);
// FIXME: add path2root in vfs to simpily this logic
if extern_dirs.contains(&vfs_root_path) {
extern_source_roots.insert(vfs_root_path, ExternSourceId(r.0));
}
}
let proc_macro_client = match &config.proc_macro_srv { let proc_macro_client = match &config.proc_macro_srv {
None => ProcMacroClient::dummy(), None => ProcMacroClient::dummy(),
@ -140,18 +98,30 @@ impl GlobalState {
}, },
}; };
let mut loader = {
let loader = vfs_notify::LoaderHandle::spawn(Box::new(move |msg| {
task_sender.send(msg).unwrap()
}));
Box::new(loader)
};
let watch = match config.files.watcher {
FilesWatcher::Client => vec![],
FilesWatcher::Notify => project_folders.watch,
};
loader.set_config(vfs::loader::Config { load: project_folders.load, watch });
// Create crate graph from all the workspaces // Create crate graph from all the workspaces
let mut crate_graph = CrateGraph::default(); let mut crate_graph = CrateGraph::default();
let mut load = |path: &Path| { let mut load = |path: &Path| {
// Some path from metadata will be non canonicalized, e.g. /foo/../bar/lib.rs let path = AbsPathBuf::try_from(path.to_path_buf()).ok()?;
let path = path.canonicalize().ok()?; let contents = loader.load_sync(&path);
let vfs_file = vfs.load(&path); let path = vfs::VfsPath::from(path);
vfs_file.map(|f| FileId(f.0)) vfs.set_file_contents(path.clone(), contents);
vfs.file_id(&path)
}; };
for ws in workspaces.iter() { for ws in workspaces.iter() {
crate_graph.extend(ws.to_crate_graph( crate_graph.extend(ws.to_crate_graph(
config.cargo.target.as_deref(), config.cargo.target.as_deref(),
&extern_source_roots,
&proc_macro_client, &proc_macro_client,
&mut load, &mut load,
)); ));
@ -162,18 +132,21 @@ impl GlobalState {
let mut analysis_host = AnalysisHost::new(lru_capacity); let mut analysis_host = AnalysisHost::new(lru_capacity);
analysis_host.apply_change(change); analysis_host.apply_change(change);
GlobalState { let mut res = GlobalState {
config, config,
local_roots,
workspaces: Arc::new(workspaces), workspaces: Arc::new(workspaces),
analysis_host, analysis_host,
vfs: Arc::new(RwLock::new(vfs)), loader,
vfs: Arc::new(RwLock::new((vfs, FxHashMap::default()))),
task_receiver, task_receiver,
latest_requests: Default::default(), latest_requests: Default::default(),
flycheck, flycheck,
diagnostics: Default::default(), diagnostics: Default::default(),
proc_macro_client, proc_macro_client,
} source_root_config: project_folders.source_root_config,
};
res.process_changes();
res
} }
pub fn update_configuration(&mut self, config: Config) { pub fn update_configuration(&mut self, config: Config) {
@ -186,33 +159,40 @@ impl GlobalState {
self.config = config; self.config = config;
} }
/// Returns a vec of libraries pub fn process_changes(&mut self) -> bool {
/// FIXME: better API here let change = {
pub fn process_changes(&mut self, roots_scanned: &mut usize) -> bool { let mut change = AnalysisChange::new();
let changes = self.vfs.write().commit_changes(); let (vfs, line_endings_map) = &mut *self.vfs.write();
if changes.is_empty() { let changed_files = vfs.take_changes();
return false; if changed_files.is_empty() {
} return false;
let mut change = AnalysisChange::new();
for c in changes {
match c {
VfsChange::AddRoot { root, files } => {
*roots_scanned += 1;
for (file, path, text) in files {
change.add_file(SourceRootId(root.0), FileId(file.0), path, text);
}
}
VfsChange::AddFile { root, file, path, text } => {
change.add_file(SourceRootId(root.0), FileId(file.0), path, text);
}
VfsChange::RemoveFile { root, file, path } => {
change.remove_file(SourceRootId(root.0), FileId(file.0), path)
}
VfsChange::ChangeFile { file, text } => {
change.change_file(FileId(file.0), text);
}
} }
}
let fs_op = changed_files.iter().any(|it| it.is_created_or_deleted());
if fs_op {
let roots = self.source_root_config.partition(&vfs);
change.set_roots(roots)
}
for file in changed_files {
let text = if file.exists() {
let bytes = vfs.file_contents(file.file_id).to_vec();
match String::from_utf8(bytes).ok() {
Some(text) => {
let (text, line_endings) = LineEndings::normalize(text);
line_endings_map.insert(file.file_id, line_endings);
Some(Arc::new(text))
}
None => None,
}
} else {
None
};
change.change_file(file.file_id, text);
}
change
};
self.analysis_host.apply_change(change); self.analysis_host.apply_change(change);
true true
} }
@ -242,35 +222,31 @@ impl GlobalState {
} }
impl GlobalStateSnapshot { impl GlobalStateSnapshot {
pub fn analysis(&self) -> &Analysis { pub(crate) fn analysis(&self) -> &Analysis {
&self.analysis &self.analysis
} }
pub fn url_to_file_id(&self, url: &Url) -> Result<FileId> { pub(crate) fn url_to_file_id(&self, url: &Url) -> Result<FileId> {
let path = url.to_file_path().map_err(|()| format!("invalid uri: {}", url))?; let path = from_proto::abs_path(url)?;
let file = self.vfs.read().path2file(&path).ok_or_else(|| { let path = path.into();
// Show warning as this file is outside current workspace let res =
// FIXME: just handle such files, and remove `LspError::UNKNOWN_FILE`. self.vfs.read().0.file_id(&path).ok_or_else(|| format!("file not found: {}", path))?;
LspError { Ok(res)
code: LspError::UNKNOWN_FILE,
message: "Rust file outside current workspace is not supported yet.".to_string(),
}
})?;
Ok(FileId(file.0))
} }
pub fn file_id_to_url(&self, id: FileId) -> Url { pub(crate) fn file_id_to_url(&self, id: FileId) -> Url {
file_id_to_url(&self.vfs.read(), id) file_id_to_url(&self.vfs.read().0, id)
} }
pub fn file_line_endings(&self, id: FileId) -> LineEndings { pub(crate) fn file_line_endings(&self, id: FileId) -> LineEndings {
self.vfs.read().file_line_endings(VfsFile(id.0)) self.vfs.read().1[&id]
} }
pub fn anchored_path(&self, file_id: FileId, path: &str) -> Url { pub fn anchored_path(&self, file_id: FileId, path: &str) -> Url {
let mut base = self.vfs.read().file2path(VfsFile(file_id.0)); let mut base = self.vfs.read().0.file_path(file_id);
base.pop(); base.pop();
let path = base.join(path); let path = base.join(path);
let path = path.as_path().unwrap();
url_from_abs_path(&path) url_from_abs_path(&path)
} }
@ -279,7 +255,8 @@ impl GlobalStateSnapshot {
crate_id: CrateId, crate_id: CrateId,
) -> Option<(&CargoWorkspace, Target)> { ) -> Option<(&CargoWorkspace, Target)> {
let file_id = self.analysis().crate_root(crate_id).ok()?; let file_id = self.analysis().crate_root(crate_id).ok()?;
let path = self.vfs.read().file2path(VfsFile(file_id.0)); let path = self.vfs.read().0.file_path(file_id);
let path = path.as_path()?;
self.workspaces.iter().find_map(|ws| match ws { self.workspaces.iter().find_map(|ws| match ws {
ProjectWorkspace::Cargo { cargo, .. } => { ProjectWorkspace::Cargo { cargo, .. } => {
cargo.target_by_root(&path).map(|it| (cargo, it)) cargo.target_by_root(&path).map(|it| (cargo, it))
@ -307,14 +284,86 @@ impl GlobalStateSnapshot {
); );
buf buf
} }
}
pub fn workspace_root_for(&self, file_id: FileId) -> Option<&Path> { pub(crate) fn file_id_to_url(vfs: &vfs::Vfs, id: FileId) -> Url {
let path = self.vfs.read().file2path(VfsFile(file_id.0)); let path = vfs.file_path(id);
self.workspaces.iter().find_map(|ws| ws.workspace_root_for(&path)) let path = path.as_path().unwrap();
url_from_abs_path(&path)
}
#[derive(Default)]
pub(crate) struct ProjectFolders {
pub(crate) load: Vec<vfs::loader::Entry>,
pub(crate) watch: Vec<usize>,
pub(crate) source_root_config: SourceRootConfig,
}
impl ProjectFolders {
pub(crate) fn new(workspaces: &[ProjectWorkspace]) -> ProjectFolders {
let mut res = ProjectFolders::default();
let mut fsc = FileSetConfig::builder();
let mut local_filesets = vec![];
for root in workspaces.iter().flat_map(|it| it.to_roots()) {
let path = root.path().to_owned();
let mut file_set_roots: Vec<VfsPath> = vec![];
let path = AbsPathBuf::try_from(path).unwrap();
let entry = if root.is_member() {
vfs::loader::Entry::local_cargo_package(path.clone())
} else {
vfs::loader::Entry::cargo_package_dependency(path.clone())
};
res.load.push(entry);
if root.is_member() {
res.watch.push(res.load.len() - 1);
}
if let Some(out_dir) = root.out_dir() {
let out_dir = AbsPathBuf::try_from(out_dir.to_path_buf()).unwrap();
res.load.push(vfs::loader::Entry::rs_files_recursively(out_dir.clone()));
if root.is_member() {
res.watch.push(res.load.len() - 1);
}
file_set_roots.push(out_dir.into());
}
file_set_roots.push(path.into());
if root.is_member() {
local_filesets.push(fsc.len());
}
fsc.add_file_set(file_set_roots)
}
let fsc = fsc.build();
res.source_root_config = SourceRootConfig { fsc, local_filesets };
res
} }
} }
pub(crate) fn file_id_to_url(vfs: &Vfs, id: FileId) -> Url { #[derive(Default, Debug)]
let path = vfs.file2path(VfsFile(id.0)); pub(crate) struct SourceRootConfig {
url_from_abs_path(&path) pub(crate) fsc: FileSetConfig,
pub(crate) local_filesets: Vec<usize>,
}
impl SourceRootConfig {
pub fn partition(&self, vfs: &vfs::Vfs) -> Vec<SourceRoot> {
self.fsc
.partition(vfs)
.into_iter()
.enumerate()
.map(|(idx, file_set)| {
let is_local = self.local_filesets.contains(&idx);
if is_local {
SourceRoot::new_local(file_set)
} else {
SourceRoot::new_library(file_set)
}
})
.collect()
}
} }

View file

@ -17,7 +17,6 @@ macro_rules! eprintln {
($($tt:tt)*) => { stdx::eprintln!($($tt)*) }; ($($tt:tt)*) => { stdx::eprintln!($($tt)*) };
} }
mod vfs_glob;
mod caps; mod caps;
mod cargo_target_spec; mod cargo_target_spec;
mod to_proto; mod to_proto;
@ -29,6 +28,7 @@ pub mod config;
mod global_state; mod global_state;
mod diagnostics; mod diagnostics;
mod semantic_tokens; mod semantic_tokens;
mod line_endings;
use serde::de::DeserializeOwned; use serde::de::DeserializeOwned;

View file

@ -0,0 +1,64 @@
//! We maintain invariant that all internal strings use `\n` as line separator.
//! This module does line ending conversion and detection (so that we can
//! convert back to `\r\n` on the way out).
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub(crate) enum LineEndings {
Unix,
Dos,
}
impl LineEndings {
/// Replaces `\r\n` with `\n` in-place in `src`.
pub(crate) fn normalize(src: String) -> (String, LineEndings) {
if !src.as_bytes().contains(&b'\r') {
return (src, LineEndings::Unix);
}
// We replace `\r\n` with `\n` in-place, which doesn't break utf-8 encoding.
// While we *can* call `as_mut_vec` and do surgery on the live string
// directly, let's rather steal the contents of `src`. This makes the code
// safe even if a panic occurs.
let mut buf = src.into_bytes();
let mut gap_len = 0;
let mut tail = buf.as_mut_slice();
loop {
let idx = match find_crlf(&tail[gap_len..]) {
None => tail.len(),
Some(idx) => idx + gap_len,
};
tail.copy_within(gap_len..idx, 0);
tail = &mut tail[idx - gap_len..];
if tail.len() == gap_len {
break;
}
gap_len += 1;
}
// Account for removed `\r`.
// After `set_len`, `buf` is guaranteed to contain utf-8 again.
let new_len = buf.len() - gap_len;
let src = unsafe {
buf.set_len(new_len);
String::from_utf8_unchecked(buf)
};
return (src, LineEndings::Dos);
fn find_crlf(src: &[u8]) -> Option<usize> {
let mut search_idx = 0;
while let Some(idx) = find_cr(&src[search_idx..]) {
if src[search_idx..].get(idx + 1) != Some(&b'\n') {
search_idx += idx + 1;
continue;
}
return Some(search_idx + idx);
}
None
}
fn find_cr(src: &[u8]) -> Option<usize> {
src.iter().enumerate().find_map(|(idx, &b)| if b == b'\r' { Some(idx) } else { None })
}
}
}

View file

@ -2,11 +2,9 @@
//! requests/replies and notifications back to the client. //! requests/replies and notifications back to the client.
mod handlers; mod handlers;
mod subscriptions;
pub(crate) mod request_metrics; pub(crate) mod request_metrics;
use std::{ use std::{
borrow::Cow,
env, env,
error::Error, error::Error,
fmt, fmt,
@ -20,16 +18,12 @@ use crossbeam_channel::{never, select, unbounded, RecvError, Sender};
use lsp_server::{ use lsp_server::{
Connection, ErrorCode, Message, Notification, ReqQueue, Request, RequestId, Response, Connection, ErrorCode, Message, Notification, ReqQueue, Request, RequestId, Response,
}; };
use lsp_types::{ use lsp_types::{request::Request as _, NumberOrString, TextDocumentContentChangeEvent};
request::Request as _, DidChangeTextDocumentParams, NumberOrString, use ra_flycheck::CheckTask;
TextDocumentContentChangeEvent, WorkDoneProgress, WorkDoneProgressBegin,
WorkDoneProgressCreateParams, WorkDoneProgressEnd, WorkDoneProgressReport,
};
use ra_flycheck::{CheckTask, Status};
use ra_ide::{Canceled, FileId, LineIndex}; use ra_ide::{Canceled, FileId, LineIndex};
use ra_prof::profile; use ra_prof::profile;
use ra_project_model::{PackageRoot, ProjectWorkspace}; use ra_project_model::{PackageRoot, ProjectWorkspace};
use ra_vfs::VfsTask; use rustc_hash::FxHashSet;
use serde::{de::DeserializeOwned, Serialize}; use serde::{de::DeserializeOwned, Serialize};
use threadpool::ThreadPool; use threadpool::ThreadPool;
@ -39,9 +33,10 @@ use crate::{
from_proto, from_proto,
global_state::{file_id_to_url, GlobalState, GlobalStateSnapshot}, global_state::{file_id_to_url, GlobalState, GlobalStateSnapshot},
lsp_ext, lsp_ext,
main_loop::{request_metrics::RequestMetrics, subscriptions::Subscriptions}, main_loop::request_metrics::RequestMetrics,
Result, Result,
}; };
use ra_db::VfsPath;
#[derive(Debug)] #[derive(Debug)]
pub struct LspError { pub struct LspError {
@ -128,13 +123,6 @@ pub fn main_loop(config: Config, connection: Connection) -> Result<()> {
.collect::<Vec<_>>() .collect::<Vec<_>>()
}; };
let globs = config
.files
.exclude
.iter()
.map(|glob| crate::vfs_glob::Glob::new(glob))
.collect::<std::result::Result<Vec<_>, _>>()?;
if let FilesWatcher::Client = config.files.watcher { if let FilesWatcher::Client = config.files.watcher {
let registration_options = lsp_types::DidChangeWatchedFilesRegistrationOptions { let registration_options = lsp_types::DidChangeWatchedFilesRegistrationOptions {
watchers: workspaces watchers: workspaces
@ -159,11 +147,9 @@ pub fn main_loop(config: Config, connection: Connection) -> Result<()> {
connection.sender.send(request.into()).unwrap(); connection.sender.send(request.into()).unwrap();
} }
GlobalState::new(workspaces, config.lru_capacity, &globs, config) GlobalState::new(workspaces, config.lru_capacity, config)
}; };
loop_state.roots_total = global_state.vfs.read().n_roots();
let pool = ThreadPool::default(); let pool = ThreadPool::default();
let (task_sender, task_receiver) = unbounded::<Task>(); let (task_sender, task_receiver) = unbounded::<Task>();
@ -192,7 +178,9 @@ pub fn main_loop(config: Config, connection: Connection) -> Result<()> {
break; break;
}; };
} }
assert!(!global_state.vfs.read().0.has_changes());
loop_turn(&pool, &task_sender, &connection, &mut global_state, &mut loop_state, event)?; loop_turn(&pool, &task_sender, &connection, &mut global_state, &mut loop_state, event)?;
assert!(!global_state.vfs.read().0.has_changes());
} }
} }
global_state.analysis_host.request_cancellation(); global_state.analysis_host.request_cancellation();
@ -222,7 +210,7 @@ enum Task {
enum Event { enum Event {
Msg(Message), Msg(Message),
Task(Task), Task(Task),
Vfs(VfsTask), Vfs(vfs::loader::Message),
CheckWatcher(CheckTask), CheckWatcher(CheckTask),
} }
@ -270,11 +258,20 @@ type Incoming = lsp_server::Incoming<(&'static str, Instant)>;
#[derive(Default)] #[derive(Default)]
struct LoopState { struct LoopState {
req_queue: ReqQueue<(&'static str, Instant), ReqHandler>, req_queue: ReqQueue<(&'static str, Instant), ReqHandler>,
subscriptions: Subscriptions, mem_docs: FxHashSet<VfsPath>,
workspace_loaded: bool, status: Status,
roots_progress_reported: Option<usize>, }
roots_scanned: usize,
roots_total: usize, #[derive(Eq, PartialEq)]
enum Status {
Loading,
Ready,
}
impl Default for Status {
fn default() -> Self {
Status::Loading
}
} }
fn loop_turn( fn loop_turn(
@ -295,14 +292,36 @@ fn loop_turn(
log::info!("queued count = {}", queue_count); log::info!("queued count = {}", queue_count);
} }
let mut became_ready = false;
match event { match event {
Event::Task(task) => { Event::Task(task) => {
on_task(task, &connection.sender, &mut loop_state.req_queue.incoming, global_state); on_task(task, &connection.sender, &mut loop_state.req_queue.incoming, global_state);
global_state.maybe_collect_garbage(); global_state.maybe_collect_garbage();
} }
Event::Vfs(task) => { Event::Vfs(task) => match task {
global_state.vfs.write().handle_task(task); vfs::loader::Message::Loaded { files } => {
} let vfs = &mut global_state.vfs.write().0;
for (path, contents) in files {
let path = VfsPath::from(path);
if !loop_state.mem_docs.contains(&path) {
vfs.set_file_contents(path, contents)
}
}
}
vfs::loader::Message::Progress { n_entries_total, n_entries_done } => {
if n_entries_done == n_entries_done {
loop_state.status = Status::Ready;
became_ready = true;
}
report_progress(
loop_state,
&connection.sender,
n_entries_done,
n_entries_total,
"roots scanned",
)
}
},
Event::CheckWatcher(task) => on_check_task(task, global_state, task_sender)?, Event::CheckWatcher(task) => on_check_task(task, global_state, task_sender)?,
Event::Msg(msg) => match msg { Event::Msg(msg) => match msg {
Message::Request(req) => on_request( Message::Request(req) => on_request(
@ -324,32 +343,29 @@ fn loop_turn(
}, },
}; };
let mut state_changed = global_state.process_changes(&mut loop_state.roots_scanned); let state_changed = global_state.process_changes();
let show_progress = if became_ready {
!loop_state.workspace_loaded && global_state.config.client_caps.work_done_progress;
if !loop_state.workspace_loaded && loop_state.roots_scanned == loop_state.roots_total {
state_changed = true;
loop_state.workspace_loaded = true;
if let Some(flycheck) = &global_state.flycheck { if let Some(flycheck) = &global_state.flycheck {
flycheck.update(); flycheck.update();
} }
} }
if show_progress { if loop_state.status == Status::Ready && (state_changed || became_ready) {
send_startup_progress(&connection.sender, loop_state); let subscriptions = loop_state
} .mem_docs
.iter()
.map(|path| global_state.vfs.read().0.file_id(&path).unwrap())
.collect::<Vec<_>>();
if state_changed && loop_state.workspace_loaded {
update_file_notifications_on_threadpool( update_file_notifications_on_threadpool(
pool, pool,
global_state.snapshot(), global_state.snapshot(),
task_sender.clone(), task_sender.clone(),
loop_state.subscriptions.subscriptions(), subscriptions.clone(),
); );
pool.execute({ pool.execute({
let subs = loop_state.subscriptions.subscriptions(); let subs = subscriptions;
let snap = global_state.snapshot(); let snap = global_state.snapshot();
move || snap.analysis().prime_caches(subs).unwrap_or_else(|_: Canceled| ()) move || snap.analysis().prime_caches(subs).unwrap_or_else(|_: Canceled| ())
}); });
@ -465,7 +481,7 @@ fn on_request(
fn on_notification( fn on_notification(
msg_sender: &Sender<Message>, msg_sender: &Sender<Message>,
state: &mut GlobalState, global_state: &mut GlobalState,
loop_state: &mut LoopState, loop_state: &mut LoopState,
not: Notification, not: Notification,
) -> Result<()> { ) -> Result<()> {
@ -484,12 +500,15 @@ fn on_notification(
}; };
let not = match notification_cast::<lsp_types::notification::DidOpenTextDocument>(not) { let not = match notification_cast::<lsp_types::notification::DidOpenTextDocument>(not) {
Ok(params) => { Ok(params) => {
let uri = params.text_document.uri; if let Ok(path) = from_proto::vfs_path(&params.text_document.uri) {
let path = uri.to_file_path().map_err(|()| format!("invalid uri: {}", uri))?; if !loop_state.mem_docs.insert(path.clone()) {
if let Some(file_id) = log::error!("duplicate DidOpenTextDocument: {}", path)
state.vfs.write().add_file_overlay(&path, params.text_document.text) }
{ global_state
loop_state.subscriptions.add_sub(FileId(file_id.0)); .vfs
.write()
.0
.set_file_contents(path, Some(params.text_document.text.into_bytes()));
} }
return Ok(()); return Ok(());
} }
@ -497,23 +516,13 @@ fn on_notification(
}; };
let not = match notification_cast::<lsp_types::notification::DidChangeTextDocument>(not) { let not = match notification_cast::<lsp_types::notification::DidChangeTextDocument>(not) {
Ok(params) => { Ok(params) => {
let DidChangeTextDocumentParams { text_document, content_changes } = params; if let Ok(path) = from_proto::vfs_path(&params.text_document.uri) {
let world = state.snapshot(); assert!(loop_state.mem_docs.contains(&path));
let file_id = from_proto::file_id(&world, &text_document.uri)?; let vfs = &mut global_state.vfs.write().0;
let line_index = world.analysis().file_line_index(file_id)?; let file_id = vfs.file_id(&path).unwrap();
let uri = text_document.uri; let mut text = String::from_utf8(vfs.file_contents(file_id).to_vec()).unwrap();
let path = uri.to_file_path().map_err(|()| format!("invalid uri: {}", uri))?; apply_document_changes(&mut text, params.content_changes);
state.vfs.write().change_file_overlay(&path, |old_text| { vfs.set_file_contents(path, Some(text.into_bytes()))
apply_document_changes(old_text, Cow::Borrowed(&line_index), content_changes);
});
return Ok(());
}
Err(not) => not,
};
let not = match notification_cast::<lsp_types::notification::DidSaveTextDocument>(not) {
Ok(_params) => {
if let Some(flycheck) = &state.flycheck {
flycheck.update();
} }
return Ok(()); return Ok(());
} }
@ -521,19 +530,34 @@ fn on_notification(
}; };
let not = match notification_cast::<lsp_types::notification::DidCloseTextDocument>(not) { let not = match notification_cast::<lsp_types::notification::DidCloseTextDocument>(not) {
Ok(params) => { Ok(params) => {
let uri = params.text_document.uri; if let Ok(path) = from_proto::vfs_path(&params.text_document.uri) {
let path = uri.to_file_path().map_err(|()| format!("invalid uri: {}", uri))?; if !loop_state.mem_docs.remove(&path) {
if let Some(file_id) = state.vfs.write().remove_file_overlay(path.as_path()) { log::error!("orphan DidCloseTextDocument: {}", path)
loop_state.subscriptions.remove_sub(FileId(file_id.0)); }
if let Some(path) = path.as_path() {
global_state.loader.invalidate(path.to_path_buf());
}
} }
let params = let params = lsp_types::PublishDiagnosticsParams {
lsp_types::PublishDiagnosticsParams { uri, diagnostics: Vec::new(), version: None }; uri: params.text_document.uri,
diagnostics: Vec::new(),
version: None,
};
let not = notification_new::<lsp_types::notification::PublishDiagnostics>(params); let not = notification_new::<lsp_types::notification::PublishDiagnostics>(params);
msg_sender.send(not.into()).unwrap(); msg_sender.send(not.into()).unwrap();
return Ok(()); return Ok(());
} }
Err(not) => not, Err(not) => not,
}; };
let not = match notification_cast::<lsp_types::notification::DidSaveTextDocument>(not) {
Ok(_params) => {
if let Some(flycheck) = &global_state.flycheck {
flycheck.update();
}
return Ok(());
}
Err(not) => not,
};
let not = match notification_cast::<lsp_types::notification::DidChangeConfiguration>(not) { let not = match notification_cast::<lsp_types::notification::DidChangeConfiguration>(not) {
Ok(_) => { Ok(_) => {
// As stated in https://github.com/microsoft/language-server-protocol/issues/676, // As stated in https://github.com/microsoft/language-server-protocol/issues/676,
@ -575,11 +599,10 @@ fn on_notification(
}; };
let not = match notification_cast::<lsp_types::notification::DidChangeWatchedFiles>(not) { let not = match notification_cast::<lsp_types::notification::DidChangeWatchedFiles>(not) {
Ok(params) => { Ok(params) => {
let mut vfs = state.vfs.write();
for change in params.changes { for change in params.changes {
let uri = change.uri; if let Ok(path) = from_proto::abs_path(&change.uri) {
let path = uri.to_file_path().map_err(|()| format!("invalid uri: {}", uri))?; global_state.loader.invalidate(path)
vfs.notify_changed(path) }
} }
return Ok(()); return Ok(());
} }
@ -594,9 +617,9 @@ fn on_notification(
fn apply_document_changes( fn apply_document_changes(
old_text: &mut String, old_text: &mut String,
mut line_index: Cow<'_, LineIndex>,
content_changes: Vec<TextDocumentContentChangeEvent>, content_changes: Vec<TextDocumentContentChangeEvent>,
) { ) {
let mut line_index = LineIndex::new(old_text);
// The changes we got must be applied sequentially, but can cross lines so we // The changes we got must be applied sequentially, but can cross lines so we
// have to keep our line index updated. // have to keep our line index updated.
// Some clients (e.g. Code) sort the ranges in reverse. As an optimization, we // Some clients (e.g. Code) sort the ranges in reverse. As an optimization, we
@ -621,7 +644,7 @@ fn apply_document_changes(
match change.range { match change.range {
Some(range) => { Some(range) => {
if !index_valid.covers(range.end.line) { if !index_valid.covers(range.end.line) {
line_index = Cow::Owned(LineIndex::new(&old_text)); line_index = LineIndex::new(&old_text);
} }
index_valid = IndexValid::UpToLineExclusive(range.start.line); index_valid = IndexValid::UpToLineExclusive(range.start.line);
let range = from_proto::text_range(&line_index, range); let range = from_proto::text_range(&line_index, range);
@ -652,18 +675,11 @@ fn on_check_task(
&workspace_root, &workspace_root,
); );
for diag in diagnostics { for diag in diagnostics {
let path = diag let path = from_proto::vfs_path(&diag.location.uri)?;
.location let file_id = match global_state.vfs.read().0.file_id(&path) {
.uri
.to_file_path()
.map_err(|()| format!("invalid uri: {}", diag.location.uri))?;
let file_id = match global_state.vfs.read().path2file(&path) {
Some(file) => FileId(file.0), Some(file) => FileId(file.0),
None => { None => {
log::error!( log::error!("File with cargo diagnostic not found in VFS: {}", path);
"File with cargo diagnostic not found in VFS: {}",
path.display()
);
return Ok(()); return Ok(());
} }
}; };
@ -679,7 +695,7 @@ fn on_check_task(
CheckTask::Status(status) => { CheckTask::Status(status) => {
if global_state.config.client_caps.work_done_progress { if global_state.config.client_caps.work_done_progress {
let progress = match status { let progress = match status {
Status::Being => { ra_flycheck::Status::Being => {
lsp_types::WorkDoneProgress::Begin(lsp_types::WorkDoneProgressBegin { lsp_types::WorkDoneProgress::Begin(lsp_types::WorkDoneProgressBegin {
title: "Running `cargo check`".to_string(), title: "Running `cargo check`".to_string(),
cancellable: Some(false), cancellable: Some(false),
@ -687,14 +703,14 @@ fn on_check_task(
percentage: None, percentage: None,
}) })
} }
Status::Progress(target) => { ra_flycheck::Status::Progress(target) => {
lsp_types::WorkDoneProgress::Report(lsp_types::WorkDoneProgressReport { lsp_types::WorkDoneProgress::Report(lsp_types::WorkDoneProgressReport {
cancellable: Some(false), cancellable: Some(false),
message: Some(target), message: Some(target),
percentage: None, percentage: None,
}) })
} }
Status::End => { ra_flycheck::Status::End => {
lsp_types::WorkDoneProgress::End(lsp_types::WorkDoneProgressEnd { lsp_types::WorkDoneProgress::End(lsp_types::WorkDoneProgressEnd {
message: None, message: None,
}) })
@ -720,7 +736,7 @@ fn on_diagnostic_task(task: DiagnosticTask, msg_sender: &Sender<Message>, state:
let subscriptions = state.diagnostics.handle_task(task); let subscriptions = state.diagnostics.handle_task(task);
for file_id in subscriptions { for file_id in subscriptions {
let url = file_id_to_url(&state.vfs.read(), file_id); let url = file_id_to_url(&state.vfs.read().0, file_id);
let diagnostics = state.diagnostics.diagnostics_for(file_id).cloned().collect(); let diagnostics = state.diagnostics.diagnostics_for(file_id).cloned().collect();
let params = lsp_types::PublishDiagnosticsParams { uri: url, diagnostics, version: None }; let params = lsp_types::PublishDiagnosticsParams { uri: url, diagnostics, version: None };
let not = notification_new::<lsp_types::notification::PublishDiagnostics>(params); let not = notification_new::<lsp_types::notification::PublishDiagnostics>(params);
@ -728,57 +744,46 @@ fn on_diagnostic_task(task: DiagnosticTask, msg_sender: &Sender<Message>, state:
} }
} }
fn send_startup_progress(sender: &Sender<Message>, loop_state: &mut LoopState) { fn report_progress(
let total: usize = loop_state.roots_total; loop_state: &mut LoopState,
let prev = loop_state.roots_progress_reported; sender: &Sender<Message>,
let progress = loop_state.roots_scanned; done: usize,
loop_state.roots_progress_reported = Some(progress); total: usize,
message: &str,
) {
let token = lsp_types::ProgressToken::String(format!("rustAnalyzer/{}", message));
let message = Some(format!("{}/{} {}", done, total, message));
let percentage = Some(100.0 * done as f64 / total.max(1) as f64);
let work_done_progress = if done == 0 {
let work_done_progress_create = loop_state.req_queue.outgoing.register(
lsp_types::request::WorkDoneProgressCreate::METHOD.to_string(),
lsp_types::WorkDoneProgressCreateParams { token: token.clone() },
DO_NOTHING,
);
sender.send(work_done_progress_create.into()).unwrap();
match (prev, loop_state.workspace_loaded) { lsp_types::WorkDoneProgress::Begin(lsp_types::WorkDoneProgressBegin {
(None, false) => { title: "rust-analyzer".into(),
let request = loop_state.req_queue.outgoing.register( cancellable: None,
lsp_types::request::WorkDoneProgressCreate::METHOD.to_string(), message,
WorkDoneProgressCreateParams { percentage,
token: lsp_types::ProgressToken::String("rustAnalyzer/startup".into()), })
}, } else if done < total {
DO_NOTHING, lsp_types::WorkDoneProgress::Report(lsp_types::WorkDoneProgressReport {
); cancellable: None,
sender.send(request.into()).unwrap(); message,
send_startup_progress_notif( percentage,
sender, })
WorkDoneProgress::Begin(WorkDoneProgressBegin { } else {
title: "rust-analyzer".into(), assert!(done == total);
cancellable: None, lsp_types::WorkDoneProgress::End(lsp_types::WorkDoneProgressEnd { message })
message: Some(format!("{}/{} packages", progress, total)), };
percentage: Some(100.0 * progress as f64 / total as f64), let notification =
}), notification_new::<lsp_types::notification::Progress>(lsp_types::ProgressParams {
); token,
} value: lsp_types::ProgressParamsValue::WorkDone(work_done_progress),
(Some(prev), false) if progress != prev => send_startup_progress_notif( });
sender, sender.send(notification.into()).unwrap();
WorkDoneProgress::Report(WorkDoneProgressReport {
cancellable: None,
message: Some(format!("{}/{} packages", progress, total)),
percentage: Some(100.0 * progress as f64 / total as f64),
}),
),
(_, true) => send_startup_progress_notif(
sender,
WorkDoneProgress::End(WorkDoneProgressEnd {
message: Some(format!("rust-analyzer loaded, {} packages", progress)),
}),
),
_ => {}
}
fn send_startup_progress_notif(sender: &Sender<Message>, work_done_progress: WorkDoneProgress) {
let notif =
notification_new::<lsp_types::notification::Progress>(lsp_types::ProgressParams {
token: lsp_types::ProgressToken::String("rustAnalyzer/startup".into()),
value: lsp_types::ProgressParamsValue::WorkDone(work_done_progress),
});
sender.send(notif.into()).unwrap();
}
} }
struct PoolDispatcher<'a> { struct PoolDispatcher<'a> {
@ -976,18 +981,12 @@ where
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use std::borrow::Cow;
use lsp_types::{Position, Range, TextDocumentContentChangeEvent}; use lsp_types::{Position, Range, TextDocumentContentChangeEvent};
use ra_ide::LineIndex;
use super::*;
#[test] #[test]
fn apply_document_changes() { fn test_apply_document_changes() {
fn run(text: &mut String, changes: Vec<TextDocumentContentChangeEvent>) {
let line_index = Cow::Owned(LineIndex::new(&text));
super::apply_document_changes(text, line_index, changes);
}
macro_rules! c { macro_rules! c {
[$($sl:expr, $sc:expr; $el:expr, $ec:expr => $text:expr),+] => { [$($sl:expr, $sc:expr; $el:expr, $ec:expr => $text:expr),+] => {
vec![$(TextDocumentContentChangeEvent { vec![$(TextDocumentContentChangeEvent {
@ -1002,9 +1001,9 @@ mod tests {
} }
let mut text = String::new(); let mut text = String::new();
run(&mut text, vec![]); apply_document_changes(&mut text, vec![]);
assert_eq!(text, ""); assert_eq!(text, "");
run( apply_document_changes(
&mut text, &mut text,
vec![TextDocumentContentChangeEvent { vec![TextDocumentContentChangeEvent {
range: None, range: None,
@ -1013,36 +1012,39 @@ mod tests {
}], }],
); );
assert_eq!(text, "the"); assert_eq!(text, "the");
run(&mut text, c![0, 3; 0, 3 => " quick"]); apply_document_changes(&mut text, c![0, 3; 0, 3 => " quick"]);
assert_eq!(text, "the quick"); assert_eq!(text, "the quick");
run(&mut text, c![0, 0; 0, 4 => "", 0, 5; 0, 5 => " foxes"]); apply_document_changes(&mut text, c![0, 0; 0, 4 => "", 0, 5; 0, 5 => " foxes"]);
assert_eq!(text, "quick foxes"); assert_eq!(text, "quick foxes");
run(&mut text, c![0, 11; 0, 11 => "\ndream"]); apply_document_changes(&mut text, c![0, 11; 0, 11 => "\ndream"]);
assert_eq!(text, "quick foxes\ndream"); assert_eq!(text, "quick foxes\ndream");
run(&mut text, c![1, 0; 1, 0 => "have "]); apply_document_changes(&mut text, c![1, 0; 1, 0 => "have "]);
assert_eq!(text, "quick foxes\nhave dream"); assert_eq!(text, "quick foxes\nhave dream");
run(&mut text, c![0, 0; 0, 0 => "the ", 1, 4; 1, 4 => " quiet", 1, 16; 1, 16 => "s\n"]); apply_document_changes(
&mut text,
c![0, 0; 0, 0 => "the ", 1, 4; 1, 4 => " quiet", 1, 16; 1, 16 => "s\n"],
);
assert_eq!(text, "the quick foxes\nhave quiet dreams\n"); assert_eq!(text, "the quick foxes\nhave quiet dreams\n");
run(&mut text, c![0, 15; 0, 15 => "\n", 2, 17; 2, 17 => "\n"]); apply_document_changes(&mut text, c![0, 15; 0, 15 => "\n", 2, 17; 2, 17 => "\n"]);
assert_eq!(text, "the quick foxes\n\nhave quiet dreams\n\n"); assert_eq!(text, "the quick foxes\n\nhave quiet dreams\n\n");
run( apply_document_changes(
&mut text, &mut text,
c![1, 0; 1, 0 => "DREAM", 2, 0; 2, 0 => "they ", 3, 0; 3, 0 => "DON'T THEY?"], c![1, 0; 1, 0 => "DREAM", 2, 0; 2, 0 => "they ", 3, 0; 3, 0 => "DON'T THEY?"],
); );
assert_eq!(text, "the quick foxes\nDREAM\nthey have quiet dreams\nDON'T THEY?\n"); assert_eq!(text, "the quick foxes\nDREAM\nthey have quiet dreams\nDON'T THEY?\n");
run(&mut text, c![0, 10; 1, 5 => "", 2, 0; 2, 12 => ""]); apply_document_changes(&mut text, c![0, 10; 1, 5 => "", 2, 0; 2, 12 => ""]);
assert_eq!(text, "the quick \nthey have quiet dreams\n"); assert_eq!(text, "the quick \nthey have quiet dreams\n");
text = String::from("❤️"); text = String::from("❤️");
run(&mut text, c![0, 0; 0, 0 => "a"]); apply_document_changes(&mut text, c![0, 0; 0, 0 => "a"]);
assert_eq!(text, "a❤"); assert_eq!(text, "a❤");
text = String::from("a\nb"); text = String::from("a\nb");
run(&mut text, c![0, 1; 1, 0 => "\nțc", 0, 1; 1, 1 => "d"]); apply_document_changes(&mut text, c![0, 1; 1, 0 => "\nțc", 0, 1; 1, 1 => "d"]);
assert_eq!(text, "adcb"); assert_eq!(text, "adcb");
text = String::from("a\nb"); text = String::from("a\nb");
run(&mut text, c![0, 1; 1, 0 => "ț\nc", 0, 2; 0, 2 => "c"]); apply_document_changes(&mut text, c![0, 1; 1, 0 => "ț\nc", 0, 2; 0, 2 => "c"]);
assert_eq!(text, "ațc\ncb"); assert_eq!(text, "ațc\ncb");
} }
} }

View file

@ -396,7 +396,6 @@ pub fn handle_runnables(
let line_index = snap.analysis().file_line_index(file_id)?; let line_index = snap.analysis().file_line_index(file_id)?;
let offset = params.position.map(|it| from_proto::offset(&line_index, it)); let offset = params.position.map(|it| from_proto::offset(&line_index, it));
let mut res = Vec::new(); let mut res = Vec::new();
let workspace_root = snap.workspace_root_for(file_id);
let cargo_spec = CargoTargetSpec::for_file(&snap, file_id)?; let cargo_spec = CargoTargetSpec::for_file(&snap, file_id)?;
for runnable in snap.analysis().runnables(file_id)? { for runnable in snap.analysis().runnables(file_id)? {
if let Some(offset) = offset { if let Some(offset) = offset {
@ -420,7 +419,7 @@ pub fn handle_runnables(
location: None, location: None,
kind: lsp_ext::RunnableKind::Cargo, kind: lsp_ext::RunnableKind::Cargo,
args: lsp_ext::CargoRunnable { args: lsp_ext::CargoRunnable {
workspace_root: workspace_root.map(|root| root.to_owned()), workspace_root: Some(spec.workspace_root.clone()),
cargo_args: vec![ cargo_args: vec![
cmd.to_string(), cmd.to_string(),
"--package".to_string(), "--package".to_string(),
@ -437,7 +436,7 @@ pub fn handle_runnables(
location: None, location: None,
kind: lsp_ext::RunnableKind::Cargo, kind: lsp_ext::RunnableKind::Cargo,
args: lsp_ext::CargoRunnable { args: lsp_ext::CargoRunnable {
workspace_root: workspace_root.map(|root| root.to_owned()), workspace_root: None,
cargo_args: vec!["check".to_string(), "--workspace".to_string()], cargo_args: vec!["check".to_string(), "--workspace".to_string()],
executable_args: Vec::new(), executable_args: Vec::new(),
}, },

View file

@ -1,22 +0,0 @@
//! Keeps track of file subscriptions -- the set of currently opened files for
//! which we want to publish diagnostics, syntax highlighting, etc.
use ra_ide::FileId;
use rustc_hash::FxHashSet;
/// The set of files the client currently has open — the files for which we
/// publish diagnostics, syntax highlighting, etc.
#[derive(Default, Debug)]
pub(crate) struct Subscriptions {
    files: FxHashSet<FileId>,
}

impl Subscriptions {
    /// Starts tracking `file_id`.
    pub(crate) fn add_sub(&mut self, file_id: FileId) {
        self.files.insert(file_id);
    }

    /// Stops tracking `file_id` (a no-op if it was never tracked).
    pub(crate) fn remove_sub(&mut self, file_id: FileId) {
        self.files.remove(&file_id);
    }

    /// Returns a snapshot of all currently tracked files (unspecified order).
    pub(crate) fn subscriptions(&self) -> Vec<FileId> {
        self.files.iter().copied().collect()
    }
}

View file

@ -10,11 +10,10 @@ use ra_ide::{
ResolvedAssist, Runnable, Severity, SourceChange, SourceFileEdit, TextEdit, ResolvedAssist, Runnable, Severity, SourceChange, SourceFileEdit, TextEdit,
}; };
use ra_syntax::{SyntaxKind, TextRange, TextSize}; use ra_syntax::{SyntaxKind, TextRange, TextSize};
use ra_vfs::LineEndings;
use crate::{ use crate::{
cargo_target_spec::CargoTargetSpec, global_state::GlobalStateSnapshot, lsp_ext, cargo_target_spec::CargoTargetSpec, global_state::GlobalStateSnapshot,
semantic_tokens, Result, line_endings::LineEndings, lsp_ext, semantic_tokens, Result,
}; };
pub(crate) fn position(line_index: &LineIndex, offset: TextSize) -> lsp_types::Position { pub(crate) fn position(line_index: &LineIndex, offset: TextSize) -> lsp_types::Position {
@ -650,6 +649,7 @@ pub(crate) fn runnable(
runnable: Runnable, runnable: Runnable,
) -> Result<lsp_ext::Runnable> { ) -> Result<lsp_ext::Runnable> {
let spec = CargoTargetSpec::for_file(snap, file_id)?; let spec = CargoTargetSpec::for_file(snap, file_id)?;
let workspace_root = spec.as_ref().map(|it| it.workspace_root.clone());
let target = spec.as_ref().map(|s| s.target.clone()); let target = spec.as_ref().map(|s| s.target.clone());
let (cargo_args, executable_args) = let (cargo_args, executable_args) =
CargoTargetSpec::runnable_args(spec, &runnable.kind, &runnable.cfg_exprs)?; CargoTargetSpec::runnable_args(spec, &runnable.kind, &runnable.cfg_exprs)?;
@ -661,7 +661,7 @@ pub(crate) fn runnable(
location: Some(location), location: Some(location),
kind: lsp_ext::RunnableKind::Cargo, kind: lsp_ext::RunnableKind::Cargo,
args: lsp_ext::CargoRunnable { args: lsp_ext::CargoRunnable {
workspace_root: snap.workspace_root_for(file_id).map(|root| root.to_owned()), workspace_root: workspace_root,
cargo_args, cargo_args,
executable_args, executable_args,
}, },

View file

@ -1,98 +0,0 @@
//! Exclusion rules for vfs.
//!
//! By default, we include only `.rs` files, and skip some known offenders like
//! `/target` or `/node_modules` altogether.
//!
//! It's also possible to add custom exclusion globs.
use globset::{GlobSet, GlobSetBuilder};
use ra_vfs::{Filter, RelativePath};
pub use globset::{Glob, GlobBuilder};
const ALWAYS_IGNORED: &[&str] = &["target/**", "**/node_modules/**", "**/.git/**"];
const IGNORED_FOR_NON_MEMBERS: &[&str] = &["examples/**", "tests/**", "benches/**"];
/// Builder for the vfs `Filter` implementing this module's inclusion rules.
pub struct RustPackageFilterBuilder {
    // Whether the package is a workspace member; members keep `tests/`,
    // `examples/` and `benches/` (see `IGNORED_FOR_NON_MEMBERS`).
    is_member: bool,
    // Accumulates custom exclusion globs; the built-in ignores are added
    // later, in `into_vfs_filter`.
    exclude: GlobSetBuilder,
}

impl Default for RustPackageFilterBuilder {
    // Starts from a non-member package with no custom exclusions.
    fn default() -> RustPackageFilterBuilder {
        RustPackageFilterBuilder { is_member: false, exclude: GlobSetBuilder::new() }
    }
}
impl RustPackageFilterBuilder {
    /// Marks the package as a workspace member. Members keep `tests/`,
    /// `examples/` and `benches/`; non-members have them excluded.
    pub fn set_member(mut self, is_member: bool) -> RustPackageFilterBuilder {
        self.is_member = is_member;
        self
    }

    /// Adds custom exclusion globs on top of the built-in ones.
    pub fn exclude(mut self, globs: impl IntoIterator<Item = Glob>) -> RustPackageFilterBuilder {
        // `for` already desugars through `IntoIterator`; the explicit
        // `.into_iter()` call was redundant.
        for glob in globs {
            self.exclude.add(glob);
        }
        self
    }

    /// Finalizes the builder into a boxed vfs `Filter`.
    ///
    /// `target/`, `node_modules/` and `.git/` are always excluded
    /// (`ALWAYS_IGNORED`); for non-member packages `examples/`, `tests/` and
    /// `benches/` are excluded as well (`IGNORED_FOR_NON_MEMBERS`).
    ///
    /// # Panics
    ///
    /// Panics if the accumulated glob set fails to build (the built-in globs
    /// themselves are known-valid).
    pub fn into_vfs_filter(self) -> Box<dyn Filter> {
        let RustPackageFilterBuilder { is_member, mut exclude } = self;
        for &glob in ALWAYS_IGNORED {
            exclude.add(Glob::new(glob).unwrap());
        }
        if !is_member {
            for &glob in IGNORED_FOR_NON_MEMBERS {
                exclude.add(Glob::new(glob).unwrap());
            }
        }
        Box::new(RustPackageFilter { exclude: exclude.build().unwrap() })
    }
}
/// The concrete `Filter`: directories are kept unless they match an
/// exclusion glob, and only `.rs` files are included.
struct RustPackageFilter {
    exclude: GlobSet,
}

impl Filter for RustPackageFilter {
    fn include_dir(&self, dir_path: &RelativePath) -> bool {
        let excluded = self.exclude.is_match(dir_path.as_str());
        !excluded
    }

    fn include_file(&self, file_path: &RelativePath) -> bool {
        // Only Rust sources make it into the vfs.
        matches!(file_path.extension(), Some("rs"))
    }
}
#[test]
fn test_globs() {
    // Workspace members keep `tests/`, `benches/` and nested directories,
    // but the global ignores (`target/`, `.git/`, `node_modules/`) apply.
    let member_filter = RustPackageFilterBuilder::default().set_member(true).into_vfs_filter();
    for dir in ["src/tests", "src/target", "tests", "benches"] {
        assert!(member_filter.include_dir(RelativePath::new(dir)));
    }
    for dir in ["target", "src/foo/.git", "foo/node_modules"] {
        assert!(!member_filter.include_dir(RelativePath::new(dir)));
    }

    // Non-members additionally lose top-level `tests/` and `benches/`
    // (nested ones like `src/tests` are still included).
    let non_member_filter =
        RustPackageFilterBuilder::default().set_member(false).into_vfs_filter();
    for dir in ["src/tests", "src/target"] {
        assert!(non_member_filter.include_dir(RelativePath::new(dir)));
    }
    for dir in ["target", "src/foo/.git", "foo/node_modules", "tests", "benches"] {
        assert!(!non_member_filter.include_dir(RelativePath::new(dir)));
    }

    // Custom exclusion globs are honoured on top of the defaults.
    let custom_filter = RustPackageFilterBuilder::default()
        .set_member(true)
        .exclude(std::iter::once(Glob::new("src/llvm-project/**").unwrap()))
        .into_vfs_filter();
    assert!(!custom_filter.include_dir(RelativePath::new("src/llvm-project/clang")));
}

View file

@ -52,7 +52,7 @@ use std::collections::Spam;
partial_result_params: PartialResultParams::default(), partial_result_params: PartialResultParams::default(),
work_done_progress_params: WorkDoneProgressParams::default(), work_done_progress_params: WorkDoneProgressParams::default(),
}); });
assert!(format!("{}", res).contains("HashMap")); assert!(res.to_string().contains("HashMap"));
eprintln!("completion took {:?}", completion_start.elapsed()); eprintln!("completion took {:?}", completion_start.elapsed());
} }

View file

@ -212,7 +212,7 @@ impl Server {
ProgressParams { ProgressParams {
token: lsp_types::ProgressToken::String(ref token), token: lsp_types::ProgressToken::String(ref token),
value: ProgressParamsValue::WorkDone(WorkDoneProgress::End(_)), value: ProgressParamsValue::WorkDone(WorkDoneProgress::End(_)),
} if token == "rustAnalyzer/startup" => true, } if token == "rustAnalyzer/roots scanned" => true,
_ => false, _ => false,
} }
} }

View file

@ -0,0 +1,17 @@
[package]
name = "vfs-notify"
version = "0.1.0"
authors = ["rust-analyzer developers"]
edition = "2018"
[dependencies]
log = "0.4.8"
rustc-hash = "1.0"
jod-thread = "0.1.0"
walkdir = "2.3.1"
globset = "0.4.5"
crossbeam-channel = "0.4.0"
notify = "5.0.0-pre.3"
vfs = { path = "../vfs" }
paths = { path = "../paths" }

View file

@ -0,0 +1,43 @@
//! See `Include`.
use std::convert::TryFrom;
use globset::{Glob, GlobSet, GlobSetBuilder};
use paths::{RelPath, RelPathBuf};
/// `Include` is the opposite of .gitignore.
///
/// It describes the set of files inside some directory.
///
/// The current implementation is very limited: it allows white-listing file
/// globs and black-listing directories.
#[derive(Debug, Clone)]
pub(crate) struct Include {
    include_files: GlobSet,
    exclude_dirs: Vec<RelPathBuf>,
}

impl Include {
    /// Builds an `Include` from a list of patterns.
    ///
    /// Entries starting with `"!/"` are directory exclusions relative to the
    /// root (entries that are not valid relative paths are silently dropped);
    /// everything else is treated as a file-inclusion glob.
    ///
    /// # Panics
    ///
    /// Panics if a file glob fails to parse or the glob set fails to build.
    pub(crate) fn new(include: Vec<String>) -> Include {
        let mut include_files = GlobSetBuilder::new();
        let mut exclude_dirs = Vec::new();
        for glob in include {
            // `strip_prefix` both tests for and removes the `!/` marker in
            // one step, replacing the manual `starts_with` + index slicing.
            if let Some(excluded) = glob.strip_prefix("!/") {
                if let Ok(path) = RelPathBuf::try_from(excluded) {
                    exclude_dirs.push(path)
                }
            } else {
                include_files.add(Glob::new(&glob).unwrap());
            }
        }
        let include_files = include_files.build().unwrap();
        Include { include_files, exclude_dirs }
    }
    /// Returns `true` if `path` matches one of the file-inclusion globs.
    pub(crate) fn include_file(&self, path: &RelPath) -> bool {
        self.include_files.is_match(path)
    }
    /// Returns `true` if `path` is inside one of the excluded directories.
    pub(crate) fn exclude_dir(&self, path: &RelPath) -> bool {
        self.exclude_dirs.iter().any(|excluded| path.starts_with(excluded))
    }
}

View file

@ -0,0 +1,247 @@
//! An implementation of `loader::Handle`, based on `walkdir` and `notify`.
//!
//! The file watching bits here are untested and quite probably buggy. For this
//! reason, by default we don't watch files and rely on editor's file watching
//! capabilities.
//!
//! Hopefully, one day a reliable file watching/walking crate appears on
//! crates.io, and we can reduce this to trivial glue code.
mod include;
use std::convert::{TryFrom, TryInto};
use crossbeam_channel::{select, unbounded, Receiver};
use notify::{RecommendedWatcher, RecursiveMode, Watcher};
use paths::{AbsPath, AbsPathBuf};
use rustc_hash::FxHashSet;
use vfs::loader;
use walkdir::WalkDir;
use crate::include::Include;
/// A `loader::Handle` implementation backed by a dedicated actor thread
/// (see `LoaderActor`).
#[derive(Debug)]
pub struct LoaderHandle {
    // Relative order of fields below is significant.
    //
    // NOTE(review): dropping `sender` first closes the channel, which
    // presumably lets the actor's loop exit before `_thread` joins on drop —
    // confirm against `LoaderActor::next_event`.
    sender: crossbeam_channel::Sender<Message>,
    _thread: jod_thread::JoinHandle,
}
/// Requests sent from `LoaderHandle` to the actor thread.
#[derive(Debug)]
enum Message {
    /// Replace the set of loaded/watched entries with a new configuration.
    Config(loader::Config),
    /// Re-read a single file and report its (possibly absent) contents.
    Invalidate(AbsPathBuf),
}
impl loader::Handle for LoaderHandle {
    /// Spawns the actor thread and returns a handle for sending it requests.
    fn spawn(sender: loader::Sender) -> LoaderHandle {
        let actor = LoaderActor::new(sender);
        let (msg_tx, msg_rx) = unbounded::<Message>();
        let actor_thread = jod_thread::spawn(move || actor.run(msg_rx));
        LoaderHandle { sender: msg_tx, _thread: actor_thread }
    }

    /// Asks the actor to load a new set of entries.
    fn set_config(&mut self, config: loader::Config) {
        self.sender.send(Message::Config(config)).unwrap()
    }

    /// Asks the actor to re-read a single file.
    fn invalidate(&mut self, path: AbsPathBuf) {
        self.sender.send(Message::Invalidate(path)).unwrap();
    }

    /// Reads a file synchronously on the calling thread, bypassing the actor.
    fn load_sync(&mut self, path: &AbsPathBuf) -> Option<Vec<u8>> {
        read(path)
    }
}
// A single event (or error) reported by the `notify` file watcher.
type NotifyEvent = notify::Result<notify::Event>;

/// The actor-thread side of `LoaderHandle`: owns the file-watcher state and
/// delivers `loader::Message`s through `sender`.
struct LoaderActor {
    // Loaded directory entries as (root path, include rules, watch?). Kept
    // sorted by root path so event handling can binary-search for the entry
    // governing a changed path.
    config: Vec<(AbsPathBuf, Include, bool)>,
    // Paths currently registered with the watcher, remembered so they can be
    // unregistered on reconfiguration (`unwatch_all`).
    watched_paths: FxHashSet<AbsPathBuf>,
    // Callback used to push `loader::Message`s out of the actor.
    sender: loader::Sender,
    // Drop order of fields below is significant.
    watcher: Option<RecommendedWatcher>,
    watcher_receiver: Receiver<NotifyEvent>,
}
/// The two input sources the actor thread multiplexes over.
#[derive(Debug)]
enum Event {
    /// A request from the `LoaderHandle`.
    Message(Message),
    /// A filesystem notification (or error) from the `notify` watcher.
    NotifyEvent(NotifyEvent),
}
impl LoaderActor {
    // Creates the actor with an (optional) watcher; if the watcher fails to
    // initialize, the error is logged and the actor runs without file
    // watching.
    fn new(sender: loader::Sender) -> LoaderActor {
        let (watcher_sender, watcher_receiver) = unbounded();
        let watcher = log_notify_error(Watcher::new_immediate(move |event| {
            watcher_sender.send(event).unwrap()
        }));
        LoaderActor {
            watcher,
            watcher_receiver,
            watched_paths: FxHashSet::default(),
            sender,
            config: Vec::new(),
        }
    }

    // Main loop: processes handle requests and watcher notifications until
    // the handle side of the channel is dropped (`next_event` returns None).
    fn run(mut self, receiver: Receiver<Message>) {
        while let Some(event) = self.next_event(&receiver) {
            log::debug!("vfs-notify event: {:?}", event);
            match event {
                Event::Message(msg) => match msg {
                    Message::Config(config) => {
                        // Full reload: report 0/N progress, drop all watches
                        // and config, then load each entry in turn, emitting
                        // a Loaded batch and a progress update per entry.
                        let n_entries_total = config.load.len();
                        self.send(loader::Message::Progress { n_entries_total, n_entries_done: 0 });
                        self.unwatch_all();
                        self.config.clear();
                        for (i, entry) in config.load.into_iter().enumerate() {
                            // `config.watch` lists the *indices* of entries
                            // that should be watched.
                            let watch = config.watch.contains(&i);
                            let files = self.load_entry(entry, watch);
                            self.send(loader::Message::Loaded { files });
                            self.send(loader::Message::Progress {
                                n_entries_total,
                                n_entries_done: i + 1,
                            });
                        }
                        // `load_entry` pushes directory entries unsorted;
                        // sort by root so event handling can binary-search.
                        self.config.sort_by(|x, y| x.0.cmp(&y.0));
                    }
                    Message::Invalidate(path) => {
                        // `contents == None` signals the file is gone/unreadable.
                        let contents = read(path.as_path());
                        let files = vec![(path, contents)];
                        self.send(loader::Message::Loaded { files });
                    }
                },
                Event::NotifyEvent(event) => {
                    if let Some(event) = log_notify_error(event) {
                        let files = event
                            .paths
                            .into_iter()
                            .map(|path| AbsPathBuf::try_from(path).unwrap())
                            .filter_map(|path| {
                                let is_dir = path.is_dir();
                                let is_file = path.is_file();
                                // Find the config entry whose root governs
                                // this path: on a miss, the candidate is the
                                // entry just *before* the insertion point.
                                let config_idx =
                                    match self.config.binary_search_by(|it| it.0.cmp(&path)) {
                                        Ok(it) => it,
                                        Err(it) => it.saturating_sub(1),
                                    };
                                // Only applies if the path is actually under
                                // that root (strip_prefix succeeds).
                                let include = self.config.get(config_idx).and_then(|it| {
                                    let rel_path = path.strip_prefix(&it.0)?;
                                    Some((rel_path, &it.1))
                                });
                                if let Some((rel_path, include)) = include {
                                    if is_dir && include.exclude_dir(&rel_path)
                                        || is_file && !include.include_file(&rel_path)
                                    {
                                        return None;
                                    }
                                }
                                if is_dir {
                                    // Newly created directories need a watch
                                    // of their own (watches are NonRecursive).
                                    self.watch(path);
                                    return None;
                                }
                                if !is_file {
                                    return None;
                                }
                                let contents = read(&path);
                                Some((path, contents))
                            })
                            .collect();
                        self.send(loader::Message::Loaded { files })
                    }
                }
            }
        }
    }

    // Blocks for the next input from either the handle or the watcher.
    // Returns None once the handle channel is closed, ending `run`.
    fn next_event(&self, receiver: &Receiver<Message>) -> Option<Event> {
        select! {
            recv(receiver) -> it => it.ok().map(Event::Message),
            recv(&self.watcher_receiver) -> it => Some(Event::NotifyEvent(it.unwrap())),
        }
    }

    // Loads one entry, optionally registering watches, and returns the
    // (path, contents) pairs found. Directory entries are also recorded in
    // `self.config` (unsorted; the caller sorts afterwards).
    fn load_entry(
        &mut self,
        entry: loader::Entry,
        watch: bool,
    ) -> Vec<(AbsPathBuf, Option<Vec<u8>>)> {
        match entry {
            loader::Entry::Files(files) => files
                .into_iter()
                .map(|file| {
                    if watch {
                        self.watch(file.clone())
                    }
                    let contents = read(file.as_path());
                    (file, contents)
                })
                .collect::<Vec<_>>(),
            loader::Entry::Directory { path, include } => {
                let include = Include::new(include);
                self.config.push((path.clone(), include.clone(), watch));
                let files = WalkDir::new(&path)
                    .into_iter()
                    // Prune excluded directories before descending into them.
                    .filter_entry(|entry| {
                        let abs_path: &AbsPath = entry.path().try_into().unwrap();
                        match abs_path.strip_prefix(&path) {
                            Some(rel_path) => {
                                !(entry.file_type().is_dir() && include.exclude_dir(rel_path))
                            }
                            None => false,
                        }
                    })
                    .filter_map(|entry| entry.ok())
                    .filter_map(|entry| {
                        let is_dir = entry.file_type().is_dir();
                        let is_file = entry.file_type().is_file();
                        let abs_path = AbsPathBuf::try_from(entry.into_path()).unwrap();
                        if is_dir {
                            // Watches are NonRecursive, so every surviving
                            // subdirectory gets its own watch.
                            self.watch(abs_path.clone());
                        }
                        let rel_path = abs_path.strip_prefix(&path)?;
                        if is_file && include.include_file(&rel_path) {
                            Some(abs_path)
                        } else {
                            None
                        }
                    });
                files
                    .map(|file| {
                        let contents = read(file.as_path());
                        (file, contents)
                    })
                    .collect()
            }
        }
    }

    // Registers a non-recursive watch for `path` (no-op when the watcher
    // failed to initialize); errors are logged, not propagated.
    fn watch(&mut self, path: AbsPathBuf) {
        if let Some(watcher) = &mut self.watcher {
            log_notify_error(watcher.watch(&path, RecursiveMode::NonRecursive));
            self.watched_paths.insert(path);
        }
    }

    // Removes every registered watch; used when a new config arrives.
    fn unwatch_all(&mut self) {
        if let Some(watcher) = &mut self.watcher {
            for path in self.watched_paths.drain() {
                log_notify_error(watcher.unwatch(path));
            }
        }
    }

    // Delivers a message via the stored callback.
    fn send(&mut self, msg: loader::Message) {
        (self.sender)(msg)
    }
}
/// Best-effort file read: any I/O error is treated as "no contents".
fn read(path: &AbsPath) -> Option<Vec<u8>> {
    match std::fs::read(path) {
        Ok(contents) => Some(contents),
        Err(_) => None,
    }
}
fn log_notify_error<T>(res: notify::Result<T>) -> Option<T> {
res.map_err(|err| log::warn!("notify error: {}", err)).ok()
}

View file

@ -6,9 +6,5 @@ edition = "2018"
[dependencies] [dependencies]
rustc-hash = "1.0" rustc-hash = "1.0"
jod-thread = "0.1.0"
walkdir = "2.3.1"
globset = "0.4.5"
crossbeam-channel = "0.4.0"
paths = { path = "../paths" } paths = { path = "../paths" }

View file

@ -4,7 +4,6 @@
//! the default `FileSet`. //! the default `FileSet`.
use std::{fmt, iter}; use std::{fmt, iter};
use paths::AbsPathBuf;
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use crate::{FileId, Vfs, VfsPath}; use crate::{FileId, Vfs, VfsPath};
@ -41,7 +40,7 @@ impl fmt::Debug for FileSet {
#[derive(Debug)] #[derive(Debug)]
pub struct FileSetConfig { pub struct FileSetConfig {
n_file_sets: usize, n_file_sets: usize,
roots: Vec<(AbsPathBuf, usize)>, roots: Vec<(VfsPath, usize)>,
} }
impl Default for FileSetConfig { impl Default for FileSetConfig {
@ -66,11 +65,7 @@ impl FileSetConfig {
self.n_file_sets self.n_file_sets
} }
fn classify(&self, path: &VfsPath) -> usize { fn classify(&self, path: &VfsPath) -> usize {
let path = match path.as_path() { let idx = match self.roots.binary_search_by(|(p, _)| p.cmp(path)) {
Some(it) => it,
None => return self.len() - 1,
};
let idx = match self.roots.binary_search_by(|(p, _)| p.as_path().cmp(path)) {
Ok(it) => it, Ok(it) => it,
Err(it) => it.saturating_sub(1), Err(it) => it.saturating_sub(1),
}; };
@ -83,7 +78,7 @@ impl FileSetConfig {
} }
pub struct FileSetConfigBuilder { pub struct FileSetConfigBuilder {
roots: Vec<Vec<AbsPathBuf>>, roots: Vec<Vec<VfsPath>>,
} }
impl Default for FileSetConfigBuilder { impl Default for FileSetConfigBuilder {
@ -96,12 +91,12 @@ impl FileSetConfigBuilder {
pub fn len(&self) -> usize { pub fn len(&self) -> usize {
self.roots.len() self.roots.len()
} }
pub fn add_file_set(&mut self, roots: Vec<AbsPathBuf>) { pub fn add_file_set(&mut self, roots: Vec<VfsPath>) {
self.roots.push(roots) self.roots.push(roots)
} }
pub fn build(self) -> FileSetConfig { pub fn build(self) -> FileSetConfig {
let n_file_sets = self.roots.len() + 1; let n_file_sets = self.roots.len() + 1;
let mut roots: Vec<(AbsPathBuf, usize)> = self let mut roots: Vec<(VfsPath, usize)> = self
.roots .roots
.into_iter() .into_iter()
.enumerate() .enumerate()

View file

@ -38,7 +38,6 @@ mod vfs_path;
mod path_interner; mod path_interner;
pub mod file_set; pub mod file_set;
pub mod loader; pub mod loader;
pub mod walkdir_loader;
use std::{fmt, mem}; use std::{fmt, mem};

View file

@ -3,19 +3,20 @@ use std::fmt;
use paths::AbsPathBuf; use paths::AbsPathBuf;
#[derive(Debug)]
pub enum Entry { pub enum Entry {
Files(Vec<AbsPathBuf>), Files(Vec<AbsPathBuf>),
Directory { path: AbsPathBuf, globs: Vec<String> }, Directory { path: AbsPathBuf, include: Vec<String> },
} }
#[derive(Debug)]
pub struct Config { pub struct Config {
pub load: Vec<Entry>, pub load: Vec<Entry>,
pub watch: Vec<usize>, pub watch: Vec<usize>,
} }
pub enum Message { pub enum Message {
DidSwitchConfig { n_entries: usize }, Progress { n_entries_total: usize, n_entries_done: usize },
DidLoadAllEntries,
Loaded { files: Vec<(AbsPathBuf, Option<Vec<u8>>)> }, Loaded { files: Vec<(AbsPathBuf, Option<Vec<u8>>)> },
} }
@ -32,15 +33,15 @@ pub trait Handle: fmt::Debug {
impl Entry { impl Entry {
pub fn rs_files_recursively(base: AbsPathBuf) -> Entry { pub fn rs_files_recursively(base: AbsPathBuf) -> Entry {
Entry::Directory { path: base, globs: globs(&["*.rs"]) } Entry::Directory { path: base, include: globs(&["*.rs", "!/.git/"]) }
} }
pub fn local_cargo_package(base: AbsPathBuf) -> Entry { pub fn local_cargo_package(base: AbsPathBuf) -> Entry {
Entry::Directory { path: base, globs: globs(&["*.rs", "!/target/"]) } Entry::Directory { path: base, include: globs(&["*.rs", "!/target/", "!/.git/"]) }
} }
pub fn cargo_package_dependency(base: AbsPathBuf) -> Entry { pub fn cargo_package_dependency(base: AbsPathBuf) -> Entry {
Entry::Directory { Entry::Directory {
path: base, path: base,
globs: globs(&["*.rs", "!/tests/", "!/examples/", "!/benches/"]), include: globs(&["*.rs", "!/tests/", "!/examples/", "!/benches/", "!/.git/"]),
} }
} }
} }
@ -55,10 +56,11 @@ impl fmt::Debug for Message {
Message::Loaded { files } => { Message::Loaded { files } => {
f.debug_struct("Loaded").field("n_files", &files.len()).finish() f.debug_struct("Loaded").field("n_files", &files.len()).finish()
} }
Message::DidSwitchConfig { n_entries } => { Message::Progress { n_entries_total, n_entries_done } => f
f.debug_struct("DidSwitchConfig").field("n_entries", n_entries).finish() .debug_struct("Progress")
} .field("n_entries_total", n_entries_total)
Message::DidLoadAllEntries => f.debug_struct("DidLoadAllEntries").finish(), .field("n_entries_done", n_entries_done)
.finish(),
} }
} }
} }

View file

@ -9,9 +9,17 @@ use paths::{AbsPath, AbsPathBuf};
pub struct VfsPath(VfsPathRepr); pub struct VfsPath(VfsPathRepr);
impl VfsPath { impl VfsPath {
/// Creates an "in-memory" path from a `/`-separated string.
///
/// This is most useful for testing, to avoid windows/linux differences.
///
/// # Panics
///
/// Panics if `path` does not start with `'/'`.
pub fn new_virtual_path(path: String) -> VfsPath {
    // Virtual paths are always "absolute": they must start at the root.
    assert!(path.starts_with('/'));
    VfsPath(VfsPathRepr::VirtualPath(VirtualPath(path)))
}
pub fn as_path(&self) -> Option<&AbsPath> { pub fn as_path(&self) -> Option<&AbsPath> {
match &self.0 { match &self.0 {
VfsPathRepr::PathBuf(it) => Some(it.as_path()), VfsPathRepr::PathBuf(it) => Some(it.as_path()),
VfsPathRepr::VirtualPath(_) => None,
} }
} }
pub fn join(&self, path: &str) -> VfsPath { pub fn join(&self, path: &str) -> VfsPath {
@ -20,11 +28,24 @@ impl VfsPath {
let res = it.join(path).normalize(); let res = it.join(path).normalize();
VfsPath(VfsPathRepr::PathBuf(res)) VfsPath(VfsPathRepr::PathBuf(res))
} }
VfsPathRepr::VirtualPath(it) => {
let res = it.join(path);
VfsPath(VfsPathRepr::VirtualPath(res))
}
} }
} }
pub fn pop(&mut self) -> bool { pub fn pop(&mut self) -> bool {
match &mut self.0 { match &mut self.0 {
VfsPathRepr::PathBuf(it) => it.pop(), VfsPathRepr::PathBuf(it) => it.pop(),
VfsPathRepr::VirtualPath(it) => it.pop(),
}
}
pub fn starts_with(&self, other: &VfsPath) -> bool {
match (&self.0, &other.0) {
(VfsPathRepr::PathBuf(lhs), VfsPathRepr::PathBuf(rhs)) => lhs.starts_with(rhs),
(VfsPathRepr::PathBuf(_), _) => false,
(VfsPathRepr::VirtualPath(lhs), VfsPathRepr::VirtualPath(rhs)) => lhs.starts_with(rhs),
(VfsPathRepr::VirtualPath(_), _) => false,
} }
} }
} }
@ -32,11 +53,12 @@ impl VfsPath {
#[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)] #[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)]
enum VfsPathRepr { enum VfsPathRepr {
PathBuf(AbsPathBuf), PathBuf(AbsPathBuf),
VirtualPath(VirtualPath),
} }
impl From<AbsPathBuf> for VfsPath { impl From<AbsPathBuf> for VfsPath {
fn from(v: AbsPathBuf) -> Self { fn from(v: AbsPathBuf) -> Self {
VfsPath(VfsPathRepr::PathBuf(v)) VfsPath(VfsPathRepr::PathBuf(v.normalize()))
} }
} }
@ -44,6 +66,33 @@ impl fmt::Display for VfsPath {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.0 { match &self.0 {
VfsPathRepr::PathBuf(it) => fmt::Display::fmt(&it.display(), f), VfsPathRepr::PathBuf(it) => fmt::Display::fmt(&it.display(), f),
VfsPathRepr::VirtualPath(VirtualPath(it)) => fmt::Display::fmt(it, f),
} }
} }
} }
/// A Unix-like, `/`-separated path that exists only in memory.
///
/// Used by `VfsPath::new_virtual_path`, mostly for tests, so that the same
/// path strings behave identically on Windows and Linux.
#[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)]
struct VirtualPath(String);

impl VirtualPath {
    /// Returns `true` if `other`'s string is a prefix of `self`'s.
    ///
    /// NOTE(review): this is a plain string-prefix check, so `/foo` is also
    /// considered a prefix of `/foobar`, not only of `/foo/bar`.
    fn starts_with(&self, other: &VirtualPath) -> bool {
        self.0.starts_with(&other.0)
    }

    /// Removes the last `/`-separated segment in place.
    ///
    /// Returns `false` (leaving `self` unchanged) when the path contains no
    /// `/` and therefore has no parent to pop to.
    fn pop(&mut self) -> bool {
        let pos = match self.0.rfind('/') {
            Some(pos) => pos,
            None => return false,
        };
        // Truncate in place instead of re-allocating a fresh String
        // (the original `self.0[..pos].to_string()` copied the prefix).
        self.0.truncate(pos);
        true
    }

    /// Joins `path` onto `self`, resolving any leading `../` components by
    /// popping segments off `self`.
    ///
    /// # Panics
    ///
    /// Panics (via `assert!`) if `path` has more leading `../` components
    /// than `self` has parent segments.
    fn join(&self, mut path: &str) -> VirtualPath {
        let mut res = self.clone();
        while let Some(rest) = path.strip_prefix("../") {
            assert!(res.pop());
            path = rest;
        }
        res.0 = format!("{}/{}", res.0, path);
        res
    }
}

View file

@ -1,108 +0,0 @@
//! A walkdir-based implementation of `loader::Handle`, which doesn't try to
//! watch files.
use std::convert::TryFrom;
use globset::{Glob, GlobSetBuilder};
use paths::{AbsPath, AbsPathBuf};
use walkdir::WalkDir;
use crate::loader;
/// `loader::Handle` implementation that loads entries by eagerly walking
/// the file system with `walkdir`; it does not watch for later changes.
#[derive(Debug)]
pub struct WalkdirLoaderHandle {
    // Relative order of fields below is significant: Rust drops fields in
    // declaration order, so `sender` is dropped first, disconnecting the
    // channel and ending the actor's `for msg in receiver` loop before
    // `_thread` is dropped (jod_thread handles presumably join on drop —
    // confirm against the crate docs).
    sender: crossbeam_channel::Sender<Message>,
    _thread: jod_thread::JoinHandle,
}
/// Messages sent from `WalkdirLoaderHandle` to its background actor thread.
enum Message {
    /// Switch to a new set of entries to load (the `watch` list is ignored
    /// by this loader).
    Config(loader::Config),
    /// Re-read a single file and re-send its contents as a `Loaded` message.
    Invalidate(AbsPathBuf),
}
impl loader::Handle for WalkdirLoaderHandle {
    /// Spawns the background actor thread and returns a handle wired to it.
    fn spawn(sender: loader::Sender) -> WalkdirLoaderHandle {
        let (msg_tx, msg_rx) = crossbeam_channel::unbounded::<Message>();
        let actor = WalkdirLoaderActor { sender };
        WalkdirLoaderHandle {
            sender: msg_tx,
            _thread: jod_thread::spawn(move || actor.run(msg_rx)),
        }
    }

    /// Forwards a new configuration to the actor.
    fn set_config(&mut self, config: loader::Config) {
        self.sender.send(Message::Config(config)).unwrap()
    }

    /// Asks the actor to re-read `path` and re-send its contents.
    fn invalidate(&mut self, path: AbsPathBuf) {
        self.sender.send(Message::Invalidate(path)).unwrap();
    }

    /// Reads `path` synchronously on the calling thread, bypassing the actor.
    fn load_sync(&mut self, path: &AbsPathBuf) -> Option<Vec<u8>> {
        read(path)
    }
}
/// Worker state for the thread spawned by `WalkdirLoaderHandle::spawn`.
struct WalkdirLoaderActor {
    // Client-supplied callback; invoked as `(self.sender)(msg)` to report
    // progress and loaded files back to the caller.
    sender: loader::Sender,
}
impl WalkdirLoaderActor {
    /// Actor main loop: processes messages until every `Sender` to
    /// `receiver` is dropped, then exits.
    fn run(mut self, receiver: crossbeam_channel::Receiver<Message>) {
        for msg in receiver {
            match msg {
                Message::Config(config) => {
                    // Announce the number of entries up front, stream one
                    // `Loaded` message per entry, then signal completion.
                    let n_entries = config.load.len();
                    self.send(loader::Message::DidSwitchConfig { n_entries });
                    for entry in config.load.into_iter() {
                        let files = self.load_entry(entry);
                        self.send(loader::Message::Loaded { files });
                    }
                    // This loader never watches the file system, so the
                    // watch list is deliberately unused.
                    drop(config.watch);
                    self.send(loader::Message::DidLoadAllEntries);
                }
                Message::Invalidate(path) => {
                    let contents = read(path.as_path());
                    let files = vec![(path, contents)];
                    self.send(loader::Message::Loaded { files });
                }
            }
        }
    }

    /// Reads every file selected by `entry`, pairing each path with its
    /// contents (`None` when the read fails).
    fn load_entry(&mut self, entry: loader::Entry) -> Vec<(AbsPathBuf, Option<Vec<u8>>)> {
        match entry {
            loader::Entry::Files(files) => files
                .into_iter()
                .map(|file| {
                    let contents = read(file.as_path());
                    (file, contents)
                })
                .collect(),
            loader::Entry::Directory { path, globs } => {
                // Compile the glob patterns once, then walk the tree and
                // keep only matching regular files.
                let mut builder = GlobSetBuilder::new();
                for glob in globs.iter() {
                    builder.add(Glob::new(glob).unwrap());
                }
                let globset = builder.build().unwrap();
                WalkDir::new(path)
                    .into_iter()
                    .filter_map(Result::ok)
                    .filter(|entry| entry.file_type().is_file())
                    .map(|entry| AbsPathBuf::try_from(entry.into_path()).unwrap())
                    .filter(|path| globset.is_match(path))
                    .map(|file| {
                        let contents = read(file.as_path());
                        (file, contents)
                    })
                    .collect()
            }
        }
    }

    /// Delivers `msg` through the client-supplied callback.
    fn send(&mut self, msg: loader::Message) {
        (self.sender)(msg)
    }
}
/// Reads the file at `path`, mapping any I/O error to `None`.
fn read(path: &AbsPath) -> Option<Vec<u8>> {
    match std::fs::read(path) {
        Ok(contents) => Some(contents),
        Err(_) => None,
    }
}