diff --git a/Cargo.lock b/Cargo.lock
index f6df772066..f768577248 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1092,6 +1092,7 @@ dependencies = [
  "cargo_metadata",
  "log",
  "ra_arena",
+ "ra_cargo_watch",
  "ra_cfg",
  "ra_db",
  "rustc-hash",
diff --git a/crates/ra_cargo_watch/src/lib.rs b/crates/ra_cargo_watch/src/lib.rs
index 1a6926db3b..71aa28f0a6 100644
--- a/crates/ra_cargo_watch/src/lib.rs
+++ b/crates/ra_cargo_watch/src/lib.rs
@@ -9,8 +9,8 @@ use lsp_types::{
 };
 use std::{
     io::{BufRead, BufReader},
-    path::PathBuf,
-    process::{Command, Stdio},
+    path::{Path, PathBuf},
+    process::{Child, Command, Stdio},
     thread::JoinHandle,
     time::Instant,
 };
@@ -246,18 +246,71 @@ enum CheckEvent {
     End,
 }
 
+pub fn run_cargo(
+    args: &[String],
+    current_dir: Option<&Path>,
+    mut on_message: impl FnMut(cargo_metadata::Message) -> bool,
+) -> Child {
+    let mut command = Command::new("cargo");
+    if let Some(current_dir) = current_dir {
+        command.current_dir(current_dir);
+    }
+
+    let mut child = command
+        .args(args)
+        .stdout(Stdio::piped())
+        .stderr(Stdio::null())
+        .stdin(Stdio::null())
+        .spawn()
+        .expect("couldn't launch cargo");
+
+    // We manually read a line at a time, instead of using serde's
+    // stream deserializers, because the deserializer cannot recover
+    // from an error, resulting in it getting stuck, because we try to
+    // be resilient against failures.
+    //
+    // Because cargo only outputs one JSON object per line, we can
+    // simply skip a line if it doesn't parse, which just ignores any
+    // erroneous output.
+    let stdout = BufReader::new(child.stdout.take().unwrap());
+    for line in stdout.lines() {
+        let line = match line {
+            Ok(line) => line,
+            Err(err) => {
+                log::error!("Couldn't read line from cargo: {}", err);
+                continue;
+            }
+        };
+
+        let message = serde_json::from_str::<cargo_metadata::Message>(&line);
+        let message = match message {
+            Ok(message) => message,
+            Err(err) => {
+                log::error!("Invalid json from cargo check, ignoring ({}): {:?} ", err, line);
+                continue;
+            }
+        };
+
+        if !on_message(message) {
+            break;
+        }
+    }
+
+    child
+}
+
 impl WatchThread {
     fn dummy() -> WatchThread {
         WatchThread { handle: None, message_recv: never() }
     }
 
-    fn new(options: &CheckOptions, workspace_root: &PathBuf) -> WatchThread {
+    fn new(options: &CheckOptions, workspace_root: &Path) -> WatchThread {
         let mut args: Vec<String> = vec![
             options.command.clone(),
             "--workspace".to_string(),
             "--message-format=json".to_string(),
             "--manifest-path".to_string(),
-            format!("{}/Cargo.toml", workspace_root.to_string_lossy()),
+            format!("{}/Cargo.toml", workspace_root.display()),
         ];
         if options.all_targets {
             args.push("--all-targets".to_string());
@@ -265,83 +318,47 @@ impl WatchThread {
         args.extend(options.args.iter().cloned());
 
         let (message_send, message_recv) = unbounded();
-        let enabled = options.enable;
-        let handle = std::thread::spawn(move || {
-            if !enabled {
-                return;
-            }
-
-            let mut command = Command::new("cargo")
-                .args(&args)
-                .stdout(Stdio::piped())
-                .stderr(Stdio::null())
-                .stdin(Stdio::null())
-                .spawn()
-                .expect("couldn't launch cargo");
-
-            // If we trigger an error here, we will do so in the loop instead,
-            // which will break out of the loop, and continue the shutdown
-            let _ = message_send.send(CheckEvent::Begin);
-
-            // We manually read a line at a time, instead of using serde's
-            // stream deserializers, because the deserializer cannot recover
-            // from an error, resulting in it getting stuck, because we try to
-            // be resillient against failures.
-            //
-            // Because cargo only outputs one JSON object per line, we can
-            // simply skip a line if it doesn't parse, which just ignores any
-            // erroneus output.
-            let stdout = BufReader::new(command.stdout.take().unwrap());
-            for line in stdout.lines() {
-                let line = match line {
-                    Ok(line) => line,
-                    Err(err) => {
-                        log::error!("Couldn't read line from cargo: {}", err);
-                        continue;
-                    }
-                };
-
-                let message = serde_json::from_str::<cargo_metadata::Message>(&line);
-                let message = match message {
-                    Ok(message) => message,
-                    Err(err) => {
-                        log::error!(
-                            "Invalid json from cargo check, ignoring ({}): {:?} ",
-                            err,
-                            line
-                        );
-                        continue;
-                    }
-                };
-
-                // Skip certain kinds of messages to only spend time on what's useful
-                match &message {
-                    Message::CompilerArtifact(artifact) if artifact.fresh => continue,
-                    Message::BuildScriptExecuted(_) => continue,
-                    Message::Unknown => continue,
-                    _ => {}
-                }
-
-                match message_send.send(CheckEvent::Msg(message)) {
-                    Ok(()) => {}
-                    Err(_err) => {
-                        // The send channel was closed, so we want to shutdown
-                        break;
-                    }
-                }
-            }
-
-            // We can ignore any error here, as we are already in the progress
-            // of shutting down.
-            let _ = message_send.send(CheckEvent::End);
-
-            // It is okay to ignore the result, as it only errors if the process is already dead
-            let _ = command.kill();
-
-            // Again, we don't care about the exit status so just ignore the result
-            let _ = command.wait();
-        });
-        WatchThread { handle: Some(handle), message_recv }
+        let workspace_root = workspace_root.to_owned();
+        let handle = if options.enable {
+            Some(std::thread::spawn(move || {
+                // If we trigger an error here, we will do so in the loop instead,
+                // which will break out of the loop, and continue the shutdown
+                let _ = message_send.send(CheckEvent::Begin);
+
+                let mut child = run_cargo(&args, Some(&workspace_root), |message| {
+                    // Skip certain kinds of messages to only spend time on what's useful
+                    match &message {
+                        Message::CompilerArtifact(artifact) if artifact.fresh => return true,
+                        Message::BuildScriptExecuted(_) => return true,
+                        Message::Unknown => return true,
+                        _ => {}
+                    }
+
+                    match message_send.send(CheckEvent::Msg(message)) {
+                        Ok(()) => {}
+                        Err(_err) => {
+                            // The send channel was closed, so we want to shutdown
+                            return false;
+                        }
+                    };
+
+                    true
+                });
+
+                // We can ignore any error here, as we are already in the process
+                // of shutting down.
+                let _ = message_send.send(CheckEvent::End);
+
+                // It is okay to ignore the result, as it only errors if the process is already dead
+                let _ = child.kill();
+
+                // Again, we don't care about the exit status so just ignore the result
+                let _ = child.wait();
+            }))
+        } else {
+            None
+        };
+        WatchThread { handle, message_recv }
     }
 }
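Note on the extracted `run_cargo` helper above: it is driven entirely through the `on_message` callback (returning `false` stops the reader loop), and it deliberately returns the `Child` without waiting, leaving cleanup to the caller. A minimal sketch of a standalone caller, assuming only the signature shown in this diff (the function name and arguments below are illustrative, not part of the change):

```rust
// Illustrative caller of `run_cargo` (not part of this diff): collect every
// message emitted by `cargo check`, then reap the child process ourselves.
use std::path::Path;

use cargo_metadata::Message;
use ra_cargo_watch::run_cargo;

fn collect_check_messages(project_dir: &Path) -> Vec<Message> {
    let args = vec!["check".to_string(), "--message-format=json".to_string()];
    let mut messages = Vec::new();
    let mut child = run_cargo(&args, Some(project_dir), |message| {
        messages.push(message);
        true // keep consuming output; returning `false` stops the loop early
    });
    // `run_cargo` does not wait on the process, so the caller has to.
    let _ = child.wait();
    messages
}
```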
diff --git a/crates/ra_db/src/input.rs b/crates/ra_db/src/input.rs
index bde843001a..e371f849df 100644
--- a/crates/ra_db/src/input.rs
+++ b/crates/ra_db/src/input.rs
@@ -6,7 +6,11 @@
 //! actual IO. See `vfs` and `project_model` in the `rust-analyzer` crate for how
 //! actual IO is done and lowered to input.
 
-use std::{fmt, ops, str::FromStr};
+use std::{
+    fmt, ops,
+    path::{Path, PathBuf},
+    str::FromStr,
+};
 
 use ra_cfg::CfgOptions;
 use ra_syntax::SmolStr;
@@ -144,7 +148,7 @@ pub struct Env {
 // crate. We store a map to allow remap it to ExternSourceId
 #[derive(Default, Debug, Clone, PartialEq, Eq)]
 pub struct ExternSource {
-    extern_paths: FxHashMap<String, ExternSourceId>,
+    extern_paths: FxHashMap<PathBuf, ExternSourceId>,
 }
 
 #[derive(Debug, Clone, PartialEq, Eq)]
@@ -294,13 +298,10 @@ impl Env {
 }
 
 impl ExternSource {
-    pub fn extern_path(&self, path: &str) -> Option<(ExternSourceId, RelativePathBuf)> {
+    pub fn extern_path(&self, path: impl AsRef<Path>) -> Option<(ExternSourceId, RelativePathBuf)> {
+        let path = path.as_ref();
         self.extern_paths.iter().find_map(|(root_path, id)| {
-            if path.starts_with(root_path) {
-                let mut rel_path = &path[root_path.len()..];
-                if rel_path.starts_with("/") {
-                    rel_path = &rel_path[1..];
-                }
+            if let Ok(rel_path) = path.strip_prefix(root_path) {
                 let rel_path = RelativePathBuf::from_path(rel_path).ok()?;
                 Some((id.clone(), rel_path))
             } else {
@@ -309,8 +310,8 @@ impl ExternSource {
         })
     }
 
-    pub fn set_extern_path(&mut self, root_path: &str, root: ExternSourceId) {
-        self.extern_paths.insert(root_path.to_owned(), root);
+    pub fn set_extern_path(&mut self, root_path: &Path, root: ExternSourceId) {
+        self.extern_paths.insert(root_path.to_path_buf(), root);
     }
 }
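The rewritten `extern_path` above leans on `Path::strip_prefix`, which already produces a remainder without a leading separator and reports a non-match as an `Err`, instead of the manual slicing the old code did. A standalone illustration of that behaviour (plain std, not code from this PR):

```rust
use std::path::Path;

fn main() {
    let root = Path::new("/work/target/debug/build/foo-1a2b3c/out");
    let file = Path::new("/work/target/debug/build/foo-1a2b3c/out/generated.rs");

    // The remainder is relative, with no leading separator, which is what
    // `RelativePathBuf::from_path` expects in `extern_path`.
    assert_eq!(file.strip_prefix(root).unwrap(), Path::new("generated.rs"));

    // A path that is not under `root` is an `Err`, not a panic or a bogus slice.
    assert!(Path::new("/elsewhere/other.rs").strip_prefix(root).is_err());
}
```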
diff --git a/crates/ra_project_model/Cargo.toml b/crates/ra_project_model/Cargo.toml
index 6252241bf3..22300548a7 100644
--- a/crates/ra_project_model/Cargo.toml
+++ b/crates/ra_project_model/Cargo.toml
@@ -16,6 +16,7 @@ cargo_metadata = "0.9.1"
 ra_arena = { path = "../ra_arena" }
 ra_db = { path = "../ra_db" }
 ra_cfg = { path = "../ra_cfg" }
+ra_cargo_watch = { path = "../ra_cargo_watch" }
 serde = { version = "1.0.104", features = ["derive"] }
 serde_json = "1.0.48"
diff --git a/crates/ra_project_model/src/cargo_workspace.rs b/crates/ra_project_model/src/cargo_workspace.rs
index 4fea459d5b..eeeb102339 100644
--- a/crates/ra_project_model/src/cargo_workspace.rs
+++ b/crates/ra_project_model/src/cargo_workspace.rs
@@ -3,8 +3,9 @@
 use std::path::{Path, PathBuf};
 
 use anyhow::{Context, Result};
-use cargo_metadata::{CargoOpt, MetadataCommand};
+use cargo_metadata::{CargoOpt, Message, MetadataCommand, PackageId};
 use ra_arena::{impl_arena_id, Arena, RawId};
+use ra_cargo_watch::run_cargo;
 use ra_db::Edition;
 use rustc_hash::FxHashMap;
 use serde::Deserialize;
@@ -35,11 +36,19 @@ pub struct CargoFeatures {
     /// List of features to activate.
     /// This will be ignored if `cargo_all_features` is true.
     pub features: Vec<String>,
+
+    /// Runs cargo check on launch to figure out the correct values of OUT_DIR
+    pub load_out_dirs_from_check: bool,
 }
 
 impl Default for CargoFeatures {
     fn default() -> Self {
-        CargoFeatures { no_default_features: false, all_features: true, features: Vec::new() }
+        CargoFeatures {
+            no_default_features: false,
+            all_features: true,
+            features: Vec::new(),
+            load_out_dirs_from_check: false,
+        }
     }
 }
 
@@ -60,6 +69,7 @@ struct PackageData {
     dependencies: Vec<PackageDependency>,
     edition: Edition,
     features: Vec<String>,
+    out_dir: Option<PathBuf>,
 }
 
 #[derive(Debug, Clone)]
@@ -131,6 +141,9 @@
     ) -> impl Iterator<Item = &'a PackageDependency> + 'a {
         ws.packages[self].dependencies.iter()
     }
+    pub fn out_dir(self, ws: &CargoWorkspace) -> Option<&Path> {
+        ws.packages[self].out_dir.as_ref().map(|od| od.as_path())
+    }
 }
 
 impl Target {
@@ -173,6 +186,12 @@ impl CargoWorkspace {
         let meta = meta.exec().with_context(|| {
             format!("Failed to run `cargo metadata --manifest-path {}`", cargo_toml.display())
         })?;
+
+        let mut out_dir_by_id = FxHashMap::default();
+        if cargo_features.load_out_dirs_from_check {
+            out_dir_by_id = load_out_dirs(cargo_toml, cargo_features);
+        }
+
         let mut pkg_by_id = FxHashMap::default();
         let mut packages = Arena::default();
         let mut targets = Arena::default();
@@ -193,6 +212,7 @@ impl CargoWorkspace {
                 edition,
                 dependencies: Vec::new(),
                 features: Vec::new(),
+                out_dir: out_dir_by_id.get(&id).cloned(),
             });
             let pkg_data = &mut packages[pkg];
             pkg_by_id.insert(id, pkg);
@@ -252,3 +272,46 @@ impl CargoWorkspace {
         &self.workspace_root
     }
 }
+
+pub fn load_out_dirs(
+    cargo_toml: &Path,
+    cargo_features: &CargoFeatures,
+) -> FxHashMap<PackageId, PathBuf> {
+    let mut args: Vec<String> = vec![
+        "check".to_string(),
+        "--message-format=json".to_string(),
+        "--manifest-path".to_string(),
+        format!("{}", cargo_toml.display()),
+    ];
+
+    if cargo_features.all_features {
+        args.push("--all-features".to_string());
+    } else if cargo_features.no_default_features {
+        // FIXME: `NoDefaultFeatures` is mutually exclusive with `SomeFeatures`
+        // https://github.com/oli-obk/cargo_metadata/issues/79
+        args.push("--no-default-features".to_string());
+    } else if !cargo_features.features.is_empty() {
+        for feature in &cargo_features.features {
+            args.push(feature.clone());
+        }
+    }
+
+    let mut res = FxHashMap::default();
+    let mut child = run_cargo(&args, cargo_toml.parent(), |message| {
+        match message {
+            Message::BuildScriptExecuted(message) => {
+                let package_id = message.package_id;
+                let out_dir = message.out_dir;
+                res.insert(package_id, out_dir);
+            }
+
+            Message::CompilerArtifact(_) => (),
+            Message::CompilerMessage(_) => (),
+            Message::Unknown => (),
+        }
+        true
+    });
+
+    let _ = child.wait();
+    res
+}
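`load_out_dirs` above runs one `cargo check` pass and keeps only the `BuildScriptExecuted` messages, keyed by `cargo_metadata::PackageId`; in this PR its real caller is `CargoWorkspace::from_cargo_metadata`, gated by `load_out_dirs_from_check`. A rough sketch of calling it directly; the import path is an assumption (the function may not be re-exported exactly like this), and the `main` wrapper is illustrative only:

```rust
// Illustrative only: run the OUT_DIR discovery pass directly and print the result.
// The `ra_project_model::load_out_dirs` path is an assumption; inside the crate
// the call is simply `load_out_dirs(cargo_toml, cargo_features)`.
use std::path::Path;

use ra_project_model::{load_out_dirs, CargoFeatures};

fn main() {
    let features = CargoFeatures::default();
    let out_dirs = load_out_dirs(Path::new("./Cargo.toml"), &features);
    for (package_id, out_dir) in &out_dirs {
        eprintln!("{:?} -> {}", package_id, out_dir.display());
    }
}
```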
diff --git a/crates/ra_project_model/src/lib.rs b/crates/ra_project_model/src/lib.rs
index 8978748134..43f8342530 100644
--- a/crates/ra_project_model/src/lib.rs
+++ b/crates/ra_project_model/src/lib.rs
@@ -150,6 +150,21 @@ impl ProjectWorkspace {
         }
     }
 
+    pub fn out_dirs(&self) -> Vec<PathBuf> {
+        match self {
+            ProjectWorkspace::Json { project: _project } => vec![],
+            ProjectWorkspace::Cargo { cargo, sysroot: _sysroot } => {
+                let mut out_dirs = Vec::with_capacity(cargo.packages().len());
+                for pkg in cargo.packages() {
+                    if let Some(out_dir) = pkg.out_dir(&cargo) {
+                        out_dirs.push(out_dir.to_path_buf());
+                    }
+                }
+                out_dirs
+            }
+        }
+    }
+
     pub fn n_packages(&self) -> usize {
         match self {
             ProjectWorkspace::Json { project } => project.crates.len(),
@@ -162,7 +177,8 @@
     pub fn to_crate_graph(
         &self,
         default_cfg_options: &CfgOptions,
-        outdirs: &FxHashMap<String, (ExternSourceId, String)>,
+        additional_out_dirs: &FxHashMap<String, PathBuf>,
+        extern_source_roots: &FxHashMap<PathBuf, ExternSourceId>,
         load: &mut dyn FnMut(&Path) -> Option<FileId>,
     ) -> CrateGraph {
         let mut crate_graph = CrateGraph::default();
@@ -237,9 +253,11 @@ impl ProjectWorkspace {
                 let mut env = Env::default();
                 let mut extern_source = ExternSource::default();
-                if let Some((id, path)) = outdirs.get(krate.name(&sysroot)) {
-                    env.set("OUT_DIR", path.clone());
-                    extern_source.set_extern_path(&path, *id);
+                if let Some(path) = additional_out_dirs.get(krate.name(&sysroot)) {
+                    env.set("OUT_DIR", path.to_string_lossy().to_string());
+                    if let Some(extern_source_id) = extern_source_roots.get(path) {
+                        extern_source.set_extern_path(&path, *extern_source_id);
+                    }
                 }
 
                 let crate_id = crate_graph.add_crate_root(
@@ -292,9 +310,20 @@ impl ProjectWorkspace {
                 };
                 let mut env = Env::default();
                 let mut extern_source = ExternSource::default();
-                if let Some((id, path)) = outdirs.get(pkg.name(&cargo)) {
-                    env.set("OUT_DIR", path.clone());
-                    extern_source.set_extern_path(&path, *id);
+                if let Some(out_dir) = dbg!(pkg.out_dir(cargo)) {
+                    env.set("OUT_DIR", out_dir.to_string_lossy().to_string());
+                    if let Some(extern_source_id) =
+                        dbg!(dbg!(&extern_source_roots).get(out_dir))
+                    {
+                        extern_source.set_extern_path(&out_dir, *extern_source_id);
+                    }
+                } else {
+                    if let Some(path) = additional_out_dirs.get(pkg.name(&cargo)) {
+                        env.set("OUT_DIR", path.to_string_lossy().to_string());
+                        if let Some(extern_source_id) = extern_source_roots.get(path) {
+                            extern_source.set_extern_path(&path, *extern_source_id);
+                        }
+                    }
                 }
                 let crate_id = crate_graph.add_crate_root(
                     file_id,
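The `OUT_DIR` entry written into each crate's `Env` by `to_crate_graph` above is what lets rust-analyzer follow the usual build-script include pattern; this is standard cargo usage, shown only for motivation and not code from this PR:

```rust
// In a crate with a build script, build.rs typically writes generated code into
// $OUT_DIR, and the crate pulls it in like this. rust-analyzer can only resolve
// the include once it knows the package's OUT_DIR, which this change discovers
// via `cargo check` instead of requiring `rust-analyzer.additionalOutDirs`.
include!(concat!(env!("OUT_DIR"), "/generated.rs"));
```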
diff --git a/crates/rust-analyzer/src/cli/load_cargo.rs b/crates/rust-analyzer/src/cli/load_cargo.rs
index 2ce69c9b30..7d75b991d3 100644
--- a/crates/rust-analyzer/src/cli/load_cargo.rs
+++ b/crates/rust-analyzer/src/cli/load_cargo.rs
@@ -54,12 +54,18 @@ pub(crate) fn load_cargo(
     // FIXME: outdirs?
     let outdirs = FxHashMap::default();
+    let extern_source_roots = FxHashMap::default();
 
-    let crate_graph = ws.to_crate_graph(&default_cfg_options, &outdirs, &mut |path: &Path| {
-        let vfs_file = vfs.load(path);
-        log::debug!("vfs file {:?} -> {:?}", path, vfs_file);
-        vfs_file.map(vfs_file_to_id)
-    });
+    let crate_graph = ws.to_crate_graph(
+        &default_cfg_options,
+        &outdirs,
+        &extern_source_roots,
+        &mut |path: &Path| {
+            let vfs_file = vfs.load(path);
+            log::debug!("vfs file {:?} -> {:?}", path, vfs_file);
+            vfs_file.map(vfs_file_to_id)
+        },
+    );
     log::debug!("crate graph: {:?}", crate_graph);
 
     let source_roots = roots
diff --git a/crates/rust-analyzer/src/world.rs b/crates/rust-analyzer/src/world.rs
index 5743471bfd..63e9130471 100644
--- a/crates/rust-analyzer/src/world.rs
+++ b/crates/rust-analyzer/src/world.rs
@@ -105,11 +105,15 @@ impl WorldState {
             }));
         }
 
-        let extern_dirs: FxHashSet<_> =
+        let mut extern_dirs: FxHashSet<_> =
             additional_out_dirs.iter().map(|(_, path)| (PathBuf::from(path))).collect();
 
+        for ws in workspaces.iter() {
+            extern_dirs.extend(ws.out_dirs());
+        }
+
         let mut extern_source_roots = FxHashMap::default();
 
-        roots.extend(additional_out_dirs.iter().map(|(_, path)| {
+        roots.extend(extern_dirs.iter().map(|path| {
             let mut filter = RustPackageFilterBuilder::default().set_member(false);
             for glob in exclude_globs.iter() {
                 filter = filter.exclude(glob.clone());
@@ -148,17 +152,21 @@ impl WorldState {
             vfs_file.map(|f| FileId(f.0))
         };
 
-        let mut outdirs = FxHashMap::default();
-        for (name, path) in additional_out_dirs {
-            let path = PathBuf::from(&path);
-            if let Some(id) = extern_source_roots.get(&path) {
-                outdirs.insert(name, (id.clone(), path.to_string_lossy().replace("\\", "/")));
-            }
-        }
+        let additional_out_dirs: FxHashMap<String, PathBuf> = additional_out_dirs
+            .into_iter()
+            .map(|(name, path)| (name, PathBuf::from(&path)))
+            .collect();
 
         workspaces
             .iter()
-            .map(|ws| ws.to_crate_graph(&default_cfg_options, &outdirs, &mut load))
+            .map(|ws| {
+                ws.to_crate_graph(
+                    &default_cfg_options,
+                    &additional_out_dirs,
+                    &extern_source_roots,
+                    &mut load,
+                )
+            })
             .for_each(|graph| {
                 crate_graph.extend(graph);
             });
diff --git a/editors/code/package.json b/editors/code/package.json
index 48f28b28a8..188a2f9ca6 100644
--- a/editors/code/package.json
+++ b/editors/code/package.json
@@ -362,6 +362,11 @@
                     },
                     "default": [],
                     "description": "List of features to activate"
+                },
+                "rust-analyzer.cargoFeatures.loadOutDirsFromCheck": {
+                    "type": "boolean",
+                    "default": false,
+                    "markdownDescription": "Run `cargo check` on startup to get the correct value for package OUT_DIRs"
                 }
             }
         },
diff --git a/editors/code/src/config.ts b/editors/code/src/config.ts
index bd8096dd6e..84ec81ecdc 100644
--- a/editors/code/src/config.ts
+++ b/editors/code/src/config.ts
@@ -22,6 +22,7 @@ export interface CargoFeatures {
     noDefaultFeatures: boolean;
     allFeatures: boolean;
     features: string[];
+    loadOutDirsFromCheck: boolean;
 }
 
 export const enum UpdatesChannel {
@@ -204,6 +205,7 @@ export class Config {
     get featureFlags() { return this.cfg.get("featureFlags") as Record<string, boolean>; }
     get additionalOutDirs() { return this.cfg.get("additionalOutDirs") as Record<string, string>; }
     get rustfmtArgs() { return this.cfg.get("rustfmtArgs") as string[]; }
+    get loadOutDirsFromCheck() { return this.cfg.get("loadOutDirsFromCheck") as boolean; }
 
     get cargoWatchOptions(): CargoWatchOptions {
         return {
@@ -219,6 +221,7 @@ export class Config {
             noDefaultFeatures: this.cfg.get("cargoFeatures.noDefaultFeatures") as boolean,
             allFeatures: this.cfg.get("cargoFeatures.allFeatures") as boolean,
             features: this.cfg.get("cargoFeatures.features") as string[],
+            loadOutDirsFromCheck: this.cfg.get("cargoFeatures.loadOutDirsFromCheck") as boolean,
         };
     }