Support loading OUT_DIR from cargo check at launch

This commit is contained in:
Emil Lauridsen 2020-03-16 13:43:29 +01:00
parent 2720e2374b
commit 33c6c7abc6
10 changed files with 242 additions and 108 deletions

1
Cargo.lock generated
View file

@ -1092,6 +1092,7 @@ dependencies = [
"cargo_metadata", "cargo_metadata",
"log", "log",
"ra_arena", "ra_arena",
"ra_cargo_watch",
"ra_cfg", "ra_cfg",
"ra_db", "ra_db",
"rustc-hash", "rustc-hash",

View file

@ -9,8 +9,8 @@ use lsp_types::{
}; };
use std::{ use std::{
io::{BufRead, BufReader}, io::{BufRead, BufReader},
path::PathBuf, path::{Path, PathBuf},
process::{Command, Stdio}, process::{Child, Command, Stdio},
thread::JoinHandle, thread::JoinHandle,
time::Instant, time::Instant,
}; };
@ -246,18 +246,71 @@ enum CheckEvent {
End, End,
} }
/// Spawns `cargo` with the given `args` (optionally in `current_dir`) and
/// streams its JSON-formatted stdout, invoking `on_message` for every message
/// that parses. Reading stops when stdout closes or `on_message` returns
/// `false`.
///
/// Returns the still-running `Child`; the caller is responsible for killing
/// and/or waiting on it. Panics only if the `cargo` binary itself cannot be
/// launched.
pub fn run_cargo(
    args: &[String],
    current_dir: Option<&Path>,
    mut on_message: impl FnMut(cargo_metadata::Message) -> bool,
) -> Child {
    let mut command = Command::new("cargo");
    if let Some(current_dir) = current_dir {
        command.current_dir(current_dir);
    }

    // stderr is discarded and stdin closed so the child can never block
    // waiting on either of them; only stdout (the JSON stream) is piped.
    let mut child = command
        .args(args)
        .stdout(Stdio::piped())
        .stderr(Stdio::null())
        .stdin(Stdio::null())
        .spawn()
        .expect("couldn't launch cargo");

    // We manually read a line at a time, instead of using serde's
    // stream deserializers, because the deserializer cannot recover
    // from an error, resulting in it getting stuck, because we try to
    // be resilient against failures.
    //
    // Because cargo only outputs one JSON object per line, we can
    // simply skip a line if it doesn't parse, which just ignores any
    // erroneous output.
    let stdout = BufReader::new(child.stdout.take().unwrap());
    for line in stdout.lines() {
        let line = match line {
            Ok(line) => line,
            Err(err) => {
                log::error!("Couldn't read line from cargo: {}", err);
                continue;
            }
        };

        let message = serde_json::from_str::<cargo_metadata::Message>(&line);
        let message = match message {
            Ok(message) => message,
            Err(err) => {
                log::error!("Invalid json from cargo check, ignoring ({}): {:?} ", err, line);
                continue;
            }
        };

        // The callback decides whether to keep consuming messages.
        if !on_message(message) {
            break;
        }
    }

    child
}
impl WatchThread { impl WatchThread {
fn dummy() -> WatchThread { fn dummy() -> WatchThread {
WatchThread { handle: None, message_recv: never() } WatchThread { handle: None, message_recv: never() }
} }
fn new(options: &CheckOptions, workspace_root: &PathBuf) -> WatchThread { fn new(options: &CheckOptions, workspace_root: &Path) -> WatchThread {
let mut args: Vec<String> = vec![ let mut args: Vec<String> = vec![
options.command.clone(), options.command.clone(),
"--workspace".to_string(), "--workspace".to_string(),
"--message-format=json".to_string(), "--message-format=json".to_string(),
"--manifest-path".to_string(), "--manifest-path".to_string(),
format!("{}/Cargo.toml", workspace_root.to_string_lossy()), format!("{}/Cargo.toml", workspace_root.display()),
]; ];
if options.all_targets { if options.all_targets {
args.push("--all-targets".to_string()); args.push("--all-targets".to_string());
@ -265,83 +318,47 @@ impl WatchThread {
args.extend(options.args.iter().cloned()); args.extend(options.args.iter().cloned());
let (message_send, message_recv) = unbounded(); let (message_send, message_recv) = unbounded();
let enabled = options.enable; let workspace_root = workspace_root.to_owned();
let handle = std::thread::spawn(move || { let handle = if options.enable {
if !enabled { Some(std::thread::spawn(move || {
return; // If we trigger an error here, we will do so in the loop instead,
} // which will break out of the loop, and continue the shutdown
let _ = message_send.send(CheckEvent::Begin);
let mut command = Command::new("cargo") let mut child = run_cargo(&args, Some(&workspace_root), |message| {
.args(&args) // Skip certain kinds of messages to only spend time on what's useful
.stdout(Stdio::piped()) match &message {
.stderr(Stdio::null()) Message::CompilerArtifact(artifact) if artifact.fresh => return true,
.stdin(Stdio::null()) Message::BuildScriptExecuted(_) => return true,
.spawn() Message::Unknown => return true,
.expect("couldn't launch cargo"); _ => {}
// If we trigger an error here, we will do so in the loop instead,
// which will break out of the loop, and continue the shutdown
let _ = message_send.send(CheckEvent::Begin);
// We manually read a line at a time, instead of using serde's
// stream deserializers, because the deserializer cannot recover
// from an error, resulting in it getting stuck, because we try to
// be resillient against failures.
//
// Because cargo only outputs one JSON object per line, we can
// simply skip a line if it doesn't parse, which just ignores any
// erroneus output.
let stdout = BufReader::new(command.stdout.take().unwrap());
for line in stdout.lines() {
let line = match line {
Ok(line) => line,
Err(err) => {
log::error!("Couldn't read line from cargo: {}", err);
continue;
} }
};
let message = serde_json::from_str::<cargo_metadata::Message>(&line); match message_send.send(CheckEvent::Msg(message)) {
let message = match message { Ok(()) => {}
Ok(message) => message, Err(_err) => {
Err(err) => { // The send channel was closed, so we want to shutdown
log::error!( return false;
"Invalid json from cargo check, ignoring ({}): {:?} ", }
err, };
line
);
continue;
}
};
// Skip certain kinds of messages to only spend time on what's useful true
match &message { });
Message::CompilerArtifact(artifact) if artifact.fresh => continue,
Message::BuildScriptExecuted(_) => continue,
Message::Unknown => continue,
_ => {}
}
match message_send.send(CheckEvent::Msg(message)) { // We can ignore any error here, as we are already in the progress
Ok(()) => {} // of shutting down.
Err(_err) => { let _ = message_send.send(CheckEvent::End);
// The send channel was closed, so we want to shutdown
break;
}
}
}
// We can ignore any error here, as we are already in the progress // It is okay to ignore the result, as it only errors if the process is already dead
// of shutting down. let _ = child.kill();
let _ = message_send.send(CheckEvent::End);
// It is okay to ignore the result, as it only errors if the process is already dead // Again, we don't care about the exit status so just ignore the result
let _ = command.kill(); let _ = child.wait();
}))
// Again, we don't care about the exit status so just ignore the result } else {
let _ = command.wait(); None
}); };
WatchThread { handle: Some(handle), message_recv } WatchThread { handle, message_recv }
} }
} }

View file

@ -6,7 +6,11 @@
//! actual IO. See `vfs` and `project_model` in the `rust-analyzer` crate for how //! actual IO. See `vfs` and `project_model` in the `rust-analyzer` crate for how
//! actual IO is done and lowered to input. //! actual IO is done and lowered to input.
use std::{fmt, ops, str::FromStr}; use std::{
fmt, ops,
path::{Path, PathBuf},
str::FromStr,
};
use ra_cfg::CfgOptions; use ra_cfg::CfgOptions;
use ra_syntax::SmolStr; use ra_syntax::SmolStr;
@ -144,7 +148,7 @@ pub struct Env {
// crate. We store a map to allow remap it to ExternSourceId // crate. We store a map to allow remap it to ExternSourceId
#[derive(Default, Debug, Clone, PartialEq, Eq)] #[derive(Default, Debug, Clone, PartialEq, Eq)]
pub struct ExternSource { pub struct ExternSource {
extern_paths: FxHashMap<String, ExternSourceId>, extern_paths: FxHashMap<PathBuf, ExternSourceId>,
} }
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
@ -294,13 +298,10 @@ impl Env {
} }
impl ExternSource { impl ExternSource {
pub fn extern_path(&self, path: &str) -> Option<(ExternSourceId, RelativePathBuf)> { pub fn extern_path(&self, path: impl AsRef<Path>) -> Option<(ExternSourceId, RelativePathBuf)> {
let path = path.as_ref();
self.extern_paths.iter().find_map(|(root_path, id)| { self.extern_paths.iter().find_map(|(root_path, id)| {
if path.starts_with(root_path) { if let Ok(rel_path) = path.strip_prefix(root_path) {
let mut rel_path = &path[root_path.len()..];
if rel_path.starts_with("/") {
rel_path = &rel_path[1..];
}
let rel_path = RelativePathBuf::from_path(rel_path).ok()?; let rel_path = RelativePathBuf::from_path(rel_path).ok()?;
Some((id.clone(), rel_path)) Some((id.clone(), rel_path))
} else { } else {
@ -309,8 +310,8 @@ impl ExternSource {
}) })
} }
pub fn set_extern_path(&mut self, root_path: &str, root: ExternSourceId) { pub fn set_extern_path(&mut self, root_path: &Path, root: ExternSourceId) {
self.extern_paths.insert(root_path.to_owned(), root); self.extern_paths.insert(root_path.to_path_buf(), root);
} }
} }

View file

@ -16,6 +16,7 @@ cargo_metadata = "0.9.1"
ra_arena = { path = "../ra_arena" } ra_arena = { path = "../ra_arena" }
ra_db = { path = "../ra_db" } ra_db = { path = "../ra_db" }
ra_cfg = { path = "../ra_cfg" } ra_cfg = { path = "../ra_cfg" }
ra_cargo_watch = { path = "../ra_cargo_watch" }
serde = { version = "1.0.104", features = ["derive"] } serde = { version = "1.0.104", features = ["derive"] }
serde_json = "1.0.48" serde_json = "1.0.48"

View file

@ -3,8 +3,9 @@
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use anyhow::{Context, Result}; use anyhow::{Context, Result};
use cargo_metadata::{CargoOpt, MetadataCommand}; use cargo_metadata::{CargoOpt, Message, MetadataCommand, PackageId};
use ra_arena::{impl_arena_id, Arena, RawId}; use ra_arena::{impl_arena_id, Arena, RawId};
use ra_cargo_watch::run_cargo;
use ra_db::Edition; use ra_db::Edition;
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use serde::Deserialize; use serde::Deserialize;
@ -35,11 +36,19 @@ pub struct CargoFeatures {
/// List of features to activate. /// List of features to activate.
/// This will be ignored if `cargo_all_features` is true. /// This will be ignored if `cargo_all_features` is true.
pub features: Vec<String>, pub features: Vec<String>,
/// Runs cargo check on launch to figure out the correct values of OUT_DIR
pub load_out_dirs_from_check: bool,
} }
impl Default for CargoFeatures { impl Default for CargoFeatures {
fn default() -> Self { fn default() -> Self {
CargoFeatures { no_default_features: false, all_features: true, features: Vec::new() } CargoFeatures {
no_default_features: false,
all_features: true,
features: Vec::new(),
load_out_dirs_from_check: false,
}
} }
} }
@ -60,6 +69,7 @@ struct PackageData {
dependencies: Vec<PackageDependency>, dependencies: Vec<PackageDependency>,
edition: Edition, edition: Edition,
features: Vec<String>, features: Vec<String>,
out_dir: Option<PathBuf>,
} }
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
@ -131,6 +141,9 @@ impl Package {
) -> impl Iterator<Item = &'a PackageDependency> + 'a { ) -> impl Iterator<Item = &'a PackageDependency> + 'a {
ws.packages[self].dependencies.iter() ws.packages[self].dependencies.iter()
} }
/// Returns the build-script output directory (`OUT_DIR`) recorded for this
/// package when the workspace was loaded, if any.
pub fn out_dir(self, ws: &CargoWorkspace) -> Option<&Path> {
    // `as_deref` borrows the `Option<PathBuf>` as `Option<&Path>` directly,
    // replacing the longer `as_ref().map(|od| od.as_path())` form.
    ws.packages[self].out_dir.as_deref()
}
} }
impl Target { impl Target {
@ -173,6 +186,12 @@ impl CargoWorkspace {
let meta = meta.exec().with_context(|| { let meta = meta.exec().with_context(|| {
format!("Failed to run `cargo metadata --manifest-path {}`", cargo_toml.display()) format!("Failed to run `cargo metadata --manifest-path {}`", cargo_toml.display())
})?; })?;
let mut out_dir_by_id = FxHashMap::default();
if cargo_features.load_out_dirs_from_check {
out_dir_by_id = load_out_dirs(cargo_toml, cargo_features);
}
let mut pkg_by_id = FxHashMap::default(); let mut pkg_by_id = FxHashMap::default();
let mut packages = Arena::default(); let mut packages = Arena::default();
let mut targets = Arena::default(); let mut targets = Arena::default();
@ -193,6 +212,7 @@ impl CargoWorkspace {
edition, edition,
dependencies: Vec::new(), dependencies: Vec::new(),
features: Vec::new(), features: Vec::new(),
out_dir: out_dir_by_id.get(&id).cloned(),
}); });
let pkg_data = &mut packages[pkg]; let pkg_data = &mut packages[pkg];
pkg_by_id.insert(id, pkg); pkg_by_id.insert(id, pkg);
@ -252,3 +272,46 @@ impl CargoWorkspace {
&self.workspace_root &self.workspace_root
} }
} }
/// Runs `cargo check` on `cargo_toml` and collects, per package id, the
/// `OUT_DIR` reported by each executed build script.
///
/// Feature selection mirrors `CargoFeatures`: `all_features` wins, then
/// `no_default_features`, then an explicit feature list. Blocks until the
/// spawned `cargo` process exits.
pub fn load_out_dirs(
    cargo_toml: &Path,
    cargo_features: &CargoFeatures,
) -> FxHashMap<PackageId, PathBuf> {
    let mut args: Vec<String> = vec![
        "check".to_string(),
        "--message-format=json".to_string(),
        "--manifest-path".to_string(),
        format!("{}", cargo_toml.display()),
    ];

    if cargo_features.all_features {
        args.push("--all-features".to_string());
    } else if cargo_features.no_default_features {
        // FIXME: `NoDefaultFeatures` is mutual exclusive with `SomeFeatures`
        // https://github.com/oli-obk/cargo_metadata/issues/79
        args.push("--no-default-features".to_string());
    } else if !cargo_features.features.is_empty() {
        // Fix: feature names must be passed as the value of `--features`.
        // Pushing them as bare arguments made cargo reject them as unknown
        // positional arguments, so the check (and OUT_DIR discovery) failed.
        args.push("--features".to_string());
        args.push(cargo_features.features.join(" "));
    }

    let mut res = FxHashMap::default();
    let mut child = run_cargo(&args, cargo_toml.parent(), |message| {
        match message {
            // Build scripts are the only messages that carry an OUT_DIR.
            Message::BuildScriptExecuted(message) => {
                res.insert(message.package_id, message.out_dir);
            }
            Message::CompilerArtifact(_) => (),
            Message::CompilerMessage(_) => (),
            Message::Unknown => (),
        }
        // Always keep reading until cargo finishes.
        true
    });

    // We only care about the collected messages, not the exit status.
    let _ = child.wait();
    res
}

View file

@ -150,6 +150,21 @@ impl ProjectWorkspace {
} }
} }
/// Collects the `OUT_DIR` of every workspace package that has one recorded.
/// JSON projects carry no cargo metadata, so they contribute an empty list.
pub fn out_dirs(&self) -> Vec<PathBuf> {
    match self {
        ProjectWorkspace::Json { project: _project } => Vec::new(),
        ProjectWorkspace::Cargo { cargo, sysroot: _sysroot } => cargo
            .packages()
            // Keep only packages whose build script produced an OUT_DIR.
            .filter_map(|pkg| pkg.out_dir(&cargo))
            .map(|dir| dir.to_path_buf())
            .collect(),
    }
}
pub fn n_packages(&self) -> usize { pub fn n_packages(&self) -> usize {
match self { match self {
ProjectWorkspace::Json { project } => project.crates.len(), ProjectWorkspace::Json { project } => project.crates.len(),
@ -162,7 +177,8 @@ impl ProjectWorkspace {
pub fn to_crate_graph( pub fn to_crate_graph(
&self, &self,
default_cfg_options: &CfgOptions, default_cfg_options: &CfgOptions,
outdirs: &FxHashMap<String, (ExternSourceId, String)>, additional_out_dirs: &FxHashMap<String, PathBuf>,
extern_source_roots: &FxHashMap<PathBuf, ExternSourceId>,
load: &mut dyn FnMut(&Path) -> Option<FileId>, load: &mut dyn FnMut(&Path) -> Option<FileId>,
) -> CrateGraph { ) -> CrateGraph {
let mut crate_graph = CrateGraph::default(); let mut crate_graph = CrateGraph::default();
@ -237,9 +253,11 @@ impl ProjectWorkspace {
let mut env = Env::default(); let mut env = Env::default();
let mut extern_source = ExternSource::default(); let mut extern_source = ExternSource::default();
if let Some((id, path)) = outdirs.get(krate.name(&sysroot)) { if let Some(path) = additional_out_dirs.get(krate.name(&sysroot)) {
env.set("OUT_DIR", path.clone()); env.set("OUT_DIR", path.to_string_lossy().to_string());
extern_source.set_extern_path(&path, *id); if let Some(extern_source_id) = extern_source_roots.get(path) {
extern_source.set_extern_path(&path, *extern_source_id);
}
} }
let crate_id = crate_graph.add_crate_root( let crate_id = crate_graph.add_crate_root(
@ -292,9 +310,20 @@ impl ProjectWorkspace {
}; };
let mut env = Env::default(); let mut env = Env::default();
let mut extern_source = ExternSource::default(); let mut extern_source = ExternSource::default();
if let Some((id, path)) = outdirs.get(pkg.name(&cargo)) { if let Some(out_dir) = dbg!(pkg.out_dir(cargo)) {
env.set("OUT_DIR", path.clone()); env.set("OUT_DIR", out_dir.to_string_lossy().to_string());
extern_source.set_extern_path(&path, *id); if let Some(extern_source_id) =
dbg!(dbg!(&extern_source_roots).get(out_dir))
{
extern_source.set_extern_path(&out_dir, *extern_source_id);
}
} else {
if let Some(path) = additional_out_dirs.get(pkg.name(&cargo)) {
env.set("OUT_DIR", path.to_string_lossy().to_string());
if let Some(extern_source_id) = extern_source_roots.get(path) {
extern_source.set_extern_path(&path, *extern_source_id);
}
}
} }
let crate_id = crate_graph.add_crate_root( let crate_id = crate_graph.add_crate_root(
file_id, file_id,

View file

@ -54,12 +54,18 @@ pub(crate) fn load_cargo(
// FIXME: outdirs? // FIXME: outdirs?
let outdirs = FxHashMap::default(); let outdirs = FxHashMap::default();
let extern_source_roots = FxHashMap::default();
let crate_graph = ws.to_crate_graph(&default_cfg_options, &outdirs, &mut |path: &Path| { let crate_graph = ws.to_crate_graph(
let vfs_file = vfs.load(path); &default_cfg_options,
log::debug!("vfs file {:?} -> {:?}", path, vfs_file); &outdirs,
vfs_file.map(vfs_file_to_id) &extern_source_roots,
}); &mut |path: &Path| {
let vfs_file = vfs.load(path);
log::debug!("vfs file {:?} -> {:?}", path, vfs_file);
vfs_file.map(vfs_file_to_id)
},
);
log::debug!("crate graph: {:?}", crate_graph); log::debug!("crate graph: {:?}", crate_graph);
let source_roots = roots let source_roots = roots

View file

@ -105,11 +105,15 @@ impl WorldState {
})); }));
} }
let extern_dirs: FxHashSet<_> = let mut extern_dirs: FxHashSet<_> =
additional_out_dirs.iter().map(|(_, path)| (PathBuf::from(path))).collect(); additional_out_dirs.iter().map(|(_, path)| (PathBuf::from(path))).collect();
for ws in workspaces.iter() {
extern_dirs.extend(ws.out_dirs());
}
let mut extern_source_roots = FxHashMap::default(); let mut extern_source_roots = FxHashMap::default();
roots.extend(additional_out_dirs.iter().map(|(_, path)| { roots.extend(extern_dirs.iter().map(|path| {
let mut filter = RustPackageFilterBuilder::default().set_member(false); let mut filter = RustPackageFilterBuilder::default().set_member(false);
for glob in exclude_globs.iter() { for glob in exclude_globs.iter() {
filter = filter.exclude(glob.clone()); filter = filter.exclude(glob.clone());
@ -148,17 +152,21 @@ impl WorldState {
vfs_file.map(|f| FileId(f.0)) vfs_file.map(|f| FileId(f.0))
}; };
let mut outdirs = FxHashMap::default(); let additional_out_dirs: FxHashMap<String, PathBuf> = additional_out_dirs
for (name, path) in additional_out_dirs { .into_iter()
let path = PathBuf::from(&path); .map(|(name, path)| (name, PathBuf::from(&path)))
if let Some(id) = extern_source_roots.get(&path) { .collect();
outdirs.insert(name, (id.clone(), path.to_string_lossy().replace("\\", "/")));
}
}
workspaces workspaces
.iter() .iter()
.map(|ws| ws.to_crate_graph(&default_cfg_options, &outdirs, &mut load)) .map(|ws| {
ws.to_crate_graph(
&default_cfg_options,
&additional_out_dirs,
&extern_source_roots,
&mut load,
)
})
.for_each(|graph| { .for_each(|graph| {
crate_graph.extend(graph); crate_graph.extend(graph);
}); });

View file

@ -362,6 +362,11 @@
}, },
"default": [], "default": [],
"description": "List of features to activate" "description": "List of features to activate"
},
"rust-analyzer.cargoFeatures.loadOutDirsFromCheck": {
"type": "boolean",
"default": false,
"markdownDescription": "Run `cargo check` on startup to get the correct value for package OUT_DIRs"
} }
} }
}, },

View file

@ -22,6 +22,7 @@ export interface CargoFeatures {
noDefaultFeatures: boolean; noDefaultFeatures: boolean;
allFeatures: boolean; allFeatures: boolean;
features: string[]; features: string[];
loadOutDirsFromCheck: boolean;
} }
export const enum UpdatesChannel { export const enum UpdatesChannel {
@ -204,6 +205,7 @@ export class Config {
get featureFlags() { return this.cfg.get("featureFlags") as Record<string, boolean>; } get featureFlags() { return this.cfg.get("featureFlags") as Record<string, boolean>; }
get additionalOutDirs() { return this.cfg.get("additionalOutDirs") as Record<string, string>; } get additionalOutDirs() { return this.cfg.get("additionalOutDirs") as Record<string, string>; }
get rustfmtArgs() { return this.cfg.get("rustfmtArgs") as string[]; } get rustfmtArgs() { return this.cfg.get("rustfmtArgs") as string[]; }
get loadOutDirsFromCheck() { return this.cfg.get("loadOutDirsFromCheck") as boolean; }
get cargoWatchOptions(): CargoWatchOptions { get cargoWatchOptions(): CargoWatchOptions {
return { return {
@ -219,6 +221,7 @@ export class Config {
noDefaultFeatures: this.cfg.get("cargoFeatures.noDefaultFeatures") as boolean, noDefaultFeatures: this.cfg.get("cargoFeatures.noDefaultFeatures") as boolean,
allFeatures: this.cfg.get("cargoFeatures.allFeatures") as boolean, allFeatures: this.cfg.get("cargoFeatures.allFeatures") as boolean,
features: this.cfg.get("cargoFeatures.features") as string[], features: this.cfg.get("cargoFeatures.features") as string[],
loadOutDirsFromCheck: this.cfg.get("cargoFeatures.loadOutDirsFromCheck") as boolean,
}; };
} }