Mirror of https://github.com/rust-lang/rust-analyzer, synced 2024-12-27 05:23:24 +00:00

Merge #7193

7193: Show progress for fetching workspace (cargo-metadata and loadOutDirsFromCheck) r=matklad a=edwin0cheng

![Peek 2021-01-07 21-57](https://user-images.githubusercontent.com/11014119/103902132-0db4c780-5135-11eb-94d3-32429445be87.gif)

Fixes #7188
Fixes #3300

Co-authored-by: Edwin Cheng <edwin0cheng@gmail.com>
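At a high level, the change threads a `&dyn Fn(String)` progress callback from the server's main loop down into the workspace-loading code, so that the blocking `cargo metadata` step and the `cargo check` pass behind `loadOutDirsFromCheck` can both report what they are working on. A minimal, self-contained sketch of that callback shape (the `fetch_workspace` function and its messages are illustrative only, not the actual rust-analyzer API):

```rust
// Illustrative only: a blocking "fetch" that reports coarse and fine-grained
// progress through a borrowed callback, mirroring the shape of this PR.
fn fetch_workspace(progress: &dyn Fn(String)) {
    // `cargo metadata` is a single blocking call, so it is reported as one step.
    progress("metadata".to_string());

    // The `cargo check` pass emits one JSON message per compiled target, so
    // each target name can be forwarded as its own progress report.
    for target in ["serde", "log", "my-crate"] {
        progress(format!("metadata {}", target));
    }
}

fn main() {
    // A reference to any closure taking a `String` coerces to `&dyn Fn(String)`.
    fetch_workspace(&|msg| eprintln!("fetching: {}", msg));
}
```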
Commit 4ddf075673

9 changed files with 102 additions and 41 deletions
Cargo.lock (generated)
@@ -427,6 +427,7 @@ dependencies = [
  "jod-thread",
  "log",
  "serde_json",
+ "stdx",
  "toolchain",
 ]
@@ -17,3 +17,4 @@ serde_json = "1.0.48"
 jod-thread = "0.1.1"
 
 toolchain = { path = "../toolchain", version = "0.0.0" }
+stdx = { path = "../stdx", version = "0.0.0" }
@@ -5,13 +5,13 @@
 use std::{
     fmt,
     io::{self, BufReader},
-    ops,
     path::PathBuf,
     process::{self, Command, Stdio},
     time::Duration,
 };
 
 use crossbeam_channel::{never, select, unbounded, Receiver, Sender};
+use stdx::JodChild;
 
 pub use cargo_metadata::diagnostic::{
     Applicability, Diagnostic, DiagnosticCode, DiagnosticLevel, DiagnosticSpan,
@@ -323,24 +323,3 @@ impl CargoActor {
         Ok(read_at_least_one_message)
     }
 }
-
-struct JodChild(process::Child);
-
-impl ops::Deref for JodChild {
-    type Target = process::Child;
-    fn deref(&self) -> &process::Child {
-        &self.0
-    }
-}
-
-impl ops::DerefMut for JodChild {
-    fn deref_mut(&mut self) -> &mut process::Child {
-        &mut self.0
-    }
-}
-
-impl Drop for JodChild {
-    fn drop(&mut self) {
-        let _ = self.0.kill();
-    }
-}
@@ -3,9 +3,10 @@
 use std::{
     convert::TryInto,
     ffi::OsStr,
+    io::BufReader,
     ops,
     path::{Path, PathBuf},
-    process::Command,
+    process::{Command, Stdio},
 };
 
 use anyhow::{Context, Result};
@@ -15,6 +16,7 @@ use cargo_metadata::{BuildScript, CargoOpt, Message, MetadataCommand, PackageId}
 use itertools::Itertools;
 use paths::{AbsPath, AbsPathBuf};
 use rustc_hash::FxHashMap;
+use stdx::JodChild;
 
 use crate::cfg_flag::CfgFlag;
 use crate::utf8_stdout;
@@ -171,6 +173,7 @@ impl CargoWorkspace {
     pub fn from_cargo_metadata(
         cargo_toml: &AbsPath,
         config: &CargoConfig,
+        progress: &dyn Fn(String),
     ) -> Result<CargoWorkspace> {
         let mut meta = MetadataCommand::new();
         meta.cargo_path(toolchain::cargo());
@@ -220,6 +223,9 @@ impl CargoWorkspace {
             meta.other_options(vec![String::from("--filter-platform"), target]);
         }
 
+        // FIXME: Currently MetadataCommand is not based on parse_stream,
+        // So we just report it as a whole
+        progress("metadata".to_string());
         let mut meta = meta.exec().with_context(|| {
             let cwd: Option<AbsPathBuf> =
                 std::env::current_dir().ok().and_then(|p| p.try_into().ok());
@@ -243,7 +249,7 @@ impl CargoWorkspace {
         let mut envs = FxHashMap::default();
         let mut proc_macro_dylib_paths = FxHashMap::default();
         if config.load_out_dirs_from_check {
-            let resources = load_extern_resources(cargo_toml, config)?;
+            let resources = load_extern_resources(cargo_toml, config, progress)?;
             out_dir_by_id = resources.out_dirs;
             cfgs = resources.cfgs;
             envs = resources.env;
@@ -368,6 +374,7 @@ pub(crate) struct ExternResources {
 pub(crate) fn load_extern_resources(
     cargo_toml: &Path,
     cargo_features: &CargoConfig,
+    progress: &dyn Fn(String),
 ) -> Result<ExternResources> {
     let mut cmd = Command::new(toolchain::cargo());
     cmd.args(&["check", "--message-format=json", "--manifest-path"]).arg(cargo_toml);
@@ -395,11 +402,14 @@ pub(crate) fn load_extern_resources(
         }
     }
 
-    let output = cmd.output()?;
+    cmd.stdout(Stdio::piped()).stderr(Stdio::null()).stdin(Stdio::null());
+
+    let mut child = cmd.spawn().map(JodChild)?;
+    let child_stdout = child.stdout.take().unwrap();
+    let stdout = BufReader::new(child_stdout);
 
     let mut res = ExternResources::default();
-
-    for message in cargo_metadata::Message::parse_stream(output.stdout.as_slice()) {
+    for message in cargo_metadata::Message::parse_stream(stdout) {
         if let Ok(message) = message {
             match message {
                 Message::BuildScriptExecuted(BuildScript {
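The hunk above replaces the buffered `cmd.output()` call with a spawned child whose stdout is parsed incrementally, which is what makes per-crate progress possible: messages are handled as `cargo check` produces them rather than after it exits. A hedged, standalone sketch of that streaming pattern (it assumes the `cargo_metadata` crate and a Cargo project in the current directory, and simplifies error handling):

```rust
use std::io::BufReader;
use std::process::{Command, Stdio};

use cargo_metadata::Message;

fn main() -> std::io::Result<()> {
    // Pipe stdout so messages can be read as they are produced.
    let mut child = Command::new("cargo")
        .args(&["check", "--message-format=json"])
        .stdout(Stdio::piped())
        .stderr(Stdio::null())
        .stdin(Stdio::null())
        .spawn()?;

    let stdout = BufReader::new(child.stdout.take().unwrap());

    // parse_stream yields one cargo_metadata::Message per JSON line.
    for message in Message::parse_stream(stdout) {
        if let Ok(Message::CompilerArtifact(artifact)) = message {
            eprintln!("checked {}", artifact.target.name);
        }
    }

    child.wait()?;
    Ok(())
}
```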
@@ -432,6 +442,8 @@ pub(crate) fn load_extern_resources(
                         res.env.insert(package_id, env);
                     }
                     Message::CompilerArtifact(message) => {
+                        progress(format!("metadata {}", message.target.name));
+
                         if message.target.kind.contains(&"proc-macro".to_string()) {
                             let package_id = message.package_id;
                             // Skip rmeta file
@@ -442,7 +454,9 @@ pub(crate) fn load_extern_resources(
                         }
                     }
                 }
-                Message::CompilerMessage(_) => (),
+                Message::CompilerMessage(message) => {
+                    progress(message.target.name.clone());
+                }
                 Message::Unknown => (),
                 Message::BuildFinished(_) => {}
                 Message::TextLine(_) => {}
@@ -64,7 +64,11 @@ impl fmt::Debug for ProjectWorkspace {
 }
 
 impl ProjectWorkspace {
-    pub fn load(manifest: ProjectManifest, config: &CargoConfig) -> Result<ProjectWorkspace> {
+    pub fn load(
+        manifest: ProjectManifest,
+        config: &CargoConfig,
+        progress: &dyn Fn(String),
+    ) -> Result<ProjectWorkspace> {
         let res = match manifest {
             ProjectManifest::ProjectJson(project_json) => {
                 let file = fs::read_to_string(&project_json).with_context(|| {
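With the widened signature, a call site passes a reference to any closure that takes a `String`; callers with nothing to report pass a no-op closure, as the CLI loader does in a later hunk (`&|_| {}`). A toy stand-in showing the call-site side (not rust-analyzer code; `load` here is a hypothetical function with the same parameter shape):

```rust
// Toy stand-in for the new `ProjectWorkspace::load` shape, only to show how
// call sites satisfy the added `&dyn Fn(String)` parameter.
struct Workspace;

fn load(manifest: &str, progress: &dyn Fn(String)) -> Result<Workspace, String> {
    progress(format!("loading {}", manifest));
    Ok(Workspace)
}

fn main() -> Result<(), String> {
    // Interactive caller: forward each progress message to a log or UI.
    let _ws = load("Cargo.toml", &|msg| eprintln!("{}", msg))?;
    // Batch caller: ignore progress entirely.
    let _ws = load("Cargo.toml", &|_| {})?;
    Ok(())
}
```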
@@ -84,15 +88,14 @@ impl ProjectWorkspace {
                     cmd
                 })?;
 
-                let cargo = CargoWorkspace::from_cargo_metadata(&cargo_toml, config).with_context(
-                    || {
+                let cargo = CargoWorkspace::from_cargo_metadata(&cargo_toml, config, progress)
+                    .with_context(|| {
                         format!(
                             "Failed to read Cargo metadata from Cargo.toml file {}, {}",
                             cargo_toml.display(),
                             cargo_version
                         )
-                    },
-                )?;
+                    })?;
                 let sysroot = if config.no_sysroot {
                     Sysroot::default()
                 } else {
@@ -105,9 +108,12 @@ impl ProjectWorkspace {
                 };
 
                 let rustc = if let Some(rustc_dir) = &config.rustc_source {
-                    Some(CargoWorkspace::from_cargo_metadata(&rustc_dir, config).with_context(
-                        || format!("Failed to read Cargo metadata for Rust sources"),
-                    )?)
+                    Some(
+                        CargoWorkspace::from_cargo_metadata(&rustc_dir, config, progress)
+                            .with_context(|| {
+                                format!("Failed to read Cargo metadata for Rust sources")
+                            })?,
+                    )
                 } else {
                     None
                 };
@@ -21,6 +21,7 @@ pub fn load_cargo(
     let ws = ProjectWorkspace::load(
         root,
         &CargoConfig { load_out_dirs_from_check, ..Default::default() },
+        &|_| {},
    )?;
 
     let (sender, receiver) = unbounded();
@@ -22,6 +22,7 @@ use crate::{
     global_state::{file_id_to_url, url_to_file_id, GlobalState, Status},
     handlers, lsp_ext,
     lsp_utils::{apply_document_changes, is_canceled, notification_is, Progress},
+    reload::ProjectWorkspaceProgress,
     Result,
 };
 
@@ -63,6 +64,7 @@ pub(crate) enum Task {
     Diagnostics(Vec<(FileId, Vec<lsp_types::Diagnostic>)>),
     Workspaces(Vec<anyhow::Result<ProjectWorkspace>>),
     PrimeCaches(PrimeCachesProgress),
+    FetchWorkspace(ProjectWorkspaceProgress),
 }
 
 impl fmt::Debug for Event {
@@ -216,6 +218,16 @@ impl GlobalState {
                     }
                     PrimeCachesProgress::Finished => prime_caches_progress.push(progress),
                 },
+                Task::FetchWorkspace(progress) => {
+                    let (state, msg) = match progress {
+                        ProjectWorkspaceProgress::Begin => (Progress::Begin, None),
+                        ProjectWorkspaceProgress::Report(msg) => {
+                            (Progress::Report, Some(msg))
+                        }
+                        ProjectWorkspaceProgress::End => (Progress::End, None),
+                    };
+                    self.report_progress("fetching", state, msg, None);
+                }
             }
             // Coalesce multiple task events into one loop turn
             task = match self.task_pool.receiver.try_recv() {
@@ -15,6 +15,13 @@ use crate::{
 };
 use lsp_ext::StatusParams;
 
+#[derive(Debug)]
+pub(crate) enum ProjectWorkspaceProgress {
+    Begin,
+    Report(String),
+    End,
+}
+
 impl GlobalState {
     pub(crate) fn update_configuration(&mut self, config: Config) {
         let _p = profile::span("GlobalState::update_configuration");
@@ -93,23 +100,42 @@ impl GlobalState {
     }
     pub(crate) fn fetch_workspaces(&mut self) {
         log::info!("will fetch workspaces");
-        self.task_pool.handle.spawn({
+        self.task_pool.handle.spawn_with_sender({
             let linked_projects = self.config.linked_projects();
             let cargo_config = self.config.cargo();
-            move || {
+            move |sender| {
+                let progress = {
+                    let sender = sender.clone();
+                    move |msg| {
+                        sender
+                            .send(Task::FetchWorkspace(ProjectWorkspaceProgress::Report(msg)))
+                            .unwrap()
+                    }
+                };
+
+                sender.send(Task::FetchWorkspace(ProjectWorkspaceProgress::Begin)).unwrap();
+
                 let workspaces = linked_projects
                     .iter()
                     .map(|project| match project {
                         LinkedProject::ProjectManifest(manifest) => {
-                            project_model::ProjectWorkspace::load(manifest.clone(), &cargo_config)
+                            project_model::ProjectWorkspace::load(
+                                manifest.clone(),
+                                &cargo_config,
+                                &progress,
+                            )
                         }
                         LinkedProject::InlineJsonProject(it) => {
                             project_model::ProjectWorkspace::load_inline(it.clone())
                         }
                     })
                     .collect::<Vec<_>>();
+
+                sender.send(Task::FetchWorkspace(ProjectWorkspaceProgress::End)).unwrap();
                 log::info!("did fetch workspaces {:?}", workspaces);
-                Task::Workspaces(workspaces)
+                sender.send(Task::Workspaces(workspaces)).unwrap()
             }
         });
     }
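`fetch_workspaces` now uses `spawn_with_sender`, so the worker reports `Begin`/`Report`/`End` progress and the final `Task::Workspaces` result back to the main loop as messages instead of a single return value. A standalone sketch of that shape using `std::thread` and `std::sync::mpsc` (rust-analyzer's task pool and crossbeam channels are not reproduced here; the enum names only mirror the ones in the diff):

```rust
use std::sync::mpsc;
use std::thread;

// Stand-ins for the Task / ProjectWorkspaceProgress types in the diff.
#[derive(Debug)]
enum Progress {
    Begin,
    Report(String),
    End,
}

#[derive(Debug)]
enum Task {
    FetchWorkspace(Progress),
    Workspaces(Vec<String>),
}

fn main() {
    let (sender, receiver) = mpsc::channel::<Task>();

    // The worker owns a sender; a clone is moved into the progress closure so
    // it can be invoked repeatedly while the workspaces are being loaded.
    let worker = thread::spawn(move || {
        let progress = {
            let sender = sender.clone();
            move |msg: String| {
                sender.send(Task::FetchWorkspace(Progress::Report(msg))).unwrap()
            }
        };

        sender.send(Task::FetchWorkspace(Progress::Begin)).unwrap();
        let workspaces: Vec<String> = ["a", "b"]
            .iter()
            .map(|name| {
                progress(format!("loading {}", name));
                name.to_string()
            })
            .collect();
        sender.send(Task::FetchWorkspace(Progress::End)).unwrap();

        // The final result is also delivered as a message, not a return value.
        sender.send(Task::Workspaces(workspaces)).unwrap();
    });

    // Main loop: drain messages until the worker drops its senders.
    for task in receiver {
        println!("{:?}", task);
    }
    worker.join().unwrap();
}
```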
@@ -1,5 +1,5 @@
 //! Missing batteries for standard libraries.
-use std::time::Instant;
+use std::{ops, process, time::Instant};
 
 mod macros;
 pub mod panic_context;
@@ -147,6 +147,27 @@
     left
 }
 
+pub struct JodChild(pub process::Child);
+
+impl ops::Deref for JodChild {
+    type Target = process::Child;
+    fn deref(&self) -> &process::Child {
+        &self.0
+    }
+}
+
+impl ops::DerefMut for JodChild {
+    fn deref_mut(&mut self) -> &mut process::Child {
+        &mut self.0
+    }
+}
+
+impl Drop for JodChild {
+    fn drop(&mut self) {
+        let _ = self.0.kill();
+    }
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
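The kill-on-drop guard that used to be a private helper next to `CargoActor` (removed in an earlier hunk) is now a public `stdx` utility, so the `cargo check` child spawned during workspace loading is cleaned up even if parsing its output bails out early. A brief standalone usage sketch with a copy of the struct from the diff (the `sleep` command is a Unix-only example; substitute any long-running process):

```rust
use std::{ops, process, thread, time::Duration};

// Copy of the guard from the diff: the child process is killed when the
// guard is dropped, so it cannot outlive the code that spawned it.
pub struct JodChild(pub process::Child);

impl ops::Deref for JodChild {
    type Target = process::Child;
    fn deref(&self) -> &process::Child {
        &self.0
    }
}

impl ops::DerefMut for JodChild {
    fn deref_mut(&mut self) -> &mut process::Child {
        &mut self.0
    }
}

impl Drop for JodChild {
    fn drop(&mut self) {
        let _ = self.0.kill();
    }
}

fn main() -> std::io::Result<()> {
    // Spawn a process that would otherwise outlive this function.
    let child = process::Command::new("sleep").arg("60").spawn().map(JodChild)?;
    thread::sleep(Duration::from_millis(100));
    // `child` is dropped here: Drop kills the process, so an early return or
    // panic in the caller cannot leak a running child such as `cargo check`.
    drop(child);
    Ok(())
}
```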