Show workspace info in the status bar

This commit is contained in:
Lukas Wirth 2024-04-26 11:06:52 +02:00
parent 56bee2ddaf
commit 18ca22a98e
15 changed files with 168 additions and 81 deletions

View file

@ -6,3 +6,10 @@
# prettier format
f247090558c9ba3c551566eae5882b7ca865225f
# subtree syncs
932d85b52946d917deab2c23ead552f7f713b828
3e358a6827d83e8d6473913a5e304734aadfed04
9d2cb42a413e51deb50b36794a2e1605381878fc
f532576ac53ddcc666bc8d59e0b6437065e2f599
c48062fe2ab9a2d913d1985a6b0aec4bf936bfc1

View file

@ -131,7 +131,7 @@ pub struct SemanticsImpl<'db> {
pub db: &'db dyn HirDatabase,
s2d_cache: RefCell<SourceToDefCache>,
/// Root node to `HirFileId` cache
cache: RefCell<FxHashMap<SyntaxNode, HirFileId>>,
root_to_file_cache: RefCell<FxHashMap<SyntaxNode, HirFileId>>,
// These 2 caches are mainly useful for semantic highlighting as nothing else descends a lot of tokens
// So we might wanna move them out into something specific for semantic highlighting
expansion_info_cache: RefCell<FxHashMap<MacroFileId, ExpansionInfo>>,
@ -294,7 +294,7 @@ impl<'db> SemanticsImpl<'db> {
SemanticsImpl {
db,
s2d_cache: Default::default(),
cache: Default::default(),
root_to_file_cache: Default::default(),
expansion_info_cache: Default::default(),
macro_call_cache: Default::default(),
}
@ -690,6 +690,7 @@ impl<'db> SemanticsImpl<'db> {
exp_info
});
// FIXME: uncached parse
// Create the source analyzer for the macro call scope
let Some(sa) = self.analyze_no_infer(&self.parse_or_expand(expansion_info.call_file()))
else {
@ -1025,6 +1026,7 @@ impl<'db> SemanticsImpl<'db> {
None => {
let call_node = file_id.macro_file()?.call_node(db);
// cache the node
// FIXME: uncached parse
self.parse_or_expand(call_node.file_id);
Some(call_node)
}
@ -1397,7 +1399,7 @@ impl<'db> SemanticsImpl<'db> {
fn cache(&self, root_node: SyntaxNode, file_id: HirFileId) {
assert!(root_node.parent().is_none());
let mut cache = self.cache.borrow_mut();
let mut cache = self.root_to_file_cache.borrow_mut();
let prev = cache.insert(root_node, file_id);
assert!(prev.is_none() || prev == Some(file_id))
}
@ -1407,7 +1409,7 @@ impl<'db> SemanticsImpl<'db> {
}
fn lookup(&self, root_node: &SyntaxNode) -> Option<HirFileId> {
let cache = self.cache.borrow();
let cache = self.root_to_file_cache.borrow();
cache.get(root_node).copied()
}
@ -1427,7 +1429,7 @@ impl<'db> SemanticsImpl<'db> {
known nodes: {}\n\n",
node,
root_node,
self.cache
self.root_to_file_cache
.borrow()
.keys()
.map(|it| format!("{it:?}"))

View file

@ -68,9 +68,9 @@ pub fn load_workspace(
let proc_macro_server = match &load_config.with_proc_macro_server {
ProcMacroServerChoice::Sysroot => ws
.find_sysroot_proc_macro_srv()
.and_then(|it| ProcMacroServer::spawn(it, extra_env).map_err(Into::into)),
.and_then(|it| ProcMacroServer::spawn(&it, extra_env).map_err(Into::into)),
ProcMacroServerChoice::Explicit(path) => {
ProcMacroServer::spawn(path.clone(), extra_env).map_err(Into::into)
ProcMacroServer::spawn(path, extra_env).map_err(Into::into)
}
ProcMacroServerChoice::None => Err(anyhow::format_err!("proc macro server disabled")),
};

View file

@ -13,7 +13,7 @@ mod version;
use base_db::Env;
use indexmap::IndexSet;
use paths::AbsPathBuf;
use paths::{AbsPath, AbsPathBuf};
use rustc_hash::FxHashMap;
use span::Span;
use std::{
@ -54,6 +54,7 @@ pub struct ProcMacroServer {
///
/// Therefore, we just wrap the `ProcMacroProcessSrv` in a mutex here.
process: Arc<Mutex<ProcMacroProcessSrv>>,
path: AbsPathBuf,
}
pub struct MacroDylib {
@ -113,11 +114,18 @@ pub struct MacroPanic {
impl ProcMacroServer {
/// Spawns an external process as the proc macro server and returns a client connected to it.
pub fn spawn(
process_path: AbsPathBuf,
process_path: &AbsPath,
env: &FxHashMap<String, String>,
) -> io::Result<ProcMacroServer> {
let process = ProcMacroProcessSrv::run(process_path, env)?;
Ok(ProcMacroServer { process: Arc::new(Mutex::new(process)) })
Ok(ProcMacroServer {
process: Arc::new(Mutex::new(process)),
path: process_path.to_owned(),
})
}
/// Returns the path of the proc-macro server executable this client was spawned from.
pub fn path(&self) -> &AbsPath {
&self.path
}
pub fn load_dylib(&self, dylib: MacroDylib) -> Result<Vec<ProcMacro>, ServerError> {

View file

@ -6,7 +6,7 @@ use std::{
sync::Arc,
};
use paths::{AbsPath, AbsPathBuf};
use paths::AbsPath;
use rustc_hash::FxHashMap;
use stdx::JodChild;
@ -28,11 +28,11 @@ pub(crate) struct ProcMacroProcessSrv {
impl ProcMacroProcessSrv {
pub(crate) fn run(
process_path: AbsPathBuf,
process_path: &AbsPath,
env: &FxHashMap<String, String>,
) -> io::Result<ProcMacroProcessSrv> {
let create_srv = |null_stderr| {
let mut process = Process::run(process_path.clone(), env, null_stderr)?;
let mut process = Process::run(process_path, env, null_stderr)?;
let (stdin, stdout) = process.stdio().expect("couldn't access child stdio");
io::Result::Ok(ProcMacroProcessSrv {
@ -153,11 +153,11 @@ struct Process {
impl Process {
fn run(
path: AbsPathBuf,
path: &AbsPath,
env: &FxHashMap<String, String>,
null_stderr: bool,
) -> io::Result<Process> {
let child = JodChild(mk_child(&path, env, null_stderr)?);
let child = JodChild(mk_child(path, env, null_stderr)?);
Ok(Process { child })
}

View file

@ -56,6 +56,7 @@ use serde::{de, Deserialize, Serialize};
use span::Edition;
use crate::cfg::CfgFlag;
use crate::ManifestPath;
/// Roots and crates that compose this Rust project.
#[derive(Clone, Debug, Eq, PartialEq)]
@ -65,6 +66,7 @@ pub struct ProjectJson {
/// e.g. `path/to/sysroot/lib/rustlib/src/rust`
pub(crate) sysroot_src: Option<AbsPathBuf>,
project_root: AbsPathBuf,
manifest: Option<ManifestPath>,
crates: Vec<Crate>,
}
@ -96,12 +98,17 @@ impl ProjectJson {
/// * `base` - The path to the workspace root (i.e. the folder containing `rust-project.json`)
/// * `data` - The parsed contents of `rust-project.json`, or project json that's passed via
/// configuration.
pub fn new(base: &AbsPath, data: ProjectJsonData) -> ProjectJson {
pub fn new(
manifest: Option<ManifestPath>,
base: &AbsPath,
data: ProjectJsonData,
) -> ProjectJson {
let absolutize_on_base = |p| base.absolutize(p);
ProjectJson {
sysroot: data.sysroot.map(absolutize_on_base),
sysroot_src: data.sysroot_src.map(absolutize_on_base),
project_root: base.to_path_buf(),
manifest,
crates: data
.crates
.into_iter()
@ -159,6 +166,11 @@ impl ProjectJson {
pub fn path(&self) -> &AbsPath {
&self.project_root
}
/// Returns the path to the project's manifest, or to the project's root folder
/// if no manifest exists.
pub fn manifest_or_root(&self) -> &AbsPath {
self.manifest.as_ref().map_or(&self.project_root, |manifest| manifest.as_ref())
}
}
#[derive(Serialize, Deserialize, Debug, Clone)]

View file

@ -133,6 +133,24 @@ impl Sysroot {
}
}
/// Checks that the loaded sysroot contains the `core` library, returning a
/// human-readable error message suggesting a fix otherwise.
pub fn check_has_core(&self) -> Result<(), String> {
    // If the sysroot source root is absent or failed to load, there is nothing
    // meaningful to check here; other diagnostics report that situation.
    let Some(Ok(src_root)) = &self.src_root else { return Ok(()) };
    let has_core = match &self.mode {
        SysrootMode::Workspace(ws) => ws.packages().any(|p| ws[p].name == "core"),
        SysrootMode::Stitched(stitched) => stitched.by_name("core").is_some(),
    };
    if has_core {
        Ok(())
    } else {
        // Point the user at the most likely fix for their setup.
        let var_note = if env::var_os("RUST_SRC_PATH").is_some() {
            " (`RUST_SRC_PATH` might be incorrect, try unsetting it)"
        } else {
            " try running `rustup component add rust-src` to possibly fix this"
        };
        Err(format!("could not find libcore in loaded sysroot at `{src_root}`{var_note}"))
    }
}
pub fn num_packages(&self) -> usize {
match &self.mode {
SysrootMode::Workspace(ws) => ws.packages().count(),

View file

@ -152,7 +152,7 @@ fn rooted_project_json(data: ProjectJsonData) -> ProjectJson {
replace_root(&mut root, true);
let path = Utf8Path::new(&root);
let base = AbsPath::assert(path);
ProjectJson::new(base, data)
ProjectJson::new(None, base, data)
}
fn to_crate_graph(project_workspace: ProjectWorkspace) -> (CrateGraph, ProcMacroPaths) {

View file

@ -199,7 +199,8 @@ impl ProjectWorkspace {
let data = serde_json::from_str(&file)
.with_context(|| format!("Failed to deserialize json file {project_json}"))?;
let project_location = project_json.parent().to_path_buf();
let project_json: ProjectJson = ProjectJson::new(&project_location, data);
let project_json: ProjectJson =
ProjectJson::new(Some(project_json.clone()), &project_location, data);
ProjectWorkspace::load_inline(
project_json,
config.target.as_deref(),
@ -555,7 +556,7 @@ impl ProjectWorkspace {
pub fn manifest_or_root(&self) -> &AbsPath {
match &self.kind {
ProjectWorkspaceKind::Cargo { cargo, .. } => cargo.manifest_path(),
ProjectWorkspaceKind::Json(project) => project.path(),
ProjectWorkspaceKind::Json(project) => project.manifest_or_root(),
ProjectWorkspaceKind::DetachedFile { file, .. } => file,
}
}

View file

@ -1323,7 +1323,7 @@ impl Config {
.map(Into::into)
}
ManifestOrProjectJson::ProjectJson(it) => {
Some(ProjectJson::new(&self.root_path, it.clone()).into())
Some(ProjectJson::new(None, &self.root_path, it.clone()).into())
}
})
.collect(),

View file

@ -2,6 +2,7 @@
#![allow(clippy::disallowed_types)]
use std::ops;
use std::path::PathBuf;
use ide_db::line_index::WideEncoding;
@ -494,10 +495,12 @@ impl Notification for ServerStatusNotification {
}
#[derive(Deserialize, Serialize, PartialEq, Eq, Clone)]
#[serde(rename_all = "camelCase")]
pub struct ServerStatusParams {
pub health: Health,
pub quiescent: bool,
pub message: Option<String>,
pub workspace_info: Option<String>,
}
#[derive(Serialize, Deserialize, Clone, Copy, PartialEq, Eq)]
@ -508,6 +511,16 @@ pub enum Health {
Error,
}
impl ops::BitOrAssign for Health {
/// Merges two health states, keeping the more severe of the two
/// (severity order: `Error` > `Warning` > `Ok`).
fn bitor_assign(&mut self, rhs: Self) {
*self = match (*self, rhs) {
(Health::Error, _) | (_, Health::Error) => Health::Error,
(Health::Warning, _) | (_, Health::Warning) => Health::Warning,
_ => Health::Ok,
}
}
}
pub enum CodeActionRequest {}
impl Request for CodeActionRequest {

View file

@ -103,79 +103,48 @@ impl GlobalState {
health: lsp_ext::Health::Ok,
quiescent: self.is_quiescent(),
message: None,
workspace_info: None,
};
let mut message = String::new();
if !self.config.cargo_autoreload()
&& self.is_quiescent()
&& self.fetch_workspaces_queue.op_requested()
{
status.health |= lsp_ext::Health::Warning;
message.push_str("Auto-reloading is disabled and the workspace has changed, a manual workspace reload is required.\n\n");
}
if self.build_deps_changed {
status.health = lsp_ext::Health::Warning;
status.health |= lsp_ext::Health::Warning;
message.push_str(
"Proc-macros and/or build scripts have changed and need to be rebuilt.\n\n",
);
}
if self.fetch_build_data_error().is_err() {
status.health = lsp_ext::Health::Warning;
status.health |= lsp_ext::Health::Warning;
message.push_str("Failed to run build scripts of some packages.\n\n");
}
if self.proc_macro_clients.iter().any(|it| it.is_err()) {
status.health = lsp_ext::Health::Warning;
message.push_str("Failed to spawn one or more proc-macro servers.\n\n");
for err in self.proc_macro_clients.iter() {
if let Err(err) = err {
format_to!(message, "- {err}\n");
}
}
}
if !self.config.cargo_autoreload()
&& self.is_quiescent()
&& self.fetch_workspaces_queue.op_requested()
{
status.health = lsp_ext::Health::Warning;
message.push_str("Auto-reloading is disabled and the workspace has changed, a manual workspace reload is required.\n\n");
}
if self.config.linked_or_discovered_projects().is_empty()
&& self.config.detached_files().is_empty()
&& self.config.notifications().cargo_toml_not_found
{
status.health = lsp_ext::Health::Warning;
message.push_str("Failed to discover workspace.\n");
message.push_str("Consider adding the `Cargo.toml` of the workspace to the [`linkedProjects`](https://rust-analyzer.github.io/manual.html#rust-analyzer.linkedProjects) setting.\n\n");
}
if let Some(err) = &self.config_errors {
status.health = lsp_ext::Health::Warning;
status.health |= lsp_ext::Health::Warning;
format_to!(message, "{err}\n");
}
if let Some(err) = &self.last_flycheck_error {
status.health = lsp_ext::Health::Warning;
status.health |= lsp_ext::Health::Warning;
message.push_str(err);
message.push('\n');
}
for ws in self.workspaces.iter() {
let sysroot = ws.sysroot.as_ref();
match sysroot {
Err(None) => (),
Err(Some(e)) => {
status.health = lsp_ext::Health::Warning;
message.push_str(e);
message.push_str("\n\n");
}
Ok(s) => {
if let Some(e) = s.loading_warning() {
status.health = lsp_ext::Health::Warning;
message.push_str(&e);
message.push_str("\n\n");
}
}
}
if let ProjectWorkspaceKind::Cargo { rustc: Err(Some(e)), .. } = &ws.kind {
status.health = lsp_ext::Health::Warning;
message.push_str(e);
message.push_str("\n\n");
}
if self.config.linked_or_discovered_projects().is_empty()
&& self.config.detached_files().is_empty()
{
status.health |= lsp_ext::Health::Warning;
message.push_str("Failed to discover workspace.\n");
message.push_str("Consider adding the `Cargo.toml` of the workspace to the [`linkedProjects`](https://rust-analyzer.github.io/manual.html#rust-analyzer.linkedProjects) setting.\n\n");
}
if self.fetch_workspace_error().is_err() {
status.health = lsp_ext::Health::Error;
status.health |= lsp_ext::Health::Error;
message.push_str("Failed to load workspaces.");
if self.config.has_linked_projects() {
@ -191,9 +160,63 @@ impl GlobalState {
message.push_str("\n\n");
}
if !self.workspaces.is_empty() {
let proc_macro_clients =
self.proc_macro_clients.iter().map(Some).chain(iter::repeat_with(|| None));
let mut workspace_info = "Loaded workspaces:\n".to_owned();
for (ws, proc_macro_client) in self.workspaces.iter().zip(proc_macro_clients) {
format_to!(workspace_info, "- `{}`\n", ws.manifest_or_root());
format_to!(workspace_info, " - sysroot:");
match ws.sysroot.as_ref() {
Err(None) => format_to!(workspace_info, " None"),
Err(Some(e)) => {
status.health |= lsp_ext::Health::Warning;
format_to!(workspace_info, " {e}");
}
Ok(s) => {
format_to!(workspace_info, " `{}`", s.root().to_string());
if let Some(err) = s
.check_has_core()
.err()
.inspect(|_| status.health |= lsp_ext::Health::Warning)
{
format_to!(workspace_info, " ({err})");
}
if let Some(src_root) = s.src_root() {
format_to!(
workspace_info,
"\n - sysroot source: `{}`",
src_root
);
}
format_to!(workspace_info, "\n");
}
}
if let ProjectWorkspaceKind::Cargo { rustc: Err(Some(e)), .. } = &ws.kind {
status.health |= lsp_ext::Health::Warning;
format_to!(workspace_info, " - rustc workspace: {e}\n");
};
if let Some(proc_macro_client) = proc_macro_client {
format_to!(workspace_info, " - proc-macro server: ");
match proc_macro_client {
Ok(it) => format_to!(workspace_info, "`{}`\n", it.path()),
Err(e) => {
status.health |= lsp_ext::Health::Warning;
format_to!(workspace_info, "{e}\n")
}
}
}
}
status.workspace_info = Some(workspace_info);
}
if !message.is_empty() {
status.message = Some(message.trim_end().to_owned());
}
status
}
@ -520,7 +543,7 @@ impl GlobalState {
};
tracing::info!("Using proc-macro server at {path}");
ProcMacroServer::spawn(path.clone(), &env).map_err(|err| {
ProcMacroServer::spawn(&path, &env).map_err(|err| {
tracing::error!(
"Failed to run proc-macro server from path {path}, error: {err:?}",
);

View file

@ -1,5 +1,5 @@
<!---
lsp/ext.rs hash: dd51139b0530147e
lsp/ext.rs hash: d8e2aa65fdb48e48
If you need to change the above hash to make the test pass, please check if you
need to adjust this doc as well and ping this issue:

View file

@ -433,7 +433,6 @@ export class Ctx implements RustAnalyzerExtensionApi {
statusBar.tooltip.isTrusted = true;
switch (status.health) {
case "ok":
statusBar.tooltip.appendText(status.message ?? "Ready");
statusBar.color = undefined;
statusBar.backgroundColor = undefined;
if (this.config.statusBarClickAction === "stopServer") {
@ -444,9 +443,6 @@ export class Ctx implements RustAnalyzerExtensionApi {
this.dependencies?.refresh();
break;
case "warning":
if (status.message) {
statusBar.tooltip.appendText(status.message);
}
statusBar.color = new vscode.ThemeColor("statusBarItem.warningForeground");
statusBar.backgroundColor = new vscode.ThemeColor(
"statusBarItem.warningBackground",
@ -455,9 +451,6 @@ export class Ctx implements RustAnalyzerExtensionApi {
icon = "$(warning) ";
break;
case "error":
if (status.message) {
statusBar.tooltip.appendText(status.message);
}
statusBar.color = new vscode.ThemeColor("statusBarItem.errorForeground");
statusBar.backgroundColor = new vscode.ThemeColor("statusBarItem.errorBackground");
statusBar.command = "rust-analyzer.openLogs";
@ -476,6 +469,15 @@ export class Ctx implements RustAnalyzerExtensionApi {
statusBar.text = "$(stop-circle) rust-analyzer";
return;
}
if (status.message) {
statusBar.tooltip.appendText(status.message);
}
if (status.workspaceInfo) {
if (statusBar.tooltip.value) {
statusBar.tooltip.appendMarkdown("\n\n---\n\n");
}
statusBar.tooltip.appendMarkdown(status.workspaceInfo);
}
if (statusBar.tooltip.value) {
statusBar.tooltip.appendMarkdown("\n\n---\n\n");
}

View file

@ -241,6 +241,7 @@ export type ServerStatusParams = {
health: "ok" | "warning" | "error";
quiescent: boolean;
message?: string;
workspaceInfo?: string;
};
export type SsrParams = {
query: string;