Mirror of https://github.com/rust-lang/rust-analyzer, synced 2024-12-26 13:03:31 +00:00

Handle proc macro fetching via OpQueue

Commit: ee02213e65
Parent: 27c076a367
17 changed files with 121 additions and 92 deletions
@@ -15,7 +15,9 @@ use syntax::SmolStr;
 use tt::token_id::Subtree;
 use vfs::{file_set::FileSet, AbsPathBuf, AnchoredPath, FileId, VfsPath};

-pub type ProcMacroPaths = FxHashMap<CrateId, Result<(Option<String>, AbsPathBuf), String>>;
+// Map from crate id to the name of the crate and path of the proc-macro. If the value is `None`,
+// then the crate for the proc-macro hasn't been build yet as the build data is missing.
+pub type ProcMacroPaths = FxHashMap<CrateId, Option<(Option<String>, AbsPathBuf)>>;
 pub type ProcMacros = FxHashMap<CrateId, ProcMacroLoadResult>;

 /// Files are grouped into source roots. A source root is a directory on the
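(Not part of the diff.) To make the new shape of `ProcMacroPaths` easier to read: `None` marks a proc-macro crate whose dylib is unavailable because build data is missing, while `Some((name, path))` carries the crate name and dylib path. A minimal, self-contained Rust sketch of consuming such entries follows; the helper `describe_entry`, the `u32` crate id, and `std::path::PathBuf` in place of `AbsPathBuf` are illustrative stand-ins, not code from the repository.

use std::collections::HashMap;
use std::path::PathBuf;

// Illustrative stand-in for the map keyed by crate id (a plain u32 here
// instead of `CrateId`, and `PathBuf` instead of `AbsPathBuf`).
type ProcMacroPathsSketch = HashMap<u32, Option<(Option<String>, PathBuf)>>;

// Hypothetical helper: turn one entry into a human-readable status,
// mirroring how `None` is treated as "not built yet" elsewhere in the diff.
fn describe_entry(entry: &Option<(Option<String>, PathBuf)>) -> String {
    match entry {
        None => "proc-macro crate has not been built yet".to_owned(),
        Some((name, path)) => format!(
            "load `{}` from {}",
            name.as_deref().unwrap_or("<unnamed>"),
            path.display()
        ),
    }
}

fn main() {
    let mut paths: ProcMacroPathsSketch = HashMap::new();
    paths.insert(0, None);
    paths.insert(1, Some((Some("serde_derive".to_owned()), PathBuf::from("/tmp/libserde_derive.so"))));
    for (krate, entry) in &paths {
        println!("crate {krate}: {}", describe_entry(entry));
    }
}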
@@ -25,25 +25,21 @@ pub(crate) fn unresolved_proc_macro(
         _ => proc_macros_enabled,
     };

-    let message = match &d.macro_name {
+    let not_expanded_message = match &d.macro_name {
         Some(name) => format!("proc macro `{name}` not expanded"),
         None => "proc macro not expanded".to_string(),
     };
     let severity = if config_enabled { Severity::Error } else { Severity::WeakWarning };
     let def_map = ctx.sema.db.crate_def_map(d.krate);
-    let message = format!(
-        "{message}: {}",
-        if config_enabled {
+    let message = if config_enabled {
         def_map.proc_macro_loading_error().unwrap_or("proc macro not found in the built dylib")
     } else {
         match d.kind {
-            hir::MacroKind::Attr if proc_macros_enabled => {
-                "attribute macro expansion is disabled"
-            }
+            hir::MacroKind::Attr if proc_macros_enabled => "attribute macro expansion is disabled",
             _ => "proc-macro expansion is disabled",
         }
-        },
-    );
+    };
+    let message = format!("{not_expanded_message}: {message}");

     Diagnostic::new("unresolved-proc-macro", message, display_range).severity(severity)
 }
@@ -707,7 +707,7 @@ fn project_json_to_crate_graph(
         if let Some(path) = krate.proc_macro_dylib_path.clone() {
             proc_macros.insert(
                 crate_id,
-                Ok((
+                Some((
                     krate.display_name.as_ref().map(|it| it.canonical_name().to_owned()),
                     path,
                 )),
@@ -1185,12 +1185,14 @@ fn add_target_crate_root(
         CrateOrigin::CratesIo { repo: pkg.repository.clone(), name: Some(pkg.name.clone()) },
         target_layout,
     );
-    let proc_macro = match build_data.as_ref().map(|it| &it.proc_macro_dylib_path) {
-        Some(it) => it.clone().map(Ok),
-        None => Some(Err("crate has not (yet) been built".into())),
+    if is_proc_macro {
+        let proc_macro = match build_data.as_ref().map(|it| it.proc_macro_dylib_path.as_ref()) {
+            Some(it) => it.cloned().map(|path| Some((Some(cargo_name.to_owned()), path))),
+            None => Some(None),
     };
     if let Some(proc_macro) = proc_macro {
-        proc_macros.insert(crate_id, proc_macro.map(|path| (Some(cargo_name.to_owned()), path)));
+        proc_macros.insert(crate_id, proc_macro);
     }
+    }

     crate_id
@@ -101,7 +101,10 @@ pub fn load_workspace(
             .map(|(crate_id, path)| {
                 (
                     crate_id,
-                    path.and_then(|(_, path)| load_proc_macro(proc_macro_server, &path, &[])),
+                    path.map_or_else(
+                        || Err("proc macro crate is missing dylib".to_owned()),
+                        |(_, path)| load_proc_macro(proc_macro_server, &path, &[]),
+                    ),
                 )
             })
             .collect()
@@ -1106,6 +1106,10 @@ impl Config {
         &self.data.procMacro_ignored
     }

+    pub fn expand_proc_macros(&self) -> bool {
+        self.data.procMacro_enable
+    }
+
     pub fn expand_proc_attr_macros(&self) -> bool {
         self.data.procMacro_enable && self.data.procMacro_attributes_enable
     }
@@ -8,7 +8,7 @@ use std::{sync::Arc, time::Instant};
 use crossbeam_channel::{unbounded, Receiver, Sender};
 use flycheck::FlycheckHandle;
 use ide::{Analysis, AnalysisHost, Cancellable, Change, FileId};
-use ide_db::base_db::{CrateId, FileLoader, SourceDatabase};
+use ide_db::base_db::{CrateId, FileLoader, ProcMacroPaths, SourceDatabase};
 use lsp_types::{SemanticTokens, Url};
 use parking_lot::{Mutex, RwLock};
 use proc_macro_api::ProcMacroServer;
@@ -101,11 +101,12 @@ pub(crate) struct GlobalState {
     /// the user just adds comments or whitespace to Cargo.toml, we do not want
     /// to invalidate any salsa caches.
     pub(crate) workspaces: Arc<Vec<ProjectWorkspace>>,
-    pub(crate) fetch_workspaces_queue: OpQueue<Option<Vec<anyhow::Result<ProjectWorkspace>>>>,
+    pub(crate) fetch_workspaces_queue: OpQueue<(), Option<Vec<anyhow::Result<ProjectWorkspace>>>>,
     pub(crate) fetch_build_data_queue:
-        OpQueue<(Arc<Vec<ProjectWorkspace>>, Vec<anyhow::Result<WorkspaceBuildScripts>>)>,
+        OpQueue<(), (Arc<Vec<ProjectWorkspace>>, Vec<anyhow::Result<WorkspaceBuildScripts>>)>,
+    pub(crate) fetch_proc_macros_queue: OpQueue<Vec<ProcMacroPaths>, bool>,

-    pub(crate) prime_caches_queue: OpQueue<()>,
+    pub(crate) prime_caches_queue: OpQueue,
 }

 /// An immutable snapshot of the world's state at a point in time.
@@ -117,6 +118,7 @@ pub(crate) struct GlobalStateSnapshot {
     pub(crate) semantic_tokens_cache: Arc<Mutex<FxHashMap<Url, SemanticTokens>>>,
     vfs: Arc<RwLock<(vfs::Vfs, NoHashHashMap<FileId, LineEndings>)>>,
     pub(crate) workspaces: Arc<Vec<ProjectWorkspace>>,
+    // used to signal semantic highlighting to fall back to syntax based highlighting until proc-macros have been loaded
     pub(crate) proc_macros_loaded: bool,
     pub(crate) flycheck: Arc<[FlycheckHandle]>,
 }
@@ -170,9 +172,10 @@ impl GlobalState {

             workspaces: Arc::new(Vec::new()),
             fetch_workspaces_queue: OpQueue::default(),
-            prime_caches_queue: OpQueue::default(),
-
             fetch_build_data_queue: OpQueue::default(),
+            fetch_proc_macros_queue: OpQueue::default(),
+
+            prime_caches_queue: OpQueue::default(),
         };
         // Apply any required database inputs from the config.
         this.update_configuration(config);
@@ -286,7 +289,7 @@ impl GlobalState {
             // crate see https://github.com/rust-lang/rust-analyzer/issues/13029
             if let Some(path) = workspace_structure_change {
                 self.fetch_workspaces_queue
-                    .request_op(format!("workspace vfs file change: {}", path.display()));
+                    .request_op(format!("workspace vfs file change: {}", path.display()), ());
             }
             self.proc_macro_changed =
                 changed_files.iter().filter(|file| !file.is_created_or_deleted()).any(|file| {
@@ -309,7 +312,8 @@ impl GlobalState {
             check_fixes: Arc::clone(&self.diagnostics.check_fixes),
             mem_docs: self.mem_docs.clone(),
             semantic_tokens_cache: Arc::clone(&self.semantic_tokens_cache),
-            proc_macros_loaded: !self.fetch_build_data_queue.last_op_result().0.is_empty(),
+            proc_macros_loaded: !self.config.expand_proc_macros()
+                || *self.fetch_proc_macros_queue.last_op_result(),
             flycheck: self.flycheck.clone(),
         }
     }
@@ -48,16 +48,15 @@ pub(crate) fn handle_workspace_reload(state: &mut GlobalState, _: ()) -> Result<
     state.proc_macro_clients = Arc::new([]);
     state.proc_macro_changed = false;

-    state.fetch_workspaces_queue.request_op("reload workspace request".to_string());
-    state.fetch_build_data_queue.request_op("reload workspace request".to_string());
+    state.fetch_workspaces_queue.request_op("reload workspace request".to_string(), ());
     Ok(())
 }

-pub(crate) fn handle_proc_macros_reload(state: &mut GlobalState, _: ()) -> Result<()> {
+pub(crate) fn handle_proc_macros_rebuild(state: &mut GlobalState, _: ()) -> Result<()> {
     state.proc_macro_clients = Arc::new([]);
     state.proc_macro_changed = false;

-    state.fetch_build_data_queue.request_op("reload proc macros request".to_string());
+    state.fetch_build_data_queue.request_op("rebuild proc macros request".to_string(), ());
     Ok(())
 }

@@ -51,12 +51,12 @@ impl Request for ReloadWorkspace {
     const METHOD: &'static str = "rust-analyzer/reloadWorkspace";
 }

-pub enum ReloadProcMacros {}
+pub enum RebuildProcMacros {}

-impl Request for ReloadProcMacros {
+impl Request for RebuildProcMacros {
     type Params = ();
     type Result = ();
-    const METHOD: &'static str = "rust-analyzer/reloadProcMacros";
+    const METHOD: &'static str = "rust-analyzer/rebuildProcMacros";
 }

 pub enum SyntaxTree {}
@@ -149,8 +149,8 @@ impl GlobalState {
             );
         }

-        self.fetch_workspaces_queue.request_op("startup".to_string());
-        if let Some(cause) = self.fetch_workspaces_queue.should_start_op() {
+        self.fetch_workspaces_queue.request_op("startup".to_string(), ());
+        if let Some((cause, ())) = self.fetch_workspaces_queue.should_start_op() {
             self.fetch_workspaces(cause);
         }

@@ -248,7 +248,7 @@ impl GlobalState {
                     self.prime_caches_queue.op_completed(());
                     if cancelled {
                         self.prime_caches_queue
-                            .request_op("restart after cancellation".to_string());
+                            .request_op("restart after cancellation".to_string(), ());
                     }
                 }
             };
@@ -280,7 +280,8 @@ impl GlobalState {
         if self.is_quiescent() {
             let became_quiescent = !(was_quiescent
                 || self.fetch_workspaces_queue.op_requested()
-                || self.fetch_build_data_queue.op_requested());
+                || self.fetch_build_data_queue.op_requested()
+                || self.fetch_proc_macros_queue.op_requested());

             if became_quiescent {
                 if self.config.check_on_save() {
@@ -288,7 +289,7 @@ impl GlobalState {
                     self.flycheck.iter().for_each(FlycheckHandle::restart);
                 }
                 if self.config.prefill_caches() {
-                    self.prime_caches_queue.request_op("became quiescent".to_string());
+                    self.prime_caches_queue.request_op("became quiescent".to_string(), ());
                 }
             }

@@ -358,18 +359,20 @@ impl GlobalState {
                 }

                 if self.config.cargo_autoreload() {
-                    if let Some(cause) = self.fetch_workspaces_queue.should_start_op() {
+                    if let Some((cause, ())) = self.fetch_workspaces_queue.should_start_op() {
                         self.fetch_workspaces(cause);
                     }
                 }

                 if !self.fetch_workspaces_queue.op_in_progress() {
-                    if let Some(cause) = self.fetch_build_data_queue.should_start_op() {
+                    if let Some((cause, ())) = self.fetch_build_data_queue.should_start_op() {
                         self.fetch_build_data(cause);
+                    } else if let Some((cause, paths)) = self.fetch_proc_macros_queue.should_start_op() {
+                        self.fetch_proc_macros(cause, paths);
                     }
                 }

-                if let Some(cause) = self.prime_caches_queue.should_start_op() {
+                if let Some((cause, ())) = self.prime_caches_queue.should_start_op() {
                     tracing::debug!(%cause, "will prime caches");
                     let num_worker_threads = self.config.prime_caches_num_threads();

@@ -463,7 +466,8 @@ impl GlobalState {
                 let workspaces_updated = !Arc::ptr_eq(&old, &self.workspaces);

                 if self.config.run_build_scripts() && workspaces_updated {
-                    self.fetch_build_data_queue.request_op(format!("workspace updated"));
+                    self.fetch_build_data_queue
+                        .request_op(format!("workspace updated"), ());
                 }

                 (Progress::End, None)
@@ -497,6 +501,7 @@ impl GlobalState {
                 ProcMacroProgress::Begin => (Some(Progress::Begin), None),
                 ProcMacroProgress::Report(msg) => (Some(Progress::Report), Some(msg)),
                 ProcMacroProgress::End(proc_macro_load_result) => {
+                    self.fetch_proc_macros_queue.op_completed(true);
                     self.set_proc_macros(proc_macro_load_result);

                     (Some(Progress::End), None)
@@ -649,7 +654,7 @@ impl GlobalState {

         dispatcher
             .on_sync_mut::<lsp_ext::ReloadWorkspace>(handlers::handle_workspace_reload)
-            .on_sync_mut::<lsp_ext::ReloadProcMacros>(handlers::handle_proc_macros_reload)
+            .on_sync_mut::<lsp_ext::RebuildProcMacros>(handlers::handle_proc_macros_rebuild)
            .on_sync_mut::<lsp_ext::MemoryUsage>(handlers::handle_memory_usage)
            .on_sync_mut::<lsp_ext::ShuffleCrateGraph>(handlers::handle_shuffle_crate_graph)
            .on_sync::<lsp_ext::JoinLines>(handlers::handle_join_lines)
@@ -904,7 +909,7 @@ impl GlobalState {
                     if let Some(abs_path) = vfs_path.as_path() {
                         if reload::should_refresh_for_change(abs_path, ChangeKind::Modify) {
                             this.fetch_workspaces_queue
-                                .request_op(format!("DidSaveTextDocument {}", abs_path.display()));
+                                .request_op(format!("DidSaveTextDocument {}", abs_path.display()), ());
                         }
                     }

@@ -980,7 +985,7 @@ impl GlobalState {
                 config.workspace_roots.extend(added);
                 if !config.has_linked_projects() && config.detached_files().is_empty() {
                     config.rediscover_workspaces();
-                    this.fetch_workspaces_queue.request_op("client workspaces changed".to_string())
+                    this.fetch_workspaces_queue.request_op("client workspaces changed".to_string(), ())
                 }

                 Ok(())
@@ -3,23 +3,23 @@

 pub(crate) type Cause = String;

-pub(crate) struct OpQueue<Output> {
-    op_requested: Option<Cause>,
+pub(crate) struct OpQueue<Args = (), Output = ()> {
+    op_requested: Option<(Cause, Args)>,
     op_in_progress: bool,
     last_op_result: Output,
 }

-impl<Output: Default> Default for OpQueue<Output> {
+impl<Args, Output: Default> Default for OpQueue<Args, Output> {
     fn default() -> Self {
         Self { op_requested: None, op_in_progress: false, last_op_result: Default::default() }
     }
 }

-impl<Output> OpQueue<Output> {
-    pub(crate) fn request_op(&mut self, reason: Cause) {
-        self.op_requested = Some(reason);
+impl<Args, Output> OpQueue<Args, Output> {
+    pub(crate) fn request_op(&mut self, reason: Cause, args: Args) {
+        self.op_requested = Some((reason, args));
     }
-    pub(crate) fn should_start_op(&mut self) -> Option<Cause> {
+    pub(crate) fn should_start_op(&mut self) -> Option<(Cause, Args)> {
         if self.op_in_progress {
             return None;
         }
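(Not part of the diff.) The hunk above only shows the changed halves of `OpQueue`; the sketch below reconstructs a complete, self-contained version with the new `Args` parameter so the request/start/complete cycle is visible in one place. The completion side (`op_completed`, `last_op_result`) is inferred from how the rest of this commit calls it, so treat it as an approximation rather than the exact implementation.

type Cause = String;

// Sketch of an operation queue whose pending request carries both a cause
// string and an arbitrary argument payload.
struct OpQueue<Args = (), Output = ()> {
    op_requested: Option<(Cause, Args)>,
    op_in_progress: bool,
    last_op_result: Output,
}

impl<Args, Output: Default> Default for OpQueue<Args, Output> {
    fn default() -> Self {
        Self { op_requested: None, op_in_progress: false, last_op_result: Default::default() }
    }
}

impl<Args, Output> OpQueue<Args, Output> {
    // Record that an operation should run, remembering why and with what input.
    fn request_op(&mut self, reason: Cause, args: Args) {
        self.op_requested = Some((reason, args));
    }
    // Hand out the pending request once, unless an operation is already running.
    fn should_start_op(&mut self) -> Option<(Cause, Args)> {
        if self.op_in_progress {
            return None;
        }
        self.op_in_progress = self.op_requested.is_some();
        self.op_requested.take()
    }
    // Mark the running operation as finished and keep its result around.
    fn op_completed(&mut self, result: Output) {
        assert!(self.op_in_progress);
        self.op_in_progress = false;
        self.last_op_result = result;
    }
    fn last_op_result(&self) -> &Output {
        &self.last_op_result
    }
}

fn main() {
    // Shaped like the new `fetch_proc_macros_queue`: the args are the paths to
    // load and the output records whether proc macros have been loaded.
    let mut queue: OpQueue<Vec<&str>, bool> = OpQueue::default();
    queue.request_op("workspace switched".to_owned(), vec!["/tmp/libfoo_derive.so"]);

    if let Some((cause, paths)) = queue.should_start_op() {
        println!("fetching proc macros ({cause}): {paths:?}");
        queue.op_completed(true);
    }
    println!("loaded: {}", queue.last_op_result());
}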
@@ -66,6 +66,7 @@ impl GlobalState {
         !(self.last_reported_status.is_none()
             || self.fetch_workspaces_queue.op_in_progress()
             || self.fetch_build_data_queue.op_in_progress()
+            || self.fetch_proc_macros_queue.op_in_progress()
             || self.vfs_progress_config_version < self.vfs_config_version
             || self.vfs_progress_n_done < self.vfs_progress_n_total)
     }
@@ -77,7 +78,7 @@ impl GlobalState {
             self.analysis_host.update_lru_capacity(self.config.lru_capacity());
         }
         if self.config.linked_projects() != old_config.linked_projects() {
-            self.fetch_workspaces_queue.request_op("linked projects changed".to_string())
+            self.fetch_workspaces_queue.request_op("linked projects changed".to_string(), ())
         } else if self.config.flycheck() != old_config.flycheck() {
             self.reload_flycheck();
         }
@@ -101,7 +102,7 @@ impl GlobalState {

         if self.proc_macro_changed {
             status.health = lsp_ext::Health::Warning;
-            message.push_str("Reload required due to source changes of a procedural macro.\n\n");
+            message.push_str("Proc-macros have changed and need to be rebuild.\n\n");
         }
         if let Err(_) = self.fetch_build_data_error() {
             status.health = lsp_ext::Health::Warning;
@@ -223,8 +224,8 @@ impl GlobalState {
         });
     }

-    pub(crate) fn load_proc_macros(&mut self, paths: Vec<ProcMacroPaths>) {
-        tracing::info!("will load proc macros");
+    pub(crate) fn fetch_proc_macros(&mut self, cause: Cause, paths: Vec<ProcMacroPaths>) {
+        tracing::info!(%cause, "will load proc macros");
         let dummy_replacements = self.config.dummy_replacements().clone();
         let proc_macro_clients = self.proc_macro_clients.clone();

@@ -240,16 +241,17 @@ impl GlobalState {
            };

            let mut res = FxHashMap::default();
-           for (client, paths) in proc_macro_clients
+           let chain = proc_macro_clients
                .iter()
                .map(|res| res.as_ref().map_err(|e| &**e))
-               .chain(iter::repeat_with(|| Err("Proc macros are disabled")))
-               .zip(paths)
-           {
+               .chain(iter::repeat_with(|| Err("Proc macros servers are not running")));
+           for (client, paths) in chain.zip(paths) {
                res.extend(paths.into_iter().map(move |(crate_id, res)| {
                    (
                        crate_id,
-                       res.and_then(|(crate_name, path)| {
+                       res.map_or_else(
+                           || Err("proc macro crate is missing dylib".to_owned()),
+                           |(crate_name, path)| {
                                progress(path.display().to_string());
                                load_proc_macro(
                                    client,
@@ -261,7 +263,8 @@ impl GlobalState {
                                })
                                .unwrap_or_default(),
                            )
-                       }),
+                           },
+                       ),
                    )
                }));
            }
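(Not part of the diff.) The server/path pairing above is easier to see outside diff form: each workspace's proc-macro paths are zipped with a proc-macro server handle, and `iter::repeat_with` keeps supplying an error value once the real handles run out. The sketch below reproduces just that pattern with made-up server and crate values.

use std::iter;

fn main() {
    // One running server, but two workspaces: the second one falls back to the
    // repeated error value, as in the chain built above.
    let servers: Vec<Result<&str, String>> = vec![Ok("proc-macro-server-0")];
    let per_workspace_paths = vec![vec!["crate_a"], vec!["crate_b"]];

    let chain = servers
        .iter()
        .map(|res| res.as_ref().map_err(|e| e.as_str()))
        .chain(iter::repeat_with(|| Err("Proc macros servers are not running")));

    for (server, paths) in chain.zip(per_workspace_paths) {
        for krate in paths {
            match server {
                Ok(handle) => println!("{krate}: expand via {handle}"),
                Err(err) => println!("{krate}: {err}"),
            }
        }
    }
}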
@@ -443,14 +446,25 @@ impl GlobalState {
             (crate_graph, proc_macros)
         };
         let mut change = Change::new();
+
+        if same_workspaces {
+            if self.config.expand_proc_macros() {
+                self.fetch_proc_macros_queue.request_op(cause, proc_macro_paths);
+            }
+        } else {
+            // Set up errors for proc-macros upfront that we haven't run build scripts yet
+            let mut proc_macros = FxHashMap::default();
+            for paths in proc_macro_paths {
+                proc_macros.extend(paths.into_iter().map(move |(crate_id, _)| {
+                    (crate_id, Err("crate has not yet been build".to_owned()))
+                }));
+            }
+            change.set_proc_macros(proc_macros);
+        }
         change.set_crate_graph(crate_graph);
         self.analysis_host.apply_change(change);
         self.process_changes();

-        if same_workspaces && !self.fetch_workspaces_queue.op_requested() {
-            self.load_proc_macros(proc_macro_paths);
-        }
-
         self.reload_flycheck();

         tracing::info!("did switch workspaces");
@@ -1,5 +1,5 @@
 <!---
-lsp_ext.rs hash: 92fe1037312754df
+lsp_ext.rs hash: 7269e4cfab906e10

 If you need to change the above hash to make the test pass, please check if you
 need to adjust this doc as well and ping this issue:
@@ -422,9 +422,9 @@ Returns internal status message, mostly for debugging purposes.

 Reloads project information (that is, re-executes `cargo metadata`).

-## Reload proc-macros
+## Rebuild proc-macros

-**Method:** `rust-analyzer/reloadProcMacros`
+**Method:** `rust-analyzer/rebuildProcMacros`

 **Request:** `null`

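(Not part of the diff.) For a rough idea of what the renamed extension method looks like on the wire: it is an ordinary JSON-RPC request with no parameters and a `null` response. The sketch below just prints such a request with `serde_json`; the surrounding LSP framing (headers, transport) is omitted.

use serde_json::json;

fn main() {
    // Client -> server request for the renamed extension method.
    let request = json!({
        "jsonrpc": "2.0",
        "id": 1,
        "method": "rust-analyzer/rebuildProcMacros",
        "params": null
    });
    println!("{}", serde_json::to_string_pretty(&request).unwrap());
}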
@@ -200,8 +200,8 @@
             "category": "rust-analyzer"
         },
         {
-            "command": "rust-analyzer.reloadProcMacros",
-            "title": "Reload proc macros and build scripts",
+            "command": "rust-analyzer.rebuildProcMacros",
+            "title": "Rebuild proc macros and build scripts",
             "category": "rust-analyzer"
         },
         {
@@ -749,8 +749,8 @@ export function reloadWorkspace(ctx: CtxInit): Cmd {
     return async () => ctx.client.sendRequest(ra.reloadWorkspace);
 }

-export function reloadProcMacros(ctx: CtxInit): Cmd {
-    return async () => ctx.client.sendRequest(ra.reloadProcMacros);
+export function rebuildProcMacros(ctx: CtxInit): Cmd {
+    return async () => ctx.client.sendRequest(ra.rebuildProcMacros);
 }

 export function addProject(ctx: CtxInit): Cmd {
@@ -383,7 +383,7 @@ export class Ctx {
                 "\n\n[Reload Workspace](command:rust-analyzer.reloadWorkspace)"
             );
             statusBar.tooltip.appendMarkdown(
-                "\n\n[Rebuild Proc Macros](command:rust-analyzer.reloadProcMacros)"
+                "\n\n[Rebuild Proc Macros](command:rust-analyzer.rebuildProcMacros)"
             );
             statusBar.tooltip.appendMarkdown("\n\n[Restart server](command:rust-analyzer.startServer)");
             statusBar.tooltip.appendMarkdown("\n\n[Stop server](command:rust-analyzer.stopServer)");
@@ -43,7 +43,7 @@ export const relatedTests = new lc.RequestType<lc.TextDocumentPositionParams, Te
     "rust-analyzer/relatedTests"
 );
 export const reloadWorkspace = new lc.RequestType0<null, void>("rust-analyzer/reloadWorkspace");
-export const reloadProcMacros = new lc.RequestType0<null, void>("rust-analyzer/reloadProcMacros");
+export const rebuildProcMacros = new lc.RequestType0<null, void>("rust-analyzer/reloadProcMacros");

 export const runFlycheck = new lc.NotificationType<{
     textDocument: lc.TextDocumentIdentifier | null;
@@ -153,7 +153,7 @@ function createCommands(): Record<string, CommandFactory> {
         memoryUsage: { enabled: commands.memoryUsage },
         shuffleCrateGraph: { enabled: commands.shuffleCrateGraph },
         reloadWorkspace: { enabled: commands.reloadWorkspace },
-        reloadProcMacros: { enabled: commands.reloadProcMacros },
+        rebuildProcMacros: { enabled: commands.rebuildProcMacros },
         addProject: { enabled: commands.addProject },
         matchingBrace: { enabled: commands.matchingBrace },
         joinLines: { enabled: commands.joinLines },