From a63b5d3c846b74b917b8029aa00cb448faba409f Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 18 Jul 2022 20:30:07 +0200 Subject: [PATCH 0001/1945] feat: Only flycheck workspace that belongs to saved file --- crates/flycheck/src/lib.rs | 7 ++- crates/paths/src/lib.rs | 8 ++++ crates/rust-analyzer/src/main_loop.rs | 62 ++++++++++++++++++++++++--- 3 files changed, 70 insertions(+), 7 deletions(-) diff --git a/crates/flycheck/src/lib.rs b/crates/flycheck/src/lib.rs index f683fe61fe..973f2a53c1 100644 --- a/crates/flycheck/src/lib.rs +++ b/crates/flycheck/src/lib.rs @@ -55,6 +55,7 @@ pub struct FlycheckHandle { // XXX: drop order is significant sender: Sender, _thread: jod_thread::JoinHandle, + id: usize, } impl FlycheckHandle { @@ -70,13 +71,17 @@ impl FlycheckHandle { .name("Flycheck".to_owned()) .spawn(move || actor.run(receiver)) .expect("failed to spawn thread"); - FlycheckHandle { sender, _thread: thread } + FlycheckHandle { id, sender, _thread: thread } } /// Schedule a re-start of the cargo check worker. pub fn update(&self) { self.sender.send(Restart).unwrap(); } + + pub fn id(&self) -> usize { + self.id + } } pub enum Message { diff --git a/crates/paths/src/lib.rs b/crates/paths/src/lib.rs index b4beb40e74..cf06bf0c27 100644 --- a/crates/paths/src/lib.rs +++ b/crates/paths/src/lib.rs @@ -103,6 +103,14 @@ impl AsRef for AbsPath { } } +impl ToOwned for AbsPath { + type Owned = AbsPathBuf; + + fn to_owned(&self) -> Self::Owned { + AbsPathBuf(self.0.to_owned()) + } +} + impl<'a> TryFrom<&'a Path> for &'a AbsPath { type Error = &'a Path; fn try_from(path: &'a Path) -> Result<&'a AbsPath, &'a Path> { diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs index 49b8394111..262c30f132 100644 --- a/crates/rust-analyzer/src/main_loop.rs +++ b/crates/rust-analyzer/src/main_loop.rs @@ -2,6 +2,7 @@ //! requests/replies and notifications back to the client. use std::{ fmt, + ops::Deref, sync::Arc, time::{Duration, Instant}, }; @@ -720,13 +721,62 @@ impl GlobalState { Ok(()) })? .on::(|this, params| { - for flycheck in &this.flycheck { - flycheck.update(); + let mut updated = false; + if let Ok(vfs_path) = from_proto::vfs_path(¶ms.text_document.uri) { + let (vfs, _) = &*this.vfs.read(); + if let Some(file_id) = vfs.file_id(&vfs_path) { + let analysis = this.analysis_host.analysis(); + let crate_ids = analysis.crate_for(file_id)?; + + let paths: Vec<_> = crate_ids + .iter() + .filter_map(|&crate_id| { + analysis + .crate_root(crate_id) + .map(|file_id| { + vfs.file_path(file_id).as_path().map(ToOwned::to_owned) + }) + .transpose() + }) + .collect::>()?; + let paths: Vec<_> = paths.iter().map(Deref::deref).collect(); + + let workspace_ids = + this.workspaces.iter().enumerate().filter(|(_, ws)| match ws { + project_model::ProjectWorkspace::Cargo { cargo, .. } => { + cargo.packages().filter(|&pkg| cargo[pkg].is_member).any( + |pkg| { + cargo[pkg].targets.iter().any(|&it| { + paths.contains(&cargo[it].root.as_path()) + }) + }, + ) + } + project_model::ProjectWorkspace::Json { project, .. } => project + .crates() + .any(|(c, _)| crate_ids.iter().any(|&crate_id| crate_id == c)), + project_model::ProjectWorkspace::DetachedFiles { .. 
} => false, + }); + 'workspace: for (id, _) in workspace_ids { + for flycheck in &this.flycheck { + if id == flycheck.id() { + updated = true; + flycheck.update(); + continue 'workspace; + } + } + } + } + if let Some(abs_path) = vfs_path.as_path() { + if reload::should_refresh_for_change(&abs_path, ChangeKind::Modify) { + this.fetch_workspaces_queue + .request_op(format!("DidSaveTextDocument {}", abs_path.display())); + } + } } - if let Ok(abs_path) = from_proto::abs_path(¶ms.text_document.uri) { - if reload::should_refresh_for_change(&abs_path, ChangeKind::Modify) { - this.fetch_workspaces_queue - .request_op(format!("DidSaveTextDocument {}", abs_path.display())); + if !updated { + for flycheck in &this.flycheck { + flycheck.update(); } } Ok(()) From 25391e6d44e3dc740349198c309e08782691da7e Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Wed, 20 Jul 2022 11:49:36 +0200 Subject: [PATCH 0002/1945] Only clear diagnostics of workspaces who have been flychecked --- crates/flycheck/src/lib.rs | 8 ++++--- crates/rust-analyzer/src/diagnostics.rs | 27 ++++++++++++++++++------ crates/rust-analyzer/src/global_state.rs | 1 + crates/rust-analyzer/src/handlers.rs | 4 +++- crates/rust-analyzer/src/main_loop.rs | 5 +++-- crates/rust-analyzer/src/reload.rs | 2 +- 6 files changed, 33 insertions(+), 14 deletions(-) diff --git a/crates/flycheck/src/lib.rs b/crates/flycheck/src/lib.rs index 973f2a53c1..7f14fe5798 100644 --- a/crates/flycheck/src/lib.rs +++ b/crates/flycheck/src/lib.rs @@ -86,7 +86,7 @@ impl FlycheckHandle { pub enum Message { /// Request adding a diagnostic with fixes included to a file - AddDiagnostic { workspace_root: AbsPathBuf, diagnostic: Diagnostic }, + AddDiagnostic { id: usize, workspace_root: AbsPathBuf, diagnostic: Diagnostic }, /// Request check progress notification to client Progress { @@ -99,8 +99,9 @@ pub enum Message { impl fmt::Debug for Message { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { - Message::AddDiagnostic { workspace_root, diagnostic } => f + Message::AddDiagnostic { id, workspace_root, diagnostic } => f .debug_struct("AddDiagnostic") + .field("id", id) .field("workspace_root", workspace_root) .field("diagnostic_code", &diagnostic.code.as_ref().map(|it| &it.code)) .finish(), @@ -186,7 +187,7 @@ impl FlycheckActor { } } Event::CheckEvent(None) => { - tracing::debug!("flycheck finished"); + tracing::debug!(flycheck_id = self.id, "flycheck finished"); // Watcher finished let cargo_handle = self.cargo_handle.take().unwrap(); @@ -206,6 +207,7 @@ impl FlycheckActor { CargoMessage::Diagnostic(msg) => { self.send(Message::AddDiagnostic { + id: self.id, workspace_root: self.workspace_root.clone(), diagnostic: msg, }); diff --git a/crates/rust-analyzer/src/diagnostics.rs b/crates/rust-analyzer/src/diagnostics.rs index 202a01adf7..7917ced666 100644 --- a/crates/rust-analyzer/src/diagnostics.rs +++ b/crates/rust-analyzer/src/diagnostics.rs @@ -8,7 +8,7 @@ use rustc_hash::{FxHashMap, FxHashSet}; use crate::lsp_ext; -pub(crate) type CheckFixes = Arc>>; +pub(crate) type CheckFixes = Arc>>>; #[derive(Debug, Default, Clone)] pub struct DiagnosticsMapConfig { @@ -22,7 +22,7 @@ pub(crate) struct DiagnosticCollection { // FIXME: should be FxHashMap> pub(crate) native: FxHashMap>, // FIXME: should be Vec - pub(crate) check: FxHashMap>, + pub(crate) check: FxHashMap>>, pub(crate) check_fixes: CheckFixes, changes: FxHashSet, } @@ -35,9 +35,19 @@ pub(crate) struct Fix { } impl DiagnosticCollection { - pub(crate) fn clear_check(&mut self) { + pub(crate) fn 
clear_check(&mut self, flycheck_id: usize) { + if let Some(it) = Arc::make_mut(&mut self.check_fixes).get_mut(&flycheck_id) { + it.clear(); + } + if let Some(it) = self.check.get_mut(&flycheck_id) { + self.changes.extend(it.drain().map(|(key, _value)| key)); + } + } + + pub(crate) fn clear_check_all(&mut self) { Arc::make_mut(&mut self.check_fixes).clear(); - self.changes.extend(self.check.drain().map(|(key, _value)| key)) + self.changes + .extend(self.check.values_mut().flat_map(|it| it.drain().map(|(key, _value)| key))) } pub(crate) fn clear_native_for(&mut self, file_id: FileId) { @@ -47,11 +57,12 @@ impl DiagnosticCollection { pub(crate) fn add_check_diagnostic( &mut self, + flycheck_id: usize, file_id: FileId, diagnostic: lsp_types::Diagnostic, fix: Option, ) { - let diagnostics = self.check.entry(file_id).or_default(); + let diagnostics = self.check.entry(flycheck_id).or_default().entry(file_id).or_default(); for existing_diagnostic in diagnostics.iter() { if are_diagnostics_equal(existing_diagnostic, &diagnostic) { return; @@ -59,8 +70,9 @@ impl DiagnosticCollection { } let check_fixes = Arc::make_mut(&mut self.check_fixes); - check_fixes.entry(file_id).or_default().extend(fix); + check_fixes.entry(flycheck_id).or_default().entry(file_id).or_default().extend(fix); diagnostics.push(diagnostic); + tracing::warn!(?flycheck_id, ?file_id, "add_check_diagnostic changes pushed"); self.changes.insert(file_id); } @@ -89,7 +101,8 @@ impl DiagnosticCollection { file_id: FileId, ) -> impl Iterator { let native = self.native.get(&file_id).into_iter().flatten(); - let check = self.check.get(&file_id).into_iter().flatten(); + let check = + self.check.values().filter_map(move |it| it.get(&file_id)).into_iter().flatten(); native.chain(check) } diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs index 4af60035a2..2cd2044aef 100644 --- a/crates/rust-analyzer/src/global_state.rs +++ b/crates/rust-analyzer/src/global_state.rs @@ -201,6 +201,7 @@ impl GlobalState { } } + // Clear native diagnostics when their file gets deleted if !file.exists() { self.diagnostics.clear_native_for(file.file_id); } diff --git a/crates/rust-analyzer/src/handlers.rs b/crates/rust-analyzer/src/handlers.rs index e3875228a1..a64dc57ec6 100644 --- a/crates/rust-analyzer/src/handlers.rs +++ b/crates/rust-analyzer/src/handlers.rs @@ -1094,7 +1094,9 @@ pub(crate) fn handle_code_action( } // Fixes from `cargo check`. - for fix in snap.check_fixes.get(&frange.file_id).into_iter().flatten() { + for fix in + snap.check_fixes.values().filter_map(|it| it.get(&frange.file_id)).into_iter().flatten() + { // FIXME: this mapping is awkward and shouldn't exist. Refactor // `snap.check_fixes` to not convert to LSP prematurely. 
let intersect_fix_range = fix diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs index 262c30f132..572466cdfa 100644 --- a/crates/rust-analyzer/src/main_loop.rs +++ b/crates/rust-analyzer/src/main_loop.rs @@ -372,7 +372,7 @@ impl GlobalState { let _p = profile::span("GlobalState::handle_event/flycheck"); loop { match task { - flycheck::Message::AddDiagnostic { workspace_root, diagnostic } => { + flycheck::Message::AddDiagnostic { id, workspace_root, diagnostic } => { let snap = self.snapshot(); let diagnostics = crate::diagnostics::to_proto::map_rust_diagnostic_to_lsp( @@ -384,6 +384,7 @@ impl GlobalState { for diag in diagnostics { match url_to_file_id(&self.vfs.read().0, &diag.url) { Ok(file_id) => self.diagnostics.add_check_diagnostic( + id, file_id, diag.diagnostic, diag.fix, @@ -401,7 +402,7 @@ impl GlobalState { flycheck::Message::Progress { id, progress } => { let (state, message) = match progress { flycheck::Progress::DidStart => { - self.diagnostics.clear_check(); + self.diagnostics.clear_check(id); (Progress::Begin, None) } flycheck::Progress::DidCheckCrate(target) => { diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs index 13bcb7dfa2..579ba38027 100644 --- a/crates/rust-analyzer/src/reload.rs +++ b/crates/rust-analyzer/src/reload.rs @@ -417,7 +417,7 @@ impl GlobalState { Some(it) => it, None => { self.flycheck = Vec::new(); - self.diagnostics.clear_check(); + self.diagnostics.clear_check_all(); return; } }; From d73b0d5fc6c6be434bd0e31ddc6572a6efaf6400 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Wed, 20 Jul 2022 11:52:44 +0200 Subject: [PATCH 0003/1945] Don't filter flychecks by package member status --- crates/rust-analyzer/src/main_loop.rs | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs index 572466cdfa..561c2d7aef 100644 --- a/crates/rust-analyzer/src/main_loop.rs +++ b/crates/rust-analyzer/src/main_loop.rs @@ -745,25 +745,24 @@ impl GlobalState { let workspace_ids = this.workspaces.iter().enumerate().filter(|(_, ws)| match ws { project_model::ProjectWorkspace::Cargo { cargo, .. } => { - cargo.packages().filter(|&pkg| cargo[pkg].is_member).any( - |pkg| { - cargo[pkg].targets.iter().any(|&it| { - paths.contains(&cargo[it].root.as_path()) - }) - }, - ) + cargo.packages().any(|pkg| { + cargo[pkg] + .targets + .iter() + .any(|&it| paths.contains(&cargo[it].root.as_path())) + }) } project_model::ProjectWorkspace::Json { project, .. } => project .crates() .any(|(c, _)| crate_ids.iter().any(|&crate_id| crate_id == c)), project_model::ProjectWorkspace::DetachedFiles { .. 
} => false, }); - 'workspace: for (id, _) in workspace_ids { - for flycheck in &this.flycheck { + for flycheck in &this.flycheck { + for (id, _) in workspace_ids.clone() { if id == flycheck.id() { updated = true; flycheck.update(); - continue 'workspace; + continue; } } } From 7e0bd377c2ffe55a1f014ce932a7218a9b4dc639 Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Wed, 20 Jul 2022 22:30:16 +0900 Subject: [PATCH 0004/1945] fix: don't replace default members' body --- .../src/handlers/add_missing_impl_members.rs | 17 ++++++++++------- .../ide-assists/src/utils/gen_trait_fn_body.rs | 11 ++++++----- 2 files changed, 16 insertions(+), 12 deletions(-) diff --git a/crates/ide-assists/src/handlers/add_missing_impl_members.rs b/crates/ide-assists/src/handlers/add_missing_impl_members.rs index 6e6fd96a68..036bdd3a21 100644 --- a/crates/ide-assists/src/handlers/add_missing_impl_members.rs +++ b/crates/ide-assists/src/handlers/add_missing_impl_members.rs @@ -142,13 +142,16 @@ fn add_missing_impl_members_inner( Some(cap) => { let mut cursor = Cursor::Before(first_new_item.syntax()); let placeholder; - if let ast::AssocItem::Fn(func) = &first_new_item { - if try_gen_trait_body(ctx, func, &trait_, &impl_def).is_none() { - if let Some(m) = func.syntax().descendants().find_map(ast::MacroCall::cast) - { - if m.syntax().text() == "todo!()" { - placeholder = m; - cursor = Cursor::Replace(placeholder.syntax()); + if let DefaultMethods::No = mode { + if let ast::AssocItem::Fn(func) = &first_new_item { + if try_gen_trait_body(ctx, func, &trait_, &impl_def).is_none() { + if let Some(m) = + func.syntax().descendants().find_map(ast::MacroCall::cast) + { + if m.syntax().text() == "todo!()" { + placeholder = m; + cursor = Cursor::Replace(placeholder.syntax()); + } } } } diff --git a/crates/ide-assists/src/utils/gen_trait_fn_body.rs b/crates/ide-assists/src/utils/gen_trait_fn_body.rs index ec4835969f..43aaad3150 100644 --- a/crates/ide-assists/src/utils/gen_trait_fn_body.rs +++ b/crates/ide-assists/src/utils/gen_trait_fn_body.rs @@ -5,7 +5,7 @@ use syntax::{ ted, }; -/// Generate custom trait bodies where possible. +/// Generate custom trait bodies without default implementation where possible. /// /// Returns `Option` so that we can use `?` rather than `if let Some`. Returning /// `None` means that generating a custom trait body failed, and the body will remain @@ -28,6 +28,7 @@ pub(crate) fn gen_trait_fn_body( /// Generate a `Clone` impl based on the fields and members of the target type. fn gen_clone_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> { + debug_assert!(func.name().map_or(false, |name| name.text() == "clone")); fn gen_clone_call(target: ast::Expr) -> ast::Expr { let method = make::name_ref("clone"); make::expr_method_call(target, method, make::arg_list(None)) @@ -339,6 +340,7 @@ fn gen_default_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> { /// Generate a `Hash` impl based on the fields and members of the target type. fn gen_hash_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> { + debug_assert!(func.name().map_or(false, |name| name.text() == "hash")); fn gen_hash_call(target: ast::Expr) -> ast::Stmt { let method = make::name_ref("hash"); let arg = make::expr_path(make::ext::ident_path("state")); @@ -394,9 +396,7 @@ fn gen_hash_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> { /// Generate a `PartialEq` impl based on the fields and members of the target type. 
fn gen_partial_eq(adt: &ast::Adt, func: &ast::Fn) -> Option<()> { - if func.name().map_or(false, |name| name.text() == "ne") { - return None; - } + debug_assert!(func.name().map_or(false, |name| name.text() == "eq")); fn gen_eq_chain(expr: Option, cmp: ast::Expr) -> Option { match expr { Some(expr) => Some(make::expr_bin_op(expr, BinaryOp::LogicOp(LogicOp::And), cmp)), @@ -573,6 +573,7 @@ fn gen_partial_eq(adt: &ast::Adt, func: &ast::Fn) -> Option<()> { } fn gen_partial_ord(adt: &ast::Adt, func: &ast::Fn) -> Option<()> { + debug_assert!(func.name().map_or(false, |name| name.text() == "partial_cmp")); fn gen_partial_eq_match(match_target: ast::Expr) -> Option { let mut arms = vec![]; @@ -643,7 +644,7 @@ fn gen_partial_ord(adt: &ast::Adt, func: &ast::Fn) -> Option<()> { make::block_expr(stmts.into_iter(), tail).indent(ast::edit::IndentLevel(1)) } - // No fields in the body means there's nothing to hash. + // No fields in the body means there's nothing to compare. None => { let expr = make::expr_literal("true").into(); make::block_expr(None, Some(expr)).indent(ast::edit::IndentLevel(1)) From aeb07745d53a90cc8447c45328eed51e941d9307 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sat, 23 Jul 2022 20:10:10 +0200 Subject: [PATCH 0005/1945] Spawn a proc-macro-srv instance per workspace --- crates/rust-analyzer/src/global_state.rs | 4 +-- crates/rust-analyzer/src/handlers.rs | 2 +- crates/rust-analyzer/src/reload.rs | 43 +++++++++++++----------- 3 files changed, 26 insertions(+), 23 deletions(-) diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs index 4af60035a2..700e3e19bb 100644 --- a/crates/rust-analyzer/src/global_state.rs +++ b/crates/rust-analyzer/src/global_state.rs @@ -61,7 +61,7 @@ pub(crate) struct GlobalState { pub(crate) proc_macro_changed: bool, pub(crate) last_reported_status: Option, pub(crate) source_root_config: SourceRootConfig, - pub(crate) proc_macro_client: Option, + pub(crate) proc_macro_clients: Vec>, pub(crate) flycheck: Vec, pub(crate) flycheck_sender: Sender, @@ -151,7 +151,7 @@ impl GlobalState { proc_macro_changed: false, last_reported_status: None, source_root_config: SourceRootConfig::default(), - proc_macro_client: None, + proc_macro_clients: vec![], flycheck: Vec::new(), flycheck_sender, diff --git a/crates/rust-analyzer/src/handlers.rs b/crates/rust-analyzer/src/handlers.rs index 520aa7d1dd..deb777c952 100644 --- a/crates/rust-analyzer/src/handlers.rs +++ b/crates/rust-analyzer/src/handlers.rs @@ -44,7 +44,7 @@ use crate::{ }; pub(crate) fn handle_workspace_reload(state: &mut GlobalState, _: ()) -> Result<()> { - state.proc_macro_client = None; + state.proc_macro_clients.clear(); state.proc_macro_changed = false; state.fetch_workspaces_queue.request_op("reload workspace request".to_string()); state.fetch_build_data_queue.request_op("reload workspace request".to_string()); diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs index 77125f88f4..d69b2be256 100644 --- a/crates/rust-analyzer/src/reload.rs +++ b/crates/rust-analyzer/src/reload.rs @@ -303,18 +303,21 @@ impl GlobalState { let files_config = self.config.files(); let project_folders = ProjectFolders::new(&self.workspaces, &files_config.exclude); - if self.proc_macro_client.is_none() { + if self.proc_macro_clients.is_empty() { if let Some((path, args)) = self.config.proc_macro_srv() { - match ProcMacroServer::spawn(path.clone(), args) { - Ok(it) => self.proc_macro_client = Some(it), - Err(err) => { - tracing::error!( - "Failed to 
run proc_macro_srv from path {}, error: {:?}", - path.display(), - err - ); - } - } + self.proc_macro_clients = (0..self.workspaces.len()) + .map(|_| match ProcMacroServer::spawn(path.clone(), args.clone()) { + Ok(it) => Some(it), + Err(err) => { + tracing::error!( + "Failed to run proc_macro_srv from path {}, error: {:?}", + path.display(), + err + ); + None + } + }) + .collect(); } } @@ -331,15 +334,7 @@ impl GlobalState { // Create crate graph from all the workspaces let crate_graph = { - let proc_macro_client = self.proc_macro_client.as_ref(); let dummy_replacements = self.config.dummy_replacements(); - let mut load_proc_macro = move |crate_name: &str, path: &AbsPath| { - load_proc_macro( - proc_macro_client, - path, - dummy_replacements.get(crate_name).map(|v| &**v).unwrap_or_default(), - ) - }; let vfs = &mut self.vfs.write().0; let loader = &mut self.loader; @@ -359,7 +354,15 @@ impl GlobalState { }; let mut crate_graph = CrateGraph::default(); - for ws in self.workspaces.iter() { + for (idx, ws) in self.workspaces.iter().enumerate() { + let proc_macro_client = self.proc_macro_clients[idx].as_ref(); + let mut load_proc_macro = move |crate_name: &str, path: &AbsPath| { + load_proc_macro( + proc_macro_client, + path, + dummy_replacements.get(crate_name).map(|v| &**v).unwrap_or_default(), + ) + }; crate_graph.extend(ws.to_crate_graph(&mut load_proc_macro, &mut load)); } crate_graph From 50b27e57baa83d63fecf77c712c345d480f2c3c6 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sat, 23 Jul 2022 20:24:01 +0200 Subject: [PATCH 0006/1945] Better error messages when the proc-macro-server fails to start --- crates/rust-analyzer/src/cli/load_cargo.rs | 6 +++--- crates/rust-analyzer/src/global_state.rs | 2 +- crates/rust-analyzer/src/reload.rs | 16 ++++++++-------- 3 files changed, 12 insertions(+), 12 deletions(-) diff --git a/crates/rust-analyzer/src/cli/load_cargo.rs b/crates/rust-analyzer/src/cli/load_cargo.rs index 4243af25ff..0ada4b73e8 100644 --- a/crates/rust-analyzer/src/cli/load_cargo.rs +++ b/crates/rust-analyzer/src/cli/load_cargo.rs @@ -60,9 +60,9 @@ pub fn load_workspace( let proc_macro_client = if load_config.with_proc_macro { let path = AbsPathBuf::assert(std::env::current_exe()?); - Some(ProcMacroServer::spawn(path, &["proc-macro"]).unwrap()) + Ok(ProcMacroServer::spawn(path, &["proc-macro"]).unwrap()) } else { - None + Err("proc macro server not started".to_owned()) }; let crate_graph = ws.to_crate_graph( @@ -89,7 +89,7 @@ pub fn load_workspace( if load_config.prefill_caches { host.analysis().parallel_prime_caches(1, |_| {})?; } - Ok((host, vfs, proc_macro_client)) + Ok((host, vfs, proc_macro_client.ok())) } fn load_crate_graph( diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs index 700e3e19bb..8f881cba4d 100644 --- a/crates/rust-analyzer/src/global_state.rs +++ b/crates/rust-analyzer/src/global_state.rs @@ -61,7 +61,7 @@ pub(crate) struct GlobalState { pub(crate) proc_macro_changed: bool, pub(crate) last_reported_status: Option, pub(crate) source_root_config: SourceRootConfig, - pub(crate) proc_macro_clients: Vec>, + pub(crate) proc_macro_clients: Vec>, pub(crate) flycheck: Vec, pub(crate) flycheck_sender: Sender, diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs index d69b2be256..e5802773e7 100644 --- a/crates/rust-analyzer/src/reload.rs +++ b/crates/rust-analyzer/src/reload.rs @@ -306,16 +306,16 @@ impl GlobalState { if self.proc_macro_clients.is_empty() { if let Some((path, args)) = 
self.config.proc_macro_srv() { self.proc_macro_clients = (0..self.workspaces.len()) - .map(|_| match ProcMacroServer::spawn(path.clone(), args.clone()) { - Ok(it) => Some(it), - Err(err) => { - tracing::error!( + .map(|_| { + ProcMacroServer::spawn(path.clone(), args.clone()).map_err(|err| { + let error = format!( "Failed to run proc_macro_srv from path {}, error: {:?}", path.display(), err ); - None - } + tracing::error!(error); + error + }) }) .collect(); } @@ -539,14 +539,14 @@ impl SourceRootConfig { /// Load the proc-macros for the given lib path, replacing all expanders whose names are in `dummy_replace` /// with an identity dummy expander. pub(crate) fn load_proc_macro( - server: Option<&ProcMacroServer>, + server: Result<&ProcMacroServer, &String>, path: &AbsPath, dummy_replace: &[Box], ) -> ProcMacroLoadResult { let res: Result, String> = (|| { let dylib = MacroDylib::new(path.to_path_buf()) .map_err(|io| format!("Proc-macro dylib loading failed: {io}"))?; - let server = server.ok_or_else(|| format!("Proc-macro server not started"))?; + let server = server.map_err(ToOwned::to_owned)?; let vec = server.load_dylib(dylib).map_err(|e| format!("{e}"))?; if vec.is_empty() { return Err("proc macro library returned no proc macros".to_string()); From fb063d360ce66e9f1316c3e64885e00344062e55 Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Fri, 22 Jul 2022 18:12:21 +0900 Subject: [PATCH 0007/1945] Add `ellipsis` to HIR `RecordLit` --- crates/hir-def/src/body/lower.rs | 5 +++-- crates/hir-def/src/expr.rs | 1 + crates/hir-ty/src/diagnostics/expr.rs | 2 +- crates/hir-ty/src/infer/expr.rs | 2 +- 4 files changed, 6 insertions(+), 4 deletions(-) diff --git a/crates/hir-def/src/body/lower.rs b/crates/hir-def/src/body/lower.rs index c3f2611227..b43699de12 100644 --- a/crates/hir-def/src/body/lower.rs +++ b/crates/hir-def/src/body/lower.rs @@ -378,9 +378,10 @@ impl ExprCollector<'_> { }) .collect(); let spread = nfl.spread().map(|s| self.collect_expr(s)); - Expr::RecordLit { path, fields, spread } + let ellipsis = nfl.dotdot_token().is_some(); + Expr::RecordLit { path, fields, spread, ellipsis } } else { - Expr::RecordLit { path, fields: Box::default(), spread: None } + Expr::RecordLit { path, fields: Box::default(), spread: None, ellipsis: false } }; self.alloc_expr(record_lit, syntax_ptr) diff --git a/crates/hir-def/src/expr.rs b/crates/hir-def/src/expr.rs index a991365d6b..145fe4c19d 100644 --- a/crates/hir-def/src/expr.rs +++ b/crates/hir-def/src/expr.rs @@ -138,6 +138,7 @@ pub enum Expr { path: Option>, fields: Box<[RecordLitField]>, spread: Option, + ellipsis: bool, }, Field { expr: ExprId, diff --git a/crates/hir-ty/src/diagnostics/expr.rs b/crates/hir-ty/src/diagnostics/expr.rs index 8cca522aef..1fcb4c2976 100644 --- a/crates/hir-ty/src/diagnostics/expr.rs +++ b/crates/hir-ty/src/diagnostics/expr.rs @@ -305,7 +305,7 @@ pub fn record_literal_missing_fields( expr: &Expr, ) -> Option<(VariantId, Vec, /*exhaustive*/ bool)> { let (fields, exhaustive) = match expr { - Expr::RecordLit { path: _, fields, spread } => (fields, spread.is_none()), + Expr::RecordLit { fields, spread, .. } => (fields, spread.is_none()), _ => return None, }; diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs index 2f33467072..9debe83547 100644 --- a/crates/hir-ty/src/infer/expr.rs +++ b/crates/hir-ty/src/infer/expr.rs @@ -421,7 +421,7 @@ impl<'a> InferenceContext<'a> { } TyKind::Never.intern(Interner) } - Expr::RecordLit { path, fields, spread } => { + Expr::RecordLit { path, fields, spread, .. 
} => { let (ty, def_id) = self.resolve_variant(path.as_deref(), false); if let Some(variant) = def_id { self.write_variant_resolution(tgt_expr.into(), variant); From 64758bd48195241dbf39a63974ecbcdcd151f399 Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Sun, 24 Jul 2022 22:32:49 +0900 Subject: [PATCH 0008/1945] Add info whether it's assignee expr to relevant HIR `Expr` variants --- crates/hir-def/src/body/lower.rs | 41 +++++++++++++++++++++++++------- crates/hir-def/src/expr.rs | 11 +++++---- crates/hir-ty/src/infer/expr.rs | 16 ++++++------- 3 files changed, 48 insertions(+), 20 deletions(-) diff --git a/crates/hir-def/src/body/lower.rs b/crates/hir-def/src/body/lower.rs index b43699de12..66f9c24e87 100644 --- a/crates/hir-def/src/body/lower.rs +++ b/crates/hir-def/src/body/lower.rs @@ -96,6 +96,7 @@ pub(super) fn lower( expander, name_to_pat_grouping: Default::default(), is_lowering_inside_or_pat: false, + is_lowering_assignee_expr: false, } .collect(params, body) } @@ -109,6 +110,7 @@ struct ExprCollector<'a> { // a poor-mans union-find? name_to_pat_grouping: FxHashMap>, is_lowering_inside_or_pat: bool, + is_lowering_assignee_expr: bool, } impl ExprCollector<'_> { @@ -283,7 +285,10 @@ impl ExprCollector<'_> { } else { Box::default() }; - self.alloc_expr(Expr::Call { callee, args }, syntax_ptr) + self.alloc_expr( + Expr::Call { callee, args, is_assignee_expr: self.is_lowering_assignee_expr }, + syntax_ptr, + ) } ast::Expr::MethodCallExpr(e) => { let receiver = self.collect_expr_opt(e.receiver()); @@ -359,6 +364,7 @@ impl ExprCollector<'_> { ast::Expr::RecordExpr(e) => { let path = e.path().and_then(|path| self.expander.parse_path(self.db, path)).map(Box::new); + let is_assignee_expr = self.is_lowering_assignee_expr; let record_lit = if let Some(nfl) = e.record_expr_field_list() { let fields = nfl .fields() @@ -379,9 +385,15 @@ impl ExprCollector<'_> { .collect(); let spread = nfl.spread().map(|s| self.collect_expr(s)); let ellipsis = nfl.dotdot_token().is_some(); - Expr::RecordLit { path, fields, spread, ellipsis } + Expr::RecordLit { path, fields, spread, ellipsis, is_assignee_expr } } else { - Expr::RecordLit { path, fields: Box::default(), spread: None, ellipsis: false } + Expr::RecordLit { + path, + fields: Box::default(), + spread: None, + ellipsis: false, + is_assignee_expr, + } }; self.alloc_expr(record_lit, syntax_ptr) @@ -459,14 +471,21 @@ impl ExprCollector<'_> { ) } ast::Expr::BinExpr(e) => { - let lhs = self.collect_expr_opt(e.lhs()); - let rhs = self.collect_expr_opt(e.rhs()); let op = e.op_kind(); + if let Some(ast::BinaryOp::Assignment { op: None }) = op { + self.is_lowering_assignee_expr = true; + } + let lhs = self.collect_expr_opt(e.lhs()); + self.is_lowering_assignee_expr = false; + let rhs = self.collect_expr_opt(e.rhs()); self.alloc_expr(Expr::BinaryOp { lhs, rhs, op }, syntax_ptr) } ast::Expr::TupleExpr(e) => { let exprs = e.fields().map(|expr| self.collect_expr(expr)).collect(); - self.alloc_expr(Expr::Tuple { exprs }, syntax_ptr) + self.alloc_expr( + Expr::Tuple { exprs, is_assignee_expr: self.is_lowering_assignee_expr }, + syntax_ptr, + ) } ast::Expr::BoxExpr(e) => { let expr = self.collect_expr_opt(e.expr()); @@ -478,8 +497,14 @@ impl ExprCollector<'_> { match kind { ArrayExprKind::ElementList(e) => { - let exprs = e.map(|expr| self.collect_expr(expr)).collect(); - self.alloc_expr(Expr::Array(Array::ElementList(exprs)), syntax_ptr) + let elements = e.map(|expr| self.collect_expr(expr)).collect(); + self.alloc_expr( + Expr::Array(Array::ElementList { + 
elements, + is_assignee_expr: self.is_lowering_assignee_expr, + }), + syntax_ptr, + ) } ArrayExprKind::Repeat { initializer, repeat } => { let initializer = self.collect_expr_opt(initializer); diff --git a/crates/hir-def/src/expr.rs b/crates/hir-def/src/expr.rs index 145fe4c19d..c1b3788acb 100644 --- a/crates/hir-def/src/expr.rs +++ b/crates/hir-def/src/expr.rs @@ -110,6 +110,7 @@ pub enum Expr { Call { callee: ExprId, args: Box<[ExprId]>, + is_assignee_expr: bool, }, MethodCall { receiver: ExprId, @@ -139,6 +140,7 @@ pub enum Expr { fields: Box<[RecordLitField]>, spread: Option, ellipsis: bool, + is_assignee_expr: bool, }, Field { expr: ExprId, @@ -197,6 +199,7 @@ pub enum Expr { }, Tuple { exprs: Box<[ExprId]>, + is_assignee_expr: bool, }, Unsafe { body: ExprId, @@ -212,7 +215,7 @@ pub enum Expr { #[derive(Debug, Clone, Eq, PartialEq)] pub enum Array { - ElementList(Box<[ExprId]>), + ElementList { elements: Box<[ExprId]>, is_assignee_expr: bool }, Repeat { initializer: ExprId, repeat: ExprId }, } @@ -286,7 +289,7 @@ impl Expr { f(*iterable); f(*body); } - Expr::Call { callee, args } => { + Expr::Call { callee, args, .. } => { f(*callee); args.iter().copied().for_each(f); } @@ -340,9 +343,9 @@ impl Expr { | Expr::Box { expr } => { f(*expr); } - Expr::Tuple { exprs } => exprs.iter().copied().for_each(f), + Expr::Tuple { exprs, .. } => exprs.iter().copied().for_each(f), Expr::Array(a) => match a { - Array::ElementList(exprs) => exprs.iter().copied().for_each(f), + Array::ElementList { elements, .. } => elements.iter().copied().for_each(f), Array::Repeat { initializer, repeat } => { f(*initializer); f(*repeat) diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs index 9debe83547..d164e64a8b 100644 --- a/crates/hir-ty/src/infer/expr.rs +++ b/crates/hir-ty/src/infer/expr.rs @@ -276,7 +276,7 @@ impl<'a> InferenceContext<'a> { closure_ty } - Expr::Call { callee, args } => { + Expr::Call { callee, args, .. } => { let callee_ty = self.infer_expr(*callee, &Expectation::none()); let mut derefs = Autoderef::new(&mut self.table, callee_ty.clone()); let mut res = None; @@ -693,7 +693,7 @@ impl<'a> InferenceContext<'a> { self.err_ty() } } - Expr::Tuple { exprs } => { + Expr::Tuple { exprs, .. } => { let mut tys = match expected .only_has_type(&mut self.table) .as_ref() @@ -724,12 +724,12 @@ impl<'a> InferenceContext<'a> { let expected = Expectation::has_type(elem_ty.clone()); let len = match array { - Array::ElementList(items) => { - for &expr in items.iter() { + Array::ElementList { elements, .. } => { + for &expr in elements.iter() { let cur_elem_ty = self.infer_expr_inner(expr, &expected); coerce.coerce(self, Some(expr), &cur_elem_ty); } - consteval::usize_const(Some(items.len() as u128)) + consteval::usize_const(Some(elements.len() as u128)) } &Array::Repeat { initializer, repeat } => { self.infer_expr_coerce(initializer, &Expectation::has_type(elem_ty)); @@ -850,7 +850,7 @@ impl<'a> InferenceContext<'a> { let rhs_ty = self.resolve_ty_shallow(rhs_ty); let ty = match &self.body[lhs] { - Expr::Tuple { exprs } => { + Expr::Tuple { exprs, .. } => { // We don't consider multiple ellipses. This is analogous to // `hir_def::body::lower::ExprCollector::collect_tuple_pat()`. let ellipsis = exprs.iter().position(|e| is_rest_expr(*e)); @@ -858,7 +858,7 @@ impl<'a> InferenceContext<'a> { self.infer_tuple_pat_like(&rhs_ty, (), ellipsis, &exprs) } - Expr::Call { callee, args } => { + Expr::Call { callee, args, .. 
} => { // Tuple structs let path = match &self.body[*callee] { Expr::Path(path) => Some(path), @@ -872,7 +872,7 @@ impl<'a> InferenceContext<'a> { self.infer_tuple_struct_pat_like(path, &rhs_ty, (), lhs, ellipsis, &args) } - Expr::Array(Array::ElementList(elements)) => { + Expr::Array(Array::ElementList { elements, .. }) => { let elem_ty = match rhs_ty.kind(Interner) { TyKind::Array(st, _) => st.clone(), _ => self.err_ty(), From 805ac666ca8cb2b643026ff454951e9ce3834d94 Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Sun, 24 Jul 2022 22:34:53 +0900 Subject: [PATCH 0009/1945] fix: consider assignee expressions in record fields exhaustiveness check --- crates/hir-ty/src/diagnostics/expr.rs | 5 ++- .../src/handlers/missing_fields.rs | 31 +++++++++++++++++++ 2 files changed, 35 insertions(+), 1 deletion(-) diff --git a/crates/hir-ty/src/diagnostics/expr.rs b/crates/hir-ty/src/diagnostics/expr.rs index 1fcb4c2976..642e03edd2 100644 --- a/crates/hir-ty/src/diagnostics/expr.rs +++ b/crates/hir-ty/src/diagnostics/expr.rs @@ -305,7 +305,10 @@ pub fn record_literal_missing_fields( expr: &Expr, ) -> Option<(VariantId, Vec, /*exhaustive*/ bool)> { let (fields, exhaustive) = match expr { - Expr::RecordLit { fields, spread, .. } => (fields, spread.is_none()), + Expr::RecordLit { fields, spread, ellipsis, is_assignee_expr, .. } => { + let exhaustive = if *is_assignee_expr { !*ellipsis } else { spread.is_none() }; + (fields, exhaustive) + } _ => return None, }; diff --git a/crates/ide-diagnostics/src/handlers/missing_fields.rs b/crates/ide-diagnostics/src/handlers/missing_fields.rs index 30f903af50..edb1fc0919 100644 --- a/crates/ide-diagnostics/src/handlers/missing_fields.rs +++ b/crates/ide-diagnostics/src/handlers/missing_fields.rs @@ -292,6 +292,37 @@ fn x(a: S) { ) } + #[test] + fn missing_record_expr_in_assignee_expr() { + check_diagnostics( + r" +struct S { s: usize, t: usize } +struct S2 { s: S, t: () } +struct T(S); +fn regular(a: S) { + let s; + S { s, .. } = a; +} +fn nested(a: S2) { + let s; + S2 { s: S { s, .. }, .. } = a; +} +fn in_tuple(a: (S,)) { + let s; + (S { s, .. },) = a; +} +fn in_array(a: [S;1]) { + let s; + [S { s, .. },] = a; +} +fn in_tuple_struct(a: T) { + let s; + T(S { s, .. 
}) = a; +} + ", + ); + } + #[test] fn range_mapping_out_of_macros() { check_fix( From 77acb5c162957d9748d1f35261982591d9a85e22 Mon Sep 17 00:00:00 2001 From: Dorian Scheidt Date: Sun, 24 Jul 2022 00:29:07 -0500 Subject: [PATCH 0010/1945] fix: Autocomplete for struct fields includes receiver fixes #12857 --- crates/ide-completion/src/render.rs | 40 ++++++++++++++++++++++++++--- 1 file changed, 37 insertions(+), 3 deletions(-) diff --git a/crates/ide-completion/src/render.rs b/crates/ide-completion/src/render.rs index 3f25b294e0..9b25964a60 100644 --- a/crates/ide-completion/src/render.rs +++ b/crates/ide-completion/src/render.rs @@ -121,7 +121,7 @@ pub(crate) fn render_field( let mut item = CompletionItem::new( SymbolKind::Field, ctx.source_range(), - receiver.map_or_else(|| name.clone(), |receiver| format!("{}.{}", receiver, name).into()), + field_with_receiver(receiver.as_ref(), &name), ); item.set_relevance(CompletionRelevance { type_match: compute_type_match(ctx.completion, ty), @@ -132,7 +132,7 @@ pub(crate) fn render_field( .set_documentation(field.docs(ctx.db())) .set_deprecated(is_deprecated) .lookup_by(name.clone()); - item.insert_text(escaped_name); + item.insert_text(field_with_receiver(receiver.as_ref(), &escaped_name)); if let Some(receiver) = &dot_access.receiver { if let Some(original) = ctx.completion.sema.original_ast_node(receiver.clone()) { if let Some(ref_match) = compute_ref_match(ctx.completion, ty) { @@ -143,6 +143,11 @@ pub(crate) fn render_field( item.build() } +fn field_with_receiver(receiver: Option<&hir::Name>, field_name: &str) -> SmolStr { + receiver + .map_or_else(|| field_name.into(), |receiver| format!("{}.{}", receiver, field_name).into()) +} + pub(crate) fn render_tuple_field( ctx: RenderContext<'_>, receiver: Option, @@ -152,7 +157,7 @@ pub(crate) fn render_tuple_field( let mut item = CompletionItem::new( SymbolKind::Field, ctx.source_range(), - receiver.map_or_else(|| field.to_string(), |receiver| format!("{}.{}", receiver, field)), + field_with_receiver(receiver.as_ref(), &field.to_string()), ); item.detail(ty.display(ctx.db()).to_string()).lookup_by(field.to_string()); item.build() @@ -1873,6 +1878,35 @@ impl r#trait for r#struct { type t$0 } struct r#struct {} trait r#trait { type r#type; } impl r#trait for r#struct { type r#type = $0; } +"#, + ) + } + + #[test] + fn field_access_includes_self() { + check_edit( + "length", + r#" +struct S { + length: i32 +} + +impl S { + fn some_fn(&self) { + let l = len$0 + } +} +"#, + r#" +struct S { + length: i32 +} + +impl S { + fn some_fn(&self) { + let l = self.length + } +} "#, ) } From 71225c35bf86e639e3a3cfaf229a9c29f6815751 Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Mon, 25 Jul 2022 18:40:35 +0900 Subject: [PATCH 0011/1945] Replace `debug_assert!` with `stdx::always!` --- crates/ide-assists/src/utils/gen_trait_fn_body.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/crates/ide-assists/src/utils/gen_trait_fn_body.rs b/crates/ide-assists/src/utils/gen_trait_fn_body.rs index 43aaad3150..7a0c912959 100644 --- a/crates/ide-assists/src/utils/gen_trait_fn_body.rs +++ b/crates/ide-assists/src/utils/gen_trait_fn_body.rs @@ -28,7 +28,7 @@ pub(crate) fn gen_trait_fn_body( /// Generate a `Clone` impl based on the fields and members of the target type. 
fn gen_clone_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> { - debug_assert!(func.name().map_or(false, |name| name.text() == "clone")); + stdx::always!(func.name().map_or(false, |name| name.text() == "clone")); fn gen_clone_call(target: ast::Expr) -> ast::Expr { let method = make::name_ref("clone"); make::expr_method_call(target, method, make::arg_list(None)) @@ -340,7 +340,7 @@ fn gen_default_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> { /// Generate a `Hash` impl based on the fields and members of the target type. fn gen_hash_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> { - debug_assert!(func.name().map_or(false, |name| name.text() == "hash")); + stdx::always!(func.name().map_or(false, |name| name.text() == "hash")); fn gen_hash_call(target: ast::Expr) -> ast::Stmt { let method = make::name_ref("hash"); let arg = make::expr_path(make::ext::ident_path("state")); @@ -396,7 +396,7 @@ fn gen_hash_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> { /// Generate a `PartialEq` impl based on the fields and members of the target type. fn gen_partial_eq(adt: &ast::Adt, func: &ast::Fn) -> Option<()> { - debug_assert!(func.name().map_or(false, |name| name.text() == "eq")); + stdx::always!(func.name().map_or(false, |name| name.text() == "eq")); fn gen_eq_chain(expr: Option, cmp: ast::Expr) -> Option { match expr { Some(expr) => Some(make::expr_bin_op(expr, BinaryOp::LogicOp(LogicOp::And), cmp)), @@ -573,7 +573,7 @@ fn gen_partial_eq(adt: &ast::Adt, func: &ast::Fn) -> Option<()> { } fn gen_partial_ord(adt: &ast::Adt, func: &ast::Fn) -> Option<()> { - debug_assert!(func.name().map_or(false, |name| name.text() == "partial_cmp")); + stdx::always!(func.name().map_or(false, |name| name.text() == "partial_cmp")); fn gen_partial_eq_match(match_target: ast::Expr) -> Option { let mut arms = vec![]; From dc9405081531d0f3e68d984290b4d752bee3b9bd Mon Sep 17 00:00:00 2001 From: Amos Wenger Date: Mon, 25 Jul 2022 14:18:28 +0200 Subject: [PATCH 0012/1945] revert nightly rustfmt formatting that accidentally slipped in cf. https://github.com/rust-lang/rust/pull/99603 cf. 
https://github.com/rust-lang/rust-analyzer/pull/12871#discussion_r928816339 --- crates/hir-def/src/import_map.rs | 6 +++++- crates/hir-def/src/nameres.rs | 6 +++++- crates/proc-macro-test/build.rs | 3 +-- 3 files changed, 11 insertions(+), 4 deletions(-) diff --git a/crates/hir-def/src/import_map.rs b/crates/hir-def/src/import_map.rs index 688055e430..05e0ceb05a 100644 --- a/crates/hir-def/src/import_map.rs +++ b/crates/hir-def/src/import_map.rs @@ -167,7 +167,11 @@ fn collect_import_map(db: &dyn DefDatabase, krate: CrateId) -> ImportMap { let visible_items = mod_data.scope.entries().filter_map(|(name, per_ns)| { let per_ns = per_ns.filter_visibility(|vis| vis == Visibility::Public); - if per_ns.is_none() { None } else { Some((name, per_ns)) } + if per_ns.is_none() { + None + } else { + Some((name, per_ns)) + } }); for (name, per_ns) in visible_items { diff --git a/crates/hir-def/src/nameres.rs b/crates/hir-def/src/nameres.rs index 756fd583af..3949fbb6e7 100644 --- a/crates/hir-def/src/nameres.rs +++ b/crates/hir-def/src/nameres.rs @@ -334,7 +334,11 @@ impl DefMap { pub(crate) fn crate_root(&self, db: &dyn DefDatabase) -> ModuleId { self.with_ancestor_maps(db, self.root, &mut |def_map, _module| { - if def_map.block.is_none() { Some(def_map.module_id(def_map.root)) } else { None } + if def_map.block.is_none() { + Some(def_map.module_id(def_map.root)) + } else { + None + } }) .expect("DefMap chain without root") } diff --git a/crates/proc-macro-test/build.rs b/crates/proc-macro-test/build.rs index c90144509d..a80c962617 100644 --- a/crates/proc-macro-test/build.rs +++ b/crates/proc-macro-test/build.rs @@ -62,8 +62,7 @@ fn main() { Command::new(toolchain::cargo()) }; - cmd - .current_dir(&staging_dir) + cmd.current_dir(&staging_dir) .args(&["build", "-p", "proc-macro-test-impl", "--message-format", "json"]) // Explicit override the target directory to avoid using the same one which the parent // cargo is using, or we'll deadlock. 
From 5f54ec0e859a020b125519d56ad2a08defd3ddf3 Mon Sep 17 00:00:00 2001 From: Amos Wenger Date: Sat, 23 Jul 2022 22:47:34 +0200 Subject: [PATCH 0013/1945] Add a proc-macro-srv-cli crate --- Cargo.lock | 7 +++++++ crates/proc-macro-srv-cli/Cargo.toml | 13 +++++++++++++ crates/proc-macro-srv-cli/src/main.rs | 5 +++++ 3 files changed, 25 insertions(+) create mode 100644 crates/proc-macro-srv-cli/Cargo.toml create mode 100644 crates/proc-macro-srv-cli/src/main.rs diff --git a/Cargo.lock b/Cargo.lock index 4c83000683..703f0e5b8a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1198,6 +1198,13 @@ dependencies = [ "tt", ] +[[package]] +name = "proc-macro-srv-cli" +version = "0.0.0" +dependencies = [ + "proc-macro-srv", +] + [[package]] name = "proc-macro-test" version = "0.0.0" diff --git a/crates/proc-macro-srv-cli/Cargo.toml b/crates/proc-macro-srv-cli/Cargo.toml new file mode 100644 index 0000000000..959002f664 --- /dev/null +++ b/crates/proc-macro-srv-cli/Cargo.toml @@ -0,0 +1,13 @@ +[package] +name = "proc-macro-srv-cli" +version = "0.0.0" +description = "TBD" +license = "MIT OR Apache-2.0" +edition = "2021" +rust-version = "1.57" + +[dependencies] +proc-macro-srv = { version = "0.0.0", path = "../proc-macro-srv" } + +[features] +sysroot-abi = ["proc-macro-srv/sysroot-abi"] diff --git a/crates/proc-macro-srv-cli/src/main.rs b/crates/proc-macro-srv-cli/src/main.rs new file mode 100644 index 0000000000..ea16cfff4b --- /dev/null +++ b/crates/proc-macro-srv-cli/src/main.rs @@ -0,0 +1,5 @@ +use proc_macro_srv::cli; + +fn main() { + cli::run().unwrap(); +} From fba6adfae48d55984934872a81d533dc405b7cd3 Mon Sep 17 00:00:00 2001 From: Amos Wenger Date: Sun, 24 Jul 2022 00:48:20 +0200 Subject: [PATCH 0014/1945] Add doc comment to pass tidy check --- crates/proc-macro-srv-cli/src/main.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/crates/proc-macro-srv-cli/src/main.rs b/crates/proc-macro-srv-cli/src/main.rs index ea16cfff4b..0f23a7daaf 100644 --- a/crates/proc-macro-srv-cli/src/main.rs +++ b/crates/proc-macro-srv-cli/src/main.rs @@ -1,3 +1,5 @@ +//! A standalone binary for `proc-macro-srv`. 
+ use proc_macro_srv::cli; fn main() { From fd1b64ec718d8483a98872bd91ee8f11b6893e7f Mon Sep 17 00:00:00 2001 From: Amos Wenger Date: Sun, 24 Jul 2022 14:50:30 +0200 Subject: [PATCH 0015/1945] Return result directly Co-authored-by: Lukas Wirth --- crates/proc-macro-srv-cli/src/main.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/proc-macro-srv-cli/src/main.rs b/crates/proc-macro-srv-cli/src/main.rs index 0f23a7daaf..8398f2ba6c 100644 --- a/crates/proc-macro-srv-cli/src/main.rs +++ b/crates/proc-macro-srv-cli/src/main.rs @@ -2,6 +2,6 @@ use proc_macro_srv::cli; -fn main() { - cli::run().unwrap(); +fn main() -> std::io::Result<()> { + cli::run() } From 4364531c2f3db41d9ba3a72f3e77ab26d16db8b0 Mon Sep 17 00:00:00 2001 From: Amos Wenger Date: Sun, 24 Jul 2022 14:53:20 +0200 Subject: [PATCH 0016/1945] Name the binary 'proc-macro-srv' --- crates/proc-macro-srv-cli/Cargo.toml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/crates/proc-macro-srv-cli/Cargo.toml b/crates/proc-macro-srv-cli/Cargo.toml index 959002f664..badb594f06 100644 --- a/crates/proc-macro-srv-cli/Cargo.toml +++ b/crates/proc-macro-srv-cli/Cargo.toml @@ -11,3 +11,7 @@ proc-macro-srv = { version = "0.0.0", path = "../proc-macro-srv" } [features] sysroot-abi = ["proc-macro-srv/sysroot-abi"] + +[[bin]] +name = "proc-macro-srv" +path = "src/main.rs" From dadb83282d7dc7bfb408d93d4953019a328e8e2b Mon Sep 17 00:00:00 2001 From: Amos Wenger Date: Mon, 25 Jul 2022 16:07:41 +0200 Subject: [PATCH 0017/1945] Remember the difference between 'sysroot root' and 'sysroot src root', start looking in there for a rust-analyzer-proc-macro-srv binary --- crates/project-model/src/project_json.rs | 5 ++++ crates/project-model/src/sysroot.rs | 21 +++++++++++++---- crates/project-model/src/tests.rs | 7 ++++-- crates/project-model/src/workspace.rs | 12 +++++++--- crates/rust-analyzer/src/reload.rs | 30 ++++++++++++++++++++++-- 5 files changed, 63 insertions(+), 12 deletions(-) diff --git a/crates/project-model/src/project_json.rs b/crates/project-model/src/project_json.rs index a3c5ac1674..63d1d0ace9 100644 --- a/crates/project-model/src/project_json.rs +++ b/crates/project-model/src/project_json.rs @@ -17,6 +17,9 @@ use crate::cfg_flag::CfgFlag; /// Roots and crates that compose this Rust project. #[derive(Clone, Debug, Eq, PartialEq)] pub struct ProjectJson { + /// e.g. `path/to/sysroot` + pub(crate) sysroot: Option, + /// e.g. `path/to/sysroot/lib/rustlib/src/rust` pub(crate) sysroot_src: Option, project_root: AbsPathBuf, crates: Vec, @@ -52,6 +55,7 @@ impl ProjectJson { /// configuration. 
pub fn new(base: &AbsPath, data: ProjectJsonData) -> ProjectJson { ProjectJson { + sysroot: data.sysroot.map(|it| base.join(it)), sysroot_src: data.sysroot_src.map(|it| base.join(it)), project_root: base.to_path_buf(), crates: data @@ -122,6 +126,7 @@ impl ProjectJson { #[derive(Deserialize, Debug, Clone)] pub struct ProjectJsonData { + sysroot: Option, sysroot_src: Option, crates: Vec, } diff --git a/crates/project-model/src/sysroot.rs b/crates/project-model/src/sysroot.rs index 52750f4896..b17a59b10b 100644 --- a/crates/project-model/src/sysroot.rs +++ b/crates/project-model/src/sysroot.rs @@ -15,6 +15,7 @@ use crate::{utf8_stdout, ManifestPath}; #[derive(Debug, Clone, Eq, PartialEq)] pub struct Sysroot { root: AbsPathBuf, + src_root: AbsPathBuf, crates: Arena, } @@ -35,6 +36,15 @@ impl ops::Index for Sysroot { } impl Sysroot { + /// Returns sysroot directory, where `bin/`, `etc/`, `lib/`, `libexec/` + /// subfolder live, like: + /// `$HOME/.rustup/toolchains/nightly-2022-07-23-x86_64-unknown-linux-gnu/lib/rustlib/src/rust/library` + pub fn src_root(&self) -> &AbsPath { + &self.src_root + } + + /// Returns sysroot "src" directory, where stdlib sources are located, like: + /// `$HOME/.rustup/toolchains/nightly-2022-07-23-x86_64-unknown-linux-gnu` pub fn root(&self) -> &AbsPath { &self.root } @@ -61,7 +71,7 @@ impl Sysroot { tracing::debug!("Discovering sysroot for {}", dir.display()); let sysroot_dir = discover_sysroot_dir(dir)?; let sysroot_src_dir = discover_sysroot_src_dir(&sysroot_dir, dir)?; - let res = Sysroot::load(sysroot_src_dir)?; + let res = Sysroot::load(sysroot_dir, sysroot_src_dir)?; Ok(res) } @@ -71,14 +81,15 @@ impl Sysroot { discover_sysroot_dir(current_dir).ok().and_then(|sysroot_dir| get_rustc_src(&sysroot_dir)) } - pub fn load(sysroot_src_dir: AbsPathBuf) -> Result { - let mut sysroot = Sysroot { root: sysroot_src_dir, crates: Arena::default() }; + pub fn load(sysroot_dir: AbsPathBuf, sysroot_src_dir: AbsPathBuf) -> Result { + let mut sysroot = + Sysroot { root: sysroot_dir, src_root: sysroot_src_dir, crates: Arena::default() }; for path in SYSROOT_CRATES.trim().lines() { let name = path.split('/').last().unwrap(); let root = [format!("{}/src/lib.rs", path), format!("lib{}/lib.rs", path)] .into_iter() - .map(|it| sysroot.root.join(it)) + .map(|it| sysroot.src_root.join(it)) .filter_map(|it| ManifestPath::try_from(it).ok()) .find(|it| fs::metadata(it).is_ok()); @@ -119,7 +130,7 @@ impl Sysroot { }; anyhow::bail!( "could not find libcore in sysroot path `{}`{}", - sysroot.root.as_path().display(), + sysroot.src_root.as_path().display(), var_note, ); } diff --git a/crates/project-model/src/tests.rs b/crates/project-model/src/tests.rs index ddfea0ce4c..e304a59c01 100644 --- a/crates/project-model/src/tests.rs +++ b/crates/project-model/src/tests.rs @@ -75,8 +75,11 @@ fn get_test_path(file: &str) -> PathBuf { fn get_fake_sysroot() -> Sysroot { let sysroot_path = get_test_path("fake-sysroot"); - let sysroot_src_dir = AbsPathBuf::assert(sysroot_path); - Sysroot::load(sysroot_src_dir).unwrap() + // there's no `libexec/` directory with a `proc-macro-srv` binary in that + // fake sysroot, so we give them both the same path: + let sysroot_dir = AbsPathBuf::assert(sysroot_path); + let sysroot_src_dir = sysroot_dir.clone(); + Sysroot::load(sysroot_dir, sysroot_src_dir).unwrap() } fn rooted_project_json(data: ProjectJsonData) -> ProjectJson { diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs index de42458354..63882466fa 100644 --- 
a/crates/project-model/src/workspace.rs +++ b/crates/project-model/src/workspace.rs @@ -230,8 +230,14 @@ impl ProjectWorkspace { project_json: ProjectJson, target: Option<&str>, ) -> Result { - let sysroot = match &project_json.sysroot_src { - Some(path) => Some(Sysroot::load(path.clone())?), + let sysroot = match project_json.sysroot_src.clone() { + Some(sysroot_src) => { + // if `sysroot` isn't specified (only `sysroot_src`), we won't have + // a real sysroot path, that's fine. it's just used to discover + // the standalone `proc-macro-srv` binary. + let sysroot = project_json.sysroot.clone().unwrap_or_else(|| sysroot_src.clone()); + Some(Sysroot::load(sysroot, sysroot_src)?) + } None => None, }; let rustc_cfg = rustc_cfg::get(None, target); @@ -345,7 +351,7 @@ impl ProjectWorkspace { }) .chain(sysroot.iter().map(|sysroot| PackageRoot { is_local: false, - include: vec![sysroot.root().to_path_buf()], + include: vec![sysroot.src_root().to_path_buf()], exclude: Vec::new(), })) .chain(rustc.iter().flat_map(|rustc| { diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs index e5802773e7..4256c2a764 100644 --- a/crates/rust-analyzer/src/reload.rs +++ b/crates/rust-analyzer/src/reload.rs @@ -305,8 +305,34 @@ impl GlobalState { if self.proc_macro_clients.is_empty() { if let Some((path, args)) = self.config.proc_macro_srv() { - self.proc_macro_clients = (0..self.workspaces.len()) - .map(|_| { + self.proc_macro_clients = self + .workspaces + .iter() + .map(|ws| { + let mut path = path.clone(); + if let ProjectWorkspace::Cargo { sysroot, .. } = ws { + tracing::info!("Found a cargo workspace..."); + if let Some(sysroot) = sysroot.as_ref() { + tracing::info!("Found a cargo workspace with a sysroot..."); + let server_path = sysroot + .root() + .join("libexec") + .join("rust-analyzer-proc-macro-srv"); + if std::fs::metadata(&server_path).is_ok() { + tracing::info!( + "And the server exists at {}", + server_path.display() + ); + path = server_path; + } else { + tracing::info!( + "And the server does not exist at {}", + server_path.display() + ); + } + } + } + ProcMacroServer::spawn(path.clone(), args.clone()).map_err(|err| { let error = format!( "Failed to run proc_macro_srv from path {}, error: {:?}", From 74a2fad5e6b6193842fe52b50ee174736efb266d Mon Sep 17 00:00:00 2001 From: Amos Wenger Date: Mon, 25 Jul 2022 16:22:39 +0200 Subject: [PATCH 0018/1945] Gate and rename binary, use it if it's in the sysroot --- crates/proc-macro-api/src/process.rs | 1 + crates/proc-macro-srv-cli/Cargo.toml | 2 +- crates/proc-macro-srv-cli/src/main.rs | 12 ++++++++++++ crates/rust-analyzer/src/reload.rs | 8 ++++++++ 4 files changed, 22 insertions(+), 1 deletion(-) diff --git a/crates/proc-macro-api/src/process.rs b/crates/proc-macro-api/src/process.rs index ff4c59447d..c4018d3b39 100644 --- a/crates/proc-macro-api/src/process.rs +++ b/crates/proc-macro-api/src/process.rs @@ -86,6 +86,7 @@ fn mk_child( ) -> io::Result { Command::new(path.as_os_str()) .args(args) + .env("RUST_ANALYZER_INTERNALS_DO_NOT_USE", "this is unstable") .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(Stdio::inherit()) diff --git a/crates/proc-macro-srv-cli/Cargo.toml b/crates/proc-macro-srv-cli/Cargo.toml index badb594f06..9d0da5dee9 100644 --- a/crates/proc-macro-srv-cli/Cargo.toml +++ b/crates/proc-macro-srv-cli/Cargo.toml @@ -13,5 +13,5 @@ proc-macro-srv = { version = "0.0.0", path = "../proc-macro-srv" } sysroot-abi = ["proc-macro-srv/sysroot-abi"] [[bin]] -name = "proc-macro-srv" +name = 
"rust-analyzer-proc-macro-srv" path = "src/main.rs" diff --git a/crates/proc-macro-srv-cli/src/main.rs b/crates/proc-macro-srv-cli/src/main.rs index 8398f2ba6c..ac9fa9f5a4 100644 --- a/crates/proc-macro-srv-cli/src/main.rs +++ b/crates/proc-macro-srv-cli/src/main.rs @@ -3,5 +3,17 @@ use proc_macro_srv::cli; fn main() -> std::io::Result<()> { + let v = std::env::var("RUST_ANALYZER_INTERNALS_DO_NOT_USE"); + match v.as_deref() { + Ok("this is unstable") => { + // very well, if you must + } + _ => { + eprintln!("If you're rust-analyzer, you can use this tool by exporting RUST_ANALYZER_INTERNALS_DO_NOT_USE='this is unstable'."); + eprintln!("If not, you probably shouldn't use this tool. But do what you want: I'm an error message, not a cop."); + std::process::exit(122); + } + } + cli::run() } diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs index 4256c2a764..9ae361b034 100644 --- a/crates/rust-analyzer/src/reload.rs +++ b/crates/rust-analyzer/src/reload.rs @@ -309,7 +309,9 @@ impl GlobalState { .workspaces .iter() .map(|ws| { + let mut args = args.clone(); let mut path = path.clone(); + if let ProjectWorkspace::Cargo { sysroot, .. } = ws { tracing::info!("Found a cargo workspace..."); if let Some(sysroot) = sysroot.as_ref() { @@ -324,6 +326,7 @@ impl GlobalState { server_path.display() ); path = server_path; + args = vec![]; } else { tracing::info!( "And the server does not exist at {}", @@ -333,6 +336,11 @@ impl GlobalState { } } + tracing::info!( + "Using proc-macro server at {} with args {:?}", + path.display(), + args + ); ProcMacroServer::spawn(path.clone(), args.clone()).map_err(|err| { let error = format!( "Failed to run proc_macro_srv from path {}, error: {:?}", From 696775153dffaa701bcabe67b5fe601db862c4f4 Mon Sep 17 00:00:00 2001 From: Amos Wenger Date: Mon, 25 Jul 2022 16:59:10 +0200 Subject: [PATCH 0019/1945] Fix mix up in comments --- crates/project-model/src/sysroot.rs | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/crates/project-model/src/sysroot.rs b/crates/project-model/src/sysroot.rs index b17a59b10b..362bb0f5e7 100644 --- a/crates/project-model/src/sysroot.rs +++ b/crates/project-model/src/sysroot.rs @@ -36,19 +36,19 @@ impl ops::Index for Sysroot { } impl Sysroot { - /// Returns sysroot directory, where `bin/`, `etc/`, `lib/`, `libexec/` + /// Returns sysroot "root" directory, where `bin/`, `etc/`, `lib/`, `libexec/` /// subfolder live, like: - /// `$HOME/.rustup/toolchains/nightly-2022-07-23-x86_64-unknown-linux-gnu/lib/rustlib/src/rust/library` - pub fn src_root(&self) -> &AbsPath { - &self.src_root - } - - /// Returns sysroot "src" directory, where stdlib sources are located, like: /// `$HOME/.rustup/toolchains/nightly-2022-07-23-x86_64-unknown-linux-gnu` pub fn root(&self) -> &AbsPath { &self.root } + /// Returns the sysroot "source" directory, where stdlib sources are located, like: + /// `$HOME/.rustup/toolchains/nightly-2022-07-23-x86_64-unknown-linux-gnu/lib/rustlib/src/rust/library` + pub fn src_root(&self) -> &AbsPath { + &self.src_root + } + pub fn public_deps(&self) -> impl Iterator + '_ { // core is added as a dependency before std in order to // mimic rustcs dependency order From 2c2520fbb48b977a805b46af79e4016a4394e719 Mon Sep 17 00:00:00 2001 From: Amos Wenger Date: Mon, 25 Jul 2022 17:52:38 +0200 Subject: [PATCH 0020/1945] Allow specifying sysroot OR sysroot_src --- crates/project-model/src/workspace.rs | 24 +++++++++++++++++------- 1 file changed, 17 insertions(+), 7 deletions(-) diff 
--git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs index 63882466fa..b144006b44 100644 --- a/crates/project-model/src/workspace.rs +++ b/crates/project-model/src/workspace.rs @@ -230,16 +230,26 @@ impl ProjectWorkspace { project_json: ProjectJson, target: Option<&str>, ) -> Result { - let sysroot = match project_json.sysroot_src.clone() { - Some(sysroot_src) => { - // if `sysroot` isn't specified (only `sysroot_src`), we won't have - // a real sysroot path, that's fine. it's just used to discover - // the standalone `proc-macro-srv` binary. - let sysroot = project_json.sysroot.clone().unwrap_or_else(|| sysroot_src.clone()); + let sysroot = match (project_json.sysroot.clone(), project_json.sysroot_src.clone()) { + (Some(sysroot), Some(sysroot_src)) => Some(Sysroot::load(sysroot, sysroot_src)?), + (Some(sysroot), None) => { + // assume sysroot is structured like rustup's and guess `sysroot_src` + let sysroot_src = + sysroot.join("lib").join("rustlib").join("src").join("rust").join("library"); + Some(Sysroot::load(sysroot, sysroot_src)?) } - None => None, + (None, Some(sysroot_src)) => { + // assume sysroot is structured like rustup's and guess `sysroot` + let mut sysroot = sysroot_src.clone(); + for _ in 0..5 { + sysroot.pop(); + } + Some(Sysroot::load(sysroot, sysroot_src)?) + } + (None, None) => None, }; + let rustc_cfg = rustc_cfg::get(None, target); Ok(ProjectWorkspace::Json { project: project_json, sysroot, rustc_cfg }) } From e45a250f8c26e94a2a3ff8b50305530f75718239 Mon Sep 17 00:00:00 2001 From: Zachary S Date: Mon, 25 Jul 2022 15:43:25 -0500 Subject: [PATCH 0021/1945] fix: Insert spaces when inlining a function defined in a macro. --- .../ide-assists/src/handlers/inline_call.rs | 50 ++++++++++++++++++- 1 file changed, 48 insertions(+), 2 deletions(-) diff --git a/crates/ide-assists/src/handlers/inline_call.rs b/crates/ide-assists/src/handlers/inline_call.rs index 658a1aadf5..8b2f6ac75a 100644 --- a/crates/ide-assists/src/handlers/inline_call.rs +++ b/crates/ide-assists/src/handlers/inline_call.rs @@ -7,7 +7,7 @@ use ide_db::{ imports::insert_use::remove_path_if_in_use_stmt, path_transform::PathTransform, search::{FileReference, SearchScope}, - syntax_helpers::node_ext::expr_as_name_ref, + syntax_helpers::{insert_whitespace_into_node::insert_ws_into, node_ext::expr_as_name_ref}, RootDatabase, }; use itertools::{izip, Itertools}; @@ -301,7 +301,18 @@ fn inline( params: &[(ast::Pat, Option, hir::Param)], CallInfo { node, arguments, generic_arg_list }: &CallInfo, ) -> ast::Expr { - let body = fn_body.clone_for_update(); + let body = if sema.hir_file_for(fn_body.syntax()).is_macro() { + cov_mark::hit!(inline_call_defined_in_macro); + if let Some(body) = ast::BlockExpr::cast(insert_ws_into(fn_body.syntax().clone())) { + body + } else { + // FIXME(zachs18): I believe this should be unreachable, + // since insert_ws_into shouldn't change the kind of the SyntaxNode. + fn_body.clone_for_update() + } + } else { + fn_body.clone_for_update() + }; let usages_for_locals = |local| { Definition::Local(local) .usages(sema) @@ -1144,6 +1155,41 @@ fn bar() -> u32 { x }) + foo() } +"#, + ) + } + + #[test] + fn inline_call_defined_in_macro() { + cov_mark::check!(inline_call_defined_in_macro); + check_assist( + inline_call, + r#" +macro_rules! define_foo { + () => { fn foo() -> u32 { + let x = 0; + x + } }; +} +define_foo!(); +fn bar() -> u32 { + foo$0() +} +"#, + r#" +macro_rules! 
define_foo { + () => { fn foo() -> u32 { + let x = 0; + x + } }; +} +define_foo!(); +fn bar() -> u32 { + { + let x = 0; + x + } +} "#, ) } From 4e60db2d07b848d92a52d3fd9fa74e6a4f7f097f Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sun, 24 Jul 2022 12:04:15 +0200 Subject: [PATCH 0022/1945] feat: Downmap tokens inside derive helpers --- crates/hir-def/src/item_scope.rs | 35 ++++-- crates/hir-def/src/nameres.rs | 18 ++- crates/hir-def/src/nameres/collector.rs | 14 +-- crates/hir-def/src/resolver.rs | 6 +- crates/hir/src/semantics.rs | 132 ++++++++++++++-------- crates/hir/src/semantics/source_to_def.rs | 2 + crates/ide-db/src/rename.rs | 10 +- crates/ide-db/src/search.rs | 2 + crates/ide/src/hover.rs | 7 +- 9 files changed, 151 insertions(+), 75 deletions(-) diff --git a/crates/hir-def/src/item_scope.rs b/crates/hir-def/src/item_scope.rs index b98b2855cb..579f803ea1 100644 --- a/crates/hir-def/src/item_scope.rs +++ b/crates/hir-def/src/item_scope.rs @@ -66,10 +66,14 @@ pub struct ItemScope { attr_macros: FxHashMap, MacroCallId>, /// The derive macro invocations in this scope, keyed by the owner item over the actual derive attributes /// paired with the derive macro invocations for the specific attribute. - derive_macros: FxHashMap< - AstId, - SmallVec<[(AttrId, MacroCallId, SmallVec<[Option; 1]>); 1]>, - >, + derive_macros: FxHashMap, SmallVec<[DeriveMacroInvocation; 1]>>, +} + +#[derive(Debug, PartialEq, Eq)] +struct DeriveMacroInvocation { + attr_id: AttrId, + attr_call_id: MacroCallId, + derive_call_ids: SmallVec<[Option; 1]>, } pub(crate) static BUILTIN_SCOPE: Lazy> = Lazy::new(|| { @@ -210,12 +214,14 @@ impl ItemScope { &mut self, adt: AstId, call: MacroCallId, - attr_id: AttrId, + id: AttrId, idx: usize, ) { if let Some(derives) = self.derive_macros.get_mut(&adt) { - if let Some((.., invocs)) = derives.iter_mut().find(|&&mut (id, ..)| id == attr_id) { - invocs[idx] = Some(call); + if let Some(DeriveMacroInvocation { derive_call_ids, .. }) = + derives.iter_mut().find(|&&mut DeriveMacroInvocation { attr_id, .. }| id == attr_id) + { + derive_call_ids[idx] = Some(call); } } } @@ -227,10 +233,14 @@ impl ItemScope { &mut self, adt: AstId, attr_id: AttrId, - call_id: MacroCallId, + attr_call_id: MacroCallId, len: usize, ) { - self.derive_macros.entry(adt).or_default().push((attr_id, call_id, smallvec![None; len])); + self.derive_macros.entry(adt).or_default().push(DeriveMacroInvocation { + attr_id, + attr_call_id, + derive_call_ids: smallvec![None; len], + }); } pub(crate) fn derive_macro_invocs( @@ -242,7 +252,12 @@ impl ItemScope { ), > + '_ { self.derive_macros.iter().map(|(k, v)| { - (*k, v.iter().map(|&(attr_id, call_id, ref invocs)| (attr_id, call_id, &**invocs))) + ( + *k, + v.iter().map(|DeriveMacroInvocation { attr_id, attr_call_id, derive_call_ids }| { + (*attr_id, *attr_call_id, &**derive_call_ids) + }), + ) }) } diff --git a/crates/hir-def/src/nameres.rs b/crates/hir-def/src/nameres.rs index 3949fbb6e7..8165055e09 100644 --- a/crates/hir-def/src/nameres.rs +++ b/crates/hir-def/src/nameres.rs @@ -57,10 +57,10 @@ mod proc_macro; #[cfg(test)] mod tests; -use std::{cmp::Ord, sync::Arc}; +use std::{ops::Deref, sync::Arc}; use base_db::{CrateId, Edition, FileId}; -use hir_expand::{name::Name, InFile, MacroDefId}; +use hir_expand::{name::Name, InFile, MacroCallId, MacroDefId}; use itertools::Itertools; use la_arena::Arena; use profile::Count; @@ -106,6 +106,9 @@ pub struct DefMap { fn_proc_macro_mapping: FxHashMap, /// The error that occurred when failing to load the proc-macro dll. 
proc_macro_loading_error: Option>, + /// Tracks which custom derives are in scope for an item, to allow resolution of derive helper + /// attributes. + derive_helpers_in_scope: FxHashMap, Vec<(Name, MacroCallId)>>, /// Custom attributes registered with `#![register_attr]`. registered_attrs: Vec, @@ -275,6 +278,7 @@ impl DefMap { exported_derives: FxHashMap::default(), fn_proc_macro_mapping: FxHashMap::default(), proc_macro_loading_error: None, + derive_helpers_in_scope: FxHashMap::default(), prelude: None, root, modules, @@ -294,12 +298,19 @@ impl DefMap { pub fn modules(&self) -> impl Iterator + '_ { self.modules.iter() } + + pub fn derive_helpers_in_scope(&self, id: AstId) -> Option<&[(Name, MacroCallId)]> { + self.derive_helpers_in_scope.get(&id.map(|it| it.upcast())).map(Deref::deref) + } + pub fn registered_tools(&self) -> &[SmolStr] { &self.registered_tools } + pub fn registered_attrs(&self) -> &[SmolStr] { &self.registered_attrs } + pub fn root(&self) -> LocalModuleId { self.root } @@ -307,6 +318,7 @@ impl DefMap { pub fn fn_as_proc_macro(&self, id: FunctionId) -> Option { self.fn_proc_macro_mapping.get(&id).copied() } + pub fn proc_macro_loading_error(&self) -> Option<&str> { self.proc_macro_loading_error.as_deref() } @@ -467,6 +479,7 @@ impl DefMap { registered_attrs, registered_tools, fn_proc_macro_mapping, + derive_helpers_in_scope, proc_macro_loading_error: _, block: _, edition: _, @@ -483,6 +496,7 @@ impl DefMap { registered_attrs.shrink_to_fit(); registered_tools.shrink_to_fit(); fn_proc_macro_mapping.shrink_to_fit(); + derive_helpers_in_scope.shrink_to_fit(); for (_, module) in modules.iter_mut() { module.children.shrink_to_fit(); module.scope.shrink_to_fit(); diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs index 67651e0641..e14d29952d 100644 --- a/crates/hir-def/src/nameres/collector.rs +++ b/crates/hir-def/src/nameres/collector.rs @@ -110,7 +110,6 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, mut def_map: DefMap, tree_id: T proc_macros, from_glob_import: Default::default(), skip_attrs: Default::default(), - derive_helpers_in_scope: Default::default(), is_proc_macro, }; if tree_id.is_block() { @@ -258,9 +257,6 @@ struct DefCollector<'a> { /// This also stores the attributes to skip when we resolve derive helpers and non-macro /// non-builtin attributes in general. skip_attrs: FxHashMap, AttrId>, - /// Tracks which custom derives are in scope for an item, to allow resolution of derive helper - /// attributes. - derive_helpers_in_scope: FxHashMap, Vec>, } impl DefCollector<'_> { @@ -1132,8 +1128,8 @@ impl DefCollector<'_> { }; if let Some(ident) = path.as_ident() { - if let Some(helpers) = self.derive_helpers_in_scope.get(&ast_id) { - if helpers.contains(ident) { + if let Some(helpers) = self.def_map.derive_helpers_in_scope.get(&ast_id) { + if helpers.iter().any(|(it, _)| it == ident) { cov_mark::hit!(resolved_derive_helper); // Resolved to derive helper. Collect the item's attributes again, // starting after the derive helper. 
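For context on the feature these collector hunks resolve: a procedural derive macro can register "derive helper" attributes via `attributes(...)`, and those helpers may then be written on the annotated item, its fields, or its variants without a separate import. A minimal sketch of the shape of code involved (assumed, not taken from this patch; `MyDerive` and `my_helper` are illustrative names, and the two halves live in separate crates):

// Proc-macro crate (assumes `proc-macro = true` in its Cargo.toml).
// `my_helper` is registered as a derive helper of `MyDerive` via `attributes(...)`.
#[proc_macro_derive(MyDerive, attributes(my_helper))]
pub fn my_derive(_input: proc_macro::TokenStream) -> proc_macro::TokenStream {
    // A real derive would generate an impl here; an empty stream keeps the sketch compilable.
    proc_macro::TokenStream::new()
}

// User crate: `my_helper` only resolves because `MyDerive` is applied to the item,
// which is the per-item fact that `derive_helpers_in_scope` records.
#[derive(MyDerive)]
struct S {
    #[my_helper]
    field: u32,
}
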
@@ -1322,10 +1318,11 @@ impl DefCollector<'_> { if loc.def.krate != self.def_map.krate { let def_map = self.db.crate_def_map(loc.def.krate); if let Some(helpers) = def_map.exported_derives.get(&loc.def) { - self.derive_helpers_in_scope + self.def_map + .derive_helpers_in_scope .entry(ast_id.map(|it| it.upcast())) .or_default() - .extend(helpers.iter().cloned()); + .extend(helpers.iter().cloned().zip(std::iter::repeat(macro_call_id))); } } } @@ -2140,7 +2137,6 @@ mod tests { proc_macros: Default::default(), from_glob_import: Default::default(), skip_attrs: Default::default(), - derive_helpers_in_scope: Default::default(), is_proc_macro: false, }; collector.seed_with_top_level(); diff --git a/crates/hir-def/src/resolver.rs b/crates/hir-def/src/resolver.rs index c8d3052102..0c1ae8a064 100644 --- a/crates/hir-def/src/resolver.rs +++ b/crates/hir-def/src/resolver.rs @@ -448,10 +448,14 @@ impl Resolver { } pub fn krate(&self) -> CrateId { + self.def_map().krate() + } + + pub fn def_map(&self) -> &DefMap { self.scopes .get(0) .and_then(|scope| match scope { - Scope::ModuleScope(m) => Some(m.def_map.krate()), + Scope::ModuleScope(m) => Some(&m.def_map), _ => None, }) .expect("module scope invariant violated") diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs index 043f2b7c24..218e15989a 100644 --- a/crates/hir/src/semantics.rs +++ b/crates/hir/src/semantics.rs @@ -733,6 +733,8 @@ impl<'db> SemanticsImpl<'db> { Some(it) => it, None => return, }; + let def_map = sa.resolver.def_map(); + let mut stack: SmallVec<[_; 4]> = smallvec![InFile::new(sa.file_id, token)]; let mut cache = self.expansion_info_cache.borrow_mut(); let mut mcache = self.macro_call_cache.borrow_mut(); @@ -764,7 +766,7 @@ impl<'db> SemanticsImpl<'db> { while let Some(token) = stack.pop() { self.db.unwind_if_cancelled(); let was_not_remapped = (|| { - // are we inside an attribute macro call + // First expand into attribute invocations let containing_attribute_macro_call = self.with_ctx(|ctx| { token.value.parent_ancestors().filter_map(ast::Item::cast).find_map(|item| { if item.attrs().next().is_none() { @@ -784,53 +786,19 @@ impl<'db> SemanticsImpl<'db> { ); } - // or are we inside a function-like macro call - if let Some(tt) = - // FIXME replace map.while_some with take_while once stable - token - .value - .parent_ancestors() - .map(ast::TokenTree::cast) - .while_some() - .last() - { - let parent = tt.syntax().parent()?; - // check for derive attribute here - let macro_call = match_ast! 
{ - match parent { - ast::MacroCall(mcall) => mcall, - // attribute we failed expansion for earlier, this might be a derive invocation - // so try downmapping the token into the pseudo derive expansion - // see [hir_expand::builtin_attr_macro] for how the pseudo derive expansion works - ast::Meta(meta) => { - let attr = meta.parent_attr()?; - let adt = attr.syntax().parent().and_then(ast::Adt::cast)?; - let call_id = self.with_ctx(|ctx| { - let (_, call_id, _) = ctx.attr_to_derive_macro_call( - token.with_value(&adt), - token.with_value(attr), - )?; - Some(call_id) - })?; - let file_id = call_id.as_file(); - return process_expansion_for_token( - &mut stack, - file_id, - Some(adt.into()), - token.as_ref(), - ); - }, - _ => return None, - } - }; + // Then check for token trees, that means we are either in a function-like macro or + // secondary attribute inputs + let tt = token.value.parent_ancestors().map_while(ast::TokenTree::cast).last()?; + let parent = tt.syntax().parent()?; - if tt.left_delimiter_token().map_or(false, |it| it == token.value) { - return None; - } - if tt.right_delimiter_token().map_or(false, |it| it == token.value) { - return None; - } + if tt.left_delimiter_token().map_or(false, |it| it == token.value) { + return None; + } + if tt.right_delimiter_token().map_or(false, |it| it == token.value) { + return None; + } + if let Some(macro_call) = ast::MacroCall::cast(parent.clone()) { let mcall = token.with_value(macro_call); let file_id = match mcache.get(&mcall) { Some(&it) => it, @@ -840,11 +808,75 @@ impl<'db> SemanticsImpl<'db> { it } }; - return process_expansion_for_token(&mut stack, file_id, None, token.as_ref()); - } + process_expansion_for_token(&mut stack, file_id, None, token.as_ref()) + } else if let Some(meta) = ast::Meta::cast(parent.clone()) { + // attribute we failed expansion for earlier, this might be a derive invocation + // or derive helper attribute + let attr = meta.parent_attr()?; - // outside of a macro invocation so this is a "final" token - None + let adt = if let Some(adt) = attr.syntax().parent().and_then(ast::Adt::cast) { + // this might be a derive, or a derive helper on an ADT + let derive_call = self.with_ctx(|ctx| { + // so try downmapping the token into the pseudo derive expansion + // see [hir_expand::builtin_attr_macro] for how the pseudo derive expansion works + ctx.attr_to_derive_macro_call( + token.with_value(&adt), + token.with_value(attr.clone()), + ) + .map(|(_, call_id, _)| call_id) + }); + + match derive_call { + Some(call_id) => { + // resolved to a derive + let file_id = call_id.as_file(); + return process_expansion_for_token( + &mut stack, + file_id, + Some(adt.into()), + token.as_ref(), + ); + } + None => Some(adt), + } + } else { + // Otherwise this could be a derive helper on a variant or field + if let Some(field) = attr.syntax().parent().and_then(ast::RecordField::cast) + { + field.syntax().ancestors().take(4).find_map(ast::Adt::cast) + } else if let Some(field) = + attr.syntax().parent().and_then(ast::TupleField::cast) + { + field.syntax().ancestors().take(4).find_map(ast::Adt::cast) + } else if let Some(variant) = + attr.syntax().parent().and_then(ast::Variant::cast) + { + variant.syntax().ancestors().nth(2).and_then(ast::Adt::cast) + } else { + None + } + }?; + + // Not an attribute, nor a derive, so it's either a builtin or a derive helper + // Try to resolve to a derive helper and downmap + let attr_name = attr.path().and_then(|it| it.as_single_name_ref())?.as_name(); + let id = 
self.db.ast_id_map(token.file_id).ast_id(&adt); + let helpers = + def_map.derive_helpers_in_scope(InFile::new(token.file_id, id))?; + let item = Some(adt.into()); + let mut res = None; + for (_, derive) in helpers.iter().filter(|(helper, _)| *helper == attr_name) { + res = res.or(process_expansion_for_token( + &mut stack, + derive.as_file(), + item.clone(), + token.as_ref(), + )); + } + res + } else { + None + } })() .is_none(); diff --git a/crates/hir/src/semantics/source_to_def.rs b/crates/hir/src/semantics/source_to_def.rs index 5c4cfa7b45..ba9a1cfb6b 100644 --- a/crates/hir/src/semantics/source_to_def.rs +++ b/crates/hir/src/semantics/source_to_def.rs @@ -247,6 +247,7 @@ impl SourceToDefCtx<'_, '_> { map[keys::ATTR_MACRO_CALL].get(&src.value).copied() } + /// (AttrId, derive attribute call id, derive call ids) pub(super) fn attr_to_derive_macro_call( &mut self, item: InFile<&ast::Adt>, @@ -257,6 +258,7 @@ impl SourceToDefCtx<'_, '_> { .get(&src.value) .map(|&(attr_id, call_id, ref ids)| (attr_id, call_id, &**ids)) } + pub(super) fn has_derives(&mut self, adt: InFile<&ast::Adt>) -> bool { self.dyn_map(adt).as_ref().map_or(false, |map| !map[keys::DERIVE_MACRO_CALL].is_empty()) } diff --git a/crates/ide-db/src/rename.rs b/crates/ide-db/src/rename.rs index bb466e43e7..dc1626f439 100644 --- a/crates/ide-db/src/rename.rs +++ b/crates/ide-db/src/rename.rs @@ -316,14 +316,20 @@ pub fn source_edit_from_references( // macros can cause multiple refs to occur for the same text range, so keep track of what we have edited so far let mut edited_ranges = Vec::new(); for &FileReference { range, ref name, .. } in references { + let name_range = name.syntax().text_range(); + if name_range.len() != range.len() { + // This usage comes from a different token kind that was downmapped to a NameLike in a macro + // Renaming this will most likely break things syntax-wise + continue; + } let has_emitted_edit = match name { // if the ranges differ then the node is inside a macro call, we can't really attempt // to make special rewrites like shorthand syntax and such, so just rename the node in // the macro input - ast::NameLike::NameRef(name_ref) if name_ref.syntax().text_range() == range => { + ast::NameLike::NameRef(name_ref) if name_range == range => { source_edit_from_name_ref(&mut edit, name_ref, new_name, def) } - ast::NameLike::Name(name) if name.syntax().text_range() == range => { + ast::NameLike::Name(name) if name_range == range => { source_edit_from_name(&mut edit, name, new_name) } _ => false, diff --git a/crates/ide-db/src/search.rs b/crates/ide-db/src/search.rs index c75364084e..eb4fc36438 100644 --- a/crates/ide-db/src/search.rs +++ b/crates/ide-db/src/search.rs @@ -54,7 +54,9 @@ impl IntoIterator for UsageSearchResult { #[derive(Debug, Clone)] pub struct FileReference { + /// The range of the reference in the original file pub range: TextRange, + /// The node of the reference in the (macro-)file pub name: ast::NameLike, pub category: Option, } diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs index d8867cf783..59c97f2dcf 100644 --- a/crates/ide/src/hover.rs +++ b/crates/ide/src/hover.rs @@ -115,7 +115,12 @@ pub(crate) fn hover( }); } - let descended = sema.descend_into_macros_with_same_text(original_token.clone()); + let in_attr = matches!(original_token.parent().and_then(ast::TokenTree::cast), Some(tt) if tt.syntax().ancestors().any(|it| ast::Meta::can_cast(it.kind()))); + let descended = if in_attr { + [sema.descend_into_macros_with_kind_preference(original_token.clone())].into() + 
} else { + sema.descend_into_macros_with_same_text(original_token.clone()) + }; // FIXME: Definition should include known lints and the like instead of having this special case here let hovered_lint = descended.iter().find_map(|token| { From aa1491ecde22732329069773fed3e23f6b42e14d Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sun, 24 Jul 2022 14:05:37 +0200 Subject: [PATCH 0023/1945] Record derive helper attributes, resolve them in IDE layer --- crates/hir-def/src/lib.rs | 10 +-- crates/hir-def/src/nameres.rs | 9 ++- crates/hir-def/src/nameres/collector.rs | 50 ++++++++------- crates/hir-def/src/resolver.rs | 1 + crates/hir/src/lib.rs | 11 ++++ crates/hir/src/semantics.rs | 14 ++-- crates/hir/src/source_analyzer.rs | 64 +++++++++++++++---- crates/ide-completion/src/item.rs | 1 + crates/ide-db/src/defs.rs | 13 ++-- crates/ide-db/src/lib.rs | 1 + crates/ide-db/src/path_transform.rs | 3 +- crates/ide-db/src/rename.rs | 2 + crates/ide/src/doc_links.rs | 6 +- crates/ide/src/hover/render.rs | 2 + crates/ide/src/navigation_target.rs | 2 + crates/ide/src/signature_help.rs | 3 +- crates/ide/src/syntax_highlighting.rs | 1 + .../ide/src/syntax_highlighting/highlight.rs | 1 + crates/ide/src/syntax_highlighting/inject.rs | 1 + crates/ide/src/syntax_highlighting/tags.rs | 1 + crates/rust-analyzer/src/semantic_tokens.rs | 1 + crates/rust-analyzer/src/to_proto.rs | 5 +- 22 files changed, 144 insertions(+), 58 deletions(-) diff --git a/crates/hir-def/src/lib.rs b/crates/hir-def/src/lib.rs index 0dd0a5861e..56603f4b15 100644 --- a/crates/hir-def/src/lib.rs +++ b/crates/hir-def/src/lib.rs @@ -934,11 +934,11 @@ fn derive_macro_as_call_id( derive_attr: AttrId, derive_pos: u32, krate: CrateId, - resolver: impl Fn(path::ModPath) -> Option, -) -> Result { - let def: MacroDefId = resolver(item_attr.path.clone()) + resolver: impl Fn(path::ModPath) -> Option<(MacroId, MacroDefId)>, +) -> Result<(MacroId, MacroDefId, MacroCallId), UnresolvedMacro> { + let (macro_id, def_id) = resolver(item_attr.path.clone()) .ok_or_else(|| UnresolvedMacro { path: item_attr.path.clone() })?; - let res = def.as_lazy_macro( + let call_id = def_id.as_lazy_macro( db.upcast(), krate, MacroCallKind::Derive { @@ -947,7 +947,7 @@ fn derive_macro_as_call_id( derive_attr_index: derive_attr.ast_index, }, ); - Ok(res) + Ok((macro_id, def_id, call_id)) } fn attr_macro_as_call_id( diff --git a/crates/hir-def/src/nameres.rs b/crates/hir-def/src/nameres.rs index 8165055e09..25fb302e87 100644 --- a/crates/hir-def/src/nameres.rs +++ b/crates/hir-def/src/nameres.rs @@ -76,7 +76,7 @@ use crate::{ path::ModPath, per_ns::PerNs, visibility::Visibility, - AstId, BlockId, BlockLoc, FunctionId, LocalModuleId, ModuleId, ProcMacroId, + AstId, BlockId, BlockLoc, FunctionId, LocalModuleId, MacroId, ModuleId, ProcMacroId, }; /// Contains the results of (early) name resolution. @@ -108,7 +108,7 @@ pub struct DefMap { proc_macro_loading_error: Option>, /// Tracks which custom derives are in scope for an item, to allow resolution of derive helper /// attributes. - derive_helpers_in_scope: FxHashMap, Vec<(Name, MacroCallId)>>, + derive_helpers_in_scope: FxHashMap, Vec<(Name, MacroId, MacroCallId)>>, /// Custom attributes registered with `#![register_attr]`. 
registered_attrs: Vec, @@ -299,7 +299,10 @@ impl DefMap { self.modules.iter() } - pub fn derive_helpers_in_scope(&self, id: AstId) -> Option<&[(Name, MacroCallId)]> { + pub fn derive_helpers_in_scope( + &self, + id: AstId, + ) -> Option<&[(Name, MacroId, MacroCallId)]> { self.derive_helpers_in_scope.get(&id.map(|it| it.upcast())).map(Deref::deref) } diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs index e14d29952d..f394c54171 100644 --- a/crates/hir-def/src/nameres/collector.rs +++ b/crates/hir-def/src/nameres/collector.rs @@ -18,7 +18,7 @@ use hir_expand::{ ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind, }; -use itertools::Itertools; +use itertools::{izip, Itertools}; use la_arena::Idx; use limit::Limit; use rustc_hash::{FxHashMap, FxHashSet}; @@ -1055,7 +1055,7 @@ impl DefCollector<'_> { }; let mut res = ReachedFixedPoint::Yes; macros.retain(|directive| { - let resolver = |path| { + let resolver2 = |path| { let resolved_res = self.def_map.resolve_path_fp_with_macro( self.db, ResolveMode::Other, @@ -1063,8 +1063,12 @@ impl DefCollector<'_> { &path, BuiltinShadowMode::Module, ); - resolved_res.resolved_def.take_macros().map(|it| macro_id_to_def_id(self.db, it)) + resolved_res + .resolved_def + .take_macros() + .map(|it| (it, macro_id_to_def_id(self.db, it))) }; + let resolver = |path| resolver2(path).map(|(_, it)| it); match &directive.kind { MacroDirectiveKind::FnLike { ast_id, expand_to } => { @@ -1083,21 +1087,37 @@ impl DefCollector<'_> { } } MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos } => { - let call_id = derive_macro_as_call_id( + let id = derive_macro_as_call_id( self.db, ast_id, *derive_attr, *derive_pos as u32, self.def_map.krate, - &resolver, + &resolver2, ); - if let Ok(call_id) = call_id { + + if let Ok((macro_id, def_id, call_id)) = id { self.def_map.modules[directive.module_id].scope.set_derive_macro_invoc( ast_id.ast_id, call_id, *derive_attr, *derive_pos, ); + // Record its helper attributes. + if def_id.krate != self.def_map.krate { + let def_map = self.db.crate_def_map(def_id.krate); + if let Some(helpers) = def_map.exported_derives.get(&def_id) { + self.def_map + .derive_helpers_in_scope + .entry(ast_id.ast_id.map(|it| it.upcast())) + .or_default() + .extend(izip!( + helpers.iter().cloned(), + iter::repeat(macro_id), + iter::repeat(call_id), + )); + } + } push_resolved(directive, call_id); res = ReachedFixedPoint::No; @@ -1129,7 +1149,7 @@ impl DefCollector<'_> { if let Some(ident) = path.as_ident() { if let Some(helpers) = self.def_map.derive_helpers_in_scope.get(&ast_id) { - if helpers.iter().any(|(it, _)| it == ident) { + if helpers.iter().any(|(it, ..)| it == ident) { cov_mark::hit!(resolved_derive_helper); // Resolved to derive helper. Collect the item's attributes again, // starting after the derive helper. @@ -1144,7 +1164,7 @@ impl DefCollector<'_> { }; if matches!( def, - MacroDefId { kind:MacroDefKind::BuiltInAttr(expander, _),.. } + MacroDefId { kind:MacroDefKind::BuiltInAttr(expander, _),.. } if expander.is_derive() ) { // Resolved to `#[derive]` @@ -1313,20 +1333,6 @@ impl DefCollector<'_> { self.def_map.diagnostics.push(diag); } - // If we've just resolved a derive, record its helper attributes. - if let MacroCallKind::Derive { ast_id, .. 
} = &loc.kind { - if loc.def.krate != self.def_map.krate { - let def_map = self.db.crate_def_map(loc.def.krate); - if let Some(helpers) = def_map.exported_derives.get(&loc.def) { - self.def_map - .derive_helpers_in_scope - .entry(ast_id.map(|it| it.upcast())) - .or_default() - .extend(helpers.iter().cloned().zip(std::iter::repeat(macro_call_id))); - } - } - } - // Then, fetch and process the item tree. This will reuse the expansion result from above. let item_tree = self.db.file_item_tree(file_id); let mod_dir = self.mod_dirs[&module_id].clone(); diff --git a/crates/hir-def/src/resolver.rs b/crates/hir-def/src/resolver.rs index 0c1ae8a064..3163fa0f93 100644 --- a/crates/hir-def/src/resolver.rs +++ b/crates/hir-def/src/resolver.rs @@ -149,6 +149,7 @@ impl Resolver { self.resolve_module_path(db, path, BuiltinShadowMode::Module) } + // FIXME: This shouldn't exist pub fn resolve_module_path_in_trait_assoc_items( &self, db: &dyn DefDatabase, diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 9ffbb3964c..86b5bd3c2c 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -2252,6 +2252,17 @@ impl Local { } } +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +pub struct DeriveHelper { + pub(crate) derive: MacroId, +} + +impl DeriveHelper { + pub fn derive(&self) -> Macro { + Macro { id: self.derive.into() } + } +} + // FIXME: Wrong name? This is could also be a registered attribute #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] pub struct BuiltinAttr { diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs index 218e15989a..fc8f23f19a 100644 --- a/crates/hir/src/semantics.rs +++ b/crates/hir/src/semantics.rs @@ -29,9 +29,9 @@ use crate::{ db::HirDatabase, semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx}, source_analyzer::{resolve_hir_path, SourceAnalyzer}, - Access, BindingMode, BuiltinAttr, Callable, ConstParam, Crate, Field, Function, HasSource, - HirFileId, Impl, InFile, Label, LifetimeParam, Local, Macro, Module, ModuleDef, Name, Path, - ScopeDef, ToolModule, Trait, Type, TypeAlias, TypeParam, VariantDef, + Access, BindingMode, BuiltinAttr, Callable, ConstParam, Crate, DeriveHelper, Field, Function, + HasSource, HirFileId, Impl, InFile, Label, LifetimeParam, Local, Macro, Module, ModuleDef, + Name, Path, ScopeDef, ToolModule, Trait, Type, TypeAlias, TypeParam, VariantDef, }; #[derive(Debug, Clone, PartialEq, Eq)] @@ -47,6 +47,7 @@ pub enum PathResolution { SelfType(Impl), BuiltinAttr(BuiltinAttr), ToolModule(ToolModule), + DeriveHelper(DeriveHelper), } impl PathResolution { @@ -71,6 +72,7 @@ impl PathResolution { PathResolution::BuiltinAttr(_) | PathResolution::ToolModule(_) | PathResolution::Local(_) + | PathResolution::DeriveHelper(_) | PathResolution::ConstParam(_) => None, PathResolution::TypeParam(param) => Some(TypeNs::GenericParam((*param).into())), PathResolution::SelfType(impl_def) => Some(TypeNs::SelfType((*impl_def).into())), @@ -856,7 +858,9 @@ impl<'db> SemanticsImpl<'db> { None } }?; - + if !self.with_ctx(|ctx| ctx.has_derives(InFile::new(token.file_id, &adt))) { + return None; + } // Not an attribute, nor a derive, so it's either a builtin or a derive helper // Try to resolve to a derive helper and downmap let attr_name = attr.path().and_then(|it| it.as_single_name_ref())?.as_name(); @@ -865,7 +869,7 @@ impl<'db> SemanticsImpl<'db> { def_map.derive_helpers_in_scope(InFile::new(token.file_id, id))?; let item = Some(adt.into()); let mut res = None; - for (_, derive) in helpers.iter().filter(|(helper, _)| 
*helper == attr_name) { + for (.., derive) in helpers.iter().filter(|(helper, ..)| *helper == attr_name) { res = res.or(process_expansion_for_token( &mut stack, derive.as_file(), diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs index e89f8a5429..d57a73ade3 100644 --- a/crates/hir/src/source_analyzer.rs +++ b/crates/hir/src/source_analyzer.rs @@ -43,8 +43,8 @@ use syntax::{ use crate::{ db::HirDatabase, semantics::PathResolution, Adt, AssocItem, BindingMode, BuiltinAttr, - BuiltinType, Callable, Const, Field, Function, Local, Macro, ModuleDef, Static, Struct, - ToolModule, Trait, Type, TypeAlias, Variant, + BuiltinType, Callable, Const, DeriveHelper, Field, Function, Local, Macro, ModuleDef, Static, + Struct, ToolModule, Trait, Type, TypeAlias, Variant, }; /// `SourceAnalyzer` is a convenience wrapper which exposes HIR API in terms of @@ -429,19 +429,21 @@ impl SourceAnalyzer { } } - let is_path_of_attr = path + let meta_path = path .syntax() .ancestors() - .map(|it| it.kind()) - .take_while(|&kind| ast::Path::can_cast(kind) || ast::Meta::can_cast(kind)) + .take_while(|it| { + let kind = it.kind(); + ast::Path::can_cast(kind) || ast::Meta::can_cast(kind) + }) .last() - .map_or(false, ast::Meta::can_cast); + .and_then(ast::Meta::cast); // Case where path is a qualifier of another path, e.g. foo::bar::Baz where we are // trying to resolve foo::bar. if path.parent_path().is_some() { return match resolve_hir_path_qualifier(db, &self.resolver, &hir_path) { - None if is_path_of_attr => { + None if meta_path.is_some() => { path.first_segment().and_then(|it| it.name_ref()).and_then(|name_ref| { ToolModule::by_name(db, self.resolver.krate().into(), &name_ref.text()) .map(PathResolution::ToolModule) @@ -449,16 +451,50 @@ impl SourceAnalyzer { } res => res, }; - } else if is_path_of_attr { + } else if let Some(meta_path) = meta_path { // Case where we are resolving the final path segment of a path in an attribute // in this case we have to check for inert/builtin attributes and tools and prioritize // resolution of attributes over other namespaces - let name_ref = path.as_single_name_ref(); - let builtin = name_ref.as_ref().and_then(|name_ref| { - BuiltinAttr::by_name(db, self.resolver.krate().into(), &name_ref.text()) - }); - if let Some(_) = builtin { - return builtin.map(PathResolution::BuiltinAttr); + if let Some(name_ref) = path.as_single_name_ref() { + let builtin = + BuiltinAttr::by_name(db, self.resolver.krate().into(), &name_ref.text()); + if let Some(_) = builtin { + return builtin.map(PathResolution::BuiltinAttr); + } + + if let Some(attr) = meta_path.parent_attr() { + let adt = if let Some(field) = + attr.syntax().parent().and_then(ast::RecordField::cast) + { + field.syntax().ancestors().take(4).find_map(ast::Adt::cast) + } else if let Some(field) = + attr.syntax().parent().and_then(ast::TupleField::cast) + { + field.syntax().ancestors().take(4).find_map(ast::Adt::cast) + } else if let Some(variant) = + attr.syntax().parent().and_then(ast::Variant::cast) + { + variant.syntax().ancestors().nth(2).and_then(ast::Adt::cast) + } else { + None + }; + if let Some(adt) = adt { + let ast_id = db.ast_id_map(self.file_id).ast_id(&adt); + if let Some(helpers) = self + .resolver + .def_map() + .derive_helpers_in_scope(InFile::new(self.file_id, ast_id)) + { + // FIXME: Multiple derives can have the same helper + let name_ref = name_ref.as_name(); + if let Some(&(_, derive, _)) = + helpers.iter().find(|(name, ..)| *name == name_ref) + { + return 
Some(PathResolution::DeriveHelper(DeriveHelper { derive })); + } + } + } + } } return match resolve_hir_path_as_macro(db, &self.resolver, &hir_path) { Some(m) => Some(PathResolution::Def(ModuleDef::Macro(m))), diff --git a/crates/ide-completion/src/item.rs b/crates/ide-completion/src/item.rs index 27482ea489..27c3ccb35a 100644 --- a/crates/ide-completion/src/item.rs +++ b/crates/ide-completion/src/item.rs @@ -292,6 +292,7 @@ impl CompletionItemKind { SymbolKind::Const => "ct", SymbolKind::ConstParam => "cp", SymbolKind::Derive => "de", + SymbolKind::DeriveHelper => "dh", SymbolKind::Enum => "en", SymbolKind::Field => "fd", SymbolKind::Function => "fn", diff --git a/crates/ide-db/src/defs.rs b/crates/ide-db/src/defs.rs index 692a315720..a9a78e6729 100644 --- a/crates/ide-db/src/defs.rs +++ b/crates/ide-db/src/defs.rs @@ -7,9 +7,9 @@ use arrayvec::ArrayVec; use hir::{ - Adt, AsAssocItem, AssocItem, BuiltinAttr, BuiltinType, Const, Crate, Field, Function, - GenericParam, HasVisibility, Impl, ItemInNs, Label, Local, Macro, Module, ModuleDef, Name, - PathResolution, Semantics, Static, ToolModule, Trait, TypeAlias, Variant, Visibility, + Adt, AsAssocItem, AssocItem, BuiltinAttr, BuiltinType, Const, Crate, DeriveHelper, Field, + Function, GenericParam, HasVisibility, Impl, ItemInNs, Label, Local, Macro, Module, ModuleDef, + Name, PathResolution, Semantics, Static, ToolModule, Trait, TypeAlias, Variant, Visibility, }; use stdx::impl_from; use syntax::{ @@ -37,6 +37,7 @@ pub enum Definition { Local(Local), GenericParam(GenericParam), Label(Label), + DeriveHelper(DeriveHelper), BuiltinAttr(BuiltinAttr), ToolModule(ToolModule), } @@ -69,6 +70,7 @@ impl Definition { Definition::Local(it) => it.module(db), Definition::GenericParam(it) => it.module(db), Definition::Label(it) => it.module(db), + Definition::DeriveHelper(it) => it.derive().module(db), Definition::BuiltinAttr(_) | Definition::BuiltinType(_) | Definition::ToolModule(_) => { return None } @@ -94,7 +96,8 @@ impl Definition { | Definition::SelfType(_) | Definition::Local(_) | Definition::GenericParam(_) - | Definition::Label(_) => return None, + | Definition::Label(_) + | Definition::DeriveHelper(_) => return None, }; Some(vis) } @@ -118,6 +121,7 @@ impl Definition { Definition::Label(it) => it.name(db), Definition::BuiltinAttr(_) => return None, // FIXME Definition::ToolModule(_) => return None, // FIXME + Definition::DeriveHelper(_) => return None, // FIXME }; Some(name) } @@ -500,6 +504,7 @@ impl From for Definition { PathResolution::SelfType(impl_def) => Definition::SelfType(impl_def), PathResolution::BuiltinAttr(attr) => Definition::BuiltinAttr(attr), PathResolution::ToolModule(tool) => Definition::ToolModule(tool), + PathResolution::DeriveHelper(helper) => Definition::DeriveHelper(helper), } } } diff --git a/crates/ide-db/src/lib.rs b/crates/ide-db/src/lib.rs index 26648b4d76..966bba616f 100644 --- a/crates/ide-db/src/lib.rs +++ b/crates/ide-db/src/lib.rs @@ -173,6 +173,7 @@ pub enum SymbolKind { Const, ConstParam, Derive, + DeriveHelper, Enum, Field, Function, diff --git a/crates/ide-db/src/path_transform.rs b/crates/ide-db/src/path_transform.rs index d78b8758d6..40af9e6fe2 100644 --- a/crates/ide-db/src/path_transform.rs +++ b/crates/ide-db/src/path_transform.rs @@ -224,7 +224,8 @@ impl<'a> Ctx<'a> { | hir::PathResolution::SelfType(_) | hir::PathResolution::Def(_) | hir::PathResolution::BuiltinAttr(_) - | hir::PathResolution::ToolModule(_) => (), + | hir::PathResolution::ToolModule(_) + | hir::PathResolution::DeriveHelper(_) => (), } 
Some(()) } diff --git a/crates/ide-db/src/rename.rs b/crates/ide-db/src/rename.rs index dc1626f439..517fe3f246 100644 --- a/crates/ide-db/src/rename.rs +++ b/crates/ide-db/src/rename.rs @@ -156,6 +156,8 @@ impl Definition { Definition::SelfType(_) => return None, Definition::BuiltinAttr(_) => return None, Definition::ToolModule(_) => return None, + // FIXME: This should be doable in theory + Definition::DeriveHelper(_) => return None, }; return res; diff --git a/crates/ide/src/doc_links.rs b/crates/ide/src/doc_links.rs index fed327f52b..582e9fe7e8 100644 --- a/crates/ide/src/doc_links.rs +++ b/crates/ide/src/doc_links.rs @@ -190,7 +190,8 @@ pub(crate) fn resolve_doc_path_for_def( | Definition::SelfType(_) | Definition::Local(_) | Definition::GenericParam(_) - | Definition::Label(_) => None, + | Definition::Label(_) + | Definition::DeriveHelper(_) => None, } .map(Definition::from) } @@ -515,7 +516,8 @@ fn filename_and_frag_for_def( | Definition::GenericParam(_) | Definition::Label(_) | Definition::BuiltinAttr(_) - | Definition::ToolModule(_) => return None, + | Definition::ToolModule(_) + | Definition::DeriveHelper(_) => return None, }; Some((def, res, None)) diff --git a/crates/ide/src/hover/render.rs b/crates/ide/src/hover/render.rs index 925aaa61cd..fcdb23fa18 100644 --- a/crates/ide/src/hover/render.rs +++ b/crates/ide/src/hover/render.rs @@ -370,6 +370,8 @@ pub(super) fn definition( // FIXME: We should be able to show more info about these Definition::BuiltinAttr(it) => return render_builtin_attr(db, it), Definition::ToolModule(it) => return Some(Markup::fenced_block(&it.name(db))), + // FIXME: it.name(db) + Definition::DeriveHelper(_it) => ("derive-helper".to_owned(), None), }; let docs = match config.documentation { diff --git a/crates/ide/src/navigation_target.rs b/crates/ide/src/navigation_target.rs index 1fb3b5ec3f..9f049e298a 100644 --- a/crates/ide/src/navigation_target.rs +++ b/crates/ide/src/navigation_target.rs @@ -196,6 +196,8 @@ impl TryToNav for Definition { Definition::BuiltinType(_) => None, Definition::ToolModule(_) => None, Definition::BuiltinAttr(_) => None, + // FIXME: The focus range should be set to the helper declaration + Definition::DeriveHelper(it) => it.derive().try_to_nav(db), } } } diff --git a/crates/ide/src/signature_help.rs b/crates/ide/src/signature_help.rs index ba287d13ae..fedc1a4358 100644 --- a/crates/ide/src/signature_help.rs +++ b/crates/ide/src/signature_help.rs @@ -237,7 +237,8 @@ fn signature_help_for_generics( | hir::PathResolution::Local(_) | hir::PathResolution::TypeParam(_) | hir::PathResolution::ConstParam(_) - | hir::PathResolution::SelfType(_) => return None, + | hir::PathResolution::SelfType(_) + | hir::PathResolution::DeriveHelper(_) => return None, }; generic_def diff --git a/crates/ide/src/syntax_highlighting.rs b/crates/ide/src/syntax_highlighting.rs index d7ad6a7579..eef717c16f 100644 --- a/crates/ide/src/syntax_highlighting.rs +++ b/crates/ide/src/syntax_highlighting.rs @@ -107,6 +107,7 @@ pub struct HlRange { // builtinType:: Emitted for builtin types like `u32`, `str` and `f32`. // comment:: Emitted for comments. // constParameter:: Emitted for const parameters. +// deriveHelper:: Emitted for derive helper attributes. // enumMember:: Emitted for enum variants. // generic:: Emitted for generic tokens that have no mapping. // keyword:: Emitted for keywords. 
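The `deriveHelper::` entry added to the doc list above describes the new semantic token; a minimal sketch of the kind of code it applies to (the `Widget`/`widget` names are illustrative assumptions, not from this patch):

// Assuming a derive `Widget` declared with `attributes(widget)` is in scope,
// the `widget` paths below resolve to Definition::DeriveHelper and are emitted
// with the `deriveHelper` semantic token instead of an unresolved reference.
#[derive(Widget)]
struct Button {
    #[widget(label = "Ok")]
    text: String,
}
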
diff --git a/crates/ide/src/syntax_highlighting/highlight.rs b/crates/ide/src/syntax_highlighting/highlight.rs index fd3723ed45..9395e914c4 100644 --- a/crates/ide/src/syntax_highlighting/highlight.rs +++ b/crates/ide/src/syntax_highlighting/highlight.rs @@ -472,6 +472,7 @@ fn highlight_def( Definition::Label(_) => Highlight::new(HlTag::Symbol(SymbolKind::Label)), Definition::BuiltinAttr(_) => Highlight::new(HlTag::Symbol(SymbolKind::BuiltinAttr)), Definition::ToolModule(_) => Highlight::new(HlTag::Symbol(SymbolKind::ToolModule)), + Definition::DeriveHelper(_) => Highlight::new(HlTag::Symbol(SymbolKind::DeriveHelper)), }; let def_crate = def.krate(db); diff --git a/crates/ide/src/syntax_highlighting/inject.rs b/crates/ide/src/syntax_highlighting/inject.rs index 94d573a30b..f779a985a9 100644 --- a/crates/ide/src/syntax_highlighting/inject.rs +++ b/crates/ide/src/syntax_highlighting/inject.rs @@ -270,6 +270,7 @@ fn module_def_to_hl_tag(def: Definition) -> HlTag { Definition::Label(_) => SymbolKind::Label, Definition::BuiltinAttr(_) => SymbolKind::BuiltinAttr, Definition::ToolModule(_) => SymbolKind::ToolModule, + Definition::DeriveHelper(_) => SymbolKind::DeriveHelper, }; HlTag::Symbol(symbol) } diff --git a/crates/ide/src/syntax_highlighting/tags.rs b/crates/ide/src/syntax_highlighting/tags.rs index b900dadcfa..5262770f30 100644 --- a/crates/ide/src/syntax_highlighting/tags.rs +++ b/crates/ide/src/syntax_highlighting/tags.rs @@ -134,6 +134,7 @@ impl HlTag { SymbolKind::Const => "constant", SymbolKind::ConstParam => "const_param", SymbolKind::Derive => "derive", + SymbolKind::DeriveHelper => "derive_helper", SymbolKind::Enum => "enum", SymbolKind::Field => "field", SymbolKind::Function => "function", diff --git a/crates/rust-analyzer/src/semantic_tokens.rs b/crates/rust-analyzer/src/semantic_tokens.rs index 5fb945ea98..6c78b5df1a 100644 --- a/crates/rust-analyzer/src/semantic_tokens.rs +++ b/crates/rust-analyzer/src/semantic_tokens.rs @@ -54,6 +54,7 @@ define_semantic_token_types![ (COMPARISON, "comparison"), (CONST_PARAMETER, "constParameter"), (DERIVE, "derive"), + (DERIVE_HELPER, "deriveHelper"), (DOT, "dot"), (ESCAPE_SEQUENCE, "escapeSequence"), (FORMAT_SPECIFIER, "formatSpecifier"), diff --git a/crates/rust-analyzer/src/to_proto.rs b/crates/rust-analyzer/src/to_proto.rs index 9c8e618b6e..7f4fa57fa1 100644 --- a/crates/rust-analyzer/src/to_proto.rs +++ b/crates/rust-analyzer/src/to_proto.rs @@ -53,7 +53,8 @@ pub(crate) fn symbol_kind(symbol_kind: SymbolKind) -> lsp_types::SymbolKind { SymbolKind::Macro | SymbolKind::BuiltinAttr | SymbolKind::Attribute - | SymbolKind::Derive => lsp_types::SymbolKind::FUNCTION, + | SymbolKind::Derive + | SymbolKind::DeriveHelper => lsp_types::SymbolKind::FUNCTION, SymbolKind::Module | SymbolKind::ToolModule => lsp_types::SymbolKind::MODULE, SymbolKind::TypeAlias | SymbolKind::TypeParam | SymbolKind::SelfType => { lsp_types::SymbolKind::TYPE_PARAMETER @@ -117,6 +118,7 @@ pub(crate) fn completion_item_kind( SymbolKind::Const => lsp_types::CompletionItemKind::CONSTANT, SymbolKind::ConstParam => lsp_types::CompletionItemKind::TYPE_PARAMETER, SymbolKind::Derive => lsp_types::CompletionItemKind::FUNCTION, + SymbolKind::DeriveHelper => lsp_types::CompletionItemKind::FUNCTION, SymbolKind::Enum => lsp_types::CompletionItemKind::ENUM, SymbolKind::Field => lsp_types::CompletionItemKind::FIELD, SymbolKind::Function => lsp_types::CompletionItemKind::FUNCTION, @@ -561,6 +563,7 @@ fn semantic_token_type_and_modifiers( HlTag::Symbol(symbol) => match symbol { 
SymbolKind::Attribute => semantic_tokens::ATTRIBUTE, SymbolKind::Derive => semantic_tokens::DERIVE, + SymbolKind::DeriveHelper => semantic_tokens::DERIVE_HELPER, SymbolKind::Module => lsp_types::SemanticTokenType::NAMESPACE, SymbolKind::Impl => semantic_tokens::TYPE_ALIAS, SymbolKind::Field => lsp_types::SemanticTokenType::PROPERTY, From ddad2847ab9a59d4b2fc7afb976a261c8c914686 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sun, 24 Jul 2022 14:32:39 +0200 Subject: [PATCH 0024/1945] Allow name querying for derive helpers --- crates/hir-def/src/data.rs | 19 +++++++++++++------ crates/hir-def/src/nameres.rs | 4 ++-- crates/hir/src/lib.rs | 15 +++++++++++++++ crates/hir/src/source_analyzer.rs | 13 ++++++++++--- crates/ide-db/src/defs.rs | 2 +- crates/ide-db/src/search.rs | 8 ++++---- crates/ide/src/hover/render.rs | 3 +-- crates/ide/src/syntax_highlighting.rs | 7 +++++++ 8 files changed, 53 insertions(+), 18 deletions(-) diff --git a/crates/hir-def/src/data.rs b/crates/hir-def/src/data.rs index 4309411419..35c8708955 100644 --- a/crates/hir-def/src/data.rs +++ b/crates/hir-def/src/data.rs @@ -12,7 +12,7 @@ use crate::{ db::DefDatabase, intern::Interned, item_tree::{self, AssocItem, FnFlags, ItemTree, ItemTreeId, ModItem, Param, TreeId}, - nameres::{attr_resolution::ResolvedAttr, DefMap}, + nameres::{attr_resolution::ResolvedAttr, proc_macro::ProcMacroKind, DefMap}, type_ref::{TraitRef, TypeBound, TypeRef}, visibility::RawVisibility, AssocItemId, AstIdWithPath, ConstId, ConstLoc, FunctionId, FunctionLoc, HasModule, ImplId, @@ -348,7 +348,8 @@ impl MacroRulesData { #[derive(Debug, Clone, PartialEq, Eq)] pub struct ProcMacroData { pub name: Name, - // FIXME: Record deriver helper here? + /// Derive helpers, if this is a derive + pub helpers: Option>, } impl ProcMacroData { @@ -360,17 +361,23 @@ impl ProcMacroData { let item_tree = loc.id.item_tree(db); let makro = &item_tree[loc.id.value]; - let name = if let Some(def) = item_tree + let (name, helpers) = if let Some(def) = item_tree .attrs(db, loc.container.krate(), ModItem::from(loc.id.value).into()) .parse_proc_macro_decl(&makro.name) { - def.name + ( + def.name, + match def.kind { + ProcMacroKind::CustomDerive { helpers } => Some(helpers), + ProcMacroKind::FnLike | ProcMacroKind::Attr => None, + }, + ) } else { // eeeh... stdx::never!("proc macro declaration is not a proc macro"); - makro.name.clone() + (makro.name.clone(), None) }; - Arc::new(ProcMacroData { name }) + Arc::new(ProcMacroData { name, helpers }) } } diff --git a/crates/hir-def/src/nameres.rs b/crates/hir-def/src/nameres.rs index 25fb302e87..45f631936d 100644 --- a/crates/hir-def/src/nameres.rs +++ b/crates/hir-def/src/nameres.rs @@ -48,11 +48,11 @@ //! 
the result pub mod attr_resolution; -mod collector; +pub mod proc_macro; pub mod diagnostics; +mod collector; mod mod_resolution; mod path_resolution; -mod proc_macro; #[cfg(test)] mod tests; diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 86b5bd3c2c..d4925455d7 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -2255,12 +2255,27 @@ impl Local { #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] pub struct DeriveHelper { pub(crate) derive: MacroId, + pub(crate) idx: usize, } impl DeriveHelper { pub fn derive(&self) -> Macro { Macro { id: self.derive.into() } } + + pub fn name(&self, db: &dyn HirDatabase) -> Name { + match self.derive { + MacroId::Macro2Id(_) => None, + MacroId::MacroRulesId(_) => None, + MacroId::ProcMacroId(proc_macro) => db + .proc_macro_data(proc_macro) + .helpers + .as_ref() + .and_then(|it| it.get(self.idx)) + .cloned(), + } + .unwrap_or_else(|| Name::missing()) + } } // FIXME: Wrong name? This is could also be a registered attribute diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs index d57a73ade3..1eb51b20c3 100644 --- a/crates/hir/src/source_analyzer.rs +++ b/crates/hir/src/source_analyzer.rs @@ -35,6 +35,7 @@ use hir_ty::{ method_resolution, Adjust, Adjustment, AutoBorrow, InferenceResult, Interner, Substitution, TyExt, TyKind, TyLoweringContext, }; +use itertools::Itertools; use smallvec::SmallVec; use syntax::{ ast::{self, AstNode}, @@ -487,10 +488,16 @@ impl SourceAnalyzer { { // FIXME: Multiple derives can have the same helper let name_ref = name_ref.as_name(); - if let Some(&(_, derive, _)) = - helpers.iter().find(|(name, ..)| *name == name_ref) + for (macro_id, mut helpers) in + helpers.iter().group_by(|(_, macro_id, ..)| macro_id).into_iter() { - return Some(PathResolution::DeriveHelper(DeriveHelper { derive })); + if let Some(idx) = helpers.position(|(name, ..)| *name == name_ref) + { + return Some(PathResolution::DeriveHelper(DeriveHelper { + derive: *macro_id, + idx, + })); + } } } } diff --git a/crates/ide-db/src/defs.rs b/crates/ide-db/src/defs.rs index a9a78e6729..aeaca00ec6 100644 --- a/crates/ide-db/src/defs.rs +++ b/crates/ide-db/src/defs.rs @@ -121,7 +121,7 @@ impl Definition { Definition::Label(it) => it.name(db), Definition::BuiltinAttr(_) => return None, // FIXME Definition::ToolModule(_) => return None, // FIXME - Definition::DeriveHelper(_) => return None, // FIXME + Definition::DeriveHelper(it) => it.name(db), }; Some(name) } diff --git a/crates/ide-db/src/search.rs b/crates/ide-db/src/search.rs index eb4fc36438..bd038cdaa0 100644 --- a/crates/ide-db/src/search.rs +++ b/crates/ide-db/src/search.rs @@ -278,16 +278,16 @@ impl Definition { } } hir::MacroKind::BuiltIn => SearchScope::crate_graph(db), - // FIXME: We don't actually see derives in derive attributes as these do not - // expand to something that references the derive macro in the output. 
- // We could get around this by doing pseudo expansions for proc_macro_derive like we - // do for the derive attribute hir::MacroKind::Derive | hir::MacroKind::Attr | hir::MacroKind::ProcMacro => { SearchScope::reverse_dependencies(db, module.krate()) } }; } + if let Definition::DeriveHelper(_) = self { + return SearchScope::reverse_dependencies(db, module.krate()); + } + let vis = self.visibility(db); if let Some(Visibility::Public) = vis { return SearchScope::reverse_dependencies(db, module.krate()); diff --git a/crates/ide/src/hover/render.rs b/crates/ide/src/hover/render.rs index fcdb23fa18..6c50a4e6ad 100644 --- a/crates/ide/src/hover/render.rs +++ b/crates/ide/src/hover/render.rs @@ -370,8 +370,7 @@ pub(super) fn definition( // FIXME: We should be able to show more info about these Definition::BuiltinAttr(it) => return render_builtin_attr(db, it), Definition::ToolModule(it) => return Some(Markup::fenced_block(&it.name(db))), - // FIXME: it.name(db) - Definition::DeriveHelper(_it) => ("derive-helper".to_owned(), None), + Definition::DeriveHelper(it) => (format!("derive_helper {}", it.name(db)), None), }; let docs = match config.documentation { diff --git a/crates/ide/src/syntax_highlighting.rs b/crates/ide/src/syntax_highlighting.rs index eef717c16f..d013d6f4b1 100644 --- a/crates/ide/src/syntax_highlighting.rs +++ b/crates/ide/src/syntax_highlighting.rs @@ -432,6 +432,13 @@ fn traverse( // let the editor do its highlighting for these tokens instead continue; } + if highlight.tag == HlTag::UnresolvedReference + && matches!(attr_or_derive_item, Some(AttrOrDerive::Derive(_)) if inside_attribute) + { + // do not emit unresolved references in derive helpers if the token mapping maps to + // something unresolvable. FIXME: There should be a way to prevent that + continue; + } if inside_attribute { highlight |= HlMod::Attribute } From b1e3daf14b14404945a14346d7b478feee7c5508 Mon Sep 17 00:00:00 2001 From: Amos Wenger Date: Tue, 26 Jul 2022 11:30:41 +0200 Subject: [PATCH 0025/1945] Find standalone proc-macro-srv on windows too --- crates/rust-analyzer/src/reload.rs | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs index 9ae361b034..b9aa13ec5b 100644 --- a/crates/rust-analyzer/src/reload.rs +++ b/crates/rust-analyzer/src/reload.rs @@ -303,6 +303,9 @@ impl GlobalState { let files_config = self.config.files(); let project_folders = ProjectFolders::new(&self.workspaces, &files_config.exclude); + let standalone_server_name = + format!("rust-analyzer-proc-macro-srv{}", std::env::consts::EXE_SUFFIX); + if self.proc_macro_clients.is_empty() { if let Some((path, args)) = self.config.proc_macro_srv() { self.proc_macro_clients = self @@ -316,10 +319,8 @@ impl GlobalState { tracing::info!("Found a cargo workspace..."); if let Some(sysroot) = sysroot.as_ref() { tracing::info!("Found a cargo workspace with a sysroot..."); - let server_path = sysroot - .root() - .join("libexec") - .join("rust-analyzer-proc-macro-srv"); + let server_path = + sysroot.root().join("libexec").join(&standalone_server_name); if std::fs::metadata(&server_path).is_ok() { tracing::info!( "And the server exists at {}", From c8ff70e924efe3adba39ac5dc2a93dbd94bde5a4 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 26 Jul 2022 16:30:45 +0200 Subject: [PATCH 0026/1945] fix: Fix server panicking on project loading when proc-macros are disabled --- crates/rust-analyzer/src/cli/load_cargo.rs | 4 +++- crates/rust-analyzer/src/reload.rs | 7 
+++++-- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/crates/rust-analyzer/src/cli/load_cargo.rs b/crates/rust-analyzer/src/cli/load_cargo.rs index 0ada4b73e8..5d1c013c32 100644 --- a/crates/rust-analyzer/src/cli/load_cargo.rs +++ b/crates/rust-analyzer/src/cli/load_cargo.rs @@ -66,7 +66,9 @@ pub fn load_workspace( }; let crate_graph = ws.to_crate_graph( - &mut |_, path: &AbsPath| load_proc_macro(proc_macro_client.as_ref(), path, &[]), + &mut |_, path: &AbsPath| { + load_proc_macro(proc_macro_client.as_ref().map_err(|e| &**e), path, &[]) + }, &mut |path: &AbsPath| { let contents = loader.load_sync(path); let path = vfs::VfsPath::from(path.to_path_buf()); diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs index b9aa13ec5b..eaab275bc6 100644 --- a/crates/rust-analyzer/src/reload.rs +++ b/crates/rust-analyzer/src/reload.rs @@ -390,7 +390,10 @@ impl GlobalState { let mut crate_graph = CrateGraph::default(); for (idx, ws) in self.workspaces.iter().enumerate() { - let proc_macro_client = self.proc_macro_clients[idx].as_ref(); + let proc_macro_client = match self.proc_macro_clients.get(idx) { + Some(res) => res.as_ref().map_err(|e| &**e), + None => Err("Proc macros are disabled"), + }; let mut load_proc_macro = move |crate_name: &str, path: &AbsPath| { load_proc_macro( proc_macro_client, @@ -574,7 +577,7 @@ impl SourceRootConfig { /// Load the proc-macros for the given lib path, replacing all expanders whose names are in `dummy_replace` /// with an identity dummy expander. pub(crate) fn load_proc_macro( - server: Result<&ProcMacroServer, &String>, + server: Result<&ProcMacroServer, &str>, path: &AbsPath, dummy_replace: &[Box], ) -> ProcMacroLoadResult { From add33b65dda4b59b46b5192f3a88a83b771af844 Mon Sep 17 00:00:00 2001 From: Zachary S Date: Tue, 26 Jul 2022 10:59:19 -0500 Subject: [PATCH 0027/1945] Remove FIXME comment for unreachable fallback. --- crates/ide-assists/src/handlers/inline_call.rs | 2 -- 1 file changed, 2 deletions(-) diff --git a/crates/ide-assists/src/handlers/inline_call.rs b/crates/ide-assists/src/handlers/inline_call.rs index 8b2f6ac75a..80d3b92559 100644 --- a/crates/ide-assists/src/handlers/inline_call.rs +++ b/crates/ide-assists/src/handlers/inline_call.rs @@ -306,8 +306,6 @@ fn inline( if let Some(body) = ast::BlockExpr::cast(insert_ws_into(fn_body.syntax().clone())) { body } else { - // FIXME(zachs18): I believe this should be unreachable, - // since insert_ws_into shouldn't change the kind of the SyntaxNode. 
fn_body.clone_for_update() } } else { From 6c379b9f4bb9774758e018944a33db5eb29622fb Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Wed, 27 Jul 2022 12:45:29 +0200 Subject: [PATCH 0028/1945] fix: Fix Semantics::original_ast_node not caching the resulting file --- crates/hir/src/semantics.rs | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs index fc8f23f19a..a75e5cafd0 100644 --- a/crates/hir/src/semantics.rs +++ b/crates/hir/src/semantics.rs @@ -924,7 +924,12 @@ impl<'db> SemanticsImpl<'db> { } fn original_ast_node(&self, node: N) -> Option { - self.wrap_node_infile(node).original_ast_node(self.db.upcast()).map(|it| it.value) + self.wrap_node_infile(node).original_ast_node(self.db.upcast()).map( + |InFile { file_id, value }| { + self.cache(find_root(value.syntax()), file_id); + value + }, + ) } fn diagnostics_display_range(&self, src: InFile) -> FileRange { From 1f8daa180fdfb847f5f82b8ac9870aebb17c9494 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Wed, 27 Jul 2022 13:48:26 +0200 Subject: [PATCH 0029/1945] fix: Honor ref expressions for compute_ref_match completions --- crates/hir-ty/src/chalk_ext.rs | 5 ++ crates/hir/src/lib.rs | 4 ++ crates/ide-completion/src/context/analysis.rs | 69 ++++++++++++------- crates/ide-completion/src/context/tests.rs | 20 ++++++ 4 files changed, 75 insertions(+), 23 deletions(-) diff --git a/crates/hir-ty/src/chalk_ext.rs b/crates/hir-ty/src/chalk_ext.rs index b0885ab003..a9c124b42d 100644 --- a/crates/hir-ty/src/chalk_ext.rs +++ b/crates/hir-ty/src/chalk_ext.rs @@ -34,6 +34,7 @@ pub trait TyExt { fn callable_sig(&self, db: &dyn HirDatabase) -> Option; fn strip_references(&self) -> &Ty; + fn strip_reference(&self) -> &Ty; /// If this is a `dyn Trait`, returns that trait. fn dyn_trait(&self) -> Option; @@ -182,6 +183,10 @@ impl TyExt for Ty { t } + fn strip_reference(&self) -> &Ty { + self.as_reference().map_or(self, |(ty, _, _)| ty) + } + fn impl_trait_bounds(&self, db: &dyn HirDatabase) -> Option> { match self.kind(Interner) { TyKind::OpaqueType(opaque_ty_id, subst) => { diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index d4925455d7..8f984210e1 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -2769,6 +2769,10 @@ impl Type { self.derived(self.ty.strip_references().clone()) } + pub fn strip_reference(&self) -> Type { + self.derived(self.ty.strip_reference().clone()) + } + pub fn is_unknown(&self) -> bool { self.ty.is_unknown() } diff --git a/crates/ide-completion/src/context/analysis.rs b/crates/ide-completion/src/context/analysis.rs index c71ffa0ed8..09a1a99eb6 100644 --- a/crates/ide-completion/src/context/analysis.rs +++ b/crates/ide-completion/src/context/analysis.rs @@ -162,11 +162,52 @@ impl<'a> CompletionContext<'a> { } /// Calculate the expected type and name of the cursor position. - fn expected_type_and_name(&self) -> (Option, Option) { + fn expected_type_and_name( + &self, + name_like: &ast::NameLike, + ) -> (Option, Option) { let mut node = match self.token.parent() { Some(it) => it, None => return (None, None), }; + + let strip_refs = |mut ty: Type| match name_like { + ast::NameLike::NameRef(n) => { + let p = match n.syntax().parent() { + Some(it) => it, + None => return ty, + }; + let top_syn = match_ast! 
{ + match p { + ast::FieldExpr(e) => e + .syntax() + .ancestors() + .map_while(ast::FieldExpr::cast) + .last() + .map(|it| it.syntax().clone()), + ast::PathSegment(e) => e + .syntax() + .ancestors() + .skip(1) + .take_while(|it| ast::Path::can_cast(it.kind()) || ast::PathExpr::can_cast(it.kind())) + .find_map(ast::PathExpr::cast) + .map(|it| it.syntax().clone()), + _ => None + } + }; + let top_syn = match top_syn { + Some(it) => it, + None => return ty, + }; + for _ in top_syn.ancestors().skip(1).map_while(ast::RefExpr::cast) { + cov_mark::hit!(expected_type_fn_param_ref); + ty = ty.strip_reference(); + } + ty + } + _ => ty, + }; + loop { break match_ast! { match node { @@ -199,13 +240,9 @@ impl<'a> CompletionContext<'a> { self.token.clone(), ).map(|ap| { let name = ap.ident().map(NameOrNameRef::Name); - let ty = if has_ref(&self.token) { - cov_mark::hit!(expected_type_fn_param_ref); - ap.ty.remove_ref() - } else { - Some(ap.ty) - }; - (ty, name) + + let ty = strip_refs(ap.ty); + (Some(ty), name) }) .unwrap_or((None, None)) }, @@ -330,8 +367,6 @@ impl<'a> CompletionContext<'a> { return None; } - (self.expected_type, self.expected_name) = self.expected_type_and_name(); - // Overwrite the path kind for derives if let Some((original_file, file_with_fake_ident, offset, origin_attr)) = derive_ctx { if let Some(ast::NameLike::NameRef(name_ref)) = @@ -389,6 +424,7 @@ impl<'a> CompletionContext<'a> { return Some(analysis); } }; + (self.expected_type, self.expected_name) = self.expected_type_and_name(&name_like); let analysis = match name_like { ast::NameLike::Lifetime(lifetime) => CompletionAnalysis::Lifetime( Self::classify_lifetime(&self.sema, original_file, lifetime)?, @@ -1141,19 +1177,6 @@ fn path_or_use_tree_qualifier(path: &ast::Path) -> Option<(ast::Path, bool)> { Some((use_tree.path()?, true)) } -fn has_ref(token: &SyntaxToken) -> bool { - let mut token = token.clone(); - for skip in [SyntaxKind::IDENT, SyntaxKind::WHITESPACE, T![mut]] { - if token.kind() == skip { - token = match token.prev_token() { - Some(it) => it, - None => return false, - } - } - } - token.kind() == T![&] -} - pub(crate) fn is_in_token_of_for_loop(element: SyntaxElement) -> bool { // oh my ... (|| { diff --git a/crates/ide-completion/src/context/tests.rs b/crates/ide-completion/src/context/tests.rs index c5557bdafb..50845b3881 100644 --- a/crates/ide-completion/src/context/tests.rs +++ b/crates/ide-completion/src/context/tests.rs @@ -391,3 +391,23 @@ fn foo($0: Foo) {} expect![[r#"ty: ?, name: ?"#]], ); } + +#[test] +fn expected_type_ref_prefix_on_field() { + check_expected_type_and_name( + r#" +fn foo(_: &mut i32) {} +struct S { + field: i32, +} + +fn main() { + let s = S { + field: 100, + }; + foo(&mut s.f$0); +} +"#, + expect!["ty: i32, name: ?"], + ); +} From 349dfc7e95c7b384a3e520db3cc26645918857d2 Mon Sep 17 00:00:00 2001 From: hi-rustin Date: Wed, 27 Jul 2022 20:18:00 +0800 Subject: [PATCH 0030/1945] Find original ast node before compute ref match in fn render Signed-off-by: hi-rustin --- crates/ide-completion/src/render/function.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/crates/ide-completion/src/render/function.rs b/crates/ide-completion/src/render/function.rs index 241de0a183..4b5535718c 100644 --- a/crates/ide-completion/src/render/function.rs +++ b/crates/ide-completion/src/render/function.rs @@ -85,7 +85,9 @@ fn render( item.ref_match(ref_match, path_ctx.path.syntax().text_range().start()); } FuncKind::Method(DotAccess { receiver: Some(receiver), .. 
}, _) => { - item.ref_match(ref_match, receiver.syntax().text_range().start()); + if let Some(original_expr) = completion.sema.original_ast_node(receiver.clone()) { + item.ref_match(ref_match, original_expr.syntax().text_range().start()); + } } _ => (), } From bf893d59b54eb3b0dd3bcf34a66f9f0703753ffb Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Wed, 27 Jul 2022 17:14:50 +0200 Subject: [PATCH 0031/1945] internal: Assume condition/iterable is missing if there is only a BlockExpr --- crates/syntax/src/ast/generated/nodes.rs | 4 --- crates/syntax/src/ast/node_ext.rs | 38 ++++++++++++++++++++++++ crates/syntax/src/tests/sourcegen_ast.rs | 2 ++ 3 files changed, 40 insertions(+), 4 deletions(-) diff --git a/crates/syntax/src/ast/generated/nodes.rs b/crates/syntax/src/ast/generated/nodes.rs index cf90ba64cf..63309a1552 100644 --- a/crates/syntax/src/ast/generated/nodes.rs +++ b/crates/syntax/src/ast/generated/nodes.rs @@ -880,7 +880,6 @@ impl ForExpr { pub fn for_token(&self) -> Option { support::token(&self.syntax, T![for]) } pub fn pat(&self) -> Option { support::child(&self.syntax) } pub fn in_token(&self) -> Option { support::token(&self.syntax, T![in]) } - pub fn iterable(&self) -> Option { support::child(&self.syntax) } } #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -890,7 +889,6 @@ pub struct IfExpr { impl ast::HasAttrs for IfExpr {} impl IfExpr { pub fn if_token(&self) -> Option { support::token(&self.syntax, T![if]) } - pub fn condition(&self) -> Option { support::child(&self.syntax) } pub fn else_token(&self) -> Option { support::token(&self.syntax, T![else]) } } @@ -1051,7 +1049,6 @@ pub struct WhileExpr { impl ast::HasAttrs for WhileExpr {} impl WhileExpr { pub fn while_token(&self) -> Option { support::token(&self.syntax, T![while]) } - pub fn condition(&self) -> Option { support::child(&self.syntax) } } #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -1170,7 +1167,6 @@ pub struct MatchGuard { } impl MatchGuard { pub fn if_token(&self) -> Option { support::token(&self.syntax, T![if]) } - pub fn condition(&self) -> Option { support::child(&self.syntax) } } #[derive(Debug, Clone, PartialEq, Eq, Hash)] diff --git a/crates/syntax/src/ast/node_ext.rs b/crates/syntax/src/ast/node_ext.rs index b143df1f83..bb92c51e9a 100644 --- a/crates/syntax/src/ast/node_ext.rs +++ b/crates/syntax/src/ast/node_ext.rs @@ -806,6 +806,19 @@ impl ast::GenericParamList { } } +impl ast::ForExpr { + pub fn iterable(&self) -> Option { + // If the iterable is a BlockExpr, check if the body is missing. + // If it is assume the iterable is the expression that is missing instead. + let mut exprs = support::children(self.syntax()); + let first = exprs.next(); + match first { + Some(ast::Expr::BlockExpr(_)) => exprs.next().and(first), + first => first, + } + } +} + impl ast::HasLoopBody for ast::ForExpr { fn loop_body(&self) -> Option { let mut exprs = support::children(self.syntax()); @@ -815,6 +828,19 @@ impl ast::HasLoopBody for ast::ForExpr { } } +impl ast::WhileExpr { + pub fn condition(&self) -> Option { + // If the condition is a BlockExpr, check if the body is missing. + // If it is assume the condition is the expression that is missing instead. 
+ let mut exprs = support::children(self.syntax()); + let first = exprs.next(); + match first { + Some(ast::Expr::BlockExpr(_)) => exprs.next().and(first), + first => first, + } + } +} + impl ast::HasLoopBody for ast::WhileExpr { fn loop_body(&self) -> Option { let mut exprs = support::children(self.syntax()); @@ -835,3 +861,15 @@ impl From for ast::Item { } } } + +impl ast::IfExpr { + pub fn condition(&self) -> Option { + support::child(&self.syntax) + } +} + +impl ast::MatchGuard { + pub fn condition(&self) -> Option { + support::child(&self.syntax) + } +} diff --git a/crates/syntax/src/tests/sourcegen_ast.rs b/crates/syntax/src/tests/sourcegen_ast.rs index 4cfb8075cb..6d27662251 100644 --- a/crates/syntax/src/tests/sourcegen_ast.rs +++ b/crates/syntax/src/tests/sourcegen_ast.rs @@ -682,6 +682,8 @@ fn lower_rule(acc: &mut Vec, grammar: &Grammar, label: Option<&String>, r | "value" | "trait" | "self_ty" + | "iterable" + | "condition" ); if manually_implemented { return; From a96948195227f98dd18eecea8cd85c58406b6388 Mon Sep 17 00:00:00 2001 From: Paul Lange Date: Tue, 26 Jul 2022 16:10:26 +0200 Subject: [PATCH 0032/1945] Add syntax fixup for while loops --- crates/hir-expand/src/fixup.rs | 76 +++++++++++++++++++++++++++++++++- 1 file changed, 75 insertions(+), 1 deletion(-) diff --git a/crates/hir-expand/src/fixup.rs b/crates/hir-expand/src/fixup.rs index 9999790fae..6d0766020b 100644 --- a/crates/hir-expand/src/fixup.rs +++ b/crates/hir-expand/src/fixup.rs @@ -5,7 +5,7 @@ use std::mem; use mbe::{SyntheticToken, SyntheticTokenId, TokenMap}; use rustc_hash::FxHashMap; use syntax::{ - ast::{self, AstNode}, + ast::{self, AstNode, HasLoopBody}, match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange, }; use tt::Subtree; @@ -142,6 +142,39 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { ]); } }, + ast::WhileExpr(it) => { + if it.condition().is_none() { + // insert placeholder token after the while token + let while_token = match it.while_token() { + Some(t) => t, + None => continue, + }; + append.insert(while_token.into(), vec![ + SyntheticToken { + kind: SyntaxKind::IDENT, + text: "__ra_fixup".into(), + range: end_range, + id: EMPTY_ID, + }, + ]); + } + if it.loop_body().is_none() { + append.insert(node.clone().into(), vec![ + SyntheticToken { + kind: SyntaxKind::L_CURLY, + text: "{".into(), + range: end_range, + id: EMPTY_ID, + }, + SyntheticToken { + kind: SyntaxKind::R_CURLY, + text: "}".into(), + range: end_range, + id: EMPTY_ID, + }, + ]); + } + }, // FIXME: foo:: // FIXME: for, loop, match etc. _ => (), @@ -376,6 +409,47 @@ fn foo() { // the {} gets parsed as the condition, I think? 
expect![[r#" fn foo () {if {} {}} +"#]], + ) + } + + #[test] + fn fixup_while_1() { + check( + r#" +fn foo() { + while +} +"#, + expect![[r#" +fn foo () {while __ra_fixup {}} +"#]], + ) + } + + #[test] + fn fixup_while_2() { + check( + r#" +fn foo() { + while foo +} +"#, + expect![[r#" +fn foo () {while foo {}} +"#]], + ) + } + #[test] + fn fixup_while_3() { + check( + r#" +fn foo() { + while {} +} +"#, + expect![[r#" +fn foo () {while __ra_fixup {}} "#]], ) } From c16e4f260fb6b915767d18a59ab395cb46d82449 Mon Sep 17 00:00:00 2001 From: Paul Lange Date: Wed, 27 Jul 2022 18:18:51 +0200 Subject: [PATCH 0033/1945] Add syntax fixup for `loop` --- crates/hir-expand/src/fixup.rs | 34 +++++++++++++++++++++++++++++++++- 1 file changed, 33 insertions(+), 1 deletion(-) diff --git a/crates/hir-expand/src/fixup.rs b/crates/hir-expand/src/fixup.rs index 6d0766020b..e46f43a878 100644 --- a/crates/hir-expand/src/fixup.rs +++ b/crates/hir-expand/src/fixup.rs @@ -175,8 +175,26 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { ]); } }, + ast::LoopExpr(it) => { + if it.loop_body().is_none() { + append.insert(node.clone().into(), vec![ + SyntheticToken { + kind: SyntaxKind::L_CURLY, + text: "{".into(), + range: end_range, + id: EMPTY_ID, + }, + SyntheticToken { + kind: SyntaxKind::R_CURLY, + text: "}".into(), + range: end_range, + id: EMPTY_ID, + }, + ]); + } + }, // FIXME: foo:: - // FIXME: for, loop, match etc. + // FIXME: for, match etc. _ => (), } } @@ -450,6 +468,20 @@ fn foo() { "#, expect![[r#" fn foo () {while __ra_fixup {}} +"#]], + ) + } + + #[test] + fn fixup_loop() { + check( + r#" +fn foo() { + loop +} +"#, + expect![[r#" +fn foo () {loop {}} "#]], ) } From f83738e1d993d84c1607fcabdf9f41ab00f83d55 Mon Sep 17 00:00:00 2001 From: Brennan Vincent Date: Wed, 27 Jul 2022 14:34:46 -0400 Subject: [PATCH 0034/1945] Use large stack on expander thread --- crates/proc-macro-srv/src/lib.rs | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/crates/proc-macro-srv/src/lib.rs b/crates/proc-macro-srv/src/lib.rs index 4b1858b8ed..4c205b9cad 100644 --- a/crates/proc-macro-srv/src/lib.rs +++ b/crates/proc-macro-srv/src/lib.rs @@ -39,6 +39,8 @@ pub(crate) struct ProcMacroSrv { expanders: HashMap<(PathBuf, SystemTime), dylib::Expander>, } +const EXPANDER_STACK_SIZE: usize = 8 * 1024 * 1024; + impl ProcMacroSrv { pub fn expand(&mut self, task: ExpandMacro) -> Result { let expander = self.expander(task.lib.as_ref()).map_err(|err| { @@ -66,13 +68,18 @@ impl ProcMacroSrv { // FIXME: replace this with std's scoped threads once they stabilize // (then remove dependency on crossbeam) let result = crossbeam::scope(|s| { - let res = s + let res = match s + .builder() + .stack_size(EXPANDER_STACK_SIZE) + .name(task.macro_name.clone()) .spawn(|_| { expander .expand(&task.macro_name, ¯o_body, attributes.as_ref()) .map(|it| FlatTree::new(&it)) - }) - .join(); + }) { + Ok(handle) => handle.join(), + Err(e) => std::panic::resume_unwind(Box::new(e)), + }; match res { Ok(res) => res, From e782e59d3de3d4a58cbc8005fd9521502b8d9a61 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Thu, 28 Jul 2022 10:05:21 +0200 Subject: [PATCH 0035/1945] fix: Calculate completions after type anchors --- crates/hir/src/semantics.rs | 18 ++++++ .../src/completions/attribute.rs | 2 +- .../src/completions/attribute/derive.rs | 2 +- crates/ide-completion/src/completions/expr.rs | 23 +++++++- .../src/completions/item_list.rs | 2 +- .../ide-completion/src/completions/pattern.rs | 2 +- 
crates/ide-completion/src/completions/type.rs | 18 +++++- crates/ide-completion/src/completions/use_.rs | 2 +- crates/ide-completion/src/completions/vis.rs | 2 +- crates/ide-completion/src/context.rs | 5 +- crates/ide-completion/src/context/analysis.rs | 56 ++++++++++--------- crates/ide-completion/src/tests/special.rs | 55 +++++++++++++++++- 12 files changed, 151 insertions(+), 36 deletions(-) diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs index a75e5cafd0..c84318b2fb 100644 --- a/crates/hir/src/semantics.rs +++ b/crates/hir/src/semantics.rs @@ -324,6 +324,10 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> { self.imp.resolve_type(ty) } + pub fn resolve_trait(&self, trait_: &ast::Path) -> Option { + self.imp.resolve_trait(trait_) + } + // FIXME: Figure out a nice interface to inspect adjustments pub fn is_implicit_reborrow(&self, expr: &ast::Expr) -> Option { self.imp.is_implicit_reborrow(expr) @@ -1014,6 +1018,20 @@ impl<'db> SemanticsImpl<'db> { Some(Type::new_with_resolver(self.db, &analyze.resolver, ty)) } + fn resolve_trait(&self, path: &ast::Path) -> Option { + let analyze = self.analyze(path.syntax())?; + let hygiene = hir_expand::hygiene::Hygiene::new(self.db.upcast(), analyze.file_id); + let ctx = body::LowerCtx::with_hygiene(self.db.upcast(), &hygiene); + let hir_path = Path::from_src(path.clone(), &ctx)?; + match analyze + .resolver + .resolve_path_in_type_ns_fully(self.db.upcast(), hir_path.mod_path())? + { + TypeNs::TraitId(id) => Some(Trait { id }), + _ => None, + } + } + fn is_implicit_reborrow(&self, expr: &ast::Expr) -> Option { self.analyze(expr.syntax())?.is_implicit_reborrow(self.db, expr) } diff --git a/crates/ide-completion/src/completions/attribute.rs b/crates/ide-completion/src/completions/attribute.rs index 1d8a8c5f20..d9fe94cb44 100644 --- a/crates/ide-completion/src/completions/attribute.rs +++ b/crates/ide-completion/src/completions/attribute.rs @@ -115,7 +115,7 @@ pub(crate) fn complete_attribute_path( }); acc.add_nameref_keywords_with_colon(ctx); } - Qualified::Infer | Qualified::With { .. } => {} + Qualified::TypeAnchor { .. } | Qualified::With { .. } => {} } let attributes = annotated_item_kind.and_then(|kind| { diff --git a/crates/ide-completion/src/completions/attribute/derive.rs b/crates/ide-completion/src/completions/attribute/derive.rs index 14538fef60..793c22630b 100644 --- a/crates/ide-completion/src/completions/attribute/derive.rs +++ b/crates/ide-completion/src/completions/attribute/derive.rs @@ -97,7 +97,7 @@ pub(crate) fn complete_derive_path( }); acc.add_nameref_keywords_with_colon(ctx); } - Qualified::Infer | Qualified::With { .. } => {} + Qualified::TypeAnchor { .. } | Qualified::With { .. } => {} } } diff --git a/crates/ide-completion/src/completions/expr.rs b/crates/ide-completion/src/completions/expr.rs index bdf6e64f09..d4f2766602 100644 --- a/crates/ide-completion/src/completions/expr.rs +++ b/crates/ide-completion/src/completions/expr.rs @@ -46,11 +46,32 @@ pub(crate) fn complete_expr_path( }; match qualified { - Qualified::Infer => ctx + Qualified::TypeAnchor { ty: None, trait_: None } => ctx .traits_in_scope() .iter() .flat_map(|&it| hir::Trait::from(it).items(ctx.sema.db)) .for_each(|item| add_assoc_item(acc, item)), + Qualified::TypeAnchor { trait_: Some(trait_), .. 
} => { + trait_.items(ctx.sema.db).into_iter().for_each(|item| add_assoc_item(acc, item)) + } + Qualified::TypeAnchor { ty: Some(ty), trait_: None } => { + if let Some(hir::Adt::Enum(e)) = ty.as_adt() { + cov_mark::hit!(completes_variant_through_alias); + acc.add_enum_variants(ctx, path_ctx, e); + } + + ctx.iterate_path_candidates(&ty, |item| { + add_assoc_item(acc, item); + }); + + // Iterate assoc types separately + ty.iterate_assoc_items(ctx.db, ctx.krate, |item| { + if let hir::AssocItem::TypeAlias(ty) = item { + acc.add_type_alias(ctx, ty) + } + None::<()> + }); + } Qualified::With { resolution: None, .. } => {} Qualified::With { resolution: Some(resolution), .. } => { // Add associated types on type parameters and `Self`. diff --git a/crates/ide-completion/src/completions/item_list.rs b/crates/ide-completion/src/completions/item_list.rs index 4e4c9fba6c..60d05ae46b 100644 --- a/crates/ide-completion/src/completions/item_list.rs +++ b/crates/ide-completion/src/completions/item_list.rs @@ -66,7 +66,7 @@ pub(crate) fn complete_item_list( }); acc.add_nameref_keywords_with_colon(ctx); } - Qualified::Infer | Qualified::No | Qualified::With { .. } => {} + Qualified::TypeAnchor { .. } | Qualified::No | Qualified::With { .. } => {} } } diff --git a/crates/ide-completion/src/completions/pattern.rs b/crates/ide-completion/src/completions/pattern.rs index 17dfe432b3..af8a085331 100644 --- a/crates/ide-completion/src/completions/pattern.rs +++ b/crates/ide-completion/src/completions/pattern.rs @@ -180,6 +180,6 @@ pub(crate) fn complete_pattern_path( acc.add_nameref_keywords_with_colon(ctx); } - Qualified::Infer | Qualified::With { .. } => {} + Qualified::TypeAnchor { .. } | Qualified::With { .. } => {} } } diff --git a/crates/ide-completion/src/completions/type.rs b/crates/ide-completion/src/completions/type.rs index 87a998dfcc..8f9db2f94c 100644 --- a/crates/ide-completion/src/completions/type.rs +++ b/crates/ide-completion/src/completions/type.rs @@ -49,11 +49,27 @@ pub(crate) fn complete_type_path( }; match qualified { - Qualified::Infer => ctx + Qualified::TypeAnchor { ty: None, trait_: None } => ctx .traits_in_scope() .iter() .flat_map(|&it| hir::Trait::from(it).items(ctx.sema.db)) .for_each(|item| add_assoc_item(acc, item)), + Qualified::TypeAnchor { trait_: Some(trait_), .. } => { + trait_.items(ctx.sema.db).into_iter().for_each(|item| add_assoc_item(acc, item)) + } + Qualified::TypeAnchor { ty: Some(ty), trait_: None } => { + ctx.iterate_path_candidates(&ty, |item| { + add_assoc_item(acc, item); + }); + + // Iterate assoc types separately + ty.iterate_assoc_items(ctx.db, ctx.krate, |item| { + if let hir::AssocItem::TypeAlias(ty) = item { + acc.add_type_alias(ctx, ty) + } + None::<()> + }); + } Qualified::With { resolution: None, .. } => {} Qualified::With { resolution: Some(resolution), .. } => { // Add associated types on type parameters and `Self`. diff --git a/crates/ide-completion/src/completions/use_.rs b/crates/ide-completion/src/completions/use_.rs index bb2ecc9fdd..2555c34aa7 100644 --- a/crates/ide-completion/src/completions/use_.rs +++ b/crates/ide-completion/src/completions/use_.rs @@ -115,6 +115,6 @@ pub(crate) fn complete_use_path( }); acc.add_nameref_keywords_with_colon(ctx); } - Qualified::Infer | Qualified::With { resolution: None, .. } => {} + Qualified::TypeAnchor { .. } | Qualified::With { resolution: None, .. 
} => {} } } diff --git a/crates/ide-completion/src/completions/vis.rs b/crates/ide-completion/src/completions/vis.rs index ca8303906a..5e6cf4bf9a 100644 --- a/crates/ide-completion/src/completions/vis.rs +++ b/crates/ide-completion/src/completions/vis.rs @@ -29,7 +29,7 @@ pub(crate) fn complete_vis_path( acc.add_super_keyword(ctx, *super_chain_len); } - Qualified::Absolute | Qualified::Infer | Qualified::With { .. } => {} + Qualified::Absolute | Qualified::TypeAnchor { .. } | Qualified::With { .. } => {} Qualified::No => { if !has_in_token { cov_mark::hit!(kw_completion_in); diff --git a/crates/ide-completion/src/context.rs b/crates/ide-completion/src/context.rs index 93b6ad5d14..e35f79d2b6 100644 --- a/crates/ide-completion/src/context.rs +++ b/crates/ide-completion/src/context.rs @@ -193,7 +193,10 @@ pub(super) enum Qualified { super_chain_len: Option, }, /// <_>:: - Infer, + TypeAnchor { + ty: Option, + trait_: Option, + }, /// Whether the path is an absolute path Absolute, } diff --git a/crates/ide-completion/src/context/analysis.rs b/crates/ide-completion/src/context/analysis.rs index 09a1a99eb6..3e7e637dd9 100644 --- a/crates/ide-completion/src/context/analysis.rs +++ b/crates/ide-completion/src/context/analysis.rs @@ -920,49 +920,53 @@ impl<'a> CompletionContext<'a> { path_ctx.has_type_args = segment.generic_arg_list().is_some(); // calculate the qualifier context - if let Some((path, use_tree_parent)) = path_or_use_tree_qualifier(&path) { + if let Some((qualifier, use_tree_parent)) = path_or_use_tree_qualifier(&path) { path_ctx.use_tree_parent = use_tree_parent; if !use_tree_parent && segment.coloncolon_token().is_some() { path_ctx.qualified = Qualified::Absolute; } else { - let path = path + let qualifier = qualifier .segment() .and_then(|it| find_node_in_file(original_file, &it)) .map(|it| it.parent_path()); - if let Some(path) = path { - // `<_>::$0` - let is_infer_qualifier = path.qualifier().is_none() - && matches!( - path.segment().and_then(|it| it.kind()), - Some(ast::PathSegmentKind::Type { - type_ref: Some(ast::Type::InferType(_)), - trait_ref: None, - }) - ); + if let Some(qualifier) = qualifier { + let type_anchor = match qualifier.segment().and_then(|it| it.kind()) { + Some(ast::PathSegmentKind::Type { + type_ref: Some(type_ref), + trait_ref, + }) if qualifier.qualifier().is_none() => Some((type_ref, trait_ref)), + _ => None, + }; - path_ctx.qualified = if is_infer_qualifier { - Qualified::Infer + path_ctx.qualified = if let Some((ty, trait_ref)) = type_anchor { + let ty = match ty { + ast::Type::InferType(_) => None, + ty => sema.resolve_type(&ty), + }; + let trait_ = trait_ref.and_then(|it| sema.resolve_trait(&it.path()?)); + Qualified::TypeAnchor { ty, trait_ } } else { - let res = sema.resolve_path(&path); + let res = sema.resolve_path(&qualifier); // For understanding how and why super_chain_len is calculated the way it // is check the documentation at it's definition let mut segment_count = 0; - let super_count = iter::successors(Some(path.clone()), |p| p.qualifier()) - .take_while(|p| { - p.segment() - .and_then(|s| { - segment_count += 1; - s.super_token() - }) - .is_some() - }) - .count(); + let super_count = + iter::successors(Some(qualifier.clone()), |p| p.qualifier()) + .take_while(|p| { + p.segment() + .and_then(|s| { + segment_count += 1; + s.super_token() + }) + .is_some() + }) + .count(); let super_chain_len = if segment_count > super_count { None } else { Some(super_count) }; - Qualified::With { path, resolution: res, super_chain_len } + Qualified::With 
{ path: qualifier, resolution: res, super_chain_len } } }; } diff --git a/crates/ide-completion/src/tests/special.rs b/crates/ide-completion/src/tests/special.rs index ca779c2fc7..033dc99c26 100644 --- a/crates/ide-completion/src/tests/special.rs +++ b/crates/ide-completion/src/tests/special.rs @@ -674,7 +674,60 @@ fn bar() -> Bar { expect![[r#" fn foo() (as Foo) fn() -> Self "#]], - ) + ); +} + +#[test] +fn type_anchor_type() { + check( + r#" +trait Foo { + fn foo() -> Self; +} +struct Bar; +impl Bar { + fn bar() {} +} +impl Foo for Bar { + fn foo() -> { + Bar + } +} +fn bar() -> Bar { + ::$0 +} +"#, + expect![[r#" + fn bar() fn() + fn foo() (as Foo) fn() -> Self + "#]], + ); +} + +#[test] +fn type_anchor_type_trait() { + check( + r#" +trait Foo { + fn foo() -> Self; +} +struct Bar; +impl Bar { + fn bar() {} +} +impl Foo for Bar { + fn foo() -> { + Bar + } +} +fn bar() -> Bar { + ::$0 +} +"#, + expect![[r#" + fn foo() (as Foo) fn() -> Self + "#]], + ); } #[test] From 7c59d7c75c1aed160bf1aa67047ba6063ae21ba0 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Thu, 28 Jul 2022 15:47:46 +0200 Subject: [PATCH 0036/1945] fix: Fix pattern completions adding unnecessary braces --- crates/ide-completion/src/completions.rs | 13 +++++++++-- .../ide-completion/src/completions/pattern.rs | 2 +- crates/ide-completion/src/render/pattern.rs | 23 +++++++++++++++---- crates/ide-completion/src/tests/pattern.rs | 4 ++-- 4 files changed, 33 insertions(+), 9 deletions(-) diff --git a/crates/ide-completion/src/completions.rs b/crates/ide-completion/src/completions.rs index 149afcac9d..72579e6026 100644 --- a/crates/ide-completion/src/completions.rs +++ b/crates/ide-completion/src/completions.rs @@ -400,7 +400,7 @@ impl Completions { ) { if let PathCompletionCtx { kind: PathKind::Pat { pat_ctx }, .. 
} = path_ctx { cov_mark::hit!(enum_variant_pattern_path); - self.add_variant_pat(ctx, pat_ctx, variant, local_name); + self.add_variant_pat(ctx, pat_ctx, Some(path_ctx), variant, local_name); return; } @@ -484,12 +484,14 @@ impl Completions { &mut self, ctx: &CompletionContext<'_>, pattern_ctx: &PatternContext, + path_ctx: Option<&PathCompletionCtx>, variant: hir::Variant, local_name: Option, ) { self.add_opt(render_variant_pat( RenderContext::new(ctx), pattern_ctx, + path_ctx, variant, local_name.clone(), None, @@ -504,7 +506,14 @@ impl Completions { path: hir::ModPath, ) { let path = Some(&path); - self.add_opt(render_variant_pat(RenderContext::new(ctx), pattern_ctx, variant, None, path)); + self.add_opt(render_variant_pat( + RenderContext::new(ctx), + pattern_ctx, + None, + variant, + None, + path, + )); } pub(crate) fn add_struct_pat( diff --git a/crates/ide-completion/src/completions/pattern.rs b/crates/ide-completion/src/completions/pattern.rs index af8a085331..71d2d9d434 100644 --- a/crates/ide-completion/src/completions/pattern.rs +++ b/crates/ide-completion/src/completions/pattern.rs @@ -74,7 +74,7 @@ pub(crate) fn complete_pattern( hir::ModuleDef::Variant(variant) if refutable || single_variant_enum(variant.parent_enum(ctx.db)) => { - acc.add_variant_pat(ctx, pattern_ctx, variant, Some(name.clone())); + acc.add_variant_pat(ctx, pattern_ctx, None, variant, Some(name.clone())); true } hir::ModuleDef::Adt(hir::Adt::Enum(e)) => refutable || single_variant_enum(e), diff --git a/crates/ide-completion/src/render/pattern.rs b/crates/ide-completion/src/render/pattern.rs index 03db08a911..34a384f2f7 100644 --- a/crates/ide-completion/src/render/pattern.rs +++ b/crates/ide-completion/src/render/pattern.rs @@ -6,7 +6,7 @@ use itertools::Itertools; use syntax::SmolStr; use crate::{ - context::{ParamContext, ParamKind, PatternContext}, + context::{ParamContext, ParamKind, PathCompletionCtx, PatternContext}, render::{ variant::{format_literal_label, visible_fields}, RenderContext, @@ -42,6 +42,7 @@ pub(crate) fn render_struct_pat( pub(crate) fn render_variant_pat( ctx: RenderContext<'_>, pattern_ctx: &PatternContext, + path_ctx: Option<&PathCompletionCtx>, variant: hir::Variant, local_name: Option, path: Option<&hir::ModPath>, @@ -58,9 +59,23 @@ pub(crate) fn render_variant_pat( (name.to_smol_str(), name.escaped().to_smol_str()) } }; - let kind = variant.kind(ctx.db()); - let label = format_literal_label(name.as_str(), kind); - let pat = render_pat(&ctx, pattern_ctx, &escaped_name, kind, &visible_fields, fields_omitted)?; + + let (label, pat) = match path_ctx { + Some(PathCompletionCtx { has_call_parens: true, .. 
}) => (name, escaped_name.to_string()), + _ => { + let kind = variant.kind(ctx.db()); + let label = format_literal_label(name.as_str(), kind); + let pat = render_pat( + &ctx, + pattern_ctx, + &escaped_name, + kind, + &visible_fields, + fields_omitted, + )?; + (label, pat) + } + }; Some(build_completion(ctx, label, pat, variant)) } diff --git a/crates/ide-completion/src/tests/pattern.rs b/crates/ide-completion/src/tests/pattern.rs index 877b5f2164..30ddbe2dc6 100644 --- a/crates/ide-completion/src/tests/pattern.rs +++ b/crates/ide-completion/src/tests/pattern.rs @@ -443,7 +443,7 @@ fn foo() { } "#, expect![[r#" - bn TupleVariant(…) TupleVariant($1)$0 + bn TupleVariant TupleVariant "#]], ); check_empty( @@ -458,7 +458,7 @@ fn foo() { } "#, expect![[r#" - bn RecordVariant {…} RecordVariant { field$1 }$0 + bn RecordVariant RecordVariant "#]], ); } From 74abd44a265d6daded813ccb48ed599d835ea532 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Thu, 28 Jul 2022 17:09:31 +0200 Subject: [PATCH 0037/1945] fix: Do completions in path qualifier position --- crates/ide-completion/src/completions/expr.rs | 16 +- crates/ide-completion/src/context/analysis.rs | 189 ++++++++++-------- 2 files changed, 119 insertions(+), 86 deletions(-) diff --git a/crates/ide-completion/src/completions/expr.rs b/crates/ide-completion/src/completions/expr.rs index d4f2766602..bafaeb502a 100644 --- a/crates/ide-completion/src/completions/expr.rs +++ b/crates/ide-completion/src/completions/expr.rs @@ -11,7 +11,14 @@ pub(crate) fn complete_expr_path( acc: &mut Completions, ctx: &CompletionContext<'_>, path_ctx @ PathCompletionCtx { qualified, .. }: &PathCompletionCtx, - &ExprCtx { + expr_ctx: &ExprCtx, +) { + let _p = profile::span("complete_expr_path"); + if !ctx.qualifier_ctx.none() { + return; + } + + let &ExprCtx { in_block_expr, in_loop_body, after_if_expr, @@ -23,12 +30,7 @@ pub(crate) fn complete_expr_path( ref impl_, in_match_guard, .. - }: &ExprCtx, -) { - let _p = profile::span("complete_expr_path"); - if !ctx.qualifier_ctx.none() { - return; - } + } = expr_ctx; let wants_mut_token = ref_expr_parent.as_ref().map(|it| it.mut_token().is_none()).unwrap_or(false); diff --git a/crates/ide-completion/src/context/analysis.rs b/crates/ide-completion/src/context/analysis.rs index 3e7e637dd9..76fc74c01d 100644 --- a/crates/ide-completion/src/context/analysis.rs +++ b/crates/ide-completion/src/context/analysis.rs @@ -592,7 +592,7 @@ impl<'a> CompletionContext<'a> { has_call_parens: false, has_macro_bang: false, qualified: Qualified::No, - parent: path.parent_path(), + parent: None, path: path.clone(), kind: PathKind::Item { kind: ItemListKind::SourceFile }, has_type_args: false, @@ -827,92 +827,123 @@ impl<'a> CompletionContext<'a> { PathKind::Type { location: location.unwrap_or(TypeLocation::Other) } }; + let mut kind_macro_call = |it: ast::MacroCall| { + path_ctx.has_macro_bang = it.excl_token().is_some(); + let parent = it.syntax().parent()?; + // Any path in an item list will be treated as a macro call by the parser + let kind = match_ast! { + match parent { + ast::MacroExpr(expr) => make_path_kind_expr(expr.into()), + ast::MacroPat(it) => PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())}, + ast::MacroType(ty) => make_path_kind_type(ty.into()), + ast::ItemList(_) => PathKind::Item { kind: ItemListKind::Module }, + ast::AssocItemList(_) => PathKind::Item { kind: match parent.parent() { + Some(it) => match_ast! 
{ + match it { + ast::Trait(_) => ItemListKind::Trait, + ast::Impl(it) => if it.trait_().is_some() { + ItemListKind::TraitImpl(find_node_in_file_compensated(sema, original_file, &it)) + } else { + ItemListKind::Impl + }, + _ => return None + } + }, + None => return None, + } }, + ast::ExternItemList(_) => PathKind::Item { kind: ItemListKind::ExternBlock }, + ast::SourceFile(_) => PathKind::Item { kind: ItemListKind::SourceFile }, + _ => return None, + } + }; + Some(kind) + }; + let make_path_kind_attr = |meta: ast::Meta| { + let attr = meta.parent_attr()?; + let kind = attr.kind(); + let attached = attr.syntax().parent()?; + let is_trailing_outer_attr = kind != AttrKind::Inner + && non_trivia_sibling(attr.syntax().clone().into(), syntax::Direction::Next) + .is_none(); + let annotated_item_kind = + if is_trailing_outer_attr { None } else { Some(attached.kind()) }; + Some(PathKind::Attr { attr_ctx: AttrCtx { kind, annotated_item_kind } }) + }; + // Infer the path kind let parent = path.syntax().parent()?; let kind = match_ast! { - match parent { - ast::PathType(it) => make_path_kind_type(it.into()), - ast::PathExpr(it) => { - if let Some(p) = it.syntax().parent() { - if ast::ExprStmt::can_cast(p.kind()) { - if let Some(kind) = inbetween_body_and_decl_check(p) { - return Some(make_res(NameRefKind::Keyword(kind))); - } + match parent { + ast::PathType(it) => make_path_kind_type(it.into()), + ast::PathExpr(it) => { + if let Some(p) = it.syntax().parent() { + if ast::ExprStmt::can_cast(p.kind()) { + if let Some(kind) = inbetween_body_and_decl_check(p) { + return Some(make_res(NameRefKind::Keyword(kind))); } } + } - path_ctx.has_call_parens = it.syntax().parent().map_or(false, |it| ast::CallExpr::can_cast(it.kind())); + path_ctx.has_call_parens = it.syntax().parent().map_or(false, |it| ast::CallExpr::can_cast(it.kind())); - make_path_kind_expr(it.into()) - }, - ast::TupleStructPat(it) => { - path_ctx.has_call_parens = true; - PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())} - }, - ast::RecordPat(it) => { - path_ctx.has_call_parens = true; - PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())} - }, - ast::PathPat(it) => { - PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())} - }, - ast::MacroCall(it) => { - // A macro call in this position is usually a result of parsing recovery, so check that - if let Some(kind) = inbetween_body_and_decl_check(it.syntax().clone()) { - return Some(make_res(NameRefKind::Keyword(kind))); + make_path_kind_expr(it.into()) + }, + ast::TupleStructPat(it) => { + path_ctx.has_call_parens = true; + PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) } + }, + ast::RecordPat(it) => { + path_ctx.has_call_parens = true; + PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) } + }, + ast::PathPat(it) => { + PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())} + }, + ast::MacroCall(it) => { + // A macro call in this position is usually a result of parsing recovery, so check that + if let Some(kind) = inbetween_body_and_decl_check(it.syntax().clone()) { + return Some(make_res(NameRefKind::Keyword(kind))); + } + + kind_macro_call(it)? 
+ }, + ast::Meta(meta) => make_path_kind_attr(meta)?, + ast::Visibility(it) => PathKind::Vis { has_in_token: it.in_token().is_some() }, + ast::UseTree(_) => PathKind::Use, + // completing inside a qualifier + ast::Path(parent) => { + path_ctx.parent = Some(parent.clone()); + let parent = iter::successors(Some(parent), |it| it.parent_path()).last()?.syntax().parent()?; + match_ast! { + match parent { + ast::PathType(it) => make_path_kind_type(it.into()), + ast::PathExpr(it) => { + path_ctx.has_call_parens = it.syntax().parent().map_or(false, |it| ast::CallExpr::can_cast(it.kind())); + + make_path_kind_expr(it.into()) + }, + ast::TupleStructPat(it) => { + path_ctx.has_call_parens = true; + PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) } + }, + ast::RecordPat(it) => { + path_ctx.has_call_parens = true; + PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) } + }, + ast::PathPat(it) => { + PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())} + }, + ast::MacroCall(it) => { + kind_macro_call(it)? + }, + ast::Meta(meta) => make_path_kind_attr(meta)?, + ast::Visibility(it) => PathKind::Vis { has_in_token: it.in_token().is_some() }, + ast::UseTree(_) => PathKind::Use, + _ => return None, } - - path_ctx.has_macro_bang = it.excl_token().is_some(); - let parent = it.syntax().parent()?; - // Any path in an item list will be treated as a macro call by the parser - match_ast! { - match parent { - ast::MacroExpr(expr) => make_path_kind_expr(expr.into()), - ast::MacroPat(it) => PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())}, - ast::MacroType(ty) => make_path_kind_type(ty.into()), - ast::ItemList(_) => PathKind::Item { kind: ItemListKind::Module }, - ast::AssocItemList(_) => PathKind::Item { kind: match parent.parent() { - Some(it) => match_ast! 
{ - match it { - ast::Trait(_) => ItemListKind::Trait, - ast::Impl(it) => if it.trait_().is_some() { - ItemListKind::TraitImpl(find_node_in_file_compensated(sema, original_file, &it)) - } else { - ItemListKind::Impl - }, - _ => return None - } - }, - None => return None, - } }, - ast::ExternItemList(_) => PathKind::Item { kind: ItemListKind::ExternBlock }, - ast::SourceFile(_) => PathKind::Item { kind: ItemListKind::SourceFile }, - _ => return None, - } - } - }, - ast::Meta(meta) => { - let attr = meta.parent_attr()?; - let kind = attr.kind(); - let attached = attr.syntax().parent()?; - let is_trailing_outer_attr = kind != AttrKind::Inner - && non_trivia_sibling(attr.syntax().clone().into(), syntax::Direction::Next).is_none(); - let annotated_item_kind = if is_trailing_outer_attr { - None - } else { - Some(attached.kind()) - }; - PathKind::Attr { - attr_ctx: AttrCtx { - kind, - annotated_item_kind, - } - } - }, - ast::Visibility(it) => PathKind::Vis { has_in_token: it.in_token().is_some() }, - ast::UseTree(_) => PathKind::Use, - _ => return None, - + } + }, + _ => return None, } }; From 8658425a672e5e56ac5e02db8423ca56c81c2c31 Mon Sep 17 00:00:00 2001 From: Pavan Kumar Sunkara Date: Thu, 28 Jul 2022 16:21:14 +0100 Subject: [PATCH 0038/1945] publish: Use cargo ws rename to rename crates --- .github/workflows/publish.yml | 17 +++++++++++++++-- 1 file changed, 15 insertions(+), 2 deletions(-) diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 927996c1be..a4497f49e3 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -34,8 +34,21 @@ jobs: git config --global user.email "runner@gha.local" git config --global user.name "Github Action" rm Cargo.lock + # Fix names for crates that were published before switch to kebab-case. + cargo workspaces rename --from base-db base_db + cargo workspaces rename --from hir-def hir_def + cargo workspaces rename --from hir-expand hir_expand + cargo workspaces rename --from hir-ty hir_ty + cargo workspaces rename --from ide-assists ide_assists + cargo workspaces rename --from ide-completion ide_completion + cargo workspaces rename --from ide-db ide_db + cargo workspaces rename --from ide-diagnostics ide_diagnostics + cargo workspaces rename --from ide-ssr ide_ssr + cargo workspaces rename --from proc-macro-api proc_macro_api + cargo workspaces rename --from proc-macro-srv proc_macro_srv + cargo workspaces rename --from project-model project_model + cargo workspaces rename --from test-utils test_utils + cargo workspaces rename --from text-edit text_edit cargo workspaces rename ra_ap_%n find crates/rust-analyzer -type f -name '*.rs' -exec sed -i 's/rust_analyzer/ra_ap_rust_analyzer/g' {} + - # Fix names for crates that were published before switch to kebab-case. 
- find crates -name 'Cargo.toml' -exec sed -i "s/ra_ap_base-db/ra_ap_base_db/g; s/ra_ap_hir-def/ra_ap_hir_def/g; s/ra_ap_hir-expand/ra_ap_hir_expand/g; s/ra_ap_hir-ty/ra_ap_hir_ty/g; s/ra_ap_ide-assists/ra_ap_ide_assists/g; s/ra_ap_ide-completion/ra_ap_ide_completion/g; s/ra_ap_ide-db/ra_ap_ide_db/g; s/ra_ap_ide-diagnostics/ra_ap_ide_diagnostics/g; s/ra_ap_ide-ssr/ra_ap_ide_ssr/g; s/ra_ap_proc-macro-api/ra_ap_proc_macro_api/g; s/ra_ap_proc-macro-srv/ra_ap_proc_macro_srv/g; s/ra_ap_project-model/ra_ap_project_model/g; s/ra_ap_test-utils/ra_ap_test_utils/g; s/ra_ap_text-edit/ra_ap_text_edit/g" {} + cargo workspaces publish --yes --force '*' --exact --no-git-commit --allow-dirty --skip-published custom 0.0.$PATCH From ce7541260d6a5c76633b4c0e2e1639730018773d Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Thu, 28 Jul 2022 17:49:23 +0200 Subject: [PATCH 0039/1945] fix: Don't complete marker traits in expression position --- crates/ide-completion/src/completions/expr.rs | 17 ++++++++++++++--- crates/ide-completion/src/render.rs | 3 --- crates/ide-completion/src/tests/expression.rs | 2 -- crates/ide-completion/src/tests/record.rs | 1 - 4 files changed, 14 insertions(+), 9 deletions(-) diff --git a/crates/ide-completion/src/completions/expr.rs b/crates/ide-completion/src/completions/expr.rs index bafaeb502a..5d0ddaaf2a 100644 --- a/crates/ide-completion/src/completions/expr.rs +++ b/crates/ide-completion/src/completions/expr.rs @@ -202,10 +202,21 @@ pub(crate) fn complete_expr_path( } } } - ctx.process_all_names(&mut |name, def| { - if scope_def_applicable(def) { - acc.add_path_resolution(ctx, path_ctx, name, def); + ctx.process_all_names(&mut |name, def| match def { + ScopeDef::ModuleDef(hir::ModuleDef::Trait(t)) => { + let assocs = t.items_with_supertraits(ctx.db); + match &*assocs { + // traits with no assoc items are unusable as expressions since + // there is no associated item path that can be constructed with them + [] => (), + // FIXME: Render the assoc item with the trait qualified + &[_item] => acc.add_path_resolution(ctx, path_ctx, name, def), + // FIXME: Append `::` to the thing here, since a trait on its own won't work + [..] 
=> acc.add_path_resolution(ctx, path_ctx, name, def), + } } + _ if scope_def_applicable(def) => acc.add_path_resolution(ctx, path_ctx, name, def), + _ => (), }); if is_func_update.is_none() { diff --git a/crates/ide-completion/src/render.rs b/crates/ide-completion/src/render.rs index 9b25964a60..39cf957137 100644 --- a/crates/ide-completion/src/render.rs +++ b/crates/ide-completion/src/render.rs @@ -1347,7 +1347,6 @@ fn main() { fn main() [] fn foo(…) [] md core [] - tt Sized [] "#]], ) } @@ -1394,7 +1393,6 @@ fn main() { fn main() [] fn foo(…) [] md core [] - tt Sized [] "#]], ) } @@ -1492,7 +1490,6 @@ fn main() { fn &bar() [type] fn foo(…) [] md core [] - tt Sized [] "#]], ) } diff --git a/crates/ide-completion/src/tests/expression.rs b/crates/ide-completion/src/tests/expression.rs index ce9d01d337..ce7a543d9f 100644 --- a/crates/ide-completion/src/tests/expression.rs +++ b/crates/ide-completion/src/tests/expression.rs @@ -44,7 +44,6 @@ fn baz() { st Record st Tuple st Unit - tt Trait un Union ev TupleV(…) TupleV(u32) bt u32 @@ -137,7 +136,6 @@ impl Unit { st Record st Tuple st Unit - tt Trait tp TypeParam un Union ev TupleV(…) TupleV(u32) diff --git a/crates/ide-completion/src/tests/record.rs b/crates/ide-completion/src/tests/record.rs index ec32602fa3..f6accc68e5 100644 --- a/crates/ide-completion/src/tests/record.rs +++ b/crates/ide-completion/src/tests/record.rs @@ -167,7 +167,6 @@ fn main() { st Foo st Foo {…} Foo { foo1: u32, foo2: u32 } tt Default - tt Sized bt u32 kw crate:: kw self:: From f867ddc6217438ad514980fcbd5f59e2d99c3d55 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Thu, 28 Jul 2022 18:49:31 +0200 Subject: [PATCH 0040/1945] fix: Order ItemScope::entries results --- crates/hir-def/src/item_scope.rs | 10 ++++----- .../src/handlers/expand_glob_import.rs | 10 ++++----- crates/ide-assists/src/tests/generated.rs | 2 +- crates/ide-completion/src/render.rs | 22 +++++++++---------- 4 files changed, 22 insertions(+), 22 deletions(-) diff --git a/crates/hir-def/src/item_scope.rs b/crates/hir-def/src/item_scope.rs index 579f803ea1..a11a92204c 100644 --- a/crates/hir-def/src/item_scope.rs +++ b/crates/hir-def/src/item_scope.rs @@ -5,6 +5,7 @@ use std::collections::hash_map::Entry; use base_db::CrateId; use hir_expand::{name::Name, AstId, MacroCallId}; +use itertools::Itertools; use once_cell::sync::Lazy; use profile::Count; use rustc_hash::{FxHashMap, FxHashSet}; @@ -97,15 +98,14 @@ pub(crate) enum BuiltinShadowMode { impl ItemScope { pub fn entries<'a>(&'a self) -> impl Iterator + 'a { // FIXME: shadowing - let keys: FxHashSet<_> = self - .types + self.types .keys() .chain(self.values.keys()) .chain(self.macros.keys()) .chain(self.unresolved.iter()) - .collect(); - - keys.into_iter().map(move |name| (name, self.get(name))) + .sorted() + .unique() + .map(move |name| (name, self.get(name))) } pub fn declarations(&self) -> impl Iterator + '_ { diff --git a/crates/ide-assists/src/handlers/expand_glob_import.rs b/crates/ide-assists/src/handlers/expand_glob_import.rs index 943c1d90e6..87f5018fb6 100644 --- a/crates/ide-assists/src/handlers/expand_glob_import.rs +++ b/crates/ide-assists/src/handlers/expand_glob_import.rs @@ -36,7 +36,7 @@ use crate::{ // pub struct Baz; // } // -// use foo::{Baz, Bar}; +// use foo::{Bar, Baz}; // // fn qux(bar: Bar, baz: Baz) {} // ``` @@ -281,7 +281,7 @@ mod foo { pub fn f() {} } -use foo::{Baz, Bar, f}; +use foo::{Bar, Baz, f}; fn qux(bar: Bar, baz: Baz) { f(); @@ -351,7 +351,7 @@ mod foo { pub fn f() {} } -use foo::{Baz, Bar, f}; +use foo::{Bar, Baz, 
f}; fn qux(bar: Bar, baz: Baz) { f(); @@ -440,7 +440,7 @@ mod foo { } } -use foo::{bar::{Baz, Bar, f}, baz::*}; +use foo::{bar::{Bar, Baz, f}, baz::*}; fn qux(bar: Bar, baz: Baz) { f(); @@ -561,7 +561,7 @@ mod foo { use foo::{ bar::{*, f}, - baz::{g, qux::{q, h}} + baz::{g, qux::{h, q}} }; fn qux(bar: Bar, baz: Baz) { diff --git a/crates/ide-assists/src/tests/generated.rs b/crates/ide-assists/src/tests/generated.rs index e8d48607be..6eaab48a32 100644 --- a/crates/ide-assists/src/tests/generated.rs +++ b/crates/ide-assists/src/tests/generated.rs @@ -535,7 +535,7 @@ mod foo { pub struct Baz; } -use foo::{Baz, Bar}; +use foo::{Bar, Baz}; fn qux(bar: Bar, baz: Baz) {} "#####, diff --git a/crates/ide-completion/src/render.rs b/crates/ide-completion/src/render.rs index 39cf957137..946134b0ff 100644 --- a/crates/ide-completion/src/render.rs +++ b/crates/ide-completion/src/render.rs @@ -1271,8 +1271,8 @@ fn main() { st S [] st &mut S [type] st S [] - fn main() [] fn foo(…) [] + fn main() [] "#]], ); check_relevance( @@ -1288,8 +1288,8 @@ fn main() { lc s [type+name+local] st S [type] st S [] - fn main() [] fn foo(…) [] + fn main() [] "#]], ); check_relevance( @@ -1305,8 +1305,8 @@ fn main() { lc ssss [type+local] st S [type] st S [] - fn main() [] fn foo(…) [] + fn main() [] "#]], ); } @@ -1342,10 +1342,10 @@ fn main() { lc &t [type+local] st S [] st &S [type] - st T [] st S [] - fn main() [] + st T [] fn foo(…) [] + fn main() [] md core [] "#]], ) @@ -1388,10 +1388,10 @@ fn main() { lc &mut t [type+local] st S [] st &mut S [type] - st T [] st S [] - fn main() [] + st T [] fn foo(…) [] + fn main() [] md core [] "#]], ) @@ -1483,12 +1483,12 @@ fn main() { expect![[r#" st S [] st &S [type] - st T [] st S [] - fn main() [] + st T [] fn bar() [] fn &bar() [type] fn foo(…) [] + fn main() [] md core [] "#]], ) @@ -1633,8 +1633,8 @@ fn foo() { ev Foo::B [type_could_unify] fn foo() [] en Foo [] - fn baz() [] fn bar() [] + fn baz() [] "#]], ); } @@ -1724,9 +1724,9 @@ fn f() { } "#, expect![[r#" - md std [] st Buffer [] fn f() [] + md std [] tt BufRead (use std::io::BufRead) [requires_import] st BufReader (use std::io::BufReader) [requires_import] st BufWriter (use std::io::BufWriter) [requires_import] From 948c9afc73786d5b4895fa4ea9041992601a59c3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Thu, 28 Jul 2022 21:02:36 +0300 Subject: [PATCH 0041/1945] Only run rainbow highlighting test on 64-bit Unix --- crates/ide/src/syntax_highlighting/tests.rs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/crates/ide/src/syntax_highlighting/tests.rs b/crates/ide/src/syntax_highlighting/tests.rs index 6ba6153178..d21bfc9385 100644 --- a/crates/ide/src/syntax_highlighting/tests.rs +++ b/crates/ide/src/syntax_highlighting/tests.rs @@ -915,6 +915,10 @@ fn main() { } #[test] +#[cfg_attr( + all(unix, not(target_pointer_width = "64")), + ignore = "depends on `DefaultHasher` outputs" +)] fn test_rainbow_highlighting() { check_highlighting( r#" From 61d1c3e1385be65abc197115817c28fbeb3294d6 Mon Sep 17 00:00:00 2001 From: hkalbasi Date: Thu, 28 Jul 2022 22:38:20 +0430 Subject: [PATCH 0042/1945] add debug impl for AnyDiagnostic --- crates/hir/src/diagnostics.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/crates/hir/src/diagnostics.rs b/crates/hir/src/diagnostics.rs index 6c6c11ea4e..50374f4b3f 100644 --- a/crates/hir/src/diagnostics.rs +++ b/crates/hir/src/diagnostics.rs @@ -14,6 +14,7 @@ use crate::{MacroKind, Type}; macro_rules! 
diagnostics { ($($diag:ident,)*) => { + #[derive(Debug)] pub enum AnyDiagnostic {$( $diag(Box<$diag>), )*} From 11ef494b3764811b8ea5178d6c9bd67b287f104a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Thu, 28 Jul 2022 21:45:47 +0300 Subject: [PATCH 0043/1945] Be more explicit when filtering built-in completions --- crates/ide-completion/src/tests.rs | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/crates/ide-completion/src/tests.rs b/crates/ide-completion/src/tests.rs index 4be6acbe84..cf826648dc 100644 --- a/crates/ide-completion/src/tests.rs +++ b/crates/ide-completion/src/tests.rs @@ -23,8 +23,6 @@ mod type_pos; mod use_tree; mod visibility; -use std::mem; - use hir::{db::DefDatabase, PrefixKind, Semantics}; use ide_db::{ base_db::{fixture::ChangeFixture, FileLoader, FilePosition}, @@ -107,12 +105,9 @@ fn completion_list_with_config( ) -> String { // filter out all but one builtintype completion for smaller test outputs let items = get_all_items(config, ra_fixture, trigger_character); - let mut bt_seen = false; let items = items .into_iter() - .filter(|it| { - it.kind() != CompletionItemKind::BuiltinType || !mem::replace(&mut bt_seen, true) - }) + .filter(|it| it.kind() != CompletionItemKind::BuiltinType || it.label() == "u32") .filter(|it| include_keywords || it.kind() != CompletionItemKind::Keyword) .filter(|it| include_keywords || it.kind() != CompletionItemKind::Snippet) .sorted_by_key(|it| (it.kind(), it.label().to_owned(), it.detail().map(ToOwned::to_owned))) From 902fd6ddcdd4e3ad03c7a9099f0b3b1d7e6a95e8 Mon Sep 17 00:00:00 2001 From: cynecx Date: Fri, 29 Jul 2022 02:27:16 +0200 Subject: [PATCH 0044/1945] fix: complete path of existing record expr --- crates/ide-completion/src/context/analysis.rs | 2 ++ crates/ide-completion/src/tests/expression.rs | 19 +++++++++++++++++++ crates/parser/src/grammar/paths.rs | 2 +- 3 files changed, 22 insertions(+), 1 deletion(-) diff --git a/crates/ide-completion/src/context/analysis.rs b/crates/ide-completion/src/context/analysis.rs index 76fc74c01d..22ec7cead4 100644 --- a/crates/ide-completion/src/context/analysis.rs +++ b/crates/ide-completion/src/context/analysis.rs @@ -939,10 +939,12 @@ impl<'a> CompletionContext<'a> { ast::Meta(meta) => make_path_kind_attr(meta)?, ast::Visibility(it) => PathKind::Vis { has_in_token: it.in_token().is_some() }, ast::UseTree(_) => PathKind::Use, + ast::RecordExpr(it) => make_path_kind_expr(it.into()), _ => return None, } } }, + ast::RecordExpr(it) => make_path_kind_expr(it.into()), _ => return None, } }; diff --git a/crates/ide-completion/src/tests/expression.rs b/crates/ide-completion/src/tests/expression.rs index ce7a543d9f..925081ebf6 100644 --- a/crates/ide-completion/src/tests/expression.rs +++ b/crates/ide-completion/src/tests/expression.rs @@ -651,3 +651,22 @@ fn main() { "]], ); } + +#[test] +fn complete_record_expr_path() { + check( + r#" +struct Zulu; +impl Zulu { + fn test() -> Self { } +} +fn boi(val: Zulu) { } +fn main() { + boi(Zulu:: $0 {}); +} +"#, + expect![[r#" + fn test() fn() -> Zulu + "#]], + ); +} diff --git a/crates/parser/src/grammar/paths.rs b/crates/parser/src/grammar/paths.rs index f9efcef92a..8de5d33a19 100644 --- a/crates/parser/src/grammar/paths.rs +++ b/crates/parser/src/grammar/paths.rs @@ -54,7 +54,7 @@ fn path_for_qualifier( mut qual: CompletedMarker, ) -> CompletedMarker { loop { - let use_tree = matches!(p.nth(2), T![*] | T!['{']); + let use_tree = mode == Mode::Use && matches!(p.nth(2), T![*] | T!['{']); if p.at(T![::]) && 
!use_tree { let path = qual.precede(p); p.bump(T![::]); From c1a175f61e3584eb7c4ecf59a589a57447621461 Mon Sep 17 00:00:00 2001 From: hkalbasi Date: Thu, 28 Jul 2022 22:38:44 +0430 Subject: [PATCH 0045/1945] Generate rust type from json --- Cargo.lock | 1 + crates/ide-diagnostics/Cargo.toml | 1 + .../src/handlers/json_is_not_rust.rs | 201 ++++++++++++++++++ crates/ide-diagnostics/src/lib.rs | 2 + 4 files changed, 205 insertions(+) create mode 100644 crates/ide-diagnostics/src/handlers/json_is_not_rust.rs diff --git a/Cargo.lock b/Cargo.lock index 4c83000683..6c01b8a0df 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -710,6 +710,7 @@ dependencies = [ "ide-db", "itertools", "profile", + "serde_json", "sourcegen", "stdx", "syntax", diff --git a/crates/ide-diagnostics/Cargo.toml b/crates/ide-diagnostics/Cargo.toml index e221425edd..9b9e21a4dd 100644 --- a/crates/ide-diagnostics/Cargo.toml +++ b/crates/ide-diagnostics/Cargo.toml @@ -15,6 +15,7 @@ itertools = "0.10.3" either = "1.7.0" +serde_json = "1.0.82" profile = { path = "../profile", version = "0.0.0" } stdx = { path = "../stdx", version = "0.0.0" } diff --git a/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs b/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs new file mode 100644 index 0000000000..aa7fcffb48 --- /dev/null +++ b/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs @@ -0,0 +1,201 @@ +//! This diagnostic provides an assist for creating a struct definition from a JSON +//! example. + +use ide_db::{base_db::FileId, source_change::SourceChange}; +use itertools::Itertools; +use stdx::format_to; +use syntax::{ + ast::{self, make}, + SyntaxKind, SyntaxNode, +}; +use text_edit::TextEdit; + +use crate::{fix, Diagnostic, Severity}; + +#[derive(Default)] +struct State { + result: String, + struct_counts: usize, +} + +impl State { + fn generate_new_name(&mut self) -> ast::Name { + self.struct_counts += 1; + make::name(&format!("Struct{}", self.struct_counts)) + } + + fn build_struct(&mut self, value: &serde_json::Map) -> ast::Type { + let name = self.generate_new_name(); + let ty = make::ty(&name.to_string()); + let strukt = make::struct_( + None, + name, + None, + make::record_field_list(value.iter().sorted_unstable_by_key(|x| x.0).map( + |(name, value)| make::record_field(None, make::name(name), self.type_of(value)), + )) + .into(), + ); + format_to!(self.result, "#[derive(Serialize, Deserialize)]\n{}\n", strukt); + ty + } + + fn type_of(&mut self, value: &serde_json::Value) -> ast::Type { + match value { + serde_json::Value::Null => make::ty_unit(), + serde_json::Value::Bool(_) => make::ty("bool"), + serde_json::Value::Number(x) => make::ty(if x.is_i64() { "i64" } else { "f64" }), + serde_json::Value::String(_) => make::ty("String"), + serde_json::Value::Array(x) => { + let ty = match x.iter().next() { + Some(x) => self.type_of(x), + None => make::ty_placeholder(), + }; + make::ty(&format!("Vec<{ty}>")) + } + serde_json::Value::Object(x) => self.build_struct(x), + } + } +} + +pub(crate) fn json_in_items(acc: &mut Vec, file_id: FileId, node: &SyntaxNode) { + if node.kind() == SyntaxKind::ERROR + && node.first_token().map(|x| x.kind()) == Some(SyntaxKind::L_CURLY) + && node.last_token().map(|x| x.kind()) == Some(SyntaxKind::R_CURLY) + { + let node_string = node.to_string(); + if let Ok(x) = serde_json::from_str(&node_string) { + if let serde_json::Value::Object(x) = x { + let range = node.text_range(); + let mut edit = TextEdit::builder(); + edit.delete(range); + let mut state = State::default(); + state.build_struct(&x); 
+ edit.insert(range.start(), state.result); + acc.push( + Diagnostic::new( + "json-is-not-rust", + "JSON syntax is not valid as a Rust item", + range, + ) + .severity(Severity::WeakWarning) + .with_fixes(Some(vec![fix( + "convert_json_to_struct", + "Convert JSON to struct", + SourceChange::from_text_edit(file_id, edit.finish()), + range, + )])), + ); + } + } + } +} + +#[cfg(test)] +mod tests { + use crate::{ + tests::{check_diagnostics_with_config, check_fix, check_no_fix}, + DiagnosticsConfig, + }; + + #[test] + fn diagnostic_for_simple_case() { + let mut config = DiagnosticsConfig::default(); + config.disabled.insert("syntax-error".to_string()); + check_diagnostics_with_config( + config, + r#" + { "foo": "bar" } + // ^^^^^^^^^^^^^^^^ 💡 weak: JSON syntax is not valid as a Rust item +"#, + ); + } + + #[test] + fn types_of_primitives() { + check_fix( + r#" + {$0 + "foo": "bar", + "bar": 2.3, + "baz": null, + "bay": 57, + "box": true + } + "#, + r#" + #[derive(Serialize, Deserialize)] + struct Struct1{ bar: f64, bay: i64, baz: (), r#box: bool, foo: String } + + "#, + ); + } + + #[test] + fn nested_structs() { + check_fix( + r#" + {$0 + "foo": "bar", + "bar": { + "kind": "Object", + "value": {} + } + } + "#, + r#" + #[derive(Serialize, Deserialize)] + struct Struct3{ } + #[derive(Serialize, Deserialize)] + struct Struct2{ kind: String, value: Struct3 } + #[derive(Serialize, Deserialize)] + struct Struct1{ bar: Struct2, foo: String } + + "#, + ); + } + + #[test] + fn arrays() { + check_fix( + r#" + { + "of_string": ["foo", "2", "x"], $0 + "of_object": [{ + "x": 10, + "y": 20 + }, { + "x": 10, + "y": 20 + }], + "nested": [[[2]]], + "empty": [] + } + "#, + r#" + #[derive(Serialize, Deserialize)] + struct Struct2{ x: i64, y: i64 } + #[derive(Serialize, Deserialize)] + struct Struct1{ empty: Vec<_>, nested: Vec>>, of_object: Vec, of_string: Vec } + + "#, + ); + } + + #[test] + fn no_emit_outside_of_item_position() { + check_no_fix( + r#" + fn foo() { + let json = {$0 + "foo": "bar", + "bar": { + "kind": "Object", + "value": {} + } + }; + } + "#, + ); + } +} diff --git a/crates/ide-diagnostics/src/lib.rs b/crates/ide-diagnostics/src/lib.rs index 41abaa836f..7034f010e1 100644 --- a/crates/ide-diagnostics/src/lib.rs +++ b/crates/ide-diagnostics/src/lib.rs @@ -50,6 +50,7 @@ mod handlers { pub(crate) mod field_shorthand; pub(crate) mod useless_braces; pub(crate) mod unlinked_file; + pub(crate) mod json_is_not_rust; } #[cfg(test)] @@ -175,6 +176,7 @@ pub fn diagnostics( for node in parse.tree().syntax().descendants() { handlers::useless_braces::useless_braces(&mut res, file_id, &node); handlers::field_shorthand::field_shorthand(&mut res, file_id, &node); + handlers::json_is_not_rust::json_in_items(&mut res, file_id, &node); } let module = sema.to_module_def(file_id); From 618cfd792ca2b3da5cd027bad20a7769f5d9f973 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sat, 30 Jul 2022 09:43:30 +0200 Subject: [PATCH 0046/1945] fix: Fix ast-id up when merging raw attributes --- crates/hir-def/src/attr.rs | 21 ++++++-- crates/ide/src/syntax_highlighting.rs | 4 +- crates/ide/src/syntax_highlighting/inject.rs | 9 ++-- .../highlight_module_docs_inline.html | 51 +++++++++++++++++++ .../highlight_module_docs_outline.html | 50 ++++++++++++++++++ crates/ide/src/syntax_highlighting/tests.rs | 42 +++++++++++++++ 6 files changed, 167 insertions(+), 10 deletions(-) create mode 100644 crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_inline.html create mode 100644 
crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_outline.html diff --git a/crates/hir-def/src/attr.rs b/crates/hir-def/src/attr.rs index 8a6b6f3eff..2b39c6f8da 100644 --- a/crates/hir-def/src/attr.rs +++ b/crates/hir-def/src/attr.rs @@ -124,13 +124,24 @@ impl RawAttrs { pub(crate) fn merge(&self, other: Self) -> Self { // FIXME: This needs to fixup `AttrId`s - match (&self.entries, &other.entries) { + match (&self.entries, other.entries) { (None, None) => Self::EMPTY, - (Some(entries), None) | (None, Some(entries)) => { - Self { entries: Some(entries.clone()) } - } + (None, entries @ Some(_)) => Self { entries }, + (Some(entries), None) => Self { entries: Some(entries.clone()) }, (Some(a), Some(b)) => { - Self { entries: Some(a.iter().chain(b.iter()).cloned().collect()) } + let last_ast_index = a.last().map_or(0, |it| it.id.ast_index + 1); + Self { + entries: Some( + a.iter() + .cloned() + .chain(b.iter().map(|it| { + let mut it = it.clone(); + it.id.ast_index += last_ast_index; + it + })) + .collect(), + ), + } } } } diff --git a/crates/ide/src/syntax_highlighting.rs b/crates/ide/src/syntax_highlighting.rs index d013d6f4b1..3fb49b45d9 100644 --- a/crates/ide/src/syntax_highlighting.rs +++ b/crates/ide/src/syntax_highlighting.rs @@ -13,7 +13,7 @@ mod html; #[cfg(test)] mod tests; -use hir::{InFile, Name, Semantics}; +use hir::{Name, Semantics}; use ide_db::{FxHashMap, RootDatabase}; use syntax::{ ast, AstNode, AstToken, NodeOrToken, SyntaxKind::*, SyntaxNode, TextRange, WalkEvent, T, @@ -325,7 +325,7 @@ fn traverse( Leave(NodeOrToken::Node(node)) => { // Doc comment highlighting injection, we do this when leaving the node // so that we overwrite the highlighting of the doc comment itself. - inject::doc_comment(hl, sema, InFile::new(file_id.into(), &node)); + inject::doc_comment(hl, sema, file_id, &node); continue; } }; diff --git a/crates/ide/src/syntax_highlighting/inject.rs b/crates/ide/src/syntax_highlighting/inject.rs index f779a985a9..f376f9fda7 100644 --- a/crates/ide/src/syntax_highlighting/inject.rs +++ b/crates/ide/src/syntax_highlighting/inject.rs @@ -5,7 +5,8 @@ use std::mem; use either::Either; use hir::{InFile, Semantics}; use ide_db::{ - active_parameter::ActiveParameter, defs::Definition, rust_doc::is_rust_fence, SymbolKind, + active_parameter::ActiveParameter, base_db::FileId, defs::Definition, rust_doc::is_rust_fence, + SymbolKind, }; use syntax::{ ast::{self, AstNode, IsString, QuoteOffsets}, @@ -81,16 +82,18 @@ pub(super) fn ra_fixture( const RUSTDOC_FENCE_LENGTH: usize = 3; const RUSTDOC_FENCES: [&str; 2] = ["```", "~~~"]; -/// Injection of syntax highlighting of doctests. +/// Injection of syntax highlighting of doctests and intra doc links. pub(super) fn doc_comment( hl: &mut Highlights, sema: &Semantics<'_, RootDatabase>, - InFile { file_id: src_file_id, value: node }: InFile<&SyntaxNode>, + src_file_id: FileId, + node: &SyntaxNode, ) { let (attributes, def) = match doc_attributes(sema, node) { Some(it) => it, None => return, }; + let src_file_id = src_file_id.into(); // Extract intra-doc links and emit highlights for them. if let Some((docs, doc_mapping)) = attributes.docs_with_rangemap(sema.db) { diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_inline.html b/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_inline.html new file mode 100644 index 0000000000..8a1d69816e --- /dev/null +++ b/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_inline.html @@ -0,0 +1,51 @@ + + +
//! [Struct]
+//! This is an intra doc injection test for modules
+//! [Struct]
+//! This is an intra doc injection test for modules
+
+pub struct Struct;
+
\ No newline at end of file diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_outline.html b/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_outline.html new file mode 100644 index 0000000000..c4c3e3dc26 --- /dev/null +++ b/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_outline.html @@ -0,0 +1,50 @@ + + +
/// [crate::foo::Struct]
+/// This is an intra doc injection test for modules
+/// [crate::foo::Struct]
+/// This is an intra doc injection test for modules
+mod foo;
+
\ No newline at end of file diff --git a/crates/ide/src/syntax_highlighting/tests.rs b/crates/ide/src/syntax_highlighting/tests.rs index d21bfc9385..99be7c6648 100644 --- a/crates/ide/src/syntax_highlighting/tests.rs +++ b/crates/ide/src/syntax_highlighting/tests.rs @@ -914,6 +914,48 @@ fn main() { ); } +#[test] +fn test_mod_hl_injection() { + check_highlighting( + r##" +//- /foo.rs +//! [Struct] +//! This is an intra doc injection test for modules +//! [Struct] +//! This is an intra doc injection test for modules + +pub struct Struct; +//- /lib.rs crate:foo +/// [crate::foo::Struct] +/// This is an intra doc injection test for modules +/// [crate::foo::Struct] +/// This is an intra doc injection test for modules +mod foo; +"##, + expect_file!["./test_data/highlight_module_docs_inline.html"], + false, + ); + check_highlighting( + r##" +//- /lib.rs crate:foo +/// [crate::foo::Struct] +/// This is an intra doc injection test for modules +/// [crate::foo::Struct] +/// This is an intra doc injection test for modules +mod foo; +//- /foo.rs +//! [Struct] +//! This is an intra doc injection test for modules +//! [Struct] +//! This is an intra doc injection test for modules + +pub struct Struct; +"##, + expect_file!["./test_data/highlight_module_docs_outline.html"], + false, + ); +} + #[test] #[cfg_attr( all(unix, not(target_pointer_width = "64")), From 58c3a5634fec30705be19f21971e016c730f305c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Sun, 31 Jul 2022 20:26:35 +0300 Subject: [PATCH 0047/1945] Update xtask promote and release instructions --- docs/dev/README.md | 7 ++++--- xtask/src/release.rs | 10 ++-------- 2 files changed, 6 insertions(+), 11 deletions(-) diff --git a/docs/dev/README.md b/docs/dev/README.md index 468f2b9e98..76bbd1e918 100644 --- a/docs/dev/README.md +++ b/docs/dev/README.md @@ -210,7 +210,8 @@ Release process is handled by `release`, `dist` and `promote` xtasks, `release` ./rust-rust-analyzer # Note the name! ``` -Additionally, it assumes that the remote for `rust-analyzer` is called `upstream` (I use `origin` to point to my fork). +The remote for `rust-analyzer` must be called `upstream` (I use `origin` to point to my fork). +In addition, for `xtask promote` (see below), `rust-rust-analyzer` must have a `rust-analyzer` remote pointing to this repository on GitHub. `release` calls the GitHub API calls to scrape pull request comments and categorize them in the changelog. This step uses the `curl` and `jq` applications, which need to be available in `PATH`. @@ -225,13 +226,13 @@ Release steps: * push it to `upstream`. This triggers GitHub Actions which: * runs `cargo xtask dist` to package binaries and VS Code extension * makes a GitHub release - * pushes VS Code extension to the marketplace + * publishes the VS Code extension to the marketplace * call the GitHub API for PR details * create a new changelog in `rust-analyzer.github.io` 3. While the release is in progress, fill in the changelog 4. Commit & push the changelog 5. Tweet -6. Inside `rust-analyzer`, run `cargo xtask promote` -- this will create a PR to rust-lang/rust updating rust-analyzer's submodule. +6. Inside `rust-analyzer`, run `cargo xtask promote` -- this will create a PR to rust-lang/rust updating rust-analyzer's subtree. Self-approve the PR. If the GitHub Actions release fails because of a transient problem like a timeout, you can re-run the job from the Actions console. 
diff --git a/xtask/src/release.rs b/xtask/src/release.rs index 1c5fc64c24..17ada51564 100644 --- a/xtask/src/release.rs +++ b/xtask/src/release.rs @@ -77,18 +77,12 @@ impl flags::Promote { cmd!(sh, "git switch master").run()?; cmd!(sh, "git fetch upstream").run()?; cmd!(sh, "git reset --hard upstream/master").run()?; - cmd!(sh, "git submodule update --recursive").run()?; let date = date_iso(sh)?; let branch = format!("rust-analyzer-{date}"); cmd!(sh, "git switch -c {branch}").run()?; - { - let _dir = sh.push_dir("src/tools/rust-analyzer"); - cmd!(sh, "git fetch origin").run()?; - cmd!(sh, "git reset --hard origin/release").run()?; - } - cmd!(sh, "git add src/tools/rust-analyzer").run()?; - cmd!(sh, "git commit -m':arrow_up: rust-analyzer'").run()?; + cmd!(sh, "git subtree pull -P src/tools/rust-analyzer rust-analyzer master").run()?; + if !self.dry_run { cmd!(sh, "git push -u origin {branch}").run()?; cmd!( From d31f3605cea39530cb6b5b1c89934b174c886f49 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sun, 31 Jul 2022 19:27:20 +0200 Subject: [PATCH 0048/1945] Properly cfg the `max` field of Limit --- crates/limit/src/lib.rs | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/crates/limit/src/lib.rs b/crates/limit/src/lib.rs index 3c1da80edb..d6a706a7cd 100644 --- a/crates/limit/src/lib.rs +++ b/crates/limit/src/lib.rs @@ -2,12 +2,13 @@ #![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#[cfg(feature = "tracking")] use std::sync::atomic::AtomicUsize; /// Represents a struct used to enforce a numerical limit. pub struct Limit { upper_bound: usize, - #[allow(unused)] + #[cfg(feature = "tracking")] max: AtomicUsize, } @@ -15,14 +16,22 @@ impl Limit { /// Creates a new limit. #[inline] pub const fn new(upper_bound: usize) -> Self { - Self { upper_bound, max: AtomicUsize::new(0) } + Self { + upper_bound, + #[cfg(feature = "tracking")] + max: AtomicUsize::new(0), + } } /// Creates a new limit. #[inline] #[cfg(feature = "tracking")] pub const fn new_tracking(upper_bound: usize) -> Self { - Self { upper_bound, max: AtomicUsize::new(1) } + Self { + upper_bound, + #[cfg(feature = "tracking")] + max: AtomicUsize::new(1), + } } /// Gets the underlying numeric limit. 
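The point of the `Limit` change above is that a `#[cfg]`-gated struct field may only be mentioned when the feature is enabled, so every struct expression that initializes it needs the same gate, which is why both constructors gained the attribute. A minimal sketch of the pattern, with simplified names rather than the actual `limit` crate:

```rust
use std::sync::atomic::AtomicUsize;

pub struct Limit {
    upper_bound: usize,
    // Present only when the `tracking` feature is enabled.
    #[cfg(feature = "tracking")]
    max: AtomicUsize,
}

impl Limit {
    pub const fn new(upper_bound: usize) -> Self {
        Self {
            upper_bound,
            // The initializer must be gated as well; without this attribute the
            // struct expression would not compile when `tracking` is disabled,
            // because the field does not exist in that configuration.
            #[cfg(feature = "tracking")]
            max: AtomicUsize::new(0),
        }
    }
}
```
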
From 3b2ecf44a0ee8437f8eb0e5b607312de6af7312a Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sun, 31 Jul 2022 19:27:34 +0200 Subject: [PATCH 0049/1945] Give variables more descriptive names --- crates/hir-def/src/nameres/collector.rs | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs index f394c54171..8a6bb929c3 100644 --- a/crates/hir-def/src/nameres/collector.rs +++ b/crates/hir-def/src/nameres/collector.rs @@ -1055,7 +1055,7 @@ impl DefCollector<'_> { }; let mut res = ReachedFixedPoint::Yes; macros.retain(|directive| { - let resolver2 = |path| { + let resolver = |path| { let resolved_res = self.def_map.resolve_path_fp_with_macro( self.db, ResolveMode::Other, @@ -1068,7 +1068,7 @@ impl DefCollector<'_> { .take_macros() .map(|it| (it, macro_id_to_def_id(self.db, it))) }; - let resolver = |path| resolver2(path).map(|(_, it)| it); + let resolver_def_id = |path| resolver(path).map(|(_, it)| it); match &directive.kind { MacroDirectiveKind::FnLike { ast_id, expand_to } => { @@ -1077,7 +1077,7 @@ impl DefCollector<'_> { ast_id, *expand_to, self.def_map.krate, - &resolver, + &resolver_def_id, &mut |_err| (), ); if let Ok(Ok(call_id)) = call_id { @@ -1093,7 +1093,7 @@ impl DefCollector<'_> { *derive_attr, *derive_pos as u32, self.def_map.krate, - &resolver2, + &resolver, ); if let Ok((macro_id, def_id, call_id)) = id { @@ -1158,7 +1158,7 @@ impl DefCollector<'_> { } } - let def = match resolver(path.clone()) { + let def = match resolver_def_id(path.clone()) { Some(def) if def.is_attribute() => def, _ => return true, }; @@ -1292,7 +1292,8 @@ impl DefCollector<'_> { true }); // Attribute resolution can add unresolved macro invocations, so concatenate the lists. 
- self.unresolved_macros.extend(macros); + macros.extend(mem::take(&mut self.unresolved_macros)); + self.unresolved_macros = macros; for (module_id, depth, container, macro_call_id) in resolved { self.collect_macro_expansion(module_id, macro_call_id, depth, container); From d40ab66186fa477177a10d01eb16960a5f20c7ac Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Mon, 1 Aug 2022 16:48:33 +0900 Subject: [PATCH 0050/1945] fix: remove whitespaces from doctest names --- crates/ide/src/runnables.rs | 77 ++++++++++++++++++++++++++++++++++--- 1 file changed, 71 insertions(+), 6 deletions(-) diff --git a/crates/ide/src/runnables.rs b/crates/ide/src/runnables.rs index bec770ed99..3155f97f25 100644 --- a/crates/ide/src/runnables.rs +++ b/crates/ide/src/runnables.rs @@ -373,11 +373,13 @@ pub(crate) fn runnable_impl( let adt_name = ty.as_adt()?.name(sema.db); let mut ty_args = ty.type_arguments().peekable(); let params = if ty_args.peek().is_some() { - format!("<{}>", ty_args.format_with(", ", |ty, cb| cb(&ty.display(sema.db)))) + format!("<{}>", ty_args.format_with(",", |ty, cb| cb(&ty.display(sema.db)))) } else { String::new() }; - let test_id = TestId::Path(format!("{}{}", adt_name, params)); + let mut test_id = format!("{}{}", adt_name, params); + test_id.retain(|c| c != ' '); + let test_id = TestId::Path(test_id); Some(Runnable { use_name_in_title: false, nav, kind: RunnableKind::DocTest { test_id }, cfg }) } @@ -441,10 +443,11 @@ fn module_def_doctest(db: &RootDatabase, def: Definition) -> Option { format_to!( path, "<{}>", - ty_args.format_with(", ", |ty, cb| cb(&ty.display(db))) + ty_args.format_with(",", |ty, cb| cb(&ty.display(db))) ); } format_to!(path, "::{}", def_name); + path.retain(|c| c != ' '); return Some(path); } } @@ -2067,13 +2070,23 @@ mod tests { $0 struct Foo; +/// ``` +/// ``` impl Foo { /// ```rust /// ```` fn t() {} } + +/// ``` +/// ``` +impl Foo, ()> { + /// ``` + /// ``` + fn t() {} +} "#, - &[DocTest], + &[DocTest, DocTest, DocTest, DocTest], expect![[r#" [ Runnable { @@ -2082,12 +2095,64 @@ impl Foo { file_id: FileId( 0, ), - full_range: 47..85, + full_range: 20..103, + focus_range: 47..56, + name: "impl", + kind: Impl, + }, + kind: DocTest { + test_id: Path( + "Foo", + ), + }, + cfg: None, + }, + Runnable { + use_name_in_title: false, + nav: NavigationTarget { + file_id: FileId( + 0, + ), + full_range: 63..101, name: "t", }, kind: DocTest { test_id: Path( - "Foo::t", + "Foo::t", + ), + }, + cfg: None, + }, + Runnable { + use_name_in_title: false, + nav: NavigationTarget { + file_id: FileId( + 0, + ), + full_range: 105..188, + focus_range: 126..146, + name: "impl", + kind: Impl, + }, + kind: DocTest { + test_id: Path( + "Foo,()>", + ), + }, + cfg: None, + }, + Runnable { + use_name_in_title: false, + nav: NavigationTarget { + file_id: FileId( + 0, + ), + full_range: 153..186, + name: "t", + }, + kind: DocTest { + test_id: Path( + "Foo,()>::t", ), }, cfg: None, From bd7dfac5ebbd8ba3bd63384000759fb4bb49b329 Mon Sep 17 00:00:00 2001 From: Jonas Schievink Date: Mon, 1 Aug 2022 13:47:09 +0200 Subject: [PATCH 0051/1945] Fix r-a spelling in some places --- .github/ISSUE_TEMPLATE/bug_report.md | 2 +- README.md | 2 +- crates/hir-def/src/item_tree.rs | 2 +- crates/ide-db/src/apply_change.rs | 2 +- crates/ide-ssr/src/lib.rs | 2 +- crates/ide/src/expand_macro.rs | 4 +- crates/ide/src/inlay_hints.rs | 2 +- crates/ide/src/join_lines.rs | 2 +- crates/ide/src/matching_brace.rs | 2 +- crates/ide/src/move_item.rs | 4 +- crates/ide/src/parent_module.rs | 2 +- crates/ide/src/runnables.rs | 
4 +- crates/ide/src/shuffle_crate_graph.rs | 2 +- crates/ide/src/status.rs | 2 +- crates/ide/src/syntax_tree.rs | 2 +- crates/ide/src/view_crate_graph.rs | 2 +- crates/ide/src/view_hir.rs | 2 +- crates/ide/src/view_item_tree.rs | 2 +- crates/proc-macro-api/src/lib.rs | 2 +- crates/proc-macro-srv/src/abis/mod.rs | 2 +- crates/project-model/src/cargo_workspace.rs | 2 +- .../src/integrated_benchmarks.rs | 4 +- crates/syntax/src/lib.rs | 2 +- docs/dev/README.md | 8 +-- docs/dev/architecture.md | 2 +- docs/dev/guide.md | 2 +- docs/user/manual.adoc | 4 +- editors/code/package.json | 58 +++++++++---------- 28 files changed, 64 insertions(+), 64 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index 7ba06356a3..1f246ed79b 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -19,7 +19,7 @@ Before submitting, please make sure that you're not running into one of these kn Otherwise please try to provide information which will help us to fix the issue faster. Minimal reproducible examples with few dependencies are especially lovely <3. --> -**rust-analyzer version**: (eg. output of "Rust Analyzer: Show RA Version" command) +**rust-analyzer version**: (eg. output of "rust-analyzer: Show RA Version" command) **rustc version**: (eg. output of `rustc -V`) diff --git a/README.md b/README.md index 8bb0517ed5..8c3f6f8468 100644 --- a/README.md +++ b/README.md @@ -43,7 +43,7 @@ https://rust-lang.zulipchat.com/#narrow/stream/185405-t-compiler.2Frust-analyzer ## License -Rust analyzer is primarily distributed under the terms of both the MIT +rust-analyzer is primarily distributed under the terms of both the MIT license and the Apache License (Version 2.0). See LICENSE-APACHE and LICENSE-MIT for details. diff --git a/crates/hir-def/src/item_tree.rs b/crates/hir-def/src/item_tree.rs index 375587ee93..3342d4db4a 100644 --- a/crates/hir-def/src/item_tree.rs +++ b/crates/hir-def/src/item_tree.rs @@ -14,7 +14,7 @@ //! unaffected, so we don't have to recompute name resolution results or item data (see `data.rs`). //! //! The `ItemTree` for the currently open file can be displayed by using the VS Code command -//! "Rust Analyzer: Debug ItemTree". +//! "rust-analyzer: Debug ItemTree". //! //! Compared to rustc's architecture, `ItemTree` has properties from both rustc's AST and HIR: many //! 
syntax-level Rust features are already desugared to simpler forms in the `ItemTree`, but name diff --git a/crates/ide-db/src/apply_change.rs b/crates/ide-db/src/apply_change.rs index 98b0e9c947..f8134c552f 100644 --- a/crates/ide-db/src/apply_change.rs +++ b/crates/ide-db/src/apply_change.rs @@ -45,7 +45,7 @@ impl RootDatabase { // |=== // | Editor | Action Name // - // | VS Code | **Rust Analyzer: Memory Usage (Clears Database)** + // | VS Code | **rust-analyzer: Memory Usage (Clears Database)** // |=== // image::https://user-images.githubusercontent.com/48062697/113065592-08559f00-91b1-11eb-8c96-64b88068ec02.gif[] pub fn per_query_memory_usage(&mut self) -> Vec<(String, Bytes)> { diff --git a/crates/ide-ssr/src/lib.rs b/crates/ide-ssr/src/lib.rs index a5e24daa9f..739e0ccb43 100644 --- a/crates/ide-ssr/src/lib.rs +++ b/crates/ide-ssr/src/lib.rs @@ -57,7 +57,7 @@ // |=== // | Editor | Action Name // -// | VS Code | **Rust Analyzer: Structural Search Replace** +// | VS Code | **rust-analyzer: Structural Search Replace** // |=== // // Also available as an assist, by writing a comment containing the structural diff --git a/crates/ide/src/expand_macro.rs b/crates/ide/src/expand_macro.rs index efa8551a00..93252339cd 100644 --- a/crates/ide/src/expand_macro.rs +++ b/crates/ide/src/expand_macro.rs @@ -19,7 +19,7 @@ pub struct ExpandedMacro { // |=== // | Editor | Action Name // -// | VS Code | **Rust Analyzer: Expand macro recursively** +// | VS Code | **rust-analyzer: Expand macro recursively** // |=== // // image::https://user-images.githubusercontent.com/48062697/113020648-b3973180-917a-11eb-84a9-ecb921293dc5.gif[] @@ -32,7 +32,7 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option< _ => 0, })?; - // due to how Rust Analyzer works internally, we need to special case derive attributes, + // due to how rust-analyzer works internally, we need to special case derive attributes, // otherwise they might not get found, e.g. 
here with the cursor at $0 `#[attr]` would expand: // ``` // #[attr] diff --git a/crates/ide/src/inlay_hints.rs b/crates/ide/src/inlay_hints.rs index 5aae669aa4..ed19784d1f 100644 --- a/crates/ide/src/inlay_hints.rs +++ b/crates/ide/src/inlay_hints.rs @@ -100,7 +100,7 @@ pub enum InlayTooltip { // |=== // | Editor | Action Name // -// | VS Code | **Rust Analyzer: Toggle inlay hints* +// | VS Code | **rust-analyzer: Toggle inlay hints* // |=== // // image::https://user-images.githubusercontent.com/48062697/113020660-b5f98b80-917a-11eb-8d70-3be3fd558cdd.png[] diff --git a/crates/ide/src/join_lines.rs b/crates/ide/src/join_lines.rs index 08621addee..edc48e84d7 100644 --- a/crates/ide/src/join_lines.rs +++ b/crates/ide/src/join_lines.rs @@ -28,7 +28,7 @@ pub struct JoinLinesConfig { // |=== // | Editor | Action Name // -// | VS Code | **Rust Analyzer: Join lines** +// | VS Code | **rust-analyzer: Join lines** // |=== // // image::https://user-images.githubusercontent.com/48062697/113020661-b6922200-917a-11eb-87c4-b75acc028f11.gif[] diff --git a/crates/ide/src/matching_brace.rs b/crates/ide/src/matching_brace.rs index da70cecdd8..6e8a6d020c 100644 --- a/crates/ide/src/matching_brace.rs +++ b/crates/ide/src/matching_brace.rs @@ -12,7 +12,7 @@ use syntax::{ // |=== // | Editor | Action Name // -// | VS Code | **Rust Analyzer: Find matching brace** +// | VS Code | **rust-analyzer: Find matching brace** // |=== // // image::https://user-images.githubusercontent.com/48062697/113065573-04298180-91b1-11eb-8dec-d4e2a202f304.gif[] diff --git a/crates/ide/src/move_item.rs b/crates/ide/src/move_item.rs index 02e9fb8b5e..ffc4bdd7da 100644 --- a/crates/ide/src/move_item.rs +++ b/crates/ide/src/move_item.rs @@ -19,8 +19,8 @@ pub enum Direction { // |=== // | Editor | Action Name // -// | VS Code | **Rust Analyzer: Move item up** -// | VS Code | **Rust Analyzer: Move item down** +// | VS Code | **rust-analyzer: Move item up** +// | VS Code | **rust-analyzer: Move item down** // |=== // // image::https://user-images.githubusercontent.com/48062697/113065576-04298180-91b1-11eb-91ce-4505e99ed598.gif[] diff --git a/crates/ide/src/parent_module.rs b/crates/ide/src/parent_module.rs index 9b1f480446..8f3cc86873 100644 --- a/crates/ide/src/parent_module.rs +++ b/crates/ide/src/parent_module.rs @@ -18,7 +18,7 @@ use crate::NavigationTarget; // |=== // | Editor | Action Name // -// | VS Code | **Rust Analyzer: Locate parent module** +// | VS Code | **rust-analyzer: Locate parent module** // |=== // // image::https://user-images.githubusercontent.com/48062697/113065580-04c21800-91b1-11eb-9a32-00086161c0bd.gif[] diff --git a/crates/ide/src/runnables.rs b/crates/ide/src/runnables.rs index 3155f97f25..b0853b10fd 100644 --- a/crates/ide/src/runnables.rs +++ b/crates/ide/src/runnables.rs @@ -116,7 +116,7 @@ impl Runnable { // |=== // | Editor | Action Name // -// | VS Code | **Rust Analyzer: Run** +// | VS Code | **rust-analyzer: Run** // |=== // image::https://user-images.githubusercontent.com/48062697/113065583-055aae80-91b1-11eb-958f-d67efcaf6a2f.gif[] pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec { @@ -202,7 +202,7 @@ pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec { // |=== // | Editor | Action Name // -// | VS Code | **Rust Analyzer: Peek related tests** +// | VS Code | **rust-analyzer: Peek related tests** // |=== pub(crate) fn related_tests( db: &RootDatabase, diff --git a/crates/ide/src/shuffle_crate_graph.rs b/crates/ide/src/shuffle_crate_graph.rs index 15cb89dcce..2d86627643 
100644 --- a/crates/ide/src/shuffle_crate_graph.rs +++ b/crates/ide/src/shuffle_crate_graph.rs @@ -12,7 +12,7 @@ use ide_db::{ // |=== // | Editor | Action Name // -// | VS Code | **Rust Analyzer: Shuffle Crate Graph** +// | VS Code | **rust-analyzer: Shuffle Crate Graph** // |=== pub(crate) fn shuffle_crate_graph(db: &mut RootDatabase) { let crate_graph = db.crate_graph(); diff --git a/crates/ide/src/status.rs b/crates/ide/src/status.rs index 3191870eb5..32e39f82a0 100644 --- a/crates/ide/src/status.rs +++ b/crates/ide/src/status.rs @@ -29,7 +29,7 @@ fn macro_syntax_tree_stats(db: &RootDatabase) -> SyntaxTreeStats { // |=== // | Editor | Action Name // -// | VS Code | **Rust Analyzer: Status** +// | VS Code | **rust-analyzer: Status** // |=== // image::https://user-images.githubusercontent.com/48062697/113065584-05f34500-91b1-11eb-98cc-5c196f76be7f.gif[] pub(crate) fn status(db: &RootDatabase, file_id: Option) -> String { diff --git a/crates/ide/src/syntax_tree.rs b/crates/ide/src/syntax_tree.rs index 9003e7cd34..4256fea0f8 100644 --- a/crates/ide/src/syntax_tree.rs +++ b/crates/ide/src/syntax_tree.rs @@ -12,7 +12,7 @@ use syntax::{ // |=== // | Editor | Action Name // -// | VS Code | **Rust Analyzer: Show Syntax Tree** +// | VS Code | **rust-analyzer: Show Syntax Tree** // |=== // image::https://user-images.githubusercontent.com/48062697/113065586-068bdb80-91b1-11eb-9507-fee67f9f45a0.gif[] pub(crate) fn syntax_tree( diff --git a/crates/ide/src/view_crate_graph.rs b/crates/ide/src/view_crate_graph.rs index 51291a6453..bf7b7efe28 100644 --- a/crates/ide/src/view_crate_graph.rs +++ b/crates/ide/src/view_crate_graph.rs @@ -16,7 +16,7 @@ use ide_db::{ // |=== // | Editor | Action Name // -// | VS Code | **Rust Analyzer: View Crate Graph** +// | VS Code | **rust-analyzer: View Crate Graph** // |=== pub(crate) fn view_crate_graph(db: &RootDatabase, full: bool) -> Result { let crate_graph = db.crate_graph(); diff --git a/crates/ide/src/view_hir.rs b/crates/ide/src/view_hir.rs index 7312afe531..bf0835ed7e 100644 --- a/crates/ide/src/view_hir.rs +++ b/crates/ide/src/view_hir.rs @@ -8,7 +8,7 @@ use syntax::{algo::find_node_at_offset, ast, AstNode}; // |=== // | Editor | Action Name // -// | VS Code | **Rust Analyzer: View Hir** +// | VS Code | **rust-analyzer: View Hir** // |=== // image::https://user-images.githubusercontent.com/48062697/113065588-068bdb80-91b1-11eb-9a78-0b4ef1e972fb.gif[] pub(crate) fn view_hir(db: &RootDatabase, position: FilePosition) -> String { diff --git a/crates/ide/src/view_item_tree.rs b/crates/ide/src/view_item_tree.rs index 3dc03085d6..9c1f93356e 100644 --- a/crates/ide/src/view_item_tree.rs +++ b/crates/ide/src/view_item_tree.rs @@ -9,7 +9,7 @@ use ide_db::RootDatabase; // |=== // | Editor | Action Name // -// | VS Code | **Rust Analyzer: Debug ItemTree** +// | VS Code | **rust-analyzer: Debug ItemTree** // |=== pub(crate) fn view_item_tree(db: &RootDatabase, file_id: FileId) -> String { db.file_item_tree(file_id.into()).pretty_print() diff --git a/crates/proc-macro-api/src/lib.rs b/crates/proc-macro-api/src/lib.rs index d7010e825a..a3ea05f4af 100644 --- a/crates/proc-macro-api/src/lib.rs +++ b/crates/proc-macro-api/src/lib.rs @@ -60,7 +60,7 @@ impl MacroDylib { let info = version::read_dylib_info(&path)?; if info.version.0 < 1 || info.version.1 < 47 { - let msg = format!("proc-macro {} built by {:#?} is not supported by Rust Analyzer, please update your rust version.", path.display(), info); + let msg = format!("proc-macro {} built by {:#?} is not supported by 
rust-analyzer, please update your Rust version.", path.display(), info); return Err(io::Error::new(io::ErrorKind::InvalidData, msg)); } diff --git a/crates/proc-macro-srv/src/abis/mod.rs b/crates/proc-macro-srv/src/abis/mod.rs index bcf3f1184c..705d09ea94 100644 --- a/crates/proc-macro-srv/src/abis/mod.rs +++ b/crates/proc-macro-srv/src/abis/mod.rs @@ -5,7 +5,7 @@ //! compiler into submodules of this module (e.g proc_macro_srv::abis::abi_1_47). //! //! All of these ABIs are subsumed in the `Abi` enum, which exposes a simple -//! interface the rest of rust analyzer can use to talk to the macro +//! interface the rest of rust-analyzer can use to talk to the macro //! provider. //! //! # Adding a new ABI diff --git a/crates/project-model/src/cargo_workspace.rs b/crates/project-model/src/cargo_workspace.rs index 597880c2ca..eed955b42d 100644 --- a/crates/project-model/src/cargo_workspace.rs +++ b/crates/project-model/src/cargo_workspace.rs @@ -19,7 +19,7 @@ use crate::{utf8_stdout, ManifestPath}; /// [`CargoWorkspace`] represents the logical structure of, well, a Cargo /// workspace. It pretty closely mirrors `cargo metadata` output. /// -/// Note that internally, rust analyzer uses a different structure: +/// Note that internally, rust-analyzer uses a different structure: /// `CrateGraph`. `CrateGraph` is lower-level: it knows only about the crates, /// while this knows about `Packages` & `Targets`: purely cargo-related /// concepts. diff --git a/crates/rust-analyzer/src/integrated_benchmarks.rs b/crates/rust-analyzer/src/integrated_benchmarks.rs index 47cdd8dfc7..e49a98685a 100644 --- a/crates/rust-analyzer/src/integrated_benchmarks.rs +++ b/crates/rust-analyzer/src/integrated_benchmarks.rs @@ -6,8 +6,8 @@ //! code here exercise this specific completion, and thus have a fast //! edit/compile/test cycle. //! -//! Note that "Rust Analyzer: Run" action does not allow running a single test -//! in release mode in VS Code. There's however "Rust Analyzer: Copy Run Command Line" +//! Note that "rust-analyzer: Run" action does not allow running a single test +//! in release mode in VS Code. There's however "rust-analyzer: Copy Run Command Line" //! which you can use to paste the command in terminal and add `--release` manually. use std::sync::Arc; diff --git a/crates/syntax/src/lib.rs b/crates/syntax/src/lib.rs index 7fa354c0c4..4f5e273a52 100644 --- a/crates/syntax/src/lib.rs +++ b/crates/syntax/src/lib.rs @@ -1,4 +1,4 @@ -//! Syntax Tree library used throughout the rust analyzer. +//! Syntax Tree library used throughout the rust-analyzer. //! //! Properties: //! - easy and fast incremental re-parsing diff --git a/docs/dev/README.md b/docs/dev/README.md index 76bbd1e918..c7f152acc2 100644 --- a/docs/dev/README.md +++ b/docs/dev/README.md @@ -82,7 +82,7 @@ There's **"Run Extension (Debug Build)"** launch configuration for this in VS Co In general, I use one of the following workflows for fixing bugs and implementing features: If the problem concerns only internal parts of rust-analyzer (i.e. I don't need to touch the `rust-analyzer` crate or TypeScript code), there is a unit-test for it. -So, I use **Rust Analyzer: Run** action in VS Code to run this single test, and then just do printf-driven development/debugging. +So, I use **rust-analyzer: Run** action in VS Code to run this single test, and then just do printf-driven development/debugging. As a sanity check after I'm done, I use `cargo xtask install --server` and **Reload Window** action in VS Code to verify that the thing works as I expect. 
If the problem concerns only the VS Code extension, I use **Run Installed Extension** launch configuration from `launch.json`. @@ -152,11 +152,11 @@ To log all communication between the server and the client, there are two choice There are also several VS Code commands which might be of interest: -* `Rust Analyzer: Status` shows some memory-usage statistics. +* `rust-analyzer: Status` shows some memory-usage statistics. -* `Rust Analyzer: Syntax Tree` shows syntax tree of the current file/selection. +* `rust-analyzer: Syntax Tree` shows syntax tree of the current file/selection. -* `Rust Analyzer: View Hir` shows the HIR expressions within the function containing the cursor. +* `rust-analyzer: View Hir` shows the HIR expressions within the function containing the cursor. You can hover over syntax nodes in the opened text file to see the appropriate rust code that it refers to and the rust editor will also highlight the proper diff --git a/docs/dev/architecture.md b/docs/dev/architecture.md index ea4035baf1..51e26c58a9 100644 --- a/docs/dev/architecture.md +++ b/docs/dev/architecture.md @@ -371,7 +371,7 @@ That is, rust-analyzer requires unwinding. ### Testing -Rust Analyzer has three interesting [system boundaries](https://www.tedinski.com/2018/04/10/making-tests-a-positive-influence-on-design.html) to concentrate tests on. +rust-analyzer has three interesting [system boundaries](https://www.tedinski.com/2018/04/10/making-tests-a-positive-influence-on-design.html) to concentrate tests on. The outermost boundary is the `rust-analyzer` crate, which defines an LSP interface in terms of stdio. We do integration testing of this component, by feeding it with a stream of LSP requests and checking responses. diff --git a/docs/dev/guide.md b/docs/dev/guide.md index 47ae3f3e6a..808eb5d10b 100644 --- a/docs/dev/guide.md +++ b/docs/dev/guide.md @@ -63,7 +63,7 @@ Next, let's talk about what the inputs to the `Analysis` are, precisely. ## Inputs -Rust Analyzer never does any I/O itself, all inputs get passed explicitly via +rust-analyzer never does any I/O itself, all inputs get passed explicitly via the `AnalysisHost::apply_change` method, which accepts a single argument, a `Change`. 
[`Change`] is a builder for a single change "transaction", so it suffices to study its methods to understand all of the diff --git a/docs/user/manual.adoc b/docs/user/manual.adoc index 999a6437ab..c482fcbed0 100644 --- a/docs/user/manual.adoc +++ b/docs/user/manual.adoc @@ -479,7 +479,7 @@ You can follow instructions for installing < Date: Mon, 1 Aug 2022 14:26:20 +0200 Subject: [PATCH 0052/1945] Publish extension for 32-bit ARM systems --- .github/workflows/release.yaml | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 4e62f2cde2..abc0a34c5e 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -5,6 +5,8 @@ on: workflow_dispatch: + pull_request: + push: branches: - release @@ -36,6 +38,9 @@ jobs: - os: ubuntu-18.04 target: aarch64-unknown-linux-gnu code-target: linux-arm64 + - os: ubuntu-18.04 + target: arm-unknown-linux-gnueabihf + code-target: linux-armhf - os: macos-11 target: x86_64-apple-darwin code-target: darwin-x64 @@ -67,13 +72,17 @@ jobs: node-version: 14.x - name: Update apt repositories - if: matrix.target == 'aarch64-unknown-linux-gnu' + if: matrix.target == 'aarch64-unknown-linux-gnu' || matrix.target == 'gcc-arm-linux-gnueabihf' run: sudo apt-get update - - name: Install target toolchain + - name: Install AArch64 target toolchain if: matrix.target == 'aarch64-unknown-linux-gnu' run: sudo apt-get install gcc-aarch64-linux-gnu + - name: Install ARM target toolchain + if: matrix.target == 'gcc-arm-linux-gnueabihf' + run: sudo apt-get install gcc-arm-linux-gnueabihf + - name: Dist run: cargo xtask dist --client-patch-version ${{ github.run_number }} From caf8a6454a0ee4a045537cfdbe5ab859b3dc6b6d Mon Sep 17 00:00:00 2001 From: Jonas Schievink Date: Mon, 1 Aug 2022 15:09:45 +0200 Subject: [PATCH 0053/1945] Set linker --- .github/workflows/release.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index abc0a34c5e..56ee876cf1 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -20,6 +20,7 @@ env: FETCH_DEPTH: 0 # pull in the tags for the version string MACOSX_DEPLOYMENT_TARGET: 10.15 CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER: aarch64-linux-gnu-gcc + CARGO_TARGET_ARM_UNKNOWN_LINUX_GNUEABIHD_LINKER: arm-linux-gnueabihf-gcc jobs: dist: From 64090ee27cc630778fabd25a4d2ffd70329d62db Mon Sep 17 00:00:00 2001 From: Jonas Schievink Date: Mon, 1 Aug 2022 15:22:02 +0200 Subject: [PATCH 0054/1945] Fix target check --- .github/workflows/release.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 56ee876cf1..4845168282 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -73,7 +73,7 @@ jobs: node-version: 14.x - name: Update apt repositories - if: matrix.target == 'aarch64-unknown-linux-gnu' || matrix.target == 'gcc-arm-linux-gnueabihf' + if: matrix.target == 'aarch64-unknown-linux-gnu' || matrix.target == 'arm-unknown-linux-gnueabihf' run: sudo apt-get update - name: Install AArch64 target toolchain @@ -81,7 +81,7 @@ jobs: run: sudo apt-get install gcc-aarch64-linux-gnu - name: Install ARM target toolchain - if: matrix.target == 'gcc-arm-linux-gnueabihf' + if: matrix.target == 'arm-unknown-linux-gnueabihf' run: sudo apt-get install gcc-arm-linux-gnueabihf - name: Dist From b72ff95901bcd65931b600f36e3a79b92b6cab3f Mon Sep 17 00:00:00 2001 
From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Mon, 1 Aug 2022 16:46:18 +0300 Subject: [PATCH 0055/1945] Fix linker env var name --- .github/workflows/release.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 4845168282..7ffe2748b7 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -20,7 +20,7 @@ env: FETCH_DEPTH: 0 # pull in the tags for the version string MACOSX_DEPLOYMENT_TARGET: 10.15 CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER: aarch64-linux-gnu-gcc - CARGO_TARGET_ARM_UNKNOWN_LINUX_GNUEABIHD_LINKER: arm-linux-gnueabihf-gcc + CARGO_TARGET_ARM_UNKNOWN_LINUX_GNUEABIHF_LINKER: arm-linux-gnueabihf-gcc jobs: dist: From cfbada4e0fbdb0fbde515df21be934d12cad63bf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Mon, 1 Aug 2022 16:54:54 +0300 Subject: [PATCH 0056/1945] Upload arm-unknown-linux-gnueabihf build artifact --- .github/workflows/release.yaml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 7ffe2748b7..015614c8b4 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -214,6 +214,10 @@ jobs: with: name: dist-aarch64-unknown-linux-gnu path: dist + - uses: actions/download-artifact@v1 + with: + name: dist-arm-unknown-linux-gnueabihf + path: dist - uses: actions/download-artifact@v1 with: name: dist-x86_64-pc-windows-msvc From c71f1e70a87bfbaab7095a297628ff78acfc14df Mon Sep 17 00:00:00 2001 From: Jonas Schievink Date: Mon, 1 Aug 2022 16:16:22 +0200 Subject: [PATCH 0057/1945] Don't run on PRs --- .github/workflows/release.yaml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 015614c8b4..ca8eb1309d 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -5,8 +5,6 @@ on: workflow_dispatch: - pull_request: - push: branches: - release From 405dd77d30382f417f033e4feba2b2bd02ebe00e Mon Sep 17 00:00:00 2001 From: Dorian Scheidt Date: Wed, 20 Jul 2022 12:03:18 -0500 Subject: [PATCH 0058/1945] Support adding variants via structural editing --- crates/syntax/src/ast/edit_in_place.rs | 171 +++++++++++++++++++++---- crates/syntax/src/ast/make.rs | 5 +- 2 files changed, 148 insertions(+), 28 deletions(-) diff --git a/crates/syntax/src/ast/edit_in_place.rs b/crates/syntax/src/ast/edit_in_place.rs index e3e928aecd..8efd58e2c3 100644 --- a/crates/syntax/src/ast/edit_in_place.rs +++ b/crates/syntax/src/ast/edit_in_place.rs @@ -11,7 +11,7 @@ use crate::{ ted::{self, Position}, AstNode, AstToken, Direction, SyntaxKind::{ATTR, COMMENT, WHITESPACE}, - SyntaxNode, + SyntaxNode, SyntaxToken, }; use super::HasName; @@ -506,19 +506,7 @@ impl ast::RecordExprFieldList { let position = match self.fields().last() { Some(last_field) => { - let comma = match last_field - .syntax() - .siblings_with_tokens(Direction::Next) - .filter_map(|it| it.into_token()) - .find(|it| it.kind() == T![,]) - { - Some(it) => it, - None => { - let comma = ast::make::token(T![,]); - ted::insert(Position::after(last_field.syntax()), &comma); - comma - } - }; + let comma = get_or_insert_comma_after(last_field.syntax()); Position::after(comma) } None => match self.l_curly_token() { @@ -579,19 +567,8 @@ impl ast::RecordPatFieldList { let position = match self.fields().last() { Some(last_field) => { - let comma = match last_field - .syntax() - .siblings_with_tokens(Direction::Next) - 
.filter_map(|it| it.into_token()) - .find(|it| it.kind() == T![,]) - { - Some(it) => it, - None => { - let comma = ast::make::token(T![,]); - ted::insert(Position::after(last_field.syntax()), &comma); - comma - } - }; + let syntax = last_field.syntax(); + let comma = get_or_insert_comma_after(syntax); Position::after(comma) } None => match self.l_curly_token() { @@ -606,12 +583,53 @@ impl ast::RecordPatFieldList { } } } + +fn get_or_insert_comma_after(syntax: &SyntaxNode) -> SyntaxToken { + let comma = match syntax + .siblings_with_tokens(Direction::Next) + .filter_map(|it| it.into_token()) + .find(|it| it.kind() == T![,]) + { + Some(it) => it, + None => { + let comma = ast::make::token(T![,]); + ted::insert(Position::after(syntax), &comma); + comma + } + }; + comma +} + impl ast::StmtList { pub fn push_front(&self, statement: ast::Stmt) { ted::insert(Position::after(self.l_curly_token().unwrap()), statement.syntax()); } } +impl ast::VariantList { + pub fn add_variant(&self, variant: ast::Variant) { + let (indent, position) = match self.variants().last() { + Some(last_item) => ( + IndentLevel::from_node(last_item.syntax()), + Position::after(get_or_insert_comma_after(last_item.syntax())), + ), + None => match self.l_curly_token() { + Some(l_curly) => { + normalize_ws_between_braces(self.syntax()); + (IndentLevel::from_token(&l_curly) + 1, Position::after(&l_curly)) + } + None => (IndentLevel::single(), Position::last_child_of(self.syntax())), + }, + }; + let elements: Vec> = vec![ + make::tokens::whitespace(&format!("{}{}", "\n", indent)).into(), + variant.syntax().clone().into(), + ast::make::token(T![,]).into(), + ]; + ted::insert_all(position, elements); + } +} + fn normalize_ws_between_braces(node: &SyntaxNode) -> Option<()> { let l = node .children_with_tokens() @@ -661,6 +679,9 @@ impl Indent for N {} mod tests { use std::fmt; + use stdx::trim_indent; + use test_utils::assert_eq_text; + use crate::SourceFile; use super::*; @@ -714,4 +735,100 @@ mod tests { }", ); } + + #[test] + fn add_variant_to_empty_enum() { + let variant = make::variant(make::name("Bar"), None).clone_for_update(); + + check_add_variant( + r#" +enum Foo {} +"#, + r#" +enum Foo { + Bar, +} +"#, + variant, + ); + } + + #[test] + fn add_variant_to_non_empty_enum() { + let variant = make::variant(make::name("Baz"), None).clone_for_update(); + + check_add_variant( + r#" +enum Foo { + Bar, +} +"#, + r#" +enum Foo { + Bar, + Baz, +} +"#, + variant, + ); + } + + #[test] + fn add_variant_with_tuple_field_list() { + let variant = make::variant( + make::name("Baz"), + Some(ast::FieldList::TupleFieldList(make::tuple_field_list(std::iter::once( + make::tuple_field(None, make::ty("bool")), + )))), + ) + .clone_for_update(); + + check_add_variant( + r#" +enum Foo { + Bar, +} +"#, + r#" +enum Foo { + Bar, + Baz(bool), +} +"#, + variant, + ); + } + + #[test] + fn add_variant_with_record_field_list() { + let variant = make::variant( + make::name("Baz"), + Some(ast::FieldList::RecordFieldList(make::record_field_list(std::iter::once( + make::record_field(None, make::name("x"), make::ty("bool")), + )))), + ) + .clone_for_update(); + + check_add_variant( + r#" +enum Foo { + Bar, +} +"#, + r#" +enum Foo { + Bar, + Baz { x: bool }, +} +"#, + variant, + ); + } + + fn check_add_variant(before: &str, expected: &str, variant: ast::Variant) { + let enum_ = ast_mut_from_text::(before); + enum_.variant_list().map(|it| it.add_variant(variant)); + let after = enum_.to_string(); + assert_eq_text!(&trim_indent(expected.trim()), 
&trim_indent(&after.trim())); + } } diff --git a/crates/syntax/src/ast/make.rs b/crates/syntax/src/ast/make.rs index 5908dda8e6..037de876d4 100644 --- a/crates/syntax/src/ast/make.rs +++ b/crates/syntax/src/ast/make.rs @@ -745,7 +745,10 @@ pub fn tuple_field(visibility: Option, ty: ast::Type) -> ast::T pub fn variant(name: ast::Name, field_list: Option) -> ast::Variant { let field_list = match field_list { None => String::new(), - Some(it) => format!("{}", it), + Some(it) => match it { + ast::FieldList::RecordFieldList(record) => format!(" {}", record), + ast::FieldList::TupleFieldList(tuple) => format!("{}", tuple), + }, }; ast_from_text(&format!("enum f {{ {}{} }}", name, field_list)) } From bea1fec7a2c949d6470f2182d34c5b349605f7a1 Mon Sep 17 00:00:00 2001 From: Dorian Scheidt Date: Wed, 20 Jul 2022 12:11:00 -0500 Subject: [PATCH 0059/1945] convert generate_enum_variant to use add_variant --- .../src/handlers/generate_enum_variant.rs | 20 +++++-------------- 1 file changed, 5 insertions(+), 15 deletions(-) diff --git a/crates/ide-assists/src/handlers/generate_enum_variant.rs b/crates/ide-assists/src/handlers/generate_enum_variant.rs index 4461fbd5ac..04620645e6 100644 --- a/crates/ide-assists/src/handlers/generate_enum_variant.rs +++ b/crates/ide-assists/src/handlers/generate_enum_variant.rs @@ -1,8 +1,8 @@ use hir::{HasSource, InFile}; use ide_db::assists::{AssistId, AssistKind}; use syntax::{ - ast::{self, edit::IndentLevel}, - AstNode, TextSize, + ast::{self, make}, + AstNode, }; use crate::assist_context::{AssistContext, Assists}; @@ -65,26 +65,16 @@ fn add_variant_to_accumulator( ) -> Option<()> { let db = ctx.db(); let InFile { file_id, value: enum_node } = adt.source(db)?.original_ast_node(db)?; - let enum_indent = IndentLevel::from_node(&enum_node.syntax()); - - let variant_list = enum_node.variant_list()?; - let offset = variant_list.syntax().text_range().end() - TextSize::of('}'); - let empty_enum = variant_list.variants().next().is_none(); + let variant = make::variant(make::name(&name_ref.text()), None); acc.add( AssistId("generate_enum_variant", AssistKind::Generate), "Generate variant", target, |builder| { builder.edit_file(file_id.original_file(db)); - let text = format!( - "{maybe_newline}{indent_1}{name},\n{enum_indent}", - maybe_newline = if empty_enum { "\n" } else { "" }, - indent_1 = IndentLevel(1), - name = name_ref, - enum_indent = enum_indent - ); - builder.insert(offset, text) + let node = builder.make_mut(enum_node); + node.variant_list().map(|it| it.add_variant(variant.clone_for_update())); }, ) } From 15d8049fa9a9cd3ecbd19401336c4c150606522c Mon Sep 17 00:00:00 2001 From: Dorian Scheidt Date: Wed, 20 Jul 2022 12:26:27 -0500 Subject: [PATCH 0060/1945] Support tuple fields in generate_enum_variant --- .../src/handlers/generate_enum_variant.rs | 115 +++++++++++++++++- 1 file changed, 111 insertions(+), 4 deletions(-) diff --git a/crates/ide-assists/src/handlers/generate_enum_variant.rs b/crates/ide-assists/src/handlers/generate_enum_variant.rs index 04620645e6..f671877e52 100644 --- a/crates/ide-assists/src/handlers/generate_enum_variant.rs +++ b/crates/ide-assists/src/handlers/generate_enum_variant.rs @@ -1,7 +1,7 @@ -use hir::{HasSource, InFile}; +use hir::{HasSource, HirDisplay, InFile}; use ide_db::assists::{AssistId, AssistKind}; use syntax::{ - ast::{self, make}, + ast::{self, make, HasArgList}, AstNode, }; @@ -50,7 +50,7 @@ pub(crate) fn generate_enum_variant(acc: &mut Assists, ctx: &AssistContext<'_>) ctx.sema.resolve_path(&path.qualifier()?) 
{ let target = path.syntax().text_range(); - return add_variant_to_accumulator(acc, ctx, target, e, &name_ref); + return add_variant_to_accumulator(acc, ctx, target, e, &name_ref, &path); } None @@ -62,11 +62,11 @@ fn add_variant_to_accumulator( target: syntax::TextRange, adt: hir::Enum, name_ref: &ast::NameRef, + path: &ast::Path, ) -> Option<()> { let db = ctx.db(); let InFile { file_id, value: enum_node } = adt.source(db)?.original_ast_node(db)?; - let variant = make::variant(make::name(&name_ref.text()), None); acc.add( AssistId("generate_enum_variant", AssistKind::Generate), "Generate variant", @@ -74,11 +74,53 @@ fn add_variant_to_accumulator( |builder| { builder.edit_file(file_id.original_file(db)); let node = builder.make_mut(enum_node); + let variant = make_variant(ctx, name_ref, &path); node.variant_list().map(|it| it.add_variant(variant.clone_for_update())); }, ) } +fn make_variant( + ctx: &AssistContext<'_>, + name_ref: &ast::NameRef, + path: &ast::Path, +) -> ast::Variant { + let field_list = make_field_list(ctx, path); + make::variant(make::name(&name_ref.text()), field_list) +} + +fn make_field_list(ctx: &AssistContext<'_>, path: &ast::Path) -> Option { + let scope = ctx.sema.scope(&path.syntax())?; + if let Some(call_expr) = + path.syntax().parent().and_then(|it| it.parent()).and_then(ast::CallExpr::cast) + { + make_tuple_field_list(call_expr, ctx, &scope) + } else { + None + } +} + +fn make_tuple_field_list( + call_expr: ast::CallExpr, + ctx: &AssistContext<'_>, + scope: &hir::SemanticsScope<'_>, +) -> Option { + let args = call_expr.arg_list()?.args(); + let tuple_fields = args.map(|arg| { + let ty = expr_ty(ctx, arg, &scope); + make::tuple_field(None, ty) + }); + Some(make::tuple_field_list(tuple_fields).into()) +} + +fn expr_ty(ctx: &AssistContext<'_>, arg: ast::Expr, scope: &hir::SemanticsScope<'_>) -> ast::Type { + let ty = ctx.sema.type_of_expr(&arg).map(|it| it.adjusted()); + let text = ty + .and_then(|it| it.display_source_code(ctx.db(), scope.module().into()).ok()) + .unwrap_or_else(|| "_".to_string()); + make::ty(&text) +} + #[cfg(test)] mod tests { use crate::tests::{check_assist, check_assist_not_applicable}; @@ -211,6 +253,71 @@ mod m { fn main() { m::Foo::Baz } +", + ) + } + + #[test] + fn associated_single_element_tuple() { + check_assist( + generate_enum_variant, + r" +enum Foo {} +fn main() { + Foo::Bar$0(true) +} +", + r" +enum Foo { + Bar(bool), +} +fn main() { + Foo::Bar(true) +} +", + ) + } + + #[test] + fn associated_single_element_tuple_unknown_type() { + check_assist( + generate_enum_variant, + r" +enum Foo {} +fn main() { + Foo::Bar$0(x) +} +", + r" +enum Foo { + Bar(_), +} +fn main() { + Foo::Bar(x) +} +", + ) + } + + #[test] + fn associated_multi_element_tuple() { + check_assist( + generate_enum_variant, + r" +struct Struct {} +enum Foo {} +fn main() { + Foo::Bar$0(true, x, Struct {}) +} +", + r" +struct Struct {} +enum Foo { + Bar(bool, _, Struct), +} +fn main() { + Foo::Bar(true, x, Struct {}) +} ", ) } From e4638def7aedd8d69ec2670abf6a08b876ad75d9 Mon Sep 17 00:00:00 2001 From: Dorian Scheidt Date: Wed, 20 Jul 2022 13:02:21 -0500 Subject: [PATCH 0061/1945] Support record fields in generate_enum_variant --- .../src/handlers/generate_enum_variant.rs | 165 +++++++++++++++++- 1 file changed, 156 insertions(+), 9 deletions(-) diff --git a/crates/ide-assists/src/handlers/generate_enum_variant.rs b/crates/ide-assists/src/handlers/generate_enum_variant.rs index f671877e52..afbfc74b72 100644 --- 
a/crates/ide-assists/src/handlers/generate_enum_variant.rs +++ b/crates/ide-assists/src/handlers/generate_enum_variant.rs @@ -32,8 +32,7 @@ use crate::assist_context::{AssistContext, Assists}; // } // ``` pub(crate) fn generate_enum_variant(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { - let path_expr: ast::PathExpr = ctx.find_node_at_offset()?; - let path = path_expr.path()?; + let path: ast::Path = ctx.find_node_at_offset()?; if ctx.sema.resolve_path(&path).is_some() { // No need to generate anything if the path resolves @@ -95,11 +94,48 @@ fn make_field_list(ctx: &AssistContext<'_>, path: &ast::Path) -> Option, + scope: &hir::SemanticsScope<'_>, +) -> Option { + let fields = record.record_expr_field_list()?.fields(); + let record_fields = fields.map(|field| { + let name = name_from_field(&field); + + let ty = field + .expr() + .and_then(|it| expr_ty(ctx, it, scope)) + .unwrap_or_else(make::ty_placeholder); + + make::record_field(None, name, ty) + }); + Some(make::record_field_list(record_fields).into()) +} + +fn name_from_field(field: &ast::RecordExprField) -> ast::Name { + let text = match field.name_ref() { + Some(it) => it.to_string(), + None => name_from_field_shorthand(field).unwrap_or("unknown".to_string()), + }; + make::name(&text) +} + +fn name_from_field_shorthand(field: &ast::RecordExprField) -> Option { + let path = match field.expr()? { + ast::Expr::PathExpr(path_expr) => path_expr.path(), + _ => None, + }?; + Some(path.as_single_name_ref()?.to_string()) +} + fn make_tuple_field_list( call_expr: ast::CallExpr, ctx: &AssistContext<'_>, @@ -107,18 +143,20 @@ fn make_tuple_field_list( ) -> Option { let args = call_expr.arg_list()?.args(); let tuple_fields = args.map(|arg| { - let ty = expr_ty(ctx, arg, &scope); + let ty = expr_ty(ctx, arg, &scope).unwrap_or_else(make::ty_placeholder); make::tuple_field(None, ty) }); Some(make::tuple_field_list(tuple_fields).into()) } -fn expr_ty(ctx: &AssistContext<'_>, arg: ast::Expr, scope: &hir::SemanticsScope<'_>) -> ast::Type { - let ty = ctx.sema.type_of_expr(&arg).map(|it| it.adjusted()); - let text = ty - .and_then(|it| it.display_source_code(ctx.db(), scope.module().into()).ok()) - .unwrap_or_else(|| "_".to_string()); - make::ty(&text) +fn expr_ty( + ctx: &AssistContext<'_>, + arg: ast::Expr, + scope: &hir::SemanticsScope<'_>, +) -> Option { + let ty = ctx.sema.type_of_expr(&arg).map(|it| it.adjusted())?; + let text = ty.display_source_code(ctx.db(), scope.module().into()).ok()?; + Some(make::ty(&text)) } #[cfg(test)] @@ -318,6 +356,115 @@ enum Foo { fn main() { Foo::Bar(true, x, Struct {}) } +", + ) + } + + #[test] + fn associated_record() { + check_assist( + generate_enum_variant, + r" +enum Foo {} +fn main() { + Foo::$0Bar { x: true } +} +", + r" +enum Foo { + Bar { x: bool }, +} +fn main() { + Foo::Bar { x: true } +} +", + ) + } + + #[test] + fn associated_record_unknown_type() { + check_assist( + generate_enum_variant, + r" +enum Foo {} +fn main() { + Foo::$0Bar { x: y } +} +", + r" +enum Foo { + Bar { x: _ }, +} +fn main() { + Foo::Bar { x: y } +} +", + ) + } + + #[test] + fn associated_record_field_shorthand() { + check_assist( + generate_enum_variant, + r" +enum Foo {} +fn main() { + let x = true; + Foo::$0Bar { x } +} +", + r" +enum Foo { + Bar { x: bool }, +} +fn main() { + let x = true; + Foo::Bar { x } +} +", + ) + } + + #[test] + fn associated_record_field_shorthand_unknown_type() { + check_assist( + generate_enum_variant, + r" +enum Foo {} +fn main() { + Foo::$0Bar { x } +} +", + r" +enum Foo { + Bar { x: _ 
}, +} +fn main() { + Foo::Bar { x } +} +", + ) + } + + #[test] + fn associated_record_field_multiple_fields() { + check_assist( + generate_enum_variant, + r" +struct Struct {} +enum Foo {} +fn main() { + Foo::$0Bar { x, y: x, s: Struct {} } +} +", + r" +struct Struct {} +enum Foo { + Bar { x: _, y: _, s: Struct }, +} +fn main() { + Foo::Bar { x, y: x, s: Struct {} } +} ", ) } From 111694d85bc64e3604599668c5b2db574e07abf0 Mon Sep 17 00:00:00 2001 From: Dorian Scheidt Date: Tue, 2 Aug 2022 11:54:00 -0400 Subject: [PATCH 0062/1945] Be more strict about supported Paths in generate_enum_variant PathType path parents don't support this assist --- .../src/handlers/generate_enum_variant.rs | 105 ++++++++++++++---- 1 file changed, 85 insertions(+), 20 deletions(-) diff --git a/crates/ide-assists/src/handlers/generate_enum_variant.rs b/crates/ide-assists/src/handlers/generate_enum_variant.rs index afbfc74b72..b3dd29b771 100644 --- a/crates/ide-assists/src/handlers/generate_enum_variant.rs +++ b/crates/ide-assists/src/handlers/generate_enum_variant.rs @@ -2,7 +2,7 @@ use hir::{HasSource, HirDisplay, InFile}; use ide_db::assists::{AssistId, AssistKind}; use syntax::{ ast::{self, make, HasArgList}, - AstNode, + match_ast, AstNode, SyntaxNode, }; use crate::assist_context::{AssistContext, Assists}; @@ -33,6 +33,7 @@ use crate::assist_context::{AssistContext, Assists}; // ``` pub(crate) fn generate_enum_variant(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { let path: ast::Path = ctx.find_node_at_offset()?; + let parent = path_parent(&path)?; if ctx.sema.resolve_path(&path).is_some() { // No need to generate anything if the path resolves @@ -49,19 +50,65 @@ pub(crate) fn generate_enum_variant(acc: &mut Assists, ctx: &AssistContext<'_>) ctx.sema.resolve_path(&path.qualifier()?) { let target = path.syntax().text_range(); - return add_variant_to_accumulator(acc, ctx, target, e, &name_ref, &path); + return add_variant_to_accumulator(acc, ctx, target, e, &name_ref, parent); } None } +#[derive(Debug)] +enum PathParent { + PathExpr(ast::PathExpr), + RecordExpr(ast::RecordExpr), + UseTree(ast::UseTree), +} + +impl PathParent { + fn syntax(&self) -> &SyntaxNode { + match self { + PathParent::PathExpr(it) => it.syntax(), + PathParent::RecordExpr(it) => it.syntax(), + PathParent::UseTree(it) => it.syntax(), + } + } + + fn make_field_list(&self, ctx: &AssistContext<'_>) -> Option { + let scope = ctx.sema.scope(self.syntax())?; + + match self { + PathParent::PathExpr(it) => { + if let Some(call_expr) = it.syntax().parent().and_then(ast::CallExpr::cast) { + make_tuple_field_list(call_expr, ctx, &scope) + } else { + None + } + } + PathParent::RecordExpr(it) => make_record_field_list(it, ctx, &scope), + PathParent::UseTree(_) => None, + } + } +} + +fn path_parent(path: &ast::Path) -> Option { + let parent = path.syntax().parent()?; + + match_ast! 
{ + match parent { + ast::PathExpr(it) => Some(PathParent::PathExpr(it)), + ast::RecordExpr(it) => Some(PathParent::RecordExpr(it)), + ast::UseTree(it) => Some(PathParent::UseTree(it)), + _ => None + } + } +} + fn add_variant_to_accumulator( acc: &mut Assists, ctx: &AssistContext<'_>, target: syntax::TextRange, adt: hir::Enum, name_ref: &ast::NameRef, - path: &ast::Path, + parent: PathParent, ) -> Option<()> { let db = ctx.db(); let InFile { file_id, value: enum_node } = adt.source(db)?.original_ast_node(db)?; @@ -73,7 +120,7 @@ fn add_variant_to_accumulator( |builder| { builder.edit_file(file_id.original_file(db)); let node = builder.make_mut(enum_node); - let variant = make_variant(ctx, name_ref, &path); + let variant = make_variant(ctx, name_ref, parent); node.variant_list().map(|it| it.add_variant(variant.clone_for_update())); }, ) @@ -82,27 +129,14 @@ fn add_variant_to_accumulator( fn make_variant( ctx: &AssistContext<'_>, name_ref: &ast::NameRef, - path: &ast::Path, + parent: PathParent, ) -> ast::Variant { - let field_list = make_field_list(ctx, path); + let field_list = parent.make_field_list(ctx); make::variant(make::name(&name_ref.text()), field_list) } -fn make_field_list(ctx: &AssistContext<'_>, path: &ast::Path) -> Option { - let scope = ctx.sema.scope(&path.syntax())?; - if let Some(call_expr) = - path.syntax().parent().and_then(|it| it.parent()).and_then(ast::CallExpr::cast) - { - make_tuple_field_list(call_expr, ctx, &scope) - } else if let Some(record_expr) = path.syntax().parent().and_then(ast::RecordExpr::cast) { - make_record_field_list(record_expr, ctx, &scope) - } else { - None - } -} - fn make_record_field_list( - record: ast::RecordExpr, + record: &ast::RecordExpr, ctx: &AssistContext<'_>, scope: &hir::SemanticsScope<'_>, ) -> Option { @@ -465,6 +499,37 @@ enum Foo { fn main() { Foo::Bar { x, y: x, s: Struct {} } } +", + ) + } + + #[test] + fn use_tree() { + check_assist( + generate_enum_variant, + r" +//- /main.rs +mod foo; +use foo::Foo::Bar$0; + +//- /foo.rs +enum Foo {} +", + r" +enum Foo { + Bar, +} +", + ) + } + + #[test] + fn not_applicable_for_path_type() { + check_assist_not_applicable( + generate_enum_variant, + r" +enum Foo {} +impl Foo::Bar$0 {} ", ) } From 1980c1192c7bcfb10e1ed380ee90bcaa7153d58d Mon Sep 17 00:00:00 2001 From: Dorian Scheidt Date: Tue, 2 Aug 2022 12:11:32 -0400 Subject: [PATCH 0063/1945] Support PathPat paths in generate_enum_variant --- .../src/handlers/generate_enum_variant.rs | 30 ++++++++++++++++++- 1 file changed, 29 insertions(+), 1 deletion(-) diff --git a/crates/ide-assists/src/handlers/generate_enum_variant.rs b/crates/ide-assists/src/handlers/generate_enum_variant.rs index b3dd29b771..35cd42908a 100644 --- a/crates/ide-assists/src/handlers/generate_enum_variant.rs +++ b/crates/ide-assists/src/handlers/generate_enum_variant.rs @@ -60,6 +60,7 @@ pub(crate) fn generate_enum_variant(acc: &mut Assists, ctx: &AssistContext<'_>) enum PathParent { PathExpr(ast::PathExpr), RecordExpr(ast::RecordExpr), + PathPat(ast::PathPat), UseTree(ast::UseTree), } @@ -68,6 +69,7 @@ impl PathParent { match self { PathParent::PathExpr(it) => it.syntax(), PathParent::RecordExpr(it) => it.syntax(), + PathParent::PathPat(it) => it.syntax(), PathParent::UseTree(it) => it.syntax(), } } @@ -84,7 +86,7 @@ impl PathParent { } } PathParent::RecordExpr(it) => make_record_field_list(it, ctx, &scope), - PathParent::UseTree(_) => None, + PathParent::UseTree(_) | PathParent::PathPat(_) => None, } } } @@ -96,6 +98,7 @@ fn path_parent(path: &ast::Path) -> Option { 
match parent { ast::PathExpr(it) => Some(PathParent::PathExpr(it)), ast::RecordExpr(it) => Some(PathParent::RecordExpr(it)), + ast::PathPat(it) => Some(PathParent::PathPat(it)), ast::UseTree(it) => Some(PathParent::UseTree(it)), _ => None } @@ -530,6 +533,31 @@ enum Foo { r" enum Foo {} impl Foo::Bar$0 {} +", + ) + } + + #[test] + fn path_pat() { + check_assist( + generate_enum_variant, + r" +enum Foo {} +fn foo(x: Foo) { + match x { + Foo::Bar$0 => + } +} +", + r" +enum Foo { + Bar, +} +fn foo(x: Foo) { + match x { + Foo::Bar => + } +} ", ) } From 9a447c04f6c07ec6b835b3577610cfaf3f7667cc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Wed, 3 Aug 2022 09:48:44 +0300 Subject: [PATCH 0064/1945] Use the release branch in xtask promote --- xtask/src/release.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/xtask/src/release.rs b/xtask/src/release.rs index 17ada51564..eda8fceef0 100644 --- a/xtask/src/release.rs +++ b/xtask/src/release.rs @@ -81,7 +81,7 @@ impl flags::Promote { let date = date_iso(sh)?; let branch = format!("rust-analyzer-{date}"); cmd!(sh, "git switch -c {branch}").run()?; - cmd!(sh, "git subtree pull -P src/tools/rust-analyzer rust-analyzer master").run()?; + cmd!(sh, "git subtree pull -m ':arrow_up: rust-analyzer' -P src/tools/rust-analyzer rust-analyzer release").run()?; if !self.dry_run { cmd!(sh, "git push -u origin {branch}").run()?; From c203ac2cf5aaf7b7ed759e0f84426debc063d3e8 Mon Sep 17 00:00:00 2001 From: oxalica Date: Wed, 3 Aug 2022 17:35:31 +0800 Subject: [PATCH 0065/1945] Add more constructors for la-arena --- lib/la-arena/src/lib.rs | 10 ++++++++++ lib/la-arena/src/map.rs | 12 +++++++++++- 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/lib/la-arena/src/lib.rs b/lib/la-arena/src/lib.rs index dadee43b10..a6766edd41 100644 --- a/lib/la-arena/src/lib.rs +++ b/lib/la-arena/src/lib.rs @@ -208,6 +208,16 @@ impl Arena { Arena { data: Vec::new() } } + /// Create a new empty arena with specific capacity. + /// + /// ``` + /// let arena: la_arena::Arena = la_arena::Arena::with_capacity(42); + /// assert!(arena.is_empty()); + /// ``` + pub fn with_capacity(capacity: usize) -> Arena { + Arena { data: Vec::with_capacity(capacity) } + } + /// Empties the arena, removing all contained values. /// /// ``` diff --git a/lib/la-arena/src/map.rs b/lib/la-arena/src/map.rs index d27f086d37..b5c49b3435 100644 --- a/lib/la-arena/src/map.rs +++ b/lib/la-arena/src/map.rs @@ -11,6 +11,16 @@ pub struct ArenaMap { } impl ArenaMap, V> { + /// Creates a new empty map. + pub const fn new() -> Self { + Self { v: Vec::new(), _ty: PhantomData } + } + + /// Create a new empty map with specific capacity. + pub fn with_capacity(capacity: usize) -> Self { + Self { v: Vec::with_capacity(capacity), _ty: PhantomData } + } + /// Inserts a value associated with a given arena index into the map. pub fn insert(&mut self, idx: Idx, t: V) { let idx = Self::to_idx(idx); @@ -70,6 +80,6 @@ impl std::ops::IndexMut> for ArenaMap, T> { impl Default for ArenaMap, T> { fn default() -> Self { - ArenaMap { v: Vec::new(), _ty: PhantomData } + Self::new() } } From 10f870eff44f07e3e8dc241e5cc11e0fa4d50ae3 Mon Sep 17 00:00:00 2001 From: oxalica Date: Wed, 3 Aug 2022 18:16:05 +0800 Subject: [PATCH 0066/1945] Impl entry-API for la_arena::ArenaMap We enforce integral and `Copy` key, so some key-related functions are not necessary since user can just reuse the index for the `entry` call. 
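
A minimal usage sketch of the new entry API (the arena contents and the value type
below are illustrative only, not part of this change):

```rust
use la_arena::{Arena, ArenaMap};

let mut arena: Arena<&'static str> = Arena::new();
let idx = arena.alloc("item");

// Values are keyed by the same `Idx` the arena hands out.
let mut counts: ArenaMap<_, u32> = ArenaMap::new();
*counts.entry(idx).or_insert(0) += 1;      // vacant entry: insert 0, then bump it
counts.entry(idx).and_modify(|c| *c += 1); // occupied entry: modify in place
```

Because the key is an integral `Copy` index, `entry` takes it by value and the usual
key accessors on the entry types are omitted.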
--- lib/la-arena/src/lib.rs | 2 +- lib/la-arena/src/map.rs | 107 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 108 insertions(+), 1 deletion(-) diff --git a/lib/la-arena/src/lib.rs b/lib/la-arena/src/lib.rs index a6766edd41..a3fe59e946 100644 --- a/lib/la-arena/src/lib.rs +++ b/lib/la-arena/src/lib.rs @@ -12,7 +12,7 @@ use std::{ }; mod map; -pub use map::ArenaMap; +pub use map::{ArenaMap, Entry, OccupiedEntry, VacantEntry}; /// The raw index of a value in an arena. #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] diff --git a/lib/la-arena/src/map.rs b/lib/la-arena/src/map.rs index b5c49b3435..f74ec5d97c 100644 --- a/lib/la-arena/src/map.rs +++ b/lib/la-arena/src/map.rs @@ -56,6 +56,16 @@ impl ArenaMap, V> { self.v.iter().enumerate().filter_map(|(idx, o)| Some((Self::from_idx(idx), o.as_ref()?))) } + /// Gets the given key's corresponding entry in the map for in-place manipulation. + pub fn entry(&mut self, idx: Idx) -> Entry<'_, Idx, V> { + let idx = Self::to_idx(idx); + self.v.resize_with((idx + 1).max(self.v.len()), || None); + match &mut self.v[idx] { + slot @ Some(_) => Entry::Occupied(OccupiedEntry { slot, _ty: PhantomData }), + slot @ None => Entry::Vacant(VacantEntry { slot, _ty: PhantomData }), + } + } + fn to_idx(idx: Idx) -> usize { u32::from(idx.into_raw()) as usize } @@ -83,3 +93,100 @@ impl Default for ArenaMap, T> { Self::new() } } + +/// A view into a single entry in a map, which may either be vacant or occupied. +/// +/// This `enum` is constructed from the [`entry`] method on [`ArenaMap`]. +/// +/// [`entry`]: ArenaMap::entry +pub enum Entry<'a, IDX, V> { + /// A vacant entry. + Vacant(VacantEntry<'a, IDX, V>), + /// An occupied entry. + Occupied(OccupiedEntry<'a, IDX, V>), +} + +impl<'a, IDX, V> Entry<'a, IDX, V> { + /// Ensures a value is in the entry by inserting the default if empty, and returns a mutable reference to + /// the value in the entry. + pub fn or_insert(self, default: V) -> &'a mut V { + match self { + Self::Vacant(ent) => ent.insert(default), + Self::Occupied(ent) => ent.into_mut(), + } + } + + /// Ensures a value is in the entry by inserting the result of the default function if empty, and returns + /// a mutable reference to the value in the entry. + pub fn or_insert_with V>(self, default: F) -> &'a mut V { + match self { + Self::Vacant(ent) => ent.insert(default()), + Self::Occupied(ent) => ent.into_mut(), + } + } + + /// Provides in-place mutable access to an occupied entry before any potential inserts into the map. + pub fn and_modify(mut self, f: F) -> Self { + if let Self::Occupied(ent) = &mut self { + f(ent.get_mut()); + } + self + } +} + +impl<'a, IDX, V> Entry<'a, IDX, V> +where + V: Default, +{ + /// Ensures a value is in the entry by inserting the default value if empty, and returns a mutable reference + /// to the value in the entry. + pub fn or_default(self) -> &'a mut V { + self.or_insert_with(Default::default) + } +} + +/// A view into an vacant entry in a [`ArenaMap`]. It is part of the [`Entry`] enum. +pub struct VacantEntry<'a, IDX, V> { + slot: &'a mut Option, + _ty: PhantomData, +} + +impl<'a, IDX, V> VacantEntry<'a, IDX, V> { + /// Sets the value of the entry with the `VacantEntry`’s key, and returns a mutable reference to it. + pub fn insert(self, value: V) -> &'a mut V { + self.slot.insert(value) + } +} + +/// A view into an occupied entry in a [`ArenaMap`]. It is part of the [`Entry`] enum. 
+pub struct OccupiedEntry<'a, IDX, V> { + slot: &'a mut Option, + _ty: PhantomData, +} + +impl<'a, IDX, V> OccupiedEntry<'a, IDX, V> { + /// Gets a reference to the value in the entry. + pub fn get(&self) -> &V { + self.slot.as_ref().expect("Occupied") + } + + /// Gets a mutable reference to the value in the entry. + pub fn get_mut(&mut self) -> &mut V { + self.slot.as_mut().expect("Occupied") + } + + /// Converts the entry into a mutable reference to its value. + pub fn into_mut(self) -> &'a mut V { + self.slot.as_mut().expect("Occupied") + } + + /// Sets the value of the entry with the `OccupiedEntry`’s key, and returns the entry’s old value. + pub fn insert(&mut self, value: V) -> V { + self.slot.replace(value).expect("Occupied") + } + + /// Takes the value of the entry out of the map, and returns it. + pub fn remove(self) -> V { + self.slot.take().expect("Occupied") + } +} From a8a6c160be157ee7fe130d2c99521a2f76d3b4be Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Wed, 3 Aug 2022 18:10:15 +0200 Subject: [PATCH 0067/1945] Use an empty expander for ignored non-attribute proc-macros --- crates/rust-analyzer/src/reload.rs | 26 ++++++++++++++++++++++---- 1 file changed, 22 insertions(+), 4 deletions(-) diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs index eaab275bc6..9a9a9dd4d8 100644 --- a/crates/rust-analyzer/src/reload.rs +++ b/crates/rust-analyzer/src/reload.rs @@ -621,7 +621,10 @@ pub(crate) fn load_proc_macro( }; let expander: Arc = if dummy_replace.iter().any(|replace| &**replace == name) { - Arc::new(DummyExpander) + match kind { + ProcMacroKind::Attr => Arc::new(IdentityExpander), + _ => Arc::new(EmptyExpander), + } } else { Arc::new(Expander(expander)) }; @@ -647,11 +650,11 @@ pub(crate) fn load_proc_macro( } } - /// Dummy identity expander, used for proc-macros that are deliberately ignored by the user. + /// Dummy identity expander, used for attribute proc-macros that are deliberately ignored by the user. #[derive(Debug)] - struct DummyExpander; + struct IdentityExpander; - impl ProcMacroExpander for DummyExpander { + impl ProcMacroExpander for IdentityExpander { fn expand( &self, subtree: &tt::Subtree, @@ -661,6 +664,21 @@ pub(crate) fn load_proc_macro( Ok(subtree.clone()) } } + + /// Empty expander, used for proc-macros that are deliberately ignored by the user. 
+ #[derive(Debug)] + struct EmptyExpander; + + impl ProcMacroExpander for EmptyExpander { + fn expand( + &self, + _: &tt::Subtree, + _: Option<&tt::Subtree>, + _: &Env, + ) -> Result { + Ok(tt::Subtree::default()) + } + } } pub(crate) fn should_refresh_for_change(path: &AbsPath, change_kind: ChangeKind) -> bool { From 46d6357994eb8ced835a69390dfbf01abed40ee1 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Wed, 3 Aug 2022 18:22:45 +0200 Subject: [PATCH 0068/1945] Add a setting to disable comment continuation in VSCode --- editors/code/package.json | 5 +++++ editors/code/src/config.ts | 14 +++++++++++--- editors/code/src/main.ts | 4 +++- 3 files changed, 19 insertions(+), 4 deletions(-) diff --git a/editors/code/package.json b/editors/code/package.json index a13798d8b3..39ff3eb085 100644 --- a/editors/code/package.json +++ b/editors/code/package.json @@ -380,6 +380,11 @@ "default": false, "type": "boolean" }, + "rust-analyzer.typing.continueCommentsOnNewline": { + "markdownDescription": "Whether to prefix newlines after comments with the corresponding comment prefix.", + "default": true, + "type": "boolean" + }, "$generated-start": {}, "rust-analyzer.assist.expressionFillDefault": { "markdownDescription": "Placeholder expression to use for missing expressions in assists.", diff --git a/editors/code/src/config.ts b/editors/code/src/config.ts index b04f18890b..1c58040d58 100644 --- a/editors/code/src/config.ts +++ b/editors/code/src/config.ts @@ -16,9 +16,13 @@ export class Config { readonly extensionId = "rust-lang.rust-analyzer"; readonly rootSection = "rust-analyzer"; - private readonly requiresWorkspaceReloadOpts = ["serverPath", "server"].map( - (opt) => `${this.rootSection}.${opt}` - ); + private readonly requiresWorkspaceReloadOpts = [ + "serverPath", + "server", + // FIXME: This shouldn't be here, changing this setting should reload + // `continueCommentsOnNewline` behavior without restart + "typing", + ].map((opt) => `${this.rootSection}.${opt}`); private readonly requiresReloadOpts = [ "cargo", "procMacro", @@ -140,6 +144,10 @@ export class Config { return this.get("restartServerOnConfigChange"); } + get typingContinueCommentsOnNewline() { + return this.get("typing.continueCommentsOnNewline"); + } + get debug() { let sourceFileMap = this.get | "auto">("debug.sourceFileMap"); if (sourceFileMap !== "auto") { diff --git a/editors/code/src/main.ts b/editors/code/src/main.ts index 9ae20ddc4a..d78b711a47 100644 --- a/editors/code/src/main.ts +++ b/editors/code/src/main.ts @@ -84,7 +84,9 @@ async function tryActivate(context: vscode.ExtensionContext): Promise From 5cb3e7a41b89b5612cefe9c688f0ffdff18d4df4 Mon Sep 17 00:00:00 2001 From: fprasx Date: Wed, 3 Aug 2022 14:44:21 -0400 Subject: [PATCH 0069/1945] Added fixup for match statements w/ missing parts Passes tests --- crates/hir-expand/src/fixup.rs | 111 ++++++++++++++++++++++++++++++++- 1 file changed, 110 insertions(+), 1 deletion(-) diff --git a/crates/hir-expand/src/fixup.rs b/crates/hir-expand/src/fixup.rs index e46f43a878..c875b23b2d 100644 --- a/crates/hir-expand/src/fixup.rs +++ b/crates/hir-expand/src/fixup.rs @@ -67,7 +67,6 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { preorder.skip_subtree(); continue; } - // In some other situations, we can fix things by just appending some tokens. let end_range = TextRange::empty(node.text_range().end()); match_ast! { @@ -195,6 +194,69 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { }, // FIXME: foo:: // FIXME: for, match etc. 
+ ast::MatchExpr(it) => { + if it.expr().is_none() { + let match_token = match it.match_token() { + Some(t) => t, + None => continue + }; + append.insert(match_token.into(), vec![ + SyntheticToken { + kind: SyntaxKind::IDENT, + text: "__ra_fixup".into(), + range: end_range, + id: EMPTY_ID + }, + ]); + } + if it.match_arm_list().is_none() { + // No match arms + append.insert(node.clone().into(), vec![ + SyntheticToken { + kind: SyntaxKind::L_CURLY, + text: "{".into(), + range: end_range, + id: EMPTY_ID, + }, + SyntheticToken { + kind: SyntaxKind::UNDERSCORE, + text: "_".into(), + range: end_range, + id: EMPTY_ID + }, + SyntheticToken { + kind: SyntaxKind::EQ, + text: "=".into(), + range: end_range, + id: EMPTY_ID + }, + SyntheticToken { + kind: SyntaxKind::R_ANGLE, + text: ">".into(), + range: end_range, + id: EMPTY_ID + }, + SyntheticToken { + kind: SyntaxKind::L_CURLY, + text: "{".into(), + range: end_range, + id: EMPTY_ID, + }, + SyntheticToken { + kind: SyntaxKind::R_CURLY, + text: "}".into(), + range: end_range, + id: EMPTY_ID, + }, + SyntheticToken { + kind: SyntaxKind::R_CURLY, + text: "}".into(), + range: end_range, + id: EMPTY_ID, + }, + ]); + } + }, _ => (), } } @@ -287,6 +349,53 @@ mod tests { assert_eq!(tt.to_string(), original_as_tt.to_string()); } + + #[test] + fn match_no_expr_no_arms() { + check( + r#" +fn foo() { + match +} +"#, + expect![[r#" +fn foo () {match __ra_fixup {_ => {}}} +"#]], + ) + } + + #[test] + fn match_expr_no_arms() { + check( + r#" +fn foo() { + match x { + + } +} +"#, + expect![[r#" +fn foo () {match x {}} +"#]], + ) + } + + #[test] + fn match_no_expr() { + check( + r#" +fn foo() { + match { + _ => {} + } +} +"#, + expect![[r#" +fn foo () {match __ra_fixup {_ => {}}} +"#]], + ) + } + #[test] fn incomplete_field_expr_1() { check( From d513b4c8baaf2c71e5f13ce790bd0083ccb81a64 Mon Sep 17 00:00:00 2001 From: fprasx Date: Wed, 3 Aug 2022 15:51:30 -0400 Subject: [PATCH 0070/1945] Added fixup for for loops w/ missing parts --- crates/hir-expand/src/fixup.rs | 78 +++++++++++++++++++++++++++++++++- 1 file changed, 77 insertions(+), 1 deletion(-) diff --git a/crates/hir-expand/src/fixup.rs b/crates/hir-expand/src/fixup.rs index c875b23b2d..ade28f27bf 100644 --- a/crates/hir-expand/src/fixup.rs +++ b/crates/hir-expand/src/fixup.rs @@ -193,7 +193,6 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { } }, // FIXME: foo:: - // FIXME: for, match etc. 
ast::MatchExpr(it) => { if it.expr().is_none() { let match_token = match it.match_token() { @@ -257,6 +256,42 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { ]); } }, + ast::ForExpr(it) => { + let for_token = match it.for_token() { + Some(token) => token, + None => continue + }; + + let [pat, in_token, iter] = [ + (SyntaxKind::UNDERSCORE, "_"), + (SyntaxKind::IN_KW, "in"), + (SyntaxKind::IDENT, "__ra_fixup") + ].map(|(kind, text)| SyntheticToken { kind, text: text.into(), range: end_range, id: EMPTY_ID}); + + if it.pat().is_none() && it.in_token().is_none() && it.iterable().is_none() { + append.insert(for_token.into(), vec![pat, in_token, iter]); + } + + // Tricky: add logic to add in just a pattern or iterable if not all + // the pieces are missing + + if it.loop_body().is_none() { + append.insert(node.clone().into(), vec![ + SyntheticToken { + kind: SyntaxKind::L_CURLY, + text: "{".into(), + range: end_range, + id: EMPTY_ID, + }, + SyntheticToken { + kind: SyntaxKind::R_CURLY, + text: "}".into(), + range: end_range, + id: EMPTY_ID, + }, + ]); + } + }, _ => (), } } @@ -349,6 +384,47 @@ mod tests { assert_eq!(tt.to_string(), original_as_tt.to_string()); } + #[test] + fn for_no_iter_no_body() { + check( + r#" +fn foo() { + for +} +"#, + expect![[r#" +fn foo () {for _ in __ra_fixup {}} +"#]], + ) + } + + #[test] + fn for_no_iter() { + check( + r#" +fn foo() { + for {} +} +"#, + expect![[r#" +fn foo () {for _ in __ra_fixup {}} +"#]], + ) + } + + #[test] + fn for_no_body() { + check( + r#" +fn foo() { + for bar in qux +} +"#, + expect![[r#" +fn foo () {for bar in qux {}} +"#]], + ) + } #[test] fn match_no_expr_no_arms() { From ef2eabbfa84fb11deda705df724b28ef3431256c Mon Sep 17 00:00:00 2001 From: fprasx Date: Wed, 3 Aug 2022 16:27:43 -0400 Subject: [PATCH 0071/1945] Tidy formatted --- crates/hir-expand/src/fixup.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/hir-expand/src/fixup.rs b/crates/hir-expand/src/fixup.rs index ade28f27bf..cd02c802e5 100644 --- a/crates/hir-expand/src/fixup.rs +++ b/crates/hir-expand/src/fixup.rs @@ -263,14 +263,14 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { }; let [pat, in_token, iter] = [ - (SyntaxKind::UNDERSCORE, "_"), - (SyntaxKind::IN_KW, "in"), + (SyntaxKind::UNDERSCORE, "_"), + (SyntaxKind::IN_KW, "in"), (SyntaxKind::IDENT, "__ra_fixup") ].map(|(kind, text)| SyntheticToken { kind, text: text.into(), range: end_range, id: EMPTY_ID}); if it.pat().is_none() && it.in_token().is_none() && it.iterable().is_none() { append.insert(for_token.into(), vec![pat, in_token, iter]); - } + } // Tricky: add logic to add in just a pattern or iterable if not all // the pieces are missing From 49dac4070cf731ad53bfe12e784975b38262d6ab Mon Sep 17 00:00:00 2001 From: Jean santos Date: Wed, 3 Aug 2022 17:37:11 -0300 Subject: [PATCH 0072/1945] on hover fallback error, adds ast::type as possible node --- crates/ide/src/hover.rs | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs index 59c97f2dcf..f74b981a28 100644 --- a/crates/ide/src/hover.rs +++ b/crates/ide/src/hover.rs @@ -235,7 +235,11 @@ fn hover_type_fallback( let node = token .parent_ancestors() .take_while(|it| !ast::Item::can_cast(it.kind())) - .find(|n| ast::Expr::can_cast(n.kind()) || ast::Pat::can_cast(n.kind()))?; + .find(|n| { + ast::Expr::can_cast(n.kind()) + || ast::Pat::can_cast(n.kind()) + || ast::Type::can_cast(n.kind()) + })?; let expr_or_pat = match_ast! 
{ match node { From df9d3db82f1d0dce5f74faf8ae0501a534953f84 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Thu, 4 Aug 2022 13:22:15 +0200 Subject: [PATCH 0073/1945] Trigger flycheck on all transitive dependencies as well --- crates/rust-analyzer/src/main_loop.rs | 32 ++++++++++++++++++++------- 1 file changed, 24 insertions(+), 8 deletions(-) diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs index 561c2d7aef..5c93874ca2 100644 --- a/crates/rust-analyzer/src/main_loop.rs +++ b/crates/rust-analyzer/src/main_loop.rs @@ -9,7 +9,8 @@ use std::{ use always_assert::always; use crossbeam_channel::{select, Receiver}; -use ide_db::base_db::{SourceDatabaseExt, VfsPath}; +use ide_db::base_db::{SourceDatabase, SourceDatabaseExt, VfsPath}; +use itertools::Itertools; use lsp_server::{Connection, Notification, Request}; use lsp_types::notification::Notification as _; use vfs::{ChangeKind, FileId}; @@ -727,9 +728,21 @@ impl GlobalState { let (vfs, _) = &*this.vfs.read(); if let Some(file_id) = vfs.file_id(&vfs_path) { let analysis = this.analysis_host.analysis(); - let crate_ids = analysis.crate_for(file_id)?; + // Crates containing or depending on the saved file + let crate_ids: Vec<_> = analysis + .crate_for(file_id)? + .into_iter() + .flat_map(|id| { + this.analysis_host + .raw_database() + .crate_graph() + .transitive_rev_deps(id) + }) + .sorted() + .unique() + .collect(); - let paths: Vec<_> = crate_ids + let crate_root_paths: Vec<_> = crate_ids .iter() .filter_map(|&crate_id| { analysis @@ -740,16 +753,17 @@ impl GlobalState { .transpose() }) .collect::>()?; - let paths: Vec<_> = paths.iter().map(Deref::deref).collect(); + let crate_root_paths: Vec<_> = + crate_root_paths.iter().map(Deref::deref).collect(); + // Find all workspaces that have at least one target containing the saved file let workspace_ids = this.workspaces.iter().enumerate().filter(|(_, ws)| match ws { project_model::ProjectWorkspace::Cargo { cargo, .. } => { cargo.packages().any(|pkg| { - cargo[pkg] - .targets - .iter() - .any(|&it| paths.contains(&cargo[it].root.as_path())) + cargo[pkg].targets.iter().any(|&it| { + crate_root_paths.contains(&cargo[it].root.as_path()) + }) }) } project_model::ProjectWorkspace::Json { project, .. } => project @@ -757,6 +771,8 @@ impl GlobalState { .any(|(c, _)| crate_ids.iter().any(|&crate_id| crate_id == c)), project_model::ProjectWorkspace::DetachedFiles { .. 
} => false, }); + + // Find and trigger corresponding flychecks for flycheck in &this.flycheck { for (id, _) in workspace_ids.clone() { if id == flycheck.id() { From 5698e51027a8f60e0fd7b8ae5f7c077b051197c6 Mon Sep 17 00:00:00 2001 From: Jean santos Date: Thu, 4 Aug 2022 09:28:34 -0300 Subject: [PATCH 0074/1945] tidy formatting --- crates/ide/src/hover.rs | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs index f74b981a28..784f85dbbe 100644 --- a/crates/ide/src/hover.rs +++ b/crates/ide/src/hover.rs @@ -232,10 +232,8 @@ fn hover_type_fallback( token: &SyntaxToken, original_token: &SyntaxToken, ) -> Option> { - let node = token - .parent_ancestors() - .take_while(|it| !ast::Item::can_cast(it.kind())) - .find(|n| { + let node = + token.parent_ancestors().take_while(|it| !ast::Item::can_cast(it.kind())).find(|n| { ast::Expr::can_cast(n.kind()) || ast::Pat::can_cast(n.kind()) || ast::Type::can_cast(n.kind()) From df7f755e3b08e58a30bdbb39685bea76be7762ba Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Thu, 4 Aug 2022 14:01:33 +0200 Subject: [PATCH 0075/1945] Don't flycheck while the workspace is being loaded --- crates/rust-analyzer/src/diagnostics.rs | 1 - crates/rust-analyzer/src/global_state.rs | 1 + crates/rust-analyzer/src/main_loop.rs | 5 ++++- 3 files changed, 5 insertions(+), 2 deletions(-) diff --git a/crates/rust-analyzer/src/diagnostics.rs b/crates/rust-analyzer/src/diagnostics.rs index 7917ced666..09150c77d7 100644 --- a/crates/rust-analyzer/src/diagnostics.rs +++ b/crates/rust-analyzer/src/diagnostics.rs @@ -72,7 +72,6 @@ impl DiagnosticCollection { let check_fixes = Arc::make_mut(&mut self.check_fixes); check_fixes.entry(flycheck_id).or_default().entry(file_id).or_default().extend(fix); diagnostics.push(diagnostic); - tracing::warn!(?flycheck_id, ?file_id, "add_check_diagnostic changes pushed"); self.changes.insert(file_id); } diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs index 2cd2044aef..55c4cfcf86 100644 --- a/crates/rust-analyzer/src/global_state.rs +++ b/crates/rust-analyzer/src/global_state.rs @@ -192,6 +192,7 @@ impl GlobalState { if let Some(path) = vfs.file_path(file.file_id).as_path() { let path = path.to_path_buf(); if reload::should_refresh_for_change(&path, file.change_kind) { + tracing::warn!("fetch-fiel_change"); self.fetch_workspaces_queue .request_op(format!("vfs file change: {}", path.display())); } diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs index 5c93874ca2..4ed34df01c 100644 --- a/crates/rust-analyzer/src/main_loop.rs +++ b/crates/rust-analyzer/src/main_loop.rs @@ -447,7 +447,10 @@ impl GlobalState { let memdocs_added_or_removed = self.mem_docs.take_changes(); if self.is_quiescent() { - if !was_quiescent { + if !was_quiescent + && !self.fetch_workspaces_queue.op_requested() + && !self.fetch_build_data_queue.op_requested() + { for flycheck in &self.flycheck { flycheck.update(); } From d6d8a1c18f29ad6402f9594e3908cbb6d80aa31c Mon Sep 17 00:00:00 2001 From: fprasx Date: Thu, 4 Aug 2022 09:28:25 -0400 Subject: [PATCH 0076/1945] Shortened fixup for match, added cases for for Previously added a blank _ => {} for match statements --- crates/hir-expand/src/fixup.rs | 53 ++++++++++++---------------------- 1 file changed, 18 insertions(+), 35 deletions(-) diff --git a/crates/hir-expand/src/fixup.rs b/crates/hir-expand/src/fixup.rs index cd02c802e5..58d73f2d6c 100644 --- a/crates/hir-expand/src/fixup.rs 
+++ b/crates/hir-expand/src/fixup.rs @@ -217,36 +217,6 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { range: end_range, id: EMPTY_ID, }, - SyntheticToken { - kind: SyntaxKind::UNDERSCORE, - text: "_".into(), - range: end_range, - id: EMPTY_ID - }, - SyntheticToken { - kind: SyntaxKind::EQ, - text: "=".into(), - range: end_range, - id: EMPTY_ID - }, - SyntheticToken { - kind: SyntaxKind::R_ANGLE, - text: ">".into(), - range: end_range, - id: EMPTY_ID - }, - SyntheticToken { - kind: SyntaxKind::L_CURLY, - text: "{".into(), - range: end_range, - id: EMPTY_ID, - }, - SyntheticToken { - kind: SyntaxKind::R_CURLY, - text: "}".into(), - range: end_range, - id: EMPTY_ID, - }, SyntheticToken { kind: SyntaxKind::R_CURLY, text: "}".into(), @@ -270,11 +240,12 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { if it.pat().is_none() && it.in_token().is_none() && it.iterable().is_none() { append.insert(for_token.into(), vec![pat, in_token, iter]); + } else if it.pat().is_none() { + append.insert(for_token.into(), vec![pat]); + } else if it.pat().is_none() && it.in_token().is_none() { + append.insert(for_token.into(), vec![pat, in_token]); } - // Tricky: add logic to add in just a pattern or iterable if not all - // the pieces are missing - if it.loop_body().is_none() { append.insert(node.clone().into(), vec![ SyntheticToken { @@ -398,6 +369,18 @@ fn foo () {for _ in __ra_fixup {}} ) } + fn for_no_iter_no_in() { + check( + r#" +fn foo() { + for _ {} +} +"#, + expect![[r#" +fn foo () {for _ in __ra_fixup {}} +"#]], + ) + } #[test] fn for_no_iter() { check( @@ -435,7 +418,7 @@ fn foo() { } "#, expect![[r#" -fn foo () {match __ra_fixup {_ => {}}} +fn foo () {match __ra_fixup {}} "#]], ) } @@ -467,7 +450,7 @@ fn foo() { } "#, expect![[r#" -fn foo () {match __ra_fixup {_ => {}}} +fn foo () {match __ra_fixup {}} "#]], ) } From ab44a811501c12c4e3c471387f02ad451034639f Mon Sep 17 00:00:00 2001 From: fprasx Date: Thu, 4 Aug 2022 10:43:09 -0400 Subject: [PATCH 0077/1945] Fixed up for loops, added fixme with problem https://github.com/rust-lang/rust-analyzer/pull/12937#discussion_r937633695 --- crates/hir-expand/src/fixup.rs | 36 +++++++++++++++++++--------------- 1 file changed, 20 insertions(+), 16 deletions(-) diff --git a/crates/hir-expand/src/fixup.rs b/crates/hir-expand/src/fixup.rs index 58d73f2d6c..46257b6bc4 100644 --- a/crates/hir-expand/src/fixup.rs +++ b/crates/hir-expand/src/fixup.rs @@ -240,10 +240,9 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { if it.pat().is_none() && it.in_token().is_none() && it.iterable().is_none() { append.insert(for_token.into(), vec![pat, in_token, iter]); + // does something funky -- see test case for_no_pat } else if it.pat().is_none() { append.insert(for_token.into(), vec![pat]); - } else if it.pat().is_none() && it.in_token().is_none() { - append.insert(for_token.into(), vec![pat, in_token]); } if it.loop_body().is_none() { @@ -356,7 +355,7 @@ mod tests { } #[test] - fn for_no_iter_no_body() { + fn just_for_token() { check( r#" fn foo() { @@ -369,20 +368,8 @@ fn foo () {for _ in __ra_fixup {}} ) } - fn for_no_iter_no_in() { - check( - r#" -fn foo() { - for _ {} -} -"#, - expect![[r#" -fn foo () {for _ in __ra_fixup {}} -"#]], - ) - } #[test] - fn for_no_iter() { + fn for_no_iter_pattern() { check( r#" fn foo() { @@ -409,6 +396,23 @@ fn foo () {for bar in qux {}} ) } + // FIXME: https://github.com/rust-lang/rust-analyzer/pull/12937#discussion_r937633695 + #[test] + fn for_no_pat() { + check( + r#" +fn foo() { + for in 
qux { + + } +} +"#, + expect![[r#" +fn foo () {__ra_fixup} +"#]], + ) + } + #[test] fn match_no_expr_no_arms() { check( From 859d467276bf45e621858069192d7a68c42d2a32 Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Fri, 5 Aug 2022 02:51:38 +0900 Subject: [PATCH 0078/1945] fix: make `concat!` work with char --- .../src/macro_expansion_tests/builtin_fn_macro.rs | 4 ++-- crates/hir-expand/src/builtin_fn_macro.rs | 14 ++++++++++++-- 2 files changed, 14 insertions(+), 4 deletions(-) diff --git a/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs b/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs index 92dffa7f37..32006c4b43 100644 --- a/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs +++ b/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs @@ -295,13 +295,13 @@ fn test_concat_expand() { #[rustc_builtin_macro] macro_rules! concat {} -fn main() { concat!("foo", "r", 0, r#"bar"#, "\n", false); } +fn main() { concat!("foo", "r", 0, r#"bar"#, "\n", false, '\n'); } "##, expect![[r##" #[rustc_builtin_macro] macro_rules! concat {} -fn main() { "foor0bar\nfalse"; } +fn main() { "foor0bar\nfalse\n"; } "##]], ); } diff --git a/crates/hir-expand/src/builtin_fn_macro.rs b/crates/hir-expand/src/builtin_fn_macro.rs index 76da7c9f1e..c21b35cdc0 100644 --- a/crates/hir-expand/src/builtin_fn_macro.rs +++ b/crates/hir-expand/src/builtin_fn_macro.rs @@ -357,6 +357,12 @@ fn unquote_str(lit: &tt::Literal) -> Option { token.value().map(|it| it.into_owned()) } +fn unquote_char(lit: &tt::Literal) -> Option { + let lit = ast::make::tokens::literal(&lit.to_string()); + let token = ast::Char::cast(lit)?; + token.value() +} + fn unquote_byte_string(lit: &tt::Literal) -> Option> { let lit = ast::make::tokens::literal(&lit.to_string()); let token = ast::ByteString::cast(lit)?; @@ -408,8 +414,12 @@ fn concat_expand( // concat works with string and char literals, so remove any quotes. // It also works with integer, float and boolean literals, so just use the rest // as-is. - let component = unquote_str(it).unwrap_or_else(|| it.text.to_string()); - text.push_str(&component); + if let Some(c) = unquote_char(it) { + text.push(c); + } else { + let component = unquote_str(it).unwrap_or_else(|| it.text.to_string()); + text.push_str(&component); + } } // handle boolean literals tt::TokenTree::Leaf(tt::Leaf::Ident(id)) From 30c4f9fa7d31cd60f1fcc5998af6c17f69178fae Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 5 Aug 2022 08:42:13 +0200 Subject: [PATCH 0079/1945] Update bug_report.md --- .github/ISSUE_TEMPLATE/bug_report.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index 1f246ed79b..5566b3d1f8 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -19,7 +19,7 @@ Before submitting, please make sure that you're not running into one of these kn Otherwise please try to provide information which will help us to fix the issue faster. Minimal reproducible examples with few dependencies are especially lovely <3. --> -**rust-analyzer version**: (eg. output of "rust-analyzer: Show RA Version" command) +**rust-analyzer version**: (eg. output of "rust-analyzer: Show RA Version" command, accessible in VSCode via Ctrl+Shift+P) **rustc version**: (eg. 
output of `rustc -V`) From d94a42d652f4c402891bd2b1004364d32bc41253 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 5 Aug 2022 09:04:27 +0200 Subject: [PATCH 0080/1945] Update bug_report.md --- .github/ISSUE_TEMPLATE/bug_report.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index 5566b3d1f8..a038dce324 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -19,7 +19,7 @@ Before submitting, please make sure that you're not running into one of these kn Otherwise please try to provide information which will help us to fix the issue faster. Minimal reproducible examples with few dependencies are especially lovely <3. --> -**rust-analyzer version**: (eg. output of "rust-analyzer: Show RA Version" command, accessible in VSCode via Ctrl+Shift+P) +**rust-analyzer version**: (eg. output of "rust-analyzer: Show RA Version" command, accessible in VSCode via Ctrl/⌘+Shift+P) **rustc version**: (eg. output of `rustc -V`) From 6a1737242b75ae96bae03885c3d3e75b61a3f8ed Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 5 Aug 2022 12:06:31 +0200 Subject: [PATCH 0081/1945] Don't switch workspace on vfs file changes from libraries When r-a starts up, it starts switching the workspace before all vfs events have been processed which causes us to switch workspace multiple times until all vfs changes have been processed. This scales with the size of the project and its dependencies. If workspace files from dependencies as well as the sysroot get loaded, we shouldn't switch the workspace as those have no impact on the project workspace. --- crates/rust-analyzer/src/global_state.rs | 34 +++++++++++-------- crates/rust-analyzer/src/reload.rs | 42 +++++++++++------------- crates/vfs-notify/src/lib.rs | 7 +++- 3 files changed, 47 insertions(+), 36 deletions(-) diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs index 932a31e08f..b5f6aef2e1 100644 --- a/crates/rust-analyzer/src/global_state.rs +++ b/crates/rust-analyzer/src/global_state.rs @@ -8,7 +8,7 @@ use std::{sync::Arc, time::Instant}; use crossbeam_channel::{unbounded, Receiver, Sender}; use flycheck::FlycheckHandle; use ide::{Analysis, AnalysisHost, Cancellable, Change, FileId}; -use ide_db::base_db::{CrateId, FileLoader, SourceDatabase}; +use ide_db::base_db::{CrateId, FileLoader, SourceDatabase, SourceDatabaseExt}; use lsp_types::{SemanticTokens, Url}; use parking_lot::{Mutex, RwLock}; use proc_macro_api::ProcMacroServer; @@ -176,7 +176,7 @@ impl GlobalState { pub(crate) fn process_changes(&mut self) -> bool { let _p = profile::span("GlobalState::process_changes"); - let mut fs_changes = Vec::new(); + let mut fs_refresh_changes = Vec::new(); // A file was added or deleted let mut has_structure_changes = false; @@ -192,11 +192,8 @@ impl GlobalState { if let Some(path) = vfs.file_path(file.file_id).as_path() { let path = path.to_path_buf(); if reload::should_refresh_for_change(&path, file.change_kind) { - tracing::warn!("fetch-fiel_change"); - self.fetch_workspaces_queue - .request_op(format!("vfs file change: {}", path.display())); + fs_refresh_changes.push((path, file.file_id)); } - fs_changes.push((path, file.change_kind)); if file.is_created_or_deleted() { has_structure_changes = true; } @@ -228,14 +225,25 @@ impl GlobalState { self.analysis_host.apply_change(change); - let raw_database = &self.analysis_host.raw_database(); - self.proc_macro_changed = - 
changed_files.iter().filter(|file| !file.is_created_or_deleted()).any(|file| { - let crates = raw_database.relevant_crates(file.file_id); - let crate_graph = raw_database.crate_graph(); + { + let raw_database = self.analysis_host.raw_database(); + let workspace_structure_change = + fs_refresh_changes.into_iter().find(|&(_, file_id)| { + !raw_database.source_root(raw_database.file_source_root(file_id)).is_library + }); + if let Some((path, _)) = workspace_structure_change { + self.fetch_workspaces_queue + .request_op(format!("workspace vfs file change: {}", path.display())); + } + self.proc_macro_changed = + changed_files.iter().filter(|file| !file.is_created_or_deleted()).any(|file| { + let crates = raw_database.relevant_crates(file.file_id); + let crate_graph = raw_database.crate_graph(); + + crates.iter().any(|&krate| crate_graph[krate].is_proc_macro) + }); + } - crates.iter().any(|&krate| crate_graph[krate].is_proc_macro) - }); true } diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs index c90291eb5e..49ccad71a1 100644 --- a/crates/rust-analyzer/src/reload.rs +++ b/crates/rust-analyzer/src/reload.rs @@ -196,10 +196,7 @@ impl GlobalState { } if let Err(error) = self.fetch_build_data_error() { - self.show_and_log_error( - "rust-analyzer failed to run build scripts".to_string(), - Some(error), - ); + self.show_and_log_error("failed to run build scripts".to_string(), Some(error)); } let workspaces = self @@ -308,6 +305,7 @@ impl GlobalState { if self.proc_macro_clients.is_empty() { if let Some((path, args)) = self.config.proc_macro_srv() { + tracing::info!("Spawning proc-macro servers"); self.proc_macro_clients = self .workspaces .iter() @@ -316,20 +314,20 @@ impl GlobalState { let mut path = path.clone(); if let ProjectWorkspace::Cargo { sysroot, .. 
} = ws { - tracing::info!("Found a cargo workspace..."); + tracing::debug!("Found a cargo workspace..."); if let Some(sysroot) = sysroot.as_ref() { - tracing::info!("Found a cargo workspace with a sysroot..."); + tracing::debug!("Found a cargo workspace with a sysroot..."); let server_path = sysroot.root().join("libexec").join(&standalone_server_name); if std::fs::metadata(&server_path).is_ok() { - tracing::info!( + tracing::debug!( "And the server exists at {}", server_path.display() ); path = server_path; args = vec![]; } else { - tracing::info!( + tracing::debug!( "And the server does not exist at {}", server_path.display() ); @@ -337,14 +335,10 @@ impl GlobalState { } } - tracing::info!( - "Using proc-macro server at {} with args {:?}", - path.display(), - args - ); + tracing::info!(?args, "Using proc-macro server at {}", path.display(),); ProcMacroServer::spawn(path.clone(), args.clone()).map_err(|err| { let error = format!( - "Failed to run proc_macro_srv from path {}, error: {:?}", + "Failed to run proc-macro server from path {}, error: {:?}", path.display(), err ); @@ -684,22 +678,26 @@ pub(crate) fn load_proc_macro( pub(crate) fn should_refresh_for_change(path: &AbsPath, change_kind: ChangeKind) -> bool { const IMPLICIT_TARGET_FILES: &[&str] = &["build.rs", "src/main.rs", "src/lib.rs"]; const IMPLICIT_TARGET_DIRS: &[&str] = &["src/bin", "examples", "tests", "benches"]; - let file_name = path.file_name().unwrap_or_default(); - if file_name == "Cargo.toml" || file_name == "Cargo.lock" { + let file_name = match path.file_name().unwrap_or_default().to_str() { + Some(it) => it, + None => return false, + }; + + if let "Cargo.toml" | "Cargo.lock" = file_name { return true; } if change_kind == ChangeKind::Modify { return false; } + + // .cargo/config{.toml} if path.extension().unwrap_or_default() != "rs" { - if (file_name == "config.toml" || file_name == "config") - && path.parent().map(|parent| parent.as_ref().ends_with(".cargo")) == Some(true) - { - return true; - } - return false; + let is_cargo_config = matches!(file_name, "config.toml" | "config") + && path.parent().map(|parent| parent.as_ref().ends_with(".cargo")).unwrap_or(false); + return is_cargo_config; } + if IMPLICIT_TARGET_FILES.iter().any(|it| path.as_ref().ends_with(it)) { return true; } diff --git a/crates/vfs-notify/src/lib.rs b/crates/vfs-notify/src/lib.rs index 4d33a9afb9..d6d9c66159 100644 --- a/crates/vfs-notify/src/lib.rs +++ b/crates/vfs-notify/src/lib.rs @@ -40,12 +40,15 @@ impl loader::Handle for NotifyHandle { .expect("failed to spawn thread"); NotifyHandle { sender, _thread: thread } } + fn set_config(&mut self, config: loader::Config) { self.sender.send(Message::Config(config)).unwrap(); } + fn invalidate(&mut self, path: AbsPathBuf) { self.sender.send(Message::Invalidate(path)).unwrap(); } + fn load_sync(&mut self, path: &AbsPath) -> Option> { read(path) } @@ -70,6 +73,7 @@ impl NotifyActor { fn new(sender: loader::Sender) -> NotifyActor { NotifyActor { sender, watched_entries: Vec::new(), watcher: None } } + fn next_event(&self, receiver: &Receiver) -> Option { let watcher_receiver = self.watcher.as_ref().map(|(_, receiver)| receiver); select! 
{ @@ -77,9 +81,10 @@ impl NotifyActor { recv(watcher_receiver.unwrap_or(&never())) -> it => Some(Event::NotifyEvent(it.unwrap())), } } + fn run(mut self, inbox: Receiver) { while let Some(event) = self.next_event(&inbox) { - tracing::debug!("vfs-notify event: {:?}", event); + tracing::debug!(?event, "vfs-notify event"); match event { Event::Message(msg) => match msg { Message::Config(config) => { From d6e78b04d00c9144b70b2477d21076b516d5fca7 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 5 Aug 2022 14:16:36 +0200 Subject: [PATCH 0082/1945] feat: Handle operators like their trait functions in the IDE --- crates/hir-expand/src/name.rs | 2 + crates/hir-ty/src/infer/expr.rs | 63 ++----------- crates/hir-ty/src/method_resolution.rs | 51 ++++++++++- crates/hir/src/semantics.rs | 40 ++++++++ crates/hir/src/source_analyzer.rs | 116 ++++++++++++++++++++++- crates/ide-db/src/defs.rs | 80 ++++++++++++++++ crates/ide/src/goto_definition.rs | 122 ++++++++++++++++++++++++- crates/ide/src/highlight_related.rs | 3 +- crates/ide/src/hover.rs | 12 ++- crates/ide/src/hover/tests.rs | 34 +++++++ crates/ide/src/moniker.rs | 2 +- crates/ide/src/static_index.rs | 2 +- 12 files changed, 459 insertions(+), 68 deletions(-) diff --git a/crates/hir-expand/src/name.rs b/crates/hir-expand/src/name.rs index 85b0a7735f..47d191822d 100644 --- a/crates/hir-expand/src/name.rs +++ b/crates/hir-expand/src/name.rs @@ -381,6 +381,7 @@ pub mod known { bitor, bitxor_assign, bitxor, + branch, deref_mut, deref, div_assign, @@ -396,6 +397,7 @@ pub mod known { not, owned_box, partial_ord, + poll, r#fn, rem_assign, rem, diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs index d164e64a8b..2a13106390 100644 --- a/crates/hir-ty/src/infer/expr.rs +++ b/crates/hir-ty/src/infer/expr.rs @@ -10,13 +10,13 @@ use chalk_ir::{ cast::Cast, fold::Shift, DebruijnIndex, GenericArgData, Mutability, TyVariableKind, }; use hir_def::{ - expr::{ArithOp, Array, BinaryOp, CmpOp, Expr, ExprId, Literal, Ordering, Statement, UnaryOp}, + expr::{ArithOp, Array, BinaryOp, CmpOp, Expr, ExprId, Literal, Statement, UnaryOp}, generics::TypeOrConstParamData, path::{GenericArg, GenericArgs}, resolver::resolver_for_expr, - ConstParamId, FieldId, FunctionId, ItemContainerId, Lookup, + ConstParamId, FieldId, ItemContainerId, Lookup, }; -use hir_expand::name::{name, Name}; +use hir_expand::name::Name; use stdx::always; use syntax::ast::RangeOp; @@ -28,7 +28,7 @@ use crate::{ const_or_path_to_chalk, generic_arg_to_chalk, lower_to_chalk_mutability, ParamLoweringMode, }, mapping::{from_chalk, ToChalk}, - method_resolution::{self, VisibleFromModule}, + method_resolution::{self, lang_names_for_bin_op, VisibleFromModule}, primitive::{self, UintTy}, static_lifetime, to_chalk_trait_id, utils::{generics, Generics}, @@ -947,7 +947,9 @@ impl<'a> InferenceContext<'a> { let lhs_ty = self.infer_expr(lhs, &lhs_expectation); let rhs_ty = self.table.new_type_var(); - let func = self.resolve_binop_method(op); + let func = lang_names_for_bin_op(op).and_then(|(name, lang_item)| { + self.db.trait_data(self.resolve_lang_item(lang_item)?.as_trait()?).method_by_name(&name) + }); let func = match func { Some(func) => func, None => { @@ -1473,55 +1475,4 @@ impl<'a> InferenceContext<'a> { }, }) } - - fn resolve_binop_method(&self, op: BinaryOp) -> Option { - let (name, lang_item) = match op { - BinaryOp::LogicOp(_) => return None, - BinaryOp::ArithOp(aop) => match aop { - ArithOp::Add => (name!(add), name!(add)), - ArithOp::Mul => (name!(mul), name!(mul)), - 
ArithOp::Sub => (name!(sub), name!(sub)), - ArithOp::Div => (name!(div), name!(div)), - ArithOp::Rem => (name!(rem), name!(rem)), - ArithOp::Shl => (name!(shl), name!(shl)), - ArithOp::Shr => (name!(shr), name!(shr)), - ArithOp::BitXor => (name!(bitxor), name!(bitxor)), - ArithOp::BitOr => (name!(bitor), name!(bitor)), - ArithOp::BitAnd => (name!(bitand), name!(bitand)), - }, - BinaryOp::Assignment { op: Some(aop) } => match aop { - ArithOp::Add => (name!(add_assign), name!(add_assign)), - ArithOp::Mul => (name!(mul_assign), name!(mul_assign)), - ArithOp::Sub => (name!(sub_assign), name!(sub_assign)), - ArithOp::Div => (name!(div_assign), name!(div_assign)), - ArithOp::Rem => (name!(rem_assign), name!(rem_assign)), - ArithOp::Shl => (name!(shl_assign), name!(shl_assign)), - ArithOp::Shr => (name!(shr_assign), name!(shr_assign)), - ArithOp::BitXor => (name!(bitxor_assign), name!(bitxor_assign)), - ArithOp::BitOr => (name!(bitor_assign), name!(bitor_assign)), - ArithOp::BitAnd => (name!(bitand_assign), name!(bitand_assign)), - }, - BinaryOp::CmpOp(cop) => match cop { - CmpOp::Eq { negated: false } => (name!(eq), name!(eq)), - CmpOp::Eq { negated: true } => (name!(ne), name!(eq)), - CmpOp::Ord { ordering: Ordering::Less, strict: false } => { - (name!(le), name!(partial_ord)) - } - CmpOp::Ord { ordering: Ordering::Less, strict: true } => { - (name!(lt), name!(partial_ord)) - } - CmpOp::Ord { ordering: Ordering::Greater, strict: false } => { - (name!(ge), name!(partial_ord)) - } - CmpOp::Ord { ordering: Ordering::Greater, strict: true } => { - (name!(gt), name!(partial_ord)) - } - }, - BinaryOp::Assignment { op: None } => return None, - }; - - let trait_ = self.resolve_lang_item(lang_item)?.as_trait()?; - - self.db.trait_data(trait_).method_by_name(&name) - } } diff --git a/crates/hir-ty/src/method_resolution.rs b/crates/hir-ty/src/method_resolution.rs index 15df7b3dd2..64622545f8 100644 --- a/crates/hir-ty/src/method_resolution.rs +++ b/crates/hir-ty/src/method_resolution.rs @@ -336,7 +336,7 @@ impl InherentImpls { } } -pub fn inherent_impl_crates_query( +pub(crate) fn inherent_impl_crates_query( db: &dyn HirDatabase, krate: CrateId, fp: TyFingerprint, @@ -419,6 +419,55 @@ pub fn def_crates( } } +pub fn lang_names_for_bin_op(op: syntax::ast::BinaryOp) -> Option<(Name, Name)> { + use hir_expand::name; + use syntax::ast::{ArithOp, BinaryOp, CmpOp, Ordering}; + Some(match op { + BinaryOp::LogicOp(_) => return None, + BinaryOp::ArithOp(aop) => match aop { + ArithOp::Add => (name!(add), name!(add)), + ArithOp::Mul => (name!(mul), name!(mul)), + ArithOp::Sub => (name!(sub), name!(sub)), + ArithOp::Div => (name!(div), name!(div)), + ArithOp::Rem => (name!(rem), name!(rem)), + ArithOp::Shl => (name!(shl), name!(shl)), + ArithOp::Shr => (name!(shr), name!(shr)), + ArithOp::BitXor => (name!(bitxor), name!(bitxor)), + ArithOp::BitOr => (name!(bitor), name!(bitor)), + ArithOp::BitAnd => (name!(bitand), name!(bitand)), + }, + BinaryOp::Assignment { op: Some(aop) } => match aop { + ArithOp::Add => (name!(add_assign), name!(add_assign)), + ArithOp::Mul => (name!(mul_assign), name!(mul_assign)), + ArithOp::Sub => (name!(sub_assign), name!(sub_assign)), + ArithOp::Div => (name!(div_assign), name!(div_assign)), + ArithOp::Rem => (name!(rem_assign), name!(rem_assign)), + ArithOp::Shl => (name!(shl_assign), name!(shl_assign)), + ArithOp::Shr => (name!(shr_assign), name!(shr_assign)), + ArithOp::BitXor => (name!(bitxor_assign), name!(bitxor_assign)), + ArithOp::BitOr => (name!(bitor_assign), name!(bitor_assign)), 
+ ArithOp::BitAnd => (name!(bitand_assign), name!(bitand_assign)), + }, + BinaryOp::CmpOp(cop) => match cop { + CmpOp::Eq { negated: false } => (name!(eq), name!(eq)), + CmpOp::Eq { negated: true } => (name!(ne), name!(eq)), + CmpOp::Ord { ordering: Ordering::Less, strict: false } => { + (name!(le), name!(partial_ord)) + } + CmpOp::Ord { ordering: Ordering::Less, strict: true } => { + (name!(lt), name!(partial_ord)) + } + CmpOp::Ord { ordering: Ordering::Greater, strict: false } => { + (name!(ge), name!(partial_ord)) + } + CmpOp::Ord { ordering: Ordering::Greater, strict: true } => { + (name!(gt), name!(partial_ord)) + } + }, + BinaryOp::Assignment { op: None } => return None, + }) +} + /// Look up the method with the given name. pub(crate) fn lookup_method( ty: &Canonical, diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs index c84318b2fb..36756866f9 100644 --- a/crates/hir/src/semantics.rs +++ b/crates/hir/src/semantics.rs @@ -357,6 +357,26 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> { self.imp.resolve_method_call(call).map(Function::from) } + pub fn resolve_await_to_poll(&self, await_expr: &ast::AwaitExpr) -> Option { + self.imp.resolve_await_to_poll(await_expr).map(Function::from) + } + + pub fn resolve_prefix_expr(&self, prefix_expr: &ast::PrefixExpr) -> Option { + self.imp.resolve_prefix_expr(prefix_expr).map(Function::from) + } + + pub fn resolve_index_expr(&self, index_expr: &ast::IndexExpr) -> Option { + self.imp.resolve_index_expr(index_expr).map(Function::from) + } + + pub fn resolve_bin_expr(&self, bin_expr: &ast::BinExpr) -> Option { + self.imp.resolve_bin_expr(bin_expr).map(Function::from) + } + + pub fn resolve_try_expr(&self, try_expr: &ast::TryExpr) -> Option { + self.imp.resolve_try_expr(try_expr).map(Function::from) + } + pub fn resolve_method_call_as_callable(&self, call: &ast::MethodCallExpr) -> Option { self.imp.resolve_method_call_as_callable(call) } @@ -1066,6 +1086,26 @@ impl<'db> SemanticsImpl<'db> { self.analyze(call.syntax())?.resolve_method_call(self.db, call) } + pub fn resolve_await_to_poll(&self, await_expr: &ast::AwaitExpr) -> Option { + self.analyze(await_expr.syntax())?.resolve_await_to_poll(self.db, await_expr) + } + + pub fn resolve_prefix_expr(&self, prefix_expr: &ast::PrefixExpr) -> Option { + self.analyze(prefix_expr.syntax())?.resolve_prefix_expr(self.db, prefix_expr) + } + + pub fn resolve_index_expr(&self, index_expr: &ast::IndexExpr) -> Option { + self.analyze(index_expr.syntax())?.resolve_index_expr(self.db, index_expr) + } + + pub fn resolve_bin_expr(&self, bin_expr: &ast::BinExpr) -> Option { + self.analyze(bin_expr.syntax())?.resolve_bin_expr(self.db, bin_expr) + } + + pub fn resolve_try_expr(&self, try_expr: &ast::TryExpr) -> Option { + self.analyze(try_expr.syntax())?.resolve_try_expr(self.db, try_expr) + } + fn resolve_method_call_as_callable(&self, call: &ast::MethodCallExpr) -> Option { self.analyze(call.syntax())?.resolve_method_call_as_callable(self.db, call) } diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs index 1eb51b20c3..45879ff9c6 100644 --- a/crates/hir/src/source_analyzer.rs +++ b/crates/hir/src/source_analyzer.rs @@ -25,15 +25,17 @@ use hir_def::{ Lookup, ModuleDefId, VariantId, }; use hir_expand::{ - builtin_fn_macro::BuiltinFnLikeExpander, hygiene::Hygiene, name::AsName, HirFileId, InFile, + builtin_fn_macro::BuiltinFnLikeExpander, hygiene::Hygiene, name, name::AsName, HirFileId, + InFile, }; use hir_ty::{ diagnostics::{ record_literal_missing_fields, 
record_pattern_missing_fields, unsafe_expressions, UnsafeExpr, }, - method_resolution, Adjust, Adjustment, AutoBorrow, InferenceResult, Interner, Substitution, - TyExt, TyKind, TyLoweringContext, + method_resolution::{self, lang_names_for_bin_op}, + Adjust, Adjustment, AutoBorrow, InferenceResult, Interner, Substitution, TyExt, TyKind, + TyLoweringContext, }; use itertools::Itertools; use smallvec::SmallVec; @@ -255,8 +257,103 @@ impl SourceAnalyzer { ) -> Option { let expr_id = self.expr_id(db, &call.clone().into())?; let (f_in_trait, substs) = self.infer.as_ref()?.method_resolution(expr_id)?; - let f_in_impl = self.resolve_impl_method(db, f_in_trait, &substs); - f_in_impl.or(Some(f_in_trait)) + + Some(self.resolve_impl_method_or_trait_def(db, f_in_trait, &substs)) + } + + pub fn resolve_await_to_poll( + &self, + db: &dyn HirDatabase, + await_expr: &ast::AwaitExpr, + ) -> Option { + let expr_id = self.expr_id(db, &await_expr.expr()?.into())?; + let ty = self.infer.as_ref()?.type_of_expr.get(expr_id)?; + + let op_fn = db + .lang_item(self.resolver.krate(), hir_expand::name![poll].to_smol_str())? + .as_function()?; + let substs = hir_ty::TyBuilder::subst_for_def(db, op_fn).push(ty.clone()).build(); + + Some(self.resolve_impl_method_or_trait_def(db, op_fn, &substs)) + } + + pub fn resolve_prefix_expr( + &self, + db: &dyn HirDatabase, + prefix_expr: &ast::PrefixExpr, + ) -> Option { + let lang_item_name = match prefix_expr.op_kind()? { + ast::UnaryOp::Deref => name![deref], + ast::UnaryOp::Not => name![not], + ast::UnaryOp::Neg => name![neg], + }; + let expr_id = self.expr_id(db, &prefix_expr.expr()?.into())?; + let ty = self.infer.as_ref()?.type_of_expr.get(expr_id)?; + + let trait_ = + db.lang_item(self.resolver.krate(), lang_item_name.to_smol_str())?.as_trait()?; + let op_fn = db.trait_data(trait_).method_by_name(&lang_item_name)?; + let substs = hir_ty::TyBuilder::subst_for_def(db, op_fn).push(ty.clone()).build(); + + Some(self.resolve_impl_method_or_trait_def(db, op_fn, &substs)) + } + + pub fn resolve_index_expr( + &self, + db: &dyn HirDatabase, + index_expr: &ast::IndexExpr, + ) -> Option { + let base_expr_id = self.expr_id(db, &index_expr.base()?.into())?; + let index_expr_id = self.expr_id(db, &index_expr.index()?.into())?; + let base_ty = self.infer.as_ref()?.type_of_expr.get(base_expr_id)?; + let index_ty = self.infer.as_ref()?.type_of_expr.get(index_expr_id)?; + + let lang_item_name = name![index]; + + let trait_ = + db.lang_item(self.resolver.krate(), lang_item_name.to_smol_str())?.as_trait()?; + let op_fn = db.trait_data(trait_).method_by_name(&lang_item_name)?; + let substs = hir_ty::TyBuilder::subst_for_def(db, op_fn) + .push(base_ty.clone()) + .push(index_ty.clone()) + .build(); + Some(self.resolve_impl_method_or_trait_def(db, op_fn, &substs)) + } + + pub fn resolve_bin_expr( + &self, + db: &dyn HirDatabase, + binop_expr: &ast::BinExpr, + ) -> Option { + let op = binop_expr.op_kind()?; + let lhs_expr_id = self.expr_id(db, &binop_expr.lhs()?.into())?; + let rhs_expr_id = self.expr_id(db, &binop_expr.rhs()?.into())?; + let lhs = self.infer.as_ref()?.type_of_expr.get(lhs_expr_id)?; + let rhs = self.infer.as_ref()?.type_of_expr.get(rhs_expr_id)?; + + let op_fn = lang_names_for_bin_op(op).and_then(|(name, lang_item)| { + db.trait_data(db.lang_item(self.resolver.krate(), lang_item.to_smol_str())?.as_trait()?) 
+ .method_by_name(&name) + })?; + let substs = + hir_ty::TyBuilder::subst_for_def(db, op_fn).push(lhs.clone()).push(rhs.clone()).build(); + + Some(self.resolve_impl_method_or_trait_def(db, op_fn, &substs)) + } + + pub fn resolve_try_expr( + &self, + db: &dyn HirDatabase, + try_expr: &ast::TryExpr, + ) -> Option { + let expr_id = self.expr_id(db, &try_expr.expr()?.into())?; + let ty = self.infer.as_ref()?.type_of_expr.get(expr_id)?; + + let op_fn = + db.lang_item(self.resolver.krate(), name![branch].to_smol_str())?.as_function()?; + let substs = hir_ty::TyBuilder::subst_for_def(db, op_fn).push(ty.clone()).build(); + + Some(self.resolve_impl_method_or_trait_def(db, op_fn, &substs)) } pub(crate) fn resolve_field( @@ -666,6 +763,15 @@ impl SourceAnalyzer { let fun_data = db.function_data(func); method_resolution::lookup_impl_method(self_ty, db, trait_env, impled_trait, &fun_data.name) } + + fn resolve_impl_method_or_trait_def( + &self, + db: &dyn HirDatabase, + func: FunctionId, + substs: &Substitution, + ) -> FunctionId { + self.resolve_impl_method(db, func, substs).unwrap_or(func) + } } fn scope_for( diff --git a/crates/ide-db/src/defs.rs b/crates/ide-db/src/defs.rs index aeaca00ec6..6c13c03972 100644 --- a/crates/ide-db/src/defs.rs +++ b/crates/ide-db/src/defs.rs @@ -127,10 +127,12 @@ impl Definition { } } +// FIXME: IdentClass as a name no longer fits #[derive(Debug)] pub enum IdentClass { NameClass(NameClass), NameRefClass(NameRefClass), + Operator(OperatorClass), } impl IdentClass { @@ -147,6 +149,11 @@ impl IdentClass { .map(IdentClass::NameClass) .or_else(|| NameRefClass::classify_lifetime(sema, &lifetime).map(IdentClass::NameRefClass)) }, + ast::AwaitExpr(await_expr) => OperatorClass::classify_await(sema, &await_expr).map(IdentClass::Operator), + ast::BinExpr(bin_expr) => OperatorClass::classify_bin(sema, &bin_expr).map(IdentClass::Operator), + ast::IndexExpr(index_expr) => OperatorClass::classify_index(sema, &index_expr).map(IdentClass::Operator), + ast::PrefixExpr(prefix_expr) => OperatorClass::classify_prefix(sema,&prefix_expr).map(IdentClass::Operator), + ast::TryExpr(try_expr) => OperatorClass::classify_try(sema,&try_expr).map(IdentClass::Operator), _ => None, } } @@ -184,6 +191,33 @@ impl IdentClass { res.push(Definition::Local(local_ref)); res.push(Definition::Field(field_ref)); } + IdentClass::Operator( + OperatorClass::Await(func) + | OperatorClass::Prefix(func) + | OperatorClass::Bin(func) + | OperatorClass::Index(func) + | OperatorClass::Try(func), + ) => res.push(Definition::Function(func)), + } + res + } + + pub fn definitions_no_ops(self) -> ArrayVec { + let mut res = ArrayVec::new(); + match self { + IdentClass::NameClass(NameClass::Definition(it) | NameClass::ConstReference(it)) => { + res.push(it) + } + IdentClass::NameClass(NameClass::PatFieldShorthand { local_def, field_ref }) => { + res.push(Definition::Local(local_def)); + res.push(Definition::Field(field_ref)); + } + IdentClass::NameRefClass(NameRefClass::Definition(it)) => res.push(it), + IdentClass::NameRefClass(NameRefClass::FieldShorthand { local_ref, field_ref }) => { + res.push(Definition::Local(local_ref)); + res.push(Definition::Field(field_ref)); + } + IdentClass::Operator(_) => (), } res } @@ -332,6 +366,52 @@ impl NameClass { } } +#[derive(Debug)] +pub enum OperatorClass { + Await(Function), + Prefix(Function), + Index(Function), + Try(Function), + Bin(Function), +} + +impl OperatorClass { + pub fn classify_await( + sema: &Semantics<'_, RootDatabase>, + await_expr: &ast::AwaitExpr, + ) -> Option { + 
sema.resolve_await_to_poll(await_expr).map(OperatorClass::Await) + } + + pub fn classify_prefix( + sema: &Semantics<'_, RootDatabase>, + prefix_expr: &ast::PrefixExpr, + ) -> Option { + sema.resolve_prefix_expr(prefix_expr).map(OperatorClass::Prefix) + } + + pub fn classify_try( + sema: &Semantics<'_, RootDatabase>, + try_expr: &ast::TryExpr, + ) -> Option { + sema.resolve_try_expr(try_expr).map(OperatorClass::Try) + } + + pub fn classify_index( + sema: &Semantics<'_, RootDatabase>, + index_expr: &ast::IndexExpr, + ) -> Option { + sema.resolve_index_expr(index_expr).map(OperatorClass::Index) + } + + pub fn classify_bin( + sema: &Semantics<'_, RootDatabase>, + bin_expr: &ast::BinExpr, + ) -> Option { + sema.resolve_bin_expr(bin_expr).map(OperatorClass::Bin) + } +} + /// This is similar to [`NameClass`], but works for [`ast::NameRef`] rather than /// for [`ast::Name`]. Similarly, what looks like a reference in syntax is a /// reference most of the time, but there are a couple of annoying exceptions. diff --git a/crates/ide/src/goto_definition.rs b/crates/ide/src/goto_definition.rs index d9c97751c9..b2123b9a87 100644 --- a/crates/ide/src/goto_definition.rs +++ b/crates/ide/src/goto_definition.rs @@ -39,7 +39,11 @@ pub(crate) fn goto_definition( | T![super] | T![crate] | T![Self] - | COMMENT => 2, + | COMMENT => 4, + // index and prefix ops + T!['['] | T![']'] | T![?] | T![*] | T![-] | T![!] => 3, + kind if kind.is_keyword() => 2, + T!['('] | T![')'] => 2, kind if kind.is_trivia() => 0, _ => 1, })?; @@ -1628,6 +1632,122 @@ macro_rules! foo { } foo!(bar$0); +"#, + ); + } + + #[test] + fn goto_await_poll() { + check( + r#" +//- minicore: future + +struct MyFut; + +impl core::future::Future for MyFut { + type Output = (); + + fn poll( + //^^^^ + self: std::pin::Pin<&mut Self>, + cx: &mut std::task::Context<'_> + ) -> std::task::Poll + { + () + } +} + +fn f() { + MyFut.await$0; +} +"#, + ); + } + + #[test] + fn goto_try_op() { + check( + r#" +//- minicore: try + +struct Struct; + +impl core::ops::Try for Struct { + fn branch( + //^^^^^^ + self + ) {} +} + +fn f() { + Struct?$0; +} +"#, + ); + } + + #[test] + fn goto_index_op() { + check( + r#" +//- minicore: index + +struct Struct; + +impl core::ops::Index for Struct { + fn index( + //^^^^^ + self + ) {} +} + +fn f() { + Struct[0]$0; +} +"#, + ); + } + + #[test] + fn goto_prefix_op() { + check( + r#" +//- minicore: deref + +struct Struct; + +impl core::ops::Deref for Struct { + fn deref( + //^^^^^ + self + ) {} +} + +fn f() { + $0*Struct; +} +"#, + ); + } + + #[test] + fn goto_bin_op() { + check( + r#" +//- minicore: add + +struct Struct; + +impl core::ops::Add for Struct { + fn add( + //^^^ + self + ) {} +} + +fn f() { + Struct +$0 Struct; +} "#, ); } diff --git a/crates/ide/src/highlight_related.rs b/crates/ide/src/highlight_related.rs index f2d7029eab..f190da326e 100644 --- a/crates/ide/src/highlight_related.rs +++ b/crates/ide/src/highlight_related.rs @@ -333,7 +333,8 @@ fn cover_range(r0: Option, r1: Option) -> Option, token: SyntaxToken) -> FxHashSet { sema.descend_into_macros(token) .into_iter() - .filter_map(|token| IdentClass::classify_token(sema, &token).map(IdentClass::definitions)) + .filter_map(|token| IdentClass::classify_token(sema, &token)) + .map(IdentClass::definitions_no_ops) .flatten() .collect() } diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs index 784f85dbbe..3ada181f1e 100644 --- a/crates/ide/src/hover.rs +++ b/crates/ide/src/hover.rs @@ -9,7 +9,7 @@ use either::Either; use hir::{HasSource, Semantics}; use 
ide_db::{ base_db::FileRange, - defs::{Definition, IdentClass}, + defs::{Definition, IdentClass, OperatorClass}, famous_defs::FamousDefs, helpers::pick_best_token, FxIndexSet, RootDatabase, @@ -101,7 +101,10 @@ pub(crate) fn hover( let offset = range.start(); let original_token = pick_best_token(file.token_at_offset(offset), |kind| match kind { - IDENT | INT_NUMBER | LIFETIME_IDENT | T![self] | T![super] | T![crate] | T![Self] => 3, + IDENT | INT_NUMBER | LIFETIME_IDENT | T![self] | T![super] | T![crate] | T![Self] => 4, + // index and prefix ops + T!['['] | T![']'] | T![?] | T![*] | T![-] | T![!] => 3, + kind if kind.is_keyword() => 2, T!['('] | T![')'] => 2, kind if kind.is_trivia() => 0, _ => 1, @@ -136,6 +139,11 @@ pub(crate) fn hover( .filter_map(|token| { let node = token.parent()?; let class = IdentClass::classify_token(sema, token)?; + if let IdentClass::Operator(OperatorClass::Await(_)) = class { + // It's better for us to fall back to the keyword hover here, + // rendering poll is very confusing + return None; + } Some(class.definitions().into_iter().zip(iter::once(node).cycle())) }) .flatten() diff --git a/crates/ide/src/hover/tests.rs b/crates/ide/src/hover/tests.rs index 867d1f54d4..c6274264b8 100644 --- a/crates/ide/src/hover/tests.rs +++ b/crates/ide/src/hover/tests.rs @@ -5051,3 +5051,37 @@ fn f() { ```"#]], ); } + +#[test] +fn hover_deref() { + check( + r#" +//- minicore: deref + +struct Struct(usize); + +impl core::ops::Deref for Struct { + type Target = usize; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +fn f() { + $0*Struct(0); +} +"#, + expect![[r#" + *** + + ```rust + test::Struct + ``` + + ```rust + fn deref(&self) -> &Self::Target + ``` + "#]], + ); +} diff --git a/crates/ide/src/moniker.rs b/crates/ide/src/moniker.rs index 6bab9fa1eb..4f758967b4 100644 --- a/crates/ide/src/moniker.rs +++ b/crates/ide/src/moniker.rs @@ -90,7 +90,7 @@ pub(crate) fn moniker( .descend_into_macros(original_token.clone()) .into_iter() .filter_map(|token| { - IdentClass::classify_token(sema, &token).map(IdentClass::definitions).map(|it| { + IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops).map(|it| { it.into_iter().flat_map(|def| def_to_moniker(sema.db, def, current_crate)) }) }) diff --git a/crates/ide/src/static_index.rs b/crates/ide/src/static_index.rs index d74b640415..cc79ee55b7 100644 --- a/crates/ide/src/static_index.rs +++ b/crates/ide/src/static_index.rs @@ -204,7 +204,7 @@ impl StaticIndex<'_> { fn get_definition(sema: &Semantics<'_, RootDatabase>, token: SyntaxToken) -> Option { for token in sema.descend_into_macros(token) { - let def = IdentClass::classify_token(sema, &token).map(IdentClass::definitions); + let def = IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops); if let Some(&[x]) = def.as_deref() { return Some(x); } else { From 352d3c6e50f4c64cded06b51ef81d4ac83b4e34c Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 5 Aug 2022 14:28:36 +0200 Subject: [PATCH 0083/1945] Fix visibilities --- crates/hir/src/semantics.rs | 10 +++++----- crates/hir/src/source_analyzer.rs | 10 +++++----- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs index 36756866f9..416b6f5806 100644 --- a/crates/hir/src/semantics.rs +++ b/crates/hir/src/semantics.rs @@ -1086,23 +1086,23 @@ impl<'db> SemanticsImpl<'db> { self.analyze(call.syntax())?.resolve_method_call(self.db, call) } - pub fn resolve_await_to_poll(&self, await_expr: &ast::AwaitExpr) -> Option { + 
fn resolve_await_to_poll(&self, await_expr: &ast::AwaitExpr) -> Option { self.analyze(await_expr.syntax())?.resolve_await_to_poll(self.db, await_expr) } - pub fn resolve_prefix_expr(&self, prefix_expr: &ast::PrefixExpr) -> Option { + fn resolve_prefix_expr(&self, prefix_expr: &ast::PrefixExpr) -> Option { self.analyze(prefix_expr.syntax())?.resolve_prefix_expr(self.db, prefix_expr) } - pub fn resolve_index_expr(&self, index_expr: &ast::IndexExpr) -> Option { + fn resolve_index_expr(&self, index_expr: &ast::IndexExpr) -> Option { self.analyze(index_expr.syntax())?.resolve_index_expr(self.db, index_expr) } - pub fn resolve_bin_expr(&self, bin_expr: &ast::BinExpr) -> Option { + fn resolve_bin_expr(&self, bin_expr: &ast::BinExpr) -> Option { self.analyze(bin_expr.syntax())?.resolve_bin_expr(self.db, bin_expr) } - pub fn resolve_try_expr(&self, try_expr: &ast::TryExpr) -> Option { + fn resolve_try_expr(&self, try_expr: &ast::TryExpr) -> Option { self.analyze(try_expr.syntax())?.resolve_try_expr(self.db, try_expr) } diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs index 45879ff9c6..5e0bf7c5fb 100644 --- a/crates/hir/src/source_analyzer.rs +++ b/crates/hir/src/source_analyzer.rs @@ -261,7 +261,7 @@ impl SourceAnalyzer { Some(self.resolve_impl_method_or_trait_def(db, f_in_trait, &substs)) } - pub fn resolve_await_to_poll( + pub(crate) fn resolve_await_to_poll( &self, db: &dyn HirDatabase, await_expr: &ast::AwaitExpr, @@ -277,7 +277,7 @@ impl SourceAnalyzer { Some(self.resolve_impl_method_or_trait_def(db, op_fn, &substs)) } - pub fn resolve_prefix_expr( + pub(crate) fn resolve_prefix_expr( &self, db: &dyn HirDatabase, prefix_expr: &ast::PrefixExpr, @@ -298,7 +298,7 @@ impl SourceAnalyzer { Some(self.resolve_impl_method_or_trait_def(db, op_fn, &substs)) } - pub fn resolve_index_expr( + pub(crate) fn resolve_index_expr( &self, db: &dyn HirDatabase, index_expr: &ast::IndexExpr, @@ -320,7 +320,7 @@ impl SourceAnalyzer { Some(self.resolve_impl_method_or_trait_def(db, op_fn, &substs)) } - pub fn resolve_bin_expr( + pub(crate) fn resolve_bin_expr( &self, db: &dyn HirDatabase, binop_expr: &ast::BinExpr, @@ -341,7 +341,7 @@ impl SourceAnalyzer { Some(self.resolve_impl_method_or_trait_def(db, op_fn, &substs)) } - pub fn resolve_try_expr( + pub(crate) fn resolve_try_expr( &self, db: &dyn HirDatabase, try_expr: &ast::TryExpr, From 8aa50e08af4bd2ecc6b2132d2db48edabe51f352 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 5 Aug 2022 14:54:14 +0200 Subject: [PATCH 0084/1945] Simplify --- crates/hir/src/source_analyzer.rs | 58 +++++++++++++++++-------------- 1 file changed, 31 insertions(+), 27 deletions(-) diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs index 5e0bf7c5fb..f5e2e44307 100644 --- a/crates/hir/src/source_analyzer.rs +++ b/crates/hir/src/source_analyzer.rs @@ -25,8 +25,11 @@ use hir_def::{ Lookup, ModuleDefId, VariantId, }; use hir_expand::{ - builtin_fn_macro::BuiltinFnLikeExpander, hygiene::Hygiene, name, name::AsName, HirFileId, - InFile, + builtin_fn_macro::BuiltinFnLikeExpander, + hygiene::Hygiene, + name, + name::{AsName, Name}, + HirFileId, InFile, }; use hir_ty::{ diagnostics::{ @@ -34,7 +37,7 @@ use hir_ty::{ UnsafeExpr, }, method_resolution::{self, lang_names_for_bin_op}, - Adjust, Adjustment, AutoBorrow, InferenceResult, Interner, Substitution, TyExt, TyKind, + Adjust, Adjustment, AutoBorrow, InferenceResult, Interner, Substitution, Ty, TyExt, TyKind, TyLoweringContext, }; use itertools::Itertools; @@ -266,8 +269,7 @@ 
impl SourceAnalyzer { db: &dyn HirDatabase, await_expr: &ast::AwaitExpr, ) -> Option { - let expr_id = self.expr_id(db, &await_expr.expr()?.into())?; - let ty = self.infer.as_ref()?.type_of_expr.get(expr_id)?; + let ty = self.ty_of_expr(db, &await_expr.expr()?.into())?; let op_fn = db .lang_item(self.resolver.krate(), hir_expand::name![poll].to_smol_str())? @@ -287,12 +289,9 @@ impl SourceAnalyzer { ast::UnaryOp::Not => name![not], ast::UnaryOp::Neg => name![neg], }; - let expr_id = self.expr_id(db, &prefix_expr.expr()?.into())?; - let ty = self.infer.as_ref()?.type_of_expr.get(expr_id)?; + let ty = self.ty_of_expr(db, &prefix_expr.expr()?.into())?; - let trait_ = - db.lang_item(self.resolver.krate(), lang_item_name.to_smol_str())?.as_trait()?; - let op_fn = db.trait_data(trait_).method_by_name(&lang_item_name)?; + let op_fn = self.lang_trait_fn(db, &lang_item_name, &lang_item_name)?; let substs = hir_ty::TyBuilder::subst_for_def(db, op_fn).push(ty.clone()).build(); Some(self.resolve_impl_method_or_trait_def(db, op_fn, &substs)) @@ -303,16 +302,12 @@ impl SourceAnalyzer { db: &dyn HirDatabase, index_expr: &ast::IndexExpr, ) -> Option { - let base_expr_id = self.expr_id(db, &index_expr.base()?.into())?; - let index_expr_id = self.expr_id(db, &index_expr.index()?.into())?; - let base_ty = self.infer.as_ref()?.type_of_expr.get(base_expr_id)?; - let index_ty = self.infer.as_ref()?.type_of_expr.get(index_expr_id)?; + let base_ty = self.ty_of_expr(db, &index_expr.base()?.into())?; + let index_ty = self.ty_of_expr(db, &index_expr.index()?.into())?; let lang_item_name = name![index]; - let trait_ = - db.lang_item(self.resolver.krate(), lang_item_name.to_smol_str())?.as_trait()?; - let op_fn = db.trait_data(trait_).method_by_name(&lang_item_name)?; + let op_fn = self.lang_trait_fn(db, &lang_item_name, &lang_item_name)?; let substs = hir_ty::TyBuilder::subst_for_def(db, op_fn) .push(base_ty.clone()) .push(index_ty.clone()) @@ -326,15 +321,11 @@ impl SourceAnalyzer { binop_expr: &ast::BinExpr, ) -> Option { let op = binop_expr.op_kind()?; - let lhs_expr_id = self.expr_id(db, &binop_expr.lhs()?.into())?; - let rhs_expr_id = self.expr_id(db, &binop_expr.rhs()?.into())?; - let lhs = self.infer.as_ref()?.type_of_expr.get(lhs_expr_id)?; - let rhs = self.infer.as_ref()?.type_of_expr.get(rhs_expr_id)?; + let lhs = self.ty_of_expr(db, &binop_expr.lhs()?.into())?; + let rhs = self.ty_of_expr(db, &binop_expr.rhs()?.into())?; - let op_fn = lang_names_for_bin_op(op).and_then(|(name, lang_item)| { - db.trait_data(db.lang_item(self.resolver.krate(), lang_item.to_smol_str())?.as_trait()?) 
- .method_by_name(&name) - })?; + let op_fn = lang_names_for_bin_op(op) + .and_then(|(name, lang_item)| self.lang_trait_fn(db, &lang_item, &name))?; let substs = hir_ty::TyBuilder::subst_for_def(db, op_fn).push(lhs.clone()).push(rhs.clone()).build(); @@ -346,8 +337,7 @@ impl SourceAnalyzer { db: &dyn HirDatabase, try_expr: &ast::TryExpr, ) -> Option { - let expr_id = self.expr_id(db, &try_expr.expr()?.into())?; - let ty = self.infer.as_ref()?.type_of_expr.get(expr_id)?; + let ty = self.ty_of_expr(db, &try_expr.expr()?.into())?; let op_fn = db.lang_item(self.resolver.krate(), name![branch].to_smol_str())?.as_function()?; @@ -772,6 +762,20 @@ impl SourceAnalyzer { ) -> FunctionId { self.resolve_impl_method(db, func, substs).unwrap_or(func) } + + fn lang_trait_fn( + &self, + db: &dyn HirDatabase, + lang_trait: &Name, + method_name: &Name, + ) -> Option { + db.trait_data(db.lang_item(self.resolver.krate(), lang_trait.to_smol_str())?.as_trait()?) + .method_by_name(method_name) + } + + fn ty_of_expr(&self, db: &dyn HirDatabase, expr: &ast::Expr) -> Option<&Ty> { + self.infer.as_ref()?.type_of_expr.get(self.expr_id(db, &expr)?) + } } fn scope_for( From 6bf674c8e4ddcccb4fc7eaa8754bdc431c6cad33 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 5 Aug 2022 14:59:26 +0200 Subject: [PATCH 0085/1945] fix: Fix incorrect token pick rankings --- crates/ide/src/call_hierarchy.rs | 4 ++-- crates/ide/src/goto_implementation.rs | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/ide/src/call_hierarchy.rs b/crates/ide/src/call_hierarchy.rs index a18a6bea97..5a8cda8fb3 100644 --- a/crates/ide/src/call_hierarchy.rs +++ b/crates/ide/src/call_hierarchy.rs @@ -7,7 +7,7 @@ use ide_db::{ search::FileReference, FxIndexMap, RootDatabase, }; -use syntax::{ast, AstNode, SyntaxKind::NAME, TextRange}; +use syntax::{ast, AstNode, SyntaxKind::IDENT, TextRange}; use crate::{goto_definition, FilePosition, NavigationTarget, RangeInfo, TryToNav}; @@ -79,7 +79,7 @@ pub(crate) fn outgoing_calls(db: &RootDatabase, position: FilePosition) -> Optio let file = sema.parse(file_id); let file = file.syntax(); let token = pick_best_token(file.token_at_offset(position.offset), |kind| match kind { - NAME => 1, + IDENT => 1, _ => 0, })?; let mut calls = CallLocations::default(); diff --git a/crates/ide/src/goto_implementation.rs b/crates/ide/src/goto_implementation.rs index 04b51c8394..b3f711b6b8 100644 --- a/crates/ide/src/goto_implementation.rs +++ b/crates/ide/src/goto_implementation.rs @@ -30,7 +30,7 @@ pub(crate) fn goto_implementation( let original_token = pick_best_token(syntax.token_at_offset(position.offset), |kind| match kind { - IDENT | T![self] => 1, + IDENT | T![self] | INT_NUMBER => 1, _ => 0, })?; let range = original_token.text_range(); From 4d5873e92f50f3f08f98f04ade4768f9d9ab380f Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Fri, 5 Aug 2022 22:01:09 +0900 Subject: [PATCH 0086/1945] minor: align with rustc on escaping characters in macro expansion --- crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs | 4 ++-- crates/hir-expand/src/quote.rs | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs b/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs index 32006c4b43..4f626105a5 100644 --- a/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs +++ b/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs @@ -295,13 +295,13 @@ fn test_concat_expand() { #[rustc_builtin_macro] macro_rules! 
concat {} -fn main() { concat!("foo", "r", 0, r#"bar"#, "\n", false, '\n'); } +fn main() { concat!("foo", "r", 0, r#"bar"#, "\n", false, '"', '\0'); } "##, expect![[r##" #[rustc_builtin_macro] macro_rules! concat {} -fn main() { "foor0bar\nfalse\n"; } +fn main() { "foor0bar\nfalse\"\u{0}"; } "##]], ); } diff --git a/crates/hir-expand/src/quote.rs b/crates/hir-expand/src/quote.rs index 82f410ecda..e839e97bf0 100644 --- a/crates/hir-expand/src/quote.rs +++ b/crates/hir-expand/src/quote.rs @@ -196,8 +196,8 @@ impl_to_to_tokentrees! { tt::Literal => self { self }; tt::Ident => self { self }; tt::Punct => self { self }; - &str => self { tt::Literal{text: format!("\"{}\"", self.escape_debug()).into(), id: tt::TokenId::unspecified()}}; - String => self { tt::Literal{text: format!("\"{}\"", self.escape_debug()).into(), id: tt::TokenId::unspecified()}} + &str => self { tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), id: tt::TokenId::unspecified()}}; + String => self { tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), id: tt::TokenId::unspecified()}} } #[cfg(test)] From fd00bd4d3b96109dcd1b9fb2ebfeb3b747776ad1 Mon Sep 17 00:00:00 2001 From: Jonas Schievink Date: Fri, 5 Aug 2022 15:28:53 +0200 Subject: [PATCH 0087/1945] Document CLI flag stability --- crates/rust-analyzer/src/cli/flags.rs | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/crates/rust-analyzer/src/cli/flags.rs b/crates/rust-analyzer/src/cli/flags.rs index 19907ebddb..080e2fb443 100644 --- a/crates/rust-analyzer/src/cli/flags.rs +++ b/crates/rust-analyzer/src/cli/flags.rs @@ -10,6 +10,10 @@ xflags::xflags! { src "./src/cli/flags.rs" /// LSP server for the Rust programming language. + /// + /// Subcommands and their flags do not provide any stability guarantees and may be removed or + /// changed without notice. Top-level flags that are not are marked as [Unstable] provide + /// backwards-compatibility and may be relied on. cmd rust-analyzer { /// Verbosity level, can be repeated multiple times. repeated -v, --verbose @@ -21,7 +25,7 @@ xflags::xflags! { /// Flush log records to the file immediately. optional --no-log-buffering - /// Wait until a debugger is attached to (requires debug build). + /// [Unstable] Wait until a debugger is attached to (requires debug build). optional --wait-dbg default cmd lsp-server { From e8a9bc09a3f0ddf4d5f5706a6b101eceb0d90960 Mon Sep 17 00:00:00 2001 From: Adrian Stanciu Date: Sat, 6 Aug 2022 17:12:13 +0300 Subject: [PATCH 0088/1945] run stable fmt through rustup --- crates/rust-analyzer/tests/slow-tests/tidy.rs | 7 +++---- crates/sourcegen/src/lib.rs | 14 ++++++++------ 2 files changed, 11 insertions(+), 10 deletions(-) diff --git a/crates/rust-analyzer/tests/slow-tests/tidy.rs b/crates/rust-analyzer/tests/slow-tests/tidy.rs index 18f95925d9..58099a58de 100644 --- a/crates/rust-analyzer/tests/slow-tests/tidy.rs +++ b/crates/rust-analyzer/tests/slow-tests/tidy.rs @@ -13,9 +13,8 @@ use xshell::cmd; fn check_code_formatting() { let sh = &Shell::new().unwrap(); sh.change_dir(sourcegen::project_root()); - sh.set_var("RUSTUP_TOOLCHAIN", "stable"); - let out = cmd!(sh, "rustfmt --version").read().unwrap(); + let out = cmd!(sh, "rustup run stable rustfmt --version").read().unwrap(); if !out.contains("stable") { panic!( "Failed to run rustfmt from toolchain 'stable'. 
\ @@ -23,9 +22,9 @@ fn check_code_formatting() { ) } - let res = cmd!(sh, "cargo fmt -- --check").run(); + let res = cmd!(sh, "rustup run stable cargo fmt -- --check").run(); if res.is_err() { - let _ = cmd!(sh, "cargo fmt").run(); + let _ = cmd!(sh, "rustup run stable cargo fmt").run(); } res.unwrap() } diff --git a/crates/sourcegen/src/lib.rs b/crates/sourcegen/src/lib.rs index ce0224ec74..4e0ee63f32 100644 --- a/crates/sourcegen/src/lib.rs +++ b/crates/sourcegen/src/lib.rs @@ -136,7 +136,7 @@ impl fmt::Display for Location { } fn ensure_rustfmt(sh: &Shell) { - let version = cmd!(sh, "rustfmt --version").read().unwrap_or_default(); + let version = cmd!(sh, "rustup run stable rustfmt --version").read().unwrap_or_default(); if !version.contains("stable") { panic!( "Failed to run rustfmt from toolchain 'stable'. \ @@ -147,13 +147,15 @@ fn ensure_rustfmt(sh: &Shell) { pub fn reformat(text: String) -> String { let sh = Shell::new().unwrap(); - sh.set_var("RUSTUP_TOOLCHAIN", "stable"); ensure_rustfmt(&sh); let rustfmt_toml = project_root().join("rustfmt.toml"); - let mut stdout = cmd!(sh, "rustfmt --config-path {rustfmt_toml} --config fn_single_line=true") - .stdin(text) - .read() - .unwrap(); + let mut stdout = cmd!( + sh, + "rustup run stable rustfmt --config-path {rustfmt_toml} --config fn_single_line=true" + ) + .stdin(text) + .read() + .unwrap(); if !stdout.ends_with('\n') { stdout.push('\n'); } From 851f6db7f704587e4b5f0fb429a843d5f26f73c7 Mon Sep 17 00:00:00 2001 From: hkalbasi Date: Sat, 6 Aug 2022 19:21:51 +0430 Subject: [PATCH 0089/1945] Import serde derives on converting json to struct --- crates/ide-assists/src/assist_context.rs | 157 +------------- .../convert_tuple_struct_to_named_struct.rs | 10 +- .../src/handlers/destructure_tuple_binding.rs | 10 +- .../extract_struct_from_enum_variant.rs | 4 +- .../src/handlers/generate_deref.rs | 4 +- .../src/handlers/introduce_named_lifetime.rs | 4 +- .../src/handlers/remove_unused_param.rs | 5 +- .../replace_derive_with_manual_impl.rs | 4 +- crates/ide-assists/src/utils.rs | 4 +- crates/ide-db/src/source_change.rs | 136 ++++++++++++- .../src/handlers/inactive_code.rs | 2 +- .../src/handlers/json_is_not_rust.rs | 191 ++++++++++++++---- .../src/handlers/macro_error.rs | 2 +- crates/ide-diagnostics/src/lib.rs | 33 ++- crates/ide-diagnostics/src/tests.rs | 12 +- crates/rust-analyzer/src/cli/diagnostics.rs | 2 +- crates/rust-analyzer/src/config.rs | 1 + 17 files changed, 357 insertions(+), 224 deletions(-) diff --git a/crates/ide-assists/src/assist_context.rs b/crates/ide-assists/src/assist_context.rs index f9b4266142..8c7670e0cb 100644 --- a/crates/ide-assists/src/assist_context.rs +++ b/crates/ide-assists/src/assist_context.rs @@ -1,28 +1,20 @@ //! See [`AssistContext`]. 
-use std::mem; - use hir::Semantics; -use ide_db::{ - base_db::{AnchoredPathBuf, FileId, FileRange}, - SnippetCap, -}; -use ide_db::{ - label::Label, - source_change::{FileSystemEdit, SourceChange}, - RootDatabase, -}; +use ide_db::base_db::{FileId, FileRange}; +use ide_db::{label::Label, RootDatabase}; use syntax::{ algo::{self, find_node_at_offset, find_node_at_range}, - AstNode, AstToken, Direction, SourceFile, SyntaxElement, SyntaxKind, SyntaxNode, SyntaxNodePtr, - SyntaxToken, TextRange, TextSize, TokenAtOffset, + AstNode, AstToken, Direction, SourceFile, SyntaxElement, SyntaxKind, SyntaxToken, TextRange, + TextSize, TokenAtOffset, }; -use text_edit::{TextEdit, TextEditBuilder}; use crate::{ assist_config::AssistConfig, Assist, AssistId, AssistKind, AssistResolveStrategy, GroupLabel, }; +pub(crate) use ide_db::source_change::{SourceChangeBuilder, TreeMutator}; + /// `AssistContext` allows to apply an assist or check if it could be applied. /// /// Assists use a somewhat over-engineered approach, given the current needs. @@ -163,7 +155,7 @@ impl Assists { id: AssistId, label: impl Into, target: TextRange, - f: impl FnOnce(&mut AssistBuilder), + f: impl FnOnce(&mut SourceChangeBuilder), ) -> Option<()> { let mut f = Some(f); self.add_impl(None, id, label.into(), target, &mut |it| f.take().unwrap()(it)) @@ -175,7 +167,7 @@ impl Assists { id: AssistId, label: impl Into, target: TextRange, - f: impl FnOnce(&mut AssistBuilder), + f: impl FnOnce(&mut SourceChangeBuilder), ) -> Option<()> { let mut f = Some(f); self.add_impl(Some(group), id, label.into(), target, &mut |it| f.take().unwrap()(it)) @@ -187,7 +179,7 @@ impl Assists { id: AssistId, label: String, target: TextRange, - f: &mut dyn FnMut(&mut AssistBuilder), + f: &mut dyn FnMut(&mut SourceChangeBuilder), ) -> Option<()> { if !self.is_allowed(&id) { return None; @@ -195,7 +187,7 @@ impl Assists { let mut trigger_signature_help = false; let source_change = if self.resolve.should_resolve(&id) { - let mut builder = AssistBuilder::new(self.file); + let mut builder = SourceChangeBuilder::new(self.file); f(&mut builder); trigger_signature_help = builder.trigger_signature_help; Some(builder.finish()) @@ -216,132 +208,3 @@ impl Assists { } } } - -pub(crate) struct AssistBuilder { - edit: TextEditBuilder, - file_id: FileId, - source_change: SourceChange, - trigger_signature_help: bool, - - /// Maps the original, immutable `SyntaxNode` to a `clone_for_update` twin. 
- mutated_tree: Option, -} - -pub(crate) struct TreeMutator { - immutable: SyntaxNode, - mutable_clone: SyntaxNode, -} - -impl TreeMutator { - pub(crate) fn new(immutable: &SyntaxNode) -> TreeMutator { - let immutable = immutable.ancestors().last().unwrap(); - let mutable_clone = immutable.clone_for_update(); - TreeMutator { immutable, mutable_clone } - } - - pub(crate) fn make_mut(&self, node: &N) -> N { - N::cast(self.make_syntax_mut(node.syntax())).unwrap() - } - - pub(crate) fn make_syntax_mut(&self, node: &SyntaxNode) -> SyntaxNode { - let ptr = SyntaxNodePtr::new(node); - ptr.to_node(&self.mutable_clone) - } -} - -impl AssistBuilder { - pub(crate) fn new(file_id: FileId) -> AssistBuilder { - AssistBuilder { - edit: TextEdit::builder(), - file_id, - source_change: SourceChange::default(), - trigger_signature_help: false, - mutated_tree: None, - } - } - - pub(crate) fn edit_file(&mut self, file_id: FileId) { - self.commit(); - self.file_id = file_id; - } - - fn commit(&mut self) { - if let Some(tm) = self.mutated_tree.take() { - algo::diff(&tm.immutable, &tm.mutable_clone).into_text_edit(&mut self.edit) - } - - let edit = mem::take(&mut self.edit).finish(); - if !edit.is_empty() { - self.source_change.insert_source_edit(self.file_id, edit); - } - } - - pub(crate) fn make_mut(&mut self, node: N) -> N { - self.mutated_tree.get_or_insert_with(|| TreeMutator::new(node.syntax())).make_mut(&node) - } - /// Returns a copy of the `node`, suitable for mutation. - /// - /// Syntax trees in rust-analyzer are typically immutable, and mutating - /// operations panic at runtime. However, it is possible to make a copy of - /// the tree and mutate the copy freely. Mutation is based on interior - /// mutability, and different nodes in the same tree see the same mutations. - /// - /// The typical pattern for an assist is to find specific nodes in the read - /// phase, and then get their mutable couterparts using `make_mut` in the - /// mutable state. - pub(crate) fn make_syntax_mut(&mut self, node: SyntaxNode) -> SyntaxNode { - self.mutated_tree.get_or_insert_with(|| TreeMutator::new(&node)).make_syntax_mut(&node) - } - - /// Remove specified `range` of text. - pub(crate) fn delete(&mut self, range: TextRange) { - self.edit.delete(range) - } - /// Append specified `text` at the given `offset` - pub(crate) fn insert(&mut self, offset: TextSize, text: impl Into) { - self.edit.insert(offset, text.into()) - } - /// Append specified `snippet` at the given `offset` - pub(crate) fn insert_snippet( - &mut self, - _cap: SnippetCap, - offset: TextSize, - snippet: impl Into, - ) { - self.source_change.is_snippet = true; - self.insert(offset, snippet); - } - /// Replaces specified `range` of text with a given string. - pub(crate) fn replace(&mut self, range: TextRange, replace_with: impl Into) { - self.edit.replace(range, replace_with.into()) - } - /// Replaces specified `range` of text with a given `snippet`. 
- pub(crate) fn replace_snippet( - &mut self, - _cap: SnippetCap, - range: TextRange, - snippet: impl Into, - ) { - self.source_change.is_snippet = true; - self.replace(range, snippet); - } - pub(crate) fn replace_ast(&mut self, old: N, new: N) { - algo::diff(old.syntax(), new.syntax()).into_text_edit(&mut self.edit) - } - pub(crate) fn create_file(&mut self, dst: AnchoredPathBuf, content: impl Into) { - let file_system_edit = FileSystemEdit::CreateFile { dst, initial_contents: content.into() }; - self.source_change.push_file_system_edit(file_system_edit); - } - pub(crate) fn move_file(&mut self, src: FileId, dst: AnchoredPathBuf) { - let file_system_edit = FileSystemEdit::MoveFile { src, dst }; - self.source_change.push_file_system_edit(file_system_edit); - } - pub(crate) fn trigger_signature_help(&mut self) { - self.trigger_signature_help = true; - } - - fn finish(mut self) -> SourceChange { - self.commit(); - mem::take(&mut self.source_change) - } -} diff --git a/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs b/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs index 4ab8e93a29..d8f5227084 100644 --- a/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs +++ b/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs @@ -5,7 +5,7 @@ use syntax::{ match_ast, SyntaxNode, }; -use crate::{assist_context::AssistBuilder, AssistContext, AssistId, AssistKind, Assists}; +use crate::{assist_context::SourceChangeBuilder, AssistContext, AssistId, AssistKind, Assists}; // Assist: convert_tuple_struct_to_named_struct // @@ -80,7 +80,7 @@ pub(crate) fn convert_tuple_struct_to_named_struct( fn edit_struct_def( ctx: &AssistContext<'_>, - edit: &mut AssistBuilder, + edit: &mut SourceChangeBuilder, strukt: &Either, tuple_fields: ast::TupleFieldList, names: Vec, @@ -122,7 +122,7 @@ fn edit_struct_def( fn edit_struct_references( ctx: &AssistContext<'_>, - edit: &mut AssistBuilder, + edit: &mut SourceChangeBuilder, strukt: Either, names: &[ast::Name], ) { @@ -132,7 +132,7 @@ fn edit_struct_references( }; let usages = strukt_def.usages(&ctx.sema).include_self_refs().all(); - let edit_node = |edit: &mut AssistBuilder, node: SyntaxNode| -> Option<()> { + let edit_node = |edit: &mut SourceChangeBuilder, node: SyntaxNode| -> Option<()> { match_ast! 
{ match node { ast::TupleStructPat(tuple_struct_pat) => { @@ -203,7 +203,7 @@ fn edit_struct_references( fn edit_field_references( ctx: &AssistContext<'_>, - edit: &mut AssistBuilder, + edit: &mut SourceChangeBuilder, fields: impl Iterator, names: &[ast::Name], ) { diff --git a/crates/ide-assists/src/handlers/destructure_tuple_binding.rs b/crates/ide-assists/src/handlers/destructure_tuple_binding.rs index c1f57532bb..dc581ff3bd 100644 --- a/crates/ide-assists/src/handlers/destructure_tuple_binding.rs +++ b/crates/ide-assists/src/handlers/destructure_tuple_binding.rs @@ -8,7 +8,7 @@ use syntax::{ TextRange, }; -use crate::assist_context::{AssistBuilder, AssistContext, Assists}; +use crate::assist_context::{AssistContext, Assists, SourceChangeBuilder}; // Assist: destructure_tuple_binding // @@ -151,7 +151,7 @@ struct TupleData { } fn edit_tuple_assignment( ctx: &AssistContext<'_>, - builder: &mut AssistBuilder, + builder: &mut SourceChangeBuilder, data: &TupleData, in_sub_pattern: bool, ) { @@ -195,7 +195,7 @@ fn edit_tuple_assignment( fn edit_tuple_usages( data: &TupleData, - builder: &mut AssistBuilder, + builder: &mut SourceChangeBuilder, ctx: &AssistContext<'_>, in_sub_pattern: bool, ) { @@ -211,7 +211,7 @@ fn edit_tuple_usages( } fn edit_tuple_usage( ctx: &AssistContext<'_>, - builder: &mut AssistBuilder, + builder: &mut SourceChangeBuilder, usage: &FileReference, data: &TupleData, in_sub_pattern: bool, @@ -239,7 +239,7 @@ fn edit_tuple_usage( fn edit_tuple_field_usage( ctx: &AssistContext<'_>, - builder: &mut AssistBuilder, + builder: &mut SourceChangeBuilder, data: &TupleData, index: TupleIndex, ) { diff --git a/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs b/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs index a93648f2d3..dfb5652126 100644 --- a/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs +++ b/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs @@ -20,7 +20,7 @@ use syntax::{ SyntaxNode, T, }; -use crate::{assist_context::AssistBuilder, AssistContext, AssistId, AssistKind, Assists}; +use crate::{assist_context::SourceChangeBuilder, AssistContext, AssistId, AssistKind, Assists}; // Assist: extract_struct_from_enum_variant // @@ -374,7 +374,7 @@ fn apply_references( fn process_references( ctx: &AssistContext<'_>, - builder: &mut AssistBuilder, + builder: &mut SourceChangeBuilder, visited_modules: &mut FxHashSet, enum_module_def: &ModuleDef, variant_hir_name: &Name, diff --git a/crates/ide-assists/src/handlers/generate_deref.rs b/crates/ide-assists/src/handlers/generate_deref.rs index b9637ee8d7..b484635121 100644 --- a/crates/ide-assists/src/handlers/generate_deref.rs +++ b/crates/ide-assists/src/handlers/generate_deref.rs @@ -8,7 +8,7 @@ use syntax::{ }; use crate::{ - assist_context::{AssistBuilder, AssistContext, Assists}, + assist_context::{AssistContext, Assists, SourceChangeBuilder}, utils::generate_trait_impl_text, AssistId, AssistKind, }; @@ -120,7 +120,7 @@ fn generate_tuple_deref(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<() } fn generate_edit( - edit: &mut AssistBuilder, + edit: &mut SourceChangeBuilder, strukt: ast::Struct, field_type_syntax: &SyntaxNode, field_name: impl Display, diff --git a/crates/ide-assists/src/handlers/introduce_named_lifetime.rs b/crates/ide-assists/src/handlers/introduce_named_lifetime.rs index ce91dd2370..2fc754e3e5 100644 --- a/crates/ide-assists/src/handlers/introduce_named_lifetime.rs +++ b/crates/ide-assists/src/handlers/introduce_named_lifetime.rs 
@@ -5,7 +5,7 @@ use syntax::{ AstNode, TextRange, }; -use crate::{assist_context::AssistBuilder, AssistContext, AssistId, AssistKind, Assists}; +use crate::{assist_context::SourceChangeBuilder, AssistContext, AssistId, AssistKind, Assists}; static ASSIST_NAME: &str = "introduce_named_lifetime"; static ASSIST_LABEL: &str = "Introduce named lifetime"; @@ -140,7 +140,7 @@ enum NeedsLifetime { } impl NeedsLifetime { - fn make_mut(self, builder: &mut AssistBuilder) -> Self { + fn make_mut(self, builder: &mut SourceChangeBuilder) -> Self { match self { Self::SelfParam(it) => Self::SelfParam(builder.make_mut(it)), Self::RefType(it) => Self::RefType(builder.make_mut(it)), diff --git a/crates/ide-assists/src/handlers/remove_unused_param.rs b/crates/ide-assists/src/handlers/remove_unused_param.rs index 59ea94ea1f..bd2e8fbe38 100644 --- a/crates/ide-assists/src/handlers/remove_unused_param.rs +++ b/crates/ide-assists/src/handlers/remove_unused_param.rs @@ -8,7 +8,8 @@ use syntax::{ use SyntaxKind::WHITESPACE; use crate::{ - assist_context::AssistBuilder, utils::next_prev, AssistContext, AssistId, AssistKind, Assists, + assist_context::SourceChangeBuilder, utils::next_prev, AssistContext, AssistId, AssistKind, + Assists, }; // Assist: remove_unused_param @@ -88,7 +89,7 @@ pub(crate) fn remove_unused_param(acc: &mut Assists, ctx: &AssistContext<'_>) -> fn process_usages( ctx: &AssistContext<'_>, - builder: &mut AssistBuilder, + builder: &mut SourceChangeBuilder, file_id: FileId, references: Vec, arg_to_remove: usize, diff --git a/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs b/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs index bd50208da5..d139f78a6f 100644 --- a/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs +++ b/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs @@ -10,7 +10,7 @@ use syntax::{ }; use crate::{ - assist_context::{AssistBuilder, AssistContext, Assists}, + assist_context::{AssistContext, Assists, SourceChangeBuilder}, utils::{ add_trait_assoc_items_to_impl, filter_assoc_items, gen_trait_fn_body, generate_trait_impl_text, render_snippet, Cursor, DefaultMethods, @@ -224,7 +224,7 @@ fn impl_def_from_trait( } fn update_attribute( - builder: &mut AssistBuilder, + builder: &mut SourceChangeBuilder, old_derives: &[ast::Path], old_tree: &ast::TokenTree, old_trait_path: &ast::Path, diff --git a/crates/ide-assists/src/utils.rs b/crates/ide-assists/src/utils.rs index 3e61d0741d..103e3259fa 100644 --- a/crates/ide-assists/src/utils.rs +++ b/crates/ide-assists/src/utils.rs @@ -20,7 +20,7 @@ use syntax::{ SyntaxNode, TextRange, TextSize, T, }; -use crate::assist_context::{AssistBuilder, AssistContext}; +use crate::assist_context::{AssistContext, SourceChangeBuilder}; pub(crate) mod suggest_name; mod gen_trait_fn_body; @@ -484,7 +484,7 @@ fn generate_impl_text_inner(adt: &ast::Adt, trait_text: Option<&str>, code: &str } pub(crate) fn add_method_to_adt( - builder: &mut AssistBuilder, + builder: &mut SourceChangeBuilder, adt: &ast::Adt, impl_def: Option, method: &str, diff --git a/crates/ide-db/src/source_change.rs b/crates/ide-db/src/source_change.rs index 8132c73ef2..21314ad74e 100644 --- a/crates/ide-db/src/source_change.rs +++ b/crates/ide-db/src/source_change.rs @@ -3,12 +3,15 @@ //! //! It can be viewed as a dual for `Change`. 
-use std::{collections::hash_map::Entry, iter}; +use std::{collections::hash_map::Entry, iter, mem}; use base_db::{AnchoredPathBuf, FileId}; use rustc_hash::FxHashMap; use stdx::never; -use text_edit::TextEdit; +use syntax::{algo, AstNode, SyntaxNode, SyntaxNodePtr, TextRange, TextSize}; +use text_edit::{TextEdit, TextEditBuilder}; + +use crate::SnippetCap; #[derive(Default, Debug, Clone)] pub struct SourceChange { @@ -81,6 +84,135 @@ impl From> for SourceChange { } } +pub struct SourceChangeBuilder { + pub edit: TextEditBuilder, + pub file_id: FileId, + pub source_change: SourceChange, + pub trigger_signature_help: bool, + + /// Maps the original, immutable `SyntaxNode` to a `clone_for_update` twin. + pub mutated_tree: Option, +} + +pub struct TreeMutator { + immutable: SyntaxNode, + mutable_clone: SyntaxNode, +} + +impl TreeMutator { + pub fn new(immutable: &SyntaxNode) -> TreeMutator { + let immutable = immutable.ancestors().last().unwrap(); + let mutable_clone = immutable.clone_for_update(); + TreeMutator { immutable, mutable_clone } + } + + pub fn make_mut(&self, node: &N) -> N { + N::cast(self.make_syntax_mut(node.syntax())).unwrap() + } + + pub fn make_syntax_mut(&self, node: &SyntaxNode) -> SyntaxNode { + let ptr = SyntaxNodePtr::new(node); + ptr.to_node(&self.mutable_clone) + } +} + +impl SourceChangeBuilder { + pub fn new(file_id: FileId) -> SourceChangeBuilder { + SourceChangeBuilder { + edit: TextEdit::builder(), + file_id, + source_change: SourceChange::default(), + trigger_signature_help: false, + mutated_tree: None, + } + } + + pub fn edit_file(&mut self, file_id: FileId) { + self.commit(); + self.file_id = file_id; + } + + fn commit(&mut self) { + if let Some(tm) = self.mutated_tree.take() { + algo::diff(&tm.immutable, &tm.mutable_clone).into_text_edit(&mut self.edit) + } + + let edit = mem::take(&mut self.edit).finish(); + if !edit.is_empty() { + self.source_change.insert_source_edit(self.file_id, edit); + } + } + + pub fn make_mut(&mut self, node: N) -> N { + self.mutated_tree.get_or_insert_with(|| TreeMutator::new(node.syntax())).make_mut(&node) + } + /// Returns a copy of the `node`, suitable for mutation. + /// + /// Syntax trees in rust-analyzer are typically immutable, and mutating + /// operations panic at runtime. However, it is possible to make a copy of + /// the tree and mutate the copy freely. Mutation is based on interior + /// mutability, and different nodes in the same tree see the same mutations. + /// + /// The typical pattern for an assist is to find specific nodes in the read + /// phase, and then get their mutable couterparts using `make_mut` in the + /// mutable state. + pub fn make_syntax_mut(&mut self, node: SyntaxNode) -> SyntaxNode { + self.mutated_tree.get_or_insert_with(|| TreeMutator::new(&node)).make_syntax_mut(&node) + } + + /// Remove specified `range` of text. + pub fn delete(&mut self, range: TextRange) { + self.edit.delete(range) + } + /// Append specified `text` at the given `offset` + pub fn insert(&mut self, offset: TextSize, text: impl Into) { + self.edit.insert(offset, text.into()) + } + /// Append specified `snippet` at the given `offset` + pub fn insert_snippet( + &mut self, + _cap: SnippetCap, + offset: TextSize, + snippet: impl Into, + ) { + self.source_change.is_snippet = true; + self.insert(offset, snippet); + } + /// Replaces specified `range` of text with a given string. 
+ pub fn replace(&mut self, range: TextRange, replace_with: impl Into) { + self.edit.replace(range, replace_with.into()) + } + /// Replaces specified `range` of text with a given `snippet`. + pub fn replace_snippet( + &mut self, + _cap: SnippetCap, + range: TextRange, + snippet: impl Into, + ) { + self.source_change.is_snippet = true; + self.replace(range, snippet); + } + pub fn replace_ast(&mut self, old: N, new: N) { + algo::diff(old.syntax(), new.syntax()).into_text_edit(&mut self.edit) + } + pub fn create_file(&mut self, dst: AnchoredPathBuf, content: impl Into) { + let file_system_edit = FileSystemEdit::CreateFile { dst, initial_contents: content.into() }; + self.source_change.push_file_system_edit(file_system_edit); + } + pub fn move_file(&mut self, src: FileId, dst: AnchoredPathBuf) { + let file_system_edit = FileSystemEdit::MoveFile { src, dst }; + self.source_change.push_file_system_edit(file_system_edit); + } + pub fn trigger_signature_help(&mut self) { + self.trigger_signature_help = true; + } + + pub fn finish(mut self) -> SourceChange { + self.commit(); + mem::take(&mut self.source_change) + } +} + #[derive(Debug, Clone)] pub enum FileSystemEdit { CreateFile { dst: AnchoredPathBuf, initial_contents: String }, diff --git a/crates/ide-diagnostics/src/handlers/inactive_code.rs b/crates/ide-diagnostics/src/handlers/inactive_code.rs index 97ea5c456a..5694f33525 100644 --- a/crates/ide-diagnostics/src/handlers/inactive_code.rs +++ b/crates/ide-diagnostics/src/handlers/inactive_code.rs @@ -43,7 +43,7 @@ mod tests { use crate::{tests::check_diagnostics_with_config, DiagnosticsConfig}; pub(crate) fn check(ra_fixture: &str) { - let config = DiagnosticsConfig::default(); + let config = DiagnosticsConfig::test_sample(); check_diagnostics_with_config(config, ra_fixture) } diff --git a/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs b/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs index aa7fcffb48..a21db5b2ce 100644 --- a/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs +++ b/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs @@ -1,21 +1,30 @@ //! This diagnostic provides an assist for creating a struct definition from a JSON //! example. 
-use ide_db::{base_db::FileId, source_change::SourceChange}; +use hir::{PathResolution, Semantics}; +use ide_db::{ + base_db::FileId, + helpers::mod_path_to_ast, + imports::insert_use::{insert_use, ImportScope}, + source_change::SourceChangeBuilder, + RootDatabase, +}; use itertools::Itertools; -use stdx::format_to; +use stdx::{format_to, never}; use syntax::{ ast::{self, make}, SyntaxKind, SyntaxNode, }; use text_edit::TextEdit; -use crate::{fix, Diagnostic, Severity}; +use crate::{fix, Diagnostic, DiagnosticsConfig, Severity}; #[derive(Default)] struct State { result: String, struct_counts: usize, + has_serialize: bool, + has_deserialize: bool, } impl State { @@ -24,6 +33,25 @@ impl State { make::name(&format!("Struct{}", self.struct_counts)) } + fn serde_derive(&self) -> String { + let mut v = vec![]; + if self.has_serialize { + v.push("Serialize"); + } + if self.has_deserialize { + v.push("Deserialize"); + } + match v.as_slice() { + [] => "".to_string(), + [x] => format!("#[derive({x})]\n"), + [x, y] => format!("#[derive({x}, {y})]\n"), + _ => { + never!(); + "".to_string() + } + } + } + fn build_struct(&mut self, value: &serde_json::Map) -> ast::Type { let name = self.generate_new_name(); let ty = make::ty(&name.to_string()); @@ -36,7 +64,7 @@ impl State { )) .into(), ); - format_to!(self.result, "#[derive(Serialize, Deserialize)]\n{}\n", strukt); + format_to!(self.result, "{}{}\n", self.serde_derive(), strukt); ty } @@ -44,10 +72,10 @@ impl State { match value { serde_json::Value::Null => make::ty_unit(), serde_json::Value::Bool(_) => make::ty("bool"), - serde_json::Value::Number(x) => make::ty(if x.is_i64() { "i64" } else { "f64" }), + serde_json::Value::Number(it) => make::ty(if it.is_i64() { "i64" } else { "f64" }), serde_json::Value::String(_) => make::ty("String"), - serde_json::Value::Array(x) => { - let ty = match x.iter().next() { + serde_json::Value::Array(it) => { + let ty = match it.iter().next() { Some(x) => self.type_of(x), None => make::ty_placeholder(), }; @@ -58,37 +86,91 @@ impl State { } } -pub(crate) fn json_in_items(acc: &mut Vec, file_id: FileId, node: &SyntaxNode) { - if node.kind() == SyntaxKind::ERROR - && node.first_token().map(|x| x.kind()) == Some(SyntaxKind::L_CURLY) - && node.last_token().map(|x| x.kind()) == Some(SyntaxKind::R_CURLY) - { - let node_string = node.to_string(); - if let Ok(x) = serde_json::from_str(&node_string) { - if let serde_json::Value::Object(x) = x { - let range = node.text_range(); - let mut edit = TextEdit::builder(); - edit.delete(range); - let mut state = State::default(); - state.build_struct(&x); - edit.insert(range.start(), state.result); - acc.push( - Diagnostic::new( - "json-is-not-rust", - "JSON syntax is not valid as a Rust item", - range, - ) - .severity(Severity::WeakWarning) - .with_fixes(Some(vec![fix( - "convert_json_to_struct", - "Convert JSON to struct", - SourceChange::from_text_edit(file_id, edit.finish()), - range, - )])), - ); +pub(crate) fn json_in_items( + sema: &Semantics<'_, RootDatabase>, + acc: &mut Vec, + file_id: FileId, + node: &SyntaxNode, + config: &DiagnosticsConfig, +) { + (|| { + if node.kind() == SyntaxKind::ERROR + && node.first_token().map(|x| x.kind()) == Some(SyntaxKind::L_CURLY) + && node.last_token().map(|x| x.kind()) == Some(SyntaxKind::R_CURLY) + { + let node_string = node.to_string(); + if let Ok(it) = serde_json::from_str(&node_string) { + if let serde_json::Value::Object(it) = it { + let import_scope = ImportScope::find_insert_use_container(node, sema)?; + let range = node.text_range(); 
+ let mut edit = TextEdit::builder(); + edit.delete(range); + let mut state = State::default(); + let semantics_scope = sema.scope(node)?; + let scope_resolve = + |it| semantics_scope.speculative_resolve(&make::path_from_text(it)); + let scope_has = |it| scope_resolve(it).is_some(); + let deserialize_resolved = scope_resolve("::serde::Deserialize"); + let serialize_resolved = scope_resolve("::serde::Serialize"); + state.has_deserialize = deserialize_resolved.is_some(); + state.has_serialize = serialize_resolved.is_some(); + state.build_struct(&it); + edit.insert(range.start(), state.result); + acc.push( + Diagnostic::new( + "json-is-not-rust", + "JSON syntax is not valid as a Rust item", + range, + ) + .severity(Severity::WeakWarning) + .with_fixes(Some(vec![{ + let mut scb = SourceChangeBuilder::new(file_id); + let scope = match import_scope.clone() { + ImportScope::File(it) => ImportScope::File(scb.make_mut(it)), + ImportScope::Module(it) => ImportScope::Module(scb.make_mut(it)), + ImportScope::Block(it) => ImportScope::Block(scb.make_mut(it)), + }; + let current_module = semantics_scope.module(); + if !scope_has("Serialize") { + if let Some(PathResolution::Def(it)) = serialize_resolved { + if let Some(it) = current_module.find_use_path_prefixed( + sema.db, + it, + config.insert_use.prefix_kind, + ) { + insert_use( + &scope, + mod_path_to_ast(&it), + &config.insert_use, + ); + } + } + } + if !scope_has("Deserialize") { + if let Some(PathResolution::Def(it)) = deserialize_resolved { + if let Some(it) = current_module.find_use_path_prefixed( + sema.db, + it, + config.insert_use.prefix_kind, + ) { + insert_use( + &scope, + mod_path_to_ast(&it), + &config.insert_use, + ); + } + } + } + let mut sc = scb.finish(); + sc.insert_source_edit(file_id, edit.finish()); + fix("convert_json_to_struct", "Convert JSON to struct", sc, range) + }])), + ); + } } } - } + Some(()) + })(); } #[cfg(test)] @@ -100,7 +182,7 @@ mod tests { #[test] fn diagnostic_for_simple_case() { - let mut config = DiagnosticsConfig::default(); + let mut config = DiagnosticsConfig::test_sample(); config.disabled.insert("syntax-error".to_string()); check_diagnostics_with_config( config, @@ -115,6 +197,13 @@ mod tests { fn types_of_primitives() { check_fix( r#" + //- /lib.rs crate:lib deps:serde + use serde::Serialize; + + fn some_garbage() { + + } + {$0 "foo": "bar", "bar": 2.3, @@ -122,9 +211,20 @@ mod tests { "bay": 57, "box": true } + //- /serde.rs crate:serde + + pub trait Serialize { + fn serialize() -> u8; + } "#, r#" - #[derive(Serialize, Deserialize)] + use serde::Serialize; + + fn some_garbage() { + + } + + #[derive(Serialize)] struct Struct1{ bar: f64, bay: i64, baz: (), r#box: bool, foo: String } "#, @@ -144,11 +244,8 @@ mod tests { } "#, r#" - #[derive(Serialize, Deserialize)] struct Struct3{ } - #[derive(Serialize, Deserialize)] struct Struct2{ kind: String, value: Struct3 } - #[derive(Serialize, Deserialize)] struct Struct1{ bar: Struct2, foo: String } "#, @@ -159,6 +256,7 @@ mod tests { fn arrays() { check_fix( r#" + //- /lib.rs crate:lib deps:serde { "of_string": ["foo", "2", "x"], $0 "of_object": [{ @@ -171,8 +269,19 @@ mod tests { "nested": [[[2]]], "empty": [] } + //- /serde.rs crate:serde + + pub trait Serialize { + fn serialize() -> u8; + } + pub trait Deserialize { + fn deserialize() -> u8; + } "#, r#" + use serde::Serialize; + use serde::Deserialize; + #[derive(Serialize, Deserialize)] struct Struct2{ x: i64, y: i64 } #[derive(Serialize, Deserialize)] diff --git 
a/crates/ide-diagnostics/src/handlers/macro_error.rs b/crates/ide-diagnostics/src/handlers/macro_error.rs index d6a66dc150..43ff4ed5a6 100644 --- a/crates/ide-diagnostics/src/handlers/macro_error.rs +++ b/crates/ide-diagnostics/src/handlers/macro_error.rs @@ -79,7 +79,7 @@ pub macro panic { #[test] fn include_macro_should_allow_empty_content() { - let mut config = DiagnosticsConfig::default(); + let mut config = DiagnosticsConfig::test_sample(); // FIXME: This is a false-positive, the file is actually linked in via // `include!` macro diff --git a/crates/ide-diagnostics/src/lib.rs b/crates/ide-diagnostics/src/lib.rs index 7034f010e1..61e63ea7a9 100644 --- a/crates/ide-diagnostics/src/lib.rs +++ b/crates/ide-diagnostics/src/lib.rs @@ -60,6 +60,7 @@ use hir::{diagnostics::AnyDiagnostic, InFile, Semantics}; use ide_db::{ assists::{Assist, AssistId, AssistKind, AssistResolveStrategy}, base_db::{FileId, FileRange, SourceDatabase}, + imports::insert_use::InsertUseConfig, label::Label, source_change::SourceChange, FxHashSet, RootDatabase, @@ -140,13 +141,37 @@ impl Default for ExprFillDefaultMode { } } -#[derive(Default, Debug, Clone)] +#[derive(Debug, Clone)] pub struct DiagnosticsConfig { pub proc_macros_enabled: bool, pub proc_attr_macros_enabled: bool, pub disable_experimental: bool, pub disabled: FxHashSet, pub expr_fill_default: ExprFillDefaultMode, + // FIXME: We may want to include a whole `AssistConfig` here + pub insert_use: InsertUseConfig, +} + +impl DiagnosticsConfig { + pub fn test_sample() -> Self { + use hir::PrefixKind; + use ide_db::imports::insert_use::ImportGranularity; + + Self { + proc_macros_enabled: Default::default(), + proc_attr_macros_enabled: Default::default(), + disable_experimental: Default::default(), + disabled: Default::default(), + expr_fill_default: Default::default(), + insert_use: InsertUseConfig { + granularity: ImportGranularity::Preserve, + enforce_granularity: false, + prefix_kind: PrefixKind::Plain, + group: false, + skip_glob_imports: false, + }, + } + } } struct DiagnosticsContext<'a> { @@ -173,10 +198,12 @@ pub fn diagnostics( }), ); - for node in parse.tree().syntax().descendants() { + let parse = sema.parse(file_id); + + for node in parse.syntax().descendants() { handlers::useless_braces::useless_braces(&mut res, file_id, &node); handlers::field_shorthand::field_shorthand(&mut res, file_id, &node); - handlers::json_is_not_rust::json_in_items(&mut res, file_id, &node); + handlers::json_is_not_rust::json_in_items(&sema, &mut res, file_id, &node, &config); } let module = sema.to_module_def(file_id); diff --git a/crates/ide-diagnostics/src/tests.rs b/crates/ide-diagnostics/src/tests.rs index 7312bca32f..729619cfde 100644 --- a/crates/ide-diagnostics/src/tests.rs +++ b/crates/ide-diagnostics/src/tests.rs @@ -37,7 +37,7 @@ fn check_nth_fix(nth: usize, ra_fixture_before: &str, ra_fixture_after: &str) { let after = trim_indent(ra_fixture_after); let (db, file_position) = RootDatabase::with_position(ra_fixture_before); - let mut conf = DiagnosticsConfig::default(); + let mut conf = DiagnosticsConfig::test_sample(); conf.expr_fill_default = ExprFillDefaultMode::Default; let diagnostic = super::diagnostics(&db, &conf, &AssistResolveStrategy::All, file_position.file_id) @@ -69,7 +69,7 @@ pub(crate) fn check_no_fix(ra_fixture: &str) { let (db, file_position) = RootDatabase::with_position(ra_fixture); let diagnostic = super::diagnostics( &db, - &DiagnosticsConfig::default(), + &DiagnosticsConfig::test_sample(), &AssistResolveStrategy::All, file_position.file_id, 
) @@ -82,7 +82,7 @@ pub(crate) fn check_expect(ra_fixture: &str, expect: Expect) { let (db, file_id) = RootDatabase::with_single_file(ra_fixture); let diagnostics = super::diagnostics( &db, - &DiagnosticsConfig::default(), + &DiagnosticsConfig::test_sample(), &AssistResolveStrategy::All, file_id, ); @@ -91,7 +91,7 @@ pub(crate) fn check_expect(ra_fixture: &str, expect: Expect) { #[track_caller] pub(crate) fn check_diagnostics(ra_fixture: &str) { - let mut config = DiagnosticsConfig::default(); + let mut config = DiagnosticsConfig::test_sample(); config.disabled.insert("inactive-code".to_string()); check_diagnostics_with_config(config, ra_fixture) } @@ -127,7 +127,7 @@ pub(crate) fn check_diagnostics_with_config(config: DiagnosticsConfig, ra_fixtur #[test] fn test_disabled_diagnostics() { - let mut config = DiagnosticsConfig::default(); + let mut config = DiagnosticsConfig::test_sample(); config.disabled.insert("unresolved-module".into()); let (db, file_id) = RootDatabase::with_single_file(r#"mod foo;"#); @@ -137,7 +137,7 @@ fn test_disabled_diagnostics() { let diagnostics = super::diagnostics( &db, - &DiagnosticsConfig::default(), + &DiagnosticsConfig::test_sample(), &AssistResolveStrategy::All, file_id, ); diff --git a/crates/rust-analyzer/src/cli/diagnostics.rs b/crates/rust-analyzer/src/cli/diagnostics.rs index 52511ceb58..247007db0a 100644 --- a/crates/rust-analyzer/src/cli/diagnostics.rs +++ b/crates/rust-analyzer/src/cli/diagnostics.rs @@ -43,7 +43,7 @@ impl flags::Diagnostics { println!("processing crate: {}, module: {}", crate_name, _vfs.file_path(file_id)); for diagnostic in analysis .diagnostics( - &DiagnosticsConfig::default(), + &DiagnosticsConfig::test_sample(), AssistResolveStrategy::None, file_id, ) diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index ac0fdf85a7..1629c1dd32 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs @@ -881,6 +881,7 @@ impl Config { ExprFillDefaultDef::Todo => ExprFillDefaultMode::Todo, ExprFillDefaultDef::Default => ExprFillDefaultMode::Default, }, + insert_use: self.insert_use_config(), } } From 1a94193602e65c99344ba313c71d4298b0d4c0cc Mon Sep 17 00:00:00 2001 From: oxalica Date: Sun, 7 Aug 2022 04:17:04 +0800 Subject: [PATCH 0090/1945] Impl more methods and traits for la_arena::ArenaMap --- lib/la-arena/src/map.rs | 43 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 43 insertions(+) diff --git a/lib/la-arena/src/map.rs b/lib/la-arena/src/map.rs index f74ec5d97c..3034e33a78 100644 --- a/lib/la-arena/src/map.rs +++ b/lib/la-arena/src/map.rs @@ -21,6 +21,33 @@ impl ArenaMap, V> { Self { v: Vec::with_capacity(capacity), _ty: PhantomData } } + /// Reserves capacity for at least additional more elements to be inserted in the map. + pub fn reserve(&mut self, additional: usize) { + self.v.reserve(additional); + } + + /// Clears the map, removing all elements. + pub fn clear(&mut self) { + self.v.clear(); + } + + /// Shrinks the capacity of the map as much as possible. + pub fn shrink_to_fit(&mut self) { + let min_len = self.v.iter().rposition(|slot| slot.is_some()).map_or(0, |i| i + 1); + self.v.truncate(min_len); + self.v.shrink_to_fit(); + } + + /// Returns whether the map contains a value for the specified index. + pub fn contains_idx(&self, idx: Idx) -> bool { + matches!(self.v.get(Self::to_idx(idx)), Some(Some(_))) + } + + /// Removes an index from the map, returning the value at the index if the index was previously in the map. 
+ pub fn remove(&mut self, idx: Idx) -> Option { + self.v.get_mut(Self::to_idx(idx))?.take() + } + /// Inserts a value associated with a given arena index into the map. pub fn insert(&mut self, idx: Idx, t: V) { let idx = Self::to_idx(idx); @@ -94,6 +121,22 @@ impl Default for ArenaMap, T> { } } +impl Extend<(Idx, T)> for ArenaMap, T> { + fn extend, T)>>(&mut self, iter: I) { + iter.into_iter().for_each(move |(k, v)| { + self.insert(k, v); + }); + } +} + +impl FromIterator<(Idx, T)> for ArenaMap, T> { + fn from_iter, T)>>(iter: I) -> Self { + let mut this = Self::new(); + this.extend(iter); + this + } +} + /// A view into a single entry in a map, which may either be vacant or occupied. /// /// This `enum` is constructed from the [`entry`] method on [`ArenaMap`]. From 326ffee5b71e0113be20fc3b3b2aefbd4cd3b6d9 Mon Sep 17 00:00:00 2001 From: oxalica Date: Sun, 7 Aug 2022 04:48:52 +0800 Subject: [PATCH 0091/1945] Returns the old value for la_arena::ArenaMap::insert --- crates/hir-def/src/generics.rs | 2 +- crates/hir-def/src/visibility.rs | 2 +- crates/hir-ty/src/lower.rs | 2 +- lib/la-arena/src/map.rs | 7 +++++-- 4 files changed, 8 insertions(+), 5 deletions(-) diff --git a/crates/hir-def/src/generics.rs b/crates/hir-def/src/generics.rs index 2397cf5015..469b28c2d9 100644 --- a/crates/hir-def/src/generics.rs +++ b/crates/hir-def/src/generics.rs @@ -451,7 +451,7 @@ impl HasChildSource for GenericDefId { if let GenericDefId::TraitId(id) = *self { let trait_ref = id.lookup(db).source(db).value; let idx = idx_iter.next().unwrap(); - params.insert(idx, Either::Right(trait_ref)) + params.insert(idx, Either::Right(trait_ref)); } if let Some(generic_params_list) = generic_params_list { diff --git a/crates/hir-def/src/visibility.rs b/crates/hir-def/src/visibility.rs index 6e22a877a9..087268a9ec 100644 --- a/crates/hir-def/src/visibility.rs +++ b/crates/hir-def/src/visibility.rs @@ -224,7 +224,7 @@ pub(crate) fn field_visibilities_query( let resolver = variant_id.module(db).resolver(db); let mut res = ArenaMap::default(); for (field_id, field_data) in var_data.fields().iter() { - res.insert(field_id, field_data.visibility.resolve(db, &resolver)) + res.insert(field_id, field_data.visibility.resolve(db, &resolver)); } Arc::new(res) } diff --git a/crates/hir-ty/src/lower.rs b/crates/hir-ty/src/lower.rs index 3ed9c941f4..239f66bcb7 100644 --- a/crates/hir-ty/src/lower.rs +++ b/crates/hir-ty/src/lower.rs @@ -1126,7 +1126,7 @@ pub(crate) fn field_types_query( let ctx = TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable); for (field_id, field_data) in var_data.fields().iter() { - res.insert(field_id, make_binders(db, &generics, ctx.lower_ty(&field_data.type_ref))) + res.insert(field_id, make_binders(db, &generics, ctx.lower_ty(&field_data.type_ref))); } Arc::new(res) } diff --git a/lib/la-arena/src/map.rs b/lib/la-arena/src/map.rs index 3034e33a78..5f347e2745 100644 --- a/lib/la-arena/src/map.rs +++ b/lib/la-arena/src/map.rs @@ -49,11 +49,14 @@ impl ArenaMap, V> { } /// Inserts a value associated with a given arena index into the map. - pub fn insert(&mut self, idx: Idx, t: V) { + /// + /// If the map did not have this index present, None is returned. + /// Otherwise, the value is updated, and the old value is returned. 
+ pub fn insert(&mut self, idx: Idx, t: V) -> Option { let idx = Self::to_idx(idx); self.v.resize_with((idx + 1).max(self.v.len()), || None); - self.v[idx] = Some(t); + self.v[idx].replace(t) } /// Returns a reference to the value associated with the provided index From eca6f2e897a0c09546fd8d8faba79ec2ce45be52 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Sun, 7 Aug 2022 09:29:26 +0300 Subject: [PATCH 0092/1945] Fix test_rainbow_highlighting gate --- crates/ide/src/syntax_highlighting/tests.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/ide/src/syntax_highlighting/tests.rs b/crates/ide/src/syntax_highlighting/tests.rs index 99be7c6648..382735cb36 100644 --- a/crates/ide/src/syntax_highlighting/tests.rs +++ b/crates/ide/src/syntax_highlighting/tests.rs @@ -958,7 +958,7 @@ pub struct Struct; #[test] #[cfg_attr( - all(unix, not(target_pointer_width = "64")), + not(all(unix, target_pointer_width = "64")), ignore = "depends on `DefaultHasher` outputs" )] fn test_rainbow_highlighting() { From a81c7a2974841f335fd87bfd8c3c3b8a7c3d3612 Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Fri, 5 Aug 2022 20:01:50 +0900 Subject: [PATCH 0093/1945] Parse range patterns in struct and slice without trailing comma --- crates/parser/src/grammar/patterns.rs | 27 +++- .../parser/inline/ok/0058_range_pat.rast | 116 ++++++++++++++++++ .../parser/inline/ok/0058_range_pat.rs | 10 ++ 3 files changed, 150 insertions(+), 3 deletions(-) diff --git a/crates/parser/src/grammar/patterns.rs b/crates/parser/src/grammar/patterns.rs index 4cbf103061..bfa41c686e 100644 --- a/crates/parser/src/grammar/patterns.rs +++ b/crates/parser/src/grammar/patterns.rs @@ -75,6 +75,16 @@ fn pattern_single_r(p: &mut Parser<'_>, recovery_set: TokenSet) { // Some(1..) => () // } // + // match () { + // S { a: 0 } => (), + // S { a: 1.. } => (), + // } + // + // match () { + // [0] => (), + // [1..] => (), + // } + // // match (10 as u8, 5 as u8) { // (0, _) => (), // (1.., _) => () @@ -88,9 +98,20 @@ fn pattern_single_r(p: &mut Parser<'_>, recovery_set: TokenSet) { let m = lhs.precede(p); p.bump(range_op); - // `0 .. =>` or `let 0 .. =` or `Some(0 .. )` - // ^ ^ ^ - if p.at(T![=]) | p.at(T![')']) | p.at(T![,]) { + // testing if we're at one of the following positions: + // `0 .. =>` + // ^ + // `let 0 .. =` + // ^ + // (1.., _) + // ^ + // `Some(0 .. )` + // ^ + // `S { t: 0.. }` + // ^ + // `[0..]` + // ^ + if p.at(T![=]) | p.at(T![')']) | p.at(T![,]) | p.at(T!['}']) | p.at(T![']']) { // test half_open_range_pat // fn f() { let 0 .. 
= 1u32; } } else { diff --git a/crates/parser/test_data/parser/inline/ok/0058_range_pat.rast b/crates/parser/test_data/parser/inline/ok/0058_range_pat.rast index 44c967e8dc..cfef5d3f95 100644 --- a/crates/parser/test_data/parser/inline/ok/0058_range_pat.rast +++ b/crates/parser/test_data/parser/inline/ok/0058_range_pat.rast @@ -172,6 +172,122 @@ SOURCE_FILE WHITESPACE "\n " R_CURLY "}" WHITESPACE "\n\n " + EXPR_STMT + MATCH_EXPR + MATCH_KW "match" + WHITESPACE " " + TUPLE_EXPR + L_PAREN "(" + R_PAREN ")" + WHITESPACE " " + MATCH_ARM_LIST + L_CURLY "{" + WHITESPACE "\n " + MATCH_ARM + RECORD_PAT + PATH + PATH_SEGMENT + NAME_REF + IDENT "S" + WHITESPACE " " + RECORD_PAT_FIELD_LIST + L_CURLY "{" + WHITESPACE " " + RECORD_PAT_FIELD + NAME_REF + IDENT "a" + COLON ":" + WHITESPACE " " + LITERAL_PAT + LITERAL + INT_NUMBER "0" + WHITESPACE " " + R_CURLY "}" + WHITESPACE " " + FAT_ARROW "=>" + WHITESPACE " " + TUPLE_EXPR + L_PAREN "(" + R_PAREN ")" + COMMA "," + WHITESPACE "\n " + MATCH_ARM + RECORD_PAT + PATH + PATH_SEGMENT + NAME_REF + IDENT "S" + WHITESPACE " " + RECORD_PAT_FIELD_LIST + L_CURLY "{" + WHITESPACE " " + RECORD_PAT_FIELD + NAME_REF + IDENT "a" + COLON ":" + WHITESPACE " " + RANGE_PAT + LITERAL_PAT + LITERAL + INT_NUMBER "1" + DOT2 ".." + WHITESPACE " " + R_CURLY "}" + WHITESPACE " " + FAT_ARROW "=>" + WHITESPACE " " + TUPLE_EXPR + L_PAREN "(" + R_PAREN ")" + COMMA "," + WHITESPACE "\n " + R_CURLY "}" + WHITESPACE "\n\n " + EXPR_STMT + MATCH_EXPR + MATCH_KW "match" + WHITESPACE " " + TUPLE_EXPR + L_PAREN "(" + R_PAREN ")" + WHITESPACE " " + MATCH_ARM_LIST + L_CURLY "{" + WHITESPACE "\n " + MATCH_ARM + SLICE_PAT + L_BRACK "[" + LITERAL_PAT + LITERAL + INT_NUMBER "0" + R_BRACK "]" + WHITESPACE " " + FAT_ARROW "=>" + WHITESPACE " " + TUPLE_EXPR + L_PAREN "(" + R_PAREN ")" + COMMA "," + WHITESPACE "\n " + MATCH_ARM + SLICE_PAT + L_BRACK "[" + RANGE_PAT + LITERAL_PAT + LITERAL + INT_NUMBER "1" + DOT2 ".." + R_BRACK "]" + WHITESPACE " " + FAT_ARROW "=>" + WHITESPACE " " + TUPLE_EXPR + L_PAREN "(" + R_PAREN ")" + COMMA "," + WHITESPACE "\n " + R_CURLY "}" + WHITESPACE "\n\n " MATCH_EXPR MATCH_KW "match" WHITESPACE " " diff --git a/crates/parser/test_data/parser/inline/ok/0058_range_pat.rs b/crates/parser/test_data/parser/inline/ok/0058_range_pat.rs index 6c586a8956..2411d51096 100644 --- a/crates/parser/test_data/parser/inline/ok/0058_range_pat.rs +++ b/crates/parser/test_data/parser/inline/ok/0058_range_pat.rs @@ -11,6 +11,16 @@ fn main() { Some(1..) => () } + match () { + S { a: 0 } => (), + S { a: 1.. } => (), + } + + match () { + [0] => (), + [1..] => (), + } + match (10 as u8, 5 as u8) { (0, _) => (), (1.., _) => () From 70255029cf49928862c5b5603e731668ade5fa85 Mon Sep 17 00:00:00 2001 From: Kartavya Vashishtha Date: Sun, 7 Aug 2022 17:09:36 +0530 Subject: [PATCH 0094/1945] clippy: make generated code nice to read Feel free to close if this is too minor. 
--- crates/syntax/src/tests/sourcegen_ast.rs | 17 +++++------------ 1 file changed, 5 insertions(+), 12 deletions(-) diff --git a/crates/syntax/src/tests/sourcegen_ast.rs b/crates/syntax/src/tests/sourcegen_ast.rs index 6d27662251..daad939f82 100644 --- a/crates/syntax/src/tests/sourcegen_ast.rs +++ b/crates/syntax/src/tests/sourcegen_ast.rs @@ -410,24 +410,17 @@ fn generate_syntax_kinds(grammar: KindsSrc<'_>) -> String { impl SyntaxKind { pub fn is_keyword(self) -> bool { - match self { - #(#all_keywords)|* => true, - _ => false, - } + matches!(self, #(#all_keywords)|*) } pub fn is_punct(self) -> bool { - match self { - #(#punctuation)|* => true, - _ => false, - } + + matches!(self, #(#punctuation)|*) + } pub fn is_literal(self) -> bool { - match self { - #(#literals)|* => true, - _ => false, - } + matches!(self, #(#literals)|*) } pub fn from_keyword(ident: &str) -> Option { From a3fc4dbb04e9e9fd639ad5c0988af6c9c540b9a0 Mon Sep 17 00:00:00 2001 From: Kartavya Vashishtha Date: Sun, 7 Aug 2022 17:37:50 +0530 Subject: [PATCH 0095/1945] more matches! sites --- crates/syntax/src/tests/sourcegen_ast.rs | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/crates/syntax/src/tests/sourcegen_ast.rs b/crates/syntax/src/tests/sourcegen_ast.rs index daad939f82..70b54843db 100644 --- a/crates/syntax/src/tests/sourcegen_ast.rs +++ b/crates/syntax/src/tests/sourcegen_ast.rs @@ -169,10 +169,7 @@ fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> String { quote! { impl AstNode for #name { fn can_cast(kind: SyntaxKind) -> bool { - match kind { - #(#kinds)|* => true, - _ => false, - } + matches!(kind, #(#kinds)|*) } fn cast(syntax: SyntaxNode) -> Option { let res = match syntax.kind() { @@ -253,10 +250,7 @@ fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> String { } impl AstNode for #name { fn can_cast(kind: SyntaxKind) -> bool { - match kind { - #(#kinds)|* => true, - _ => false, - } + matches!(kind, #(#kinds)|*) } fn cast(syntax: SyntaxNode) -> Option { Self::can_cast(syntax.kind()).then(|| #name { syntax }) From bcab4be9383f5dc3d177ec585df85e2becf2512b Mon Sep 17 00:00:00 2001 From: Kartavya Vashishtha Date: Sun, 7 Aug 2022 17:38:20 +0530 Subject: [PATCH 0096/1945] regenerate files with new syntax --- crates/parser/src/syntax_kind/generated.rs | 130 +++++-- crates/syntax/src/ast/generated/nodes.rs | 410 ++++++++++++--------- 2 files changed, 345 insertions(+), 195 deletions(-) diff --git a/crates/parser/src/syntax_kind/generated.rs b/crates/parser/src/syntax_kind/generated.rs index 628fa745e7..c84f45f1f8 100644 --- a/crates/parser/src/syntax_kind/generated.rs +++ b/crates/parser/src/syntax_kind/generated.rs @@ -262,33 +262,117 @@ pub enum SyntaxKind { use self::SyntaxKind::*; impl SyntaxKind { pub fn is_keyword(self) -> bool { - match self { - AS_KW | ASYNC_KW | AWAIT_KW | BOX_KW | BREAK_KW | CONST_KW | CONTINUE_KW | CRATE_KW - | DYN_KW | ELSE_KW | ENUM_KW | EXTERN_KW | FALSE_KW | FN_KW | FOR_KW | IF_KW - | IMPL_KW | IN_KW | LET_KW | LOOP_KW | MACRO_KW | MATCH_KW | MOD_KW | MOVE_KW - | MUT_KW | PUB_KW | REF_KW | RETURN_KW | SELF_KW | SELF_TYPE_KW | STATIC_KW - | STRUCT_KW | SUPER_KW | TRAIT_KW | TRUE_KW | TRY_KW | TYPE_KW | UNSAFE_KW | USE_KW - | WHERE_KW | WHILE_KW | YIELD_KW | AUTO_KW | DEFAULT_KW | EXISTENTIAL_KW | UNION_KW - | RAW_KW | MACRO_RULES_KW => true, - _ => false, - } + matches!( + self, + AS_KW + | ASYNC_KW + | AWAIT_KW + | BOX_KW + | BREAK_KW + | CONST_KW + | CONTINUE_KW + | CRATE_KW + | DYN_KW + | ELSE_KW + | ENUM_KW + | EXTERN_KW + | FALSE_KW + 
| FN_KW + | FOR_KW + | IF_KW + | IMPL_KW + | IN_KW + | LET_KW + | LOOP_KW + | MACRO_KW + | MATCH_KW + | MOD_KW + | MOVE_KW + | MUT_KW + | PUB_KW + | REF_KW + | RETURN_KW + | SELF_KW + | SELF_TYPE_KW + | STATIC_KW + | STRUCT_KW + | SUPER_KW + | TRAIT_KW + | TRUE_KW + | TRY_KW + | TYPE_KW + | UNSAFE_KW + | USE_KW + | WHERE_KW + | WHILE_KW + | YIELD_KW + | AUTO_KW + | DEFAULT_KW + | EXISTENTIAL_KW + | UNION_KW + | RAW_KW + | MACRO_RULES_KW + ) } pub fn is_punct(self) -> bool { - match self { - SEMICOLON | COMMA | L_PAREN | R_PAREN | L_CURLY | R_CURLY | L_BRACK | R_BRACK - | L_ANGLE | R_ANGLE | AT | POUND | TILDE | QUESTION | DOLLAR | AMP | PIPE | PLUS - | STAR | SLASH | CARET | PERCENT | UNDERSCORE | DOT | DOT2 | DOT3 | DOT2EQ | COLON - | COLON2 | EQ | EQ2 | FAT_ARROW | BANG | NEQ | MINUS | THIN_ARROW | LTEQ | GTEQ - | PLUSEQ | MINUSEQ | PIPEEQ | AMPEQ | CARETEQ | SLASHEQ | STAREQ | PERCENTEQ | AMP2 - | PIPE2 | SHL | SHR | SHLEQ | SHREQ => true, - _ => false, - } + matches!( + self, + SEMICOLON + | COMMA + | L_PAREN + | R_PAREN + | L_CURLY + | R_CURLY + | L_BRACK + | R_BRACK + | L_ANGLE + | R_ANGLE + | AT + | POUND + | TILDE + | QUESTION + | DOLLAR + | AMP + | PIPE + | PLUS + | STAR + | SLASH + | CARET + | PERCENT + | UNDERSCORE + | DOT + | DOT2 + | DOT3 + | DOT2EQ + | COLON + | COLON2 + | EQ + | EQ2 + | FAT_ARROW + | BANG + | NEQ + | MINUS + | THIN_ARROW + | LTEQ + | GTEQ + | PLUSEQ + | MINUSEQ + | PIPEEQ + | AMPEQ + | CARETEQ + | SLASHEQ + | STAREQ + | PERCENTEQ + | AMP2 + | PIPE2 + | SHL + | SHR + | SHLEQ + | SHREQ + ) } pub fn is_literal(self) -> bool { - match self { - INT_NUMBER | FLOAT_NUMBER | CHAR | BYTE | STRING | BYTE_STRING => true, - _ => false, - } + matches!(self, INT_NUMBER | FLOAT_NUMBER | CHAR | BYTE | STRING | BYTE_STRING) } pub fn from_keyword(ident: &str) -> Option { let kw = match ident { diff --git a/crates/syntax/src/ast/generated/nodes.rs b/crates/syntax/src/ast/generated/nodes.rs index 63309a1552..8c4825ad69 100644 --- a/crates/syntax/src/ast/generated/nodes.rs +++ b/crates/syntax/src/ast/generated/nodes.rs @@ -3169,10 +3169,7 @@ impl From for GenericArg { } impl AstNode for GenericArg { fn can_cast(kind: SyntaxKind) -> bool { - match kind { - TYPE_ARG | ASSOC_TYPE_ARG | LIFETIME_ARG | CONST_ARG => true, - _ => false, - } + matches!(kind, TYPE_ARG | ASSOC_TYPE_ARG | LIFETIME_ARG | CONST_ARG) } fn cast(syntax: SyntaxNode) -> Option { let res = match syntax.kind() { @@ -3237,12 +3234,23 @@ impl From for Type { } impl AstNode for Type { fn can_cast(kind: SyntaxKind) -> bool { - match kind { - ARRAY_TYPE | DYN_TRAIT_TYPE | FN_PTR_TYPE | FOR_TYPE | IMPL_TRAIT_TYPE | INFER_TYPE - | MACRO_TYPE | NEVER_TYPE | PAREN_TYPE | PATH_TYPE | PTR_TYPE | REF_TYPE - | SLICE_TYPE | TUPLE_TYPE => true, - _ => false, - } + matches!( + kind, + ARRAY_TYPE + | DYN_TRAIT_TYPE + | FN_PTR_TYPE + | FOR_TYPE + | IMPL_TRAIT_TYPE + | INFER_TYPE + | MACRO_TYPE + | NEVER_TYPE + | PAREN_TYPE + | PATH_TYPE + | PTR_TYPE + | REF_TYPE + | SLICE_TYPE + | TUPLE_TYPE + ) } fn cast(syntax: SyntaxNode) -> Option { let res = match syntax.kind() { @@ -3384,15 +3392,42 @@ impl From for Expr { } impl AstNode for Expr { fn can_cast(kind: SyntaxKind) -> bool { - match kind { - ARRAY_EXPR | AWAIT_EXPR | BIN_EXPR | BLOCK_EXPR | BOX_EXPR | BREAK_EXPR | CALL_EXPR - | CAST_EXPR | CLOSURE_EXPR | CONTINUE_EXPR | FIELD_EXPR | FOR_EXPR | IF_EXPR - | INDEX_EXPR | LITERAL | LOOP_EXPR | MACRO_EXPR | MACRO_STMTS | MATCH_EXPR - | METHOD_CALL_EXPR | PAREN_EXPR | PATH_EXPR | PREFIX_EXPR | RANGE_EXPR - | RECORD_EXPR | REF_EXPR | 
RETURN_EXPR | TRY_EXPR | TUPLE_EXPR | WHILE_EXPR - | YIELD_EXPR | LET_EXPR | UNDERSCORE_EXPR => true, - _ => false, - } + matches!( + kind, + ARRAY_EXPR + | AWAIT_EXPR + | BIN_EXPR + | BLOCK_EXPR + | BOX_EXPR + | BREAK_EXPR + | CALL_EXPR + | CAST_EXPR + | CLOSURE_EXPR + | CONTINUE_EXPR + | FIELD_EXPR + | FOR_EXPR + | IF_EXPR + | INDEX_EXPR + | LITERAL + | LOOP_EXPR + | MACRO_EXPR + | MACRO_STMTS + | MATCH_EXPR + | METHOD_CALL_EXPR + | PAREN_EXPR + | PATH_EXPR + | PREFIX_EXPR + | RANGE_EXPR + | RECORD_EXPR + | REF_EXPR + | RETURN_EXPR + | TRY_EXPR + | TUPLE_EXPR + | WHILE_EXPR + | YIELD_EXPR + | LET_EXPR + | UNDERSCORE_EXPR + ) } fn cast(syntax: SyntaxNode) -> Option { let res = match syntax.kind() { @@ -3521,11 +3556,25 @@ impl From for Item { } impl AstNode for Item { fn can_cast(kind: SyntaxKind) -> bool { - match kind { - CONST | ENUM | EXTERN_BLOCK | EXTERN_CRATE | FN | IMPL | MACRO_CALL | MACRO_RULES - | MACRO_DEF | MODULE | STATIC | STRUCT | TRAIT | TYPE_ALIAS | UNION | USE => true, - _ => false, - } + matches!( + kind, + CONST + | ENUM + | EXTERN_BLOCK + | EXTERN_CRATE + | FN + | IMPL + | MACRO_CALL + | MACRO_RULES + | MACRO_DEF + | MODULE + | STATIC + | STRUCT + | TRAIT + | TYPE_ALIAS + | UNION + | USE + ) } fn cast(syntax: SyntaxNode) -> Option { let res = match syntax.kind() { @@ -3629,12 +3678,25 @@ impl From for Pat { } impl AstNode for Pat { fn can_cast(kind: SyntaxKind) -> bool { - match kind { - IDENT_PAT | BOX_PAT | REST_PAT | LITERAL_PAT | MACRO_PAT | OR_PAT | PAREN_PAT - | PATH_PAT | WILDCARD_PAT | RANGE_PAT | RECORD_PAT | REF_PAT | SLICE_PAT - | TUPLE_PAT | TUPLE_STRUCT_PAT | CONST_BLOCK_PAT => true, - _ => false, - } + matches!( + kind, + IDENT_PAT + | BOX_PAT + | REST_PAT + | LITERAL_PAT + | MACRO_PAT + | OR_PAT + | PAREN_PAT + | PATH_PAT + | WILDCARD_PAT + | RANGE_PAT + | RECORD_PAT + | REF_PAT + | SLICE_PAT + | TUPLE_PAT + | TUPLE_STRUCT_PAT + | CONST_BLOCK_PAT + ) } fn cast(syntax: SyntaxNode) -> Option { let res = match syntax.kind() { @@ -3686,12 +3748,7 @@ impl From for FieldList { fn from(node: TupleFieldList) -> FieldList { FieldList::TupleFieldList(node) } } impl AstNode for FieldList { - fn can_cast(kind: SyntaxKind) -> bool { - match kind { - RECORD_FIELD_LIST | TUPLE_FIELD_LIST => true, - _ => false, - } - } + fn can_cast(kind: SyntaxKind) -> bool { matches!(kind, RECORD_FIELD_LIST | TUPLE_FIELD_LIST) } fn cast(syntax: SyntaxNode) -> Option { let res = match syntax.kind() { RECORD_FIELD_LIST => FieldList::RecordFieldList(RecordFieldList { syntax }), @@ -3717,12 +3774,7 @@ impl From for Adt { fn from(node: Union) -> Adt { Adt::Union(node) } } impl AstNode for Adt { - fn can_cast(kind: SyntaxKind) -> bool { - match kind { - ENUM | STRUCT | UNION => true, - _ => false, - } - } + fn can_cast(kind: SyntaxKind) -> bool { matches!(kind, ENUM | STRUCT | UNION) } fn cast(syntax: SyntaxNode) -> Option { let res = match syntax.kind() { ENUM => Adt::Enum(Enum { syntax }), @@ -3753,12 +3805,7 @@ impl From for AssocItem { fn from(node: TypeAlias) -> AssocItem { AssocItem::TypeAlias(node) } } impl AstNode for AssocItem { - fn can_cast(kind: SyntaxKind) -> bool { - match kind { - CONST | FN | MACRO_CALL | TYPE_ALIAS => true, - _ => false, - } - } + fn can_cast(kind: SyntaxKind) -> bool { matches!(kind, CONST | FN | MACRO_CALL | TYPE_ALIAS) } fn cast(syntax: SyntaxNode) -> Option { let res = match syntax.kind() { CONST => AssocItem::Const(Const { syntax }), @@ -3791,12 +3838,7 @@ impl From for ExternItem { fn from(node: TypeAlias) -> ExternItem { ExternItem::TypeAlias(node) 
} } impl AstNode for ExternItem { - fn can_cast(kind: SyntaxKind) -> bool { - match kind { - FN | MACRO_CALL | STATIC | TYPE_ALIAS => true, - _ => false, - } - } + fn can_cast(kind: SyntaxKind) -> bool { matches!(kind, FN | MACRO_CALL | STATIC | TYPE_ALIAS) } fn cast(syntax: SyntaxNode) -> Option { let res = match syntax.kind() { FN => ExternItem::Fn(Fn { syntax }), @@ -3827,10 +3869,7 @@ impl From for GenericParam { } impl AstNode for GenericParam { fn can_cast(kind: SyntaxKind) -> bool { - match kind { - CONST_PARAM | LIFETIME_PARAM | TYPE_PARAM => true, - _ => false, - } + matches!(kind, CONST_PARAM | LIFETIME_PARAM | TYPE_PARAM) } fn cast(syntax: SyntaxNode) -> Option { let res = match syntax.kind() { @@ -3856,12 +3895,7 @@ impl AnyHasArgList { } } impl AstNode for AnyHasArgList { - fn can_cast(kind: SyntaxKind) -> bool { - match kind { - CALL_EXPR | METHOD_CALL_EXPR => true, - _ => false, - } - } + fn can_cast(kind: SyntaxKind) -> bool { matches!(kind, CALL_EXPR | METHOD_CALL_EXPR) } fn cast(syntax: SyntaxNode) -> Option { Self::can_cast(syntax.kind()).then(|| AnyHasArgList { syntax }) } @@ -3875,76 +3909,76 @@ impl AnyHasAttrs { } impl AstNode for AnyHasAttrs { fn can_cast(kind: SyntaxKind) -> bool { - match kind { + matches!( + kind, MACRO_CALL - | SOURCE_FILE - | CONST - | ENUM - | EXTERN_BLOCK - | EXTERN_CRATE - | FN - | IMPL - | MACRO_RULES - | MACRO_DEF - | MODULE - | STATIC - | STRUCT - | TRAIT - | TYPE_ALIAS - | UNION - | USE - | ITEM_LIST - | BLOCK_EXPR - | SELF_PARAM - | PARAM - | RECORD_FIELD - | TUPLE_FIELD - | VARIANT - | ASSOC_ITEM_LIST - | EXTERN_ITEM_LIST - | CONST_PARAM - | LIFETIME_PARAM - | TYPE_PARAM - | LET_STMT - | ARRAY_EXPR - | AWAIT_EXPR - | BIN_EXPR - | BOX_EXPR - | BREAK_EXPR - | CALL_EXPR - | CAST_EXPR - | CLOSURE_EXPR - | CONTINUE_EXPR - | FIELD_EXPR - | FOR_EXPR - | IF_EXPR - | INDEX_EXPR - | LITERAL - | LOOP_EXPR - | MATCH_EXPR - | METHOD_CALL_EXPR - | PAREN_EXPR - | PATH_EXPR - | PREFIX_EXPR - | RANGE_EXPR - | REF_EXPR - | RETURN_EXPR - | TRY_EXPR - | TUPLE_EXPR - | WHILE_EXPR - | YIELD_EXPR - | LET_EXPR - | UNDERSCORE_EXPR - | STMT_LIST - | RECORD_EXPR_FIELD_LIST - | RECORD_EXPR_FIELD - | MATCH_ARM_LIST - | MATCH_ARM - | IDENT_PAT - | REST_PAT - | RECORD_PAT_FIELD => true, - _ => false, - } + | SOURCE_FILE + | CONST + | ENUM + | EXTERN_BLOCK + | EXTERN_CRATE + | FN + | IMPL + | MACRO_RULES + | MACRO_DEF + | MODULE + | STATIC + | STRUCT + | TRAIT + | TYPE_ALIAS + | UNION + | USE + | ITEM_LIST + | BLOCK_EXPR + | SELF_PARAM + | PARAM + | RECORD_FIELD + | TUPLE_FIELD + | VARIANT + | ASSOC_ITEM_LIST + | EXTERN_ITEM_LIST + | CONST_PARAM + | LIFETIME_PARAM + | TYPE_PARAM + | LET_STMT + | ARRAY_EXPR + | AWAIT_EXPR + | BIN_EXPR + | BOX_EXPR + | BREAK_EXPR + | CALL_EXPR + | CAST_EXPR + | CLOSURE_EXPR + | CONTINUE_EXPR + | FIELD_EXPR + | FOR_EXPR + | IF_EXPR + | INDEX_EXPR + | LITERAL + | LOOP_EXPR + | MATCH_EXPR + | METHOD_CALL_EXPR + | PAREN_EXPR + | PATH_EXPR + | PREFIX_EXPR + | RANGE_EXPR + | REF_EXPR + | RETURN_EXPR + | TRY_EXPR + | TUPLE_EXPR + | WHILE_EXPR + | YIELD_EXPR + | LET_EXPR + | UNDERSCORE_EXPR + | STMT_LIST + | RECORD_EXPR_FIELD_LIST + | RECORD_EXPR_FIELD + | MATCH_ARM_LIST + | MATCH_ARM + | IDENT_PAT + | REST_PAT + | RECORD_PAT_FIELD + ) } fn cast(syntax: SyntaxNode) -> Option { Self::can_cast(syntax.kind()).then(|| AnyHasAttrs { syntax }) @@ -3959,12 +3993,29 @@ impl AnyHasDocComments { } impl AstNode for AnyHasDocComments { fn can_cast(kind: SyntaxKind) -> bool { - match kind { - MACRO_CALL | SOURCE_FILE | CONST | ENUM | EXTERN_BLOCK | 
EXTERN_CRATE | FN | IMPL - | MACRO_RULES | MACRO_DEF | MODULE | STATIC | STRUCT | TRAIT | TYPE_ALIAS | UNION - | USE | RECORD_FIELD | TUPLE_FIELD | VARIANT => true, - _ => false, - } + matches!( + kind, + MACRO_CALL + | SOURCE_FILE + | CONST + | ENUM + | EXTERN_BLOCK + | EXTERN_CRATE + | FN + | IMPL + | MACRO_RULES + | MACRO_DEF + | MODULE + | STATIC + | STRUCT + | TRAIT + | TYPE_ALIAS + | UNION + | USE + | RECORD_FIELD + | TUPLE_FIELD + | VARIANT + ) } fn cast(syntax: SyntaxNode) -> Option { Self::can_cast(syntax.kind()).then(|| AnyHasDocComments { syntax }) @@ -3979,10 +4030,7 @@ impl AnyHasGenericParams { } impl AstNode for AnyHasGenericParams { fn can_cast(kind: SyntaxKind) -> bool { - match kind { - ENUM | FN | IMPL | STRUCT | TRAIT | TYPE_ALIAS | UNION => true, - _ => false, - } + matches!(kind, ENUM | FN | IMPL | STRUCT | TRAIT | TYPE_ALIAS | UNION) } fn cast(syntax: SyntaxNode) -> Option { Self::can_cast(syntax.kind()).then(|| AnyHasGenericParams { syntax }) @@ -3996,12 +4044,7 @@ impl AnyHasLoopBody { } } impl AstNode for AnyHasLoopBody { - fn can_cast(kind: SyntaxKind) -> bool { - match kind { - FOR_EXPR | LOOP_EXPR | WHILE_EXPR => true, - _ => false, - } - } + fn can_cast(kind: SyntaxKind) -> bool { matches!(kind, FOR_EXPR | LOOP_EXPR | WHILE_EXPR) } fn cast(syntax: SyntaxNode) -> Option { Self::can_cast(syntax.kind()).then(|| AnyHasLoopBody { syntax }) } @@ -4014,12 +4057,7 @@ impl AnyHasModuleItem { } } impl AstNode for AnyHasModuleItem { - fn can_cast(kind: SyntaxKind) -> bool { - match kind { - MACRO_ITEMS | SOURCE_FILE | ITEM_LIST => true, - _ => false, - } - } + fn can_cast(kind: SyntaxKind) -> bool { matches!(kind, MACRO_ITEMS | SOURCE_FILE | ITEM_LIST) } fn cast(syntax: SyntaxNode) -> Option { Self::can_cast(syntax.kind()).then(|| AnyHasModuleItem { syntax }) } @@ -4033,12 +4071,27 @@ impl AnyHasName { } impl AstNode for AnyHasName { fn can_cast(kind: SyntaxKind) -> bool { - match kind { - CONST | ENUM | FN | MACRO_RULES | MACRO_DEF | MODULE | STATIC | STRUCT | TRAIT - | TYPE_ALIAS | UNION | RENAME | SELF_PARAM | RECORD_FIELD | VARIANT | CONST_PARAM - | TYPE_PARAM | IDENT_PAT => true, - _ => false, - } + matches!( + kind, + CONST + | ENUM + | FN + | MACRO_RULES + | MACRO_DEF + | MODULE + | STATIC + | STRUCT + | TRAIT + | TYPE_ALIAS + | UNION + | RENAME + | SELF_PARAM + | RECORD_FIELD + | VARIANT + | CONST_PARAM + | TYPE_PARAM + | IDENT_PAT + ) } fn cast(syntax: SyntaxNode) -> Option { Self::can_cast(syntax.kind()).then(|| AnyHasName { syntax }) @@ -4053,10 +4106,10 @@ impl AnyHasTypeBounds { } impl AstNode for AnyHasTypeBounds { fn can_cast(kind: SyntaxKind) -> bool { - match kind { - ASSOC_TYPE_ARG | TRAIT | TYPE_ALIAS | LIFETIME_PARAM | TYPE_PARAM | WHERE_PRED => true, - _ => false, - } + matches!( + kind, + ASSOC_TYPE_ARG | TRAIT | TYPE_ALIAS | LIFETIME_PARAM | TYPE_PARAM | WHERE_PRED + ) } fn cast(syntax: SyntaxNode) -> Option { Self::can_cast(syntax.kind()).then(|| AnyHasTypeBounds { syntax }) @@ -4071,13 +4124,26 @@ impl AnyHasVisibility { } impl AstNode for AnyHasVisibility { fn can_cast(kind: SyntaxKind) -> bool { - match kind { - CONST | ENUM | EXTERN_CRATE | FN | IMPL | MACRO_RULES | MACRO_DEF | MODULE | STATIC - | STRUCT | TRAIT | TYPE_ALIAS | UNION | USE | RECORD_FIELD | TUPLE_FIELD | VARIANT => { - true - } - _ => false, - } + matches!( + kind, + CONST + | ENUM + | EXTERN_CRATE + | FN + | IMPL + | MACRO_RULES + | MACRO_DEF + | MODULE + | STATIC + | STRUCT + | TRAIT + | TYPE_ALIAS + | UNION + | USE + | RECORD_FIELD + | TUPLE_FIELD + | VARIANT + ) } fn 
cast(syntax: SyntaxNode) -> Option { Self::can_cast(syntax.kind()).then(|| AnyHasVisibility { syntax }) From 1883d1f14146959cfa9cdf1ed43e5d1ad013e07e Mon Sep 17 00:00:00 2001 From: Kartavya Vashishtha Date: Sun, 7 Aug 2022 20:39:11 +0530 Subject: [PATCH 0097/1945] activate assoc item test --- crates/ide-diagnostics/src/handlers/inactive_code.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/ide-diagnostics/src/handlers/inactive_code.rs b/crates/ide-diagnostics/src/handlers/inactive_code.rs index 97ea5c456a..6715823f1f 100644 --- a/crates/ide-diagnostics/src/handlers/inactive_code.rs +++ b/crates/ide-diagnostics/src/handlers/inactive_code.rs @@ -112,12 +112,12 @@ fn f() { struct Foo; impl Foo { #[cfg(any())] pub fn f() {} - //*************************** weak: code is inactive due to #[cfg] directives + //^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives } trait Bar { #[cfg(any())] pub fn f() {} - //*************************** weak: code is inactive due to #[cfg] directives + //^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives } "#, ); From 4b648d8f6cfb9f58490a70e4539aa36c89b9d35e Mon Sep 17 00:00:00 2001 From: Jake Heinz Date: Mon, 8 Aug 2022 00:20:17 +0000 Subject: [PATCH 0098/1945] [code] make toggleInlayHints understand {off,on}UntilPressed --- editors/code/src/commands.ts | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff --git a/editors/code/src/commands.ts b/editors/code/src/commands.ts index bf55329ca1..1b793bb0b1 100644 --- a/editors/code/src/commands.ts +++ b/editors/code/src/commands.ts @@ -326,8 +326,22 @@ export function toggleInlayHints(_ctx: Ctx): Cmd { const config = vscode.workspace.getConfiguration("editor.inlayHints", { languageId: "rust", }); - const value = !config.get("enabled"); - await config.update("enabled", value, vscode.ConfigurationTarget.Global); + + const value = config.get("enabled"); + let stringValue; + if (typeof value === "string") { + stringValue = value; + } else { + stringValue = value ? "on" : "off"; + } + const nextValues: Record = { + on: "off", + off: "on", + onUnlessPressed: "offUnlessPressed", + offUnlessPressed: "onUnlessPressed", + }; + const nextValue = nextValues[stringValue] ?? 
"on"; + await config.update("enabled", nextValue, vscode.ConfigurationTarget.Global); }; } From 196f389a708d5e0002a1d3b4e1059d43dc4542fb Mon Sep 17 00:00:00 2001 From: Kartavya Vashishtha Date: Mon, 8 Aug 2022 16:40:29 +0530 Subject: [PATCH 0099/1945] try adding diagnostrics for AssocItems --- crates/hir-def/src/data.rs | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/crates/hir-def/src/data.rs b/crates/hir-def/src/data.rs index 35c8708955..1b4f4ed04a 100644 --- a/crates/hir-def/src/data.rs +++ b/crates/hir-def/src/data.rs @@ -2,7 +2,7 @@ use std::sync::Arc; -use hir_expand::{name::Name, AstId, ExpandResult, HirFileId, MacroCallId, MacroDefKind}; +use hir_expand::{name::Name, AstId, ExpandResult, HirFileId, MacroCallId, MacroDefKind, InFile}; use smallvec::SmallVec; use syntax::ast; @@ -12,7 +12,7 @@ use crate::{ db::DefDatabase, intern::Interned, item_tree::{self, AssocItem, FnFlags, ItemTree, ItemTreeId, ModItem, Param, TreeId}, - nameres::{attr_resolution::ResolvedAttr, proc_macro::ProcMacroKind, DefMap}, + nameres::{attr_resolution::ResolvedAttr, proc_macro::ProcMacroKind, DefMap, diagnostics::DefDiagnostic}, type_ref::{TraitRef, TypeBound, TypeRef}, visibility::RawVisibility, AssocItemId, AstIdWithPath, ConstId, ConstLoc, FunctionId, FunctionLoc, HasModule, ImplId, @@ -479,6 +479,13 @@ impl<'a> AssocItemCollector<'a> { 'items: for &item in assoc_items { let attrs = item_tree.attrs(self.db, self.module_id.krate, ModItem::from(item).into()); if !attrs.is_cfg_enabled(self.expander.cfg_options()) { + self.def_map.push_diagnostic(DefDiagnostic::unconfigured_code( + self.module_id.local_id, + InFile::new(tree_id.file_id(), item.ast_id(&item_tree).upcast()), + attrs.cfg().unwrap(), + self.expander.cfg_options().clone() + )); + dbg!("Ignoring assoc item!"); continue; } From c1eae3d0281e1e34f64332b02a7dc4bdd0ae6e5b Mon Sep 17 00:00:00 2001 From: Kartavya Vashishtha Date: Mon, 8 Aug 2022 16:45:27 +0530 Subject: [PATCH 0100/1945] make diagnostic function public --- crates/hir-def/src/nameres.rs | 8 ++++++++ crates/hir-def/src/nameres/diagnostics.rs | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/crates/hir-def/src/nameres.rs b/crates/hir-def/src/nameres.rs index 45f631936d..a2181a6bf4 100644 --- a/crates/hir-def/src/nameres.rs +++ b/crates/hir-def/src/nameres.rs @@ -511,6 +511,14 @@ impl DefMap { self.diagnostics.as_slice() } + pub fn push_diagnostic(&mut self, d: DefDiagnostic) { + self.diagnostics.push(d) + } + + pub fn push_diagnostics(&mut self, i: impl Iterator) { + self.diagnostics.extend(i) + } + pub fn recursion_limit(&self) -> Option { self.recursion_limit } diff --git a/crates/hir-def/src/nameres/diagnostics.rs b/crates/hir-def/src/nameres/diagnostics.rs index 0d01f6d0ab..ed7e920fd2 100644 --- a/crates/hir-def/src/nameres/diagnostics.rs +++ b/crates/hir-def/src/nameres/diagnostics.rs @@ -73,7 +73,7 @@ impl DefDiagnostic { Self { in_module: container, kind: DefDiagnosticKind::UnresolvedImport { id, index } } } - pub(super) fn unconfigured_code( + pub fn unconfigured_code( container: LocalModuleId, ast: AstId, cfg: CfgExpr, From b14062aaecfe11978a5525824567e8011526aeb2 Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Tue, 9 Aug 2022 00:00:22 +0900 Subject: [PATCH 0101/1945] Parse range patterns in let statement with type annotation --- crates/parser/src/grammar/patterns.rs | 9 ++++++-- .../inline/ok/0166_half_open_range_pat.rast | 23 +++++++++++++++++-- .../inline/ok/0166_half_open_range_pat.rs | 5 +++- 3 files changed, 32 insertions(+), 5 
deletions(-) diff --git a/crates/parser/src/grammar/patterns.rs b/crates/parser/src/grammar/patterns.rs index bfa41c686e..7e21a808da 100644 --- a/crates/parser/src/grammar/patterns.rs +++ b/crates/parser/src/grammar/patterns.rs @@ -103,6 +103,8 @@ fn pattern_single_r(p: &mut Parser<'_>, recovery_set: TokenSet) { // ^ // `let 0 .. =` // ^ + // `let 0..: _ =` + // ^ // (1.., _) // ^ // `Some(0 .. )` @@ -111,9 +113,12 @@ fn pattern_single_r(p: &mut Parser<'_>, recovery_set: TokenSet) { // ^ // `[0..]` // ^ - if p.at(T![=]) | p.at(T![')']) | p.at(T![,]) | p.at(T!['}']) | p.at(T![']']) { + if matches!(p.current(), T![=] | T![,] | T![:] | T![')'] | T!['}'] | T![']']) { // test half_open_range_pat - // fn f() { let 0 .. = 1u32; } + // fn f() { + // let 0 .. = 1u32; + // let 0..: _ = 1u32; + // } } else { atom_pat(p, recovery_set); } diff --git a/crates/parser/test_data/parser/inline/ok/0166_half_open_range_pat.rast b/crates/parser/test_data/parser/inline/ok/0166_half_open_range_pat.rast index 3d3587a706..4b401b60df 100644 --- a/crates/parser/test_data/parser/inline/ok/0166_half_open_range_pat.rast +++ b/crates/parser/test_data/parser/inline/ok/0166_half_open_range_pat.rast @@ -11,7 +11,7 @@ SOURCE_FILE BLOCK_EXPR STMT_LIST L_CURLY "{" - WHITESPACE " " + WHITESPACE "\n " LET_STMT LET_KW "let" WHITESPACE " " @@ -27,6 +27,25 @@ SOURCE_FILE LITERAL INT_NUMBER "1u32" SEMICOLON ";" - WHITESPACE " " + WHITESPACE "\n " + LET_STMT + LET_KW "let" + WHITESPACE " " + RANGE_PAT + LITERAL_PAT + LITERAL + INT_NUMBER "0" + DOT2 ".." + COLON ":" + WHITESPACE " " + INFER_TYPE + UNDERSCORE "_" + WHITESPACE " " + EQ "=" + WHITESPACE " " + LITERAL + INT_NUMBER "1u32" + SEMICOLON ";" + WHITESPACE "\n" R_CURLY "}" WHITESPACE "\n" diff --git a/crates/parser/test_data/parser/inline/ok/0166_half_open_range_pat.rs b/crates/parser/test_data/parser/inline/ok/0166_half_open_range_pat.rs index 1360eda056..c9386a221a 100644 --- a/crates/parser/test_data/parser/inline/ok/0166_half_open_range_pat.rs +++ b/crates/parser/test_data/parser/inline/ok/0166_half_open_range_pat.rs @@ -1 +1,4 @@ -fn f() { let 0 .. = 1u32; } +fn f() { + let 0 .. 
= 1u32; + let 0..: _ = 1u32; +} From e39918f553d7dd5fb5afd6517f80d3e35290c4d2 Mon Sep 17 00:00:00 2001 From: fprasx Date: Mon, 8 Aug 2022 11:05:18 -0400 Subject: [PATCH 0102/1945] Corrected order of printing op and `=` --- crates/syntax/src/ast/operators.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/syntax/src/ast/operators.rs b/crates/syntax/src/ast/operators.rs index a687ba0b77..8f7b3fb600 100644 --- a/crates/syntax/src/ast/operators.rs +++ b/crates/syntax/src/ast/operators.rs @@ -111,10 +111,10 @@ impl fmt::Display for BinaryOp { BinaryOp::ArithOp(op) => fmt::Display::fmt(op, f), BinaryOp::CmpOp(op) => fmt::Display::fmt(op, f), BinaryOp::Assignment { op } => { - f.write_str("=")?; if let Some(op) = op { fmt::Display::fmt(op, f)?; } + f.write_str("=")?; Ok(()) } } From 232176b46af3955e20997eb4ffb5e77fefde0ed5 Mon Sep 17 00:00:00 2001 From: KaDiWa Date: Tue, 9 Aug 2022 01:16:32 +0200 Subject: [PATCH 0103/1945] remove imports that are also in edition 2021's prelude --- crates/base-db/src/input.rs | 2 +- crates/hir-ty/src/consteval.rs | 1 - crates/ide-db/src/search.rs | 2 +- crates/ide/src/goto_definition.rs | 2 +- crates/ide/src/status.rs | 2 +- crates/proc-macro-api/src/msg/flat.rs | 5 +---- crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/mod.rs | 4 ++-- crates/proc-macro-srv/src/abis/abi_1_58/ra_server.rs | 1 - crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/mod.rs | 4 ++-- crates/proc-macro-srv/src/abis/abi_1_63/ra_server.rs | 1 - crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/mod.rs | 4 ++-- crates/proc-macro-srv/src/abis/abi_1_64/ra_server.rs | 1 - crates/proc-macro-srv/src/abis/abi_sysroot/ra_server.rs | 2 +- crates/proc-macro-srv/src/dylib.rs | 1 - crates/rust-analyzer/src/diagnostics/to_proto.rs | 2 +- crates/rust-analyzer/src/to_proto.rs | 2 +- crates/syntax/src/fuzz.rs | 5 +---- lib/la-arena/src/lib.rs | 1 - 18 files changed, 15 insertions(+), 27 deletions(-) diff --git a/crates/base-db/src/input.rs b/crates/base-db/src/input.rs index 9b5a10acfb..9580ce8007 100644 --- a/crates/base-db/src/input.rs +++ b/crates/base-db/src/input.rs @@ -6,7 +6,7 @@ //! actual IO. See `vfs` and `project_model` in the `rust-analyzer` crate for how //! actual IO is done and lowered to input. -use std::{fmt, iter::FromIterator, ops, panic::RefUnwindSafe, str::FromStr, sync::Arc}; +use std::{fmt, ops, panic::RefUnwindSafe, str::FromStr, sync::Arc}; use cfg::CfgOptions; use rustc_hash::{FxHashMap, FxHashSet}; diff --git a/crates/hir-ty/src/consteval.rs b/crates/hir-ty/src/consteval.rs index 0495a4e64c..6ecb6e6fd1 100644 --- a/crates/hir-ty/src/consteval.rs +++ b/crates/hir-ty/src/consteval.rs @@ -2,7 +2,6 @@ use std::{ collections::HashMap, - convert::TryInto, fmt::{Display, Write}, }; diff --git a/crates/ide-db/src/search.rs b/crates/ide-db/src/search.rs index bd038cdaa0..9eaabeec7a 100644 --- a/crates/ide-db/src/search.rs +++ b/crates/ide-db/src/search.rs @@ -4,7 +4,7 @@ //! get a super-set of matches. Then, we we confirm each match using precise //! name resolution. 
-use std::{convert::TryInto, mem, sync::Arc}; +use std::{mem, sync::Arc}; use base_db::{FileId, FileRange, SourceDatabase, SourceDatabaseExt}; use hir::{DefWithBody, HasAttrs, HasSource, InFile, ModuleSource, Semantics, Visibility}; diff --git a/crates/ide/src/goto_definition.rs b/crates/ide/src/goto_definition.rs index b2123b9a87..d6cd5783f0 100644 --- a/crates/ide/src/goto_definition.rs +++ b/crates/ide/src/goto_definition.rs @@ -1,4 +1,4 @@ -use std::{convert::TryInto, mem::discriminant}; +use std::mem::discriminant; use crate::{doc_links::token_as_doc_comment, FilePosition, NavigationTarget, RangeInfo, TryToNav}; use hir::{AsAssocItem, AssocItem, Semantics}; diff --git a/crates/ide/src/status.rs b/crates/ide/src/status.rs index 32e39f82a0..f4d0387440 100644 --- a/crates/ide/src/status.rs +++ b/crates/ide/src/status.rs @@ -1,4 +1,4 @@ -use std::{fmt, iter::FromIterator, sync::Arc}; +use std::{fmt, sync::Arc}; use hir::{ExpandResult, MacroFile}; use ide_db::base_db::{ diff --git a/crates/proc-macro-api/src/msg/flat.rs b/crates/proc-macro-api/src/msg/flat.rs index 8437444e18..268a03bb53 100644 --- a/crates/proc-macro-api/src/msg/flat.rs +++ b/crates/proc-macro-api/src/msg/flat.rs @@ -35,10 +35,7 @@ //! as we don't have bincode in Cargo.toml yet, lets stick with serde_json for //! the time being. -use std::{ - collections::{HashMap, VecDeque}, - convert::TryInto, -}; +use std::collections::{HashMap, VecDeque}; use serde::{Deserialize, Serialize}; use tt::TokenId; diff --git a/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/mod.rs b/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/mod.rs index 4a07f22779..a405497f3c 100644 --- a/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/mod.rs +++ b/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/mod.rs @@ -157,7 +157,7 @@ impl From for TokenStream { } /// Collects a number of token trees into a single stream. -impl iter::FromIterator for TokenStream { +impl FromIterator for TokenStream { fn from_iter>(trees: I) -> Self { trees.into_iter().map(TokenStream::from).collect() } @@ -165,7 +165,7 @@ impl iter::FromIterator for TokenStream { /// A "flattening" operation on token streams, collects token trees /// from multiple token streams into a single stream. -impl iter::FromIterator for TokenStream { +impl FromIterator for TokenStream { fn from_iter>(streams: I) -> Self { let mut builder = bridge::client::TokenStreamBuilder::new(); streams.into_iter().for_each(|stream| builder.push(stream.0)); diff --git a/crates/proc-macro-srv/src/abis/abi_1_58/ra_server.rs b/crates/proc-macro-srv/src/abis/abi_1_58/ra_server.rs index ebdfca00d7..b1e982f477 100644 --- a/crates/proc-macro-srv/src/abis/abi_1_58/ra_server.rs +++ b/crates/proc-macro-srv/src/abis/abi_1_58/ra_server.rs @@ -12,7 +12,6 @@ use super::proc_macro::bridge::{self, server}; use std::collections::HashMap; use std::hash::Hash; -use std::iter::FromIterator; use std::ops::Bound; use std::{ascii, vec::IntoIter}; diff --git a/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/mod.rs b/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/mod.rs index c50a16bf4d..7ab1f421da 100644 --- a/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/mod.rs +++ b/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/mod.rs @@ -207,7 +207,7 @@ impl ConcatStreamsHelper { } /// Collects a number of token trees into a single stream. 
-impl iter::FromIterator for TokenStream { +impl FromIterator for TokenStream { fn from_iter>(trees: I) -> Self { trees.into_iter().map(TokenStream::from).collect() } @@ -215,7 +215,7 @@ impl iter::FromIterator for TokenStream { /// A "flattening" operation on token streams, collects token trees /// from multiple token streams into a single stream. -impl iter::FromIterator for TokenStream { +impl FromIterator for TokenStream { fn from_iter>(streams: I) -> Self { let iter = streams.into_iter(); let mut builder = ConcatStreamsHelper::new(iter.size_hint().0); diff --git a/crates/proc-macro-srv/src/abis/abi_1_63/ra_server.rs b/crates/proc-macro-srv/src/abis/abi_1_63/ra_server.rs index 05a565fbf3..ed49cc7596 100644 --- a/crates/proc-macro-srv/src/abis/abi_1_63/ra_server.rs +++ b/crates/proc-macro-srv/src/abis/abi_1_63/ra_server.rs @@ -12,7 +12,6 @@ use super::proc_macro::bridge::{self, server}; use std::collections::HashMap; use std::hash::Hash; -use std::iter::FromIterator; use std::ops::Bound; use std::{ascii, vec::IntoIter}; diff --git a/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/mod.rs b/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/mod.rs index be62c73ef3..86a59b6455 100644 --- a/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/mod.rs +++ b/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/mod.rs @@ -207,7 +207,7 @@ impl ConcatStreamsHelper { } /// Collects a number of token trees into a single stream. -impl iter::FromIterator for TokenStream { +impl FromIterator for TokenStream { fn from_iter>(trees: I) -> Self { trees.into_iter().map(TokenStream::from).collect() } @@ -215,7 +215,7 @@ impl iter::FromIterator for TokenStream { /// A "flattening" operation on token streams, collects token trees /// from multiple token streams into a single stream. -impl iter::FromIterator for TokenStream { +impl FromIterator for TokenStream { fn from_iter>(streams: I) -> Self { let iter = streams.into_iter(); let mut builder = ConcatStreamsHelper::new(iter.size_hint().0); diff --git a/crates/proc-macro-srv/src/abis/abi_1_64/ra_server.rs b/crates/proc-macro-srv/src/abis/abi_1_64/ra_server.rs index 7e8e67856e..d8aa1ec429 100644 --- a/crates/proc-macro-srv/src/abis/abi_1_64/ra_server.rs +++ b/crates/proc-macro-srv/src/abis/abi_1_64/ra_server.rs @@ -12,7 +12,6 @@ use super::proc_macro::bridge::{self, server}; use std::collections::HashMap; use std::hash::Hash; -use std::iter::FromIterator; use std::ops::Bound; use std::{ascii, vec::IntoIter}; diff --git a/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server.rs b/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server.rs index 46882845a8..52eb7ce17d 100644 --- a/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server.rs +++ b/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server.rs @@ -20,7 +20,7 @@ use token_stream::TokenStreamBuilder; mod symbol; pub use symbol::*; -use std::{iter::FromIterator, ops::Bound}; +use std::ops::Bound; type Group = tt::Subtree; type TokenTree = tt::TokenTree; diff --git a/crates/proc-macro-srv/src/dylib.rs b/crates/proc-macro-srv/src/dylib.rs index 2b6c070fec..7aba74e539 100644 --- a/crates/proc-macro-srv/src/dylib.rs +++ b/crates/proc-macro-srv/src/dylib.rs @@ -1,7 +1,6 @@ //! 
Handles dynamic library loading for proc macro use std::{ - convert::TryInto, fmt, fs::File, io, diff --git a/crates/rust-analyzer/src/diagnostics/to_proto.rs b/crates/rust-analyzer/src/diagnostics/to_proto.rs index cff4bd7f66..74689fd875 100644 --- a/crates/rust-analyzer/src/diagnostics/to_proto.rs +++ b/crates/rust-analyzer/src/diagnostics/to_proto.rs @@ -512,7 +512,7 @@ fn clippy_code_description(code: Option<&str>) -> Option Date: Mon, 8 Aug 2022 20:20:45 -0400 Subject: [PATCH 0104/1945] Implement IntoFuture type inference --- crates/hir-expand/src/mod_path.rs | 1 + crates/hir-expand/src/name.rs | 2 + crates/hir-ty/src/infer.rs | 5 ++- crates/hir-ty/src/tests/traits.rs | 25 +++++++++++ crates/hir/src/lib.rs | 2 + crates/hir/src/source_analyzer.rs | 2 + crates/ide-assists/src/utils/suggest_name.rs | 1 + .../ide-completion/src/completions/keyword.rs | 44 ++++++++++--------- crates/test-utils/src/minicore.rs | 15 +++++++ 9 files changed, 75 insertions(+), 22 deletions(-) diff --git a/crates/hir-expand/src/mod_path.rs b/crates/hir-expand/src/mod_path.rs index fea09521e8..9ddafe0181 100644 --- a/crates/hir-expand/src/mod_path.rs +++ b/crates/hir-expand/src/mod_path.rs @@ -257,6 +257,7 @@ macro_rules! __known_path { (core::ops::RangeToInclusive) => {}; (core::ops::RangeInclusive) => {}; (core::future::Future) => {}; + (core::future::IntoFuture) => {}; (core::ops::Try) => {}; ($path:path) => { compile_error!("Please register your known path in the path module") diff --git a/crates/hir-expand/src/name.rs b/crates/hir-expand/src/name.rs index 47d191822d..757db79db4 100644 --- a/crates/hir-expand/src/name.rs +++ b/crates/hir-expand/src/name.rs @@ -258,6 +258,7 @@ pub mod known { Try, Ok, Future, + IntoFuture, Result, Option, Output, @@ -391,6 +392,7 @@ pub mod known { future_trait, index, index_mut, + into_future, mul_assign, mul, neg, diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs index 46eeea0e6f..95a7229e87 100644 --- a/crates/hir-ty/src/infer.rs +++ b/crates/hir-ty/src/infer.rs @@ -875,7 +875,10 @@ impl<'a> InferenceContext<'a> { } fn resolve_future_future_output(&self) -> Option { - let trait_ = self.resolve_lang_item(name![future_trait])?.as_trait()?; + let trait_ = self + .resolver + .resolve_known_trait(self.db.upcast(), &path![core::future::IntoFuture]) + .or_else(|| self.resolve_lang_item(name![future_trait])?.as_trait())?; self.db.trait_data(trait_).associated_type_by_name(&name![Output]) } diff --git a/crates/hir-ty/src/tests/traits.rs b/crates/hir-ty/src/tests/traits.rs index 75802a5eb4..730ebe2357 100644 --- a/crates/hir-ty/src/tests/traits.rs +++ b/crates/hir-ty/src/tests/traits.rs @@ -137,6 +137,31 @@ fn not_send() -> Box + 'static> { ); } +#[test] +fn into_future_trait() { + check_types( + r#" +//- minicore: future +struct Futurable; +impl core::future::IntoFuture for Futurable { + type Output = u64; + type IntoFuture = IntFuture; +} + +struct IntFuture; +impl core::future::Future for IntFuture { + type Output = u64; +} + +fn test() { + let r = Futurable; + let v = r.await; + v; +} //^ u64 +"#, + ); +} + #[test] fn infer_try() { check_types( diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 8f984210e1..800ea58ba2 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -2780,6 +2780,8 @@ impl Type { /// Checks that particular type `ty` implements `std::future::Future`. /// This function is used in `.await` syntax completion. 
pub fn impls_future(&self, db: &dyn HirDatabase) -> bool { + // FIXME: This should be checking for IntoFuture trait, but I don't know how to find the + // right TraitId in this crate. let std_future_trait = db .lang_item(self.env.krate, SmolStr::new_inline("future_trait")) .and_then(|it| it.as_trait()); diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs index f5e2e44307..756772cf84 100644 --- a/crates/hir/src/source_analyzer.rs +++ b/crates/hir/src/source_analyzer.rs @@ -269,6 +269,8 @@ impl SourceAnalyzer { db: &dyn HirDatabase, await_expr: &ast::AwaitExpr, ) -> Option { + // FIXME This should be pointing to the poll of IntoFuture::Output's Future impl, but I + // don't know how to resolve the Output type so that we can query for its poll method. let ty = self.ty_of_expr(db, &await_expr.expr()?.into())?; let op_fn = db diff --git a/crates/ide-assists/src/utils/suggest_name.rs b/crates/ide-assists/src/utils/suggest_name.rs index 779cdbc93c..68d59daef4 100644 --- a/crates/ide-assists/src/utils/suggest_name.rs +++ b/crates/ide-assists/src/utils/suggest_name.rs @@ -55,6 +55,7 @@ const USELESS_METHODS: &[&str] = &[ "iter", "into_iter", "iter_mut", + "into_future", ]; pub(crate) fn for_generic_parameter(ty: &ast::ImplTraitType) -> SmolStr { diff --git a/crates/ide-completion/src/completions/keyword.rs b/crates/ide-completion/src/completions/keyword.rs index 3989a451bd..032b23725f 100644 --- a/crates/ide-completion/src/completions/keyword.rs +++ b/crates/ide-completion/src/completions/keyword.rs @@ -75,16 +75,17 @@ impl Future for A {} fn foo(a: A) { a.$0 } "#, expect![[r#" - kw await expr.await - sn box Box::new(expr) - sn call function(expr) - sn dbg dbg!(expr) - sn dbgr dbg!(&expr) - sn let let - sn letm let mut - sn match match expr {} - sn ref &expr - sn refm &mut expr + kw await expr.await + me into_future() (as IntoFuture) fn(self) -> ::IntoFuture + sn box Box::new(expr) + sn call function(expr) + sn dbg dbg!(expr) + sn dbgr dbg!(&expr) + sn let let + sn letm let mut + sn match match expr {} + sn ref &expr + sn refm &mut expr "#]], ); @@ -98,18 +99,19 @@ fn foo() { } "#, expect![[r#" - kw await expr.await - sn box Box::new(expr) - sn call function(expr) - sn dbg dbg!(expr) - sn dbgr dbg!(&expr) - sn let let - sn letm let mut - sn match match expr {} - sn ref &expr - sn refm &mut expr + kw await expr.await + me into_future() (use core::future::IntoFuture) fn(self) -> ::IntoFuture + sn box Box::new(expr) + sn call function(expr) + sn dbg dbg!(expr) + sn dbgr dbg!(&expr) + sn let let + sn letm let mut + sn match match expr {} + sn ref &expr + sn refm &mut expr "#]], - ) + ); } #[test] diff --git a/crates/test-utils/src/minicore.rs b/crates/test-utils/src/minicore.rs index f48d1ec66a..6df29db474 100644 --- a/crates/test-utils/src/minicore.rs +++ b/crates/test-utils/src/minicore.rs @@ -471,6 +471,21 @@ pub mod future { #[lang = "poll"] fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll; } + + pub trait IntoFuture { + type Output; + type IntoFuture: Future; + #[lang = "into_future"] + fn into_future(self) -> Self::IntoFuture; + } + + impl IntoFuture for F { + type Output = F::Output; + type IntoFuture = F; + fn into_future(self) -> F { + self + } + } } pub mod task { pub enum Poll { From 950de7c3c355398a9c15812b74376e252cb176db Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 9 Aug 2022 14:31:17 +0200 Subject: [PATCH 0105/1945] Use `--keep-going` cargo flag when building build scripts --- crates/project-model/src/build_scripts.rs | 15 
+++++++++++++-- crates/project-model/src/tests.rs | 1 + crates/project-model/src/workspace.rs | 18 ++++++++++++++---- crates/rust-analyzer/src/reload.rs | 1 + 4 files changed, 29 insertions(+), 6 deletions(-) diff --git a/crates/project-model/src/build_scripts.rs b/crates/project-model/src/build_scripts.rs index ee7f8339a7..4b00479a4c 100644 --- a/crates/project-model/src/build_scripts.rs +++ b/crates/project-model/src/build_scripts.rs @@ -12,6 +12,7 @@ use cargo_metadata::{camino::Utf8Path, Message}; use la_arena::ArenaMap; use paths::AbsPathBuf; use rustc_hash::FxHashMap; +use semver::Version; use serde::Deserialize; use crate::{cfg_flag::CfgFlag, CargoConfig, CargoWorkspace, Package}; @@ -38,7 +39,7 @@ pub(crate) struct BuildScriptOutput { } impl WorkspaceBuildScripts { - fn build_command(config: &CargoConfig) -> Command { + fn build_command(config: &CargoConfig, toolchain: &Option) -> Command { if let Some([program, args @ ..]) = config.run_build_script_command.as_deref() { let mut cmd = Command::new(program); cmd.args(args); @@ -70,6 +71,15 @@ impl WorkspaceBuildScripts { } } + const RUST_1_62: Version = Version::new(1, 62, 0); + + match toolchain { + Some(v) if v >= &RUST_1_62 => { + cmd.args(&["-Z", "unstable-options", "--keep-going"]).env("RUSTC_BOOTSTRAP", "1"); + } + _ => (), + } + cmd } @@ -77,8 +87,9 @@ impl WorkspaceBuildScripts { config: &CargoConfig, workspace: &CargoWorkspace, progress: &dyn Fn(String), + toolchain: &Option, ) -> io::Result { - let mut cmd = Self::build_command(config); + let mut cmd = Self::build_command(config, toolchain); if config.wrap_rustc_in_build_scripts { // Setup RUSTC_WRAPPER to point to `rust-analyzer` binary itself. We use diff --git a/crates/project-model/src/tests.rs b/crates/project-model/src/tests.rs index e304a59c01..8d0fa757c2 100644 --- a/crates/project-model/src/tests.rs +++ b/crates/project-model/src/tests.rs @@ -28,6 +28,7 @@ fn load_cargo_with_overrides(file: &str, cfg_overrides: CfgOverrides) -> CrateGr rustc: None, rustc_cfg: Vec::new(), cfg_overrides, + toolchain: None, }; to_crate_graph(project_workspace) } diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs index b144006b44..daabb299f7 100644 --- a/crates/project-model/src/workspace.rs +++ b/crates/project-model/src/workspace.rs @@ -12,6 +12,7 @@ use base_db::{ use cfg::{CfgDiff, CfgOptions}; use paths::{AbsPath, AbsPathBuf}; use rustc_hash::{FxHashMap, FxHashSet}; +use semver::Version; use stdx::always; use crate::{ @@ -77,6 +78,7 @@ pub enum ProjectWorkspace { /// different target. rustc_cfg: Vec, cfg_overrides: CfgOverrides, + toolchain: Option, }, /// Project workspace was manually specified using a `rust-project.json` file. Json { project: ProjectJson, sysroot: Option, rustc_cfg: Vec }, @@ -105,6 +107,7 @@ impl fmt::Debug for ProjectWorkspace { rustc, rustc_cfg, cfg_overrides, + toolchain, } => f .debug_struct("Cargo") .field("root", &cargo.workspace_root().file_name()) @@ -116,6 +119,7 @@ impl fmt::Debug for ProjectWorkspace { ) .field("n_rustc_cfg", &rustc_cfg.len()) .field("n_cfg_overrides", &cfg_overrides.len()) + .field("toolchain", &toolchain) .finish(), ProjectWorkspace::Json { project, sysroot, rustc_cfg } => { let mut debug_struct = f.debug_struct("Json"); @@ -160,6 +164,9 @@ impl ProjectWorkspace { cmd.arg("--version"); cmd })?; + let toolchain = cargo_version + .get("cargo ".len()..) 
+ .and_then(|it| Version::parse(it.split_whitespace().next()?).ok()); let meta = CargoWorkspace::fetch_metadata( &cargo_toml, @@ -169,9 +176,9 @@ impl ProjectWorkspace { ) .with_context(|| { format!( - "Failed to read Cargo metadata from Cargo.toml file {}, {}", + "Failed to read Cargo metadata from Cargo.toml file {}, {:?}", cargo_toml.display(), - cargo_version + toolchain ) })?; let cargo = CargoWorkspace::new(meta); @@ -219,6 +226,7 @@ impl ProjectWorkspace { rustc, rustc_cfg, cfg_overrides, + toolchain, } } }; @@ -271,8 +279,8 @@ impl ProjectWorkspace { progress: &dyn Fn(String), ) -> Result { match self { - ProjectWorkspace::Cargo { cargo, .. } => { - WorkspaceBuildScripts::run(config, cargo, progress).with_context(|| { + ProjectWorkspace::Cargo { cargo, toolchain, .. } => { + WorkspaceBuildScripts::run(config, cargo, progress, toolchain).with_context(|| { format!("Failed to run build scripts for {}", &cargo.workspace_root().display()) }) } @@ -320,6 +328,7 @@ impl ProjectWorkspace { rustc_cfg: _, cfg_overrides: _, build_scripts, + toolchain: _, } => { cargo .packages() @@ -425,6 +434,7 @@ impl ProjectWorkspace { rustc_cfg, cfg_overrides, build_scripts, + toolchain: _, } => cargo_to_crate_graph( rustc_cfg.clone(), cfg_overrides, diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs index 49ccad71a1..ceb2a6d703 100644 --- a/crates/rust-analyzer/src/reload.rs +++ b/crates/rust-analyzer/src/reload.rs @@ -219,6 +219,7 @@ impl GlobalState { cfg_overrides, build_scripts: _, + toolchain: _, } => Some((cargo, sysroot, rustc, rustc_cfg, cfg_overrides)), _ => None, }; From 8c9359b072098403eb0b37a1b0822bb7b4f3b8ba Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 9 Aug 2022 17:53:16 +0200 Subject: [PATCH 0106/1945] Fix pattern field completions not working for unions --- crates/ide-completion/src/completions.rs | 1 - crates/ide-completion/src/completions/expr.rs | 116 ++++++++++-------- .../ide-completion/src/completions/record.rs | 47 ++++--- crates/ide-completion/src/context.rs | 1 + crates/ide-completion/src/tests/record.rs | 91 ++++++++------ 5 files changed, 140 insertions(+), 116 deletions(-) diff --git a/crates/ide-completion/src/completions.rs b/crates/ide-completion/src/completions.rs index 72579e6026..55c3e28392 100644 --- a/crates/ide-completion/src/completions.rs +++ b/crates/ide-completion/src/completions.rs @@ -617,7 +617,6 @@ pub(super) fn complete_name_ref( dot::complete_undotted_self(acc, ctx, path_ctx, expr_ctx); item_list::complete_item_list_in_expr(acc, ctx, path_ctx, expr_ctx); - record::complete_record_expr_func_update(acc, ctx, path_ctx, expr_ctx); snippet::complete_expr_snippet(acc, ctx, path_ctx, expr_ctx); } PathKind::Type { location } => { diff --git a/crates/ide-completion/src/completions/expr.rs b/crates/ide-completion/src/completions/expr.rs index 5d0ddaaf2a..e6c4bdf520 100644 --- a/crates/ide-completion/src/completions/expr.rs +++ b/crates/ide-completion/src/completions/expr.rs @@ -1,8 +1,10 @@ //! Completion of names from the current scope in expression position. 
use hir::ScopeDef; +use syntax::ast; use crate::{ + completions::record::add_default_update, context::{ExprCtx, PathCompletionCtx, Qualified}, CompletionContext, Completions, }; @@ -219,60 +221,76 @@ pub(crate) fn complete_expr_path( _ => (), }); - if is_func_update.is_none() { - let mut add_keyword = - |kw, snippet| acc.add_keyword_snippet_expr(ctx, incomplete_let, kw, snippet); + match is_func_update { + Some(record_expr) => { + let ty = ctx.sema.type_of_expr(&ast::Expr::RecordExpr(record_expr.clone())); - if !in_block_expr { - add_keyword("unsafe", "unsafe {\n $0\n}"); + match ty.as_ref().and_then(|t| t.original.as_adt()) { + Some(hir::Adt::Union(_)) => (), + _ => { + cov_mark::hit!(functional_update); + let missing_fields = + ctx.sema.record_literal_missing_fields(record_expr); + add_default_update(acc, ctx, ty, &missing_fields); + } + }; } - add_keyword("match", "match $1 {\n $0\n}"); - add_keyword("while", "while $1 {\n $0\n}"); - add_keyword("while let", "while let $1 = $2 {\n $0\n}"); - add_keyword("loop", "loop {\n $0\n}"); - if in_match_guard { - add_keyword("if", "if $0"); - } else { - add_keyword("if", "if $1 {\n $0\n}"); - } - add_keyword("if let", "if let $1 = $2 {\n $0\n}"); - add_keyword("for", "for $1 in $2 {\n $0\n}"); - add_keyword("true", "true"); - add_keyword("false", "false"); + None => { + let mut add_keyword = |kw, snippet| { + acc.add_keyword_snippet_expr(ctx, incomplete_let, kw, snippet) + }; - if in_condition || in_block_expr { - add_keyword("let", "let"); - } - - if after_if_expr { - add_keyword("else", "else {\n $0\n}"); - add_keyword("else if", "else if $1 {\n $0\n}"); - } - - if wants_mut_token { - add_keyword("mut", "mut "); - } - - if in_loop_body { - if in_block_expr { - add_keyword("continue", "continue;"); - add_keyword("break", "break;"); - } else { - add_keyword("continue", "continue"); - add_keyword("break", "break"); + if !in_block_expr { + add_keyword("unsafe", "unsafe {\n $0\n}"); } - } + add_keyword("match", "match $1 {\n $0\n}"); + add_keyword("while", "while $1 {\n $0\n}"); + add_keyword("while let", "while let $1 = $2 {\n $0\n}"); + add_keyword("loop", "loop {\n $0\n}"); + if in_match_guard { + add_keyword("if", "if $0"); + } else { + add_keyword("if", "if $1 {\n $0\n}"); + } + add_keyword("if let", "if let $1 = $2 {\n $0\n}"); + add_keyword("for", "for $1 in $2 {\n $0\n}"); + add_keyword("true", "true"); + add_keyword("false", "false"); - if let Some(ty) = innermost_ret_ty { - add_keyword( - "return", - match (in_block_expr, ty.is_unit()) { - (true, true) => "return ;", - (true, false) => "return;", - (false, true) => "return $0", - (false, false) => "return", - }, - ); + if in_condition || in_block_expr { + add_keyword("let", "let"); + } + + if after_if_expr { + add_keyword("else", "else {\n $0\n}"); + add_keyword("else if", "else if $1 {\n $0\n}"); + } + + if wants_mut_token { + add_keyword("mut", "mut "); + } + + if in_loop_body { + if in_block_expr { + add_keyword("continue", "continue;"); + add_keyword("break", "break;"); + } else { + add_keyword("continue", "continue"); + add_keyword("break", "break"); + } + } + + if let Some(ty) = innermost_ret_ty { + add_keyword( + "return", + match (in_block_expr, ty.is_unit()) { + (true, true) => "return ;", + (true, false) => "return;", + (false, true) => "return $0", + (false, false) => "return", + }, + ); + } } } } diff --git a/crates/ide-completion/src/completions/record.rs b/crates/ide-completion/src/completions/record.rs index 1c9042390d..3f85b45a13 100644 --- 
a/crates/ide-completion/src/completions/record.rs +++ b/crates/ide-completion/src/completions/record.rs @@ -3,7 +3,7 @@ use ide_db::SymbolKind; use syntax::ast::{self, Expr}; use crate::{ - context::{DotAccess, DotAccessKind, ExprCtx, PathCompletionCtx, PatternContext, Qualified}, + context::{DotAccess, DotAccessKind, PatternContext}, CompletionContext, CompletionItem, CompletionItemKind, CompletionRelevance, CompletionRelevancePostfixMatch, Completions, }; @@ -14,7 +14,24 @@ pub(crate) fn complete_record_pattern_fields( pattern_ctx: &PatternContext, ) { if let PatternContext { record_pat: Some(record_pat), .. } = pattern_ctx { - complete_fields(acc, ctx, ctx.sema.record_pattern_missing_fields(record_pat)); + let ty = ctx.sema.type_of_pat(&ast::Pat::RecordPat(record_pat.clone())); + let missing_fields = match ty.as_ref().and_then(|t| t.original.as_adt()) { + Some(hir::Adt::Union(un)) => { + // ctx.sema.record_pat_missing_fields will always return + // an empty Vec on a union literal. This is normally + // reasonable, but here we'd like to present the full list + // of fields if the literal is empty. + let were_fields_specified = + record_pat.record_pat_field_list().and_then(|fl| fl.fields().next()).is_some(); + + match were_fields_specified { + false => un.fields(ctx.db).into_iter().map(|f| (f, f.ty(ctx.db))).collect(), + true => return, + } + } + _ => ctx.sema.record_pattern_missing_fields(record_pat), + }; + complete_fields(acc, ctx, missing_fields); } } @@ -44,6 +61,7 @@ pub(crate) fn complete_record_expr_fields( let missing_fields = ctx.sema.record_literal_missing_fields(record_expr); add_default_update(acc, ctx, ty, &missing_fields); if dot_prefix { + cov_mark::hit!(functional_update_one_dot); let mut item = CompletionItem::new(CompletionItemKind::Snippet, ctx.source_range(), ".."); item.insert_text("."); @@ -56,30 +74,7 @@ pub(crate) fn complete_record_expr_fields( complete_fields(acc, ctx, missing_fields); } -// FIXME: This should probably be part of complete_path_expr -pub(crate) fn complete_record_expr_func_update( - acc: &mut Completions, - ctx: &CompletionContext<'_>, - path_ctx: &PathCompletionCtx, - expr_ctx: &ExprCtx, -) { - if !matches!(path_ctx.qualified, Qualified::No) { - return; - } - if let ExprCtx { is_func_update: Some(record_expr), .. 
} = expr_ctx { - let ty = ctx.sema.type_of_expr(&Expr::RecordExpr(record_expr.clone())); - - match ty.as_ref().and_then(|t| t.original.as_adt()) { - Some(hir::Adt::Union(_)) => (), - _ => { - let missing_fields = ctx.sema.record_literal_missing_fields(record_expr); - add_default_update(acc, ctx, ty, &missing_fields); - } - }; - } -} - -fn add_default_update( +pub(crate) fn add_default_update( acc: &mut Completions, ctx: &CompletionContext<'_>, ty: Option, diff --git a/crates/ide-completion/src/context.rs b/crates/ide-completion/src/context.rs index e35f79d2b6..759742d347 100644 --- a/crates/ide-completion/src/context.rs +++ b/crates/ide-completion/src/context.rs @@ -134,6 +134,7 @@ pub(crate) struct ExprCtx { pub(crate) in_condition: bool, pub(crate) incomplete_let: bool, pub(crate) ref_expr_parent: Option, + /// The surrounding RecordExpression we are completing a functional update pub(crate) is_func_update: Option, pub(crate) self_param: Option, pub(crate) innermost_ret_ty: Option, diff --git a/crates/ide-completion/src/tests/record.rs b/crates/ide-completion/src/tests/record.rs index f6accc68e5..e9ee516117 100644 --- a/crates/ide-completion/src/tests/record.rs +++ b/crates/ide-completion/src/tests/record.rs @@ -103,47 +103,9 @@ fn foo(f: Struct) { } #[test] -fn functional_update() { - // FIXME: This should filter out all completions that do not have the type `Foo` - check( - r#" -//- minicore:default -struct Foo { foo1: u32, foo2: u32 } -impl Default for Foo { - fn default() -> Self { loop {} } -} +fn in_functional_update() { + cov_mark::check!(functional_update); -fn main() { - let thing = 1; - let foo = Foo { foo1: 0, foo2: 0 }; - let foo2 = Foo { thing, $0 } -} -"#, - expect![[r#" - fd ..Default::default() - fd foo1 u32 - fd foo2 u32 - "#]], - ); - check( - r#" -//- minicore:default -struct Foo { foo1: u32, foo2: u32 } -impl Default for Foo { - fn default() -> Self { loop {} } -} - -fn main() { - let thing = 1; - let foo = Foo { foo1: 0, foo2: 0 }; - let foo2 = Foo { thing, .$0 } -} -"#, - expect![[r#" - fd ..Default::default() - sn .. - "#]], - ); check( r#" //- minicore:default @@ -192,6 +154,55 @@ fn main() { ); } +#[test] +fn functional_update_no_dot() { + // FIXME: This should filter out all completions that do not have the type `Foo` + check( + r#" +//- minicore:default +struct Foo { foo1: u32, foo2: u32 } +impl Default for Foo { + fn default() -> Self { loop {} } +} + +fn main() { + let thing = 1; + let foo = Foo { foo1: 0, foo2: 0 }; + let foo2 = Foo { thing, $0 } +} +"#, + expect![[r#" + fd ..Default::default() + fd foo1 u32 + fd foo2 u32 + "#]], + ); +} + +#[test] +fn functional_update_one_dot() { + cov_mark::check!(functional_update_one_dot); + check( + r#" +//- minicore:default +struct Foo { foo1: u32, foo2: u32 } +impl Default for Foo { + fn default() -> Self { loop {} } +} + +fn main() { + let thing = 1; + let foo = Foo { foo1: 0, foo2: 0 }; + let foo2 = Foo { thing, .$0 } +} +"#, + expect![[r#" + fd ..Default::default() + sn .. 
+ "#]], + ); +} + #[test] fn empty_union_literal() { check( From b3ac58dfb80e6dad9fc777f23247fd379424b65e Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 9 Aug 2022 18:08:05 +0200 Subject: [PATCH 0107/1945] Add some more `cov_mark`s --- crates/ide-completion/src/completions/expr.rs | 4 +++- crates/ide-completion/src/completions/record.rs | 12 ++++++++---- crates/ide-completion/src/tests/record.rs | 1 + 3 files changed, 12 insertions(+), 5 deletions(-) diff --git a/crates/ide-completion/src/completions/expr.rs b/crates/ide-completion/src/completions/expr.rs index e6c4bdf520..4d66af9e8d 100644 --- a/crates/ide-completion/src/completions/expr.rs +++ b/crates/ide-completion/src/completions/expr.rs @@ -231,7 +231,9 @@ pub(crate) fn complete_expr_path( cov_mark::hit!(functional_update); let missing_fields = ctx.sema.record_literal_missing_fields(record_expr); - add_default_update(acc, ctx, ty, &missing_fields); + if !missing_fields.is_empty() { + add_default_update(acc, ctx, ty); + } } }; } diff --git a/crates/ide-completion/src/completions/record.rs b/crates/ide-completion/src/completions/record.rs index 3f85b45a13..bfb98b9f27 100644 --- a/crates/ide-completion/src/completions/record.rs +++ b/crates/ide-completion/src/completions/record.rs @@ -59,7 +59,11 @@ pub(crate) fn complete_record_expr_fields( } _ => { let missing_fields = ctx.sema.record_literal_missing_fields(record_expr); - add_default_update(acc, ctx, ty, &missing_fields); + + if !missing_fields.is_empty() { + cov_mark::hit!(functional_update_field); + add_default_update(acc, ctx, ty); + } if dot_prefix { cov_mark::hit!(functional_update_one_dot); let mut item = @@ -78,14 +82,14 @@ pub(crate) fn add_default_update( acc: &mut Completions, ctx: &CompletionContext<'_>, ty: Option, - missing_fields: &[(hir::Field, hir::Type)], ) { let default_trait = ctx.famous_defs().core_default_Default(); - let impl_default_trait = default_trait + let impls_default_trait = default_trait .zip(ty.as_ref()) .map_or(false, |(default_trait, ty)| ty.original.impls_trait(ctx.db, default_trait, &[])); - if impl_default_trait && !missing_fields.is_empty() { + if impls_default_trait { // FIXME: This should make use of scope_def like completions so we get all the other goodies + // that is we should handle this like actually completing the default function let completion_text = "..Default::default()"; let mut item = CompletionItem::new(SymbolKind::Field, ctx.source_range(), completion_text); let completion_text = diff --git a/crates/ide-completion/src/tests/record.rs b/crates/ide-completion/src/tests/record.rs index e9ee516117..328faaa060 100644 --- a/crates/ide-completion/src/tests/record.rs +++ b/crates/ide-completion/src/tests/record.rs @@ -156,6 +156,7 @@ fn main() { #[test] fn functional_update_no_dot() { + cov_mark::check!(functional_update_field); // FIXME: This should filter out all completions that do not have the type `Foo` check( r#" From 49d24f639f60325000463f6bb20d832c0ac72da6 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 9 Aug 2022 18:23:25 +0200 Subject: [PATCH 0108/1945] Recover from missing ellipsis in record literals for path expressions --- crates/parser/src/grammar/expressions.rs | 26 +++++++--- ...ord_literal_missing_ellipsis_recovery.rast | 43 ++++++++++++++++ ...ecord_literal_missing_ellipsis_recovery.rs | 3 ++ .../parser/inline/ok/0061_record_lit.rast | 49 +++++++++++++++++++ .../parser/inline/ok/0061_record_lit.rs | 2 + 5 files changed, 116 insertions(+), 7 deletions(-) create mode 100644 
crates/parser/test_data/parser/inline/err/0014_record_literal_missing_ellipsis_recovery.rast create mode 100644 crates/parser/test_data/parser/inline/err/0014_record_literal_missing_ellipsis_recovery.rs diff --git a/crates/parser/src/grammar/expressions.rs b/crates/parser/src/grammar/expressions.rs index e7402104eb..dcaceade65 100644 --- a/crates/parser/src/grammar/expressions.rs +++ b/crates/parser/src/grammar/expressions.rs @@ -564,8 +564,10 @@ fn path_expr(p: &mut Parser<'_>, r: Restrictions) -> (CompletedMarker, BlockLike // test record_lit // fn foo() { // S {}; +// S { x }; // S { x, y: 32, }; // S { x, y: 32, ..Default::default() }; +// S { x: ::default() }; // TupleStruct { 0: 1 }; // } pub(crate) fn record_expr_field_list(p: &mut Parser<'_>) { @@ -582,16 +584,26 @@ pub(crate) fn record_expr_field_list(p: &mut Parser<'_>) { match p.current() { IDENT | INT_NUMBER => { - // test_err record_literal_before_ellipsis_recovery + // test_err record_literal_missing_ellipsis_recovery // fn main() { - // S { field ..S::default() } + // S { S::default() } // } - if p.nth_at(1, T![:]) || p.nth_at(1, T![..]) { - name_ref_or_index(p); - p.expect(T![:]); + if p.nth_at(1, T![::]) { + m.abandon(p); + p.expect(T![..]); + expr(p); + } else { + // test_err record_literal_before_ellipsis_recovery + // fn main() { + // S { field ..S::default() } + // } + if p.nth_at(1, T![:]) || p.nth_at(1, T![..]) { + name_ref_or_index(p); + p.expect(T![:]); + } + expr(p); + m.complete(p, RECORD_EXPR_FIELD); } - expr(p); - m.complete(p, RECORD_EXPR_FIELD); } T![.] if p.at(T![..]) => { m.abandon(p); diff --git a/crates/parser/test_data/parser/inline/err/0014_record_literal_missing_ellipsis_recovery.rast b/crates/parser/test_data/parser/inline/err/0014_record_literal_missing_ellipsis_recovery.rast new file mode 100644 index 0000000000..0c5b618e6f --- /dev/null +++ b/crates/parser/test_data/parser/inline/err/0014_record_literal_missing_ellipsis_recovery.rast @@ -0,0 +1,43 @@ +SOURCE_FILE + FN + FN_KW "fn" + WHITESPACE " " + NAME + IDENT "main" + PARAM_LIST + L_PAREN "(" + R_PAREN ")" + WHITESPACE " " + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + WHITESPACE "\n " + RECORD_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "S" + WHITESPACE " " + RECORD_EXPR_FIELD_LIST + L_CURLY "{" + WHITESPACE " " + CALL_EXPR + PATH_EXPR + PATH + PATH + PATH_SEGMENT + NAME_REF + IDENT "S" + COLON2 "::" + PATH_SEGMENT + NAME_REF + IDENT "default" + ARG_LIST + L_PAREN "(" + R_PAREN ")" + WHITESPACE " " + R_CURLY "}" + WHITESPACE "\n" + R_CURLY "}" + WHITESPACE "\n" +error 19: expected DOT2 diff --git a/crates/parser/test_data/parser/inline/err/0014_record_literal_missing_ellipsis_recovery.rs b/crates/parser/test_data/parser/inline/err/0014_record_literal_missing_ellipsis_recovery.rs new file mode 100644 index 0000000000..1b594e8ab9 --- /dev/null +++ b/crates/parser/test_data/parser/inline/err/0014_record_literal_missing_ellipsis_recovery.rs @@ -0,0 +1,3 @@ +fn main() { + S { S::default() } +} diff --git a/crates/parser/test_data/parser/inline/ok/0061_record_lit.rast b/crates/parser/test_data/parser/inline/ok/0061_record_lit.rast index 9997d0ae34..00948c322f 100644 --- a/crates/parser/test_data/parser/inline/ok/0061_record_lit.rast +++ b/crates/parser/test_data/parser/inline/ok/0061_record_lit.rast @@ -24,6 +24,26 @@ SOURCE_FILE R_CURLY "}" SEMICOLON ";" WHITESPACE "\n " + EXPR_STMT + RECORD_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "S" + WHITESPACE " " + RECORD_EXPR_FIELD_LIST + L_CURLY "{" + WHITESPACE " " + RECORD_EXPR_FIELD + PATH_EXPR + PATH + 
PATH_SEGMENT + NAME_REF + IDENT "x" + WHITESPACE " " + R_CURLY "}" + SEMICOLON ";" + WHITESPACE "\n " EXPR_STMT RECORD_EXPR PATH @@ -100,6 +120,35 @@ SOURCE_FILE R_CURLY "}" SEMICOLON ";" WHITESPACE "\n " + EXPR_STMT + RECORD_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "S" + WHITESPACE " " + RECORD_EXPR_FIELD_LIST + L_CURLY "{" + WHITESPACE " " + RECORD_EXPR_FIELD + NAME_REF + IDENT "x" + COLON ":" + WHITESPACE " " + CALL_EXPR + PATH_EXPR + PATH + PATH_SEGMENT + COLON2 "::" + NAME_REF + IDENT "default" + ARG_LIST + L_PAREN "(" + R_PAREN ")" + WHITESPACE " " + R_CURLY "}" + SEMICOLON ";" + WHITESPACE "\n " EXPR_STMT RECORD_EXPR PATH diff --git a/crates/parser/test_data/parser/inline/ok/0061_record_lit.rs b/crates/parser/test_data/parser/inline/ok/0061_record_lit.rs index 6285e55497..86411fbb7d 100644 --- a/crates/parser/test_data/parser/inline/ok/0061_record_lit.rs +++ b/crates/parser/test_data/parser/inline/ok/0061_record_lit.rs @@ -1,6 +1,8 @@ fn foo() { S {}; + S { x }; S { x, y: 32, }; S { x, y: 32, ..Default::default() }; + S { x: ::default() }; TupleStruct { 0: 1 }; } From dc3219bb11f032e2e6f1d16deab2b5eabe9463d7 Mon Sep 17 00:00:00 2001 From: Justin Ridgewell Date: Tue, 9 Aug 2022 16:25:25 -0400 Subject: [PATCH 0109/1945] Suggest `.await` when type impls IntoFuture --- crates/hir/src/lib.rs | 23 +++++++++++----- .../ide-completion/src/completions/keyword.rs | 26 +++++++++++++++++++ 2 files changed, 42 insertions(+), 7 deletions(-) diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 800ea58ba2..7f16634afe 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -2780,19 +2780,28 @@ impl Type { /// Checks that particular type `ty` implements `std::future::Future`. /// This function is used in `.await` syntax completion. pub fn impls_future(&self, db: &dyn HirDatabase) -> bool { - // FIXME: This should be checking for IntoFuture trait, but I don't know how to find the - // right TraitId in this crate. - let std_future_trait = db - .lang_item(self.env.krate, SmolStr::new_inline("future_trait")) - .and_then(|it| it.as_trait()); - let std_future_trait = match std_future_trait { + let trait_ = db + .lang_item(self.env.krate, SmolStr::new_inline("into_future")) + .and_then(|it| { + let into_future_fn = it.as_function()?; + let assoc_item = as_assoc_item(db, AssocItem::Function, into_future_fn)?; + let into_future_trait = assoc_item.containing_trait_or_trait_impl(db)?; + Some(into_future_trait.id) + }) + .or_else(|| { + let future_trait = + db.lang_item(self.env.krate, SmolStr::new_inline("future_trait"))?; + future_trait.as_trait() + }); + + let trait_ = match trait_ { Some(it) => it, None => return false, }; let canonical_ty = Canonical { value: self.ty.clone(), binders: CanonicalVarKinds::empty(Interner) }; - method_resolution::implements_trait(&canonical_ty, db, self.env.clone(), std_future_trait) + method_resolution::implements_trait(&canonical_ty, db, self.env.clone(), trait_) } /// Checks that particular type `ty` implements `std::ops::FnOnce`. 
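(Illustrative aside, not part of the patch: a minimal sketch, under stated assumptions, of the behavior this change enables. The `Deferred` type and `fetch` function below are hypothetical names invented for the example; the point is that `.await` completion now also fires on a type that only implements `IntoFuture`, since the `into_future` lang item is checked before falling back to `future_trait`.)

    use std::future::{ready, IntoFuture, Ready};

    struct Deferred(u32);

    impl IntoFuture for Deferred {
        type Output = u32;
        type IntoFuture = Ready<u32>;

        fn into_future(self) -> Self::IntoFuture {
            // Wrap the value in an already-completed future.
            ready(self.0)
        }
    }

    async fn fetch() -> u32 {
        let d = Deferred(1);
        // With this change, typing `d.` offers the `await` keyword completion
        // even though `Deferred` is not itself a `Future`.
        d.await
    }
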
diff --git a/crates/ide-completion/src/completions/keyword.rs b/crates/ide-completion/src/completions/keyword.rs index 032b23725f..1d03c8cc5c 100644 --- a/crates/ide-completion/src/completions/keyword.rs +++ b/crates/ide-completion/src/completions/keyword.rs @@ -114,6 +114,32 @@ fn foo() { ); } + #[test] + fn test_completion_await_impls_into_future() { + check( + r#" +//- minicore: future +use core::future::*; +struct A {} +impl IntoFuture for A {} +fn foo(a: A) { a.$0 } +"#, + expect![[r#" + kw await expr.await + me into_future() (as IntoFuture) fn(self) -> ::IntoFuture + sn box Box::new(expr) + sn call function(expr) + sn dbg dbg!(expr) + sn dbgr dbg!(&expr) + sn let let + sn letm let mut + sn match match expr {} + sn ref &expr + sn refm &mut expr + "#]], + ); + } + #[test] fn let_semi() { cov_mark::check!(let_semi); From f4f70c0e0da9783eb0b935e0c8698a9505c7b45e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Wed, 10 Aug 2022 10:49:49 +0300 Subject: [PATCH 0110/1945] Remove incomplete 1.64 ABI --- .../proc-macro-srv/src/abis/abi_1_64/mod.rs | 105 -- .../abis/abi_1_64/proc_macro/bridge/buffer.rs | 156 --- .../abis/abi_1_64/proc_macro/bridge/client.rs | 529 -------- .../abi_1_64/proc_macro/bridge/closure.rs | 32 - .../abis/abi_1_64/proc_macro/bridge/handle.rs | 89 -- .../abis/abi_1_64/proc_macro/bridge/mod.rs | 493 -------- .../abis/abi_1_64/proc_macro/bridge/rpc.rs | 304 ----- .../abi_1_64/proc_macro/bridge/scoped_cell.rs | 81 -- .../proc_macro/bridge/selfless_reify.rs | 84 -- .../abis/abi_1_64/proc_macro/bridge/server.rs | 339 ----- .../abis/abi_1_64/proc_macro/diagnostic.rs | 166 --- .../src/abis/abi_1_64/proc_macro/mod.rs | 1125 ----------------- .../src/abis/abi_1_64/proc_macro/quote.rs | 139 -- .../src/abis/abi_1_64/ra_server.rs | 791 ------------ crates/proc-macro-srv/src/abis/mod.rs | 11 +- 15 files changed, 1 insertion(+), 4443 deletions(-) delete mode 100644 crates/proc-macro-srv/src/abis/abi_1_64/mod.rs delete mode 100644 crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/buffer.rs delete mode 100644 crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/client.rs delete mode 100644 crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/closure.rs delete mode 100644 crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/handle.rs delete mode 100644 crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/mod.rs delete mode 100644 crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/rpc.rs delete mode 100644 crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/scoped_cell.rs delete mode 100644 crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/selfless_reify.rs delete mode 100644 crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/server.rs delete mode 100644 crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/diagnostic.rs delete mode 100644 crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/mod.rs delete mode 100644 crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/quote.rs delete mode 100644 crates/proc-macro-srv/src/abis/abi_1_64/ra_server.rs diff --git a/crates/proc-macro-srv/src/abis/abi_1_64/mod.rs b/crates/proc-macro-srv/src/abis/abi_1_64/mod.rs deleted file mode 100644 index 9d56f0eaf8..0000000000 --- a/crates/proc-macro-srv/src/abis/abi_1_64/mod.rs +++ /dev/null @@ -1,105 +0,0 @@ -//! Proc macro ABI. 
- -#[allow(dead_code)] -#[doc(hidden)] -mod proc_macro; - -#[allow(dead_code)] -#[doc(hidden)] -mod ra_server; - -use libloading::Library; -use proc_macro_api::ProcMacroKind; - -use super::PanicMessage; - -pub use ra_server::TokenStream; - -pub(crate) struct Abi { - exported_macros: Vec, -} - -impl From for PanicMessage { - fn from(p: proc_macro::bridge::PanicMessage) -> Self { - Self { message: p.as_str().map(|s| s.to_string()) } - } -} - -impl Abi { - pub unsafe fn from_lib(lib: &Library, symbol_name: String) -> Result { - let macros: libloading::Symbol<'_, &&[proc_macro::bridge::client::ProcMacro]> = - lib.get(symbol_name.as_bytes())?; - Ok(Self { exported_macros: macros.to_vec() }) - } - - pub fn expand( - &self, - macro_name: &str, - macro_body: &tt::Subtree, - attributes: Option<&tt::Subtree>, - ) -> Result { - let parsed_body = TokenStream::with_subtree(macro_body.clone()); - - let parsed_attributes = - attributes.map_or(TokenStream::new(), |attr| TokenStream::with_subtree(attr.clone())); - - for proc_macro in &self.exported_macros { - match proc_macro { - proc_macro::bridge::client::ProcMacro::CustomDerive { - trait_name, client, .. - } if *trait_name == macro_name => { - let res = client.run( - &proc_macro::bridge::server::SameThread, - ra_server::RustAnalyzer::default(), - parsed_body, - true, - ); - return res.map(|it| it.into_subtree()).map_err(PanicMessage::from); - } - proc_macro::bridge::client::ProcMacro::Bang { name, client } - if *name == macro_name => - { - let res = client.run( - &proc_macro::bridge::server::SameThread, - ra_server::RustAnalyzer::default(), - parsed_body, - true, - ); - return res.map(|it| it.into_subtree()).map_err(PanicMessage::from); - } - proc_macro::bridge::client::ProcMacro::Attr { name, client } - if *name == macro_name => - { - let res = client.run( - &proc_macro::bridge::server::SameThread, - ra_server::RustAnalyzer::default(), - parsed_attributes, - parsed_body, - true, - ); - return res.map(|it| it.into_subtree()).map_err(PanicMessage::from); - } - _ => continue, - } - } - - Err(proc_macro::bridge::PanicMessage::String("Nothing to expand".to_string()).into()) - } - - pub fn list_macros(&self) -> Vec<(String, ProcMacroKind)> { - self.exported_macros - .iter() - .map(|proc_macro| match proc_macro { - proc_macro::bridge::client::ProcMacro::CustomDerive { trait_name, .. } => { - (trait_name.to_string(), ProcMacroKind::CustomDerive) - } - proc_macro::bridge::client::ProcMacro::Bang { name, .. } => { - (name.to_string(), ProcMacroKind::FuncLike) - } - proc_macro::bridge::client::ProcMacro::Attr { name, .. } => { - (name.to_string(), ProcMacroKind::Attr) - } - }) - .collect() - } -} diff --git a/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/buffer.rs b/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/buffer.rs deleted file mode 100644 index 48030f8d82..0000000000 --- a/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/buffer.rs +++ /dev/null @@ -1,156 +0,0 @@ -//! Buffer management for same-process client<->server communication. 
- -use std::io::{self, Write}; -use std::mem; -use std::ops::{Deref, DerefMut}; -use std::slice; - -#[repr(C)] -pub struct Buffer { - data: *mut u8, - len: usize, - capacity: usize, - reserve: extern "C" fn(Buffer, usize) -> Buffer, - drop: extern "C" fn(Buffer), -} - -unsafe impl Sync for Buffer {} -unsafe impl Send for Buffer {} - -impl Default for Buffer { - #[inline] - fn default() -> Self { - Self::from(vec![]) - } -} - -impl Deref for Buffer { - type Target = [u8]; - #[inline] - fn deref(&self) -> &[u8] { - unsafe { slice::from_raw_parts(self.data as *const u8, self.len) } - } -} - -impl DerefMut for Buffer { - #[inline] - fn deref_mut(&mut self) -> &mut [u8] { - unsafe { slice::from_raw_parts_mut(self.data, self.len) } - } -} - -impl Buffer { - #[inline] - pub(super) fn new() -> Self { - Self::default() - } - - #[inline] - pub(super) fn clear(&mut self) { - self.len = 0; - } - - #[inline] - pub(super) fn take(&mut self) -> Self { - mem::take(self) - } - - // We have the array method separate from extending from a slice. This is - // because in the case of small arrays, codegen can be more efficient - // (avoiding a memmove call). With extend_from_slice, LLVM at least - // currently is not able to make that optimization. - #[inline] - pub(super) fn extend_from_array(&mut self, xs: &[u8; N]) { - if xs.len() > (self.capacity - self.len) { - let b = self.take(); - *self = (b.reserve)(b, xs.len()); - } - unsafe { - xs.as_ptr().copy_to_nonoverlapping(self.data.add(self.len), xs.len()); - self.len += xs.len(); - } - } - - #[inline] - pub(super) fn extend_from_slice(&mut self, xs: &[u8]) { - if xs.len() > (self.capacity - self.len) { - let b = self.take(); - *self = (b.reserve)(b, xs.len()); - } - unsafe { - xs.as_ptr().copy_to_nonoverlapping(self.data.add(self.len), xs.len()); - self.len += xs.len(); - } - } - - #[inline] - pub(super) fn push(&mut self, v: u8) { - // The code here is taken from Vec::push, and we know that reserve() - // will panic if we're exceeding isize::MAX bytes and so there's no need - // to check for overflow. - if self.len == self.capacity { - let b = self.take(); - *self = (b.reserve)(b, 1); - } - unsafe { - *self.data.add(self.len) = v; - self.len += 1; - } - } -} - -impl Write for Buffer { - #[inline] - fn write(&mut self, xs: &[u8]) -> io::Result { - self.extend_from_slice(xs); - Ok(xs.len()) - } - - #[inline] - fn write_all(&mut self, xs: &[u8]) -> io::Result<()> { - self.extend_from_slice(xs); - Ok(()) - } - - #[inline] - fn flush(&mut self) -> io::Result<()> { - Ok(()) - } -} - -impl Drop for Buffer { - #[inline] - fn drop(&mut self) { - let b = self.take(); - (b.drop)(b); - } -} - -impl From> for Buffer { - fn from(mut v: Vec) -> Self { - let (data, len, capacity) = (v.as_mut_ptr(), v.len(), v.capacity()); - mem::forget(v); - - // This utility function is nested in here because it can *only* - // be safely called on `Buffer`s created by *this* `proc_macro`. - fn to_vec(b: Buffer) -> Vec { - unsafe { - let Buffer { data, len, capacity, .. 
} = b; - mem::forget(b); - Vec::from_raw_parts(data, len, capacity) - } - } - - extern "C" fn reserve(b: Buffer, additional: usize) -> Buffer { - let mut v = to_vec(b); - v.reserve(additional); - Buffer::from(v) - } - - extern "C" fn drop(b: Buffer) { - mem::drop(to_vec(b)); - } - - Buffer { data, len, capacity, reserve, drop } - } -} diff --git a/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/client.rs b/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/client.rs deleted file mode 100644 index 22bda8ba5a..0000000000 --- a/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/client.rs +++ /dev/null @@ -1,529 +0,0 @@ -//! Client-side types. - -use super::*; - -use std::marker::PhantomData; - -macro_rules! define_handles { - ( - 'owned: $($oty:ident,)* - 'interned: $($ity:ident,)* - ) => { - #[repr(C)] - #[allow(non_snake_case)] - pub struct HandleCounters { - $($oty: AtomicUsize,)* - $($ity: AtomicUsize,)* - } - - impl HandleCounters { - // FIXME(eddyb) use a reference to the `static COUNTERS`, instead of - // a wrapper `fn` pointer, once `const fn` can reference `static`s. - extern "C" fn get() -> &'static Self { - static COUNTERS: HandleCounters = HandleCounters { - $($oty: AtomicUsize::new(1),)* - $($ity: AtomicUsize::new(1),)* - }; - &COUNTERS - } - } - - // FIXME(eddyb) generate the definition of `HandleStore` in `server.rs`. - #[repr(C)] - #[allow(non_snake_case)] - pub(super) struct HandleStore { - $($oty: handle::OwnedStore,)* - $($ity: handle::InternedStore,)* - } - - impl HandleStore { - pub(super) fn new(handle_counters: &'static HandleCounters) -> Self { - HandleStore { - $($oty: handle::OwnedStore::new(&handle_counters.$oty),)* - $($ity: handle::InternedStore::new(&handle_counters.$ity),)* - } - } - } - - $( - #[repr(C)] - pub(crate) struct $oty { - handle: handle::Handle, - // Prevent Send and Sync impls. `!Send`/`!Sync` is the usual - // way of doing this, but that requires unstable features. - // rust-analyzer uses this code and avoids unstable features. - _marker: PhantomData<*mut ()>, - } - - // Forward `Drop::drop` to the inherent `drop` method. 
- impl Drop for $oty { - fn drop(&mut self) { - $oty { - handle: self.handle, - _marker: PhantomData, - }.drop(); - } - } - - impl Encode for $oty { - fn encode(self, w: &mut Writer, s: &mut S) { - let handle = self.handle; - mem::forget(self); - handle.encode(w, s); - } - } - - impl DecodeMut<'_, '_, HandleStore>> - for Marked - { - fn decode(r: &mut Reader<'_>, s: &mut HandleStore>) -> Self { - s.$oty.take(handle::Handle::decode(r, &mut ())) - } - } - - impl Encode for &$oty { - fn encode(self, w: &mut Writer, s: &mut S) { - self.handle.encode(w, s); - } - } - - impl<'s, S: server::Types> Decode<'_, 's, HandleStore>> - for &'s Marked - { - fn decode(r: &mut Reader<'_>, s: &'s HandleStore>) -> Self { - &s.$oty[handle::Handle::decode(r, &mut ())] - } - } - - impl Encode for &mut $oty { - fn encode(self, w: &mut Writer, s: &mut S) { - self.handle.encode(w, s); - } - } - - impl<'s, S: server::Types> DecodeMut<'_, 's, HandleStore>> - for &'s mut Marked - { - fn decode( - r: &mut Reader<'_>, - s: &'s mut HandleStore> - ) -> Self { - &mut s.$oty[handle::Handle::decode(r, &mut ())] - } - } - - impl Encode>> - for Marked - { - fn encode(self, w: &mut Writer, s: &mut HandleStore>) { - s.$oty.alloc(self).encode(w, s); - } - } - - impl DecodeMut<'_, '_, S> for $oty { - fn decode(r: &mut Reader<'_>, s: &mut S) -> Self { - $oty { - handle: handle::Handle::decode(r, s), - _marker: PhantomData, - } - } - } - )* - - $( - #[repr(C)] - #[derive(Copy, Clone, PartialEq, Eq, Hash)] - pub(crate) struct $ity { - handle: handle::Handle, - // Prevent Send and Sync impls. `!Send`/`!Sync` is the usual - // way of doing this, but that requires unstable features. - // rust-analyzer uses this code and avoids unstable features. - _marker: PhantomData<*mut ()>, - } - - impl Encode for $ity { - fn encode(self, w: &mut Writer, s: &mut S) { - self.handle.encode(w, s); - } - } - - impl DecodeMut<'_, '_, HandleStore>> - for Marked - { - fn decode(r: &mut Reader<'_>, s: &mut HandleStore>) -> Self { - s.$ity.copy(handle::Handle::decode(r, &mut ())) - } - } - - impl Encode>> - for Marked - { - fn encode(self, w: &mut Writer, s: &mut HandleStore>) { - s.$ity.alloc(self).encode(w, s); - } - } - - impl DecodeMut<'_, '_, S> for $ity { - fn decode(r: &mut Reader<'_>, s: &mut S) -> Self { - $ity { - handle: handle::Handle::decode(r, s), - _marker: PhantomData, - } - } - } - )* - } -} -define_handles! { - 'owned: - FreeFunctions, - TokenStream, - Literal, - SourceFile, - MultiSpan, - Diagnostic, - - 'interned: - Ident, - Span, -} - -// FIXME(eddyb) generate these impls by pattern-matching on the -// names of methods - also could use the presence of `fn drop` -// to distinguish between 'owned and 'interned, above. -// Alternatively, special "modes" could be listed of types in with_api -// instead of pattern matching on methods, here and in server decl. 
- -impl Clone for TokenStream { - fn clone(&self) -> Self { - self.clone() - } -} - -impl Clone for Literal { - fn clone(&self) -> Self { - self.clone() - } -} - -impl fmt::Debug for Literal { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("Literal") - // format the kind without quotes, as in `kind: Float` - .field("kind", &format_args!("{}", &self.debug_kind())) - .field("symbol", &self.symbol()) - // format `Some("...")` on one line even in {:#?} mode - .field("suffix", &format_args!("{:?}", &self.suffix())) - .field("span", &self.span()) - .finish() - } -} - -impl Clone for SourceFile { - fn clone(&self) -> Self { - self.clone() - } -} - -impl Span { - pub(crate) fn def_site() -> Span { - Bridge::with(|bridge| bridge.globals.def_site) - } - - pub(crate) fn call_site() -> Span { - Bridge::with(|bridge| bridge.globals.call_site) - } - - pub(crate) fn mixed_site() -> Span { - Bridge::with(|bridge| bridge.globals.mixed_site) - } -} - -impl fmt::Debug for Span { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(&self.debug()) - } -} - -macro_rules! define_client_side { - ($($name:ident { - $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)*;)* - }),* $(,)?) => { - $(impl $name { - $(pub(crate) fn $method($($arg: $arg_ty),*) $(-> $ret_ty)* { - Bridge::with(|bridge| { - let mut buf = bridge.cached_buffer.take(); - - buf.clear(); - api_tags::Method::$name(api_tags::$name::$method).encode(&mut buf, &mut ()); - reverse_encode!(buf; $($arg),*); - - buf = bridge.dispatch.call(buf); - - let r = Result::<_, PanicMessage>::decode(&mut &buf[..], &mut ()); - - bridge.cached_buffer = buf; - - r.unwrap_or_else(|e| panic::resume_unwind(e.into())) - }) - })* - })* - } -} -with_api!(self, self, define_client_side); - -struct Bridge<'a> { - /// Reusable buffer (only `clear`-ed, never shrunk), primarily - /// used for making requests. - cached_buffer: Buffer, - - /// Server-side function that the client uses to make requests. - dispatch: closure::Closure<'a, Buffer, Buffer>, - - /// Provided globals for this macro expansion. - globals: ExpnGlobals, -} - -enum BridgeState<'a> { - /// No server is currently connected to this client. - NotConnected, - - /// A server is connected and available for requests. - Connected(Bridge<'a>), - - /// Access to the bridge is being exclusively acquired - /// (e.g., during `BridgeState::with`). - InUse, -} - -enum BridgeStateL {} - -impl<'a> scoped_cell::ApplyL<'a> for BridgeStateL { - type Out = BridgeState<'a>; -} - -thread_local! { - static BRIDGE_STATE: scoped_cell::ScopedCell = - scoped_cell::ScopedCell::new(BridgeState::NotConnected); -} - -impl BridgeState<'_> { - /// Take exclusive control of the thread-local - /// `BridgeState`, and pass it to `f`, mutably. - /// The state will be restored after `f` exits, even - /// by panic, including modifications made to it by `f`. - /// - /// N.B., while `f` is running, the thread-local state - /// is `BridgeState::InUse`. 
- fn with(f: impl FnOnce(&mut BridgeState<'_>) -> R) -> R { - BRIDGE_STATE.with(|state| { - state.replace(BridgeState::InUse, |mut state| { - // FIXME(#52812) pass `f` directly to `replace` when `RefMutL` is gone - f(&mut *state) - }) - }) - } -} - -impl Bridge<'_> { - fn with(f: impl FnOnce(&mut Bridge<'_>) -> R) -> R { - BridgeState::with(|state| match state { - BridgeState::NotConnected => { - panic!("procedural macro API is used outside of a procedural macro"); - } - BridgeState::InUse => { - panic!("procedural macro API is used while it's already in use"); - } - BridgeState::Connected(bridge) => f(bridge), - }) - } -} - -pub(crate) fn is_available() -> bool { - BridgeState::with(|state| match state { - BridgeState::Connected(_) | BridgeState::InUse => true, - BridgeState::NotConnected => false, - }) -} - -/// A client-side RPC entry-point, which may be using a different `proc_macro` -/// from the one used by the server, but can be invoked compatibly. -/// -/// Note that the (phantom) `I` ("input") and `O` ("output") type parameters -/// decorate the `Client` with the RPC "interface" of the entry-point, but -/// do not themselves participate in ABI, at all, only facilitate type-checking. -/// -/// E.g. `Client` is the common proc macro interface, -/// used for `#[proc_macro] fn foo(input: TokenStream) -> TokenStream`, -/// indicating that the RPC input and output will be serialized token streams, -/// and forcing the use of APIs that take/return `S::TokenStream`, server-side. -#[repr(C)] -pub struct Client { - // FIXME(eddyb) use a reference to the `static COUNTERS`, instead of - // a wrapper `fn` pointer, once `const fn` can reference `static`s. - pub(super) get_handle_counters: extern "C" fn() -> &'static HandleCounters, - - pub(super) run: extern "C" fn(BridgeConfig<'_>) -> Buffer, - - pub(super) _marker: PhantomData O>, -} - -impl Copy for Client {} -impl Clone for Client { - fn clone(&self) -> Self { - *self - } -} - -fn maybe_install_panic_hook(force_show_panics: bool) { - // Hide the default panic output within `proc_macro` expansions. - // NB. the server can't do this because it may use a different libstd. - static HIDE_PANICS_DURING_EXPANSION: Once = Once::new(); - HIDE_PANICS_DURING_EXPANSION.call_once(|| { - let prev = panic::take_hook(); - panic::set_hook(Box::new(move |info| { - let show = BridgeState::with(|state| match state { - BridgeState::NotConnected => true, - BridgeState::Connected(_) | BridgeState::InUse => force_show_panics, - }); - if show { - prev(info) - } - })); - }); -} - -/// Client-side helper for handling client panics, entering the bridge, -/// deserializing input and serializing output. -// FIXME(eddyb) maybe replace `Bridge::enter` with this? -fn run_client DecodeMut<'a, 's, ()>, R: Encode<()>>( - config: BridgeConfig<'_>, - f: impl FnOnce(A) -> R, -) -> Buffer { - let BridgeConfig { input: mut buf, dispatch, force_show_panics, .. } = config; - - panic::catch_unwind(panic::AssertUnwindSafe(|| { - maybe_install_panic_hook(force_show_panics); - - let reader = &mut &buf[..]; - let (globals, input) = <(ExpnGlobals, A)>::decode(reader, &mut ()); - - // Put the buffer we used for input back in the `Bridge` for requests. - let new_state = - BridgeState::Connected(Bridge { cached_buffer: buf.take(), dispatch, globals }); - - BRIDGE_STATE.with(|state| { - state.set(new_state, || { - let output = f(input); - - // Take the `cached_buffer` back out, for the output value. 
- buf = Bridge::with(|bridge| bridge.cached_buffer.take()); - - // HACK(eddyb) Separate encoding a success value (`Ok(output)`) - // from encoding a panic (`Err(e: PanicMessage)`) to avoid - // having handles outside the `bridge.enter(|| ...)` scope, and - // to catch panics that could happen while encoding the success. - // - // Note that panics should be impossible beyond this point, but - // this is defensively trying to avoid any accidental panicking - // reaching the `extern "C"` (which should `abort` but might not - // at the moment, so this is also potentially preventing UB). - buf.clear(); - Ok::<_, ()>(output).encode(&mut buf, &mut ()); - }) - }) - })) - .map_err(PanicMessage::from) - .unwrap_or_else(|e| { - buf.clear(); - Err::<(), _>(e).encode(&mut buf, &mut ()); - }); - buf -} - -impl Client { - pub const fn expand1( - f: impl Fn(super::super::TokenStream) -> super::super::TokenStream + Copy, - ) -> Self { - Client { - get_handle_counters: HandleCounters::get, - run: super::selfless_reify::reify_to_extern_c_fn_hrt_bridge(move |bridge| { - run_client(bridge, |input| f(super::super::TokenStream(input)).0) - }), - _marker: PhantomData, - } - } -} - -impl Client<(super::super::TokenStream, super::super::TokenStream), super::super::TokenStream> { - pub const fn expand2( - f: impl Fn(super::super::TokenStream, super::super::TokenStream) -> super::super::TokenStream - + Copy, - ) -> Self { - Client { - get_handle_counters: HandleCounters::get, - run: super::selfless_reify::reify_to_extern_c_fn_hrt_bridge(move |bridge| { - run_client(bridge, |(input, input2)| { - f(super::super::TokenStream(input), super::super::TokenStream(input2)).0 - }) - }), - _marker: PhantomData, - } - } -} - -#[repr(C)] -#[derive(Copy, Clone)] -pub enum ProcMacro { - CustomDerive { - trait_name: &'static str, - attributes: &'static [&'static str], - client: Client, - }, - - Attr { - name: &'static str, - client: Client< - (super::super::TokenStream, super::super::TokenStream), - super::super::TokenStream, - >, - }, - - Bang { - name: &'static str, - client: Client, - }, -} - -impl ProcMacro { - pub fn name(&self) -> &'static str { - match self { - ProcMacro::CustomDerive { trait_name, .. } => trait_name, - ProcMacro::Attr { name, .. } => name, - ProcMacro::Bang { name, .. } => name, - } - } - - pub const fn custom_derive( - trait_name: &'static str, - attributes: &'static [&'static str], - expand: impl Fn(super::super::TokenStream) -> super::super::TokenStream + Copy, - ) -> Self { - ProcMacro::CustomDerive { trait_name, attributes, client: Client::expand1(expand) } - } - - pub const fn attr( - name: &'static str, - expand: impl Fn(super::super::TokenStream, super::super::TokenStream) -> super::super::TokenStream - + Copy, - ) -> Self { - ProcMacro::Attr { name, client: Client::expand2(expand) } - } - - pub const fn bang( - name: &'static str, - expand: impl Fn(super::super::TokenStream) -> super::super::TokenStream + Copy, - ) -> Self { - ProcMacro::Bang { name, client: Client::expand1(expand) } - } -} diff --git a/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/closure.rs b/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/closure.rs deleted file mode 100644 index d371ae3cea..0000000000 --- a/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/closure.rs +++ /dev/null @@ -1,32 +0,0 @@ -//! Closure type (equivalent to `&mut dyn FnMut(A) -> R`) that's `repr(C)`. 
- -use std::marker::PhantomData; - -#[repr(C)] -pub struct Closure<'a, A, R> { - call: unsafe extern "C" fn(*mut Env, A) -> R, - env: *mut Env, - // Prevent Send and Sync impls. `!Send`/`!Sync` is the usual way of doing - // this, but that requires unstable features. rust-analyzer uses this code - // and avoids unstable features. - // - // The `'a` lifetime parameter represents the lifetime of `Env`. - _marker: PhantomData<*mut &'a mut ()>, -} - -struct Env; - -impl<'a, A, R, F: FnMut(A) -> R> From<&'a mut F> for Closure<'a, A, R> { - fn from(f: &'a mut F) -> Self { - unsafe extern "C" fn call R>(env: *mut Env, arg: A) -> R { - (*(env as *mut _ as *mut F))(arg) - } - Closure { call: call::, env: f as *mut _ as *mut Env, _marker: PhantomData } - } -} - -impl<'a, A, R> Closure<'a, A, R> { - pub fn call(&mut self, arg: A) -> R { - unsafe { (self.call)(self.env, arg) } - } -} diff --git a/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/handle.rs b/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/handle.rs deleted file mode 100644 index c219a9465d..0000000000 --- a/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/handle.rs +++ /dev/null @@ -1,89 +0,0 @@ -//! Server-side handles and storage for per-handle data. - -use std::collections::{BTreeMap, HashMap}; -use std::hash::{BuildHasher, Hash}; -use std::num::NonZeroU32; -use std::ops::{Index, IndexMut}; -use std::sync::atomic::{AtomicUsize, Ordering}; - -pub(super) type Handle = NonZeroU32; - -/// A store that associates values of type `T` with numeric handles. A value can -/// be looked up using its handle. -pub(super) struct OwnedStore { - counter: &'static AtomicUsize, - data: BTreeMap, -} - -impl OwnedStore { - pub(super) fn new(counter: &'static AtomicUsize) -> Self { - // Ensure the handle counter isn't 0, which would panic later, - // when `NonZeroU32::new` (aka `Handle::new`) is called in `alloc`. - assert_ne!(counter.load(Ordering::SeqCst), 0); - - OwnedStore { counter, data: BTreeMap::new() } - } -} - -impl OwnedStore { - pub(super) fn alloc(&mut self, x: T) -> Handle { - let counter = self.counter.fetch_add(1, Ordering::SeqCst); - let handle = Handle::new(counter as u32).expect("`proc_macro` handle counter overflowed"); - assert!(self.data.insert(handle, x).is_none()); - handle - } - - pub(super) fn take(&mut self, h: Handle) -> T { - self.data.remove(&h).expect("use-after-free in `proc_macro` handle") - } -} - -impl Index for OwnedStore { - type Output = T; - fn index(&self, h: Handle) -> &T { - self.data.get(&h).expect("use-after-free in `proc_macro` handle") - } -} - -impl IndexMut for OwnedStore { - fn index_mut(&mut self, h: Handle) -> &mut T { - self.data.get_mut(&h).expect("use-after-free in `proc_macro` handle") - } -} - -// HACK(eddyb) deterministic `std::collections::hash_map::RandomState` replacement -// that doesn't require adding any dependencies to `proc_macro` (like `rustc-hash`). -#[derive(Clone)] -struct NonRandomState; - -impl BuildHasher for NonRandomState { - type Hasher = std::collections::hash_map::DefaultHasher; - #[inline] - fn build_hasher(&self) -> Self::Hasher { - Self::Hasher::new() - } -} - -/// Like `OwnedStore`, but avoids storing any value more than once. 
-pub(super) struct InternedStore { - owned: OwnedStore, - interner: HashMap, -} - -impl InternedStore { - pub(super) fn new(counter: &'static AtomicUsize) -> Self { - InternedStore { - owned: OwnedStore::new(counter), - interner: HashMap::with_hasher(NonRandomState), - } - } - - pub(super) fn alloc(&mut self, x: T) -> Handle { - let owned = &mut self.owned; - *self.interner.entry(x).or_insert_with(|| owned.alloc(x)) - } - - pub(super) fn copy(&mut self, h: Handle) -> T { - self.owned[h] - } -} diff --git a/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/mod.rs b/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/mod.rs deleted file mode 100644 index ffd4407932..0000000000 --- a/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/mod.rs +++ /dev/null @@ -1,493 +0,0 @@ -//! Internal interface for communicating between a `proc_macro` client -//! (a proc macro crate) and a `proc_macro` server (a compiler front-end). -//! -//! Serialization (with C ABI buffers) and unique integer handles are employed -//! to allow safely interfacing between two copies of `proc_macro` built -//! (from the same source) by different compilers with potentially mismatching -//! Rust ABIs (e.g., stage0/bin/rustc vs stage1/bin/rustc during bootstrap). - -#![deny(unsafe_code)] - -pub use super::{Delimiter, Level, LineColumn, Spacing}; -use std::fmt; -use std::hash::Hash; -use std::marker; -use std::mem; -use std::ops::Bound; -use std::panic; -use std::sync::atomic::AtomicUsize; -use std::sync::Once; -use std::thread; - -/// Higher-order macro describing the server RPC API, allowing automatic -/// generation of type-safe Rust APIs, both client-side and server-side. -/// -/// `with_api!(MySelf, my_self, my_macro)` expands to: -/// ```rust,ignore (pseudo-code) -/// my_macro! { -/// // ... -/// Literal { -/// // ... -/// fn character(ch: char) -> MySelf::Literal; -/// // ... -/// fn span(my_self: &MySelf::Literal) -> MySelf::Span; -/// fn set_span(my_self: &mut MySelf::Literal, span: MySelf::Span); -/// }, -/// // ... -/// } -/// ``` -/// -/// The first two arguments serve to customize the arguments names -/// and argument/return types, to enable several different usecases: -/// -/// If `my_self` is just `self`, then each `fn` signature can be used -/// as-is for a method. If it's anything else (`self_` in practice), -/// then the signatures don't have a special `self` argument, and -/// can, therefore, have a different one introduced. -/// -/// If `MySelf` is just `Self`, then the types are only valid inside -/// a trait or a trait impl, where the trait has associated types -/// for each of the API types. If non-associated types are desired, -/// a module name (`self` in practice) can be used instead of `Self`. -macro_rules! with_api { - ($S:ident, $self:ident, $m:ident) => { - $m! 
{ - FreeFunctions { - fn drop($self: $S::FreeFunctions); - fn track_env_var(var: &str, value: Option<&str>); - fn track_path(path: &str); - }, - TokenStream { - fn drop($self: $S::TokenStream); - fn clone($self: &$S::TokenStream) -> $S::TokenStream; - fn is_empty($self: &$S::TokenStream) -> bool; - fn expand_expr($self: &$S::TokenStream) -> Result<$S::TokenStream, ()>; - fn from_str(src: &str) -> $S::TokenStream; - fn to_string($self: &$S::TokenStream) -> String; - fn from_token_tree( - tree: TokenTree<$S::TokenStream, $S::Span, $S::Ident, $S::Literal>, - ) -> $S::TokenStream; - fn concat_trees( - base: Option<$S::TokenStream>, - trees: Vec>, - ) -> $S::TokenStream; - fn concat_streams( - base: Option<$S::TokenStream>, - streams: Vec<$S::TokenStream>, - ) -> $S::TokenStream; - fn into_trees( - $self: $S::TokenStream - ) -> Vec>; - }, - Ident { - fn new(string: &str, span: $S::Span, is_raw: bool) -> $S::Ident; - fn span($self: $S::Ident) -> $S::Span; - fn with_span($self: $S::Ident, span: $S::Span) -> $S::Ident; - }, - Literal { - fn drop($self: $S::Literal); - fn clone($self: &$S::Literal) -> $S::Literal; - fn from_str(s: &str) -> Result<$S::Literal, ()>; - fn to_string($self: &$S::Literal) -> String; - fn debug_kind($self: &$S::Literal) -> String; - fn symbol($self: &$S::Literal) -> String; - fn suffix($self: &$S::Literal) -> Option; - fn integer(n: &str) -> $S::Literal; - fn typed_integer(n: &str, kind: &str) -> $S::Literal; - fn float(n: &str) -> $S::Literal; - fn f32(n: &str) -> $S::Literal; - fn f64(n: &str) -> $S::Literal; - fn string(string: &str) -> $S::Literal; - fn character(ch: char) -> $S::Literal; - fn byte_string(bytes: &[u8]) -> $S::Literal; - fn span($self: &$S::Literal) -> $S::Span; - fn set_span($self: &mut $S::Literal, span: $S::Span); - fn subspan( - $self: &$S::Literal, - start: Bound, - end: Bound, - ) -> Option<$S::Span>; - }, - SourceFile { - fn drop($self: $S::SourceFile); - fn clone($self: &$S::SourceFile) -> $S::SourceFile; - fn eq($self: &$S::SourceFile, other: &$S::SourceFile) -> bool; - fn path($self: &$S::SourceFile) -> String; - fn is_real($self: &$S::SourceFile) -> bool; - }, - MultiSpan { - fn drop($self: $S::MultiSpan); - fn new() -> $S::MultiSpan; - fn push($self: &mut $S::MultiSpan, span: $S::Span); - }, - Diagnostic { - fn drop($self: $S::Diagnostic); - fn new(level: Level, msg: &str, span: $S::MultiSpan) -> $S::Diagnostic; - fn sub( - $self: &mut $S::Diagnostic, - level: Level, - msg: &str, - span: $S::MultiSpan, - ); - fn emit($self: $S::Diagnostic); - }, - Span { - fn debug($self: $S::Span) -> String; - fn source_file($self: $S::Span) -> $S::SourceFile; - fn parent($self: $S::Span) -> Option<$S::Span>; - fn source($self: $S::Span) -> $S::Span; - fn start($self: $S::Span) -> LineColumn; - fn end($self: $S::Span) -> LineColumn; - fn before($self: $S::Span) -> $S::Span; - fn after($self: $S::Span) -> $S::Span; - fn join($self: $S::Span, other: $S::Span) -> Option<$S::Span>; - fn resolved_at($self: $S::Span, at: $S::Span) -> $S::Span; - fn source_text($self: $S::Span) -> Option; - fn save_span($self: $S::Span) -> usize; - fn recover_proc_macro_span(id: usize) -> $S::Span; - }, - } - }; -} - -// FIXME(eddyb) this calls `encode` for each argument, but in reverse, -// to match the ordering in `reverse_decode`. -macro_rules! 
reverse_encode { - ($writer:ident;) => {}; - ($writer:ident; $first:ident $(, $rest:ident)*) => { - reverse_encode!($writer; $($rest),*); - $first.encode(&mut $writer, &mut ()); - } -} - -// FIXME(eddyb) this calls `decode` for each argument, but in reverse, -// to avoid borrow conflicts from borrows started by `&mut` arguments. -macro_rules! reverse_decode { - ($reader:ident, $s:ident;) => {}; - ($reader:ident, $s:ident; $first:ident: $first_ty:ty $(, $rest:ident: $rest_ty:ty)*) => { - reverse_decode!($reader, $s; $($rest: $rest_ty),*); - let $first = <$first_ty>::decode(&mut $reader, $s); - } -} - -#[allow(unsafe_code)] -mod buffer; -#[forbid(unsafe_code)] -pub mod client; -#[allow(unsafe_code)] -mod closure; -#[forbid(unsafe_code)] -mod handle; -#[macro_use] -#[forbid(unsafe_code)] -mod rpc; -#[allow(unsafe_code)] -mod scoped_cell; -#[allow(unsafe_code)] -mod selfless_reify; -#[forbid(unsafe_code)] -pub mod server; - -use buffer::Buffer; -pub use rpc::PanicMessage; -use rpc::{Decode, DecodeMut, Encode, Reader, Writer}; - -/// Configuration for establishing an active connection between a server and a -/// client. The server creates the bridge config (`run_server` in `server.rs`), -/// then passes it to the client through the function pointer in the `run` field -/// of `client::Client`. The client constructs a local `Bridge` from the config -/// in TLS during its execution (`Bridge::{enter, with}` in `client.rs`). -#[repr(C)] -pub struct BridgeConfig<'a> { - /// Buffer used to pass initial input to the client. - input: Buffer, - - /// Server-side function that the client uses to make requests. - dispatch: closure::Closure<'a, Buffer, Buffer>, - - /// If 'true', always invoke the default panic hook - force_show_panics: bool, - - // Prevent Send and Sync impls. `!Send`/`!Sync` is the usual way of doing - // this, but that requires unstable features. rust-analyzer uses this code - // and avoids unstable features. - _marker: marker::PhantomData<*mut ()>, -} - -#[forbid(unsafe_code)] -#[allow(non_camel_case_types)] -mod api_tags { - use super::rpc::{DecodeMut, Encode, Reader, Writer}; - - macro_rules! declare_tags { - ($($name:ident { - $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)*;)* - }),* $(,)?) => { - $( - pub(super) enum $name { - $($method),* - } - rpc_encode_decode!(enum $name { $($method),* }); - )* - - pub(super) enum Method { - $($name($name)),* - } - rpc_encode_decode!(enum Method { $($name(m)),* }); - } - } - with_api!(self, self, declare_tags); -} - -/// Helper to wrap associated types to allow trait impl dispatch. -/// That is, normally a pair of impls for `T::Foo` and `T::Bar` -/// can overlap, but if the impls are, instead, on types like -/// `Marked` and `Marked`, they can't. -trait Mark { - type Unmarked; - fn mark(unmarked: Self::Unmarked) -> Self; -} - -/// Unwrap types wrapped by `Mark::mark` (see `Mark` for details). 
-trait Unmark { - type Unmarked; - fn unmark(self) -> Self::Unmarked; -} - -#[derive(Copy, Clone, PartialEq, Eq, Hash)] -struct Marked { - value: T, - _marker: marker::PhantomData, -} - -impl Mark for Marked { - type Unmarked = T; - fn mark(unmarked: Self::Unmarked) -> Self { - Marked { value: unmarked, _marker: marker::PhantomData } - } -} -impl Unmark for Marked { - type Unmarked = T; - fn unmark(self) -> Self::Unmarked { - self.value - } -} -impl<'a, T, M> Unmark for &'a Marked { - type Unmarked = &'a T; - fn unmark(self) -> Self::Unmarked { - &self.value - } -} -impl<'a, T, M> Unmark for &'a mut Marked { - type Unmarked = &'a mut T; - fn unmark(self) -> Self::Unmarked { - &mut self.value - } -} - -impl Mark for Vec { - type Unmarked = Vec; - fn mark(unmarked: Self::Unmarked) -> Self { - // Should be a no-op due to std's in-place collect optimizations. - unmarked.into_iter().map(T::mark).collect() - } -} -impl Unmark for Vec { - type Unmarked = Vec; - fn unmark(self) -> Self::Unmarked { - // Should be a no-op due to std's in-place collect optimizations. - self.into_iter().map(T::unmark).collect() - } -} - -macro_rules! mark_noop { - ($($ty:ty),* $(,)?) => { - $( - impl Mark for $ty { - type Unmarked = Self; - fn mark(unmarked: Self::Unmarked) -> Self { - unmarked - } - } - impl Unmark for $ty { - type Unmarked = Self; - fn unmark(self) -> Self::Unmarked { - self - } - } - )* - } -} -mark_noop! { - (), - bool, - char, - &'_ [u8], - &'_ str, - String, - u8, - usize, - Delimiter, - Level, - LineColumn, - Spacing, -} - -rpc_encode_decode!( - enum Delimiter { - Parenthesis, - Brace, - Bracket, - None, - } -); -rpc_encode_decode!( - enum Level { - Error, - Warning, - Note, - Help, - } -); -rpc_encode_decode!(struct LineColumn { line, column }); -rpc_encode_decode!( - enum Spacing { - Alone, - Joint, - } -); - -macro_rules! mark_compound { - (struct $name:ident <$($T:ident),+> { $($field:ident),* $(,)? }) => { - impl<$($T: Mark),+> Mark for $name <$($T),+> { - type Unmarked = $name <$($T::Unmarked),+>; - fn mark(unmarked: Self::Unmarked) -> Self { - $name { - $($field: Mark::mark(unmarked.$field)),* - } - } - } - impl<$($T: Unmark),+> Unmark for $name <$($T),+> { - type Unmarked = $name <$($T::Unmarked),+>; - fn unmark(self) -> Self::Unmarked { - $name { - $($field: Unmark::unmark(self.$field)),* - } - } - } - }; - (enum $name:ident <$($T:ident),+> { $($variant:ident $(($field:ident))?),* $(,)? }) => { - impl<$($T: Mark),+> Mark for $name <$($T),+> { - type Unmarked = $name <$($T::Unmarked),+>; - fn mark(unmarked: Self::Unmarked) -> Self { - match unmarked { - $($name::$variant $(($field))? => { - $name::$variant $((Mark::mark($field)))? - })* - } - } - } - impl<$($T: Unmark),+> Unmark for $name <$($T),+> { - type Unmarked = $name <$($T::Unmarked),+>; - fn unmark(self) -> Self::Unmarked { - match self { - $($name::$variant $(($field))? => { - $name::$variant $((Unmark::unmark($field)))? - })* - } - } - } - } -} - -macro_rules! 
compound_traits { - ($($t:tt)*) => { - rpc_encode_decode!($($t)*); - mark_compound!($($t)*); - }; -} - -compound_traits!( - enum Bound { - Included(x), - Excluded(x), - Unbounded, - } -); - -compound_traits!( - enum Option { - Some(t), - None, - } -); - -compound_traits!( - enum Result { - Ok(t), - Err(e), - } -); - -#[derive(Copy, Clone)] -pub struct DelimSpan { - pub open: Span, - pub close: Span, - pub entire: Span, -} - -impl DelimSpan { - pub fn from_single(span: Span) -> Self { - DelimSpan { open: span, close: span, entire: span } - } -} - -compound_traits!(struct DelimSpan { open, close, entire }); - -#[derive(Clone)] -pub struct Group { - pub delimiter: Delimiter, - pub stream: Option, - pub span: DelimSpan, -} - -compound_traits!(struct Group { delimiter, stream, span }); - -#[derive(Clone)] -pub struct Punct { - pub ch: u8, - pub joint: bool, - pub span: Span, -} - -compound_traits!(struct Punct { ch, joint, span }); - -#[derive(Clone)] -pub enum TokenTree { - Group(Group), - Punct(Punct), - Ident(Ident), - Literal(Literal), -} - -compound_traits!( - enum TokenTree { - Group(tt), - Punct(tt), - Ident(tt), - Literal(tt), - } -); - -/// Globals provided alongside the initial inputs for a macro expansion. -/// Provides values such as spans which are used frequently to avoid RPC. -#[derive(Clone)] -pub struct ExpnGlobals { - pub def_site: Span, - pub call_site: Span, - pub mixed_site: Span, -} - -compound_traits!( - struct ExpnGlobals { def_site, call_site, mixed_site } -); diff --git a/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/rpc.rs b/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/rpc.rs deleted file mode 100644 index e9d7a46c06..0000000000 --- a/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/rpc.rs +++ /dev/null @@ -1,304 +0,0 @@ -//! Serialization for client-server communication. - -use std::any::Any; -use std::char; -use std::io::Write; -use std::num::NonZeroU32; -use std::str; - -pub(super) type Writer = super::buffer::Buffer; - -pub(super) trait Encode: Sized { - fn encode(self, w: &mut Writer, s: &mut S); -} - -pub(super) type Reader<'a> = &'a [u8]; - -pub(super) trait Decode<'a, 's, S>: Sized { - fn decode(r: &mut Reader<'a>, s: &'s S) -> Self; -} - -pub(super) trait DecodeMut<'a, 's, S>: Sized { - fn decode(r: &mut Reader<'a>, s: &'s mut S) -> Self; -} - -macro_rules! rpc_encode_decode { - (le $ty:ty) => { - impl Encode for $ty { - fn encode(self, w: &mut Writer, _: &mut S) { - w.extend_from_array(&self.to_le_bytes()); - } - } - - impl DecodeMut<'_, '_, S> for $ty { - fn decode(r: &mut Reader<'_>, _: &mut S) -> Self { - const N: usize = ::std::mem::size_of::<$ty>(); - - let mut bytes = [0; N]; - bytes.copy_from_slice(&r[..N]); - *r = &r[N..]; - - Self::from_le_bytes(bytes) - } - } - }; - (struct $name:ident $(<$($T:ident),+>)? { $($field:ident),* $(,)? }) => { - impl),+)?> Encode for $name $(<$($T),+>)? { - fn encode(self, w: &mut Writer, s: &mut S) { - $(self.$field.encode(w, s);)* - } - } - - impl<'a, S, $($($T: for<'s> DecodeMut<'a, 's, S>),+)?> DecodeMut<'a, '_, S> - for $name $(<$($T),+>)? - { - fn decode(r: &mut Reader<'a>, s: &mut S) -> Self { - $name { - $($field: DecodeMut::decode(r, s)),* - } - } - } - }; - (enum $name:ident $(<$($T:ident),+>)? { $($variant:ident $(($field:ident))*),* $(,)? }) => { - impl),+)?> Encode for $name $(<$($T),+>)? { - fn encode(self, w: &mut Writer, s: &mut S) { - // HACK(eddyb): `Tag` enum duplicated between the - // two impls as there's no other place to stash it. 
- #[allow(non_upper_case_globals)] - mod tag { - #[repr(u8)] enum Tag { $($variant),* } - - $(pub const $variant: u8 = Tag::$variant as u8;)* - } - - match self { - $($name::$variant $(($field))* => { - tag::$variant.encode(w, s); - $($field.encode(w, s);)* - })* - } - } - } - - impl<'a, S, $($($T: for<'s> DecodeMut<'a, 's, S>),+)?> DecodeMut<'a, '_, S> - for $name $(<$($T),+>)? - { - fn decode(r: &mut Reader<'a>, s: &mut S) -> Self { - // HACK(eddyb): `Tag` enum duplicated between the - // two impls as there's no other place to stash it. - #[allow(non_upper_case_globals)] - mod tag { - #[repr(u8)] enum Tag { $($variant),* } - - $(pub const $variant: u8 = Tag::$variant as u8;)* - } - - match u8::decode(r, s) { - $(tag::$variant => { - $(let $field = DecodeMut::decode(r, s);)* - $name::$variant $(($field))* - })* - _ => unreachable!(), - } - } - } - } -} - -impl Encode for () { - fn encode(self, _: &mut Writer, _: &mut S) {} -} - -impl DecodeMut<'_, '_, S> for () { - fn decode(_: &mut Reader<'_>, _: &mut S) -> Self {} -} - -impl Encode for u8 { - fn encode(self, w: &mut Writer, _: &mut S) { - w.push(self); - } -} - -impl DecodeMut<'_, '_, S> for u8 { - fn decode(r: &mut Reader<'_>, _: &mut S) -> Self { - let x = r[0]; - *r = &r[1..]; - x - } -} - -rpc_encode_decode!(le u32); -rpc_encode_decode!(le usize); - -impl Encode for bool { - fn encode(self, w: &mut Writer, s: &mut S) { - (self as u8).encode(w, s); - } -} - -impl DecodeMut<'_, '_, S> for bool { - fn decode(r: &mut Reader<'_>, s: &mut S) -> Self { - match u8::decode(r, s) { - 0 => false, - 1 => true, - _ => unreachable!(), - } - } -} - -impl Encode for char { - fn encode(self, w: &mut Writer, s: &mut S) { - (self as u32).encode(w, s); - } -} - -impl DecodeMut<'_, '_, S> for char { - fn decode(r: &mut Reader<'_>, s: &mut S) -> Self { - char::from_u32(u32::decode(r, s)).unwrap() - } -} - -impl Encode for NonZeroU32 { - fn encode(self, w: &mut Writer, s: &mut S) { - self.get().encode(w, s); - } -} - -impl DecodeMut<'_, '_, S> for NonZeroU32 { - fn decode(r: &mut Reader<'_>, s: &mut S) -> Self { - Self::new(u32::decode(r, s)).unwrap() - } -} - -impl, B: Encode> Encode for (A, B) { - fn encode(self, w: &mut Writer, s: &mut S) { - self.0.encode(w, s); - self.1.encode(w, s); - } -} - -impl<'a, S, A: for<'s> DecodeMut<'a, 's, S>, B: for<'s> DecodeMut<'a, 's, S>> DecodeMut<'a, '_, S> - for (A, B) -{ - fn decode(r: &mut Reader<'a>, s: &mut S) -> Self { - (DecodeMut::decode(r, s), DecodeMut::decode(r, s)) - } -} - -impl Encode for &[u8] { - fn encode(self, w: &mut Writer, s: &mut S) { - self.len().encode(w, s); - w.write_all(self).unwrap(); - } -} - -impl<'a, S> DecodeMut<'a, '_, S> for &'a [u8] { - fn decode(r: &mut Reader<'a>, s: &mut S) -> Self { - let len = usize::decode(r, s); - let xs = &r[..len]; - *r = &r[len..]; - xs - } -} - -impl Encode for &str { - fn encode(self, w: &mut Writer, s: &mut S) { - self.as_bytes().encode(w, s); - } -} - -impl<'a, S> DecodeMut<'a, '_, S> for &'a str { - fn decode(r: &mut Reader<'a>, s: &mut S) -> Self { - str::from_utf8(<&[u8]>::decode(r, s)).unwrap() - } -} - -impl Encode for String { - fn encode(self, w: &mut Writer, s: &mut S) { - self[..].encode(w, s); - } -} - -impl DecodeMut<'_, '_, S> for String { - fn decode(r: &mut Reader<'_>, s: &mut S) -> Self { - <&str>::decode(r, s).to_string() - } -} - -impl> Encode for Vec { - fn encode(self, w: &mut Writer, s: &mut S) { - self.len().encode(w, s); - for x in self { - x.encode(w, s); - } - } -} - -impl<'a, S, T: for<'s> DecodeMut<'a, 's, S>> DecodeMut<'a, 
'_, S> for Vec { - fn decode(r: &mut Reader<'a>, s: &mut S) -> Self { - let len = usize::decode(r, s); - let mut vec = Vec::with_capacity(len); - for _ in 0..len { - vec.push(T::decode(r, s)); - } - vec - } -} - -/// Simplified version of panic payloads, ignoring -/// types other than `&'static str` and `String`. -pub enum PanicMessage { - StaticStr(&'static str), - String(String), - Unknown, -} - -impl From> for PanicMessage { - fn from(payload: Box) -> Self { - if let Some(s) = payload.downcast_ref::<&'static str>() { - return PanicMessage::StaticStr(s); - } - if let Ok(s) = payload.downcast::() { - return PanicMessage::String(*s); - } - PanicMessage::Unknown - } -} - -impl Into> for PanicMessage { - fn into(self) -> Box { - match self { - PanicMessage::StaticStr(s) => Box::new(s), - PanicMessage::String(s) => Box::new(s), - PanicMessage::Unknown => { - struct UnknownPanicMessage; - Box::new(UnknownPanicMessage) - } - } - } -} - -impl PanicMessage { - pub fn as_str(&self) -> Option<&str> { - match self { - PanicMessage::StaticStr(s) => Some(s), - PanicMessage::String(s) => Some(s), - PanicMessage::Unknown => None, - } - } -} - -impl Encode for PanicMessage { - fn encode(self, w: &mut Writer, s: &mut S) { - self.as_str().encode(w, s); - } -} - -impl DecodeMut<'_, '_, S> for PanicMessage { - fn decode(r: &mut Reader<'_>, s: &mut S) -> Self { - match Option::::decode(r, s) { - Some(s) => PanicMessage::String(s), - None => PanicMessage::Unknown, - } - } -} diff --git a/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/scoped_cell.rs b/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/scoped_cell.rs deleted file mode 100644 index 2cde1f65ad..0000000000 --- a/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/scoped_cell.rs +++ /dev/null @@ -1,81 +0,0 @@ -//! `Cell` variant for (scoped) existential lifetimes. - -use std::cell::Cell; -use std::mem; -use std::ops::{Deref, DerefMut}; - -/// Type lambda application, with a lifetime. -#[allow(unused_lifetimes)] -pub trait ApplyL<'a> { - type Out; -} - -/// Type lambda taking a lifetime, i.e., `Lifetime -> Type`. -pub trait LambdaL: for<'a> ApplyL<'a> {} - -impl ApplyL<'a>> LambdaL for T {} - -// HACK(eddyb) work around projection limitations with a newtype -// FIXME(#52812) replace with `&'a mut >::Out` -pub struct RefMutL<'a, 'b, T: LambdaL>(&'a mut >::Out); - -impl<'a, 'b, T: LambdaL> Deref for RefMutL<'a, 'b, T> { - type Target = >::Out; - fn deref(&self) -> &Self::Target { - self.0 - } -} - -impl<'a, 'b, T: LambdaL> DerefMut for RefMutL<'a, 'b, T> { - fn deref_mut(&mut self) -> &mut Self::Target { - self.0 - } -} - -pub struct ScopedCell(Cell<>::Out>); - -impl ScopedCell { - pub const fn new(value: >::Out) -> Self { - ScopedCell(Cell::new(value)) - } - - /// Sets the value in `self` to `replacement` while - /// running `f`, which gets the old value, mutably. - /// The old value will be restored after `f` exits, even - /// by panic, including modifications made to it by `f`. - pub fn replace<'a, R>( - &self, - replacement: >::Out, - f: impl for<'b, 'c> FnOnce(RefMutL<'b, 'c, T>) -> R, - ) -> R { - /// Wrapper that ensures that the cell always gets filled - /// (with the original state, optionally changed by `f`), - /// even if `f` had panicked. 
- struct PutBackOnDrop<'a, T: LambdaL> { - cell: &'a ScopedCell, - value: Option<>::Out>, - } - - impl<'a, T: LambdaL> Drop for PutBackOnDrop<'a, T> { - fn drop(&mut self) { - self.cell.0.set(self.value.take().unwrap()); - } - } - - let mut put_back_on_drop = PutBackOnDrop { - cell: self, - value: Some(self.0.replace(unsafe { - let erased = mem::transmute_copy(&replacement); - mem::forget(replacement); - erased - })), - }; - - f(RefMutL(put_back_on_drop.value.as_mut().unwrap())) - } - - /// Sets the value in `self` to `value` while running `f`. - pub fn set(&self, value: >::Out, f: impl FnOnce() -> R) -> R { - self.replace(value, |_| f()) - } -} diff --git a/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/selfless_reify.rs b/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/selfless_reify.rs deleted file mode 100644 index 907ad256e4..0000000000 --- a/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/selfless_reify.rs +++ /dev/null @@ -1,84 +0,0 @@ -//! Abstraction for creating `fn` pointers from any callable that *effectively* -//! has the equivalent of implementing `Default`, even if the compiler neither -//! provides `Default` nor allows reifying closures (i.e. creating `fn` pointers) -//! other than those with absolutely no captures. -//! -//! More specifically, for a closure-like type to be "effectively `Default`": -//! * it must be a ZST (zero-sized type): no information contained within, so -//! that `Default`'s return value (if it were implemented) is unambiguous -//! * it must be `Copy`: no captured "unique ZST tokens" or any other similar -//! types that would make duplicating values at will unsound -//! * combined with the ZST requirement, this confers a kind of "telecopy" -//! ability: similar to `Copy`, but without keeping the value around, and -//! instead "reconstructing" it (a noop given it's a ZST) when needed -//! * it must be *provably* inhabited: no captured uninhabited types or any -//! other types that cannot be constructed by the user of this abstraction -//! * the proof is a value of the closure-like type itself, in a sense the -//! "seed" for the "telecopy" process made possible by ZST + `Copy` -//! * this requirement is the only reason an abstraction limited to a specific -//! usecase is required: ZST + `Copy` can be checked with *at worst* a panic -//! at the "attempted `::default()` call" time, but that doesn't guarantee -//! that the value can be soundly created, and attempting to use the typical -//! "proof ZST token" approach leads yet again to having a ZST + `Copy` type -//! that is not proof of anything without a value (i.e. isomorphic to a -//! newtype of the type it's trying to prove the inhabitation of) -//! -//! A more flexible (and safer) solution to the general problem could exist once -//! `const`-generic parameters can have type parameters in their types: -//! -//! ```rust,ignore (needs future const-generics) -//! extern "C" fn ffi_wrapper< -//! A, R, -//! F: Fn(A) -> R, -//! const f: F, // <-- this `const`-generic is not yet allowed -//! >(arg: A) -> R { -//! f(arg) -//! } -//! ``` - -use std::mem; - -// FIXME(eddyb) this could be `trait` impls except for the `const fn` requirement. -macro_rules! define_reify_functions { - ($( - fn $name:ident $(<$($param:ident),*>)? - for $(extern $abi:tt)? fn($($arg:ident: $arg_ty:ty),*) -> $ret_ty:ty; - )+) => { - $(pub const fn $name< - $($($param,)*)? - F: Fn($($arg_ty),*) -> $ret_ty + Copy - >(f: F) -> $(extern $abi)? 
fn($($arg_ty),*) -> $ret_ty { - // FIXME(eddyb) describe the `F` type (e.g. via `type_name::`) once panic - // formatting becomes possible in `const fn`. - assert!(mem::size_of::() == 0, "selfless_reify: closure must be zero-sized"); - - $(extern $abi)? fn wrapper< - $($($param,)*)? - F: Fn($($arg_ty),*) -> $ret_ty + Copy - >($($arg: $arg_ty),*) -> $ret_ty { - let f = unsafe { - // SAFETY: `F` satisfies all criteria for "out of thin air" - // reconstructability (see module-level doc comment). - mem::MaybeUninit::::uninit().assume_init() - }; - f($($arg),*) - } - let _f_proof = f; - wrapper::< - $($($param,)*)? - F - > - })+ - } -} - -define_reify_functions! { - fn _reify_to_extern_c_fn_unary for extern "C" fn(arg: A) -> R; - - // HACK(eddyb) this abstraction is used with `for<'a> fn(BridgeConfig<'a>) - // -> T` but that doesn't work with just `reify_to_extern_c_fn_unary` - // because of the `fn` pointer type being "higher-ranked" (i.e. the - // `for<'a>` binder). - // FIXME(eddyb) try to remove the lifetime from `BridgeConfig`, that'd help. - fn reify_to_extern_c_fn_hrt_bridge for extern "C" fn(bridge: super::BridgeConfig<'_>) -> R; -} diff --git a/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/server.rs b/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/server.rs deleted file mode 100644 index 6e7a8d8c10..0000000000 --- a/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/bridge/server.rs +++ /dev/null @@ -1,339 +0,0 @@ -//! Server-side traits. - -use super::*; - -// FIXME(eddyb) generate the definition of `HandleStore` in `server.rs`. -use super::client::HandleStore; - -pub trait Types { - type FreeFunctions: 'static; - type TokenStream: 'static + Clone; - type Ident: 'static + Copy + Eq + Hash; - type Literal: 'static + Clone; - type SourceFile: 'static + Clone; - type MultiSpan: 'static; - type Diagnostic: 'static; - type Span: 'static + Copy + Eq + Hash; -} - -/// Declare an associated fn of one of the traits below, adding necessary -/// default bodies. -macro_rules! associated_fn { - (fn drop(&mut self, $arg:ident: $arg_ty:ty)) => - (fn drop(&mut self, $arg: $arg_ty) { mem::drop($arg) }); - - (fn clone(&mut self, $arg:ident: $arg_ty:ty) -> $ret_ty:ty) => - (fn clone(&mut self, $arg: $arg_ty) -> $ret_ty { $arg.clone() }); - - ($($item:tt)*) => ($($item)*;) -} - -macro_rules! declare_server_traits { - ($($name:ident { - $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)?;)* - }),* $(,)?) => { - $(pub trait $name: Types { - $(associated_fn!(fn $method(&mut self, $($arg: $arg_ty),*) $(-> $ret_ty)?);)* - })* - - pub trait Server: Types $(+ $name)* { - fn globals(&mut self) -> ExpnGlobals; - } - } -} -with_api!(Self, self_, declare_server_traits); - -pub(super) struct MarkedTypes(S); - -impl Server for MarkedTypes { - fn globals(&mut self) -> ExpnGlobals { - <_>::mark(Server::globals(&mut self.0)) - } -} - -macro_rules! define_mark_types_impls { - ($($name:ident { - $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)?;)* - }),* $(,)?) => { - impl Types for MarkedTypes { - $(type $name = Marked;)* - } - - $(impl $name for MarkedTypes { - $(fn $method(&mut self, $($arg: $arg_ty),*) $(-> $ret_ty)? { - <_>::mark($name::$method(&mut self.0, $($arg.unmark()),*)) - })* - })* - } -} -with_api!(Self, self_, define_mark_types_impls); - -struct Dispatcher { - handle_store: HandleStore, - server: S, -} - -macro_rules! define_dispatcher_impl { - ($($name:ident { - $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) 
$(-> $ret_ty:ty)?;)* - }),* $(,)?) => { - // FIXME(eddyb) `pub` only for `ExecutionStrategy` below. - pub trait DispatcherTrait { - // HACK(eddyb) these are here to allow `Self::$name` to work below. - $(type $name;)* - fn dispatch(&mut self, buf: Buffer) -> Buffer; - } - - impl DispatcherTrait for Dispatcher> { - $(type $name = as Types>::$name;)* - fn dispatch(&mut self, mut buf: Buffer) -> Buffer { - let Dispatcher { handle_store, server } = self; - - let mut reader = &buf[..]; - match api_tags::Method::decode(&mut reader, &mut ()) { - $(api_tags::Method::$name(m) => match m { - $(api_tags::$name::$method => { - let mut call_method = || { - reverse_decode!(reader, handle_store; $($arg: $arg_ty),*); - $name::$method(server, $($arg),*) - }; - // HACK(eddyb) don't use `panic::catch_unwind` in a panic. - // If client and server happen to use the same `libstd`, - // `catch_unwind` asserts that the panic counter was 0, - // even when the closure passed to it didn't panic. - let r = if thread::panicking() { - Ok(call_method()) - } else { - panic::catch_unwind(panic::AssertUnwindSafe(call_method)) - .map_err(PanicMessage::from) - }; - - buf.clear(); - r.encode(&mut buf, handle_store); - })* - }),* - } - buf - } - } - } -} -with_api!(Self, self_, define_dispatcher_impl); - -pub trait ExecutionStrategy { - fn run_bridge_and_client( - &self, - dispatcher: &mut impl DispatcherTrait, - input: Buffer, - run_client: extern "C" fn(BridgeConfig<'_>) -> Buffer, - force_show_panics: bool, - ) -> Buffer; -} - -pub struct SameThread; - -impl ExecutionStrategy for SameThread { - fn run_bridge_and_client( - &self, - dispatcher: &mut impl DispatcherTrait, - input: Buffer, - run_client: extern "C" fn(BridgeConfig<'_>) -> Buffer, - force_show_panics: bool, - ) -> Buffer { - let mut dispatch = |buf| dispatcher.dispatch(buf); - - run_client(BridgeConfig { - input, - dispatch: (&mut dispatch).into(), - force_show_panics, - _marker: marker::PhantomData, - }) - } -} - -// NOTE(eddyb) Two implementations are provided, the second one is a bit -// faster but neither is anywhere near as fast as same-thread execution. 
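Reviewer note: all of these strategies wrap the same request/response shape, in which the client calls back into the server through a `dispatch` closure. Below is a self-contained toy version of that shape, with strings standing in for the byte `Buffer`s the real bridge moves; the names are illustrative only.

use std::sync::mpsc::channel;
use std::thread;

// Same-thread: the "client" is just a closure invoked directly, so each
// request is a plain function call with no synchronization cost.
fn run_same_thread(mut dispatch: impl FnMut(String) -> String) -> String {
    dispatch("request".to_string())
}

// Cross-thread: requests and responses cross a channel boundary, which is
// why the NOTE above says neither cross-thread variant comes close to
// same-thread execution.
fn run_cross_thread(server: impl Fn(String) -> String) -> String {
    let (req_tx, req_rx) = channel::<String>();
    let (res_tx, res_rx) = channel::<String>();
    let client = thread::spawn(move || {
        req_tx.send("request".to_string()).unwrap();
        res_rx.recv().unwrap()
    });
    // Serve requests until the client drops its end of the request channel.
    for req in req_rx {
        res_tx.send(server(req)).unwrap();
    }
    client.join().unwrap()
}

fn main() {
    let reply = |req: String| format!("reply to {req}");
    println!("{}", run_same_thread(reply));
    println!("{}", run_cross_thread(reply));
}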
- -pub struct CrossThread1; - -impl ExecutionStrategy for CrossThread1 { - fn run_bridge_and_client( - &self, - dispatcher: &mut impl DispatcherTrait, - input: Buffer, - run_client: extern "C" fn(BridgeConfig<'_>) -> Buffer, - force_show_panics: bool, - ) -> Buffer { - use std::sync::mpsc::channel; - - let (req_tx, req_rx) = channel(); - let (res_tx, res_rx) = channel(); - - let join_handle = thread::spawn(move || { - let mut dispatch = |buf| { - req_tx.send(buf).unwrap(); - res_rx.recv().unwrap() - }; - - run_client(BridgeConfig { - input, - dispatch: (&mut dispatch).into(), - force_show_panics, - _marker: marker::PhantomData, - }) - }); - - for b in req_rx { - res_tx.send(dispatcher.dispatch(b)).unwrap(); - } - - join_handle.join().unwrap() - } -} - -pub struct CrossThread2; - -impl ExecutionStrategy for CrossThread2 { - fn run_bridge_and_client( - &self, - dispatcher: &mut impl DispatcherTrait, - input: Buffer, - run_client: extern "C" fn(BridgeConfig<'_>) -> Buffer, - force_show_panics: bool, - ) -> Buffer { - use std::sync::{Arc, Mutex}; - - enum State { - Req(T), - Res(T), - } - - let mut state = Arc::new(Mutex::new(State::Res(Buffer::new()))); - - let server_thread = thread::current(); - let state2 = state.clone(); - let join_handle = thread::spawn(move || { - let mut dispatch = |b| { - *state2.lock().unwrap() = State::Req(b); - server_thread.unpark(); - loop { - thread::park(); - if let State::Res(b) = &mut *state2.lock().unwrap() { - break b.take(); - } - } - }; - - let r = run_client(BridgeConfig { - input, - dispatch: (&mut dispatch).into(), - force_show_panics, - _marker: marker::PhantomData, - }); - - // Wake up the server so it can exit the dispatch loop. - drop(state2); - server_thread.unpark(); - - r - }); - - // Check whether `state2` was dropped, to know when to stop. 
- while Arc::get_mut(&mut state).is_none() { - thread::park(); - let mut b = match &mut *state.lock().unwrap() { - State::Req(b) => b.take(), - _ => continue, - }; - b = dispatcher.dispatch(b.take()); - *state.lock().unwrap() = State::Res(b); - join_handle.thread().unpark(); - } - - join_handle.join().unwrap() - } -} - -fn run_server< - S: Server, - I: Encode>>, - O: for<'a, 's> DecodeMut<'a, 's, HandleStore>>, ->( - strategy: &impl ExecutionStrategy, - handle_counters: &'static client::HandleCounters, - server: S, - input: I, - run_client: extern "C" fn(BridgeConfig<'_>) -> Buffer, - force_show_panics: bool, -) -> Result { - let mut dispatcher = - Dispatcher { handle_store: HandleStore::new(handle_counters), server: MarkedTypes(server) }; - - let globals = dispatcher.server.globals(); - - let mut buf = Buffer::new(); - (globals, input).encode(&mut buf, &mut dispatcher.handle_store); - - buf = strategy.run_bridge_and_client(&mut dispatcher, buf, run_client, force_show_panics); - - Result::decode(&mut &buf[..], &mut dispatcher.handle_store) -} - -impl client::Client { - pub fn run( - &self, - strategy: &impl ExecutionStrategy, - server: S, - input: S::TokenStream, - force_show_panics: bool, - ) -> Result - where - S: Server, - S::TokenStream: Default, - { - let client::Client { get_handle_counters, run, _marker } = *self; - run_server( - strategy, - get_handle_counters(), - server, - as Types>::TokenStream::mark(input), - run, - force_show_panics, - ) - .map(|s| as Types>::TokenStream>>::unmark(s).unwrap_or_default()) - } -} - -impl - client::Client< - (super::super::TokenStream, super::super::TokenStream), - super::super::TokenStream, - > -{ - pub fn run( - &self, - strategy: &impl ExecutionStrategy, - server: S, - input: S::TokenStream, - input2: S::TokenStream, - force_show_panics: bool, - ) -> Result - where - S: Server, - S::TokenStream: Default, - { - let client::Client { get_handle_counters, run, _marker } = *self; - run_server( - strategy, - get_handle_counters(), - server, - ( - as Types>::TokenStream::mark(input), - as Types>::TokenStream::mark(input2), - ), - run, - force_show_panics, - ) - .map(|s| as Types>::TokenStream>>::unmark(s).unwrap_or_default()) - } -} diff --git a/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/diagnostic.rs b/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/diagnostic.rs deleted file mode 100644 index 3fade2dc4f..0000000000 --- a/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/diagnostic.rs +++ /dev/null @@ -1,166 +0,0 @@ -//! lib-proc-macro diagnostic -//! -//! Copy from -//! augmented with removing unstable features - -use super::Span; - -/// An enum representing a diagnostic level. -#[derive(Copy, Clone, Debug)] -#[non_exhaustive] -pub enum Level { - /// An error. - Error, - /// A warning. - Warning, - /// A note. - Note, - /// A help message. - Help, -} - -/// Trait implemented by types that can be converted into a set of `Span`s. -pub trait MultiSpan { - /// Converts `self` into a `Vec`. - fn into_spans(self) -> Vec; -} - -impl MultiSpan for Span { - fn into_spans(self) -> Vec { - vec![self] - } -} - -impl MultiSpan for Vec { - fn into_spans(self) -> Vec { - self - } -} - -impl<'a> MultiSpan for &'a [Span] { - fn into_spans(self) -> Vec { - self.to_vec() - } -} - -/// A structure representing a diagnostic message and associated children -/// messages. -#[derive(Clone, Debug)] -pub struct Diagnostic { - level: Level, - message: String, - spans: Vec, - children: Vec, -} - -macro_rules! 
diagnostic_child_methods { - ($spanned:ident, $regular:ident, $level:expr) => { - #[doc = concat!("Adds a new child diagnostics message to `self` with the [`", - stringify!($level), "`] level, and the given `spans` and `message`.")] - pub fn $spanned(mut self, spans: S, message: T) -> Diagnostic - where - S: MultiSpan, - T: Into, - { - self.children.push(Diagnostic::spanned(spans, $level, message)); - self - } - - #[doc = concat!("Adds a new child diagnostic message to `self` with the [`", - stringify!($level), "`] level, and the given `message`.")] - pub fn $regular>(mut self, message: T) -> Diagnostic { - self.children.push(Diagnostic::new($level, message)); - self - } - }; -} - -/// Iterator over the children diagnostics of a `Diagnostic`. -#[derive(Debug, Clone)] -pub struct Children<'a>(std::slice::Iter<'a, Diagnostic>); - -impl<'a> Iterator for Children<'a> { - type Item = &'a Diagnostic; - - fn next(&mut self) -> Option { - self.0.next() - } -} - -impl Diagnostic { - /// Creates a new diagnostic with the given `level` and `message`. - pub fn new>(level: Level, message: T) -> Diagnostic { - Diagnostic { level, message: message.into(), spans: vec![], children: vec![] } - } - - /// Creates a new diagnostic with the given `level` and `message` pointing to - /// the given set of `spans`. - pub fn spanned(spans: S, level: Level, message: T) -> Diagnostic - where - S: MultiSpan, - T: Into, - { - Diagnostic { level, message: message.into(), spans: spans.into_spans(), children: vec![] } - } - - diagnostic_child_methods!(span_error, error, Level::Error); - diagnostic_child_methods!(span_warning, warning, Level::Warning); - diagnostic_child_methods!(span_note, note, Level::Note); - diagnostic_child_methods!(span_help, help, Level::Help); - - /// Returns the diagnostic `level` for `self`. - pub fn level(&self) -> Level { - self.level - } - - /// Sets the level in `self` to `level`. - pub fn set_level(&mut self, level: Level) { - self.level = level; - } - - /// Returns the message in `self`. - pub fn message(&self) -> &str { - &self.message - } - - /// Sets the message in `self` to `message`. - pub fn set_message>(&mut self, message: T) { - self.message = message.into(); - } - - /// Returns the `Span`s in `self`. - pub fn spans(&self) -> &[Span] { - &self.spans - } - - /// Sets the `Span`s in `self` to `spans`. - pub fn set_spans(&mut self, spans: S) { - self.spans = spans.into_spans(); - } - - /// Returns an iterator over the children diagnostics of `self`. - pub fn children(&self) -> Children<'_> { - Children(self.children.iter()) - } - - /// Emit the diagnostic. - pub fn emit(self) { - fn to_internal(spans: Vec) -> super::bridge::client::MultiSpan { - let mut multi_span = super::bridge::client::MultiSpan::new(); - for span in spans { - multi_span.push(span.0); - } - multi_span - } - - let mut diag = super::bridge::client::Diagnostic::new( - self.level, - &self.message[..], - to_internal(self.spans), - ); - for c in self.children { - diag.sub(c.level, &c.message[..], to_internal(c.spans)); - } - diag.emit(); - } -} diff --git a/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/mod.rs b/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/mod.rs deleted file mode 100644 index 86a59b6455..0000000000 --- a/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/mod.rs +++ /dev/null @@ -1,1125 +0,0 @@ -//! A support library for macro authors when defining new macros. -//! -//! This library, provided by the standard distribution, provides the types -//! 
consumed in the interfaces of procedurally defined macro definitions such as -//! function-like macros `#[proc_macro]`, macro attributes `#[proc_macro_attribute]` and -//! custom derive attributes`#[proc_macro_derive]`. -//! -//! See [the book] for more. -//! -//! [the book]: ../book/ch19-06-macros.html#procedural-macros-for-generating-code-from-attributes - -#[doc(hidden)] -pub mod bridge; - -mod diagnostic; - -pub use diagnostic::{Diagnostic, Level, MultiSpan}; - -use std::cmp::Ordering; -use std::ops::RangeBounds; -use std::path::PathBuf; -use std::str::FromStr; -use std::{error, fmt, iter, mem}; - -/// Determines whether proc_macro has been made accessible to the currently -/// running program. -/// -/// The proc_macro crate is only intended for use inside the implementation of -/// procedural macros. All the functions in this crate panic if invoked from -/// outside of a procedural macro, such as from a build script or unit test or -/// ordinary Rust binary. -/// -/// With consideration for Rust libraries that are designed to support both -/// macro and non-macro use cases, `proc_macro::is_available()` provides a -/// non-panicking way to detect whether the infrastructure required to use the -/// API of proc_macro is presently available. Returns true if invoked from -/// inside of a procedural macro, false if invoked from any other binary. -pub fn is_available() -> bool { - bridge::client::is_available() -} - -/// The main type provided by this crate, representing an abstract stream of -/// tokens, or, more specifically, a sequence of token trees. -/// The type provide interfaces for iterating over those token trees and, conversely, -/// collecting a number of token trees into one stream. -/// -/// This is both the input and output of `#[proc_macro]`, `#[proc_macro_attribute]` -/// and `#[proc_macro_derive]` definitions. -#[derive(Clone)] -pub struct TokenStream(Option); - -/// Error returned from `TokenStream::from_str`. -#[non_exhaustive] -#[derive(Debug)] -pub struct LexError; - -impl fmt::Display for LexError { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str("cannot parse string into token stream") - } -} - -impl error::Error for LexError {} - -/// Error returned from `TokenStream::expand_expr`. -#[non_exhaustive] -#[derive(Debug)] -pub struct ExpandError; - -impl fmt::Display for ExpandError { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str("macro expansion failed") - } -} - -impl error::Error for ExpandError {} - -impl TokenStream { - /// Returns an empty `TokenStream` containing no token trees. - pub fn new() -> TokenStream { - TokenStream(None) - } - - /// Checks if this `TokenStream` is empty. - pub fn is_empty(&self) -> bool { - self.0.as_ref().map(|h| h.is_empty()).unwrap_or(true) - } - - /// Parses this `TokenStream` as an expression and attempts to expand any - /// macros within it. Returns the expanded `TokenStream`. - /// - /// Currently only expressions expanding to literals will succeed, although - /// this may be relaxed in the future. - /// - /// NOTE: In error conditions, `expand_expr` may leave macros unexpanded, - /// report an error, failing compilation, and/or return an `Err(..)`. The - /// specific behavior for any error condition, and what conditions are - /// considered errors, is unspecified and may change in the future. 
- pub fn expand_expr(&self) -> Result { - let stream = self.0.as_ref().ok_or(ExpandError)?; - match bridge::client::TokenStream::expand_expr(stream) { - Ok(stream) => Ok(TokenStream(Some(stream))), - Err(_) => Err(ExpandError), - } - } -} - -/// Attempts to break the string into tokens and parse those tokens into a token stream. -/// May fail for a number of reasons, for example, if the string contains unbalanced delimiters -/// or characters not existing in the language. -/// All tokens in the parsed stream get `Span::call_site()` spans. -/// -/// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to -/// change these errors into `LexError`s later. -impl FromStr for TokenStream { - type Err = LexError; - - fn from_str(src: &str) -> Result { - Ok(TokenStream(Some(bridge::client::TokenStream::from_str(src)))) - } -} - -/// Prints the token stream as a string that is supposed to be losslessly convertible back -/// into the same token stream (modulo spans), except for possibly `TokenTree::Group`s -/// with `Delimiter::None` delimiters and negative numeric literals. -impl fmt::Display for TokenStream { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(&self.to_string()) - } -} - -/// Prints token in a form convenient for debugging. -impl fmt::Debug for TokenStream { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str("TokenStream ")?; - f.debug_list().entries(self.clone()).finish() - } -} - -impl Default for TokenStream { - fn default() -> Self { - TokenStream::new() - } -} - -pub use quote::{quote, quote_span}; - -fn tree_to_bridge_tree( - tree: TokenTree, -) -> bridge::TokenTree< - bridge::client::TokenStream, - bridge::client::Span, - bridge::client::Ident, - bridge::client::Literal, -> { - match tree { - TokenTree::Group(tt) => bridge::TokenTree::Group(tt.0), - TokenTree::Punct(tt) => bridge::TokenTree::Punct(tt.0), - TokenTree::Ident(tt) => bridge::TokenTree::Ident(tt.0), - TokenTree::Literal(tt) => bridge::TokenTree::Literal(tt.0), - } -} - -/// Creates a token stream containing a single token tree. -impl From for TokenStream { - fn from(tree: TokenTree) -> TokenStream { - TokenStream(Some(bridge::client::TokenStream::from_token_tree(tree_to_bridge_tree(tree)))) - } -} - -/// Non-generic helper for implementing `FromIterator` and -/// `Extend` with less monomorphization in calling crates. -struct ConcatStreamsHelper { - streams: Vec, -} - -impl ConcatStreamsHelper { - fn new(capacity: usize) -> Self { - ConcatStreamsHelper { streams: Vec::with_capacity(capacity) } - } - - fn push(&mut self, stream: TokenStream) { - if let Some(stream) = stream.0 { - self.streams.push(stream); - } - } - - fn build(mut self) -> TokenStream { - if self.streams.len() <= 1 { - TokenStream(self.streams.pop()) - } else { - TokenStream(Some(bridge::client::TokenStream::concat_streams(None, self.streams))) - } - } - - fn append_to(mut self, stream: &mut TokenStream) { - if self.streams.is_empty() { - return; - } - let base = stream.0.take(); - if base.is_none() && self.streams.len() == 1 { - stream.0 = self.streams.pop(); - } else { - stream.0 = Some(bridge::client::TokenStream::concat_streams(base, self.streams)); - } - } -} - -/// Collects a number of token trees into a single stream. 
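Reviewer note: a short sketch of how the `FromStr` impl above and the `FromIterator`/`Extend` impls that follow are exercised by macro authors, written against the public `proc_macro` API that this vendored copy mirrors. The `make_answer` macro is illustrative only and assumes a proc-macro crate.

use proc_macro::{Ident, Punct, Spacing, Span, TokenStream, TokenTree};

#[proc_macro]
pub fn make_answer(_input: TokenStream) -> TokenStream {
    // Parse a fragment from a string via the `FromStr` impl.
    let lit: TokenStream = "42".parse().expect("lexing `42` cannot fail");

    // Build the rest from individual token trees via `FromIterator<TokenTree>`.
    let mut out: TokenStream = [
        TokenTree::Ident(Ident::new("const", Span::call_site())),
        TokenTree::Ident(Ident::new("ANSWER", Span::call_site())),
        TokenTree::Punct(Punct::new(':', Spacing::Alone)),
        TokenTree::Ident(Ident::new("i32", Span::call_site())),
        TokenTree::Punct(Punct::new('=', Spacing::Alone)),
    ]
    .into_iter()
    .collect();

    // Splice the pieces together via `Extend<TokenStream>`.
    out.extend([lit, ";".parse::<TokenStream>().unwrap()]);
    out
}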
-impl FromIterator for TokenStream { - fn from_iter>(trees: I) -> Self { - trees.into_iter().map(TokenStream::from).collect() - } -} - -/// A "flattening" operation on token streams, collects token trees -/// from multiple token streams into a single stream. -impl FromIterator for TokenStream { - fn from_iter>(streams: I) -> Self { - let iter = streams.into_iter(); - let mut builder = ConcatStreamsHelper::new(iter.size_hint().0); - iter.for_each(|stream| builder.push(stream)); - builder.build() - } -} - -impl Extend for TokenStream { - fn extend>(&mut self, trees: I) { - self.extend(trees.into_iter().map(TokenStream::from)); - } -} - -impl Extend for TokenStream { - fn extend>(&mut self, streams: I) { - // FIXME(eddyb) Use an optimized implementation if/when possible. - *self = iter::once(mem::replace(self, Self::new())).chain(streams).collect(); - } -} - -/// Public implementation details for the `TokenStream` type, such as iterators. -pub mod token_stream { - use super::{bridge, Group, Ident, Literal, Punct, TokenStream, TokenTree}; - - /// An iterator over `TokenStream`'s `TokenTree`s. - /// The iteration is "shallow", e.g., the iterator doesn't recurse into delimited groups, - /// and returns whole groups as token trees. - #[derive(Clone)] - pub struct IntoIter( - std::vec::IntoIter< - bridge::TokenTree< - bridge::client::TokenStream, - bridge::client::Span, - bridge::client::Ident, - bridge::client::Literal, - >, - >, - ); - - impl Iterator for IntoIter { - type Item = TokenTree; - - fn next(&mut self) -> Option { - self.0.next().map(|tree| match tree { - bridge::TokenTree::Group(tt) => TokenTree::Group(Group(tt)), - bridge::TokenTree::Punct(tt) => TokenTree::Punct(Punct(tt)), - bridge::TokenTree::Ident(tt) => TokenTree::Ident(Ident(tt)), - bridge::TokenTree::Literal(tt) => TokenTree::Literal(Literal(tt)), - }) - } - } - - impl IntoIterator for TokenStream { - type Item = TokenTree; - type IntoIter = IntoIter; - - fn into_iter(self) -> IntoIter { - IntoIter(self.0.map(|v| v.into_trees()).unwrap_or_default().into_iter()) - } - } -} - -#[doc(hidden)] -mod quote; - -/// A region of source code, along with macro expansion information. -#[derive(Copy, Clone)] -pub struct Span(bridge::client::Span); - -macro_rules! diagnostic_method { - ($name:ident, $level:expr) => { - /// Creates a new `Diagnostic` with the given `message` at the span - /// `self`. - pub fn $name>(self, message: T) -> Diagnostic { - Diagnostic::spanned(self, $level, message) - } - }; -} - -impl Span { - /// A span that resolves at the macro definition site. - pub fn def_site() -> Span { - Span(bridge::client::Span::def_site()) - } - - /// The span of the invocation of the current procedural macro. - /// Identifiers created with this span will be resolved as if they were written - /// directly at the macro call location (call-site hygiene) and other code - /// at the macro call site will be able to refer to them as well. - pub fn call_site() -> Span { - Span(bridge::client::Span::call_site()) - } - - /// A span that represents `macro_rules` hygiene, and sometimes resolves at the macro - /// definition site (local variables, labels, `$crate`) and sometimes at the macro - /// call site (everything else). - /// The span location is taken from the call-site. - pub fn mixed_site() -> Span { - Span(bridge::client::Span::mixed_site()) - } - - /// The original source file into which this span points. 
- pub fn source_file(&self) -> SourceFile { - SourceFile(self.0.source_file()) - } - - /// The `Span` for the tokens in the previous macro expansion from which - /// `self` was generated from, if any. - pub fn parent(&self) -> Option { - self.0.parent().map(Span) - } - - /// The span for the origin source code that `self` was generated from. If - /// this `Span` wasn't generated from other macro expansions then the return - /// value is the same as `*self`. - pub fn source(&self) -> Span { - Span(self.0.source()) - } - - /// Gets the starting line/column in the source file for this span. - pub fn start(&self) -> LineColumn { - self.0.start().add_1_to_column() - } - - /// Gets the ending line/column in the source file for this span. - pub fn end(&self) -> LineColumn { - self.0.end().add_1_to_column() - } - - /// Creates an empty span pointing to directly before this span. - pub fn before(&self) -> Span { - Span(self.0.before()) - } - - /// Creates an empty span pointing to directly after this span. - pub fn after(&self) -> Span { - Span(self.0.after()) - } - - /// Creates a new span encompassing `self` and `other`. - /// - /// Returns `None` if `self` and `other` are from different files. - pub fn join(&self, other: Span) -> Option { - self.0.join(other.0).map(Span) - } - - /// Creates a new span with the same line/column information as `self` but - /// that resolves symbols as though it were at `other`. - pub fn resolved_at(&self, other: Span) -> Span { - Span(self.0.resolved_at(other.0)) - } - - /// Creates a new span with the same name resolution behavior as `self` but - /// with the line/column information of `other`. - pub fn located_at(&self, other: Span) -> Span { - other.resolved_at(*self) - } - - /// Compares to spans to see if they're equal. - pub fn eq(&self, other: &Span) -> bool { - self.0 == other.0 - } - - /// Returns the source text behind a span. This preserves the original source - /// code, including spaces and comments. It only returns a result if the span - /// corresponds to real source code. - /// - /// Note: The observable result of a macro should only rely on the tokens and - /// not on this source text. The result of this function is a best effort to - /// be used for diagnostics only. - pub fn source_text(&self) -> Option { - self.0.source_text() - } - - // Used by the implementation of `Span::quote` - #[doc(hidden)] - pub fn save_span(&self) -> usize { - self.0.save_span() - } - - // Used by the implementation of `Span::quote` - #[doc(hidden)] - pub fn recover_proc_macro_span(id: usize) -> Span { - Span(bridge::client::Span::recover_proc_macro_span(id)) - } - - diagnostic_method!(error, Level::Error); - diagnostic_method!(warning, Level::Warning); - diagnostic_method!(note, Level::Note); - diagnostic_method!(help, Level::Help); -} - -/// Prints a span in a form convenient for debugging. -impl fmt::Debug for Span { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.0.fmt(f) - } -} - -/// A line-column pair representing the start or end of a `Span`. -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub struct LineColumn { - /// The 1-indexed line in the source file on which the span starts or ends (inclusive). - pub line: usize, - /// The 1-indexed column (number of bytes in UTF-8 encoding) in the source - /// file on which the span starts or ends (inclusive). 
- pub column: usize, -} - -impl LineColumn { - fn add_1_to_column(self) -> Self { - LineColumn { line: self.line, column: self.column + 1 } - } -} - -impl Ord for LineColumn { - fn cmp(&self, other: &Self) -> Ordering { - self.line.cmp(&other.line).then(self.column.cmp(&other.column)) - } -} - -impl PartialOrd for LineColumn { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - -/// The source file of a given `Span`. -#[derive(Clone)] -pub struct SourceFile(bridge::client::SourceFile); - -impl SourceFile { - /// Gets the path to this source file. - /// - /// ### Note - /// If the code span associated with this `SourceFile` was generated by an external macro, this - /// macro, this might not be an actual path on the filesystem. Use [`is_real`] to check. - /// - /// Also note that even if `is_real` returns `true`, if `--remap-path-prefix` was passed on - /// the command line, the path as given might not actually be valid. - /// - /// [`is_real`]: Self::is_real - pub fn path(&self) -> PathBuf { - PathBuf::from(self.0.path()) - } - - /// Returns `true` if this source file is a real source file, and not generated by an external - /// macro's expansion. - pub fn is_real(&self) -> bool { - // This is a hack until intercrate spans are implemented and we can have real source files - // for spans generated in external macros. - // https://github.com/rust-lang/rust/pull/43604#issuecomment-333334368 - self.0.is_real() - } -} - -impl fmt::Debug for SourceFile { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("SourceFile") - .field("path", &self.path()) - .field("is_real", &self.is_real()) - .finish() - } -} - -impl PartialEq for SourceFile { - fn eq(&self, other: &Self) -> bool { - self.0.eq(&other.0) - } -} - -impl Eq for SourceFile {} - -/// A single token or a delimited sequence of token trees (e.g., `[1, (), ..]`). -#[derive(Clone)] -pub enum TokenTree { - /// A token stream surrounded by bracket delimiters. - Group(Group), - /// An identifier. - Ident(Ident), - /// A single punctuation character (`+`, `,`, `$`, etc.). - Punct(Punct), - /// A literal character (`'a'`), string (`"hello"`), number (`2.3`), etc. - Literal(Literal), -} - -impl TokenTree { - /// Returns the span of this tree, delegating to the `span` method of - /// the contained token or a delimited stream. - pub fn span(&self) -> Span { - match *self { - TokenTree::Group(ref t) => t.span(), - TokenTree::Ident(ref t) => t.span(), - TokenTree::Punct(ref t) => t.span(), - TokenTree::Literal(ref t) => t.span(), - } - } - - /// Configures the span for *only this token*. - /// - /// Note that if this token is a `Group` then this method will not configure - /// the span of each of the internal tokens, this will simply delegate to - /// the `set_span` method of each variant. - pub fn set_span(&mut self, span: Span) { - match *self { - TokenTree::Group(ref mut t) => t.set_span(span), - TokenTree::Ident(ref mut t) => t.set_span(span), - TokenTree::Punct(ref mut t) => t.set_span(span), - TokenTree::Literal(ref mut t) => t.set_span(span), - } - } -} - -/// Prints token tree in a form convenient for debugging. 
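Reviewer note: a small sketch of consuming the `TokenTree` enum above, for example to dump spans while debugging a macro. It is written against the public `proc_macro` API mirrored here; the `describe` helper itself is illustrative only.

use proc_macro::{TokenStream, TokenTree};

fn describe(stream: TokenStream) {
    for tree in stream {
        // `span()` delegates to the contained token, as implemented above.
        let span = tree.span();
        match &tree {
            TokenTree::Group(g) => eprintln!("group {:?} at {:?}", g.delimiter(), span),
            TokenTree::Ident(i) => eprintln!("ident `{i}` at {:?}", span),
            TokenTree::Punct(p) => eprintln!("punct `{}` at {:?}", p.as_char(), span),
            TokenTree::Literal(l) => eprintln!("literal `{l}` at {:?}", span),
        }
    }
}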
-impl fmt::Debug for TokenTree { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - // Each of these has the name in the struct type in the derived debug, - // so don't bother with an extra layer of indirection - match *self { - TokenTree::Group(ref tt) => tt.fmt(f), - TokenTree::Ident(ref tt) => tt.fmt(f), - TokenTree::Punct(ref tt) => tt.fmt(f), - TokenTree::Literal(ref tt) => tt.fmt(f), - } - } -} - -impl From for TokenTree { - fn from(g: Group) -> TokenTree { - TokenTree::Group(g) - } -} - -impl From for TokenTree { - fn from(g: Ident) -> TokenTree { - TokenTree::Ident(g) - } -} - -impl From for TokenTree { - fn from(g: Punct) -> TokenTree { - TokenTree::Punct(g) - } -} - -impl From for TokenTree { - fn from(g: Literal) -> TokenTree { - TokenTree::Literal(g) - } -} - -/// Prints the token tree as a string that is supposed to be losslessly convertible back -/// into the same token tree (modulo spans), except for possibly `TokenTree::Group`s -/// with `Delimiter::None` delimiters and negative numeric literals. -impl fmt::Display for TokenTree { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(&self.to_string()) - } -} - -/// A delimited token stream. -/// -/// A `Group` internally contains a `TokenStream` which is surrounded by `Delimiter`s. -#[derive(Clone)] -pub struct Group(bridge::Group); - -/// Describes how a sequence of token trees is delimited. -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub enum Delimiter { - /// `( ... )` - Parenthesis, - /// `{ ... }` - Brace, - /// `[ ... ]` - Bracket, - /// `Ø ... Ø` - /// An invisible delimiter, that may, for example, appear around tokens coming from a - /// "macro variable" `$var`. It is important to preserve operator priorities in cases like - /// `$var * 3` where `$var` is `1 + 2`. - /// Invisible delimiters might not survive roundtrip of a token stream through a string. - None, -} - -impl Group { - /// Creates a new `Group` with the given delimiter and token stream. - /// - /// This constructor will set the span for this group to - /// `Span::call_site()`. To change the span you can use the `set_span` - /// method below. - pub fn new(delimiter: Delimiter, stream: TokenStream) -> Group { - Group(bridge::Group { - delimiter, - stream: stream.0, - span: bridge::DelimSpan::from_single(Span::call_site().0), - }) - } - - /// Returns the delimiter of this `Group` - pub fn delimiter(&self) -> Delimiter { - self.0.delimiter - } - - /// Returns the `TokenStream` of tokens that are delimited in this `Group`. - /// - /// Note that the returned token stream does not include the delimiter - /// returned above. - pub fn stream(&self) -> TokenStream { - TokenStream(self.0.stream.clone()) - } - - /// Returns the span for the delimiters of this token stream, spanning the - /// entire `Group`. - /// - /// ```text - /// pub fn span(&self) -> Span { - /// ^^^^^^^ - /// ``` - pub fn span(&self) -> Span { - Span(self.0.span.entire) - } - - /// Returns the span pointing to the opening delimiter of this group. - /// - /// ```text - /// pub fn span_open(&self) -> Span { - /// ^ - /// ``` - pub fn span_open(&self) -> Span { - Span(self.0.span.open) - } - - /// Returns the span pointing to the closing delimiter of this group. - /// - /// ```text - /// pub fn span_close(&self) -> Span { - /// ^ - /// ``` - pub fn span_close(&self) -> Span { - Span(self.0.span.close) - } - - /// Configures the span for this `Group`'s delimiters, but not its internal - /// tokens. 
- /// - /// This method will **not** set the span of all the internal tokens spanned - /// by this group, but rather it will only set the span of the delimiter - /// tokens at the level of the `Group`. - pub fn set_span(&mut self, span: Span) { - self.0.span = bridge::DelimSpan::from_single(span.0); - } -} - -/// Prints the group as a string that should be losslessly convertible back -/// into the same group (modulo spans), except for possibly `TokenTree::Group`s -/// with `Delimiter::None` delimiters. -impl fmt::Display for Group { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(&self.to_string()) - } -} - -impl fmt::Debug for Group { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("Group") - .field("delimiter", &self.delimiter()) - .field("stream", &self.stream()) - .field("span", &self.span()) - .finish() - } -} - -/// A `Punct` is a single punctuation character such as `+`, `-` or `#`. -/// -/// Multi-character operators like `+=` are represented as two instances of `Punct` with different -/// forms of `Spacing` returned. -#[derive(Clone)] -pub struct Punct(bridge::Punct); - -/// Describes whether a `Punct` is followed immediately by another `Punct` ([`Spacing::Joint`]) or -/// by a different token or whitespace ([`Spacing::Alone`]). -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub enum Spacing { - /// A `Punct` is not immediately followed by another `Punct`. - /// E.g. `+` is `Alone` in `+ =`, `+ident` and `+()`. - Alone, - /// A `Punct` is immediately followed by another `Punct`. - /// E.g. `+` is `Joint` in `+=` and `++`. - /// - /// Additionally, single quote `'` can join with identifiers to form lifetimes: `'ident`. - Joint, -} - -impl Punct { - /// Creates a new `Punct` from the given character and spacing. - /// The `ch` argument must be a valid punctuation character permitted by the language, - /// otherwise the function will panic. - /// - /// The returned `Punct` will have the default span of `Span::call_site()` - /// which can be further configured with the `set_span` method below. - pub fn new(ch: char, spacing: Spacing) -> Punct { - const LEGAL_CHARS: &[char] = &[ - '=', '<', '>', '!', '~', '+', '-', '*', '/', '%', '^', '&', '|', '@', '.', ',', ';', - ':', '#', '$', '?', '\'', - ]; - if !LEGAL_CHARS.contains(&ch) { - panic!("unsupported character `{:?}`", ch); - } - Punct(bridge::Punct { - ch: ch as u8, - joint: spacing == Spacing::Joint, - span: Span::call_site().0, - }) - } - - /// Returns the value of this punctuation character as `char`. - pub fn as_char(&self) -> char { - self.0.ch as char - } - - /// Returns the spacing of this punctuation character, indicating whether it's immediately - /// followed by another `Punct` in the token stream, so they can potentially be combined into - /// a multi-character operator (`Joint`), or it's followed by some other token or whitespace - /// (`Alone`) so the operator has certainly ended. - pub fn spacing(&self) -> Spacing { - if self.0.joint { - Spacing::Joint - } else { - Spacing::Alone - } - } - - /// Returns the span for this punctuation character. - pub fn span(&self) -> Span { - Span(self.0.span) - } - - /// Configure the span for this punctuation character. - pub fn set_span(&mut self, span: Span) { - self.0.span = span.0; - } -} - -/// Prints the punctuation character as a string that should be losslessly convertible -/// back into the same character. 
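Reviewer note: `Spacing` above is what lets macro output spell multi-character operators. A tiny sketch, illustrative only and written against the public `proc_macro` API mirrored here, emitting `=>` as two `Punct`s, the first `Joint` and the second `Alone`:

use proc_macro::{Punct, Spacing, TokenStream, TokenTree};

fn fat_arrow() -> TokenStream {
    [
        // `Joint` marks that another punctuation character follows immediately.
        TokenTree::Punct(Punct::new('=', Spacing::Joint)),
        TokenTree::Punct(Punct::new('>', Spacing::Alone)),
    ]
    .into_iter()
    .collect()
}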
-impl fmt::Display for Punct { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(&self.to_string()) - } -} - -impl fmt::Debug for Punct { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("Punct") - .field("ch", &self.as_char()) - .field("spacing", &self.spacing()) - .field("span", &self.span()) - .finish() - } -} - -impl PartialEq for Punct { - fn eq(&self, rhs: &char) -> bool { - self.as_char() == *rhs - } -} - -impl PartialEq for char { - fn eq(&self, rhs: &Punct) -> bool { - *self == rhs.as_char() - } -} - -/// An identifier (`ident`). -#[derive(Clone)] -pub struct Ident(bridge::client::Ident); - -impl Ident { - /// Creates a new `Ident` with the given `string` as well as the specified - /// `span`. - /// The `string` argument must be a valid identifier permitted by the - /// language (including keywords, e.g. `self` or `fn`). Otherwise, the function will panic. - /// - /// Note that `span`, currently in rustc, configures the hygiene information - /// for this identifier. - /// - /// As of this time `Span::call_site()` explicitly opts-in to "call-site" hygiene - /// meaning that identifiers created with this span will be resolved as if they were written - /// directly at the location of the macro call, and other code at the macro call site will be - /// able to refer to them as well. - /// - /// Later spans like `Span::def_site()` will allow to opt-in to "definition-site" hygiene - /// meaning that identifiers created with this span will be resolved at the location of the - /// macro definition and other code at the macro call site will not be able to refer to them. - /// - /// Due to the current importance of hygiene this constructor, unlike other - /// tokens, requires a `Span` to be specified at construction. - pub fn new(string: &str, span: Span) -> Ident { - Ident(bridge::client::Ident::new(string, span.0, false)) - } - - /// Same as `Ident::new`, but creates a raw identifier (`r#ident`). - /// The `string` argument be a valid identifier permitted by the language - /// (including keywords, e.g. `fn`). Keywords which are usable in path segments - /// (e.g. `self`, `super`) are not supported, and will cause a panic. - pub fn new_raw(string: &str, span: Span) -> Ident { - Ident(bridge::client::Ident::new(string, span.0, true)) - } - - /// Returns the span of this `Ident`, encompassing the entire string returned - /// by [`to_string`](Self::to_string). - pub fn span(&self) -> Span { - Span(self.0.span()) - } - - /// Configures the span of this `Ident`, possibly changing its hygiene context. - pub fn set_span(&mut self, span: Span) { - self.0 = self.0.with_span(span.0); - } -} - -/// Prints the identifier as a string that should be losslessly convertible -/// back into the same identifier. -impl fmt::Display for Ident { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(&self.to_string()) - } -} - -impl fmt::Debug for Ident { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("Ident") - .field("ident", &self.to_string()) - .field("span", &self.span()) - .finish() - } -} - -/// A literal string (`"hello"`), byte string (`b"hello"`), -/// character (`'a'`), byte character (`b'a'`), an integer or floating point number -/// with or without a suffix (`1`, `1u8`, `2.3`, `2.3f32`). -/// Boolean literals like `true` and `false` do not belong here, they are `Ident`s. -#[derive(Clone)] -pub struct Literal(bridge::client::Literal); - -macro_rules! 
suffixed_int_literals { - ($($name:ident => $kind:ident,)*) => ($( - /// Creates a new suffixed integer literal with the specified value. - /// - /// This function will create an integer like `1u32` where the integer - /// value specified is the first part of the token and the integral is - /// also suffixed at the end. - /// Literals created from negative numbers might not survive round-trips through - /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal). - /// - /// Literals created through this method have the `Span::call_site()` - /// span by default, which can be configured with the `set_span` method - /// below. - pub fn $name(n: $kind) -> Literal { - Literal(bridge::client::Literal::typed_integer(&n.to_string(), stringify!($kind))) - } - )*) -} - -macro_rules! unsuffixed_int_literals { - ($($name:ident => $kind:ident,)*) => ($( - /// Creates a new unsuffixed integer literal with the specified value. - /// - /// This function will create an integer like `1` where the integer - /// value specified is the first part of the token. No suffix is - /// specified on this token, meaning that invocations like - /// `Literal::i8_unsuffixed(1)` are equivalent to - /// `Literal::u32_unsuffixed(1)`. - /// Literals created from negative numbers might not survive rountrips through - /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal). - /// - /// Literals created through this method have the `Span::call_site()` - /// span by default, which can be configured with the `set_span` method - /// below. - pub fn $name(n: $kind) -> Literal { - Literal(bridge::client::Literal::integer(&n.to_string())) - } - )*) -} - -impl Literal { - suffixed_int_literals! { - u8_suffixed => u8, - u16_suffixed => u16, - u32_suffixed => u32, - u64_suffixed => u64, - u128_suffixed => u128, - usize_suffixed => usize, - i8_suffixed => i8, - i16_suffixed => i16, - i32_suffixed => i32, - i64_suffixed => i64, - i128_suffixed => i128, - isize_suffixed => isize, - } - - unsuffixed_int_literals! { - u8_unsuffixed => u8, - u16_unsuffixed => u16, - u32_unsuffixed => u32, - u64_unsuffixed => u64, - u128_unsuffixed => u128, - usize_unsuffixed => usize, - i8_unsuffixed => i8, - i16_unsuffixed => i16, - i32_unsuffixed => i32, - i64_unsuffixed => i64, - i128_unsuffixed => i128, - isize_unsuffixed => isize, - } - - /// Creates a new unsuffixed floating-point literal. - /// - /// This constructor is similar to those like `Literal::i8_unsuffixed` where - /// the float's value is emitted directly into the token but no suffix is - /// used, so it may be inferred to be a `f64` later in the compiler. - /// Literals created from negative numbers might not survive rountrips through - /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal). - /// - /// # Panics - /// - /// This function requires that the specified float is finite, for - /// example if it is infinity or NaN this function will panic. - pub fn f32_unsuffixed(n: f32) -> Literal { - if !n.is_finite() { - panic!("Invalid float literal {n}"); - } - let mut repr = n.to_string(); - if !repr.contains('.') { - repr.push_str(".0"); - } - Literal(bridge::client::Literal::float(&repr)) - } - - /// Creates a new suffixed floating-point literal. - /// - /// This constructor will create a literal like `1.0f32` where the value - /// specified is the preceding part of the token and `f32` is the suffix of - /// the token. This token will always be inferred to be an `f32` in the - /// compiler. 
- /// Literals created from negative numbers might not survive rountrips through - /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal). - /// - /// # Panics - /// - /// This function requires that the specified float is finite, for - /// example if it is infinity or NaN this function will panic. - pub fn f32_suffixed(n: f32) -> Literal { - if !n.is_finite() { - panic!("Invalid float literal {n}"); - } - Literal(bridge::client::Literal::f32(&n.to_string())) - } - - /// Creates a new unsuffixed floating-point literal. - /// - /// This constructor is similar to those like `Literal::i8_unsuffixed` where - /// the float's value is emitted directly into the token but no suffix is - /// used, so it may be inferred to be a `f64` later in the compiler. - /// Literals created from negative numbers might not survive rountrips through - /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal). - /// - /// # Panics - /// - /// This function requires that the specified float is finite, for - /// example if it is infinity or NaN this function will panic. - pub fn f64_unsuffixed(n: f64) -> Literal { - if !n.is_finite() { - panic!("Invalid float literal {n}"); - } - let mut repr = n.to_string(); - if !repr.contains('.') { - repr.push_str(".0"); - } - Literal(bridge::client::Literal::float(&repr)) - } - - /// Creates a new suffixed floating-point literal. - /// - /// This constructor will create a literal like `1.0f64` where the value - /// specified is the preceding part of the token and `f64` is the suffix of - /// the token. This token will always be inferred to be an `f64` in the - /// compiler. - /// Literals created from negative numbers might not survive rountrips through - /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal). - /// - /// # Panics - /// - /// This function requires that the specified float is finite, for - /// example if it is infinity or NaN this function will panic. - pub fn f64_suffixed(n: f64) -> Literal { - if !n.is_finite() { - panic!("Invalid float literal {n}"); - } - Literal(bridge::client::Literal::f64(&n.to_string())) - } - - /// String literal. - pub fn string(string: &str) -> Literal { - Literal(bridge::client::Literal::string(string)) - } - - /// Character literal. - pub fn character(ch: char) -> Literal { - Literal(bridge::client::Literal::character(ch)) - } - - /// Byte string literal. - pub fn byte_string(bytes: &[u8]) -> Literal { - Literal(bridge::client::Literal::byte_string(bytes)) - } - - /// Returns the span encompassing this literal. - pub fn span(&self) -> Span { - Span(self.0.span()) - } - - /// Configures the span associated for this literal. - pub fn set_span(&mut self, span: Span) { - self.0.set_span(span.0); - } - - /// Returns a `Span` that is a subset of `self.span()` containing only the - /// source bytes in range `range`. Returns `None` if the would-be trimmed - /// span is outside the bounds of `self`. - // FIXME(SergioBenitez): check that the byte range starts and ends at a - // UTF-8 boundary of the source. otherwise, it's likely that a panic will - // occur elsewhere when the source text is printed. - // FIXME(SergioBenitez): there is no way for the user to know what - // `self.span()` actually maps to, so this method can currently only be - // called blindly. For example, `to_string()` for the character 'c' returns - // "'\u{63}'"; there is no way for the user to know whether the source text - // was 'c' or whether it was '\u{63}'. 
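The string-like constructors above (`string`, `character`, `byte_string`) produce the escaped, quoted token text. A small sketch, again assuming the `proc-macro2` crate as a stand-in for this API:

use proc_macro2::Literal; // assumed dependency: proc-macro2

fn main() {
    // `string` wraps (and escapes) the contents in double quotes.
    assert_eq!(Literal::string("hello").to_string(), "\"hello\"");
    // `character` produces a char literal token.
    assert_eq!(Literal::character('c').to_string(), "'c'");
    // `byte_string` escapes non-printable bytes, e.g. b"a\x88" prints as b"a\x88".
    println!("{}", Literal::byte_string(b"a\x88"));
}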
- pub fn subspan>(&self, range: R) -> Option { - self.0.subspan(range.start_bound().cloned(), range.end_bound().cloned()).map(Span) - } -} - -/// Parse a single literal from its stringified representation. -/// -/// In order to parse successfully, the input string must not contain anything -/// but the literal token. Specifically, it must not contain whitespace or -/// comments in addition to the literal. -/// -/// The resulting literal token will have a `Span::call_site()` span. -/// -/// NOTE: some errors may cause panics instead of returning `LexError`. We -/// reserve the right to change these errors into `LexError`s later. -impl FromStr for Literal { - type Err = LexError; - - fn from_str(src: &str) -> Result { - match bridge::client::Literal::from_str(src) { - Ok(literal) => Ok(Literal(literal)), - Err(()) => Err(LexError), - } - } -} - -/// Prints the literal as a string that should be losslessly convertible -/// back into the same literal (except for possible rounding for floating point literals). -impl fmt::Display for Literal { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(&self.to_string()) - } -} - -impl fmt::Debug for Literal { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.0.fmt(f) - } -} - -/// Tracked access to environment variables. -pub mod tracked_env { - use std::env::{self, VarError}; - use std::ffi::OsStr; - - /// Retrieve an environment variable and add it to build dependency info. - /// Build system executing the compiler will know that the variable was accessed during - /// compilation, and will be able to rerun the build when the value of that variable changes. - /// Besides the dependency tracking this function should be equivalent to `env::var` from the - /// standard library, except that the argument must be UTF-8. - pub fn var + AsRef>(key: K) -> Result { - let key: &str = key.as_ref(); - let value = env::var(key); - super::bridge::client::FreeFunctions::track_env_var(key, value.as_deref().ok()); - value - } -} - -/// Tracked access to additional files. -pub mod tracked_path { - - /// Track a file explicitly. - /// - /// Commonly used for tracking asset preprocessing. - pub fn path>(path: P) { - let path: &str = path.as_ref(); - super::bridge::client::FreeFunctions::track_path(path); - } -} diff --git a/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/quote.rs b/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/quote.rs deleted file mode 100644 index 39309faa41..0000000000 --- a/crates/proc-macro-srv/src/abis/abi_1_64/proc_macro/quote.rs +++ /dev/null @@ -1,139 +0,0 @@ -//! # Quasiquoter -//! This file contains the implementation internals of the quasiquoter provided by `quote!`. - -//! This quasiquoter uses macros 2.0 hygiene to reliably access -//! items from `proc_macro`, to build a `proc_macro::TokenStream`. - -use super::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree}; - -macro_rules! quote_tt { - (($($t:tt)*)) => { Group::new(Delimiter::Parenthesis, quote!($($t)*)) }; - ([$($t:tt)*]) => { Group::new(Delimiter::Bracket, quote!($($t)*)) }; - ({$($t:tt)*}) => { Group::new(Delimiter::Brace, quote!($($t)*)) }; - (,) => { Punct::new(',', Spacing::Alone) }; - (.) => { Punct::new('.', Spacing::Alone) }; - (;) => { Punct::new(';', Spacing::Alone) }; - (!) 
=> { Punct::new('!', Spacing::Alone) }; - (<) => { Punct::new('<', Spacing::Alone) }; - (>) => { Punct::new('>', Spacing::Alone) }; - (&) => { Punct::new('&', Spacing::Alone) }; - (=) => { Punct::new('=', Spacing::Alone) }; - ($i:ident) => { Ident::new(stringify!($i), Span::def_site()) }; -} - -macro_rules! quote_ts { - ((@ $($t:tt)*)) => { $($t)* }; - (::) => { - [ - TokenTree::from(Punct::new(':', Spacing::Joint)), - TokenTree::from(Punct::new(':', Spacing::Alone)), - ].iter() - .cloned() - .map(|mut x| { - x.set_span(Span::def_site()); - x - }) - .collect::() - }; - ($t:tt) => { TokenTree::from(quote_tt!($t)) }; -} - -/// Simpler version of the real `quote!` macro, implemented solely -/// through `macro_rules`, for bootstrapping the real implementation -/// (see the `quote` function), which does not have access to the -/// real `quote!` macro due to the `proc_macro` crate not being -/// able to depend on itself. -/// -/// Note: supported tokens are a subset of the real `quote!`, but -/// unquoting is different: instead of `$x`, this uses `(@ expr)`. -macro_rules! quote { - () => { TokenStream::new() }; - ($($t:tt)*) => { - [ - $(TokenStream::from(quote_ts!($t)),)* - ].iter().cloned().collect::() - }; -} - -/// Quote a `TokenStream` into a `TokenStream`. -/// This is the actual implementation of the `quote!()` proc macro. -/// -/// It is loaded by the compiler in `register_builtin_macros`. -pub fn quote(stream: TokenStream) -> TokenStream { - if stream.is_empty() { - return quote!(super::TokenStream::new()); - } - let proc_macro_crate = quote!(crate); - let mut after_dollar = false; - let tokens = stream - .into_iter() - .filter_map(|tree| { - if after_dollar { - after_dollar = false; - match tree { - TokenTree::Ident(_) => { - return Some(quote!(Into::::into( - Clone::clone(&(@ tree))),)); - } - TokenTree::Punct(ref tt) if tt.as_char() == '$' => {} - _ => panic!("`$` must be followed by an ident or `$` in `quote!`"), - } - } else if let TokenTree::Punct(ref tt) = tree { - if tt.as_char() == '$' { - after_dollar = true; - return None; - } - } - - Some(quote!(super::TokenStream::from((@ match tree { - TokenTree::Punct(tt) => quote!(super::TokenTree::Punct(super::Punct::new( - (@ TokenTree::from(Literal::character(tt.as_char()))), - (@ match tt.spacing() { - Spacing::Alone => quote!(super::Spacing::Alone), - Spacing::Joint => quote!(super::Spacing::Joint), - }), - ))), - TokenTree::Group(tt) => quote!(super::TokenTree::Group(super::Group::new( - (@ match tt.delimiter() { - Delimiter::Parenthesis => quote!(super::Delimiter::Parenthesis), - Delimiter::Brace => quote!(super::Delimiter::Brace), - Delimiter::Bracket => quote!(super::Delimiter::Bracket), - Delimiter::None => quote!(super::Delimiter::None), - }), - (@ quote(tt.stream())), - ))), - TokenTree::Ident(tt) => quote!(super::TokenTree::Ident(super::Ident::new( - (@ TokenTree::from(Literal::string(&tt.to_string()))), - (@ quote_span(proc_macro_crate.clone(), tt.span())), - ))), - TokenTree::Literal(tt) => quote!(super::TokenTree::Literal({ - let mut iter = (@ TokenTree::from(Literal::string(&tt.to_string()))) - .parse::() - .unwrap() - .into_iter(); - if let (Some(super::TokenTree::Literal(mut lit)), None) = - (iter.next(), iter.next()) - { - lit.set_span((@ quote_span(proc_macro_crate.clone(), tt.span()))); - lit - } else { - unreachable!() - } - })) - })),)) - }) - .collect::(); - - if after_dollar { - panic!("unexpected trailing `$` in `quote!`"); - } - - quote!([(@ tokens)].iter().cloned().collect::()) -} - -/// Quote a `Span` into a 
`TokenStream`. -/// This is needed to implement a custom quoter. -pub fn quote_span(proc_macro_crate: TokenStream, span: Span) -> TokenStream { - let id = span.save_span(); - quote!((@ proc_macro_crate ) ::Span::recover_proc_macro_span((@ TokenTree::from(Literal::usize_unsuffixed(id))))) -} diff --git a/crates/proc-macro-srv/src/abis/abi_1_64/ra_server.rs b/crates/proc-macro-srv/src/abis/abi_1_64/ra_server.rs deleted file mode 100644 index d8aa1ec429..0000000000 --- a/crates/proc-macro-srv/src/abis/abi_1_64/ra_server.rs +++ /dev/null @@ -1,791 +0,0 @@ -//! Rustc proc-macro server implementation with tt -//! -//! Based on idea from -//! The lib-proc-macro server backend is `TokenStream`-agnostic, such that -//! we could provide any TokenStream implementation. -//! The original idea from fedochet is using proc-macro2 as backend, -//! we use tt instead for better integration with RA. -//! -//! FIXME: No span and source file information is implemented yet - -use super::proc_macro::bridge::{self, server}; - -use std::collections::HashMap; -use std::hash::Hash; -use std::ops::Bound; -use std::{ascii, vec::IntoIter}; - -type Group = tt::Subtree; -type TokenTree = tt::TokenTree; -type Punct = tt::Punct; -type Spacing = tt::Spacing; -type Literal = tt::Literal; -type Span = tt::TokenId; - -#[derive(Debug, Default, Clone)] -pub struct TokenStream { - pub token_trees: Vec, -} - -impl TokenStream { - pub fn new() -> Self { - TokenStream::default() - } - - pub fn with_subtree(subtree: tt::Subtree) -> Self { - if subtree.delimiter.is_some() { - TokenStream { token_trees: vec![TokenTree::Subtree(subtree)] } - } else { - TokenStream { token_trees: subtree.token_trees } - } - } - - pub fn into_subtree(self) -> tt::Subtree { - tt::Subtree { delimiter: None, token_trees: self.token_trees } - } - - pub fn is_empty(&self) -> bool { - self.token_trees.is_empty() - } -} - -/// Creates a token stream containing a single token tree. -impl From for TokenStream { - fn from(tree: TokenTree) -> TokenStream { - TokenStream { token_trees: vec![tree] } - } -} - -/// Collects a number of token trees into a single stream. -impl FromIterator for TokenStream { - fn from_iter>(trees: I) -> Self { - trees.into_iter().map(TokenStream::from).collect() - } -} - -/// A "flattening" operation on token streams, collects token trees -/// from multiple token streams into a single stream. -impl FromIterator for TokenStream { - fn from_iter>(streams: I) -> Self { - let mut builder = TokenStreamBuilder::new(); - streams.into_iter().for_each(|stream| builder.push(stream)); - builder.build() - } -} - -impl Extend for TokenStream { - fn extend>(&mut self, trees: I) { - self.extend(trees.into_iter().map(TokenStream::from)); - } -} - -impl Extend for TokenStream { - fn extend>(&mut self, streams: I) { - for item in streams { - for tkn in item { - match tkn { - tt::TokenTree::Subtree(subtree) if subtree.delimiter.is_none() => { - self.token_trees.extend(subtree.token_trees); - } - _ => { - self.token_trees.push(tkn); - } - } - } - } - } -} - -#[derive(Clone)] -pub struct SourceFile { - // FIXME stub -} - -type Level = super::proc_macro::Level; -type LineColumn = super::proc_macro::LineColumn; - -/// A structure representing a diagnostic message and associated children -/// messages. -#[derive(Clone, Debug)] -pub struct Diagnostic { - level: Level, - message: String, - spans: Vec, - children: Vec, -} - -impl Diagnostic { - /// Creates a new diagnostic with the given `level` and `message`. 
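The `FromIterator`/`Extend` impls above rely on one invariant: a subtree without a delimiter is not a real group, so its children are spliced directly into the parent stream instead of being pushed as a single node. A self-contained sketch of that rule with simplified, hypothetical types (not the real `tt` crate):

#[derive(Debug, PartialEq)]
enum Tree {
    Leaf(char),
    Subtree { delimiter: Option<char>, trees: Vec<Tree> },
}

// Mirrors `Extend<TokenStream>` above: delimiter-less subtrees are flattened.
fn extend_flattening(acc: &mut Vec<Tree>, items: Vec<Tree>) {
    for item in items {
        match item {
            Tree::Subtree { delimiter: None, trees } => acc.extend(trees),
            other => acc.push(other),
        }
    }
}

fn main() {
    let mut acc = vec![Tree::Leaf('a')];
    extend_flattening(
        &mut acc,
        vec![Tree::Subtree { delimiter: None, trees: vec![Tree::Leaf('b'), Tree::Leaf('c')] }],
    );
    assert_eq!(acc, vec![Tree::Leaf('a'), Tree::Leaf('b'), Tree::Leaf('c')]);
}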
- pub fn new>(level: Level, message: T) -> Diagnostic { - Diagnostic { level, message: message.into(), spans: vec![], children: vec![] } - } -} - -// Rustc Server Ident has to be `Copyable` -// We use a stub here for bypassing -#[derive(Hash, Eq, PartialEq, Copy, Clone)] -pub struct IdentId(u32); - -#[derive(Clone, Hash, Eq, PartialEq)] -struct IdentData(tt::Ident); - -#[derive(Default)] -struct IdentInterner { - idents: HashMap, - ident_data: Vec, -} - -impl IdentInterner { - fn intern(&mut self, data: &IdentData) -> u32 { - if let Some(index) = self.idents.get(data) { - return *index; - } - - let index = self.idents.len() as u32; - self.ident_data.push(data.clone()); - self.idents.insert(data.clone(), index); - index - } - - fn get(&self, index: u32) -> &IdentData { - &self.ident_data[index as usize] - } - - #[allow(unused)] - fn get_mut(&mut self, index: u32) -> &mut IdentData { - self.ident_data.get_mut(index as usize).expect("Should be consistent") - } -} - -pub struct TokenStreamBuilder { - acc: TokenStream, -} - -/// Public implementation details for the `TokenStream` type, such as iterators. -pub mod token_stream { - use std::str::FromStr; - - use super::{TokenStream, TokenTree}; - - /// An iterator over `TokenStream`'s `TokenTree`s. - /// The iteration is "shallow", e.g., the iterator doesn't recurse into delimited groups, - /// and returns whole groups as token trees. - impl IntoIterator for TokenStream { - type Item = TokenTree; - type IntoIter = super::IntoIter; - - fn into_iter(self) -> Self::IntoIter { - self.token_trees.into_iter() - } - } - - type LexError = String; - - /// Attempts to break the string into tokens and parse those tokens into a token stream. - /// May fail for a number of reasons, for example, if the string contains unbalanced delimiters - /// or characters not existing in the language. - /// All tokens in the parsed stream get `Span::call_site()` spans. - /// - /// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to - /// change these errors into `LexError`s later. 
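`IdentInterner` above follows the usual interning contract: interning equal data twice yields the same index, and `get` round-trips it back to the data. A minimal, self-contained sketch of the same contract over plain `String` keys (illustration only, not the real `IdentData` type):

use std::collections::HashMap;

#[derive(Default)]
struct Interner {
    indices: HashMap<String, u32>,
    data: Vec<String>,
}

impl Interner {
    fn intern(&mut self, value: &str) -> u32 {
        // Re-use the existing index if the value was interned before.
        if let Some(&index) = self.indices.get(value) {
            return index;
        }
        let index = self.data.len() as u32;
        self.data.push(value.to_owned());
        self.indices.insert(value.to_owned(), index);
        index
    }

    fn get(&self, index: u32) -> &str {
        &self.data[index as usize]
    }
}

fn main() {
    let mut interner = Interner::default();
    let a = interner.intern("foo");
    let b = interner.intern("foo");
    assert_eq!(a, b);                 // same data, same index
    assert_eq!(interner.get(a), "foo"); // round-trip
}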
- impl FromStr for TokenStream { - type Err = LexError; - - fn from_str(src: &str) -> Result { - let (subtree, _token_map) = - mbe::parse_to_token_tree(src).ok_or("Failed to parse from mbe")?; - - let subtree = subtree_replace_token_ids_with_unspecified(subtree); - Ok(TokenStream::with_subtree(subtree)) - } - } - - impl ToString for TokenStream { - fn to_string(&self) -> String { - tt::pretty(&self.token_trees) - } - } - - fn subtree_replace_token_ids_with_unspecified(subtree: tt::Subtree) -> tt::Subtree { - tt::Subtree { - delimiter: subtree - .delimiter - .map(|d| tt::Delimiter { id: tt::TokenId::unspecified(), ..d }), - token_trees: subtree - .token_trees - .into_iter() - .map(token_tree_replace_token_ids_with_unspecified) - .collect(), - } - } - - fn token_tree_replace_token_ids_with_unspecified(tt: tt::TokenTree) -> tt::TokenTree { - match tt { - tt::TokenTree::Leaf(leaf) => { - tt::TokenTree::Leaf(leaf_replace_token_ids_with_unspecified(leaf)) - } - tt::TokenTree::Subtree(subtree) => { - tt::TokenTree::Subtree(subtree_replace_token_ids_with_unspecified(subtree)) - } - } - } - - fn leaf_replace_token_ids_with_unspecified(leaf: tt::Leaf) -> tt::Leaf { - match leaf { - tt::Leaf::Literal(lit) => { - tt::Leaf::Literal(tt::Literal { id: tt::TokenId::unspecified(), ..lit }) - } - tt::Leaf::Punct(punct) => { - tt::Leaf::Punct(tt::Punct { id: tt::TokenId::unspecified(), ..punct }) - } - tt::Leaf::Ident(ident) => { - tt::Leaf::Ident(tt::Ident { id: tt::TokenId::unspecified(), ..ident }) - } - } - } -} - -impl TokenStreamBuilder { - fn new() -> TokenStreamBuilder { - TokenStreamBuilder { acc: TokenStream::new() } - } - - fn push(&mut self, stream: TokenStream) { - self.acc.extend(stream.into_iter()) - } - - fn build(self) -> TokenStream { - self.acc - } -} - -pub struct FreeFunctions; - -#[derive(Clone)] -pub struct TokenStreamIter { - trees: IntoIter, -} - -#[derive(Default)] -pub struct RustAnalyzer { - ident_interner: IdentInterner, - // FIXME: store span information here. 
-} - -impl server::Types for RustAnalyzer { - type FreeFunctions = FreeFunctions; - type TokenStream = TokenStream; - type Ident = IdentId; - type Literal = Literal; - type SourceFile = SourceFile; - type Diagnostic = Diagnostic; - type Span = Span; - type MultiSpan = Vec; -} - -impl server::FreeFunctions for RustAnalyzer { - fn track_env_var(&mut self, _var: &str, _value: Option<&str>) { - // FIXME: track env var accesses - // https://github.com/rust-lang/rust/pull/71858 - } - fn track_path(&mut self, _path: &str) {} -} - -impl server::TokenStream for RustAnalyzer { - fn is_empty(&mut self, stream: &Self::TokenStream) -> bool { - stream.is_empty() - } - fn from_str(&mut self, src: &str) -> Self::TokenStream { - use std::str::FromStr; - - Self::TokenStream::from_str(src).expect("cannot parse string") - } - fn to_string(&mut self, stream: &Self::TokenStream) -> String { - stream.to_string() - } - fn from_token_tree( - &mut self, - tree: bridge::TokenTree, - ) -> Self::TokenStream { - match tree { - bridge::TokenTree::Group(group) => { - let group = Group { - delimiter: delim_to_internal(group.delimiter), - token_trees: match group.stream { - Some(stream) => stream.into_iter().collect(), - None => Vec::new(), - }, - }; - let tree = TokenTree::from(group); - Self::TokenStream::from_iter(vec![tree]) - } - - bridge::TokenTree::Ident(IdentId(index)) => { - let IdentData(ident) = self.ident_interner.get(index).clone(); - let ident: tt::Ident = ident; - let leaf = tt::Leaf::from(ident); - let tree = TokenTree::from(leaf); - Self::TokenStream::from_iter(vec![tree]) - } - - bridge::TokenTree::Literal(literal) => { - let leaf = tt::Leaf::from(literal); - let tree = TokenTree::from(leaf); - Self::TokenStream::from_iter(vec![tree]) - } - - bridge::TokenTree::Punct(p) => { - let punct = tt::Punct { - char: p.ch as char, - spacing: if p.joint { Spacing::Joint } else { Spacing::Alone }, - id: p.span, - }; - let leaf = tt::Leaf::from(punct); - let tree = TokenTree::from(leaf); - Self::TokenStream::from_iter(vec![tree]) - } - } - } - - fn expand_expr(&mut self, self_: &Self::TokenStream) -> Result { - Ok(self_.clone()) - } - - fn concat_trees( - &mut self, - base: Option, - trees: Vec>, - ) -> Self::TokenStream { - let mut builder = TokenStreamBuilder::new(); - if let Some(base) = base { - builder.push(base); - } - for tree in trees { - builder.push(self.from_token_tree(tree)); - } - builder.build() - } - - fn concat_streams( - &mut self, - base: Option, - streams: Vec, - ) -> Self::TokenStream { - let mut builder = TokenStreamBuilder::new(); - if let Some(base) = base { - builder.push(base); - } - for stream in streams { - builder.push(stream); - } - builder.build() - } - - fn into_trees( - &mut self, - stream: Self::TokenStream, - ) -> Vec> { - stream - .into_iter() - .map(|tree| match tree { - tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => { - bridge::TokenTree::Ident(IdentId(self.ident_interner.intern(&IdentData(ident)))) - } - tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => bridge::TokenTree::Literal(lit), - tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => { - bridge::TokenTree::Punct(bridge::Punct { - ch: punct.char as u8, - joint: punct.spacing == Spacing::Joint, - span: punct.id, - }) - } - tt::TokenTree::Subtree(subtree) => bridge::TokenTree::Group(bridge::Group { - delimiter: delim_to_external(subtree.delimiter), - stream: if subtree.token_trees.is_empty() { - None - } else { - Some(subtree.token_trees.into_iter().collect()) - }, - span: bridge::DelimSpan::from_single( - 
subtree.delimiter.map_or(Span::unspecified(), |del| del.id), - ), - }), - }) - .collect() - } -} - -fn delim_to_internal(d: bridge::Delimiter) -> Option { - let kind = match d { - bridge::Delimiter::Parenthesis => tt::DelimiterKind::Parenthesis, - bridge::Delimiter::Brace => tt::DelimiterKind::Brace, - bridge::Delimiter::Bracket => tt::DelimiterKind::Bracket, - bridge::Delimiter::None => return None, - }; - Some(tt::Delimiter { id: tt::TokenId::unspecified(), kind }) -} - -fn delim_to_external(d: Option) -> bridge::Delimiter { - match d.map(|it| it.kind) { - Some(tt::DelimiterKind::Parenthesis) => bridge::Delimiter::Parenthesis, - Some(tt::DelimiterKind::Brace) => bridge::Delimiter::Brace, - Some(tt::DelimiterKind::Bracket) => bridge::Delimiter::Bracket, - None => bridge::Delimiter::None, - } -} - -fn spacing_to_internal(spacing: bridge::Spacing) -> Spacing { - match spacing { - bridge::Spacing::Alone => Spacing::Alone, - bridge::Spacing::Joint => Spacing::Joint, - } -} - -fn spacing_to_external(spacing: Spacing) -> bridge::Spacing { - match spacing { - Spacing::Alone => bridge::Spacing::Alone, - Spacing::Joint => bridge::Spacing::Joint, - } -} - -impl server::Ident for RustAnalyzer { - fn new(&mut self, string: &str, span: Self::Span, _is_raw: bool) -> Self::Ident { - IdentId(self.ident_interner.intern(&IdentData(tt::Ident { text: string.into(), id: span }))) - } - - fn span(&mut self, ident: Self::Ident) -> Self::Span { - self.ident_interner.get(ident.0).0.id - } - fn with_span(&mut self, ident: Self::Ident, span: Self::Span) -> Self::Ident { - let data = self.ident_interner.get(ident.0); - let new = IdentData(tt::Ident { id: span, ..data.0.clone() }); - IdentId(self.ident_interner.intern(&new)) - } -} - -impl server::Literal for RustAnalyzer { - fn debug_kind(&mut self, _literal: &Self::Literal) -> String { - // r-a: debug_kind and suffix are unsupported; corresponding client code has been changed to not call these. - // They must still be present to be ABI-compatible and work with upstream proc_macro. - "".to_owned() - } - fn from_str(&mut self, s: &str) -> Result { - Ok(Literal { text: s.into(), id: tt::TokenId::unspecified() }) - } - fn symbol(&mut self, literal: &Self::Literal) -> String { - literal.text.to_string() - } - fn suffix(&mut self, _literal: &Self::Literal) -> Option { - None - } - - fn to_string(&mut self, literal: &Self::Literal) -> String { - literal.to_string() - } - - fn integer(&mut self, n: &str) -> Self::Literal { - let n = match n.parse::() { - Ok(n) => n.to_string(), - Err(_) => n.parse::().unwrap().to_string(), - }; - Literal { text: n.into(), id: tt::TokenId::unspecified() } - } - - fn typed_integer(&mut self, n: &str, kind: &str) -> Self::Literal { - macro_rules! def_suffixed_integer { - ($kind:ident, $($ty:ty),*) => { - match $kind { - $( - stringify!($ty) => { - let n: $ty = n.parse().unwrap(); - format!(concat!("{}", stringify!($ty)), n) - } - )* - _ => unimplemented!("unknown args for typed_integer: n {}, kind {}", n, $kind), - } - } - } - - let text = def_suffixed_integer! 
{kind, u8, u16, u32, u64, u128, usize, i8, i16, i32, i64, i128, isize}; - - Literal { text: text.into(), id: tt::TokenId::unspecified() } - } - - fn float(&mut self, n: &str) -> Self::Literal { - let n: f64 = n.parse().unwrap(); - let mut text = f64::to_string(&n); - if !text.contains('.') { - text += ".0" - } - Literal { text: text.into(), id: tt::TokenId::unspecified() } - } - - fn f32(&mut self, n: &str) -> Self::Literal { - let n: f32 = n.parse().unwrap(); - let text = format!("{}f32", n); - Literal { text: text.into(), id: tt::TokenId::unspecified() } - } - - fn f64(&mut self, n: &str) -> Self::Literal { - let n: f64 = n.parse().unwrap(); - let text = format!("{}f64", n); - Literal { text: text.into(), id: tt::TokenId::unspecified() } - } - - fn string(&mut self, string: &str) -> Self::Literal { - let mut escaped = String::new(); - for ch in string.chars() { - escaped.extend(ch.escape_debug()); - } - Literal { text: format!("\"{}\"", escaped).into(), id: tt::TokenId::unspecified() } - } - - fn character(&mut self, ch: char) -> Self::Literal { - Literal { text: format!("'{}'", ch).into(), id: tt::TokenId::unspecified() } - } - - fn byte_string(&mut self, bytes: &[u8]) -> Self::Literal { - let string = bytes - .iter() - .cloned() - .flat_map(ascii::escape_default) - .map(Into::::into) - .collect::(); - - Literal { text: format!("b\"{}\"", string).into(), id: tt::TokenId::unspecified() } - } - - fn span(&mut self, literal: &Self::Literal) -> Self::Span { - literal.id - } - - fn set_span(&mut self, literal: &mut Self::Literal, span: Self::Span) { - literal.id = span; - } - - fn subspan( - &mut self, - _literal: &Self::Literal, - _start: Bound, - _end: Bound, - ) -> Option { - // FIXME handle span - None - } -} - -impl server::SourceFile for RustAnalyzer { - // FIXME these are all stubs - fn eq(&mut self, _file1: &Self::SourceFile, _file2: &Self::SourceFile) -> bool { - true - } - fn path(&mut self, _file: &Self::SourceFile) -> String { - String::new() - } - fn is_real(&mut self, _file: &Self::SourceFile) -> bool { - true - } -} - -impl server::Diagnostic for RustAnalyzer { - fn new(&mut self, level: Level, msg: &str, spans: Self::MultiSpan) -> Self::Diagnostic { - let mut diag = Diagnostic::new(level, msg); - diag.spans = spans; - diag - } - - fn sub( - &mut self, - _diag: &mut Self::Diagnostic, - _level: Level, - _msg: &str, - _spans: Self::MultiSpan, - ) { - // FIXME handle diagnostic - // - } - - fn emit(&mut self, _diag: Self::Diagnostic) { - // FIXME handle diagnostic - // diag.emit() - } -} - -impl server::Span for RustAnalyzer { - fn debug(&mut self, span: Self::Span) -> String { - format!("{:?}", span.0) - } - fn source_file(&mut self, _span: Self::Span) -> Self::SourceFile { - SourceFile {} - } - fn save_span(&mut self, _span: Self::Span) -> usize { - // FIXME stub - 0 - } - fn recover_proc_macro_span(&mut self, _id: usize) -> Self::Span { - // FIXME stub - tt::TokenId::unspecified() - } - /// Recent feature, not yet in the proc_macro - /// - /// See PR: - /// https://github.com/rust-lang/rust/pull/55780 - fn source_text(&mut self, _span: Self::Span) -> Option { - None - } - - fn parent(&mut self, _span: Self::Span) -> Option { - // FIXME handle span - None - } - fn source(&mut self, span: Self::Span) -> Self::Span { - // FIXME handle span - span - } - fn start(&mut self, _span: Self::Span) -> LineColumn { - // FIXME handle span - LineColumn { line: 0, column: 0 } - } - fn end(&mut self, _span: Self::Span) -> LineColumn { - // FIXME handle span - LineColumn { line: 0, column: 0 } 
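The `string` and `byte_string` implementations above delegate all escaping to the standard library. A runnable illustration of the two routines they rely on, `char::escape_debug` and `std::ascii::escape_default`:

use std::ascii;

fn main() {
    // `escape_debug` is applied per char when building a string literal.
    let escaped: String = "he\"llo\n".chars().flat_map(char::escape_debug).collect();
    assert_eq!(escaped, "he\\\"llo\\n");

    // `ascii::escape_default` is applied per byte when building a byte string literal.
    let bytes: String = b"\x88ab"
        .iter()
        .copied()
        .flat_map(ascii::escape_default)
        .map(char::from)
        .collect();
    assert_eq!(bytes, "\\x88ab");
}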
- } - fn join(&mut self, first: Self::Span, _second: Self::Span) -> Option { - // Just return the first span again, because some macros will unwrap the result. - Some(first) - } - fn resolved_at(&mut self, _span: Self::Span, _at: Self::Span) -> Self::Span { - // FIXME handle span - tt::TokenId::unspecified() - } - - fn after(&mut self, _self_: Self::Span) -> Self::Span { - tt::TokenId::unspecified() - } - - fn before(&mut self, _self_: Self::Span) -> Self::Span { - tt::TokenId::unspecified() - } -} - -impl server::MultiSpan for RustAnalyzer { - fn new(&mut self) -> Self::MultiSpan { - // FIXME handle span - vec![] - } - - fn push(&mut self, other: &mut Self::MultiSpan, span: Self::Span) { - //TODP - other.push(span) - } -} - -impl server::Server for RustAnalyzer { - fn globals(&mut self) -> bridge::ExpnGlobals { - bridge::ExpnGlobals { - def_site: Span::unspecified(), - call_site: Span::unspecified(), - mixed_site: Span::unspecified(), - } - } -} - -#[cfg(test)] -mod tests { - use super::super::proc_macro::bridge::server::Literal; - use super::*; - - #[test] - fn test_ra_server_literals() { - let mut srv = RustAnalyzer { ident_interner: IdentInterner::default() }; - assert_eq!(srv.integer("1234").text, "1234"); - - assert_eq!(srv.typed_integer("12", "u8").text, "12u8"); - assert_eq!(srv.typed_integer("255", "u16").text, "255u16"); - assert_eq!(srv.typed_integer("1234", "u32").text, "1234u32"); - assert_eq!(srv.typed_integer("15846685", "u64").text, "15846685u64"); - assert_eq!(srv.typed_integer("15846685258", "u128").text, "15846685258u128"); - assert_eq!(srv.typed_integer("156788984", "usize").text, "156788984usize"); - assert_eq!(srv.typed_integer("127", "i8").text, "127i8"); - assert_eq!(srv.typed_integer("255", "i16").text, "255i16"); - assert_eq!(srv.typed_integer("1234", "i32").text, "1234i32"); - assert_eq!(srv.typed_integer("15846685", "i64").text, "15846685i64"); - assert_eq!(srv.typed_integer("15846685258", "i128").text, "15846685258i128"); - assert_eq!(srv.float("0").text, "0.0"); - assert_eq!(srv.float("15684.5867").text, "15684.5867"); - assert_eq!(srv.f32("15684.58").text, "15684.58f32"); - assert_eq!(srv.f64("15684.58").text, "15684.58f64"); - - assert_eq!(srv.string("hello_world").text, "\"hello_world\""); - assert_eq!(srv.character('c').text, "'c'"); - assert_eq!(srv.byte_string(b"1234586\x88").text, "b\"1234586\\x88\""); - - // u128::max - assert_eq!( - srv.integer("340282366920938463463374607431768211455").text, - "340282366920938463463374607431768211455" - ); - // i128::min - assert_eq!( - srv.integer("-170141183460469231731687303715884105728").text, - "-170141183460469231731687303715884105728" - ); - } - - #[test] - fn test_ra_server_to_string() { - let s = TokenStream { - token_trees: vec![ - tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { - text: "struct".into(), - id: tt::TokenId::unspecified(), - })), - tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { - text: "T".into(), - id: tt::TokenId::unspecified(), - })), - tt::TokenTree::Subtree(tt::Subtree { - delimiter: Some(tt::Delimiter { - id: tt::TokenId::unspecified(), - kind: tt::DelimiterKind::Brace, - }), - token_trees: vec![], - }), - ], - }; - - assert_eq!(s.to_string(), "struct T {}"); - } - - #[test] - fn test_ra_server_from_str() { - use std::str::FromStr; - let subtree_paren_a = tt::TokenTree::Subtree(tt::Subtree { - delimiter: Some(tt::Delimiter { - id: tt::TokenId::unspecified(), - kind: tt::DelimiterKind::Parenthesis, - }), - token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { - text: 
"a".into(), - id: tt::TokenId::unspecified(), - }))], - }); - - let t1 = TokenStream::from_str("(a)").unwrap(); - assert_eq!(t1.token_trees.len(), 1); - assert_eq!(t1.token_trees[0], subtree_paren_a); - - let t2 = TokenStream::from_str("(a);").unwrap(); - assert_eq!(t2.token_trees.len(), 2); - assert_eq!(t2.token_trees[0], subtree_paren_a); - - let underscore = TokenStream::from_str("_").unwrap(); - assert_eq!( - underscore.token_trees[0], - tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { - text: "_".into(), - id: tt::TokenId::unspecified(), - })) - ); - } -} diff --git a/crates/proc-macro-srv/src/abis/mod.rs b/crates/proc-macro-srv/src/abis/mod.rs index 705d09ea94..f7d3a30919 100644 --- a/crates/proc-macro-srv/src/abis/mod.rs +++ b/crates/proc-macro-srv/src/abis/mod.rs @@ -25,7 +25,6 @@ mod abi_1_58; mod abi_1_63; -mod abi_1_64; #[cfg(feature = "sysroot-abi")] mod abi_sysroot; @@ -34,12 +33,11 @@ include!(concat!(env!("OUT_DIR"), "/rustc_version.rs")); // Used by `test/utils.rs` #[cfg(test)] -pub(crate) use abi_1_64::TokenStream as TestTokenStream; +pub(crate) use abi_1_63::TokenStream as TestTokenStream; use super::dylib::LoadProcMacroDylibError; pub(crate) use abi_1_58::Abi as Abi_1_58; pub(crate) use abi_1_63::Abi as Abi_1_63; -pub(crate) use abi_1_64::Abi as Abi_1_64; #[cfg(feature = "sysroot-abi")] pub(crate) use abi_sysroot::Abi as Abi_Sysroot; use libloading::Library; @@ -58,7 +56,6 @@ impl PanicMessage { pub(crate) enum Abi { Abi1_58(Abi_1_58), Abi1_63(Abi_1_63), - Abi1_64(Abi_1_64), #[cfg(feature = "sysroot-abi")] AbiSysroot(Abi_Sysroot), } @@ -120,10 +117,6 @@ impl Abi { let inner = unsafe { Abi_1_63::from_lib(lib, symbol_name) }?; Ok(Abi::Abi1_63(inner)) } - (1, 64..) => { - let inner = unsafe { Abi_1_64::from_lib(lib, symbol_name) }?; - Ok(Abi::Abi1_64(inner)) - } _ => Err(LoadProcMacroDylibError::UnsupportedABI), } } @@ -137,7 +130,6 @@ impl Abi { match self { Self::Abi1_58(abi) => abi.expand(macro_name, macro_body, attributes), Self::Abi1_63(abi) => abi.expand(macro_name, macro_body, attributes), - Self::Abi1_64(abi) => abi.expand(macro_name, macro_body, attributes), #[cfg(feature = "sysroot-abi")] Self::AbiSysroot(abi) => abi.expand(macro_name, macro_body, attributes), } @@ -147,7 +139,6 @@ impl Abi { match self { Self::Abi1_58(abi) => abi.list_macros(), Self::Abi1_63(abi) => abi.list_macros(), - Self::Abi1_64(abi) => abi.list_macros(), #[cfg(feature = "sysroot-abi")] Self::AbiSysroot(abi) => abi.list_macros(), } From 25d4fbe9dae2b875ebfa81a28e10796475def74b Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Wed, 10 Aug 2022 10:23:46 +0200 Subject: [PATCH 0111/1945] Re-try build script building with --keep-going --- crates/project-model/src/build_scripts.rs | 35 ++++++++++++++++------- 1 file changed, 24 insertions(+), 11 deletions(-) diff --git a/crates/project-model/src/build_scripts.rs b/crates/project-model/src/build_scripts.rs index 4b00479a4c..84e772d168 100644 --- a/crates/project-model/src/build_scripts.rs +++ b/crates/project-model/src/build_scripts.rs @@ -39,7 +39,7 @@ pub(crate) struct BuildScriptOutput { } impl WorkspaceBuildScripts { - fn build_command(config: &CargoConfig, toolchain: &Option) -> Command { + fn build_command(config: &CargoConfig) -> Command { if let Some([program, args @ ..]) = config.run_build_script_command.as_deref() { let mut cmd = Command::new(program); cmd.args(args); @@ -71,15 +71,6 @@ impl WorkspaceBuildScripts { } } - const RUST_1_62: Version = Version::new(1, 62, 0); - - match toolchain { - Some(v) if v >= &RUST_1_62 => { - 
cmd.args(&["-Z", "unstable-options", "--keep-going"]).env("RUSTC_BOOTSTRAP", "1"); - } - _ => (), - } - cmd } @@ -89,8 +80,30 @@ impl WorkspaceBuildScripts { progress: &dyn Fn(String), toolchain: &Option, ) -> io::Result { - let mut cmd = Self::build_command(config, toolchain); + const RUST_1_62: Version = Version::new(1, 62, 0); + match Self::run_(Self::build_command(config), config, workspace, progress) { + Ok(WorkspaceBuildScripts { error: Some(error), .. }) + if toolchain.as_ref().map_or(false, |it| *it >= RUST_1_62) => + { + // building build scripts failed, attempt to build with --keep-going so + // that we potentially get more build data + let mut cmd = Self::build_command(config); + cmd.args(&["-Z", "unstable-options", "--keep-going"]).env("RUSTC_BOOTSTRAP", "1"); + let mut res = Self::run_(cmd, config, workspace, progress)?; + res.error = Some(error); + Ok(res) + } + res => res, + } + } + + fn run_( + mut cmd: Command, + config: &CargoConfig, + workspace: &CargoWorkspace, + progress: &dyn Fn(String), + ) -> io::Result { if config.wrap_rustc_in_build_scripts { // Setup RUSTC_WRAPPER to point to `rust-analyzer` binary itself. We use // that to compile only proc macros and build scripts during the initial From 23e17cd5819a21731122b4dc4975170e0efc4ca2 Mon Sep 17 00:00:00 2001 From: Edwin Cheng Date: Wed, 10 Aug 2022 16:28:56 +0800 Subject: [PATCH 0112/1945] Improve insert whitespace in mbe --- crates/hir-expand/src/builtin_fn_macro.rs | 8 ++++++-- crates/mbe/src/syntax_bridge.rs | 11 +---------- 2 files changed, 7 insertions(+), 12 deletions(-) diff --git a/crates/hir-expand/src/builtin_fn_macro.rs b/crates/hir-expand/src/builtin_fn_macro.rs index c21b35cdc0..8befa7f7da 100644 --- a/crates/hir-expand/src/builtin_fn_macro.rs +++ b/crates/hir-expand/src/builtin_fn_macro.rs @@ -251,9 +251,13 @@ fn format_args_expand( } for arg in &mut args { // Remove `key =`. 
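The check below must tell a named format argument (`key = value`) apart from an expression that merely contains `=`, most importantly a `==` comparison. A runnable illustration of the two cases that have to be kept apart:

fn main() {
    // Named argument: the leading `x =` is not part of the expression and is stripped.
    let s1 = format!("{x}", x = 1 + 1);
    // Comparison: `a == b` is one expression; its `==` must not be mistaken for `name =`.
    let a = 1;
    let b = 2;
    let s2 = format!("{}", a == b);
    assert_eq!((s1.as_str(), s2.as_str()), ("2", "false"));
}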
- if matches!(arg.token_trees.get(1), Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p))) if p.char == '=' && p.spacing != tt::Spacing::Joint) + if matches!(arg.token_trees.get(1), Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p))) if p.char == '=') { - arg.token_trees.drain(..2); + // but not with `==` + if !matches!(arg.token_trees.get(2), Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p))) if p.char == '=' ) + { + arg.token_trees.drain(..2); + } } } let _format_string = args.remove(0); diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs index aca6ecd424..e4c56565b9 100644 --- a/crates/mbe/src/syntax_bridge.rs +++ b/crates/mbe/src/syntax_bridge.rs @@ -228,16 +228,7 @@ fn convert_tokens(conv: &mut C) -> tt::Subtree { } let spacing = match conv.peek().map(|next| next.kind(conv)) { - Some(kind) - if !kind.is_trivia() - && kind.is_punct() - && kind != T!['['] - && kind != T!['{'] - && kind != T!['('] - && kind != UNDERSCORE => - { - tt::Spacing::Joint - } + Some(kind) if !kind.is_trivia() => tt::Spacing::Joint, _ => tt::Spacing::Alone, }; let char = match token.to_char(conv) { From c47914c6cfbce6e99bdeb5b8183b9655673b1110 Mon Sep 17 00:00:00 2001 From: Edwin Cheng Date: Wed, 10 Aug 2022 16:29:23 +0800 Subject: [PATCH 0113/1945] Fixes tests --- crates/hir-def/src/macro_expansion_tests/mbe.rs | 12 ++++++------ .../src/macro_expansion_tests/mbe/regression.rs | 4 ++-- .../src/macro_expansion_tests/mbe/tt_conversion.rs | 2 +- .../src/macro_expansion_tests/proc_macros.rs | 2 +- crates/hir-expand/src/fixup.rs | 14 +++++++------- .../src/handlers/add_missing_impl_members.rs | 2 +- crates/proc-macro-srv/src/tests/mod.rs | 6 +++--- 7 files changed, 21 insertions(+), 21 deletions(-) diff --git a/crates/hir-def/src/macro_expansion_tests/mbe.rs b/crates/hir-def/src/macro_expansion_tests/mbe.rs index 30d39d52f3..457e43925c 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe.rs @@ -885,7 +885,7 @@ macro_rules! m { ($t:ty) => ( fn bar() -> $ t {} ) } -fn bar() -> & 'a Baz {} +fn bar() -> &'a Baz {} fn bar() -> extern "Rust"fn() -> Ret {} "#]], @@ -1578,7 +1578,7 @@ macro_rules !register_methods { ($$($val: expr), *) = > { struct Foo; impl Foo { - $(fn $method()-> & 'static[u32] { + $(fn $method()-> &'static[u32] { &[$$($$val), *] } )* @@ -1591,10 +1591,10 @@ macro_rules !implement_methods { ($($val: expr), *) = > { struct Foo; impl Foo { - fn alpha()-> & 'static[u32] { + fn alpha()-> &'static[u32] { &[$($val), *] } - fn beta()-> & 'static[u32] { + fn beta()-> &'static[u32] { &[$($val), *] } } @@ -1602,10 +1602,10 @@ macro_rules !implement_methods { } struct Foo; impl Foo { - fn alpha() -> & 'static[u32] { + fn alpha() -> &'static[u32] { &[1, 2, 3] } - fn beta() -> & 'static[u32] { + fn beta() -> &'static[u32] { &[1, 2, 3] } } diff --git a/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs b/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs index 2dff4adf2e..d2505e7caf 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs @@ -166,7 +166,7 @@ macro_rules! int_base { } } #[stable(feature = "rust1", since = "1.0.0")] impl fmt::Binary for isize { - fn fmt(&self , f: &mut fmt::Formatter< '_>) -> fmt::Result { + fn fmt(&self , f: &mut fmt::Formatter<'_>) -> fmt::Result { Binary.fmt_int(*self as usize, f) } } @@ -724,7 +724,7 @@ macro_rules! 
delegate_impl { } } } -impl <> Data for & 'amut G where G: Data {} +impl <> Data for &'amut G where G: Data {} "##]], ); } diff --git a/crates/hir-def/src/macro_expansion_tests/mbe/tt_conversion.rs b/crates/hir-def/src/macro_expansion_tests/mbe/tt_conversion.rs index 0710b1ac3d..b8d2ca687c 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe/tt_conversion.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe/tt_conversion.rs @@ -78,7 +78,7 @@ m!(static bar: &'static str = "hello";); macro_rules! m { ($($t:tt)*) => { $($t)*} } -static bar: & 'static str = "hello"; +static bar: &'static str = "hello"; "#]], ); } diff --git a/crates/hir-def/src/macro_expansion_tests/proc_macros.rs b/crates/hir-def/src/macro_expansion_tests/proc_macros.rs index 72c44a0fbc..029821e5e8 100644 --- a/crates/hir-def/src/macro_expansion_tests/proc_macros.rs +++ b/crates/hir-def/src/macro_expansion_tests/proc_macros.rs @@ -87,7 +87,7 @@ fn foo() { bar.; blub } fn foo() { bar.; blub } fn foo() { - bar. ; + bar.; blub }"##]], ); diff --git a/crates/hir-expand/src/fixup.rs b/crates/hir-expand/src/fixup.rs index 46257b6bc4..893e6fe4b8 100644 --- a/crates/hir-expand/src/fixup.rs +++ b/crates/hir-expand/src/fixup.rs @@ -468,7 +468,7 @@ fn foo() { } "#, expect![[r#" -fn foo () {a . __ra_fixup} +fn foo () {a .__ra_fixup} "#]], ) } @@ -478,11 +478,11 @@ fn foo () {a . __ra_fixup} check( r#" fn foo() { - a. ; + a.; } "#, expect![[r#" -fn foo () {a . __ra_fixup ;} +fn foo () {a .__ra_fixup ;} "#]], ) } @@ -492,12 +492,12 @@ fn foo () {a . __ra_fixup ;} check( r#" fn foo() { - a. ; + a.; bar(); } "#, expect![[r#" -fn foo () {a . __ra_fixup ; bar () ;} +fn foo () {a .__ra_fixup ; bar () ;} "#]], ) } @@ -525,7 +525,7 @@ fn foo() { } "#, expect![[r#" -fn foo () {let x = a . __ra_fixup ;} +fn foo () {let x = a .__ra_fixup ;} "#]], ) } @@ -541,7 +541,7 @@ fn foo() { } "#, expect![[r#" -fn foo () {a . b ; bar () ;} +fn foo () {a .b ; bar () ;} "#]], ) } diff --git a/crates/ide-assists/src/handlers/add_missing_impl_members.rs b/crates/ide-assists/src/handlers/add_missing_impl_members.rs index c808c010c6..62cf5ab4f3 100644 --- a/crates/ide-assists/src/handlers/add_missing_impl_members.rs +++ b/crates/ide-assists/src/handlers/add_missing_impl_members.rs @@ -944,7 +944,7 @@ foo!(); struct Foo(usize); impl FooB for Foo { - $0fn foo< 'lt>(& 'lt self){} + $0fn foo<'lt>(&'lt self){} } "#, ) diff --git a/crates/proc-macro-srv/src/tests/mod.rs b/crates/proc-macro-srv/src/tests/mod.rs index 07222907f0..6339d56d01 100644 --- a/crates/proc-macro-srv/src/tests/mod.rs +++ b/crates/proc-macro-srv/src/tests/mod.rs @@ -19,7 +19,7 @@ fn test_derive_error() { expect![[r##" SUBTREE $ IDENT compile_error 4294967295 - PUNCH ! [alone] 4294967295 + PUNCH ! [joint] 4294967295 SUBTREE () 4294967295 LITERAL "#[derive(DeriveError)] struct S ;" 4294967295 PUNCH ; [alone] 4294967295"##]], @@ -109,7 +109,7 @@ fn test_fn_like_macro_clone_literals() { PUNCH , [alone] 4294967295 LITERAL 2_u32 4294967295 PUNCH , [alone] 4294967295 - PUNCH - [alone] 4294967295 + PUNCH - [joint] 4294967295 LITERAL 4i64 4294967295 PUNCH , [alone] 4294967295 LITERAL 3.14f32 4294967295 @@ -130,7 +130,7 @@ fn test_attr_macro() { expect![[r##" SUBTREE $ IDENT compile_error 4294967295 - PUNCH ! [alone] 4294967295 + PUNCH ! 
[joint] 4294967295 SUBTREE () 4294967295 LITERAL "#[attr_error(some arguments)] mod m {}" 4294967295 PUNCH ; [alone] 4294967295"##]], From 7c568961fd0e1d4ccce04797acfbb07e5f30b551 Mon Sep 17 00:00:00 2001 From: Tiddo Langerak Date: Tue, 9 Aug 2022 09:41:56 +0300 Subject: [PATCH 0114/1945] Feat: extracted method from trait impl is placed in existing impl Previously, when triggering a method extraction from within a trait impl block, then this would always create a new impl block for the struct, even if there already is one. Now, it'll put the extracted method in the matching existing block if it exists. --- .../src/handlers/extract_function.rs | 272 +++++++++++++++++- 1 file changed, 268 insertions(+), 4 deletions(-) diff --git a/crates/ide-assists/src/handlers/extract_function.rs b/crates/ide-assists/src/handlers/extract_function.rs index 52a55ead3a..40d0327ef7 100644 --- a/crates/ide-assists/src/handlers/extract_function.rs +++ b/crates/ide-assists/src/handlers/extract_function.rs @@ -109,8 +109,6 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op let params = body.extracted_function_params(ctx, &container_info, locals_used.iter().copied()); - let extracted_from_trait_impl = body.extracted_from_trait_impl(); - let name = make_function_name(&semantics_scope); let fun = Function { @@ -129,8 +127,13 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op builder.replace(target_range, make_call(ctx, &fun, old_indent)); + let has_impl_wrapper = insert_after + .ancestors() + .find(|a| a.kind() == SyntaxKind::IMPL && a != &insert_after) + .is_some(); + let fn_def = match fun.self_param_adt(ctx) { - Some(adt) if extracted_from_trait_impl => { + Some(adt) if anchor == Anchor::Method && !has_impl_wrapper => { let fn_def = format_function(ctx, module, &fun, old_indent, new_indent + 1); generate_impl_text(&adt, &fn_def).replace("{\n\n", "{") } @@ -271,7 +274,7 @@ enum FunType { } /// Where to put extracted function definition -#[derive(Debug)] +#[derive(Debug, Eq, PartialEq, Clone, Copy)] enum Anchor { /// Extract free function and put right after current top-level function Freestanding, @@ -1244,6 +1247,15 @@ fn node_to_insert_after(body: &FunctionBody, anchor: Anchor) -> Option break, + SyntaxKind::IMPL => { + if body.extracted_from_trait_impl() && matches!(anchor, Anchor::Method) { + let impl_node = find_non_trait_impl(&next_ancestor); + let target_node = impl_node.as_ref().and_then(last_impl_member); + if target_node.is_some() { + return target_node; + } + } + } SyntaxKind::ITEM_LIST if !matches!(anchor, Anchor::Freestanding) => continue, SyntaxKind::ITEM_LIST => { if ancestors.peek().map(SyntaxNode::kind) == Some(SyntaxKind::MODULE) { @@ -1264,6 +1276,28 @@ fn node_to_insert_after(body: &FunctionBody, anchor: Anchor) -> Option Option { + let impl_type = Some(impl_type_name(trait_impl)?); + + let mut sibblings = trait_impl.parent()?.children(); + sibblings.find(|s| impl_type_name(s) == impl_type && !is_trait_impl(s)) +} + +fn last_impl_member(impl_node: &SyntaxNode) -> Option { + impl_node.children().find(|c| c.kind() == SyntaxKind::ASSOC_ITEM_LIST)?.last_child() +} + +fn is_trait_impl(node: &SyntaxNode) -> bool { + match ast::Impl::cast(node.clone()) { + Some(c) => c.trait_().is_some(), + None => false, + } +} + +fn impl_type_name(impl_node: &SyntaxNode) -> Option { + Some(ast::Impl::cast(impl_node.clone())?.self_ty()?.to_string()) +} + fn make_call(ctx: &AssistContext<'_>, fun: &Function, indent: IndentLevel) -> String { let ret_ty = 
fun.return_type(ctx); @@ -5058,6 +5092,236 @@ impl Struct { ); } + #[test] + fn extract_method_from_trait_with_existing_non_empty_impl_block() { + check_assist( + extract_function, + r#" +struct Struct(i32); +trait Trait { + fn bar(&self) -> i32; +} + +impl Struct { + fn foo() {} +} + +impl Trait for Struct { + fn bar(&self) -> i32 { + $0self.0 + 2$0 + } +} +"#, + r#" +struct Struct(i32); +trait Trait { + fn bar(&self) -> i32; +} + +impl Struct { + fn foo() {} + + fn $0fun_name(&self) -> i32 { + self.0 + 2 + } +} + +impl Trait for Struct { + fn bar(&self) -> i32 { + self.fun_name() + } +} +"#, + ) + } + + #[test] + fn extract_function_from_trait_with_existing_non_empty_impl_block() { + check_assist( + extract_function, + r#" +struct Struct(i32); +trait Trait { + fn bar(&self) -> i32; +} + +impl Struct { + fn foo() {} +} + +impl Trait for Struct { + fn bar(&self) -> i32 { + let three_squared = $03 * 3$0; + self.0 + three_squared + } +} +"#, + r#" +struct Struct(i32); +trait Trait { + fn bar(&self) -> i32; +} + +impl Struct { + fn foo() {} +} + +impl Trait for Struct { + fn bar(&self) -> i32 { + let three_squared = fun_name(); + self.0 + three_squared + } +} + +fn $0fun_name() -> i32 { + 3 * 3 +} +"#, + ) + } + + #[test] + fn extract_method_from_trait_with_multiple_existing_impl_blocks() { + check_assist( + extract_function, + r#" +struct Struct(i32); +struct StructBefore(i32); +struct StructAfter(i32); +trait Trait { + fn bar(&self) -> i32; +} + +impl StructBefore { + fn foo(){} +} + +impl Struct { + fn foo(){} +} + +impl StructAfter { + fn foo(){} +} + +impl Trait for Struct { + fn bar(&self) -> i32 { + $0self.0 + 2$0 + } +} +"#, + r#" +struct Struct(i32); +struct StructBefore(i32); +struct StructAfter(i32); +trait Trait { + fn bar(&self) -> i32; +} + +impl StructBefore { + fn foo(){} +} + +impl Struct { + fn foo(){} + + fn $0fun_name(&self) -> i32 { + self.0 + 2 + } +} + +impl StructAfter { + fn foo(){} +} + +impl Trait for Struct { + fn bar(&self) -> i32 { + self.fun_name() + } +} +"#, + ) + } + + #[test] + fn extract_method_from_trait_with_multiple_existing_trait_impl_blocks() { + check_assist( + extract_function, + r#" +struct Struct(i32); +trait Trait { + fn bar(&self) -> i32; +} +trait TraitBefore { + fn before(&self) -> i32; +} +trait TraitAfter { + fn after(&self) -> i32; +} + +impl TraitBefore for Struct { + fn before(&self) -> i32 { + 42 + } +} + +impl Struct { + fn foo(){} +} + +impl TraitAfter for Struct { + fn after(&self) -> i32 { + 42 + } +} + +impl Trait for Struct { + fn bar(&self) -> i32 { + $0self.0 + 2$0 + } +} +"#, + r#" +struct Struct(i32); +trait Trait { + fn bar(&self) -> i32; +} +trait TraitBefore { + fn before(&self) -> i32; +} +trait TraitAfter { + fn after(&self) -> i32; +} + +impl TraitBefore for Struct { + fn before(&self) -> i32 { + 42 + } +} + +impl Struct { + fn foo(){} + + fn $0fun_name(&self) -> i32 { + self.0 + 2 + } +} + +impl TraitAfter for Struct { + fn after(&self) -> i32 { + 42 + } +} + +impl Trait for Struct { + fn bar(&self) -> i32 { + self.fun_name() + } +} +"#, + ) + } + #[test] fn closure_arguments() { check_assist( From ffc6b42901fc1f64c812df571110e665de75da45 Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Wed, 10 Aug 2022 19:17:13 +0900 Subject: [PATCH 0115/1945] fix: infer byte string pattern as `&[u8]` when matched against slices --- crates/hir-ty/src/infer/pat.rs | 33 ++++++++++++++++++--- crates/hir-ty/src/tests/patterns.rs | 45 +++++++++++++++++++++++++++++ 2 files changed, 74 insertions(+), 4 deletions(-) diff --git 
a/crates/hir-ty/src/infer/pat.rs b/crates/hir-ty/src/infer/pat.rs index 5e7320a5dd..53259d66de 100644 --- a/crates/hir-ty/src/infer/pat.rs +++ b/crates/hir-ty/src/infer/pat.rs @@ -14,8 +14,9 @@ use crate::{ consteval::intern_const_scalar, infer::{BindingMode, Expectation, InferenceContext, TypeMismatch}, lower::lower_to_chalk_mutability, - static_lifetime, ConcreteConst, ConstValue, Interner, Substitution, Ty, TyBuilder, TyExt, - TyKind, + primitive::UintTy, + static_lifetime, ConcreteConst, ConstValue, Interner, Scalar, Substitution, Ty, TyBuilder, + TyExt, TyKind, }; use super::PatLike; @@ -294,7 +295,29 @@ impl<'a> InferenceContext<'a> { let start_ty = self.infer_expr(*start, &Expectation::has_type(expected.clone())); self.infer_expr(*end, &Expectation::has_type(start_ty)) } - Pat::Lit(expr) => self.infer_expr(*expr, &Expectation::has_type(expected.clone())), + &Pat::Lit(expr) => { + // FIXME: using `Option` here is a workaround until we can use if-let chains in stable. + let mut pat_ty = None; + + // Like slice patterns, byte string patterns can denote both `&[u8; N]` and `&[u8]`. + if let Expr::Literal(Literal::ByteString(_)) = self.body[expr] { + if let Some((inner, ..)) = expected.as_reference() { + let inner = self.resolve_ty_shallow(inner); + if matches!(inner.kind(Interner), TyKind::Slice(_)) { + let elem_ty = TyKind::Scalar(Scalar::Uint(UintTy::U8)).intern(Interner); + let slice_ty = TyKind::Slice(elem_ty).intern(Interner); + let ty = TyKind::Ref(Mutability::Not, static_lifetime(), slice_ty) + .intern(Interner); + self.write_expr_ty(expr, ty.clone()); + pat_ty = Some(ty); + } + } + } + + pat_ty.unwrap_or_else(|| { + self.infer_expr(expr, &Expectation::has_type(expected.clone())) + }) + } Pat::Box { inner } => match self.resolve_boxed_box() { Some(box_adt) => { let (inner_ty, alloc_ty) = match expected.as_adt() { @@ -343,7 +366,9 @@ fn is_non_ref_pat(body: &hir_def::body::Body, pat: PatId) -> bool { // FIXME: ConstBlock/Path/Lit might actually evaluate to ref, but inference is unimplemented. Pat::Path(..) => true, Pat::ConstBlock(..) => true, - Pat::Lit(expr) => !matches!(body[*expr], Expr::Literal(Literal::String(..))), + Pat::Lit(expr) => { + !matches!(body[*expr], Expr::Literal(Literal::String(..) | Literal::ByteString(..))) + } Pat::Bind { mode: BindingAnnotation::Mutable | BindingAnnotation::Unannotated, subpat: Some(subpat), diff --git a/crates/hir-ty/src/tests/patterns.rs b/crates/hir-ty/src/tests/patterns.rs index 399553356b..94efe7bc11 100644 --- a/crates/hir-ty/src/tests/patterns.rs +++ b/crates/hir-ty/src/tests/patterns.rs @@ -315,6 +315,51 @@ fn infer_pattern_match_string_literal() { ); } +#[test] +fn infer_pattern_match_byte_string_literal() { + check_infer_with_mismatches( + r#" + //- minicore: index + struct S; + impl core::ops::Index for [T; N] { + type Output = [u8]; + fn index(&self, index: core::ops::RangeFull) -> &Self::Output { + loop {} + } + } + fn test(v: [u8; 3]) { + if let b"foo" = &v[S] {} + if let b"foo" = &v {} + } + "#, + expect![[r#" + 105..109 'self': &[T; N] + 111..116 'index': {unknown} + 157..180 '{ ... }': &[u8] + 167..174 'loop {}': ! + 172..174 '{}': () + 191..192 'v': [u8; 3] + 203..261 '{ ...v {} }': () + 209..233 'if let...[S] {}': () + 212..230 'let b"... &v[S]': bool + 216..222 'b"foo"': &[u8] + 216..222 'b"foo"': &[u8] + 225..230 '&v[S]': &[u8] + 226..227 'v': [u8; 3] + 226..230 'v[S]': [u8] + 228..229 'S': S + 231..233 '{}': () + 238..259 'if let... 
&v {}': () + 241..256 'let b"foo" = &v': bool + 245..251 'b"foo"': &[u8; 3] + 245..251 'b"foo"': &[u8; 3] + 254..256 '&v': &[u8; 3] + 255..256 'v': [u8; 3] + 257..259 '{}': () + "#]], + ); +} + #[test] fn infer_pattern_match_or() { check_infer_with_mismatches( From 7402366877b3f50563cba4ba919cdc88c6220d14 Mon Sep 17 00:00:00 2001 From: Tiddo Langerak Date: Wed, 10 Aug 2022 17:03:15 +0300 Subject: [PATCH 0116/1945] Avoid cloning IMPL node if we don't have to --- crates/ide-assists/src/handlers/extract_function.rs | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/crates/ide-assists/src/handlers/extract_function.rs b/crates/ide-assists/src/handlers/extract_function.rs index 40d0327ef7..969b0c1fb3 100644 --- a/crates/ide-assists/src/handlers/extract_function.rs +++ b/crates/ide-assists/src/handlers/extract_function.rs @@ -1288,6 +1288,9 @@ fn last_impl_member(impl_node: &SyntaxNode) -> Option { } fn is_trait_impl(node: &SyntaxNode) -> bool { + if !ast::Impl::can_cast(node.kind()) { + return false; + } match ast::Impl::cast(node.clone()) { Some(c) => c.trait_().is_some(), None => false, @@ -1295,6 +1298,9 @@ fn is_trait_impl(node: &SyntaxNode) -> bool { } fn impl_type_name(impl_node: &SyntaxNode) -> Option { + if !ast::Impl::can_cast(impl_node.kind()) { + return None; + } Some(ast::Impl::cast(impl_node.clone())?.self_ty()?.to_string()) } From 53ec791dc6b22e581f9e5aabeda5edfc806564cb Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Mon, 1 Aug 2022 14:20:05 +0900 Subject: [PATCH 0117/1945] Add `UnescapedName` and make `Name` hold escaped name --- crates/hir-expand/src/name.rs | 54 +++++++++++++++++++++++++++++++++-- 1 file changed, 52 insertions(+), 2 deletions(-) diff --git a/crates/hir-expand/src/name.rs b/crates/hir-expand/src/name.rs index 47d191822d..18b0793f10 100644 --- a/crates/hir-expand/src/name.rs +++ b/crates/hir-expand/src/name.rs @@ -7,6 +7,10 @@ use syntax::{ast, SmolStr, SyntaxKind}; /// `Name` is a wrapper around string, which is used in hir for both references /// and declarations. In theory, names should also carry hygiene info, but we are /// not there yet! +/// +/// Note that `Name` holds and prints escaped name i.e. prefixed with "r#" when it +/// is a raw identifier. Use [`unescaped()`][Name::unescaped] when you need the +/// name without "r#". #[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] pub struct Name(Repr); @@ -14,6 +18,10 @@ pub struct Name(Repr); #[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] pub struct EscapedName<'a>(&'a Name); +/// Wrapper of `Name` to print the name without "r#" even when it is a raw identifier. +#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] +pub struct UnescapedName<'a>(&'a Name); + #[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] enum Repr { Text(SmolStr), @@ -49,6 +57,35 @@ impl<'a> fmt::Display for EscapedName<'a> { } } +impl<'a> fmt::Display for UnescapedName<'a> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match &self.0 .0 { + Repr::Text(text) => { + let text = text.strip_prefix("r#").unwrap_or(text); + fmt::Display::fmt(&text, f) + } + Repr::TupleField(idx) => fmt::Display::fmt(&idx, f), + } + } +} + +impl<'a> UnescapedName<'a> { + /// Returns the textual representation of this name as a [`SmolStr`]. Prefer using this over + /// [`ToString::to_string`] if possible as this conversion is cheaper in the general case. 
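Coming back to the inference fix a few hunks above (byte string patterns matched against slices): at the language level a byte-string literal pattern can match both an array reference and a slice reference, which is the behaviour the new special case mirrors. A small runnable example:

fn main() {
    let v: [u8; 3] = *b"foo";

    // Matched against a reference to the array, the pattern has type &[u8; 3].
    if let b"foo" = &v {
        println!("matched array");
    }

    // Matched against a slice, the pattern is now inferred as &[u8]
    // instead of producing a type mismatch.
    if let b"foo" = &v[..] {
        println!("matched slice");
    }
}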
+ pub fn to_smol_str(&self) -> SmolStr { + match &self.0 .0 { + Repr::Text(it) => { + if let Some(stripped) = it.strip_prefix("r#") { + SmolStr::new(stripped) + } else { + it.clone() + } + } + Repr::TupleField(it) => SmolStr::new(&it.to_string()), + } + } +} + impl<'a> EscapedName<'a> { pub fn is_escaped(&self) -> bool { match &self.0 .0 { @@ -97,9 +134,11 @@ impl Name { /// Resolve a name from the text of token. fn resolve(raw_text: &str) -> Name { + // When `raw_text` starts with "r#" but the name does not coincide with any + // keyword, we never need the prefix so we strip it. match raw_text.strip_prefix("r#") { - Some(text) => Name::new_text(SmolStr::new(text)), - None => Name::new_text(raw_text.into()), + Some(text) if !is_raw_identifier(text) => Name::new_text(SmolStr::new(text)), + _ => Name::new_text(raw_text.into()), } } @@ -145,6 +184,17 @@ impl Name { pub fn escaped(&self) -> EscapedName<'_> { EscapedName(self) } + + pub fn unescaped(&self) -> UnescapedName<'_> { + UnescapedName(self) + } + + pub fn is_escaped(&self) -> bool { + match &self.0 { + Repr::Text(it) => it.starts_with("r#"), + Repr::TupleField(_) => false, + } + } } pub trait AsName { From 4322cf7f5b4cc31ae88320add185184f5d58abb7 Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Mon, 1 Aug 2022 15:19:49 +0900 Subject: [PATCH 0118/1945] Remove `EscapedName` --- crates/hir-expand/src/mod_path.rs | 4 +- crates/hir-expand/src/name.rs | 48 ------------------- .../src/completions/item_list/trait_impl.rs | 3 +- crates/ide-completion/src/render.rs | 8 ++-- crates/ide-completion/src/render/const_.rs | 2 +- crates/ide-completion/src/render/function.rs | 6 +-- crates/ide-completion/src/render/macro_.rs | 2 +- crates/ide-completion/src/render/pattern.rs | 8 ++-- .../ide-completion/src/render/type_alias.rs | 4 +- .../src/render/union_literal.rs | 10 ++-- crates/ide-completion/src/render/variant.rs | 4 +- 11 files changed, 26 insertions(+), 73 deletions(-) diff --git a/crates/hir-expand/src/mod_path.rs b/crates/hir-expand/src/mod_path.rs index fea09521e8..ed9d902988 100644 --- a/crates/hir-expand/src/mod_path.rs +++ b/crates/hir-expand/src/mod_path.rs @@ -134,9 +134,9 @@ impl ModPath { } first_segment = false; if escaped { - segment.escaped().fmt(f)? - } else { segment.fmt(f)? + } else { + segment.unescaped().fmt(f)? }; } Ok(()) diff --git a/crates/hir-expand/src/name.rs b/crates/hir-expand/src/name.rs index 18b0793f10..87c663eec8 100644 --- a/crates/hir-expand/src/name.rs +++ b/crates/hir-expand/src/name.rs @@ -14,10 +14,6 @@ use syntax::{ast, SmolStr, SyntaxKind}; #[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] pub struct Name(Repr); -/// `EscapedName` will add a prefix "r#" to the wrapped `Name` when it is a raw identifier -#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] -pub struct EscapedName<'a>(&'a Name); - /// Wrapper of `Name` to print the name without "r#" even when it is a raw identifier. 
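The convention `UnescapedName` introduces is that `Name`'s `Display` now prints the escaped form (with `r#`) while `unescaped()` strips it, and the prefix is only warranted when the text collides with a keyword other than the path keywords. A standalone sketch of that rule (plain Rust, with a deliberately abbreviated keyword list standing in for `SyntaxKind::from_keyword`; illustrative only):

    fn is_raw_identifier(name: &str) -> bool {
        // Abbreviated stand-in for `SyntaxKind::from_keyword(name).is_some()`.
        const KEYWORDS: &[&str] = &["fn", "struct", "mod", "async", "self", "super", "crate", "Self"];
        KEYWORDS.contains(&name) && !matches!(name, "self" | "crate" | "super" | "Self")
    }

    fn escaped(name: &str) -> String {
        if is_raw_identifier(name) { format!("r#{name}") } else { name.to_owned() }
    }

    fn main() {
        assert_eq!(escaped("async"), "r#async"); // keyword: needs the r# prefix
        assert_eq!(escaped("bar"), "bar");       // ordinary identifier: unchanged
        assert_eq!(escaped("self"), "self");     // path keyword: can never be raw
        println!("ok");
    }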
#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] pub struct UnescapedName<'a>(&'a Name); @@ -42,21 +38,6 @@ fn is_raw_identifier(name: &str) -> bool { is_keyword && !matches!(name, "self" | "crate" | "super" | "Self") } -impl<'a> fmt::Display for EscapedName<'a> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match &self.0 .0 { - Repr::Text(text) => { - if is_raw_identifier(text) { - write!(f, "r#{}", &text) - } else { - fmt::Display::fmt(&text, f) - } - } - Repr::TupleField(idx) => fmt::Display::fmt(&idx, f), - } - } -} - impl<'a> fmt::Display for UnescapedName<'a> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match &self.0 .0 { @@ -86,31 +67,6 @@ impl<'a> UnescapedName<'a> { } } -impl<'a> EscapedName<'a> { - pub fn is_escaped(&self) -> bool { - match &self.0 .0 { - Repr::Text(it) => is_raw_identifier(&it), - Repr::TupleField(_) => false, - } - } - - /// Returns the textual representation of this name as a [`SmolStr`]. - /// Prefer using this over [`ToString::to_string`] if possible as this conversion is cheaper in - /// the general case. - pub fn to_smol_str(&self) -> SmolStr { - match &self.0 .0 { - Repr::Text(it) => { - if is_raw_identifier(&it) { - SmolStr::from_iter(["r#", &it]) - } else { - it.clone() - } - } - Repr::TupleField(it) => SmolStr::new(&it.to_string()), - } - } -} - impl Name { /// Note: this is private to make creating name from random string hard. /// Hopefully, this should allow us to integrate hygiene cleaner in the @@ -181,10 +137,6 @@ impl Name { } } - pub fn escaped(&self) -> EscapedName<'_> { - EscapedName(self) - } - pub fn unescaped(&self) -> UnescapedName<'_> { UnescapedName(self) } diff --git a/crates/ide-completion/src/completions/item_list/trait_impl.rs b/crates/ide-completion/src/completions/item_list/trait_impl.rs index e9256803cc..785db6fde1 100644 --- a/crates/ide-completion/src/completions/item_list/trait_impl.rs +++ b/crates/ide-completion/src/completions/item_list/trait_impl.rs @@ -233,7 +233,8 @@ fn add_type_alias_impl( type_alias: hir::TypeAlias, ) { let alias_name = type_alias.name(ctx.db); - let (alias_name, escaped_name) = (alias_name.to_smol_str(), alias_name.escaped().to_smol_str()); + let (alias_name, escaped_name) = + (alias_name.unescaped().to_smol_str(), alias_name.to_smol_str()); let label = format!("type {} =", alias_name); let replacement = format!("type {} = ", escaped_name); diff --git a/crates/ide-completion/src/render.rs b/crates/ide-completion/src/render.rs index 946134b0ff..6c466777a1 100644 --- a/crates/ide-completion/src/render.rs +++ b/crates/ide-completion/src/render.rs @@ -117,7 +117,7 @@ pub(crate) fn render_field( ) -> CompletionItem { let is_deprecated = ctx.is_deprecated(field); let name = field.name(ctx.db()); - let (name, escaped_name) = (name.to_smol_str(), name.escaped().to_smol_str()); + let (name, escaped_name) = (name.unescaped().to_smol_str(), name.to_smol_str()); let mut item = CompletionItem::new( SymbolKind::Field, ctx.source_range(), @@ -283,8 +283,8 @@ fn render_resolution_path( let name = local_name.to_smol_str(); let mut item = render_resolution_simple_(ctx, &local_name, import_to_add, resolution); - if local_name.escaped().is_escaped() { - item.insert_text(local_name.escaped().to_smol_str()); + if local_name.is_escaped() { + item.insert_text(local_name.to_smol_str()); } // Add `<>` for generic types let type_path_no_ty_args = matches!( @@ -306,7 +306,7 @@ fn render_resolution_path( item.lookup_by(name.clone()) .label(SmolStr::from_iter([&name, "<…>"])) 
.trigger_call_info() - .insert_snippet(cap, format!("{}<$0>", local_name.escaped())); + .insert_snippet(cap, format!("{}<$0>", local_name)); } } } diff --git a/crates/ide-completion/src/render/const_.rs b/crates/ide-completion/src/render/const_.rs index a810eef18d..93ea825e00 100644 --- a/crates/ide-completion/src/render/const_.rs +++ b/crates/ide-completion/src/render/const_.rs @@ -13,7 +13,7 @@ pub(crate) fn render_const(ctx: RenderContext<'_>, const_: hir::Const) -> Option fn render(ctx: RenderContext<'_>, const_: hir::Const) -> Option { let db = ctx.db(); let name = const_.name(db)?; - let (name, escaped_name) = (name.to_smol_str(), name.escaped().to_smol_str()); + let (name, escaped_name) = (name.unescaped().to_smol_str(), name.to_smol_str()); let detail = const_.display(db).to_string(); let mut item = CompletionItem::new(SymbolKind::Const, ctx.source_range(), name.clone()); diff --git a/crates/ide-completion/src/render/function.rs b/crates/ide-completion/src/render/function.rs index 4b5535718c..2a90703b09 100644 --- a/crates/ide-completion/src/render/function.rs +++ b/crates/ide-completion/src/render/function.rs @@ -52,10 +52,10 @@ fn render( let (call, escaped_call) = match &func_kind { FuncKind::Method(_, Some(receiver)) => ( - format!("{}.{}", receiver, &name).into(), - format!("{}.{}", receiver.escaped(), name.escaped()).into(), + format!("{}.{}", receiver.unescaped(), name.unescaped()).into(), + format!("{}.{}", receiver, name).into(), ), - _ => (name.to_smol_str(), name.escaped().to_smol_str()), + _ => (name.unescaped().to_smol_str(), name.to_smol_str()), }; let mut item = CompletionItem::new( if func.self_param(db).is_some() { diff --git a/crates/ide-completion/src/render/macro_.rs b/crates/ide-completion/src/render/macro_.rs index ca2269f139..eabd0bd17d 100644 --- a/crates/ide-completion/src/render/macro_.rs +++ b/crates/ide-completion/src/render/macro_.rs @@ -46,7 +46,7 @@ fn render( ctx.source_range() }; - let (name, escaped_name) = (name.to_smol_str(), name.escaped().to_smol_str()); + let (name, escaped_name) = (name.unescaped().to_smol_str(), name.to_smol_str()); let docs = ctx.docs(macro_); let docs_str = docs.as_ref().map(Documentation::as_str).unwrap_or_default(); let is_fn_like = macro_.is_fn_like(completion.db); diff --git a/crates/ide-completion/src/render/pattern.rs b/crates/ide-completion/src/render/pattern.rs index 34a384f2f7..4f4c064027 100644 --- a/crates/ide-completion/src/render/pattern.rs +++ b/crates/ide-completion/src/render/pattern.rs @@ -31,7 +31,7 @@ pub(crate) fn render_struct_pat( } let name = local_name.unwrap_or_else(|| strukt.name(ctx.db())); - let (name, escaped_name) = (name.to_smol_str(), name.escaped().to_smol_str()); + let (name, escaped_name) = (name.unescaped().to_smol_str(), name.to_smol_str()); let kind = strukt.kind(ctx.db()); let label = format_literal_label(name.as_str(), kind); let pat = render_pat(&ctx, pattern_ctx, &escaped_name, kind, &visible_fields, fields_omitted)?; @@ -56,7 +56,7 @@ pub(crate) fn render_variant_pat( Some(path) => (path.to_string().into(), path.escaped().to_string().into()), None => { let name = local_name.unwrap_or_else(|| variant.name(ctx.db())); - (name.to_smol_str(), name.escaped().to_smol_str()) + (name.unescaped().to_smol_str(), name.to_smol_str()) } }; @@ -146,7 +146,7 @@ fn render_record_as_pat( format!( "{name} {{ {}{} }}", fields.enumerate().format_with(", ", |(idx, field), f| { - f(&format_args!("{}${}", field.name(db).escaped(), idx + 1)) + f(&format_args!("{}${}", field.name(db), idx + 1)) }), if 
fields_omitted { ", .." } else { "" }, name = name @@ -155,7 +155,7 @@ fn render_record_as_pat( None => { format!( "{name} {{ {}{} }}", - fields.map(|field| field.name(db).escaped().to_smol_str()).format(", "), + fields.map(|field| field.name(db).to_smol_str()).format(", "), if fields_omitted { ", .." } else { "" }, name = name ) diff --git a/crates/ide-completion/src/render/type_alias.rs b/crates/ide-completion/src/render/type_alias.rs index f1b23c76e7..de919429f2 100644 --- a/crates/ide-completion/src/render/type_alias.rs +++ b/crates/ide-completion/src/render/type_alias.rs @@ -32,11 +32,11 @@ fn render( let name = type_alias.name(db); let (name, escaped_name) = if with_eq { ( + SmolStr::from_iter([&name.unescaped().to_smol_str(), " = "]), SmolStr::from_iter([&name.to_smol_str(), " = "]), - SmolStr::from_iter([&name.escaped().to_smol_str(), " = "]), ) } else { - (name.to_smol_str(), name.escaped().to_smol_str()) + (name.unescaped().to_smol_str(), name.to_smol_str()) }; let detail = type_alias.display(db).to_string(); diff --git a/crates/ide-completion/src/render/union_literal.rs b/crates/ide-completion/src/render/union_literal.rs index 9c9540a9bd..b04c8e3bf9 100644 --- a/crates/ide-completion/src/render/union_literal.rs +++ b/crates/ide-completion/src/render/union_literal.rs @@ -22,7 +22,7 @@ pub(crate) fn render_union_literal( let (qualified_name, escaped_qualified_name) = match path { Some(p) => (p.to_string(), p.escaped().to_string()), - None => (name.to_string(), name.escaped().to_string()), + None => (name.unescaped().to_string(), name.to_string()), }; let mut item = CompletionItem::new( @@ -42,15 +42,15 @@ pub(crate) fn render_union_literal( format!( "{} {{ ${{1|{}|}}: ${{2:()}} }}$0", escaped_qualified_name, - fields.iter().map(|field| field.name(ctx.db()).escaped().to_smol_str()).format(",") + fields.iter().map(|field| field.name(ctx.db()).to_smol_str()).format(",") ) } else { format!( "{} {{ {} }}", escaped_qualified_name, - fields.iter().format_with(", ", |field, f| { - f(&format_args!("{}: ()", field.name(ctx.db()).escaped())) - }) + fields + .iter() + .format_with(", ", |field, f| { f(&format_args!("{}: ()", field.name(ctx.db()))) }) ) }; diff --git a/crates/ide-completion/src/render/variant.rs b/crates/ide-completion/src/render/variant.rs index 003a0c11ed..664845330e 100644 --- a/crates/ide-completion/src/render/variant.rs +++ b/crates/ide-completion/src/render/variant.rs @@ -24,9 +24,9 @@ pub(crate) fn render_record_lit( ) -> RenderedLiteral { let completions = fields.iter().enumerate().format_with(", ", |(idx, field), f| { if snippet_cap.is_some() { - f(&format_args!("{}: ${{{}:()}}", field.name(db).escaped(), idx + 1)) + f(&format_args!("{}: ${{{}:()}}", field.name(db), idx + 1)) } else { - f(&format_args!("{}: ()", field.name(db).escaped())) + f(&format_args!("{}: ()", field.name(db))) } }); From 8fe73a224005a153c89a40243e71a021e9304958 Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Mon, 1 Aug 2022 15:20:14 +0900 Subject: [PATCH 0119/1945] Make tests pass --- crates/hir-def/src/nameres/mod_resolution.rs | 2 ++ crates/hir-def/src/nameres/tests/mod_resolution.rs | 4 ++-- crates/ide-completion/src/render.rs | 3 ++- crates/ide-completion/src/render/function.rs | 2 +- 4 files changed, 7 insertions(+), 4 deletions(-) diff --git a/crates/hir-def/src/nameres/mod_resolution.rs b/crates/hir-def/src/nameres/mod_resolution.rs index 52a620fe22..99f7f1b549 100644 --- a/crates/hir-def/src/nameres/mod_resolution.rs +++ b/crates/hir-def/src/nameres/mod_resolution.rs @@ -73,10 +73,12 @@ 
impl ModDir { candidate_files.push(self.dir_path.join_attr(attr_path, self.root_non_dir_owner)) } None if file_id.is_include_macro(db.upcast()) => { + let name = name.unescaped(); candidate_files.push(format!("{}.rs", name)); candidate_files.push(format!("{}/mod.rs", name)); } None => { + let name = name.unescaped(); candidate_files.push(format!("{}{}.rs", self.dir_path.0, name)); candidate_files.push(format!("{}{}/mod.rs", self.dir_path.0, name)); } diff --git a/crates/hir-def/src/nameres/tests/mod_resolution.rs b/crates/hir-def/src/nameres/tests/mod_resolution.rs index 79a74873b4..3fa585574d 100644 --- a/crates/hir-def/src/nameres/tests/mod_resolution.rs +++ b/crates/hir-def/src/nameres/tests/mod_resolution.rs @@ -132,9 +132,9 @@ pub struct Bar; expect![[r#" crate Bar: t v - async: t + r#async: t - crate::async + crate::r#async Bar: t v "#]], ); diff --git a/crates/ide-completion/src/render.rs b/crates/ide-completion/src/render.rs index 6c466777a1..693007ca30 100644 --- a/crates/ide-completion/src/render.rs +++ b/crates/ide-completion/src/render.rs @@ -342,7 +342,8 @@ fn render_resolution_simple_( let ctx = ctx.import_to_add(import_to_add); let kind = res_to_kind(resolution); - let mut item = CompletionItem::new(kind, ctx.source_range(), local_name.to_smol_str()); + let mut item = + CompletionItem::new(kind, ctx.source_range(), local_name.unescaped().to_smol_str()); item.set_relevance(ctx.completion_relevance()) .set_documentation(scope_def_docs(db, resolution)) .set_deprecated(scope_def_is_deprecated(&ctx, resolution)); diff --git a/crates/ide-completion/src/render/function.rs b/crates/ide-completion/src/render/function.rs index 2a90703b09..9cf6469129 100644 --- a/crates/ide-completion/src/render/function.rs +++ b/crates/ide-completion/src/render/function.rs @@ -96,7 +96,7 @@ fn render( item.set_documentation(ctx.docs(func)) .set_deprecated(ctx.is_deprecated(func) || ctx.is_deprecated_assoc_item(func)) .detail(detail(db, func)) - .lookup_by(name.to_smol_str()); + .lookup_by(name.unescaped().to_smol_str()); match ctx.completion.config.snippet_cap { Some(cap) => { From 018266a7ff6d06747e798afdf84faadb7567d179 Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Thu, 11 Aug 2022 02:17:15 +0900 Subject: [PATCH 0120/1945] Make `ModPath` display escaped path --- crates/hir-expand/src/mod_path.rs | 12 ++++++------ crates/ide-completion/src/render/literal.rs | 2 +- crates/ide-completion/src/render/pattern.rs | 2 +- crates/ide-completion/src/render/union_literal.rs | 2 +- 4 files changed, 9 insertions(+), 9 deletions(-) diff --git a/crates/hir-expand/src/mod_path.rs b/crates/hir-expand/src/mod_path.rs index ed9d902988..d0f73ec820 100644 --- a/crates/hir-expand/src/mod_path.rs +++ b/crates/hir-expand/src/mod_path.rs @@ -22,7 +22,7 @@ pub struct ModPath { } #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub struct EscapedModPath<'a>(&'a ModPath); +pub struct UnescapedModPath<'a>(&'a ModPath); #[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] pub enum PathKind { @@ -102,8 +102,8 @@ impl ModPath { } } - pub fn escaped(&self) -> EscapedModPath<'_> { - EscapedModPath(self) + pub fn unescaped(&self) -> UnescapedModPath<'_> { + UnescapedModPath(self) } fn _fmt(&self, f: &mut fmt::Formatter<'_>, escaped: bool) -> fmt::Result { @@ -145,13 +145,13 @@ impl ModPath { impl Display for ModPath { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self._fmt(f, false) + self._fmt(f, true) } } -impl<'a> Display for EscapedModPath<'a> { +impl<'a> Display for 
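The switch to `name.unescaped()` in `mod_resolution.rs` above matters because a declaration such as `mod r#async;` must resolve to `async.rs` or `async/mod.rs` on disk: the `r#` is a surface-syntax escape, not part of the file name. A simplified standalone sketch of the candidate-path computation (illustrative only, not the actual `ModDir` code):

    fn candidate_files(dir_path: &str, unescaped_name: &str) -> Vec<String> {
        // For `mod r#async;` declared under `src/`, the unescaped name is "async",
        // so the candidates are "src/async.rs" and "src/async/mod.rs".
        vec![
            format!("{dir_path}{unescaped_name}.rs"),
            format!("{dir_path}{unescaped_name}/mod.rs"),
        ]
    }

    fn main() {
        assert_eq!(candidate_files("src/", "async"), ["src/async.rs", "src/async/mod.rs"]);
        println!("ok");
    }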
UnescapedModPath<'a> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.0._fmt(f, true) + self.0._fmt(f, false) } } diff --git a/crates/ide-completion/src/render/literal.rs b/crates/ide-completion/src/render/literal.rs index 91a253f8fc..707dea206b 100644 --- a/crates/ide-completion/src/render/literal.rs +++ b/crates/ide-completion/src/render/literal.rs @@ -73,7 +73,7 @@ fn render( None => (name.clone().into(), name.into(), false), }; let (qualified_name, escaped_qualified_name) = - (qualified_name.to_string(), qualified_name.escaped().to_string()); + (qualified_name.unescaped().to_string(), qualified_name.to_string()); let snippet_cap = ctx.snippet_cap(); let mut rendered = match kind { diff --git a/crates/ide-completion/src/render/pattern.rs b/crates/ide-completion/src/render/pattern.rs index 4f4c064027..1c1299e33b 100644 --- a/crates/ide-completion/src/render/pattern.rs +++ b/crates/ide-completion/src/render/pattern.rs @@ -53,7 +53,7 @@ pub(crate) fn render_variant_pat( let (visible_fields, fields_omitted) = visible_fields(ctx.completion, &fields, variant)?; let (name, escaped_name) = match path { - Some(path) => (path.to_string().into(), path.escaped().to_string().into()), + Some(path) => (path.unescaped().to_string().into(), path.to_string().into()), None => { let name = local_name.unwrap_or_else(|| variant.name(ctx.db())); (name.unescaped().to_smol_str(), name.to_smol_str()) diff --git a/crates/ide-completion/src/render/union_literal.rs b/crates/ide-completion/src/render/union_literal.rs index b04c8e3bf9..bb32330f27 100644 --- a/crates/ide-completion/src/render/union_literal.rs +++ b/crates/ide-completion/src/render/union_literal.rs @@ -21,7 +21,7 @@ pub(crate) fn render_union_literal( let name = local_name.unwrap_or_else(|| un.name(ctx.db())); let (qualified_name, escaped_qualified_name) = match path { - Some(p) => (p.to_string(), p.escaped().to_string()), + Some(p) => (p.unescaped().to_string(), p.to_string()), None => (name.unescaped().to_string(), name.to_string()), }; From ba6db3e9b054902549a0cf487cd7f69db2654aa1 Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Sat, 30 Jul 2022 22:28:34 +0900 Subject: [PATCH 0121/1945] Add test for runnables with raw identifiers --- crates/ide/src/runnables.rs | 186 ++++++++++++++++++++++++++++++++++++ 1 file changed, 186 insertions(+) diff --git a/crates/ide/src/runnables.rs b/crates/ide/src/runnables.rs index b0853b10fd..0181c6b8e4 100644 --- a/crates/ide/src/runnables.rs +++ b/crates/ide/src/runnables.rs @@ -2225,4 +2225,190 @@ macro_rules! 
foo { "#]], ); } + + #[test] + fn test_paths_with_raw_ident() { + check( + r#" +//- /lib.rs +$0 +mod r#mod { + #[test] + fn r#fn() {} + + /// ``` + /// ``` + fn r#for() {} + + /// ``` + /// ``` + struct r#struct(r#type); + + /// ``` + /// ``` + impl r#struct { + /// ``` + /// ``` + fn r#fn() {} + } + + enum r#enum {} + impl r#struct { + /// ``` + /// ``` + fn r#fn() {} + } + + trait r#trait {} + + /// ``` + /// ``` + impl r#trait for r#struct {} +} +"#, + &[TestMod, Test, DocTest, DocTest, DocTest, DocTest, DocTest, DocTest], + expect![[r#" + [ + Runnable { + use_name_in_title: false, + nav: NavigationTarget { + file_id: FileId( + 0, + ), + full_range: 1..461, + focus_range: 5..10, + name: "r#mod", + kind: Module, + description: "mod r#mod", + }, + kind: TestMod { + path: "r#mod", + }, + cfg: None, + }, + Runnable { + use_name_in_title: false, + nav: NavigationTarget { + file_id: FileId( + 0, + ), + full_range: 17..41, + focus_range: 32..36, + name: "r#fn", + kind: Function, + }, + kind: Test { + test_id: Path( + "r#mod::r#fn", + ), + attr: TestAttr { + ignore: false, + }, + }, + cfg: None, + }, + Runnable { + use_name_in_title: false, + nav: NavigationTarget { + file_id: FileId( + 0, + ), + full_range: 47..84, + name: "r#for", + }, + kind: DocTest { + test_id: Path( + "r#mod::r#for", + ), + }, + cfg: None, + }, + Runnable { + use_name_in_title: false, + nav: NavigationTarget { + file_id: FileId( + 0, + ), + full_range: 90..146, + name: "r#struct", + }, + kind: DocTest { + test_id: Path( + "r#mod::r#struct", + ), + }, + cfg: None, + }, + Runnable { + use_name_in_title: false, + nav: NavigationTarget { + file_id: FileId( + 0, + ), + full_range: 152..266, + focus_range: 189..205, + name: "impl", + kind: Impl, + }, + kind: DocTest { + test_id: Path( + "r#struct", + ), + }, + cfg: None, + }, + Runnable { + use_name_in_title: false, + nav: NavigationTarget { + file_id: FileId( + 0, + ), + full_range: 216..260, + name: "r#fn", + }, + kind: DocTest { + test_id: Path( + "r#mod::r#struct::r#fn", + ), + }, + cfg: None, + }, + Runnable { + use_name_in_title: false, + nav: NavigationTarget { + file_id: FileId( + 0, + ), + full_range: 323..367, + name: "r#fn", + }, + kind: DocTest { + test_id: Path( + "r#mod::r#struct::r#fn", + ), + }, + cfg: None, + }, + Runnable { + use_name_in_title: false, + nav: NavigationTarget { + file_id: FileId( + 0, + ), + full_range: 401..459, + focus_range: 445..456, + name: "impl", + kind: Impl, + }, + kind: DocTest { + test_id: Path( + "r#struct", + ), + }, + cfg: None, + }, + ] + "#]], + ) + } } From 1bb58205f0b09ccdb1a8915cfdede35bd2c09da4 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Thu, 11 Aug 2022 10:41:30 +0200 Subject: [PATCH 0122/1945] Fix panic in no_such_field when using tuple fields on record structs --- crates/hir/src/source_analyzer.rs | 1 + .../src/handlers/no_such_field.rs | 18 ++++++++++++++++-- 2 files changed, 17 insertions(+), 2 deletions(-) diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs index f5e2e44307..ae2896e193 100644 --- a/crates/hir/src/source_analyzer.rs +++ b/crates/hir/src/source_analyzer.rs @@ -368,6 +368,7 @@ impl SourceAnalyzer { let local = if field.name_ref().is_some() { None } else { + // Shorthand syntax, resolve to the local let path = ModPath::from_segments(PathKind::Plain, once(local_name.clone())); match self.resolver.resolve_path_in_value_ns_fully(db.upcast(), &path) { Some(ValueNs::LocalBinding(pat_id)) => { diff --git a/crates/ide-diagnostics/src/handlers/no_such_field.rs 
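The `// Shorthand syntax, resolve to the local` comment added in `source_analyzer.rs` above refers to record expressions like `Foo { bar }`, where the field position doubles as a path to a local binding. A minimal standalone example of the syntax in question (plain Rust, illustrative only):

    struct Foo { bar: i32 }

    fn main() {
        let bar = 1;
        // Shorthand: `bar` is both the field name and a reference to the local `bar`.
        let shorthand = Foo { bar };
        // Equivalent longhand form:
        let longhand = Foo { bar: bar };
        assert_eq!(shorthand.bar, longhand.bar);
    }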
b/crates/ide-diagnostics/src/handlers/no_such_field.rs index e032c578f0..a80299106b 100644 --- a/crates/ide-diagnostics/src/handlers/no_such_field.rs +++ b/crates/ide-diagnostics/src/handlers/no_such_field.rs @@ -68,7 +68,7 @@ fn missing_record_expr_field_fixes( } let new_field = make::record_field( None, - make::name(&record_expr_field.field_name()?.text()), + make::name(&record_expr_field.field_name()?.ident_token()?.text()), make::ty(&new_field_type.display_source_code(sema.db, module.into()).ok()?), ); @@ -109,7 +109,7 @@ fn missing_record_expr_field_fixes( #[cfg(test)] mod tests { - use crate::tests::{check_diagnostics, check_fix}; + use crate::tests::{check_diagnostics, check_fix, check_no_fix}; #[test] fn no_such_field_diagnostics() { @@ -277,6 +277,20 @@ struct Foo { bar: i32, pub(crate) baz: bool } +"#, + ) + } + + #[test] + fn test_tuple_field_on_record_struct() { + check_no_fix( + r#" +struct Struct {} +fn main() { + Struct { + 0$0: 0 + } +} "#, ) } From 1ce978370bd190d952d0d31bb09d934895278238 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Fri, 12 Aug 2022 18:19:48 +0300 Subject: [PATCH 0123/1945] Use Node 16 in CI workflows --- .github/workflows/ci.yaml | 2 +- .github/workflows/release.yaml | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 0c81ff0789..a70252fa65 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -101,7 +101,7 @@ jobs: - name: Install Nodejs uses: actions/setup-node@v1 with: - node-version: 14.x + node-version: 16.x - name: Install xvfb if: matrix.os == 'ubuntu-latest' diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index ca8eb1309d..d5d8df2345 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -68,7 +68,7 @@ jobs: - name: Install Node.js uses: actions/setup-node@v1 with: - node-version: 14.x + node-version: 16.x - name: Update apt repositories if: matrix.target == 'aarch64-unknown-linux-gnu' || matrix.target == 'arm-unknown-linux-gnueabihf' @@ -133,7 +133,7 @@ jobs: container: image: rust:alpine volumes: - - /usr/local/cargo/registry + - /usr/local/cargo/registry:/usr/local/cargo/registry steps: - name: Install dependencies @@ -176,7 +176,7 @@ jobs: - name: Install Nodejs uses: actions/setup-node@v1 with: - node-version: 14.x + node-version: 16.x - run: echo "TAG=$(date --iso -u)" >> $GITHUB_ENV if: github.ref == 'refs/heads/release' From 19da03291d5253f658e29dbe916e41b4c0cfefee Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Fri, 12 Aug 2022 18:22:14 +0300 Subject: [PATCH 0124/1945] Upgrade npm lockfile --- editors/code/package-lock.json | 3974 +++++++++++++++++++++++++++++++- 1 file changed, 3962 insertions(+), 12 deletions(-) diff --git a/editors/code/package-lock.json b/editors/code/package-lock.json index 0436681b1a..3ff4b6897a 100644 --- a/editors/code/package-lock.json +++ b/editors/code/package-lock.json @@ -1,8 +1,3956 @@ { "name": "rust-analyzer", "version": "0.5.0-dev", - "lockfileVersion": 1, + "lockfileVersion": 2, "requires": true, + "packages": { + "": { + "name": "rust-analyzer", + "version": "0.5.0-dev", + "license": "MIT OR Apache-2.0", + "dependencies": { + "d3": "^7.6.1", + "d3-graphviz": "^4.1.1", + "vscode-languageclient": "^8.0.0-next.14" + }, + "devDependencies": { + "@types/node": "~16.11.7", + "@types/vscode": "~1.66.0", + "@typescript-eslint/eslint-plugin": "^5.30.5", + "@typescript-eslint/parser": "^5.30.5", + 
"@vscode/test-electron": "^2.1.5", + "cross-env": "^7.0.3", + "esbuild": "^0.14.48", + "eslint": "^8.19.0", + "eslint-config-prettier": "^8.5.0", + "ovsx": "^0.5.1", + "prettier": "^2.7.1", + "tslib": "^2.4.0", + "typescript": "^4.7.4", + "vsce": "^2.9.2" + }, + "engines": { + "vscode": "^1.66.0" + } + }, + "node_modules/@eslint/eslintrc": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-1.3.0.tgz", + "integrity": "sha512-UWW0TMTmk2d7hLcWD1/e2g5HDM/HQ3csaLSqXCfqwh4uNDuNqlaKWXmEsL4Cs41Z0KnILNvwbHAah3C2yt06kw==", + "dev": true, + "dependencies": { + "ajv": "^6.12.4", + "debug": "^4.3.2", + "espree": "^9.3.2", + "globals": "^13.15.0", + "ignore": "^5.2.0", + "import-fresh": "^3.2.1", + "js-yaml": "^4.1.0", + "minimatch": "^3.1.2", + "strip-json-comments": "^3.1.1" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, + "node_modules/@hpcc-js/wasm": { + "version": "1.12.8", + "resolved": "https://registry.npmjs.org/@hpcc-js/wasm/-/wasm-1.12.8.tgz", + "integrity": "sha512-n4q9ARKco2hpCLsuVaW6Az3cDVaua7B3DSONHkc49WtEzgY/btvcDG5Zr1P6PZDv0sQ7oPnAi9Y+W2DI++MgcQ==", + "dependencies": { + "yargs": "^17.3.1" + }, + "bin": { + "dot-wasm": "bin/cli.js" + } + }, + "node_modules/@humanwhocodes/config-array": { + "version": "0.9.5", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.9.5.tgz", + "integrity": "sha512-ObyMyWxZiCu/yTisA7uzx81s40xR2fD5Cg/2Kq7G02ajkNubJf6BopgDTmDyc3U7sXpNKM8cYOw7s7Tyr+DnCw==", + "dev": true, + "dependencies": { + "@humanwhocodes/object-schema": "^1.2.1", + "debug": "^4.1.1", + "minimatch": "^3.0.4" + }, + "engines": { + "node": ">=10.10.0" + } + }, + "node_modules/@humanwhocodes/object-schema": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz", + "integrity": "sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==", + "dev": true + }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@tootallnate/once": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-1.1.2.tgz", + "integrity": "sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw==", + "dev": true, + "engines": { + "node": ">= 6" + } + }, + "node_modules/@types/json-schema": { + "version": "7.0.11", + "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.11.tgz", + 
"integrity": "sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ==", + "dev": true + }, + "node_modules/@types/node": { + "version": "16.11.43", + "resolved": "https://registry.npmjs.org/@types/node/-/node-16.11.43.tgz", + "integrity": "sha512-GqWykok+3uocgfAJM8imbozrqLnPyTrpFlrryURQlw1EesPUCx5XxTiucWDSFF9/NUEXDuD4bnvHm8xfVGWTpQ==", + "dev": true + }, + "node_modules/@types/vscode": { + "version": "1.66.0", + "resolved": "https://registry.npmjs.org/@types/vscode/-/vscode-1.66.0.tgz", + "integrity": "sha512-ZfJck4M7nrGasfs4A4YbUoxis3Vu24cETw3DERsNYtDZmYSYtk6ljKexKFKhImO/ZmY6ZMsmegu2FPkXoUFImA==", + "dev": true + }, + "node_modules/@typescript-eslint/eslint-plugin": { + "version": "5.30.5", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-5.30.5.tgz", + "integrity": "sha512-lftkqRoBvc28VFXEoRgyZuztyVUQ04JvUnATSPtIRFAccbXTWL6DEtXGYMcbg998kXw1NLUJm7rTQ9eUt+q6Ig==", + "dev": true, + "dependencies": { + "@typescript-eslint/scope-manager": "5.30.5", + "@typescript-eslint/type-utils": "5.30.5", + "@typescript-eslint/utils": "5.30.5", + "debug": "^4.3.4", + "functional-red-black-tree": "^1.0.1", + "ignore": "^5.2.0", + "regexpp": "^3.2.0", + "semver": "^7.3.7", + "tsutils": "^3.21.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "@typescript-eslint/parser": "^5.0.0", + "eslint": "^6.0.0 || ^7.0.0 || ^8.0.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/parser": { + "version": "5.30.5", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-5.30.5.tgz", + "integrity": "sha512-zj251pcPXI8GO9NDKWWmygP6+UjwWmrdf9qMW/L/uQJBM/0XbU2inxe5io/234y/RCvwpKEYjZ6c1YrXERkK4Q==", + "dev": true, + "dependencies": { + "@typescript-eslint/scope-manager": "5.30.5", + "@typescript-eslint/types": "5.30.5", + "@typescript-eslint/typescript-estree": "5.30.5", + "debug": "^4.3.4" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^6.0.0 || ^7.0.0 || ^8.0.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/scope-manager": { + "version": "5.30.5", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-5.30.5.tgz", + "integrity": "sha512-NJ6F+YHHFT/30isRe2UTmIGGAiXKckCyMnIV58cE3JkHmaD6e5zyEYm5hBDv0Wbin+IC0T1FWJpD3YqHUG/Ydg==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "5.30.5", + "@typescript-eslint/visitor-keys": "5.30.5" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/type-utils": { + "version": "5.30.5", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-5.30.5.tgz", + "integrity": "sha512-k9+ejlv1GgwN1nN7XjVtyCgE0BTzhzT1YsQF0rv4Vfj2U9xnslBgMYYvcEYAFVdvhuEscELJsB7lDkN7WusErw==", + "dev": true, + "dependencies": { + "@typescript-eslint/utils": "5.30.5", + "debug": "^4.3.4", + "tsutils": "^3.21.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "type": "opencollective", + "url": 
"https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "*" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/types": { + "version": "5.30.5", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-5.30.5.tgz", + "integrity": "sha512-kZ80w/M2AvsbRvOr3PjaNh6qEW1LFqs2pLdo2s5R38B2HYXG8Z0PP48/4+j1QHJFL3ssHIbJ4odPRS8PlHrFfw==", + "dev": true, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/typescript-estree": { + "version": "5.30.5", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-5.30.5.tgz", + "integrity": "sha512-qGTc7QZC801kbYjAr4AgdOfnokpwStqyhSbiQvqGBLixniAKyH+ib2qXIVo4P9NgGzwyfD9I0nlJN7D91E1VpQ==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "5.30.5", + "@typescript-eslint/visitor-keys": "5.30.5", + "debug": "^4.3.4", + "globby": "^11.1.0", + "is-glob": "^4.0.3", + "semver": "^7.3.7", + "tsutils": "^3.21.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/utils": { + "version": "5.30.5", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-5.30.5.tgz", + "integrity": "sha512-o4SSUH9IkuA7AYIfAvatldovurqTAHrfzPApOZvdUq01hHojZojCFXx06D/aFpKCgWbMPRdJBWAC3sWp3itwTA==", + "dev": true, + "dependencies": { + "@types/json-schema": "^7.0.9", + "@typescript-eslint/scope-manager": "5.30.5", + "@typescript-eslint/types": "5.30.5", + "@typescript-eslint/typescript-estree": "5.30.5", + "eslint-scope": "^5.1.1", + "eslint-utils": "^3.0.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^6.0.0 || ^7.0.0 || ^8.0.0" + } + }, + "node_modules/@typescript-eslint/visitor-keys": { + "version": "5.30.5", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-5.30.5.tgz", + "integrity": "sha512-D+xtGo9HUMELzWIUqcQc0p2PO4NyvTrgIOK/VnSH083+8sq0tiLozNRKuLarwHYGRuA6TVBQSuuLwJUDWd3aaA==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "5.30.5", + "eslint-visitor-keys": "^3.3.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@vscode/test-electron": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@vscode/test-electron/-/test-electron-2.1.5.tgz", + "integrity": "sha512-O/ioqFpV+RvKbRykX2ItYPnbcZ4Hk5V0rY4uhQjQTLhGL9WZUvS7exzuYQCCI+ilSqJpctvxq2llTfGXf9UnnA==", + "dev": true, + "dependencies": { + "http-proxy-agent": "^4.0.1", + "https-proxy-agent": "^5.0.0", + "rimraf": "^3.0.2", + "unzipper": "^0.10.11" + }, + "engines": { + "node": ">=8.9.3" + } + }, + "node_modules/acorn": { + "version": "8.7.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.7.1.tgz", + "integrity": "sha512-Xx54uLJQZ19lKygFXOWsscKUbsBZW0CPykPhVQdhIeIwrbPmJzqeASDInc8nKBnp/JT6igTs82qPXz069H8I/A==", + "dev": true, + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + 
"node": ">=0.4.0" + } + }, + "node_modules/acorn-jsx": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", + "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", + "dev": true, + "peerDependencies": { + "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" + } + }, + "node_modules/agent-base": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", + "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", + "dev": true, + "dependencies": { + "debug": "4" + }, + "engines": { + "node": ">= 6.0.0" + } + }, + "node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true + }, + "node_modules/array-union": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", + "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/azure-devops-node-api": { + "version": "11.2.0", + "resolved": "https://registry.npmjs.org/azure-devops-node-api/-/azure-devops-node-api-11.2.0.tgz", + "integrity": "sha512-XdiGPhrpaT5J8wdERRKs5g8E0Zy1pvOYTli7z9E8nmOn3YGp4FhtjhrOyFmX/8veWCwdI69mCHKJw6l+4J/bHA==", + "dev": true, + "dependencies": { + "tunnel": "0.0.6", + "typed-rest-client": "^1.8.4" + } + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" + }, + "node_modules/base64-js": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": 
"https://feross.org/support" + } + ] + }, + "node_modules/big-integer": { + "version": "1.6.51", + "resolved": "https://registry.npmjs.org/big-integer/-/big-integer-1.6.51.tgz", + "integrity": "sha512-GPEid2Y9QU1Exl1rpO9B2IPJGHPSupF5GnVIP0blYvNOMer2bTvSWs1jGOUg04hTmu67nmLsQ9TBo1puaotBHg==", + "dev": true, + "engines": { + "node": ">=0.6" + } + }, + "node_modules/binary": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/binary/-/binary-0.3.0.tgz", + "integrity": "sha512-D4H1y5KYwpJgK8wk1Cue5LLPgmwHKYSChkbspQg5JtVuR5ulGckxfR62H3AE9UDkdMC8yyXlqYihuz3Aqg2XZg==", + "dev": true, + "dependencies": { + "buffers": "~0.1.1", + "chainsaw": "~0.1.0" + }, + "engines": { + "node": "*" + } + }, + "node_modules/bl": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz", + "integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==", + "dev": true, + "dependencies": { + "buffer": "^5.5.0", + "inherits": "^2.0.4", + "readable-stream": "^3.4.0" + } + }, + "node_modules/bl/node_modules/readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "dev": true, + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/bluebird": { + "version": "3.4.7", + "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.4.7.tgz", + "integrity": "sha512-iD3898SR7sWVRHbiQv+sHUtHnMvC1o3nW5rAcqnq3uOn07DSAppZYUkIGslDz6gXC7HfunPe7YVBgoEJASPcHA==", + "dev": true + }, + "node_modules/boolbase": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz", + "integrity": "sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==", + "dev": true + }, + "node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/braces": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "dev": true, + "dependencies": { + "fill-range": "^7.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/buffer": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", + "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.1.13" + } + }, + "node_modules/buffer-crc32": { + "version": "0.2.13", + "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz", + "integrity": "sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ==", + "dev": true, + "engines": { + "node": "*" + } + }, 
+ "node_modules/buffer-indexof-polyfill": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/buffer-indexof-polyfill/-/buffer-indexof-polyfill-1.0.2.tgz", + "integrity": "sha512-I7wzHwA3t1/lwXQh+A5PbNvJxgfo5r3xulgpYDB5zckTu/Z9oUK9biouBKQUjEqzaz3HnAT6TYoovmE+GqSf7A==", + "dev": true, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/buffers": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/buffers/-/buffers-0.1.1.tgz", + "integrity": "sha512-9q/rDEGSb/Qsvv2qvzIzdluL5k7AaJOTrw23z9reQthrbF7is4CtlT0DXyO1oei2DCp4uojjzQ7igaSHp1kAEQ==", + "dev": true, + "engines": { + "node": ">=0.2.0" + } + }, + "node_modules/call-bind": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", + "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", + "dev": true, + "dependencies": { + "function-bind": "^1.1.1", + "get-intrinsic": "^1.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/chainsaw": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/chainsaw/-/chainsaw-0.1.0.tgz", + "integrity": "sha512-75kWfWt6MEKNC8xYXIdRpDehRYY/tNSgwKaJq+dbbDcxORuVrrQ+SEHoWsniVn9XPYfP4gmdWIeDk/4YNp1rNQ==", + "dev": true, + "dependencies": { + "traverse": ">=0.3.0 <0.4" + }, + "engines": { + "node": "*" + } + }, + "node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/cheerio": { + "version": "1.0.0-rc.12", + "resolved": "https://registry.npmjs.org/cheerio/-/cheerio-1.0.0-rc.12.tgz", + "integrity": "sha512-VqR8m68vM46BNnuZ5NtnGBKIE/DfN0cRIzg9n40EIq9NOv90ayxLBXA8fXC5gquFRGJSTRqBq25Jt2ECLR431Q==", + "dev": true, + "dependencies": { + "cheerio-select": "^2.1.0", + "dom-serializer": "^2.0.0", + "domhandler": "^5.0.3", + "domutils": "^3.0.1", + "htmlparser2": "^8.0.1", + "parse5": "^7.0.0", + "parse5-htmlparser2-tree-adapter": "^7.0.0" + }, + "engines": { + "node": ">= 6" + }, + "funding": { + "url": "https://github.com/cheeriojs/cheerio?sponsor=1" + } + }, + "node_modules/cheerio-select": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/cheerio-select/-/cheerio-select-2.1.0.tgz", + "integrity": "sha512-9v9kG0LvzrlcungtnJtpGNxY+fzECQKhK4EGJX2vByejiMX84MFNQw4UxPJl3bFbTMw+Dfs37XaIkCwTZfLh4g==", + "dev": true, + "dependencies": { + "boolbase": "^1.0.0", + "css-select": "^5.1.0", + "css-what": "^6.1.0", + "domelementtype": "^2.3.0", + "domhandler": "^5.0.3", + "domutils": "^3.0.1" + }, + "funding": { + "url": "https://github.com/sponsors/fb55" + } + }, + "node_modules/chownr": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.4.tgz", + "integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==", + "dev": true + }, + "node_modules/ci-info": { + "version": 
"2.0.0", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-2.0.0.tgz", + "integrity": "sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ==", + "dev": true + }, + "node_modules/cliui": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", + "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + "wrap-ansi": "^7.0.0" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + }, + "node_modules/commander": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz", + "integrity": "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==", + "engines": { + "node": ">= 10" + } + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==" + }, + "node_modules/core-util-is": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", + "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==", + "dev": true + }, + "node_modules/cross-env": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/cross-env/-/cross-env-7.0.3.tgz", + "integrity": "sha512-+/HKd6EgcQCJGh2PSjZuUitQBQynKor4wrFbRg4DtAgS1aWO+gU52xpH7M9ScGgXSYmAVS9bIJ8EzuaGw0oNAw==", + "dev": true, + "dependencies": { + "cross-spawn": "^7.0.1" + }, + "bin": { + "cross-env": "src/bin/cross-env.js", + "cross-env-shell": "src/bin/cross-env-shell.js" + }, + "engines": { + "node": ">=10.14", + "npm": ">=6", + "yarn": ">=1" + } + }, + "node_modules/cross-spawn": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", + "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "dev": true, + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/css-select": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/css-select/-/css-select-5.1.0.tgz", + "integrity": "sha512-nwoRF1rvRRnnCqqY7updORDsuqKzqYJ28+oSMaJMMgOauh3fvwHqMS7EZpIPqK8GL+g9mKxF1vP/ZjSeNjEVHg==", + "dev": true, + "dependencies": { + "boolbase": "^1.0.0", + "css-what": "^6.1.0", + "domhandler": "^5.0.2", + "domutils": "^3.0.1", + "nth-check": "^2.0.1" + }, + "funding": { + "url": "https://github.com/sponsors/fb55" + } + }, + "node_modules/css-what": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/css-what/-/css-what-6.1.0.tgz", + "integrity": 
"sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw==", + "dev": true, + "engines": { + "node": ">= 6" + }, + "funding": { + "url": "https://github.com/sponsors/fb55" + } + }, + "node_modules/d3": { + "version": "7.6.1", + "resolved": "https://registry.npmjs.org/d3/-/d3-7.6.1.tgz", + "integrity": "sha512-txMTdIHFbcpLx+8a0IFhZsbp+PfBBPt8yfbmukZTQFroKuFqIwqswF0qE5JXWefylaAVpSXFoKm3yP+jpNLFLw==", + "dependencies": { + "d3-array": "3", + "d3-axis": "3", + "d3-brush": "3", + "d3-chord": "3", + "d3-color": "3", + "d3-contour": "4", + "d3-delaunay": "6", + "d3-dispatch": "3", + "d3-drag": "3", + "d3-dsv": "3", + "d3-ease": "3", + "d3-fetch": "3", + "d3-force": "3", + "d3-format": "3", + "d3-geo": "3", + "d3-hierarchy": "3", + "d3-interpolate": "3", + "d3-path": "3", + "d3-polygon": "3", + "d3-quadtree": "3", + "d3-random": "3", + "d3-scale": "4", + "d3-scale-chromatic": "3", + "d3-selection": "3", + "d3-shape": "3", + "d3-time": "3", + "d3-time-format": "4", + "d3-timer": "3", + "d3-transition": "3", + "d3-zoom": "3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-array": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/d3-array/-/d3-array-3.2.0.tgz", + "integrity": "sha512-3yXFQo0oG3QCxbF06rMPFyGRMGJNS7NvsV1+2joOjbBE+9xvWQ8+GcMJAjRCzw06zQ3/arXeJgbPYcjUCuC+3g==", + "dependencies": { + "internmap": "1 - 2" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-axis": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-axis/-/d3-axis-3.0.0.tgz", + "integrity": "sha512-IH5tgjV4jE/GhHkRV0HiVYPDtvfjHQlQfJHs0usq7M30XcSBvOotpmH1IgkcXsO/5gEQZD43B//fc7SRT5S+xw==", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-brush": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-brush/-/d3-brush-3.0.0.tgz", + "integrity": "sha512-ALnjWlVYkXsVIGlOsuWH1+3udkYFI48Ljihfnh8FZPF2QS9o+PzGLBslO0PjzVoHLZ2KCVgAM8NVkXPJB2aNnQ==", + "dependencies": { + "d3-dispatch": "1 - 3", + "d3-drag": "2 - 3", + "d3-interpolate": "1 - 3", + "d3-selection": "3", + "d3-transition": "3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-brush/node_modules/d3-selection": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-selection/-/d3-selection-3.0.0.tgz", + "integrity": "sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ==", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-chord": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-chord/-/d3-chord-3.0.1.tgz", + "integrity": "sha512-VE5S6TNa+j8msksl7HwjxMHDM2yNK3XCkusIlpX5kwauBfXuyLAtNg9jCp/iHH61tgI4sb6R/EIMWCqEIdjT/g==", + "dependencies": { + "d3-path": "1 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-color": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-color/-/d3-color-3.1.0.tgz", + "integrity": "sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA==", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-contour": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/d3-contour/-/d3-contour-4.0.0.tgz", + "integrity": "sha512-7aQo0QHUTu/Ko3cP9YK9yUTxtoDEiDGwnBHyLxG5M4vqlBkO/uixMRele3nfsfj6UXOcuReVpVXzAboGraYIJw==", + "dependencies": { + "d3-array": "^3.2.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-delaunay": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/d3-delaunay/-/d3-delaunay-6.0.2.tgz", + "integrity": 
"sha512-IMLNldruDQScrcfT+MWnazhHbDJhcRJyOEBAJfwQnHle1RPh6WDuLvxNArUju2VSMSUuKlY5BGHRJ2cYyoFLQQ==", + "dependencies": { + "delaunator": "5" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-dispatch": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-dispatch/-/d3-dispatch-3.0.1.tgz", + "integrity": "sha512-rzUyPU/S7rwUflMyLc1ETDeBj0NRuHKKAcvukozwhshr6g6c5d8zh4c2gQjY2bZ0dXeGLWc1PF174P2tVvKhfg==", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-drag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-drag/-/d3-drag-3.0.0.tgz", + "integrity": "sha512-pWbUJLdETVA8lQNJecMxoXfH6x+mO2UQo8rSmZ+QqxcbyA3hfeprFgIT//HW2nlHChWeIIMwS2Fq+gEARkhTkg==", + "dependencies": { + "d3-dispatch": "1 - 3", + "d3-selection": "3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-drag/node_modules/d3-selection": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-selection/-/d3-selection-3.0.0.tgz", + "integrity": "sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ==", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-dsv": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-dsv/-/d3-dsv-3.0.1.tgz", + "integrity": "sha512-UG6OvdI5afDIFP9w4G0mNq50dSOsXHJaRE8arAS5o9ApWnIElp8GZw1Dun8vP8OyHOZ/QJUKUJwxiiCCnUwm+Q==", + "dependencies": { + "commander": "7", + "iconv-lite": "0.6", + "rw": "1" + }, + "bin": { + "csv2json": "bin/dsv2json.js", + "csv2tsv": "bin/dsv2dsv.js", + "dsv2dsv": "bin/dsv2dsv.js", + "dsv2json": "bin/dsv2json.js", + "json2csv": "bin/json2dsv.js", + "json2dsv": "bin/json2dsv.js", + "json2tsv": "bin/json2dsv.js", + "tsv2csv": "bin/dsv2dsv.js", + "tsv2json": "bin/dsv2json.js" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-ease": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-ease/-/d3-ease-3.0.1.tgz", + "integrity": "sha512-wR/XK3D3XcLIZwpbvQwQ5fK+8Ykds1ip7A2Txe0yxncXSdq1L9skcG7blcedkOX+ZcgxGAmLX1FrRGbADwzi0w==", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-fetch": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-fetch/-/d3-fetch-3.0.1.tgz", + "integrity": "sha512-kpkQIM20n3oLVBKGg6oHrUchHM3xODkTzjMoj7aWQFq5QEM+R6E4WkzT5+tojDY7yjez8KgCBRoj4aEr99Fdqw==", + "dependencies": { + "d3-dsv": "1 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-force": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-force/-/d3-force-3.0.0.tgz", + "integrity": "sha512-zxV/SsA+U4yte8051P4ECydjD/S+qeYtnaIyAs9tgHCqfguma/aAQDjo85A9Z6EKhBirHRJHXIgJUlffT4wdLg==", + "dependencies": { + "d3-dispatch": "1 - 3", + "d3-quadtree": "1 - 3", + "d3-timer": "1 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-format": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-format/-/d3-format-3.1.0.tgz", + "integrity": "sha512-YyUI6AEuY/Wpt8KWLgZHsIU86atmikuoOmCfommt0LYHiQSPjvX2AcFc38PX0CBpr2RCyZhjex+NS/LPOv6YqA==", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-geo": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-geo/-/d3-geo-3.0.1.tgz", + "integrity": "sha512-Wt23xBych5tSy9IYAM1FR2rWIBFWa52B/oF/GYe5zbdHrg08FU8+BuI6X4PvTwPDdqdAdq04fuWJpELtsaEjeA==", + "dependencies": { + "d3-array": "2.5.0 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-graphviz": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/d3-graphviz/-/d3-graphviz-4.1.1.tgz", + "integrity": 
"sha512-s0IVbKf8rs4eJI2xo5Umr7nXDX/LEZw/x2WtKxmlyQxR0qUY49UiLhBNOX7VDHZywMle43NKEXnU6fn22fpJvQ==", + "dependencies": { + "@hpcc-js/wasm": "1.12.8", + "d3-dispatch": "^2.0.0", + "d3-format": "^2.0.0", + "d3-interpolate": "^2.0.1", + "d3-path": "^2.0.0", + "d3-timer": "^2.0.0", + "d3-transition": "^2.0.0", + "d3-zoom": "^2.0.0" + }, + "peerDependencies": { + "d3-selection": "^2.0.0" + } + }, + "node_modules/d3-graphviz/node_modules/d3-color": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/d3-color/-/d3-color-2.0.0.tgz", + "integrity": "sha512-SPXi0TSKPD4g9tw0NMZFnR95XVgUZiBH+uUTqQuDu1OsE2zomHU7ho0FISciaPvosimixwHFl3WHLGabv6dDgQ==" + }, + "node_modules/d3-graphviz/node_modules/d3-dispatch": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/d3-dispatch/-/d3-dispatch-2.0.0.tgz", + "integrity": "sha512-S/m2VsXI7gAti2pBoLClFFTMOO1HTtT0j99AuXLoGFKO6deHDdnv6ZGTxSTTUTgO1zVcv82fCOtDjYK4EECmWA==" + }, + "node_modules/d3-graphviz/node_modules/d3-drag": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/d3-drag/-/d3-drag-2.0.0.tgz", + "integrity": "sha512-g9y9WbMnF5uqB9qKqwIIa/921RYWzlUDv9Jl1/yONQwxbOfszAWTCm8u7HOTgJgRDXiRZN56cHT9pd24dmXs8w==", + "dependencies": { + "d3-dispatch": "1 - 2", + "d3-selection": "2" + } + }, + "node_modules/d3-graphviz/node_modules/d3-ease": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/d3-ease/-/d3-ease-2.0.0.tgz", + "integrity": "sha512-68/n9JWarxXkOWMshcT5IcjbB+agblQUaIsbnXmrzejn2O82n3p2A9R2zEB9HIEFWKFwPAEDDN8gR0VdSAyyAQ==" + }, + "node_modules/d3-graphviz/node_modules/d3-format": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/d3-format/-/d3-format-2.0.0.tgz", + "integrity": "sha512-Ab3S6XuE/Q+flY96HXT0jOXcM4EAClYFnRGY5zsjRGNy6qCYrQsMffs7cV5Q9xejb35zxW5hf/guKw34kvIKsA==" + }, + "node_modules/d3-graphviz/node_modules/d3-interpolate": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/d3-interpolate/-/d3-interpolate-2.0.1.tgz", + "integrity": "sha512-c5UhwwTs/yybcmTpAVqwSFl6vrQ8JZJoT5F7xNFK9pymv5C0Ymcc9/LIJHtYIggg/yS9YHw8i8O8tgb9pupjeQ==", + "dependencies": { + "d3-color": "1 - 2" + } + }, + "node_modules/d3-graphviz/node_modules/d3-path": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/d3-path/-/d3-path-2.0.0.tgz", + "integrity": "sha512-ZwZQxKhBnv9yHaiWd6ZU4x5BtCQ7pXszEV9CU6kRgwIQVQGLMv1oiL4M+MK/n79sYzsj+gcgpPQSctJUsLN7fA==" + }, + "node_modules/d3-graphviz/node_modules/d3-timer": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/d3-timer/-/d3-timer-2.0.0.tgz", + "integrity": "sha512-TO4VLh0/420Y/9dO3+f9abDEFYeCUr2WZRlxJvbp4HPTQcSylXNiL6yZa9FIUvV1yRiFufl1bszTCLDqv9PWNA==" + }, + "node_modules/d3-graphviz/node_modules/d3-transition": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/d3-transition/-/d3-transition-2.0.0.tgz", + "integrity": "sha512-42ltAGgJesfQE3u9LuuBHNbGrI/AJjNL2OAUdclE70UE6Vy239GCBEYD38uBPoLeNsOhFStGpPI0BAOV+HMxog==", + "dependencies": { + "d3-color": "1 - 2", + "d3-dispatch": "1 - 2", + "d3-ease": "1 - 2", + "d3-interpolate": "1 - 2", + "d3-timer": "1 - 2" + }, + "peerDependencies": { + "d3-selection": "2" + } + }, + "node_modules/d3-graphviz/node_modules/d3-zoom": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/d3-zoom/-/d3-zoom-2.0.0.tgz", + "integrity": "sha512-fFg7aoaEm9/jf+qfstak0IYpnesZLiMX6GZvXtUSdv8RH2o4E2qeelgdU09eKS6wGuiGMfcnMI0nTIqWzRHGpw==", + "dependencies": { + "d3-dispatch": "1 - 2", + "d3-drag": "2", + "d3-interpolate": "1 - 2", + "d3-selection": "2", + "d3-transition": 
"2" + } + }, + "node_modules/d3-hierarchy": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/d3-hierarchy/-/d3-hierarchy-3.1.2.tgz", + "integrity": "sha512-FX/9frcub54beBdugHjDCdikxThEqjnR93Qt7PvQTOHxyiNCAlvMrHhclk3cD5VeAaq9fxmfRp+CnWw9rEMBuA==", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-interpolate": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-interpolate/-/d3-interpolate-3.0.1.tgz", + "integrity": "sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g==", + "dependencies": { + "d3-color": "1 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-path": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-path/-/d3-path-3.0.1.tgz", + "integrity": "sha512-gq6gZom9AFZby0YLduxT1qmrp4xpBA1YZr19OI717WIdKE2OM5ETq5qrHLb301IgxhLwcuxvGZVLeeWc/k1I6w==", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-polygon": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-polygon/-/d3-polygon-3.0.1.tgz", + "integrity": "sha512-3vbA7vXYwfe1SYhED++fPUQlWSYTTGmFmQiany/gdbiWgU/iEyQzyymwL9SkJjFFuCS4902BSzewVGsHHmHtXg==", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-quadtree": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-quadtree/-/d3-quadtree-3.0.1.tgz", + "integrity": "sha512-04xDrxQTDTCFwP5H6hRhsRcb9xxv2RzkcsygFzmkSIOJy3PeRJP7sNk3VRIbKXcog561P9oU0/rVH6vDROAgUw==", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-random": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-random/-/d3-random-3.0.1.tgz", + "integrity": "sha512-FXMe9GfxTxqd5D6jFsQ+DJ8BJS4E/fT5mqqdjovykEB2oFbTMDVdg1MGFxfQW+FBOGoB++k8swBrgwSHT1cUXQ==", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-scale": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/d3-scale/-/d3-scale-4.0.2.tgz", + "integrity": "sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ==", + "dependencies": { + "d3-array": "2.10.0 - 3", + "d3-format": "1 - 3", + "d3-interpolate": "1.2.0 - 3", + "d3-time": "2.1.1 - 3", + "d3-time-format": "2 - 4" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-scale-chromatic": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-scale-chromatic/-/d3-scale-chromatic-3.0.0.tgz", + "integrity": "sha512-Lx9thtxAKrO2Pq6OO2Ua474opeziKr279P/TKZsMAhYyNDD3EnCffdbgeSYN5O7m2ByQsxtuP2CSDczNUIZ22g==", + "dependencies": { + "d3-color": "1 - 3", + "d3-interpolate": "1 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-selection": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/d3-selection/-/d3-selection-2.0.0.tgz", + "integrity": "sha512-XoGGqhLUN/W14NmaqcO/bb1nqjDAw5WtSYb2X8wiuQWvSZUsUVYsOSkOybUrNvcBjaywBdYPy03eXHMXjk9nZA==" + }, + "node_modules/d3-shape": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-shape/-/d3-shape-3.1.0.tgz", + "integrity": "sha512-tGDh1Muf8kWjEDT/LswZJ8WF85yDZLvVJpYU9Nq+8+yW1Z5enxrmXOhTArlkaElU+CTn0OTVNli+/i+HP45QEQ==", + "dependencies": { + "d3-path": "1 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-time": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-time/-/d3-time-3.0.0.tgz", + "integrity": "sha512-zmV3lRnlaLI08y9IMRXSDshQb5Nj77smnfpnd2LrBa/2K281Jijactokeak14QacHs/kKq0AQ121nidNYlarbQ==", + "dependencies": { + "d3-array": "2 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-time-format": { 
+ "version": "4.1.0", + "resolved": "https://registry.npmjs.org/d3-time-format/-/d3-time-format-4.1.0.tgz", + "integrity": "sha512-dJxPBlzC7NugB2PDLwo9Q8JiTR3M3e4/XANkreKSUxF8vvXKqm1Yfq4Q5dl8budlunRVlUUaDUgFt7eA8D6NLg==", + "dependencies": { + "d3-time": "1 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-timer": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-timer/-/d3-timer-3.0.1.tgz", + "integrity": "sha512-ndfJ/JxxMd3nw31uyKoY2naivF+r29V+Lc0svZxe1JvvIRmi8hUsrMvdOwgS1o6uBHmiz91geQ0ylPP0aj1VUA==", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-transition": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-transition/-/d3-transition-3.0.1.tgz", + "integrity": "sha512-ApKvfjsSR6tg06xrL434C0WydLr7JewBB3V+/39RMHsaXTOG0zmt/OAXeng5M5LBm0ojmxJrpomQVZ1aPvBL4w==", + "dependencies": { + "d3-color": "1 - 3", + "d3-dispatch": "1 - 3", + "d3-ease": "1 - 3", + "d3-interpolate": "1 - 3", + "d3-timer": "1 - 3" + }, + "engines": { + "node": ">=12" + }, + "peerDependencies": { + "d3-selection": "2 - 3" + } + }, + "node_modules/d3-zoom": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-zoom/-/d3-zoom-3.0.0.tgz", + "integrity": "sha512-b8AmV3kfQaqWAuacbPuNbL6vahnOJflOhexLzMMNLga62+/nh0JzvJ0aO/5a5MVgUFGS7Hu1P9P03o3fJkDCyw==", + "dependencies": { + "d3-dispatch": "1 - 3", + "d3-drag": "2 - 3", + "d3-interpolate": "1 - 3", + "d3-selection": "2 - 3", + "d3-transition": "2 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3/node_modules/d3-selection": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-selection/-/d3-selection-3.0.0.tgz", + "integrity": "sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ==", + "engines": { + "node": ">=12" + } + }, + "node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dev": true, + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/decompress-response": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz", + "integrity": "sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==", + "dev": true, + "dependencies": { + "mimic-response": "^3.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/deep-extend": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz", + "integrity": "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==", + "dev": true, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/deep-is": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", + "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", + "dev": true + }, + "node_modules/delaunator": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/delaunator/-/delaunator-5.0.0.tgz", + "integrity": "sha512-AyLvtyJdbv/U1GkiS6gUUzclRoAY4Gs75qkMygJJhU75LW4DNuSF2RMzpxs9jw9Oz1BobHjTdkG3zdP55VxAqw==", + "dependencies": { + "robust-predicates": "^3.0.0" + } + }, 
+ "node_modules/detect-libc": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.1.tgz", + "integrity": "sha512-463v3ZeIrcWtdgIg6vI6XUncguvr2TnGl4SzDXinkt9mSLpBJKXT3mW6xT3VQdDN11+WVs29pgvivTc4Lp8v+w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/dir-glob": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", + "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", + "dev": true, + "dependencies": { + "path-type": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/doctrine": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", + "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", + "dev": true, + "dependencies": { + "esutils": "^2.0.2" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/dom-serializer": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-2.0.0.tgz", + "integrity": "sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==", + "dev": true, + "dependencies": { + "domelementtype": "^2.3.0", + "domhandler": "^5.0.2", + "entities": "^4.2.0" + }, + "funding": { + "url": "https://github.com/cheeriojs/dom-serializer?sponsor=1" + } + }, + "node_modules/domelementtype": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz", + "integrity": "sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fb55" + } + ] + }, + "node_modules/domhandler": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-5.0.3.tgz", + "integrity": "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==", + "dev": true, + "dependencies": { + "domelementtype": "^2.3.0" + }, + "engines": { + "node": ">= 4" + }, + "funding": { + "url": "https://github.com/fb55/domhandler?sponsor=1" + } + }, + "node_modules/domutils": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/domutils/-/domutils-3.0.1.tgz", + "integrity": "sha512-z08c1l761iKhDFtfXO04C7kTdPBLi41zwOZl00WS8b5eiaebNpY00HKbztwBq+e3vyqWNwWF3mP9YLUeqIrF+Q==", + "dev": true, + "dependencies": { + "dom-serializer": "^2.0.0", + "domelementtype": "^2.3.0", + "domhandler": "^5.0.1" + }, + "funding": { + "url": "https://github.com/fb55/domutils?sponsor=1" + } + }, + "node_modules/duplexer2": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/duplexer2/-/duplexer2-0.1.4.tgz", + "integrity": "sha512-asLFVfWWtJ90ZyOUHMqk7/S2w2guQKxUI2itj3d92ADHhxUSbCMGi1f1cBcJ7xM1To+pE/Khbwo1yuNbMEPKeA==", + "dev": true, + "dependencies": { + "readable-stream": "^2.0.2" + } + }, + "node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" + }, + "node_modules/end-of-stream": { + "version": "1.4.4", + "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", + "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", + "dev": true, + "dependencies": { 
+ "once": "^1.4.0" + } + }, + "node_modules/entities": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/entities/-/entities-4.3.1.tgz", + "integrity": "sha512-o4q/dYJlmyjP2zfnaWDUC6A3BQFmVTX+tZPezK7k0GLSU9QYCauscf5Y+qcEPzKL+EixVouYDgLQK5H9GrLpkg==", + "dev": true, + "engines": { + "node": ">=0.12" + }, + "funding": { + "url": "https://github.com/fb55/entities?sponsor=1" + } + }, + "node_modules/esbuild": { + "version": "0.14.48", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.14.48.tgz", + "integrity": "sha512-w6N1Yn5MtqK2U1/WZTX9ZqUVb8IOLZkZ5AdHkT6x3cHDMVsYWC7WPdiLmx19w3i4Rwzy5LqsEMtVihG3e4rFzA==", + "dev": true, + "hasInstallScript": true, + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=12" + }, + "optionalDependencies": { + "esbuild-android-64": "0.14.48", + "esbuild-android-arm64": "0.14.48", + "esbuild-darwin-64": "0.14.48", + "esbuild-darwin-arm64": "0.14.48", + "esbuild-freebsd-64": "0.14.48", + "esbuild-freebsd-arm64": "0.14.48", + "esbuild-linux-32": "0.14.48", + "esbuild-linux-64": "0.14.48", + "esbuild-linux-arm": "0.14.48", + "esbuild-linux-arm64": "0.14.48", + "esbuild-linux-mips64le": "0.14.48", + "esbuild-linux-ppc64le": "0.14.48", + "esbuild-linux-riscv64": "0.14.48", + "esbuild-linux-s390x": "0.14.48", + "esbuild-netbsd-64": "0.14.48", + "esbuild-openbsd-64": "0.14.48", + "esbuild-sunos-64": "0.14.48", + "esbuild-windows-32": "0.14.48", + "esbuild-windows-64": "0.14.48", + "esbuild-windows-arm64": "0.14.48" + } + }, + "node_modules/esbuild-android-64": { + "version": "0.14.48", + "resolved": "https://registry.npmjs.org/esbuild-android-64/-/esbuild-android-64-0.14.48.tgz", + "integrity": "sha512-3aMjboap/kqwCUpGWIjsk20TtxVoKck8/4Tu19rubh7t5Ra0Yrpg30Mt1QXXlipOazrEceGeWurXKeFJgkPOUg==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild-android-arm64": { + "version": "0.14.48", + "resolved": "https://registry.npmjs.org/esbuild-android-arm64/-/esbuild-android-arm64-0.14.48.tgz", + "integrity": "sha512-vptI3K0wGALiDq+EvRuZotZrJqkYkN5282iAfcffjI5lmGG9G1ta/CIVauhY42MBXwEgDJkweiDcDMRLzBZC4g==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild-darwin-64": { + "version": "0.14.48", + "resolved": "https://registry.npmjs.org/esbuild-darwin-64/-/esbuild-darwin-64-0.14.48.tgz", + "integrity": "sha512-gGQZa4+hab2Va/Zww94YbshLuWteyKGD3+EsVon8EWTWhnHFRm5N9NbALNbwi/7hQ/hM1Zm4FuHg+k6BLsl5UA==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild-darwin-arm64": { + "version": "0.14.48", + "resolved": "https://registry.npmjs.org/esbuild-darwin-arm64/-/esbuild-darwin-arm64-0.14.48.tgz", + "integrity": "sha512-bFjnNEXjhZT+IZ8RvRGNJthLWNHV5JkCtuOFOnjvo5pC0sk2/QVk0Qc06g2PV3J0TcU6kaPC3RN9yy9w2PSLEA==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild-freebsd-64": { + "version": "0.14.48", + "resolved": "https://registry.npmjs.org/esbuild-freebsd-64/-/esbuild-freebsd-64-0.14.48.tgz", + "integrity": "sha512-1NOlwRxmOsnPcWOGTB10JKAkYSb2nue0oM1AfHWunW/mv3wERfJmnYlGzL3UAOIUXZqW8GeA2mv+QGwq7DToqA==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + 
"node_modules/esbuild-freebsd-arm64": { + "version": "0.14.48", + "resolved": "https://registry.npmjs.org/esbuild-freebsd-arm64/-/esbuild-freebsd-arm64-0.14.48.tgz", + "integrity": "sha512-gXqKdO8wabVcYtluAbikDH2jhXp+Klq5oCD5qbVyUG6tFiGhrC9oczKq3vIrrtwcxDQqK6+HDYK8Zrd4bCA9Gw==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild-linux-32": { + "version": "0.14.48", + "resolved": "https://registry.npmjs.org/esbuild-linux-32/-/esbuild-linux-32-0.14.48.tgz", + "integrity": "sha512-ghGyDfS289z/LReZQUuuKq9KlTiTspxL8SITBFQFAFRA/IkIvDpnZnCAKTCjGXAmUqroMQfKJXMxyjJA69c/nQ==", + "cpu": [ + "ia32" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild-linux-64": { + "version": "0.14.48", + "resolved": "https://registry.npmjs.org/esbuild-linux-64/-/esbuild-linux-64-0.14.48.tgz", + "integrity": "sha512-vni3p/gppLMVZLghI7oMqbOZdGmLbbKR23XFARKnszCIBpEMEDxOMNIKPmMItQrmH/iJrL1z8Jt2nynY0bE1ug==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild-linux-arm": { + "version": "0.14.48", + "resolved": "https://registry.npmjs.org/esbuild-linux-arm/-/esbuild-linux-arm-0.14.48.tgz", + "integrity": "sha512-+VfSV7Akh1XUiDNXgqgY1cUP1i2vjI+BmlyXRfVz5AfV3jbpde8JTs5Q9sYgaoq5cWfuKfoZB/QkGOI+QcL1Tw==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild-linux-arm64": { + "version": "0.14.48", + "resolved": "https://registry.npmjs.org/esbuild-linux-arm64/-/esbuild-linux-arm64-0.14.48.tgz", + "integrity": "sha512-3CFsOlpoxlKPRevEHq8aAntgYGYkE1N9yRYAcPyng/p4Wyx0tPR5SBYsxLKcgPB9mR8chHEhtWYz6EZ+H199Zw==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild-linux-mips64le": { + "version": "0.14.48", + "resolved": "https://registry.npmjs.org/esbuild-linux-mips64le/-/esbuild-linux-mips64le-0.14.48.tgz", + "integrity": "sha512-cs0uOiRlPp6ymknDnjajCgvDMSsLw5mST2UXh+ZIrXTj2Ifyf2aAP3Iw4DiqgnyYLV2O/v/yWBJx+WfmKEpNLA==", + "cpu": [ + "mips64el" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild-linux-ppc64le": { + "version": "0.14.48", + "resolved": "https://registry.npmjs.org/esbuild-linux-ppc64le/-/esbuild-linux-ppc64le-0.14.48.tgz", + "integrity": "sha512-+2F0vJMkuI0Wie/wcSPDCqXvSFEELH7Jubxb7mpWrA/4NpT+/byjxDz0gG6R1WJoeDefcrMfpBx4GFNN1JQorQ==", + "cpu": [ + "ppc64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild-linux-riscv64": { + "version": "0.14.48", + "resolved": "https://registry.npmjs.org/esbuild-linux-riscv64/-/esbuild-linux-riscv64-0.14.48.tgz", + "integrity": "sha512-BmaK/GfEE+5F2/QDrIXteFGKnVHGxlnK9MjdVKMTfvtmudjY3k2t8NtlY4qemKSizc+QwyombGWTBDc76rxePA==", + "cpu": [ + "riscv64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild-linux-s390x": { + "version": "0.14.48", + "resolved": "https://registry.npmjs.org/esbuild-linux-s390x/-/esbuild-linux-s390x-0.14.48.tgz", + "integrity": "sha512-tndw/0B9jiCL+KWKo0TSMaUm5UWBLsfCKVdbfMlb3d5LeV9WbijZ8Ordia8SAYv38VSJWOEt6eDCdOx8LqkC4g==", + "cpu": [ + "s390x" + ], + "dev": true, + 
"optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild-netbsd-64": { + "version": "0.14.48", + "resolved": "https://registry.npmjs.org/esbuild-netbsd-64/-/esbuild-netbsd-64-0.14.48.tgz", + "integrity": "sha512-V9hgXfwf/T901Lr1wkOfoevtyNkrxmMcRHyticybBUHookznipMOHoF41Al68QBsqBxnITCEpjjd4yAos7z9Tw==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild-openbsd-64": { + "version": "0.14.48", + "resolved": "https://registry.npmjs.org/esbuild-openbsd-64/-/esbuild-openbsd-64-0.14.48.tgz", + "integrity": "sha512-+IHf4JcbnnBl4T52egorXMatil/za0awqzg2Vy6FBgPcBpisDWT2sVz/tNdrK9kAqj+GZG/jZdrOkj7wsrNTKA==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild-sunos-64": { + "version": "0.14.48", + "resolved": "https://registry.npmjs.org/esbuild-sunos-64/-/esbuild-sunos-64-0.14.48.tgz", + "integrity": "sha512-77m8bsr5wOpOWbGi9KSqDphcq6dFeJyun8TA+12JW/GAjyfTwVtOnN8DOt6DSPUfEV+ltVMNqtXUeTeMAxl5KA==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild-windows-32": { + "version": "0.14.48", + "resolved": "https://registry.npmjs.org/esbuild-windows-32/-/esbuild-windows-32-0.14.48.tgz", + "integrity": "sha512-EPgRuTPP8vK9maxpTGDe5lSoIBHGKO/AuxDncg5O3NkrPeLNdvvK8oywB0zGaAZXxYWfNNSHskvvDgmfVTguhg==", + "cpu": [ + "ia32" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild-windows-64": { + "version": "0.14.48", + "resolved": "https://registry.npmjs.org/esbuild-windows-64/-/esbuild-windows-64-0.14.48.tgz", + "integrity": "sha512-YmpXjdT1q0b8ictSdGwH3M8VCoqPpK1/UArze3X199w6u8hUx3V8BhAi1WjbsfDYRBanVVtduAhh2sirImtAvA==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild-windows-arm64": { + "version": "0.14.48", + "resolved": "https://registry.npmjs.org/esbuild-windows-arm64/-/esbuild-windows-arm64-0.14.48.tgz", + "integrity": "sha512-HHaOMCsCXp0rz5BT2crTka6MPWVno121NKApsGs/OIW5QC0ggC69YMGs1aJct9/9FSUF4A1xNE/cLvgB5svR4g==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/escalade": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", + "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", + "engines": { + "node": ">=6" + } + }, + "node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/eslint": { + "version": "8.19.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.19.0.tgz", + "integrity": "sha512-SXOPj3x9VKvPe81TjjUJCYlV4oJjQw68Uek+AM0X4p+33dj2HY5bpTZOgnQHcG2eAm1mtCU9uNMnJi7exU/kYw==", + "dev": true, + "dependencies": { + "@eslint/eslintrc": "^1.3.0", + "@humanwhocodes/config-array": "^0.9.2", + "ajv": "^6.10.0", + "chalk": "^4.0.0", + 
"cross-spawn": "^7.0.2", + "debug": "^4.3.2", + "doctrine": "^3.0.0", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^7.1.1", + "eslint-utils": "^3.0.0", + "eslint-visitor-keys": "^3.3.0", + "espree": "^9.3.2", + "esquery": "^1.4.0", + "esutils": "^2.0.2", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^6.0.1", + "functional-red-black-tree": "^1.0.1", + "glob-parent": "^6.0.1", + "globals": "^13.15.0", + "ignore": "^5.2.0", + "import-fresh": "^3.0.0", + "imurmurhash": "^0.1.4", + "is-glob": "^4.0.0", + "js-yaml": "^4.1.0", + "json-stable-stringify-without-jsonify": "^1.0.1", + "levn": "^0.4.1", + "lodash.merge": "^4.6.2", + "minimatch": "^3.1.2", + "natural-compare": "^1.4.0", + "optionator": "^0.9.1", + "regexpp": "^3.2.0", + "strip-ansi": "^6.0.1", + "strip-json-comments": "^3.1.0", + "text-table": "^0.2.0", + "v8-compile-cache": "^2.0.3" + }, + "bin": { + "eslint": "bin/eslint.js" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint-config-prettier": { + "version": "8.5.0", + "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-8.5.0.tgz", + "integrity": "sha512-obmWKLUNCnhtQRKc+tmnYuQl0pFU1ibYJQ5BGhTVB08bHe9wC8qUeG7c08dj9XX+AuPj1YSGSQIHl1pnDHZR0Q==", + "dev": true, + "bin": { + "eslint-config-prettier": "bin/cli.js" + }, + "peerDependencies": { + "eslint": ">=7.0.0" + } + }, + "node_modules/eslint-scope": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz", + "integrity": "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==", + "dev": true, + "dependencies": { + "esrecurse": "^4.3.0", + "estraverse": "^4.1.1" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/eslint-utils": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-3.0.0.tgz", + "integrity": "sha512-uuQC43IGctw68pJA1RgbQS8/NP7rch6Cwd4j3ZBtgo4/8Flj4eGE7ZYSZRN3iq5pVUv6GPdW5Z1RFleo84uLDA==", + "dev": true, + "dependencies": { + "eslint-visitor-keys": "^2.0.0" + }, + "engines": { + "node": "^10.0.0 || ^12.0.0 || >= 14.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/mysticatea" + }, + "peerDependencies": { + "eslint": ">=5" + } + }, + "node_modules/eslint-utils/node_modules/eslint-visitor-keys": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz", + "integrity": "sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/eslint-visitor-keys": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.3.0.tgz", + "integrity": "sha512-mQ+suqKJVyeuwGYHAdjMFqjCyfl8+Ldnxuyp3ldiMBFKkvytrXUZWaiPCEav8qDHKty44bD+qV1IP4T+w+xXRA==", + "dev": true, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, + "node_modules/eslint/node_modules/eslint-scope": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.1.1.tgz", + "integrity": "sha512-QKQM/UXpIiHcLqJ5AOyIW7XZmzjkzQXYE54n1++wb0u9V/abW3l9uQnxX8Z5Xd18xyKIMTUAyQ0k1e8pz6LUrw==", + "dev": true, + "dependencies": { + "esrecurse": "^4.3.0", + "estraverse": "^5.2.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, + "node_modules/eslint/node_modules/estraverse": { + 
"version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/espree": { + "version": "9.3.2", + "resolved": "https://registry.npmjs.org/espree/-/espree-9.3.2.tgz", + "integrity": "sha512-D211tC7ZwouTIuY5x9XnS0E9sWNChB7IYKX/Xp5eQj3nFXhqmiUDB9q27y76oFl8jTg3pXcQx/bpxMfs3CIZbA==", + "dev": true, + "dependencies": { + "acorn": "^8.7.1", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^3.3.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, + "node_modules/esquery": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.4.0.tgz", + "integrity": "sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w==", + "dev": true, + "dependencies": { + "estraverse": "^5.1.0" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/esquery/node_modules/estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/esrecurse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", + "dev": true, + "dependencies": { + "estraverse": "^5.2.0" + }, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/esrecurse/node_modules/estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/estraverse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", + "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", + "dev": true, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/expand-template": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/expand-template/-/expand-template-2.0.3.tgz", + "integrity": "sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "dev": true + }, + "node_modules/fast-glob": { + "version": "3.2.11", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.11.tgz", + "integrity": "sha512-xrO3+1bxSo3ZVHAnqzyuewYT6aMFHRAd4Kcs92MAonjwQZLsK9d0SF1IyQ3k5PoirxTW0Oe/RqFgMQ6TcNE5Ew==", + "dev": true, + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + 
"@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.4" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/fast-glob/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "dev": true + }, + "node_modules/fast-levenshtein": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", + "dev": true + }, + "node_modules/fastq": { + "version": "1.13.0", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.13.0.tgz", + "integrity": "sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw==", + "dev": true, + "dependencies": { + "reusify": "^1.0.4" + } + }, + "node_modules/fd-slicer": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/fd-slicer/-/fd-slicer-1.1.0.tgz", + "integrity": "sha512-cE1qsB/VwyQozZ+q1dGxR8LBYNZeofhEdUNGSMbQD3Gw2lAzX9Zb3uIU6Ebc/Fmyjo9AWWfnn0AUCHqtevs/8g==", + "dev": true, + "dependencies": { + "pend": "~1.2.0" + } + }, + "node_modules/file-entry-cache": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", + "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==", + "dev": true, + "dependencies": { + "flat-cache": "^3.0.4" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + } + }, + "node_modules/fill-range": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "dev": true, + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/flat-cache": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.0.4.tgz", + "integrity": "sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg==", + "dev": true, + "dependencies": { + "flatted": "^3.1.0", + "rimraf": "^3.0.2" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + } + }, + "node_modules/flatted": { + "version": "3.2.6", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.2.6.tgz", + "integrity": "sha512-0sQoMh9s0BYsm+12Huy/rkKxVu4R1+r96YX5cG44rHV0pQ6iC3Q+mkoMFaGWObMFYQxCVT+ssG1ksneA2MI9KQ==", + "dev": true + }, + "node_modules/follow-redirects": { + "version": "1.15.1", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.1.tgz", + "integrity": "sha512-yLAMQs+k0b2m7cVxpS1VKJVvoz7SS9Td1zss3XRwXj+ZDH00RJgnuLx7E44wx02kQLrdM3aOOy+FpzS7+8OizA==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/RubenVerborgh" + } + ], + "engines": { + "node": ">=4.0" + }, + "peerDependenciesMeta": { 
+ "debug": { + "optional": true + } + } + }, + "node_modules/fs-constants": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz", + "integrity": "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==", + "dev": true + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "dev": true + }, + "node_modules/fstream": { + "version": "1.0.12", + "resolved": "https://registry.npmjs.org/fstream/-/fstream-1.0.12.tgz", + "integrity": "sha512-WvJ193OHa0GHPEL+AycEJgxvBEwyfRkN1vhjca23OaPVMCaLCXTd5qAu82AjTcgP1UJmytkOKb63Ypde7raDIg==", + "dev": true, + "dependencies": { + "graceful-fs": "^4.1.2", + "inherits": "~2.0.0", + "mkdirp": ">=0.5 0", + "rimraf": "2" + }, + "engines": { + "node": ">=0.6" + } + }, + "node_modules/fstream/node_modules/rimraf": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", + "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", + "dev": true, + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + } + }, + "node_modules/function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", + "dev": true + }, + "node_modules/functional-red-black-tree": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz", + "integrity": "sha512-dsKNQNdj6xA3T+QlADDA7mOSlX0qiMINjn0cgr+eGHGsbSHzTabcIogz2+p/iqP1Xs6EP/sS2SbqH+brGTbq0g==", + "dev": true + }, + "node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/get-intrinsic": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.2.tgz", + "integrity": "sha512-Jfm3OyCxHh9DJyc28qGk+JmfkpO41A4XkneDSujN9MDXrm4oDKdHvndhZ2dN94+ERNfkYJWDclW6k2L/ZGHjXA==", + "dev": true, + "dependencies": { + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/github-from-package": { + "version": "0.0.0", + "resolved": "https://registry.npmjs.org/github-from-package/-/github-from-package-0.0.0.tgz", + "integrity": "sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw==", + "dev": true + }, + "node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/glob-parent": { + "version": "6.0.2", + 
"resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/globals": { + "version": "13.16.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.16.0.tgz", + "integrity": "sha512-A1lrQfpNF+McdPOnnFqY3kSN0AFTy485bTi1bkLk4mVPODIUEcSfhHgRqA+QdXPksrSTTztYXx37NFV+GpGk3Q==", + "dev": true, + "dependencies": { + "type-fest": "^0.20.2" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/globby": { + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", + "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", + "dev": true, + "dependencies": { + "array-union": "^2.1.0", + "dir-glob": "^3.0.1", + "fast-glob": "^3.2.9", + "ignore": "^5.2.0", + "merge2": "^1.4.1", + "slash": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.10", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.10.tgz", + "integrity": "sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==", + "dev": true + }, + "node_modules/has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "dev": true, + "dependencies": { + "function-bind": "^1.1.1" + }, + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/has-symbols": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", + "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hosted-git-info": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-4.1.0.tgz", + "integrity": "sha512-kyCuEOWjJqZuDbRHzL8V93NzQhwIB71oFWSyzVo+KPZI+pnQPPxucdkrOZvkLRnrf5URsQM+IJ09Dw29cRALIA==", + "dev": true, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/htmlparser2": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-8.0.1.tgz", + "integrity": "sha512-4lVbmc1diZC7GUJQtRQ5yBAeUCL1exyMwmForWkRLnwyzWBFxN633SALPMGYaWZvKe9j1pRZJpauvmxENSp/EA==", + "dev": true, + "funding": [ + "https://github.com/fb55/htmlparser2?sponsor=1", + { + "type": "github", + "url": "https://github.com/sponsors/fb55" + } + ], + "dependencies": { + "domelementtype": "^2.3.0", + "domhandler": "^5.0.2", + "domutils": "^3.0.1", + "entities": "^4.3.0" + } + }, + "node_modules/http-proxy-agent": { + "version": "4.0.1", + "resolved": 
"https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz", + "integrity": "sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg==", + "dev": true, + "dependencies": { + "@tootallnate/once": "1", + "agent-base": "6", + "debug": "4" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/https-proxy-agent": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz", + "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==", + "dev": true, + "dependencies": { + "agent-base": "6", + "debug": "4" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/iconv-lite": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ieee754": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", + "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/ignore": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.2.0.tgz", + "integrity": "sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ==", + "dev": true, + "engines": { + "node": ">= 4" + } + }, + "node_modules/import-fresh": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", + "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", + "dev": true, + "dependencies": { + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", + "dev": true, + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "dev": true, + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true + }, + "node_modules/ini": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz", + "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==", + "dev": true + }, + "node_modules/internmap": { + "version": "2.0.3", + "resolved": 
"https://registry.npmjs.org/internmap/-/internmap-2.0.3.tgz", + "integrity": "sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg==", + "engines": { + "node": ">=12" + } + }, + "node_modules/is-ci": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-ci/-/is-ci-2.0.0.tgz", + "integrity": "sha512-YfJT7rkpQB0updsdHLGWrvhBJfcfzNNawYDNIyQXJz0IViGf75O8EBPKSdvw2rF+LGCsX4FZ8tcr3b19LcZq4w==", + "dev": true, + "dependencies": { + "ci-info": "^2.0.0" + }, + "bin": { + "is-ci": "bin.js" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "engines": { + "node": ">=8" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==", + "dev": true + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true + }, + "node_modules/js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dev": true, + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true + }, + "node_modules/json-stable-stringify-without-jsonify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", + "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", + "dev": true + }, + "node_modules/keytar": { + "version": "7.9.0", + "resolved": "https://registry.npmjs.org/keytar/-/keytar-7.9.0.tgz", + "integrity": "sha512-VPD8mtVtm5JNtA2AErl6Chp06JBfy7diFQ7TQQhdpWOl6MrCRB+eRbvAZUsbGQS9kiMq0coJsy0W0vHpDCkWsQ==", + 
"dev": true, + "hasInstallScript": true, + "dependencies": { + "node-addon-api": "^4.3.0", + "prebuild-install": "^7.0.1" + } + }, + "node_modules/leven": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/leven/-/leven-3.1.0.tgz", + "integrity": "sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/levn": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", + "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", + "dev": true, + "dependencies": { + "prelude-ls": "^1.2.1", + "type-check": "~0.4.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/linkify-it": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-3.0.3.tgz", + "integrity": "sha512-ynTsyrFSdE5oZ/O9GEf00kPngmOfVwazR5GKDq6EYfhlpFug3J2zybX56a2PRRpc9P+FuSoGNAwjlbDs9jJBPQ==", + "dev": true, + "dependencies": { + "uc.micro": "^1.0.1" + } + }, + "node_modules/listenercount": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/listenercount/-/listenercount-1.0.1.tgz", + "integrity": "sha512-3mk/Zag0+IJxeDrxSgaDPy4zZ3w05PRZeJNnlWhzFz5OkX49J4krc+A8X2d2M69vGMBEX0uyl8M+W+8gH+kBqQ==", + "dev": true + }, + "node_modules/lodash.merge": { + "version": "4.6.2", + "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", + "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", + "dev": true + }, + "node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/markdown-it": { + "version": "12.3.2", + "resolved": "https://registry.npmjs.org/markdown-it/-/markdown-it-12.3.2.tgz", + "integrity": "sha512-TchMembfxfNVpHkbtriWltGWc+m3xszaRD0CZup7GFFhzIgQqxIfn3eGj1yZpfuflzPvfkt611B2Q/Bsk1YnGg==", + "dev": true, + "dependencies": { + "argparse": "^2.0.1", + "entities": "~2.1.0", + "linkify-it": "^3.0.1", + "mdurl": "^1.0.1", + "uc.micro": "^1.0.5" + }, + "bin": { + "markdown-it": "bin/markdown-it.js" + } + }, + "node_modules/markdown-it/node_modules/entities": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-2.1.0.tgz", + "integrity": "sha512-hCx1oky9PFrJ611mf0ifBLBRW8lUUVRlFolb5gWRfIELabBlbp9xZvrqZLZAs+NxFnbfQoeGd8wDkygjg7U85w==", + "dev": true, + "funding": { + "url": "https://github.com/fb55/entities?sponsor=1" + } + }, + "node_modules/mdurl": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/mdurl/-/mdurl-1.0.1.tgz", + "integrity": "sha512-/sKlQJCBYVY9Ers9hqzKou4H6V5UWc/M59TH2dvkt+84itfnq7uFOMLpOiOS4ujvHP4etln18fmIxA5R5fll0g==", + "dev": true + }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/micromatch": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", + "integrity": 
"sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", + "dev": true, + "dependencies": { + "braces": "^3.0.2", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/mime": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", + "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", + "dev": true, + "bin": { + "mime": "cli.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/mimic-response": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz", + "integrity": "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/minimist": { + "version": "1.2.6", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz", + "integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==", + "dev": true + }, + "node_modules/mkdirp": { + "version": "0.5.6", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.6.tgz", + "integrity": "sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==", + "dev": true, + "dependencies": { + "minimist": "^1.2.6" + }, + "bin": { + "mkdirp": "bin/cmd.js" + } + }, + "node_modules/mkdirp-classic": { + "version": "0.5.3", + "resolved": "https://registry.npmjs.org/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz", + "integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==", + "dev": true + }, + "node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "node_modules/mute-stream": { + "version": "0.0.8", + "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.8.tgz", + "integrity": "sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==", + "dev": true + }, + "node_modules/napi-build-utils": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/napi-build-utils/-/napi-build-utils-1.0.2.tgz", + "integrity": "sha512-ONmRUqK7zj7DWX0D9ADe03wbwOBZxNAfF20PlGfCWQcD3+/MakShIHrMqx9YwPTfxDdF1zLeL+RGZiR9kGMLdg==", + "dev": true + }, + "node_modules/natural-compare": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", + "dev": true + }, + "node_modules/node-abi": { + "version": "3.22.0", + "resolved": "https://registry.npmjs.org/node-abi/-/node-abi-3.22.0.tgz", + "integrity": "sha512-u4uAs/4Zzmp/jjsD9cyFYDXeISfUWaAVWshPmDZOFOv4Xl4SbzTXm53I04C2uRueYJ+0t5PEtLH/owbn2Npf/w==", + "dev": true, + "dependencies": { + "semver": "^7.3.5" + }, + "engines": { + "node": ">=10" + } + }, + 
"node_modules/node-addon-api": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-4.3.0.tgz", + "integrity": "sha512-73sE9+3UaLYYFmDsFZnqCInzPyh3MqIwZO9cw58yIqAZhONrrabrYyYe3TuIqtIiOuTXVhsGau8hcrhhwSsDIQ==", + "dev": true + }, + "node_modules/nth-check": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.1.1.tgz", + "integrity": "sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==", + "dev": true, + "dependencies": { + "boolbase": "^1.0.0" + }, + "funding": { + "url": "https://github.com/fb55/nth-check?sponsor=1" + } + }, + "node_modules/object-inspect": { + "version": "1.12.2", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.2.tgz", + "integrity": "sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dev": true, + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/optionator": { + "version": "0.9.1", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.1.tgz", + "integrity": "sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==", + "dev": true, + "dependencies": { + "deep-is": "^0.1.3", + "fast-levenshtein": "^2.0.6", + "levn": "^0.4.1", + "prelude-ls": "^1.2.1", + "type-check": "^0.4.0", + "word-wrap": "^1.2.3" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/ovsx": { + "version": "0.5.1", + "resolved": "https://registry.npmjs.org/ovsx/-/ovsx-0.5.1.tgz", + "integrity": "sha512-3OWq0l7DuVHi2bd2aQe5+QVQlFIqvrcw3/2vGXL404L6Tr+R4QHtzfnYYghv8CCa85xJHjU0RhcaC7pyXkAUbg==", + "dev": true, + "dependencies": { + "commander": "^6.1.0", + "follow-redirects": "^1.14.6", + "is-ci": "^2.0.0", + "leven": "^3.1.0", + "tmp": "^0.2.1", + "vsce": "^2.6.3" + }, + "bin": { + "ovsx": "lib/ovsx" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/ovsx/node_modules/commander": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-6.2.1.tgz", + "integrity": "sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA==", + "dev": true, + "engines": { + "node": ">= 6" + } + }, + "node_modules/parent-module": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "dev": true, + "dependencies": { + "callsites": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/parse-semver": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/parse-semver/-/parse-semver-1.1.1.tgz", + "integrity": "sha512-Eg1OuNntBMH0ojvEKSrvDSnwLmvVuUOSdylH/pSCPNMIspLlweJyIWXCE+k/5hm3cj/EBUYwmWkjhBALNP4LXQ==", + "dev": true, + "dependencies": { + "semver": "^5.1.0" + } + }, + "node_modules/parse-semver/node_modules/semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true, + "bin": { + "semver": "bin/semver" 
+ } + }, + "node_modules/parse5": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-7.0.0.tgz", + "integrity": "sha512-y/t8IXSPWTuRZqXc0ajH/UwDj4mnqLEbSttNbThcFhGrZuOyoyvNBO85PBp2jQa55wY9d07PBNjsK8ZP3K5U6g==", + "dev": true, + "dependencies": { + "entities": "^4.3.0" + }, + "funding": { + "url": "https://github.com/inikulin/parse5?sponsor=1" + } + }, + "node_modules/parse5-htmlparser2-tree-adapter": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/parse5-htmlparser2-tree-adapter/-/parse5-htmlparser2-tree-adapter-7.0.0.tgz", + "integrity": "sha512-B77tOZrqqfUfnVcOrUvfdLbz4pu4RopLD/4vmu3HUPswwTA8OH0EMW9BlWR2B0RCoiZRAHEUu7IxeP1Pd1UU+g==", + "dev": true, + "dependencies": { + "domhandler": "^5.0.2", + "parse5": "^7.0.0" + }, + "funding": { + "url": "https://github.com/inikulin/parse5?sponsor=1" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/path-type": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", + "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/pend": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/pend/-/pend-1.2.0.tgz", + "integrity": "sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg==", + "dev": true + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/prebuild-install": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/prebuild-install/-/prebuild-install-7.1.1.tgz", + "integrity": "sha512-jAXscXWMcCK8GgCoHOfIr0ODh5ai8mj63L2nWrjuAgXE6tDyYGnx4/8o/rCgU+B4JSyZBKbeZqzhtwtC3ovxjw==", + "dev": true, + "dependencies": { + "detect-libc": "^2.0.0", + "expand-template": "^2.0.3", + "github-from-package": "0.0.0", + "minimist": "^1.2.3", + "mkdirp-classic": "^0.5.3", + "napi-build-utils": "^1.0.1", + "node-abi": "^3.3.0", + "pump": "^3.0.0", + "rc": "^1.2.7", + "simple-get": "^4.0.0", + "tar-fs": "^2.0.0", + "tunnel-agent": "^0.6.0" + }, + "bin": { + "prebuild-install": "bin.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/prelude-ls": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", + "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", + "dev": true, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/prettier": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.7.1.tgz", + "integrity": 
"sha512-ujppO+MkdPqoVINuDFDRLClm7D78qbDt0/NR+wp5FqEZOoTNAjPHWj17QRhu7geIHJfcNhRk1XVQmF8Bp3ye+g==", + "dev": true, + "bin": { + "prettier": "bin-prettier.js" + }, + "engines": { + "node": ">=10.13.0" + }, + "funding": { + "url": "https://github.com/prettier/prettier?sponsor=1" + } + }, + "node_modules/process-nextick-args": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", + "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", + "dev": true + }, + "node_modules/pump": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", + "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", + "dev": true, + "dependencies": { + "end-of-stream": "^1.1.0", + "once": "^1.3.1" + } + }, + "node_modules/punycode": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", + "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/qs": { + "version": "6.11.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz", + "integrity": "sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==", + "dev": true, + "dependencies": { + "side-channel": "^1.0.4" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/rc": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz", + "integrity": "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==", + "dev": true, + "dependencies": { + "deep-extend": "^0.6.0", + "ini": "~1.3.0", + "minimist": "^1.2.0", + "strip-json-comments": "~2.0.1" + }, + "bin": { + "rc": "cli.js" + } + }, + "node_modules/rc/node_modules/strip-json-comments": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", + "integrity": "sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/read": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/read/-/read-1.0.7.tgz", + "integrity": "sha512-rSOKNYUmaxy0om1BNjMN4ezNT6VKK+2xF4GBhc81mkH7L60i6dp8qPYrkndNLT3QPphoII3maL9PVC9XmhHwVQ==", + "dev": true, + "dependencies": { + "mute-stream": "~0.0.4" + }, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "dev": true, + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": 
"~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "node_modules/regexpp": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-3.2.0.tgz", + "integrity": "sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/mysticatea" + } + }, + "node_modules/require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/resolve-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/reusify": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", + "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", + "dev": true, + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/rimraf": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "dev": true, + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/robust-predicates": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/robust-predicates/-/robust-predicates-3.0.1.tgz", + "integrity": "sha512-ndEIpszUHiG4HtDsQLeIuMvRsDnn8c8rYStabochtUeCvfuvNptb5TUbVD68LRAILPX7p9nqQGh4xJgn3EHS/g==" + }, + "node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, + "node_modules/rw": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/rw/-/rw-1.3.3.tgz", + "integrity": "sha512-PdhdWy89SiZogBLaw42zdeqtRJ//zFd2PgQavcICDUgJT5oW10QCRKbJ6bg4r0/UY2M6BWd5tkxuGFRvCkgfHQ==" + }, + "node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true + }, + "node_modules/safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" + }, + "node_modules/sax": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz", + "integrity": 
"sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==", + "dev": true + }, + "node_modules/semver": { + "version": "7.3.7", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.7.tgz", + "integrity": "sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g==", + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/setimmediate": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/setimmediate/-/setimmediate-1.0.5.tgz", + "integrity": "sha512-MATJdZp8sLqDl/68LfQmbP8zKPLQNV6BIZoIgrscFDQ+RsvK/BxeDQOgyxKKoh0y/8h3BqVFnCqQ/gd+reiIXA==", + "dev": true + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/side-channel": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", + "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.0", + "get-intrinsic": "^1.0.2", + "object-inspect": "^1.9.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/simple-concat": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/simple-concat/-/simple-concat-1.0.1.tgz", + "integrity": "sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/simple-get": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/simple-get/-/simple-get-4.0.1.tgz", + "integrity": "sha512-brv7p5WgH0jmQJr1ZDDfKDOSeWWg+OVypG99A/5vYGPqJ6pxiaHLy8nxtFjBA7oMa01ebA9gfh1uMCFqOuXxvA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "decompress-response": "^6.0.0", + "once": "^1.3.1", + "simple-concat": "^1.0.0" + } + }, + "node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dev": true, + 
"dependencies": { + "safe-buffer": "~5.1.0" + } + }, + "node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/tar-fs": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.1.tgz", + "integrity": "sha512-V0r2Y9scmbDRLCNex/+hYzvp/zyYjvFbHPNgVTKfQvVrb6guiE/fxP+XblDNR011utopbkex2nM4dHNV6GDsng==", + "dev": true, + "dependencies": { + "chownr": "^1.1.1", + "mkdirp-classic": "^0.5.2", + "pump": "^3.0.0", + "tar-stream": "^2.1.4" + } + }, + "node_modules/tar-stream": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-2.2.0.tgz", + "integrity": "sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==", + "dev": true, + "dependencies": { + "bl": "^4.0.3", + "end-of-stream": "^1.4.1", + "fs-constants": "^1.0.0", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/tar-stream/node_modules/readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "dev": true, + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/text-table": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", + "integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==", + "dev": true + }, + "node_modules/tmp": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.1.tgz", + "integrity": "sha512-76SUhtfqR2Ijn+xllcI5P1oyannHNHByD80W1q447gU3mp9G9PSpGdWmjUOHRDPiHYacIk66W7ubDTuPF3BEtQ==", + "dev": true, + "dependencies": { + "rimraf": "^3.0.0" + }, + "engines": { + "node": ">=8.17.0" + } + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": 
"https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/traverse": { + "version": "0.3.9", + "resolved": "https://registry.npmjs.org/traverse/-/traverse-0.3.9.tgz", + "integrity": "sha512-iawgk0hLP3SxGKDfnDJf8wTz4p2qImnyihM5Hh/sGvQ3K37dPi/w8sRhdNIxYA1TwFwc5mDhIJq+O0RsvXBKdQ==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==", + "dev": true + }, + "node_modules/tsutils": { + "version": "3.21.0", + "resolved": "https://registry.npmjs.org/tsutils/-/tsutils-3.21.0.tgz", + "integrity": "sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA==", + "dev": true, + "dependencies": { + "tslib": "^1.8.1" + }, + "engines": { + "node": ">= 6" + }, + "peerDependencies": { + "typescript": ">=2.8.0 || >= 3.2.0-dev || >= 3.3.0-dev || >= 3.4.0-dev || >= 3.5.0-dev || >= 3.6.0-dev || >= 3.6.0-beta || >= 3.7.0-dev || >= 3.7.0-beta" + } + }, + "node_modules/tsutils/node_modules/tslib": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", + "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", + "dev": true + }, + "node_modules/tunnel": { + "version": "0.0.6", + "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz", + "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==", + "dev": true, + "engines": { + "node": ">=0.6.11 <=0.7.0 || >=0.7.3" + } + }, + "node_modules/tunnel-agent": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", + "integrity": "sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==", + "dev": true, + "dependencies": { + "safe-buffer": "^5.0.1" + }, + "engines": { + "node": "*" + } + }, + "node_modules/type-check": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", + "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", + "dev": true, + "dependencies": { + "prelude-ls": "^1.2.1" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/type-fest": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", + "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/typed-rest-client": { + "version": "1.8.9", + "resolved": "https://registry.npmjs.org/typed-rest-client/-/typed-rest-client-1.8.9.tgz", + "integrity": "sha512-uSmjE38B80wjL85UFX3sTYEUlvZ1JgCRhsWj/fJ4rZ0FqDUFoIuodtiVeE+cUqiVTOKPdKrp/sdftD15MDek6g==", + "dev": true, + "dependencies": { + "qs": "^6.9.1", + "tunnel": "0.0.6", + "underscore": "^1.12.1" + } + }, + "node_modules/typescript": { + "version": "4.7.4", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.7.4.tgz", + "integrity": 
"sha512-C0WQT0gezHuw6AdY1M2jxUO83Rjf0HP7Sk1DtXj6j1EwkQNZrHAg2XPWlq62oqEhYvONq5pkC2Y9oPljWToLmQ==", + "dev": true, + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=4.2.0" + } + }, + "node_modules/uc.micro": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-1.0.6.tgz", + "integrity": "sha512-8Y75pvTYkLJW2hWQHXxoqRgV7qb9B+9vFEtidML+7koHUFapnVJAZ6cKs+Qjz5Aw3aZWHMC6u0wJE3At+nSGwA==", + "dev": true + }, + "node_modules/underscore": { + "version": "1.13.4", + "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.13.4.tgz", + "integrity": "sha512-BQFnUDuAQ4Yf/cYY5LNrK9NCJFKriaRbD9uR1fTeXnBeoa97W0i41qkZfGO9pSo8I5KzjAcSY2XYtdf0oKd7KQ==", + "dev": true + }, + "node_modules/unzipper": { + "version": "0.10.11", + "resolved": "https://registry.npmjs.org/unzipper/-/unzipper-0.10.11.tgz", + "integrity": "sha512-+BrAq2oFqWod5IESRjL3S8baohbevGcVA+teAIOYWM3pDVdseogqbzhhvvmiyQrUNKFUnDMtELW3X8ykbyDCJw==", + "dev": true, + "dependencies": { + "big-integer": "^1.6.17", + "binary": "~0.3.0", + "bluebird": "~3.4.1", + "buffer-indexof-polyfill": "~1.0.0", + "duplexer2": "~0.1.4", + "fstream": "^1.0.12", + "graceful-fs": "^4.2.2", + "listenercount": "~1.0.1", + "readable-stream": "~2.3.6", + "setimmediate": "~1.0.4" + } + }, + "node_modules/uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "dev": true, + "dependencies": { + "punycode": "^2.1.0" + } + }, + "node_modules/url-join": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/url-join/-/url-join-4.0.1.tgz", + "integrity": "sha512-jk1+QP6ZJqyOiuEI9AEWQfju/nB2Pw466kbA0LEZljHwKeMgd9WrAEgEGxjPDD2+TNbbb37rTyhEfrCXfuKXnA==", + "dev": true + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", + "dev": true + }, + "node_modules/v8-compile-cache": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz", + "integrity": "sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA==", + "dev": true + }, + "node_modules/vsce": { + "version": "2.9.2", + "resolved": "https://registry.npmjs.org/vsce/-/vsce-2.9.2.tgz", + "integrity": "sha512-xyLqL4U82BilUX1t6Ym2opQEa2tLGWYjbgB7+ETeNVXlIJz5sWBJjQJSYJVFOKJSpiOtQclolu88cj7oY6vvPQ==", + "dev": true, + "dependencies": { + "azure-devops-node-api": "^11.0.1", + "chalk": "^2.4.2", + "cheerio": "^1.0.0-rc.9", + "commander": "^6.1.0", + "glob": "^7.0.6", + "hosted-git-info": "^4.0.2", + "keytar": "^7.7.0", + "leven": "^3.1.0", + "markdown-it": "^12.3.2", + "mime": "^1.3.4", + "minimatch": "^3.0.3", + "parse-semver": "^1.1.1", + "read": "^1.0.7", + "semver": "^5.1.0", + "tmp": "^0.2.1", + "typed-rest-client": "^1.8.4", + "url-join": "^4.0.1", + "xml2js": "^0.4.23", + "yauzl": "^2.3.1", + "yazl": "^2.2.2" + }, + "bin": { + "vsce": "vsce" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/vsce/node_modules/ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dev": true, + "dependencies": 
{ + "color-convert": "^1.9.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/vsce/node_modules/chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, + "dependencies": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/vsce/node_modules/color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dev": true, + "dependencies": { + "color-name": "1.1.3" + } + }, + "node_modules/vsce/node_modules/color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==", + "dev": true + }, + "node_modules/vsce/node_modules/commander": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-6.2.1.tgz", + "integrity": "sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA==", + "dev": true, + "engines": { + "node": ">= 6" + } + }, + "node_modules/vsce/node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "dev": true, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/vsce/node_modules/has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/vsce/node_modules/semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true, + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/vsce/node_modules/supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dev": true, + "dependencies": { + "has-flag": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/vscode-jsonrpc": { + "version": "8.0.0-next.7", + "resolved": "https://registry.npmjs.org/vscode-jsonrpc/-/vscode-jsonrpc-8.0.0-next.7.tgz", + "integrity": "sha512-JX/F31LEsims0dAlOTKFE4E+AJMiJvdRSRViifFJSqSN7EzeYyWlfuDchF7g91oRNPZOIWfibTkDf3/UMsQGzQ==", + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/vscode-languageclient": { + "version": "8.0.0-next.14", + "resolved": "https://registry.npmjs.org/vscode-languageclient/-/vscode-languageclient-8.0.0-next.14.tgz", + "integrity": "sha512-NqjkOuDTMu8uo+PhoMsV72VO9Gd3wBi/ZpOrkRUOrWKQo7yUdiIw183g8wjH8BImgbK9ZP51HM7TI0ZhCnI1Mw==", + "dependencies": { + "minimatch": "^3.0.4", + "semver": "^7.3.5", + "vscode-languageserver-protocol": "3.17.0-next.16" + }, + "engines": { + "vscode": 
"^1.66.0" + } + }, + "node_modules/vscode-languageserver-protocol": { + "version": "3.17.0-next.16", + "resolved": "https://registry.npmjs.org/vscode-languageserver-protocol/-/vscode-languageserver-protocol-3.17.0-next.16.tgz", + "integrity": "sha512-tx4DnXw9u3N7vw+bx6n2NKp6FoxoNwiP/biH83AS30I2AnTGyLd7afSeH6Oewn2E8jvB7K15bs12sMppkKOVeQ==", + "dependencies": { + "vscode-jsonrpc": "8.0.0-next.7", + "vscode-languageserver-types": "3.17.0-next.9" + } + }, + "node_modules/vscode-languageserver-types": { + "version": "3.17.0-next.9", + "resolved": "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.17.0-next.9.tgz", + "integrity": "sha512-9/PeDNPYduaoXRUzYpqmu4ZV9L01HGo0wH9FUt+sSHR7IXwA7xoXBfNUlv8gB9H0D2WwEmMomSy1NmhjKQyn3A==" + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/word-wrap": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", + "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "dev": true + }, + "node_modules/xml2js": { + "version": "0.4.23", + "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.23.tgz", + "integrity": "sha512-ySPiMjM0+pLDftHgXY4By0uswI3SPKLDw/i3UXbnO8M/p28zqexCUoPmQFrYD+/1BzhGJSs2i1ERWKJAtiLrug==", + "dev": true, + "dependencies": { + "sax": ">=0.6.0", + "xmlbuilder": "~11.0.0" + }, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/xmlbuilder": { + "version": "11.0.1", + "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz", + "integrity": "sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==", + "dev": true, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "engines": { + "node": ">=10" + } + }, + "node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + }, + "node_modules/yargs": { + "version": "17.5.1", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.5.1.tgz", + "integrity": 
"sha512-t6YAJcxDkNX7NFYiVtKvWUz8l+PaKTLiL63mJYWR2GnHq2gjEWISzsLp9wg3aY36dY1j+gfIEL3pIF+XlJJfbA==", + "dependencies": { + "cliui": "^7.0.2", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.3", + "y18n": "^5.0.5", + "yargs-parser": "^21.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/yargs-parser": { + "version": "21.0.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.0.1.tgz", + "integrity": "sha512-9BK1jFpLzJROCI5TzwZL/TU4gqjK5xiHV/RfWLOahrjAko/e4DJkRDZQXfvqAsiZzzYhgAzbgz6lg48jcm4GLg==", + "engines": { + "node": ">=12" + } + }, + "node_modules/yauzl": { + "version": "2.10.0", + "resolved": "https://registry.npmjs.org/yauzl/-/yauzl-2.10.0.tgz", + "integrity": "sha512-p4a9I6X6nu6IhoGmBqAcbJy1mlC4j27vEPZX9F4L4/vZT3Lyq1VkFHw/V/PUcB9Buo+DG3iHkT0x3Qya58zc3g==", + "dev": true, + "dependencies": { + "buffer-crc32": "~0.2.3", + "fd-slicer": "~1.1.0" + } + }, + "node_modules/yazl": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/yazl/-/yazl-2.5.1.tgz", + "integrity": "sha512-phENi2PLiHnHb6QBVot+dJnaAZ0xosj7p3fWl+znIjBDlnMI2PsZCJZ306BPTFOaHf5qdDEI8x5qFrSOBN5vrw==", + "dev": true, + "dependencies": { + "buffer-crc32": "~0.2.3" + } + } + }, "dependencies": { "@eslint/eslintrc": { "version": "1.3.0", @@ -213,7 +4161,8 @@ "version": "5.3.2", "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", - "dev": true + "dev": true, + "requires": {} }, "agent-base": { "version": "6.0.2", @@ -1278,7 +5227,8 @@ "version": "8.5.0", "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-8.5.0.tgz", "integrity": "sha512-obmWKLUNCnhtQRKc+tmnYuQl0pFU1ibYJQ5BGhTVB08bHe9wC8qUeG7c08dj9XX+AuPj1YSGSQIHl1pnDHZR0Q==", - "dev": true + "dev": true, + "requires": {} }, "eslint-scope": { "version": "5.1.1", @@ -2348,6 +6298,15 @@ "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", "dev": true }, + "string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dev": true, + "requires": { + "safe-buffer": "~5.1.0" + } + }, "string-width": { "version": "4.2.3", "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", @@ -2358,15 +6317,6 @@ "strip-ansi": "^6.0.1" } }, - "string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", - "dev": true, - "requires": { - "safe-buffer": "~5.1.0" - } - }, "strip-ansi": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", From fd58459373c285df53e6254782e83d70440f192e Mon Sep 17 00:00:00 2001 From: shoffmeister Date: Thu, 11 Aug 2022 13:36:41 +0200 Subject: [PATCH 0125/1945] Take into account renamed extension id when launching Signed-off-by: Stefan Hoffmeister --- .vscode/launch.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.vscode/launch.json b/.vscode/launch.json index 021b8f048c..1e21214ffc 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -78,7 +78,7 @@ "request": "launch", 
"runtimeExecutable": "${execPath}", "args": [ - "--disable-extension", "matklad.rust-analyzer", + "--disable-extension", "rust-lang.rust-analyzer", "--extensionDevelopmentPath=${workspaceFolder}/editors/code" ], "outFiles": [ From 72ae308c73a2d9d71715ec35ba1f644989338761 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sat, 13 Aug 2022 12:13:48 +0200 Subject: [PATCH 0126/1945] Do not unconditionally succeed RUSTC_WRAPPER checks when run by build scripts rust-analyzer's RUSTC_WRAPPER unconditionally succeeds `cargo check` invocations tripping up build scripts using `cargo check` to probe for successful compilations. To prevent this from happening the RUSTC_WRAPPER now checks if it's run from a build script by looking for the `CARGO_CFG_TARGET_ARCH` env var that cargo sets only when running build scripts. --- crates/rust-analyzer/src/bin/rustc_wrapper.rs | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/crates/rust-analyzer/src/bin/rustc_wrapper.rs b/crates/rust-analyzer/src/bin/rustc_wrapper.rs index 2f6d4706d8..38e9c7dd7e 100644 --- a/crates/rust-analyzer/src/bin/rustc_wrapper.rs +++ b/crates/rust-analyzer/src/bin/rustc_wrapper.rs @@ -17,6 +17,11 @@ pub(crate) fn run_rustc_skipping_cargo_checking( rustc_executable: OsString, args: Vec, ) -> io::Result { + // `CARGO_CFG_TARGET_ARCH` is only set by cargo when executing build scripts + // We don't want to exit out checks unconditionally with success if a build + // script tries to invoke checks themselves + // See https://github.com/rust-lang/rust-analyzer/issues/12973 for context + let not_invoked_by_build_script = std::env::var_os("CARGO_CFG_TARGET_ARCH").is_none(); let is_cargo_check = args.iter().any(|arg| { let arg = arg.to_string_lossy(); // `cargo check` invokes `rustc` with `--emit=metadata` argument. @@ -29,7 +34,7 @@ pub(crate) fn run_rustc_skipping_cargo_checking( // The default output filename is CRATE_NAME.rmeta. 
arg.starts_with("--emit=") && arg.contains("metadata") && !arg.contains("link") }); - if is_cargo_check { + if not_invoked_by_build_script && is_cargo_check { return Ok(ExitCode(Some(0))); } run_rustc(rustc_executable, args) From 038c36a1f5195d96d9247529d9cc1e14f00c8c17 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sat, 13 Aug 2022 20:03:06 +0200 Subject: [PATCH 0127/1945] Simplify `GlobalState::handle_event` --- crates/rust-analyzer/src/main_loop.rs | 381 +++++++++++++------------- 1 file changed, 192 insertions(+), 189 deletions(-) diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs index b504c24878..92b6be22e7 100644 --- a/crates/rust-analyzer/src/main_loop.rs +++ b/crates/rust-analyzer/src/main_loop.rs @@ -9,6 +9,7 @@ use std::{ use always_assert::always; use crossbeam_channel::{select, Receiver}; +use flycheck::FlycheckHandle; use ide_db::base_db::{SourceDatabase, SourceDatabaseExt, VfsPath}; use itertools::Itertools; use lsp_server::{Connection, Notification, Request}; @@ -205,81 +206,14 @@ impl GlobalState { } lsp_server::Message::Response(resp) => self.complete_request(resp), }, - Event::Task(mut task) => { + Event::Task(task) => { let _p = profile::span("GlobalState::handle_event/task"); let mut prime_caches_progress = Vec::new(); - loop { - match task { - Task::Response(response) => self.respond(response), - Task::Retry(req) => self.on_request(req), - Task::Diagnostics(diagnostics_per_file) => { - for (file_id, diagnostics) in diagnostics_per_file { - self.diagnostics.set_native_diagnostics(file_id, diagnostics) - } - } - Task::PrimeCaches(progress) => match progress { - PrimeCachesProgress::Begin => prime_caches_progress.push(progress), - PrimeCachesProgress::Report(_) => { - match prime_caches_progress.last_mut() { - Some(last @ PrimeCachesProgress::Report(_)) => { - // Coalesce subsequent update events. - *last = progress; - } - _ => prime_caches_progress.push(progress), - } - } - PrimeCachesProgress::End { .. 
} => prime_caches_progress.push(progress), - }, - Task::FetchWorkspace(progress) => { - let (state, msg) = match progress { - ProjectWorkspaceProgress::Begin => (Progress::Begin, None), - ProjectWorkspaceProgress::Report(msg) => { - (Progress::Report, Some(msg)) - } - ProjectWorkspaceProgress::End(workspaces) => { - self.fetch_workspaces_queue.op_completed(workspaces); - let old = Arc::clone(&self.workspaces); - self.switch_workspaces("fetched workspace".to_string()); - let workspaces_updated = !Arc::ptr_eq(&old, &self.workspaces); - - if self.config.run_build_scripts() && workspaces_updated { - self.fetch_build_data_queue - .request_op(format!("workspace updated")); - } - - (Progress::End, None) - } - }; - - self.report_progress("Fetching", state, msg, None); - } - Task::FetchBuildData(progress) => { - let (state, msg) = match progress { - BuildDataProgress::Begin => (Some(Progress::Begin), None), - BuildDataProgress::Report(msg) => { - (Some(Progress::Report), Some(msg)) - } - BuildDataProgress::End(build_data_result) => { - self.fetch_build_data_queue.op_completed(build_data_result); - - self.switch_workspaces("fetched build data".to_string()); - - (Some(Progress::End), None) - } - }; - - if let Some(state) = state { - self.report_progress("Loading", state, msg, None); - } - } - } - - // Coalesce multiple task events into one loop turn - task = match self.task_pool.receiver.try_recv() { - Ok(task) => task, - Err(_) => break, - }; + self.handle_task(&mut prime_caches_progress, task); + // Coalesce multiple task events into one loop turn + while let Ok(task) = self.task_pool.receiver.try_recv() { + self.handle_task(&mut prime_caches_progress, task); } for progress in prime_caches_progress { @@ -326,119 +260,20 @@ impl GlobalState { self.report_progress("Indexing", state, message, Some(fraction)); } } - Event::Vfs(mut task) => { + Event::Vfs(message) => { let _p = profile::span("GlobalState::handle_event/vfs"); - loop { - match task { - vfs::loader::Message::Loaded { files } => { - let vfs = &mut self.vfs.write().0; - for (path, contents) in files { - let path = VfsPath::from(path); - if !self.mem_docs.contains(&path) { - vfs.set_file_contents(path, contents); - } - } - } - vfs::loader::Message::Progress { n_total, n_done, config_version } => { - always!(config_version <= self.vfs_config_version); - - self.vfs_progress_config_version = config_version; - self.vfs_progress_n_total = n_total; - self.vfs_progress_n_done = n_done; - - let state = if n_done == 0 { - Progress::Begin - } else if n_done < n_total { - Progress::Report - } else { - assert_eq!(n_done, n_total); - Progress::End - }; - self.report_progress( - "Roots Scanned", - state, - Some(format!("{}/{}", n_done, n_total)), - Some(Progress::fraction(n_done, n_total)), - ) - } - } - // Coalesce many VFS event into a single loop turn - task = match self.loader.receiver.try_recv() { - Ok(task) => task, - Err(_) => break, - } + self.handle_vfs_msg(message); + // Coalesce many VFS event into a single loop turn + while let Ok(message) = self.loader.receiver.try_recv() { + self.handle_vfs_msg(message); } } - Event::Flycheck(mut task) => { + Event::Flycheck(message) => { let _p = profile::span("GlobalState::handle_event/flycheck"); - loop { - match task { - flycheck::Message::AddDiagnostic { id, workspace_root, diagnostic } => { - let snap = self.snapshot(); - let diagnostics = - crate::diagnostics::to_proto::map_rust_diagnostic_to_lsp( - &self.config.diagnostics_map(), - &diagnostic, - &workspace_root, - &snap, - ); - for diag in 
diagnostics { - match url_to_file_id(&self.vfs.read().0, &diag.url) { - Ok(file_id) => self.diagnostics.add_check_diagnostic( - id, - file_id, - diag.diagnostic, - diag.fix, - ), - Err(err) => { - tracing::error!( - "File with cargo diagnostic not found in VFS: {}", - err - ); - } - }; - } - } - - flycheck::Message::Progress { id, progress } => { - let (state, message) = match progress { - flycheck::Progress::DidStart => { - self.diagnostics.clear_check(id); - (Progress::Begin, None) - } - flycheck::Progress::DidCheckCrate(target) => { - (Progress::Report, Some(target)) - } - flycheck::Progress::DidCancel => (Progress::End, None), - flycheck::Progress::DidFinish(result) => { - if let Err(err) = result { - self.show_and_log_error( - "cargo check failed".to_string(), - Some(err.to_string()), - ); - } - (Progress::End, None) - } - }; - - // When we're running multiple flychecks, we have to include a disambiguator in - // the title, or the editor complains. Note that this is a user-facing string. - let title = if self.flycheck.len() == 1 { - match self.config.flycheck() { - Some(config) => format!("{}", config), - None => "cargo check".to_string(), - } - } else { - format!("cargo check (#{})", id + 1) - }; - self.report_progress(&title, state, message, None); - } - } - // Coalesce many flycheck updates into a single loop turn - task = match self.flycheck_receiver.try_recv() { - Ok(task) => task, - Err(_) => break, - } + self.handle_flycheck_msg(message); + // Coalesce many flycheck updates into a single loop turn + while let Ok(message) = self.flycheck_receiver.try_recv() { + self.handle_flycheck_msg(message); } } } @@ -447,13 +282,13 @@ impl GlobalState { let memdocs_added_or_removed = self.mem_docs.take_changes(); if self.is_quiescent() { - if !was_quiescent - && !self.fetch_workspaces_queue.op_requested() - && !self.fetch_build_data_queue.op_requested() - { - for flycheck in &self.flycheck { - flycheck.update(); - } + let became_quiescent = !(was_quiescent + || self.fetch_workspaces_queue.op_requested() + || self.fetch_build_data_queue.op_requested()); + + if became_quiescent { + // Project has loaded properly, kick off initial flycheck + self.flycheck.iter().for_each(FlycheckHandle::update); if self.config.prefill_caches() { self.prime_caches_queue.request_op("became quiescent".to_string()); } @@ -495,8 +330,9 @@ impl GlobalState { let url = file_id_to_url(&self.vfs.read().0, file_id); let mut diagnostics = self.diagnostics.diagnostics_for(file_id).cloned().collect::>(); - // https://github.com/rust-lang/rust-analyzer/issues/11404 for d in &mut diagnostics { + // https://github.com/rust-lang/rust-analyzer/issues/11404 + // FIXME: We should move this workaround into the client code if d.message.is_empty() { d.message = " ".to_string(); } @@ -575,11 +411,171 @@ impl GlobalState { Ok(()) } + fn handle_task(&mut self, prime_caches_progress: &mut Vec, task: Task) { + match task { + Task::Response(response) => self.respond(response), + Task::Retry(req) => self.on_request(req), + Task::Diagnostics(diagnostics_per_file) => { + for (file_id, diagnostics) in diagnostics_per_file { + self.diagnostics.set_native_diagnostics(file_id, diagnostics) + } + } + Task::PrimeCaches(progress) => match progress { + PrimeCachesProgress::Begin => prime_caches_progress.push(progress), + PrimeCachesProgress::Report(_) => { + match prime_caches_progress.last_mut() { + Some(last @ PrimeCachesProgress::Report(_)) => { + // Coalesce subsequent update events. 
+ *last = progress; + } + _ => prime_caches_progress.push(progress), + } + } + PrimeCachesProgress::End { .. } => prime_caches_progress.push(progress), + }, + Task::FetchWorkspace(progress) => { + let (state, msg) = match progress { + ProjectWorkspaceProgress::Begin => (Progress::Begin, None), + ProjectWorkspaceProgress::Report(msg) => (Progress::Report, Some(msg)), + ProjectWorkspaceProgress::End(workspaces) => { + self.fetch_workspaces_queue.op_completed(workspaces); + + let old = Arc::clone(&self.workspaces); + self.switch_workspaces("fetched workspace".to_string()); + let workspaces_updated = !Arc::ptr_eq(&old, &self.workspaces); + + if self.config.run_build_scripts() && workspaces_updated { + self.fetch_build_data_queue.request_op(format!("workspace updated")); + } + + (Progress::End, None) + } + }; + + self.report_progress("Fetching", state, msg, None); + } + Task::FetchBuildData(progress) => { + let (state, msg) = match progress { + BuildDataProgress::Begin => (Some(Progress::Begin), None), + BuildDataProgress::Report(msg) => (Some(Progress::Report), Some(msg)), + BuildDataProgress::End(build_data_result) => { + self.fetch_build_data_queue.op_completed(build_data_result); + + self.switch_workspaces("fetched build data".to_string()); + + (Some(Progress::End), None) + } + }; + + if let Some(state) = state { + self.report_progress("Loading", state, msg, None); + } + } + } + } + + fn handle_vfs_msg(&mut self, message: vfs::loader::Message) { + match message { + vfs::loader::Message::Loaded { files } => { + let vfs = &mut self.vfs.write().0; + for (path, contents) in files { + let path = VfsPath::from(path); + if !self.mem_docs.contains(&path) { + vfs.set_file_contents(path, contents); + } + } + } + vfs::loader::Message::Progress { n_total, n_done, config_version } => { + always!(config_version <= self.vfs_config_version); + + self.vfs_progress_config_version = config_version; + self.vfs_progress_n_total = n_total; + self.vfs_progress_n_done = n_done; + + let state = if n_done == 0 { + Progress::Begin + } else if n_done < n_total { + Progress::Report + } else { + assert_eq!(n_done, n_total); + Progress::End + }; + self.report_progress( + "Roots Scanned", + state, + Some(format!("{}/{}", n_done, n_total)), + Some(Progress::fraction(n_done, n_total)), + ) + } + } + } + + fn handle_flycheck_msg(&mut self, message: flycheck::Message) { + match message { + flycheck::Message::AddDiagnostic { id, workspace_root, diagnostic } => { + let snap = self.snapshot(); + let diagnostics = crate::diagnostics::to_proto::map_rust_diagnostic_to_lsp( + &self.config.diagnostics_map(), + &diagnostic, + &workspace_root, + &snap, + ); + for diag in diagnostics { + match url_to_file_id(&self.vfs.read().0, &diag.url) { + Ok(file_id) => self.diagnostics.add_check_diagnostic( + id, + file_id, + diag.diagnostic, + diag.fix, + ), + Err(err) => { + tracing::error!("File with cargo diagnostic not found in VFS: {}", err); + } + }; + } + } + + flycheck::Message::Progress { id, progress } => { + let (state, message) = match progress { + flycheck::Progress::DidStart => { + self.diagnostics.clear_check(id); + (Progress::Begin, None) + } + flycheck::Progress::DidCheckCrate(target) => (Progress::Report, Some(target)), + flycheck::Progress::DidCancel => (Progress::End, None), + flycheck::Progress::DidFinish(result) => { + if let Err(err) = result { + self.show_and_log_error( + "cargo check failed".to_string(), + Some(err.to_string()), + ); + } + (Progress::End, None) + } + }; + + // When we're running multiple flychecks, we have 
to include a disambiguator in
+                // the title, or the editor complains. Note that this is a user-facing string.
+                let title = if self.flycheck.len() == 1 {
+                    match self.config.flycheck() {
+                        Some(config) => format!("{}", config),
+                        None => "cargo check".to_string(),
+                    }
+                } else {
+                    format!("cargo check (#{})", id + 1)
+                };
+                self.report_progress(&title, state, message, None);
+            }
+        }
+    }
+
+    /// Registers and handles a request. This should only be called once per incoming request.
     fn on_new_request(&mut self, request_received: Instant, req: Request) {
         self.register_request(&req, request_received);
         self.on_request(req);
     }

+    /// Handles a request.
     fn on_request(&mut self, req: Request) {
         if self.shutdown_requested {
             self.respond(lsp_server::Response::new_err(
@@ -670,6 +666,7 @@ impl GlobalState {
             .finish();
     }

+    /// Handles an incoming notification.
     fn on_notification(&mut self, not: Notification) -> Result<()> {
         NotificationDispatcher { not: Some(not), global_state: self }
             .on::(|this, params| {
@@ -743,6 +740,8 @@ impl GlobalState {
                 let mut updated = false;
                 if let Ok(vfs_path) = from_proto::vfs_path(&params.text_document.uri) {
                     let (vfs, _) = &*this.vfs.read();
+
+                    // Trigger flychecks for all workspaces that depend on the saved file
                     if let Some(file_id) = vfs.file_id(&vfs_path) {
                         let analysis = this.analysis_host.analysis();
                         // Crates containing or depending on the saved file
@@ -800,6 +799,8 @@ impl GlobalState {
                         }
                     }
                 }
+
+                // Re-fetch workspaces if a workspace related file has changed
                 if let Some(abs_path) = vfs_path.as_path() {
                     if reload::should_refresh_for_change(&abs_path, ChangeKind::Modify) {
                         this.fetch_workspaces_queue
@@ -807,6 +808,8 @@ impl GlobalState {
                     }
                 }
+
+                // No specific flycheck was triggered, so let's trigger all of them.
                 if !updated {
                     for flycheck in &this.flycheck {
                         flycheck.update();
From ec8256dd8027bbe5493698e4b9125f1f0cf8c646 Mon Sep 17 00:00:00 2001
From: Lukas Wirth
Date: Sat, 13 Aug 2022 20:18:21 +0200
Subject: [PATCH 0128/1945] Move VSCode diagnostics workaround into client code

---
 crates/rust-analyzer/src/handlers.rs  |  3 +--
 crates/rust-analyzer/src/main_loop.rs | 22 ++++------------------
 editors/code/src/client.ts            |  9 +++++++++
 3 files changed, 14 insertions(+), 20 deletions(-)

diff --git a/crates/rust-analyzer/src/handlers.rs b/crates/rust-analyzer/src/handlers.rs
index 47daa732d5..943d043bc1 100644
--- a/crates/rust-analyzer/src/handlers.rs
+++ b/crates/rust-analyzer/src/handlers.rs
@@ -1320,8 +1320,7 @@ pub(crate) fn publish_diagnostics(
                 .unwrap(),
             }),
             source: Some("rust-analyzer".to_string()),
-            // https://github.com/rust-lang/rust-analyzer/issues/11404
-            message: if !d.message.is_empty() { d.message } else { " ".to_string() },
+            message: d.message,
             related_information: None,
             tags: if d.unused { Some(vec![DiagnosticTag::UNNECESSARY]) } else { None },
             data: None,
diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs
index 92b6be22e7..7741999824 100644
--- a/crates/rust-analyzer/src/main_loop.rs
+++ b/crates/rust-analyzer/src/main_loop.rs
@@ -327,29 +327,15 @@ impl GlobalState {
                     continue;
                 }

-                let url = file_id_to_url(&self.vfs.read().0, file_id);
-                let mut diagnostics =
+                let uri = file_id_to_url(&self.vfs.read().0, file_id);
+                let diagnostics =
                     self.diagnostics.diagnostics_for(file_id).cloned().collect::>();
-                for d in &mut diagnostics {
-                    // https://github.com/rust-lang/rust-analyzer/issues/11404
-                    // FIXME: We should move this workaround into the client code
-                    if d.message.is_empty() {
-                        d.message = " ".to_string();
-
} - if let Some(rds) = d.related_information.as_mut() { - for rd in rds { - if rd.message.is_empty() { - rd.message = " ".to_string(); - } - } - } - } - let version = from_proto::vfs_path(&url) + let version = from_proto::vfs_path(&uri) .map(|path| self.mem_docs.get(&path).map(|it| it.version)) .unwrap_or_default(); self.send_notification::( - lsp_types::PublishDiagnosticsParams { uri: url, diagnostics, version }, + lsp_types::PublishDiagnosticsParams { uri, diagnostics, version }, ); } } diff --git a/editors/code/src/client.ts b/editors/code/src/client.ts index 8a2dea6b35..40ba17844b 100644 --- a/editors/code/src/client.ts +++ b/editors/code/src/client.ts @@ -105,6 +105,15 @@ export async function createClient( traceOutputChannel: traceOutputChannel(), outputChannel: outputChannel(), middleware: { + async handleDiagnostics(uri, diagnostics, next) { + // Workaround for https://github.com/microsoft/vscode/issues/155531 + for (const diagnostic of diagnostics) { + if (!diagnostic.message) { + diagnostic.message = " "; + } + } + next(uri, diagnostics); + }, async provideHover( document: vscode.TextDocument, position: vscode.Position, From 614969baa770360b711881a3d9776d60937c98db Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sat, 13 Aug 2022 20:49:00 +0200 Subject: [PATCH 0129/1945] Pad empty diagnostic messages in relatedInformation as well --- editors/code/src/client.ts | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/editors/code/src/client.ts b/editors/code/src/client.ts index 40ba17844b..27ab31db8d 100644 --- a/editors/code/src/client.ts +++ b/editors/code/src/client.ts @@ -111,6 +111,13 @@ export async function createClient( if (!diagnostic.message) { diagnostic.message = " "; } + if (diagnostic.relatedInformation) { + for (const relatedInformation of diagnostic.relatedInformation) { + if (!relatedInformation.message) { + relatedInformation.message = " "; + } + } + } } next(uri, diagnostics); }, From c61237b2b293d6f62270f6047ca11306f6871c39 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Sun, 14 Aug 2022 20:52:43 +0300 Subject: [PATCH 0130/1945] Remove redundant --pre-release flag from publish --- .github/workflows/release.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index d5d8df2345..3c36c4fb84 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -253,9 +253,9 @@ jobs: - name: Publish Extension (Code Marketplace, nightly) if: github.ref != 'refs/heads/release' && (github.repository == 'rust-analyzer/rust-analyzer' || github.repository == 'rust-lang/rust-analyzer') working-directory: ./editors/code - run: npx vsce publish --pat ${{ secrets.MARKETPLACE_TOKEN }} --packagePath ../../dist/rust-analyzer-*.vsix --pre-release + run: npx vsce publish --pat ${{ secrets.MARKETPLACE_TOKEN }} --packagePath ../../dist/rust-analyzer-*.vsix - name: Publish Extension (OpenVSX, nightly) if: github.ref != 'refs/heads/release' && (github.repository == 'rust-analyzer/rust-analyzer' || github.repository == 'rust-lang/rust-analyzer') working-directory: ./editors/code - run: npx ovsx publish --pat ${{ secrets.OPENVSX_TOKEN }} --packagePath ../../dist/rust-analyzer-*.vsix --pre-release + run: npx ovsx publish --pat ${{ secrets.OPENVSX_TOKEN }} --packagePath ../../dist/rust-analyzer-*.vsix From dcbe892d7ceec113a148a89bc6d1c339c55586d1 Mon Sep 17 00:00:00 2001 From: Jonas Schievink Date: Mon, 15 Aug 2022 13:51:45 +0200 Subject: [PATCH 0131/1945] Add an HIR 
pretty-printer --- crates/hir-def/src/body.rs | 5 + crates/hir-def/src/body/pretty.rs | 621 +++++++++++++++++++++++++ crates/hir-def/src/builtin_type.rs | 35 ++ crates/hir-def/src/expr.rs | 6 +- crates/hir-def/src/item_tree/pretty.rs | 179 +------ crates/hir-def/src/item_tree/tests.rs | 12 +- crates/hir-def/src/lib.rs | 1 + crates/hir-def/src/pretty.rs | 209 +++++++++ crates/hir-def/src/type_ref.rs | 4 + crates/hir/src/lib.rs | 29 +- crates/ide/src/view_hir.rs | 14 +- editors/code/src/commands.ts | 2 +- 12 files changed, 914 insertions(+), 203 deletions(-) create mode 100644 crates/hir-def/src/body/pretty.rs create mode 100644 crates/hir-def/src/pretty.rs diff --git a/crates/hir-def/src/body.rs b/crates/hir-def/src/body.rs index 080a307b1f..1d818d9626 100644 --- a/crates/hir-def/src/body.rs +++ b/crates/hir-def/src/body.rs @@ -4,6 +4,7 @@ mod lower; #[cfg(test)] mod tests; pub mod scope; +mod pretty; use std::{ops::Index, sync::Arc}; @@ -352,6 +353,10 @@ impl Body { } } + pub fn pretty_print(&self, db: &dyn DefDatabase, owner: DefWithBodyId) -> String { + pretty::print_body_hir(db, self, owner) + } + fn new( db: &dyn DefDatabase, expander: Expander, diff --git a/crates/hir-def/src/body/pretty.rs b/crates/hir-def/src/body/pretty.rs new file mode 100644 index 0000000000..ddd476efe5 --- /dev/null +++ b/crates/hir-def/src/body/pretty.rs @@ -0,0 +1,621 @@ +//! A pretty-printer for HIR. + +use std::fmt::{self, Write}; + +use crate::{ + expr::{Array, BindingAnnotation, Literal, Statement}, + pretty::{print_generic_args, print_path, print_type_ref}, + type_ref::TypeRef, +}; + +use super::*; + +pub(super) fn print_body_hir(db: &dyn DefDatabase, body: &Body, owner: DefWithBodyId) -> String { + let needs_semi; + let header = match owner { + DefWithBodyId::FunctionId(it) => { + needs_semi = false; + let item_tree_id = it.lookup(db).id; + format!("fn {}(…) ", item_tree_id.item_tree(db)[item_tree_id.value].name) + } + DefWithBodyId::StaticId(it) => { + needs_semi = true; + let item_tree_id = it.lookup(db).id; + format!("static {} = ", item_tree_id.item_tree(db)[item_tree_id.value].name) + } + DefWithBodyId::ConstId(it) => { + needs_semi = true; + let item_tree_id = it.lookup(db).id; + let name = match &item_tree_id.item_tree(db)[item_tree_id.value].name { + Some(name) => name.to_string(), + None => "_".to_string(), + }; + format!("const {} = ", name) + } + }; + + let mut p = Printer { body, buf: header, indent_level: 0, needs_indent: false }; + p.print_expr(body.body_expr); + if needs_semi { + p.buf.push(';'); + } + p.buf +} + +macro_rules! w { + ($dst:expr, $($arg:tt)*) => { + { let _ = write!($dst, $($arg)*); } + }; +} + +macro_rules! 
wln { + ($dst:expr) => { + { let _ = writeln!($dst); } + }; + ($dst:expr, $($arg:tt)*) => { + { let _ = writeln!($dst, $($arg)*); } + }; +} + +struct Printer<'a> { + body: &'a Body, + buf: String, + indent_level: usize, + needs_indent: bool, +} + +impl<'a> Write for Printer<'a> { + fn write_str(&mut self, s: &str) -> fmt::Result { + for line in s.split_inclusive('\n') { + if self.needs_indent { + match self.buf.chars().rev().skip_while(|ch| *ch == ' ').next() { + Some('\n') | None => {} + _ => self.buf.push('\n'), + } + self.buf.push_str(&" ".repeat(self.indent_level)); + self.needs_indent = false; + } + + self.buf.push_str(line); + self.needs_indent = line.ends_with('\n'); + } + + Ok(()) + } +} + +impl<'a> Printer<'a> { + fn indented(&mut self, f: impl FnOnce(&mut Self)) { + self.indent_level += 1; + wln!(self); + f(self); + self.indent_level -= 1; + self.buf = self.buf.trim_end_matches('\n').to_string(); + } + + fn whitespace(&mut self) { + match self.buf.chars().next_back() { + None | Some('\n' | ' ') => {} + _ => self.buf.push(' '), + } + } + + fn newline(&mut self) { + match self.buf.chars().rev().skip_while(|ch| *ch == ' ').next() { + Some('\n') | None => {} + _ => writeln!(self).unwrap(), + } + } + + fn print_expr(&mut self, expr: ExprId) { + let expr = &self.body[expr]; + + match expr { + Expr::Missing => w!(self, "�"), + Expr::Underscore => w!(self, "_"), + Expr::Path(path) => self.print_path(path), + Expr::If { condition, then_branch, else_branch } => { + w!(self, "if "); + self.print_expr(*condition); + w!(self, " "); + self.print_expr(*then_branch); + if let Some(els) = *else_branch { + w!(self, " else "); + self.print_expr(els); + } + } + Expr::Let { pat, expr } => { + w!(self, "let "); + self.print_pat(*pat); + w!(self, " = "); + self.print_expr(*expr); + } + Expr::Loop { body, label } => { + if let Some(lbl) = label { + w!(self, "{}: ", self.body[*lbl].name); + } + w!(self, "loop "); + self.print_expr(*body); + } + Expr::While { condition, body, label } => { + if let Some(lbl) = label { + w!(self, "{}: ", self.body[*lbl].name); + } + w!(self, "while "); + self.print_expr(*condition); + self.print_expr(*body); + } + Expr::For { iterable, pat, body, label } => { + if let Some(lbl) = label { + w!(self, "{}: ", self.body[*lbl].name); + } + w!(self, "for "); + self.print_pat(*pat); + w!(self, " in "); + self.print_expr(*iterable); + self.print_expr(*body); + } + Expr::Call { callee, args, is_assignee_expr: _ } => { + self.print_expr(*callee); + w!(self, "("); + if !args.is_empty() { + self.indented(|p| { + for arg in &**args { + p.print_expr(*arg); + wln!(p, ","); + } + }); + } + w!(self, ")"); + } + Expr::MethodCall { receiver, method_name, args, generic_args } => { + self.print_expr(*receiver); + w!(self, ".{}", method_name); + if let Some(args) = generic_args { + w!(self, "::<"); + print_generic_args(args, self).unwrap(); + w!(self, ">"); + } + w!(self, "("); + if !args.is_empty() { + self.indented(|p| { + for arg in &**args { + p.print_expr(*arg); + wln!(p, ","); + } + }); + } + w!(self, ")"); + } + Expr::Match { expr, arms } => { + w!(self, "match "); + self.print_expr(*expr); + w!(self, " {{"); + self.indented(|p| { + for arm in &**arms { + p.print_pat(arm.pat); + if let Some(guard) = arm.guard { + w!(p, " if "); + p.print_expr(guard); + } + w!(p, " => "); + p.print_expr(arm.expr); + wln!(p, ","); + } + }); + wln!(self, "}}"); + } + Expr::Continue { label } => { + w!(self, "continue"); + if let Some(label) = label { + w!(self, " {}", label); + } + } + Expr::Break { expr, 
label } => { + w!(self, "break"); + if let Some(label) = label { + w!(self, " {}", label); + } + if let Some(expr) = expr { + self.whitespace(); + self.print_expr(*expr); + } + } + Expr::Return { expr } => { + w!(self, "return"); + if let Some(expr) = expr { + self.whitespace(); + self.print_expr(*expr); + } + } + Expr::Yield { expr } => { + w!(self, "yield"); + if let Some(expr) = expr { + self.whitespace(); + self.print_expr(*expr); + } + } + Expr::RecordLit { path, fields, spread, ellipsis, is_assignee_expr: _ } => { + match path { + Some(path) => self.print_path(path), + None => w!(self, "�"), + } + + w!(self, "{{"); + self.indented(|p| { + for field in &**fields { + w!(p, "{}: ", field.name); + p.print_expr(field.expr); + wln!(p, ","); + } + if let Some(spread) = spread { + w!(p, ".."); + p.print_expr(*spread); + wln!(p); + } + if *ellipsis { + wln!(p, ".."); + } + }); + w!(self, "}}"); + } + Expr::Field { expr, name } => { + self.print_expr(*expr); + w!(self, ".{}", name); + } + Expr::Await { expr } => { + self.print_expr(*expr); + w!(self, ".await"); + } + Expr::Try { expr } => { + self.print_expr(*expr); + w!(self, "?"); + } + Expr::TryBlock { body } => { + w!(self, "try "); + self.print_expr(*body); + } + Expr::Async { body } => { + w!(self, "async "); + self.print_expr(*body); + } + Expr::Const { body } => { + w!(self, "const "); + self.print_expr(*body); + } + Expr::Cast { expr, type_ref } => { + self.print_expr(*expr); + w!(self, " as "); + self.print_type_ref(type_ref); + } + Expr::Ref { expr, rawness, mutability } => { + w!(self, "&"); + if rawness.is_raw() { + w!(self, "raw "); + } + if mutability.is_mut() { + w!(self, "mut "); + } + self.print_expr(*expr); + } + Expr::Box { expr } => { + w!(self, "box "); + self.print_expr(*expr); + } + Expr::UnaryOp { expr, op } => { + let op = match op { + ast::UnaryOp::Deref => "*", + ast::UnaryOp::Not => "!", + ast::UnaryOp::Neg => "-", + }; + w!(self, "{}", op); + self.print_expr(*expr); + } + Expr::BinaryOp { lhs, rhs, op } => { + let (bra, ket) = match op { + None | Some(ast::BinaryOp::Assignment { .. 
}) => ("", ""), + _ => ("(", ")"), + }; + w!(self, "{}", bra); + self.print_expr(*lhs); + w!(self, "{} ", ket); + match op { + Some(op) => w!(self, "{}", op), + None => w!(self, "�"), // :) + } + w!(self, " {}", bra); + self.print_expr(*rhs); + w!(self, "{}", ket); + } + Expr::Range { lhs, rhs, range_type } => { + if let Some(lhs) = lhs { + w!(self, "("); + self.print_expr(*lhs); + w!(self, ") "); + } + let range = match range_type { + ast::RangeOp::Exclusive => "..", + ast::RangeOp::Inclusive => "..=", + }; + w!(self, "{}", range); + if let Some(rhs) = rhs { + w!(self, "("); + self.print_expr(*rhs); + w!(self, ") "); + } + } + Expr::Index { base, index } => { + self.print_expr(*base); + w!(self, "["); + self.print_expr(*index); + w!(self, "]"); + } + Expr::Closure { args, arg_types, ret_type, body } => { + w!(self, "|"); + for (i, (pat, ty)) in args.iter().zip(arg_types.iter()).enumerate() { + if i != 0 { + w!(self, ", "); + } + self.print_pat(*pat); + if let Some(ty) = ty { + w!(self, ": "); + self.print_type_ref(ty); + } + } + w!(self, "|"); + if let Some(ret_ty) = ret_type { + w!(self, " -> "); + self.print_type_ref(ret_ty); + } + self.whitespace(); + self.print_expr(*body); + } + Expr::Tuple { exprs, is_assignee_expr: _ } => { + w!(self, "("); + for expr in exprs.iter() { + self.print_expr(*expr); + w!(self, ", "); + } + w!(self, ")"); + } + Expr::Unsafe { body } => { + w!(self, "unsafe "); + self.print_expr(*body); + } + Expr::Array(arr) => { + w!(self, "["); + if !matches!(arr, Array::ElementList { elements, .. } if elements.is_empty()) { + self.indented(|p| match arr { + Array::ElementList { elements, is_assignee_expr: _ } => { + for elem in elements.iter() { + p.print_expr(*elem); + w!(p, ", "); + } + } + Array::Repeat { initializer, repeat } => { + p.print_expr(*initializer); + w!(p, "; "); + p.print_expr(*repeat); + } + }); + self.newline(); + } + w!(self, "]"); + } + Expr::Literal(lit) => self.print_literal(lit), + Expr::Block { id: _, statements, tail, label } => { + self.whitespace(); + if let Some(lbl) = label { + w!(self, "{}: ", self.body[*lbl].name); + } + w!(self, "{{"); + if !statements.is_empty() || tail.is_some() { + self.indented(|p| { + for stmt in &**statements { + p.print_stmt(stmt); + } + if let Some(tail) = tail { + p.print_expr(*tail); + } + p.newline(); + }); + } + w!(self, "}}"); + } + Expr::MacroStmts { statements, tail } => { + w!(self, "{{ // macro statements"); + self.indented(|p| { + for stmt in statements.iter() { + p.print_stmt(stmt); + } + if let Some(tail) = tail { + p.print_expr(*tail); + } + }); + self.newline(); + w!(self, "}}"); + } + } + } + + fn print_pat(&mut self, pat: PatId) { + let pat = &self.body[pat]; + + match pat { + Pat::Missing => w!(self, "�"), + Pat::Wild => w!(self, "_"), + Pat::Tuple { args, ellipsis } => { + w!(self, "("); + for (i, pat) in args.iter().enumerate() { + if i != 0 { + w!(self, ", "); + } + if *ellipsis == Some(i) { + w!(self, ".., "); + } + self.print_pat(*pat); + } + w!(self, ")"); + } + Pat::Or(pats) => { + for (i, pat) in pats.iter().enumerate() { + if i != 0 { + w!(self, " | "); + } + self.print_pat(*pat); + } + } + Pat::Record { path, args, ellipsis } => { + match path { + Some(path) => self.print_path(path), + None => w!(self, "�"), + } + + w!(self, " {{"); + self.indented(|p| { + for arg in args.iter() { + w!(p, "{}: ", arg.name); + p.print_pat(arg.pat); + wln!(p, ","); + } + if *ellipsis { + wln!(p, ".."); + } + }); + w!(self, "}}"); + } + Pat::Range { start, end } => { + self.print_expr(*start); + w!(self, 
"..."); + self.print_expr(*end); + } + Pat::Slice { prefix, slice, suffix } => { + w!(self, "["); + for pat in prefix.iter() { + self.print_pat(*pat); + w!(self, ", "); + } + if let Some(pat) = slice { + self.print_pat(*pat); + w!(self, ", "); + } + for pat in suffix.iter() { + self.print_pat(*pat); + w!(self, ", "); + } + w!(self, "]"); + } + Pat::Path(path) => self.print_path(path), + Pat::Lit(expr) => self.print_expr(*expr), + Pat::Bind { mode, name, subpat } => { + let mode = match mode { + BindingAnnotation::Unannotated => "", + BindingAnnotation::Mutable => "mut ", + BindingAnnotation::Ref => "ref ", + BindingAnnotation::RefMut => "ref mut ", + }; + w!(self, "{}{}", mode, name); + if let Some(pat) = subpat { + self.whitespace(); + self.print_pat(*pat); + } + } + Pat::TupleStruct { path, args, ellipsis } => { + match path { + Some(path) => self.print_path(path), + None => w!(self, "�"), + } + w!(self, "("); + for (i, arg) in args.iter().enumerate() { + if i != 0 { + w!(self, ", "); + } + if *ellipsis == Some(i) { + w!(self, ", .."); + } + self.print_pat(*arg); + } + w!(self, ")"); + } + Pat::Ref { pat, mutability } => { + w!(self, "&"); + if mutability.is_mut() { + w!(self, "mut "); + } + self.print_pat(*pat); + } + Pat::Box { inner } => { + w!(self, "box "); + self.print_pat(*inner); + } + Pat::ConstBlock(c) => { + w!(self, "const "); + self.print_expr(*c); + } + } + } + + fn print_stmt(&mut self, stmt: &Statement) { + match stmt { + Statement::Let { pat, type_ref, initializer, else_branch } => { + w!(self, "let "); + self.print_pat(*pat); + if let Some(ty) = type_ref { + w!(self, ": "); + self.print_type_ref(ty); + } + if let Some(init) = initializer { + w!(self, " = "); + self.print_expr(*init); + } + if let Some(els) = else_branch { + w!(self, " else "); + self.print_expr(*els); + } + wln!(self, ";"); + } + Statement::Expr { expr, has_semi } => { + self.print_expr(*expr); + if *has_semi { + w!(self, ";"); + } + wln!(self); + } + } + } + + fn print_literal(&mut self, literal: &Literal) { + match literal { + Literal::String(it) => w!(self, "{:?}", it), + Literal::ByteString(it) => w!(self, "\"{}\"", it.escape_ascii()), + Literal::Char(it) => w!(self, "'{}'", it.escape_debug()), + Literal::Bool(it) => w!(self, "{}", it), + Literal::Int(i, suffix) => { + w!(self, "{}", i); + if let Some(suffix) = suffix { + w!(self, "{}", suffix); + } + } + Literal::Uint(i, suffix) => { + w!(self, "{}", i); + if let Some(suffix) = suffix { + w!(self, "{}", suffix); + } + } + Literal::Float(f, suffix) => { + w!(self, "{}", f); + if let Some(suffix) = suffix { + w!(self, "{}", suffix); + } + } + } + } + + fn print_type_ref(&mut self, ty: &TypeRef) { + print_type_ref(ty, self).unwrap(); + } + + fn print_path(&mut self, path: &Path) { + print_path(path, self).unwrap(); + } +} diff --git a/crates/hir-def/src/builtin_type.rs b/crates/hir-def/src/builtin_type.rs index 25a408036f..dd69c3ab47 100644 --- a/crates/hir-def/src/builtin_type.rs +++ b/crates/hir-def/src/builtin_type.rs @@ -156,3 +156,38 @@ impl BuiltinFloat { Some(res) } } + +impl fmt::Display for BuiltinInt { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str(match self { + BuiltinInt::Isize => "isize", + BuiltinInt::I8 => "i8", + BuiltinInt::I16 => "i16", + BuiltinInt::I32 => "i32", + BuiltinInt::I64 => "i64", + BuiltinInt::I128 => "i128", + }) + } +} + +impl fmt::Display for BuiltinUint { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str(match self { + BuiltinUint::Usize => "usize", + 
BuiltinUint::U8 => "u8", + BuiltinUint::U16 => "u16", + BuiltinUint::U32 => "u32", + BuiltinUint::U64 => "u64", + BuiltinUint::U128 => "u128", + }) + } +} + +impl fmt::Display for BuiltinFloat { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str(match self { + BuiltinFloat::F32 => "f32", + BuiltinFloat::F64 => "f64", + }) + } +} diff --git a/crates/hir-def/src/expr.rs b/crates/hir-def/src/expr.rs index c1b3788acb..4381b43c25 100644 --- a/crates/hir-def/src/expr.rs +++ b/crates/hir-def/src/expr.rs @@ -12,6 +12,8 @@ //! //! See also a neighboring `body` module. +use std::fmt; + use hir_expand::name::Name; use la_arena::{Idx, RawIdx}; @@ -52,8 +54,8 @@ impl FloatTypeWrapper { } } -impl std::fmt::Display for FloatTypeWrapper { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { +impl fmt::Display for FloatTypeWrapper { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{:?}", f64::from_bits(self.0)) } } diff --git a/crates/hir-def/src/item_tree/pretty.rs b/crates/hir-def/src/item_tree/pretty.rs index f12d9a1273..34dd817fd1 100644 --- a/crates/hir-def/src/item_tree/pretty.rs +++ b/crates/hir-def/src/item_tree/pretty.rs @@ -2,13 +2,10 @@ use std::fmt::{self, Write}; -use itertools::Itertools; - use crate::{ attr::RawAttrs, generics::{TypeOrConstParamData, WherePredicate, WherePredicateTypeTarget}, - path::GenericArg, - type_ref::TraitBoundModifier, + pretty::{print_path, print_type_bounds, print_type_ref}, visibility::RawVisibility, }; @@ -464,183 +461,15 @@ impl<'a> Printer<'a> { } fn print_type_ref(&mut self, type_ref: &TypeRef) { - // FIXME: deduplicate with `HirDisplay` impl - match type_ref { - TypeRef::Never => w!(self, "!"), - TypeRef::Placeholder => w!(self, "_"), - TypeRef::Tuple(fields) => { - w!(self, "("); - for (i, field) in fields.iter().enumerate() { - if i != 0 { - w!(self, ", "); - } - self.print_type_ref(field); - } - w!(self, ")"); - } - TypeRef::Path(path) => self.print_path(path), - TypeRef::RawPtr(pointee, mtbl) => { - let mtbl = match mtbl { - Mutability::Shared => "*const", - Mutability::Mut => "*mut", - }; - w!(self, "{} ", mtbl); - self.print_type_ref(pointee); - } - TypeRef::Reference(pointee, lt, mtbl) => { - let mtbl = match mtbl { - Mutability::Shared => "", - Mutability::Mut => "mut ", - }; - w!(self, "&"); - if let Some(lt) = lt { - w!(self, "{} ", lt.name); - } - w!(self, "{}", mtbl); - self.print_type_ref(pointee); - } - TypeRef::Array(elem, len) => { - w!(self, "["); - self.print_type_ref(elem); - w!(self, "; {}]", len); - } - TypeRef::Slice(elem) => { - w!(self, "["); - self.print_type_ref(elem); - w!(self, "]"); - } - TypeRef::Fn(args_and_ret, varargs) => { - let ((_, return_type), args) = - args_and_ret.split_last().expect("TypeRef::Fn is missing return type"); - w!(self, "fn("); - for (i, (_, typeref)) in args.iter().enumerate() { - if i != 0 { - w!(self, ", "); - } - self.print_type_ref(typeref); - } - if *varargs { - if !args.is_empty() { - w!(self, ", "); - } - w!(self, "..."); - } - w!(self, ") -> "); - self.print_type_ref(return_type); - } - TypeRef::Macro(_ast_id) => { - w!(self, ""); - } - TypeRef::Error => w!(self, "{{unknown}}"), - TypeRef::ImplTrait(bounds) => { - w!(self, "impl "); - self.print_type_bounds(bounds); - } - TypeRef::DynTrait(bounds) => { - w!(self, "dyn "); - self.print_type_bounds(bounds); - } - } + print_type_ref(type_ref, self).unwrap(); } fn print_type_bounds(&mut self, bounds: &[Interned]) { - for (i, bound) in bounds.iter().enumerate() { - if i != 0 { - w!(self, " 
+ "); - } - - match bound.as_ref() { - TypeBound::Path(path, modifier) => { - match modifier { - TraitBoundModifier::None => (), - TraitBoundModifier::Maybe => w!(self, "?"), - } - self.print_path(path) - } - TypeBound::ForLifetime(lifetimes, path) => { - w!(self, "for<{}> ", lifetimes.iter().format(", ")); - self.print_path(path); - } - TypeBound::Lifetime(lt) => w!(self, "{}", lt.name), - TypeBound::Error => w!(self, "{{unknown}}"), - } - } + print_type_bounds(bounds, self).unwrap(); } fn print_path(&mut self, path: &Path) { - match path.type_anchor() { - Some(anchor) => { - w!(self, "<"); - self.print_type_ref(anchor); - w!(self, ">::"); - } - None => match path.kind() { - PathKind::Plain => {} - PathKind::Super(0) => w!(self, "self::"), - PathKind::Super(n) => { - for _ in 0..*n { - w!(self, "super::"); - } - } - PathKind::Crate => w!(self, "crate::"), - PathKind::Abs => w!(self, "::"), - PathKind::DollarCrate(_) => w!(self, "$crate::"), - }, - } - - for (i, segment) in path.segments().iter().enumerate() { - if i != 0 { - w!(self, "::"); - } - - w!(self, "{}", segment.name); - if let Some(generics) = segment.args_and_bindings { - // NB: these are all in type position, so `::<` turbofish syntax is not necessary - w!(self, "<"); - let mut first = true; - let args = if generics.has_self_type { - let (self_ty, args) = generics.args.split_first().unwrap(); - w!(self, "Self="); - self.print_generic_arg(self_ty); - first = false; - args - } else { - &generics.args - }; - for arg in args { - if !first { - w!(self, ", "); - } - first = false; - self.print_generic_arg(arg); - } - for binding in &generics.bindings { - if !first { - w!(self, ", "); - } - first = false; - w!(self, "{}", binding.name); - if !binding.bounds.is_empty() { - w!(self, ": "); - self.print_type_bounds(&binding.bounds); - } - if let Some(ty) = &binding.type_ref { - w!(self, " = "); - self.print_type_ref(ty); - } - } - - w!(self, ">"); - } - } - } - - fn print_generic_arg(&mut self, arg: &GenericArg) { - match arg { - GenericArg::Type(ty) => self.print_type_ref(ty), - GenericArg::Const(c) => w!(self, "{}", c), - GenericArg::Lifetime(lt) => w!(self, "{}", lt.name), - } + print_path(path, self).unwrap(); } fn print_generic_params(&mut self, params: &GenericParams) { diff --git a/crates/hir-def/src/item_tree/tests.rs b/crates/hir-def/src/item_tree/tests.rs index 5cdf36cc61..e30d9652bb 100644 --- a/crates/hir-def/src/item_tree/tests.rs +++ b/crates/hir-def/src/item_tree/tests.rs @@ -283,10 +283,10 @@ struct S { "#, expect![[r#" pub(self) struct S { - pub(self) a: Mixed<'a, T, Item = (), OtherItem = u8>, - pub(self) b: Qualified::Syntax, - pub(self) c: ::Path<'a>, - pub(self) d: dyn for<'a> Trait<'a>, + pub(self) a: Mixed::<'a, T, Item = (), OtherItem = u8>, + pub(self) b: Qualified::::Syntax, + pub(self) c: ::Path::<'a>, + pub(self) d: dyn for<'a> Trait::<'a>, } "#]], ) @@ -329,7 +329,7 @@ trait Tr<'a, T: 'a>: Super where Self: for<'a> Tr<'a, T> {} T: Copy, U: ?Sized; - impl<'a, 'b, T, const K: u8> S<'a, 'b, T, K> + impl<'a, 'b, T, const K: u8> S::<'a, 'b, T, K> where T: Copy, T: 'a, @@ -352,7 +352,7 @@ trait Tr<'a, T: 'a>: Super where Self: for<'a> Tr<'a, T> {} where Self: Super, T: 'a, - Self: for<'a> Tr<'a, T> + Self: for<'a> Tr::<'a, T> { } "#]], diff --git a/crates/hir-def/src/lib.rs b/crates/hir-def/src/lib.rs index 56603f4b15..32ebfda4fd 100644 --- a/crates/hir-def/src/lib.rs +++ b/crates/hir-def/src/lib.rs @@ -53,6 +53,7 @@ pub mod import_map; mod test_db; #[cfg(test)] mod macro_expansion_tests; +mod pretty; use std::{ 
hash::{Hash, Hasher}, diff --git a/crates/hir-def/src/pretty.rs b/crates/hir-def/src/pretty.rs new file mode 100644 index 0000000000..6636c8a23c --- /dev/null +++ b/crates/hir-def/src/pretty.rs @@ -0,0 +1,209 @@ +//! Display and pretty printing routines. + +use std::fmt::{self, Write}; + +use hir_expand::mod_path::PathKind; +use itertools::Itertools; + +use crate::{ + intern::Interned, + path::{GenericArg, GenericArgs, Path}, + type_ref::{Mutability, TraitBoundModifier, TypeBound, TypeRef}, +}; + +pub(crate) fn print_path(path: &Path, buf: &mut dyn Write) -> fmt::Result { + match path.type_anchor() { + Some(anchor) => { + write!(buf, "<")?; + print_type_ref(anchor, buf)?; + write!(buf, ">::")?; + } + None => match path.kind() { + PathKind::Plain => {} + PathKind::Super(0) => write!(buf, "self")?, + PathKind::Super(n) => { + for i in 0..*n { + if i == 0 { + buf.write_str("super")?; + } else { + buf.write_str("::super")?; + } + } + } + PathKind::Crate => write!(buf, "crate")?, + PathKind::Abs => {} + PathKind::DollarCrate(_) => write!(buf, "$crate")?, + }, + } + + for (i, segment) in path.segments().iter().enumerate() { + if i != 0 || !matches!(path.kind(), PathKind::Plain) { + write!(buf, "::")?; + } + + write!(buf, "{}", segment.name)?; + if let Some(generics) = segment.args_and_bindings { + write!(buf, "::<")?; + print_generic_args(generics, buf)?; + + write!(buf, ">")?; + } + } + + Ok(()) +} + +pub(crate) fn print_generic_args(generics: &GenericArgs, buf: &mut dyn Write) -> fmt::Result { + let mut first = true; + let args = if generics.has_self_type { + let (self_ty, args) = generics.args.split_first().unwrap(); + write!(buf, "Self=")?; + print_generic_arg(self_ty, buf)?; + first = false; + args + } else { + &generics.args + }; + for arg in args { + if !first { + write!(buf, ", ")?; + } + first = false; + print_generic_arg(arg, buf)?; + } + for binding in &generics.bindings { + if !first { + write!(buf, ", ")?; + } + first = false; + write!(buf, "{}", binding.name)?; + if !binding.bounds.is_empty() { + write!(buf, ": ")?; + print_type_bounds(&binding.bounds, buf)?; + } + if let Some(ty) = &binding.type_ref { + write!(buf, " = ")?; + print_type_ref(ty, buf)?; + } + } + Ok(()) +} + +pub(crate) fn print_generic_arg(arg: &GenericArg, buf: &mut dyn Write) -> fmt::Result { + match arg { + GenericArg::Type(ty) => print_type_ref(ty, buf), + GenericArg::Const(c) => write!(buf, "{}", c), + GenericArg::Lifetime(lt) => write!(buf, "{}", lt.name), + } +} + +pub(crate) fn print_type_ref(type_ref: &TypeRef, buf: &mut dyn Write) -> fmt::Result { + // FIXME: deduplicate with `HirDisplay` impl + match type_ref { + TypeRef::Never => write!(buf, "!")?, + TypeRef::Placeholder => write!(buf, "_")?, + TypeRef::Tuple(fields) => { + write!(buf, "(")?; + for (i, field) in fields.iter().enumerate() { + if i != 0 { + write!(buf, ", ")?; + } + print_type_ref(field, buf)?; + } + write!(buf, ")")?; + } + TypeRef::Path(path) => print_path(path, buf)?, + TypeRef::RawPtr(pointee, mtbl) => { + let mtbl = match mtbl { + Mutability::Shared => "*const", + Mutability::Mut => "*mut", + }; + write!(buf, "{} ", mtbl)?; + print_type_ref(pointee, buf)?; + } + TypeRef::Reference(pointee, lt, mtbl) => { + let mtbl = match mtbl { + Mutability::Shared => "", + Mutability::Mut => "mut ", + }; + write!(buf, "&")?; + if let Some(lt) = lt { + write!(buf, "{} ", lt.name)?; + } + write!(buf, "{}", mtbl)?; + print_type_ref(pointee, buf)?; + } + TypeRef::Array(elem, len) => { + write!(buf, "[")?; + print_type_ref(elem, buf)?; + write!(buf, "; 
{}]", len)?; + } + TypeRef::Slice(elem) => { + write!(buf, "[")?; + print_type_ref(elem, buf)?; + write!(buf, "]")?; + } + TypeRef::Fn(args_and_ret, varargs) => { + let ((_, return_type), args) = + args_and_ret.split_last().expect("TypeRef::Fn is missing return type"); + write!(buf, "fn(")?; + for (i, (_, typeref)) in args.iter().enumerate() { + if i != 0 { + write!(buf, ", ")?; + } + print_type_ref(typeref, buf)?; + } + if *varargs { + if !args.is_empty() { + write!(buf, ", ")?; + } + write!(buf, "...")?; + } + write!(buf, ") -> ")?; + print_type_ref(return_type, buf)?; + } + TypeRef::Macro(_ast_id) => { + write!(buf, "")?; + } + TypeRef::Error => write!(buf, "{{unknown}}")?, + TypeRef::ImplTrait(bounds) => { + write!(buf, "impl ")?; + print_type_bounds(bounds, buf)?; + } + TypeRef::DynTrait(bounds) => { + write!(buf, "dyn ")?; + print_type_bounds(bounds, buf)?; + } + } + + Ok(()) +} + +pub(crate) fn print_type_bounds( + bounds: &[Interned], + buf: &mut dyn Write, +) -> fmt::Result { + for (i, bound) in bounds.iter().enumerate() { + if i != 0 { + write!(buf, " + ")?; + } + + match bound.as_ref() { + TypeBound::Path(path, modifier) => { + match modifier { + TraitBoundModifier::None => (), + TraitBoundModifier::Maybe => write!(buf, "?")?, + } + print_path(path, buf)?; + } + TypeBound::ForLifetime(lifetimes, path) => { + write!(buf, "for<{}> ", lifetimes.iter().format(", "))?; + print_path(path, buf)?; + } + TypeBound::Lifetime(lt) => write!(buf, "{}", lt.name)?, + TypeBound::Error => write!(buf, "{{unknown}}")?, + } + } + + Ok(()) +} diff --git a/crates/hir-def/src/type_ref.rs b/crates/hir-def/src/type_ref.rs index 9248059627..5b4c71be7f 100644 --- a/crates/hir-def/src/type_ref.rs +++ b/crates/hir-def/src/type_ref.rs @@ -77,6 +77,10 @@ impl Rawness { Rawness::Ref } } + + pub fn is_raw(&self) -> bool { + matches!(self, Self::RawPtr) + } } #[derive(Clone, PartialEq, Eq, Hash, Debug)] diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 8f984210e1..3561bdeba7 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -72,7 +72,7 @@ use itertools::Itertools; use nameres::diagnostics::DefDiagnosticKind; use once_cell::unsync::Lazy; use rustc_hash::FxHashSet; -use stdx::{format_to, impl_from, never}; +use stdx::{impl_from, never}; use syntax::{ ast::{self, HasAttrs as _, HasDocComments, HasName}, AstNode, AstPtr, SmolStr, SyntaxNodePtr, TextRange, T, @@ -1136,6 +1136,20 @@ impl DefWithBody { } } + fn id(&self) -> DefWithBodyId { + match self { + DefWithBody::Function(it) => it.id.into(), + DefWithBody::Static(it) => it.id.into(), + DefWithBody::Const(it) => it.id.into(), + } + } + + /// A textual representation of the HIR of this def's body for debugging purposes. + pub fn debug_hir(self, db: &dyn HirDatabase) -> String { + let body = db.body(self.id()); + body.pretty_print(db.upcast(), self.id()) + } + pub fn diagnostics(self, db: &dyn HirDatabase, acc: &mut Vec) { let krate = self.module(db).id.krate(); @@ -1470,19 +1484,6 @@ impl Function { let def_map = db.crate_def_map(loc.krate(db).into()); def_map.fn_as_proc_macro(self.id).map(|id| Macro { id: id.into() }) } - - /// A textual representation of the HIR of this function for debugging purposes. 
- pub fn debug_hir(self, db: &dyn HirDatabase) -> String { - let body = db.body(self.id.into()); - - let mut result = String::new(); - format_to!(result, "HIR expressions in the body of `{}`:\n", self.name(db)); - for (id, expr) in body.exprs.iter() { - format_to!(result, "{:?}: {:?}\n", id, expr); - } - - result - } } // Note: logically, this belongs to `hir_ty`, but we are not using it there yet. diff --git a/crates/ide/src/view_hir.rs b/crates/ide/src/view_hir.rs index bf0835ed7e..d2bbbf6d26 100644 --- a/crates/ide/src/view_hir.rs +++ b/crates/ide/src/view_hir.rs @@ -1,4 +1,4 @@ -use hir::{Function, Semantics}; +use hir::{DefWithBody, Semantics}; use ide_db::base_db::FilePosition; use ide_db::RootDatabase; use syntax::{algo::find_node_at_offset, ast, AstNode}; @@ -19,8 +19,12 @@ fn body_hir(db: &RootDatabase, position: FilePosition) -> Option { let sema = Semantics::new(db); let source_file = sema.parse(position.file_id); - let function = find_node_at_offset::(source_file.syntax(), position.offset)?; - - let function: Function = sema.to_def(&function)?; - Some(function.debug_hir(db)) + let item = find_node_at_offset::(source_file.syntax(), position.offset)?; + let def: DefWithBody = match item { + ast::Item::Fn(it) => sema.to_def(&it)?.into(), + ast::Item::Const(it) => sema.to_def(&it)?.into(), + ast::Item::Static(it) => sema.to_def(&it)?.into(), + _ => return None, + }; + Some(def.debug_hir(db)) } diff --git a/editors/code/src/commands.ts b/editors/code/src/commands.ts index 1b793bb0b1..f58de9da1b 100644 --- a/editors/code/src/commands.ts +++ b/editors/code/src/commands.ts @@ -433,7 +433,7 @@ export function syntaxTree(ctx: Ctx): Cmd { // The contents of the file come from the `TextDocumentContentProvider` export function viewHir(ctx: Ctx): Cmd { const tdcp = new (class implements vscode.TextDocumentContentProvider { - readonly uri = vscode.Uri.parse("rust-analyzer-hir://viewHir/hir.txt"); + readonly uri = vscode.Uri.parse("rust-analyzer-hir://viewHir/hir.rs"); readonly eventEmitter = new vscode.EventEmitter(); constructor() { vscode.workspace.onDidChangeTextDocument( From 3f149a63d226cbdedd466d872677da765ada9df8 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 15 Aug 2022 16:16:59 +0200 Subject: [PATCH 0132/1945] Simplify --- crates/hir-expand/src/ast_id_map.rs | 21 ++++++++------------- crates/hir-expand/src/db.rs | 6 +++++- crates/hir-expand/src/lib.rs | 1 - crates/ide/src/hover.rs | 2 ++ 4 files changed, 15 insertions(+), 15 deletions(-) diff --git a/crates/hir-expand/src/ast_id_map.rs b/crates/hir-expand/src/ast_id_map.rs index c1ddef03ba..11c0a6764e 100644 --- a/crates/hir-expand/src/ast_id_map.rs +++ b/crates/hir-expand/src/ast_id_map.rs @@ -15,7 +15,7 @@ use std::{ use la_arena::{Arena, Idx}; use profile::Count; use rustc_hash::FxHasher; -use syntax::{ast, match_ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr}; +use syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr}; /// `AstId` points to an AST node in a specific file. pub struct FileAstId { @@ -92,18 +92,12 @@ impl AstIdMap { // change parent's id. This means that, say, adding a new function to a // trait does not change ids of top-level items, which helps caching. bdfs(node, |it| { - match_ast! 
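A note on the `Printer` added in crates/hir-def/src/body/pretty.rs above: its `write_str` applies indentation lazily, splitting the incoming text with `split_inclusive('\n')` and only emitting the indent once the next visible character of a line arrives, so callers can build one logical line out of several small writes. The following is a reduced, self-contained sketch of that same idea; the `IndentWriter` name and the `main` driver are invented for illustration and are not part of the patch.

```rust
use std::fmt::{self, Write};

/// Minimal stand-in for the patch's `Printer`: indents every line it is asked
/// to write by the current `indent_level`, but only when a new line actually
/// starts, so a line can be written in several fragments.
struct IndentWriter {
    buf: String,
    indent_level: usize,
    needs_indent: bool,
}

impl Write for IndentWriter {
    fn write_str(&mut self, s: &str) -> fmt::Result {
        for line in s.split_inclusive('\n') {
            if self.needs_indent {
                // Emit the pending indentation before the first character of a new line.
                self.buf.push_str(&"    ".repeat(self.indent_level));
                self.needs_indent = false;
            }
            self.buf.push_str(line);
            // If this chunk ended the line, the next write starts a fresh one.
            self.needs_indent = line.ends_with('\n');
        }
        Ok(())
    }
}

fn main() {
    let mut w = IndentWriter { buf: String::new(), indent_level: 0, needs_indent: false };
    writeln!(w, "fn main() {{").unwrap();
    w.indent_level += 1;
    // Two separate writes still land on one indented line.
    write!(w, "let x = ").unwrap();
    writeln!(w, "92;").unwrap();
    w.indent_level -= 1;
    writeln!(w, "}}").unwrap();
    print!("{}", w.buf);
}
```

The same deferred-indent trick is what lets `Printer::indented` simply bump `indent_level` around a closure while callers keep writing line fragments piecemeal.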
{ - match it { - ast::Item(module_item) => { - res.alloc(module_item.syntax()); - true - }, - ast::BlockExpr(block) => { - res.alloc(block.syntax()); - true - }, - _ => false, - } + let kind = it.kind(); + if ast::Item::can_cast(kind) || ast::BlockExpr::can_cast(kind) { + res.alloc(&it); + true + } else { + false } }); res.map = hashbrown::HashMap::with_capacity_and_hasher(res.arena.len(), ()); @@ -123,6 +117,7 @@ impl AstIdMap { let raw = self.erased_ast_id(item.syntax()); FileAstId { raw, _ty: PhantomData } } + fn erased_ast_id(&self, item: &SyntaxNode) -> ErasedFileAstId { let ptr = SyntaxNodePtr::new(item); let hash = hash_ptr(&ptr); diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs index bd60c3d268..bc97ee15c7 100644 --- a/crates/hir-expand/src/db.rs +++ b/crates/hir-expand/src/db.rs @@ -321,7 +321,11 @@ fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet), BuiltIn(BuiltinFnLikeExpander, AstId), - // FIXME: maybe just Builtin and rename BuiltinFnLikeExpander to BuiltinExpander BuiltInAttr(BuiltinAttrExpander, AstId), BuiltInDerive(BuiltinDeriveExpander, AstId), BuiltInEager(EagerExpander, AstId), diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs index 3ada181f1e..fa2c6f09ab 100644 --- a/crates/ide/src/hover.rs +++ b/crates/ide/src/hover.rs @@ -119,6 +119,8 @@ pub(crate) fn hover( } let in_attr = matches!(original_token.parent().and_then(ast::TokenTree::cast), Some(tt) if tt.syntax().ancestors().any(|it| ast::Meta::can_cast(it.kind()))); + // prefer descending the same token kind in attribute expansions, in normal macros text + // equivalency is more important let descended = if in_attr { [sema.descend_into_macros_with_kind_preference(original_token.clone())].into() } else { From 88b19cc39b14446cc12e02a20c6f489f9883b760 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 15 Aug 2022 16:41:51 +0200 Subject: [PATCH 0133/1945] Make resolve_name_in_module a bit more lazy --- crates/hir-def/src/nameres/path_resolution.rs | 32 +++++++++---------- crates/hir-def/src/per_ns.rs | 12 +++++++ 2 files changed, 28 insertions(+), 16 deletions(-) diff --git a/crates/hir-def/src/nameres/path_resolution.rs b/crates/hir-def/src/nameres/path_resolution.rs index cc5fc0a52a..32514ffb13 100644 --- a/crates/hir-def/src/nameres/path_resolution.rs +++ b/crates/hir-def/src/nameres/path_resolution.rs @@ -397,14 +397,15 @@ impl DefMap { Some(_) | None => from_scope.or(from_builtin), }, }; - let from_extern_prelude = self - .extern_prelude - .get(name) - .map_or(PerNs::none(), |&it| PerNs::types(it.into(), Visibility::Public)); - let from_prelude = self.resolve_in_prelude(db, name); + let extern_prelude = || { + self.extern_prelude + .get(name) + .map_or(PerNs::none(), |&it| PerNs::types(it.into(), Visibility::Public)) + }; + let prelude = || self.resolve_in_prelude(db, name); - from_legacy_macro.or(from_scope_or_builtin).or(from_extern_prelude).or(from_prelude) + from_legacy_macro.or(from_scope_or_builtin).or_else(extern_prelude).or_else(prelude) } fn resolve_name_in_crate_root_or_extern_prelude( @@ -412,20 +413,19 @@ impl DefMap { db: &dyn DefDatabase, name: &Name, ) -> PerNs { - let arc; - let crate_def_map = match self.block { + let from_crate_root = match self.block { Some(_) => { - arc = self.crate_root(db).def_map(db); - &arc + let def_map = self.crate_root(db).def_map(db); + def_map[def_map.root].scope.get(name) } - None => self, + None => self[self.root].scope.get(name), + }; + let from_extern_prelude = || { + 
self.resolve_name_in_extern_prelude(db, name) + .map_or(PerNs::none(), |it| PerNs::types(it.into(), Visibility::Public)) }; - let from_crate_root = crate_def_map[crate_def_map.root].scope.get(name); - let from_extern_prelude = self - .resolve_name_in_extern_prelude(db, name) - .map_or(PerNs::none(), |it| PerNs::types(it.into(), Visibility::Public)); - from_crate_root.or(from_extern_prelude) + from_crate_root.or_else(from_extern_prelude) } fn resolve_in_prelude(&self, db: &dyn DefDatabase, name: &Name) -> PerNs { diff --git a/crates/hir-def/src/per_ns.rs b/crates/hir-def/src/per_ns.rs index bf5bf10c4c..2bc1f8e926 100644 --- a/crates/hir-def/src/per_ns.rs +++ b/crates/hir-def/src/per_ns.rs @@ -43,6 +43,10 @@ impl PerNs { self.types.is_none() && self.values.is_none() && self.macros.is_none() } + pub fn is_full(&self) -> bool { + self.types.is_some() && self.values.is_some() && self.macros.is_some() + } + pub fn take_types(self) -> Option { self.types.map(|it| it.0) } @@ -84,6 +88,14 @@ impl PerNs { } } + pub fn or_else(self, f: impl FnOnce() -> PerNs) -> PerNs { + if self.is_full() { + self + } else { + self.or(f()) + } + } + pub fn iter_items(self) -> impl Iterator { let _p = profile::span("PerNs::iter_items"); self.types From 8c60813096fca9ce5b81bdb22894ca1e1240fdb5 Mon Sep 17 00:00:00 2001 From: Jonas Schievink Date: Mon, 15 Aug 2022 17:44:52 +0200 Subject: [PATCH 0134/1945] Fix lowering of empty macro expressions in trailing position --- crates/hir-def/src/body/lower.rs | 10 +++++++++- crates/hir-ty/src/tests/regression.rs | 17 +++++++++++++++++ 2 files changed, 26 insertions(+), 1 deletion(-) diff --git a/crates/hir-def/src/body/lower.rs b/crates/hir-def/src/body/lower.rs index 66f9c24e87..f6ec8bf7e9 100644 --- a/crates/hir-def/src/body/lower.rs +++ b/crates/hir-def/src/body/lower.rs @@ -551,9 +551,17 @@ impl ExprCollector<'_> { } } ast::Expr::MacroStmts(e) => { - let statements = e.statements().filter_map(|s| self.collect_stmt(s)).collect(); + let statements: Box<[_]> = + e.statements().filter_map(|s| self.collect_stmt(s)).collect(); let tail = e.expr().map(|e| self.collect_expr(e)); + if e.syntax().children().next().is_none() { + // HACK: make sure that macros that expand to nothing aren't treated as a `()` + // expression when used in block tail position. + cov_mark::hit!(empty_macro_in_trailing_position_is_removed); + return None; + } + self.alloc_expr(Expr::MacroStmts { tail, statements }, syntax_ptr) } ast::Expr::UnderscoreExpr(_) => self.alloc_expr(Expr::Underscore, syntax_ptr), diff --git a/crates/hir-ty/src/tests/regression.rs b/crates/hir-ty/src/tests/regression.rs index 93a88ab58e..1b5ed0603b 100644 --- a/crates/hir-ty/src/tests/regression.rs +++ b/crates/hir-ty/src/tests/regression.rs @@ -1648,3 +1648,20 @@ fn main() { "#]], ); } + +#[test] +fn trailing_empty_macro() { + cov_mark::check!(empty_macro_in_trailing_position_is_removed); + check_no_mismatches( + r#" +macro_rules! m2 { + ($($t:tt)*) => {$($t)*}; +} + +fn macrostmts() -> u8 { + m2! { 0 } + m2! 
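The shape of the `resolve_name_in_module` change above is easier to see in isolation: the extern-prelude and prelude lookups are wrapped in closures and chained through the new `PerNs::or_else`, so they only run when the earlier scopes left some namespace unfilled. Below is a standalone sketch of that pattern with a toy two-namespace `PerNs`; the type and the `main` driver are simplified stand-ins for illustration, not the hir-def definitions.

```rust
/// Toy stand-in for hir-def's `PerNs`, just to show the shape of the change:
/// `or_else` takes a closure, so the fallback resolution is skipped entirely
/// when the primary lookup already produced something in every namespace.
#[derive(Debug, Clone, Copy)]
struct PerNs {
    types: Option<&'static str>,
    values: Option<&'static str>,
}

impl PerNs {
    fn is_full(&self) -> bool {
        self.types.is_some() && self.values.is_some()
    }

    fn or(self, other: PerNs) -> PerNs {
        PerNs { types: self.types.or(other.types), values: self.values.or(other.values) }
    }

    fn or_else(self, f: impl FnOnce() -> PerNs) -> PerNs {
        // Only evaluate the fallback when something is still missing.
        if self.is_full() { self } else { self.or(f()) }
    }
}

fn main() {
    let from_scope = PerNs { types: Some("struct S"), values: Some("fn S") };

    let expensive_prelude_lookup = || {
        println!("prelude lookup actually ran");
        PerNs { types: Some("prelude S"), values: None }
    };

    // `from_scope` is already full, so the closure never runs and nothing is printed.
    let resolved = from_scope.or_else(expensive_prelude_lookup);
    println!("{:?}", resolved);
}
```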
{} +} + "#, + ); +} From 91358bd937ff0fdace4371f313fa0cda42d3d81c Mon Sep 17 00:00:00 2001 From: yue4u Date: Tue, 16 Aug 2022 01:24:21 +0900 Subject: [PATCH 0135/1945] fix: format literal lookup --- .../ide-completion/src/completions/record.rs | 10 +++++----- crates/ide-completion/src/render.rs | 4 +++- crates/ide-completion/src/render/literal.rs | 16 ++++++++++------ crates/ide-completion/src/render/pattern.rs | 18 ++++++++++++------ .../ide-completion/src/render/union_literal.rs | 9 ++++++--- crates/ide-completion/src/render/variant.rs | 9 +++++++++ crates/ide-completion/src/tests/pattern.rs | 2 +- 7 files changed, 46 insertions(+), 22 deletions(-) diff --git a/crates/ide-completion/src/completions/record.rs b/crates/ide-completion/src/completions/record.rs index bfb98b9f27..5d96fbd30a 100644 --- a/crates/ide-completion/src/completions/record.rs +++ b/crates/ide-completion/src/completions/record.rs @@ -129,7 +129,7 @@ mod tests { #[test] fn literal_struct_completion_edit() { check_edit( - "FooDesc {…}", + "FooDesc{}", r#" struct FooDesc { pub bar: bool } @@ -154,7 +154,7 @@ fn baz() { #[test] fn literal_struct_impl_self_completion() { check_edit( - "Self {…}", + "Self{}", r#" struct Foo { bar: u64, @@ -180,7 +180,7 @@ impl Foo { ); check_edit( - "Self(…)", + "Self()", r#" mod submod { pub struct Foo(pub u64); @@ -209,7 +209,7 @@ impl submod::Foo { #[test] fn literal_struct_completion_from_sub_modules() { check_edit( - "submod::Struct {…}", + "submod::Struct{}", r#" mod submod { pub struct Struct { @@ -238,7 +238,7 @@ fn f() -> submod::Struct { #[test] fn literal_struct_complexion_module() { check_edit( - "FooDesc {…}", + "FooDesc{}", r#" mod _69latrick { pub struct FooDesc { pub six: bool, pub neuf: Vec, pub bar: bool } diff --git a/crates/ide-completion/src/render.rs b/crates/ide-completion/src/render.rs index 693007ca30..a2cf6d68e5 100644 --- a/crates/ide-completion/src/render.rs +++ b/crates/ide-completion/src/render.rs @@ -565,6 +565,7 @@ fn main() { Foo::Fo$0 } kind: SymbolKind( Variant, ), + lookup: "Foo{}", detail: "Foo { x: i32, y: i32 }", }, ] @@ -591,6 +592,7 @@ fn main() { Foo::Fo$0 } kind: SymbolKind( Variant, ), + lookup: "Foo()", detail: "Foo(i32, i32)", }, ] @@ -707,7 +709,7 @@ fn main() { let _: m::Spam = S$0 } kind: SymbolKind( Variant, ), - lookup: "Spam::Bar(…)", + lookup: "Spam::Bar()", detail: "m::Spam::Bar(i32)", relevance: CompletionRelevance { exact_name_match: false, diff --git a/crates/ide-completion/src/render/literal.rs b/crates/ide-completion/src/render/literal.rs index 707dea206b..af9c88a7e0 100644 --- a/crates/ide-completion/src/render/literal.rs +++ b/crates/ide-completion/src/render/literal.rs @@ -10,8 +10,8 @@ use crate::{ render::{ compute_ref_match, compute_type_match, variant::{ - format_literal_label, render_record_lit, render_tuple_lit, visible_fields, - RenderedLiteral, + format_literal_label, format_literal_lookup, render_record_lit, render_tuple_lit, + visible_fields, RenderedLiteral, }, RenderContext, }, @@ -97,13 +97,20 @@ fn render( if !should_add_parens { kind = StructKind::Unit; } + let label = format_literal_label(&qualified_name, kind); + let lookup = if qualified { + format_literal_lookup(&short_qualified_name.to_string(), kind) + } else { + format_literal_lookup(&qualified_name, kind) + }; let mut item = CompletionItem::new( CompletionItemKind::SymbolKind(thing.symbol_kind()), ctx.source_range(), - format_literal_label(&qualified_name, kind), + label, ); + item.lookup_by(lookup); item.detail(rendered.detail); match snippet_cap { @@ 
-111,9 +118,6 @@ fn render( None => item.insert_text(rendered.literal), }; - if qualified { - item.lookup_by(format_literal_label(&short_qualified_name.to_string(), kind)); - } item.set_documentation(thing.docs(db)).set_deprecated(thing.is_deprecated(&ctx)); let ty = thing.ty(db); diff --git a/crates/ide-completion/src/render/pattern.rs b/crates/ide-completion/src/render/pattern.rs index 1c1299e33b..c845ff21aa 100644 --- a/crates/ide-completion/src/render/pattern.rs +++ b/crates/ide-completion/src/render/pattern.rs @@ -8,7 +8,7 @@ use syntax::SmolStr; use crate::{ context::{ParamContext, ParamKind, PathCompletionCtx, PatternContext}, render::{ - variant::{format_literal_label, visible_fields}, + variant::{format_literal_label, format_literal_lookup, visible_fields}, RenderContext, }, CompletionItem, CompletionItemKind, @@ -34,9 +34,10 @@ pub(crate) fn render_struct_pat( let (name, escaped_name) = (name.unescaped().to_smol_str(), name.to_smol_str()); let kind = strukt.kind(ctx.db()); let label = format_literal_label(name.as_str(), kind); + let lookup = format_literal_lookup(name.as_str(), kind); let pat = render_pat(&ctx, pattern_ctx, &escaped_name, kind, &visible_fields, fields_omitted)?; - Some(build_completion(ctx, label, pat, strukt)) + Some(build_completion(ctx, label, lookup, pat, strukt)) } pub(crate) fn render_variant_pat( @@ -60,11 +61,14 @@ pub(crate) fn render_variant_pat( } }; - let (label, pat) = match path_ctx { - Some(PathCompletionCtx { has_call_parens: true, .. }) => (name, escaped_name.to_string()), + let (label, lookup, pat) = match path_ctx { + Some(PathCompletionCtx { has_call_parens: true, .. }) => { + (name.clone(), name, escaped_name.to_string()) + } _ => { let kind = variant.kind(ctx.db()); let label = format_literal_label(name.as_str(), kind); + let lookup = format_literal_lookup(name.as_str(), kind); let pat = render_pat( &ctx, pattern_ctx, @@ -73,16 +77,17 @@ pub(crate) fn render_variant_pat( &visible_fields, fields_omitted, )?; - (label, pat) + (label, lookup, pat) } }; - Some(build_completion(ctx, label, pat, variant)) + Some(build_completion(ctx, label, lookup, pat, variant)) } fn build_completion( ctx: RenderContext<'_>, label: SmolStr, + lookup: SmolStr, pat: String, def: impl HasAttrs + Copy, ) -> CompletionItem { @@ -90,6 +95,7 @@ fn build_completion( item.set_documentation(ctx.docs(def)) .set_deprecated(ctx.is_deprecated(def)) .detail(&pat) + .lookup_by(lookup) .set_relevance(ctx.completion_relevance()); match ctx.snippet_cap() { Some(snippet_cap) => item.insert_snippet(snippet_cap, pat), diff --git a/crates/ide-completion/src/render/union_literal.rs b/crates/ide-completion/src/render/union_literal.rs index bb32330f27..54e97dd57b 100644 --- a/crates/ide-completion/src/render/union_literal.rs +++ b/crates/ide-completion/src/render/union_literal.rs @@ -6,7 +6,7 @@ use itertools::Itertools; use crate::{ render::{ - variant::{format_literal_label, visible_fields}, + variant::{format_literal_label, format_literal_lookup, visible_fields}, RenderContext, }, CompletionItem, CompletionItemKind, @@ -24,13 +24,16 @@ pub(crate) fn render_union_literal( Some(p) => (p.unescaped().to_string(), p.to_string()), None => (name.unescaped().to_string(), name.to_string()), }; - + let label = format_literal_label(&name.to_smol_str(), StructKind::Record); + let lookup = format_literal_lookup(&name.to_smol_str(), StructKind::Record); let mut item = CompletionItem::new( CompletionItemKind::SymbolKind(SymbolKind::Union), ctx.source_range(), - 
format_literal_label(&name.to_smol_str(), StructKind::Record), + label, ); + item.lookup_by(lookup); + let fields = un.fields(ctx.db()); let (fields, fields_omitted) = visible_fields(ctx.completion, &fields, un)?; diff --git a/crates/ide-completion/src/render/variant.rs b/crates/ide-completion/src/render/variant.rs index 664845330e..24e6abdc9a 100644 --- a/crates/ide-completion/src/render/variant.rs +++ b/crates/ide-completion/src/render/variant.rs @@ -94,3 +94,12 @@ pub(crate) fn format_literal_label(name: &str, kind: StructKind) -> SmolStr { StructKind::Unit => name.into(), } } + +/// Format a struct, etc. literal option for lookup used in completions filtering. +pub(crate) fn format_literal_lookup(name: &str, kind: StructKind) -> SmolStr { + match kind { + StructKind::Tuple => SmolStr::from_iter([name, "()"]), + StructKind::Record => SmolStr::from_iter([name, "{}"]), + StructKind::Unit => name.into(), + } +} diff --git a/crates/ide-completion/src/tests/pattern.rs b/crates/ide-completion/src/tests/pattern.rs index 30ddbe2dc6..85c4dbd662 100644 --- a/crates/ide-completion/src/tests/pattern.rs +++ b/crates/ide-completion/src/tests/pattern.rs @@ -467,7 +467,7 @@ fn foo() { fn completes_enum_variant_pat() { cov_mark::check!(enum_variant_pattern_path); check_edit( - "RecordVariant {…}", + "RecordVariant{}", r#" enum Enum { RecordVariant { field: u32 } From 14db0809335f51e5f239682d63895866f4ebdb8c Mon Sep 17 00:00:00 2001 From: Aleksandr Pak Date: Tue, 16 Aug 2022 17:06:32 +0300 Subject: [PATCH 0136/1945] feat: add inline_type_alias_uses assist --- .../src/handlers/inline_type_alias.rs | 234 +++++++++++++++--- crates/ide-assists/src/lib.rs | 1 + crates/ide-assists/src/tests/generated.rs | 25 ++ 3 files changed, 227 insertions(+), 33 deletions(-) diff --git a/crates/ide-assists/src/handlers/inline_type_alias.rs b/crates/ide-assists/src/handlers/inline_type_alias.rs index 054663a06a..6b1cf9b90d 100644 --- a/crates/ide-assists/src/handlers/inline_type_alias.rs +++ b/crates/ide-assists/src/handlers/inline_type_alias.rs @@ -1,9 +1,9 @@ // Some ideas for future improvements: // - Support replacing aliases which are used in expressions, e.g. `A::new()`. -// - "inline_alias_to_users" assist #10881. // - Remove unused aliases if there are no longer any users, see inline_call.rs. use hir::{HasSource, PathResolution}; +use ide_db::{defs::Definition, search::FileReference}; use itertools::Itertools; use std::collections::HashMap; use syntax::{ @@ -16,6 +16,78 @@ use crate::{ AssistId, AssistKind, }; +// Assist: inline_type_alias_uses +// +// Inline a type alias into all of its uses where possible. 
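The completion change above separates what is shown from what is matched: the label keeps the `…` placeholder (`Foo {…}`, `Foo(…)`) while the new `format_literal_lookup` produces `Foo{}` / `Foo()` for filtering, which is why the updated tests select completions by the `FooDesc{}`-style strings. A standalone sketch of the two helpers, using `String` instead of `SmolStr` purely to keep it self-contained:

```rust
/// Illustration of the label/lookup split for literal completions. `StructKind`
/// and the two helpers mirror the shape of the patch, but this is a standalone
/// sketch, not the ide-completion code itself.
#[derive(Clone, Copy)]
enum StructKind {
    Record,
    Tuple,
    Unit,
}

fn format_literal_label(name: &str, kind: StructKind) -> String {
    match kind {
        StructKind::Tuple => format!("{}(…)", name),
        StructKind::Record => format!("{} {{…}}", name),
        StructKind::Unit => name.to_string(),
    }
}

fn format_literal_lookup(name: &str, kind: StructKind) -> String {
    // The lookup text is what filtering matches against; dropping the `…`
    // placeholder keeps typed prefixes like `Foo{` or `Foo(` matching the item.
    match kind {
        StructKind::Tuple => format!("{}()", name),
        StructKind::Record => format!("{}{{}}", name),
        StructKind::Unit => name.to_string(),
    }
}

fn main() {
    for kind in [StructKind::Record, StructKind::Tuple, StructKind::Unit] {
        println!(
            "label: {:<12} lookup: {}",
            format_literal_label("Foo", kind),
            format_literal_lookup("Foo", kind),
        );
    }
}
```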
+// +// ``` +// type $0A = i32; +// fn id(x: A) -> A { +// x +// }; +// fn foo() { +// let _: A = 3; +// } +// ``` +// -> +// ``` +// type A = i32; +// fn id(x: i32) -> i32 { +// x +// }; +// fn foo() { +// let _: i32 = 3; +// } +pub(crate) fn inline_type_alias_uses(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { + let name = ctx.find_node_at_offset::()?; + let ast_alias = name.syntax().parent().and_then(ast::TypeAlias::cast)?; + + let hir_alias = ctx.sema.to_def(&ast_alias)?; + let concrete_type = ast_alias.ty()?; + + let usages = Definition::TypeAlias(hir_alias).usages(&ctx.sema); + if !usages.at_least_one() { + return None; + } + + // until this is ok + + acc.add( + AssistId("inline_type_alias_uses", AssistKind::RefactorInline), + "Inline type alias into all uses", + name.syntax().text_range(), + |builder| { + let usages = usages.all(); + + let mut inline_refs_for_file = |file_id, refs: Vec| { + builder.edit_file(file_id); + + let path_types: Vec = refs + .into_iter() + .filter_map(|file_ref| match file_ref.name { + ast::NameLike::NameRef(path_type) => { + path_type.syntax().ancestors().find_map(ast::PathType::cast) + } + _ => None, + }) + .collect(); + + for (target, replacement) in path_types.into_iter().filter_map(|path_type| { + let replacement = inline(&ast_alias, &path_type)?.to_text(&concrete_type); + let target = path_type.syntax().text_range(); + Some((target, replacement)) + }) { + builder.replace(target, replacement); + } + }; + + for (file_id, refs) in usages.into_iter() { + inline_refs_for_file(file_id, refs); + } + }, + ) +} + // Assist: inline_type_alias // // Replace a type alias with its concrete type. @@ -36,11 +108,6 @@ use crate::{ // } // ``` pub(crate) fn inline_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { - enum Replacement { - Generic { lifetime_map: LifetimeMap, const_and_type_map: ConstAndTypeMap }, - Plain, - } - let alias_instance = ctx.find_node_at_offset::()?; let concrete_type; let replacement; @@ -59,23 +126,7 @@ pub(crate) fn inline_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>) -> O _ => { let alias = get_type_alias(&ctx, &alias_instance)?; concrete_type = alias.ty()?; - - replacement = if let Some(alias_generics) = alias.generic_param_list() { - if alias_generics.generic_params().next().is_none() { - cov_mark::hit!(no_generics_params); - return None; - } - - let instance_args = - alias_instance.syntax().descendants().find_map(ast::GenericArgList::cast); - - Replacement::Generic { - lifetime_map: LifetimeMap::new(&instance_args, &alias_generics)?, - const_and_type_map: ConstAndTypeMap::new(&instance_args, &alias_generics)?, - } - } else { - Replacement::Plain - }; + replacement = inline(&alias, &alias_instance)?; } } @@ -85,19 +136,45 @@ pub(crate) fn inline_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>) -> O AssistId("inline_type_alias", AssistKind::RefactorInline), "Inline type alias", target, - |builder| { - let replacement_text = match replacement { - Replacement::Generic { lifetime_map, const_and_type_map } => { - create_replacement(&lifetime_map, &const_and_type_map, &concrete_type) - } - Replacement::Plain => concrete_type.to_string(), - }; - - builder.replace(target, replacement_text); - }, + |builder| builder.replace(target, replacement.to_text(&concrete_type)), ) } +impl Replacement { + fn to_text(&self, concrete_type: &ast::Type) -> String { + match self { + Replacement::Generic { lifetime_map, const_and_type_map } => { + create_replacement(&lifetime_map, &const_and_type_map, 
&concrete_type) + } + Replacement::Plain => concrete_type.to_string(), + } + } +} + +enum Replacement { + Generic { lifetime_map: LifetimeMap, const_and_type_map: ConstAndTypeMap }, + Plain, +} + +fn inline(alias_def: &ast::TypeAlias, alias_instance: &ast::PathType) -> Option { + let repl = if let Some(alias_generics) = alias_def.generic_param_list() { + if alias_generics.generic_params().next().is_none() { + cov_mark::hit!(no_generics_params); + return None; + } + let instance_args = + alias_instance.syntax().descendants().find_map(ast::GenericArgList::cast); + + Replacement::Generic { + lifetime_map: LifetimeMap::new(&instance_args, &alias_generics)?, + const_and_type_map: ConstAndTypeMap::new(&instance_args, &alias_generics)?, + } + } else { + Replacement::Plain + }; + Some(repl) +} + struct LifetimeMap(HashMap); impl LifetimeMap { @@ -835,4 +912,95 @@ trait Tr { "#, ); } + + mod inline_type_alias_uses { + use crate::{handlers::inline_type_alias::inline_type_alias_uses, tests::check_assist}; + + #[test] + fn inline_uses() { + check_assist( + inline_type_alias_uses, + r#" +type $0A = u32; + +fn foo() { + let _: A = 3; + let _: A = 4; +} +"#, + r#" +type A = u32; + +fn foo() { + let _: u32 = 3; + let _: u32 = 4; +} +"#, + ); + } + + #[test] + fn inline_uses_across_files() { + check_assist( + inline_type_alias_uses, + r#" +//- /lib.rs +mod foo; +type $0T = Vec; +fn f() -> T<&str> { + vec!["hello"] +} + +//- /foo.rs +use super::T; +fn foo() { + let _: T = Vec::new(); +} +"#, + r#" +//- /lib.rs +mod foo; +type T = Vec; +fn f() -> Vec<&str> { + vec!["hello"] +} + +//- /foo.rs +use super::T; +fn foo() { + let _: Vec = Vec::new(); +} +"#, + ); + } + + #[test] + fn inline_uses_across_files_fails() { + check_assist( + inline_type_alias_uses, + r#" +//- /lib.rs +mod foo; +type $0I = i32; + +//- /foo.rs +use super::I; +fn foo() { + let _: I = 0; +} +"#, + r#" +//- /lib.rs +mod foo; +type I = i32; + +//- /foo.rs +use super::I; +fn foo() { + let _: i32 = 0; +} +"#, + ); + } + } } diff --git a/crates/ide-assists/src/lib.rs b/crates/ide-assists/src/lib.rs index fe87aa15fc..7fb35143fa 100644 --- a/crates/ide-assists/src/lib.rs +++ b/crates/ide-assists/src/lib.rs @@ -243,6 +243,7 @@ mod handlers { inline_call::inline_into_callers, inline_local_variable::inline_local_variable, inline_type_alias::inline_type_alias, + inline_type_alias::inline_type_alias_uses, introduce_named_generic::introduce_named_generic, introduce_named_lifetime::introduce_named_lifetime, invert_if::invert_if, diff --git a/crates/ide-assists/src/tests/generated.rs b/crates/ide-assists/src/tests/generated.rs index 6eaab48a32..22319f3613 100644 --- a/crates/ide-assists/src/tests/generated.rs +++ b/crates/ide-assists/src/tests/generated.rs @@ -1356,6 +1356,31 @@ fn main() { ) } +#[test] +fn doctest_inline_type_alias_uses() { + check_doc_test( + "inline_type_alias_uses", + r#####" +type $0A = i32; +fn id(x: A) -> A { + x +}; +fn foo() { + let _: A = 3; +} +"#####, + r#####" +type A = i32; +fn id(x: i32) -> i32 { + x +}; +fn foo() { + let _: i32 = 3; +} +"#####, + ) +} + #[test] fn doctest_introduce_named_generic() { check_doc_test( From 0616cee92b47b539a802171f7e6817663175502d Mon Sep 17 00:00:00 2001 From: Jonas Schievink Date: Tue, 16 Aug 2022 16:51:40 +0200 Subject: [PATCH 0137/1945] Add a setting for keyword hover popups --- crates/ide/src/hover.rs | 1 + crates/ide/src/hover/render.rs | 2 +- crates/ide/src/hover/tests.rs | 42 +++++++++++++++++++++++++----- crates/ide/src/static_index.rs | 7 +++-- crates/rust-analyzer/src/config.rs | 4 
+++ docs/user/generated_config.adoc | 6 +++++ editors/code/package.json | 5 ++++ 7 files changed, 57 insertions(+), 10 deletions(-) diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs index fa2c6f09ab..3687b597fc 100644 --- a/crates/ide/src/hover.rs +++ b/crates/ide/src/hover.rs @@ -27,6 +27,7 @@ use crate::{ pub struct HoverConfig { pub links_in_hover: bool, pub documentation: Option, + pub keywords: bool, } impl HoverConfig { diff --git a/crates/ide/src/hover/render.rs b/crates/ide/src/hover/render.rs index 6c50a4e6ad..d52adaee53 100644 --- a/crates/ide/src/hover/render.rs +++ b/crates/ide/src/hover/render.rs @@ -230,7 +230,7 @@ pub(super) fn keyword( config: &HoverConfig, token: &SyntaxToken, ) -> Option { - if !token.kind().is_keyword() || !config.documentation.is_some() { + if !token.kind().is_keyword() || !config.documentation.is_some() || !config.keywords { return None; } let parent = token.parent()?; diff --git a/crates/ide/src/hover/tests.rs b/crates/ide/src/hover/tests.rs index c6274264b8..685eb4521e 100644 --- a/crates/ide/src/hover/tests.rs +++ b/crates/ide/src/hover/tests.rs @@ -8,7 +8,11 @@ fn check_hover_no_result(ra_fixture: &str) { let (analysis, position) = fixture::position(ra_fixture); let hover = analysis .hover( - &HoverConfig { links_in_hover: true, documentation: Some(HoverDocFormat::Markdown) }, + &HoverConfig { + links_in_hover: true, + documentation: Some(HoverDocFormat::Markdown), + keywords: true, + }, FileRange { file_id: position.file_id, range: TextRange::empty(position.offset) }, ) .unwrap(); @@ -20,7 +24,11 @@ fn check(ra_fixture: &str, expect: Expect) { let (analysis, position) = fixture::position(ra_fixture); let hover = analysis .hover( - &HoverConfig { links_in_hover: true, documentation: Some(HoverDocFormat::Markdown) }, + &HoverConfig { + links_in_hover: true, + documentation: Some(HoverDocFormat::Markdown), + keywords: true, + }, FileRange { file_id: position.file_id, range: TextRange::empty(position.offset) }, ) .unwrap() @@ -37,7 +45,11 @@ fn check_hover_no_links(ra_fixture: &str, expect: Expect) { let (analysis, position) = fixture::position(ra_fixture); let hover = analysis .hover( - &HoverConfig { links_in_hover: false, documentation: Some(HoverDocFormat::Markdown) }, + &HoverConfig { + links_in_hover: false, + documentation: Some(HoverDocFormat::Markdown), + keywords: true, + }, FileRange { file_id: position.file_id, range: TextRange::empty(position.offset) }, ) .unwrap() @@ -54,7 +66,11 @@ fn check_hover_no_markdown(ra_fixture: &str, expect: Expect) { let (analysis, position) = fixture::position(ra_fixture); let hover = analysis .hover( - &HoverConfig { links_in_hover: true, documentation: Some(HoverDocFormat::PlainText) }, + &HoverConfig { + links_in_hover: true, + documentation: Some(HoverDocFormat::PlainText), + keywords: true, + }, FileRange { file_id: position.file_id, range: TextRange::empty(position.offset) }, ) .unwrap() @@ -71,7 +87,11 @@ fn check_actions(ra_fixture: &str, expect: Expect) { let (analysis, file_id, position) = fixture::range_or_position(ra_fixture); let hover = analysis .hover( - &HoverConfig { links_in_hover: true, documentation: Some(HoverDocFormat::Markdown) }, + &HoverConfig { + links_in_hover: true, + documentation: Some(HoverDocFormat::Markdown), + keywords: true, + }, FileRange { file_id, range: position.range_or_empty() }, ) .unwrap() @@ -83,7 +103,11 @@ fn check_hover_range(ra_fixture: &str, expect: Expect) { let (analysis, range) = fixture::range(ra_fixture); let hover = analysis .hover( - 
&HoverConfig { links_in_hover: false, documentation: Some(HoverDocFormat::Markdown) }, + &HoverConfig { + links_in_hover: false, + documentation: Some(HoverDocFormat::Markdown), + keywords: true, + }, range, ) .unwrap() @@ -95,7 +119,11 @@ fn check_hover_range_no_results(ra_fixture: &str) { let (analysis, range) = fixture::range(ra_fixture); let hover = analysis .hover( - &HoverConfig { links_in_hover: false, documentation: Some(HoverDocFormat::Markdown) }, + &HoverConfig { + links_in_hover: false, + documentation: Some(HoverDocFormat::Markdown), + keywords: true, + }, range, ) .unwrap(); diff --git a/crates/ide/src/static_index.rs b/crates/ide/src/static_index.rs index cc79ee55b7..9e5eb90950 100644 --- a/crates/ide/src/static_index.rs +++ b/crates/ide/src/static_index.rs @@ -130,8 +130,11 @@ impl StaticIndex<'_> { syntax::NodeOrToken::Node(_) => None, syntax::NodeOrToken::Token(x) => Some(x), }); - let hover_config = - HoverConfig { links_in_hover: true, documentation: Some(HoverDocFormat::Markdown) }; + let hover_config = HoverConfig { + links_in_hover: true, + documentation: Some(HoverDocFormat::Markdown), + keywords: true, + }; let tokens = tokens.filter(|token| { matches!( token.kind(), diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index 1629c1dd32..f2fae58602 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs @@ -244,6 +244,9 @@ config_data! { /// Whether to show documentation on hover. hover_documentation_enable: bool = "true", + /// Whether to show keyword hover popups. Only applies when + /// `#rust-analyzer.hover.documentation.enable#` is set. + hover_documentation_keywords: bool = "true", /// Use markdown syntax for links in hover. hover_links_enable: bool = "true", @@ -1187,6 +1190,7 @@ impl Config { HoverDocFormat::PlainText } }), + keywords: self.data.hover_documentation_keywords, } } diff --git a/docs/user/generated_config.adoc b/docs/user/generated_config.adoc index b0f2f1614d..00a6e3c43c 100644 --- a/docs/user/generated_config.adoc +++ b/docs/user/generated_config.adoc @@ -318,6 +318,12 @@ Whether to show `Run` action. Only applies when -- Whether to show documentation on hover. -- +[[rust-analyzer.hover.documentation.keywords]]rust-analyzer.hover.documentation.keywords (default: `true`):: ++ +-- +Whether to show keyword hover popups. Only applies when +`#rust-analyzer.hover.documentation.enable#` is set. +-- [[rust-analyzer.hover.links.enable]]rust-analyzer.hover.links.enable (default: `true`):: + -- diff --git a/editors/code/package.json b/editors/code/package.json index fbdc69c801..6352e96811 100644 --- a/editors/code/package.json +++ b/editors/code/package.json @@ -756,6 +756,11 @@ "default": true, "type": "boolean" }, + "rust-analyzer.hover.documentation.keywords": { + "markdownDescription": "Whether to show keyword hover popups. Only applies when\n`#rust-analyzer.hover.documentation.enable#` is set.", + "default": true, + "type": "boolean" + }, "rust-analyzer.hover.links.enable": { "markdownDescription": "Use markdown syntax for links in hover.", "default": true, From 6d6356b103225522166496975f98b15d9827b6c8 Mon Sep 17 00:00:00 2001 From: Aleksandr Pak Date: Tue, 16 Aug 2022 18:29:22 +0300 Subject: [PATCH 0138/1945] fixup! 
feat: add inline_type_alias_uses assist --- crates/ide-assists/src/handlers/inline_type_alias.rs | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/crates/ide-assists/src/handlers/inline_type_alias.rs b/crates/ide-assists/src/handlers/inline_type_alias.rs index 6b1cf9b90d..c4e5bc9c7c 100644 --- a/crates/ide-assists/src/handlers/inline_type_alias.rs +++ b/crates/ide-assists/src/handlers/inline_type_alias.rs @@ -975,7 +975,7 @@ fn foo() { } #[test] - fn inline_uses_across_files_fails() { + fn inline_uses_across_files_2() { check_assist( inline_type_alias_uses, r#" @@ -990,11 +990,6 @@ fn foo() { } "#, r#" -//- /lib.rs -mod foo; -type I = i32; - -//- /foo.rs use super::I; fn foo() { let _: i32 = 0; From ad7a1ed8cc9ba44c6a294317a2289a0b6fd75219 Mon Sep 17 00:00:00 2001 From: Dominik Gschwind Date: Tue, 16 Aug 2022 17:30:17 +0200 Subject: [PATCH 0139/1945] fix: Fix panics on GATs involving const generics This workaround avoids constant crashing of rust analyzer when using GATs with const generics, even when the const generics are only on the `impl` block. The workaround treats GATs as non-existing if either itself or the parent has const generics and removes relevant panicking code-paths. --- crates/hir-ty/src/lower.rs | 9 ++++++++- crates/hir-ty/src/tests/regression.rs | 21 +++++++++++++++++++++ crates/hir-ty/src/utils.rs | 18 ++++++++++-------- 3 files changed, 39 insertions(+), 9 deletions(-) diff --git a/crates/hir-ty/src/lower.rs b/crates/hir-ty/src/lower.rs index 239f66bcb7..3ec321ad37 100644 --- a/crates/hir-ty/src/lower.rs +++ b/crates/hir-ty/src/lower.rs @@ -509,7 +509,14 @@ impl<'a> TyLoweringContext<'a> { TyKind::Placeholder(to_placeholder_idx(self.db, param_id.into())) } ParamLoweringMode::Variable => { - let idx = generics.param_idx(param_id.into()).expect("matching generics"); + let idx = match generics.param_idx(param_id.into()) { + None => { + never!("no matching generics"); + return (TyKind::Error.intern(Interner), None); + } + Some(idx) => idx, + }; + TyKind::BoundVar(BoundVar::new(self.in_binders, idx)) } } diff --git a/crates/hir-ty/src/tests/regression.rs b/crates/hir-ty/src/tests/regression.rs index 93a88ab58e..9b64fccccd 100644 --- a/crates/hir-ty/src/tests/regression.rs +++ b/crates/hir-ty/src/tests/regression.rs @@ -1526,6 +1526,27 @@ unsafe impl Storage for InlineStorage { ); } +#[test] +fn gat_crash_3() { + cov_mark::check!(ignore_gats); + check_no_mismatches( + r#" +trait Collection { + type Item; + type Member: Collection; + fn add(&mut self, value: Self::Item) -> Result<(), Self::Error>; +} +struct ConstGen { + data: [T; N], +} +impl Collection for ConstGen { + type Item = T; + type Member = ConstGen; +} + "#, + ); +} + #[test] fn cfgd_out_self_param() { cov_mark::check!(cfgd_out_self_param); diff --git a/crates/hir-ty/src/utils.rs b/crates/hir-ty/src/utils.rs index 83319755da..bdb9ade9c8 100644 --- a/crates/hir-ty/src/utils.rs +++ b/crates/hir-ty/src/utils.rs @@ -176,10 +176,16 @@ pub(crate) fn generics(db: &dyn DefDatabase, def: GenericDefId) -> Generics { let parent_generics = parent_generic_def(db, def).map(|def| Box::new(generics(db, def))); if parent_generics.is_some() && matches!(def, GenericDefId::TypeAliasId(_)) { let params = db.generic_params(def); + let parent_params = &parent_generics.as_ref().unwrap().params; let has_consts = params.iter().any(|(_, x)| matches!(x, TypeOrConstParamData::ConstParamData(_))); - return if has_consts { - // XXX: treat const generic associated types as not existing to avoid crashes (#11769) + let 
parent_has_consts = + parent_params.iter().any(|(_, x)| matches!(x, TypeOrConstParamData::ConstParamData(_))); + return if has_consts || parent_has_consts { + // XXX: treat const generic associated types as not existing to avoid crashes + // (#11769, #12193) + // Note: also crashes when the parent has const generics (also even if the GAT + // doesn't use them), see `tests::regression::gat_crash_3` for an example. // // Chalk expects the inner associated type's parameters to come // *before*, not after the trait's generics as we've always done it. @@ -264,12 +270,8 @@ impl Generics { fn find_param(&self, param: TypeOrConstParamId) -> Option<(usize, &TypeOrConstParamData)> { if param.parent == self.def { - let (idx, (_local_id, data)) = self - .params - .iter() - .enumerate() - .find(|(_, (idx, _))| *idx == param.local_id) - .unwrap(); + let (idx, (_local_id, data)) = + self.params.iter().enumerate().find(|(_, (idx, _))| *idx == param.local_id)?; let parent_len = self.parent_generics().map_or(0, Generics::len); Some((parent_len + idx, data)) } else { From 7fb7c248c7b37699da7eb5944f45a9b59121b827 Mon Sep 17 00:00:00 2001 From: Jonas Schievink Date: Tue, 16 Aug 2022 18:12:15 +0200 Subject: [PATCH 0140/1945] Add `.enable` suffix --- crates/rust-analyzer/src/config.rs | 6 +++--- docs/user/generated_config.adoc | 2 +- editors/code/package.json | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index f2fae58602..e63cfb163c 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs @@ -243,10 +243,10 @@ config_data! { hover_actions_run_enable: bool = "true", /// Whether to show documentation on hover. - hover_documentation_enable: bool = "true", + hover_documentation_enable: bool = "true", /// Whether to show keyword hover popups. Only applies when /// `#rust-analyzer.hover.documentation.enable#` is set. - hover_documentation_keywords: bool = "true", + hover_documentation_keywords_enable: bool = "true", /// Use markdown syntax for links in hover. hover_links_enable: bool = "true", @@ -1190,7 +1190,7 @@ impl Config { HoverDocFormat::PlainText } }), - keywords: self.data.hover_documentation_keywords, + keywords: self.data.hover_documentation_keywords_enable, } } diff --git a/docs/user/generated_config.adoc b/docs/user/generated_config.adoc index 00a6e3c43c..e3dbeec1fb 100644 --- a/docs/user/generated_config.adoc +++ b/docs/user/generated_config.adoc @@ -318,7 +318,7 @@ Whether to show `Run` action. Only applies when -- Whether to show documentation on hover. -- -[[rust-analyzer.hover.documentation.keywords]]rust-analyzer.hover.documentation.keywords (default: `true`):: +[[rust-analyzer.hover.documentation.keywords.enable]]rust-analyzer.hover.documentation.keywords.enable (default: `true`):: + -- Whether to show keyword hover popups. Only applies when diff --git a/editors/code/package.json b/editors/code/package.json index 6352e96811..0714b356fe 100644 --- a/editors/code/package.json +++ b/editors/code/package.json @@ -756,7 +756,7 @@ "default": true, "type": "boolean" }, - "rust-analyzer.hover.documentation.keywords": { + "rust-analyzer.hover.documentation.keywords.enable": { "markdownDescription": "Whether to show keyword hover popups. 
Only applies when\n`#rust-analyzer.hover.documentation.enable#` is set.", "default": true, "type": "boolean" From 1f73cbe8390d4a8ebe4e4039891567d8358c2c70 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 16 Aug 2022 19:13:10 +0200 Subject: [PATCH 0141/1945] Revert #12947, trigger workspace switches on all structure changes again --- crates/rust-analyzer/src/global_state.rs | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs index b5f6aef2e1..c55bbbbe6e 100644 --- a/crates/rust-analyzer/src/global_state.rs +++ b/crates/rust-analyzer/src/global_state.rs @@ -8,7 +8,7 @@ use std::{sync::Arc, time::Instant}; use crossbeam_channel::{unbounded, Receiver, Sender}; use flycheck::FlycheckHandle; use ide::{Analysis, AnalysisHost, Cancellable, Change, FileId}; -use ide_db::base_db::{CrateId, FileLoader, SourceDatabase, SourceDatabaseExt}; +use ide_db::base_db::{CrateId, FileLoader, SourceDatabase}; use lsp_types::{SemanticTokens, Url}; use parking_lot::{Mutex, RwLock}; use proc_macro_api::ProcMacroServer; @@ -176,9 +176,9 @@ impl GlobalState { pub(crate) fn process_changes(&mut self) -> bool { let _p = profile::span("GlobalState::process_changes"); - let mut fs_refresh_changes = Vec::new(); // A file was added or deleted let mut has_structure_changes = false; + let mut workspace_structure_change = None; let (change, changed_files) = { let mut change = Change::new(); @@ -192,7 +192,7 @@ impl GlobalState { if let Some(path) = vfs.file_path(file.file_id).as_path() { let path = path.to_path_buf(); if reload::should_refresh_for_change(&path, file.change_kind) { - fs_refresh_changes.push((path, file.file_id)); + workspace_structure_change = Some(path); } if file.is_created_or_deleted() { has_structure_changes = true; @@ -227,11 +227,10 @@ impl GlobalState { { let raw_database = self.analysis_host.raw_database(); - let workspace_structure_change = - fs_refresh_changes.into_iter().find(|&(_, file_id)| { - !raw_database.source_root(raw_database.file_source_root(file_id)).is_library - }); - if let Some((path, _)) = workspace_structure_change { + // FIXME: ideally we should only trigger a workspace fetch for non-library changes + // but somethings going wrong with the source root business when we add a new local + // crate see https://github.com/rust-lang/rust-analyzer/issues/13029 + if let Some(path) = workspace_structure_change { self.fetch_workspaces_queue .request_op(format!("workspace vfs file change: {}", path.display())); } From a0b257c9d91d00c054376facc6681f563f1a5e1b Mon Sep 17 00:00:00 2001 From: David Barsky Date: Tue, 16 Aug 2022 13:38:50 -0400 Subject: [PATCH 0142/1945] chore: remove unused `currentExtensionIsNightly()` in config.ts --- editors/code/src/config.ts | 6 ------ 1 file changed, 6 deletions(-) diff --git a/editors/code/src/config.ts b/editors/code/src/config.ts index 1c58040d58..b83582a344 100644 --- a/editors/code/src/config.ts +++ b/editors/code/src/config.ts @@ -5,8 +5,6 @@ import { log } from "./util"; export type UpdatesChannel = "stable" | "nightly"; -const NIGHTLY_TAG = "nightly"; - export type RunnableEnvCfg = | undefined | Record @@ -175,10 +173,6 @@ export class Config { gotoTypeDef: this.get("hover.actions.gotoTypeDef.enable"), }; } - - get currentExtensionIsNightly() { - return this.package.releaseTag === NIGHTLY_TAG; - } } export async function updateConfig(config: vscode.WorkspaceConfiguration) { From 39d17efde7d2da436f349c1b0ca55c37bae0254f Mon Sep 17 
00:00:00 2001 From: Dorian Scheidt Date: Sat, 13 Aug 2022 13:29:41 -0500 Subject: [PATCH 0143/1945] feat: Generate static method using Self::assoc() syntax This change improves the `generate_function` assist to support generating static methods/associated functions using the `Self::assoc()` syntax. Previously, one could generate a static method, but only when specifying the type name directly (like `Foo::assoc()`). After this change, `Self` is supported as well as the type name. Fixes #13012 --- .../src/handlers/generate_function.rs | 64 +++++++++++++++---- 1 file changed, 53 insertions(+), 11 deletions(-) diff --git a/crates/ide-assists/src/handlers/generate_function.rs b/crates/ide-assists/src/handlers/generate_function.rs index d564a05408..49345cb983 100644 --- a/crates/ide-assists/src/handlers/generate_function.rs +++ b/crates/ide-assists/src/handlers/generate_function.rs @@ -61,7 +61,7 @@ fn gen_fn(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { } let fn_name = &*name_ref.text(); - let target_module; + let mut target_module = None; let mut adt_name = None; let (target, file, insert_offset) = match path.qualifier() { @@ -78,16 +78,11 @@ fn gen_fn(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { } } - let current_module = ctx.sema.scope(call.syntax())?.module(); - let module = adt.module(ctx.sema.db); - target_module = if current_module == module { None } else { Some(module) }; - if current_module.krate() != module.krate() { - return None; - } - let (impl_, file) = get_adt_source(ctx, &adt, fn_name)?; - let (target, insert_offset) = get_method_target(ctx, &module, &impl_)?; - adt_name = if impl_.is_none() { Some(adt.name(ctx.sema.db)) } else { None }; - (target, file, insert_offset) + static_method_target(ctx, &call, adt, &mut target_module, fn_name, &mut adt_name)? + } + Some(hir::PathResolution::SelfType(impl_)) => { + let adt = impl_.self_ty(ctx.db()).as_adt()?; + static_method_target(ctx, &call, adt, &mut target_module, fn_name, &mut adt_name)? 
} _ => { return None; @@ -399,6 +394,26 @@ fn get_method_target( Some((target.clone(), get_insert_offset(&target))) } +fn static_method_target( + ctx: &AssistContext<'_>, + call: &CallExpr, + adt: hir::Adt, + target_module: &mut Option, + fn_name: &str, + adt_name: &mut Option, +) -> Option<(GeneratedFunctionTarget, FileId, TextSize)> { + let current_module = ctx.sema.scope(call.syntax())?.module(); + let module = adt.module(ctx.sema.db); + *target_module = if current_module == module { None } else { Some(module) }; + if current_module.krate() != module.krate() { + return None; + } + let (impl_, file) = get_adt_source(ctx, &adt, fn_name)?; + let (target, insert_offset) = get_method_target(ctx, &module, &impl_)?; + *adt_name = if impl_.is_none() { Some(adt.name(ctx.sema.db)) } else { None }; + Some((target, file, insert_offset)) +} + fn get_insert_offset(target: &GeneratedFunctionTarget) -> TextSize { match &target { GeneratedFunctionTarget::BehindItem(it) => it.text_range().end(), @@ -1633,6 +1648,33 @@ fn bar() ${0:-> _} { ) } + #[test] + fn create_static_method_within_an_impl_with_self_syntax() { + check_assist( + generate_function, + r" +struct S; +impl S { + fn foo(&self) { + Self::bar$0(); + } +} +", + r" +struct S; +impl S { + fn foo(&self) { + Self::bar(); + } + + fn bar() ${0:-> _} { + todo!() + } +} +", + ) + } + #[test] fn no_panic_on_invalid_global_path() { check_assist( From cebf95718c32b1024a6845992bee96c50f830f3a Mon Sep 17 00:00:00 2001 From: Justin Ridgewell Date: Tue, 16 Aug 2022 17:53:10 -0400 Subject: [PATCH 0144/1945] Find IntoFuture::IntoFuture's poll method --- crates/hir/src/lib.rs | 5 +-- crates/hir/src/source_analyzer.rs | 34 ++++++++++++++++---- crates/ide-completion/src/completions/dot.rs | 2 +- crates/ide/src/goto_definition.rs | 34 ++++++++++++++++++++ 4 files changed, 65 insertions(+), 10 deletions(-) diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 7f16634afe..46d24b9f14 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -2777,9 +2777,10 @@ impl Type { self.ty.is_unknown() } - /// Checks that particular type `ty` implements `std::future::Future`. + /// Checks that particular type `ty` implements `std::future::IntoFuture` or + /// `std::future::Future`. /// This function is used in `.await` syntax completion. - pub fn impls_future(&self, db: &dyn HirDatabase) -> bool { + pub fn impls_into_future(&self, db: &dyn HirDatabase) -> bool { let trait_ = db .lang_item(self.env.krate, SmolStr::new_inline("into_future")) .and_then(|it| { diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs index 756772cf84..9418afa91d 100644 --- a/crates/hir/src/source_analyzer.rs +++ b/crates/hir/src/source_analyzer.rs @@ -27,6 +27,7 @@ use hir_def::{ use hir_expand::{ builtin_fn_macro::BuiltinFnLikeExpander, hygiene::Hygiene, + mod_path::path, name, name::{AsName, Name}, HirFileId, InFile, @@ -269,16 +270,35 @@ impl SourceAnalyzer { db: &dyn HirDatabase, await_expr: &ast::AwaitExpr, ) -> Option { - // FIXME This should be pointing to the poll of IntoFuture::Output's Future impl, but I - // don't know how to resolve the Output type so that we can query for its poll method. 
- let ty = self.ty_of_expr(db, &await_expr.expr()?.into())?; + let mut ty = self.ty_of_expr(db, &await_expr.expr()?.into())?.clone(); - let op_fn = db + let into_future_trait = self + .resolver + .resolve_known_trait(db.upcast(), &path![core::future::IntoFuture]) + .map(Trait::from); + + if let Some(into_future_trait) = into_future_trait { + let type_ = Type::new_with_resolver(db, &self.resolver, ty.clone()); + if type_.impls_trait(db, into_future_trait, &[]) { + let items = into_future_trait.items(db); + let into_future_type = items.into_iter().find_map(|item| match item { + AssocItem::TypeAlias(alias) + if alias.name(db) == hir_expand::name![IntoFuture] => + { + Some(alias) + } + _ => None, + })?; + let future_trait = type_.normalize_trait_assoc_type(db, &[], into_future_type)?; + ty = future_trait.ty; + } + } + + let poll_fn = db .lang_item(self.resolver.krate(), hir_expand::name![poll].to_smol_str())? .as_function()?; - let substs = hir_ty::TyBuilder::subst_for_def(db, op_fn).push(ty.clone()).build(); - - Some(self.resolve_impl_method_or_trait_def(db, op_fn, &substs)) + let substs = hir_ty::TyBuilder::subst_for_def(db, poll_fn).push(ty.clone()).build(); + Some(self.resolve_impl_method_or_trait_def(db, poll_fn, &substs)) } pub(crate) fn resolve_prefix_expr( diff --git a/crates/ide-completion/src/completions/dot.rs b/crates/ide-completion/src/completions/dot.rs index cf40ca489c..02004ff7b6 100644 --- a/crates/ide-completion/src/completions/dot.rs +++ b/crates/ide-completion/src/completions/dot.rs @@ -19,7 +19,7 @@ pub(crate) fn complete_dot( }; // Suggest .await syntax for types that implement Future trait - if receiver_ty.impls_future(ctx.db) { + if receiver_ty.impls_into_future(ctx.db) { let mut item = CompletionItem::new(CompletionItemKind::Keyword, ctx.source_range(), "await"); item.detail("expr.await"); diff --git a/crates/ide/src/goto_definition.rs b/crates/ide/src/goto_definition.rs index b2123b9a87..c7a922e6cc 100644 --- a/crates/ide/src/goto_definition.rs +++ b/crates/ide/src/goto_definition.rs @@ -1664,6 +1664,40 @@ fn f() { ); } + #[test] + fn goto_await_into_future_poll() { + check( + r#" +//- minicore: future + +struct Futurable; + +impl core::future::IntoFuture for Futurable { + type IntoFuture = MyFut; +} + +struct MyFut; + +impl core::future::Future for MyFut { + type Output = (); + + fn poll( + //^^^^ + self: std::pin::Pin<&mut Self>, + cx: &mut std::task::Context<'_> + ) -> std::task::Poll + { + () + } +} + +fn f() { + Futurable.await$0; +} +"#, + ); + } + #[test] fn goto_try_op() { check( From 557c5b4dc57cb0fd5455bec35f2a97a4626e3df1 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Wed, 17 Aug 2022 09:32:25 +0200 Subject: [PATCH 0145/1945] minor: Change tracing event level in apply_change --- crates/ide-db/src/apply_change.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/ide-db/src/apply_change.rs b/crates/ide-db/src/apply_change.rs index f8134c552f..b1ee9b58d5 100644 --- a/crates/ide-db/src/apply_change.rs +++ b/crates/ide-db/src/apply_change.rs @@ -20,7 +20,7 @@ impl RootDatabase { pub fn apply_change(&mut self, change: Change) { let _p = profile::span("RootDatabase::apply_change"); self.request_cancellation(); - tracing::info!("apply_change {:?}", change); + tracing::trace!("apply_change {:?}", change); if let Some(roots) = &change.roots { let mut local_roots = FxHashSet::default(); let mut library_roots = FxHashSet::default(); From a3409c3a83fb1d8139bac8b3e438f56f2ac633a3 Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Tue, 16 Aug 2022 
10:08:45 +0900 Subject: [PATCH 0146/1945] fix: escape keywords used as names in earlier editions --- crates/hir-expand/src/name.rs | 10 ++++++++-- crates/ide-db/src/search.rs | 4 +++- 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/crates/hir-expand/src/name.rs b/crates/hir-expand/src/name.rs index 87c663eec8..a1bb353374 100644 --- a/crates/hir-expand/src/name.rs +++ b/crates/hir-expand/src/name.rs @@ -90,10 +90,16 @@ impl Name { /// Resolve a name from the text of token. fn resolve(raw_text: &str) -> Name { - // When `raw_text` starts with "r#" but the name does not coincide with any - // keyword, we never need the prefix so we strip it. match raw_text.strip_prefix("r#") { + // When `raw_text` starts with "r#" but the name does not coincide with any + // keyword, we never need the prefix so we strip it. Some(text) if !is_raw_identifier(text) => Name::new_text(SmolStr::new(text)), + // Keywords (in the current edition) *can* be used as a name in earlier editions of + // Rust, e.g. "try" in Rust 2015. Even in such cases, we keep track of them in their + // escaped form. + None if is_raw_identifier(raw_text) => { + Name::new_text(SmolStr::from_iter(["r#", raw_text])) + } _ => Name::new_text(raw_text.into()), } } diff --git a/crates/ide-db/src/search.rs b/crates/ide-db/src/search.rs index 9eaabeec7a..2f4aa11317 100644 --- a/crates/ide-db/src/search.rs +++ b/crates/ide-db/src/search.rs @@ -402,7 +402,9 @@ impl<'a> FindUsages<'a> { .or_else(|| ty.as_builtin().map(|builtin| builtin.name())) }) }; - self.def.name(sema.db).or_else(self_kw_refs).map(|it| it.to_smol_str()) + // We need to unescape the name in case it is written without "r#" in earlier + // editions of Rust where it isn't a keyword. + self.def.name(sema.db).or_else(self_kw_refs).map(|it| it.unescaped().to_smol_str()) } }; let name = match &name { From 313b004ef76d3352f36e5128b16ca5212de97d49 Mon Sep 17 00:00:00 2001 From: Aleksandr Pak Date: Wed, 17 Aug 2022 12:50:33 +0300 Subject: [PATCH 0147/1945] fixup! feat: add inline_type_alias_uses assist --- crates/ide-assists/src/handlers/inline_type_alias.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/ide-assists/src/handlers/inline_type_alias.rs b/crates/ide-assists/src/handlers/inline_type_alias.rs index c4e5bc9c7c..9adf6381c1 100644 --- a/crates/ide-assists/src/handlers/inline_type_alias.rs +++ b/crates/ide-assists/src/handlers/inline_type_alias.rs @@ -66,7 +66,7 @@ pub(crate) fn inline_type_alias_uses(acc: &mut Assists, ctx: &AssistContext<'_>) .into_iter() .filter_map(|file_ref| match file_ref.name { ast::NameLike::NameRef(path_type) => { - path_type.syntax().ancestors().find_map(ast::PathType::cast) + path_type.syntax().ancestors().nth(3).and_then(ast::PathType::cast) } _ => None, }) From 23747419ca52414d3ecf6f69f1e530e47ab1e937 Mon Sep 17 00:00:00 2001 From: Dezhi Wu Date: Wed, 17 Aug 2022 21:44:58 +0800 Subject: [PATCH 0148/1945] fix: a bunch of typos This PR will fix some typos detected by [typos]. There are also some other typos in the function names, variable names, and file names, which I leave as they are. I'm more certain that typos in comments should be fixed. 
[typos]: https://github.com/crate-ci/typos --- crates/flycheck/src/lib.rs | 2 +- crates/hir-def/src/nameres/proc_macro.rs | 2 +- crates/hir-expand/src/lib.rs | 2 +- crates/ide-assists/src/handlers/extract_module.rs | 6 +++--- .../src/handlers/replace_turbofish_with_explicit_type.rs | 2 +- crates/ide-assists/src/utils/suggest_name.rs | 4 ++-- .../ide-completion/src/completions/postfix/format_like.rs | 4 ++-- crates/ide-completion/src/tests/flyimport.rs | 2 +- crates/ide-db/src/rename.rs | 2 +- crates/ide-diagnostics/src/handlers/missing_match_arms.rs | 2 +- crates/mbe/src/expander/matcher.rs | 2 +- crates/project-model/src/lib.rs | 2 +- crates/project-model/src/workspace.rs | 2 +- crates/rust-analyzer/src/bin/logger.rs | 2 +- crates/syntax/src/hacks.rs | 2 +- crates/vfs/src/lib.rs | 2 +- docs/dev/architecture.md | 2 +- editors/code/src/commands.ts | 2 +- editors/code/src/toolchain.ts | 2 +- 19 files changed, 23 insertions(+), 23 deletions(-) diff --git a/crates/flycheck/src/lib.rs b/crates/flycheck/src/lib.rs index 3347940ec6..2bebea2fbf 100644 --- a/crates/flycheck/src/lib.rs +++ b/crates/flycheck/src/lib.rs @@ -345,7 +345,7 @@ impl CargoActor { // // Because cargo only outputs one JSON object per line, we can // simply skip a line if it doesn't parse, which just ignores any - // erroneus output. + // erroneous output. let mut error = String::new(); let mut read_at_least_one_message = false; diff --git a/crates/hir-def/src/nameres/proc_macro.rs b/crates/hir-def/src/nameres/proc_macro.rs index 5089ef2d81..52b79cd0fd 100644 --- a/crates/hir-def/src/nameres/proc_macro.rs +++ b/crates/hir-def/src/nameres/proc_macro.rs @@ -45,7 +45,7 @@ impl Attrs { kind: ProcMacroKind::CustomDerive { helpers: Box::new([]) }, }), - // `#[proc_macro_derive(Trait, attibutes(helper1, helper2, ...))]` + // `#[proc_macro_derive(Trait, attributes(helper1, helper2, ...))]` [ TokenTree::Leaf(Leaf::Ident(trait_name)), TokenTree::Leaf(Leaf::Punct(comma)), diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index f6b97fd540..d753d88470 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -616,7 +616,7 @@ impl ExpansionInfo { let token_id = match token_id_in_attr_input { Some(token_id) => token_id, - // the token is not inside an attribute's input so do the lookup in the macro_arg as ususal + // the token is not inside an attribute's input so do the lookup in the macro_arg as usual None => { let relative_range = token.value.text_range().checked_sub(self.arg.value.text_range().start())?; diff --git a/crates/ide-assists/src/handlers/extract_module.rs b/crates/ide-assists/src/handlers/extract_module.rs index b3c4d306ac..897980c665 100644 --- a/crates/ide-assists/src/handlers/extract_module.rs +++ b/crates/ide-assists/src/handlers/extract_module.rs @@ -29,7 +29,7 @@ use super::remove_unused_param::range_to_remove; // Assist: extract_module // -// Extracts a selected region as seperate module. All the references, visibility and imports are +// Extracts a selected region as separate module. All the references, visibility and imports are // resolved. 
// // ``` @@ -105,7 +105,7 @@ pub(crate) fn extract_module(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti // //- Thirdly, resolving all the imports this includes removing paths from imports // outside the module, shifting/cloning them inside new module, or shifting the imports, or making - // new import statemnts + // new import statements //We are getting item usages and record_fields together, record_fields //for change_visibility and usages for first point mentioned above in the process @@ -661,7 +661,7 @@ fn check_intersection_and_push( import_path: TextRange, ) { if import_paths_to_be_removed.len() > 0 { - // Text ranges recieved here for imports are extended to the + // Text ranges received here for imports are extended to the // next/previous comma which can cause intersections among them // and later deletion of these can cause panics similar // to reported in #11766. So to mitigate it, we diff --git a/crates/ide-assists/src/handlers/replace_turbofish_with_explicit_type.rs b/crates/ide-assists/src/handlers/replace_turbofish_with_explicit_type.rs index 6112e09455..5242f3b510 100644 --- a/crates/ide-assists/src/handlers/replace_turbofish_with_explicit_type.rs +++ b/crates/ide-assists/src/handlers/replace_turbofish_with_explicit_type.rs @@ -88,7 +88,7 @@ pub(crate) fn replace_turbofish_with_explicit_type( }, ); } else if let Some(InferType(t)) = let_stmt.ty() { - // If there's a type inferrence underscore, we can offer to replace it with the type in + // If there's a type inference underscore, we can offer to replace it with the type in // the turbofish. // let x: _ = fn::<...>(); let underscore_range = t.syntax().text_range(); diff --git a/crates/ide-assists/src/utils/suggest_name.rs b/crates/ide-assists/src/utils/suggest_name.rs index 779cdbc93c..662ddd063f 100644 --- a/crates/ide-assists/src/utils/suggest_name.rs +++ b/crates/ide-assists/src/utils/suggest_name.rs @@ -75,7 +75,7 @@ pub(crate) fn for_generic_parameter(ty: &ast::ImplTraitType) -> SmolStr { /// In current implementation, the function tries to get the name from /// the following sources: /// -/// * if expr is an argument to function/method, use paramter name +/// * if expr is an argument to function/method, use parameter name /// * if expr is a function/method call, use function name /// * expression type name if it exists (E.g. `()`, `fn() -> ()` or `!` do not have names) /// * fallback: `var_name` @@ -85,7 +85,7 @@ pub(crate) fn for_generic_parameter(ty: &ast::ImplTraitType) -> SmolStr { /// Currently it sticks to the first name found. // FIXME: Microoptimize and return a `SmolStr` here. 
pub(crate) fn for_variable(expr: &ast::Expr, sema: &Semantics<'_, RootDatabase>) -> String { - // `from_param` does not benifit from stripping + // `from_param` does not benefit from stripping // it need the largest context possible // so we check firstmost if let Some(name) = from_param(expr, sema) { diff --git a/crates/ide-completion/src/completions/postfix/format_like.rs b/crates/ide-completion/src/completions/postfix/format_like.rs index 6b94347e0a..b273a4cb53 100644 --- a/crates/ide-completion/src/completions/postfix/format_like.rs +++ b/crates/ide-completion/src/completions/postfix/format_like.rs @@ -173,7 +173,7 @@ impl FormatStrParser { } } (State::Expr, ':') if chars.peek().copied() == Some(':') => { - // path seperator + // path separator current_expr.push_str("::"); chars.next(); } @@ -185,7 +185,7 @@ impl FormatStrParser { current_expr = String::new(); self.state = State::FormatOpts; } else { - // We're inside of braced expression, assume that it's a struct field name/value delimeter. + // We're inside of braced expression, assume that it's a struct field name/value delimiter. current_expr.push(chr); } } diff --git a/crates/ide-completion/src/tests/flyimport.rs b/crates/ide-completion/src/tests/flyimport.rs index 0bba7f2459..a63ef00687 100644 --- a/crates/ide-completion/src/tests/flyimport.rs +++ b/crates/ide-completion/src/tests/flyimport.rs @@ -159,7 +159,7 @@ pub mod some_module { pub struct ThiiiiiirdStruct; // contains all letters from the query, but not in the beginning, displayed second pub struct AfterThirdStruct; - // contains all letters from the query in the begginning, displayed first + // contains all letters from the query in the beginning, displayed first pub struct ThirdStruct; } diff --git a/crates/ide-db/src/rename.rs b/crates/ide-db/src/rename.rs index 517fe3f246..49b81265ea 100644 --- a/crates/ide-db/src/rename.rs +++ b/crates/ide-db/src/rename.rs @@ -82,7 +82,7 @@ impl Definition { } /// Textual range of the identifier which will change when renaming this - /// `Definition`. Note that some definitions, like buitin types, can't be + /// `Definition`. Note that some definitions, like builtin types, can't be /// renamed. pub fn range_for_rename(self, sema: &Semantics<'_, RootDatabase>) -> Option { let res = match self { diff --git a/crates/ide-diagnostics/src/handlers/missing_match_arms.rs b/crates/ide-diagnostics/src/handlers/missing_match_arms.rs index 9e66fbfb75..5fcaf405b1 100644 --- a/crates/ide-diagnostics/src/handlers/missing_match_arms.rs +++ b/crates/ide-diagnostics/src/handlers/missing_match_arms.rs @@ -750,7 +750,7 @@ fn main() { enum Foo { A } fn main() { // FIXME: this should not bail out but current behavior is such as the old algorithm. - // ExprValidator::validate_match(..) checks types of top level patterns incorrecly. + // ExprValidator::validate_match(..) checks types of top level patterns incorrectly. match Foo::A { ref _x => {} Foo::A => {} diff --git a/crates/mbe/src/expander/matcher.rs b/crates/mbe/src/expander/matcher.rs index 5020e9abaf..c1aa14d6b7 100644 --- a/crates/mbe/src/expander/matcher.rs +++ b/crates/mbe/src/expander/matcher.rs @@ -321,7 +321,7 @@ struct MatchState<'t> { /// The KleeneOp of this sequence if we are in a repetition. 
sep_kind: Option, - /// Number of tokens of seperator parsed + /// Number of tokens of separator parsed sep_parsed: Option, /// Matched meta variables bindings diff --git a/crates/project-model/src/lib.rs b/crates/project-model/src/lib.rs index e3f83084ac..b81b7432f6 100644 --- a/crates/project-model/src/lib.rs +++ b/crates/project-model/src/lib.rs @@ -3,7 +3,7 @@ //! //! Pure model is represented by the [`base_db::CrateGraph`] from another crate. //! -//! In this crate, we are conserned with "real world" project models. +//! In this crate, we are concerned with "real world" project models. //! //! Specifically, here we have a representation for a Cargo project //! ([`CargoWorkspace`]) and for manually specified layout ([`ProjectJson`]). diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs index daabb299f7..8d6f50f558 100644 --- a/crates/project-model/src/workspace.rs +++ b/crates/project-model/src/workspace.rs @@ -770,7 +770,7 @@ fn handle_rustc_crates( queue.push_back(root_pkg); while let Some(pkg) = queue.pop_front() { // Don't duplicate packages if they are dependended on a diamond pattern - // N.B. if this line is ommitted, we try to analyse over 4_800_000 crates + // N.B. if this line is omitted, we try to analyse over 4_800_000 crates // which is not ideal if rustc_pkg_crates.contains_key(&pkg) { continue; diff --git a/crates/rust-analyzer/src/bin/logger.rs b/crates/rust-analyzer/src/bin/logger.rs index 0b69f75bc0..298814af5a 100644 --- a/crates/rust-analyzer/src/bin/logger.rs +++ b/crates/rust-analyzer/src/bin/logger.rs @@ -52,7 +52,7 @@ impl Logger { // merge chalk filter to our main filter (from RA_LOG env). // // The acceptable syntax of CHALK_DEBUG is `target[span{field=value}]=level`. - // As the value should only affect chalk crates, we'd better mannually + // As the value should only affect chalk crates, we'd better manually // specify the target. And for simplicity, CHALK_DEBUG only accept the value // that specify level. let chalk_level_dir = std::env::var("CHALK_DEBUG") diff --git a/crates/syntax/src/hacks.rs b/crates/syntax/src/hacks.rs index a047f61fa0..ec3d3d444c 100644 --- a/crates/syntax/src/hacks.rs +++ b/crates/syntax/src/hacks.rs @@ -1,4 +1,4 @@ -//! Things which exist to solve practial issues, but which shouldn't exist. +//! Things which exist to solve practical issues, but which shouldn't exist. //! //! Please avoid adding new usages of the functions in this module diff --git a/crates/vfs/src/lib.rs b/crates/vfs/src/lib.rs index 10fae41d08..7badb1c363 100644 --- a/crates/vfs/src/lib.rs +++ b/crates/vfs/src/lib.rs @@ -64,7 +64,7 @@ pub struct FileId(pub u32); /// Storage for all files read by rust-analyzer. /// -/// For more informations see the [crate-level](crate) documentation. +/// For more information see the [crate-level](crate) documentation. #[derive(Default)] pub struct Vfs { interner: PathInterner, diff --git a/docs/dev/architecture.md b/docs/dev/architecture.md index 51e26c58a9..c173a239fe 100644 --- a/docs/dev/architecture.md +++ b/docs/dev/architecture.md @@ -485,7 +485,7 @@ Mind the code--architecture gap: at the moment, we are using fewer feature flags ### Serialization In Rust, it is easy (often too easy) to add serialization to any type by adding `#[derive(Serialize)]`. -This easiness is misleading -- serializable types impose significant backwards compatability constraints. +This easiness is misleading -- serializable types impose significant backwards compatibility constraints. 
If a type is serializable, then it is a part of some IPC boundary. You often don't control the other side of this boundary, so changing serializable types is hard. diff --git a/editors/code/src/commands.ts b/editors/code/src/commands.ts index f58de9da1b..12f666401f 100644 --- a/editors/code/src/commands.ts +++ b/editors/code/src/commands.ts @@ -655,7 +655,7 @@ function crateGraph(ctx: Ctx, full: boolean): Cmd { html, body { margin:0; padding:0; overflow:hidden } svg { position:fixed; top:0; left:0; height:100%; width:100% } - /* Disable the graphviz backgroud and fill the polygons */ + /* Disable the graphviz background and fill the polygons */ .graph > polygon { display:none; } :is(.node,.edge) polygon { fill: white; } diff --git a/editors/code/src/toolchain.ts b/editors/code/src/toolchain.ts index bac163da9f..e1ca495428 100644 --- a/editors/code/src/toolchain.ts +++ b/editors/code/src/toolchain.ts @@ -158,7 +158,7 @@ export const getPathForExecutable = memoizeAsync( try { // hmm, `os.homedir()` seems to be infallible - // it is not mentioned in docs and cannot be infered by the type signature... + // it is not mentioned in docs and cannot be inferred by the type signature... const standardPath = vscode.Uri.joinPath( vscode.Uri.file(os.homedir()), ".cargo", From 85b9568e2d7cc3c4b9a9d420ab3d4b91037bc6a5 Mon Sep 17 00:00:00 2001 From: Dorian Scheidt Date: Wed, 17 Aug 2022 11:57:14 -0500 Subject: [PATCH 0149/1945] feat: Run test mod from anywhere in parent file The "Run" feature of rust-analyzer is super useful, especially for running individual tests or test-modules during development. One common pattern in rust development is to develop tests in the same file as production code, inside a module (usually called `test` or `tests`) marked with `#[cfg(test)]`. Unforunately, this pattern is not well supported by r-a today, as a test module won't show up as a runnable unless the cursor is inside it. In my experience, it is quite common to want to run the tests associated with some production code immediately after editing it, not only after editing the tests themselves. As such it would be better if test modules were available from the "Run" menu even when the cursor is outside the test module. This change updates the filtration logic for runnables in `handlers::handle_runnables` to special case `RunnableKind::TestMod`, making test modules available regardless of the cursor location. Other `RunnableKind`s are unnaffected. Fixes #9589 --- crates/rust-analyzer/src/handlers.rs | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/crates/rust-analyzer/src/handlers.rs b/crates/rust-analyzer/src/handlers.rs index 943d043bc1..6337d49c24 100644 --- a/crates/rust-analyzer/src/handlers.rs +++ b/crates/rust-analyzer/src/handlers.rs @@ -703,10 +703,8 @@ pub(crate) fn handle_runnables( let mut res = Vec::new(); for runnable in snap.analysis.runnables(file_id)? { - if let Some(offset) = offset { - if !runnable.nav.full_range.contains_inclusive(offset) { - continue; - } + if should_skip_for_offset(&runnable, offset) { + continue; } if should_skip_target(&runnable, cargo_spec.as_ref()) { continue; @@ -772,6 +770,14 @@ pub(crate) fn handle_runnables( Ok(res) } +fn should_skip_for_offset(runnable: &Runnable, offset: Option) -> bool { + match offset { + None => false, + _ if matches!(&runnable.kind, RunnableKind::TestMod { .. 
}) => false, + Some(offset) => !runnable.nav.full_range.contains_inclusive(offset), + } +} + pub(crate) fn handle_related_tests( snap: GlobalStateSnapshot, params: lsp_types::TextDocumentPositionParams, From 12abaf8ddde026fd733c7e783cc3081f4c22cce2 Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Thu, 18 Aug 2022 01:30:04 +0900 Subject: [PATCH 0150/1945] fix: resolve associated types of bare dyn types --- crates/hir-ty/src/lower.rs | 61 +++++++++++++------------------ crates/hir-ty/src/tests/traits.rs | 28 ++++++++++++++ 2 files changed, 54 insertions(+), 35 deletions(-) diff --git a/crates/hir-ty/src/lower.rs b/crates/hir-ty/src/lower.rs index 239f66bcb7..ae115c8c0d 100644 --- a/crates/hir-ty/src/lower.rs +++ b/crates/hir-ty/src/lower.rs @@ -238,18 +238,7 @@ impl<'a> TyLoweringContext<'a> { }) .intern(Interner) } - TypeRef::DynTrait(bounds) => { - let self_ty = - TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 0)).intern(Interner); - let bounds = self.with_shifted_in(DebruijnIndex::ONE, |ctx| { - QuantifiedWhereClauses::from_iter( - Interner, - bounds.iter().flat_map(|b| ctx.lower_type_bound(b, self_ty.clone(), false)), - ) - }); - let bounds = crate::make_single_type_binders(bounds); - TyKind::Dyn(DynTy { bounds, lifetime: static_lifetime() }).intern(Interner) - } + TypeRef::DynTrait(bounds) => self.lower_dyn_trait(bounds), TypeRef::ImplTrait(bounds) => { match self.impl_trait_mode { ImplTraitLoweringMode::Opaque => { @@ -468,29 +457,10 @@ impl<'a> TyLoweringContext<'a> { } } 0 => { - let self_ty = Some( - TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 0)) - .intern(Interner), - ); - let trait_ref = self.with_shifted_in(DebruijnIndex::ONE, |ctx| { - ctx.lower_trait_ref_from_resolved_path( - trait_, - resolved_segment, - self_ty, - ) - }); - let dyn_ty = DynTy { - bounds: crate::make_single_type_binders( - QuantifiedWhereClauses::from_iter( - Interner, - Some(crate::wrap_empty_binders(WhereClause::Implemented( - trait_ref, - ))), - ), - ), - lifetime: static_lifetime(), - }; - TyKind::Dyn(dyn_ty).intern(Interner) + // Trait object type without dyn; this should be handled in upstream. See + // `lower_path()`. 
+ stdx::never!("unexpected fully resolved trait path"); + TyKind::Error.intern(Interner) } _ => { // FIXME report error (ambiguous associated type) @@ -555,11 +525,20 @@ impl<'a> TyLoweringContext<'a> { let (ty, res) = self.lower_ty_ext(type_ref); return self.lower_ty_relative_path(ty, res, path.segments()); } + let (resolution, remaining_index) = match self.resolver.resolve_path_in_type_ns(self.db.upcast(), path.mod_path()) { Some(it) => it, None => return (TyKind::Error.intern(Interner), None), }; + + if matches!(resolution, TypeNs::TraitId(_)) && remaining_index.is_none() { + // trait object type without dyn + let bound = TypeBound::Path(path.clone(), TraitBoundModifier::None); + let ty = self.lower_dyn_trait(&[Interned::new(bound)]); + return (ty, None); + } + let (resolved_segment, remaining_segments) = match remaining_index { None => ( path.segments().last().expect("resolved path has at least one element"), @@ -987,6 +966,18 @@ impl<'a> TyLoweringContext<'a> { }) } + fn lower_dyn_trait(&self, bounds: &[Interned]) -> Ty { + let self_ty = TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 0)).intern(Interner); + let bounds = self.with_shifted_in(DebruijnIndex::ONE, |ctx| { + QuantifiedWhereClauses::from_iter( + Interner, + bounds.iter().flat_map(|b| ctx.lower_type_bound(b, self_ty.clone(), false)), + ) + }); + let bounds = crate::make_single_type_binders(bounds); + TyKind::Dyn(DynTy { bounds, lifetime: static_lifetime() }).intern(Interner) + } + fn lower_impl_trait( &self, bounds: &[Interned], diff --git a/crates/hir-ty/src/tests/traits.rs b/crates/hir-ty/src/tests/traits.rs index 75802a5eb4..c128a051f7 100644 --- a/crates/hir-ty/src/tests/traits.rs +++ b/crates/hir-ty/src/tests/traits.rs @@ -1476,6 +1476,34 @@ fn test(x: Trait, y: &Trait) -> u64 { 165..172 'z.foo()': u64 "#]], ); + + check_infer_with_mismatches( + r#" +//- minicore: fn, coerce_unsized +struct S; +impl S { + fn foo(&self) {} +} +fn f(_: &Fn(S)) {} +fn main() { + f(&|number| number.foo()); +} + "#, + expect![[r#" + 31..35 'self': &S + 37..39 '{}': () + 47..48 '_': &dyn Fn(S) + 58..60 '{}': () + 71..105 '{ ...()); }': () + 77..78 'f': fn f(&dyn Fn(S)) + 77..102 'f(&|nu...foo())': () + 79..101 '&|numb....foo()': &|S| -> () + 80..101 '|numbe....foo()': |S| -> () + 81..87 'number': S + 89..95 'number': S + 89..101 'number.foo()': () + "#]], + ) } #[test] From dac27679f75725c8a3e8ff5fc009f3bc59ce3523 Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Thu, 18 Aug 2022 08:46:06 +0900 Subject: [PATCH 0151/1945] fix: resolve path `Self` alone in value namespace --- crates/hir-ty/src/infer.rs | 1 + crates/hir-ty/src/tests/patterns.rs | 36 +++++++++++++++++++++++++++++ 2 files changed, 37 insertions(+) diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs index 46eeea0e6f..7440439522 100644 --- a/crates/hir-ty/src/infer.rs +++ b/crates/hir-ty/src/infer.rs @@ -734,6 +734,7 @@ impl<'a> InferenceContext<'a> { let ty = self.insert_type_vars(ty.substitute(Interner, &substs)); return (ty, Some(strukt.into())); } + ValueNs::ImplSelf(impl_id) => (TypeNs::SelfType(impl_id), None), _ => return (self.err_ty(), None), }, Some(ResolveValueResult::Partial(typens, unresolved)) => (typens, Some(unresolved)), diff --git a/crates/hir-ty/src/tests/patterns.rs b/crates/hir-ty/src/tests/patterns.rs index 94efe7bc11..eb04bf8778 100644 --- a/crates/hir-ty/src/tests/patterns.rs +++ b/crates/hir-ty/src/tests/patterns.rs @@ -488,6 +488,42 @@ fn infer_adt_pattern() { ); } +#[test] +fn tuple_struct_destructured_with_self() { + check_infer( 
+ r#" +struct Foo(usize,); +impl Foo { + fn f() { + let Self(s,) = &Foo(0,); + let Self(s,) = &mut Foo(0,); + let Self(s,) = Foo(0,); + } +} + "#, + expect![[r#" + 42..151 '{ ... }': () + 56..64 'Self(s,)': Foo + 61..62 's': &usize + 67..75 '&Foo(0,)': &Foo + 68..71 'Foo': Foo(usize) -> Foo + 68..75 'Foo(0,)': Foo + 72..73 '0': usize + 89..97 'Self(s,)': Foo + 94..95 's': &mut usize + 100..112 '&mut Foo(0,)': &mut Foo + 105..108 'Foo': Foo(usize) -> Foo + 105..112 'Foo(0,)': Foo + 109..110 '0': usize + 126..134 'Self(s,)': Foo + 131..132 's': usize + 137..140 'Foo': Foo(usize) -> Foo + 137..144 'Foo(0,)': Foo + 141..142 '0': usize + "#]], + ); +} + #[test] fn enum_variant_through_self_in_pattern() { check_infer( From 581a01d0cc608d02090a5fd6d4fdc8a7b71b5c34 Mon Sep 17 00:00:00 2001 From: DropDemBits Date: Thu, 18 Aug 2022 13:42:10 -0400 Subject: [PATCH 0152/1945] Migrate `syntax::make` to use format arg captures --- crates/syntax/src/ast/make.rs | 263 +++++++++++++++++----------------- 1 file changed, 132 insertions(+), 131 deletions(-) diff --git a/crates/syntax/src/ast/make.rs b/crates/syntax/src/ast/make.rs index 037de876d4..83f8bbac58 100644 --- a/crates/syntax/src/ast/make.rs +++ b/crates/syntax/src/ast/make.rs @@ -25,7 +25,7 @@ pub mod ext { return from_text(&name.text()); fn from_text(text: &str) -> ast::IdentPat { - ast_from_text(&format!("fn f({}: ())", text)) + ast_from_text(&format!("fn f({text}: ())")) } } pub fn ident_path(ident: &str) -> ast::Path { @@ -60,10 +60,10 @@ pub mod ext { expr_from_text("todo!()") } pub fn expr_ty_default(ty: &ast::Type) -> ast::Expr { - expr_from_text(&format!("{}::default()", ty)) + expr_from_text(&format!("{ty}::default()")) } pub fn expr_ty_new(ty: &ast::Type) -> ast::Expr { - expr_from_text(&format!("{}::new()", ty)) + expr_from_text(&format!("{ty}::new()")) } pub fn zero_number() -> ast::Expr { @@ -92,18 +92,20 @@ pub mod ext { ty_path(ident_path("bool")) } pub fn ty_option(t: ast::Type) -> ast::Type { - ty_from_text(&format!("Option<{}>", t)) + ty_from_text(&format!("Option<{t}>")) } pub fn ty_result(t: ast::Type, e: ast::Type) -> ast::Type { - ty_from_text(&format!("Result<{}, {}>", t, e)) + ty_from_text(&format!("Result<{t}, {e}>")) } } -pub fn name(text: &str) -> ast::Name { - ast_from_text(&format!("mod {}{};", raw_ident_esc(text), text)) +pub fn name(name: &str) -> ast::Name { + let raw_escape = raw_ident_esc(name); + ast_from_text(&format!("mod {raw_escape}{name};")) } -pub fn name_ref(text: &str) -> ast::NameRef { - ast_from_text(&format!("fn f() {{ {}{}; }}", raw_ident_esc(text), text)) +pub fn name_ref(name_ref: &str) -> ast::NameRef { + let raw_escape = raw_ident_esc(name_ref); + ast_from_text(&format!("fn f() {{ {raw_escape}{name_ref}; }}")) } fn raw_ident_esc(ident: &str) -> &'static str { let is_keyword = parser::SyntaxKind::from_keyword(ident).is_some(); @@ -118,10 +120,10 @@ pub fn lifetime(text: &str) -> ast::Lifetime { let mut text = text; let tmp; if never!(!text.starts_with('\'')) { - tmp = format!("'{}", text); + tmp = format!("'{text}"); text = &tmp; } - ast_from_text(&format!("fn f<{}>() {{ }}", text)) + ast_from_text(&format!("fn f<{text}>() {{ }}")) } // FIXME: replace stringly-typed constructor with a family of typed ctors, a-la @@ -142,16 +144,16 @@ pub fn ty_tuple(types: impl IntoIterator) -> ast::Type { contents.push(','); } - ty_from_text(&format!("({})", contents)) + ty_from_text(&format!("({contents})")) } pub fn ty_ref(target: ast::Type, exclusive: bool) -> ast::Type { - ty_from_text(&if exclusive { 
format!("&mut {}", target) } else { format!("&{}", target) }) + ty_from_text(&if exclusive { format!("&mut {target}") } else { format!("&{target}") }) } pub fn ty_path(path: ast::Path) -> ast::Type { ty_from_text(&path.to_string()) } fn ty_from_text(text: &str) -> ast::Type { - ast_from_text(&format!("type _T = {};", text)) + ast_from_text(&format!("type _T = {text};")) } pub fn assoc_item_list() -> ast::AssocItemList { @@ -171,7 +173,7 @@ pub fn impl_( Some(params) => params.to_string(), None => String::new(), }; - ast_from_text(&format!("impl{} {}{} {{}}", params, ty, ty_params)) + ast_from_text(&format!("impl{params} {ty}{ty_params} {{}}")) } pub fn impl_trait( @@ -180,7 +182,7 @@ pub fn impl_trait( ty_params: Option, ) -> ast::Impl { let ty_params = ty_params.map_or_else(String::new, |params| params.to_string()); - ast_from_text(&format!("impl{2} {} for {}{2} {{}}", trait_, ty, ty_params)) + ast_from_text(&format!("impl{ty_params} {trait_} for {ty}{ty_params} {{}}")) } pub(crate) fn generic_arg_list() -> ast::GenericArgList { @@ -188,13 +190,13 @@ pub(crate) fn generic_arg_list() -> ast::GenericArgList { } pub fn path_segment(name_ref: ast::NameRef) -> ast::PathSegment { - ast_from_text(&format!("type __ = {};", name_ref)) + ast_from_text(&format!("type __ = {name_ref};")) } pub fn path_segment_ty(type_ref: ast::Type, trait_ref: Option) -> ast::PathSegment { let text = match trait_ref { - Some(trait_ref) => format!("fn f(x: <{} as {}>) {{}}", type_ref, trait_ref), - None => format!("fn f(x: <{}>) {{}}", type_ref), + Some(trait_ref) => format!("fn f(x: <{type_ref} as {trait_ref}>) {{}}"), + None => format!("fn f(x: <{type_ref}>) {{}}"), }; ast_from_text(&text) } @@ -212,15 +214,15 @@ pub fn path_segment_crate() -> ast::PathSegment { } pub fn path_unqualified(segment: ast::PathSegment) -> ast::Path { - ast_from_text(&format!("type __ = {};", segment)) + ast_from_text(&format!("type __ = {segment};")) } pub fn path_qualified(qual: ast::Path, segment: ast::PathSegment) -> ast::Path { - ast_from_text(&format!("{}::{}", qual, segment)) + ast_from_text(&format!("{qual}::{segment}")) } // FIXME: path concatenation operation doesn't make sense as AST op. 
pub fn path_concat(first: ast::Path, second: ast::Path) -> ast::Path { - ast_from_text(&format!("type __ = {}::{};", first, second)) + ast_from_text(&format!("type __ = {first}::{second};")) } pub fn path_from_segments( @@ -229,20 +231,20 @@ pub fn path_from_segments( ) -> ast::Path { let segments = segments.into_iter().map(|it| it.syntax().clone()).join("::"); ast_from_text(&if is_abs { - format!("fn f(x: ::{}) {{}}", segments) + format!("fn f(x: ::{segments}) {{}}") } else { - format!("fn f(x: {}) {{}}", segments) + format!("fn f(x: {segments}) {{}}") }) } pub fn join_paths(paths: impl IntoIterator) -> ast::Path { let paths = paths.into_iter().map(|it| it.syntax().clone()).join("::"); - ast_from_text(&format!("type __ = {};", paths)) + ast_from_text(&format!("type __ = {paths};")) } // FIXME: should not be pub pub fn path_from_text(text: &str) -> ast::Path { - ast_from_text(&format!("fn main() {{ let test = {}; }}", text)) + ast_from_text(&format!("fn main() {{ let test = {text}; }}")) } pub fn use_tree_glob() -> ast::UseTree { @@ -257,50 +259,50 @@ pub fn use_tree( let mut buf = "use ".to_string(); buf += &path.syntax().to_string(); if let Some(use_tree_list) = use_tree_list { - format_to!(buf, "::{}", use_tree_list); + format_to!(buf, "::{use_tree_list}"); } if add_star { buf += "::*"; } if let Some(alias) = alias { - format_to!(buf, " {}", alias); + format_to!(buf, " {alias}"); } ast_from_text(&buf) } pub fn use_tree_list(use_trees: impl IntoIterator) -> ast::UseTreeList { let use_trees = use_trees.into_iter().map(|it| it.syntax().clone()).join(", "); - ast_from_text(&format!("use {{{}}};", use_trees)) + ast_from_text(&format!("use {{{use_trees}}};")) } pub fn use_(visibility: Option, use_tree: ast::UseTree) -> ast::Use { let visibility = match visibility { None => String::new(), - Some(it) => format!("{} ", it), + Some(it) => format!("{it} "), }; - ast_from_text(&format!("{}use {};", visibility, use_tree)) + ast_from_text(&format!("{visibility}use {use_tree};")) } pub fn record_expr(path: ast::Path, fields: ast::RecordExprFieldList) -> ast::RecordExpr { - ast_from_text(&format!("fn f() {{ {} {} }}", path, fields)) + ast_from_text(&format!("fn f() {{ {path} {fields} }}")) } pub fn record_expr_field_list( fields: impl IntoIterator, ) -> ast::RecordExprFieldList { let fields = fields.into_iter().join(", "); - ast_from_text(&format!("fn f() {{ S {{ {} }} }}", fields)) + ast_from_text(&format!("fn f() {{ S {{ {fields} }} }}")) } pub fn record_expr_field(name: ast::NameRef, expr: Option) -> ast::RecordExprField { return match expr { - Some(expr) => from_text(&format!("{}: {}", name, expr)), + Some(expr) => from_text(&format!("{name}: {expr}")), None => from_text(&name.to_string()), }; fn from_text(text: &str) -> ast::RecordExprField { - ast_from_text(&format!("fn f() {{ S {{ {}, }} }}", text)) + ast_from_text(&format!("fn f() {{ S {{ {text}, }} }}")) } } @@ -311,9 +313,9 @@ pub fn record_field( ) -> ast::RecordField { let visibility = match visibility { None => String::new(), - Some(it) => format!("{} ", it), + Some(it) => format!("{it} "), }; - ast_from_text(&format!("struct S {{ {}{}: {}, }}", visibility, name, ty)) + ast_from_text(&format!("struct S {{ {visibility}{name}: {ty}, }}")) } // TODO @@ -323,13 +325,13 @@ pub fn block_expr( ) -> ast::BlockExpr { let mut buf = "{\n".to_string(); for stmt in stmts.into_iter() { - format_to!(buf, " {}\n", stmt); + format_to!(buf, " {stmt}\n"); } if let Some(tail_expr) = tail_expr { - format_to!(buf, " {}\n", tail_expr); + format_to!(buf, " 
{tail_expr}\n"); } buf += "}"; - ast_from_text(&format!("fn f() {}", buf)) + ast_from_text(&format!("fn f() {buf}")) } /// Ideally this function wouldn't exist since it involves manual indenting. @@ -343,18 +345,18 @@ pub fn hacky_block_expr_with_comments( let mut buf = "{\n".to_string(); for node_or_token in elements.into_iter() { match node_or_token { - rowan::NodeOrToken::Node(n) => format_to!(buf, " {}\n", n), + rowan::NodeOrToken::Node(n) => format_to!(buf, " {n}\n"), rowan::NodeOrToken::Token(t) if t.kind() == SyntaxKind::COMMENT => { - format_to!(buf, " {}\n", t) + format_to!(buf, " {t}\n") } _ => (), } } if let Some(tail_expr) = tail_expr { - format_to!(buf, " {}\n", tail_expr); + format_to!(buf, " {tail_expr}\n"); } buf += "}"; - ast_from_text(&format!("fn f() {}", buf)) + ast_from_text(&format!("fn f() {buf}")) } pub fn expr_unit() -> ast::Expr { @@ -362,7 +364,7 @@ pub fn expr_unit() -> ast::Expr { } pub fn expr_literal(text: &str) -> ast::Literal { assert_eq!(text.trim(), text); - ast_from_text(&format!("fn f() {{ let _ = {}; }}", text)) + ast_from_text(&format!("fn f() {{ let _ = {text}; }}")) } pub fn expr_empty_block() -> ast::Expr { @@ -373,41 +375,41 @@ pub fn expr_path(path: ast::Path) -> ast::Expr { } pub fn expr_continue(label: Option) -> ast::Expr { match label { - Some(label) => expr_from_text(&format!("continue {}", label)), + Some(label) => expr_from_text(&format!("continue {label}")), None => expr_from_text("continue"), } } // Consider `op: SyntaxKind` instead for nicer syntax at the call-site? pub fn expr_bin_op(lhs: ast::Expr, op: ast::BinaryOp, rhs: ast::Expr) -> ast::Expr { - expr_from_text(&format!("{} {} {}", lhs, op, rhs)) + expr_from_text(&format!("{lhs} {op} {rhs}")) } pub fn expr_break(label: Option, expr: Option) -> ast::Expr { let mut s = String::from("break"); if let Some(label) = label { - format_to!(s, " {}", label); + format_to!(s, " {label}"); } if let Some(expr) = expr { - format_to!(s, " {}", expr); + format_to!(s, " {expr}"); } expr_from_text(&s) } pub fn expr_return(expr: Option) -> ast::Expr { match expr { - Some(expr) => expr_from_text(&format!("return {}", expr)), + Some(expr) => expr_from_text(&format!("return {expr}")), None => expr_from_text("return"), } } pub fn expr_try(expr: ast::Expr) -> ast::Expr { - expr_from_text(&format!("{}?", expr)) + expr_from_text(&format!("{expr}?")) } pub fn expr_await(expr: ast::Expr) -> ast::Expr { - expr_from_text(&format!("{}.await", expr)) + expr_from_text(&format!("{expr}.await")) } pub fn expr_match(expr: ast::Expr, match_arm_list: ast::MatchArmList) -> ast::Expr { - expr_from_text(&format!("match {} {}", expr, match_arm_list)) + expr_from_text(&format!("match {expr} {match_arm_list}")) } pub fn expr_if( condition: ast::Expr, @@ -415,66 +417,67 @@ pub fn expr_if( else_branch: Option, ) -> ast::Expr { let else_branch = match else_branch { - Some(ast::ElseBranch::Block(block)) => format!("else {}", block), - Some(ast::ElseBranch::IfExpr(if_expr)) => format!("else {}", if_expr), + Some(ast::ElseBranch::Block(block)) => format!("else {block}"), + Some(ast::ElseBranch::IfExpr(if_expr)) => format!("else {if_expr}"), None => String::new(), }; - expr_from_text(&format!("if {} {} {}", condition, then_branch, else_branch)) + expr_from_text(&format!("if {condition} {then_branch} {else_branch}")) } pub fn expr_for_loop(pat: ast::Pat, expr: ast::Expr, block: ast::BlockExpr) -> ast::Expr { - expr_from_text(&format!("for {} in {} {}", pat, expr, block)) + expr_from_text(&format!("for {pat} in {expr} {block}")) } pub 
fn expr_loop(block: ast::BlockExpr) -> ast::Expr { - expr_from_text(&format!("loop {}", block)) + expr_from_text(&format!("loop {block}")) } pub fn expr_prefix(op: SyntaxKind, expr: ast::Expr) -> ast::Expr { let token = token(op); - expr_from_text(&format!("{}{}", token, expr)) + expr_from_text(&format!("{token}{expr}")) } pub fn expr_call(f: ast::Expr, arg_list: ast::ArgList) -> ast::Expr { - expr_from_text(&format!("{}{}", f, arg_list)) + expr_from_text(&format!("{f}{arg_list}")) } pub fn expr_method_call( receiver: ast::Expr, method: ast::NameRef, arg_list: ast::ArgList, ) -> ast::Expr { - expr_from_text(&format!("{}.{}{}", receiver, method, arg_list)) + expr_from_text(&format!("{receiver}.{method}{arg_list}")) } pub fn expr_macro_call(f: ast::Expr, arg_list: ast::ArgList) -> ast::Expr { - expr_from_text(&format!("{}!{}", f, arg_list)) + expr_from_text(&format!("{f}!{arg_list}")) } pub fn expr_ref(expr: ast::Expr, exclusive: bool) -> ast::Expr { - expr_from_text(&if exclusive { format!("&mut {}", expr) } else { format!("&{}", expr) }) + expr_from_text(&if exclusive { format!("&mut {expr}") } else { format!("&{expr}") }) } pub fn expr_closure(pats: impl IntoIterator, expr: ast::Expr) -> ast::Expr { let params = pats.into_iter().join(", "); - expr_from_text(&format!("|{}| {}", params, expr)) + expr_from_text(&format!("|{params}| {expr}")) } pub fn expr_field(receiver: ast::Expr, field: &str) -> ast::Expr { - expr_from_text(&format!("{}.{}", receiver, field)) + expr_from_text(&format!("{receiver}.{field}")) } pub fn expr_paren(expr: ast::Expr) -> ast::Expr { - expr_from_text(&format!("({})", expr)) + expr_from_text(&format!("({expr})")) } pub fn expr_tuple(elements: impl IntoIterator) -> ast::Expr { let expr = elements.into_iter().format(", "); - expr_from_text(&format!("({})", expr)) + expr_from_text(&format!("({expr})")) } pub fn expr_assignment(lhs: ast::Expr, rhs: ast::Expr) -> ast::Expr { - expr_from_text(&format!("{} = {}", lhs, rhs)) + expr_from_text(&format!("{lhs} = {rhs}")) } fn expr_from_text(text: &str) -> ast::Expr { - ast_from_text(&format!("const C: () = {};", text)) + ast_from_text(&format!("const C: () = {text};")) } pub fn expr_let(pattern: ast::Pat, expr: ast::Expr) -> ast::LetExpr { - ast_from_text(&format!("const _: () = while let {} = {} {{}};", pattern, expr)) + ast_from_text(&format!("const _: () = while let {pattern} = {expr} {{}};")) } pub fn arg_list(args: impl IntoIterator) -> ast::ArgList { - ast_from_text(&format!("fn main() {{ ()({}) }}", args.into_iter().format(", "))) + let args = args.into_iter().format(", "); + ast_from_text(&format!("fn main() {{ ()({args}) }}")) } pub fn ident_pat(ref_: bool, mut_: bool, name: ast::Name) -> ast::IdentPat { @@ -485,7 +488,7 @@ pub fn ident_pat(ref_: bool, mut_: bool, name: ast::Name) -> ast::IdentPat { if mut_ { s.push_str("mut "); } - format_to!(s, "{}", name); + format_to!(s, "{name}"); s.push_str(": ())"); ast_from_text(&s) } @@ -494,7 +497,7 @@ pub fn wildcard_pat() -> ast::WildcardPat { return from_text("_"); fn from_text(text: &str) -> ast::WildcardPat { - ast_from_text(&format!("fn f({}: ())", text)) + ast_from_text(&format!("fn f({text}: ())")) } } @@ -502,7 +505,7 @@ pub fn literal_pat(lit: &str) -> ast::LiteralPat { return from_text(lit); fn from_text(text: &str) -> ast::LiteralPat { - ast_from_text(&format!("fn f() {{ match x {{ {} => {{}} }} }}", text)) + ast_from_text(&format!("fn f() {{ match x {{ {text} => {{}} }} }}")) } } @@ -515,10 +518,10 @@ pub fn tuple_pat(pats: impl IntoIterator) -> ast::TuplePat { 
if count == 1 { pats_str.push(','); } - return from_text(&format!("({})", pats_str)); + return from_text(&format!("({pats_str})")); fn from_text(text: &str) -> ast::TuplePat { - ast_from_text(&format!("fn f({}: ())", text)) + ast_from_text(&format!("fn f({text}: ())")) } } @@ -527,46 +530,46 @@ pub fn tuple_struct_pat( pats: impl IntoIterator, ) -> ast::TupleStructPat { let pats_str = pats.into_iter().join(", "); - return from_text(&format!("{}({})", path, pats_str)); + return from_text(&format!("{path}({pats_str})")); fn from_text(text: &str) -> ast::TupleStructPat { - ast_from_text(&format!("fn f({}: ())", text)) + ast_from_text(&format!("fn f({text}: ())")) } } pub fn record_pat(path: ast::Path, pats: impl IntoIterator) -> ast::RecordPat { let pats_str = pats.into_iter().join(", "); - return from_text(&format!("{} {{ {} }}", path, pats_str)); + return from_text(&format!("{path} {{ {pats_str} }}")); fn from_text(text: &str) -> ast::RecordPat { - ast_from_text(&format!("fn f({}: ())", text)) + ast_from_text(&format!("fn f({text}: ())")) } } pub fn record_pat_with_fields(path: ast::Path, fields: ast::RecordPatFieldList) -> ast::RecordPat { - ast_from_text(&format!("fn f({} {}: ()))", path, fields)) + ast_from_text(&format!("fn f({path} {fields}: ()))")) } pub fn record_pat_field_list( fields: impl IntoIterator, ) -> ast::RecordPatFieldList { let fields = fields.into_iter().join(", "); - ast_from_text(&format!("fn f(S {{ {} }}: ()))", fields)) + ast_from_text(&format!("fn f(S {{ {fields} }}: ()))")) } pub fn record_pat_field(name_ref: ast::NameRef, pat: ast::Pat) -> ast::RecordPatField { - ast_from_text(&format!("fn f(S {{ {}: {} }}: ()))", name_ref, pat)) + ast_from_text(&format!("fn f(S {{ {name_ref}: {pat} }}: ()))")) } pub fn record_pat_field_shorthand(name_ref: ast::NameRef) -> ast::RecordPatField { - ast_from_text(&format!("fn f(S {{ {} }}: ()))", name_ref)) + ast_from_text(&format!("fn f(S {{ {name_ref} }}: ()))")) } /// Returns a `BindPat` if the path has just one segment, a `PathPat` otherwise. 
pub fn path_pat(path: ast::Path) -> ast::Pat { return from_text(&path.to_string()); fn from_text(text: &str) -> ast::Pat { - ast_from_text(&format!("fn f({}: ())", text)) + ast_from_text(&format!("fn f({text}: ())")) } } @@ -577,12 +580,12 @@ pub fn match_arm( ) -> ast::MatchArm { let pats_str = pats.into_iter().join(" | "); return match guard { - Some(guard) => from_text(&format!("{} if {} => {}", pats_str, guard, expr)), - None => from_text(&format!("{} => {}", pats_str, expr)), + Some(guard) => from_text(&format!("{pats_str} if {guard} => {expr}")), + None => from_text(&format!("{pats_str} => {expr}")), }; fn from_text(text: &str) -> ast::MatchArm { - ast_from_text(&format!("fn f() {{ match () {{{}}} }}", text)) + ast_from_text(&format!("fn f() {{ match () {{{text}}} }}")) } } @@ -592,10 +595,10 @@ pub fn match_arm_with_guard( expr: ast::Expr, ) -> ast::MatchArm { let pats_str = pats.into_iter().join(" | "); - return from_text(&format!("{} if {} => {}", pats_str, guard, expr)); + return from_text(&format!("{pats_str} if {guard} => {expr}")); fn from_text(text: &str) -> ast::MatchArm { - ast_from_text(&format!("fn f() {{ match () {{{}}} }}", text)) + ast_from_text(&format!("fn f() {{ match () {{{text}}} }}")) } } @@ -605,13 +608,14 @@ pub fn match_arm_list(arms: impl IntoIterator) -> ast::Mat .map(|arm| { let needs_comma = arm.expr().map_or(true, |it| !it.is_block_like()); let comma = if needs_comma { "," } else { "" }; - format!(" {}{}\n", arm.syntax(), comma) + let arm = arm.syntax(); + format!(" {arm}{comma}\n") }) .collect::(); return from_text(&arms_str); fn from_text(text: &str) -> ast::MatchArmList { - ast_from_text(&format!("fn f() {{ match () {{\n{}}} }}", text)) + ast_from_text(&format!("fn f() {{ match () {{\n{text}}} }}")) } } @@ -620,10 +624,10 @@ pub fn where_pred( bounds: impl IntoIterator, ) -> ast::WherePred { let bounds = bounds.into_iter().join(" + "); - return from_text(&format!("{}: {}", path, bounds)); + return from_text(&format!("{path}: {bounds}")); fn from_text(text: &str) -> ast::WherePred { - ast_from_text(&format!("fn f() where {} {{ }}", text)) + ast_from_text(&format!("fn f() where {text} {{ }}")) } } @@ -632,7 +636,7 @@ pub fn where_clause(preds: impl IntoIterator) -> ast::Whe return from_text(preds.as_str()); fn from_text(text: &str) -> ast::WhereClause { - ast_from_text(&format!("fn f() where {} {{ }}", text)) + ast_from_text(&format!("fn f() where {text} {{ }}")) } } @@ -642,19 +646,19 @@ pub fn let_stmt( initializer: Option, ) -> ast::LetStmt { let mut text = String::new(); - format_to!(text, "let {}", pattern); + format_to!(text, "let {pattern}"); if let Some(ty) = ty { - format_to!(text, ": {}", ty); + format_to!(text, ": {ty}"); } match initializer { - Some(it) => format_to!(text, " = {};", it), + Some(it) => format_to!(text, " = {it};"), None => format_to!(text, ";"), }; - ast_from_text(&format!("fn f() {{ {} }}", text)) + ast_from_text(&format!("fn f() {{ {text} }}")) } pub fn expr_stmt(expr: ast::Expr) -> ast::ExprStmt { let semi = if expr.is_block_like() { "" } else { ";" }; - ast_from_text(&format!("fn f() {{ {}{} (); }}", expr, semi)) + ast_from_text(&format!("fn f() {{ {expr}{semi} (); }}")) } pub fn item_const( @@ -665,13 +669,13 @@ pub fn item_const( ) -> ast::Const { let visibility = match visibility { None => String::new(), - Some(it) => format!("{} ", it), + Some(it) => format!("{it} "), }; - ast_from_text(&format!("{} const {}: {} = {};", visibility, name, ty, expr)) + ast_from_text(&format!("{visibility} const {name}: {ty} = {expr};")) 
} pub fn param(pat: ast::Pat, ty: ast::Type) -> ast::Param { - ast_from_text(&format!("fn f({}: {}) {{ }}", pat, ty)) + ast_from_text(&format!("fn f({pat}: {ty}) {{ }}")) } pub fn self_param() -> ast::SelfParam { @@ -679,7 +683,7 @@ pub fn self_param() -> ast::SelfParam { } pub fn ret_type(ty: ast::Type) -> ast::RetType { - ast_from_text(&format!("fn f() -> {} {{ }}", ty)) + ast_from_text(&format!("fn f() -> {ty} {{ }}")) } pub fn param_list( @@ -688,30 +692,30 @@ pub fn param_list( ) -> ast::ParamList { let args = pats.into_iter().join(", "); let list = match self_param { - Some(self_param) if args.is_empty() => format!("fn f({}) {{ }}", self_param), - Some(self_param) => format!("fn f({}, {}) {{ }}", self_param, args), - None => format!("fn f({}) {{ }}", args), + Some(self_param) if args.is_empty() => format!("fn f({self_param}) {{ }}"), + Some(self_param) => format!("fn f({self_param}, {args}) {{ }}"), + None => format!("fn f({args}) {{ }}"), }; ast_from_text(&list) } pub fn type_param(name: ast::Name, ty: Option) -> ast::TypeParam { let bound = match ty { - Some(it) => format!(": {}", it), + Some(it) => format!(": {it}"), None => String::new(), }; - ast_from_text(&format!("fn f<{}{}>() {{ }}", name, bound)) + ast_from_text(&format!("fn f<{name}{bound}>() {{ }}")) } pub fn lifetime_param(lifetime: ast::Lifetime) -> ast::LifetimeParam { - ast_from_text(&format!("fn f<{}>() {{ }}", lifetime)) + ast_from_text(&format!("fn f<{lifetime}>() {{ }}")) } pub fn generic_param_list( pats: impl IntoIterator, ) -> ast::GenericParamList { let args = pats.into_iter().join(", "); - ast_from_text(&format!("fn f<{}>() {{ }}", args)) + ast_from_text(&format!("fn f<{args}>() {{ }}")) } pub fn visibility_pub_crate() -> ast::Visibility { @@ -724,33 +728,33 @@ pub fn visibility_pub() -> ast::Visibility { pub fn tuple_field_list(fields: impl IntoIterator) -> ast::TupleFieldList { let fields = fields.into_iter().join(", "); - ast_from_text(&format!("struct f({});", fields)) + ast_from_text(&format!("struct f({fields});")) } pub fn record_field_list( fields: impl IntoIterator, ) -> ast::RecordFieldList { let fields = fields.into_iter().join(", "); - ast_from_text(&format!("struct f {{ {} }}", fields)) + ast_from_text(&format!("struct f {{ {fields} }}")) } pub fn tuple_field(visibility: Option, ty: ast::Type) -> ast::TupleField { let visibility = match visibility { None => String::new(), - Some(it) => format!("{} ", it), + Some(it) => format!("{it} "), }; - ast_from_text(&format!("struct f({}{});", visibility, ty)) + ast_from_text(&format!("struct f({visibility}{ty});")) } pub fn variant(name: ast::Name, field_list: Option) -> ast::Variant { let field_list = match field_list { None => String::new(), Some(it) => match it { - ast::FieldList::RecordFieldList(record) => format!(" {}", record), - ast::FieldList::TupleFieldList(tuple) => format!("{}", tuple), + ast::FieldList::RecordFieldList(record) => format!(" {record}"), + ast::FieldList::TupleFieldList(tuple) => format!("{tuple}"), }, }; - ast_from_text(&format!("enum f {{ {}{} }}", name, field_list)) + ast_from_text(&format!("enum f {{ {name}{field_list} }}")) } pub fn fn_( @@ -763,23 +767,22 @@ pub fn fn_( is_async: bool, ) -> ast::Fn { let type_params = match type_params { - Some(type_params) => format!("{}", type_params), + Some(type_params) => format!("{type_params}"), None => "".into(), }; let ret_type = match ret_type { - Some(ret_type) => format!("{} ", ret_type), + Some(ret_type) => format!("{ret_type} "), None => "".into(), }; let visibility = match 
visibility { None => String::new(), - Some(it) => format!("{} ", it), + Some(it) => format!("{it} "), }; let async_literal = if is_async { "async " } else { "" }; ast_from_text(&format!( - "{}{}fn {}{}{} {}{}", - visibility, async_literal, fn_name, type_params, params, ret_type, body + "{visibility}{async_literal}fn {fn_name}{type_params}{params} {ret_type}{body}", )) } @@ -793,13 +796,10 @@ pub fn struct_( let type_params = generic_param_list.map_or_else(String::new, |it| it.to_string()); let visibility = match visibility { None => String::new(), - Some(it) => format!("{} ", it), + Some(it) => format!("{it} "), }; - ast_from_text(&format!( - "{}struct {}{}{}{}", - visibility, strukt_name, type_params, field_list, semicolon - )) + ast_from_text(&format!("{visibility}struct {strukt_name}{type_params}{field_list}{semicolon}",)) } #[track_caller] @@ -808,7 +808,8 @@ fn ast_from_text(text: &str) -> N { let node = match parse.tree().syntax().descendants().find_map(N::cast) { Some(it) => it, None => { - panic!("Failed to make ast node `{}` from text {}", std::any::type_name::(), text) + let node = std::any::type_name::(); + panic!("Failed to make ast node `{node}` from text {text}") } }; let node = node.clone_subtree(); @@ -824,7 +825,7 @@ pub fn token(kind: SyntaxKind) -> SyntaxToken { .descendants_with_tokens() .filter_map(|it| it.into_token()) .find(|it| it.kind() == kind) - .unwrap_or_else(|| panic!("unhandled token: {:?}", kind)) + .unwrap_or_else(|| panic!("unhandled token: {kind:?}")) } pub mod tokens { @@ -863,7 +864,7 @@ pub mod tokens { pub fn literal(text: &str) -> SyntaxToken { assert_eq!(text.trim(), text); - let lit: ast::Literal = super::ast_from_text(&format!("fn f() {{ let _ = {}; }}", text)); + let lit: ast::Literal = super::ast_from_text(&format!("fn f() {{ let _ = {text}; }}")); lit.syntax().first_child_or_token().unwrap().into_token().unwrap() } From d39677c1ebca7a764172abe1999123d7100963d7 Mon Sep 17 00:00:00 2001 From: Dorian Scheidt Date: Thu, 18 Aug 2022 18:10:37 -0500 Subject: [PATCH 0153/1945] Rename static_method_target -> assoc_fn_target --- crates/ide-assists/src/handlers/generate_function.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/ide-assists/src/handlers/generate_function.rs b/crates/ide-assists/src/handlers/generate_function.rs index 49345cb983..f5d2b27cb2 100644 --- a/crates/ide-assists/src/handlers/generate_function.rs +++ b/crates/ide-assists/src/handlers/generate_function.rs @@ -78,11 +78,11 @@ fn gen_fn(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { } } - static_method_target(ctx, &call, adt, &mut target_module, fn_name, &mut adt_name)? + assoc_fn_target(ctx, &call, adt, &mut target_module, fn_name, &mut adt_name)? } Some(hir::PathResolution::SelfType(impl_)) => { let adt = impl_.self_ty(ctx.db()).as_adt()?; - static_method_target(ctx, &call, adt, &mut target_module, fn_name, &mut adt_name)? + assoc_fn_target(ctx, &call, adt, &mut target_module, fn_name, &mut adt_name)? 
} _ => { return None; @@ -394,7 +394,7 @@ fn get_method_target( Some((target.clone(), get_insert_offset(&target))) } -fn static_method_target( +fn assoc_fn_target( ctx: &AssistContext<'_>, call: &CallExpr, adt: hir::Adt, From 61308607893b1a3c2a9de19d2f80c811d7ad643b Mon Sep 17 00:00:00 2001 From: Dorian Scheidt Date: Thu, 18 Aug 2022 18:12:53 -0500 Subject: [PATCH 0154/1945] use fn_name instead of name_ref.text() --- crates/ide-assists/src/handlers/generate_function.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/ide-assists/src/handlers/generate_function.rs b/crates/ide-assists/src/handlers/generate_function.rs index f5d2b27cb2..20855adf3c 100644 --- a/crates/ide-assists/src/handlers/generate_function.rs +++ b/crates/ide-assists/src/handlers/generate_function.rs @@ -73,7 +73,7 @@ fn gen_fn(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { Some(hir::PathResolution::Def(hir::ModuleDef::Adt(adt))) => { if let hir::Adt::Enum(_) = adt { // Don't suggest generating function if the name starts with an uppercase letter - if name_ref.text().starts_with(char::is_uppercase) { + if fn_name.starts_with(char::is_uppercase) { return None; } } From 30eabeb95d047b91268a70c5170b6374c6bab622 Mon Sep 17 00:00:00 2001 From: Dorian Scheidt Date: Thu, 18 Aug 2022 18:14:51 -0500 Subject: [PATCH 0155/1945] extract fn_target_info --- .../src/handlers/generate_function.rs | 42 ++++++++++++------- 1 file changed, 26 insertions(+), 16 deletions(-) diff --git a/crates/ide-assists/src/handlers/generate_function.rs b/crates/ide-assists/src/handlers/generate_function.rs index 20855adf3c..ba8bfe0c19 100644 --- a/crates/ide-assists/src/handlers/generate_function.rs +++ b/crates/ide-assists/src/handlers/generate_function.rs @@ -61,9 +61,31 @@ fn gen_fn(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { } let fn_name = &*name_ref.text(); + let (target_module, adt_name, target, file, insert_offset) = + fn_target_info(path, ctx, &call, fn_name)?; + let function_builder = FunctionBuilder::from_call(ctx, &call, fn_name, target_module, target)?; + let text_range = call.syntax().text_range(); + let label = format!("Generate {} function", function_builder.fn_name); + add_func_to_accumulator( + acc, + ctx, + text_range, + function_builder, + insert_offset, + file, + adt_name, + label, + ) +} + +fn fn_target_info( + path: ast::Path, + ctx: &AssistContext<'_>, + call: &CallExpr, + fn_name: &str, +) -> Option<(Option, Option, GeneratedFunctionTarget, FileId, TextSize)> { let mut target_module = None; let mut adt_name = None; - let (target, file, insert_offset) = match path.qualifier() { Some(qualifier) => match ctx.sema.resolve_path(&qualifier) { Some(hir::PathResolution::Def(hir::ModuleDef::Module(module))) => { @@ -78,11 +100,11 @@ fn gen_fn(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { } } - assoc_fn_target(ctx, &call, adt, &mut target_module, fn_name, &mut adt_name)? + assoc_fn_target(ctx, call, adt, &mut target_module, fn_name, &mut adt_name)? } Some(hir::PathResolution::SelfType(impl_)) => { let adt = impl_.self_ty(ctx.db()).as_adt()?; - assoc_fn_target(ctx, &call, adt, &mut target_module, fn_name, &mut adt_name)? + assoc_fn_target(ctx, call, adt, &mut target_module, fn_name, &mut adt_name)? } _ => { return None; @@ -93,19 +115,7 @@ fn gen_fn(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { get_fn_target(ctx, &target_module, call.clone())? 
} }; - let function_builder = FunctionBuilder::from_call(ctx, &call, fn_name, target_module, target)?; - let text_range = call.syntax().text_range(); - let label = format!("Generate {} function", function_builder.fn_name); - add_func_to_accumulator( - acc, - ctx, - text_range, - function_builder, - insert_offset, - file, - adt_name, - label, - ) + Some((target_module, adt_name, target, file, insert_offset)) } fn gen_method(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { From e513d7b5049092c3797cc30e13d658efb11ad17a Mon Sep 17 00:00:00 2001 From: Dorian Scheidt Date: Thu, 18 Aug 2022 18:20:06 -0500 Subject: [PATCH 0156/1945] Replace tuple with TargetInfo struct --- .../ide-assists/src/handlers/generate_function.rs | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/crates/ide-assists/src/handlers/generate_function.rs b/crates/ide-assists/src/handlers/generate_function.rs index ba8bfe0c19..dee5fea233 100644 --- a/crates/ide-assists/src/handlers/generate_function.rs +++ b/crates/ide-assists/src/handlers/generate_function.rs @@ -61,7 +61,7 @@ fn gen_fn(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { } let fn_name = &*name_ref.text(); - let (target_module, adt_name, target, file, insert_offset) = + let TargetInfo { target_module, adt_name, target, file, insert_offset } = fn_target_info(path, ctx, &call, fn_name)?; let function_builder = FunctionBuilder::from_call(ctx, &call, fn_name, target_module, target)?; let text_range = call.syntax().text_range(); @@ -78,12 +78,20 @@ fn gen_fn(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { ) } +struct TargetInfo { + target_module: Option, + adt_name: Option, + target: GeneratedFunctionTarget, + file: FileId, + insert_offset: TextSize, +} + fn fn_target_info( path: ast::Path, ctx: &AssistContext<'_>, call: &CallExpr, fn_name: &str, -) -> Option<(Option, Option, GeneratedFunctionTarget, FileId, TextSize)> { +) -> Option { let mut target_module = None; let mut adt_name = None; let (target, file, insert_offset) = match path.qualifier() { @@ -115,7 +123,7 @@ fn fn_target_info( get_fn_target(ctx, &target_module, call.clone())? 
} }; - Some((target_module, adt_name, target, file, insert_offset)) + Some(TargetInfo { target_module, adt_name, target, file, insert_offset }) } fn gen_method(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { From e27af5fae3e44ebd2339c3dd3fcaa1e9b59de598 Mon Sep 17 00:00:00 2001 From: Dorian Scheidt Date: Thu, 18 Aug 2022 18:21:58 -0500 Subject: [PATCH 0157/1945] Reorder args with flip_comma --- crates/ide-assists/src/handlers/generate_function.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/ide-assists/src/handlers/generate_function.rs b/crates/ide-assists/src/handlers/generate_function.rs index dee5fea233..2283805a30 100644 --- a/crates/ide-assists/src/handlers/generate_function.rs +++ b/crates/ide-assists/src/handlers/generate_function.rs @@ -62,7 +62,7 @@ fn gen_fn(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { let fn_name = &*name_ref.text(); let TargetInfo { target_module, adt_name, target, file, insert_offset } = - fn_target_info(path, ctx, &call, fn_name)?; + fn_target_info(ctx, path, &call, fn_name)?; let function_builder = FunctionBuilder::from_call(ctx, &call, fn_name, target_module, target)?; let text_range = call.syntax().text_range(); let label = format!("Generate {} function", function_builder.fn_name); @@ -87,8 +87,8 @@ struct TargetInfo { } fn fn_target_info( - path: ast::Path, ctx: &AssistContext<'_>, + path: ast::Path, call: &CallExpr, fn_name: &str, ) -> Option { From 2086c48cff23aff651c18636e8227993a85cd6ac Mon Sep 17 00:00:00 2001 From: Dorian Scheidt Date: Thu, 18 Aug 2022 18:26:54 -0500 Subject: [PATCH 0158/1945] Remove mut out params via assoc_fn_target_info --- .../src/handlers/generate_function.rs | 20 +++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/crates/ide-assists/src/handlers/generate_function.rs b/crates/ide-assists/src/handlers/generate_function.rs index 2283805a30..adb5ec2b99 100644 --- a/crates/ide-assists/src/handlers/generate_function.rs +++ b/crates/ide-assists/src/handlers/generate_function.rs @@ -92,8 +92,8 @@ fn fn_target_info( call: &CallExpr, fn_name: &str, ) -> Option { - let mut target_module = None; - let mut adt_name = None; + let target_module; + let adt_name = None; let (target, file, insert_offset) = match path.qualifier() { Some(qualifier) => match ctx.sema.resolve_path(&qualifier) { Some(hir::PathResolution::Def(hir::ModuleDef::Module(module))) => { @@ -108,11 +108,11 @@ fn fn_target_info( } } - assoc_fn_target(ctx, call, adt, &mut target_module, fn_name, &mut adt_name)? + return assoc_fn_target_info(ctx, call, adt, fn_name); } Some(hir::PathResolution::SelfType(impl_)) => { let adt = impl_.self_ty(ctx.db()).as_adt()?; - assoc_fn_target(ctx, call, adt, &mut target_module, fn_name, &mut adt_name)? 
+ return assoc_fn_target_info(ctx, call, adt, fn_name); } _ => { return None; @@ -412,6 +412,18 @@ fn get_method_target( Some((target.clone(), get_insert_offset(&target))) } +fn assoc_fn_target_info( + ctx: &AssistContext<'_>, + call: &CallExpr, + adt: hir::Adt, + fn_name: &str, +) -> Option { + let mut target_module = None; + let mut adt_name = None; + let (target, file, insert_offset) = + assoc_fn_target(ctx, call, adt, &mut target_module, fn_name, &mut adt_name)?; + Some(TargetInfo { target_module, adt_name, target, file, insert_offset }) +} fn assoc_fn_target( ctx: &AssistContext<'_>, call: &CallExpr, From 96c04c5e1c54ceef0b16679ecb71f64548150826 Mon Sep 17 00:00:00 2001 From: Dorian Scheidt Date: Thu, 18 Aug 2022 18:28:14 -0500 Subject: [PATCH 0159/1945] inline assoc_fn_target --- .../src/handlers/generate_function.rs | 35 ++++++++----------- 1 file changed, 14 insertions(+), 21 deletions(-) diff --git a/crates/ide-assists/src/handlers/generate_function.rs b/crates/ide-assists/src/handlers/generate_function.rs index adb5ec2b99..6c045a3333 100644 --- a/crates/ide-assists/src/handlers/generate_function.rs +++ b/crates/ide-assists/src/handlers/generate_function.rs @@ -420,29 +420,22 @@ fn assoc_fn_target_info( ) -> Option { let mut target_module = None; let mut adt_name = None; - let (target, file, insert_offset) = - assoc_fn_target(ctx, call, adt, &mut target_module, fn_name, &mut adt_name)?; + let (target, file, insert_offset) = { + let target_module: &mut Option = &mut target_module; + let adt_name: &mut Option = &mut adt_name; + let current_module = ctx.sema.scope(call.syntax())?.module(); + let module = adt.module(ctx.sema.db); + *target_module = if current_module == module { None } else { Some(module) }; + if current_module.krate() != module.krate() { + return None; + } + let (impl_, file) = get_adt_source(ctx, &adt, fn_name)?; + let (target, insert_offset) = get_method_target(ctx, &module, &impl_)?; + *adt_name = if impl_.is_none() { Some(adt.name(ctx.sema.db)) } else { None }; + Some((target, file, insert_offset)) + }?; Some(TargetInfo { target_module, adt_name, target, file, insert_offset }) } -fn assoc_fn_target( - ctx: &AssistContext<'_>, - call: &CallExpr, - adt: hir::Adt, - target_module: &mut Option, - fn_name: &str, - adt_name: &mut Option, -) -> Option<(GeneratedFunctionTarget, FileId, TextSize)> { - let current_module = ctx.sema.scope(call.syntax())?.module(); - let module = adt.module(ctx.sema.db); - *target_module = if current_module == module { None } else { Some(module) }; - if current_module.krate() != module.krate() { - return None; - } - let (impl_, file) = get_adt_source(ctx, &adt, fn_name)?; - let (target, insert_offset) = get_method_target(ctx, &module, &impl_)?; - *adt_name = if impl_.is_none() { Some(adt.name(ctx.sema.db)) } else { None }; - Some((target, file, insert_offset)) -} fn get_insert_offset(target: &GeneratedFunctionTarget) -> TextSize { match &target { From 2e4a4f1a9c1cba63ec44562f0ab0d5b5783ad561 Mon Sep 17 00:00:00 2001 From: Dorian Scheidt Date: Thu, 18 Aug 2022 18:28:43 -0500 Subject: [PATCH 0160/1945] Cleanup inline --- .../src/handlers/generate_function.rs | 25 +++++++------------ 1 file changed, 9 insertions(+), 16 deletions(-) diff --git a/crates/ide-assists/src/handlers/generate_function.rs b/crates/ide-assists/src/handlers/generate_function.rs index 6c045a3333..cf28f49e03 100644 --- a/crates/ide-assists/src/handlers/generate_function.rs +++ b/crates/ide-assists/src/handlers/generate_function.rs @@ -418,22 +418,15 @@ fn 
assoc_fn_target_info( adt: hir::Adt, fn_name: &str, ) -> Option { - let mut target_module = None; - let mut adt_name = None; - let (target, file, insert_offset) = { - let target_module: &mut Option = &mut target_module; - let adt_name: &mut Option = &mut adt_name; - let current_module = ctx.sema.scope(call.syntax())?.module(); - let module = adt.module(ctx.sema.db); - *target_module = if current_module == module { None } else { Some(module) }; - if current_module.krate() != module.krate() { - return None; - } - let (impl_, file) = get_adt_source(ctx, &adt, fn_name)?; - let (target, insert_offset) = get_method_target(ctx, &module, &impl_)?; - *adt_name = if impl_.is_none() { Some(adt.name(ctx.sema.db)) } else { None }; - Some((target, file, insert_offset)) - }?; + let current_module = ctx.sema.scope(call.syntax())?.module(); + let module = adt.module(ctx.sema.db); + let target_module = if current_module == module { None } else { Some(module) }; + if current_module.krate() != module.krate() { + return None; + } + let (impl_, file) = get_adt_source(ctx, &adt, fn_name)?; + let (target, insert_offset) = get_method_target(ctx, &module, &impl_)?; + let adt_name = if impl_.is_none() { Some(adt.name(ctx.sema.db)) } else { None }; Some(TargetInfo { target_module, adt_name, target, file, insert_offset }) } From d8e7419c647ff1cd24263fb8e6fbfd609d17d05b Mon Sep 17 00:00:00 2001 From: Dorian Scheidt Date: Thu, 18 Aug 2022 18:32:35 -0500 Subject: [PATCH 0161/1945] Generate and use TargetInfo::new --- .../src/handlers/generate_function.rs | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/crates/ide-assists/src/handlers/generate_function.rs b/crates/ide-assists/src/handlers/generate_function.rs index cf28f49e03..892b166a2c 100644 --- a/crates/ide-assists/src/handlers/generate_function.rs +++ b/crates/ide-assists/src/handlers/generate_function.rs @@ -86,6 +86,18 @@ struct TargetInfo { insert_offset: TextSize, } +impl TargetInfo { + fn new( + target_module: Option, + adt_name: Option, + target: GeneratedFunctionTarget, + file: FileId, + insert_offset: TextSize, + ) -> Self { + Self { target_module, adt_name, target, file, insert_offset } + } +} + fn fn_target_info( ctx: &AssistContext<'_>, path: ast::Path, @@ -123,7 +135,7 @@ fn fn_target_info( get_fn_target(ctx, &target_module, call.clone())? 
} }; - Some(TargetInfo { target_module, adt_name, target, file, insert_offset }) + Some(TargetInfo::new(target_module, adt_name, target, file, insert_offset)) } fn gen_method(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { @@ -427,7 +439,7 @@ fn assoc_fn_target_info( let (impl_, file) = get_adt_source(ctx, &adt, fn_name)?; let (target, insert_offset) = get_method_target(ctx, &module, &impl_)?; let adt_name = if impl_.is_none() { Some(adt.name(ctx.sema.db)) } else { None }; - Some(TargetInfo { target_module, adt_name, target, file, insert_offset }) + Some(TargetInfo::new(target_module, adt_name, target, file, insert_offset)) } fn get_insert_offset(target: &GeneratedFunctionTarget) -> TextSize { From 48ea3825b8951a39e27503ea16cacc9147255d79 Mon Sep 17 00:00:00 2001 From: Dorian Scheidt Date: Thu, 18 Aug 2022 18:39:42 -0500 Subject: [PATCH 0162/1945] Introduce and use get_fn_target_info --- .../src/handlers/generate_function.rs | 32 +++++++++---------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/crates/ide-assists/src/handlers/generate_function.rs b/crates/ide-assists/src/handlers/generate_function.rs index 892b166a2c..e26c76da18 100644 --- a/crates/ide-assists/src/handlers/generate_function.rs +++ b/crates/ide-assists/src/handlers/generate_function.rs @@ -104,13 +104,10 @@ fn fn_target_info( call: &CallExpr, fn_name: &str, ) -> Option { - let target_module; - let adt_name = None; - let (target, file, insert_offset) = match path.qualifier() { + match path.qualifier() { Some(qualifier) => match ctx.sema.resolve_path(&qualifier) { Some(hir::PathResolution::Def(hir::ModuleDef::Module(module))) => { - target_module = Some(module); - get_fn_target(ctx, &target_module, call.clone())? + get_fn_target_info(ctx, &Some(module), call.clone()) } Some(hir::PathResolution::Def(hir::ModuleDef::Adt(adt))) => { if let hir::Adt::Enum(_) = adt { @@ -120,22 +117,16 @@ fn fn_target_info( } } - return assoc_fn_target_info(ctx, call, adt, fn_name); + assoc_fn_target_info(ctx, call, adt, fn_name) } Some(hir::PathResolution::SelfType(impl_)) => { let adt = impl_.self_ty(ctx.db()).as_adt()?; - return assoc_fn_target_info(ctx, call, adt, fn_name); - } - _ => { - return None; + assoc_fn_target_info(ctx, call, adt, fn_name) } + _ => None, }, - _ => { - target_module = None; - get_fn_target(ctx, &target_module, call.clone())? 
- } - }; - Some(TargetInfo::new(target_module, adt_name, target, file, insert_offset)) + _ => get_fn_target_info(ctx, &None, call.clone()), + } } fn gen_method(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { @@ -391,6 +382,15 @@ fn make_return_type( (ret_type, should_focus_return_type) } +fn get_fn_target_info( + ctx: &AssistContext<'_>, + target_module: &Option, + call: CallExpr, +) -> Option { + let (target, file, insert_offset) = get_fn_target(ctx, target_module, call)?; + Some(TargetInfo::new(*target_module, None, target, file, insert_offset)) +} + fn get_fn_target( ctx: &AssistContext<'_>, target_module: &Option, From b6fe46055bd9b16fd74df43cbbcb89e612a82b4d Mon Sep 17 00:00:00 2001 From: ice1000 Date: Fri, 19 Aug 2022 01:06:00 +0000 Subject: [PATCH 0163/1945] feat: Improved inline_call to replace `Self` --- .../ide-assists/src/handlers/inline_call.rs | 38 ++++++++++++++++++- 1 file changed, 37 insertions(+), 1 deletion(-) diff --git a/crates/ide-assists/src/handlers/inline_call.rs b/crates/ide-assists/src/handlers/inline_call.rs index 80d3b92559..c78c5eaa9f 100644 --- a/crates/ide-assists/src/handlers/inline_call.rs +++ b/crates/ide-assists/src/handlers/inline_call.rs @@ -13,7 +13,7 @@ use ide_db::{ use itertools::{izip, Itertools}; use syntax::{ ast::{self, edit_in_place::Indent, HasArgList, PathExpr}, - ted, AstNode, + ted, AstNode, SyntaxKind, }; use crate::{ @@ -311,6 +311,16 @@ fn inline( } else { fn_body.clone_for_update() }; + // TODO: use if-let chains - https://github.com/rust-lang/rust/pull/94927 + if let Some(i) = body.syntax().ancestors().find_map(ast::Impl::cast) { + if let Some(st) = i.self_ty() { + for tok in body.syntax().descendants_with_tokens().filter_map(|t| t.into_token()) { + if tok.kind() == SyntaxKind::SELF_TYPE_KW { + ted::replace(tok, st.syntax()); + } + } + } + } let usages_for_locals = |local| { Definition::Local(local) .usages(sema) @@ -345,6 +355,7 @@ fn inline( } }) .collect(); + if function.self_param(sema.db).is_some() { let this = || make::name_ref("this").syntax().clone_for_update(); if let Some(self_local) = params[0].2.as_local(sema.db) { @@ -1188,6 +1199,31 @@ fn bar() -> u32 { x } } +"#, + ) + } + + #[test] + fn inline_call_with_self_type() { + check_assist( + inline_call, + r#" +struct A(u32); +impl A { + fn f() -> Self { Self(114514) } +} +fn main() { + A::f$0(); +} +"#, + r#" +struct A(u32); +impl A { + fn f() -> Self { Self(114514) } +} +fn main() { + A(114514); +} "#, ) } From 5a6c51ebb8549467abeeac69bc2e12494cde5b29 Mon Sep 17 00:00:00 2001 From: ice1000 Date: Fri, 19 Aug 2022 01:08:59 +0000 Subject: [PATCH 0164/1945] fix: use functional programming --- crates/ide-assists/src/handlers/inline_call.rs | 17 +++++++---------- 1 file changed, 7 insertions(+), 10 deletions(-) diff --git a/crates/ide-assists/src/handlers/inline_call.rs b/crates/ide-assists/src/handlers/inline_call.rs index c78c5eaa9f..b5d092e39b 100644 --- a/crates/ide-assists/src/handlers/inline_call.rs +++ b/crates/ide-assists/src/handlers/inline_call.rs @@ -13,7 +13,7 @@ use ide_db::{ use itertools::{izip, Itertools}; use syntax::{ ast::{self, edit_in_place::Indent, HasArgList, PathExpr}, - ted, AstNode, SyntaxKind, + ted, AstNode, NodeOrToken, SyntaxKind, }; use crate::{ @@ -311,15 +311,12 @@ fn inline( } else { fn_body.clone_for_update() }; - // TODO: use if-let chains - https://github.com/rust-lang/rust/pull/94927 - if let Some(i) = body.syntax().ancestors().find_map(ast::Impl::cast) { - if let Some(st) = i.self_ty() { - for tok in 
body.syntax().descendants_with_tokens().filter_map(|t| t.into_token()) { - if tok.kind() == SyntaxKind::SELF_TYPE_KW { - ted::replace(tok, st.syntax()); - } - } - } + if let Some(t) = body.syntax().ancestors().find_map(ast::Impl::cast).and_then(|i| i.self_ty()) { + body.syntax() + .descendants_with_tokens() + .filter_map(NodeOrToken::into_token) + .filter(|tok| tok.kind() == SyntaxKind::SELF_TYPE_KW) + .for_each(|tok| ted::replace(tok, t.syntax())); } let usages_for_locals = |local| { Definition::Local(local) From 45b7b6a60a7404d093f4afe81a0da10824cef7b8 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 19 Aug 2022 08:52:31 +0200 Subject: [PATCH 0165/1945] Implement lsp extension for cancelling running flychecks --- crates/flycheck/src/lib.rs | 26 ++++++++++++++++++----- crates/rust-analyzer/src/handlers.rs | 6 ++++++ crates/rust-analyzer/src/lsp_ext.rs | 8 +++++++ crates/rust-analyzer/src/main_loop.rs | 7 ++++--- docs/dev/lsp-extensions.md | 2 +- editors/code/package.json | 5 +++++ editors/code/src/commands.ts | 6 ++++++ editors/code/src/lsp_ext.ts | 30 +++++++++++++++------------ editors/code/src/main.ts | 1 + 9 files changed, 69 insertions(+), 22 deletions(-) diff --git a/crates/flycheck/src/lib.rs b/crates/flycheck/src/lib.rs index 2bebea2fbf..c22945c81f 100644 --- a/crates/flycheck/src/lib.rs +++ b/crates/flycheck/src/lib.rs @@ -77,8 +77,13 @@ impl FlycheckHandle { } /// Schedule a re-start of the cargo check worker. - pub fn update(&self) { - self.sender.send(Restart).unwrap(); + pub fn restart(&self) { + self.sender.send(Restart::Yes).unwrap(); + } + + /// Stop this cargo check worker. + pub fn cancel(&self) { + self.sender.send(Restart::No).unwrap(); } pub fn id(&self) -> usize { @@ -122,7 +127,10 @@ pub enum Progress { DidCancel, } -struct Restart; +enum Restart { + Yes, + No, +} struct FlycheckActor { id: usize, @@ -149,6 +157,7 @@ impl FlycheckActor { config: FlycheckConfig, workspace_root: AbsPathBuf, ) -> FlycheckActor { + tracing::info!(%id, ?workspace_root, "Spawning flycheck"); FlycheckActor { id, sender, config, workspace_root, cargo_handle: None } } fn progress(&self, progress: Progress) { @@ -164,10 +173,13 @@ impl FlycheckActor { fn run(mut self, inbox: Receiver) { while let Some(event) = self.next_event(&inbox) { match event { - Event::Restart(Restart) => { + Event::Restart(Restart::No) => { + self.cancel_check_process(); + } + Event::Restart(Restart::Yes) => { // Cancel the previously spawned process self.cancel_check_process(); - while let Ok(Restart) = inbox.recv_timeout(Duration::from_millis(50)) {} + while let Ok(_) = inbox.recv_timeout(Duration::from_millis(50)) {} let command = self.check_command(); tracing::debug!(?command, "will restart flycheck"); @@ -223,6 +235,10 @@ impl FlycheckActor { fn cancel_check_process(&mut self) { if let Some(cargo_handle) = self.cargo_handle.take() { + tracing::debug!( + command = ?self.check_command(), + "did cancel flycheck" + ); cargo_handle.cancel(); self.progress(Progress::DidCancel); } diff --git a/crates/rust-analyzer/src/handlers.rs b/crates/rust-analyzer/src/handlers.rs index 6337d49c24..fdb3205b0a 100644 --- a/crates/rust-analyzer/src/handlers.rs +++ b/crates/rust-analyzer/src/handlers.rs @@ -51,6 +51,12 @@ pub(crate) fn handle_workspace_reload(state: &mut GlobalState, _: ()) -> Result< Ok(()) } +pub(crate) fn handle_cancel_flycheck(state: &mut GlobalState, _: ()) -> Result<()> { + let _p = profile::span("handle_stop_flycheck"); + state.flycheck.iter().for_each(|flycheck| flycheck.cancel()); + Ok(()) +} + pub(crate) 
fn handle_analyzer_status( snap: GlobalStateSnapshot, params: lsp_ext::AnalyzerStatusParams, diff --git a/crates/rust-analyzer/src/lsp_ext.rs b/crates/rust-analyzer/src/lsp_ext.rs index 5f0e108624..e61c8b643d 100644 --- a/crates/rust-analyzer/src/lsp_ext.rs +++ b/crates/rust-analyzer/src/lsp_ext.rs @@ -129,6 +129,14 @@ pub struct ExpandedMacro { pub expansion: String, } +pub enum CancelFlycheck {} + +impl Request for CancelFlycheck { + type Params = (); + type Result = (); + const METHOD: &'static str = "rust-analyzer/cancelFlycheck"; +} + pub enum MatchingBrace {} impl Request for MatchingBrace { diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs index 7741999824..f187547019 100644 --- a/crates/rust-analyzer/src/main_loop.rs +++ b/crates/rust-analyzer/src/main_loop.rs @@ -288,7 +288,7 @@ impl GlobalState { if became_quiescent { // Project has loaded properly, kick off initial flycheck - self.flycheck.iter().for_each(FlycheckHandle::update); + self.flycheck.iter().for_each(FlycheckHandle::restart); if self.config.prefill_caches() { self.prime_caches_queue.request_op("became quiescent".to_string()); } @@ -590,6 +590,7 @@ impl GlobalState { .on_sync_mut::(handlers::handle_workspace_reload) .on_sync_mut::(handlers::handle_memory_usage) .on_sync_mut::(handlers::handle_shuffle_crate_graph) + .on_sync_mut::(handlers::handle_cancel_flycheck) .on_sync::(handlers::handle_join_lines) .on_sync::(handlers::handle_on_enter) .on_sync::(handlers::handle_selection_range) @@ -779,7 +780,7 @@ impl GlobalState { for (id, _) in workspace_ids.clone() { if id == flycheck.id() { updated = true; - flycheck.update(); + flycheck.restart(); continue; } } @@ -798,7 +799,7 @@ impl GlobalState { // No specific flycheck was triggered, so let's trigger all of them. if !updated { for flycheck in &this.flycheck { - flycheck.update(); + flycheck.restart(); } } Ok(()) diff --git a/docs/dev/lsp-extensions.md b/docs/dev/lsp-extensions.md index 5040643d34..6d2c7d7b06 100644 --- a/docs/dev/lsp-extensions.md +++ b/docs/dev/lsp-extensions.md @@ -1,5 +1,5 @@ diff --git a/.github/ISSUE_TEMPLATE/critical_nightly_regression.md b/.github/ISSUE_TEMPLATE/critical_nightly_regression.md index a0b1627d7e..ad220ff65c 100644 --- a/.github/ISSUE_TEMPLATE/critical_nightly_regression.md +++ b/.github/ISSUE_TEMPLATE/critical_nightly_regression.md @@ -2,8 +2,8 @@ name: Critical Nightly Regression about: You are using nightly rust-analyzer and the latest version is unusable. title: '' -labels: '' -assignees: 'matklad' +labels: 'Broken Window' +assignees: '' --- @@ -14,4 +14,3 @@ Please try to provide information which will help us to fix the issue faster. Mi --> This is a serious regression in nightly and it's important to fix it before the next release. -@matklad, please take a look. 
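
The `cancelFlycheck` patch above routes both restarts and cancellations through the single channel that feeds the flycheck worker (`Restart::Yes` vs `Restart::No`). Below is a minimal, self-contained sketch of that message-passing shape — not part of any patch; it assumes the `crossbeam-channel` crate (which the worker already uses), and the names `Handle` and `run_actor` are illustrative stand-ins for `FlycheckHandle` and `FlycheckActor`, not the real rust-analyzer types.

```rust
// Sketch of the restart/cancel channel pattern: the handle only posts
// messages, and the worker thread owns whatever process is in flight.
use crossbeam_channel::{unbounded, Receiver, Sender};

enum Restart {
    Yes, // re-run the check, cancelling any run in flight
    No,  // just cancel (what `rust-analyzer/cancelFlycheck` maps to)
}

struct Handle {
    sender: Sender<Restart>,
}

impl Handle {
    fn restart(&self) {
        self.sender.send(Restart::Yes).unwrap();
    }
    fn cancel(&self) {
        self.sender.send(Restart::No).unwrap();
    }
}

fn run_actor(inbox: Receiver<Restart>) {
    // Iterating a crossbeam Receiver blocks until a message arrives and
    // ends once every Sender has been dropped.
    for msg in inbox {
        match msg {
            Restart::Yes => println!("cancel current check, then spawn a new one"),
            Restart::No => println!("cancel current check"),
        }
    }
}

fn main() {
    let (sender, receiver) = unbounded();
    let handle = Handle { sender };
    let worker = std::thread::spawn(move || run_actor(receiver));
    handle.restart();
    handle.cancel();
    drop(handle); // disconnect the channel so the worker loop exits
    worker.join().unwrap();
}
```

Keeping process ownership inside the worker (the handle never touches the child process directly) is also what lets the later `command-group` patch change *how* the child is killed without touching any caller.
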
From 1dcc25a70afdc484081c0fc5cda1f8911d6660b8 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sat, 5 Nov 2022 16:28:04 +0100 Subject: [PATCH 0536/1945] internal: Use a process group for flycheck --- Cargo.lock | 24 ++++++++++++++++++++++++ crates/flycheck/Cargo.toml | 1 + crates/flycheck/src/lib.rs | 19 +++++++++++-------- 3 files changed, 36 insertions(+), 8 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 8931c17bbd..c04906c453 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -221,6 +221,16 @@ dependencies = [ "tracing", ] +[[package]] +name = "command-group" +version = "1.0.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7a8a86f409b4a59df3a3e4bee2de0b83f1755fdd2a25e3a9684c396fc4bed2c" +dependencies = [ + "nix", + "winapi", +] + [[package]] name = "countme" version = "3.0.1" @@ -390,6 +400,7 @@ name = "flycheck" version = "0.0.0" dependencies = [ "cargo_metadata", + "command-group", "crossbeam-channel", "jod-thread", "paths", @@ -970,6 +981,19 @@ dependencies = [ "windows-sys 0.28.0", ] +[[package]] +name = "nix" +version = "0.22.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e4916f159ed8e5de0082076562152a76b7a1f64a01fd9d1e0fea002c37624faf" +dependencies = [ + "bitflags", + "cc", + "cfg-if", + "libc", + "memoffset", +] + [[package]] name = "notify" version = "5.0.0" diff --git a/crates/flycheck/Cargo.toml b/crates/flycheck/Cargo.toml index 2ad32d2483..6871f90015 100644 --- a/crates/flycheck/Cargo.toml +++ b/crates/flycheck/Cargo.toml @@ -17,6 +17,7 @@ rustc-hash = "1.1.0" serde = { version = "1.0.137", features = ["derive"] } serde_json = "1.0.86" jod-thread = "0.1.2" +command-group = "1.0.8" toolchain = { path = "../toolchain", version = "0.0.0" } stdx = { path = "../stdx", version = "0.0.0" } diff --git a/crates/flycheck/src/lib.rs b/crates/flycheck/src/lib.rs index 8a91d60666..1758c9c27a 100644 --- a/crates/flycheck/src/lib.rs +++ b/crates/flycheck/src/lib.rs @@ -10,11 +10,12 @@ use std::{ time::Duration, }; +use command_group::{CommandGroup, GroupChild}; use crossbeam_channel::{never, select, unbounded, Receiver, Sender}; use paths::AbsPathBuf; use rustc_hash::FxHashMap; use serde::Deserialize; -use stdx::{process::streaming_output, JodChild}; +use stdx::process::streaming_output; pub use cargo_metadata::diagnostic::{ Applicability, Diagnostic, DiagnosticCode, DiagnosticLevel, DiagnosticSpan, @@ -359,6 +360,8 @@ impl FlycheckActor { } } +struct JodChild(GroupChild); + /// A handle to a cargo process used for fly-checking. struct CargoHandle { /// The handle to the actual cargo process. 
As we cannot cancel directly from with @@ -371,10 +374,10 @@ struct CargoHandle { impl CargoHandle { fn spawn(mut command: Command) -> std::io::Result { command.stdout(Stdio::piped()).stderr(Stdio::piped()).stdin(Stdio::null()); - let mut child = JodChild::spawn(command)?; + let mut child = command.group_spawn().map(JodChild)?; - let stdout = child.stdout.take().unwrap(); - let stderr = child.stderr.take().unwrap(); + let stdout = child.0.inner().stdout.take().unwrap(); + let stderr = child.0.inner().stderr.take().unwrap(); let (sender, receiver) = unbounded(); let actor = CargoActor::new(sender, stdout, stderr); @@ -386,13 +389,13 @@ impl CargoHandle { } fn cancel(mut self) { - let _ = self.child.kill(); - let _ = self.child.wait(); + let _ = self.child.0.kill(); + let _ = self.child.0.wait(); } fn join(mut self) -> io::Result<()> { - let _ = self.child.kill(); - let exit_status = self.child.wait()?; + let _ = self.child.0.kill(); + let exit_status = self.child.0.wait()?; let (read_at_least_one_message, error) = self.thread.join()?; if read_at_least_one_message || exit_status.success() { Ok(()) From 935eb3f63424d62ae9d76502aeb7074ce6713000 Mon Sep 17 00:00:00 2001 From: Rongjian Zhang Date: Sun, 6 Nov 2022 01:33:57 +0800 Subject: [PATCH 0537/1945] docs: fix adoc links --- docs/user/manual.adoc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/user/manual.adoc b/docs/user/manual.adoc index 62e1c5b41b..49500e390a 100644 --- a/docs/user/manual.adoc +++ b/docs/user/manual.adoc @@ -489,9 +489,9 @@ You can follow instructions for installing < Date: Mon, 7 Nov 2022 12:53:33 +0200 Subject: [PATCH 0538/1945] Fix typos --- crates/flycheck/src/lib.rs | 2 +- crates/hir-expand/src/lib.rs | 4 ++-- crates/rust-analyzer/src/config.rs | 2 +- crates/rust-analyzer/src/line_index.rs | 2 +- crates/syntax/src/tests/sourcegen_ast.rs | 2 +- docs/user/generated_config.adoc | 2 +- editors/code/package.json | 2 +- 7 files changed, 8 insertions(+), 8 deletions(-) diff --git a/crates/flycheck/src/lib.rs b/crates/flycheck/src/lib.rs index 1758c9c27a..ff507a52d5 100644 --- a/crates/flycheck/src/lib.rs +++ b/crates/flycheck/src/lib.rs @@ -365,7 +365,7 @@ struct JodChild(GroupChild); /// A handle to a cargo process used for fly-checking. struct CargoHandle { /// The handle to the actual cargo process. As we cannot cancel directly from with - /// a read syscall dropping and therefor terminating the process is our best option. + /// a read syscall dropping and therefore terminating the process is our best option. 
child: JodChild, thread: jod_thread::JoinHandle>, receiver: Receiver, diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index a5b499fe8d..7352b003a4 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -814,7 +814,7 @@ impl<'a> InFile<&'a SyntaxNode> { pub fn original_syntax_node(self, db: &dyn db::AstDatabase) -> Option> { // This kind of upmapping can only be achieved in attribute expanded files, - // as we don't have node inputs otherwise and therefor can't find an `N` node in the input + // as we don't have node inputs otherwise and therefore can't find an `N` node in the input if !self.file_id.is_macro() { return Some(self.map(Clone::clone)); } else if !self.file_id.is_attr_macro(db) { @@ -926,7 +926,7 @@ impl InFile { pub fn original_ast_node(self, db: &dyn db::AstDatabase) -> Option> { // This kind of upmapping can only be achieved in attribute expanded files, - // as we don't have node inputs otherwise and therefor can't find an `N` node in the input + // as we don't have node inputs otherwise and therefore can't find an `N` node in the input if !self.file_id.is_macro() { return Some(self); } else if !self.file_id.is_attr_macro(db) { diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index 4072ae585d..c278ba2d7c 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs @@ -157,7 +157,7 @@ config_data! { checkOnSave_noDefaultFeatures: Option = "null", /// Override the command rust-analyzer uses instead of `cargo check` for /// diagnostics on save. The command is required to output json and - /// should therefor include `--message-format=json` or a similar option. + /// should therefore include `--message-format=json` or a similar option. /// /// If you're changing this because you're using some tool wrapping /// Cargo, you might also want to change diff --git a/crates/rust-analyzer/src/line_index.rs b/crates/rust-analyzer/src/line_index.rs index 7636c3da7f..2945dba12f 100644 --- a/crates/rust-analyzer/src/line_index.rs +++ b/crates/rust-analyzer/src/line_index.rs @@ -42,7 +42,7 @@ impl LineEndings { loop { let idx = match find_crlf(&tail[gap_len..]) { None if crlf_seen => tail.len(), - // SAFETY: buf is unchanged and therefor still contains utf8 data + // SAFETY: buf is unchanged and therefore still contains utf8 data None => return (unsafe { String::from_utf8_unchecked(buf) }, LineEndings::Unix), Some(idx) => { crlf_seen = true; diff --git a/crates/syntax/src/tests/sourcegen_ast.rs b/crates/syntax/src/tests/sourcegen_ast.rs index 70b54843db..712ef5f63b 100644 --- a/crates/syntax/src/tests/sourcegen_ast.rs +++ b/crates/syntax/src/tests/sourcegen_ast.rs @@ -86,7 +86,7 @@ fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> String { .traits .iter() .filter(|trait_name| { - // Loops have two expressions so this might collide, therefor manual impl it + // Loops have two expressions so this might collide, therefore manual impl it node.name != "ForExpr" && node.name != "WhileExpr" || trait_name.as_str() != "HasLoopBody" }) diff --git a/docs/user/generated_config.adoc b/docs/user/generated_config.adoc index 36794efe42..f171eb41bf 100644 --- a/docs/user/generated_config.adoc +++ b/docs/user/generated_config.adoc @@ -173,7 +173,7 @@ Whether to pass `--no-default-features` to Cargo. Defaults to -- Override the command rust-analyzer uses instead of `cargo check` for diagnostics on save. 
The command is required to output json and -should therefor include `--message-format=json` or a similar option. +should therefore include `--message-format=json` or a similar option. If you're changing this because you're using some tool wrapping Cargo, you might also want to change diff --git a/editors/code/package.json b/editors/code/package.json index 1a97a9c089..4357dc7306 100644 --- a/editors/code/package.json +++ b/editors/code/package.json @@ -623,7 +623,7 @@ ] }, "rust-analyzer.checkOnSave.overrideCommand": { - "markdownDescription": "Override the command rust-analyzer uses instead of `cargo check` for\ndiagnostics on save. The command is required to output json and\nshould therefor include `--message-format=json` or a similar option.\n\nIf you're changing this because you're using some tool wrapping\nCargo, you might also want to change\n`#rust-analyzer.cargo.buildScripts.overrideCommand#`.\n\nIf there are multiple linked projects, this command is invoked for\neach of them, with the working directory being the project root\n(i.e., the folder containing the `Cargo.toml`).\n\nAn example command would be:\n\n```bash\ncargo check --workspace --message-format=json --all-targets\n```\n.", + "markdownDescription": "Override the command rust-analyzer uses instead of `cargo check` for\ndiagnostics on save. The command is required to output json and\nshould therefore include `--message-format=json` or a similar option.\n\nIf you're changing this because you're using some tool wrapping\nCargo, you might also want to change\n`#rust-analyzer.cargo.buildScripts.overrideCommand#`.\n\nIf there are multiple linked projects, this command is invoked for\neach of them, with the working directory being the project root\n(i.e., the folder containing the `Cargo.toml`).\n\nAn example command would be:\n\n```bash\ncargo check --workspace --message-format=json --all-targets\n```\n.", "default": null, "type": [ "null", From f24fbc20274962860e15e1160bfdaade543092bf Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 7 Nov 2022 11:58:57 +0100 Subject: [PATCH 0539/1945] rustfmt --- crates/ide-assists/src/handlers/extract_function.rs | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/crates/ide-assists/src/handlers/extract_function.rs b/crates/ide-assists/src/handlers/extract_function.rs index c40fb291a3..f24a6aacc9 100644 --- a/crates/ide-assists/src/handlers/extract_function.rs +++ b/crates/ide-assists/src/handlers/extract_function.rs @@ -127,10 +127,8 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op builder.replace(target_range, make_call(ctx, &fun, old_indent)); - let has_impl_wrapper = insert_after - .ancestors() - .find(|a| a.kind() == SyntaxKind::IMPL && a != &insert_after) - .is_some(); + let has_impl_wrapper = + insert_after.ancestors().any(|a| a.kind() == SyntaxKind::IMPL && a != insert_after); let fn_def = match fun.self_param_adt(ctx) { Some(adt) if anchor == Anchor::Method && !has_impl_wrapper => { @@ -1250,8 +1248,7 @@ fn node_to_insert_after(body: &FunctionBody, anchor: Anchor) -> Option { if body.extracted_from_trait_impl() && matches!(anchor, Anchor::Method) { let impl_node = find_non_trait_impl(&next_ancestor); - let target_node = impl_node.as_ref().and_then(last_impl_member); - if target_node.is_some() { + if let target_node @ Some(_) = impl_node.as_ref().and_then(last_impl_member) { return target_node; } } @@ -1281,7 +1278,8 @@ fn find_non_trait_impl(trait_impl: &SyntaxNode) -> Option { let impl_type = 
Some(impl_type_name(&as_impl)?); let sibblings = trait_impl.parent()?.children(); - sibblings.filter_map(ast::Impl::cast) + sibblings + .filter_map(ast::Impl::cast) .find(|s| impl_type_name(s) == impl_type && !is_trait_impl(s)) } From 8ad4a1d1187326e66a0de73133ea7197992c3837 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 7 Nov 2022 11:45:52 +0100 Subject: [PATCH 0540/1945] Update sysroot crates --- crates/project-model/src/sysroot.rs | 27 ++++++---- crates/project-model/src/tests.rs | 79 +++++++---------------------- 2 files changed, 35 insertions(+), 71 deletions(-) diff --git a/crates/project-model/src/sysroot.rs b/crates/project-model/src/sysroot.rs index fa8d76f3f4..f6c09a27c9 100644 --- a/crates/project-model/src/sysroot.rs +++ b/crates/project-model/src/sysroot.rs @@ -128,14 +128,18 @@ impl Sysroot { } if let Some(alloc) = sysroot.by_name("alloc") { - if let Some(core) = sysroot.by_name("core") { - sysroot.crates[alloc].deps.push(core); + for dep in ALLOC_DEPS.trim().lines() { + if let Some(dep) = sysroot.by_name(dep) { + sysroot.crates[alloc].deps.push(dep) + } } } if let Some(proc_macro) = sysroot.by_name("proc_macro") { - if let Some(std) = sysroot.by_name("std") { - sysroot.crates[proc_macro].deps.push(std); + for dep in PROC_MACRO_DEPS.trim().lines() { + if let Some(dep) = sysroot.by_name(dep) { + sysroot.crates[proc_macro].deps.push(dep) + } } } @@ -239,6 +243,7 @@ fn get_rust_src(sysroot_path: &AbsPath) -> Option { const SYSROOT_CRATES: &str = " alloc +backtrace core panic_abort panic_unwind @@ -246,17 +251,19 @@ proc_macro profiler_builtins std stdarch/crates/std_detect -term test unwind"; +const ALLOC_DEPS: &str = "core"; + const STD_DEPS: &str = " alloc -core -panic_abort panic_unwind +panic_abort +core profiler_builtins +unwind std_detect -term -test -unwind"; +test"; + +const PROC_MACRO_DEPS: &str = "std"; diff --git a/crates/project-model/src/tests.rs b/crates/project-model/src/tests.rs index e2444e2497..a1cb438bdd 100644 --- a/crates/project-model/src/tests.rs +++ b/crates/project-model/src/tests.rs @@ -1566,10 +1566,10 @@ fn rust_project_hello_world_project_model() { }, Dependency { crate_id: CrateId( - 1, + 3, ), name: CrateName( - "core", + "panic_unwind", ), prelude: true, }, @@ -1584,10 +1584,10 @@ fn rust_project_hello_world_project_model() { }, Dependency { crate_id: CrateId( - 3, + 1, ), name: CrateName( - "panic_unwind", + "core", ), prelude: true, }, @@ -1600,6 +1600,15 @@ fn rust_project_hello_world_project_model() { ), prelude: true, }, + Dependency { + crate_id: CrateId( + 9, + ), + name: CrateName( + "unwind", + ), + prelude: true, + }, Dependency { crate_id: CrateId( 7, @@ -1613,29 +1622,11 @@ fn rust_project_hello_world_project_model() { crate_id: CrateId( 8, ), - name: CrateName( - "term", - ), - prelude: true, - }, - Dependency { - crate_id: CrateId( - 9, - ), name: CrateName( "test", ), prelude: true, }, - Dependency { - crate_id: CrateId( - 10, - ), - name: CrateName( - "unwind", - ), - prelude: true, - }, ], proc_macro: Err( "no proc macro loaded for sysroot crate", @@ -1687,40 +1678,6 @@ fn rust_project_hello_world_project_model() { ), edition: Edition2018, version: None, - display_name: Some( - CrateDisplayName { - crate_name: CrateName( - "term", - ), - canonical_name: "term", - }, - ), - cfg_options: CfgOptions( - [], - ), - potential_cfg_options: CfgOptions( - [], - ), - env: Env { - entries: {}, - }, - dependencies: [], - proc_macro: Err( - "no proc macro loaded for sysroot crate", - ), - origin: Lang( - Other, - ), - is_proc_macro: 
false, - }, - CrateId( - 9, - ): CrateData { - root_file_id: FileId( - 10, - ), - edition: Edition2018, - version: None, display_name: Some( CrateDisplayName { crate_name: CrateName( @@ -1748,10 +1705,10 @@ fn rust_project_hello_world_project_model() { is_proc_macro: false, }, CrateId( - 10, + 9, ): CrateData { root_file_id: FileId( - 11, + 10, ), edition: Edition2018, version: None, @@ -1782,10 +1739,10 @@ fn rust_project_hello_world_project_model() { is_proc_macro: false, }, CrateId( - 11, + 10, ): CrateData { root_file_id: FileId( - 12, + 11, ), edition: Edition2018, version: None, @@ -1836,7 +1793,7 @@ fn rust_project_hello_world_project_model() { }, Dependency { crate_id: CrateId( - 9, + 8, ), name: CrateName( "test", From 180b4cedec730d0c0127d0d41714abe4176d5365 Mon Sep 17 00:00:00 2001 From: Noritada Kobayashi Date: Mon, 7 Nov 2022 19:25:07 +0900 Subject: [PATCH 0541/1945] Fix the length displayed for byte string literals with escaped newlines The length of byte strings containing escaped newlines is displayed two bytes longer when the first escaped character is a newline. This is due to a small bug in handling the first escaped newline in string literals. Closes #13567 --- crates/syntax/src/ast/token_ext.rs | 22 ++++++++++++++++------ 1 file changed, 16 insertions(+), 6 deletions(-) diff --git a/crates/syntax/src/ast/token_ext.rs b/crates/syntax/src/ast/token_ext.rs index ba72e64425..22ad6db9ae 100644 --- a/crates/syntax/src/ast/token_ext.rs +++ b/crates/syntax/src/ast/token_ext.rs @@ -209,17 +209,19 @@ impl ast::String { let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()]; let mut buf = String::new(); - let mut text_iter = text.chars(); + let mut prev = 0; let mut has_error = false; unescape_literal(text, Mode::Str, &mut |char_range, unescaped_char| match ( unescaped_char, buf.capacity() == 0, ) { (Ok(c), false) => buf.push(c), - (Ok(c), true) if char_range.len() == 1 && Some(c) == text_iter.next() => (), + (Ok(_), true) if char_range.len() == 1 && char_range.start == prev => { + prev = char_range.end + } (Ok(c), true) => { buf.reserve_exact(text.len()); - buf.push_str(&text[..char_range.start]); + buf.push_str(&text[..prev]); buf.push(c); } (Err(_), _) => has_error = true, @@ -252,17 +254,19 @@ impl ast::ByteString { let text = &text[self.text_range_between_quotes()? 
- self.syntax().text_range().start()]; let mut buf: Vec = Vec::new(); - let mut text_iter = text.chars(); + let mut prev = 0; let mut has_error = false; unescape_literal(text, Mode::ByteStr, &mut |char_range, unescaped_char| match ( unescaped_char, buf.capacity() == 0, ) { (Ok(c), false) => buf.push(c as u8), - (Ok(c), true) if char_range.len() == 1 && Some(c) == text_iter.next() => (), + (Ok(_), true) if char_range.len() == 1 && char_range.start == prev => { + prev = char_range.end + } (Ok(c), true) => { buf.reserve_exact(text.len()); - buf.extend_from_slice(text[..char_range.start].as_bytes()); + buf.extend_from_slice(text[..prev].as_bytes()); buf.push(c as u8); } (Err(_), _) => has_error = true, @@ -445,6 +449,12 @@ mod tests { check_string_value(r"\foobar", None); check_string_value(r"\nfoobar", "\nfoobar"); check_string_value(r"C:\\Windows\\System32\\", "C:\\Windows\\System32\\"); + check_string_value(r"\x61bcde", "a\x62cde"); + check_string_value( + r"a\ +bcde", "a\ +bcde", + ); } #[test] From ee2dd934caeeb2fb044ac1cacd3e65c3b033bc8b Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 7 Nov 2022 12:49:52 +0100 Subject: [PATCH 0542/1945] Don't trigger adjustment hints in all inlay hint tests --- crates/ide/src/inlay_hints.rs | 37 +++++++++++++++++------------------ 1 file changed, 18 insertions(+), 19 deletions(-) diff --git a/crates/ide/src/inlay_hints.rs b/crates/ide/src/inlay_hints.rs index 50934a27f8..325e609054 100644 --- a/crates/ide/src/inlay_hints.rs +++ b/crates/ide/src/inlay_hints.rs @@ -861,23 +861,23 @@ fn binding_mode_hints( tooltip: Some(InlayTooltip::String("Inferred binding mode".into())), }); }); - // match pat { - // ast::Pat::IdentPat(pat) if pat.ref_token().is_none() && pat.mut_token().is_none() => { - // let bm = sema.binding_mode_of_pat(pat)?; - // let bm = match bm { - // hir::BindingMode::Move => return None, - // hir::BindingMode::Ref(Mutability::Mut) => "ref mut", - // hir::BindingMode::Ref(Mutability::Shared) => "ref", - // }; - // acc.push(InlayHint { - // range, - // kind: InlayKind::BindingModeHint, - // label: bm.to_string().into(), - // tooltip: Some(InlayTooltip::String("Inferred binding mode".into())), - // }); - // } - // _ => (), - // } + match pat { + ast::Pat::IdentPat(pat) if pat.ref_token().is_none() && pat.mut_token().is_none() => { + let bm = sema.binding_mode_of_pat(pat)?; + let bm = match bm { + hir::BindingMode::Move => return None, + hir::BindingMode::Ref(Mutability::Mut) => "ref mut", + hir::BindingMode::Ref(Mutability::Shared) => "ref", + }; + acc.push(InlayHint { + range, + kind: InlayKind::BindingModeHint, + label: bm.to_string().into(), + tooltip: Some(InlayTooltip::String("Inferred binding mode".into())), + }); + } + _ => (), + } Some(()) } @@ -1306,7 +1306,7 @@ mod tests { chaining_hints: false, lifetime_elision_hints: LifetimeElisionHints::Never, closure_return_type_hints: ClosureReturnTypeHints::Never, - adjustment_hints: AdjustmentHints::Always, + adjustment_hints: AdjustmentHints::Never, binding_mode_hints: false, hide_named_constructor_hints: false, hide_closure_initialization_hints: false, @@ -1318,7 +1318,6 @@ mod tests { type_hints: true, parameter_hints: true, chaining_hints: true, - adjustment_hints: AdjustmentHints::Always, closure_return_type_hints: ClosureReturnTypeHints::WithBlock, binding_mode_hints: true, lifetime_elision_hints: LifetimeElisionHints::Always, From ffd7bf8bf9c99d115e7e0c4e6d1cffe60ea3ff84 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 7 Nov 2022 12:59:51 +0100 Subject: [PATCH 0543/1945] Bump 
Cargo rust-version fields to latest stable --- crates/base-db/Cargo.toml | 2 +- crates/cfg/Cargo.toml | 2 +- crates/flycheck/Cargo.toml | 2 +- crates/hir-def/Cargo.toml | 2 +- crates/hir-expand/Cargo.toml | 2 +- crates/hir-ty/Cargo.toml | 2 +- crates/hir/Cargo.toml | 2 +- crates/ide-assists/Cargo.toml | 2 +- crates/ide-completion/Cargo.toml | 2 +- crates/ide-db/Cargo.toml | 2 +- crates/ide-diagnostics/Cargo.toml | 2 +- crates/ide-ssr/Cargo.toml | 2 +- crates/ide/Cargo.toml | 2 +- crates/limit/Cargo.toml | 2 +- crates/mbe/Cargo.toml | 2 +- crates/parser/Cargo.toml | 2 +- crates/paths/Cargo.toml | 2 +- crates/proc-macro-api/Cargo.toml | 2 +- crates/proc-macro-srv-cli/Cargo.toml | 2 +- crates/proc-macro-srv/Cargo.toml | 2 +- crates/proc-macro-test/Cargo.toml | 2 +- crates/proc-macro-test/imp/Cargo.toml | 2 +- crates/profile/Cargo.toml | 2 +- crates/project-model/Cargo.toml | 2 +- crates/rust-analyzer/Cargo.toml | 2 +- crates/sourcegen/Cargo.toml | 2 +- crates/stdx/Cargo.toml | 2 +- crates/syntax/Cargo.toml | 2 +- crates/syntax/fuzz/Cargo.toml | 2 +- crates/test-utils/Cargo.toml | 2 +- crates/text-edit/Cargo.toml | 2 +- crates/toolchain/Cargo.toml | 2 +- crates/tt/Cargo.toml | 2 +- crates/vfs-notify/Cargo.toml | 2 +- crates/vfs/Cargo.toml | 2 +- xtask/Cargo.toml | 2 +- 36 files changed, 36 insertions(+), 36 deletions(-) diff --git a/crates/base-db/Cargo.toml b/crates/base-db/Cargo.toml index f02a51ab6c..a484ecec68 100644 --- a/crates/base-db/Cargo.toml +++ b/crates/base-db/Cargo.toml @@ -4,7 +4,7 @@ version = "0.0.0" description = "TBD" license = "MIT OR Apache-2.0" edition = "2021" -rust-version = "1.57" +rust-version = "1.65" [lib] doctest = false diff --git a/crates/cfg/Cargo.toml b/crates/cfg/Cargo.toml index ee1ad677a9..2857420c28 100644 --- a/crates/cfg/Cargo.toml +++ b/crates/cfg/Cargo.toml @@ -4,7 +4,7 @@ version = "0.0.0" description = "TBD" license = "MIT OR Apache-2.0" edition = "2021" -rust-version = "1.57" +rust-version = "1.65" [lib] doctest = false diff --git a/crates/flycheck/Cargo.toml b/crates/flycheck/Cargo.toml index 6871f90015..514d567fcc 100644 --- a/crates/flycheck/Cargo.toml +++ b/crates/flycheck/Cargo.toml @@ -4,7 +4,7 @@ version = "0.0.0" description = "TBD" license = "MIT OR Apache-2.0" edition = "2021" -rust-version = "1.57" +rust-version = "1.65" [lib] doctest = false diff --git a/crates/hir-def/Cargo.toml b/crates/hir-def/Cargo.toml index 4ad8e75970..22f98ea7cd 100644 --- a/crates/hir-def/Cargo.toml +++ b/crates/hir-def/Cargo.toml @@ -4,7 +4,7 @@ version = "0.0.0" description = "TBD" license = "MIT OR Apache-2.0" edition = "2021" -rust-version = "1.57" +rust-version = "1.65" [lib] doctest = false diff --git a/crates/hir-expand/Cargo.toml b/crates/hir-expand/Cargo.toml index 3359c99b39..77eb1fd450 100644 --- a/crates/hir-expand/Cargo.toml +++ b/crates/hir-expand/Cargo.toml @@ -4,7 +4,7 @@ version = "0.0.0" description = "TBD" license = "MIT OR Apache-2.0" edition = "2021" -rust-version = "1.57" +rust-version = "1.65" [lib] doctest = false diff --git a/crates/hir-ty/Cargo.toml b/crates/hir-ty/Cargo.toml index ed13275bab..a1d6835bfa 100644 --- a/crates/hir-ty/Cargo.toml +++ b/crates/hir-ty/Cargo.toml @@ -4,7 +4,7 @@ version = "0.0.0" description = "TBD" license = "MIT OR Apache-2.0" edition = "2021" -rust-version = "1.57" +rust-version = "1.65" [lib] doctest = false diff --git a/crates/hir/Cargo.toml b/crates/hir/Cargo.toml index e1418de3cd..f780e3f53c 100644 --- a/crates/hir/Cargo.toml +++ b/crates/hir/Cargo.toml @@ -4,7 +4,7 @@ version = "0.0.0" description = "TBD" 
license = "MIT OR Apache-2.0" edition = "2021" -rust-version = "1.57" +rust-version = "1.65" [lib] doctest = false diff --git a/crates/ide-assists/Cargo.toml b/crates/ide-assists/Cargo.toml index 57a41f3d9a..e781c0a016 100644 --- a/crates/ide-assists/Cargo.toml +++ b/crates/ide-assists/Cargo.toml @@ -4,7 +4,7 @@ version = "0.0.0" description = "TBD" license = "MIT OR Apache-2.0" edition = "2021" -rust-version = "1.57" +rust-version = "1.65" [lib] doctest = false diff --git a/crates/ide-completion/Cargo.toml b/crates/ide-completion/Cargo.toml index 75835bce95..11310e2f12 100644 --- a/crates/ide-completion/Cargo.toml +++ b/crates/ide-completion/Cargo.toml @@ -4,7 +4,7 @@ version = "0.0.0" description = "TBD" license = "MIT OR Apache-2.0" edition = "2021" -rust-version = "1.57" +rust-version = "1.65" [lib] doctest = false diff --git a/crates/ide-db/Cargo.toml b/crates/ide-db/Cargo.toml index cf0bcd5c96..f48cce58c6 100644 --- a/crates/ide-db/Cargo.toml +++ b/crates/ide-db/Cargo.toml @@ -4,7 +4,7 @@ version = "0.0.0" description = "TBD" license = "MIT OR Apache-2.0" edition = "2021" -rust-version = "1.57" +rust-version = "1.65" [lib] doctest = false diff --git a/crates/ide-diagnostics/Cargo.toml b/crates/ide-diagnostics/Cargo.toml index e1d146f4ee..7e9a1125d7 100644 --- a/crates/ide-diagnostics/Cargo.toml +++ b/crates/ide-diagnostics/Cargo.toml @@ -4,7 +4,7 @@ version = "0.0.0" description = "TBD" license = "MIT OR Apache-2.0" edition = "2021" -rust-version = "1.57" +rust-version = "1.65" [lib] doctest = false diff --git a/crates/ide-ssr/Cargo.toml b/crates/ide-ssr/Cargo.toml index 4baf786c45..7be62a8d9f 100644 --- a/crates/ide-ssr/Cargo.toml +++ b/crates/ide-ssr/Cargo.toml @@ -5,7 +5,7 @@ description = "Structural search and replace of Rust code" license = "MIT OR Apache-2.0" repository = "https://github.com/rust-lang/rust-analyzer" edition = "2021" -rust-version = "1.57" +rust-version = "1.65" [lib] doctest = false diff --git a/crates/ide/Cargo.toml b/crates/ide/Cargo.toml index 712459a7ee..73f202630f 100644 --- a/crates/ide/Cargo.toml +++ b/crates/ide/Cargo.toml @@ -4,7 +4,7 @@ version = "0.0.0" description = "TBD" license = "MIT OR Apache-2.0" edition = "2021" -rust-version = "1.57" +rust-version = "1.65" [lib] doctest = false diff --git a/crates/limit/Cargo.toml b/crates/limit/Cargo.toml index 893db436d8..3536f73da7 100644 --- a/crates/limit/Cargo.toml +++ b/crates/limit/Cargo.toml @@ -4,7 +4,7 @@ version = "0.0.0" description = "TBD" license = "MIT OR Apache-2.0" edition = "2021" -rust-version = "1.57" +rust-version = "1.65" [features] tracking = [] diff --git a/crates/mbe/Cargo.toml b/crates/mbe/Cargo.toml index 13cd890103..bce2fc9a70 100644 --- a/crates/mbe/Cargo.toml +++ b/crates/mbe/Cargo.toml @@ -4,7 +4,7 @@ version = "0.0.0" description = "TBD" license = "MIT OR Apache-2.0" edition = "2021" -rust-version = "1.57" +rust-version = "1.65" [lib] doctest = false diff --git a/crates/parser/Cargo.toml b/crates/parser/Cargo.toml index a286a6bcdd..d1420de893 100644 --- a/crates/parser/Cargo.toml +++ b/crates/parser/Cargo.toml @@ -4,7 +4,7 @@ version = "0.0.0" description = "TBD" license = "MIT OR Apache-2.0" edition = "2021" -rust-version = "1.57" +rust-version = "1.65" [lib] doctest = false diff --git a/crates/paths/Cargo.toml b/crates/paths/Cargo.toml index 5e83de7d99..d23a63d2a9 100644 --- a/crates/paths/Cargo.toml +++ b/crates/paths/Cargo.toml @@ -4,7 +4,7 @@ version = "0.0.0" description = "TBD" license = "MIT OR Apache-2.0" edition = "2021" -rust-version = "1.57" +rust-version = "1.65" 
[lib] doctest = false diff --git a/crates/proc-macro-api/Cargo.toml b/crates/proc-macro-api/Cargo.toml index 54879c1870..f261f3def4 100644 --- a/crates/proc-macro-api/Cargo.toml +++ b/crates/proc-macro-api/Cargo.toml @@ -4,7 +4,7 @@ version = "0.0.0" description = "TBD" license = "MIT OR Apache-2.0" edition = "2021" -rust-version = "1.57" +rust-version = "1.65" [lib] doctest = false diff --git a/crates/proc-macro-srv-cli/Cargo.toml b/crates/proc-macro-srv-cli/Cargo.toml index 9d0da5dee9..7991e125ab 100644 --- a/crates/proc-macro-srv-cli/Cargo.toml +++ b/crates/proc-macro-srv-cli/Cargo.toml @@ -4,7 +4,7 @@ version = "0.0.0" description = "TBD" license = "MIT OR Apache-2.0" edition = "2021" -rust-version = "1.57" +rust-version = "1.65" [dependencies] proc-macro-srv = { version = "0.0.0", path = "../proc-macro-srv" } diff --git a/crates/proc-macro-srv/Cargo.toml b/crates/proc-macro-srv/Cargo.toml index e39026ac70..a136abc12b 100644 --- a/crates/proc-macro-srv/Cargo.toml +++ b/crates/proc-macro-srv/Cargo.toml @@ -4,7 +4,7 @@ version = "0.0.0" description = "TBD" license = "MIT OR Apache-2.0" edition = "2021" -rust-version = "1.57" +rust-version = "1.65" [lib] doctest = false diff --git a/crates/proc-macro-test/Cargo.toml b/crates/proc-macro-test/Cargo.toml index 684477191b..d2a79f9107 100644 --- a/crates/proc-macro-test/Cargo.toml +++ b/crates/proc-macro-test/Cargo.toml @@ -3,7 +3,7 @@ name = "proc-macro-test" version = "0.0.0" license = "MIT OR Apache-2.0" edition = "2021" -rust-version = "1.57" +rust-version = "1.65" publish = false [lib] diff --git a/crates/proc-macro-test/imp/Cargo.toml b/crates/proc-macro-test/imp/Cargo.toml index 2d1fc3c5c7..1bd14070e9 100644 --- a/crates/proc-macro-test/imp/Cargo.toml +++ b/crates/proc-macro-test/imp/Cargo.toml @@ -3,7 +3,7 @@ name = "proc-macro-test-impl" version = "0.0.0" license = "MIT OR Apache-2.0" edition = "2021" -rust-version = "1.57" +rust-version = "1.65" publish = false [lib] diff --git a/crates/profile/Cargo.toml b/crates/profile/Cargo.toml index 5697aea964..01d1735bf7 100644 --- a/crates/profile/Cargo.toml +++ b/crates/profile/Cargo.toml @@ -4,7 +4,7 @@ version = "0.0.0" description = "TBD" license = "MIT OR Apache-2.0" edition = "2021" -rust-version = "1.57" +rust-version = "1.65" [lib] doctest = false diff --git a/crates/project-model/Cargo.toml b/crates/project-model/Cargo.toml index cf9868740c..39902a5321 100644 --- a/crates/project-model/Cargo.toml +++ b/crates/project-model/Cargo.toml @@ -4,7 +4,7 @@ version = "0.0.0" description = "TBD" license = "MIT OR Apache-2.0" edition = "2021" -rust-version = "1.57" +rust-version = "1.65" [lib] doctest = false diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml index 7ae5324ab0..56f14fe187 100644 --- a/crates/rust-analyzer/Cargo.toml +++ b/crates/rust-analyzer/Cargo.toml @@ -8,7 +8,7 @@ documentation = "https://rust-analyzer.github.io/manual.html" license = "MIT OR Apache-2.0" autobins = false edition = "2021" -rust-version = "1.57" +rust-version = "1.65" [lib] doctest = false diff --git a/crates/sourcegen/Cargo.toml b/crates/sourcegen/Cargo.toml index e75867e2d8..593dc4e55b 100644 --- a/crates/sourcegen/Cargo.toml +++ b/crates/sourcegen/Cargo.toml @@ -4,7 +4,7 @@ version = "0.0.0" description = "TBD" license = "MIT OR Apache-2.0" edition = "2021" -rust-version = "1.57" +rust-version = "1.65" [lib] doctest = false diff --git a/crates/stdx/Cargo.toml b/crates/stdx/Cargo.toml index e0657ab0f6..957d16c036 100644 --- a/crates/stdx/Cargo.toml +++ b/crates/stdx/Cargo.toml 
@@ -4,7 +4,7 @@ version = "0.0.0" description = "TBD" license = "MIT OR Apache-2.0" edition = "2021" -rust-version = "1.57" +rust-version = "1.65" [lib] doctest = false diff --git a/crates/syntax/Cargo.toml b/crates/syntax/Cargo.toml index 1ef903371c..00743cca55 100644 --- a/crates/syntax/Cargo.toml +++ b/crates/syntax/Cargo.toml @@ -5,7 +5,7 @@ description = "Comment and whitespace preserving parser for the Rust language" license = "MIT OR Apache-2.0" repository = "https://github.com/rust-lang/rust-analyzer" edition = "2021" -rust-version = "1.57" +rust-version = "1.65" [lib] doctest = false diff --git a/crates/syntax/fuzz/Cargo.toml b/crates/syntax/fuzz/Cargo.toml index ba2f515b0b..f295c40065 100644 --- a/crates/syntax/fuzz/Cargo.toml +++ b/crates/syntax/fuzz/Cargo.toml @@ -4,7 +4,7 @@ name = "syntax-fuzz" version = "0.0.1" publish = false edition = "2021" -rust-version = "1.57" +rust-version = "1.65" [package.metadata] cargo-fuzz = true diff --git a/crates/test-utils/Cargo.toml b/crates/test-utils/Cargo.toml index cceafe04e3..1047373b1c 100644 --- a/crates/test-utils/Cargo.toml +++ b/crates/test-utils/Cargo.toml @@ -4,7 +4,7 @@ version = "0.0.0" description = "TBD" license = "MIT OR Apache-2.0" edition = "2021" -rust-version = "1.57" +rust-version = "1.65" [lib] doctest = false diff --git a/crates/text-edit/Cargo.toml b/crates/text-edit/Cargo.toml index 7a90d64a98..8df7e1af61 100644 --- a/crates/text-edit/Cargo.toml +++ b/crates/text-edit/Cargo.toml @@ -4,7 +4,7 @@ version = "0.0.0" description = "TBD" license = "MIT OR Apache-2.0" edition = "2021" -rust-version = "1.57" +rust-version = "1.65" [lib] doctest = false diff --git a/crates/toolchain/Cargo.toml b/crates/toolchain/Cargo.toml index 3e0f31f19c..a6a3ae742a 100644 --- a/crates/toolchain/Cargo.toml +++ b/crates/toolchain/Cargo.toml @@ -4,7 +4,7 @@ version = "0.0.0" description = "TBD" license = "MIT OR Apache-2.0" edition = "2021" -rust-version = "1.57" +rust-version = "1.65" [lib] doctest = false diff --git a/crates/tt/Cargo.toml b/crates/tt/Cargo.toml index 52dfb86080..4f2103f3a9 100644 --- a/crates/tt/Cargo.toml +++ b/crates/tt/Cargo.toml @@ -4,7 +4,7 @@ version = "0.0.0" description = "TBD" license = "MIT OR Apache-2.0" edition = "2021" -rust-version = "1.57" +rust-version = "1.65" [lib] doctest = false diff --git a/crates/vfs-notify/Cargo.toml b/crates/vfs-notify/Cargo.toml index df5dc24e2c..061f3c157a 100644 --- a/crates/vfs-notify/Cargo.toml +++ b/crates/vfs-notify/Cargo.toml @@ -4,7 +4,7 @@ version = "0.0.0" description = "TBD" license = "MIT OR Apache-2.0" edition = "2021" -rust-version = "1.57" +rust-version = "1.65" [lib] doctest = false diff --git a/crates/vfs/Cargo.toml b/crates/vfs/Cargo.toml index d7549a2841..e55bf6f293 100644 --- a/crates/vfs/Cargo.toml +++ b/crates/vfs/Cargo.toml @@ -4,7 +4,7 @@ version = "0.0.0" description = "TBD" license = "MIT OR Apache-2.0" edition = "2021" -rust-version = "1.57" +rust-version = "1.65" [lib] doctest = false diff --git a/xtask/Cargo.toml b/xtask/Cargo.toml index 0be0bf920d..95e27beab5 100644 --- a/xtask/Cargo.toml +++ b/xtask/Cargo.toml @@ -4,7 +4,7 @@ version = "0.1.0" publish = false license = "MIT OR Apache-2.0" edition = "2021" -rust-version = "1.57" +rust-version = "1.65" [dependencies] anyhow = "1.0.62" From bdf854701375c0692f9014db71a097520fddd41f Mon Sep 17 00:00:00 2001 From: Noritada Kobayashi Date: Mon, 7 Nov 2022 22:51:29 +0900 Subject: [PATCH 0544/1945] Clarify the intent Thanks to Lukas Wirth for a suggestion. 
--- crates/syntax/src/ast/token_ext.rs | 21 ++++++++++----------- 1 file changed, 10 insertions(+), 11 deletions(-) diff --git a/crates/syntax/src/ast/token_ext.rs b/crates/syntax/src/ast/token_ext.rs index 22ad6db9ae..32dd2db405 100644 --- a/crates/syntax/src/ast/token_ext.rs +++ b/crates/syntax/src/ast/token_ext.rs @@ -209,19 +209,19 @@ impl ast::String { let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()]; let mut buf = String::new(); - let mut prev = 0; + let mut prev_end = 0; let mut has_error = false; unescape_literal(text, Mode::Str, &mut |char_range, unescaped_char| match ( unescaped_char, buf.capacity() == 0, ) { (Ok(c), false) => buf.push(c), - (Ok(_), true) if char_range.len() == 1 && char_range.start == prev => { - prev = char_range.end + (Ok(_), true) if char_range.len() == 1 && char_range.start == prev_end => { + prev_end = char_range.end } (Ok(c), true) => { buf.reserve_exact(text.len()); - buf.push_str(&text[..prev]); + buf.push_str(&text[..prev_end]); buf.push(c); } (Err(_), _) => has_error = true, @@ -254,19 +254,19 @@ impl ast::ByteString { let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()]; let mut buf: Vec = Vec::new(); - let mut prev = 0; + let mut prev_end = 0; let mut has_error = false; unescape_literal(text, Mode::ByteStr, &mut |char_range, unescaped_char| match ( unescaped_char, buf.capacity() == 0, ) { (Ok(c), false) => buf.push(c as u8), - (Ok(_), true) if char_range.len() == 1 && char_range.start == prev => { - prev = char_range.end + (Ok(_), true) if char_range.len() == 1 && char_range.start == prev_end => { + prev_end = char_range.end } (Ok(c), true) => { buf.reserve_exact(text.len()); - buf.extend_from_slice(text[..prev].as_bytes()); + buf.extend_from_slice(text[..prev_end].as_bytes()); buf.push(c as u8); } (Err(_), _) => has_error = true, @@ -449,11 +449,10 @@ mod tests { check_string_value(r"\foobar", None); check_string_value(r"\nfoobar", "\nfoobar"); check_string_value(r"C:\\Windows\\System32\\", "C:\\Windows\\System32\\"); - check_string_value(r"\x61bcde", "a\x62cde"); + check_string_value(r"\x61bcde", "abcde"); check_string_value( r"a\ -bcde", "a\ -bcde", +bcde", "abcde", ); } From 2340d7059e3b89a5233b0c91cf3c36fa94adfe6e Mon Sep 17 00:00:00 2001 From: Noritada Kobayashi Date: Mon, 7 Nov 2022 23:39:02 +0900 Subject: [PATCH 0545/1945] Add test code for unescaping byte strings --- crates/syntax/src/ast/token_ext.rs | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/crates/syntax/src/ast/token_ext.rs b/crates/syntax/src/ast/token_ext.rs index 32dd2db405..8990f7a7d4 100644 --- a/crates/syntax/src/ast/token_ext.rs +++ b/crates/syntax/src/ast/token_ext.rs @@ -456,6 +456,31 @@ bcde", "abcde", ); } + fn check_byte_string_value<'a, const N: usize>( + lit: &str, + expected: impl Into>, + ) { + assert_eq!( + ast::ByteString { syntax: make::tokens::literal(&format!("b\"{}\"", lit)) } + .value() + .as_deref(), + expected.into().map(|value| &value[..]) + ); + } + + #[test] + fn test_byte_string_escape() { + check_byte_string_value(r"foobar", b"foobar"); + check_byte_string_value(r"\foobar", None::<&[u8; 0]>); + check_byte_string_value(r"\nfoobar", b"\nfoobar"); + check_byte_string_value(r"C:\\Windows\\System32\\", b"C:\\Windows\\System32\\"); + check_byte_string_value(r"\x61bcde", b"abcde"); + check_byte_string_value( + r"a\ +bcde", b"abcde", + ); + } + #[test] fn test_value_underscores() { check_float_value("3.141592653589793_f64", 3.141592653589793_f64); From 
b169e1e5decbbda7ad09a8fa07fb32ca83ec7a50 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 7 Nov 2022 15:49:26 +0100 Subject: [PATCH 0546/1945] Remove code duplication --- crates/ide/src/moniker.rs | 20 ++++---------------- crates/ide/src/static_index.rs | 5 +++-- 2 files changed, 7 insertions(+), 18 deletions(-) diff --git a/crates/ide/src/moniker.rs b/crates/ide/src/moniker.rs index 07d117aff1..fcbf6d8e58 100644 --- a/crates/ide/src/moniker.rs +++ b/crates/ide/src/moniker.rs @@ -1,9 +1,9 @@ //! This module generates [moniker](https://microsoft.github.io/language-server-protocol/specifications/lsif/0.6.0/specification/#exportsImports) //! for LSIF and LSP. -use hir::{db::DefDatabase, AsAssocItem, AssocItemContainer, Crate, Name, Semantics}; +use hir::{AsAssocItem, AssocItemContainer, Crate, Name, Semantics}; use ide_db::{ - base_db::{CrateOrigin, FileId, FileLoader, FilePosition, LangCrateOrigin}, + base_db::{CrateOrigin, FilePosition, LangCrateOrigin}, defs::{Definition, IdentClass}, helpers::pick_best_token, RootDatabase, @@ -11,7 +11,7 @@ use ide_db::{ use itertools::Itertools; use syntax::{AstNode, SyntaxKind::*, T}; -use crate::{doc_links::token_as_doc_comment, RangeInfo}; +use crate::{doc_links::token_as_doc_comment, parent_module::crates_for, RangeInfo}; #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] pub enum MonikerDescriptorKind { @@ -77,25 +77,13 @@ pub struct PackageInformation { pub version: Option, } -pub(crate) fn crate_for_file(db: &RootDatabase, file_id: FileId) -> Option { - for &krate in db.relevant_crates(file_id).iter() { - let crate_def_map = db.crate_def_map(krate); - for (_, data) in crate_def_map.modules() { - if data.origin.file_id() == Some(file_id) { - return Some(krate.into()); - } - } - } - None -} - pub(crate) fn moniker( db: &RootDatabase, FilePosition { file_id, offset }: FilePosition, ) -> Option>> { let sema = &Semantics::new(db); let file = sema.parse(file_id).syntax().clone(); - let current_crate = crate_for_file(db, file_id)?; + let current_crate: hir::Crate = crates_for(db, file_id).pop()?.into(); let original_token = pick_best_token(file.token_at_offset(offset), |kind| match kind { IDENT | INT_NUMBER diff --git a/crates/ide/src/static_index.rs b/crates/ide/src/static_index.rs index 27ad1a948d..954d3b018a 100644 --- a/crates/ide/src/static_index.rs +++ b/crates/ide/src/static_index.rs @@ -13,7 +13,8 @@ use syntax::{AstNode, SyntaxKind::*, SyntaxToken, TextRange, T}; use crate::{ hover::hover_for_definition, - moniker::{crate_for_file, def_to_moniker, MonikerResult}, + moniker::{def_to_moniker, MonikerResult}, + parent_module::crates_for, Analysis, Fold, HoverConfig, HoverDocFormat, HoverResult, InlayHint, InlayHintsConfig, TryToNav, }; @@ -99,7 +100,7 @@ fn all_modules(db: &dyn HirDatabase) -> Vec { impl StaticIndex<'_> { fn add_file(&mut self, file_id: FileId) { - let current_crate = crate_for_file(self.db, file_id); + let current_crate = crates_for(self.db, file_id).pop().map(Into::into); let folds = self.analysis.folding_ranges(file_id).unwrap(); let inlay_hints = self .analysis From 6a06f6f724d385c61e428ef46e6bf9e13e1c37ce Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 7 Nov 2022 16:48:50 +0100 Subject: [PATCH 0547/1945] Deduplicate reference search results --- crates/ide/src/references.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/crates/ide/src/references.rs b/crates/ide/src/references.rs index e942413c11..0f758cfa2d 100644 --- a/crates/ide/src/references.rs +++ b/crates/ide/src/references.rs @@ -16,6 +16,7 @@ 
use ide_db::{ search::{ReferenceCategory, SearchScope, UsageSearchResult}, RootDatabase, }; +use itertools::Itertools; use stdx::hash::NoHashHashMap; use syntax::{ algo::find_node_at_offset, @@ -86,6 +87,7 @@ pub(crate) fn find_all_refs( file_id, refs.into_iter() .map(|file_ref| (file_ref.range, file_ref.category)) + .unique() .collect(), ) }) From fa70b0a86ec89ea53c0855caba42d121cbcc5697 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 7 Nov 2022 17:21:37 +0100 Subject: [PATCH 0548/1945] internal: Use Cancellable in favor of Result for clarity --- crates/ide/src/inlay_hints.rs | 10 ++--- crates/ide/src/lib.rs | 2 +- crates/rust-analyzer/src/cargo_target_spec.rs | 10 ++--- crates/rust-analyzer/src/handlers.rs | 19 +++++----- crates/rust-analyzer/src/mem_docs.rs | 8 +++- crates/rust-analyzer/src/to_proto.rs | 38 +++++++++---------- 6 files changed, 44 insertions(+), 43 deletions(-) diff --git a/crates/ide/src/inlay_hints.rs b/crates/ide/src/inlay_hints.rs index 34d8bf67a3..8ef122528b 100644 --- a/crates/ide/src/inlay_hints.rs +++ b/crates/ide/src/inlay_hints.rs @@ -180,7 +180,7 @@ impl fmt::Debug for InlayHintLabelPart { pub(crate) fn inlay_hints( db: &RootDatabase, file_id: FileId, - range_limit: Option, + range_limit: Option, config: &InlayHintsConfig, ) -> Vec { let _p = profile::span("inlay_hints"); @@ -195,7 +195,7 @@ pub(crate) fn inlay_hints( let hints = |node| hints(&mut acc, &famous_defs, config, file_id, node); match range_limit { - Some(FileRange { range, .. }) => match file.covering_element(range) { + Some(range) => match file.covering_element(range) { NodeOrToken::Token(_) => return acc, NodeOrToken::Node(n) => n .descendants() @@ -1213,7 +1213,6 @@ fn get_callable( #[cfg(test)] mod tests { use expect_test::{expect, Expect}; - use ide_db::base_db::FileRange; use itertools::Itertools; use syntax::{TextRange, TextSize}; use test_utils::extract_annotations; @@ -1838,10 +1837,7 @@ fn main() { .inlay_hints( &InlayHintsConfig { type_hints: true, ..DISABLED_CONFIG }, file_id, - Some(FileRange { - file_id, - range: TextRange::new(TextSize::from(500), TextSize::from(600)), - }), + Some(TextRange::new(TextSize::from(500), TextSize::from(600))), ) .unwrap(); let actual = diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs index 416817ca0b..841a5832c5 100644 --- a/crates/ide/src/lib.rs +++ b/crates/ide/src/lib.rs @@ -367,7 +367,7 @@ impl Analysis { &self, config: &InlayHintsConfig, file_id: FileId, - range: Option, + range: Option, ) -> Cancellable> { self.with_db(|db| inlay_hints::inlay_hints(db, file_id, range, config)) } diff --git a/crates/rust-analyzer/src/cargo_target_spec.rs b/crates/rust-analyzer/src/cargo_target_spec.rs index 6ede194bab..cf51cf15a0 100644 --- a/crates/rust-analyzer/src/cargo_target_spec.rs +++ b/crates/rust-analyzer/src/cargo_target_spec.rs @@ -3,11 +3,11 @@ use std::mem; use cfg::{CfgAtom, CfgExpr}; -use ide::{FileId, RunnableKind, TestId}; +use ide::{Cancellable, FileId, RunnableKind, TestId}; use project_model::{self, CargoFeatures, ManifestPath, TargetKind}; use vfs::AbsPathBuf; -use crate::{global_state::GlobalStateSnapshot, Result}; +use crate::global_state::GlobalStateSnapshot; /// Abstract representation of Cargo target. 
/// @@ -29,7 +29,7 @@ impl CargoTargetSpec { spec: Option, kind: &RunnableKind, cfg: &Option, - ) -> Result<(Vec, Vec)> { + ) -> (Vec, Vec) { let mut args = Vec::new(); let mut extra_args = Vec::new(); @@ -111,13 +111,13 @@ impl CargoTargetSpec { } } } - Ok((args, extra_args)) + (args, extra_args) } pub(crate) fn for_file( global_state_snapshot: &GlobalStateSnapshot, file_id: FileId, - ) -> Result> { + ) -> Cancellable> { let crate_id = match &*global_state_snapshot.analysis.crates_for(file_id)? { &[crate_id, ..] => crate_id, _ => return Ok(None), diff --git a/crates/rust-analyzer/src/handlers.rs b/crates/rust-analyzer/src/handlers.rs index 34795a8eb4..d190a9f4e2 100644 --- a/crates/rust-analyzer/src/handlers.rs +++ b/crates/rust-analyzer/src/handlers.rs @@ -9,9 +9,9 @@ use std::{ use anyhow::Context; use ide::{ - AnnotationConfig, AssistKind, AssistResolveStrategy, FileId, FilePosition, FileRange, - HoverAction, HoverGotoTypeData, Query, RangeInfo, ReferenceCategory, Runnable, RunnableKind, - SingleResolve, SourceChange, TextEdit, + AnnotationConfig, AssistKind, AssistResolveStrategy, Cancellable, FileId, FilePosition, + FileRange, HoverAction, HoverGotoTypeData, Query, RangeInfo, ReferenceCategory, Runnable, + RunnableKind, SingleResolve, SourceChange, TextEdit, }; use ide_db::SymbolKind; use lsp_server::ErrorCode; @@ -556,7 +556,7 @@ pub(crate) fn handle_will_rename_files( if source_change.source_file_edits.is_empty() { Ok(None) } else { - to_proto::workspace_edit(&snap, source_change).map(Some) + Ok(Some(to_proto::workspace_edit(&snap, source_change)?)) } } @@ -1313,7 +1313,7 @@ pub(crate) fn handle_ssr( position, selections, )??; - to_proto::workspace_edit(&snap, source_change) + to_proto::workspace_edit(&snap, source_change).map_err(Into::into) } pub(crate) fn publish_diagnostics( @@ -1354,13 +1354,12 @@ pub(crate) fn handle_inlay_hints( ) -> Result>> { let _p = profile::span("handle_inlay_hints"); let document_uri = ¶ms.text_document.uri; - let file_id = from_proto::file_id(&snap, document_uri)?; - let line_index = snap.file_line_index(file_id)?; - let range = from_proto::file_range( + let FileRange { file_id, range } = from_proto::file_range( &snap, TextDocumentIdentifier::new(document_uri.to_owned()), params.range, )?; + let line_index = snap.file_line_index(file_id)?; let inlay_hints_config = snap.config.inlay_hints(); Ok(Some( snap.analysis @@ -1369,7 +1368,7 @@ pub(crate) fn handle_inlay_hints( .map(|it| { to_proto::inlay_hint(&snap, &line_index, inlay_hints_config.render_colons, it) }) - .collect::>>()?, + .collect::>>()?, )) } @@ -1426,7 +1425,7 @@ pub(crate) fn handle_call_hierarchy_prepare( .into_iter() .filter(|it| it.kind == Some(SymbolKind::Function)) .map(|it| to_proto::call_hierarchy_item(&snap, it)) - .collect::>>()?; + .collect::>>()?; Ok(Some(res)) } diff --git a/crates/rust-analyzer/src/mem_docs.rs b/crates/rust-analyzer/src/mem_docs.rs index f86a0f66ad..45a1dab977 100644 --- a/crates/rust-analyzer/src/mem_docs.rs +++ b/crates/rust-analyzer/src/mem_docs.rs @@ -7,7 +7,7 @@ use vfs::VfsPath; /// Holds the set of in-memory documents. /// -/// For these document, there true contents is maintained by the client. It +/// For these document, their true contents is maintained by the client. It /// might be different from what's on disk. 
#[derive(Default, Clone)] pub(crate) struct MemDocs { @@ -19,6 +19,7 @@ impl MemDocs { pub(crate) fn contains(&self, path: &VfsPath) -> bool { self.mem_docs.contains_key(path) } + pub(crate) fn insert(&mut self, path: VfsPath, data: DocumentData) -> Result<(), ()> { self.added_or_removed = true; match self.mem_docs.insert(path, data) { @@ -26,6 +27,7 @@ impl MemDocs { None => Ok(()), } } + pub(crate) fn remove(&mut self, path: &VfsPath) -> Result<(), ()> { self.added_or_removed = true; match self.mem_docs.remove(path) { @@ -33,17 +35,21 @@ impl MemDocs { None => Err(()), } } + pub(crate) fn get(&self, path: &VfsPath) -> Option<&DocumentData> { self.mem_docs.get(path) } + pub(crate) fn get_mut(&mut self, path: &VfsPath) -> Option<&mut DocumentData> { // NB: don't set `self.added_or_removed` here, as that purposefully only // tracks changes to the key set. self.mem_docs.get_mut(path) } + pub(crate) fn iter(&self) -> impl Iterator { self.mem_docs.keys() } + pub(crate) fn take_changes(&mut self) -> bool { mem::replace(&mut self.added_or_removed, false) } diff --git a/crates/rust-analyzer/src/to_proto.rs b/crates/rust-analyzer/src/to_proto.rs index 6c84a2069c..830b071b94 100644 --- a/crates/rust-analyzer/src/to_proto.rs +++ b/crates/rust-analyzer/src/to_proto.rs @@ -24,7 +24,7 @@ use crate::{ line_index::{LineEndings, LineIndex, PositionEncoding}, lsp_ext, lsp_utils::invalid_params_error, - semantic_tokens, Result, + semantic_tokens, }; pub(crate) fn position(line_index: &LineIndex, offset: TextSize) -> lsp_types::Position { @@ -429,7 +429,7 @@ pub(crate) fn inlay_hint( line_index: &LineIndex, render_colons: bool, mut inlay_hint: InlayHint, -) -> Result { +) -> Cancellable { match inlay_hint.kind { InlayKind::ParameterHint if render_colons => inlay_hint.label.append_str(":"), InlayKind::TypeHint if render_colons => inlay_hint.label.prepend_str(": "), @@ -518,7 +518,7 @@ pub(crate) fn inlay_hint( fn inlay_hint_label( snap: &GlobalStateSnapshot, label: InlayHintLabel, -) -> Result { +) -> Cancellable { Ok(match label.as_simple_str() { Some(s) => lsp_types::InlayHintLabel::String(s.into()), None => lsp_types::InlayHintLabel::LabelParts( @@ -536,7 +536,7 @@ fn inlay_hint_label( command: None, }) }) - .collect::>>()?, + .collect::>>()?, ), }) } @@ -794,7 +794,7 @@ pub(crate) fn optional_versioned_text_document_identifier( pub(crate) fn location( snap: &GlobalStateSnapshot, frange: FileRange, -) -> Result { +) -> Cancellable { let url = url(snap, frange.file_id); let line_index = snap.file_line_index(frange.file_id)?; let range = range(&line_index, frange.range); @@ -806,7 +806,7 @@ pub(crate) fn location( pub(crate) fn location_from_nav( snap: &GlobalStateSnapshot, nav: NavigationTarget, -) -> Result { +) -> Cancellable { let url = url(snap, nav.file_id); let line_index = snap.file_line_index(nav.file_id)?; let range = range(&line_index, nav.full_range); @@ -818,7 +818,7 @@ pub(crate) fn location_link( snap: &GlobalStateSnapshot, src: Option, target: NavigationTarget, -) -> Result { +) -> Cancellable { let origin_selection_range = match src { Some(src) => { let line_index = snap.file_line_index(src.file_id)?; @@ -840,7 +840,7 @@ pub(crate) fn location_link( fn location_info( snap: &GlobalStateSnapshot, target: NavigationTarget, -) -> Result<(lsp_types::Url, lsp_types::Range, lsp_types::Range)> { +) -> Cancellable<(lsp_types::Url, lsp_types::Range, lsp_types::Range)> { let line_index = snap.file_line_index(target.file_id)?; let target_uri = url(snap, target.file_id); @@ -854,12 +854,12 @@ pub(crate) 
fn goto_definition_response( snap: &GlobalStateSnapshot, src: Option, targets: Vec, -) -> Result { +) -> Cancellable { if snap.config.location_link() { let links = targets .into_iter() .map(|nav| location_link(snap, src, nav)) - .collect::>>()?; + .collect::>>()?; Ok(links.into()) } else { let locations = targets @@ -867,7 +867,7 @@ pub(crate) fn goto_definition_response( .map(|nav| { location(snap, FileRange { file_id: nav.file_id, range: nav.focus_or_full_range() }) }) - .collect::>>()?; + .collect::>>()?; Ok(locations.into()) } } @@ -881,7 +881,7 @@ pub(crate) fn snippet_text_document_edit( is_snippet: bool, file_id: FileId, edit: TextEdit, -) -> Result { +) -> Cancellable { let text_document = optional_versioned_text_document_identifier(snap, file_id); let line_index = snap.file_line_index(file_id)?; let mut edits: Vec<_> = @@ -958,7 +958,7 @@ pub(crate) fn snippet_text_document_ops( pub(crate) fn snippet_workspace_edit( snap: &GlobalStateSnapshot, source_change: SourceChange, -) -> Result { +) -> Cancellable { let mut document_changes: Vec = Vec::new(); for op in source_change.file_system_edits { @@ -995,7 +995,7 @@ pub(crate) fn snippet_workspace_edit( pub(crate) fn workspace_edit( snap: &GlobalStateSnapshot, source_change: SourceChange, -) -> Result { +) -> Cancellable { assert!(!source_change.is_snippet); snippet_workspace_edit(snap, source_change).map(|it| it.into()) } @@ -1048,7 +1048,7 @@ impl From pub(crate) fn call_hierarchy_item( snap: &GlobalStateSnapshot, target: NavigationTarget, -) -> Result { +) -> Cancellable { let name = target.name.to_string(); let detail = target.description.clone(); let kind = target.kind.map(symbol_kind).unwrap_or(lsp_types::SymbolKind::FUNCTION); @@ -1080,7 +1080,7 @@ pub(crate) fn code_action( snap: &GlobalStateSnapshot, assist: Assist, resolve_data: Option<(usize, lsp_types::CodeActionParams)>, -) -> Result { +) -> Cancellable { let mut res = lsp_ext::CodeAction { title: assist.label.to_string(), group: assist.group.filter(|_| snap.config.code_action_group()).map(|gr| gr.0), @@ -1113,13 +1113,13 @@ pub(crate) fn code_action( pub(crate) fn runnable( snap: &GlobalStateSnapshot, runnable: Runnable, -) -> Result { +) -> Cancellable { let config = snap.config.runnables(); let spec = CargoTargetSpec::for_file(snap, runnable.nav.file_id)?; let workspace_root = spec.as_ref().map(|it| it.workspace_root.clone()); let target = spec.as_ref().map(|s| s.target.clone()); let (cargo_args, executable_args) = - CargoTargetSpec::runnable_args(snap, spec, &runnable.kind, &runnable.cfg)?; + CargoTargetSpec::runnable_args(snap, spec, &runnable.kind, &runnable.cfg); let label = runnable.label(target); let location = location_link(snap, None, runnable.nav)?; @@ -1142,7 +1142,7 @@ pub(crate) fn code_lens( acc: &mut Vec, snap: &GlobalStateSnapshot, annotation: Annotation, -) -> Result<()> { +) -> Cancellable<()> { let client_commands_config = snap.config.client_commands(); match annotation.kind { AnnotationKind::Runnable(run) => { From 1cb6ab89eebb76953864b3d7fae8ed068fddc5a0 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 7 Nov 2022 17:43:22 +0100 Subject: [PATCH 0549/1945] internal: error instead of panic on invalid file range --- crates/rust-analyzer/src/from_proto.rs | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/crates/rust-analyzer/src/from_proto.rs b/crates/rust-analyzer/src/from_proto.rs index 936957bab4..dd433b0f4d 100644 --- a/crates/rust-analyzer/src/from_proto.rs +++ b/crates/rust-analyzer/src/from_proto.rs @@ -42,8 +42,10 @@ 
pub(crate) fn offset(line_index: &LineIndex, position: lsp_types::Position) -> R pub(crate) fn text_range(line_index: &LineIndex, range: lsp_types::Range) -> Result { let start = offset(line_index, range.start)?; let end = offset(line_index, range.end)?; - let text_range = TextRange::new(start, end); - Ok(text_range) + match end < start { + true => Err(format_err!("Invalid Range").into()), + false => Ok(TextRange::new(start, end)), + } } pub(crate) fn file_id(snap: &GlobalStateSnapshot, url: &lsp_types::Url) -> Result { From 90e2db81269f285cd1a0232e35cfd4519e8ce5f3 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 8 Nov 2022 08:37:45 +0100 Subject: [PATCH 0550/1945] fix: Fix item completions not working properly after unit structs and outline modules --- crates/ide-completion/src/context/analysis.rs | 8 +++-- crates/ide-completion/src/tests/item_list.rs | 32 +++++++++++++++++++ 2 files changed, 38 insertions(+), 2 deletions(-) diff --git a/crates/ide-completion/src/context/analysis.rs b/crates/ide-completion/src/context/analysis.rs index 04111ec7ef..c142a7305f 100644 --- a/crates/ide-completion/src/context/analysis.rs +++ b/crates/ide-completion/src/context/analysis.rs @@ -681,9 +681,13 @@ fn classify_name_ref( ast::Item::ExternBlock(it) => it.extern_item_list().is_none(), ast::Item::Fn(it) => it.body().is_none(), ast::Item::Impl(it) => it.assoc_item_list().is_none(), - ast::Item::Module(it) => it.item_list().is_none(), + ast::Item::Module(it) => { + it.item_list().is_none() && it.semicolon_token().is_none() + } ast::Item::Static(it) => it.body().is_none(), - ast::Item::Struct(it) => it.field_list().is_none(), + ast::Item::Struct(it) => { + it.field_list().is_none() && it.semicolon_token().is_none() + } ast::Item::Trait(it) => it.assoc_item_list().is_none(), ast::Item::TypeAlias(it) => it.ty().is_none(), ast::Item::Union(it) => it.record_field_list().is_none(), diff --git a/crates/ide-completion/src/tests/item_list.rs b/crates/ide-completion/src/tests/item_list.rs index 5076c6e86c..8ed6cb3cf8 100644 --- a/crates/ide-completion/src/tests/item_list.rs +++ b/crates/ide-completion/src/tests/item_list.rs @@ -245,3 +245,35 @@ impl Test for () { "#]], ); } + +#[test] +fn after_unit_struct() { + check( + r#"struct S; f$0"#, + expect![[r#" + ma makro!(…) macro_rules! 
makro + md module + kw const + kw crate:: + kw enum + kw extern + kw fn + kw impl + kw mod + kw pub + kw pub(crate) + kw pub(super) + kw self:: + kw static + kw struct + kw trait + kw type + kw union + kw unsafe + kw use + sn macro_rules + sn tfn (Test function) + sn tmod (Test module) + "#]], + ); +} From 4403dde711dd947670b1cf52e39c25a91edd9f4e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Tue, 8 Nov 2022 09:57:05 +0200 Subject: [PATCH 0551/1945] Nest Cargo.lock under Cargo.toml in Code --- editors/code/package.json | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/editors/code/package.json b/editors/code/package.json index 4357dc7306..9c9f120efa 100644 --- a/editors/code/package.json +++ b/editors/code/package.json @@ -1276,6 +1276,11 @@ "$generated-end": {} } }, + "configurationDefaults": { + "explorer.fileNesting.patterns": { + "Cargo.toml": "Cargo.lock" + } + }, "problemPatterns": [ { "name": "rustc", From 9be0615bde83a4a67c8319bce31f5929239d2fd9 Mon Sep 17 00:00:00 2001 From: Jonas Schievink Date: Tue, 8 Nov 2022 18:05:07 +0100 Subject: [PATCH 0552/1945] Don't canonicalize self type when querying FnOnce signature --- crates/hir-ty/src/lib.rs | 48 +++++++++++++++----------------- crates/hir/src/lib.rs | 3 +- crates/ide/src/signature_help.rs | 31 +++++++++++++++++++++ 3 files changed, 54 insertions(+), 28 deletions(-) diff --git a/crates/hir-ty/src/lib.rs b/crates/hir-ty/src/lib.rs index ad33053ad0..39514fc44e 100644 --- a/crates/hir-ty/src/lib.rs +++ b/crates/hir-ty/src/lib.rs @@ -523,7 +523,7 @@ where } pub fn callable_sig_from_fnonce( - self_ty: &Canonical, + self_ty: &Ty, env: Arc, db: &dyn HirDatabase, ) -> Option { @@ -531,27 +531,28 @@ pub fn callable_sig_from_fnonce( let fn_once_trait = FnTrait::FnOnce.get_id(db, krate)?; let output_assoc_type = db.trait_data(fn_once_trait).associated_type_by_name(&name![Output])?; - let mut kinds = self_ty.binders.interned().to_vec(); let b = TyBuilder::trait_ref(db, fn_once_trait); if b.remaining() != 2 { return None; } - let fn_once = b - .push(self_ty.value.clone()) - .fill_with_bound_vars(DebruijnIndex::INNERMOST, kinds.len()) - .build(); - kinds.extend(fn_once.substitution.iter(Interner).skip(1).map(|x| { - let vk = match x.data(Interner) { - chalk_ir::GenericArgData::Ty(_) => { - chalk_ir::VariableKind::Ty(chalk_ir::TyVariableKind::General) - } - chalk_ir::GenericArgData::Lifetime(_) => chalk_ir::VariableKind::Lifetime, - chalk_ir::GenericArgData::Const(c) => { - chalk_ir::VariableKind::Const(c.data(Interner).ty.clone()) - } - }; - chalk_ir::WithKind::new(vk, UniverseIndex::ROOT) - })); + let fn_once = b.push(self_ty.clone()).fill_with_bound_vars(DebruijnIndex::INNERMOST, 0).build(); + let kinds = fn_once + .substitution + .iter(Interner) + .skip(1) + .map(|x| { + let vk = match x.data(Interner) { + chalk_ir::GenericArgData::Ty(_) => { + chalk_ir::VariableKind::Ty(chalk_ir::TyVariableKind::General) + } + chalk_ir::GenericArgData::Lifetime(_) => chalk_ir::VariableKind::Lifetime, + chalk_ir::GenericArgData::Const(c) => { + chalk_ir::VariableKind::Const(c.data(Interner).ty.clone()) + } + }; + chalk_ir::WithKind::new(vk, UniverseIndex::ROOT) + }) + .collect::>(); // FIXME: chalk refuses to solve `>::Output == ^0.1`, so we first solve // `>` and then replace `^0.0` with the concrete argument tuple. 
@@ -563,21 +564,16 @@ pub fn callable_sig_from_fnonce( Some(Solution::Unique(vars)) => vars.value.subst, _ => return None, }; - let args = subst.at(Interner, self_ty.binders.interned().len()).ty(Interner)?; + let args = subst.at(Interner, 0).ty(Interner)?; let params = match args.kind(Interner) { chalk_ir::TyKind::Tuple(_, subst) => { subst.iter(Interner).filter_map(|arg| arg.ty(Interner).cloned()).collect::>() } _ => return None, }; - if params.iter().any(|ty| ty.is_unknown()) { - return None; - } - let fn_once = TyBuilder::trait_ref(db, fn_once_trait) - .push(self_ty.value.clone()) - .push(args.clone()) - .build(); + let fn_once = + TyBuilder::trait_ref(db, fn_once_trait).push(self_ty.clone()).push(args.clone()).build(); let projection = TyBuilder::assoc_type_projection(db, output_assoc_type, Some(fn_once.substitution.clone())) .build(); diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 9d77f343bc..cbbcaebb42 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -2997,8 +2997,7 @@ impl Type { TyKind::Function(_) => Callee::FnPtr, TyKind::FnDef(..) => Callee::Def(self.ty.callable_def(db)?), _ => { - let ty = hir_ty::replace_errors_with_variables(&self.ty); - let sig = hir_ty::callable_sig_from_fnonce(&ty, self.env.clone(), db)?; + let sig = hir_ty::callable_sig_from_fnonce(&self.ty, self.env.clone(), db)?; return Some(Callable { ty: self.clone(), sig, diff --git a/crates/ide/src/signature_help.rs b/crates/ide/src/signature_help.rs index 7486b20293..e7412d27fa 100644 --- a/crates/ide/src/signature_help.rs +++ b/crates/ide/src/signature_help.rs @@ -1345,5 +1345,36 @@ fn f i32>(f: F) { ^^ --- "#]], ); + check( + r#" +fn f &T>(f: F) { + f($0) +} +"#, + expect![[r#" + (&T, u16) -> &T + ^^ --- + "#]], + ); + } + + #[test] + fn regression_13579() { + check( + r#" +fn f() { + take(2)($0); +} + +fn take( + count: C +) -> impl Fn() -> C { + move || count +} +"#, + expect![[r#" + () -> i32 + "#]], + ); } } From 3c35d44f55d0af18116a8403533f83c2ebb9bf6f Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Wed, 9 Nov 2022 20:50:18 +0100 Subject: [PATCH 0553/1945] Add proc-macro dependency to rustc_private crates --- crates/project-model/src/workspace.rs | 77 ++++++++++++++++----------- 1 file changed, 45 insertions(+), 32 deletions(-) diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs index 2780c62ed1..4a2f468de7 100644 --- a/crates/project-model/src/workspace.rs +++ b/crates/project-model/src/workspace.rs @@ -509,14 +509,14 @@ impl ProjectWorkspace { build_scripts, toolchain: _, } => cargo_to_crate_graph( - rustc_cfg.clone(), - cfg_overrides, load_proc_macro, load, - cargo, - build_scripts, - sysroot.as_ref(), rustc, + cargo, + sysroot.as_ref(), + rustc_cfg.clone(), + cfg_overrides, + build_scripts, ), ProjectWorkspace::DetachedFiles { files, sysroot, rustc_cfg } => { detached_files_to_crate_graph(rustc_cfg.clone(), load, files, sysroot) @@ -602,7 +602,7 @@ fn project_json_to_crate_graph( for (from, krate) in project.crates() { if let Some(&from) = crates.get(&from) { if let Some((public_deps, libproc_macro)) = &sysroot_deps { - public_deps.add(from, &mut crate_graph); + public_deps.add_to_crate_graph(&mut crate_graph, from); if krate.is_proc_macro { if let Some(proc_macro) = libproc_macro { add_dep( @@ -626,14 +626,14 @@ fn project_json_to_crate_graph( } fn cargo_to_crate_graph( - rustc_cfg: Vec, - override_cfg: &CfgOverrides, load_proc_macro: &mut dyn FnMut(&str, &AbsPath) -> ProcMacroLoadResult, load: &mut dyn FnMut(&AbsPath) -> Option, 
- cargo: &CargoWorkspace, - build_scripts: &WorkspaceBuildScripts, - sysroot: Option<&Sysroot>, rustc: &Option, + cargo: &CargoWorkspace, + sysroot: Option<&Sysroot>, + rustc_cfg: Vec, + override_cfg: &CfgOverrides, + build_scripts: &WorkspaceBuildScripts, ) -> CrateGraph { let _p = profile::span("cargo_to_crate_graph"); let mut crate_graph = CrateGraph::default(); @@ -642,13 +642,15 @@ fn cargo_to_crate_graph( None => (SysrootPublicDeps::default(), None), }; - let mut cfg_options = CfgOptions::default(); - cfg_options.extend(rustc_cfg); + let cfg_options = { + let mut cfg_options = CfgOptions::default(); + cfg_options.extend(rustc_cfg); + cfg_options.insert_atom("debug_assertions".into()); + cfg_options + }; let mut pkg_to_lib_crate = FxHashMap::default(); - cfg_options.insert_atom("debug_assertions".into()); - let mut pkg_crates = FxHashMap::default(); // Does any crate signal to rust-analyzer that they need the rustc_private crates? let mut has_private = false; @@ -723,7 +725,7 @@ fn cargo_to_crate_graph( // Set deps to the core, std and to the lib target of the current package for &(from, kind) in pkg_crates.get(&pkg).into_iter().flatten() { // Add sysroot deps first so that a lib target named `core` etc. can overwrite them. - public_deps.add(from, &mut crate_graph); + public_deps.add_to_crate_graph(&mut crate_graph, from); if let Some((to, name)) = lib_tgt.clone() { if to != from && kind != TargetKind::BuildScript { @@ -767,15 +769,16 @@ fn cargo_to_crate_graph( if let Some(rustc_workspace) = rustc { handle_rustc_crates( &mut crate_graph, - rustc_workspace, + &mut pkg_to_lib_crate, load, + load_proc_macro, + rustc_workspace, + cargo, + &public_deps, + libproc_macro, + &pkg_crates, &cfg_options, override_cfg, - load_proc_macro, - &mut pkg_to_lib_crate, - &public_deps, - cargo, - &pkg_crates, build_scripts, ); } @@ -825,28 +828,29 @@ fn detached_files_to_crate_graph( }, ); - public_deps.add(detached_file_crate, &mut crate_graph); + public_deps.add_to_crate_graph(&mut crate_graph, detached_file_crate); } crate_graph } fn handle_rustc_crates( crate_graph: &mut CrateGraph, - rustc_workspace: &CargoWorkspace, + pkg_to_lib_crate: &mut FxHashMap, load: &mut dyn FnMut(&AbsPath) -> Option, + load_proc_macro: &mut dyn FnMut(&str, &AbsPath) -> ProcMacroLoadResult, + rustc_workspace: &CargoWorkspace, + cargo: &CargoWorkspace, + public_deps: &SysrootPublicDeps, + libproc_macro: Option, + pkg_crates: &FxHashMap>, cfg_options: &CfgOptions, override_cfg: &CfgOverrides, - load_proc_macro: &mut dyn FnMut(&str, &AbsPath) -> ProcMacroLoadResult, - pkg_to_lib_crate: &mut FxHashMap, - public_deps: &SysrootPublicDeps, - cargo: &CargoWorkspace, - pkg_crates: &FxHashMap>, build_scripts: &WorkspaceBuildScripts, ) { let mut rustc_pkg_crates = FxHashMap::default(); // The root package of the rustc-dev component is rustc_driver, so we match that let root_pkg = - rustc_workspace.packages().find(|package| rustc_workspace[*package].name == "rustc_driver"); + rustc_workspace.packages().find(|&package| rustc_workspace[package].name == "rustc_driver"); // The rustc workspace might be incomplete (such as if rustc-dev is not // installed for the current toolchain) and `rustc_source` is set to discover. 
if let Some(root_pkg) = root_pkg { @@ -901,7 +905,16 @@ fn handle_rustc_crates( ); pkg_to_lib_crate.insert(pkg, crate_id); // Add dependencies on core / std / alloc for this crate - public_deps.add(crate_id, crate_graph); + public_deps.add_to_crate_graph(crate_graph, crate_id); + if let Some(proc_macro) = libproc_macro { + add_dep_with_prelude( + crate_graph, + crate_id, + CrateName::new("proc_macro").unwrap(), + proc_macro, + rustc_workspace[tgt].is_proc_macro, + ); + } rustc_pkg_crates.entry(pkg).or_insert_with(Vec::new).push(crate_id); } } @@ -1009,7 +1022,7 @@ struct SysrootPublicDeps { impl SysrootPublicDeps { /// Makes `from` depend on the public sysroot crates. - fn add(&self, from: CrateId, crate_graph: &mut CrateGraph) { + fn add_to_crate_graph(&self, crate_graph: &mut CrateGraph, from: CrateId) { for (name, krate, prelude) in &self.deps { add_dep_with_prelude(crate_graph, from, name.clone(), *krate, *prelude); } From 4f415fc3489d5a8e910a035455d7d33f15928ed7 Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Thu, 10 Nov 2022 19:22:20 +0900 Subject: [PATCH 0554/1945] Ignore outermost non-delimited `Subtree` when reversing fixups --- crates/hir-expand/src/fixup.rs | 42 ++++++++++++++++++++++------------ 1 file changed, 28 insertions(+), 14 deletions(-) diff --git a/crates/hir-expand/src/fixup.rs b/crates/hir-expand/src/fixup.rs index 00b4cb3f96..aab36f640e 100644 --- a/crates/hir-expand/src/fixup.rs +++ b/crates/hir-expand/src/fixup.rs @@ -4,6 +4,7 @@ use std::mem; use mbe::{SyntheticToken, SyntheticTokenId, TokenMap}; use rustc_hash::FxHashMap; +use smallvec::SmallVec; use syntax::{ ast::{self, AstNode, HasLoopBody}, match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange, @@ -292,21 +293,34 @@ pub(crate) fn reverse_fixups( token_map: &TokenMap, undo_info: &SyntaxFixupUndoInfo, ) { - tt.token_trees.retain(|tt| match tt { - tt::TokenTree::Leaf(leaf) => token_map.synthetic_token_id(leaf.id()) != Some(EMPTY_ID), - tt::TokenTree::Subtree(st) => { - st.delimiter.map_or(true, |d| token_map.synthetic_token_id(d.id) != Some(EMPTY_ID)) - } - }); - tt.token_trees.iter_mut().for_each(|tt| match tt { - tt::TokenTree::Subtree(tt) => reverse_fixups(tt, token_map, undo_info), - tt::TokenTree::Leaf(leaf) => { - if let Some(id) = token_map.synthetic_token_id(leaf.id()) { - let original = &undo_info.original[id.0 as usize]; - *tt = tt::TokenTree::Subtree(original.clone()); + let tts = std::mem::take(&mut tt.token_trees); + tt.token_trees = tts + .into_iter() + .filter(|tt| match tt { + tt::TokenTree::Leaf(leaf) => token_map.synthetic_token_id(leaf.id()) != Some(EMPTY_ID), + tt::TokenTree::Subtree(st) => { + st.delimiter.map_or(true, |d| token_map.synthetic_token_id(d.id) != Some(EMPTY_ID)) } - } - }); + }) + .flat_map(|tt| match tt { + tt::TokenTree::Subtree(mut tt) => { + reverse_fixups(&mut tt, token_map, undo_info); + SmallVec::from_const([tt.into()]) + } + tt::TokenTree::Leaf(leaf) => { + if let Some(id) = token_map.synthetic_token_id(leaf.id()) { + let original = undo_info.original[id.0 as usize].clone(); + if original.delimiter.is_none() { + original.token_trees.into() + } else { + SmallVec::from_const([original.into()]) + } + } else { + SmallVec::from_const([leaf.into()]) + } + } + }) + .collect(); } #[cfg(test)] From 5b070610118a98d96869af4cad79575d4edc5941 Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Thu, 10 Nov 2022 19:24:01 +0900 Subject: [PATCH 0555/1945] Test `TokenTree`s' equality modulo `Punct`s' spacing --- crates/hir-expand/src/fixup.rs | 41 +++++++++++++++++++++++++++------- 
1 file changed, 33 insertions(+), 8 deletions(-) diff --git a/crates/hir-expand/src/fixup.rs b/crates/hir-expand/src/fixup.rs index aab36f640e..a4abe75626 100644 --- a/crates/hir-expand/src/fixup.rs +++ b/crates/hir-expand/src/fixup.rs @@ -329,6 +329,31 @@ mod tests { use super::reverse_fixups; + // The following three functions are only meant to check partial structural equivalence of + // `TokenTree`s, see the last assertion in `check()`. + fn check_leaf_eq(a: &tt::Leaf, b: &tt::Leaf) -> bool { + match (a, b) { + (tt::Leaf::Literal(a), tt::Leaf::Literal(b)) => a.text == b.text, + (tt::Leaf::Punct(a), tt::Leaf::Punct(b)) => a.char == b.char, + (tt::Leaf::Ident(a), tt::Leaf::Ident(b)) => a.text == b.text, + _ => false, + } + } + + fn check_subtree_eq(a: &tt::Subtree, b: &tt::Subtree) -> bool { + a.delimiter.map(|it| it.kind) == b.delimiter.map(|it| it.kind) + && a.token_trees.len() == b.token_trees.len() + && a.token_trees.iter().zip(&b.token_trees).all(|(a, b)| check_tt_eq(a, b)) + } + + fn check_tt_eq(a: &tt::TokenTree, b: &tt::TokenTree) -> bool { + match (a, b) { + (tt::TokenTree::Leaf(a), tt::TokenTree::Leaf(b)) => check_leaf_eq(a, b), + (tt::TokenTree::Subtree(a), tt::TokenTree::Subtree(b)) => check_subtree_eq(a, b), + _ => false, + } + } + #[track_caller] fn check(ra_fixture: &str, mut expect: Expect) { let parsed = syntax::SourceFile::parse(ra_fixture); @@ -341,8 +366,7 @@ mod tests { fixups.append, ); - let mut actual = tt.to_string(); - actual.push('\n'); + let actual = format!("{}\n", tt); expect.indent(false); expect.assert_eq(&actual); @@ -358,9 +382,12 @@ mod tests { reverse_fixups(&mut tt, &tmap, &fixups.undo_info); // the fixed-up + reversed version should be equivalent to the original input - // (but token IDs don't matter) + // modulo token IDs and `Punct`s' spacing. let (original_as_tt, _) = mbe::syntax_node_to_token_tree(&parsed.syntax_node()); - assert_eq!(tt.to_string(), original_as_tt.to_string()); + assert!( + check_subtree_eq(&tt, &original_as_tt), + "different token tree: {tt:?}, {original_as_tt:?}" + ); } #[test] @@ -483,7 +510,6 @@ fn foo () {a . __ra_fixup} } #[test] - #[ignore] fn incomplete_field_expr_2() { check( r#" @@ -492,13 +518,12 @@ fn foo() { } "#, expect![[r#" -fn foo () {a .__ra_fixup ;} +fn foo () {a . __ra_fixup ;} "#]], ) } #[test] - #[ignore] fn incomplete_field_expr_3() { check( r#" @@ -508,7 +533,7 @@ fn foo() { } "#, expect![[r#" -fn foo () {a .__ra_fixup ; bar () ;} +fn foo () {a . __ra_fixup ; bar () ;} "#]], ) } From dea49d082644cb3dbc7755849b6e3c325a6f32d9 Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Fri, 11 Nov 2022 19:50:26 +0900 Subject: [PATCH 0556/1945] fix: check visibility of each segment in path resolution --- crates/hir-def/src/nameres/collector.rs | 4 +++ crates/hir-def/src/nameres/path_resolution.rs | 7 +++++ crates/hir-def/src/nameres/tests/globs.rs | 30 +++++++++++++++++++ 3 files changed, 41 insertions(+) diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs index 9ffc218818..b0dd01f9db 100644 --- a/crates/hir-def/src/nameres/collector.rs +++ b/crates/hir-def/src/nameres/collector.rs @@ -212,6 +212,7 @@ impl Import { #[derive(Debug, Eq, PartialEq)] struct ImportDirective { + /// The module this import directive is in. 
module_id: LocalModuleId, import: Import, status: PartialResolvedImport, @@ -963,8 +964,10 @@ impl DefCollector<'_> { fn update( &mut self, + // The module for which `resolutions` have been resolve module_id: LocalModuleId, resolutions: &[(Option, PerNs)], + // Visibility this import will have vis: Visibility, import_type: ImportType, ) { @@ -974,6 +977,7 @@ impl DefCollector<'_> { fn update_recursive( &mut self, + // The module for which `resolutions` have been resolve module_id: LocalModuleId, resolutions: &[(Option, PerNs)], // All resolutions are imported with this visibility; the visibilities in diff --git a/crates/hir-def/src/nameres/path_resolution.rs b/crates/hir-def/src/nameres/path_resolution.rs index 8dfda6df64..20d39ec6cb 100644 --- a/crates/hir-def/src/nameres/path_resolution.rs +++ b/crates/hir-def/src/nameres/path_resolution.rs @@ -73,7 +73,10 @@ impl DefMap { pub(crate) fn resolve_visibility( &self, db: &dyn DefDatabase, + // module to import to original_module: LocalModuleId, + // pub(path) + // ^^^^ this visibility: &RawVisibility, ) -> Option { let mut vis = match visibility { @@ -115,6 +118,7 @@ impl DefMap { &self, db: &dyn DefDatabase, mode: ResolveMode, + // module to import to mut original_module: LocalModuleId, path: &ModPath, shadow: BuiltinShadowMode, @@ -361,6 +365,9 @@ impl DefMap { ); } }; + + curr_per_ns = curr_per_ns + .filter_visibility(|vis| vis.is_visible_from_def_map(db, self, original_module)); } ResolvePathResult::with(curr_per_ns, ReachedFixedPoint::Yes, None, Some(self.krate)) diff --git a/crates/hir-def/src/nameres/tests/globs.rs b/crates/hir-def/src/nameres/tests/globs.rs index b2a6a592cf..84d14e3b92 100644 --- a/crates/hir-def/src/nameres/tests/globs.rs +++ b/crates/hir-def/src/nameres/tests/globs.rs @@ -336,3 +336,33 @@ mod d { "#]], ); } + +#[test] +fn glob_name_collision_check_visibility() { + check( + r#" +mod event { + mod serenity { + pub fn Event() {} + } + use serenity::*; + + pub struct Event {} +} + +use event::Event; + "#, + expect![[r#" + crate + Event: t + event: t + + crate::event + Event: t v + serenity: t + + crate::event::serenity + Event: v + "#]], + ); +} From e75afebeb243538ac85e4cb965bd40582f4c7b59 Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Fri, 11 Nov 2022 19:56:47 +0900 Subject: [PATCH 0557/1945] Resolve invisible defs in `fix_visibility` assist --- .../src/handlers/fix_visibility.rs | 20 +++++++++++-------- crates/ide-assists/src/tests/generated.rs | 4 ++-- 2 files changed, 14 insertions(+), 10 deletions(-) diff --git a/crates/ide-assists/src/handlers/fix_visibility.rs b/crates/ide-assists/src/handlers/fix_visibility.rs index 8764543028..d9e00435ec 100644 --- a/crates/ide-assists/src/handlers/fix_visibility.rs +++ b/crates/ide-assists/src/handlers/fix_visibility.rs @@ -1,4 +1,4 @@ -use hir::{db::HirDatabase, HasSource, HasVisibility, PathResolution}; +use hir::{db::HirDatabase, HasSource, HasVisibility, ModuleDef, PathResolution, ScopeDef}; use ide_db::base_db::FileId; use syntax::{ ast::{self, HasVisibility as _}, @@ -18,7 +18,7 @@ use crate::{utils::vis_offset, AssistContext, AssistId, AssistKind, Assists}; // fn frobnicate() {} // } // fn main() { -// m::frobnicate$0() {} +// m::frobnicate$0(); // } // ``` // -> @@ -27,7 +27,7 @@ use crate::{utils::vis_offset, AssistContext, AssistId, AssistKind, Assists}; // $0pub(crate) fn frobnicate() {} // } // fn main() { -// m::frobnicate() {} +// m::frobnicate(); // } // ``` pub(crate) fn fix_visibility(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { @@ -37,11 
+37,15 @@ pub(crate) fn fix_visibility(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti fn add_vis_to_referenced_module_def(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { let path: ast::Path = ctx.find_node_at_offset()?; - let path_res = ctx.sema.resolve_path(&path)?; - let def = match path_res { - PathResolution::Def(def) => def, - _ => return None, - }; + let qualifier = path.qualifier()?; + let name_ref = path.segment()?.name_ref()?; + let qualifier_res = ctx.sema.resolve_path(&qualifier)?; + let PathResolution::Def(ModuleDef::Module(module)) = qualifier_res else { return None; }; + let (_, def) = module + .scope(ctx.db(), None) + .into_iter() + .find(|(name, _)| name.to_smol_str() == name_ref.text().as_str())?; + let ScopeDef::ModuleDef(def) = def else { return None; }; let current_module = ctx.sema.scope(path.syntax())?.module(); let target_module = def.module(ctx.db())?; diff --git a/crates/ide-assists/src/tests/generated.rs b/crates/ide-assists/src/tests/generated.rs index 029d169899..c09317572a 100644 --- a/crates/ide-assists/src/tests/generated.rs +++ b/crates/ide-assists/src/tests/generated.rs @@ -741,7 +741,7 @@ mod m { fn frobnicate() {} } fn main() { - m::frobnicate$0() {} + m::frobnicate$0(); } "#####, r#####" @@ -749,7 +749,7 @@ mod m { $0pub(crate) fn frobnicate() {} } fn main() { - m::frobnicate() {} + m::frobnicate(); } "#####, ) From 19306c070d5deb8afa6e9da21fcf22e5b7e24a11 Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Fri, 11 Nov 2022 19:59:52 +0900 Subject: [PATCH 0558/1945] Fix tests that depended on loose visibility restriction --- crates/hir-def/src/nameres/tests.rs | 6 +-- crates/hir-def/src/nameres/tests/globs.rs | 3 +- .../src/nameres/tests/mod_resolution.rs | 2 +- crates/hir-ty/src/tests/method_resolution.rs | 40 +++++++++---------- crates/hir-ty/src/tests/simple.rs | 18 ++++----- crates/hir-ty/src/tests/traits.rs | 36 ++++++++--------- .../src/handlers/add_missing_impl_members.rs | 28 ++++++------- .../src/handlers/generate_enum_variant.rs | 12 +++--- .../src/handlers/generate_function.rs | 4 +- .../src/handlers/no_such_field.rs | 4 +- .../src/handlers/useless_braces.rs | 20 +++++----- crates/ide/src/goto_definition.rs | 6 +-- 12 files changed, 90 insertions(+), 89 deletions(-) diff --git a/crates/hir-def/src/nameres/tests.rs b/crates/hir-def/src/nameres/tests.rs index 70dd2eb3ad..0d90047c28 100644 --- a/crates/hir-def/src/nameres/tests.rs +++ b/crates/hir-def/src/nameres/tests.rs @@ -58,9 +58,9 @@ extern { "#, expect![[r#" crate - E: t + E: _ S: t v - V: t v + V: _ foo: t crate::foo @@ -307,7 +307,7 @@ pub struct FromLib; Bar: t v crate::foo - Bar: t v + Bar: _ FromLib: t v "#]], ); diff --git a/crates/hir-def/src/nameres/tests/globs.rs b/crates/hir-def/src/nameres/tests/globs.rs index 84d14e3b92..88a3c76393 100644 --- a/crates/hir-def/src/nameres/tests/globs.rs +++ b/crates/hir-def/src/nameres/tests/globs.rs @@ -119,7 +119,7 @@ use foo::*; use foo::bar::*; //- /foo/mod.rs -mod bar; +pub mod bar; fn Foo() {}; pub struct Foo {}; @@ -132,6 +132,7 @@ pub(crate) struct PubCrateStruct; crate Foo: t PubCrateStruct: t v + bar: t foo: t crate::foo diff --git a/crates/hir-def/src/nameres/tests/mod_resolution.rs b/crates/hir-def/src/nameres/tests/mod_resolution.rs index ba3bf8b5a5..c575bf7cac 100644 --- a/crates/hir-def/src/nameres/tests/mod_resolution.rs +++ b/crates/hir-def/src/nameres/tests/mod_resolution.rs @@ -580,7 +580,7 @@ fn module_resolution_decl_inside_inline_module_in_crate_root() { //- /main.rs mod foo { #[path = "baz.rs"] - mod bar; + 
pub mod bar; } use self::foo::bar::Baz; diff --git a/crates/hir-ty/src/tests/method_resolution.rs b/crates/hir-ty/src/tests/method_resolution.rs index ac8edb841a..5d76d185ff 100644 --- a/crates/hir-ty/src/tests/method_resolution.rs +++ b/crates/hir-ty/src/tests/method_resolution.rs @@ -164,16 +164,16 @@ fn infer_associated_method_with_modules() { check_infer( r#" mod a { - struct A; + pub struct A; impl A { pub fn thing() -> A { A {} }} } mod b { - struct B; + pub struct B; impl B { pub fn thing() -> u32 { 99 }} - mod c { - struct C; + pub mod c { + pub struct C; impl C { pub fn thing() -> C { C {} }} } } @@ -186,22 +186,22 @@ fn infer_associated_method_with_modules() { } "#, expect![[r#" - 55..63 '{ A {} }': A - 57..61 'A {}': A - 125..131 '{ 99 }': u32 - 127..129 '99': u32 - 201..209 '{ C {} }': C - 203..207 'C {}': C - 240..324 '{ ...g(); }': () - 250..251 'x': A - 254..265 'a::A::thing': fn thing() -> A - 254..267 'a::A::thing()': A - 277..278 'y': u32 - 281..292 'b::B::thing': fn thing() -> u32 - 281..294 'b::B::thing()': u32 - 304..305 'z': C - 308..319 'c::C::thing': fn thing() -> C - 308..321 'c::C::thing()': C + 59..67 '{ A {} }': A + 61..65 'A {}': A + 133..139 '{ 99 }': u32 + 135..137 '99': u32 + 217..225 '{ C {} }': C + 219..223 'C {}': C + 256..340 '{ ...g(); }': () + 266..267 'x': A + 270..281 'a::A::thing': fn thing() -> A + 270..283 'a::A::thing()': A + 293..294 'y': u32 + 297..308 'b::B::thing': fn thing() -> u32 + 297..310 'b::B::thing()': u32 + 320..321 'z': C + 324..335 'c::C::thing': fn thing() -> C + 324..337 'c::C::thing()': C "#]], ); } diff --git a/crates/hir-ty/src/tests/simple.rs b/crates/hir-ty/src/tests/simple.rs index 080e2ac1b8..d7431443b8 100644 --- a/crates/hir-ty/src/tests/simple.rs +++ b/crates/hir-ty/src/tests/simple.rs @@ -214,7 +214,7 @@ fn infer_paths() { fn a() -> u32 { 1 } mod b { - fn c() -> u32 { 1 } + pub fn c() -> u32 { 1 } } fn test() { @@ -225,13 +225,13 @@ fn test() { expect![[r#" 14..19 '{ 1 }': u32 16..17 '1': u32 - 47..52 '{ 1 }': u32 - 49..50 '1': u32 - 66..90 '{ ...c(); }': () - 72..73 'a': fn a() -> u32 - 72..75 'a()': u32 - 81..85 'b::c': fn c() -> u32 - 81..87 'b::c()': u32 + 51..56 '{ 1 }': u32 + 53..54 '1': u32 + 70..94 '{ ...c(); }': () + 76..77 'a': fn a() -> u32 + 76..79 'a()': u32 + 85..89 'b::c': fn c() -> u32 + 85..91 'b::c()': u32 "#]], ); } @@ -1856,7 +1856,7 @@ fn not_shadowing_module_by_primitive() { check_types( r#" //- /str.rs -fn foo() -> u32 {0} +pub fn foo() -> u32 {0} //- /main.rs mod str; diff --git a/crates/hir-ty/src/tests/traits.rs b/crates/hir-ty/src/tests/traits.rs index 7d42b8b9bc..3d7194b6f4 100644 --- a/crates/hir-ty/src/tests/traits.rs +++ b/crates/hir-ty/src/tests/traits.rs @@ -1706,7 +1706,7 @@ fn where_clause_trait_in_scope_for_method_resolution() { check_types( r#" mod foo { - trait Trait { + pub trait Trait { fn foo(&self) -> u32 { 0 } } } @@ -1723,7 +1723,7 @@ fn super_trait_method_resolution() { check_infer( r#" mod foo { - trait SuperTrait { + pub trait SuperTrait { fn foo(&self) -> u32 {} } } @@ -1735,15 +1735,15 @@ fn test(x: T, y: U) { y.foo(); }"#, expect![[r#" - 49..53 'self': &Self - 62..64 '{}': u32 - 181..182 'x': T - 187..188 'y': U - 193..222 '{ ...o(); }': () - 199..200 'x': T - 199..206 'x.foo()': u32 - 212..213 'y': U - 212..219 'y.foo()': u32 + 53..57 'self': &Self + 66..68 '{}': u32 + 185..186 'x': T + 191..192 'y': U + 197..226 '{ ...o(); }': () + 203..204 'x': T + 203..210 'x.foo()': u32 + 216..217 'y': U + 216..223 'y.foo()': u32 "#]], ); } @@ -1754,7 +1754,7 @@ fn 
super_trait_impl_trait_method_resolution() { r#" //- minicore: sized mod foo { - trait SuperTrait { + pub trait SuperTrait { fn foo(&self) -> u32 {} } } @@ -1764,12 +1764,12 @@ fn test(x: &impl Trait1) { x.foo(); }"#, expect![[r#" - 49..53 'self': &Self - 62..64 '{}': u32 - 115..116 'x': &impl Trait1 - 132..148 '{ ...o(); }': () - 138..139 'x': &impl Trait1 - 138..145 'x.foo()': u32 + 53..57 'self': &Self + 66..68 '{}': u32 + 119..120 'x': &impl Trait1 + 136..152 '{ ...o(); }': () + 142..143 'x': &impl Trait1 + 142..149 'x.foo()': u32 "#]], ); } diff --git a/crates/ide-assists/src/handlers/add_missing_impl_members.rs b/crates/ide-assists/src/handlers/add_missing_impl_members.rs index 62cf5ab4f3..722302f991 100644 --- a/crates/ide-assists/src/handlers/add_missing_impl_members.rs +++ b/crates/ide-assists/src/handlers/add_missing_impl_members.rs @@ -379,14 +379,14 @@ impl Foo for S { r#" mod foo { pub struct Bar; - trait Foo { fn foo(&self, bar: Bar); } + pub trait Foo { fn foo(&self, bar: Bar); } } struct S; impl foo::Foo for S { $0 }"#, r#" mod foo { pub struct Bar; - trait Foo { fn foo(&self, bar: Bar); } + pub trait Foo { fn foo(&self, bar: Bar); } } struct S; impl foo::Foo for S { @@ -439,14 +439,14 @@ impl bar::Foo for S { r#" mod foo { pub struct Bar; - trait Foo { fn foo(&self, bar: Bar); } + pub trait Foo { fn foo(&self, bar: Bar); } } struct S; impl foo::Foo for S { $0 }"#, r#" mod foo { pub struct Bar; - trait Foo { fn foo(&self, bar: Bar); } + pub trait Foo { fn foo(&self, bar: Bar); } } struct S; impl foo::Foo for S { @@ -464,14 +464,14 @@ impl foo::Foo for S { r#" mod foo { pub struct Bar; - trait Foo { fn foo(&self, bar: Bar); } + pub trait Foo { fn foo(&self, bar: Bar); } } struct S; impl foo::Foo for S { $0 }"#, r#" mod foo { pub struct Bar; - trait Foo { fn foo(&self, bar: Bar); } + pub trait Foo { fn foo(&self, bar: Bar); } } struct S; impl foo::Foo for S { @@ -489,7 +489,7 @@ impl foo::Foo for S { add_missing_impl_members, r#" mod foo { - trait Foo { fn foo(&self, bar: T); } + pub trait Foo { fn foo(&self, bar: T); } pub struct Param; } struct Param; @@ -497,7 +497,7 @@ struct S; impl foo::Foo for S { $0 }"#, r#" mod foo { - trait Foo { fn foo(&self, bar: T); } + pub trait Foo { fn foo(&self, bar: T); } pub struct Param; } struct Param; @@ -518,7 +518,7 @@ impl foo::Foo for S { mod foo { pub struct Bar; impl Bar { type Assoc = u32; } - trait Foo { fn foo(&self, bar: Bar::Assoc); } + pub trait Foo { fn foo(&self, bar: Bar::Assoc); } } struct S; impl foo::Foo for S { $0 }"#, @@ -526,7 +526,7 @@ impl foo::Foo for S { $0 }"#, mod foo { pub struct Bar; impl Bar { type Assoc = u32; } - trait Foo { fn foo(&self, bar: Bar::Assoc); } + pub trait Foo { fn foo(&self, bar: Bar::Assoc); } } struct S; impl foo::Foo for S { @@ -545,7 +545,7 @@ impl foo::Foo for S { mod foo { pub struct Bar; pub struct Baz; - trait Foo { fn foo(&self, bar: Bar); } + pub trait Foo { fn foo(&self, bar: Bar); } } struct S; impl foo::Foo for S { $0 }"#, @@ -553,7 +553,7 @@ impl foo::Foo for S { $0 }"#, mod foo { pub struct Bar; pub struct Baz; - trait Foo { fn foo(&self, bar: Bar); } + pub trait Foo { fn foo(&self, bar: Bar); } } struct S; impl foo::Foo for S { @@ -571,14 +571,14 @@ impl foo::Foo for S { r#" mod foo { pub trait Fn { type Output; } - trait Foo { fn foo(&self, bar: dyn Fn(u32) -> i32); } + pub trait Foo { fn foo(&self, bar: dyn Fn(u32) -> i32); } } struct S; impl foo::Foo for S { $0 }"#, r#" mod foo { pub trait Fn { type Output; } - trait Foo { fn foo(&self, bar: dyn Fn(u32) -> i32); } + pub 
trait Foo { fn foo(&self, bar: dyn Fn(u32) -> i32); } } struct S; impl foo::Foo for S { diff --git a/crates/ide-assists/src/handlers/generate_enum_variant.rs b/crates/ide-assists/src/handlers/generate_enum_variant.rs index 35cd42908a..0bcb572831 100644 --- a/crates/ide-assists/src/handlers/generate_enum_variant.rs +++ b/crates/ide-assists/src/handlers/generate_enum_variant.rs @@ -261,12 +261,12 @@ fn main() { } //- /foo.rs -enum Foo { +pub enum Foo { Bar, } ", r" -enum Foo { +pub enum Foo { Bar, Baz, } @@ -310,7 +310,7 @@ fn main() { generate_enum_variant, r" mod m { - enum Foo { + pub enum Foo { Bar, } } @@ -320,7 +320,7 @@ fn main() { ", r" mod m { - enum Foo { + pub enum Foo { Bar, Baz, } @@ -516,10 +516,10 @@ mod foo; use foo::Foo::Bar$0; //- /foo.rs -enum Foo {} +pub enum Foo {} ", r" -enum Foo { +pub enum Foo { Bar, } ", diff --git a/crates/ide-assists/src/handlers/generate_function.rs b/crates/ide-assists/src/handlers/generate_function.rs index c229127e48..57f198748c 100644 --- a/crates/ide-assists/src/handlers/generate_function.rs +++ b/crates/ide-assists/src/handlers/generate_function.rs @@ -1324,7 +1324,7 @@ fn foo() { generate_function, r" mod bar { - mod baz {} + pub mod baz {} } fn foo() { @@ -1333,7 +1333,7 @@ fn foo() { ", r" mod bar { - mod baz { + pub mod baz { pub(crate) fn my_fn() { ${0:todo!()} } diff --git a/crates/ide-diagnostics/src/handlers/no_such_field.rs b/crates/ide-diagnostics/src/handlers/no_such_field.rs index a80299106b..d8f2a9de98 100644 --- a/crates/ide-diagnostics/src/handlers/no_such_field.rs +++ b/crates/ide-diagnostics/src/handlers/no_such_field.rs @@ -268,12 +268,12 @@ fn main() { foo::Foo { bar: 3, $0baz: false}; } //- /foo.rs -struct Foo { +pub struct Foo { bar: i32 } "#, r#" -struct Foo { +pub struct Foo { bar: i32, pub(crate) baz: bool } diff --git a/crates/ide-diagnostics/src/handlers/useless_braces.rs b/crates/ide-diagnostics/src/handlers/useless_braces.rs index 8b9330e040..289ed0458c 100644 --- a/crates/ide-diagnostics/src/handlers/useless_braces.rs +++ b/crates/ide-diagnostics/src/handlers/useless_braces.rs @@ -71,9 +71,9 @@ use a; use a::{c, d::e}; mod a { - mod c {} - mod d { - mod e {} + pub mod c {} + pub mod d { + pub mod e {} } } "#, @@ -87,9 +87,9 @@ use a::{ }; mod a { - mod c {} - mod d { - mod e {} + pub mod c {} + pub mod d { + pub mod e {} } } "#, @@ -116,11 +116,11 @@ use b; ); check_fix( r#" -mod a { mod c {} } +mod a { pub mod c {} } use a::{c$0}; "#, r#" -mod a { mod c {} } +mod a { pub mod c {} } use a::c; "#, ); @@ -136,11 +136,11 @@ use a; ); check_fix( r#" -mod a { mod c {} mod d { mod e {} } } +mod a { pub mod c {} pub mod d { pub mod e {} } } use a::{c, d::{e$0}}; "#, r#" -mod a { mod c {} mod d { mod e {} } } +mod a { pub mod c {} pub mod d { pub mod e {} } } use a::{c, d::e}; "#, ); diff --git a/crates/ide/src/goto_definition.rs b/crates/ide/src/goto_definition.rs index f97c67b144..43f7a529bc 100644 --- a/crates/ide/src/goto_definition.rs +++ b/crates/ide/src/goto_definition.rs @@ -289,10 +289,10 @@ mod b; enum E { X(Foo$0) } //- /a.rs -struct Foo; - //^^^ +pub struct Foo; + //^^^ //- /b.rs -struct Foo; +pub struct Foo; "#, ); } From e35836eb811a99872fdf63f1f0f1046ee651eeb0 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 11 Nov 2022 13:00:22 +0100 Subject: [PATCH 0559/1945] Send status notification if there are no found workspaces --- crates/rust-analyzer/src/reload.rs | 19 ++++++++++++++++--- 1 file changed, 16 insertions(+), 3 deletions(-) diff --git a/crates/rust-analyzer/src/reload.rs 
b/crates/rust-analyzer/src/reload.rs index e1f651786d..407416d9f4 100644 --- a/crates/rust-analyzer/src/reload.rs +++ b/crates/rust-analyzer/src/reload.rs @@ -106,6 +106,14 @@ impl GlobalState { status.health = lsp_ext::Health::Error; status.message = Some(error) } + + if self.config.linked_projects().is_empty() + && self.config.detached_files().is_empty() + && self.config.notifications().cargo_toml_not_found + { + status.health = lsp_ext::Health::Warning; + status.message = Some("Workspace reload required".to_string()) + } status } @@ -427,9 +435,14 @@ impl GlobalState { fn fetch_workspace_error(&self) -> Result<(), String> { let mut buf = String::new(); - for ws in self.fetch_workspaces_queue.last_op_result() { - if let Err(err) = ws { - stdx::format_to!(buf, "rust-analyzer failed to load workspace: {:#}\n", err); + let last_op_result = self.fetch_workspaces_queue.last_op_result(); + if last_op_result.is_empty() { + stdx::format_to!(buf, "rust-analyzer failed to discover workspace"); + } else { + for ws in last_op_result { + if let Err(err) = ws { + stdx::format_to!(buf, "rust-analyzer failed to load workspace: {:#}\n", err); + } } } From e50712cf2c115a1c287080b351f584edce49485b Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 11 Nov 2022 13:38:07 +0100 Subject: [PATCH 0560/1945] fix: Fix hover in attributed items not preferring similar kinded tokens --- crates/ide/src/hover.rs | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs index 3687b597fc..838fb18c3d 100644 --- a/crates/ide/src/hover.rs +++ b/crates/ide/src/hover.rs @@ -119,7 +119,14 @@ pub(crate) fn hover( }); } - let in_attr = matches!(original_token.parent().and_then(ast::TokenTree::cast), Some(tt) if tt.syntax().ancestors().any(|it| ast::Meta::can_cast(it.kind()))); + let in_attr = original_token + .parent_ancestors() + .filter_map(ast::Item::cast) + .any(|item| sema.is_attr_macro_call(&item)) + && !matches!( + original_token.parent().and_then(ast::TokenTree::cast), + Some(tt) if tt.syntax().ancestors().any(|it| ast::Meta::can_cast(it.kind())) + ); // prefer descending the same token kind in attribute expansions, in normal macros text // equivalency is more important let descended = if in_attr { From c6c932d3f3b57b6e5e04321398e67326ebd1ce58 Mon Sep 17 00:00:00 2001 From: "Alexis (Poliorcetics) Bourget" Date: Sun, 25 Sep 2022 01:23:20 +0200 Subject: [PATCH 0561/1945] chore: Align config property --- crates/rust-analyzer/src/config.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index d642cd9b92..25e341391a 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs @@ -120,7 +120,7 @@ config_data! { /// Compilation target override (target triple). cargo_target: Option = "null", /// Unsets `#[cfg(test)]` for the specified crates. - cargo_unsetTest: Vec = "[\"core\"]", + cargo_unsetTest: Vec = "[\"core\"]", /// Check all targets and tests (`--all-targets`). 
checkOnSave_allTargets: bool = "true", From 0d4737adb68704a9db802614f6ac9ba933d81f65 Mon Sep 17 00:00:00 2001 From: "Alexis (Poliorcetics) Bourget" Date: Sun, 25 Sep 2022 01:22:27 +0200 Subject: [PATCH 0562/1945] feat: Support passing multiple targets to cargo (for Rust 1.64.0+) --- crates/flycheck/src/lib.rs | 6 +-- crates/project-model/src/build_scripts.rs | 2 +- crates/project-model/src/cargo_workspace.rs | 52 +++++++++++++++------ crates/rust-analyzer/src/config.rs | 35 ++++++++++---- docs/user/generated_config.adoc | 10 ++-- editors/code/package.json | 17 +++++-- 6 files changed, 87 insertions(+), 35 deletions(-) diff --git a/crates/flycheck/src/lib.rs b/crates/flycheck/src/lib.rs index ff507a52d5..8f93dad06e 100644 --- a/crates/flycheck/src/lib.rs +++ b/crates/flycheck/src/lib.rs @@ -40,7 +40,7 @@ pub enum InvocationLocation { pub enum FlycheckConfig { CargoCommand { command: String, - target_triple: Option, + target_triples: Vec, all_targets: bool, no_default_features: bool, all_features: bool, @@ -286,7 +286,7 @@ impl FlycheckActor { let (mut cmd, args) = match &self.config { FlycheckConfig::CargoCommand { command, - target_triple, + target_triples, no_default_features, all_targets, all_features, @@ -300,7 +300,7 @@ impl FlycheckActor { cmd.args(&["--workspace", "--message-format=json", "--manifest-path"]) .arg(self.root.join("Cargo.toml").as_os_str()); - if let Some(target) = target_triple { + for target in target_triples { cmd.args(&["--target", target.as_str()]); } if *all_targets { diff --git a/crates/project-model/src/build_scripts.rs b/crates/project-model/src/build_scripts.rs index a26a7c57ac..ae2b41f27d 100644 --- a/crates/project-model/src/build_scripts.rs +++ b/crates/project-model/src/build_scripts.rs @@ -69,7 +69,7 @@ impl WorkspaceBuildScripts { cmd.args(&["check", "--quiet", "--workspace", "--message-format=json"]); // --all-targets includes tests, benches and examples in addition to the - // default lib and bins. This is an independent concept from the --targets + // default lib and bins. This is an independent concept from the --target // flag below. 
cmd.arg("--all-targets"); diff --git a/crates/project-model/src/cargo_workspace.rs b/crates/project-model/src/cargo_workspace.rs index b4c2ba4367..02ec7a4f6f 100644 --- a/crates/project-model/src/cargo_workspace.rs +++ b/crates/project-model/src/cargo_workspace.rs @@ -270,11 +270,7 @@ impl CargoWorkspace { config: &CargoConfig, progress: &dyn Fn(String), ) -> Result { - let target = config - .target - .clone() - .or_else(|| cargo_config_build_target(cargo_toml, &config.extra_env)) - .or_else(|| rustc_discover_host_triple(cargo_toml, &config.extra_env)); + let targets = find_list_of_build_targets(config, cargo_toml); let mut meta = MetadataCommand::new(); meta.cargo_path(toolchain::cargo()); @@ -294,8 +290,12 @@ impl CargoWorkspace { } meta.current_dir(current_dir.as_os_str()); - if let Some(target) = target { - meta.other_options(vec![String::from("--filter-platform"), target]); + if !targets.is_empty() { + let other_options: Vec<_> = targets + .into_iter() + .flat_map(|target| ["--filter-platform".to_string(), target]) + .collect(); + meta.other_options(other_options); } // FIXME: Fetching metadata is a slow process, as it might require @@ -469,6 +469,19 @@ impl CargoWorkspace { } } +fn find_list_of_build_targets(config: &CargoConfig, cargo_toml: &ManifestPath) -> Vec { + if let Some(target) = &config.target { + return [target.into()].to_vec(); + } + + let build_targets = cargo_config_build_target(cargo_toml, &config.extra_env); + if !build_targets.is_empty() { + return build_targets; + } + + rustc_discover_host_triple(cargo_toml, &config.extra_env).into_iter().collect() +} + fn rustc_discover_host_triple( cargo_toml: &ManifestPath, extra_env: &FxHashMap, @@ -499,7 +512,7 @@ fn rustc_discover_host_triple( fn cargo_config_build_target( cargo_toml: &ManifestPath, extra_env: &FxHashMap, -) -> Option { +) -> Vec { let mut cargo_config = Command::new(toolchain::cargo()); cargo_config.envs(extra_env); cargo_config @@ -507,12 +520,21 @@ fn cargo_config_build_target( .args(&["-Z", "unstable-options", "config", "get", "build.target"]) .env("RUSTC_BOOTSTRAP", "1"); // if successful we receive `build.target = "target-triple"` + // or `build.target = ["", ..]` tracing::debug!("Discovering cargo config target by {:?}", cargo_config); - match utf8_stdout(cargo_config) { - Ok(stdout) => stdout - .strip_prefix("build.target = \"") - .and_then(|stdout| stdout.strip_suffix('"')) - .map(ToOwned::to_owned), - Err(_) => None, - } + utf8_stdout(cargo_config).map(parse_output_cargo_config_build_target).unwrap_or_default() +} + +fn parse_output_cargo_config_build_target(stdout: String) -> Vec { + let trimmed = stdout.trim_start_matches("build.target = ").trim_matches('"'); + + if !trimmed.starts_with('[') { + return [trimmed.to_string()].to_vec(); + } + + let res = serde_json::from_str(trimmed); + if let Err(e) = &res { + tracing::warn!("Failed to parse `build.target` as an array of target: {}`", e); + } + res.unwrap_or_default() } diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index 25e341391a..6b2f22faa7 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs @@ -118,6 +118,8 @@ config_data! { /// This option does not take effect until rust-analyzer is restarted. cargo_sysroot: Option = "\"discover\"", /// Compilation target override (target triple). 
+ // FIXME(@poliorcetics): move to multiple targets here too, but this will need more work + // than `checkOnSave_target` cargo_target: Option = "null", /// Unsets `#[cfg(test)]` for the specified crates. cargo_unsetTest: Vec = "[\"core\"]", @@ -174,9 +176,13 @@ config_data! { /// ``` /// . checkOnSave_overrideCommand: Option> = "null", - /// Check for a specific target. Defaults to - /// `#rust-analyzer.cargo.target#`. - checkOnSave_target: Option = "null", + /// Check for specific targets. Defaults to `#rust-analyzer.cargo.target#` if empty. + /// + /// Can be a single target, e.g. `"x86_64-unknown-linux-gnu"` or a list of targets, e.g. + /// `["aarch64-apple-darwin", "x86_64-apple-darwin"]`. + /// + /// Aliased as `"checkOnSave.targets"`. + checkOnSave_target | checkOnSave_targets: CheckOnSaveTargets = "[]", /// Toggles the additional completions that automatically add imports when completed. /// Note that your client must specify the `additionalTextEdits` LSP client capability to truly have this feature enabled. @@ -1147,11 +1153,10 @@ impl Config { } Some(_) | None => FlycheckConfig::CargoCommand { command: self.data.checkOnSave_command.clone(), - target_triple: self - .data - .checkOnSave_target - .clone() - .or_else(|| self.data.cargo_target.clone()), + target_triples: match &self.data.checkOnSave_target.0[..] { + [] => self.data.cargo_target.clone().into_iter().collect(), + targets => targets.into(), + }, all_targets: self.data.checkOnSave_allTargets, no_default_features: self .data @@ -1657,6 +1662,9 @@ enum InvocationStrategy { PerWorkspace, } +#[derive(Deserialize, Debug, Clone)] +struct CheckOnSaveTargets(#[serde(deserialize_with = "single_or_array")] Vec); + #[derive(Deserialize, Debug, Clone)] #[serde(rename_all = "snake_case")] enum InvocationLocation { @@ -2118,6 +2126,17 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json "The command will be executed in the project root." ], }, + "CheckOnSaveTargets" => set! { + "anyOf": [ + { + "type": "string", + }, + { + "type": "array", + "items": { "type": "string" } + }, + ], + }, _ => panic!("missing entry for {}: {}", ty, default), } diff --git a/docs/user/generated_config.adoc b/docs/user/generated_config.adoc index da8e629807..57f950034c 100644 --- a/docs/user/generated_config.adoc +++ b/docs/user/generated_config.adoc @@ -190,11 +190,15 @@ cargo check --workspace --message-format=json --all-targets ``` . -- -[[rust-analyzer.checkOnSave.target]]rust-analyzer.checkOnSave.target (default: `null`):: +[[rust-analyzer.checkOnSave.target]]rust-analyzer.checkOnSave.target (default: `[]`):: + -- -Check for a specific target. Defaults to -`#rust-analyzer.cargo.target#`. +Check for specific targets. Defaults to `#rust-analyzer.cargo.target#` if empty. + +Can be a single target, e.g. `"x86_64-unknown-linux-gnu"` or a list of targets, e.g. +`["aarch64-apple-darwin", "x86_64-apple-darwin"]`. + +Aliased as `"checkOnSave.targets"`. -- [[rust-analyzer.completion.autoimport.enable]]rust-analyzer.completion.autoimport.enable (default: `true`):: + diff --git a/editors/code/package.json b/editors/code/package.json index 0c78165960..762726842f 100644 --- a/editors/code/package.json +++ b/editors/code/package.json @@ -634,11 +634,18 @@ } }, "rust-analyzer.checkOnSave.target": { - "markdownDescription": "Check for a specific target. Defaults to\n`#rust-analyzer.cargo.target#`.", - "default": null, - "type": [ - "null", - "string" + "markdownDescription": "Check for specific targets. 
Defaults to `#rust-analyzer.cargo.target#` if empty.\n\nCan be a single target, e.g. `\"x86_64-unknown-linux-gnu\"` or a list of targets, e.g.\n`[\"aarch64-apple-darwin\", \"x86_64-apple-darwin\"]`.\n\nAliased as `\"checkOnSave.targets\"`.", + "default": [], + "anyOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "type": "string" + } + } ] }, "rust-analyzer.completion.autoimport.enable": { From a143ff0248320bfd234aad0d93f2a59baf78216f Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 11 Nov 2022 14:36:27 +0100 Subject: [PATCH 0563/1945] fix: Fix r-a eagerly showing no discovered workspace errors --- crates/rust-analyzer/src/global_state.rs | 2 +- crates/rust-analyzer/src/main_loop.rs | 2 +- crates/rust-analyzer/src/reload.rs | 11 ++++------- 3 files changed, 6 insertions(+), 9 deletions(-) diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs index 74277ff2e5..4e8bc8d646 100644 --- a/crates/rust-analyzer/src/global_state.rs +++ b/crates/rust-analyzer/src/global_state.rs @@ -100,7 +100,7 @@ pub(crate) struct GlobalState { /// the user just adds comments or whitespace to Cargo.toml, we do not want /// to invalidate any salsa caches. pub(crate) workspaces: Arc>, - pub(crate) fetch_workspaces_queue: OpQueue>>, + pub(crate) fetch_workspaces_queue: OpQueue>>>, pub(crate) fetch_build_data_queue: OpQueue<(Arc>, Vec>)>, diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs index 6e5da58fe3..274588ce0e 100644 --- a/crates/rust-analyzer/src/main_loop.rs +++ b/crates/rust-analyzer/src/main_loop.rs @@ -451,7 +451,7 @@ impl GlobalState { ProjectWorkspaceProgress::Begin => (Progress::Begin, None), ProjectWorkspaceProgress::Report(msg) => (Progress::Report, Some(msg)), ProjectWorkspaceProgress::End(workspaces) => { - self.fetch_workspaces_queue.op_completed(workspaces); + self.fetch_workspaces_queue.op_completed(Some(workspaces)); let old = Arc::clone(&self.workspaces); self.switch_workspaces("fetched workspace".to_string()); diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs index 407416d9f4..aa0510a4ea 100644 --- a/crates/rust-analyzer/src/reload.rs +++ b/crates/rust-analyzer/src/reload.rs @@ -206,12 +206,9 @@ impl GlobalState { self.show_and_log_error("failed to run build scripts".to_string(), Some(error)); } - let workspaces = self - .fetch_workspaces_queue - .last_op_result() - .iter() - .filter_map(|res| res.as_ref().ok().cloned()) - .collect::>(); + let Some(workspaces) = self.fetch_workspaces_queue.last_op_result() else { return; }; + let workspaces = + workspaces.iter().filter_map(|res| res.as_ref().ok().cloned()).collect::>(); fn eq_ignore_build_data<'a>( left: &'a ProjectWorkspace, @@ -435,7 +432,7 @@ impl GlobalState { fn fetch_workspace_error(&self) -> Result<(), String> { let mut buf = String::new(); - let last_op_result = self.fetch_workspaces_queue.last_op_result(); + let Some(last_op_result) = self.fetch_workspaces_queue.last_op_result() else { return Ok(()) }; if last_op_result.is_empty() { stdx::format_to!(buf, "rust-analyzer failed to discover workspace"); } else { From 6674bd898ed0e85fd61dba2b018144477ef6347c Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 11 Nov 2022 15:25:15 +0100 Subject: [PATCH 0564/1945] fix: Add trait alias grammar to rust.ungram --- crates/hir-def/src/data.rs | 18 +++++++++++++----- crates/hir-def/src/item_tree.rs | 3 ++- crates/hir-def/src/item_tree/lower.rs | 10 +--------- crates/hir-def/src/item_tree/pretty.rs | 
19 ++++++++++++++----- crates/syntax/rust.ungram | 7 +++++-- crates/syntax/src/ast/generated/nodes.rs | 2 ++ 6 files changed, 37 insertions(+), 22 deletions(-) diff --git a/crates/hir-def/src/data.rs b/crates/hir-def/src/data.rs index 2dc69b00ac..9c76969086 100644 --- a/crates/hir-def/src/data.rs +++ b/crates/hir-def/src/data.rs @@ -236,11 +236,19 @@ impl TraitData { .by_key("rustc_skip_array_during_method_dispatch") .exists(); - let mut collector = - AssocItemCollector::new(db, module_id, tree_id.file_id(), ItemContainerId::TraitId(tr)); - collector.collect(&item_tree, tree_id.tree_id(), &tr_def.items); - let (items, attribute_calls, diagnostics) = collector.finish(); - + let (items, attribute_calls, diagnostics) = match &tr_def.items { + Some(items) => { + let mut collector = AssocItemCollector::new( + db, + module_id, + tree_id.file_id(), + ItemContainerId::TraitId(tr), + ); + collector.collect(&item_tree, tree_id.tree_id(), items); + collector.finish() + } + None => Default::default(), + }; ( Arc::new(TraitData { name, diff --git a/crates/hir-def/src/item_tree.rs b/crates/hir-def/src/item_tree.rs index 570344596d..0aa531eff7 100644 --- a/crates/hir-def/src/item_tree.rs +++ b/crates/hir-def/src/item_tree.rs @@ -666,7 +666,8 @@ pub struct Trait { pub generic_params: Interned, pub is_auto: bool, pub is_unsafe: bool, - pub items: Box<[AssocItem]>, + /// This is [`None`] if this Trait is a trait alias. + pub items: Option>, pub ast_id: FileAstId, } diff --git a/crates/hir-def/src/item_tree/lower.rs b/crates/hir-def/src/item_tree/lower.rs index 79249757d9..b25274bccc 100644 --- a/crates/hir-def/src/item_tree/lower.rs +++ b/crates/hir-def/src/item_tree/lower.rs @@ -451,15 +451,7 @@ impl<'a> Ctx<'a> { .collect() }); let ast_id = self.source_ast_id_map.ast_id(trait_def); - let res = Trait { - name, - visibility, - generic_params, - is_auto, - is_unsafe, - items: items.unwrap_or_default(), - ast_id, - }; + let res = Trait { name, visibility, generic_params, is_auto, is_unsafe, items, ast_id }; Some(id(self.data().traits.alloc(res))) } diff --git a/crates/hir-def/src/item_tree/pretty.rs b/crates/hir-def/src/item_tree/pretty.rs index da1643152c..48c40df22f 100644 --- a/crates/hir-def/src/item_tree/pretty.rs +++ b/crates/hir-def/src/item_tree/pretty.rs @@ -375,12 +375,21 @@ impl<'a> Printer<'a> { } w!(self, "trait {}", name); self.print_generic_params(generic_params); - self.print_where_clause_and_opening_brace(generic_params); - self.indented(|this| { - for item in &**items { - this.print_mod_item((*item).into()); + match items { + Some(items) => { + self.print_where_clause_and_opening_brace(generic_params); + self.indented(|this| { + for item in &**items { + this.print_mod_item((*item).into()); + } + }); } - }); + None => { + w!(self, " = "); + // FIXME: Print the aliased traits + self.print_where_clause_and_opening_brace(generic_params); + } + } wln!(self, "}}"); } ModItem::Impl(it) => { diff --git a/crates/syntax/rust.ungram b/crates/syntax/rust.ungram index 5379732ac6..0a0cb0290d 100644 --- a/crates/syntax/rust.ungram +++ b/crates/syntax/rust.ungram @@ -239,8 +239,11 @@ Static = Trait = Attr* Visibility? 'unsafe'? 'auto'? - 'trait' Name GenericParamList? (':' TypeBoundList?)? WhereClause? - AssocItemList + 'trait' Name GenericParamList? + ( + (':' TypeBoundList?)? WhereClause? AssocItemList + | '=' TypeBoundList? WhereClause? 
';' + ) AssocItemList = '{' Attr* AssocItem* '}' diff --git a/crates/syntax/src/ast/generated/nodes.rs b/crates/syntax/src/ast/generated/nodes.rs index 6cfb98d92f..2ea715f47f 100644 --- a/crates/syntax/src/ast/generated/nodes.rs +++ b/crates/syntax/src/ast/generated/nodes.rs @@ -407,6 +407,8 @@ impl Trait { pub fn auto_token(&self) -> Option { support::token(&self.syntax, T![auto]) } pub fn trait_token(&self) -> Option { support::token(&self.syntax, T![trait]) } pub fn assoc_item_list(&self) -> Option { support::child(&self.syntax) } + pub fn eq_token(&self) -> Option { support::token(&self.syntax, T![=]) } + pub fn semicolon_token(&self) -> Option { support::token(&self.syntax, T![;]) } } #[derive(Debug, Clone, PartialEq, Eq, Hash)] From 6b4b7d81e461b758a5785e2f8ac6eea0ed71e6f2 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 11 Nov 2022 16:57:05 +0100 Subject: [PATCH 0565/1945] internal: Add version info to unsupported proc macro abi error --- crates/proc-macro-srv/src/abis/mod.rs | 2 +- crates/proc-macro-srv/src/dylib.rs | 4 ++-- crates/proc-macro-srv/src/lib.rs | 4 ++-- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/crates/proc-macro-srv/src/abis/mod.rs b/crates/proc-macro-srv/src/abis/mod.rs index 2f854bc159..0ce099ae0b 100644 --- a/crates/proc-macro-srv/src/abis/mod.rs +++ b/crates/proc-macro-srv/src/abis/mod.rs @@ -117,7 +117,7 @@ impl Abi { let inner = unsafe { Abi_1_63::from_lib(lib, symbol_name) }?; Ok(Abi::Abi1_63(inner)) } - _ => Err(LoadProcMacroDylibError::UnsupportedABI), + _ => Err(LoadProcMacroDylibError::UnsupportedABI(info.version_string.clone())), } } diff --git a/crates/proc-macro-srv/src/dylib.rs b/crates/proc-macro-srv/src/dylib.rs index 7aba74e539..0722cd89d7 100644 --- a/crates/proc-macro-srv/src/dylib.rs +++ b/crates/proc-macro-srv/src/dylib.rs @@ -80,14 +80,14 @@ fn load_library(file: &Path) -> Result { pub enum LoadProcMacroDylibError { Io(io::Error), LibLoading(libloading::Error), - UnsupportedABI, + UnsupportedABI(String), } impl fmt::Display for LoadProcMacroDylibError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Self::Io(e) => e.fmt(f), - Self::UnsupportedABI => write!(f, "unsupported ABI version"), + Self::UnsupportedABI(v) => write!(f, "unsupported ABI `{v}`"), Self::LibLoading(e) => e.fmt(f), } } diff --git a/crates/proc-macro-srv/src/lib.rs b/crates/proc-macro-srv/src/lib.rs index 72a2dfe72d..b4f5ebd157 100644 --- a/crates/proc-macro-srv/src/lib.rs +++ b/crates/proc-macro-srv/src/lib.rs @@ -113,12 +113,12 @@ impl ProcMacroSrv { fn expander(&mut self, path: &Path) -> Result<&dylib::Expander, String> { let time = fs::metadata(path).and_then(|it| it.modified()).map_err(|err| { - format!("Failed to get file metadata for {}: {:?}", path.display(), err) + format!("Failed to get file metadata for {}: {}", path.display(), err) })?; Ok(match self.expanders.entry((path.to_path_buf(), time)) { Entry::Vacant(v) => v.insert(dylib::Expander::new(path).map_err(|err| { - format!("Cannot create expander for {}: {:?}", path.display(), err) + format!("Cannot create expander for {}: {}", path.display(), err) })?), Entry::Occupied(e) => e.into_mut(), }) From f26d5484d89bd41e4fe89367fc4d13463d498d8b Mon Sep 17 00:00:00 2001 From: yue4u Date: Sat, 12 Nov 2022 22:33:40 +0900 Subject: [PATCH 0566/1945] fix: filter unnecessary completions after colon --- crates/ide-completion/src/context.rs | 42 ++++++++- crates/ide-completion/src/lib.rs | 1 - crates/ide-completion/src/tests/attribute.rs | 24 +++++ 
crates/ide-completion/src/tests/special.rs | 97 +++++++++++++++++++- crates/rust-analyzer/src/handlers.rs | 14 +-- 5 files changed, 162 insertions(+), 16 deletions(-) diff --git a/crates/ide-completion/src/context.rs b/crates/ide-completion/src/context.rs index 9850813a0c..27f6745d24 100644 --- a/crates/ide-completion/src/context.rs +++ b/crates/ide-completion/src/context.rs @@ -19,7 +19,7 @@ use syntax::{ ast::{self, AttrKind, NameOrNameRef}, AstNode, SyntaxKind::{self, *}, - SyntaxToken, TextRange, TextSize, + SyntaxToken, TextRange, TextSize, T, }; use text_edit::Indel; @@ -569,6 +569,28 @@ impl<'a> CompletionContext<'a> { // completing on let original_token = original_file.syntax().token_at_offset(offset).left_biased()?; + // try to skip completions on path with qinvalid colons + // this approach works in normal path and inside token tree + match original_token.kind() { + T![:] => { + // return if no prev token before colon + let prev_token = original_token.prev_token()?; + + // only has a single colon + if prev_token.kind() != T![:] { + return None; + } + + if !is_prev_token_valid_path_start_or_segment(&prev_token) { + return None; + } + } + T![::] if !is_prev_token_valid_path_start_or_segment(&original_token) => { + return None; + } + _ => {} + } + let AnalysisResult { analysis, expected: (expected_type, expected_name), @@ -618,6 +640,24 @@ impl<'a> CompletionContext<'a> { } } +fn is_prev_token_valid_path_start_or_segment(token: &SyntaxToken) -> bool { + if let Some(prev_token) = token.prev_token() { + // token before coloncolon is invalid + if !matches!( + prev_token.kind(), + // trival + WHITESPACE | COMMENT + // PathIdentSegment + | IDENT | T![super] | T![self] | T![Self] | T![crate] + // QualifiedPath + | T![>] + ) { + return false; + } + } + true +} + const OP_TRAIT_LANG_NAMES: &[&str] = &[ "add_assign", "add", diff --git a/crates/ide-completion/src/lib.rs b/crates/ide-completion/src/lib.rs index 9d0044e55f..4b48ec6bc3 100644 --- a/crates/ide-completion/src/lib.rs +++ b/crates/ide-completion/src/lib.rs @@ -164,7 +164,6 @@ pub fn completions( completions::vis::complete_vis_path(&mut completions, ctx, path_ctx, has_in_token); } } - // prevent `(` from triggering unwanted completion noise return Some(completions.into()); } diff --git a/crates/ide-completion/src/tests/attribute.rs b/crates/ide-completion/src/tests/attribute.rs index 1578ba2c37..4e60820dd6 100644 --- a/crates/ide-completion/src/tests/attribute.rs +++ b/crates/ide-completion/src/tests/attribute.rs @@ -607,6 +607,30 @@ fn attr_in_source_file_end() { ); } +#[test] +fn invalid_path() { + check( + r#" +//- proc_macros: identity +#[proc_macros:::$0] +struct Foo; +"#, + expect![[r#""#]], + ); + + check( + r#" +//- minicore: derive, copy +mod foo { + pub use Copy as Bar; +} +#[derive(foo:::::$0)] +struct Foo; +"#, + expect![""], + ); +} + mod cfg { use super::*; diff --git a/crates/ide-completion/src/tests/special.rs b/crates/ide-completion/src/tests/special.rs index 033dc99c26..1aea5d89b4 100644 --- a/crates/ide-completion/src/tests/special.rs +++ b/crates/ide-completion/src/tests/special.rs @@ -2,13 +2,22 @@ use expect_test::{expect, Expect}; -use crate::tests::{check_edit, completion_list_no_kw}; +use crate::tests::{check_edit, completion_list_no_kw, completion_list_with_trigger_character}; fn check(ra_fixture: &str, expect: Expect) { let actual = completion_list_no_kw(ra_fixture); expect.assert_eq(&actual) } +pub(crate) fn check_with_trigger_character( + ra_fixture: &str, + trigger_character: Option, + expect: Expect, +) 
{ + let actual = completion_list_with_trigger_character(ra_fixture, trigger_character); + expect.assert_eq(&actual) +} + #[test] fn completes_if_prefix_is_keyword() { check_edit( @@ -893,3 +902,89 @@ fn f() { "#]], ); } + +#[test] +fn completes_after_colon_with_trigger() { + check_with_trigger_character( + r#" +//- minicore: option +fn foo { ::$0 } +"#, + Some(':'), + expect![[r#" + md core + "#]], + ); + check_with_trigger_character( + r#" +//- minicore: option +fn foo { /* test */::$0 } +"#, + Some(':'), + expect![[r#" + md core + "#]], + ); + + check_with_trigger_character( + r#" +fn foo { crate::$0 } +"#, + Some(':'), + expect![[r#" + fn foo() fn() + "#]], + ); + + check_with_trigger_character( + r#" +fn foo { crate:$0 } +"#, + Some(':'), + expect![""], + ); +} + +#[test] +fn completes_after_colon_without_trigger() { + check_with_trigger_character( + r#" +fn foo { crate::$0 } +"#, + None, + expect![[r#" + fn foo() fn() + "#]], + ); + + check_with_trigger_character( + r#" +fn foo { crate:$0 } +"#, + None, + expect![""], + ); +} + +#[test] +fn no_completions_in_invalid_path() { + check( + r#" +fn foo { crate:::$0 } +"#, + expect![""], + ); + check( + r#" +fn foo { crate::::$0 } +"#, + expect![""], + ); + + check( + r#" +fn foo { crate:::::$0 } +"#, + expect![""], + ); +} diff --git a/crates/rust-analyzer/src/handlers.rs b/crates/rust-analyzer/src/handlers.rs index d190a9f4e2..4f318f39de 100644 --- a/crates/rust-analyzer/src/handlers.rs +++ b/crates/rust-analyzer/src/handlers.rs @@ -28,7 +28,7 @@ use lsp_types::{ use project_model::{ManifestPath, ProjectWorkspace, TargetKind}; use serde_json::json; use stdx::{format_to, never}; -use syntax::{algo, ast, AstNode, TextRange, TextSize, T}; +use syntax::{algo, ast, AstNode, TextRange, TextSize}; use vfs::AbsPathBuf; use crate::{ @@ -812,18 +812,6 @@ pub(crate) fn handle_completion( let completion_trigger_character = params.context.and_then(|ctx| ctx.trigger_character).and_then(|s| s.chars().next()); - if Some(':') == completion_trigger_character { - let source_file = snap.analysis.parse(position.file_id)?; - let left_token = source_file.syntax().token_at_offset(position.offset).left_biased(); - let completion_triggered_after_single_colon = match left_token { - Some(left_token) => left_token.kind() == T![:], - None => true, - }; - if completion_triggered_after_single_colon { - return Ok(None); - } - } - let completion_config = &snap.config.completion(); let items = match snap.analysis.completions( completion_config, From 46417add8ddc623f8abb8e96861dba721044f656 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mateusz=20Miku=C5=82a?= Date: Sun, 13 Nov 2022 22:23:56 +0100 Subject: [PATCH 0567/1945] Update several crates to bring support for the new Tier 3 Windows targets --- Cargo.lock | 92 ++++++++++++++++++++++++------------------ crates/stdx/Cargo.toml | 2 +- 2 files changed, 54 insertions(+), 40 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c04906c453..41c5d36671 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -310,7 +310,7 @@ dependencies = [ "hashbrown", "lock_api", "once_cell", - "parking_lot_core 0.9.3", + "parking_lot_core 0.9.4", ] [[package]] @@ -369,14 +369,14 @@ dependencies = [ [[package]] name = "filetime" -version = "0.2.17" +version = "0.2.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e94a7bbaa59354bc20dd75b67f23e2797b4490e9d6928203fb105c79e448c86c" +checksum = "4b9663d381d07ae25dc88dbdf27df458faa83a9b25336bcac83d5e452b5fc9d3" dependencies = [ "cfg-if", "libc", "redox_syscall", - "windows-sys 
0.36.1", + "windows-sys 0.42.0", ] [[package]] @@ -974,11 +974,11 @@ dependencies = [ [[package]] name = "miow" -version = "0.4.0" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7377f7792b3afb6a3cba68daa54ca23c032137010460d667fda53a8d66be00e" +checksum = "52ffbca2f655e33c08be35d87278e5b18b89550a37dbd598c20db92f6a471123" dependencies = [ - "windows-sys 0.28.0", + "windows-sys 0.42.0", ] [[package]] @@ -1061,7 +1061,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" dependencies = [ "lock_api", - "parking_lot_core 0.9.3", + "parking_lot_core 0.9.4", ] [[package]] @@ -1080,15 +1080,15 @@ dependencies = [ [[package]] name = "parking_lot_core" -version = "0.9.3" +version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09a279cbf25cb0757810394fbc1e359949b59e348145c643a939a525692e6929" +checksum = "4dc9e0dc2adc1c69d09143aff38d3d30c5c3f0df0dad82e6d25547af174ebec0" dependencies = [ "cfg-if", "libc", "redox_syscall", "smallvec", - "windows-sys 0.36.1", + "windows-sys 0.42.0", ] [[package]] @@ -2003,19 +2003,6 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" -[[package]] -name = "windows-sys" -version = "0.28.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "82ca39602d5cbfa692c4b67e3bcbb2751477355141c1ed434c94da4186836ff6" -dependencies = [ - "windows_aarch64_msvc 0.28.0", - "windows_i686_gnu 0.28.0", - "windows_i686_msvc 0.28.0", - "windows_x86_64_gnu 0.28.0", - "windows_x86_64_msvc 0.28.0", -] - [[package]] name = "windows-sys" version = "0.36.1" @@ -2030,10 +2017,25 @@ dependencies = [ ] [[package]] -name = "windows_aarch64_msvc" -version = "0.28.0" +name = "windows-sys" +version = "0.42.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52695a41e536859d5308cc613b4a022261a274390b25bd29dfff4bf08505f3c2" +checksum = "5a3e1820f08b8513f676f7ab6c1f99ff312fb97b553d30ff4dd86f9f15728aa7" +dependencies = [ + "windows_aarch64_gnullvm", + "windows_aarch64_msvc 0.42.0", + "windows_i686_gnu 0.42.0", + "windows_i686_msvc 0.42.0", + "windows_x86_64_gnu 0.42.0", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc 0.42.0", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.42.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41d2aa71f6f0cbe00ae5167d90ef3cfe66527d6f613ca78ac8024c3ccab9a19e" [[package]] name = "windows_aarch64_msvc" @@ -2042,10 +2044,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9bb8c3fd39ade2d67e9874ac4f3db21f0d710bee00fe7cab16949ec184eeaa47" [[package]] -name = "windows_i686_gnu" -version = "0.28.0" +name = "windows_aarch64_msvc" +version = "0.42.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f54725ac23affef038fecb177de6c9bf065787c2f432f79e3c373da92f3e1d8a" +checksum = "dd0f252f5a35cac83d6311b2e795981f5ee6e67eb1f9a7f64eb4500fbc4dcdb4" [[package]] name = "windows_i686_gnu" @@ -2054,10 +2056,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "180e6ccf01daf4c426b846dfc66db1fc518f074baa793aa7d9b9aaeffad6a3b6" [[package]] -name = "windows_i686_msvc" -version = "0.28.0" +name = "windows_i686_gnu" +version = "0.42.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"51d5158a43cc43623c0729d1ad6647e62fa384a3d135fd15108d37c683461f64" +checksum = "fbeae19f6716841636c28d695375df17562ca208b2b7d0dc47635a50ae6c5de7" [[package]] name = "windows_i686_msvc" @@ -2066,10 +2068,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e2e7917148b2812d1eeafaeb22a97e4813dfa60a3f8f78ebe204bcc88f12f024" [[package]] -name = "windows_x86_64_gnu" -version = "0.28.0" +name = "windows_i686_msvc" +version = "0.42.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc31f409f565611535130cfe7ee8e6655d3fa99c1c61013981e491921b5ce954" +checksum = "84c12f65daa39dd2babe6e442988fc329d6243fdce47d7d2d155b8d874862246" [[package]] name = "windows_x86_64_gnu" @@ -2078,10 +2080,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4dcd171b8776c41b97521e5da127a2d86ad280114807d0b2ab1e462bc764d9e1" [[package]] -name = "windows_x86_64_msvc" -version = "0.28.0" +name = "windows_x86_64_gnu" +version = "0.42.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f2b8c7cbd3bfdddd9ab98769f9746a7fad1bca236554cd032b78d768bc0e89f" +checksum = "bf7b1b21b5362cbc318f686150e5bcea75ecedc74dd157d874d754a2ca44b0ed" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.42.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09d525d2ba30eeb3297665bd434a54297e4170c7f1a44cad4ef58095b4cd2028" [[package]] name = "windows_x86_64_msvc" @@ -2089,6 +2097,12 @@ version = "0.36.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c811ca4a8c853ef420abd8592ba53ddbbac90410fab6903b3e79972a631f7680" +[[package]] +name = "windows_x86_64_msvc" +version = "0.42.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f40009d85759725a34da6d89a94e63d7bdc50a862acf0dbc7c8e488f1edcb6f5" + [[package]] name = "write-json" version = "0.1.2" diff --git a/crates/stdx/Cargo.toml b/crates/stdx/Cargo.toml index 957d16c036..f7b7d09640 100644 --- a/crates/stdx/Cargo.toml +++ b/crates/stdx/Cargo.toml @@ -16,7 +16,7 @@ always-assert = { version = "0.1.2", features = ["log"] } # Think twice before adding anything here [target.'cfg(windows)'.dependencies] -miow = "0.4.0" +miow = "0.5.0" winapi = { version = "0.3.9", features = ["winerror"] } [features] From 15dfeabb96b370e9fab1ddbb8b39a9dcefc3764b Mon Sep 17 00:00:00 2001 From: Jonas Schievink Date: Tue, 15 Nov 2022 12:05:11 +0100 Subject: [PATCH 0568/1945] Fix GAT completion not including generic parameters --- .../src/completions/item_list/trait_impl.rs | 77 +++++++++++++++---- 1 file changed, 63 insertions(+), 14 deletions(-) diff --git a/crates/ide-completion/src/completions/item_list/trait_impl.rs b/crates/ide-completion/src/completions/item_list/trait_impl.rs index e82cbfdcb8..b612cdc4a1 100644 --- a/crates/ide-completion/src/completions/item_list/trait_impl.rs +++ b/crates/ide-completion/src/completions/item_list/trait_impl.rs @@ -157,7 +157,7 @@ fn complete_trait_impl( add_function_impl(acc, ctx, replacement_range, func, hir_impl) } (hir::AssocItem::TypeAlias(type_alias), All | TypeAlias) => { - add_type_alias_impl(acc, ctx, replacement_range, type_alias) + add_type_alias_impl(acc, ctx, replacement_range, type_alias, hir_impl) } (hir::AssocItem::Const(const_), All | Const) => { add_const_impl(acc, ctx, replacement_range, const_, hir_impl) @@ -247,24 +247,50 @@ fn add_type_alias_impl( ctx: &CompletionContext<'_>, replacement_range: TextRange, type_alias: hir::TypeAlias, + impl_def: hir::Impl, 
) { - let alias_name = type_alias.name(ctx.db); - let (alias_name, escaped_name) = - (alias_name.unescaped().to_smol_str(), alias_name.to_smol_str()); + let alias_name = type_alias.name(ctx.db).unescaped().to_smol_str(); let label = format!("type {} =", alias_name); - let replacement = format!("type {} = ", escaped_name); let mut item = CompletionItem::new(SymbolKind::TypeAlias, replacement_range, label); item.lookup_by(format!("type {}", alias_name)) .set_documentation(type_alias.docs(ctx.db)) .set_relevance(CompletionRelevance { is_item_from_trait: true, ..Default::default() }); - match ctx.config.snippet_cap { - Some(cap) => item - .snippet_edit(cap, TextEdit::replace(replacement_range, format!("{}$0;", replacement))), - None => item.text_edit(TextEdit::replace(replacement_range, replacement)), - }; - item.add_to(acc); + + if let Some(source) = ctx.sema.source(type_alias) { + let assoc_item = ast::AssocItem::TypeAlias(source.value); + if let Some(transformed_item) = get_transformed_assoc_item(ctx, assoc_item, impl_def) { + let transformed_ty = match transformed_item { + ast::AssocItem::TypeAlias(ty) => ty, + _ => unreachable!(), + }; + + let start = transformed_ty.syntax().text_range().start(); + let Some(end) = transformed_ty + .eq_token() + .map(|tok| tok.text_range().start()) + .or(transformed_ty.semicolon_token().map(|tok| tok.text_range().start())) else { return }; + + let len = end - start; + let mut decl = transformed_ty.syntax().text().slice(..len).to_string(); + if !decl.ends_with(' ') { + decl.push(' '); + } + decl.push_str("= "); + + match ctx.config.snippet_cap { + Some(cap) => { + let snippet = format!("{}$0;", decl); + item.snippet_edit(cap, TextEdit::replace(replacement_range, snippet)); + } + None => { + item.text_edit(TextEdit::replace(replacement_range, decl)); + } + }; + item.add_to(acc); + } + } } fn add_const_impl( @@ -350,9 +376,7 @@ fn function_declaration(node: &ast::Fn, needs_whitespace: bool) -> String { .map_or(end, |f| f.text_range().start()); let len = end - start; - let range = TextRange::new(0.into(), len); - - let syntax = node.text().slice(range).to_string(); + let syntax = node.text().slice(..len).to_string(); syntax.trim_end().to_owned() } @@ -1160,6 +1184,31 @@ impl Foo for Test { $0 } } +"#, + ); + } + + #[test] + fn includes_gat_generics() { + check_edit( + "type Ty", + r#" +trait Tr<'b> { + type Ty<'a: 'b, T: Copy, const C: usize>; +} + +impl<'b> Tr<'b> for () { + $0 +} +"#, + r#" +trait Tr<'b> { + type Ty<'a: 'b, T: Copy, const C: usize>; +} + +impl<'b> Tr<'b> for () { + type Ty<'a: 'b, T: Copy, const C: usize> = $0; +} "#, ); } From 7e77d4e310fa36c9ede7905be5971776a075be16 Mon Sep 17 00:00:00 2001 From: Jonas Schievink Date: Tue, 15 Nov 2022 12:41:39 +0100 Subject: [PATCH 0569/1945] Strip comments and attributes off of all trait item completions --- .../src/completions/item_list/trait_impl.rs | 82 +++++++++++++++++-- 1 file changed, 76 insertions(+), 6 deletions(-) diff --git a/crates/ide-completion/src/completions/item_list/trait_impl.rs b/crates/ide-completion/src/completions/item_list/trait_impl.rs index b612cdc4a1..7384a3f2d8 100644 --- a/crates/ide-completion/src/completions/item_list/trait_impl.rs +++ b/crates/ide-completion/src/completions/item_list/trait_impl.rs @@ -236,9 +236,7 @@ fn get_transformed_assoc_item( ); transform.apply(assoc_item.syntax()); - if let ast::AssocItem::Fn(func) = &assoc_item { - func.remove_attrs_and_docs(); - } + assoc_item.remove_attrs_and_docs(); Some(assoc_item) } @@ -335,7 +333,6 @@ fn add_const_impl( } fn 
make_const_compl_syntax(const_: &ast::Const, needs_whitespace: bool) -> String { - const_.remove_attrs_and_docs(); let const_ = if needs_whitespace { insert_whitespace_into_node::insert_ws_into(const_.syntax().clone()) } else { @@ -359,8 +356,6 @@ fn make_const_compl_syntax(const_: &ast::Const, needs_whitespace: bool) -> Strin } fn function_declaration(node: &ast::Fn, needs_whitespace: bool) -> String { - node.remove_attrs_and_docs(); - let node = if needs_whitespace { insert_whitespace_into_node::insert_ws_into(node.syntax().clone()) } else { @@ -1209,6 +1204,81 @@ trait Tr<'b> { impl<'b> Tr<'b> for () { type Ty<'a: 'b, T: Copy, const C: usize> = $0; } +"#, + ); + } + + #[test] + fn strips_comments() { + check_edit( + "fn func", + r#" +trait Tr { + /// docs + #[attr] + fn func(); +} +impl Tr for () { + $0 +} +"#, + r#" +trait Tr { + /// docs + #[attr] + fn func(); +} +impl Tr for () { + fn func() { + $0 +} +} +"#, + ); + check_edit( + "const C", + r#" +trait Tr { + /// docs + #[attr] + const C: usize; +} +impl Tr for () { + $0 +} +"#, + r#" +trait Tr { + /// docs + #[attr] + const C: usize; +} +impl Tr for () { + const C: usize = $0; +} +"#, + ); + check_edit( + "type Item", + r#" +trait Tr { + /// docs + #[attr] + type Item; +} +impl Tr for () { + $0 +} +"#, + r#" +trait Tr { + /// docs + #[attr] + type Item; +} +impl Tr for () { + type Item = $0; +} "#, ); } From 1ad11b53661979624ba1bfcf4520d5e2b9edbc01 Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Wed, 16 Nov 2022 19:54:07 +0900 Subject: [PATCH 0570/1945] fix: resolve inference variable before applying adjustments --- crates/hir-ty/src/method_resolution.rs | 2 +- crates/hir-ty/src/tests/regression.rs | 16 ++++++++++++++++ 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/crates/hir-ty/src/method_resolution.rs b/crates/hir-ty/src/method_resolution.rs index b1178ba0d2..20bed7bf3c 100644 --- a/crates/hir-ty/src/method_resolution.rs +++ b/crates/hir-ty/src/method_resolution.rs @@ -541,7 +541,7 @@ pub struct ReceiverAdjustments { impl ReceiverAdjustments { pub(crate) fn apply(&self, table: &mut InferenceTable<'_>, ty: Ty) -> (Ty, Vec) { - let mut ty = ty; + let mut ty = table.resolve_ty_shallow(&ty); let mut adjust = Vec::new(); for _ in 0..self.autoderefs { match autoderef::autoderef_step(table, ty.clone()) { diff --git a/crates/hir-ty/src/tests/regression.rs b/crates/hir-ty/src/tests/regression.rs index a155adcec6..4e46397459 100644 --- a/crates/hir-ty/src/tests/regression.rs +++ b/crates/hir-ty/src/tests/regression.rs @@ -1707,3 +1707,19 @@ impl Trait for [T; N] { "#, ); } + +#[test] +fn unsize_array_with_inference_variable() { + check_types( + r#" +//- minicore: try, slice +use core::ops::ControlFlow; +fn foo() -> ControlFlow<(), [usize; 1]> { loop {} } +fn bar() -> ControlFlow<(), ()> { + let a = foo()?.len(); + //^ usize + ControlFlow::Continue(()) +} +"#, + ); +} From 7577c44c65321b20b165445e417329619a9bc506 Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Thu, 17 Nov 2022 01:42:56 +0900 Subject: [PATCH 0571/1945] Update proc-macro-srv tests --- crates/proc-macro-srv/src/tests/mod.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/proc-macro-srv/src/tests/mod.rs b/crates/proc-macro-srv/src/tests/mod.rs index b46cdddcf6..cc0fc91fe9 100644 --- a/crates/proc-macro-srv/src/tests/mod.rs +++ b/crates/proc-macro-srv/src/tests/mod.rs @@ -19,7 +19,7 @@ fn test_derive_error() { expect![[r##" SUBTREE $ IDENT compile_error 4294967295 - PUNCH ! [joint] 4294967295 + PUNCH ! 
[alone] 4294967295 SUBTREE () 4294967295 LITERAL "#[derive(DeriveError)] struct S ;" 4294967295 PUNCH ; [alone] 4294967295"##]], @@ -109,7 +109,7 @@ fn test_fn_like_macro_clone_literals() { PUNCH , [alone] 4294967295 LITERAL 2_u32 4294967295 PUNCH , [alone] 4294967295 - PUNCH - [joint] 4294967295 + PUNCH - [alone] 4294967295 LITERAL 4i64 4294967295 PUNCH , [alone] 4294967295 LITERAL 3.14f32 4294967295 @@ -130,7 +130,7 @@ fn test_attr_macro() { expect![[r##" SUBTREE $ IDENT compile_error 4294967295 - PUNCH ! [joint] 4294967295 + PUNCH ! [alone] 4294967295 SUBTREE () 4294967295 LITERAL "#[attr_error(some arguments)] mod m {}" 4294967295 PUNCH ; [alone] 4294967295"##]], From cd6459e7b389f1127200e83f99da45fc96b01589 Mon Sep 17 00:00:00 2001 From: Jonas Schievink Date: Thu, 17 Nov 2022 17:39:31 +0100 Subject: [PATCH 0572/1945] Make "Remove dbg!()" assist work on selections --- crates/ide-assists/src/handlers/remove_dbg.rs | 63 ++++++++++++++++--- 1 file changed, 56 insertions(+), 7 deletions(-) diff --git a/crates/ide-assists/src/handlers/remove_dbg.rs b/crates/ide-assists/src/handlers/remove_dbg.rs index 3d9cbff177..99ae60e07b 100644 --- a/crates/ide-assists/src/handlers/remove_dbg.rs +++ b/crates/ide-assists/src/handlers/remove_dbg.rs @@ -1,7 +1,7 @@ use itertools::Itertools; use syntax::{ ast::{self, AstNode, AstToken}, - match_ast, NodeOrToken, SyntaxElement, TextSize, T, + match_ast, NodeOrToken, SyntaxElement, TextRange, TextSize, T, }; use crate::{AssistContext, AssistId, AssistKind, Assists}; @@ -22,7 +22,36 @@ use crate::{AssistContext, AssistId, AssistKind, Assists}; // } // ``` pub(crate) fn remove_dbg(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { - let macro_call = ctx.find_node_at_offset::()?; + let macro_calls = if ctx.has_empty_selection() { + vec![ctx.find_node_at_offset::()?] + } else { + ctx.covering_element() + .as_node()? + .descendants() + .filter(|node| ctx.selection_trimmed().contains_range(node.text_range())) + .filter_map(ast::MacroCall::cast) + .collect() + }; + + let replacements = + macro_calls.into_iter().filter_map(compute_dbg_replacement).collect::>(); + if replacements.is_empty() { + return None; + } + + acc.add( + AssistId("remove_dbg", AssistKind::Refactor), + "Remove dbg!()", + ctx.selection_trimmed(), + |builder| { + for (range, text) in replacements { + builder.replace(range, text); + } + }, + ) +} + +fn compute_dbg_replacement(macro_call: ast::MacroCall) -> Option<(TextRange, String)> { let tt = macro_call.token_tree()?; let r_delim = NodeOrToken::Token(tt.right_delimiter_token()?); if macro_call.path()?.segment()?.name_ref()?.text() != "dbg" @@ -41,7 +70,7 @@ pub(crate) fn remove_dbg(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<( let macro_expr = ast::MacroExpr::cast(macro_call.syntax().parent()?)?; let parent = macro_expr.syntax().parent()?; - let (range, text) = match &*input_expressions { + Some(match &*input_expressions { // dbg!() [] => { match_ast! { @@ -107,10 +136,6 @@ pub(crate) fn remove_dbg(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<( } // dbg!(expr0, expr1, ...) 
exprs => (macro_call.syntax().text_range(), format!("({})", exprs.iter().format(", "))), - }; - - acc.add(AssistId("remove_dbg", AssistKind::Refactor), "Remove dbg!()", range, |builder| { - builder.replace(range, text); }) } @@ -238,4 +263,28 @@ fn foo() { check(r#"$0dbg!(0, 1)"#, r#"(0, 1)"#); check(r#"$0dbg!(0, (1, 2))"#, r#"(0, (1, 2))"#); } + + #[test] + fn test_range() { + check( + r#" +fn f() { + dbg!(0) + $0dbg!(1); + dbg!(())$0 +} +"#, + r#" +fn f() { + dbg!(0) + 1; + () +} +"#, + ); + } + + #[test] + fn test_range_partial() { + check_assist_not_applicable(remove_dbg, r#"$0dbg$0!(0)"#); + check_assist_not_applicable(remove_dbg, r#"$0dbg!(0$0)"#); + } } From 656d886ca8a37ac4b0df60f48d699584d3abca46 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 18 Nov 2022 11:31:12 +0100 Subject: [PATCH 0573/1945] Make it more obvious which SCIP features we do not yet emit in code --- crates/rust-analyzer/src/cli/scip.rs | 113 ++++++++++++++++----------- 1 file changed, 67 insertions(+), 46 deletions(-) diff --git a/crates/rust-analyzer/src/cli/scip.rs b/crates/rust-analyzer/src/cli/scip.rs index ca7ba896b6..9edd045ab0 100644 --- a/crates/rust-analyzer/src/cli/scip.rs +++ b/crates/rust-analyzer/src/cli/scip.rs @@ -47,30 +47,27 @@ impl flags::Scip { let si = StaticIndex::compute(&analysis); - let mut index = scip_types::Index { - metadata: Some(scip_types::Metadata { - version: scip_types::ProtocolVersion::UnspecifiedProtocolVersion.into(), - tool_info: Some(scip_types::ToolInfo { - name: "rust-analyzer".to_owned(), - version: "0.1".to_owned(), - arguments: vec![], - ..Default::default() - }) - .into(), - project_root: format!( - "file://{}", - path.normalize() - .as_os_str() - .to_str() - .ok_or(anyhow::anyhow!("Unable to normalize project_root path"))? - .to_string() - ), - text_document_encoding: scip_types::TextEncoding::UTF8.into(), - ..Default::default() + let metadata = scip_types::Metadata { + version: scip_types::ProtocolVersion::UnspecifiedProtocolVersion.into(), + tool_info: Some(scip_types::ToolInfo { + name: "rust-analyzer".to_owned(), + version: "0.1".to_owned(), + arguments: vec![], + special_fields: Default::default(), }) .into(), - ..Default::default() + project_root: format!( + "file://{}", + path.normalize() + .as_os_str() + .to_str() + .ok_or(anyhow::anyhow!("Unable to normalize project_root path"))? 
+ .to_string() + ), + text_document_encoding: scip_types::TextEncoding::UTF8.into(), + special_fields: Default::default(), }; + let mut documents = Vec::new(); let mut symbols_emitted: HashSet = HashSet::default(); let mut tokens_to_symbol: HashMap = HashMap::new(); @@ -95,18 +92,14 @@ impl flags::Scip { endings: LineEndings::Unix, }; - let mut doc = scip_types::Document { - relative_path, - language: "rust".to_string(), - ..Default::default() - }; + let mut occurrences = Vec::new(); + let mut symbols = Vec::new(); - tokens.into_iter().for_each(|(range, id)| { + tokens.into_iter().for_each(|(text_range, id)| { let token = si.tokens.get(id).unwrap(); - let mut occurrence = scip_types::Occurrence::default(); - occurrence.range = text_range_to_scip_range(&line_index, range); - occurrence.symbol = tokens_to_symbol + let range = text_range_to_scip_range(&line_index, text_range); + let symbol = tokens_to_symbol .entry(id) .or_insert_with(|| { let symbol = token_to_symbol(&token).unwrap_or_else(&mut new_local_symbol); @@ -114,34 +107,62 @@ impl flags::Scip { }) .clone(); + let mut symbol_roles = Default::default(); + if let Some(def) = token.definition { - if def.range == range { - occurrence.symbol_roles |= scip_types::SymbolRole::Definition as i32; + if def.range == text_range { + symbol_roles |= scip_types::SymbolRole::Definition as i32; } if symbols_emitted.insert(id) { - let mut symbol_info = scip_types::SymbolInformation::default(); - symbol_info.symbol = occurrence.symbol.clone(); - if let Some(hover) = &token.hover { - if !hover.markup.as_str().is_empty() { - symbol_info.documentation = vec![hover.markup.as_str().to_string()]; - } - } + let documentation = token + .hover + .as_ref() + .map(|hover| hover.markup.as_str()) + .filter(|it| !it.is_empty()) + .map(|it| vec![it.to_owned()]); + let symbol_info = scip_types::SymbolInformation { + symbol: symbol.clone(), + documentation: documentation.unwrap_or_default(), + relationships: Vec::new(), + special_fields: Default::default(), + }; - doc.symbols.push(symbol_info) + symbols.push(symbol_info) } } - doc.occurrences.push(occurrence); + occurrences.push(scip_types::Occurrence { + range, + symbol, + symbol_roles, + override_documentation: Vec::new(), + syntax_kind: Default::default(), + diagnostics: Vec::new(), + special_fields: Default::default(), + }); }); - if doc.occurrences.is_empty() { + if occurrences.is_empty() { continue; } - index.documents.push(doc); + documents.push(scip_types::Document { + relative_path, + language: "rust".to_string(), + occurrences, + symbols, + special_fields: Default::default(), + }); } + let index = scip_types::Index { + metadata: Some(metadata).into(), + documents, + external_symbols: Vec::new(), + special_fields: Default::default(), + }; + scip::write_message_to_file("index.scip", index) .map_err(|err| anyhow::anyhow!("Failed to write scip to file: {}", err))?; @@ -181,7 +202,7 @@ fn new_descriptor_str( name: name.to_string(), disambiguator: "".to_string(), suffix: suffix.into(), - ..Default::default() + special_fields: Default::default(), } } @@ -232,11 +253,11 @@ fn token_to_symbol(token: &TokenStaticData) -> Option { manager: "cargo".to_string(), name: package_name, version: version.unwrap_or_else(|| ".".to_string()), - ..Default::default() + special_fields: Default::default(), }) .into(), descriptors, - ..Default::default() + special_fields: Default::default(), }) } From 073a63b93ec39b51cb412119c38716c1af5db871 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 18 Nov 2022 19:47:45 +0100 Subject: 
[PATCH 0574/1945] feat: Allow viewing the full compiler diagnostic in a readonly textview --- .../rust-analyzer/src/diagnostics/to_proto.rs | 19 +++++---- editors/code/package.json | 5 +++ editors/code/src/client.ts | 42 ++++++++++++++++++- editors/code/src/config.ts | 3 ++ editors/code/src/ctx.ts | 3 +- editors/code/src/main.ts | 24 +++++++++++ 6 files changed, 84 insertions(+), 12 deletions(-) diff --git a/crates/rust-analyzer/src/diagnostics/to_proto.rs b/crates/rust-analyzer/src/diagnostics/to_proto.rs index 189ac2fbf5..35f37c740b 100644 --- a/crates/rust-analyzer/src/diagnostics/to_proto.rs +++ b/crates/rust-analyzer/src/diagnostics/to_proto.rs @@ -359,14 +359,15 @@ pub(crate) fn map_rust_diagnostic_to_lsp( .iter() .flat_map(|primary_span| { let primary_location = primary_location(config, workspace_root, primary_span, snap); - - let mut message = message.clone(); - if needs_primary_span_label { - if let Some(primary_span_label) = &primary_span.label { - format_to!(message, "\n{}", primary_span_label); + let message = { + let mut message = message.clone(); + if needs_primary_span_label { + if let Some(primary_span_label) = &primary_span.label { + format_to!(message, "\n{}", primary_span_label); + } } - } - + message + }; // Each primary diagnostic span may result in multiple LSP diagnostics. let mut diagnostics = Vec::new(); @@ -417,7 +418,7 @@ pub(crate) fn map_rust_diagnostic_to_lsp( message: message.clone(), related_information: Some(information_for_additional_diagnostic), tags: if tags.is_empty() { None } else { Some(tags.clone()) }, - data: None, + data: Some(serde_json::json!({ "rendered": rd.rendered })), }; diagnostics.push(MappedRustDiagnostic { url: secondary_location.uri, @@ -449,7 +450,7 @@ pub(crate) fn map_rust_diagnostic_to_lsp( } }, tags: if tags.is_empty() { None } else { Some(tags.clone()) }, - data: None, + data: Some(serde_json::json!({ "rendered": rd.rendered })), }, fix: None, }); diff --git a/editors/code/package.json b/editors/code/package.json index 0c78165960..b1c3473b82 100644 --- a/editors/code/package.json +++ b/editors/code/package.json @@ -396,6 +396,11 @@ "default": true, "type": "boolean" }, + "rust-analyzer.diagnostics.previewRustcOutput": { + "markdownDescription": "Whether to show the main part of the rendered rustc output of a diagnostic message.", + "default": false, + "type": "boolean" + }, "$generated-start": {}, "rust-analyzer.assist.emitMustUse": { "markdownDescription": "Whether to insert #[must_use] when generating `as_` methods\nfor enum variants.", diff --git a/editors/code/src/client.ts b/editors/code/src/client.ts index fb667619c8..23e039722e 100644 --- a/editors/code/src/client.ts +++ b/editors/code/src/client.ts @@ -4,7 +4,7 @@ import * as ra from "../src/lsp_ext"; import * as Is from "vscode-languageclient/lib/common/utils/is"; import { assert } from "./util"; import { WorkspaceEdit } from "vscode"; -import { substituteVSCodeVariables } from "./config"; +import { Config, substituteVSCodeVariables } from "./config"; import { randomUUID } from "crypto"; export interface Env { @@ -66,7 +66,8 @@ export async function createClient( traceOutputChannel: vscode.OutputChannel, outputChannel: vscode.OutputChannel, initializationOptions: vscode.WorkspaceConfiguration, - serverOptions: lc.ServerOptions + serverOptions: lc.ServerOptions, + config: Config ): Promise { const clientOptions: lc.LanguageClientOptions = { documentSelector: [{ scheme: "file", language: "rust" }], @@ -99,6 +100,43 @@ export async function createClient( } }, }, + async 
handleDiagnostics( + uri: vscode.Uri, + diagnostics: vscode.Diagnostic[], + next: lc.HandleDiagnosticsSignature + ) { + const preview = config.previewRustcOutput; + diagnostics.forEach((diag, idx) => { + // Abuse the fact that VSCode leaks the LSP diagnostics data field through the + // Diagnostic class, if they ever break this we are out of luck and have to go + // back to the worst diagnostics experience ever:) + + // We encode the rendered output of a rustc diagnostic in the rendered field of + // the data payload of the lsp diagnostic. If that field exists, overwrite the + // diagnostic code such that clicking it opens the diagnostic in a readonly + // text editor for easy inspection + const rendered = (diag as unknown as { data?: { rendered?: string } }).data + ?.rendered; + if (rendered) { + if (preview) { + const index = rendered.match(/^(note|help):/m)?.index || 0; + diag.message = rendered + .substring(0, index) + .replace(/^ -->[^\n]+\n/m, ""); + } + diag.code = { + target: vscode.Uri.from({ + scheme: "rust-analyzer-diagnostics-view", + path: "/diagnostic message", + fragment: uri.toString(), + query: idx.toString(), + }), + value: "Click for full compiler diagnostic", + }; + } + }); + return next(uri, diagnostics); + }, async provideHover( document: vscode.TextDocument, position: vscode.Position, diff --git a/editors/code/src/config.ts b/editors/code/src/config.ts index 632a7d86fa..d8dbd1df16 100644 --- a/editors/code/src/config.ts +++ b/editors/code/src/config.ts @@ -238,6 +238,9 @@ export class Config { gotoTypeDef: this.get("hover.actions.gotoTypeDef.enable"), }; } + get previewRustcOutput() { + return this.get("diagnostics.previewRustcOutput"); + } } const VarRegex = new RegExp(/\$\{(.+?)\}/g); diff --git a/editors/code/src/ctx.ts b/editors/code/src/ctx.ts index 3e366525ee..d6cee5c8fc 100644 --- a/editors/code/src/ctx.ts +++ b/editors/code/src/ctx.ts @@ -179,7 +179,8 @@ export class Ctx { this.traceOutputChannel, this.outputChannel, initializationOptions, - serverOptions + serverOptions, + this.config ); this.pushClientCleanup( this._client.onNotification(ra.serverStatus, (params) => diff --git a/editors/code/src/main.ts b/editors/code/src/main.ts index e76b657c1b..25f1e83d10 100644 --- a/editors/code/src/main.ts +++ b/editors/code/src/main.ts @@ -48,6 +48,30 @@ async function activateServer(ctx: Ctx): Promise { ctx.pushExtCleanup(activateTaskProvider(ctx.config)); } + ctx.pushExtCleanup( + vscode.workspace.registerTextDocumentContentProvider( + "rust-analyzer-diagnostics-view", + new (class implements vscode.TextDocumentContentProvider { + async provideTextDocumentContent(uri: vscode.Uri): Promise { + const diags = ctx.client?.diagnostics?.get( + vscode.Uri.parse(uri.fragment, true) + ); + if (!diags) { + return "Unable to find original rustc diagnostic"; + } + + const diag = diags[parseInt(uri.query)]; + if (!diag) { + return "Unable to find original rustc diagnostic"; + } + const rendered = (diag as unknown as { data?: { rendered?: string } }).data + ?.rendered; + return rendered ?? 
"Unable to find original rustc diagnostic"; + } + })() + ) + ); + vscode.workspace.onDidChangeWorkspaceFolders( async (_) => ctx.onWorkspaceFolderChanges(), null, From 8452844c2674584064d8d459e41a3a809648a62f Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 18 Nov 2022 20:11:48 +0100 Subject: [PATCH 0575/1945] Fix tests checking the data value --- crates/rust-analyzer/src/diagnostics/to_proto.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/crates/rust-analyzer/src/diagnostics/to_proto.rs b/crates/rust-analyzer/src/diagnostics/to_proto.rs index 35f37c740b..beb23c54c9 100644 --- a/crates/rust-analyzer/src/diagnostics/to_proto.rs +++ b/crates/rust-analyzer/src/diagnostics/to_proto.rs @@ -535,7 +535,8 @@ mod tests { Config::new(workspace_root.to_path_buf(), ClientCapabilities::default()), ); let snap = state.snapshot(); - let actual = map_rust_diagnostic_to_lsp(&config, &diagnostic, workspace_root, &snap); + let mut actual = map_rust_diagnostic_to_lsp(&config, &diagnostic, workspace_root, &snap); + actual.iter_mut().for_each(|diag| diag.diagnostic.data = None); expect.assert_debug_eq(&actual) } From 52bc15fc1fe763f5e7a91cb70c71a00997bb52ab Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 18 Nov 2022 23:32:26 +0100 Subject: [PATCH 0576/1945] fix: Fix proc-macro-srv search paths for Arch Linux --- crates/project-model/src/workspace.rs | 15 +++++++++ crates/rust-analyzer/src/cli/load_cargo.rs | 22 +++---------- crates/rust-analyzer/src/reload.rs | 36 +++------------------- 3 files changed, 25 insertions(+), 48 deletions(-) diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs index 4a2f468de7..3d199ed24a 100644 --- a/crates/project-model/src/workspace.rs +++ b/crates/project-model/src/workspace.rs @@ -377,6 +377,21 @@ impl ProjectWorkspace { } } + pub fn find_sysroot_proc_macro_srv(&self) -> Option { + match self { + ProjectWorkspace::Cargo { sysroot: Some(sysroot), .. } + | ProjectWorkspace::Json { sysroot: Some(sysroot), .. } => { + let standalone_server_name = + format!("rust-analyzer-proc-macro-srv{}", std::env::consts::EXE_SUFFIX); + ["libexec", "lib"] + .into_iter() + .map(|segment| sysroot.root().join(segment).join(&standalone_server_name)) + .find(|server_path| std::fs::metadata(&server_path).is_ok()) + } + _ => None, + } + } + /// Returns the roots for the current `ProjectWorkspace` /// The return type contains the path and whether or not /// the root is a member of the current workspace diff --git a/crates/rust-analyzer/src/cli/load_cargo.rs b/crates/rust-analyzer/src/cli/load_cargo.rs index 5dba545b87..762d7d3a18 100644 --- a/crates/rust-analyzer/src/cli/load_cargo.rs +++ b/crates/rust-analyzer/src/cli/load_cargo.rs @@ -60,24 +60,12 @@ pub fn load_workspace( }; let proc_macro_client = if load_config.with_proc_macro { - let mut path = AbsPathBuf::assert(std::env::current_exe()?); - let mut args = vec!["proc-macro"]; + let (server_path, args): (_, &[_]) = match ws.find_sysroot_proc_macro_srv() { + Some(server_path) => (server_path, &[]), + None => (AbsPathBuf::assert(std::env::current_exe()?), &["proc-macro"]), + }; - if let ProjectWorkspace::Cargo { sysroot, .. } | ProjectWorkspace::Json { sysroot, .. 
} = - &ws - { - if let Some(sysroot) = sysroot.as_ref() { - let standalone_server_name = - format!("rust-analyzer-proc-macro-srv{}", std::env::consts::EXE_SUFFIX); - let server_path = sysroot.root().join("libexec").join(&standalone_server_name); - if std::fs::metadata(&server_path).is_ok() { - path = server_path; - args = vec![]; - } - } - } - - ProcMacroServer::spawn(path.clone(), args.clone()).map_err(|e| e.to_string()) + ProcMacroServer::spawn(server_path, args).map_err(|e| e.to_string()) } else { Err("proc macro server disabled".to_owned()) }; diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs index aa0510a4ea..fcfe4be0b8 100644 --- a/crates/rust-analyzer/src/reload.rs +++ b/crates/rust-analyzer/src/reload.rs @@ -305,9 +305,6 @@ impl GlobalState { let files_config = self.config.files(); let project_folders = ProjectFolders::new(&self.workspaces, &files_config.exclude); - let standalone_server_name = - format!("rust-analyzer-proc-macro-srv{}", std::env::consts::EXE_SUFFIX); - if self.proc_macro_clients.is_empty() { if let Some((path, path_manually_set)) = self.config.proc_macro_srv() { tracing::info!("Spawning proc-macro servers"); @@ -315,40 +312,17 @@ impl GlobalState { .workspaces .iter() .map(|ws| { - let (path, args) = if path_manually_set { + let (path, args): (_, &[_]) = if path_manually_set { tracing::debug!( "Pro-macro server path explicitly set: {}", path.display() ); - (path.clone(), vec![]) + (path.clone(), &[]) } else { - let mut sysroot_server = None; - if let ProjectWorkspace::Cargo { sysroot, .. } - | ProjectWorkspace::Json { sysroot, .. } = ws - { - if let Some(sysroot) = sysroot.as_ref() { - let server_path = sysroot - .root() - .join("libexec") - .join(&standalone_server_name); - if std::fs::metadata(&server_path).is_ok() { - tracing::debug!( - "Sysroot proc-macro server exists at {}", - server_path.display() - ); - sysroot_server = Some(server_path); - } else { - tracing::debug!( - "Sysroot proc-macro server does not exist at {}", - server_path.display() - ); - } - } + match ws.find_sysroot_proc_macro_srv() { + Some(server_path) => (server_path, &[]), + None => (path.clone(), &["proc-macro"]), } - sysroot_server.map_or_else( - || (path.clone(), vec!["proc-macro".to_owned()]), - |path| (path, vec![]), - ) }; tracing::info!(?args, "Using proc-macro server at {}", path.display(),); From 0ffb361eb943cdfaa6c21b761a7906161fd3b041 Mon Sep 17 00:00:00 2001 From: Kartavya Vashishtha Date: Sat, 19 Nov 2022 11:10:47 +0530 Subject: [PATCH 0577/1945] feat: adds hover hint to ".." in record pattern currently only works with struct pattern --- crates/ide/src/hover.rs | 7 +++-- crates/ide/src/hover/render.rs | 54 +++++++++++++++++++++++++++++++++- 2 files changed, 58 insertions(+), 3 deletions(-) diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs index 838fb18c3d..966daad135 100644 --- a/crates/ide/src/hover.rs +++ b/crates/ide/src/hover.rs @@ -87,7 +87,7 @@ pub struct HoverResult { // Shows additional information, like the type of an expression or the documentation for a definition when "focusing" code. // Focusing is usually hovering with a mouse, but can also be triggered with a shortcut. 
// -// image::https://user-images.githubusercontent.com/48062697/113020658-b5f98b80-917a-11eb-9f88-3dbc27320c95.gif[] +// image::https://user-images.githubusercontent.com/48062697/113020658-b5f98b80-917a-11eb-9f88-3dbc27320c95.gif pub(crate) fn hover( db: &RootDatabase, FileRange { file_id, range }: FileRange, @@ -268,7 +268,10 @@ fn hover_type_fallback( } }; - let res = render::type_info(sema, config, &expr_or_pat)?; + let res = + render::type_info(sema, config, &expr_or_pat) + .or_else(|| render::struct_rest_pat(sema, config, &expr_or_pat))?; + let range = sema .original_range_opt(&node) .map(|frange| frange.range) diff --git a/crates/ide/src/hover/render.rs b/crates/ide/src/hover/render.rs index d109c07691..3b561f65da 100644 --- a/crates/ide/src/hover/render.rs +++ b/crates/ide/src/hover/render.rs @@ -14,7 +14,7 @@ use ide_db::{ use itertools::Itertools; use stdx::format_to; use syntax::{ - algo, ast, match_ast, AstNode, Direction, + algo, ast::{self, RecordPat}, match_ast, AstNode, Direction, SyntaxKind::{LET_EXPR, LET_STMT}, SyntaxToken, T, }; @@ -250,6 +250,58 @@ pub(super) fn keyword( Some(HoverResult { markup, actions }) } +pub(super) fn struct_rest_pat( + sema: &Semantics<'_, RootDatabase>, + config: &HoverConfig, + expr_or_pat: &Either, +) -> Option { + let pat = expr_or_pat.as_ref().right()?; + + let mut ancestors = sema.ancestors_with_macros(pat.syntax().clone()); + let _record_pat_field_list = ancestors.next()?; + let record_pat = ancestors.next()?; + let pattern = sema + .find_nodes_at_offset_with_descend::( + &record_pat, + record_pat.text_range().start()) + .next()?; + + let missing_fields = sema.record_pattern_missing_fields(&pattern); + + // if there are no missing fields, the end result is a hover that shows ".." + // should be left in to indicate that there are no more fields in the pattern + // example, S {a: 1, b: 2, ..} when struct S {a: u32, b: u32} + + let mut res = HoverResult::default(); + let mut targets: Vec = Vec::new(); + let mut push_new_def = |item: hir::ModuleDef| { + if !targets.contains(&item) { + targets.push(item); + } + }; + for (_, t) in &missing_fields { + walk_and_push_ty(sema.db, &t, &mut push_new_def); + } + + res.markup = { + let mut s = String::from(".., "); + for (f, _) in &missing_fields { + s += f.display(sema.db).to_string().as_ref(); + s += ", "; + } + // get rid of trailing comma + if s.len() > 0 {s.truncate(s.len() - 2);} + + if config.markdown() { + Markup::fenced_block(&s) + } else { + s.into() + } + }; + res.actions.push(HoverAction::goto_type_from_targets(sema.db, targets)); + Some(res) +} + pub(super) fn try_for_lint(attr: &ast::Attr, token: &SyntaxToken) -> Option { let (path, tt) = attr.as_simple_call()?; if !tt.syntax().text_range().contains(token.text_range().start()) { From a778203db9d23d2c6dbbd1bd038d6ef2d5368b94 Mon Sep 17 00:00:00 2001 From: Kartavya Vashishtha Date: Sat, 19 Nov 2022 11:17:43 +0530 Subject: [PATCH 0578/1945] simplify ancestor climbing to not consider macros --- crates/ide/src/hover/render.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/ide/src/hover/render.rs b/crates/ide/src/hover/render.rs index 3b561f65da..b5320c6b6b 100644 --- a/crates/ide/src/hover/render.rs +++ b/crates/ide/src/hover/render.rs @@ -257,7 +257,7 @@ pub(super) fn struct_rest_pat( ) -> Option { let pat = expr_or_pat.as_ref().right()?; - let mut ancestors = sema.ancestors_with_macros(pat.syntax().clone()); + let mut ancestors = pat.syntax().ancestors(); let _record_pat_field_list = ancestors.next()?; let 
record_pat = ancestors.next()?; let pattern = sema From 8b176810588734bdba3b5ea3374d39046f184f45 Mon Sep 17 00:00:00 2001 From: Kartavya Vashishtha Date: Sat, 19 Nov 2022 11:34:49 +0530 Subject: [PATCH 0579/1945] fix formatting and remove unnecessary check --- crates/ide/src/hover.rs | 3 +-- crates/ide/src/hover/render.rs | 9 ++++++--- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs index 966daad135..cc64b0c3db 100644 --- a/crates/ide/src/hover.rs +++ b/crates/ide/src/hover.rs @@ -268,8 +268,7 @@ fn hover_type_fallback( } }; - let res = - render::type_info(sema, config, &expr_or_pat) + let res = render::type_info(sema, config, &expr_or_pat) .or_else(|| render::struct_rest_pat(sema, config, &expr_or_pat))?; let range = sema diff --git a/crates/ide/src/hover/render.rs b/crates/ide/src/hover/render.rs index b5320c6b6b..b66c029257 100644 --- a/crates/ide/src/hover/render.rs +++ b/crates/ide/src/hover/render.rs @@ -14,7 +14,9 @@ use ide_db::{ use itertools::Itertools; use stdx::format_to; use syntax::{ - algo, ast::{self, RecordPat}, match_ast, AstNode, Direction, + algo, + ast::{self, RecordPat}, + match_ast, AstNode, Direction, SyntaxKind::{LET_EXPR, LET_STMT}, SyntaxToken, T, }; @@ -263,7 +265,8 @@ pub(super) fn struct_rest_pat( let pattern = sema .find_nodes_at_offset_with_descend::( &record_pat, - record_pat.text_range().start()) + record_pat.text_range().start(), + ) .next()?; let missing_fields = sema.record_pattern_missing_fields(&pattern); @@ -290,7 +293,7 @@ pub(super) fn struct_rest_pat( s += ", "; } // get rid of trailing comma - if s.len() > 0 {s.truncate(s.len() - 2);} + s.truncate(s.len() - 2); if config.markdown() { Markup::fenced_block(&s) From a3f8fd71df0e09b6cd161a7e7df78c67bb47d329 Mon Sep 17 00:00:00 2001 From: Kartavya Vashishtha Date: Sat, 19 Nov 2022 15:00:25 +0530 Subject: [PATCH 0580/1945] fix: format expression parsing edge-cases handle positional arg with formatting handle indexed positional args --- .../src/handlers/move_format_string_arg.rs | 6 +- .../src/syntax_helpers/format_string_exprs.rs | 62 +++++++++---------- 2 files changed, 34 insertions(+), 34 deletions(-) diff --git a/crates/ide-assists/src/handlers/move_format_string_arg.rs b/crates/ide-assists/src/handlers/move_format_string_arg.rs index aa710d2ce6..11db6ae7f7 100644 --- a/crates/ide-assists/src/handlers/move_format_string_arg.rs +++ b/crates/ide-assists/src/handlers/move_format_string_arg.rs @@ -92,7 +92,7 @@ pub(crate) fn move_format_string_arg(acc: &mut Assists, ctx: &AssistContext<'_>) NodeOrToken::Node(n) => { format_to!(current_arg, "{n}"); }, - NodeOrToken::Token(t) if t.kind() == COMMA=> { + NodeOrToken::Token(t) if t.kind() == COMMA => { existing_args.push(current_arg.trim().into()); current_arg.clear(); }, @@ -238,14 +238,14 @@ fn main() { &add_macro_decl( r#" fn main() { - print!("{} {x + 1:b} {Struct(1, 2)}$0", 1); + print!("{:b} {x + 1:b} {Struct(1, 2)}$0", 1); } "#, ), &add_macro_decl( r#" fn main() { - print!("{} {:b} {}"$0, 1, x + 1, Struct(1, 2)); + print!("{:b} {:b} {}"$0, 1, x + 1, Struct(1, 2)); } "#, ), diff --git a/crates/ide-db/src/syntax_helpers/format_string_exprs.rs b/crates/ide-db/src/syntax_helpers/format_string_exprs.rs index ac6c6e8fee..c3b7bb27d8 100644 --- a/crates/ide-db/src/syntax_helpers/format_string_exprs.rs +++ b/crates/ide-db/src/syntax_helpers/format_string_exprs.rs @@ -103,7 +103,12 @@ pub fn parse_format_exprs(input: &str) -> Result<(String, Vec), ()> { output.push(chr); 
extracted_expressions.push(Arg::Placeholder); state = State::NotArg; - } + }, + (State::MaybeArg, ':') => { + output.push(chr); + extracted_expressions.push(Arg::Placeholder); + state = State::FormatOpts; + }, (State::MaybeArg, _) => { if matches!(chr, '\\' | '$') { current_expr.push('\\'); @@ -117,49 +122,40 @@ pub fn parse_format_exprs(input: &str) -> Result<(String, Vec), ()> { } else { state = State::Expr; } - } - (State::Ident | State::Expr, '}') => { - if inexpr_open_count == 0 { - output.push(chr); - - if matches!(state, State::Expr) { - extracted_expressions.push(Arg::Expr(current_expr.trim().into())); - } else { - extracted_expressions.push(Arg::Ident(current_expr.trim().into())); - } - - current_expr = String::new(); - state = State::NotArg; - } else { - // We're closing one brace met before inside of the expression. - current_expr.push(chr); - inexpr_open_count -= 1; - } - } + }, (State::Ident | State::Expr, ':') if matches!(chars.peek(), Some(':')) => { // path separator state = State::Expr; current_expr.push_str("::"); chars.next(); - } - (State::Ident | State::Expr, ':') => { + }, + (State::Ident | State::Expr, ':' | '}') => { if inexpr_open_count == 0 { - // We're outside of braces, thus assume that it's a specifier, like "{Some(value):?}" - output.push(chr); + let trimmed = current_expr.trim(); - if matches!(state, State::Expr) { - extracted_expressions.push(Arg::Expr(current_expr.trim().into())); + // if the expression consists of a single number, like "0" or "12", it can refer to + // format args in the order they are specified. + // see: https://doc.rust-lang.org/std/fmt/#positional-parameters + if trimmed.chars().fold(true, |only_num, c| c.is_ascii_digit() && only_num) { + output.push_str(trimmed); + } else if matches!(state, State::Expr) { + extracted_expressions.push(Arg::Expr(trimmed.into())); } else { - extracted_expressions.push(Arg::Ident(current_expr.trim().into())); + extracted_expressions.push(Arg::Ident(trimmed.into())); } - current_expr = String::new(); - state = State::FormatOpts; - } else { + output.push(chr); + current_expr.clear(); + state = if chr == ':' {State::FormatOpts} else if chr == '}' {State::NotArg} else {unreachable!()}; + } else if chr == '}' { + // We're closing one brace met before inside of the expression. + current_expr.push(chr); + inexpr_open_count -= 1; + } else if chr == ':' { // We're inside of braced expression, assume that it's a struct field name/value delimiter. 
current_expr.push(chr); } - } + }, (State::Ident | State::Expr, '{') => { state = State::Expr; current_expr.push(chr); @@ -219,6 +215,10 @@ mod tests { ("{expr} is {2 + 2}", expect![["{} is {}; expr, 2 + 2"]]), ("{expr:?}", expect![["{:?}; expr"]]), ("{expr:1$}", expect![[r"{:1\$}; expr"]]), + ("{:1$}", expect![[r"{:1\$}; $1"]]), + ("{:>padding$}", expect![[r"{:>padding\$}; $1"]]), + ("{}, {}, {0}", expect![[r"{}, {}, {0}; $1, $2"]]), + ("{}, {}, {0:b}", expect![[r"{}, {}, {0:b}; $1, $2"]]), ("{$0}", expect![[r"{}; \$0"]]), ("{malformed", expect![["-"]]), ("malformed}", expect![["-"]]), From dc8254c6abdfcd273cb49475e798048558dcf4db Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sat, 19 Nov 2022 10:32:32 +0100 Subject: [PATCH 0581/1945] fix: Fix nested macro diagnostics pointing at macro expansion files --- .../src/handlers/macro_error.rs | 5 +---- .../src/handlers/unresolved_macro_call.rs | 5 +---- .../src/handlers/unresolved_proc_macro.rs | 12 +--------- crates/ide-diagnostics/src/lib.rs | 22 +++++++++++++++++++ 4 files changed, 25 insertions(+), 19 deletions(-) diff --git a/crates/ide-diagnostics/src/handlers/macro_error.rs b/crates/ide-diagnostics/src/handlers/macro_error.rs index 43ff4ed5a6..870c78d1f1 100644 --- a/crates/ide-diagnostics/src/handlers/macro_error.rs +++ b/crates/ide-diagnostics/src/handlers/macro_error.rs @@ -5,10 +5,7 @@ use crate::{Diagnostic, DiagnosticsContext}; // This diagnostic is shown for macro expansion errors. pub(crate) fn macro_error(ctx: &DiagnosticsContext<'_>, d: &hir::MacroError) -> Diagnostic { // Use more accurate position if available. - let display_range = d - .precise_location - .unwrap_or_else(|| ctx.sema.diagnostics_display_range(d.node.clone()).range); - + let display_range = ctx.resolve_precise_location(&d.node, d.precise_location); Diagnostic::new("macro-error", d.message.clone(), display_range).experimental() } diff --git a/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs b/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs index 4b43124757..87531f4acf 100644 --- a/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs +++ b/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs @@ -9,10 +9,7 @@ pub(crate) fn unresolved_macro_call( d: &hir::UnresolvedMacroCall, ) -> Diagnostic { // Use more accurate position if available. - let display_range = d - .precise_location - .unwrap_or_else(|| ctx.sema.diagnostics_display_range(d.macro_call.clone()).range); - + let display_range = ctx.resolve_precise_location(&d.macro_call, d.precise_location); let bang = if d.is_bang { "!" } else { "" }; Diagnostic::new( "unresolved-macro-call", diff --git a/crates/ide-diagnostics/src/handlers/unresolved_proc_macro.rs b/crates/ide-diagnostics/src/handlers/unresolved_proc_macro.rs index 760f51f904..23818d883f 100644 --- a/crates/ide-diagnostics/src/handlers/unresolved_proc_macro.rs +++ b/crates/ide-diagnostics/src/handlers/unresolved_proc_macro.rs @@ -1,5 +1,4 @@ use hir::db::DefDatabase; -use syntax::NodeOrToken; use crate::{Diagnostic, DiagnosticsContext, Severity}; @@ -19,16 +18,7 @@ pub(crate) fn unresolved_proc_macro( proc_attr_macros_enabled: bool, ) -> Diagnostic { // Use more accurate position if available. 
- let display_range = (|| { - let precise_location = d.precise_location?; - let root = ctx.sema.parse_or_expand(d.node.file_id)?; - match root.covering_element(precise_location) { - NodeOrToken::Node(it) => Some(ctx.sema.original_range(&it)), - NodeOrToken::Token(it) => d.node.with_value(it).original_file_range_opt(ctx.sema.db), - } - })() - .unwrap_or_else(|| ctx.sema.diagnostics_display_range(d.node.clone())) - .range; + let display_range = ctx.resolve_precise_location(&d.node, d.precise_location); let config_enabled = match d.kind { hir::MacroKind::Attr => proc_macros_enabled && proc_attr_macros_enabled, diff --git a/crates/ide-diagnostics/src/lib.rs b/crates/ide-diagnostics/src/lib.rs index ae299f0584..d81e36a1f8 100644 --- a/crates/ide-diagnostics/src/lib.rs +++ b/crates/ide-diagnostics/src/lib.rs @@ -182,6 +182,28 @@ struct DiagnosticsContext<'a> { resolve: &'a AssistResolveStrategy, } +impl<'a> DiagnosticsContext<'a> { + fn resolve_precise_location( + &self, + node: &InFile, + precise_location: Option, + ) -> TextRange { + let sema = &self.sema; + (|| { + let precise_location = precise_location?; + let root = sema.parse_or_expand(node.file_id)?; + match root.covering_element(precise_location) { + syntax::NodeOrToken::Node(it) => Some(sema.original_range(&it)), + syntax::NodeOrToken::Token(it) => { + node.with_value(it).original_file_range_opt(sema.db) + } + } + })() + .unwrap_or_else(|| sema.diagnostics_display_range(node.clone())) + .range + } +} + pub fn diagnostics( db: &RootDatabase, config: &DiagnosticsConfig, From 6d4b2b4b17841cdb29e99bf7e7e71b57dbaa2dc1 Mon Sep 17 00:00:00 2001 From: Kartavya Vashishtha Date: Sat, 19 Nov 2022 15:08:32 +0530 Subject: [PATCH 0582/1945] run cargo fmt --- .../src/syntax_helpers/format_string_exprs.rs | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/crates/ide-db/src/syntax_helpers/format_string_exprs.rs b/crates/ide-db/src/syntax_helpers/format_string_exprs.rs index c3b7bb27d8..313346ee13 100644 --- a/crates/ide-db/src/syntax_helpers/format_string_exprs.rs +++ b/crates/ide-db/src/syntax_helpers/format_string_exprs.rs @@ -103,12 +103,12 @@ pub fn parse_format_exprs(input: &str) -> Result<(String, Vec), ()> { output.push(chr); extracted_expressions.push(Arg::Placeholder); state = State::NotArg; - }, + } (State::MaybeArg, ':') => { output.push(chr); extracted_expressions.push(Arg::Placeholder); state = State::FormatOpts; - }, + } (State::MaybeArg, _) => { if matches!(chr, '\\' | '$') { current_expr.push('\\'); @@ -122,13 +122,13 @@ pub fn parse_format_exprs(input: &str) -> Result<(String, Vec), ()> { } else { state = State::Expr; } - }, + } (State::Ident | State::Expr, ':') if matches!(chars.peek(), Some(':')) => { // path separator state = State::Expr; current_expr.push_str("::"); chars.next(); - }, + } (State::Ident | State::Expr, ':' | '}') => { if inexpr_open_count == 0 { let trimmed = current_expr.trim(); @@ -146,7 +146,13 @@ pub fn parse_format_exprs(input: &str) -> Result<(String, Vec), ()> { output.push(chr); current_expr.clear(); - state = if chr == ':' {State::FormatOpts} else if chr == '}' {State::NotArg} else {unreachable!()}; + state = if chr == ':' { + State::FormatOpts + } else if chr == '}' { + State::NotArg + } else { + unreachable!() + }; } else if chr == '}' { // We're closing one brace met before inside of the expression. 
current_expr.push(chr); @@ -155,7 +161,7 @@ pub fn parse_format_exprs(input: &str) -> Result<(String, Vec), ()> { // We're inside of braced expression, assume that it's a struct field name/value delimiter. current_expr.push(chr); } - }, + } (State::Ident | State::Expr, '{') => { state = State::Expr; current_expr.push(chr); From a4f071afd5ebe8a1fc537136f9e823349cdee526 Mon Sep 17 00:00:00 2001 From: bvanjoi Date: Sat, 19 Nov 2022 19:38:53 +0800 Subject: [PATCH 0583/1945] fix(assists): remove `item_const` which had default value when implement missing members` --- .../src/handlers/add_missing_impl_members.rs | 10 ++++++++-- .../src/handlers/replace_derive_with_manual_impl.rs | 2 -- crates/ide-assists/src/utils.rs | 4 ++++ 3 files changed, 12 insertions(+), 4 deletions(-) diff --git a/crates/ide-assists/src/handlers/add_missing_impl_members.rs b/crates/ide-assists/src/handlers/add_missing_impl_members.rs index 722302f991..2b3793659c 100644 --- a/crates/ide-assists/src/handlers/add_missing_impl_members.rs +++ b/crates/ide-assists/src/handlers/add_missing_impl_members.rs @@ -196,6 +196,7 @@ trait Foo { type Output; const CONST: usize = 42; + const CONST_2: i32; fn foo(&self); fn bar(&self); @@ -213,6 +214,7 @@ trait Foo { type Output; const CONST: usize = 42; + const CONST_2: i32; fn foo(&self); fn bar(&self); @@ -226,7 +228,7 @@ impl Foo for S { $0type Output; - const CONST: usize = 42; + const CONST_2: i32; fn foo(&self) { todo!() @@ -658,6 +660,7 @@ trait Foo { type Output; const CONST: usize = 42; + const CONST_2: i32; fn valid(some: u32) -> bool { false } fn foo(some: u32) -> bool; @@ -669,13 +672,16 @@ trait Foo { type Output; const CONST: usize = 42; + const CONST_2: i32; fn valid(some: u32) -> bool { false } fn foo(some: u32) -> bool; } struct S; impl Foo for S { - $0fn valid(some: u32) -> bool { false } + $0const CONST: usize = 42; + + fn valid(some: u32) -> bool { false } }"#, ) } diff --git a/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs b/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs index f9ba289ee1..6fa15b28e4 100644 --- a/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs +++ b/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs @@ -1019,8 +1019,6 @@ struct Foo { impl foo::Bar for Foo { $0type Qux; - const Baz: usize = 42; - const Fez: usize; fn foo() { diff --git a/crates/ide-assists/src/utils.rs b/crates/ide-assists/src/utils.rs index 307e679270..68c31b4f8e 100644 --- a/crates/ide-assists/src/utils.rs +++ b/crates/ide-assists/src/utils.rs @@ -119,6 +119,10 @@ pub fn filter_assoc_items( (default_methods, def.body()), (DefaultMethods::Only, Some(_)) | (DefaultMethods::No, None) ), + ast::AssocItem::Const(def) => matches!( + (default_methods, def.body()), + (DefaultMethods::Only, Some(_)) | (DefaultMethods::No, None) + ), _ => default_methods == DefaultMethods::No, }) .collect::>() From 87658c81731cd99b80bcd32d2188890f2ca22291 Mon Sep 17 00:00:00 2001 From: Kartavya Vashishtha Date: Sat, 19 Nov 2022 21:22:10 +0530 Subject: [PATCH 0584/1945] refactor hover change struct_rest_pat to guarentee a HoverResult Move token and node matching out of struct_rest_pat into hover --- crates/ide/src/hover.rs | 104 +++++++++++++++++++-------------- crates/ide/src/hover/render.rs | 23 +++----- 2 files changed, 66 insertions(+), 61 deletions(-) diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs index cc64b0c3db..1e07e7dbac 100644 --- a/crates/ide/src/hover.rs +++ b/crates/ide/src/hover.rs @@ -127,6 +127,7 @@ 
pub(crate) fn hover( original_token.parent().and_then(ast::TokenTree::cast), Some(tt) if tt.syntax().ancestors().any(|it| ast::Meta::can_cast(it.kind())) ); + // prefer descending the same token kind in attribute expansions, in normal macros text // equivalency is more important let descended = if in_attr { @@ -135,54 +136,68 @@ pub(crate) fn hover( sema.descend_into_macros_with_same_text(original_token.clone()) }; - // FIXME: Definition should include known lints and the like instead of having this special case here - let hovered_lint = descended.iter().find_map(|token| { - let attr = token.parent_ancestors().find_map(ast::Attr::cast)?; - render::try_for_lint(&attr, token) - }); - if let Some(res) = hovered_lint { - return Some(RangeInfo::new(original_token.text_range(), res)); - } - + // try lint hover let result = descended .iter() - .filter_map(|token| { - let node = token.parent()?; - let class = IdentClass::classify_token(sema, token)?; - if let IdentClass::Operator(OperatorClass::Await(_)) = class { - // It's better for us to fall back to the keyword hover here, - // rendering poll is very confusing - return None; - } - Some(class.definitions().into_iter().zip(iter::once(node).cycle())) + .find_map(|token| { + // FIXME: Definition should include known lints and the like instead of having this special case here + let attr = token.parent_ancestors().find_map(ast::Attr::cast)?; + render::try_for_lint(&attr, token) }) - .flatten() - .unique_by(|&(def, _)| def) - .filter_map(|(def, node)| hover_for_definition(sema, file_id, def, &node, config)) - .reduce(|mut acc: HoverResult, HoverResult { markup, actions }| { - acc.actions.extend(actions); - acc.markup = Markup::from(format!("{}\n---\n{}", acc.markup, markup)); - acc + // try item definitions + .or_else(|| { + descended + .iter() + .filter_map(|token| { + let node = token.parent()?; + let class = IdentClass::classify_token(sema, token)?; + if let IdentClass::Operator(OperatorClass::Await(_)) = class { + // It's better for us to fall back to the keyword hover here, + // rendering poll is very confusing + return None; + } + Some(class.definitions().into_iter().zip(iter::once(node).cycle())) + }) + .flatten() + .unique_by(|&(def, _)| def) + .filter_map(|(def, node)| hover_for_definition(sema, file_id, def, &node, config)) + .reduce(|mut acc: HoverResult, HoverResult { markup, actions }| { + acc.actions.extend(actions); + acc.markup = Markup::from(format!("{}\n---\n{}", acc.markup, markup)); + acc + }) + }) + // try keywords + .or_else(|| { + descended.iter().find_map(|token| render::keyword(sema, config, token)) + }) + // try rest item hover + .or_else(|| { + descended.iter().find_map(|token| { + if token.kind() != DOT2 { + return None; + } + + let record_pat_field_list = + token.parent_ancestors().find_map(ast::RecordPatFieldList::cast)?; + + let record_pat = + record_pat_field_list.syntax().parent().and_then(ast::RecordPat::cast)?; + + Some(render::struct_rest_pat(sema, config, &record_pat)) + }) }); - if result.is_none() { - // fallbacks, show keywords or types - - let res = descended.iter().find_map(|token| render::keyword(sema, config, token)); - if let Some(res) = res { - return Some(RangeInfo::new(original_token.text_range(), res)); - } - let res = descended - .iter() - .find_map(|token| hover_type_fallback(sema, config, token, &original_token)); - if let Some(_) = res { - return res; - } - } - result.map(|mut res: HoverResult| { - res.actions = dedupe_or_merge_hover_actions(res.actions); - 
RangeInfo::new(original_token.text_range(), res) - }) + result + .map(|mut res: HoverResult| { + res.actions = dedupe_or_merge_hover_actions(res.actions); + RangeInfo::new(original_token.text_range(), res) + }) + // fallback to type hover if there aren't any other suggestions + // this finds its own range instead of using the closest token's range + .or_else(|| { + descended.iter().find_map(|token| hover_type_fallback(sema, config, token, &token)) + }) } pub(crate) fn hover_for_definition( @@ -268,8 +283,7 @@ fn hover_type_fallback( } }; - let res = render::type_info(sema, config, &expr_or_pat) - .or_else(|| render::struct_rest_pat(sema, config, &expr_or_pat))?; + let res = render::type_info(sema, config, &expr_or_pat)?; let range = sema .original_range_opt(&node) diff --git a/crates/ide/src/hover/render.rs b/crates/ide/src/hover/render.rs index b66c029257..fb00a40f96 100644 --- a/crates/ide/src/hover/render.rs +++ b/crates/ide/src/hover/render.rs @@ -252,24 +252,15 @@ pub(super) fn keyword( Some(HoverResult { markup, actions }) } +/// Returns missing types in a record pattern. +/// Only makes sense when there's a rest pattern in the record pattern. +/// i.e. `let S {a, ..} = S {a: 1, b: 2}` pub(super) fn struct_rest_pat( sema: &Semantics<'_, RootDatabase>, config: &HoverConfig, - expr_or_pat: &Either, -) -> Option { - let pat = expr_or_pat.as_ref().right()?; - - let mut ancestors = pat.syntax().ancestors(); - let _record_pat_field_list = ancestors.next()?; - let record_pat = ancestors.next()?; - let pattern = sema - .find_nodes_at_offset_with_descend::( - &record_pat, - record_pat.text_range().start(), - ) - .next()?; - - let missing_fields = sema.record_pattern_missing_fields(&pattern); + pattern: &RecordPat, +) -> HoverResult { + let missing_fields = sema.record_pattern_missing_fields(pattern); // if there are no missing fields, the end result is a hover that shows ".." 
// should be left in to indicate that there are no more fields in the pattern @@ -302,7 +293,7 @@ pub(super) fn struct_rest_pat( } }; res.actions.push(HoverAction::goto_type_from_targets(sema.db, targets)); - Some(res) + res } pub(super) fn try_for_lint(attr: &ast::Attr, token: &SyntaxToken) -> Option { From 29951f9766dc8ffc2b661b78cdff44ceafef3d57 Mon Sep 17 00:00:00 2001 From: Kartavya Vashishtha Date: Sat, 19 Nov 2022 21:23:33 +0530 Subject: [PATCH 0585/1945] fix formatting --- crates/ide/src/hover.rs | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs index 1e07e7dbac..9751dc11db 100644 --- a/crates/ide/src/hover.rs +++ b/crates/ide/src/hover.rs @@ -127,7 +127,7 @@ pub(crate) fn hover( original_token.parent().and_then(ast::TokenTree::cast), Some(tt) if tt.syntax().ancestors().any(|it| ast::Meta::can_cast(it.kind())) ); - + // prefer descending the same token kind in attribute expansions, in normal macros text // equivalency is more important let descended = if in_attr { @@ -168,9 +168,7 @@ pub(crate) fn hover( }) }) // try keywords - .or_else(|| { - descended.iter().find_map(|token| render::keyword(sema, config, token)) - }) + .or_else(|| descended.iter().find_map(|token| render::keyword(sema, config, token))) // try rest item hover .or_else(|| { descended.iter().find_map(|token| { From a6d0e342a3eff8494f49966ca58f858a0ff5cc85 Mon Sep 17 00:00:00 2001 From: yue Date: Sun, 20 Nov 2022 00:58:59 +0900 Subject: [PATCH 0586/1945] Update crates/ide-completion/src/context.rs Co-authored-by: Lukas Wirth --- crates/ide-completion/src/context.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/ide-completion/src/context.rs b/crates/ide-completion/src/context.rs index 27f6745d24..1201854ff4 100644 --- a/crates/ide-completion/src/context.rs +++ b/crates/ide-completion/src/context.rs @@ -569,7 +569,7 @@ impl<'a> CompletionContext<'a> { // completing on let original_token = original_file.syntax().token_at_offset(offset).left_biased()?; - // try to skip completions on path with qinvalid colons + // try to skip completions on path with invalid colons // this approach works in normal path and inside token tree match original_token.kind() { T![:] => { From 7a568f7f9571c2c93fd8e60712878736d942c787 Mon Sep 17 00:00:00 2001 From: yue4u Date: Sun, 20 Nov 2022 01:29:02 +0900 Subject: [PATCH 0587/1945] fix: remove insufficient check for coloncolon --- crates/ide-completion/src/context.rs | 3 --- crates/ide-completion/src/tests/special.rs | 7 ------- 2 files changed, 10 deletions(-) diff --git a/crates/ide-completion/src/context.rs b/crates/ide-completion/src/context.rs index 1201854ff4..a0e1153f08 100644 --- a/crates/ide-completion/src/context.rs +++ b/crates/ide-completion/src/context.rs @@ -585,9 +585,6 @@ impl<'a> CompletionContext<'a> { return None; } } - T![::] if !is_prev_token_valid_path_start_or_segment(&original_token) => { - return None; - } _ => {} } diff --git a/crates/ide-completion/src/tests/special.rs b/crates/ide-completion/src/tests/special.rs index 1aea5d89b4..6cfa72d653 100644 --- a/crates/ide-completion/src/tests/special.rs +++ b/crates/ide-completion/src/tests/special.rs @@ -977,13 +977,6 @@ fn foo { crate:::$0 } check( r#" fn foo { crate::::$0 } -"#, - expect![""], - ); - - check( - r#" -fn foo { crate:::::$0 } "#, expect![""], ); From 23cfe0702de3a7144be6842bf54d94091b0cde6d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nyikos=20Zolt=C3=A1n?= Date: Sat, 19 Nov 2022 20:08:01 +0100 Subject: [PATCH 
0588/1945] fix: tuple to named struct inside macros seems to fix #13634 --- .../convert_tuple_struct_to_named_struct.rs | 43 ++++++++++++++++++- 1 file changed, 42 insertions(+), 1 deletion(-) diff --git a/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs b/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs index 92e091fca1..b0383291e7 100644 --- a/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs +++ b/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs @@ -168,7 +168,7 @@ fn edit_struct_references( let arg_list = call_expr.syntax().descendants().find_map(ast::ArgList::cast)?; edit.replace( - call_expr.syntax().text_range(), + ctx.sema.original_range(&node).range, ast::make::record_expr( path, ast::make::record_expr_field_list(arg_list.args().zip(names).map( @@ -249,6 +249,24 @@ mod tests { ); check_assist_not_applicable(convert_tuple_struct_to_named_struct, r#"struct Foo$0;"#); } + #[test] + fn convert_in_macro_args() { + check_assist( + convert_tuple_struct_to_named_struct, + r#" +macro_rules! foo {($i:expr) => {$i} } +struct T$0(u8); +fn test() { + foo!(T(1)); +}"#, + r#" +macro_rules! foo {($i:expr) => {$i} } +struct T { field1: u8 } +fn test() { + foo!(T { field1: 1 }); +}"#, + ); + } #[test] fn convert_simple_struct() { @@ -554,6 +572,29 @@ where ); } + #[test] + fn convert_variant_in_macro_args() { + check_assist( + convert_tuple_struct_to_named_struct, + r#" +macro_rules! foo {($i:expr) => {$i} } +enum T { + V$0(u8) +} +fn test() { + foo!(T::V(1)); +}"#, + r#" +macro_rules! foo {($i:expr) => {$i} } +enum T { + V { field1: u8 } +} +fn test() { + foo!(T::V { field1: 1 }); +}"#, + ); + } + #[test] fn convert_simple_variant() { check_assist( From e39d90a8e6723cbd9937aca273d1f58a9beee2c9 Mon Sep 17 00:00:00 2001 From: ZZzzaaKK <66885975+ZZzzaaKK@users.noreply.github.com> Date: Sun, 20 Nov 2022 01:58:16 +0100 Subject: [PATCH 0589/1945] Improve grammar of architecture.md --- docs/dev/architecture.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/dev/architecture.md b/docs/dev/architecture.md index e3a4fdfda9..a07cf036e0 100644 --- a/docs/dev/architecture.md +++ b/docs/dev/architecture.md @@ -481,7 +481,7 @@ It is not cheap enough to enable in prod, and this is a bug which should be fixe rust-analyzer strives to be as configurable as possible while offering reasonable defaults where no configuration exists yet. The rule of thumb is to enable most features by default unless they are buggy or degrade performance too much. There will always be features that some people find more annoying than helpful, so giving the users the ability to tweak or disable these is a big part of offering a good user experience. -Enabling them by default is a matter of discoverability, as many users end up don't know about some features even though they are presented in the manual. +Enabling them by default is a matter of discoverability, as many users don't know about some features even though they are presented in the manual. Mind the code--architecture gap: at the moment, we are using fewer feature flags than we really should. ### Serialization @@ -492,8 +492,8 @@ If a type is serializable, then it is a part of some IPC boundary. You often don't control the other side of this boundary, so changing serializable types is hard. For this reason, the types in `ide`, `base_db` and below are not serializable by design. 
-If such types need to cross an IPC boundary, then the client of rust-analyzer needs to provide custom, client-specific serialization format. +If such types need to cross an IPC boundary, then the client of rust-analyzer needs to provide a custom, client-specific serialization format. This isolates backwards compatibility and migration concerns to a specific client. -For example, `rust-project.json` is it's own format -- it doesn't include `CrateGraph` as is. +For example, `rust-project.json` is its own format -- it doesn't include `CrateGraph` as is. Instead, it creates a `CrateGraph` by calling appropriate constructing functions. From 427b63b676c89dc6be407181f25f74ddc9950ea1 Mon Sep 17 00:00:00 2001 From: Jake Heinz Date: Sun, 20 Nov 2022 08:48:27 +0000 Subject: [PATCH 0590/1945] hir-expand: fix compile_error! expansion not unquoting strings --- .../src/macro_expansion_tests/builtin_fn_macro.rs | 6 ++++-- crates/hir-expand/src/builtin_fn_macro.rs | 13 ++++--------- 2 files changed, 8 insertions(+), 11 deletions(-) diff --git a/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs b/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs index c04cd16519..bb45266725 100644 --- a/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs +++ b/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs @@ -163,7 +163,8 @@ macro_rules! compile_error { } // This expands to nothing (since it's in item position), but emits an error. -compile_error!("error!"); +compile_error!("error, with an escaped quote: \""); +compile_error!(r"this is a raw string"); "#, expect![[r##" #[rustc_builtin_macro] @@ -172,7 +173,8 @@ macro_rules! compile_error { ($msg:expr,) => ({ /* compiler built-in */ }) } -/* error: error! */ +/* error: error, with an escaped quote: " */ +/* error: this is a raw string */ "##]], ); } diff --git a/crates/hir-expand/src/builtin_fn_macro.rs b/crates/hir-expand/src/builtin_fn_macro.rs index 7b19518e25..985954d44e 100644 --- a/crates/hir-expand/src/builtin_fn_macro.rs +++ b/crates/hir-expand/src/builtin_fn_macro.rs @@ -379,15 +379,10 @@ fn compile_error_expand( tt: &tt::Subtree, ) -> ExpandResult { let err = match &*tt.token_trees { - [tt::TokenTree::Leaf(tt::Leaf::Literal(it))] => { - let text = it.text.as_str(); - if text.starts_with('"') && text.ends_with('"') { - // FIXME: does not handle raw strings - ExpandError::Other(text[1..text.len() - 1].into()) - } else { - ExpandError::Other("`compile_error!` argument must be a string".into()) - } - } + [tt::TokenTree::Leaf(tt::Leaf::Literal(it))] => match unquote_str(it) { + Some(unquoted) => ExpandError::Other(unquoted.into()), + None => ExpandError::Other("`compile_error!` argument must be a string".into()), + }, _ => ExpandError::Other("`compile_error!` argument must be a string".into()), }; From ebbc5492f586601a9a369ace53397864bc39b71a Mon Sep 17 00:00:00 2001 From: Max Gautier Date: Sun, 20 Nov 2022 12:47:28 +0100 Subject: [PATCH 0591/1945] Fix typo on 'configuration' anchor https://rust-analyzer.github.io/manual.html#_configuration lands you at the start of the page, while https://rust-analyzer.github.io/manual.html#configuration correctly puts you at the correct anchor --- docs/user/manual.adoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/user/manual.adoc b/docs/user/manual.adoc index 49500e390a..1a4c70575b 100644 --- a/docs/user/manual.adoc +++ b/docs/user/manual.adoc @@ -367,7 +367,7 @@ if executable('rust-analyzer') endif ---- -There is no dedicated UI for the server configuration, so 
you would need to send any options as a value of the `initialization_options` field, as described in the <<_configuration,Configuration>> section. +There is no dedicated UI for the server configuration, so you would need to send any options as a value of the `initialization_options` field, as described in the <> section. Here is an example of how to enable the proc-macro support: [source,vim] From 95b4a7487b4055d951b59967caffcdc92a733c84 Mon Sep 17 00:00:00 2001 From: Bben01 <52465698+Bben01@users.noreply.github.com> Date: Mon, 7 Nov 2022 22:31:43 +0200 Subject: [PATCH 0592/1945] Suppress "Implement default members" inside contained items --- .../src/handlers/add_missing_impl_members.rs | 99 +++++++++++++++++++ 1 file changed, 99 insertions(+) diff --git a/crates/ide-assists/src/handlers/add_missing_impl_members.rs b/crates/ide-assists/src/handlers/add_missing_impl_members.rs index 62cf5ab4f3..2394e4341a 100644 --- a/crates/ide-assists/src/handlers/add_missing_impl_members.rs +++ b/crates/ide-assists/src/handlers/add_missing_impl_members.rs @@ -107,6 +107,14 @@ fn add_missing_impl_members_inner( ) -> Option<()> { let _p = profile::span("add_missing_impl_members_inner"); let impl_def = ctx.find_node_at_offset::()?; + + if ctx.token_at_offset().all(|t| { + t.parent_ancestors() + .any(|s| ast::BlockExpr::can_cast(s.kind()) || ast::ParamList::can_cast(s.kind())) + }) { + return None; + } + let target_scope = ctx.sema.scope(impl_def.syntax())?; let trait_ = resolve_target_trait(&ctx.sema, &impl_def)?; @@ -1337,4 +1345,95 @@ impl PartialEq for SomeStruct { "#, ); } + + #[test] + fn test_ignore_function_body() { + check_assist_not_applicable( + add_missing_default_members, + r#" +trait Trait { + type X; + fn foo(&self); + fn bar(&self) {} +} + +impl Trait for () { + type X = u8; + fn foo(&self) {$0 + let x = 5; + } +}"#, + ) + } + + #[test] + fn test_ignore_param_list() { + check_assist_not_applicable( + add_missing_impl_members, + r#" +trait Trait { + type X; + fn foo(&self); + fn bar(&self); +} + +impl Trait for () { + type X = u8; + fn foo(&self$0) { + let x = 5; + } +}"#, + ) + } + + #[test] + fn test_ignore_scope_inside_function() { + check_assist_not_applicable( + add_missing_impl_members, + r#" +trait Trait { + type X; + fn foo(&self); + fn bar(&self); +} + +impl Trait for () { + type X = u8; + fn foo(&self) { + let x = async {$0 5 }; + } +}"#, + ) + } + + #[test] + fn test_apply_outside_function() { + check_assist( + add_missing_default_members, + r#" +trait Trait { + type X; + fn foo(&self); + fn bar(&self) {} +} + +impl Trait for () { + type X = u8; + fn foo(&self)$0 {} +}"#, + r#" +trait Trait { + type X; + fn foo(&self); + fn bar(&self) {} +} + +impl Trait for () { + type X = u8; + fn foo(&self) {} + + $0fn bar(&self) {} +}"#, + ) + } } From ecb15ca7173a6d416c9f74af06f71cf003cdd597 Mon Sep 17 00:00:00 2001 From: Mihail Mihov Date: Wed, 9 Nov 2022 23:43:07 +0200 Subject: [PATCH 0593/1945] Add assist to generate trait impl's --- .../src/handlers/generate_trait_impl.rs | 226 ++++++++++++++++++ crates/ide-assists/src/lib.rs | 2 + crates/ide-assists/src/tests/generated.rs | 21 ++ 3 files changed, 249 insertions(+) create mode 100644 crates/ide-assists/src/handlers/generate_trait_impl.rs diff --git a/crates/ide-assists/src/handlers/generate_trait_impl.rs b/crates/ide-assists/src/handlers/generate_trait_impl.rs new file mode 100644 index 0000000000..7b1ee5b5c3 --- /dev/null +++ b/crates/ide-assists/src/handlers/generate_trait_impl.rs @@ -0,0 +1,226 @@ +use syntax::ast::{self, AstNode, 
HasName}; + +use crate::{utils::generate_trait_impl_text, AssistContext, AssistId, AssistKind, Assists}; + +// Assist: generate_trait_impl +// +// Adds a new trait impl for a type. +// +// ``` +// struct $0Ctx { +// data: T, +// } +// ``` +// -> +// ``` +// struct Ctx { +// data: T, +// } +// +// impl $0 for Ctx { +// +// } +// ``` +pub(crate) fn generate_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { + let nominal = ctx.find_node_at_offset::()?; + let name = nominal.name()?; + let target = nominal.syntax().text_range(); + + if let Some(_) = ctx.find_node_at_offset::() { + return None; + } + + acc.add( + AssistId("generate_trait_impl", AssistKind::Generate), + format!("Generate trait impl for `{name}`"), + target, + |edit| { + let start_offset = nominal.syntax().text_range().end(); + match ctx.config.snippet_cap { + Some(cap) => { + let snippet = generate_trait_impl_text(&nominal, "$0", ""); + edit.insert_snippet(cap, start_offset, snippet); + } + None => { + let text = generate_trait_impl_text(&nominal, "", ""); + edit.insert(start_offset, text); + } + } + }, + ) +} + +#[cfg(test)] +mod tests { + use crate::tests::{check_assist, check_assist_target}; + + use super::*; + + #[test] + fn test_add_trait_impl() { + check_assist( + generate_trait_impl, + r#" + struct Foo$0 {} + "#, + r#" + struct Foo {} + + impl $0 for Foo { + + } + "#, + ); + } + + #[test] + fn test_add_trait_impl_with_generics() { + check_assist( + generate_trait_impl, + r#" + struct Foo$0 {} + "#, + r#" + struct Foo {} + + impl $0 for Foo { + + } + "#, + ); + } + + #[test] + fn test_add_trait_impl_with_generics_and_lifetime_parameters() { + check_assist( + generate_trait_impl, + r#" + struct Foo<'a, T: Foo<'a>>$0 {} + "#, + r#" + struct Foo<'a, T: Foo<'a>> {} + + impl<'a, T: Foo<'a>> $0 for Foo<'a, T> { + + } + "#, + ); + } + + #[test] + fn test_add_trait_impl_with_attributes() { + check_assist( + generate_trait_impl, + r#" + #[cfg(feature = "foo")] + struct Foo<'a, T: Foo$0<'a>> {} + "#, + r#" + #[cfg(feature = "foo")] + struct Foo<'a, T: Foo<'a>> {} + + #[cfg(feature = "foo")] + impl<'a, T: Foo<'a>> $0 for Foo<'a, T> { + + } + "#, + ); + } + + #[test] + fn test_add_trait_impl_with_default_generic() { + check_assist( + generate_trait_impl, + r#" + struct Defaulted$0 {} + "#, + r#" + struct Defaulted {} + + impl $0 for Defaulted { + + } + "#, + ); + } + + #[test] + fn test_add_trait_impl_with_constrained_default_generic() { + check_assist( + generate_trait_impl, + r#" + struct Defaulted$0<'a, 'b: 'a, T: Debug + Clone + 'a + 'b = String, const S: usize> {} + "#, + r#" + struct Defaulted<'a, 'b: 'a, T: Debug + Clone + 'a + 'b = String, const S: usize> {} + + impl<'a, 'b: 'a, T: Debug + Clone + 'a + 'b, const S: usize> $0 for Defaulted<'a, 'b, T, S> { + + } + "#, + ); + } + + #[test] + fn test_add_trait_impl_with_const_defaulted_generic() { + check_assist( + generate_trait_impl, + r#" + struct Defaulted$0 {} + "#, + r#" + struct Defaulted {} + + impl $0 for Defaulted { + + } + "#, + ); + } + + #[test] + fn test_add_trait_impl_with_trait_constraint() { + check_assist( + generate_trait_impl, + r#" + pub trait Trait {} + struct Struct$0 + where + T: Trait, + { + inner: T, + } + "#, + r#" + pub trait Trait {} + struct Struct + where + T: Trait, + { + inner: T, + } + + impl $0 for Struct + where + T: Trait, + { + + } + "#, + ); + } + + #[test] + fn add_trait_impl_target() { + check_assist_target( + generate_trait_impl, + r#" + struct SomeThingIrrelevant; + /// Has a lifetime parameter + struct Foo$0<'a, T: 
Foo<'a>> {} + struct EvenMoreIrrelevant; + "#, + "/// Has a lifetime parameter\nstruct Foo<'a, T: Foo<'a>> {}", + ); + } +} diff --git a/crates/ide-assists/src/lib.rs b/crates/ide-assists/src/lib.rs index 387cc63142..e3c483cf0a 100644 --- a/crates/ide-assists/src/lib.rs +++ b/crates/ide-assists/src/lib.rs @@ -152,6 +152,7 @@ mod handlers { mod generate_function; mod generate_getter; mod generate_impl; + mod generate_trait_impl; mod generate_is_empty_from_len; mod generate_new; mod generate_setter; @@ -247,6 +248,7 @@ mod handlers { generate_from_impl_for_enum::generate_from_impl_for_enum, generate_function::generate_function, generate_impl::generate_impl, + generate_trait_impl::generate_trait_impl, generate_is_empty_from_len::generate_is_empty_from_len, generate_new::generate_new, inline_call::inline_call, diff --git a/crates/ide-assists/src/tests/generated.rs b/crates/ide-assists/src/tests/generated.rs index c09317572a..67657b61bb 100644 --- a/crates/ide-assists/src/tests/generated.rs +++ b/crates/ide-assists/src/tests/generated.rs @@ -1341,6 +1341,27 @@ impl Person { ) } +#[test] +fn doctest_generate_trait_impl() { + check_doc_test( + "generate_trait_impl", + r#####" +struct $0Ctx { + data: T, +} +"#####, + r#####" +struct Ctx { + data: T, +} + +impl $0 for Ctx { + +} +"#####, + ) +} + #[test] fn doctest_inline_call() { check_doc_test( From 0bd11f817136f372b30a328ccf71c0bb0c68ef21 Mon Sep 17 00:00:00 2001 From: Mihail Mihov Date: Thu, 10 Nov 2022 00:01:29 +0200 Subject: [PATCH 0594/1945] Reduce trigger range of `generate_impl` assist and update tests --- .../ide-assists/src/handlers/generate_impl.rs | 280 ++++++++++-------- crates/ide-assists/src/tests/generated.rs | 4 +- 2 files changed, 160 insertions(+), 124 deletions(-) diff --git a/crates/ide-assists/src/handlers/generate_impl.rs b/crates/ide-assists/src/handlers/generate_impl.rs index 9af26c04eb..1bea220f38 100644 --- a/crates/ide-assists/src/handlers/generate_impl.rs +++ b/crates/ide-assists/src/handlers/generate_impl.rs @@ -7,8 +7,8 @@ use crate::{utils::generate_impl_text, AssistContext, AssistId, AssistKind, Assi // Adds a new inherent impl for a type. 
// // ``` -// struct Ctx { -// data: T,$0 +// struct Ctx$0 { +// data: T, // } // ``` // -> @@ -26,6 +26,10 @@ pub(crate) fn generate_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> Optio let name = nominal.name()?; let target = nominal.syntax().text_range(); + if let Some(_) = ctx.find_node_at_offset::() { + return None; + } + acc.add( AssistId("generate_impl", AssistKind::Generate), format!("Generate impl for `{name}`"), @@ -52,139 +56,171 @@ mod tests { use super::*; - // FIXME: break up into separate test fns #[test] fn test_add_impl() { - check_assist( - generate_impl, - "struct Foo {$0}\n", - "struct Foo {}\n\nimpl Foo {\n $0\n}\n", - ); - check_assist( - generate_impl, - "struct Foo {$0}", - "struct Foo {}\n\nimpl Foo {\n $0\n}", - ); - check_assist( - generate_impl, - "struct Foo<'a, T: Foo<'a>> {$0}", - "struct Foo<'a, T: Foo<'a>> {}\n\nimpl<'a, T: Foo<'a>> Foo<'a, T> {\n $0\n}", - ); check_assist( generate_impl, r#" - struct MyOwnArray {}$0"#, + struct Foo$0 {} + "#, r#" - struct MyOwnArray {} + struct Foo {} - impl MyOwnArray { - $0 - }"#, - ); - check_assist( - generate_impl, - r#" - #[cfg(feature = "foo")] - struct Foo<'a, T: Foo<'a>> {$0}"#, - r#" - #[cfg(feature = "foo")] - struct Foo<'a, T: Foo<'a>> {} - - #[cfg(feature = "foo")] - impl<'a, T: Foo<'a>> Foo<'a, T> { - $0 - }"#, - ); - - check_assist( - generate_impl, - r#" - #[cfg(not(feature = "foo"))] - struct Foo<'a, T: Foo<'a>> {$0}"#, - r#" - #[cfg(not(feature = "foo"))] - struct Foo<'a, T: Foo<'a>> {} - - #[cfg(not(feature = "foo"))] - impl<'a, T: Foo<'a>> Foo<'a, T> { - $0 - }"#, - ); - - check_assist( - generate_impl, - r#" - struct Defaulted {}$0"#, - r#" - struct Defaulted {} - - impl Defaulted { - $0 - }"#, - ); - - check_assist( - generate_impl, - r#" - struct Defaulted<'a, 'b: 'a, T: Debug + Clone + 'a + 'b = String, const S: usize> {}$0"#, - r#" - struct Defaulted<'a, 'b: 'a, T: Debug + Clone + 'a + 'b = String, const S: usize> {} - - impl<'a, 'b: 'a, T: Debug + Clone + 'a + 'b, const S: usize> Defaulted<'a, 'b, T, S> { - $0 - }"#, - ); - - check_assist( - generate_impl, - r#" - struct Defaulted {}$0"#, - r#" - struct Defaulted {} - - impl Defaulted { - $0 - }"#, - ); - - check_assist( - generate_impl, - r#"pub trait Trait {} -struct Struct$0 -where - T: Trait, -{ - inner: T, -}"#, - r#"pub trait Trait {} -struct Struct -where - T: Trait, -{ - inner: T, -} - -impl Struct -where - T: Trait, -{ - $0 -}"#, + impl Foo { + $0 + } + "#, ); } #[test] - fn add_impl_target() { + fn test_add_impl_with_generics() { + check_assist( + generate_impl, + r#" + struct Foo$0 {} + "#, + r#" + struct Foo {} + + impl Foo { + $0 + } + "#, + ); + } + + #[test] + fn test_add_impl_with_generics_and_lifetime_parameters() { + check_assist( + generate_impl, + r#" + struct Foo<'a, T: Foo<'a>>$0 {} + "#, + r#" + struct Foo<'a, T: Foo<'a>> {} + + impl<'a, T: Foo<'a>> Foo<'a, T> { + $0 + } + "#, + ); + } + + #[test] + fn test_add_impl_with_attributes() { + check_assist( + generate_impl, + r#" + #[cfg(feature = "foo")] + struct Foo<'a, T: Foo$0<'a>> {} + "#, + r#" + #[cfg(feature = "foo")] + struct Foo<'a, T: Foo<'a>> {} + + #[cfg(feature = "foo")] + impl<'a, T: Foo<'a>> Foo<'a, T> { + $0 + } + "#, + ); + } + + #[test] + fn test_add_impl_with_default_generic() { + check_assist( + generate_impl, + r#" + struct Defaulted$0 {} + "#, + r#" + struct Defaulted {} + + impl Defaulted { + $0 + } + "#, + ); + } + + #[test] + fn test_add_impl_with_constrained_default_generic() { + check_assist( + generate_impl, + r#" + struct Defaulted$0<'a, 'b: 'a, 
T: Debug + Clone + 'a + 'b = String, const S: usize> {} + "#, + r#" + struct Defaulted<'a, 'b: 'a, T: Debug + Clone + 'a + 'b = String, const S: usize> {} + + impl<'a, 'b: 'a, T: Debug + Clone + 'a + 'b, const S: usize> Defaulted<'a, 'b, T, S> { + $0 + } + "#, + ); + } + + #[test] + fn test_add_impl_with_const_defaulted_generic() { + check_assist( + generate_impl, + r#" + struct Defaulted$0 {} + "#, + r#" + struct Defaulted {} + + impl Defaulted { + $0 + } + "#, + ); + } + + #[test] + fn test_add_impl_with_trait_constraint() { + check_assist( + generate_impl, + r#" + pub trait Trait {} + struct Struct$0 + where + T: Trait, + { + inner: T, + } + "#, + r#" + pub trait Trait {} + struct Struct + where + T: Trait, + { + inner: T, + } + + impl Struct + where + T: Trait, + { + $0 + } + "#, + ); + } + + #[test] + fn add_trait_impl_target() { check_assist_target( generate_impl, - " -struct SomeThingIrrelevant; -/// Has a lifetime parameter -struct Foo<'a, T: Foo<'a>> {$0} -struct EvenMoreIrrelevant; -", - "/// Has a lifetime parameter -struct Foo<'a, T: Foo<'a>> {}", + r#" + struct SomeThingIrrelevant; + /// Has a lifetime parameter + struct Foo$0<'a, T: Foo<'a>> {} + struct EvenMoreIrrelevant; + "#, + "/// Has a lifetime parameter\nstruct Foo<'a, T: Foo<'a>> {}", ); } } diff --git a/crates/ide-assists/src/tests/generated.rs b/crates/ide-assists/src/tests/generated.rs index 67657b61bb..d797f07767 100644 --- a/crates/ide-assists/src/tests/generated.rs +++ b/crates/ide-assists/src/tests/generated.rs @@ -1249,8 +1249,8 @@ fn doctest_generate_impl() { check_doc_test( "generate_impl", r#####" -struct Ctx { - data: T,$0 +struct Ctx$0 { + data: T, } "#####, r#####" From 469f620b064d2935c741222927dc955da395532b Mon Sep 17 00:00:00 2001 From: Mihail Mihov Date: Mon, 21 Nov 2022 22:58:01 +0200 Subject: [PATCH 0595/1945] Combine `generate_impl` and `generate_trait_impl` into a single file --- .../ide-assists/src/handlers/generate_impl.rs | 223 ++++++++++++++++- .../src/handlers/generate_trait_impl.rs | 226 ------------------ crates/ide-assists/src/lib.rs | 3 +- 3 files changed, 222 insertions(+), 230 deletions(-) delete mode 100644 crates/ide-assists/src/handlers/generate_trait_impl.rs diff --git a/crates/ide-assists/src/handlers/generate_impl.rs b/crates/ide-assists/src/handlers/generate_impl.rs index 1bea220f38..690c97e26d 100644 --- a/crates/ide-assists/src/handlers/generate_impl.rs +++ b/crates/ide-assists/src/handlers/generate_impl.rs @@ -1,6 +1,9 @@ use syntax::ast::{self, AstNode, HasName}; -use crate::{utils::generate_impl_text, AssistContext, AssistId, AssistKind, Assists}; +use crate::{ + utils::{generate_impl_text, generate_trait_impl_text}, + AssistContext, AssistId, AssistKind, Assists, +}; // Assist: generate_impl // @@ -50,6 +53,54 @@ pub(crate) fn generate_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> Optio ) } +// Assist: generate_trait_impl +// +// Adds a new trait impl for a type. 
+// +// ``` +// struct $0Ctx { +// data: T, +// } +// ``` +// -> +// ``` +// struct Ctx { +// data: T, +// } +// +// impl $0 for Ctx { +// +// } +// ``` +pub(crate) fn generate_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { + let nominal = ctx.find_node_at_offset::()?; + let name = nominal.name()?; + let target = nominal.syntax().text_range(); + + if let Some(_) = ctx.find_node_at_offset::() { + return None; + } + + acc.add( + AssistId("generate_trait_impl", AssistKind::Generate), + format!("Generate trait impl for `{name}`"), + target, + |edit| { + let start_offset = nominal.syntax().text_range().end(); + match ctx.config.snippet_cap { + Some(cap) => { + let snippet = generate_trait_impl_text(&nominal, "$0", ""); + edit.insert_snippet(cap, start_offset, snippet); + } + None => { + let text = generate_trait_impl_text(&nominal, "", ""); + edit.insert(start_offset, text); + } + } + }, + ) +} + #[cfg(test)] mod tests { use crate::tests::{check_assist, check_assist_target}; @@ -211,7 +262,7 @@ mod tests { } #[test] - fn add_trait_impl_target() { + fn add_impl_target() { check_assist_target( generate_impl, r#" @@ -223,4 +274,172 @@ mod tests { "/// Has a lifetime parameter\nstruct Foo<'a, T: Foo<'a>> {}", ); } + + #[test] + fn test_add_trait_impl() { + check_assist( + generate_trait_impl, + r#" + struct Foo$0 {} + "#, + r#" + struct Foo {} + + impl $0 for Foo { + + } + "#, + ); + } + + #[test] + fn test_add_trait_impl_with_generics() { + check_assist( + generate_trait_impl, + r#" + struct Foo$0 {} + "#, + r#" + struct Foo {} + + impl $0 for Foo { + + } + "#, + ); + } + + #[test] + fn test_add_trait_impl_with_generics_and_lifetime_parameters() { + check_assist( + generate_trait_impl, + r#" + struct Foo<'a, T: Foo<'a>>$0 {} + "#, + r#" + struct Foo<'a, T: Foo<'a>> {} + + impl<'a, T: Foo<'a>> $0 for Foo<'a, T> { + + } + "#, + ); + } + + #[test] + fn test_add_trait_impl_with_attributes() { + check_assist( + generate_trait_impl, + r#" + #[cfg(feature = "foo")] + struct Foo<'a, T: Foo$0<'a>> {} + "#, + r#" + #[cfg(feature = "foo")] + struct Foo<'a, T: Foo<'a>> {} + + #[cfg(feature = "foo")] + impl<'a, T: Foo<'a>> $0 for Foo<'a, T> { + + } + "#, + ); + } + + #[test] + fn test_add_trait_impl_with_default_generic() { + check_assist( + generate_trait_impl, + r#" + struct Defaulted$0 {} + "#, + r#" + struct Defaulted {} + + impl $0 for Defaulted { + + } + "#, + ); + } + + #[test] + fn test_add_trait_impl_with_constrained_default_generic() { + check_assist( + generate_trait_impl, + r#" + struct Defaulted$0<'a, 'b: 'a, T: Debug + Clone + 'a + 'b = String, const S: usize> {} + "#, + r#" + struct Defaulted<'a, 'b: 'a, T: Debug + Clone + 'a + 'b = String, const S: usize> {} + + impl<'a, 'b: 'a, T: Debug + Clone + 'a + 'b, const S: usize> $0 for Defaulted<'a, 'b, T, S> { + + } + "#, + ); + } + + #[test] + fn test_add_trait_impl_with_const_defaulted_generic() { + check_assist( + generate_trait_impl, + r#" + struct Defaulted$0 {} + "#, + r#" + struct Defaulted {} + + impl $0 for Defaulted { + + } + "#, + ); + } + + #[test] + fn test_add_trait_impl_with_trait_constraint() { + check_assist( + generate_trait_impl, + r#" + pub trait Trait {} + struct Struct$0 + where + T: Trait, + { + inner: T, + } + "#, + r#" + pub trait Trait {} + struct Struct + where + T: Trait, + { + inner: T, + } + + impl $0 for Struct + where + T: Trait, + { + + } + "#, + ); + } + + #[test] + fn add_trait_impl_target() { + check_assist_target( + generate_trait_impl, + r#" + struct SomeThingIrrelevant; + /// Has a 
lifetime parameter + struct Foo$0<'a, T: Foo<'a>> {} + struct EvenMoreIrrelevant; + "#, + "/// Has a lifetime parameter\nstruct Foo<'a, T: Foo<'a>> {}", + ); + } } diff --git a/crates/ide-assists/src/handlers/generate_trait_impl.rs b/crates/ide-assists/src/handlers/generate_trait_impl.rs deleted file mode 100644 index 7b1ee5b5c3..0000000000 --- a/crates/ide-assists/src/handlers/generate_trait_impl.rs +++ /dev/null @@ -1,226 +0,0 @@ -use syntax::ast::{self, AstNode, HasName}; - -use crate::{utils::generate_trait_impl_text, AssistContext, AssistId, AssistKind, Assists}; - -// Assist: generate_trait_impl -// -// Adds a new trait impl for a type. -// -// ``` -// struct $0Ctx { -// data: T, -// } -// ``` -// -> -// ``` -// struct Ctx { -// data: T, -// } -// -// impl $0 for Ctx { -// -// } -// ``` -pub(crate) fn generate_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { - let nominal = ctx.find_node_at_offset::()?; - let name = nominal.name()?; - let target = nominal.syntax().text_range(); - - if let Some(_) = ctx.find_node_at_offset::() { - return None; - } - - acc.add( - AssistId("generate_trait_impl", AssistKind::Generate), - format!("Generate trait impl for `{name}`"), - target, - |edit| { - let start_offset = nominal.syntax().text_range().end(); - match ctx.config.snippet_cap { - Some(cap) => { - let snippet = generate_trait_impl_text(&nominal, "$0", ""); - edit.insert_snippet(cap, start_offset, snippet); - } - None => { - let text = generate_trait_impl_text(&nominal, "", ""); - edit.insert(start_offset, text); - } - } - }, - ) -} - -#[cfg(test)] -mod tests { - use crate::tests::{check_assist, check_assist_target}; - - use super::*; - - #[test] - fn test_add_trait_impl() { - check_assist( - generate_trait_impl, - r#" - struct Foo$0 {} - "#, - r#" - struct Foo {} - - impl $0 for Foo { - - } - "#, - ); - } - - #[test] - fn test_add_trait_impl_with_generics() { - check_assist( - generate_trait_impl, - r#" - struct Foo$0 {} - "#, - r#" - struct Foo {} - - impl $0 for Foo { - - } - "#, - ); - } - - #[test] - fn test_add_trait_impl_with_generics_and_lifetime_parameters() { - check_assist( - generate_trait_impl, - r#" - struct Foo<'a, T: Foo<'a>>$0 {} - "#, - r#" - struct Foo<'a, T: Foo<'a>> {} - - impl<'a, T: Foo<'a>> $0 for Foo<'a, T> { - - } - "#, - ); - } - - #[test] - fn test_add_trait_impl_with_attributes() { - check_assist( - generate_trait_impl, - r#" - #[cfg(feature = "foo")] - struct Foo<'a, T: Foo$0<'a>> {} - "#, - r#" - #[cfg(feature = "foo")] - struct Foo<'a, T: Foo<'a>> {} - - #[cfg(feature = "foo")] - impl<'a, T: Foo<'a>> $0 for Foo<'a, T> { - - } - "#, - ); - } - - #[test] - fn test_add_trait_impl_with_default_generic() { - check_assist( - generate_trait_impl, - r#" - struct Defaulted$0 {} - "#, - r#" - struct Defaulted {} - - impl $0 for Defaulted { - - } - "#, - ); - } - - #[test] - fn test_add_trait_impl_with_constrained_default_generic() { - check_assist( - generate_trait_impl, - r#" - struct Defaulted$0<'a, 'b: 'a, T: Debug + Clone + 'a + 'b = String, const S: usize> {} - "#, - r#" - struct Defaulted<'a, 'b: 'a, T: Debug + Clone + 'a + 'b = String, const S: usize> {} - - impl<'a, 'b: 'a, T: Debug + Clone + 'a + 'b, const S: usize> $0 for Defaulted<'a, 'b, T, S> { - - } - "#, - ); - } - - #[test] - fn test_add_trait_impl_with_const_defaulted_generic() { - check_assist( - generate_trait_impl, - r#" - struct Defaulted$0 {} - "#, - r#" - struct Defaulted {} - - impl $0 for Defaulted { - - } - "#, - ); - } - - #[test] - fn 
test_add_trait_impl_with_trait_constraint() { - check_assist( - generate_trait_impl, - r#" - pub trait Trait {} - struct Struct$0 - where - T: Trait, - { - inner: T, - } - "#, - r#" - pub trait Trait {} - struct Struct - where - T: Trait, - { - inner: T, - } - - impl $0 for Struct - where - T: Trait, - { - - } - "#, - ); - } - - #[test] - fn add_trait_impl_target() { - check_assist_target( - generate_trait_impl, - r#" - struct SomeThingIrrelevant; - /// Has a lifetime parameter - struct Foo$0<'a, T: Foo<'a>> {} - struct EvenMoreIrrelevant; - "#, - "/// Has a lifetime parameter\nstruct Foo<'a, T: Foo<'a>> {}", - ); - } -} diff --git a/crates/ide-assists/src/lib.rs b/crates/ide-assists/src/lib.rs index e3c483cf0a..b12f99cc53 100644 --- a/crates/ide-assists/src/lib.rs +++ b/crates/ide-assists/src/lib.rs @@ -152,7 +152,6 @@ mod handlers { mod generate_function; mod generate_getter; mod generate_impl; - mod generate_trait_impl; mod generate_is_empty_from_len; mod generate_new; mod generate_setter; @@ -248,7 +247,7 @@ mod handlers { generate_from_impl_for_enum::generate_from_impl_for_enum, generate_function::generate_function, generate_impl::generate_impl, - generate_trait_impl::generate_trait_impl, + generate_impl::generate_trait_impl, generate_is_empty_from_len::generate_is_empty_from_len, generate_new::generate_new, inline_call::inline_call, From b116fe9be0812ce3052d7c96359e87cfe7aca558 Mon Sep 17 00:00:00 2001 From: Isobel Redelmeier Date: Mon, 21 Nov 2022 16:40:32 -0500 Subject: [PATCH 0596/1945] Fix: Handle empty `checkOnSave/target` values This fixes a regression introduced by #13290, in which failing to set `checkOnSave/target` (or `checkOnSave/targets`) would lead to an invalid config. --- crates/rust-analyzer/src/config.rs | 20 ++++++++++++++------ docs/user/generated_config.adoc | 2 +- editors/code/package.json | 5 ++++- 3 files changed, 19 insertions(+), 8 deletions(-) diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index 6b2f22faa7..6c0d712a4f 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs @@ -182,7 +182,7 @@ config_data! { /// `["aarch64-apple-darwin", "x86_64-apple-darwin"]`. /// /// Aliased as `"checkOnSave.targets"`. - checkOnSave_target | checkOnSave_targets: CheckOnSaveTargets = "[]", + checkOnSave_target | checkOnSave_targets: Option = "null", /// Toggles the additional completions that automatically add imports when completed. /// Note that your client must specify the `additionalTextEdits` LSP client capability to truly have this feature enabled. @@ -1153,10 +1153,15 @@ impl Config { } Some(_) | None => FlycheckConfig::CargoCommand { command: self.data.checkOnSave_command.clone(), - target_triples: match &self.data.checkOnSave_target.0[..] { - [] => self.data.cargo_target.clone().into_iter().collect(), - targets => targets.into(), - }, + target_triples: self + .data + .checkOnSave_target + .clone() + .and_then(|targets| match &targets.0[..] { + [] => None, + targets => Some(targets.into()), + }) + .unwrap_or_else(|| self.data.cargo_target.clone().into_iter().collect()), all_targets: self.data.checkOnSave_allTargets, no_default_features: self .data @@ -2126,8 +2131,11 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json "The command will be executed in the project root." ], }, - "CheckOnSaveTargets" => set! { + "Option" => set! 
{ "anyOf": [ + { + "type": "null" + }, { "type": "string", }, diff --git a/docs/user/generated_config.adoc b/docs/user/generated_config.adoc index 57f950034c..db41c7bf10 100644 --- a/docs/user/generated_config.adoc +++ b/docs/user/generated_config.adoc @@ -190,7 +190,7 @@ cargo check --workspace --message-format=json --all-targets ``` . -- -[[rust-analyzer.checkOnSave.target]]rust-analyzer.checkOnSave.target (default: `[]`):: +[[rust-analyzer.checkOnSave.target]]rust-analyzer.checkOnSave.target (default: `null`):: + -- Check for specific targets. Defaults to `#rust-analyzer.cargo.target#` if empty. diff --git a/editors/code/package.json b/editors/code/package.json index c4d4e428ea..a3385a3868 100644 --- a/editors/code/package.json +++ b/editors/code/package.json @@ -640,8 +640,11 @@ }, "rust-analyzer.checkOnSave.target": { "markdownDescription": "Check for specific targets. Defaults to `#rust-analyzer.cargo.target#` if empty.\n\nCan be a single target, e.g. `\"x86_64-unknown-linux-gnu\"` or a list of targets, e.g.\n`[\"aarch64-apple-darwin\", \"x86_64-apple-darwin\"]`.\n\nAliased as `\"checkOnSave.targets\"`.", - "default": [], + "default": null, "anyOf": [ + { + "type": "null" + }, { "type": "string" }, From 2300c9de83d0dbff1f27546a04f12a1ebe59c9e3 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Thu, 24 Nov 2022 10:21:19 +0100 Subject: [PATCH 0597/1945] Handle sysroot config in detached-files workspaces --- crates/project-model/src/workspace.rs | 58 ++++++++++++++++++--------- crates/rust-analyzer/src/reload.rs | 6 ++- 2 files changed, 42 insertions(+), 22 deletions(-) diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs index 4a2f468de7..d263912328 100644 --- a/crates/project-model/src/workspace.rs +++ b/crates/project-model/src/workspace.rs @@ -93,7 +93,7 @@ pub enum ProjectWorkspace { // // /// Project with a set of disjoint files, not belonging to any particular workspace. /// Backed by basic sysroot crates for basic completion and highlighting. - DetachedFiles { files: Vec, sysroot: Sysroot, rustc_cfg: Vec }, + DetachedFiles { files: Vec, sysroot: Option, rustc_cfg: Vec }, } impl fmt::Debug for ProjectWorkspace { @@ -133,7 +133,7 @@ impl fmt::Debug for ProjectWorkspace { ProjectWorkspace::DetachedFiles { files, sysroot, rustc_cfg } => f .debug_struct("DetachedFiles") .field("n_files", &files.len()) - .field("n_sysroot_crates", &sysroot.crates().len()) + .field("sysroot", &sysroot.is_some()) .field("n_rustc_cfg", &rustc_cfg.len()) .finish(), } @@ -191,10 +191,7 @@ impl ProjectWorkspace { let sysroot = match &config.sysroot { Some(RustcSource::Path(path)) => { Some(Sysroot::with_sysroot_dir(path.clone()).with_context(|| { - format!( - "Failed to find sysroot for Cargo.toml file {}.", - cargo_toml.display() - ) + format!("Failed to find sysroot at {}.", path.display()) })?) 
} Some(RustcSource::Discover) => Some( @@ -291,14 +288,29 @@ impl ProjectWorkspace { Ok(ProjectWorkspace::Json { project: project_json, sysroot, rustc_cfg }) } - pub fn load_detached_files(detached_files: Vec) -> Result { - let sysroot = Sysroot::discover( - detached_files - .first() - .and_then(|it| it.parent()) - .ok_or_else(|| format_err!("No detached files to load"))?, - &Default::default(), - )?; + pub fn load_detached_files( + detached_files: Vec, + config: &CargoConfig, + ) -> Result { + let sysroot = match &config.sysroot { + Some(RustcSource::Path(path)) => Some( + Sysroot::with_sysroot_dir(path.clone()) + .with_context(|| format!("Failed to find sysroot at {}.", path.display()))?, + ), + Some(RustcSource::Discover) => { + let dir = &detached_files + .first() + .and_then(|it| it.parent()) + .ok_or_else(|| format_err!("No detached files to load"))?; + Some(Sysroot::discover(dir, &config.extra_env).with_context(|| { + format!("Failed to find sysroot in {}. Is rust-src installed?", dir.display()) + })?) + } + None => None, + }; + if let Some(sysroot) = &sysroot { + tracing::info!(src_root = %sysroot.src_root().display(), root = %sysroot.root().display(), "Using sysroot"); + } let rustc_cfg = rustc_cfg::get(None, None, &Default::default()); Ok(ProjectWorkspace::DetachedFiles { files: detached_files, sysroot, rustc_cfg }) } @@ -464,21 +476,25 @@ impl ProjectWorkspace { include: vec![detached_file.clone()], exclude: Vec::new(), }) - .chain(mk_sysroot(Some(sysroot))) + .chain(mk_sysroot(sysroot.as_ref())) .collect(), } } pub fn n_packages(&self) -> usize { match self { - ProjectWorkspace::Json { project, .. } => project.n_crates(), + ProjectWorkspace::Json { project, sysroot, .. } => { + let sysroot_package_len = sysroot.as_ref().map_or(0, |it| it.crates().len()); + sysroot_package_len + project.n_crates() + } ProjectWorkspace::Cargo { cargo, sysroot, rustc, .. } => { let rustc_package_len = rustc.as_ref().map_or(0, |it| it.packages().len()); let sysroot_package_len = sysroot.as_ref().map_or(0, |it| it.crates().len()); cargo.packages().len() + sysroot_package_len + rustc_package_len } ProjectWorkspace::DetachedFiles { sysroot, files, .. 
} => { - sysroot.crates().len() + files.len() + let sysroot_package_len = sysroot.as_ref().map_or(0, |it| it.crates().len()); + sysroot_package_len + files.len() } } } @@ -790,12 +806,14 @@ fn detached_files_to_crate_graph( rustc_cfg: Vec, load: &mut dyn FnMut(&AbsPath) -> Option, detached_files: &[AbsPathBuf], - sysroot: &Sysroot, + sysroot: &Option, ) -> CrateGraph { let _p = profile::span("detached_files_to_crate_graph"); let mut crate_graph = CrateGraph::default(); - let (public_deps, _libproc_macro) = - sysroot_to_crate_graph(&mut crate_graph, sysroot, rustc_cfg.clone(), load); + let (public_deps, _libproc_macro) = match sysroot { + Some(sysroot) => sysroot_to_crate_graph(&mut crate_graph, sysroot, rustc_cfg.clone(), load), + None => (SysrootPublicDeps::default(), None), + }; let mut cfg_options = CfgOptions::default(); cfg_options.extend(rustc_cfg); diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs index aa0510a4ea..0306b6640b 100644 --- a/crates/rust-analyzer/src/reload.rs +++ b/crates/rust-analyzer/src/reload.rs @@ -158,8 +158,10 @@ impl GlobalState { .collect::>(); if !detached_files.is_empty() { - workspaces - .push(project_model::ProjectWorkspace::load_detached_files(detached_files)); + workspaces.push(project_model::ProjectWorkspace::load_detached_files( + detached_files, + &cargo_config, + )); } tracing::info!("did fetch workspaces {:?}", workspaces); From c8b6fef70fd77e412a665ea8a1e45b0122e68544 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Thu, 24 Nov 2022 21:30:15 +0100 Subject: [PATCH 0598/1945] Properly implement Drop for JodGroupChild --- crates/flycheck/src/lib.rs | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/crates/flycheck/src/lib.rs b/crates/flycheck/src/lib.rs index 8f93dad06e..f13088cd90 100644 --- a/crates/flycheck/src/lib.rs +++ b/crates/flycheck/src/lib.rs @@ -360,13 +360,20 @@ impl FlycheckActor { } } -struct JodChild(GroupChild); +struct JodGroupChild(GroupChild); + +impl Drop for JodGroupChild { + fn drop(&mut self) { + _ = self.0.kill(); + _ = self.0.wait(); + } +} /// A handle to a cargo process used for fly-checking. struct CargoHandle { /// The handle to the actual cargo process. As we cannot cancel directly from with /// a read syscall dropping and therefore terminating the process is our best option. 
- child: JodChild, + child: JodGroupChild, thread: jod_thread::JoinHandle>, receiver: Receiver, } @@ -374,7 +381,7 @@ struct CargoHandle { impl CargoHandle { fn spawn(mut command: Command) -> std::io::Result { command.stdout(Stdio::piped()).stderr(Stdio::piped()).stdin(Stdio::null()); - let mut child = command.group_spawn().map(JodChild)?; + let mut child = command.group_spawn().map(JodGroupChild)?; let stdout = child.0.inner().stdout.take().unwrap(); let stderr = child.0.inner().stderr.take().unwrap(); From 9fba39f0c02e43a0503a64b1ed4b36a05d8a1658 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Thu, 24 Nov 2022 22:26:05 +0100 Subject: [PATCH 0599/1945] Add deriveHelper to semanticTokenTypes section of package.json --- editors/code/package.json | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/editors/code/package.json b/editors/code/package.json index c4d4e428ea..eb4f258120 100644 --- a/editors/code/package.json +++ b/editors/code/package.json @@ -1493,6 +1493,11 @@ "description": "Style for derives", "superType": "attribute" }, + { + "id": "deriveHelper", + "description": "Style for derive helpers", + "superType": "attribute" + }, { "id": "dot", "description": "Style for .", From f64feeb11ac78db9c634339465d973ceb9cb3c97 Mon Sep 17 00:00:00 2001 From: Kartavya Vashishtha Date: Thu, 24 Nov 2022 22:32:15 -0800 Subject: [PATCH 0600/1945] Correct node traversal to look at parent instead Co-authored-by: Lukas Wirth --- crates/ide/src/hover.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs index 9751dc11db..5e812a1569 100644 --- a/crates/ide/src/hover.rs +++ b/crates/ide/src/hover.rs @@ -176,8 +176,8 @@ pub(crate) fn hover( return None; } - let record_pat_field_list = - token.parent_ancestors().find_map(ast::RecordPatFieldList::cast)?; + let rest_pat = token.syntax().parent().and_then(ast::RestPat::cast)?; + let record_pat_field_list = rest_pat.syntax().parent().and_then(ast::RecordPatFieldList::cast)?; let record_pat = record_pat_field_list.syntax().parent().and_then(ast::RecordPat::cast)?; From 132d5ffc7d86e77ee80a45545b8e9f503b59a8c4 Mon Sep 17 00:00:00 2001 From: Kartavya Vashishtha Date: Fri, 25 Nov 2022 12:03:49 +0530 Subject: [PATCH 0601/1945] add back [] in hover documentation --- crates/ide/src/hover.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs index 5e812a1569..cbf61efba0 100644 --- a/crates/ide/src/hover.rs +++ b/crates/ide/src/hover.rs @@ -87,7 +87,7 @@ pub struct HoverResult { // Shows additional information, like the type of an expression or the documentation for a definition when "focusing" code. // Focusing is usually hovering with a mouse, but can also be triggered with a shortcut. 
// -// image::https://user-images.githubusercontent.com/48062697/113020658-b5f98b80-917a-11eb-9f88-3dbc27320c95.gif +// image::https://user-images.githubusercontent.com/48062697/113020658-b5f98b80-917a-11eb-9f88-3dbc27320c95.gif[] pub(crate) fn hover( db: &RootDatabase, FileRange { file_id, range }: FileRange, From e86d451484bf06a452a330af4b1b0c00de286715 Mon Sep 17 00:00:00 2001 From: Kartavya Vashishtha Date: Fri, 25 Nov 2022 12:20:34 +0530 Subject: [PATCH 0602/1945] fix token method call --- crates/ide/src/hover.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs index cbf61efba0..016567311c 100644 --- a/crates/ide/src/hover.rs +++ b/crates/ide/src/hover.rs @@ -176,7 +176,7 @@ pub(crate) fn hover( return None; } - let rest_pat = token.syntax().parent().and_then(ast::RestPat::cast)?; + let rest_pat = token.parent().and_then(ast::RestPat::cast)?; let record_pat_field_list = rest_pat.syntax().parent().and_then(ast::RecordPatFieldList::cast)?; let record_pat = From 91e7624de01fc28e4abe2400a6f945b430ab0ab1 Mon Sep 17 00:00:00 2001 From: Kartavya Vashishtha Date: Fri, 25 Nov 2022 12:20:38 +0530 Subject: [PATCH 0603/1945] add hover tests --- crates/ide/src/hover/tests.rs | 35 +++++++++++++++++++++++++++++++++++ 1 file changed, 35 insertions(+) diff --git a/crates/ide/src/hover/tests.rs b/crates/ide/src/hover/tests.rs index eb997e6fef..f8be4cfb04 100644 --- a/crates/ide/src/hover/tests.rs +++ b/crates/ide/src/hover/tests.rs @@ -5307,3 +5307,38 @@ fn main() { $0V; } "#]], ); } + +#[test] +fn hover_rest_pat() { + check( + r#" +struct Struct {a: u32, b: u32, c: u8, d: u16}; + +fn main() { + let Struct {a, c, .$0.} = Struct {a: 1, b: 2, c: 3, d: 4}; +} +"#, + expect![[r#" + *..* + ```rust + .., b: u32, d: u16 + ``` + "#]], + ); + + check( + r#" +struct Struct {a: u32, b: u32, c: u8, d: u16}; + +fn main() { + let Struct {a, b, c, d, .$0.} = Struct {a: 1, b: 2, c: 3, d: 4}; +} +"#, + expect![[r#" + *..* + ```rust + .. 
+ ``` + "#]], + ); +} From a26aef9055e116d790ea63c37118aefcf93d3d0f Mon Sep 17 00:00:00 2001 From: Kartavya Vashishtha Date: Fri, 25 Nov 2022 12:22:06 +0530 Subject: [PATCH 0604/1945] fix formatting --- crates/ide/src/hover.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs index 016567311c..9cbfed4763 100644 --- a/crates/ide/src/hover.rs +++ b/crates/ide/src/hover.rs @@ -177,7 +177,8 @@ pub(crate) fn hover( } let rest_pat = token.parent().and_then(ast::RestPat::cast)?; - let record_pat_field_list = rest_pat.syntax().parent().and_then(ast::RecordPatFieldList::cast)?; + let record_pat_field_list = + rest_pat.syntax().parent().and_then(ast::RecordPatFieldList::cast)?; let record_pat = record_pat_field_list.syntax().parent().and_then(ast::RecordPat::cast)?; From 3c794a34da24398bfc3783966f14e21c59026cb6 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 25 Nov 2022 10:27:49 +0100 Subject: [PATCH 0605/1945] Go to declaration goes to assoc items of trait declarations --- crates/ide/src/goto_declaration.rs | 90 +++++++++++++++++++++++++++--- 1 file changed, 82 insertions(+), 8 deletions(-) diff --git a/crates/ide/src/goto_declaration.rs b/crates/ide/src/goto_declaration.rs index 926292c9b3..5c17b7f415 100644 --- a/crates/ide/src/goto_declaration.rs +++ b/crates/ide/src/goto_declaration.rs @@ -1,18 +1,22 @@ -use hir::Semantics; +use hir::{AsAssocItem, Semantics}; use ide_db::{ defs::{Definition, NameClass, NameRefClass}, RootDatabase, }; use syntax::{ast, match_ast, AstNode, SyntaxKind::*, T}; -use crate::{FilePosition, NavigationTarget, RangeInfo}; +use crate::{ + goto_definition::goto_definition, navigation_target::TryToNav, FilePosition, NavigationTarget, + RangeInfo, +}; // Feature: Go to Declaration // // Navigates to the declaration of an identifier. // -// This is currently the same as `Go to Definition` with the exception of outline modules where it -// will navigate to the `mod name;` item declaration. +// This is the same as `Go to Definition` with the following exceptions: +// - outline modules will navigate to the `mod name;` item declaration +// - trait assoc items will navigate to the assoc item of the trait declaration opposed to the trait impl pub(crate) fn goto_declaration( db: &RootDatabase, position: FilePosition, @@ -41,16 +45,28 @@ pub(crate) fn goto_declaration( _ => None } }; - match def? { + let assoc = match def? 
{ Definition::Module(module) => { - Some(NavigationTarget::from_module_to_decl(db, module)) + return Some(NavigationTarget::from_module_to_decl(db, module)) } + Definition::Const(c) => c.as_assoc_item(db), + Definition::TypeAlias(ta) => ta.as_assoc_item(db), + Definition::Function(f) => f.as_assoc_item(db), _ => None, - } + }?; + + let trait_ = assoc.containing_trait_impl(db)?; + let name = Some(assoc.name(db)?); + let item = trait_.items(db).into_iter().find(|it| it.name(db) == name)?; + item.try_to_nav(db) }) .collect(); - Some(RangeInfo::new(range, info)) + if info.is_empty() { + goto_definition(db, position) + } else { + Some(RangeInfo::new(range, info)) + } } #[cfg(test)] @@ -109,4 +125,62 @@ mod foo { "#, ) } + + #[test] + fn goto_decl_goto_def_fallback() { + check( + r#" +struct Foo; + // ^^^ +impl Foo$0 {} +"#, + ); + } + + #[test] + fn goto_decl_assoc_item_no_impl_item() { + check( + r#" +trait Trait { + const C: () = (); + // ^ +} +impl Trait for () {} + +fn main() { + <()>::C$0; +} +"#, + ); + } + + #[test] + fn goto_decl_assoc_item() { + check( + r#" +trait Trait { + const C: () = (); + // ^ +} +impl Trait for () { + const C: () = (); +} + +fn main() { + <()>::C$0; +} +"#, + ); + check( + r#" +trait Trait { + const C: () = (); + // ^ +} +impl Trait for () { + const C$0: () = (); +} +"#, + ); + } } From ae0bdffcc9b9e25412f118dfe788cf112fb7e2ef Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 25 Nov 2022 00:03:48 +0100 Subject: [PATCH 0606/1945] Go to declaration goes to field declaration in pattern and expression shorthands --- crates/ide/src/goto_declaration.rs | 33 +++++++++++++++++++++++++++--- 1 file changed, 30 insertions(+), 3 deletions(-) diff --git a/crates/ide/src/goto_declaration.rs b/crates/ide/src/goto_declaration.rs index 5c17b7f415..c7130a2a4b 100644 --- a/crates/ide/src/goto_declaration.rs +++ b/crates/ide/src/goto_declaration.rs @@ -36,11 +36,11 @@ pub(crate) fn goto_declaration( match parent { ast::NameRef(name_ref) => match NameRefClass::classify(&sema, &name_ref)? { NameRefClass::Definition(it) => Some(it), - _ => None + NameRefClass::FieldShorthand { field_ref, .. } => return field_ref.try_to_nav(db), }, ast::Name(name) => match NameClass::classify(&sema, &name)? { - NameClass::Definition(it) => Some(it), - _ => None + NameClass::Definition(it) | NameClass::ConstReference(it) => Some(it), + NameClass::PatFieldShorthand { field_ref, .. 
} => return field_ref.try_to_nav(db), }, _ => None } @@ -180,6 +180,33 @@ trait Trait { impl Trait for () { const C$0: () = (); } +"#, + ); + } + + #[test] + fn goto_decl_field_pat_shorthand() { + check( + r#" +struct Foo { field: u32 } + //^^^^^ +fn main() { + let Foo { field$0 }; +} +"#, + ); + } + + #[test] + fn goto_decl_constructor_shorthand() { + check( + r#" +struct Foo { field: u32 } + //^^^^^ +fn main() { + let field = 0; + Foo { field$0 }; +} "#, ); } From 7bf2a25dfe8337fe46a2698756cdfab46556ef88 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 25 Nov 2022 23:17:21 +0100 Subject: [PATCH 0607/1945] Encode the variants of `HirFileId` in a u32 with MSB as the tag --- crates/hir-expand/src/db.rs | 2 +- crates/hir-expand/src/hygiene.rs | 8 +- crates/hir-expand/src/lib.rs | 227 +++++++++--------- .../test_symbol_index_collection.txt | 134 ++--------- 4 files changed, 145 insertions(+), 226 deletions(-) diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs index 87e4db0398..0096649be1 100644 --- a/crates/hir-expand/src/db.rs +++ b/crates/hir-expand/src/db.rs @@ -240,7 +240,7 @@ fn ast_id_map(db: &dyn AstDatabase, file_id: HirFileId) -> Arc { } fn parse_or_expand(db: &dyn AstDatabase, file_id: HirFileId) -> Option { - match file_id.0 { + match file_id.repr() { HirFileIdRepr::FileId(file_id) => Some(db.parse(file_id).tree().syntax().clone()), HirFileIdRepr::MacroFile(macro_file) => { // FIXME: Note how we convert from `Parse` to `SyntaxNode` here, diff --git a/crates/hir-expand/src/hygiene.rs b/crates/hir-expand/src/hygiene.rs index d60734372c..df1e20256c 100644 --- a/crates/hir-expand/src/hygiene.rs +++ b/crates/hir-expand/src/hygiene.rs @@ -17,7 +17,7 @@ use crate::{ db::{self, AstDatabase}, fixup, name::{AsName, Name}, - HirFileId, HirFileIdRepr, InFile, MacroCallKind, MacroCallLoc, MacroDefKind, MacroFile, + HirFileId, InFile, MacroCallKind, MacroCallLoc, MacroDefKind, MacroFile, }; #[derive(Clone, Debug)] @@ -216,9 +216,9 @@ fn make_hygiene_info( impl HygieneFrame { pub(crate) fn new(db: &dyn AstDatabase, file_id: HirFileId) -> HygieneFrame { - let (info, krate, local_inner) = match file_id.0 { - HirFileIdRepr::FileId(_) => (None, None, false), - HirFileIdRepr::MacroFile(macro_file) => { + let (info, krate, local_inner) = match file_id.macro_file() { + None => (None, None, false), + Some(macro_file) => { let loc = db.lookup_intern_macro_call(macro_file.macro_call_id); let info = make_hygiene_info(db, macro_file, &loc).map(|info| (loc.kind.file_id(), info)); diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index 7352b003a4..bc5f9f3b8a 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -23,7 +23,11 @@ pub use mbe::{Origin, ValueResult}; use std::{fmt, hash::Hash, iter, sync::Arc}; -use base_db::{impl_intern_key, salsa, CrateId, FileId, FileRange, ProcMacroKind}; +use base_db::{ + impl_intern_key, + salsa::{self, InternId}, + CrateId, FileId, FileRange, ProcMacroKind, +}; use either::Either; use syntax::{ algo::{self, skip_trivia_token}, @@ -79,26 +83,12 @@ impl fmt::Display for ExpandError { /// finite (because everything bottoms out at the real `FileId`) and small /// (`MacroCallId` uses the location interning. You can check details here: /// ). +/// +/// The two variants are encoded in a single u32 which are differentiated by the MSB. +/// If the MSB is 0, the value represents a `FileId`, otherwise the remaining 31 bits represent a +/// `MacroCallId`. 
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct HirFileId(HirFileIdRepr); - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -enum HirFileIdRepr { - FileId(FileId), - MacroFile(MacroFile), -} - -impl From for HirFileId { - fn from(id: FileId) -> Self { - HirFileId(HirFileIdRepr::FileId(id)) - } -} - -impl From for HirFileId { - fn from(id: MacroFile) -> Self { - HirFileId(HirFileIdRepr::MacroFile(id)) - } -} +pub struct HirFileId(u32); #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct MacroFile { @@ -172,13 +162,37 @@ pub enum MacroCallKind { }, } +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +enum HirFileIdRepr { + FileId(FileId), + MacroFile(MacroFile), +} + +impl From for HirFileId { + fn from(FileId(id): FileId) -> Self { + assert!(id < Self::MAX_FILE_ID); + HirFileId(id) + } +} + +impl From for HirFileId { + fn from(MacroFile { macro_call_id: MacroCallId(id) }: MacroFile) -> Self { + let id = id.as_u32(); + assert!(id < Self::MAX_FILE_ID); + HirFileId(id | Self::MACRO_FILE_TAG_MASK) + } +} + impl HirFileId { + const MAX_FILE_ID: u32 = u32::MAX ^ Self::MACRO_FILE_TAG_MASK; + const MACRO_FILE_TAG_MASK: u32 = 1 << 31; + /// For macro-expansion files, returns the file original source file the /// expansion originated from. pub fn original_file(self, db: &dyn db::AstDatabase) -> FileId { let mut file_id = self; loop { - match file_id.0 { + match file_id.repr() { HirFileIdRepr::FileId(id) => break id, HirFileIdRepr::MacroFile(MacroFile { macro_call_id }) => { let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_call_id); @@ -194,7 +208,7 @@ impl HirFileId { pub fn expansion_level(self, db: &dyn db::AstDatabase) -> u32 { let mut level = 0; let mut curr = self; - while let HirFileIdRepr::MacroFile(macro_file) = curr.0 { + while let Some(macro_file) = curr.macro_file() { let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); level += 1; @@ -205,25 +219,17 @@ impl HirFileId { /// If this is a macro call, returns the syntax node of the call. pub fn call_node(self, db: &dyn db::AstDatabase) -> Option> { - match self.0 { - HirFileIdRepr::FileId(_) => None, - HirFileIdRepr::MacroFile(macro_file) => { - let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); - Some(loc.kind.to_node(db)) - } - } + let macro_file = self.macro_file()?; + let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); + Some(loc.kind.to_node(db)) } /// If this is a macro call, returns the syntax node of the very first macro call this file resides in. 
pub fn original_call_node(self, db: &dyn db::AstDatabase) -> Option<(FileId, SyntaxNode)> { - let mut call = match self.0 { - HirFileIdRepr::FileId(_) => return None, - HirFileIdRepr::MacroFile(MacroFile { macro_call_id }) => { - db.lookup_intern_macro_call(macro_call_id).kind.to_node(db) - } - }; + let mut call = + db.lookup_intern_macro_call(self.macro_file()?.macro_call_id).kind.to_node(db); loop { - match call.file_id.0 { + match call.file_id.repr() { HirFileIdRepr::FileId(file_id) => break Some((file_id, call.value)), HirFileIdRepr::MacroFile(MacroFile { macro_call_id }) => { call = db.lookup_intern_macro_call(macro_call_id).kind.to_node(db); @@ -234,84 +240,74 @@ impl HirFileId { /// Return expansion information if it is a macro-expansion file pub fn expansion_info(self, db: &dyn db::AstDatabase) -> Option { - match self.0 { - HirFileIdRepr::FileId(_) => None, - HirFileIdRepr::MacroFile(macro_file) => { - let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); + let macro_file = self.macro_file()?; + let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); - let arg_tt = loc.kind.arg(db)?; + let arg_tt = loc.kind.arg(db)?; - let macro_def = db.macro_def(loc.def).ok()?; - let (parse, exp_map) = db.parse_macro_expansion(macro_file).value?; - let macro_arg = db.macro_arg(macro_file.macro_call_id)?; + let macro_def = db.macro_def(loc.def).ok()?; + let (parse, exp_map) = db.parse_macro_expansion(macro_file).value?; + let macro_arg = db.macro_arg(macro_file.macro_call_id)?; - let def = loc.def.ast_id().left().and_then(|id| { - let def_tt = match id.to_node(db) { - ast::Macro::MacroRules(mac) => mac.token_tree()?, - ast::Macro::MacroDef(_) - if matches!(*macro_def, TokenExpander::BuiltinAttr(_)) => - { - return None - } - ast::Macro::MacroDef(mac) => mac.body()?, - }; - Some(InFile::new(id.file_id, def_tt)) - }); - let attr_input_or_mac_def = def.or_else(|| match loc.kind { - MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => { - let tt = ast_id - .to_node(db) - .doc_comments_and_attrs() - .nth(invoc_attr_index as usize) - .and_then(Either::left)? - .token_tree()?; - Some(InFile::new(ast_id.file_id, tt)) - } - _ => None, - }); - - Some(ExpansionInfo { - expanded: InFile::new(self, parse.syntax_node()), - arg: InFile::new(loc.kind.file_id(), arg_tt), - attr_input_or_mac_def, - macro_arg_shift: mbe::Shift::new(¯o_arg.0), - macro_arg, - macro_def, - exp_map, - }) + let def = loc.def.ast_id().left().and_then(|id| { + let def_tt = match id.to_node(db) { + ast::Macro::MacroRules(mac) => mac.token_tree()?, + ast::Macro::MacroDef(_) if matches!(*macro_def, TokenExpander::BuiltinAttr(_)) => { + return None + } + ast::Macro::MacroDef(mac) => mac.body()?, + }; + Some(InFile::new(id.file_id, def_tt)) + }); + let attr_input_or_mac_def = def.or_else(|| match loc.kind { + MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => { + let tt = ast_id + .to_node(db) + .doc_comments_and_attrs() + .nth(invoc_attr_index as usize) + .and_then(Either::left)? 
+ .token_tree()?; + Some(InFile::new(ast_id.file_id, tt)) } - } + _ => None, + }); + + Some(ExpansionInfo { + expanded: InFile::new(self, parse.syntax_node()), + arg: InFile::new(loc.kind.file_id(), arg_tt), + attr_input_or_mac_def, + macro_arg_shift: mbe::Shift::new(¯o_arg.0), + macro_arg, + macro_def, + exp_map, + }) } /// Indicate it is macro file generated for builtin derive pub fn is_builtin_derive(&self, db: &dyn db::AstDatabase) -> Option> { - match self.0 { - HirFileIdRepr::FileId(_) => None, - HirFileIdRepr::MacroFile(macro_file) => { - let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); - let attr = match loc.def.kind { - MacroDefKind::BuiltInDerive(..) => loc.kind.to_node(db), - _ => return None, - }; - Some(attr.with_value(ast::Attr::cast(attr.value.clone())?)) - } - } + let macro_file = self.macro_file()?; + let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); + let attr = match loc.def.kind { + MacroDefKind::BuiltInDerive(..) => loc.kind.to_node(db), + _ => return None, + }; + Some(attr.with_value(ast::Attr::cast(attr.value.clone())?)) } pub fn is_custom_derive(&self, db: &dyn db::AstDatabase) -> bool { - match self.0 { - HirFileIdRepr::FileId(_) => false, - HirFileIdRepr::MacroFile(macro_file) => { + match self.macro_file() { + Some(macro_file) => { let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); matches!(loc.def.kind, MacroDefKind::ProcMacro(_, ProcMacroKind::CustomDerive, _)) } + None => false, } } /// Return whether this file is an include macro pub fn is_include_macro(&self, db: &dyn db::AstDatabase) -> bool { - match self.0 { - HirFileIdRepr::MacroFile(macro_file) => { + match self.macro_file() { + Some(macro_file) => { let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); matches!(loc.eager, Some(EagerCallInfo { included_file: Some(_), .. })) } @@ -321,8 +317,8 @@ impl HirFileId { /// Return whether this file is an attr macro pub fn is_attr_macro(&self, db: &dyn db::AstDatabase) -> bool { - match self.0 { - HirFileIdRepr::MacroFile(macro_file) => { + match self.macro_file() { + Some(macro_file) => { let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); matches!(loc.kind, MacroCallKind::Attr { .. }) } @@ -333,23 +329,36 @@ impl HirFileId { /// Return whether this file is the pseudo expansion of the derive attribute. /// See [`crate::builtin_attr_macro::derive_attr_expand`]. pub fn is_derive_attr_pseudo_expansion(&self, db: &dyn db::AstDatabase) -> bool { - match self.0 { - HirFileIdRepr::MacroFile(macro_file) => { + match self.macro_file() { + Some(macro_file) => { let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); matches!(loc.kind, MacroCallKind::Attr { is_derive: true, .. 
}) } - _ => false, + None => false, } } + #[inline] pub fn is_macro(self) -> bool { - matches!(self.0, HirFileIdRepr::MacroFile(_)) + self.0 & Self::MACRO_FILE_TAG_MASK != 0 } + #[inline] pub fn macro_file(self) -> Option { - match self.0 { - HirFileIdRepr::FileId(_) => None, - HirFileIdRepr::MacroFile(m) => Some(m), + match self.0 & Self::MACRO_FILE_TAG_MASK { + 0 => None, + _ => Some(MacroFile { + macro_call_id: MacroCallId(InternId::from(self.0 ^ Self::MACRO_FILE_TAG_MASK)), + }), + } + } + + fn repr(self) -> HirFileIdRepr { + match self.0 & Self::MACRO_FILE_TAG_MASK { + 0 => HirFileIdRepr::FileId(FileId(self.0)), + _ => HirFileIdRepr::MacroFile(MacroFile { + macro_call_id: MacroCallId(InternId::from(self.0 ^ Self::MACRO_FILE_TAG_MASK)), + }), } } } @@ -442,7 +451,7 @@ impl MacroCallKind { pub fn original_call_range_with_body(self, db: &dyn db::AstDatabase) -> FileRange { let mut kind = self; let file_id = loop { - match kind.file_id().0 { + match kind.file_id().repr() { HirFileIdRepr::MacroFile(file) => { kind = db.lookup_intern_macro_call(file.macro_call_id).kind; } @@ -467,7 +476,7 @@ impl MacroCallKind { pub fn original_call_range(self, db: &dyn db::AstDatabase) -> FileRange { let mut kind = self; let file_id = loop { - match kind.file_id().0 { + match kind.file_id().repr() { HirFileIdRepr::MacroFile(file) => { kind = db.lookup_intern_macro_call(file.macro_call_id).kind; } @@ -779,7 +788,7 @@ impl<'a> InFile<&'a SyntaxNode> { /// For attributes and derives, this will point back to the attribute only. /// For the entire item `InFile::use original_file_range_full`. pub fn original_file_range(self, db: &dyn db::AstDatabase) -> FileRange { - match self.file_id.0 { + match self.file_id.repr() { HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() }, HirFileIdRepr::MacroFile(mac_file) => { if let Some(res) = self.original_file_range_opt(db) { @@ -846,7 +855,7 @@ impl InFile { /// Falls back to the macro call range if the node cannot be mapped up fully. pub fn original_file_range(self, db: &dyn db::AstDatabase) -> FileRange { - match self.file_id.0 { + match self.file_id.repr() { HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() }, HirFileIdRepr::MacroFile(mac_file) => { if let Some(res) = self.original_file_range_opt(db) { @@ -861,7 +870,7 @@ impl InFile { /// Attempts to map the syntax node back up its macro calls. 
pub fn original_file_range_opt(self, db: &dyn db::AstDatabase) -> Option { - match self.file_id.0 { + match self.file_id.repr() { HirFileIdRepr::FileId(file_id) => { Some(FileRange { file_id, range: self.value.text_range() }) } diff --git a/crates/ide-db/src/test_data/test_symbol_index_collection.txt b/crates/ide-db/src/test_data/test_symbol_index_collection.txt index 2f531ca0c7..8c11408dec 100644 --- a/crates/ide-db/src/test_data/test_symbol_index_collection.txt +++ b/crates/ide-db/src/test_data/test_symbol_index_collection.txt @@ -14,11 +14,7 @@ name: "Alias", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: TYPE_ALIAS, @@ -36,11 +32,7 @@ name: "CONST", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: CONST, @@ -58,11 +50,7 @@ name: "CONST_WITH_INNER", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: CONST, @@ -80,11 +68,7 @@ name: "Enum", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: ENUM, @@ -102,11 +86,7 @@ name: "Macro", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: MACRO_DEF, @@ -124,11 +104,7 @@ name: "STATIC", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: STATIC, @@ -146,11 +122,7 @@ name: "Struct", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -168,13 +140,7 @@ name: "StructFromMacro", loc: DeclarationLocation { hir_file_id: HirFileId( - MacroFile( - MacroFile { - macro_call_id: MacroCallId( - 0, - ), - }, - ), + 2147483648, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -192,11 +158,7 @@ name: "StructInFn", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -216,11 +178,7 @@ name: "StructInNamedConst", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -240,11 +198,7 @@ name: "StructInUnnamedConst", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -262,11 +216,7 @@ name: "Trait", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: TRAIT, @@ -284,11 +234,7 @@ name: "Union", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: UNION, @@ -306,11 +252,7 @@ name: "a_mod", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: MODULE, @@ -328,11 +270,7 @@ name: "b_mod", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: MODULE, @@ -350,11 +288,7 @@ name: "define_struct", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: MACRO_RULES, @@ -372,11 +306,7 @@ name: "impl_fn", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: FN, @@ -394,11 +324,7 @@ name: "macro_rules_macro", loc: DeclarationLocation { hir_file_id: HirFileId( - 
FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: MACRO_RULES, @@ -416,11 +342,7 @@ name: "main", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: FN, @@ -438,11 +360,7 @@ name: "trait_fn", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: FN, @@ -475,11 +393,7 @@ name: "StructInModA", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -510,11 +424,7 @@ name: "StructInModB", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 1, - ), - ), + 1, ), ptr: SyntaxNodePtr { kind: STRUCT, From 822c61f559dc522dbd28f2886d20989a55613fc0 Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Sat, 26 Nov 2022 23:51:57 +0900 Subject: [PATCH 0608/1945] refactor: remove unnecessary stuff --- crates/ide-assists/src/handlers/extract_function.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/ide-assists/src/handlers/extract_function.rs b/crates/ide-assists/src/handlers/extract_function.rs index c1e2f19ab1..10a3a33226 100644 --- a/crates/ide-assists/src/handlers/extract_function.rs +++ b/crates/ide-assists/src/handlers/extract_function.rs @@ -265,7 +265,7 @@ enum ParamKind { MutRef, } -#[derive(Debug, Eq, PartialEq)] +#[derive(Debug)] enum FunType { Unit, Single(hir::Type), @@ -1368,7 +1368,7 @@ impl FlowHandler { None => FlowHandler::None, Some(flow_kind) => { let action = flow_kind.clone(); - if *ret_ty == FunType::Unit { + if let FunType::Unit = ret_ty { match flow_kind { FlowKind::Return(None) | FlowKind::Break(_, None) @@ -1946,7 +1946,7 @@ fn update_external_control_flow(handler: &FlowHandler, syntax: &SyntaxNode) { if nested_scope.is_none() { if let Some(expr) = ast::Expr::cast(e.clone()) { match expr { - ast::Expr::ReturnExpr(return_expr) if nested_scope.is_none() => { + ast::Expr::ReturnExpr(return_expr) => { let expr = return_expr.expr(); if let Some(replacement) = make_rewritten_flow(handler, expr) { ted::replace(return_expr.syntax(), replacement.syntax()) From 8e03f18e37d2782189391955bc56d3aebead81f5 Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Sat, 26 Nov 2022 23:51:22 +0900 Subject: [PATCH 0609/1945] fix: check if range contains tail expression --- .../src/handlers/extract_function.rs | 204 ++++++++++++++++-- 1 file changed, 183 insertions(+), 21 deletions(-) diff --git a/crates/ide-assists/src/handlers/extract_function.rs b/crates/ide-assists/src/handlers/extract_function.rs index 10a3a33226..0483cfdc64 100644 --- a/crates/ide-assists/src/handlers/extract_function.rs +++ b/crates/ide-assists/src/handlers/extract_function.rs @@ -11,7 +11,9 @@ use ide_db::{ helpers::mod_path_to_ast, imports::insert_use::{insert_use, ImportScope}, search::{FileReference, ReferenceCategory, SearchScope}, - syntax_helpers::node_ext::{preorder_expr, walk_expr, walk_pat, walk_patterns_in_expr}, + syntax_helpers::node_ext::{ + for_each_tail_expr, preorder_expr, walk_expr, walk_pat, walk_patterns_in_expr, + }, FxIndexSet, RootDatabase, }; use itertools::Itertools; @@ -78,7 +80,7 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op }; let body = extraction_target(&node, range)?; - let container_info = body.analyze_container(&ctx.sema)?; + let (container_info, contains_tail_expr) = body.analyze_container(&ctx.sema)?; let (locals_used, self_param) = body.analyze(&ctx.sema); @@ -119,6 +121,7 @@ pub(crate) 
fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op ret_ty, body, outliving_locals, + contains_tail_expr, mods: container_info, }; @@ -245,6 +248,8 @@ struct Function { ret_ty: RetType, body: FunctionBody, outliving_locals: Vec, + /// Whether at least one of the container's tail expr is contained in the range we're extracting. + contains_tail_expr: bool, mods: ContainerInfo, } @@ -294,7 +299,6 @@ struct ControlFlow { #[derive(Clone, Debug)] struct ContainerInfo { is_const: bool, - is_in_tail: bool, parent_loop: Option, /// The function's return type, const's type etc. ret_type: Option, @@ -743,7 +747,10 @@ impl FunctionBody { (res, self_param) } - fn analyze_container(&self, sema: &Semantics<'_, RootDatabase>) -> Option { + fn analyze_container( + &self, + sema: &Semantics<'_, RootDatabase>, + ) -> Option<(ContainerInfo, bool)> { let mut ancestors = self.parent()?.ancestors(); let infer_expr_opt = |expr| sema.type_of_expr(&expr?).map(TypeInfo::adjusted); let mut parent_loop = None; @@ -815,28 +822,36 @@ impl FunctionBody { } }; }; - let container_tail = match expr? { - ast::Expr::BlockExpr(block) => block.tail_expr(), - expr => Some(expr), - }; - let is_in_tail = - container_tail.zip(self.tail_expr()).map_or(false, |(container_tail, body_tail)| { - container_tail.syntax().text_range().contains_range(body_tail.syntax().text_range()) + + let expr = expr?; + let contains_tail_expr = if let Some(body_tail) = self.tail_expr() { + let mut contains_tail_expr = false; + let tail_expr_range = body_tail.syntax().text_range(); + for_each_tail_expr(&expr, &mut |e| { + if tail_expr_range.contains_range(e.syntax().text_range()) { + contains_tail_expr = true; + } }); + contains_tail_expr + } else { + false + }; let parent = self.parent()?; let parents = generic_parents(&parent); let generic_param_lists = parents.iter().filter_map(|it| it.generic_param_list()).collect(); let where_clauses = parents.iter().filter_map(|it| it.where_clause()).collect(); - Some(ContainerInfo { - is_in_tail, - is_const, - parent_loop, - ret_type: ty, - generic_param_lists, - where_clauses, - }) + Some(( + ContainerInfo { + is_const, + parent_loop, + ret_type: ty, + generic_param_lists, + where_clauses, + }, + contains_tail_expr, + )) } fn return_ty(&self, ctx: &AssistContext<'_>) -> Option { @@ -1633,7 +1648,7 @@ impl Function { fn make_ret_ty(&self, ctx: &AssistContext<'_>, module: hir::Module) -> Option { let fun_ty = self.return_type(ctx); - let handler = if self.mods.is_in_tail { + let handler = if self.contains_tail_expr { FlowHandler::None } else { FlowHandler::from_ret_ty(self, &fun_ty) @@ -1707,7 +1722,7 @@ fn make_body( fun: &Function, ) -> ast::BlockExpr { let ret_ty = fun.return_type(ctx); - let handler = if fun.mods.is_in_tail { + let handler = if fun.contains_tail_expr { FlowHandler::None } else { FlowHandler::from_ret_ty(fun, &ret_ty) @@ -5582,6 +5597,153 @@ impl Struct where T: Into + Copy, U: Debug { fn $0fun_name(t: T, v: V) -> i32 where T: Into + Copy, V: Into { t.into() + v.into() } +"#, + ); + } + + #[test] + fn non_tail_expr_of_tail_expr_loop() { + check_assist( + extract_function, + r#" +pub fn f() { + loop { + $0if true { + continue; + }$0 + + if false { + break; + } + } +} +"#, + r#" +pub fn f() { + loop { + if let ControlFlow::Break(_) = fun_name() { + continue; + } + + if false { + break; + } + } +} + +fn $0fun_name() -> ControlFlow<()> { + if true { + return ControlFlow::Break(()); + } + ControlFlow::Continue(()) +} +"#, + ); + } + + #[test] + fn non_tail_expr_of_tail_if_block() { + 
// FIXME: double semicolon + check_assist( + extract_function, + r#" +//- minicore: option, try +impl core::ops::Try for Option { + type Output = T; + type Residual = Option; +} +impl core::ops::FromResidual for Option {} + +fn f() -> Option<()> { + if true { + let a = $0if true { + Some(())? + } else { + () + }$0; + Some(a) + } else { + None + } +} +"#, + r#" +impl core::ops::Try for Option { + type Output = T; + type Residual = Option; +} +impl core::ops::FromResidual for Option {} + +fn f() -> Option<()> { + if true { + let a = fun_name()?;; + Some(a) + } else { + None + } +} + +fn $0fun_name() -> Option<()> { + Some(if true { + Some(())? + } else { + () + }) +} +"#, + ); + } + + #[test] + fn tail_expr_of_tail_block_nested() { + check_assist( + extract_function, + r#" +//- minicore: option, try +impl core::ops::Try for Option { + type Output = T; + type Residual = Option; +} +impl core::ops::FromResidual for Option {} + +fn f() -> Option<()> { + if true { + $0{ + let a = if true { + Some(())? + } else { + () + }; + Some(a) + }$0 + } else { + None + } +} +"#, + r#" +impl core::ops::Try for Option { + type Output = T; + type Residual = Option; +} +impl core::ops::FromResidual for Option {} + +fn f() -> Option<()> { + if true { + fun_name()? + } else { + None + } +} + +fn $0fun_name() -> Option<()> { + let a = if true { + Some(())? + } else { + () + }; + Some(a) +} "#, ); } From e1de04d60ceff530295f57ae47a65e9c05716bbf Mon Sep 17 00:00:00 2001 From: yue4u Date: Sun, 27 Nov 2022 01:53:45 +0900 Subject: [PATCH 0610/1945] fix: only special casing 3 colon in a row --- crates/ide-completion/src/context.rs | 22 +++------------------- crates/ide-completion/src/tests/special.rs | 8 +------- 2 files changed, 4 insertions(+), 26 deletions(-) diff --git a/crates/ide-completion/src/context.rs b/crates/ide-completion/src/context.rs index a0e1153f08..0e3b677f2d 100644 --- a/crates/ide-completion/src/context.rs +++ b/crates/ide-completion/src/context.rs @@ -581,7 +581,9 @@ impl<'a> CompletionContext<'a> { return None; } - if !is_prev_token_valid_path_start_or_segment(&prev_token) { + // has 3 colon in a row + // special casing this as per discussion in https://github.com/rust-lang/rust-analyzer/pull/13611#discussion_r1031845205 + if prev_token.prev_token().map(|t| t.kind() == T![:]).unwrap_or(false) { return None; } } @@ -637,24 +639,6 @@ impl<'a> CompletionContext<'a> { } } -fn is_prev_token_valid_path_start_or_segment(token: &SyntaxToken) -> bool { - if let Some(prev_token) = token.prev_token() { - // token before coloncolon is invalid - if !matches!( - prev_token.kind(), - // trival - WHITESPACE | COMMENT - // PathIdentSegment - | IDENT | T![super] | T![self] | T![Self] | T![crate] - // QualifiedPath - | T![>] - ) { - return false; - } - } - true -} - const OP_TRAIT_LANG_NAMES: &[&str] = &[ "add_assign", "add", diff --git a/crates/ide-completion/src/tests/special.rs b/crates/ide-completion/src/tests/special.rs index 6cfa72d653..0e59f4ec54 100644 --- a/crates/ide-completion/src/tests/special.rs +++ b/crates/ide-completion/src/tests/special.rs @@ -967,16 +967,10 @@ fn foo { crate:$0 } } #[test] -fn no_completions_in_invalid_path() { +fn no_completions_in_after_tripple_colon() { check( r#" fn foo { crate:::$0 } -"#, - expect![""], - ); - check( - r#" -fn foo { crate::::$0 } "#, expect![""], ); From 1ca5cb7ed9928fad4222cb26727da41ff9ac148f Mon Sep 17 00:00:00 2001 From: yue4u Date: Sun, 27 Nov 2022 02:39:38 +0900 Subject: [PATCH 0611/1945] fix: also exclude 2 coloncolon in a row --- 
crates/ide-completion/src/context.rs | 9 +++++++-- crates/ide-completion/src/tests/special.rs | 8 +++++++- 2 files changed, 14 insertions(+), 3 deletions(-) diff --git a/crates/ide-completion/src/context.rs b/crates/ide-completion/src/context.rs index 0e3b677f2d..aa77f44953 100644 --- a/crates/ide-completion/src/context.rs +++ b/crates/ide-completion/src/context.rs @@ -581,9 +581,14 @@ impl<'a> CompletionContext<'a> { return None; } - // has 3 colon in a row + // has 3 colon or 2 coloncolon in a row // special casing this as per discussion in https://github.com/rust-lang/rust-analyzer/pull/13611#discussion_r1031845205 - if prev_token.prev_token().map(|t| t.kind() == T![:]).unwrap_or(false) { + // and https://github.com/rust-lang/rust-analyzer/pull/13611#discussion_r1032812751 + if prev_token + .prev_token() + .map(|t| t.kind() == T![:] || t.kind() == T![::]) + .unwrap_or(false) + { return None; } } diff --git a/crates/ide-completion/src/tests/special.rs b/crates/ide-completion/src/tests/special.rs index 0e59f4ec54..cad4af4937 100644 --- a/crates/ide-completion/src/tests/special.rs +++ b/crates/ide-completion/src/tests/special.rs @@ -967,11 +967,17 @@ fn foo { crate:$0 } } #[test] -fn no_completions_in_after_tripple_colon() { +fn no_completions_in_invalid_path() { check( r#" fn foo { crate:::$0 } "#, expect![""], ); + check( + r#" +fn foo { crate::::$0 } +"#, + expect![""], + ) } From 8661740626ae7f608051e1309d802c408b1e20e2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maria=20Jos=C3=A9=20Solano?= Date: Sun, 27 Nov 2022 09:46:37 -0800 Subject: [PATCH 0612/1945] Use typed notification method --- editors/code/src/main.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/editors/code/src/main.ts b/editors/code/src/main.ts index 25f1e83d10..01a1a2db7f 100644 --- a/editors/code/src/main.ts +++ b/editors/code/src/main.ts @@ -79,7 +79,7 @@ async function activateServer(ctx: Ctx): Promise { ); vscode.workspace.onDidChangeConfiguration( async (_) => { - await ctx.client?.sendNotification("workspace/didChangeConfiguration", { + await ctx.client?.sendNotification(lc.DidChangeConfigurationNotification.type, { settings: "", }); }, From 2174aca8f8601a0135827377ebf349838e1123db Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maria=20Jos=C3=A9=20Solano?= Date: Sun, 27 Nov 2022 10:07:09 -0800 Subject: [PATCH 0613/1945] Check for workspace root in runnable codelens --- crates/rust-analyzer/src/to_proto.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/rust-analyzer/src/to_proto.rs b/crates/rust-analyzer/src/to_proto.rs index 81cc1952ba..fcc4d7be24 100644 --- a/crates/rust-analyzer/src/to_proto.rs +++ b/crates/rust-analyzer/src/to_proto.rs @@ -1164,7 +1164,7 @@ pub(crate) fn code_lens( let r = runnable(snap, run)?; let lens_config = snap.config.lens(); - if lens_config.run && client_commands_config.run_single { + if lens_config.run && client_commands_config.run_single && r.args.workspace_root.is_some() { let command = command::run_single(&r, &title); acc.push(lsp_types::CodeLens { range: annotation_range, From b3bd5a471e314841fb1d2d94fdc50ef3c8d8548f Mon Sep 17 00:00:00 2001 From: Crauzer Date: Tue, 29 Nov 2022 00:32:13 +0100 Subject: [PATCH 0614/1945] implement vararg type collection from function params --- crates/hir-expand/src/name.rs | 1 + crates/hir-ty/src/infer.rs | 19 ++++++++++++++++++- crates/hir-ty/src/tests/patterns.rs | 12 ++++++++++++ 3 files changed, 31 insertions(+), 1 deletion(-) diff --git a/crates/hir-expand/src/name.rs b/crates/hir-expand/src/name.rs index 
2679a1c360..259fe1327f 100644 --- a/crates/hir-expand/src/name.rs +++ b/crates/hir-expand/src/name.rs @@ -419,6 +419,7 @@ pub mod known { shr, sub_assign, sub, + va_list ); // self/Self cannot be used as an identifier diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs index 0b3c23f574..112eb5bd84 100644 --- a/crates/hir-ty/src/infer.rs +++ b/crates/hir-ty/src/infer.rs @@ -537,8 +537,20 @@ impl<'a> InferenceContext<'a> { let data = self.db.function_data(func); let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver) .with_impl_trait_mode(ImplTraitLoweringMode::Param); - let param_tys = + let mut param_tys = data.params.iter().map(|(_, type_ref)| ctx.lower_ty(type_ref)).collect::>(); + // Check if function contains a va_list, if it does then we append it to the parameter types + // that are collected from the function data + if data.is_varargs() { + let va_list_ty = match self.resolve_va_list() { + Some(va_list) => TyBuilder::adt(self.db, va_list) + .fill_with_defaults(self.db, || self.table.new_type_var()) + .build(), + None => self.err_ty(), + }; + + param_tys.push(va_list_ty) + } for (ty, pat) in param_tys.into_iter().zip(self.body.params.iter()) { let ty = self.insert_type_vars(ty); let ty = self.normalize_associated_types_in(ty); @@ -983,6 +995,11 @@ impl<'a> InferenceContext<'a> { let trait_ = self.resolve_ops_index()?; self.db.trait_data(trait_).associated_type_by_name(&name![Output]) } + + fn resolve_va_list(&self) -> Option { + let struct_ = self.resolve_lang_item(name![va_list])?.as_struct()?; + Some(struct_.into()) + } } /// When inferring an expression, we propagate downward whatever type hint we diff --git a/crates/hir-ty/src/tests/patterns.rs b/crates/hir-ty/src/tests/patterns.rs index 74de33117e..9333e26935 100644 --- a/crates/hir-ty/src/tests/patterns.rs +++ b/crates/hir-ty/src/tests/patterns.rs @@ -1080,3 +1080,15 @@ fn my_fn(#[cfg(feature = "feature")] u8: u8, u32: u32) {} "#, ); } + +#[test] +fn var_args() { + check_types( + r#" +#[lang = "va_list"] +pub struct VaListImpl<'f>; +fn my_fn(foo: ...) 
{} + //^^^ VaListImpl +"#, + ); +} From 9914d30450ffbd64c2acbfa44d22add033df2a5a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maria=20Jos=C3=A9=20Solano?= Date: Mon, 28 Nov 2022 19:10:16 -0800 Subject: [PATCH 0615/1945] Fix formatting --- crates/rust-analyzer/src/to_proto.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/crates/rust-analyzer/src/to_proto.rs b/crates/rust-analyzer/src/to_proto.rs index fcc4d7be24..d1f805af1d 100644 --- a/crates/rust-analyzer/src/to_proto.rs +++ b/crates/rust-analyzer/src/to_proto.rs @@ -1164,7 +1164,10 @@ pub(crate) fn code_lens( let r = runnable(snap, run)?; let lens_config = snap.config.lens(); - if lens_config.run && client_commands_config.run_single && r.args.workspace_root.is_some() { + if lens_config.run + && client_commands_config.run_single + && r.args.workspace_root.is_some() + { let command = command::run_single(&r, &title); acc.push(lsp_types::CodeLens { range: annotation_range, From 16bf32fcdd1398d7adeccfbd9e11cd8e2d3b4c11 Mon Sep 17 00:00:00 2001 From: Wilco Kusee Date: Tue, 29 Nov 2022 15:25:09 +0100 Subject: [PATCH 0616/1945] Update Chalk to version 87 --- Cargo.lock | 16 ++++++++-------- crates/hir-ty/Cargo.toml | 8 ++++---- crates/hir-ty/src/chalk_db.rs | 2 ++ crates/hir-ty/src/display.rs | 1 - 4 files changed, 14 insertions(+), 13 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 41c5d36671..84dda206db 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -171,9 +171,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chalk-derive" -version = "0.86.0" +version = "0.87.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5499d415d855b5094366a824815341893ad3de0ecb6048c430118bdae6d27402" +checksum = "d552b2fa341f5fc35c6b917b1d289d3c3a34d0b74e579390ea6192d6152a8cdb" dependencies = [ "proc-macro2", "quote", @@ -183,9 +183,9 @@ dependencies = [ [[package]] name = "chalk-ir" -version = "0.86.0" +version = "0.87.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3800118c76a48507b0eece3a01f3a429b5c478d203c493096e6040c67ab960e1" +checksum = "43aa55deff4e7fbdb09fa014543372f2c95a06835ac487b9ce57b5099b950838" dependencies = [ "bitflags", "chalk-derive", @@ -194,9 +194,9 @@ dependencies = [ [[package]] name = "chalk-recursive" -version = "0.86.0" +version = "0.87.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1baf60628fd73104d1f8562586a52d48f37f1e84435aab2e62674b1fd935b8c8" +checksum = "80179569cdc8b618b02e2b91b3314802778f4d73b75cd6fd2a451171df9d5611" dependencies = [ "chalk-derive", "chalk-ir", @@ -207,9 +207,9 @@ dependencies = [ [[package]] name = "chalk-solve" -version = "0.86.0" +version = "0.87.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e9c3c068f9358786348e58a1b94ef0a5cf90a9810fc1f10fda896f0b5d80185" +checksum = "61213deefc36ba265ad01c4d997e18bcddf7922862a4594a47ca4575afb3dab4" dependencies = [ "chalk-derive", "chalk-ir", diff --git a/crates/hir-ty/Cargo.toml b/crates/hir-ty/Cargo.toml index a1d6835bfa..802face852 100644 --- a/crates/hir-ty/Cargo.toml +++ b/crates/hir-ty/Cargo.toml @@ -18,10 +18,10 @@ ena = "0.14.0" tracing = "0.1.35" rustc-hash = "1.1.0" scoped-tls = "1.0.0" -chalk-solve = { version = "0.86.0", default-features = false } -chalk-ir = "0.86.0" -chalk-recursive = { version = "0.86.0", default-features = false } -chalk-derive = "0.86.0" +chalk-solve = { version = "0.87.0", default-features = false } +chalk-ir = "0.87.0" +chalk-recursive = { 
version = "0.87.0", default-features = false } +chalk-derive = "0.87.0" la-arena = { version = "0.3.0", path = "../../lib/la-arena" } once_cell = "1.15.0" typed-arena = "2.0.1" diff --git a/crates/hir-ty/src/chalk_db.rs b/crates/hir-ty/src/chalk_db.rs index 43c3451cab..1c2b8de7f7 100644 --- a/crates/hir-ty/src/chalk_db.rs +++ b/crates/hir-ty/src/chalk_db.rs @@ -568,6 +568,7 @@ fn well_known_trait_from_lang_attr(name: &str) -> Option { "sized" => WellKnownTrait::Sized, "unpin" => WellKnownTrait::Unpin, "unsize" => WellKnownTrait::Unsize, + "tuple_trait" => WellKnownTrait::Tuple, _ => return None, }) } @@ -585,6 +586,7 @@ fn lang_attr_from_well_known_trait(attr: WellKnownTrait) -> &'static str { WellKnownTrait::FnOnce => "fn_once", WellKnownTrait::Generator => "generator", WellKnownTrait::Sized => "sized", + WellKnownTrait::Tuple => "tuple_trait", WellKnownTrait::Unpin => "unpin", WellKnownTrait::Unsize => "unsize", } diff --git a/crates/hir-ty/src/display.rs b/crates/hir-ty/src/display.rs index a22a4b170f..9d453eef71 100644 --- a/crates/hir-ty/src/display.rs +++ b/crates/hir-ty/src/display.rs @@ -1098,7 +1098,6 @@ impl HirDisplay for LifetimeData { write!(f, "{}", param_data.name) } LifetimeData::Static => write!(f, "'static"), - LifetimeData::Empty(_) => Ok(()), LifetimeData::Erased => Ok(()), LifetimeData::Phantom(_, _) => Ok(()), } From b65b02fd976d0174312e71ac8735ed8fecdfc74e Mon Sep 17 00:00:00 2001 From: Jonas Schievink Date: Tue, 29 Nov 2022 18:50:21 +0100 Subject: [PATCH 0617/1945] Fix signature help not showing up when cursor is between `))` or `>>` --- crates/ide/src/signature_help.rs | 58 +++++++++++++++++++++++--------- 1 file changed, 43 insertions(+), 15 deletions(-) diff --git a/crates/ide/src/signature_help.rs b/crates/ide/src/signature_help.rs index e7412d27fa..6045a787ec 100644 --- a/crates/ide/src/signature_help.rs +++ b/crates/ide/src/signature_help.rs @@ -74,20 +74,28 @@ pub(crate) fn signature_help(db: &RootDatabase, position: FilePosition) -> Optio ast::ArgList(arg_list) => { let cursor_outside = arg_list.r_paren_token().as_ref() == Some(&token); if cursor_outside { - return None; + continue; } - return signature_help_for_call(&sema, token); + return signature_help_for_call(&sema, arg_list, token); }, ast::GenericArgList(garg_list) => { let cursor_outside = garg_list.r_angle_token().as_ref() == Some(&token); if cursor_outside { - return None; + continue; } - return signature_help_for_generics(&sema, token); + return signature_help_for_generics(&sema, garg_list, token); }, _ => (), } } + + // Stop at multi-line expressions, since the signature of the outer call is not very + // helpful inside them. + if let Some(expr) = ast::Expr::cast(node.clone()) { + if expr.syntax().text().contains_char('\n') { + return None; + } + } } None @@ -95,10 +103,11 @@ pub(crate) fn signature_help(db: &RootDatabase, position: FilePosition) -> Optio fn signature_help_for_call( sema: &Semantics<'_, RootDatabase>, + arg_list: ast::ArgList, token: SyntaxToken, ) -> Option { // Find the calling expression and its NameRef - let mut node = token.parent()?; + let mut node = arg_list.syntax().parent()?; let calling_node = loop { if let Some(callable) = ast::CallableExpr::cast(node.clone()) { if callable @@ -109,14 +118,6 @@ fn signature_help_for_call( } } - // Stop at multi-line expressions, since the signature of the outer call is not very - // helpful inside them. 
- if let Some(expr) = ast::Expr::cast(node.clone()) { - if expr.syntax().text().contains_char('\n') { - return None; - } - } - node = node.parent()?; }; @@ -200,10 +201,11 @@ fn signature_help_for_call( fn signature_help_for_generics( sema: &Semantics<'_, RootDatabase>, + garg_list: ast::GenericArgList, token: SyntaxToken, ) -> Option { - let parent = token.parent()?; - let arg_list = parent + let arg_list = garg_list + .syntax() .ancestors() .filter_map(ast::GenericArgList::cast) .find(|list| list.syntax().text_range().contains(token.text_range().start()))?; @@ -770,6 +772,32 @@ fn f() { "#, expect![[]], ); + check( + r#" +fn foo(a: u8) -> u8 {a} +fn bar(a: u8) -> u8 {a} +fn f() { + foo(bar(123)$0) +} +"#, + expect![[r#" + fn foo(a: u8) -> u8 + ^^^^^ + "#]], + ); + check( + r#" +struct Vec(T); +struct Vec2(T); +fn f() { + let _: Vec2$0> +} +"#, + expect![[r#" + struct Vec2 + ^ + "#]], + ); } #[test] From 335cb2605010b29426a87c7ba15244f580fd92f7 Mon Sep 17 00:00:00 2001 From: Jonas Schievink Date: Tue, 29 Nov 2022 19:20:32 +0100 Subject: [PATCH 0618/1945] Version the inlay hint resolve data --- crates/rust-analyzer/src/from_proto.rs | 10 +++++++++- crates/rust-analyzer/src/handlers.rs | 22 ++++++++++++++++++++-- crates/rust-analyzer/src/lsp_ext.rs | 4 ++-- crates/rust-analyzer/src/to_proto.rs | 10 ++++++++-- 4 files changed, 39 insertions(+), 7 deletions(-) diff --git a/crates/rust-analyzer/src/from_proto.rs b/crates/rust-analyzer/src/from_proto.rs index dd433b0f4d..4e7095b3ce 100644 --- a/crates/rust-analyzer/src/from_proto.rs +++ b/crates/rust-analyzer/src/from_proto.rs @@ -67,7 +67,15 @@ pub(crate) fn file_range( text_document_identifier: lsp_types::TextDocumentIdentifier, range: lsp_types::Range, ) -> Result { - let file_id = file_id(snap, &text_document_identifier.uri)?; + file_range_uri(snap, &text_document_identifier.uri, range) +} + +pub(crate) fn file_range_uri( + snap: &GlobalStateSnapshot, + document: &lsp_types::Url, + range: lsp_types::Range, +) -> Result { + let file_id = file_id(snap, document)?; let line_index = snap.file_line_index(file_id)?; let range = text_range(&line_index, range)?; Ok(FileRange { file_id, range }) diff --git a/crates/rust-analyzer/src/handlers.rs b/crates/rust-analyzer/src/handlers.rs index 4f318f39de..1604a02fb1 100644 --- a/crates/rust-analyzer/src/handlers.rs +++ b/crates/rust-analyzer/src/handlers.rs @@ -29,6 +29,7 @@ use project_model::{ManifestPath, ProjectWorkspace, TargetKind}; use serde_json::json; use stdx::{format_to, never}; use syntax::{algo, ast, AstNode, TextRange, TextSize}; +use tracing::error; use vfs::AbsPathBuf; use crate::{ @@ -1372,9 +1373,26 @@ pub(crate) fn handle_inlay_hints_resolve( let resolve_data: lsp_ext::InlayHintResolveData = serde_json::from_value(data)?; - let file_range = from_proto::file_range( + match snap.url_file_version(&resolve_data.text_document.uri) { + Some(version) if version == resolve_data.text_document.version => {} + Some(version) => { + error!( + "attempted inlayHints/resolve of '{}' at version {} while server version is {}", + resolve_data.text_document.uri, resolve_data.text_document.version, version, + ); + return Ok(hint); + } + None => { + error!( + "attempted inlayHints/resolve of unknown file '{}' at version {}", + resolve_data.text_document.uri, resolve_data.text_document.version, + ); + return Ok(hint); + } + } + let file_range = from_proto::file_range_uri( &snap, - resolve_data.text_document, + &resolve_data.text_document.uri, match resolve_data.position { PositionOrRange::Position(pos) => 
Range::new(pos, pos), PositionOrRange::Range(range) => range, diff --git a/crates/rust-analyzer/src/lsp_ext.rs b/crates/rust-analyzer/src/lsp_ext.rs index 8cc5648f3c..a1df0b6d7d 100644 --- a/crates/rust-analyzer/src/lsp_ext.rs +++ b/crates/rust-analyzer/src/lsp_ext.rs @@ -3,11 +3,11 @@ use std::{collections::HashMap, path::PathBuf}; use lsp_types::request::Request; -use lsp_types::PositionEncodingKind; use lsp_types::{ notification::Notification, CodeActionKind, DocumentOnTypeFormattingParams, PartialResultParams, Position, Range, TextDocumentIdentifier, WorkDoneProgressParams, }; +use lsp_types::{PositionEncodingKind, VersionedTextDocumentIdentifier}; use serde::{Deserialize, Serialize}; pub enum AnalyzerStatus {} @@ -550,7 +550,7 @@ pub struct CompletionResolveData { #[derive(Debug, Serialize, Deserialize)] pub struct InlayHintResolveData { - pub text_document: TextDocumentIdentifier, + pub text_document: VersionedTextDocumentIdentifier, pub position: PositionOrRange, } diff --git a/crates/rust-analyzer/src/to_proto.rs b/crates/rust-analyzer/src/to_proto.rs index d1f805af1d..3ce24bdd14 100644 --- a/crates/rust-analyzer/src/to_proto.rs +++ b/crates/rust-analyzer/src/to_proto.rs @@ -492,7 +492,10 @@ pub(crate) fn inlay_hint( let uri = url(snap, file_id); let line_index = snap.file_line_index(file_id).ok()?; - let text_document = lsp_types::TextDocumentIdentifier { uri }; + let text_document = lsp_types::VersionedTextDocumentIdentifier { + version: snap.url_file_version(&uri)?, + uri, + }; to_value(lsp_ext::InlayHintResolveData { text_document, position: lsp_ext::PositionOrRange::Position(position(&line_index, offset)), @@ -501,7 +504,10 @@ pub(crate) fn inlay_hint( } Some(ide::InlayTooltip::HoverRanged(file_id, text_range)) => { let uri = url(snap, file_id); - let text_document = lsp_types::TextDocumentIdentifier { uri }; + let text_document = lsp_types::VersionedTextDocumentIdentifier { + version: snap.url_file_version(&uri)?, + uri, + }; let line_index = snap.file_line_index(file_id).ok()?; to_value(lsp_ext::InlayHintResolveData { text_document, From 32f59cf01de639d0b23753687f5fb5b08c0758c0 Mon Sep 17 00:00:00 2001 From: Jonas Schievink Date: Tue, 29 Nov 2022 19:33:16 +0100 Subject: [PATCH 0619/1945] Update hash --- docs/dev/lsp-extensions.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/dev/lsp-extensions.md b/docs/dev/lsp-extensions.md index fe316fcae9..5aced035d5 100644 --- a/docs/dev/lsp-extensions.md +++ b/docs/dev/lsp-extensions.md @@ -1,5 +1,5 @@ [^\n]+\n/m, ""); } + let value; + if (errorCode) { + if (typeof diag.code === "string" || typeof diag.code === "number") { + value = diag.code; + } else { + value = diag.code?.value; + } + } diag.code = { target: vscode.Uri.from({ scheme: "rust-analyzer-diagnostics-view", @@ -131,7 +141,7 @@ export async function createClient( fragment: uri.toString(), query: idx.toString(), }), - value: "Click for full compiler diagnostic", + value: value ?? 
"Click for full compiler diagnostic", }; } }); diff --git a/editors/code/src/config.ts b/editors/code/src/config.ts index d8dbd1df16..eb4f965291 100644 --- a/editors/code/src/config.ts +++ b/editors/code/src/config.ts @@ -241,6 +241,10 @@ export class Config { get previewRustcOutput() { return this.get("diagnostics.previewRustcOutput"); } + + get useRustcErrorCode() { + return this.get("diagnostics.useRustcErrorCode"); + } } const VarRegex = new RegExp(/\$\{(.+?)\}/g); From b996a54cd845ae315a7b089437b43bf1b79a5e65 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 3 Jan 2023 11:58:31 +0100 Subject: [PATCH 0757/1945] Skip lifetime elision on fn pointers and fn trait types --- crates/hir-ty/src/infer/expr.rs | 1 + .../src/handlers/add_explicit_type.rs | 5 +- .../src/handlers/extract_type_alias.rs | 130 +++++++++--------- crates/ide-db/src/syntax_helpers/node_ext.rs | 9 +- crates/ide/src/inlay_hints/fn_lifetime_fn.rs | 30 +++- 5 files changed, 105 insertions(+), 70 deletions(-) diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs index 2e5c424410..a015f67024 100644 --- a/crates/hir-ty/src/infer/expr.rs +++ b/crates/hir-ty/src/infer/expr.rs @@ -334,6 +334,7 @@ impl<'a> InferenceContext<'a> { let (param_tys, ret_ty) = match res { Some(res) => { let adjustments = auto_deref_adjust_steps(&derefs); + // FIXME: Handle call adjustments for Fn/FnMut self.write_expr_adj(*callee, adjustments); res } diff --git a/crates/ide-assists/src/handlers/add_explicit_type.rs b/crates/ide-assists/src/handlers/add_explicit_type.rs index b5f99726fe..0057f439f1 100644 --- a/crates/ide-assists/src/handlers/add_explicit_type.rs +++ b/crates/ide-assists/src/handlers/add_explicit_type.rs @@ -47,7 +47,10 @@ pub(crate) fn add_explicit_type(acc: &mut Assists, ctx: &AssistContext<'_>) -> O // Don't enable the assist if there is a type ascription without any placeholders if let Some(ty) = &ascribed_ty { let mut contains_infer_ty = false; - walk_ty(ty, &mut |ty| contains_infer_ty |= matches!(ty, ast::Type::InferType(_))); + walk_ty(ty, &mut |ty| { + contains_infer_ty |= matches!(ty, ast::Type::InferType(_)); + false + }); if !contains_infer_ty { cov_mark::hit!(add_explicit_type_not_applicable_if_ty_already_specified); return None; diff --git a/crates/ide-assists/src/handlers/extract_type_alias.rs b/crates/ide-assists/src/handlers/extract_type_alias.rs index 3116935fc5..0505f5784f 100644 --- a/crates/ide-assists/src/handlers/extract_type_alias.rs +++ b/crates/ide-assists/src/handlers/extract_type_alias.rs @@ -108,76 +108,80 @@ fn collect_used_generics<'gp>( } let mut generics = Vec::new(); - walk_ty(ty, &mut |ty| match ty { - ast::Type::PathType(ty) => { - if let Some(path) = ty.path() { - if let Some(name_ref) = path.as_single_name_ref() { - if let Some(param) = known_generics.iter().find(|gp| { - match gp { - ast::GenericParam::ConstParam(cp) => cp.name(), - ast::GenericParam::TypeParam(tp) => tp.name(), - _ => None, - } - .map_or(false, |n| n.text() == name_ref.text()) - }) { - generics.push(param); - } - } - generics.extend( - path.segments() - .filter_map(|seg| seg.generic_arg_list()) - .flat_map(|it| it.generic_args()) - .filter_map(|it| match it { - ast::GenericArg::LifetimeArg(lt) => { - let lt = lt.lifetime()?; - known_generics.iter().find(find_lifetime(<.text())) + walk_ty(ty, &mut |ty| { + match ty { + ast::Type::PathType(ty) => { + if let Some(path) = ty.path() { + if let Some(name_ref) = path.as_single_name_ref() { + if let Some(param) = known_generics.iter().find(|gp| { + match gp { + 
ast::GenericParam::ConstParam(cp) => cp.name(), + ast::GenericParam::TypeParam(tp) => tp.name(), + _ => None, } - _ => None, - }), - ); - } - } - ast::Type::ImplTraitType(impl_ty) => { - if let Some(it) = impl_ty.type_bound_list() { - generics.extend( - it.bounds() - .filter_map(|it| it.lifetime()) - .filter_map(|lt| known_generics.iter().find(find_lifetime(<.text()))), - ); - } - } - ast::Type::DynTraitType(dyn_ty) => { - if let Some(it) = dyn_ty.type_bound_list() { - generics.extend( - it.bounds() - .filter_map(|it| it.lifetime()) - .filter_map(|lt| known_generics.iter().find(find_lifetime(<.text()))), - ); - } - } - ast::Type::RefType(ref_) => generics.extend( - ref_.lifetime().and_then(|lt| known_generics.iter().find(find_lifetime(<.text()))), - ), - ast::Type::ArrayType(ar) => { - if let Some(expr) = ar.expr() { - if let ast::Expr::PathExpr(p) = expr { - if let Some(path) = p.path() { - if let Some(name_ref) = path.as_single_name_ref() { - if let Some(param) = known_generics.iter().find(|gp| { - if let ast::GenericParam::ConstParam(cp) = gp { - cp.name().map_or(false, |n| n.text() == name_ref.text()) - } else { - false + .map_or(false, |n| n.text() == name_ref.text()) + }) { + generics.push(param); + } + } + generics.extend( + path.segments() + .filter_map(|seg| seg.generic_arg_list()) + .flat_map(|it| it.generic_args()) + .filter_map(|it| match it { + ast::GenericArg::LifetimeArg(lt) => { + let lt = lt.lifetime()?; + known_generics.iter().find(find_lifetime(<.text())) + } + _ => None, + }), + ); + } + } + ast::Type::ImplTraitType(impl_ty) => { + if let Some(it) = impl_ty.type_bound_list() { + generics.extend( + it.bounds() + .filter_map(|it| it.lifetime()) + .filter_map(|lt| known_generics.iter().find(find_lifetime(<.text()))), + ); + } + } + ast::Type::DynTraitType(dyn_ty) => { + if let Some(it) = dyn_ty.type_bound_list() { + generics.extend( + it.bounds() + .filter_map(|it| it.lifetime()) + .filter_map(|lt| known_generics.iter().find(find_lifetime(<.text()))), + ); + } + } + ast::Type::RefType(ref_) => generics.extend( + ref_.lifetime() + .and_then(|lt| known_generics.iter().find(find_lifetime(<.text()))), + ), + ast::Type::ArrayType(ar) => { + if let Some(expr) = ar.expr() { + if let ast::Expr::PathExpr(p) = expr { + if let Some(path) = p.path() { + if let Some(name_ref) = path.as_single_name_ref() { + if let Some(param) = known_generics.iter().find(|gp| { + if let ast::GenericParam::ConstParam(cp) = gp { + cp.name().map_or(false, |n| n.text() == name_ref.text()) + } else { + false + } + }) { + generics.push(param); } - }) { - generics.push(param); } } } } } - } - _ => (), + _ => (), + }; + false }); // stable resort to lifetime, type, const generics.sort_by_key(|gp| match gp { diff --git a/crates/ide-db/src/syntax_helpers/node_ext.rs b/crates/ide-db/src/syntax_helpers/node_ext.rs index b72003ff36..aa03478599 100644 --- a/crates/ide-db/src/syntax_helpers/node_ext.rs +++ b/crates/ide-db/src/syntax_helpers/node_ext.rs @@ -173,7 +173,8 @@ pub fn walk_pat(pat: &ast::Pat, cb: &mut dyn FnMut(ast::Pat)) { } /// Preorder walk all the type's sub types. 
-pub fn walk_ty(ty: &ast::Type, cb: &mut dyn FnMut(ast::Type)) { +// FIXME: Make the control flow more proper +pub fn walk_ty(ty: &ast::Type, cb: &mut dyn FnMut(ast::Type) -> bool) { let mut preorder = ty.syntax().preorder(); while let Some(event) = preorder.next() { let node = match event { @@ -184,10 +185,12 @@ pub fn walk_ty(ty: &ast::Type, cb: &mut dyn FnMut(ast::Type)) { match ast::Type::cast(node) { Some(ty @ ast::Type::MacroType(_)) => { preorder.skip_subtree(); - cb(ty) + cb(ty); } Some(ty) => { - cb(ty); + if cb(ty) { + preorder.skip_subtree(); + } } // skip const args None if ast::ConstArg::can_cast(kind) => { diff --git a/crates/ide/src/inlay_hints/fn_lifetime_fn.rs b/crates/ide/src/inlay_hints/fn_lifetime_fn.rs index 1f5bcea63a..2aa5e3dc73 100644 --- a/crates/ide/src/inlay_hints/fn_lifetime_fn.rs +++ b/crates/ide/src/inlay_hints/fn_lifetime_fn.rs @@ -59,9 +59,14 @@ pub(super) fn hints( r.amp_token(), lifetime, is_elided, - )) + )); + false } - _ => (), + ast::Type::FnPtrType(_) => true, + ast::Type::PathType(t) => { + t.path().and_then(|it| it.segment()).and_then(|it| it.param_list()).is_some() + } + _ => false, }) }); acc @@ -146,8 +151,13 @@ pub(super) fn hints( is_trivial = false; acc.push(mk_lt_hint(amp, output_lt.to_string())); } + false } - _ => (), + ast::Type::FnPtrType(_) => true, + ast::Type::PathType(t) => { + t.path().and_then(|it| it.segment()).and_then(|it| it.param_list()).is_some() + } + _ => false, }) } } @@ -295,6 +305,20 @@ impl () { // ^^^<'0, '1> // ^'0 ^'1 ^'0 } +"#, + ); + } + + #[test] + fn hints_lifetimes_skip_fn_likes() { + check_with_config( + InlayHintsConfig { + lifetime_elision_hints: LifetimeElisionHints::Always, + ..TEST_CONFIG + }, + r#" +fn fn_ptr(a: fn(&()) -> &()) {} +fn fn_trait<>(a: impl Fn(&()) -> &()) {} "#, ); } From c4d8cf1dad9e2ad5710c56533c42979e91520c78 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 3 Jan 2023 12:32:38 +0100 Subject: [PATCH 0758/1945] Use ZWNJ to prevent VSCode from forming ligatures between hints and code --- editors/code/src/client.ts | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/editors/code/src/client.ts b/editors/code/src/client.ts index 1470c1f581..e6595340aa 100644 --- a/editors/code/src/client.ts +++ b/editors/code/src/client.ts @@ -100,6 +100,24 @@ export async function createClient( } }, }, + async provideInlayHints(document, viewPort, token, next) { + const inlays = await next(document, viewPort, token); + if (!inlays) { + return inlays; + } + // U+200C is a zero-width non-joiner to prevent the editor from forming a ligature + // between code and hints + for (const inlay of inlays) { + if (typeof inlay.label === "string") { + inlay.label = `\u{200c}${inlay.label}\u{200c}`; + } else if (Array.isArray(inlay.label)) { + for (const it of inlay.label) { + it.value = `\u{200c}${it.value}\u{200c}`; + } + } + } + return inlays; + }, async handleDiagnostics( uri: vscode.Uri, diagnostics: vscode.Diagnostic[], From b6bb1e9ae777d54da910885cc1437f7660d987a4 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 3 Jan 2023 15:46:08 +0100 Subject: [PATCH 0759/1945] Only set machine-applicable rustc diagnostics as preferred --- crates/rust-analyzer/src/diagnostics/to_proto.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/crates/rust-analyzer/src/diagnostics/to_proto.rs b/crates/rust-analyzer/src/diagnostics/to_proto.rs index d1ee99af3e..80be43fe80 100644 --- a/crates/rust-analyzer/src/diagnostics/to_proto.rs +++ b/crates/rust-analyzer/src/diagnostics/to_proto.rs @@ 
-191,6 +191,7 @@ fn map_rust_child_diagnostic( let mut edit_map: HashMap> = HashMap::new(); let mut suggested_replacements = Vec::new(); + let mut is_preferred = true; for &span in &spans { if let Some(suggested_replacement) = &span.suggested_replacement { if !suggested_replacement.is_empty() { @@ -209,6 +210,8 @@ fn map_rust_child_diagnostic( ) { edit_map.entry(location.uri).or_default().push(edit); } + is_preferred &= + matches!(span.suggestion_applicability, Some(Applicability::MachineApplicable)); } } @@ -251,7 +254,7 @@ fn map_rust_child_diagnostic( document_changes: None, change_annotations: None, }), - is_preferred: Some(true), + is_preferred: Some(is_preferred), data: None, command: None, }, From 21ea0048cd879342cbec67152dbacec6e38b56bb Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Wed, 4 Jan 2023 18:20:15 +0900 Subject: [PATCH 0760/1945] fix: unescape inline module names in module resolution --- crates/hir-def/src/nameres/mod_resolution.rs | 2 +- .../src/nameres/tests/mod_resolution.rs | 37 +++++++++++++++++++ 2 files changed, 38 insertions(+), 1 deletion(-) diff --git a/crates/hir-def/src/nameres/mod_resolution.rs b/crates/hir-def/src/nameres/mod_resolution.rs index 11d20d3db3..4c263846d2 100644 --- a/crates/hir-def/src/nameres/mod_resolution.rs +++ b/crates/hir-def/src/nameres/mod_resolution.rs @@ -34,7 +34,7 @@ impl ModDir { let path = match attr_path.map(SmolStr::as_str) { None => { let mut path = self.dir_path.clone(); - path.push(&name.to_smol_str()); + path.push(&name.unescaped().to_smol_str()); path } Some(attr_path) => { diff --git a/crates/hir-def/src/nameres/tests/mod_resolution.rs b/crates/hir-def/src/nameres/tests/mod_resolution.rs index c575bf7cac..a019312884 100644 --- a/crates/hir-def/src/nameres/tests/mod_resolution.rs +++ b/crates/hir-def/src/nameres/tests/mod_resolution.rs @@ -156,6 +156,43 @@ pub struct Baz; ); } +#[test] +fn module_resolution_works_for_inline_raw_modules() { + check( + r#" +//- /lib.rs +mod r#async { + pub mod a; + pub mod r#async; +} +use self::r#async::a::Foo; +use self::r#async::r#async::Bar; + +//- /async/a.rs +pub struct Foo; + +//- /async/async.rs +pub struct Bar; +"#, + expect![[r#" + crate + Bar: t v + Foo: t v + r#async: t + + crate::r#async + a: t + r#async: t + + crate::r#async::a + Foo: t v + + crate::r#async::r#async + Bar: t v + "#]], + ); +} + #[test] fn module_resolution_decl_path() { check( From ae73628f6bf0ee3ac6f571b447972f4f37a39103 Mon Sep 17 00:00:00 2001 From: bvanjoi Date: Wed, 4 Jan 2023 22:10:17 +0800 Subject: [PATCH 0761/1945] fix: keep whitespace in extract function handler --- .../src/handlers/extract_function.rs | 9 +++++---- crates/syntax/src/ast/make.rs | 17 ++++++++++++----- 2 files changed, 17 insertions(+), 9 deletions(-) diff --git a/crates/ide-assists/src/handlers/extract_function.rs b/crates/ide-assists/src/handlers/extract_function.rs index 4ee9d4638b..94614360c7 100644 --- a/crates/ide-assists/src/handlers/extract_function.rs +++ b/crates/ide-assists/src/handlers/extract_function.rs @@ -1799,7 +1799,8 @@ fn make_body( }) .collect::>(); let tail_expr = tail_expr.map(|expr| expr.dedent(old_indent).indent(body_indent)); - make::hacky_block_expr_with_comments(elements, tail_expr) + + make::hacky_block_expr(elements, tail_expr) } }; @@ -1881,7 +1882,7 @@ fn with_tail_expr(block: ast::BlockExpr, tail_expr: ast::Expr) -> ast::BlockExpr elements.push(syntax::NodeOrToken::Node(stmt_tail.syntax().clone())); } - make::hacky_block_expr_with_comments(elements, Some(tail_expr)) + make::hacky_block_expr(elements, 
Some(tail_expr)) } fn format_type(ty: &hir::Type, ctx: &AssistContext<'_>, module: hir::Module) -> String { @@ -4978,9 +4979,8 @@ fn $0fun_name() { ); } - // FIXME: we do want to preserve whitespace #[test] - fn extract_function_does_not_preserve_whitespace() { + fn extract_function_does_preserve_whitespace() { check_assist( extract_function, r#" @@ -4999,6 +4999,7 @@ fn func() { fn $0fun_name() { let a = 0; + let x = 0; } "#, diff --git a/crates/syntax/src/ast/make.rs b/crates/syntax/src/ast/make.rs index 8c26009add..f17b7d8557 100644 --- a/crates/syntax/src/ast/make.rs +++ b/crates/syntax/src/ast/make.rs @@ -339,10 +339,10 @@ pub fn tail_only_block_expr(tail_expr: ast::Expr) -> ast::BlockExpr { } /// Ideally this function wouldn't exist since it involves manual indenting. -/// It differs from `make::block_expr` by also supporting comments. +/// It differs from `make::block_expr` by also supporting comments and whitespace. /// /// FIXME: replace usages of this with the mutable syntax tree API -pub fn hacky_block_expr_with_comments( +pub fn hacky_block_expr( elements: impl IntoIterator, tail_expr: Option, ) -> ast::BlockExpr { @@ -350,10 +350,17 @@ pub fn hacky_block_expr_with_comments( for node_or_token in elements.into_iter() { match node_or_token { rowan::NodeOrToken::Node(n) => format_to!(buf, " {n}\n"), - rowan::NodeOrToken::Token(t) if t.kind() == SyntaxKind::COMMENT => { - format_to!(buf, " {t}\n") + rowan::NodeOrToken::Token(t) => { + let kind = t.kind(); + if kind == SyntaxKind::COMMENT { + format_to!(buf, " {t}\n") + } else if kind == SyntaxKind::WHITESPACE { + let content = t.text().trim_matches(|c| c != '\n'); + if content.len() >= 1 { + format_to!(buf, "{}", &content[1..]) + } + } } - _ => (), } } if let Some(tail_expr) = tail_expr { From 150da92b5c6e4a8dc3c27f24864de471c2b2c380 Mon Sep 17 00:00:00 2001 From: Brent Westbrook Date: Wed, 4 Jan 2023 12:55:05 -0500 Subject: [PATCH 0762/1945] return immediately from `render_record_lit` if `snippet_cap` is None this is the record literal version of #13805, which handled the same issue for tuple literals --- .../ide-completion/src/completions/record.rs | 31 ++++++++++++++++++- crates/ide-completion/src/render/variant.rs | 3 ++ 2 files changed, 33 insertions(+), 1 deletion(-) diff --git a/crates/ide-completion/src/completions/record.rs b/crates/ide-completion/src/completions/record.rs index 6743ec897f..0521e735de 100644 --- a/crates/ide-completion/src/completions/record.rs +++ b/crates/ide-completion/src/completions/record.rs @@ -159,8 +159,9 @@ fn baz() { #[test] fn enum_variant_no_snippets() { let conf = CompletionConfig { snippet_cap: SnippetCap::new(false), ..TEST_CONFIG }; + // tuple variant check_edit_with_config( - conf, + conf.clone(), "Variant()", r#" enum Enum { @@ -178,6 +179,34 @@ enum Enum { Variant(usize), } +impl Enum { + fn new(u: usize) -> Self { + Self::Variant + } +} +"#, + ); + + // record variant + check_edit_with_config( + conf, + "Variant{}", + r#" +enum Enum { + Variant{u: usize}, +} + +impl Enum { + fn new(u: usize) -> Self { + Self::Va$0 + } +} +"#, + r#" +enum Enum { + Variant{u: usize}, +} + impl Enum { fn new(u: usize) -> Self { Self::Variant diff --git a/crates/ide-completion/src/render/variant.rs b/crates/ide-completion/src/render/variant.rs index f3e88489f4..55c55725be 100644 --- a/crates/ide-completion/src/render/variant.rs +++ b/crates/ide-completion/src/render/variant.rs @@ -22,6 +22,9 @@ pub(crate) fn render_record_lit( fields: &[hir::Field], path: &str, ) -> RenderedLiteral { + if 
snippet_cap.is_none() { + return RenderedLiteral { literal: path.to_string(), detail: path.to_string() }; + } let completions = fields.iter().enumerate().format_with(", ", |(idx, field), f| { if snippet_cap.is_some() { f(&format_args!("{}: ${{{}:()}}", field.name(db), idx + 1)) From 1bfc732b78aa1664245009c1d40d47d9ac149e94 Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Thu, 5 Jan 2023 20:06:49 +0900 Subject: [PATCH 0763/1945] Store diverging flag for type variables as bitflags --- Cargo.lock | 1 + crates/hir-ty/Cargo.toml | 1 + crates/hir-ty/src/infer/unify.rs | 32 +++++++++++++++++--------------- 3 files changed, 19 insertions(+), 15 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 4c255b12b7..59cd66756c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -557,6 +557,7 @@ version = "0.0.0" dependencies = [ "arrayvec", "base-db", + "bitflags", "chalk-derive", "chalk-ir", "chalk-recursive", diff --git a/crates/hir-ty/Cargo.toml b/crates/hir-ty/Cargo.toml index c72199c37f..ae837ac6dc 100644 --- a/crates/hir-ty/Cargo.toml +++ b/crates/hir-ty/Cargo.toml @@ -13,6 +13,7 @@ doctest = false cov-mark = "2.0.0-pre.1" itertools = "0.10.5" arrayvec = "0.7.2" +bitflags = "1.3.2" smallvec = "1.10.0" ena = "0.14.0" tracing = "0.1.35" diff --git a/crates/hir-ty/src/infer/unify.rs b/crates/hir-ty/src/infer/unify.rs index 12f45f00f9..c4488d6d91 100644 --- a/crates/hir-ty/src/infer/unify.rs +++ b/crates/hir-ty/src/infer/unify.rs @@ -1,6 +1,6 @@ //! Unification and canonicalization logic. -use std::{fmt, mem, sync::Arc}; +use std::{fmt, iter, mem, sync::Arc}; use chalk_ir::{ cast::Cast, fold::TypeFoldable, interner::HasInterner, zip::Zip, CanonicalVarKind, FloatTy, @@ -128,9 +128,11 @@ pub(crate) fn unify( )) } -#[derive(Copy, Clone, Debug)] -pub(crate) struct TypeVariableData { - diverging: bool, +bitflags::bitflags! { + #[derive(Default)] + pub(crate) struct TypeVariableFlags: u8 { + const DIVERGING = 1 << 0; + } } type ChalkInferenceTable = chalk_solve::infer::InferenceTable; @@ -140,14 +142,14 @@ pub(crate) struct InferenceTable<'a> { pub(crate) db: &'a dyn HirDatabase, pub(crate) trait_env: Arc, var_unification_table: ChalkInferenceTable, - type_variable_table: Vec, + type_variable_table: Vec, pending_obligations: Vec>>, } pub(crate) struct InferenceTableSnapshot { var_table_snapshot: chalk_solve::infer::InferenceSnapshot, pending_obligations: Vec>>, - type_variable_table_snapshot: Vec, + type_variable_table_snapshot: Vec, } impl<'a> InferenceTable<'a> { @@ -169,19 +171,19 @@ impl<'a> InferenceTable<'a> { /// result. 
pub(super) fn propagate_diverging_flag(&mut self) { for i in 0..self.type_variable_table.len() { - if !self.type_variable_table[i].diverging { + if !self.type_variable_table[i].contains(TypeVariableFlags::DIVERGING) { continue; } let v = InferenceVar::from(i as u32); let root = self.var_unification_table.inference_var_root(v); if let Some(data) = self.type_variable_table.get_mut(root.index() as usize) { - data.diverging = true; + *data |= TypeVariableFlags::DIVERGING; } } } pub(super) fn set_diverging(&mut self, iv: InferenceVar, diverging: bool) { - self.type_variable_table[iv.index() as usize].diverging = diverging; + self.type_variable_table[iv.index() as usize].set(TypeVariableFlags::DIVERGING, diverging); } fn fallback_value(&self, iv: InferenceVar, kind: TyVariableKind) -> Ty { @@ -189,7 +191,7 @@ impl<'a> InferenceTable<'a> { _ if self .type_variable_table .get(iv.index() as usize) - .map_or(false, |data| data.diverging) => + .map_or(false, |data| data.contains(TypeVariableFlags::DIVERGING)) => { TyKind::Never } @@ -247,10 +249,8 @@ impl<'a> InferenceTable<'a> { } fn extend_type_variable_table(&mut self, to_index: usize) { - self.type_variable_table.extend( - (0..1 + to_index - self.type_variable_table.len()) - .map(|_| TypeVariableData { diverging: false }), - ); + let count = to_index - self.type_variable_table.len() + 1; + self.type_variable_table.extend(iter::repeat(TypeVariableFlags::default()).take(count)); } fn new_var(&mut self, kind: TyVariableKind, diverging: bool) -> Ty { @@ -258,7 +258,9 @@ impl<'a> InferenceTable<'a> { // Chalk might have created some type variables for its own purposes that we don't know about... self.extend_type_variable_table(var.index() as usize); assert_eq!(var.index() as usize, self.type_variable_table.len() - 1); - self.type_variable_table[var.index() as usize].diverging = diverging; + if diverging { + self.type_variable_table[var.index() as usize] |= TypeVariableFlags::DIVERGING; + } var.to_ty_with_kind(Interner, kind) } From b183612610c6383861f9c983a10588124800bac7 Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Thu, 5 Jan 2023 20:19:46 +0900 Subject: [PATCH 0764/1945] Add `INTEGER` and `FLOAT` flags for type variables --- crates/hir-ty/src/infer/unify.rs | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/crates/hir-ty/src/infer/unify.rs b/crates/hir-ty/src/infer/unify.rs index c4488d6d91..a1fef48f25 100644 --- a/crates/hir-ty/src/infer/unify.rs +++ b/crates/hir-ty/src/infer/unify.rs @@ -132,6 +132,8 @@ bitflags::bitflags! { #[derive(Default)] pub(crate) struct TypeVariableFlags: u8 { const DIVERGING = 1 << 0; + const INTEGER = 1 << 1; + const FLOAT = 1 << 2; } } @@ -258,8 +260,14 @@ impl<'a> InferenceTable<'a> { // Chalk might have created some type variables for its own purposes that we don't know about... 
self.extend_type_variable_table(var.index() as usize); assert_eq!(var.index() as usize, self.type_variable_table.len() - 1); + let flags = self.type_variable_table.get_mut(var.index() as usize).unwrap(); if diverging { - self.type_variable_table[var.index() as usize] |= TypeVariableFlags::DIVERGING; + *flags |= TypeVariableFlags::DIVERGING; + } + if matches!(kind, TyVariableKind::Integer) { + *flags |= TypeVariableFlags::INTEGER; + } else if matches!(kind, TyVariableKind::Float) { + *flags |= TypeVariableFlags::FLOAT; } var.to_ty_with_kind(Interner, kind) } From d01630c8f39e88aa175db9dbe92a9e2a188007ab Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Thu, 5 Jan 2023 21:31:10 +0900 Subject: [PATCH 0765/1945] Apply fallback to scalar type variables before final obligation resolution --- crates/hir-ty/src/infer.rs | 2 + crates/hir-ty/src/infer/unify.rs | 45 +++++++++++++++++++++ crates/hir-ty/src/tests/traits.rs | 65 +++++++++++++++++++++++++++++++ 3 files changed, 112 insertions(+) diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs index 7b54886d53..6b59f1c20d 100644 --- a/crates/hir-ty/src/infer.rs +++ b/crates/hir-ty/src/infer.rs @@ -512,6 +512,8 @@ impl<'a> InferenceContext<'a> { fn resolve_all(self) -> InferenceResult { let InferenceContext { mut table, mut result, .. } = self; + table.fallback_if_possible(); + // FIXME resolve obligations as well (use Guidance if necessary) table.resolve_obligations_as_possible(); diff --git a/crates/hir-ty/src/infer/unify.rs b/crates/hir-ty/src/infer/unify.rs index a1fef48f25..e7ddd1591f 100644 --- a/crates/hir-ty/src/infer/unify.rs +++ b/crates/hir-ty/src/infer/unify.rs @@ -350,6 +350,51 @@ impl<'a> InferenceTable<'a> { self.resolve_with_fallback(t, &|_, _, d, _| d) } + /// Apply a fallback to unresolved scalar types. Integer type variables and float type + /// variables are replaced with i32 and f64, respectively. + /// + /// This method is only intended to be called just before returning inference results (i.e. in + /// `InferenceContext::resolve_all()`). + /// + /// FIXME: This method currently doesn't apply fallback to unconstrained general type variables + /// whereas rustc replaces them with `()` or `!`. + pub(super) fn fallback_if_possible(&mut self) { + let int_fallback = TyKind::Scalar(Scalar::Int(IntTy::I32)).intern(Interner); + let float_fallback = TyKind::Scalar(Scalar::Float(FloatTy::F64)).intern(Interner); + + let scalar_vars: Vec<_> = self + .type_variable_table + .iter() + .enumerate() + .filter_map(|(index, flags)| { + let kind = if flags.contains(TypeVariableFlags::INTEGER) { + TyVariableKind::Integer + } else if flags.contains(TypeVariableFlags::FLOAT) { + TyVariableKind::Float + } else { + return None; + }; + + // FIXME: This is not really the nicest way to get `InferenceVar`s. Can we get them + // without directly constructing them from `index`? + let var = InferenceVar::from(index as u32).to_ty(Interner, kind); + Some(var) + }) + .collect(); + + for var in scalar_vars { + let maybe_resolved = self.resolve_ty_shallow(&var); + if let TyKind::InferenceVar(_, kind) = maybe_resolved.kind(Interner) { + let fallback = match kind { + TyVariableKind::Integer => &int_fallback, + TyVariableKind::Float => &float_fallback, + TyVariableKind::General => unreachable!(), + }; + self.unify(&var, fallback); + } + } + } + /// Unify two relatable values (e.g. `Ty`) and register new trait goals that arise from that. 
pub(crate) fn unify>(&mut self, ty1: &T, ty2: &T) -> bool { let result = match self.try_unify(ty1, ty2) { diff --git a/crates/hir-ty/src/tests/traits.rs b/crates/hir-ty/src/tests/traits.rs index a9fd01ee01..d01fe06328 100644 --- a/crates/hir-ty/src/tests/traits.rs +++ b/crates/hir-ty/src/tests/traits.rs @@ -4100,3 +4100,68 @@ where "#, ); } + +#[test] +fn bin_op_with_scalar_fallback() { + // Extra impls are significant so that chalk doesn't give us definite guidances. + check_types( + r#" +//- minicore: add +use core::ops::Add; + +struct Vec2(T, T); + +impl Add for Vec2 { + type Output = Self; + fn add(self, rhs: Self) -> Self::Output { loop {} } +} +impl Add for Vec2 { + type Output = Self; + fn add(self, rhs: Self) -> Self::Output { loop {} } +} +impl Add for Vec2 { + type Output = Self; + fn add(self, rhs: Self) -> Self::Output { loop {} } +} +impl Add for Vec2 { + type Output = Self; + fn add(self, rhs: Self) -> Self::Output { loop {} } +} + +fn test() { + let a = Vec2(1, 2); + let b = Vec2(3, 4); + let c = a + b; + //^ Vec2 + let a = Vec2(1., 2.); + let b = Vec2(3., 4.); + let c = a + b; + //^ Vec2 +} +"#, + ); +} + +#[test] +fn trait_method_with_scalar_fallback() { + check_types( + r#" +trait Trait { + type Output; + fn foo(&self) -> Self::Output; +} +impl Trait for T { + type Output = T; + fn foo(&self) -> Self::Output { loop {} } +} +fn test() { + let a = 42; + let b = a.foo(); + //^ i32 + let a = 3.14; + let b = a.foo(); + //^ f64 +} +"#, + ); +} From 9f6567f20c90adc5451b0a3b87b29997131906d3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Fri, 6 Jan 2023 16:00:23 +0200 Subject: [PATCH 0766/1945] Fix option_env expansion --- crates/hir-expand/src/builtin_fn_macro.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/hir-expand/src/builtin_fn_macro.rs b/crates/hir-expand/src/builtin_fn_macro.rs index 985954d44e..eed71d88e2 100644 --- a/crates/hir-expand/src/builtin_fn_macro.rs +++ b/crates/hir-expand/src/builtin_fn_macro.rs @@ -671,7 +671,7 @@ fn option_env_expand( let expanded = match get_env_inner(db, arg_id, &key) { None => quote! { #DOLLAR_CRATE::option::Option::None::<&str> }, - Some(s) => quote! { #DOLLAR_CRATE::option::Some(#s) }, + Some(s) => quote! { #DOLLAR_CRATE::option::Option::Some(#s) }, }; ExpandResult::ok(ExpandedEager::new(expanded)) From 41d290d671c48dfae282abfe43b1435803d6d053 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 7 Jan 2023 19:20:08 +0000 Subject: [PATCH 0767/1945] Bump d3-color and d3-graphviz in /editors/code Bumps [d3-color](https://github.com/d3/d3-color) to 3.1.0 and updates ancestor dependency [d3-graphviz](https://github.com/magjac/d3-graphviz). These dependencies need to be updated together. Updates `d3-color` from 2.0.0 to 3.1.0 - [Release notes](https://github.com/d3/d3-color/releases) - [Commits](https://github.com/d3/d3-color/compare/v2.0.0...v3.1.0) Updates `d3-graphviz` from 4.1.1 to 5.0.2 - [Release notes](https://github.com/magjac/d3-graphviz/releases) - [Changelog](https://github.com/magjac/d3-graphviz/blob/master/CHANGELOG.md) - [Commits](https://github.com/magjac/d3-graphviz/compare/v4.1.1...v5.0.2) --- updated-dependencies: - dependency-name: d3-color dependency-type: indirect - dependency-name: d3-graphviz dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] --- editors/code/package-lock.json | 339 ++++++++------------------------- editors/code/package.json | 2 +- 2 files changed, 79 insertions(+), 262 deletions(-) diff --git a/editors/code/package-lock.json b/editors/code/package-lock.json index 0b25564e28..ee69d22476 100644 --- a/editors/code/package-lock.json +++ b/editors/code/package-lock.json @@ -10,7 +10,7 @@ "license": "MIT OR Apache-2.0", "dependencies": { "d3": "^7.6.1", - "d3-graphviz": "^4.1.1", + "d3-graphviz": "^5.0.2", "vscode-languageclient": "^8.0.2" }, "devDependencies": { @@ -54,14 +54,14 @@ } }, "node_modules/@hpcc-js/wasm": { - "version": "1.12.8", - "resolved": "https://registry.npmjs.org/@hpcc-js/wasm/-/wasm-1.12.8.tgz", - "integrity": "sha512-n4q9ARKco2hpCLsuVaW6Az3cDVaua7B3DSONHkc49WtEzgY/btvcDG5Zr1P6PZDv0sQ7oPnAi9Y+W2DI++MgcQ==", + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/@hpcc-js/wasm/-/wasm-2.5.0.tgz", + "integrity": "sha512-G26BamgaHW46f6P8bmkygapgNcy+tTDMwIvCzmMzdp39sxUS1u4gaT/vR2SSDc4x3SfL5RE4B2B8ef/wd429Hg==", "dependencies": { - "yargs": "^17.3.1" + "yargs": "17.6.2" }, "bin": { - "dot-wasm": "bin/cli.js" + "dot-wasm": "bin/dot-wasm.js" } }, "node_modules/@humanwhocodes/config-array": { @@ -698,13 +698,16 @@ "dev": true }, "node_modules/cliui": { - "version": "7.0.4", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", - "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", + "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", "dependencies": { "string-width": "^4.2.0", - "strip-ansi": "^6.0.0", + "strip-ansi": "^6.0.1", "wrap-ansi": "^7.0.0" + }, + "engines": { + "node": ">=12" } }, "node_modules/color-convert": { @@ -876,14 +879,6 @@ "node": ">=12" } }, - "node_modules/d3-brush/node_modules/d3-selection": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/d3-selection/-/d3-selection-3.0.0.tgz", - "integrity": "sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ==", - "engines": { - "node": ">=12" - } - }, "node_modules/d3-chord": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/d3-chord/-/d3-chord-3.0.1.tgz", @@ -945,14 +940,6 @@ "node": ">=12" } }, - "node_modules/d3-drag/node_modules/d3-selection": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/d3-selection/-/d3-selection-3.0.0.tgz", - "integrity": "sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ==", - "engines": { - "node": ">=12" - } - }, "node_modules/d3-dsv": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/d3-dsv/-/d3-dsv-3.0.1.tgz", @@ -1029,95 +1016,24 @@ } }, "node_modules/d3-graphviz": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/d3-graphviz/-/d3-graphviz-4.1.1.tgz", - "integrity": "sha512-s0IVbKf8rs4eJI2xo5Umr7nXDX/LEZw/x2WtKxmlyQxR0qUY49UiLhBNOX7VDHZywMle43NKEXnU6fn22fpJvQ==", + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/d3-graphviz/-/d3-graphviz-5.0.2.tgz", + "integrity": "sha512-EVRow9rnFgm/L1trbbnu2PGOND11IcSEdWXbrDbz9hH0/Kj3YM2AqMkkTN/EAWgawD5/zryyCy+3Vm05oSJ1Kg==", "dependencies": { - "@hpcc-js/wasm": "1.12.8", - "d3-dispatch": "^2.0.0", - "d3-format": "^2.0.0", - "d3-interpolate": "^2.0.1", - "d3-path": "^2.0.0", - "d3-timer": "^2.0.0", - "d3-transition": "^2.0.0", - "d3-zoom": 
"^2.0.0" + "@hpcc-js/wasm": "2.5.0", + "d3-dispatch": "^3.0.1", + "d3-format": "^3.1.0", + "d3-interpolate": "^3.0.1", + "d3-path": "^3.1.0", + "d3-timer": "^3.0.1", + "d3-transition": "^3.0.1", + "d3-zoom": "^3.0.0" + }, + "engines": { + "node": ">=14" }, "peerDependencies": { - "d3-selection": "^2.0.0" - } - }, - "node_modules/d3-graphviz/node_modules/d3-color": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/d3-color/-/d3-color-2.0.0.tgz", - "integrity": "sha512-SPXi0TSKPD4g9tw0NMZFnR95XVgUZiBH+uUTqQuDu1OsE2zomHU7ho0FISciaPvosimixwHFl3WHLGabv6dDgQ==" - }, - "node_modules/d3-graphviz/node_modules/d3-dispatch": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/d3-dispatch/-/d3-dispatch-2.0.0.tgz", - "integrity": "sha512-S/m2VsXI7gAti2pBoLClFFTMOO1HTtT0j99AuXLoGFKO6deHDdnv6ZGTxSTTUTgO1zVcv82fCOtDjYK4EECmWA==" - }, - "node_modules/d3-graphviz/node_modules/d3-drag": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/d3-drag/-/d3-drag-2.0.0.tgz", - "integrity": "sha512-g9y9WbMnF5uqB9qKqwIIa/921RYWzlUDv9Jl1/yONQwxbOfszAWTCm8u7HOTgJgRDXiRZN56cHT9pd24dmXs8w==", - "dependencies": { - "d3-dispatch": "1 - 2", - "d3-selection": "2" - } - }, - "node_modules/d3-graphviz/node_modules/d3-ease": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/d3-ease/-/d3-ease-2.0.0.tgz", - "integrity": "sha512-68/n9JWarxXkOWMshcT5IcjbB+agblQUaIsbnXmrzejn2O82n3p2A9R2zEB9HIEFWKFwPAEDDN8gR0VdSAyyAQ==" - }, - "node_modules/d3-graphviz/node_modules/d3-format": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/d3-format/-/d3-format-2.0.0.tgz", - "integrity": "sha512-Ab3S6XuE/Q+flY96HXT0jOXcM4EAClYFnRGY5zsjRGNy6qCYrQsMffs7cV5Q9xejb35zxW5hf/guKw34kvIKsA==" - }, - "node_modules/d3-graphviz/node_modules/d3-interpolate": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/d3-interpolate/-/d3-interpolate-2.0.1.tgz", - "integrity": "sha512-c5UhwwTs/yybcmTpAVqwSFl6vrQ8JZJoT5F7xNFK9pymv5C0Ymcc9/LIJHtYIggg/yS9YHw8i8O8tgb9pupjeQ==", - "dependencies": { - "d3-color": "1 - 2" - } - }, - "node_modules/d3-graphviz/node_modules/d3-path": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/d3-path/-/d3-path-2.0.0.tgz", - "integrity": "sha512-ZwZQxKhBnv9yHaiWd6ZU4x5BtCQ7pXszEV9CU6kRgwIQVQGLMv1oiL4M+MK/n79sYzsj+gcgpPQSctJUsLN7fA==" - }, - "node_modules/d3-graphviz/node_modules/d3-timer": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/d3-timer/-/d3-timer-2.0.0.tgz", - "integrity": "sha512-TO4VLh0/420Y/9dO3+f9abDEFYeCUr2WZRlxJvbp4HPTQcSylXNiL6yZa9FIUvV1yRiFufl1bszTCLDqv9PWNA==" - }, - "node_modules/d3-graphviz/node_modules/d3-transition": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/d3-transition/-/d3-transition-2.0.0.tgz", - "integrity": "sha512-42ltAGgJesfQE3u9LuuBHNbGrI/AJjNL2OAUdclE70UE6Vy239GCBEYD38uBPoLeNsOhFStGpPI0BAOV+HMxog==", - "dependencies": { - "d3-color": "1 - 2", - "d3-dispatch": "1 - 2", - "d3-ease": "1 - 2", - "d3-interpolate": "1 - 2", - "d3-timer": "1 - 2" - }, - "peerDependencies": { - "d3-selection": "2" - } - }, - "node_modules/d3-graphviz/node_modules/d3-zoom": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/d3-zoom/-/d3-zoom-2.0.0.tgz", - "integrity": "sha512-fFg7aoaEm9/jf+qfstak0IYpnesZLiMX6GZvXtUSdv8RH2o4E2qeelgdU09eKS6wGuiGMfcnMI0nTIqWzRHGpw==", - "dependencies": { - "d3-dispatch": "1 - 2", - "d3-drag": "2", - "d3-interpolate": "1 - 2", - "d3-selection": "2", - "d3-transition": "2" + "d3-selection": "^3.0.0" } }, "node_modules/d3-hierarchy": { @@ -1140,9 
+1056,9 @@ } }, "node_modules/d3-path": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-path/-/d3-path-3.0.1.tgz", - "integrity": "sha512-gq6gZom9AFZby0YLduxT1qmrp4xpBA1YZr19OI717WIdKE2OM5ETq5qrHLb301IgxhLwcuxvGZVLeeWc/k1I6w==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-path/-/d3-path-3.1.0.tgz", + "integrity": "sha512-p3KP5HCf/bvjBSSKuXid6Zqijx7wIfNW+J/maPs+iwR35at5JCbLUT0LzF1cnjbCHWhqzQTIN2Jpe8pRebIEFQ==", "engines": { "node": ">=12" } @@ -1199,9 +1115,12 @@ } }, "node_modules/d3-selection": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/d3-selection/-/d3-selection-2.0.0.tgz", - "integrity": "sha512-XoGGqhLUN/W14NmaqcO/bb1nqjDAw5WtSYb2X8wiuQWvSZUsUVYsOSkOybUrNvcBjaywBdYPy03eXHMXjk9nZA==" + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-selection/-/d3-selection-3.0.0.tgz", + "integrity": "sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ==", + "engines": { + "node": ">=12" + } }, "node_modules/d3-shape": { "version": "3.1.0", @@ -1277,14 +1196,6 @@ "node": ">=12" } }, - "node_modules/d3/node_modules/d3-selection": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/d3-selection/-/d3-selection-3.0.0.tgz", - "integrity": "sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ==", - "engines": { - "node": ">=12" - } - }, "node_modules/debug": { "version": "4.3.4", "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", @@ -3907,26 +3818,26 @@ "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" }, "node_modules/yargs": { - "version": "17.5.1", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.5.1.tgz", - "integrity": "sha512-t6YAJcxDkNX7NFYiVtKvWUz8l+PaKTLiL63mJYWR2GnHq2gjEWISzsLp9wg3aY36dY1j+gfIEL3pIF+XlJJfbA==", + "version": "17.6.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.6.2.tgz", + "integrity": "sha512-1/9UrdHjDZc0eOU0HxOHoS78C69UD3JRMvzlJ7S79S2nTaWRA/whGCTV8o9e/N/1Va9YIV7Q4sOxD8VV4pCWOw==", "dependencies": { - "cliui": "^7.0.2", + "cliui": "^8.0.1", "escalade": "^3.1.1", "get-caller-file": "^2.0.5", "require-directory": "^2.1.1", "string-width": "^4.2.3", "y18n": "^5.0.5", - "yargs-parser": "^21.0.0" + "yargs-parser": "^21.1.1" }, "engines": { "node": ">=12" } }, "node_modules/yargs-parser": { - "version": "21.0.1", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.0.1.tgz", - "integrity": "sha512-9BK1jFpLzJROCI5TzwZL/TU4gqjK5xiHV/RfWLOahrjAko/e4DJkRDZQXfvqAsiZzzYhgAzbgz6lg48jcm4GLg==", + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", "engines": { "node": ">=12" } @@ -3970,11 +3881,11 @@ } }, "@hpcc-js/wasm": { - "version": "1.12.8", - "resolved": "https://registry.npmjs.org/@hpcc-js/wasm/-/wasm-1.12.8.tgz", - "integrity": "sha512-n4q9ARKco2hpCLsuVaW6Az3cDVaua7B3DSONHkc49WtEzgY/btvcDG5Zr1P6PZDv0sQ7oPnAi9Y+W2DI++MgcQ==", + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/@hpcc-js/wasm/-/wasm-2.5.0.tgz", + "integrity": "sha512-G26BamgaHW46f6P8bmkygapgNcy+tTDMwIvCzmMzdp39sxUS1u4gaT/vR2SSDc4x3SfL5RE4B2B8ef/wd429Hg==", "requires": { - "yargs": "^17.3.1" + "yargs": "17.6.2" } }, "@humanwhocodes/config-array": { @@ -4406,12 +4317,12 @@ "dev": true }, "cliui": { - "version": "7.0.4", - "resolved": 
"https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", - "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", + "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", "requires": { "string-width": "^4.2.0", - "strip-ansi": "^6.0.0", + "strip-ansi": "^6.0.1", "wrap-ansi": "^7.0.0" } }, @@ -4518,13 +4429,6 @@ "d3-timer": "3", "d3-transition": "3", "d3-zoom": "3" - }, - "dependencies": { - "d3-selection": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/d3-selection/-/d3-selection-3.0.0.tgz", - "integrity": "sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ==" - } } }, "d3-array": { @@ -4550,13 +4454,6 @@ "d3-interpolate": "1 - 3", "d3-selection": "3", "d3-transition": "3" - }, - "dependencies": { - "d3-selection": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/d3-selection/-/d3-selection-3.0.0.tgz", - "integrity": "sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ==" - } } }, "d3-chord": { @@ -4600,13 +4497,6 @@ "requires": { "d3-dispatch": "1 - 3", "d3-selection": "3" - }, - "dependencies": { - "d3-selection": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/d3-selection/-/d3-selection-3.0.0.tgz", - "integrity": "sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ==" - } } }, "d3-dsv": { @@ -4656,91 +4546,18 @@ } }, "d3-graphviz": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/d3-graphviz/-/d3-graphviz-4.1.1.tgz", - "integrity": "sha512-s0IVbKf8rs4eJI2xo5Umr7nXDX/LEZw/x2WtKxmlyQxR0qUY49UiLhBNOX7VDHZywMle43NKEXnU6fn22fpJvQ==", + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/d3-graphviz/-/d3-graphviz-5.0.2.tgz", + "integrity": "sha512-EVRow9rnFgm/L1trbbnu2PGOND11IcSEdWXbrDbz9hH0/Kj3YM2AqMkkTN/EAWgawD5/zryyCy+3Vm05oSJ1Kg==", "requires": { - "@hpcc-js/wasm": "1.12.8", - "d3-dispatch": "^2.0.0", - "d3-format": "^2.0.0", - "d3-interpolate": "^2.0.1", - "d3-path": "^2.0.0", - "d3-timer": "^2.0.0", - "d3-transition": "^2.0.0", - "d3-zoom": "^2.0.0" - }, - "dependencies": { - "d3-color": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/d3-color/-/d3-color-2.0.0.tgz", - "integrity": "sha512-SPXi0TSKPD4g9tw0NMZFnR95XVgUZiBH+uUTqQuDu1OsE2zomHU7ho0FISciaPvosimixwHFl3WHLGabv6dDgQ==" - }, - "d3-dispatch": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/d3-dispatch/-/d3-dispatch-2.0.0.tgz", - "integrity": "sha512-S/m2VsXI7gAti2pBoLClFFTMOO1HTtT0j99AuXLoGFKO6deHDdnv6ZGTxSTTUTgO1zVcv82fCOtDjYK4EECmWA==" - }, - "d3-drag": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/d3-drag/-/d3-drag-2.0.0.tgz", - "integrity": "sha512-g9y9WbMnF5uqB9qKqwIIa/921RYWzlUDv9Jl1/yONQwxbOfszAWTCm8u7HOTgJgRDXiRZN56cHT9pd24dmXs8w==", - "requires": { - "d3-dispatch": "1 - 2", - "d3-selection": "2" - } - }, - "d3-ease": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/d3-ease/-/d3-ease-2.0.0.tgz", - "integrity": "sha512-68/n9JWarxXkOWMshcT5IcjbB+agblQUaIsbnXmrzejn2O82n3p2A9R2zEB9HIEFWKFwPAEDDN8gR0VdSAyyAQ==" - }, - "d3-format": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/d3-format/-/d3-format-2.0.0.tgz", - "integrity": "sha512-Ab3S6XuE/Q+flY96HXT0jOXcM4EAClYFnRGY5zsjRGNy6qCYrQsMffs7cV5Q9xejb35zxW5hf/guKw34kvIKsA==" - }, - 
"d3-interpolate": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/d3-interpolate/-/d3-interpolate-2.0.1.tgz", - "integrity": "sha512-c5UhwwTs/yybcmTpAVqwSFl6vrQ8JZJoT5F7xNFK9pymv5C0Ymcc9/LIJHtYIggg/yS9YHw8i8O8tgb9pupjeQ==", - "requires": { - "d3-color": "1 - 2" - } - }, - "d3-path": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/d3-path/-/d3-path-2.0.0.tgz", - "integrity": "sha512-ZwZQxKhBnv9yHaiWd6ZU4x5BtCQ7pXszEV9CU6kRgwIQVQGLMv1oiL4M+MK/n79sYzsj+gcgpPQSctJUsLN7fA==" - }, - "d3-timer": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/d3-timer/-/d3-timer-2.0.0.tgz", - "integrity": "sha512-TO4VLh0/420Y/9dO3+f9abDEFYeCUr2WZRlxJvbp4HPTQcSylXNiL6yZa9FIUvV1yRiFufl1bszTCLDqv9PWNA==" - }, - "d3-transition": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/d3-transition/-/d3-transition-2.0.0.tgz", - "integrity": "sha512-42ltAGgJesfQE3u9LuuBHNbGrI/AJjNL2OAUdclE70UE6Vy239GCBEYD38uBPoLeNsOhFStGpPI0BAOV+HMxog==", - "requires": { - "d3-color": "1 - 2", - "d3-dispatch": "1 - 2", - "d3-ease": "1 - 2", - "d3-interpolate": "1 - 2", - "d3-timer": "1 - 2" - } - }, - "d3-zoom": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/d3-zoom/-/d3-zoom-2.0.0.tgz", - "integrity": "sha512-fFg7aoaEm9/jf+qfstak0IYpnesZLiMX6GZvXtUSdv8RH2o4E2qeelgdU09eKS6wGuiGMfcnMI0nTIqWzRHGpw==", - "requires": { - "d3-dispatch": "1 - 2", - "d3-drag": "2", - "d3-interpolate": "1 - 2", - "d3-selection": "2", - "d3-transition": "2" - } - } + "@hpcc-js/wasm": "2.5.0", + "d3-dispatch": "^3.0.1", + "d3-format": "^3.1.0", + "d3-interpolate": "^3.0.1", + "d3-path": "^3.1.0", + "d3-timer": "^3.0.1", + "d3-transition": "^3.0.1", + "d3-zoom": "^3.0.0" } }, "d3-hierarchy": { @@ -4757,9 +4574,9 @@ } }, "d3-path": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-path/-/d3-path-3.0.1.tgz", - "integrity": "sha512-gq6gZom9AFZby0YLduxT1qmrp4xpBA1YZr19OI717WIdKE2OM5ETq5qrHLb301IgxhLwcuxvGZVLeeWc/k1I6w==" + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-path/-/d3-path-3.1.0.tgz", + "integrity": "sha512-p3KP5HCf/bvjBSSKuXid6Zqijx7wIfNW+J/maPs+iwR35at5JCbLUT0LzF1cnjbCHWhqzQTIN2Jpe8pRebIEFQ==" }, "d3-polygon": { "version": "3.0.1", @@ -4798,9 +4615,9 @@ } }, "d3-selection": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/d3-selection/-/d3-selection-2.0.0.tgz", - "integrity": "sha512-XoGGqhLUN/W14NmaqcO/bb1nqjDAw5WtSYb2X8wiuQWvSZUsUVYsOSkOybUrNvcBjaywBdYPy03eXHMXjk9nZA==" + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-selection/-/d3-selection-3.0.0.tgz", + "integrity": "sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ==" }, "d3-shape": { "version": "3.1.0", @@ -6720,23 +6537,23 @@ "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" }, "yargs": { - "version": "17.5.1", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.5.1.tgz", - "integrity": "sha512-t6YAJcxDkNX7NFYiVtKvWUz8l+PaKTLiL63mJYWR2GnHq2gjEWISzsLp9wg3aY36dY1j+gfIEL3pIF+XlJJfbA==", + "version": "17.6.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.6.2.tgz", + "integrity": "sha512-1/9UrdHjDZc0eOU0HxOHoS78C69UD3JRMvzlJ7S79S2nTaWRA/whGCTV8o9e/N/1Va9YIV7Q4sOxD8VV4pCWOw==", "requires": { - "cliui": "^7.0.2", + "cliui": "^8.0.1", "escalade": "^3.1.1", "get-caller-file": "^2.0.5", "require-directory": "^2.1.1", "string-width": "^4.2.3", "y18n": "^5.0.5", - "yargs-parser": "^21.0.0" + "yargs-parser": "^21.1.1" } }, 
"yargs-parser": { - "version": "21.0.1", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.0.1.tgz", - "integrity": "sha512-9BK1jFpLzJROCI5TzwZL/TU4gqjK5xiHV/RfWLOahrjAko/e4DJkRDZQXfvqAsiZzzYhgAzbgz6lg48jcm4GLg==" + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==" }, "yauzl": { "version": "2.10.0", diff --git a/editors/code/package.json b/editors/code/package.json index 89ff64fca7..454b95a63b 100644 --- a/editors/code/package.json +++ b/editors/code/package.json @@ -36,7 +36,7 @@ }, "dependencies": { "d3": "^7.6.1", - "d3-graphviz": "^4.1.1", + "d3-graphviz": "^5.0.2", "vscode-languageclient": "^8.0.2" }, "devDependencies": { From 0dd26821783ccc809fb0995f178fd6c53f6eebd7 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 9 Jan 2023 11:59:09 +0100 Subject: [PATCH 0768/1945] Refactor replace_arith assists into one module --- .../src/handlers/replace_arith_op.rs | 226 ++++++++++++++++++ .../handlers/replace_arith_with_checked.rs | 45 ---- .../handlers/replace_arith_with_saturating.rs | 48 ---- .../handlers/replace_arith_with_wrapping.rs | 48 ---- crates/ide-assists/src/lib.rs | 10 +- crates/ide-assists/src/utils.rs | 124 +--------- 6 files changed, 234 insertions(+), 267 deletions(-) create mode 100644 crates/ide-assists/src/handlers/replace_arith_op.rs delete mode 100644 crates/ide-assists/src/handlers/replace_arith_with_checked.rs delete mode 100644 crates/ide-assists/src/handlers/replace_arith_with_saturating.rs delete mode 100644 crates/ide-assists/src/handlers/replace_arith_with_wrapping.rs diff --git a/crates/ide-assists/src/handlers/replace_arith_op.rs b/crates/ide-assists/src/handlers/replace_arith_op.rs new file mode 100644 index 0000000000..f1ca35cafc --- /dev/null +++ b/crates/ide-assists/src/handlers/replace_arith_op.rs @@ -0,0 +1,226 @@ +use ide_db::assists::{AssistId, AssistKind, GroupLabel}; +use syntax::{ + ast::{self, ArithOp, BinaryOp}, + AstNode, TextRange, +}; + +use crate::assist_context::{AssistContext, Assists}; + +// Assist: replace_arith_with_checked +// +// Replaces arithmetic on integers with the `checked_*` equivalent. +// +// ``` +// fn main() { +// let x = 1 $0+ 2; +// } +// ``` +// -> +// ``` +// fn main() { +// let x = 1.checked_add(2); +// } +// ``` +pub(crate) fn replace_arith_with_checked(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { + replace_arith(acc, ctx, ArithKind::Checked) +} + +// Assist: replace_arith_with_saturating +// +// Replaces arithmetic on integers with the `saturating_*` equivalent. +// +// ``` +// fn main() { +// let x = 1 $0+ 2; +// } +// ``` +// -> +// ``` +// fn main() { +// let x = 1.saturating_add(2); +// } +// ``` +pub(crate) fn replace_arith_with_saturating( + acc: &mut Assists, + ctx: &AssistContext<'_>, +) -> Option<()> { + replace_arith(acc, ctx, ArithKind::Saturating) +} + +// Assist: replace_arith_with_wrapping +// +// Replaces arithmetic on integers with the `wrapping_*` equivalent. 
+// +// ``` +// fn main() { +// let x = 1 $0+ 2; +// } +// ``` +// -> +// ``` +// fn main() { +// let x = 1.wrapping_add(2); +// } +// ``` +pub(crate) fn replace_arith_with_wrapping( + acc: &mut Assists, + ctx: &AssistContext<'_>, +) -> Option<()> { + replace_arith(acc, ctx, ArithKind::Wrapping) +} + +fn replace_arith(acc: &mut Assists, ctx: &AssistContext<'_>, kind: ArithKind) -> Option<()> { + let (lhs, op, rhs) = parse_binary_op(ctx)?; + + if !is_primitive_int(ctx, &lhs) || !is_primitive_int(ctx, &rhs) { + return None; + } + + let start = lhs.syntax().text_range().start(); + let end = rhs.syntax().text_range().end(); + let range = TextRange::new(start, end); + + acc.add_group( + &GroupLabel("replace_arith".into()), + kind.assist_id(), + kind.label(), + range, + |builder| { + let method_name = kind.method_name(op); + + builder.replace(range, format!("{lhs}.{method_name}({rhs})")) + }, + ) +} + +fn is_primitive_int(ctx: &AssistContext<'_>, expr: &ast::Expr) -> bool { + match ctx.sema.type_of_expr(expr) { + Some(ty) => ty.adjusted().is_int_or_uint(), + _ => false, + } +} + +/// Extract the operands of an arithmetic expression (e.g. `1 + 2` or `1.checked_add(2)`) +fn parse_binary_op(ctx: &AssistContext<'_>) -> Option<(ast::Expr, ArithOp, ast::Expr)> { + let expr = ctx.find_node_at_offset::()?; + + let op = match expr.op_kind() { + Some(BinaryOp::ArithOp(ArithOp::Add)) => ArithOp::Add, + Some(BinaryOp::ArithOp(ArithOp::Sub)) => ArithOp::Sub, + Some(BinaryOp::ArithOp(ArithOp::Mul)) => ArithOp::Mul, + Some(BinaryOp::ArithOp(ArithOp::Div)) => ArithOp::Div, + _ => return None, + }; + + let lhs = expr.lhs()?; + let rhs = expr.rhs()?; + + Some((lhs, op, rhs)) +} + +pub(crate) enum ArithKind { + Saturating, + Wrapping, + Checked, +} + +impl ArithKind { + fn assist_id(&self) -> AssistId { + let s = match self { + ArithKind::Saturating => "replace_arith_with_saturating", + ArithKind::Checked => "replace_arith_with_checked", + ArithKind::Wrapping => "replace_arith_with_wrapping", + }; + + AssistId(s, AssistKind::RefactorRewrite) + } + + fn label(&self) -> &'static str { + match self { + ArithKind::Saturating => "Replace arithmetic with call to saturating_*", + ArithKind::Checked => "Replace arithmetic with call to checked_*", + ArithKind::Wrapping => "Replace arithmetic with call to wrapping_*", + } + } + + fn method_name(&self, op: ArithOp) -> String { + let prefix = match self { + ArithKind::Checked => "checked_", + ArithKind::Wrapping => "wrapping_", + ArithKind::Saturating => "saturating_", + }; + + let suffix = match op { + ArithOp::Add => "add", + ArithOp::Sub => "sub", + ArithOp::Mul => "mul", + ArithOp::Div => "div", + _ => unreachable!("this function should only be called with +, -, / or *"), + }; + format!("{prefix}{suffix}") + } +} + +#[cfg(test)] +mod tests { + use crate::tests::check_assist; + + use super::*; + + #[test] + fn arith_kind_method_name() { + assert_eq!(ArithKind::Saturating.method_name(ArithOp::Add), "saturating_add"); + assert_eq!(ArithKind::Checked.method_name(ArithOp::Sub), "checked_sub"); + } + + #[test] + fn replace_arith_with_checked_add() { + check_assist( + replace_arith_with_checked, + r#" +fn main() { + let x = 1 $0+ 2; +} +"#, + r#" +fn main() { + let x = 1.checked_add(2); +} +"#, + ) + } + + #[test] + fn replace_arith_with_saturating_add() { + check_assist( + replace_arith_with_saturating, + r#" +fn main() { + let x = 1 $0+ 2; +} +"#, + r#" +fn main() { + let x = 1.saturating_add(2); +} +"#, + ) + } + + #[test] + fn replace_arith_with_wrapping_add() { + 
check_assist( + replace_arith_with_wrapping, + r#" +fn main() { + let x = 1 $0+ 2; +} +"#, + r#" +fn main() { + let x = 1.wrapping_add(2); +} +"#, + ) + } +} diff --git a/crates/ide-assists/src/handlers/replace_arith_with_checked.rs b/crates/ide-assists/src/handlers/replace_arith_with_checked.rs deleted file mode 100644 index ff1fba5818..0000000000 --- a/crates/ide-assists/src/handlers/replace_arith_with_checked.rs +++ /dev/null @@ -1,45 +0,0 @@ -use crate::assist_context::{AssistContext, Assists}; -use crate::utils::{replace_arith, ArithKind}; - -// Assist: replace_arith_with_checked -// -// Replaces arithmetic on integers with the `checked_*` equivalent. -// -// ``` -// fn main() { -// let x = 1 $0+ 2; -// } -// ``` -// -> -// ``` -// fn main() { -// let x = 1.checked_add(2); -// } -// ``` -pub(crate) fn replace_arith_with_checked(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { - replace_arith(acc, ctx, ArithKind::Checked) -} - -#[cfg(test)] -mod tests { - use crate::tests::check_assist; - - use super::*; - - #[test] - fn replace_arith_with_saturating_add() { - check_assist( - replace_arith_with_checked, - r#" -fn main() { - let x = 1 $0+ 2; -} -"#, - r#" -fn main() { - let x = 1.checked_add(2); -} -"#, - ) - } -} diff --git a/crates/ide-assists/src/handlers/replace_arith_with_saturating.rs b/crates/ide-assists/src/handlers/replace_arith_with_saturating.rs deleted file mode 100644 index 717875e84f..0000000000 --- a/crates/ide-assists/src/handlers/replace_arith_with_saturating.rs +++ /dev/null @@ -1,48 +0,0 @@ -use crate::assist_context::{AssistContext, Assists}; -use crate::utils::{replace_arith, ArithKind}; - -// Assist: replace_arith_with_saturating -// -// Replaces arithmetic on integers with the `saturating_*` equivalent. -// -// ``` -// fn main() { -// let x = 1 $0+ 2; -// } -// ``` -// -> -// ``` -// fn main() { -// let x = 1.saturating_add(2); -// } -// ``` -pub(crate) fn replace_arith_with_saturating( - acc: &mut Assists, - ctx: &AssistContext<'_>, -) -> Option<()> { - replace_arith(acc, ctx, ArithKind::Saturating) -} - -#[cfg(test)] -mod tests { - use crate::tests::check_assist; - - use super::*; - - #[test] - fn replace_arith_with_saturating_add() { - check_assist( - replace_arith_with_saturating, - r#" -fn main() { - let x = 1 $0+ 2; -} -"#, - r#" -fn main() { - let x = 1.saturating_add(2); -} -"#, - ) - } -} diff --git a/crates/ide-assists/src/handlers/replace_arith_with_wrapping.rs b/crates/ide-assists/src/handlers/replace_arith_with_wrapping.rs deleted file mode 100644 index e0a90292b1..0000000000 --- a/crates/ide-assists/src/handlers/replace_arith_with_wrapping.rs +++ /dev/null @@ -1,48 +0,0 @@ -use crate::assist_context::{AssistContext, Assists}; -use crate::utils::{replace_arith, ArithKind}; - -// Assist: replace_arith_with_wrapping -// -// Replaces arithmetic on integers with the `wrapping_*` equivalent. 
-// -// ``` -// fn main() { -// let x = 1 $0+ 2; -// } -// ``` -// -> -// ``` -// fn main() { -// let x = 1.wrapping_add(2); -// } -// ``` -pub(crate) fn replace_arith_with_wrapping( - acc: &mut Assists, - ctx: &AssistContext<'_>, -) -> Option<()> { - replace_arith(acc, ctx, ArithKind::Wrapping) -} - -#[cfg(test)] -mod tests { - use crate::tests::check_assist; - - use super::*; - - #[test] - fn replace_arith_with_saturating_add() { - check_assist( - replace_arith_with_wrapping, - r#" -fn main() { - let x = 1 $0+ 2; -} -"#, - r#" -fn main() { - let x = 1.wrapping_add(2); -} -"#, - ) - } -} diff --git a/crates/ide-assists/src/lib.rs b/crates/ide-assists/src/lib.rs index 3938fa0983..0255ffdea3 100644 --- a/crates/ide-assists/src/lib.rs +++ b/crates/ide-assists/src/lib.rs @@ -183,9 +183,7 @@ mod handlers { mod replace_derive_with_manual_impl; mod replace_if_let_with_match; mod replace_or_with_or_else; - mod replace_arith_with_checked; - mod replace_arith_with_wrapping; - mod replace_arith_with_saturating; + mod replace_arith_op; mod introduce_named_generic; mod replace_let_with_if_let; mod replace_qualified_name_with_use; @@ -289,9 +287,9 @@ mod handlers { replace_or_with_or_else::replace_or_with_or_else, replace_turbofish_with_explicit_type::replace_turbofish_with_explicit_type, replace_qualified_name_with_use::replace_qualified_name_with_use, - replace_arith_with_wrapping::replace_arith_with_wrapping, - replace_arith_with_checked::replace_arith_with_checked, - replace_arith_with_saturating::replace_arith_with_saturating, + replace_arith_op::replace_arith_with_wrapping, + replace_arith_op::replace_arith_with_checked, + replace_arith_op::replace_arith_with_saturating, sort_items::sort_items, split_import::split_import, toggle_ignore::toggle_ignore, diff --git a/crates/ide-assists/src/utils.rs b/crates/ide-assists/src/utils.rs index 0227222c08..db32e7182c 100644 --- a/crates/ide-assists/src/utils.rs +++ b/crates/ide-assists/src/utils.rs @@ -4,27 +4,21 @@ use std::ops; pub(crate) use gen_trait_fn_body::gen_trait_fn_body; use hir::{db::HirDatabase, HirDisplay, Semantics}; -use ide_db::{ - assists::{AssistId, AssistKind}, - famous_defs::FamousDefs, - path_transform::PathTransform, - RootDatabase, SnippetCap, -}; +use ide_db::{famous_defs::FamousDefs, path_transform::PathTransform, RootDatabase, SnippetCap}; use stdx::format_to; use syntax::{ ast::{ self, edit::{self, AstNodeEdit}, edit_in_place::{AttrsOwnerEdit, Removable}, - make, ArithOp, BinExpr, BinaryOp, Expr, HasArgList, HasAttrs, HasGenericParams, HasName, - HasTypeBounds, Whitespace, + make, HasArgList, HasAttrs, HasGenericParams, HasName, HasTypeBounds, Whitespace, }, - ted, AstNode, AstToken, Direction, SmolStr, SourceFile, + ted, AstNode, AstToken, Direction, SourceFile, SyntaxKind::*, SyntaxNode, TextRange, TextSize, T, }; -use crate::assist_context::{AssistContext, Assists, SourceChangeBuilder}; +use crate::assist_context::{AssistContext, SourceChangeBuilder}; pub(crate) mod suggest_name; mod gen_trait_fn_body; @@ -711,113 +705,3 @@ pub(crate) fn convert_param_list_to_arg_list(list: ast::ParamList) -> ast::ArgLi } make::arg_list(args) } - -pub(crate) enum ArithKind { - Saturating, - Wrapping, - Checked, -} - -impl ArithKind { - fn assist_id(&self) -> AssistId { - let s = match self { - ArithKind::Saturating => "replace_arith_with_saturating", - ArithKind::Checked => "replace_arith_with_checked", - ArithKind::Wrapping => "replace_arith_with_wrapping", - }; - - AssistId(s, AssistKind::RefactorRewrite) - } - - fn label(&self) -> &'static 
str { - match self { - ArithKind::Saturating => "Replace arithmetic with call to saturating_*", - ArithKind::Checked => "Replace arithmetic with call to checked_*", - ArithKind::Wrapping => "Replace arithmetic with call to wrapping_*", - } - } - - fn method_name(&self, op: ArithOp) -> SmolStr { - // is this too much effort to avoid an allocation? is there a better way? - let mut bytes = [0u8; 14]; - let prefix = match self { - ArithKind::Checked => "checked_", - ArithKind::Wrapping => "wrapping_", - ArithKind::Saturating => "saturating_", - }; - - bytes[0..(prefix.len())].copy_from_slice(prefix.as_bytes()); - - let suffix = match op { - ArithOp::Add => "add", - ArithOp::Sub => "sub", - ArithOp::Mul => "mul", - ArithOp::Div => "div", - _ => unreachable!("this function should only be called with +, -, / or *"), - }; - - bytes[(prefix.len())..(prefix.len() + suffix.len())].copy_from_slice(suffix.as_bytes()); - - let len = prefix.len() + suffix.len(); - let s = core::str::from_utf8(&bytes[0..len]).unwrap(); - SmolStr::from(s) - } -} - -pub(crate) fn replace_arith( - acc: &mut Assists, - ctx: &AssistContext<'_>, - kind: ArithKind, -) -> Option<()> { - let (lhs, op, rhs) = parse_binary_op(ctx)?; - - if !is_primitive_int(ctx, &lhs) || !is_primitive_int(ctx, &rhs) { - return None; - } - - let start = lhs.syntax().text_range().start(); - let end = rhs.syntax().text_range().end(); - let range = TextRange::new(start, end); - - acc.add(kind.assist_id(), kind.label(), range, |builder| { - let method_name = kind.method_name(op); - - builder.replace(range, format!("{lhs}.{method_name}({rhs})")) - }) -} - -fn is_primitive_int(ctx: &AssistContext<'_>, expr: &Expr) -> bool { - match ctx.sema.type_of_expr(expr) { - Some(ty) => ty.adjusted().is_int_or_uint(), - _ => false, - } -} - -/// Extract the operands of an arithmetic expression (e.g. 
`1 + 2` or `1.checked_add(2)`) -fn parse_binary_op(ctx: &AssistContext<'_>) -> Option<(Expr, ArithOp, Expr)> { - let expr = ctx.find_node_at_offset::()?; - - let op = match expr.op_kind() { - Some(BinaryOp::ArithOp(ArithOp::Add)) => ArithOp::Add, - Some(BinaryOp::ArithOp(ArithOp::Sub)) => ArithOp::Sub, - Some(BinaryOp::ArithOp(ArithOp::Mul)) => ArithOp::Mul, - Some(BinaryOp::ArithOp(ArithOp::Div)) => ArithOp::Div, - _ => return None, - }; - - let lhs = expr.lhs()?; - let rhs = expr.rhs()?; - - Some((lhs, op, rhs)) -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn arith_kind_method_name() { - assert_eq!(ArithKind::Saturating.method_name(ArithOp::Add), "saturating_add"); - assert_eq!(ArithKind::Checked.method_name(ArithOp::Sub), "checked_sub"); - } -} From 9290399ec438c8ba6f3819bbd16212dfabbcb610 Mon Sep 17 00:00:00 2001 From: unvalley Date: Sat, 26 Nov 2022 23:00:03 +0900 Subject: [PATCH 0769/1945] fix: rename to extract_expressions_from_format_string --- ...extract_expressions_from_format_string.rs} | 21 ++++--- crates/ide-assists/src/lib.rs | 4 +- crates/ide-assists/src/tests/generated.rs | 62 +++++++++---------- 3 files changed, 45 insertions(+), 42 deletions(-) rename crates/ide-assists/src/handlers/{move_format_string_arg.rs => extract_expressions_from_format_string.rs} (93%) diff --git a/crates/ide-assists/src/handlers/move_format_string_arg.rs b/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs similarity index 93% rename from crates/ide-assists/src/handlers/move_format_string_arg.rs rename to crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs index 11db6ae7f7..b1bc2a9e6d 100644 --- a/crates/ide-assists/src/handlers/move_format_string_arg.rs +++ b/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs @@ -10,7 +10,7 @@ use itertools::Itertools; use stdx::format_to; use syntax::{ast, AstNode, AstToken, NodeOrToken, SyntaxKind::COMMA, TextRange}; -// Assist: move_format_string_arg +// Assist: extract_expressions_from_format_string // // Move an expression out of a format string. // @@ -40,7 +40,10 @@ use syntax::{ast, AstNode, AstToken, NodeOrToken, SyntaxKind::COMMA, TextRange}; // } // ``` -pub(crate) fn move_format_string_arg(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { +pub(crate) fn extract_expressions_from_format_string( + acc: &mut Assists, + ctx: &AssistContext<'_>, +) -> Option<()> { let fmt_string = ctx.find_token_at_offset::()?; let tt = fmt_string.syntax().parent().and_then(ast::TokenTree::cast)?; @@ -58,7 +61,7 @@ pub(crate) fn move_format_string_arg(acc: &mut Assists, ctx: &AssistContext<'_>) acc.add( AssistId( - "move_format_string_arg", + "extract_expressions_from_format_string", // if there aren't any expressions, then make the assist a RefactorExtract if extracted_args.iter().filter(|f| matches!(f, Arg::Expr(_))).count() == 0 { AssistKind::RefactorExtract @@ -171,7 +174,7 @@ macro_rules! 
print { #[test] fn multiple_middle_arg() { check_assist( - move_format_string_arg, + extract_expressions_from_format_string, &add_macro_decl( r#" fn main() { @@ -192,7 +195,7 @@ fn main() { #[test] fn single_arg() { check_assist( - move_format_string_arg, + extract_expressions_from_format_string, &add_macro_decl( r#" fn main() { @@ -213,7 +216,7 @@ fn main() { #[test] fn multiple_middle_placeholders_arg() { check_assist( - move_format_string_arg, + extract_expressions_from_format_string, &add_macro_decl( r#" fn main() { @@ -234,7 +237,7 @@ fn main() { #[test] fn multiple_trailing_args() { check_assist( - move_format_string_arg, + extract_expressions_from_format_string, &add_macro_decl( r#" fn main() { @@ -255,7 +258,7 @@ fn main() { #[test] fn improper_commas() { check_assist( - move_format_string_arg, + extract_expressions_from_format_string, &add_macro_decl( r#" fn main() { @@ -276,7 +279,7 @@ fn main() { #[test] fn nested_tt() { check_assist( - move_format_string_arg, + extract_expressions_from_format_string, &add_macro_decl( r#" fn main() { diff --git a/crates/ide-assists/src/lib.rs b/crates/ide-assists/src/lib.rs index 8b1247c640..0bf502fdbc 100644 --- a/crates/ide-assists/src/lib.rs +++ b/crates/ide-assists/src/lib.rs @@ -138,7 +138,7 @@ mod handlers { mod flip_binexpr; mod flip_comma; mod flip_trait_bound; - mod move_format_string_arg; + mod extract_expressions_from_format_string; mod generate_constant; mod generate_default_from_enum_variant; mod generate_default_from_new; @@ -230,6 +230,7 @@ mod handlers { convert_while_to_loop::convert_while_to_loop, destructure_tuple_binding::destructure_tuple_binding, expand_glob_import::expand_glob_import, + extract_expressions_from_format_string::extract_expressions_from_format_string, extract_struct_from_enum_variant::extract_struct_from_enum_variant, extract_type_alias::extract_type_alias, fix_visibility::fix_visibility, @@ -264,7 +265,6 @@ mod handlers { merge_match_arms::merge_match_arms, move_bounds::move_bounds_to_where_clause, move_const_to_impl::move_const_to_impl, - move_format_string_arg::move_format_string_arg, move_guard::move_arm_cond_to_match_guard, move_guard::move_guard_to_arm_body, move_module_to_file::move_module_to_file, diff --git a/crates/ide-assists/src/tests/generated.rs b/crates/ide-assists/src/tests/generated.rs index 80b8c27c7c..5e24de6207 100644 --- a/crates/ide-assists/src/tests/generated.rs +++ b/crates/ide-assists/src/tests/generated.rs @@ -624,6 +624,37 @@ fn qux(bar: Bar, baz: Baz) {} ) } +#[test] +fn doctest_extract_expressions_from_format_string() { + check_doc_test( + "extract_expressions_from_format_string", + r#####" +macro_rules! format_args { + ($lit:literal $(tt:tt)*) => { 0 }, +} +macro_rules! print { + ($($arg:tt)*) => (std::io::_print(format_args!($($arg)*))); +} + +fn main() { + print!("{x + 1}$0"); +} +"#####, + r#####" +macro_rules! format_args { + ($lit:literal $(tt:tt)*) => { 0 }, +} +macro_rules! print { + ($($arg:tt)*) => (std::io::_print(format_args!($($arg)*))); +} + +fn main() { + print!("{}"$0, x + 1); +} +"#####, + ) +} + #[test] fn doctest_extract_function() { check_doc_test( @@ -1703,37 +1734,6 @@ impl S { ) } -#[test] -fn doctest_move_format_string_arg() { - check_doc_test( - "move_format_string_arg", - r#####" -macro_rules! format_args { - ($lit:literal $(tt:tt)*) => { 0 }, -} -macro_rules! print { - ($($arg:tt)*) => (std::io::_print(format_args!($($arg)*))); -} - -fn main() { - print!("{x + 1}$0"); -} -"#####, - r#####" -macro_rules! 
format_args { - ($lit:literal $(tt:tt)*) => { 0 }, -} -macro_rules! print { - ($($arg:tt)*) => (std::io::_print(format_args!($($arg)*))); -} - -fn main() { - print!("{}"$0, x + 1); -} -"#####, - ) -} - #[test] fn doctest_move_from_mod_rs() { check_doc_test( From 796a1064122d7e85b509716d46beb6bb3d267811 Mon Sep 17 00:00:00 2001 From: unvalley Date: Sat, 26 Nov 2022 23:01:52 +0900 Subject: [PATCH 0770/1945] fix: ide assist handlers order --- crates/ide-assists/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/ide-assists/src/lib.rs b/crates/ide-assists/src/lib.rs index 0bf502fdbc..cdb9c70f71 100644 --- a/crates/ide-assists/src/lib.rs +++ b/crates/ide-assists/src/lib.rs @@ -128,6 +128,7 @@ mod handlers { mod convert_while_to_loop; mod destructure_tuple_binding; mod expand_glob_import; + mod extract_expressions_from_format_string; mod extract_function; mod extract_module; mod extract_struct_from_enum_variant; @@ -138,7 +139,6 @@ mod handlers { mod flip_binexpr; mod flip_comma; mod flip_trait_bound; - mod extract_expressions_from_format_string; mod generate_constant; mod generate_default_from_enum_variant; mod generate_default_from_new; From 13267adb12b6a62d69c5ca0cf872beba9413b6ec Mon Sep 17 00:00:00 2001 From: unvalley Date: Sun, 27 Nov 2022 14:44:32 +0900 Subject: [PATCH 0771/1945] fix: to leave Ident in parse_format_exprs --- .../src/syntax_helpers/format_string_exprs.rs | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/crates/ide-db/src/syntax_helpers/format_string_exprs.rs b/crates/ide-db/src/syntax_helpers/format_string_exprs.rs index f5f03d70b0..6ff580c167 100644 --- a/crates/ide-db/src/syntax_helpers/format_string_exprs.rs +++ b/crates/ide-db/src/syntax_helpers/format_string_exprs.rs @@ -140,8 +140,8 @@ pub fn parse_format_exprs(input: &str) -> Result<(String, Vec), ()> { output.push_str(trimmed); } else if matches!(state, State::Expr) { extracted_expressions.push(Arg::Expr(trimmed.into())); - } else { - extracted_expressions.push(Arg::Ident(trimmed.into())); + } else if matches!(state, State::Ident) { + output.push_str(trimmed); } output.push(chr); @@ -218,9 +218,9 @@ mod tests { let test_vector = &[ ("no expressions", expect![["no expressions"]]), (r"no expressions with \$0$1", expect![r"no expressions with \\\$0\$1"]), - ("{expr} is {2 + 2}", expect![["{} is {}; expr, 2 + 2"]]), - ("{expr:?}", expect![["{:?}; expr"]]), - ("{expr:1$}", expect![[r"{:1\$}; expr"]]), + ("{expr} is {2 + 2}", expect![["{expr} is {}; 2 + 2"]]), + ("{expr:?}", expect![["{expr:?}"]]), + ("{expr:1$}", expect![[r"{expr:1\$}"]]), ("{:1$}", expect![[r"{:1\$}; $1"]]), ("{:>padding$}", expect![[r"{:>padding\$}; $1"]]), ("{}, {}, {0}", expect![[r"{}, {}, {0}; $1, $2"]]), @@ -230,8 +230,8 @@ mod tests { ("malformed}", expect![["-"]]), ("{{correct", expect![["{{correct"]]), ("correct}}", expect![["correct}}"]]), - ("{correct}}}", expect![["{}}}; correct"]]), - ("{correct}}}}}", expect![["{}}}}}; correct"]]), + ("{correct}}}", expect![["{correct}}}"]]), + ("{correct}}}}}", expect![["{correct}}}}}"]]), ("{incorrect}}", expect![["-"]]), ("placeholders {} {}", expect![["placeholders {} {}; $1, $2"]]), ("mixed {} {2 + 2} {}", expect![["mixed {} {} {}; $1, 2 + 2, $2"]]), @@ -239,7 +239,7 @@ mod tests { "{SomeStruct { val_a: 0, val_b: 1 }}", expect![["{}; SomeStruct { val_a: 0, val_b: 1 }"]], ), - ("{expr:?} is {2.32f64:.5}", expect![["{:?} is {:.5}; expr, 2.32f64"]]), + ("{expr:?} is {2.32f64:.5}", expect![["{expr:?} is {:.5}; 2.32f64"]]), ( "{SomeStruct { 
val_a: 0, val_b: 1 }:?}", expect![["{:?}; SomeStruct { val_a: 0, val_b: 1 }"]], From 29f3d7dee75e20eeec573065dc01070544d3d6e3 Mon Sep 17 00:00:00 2001 From: unvalley Date: Sun, 27 Nov 2022 14:49:30 +0900 Subject: [PATCH 0772/1945] test: fix arg_type test --- crates/ide-db/src/syntax_helpers/format_string_exprs.rs | 2 -- 1 file changed, 2 deletions(-) diff --git a/crates/ide-db/src/syntax_helpers/format_string_exprs.rs b/crates/ide-db/src/syntax_helpers/format_string_exprs.rs index 6ff580c167..fcef71fb74 100644 --- a/crates/ide-db/src/syntax_helpers/format_string_exprs.rs +++ b/crates/ide-db/src/syntax_helpers/format_string_exprs.rs @@ -262,8 +262,6 @@ mod tests { .unwrap() .1, vec![ - Arg::Ident("_ident".to_owned()), - Arg::Ident("r#raw_ident".to_owned()), Arg::Expr("expr.obj".to_owned()), Arg::Expr("name {thing: 42}".to_owned()), Arg::Placeholder From 285f09cfa8dfb696b8bd20a1fac5db5fec1ae46d Mon Sep 17 00:00:00 2001 From: unvalley Date: Sun, 27 Nov 2022 15:36:26 +0900 Subject: [PATCH 0773/1945] feat: extract only expressions from format string --- .../extract_expressions_from_format_string.rs | 31 ++++++++++++++++--- 1 file changed, 27 insertions(+), 4 deletions(-) diff --git a/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs b/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs index b1bc2a9e6d..6b947fe364 100644 --- a/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs +++ b/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs @@ -121,15 +121,14 @@ pub(crate) fn extract_expressions_from_format_string( let mut placeholder_idx = 1; for extracted_args in extracted_args { - // remove expr from format string - args.push_str(", "); - match extracted_args { - Arg::Ident(s) | Arg::Expr(s) => { + Arg::Expr(s)=> { + args.push_str(", "); // insert arg args.push_str(&s); } Arg::Placeholder => { + args.push_str(", "); // try matching with existing argument match existing_args.next() { Some(ea) => { @@ -142,6 +141,7 @@ pub(crate) fn extract_expressions_from_format_string( } } } + Arg::Ident(_s) => (), } } @@ -292,6 +292,29 @@ fn main() { fn main() { print!("My name is {} {}"$0, stringify!(Paperino), x + x) } +"#, + ), + ); + } + + #[test] + fn extract_only_expressions() { + check_assist( + extract_expressions_from_format_string, + &add_macro_decl( + r#" +fn main() { + let var = 1 + 1; + print!("foobar {var} {var:?} {x$0 + x}") +} +"#, + ), + &add_macro_decl( + r#" +fn main() { + let var = 1 + 1; + print!("foobar {var} {var:?} {}"$0, x + x) +} "#, ), ); From 872df2f4131d1dc40a412d4b160cc031a2f30c03 Mon Sep 17 00:00:00 2001 From: unvalley Date: Sun, 27 Nov 2022 17:23:46 +0900 Subject: [PATCH 0774/1945] chore: update assist label name --- .../src/handlers/extract_expressions_from_format_string.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs b/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs index 6b947fe364..46d1d7b955 100644 --- a/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs +++ b/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs @@ -69,7 +69,7 @@ pub(crate) fn extract_expressions_from_format_string( AssistKind::QuickFix }, ), - "Extract format args", + "Extract format expressions", tt.syntax().text_range(), |edit| { let fmt_range = fmt_string.syntax().text_range(); From a310fc0cd53ff8528ef31b5021c490fd944ab199 Mon Sep 17 00:00:00 2001 From: 
unvalley Date: Sun, 27 Nov 2022 18:24:43 +0900 Subject: [PATCH 0775/1945] docs: update assist comment --- .../src/handlers/extract_expressions_from_format_string.rs | 4 ++-- crates/ide-assists/src/tests/generated.rs | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs b/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs index 46d1d7b955..4f3b6e0c28 100644 --- a/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs +++ b/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs @@ -23,7 +23,7 @@ use syntax::{ast, AstNode, AstToken, NodeOrToken, SyntaxKind::COMMA, TextRange}; // } // // fn main() { -// print!("{x + 1}$0"); +// print!("{var} {x + 1}$0"); // } // ``` // -> @@ -36,7 +36,7 @@ use syntax::{ast, AstNode, AstToken, NodeOrToken, SyntaxKind::COMMA, TextRange}; // } // // fn main() { -// print!("{}"$0, x + 1); +// print!("{var} {}"$0, x + 1); // } // ``` diff --git a/crates/ide-assists/src/tests/generated.rs b/crates/ide-assists/src/tests/generated.rs index 5e24de6207..f7ff173bb0 100644 --- a/crates/ide-assists/src/tests/generated.rs +++ b/crates/ide-assists/src/tests/generated.rs @@ -637,7 +637,7 @@ macro_rules! print { } fn main() { - print!("{x + 1}$0"); + print!("{var} {x + 1}$0"); } "#####, r#####" @@ -649,7 +649,7 @@ macro_rules! print { } fn main() { - print!("{}"$0, x + 1); + print!("{var} {}"$0, x + 1); } "#####, ) From 9eabc2cde8e76d718de81b6cd4d0091c7a8d9690 Mon Sep 17 00:00:00 2001 From: unvalley Date: Tue, 29 Nov 2022 01:39:27 +0900 Subject: [PATCH 0776/1945] fix: add_format_like_completions to handle no exprs --- .../ide-completion/src/completions/postfix.rs | 4 +-- .../src/completions/postfix/format_like.rs | 25 ++++++++++++++++--- 2 files changed, 23 insertions(+), 6 deletions(-) diff --git a/crates/ide-completion/src/completions/postfix.rs b/crates/ide-completion/src/completions/postfix.rs index 3db400604b..f4f37d77d8 100644 --- a/crates/ide-completion/src/completions/postfix.rs +++ b/crates/ide-completion/src/completions/postfix.rs @@ -595,12 +595,12 @@ fn main() { check_edit( "format", r#"fn main() { "{some_var:?}".$0 }"#, - r#"fn main() { format!("{:?}", some_var) }"#, + r#"fn main() { format!("{some_var:?}") }"#, ); check_edit( "panic", r#"fn main() { "Panic with {a}".$0 }"#, - r#"fn main() { panic!("Panic with {}", a) }"#, + r#"fn main() { panic!("Panic with {a}") }"#, ); check_edit( "println", diff --git a/crates/ide-completion/src/completions/postfix/format_like.rs b/crates/ide-completion/src/completions/postfix/format_like.rs index d64d6379a9..dfcc78e923 100644 --- a/crates/ide-completion/src/completions/postfix/format_like.rs +++ b/crates/ide-completion/src/completions/postfix/format_like.rs @@ -54,7 +54,11 @@ pub(crate) fn add_format_like_completions( if let Ok((out, exprs)) = parse_format_exprs(receiver_text.text()) { let exprs = with_placeholders(exprs); for (label, macro_name) in KINDS { - let snippet = format!(r#"{macro_name}({out}, {})"#, exprs.join(", ")); + let snippet = if exprs.is_empty() { + format!(r#"{}({})"#, macro_name, out) + } else { + format!(r#"{}({}, {})"#, macro_name, out, exprs.join(", ")) + }; postfix_snippet(label, macro_name, &snippet).add_to(acc); } @@ -72,10 +76,9 @@ mod tests { ("eprintln!", "{}", r#"eprintln!("{}", $1)"#), ( "log::info!", - "{} {expr} {} {2 + 2}", - r#"log::info!("{} {} {} {}", $1, expr, $2, 2 + 2)"#, + "{} {ident} {} {2 + 2}", + r#"log::info!("{} {ident} {} {}", 
$1, $2, 2 + 2)"#, ), - ("format!", "{expr:?}", r#"format!("{:?}", expr)"#), ]; for (kind, input, output) in test_vector { @@ -85,4 +88,18 @@ mod tests { assert_eq!(&snippet, output); } } + + #[test] + fn test_into_suggestion_no_epxrs() { + let test_vector = &[ + ("println!", "{ident}", r#"println!("{ident}")"#), + ("format!", "{ident:?}", r#"format!("{ident:?}")"#), + ]; + + for (kind, input, output) in test_vector { + let (parsed_string, _exprs) = parse_format_exprs(input).unwrap(); + let snippet = format!(r#"{}("{}")"#, kind, parsed_string); + assert_eq!(&snippet, output); + } + } } From 87d57f51bcf07eb364fcf835f9be987006158961 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 20 Dec 2022 11:31:07 +0100 Subject: [PATCH 0777/1945] Rename checkOnSave settings to flycheck --- crates/rust-analyzer/src/config.rs | 180 +++++++------- .../src/config/patch_old_style.rs | 17 +- docs/user/generated_config.adoc | 176 +++++++------- editors/code/package.json | 224 +++++++++--------- 4 files changed, 302 insertions(+), 295 deletions(-) diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index 4ee92b3d4e..25bfa0c28e 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs @@ -124,65 +124,8 @@ config_data! { /// Unsets `#[cfg(test)]` for the specified crates. cargo_unsetTest: Vec = "[\"core\"]", - /// Check all targets and tests (`--all-targets`). - checkOnSave_allTargets: bool = "true", - /// Cargo command to use for `cargo check`. - checkOnSave_command: String = "\"check\"", - /// Run specified `cargo check` command for diagnostics on save. - checkOnSave_enable: bool = "true", - /// Extra arguments for `cargo check`. - checkOnSave_extraArgs: Vec = "[]", - /// Extra environment variables that will be set when running `cargo check`. - /// Extends `#rust-analyzer.cargo.extraEnv#`. - checkOnSave_extraEnv: FxHashMap = "{}", - /// List of features to activate. Defaults to - /// `#rust-analyzer.cargo.features#`. - /// - /// Set to `"all"` to pass `--all-features` to Cargo. - checkOnSave_features: Option = "null", - /// Specifies the working directory for running checks. - /// - "workspace": run checks for workspaces in the corresponding workspaces' root directories. - // FIXME: Ideally we would support this in some way - /// This falls back to "root" if `#rust-analyzer.cargo.checkOnSave.invocationStrategy#` is set to `once`. - /// - "root": run checks in the project's root directory. - /// This config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#` - /// is set. - checkOnSave_invocationLocation: InvocationLocation = "\"workspace\"", - /// Specifies the invocation strategy to use when running the checkOnSave command. - /// If `per_workspace` is set, the command will be executed for each workspace. - /// If `once` is set, the command will be executed once. - /// This config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#` - /// is set. - checkOnSave_invocationStrategy: InvocationStrategy = "\"per_workspace\"", - /// Whether to pass `--no-default-features` to Cargo. Defaults to - /// `#rust-analyzer.cargo.noDefaultFeatures#`. - checkOnSave_noDefaultFeatures: Option = "null", - /// Override the command rust-analyzer uses instead of `cargo check` for - /// diagnostics on save. The command is required to output json and - /// should therefore include `--message-format=json` or a similar option. 
- /// - /// If you're changing this because you're using some tool wrapping - /// Cargo, you might also want to change - /// `#rust-analyzer.cargo.buildScripts.overrideCommand#`. - /// - /// If there are multiple linked projects, this command is invoked for - /// each of them, with the working directory being the project root - /// (i.e., the folder containing the `Cargo.toml`). - /// - /// An example command would be: - /// - /// ```bash - /// cargo check --workspace --message-format=json --all-targets - /// ``` - /// . - checkOnSave_overrideCommand: Option> = "null", - /// Check for specific targets. Defaults to `#rust-analyzer.cargo.target#` if empty. - /// - /// Can be a single target, e.g. `"x86_64-unknown-linux-gnu"` or a list of targets, e.g. - /// `["aarch64-apple-darwin", "x86_64-apple-darwin"]`. - /// - /// Aliased as `"checkOnSave.targets"`. - checkOnSave_target | checkOnSave_targets: Option = "null", + /// Run the flycheck command for diagnostics on save. + checkOnSave | checkOnSave_enable: bool = "true", /// Toggles the additional completions that automatically add imports when completed. /// Note that your client must specify the `additionalTextEdits` LSP client capability to truly have this feature enabled. @@ -268,6 +211,64 @@ config_data! { /// Controls file watching implementation. files_watcher: FilesWatcherDef = "\"client\"", + /// Check all targets and tests (`--all-targets`). + flycheck_allTargets | checkOnSave_allTargets: bool = "true", + /// Cargo command to use for `cargo check`. + flycheck_command | checkOnSave_command: String = "\"check\"", + /// Extra arguments for `cargo check`. + flycheck_extraArgs | checkOnSave_extraArgs: Vec = "[]", + /// Extra environment variables that will be set when running `cargo check`. + /// Extends `#rust-analyzer.cargo.extraEnv#`. + flycheck_extraEnv | checkOnSave_extraEnv: FxHashMap = "{}", + /// List of features to activate. Defaults to + /// `#rust-analyzer.cargo.features#`. + /// + /// Set to `"all"` to pass `--all-features` to Cargo. + flycheck_features | checkOnSave_features: Option = "null", + /// Specifies the working directory for running checks. + /// - "workspace": run checks for workspaces in the corresponding workspaces' root directories. + // FIXME: Ideally we would support this in some way + /// This falls back to "root" if `#rust-analyzer.cargo.checkOnSave.invocationStrategy#` is set to `once`. + /// - "root": run checks in the project's root directory. + /// This config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#` + /// is set. + flycheck_invocationLocation | checkOnSave_invocationLocation: InvocationLocation = "\"workspace\"", + /// Specifies the invocation strategy to use when running the checkOnSave command. + /// If `per_workspace` is set, the command will be executed for each workspace. + /// If `once` is set, the command will be executed once. + /// This config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#` + /// is set. + flycheck_invocationStrategy | checkOnSave_invocationStrategy: InvocationStrategy = "\"per_workspace\"", + /// Whether to pass `--no-default-features` to Cargo. Defaults to + /// `#rust-analyzer.cargo.noDefaultFeatures#`. + flycheck_noDefaultFeatures | checkOnSave_noDefaultFeatures: Option = "null", + /// Override the command rust-analyzer uses instead of `cargo check` for + /// diagnostics on save. The command is required to output json and + /// should therefore include `--message-format=json` or a similar option. 
+ /// + /// If you're changing this because you're using some tool wrapping + /// Cargo, you might also want to change + /// `#rust-analyzer.cargo.buildScripts.overrideCommand#`. + /// + /// If there are multiple linked projects, this command is invoked for + /// each of them, with the working directory being the project root + /// (i.e., the folder containing the `Cargo.toml`). + /// + /// An example command would be: + /// + /// ```bash + /// cargo check --workspace --message-format=json --all-targets + /// ``` + /// . + flycheck_overrideCommand | checkOnSave_overrideCommand: Option> = "null", + /// Check for specific targets. Defaults to `#rust-analyzer.cargo.target#` if empty. + /// + /// Can be a single target, e.g. `"x86_64-unknown-linux-gnu"` or a list of targets, e.g. + /// `["aarch64-apple-darwin", "x86_64-apple-darwin"]`. + /// + /// Aliased as `"checkOnSave.targets"`. + flycheck_targets | checkOnSave_targets | checkOnSave_target: Option = "null", + /// Enables highlighting of related references while the cursor is on `break`, `loop`, `while`, or `for` keywords. highlightRelated_breakPoints_enable: bool = "true", /// Enables highlighting of all exit points while the cursor is on any `return`, `?`, `fn`, or return type arrow (`->`). @@ -786,9 +787,9 @@ impl Config { fn validate(&self, error_sink: &mut Vec<(String, serde_json::Error)>) { use serde::de::Error; - if self.data.checkOnSave_command.is_empty() { + if self.data.flycheck_command.is_empty() { error_sink.push(( - "/checkOnSave/command".to_string(), + "/flycheck/command".to_string(), serde_json::Error::custom("expected a non-empty string"), )); } @@ -1033,7 +1034,7 @@ impl Config { pub fn check_on_save_extra_env(&self) -> FxHashMap { let mut extra_env = self.data.cargo_extraEnv.clone(); - extra_env.extend(self.data.checkOnSave_extraEnv.clone()); + extra_env.extend(self.data.flycheck_extraEnv.clone()); extra_env } @@ -1145,7 +1146,7 @@ impl Config { } pub fn flycheck(&self) -> FlycheckConfig { - match &self.data.checkOnSave_overrideCommand { + match &self.data.flycheck_overrideCommand { Some(args) if !args.is_empty() => { let mut args = args.clone(); let command = args.remove(0); @@ -1153,13 +1154,13 @@ impl Config { command, args, extra_env: self.check_on_save_extra_env(), - invocation_strategy: match self.data.checkOnSave_invocationStrategy { + invocation_strategy: match self.data.flycheck_invocationStrategy { InvocationStrategy::Once => flycheck::InvocationStrategy::Once, InvocationStrategy::PerWorkspace => { flycheck::InvocationStrategy::PerWorkspace } }, - invocation_location: match self.data.checkOnSave_invocationLocation { + invocation_location: match self.data.flycheck_invocationLocation { InvocationLocation::Root => { flycheck::InvocationLocation::Root(self.root_path.clone()) } @@ -1168,42 +1169,42 @@ impl Config { } } Some(_) | None => FlycheckConfig::CargoCommand { - command: self.data.checkOnSave_command.clone(), + command: self.data.flycheck_command.clone(), target_triples: self .data - .checkOnSave_target + .flycheck_targets .clone() .and_then(|targets| match &targets.0[..] 
{ [] => None, targets => Some(targets.into()), }) .unwrap_or_else(|| self.data.cargo_target.clone().into_iter().collect()), - all_targets: self.data.checkOnSave_allTargets, + all_targets: self.data.flycheck_allTargets, no_default_features: self .data - .checkOnSave_noDefaultFeatures + .flycheck_noDefaultFeatures .unwrap_or(self.data.cargo_noDefaultFeatures), all_features: matches!( - self.data.checkOnSave_features.as_ref().unwrap_or(&self.data.cargo_features), + self.data.flycheck_features.as_ref().unwrap_or(&self.data.cargo_features), CargoFeaturesDef::All ), features: match self .data - .checkOnSave_features + .flycheck_features .clone() .unwrap_or_else(|| self.data.cargo_features.clone()) { CargoFeaturesDef::All => vec![], CargoFeaturesDef::Selected(it) => it, }, - extra_args: self.data.checkOnSave_extraArgs.clone(), + extra_args: self.data.flycheck_extraArgs.clone(), extra_env: self.check_on_save_extra_env(), }, } } pub fn check_on_save(&self) -> bool { - self.data.checkOnSave_enable + self.data.checkOnSave } pub fn runnables(&self) -> RunnablesConfig { @@ -1862,25 +1863,27 @@ fn get_field( alias: Option<&'static str>, default: &str, ) -> T { - let default = serde_json::from_str(default).unwrap(); // XXX: check alias first, to work-around the VS Code where it pre-fills the // defaults instead of sending an empty object. alias .into_iter() .chain(iter::once(field)) - .find_map(move |field| { + .filter_map(move |field| { let mut pointer = field.replace('_', "/"); pointer.insert(0, '/'); - json.pointer_mut(&pointer).and_then(|it| match serde_json::from_value(it.take()) { - Ok(it) => Some(it), - Err(e) => { - tracing::warn!("Failed to deserialize config field at {}: {:?}", pointer, e); - error_sink.push((pointer, e)); - None - } - }) + json.pointer_mut(&pointer) + .map(|it| serde_json::from_value(it.take()).map_err(|e| (e, pointer))) }) - .unwrap_or(default) + .find(Result::is_ok) + .and_then(|res| match res { + Ok(it) => Some(it), + Err((e, pointer)) => { + tracing::warn!("Failed to deserialize config field at {}: {:?}", pointer, e); + error_sink.push((pointer, e)); + None + } + }) + .unwrap_or_else(|| serde_json::from_str(default).unwrap()) } fn schema(fields: &[(&'static str, &'static str, &[&str], &str)]) -> serde_json::Value { @@ -1964,15 +1967,6 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json "type": ["null", "array"], "items": { "type": "string" }, }, - "MergeBehaviorDef" => set! { - "type": "string", - "enum": ["none", "crate", "module"], - "enumDescriptions": [ - "Do not merge imports at all.", - "Merge imports from the same crate into a single `use` statement.", - "Merge imports from the same module into a single `use` statement." - ], - }, "ExprFillDefaultDef" => set! { "type": "string", "enum": ["todo", "default"], diff --git a/crates/rust-analyzer/src/config/patch_old_style.rs b/crates/rust-analyzer/src/config/patch_old_style.rs index 3b174a7193..38b70219cb 100644 --- a/crates/rust-analyzer/src/config/patch_old_style.rs +++ b/crates/rust-analyzer/src/config/patch_old_style.rs @@ -4,6 +4,9 @@ use serde_json::{json, Value}; /// This function patches the json config to the new expected keys. /// That is we try to load old known config keys here and convert them to the new ones. /// See https://github.com/rust-lang/rust-analyzer/pull/12010 +/// +/// We already have an alias system for simple cases, but if we make structural changes +/// the alias infra fails down. 
pub(super) fn patch_json_for_outdated_configs(json: &mut Value) { let copy = json.clone(); @@ -105,9 +108,9 @@ pub(super) fn patch_json_for_outdated_configs(json: &mut Value) { merge(json, json!({ "cargo": { "features": "all" } })); } - // checkOnSave_allFeatures, checkOnSave_features -> checkOnSave_features + // checkOnSave_allFeatures, checkOnSave_features -> flycheck_features if let Some(Value::Bool(true)) = copy.pointer("/checkOnSave/allFeatures") { - merge(json, json!({ "checkOnSave": { "features": "all" } })); + merge(json, json!({ "flycheck": { "features": "all" } })); } // completion_addCallArgumentSnippets completion_addCallParenthesis -> completion_callable_snippets @@ -121,6 +124,16 @@ pub(super) fn patch_json_for_outdated_configs(json: &mut Value) { (_, _) => return, }; merge(json, json!({ "completion": { "callable": {"snippets": res }} })); + + // We need to do this due to the checkOnSave_enable -> checkOnSave change, as that key now can either be an object or a bool + // checkOnSave_* -> flycheck_* + if let Some(Value::Object(obj)) = copy.pointer("/checkOnSave") { + // checkOnSave_enable -> checkOnSave + if let Some(b @ Value::Bool(_)) = obj.get("enable") { + merge(json, json!({ "checkOnSave": b })); + } + merge(json, json!({ "flycheck": obj })); + } } fn merge(dst: &mut Value, src: Value) { diff --git a/docs/user/generated_config.adoc b/docs/user/generated_config.adoc index 91f8e98449..e6da4f38ab 100644 --- a/docs/user/generated_config.adoc +++ b/docs/user/generated_config.adoc @@ -109,96 +109,10 @@ Compilation target override (target triple). -- Unsets `#[cfg(test)]` for the specified crates. -- -[[rust-analyzer.checkOnSave.allTargets]]rust-analyzer.checkOnSave.allTargets (default: `true`):: +[[rust-analyzer.checkOnSave]]rust-analyzer.checkOnSave (default: `true`):: + -- -Check all targets and tests (`--all-targets`). --- -[[rust-analyzer.checkOnSave.command]]rust-analyzer.checkOnSave.command (default: `"check"`):: -+ --- -Cargo command to use for `cargo check`. --- -[[rust-analyzer.checkOnSave.enable]]rust-analyzer.checkOnSave.enable (default: `true`):: -+ --- -Run specified `cargo check` command for diagnostics on save. --- -[[rust-analyzer.checkOnSave.extraArgs]]rust-analyzer.checkOnSave.extraArgs (default: `[]`):: -+ --- -Extra arguments for `cargo check`. --- -[[rust-analyzer.checkOnSave.extraEnv]]rust-analyzer.checkOnSave.extraEnv (default: `{}`):: -+ --- -Extra environment variables that will be set when running `cargo check`. -Extends `#rust-analyzer.cargo.extraEnv#`. --- -[[rust-analyzer.checkOnSave.features]]rust-analyzer.checkOnSave.features (default: `null`):: -+ --- -List of features to activate. Defaults to -`#rust-analyzer.cargo.features#`. - -Set to `"all"` to pass `--all-features` to Cargo. --- -[[rust-analyzer.checkOnSave.invocationLocation]]rust-analyzer.checkOnSave.invocationLocation (default: `"workspace"`):: -+ --- -Specifies the working directory for running checks. -- "workspace": run checks for workspaces in the corresponding workspaces' root directories. - This falls back to "root" if `#rust-analyzer.cargo.checkOnSave.invocationStrategy#` is set to `once`. -- "root": run checks in the project's root directory. -This config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#` -is set. --- -[[rust-analyzer.checkOnSave.invocationStrategy]]rust-analyzer.checkOnSave.invocationStrategy (default: `"per_workspace"`):: -+ --- -Specifies the invocation strategy to use when running the checkOnSave command. 
-If `per_workspace` is set, the command will be executed for each workspace. -If `once` is set, the command will be executed once. -This config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#` -is set. --- -[[rust-analyzer.checkOnSave.noDefaultFeatures]]rust-analyzer.checkOnSave.noDefaultFeatures (default: `null`):: -+ --- -Whether to pass `--no-default-features` to Cargo. Defaults to -`#rust-analyzer.cargo.noDefaultFeatures#`. --- -[[rust-analyzer.checkOnSave.overrideCommand]]rust-analyzer.checkOnSave.overrideCommand (default: `null`):: -+ --- -Override the command rust-analyzer uses instead of `cargo check` for -diagnostics on save. The command is required to output json and -should therefore include `--message-format=json` or a similar option. - -If you're changing this because you're using some tool wrapping -Cargo, you might also want to change -`#rust-analyzer.cargo.buildScripts.overrideCommand#`. - -If there are multiple linked projects, this command is invoked for -each of them, with the working directory being the project root -(i.e., the folder containing the `Cargo.toml`). - -An example command would be: - -```bash -cargo check --workspace --message-format=json --all-targets -``` -. --- -[[rust-analyzer.checkOnSave.target]]rust-analyzer.checkOnSave.target (default: `null`):: -+ --- -Check for specific targets. Defaults to `#rust-analyzer.cargo.target#` if empty. - -Can be a single target, e.g. `"x86_64-unknown-linux-gnu"` or a list of targets, e.g. -`["aarch64-apple-darwin", "x86_64-apple-darwin"]`. - -Aliased as `"checkOnSave.targets"`. +Run the flycheck command for diagnostics on save. -- [[rust-analyzer.completion.autoimport.enable]]rust-analyzer.completion.autoimport.enable (default: `true`):: + @@ -327,6 +241,92 @@ also need to add the folders to Code's `files.watcherExclude`. -- Controls file watching implementation. -- +[[rust-analyzer.flycheck.allTargets]]rust-analyzer.flycheck.allTargets (default: `true`):: ++ +-- +Check all targets and tests (`--all-targets`). +-- +[[rust-analyzer.flycheck.command]]rust-analyzer.flycheck.command (default: `"check"`):: ++ +-- +Cargo command to use for `cargo check`. +-- +[[rust-analyzer.flycheck.extraArgs]]rust-analyzer.flycheck.extraArgs (default: `[]`):: ++ +-- +Extra arguments for `cargo check`. +-- +[[rust-analyzer.flycheck.extraEnv]]rust-analyzer.flycheck.extraEnv (default: `{}`):: ++ +-- +Extra environment variables that will be set when running `cargo check`. +Extends `#rust-analyzer.cargo.extraEnv#`. +-- +[[rust-analyzer.flycheck.features]]rust-analyzer.flycheck.features (default: `null`):: ++ +-- +List of features to activate. Defaults to +`#rust-analyzer.cargo.features#`. + +Set to `"all"` to pass `--all-features` to Cargo. +-- +[[rust-analyzer.flycheck.invocationLocation]]rust-analyzer.flycheck.invocationLocation (default: `"workspace"`):: ++ +-- +Specifies the working directory for running checks. +- "workspace": run checks for workspaces in the corresponding workspaces' root directories. + This falls back to "root" if `#rust-analyzer.cargo.checkOnSave.invocationStrategy#` is set to `once`. +- "root": run checks in the project's root directory. +This config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#` +is set. +-- +[[rust-analyzer.flycheck.invocationStrategy]]rust-analyzer.flycheck.invocationStrategy (default: `"per_workspace"`):: ++ +-- +Specifies the invocation strategy to use when running the checkOnSave command. 
+If `per_workspace` is set, the command will be executed for each workspace. +If `once` is set, the command will be executed once. +This config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#` +is set. +-- +[[rust-analyzer.flycheck.noDefaultFeatures]]rust-analyzer.flycheck.noDefaultFeatures (default: `null`):: ++ +-- +Whether to pass `--no-default-features` to Cargo. Defaults to +`#rust-analyzer.cargo.noDefaultFeatures#`. +-- +[[rust-analyzer.flycheck.overrideCommand]]rust-analyzer.flycheck.overrideCommand (default: `null`):: ++ +-- +Override the command rust-analyzer uses instead of `cargo check` for +diagnostics on save. The command is required to output json and +should therefore include `--message-format=json` or a similar option. + +If you're changing this because you're using some tool wrapping +Cargo, you might also want to change +`#rust-analyzer.cargo.buildScripts.overrideCommand#`. + +If there are multiple linked projects, this command is invoked for +each of them, with the working directory being the project root +(i.e., the folder containing the `Cargo.toml`). + +An example command would be: + +```bash +cargo check --workspace --message-format=json --all-targets +``` +. +-- +[[rust-analyzer.flycheck.targets]]rust-analyzer.flycheck.targets (default: `null`):: ++ +-- +Check for specific targets. Defaults to `#rust-analyzer.cargo.target#` if empty. + +Can be a single target, e.g. `"x86_64-unknown-linux-gnu"` or a list of targets, e.g. +`["aarch64-apple-darwin", "x86_64-apple-darwin"]`. + +Aliased as `"checkOnSave.targets"`. +-- [[rust-analyzer.highlightRelated.breakPoints.enable]]rust-analyzer.highlightRelated.breakPoints.enable (default: `true`):: + -- diff --git a/editors/code/package.json b/editors/code/package.json index 454b95a63b..4fe829382d 100644 --- a/editors/code/package.json +++ b/editors/code/package.json @@ -556,121 +556,11 @@ "type": "string" } }, - "rust-analyzer.checkOnSave.allTargets": { - "markdownDescription": "Check all targets and tests (`--all-targets`).", + "rust-analyzer.checkOnSave": { + "markdownDescription": "Run the flycheck command for diagnostics on save.", "default": true, "type": "boolean" }, - "rust-analyzer.checkOnSave.command": { - "markdownDescription": "Cargo command to use for `cargo check`.", - "default": "check", - "type": "string" - }, - "rust-analyzer.checkOnSave.enable": { - "markdownDescription": "Run specified `cargo check` command for diagnostics on save.", - "default": true, - "type": "boolean" - }, - "rust-analyzer.checkOnSave.extraArgs": { - "markdownDescription": "Extra arguments for `cargo check`.", - "default": [], - "type": "array", - "items": { - "type": "string" - } - }, - "rust-analyzer.checkOnSave.extraEnv": { - "markdownDescription": "Extra environment variables that will be set when running `cargo check`.\nExtends `#rust-analyzer.cargo.extraEnv#`.", - "default": {}, - "type": "object" - }, - "rust-analyzer.checkOnSave.features": { - "markdownDescription": "List of features to activate. 
Defaults to\n`#rust-analyzer.cargo.features#`.\n\nSet to `\"all\"` to pass `--all-features` to Cargo.", - "default": null, - "anyOf": [ - { - "type": "string", - "enum": [ - "all" - ], - "enumDescriptions": [ - "Pass `--all-features` to cargo" - ] - }, - { - "type": "array", - "items": { - "type": "string" - } - }, - { - "type": "null" - } - ] - }, - "rust-analyzer.checkOnSave.invocationLocation": { - "markdownDescription": "Specifies the working directory for running checks.\n- \"workspace\": run checks for workspaces in the corresponding workspaces' root directories.\n This falls back to \"root\" if `#rust-analyzer.cargo.checkOnSave.invocationStrategy#` is set to `once`.\n- \"root\": run checks in the project's root directory.\nThis config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#`\nis set.", - "default": "workspace", - "type": "string", - "enum": [ - "workspace", - "root" - ], - "enumDescriptions": [ - "The command will be executed in the corresponding workspace root.", - "The command will be executed in the project root." - ] - }, - "rust-analyzer.checkOnSave.invocationStrategy": { - "markdownDescription": "Specifies the invocation strategy to use when running the checkOnSave command.\nIf `per_workspace` is set, the command will be executed for each workspace.\nIf `once` is set, the command will be executed once.\nThis config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#`\nis set.", - "default": "per_workspace", - "type": "string", - "enum": [ - "per_workspace", - "once" - ], - "enumDescriptions": [ - "The command will be executed for each workspace.", - "The command will be executed once." - ] - }, - "rust-analyzer.checkOnSave.noDefaultFeatures": { - "markdownDescription": "Whether to pass `--no-default-features` to Cargo. Defaults to\n`#rust-analyzer.cargo.noDefaultFeatures#`.", - "default": null, - "type": [ - "null", - "boolean" - ] - }, - "rust-analyzer.checkOnSave.overrideCommand": { - "markdownDescription": "Override the command rust-analyzer uses instead of `cargo check` for\ndiagnostics on save. The command is required to output json and\nshould therefore include `--message-format=json` or a similar option.\n\nIf you're changing this because you're using some tool wrapping\nCargo, you might also want to change\n`#rust-analyzer.cargo.buildScripts.overrideCommand#`.\n\nIf there are multiple linked projects, this command is invoked for\neach of them, with the working directory being the project root\n(i.e., the folder containing the `Cargo.toml`).\n\nAn example command would be:\n\n```bash\ncargo check --workspace --message-format=json --all-targets\n```\n.", - "default": null, - "type": [ - "null", - "array" - ], - "items": { - "type": "string" - } - }, - "rust-analyzer.checkOnSave.target": { - "markdownDescription": "Check for specific targets. Defaults to `#rust-analyzer.cargo.target#` if empty.\n\nCan be a single target, e.g. 
`\"x86_64-unknown-linux-gnu\"` or a list of targets, e.g.\n`[\"aarch64-apple-darwin\", \"x86_64-apple-darwin\"]`.\n\nAliased as `\"checkOnSave.targets\"`.", - "default": null, - "anyOf": [ - { - "type": "null" - }, - { - "type": "string" - }, - { - "type": "array", - "items": { - "type": "string" - } - } - ] - }, "rust-analyzer.completion.autoimport.enable": { "markdownDescription": "Toggles the additional completions that automatically add imports when completed.\nNote that your client must specify the `additionalTextEdits` LSP client capability to truly have this feature enabled.", "default": true, @@ -812,6 +702,116 @@ "Use server-side file watching" ] }, + "rust-analyzer.flycheck.allTargets": { + "markdownDescription": "Check all targets and tests (`--all-targets`).", + "default": true, + "type": "boolean" + }, + "rust-analyzer.flycheck.command": { + "markdownDescription": "Cargo command to use for `cargo check`.", + "default": "check", + "type": "string" + }, + "rust-analyzer.flycheck.extraArgs": { + "markdownDescription": "Extra arguments for `cargo check`.", + "default": [], + "type": "array", + "items": { + "type": "string" + } + }, + "rust-analyzer.flycheck.extraEnv": { + "markdownDescription": "Extra environment variables that will be set when running `cargo check`.\nExtends `#rust-analyzer.cargo.extraEnv#`.", + "default": {}, + "type": "object" + }, + "rust-analyzer.flycheck.features": { + "markdownDescription": "List of features to activate. Defaults to\n`#rust-analyzer.cargo.features#`.\n\nSet to `\"all\"` to pass `--all-features` to Cargo.", + "default": null, + "anyOf": [ + { + "type": "string", + "enum": [ + "all" + ], + "enumDescriptions": [ + "Pass `--all-features` to cargo" + ] + }, + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "null" + } + ] + }, + "rust-analyzer.flycheck.invocationLocation": { + "markdownDescription": "Specifies the working directory for running checks.\n- \"workspace\": run checks for workspaces in the corresponding workspaces' root directories.\n This falls back to \"root\" if `#rust-analyzer.cargo.checkOnSave.invocationStrategy#` is set to `once`.\n- \"root\": run checks in the project's root directory.\nThis config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#`\nis set.", + "default": "workspace", + "type": "string", + "enum": [ + "workspace", + "root" + ], + "enumDescriptions": [ + "The command will be executed in the corresponding workspace root.", + "The command will be executed in the project root." + ] + }, + "rust-analyzer.flycheck.invocationStrategy": { + "markdownDescription": "Specifies the invocation strategy to use when running the checkOnSave command.\nIf `per_workspace` is set, the command will be executed for each workspace.\nIf `once` is set, the command will be executed once.\nThis config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#`\nis set.", + "default": "per_workspace", + "type": "string", + "enum": [ + "per_workspace", + "once" + ], + "enumDescriptions": [ + "The command will be executed for each workspace.", + "The command will be executed once." + ] + }, + "rust-analyzer.flycheck.noDefaultFeatures": { + "markdownDescription": "Whether to pass `--no-default-features` to Cargo. 
Defaults to\n`#rust-analyzer.cargo.noDefaultFeatures#`.", + "default": null, + "type": [ + "null", + "boolean" + ] + }, + "rust-analyzer.flycheck.overrideCommand": { + "markdownDescription": "Override the command rust-analyzer uses instead of `cargo check` for\ndiagnostics on save. The command is required to output json and\nshould therefore include `--message-format=json` or a similar option.\n\nIf you're changing this because you're using some tool wrapping\nCargo, you might also want to change\n`#rust-analyzer.cargo.buildScripts.overrideCommand#`.\n\nIf there are multiple linked projects, this command is invoked for\neach of them, with the working directory being the project root\n(i.e., the folder containing the `Cargo.toml`).\n\nAn example command would be:\n\n```bash\ncargo check --workspace --message-format=json --all-targets\n```\n.", + "default": null, + "type": [ + "null", + "array" + ], + "items": { + "type": "string" + } + }, + "rust-analyzer.flycheck.targets": { + "markdownDescription": "Check for specific targets. Defaults to `#rust-analyzer.cargo.target#` if empty.\n\nCan be a single target, e.g. `\"x86_64-unknown-linux-gnu\"` or a list of targets, e.g.\n`[\"aarch64-apple-darwin\", \"x86_64-apple-darwin\"]`.\n\nAliased as `\"checkOnSave.targets\"`.", + "default": null, + "anyOf": [ + { + "type": "null" + }, + { + "type": "string" + }, + { + "type": "array", + "items": { + "type": "string" + } + } + ] + }, "rust-analyzer.highlightRelated.breakPoints.enable": { "markdownDescription": "Enables highlighting of related references while the cursor is on `break`, `loop`, `while`, or `for` keywords.", "default": true, From d2bb62b6a81d26f1e41712e04d4ac760f860d3b3 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 9 Jan 2023 14:15:13 +0100 Subject: [PATCH 0778/1945] Rename checkOnSave settings to check --- crates/rust-analyzer/src/config.rs | 151 ++++++------ .../src/config/patch_old_style.rs | 8 +- docs/user/generated_config.adoc | 174 +++++++------- editors/code/package.json | 222 +++++++++--------- 4 files changed, 274 insertions(+), 281 deletions(-) diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index 25bfa0c28e..b97c8106be 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs @@ -124,9 +124,67 @@ config_data! { /// Unsets `#[cfg(test)]` for the specified crates. cargo_unsetTest: Vec = "[\"core\"]", - /// Run the flycheck command for diagnostics on save. + /// Run the check command for diagnostics on save. checkOnSave | checkOnSave_enable: bool = "true", + /// Check all targets and tests (`--all-targets`). + check_allTargets | checkOnSave_allTargets: bool = "true", + /// Cargo command to use for `cargo check`. + check_command | checkOnSave_command: String = "\"check\"", + /// Extra arguments for `cargo check`. + check_extraArgs | checkOnSave_extraArgs: Vec = "[]", + /// Extra environment variables that will be set when running `cargo check`. + /// Extends `#rust-analyzer.cargo.extraEnv#`. + check_extraEnv | checkOnSave_extraEnv: FxHashMap = "{}", + /// List of features to activate. Defaults to + /// `#rust-analyzer.cargo.features#`. + /// + /// Set to `"all"` to pass `--all-features` to Cargo. + check_features | checkOnSave_features: Option = "null", + /// Specifies the working directory for running checks. + /// - "workspace": run checks for workspaces in the corresponding workspaces' root directories. 
+ // FIXME: Ideally we would support this in some way + /// This falls back to "root" if `#rust-analyzer.cargo.checkOnSave.invocationStrategy#` is set to `once`. + /// - "root": run checks in the project's root directory. + /// This config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#` + /// is set. + check_invocationLocation | checkOnSave_invocationLocation: InvocationLocation = "\"workspace\"", + /// Specifies the invocation strategy to use when running the checkOnSave command. + /// If `per_workspace` is set, the command will be executed for each workspace. + /// If `once` is set, the command will be executed once. + /// This config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#` + /// is set. + check_invocationStrategy | checkOnSave_invocationStrategy: InvocationStrategy = "\"per_workspace\"", + /// Whether to pass `--no-default-features` to Cargo. Defaults to + /// `#rust-analyzer.cargo.noDefaultFeatures#`. + check_noDefaultFeatures | checkOnSave_noDefaultFeatures: Option = "null", + /// Override the command rust-analyzer uses instead of `cargo check` for + /// diagnostics on save. The command is required to output json and + /// should therefore include `--message-format=json` or a similar option. + /// + /// If you're changing this because you're using some tool wrapping + /// Cargo, you might also want to change + /// `#rust-analyzer.cargo.buildScripts.overrideCommand#`. + /// + /// If there are multiple linked projects, this command is invoked for + /// each of them, with the working directory being the project root + /// (i.e., the folder containing the `Cargo.toml`). + /// + /// An example command would be: + /// + /// ```bash + /// cargo check --workspace --message-format=json --all-targets + /// ``` + /// . + check_overrideCommand | checkOnSave_overrideCommand: Option> = "null", + /// Check for specific targets. Defaults to `#rust-analyzer.cargo.target#` if empty. + /// + /// Can be a single target, e.g. `"x86_64-unknown-linux-gnu"` or a list of targets, e.g. + /// `["aarch64-apple-darwin", "x86_64-apple-darwin"]`. + /// + /// Aliased as `"checkOnSave.targets"`. + check_targets | checkOnSave_targets | checkOnSave_target: Option = "null", + /// Toggles the additional completions that automatically add imports when completed. /// Note that your client must specify the `additionalTextEdits` LSP client capability to truly have this feature enabled. completion_autoimport_enable: bool = "true", @@ -211,64 +269,6 @@ config_data! { /// Controls file watching implementation. files_watcher: FilesWatcherDef = "\"client\"", - /// Check all targets and tests (`--all-targets`). - flycheck_allTargets | checkOnSave_allTargets: bool = "true", - /// Cargo command to use for `cargo check`. - flycheck_command | checkOnSave_command: String = "\"check\"", - /// Extra arguments for `cargo check`. - flycheck_extraArgs | checkOnSave_extraArgs: Vec = "[]", - /// Extra environment variables that will be set when running `cargo check`. - /// Extends `#rust-analyzer.cargo.extraEnv#`. - flycheck_extraEnv | checkOnSave_extraEnv: FxHashMap = "{}", - /// List of features to activate. Defaults to - /// `#rust-analyzer.cargo.features#`. - /// - /// Set to `"all"` to pass `--all-features` to Cargo. - flycheck_features | checkOnSave_features: Option = "null", - /// Specifies the working directory for running checks. - /// - "workspace": run checks for workspaces in the corresponding workspaces' root directories. 
- // FIXME: Ideally we would support this in some way - /// This falls back to "root" if `#rust-analyzer.cargo.checkOnSave.invocationStrategy#` is set to `once`. - /// - "root": run checks in the project's root directory. - /// This config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#` - /// is set. - flycheck_invocationLocation | checkOnSave_invocationLocation: InvocationLocation = "\"workspace\"", - /// Specifies the invocation strategy to use when running the checkOnSave command. - /// If `per_workspace` is set, the command will be executed for each workspace. - /// If `once` is set, the command will be executed once. - /// This config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#` - /// is set. - flycheck_invocationStrategy | checkOnSave_invocationStrategy: InvocationStrategy = "\"per_workspace\"", - /// Whether to pass `--no-default-features` to Cargo. Defaults to - /// `#rust-analyzer.cargo.noDefaultFeatures#`. - flycheck_noDefaultFeatures | checkOnSave_noDefaultFeatures: Option = "null", - /// Override the command rust-analyzer uses instead of `cargo check` for - /// diagnostics on save. The command is required to output json and - /// should therefore include `--message-format=json` or a similar option. - /// - /// If you're changing this because you're using some tool wrapping - /// Cargo, you might also want to change - /// `#rust-analyzer.cargo.buildScripts.overrideCommand#`. - /// - /// If there are multiple linked projects, this command is invoked for - /// each of them, with the working directory being the project root - /// (i.e., the folder containing the `Cargo.toml`). - /// - /// An example command would be: - /// - /// ```bash - /// cargo check --workspace --message-format=json --all-targets - /// ``` - /// . - flycheck_overrideCommand | checkOnSave_overrideCommand: Option> = "null", - /// Check for specific targets. Defaults to `#rust-analyzer.cargo.target#` if empty. - /// - /// Can be a single target, e.g. `"x86_64-unknown-linux-gnu"` or a list of targets, e.g. - /// `["aarch64-apple-darwin", "x86_64-apple-darwin"]`. - /// - /// Aliased as `"checkOnSave.targets"`. - flycheck_targets | checkOnSave_targets | checkOnSave_target: Option = "null", - /// Enables highlighting of related references while the cursor is on `break`, `loop`, `while`, or `for` keywords. highlightRelated_breakPoints_enable: bool = "true", /// Enables highlighting of all exit points while the cursor is on any `return`, `?`, `fn`, or return type arrow (`->`). 
@@ -787,9 +787,9 @@ impl Config { fn validate(&self, error_sink: &mut Vec<(String, serde_json::Error)>) { use serde::de::Error; - if self.data.flycheck_command.is_empty() { + if self.data.check_command.is_empty() { error_sink.push(( - "/flycheck/command".to_string(), + "/check/command".to_string(), serde_json::Error::custom("expected a non-empty string"), )); } @@ -1034,7 +1034,7 @@ impl Config { pub fn check_on_save_extra_env(&self) -> FxHashMap { let mut extra_env = self.data.cargo_extraEnv.clone(); - extra_env.extend(self.data.flycheck_extraEnv.clone()); + extra_env.extend(self.data.check_extraEnv.clone()); extra_env } @@ -1146,7 +1146,7 @@ impl Config { } pub fn flycheck(&self) -> FlycheckConfig { - match &self.data.flycheck_overrideCommand { + match &self.data.check_overrideCommand { Some(args) if !args.is_empty() => { let mut args = args.clone(); let command = args.remove(0); @@ -1154,13 +1154,13 @@ impl Config { command, args, extra_env: self.check_on_save_extra_env(), - invocation_strategy: match self.data.flycheck_invocationStrategy { + invocation_strategy: match self.data.check_invocationStrategy { InvocationStrategy::Once => flycheck::InvocationStrategy::Once, InvocationStrategy::PerWorkspace => { flycheck::InvocationStrategy::PerWorkspace } }, - invocation_location: match self.data.flycheck_invocationLocation { + invocation_location: match self.data.check_invocationLocation { InvocationLocation::Root => { flycheck::InvocationLocation::Root(self.root_path.clone()) } @@ -1169,35 +1169,35 @@ impl Config { } } Some(_) | None => FlycheckConfig::CargoCommand { - command: self.data.flycheck_command.clone(), + command: self.data.check_command.clone(), target_triples: self .data - .flycheck_targets + .check_targets .clone() .and_then(|targets| match &targets.0[..] 
{ [] => None, targets => Some(targets.into()), }) .unwrap_or_else(|| self.data.cargo_target.clone().into_iter().collect()), - all_targets: self.data.flycheck_allTargets, + all_targets: self.data.check_allTargets, no_default_features: self .data - .flycheck_noDefaultFeatures + .check_noDefaultFeatures .unwrap_or(self.data.cargo_noDefaultFeatures), all_features: matches!( - self.data.flycheck_features.as_ref().unwrap_or(&self.data.cargo_features), + self.data.check_features.as_ref().unwrap_or(&self.data.cargo_features), CargoFeaturesDef::All ), features: match self .data - .flycheck_features + .check_features .clone() .unwrap_or_else(|| self.data.cargo_features.clone()) { CargoFeaturesDef::All => vec![], CargoFeaturesDef::Selected(it) => it, }, - extra_args: self.data.flycheck_extraArgs.clone(), + extra_args: self.data.check_extraArgs.clone(), extra_env: self.check_on_save_extra_env(), }, } @@ -1887,13 +1887,6 @@ fn get_field( } fn schema(fields: &[(&'static str, &'static str, &[&str], &str)]) -> serde_json::Value { - for ((f1, ..), (f2, ..)) in fields.iter().zip(&fields[1..]) { - fn key(f: &str) -> &str { - f.splitn(2, '_').next().unwrap() - } - assert!(key(f1) <= key(f2), "wrong field order: {f1:?} {f2:?}"); - } - let map = fields .iter() .map(|(field, ty, doc, default)| { diff --git a/crates/rust-analyzer/src/config/patch_old_style.rs b/crates/rust-analyzer/src/config/patch_old_style.rs index 38b70219cb..de6ac946a6 100644 --- a/crates/rust-analyzer/src/config/patch_old_style.rs +++ b/crates/rust-analyzer/src/config/patch_old_style.rs @@ -108,9 +108,9 @@ pub(super) fn patch_json_for_outdated_configs(json: &mut Value) { merge(json, json!({ "cargo": { "features": "all" } })); } - // checkOnSave_allFeatures, checkOnSave_features -> flycheck_features + // checkOnSave_allFeatures, checkOnSave_features -> check_features if let Some(Value::Bool(true)) = copy.pointer("/checkOnSave/allFeatures") { - merge(json, json!({ "flycheck": { "features": "all" } })); + merge(json, json!({ "check": { "features": "all" } })); } // completion_addCallArgumentSnippets completion_addCallParenthesis -> completion_callable_snippets @@ -126,13 +126,13 @@ pub(super) fn patch_json_for_outdated_configs(json: &mut Value) { merge(json, json!({ "completion": { "callable": {"snippets": res }} })); // We need to do this due to the checkOnSave_enable -> checkOnSave change, as that key now can either be an object or a bool - // checkOnSave_* -> flycheck_* + // checkOnSave_* -> check_* if let Some(Value::Object(obj)) = copy.pointer("/checkOnSave") { // checkOnSave_enable -> checkOnSave if let Some(b @ Value::Bool(_)) = obj.get("enable") { merge(json, json!({ "checkOnSave": b })); } - merge(json, json!({ "flycheck": obj })); + merge(json, json!({ "check": obj })); } } diff --git a/docs/user/generated_config.adoc b/docs/user/generated_config.adoc index e6da4f38ab..7bdfd4288a 100644 --- a/docs/user/generated_config.adoc +++ b/docs/user/generated_config.adoc @@ -112,7 +112,93 @@ Unsets `#[cfg(test)]` for the specified crates. [[rust-analyzer.checkOnSave]]rust-analyzer.checkOnSave (default: `true`):: + -- -Run the flycheck command for diagnostics on save. +Run the check command for diagnostics on save. +-- +[[rust-analyzer.check.allTargets]]rust-analyzer.check.allTargets (default: `true`):: ++ +-- +Check all targets and tests (`--all-targets`). +-- +[[rust-analyzer.check.command]]rust-analyzer.check.command (default: `"check"`):: ++ +-- +Cargo command to use for `cargo check`. 
+-- +[[rust-analyzer.check.extraArgs]]rust-analyzer.check.extraArgs (default: `[]`):: ++ +-- +Extra arguments for `cargo check`. +-- +[[rust-analyzer.check.extraEnv]]rust-analyzer.check.extraEnv (default: `{}`):: ++ +-- +Extra environment variables that will be set when running `cargo check`. +Extends `#rust-analyzer.cargo.extraEnv#`. +-- +[[rust-analyzer.check.features]]rust-analyzer.check.features (default: `null`):: ++ +-- +List of features to activate. Defaults to +`#rust-analyzer.cargo.features#`. + +Set to `"all"` to pass `--all-features` to Cargo. +-- +[[rust-analyzer.check.invocationLocation]]rust-analyzer.check.invocationLocation (default: `"workspace"`):: ++ +-- +Specifies the working directory for running checks. +- "workspace": run checks for workspaces in the corresponding workspaces' root directories. + This falls back to "root" if `#rust-analyzer.cargo.checkOnSave.invocationStrategy#` is set to `once`. +- "root": run checks in the project's root directory. +This config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#` +is set. +-- +[[rust-analyzer.check.invocationStrategy]]rust-analyzer.check.invocationStrategy (default: `"per_workspace"`):: ++ +-- +Specifies the invocation strategy to use when running the checkOnSave command. +If `per_workspace` is set, the command will be executed for each workspace. +If `once` is set, the command will be executed once. +This config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#` +is set. +-- +[[rust-analyzer.check.noDefaultFeatures]]rust-analyzer.check.noDefaultFeatures (default: `null`):: ++ +-- +Whether to pass `--no-default-features` to Cargo. Defaults to +`#rust-analyzer.cargo.noDefaultFeatures#`. +-- +[[rust-analyzer.check.overrideCommand]]rust-analyzer.check.overrideCommand (default: `null`):: ++ +-- +Override the command rust-analyzer uses instead of `cargo check` for +diagnostics on save. The command is required to output json and +should therefore include `--message-format=json` or a similar option. + +If you're changing this because you're using some tool wrapping +Cargo, you might also want to change +`#rust-analyzer.cargo.buildScripts.overrideCommand#`. + +If there are multiple linked projects, this command is invoked for +each of them, with the working directory being the project root +(i.e., the folder containing the `Cargo.toml`). + +An example command would be: + +```bash +cargo check --workspace --message-format=json --all-targets +``` +. +-- +[[rust-analyzer.check.targets]]rust-analyzer.check.targets (default: `null`):: ++ +-- +Check for specific targets. Defaults to `#rust-analyzer.cargo.target#` if empty. + +Can be a single target, e.g. `"x86_64-unknown-linux-gnu"` or a list of targets, e.g. +`["aarch64-apple-darwin", "x86_64-apple-darwin"]`. + +Aliased as `"checkOnSave.targets"`. -- [[rust-analyzer.completion.autoimport.enable]]rust-analyzer.completion.autoimport.enable (default: `true`):: + @@ -241,92 +327,6 @@ also need to add the folders to Code's `files.watcherExclude`. -- Controls file watching implementation. -- -[[rust-analyzer.flycheck.allTargets]]rust-analyzer.flycheck.allTargets (default: `true`):: -+ --- -Check all targets and tests (`--all-targets`). --- -[[rust-analyzer.flycheck.command]]rust-analyzer.flycheck.command (default: `"check"`):: -+ --- -Cargo command to use for `cargo check`. --- -[[rust-analyzer.flycheck.extraArgs]]rust-analyzer.flycheck.extraArgs (default: `[]`):: -+ --- -Extra arguments for `cargo check`. 
--- -[[rust-analyzer.flycheck.extraEnv]]rust-analyzer.flycheck.extraEnv (default: `{}`):: -+ --- -Extra environment variables that will be set when running `cargo check`. -Extends `#rust-analyzer.cargo.extraEnv#`. --- -[[rust-analyzer.flycheck.features]]rust-analyzer.flycheck.features (default: `null`):: -+ --- -List of features to activate. Defaults to -`#rust-analyzer.cargo.features#`. - -Set to `"all"` to pass `--all-features` to Cargo. --- -[[rust-analyzer.flycheck.invocationLocation]]rust-analyzer.flycheck.invocationLocation (default: `"workspace"`):: -+ --- -Specifies the working directory for running checks. -- "workspace": run checks for workspaces in the corresponding workspaces' root directories. - This falls back to "root" if `#rust-analyzer.cargo.checkOnSave.invocationStrategy#` is set to `once`. -- "root": run checks in the project's root directory. -This config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#` -is set. --- -[[rust-analyzer.flycheck.invocationStrategy]]rust-analyzer.flycheck.invocationStrategy (default: `"per_workspace"`):: -+ --- -Specifies the invocation strategy to use when running the checkOnSave command. -If `per_workspace` is set, the command will be executed for each workspace. -If `once` is set, the command will be executed once. -This config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#` -is set. --- -[[rust-analyzer.flycheck.noDefaultFeatures]]rust-analyzer.flycheck.noDefaultFeatures (default: `null`):: -+ --- -Whether to pass `--no-default-features` to Cargo. Defaults to -`#rust-analyzer.cargo.noDefaultFeatures#`. --- -[[rust-analyzer.flycheck.overrideCommand]]rust-analyzer.flycheck.overrideCommand (default: `null`):: -+ --- -Override the command rust-analyzer uses instead of `cargo check` for -diagnostics on save. The command is required to output json and -should therefore include `--message-format=json` or a similar option. - -If you're changing this because you're using some tool wrapping -Cargo, you might also want to change -`#rust-analyzer.cargo.buildScripts.overrideCommand#`. - -If there are multiple linked projects, this command is invoked for -each of them, with the working directory being the project root -(i.e., the folder containing the `Cargo.toml`). - -An example command would be: - -```bash -cargo check --workspace --message-format=json --all-targets -``` -. --- -[[rust-analyzer.flycheck.targets]]rust-analyzer.flycheck.targets (default: `null`):: -+ --- -Check for specific targets. Defaults to `#rust-analyzer.cargo.target#` if empty. - -Can be a single target, e.g. `"x86_64-unknown-linux-gnu"` or a list of targets, e.g. -`["aarch64-apple-darwin", "x86_64-apple-darwin"]`. - -Aliased as `"checkOnSave.targets"`. 
--- [[rust-analyzer.highlightRelated.breakPoints.enable]]rust-analyzer.highlightRelated.breakPoints.enable (default: `true`):: + -- diff --git a/editors/code/package.json b/editors/code/package.json index 4fe829382d..56018b9929 100644 --- a/editors/code/package.json +++ b/editors/code/package.json @@ -557,10 +557,120 @@ } }, "rust-analyzer.checkOnSave": { - "markdownDescription": "Run the flycheck command for diagnostics on save.", + "markdownDescription": "Run the check command for diagnostics on save.", "default": true, "type": "boolean" }, + "rust-analyzer.check.allTargets": { + "markdownDescription": "Check all targets and tests (`--all-targets`).", + "default": true, + "type": "boolean" + }, + "rust-analyzer.check.command": { + "markdownDescription": "Cargo command to use for `cargo check`.", + "default": "check", + "type": "string" + }, + "rust-analyzer.check.extraArgs": { + "markdownDescription": "Extra arguments for `cargo check`.", + "default": [], + "type": "array", + "items": { + "type": "string" + } + }, + "rust-analyzer.check.extraEnv": { + "markdownDescription": "Extra environment variables that will be set when running `cargo check`.\nExtends `#rust-analyzer.cargo.extraEnv#`.", + "default": {}, + "type": "object" + }, + "rust-analyzer.check.features": { + "markdownDescription": "List of features to activate. Defaults to\n`#rust-analyzer.cargo.features#`.\n\nSet to `\"all\"` to pass `--all-features` to Cargo.", + "default": null, + "anyOf": [ + { + "type": "string", + "enum": [ + "all" + ], + "enumDescriptions": [ + "Pass `--all-features` to cargo" + ] + }, + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "null" + } + ] + }, + "rust-analyzer.check.invocationLocation": { + "markdownDescription": "Specifies the working directory for running checks.\n- \"workspace\": run checks for workspaces in the corresponding workspaces' root directories.\n This falls back to \"root\" if `#rust-analyzer.cargo.checkOnSave.invocationStrategy#` is set to `once`.\n- \"root\": run checks in the project's root directory.\nThis config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#`\nis set.", + "default": "workspace", + "type": "string", + "enum": [ + "workspace", + "root" + ], + "enumDescriptions": [ + "The command will be executed in the corresponding workspace root.", + "The command will be executed in the project root." + ] + }, + "rust-analyzer.check.invocationStrategy": { + "markdownDescription": "Specifies the invocation strategy to use when running the checkOnSave command.\nIf `per_workspace` is set, the command will be executed for each workspace.\nIf `once` is set, the command will be executed once.\nThis config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#`\nis set.", + "default": "per_workspace", + "type": "string", + "enum": [ + "per_workspace", + "once" + ], + "enumDescriptions": [ + "The command will be executed for each workspace.", + "The command will be executed once." + ] + }, + "rust-analyzer.check.noDefaultFeatures": { + "markdownDescription": "Whether to pass `--no-default-features` to Cargo. Defaults to\n`#rust-analyzer.cargo.noDefaultFeatures#`.", + "default": null, + "type": [ + "null", + "boolean" + ] + }, + "rust-analyzer.check.overrideCommand": { + "markdownDescription": "Override the command rust-analyzer uses instead of `cargo check` for\ndiagnostics on save. 
The command is required to output json and\nshould therefore include `--message-format=json` or a similar option.\n\nIf you're changing this because you're using some tool wrapping\nCargo, you might also want to change\n`#rust-analyzer.cargo.buildScripts.overrideCommand#`.\n\nIf there are multiple linked projects, this command is invoked for\neach of them, with the working directory being the project root\n(i.e., the folder containing the `Cargo.toml`).\n\nAn example command would be:\n\n```bash\ncargo check --workspace --message-format=json --all-targets\n```\n.", + "default": null, + "type": [ + "null", + "array" + ], + "items": { + "type": "string" + } + }, + "rust-analyzer.check.targets": { + "markdownDescription": "Check for specific targets. Defaults to `#rust-analyzer.cargo.target#` if empty.\n\nCan be a single target, e.g. `\"x86_64-unknown-linux-gnu\"` or a list of targets, e.g.\n`[\"aarch64-apple-darwin\", \"x86_64-apple-darwin\"]`.\n\nAliased as `\"checkOnSave.targets\"`.", + "default": null, + "anyOf": [ + { + "type": "null" + }, + { + "type": "string" + }, + { + "type": "array", + "items": { + "type": "string" + } + } + ] + }, "rust-analyzer.completion.autoimport.enable": { "markdownDescription": "Toggles the additional completions that automatically add imports when completed.\nNote that your client must specify the `additionalTextEdits` LSP client capability to truly have this feature enabled.", "default": true, @@ -702,116 +812,6 @@ "Use server-side file watching" ] }, - "rust-analyzer.flycheck.allTargets": { - "markdownDescription": "Check all targets and tests (`--all-targets`).", - "default": true, - "type": "boolean" - }, - "rust-analyzer.flycheck.command": { - "markdownDescription": "Cargo command to use for `cargo check`.", - "default": "check", - "type": "string" - }, - "rust-analyzer.flycheck.extraArgs": { - "markdownDescription": "Extra arguments for `cargo check`.", - "default": [], - "type": "array", - "items": { - "type": "string" - } - }, - "rust-analyzer.flycheck.extraEnv": { - "markdownDescription": "Extra environment variables that will be set when running `cargo check`.\nExtends `#rust-analyzer.cargo.extraEnv#`.", - "default": {}, - "type": "object" - }, - "rust-analyzer.flycheck.features": { - "markdownDescription": "List of features to activate. Defaults to\n`#rust-analyzer.cargo.features#`.\n\nSet to `\"all\"` to pass `--all-features` to Cargo.", - "default": null, - "anyOf": [ - { - "type": "string", - "enum": [ - "all" - ], - "enumDescriptions": [ - "Pass `--all-features` to cargo" - ] - }, - { - "type": "array", - "items": { - "type": "string" - } - }, - { - "type": "null" - } - ] - }, - "rust-analyzer.flycheck.invocationLocation": { - "markdownDescription": "Specifies the working directory for running checks.\n- \"workspace\": run checks for workspaces in the corresponding workspaces' root directories.\n This falls back to \"root\" if `#rust-analyzer.cargo.checkOnSave.invocationStrategy#` is set to `once`.\n- \"root\": run checks in the project's root directory.\nThis config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#`\nis set.", - "default": "workspace", - "type": "string", - "enum": [ - "workspace", - "root" - ], - "enumDescriptions": [ - "The command will be executed in the corresponding workspace root.", - "The command will be executed in the project root." 
- ] - }, - "rust-analyzer.flycheck.invocationStrategy": { - "markdownDescription": "Specifies the invocation strategy to use when running the checkOnSave command.\nIf `per_workspace` is set, the command will be executed for each workspace.\nIf `once` is set, the command will be executed once.\nThis config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#`\nis set.", - "default": "per_workspace", - "type": "string", - "enum": [ - "per_workspace", - "once" - ], - "enumDescriptions": [ - "The command will be executed for each workspace.", - "The command will be executed once." - ] - }, - "rust-analyzer.flycheck.noDefaultFeatures": { - "markdownDescription": "Whether to pass `--no-default-features` to Cargo. Defaults to\n`#rust-analyzer.cargo.noDefaultFeatures#`.", - "default": null, - "type": [ - "null", - "boolean" - ] - }, - "rust-analyzer.flycheck.overrideCommand": { - "markdownDescription": "Override the command rust-analyzer uses instead of `cargo check` for\ndiagnostics on save. The command is required to output json and\nshould therefore include `--message-format=json` or a similar option.\n\nIf you're changing this because you're using some tool wrapping\nCargo, you might also want to change\n`#rust-analyzer.cargo.buildScripts.overrideCommand#`.\n\nIf there are multiple linked projects, this command is invoked for\neach of them, with the working directory being the project root\n(i.e., the folder containing the `Cargo.toml`).\n\nAn example command would be:\n\n```bash\ncargo check --workspace --message-format=json --all-targets\n```\n.", - "default": null, - "type": [ - "null", - "array" - ], - "items": { - "type": "string" - } - }, - "rust-analyzer.flycheck.targets": { - "markdownDescription": "Check for specific targets. Defaults to `#rust-analyzer.cargo.target#` if empty.\n\nCan be a single target, e.g. `\"x86_64-unknown-linux-gnu\"` or a list of targets, e.g.\n`[\"aarch64-apple-darwin\", \"x86_64-apple-darwin\"]`.\n\nAliased as `\"checkOnSave.targets\"`.", - "default": null, - "anyOf": [ - { - "type": "null" - }, - { - "type": "string" - }, - { - "type": "array", - "items": { - "type": "string" - } - } - ] - }, "rust-analyzer.highlightRelated.breakPoints.enable": { "markdownDescription": "Enables highlighting of related references while the cursor is on `break`, `loop`, `while`, or `for` keywords.", "default": true, From b89c4f0a0529ca90dbe13d41fdd5e52331770900 Mon Sep 17 00:00:00 2001 From: Maybe Waffle Date: Tue, 20 Dec 2022 22:07:00 +0000 Subject: [PATCH 0779/1945] Implement postfix adjustment hints I'd say "First stab at implementing..." 
but I've been working on this for a month already lol --- crates/ide/src/inlay_hints.rs | 3 + crates/ide/src/inlay_hints/adjustment.rs | 252 ++++++++++++++++++++--- crates/ide/src/static_index.rs | 1 + crates/rust-analyzer/src/config.rs | 3 + crates/rust-analyzer/src/to_proto.rs | 4 + docs/user/generated_config.adoc | 5 + editors/code/package.json | 5 + 7 files changed, 246 insertions(+), 27 deletions(-) diff --git a/crates/ide/src/inlay_hints.rs b/crates/ide/src/inlay_hints.rs index 86d25e2f5a..7315a37ebc 100644 --- a/crates/ide/src/inlay_hints.rs +++ b/crates/ide/src/inlay_hints.rs @@ -35,6 +35,7 @@ pub struct InlayHintsConfig { pub parameter_hints: bool, pub chaining_hints: bool, pub adjustment_hints: AdjustmentHints, + pub adjustment_hints_postfix: bool, pub adjustment_hints_hide_outside_unsafe: bool, pub closure_return_type_hints: ClosureReturnTypeHints, pub binding_mode_hints: bool, @@ -82,6 +83,7 @@ pub enum InlayKind { ClosureReturnTypeHint, GenericParamListHint, AdjustmentHint, + AdjustmentHintPostfix, LifetimeHint, ParameterHint, TypeHint, @@ -446,6 +448,7 @@ mod tests { lifetime_elision_hints: LifetimeElisionHints::Never, closure_return_type_hints: ClosureReturnTypeHints::Never, adjustment_hints: AdjustmentHints::Never, + adjustment_hints_postfix: false, adjustment_hints_hide_outside_unsafe: false, binding_mode_hints: false, hide_named_constructor_hints: false, diff --git a/crates/ide/src/inlay_hints/adjustment.rs b/crates/ide/src/inlay_hints/adjustment.rs index 1c13f31cf2..367bd2f661 100644 --- a/crates/ide/src/inlay_hints/adjustment.rs +++ b/crates/ide/src/inlay_hints/adjustment.rs @@ -5,7 +5,11 @@ //! ``` use hir::{Adjust, AutoBorrow, Mutability, OverloadedDeref, PointerCast, Safety, Semantics}; use ide_db::RootDatabase; -use syntax::ast::{self, AstNode}; + +use syntax::{ + ast::{self, make, AstNode}, + ted, +}; use crate::{AdjustmentHints, InlayHint, InlayHintsConfig, InlayKind}; @@ -32,28 +36,14 @@ pub(super) fn hints( return None; } - let parent = expr.syntax().parent().and_then(ast::Expr::cast); let descended = sema.descend_node_into_attributes(expr.clone()).pop(); let desc_expr = descended.as_ref().unwrap_or(expr); let adjustments = sema.expr_adjustments(desc_expr).filter(|it| !it.is_empty())?; - let needs_parens = match parent { - Some(parent) => { - match parent { - ast::Expr::AwaitExpr(_) - | ast::Expr::CallExpr(_) - | ast::Expr::CastExpr(_) - | ast::Expr::FieldExpr(_) - | ast::Expr::MethodCallExpr(_) - | ast::Expr::TryExpr(_) => true, - // FIXME: shorthands need special casing, though not sure if adjustments are even valid there - ast::Expr::RecordExpr(_) => false, - ast::Expr::IndexExpr(index) => index.base().as_ref() == Some(expr), - _ => false, - } - } - None => false, - }; - if needs_parens { + + let (needs_outer_parens, needs_inner_parens) = + needs_parens_for_adjustment_hints(expr, config.adjustment_hints_postfix); + + if needs_outer_parens { acc.push(InlayHint { range: expr.syntax().text_range(), kind: InlayKind::OpeningParenthesis, @@ -61,7 +51,32 @@ pub(super) fn hints( tooltip: None, }); } - for adjustment in adjustments.into_iter().rev() { + + if config.adjustment_hints_postfix && needs_inner_parens { + acc.push(InlayHint { + range: expr.syntax().text_range(), + kind: InlayKind::OpeningParenthesis, + label: "(".into(), + tooltip: None, + }); + acc.push(InlayHint { + range: expr.syntax().text_range(), + kind: InlayKind::ClosingParenthesis, + label: ")".into(), + tooltip: None, + }); + } + + let (mut tmp0, mut tmp1); + let iter: &mut dyn Iterator = if 
config.adjustment_hints_postfix { + tmp0 = adjustments.into_iter(); + &mut tmp0 + } else { + tmp1 = adjustments.into_iter().rev(); + &mut tmp1 + }; + + for adjustment in iter { if adjustment.source == adjustment.target { continue; } @@ -97,12 +112,34 @@ pub(super) fn hints( }; acc.push(InlayHint { range: expr.syntax().text_range(), - kind: InlayKind::AdjustmentHint, - label: text.into(), + kind: if config.adjustment_hints_postfix { + InlayKind::AdjustmentHintPostfix + } else { + InlayKind::AdjustmentHint + }, + label: if config.adjustment_hints_postfix { + format!(".{}", text.trim_end()).into() + } else { + text.into() + }, tooltip: None, }); } - if needs_parens { + if !config.adjustment_hints_postfix && needs_inner_parens { + acc.push(InlayHint { + range: expr.syntax().text_range(), + kind: InlayKind::OpeningParenthesis, + label: "(".into(), + tooltip: None, + }); + acc.push(InlayHint { + range: expr.syntax().text_range(), + kind: InlayKind::ClosingParenthesis, + label: ")".into(), + tooltip: None, + }); + } + if needs_outer_parens { acc.push(InlayHint { range: expr.syntax().text_range(), kind: InlayKind::ClosingParenthesis, @@ -113,6 +150,69 @@ pub(super) fn hints( Some(()) } +/// Returns whatever we need to add paretheses on the inside and/or outside of `expr`, +/// if we are going to add (`postfix`) adjustments hints to it. +fn needs_parens_for_adjustment_hints(expr: &ast::Expr, postfix: bool) -> (bool, bool) { + // This is a very miserable pile of hacks... + // + // `Expr::needs_parens_in` requires that the expression is the child of the other expression, + // that is supposed to be its parent. + // + // But we want to check what would happen if we add `*`/`.*` to the inner expression. + // To check for inner we need `` expr.needs_parens_in(`*expr`) ``, + // to check for outer we need `` `*expr`.needs_parens_in(parent) ``, + // where "expr" is the `expr` parameter, `*expr` is the editted `expr`, + // and "parent" is the parent of the original expression... + // + // For this we utilize mutable mutable trees, which is a HACK, but it works. + + // Make `&expr`/`expr?` + let dummy_expr = { + // `make::*` function go through a string, so they parse wrongly. + // for example `` make::expr_try(`|| a`) `` would result in a + // `|| (a?)` and not `(|| a)?`. + // + // Thus we need dummy parens to preserve the relationship we want. + // The parens are then simply ignored by the following code. + let dummy_paren = make::expr_paren(expr.clone()); + if postfix { + make::expr_try(dummy_paren) + } else { + make::expr_ref(dummy_paren, false) + } + }; + + // Do the dark mutable tree magic. + // This essentially makes `dummy_expr` and `expr` switch places (families), + // so that `expr`'s parent is not `dummy_expr`'s parent. 
+ let dummy_expr = dummy_expr.clone_for_update(); + let expr = expr.clone_for_update(); + ted::replace(expr.syntax(), dummy_expr.syntax()); + + let parent = dummy_expr.syntax().parent(); + let expr = if postfix { + let ast::Expr::TryExpr(e) = &dummy_expr else { unreachable!() }; + let Some(ast::Expr::ParenExpr(e)) = e.expr() else { unreachable!() }; + + e.expr().unwrap() + } else { + let ast::Expr::RefExpr(e) = &dummy_expr else { unreachable!() }; + let Some(ast::Expr::ParenExpr(e)) = e.expr() else { unreachable!() }; + + e.expr().unwrap() + }; + + // At this point + // - `parent` is the parrent of the original expression + // - `dummy_expr` is the original expression wrapped in the operator we want (`*`/`.*`) + // - `expr` is the clone of the original expression (with `dummy_expr` as the parent) + + let needs_outer_parens = parent.map_or(false, |p| dummy_expr.needs_parens_in(p)); + let needs_inner_parens = expr.needs_parens_in(dummy_expr.syntax().clone()); + + (needs_outer_parens, needs_inner_parens) +} + #[cfg(test)] mod tests { use crate::{ @@ -125,7 +225,7 @@ mod tests { check_with_config( InlayHintsConfig { adjustment_hints: AdjustmentHints::Always, ..DISABLED_CONFIG }, r#" -//- minicore: coerce_unsized +//- minicore: coerce_unsized, fn fn main() { let _: u32 = loop {}; //^^^^^^^ @@ -148,12 +248,16 @@ fn main() { //^^^^ let _: unsafe fn() = main as fn(); //^^^^^^^^^^^^ + //^^^^^^^^^^^^( + //^^^^^^^^^^^^) let _: fn() = || {}; //^^^^^ let _: unsafe fn() = || {}; //^^^^^ let _: *const u32 = &mut 0u32 as *mut u32; //^^^^^^^^^^^^^^^^^^^^^ + //^^^^^^^^^^^^^^^^^^^^^( + //^^^^^^^^^^^^^^^^^^^^^) let _: &mut [_] = &mut [0; 0]; //^^^^^^^^^^^ //^^^^^^^^^^^&mut $ @@ -206,6 +310,11 @@ fn main() { //^^^^^^^ //^^^^^^^&mut $ //^^^^^^^* + + let _: &mut dyn Fn() = &mut || (); + //^^^^^^^^^^ + //^^^^^^^^^^&mut $ + //^^^^^^^^^^* } #[derive(Copy, Clone)] @@ -215,12 +324,101 @@ impl Struct { fn by_ref(&self) {} fn by_ref_mut(&mut self) {} } -trait Trait {} -impl Trait for Struct {} "#, ) } + #[test] + fn adjustment_hints_postfix() { + check_with_config( + InlayHintsConfig { + adjustment_hints: AdjustmentHints::Always, + adjustment_hints_postfix: true, + ..DISABLED_CONFIG + }, + r#" +//- minicore: coerce_unsized, fn +fn main() { + + Struct.consume(); + Struct.by_ref(); + //^^^^^^.& + Struct.by_ref_mut(); + //^^^^^^.&mut + + (&Struct).consume(); + //^^^^^^^( + //^^^^^^^) + //^^^^^^^.* + (&Struct).by_ref(); + + (&mut Struct).consume(); + //^^^^^^^^^^^( + //^^^^^^^^^^^) + //^^^^^^^^^^^.* + (&mut Struct).by_ref(); + //^^^^^^^^^^^( + //^^^^^^^^^^^) + //^^^^^^^^^^^.* + //^^^^^^^^^^^.& + (&mut Struct).by_ref_mut(); + + // Check that block-like expressions don't duplicate hints + let _: &mut [u32] = (&mut []); + //^^^^^^^( + //^^^^^^^) + //^^^^^^^.* + //^^^^^^^.&mut + //^^^^^^^. + let _: &mut [u32] = { &mut [] }; + //^^^^^^^( + //^^^^^^^) + //^^^^^^^.* + //^^^^^^^.&mut + //^^^^^^^. + let _: &mut [u32] = unsafe { &mut [] }; + //^^^^^^^( + //^^^^^^^) + //^^^^^^^.* + //^^^^^^^.&mut + //^^^^^^^. + let _: &mut [u32] = if true { + &mut [] + //^^^^^^^( + //^^^^^^^) + //^^^^^^^.* + //^^^^^^^.&mut + //^^^^^^^. + } else { + loop {} + //^^^^^^^. + }; + let _: &mut [u32] = match () { () => &mut [] } + //^^^^^^^( + //^^^^^^^) + //^^^^^^^.* + //^^^^^^^.&mut + //^^^^^^^. + + let _: &mut dyn Fn() = &mut || (); + //^^^^^^^^^^( + //^^^^^^^^^^) + //^^^^^^^^^^.* + //^^^^^^^^^^.&mut + //^^^^^^^^^^. 
+} + +#[derive(Copy, Clone)] +struct Struct; +impl Struct { + fn consume(self) {} + fn by_ref(&self) {} + fn by_ref_mut(&mut self) {} +} +"#, + ); + } + #[test] fn never_to_never_is_never_shown() { check_with_config( diff --git a/crates/ide/src/static_index.rs b/crates/ide/src/static_index.rs index 6e31a1420e..c6cca0d869 100644 --- a/crates/ide/src/static_index.rs +++ b/crates/ide/src/static_index.rs @@ -115,6 +115,7 @@ impl StaticIndex<'_> { closure_return_type_hints: crate::ClosureReturnTypeHints::WithBlock, lifetime_elision_hints: crate::LifetimeElisionHints::Never, adjustment_hints: crate::AdjustmentHints::Never, + adjustment_hints_postfix: false, adjustment_hints_hide_outside_unsafe: false, hide_named_constructor_hints: false, hide_closure_initialization_hints: false, diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index 0f515a6d5c..4d11a84091 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs @@ -333,6 +333,8 @@ config_data! { inlayHints_expressionAdjustmentHints_enable: AdjustmentHintsDef = "\"never\"", /// Whether to hide inlay hints for type adjustments outside of `unsafe` blocks. inlayHints_expressionAdjustmentHints_hideOutsideUnsafe: bool = "false", + /// Whether to show inlay hints for type adjustments as postfix ops (`.*` instead of `*`, etc). + inlayHints_expressionAdjustmentHints_postfix: bool = "false", /// Whether to show inlay type hints for elided lifetimes in function signatures. inlayHints_lifetimeElisionHints_enable: LifetimeElisionDef = "\"never\"", /// Whether to prefer using parameter names as the name for elided lifetime hints if possible. @@ -1252,6 +1254,7 @@ impl Config { }, AdjustmentHintsDef::Reborrow => ide::AdjustmentHints::ReborrowOnly, }, + adjustment_hints_postfix: self.data.inlayHints_expressionAdjustmentHints_postfix, adjustment_hints_hide_outside_unsafe: self .data .inlayHints_expressionAdjustmentHints_hideOutsideUnsafe, diff --git a/crates/rust-analyzer/src/to_proto.rs b/crates/rust-analyzer/src/to_proto.rs index a12bd3952c..e736b2ff9a 100644 --- a/crates/rust-analyzer/src/to_proto.rs +++ b/crates/rust-analyzer/src/to_proto.rs @@ -452,6 +452,7 @@ pub(crate) fn inlay_hint( | InlayKind::ChainingHint | InlayKind::GenericParamListHint | InlayKind::ClosingParenthesis + | InlayKind::AdjustmentHintPostfix | InlayKind::LifetimeHint | InlayKind::ClosingBraceHint => position(line_index, inlay_hint.range.end()), }, @@ -465,6 +466,7 @@ pub(crate) fn inlay_hint( | InlayKind::ClosureReturnTypeHint | InlayKind::GenericParamListHint | InlayKind::AdjustmentHint + | InlayKind::AdjustmentHintPostfix | InlayKind::LifetimeHint | InlayKind::ParameterHint => false, }), @@ -475,6 +477,7 @@ pub(crate) fn inlay_hint( | InlayKind::ClosureReturnTypeHint | InlayKind::GenericParamListHint | InlayKind::AdjustmentHint + | InlayKind::AdjustmentHintPostfix | InlayKind::TypeHint | InlayKind::DiscriminantHint | InlayKind::ClosingBraceHint => false, @@ -493,6 +496,7 @@ pub(crate) fn inlay_hint( | InlayKind::GenericParamListHint | InlayKind::LifetimeHint | InlayKind::AdjustmentHint + | InlayKind::AdjustmentHintPostfix | InlayKind::ClosingBraceHint => None, }, text_edits: None, diff --git a/docs/user/generated_config.adoc b/docs/user/generated_config.adoc index d9794e7052..60c16ecadf 100644 --- a/docs/user/generated_config.adoc +++ b/docs/user/generated_config.adoc @@ -469,6 +469,11 @@ Whether to show inlay hints for type adjustments. -- Whether to hide inlay hints for type adjustments outside of `unsafe` blocks. 
-- +[[rust-analyzer.inlayHints.expressionAdjustmentHints.postfix]]rust-analyzer.inlayHints.expressionAdjustmentHints.postfix (default: `false`):: ++ +-- +Whether to show inlay hints for type adjustments as postfix ops (`.*` instead of `*`, etc). +-- [[rust-analyzer.inlayHints.lifetimeElisionHints.enable]]rust-analyzer.inlayHints.lifetimeElisionHints.enable (default: `"never"`):: + -- diff --git a/editors/code/package.json b/editors/code/package.json index f508dde4f6..aeb1d97c5f 100644 --- a/editors/code/package.json +++ b/editors/code/package.json @@ -1000,6 +1000,11 @@ "default": false, "type": "boolean" }, + "rust-analyzer.inlayHints.expressionAdjustmentHints.postfix": { + "markdownDescription": "Whether to show inlay hints for type adjustments as postfix ops (`.*` instead of `*`, etc).", + "default": false, + "type": "boolean" + }, "rust-analyzer.inlayHints.lifetimeElisionHints.enable": { "markdownDescription": "Whether to show inlay type hints for elided lifetimes in function signatures.", "default": "never", From 12b7f9f7bfb4d50ee29232d9d40430fa22db7157 Mon Sep 17 00:00:00 2001 From: Maybe Waffle Date: Wed, 21 Dec 2022 15:00:05 +0000 Subject: [PATCH 0780/1945] Add an option to minimize parentheses for adjustment hints --- crates/ide/src/inlay_hints.rs | 14 ++- crates/ide/src/inlay_hints/adjustment.rs | 111 ++++++++++++++++++++--- crates/ide/src/lib.rs | 4 +- crates/ide/src/static_index.rs | 3 +- crates/rust-analyzer/src/config.rs | 35 ++++++- docs/user/generated_config.adoc | 4 +- editors/code/package.json | 20 +++- 7 files changed, 162 insertions(+), 29 deletions(-) diff --git a/crates/ide/src/inlay_hints.rs b/crates/ide/src/inlay_hints.rs index 7315a37ebc..48a7bbfecf 100644 --- a/crates/ide/src/inlay_hints.rs +++ b/crates/ide/src/inlay_hints.rs @@ -35,7 +35,7 @@ pub struct InlayHintsConfig { pub parameter_hints: bool, pub chaining_hints: bool, pub adjustment_hints: AdjustmentHints, - pub adjustment_hints_postfix: bool, + pub adjustment_hints_mode: AdjustmentHintsMode, pub adjustment_hints_hide_outside_unsafe: bool, pub closure_return_type_hints: ClosureReturnTypeHints, pub binding_mode_hints: bool, @@ -75,6 +75,14 @@ pub enum AdjustmentHints { Never, } +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub enum AdjustmentHintsMode { + Prefix, + Postfix, + PreferPrefix, + PreferPostfix, +} + #[derive(Clone, Debug, PartialEq, Eq)] pub enum InlayKind { BindingModeHint, @@ -432,7 +440,7 @@ mod tests { use itertools::Itertools; use test_utils::extract_annotations; - use crate::inlay_hints::AdjustmentHints; + use crate::inlay_hints::{AdjustmentHints, AdjustmentHintsMode}; use crate::DiscriminantHints; use crate::{fixture, inlay_hints::InlayHintsConfig, LifetimeElisionHints}; @@ -448,7 +456,7 @@ mod tests { lifetime_elision_hints: LifetimeElisionHints::Never, closure_return_type_hints: ClosureReturnTypeHints::Never, adjustment_hints: AdjustmentHints::Never, - adjustment_hints_postfix: false, + adjustment_hints_mode: AdjustmentHintsMode::Prefix, adjustment_hints_hide_outside_unsafe: false, binding_mode_hints: false, hide_named_constructor_hints: false, diff --git a/crates/ide/src/inlay_hints/adjustment.rs b/crates/ide/src/inlay_hints/adjustment.rs index 367bd2f661..47e854e4bd 100644 --- a/crates/ide/src/inlay_hints/adjustment.rs +++ b/crates/ide/src/inlay_hints/adjustment.rs @@ -11,7 +11,7 @@ use syntax::{ ted, }; -use crate::{AdjustmentHints, InlayHint, InlayHintsConfig, InlayKind}; +use crate::{AdjustmentHints, AdjustmentHintsMode, InlayHint, InlayHintsConfig, InlayKind}; pub(super) fn hints( 
acc: &mut Vec, @@ -40,8 +40,8 @@ pub(super) fn hints( let desc_expr = descended.as_ref().unwrap_or(expr); let adjustments = sema.expr_adjustments(desc_expr).filter(|it| !it.is_empty())?; - let (needs_outer_parens, needs_inner_parens) = - needs_parens_for_adjustment_hints(expr, config.adjustment_hints_postfix); + let (postfix, needs_outer_parens, needs_inner_parens) = + mode_and_needs_parens_for_adjustment_hints(expr, config.adjustment_hints_mode); if needs_outer_parens { acc.push(InlayHint { @@ -52,7 +52,7 @@ pub(super) fn hints( }); } - if config.adjustment_hints_postfix && needs_inner_parens { + if postfix && needs_inner_parens { acc.push(InlayHint { range: expr.syntax().text_range(), kind: InlayKind::OpeningParenthesis, @@ -68,7 +68,7 @@ pub(super) fn hints( } let (mut tmp0, mut tmp1); - let iter: &mut dyn Iterator = if config.adjustment_hints_postfix { + let iter: &mut dyn Iterator = if postfix { tmp0 = adjustments.into_iter(); &mut tmp0 } else { @@ -112,20 +112,16 @@ pub(super) fn hints( }; acc.push(InlayHint { range: expr.syntax().text_range(), - kind: if config.adjustment_hints_postfix { + kind: if postfix { InlayKind::AdjustmentHintPostfix } else { InlayKind::AdjustmentHint }, - label: if config.adjustment_hints_postfix { - format!(".{}", text.trim_end()).into() - } else { - text.into() - }, + label: if postfix { format!(".{}", text.trim_end()).into() } else { text.into() }, tooltip: None, }); } - if !config.adjustment_hints_postfix && needs_inner_parens { + if !postfix && needs_inner_parens { acc.push(InlayHint { range: expr.syntax().text_range(), kind: InlayKind::OpeningParenthesis, @@ -150,6 +146,41 @@ pub(super) fn hints( Some(()) } +/// Returns whatever the hint should be postfix and if we need to add paretheses on the inside and/or outside of `expr`, +/// if we are going to add (`postfix`) adjustments hints to it. +fn mode_and_needs_parens_for_adjustment_hints( + expr: &ast::Expr, + mode: AdjustmentHintsMode, +) -> (bool, bool, bool) { + use {std::cmp::Ordering::*, AdjustmentHintsMode::*}; + + match mode { + Prefix | Postfix => { + let postfix = matches!(mode, Postfix); + let (inside, outside) = needs_parens_for_adjustment_hints(expr, postfix); + (postfix, inside, outside) + } + PreferPrefix | PreferPostfix => { + let prefer_postfix = matches!(mode, PreferPostfix); + + let (pre_inside, pre_outside) = needs_parens_for_adjustment_hints(expr, false); + let prefix = (false, pre_inside, pre_outside); + let pre_count = pre_inside as u8 + pre_outside as u8; + + let (post_inside, post_outside) = needs_parens_for_adjustment_hints(expr, true); + let postfix = (true, post_inside, post_outside); + let post_count = post_inside as u8 + post_outside as u8; + + match pre_count.cmp(&post_count) { + Less => prefix, + Greater => postfix, + Equal if prefer_postfix => postfix, + Equal => prefix, + } + } + } +} + /// Returns whatever we need to add paretheses on the inside and/or outside of `expr`, /// if we are going to add (`postfix`) adjustments hints to it. 
fn needs_parens_for_adjustment_hints(expr: &ast::Expr, postfix: bool) -> (bool, bool) { @@ -217,7 +248,7 @@ fn needs_parens_for_adjustment_hints(expr: &ast::Expr, postfix: bool) -> (bool, mod tests { use crate::{ inlay_hints::tests::{check_with_config, DISABLED_CONFIG}, - AdjustmentHints, InlayHintsConfig, + AdjustmentHints, AdjustmentHintsMode, InlayHintsConfig, }; #[test] @@ -333,7 +364,7 @@ impl Struct { check_with_config( InlayHintsConfig { adjustment_hints: AdjustmentHints::Always, - adjustment_hints_postfix: true, + adjustment_hints_mode: AdjustmentHintsMode::Postfix, ..DISABLED_CONFIG }, r#" @@ -419,6 +450,58 @@ impl Struct { ); } + #[test] + fn adjustment_hints_prefer_prefix() { + check_with_config( + InlayHintsConfig { + adjustment_hints: AdjustmentHints::Always, + adjustment_hints_mode: AdjustmentHintsMode::PreferPrefix, + ..DISABLED_CONFIG + }, + r#" +fn main() { + let _: u32 = loop {}; + //^^^^^^^ + + Struct.by_ref(); + //^^^^^^.& + + let (): () = return (); + //^^^^^^^^^ + + struct Struct; + impl Struct { fn by_ref(&self) {} } +} + "#, + ) + } + + #[test] + fn adjustment_hints_prefer_postfix() { + check_with_config( + InlayHintsConfig { + adjustment_hints: AdjustmentHints::Always, + adjustment_hints_mode: AdjustmentHintsMode::PreferPostfix, + ..DISABLED_CONFIG + }, + r#" +fn main() { + let _: u32 = loop {}; + //^^^^^^^. + + Struct.by_ref(); + //^^^^^^.& + + let (): () = return (); + //^^^^^^^^^ + + struct Struct; + impl Struct { fn by_ref(&self) {} } +} + "#, + ) + } + #[test] fn never_to_never_is_never_shown() { check_with_config( diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs index 200958a433..239456cb28 100644 --- a/crates/ide/src/lib.rs +++ b/crates/ide/src/lib.rs @@ -81,8 +81,8 @@ pub use crate::{ highlight_related::{HighlightRelatedConfig, HighlightedRange}, hover::{HoverAction, HoverConfig, HoverDocFormat, HoverGotoTypeData, HoverResult}, inlay_hints::{ - AdjustmentHints, ClosureReturnTypeHints, DiscriminantHints, InlayHint, InlayHintLabel, - InlayHintsConfig, InlayKind, InlayTooltip, LifetimeElisionHints, + AdjustmentHints, AdjustmentHintsMode, ClosureReturnTypeHints, DiscriminantHints, InlayHint, + InlayHintLabel, InlayHintsConfig, InlayKind, InlayTooltip, LifetimeElisionHints, }, join_lines::JoinLinesConfig, markup::Markup, diff --git a/crates/ide/src/static_index.rs b/crates/ide/src/static_index.rs index c6cca0d869..a6b30ba139 100644 --- a/crates/ide/src/static_index.rs +++ b/crates/ide/src/static_index.rs @@ -13,6 +13,7 @@ use syntax::{AstNode, SyntaxKind::*, SyntaxToken, TextRange, T}; use crate::{ hover::hover_for_definition, + inlay_hints::AdjustmentHintsMode, moniker::{def_to_moniker, MonikerResult}, parent_module::crates_for, Analysis, Fold, HoverConfig, HoverDocFormat, HoverResult, InlayHint, InlayHintsConfig, @@ -115,7 +116,7 @@ impl StaticIndex<'_> { closure_return_type_hints: crate::ClosureReturnTypeHints::WithBlock, lifetime_elision_hints: crate::LifetimeElisionHints::Never, adjustment_hints: crate::AdjustmentHints::Never, - adjustment_hints_postfix: false, + adjustment_hints_mode: AdjustmentHintsMode::Prefix, adjustment_hints_hide_outside_unsafe: false, hide_named_constructor_hints: false, hide_closure_initialization_hints: false, diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index 4d11a84091..27a86db382 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs @@ -333,8 +333,8 @@ config_data! 
{ inlayHints_expressionAdjustmentHints_enable: AdjustmentHintsDef = "\"never\"", /// Whether to hide inlay hints for type adjustments outside of `unsafe` blocks. inlayHints_expressionAdjustmentHints_hideOutsideUnsafe: bool = "false", - /// Whether to show inlay hints for type adjustments as postfix ops (`.*` instead of `*`, etc). - inlayHints_expressionAdjustmentHints_postfix: bool = "false", + /// Whether to show inlay hints as postfix ops (`.*` instead of `*`, etc). + inlayHints_expressionAdjustmentHints_mode: AdjustmentHintsModeDef = "\"prefix\"", /// Whether to show inlay type hints for elided lifetimes in function signatures. inlayHints_lifetimeElisionHints_enable: LifetimeElisionDef = "\"never\"", /// Whether to prefer using parameter names as the name for elided lifetime hints if possible. @@ -1254,7 +1254,12 @@ impl Config { }, AdjustmentHintsDef::Reborrow => ide::AdjustmentHints::ReborrowOnly, }, - adjustment_hints_postfix: self.data.inlayHints_expressionAdjustmentHints_postfix, + adjustment_hints_mode: match self.data.inlayHints_expressionAdjustmentHints_mode { + AdjustmentHintsModeDef::Prefix => ide::AdjustmentHintsMode::Prefix, + AdjustmentHintsModeDef::Postfix => ide::AdjustmentHintsMode::Postfix, + AdjustmentHintsModeDef::PreferPrefix => ide::AdjustmentHintsMode::PreferPrefix, + AdjustmentHintsModeDef::PreferPostfix => ide::AdjustmentHintsMode::PreferPostfix, + }, adjustment_hints_hide_outside_unsafe: self .data .inlayHints_expressionAdjustmentHints_hideOutsideUnsafe, @@ -1771,6 +1776,15 @@ enum DiscriminantHintsDef { Fieldless, } +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "snake_case")] +enum AdjustmentHintsModeDef { + Prefix, + Postfix, + PreferPrefix, + PreferPostfix, +} + #[derive(Deserialize, Debug, Clone)] #[serde(rename_all = "snake_case")] enum FilesWatcherDef { @@ -2104,6 +2118,21 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json "Only show discriminant hints on fieldless enum variants." ] }, + "AdjustmentHintsModeDef" => set! { + "type": "string", + "enum": [ + "prefix", + "postfix", + "prefer_prefix", + "prefer_postfix", + ], + "enumDescriptions": [ + "Always show adjustment hints as prefix (`*expr`).", + "Always show adjustment hints as postfix (`expr.*`).", + "Show prefix or postfix depending on which uses less parenthesis, prefering prefix.", + "Show prefix or postfix depending on which uses less parenthesis, prefering postfix.", + ] + }, "CargoFeaturesDef" => set! { "anyOf": [ { diff --git a/docs/user/generated_config.adoc b/docs/user/generated_config.adoc index 60c16ecadf..0aaf07ebf3 100644 --- a/docs/user/generated_config.adoc +++ b/docs/user/generated_config.adoc @@ -469,10 +469,10 @@ Whether to show inlay hints for type adjustments. -- Whether to hide inlay hints for type adjustments outside of `unsafe` blocks. -- -[[rust-analyzer.inlayHints.expressionAdjustmentHints.postfix]]rust-analyzer.inlayHints.expressionAdjustmentHints.postfix (default: `false`):: +[[rust-analyzer.inlayHints.expressionAdjustmentHints.mode]]rust-analyzer.inlayHints.expressionAdjustmentHints.mode (default: `"prefix"`):: + -- -Whether to show inlay hints for type adjustments as postfix ops (`.*` instead of `*`, etc). +Whether to show inlay hints as postfix ops (`.*` instead of `*`, etc). 
-- [[rust-analyzer.inlayHints.lifetimeElisionHints.enable]]rust-analyzer.inlayHints.lifetimeElisionHints.enable (default: `"never"`):: + diff --git a/editors/code/package.json b/editors/code/package.json index aeb1d97c5f..5ffce2f553 100644 --- a/editors/code/package.json +++ b/editors/code/package.json @@ -1000,10 +1000,22 @@ "default": false, "type": "boolean" }, - "rust-analyzer.inlayHints.expressionAdjustmentHints.postfix": { - "markdownDescription": "Whether to show inlay hints for type adjustments as postfix ops (`.*` instead of `*`, etc).", - "default": false, - "type": "boolean" + "rust-analyzer.inlayHints.expressionAdjustmentHints.mode": { + "markdownDescription": "Whether to show inlay hints as postfix ops (`.*` instead of `*`, etc).", + "default": "prefix", + "type": "string", + "enum": [ + "prefix", + "postfix", + "prefer_prefix", + "prefer_postfix" + ], + "enumDescriptions": [ + "Always show adjustment hints as prefix (`*expr`).", + "Always show adjustment hints as postfix (`expr.*`).", + "Show prefix or postfix depending on which uses less parenthesis, prefering prefix.", + "Show prefix or postfix depending on which uses less parenthesis, prefering postfix." + ] }, "rust-analyzer.inlayHints.lifetimeElisionHints.enable": { "markdownDescription": "Whether to show inlay type hints for elided lifetimes in function signatures.", From a9676cfbe3f81515483fb563f20c74e27cfa7c41 Mon Sep 17 00:00:00 2001 From: Maybe Waffle Date: Wed, 21 Dec 2022 15:31:57 +0000 Subject: [PATCH 0781/1945] Add a "bug" test for adjustment hints to check for status quo --- crates/ide/src/inlay_hints/adjustment.rs | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/crates/ide/src/inlay_hints/adjustment.rs b/crates/ide/src/inlay_hints/adjustment.rs index 47e854e4bd..9f6984b59e 100644 --- a/crates/ide/src/inlay_hints/adjustment.rs +++ b/crates/ide/src/inlay_hints/adjustment.rs @@ -609,4 +609,20 @@ fn a() { "#, ); } + + #[test] + fn bug() { + check_with_config( + InlayHintsConfig { adjustment_hints: AdjustmentHints::Always, ..DISABLED_CONFIG }, + r#" +fn main() { + // These should be identical, but they are not... + + let () = return; + let (): () = return; + //^^^^^^ +} + "#, + ) + } } From b6169c2a2e5a24e9905a2614e6fe469fb37f50c9 Mon Sep 17 00:00:00 2001 From: Maybe Waffle Date: Mon, 9 Jan 2023 13:37:37 +0000 Subject: [PATCH 0782/1945] Add a fixme to remove hacks --- crates/ide/src/inlay_hints/adjustment.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/crates/ide/src/inlay_hints/adjustment.rs b/crates/ide/src/inlay_hints/adjustment.rs index 9f6984b59e..bdd7c05e00 100644 --- a/crates/ide/src/inlay_hints/adjustment.rs +++ b/crates/ide/src/inlay_hints/adjustment.rs @@ -196,6 +196,8 @@ fn needs_parens_for_adjustment_hints(expr: &ast::Expr, postfix: bool) -> (bool, // and "parent" is the parent of the original expression... // // For this we utilize mutable mutable trees, which is a HACK, but it works. 
+ // + // FIXME: comeup with a better API for `needs_parens_in`, so that we don't have to do *this* // Make `&expr`/`expr?` let dummy_expr = { From 44c84a8d2848c9fae4d496c0d9dc9e8c98c8f1ef Mon Sep 17 00:00:00 2001 From: Maybe Waffle Date: Tue, 6 Dec 2022 17:34:47 +0000 Subject: [PATCH 0783/1945] Add `convert_ufcs_to_method` assist --- .../src/handlers/convert_ufcs_to_method.rs | 211 ++++++++++++++++++ crates/ide-assists/src/lib.rs | 2 + crates/ide-assists/src/tests/generated.rs | 19 ++ 3 files changed, 232 insertions(+) create mode 100644 crates/ide-assists/src/handlers/convert_ufcs_to_method.rs diff --git a/crates/ide-assists/src/handlers/convert_ufcs_to_method.rs b/crates/ide-assists/src/handlers/convert_ufcs_to_method.rs new file mode 100644 index 0000000000..8704e40b0d --- /dev/null +++ b/crates/ide-assists/src/handlers/convert_ufcs_to_method.rs @@ -0,0 +1,211 @@ +use syntax::{ + ast::{self, make, AstNode, HasArgList}, + TextRange, +}; + +use crate::{AssistContext, AssistId, AssistKind, Assists}; + +// Assist: convert_ufcs_to_method +// +// Transforms universal function call syntax into a method call. +// +// ``` +// fn main() { +// std::ops::Add::add$0(1, 2); +// } +// # mod std { pub mod ops { pub trait Add { fn add(self, _: Self) {} } impl Add for i32 {} } } +// ``` +// -> +// ``` +// fn main() { +// 1.add(2); +// } +// # mod std { pub mod ops { pub trait Add { fn add(self, _: Self) {} } impl Add for i32 {} } } +// ``` +pub(crate) fn convert_ufcs_to_method(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { + let call = ctx.find_node_at_offset::()?; + let ast::Expr::PathExpr(path_expr) = call.expr()? else { return None }; + let path = path_expr.path()?; + + let cursor_in_range = path.syntax().text_range().contains_range(ctx.selection_trimmed()); + if !cursor_in_range { + return None; + } + + let args = call.arg_list()?; + let l_paren = args.l_paren_token()?; + let mut args_iter = args.args(); + let first_arg = args_iter.next()?; + let second_arg = args_iter.next(); + + _ = path.qualifier()?; + let method_name = path.segment()?.name_ref()?; + + let res = ctx.sema.resolve_path(&path)?; + let hir::PathResolution::Def(hir::ModuleDef::Function(fun)) = res else { return None }; + if !fun.has_self_param(ctx.sema.db) { + return None; + } + + // `core::ops::Add::add(` -> `` + let delete_path = + TextRange::new(path.syntax().text_range().start(), l_paren.text_range().end()); + + // Parens around `expr` if needed + let parens = needs_parens_as_receiver(&first_arg).then(|| { + let range = first_arg.syntax().text_range(); + (range.start(), range.end()) + }); + + // `, ` -> `.add(` + let replace_comma = TextRange::new( + first_arg.syntax().text_range().end(), + second_arg + .map(|a| a.syntax().text_range().start()) + .unwrap_or_else(|| first_arg.syntax().text_range().end()), + ); + + acc.add( + AssistId("convert_ufcs_to_method", AssistKind::RefactorRewrite), + "Convert UFCS to a method call", + call.syntax().text_range(), + |edit| { + edit.delete(delete_path); + if let Some((open, close)) = parens { + edit.insert(open, "("); + edit.insert(close, ")"); + } + edit.replace(replace_comma, format!(".{method_name}(")); + }, + ) +} + +fn needs_parens_as_receiver(expr: &ast::Expr) -> bool { + // Make `(expr).dummy()` + let dummy_call = make::expr_method_call( + make::expr_paren(expr.clone()), + make::name_ref("dummy"), + make::arg_list([]), + ); + + // Get the `expr` clone with the right parent back + // (unreachable!s are fine since we've just constructed the expression) + let 
ast::Expr::MethodCallExpr(call) = &dummy_call else { unreachable!() }; + let Some(receiver) = call.receiver() else { unreachable!() }; + let ast::Expr::ParenExpr(parens) = receiver else { unreachable!() }; + let Some(expr) = parens.expr() else { unreachable!() }; + + expr.needs_parens_in(dummy_call.syntax().clone()) +} + +#[cfg(test)] +mod tests { + use crate::tests::{check_assist, check_assist_not_applicable}; + + use super::*; + + #[test] + fn ufcs2method_simple() { + check_assist( + convert_ufcs_to_method, + r#" +struct S; +impl S { fn f(self, S: S) {} } +fn f() { S::$0f(S, S); }"#, + r#" +struct S; +impl S { fn f(self, S: S) {} } +fn f() { S.f(S); }"#, + ); + } + + #[test] + fn ufcs2method_trait() { + check_assist( + convert_ufcs_to_method, + r#" +//- minicore: add +fn f() { ::$0add(2, 2); }"#, + r#" +fn f() { 2.add(2); }"#, + ); + + check_assist( + convert_ufcs_to_method, + r#" +//- minicore: add +fn f() { core::ops::Add::$0add(2, 2); }"#, + r#" +fn f() { 2.add(2); }"#, + ); + + check_assist( + convert_ufcs_to_method, + r#" +//- minicore: add +use core::ops::Add; +fn f() { <_>::$0add(2, 2); }"#, + r#" +use core::ops::Add; +fn f() { 2.add(2); }"#, + ); + } + + #[test] + fn ufcs2method_single_arg() { + check_assist( + convert_ufcs_to_method, + r#" + struct S; + impl S { fn f(self) {} } + fn f() { S::$0f(S); }"#, + r#" + struct S; + impl S { fn f(self) {} } + fn f() { S.f(); }"#, + ); + } + + #[test] + fn ufcs2method_parens() { + check_assist( + convert_ufcs_to_method, + r#" +//- minicore: deref +struct S; +impl core::ops::Deref for S { + type Target = S; + fn deref(&self) -> &S { self } +} +fn f() { core::ops::Deref::$0deref(&S); }"#, + r#" +struct S; +impl core::ops::Deref for S { + type Target = S; + fn deref(&self) -> &S { self } +} +fn f() { (&S).deref(); }"#, + ); + } + + #[test] + fn ufcs2method_doesnt_apply_with_cursor_not_on_path() { + check_assist_not_applicable( + convert_ufcs_to_method, + r#" +//- minicore: add +fn f() { core::ops::Add::add(2,$0 2); }"#, + ); + } + + #[test] + fn ufcs2method_doesnt_apply_with_no_self() { + check_assist_not_applicable( + convert_ufcs_to_method, + r#" +struct S; +impl S { fn assoc(S: S, S: S) {} } +fn f() { S::assoc$0(S, S); }"#, + ); + } +} diff --git a/crates/ide-assists/src/lib.rs b/crates/ide-assists/src/lib.rs index f7ac9d8fd6..6da51cfa4f 100644 --- a/crates/ide-assists/src/lib.rs +++ b/crates/ide-assists/src/lib.rs @@ -126,6 +126,7 @@ mod handlers { mod convert_to_guarded_return; mod convert_two_arm_bool_match_to_matches_macro; mod convert_while_to_loop; + mod convert_ufcs_to_method; mod destructure_tuple_binding; mod expand_glob_import; mod extract_expressions_from_format_string; @@ -218,6 +219,7 @@ mod handlers { convert_bool_then::convert_bool_then_to_if, convert_bool_then::convert_if_to_bool_then, convert_comment_block::convert_comment_block, + convert_ufcs_to_method::convert_ufcs_to_method, convert_integer_literal::convert_integer_literal, convert_into_to_from::convert_into_to_from, convert_iter_for_each_to_for::convert_iter_for_each_to_for, diff --git a/crates/ide-assists/src/tests/generated.rs b/crates/ide-assists/src/tests/generated.rs index 210df6999d..d84f343c55 100644 --- a/crates/ide-assists/src/tests/generated.rs +++ b/crates/ide-assists/src/tests/generated.rs @@ -554,6 +554,25 @@ fn main() { ) } +#[test] +fn doctest_convert_ufcs_to_method() { + check_doc_test( + "convert_ufcs_to_method", + r#####" +fn main() { + std::ops::Add::add$0(1, 2); +} +mod std { pub mod ops { pub trait Add { fn add(self, _: Self) {} } impl Add for 
i32 {} } } +"#####, + r#####" +fn main() { + 1.add(2); +} +mod std { pub mod ops { pub trait Add { fn add(self, _: Self) {} } impl Add for i32 {} } } +"#####, + ) +} + #[test] fn doctest_convert_while_to_loop() { check_doc_test( From 769273ca4ca41fe3ca704ff7793de95c5def1728 Mon Sep 17 00:00:00 2001 From: Tom Kunc Date: Mon, 9 Jan 2023 07:01:41 -0700 Subject: [PATCH 0784/1945] Simplify code with @Veykril's suggestion. --- .../ide-assists/src/handlers/inline_macro.rs | 19 +++++-------------- 1 file changed, 5 insertions(+), 14 deletions(-) diff --git a/crates/ide-assists/src/handlers/inline_macro.rs b/crates/ide-assists/src/handlers/inline_macro.rs index d669826aa7..9d03f03d20 100644 --- a/crates/ide-assists/src/handlers/inline_macro.rs +++ b/crates/ide-assists/src/handlers/inline_macro.rs @@ -34,25 +34,16 @@ use crate::{AssistContext, AssistId, AssistKind, Assists}; // } // ``` pub(crate) fn inline_macro(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { - let tok = ctx.token_at_offset().right_biased()?; + let unexpanded = ctx.find_node_at_offset::()?; + let expanded = ctx.sema.expand(&unexpanded)?.clone_for_update(); - let mut anc = tok.parent_ancestors(); - let (_name, expanded, unexpanded) = loop { - let node = anc.next()?; - if let Some(mac) = ast::MacroCall::cast(node.clone()) { - break ( - mac.path()?.segment()?.name_ref()?.to_string(), - ctx.sema.expand(&mac)?.clone_for_update(), - node, - ); - } - }; + let text_range = unexpanded.syntax().text_range(); acc.add( AssistId("inline_macro", AssistKind::RefactorRewrite), format!("Inline macro"), - unexpanded.text_range(), - |builder| builder.replace(unexpanded.text_range(), expanded.to_string()), + text_range, + |builder| builder.replace(text_range, expanded.to_string()), ) } From c782353a907ea121dcae670a5f0d8e14b8865b19 Mon Sep 17 00:00:00 2001 From: Maybe Waffle Date: Mon, 9 Jan 2023 13:59:02 +0000 Subject: [PATCH 0785/1945] Rename assist: `convert_ufcs_to_method` => `unqualify_method_call` --- ..._to_method.rs => unqualify_method_call.rs} | 36 +++++++++--------- crates/ide-assists/src/lib.rs | 4 +- crates/ide-assists/src/tests/generated.rs | 38 +++++++++---------- 3 files changed, 39 insertions(+), 39 deletions(-) rename crates/ide-assists/src/handlers/{convert_ufcs_to_method.rs => unqualify_method_call.rs} (85%) diff --git a/crates/ide-assists/src/handlers/convert_ufcs_to_method.rs b/crates/ide-assists/src/handlers/unqualify_method_call.rs similarity index 85% rename from crates/ide-assists/src/handlers/convert_ufcs_to_method.rs rename to crates/ide-assists/src/handlers/unqualify_method_call.rs index 8704e40b0d..e9d4e270cd 100644 --- a/crates/ide-assists/src/handlers/convert_ufcs_to_method.rs +++ b/crates/ide-assists/src/handlers/unqualify_method_call.rs @@ -5,7 +5,7 @@ use syntax::{ use crate::{AssistContext, AssistId, AssistKind, Assists}; -// Assist: convert_ufcs_to_method +// Assist: unqualify_method_call // // Transforms universal function call syntax into a method call. // @@ -22,7 +22,7 @@ use crate::{AssistContext, AssistId, AssistKind, Assists}; // } // # mod std { pub mod ops { pub trait Add { fn add(self, _: Self) {} } impl Add for i32 {} } } // ``` -pub(crate) fn convert_ufcs_to_method(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { +pub(crate) fn unqualify_method_call(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { let call = ctx.find_node_at_offset::()?; let ast::Expr::PathExpr(path_expr) = call.expr()? 
else { return None }; let path = path_expr.path()?; @@ -66,8 +66,8 @@ pub(crate) fn convert_ufcs_to_method(acc: &mut Assists, ctx: &AssistContext<'_>) ); acc.add( - AssistId("convert_ufcs_to_method", AssistKind::RefactorRewrite), - "Convert UFCS to a method call", + AssistId("unqualify_method_call", AssistKind::RefactorRewrite), + "Unqualify method call", call.syntax().text_range(), |edit| { edit.delete(delete_path); @@ -105,9 +105,9 @@ mod tests { use super::*; #[test] - fn ufcs2method_simple() { + fn unqualify_method_call_simple() { check_assist( - convert_ufcs_to_method, + unqualify_method_call, r#" struct S; impl S { fn f(self, S: S) {} } @@ -120,9 +120,9 @@ fn f() { S.f(S); }"#, } #[test] - fn ufcs2method_trait() { + fn unqualify_method_call_trait() { check_assist( - convert_ufcs_to_method, + unqualify_method_call, r#" //- minicore: add fn f() { ::$0add(2, 2); }"#, @@ -131,7 +131,7 @@ fn f() { 2.add(2); }"#, ); check_assist( - convert_ufcs_to_method, + unqualify_method_call, r#" //- minicore: add fn f() { core::ops::Add::$0add(2, 2); }"#, @@ -140,7 +140,7 @@ fn f() { 2.add(2); }"#, ); check_assist( - convert_ufcs_to_method, + unqualify_method_call, r#" //- minicore: add use core::ops::Add; @@ -152,9 +152,9 @@ fn f() { 2.add(2); }"#, } #[test] - fn ufcs2method_single_arg() { + fn unqualify_method_call_single_arg() { check_assist( - convert_ufcs_to_method, + unqualify_method_call, r#" struct S; impl S { fn f(self) {} } @@ -167,9 +167,9 @@ fn f() { 2.add(2); }"#, } #[test] - fn ufcs2method_parens() { + fn unqualify_method_call_parens() { check_assist( - convert_ufcs_to_method, + unqualify_method_call, r#" //- minicore: deref struct S; @@ -189,9 +189,9 @@ fn f() { (&S).deref(); }"#, } #[test] - fn ufcs2method_doesnt_apply_with_cursor_not_on_path() { + fn unqualify_method_call_doesnt_apply_with_cursor_not_on_path() { check_assist_not_applicable( - convert_ufcs_to_method, + unqualify_method_call, r#" //- minicore: add fn f() { core::ops::Add::add(2,$0 2); }"#, @@ -199,9 +199,9 @@ fn f() { core::ops::Add::add(2,$0 2); }"#, } #[test] - fn ufcs2method_doesnt_apply_with_no_self() { + fn unqualify_method_call_doesnt_apply_with_no_self() { check_assist_not_applicable( - convert_ufcs_to_method, + unqualify_method_call, r#" struct S; impl S { fn assoc(S: S, S: S) {} } diff --git a/crates/ide-assists/src/lib.rs b/crates/ide-assists/src/lib.rs index 6da51cfa4f..6747b95007 100644 --- a/crates/ide-assists/src/lib.rs +++ b/crates/ide-assists/src/lib.rs @@ -126,7 +126,6 @@ mod handlers { mod convert_to_guarded_return; mod convert_two_arm_bool_match_to_matches_macro; mod convert_while_to_loop; - mod convert_ufcs_to_method; mod destructure_tuple_binding; mod expand_glob_import; mod extract_expressions_from_format_string; @@ -202,6 +201,7 @@ mod handlers { mod unnecessary_async; mod unwrap_block; mod unwrap_result_return_type; + mod unqualify_method_call; mod wrap_return_type_in_result; pub(crate) fn all() -> &'static [Handler] { @@ -219,7 +219,6 @@ mod handlers { convert_bool_then::convert_bool_then_to_if, convert_bool_then::convert_if_to_bool_then, convert_comment_block::convert_comment_block, - convert_ufcs_to_method::convert_ufcs_to_method, convert_integer_literal::convert_integer_literal, convert_into_to_from::convert_into_to_from, convert_iter_for_each_to_for::convert_iter_for_each_to_for, @@ -308,6 +307,7 @@ mod handlers { unwrap_block::unwrap_block, unwrap_result_return_type::unwrap_result_return_type, unwrap_tuple::unwrap_tuple, + unqualify_method_call::unqualify_method_call, 
wrap_return_type_in_result::wrap_return_type_in_result, // These are manually sorted for better priorities. By default, // priority is determined by the size of the target range (smaller diff --git a/crates/ide-assists/src/tests/generated.rs b/crates/ide-assists/src/tests/generated.rs index d84f343c55..2644e7dd11 100644 --- a/crates/ide-assists/src/tests/generated.rs +++ b/crates/ide-assists/src/tests/generated.rs @@ -554,25 +554,6 @@ fn main() { ) } -#[test] -fn doctest_convert_ufcs_to_method() { - check_doc_test( - "convert_ufcs_to_method", - r#####" -fn main() { - std::ops::Add::add$0(1, 2); -} -mod std { pub mod ops { pub trait Add { fn add(self, _: Self) {} } impl Add for i32 {} } } -"#####, - r#####" -fn main() { - 1.add(2); -} -mod std { pub mod ops { pub trait Add { fn add(self, _: Self) {} } impl Add for i32 {} } } -"#####, - ) -} - #[test] fn doctest_convert_while_to_loop() { check_doc_test( @@ -2552,6 +2533,25 @@ pub async fn bar() { foo() } ) } +#[test] +fn doctest_unqualify_method_call() { + check_doc_test( + "unqualify_method_call", + r#####" +fn main() { + std::ops::Add::add$0(1, 2); +} +mod std { pub mod ops { pub trait Add { fn add(self, _: Self) {} } impl Add for i32 {} } } +"#####, + r#####" +fn main() { + 1.add(2); +} +mod std { pub mod ops { pub trait Add { fn add(self, _: Self) {} } impl Add for i32 {} } } +"#####, + ) +} + #[test] fn doctest_unwrap_block() { check_doc_test( From bdaad9eb157b3a56f318d7e818c80bb65236211e Mon Sep 17 00:00:00 2001 From: Maybe Waffle Date: Mon, 9 Jan 2023 14:26:48 +0000 Subject: [PATCH 0786/1945] Make `qualify_method_call` `RefactorRewrite` --- crates/ide-assists/src/handlers/qualify_method_call.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/ide-assists/src/handlers/qualify_method_call.rs b/crates/ide-assists/src/handlers/qualify_method_call.rs index 1ea87429c5..e7014597a1 100644 --- a/crates/ide-assists/src/handlers/qualify_method_call.rs +++ b/crates/ide-assists/src/handlers/qualify_method_call.rs @@ -53,7 +53,7 @@ pub(crate) fn qualify_method_call(acc: &mut Assists, ctx: &AssistContext<'_>) -> let qualify_candidate = QualifyCandidate::ImplMethod(ctx.sema.db, call, resolved_call); acc.add( - AssistId("qualify_method_call", AssistKind::RefactorInline), + AssistId("qualify_method_call", AssistKind::RefactorRewrite), format!("Qualify `{ident}` method call"), range, |builder| { From 9eb50d3cde97690f8149f560b70f56f6dafa4da3 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 9 Jan 2023 17:03:36 +0100 Subject: [PATCH 0787/1945] Make it clearer when the server expects an initialized notification --- lib/lsp-server/src/lib.rs | 24 ++++++++---------------- 1 file changed, 8 insertions(+), 16 deletions(-) diff --git a/lib/lsp-server/src/lib.rs b/lib/lsp-server/src/lib.rs index b95cec4f01..beccde40a8 100644 --- a/lib/lsp-server/src/lib.rs +++ b/lib/lsp-server/src/lib.rs @@ -114,10 +114,8 @@ impl Connection { /// ``` pub fn initialize_start(&self) -> Result<(RequestId, serde_json::Value), ProtocolError> { loop { - match self.receiver.recv() { - Ok(Message::Request(req)) if req.is_initialize() => { - return Ok((req.id, req.params)) - } + break match self.receiver.recv() { + Ok(Message::Request(req)) if req.is_initialize() => Ok((req.id, req.params)), // Respond to non-initialize requests with ServerNotInitialized Ok(Message::Request(req)) => { let resp = Response::new_err( @@ -126,14 +124,11 @@ impl Connection { format!("expected initialize request, got {req:?}"), ); self.sender.send(resp.into()).unwrap(); + continue; 
} - Ok(msg) => { - return Err(ProtocolError(format!("expected initialize request, got {msg:?}"))) - } + Ok(msg) => Err(ProtocolError(format!("expected initialize request, got {msg:?}"))), Err(e) => { - return Err(ProtocolError(format!( - "expected initialize request, got error: {e}" - ))) + Err(ProtocolError(format!("expected initialize request, got error: {e}"))) } }; } @@ -148,17 +143,14 @@ impl Connection { let resp = Response::new_ok(initialize_id, initialize_result); self.sender.send(resp.into()).unwrap(); match &self.receiver.recv() { - Ok(Message::Notification(n)) if n.is_initialized() => (), + Ok(Message::Notification(n)) if n.is_initialized() => Ok(()), Ok(msg) => { - return Err(ProtocolError(format!("expected Message::Notification, got: {msg:?}",))) + Err(ProtocolError(format!(r#"expected initialized notification, got: {msg:?}"#))) } Err(e) => { - return Err(ProtocolError(format!( - "expected initialized notification, got error: {e}", - ))) + Err(ProtocolError(format!("expected initialized notification, got error: {e}",))) } } - Ok(()) } /// Initialize the connection. Sends the server capabilities From 1b8141b54cb5f89bd3dee7bfbab41109d1b48cb6 Mon Sep 17 00:00:00 2001 From: Ian Chamberlain Date: Tue, 3 Jan 2023 10:16:16 -0500 Subject: [PATCH 0788/1945] Parse + decorate rendered ANSI cargo output Use ANSI control characters to display text decorations matching the VScode terminal theme, and strip them out when providing text content for rustc diagnostics. This adds the small `anser` library to parse the control codes, and it also supports HTML output so it should be fairly easy to switch to a rendered HTML/webview implementation if desired. --- editors/code/package-lock.json | 11 ++ editors/code/package.json | 1 + editors/code/src/client.ts | 11 +- editors/code/src/diagnostics.ts | 212 ++++++++++++++++++++++++++++++++ editors/code/src/main.ts | 61 ++++++--- 5 files changed, 272 insertions(+), 24 deletions(-) create mode 100644 editors/code/src/diagnostics.ts diff --git a/editors/code/package-lock.json b/editors/code/package-lock.json index ee69d22476..4844837a06 100644 --- a/editors/code/package-lock.json +++ b/editors/code/package-lock.json @@ -9,6 +9,7 @@ "version": "0.5.0-dev", "license": "MIT OR Apache-2.0", "dependencies": { + "anser": "^2.1.1", "d3": "^7.6.1", "d3-graphviz": "^5.0.2", "vscode-languageclient": "^8.0.2" @@ -394,6 +395,11 @@ "url": "https://github.com/sponsors/epoberezkin" } }, + "node_modules/anser": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/anser/-/anser-2.1.1.tgz", + "integrity": "sha512-nqLm4HxOTpeLOxcmB3QWmV5TcDFhW9y/fyQ+hivtDFcK4OQ+pQ5fzPnXHM1Mfcm0VkLtvVi1TCPr++Qy0Q/3EQ==" + }, "node_modules/ansi-regex": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", @@ -4096,6 +4102,11 @@ "uri-js": "^4.2.2" } }, + "anser": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/anser/-/anser-2.1.1.tgz", + "integrity": "sha512-nqLm4HxOTpeLOxcmB3QWmV5TcDFhW9y/fyQ+hivtDFcK4OQ+pQ5fzPnXHM1Mfcm0VkLtvVi1TCPr++Qy0Q/3EQ==" + }, "ansi-regex": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", diff --git a/editors/code/package.json b/editors/code/package.json index 468368668f..3fe189e2b3 100644 --- a/editors/code/package.json +++ b/editors/code/package.json @@ -35,6 +35,7 @@ "test": "cross-env TEST_VARIABLE=test node ./out/tests/runTests.js" }, "dependencies": { + "anser": "^2.1.1", "d3": "^7.6.1", "d3-graphviz": "^5.0.2", "vscode-languageclient": 
"^8.0.2" diff --git a/editors/code/src/client.ts b/editors/code/src/client.ts index e6595340aa..74cf44f42f 100644 --- a/editors/code/src/client.ts +++ b/editors/code/src/client.ts @@ -3,6 +3,7 @@ import * as vscode from "vscode"; import * as ra from "../src/lsp_ext"; import * as Is from "vscode-languageclient/lib/common/utils/is"; import { assert } from "./util"; +import * as diagnostics from "./diagnostics"; import { WorkspaceEdit } from "vscode"; import { Config, substituteVSCodeVariables } from "./config"; import { randomUUID } from "crypto"; @@ -120,12 +121,12 @@ export async function createClient( }, async handleDiagnostics( uri: vscode.Uri, - diagnostics: vscode.Diagnostic[], + diagnosticList: vscode.Diagnostic[], next: lc.HandleDiagnosticsSignature ) { const preview = config.previewRustcOutput; const errorCode = config.useRustcErrorCode; - diagnostics.forEach((diag, idx) => { + diagnosticList.forEach((diag, idx) => { // Abuse the fact that VSCode leaks the LSP diagnostics data field through the // Diagnostic class, if they ever break this we are out of luck and have to go // back to the worst diagnostics experience ever:) @@ -154,8 +155,8 @@ export async function createClient( } diag.code = { target: vscode.Uri.from({ - scheme: "rust-analyzer-diagnostics-view", - path: "/diagnostic message", + scheme: diagnostics.URI_SCHEME, + path: `/diagnostic message [${idx.toString()}]`, fragment: uri.toString(), query: idx.toString(), }), @@ -163,7 +164,7 @@ export async function createClient( }; } }); - return next(uri, diagnostics); + return next(uri, diagnosticList); }, async provideHover( document: vscode.TextDocument, diff --git a/editors/code/src/diagnostics.ts b/editors/code/src/diagnostics.ts new file mode 100644 index 0000000000..9695d8bf26 --- /dev/null +++ b/editors/code/src/diagnostics.ts @@ -0,0 +1,212 @@ +import * as anser from "anser"; +import * as vscode from "vscode"; +import { ProviderResult, Range, TextEditorDecorationType, ThemeColor, window } from "vscode"; +import { Ctx } from "./ctx"; + +export const URI_SCHEME = "rust-analyzer-diagnostics-view"; + +export class TextDocumentProvider implements vscode.TextDocumentContentProvider { + private _onDidChange = new vscode.EventEmitter(); + + public constructor(private readonly ctx: Ctx) {} + + get onDidChange(): vscode.Event { + return this._onDidChange.event; + } + + triggerUpdate(uri: vscode.Uri) { + if (uri.scheme === URI_SCHEME) { + this._onDidChange.fire(uri); + } + } + + dispose() { + this._onDidChange.dispose(); + } + + async provideTextDocumentContent(uri: vscode.Uri): Promise { + const contents = getRenderedDiagnostic(this.ctx, uri); + return anser.ansiToText(contents); + } +} + +function getRenderedDiagnostic(ctx: Ctx, uri: vscode.Uri): string { + const diags = ctx.client?.diagnostics?.get(vscode.Uri.parse(uri.fragment, true)); + if (!diags) { + return "Unable to find original rustc diagnostic"; + } + + const diag = diags[parseInt(uri.query)]; + if (!diag) { + return "Unable to find original rustc diagnostic"; + } + const rendered = (diag as unknown as { data?: { rendered?: string } }).data?.rendered; + + if (!rendered) { + return "Unable to find original rustc diagnostic"; + } + + return rendered; +} + +interface AnserStyle { + fg: string; + bg: string; + fg_truecolor: string; + bg_truecolor: string; + decorations: Array; +} + +export class AnsiDecorationProvider implements vscode.Disposable { + private _decorationTypes = new Map(); + + public constructor(private readonly ctx: Ctx) {} + + dispose(): void { + for 
(const decorationType of this._decorationTypes.values()) { + decorationType.dispose(); + } + + this._decorationTypes.clear(); + } + + async provideDecorations(editor: vscode.TextEditor) { + if (editor.document.uri.scheme !== URI_SCHEME) { + return; + } + + const decorations = (await this._getDecorations(editor.document.uri)) || []; + for (const [decorationType, ranges] of decorations) { + editor.setDecorations(decorationType, ranges); + } + } + + private _getDecorations( + uri: vscode.Uri + ): ProviderResult<[TextEditorDecorationType, Range[]][]> { + const stringContents = getRenderedDiagnostic(this.ctx, uri); + const lines = stringContents.split("\n"); + + const result = new Map(); + // Populate all known decoration types in the result. This forces any + // lingering decorations to be cleared if the text content changes to + // something without ANSI codes for a given decoration type. + for (const decorationType of this._decorationTypes.values()) { + result.set(decorationType, []); + } + + for (const [lineNumber, line] of lines.entries()) { + const totalEscapeLength = 0; + + // eslint-disable-next-line camelcase + const parsed = anser.ansiToJson(line, { use_classes: true }); + + let offset = 0; + + for (const span of parsed) { + const { content, ...style } = span; + + const range = new Range( + lineNumber, + offset - totalEscapeLength, + lineNumber, + offset + content.length - totalEscapeLength + ); + + offset += content.length; + + const decorationType = this._getDecorationType(style); + + if (!result.has(decorationType)) { + result.set(decorationType, []); + } + + result.get(decorationType)!.push(range); + } + } + + return [...result]; + } + + private _getDecorationType(style: AnserStyle): TextEditorDecorationType { + let decorationType = this._decorationTypes.get(style); + + if (decorationType) { + return decorationType; + } + + const fontWeight = style.decorations.find((s) => s === "bold"); + const fontStyle = style.decorations.find((s) => s === "italic"); + const textDecoration = style.decorations.find((s) => s === "underline"); + + decorationType = window.createTextEditorDecorationType({ + backgroundColor: AnsiDecorationProvider._convertColor(style.bg, style.bg_truecolor), + color: AnsiDecorationProvider._convertColor(style.fg, style.fg_truecolor), + fontWeight, + fontStyle, + textDecoration, + }); + + this._decorationTypes.set(style, decorationType); + + return decorationType; + } + + // NOTE: This could just be a kebab-case to camelCase conversion, but I think it's + // a short enough list to just write these by hand + static readonly _anserToThemeColor: Record = { + "ansi-black": "ansiBlack", + "ansi-white": "ansiWhite", + "ansi-red": "ansiRed", + "ansi-green": "ansiGreen", + "ansi-yellow": "ansiYellow", + "ansi-blue": "ansiBlue", + "ansi-magenta": "ansiMagenta", + "ansi-cyan": "ansiCyan", + + "ansi-bright-black": "ansiBrightBlack", + "ansi-bright-white": "ansiBrightWhite", + "ansi-bright-red": "ansiBrightRed", + "ansi-bright-green": "ansiBrightGreen", + "ansi-bright-yellow": "ansiBrightYellow", + "ansi-bright-blue": "ansiBrightBlue", + "ansi-bright-magenta": "ansiBrightMagenta", + "ansi-bright-cyan": "ansiBrightCyan", + }; + + private static _convertColor( + color?: string, + truecolor?: string + ): ThemeColor | string | undefined { + if (!color) { + return undefined; + } + + if (color === "ansi-truecolor") { + if (!truecolor) { + return undefined; + } + return `rgb(${truecolor})`; + } + + const paletteMatch = color.match(/ansi-palette-(.+)/); + if (paletteMatch) { + const 
paletteColor = paletteMatch[1]; + // anser won't return both the RGB and the color name at the same time, + // so just fake a single foreground control char with the palette number: + const spans = anser.ansiToJson(`\x1b[38;5;${paletteColor}m`); + const rgb = spans[1].fg; + + if (rgb) { + return `rgb(${rgb})`; + } + } + + const themeColor = AnsiDecorationProvider._anserToThemeColor[color]; + if (themeColor) { + return new ThemeColor("terminal." + themeColor); + } + + return undefined; + } +} diff --git a/editors/code/src/main.ts b/editors/code/src/main.ts index 9a9667b2cd..dd439317c7 100644 --- a/editors/code/src/main.ts +++ b/editors/code/src/main.ts @@ -3,6 +3,7 @@ import * as lc from "vscode-languageclient/node"; import * as commands from "./commands"; import { CommandFactory, Ctx, fetchWorkspace } from "./ctx"; +import * as diagnostics from "./diagnostics"; import { activateTaskProvider } from "./tasks"; import { setContextValue } from "./util"; @@ -48,30 +49,52 @@ async function activateServer(ctx: Ctx): Promise { ctx.pushExtCleanup(activateTaskProvider(ctx.config)); } + const diagnosticProvider = new diagnostics.TextDocumentProvider(ctx); ctx.pushExtCleanup( vscode.workspace.registerTextDocumentContentProvider( - "rust-analyzer-diagnostics-view", - new (class implements vscode.TextDocumentContentProvider { - async provideTextDocumentContent(uri: vscode.Uri): Promise { - const diags = ctx.client?.diagnostics?.get( - vscode.Uri.parse(uri.fragment, true) - ); - if (!diags) { - return "Unable to find original rustc diagnostic"; - } - - const diag = diags[parseInt(uri.query)]; - if (!diag) { - return "Unable to find original rustc diagnostic"; - } - const rendered = (diag as unknown as { data?: { rendered?: string } }).data - ?.rendered; - return rendered ?? 
"Unable to find original rustc diagnostic"; - } - })() + diagnostics.URI_SCHEME, + diagnosticProvider ) ); + const decorationProvider = new diagnostics.AnsiDecorationProvider(ctx); + ctx.pushExtCleanup(decorationProvider); + + async function decorateVisibleEditors(document: vscode.TextDocument) { + for (const editor of vscode.window.visibleTextEditors) { + if (document === editor.document) { + await decorationProvider.provideDecorations(editor); + } + } + } + + vscode.workspace.onDidChangeTextDocument( + async (event) => await decorateVisibleEditors(event.document), + null, + ctx.subscriptions + ); + vscode.workspace.onDidOpenTextDocument(decorateVisibleEditors, null, ctx.subscriptions); + vscode.window.onDidChangeActiveTextEditor( + async (editor) => { + if (editor) { + diagnosticProvider.triggerUpdate(editor.document.uri); + await decorateVisibleEditors(editor.document); + } + }, + null, + ctx.subscriptions + ); + vscode.window.onDidChangeVisibleTextEditors( + async (visibleEditors) => { + for (const editor of visibleEditors) { + diagnosticProvider.triggerUpdate(editor.document.uri); + await decorationProvider.provideDecorations(editor); + } + }, + null, + ctx.subscriptions + ); + vscode.workspace.onDidChangeWorkspaceFolders( async (_) => ctx.onWorkspaceFolderChanges(), null, From 40207906f42c018a775dfdffdfeb89c99660fe0a Mon Sep 17 00:00:00 2001 From: Ian Chamberlain Date: Wed, 28 Dec 2022 09:41:24 -0500 Subject: [PATCH 0789/1945] Default to use colored ANSI diagnostics --- crates/flycheck/src/lib.rs | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/crates/flycheck/src/lib.rs b/crates/flycheck/src/lib.rs index 590a93fbaa..2911c2589a 100644 --- a/crates/flycheck/src/lib.rs +++ b/crates/flycheck/src/lib.rs @@ -297,8 +297,12 @@ impl FlycheckActor { let mut cmd = Command::new(toolchain::cargo()); cmd.arg(command); cmd.current_dir(&self.root); - cmd.args(["--workspace", "--message-format=json", "--manifest-path"]) - .arg(self.root.join("Cargo.toml").as_os_str()); + cmd.args([ + "--workspace", + "--message-format=json-diagnostic-rendered-ansi", + "--manifest-path", + ]) + .arg(self.root.join("Cargo.toml").as_os_str()); for target in target_triples { cmd.args(["--target", target.as_str()]); From c3e4bc313611d571123d4e4257af58784e42f47d Mon Sep 17 00:00:00 2001 From: Ian Chamberlain Date: Sun, 25 Dec 2022 13:52:42 -0500 Subject: [PATCH 0790/1945] Update docs to include note about ANSI diagnostics --- crates/rust-analyzer/src/config.rs | 4 +++- docs/user/generated_config.adoc | 4 +++- editors/code/package.json | 2 +- 3 files changed, 7 insertions(+), 3 deletions(-) diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index cb55a32758..006256544f 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs @@ -160,7 +160,9 @@ config_data! { check_noDefaultFeatures | checkOnSave_noDefaultFeatures: Option = "null", /// Override the command rust-analyzer uses instead of `cargo check` for /// diagnostics on save. The command is required to output json and - /// should therefore include `--message-format=json` or a similar option. + /// should therefore include `--message-format=json` or a similar option + /// (for colored diagnostics, use + /// `--message-format=json-diagnostic-rendered-ansi`). 
/// /// If you're changing this because you're using some tool wrapping /// Cargo, you might also want to change diff --git a/docs/user/generated_config.adoc b/docs/user/generated_config.adoc index 755c69e12c..5a3019831a 100644 --- a/docs/user/generated_config.adoc +++ b/docs/user/generated_config.adoc @@ -173,7 +173,9 @@ Whether to pass `--no-default-features` to Cargo. Defaults to -- Override the command rust-analyzer uses instead of `cargo check` for diagnostics on save. The command is required to output json and -should therefore include `--message-format=json` or a similar option. +should therefore include `--message-format=json` or a similar option +(for colored diagnostics, use +`--message-format=json-diagnostic-rendered-ansi`). If you're changing this because you're using some tool wrapping Cargo, you might also want to change diff --git a/editors/code/package.json b/editors/code/package.json index 3fe189e2b3..77da5e5453 100644 --- a/editors/code/package.json +++ b/editors/code/package.json @@ -644,7 +644,7 @@ ] }, "rust-analyzer.check.overrideCommand": { - "markdownDescription": "Override the command rust-analyzer uses instead of `cargo check` for\ndiagnostics on save. The command is required to output json and\nshould therefore include `--message-format=json` or a similar option.\n\nIf you're changing this because you're using some tool wrapping\nCargo, you might also want to change\n`#rust-analyzer.cargo.buildScripts.overrideCommand#`.\n\nIf there are multiple linked projects, this command is invoked for\neach of them, with the working directory being the project root\n(i.e., the folder containing the `Cargo.toml`).\n\nAn example command would be:\n\n```bash\ncargo check --workspace --message-format=json --all-targets\n```\n.", + "markdownDescription": "Override the command rust-analyzer uses instead of `cargo check` for\ndiagnostics on save. 
The command is required to output json and\nshould therefore include `--message-format=json` or a similar option\n(for colored diagnostics, use\n`--message-format=json-diagnostic-rendered-ansi`).\n\nIf you're changing this because you're using some tool wrapping\nCargo, you might also want to change\n`#rust-analyzer.cargo.buildScripts.overrideCommand#`.\n\nIf there are multiple linked projects, this command is invoked for\neach of them, with the working directory being the project root\n(i.e., the folder containing the `Cargo.toml`).\n\nAn example command would be:\n\n```bash\ncargo check --workspace --message-format=json --all-targets\n```\n.", "default": null, "type": [ "null", From 738ce83d858aa498c3ff28be3417101ab5f9ce6f Mon Sep 17 00:00:00 2001 From: Ian Chamberlain Date: Tue, 3 Jan 2023 10:49:47 -0500 Subject: [PATCH 0791/1945] Strip colors before matching preview diagnostics --- editors/code/src/client.ts | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/editors/code/src/client.ts b/editors/code/src/client.ts index 74cf44f42f..c6d64ebc1e 100644 --- a/editors/code/src/client.ts +++ b/editors/code/src/client.ts @@ -1,3 +1,4 @@ +import * as anser from "anser"; import * as lc from "vscode-languageclient/node"; import * as vscode from "vscode"; import * as ra from "../src/lsp_ext"; @@ -139,9 +140,10 @@ export async function createClient( ?.rendered; if (rendered) { if (preview) { + const decolorized = anser.ansiToText(rendered); const index = - rendered.match(/^(note|help):/m)?.index || rendered.length; - diag.message = rendered + decolorized.match(/^(note|help):/m)?.index || rendered.length; + diag.message = decolorized .substring(0, index) .replace(/^ -->[^\n]+\n/m, ""); } From 65cf7abbe2ee75cce74592fa0af75356181566be Mon Sep 17 00:00:00 2001 From: Ian Chamberlain Date: Wed, 4 Jan 2023 12:04:45 -0500 Subject: [PATCH 0792/1945] Use experimental capability to enable color codes --- crates/flycheck/src/lib.rs | 18 ++++++++++++------ crates/rust-analyzer/src/config.rs | 10 ++++++++-- docs/user/generated_config.adoc | 4 ++-- editors/code/package.json | 2 +- editors/code/src/client.ts | 1 + 5 files changed, 24 insertions(+), 11 deletions(-) diff --git a/crates/flycheck/src/lib.rs b/crates/flycheck/src/lib.rs index 2911c2589a..11f7b068ec 100644 --- a/crates/flycheck/src/lib.rs +++ b/crates/flycheck/src/lib.rs @@ -47,6 +47,7 @@ pub enum FlycheckConfig { features: Vec, extra_args: Vec, extra_env: FxHashMap, + ansi_color_output: bool, }, CustomCommand { command: String, @@ -293,16 +294,21 @@ impl FlycheckActor { extra_args, features, extra_env, + ansi_color_output, } => { let mut cmd = Command::new(toolchain::cargo()); cmd.arg(command); cmd.current_dir(&self.root); - cmd.args([ - "--workspace", - "--message-format=json-diagnostic-rendered-ansi", - "--manifest-path", - ]) - .arg(self.root.join("Cargo.toml").as_os_str()); + cmd.arg("--workspace"); + + cmd.arg(if *ansi_color_output { + "--message-format=json-diagnostic-rendered-ansi" + } else { + "--message-format=json" + }); + + cmd.arg("--manifest-path"); + cmd.arg(self.root.join("Cargo.toml").as_os_str()); for target in target_triples { cmd.args(["--target", target.as_str()]); diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index 006256544f..b0afbdc9a4 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs @@ -161,8 +161,8 @@ config_data! { /// Override the command rust-analyzer uses instead of `cargo check` for /// diagnostics on save. 
The command is required to output json and /// should therefore include `--message-format=json` or a similar option - /// (for colored diagnostics, use - /// `--message-format=json-diagnostic-rendered-ansi`). + /// (if your client supports the `colorDiagnosticOutput` experimental + /// capability, you can use `--message-format=json-diagnostic-rendered-ansi`). /// /// If you're changing this because you're using some tool wrapping /// Cargo, you might also want to change @@ -1008,6 +1008,11 @@ impl Config { self.experimental("serverStatusNotification") } + /// Whether the client supports colored output for full diagnostics from `checkOnSave`. + pub fn color_diagnostic_output(&self) -> bool { + self.experimental("colorDiagnosticOutput") + } + pub fn publish_diagnostics(&self) -> bool { self.data.diagnostics_enable } @@ -1206,6 +1211,7 @@ impl Config { }, extra_args: self.data.check_extraArgs.clone(), extra_env: self.check_on_save_extra_env(), + ansi_color_output: self.color_diagnostic_output(), }, } } diff --git a/docs/user/generated_config.adoc b/docs/user/generated_config.adoc index 5a3019831a..b33a2e7952 100644 --- a/docs/user/generated_config.adoc +++ b/docs/user/generated_config.adoc @@ -174,8 +174,8 @@ Whether to pass `--no-default-features` to Cargo. Defaults to Override the command rust-analyzer uses instead of `cargo check` for diagnostics on save. The command is required to output json and should therefore include `--message-format=json` or a similar option -(for colored diagnostics, use -`--message-format=json-diagnostic-rendered-ansi`). +(if your client supports the `colorDiagnosticOutput` experimental +capability, you can use `--message-format=json-diagnostic-rendered-ansi`). If you're changing this because you're using some tool wrapping Cargo, you might also want to change diff --git a/editors/code/package.json b/editors/code/package.json index 77da5e5453..930564bd7c 100644 --- a/editors/code/package.json +++ b/editors/code/package.json @@ -644,7 +644,7 @@ ] }, "rust-analyzer.check.overrideCommand": { - "markdownDescription": "Override the command rust-analyzer uses instead of `cargo check` for\ndiagnostics on save. The command is required to output json and\nshould therefore include `--message-format=json` or a similar option\n(for colored diagnostics, use\n`--message-format=json-diagnostic-rendered-ansi`).\n\nIf you're changing this because you're using some tool wrapping\nCargo, you might also want to change\n`#rust-analyzer.cargo.buildScripts.overrideCommand#`.\n\nIf there are multiple linked projects, this command is invoked for\neach of them, with the working directory being the project root\n(i.e., the folder containing the `Cargo.toml`).\n\nAn example command would be:\n\n```bash\ncargo check --workspace --message-format=json --all-targets\n```\n.", + "markdownDescription": "Override the command rust-analyzer uses instead of `cargo check` for\ndiagnostics on save. 
The command is required to output json and\nshould therefore include `--message-format=json` or a similar option\n(if your client supports the `colorDiagnosticOutput` experimental\ncapability, you can use `--message-format=json-diagnostic-rendered-ansi`).\n\nIf you're changing this because you're using some tool wrapping\nCargo, you might also want to change\n`#rust-analyzer.cargo.buildScripts.overrideCommand#`.\n\nIf there are multiple linked projects, this command is invoked for\neach of them, with the working directory being the project root\n(i.e., the folder containing the `Cargo.toml`).\n\nAn example command would be:\n\n```bash\ncargo check --workspace --message-format=json --all-targets\n```\n.", "default": null, "type": [ "null", diff --git a/editors/code/src/client.ts b/editors/code/src/client.ts index c6d64ebc1e..82cdf0390a 100644 --- a/editors/code/src/client.ts +++ b/editors/code/src/client.ts @@ -333,6 +333,7 @@ class ExperimentalFeatures implements lc.StaticFeature { caps.codeActionGroup = true; caps.hoverActions = true; caps.serverStatusNotification = true; + caps.colorDiagnosticOutput = true; caps.commands = { commands: [ "rust-analyzer.runSingle", From 283dfc45dd8fb1e6bbe78c8b2e90cb5d543f2f06 Mon Sep 17 00:00:00 2001 From: Ian Chamberlain Date: Mon, 9 Jan 2023 11:44:38 -0500 Subject: [PATCH 0793/1945] Add docs for `colorDiagnosticOutput` capability --- docs/dev/lsp-extensions.md | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/docs/dev/lsp-extensions.md b/docs/dev/lsp-extensions.md index 1bbb4c2323..a4780af1a2 100644 --- a/docs/dev/lsp-extensions.md +++ b/docs/dev/lsp-extensions.md @@ -792,3 +792,29 @@ export interface ClientCommandOptions { commands: string[]; } ``` + +## Colored Diagnostic Output + +**Experimental Client Capability:** `{ "colorDiagnosticOutput": boolean }` + +If this capability is set, the "full compiler diagnostics" provided by `checkOnSave` +will include ANSI color and style codes to render the diagnostic in a similar manner +as `cargo`. This is translated into `--message-format=json-diagnostic-rendered-ansi` +when flycheck is run, instead of the default `--message-format=json`. + +The full compiler rendered diagnostics are included in the server response +regardless of this capability: + +```typescript +// https://microsoft.github.io/language-server-protocol/specifications/specification-current#diagnostic +export interface Diagnostic { + ... + data?: { + /** + * The human-readable compiler output as it would be printed to a terminal. + * Includes ANSI color and style codes if the client has set the experimental + * `colorDiagnosticOutput` capability. 
+ */ + rendered?: string; + }; +} From 68723043db2e7ee6e780c7f42cd9e57df72a1fd0 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 9 Jan 2023 19:29:28 +0100 Subject: [PATCH 0794/1945] Split out hir-def attribute handling parts into hir-expand --- Cargo.lock | 13 + crates/hir-def/Cargo.toml | 1 + crates/hir-def/src/adt.rs | 2 +- crates/hir-def/src/attr.rs | 332 +----------------- crates/hir-def/src/body.rs | 8 +- crates/hir-def/src/body/lower.rs | 2 +- crates/hir-def/src/data.rs | 2 +- crates/hir-def/src/db.rs | 2 +- crates/hir-def/src/expr.rs | 2 +- crates/hir-def/src/generics.rs | 2 +- crates/hir-def/src/item_scope.rs | 6 +- crates/hir-def/src/item_tree.rs | 15 +- crates/hir-def/src/item_tree/lower.rs | 24 +- crates/hir-def/src/item_tree/pretty.rs | 1 - crates/hir-def/src/keys.rs | 3 +- crates/hir-def/src/lib.rs | 13 +- crates/hir-def/src/nameres/attr_resolution.rs | 3 +- crates/hir-def/src/nameres/collector.rs | 3 +- crates/hir-def/src/nameres/diagnostics.rs | 3 +- crates/hir-def/src/path.rs | 2 +- crates/hir-def/src/path/lower.rs | 3 +- crates/hir-def/src/pretty.rs | 2 +- crates/hir-def/src/resolver.rs | 2 +- crates/hir-def/src/type_ref.rs | 2 +- crates/hir-expand/Cargo.toml | 1 + crates/hir-expand/src/attrs.rs | 313 +++++++++++++++++ crates/hir-expand/src/lib.rs | 3 + crates/hir-ty/Cargo.toml | 1 + crates/hir-ty/src/display.rs | 2 +- crates/hir-ty/src/interner.rs | 7 +- crates/hir-ty/src/lower.rs | 2 +- crates/hir-ty/src/utils.rs | 2 +- crates/hir/src/lib.rs | 3 +- crates/hir/src/semantics/source_to_def.rs | 3 +- crates/intern/Cargo.toml | 13 + .../src/intern.rs => intern/src/lib.rs} | 19 +- 36 files changed, 435 insertions(+), 382 deletions(-) create mode 100644 crates/hir-expand/src/attrs.rs create mode 100644 crates/intern/Cargo.toml rename crates/{hir-def/src/intern.rs => intern/src/lib.rs} (92%) diff --git a/Cargo.lock b/Cargo.lock index d27ae416f0..5f426d8856 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -519,6 +519,7 @@ dependencies = [ "hkalbasi-rustc-ap-rustc_abi", "hkalbasi-rustc-ap-rustc_index", "indexmap", + "intern", "itertools", "la-arena", "limit", @@ -544,6 +545,7 @@ dependencies = [ "either", "expect-test", "hashbrown", + "intern", "itertools", "la-arena", "limit", @@ -574,6 +576,7 @@ dependencies = [ "hir-def", "hir-expand", "hkalbasi-rustc-ap-rustc_index", + "intern", "itertools", "la-arena", "limit", @@ -803,6 +806,16 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "intern" +version = "0.0.0" +dependencies = [ + "dashmap", + "hashbrown", + "once_cell", + "rustc-hash", +] + [[package]] name = "itertools" version = "0.10.5" diff --git a/crates/hir-def/Cargo.toml b/crates/hir-def/Cargo.toml index 698be76656..a107842770 100644 --- a/crates/hir-def/Cargo.toml +++ b/crates/hir-def/Cargo.toml @@ -29,6 +29,7 @@ smallvec = "1.10.0" tracing = "0.1.35" stdx = { path = "../stdx", version = "0.0.0" } +intern = { path = "../intern", version = "0.0.0" } base-db = { path = "../base-db", version = "0.0.0" } syntax = { path = "../syntax", version = "0.0.0" } profile = { path = "../profile", version = "0.0.0" } diff --git a/crates/hir-def/src/adt.rs b/crates/hir-def/src/adt.rs index db3b419488..cd35ba00f6 100644 --- a/crates/hir-def/src/adt.rs +++ b/crates/hir-def/src/adt.rs @@ -8,6 +8,7 @@ use hir_expand::{ name::{AsName, Name}, HirFileId, InFile, }; +use intern::Interned; use la_arena::{Arena, ArenaMap}; use rustc_abi::{Integer, IntegerType}; use syntax::ast::{self, HasName, HasVisibility}; @@ -17,7 +18,6 @@ use crate::{ body::{CfgExpander, LowerCtx}, 
builtin_type::{BuiltinInt, BuiltinUint}, db::DefDatabase, - intern::Interned, item_tree::{AttrOwner, Field, FieldAstId, Fields, ItemTree, ModItem, RawVisibilityId}, layout::{Align, ReprFlags, ReprOptions}, nameres::diagnostics::DefDiagnostic, diff --git a/crates/hir-def/src/attr.rs b/crates/hir-def/src/attr.rs index ab5d180e1b..eb88a74e44 100644 --- a/crates/hir-def/src/attr.rs +++ b/crates/hir-def/src/attr.rs @@ -1,27 +1,27 @@ //! A higher level attributes based on TokenTree, with also some shortcuts. -use std::{fmt, hash::Hash, ops, sync::Arc}; +use std::{hash::Hash, ops, sync::Arc}; use base_db::CrateId; use cfg::{CfgExpr, CfgOptions}; use either::Either; -use hir_expand::{hygiene::Hygiene, name::AsName, HirFileId, InFile}; +use hir_expand::{ + attrs::{collect_attrs, Attr, AttrId, RawAttrs}, + HirFileId, InFile, +}; use itertools::Itertools; use la_arena::{ArenaMap, Idx, RawIdx}; -use mbe::{syntax_node_to_token_tree, DelimiterKind, Punct}; -use smallvec::{smallvec, SmallVec}; +use mbe::DelimiterKind; use syntax::{ - ast::{self, AstNode, HasAttrs, IsString}, - match_ast, AstPtr, AstToken, SmolStr, SyntaxNode, TextRange, TextSize, + ast::{self, HasAttrs, IsString}, + AstPtr, AstToken, SmolStr, TextRange, TextSize, }; use tt::Subtree; use crate::{ db::DefDatabase, - intern::Interned, item_tree::{AttrOwner, Fields, ItemTreeId, ItemTreeNode}, nameres::{ModuleOrigin, ModuleSource}, - path::{ModPath, PathKind}, src::{HasChildSource, HasSource}, AdtId, AttrDefId, EnumId, GenericParamId, LocalEnumVariantId, LocalFieldId, Lookup, MacroId, VariantId, @@ -47,12 +47,6 @@ impl From for String { } } -/// Syntactical attributes, without filtering of `cfg_attr`s. -#[derive(Default, Debug, Clone, PartialEq, Eq)] -pub(crate) struct RawAttrs { - entries: Option>, -} - #[derive(Default, Debug, Clone, PartialEq, Eq)] pub struct Attrs(RawAttrs); @@ -62,30 +56,21 @@ pub struct AttrsWithOwner { owner: AttrDefId, } -impl ops::Deref for RawAttrs { - type Target = [Attr]; - - fn deref(&self) -> &[Attr] { - match &self.entries { - Some(it) => &*it, - None => &[], - } - } -} impl Attrs { pub fn get(&self, id: AttrId) -> Option<&Attr> { (**self).iter().find(|attr| attr.id == id) } + + pub(crate) fn filter(db: &dyn DefDatabase, krate: CrateId, raw_attrs: RawAttrs) -> Attrs { + Attrs(raw_attrs.filter(db.upcast(), krate)) + } } impl ops::Deref for Attrs { type Target = [Attr]; fn deref(&self) -> &[Attr] { - match &self.0.entries { - Some(it) => &*it, - None => &[], - } + &self.0 } } @@ -97,114 +82,6 @@ impl ops::Deref for AttrsWithOwner { } } -impl RawAttrs { - pub(crate) const EMPTY: Self = Self { entries: None }; - - pub(crate) fn new(db: &dyn DefDatabase, owner: &dyn ast::HasAttrs, hygiene: &Hygiene) -> Self { - let entries = collect_attrs(owner) - .filter_map(|(id, attr)| match attr { - Either::Left(attr) => { - attr.meta().and_then(|meta| Attr::from_src(db, meta, hygiene, id)) - } - Either::Right(comment) => comment.doc_comment().map(|doc| Attr { - id, - input: Some(Interned::new(AttrInput::Literal(SmolStr::new(doc)))), - path: Interned::new(ModPath::from(hir_expand::name!(doc))), - }), - }) - .collect::>(); - - Self { entries: if entries.is_empty() { None } else { Some(entries) } } - } - - fn from_attrs_owner(db: &dyn DefDatabase, owner: InFile<&dyn ast::HasAttrs>) -> Self { - let hygiene = Hygiene::new(db.upcast(), owner.file_id); - Self::new(db, owner.value, &hygiene) - } - - pub(crate) fn merge(&self, other: Self) -> Self { - // FIXME: This needs to fixup `AttrId`s - match (&self.entries, other.entries) { - 
(None, None) => Self::EMPTY, - (None, entries @ Some(_)) => Self { entries }, - (Some(entries), None) => Self { entries: Some(entries.clone()) }, - (Some(a), Some(b)) => { - let last_ast_index = a.last().map_or(0, |it| it.id.ast_index + 1); - Self { - entries: Some( - a.iter() - .cloned() - .chain(b.iter().map(|it| { - let mut it = it.clone(); - it.id.ast_index += last_ast_index; - it - })) - .collect(), - ), - } - } - } - } - - /// Processes `cfg_attr`s, returning the resulting semantic `Attrs`. - pub(crate) fn filter(self, db: &dyn DefDatabase, krate: CrateId) -> Attrs { - let has_cfg_attrs = self.iter().any(|attr| { - attr.path.as_ident().map_or(false, |name| *name == hir_expand::name![cfg_attr]) - }); - if !has_cfg_attrs { - return Attrs(self); - } - - let crate_graph = db.crate_graph(); - let new_attrs = self - .iter() - .flat_map(|attr| -> SmallVec<[_; 1]> { - let is_cfg_attr = - attr.path.as_ident().map_or(false, |name| *name == hir_expand::name![cfg_attr]); - if !is_cfg_attr { - return smallvec![attr.clone()]; - } - - let subtree = match attr.token_tree_value() { - Some(it) => it, - _ => return smallvec![attr.clone()], - }; - - // Input subtree is: `(cfg, $(attr),+)` - // Split it up into a `cfg` subtree and the `attr` subtrees. - // FIXME: There should be a common API for this. - let mut parts = subtree.token_trees.split(|tt| { - matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(Punct { char: ',', .. }))) - }); - let cfg = match parts.next() { - Some(it) => it, - None => return smallvec![], - }; - let cfg = Subtree { delimiter: subtree.delimiter, token_trees: cfg.to_vec() }; - let cfg = CfgExpr::parse(&cfg); - let index = attr.id; - let attrs = parts.filter(|a| !a.is_empty()).filter_map(|attr| { - let tree = Subtree { delimiter: None, token_trees: attr.to_vec() }; - // FIXME hygiene - let hygiene = Hygiene::new_unhygienic(); - Attr::from_tt(db, &tree, &hygiene, index) - }); - - let cfg_options = &crate_graph[krate].cfg_options; - if cfg_options.check(&cfg) == Some(false) { - smallvec![] - } else { - cov_mark::hit!(cfg_attr_active); - - attrs.collect() - } - }) - .collect(); - - Attrs(RawAttrs { entries: Some(new_attrs) }) - } -} - impl Attrs { pub const EMPTY: Self = Self(RawAttrs::EMPTY); @@ -403,7 +280,7 @@ impl AttrsWithOwner { .raw_attrs(AttrOwner::ModItem(definition_tree_id.value.into())) .clone(), ModuleOrigin::BlockExpr { block } => RawAttrs::from_attrs_owner( - db, + db.upcast(), InFile::new(block.file_id, block.to_node(db.upcast())) .as_ref() .map(|it| it as &dyn ast::HasAttrs), @@ -439,7 +316,7 @@ impl AttrsWithOwner { GenericParamId::ConstParamId(it) => { let src = it.parent().child_source(db); RawAttrs::from_attrs_owner( - db, + db.upcast(), src.with_value(src.value[it.local_id()].as_ref().either( |it| match it { ast::TypeOrConstParam::Type(it) => it as _, @@ -452,7 +329,7 @@ impl AttrsWithOwner { GenericParamId::TypeParamId(it) => { let src = it.parent().child_source(db); RawAttrs::from_attrs_owner( - db, + db.upcast(), src.with_value(src.value[it.local_id()].as_ref().either( |it| match it { ast::TypeOrConstParam::Type(it) => it as _, @@ -464,14 +341,14 @@ impl AttrsWithOwner { } GenericParamId::LifetimeParamId(it) => { let src = it.parent.child_source(db); - RawAttrs::from_attrs_owner(db, src.with_value(&src.value[it.local_id])) + RawAttrs::from_attrs_owner(db.upcast(), src.with_value(&src.value[it.local_id])) } }, AttrDefId::ExternBlockId(it) => attrs_from_item_tree(it.lookup(db).id, db), }; - let attrs = raw_attrs.filter(db, def.krate(db)); - Self { attrs, owner: 
def } + let attrs = raw_attrs.filter(db.upcast(), def.krate(db)); + Self { attrs: Attrs(attrs), owner: def } } pub fn source_map(&self, db: &dyn DefDatabase) -> AttrSourceMap { @@ -627,40 +504,6 @@ fn doc_indent(attrs: &Attrs) -> usize { .unwrap_or(0) } -fn inner_attributes( - syntax: &SyntaxNode, -) -> Option>> { - let node = match_ast! { - match syntax { - ast::SourceFile(_) => syntax.clone(), - ast::ExternBlock(it) => it.extern_item_list()?.syntax().clone(), - ast::Fn(it) => it.body()?.stmt_list()?.syntax().clone(), - ast::Impl(it) => it.assoc_item_list()?.syntax().clone(), - ast::Module(it) => it.item_list()?.syntax().clone(), - ast::BlockExpr(it) => { - use syntax::SyntaxKind::{BLOCK_EXPR , EXPR_STMT}; - // Block expressions accept outer and inner attributes, but only when they are the outer - // expression of an expression statement or the final expression of another block expression. - let may_carry_attributes = matches!( - it.syntax().parent().map(|it| it.kind()), - Some(BLOCK_EXPR | EXPR_STMT) - ); - if !may_carry_attributes { - return None - } - syntax.clone() - }, - _ => return None, - } - }; - - let attrs = ast::AttrDocCommentIter::from_syntax_node(&node).filter(|el| match el { - Either::Left(attr) => attr.kind().is_inner(), - Either::Right(comment) => comment.is_inner(), - }); - Some(attrs) -} - #[derive(Debug)] pub struct AttrSourceMap { source: Vec>, @@ -779,128 +622,6 @@ fn get_doc_string_in_attr(it: &ast::Attr) -> Option { } } -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct AttrId { - pub(crate) ast_index: u32, -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct Attr { - pub(crate) id: AttrId, - pub(crate) path: Interned, - pub(crate) input: Option>, -} - -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub enum AttrInput { - /// `#[attr = "string"]` - Literal(SmolStr), - /// `#[attr(subtree)]` - TokenTree(tt::Subtree, mbe::TokenMap), -} - -impl fmt::Display for AttrInput { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - AttrInput::Literal(lit) => write!(f, " = \"{}\"", lit.escape_debug()), - AttrInput::TokenTree(subtree, _) => subtree.fmt(f), - } - } -} - -impl Attr { - fn from_src( - db: &dyn DefDatabase, - ast: ast::Meta, - hygiene: &Hygiene, - id: AttrId, - ) -> Option { - let path = Interned::new(ModPath::from_src(db.upcast(), ast.path()?, hygiene)?); - let input = if let Some(ast::Expr::Literal(lit)) = ast.expr() { - let value = match lit.kind() { - ast::LiteralKind::String(string) => string.value()?.into(), - _ => lit.syntax().first_token()?.text().trim_matches('"').into(), - }; - Some(Interned::new(AttrInput::Literal(value))) - } else if let Some(tt) = ast.token_tree() { - let (tree, map) = syntax_node_to_token_tree(tt.syntax()); - Some(Interned::new(AttrInput::TokenTree(tree, map))) - } else { - None - }; - Some(Attr { id, path, input }) - } - - fn from_tt( - db: &dyn DefDatabase, - tt: &tt::Subtree, - hygiene: &Hygiene, - id: AttrId, - ) -> Option { - let (parse, _) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MetaItem); - let ast = ast::Meta::cast(parse.syntax_node())?; - - Self::from_src(db, ast, hygiene, id) - } - - pub fn path(&self) -> &ModPath { - &self.path - } -} - -impl Attr { - /// #[path = "string"] - pub fn string_value(&self) -> Option<&SmolStr> { - match self.input.as_deref()? { - AttrInput::Literal(it) => Some(it), - _ => None, - } - } - - /// #[path(ident)] - pub fn single_ident_value(&self) -> Option<&tt::Ident> { - match self.input.as_deref()? 
{ - AttrInput::TokenTree(subtree, _) => match &*subtree.token_trees { - [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] => Some(ident), - _ => None, - }, - _ => None, - } - } - - /// #[path TokenTree] - pub fn token_tree_value(&self) -> Option<&Subtree> { - match self.input.as_deref()? { - AttrInput::TokenTree(subtree, _) => Some(subtree), - _ => None, - } - } - - /// Parses this attribute as a token tree consisting of comma separated paths. - pub fn parse_path_comma_token_tree(&self) -> Option + '_> { - let args = self.token_tree_value()?; - - if args.delimiter_kind() != Some(DelimiterKind::Parenthesis) { - return None; - } - let paths = args - .token_trees - .split(|tt| matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(Punct { char: ',', .. })))) - .filter_map(|tts| { - if tts.is_empty() { - return None; - } - let segments = tts.iter().filter_map(|tt| match tt { - tt::TokenTree::Leaf(tt::Leaf::Ident(id)) => Some(id.as_name()), - _ => None, - }); - Some(ModPath::from_segments(PathKind::Plain, segments)) - }); - - Some(paths) - } -} - #[derive(Debug, Clone, Copy)] pub struct AttrQuery<'attr> { attrs: &'attr Attrs, @@ -953,21 +674,6 @@ fn attrs_from_item_tree(id: ItemTreeId, db: &dyn DefDatabase tree.raw_attrs(mod_item.into()).clone() } -fn collect_attrs( - owner: &dyn ast::HasAttrs, -) -> impl Iterator)> { - let inner_attrs = inner_attributes(owner.syntax()).into_iter().flatten(); - let outer_attrs = - ast::AttrDocCommentIter::from_syntax_node(owner.syntax()).filter(|el| match el { - Either::Left(attr) => attr.kind().is_outer(), - Either::Right(comment) => comment.is_outer(), - }); - outer_attrs - .chain(inner_attrs) - .enumerate() - .map(|(id, attr)| (AttrId { ast_index: id as u32 }, attr)) -} - pub(crate) fn variants_attrs_source_map( db: &dyn DefDatabase, def: EnumId, diff --git a/crates/hir-def/src/body.rs b/crates/hir-def/src/body.rs index 78fbaa9d7d..9713256813 100644 --- a/crates/hir-def/src/body.rs +++ b/crates/hir-def/src/body.rs @@ -12,7 +12,9 @@ use base_db::CrateId; use cfg::{CfgExpr, CfgOptions}; use drop_bomb::DropBomb; use either::Either; -use hir_expand::{hygiene::Hygiene, ExpandError, ExpandResult, HirFileId, InFile, MacroCallId}; +use hir_expand::{ + attrs::RawAttrs, hygiene::Hygiene, ExpandError, ExpandResult, HirFileId, InFile, MacroCallId, +}; use la_arena::{Arena, ArenaMap}; use limit::Limit; use profile::Count; @@ -20,7 +22,7 @@ use rustc_hash::FxHashMap; use syntax::{ast, AstPtr, SyntaxNodePtr}; use crate::{ - attr::{Attrs, RawAttrs}, + attr::Attrs, db::DefDatabase, expr::{dummy_expr_id, Expr, ExprId, Label, LabelId, Pat, PatId}, item_scope::BuiltinShadowMode, @@ -64,7 +66,7 @@ impl CfgExpander { } pub(crate) fn parse_attrs(&self, db: &dyn DefDatabase, owner: &dyn ast::HasAttrs) -> Attrs { - RawAttrs::new(db, owner, &self.hygiene).filter(db, self.krate) + Attrs::filter(db, self.krate, RawAttrs::new(db.upcast(), owner, &self.hygiene)) } pub(crate) fn is_cfg_enabled(&self, db: &dyn DefDatabase, owner: &dyn ast::HasAttrs) -> bool { diff --git a/crates/hir-def/src/body/lower.rs b/crates/hir-def/src/body/lower.rs index e8da24e3ad..4c29e16b2b 100644 --- a/crates/hir-def/src/body/lower.rs +++ b/crates/hir-def/src/body/lower.rs @@ -10,6 +10,7 @@ use hir_expand::{ name::{name, AsName, Name}, AstId, ExpandError, HirFileId, InFile, }; +use intern::Interned; use la_arena::Arena; use once_cell::unsync::OnceCell; use profile::Count; @@ -33,7 +34,6 @@ use crate::{ Label, LabelId, Literal, MatchArm, Movability, Pat, PatId, RecordFieldPat, RecordLitField, Statement, }, - 
intern::Interned, item_scope::BuiltinShadowMode, path::{GenericArgs, Path}, type_ref::{Mutability, Rawness, TypeRef}, diff --git a/crates/hir-def/src/data.rs b/crates/hir-def/src/data.rs index e6b05f27a5..f461e85b01 100644 --- a/crates/hir-def/src/data.rs +++ b/crates/hir-def/src/data.rs @@ -3,6 +3,7 @@ use std::sync::Arc; use hir_expand::{name::Name, AstId, ExpandResult, HirFileId, InFile, MacroCallId, MacroDefKind}; +use intern::Interned; use smallvec::SmallVec; use syntax::ast; @@ -10,7 +11,6 @@ use crate::{ attr::Attrs, body::{Expander, Mark}, db::DefDatabase, - intern::Interned, item_tree::{self, AssocItem, FnFlags, ItemTree, ItemTreeId, ModItem, Param, TreeId}, nameres::{ attr_resolution::ResolvedAttr, diff --git a/crates/hir-def/src/db.rs b/crates/hir-def/src/db.rs index 431c825549..65cdd1b69b 100644 --- a/crates/hir-def/src/db.rs +++ b/crates/hir-def/src/db.rs @@ -4,6 +4,7 @@ use std::sync::Arc; use base_db::{salsa, CrateId, SourceDatabase, Upcast}; use either::Either; use hir_expand::{db::AstDatabase, HirFileId}; +use intern::Interned; use la_arena::ArenaMap; use syntax::{ast, AstPtr, SmolStr}; @@ -17,7 +18,6 @@ use crate::{ }, generics::GenericParams, import_map::ImportMap, - intern::Interned, item_tree::{AttrOwner, ItemTree}, lang_item::{LangItemTarget, LangItems}, nameres::{diagnostics::DefDiagnostic, DefMap}, diff --git a/crates/hir-def/src/expr.rs b/crates/hir-def/src/expr.rs index 7b65694211..48028b7c6a 100644 --- a/crates/hir-def/src/expr.rs +++ b/crates/hir-def/src/expr.rs @@ -15,11 +15,11 @@ use std::fmt; use hir_expand::name::Name; +use intern::Interned; use la_arena::{Idx, RawIdx}; use crate::{ builtin_type::{BuiltinFloat, BuiltinInt, BuiltinUint}, - intern::Interned, path::{GenericArgs, Path}, type_ref::{Mutability, Rawness, TypeRef}, BlockId, diff --git a/crates/hir-def/src/generics.rs b/crates/hir-def/src/generics.rs index f74559f5d6..b2ab0c30e0 100644 --- a/crates/hir-def/src/generics.rs +++ b/crates/hir-def/src/generics.rs @@ -9,6 +9,7 @@ use hir_expand::{ name::{AsName, Name}, ExpandResult, HirFileId, InFile, }; +use intern::Interned; use la_arena::{Arena, ArenaMap, Idx}; use once_cell::unsync::Lazy; use std::ops::DerefMut; @@ -20,7 +21,6 @@ use crate::{ child_by_source::ChildBySource, db::DefDatabase, dyn_map::DynMap, - intern::Interned, keys, src::{HasChildSource, HasSource}, type_ref::{LifetimeRef, TypeBound, TypeRef}, diff --git a/crates/hir-def/src/item_scope.rs b/crates/hir-def/src/item_scope.rs index c7b213b7e9..53a4173ff4 100644 --- a/crates/hir-def/src/item_scope.rs +++ b/crates/hir-def/src/item_scope.rs @@ -4,7 +4,7 @@ use std::collections::hash_map::Entry; use base_db::CrateId; -use hir_expand::{name::Name, AstId, MacroCallId}; +use hir_expand::{attrs::AttrId, name::Name, AstId, MacroCallId}; use itertools::Itertools; use once_cell::sync::Lazy; use profile::Count; @@ -14,8 +14,8 @@ use stdx::format_to; use syntax::ast; use crate::{ - attr::AttrId, db::DefDatabase, per_ns::PerNs, visibility::Visibility, AdtId, BuiltinType, - ConstId, HasModule, ImplId, LocalModuleId, MacroId, ModuleDefId, ModuleId, TraitId, + db::DefDatabase, per_ns::PerNs, visibility::Visibility, AdtId, BuiltinType, ConstId, HasModule, + ImplId, LocalModuleId, MacroId, ModuleDefId, ModuleId, TraitId, }; #[derive(Copy, Clone, Debug)] diff --git a/crates/hir-def/src/item_tree.rs b/crates/hir-def/src/item_tree.rs index 80297f8adf..3e1f7d4446 100644 --- a/crates/hir-def/src/item_tree.rs +++ b/crates/hir-def/src/item_tree.rs @@ -48,10 +48,12 @@ use base_db::CrateId; use either::Either; use 
hir_expand::{ ast_id_map::FileAstId, + attrs::RawAttrs, hygiene::Hygiene, name::{name, AsName, Name}, ExpandTo, HirFileId, InFile, }; +use intern::Interned; use la_arena::{Arena, Idx, IdxRange, RawIdx}; use profile::Count; use rustc_hash::FxHashMap; @@ -60,10 +62,9 @@ use stdx::never; use syntax::{ast, match_ast, SyntaxKind}; use crate::{ - attr::{Attrs, RawAttrs}, + attr::Attrs, db::DefDatabase, generics::GenericParams, - intern::Interned, path::{path, AssociatedTypeBinding, GenericArgs, ImportAlias, ModPath, Path, PathKind}, type_ref::{Mutability, TraitRef, TypeBound, TypeRef}, visibility::RawVisibility, @@ -120,7 +121,7 @@ impl ItemTree { let mut item_tree = match_ast! { match syntax { ast::SourceFile(file) => { - top_attrs = Some(RawAttrs::new(db, &file, ctx.hygiene())); + top_attrs = Some(RawAttrs::new(db.upcast(), &file, ctx.hygiene())); ctx.lower_module_items(&file) }, ast::MacroItems(items) => { @@ -152,7 +153,11 @@ impl ItemTree { /// Returns the inner attributes of the source file. pub fn top_level_attrs(&self, db: &dyn DefDatabase, krate: CrateId) -> Attrs { - self.attrs.get(&AttrOwner::TopLevel).unwrap_or(&RawAttrs::EMPTY).clone().filter(db, krate) + Attrs::filter( + db, + krate, + self.attrs.get(&AttrOwner::TopLevel).unwrap_or(&RawAttrs::EMPTY).clone(), + ) } pub(crate) fn raw_attrs(&self, of: AttrOwner) -> &RawAttrs { @@ -160,7 +165,7 @@ impl ItemTree { } pub(crate) fn attrs(&self, db: &dyn DefDatabase, krate: CrateId, of: AttrOwner) -> Attrs { - self.raw_attrs(of).clone().filter(db, krate) + Attrs::filter(db, krate, self.raw_attrs(of).clone()) } pub fn pretty_print(&self) -> String { diff --git a/crates/hir-def/src/item_tree/lower.rs b/crates/hir-def/src/item_tree/lower.rs index b25274bccc..27705cbbbd 100644 --- a/crates/hir-def/src/item_tree/lower.rs +++ b/crates/hir-def/src/item_tree/lower.rs @@ -99,7 +99,7 @@ impl<'a> Ctx<'a> { } fn lower_mod_item(&mut self, item: &ast::Item) -> Option { - let attrs = RawAttrs::new(self.db, item, self.hygiene()); + let attrs = RawAttrs::new(self.db.upcast(), item, self.hygiene()); let item: ModItem = match item { ast::Item::Struct(ast) => self.lower_struct(ast)?.into(), ast::Item::Union(ast) => self.lower_union(ast)?.into(), @@ -173,7 +173,7 @@ impl<'a> Ctx<'a> { for field in fields.fields() { if let Some(data) = self.lower_record_field(&field) { let idx = self.data().fields.alloc(data); - self.add_attrs(idx.into(), RawAttrs::new(self.db, &field, self.hygiene())); + self.add_attrs(idx.into(), RawAttrs::new(self.db.upcast(), &field, self.hygiene())); } } let end = self.next_field_idx(); @@ -194,7 +194,7 @@ impl<'a> Ctx<'a> { for (i, field) in fields.fields().enumerate() { let data = self.lower_tuple_field(i, &field); let idx = self.data().fields.alloc(data); - self.add_attrs(idx.into(), RawAttrs::new(self.db, &field, self.hygiene())); + self.add_attrs(idx.into(), RawAttrs::new(self.db.upcast(), &field, self.hygiene())); } let end = self.next_field_idx(); IdxRange::new(start..end) @@ -239,7 +239,10 @@ impl<'a> Ctx<'a> { for variant in variants.variants() { if let Some(data) = self.lower_variant(&variant) { let idx = self.data().variants.alloc(data); - self.add_attrs(idx.into(), RawAttrs::new(self.db, &variant, self.hygiene())); + self.add_attrs( + idx.into(), + RawAttrs::new(self.db.upcast(), &variant, self.hygiene()), + ); } } let end = self.next_variant_idx(); @@ -283,7 +286,10 @@ impl<'a> Ctx<'a> { }; let ty = Interned::new(self_type); let idx = self.data().params.alloc(Param::Normal(None, ty)); - self.add_attrs(idx.into(), 
RawAttrs::new(self.db, &self_param, self.hygiene())); + self.add_attrs( + idx.into(), + RawAttrs::new(self.db.upcast(), &self_param, self.hygiene()), + ); has_self_param = true; } for param in param_list.params() { @@ -307,7 +313,7 @@ impl<'a> Ctx<'a> { self.data().params.alloc(Param::Normal(name, ty)) } }; - self.add_attrs(idx.into(), RawAttrs::new(self.db, ¶m, self.hygiene())); + self.add_attrs(idx.into(), RawAttrs::new(self.db.upcast(), ¶m, self.hygiene())); } } let end_param = self.next_param_idx(); @@ -442,7 +448,7 @@ impl<'a> Ctx<'a> { let items = trait_def.assoc_item_list().map(|list| { list.assoc_items() .filter_map(|item| { - let attrs = RawAttrs::new(self.db, &item, self.hygiene()); + let attrs = RawAttrs::new(self.db.upcast(), &item, self.hygiene()); self.lower_assoc_item(&item).map(|item| { self.add_attrs(ModItem::from(item).into(), attrs); item @@ -471,7 +477,7 @@ impl<'a> Ctx<'a> { .flat_map(|it| it.assoc_items()) .filter_map(|item| { let assoc = self.lower_assoc_item(&item)?; - let attrs = RawAttrs::new(self.db, &item, self.hygiene()); + let attrs = RawAttrs::new(self.db.upcast(), &item, self.hygiene()); self.add_attrs(ModItem::from(assoc).into(), attrs); Some(assoc) }) @@ -541,7 +547,7 @@ impl<'a> Ctx<'a> { // (in other words, the knowledge that they're in an extern block must not be used). // This is because an extern block can contain macros whose ItemTree's top-level items // should be considered to be in an extern block too. - let attrs = RawAttrs::new(self.db, &item, self.hygiene()); + let attrs = RawAttrs::new(self.db.upcast(), &item, self.hygiene()); let id: ModItem = match item { ast::ExternItem::Fn(ast) => self.lower_function(&ast)?.into(), ast::ExternItem::Static(ast) => self.lower_static(&ast)?.into(), diff --git a/crates/hir-def/src/item_tree/pretty.rs b/crates/hir-def/src/item_tree/pretty.rs index 48c40df22f..8f230b87d0 100644 --- a/crates/hir-def/src/item_tree/pretty.rs +++ b/crates/hir-def/src/item_tree/pretty.rs @@ -3,7 +3,6 @@ use std::fmt::{self, Write}; use crate::{ - attr::RawAttrs, generics::{TypeOrConstParamData, WherePredicate, WherePredicateTypeTarget}, pretty::{print_path, print_type_bounds, print_type_ref}, visibility::RawVisibility, diff --git a/crates/hir-def/src/keys.rs b/crates/hir-def/src/keys.rs index c5cb9a2af5..72beec8186 100644 --- a/crates/hir-def/src/keys.rs +++ b/crates/hir-def/src/keys.rs @@ -2,12 +2,11 @@ use std::marker::PhantomData; -use hir_expand::MacroCallId; +use hir_expand::{attrs::AttrId, MacroCallId}; use rustc_hash::FxHashMap; use syntax::{ast, AstNode, AstPtr}; use crate::{ - attr::AttrId, dyn_map::{DynMap, Policy}, ConstId, EnumId, EnumVariantId, FieldId, FunctionId, ImplId, LifetimeParamId, Macro2Id, MacroRulesId, ProcMacroId, StaticId, StructId, TraitId, TypeAliasId, TypeOrConstParamId, diff --git a/crates/hir-def/src/lib.rs b/crates/hir-def/src/lib.rs index 8267ef09cb..cc0ea14d01 100644 --- a/crates/hir-def/src/lib.rs +++ b/crates/hir-def/src/lib.rs @@ -28,7 +28,6 @@ pub mod dyn_map; pub mod keys; pub mod item_tree; -pub mod intern; pub mod adt; pub mod data; @@ -61,10 +60,10 @@ use std::{ sync::Arc, }; -use attr::Attr; use base_db::{impl_intern_key, salsa, CrateId, ProcMacroKind}; use hir_expand::{ ast_id_map::FileAstId, + attrs::{Attr, AttrId, AttrInput}, builtin_attr_macro::BuiltinAttrExpander, builtin_derive_macro::BuiltinDeriveExpander, builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander}, @@ -82,7 +81,6 @@ use syntax::ast; use crate::{ adt::VariantData, - attr::AttrId, builtin_type::BuiltinType, item_tree::{ 
Const, Enum, Function, Impl, ItemTreeId, ItemTreeNode, MacroDef, MacroRules, ModItem, @@ -971,7 +969,7 @@ fn attr_macro_as_call_id( is_derive: bool, ) -> MacroCallId { let mut arg = match macro_attr.input.as_deref() { - Some(attr::AttrInput::TokenTree(tt, map)) => (tt.clone(), map.clone()), + Some(AttrInput::TokenTree(tt, map)) => (tt.clone(), map.clone()), _ => Default::default(), }; @@ -990,3 +988,10 @@ fn attr_macro_as_call_id( ); res } +intern::impl_internable!( + crate::type_ref::TypeRef, + crate::type_ref::TraitRef, + crate::type_ref::TypeBound, + crate::path::GenericArgs, + generics::GenericParams, +); diff --git a/crates/hir-def/src/nameres/attr_resolution.rs b/crates/hir-def/src/nameres/attr_resolution.rs index 3650204ee9..79cabeb0fb 100644 --- a/crates/hir-def/src/nameres/attr_resolution.rs +++ b/crates/hir-def/src/nameres/attr_resolution.rs @@ -1,10 +1,9 @@ //! Post-nameres attribute resolution. -use hir_expand::MacroCallId; +use hir_expand::{attrs::Attr, MacroCallId}; use syntax::{ast, SmolStr}; use crate::{ - attr::Attr, attr_macro_as_call_id, builtin_attr, db::DefDatabase, item_scope::BuiltinShadowMode, diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs index 160203b778..ad31e9aac2 100644 --- a/crates/hir-def/src/nameres/collector.rs +++ b/crates/hir-def/src/nameres/collector.rs @@ -10,6 +10,7 @@ use cfg::{CfgExpr, CfgOptions}; use either::Either; use hir_expand::{ ast_id_map::FileAstId, + attrs::{Attr, AttrId}, builtin_attr_macro::find_builtin_attr, builtin_derive_macro::find_builtin_derive, builtin_fn_macro::find_builtin_macro, @@ -26,7 +27,7 @@ use stdx::always; use syntax::{ast, SmolStr}; use crate::{ - attr::{Attr, AttrId, Attrs}, + attr::Attrs, attr_macro_as_call_id, db::DefDatabase, derive_macro_as_call_id, diff --git a/crates/hir-def/src/nameres/diagnostics.rs b/crates/hir-def/src/nameres/diagnostics.rs index 0661422919..74b25f4cc9 100644 --- a/crates/hir-def/src/nameres/diagnostics.rs +++ b/crates/hir-def/src/nameres/diagnostics.rs @@ -2,12 +2,11 @@ use base_db::CrateId; use cfg::{CfgExpr, CfgOptions}; -use hir_expand::MacroCallKind; +use hir_expand::{attrs::AttrId, MacroCallKind}; use la_arena::Idx; use syntax::ast::{self, AnyHasAttrs}; use crate::{ - attr::AttrId, item_tree::{self, ItemTreeId}, nameres::LocalModuleId, path::ModPath, diff --git a/crates/hir-def/src/path.rs b/crates/hir-def/src/path.rs index 592223f7d8..25a23fcd61 100644 --- a/crates/hir-def/src/path.rs +++ b/crates/hir-def/src/path.rs @@ -8,10 +8,10 @@ use std::{ use crate::{ body::LowerCtx, - intern::Interned, type_ref::{ConstScalarOrPath, LifetimeRef}, }; use hir_expand::name::Name; +use intern::Interned; use syntax::ast; use crate::type_ref::{TypeBound, TypeRef}; diff --git a/crates/hir-def/src/path/lower.rs b/crates/hir-def/src/path/lower.rs index cfa3a6baaf..d570191595 100644 --- a/crates/hir-def/src/path/lower.rs +++ b/crates/hir-def/src/path/lower.rs @@ -1,9 +1,10 @@ //! 
Transforms syntax into `Path` objects, ideally with accounting for hygiene -use crate::{intern::Interned, type_ref::ConstScalarOrPath}; +use crate::type_ref::ConstScalarOrPath; use either::Either; use hir_expand::name::{name, AsName}; +use intern::Interned; use syntax::ast::{self, AstNode, HasTypeBounds}; use super::AssociatedTypeBinding; diff --git a/crates/hir-def/src/pretty.rs b/crates/hir-def/src/pretty.rs index befd0c5ffa..1c0bd204d3 100644 --- a/crates/hir-def/src/pretty.rs +++ b/crates/hir-def/src/pretty.rs @@ -3,10 +3,10 @@ use std::fmt::{self, Write}; use hir_expand::mod_path::PathKind; +use intern::Interned; use itertools::Itertools; use crate::{ - intern::Interned, path::{GenericArg, GenericArgs, Path}, type_ref::{Mutability, TraitBoundModifier, TypeBound, TypeRef}, }; diff --git a/crates/hir-def/src/resolver.rs b/crates/hir-def/src/resolver.rs index 1ef7f9577f..86958e3dae 100644 --- a/crates/hir-def/src/resolver.rs +++ b/crates/hir-def/src/resolver.rs @@ -4,6 +4,7 @@ use std::{hash::BuildHasherDefault, sync::Arc}; use base_db::CrateId; use hir_expand::name::{name, Name}; use indexmap::IndexMap; +use intern::Interned; use rustc_hash::FxHashSet; use smallvec::{smallvec, SmallVec}; @@ -13,7 +14,6 @@ use crate::{ db::DefDatabase, expr::{ExprId, LabelId, PatId}, generics::{GenericParams, TypeOrConstParamData}, - intern::Interned, item_scope::{BuiltinShadowMode, BUILTIN_SCOPE}, nameres::DefMap, path::{ModPath, PathKind}, diff --git a/crates/hir-def/src/type_ref.rs b/crates/hir-def/src/type_ref.rs index f8bb78ddcf..0149fdaa43 100644 --- a/crates/hir-def/src/type_ref.rs +++ b/crates/hir-def/src/type_ref.rs @@ -7,13 +7,13 @@ use hir_expand::{ name::{AsName, Name}, AstId, }; +use intern::Interned; use syntax::ast::{self, HasName}; use crate::{ body::LowerCtx, builtin_type::{BuiltinInt, BuiltinType, BuiltinUint}, expr::Literal, - intern::Interned, path::Path, }; diff --git a/crates/hir-expand/Cargo.toml b/crates/hir-expand/Cargo.toml index 77eb1fd450..a73e690a7e 100644 --- a/crates/hir-expand/Cargo.toml +++ b/crates/hir-expand/Cargo.toml @@ -22,6 +22,7 @@ hashbrown = { version = "0.12.1", features = [ smallvec = { version = "1.10.0", features = ["const_new"] } stdx = { path = "../stdx", version = "0.0.0" } +intern = { path = "../intern", version = "0.0.0" } base-db = { path = "../base-db", version = "0.0.0" } cfg = { path = "../cfg", version = "0.0.0" } syntax = { path = "../syntax", version = "0.0.0" } diff --git a/crates/hir-expand/src/attrs.rs b/crates/hir-expand/src/attrs.rs new file mode 100644 index 0000000000..6967d15327 --- /dev/null +++ b/crates/hir-expand/src/attrs.rs @@ -0,0 +1,313 @@ +use std::{fmt, ops, sync::Arc}; + +use base_db::CrateId; +use cfg::CfgExpr; +use either::Either; +use intern::Interned; +use mbe::{syntax_node_to_token_tree, DelimiterKind, Punct}; +use smallvec::{smallvec, SmallVec}; +use syntax::{ast, match_ast, AstNode, SmolStr, SyntaxNode}; +use tt::Subtree; + +use crate::{ + db::AstDatabase, + hygiene::Hygiene, + mod_path::{ModPath, PathKind}, + name::AsName, + InFile, +}; + +/// Syntactical attributes, without filtering of `cfg_attr`s. 
+#[derive(Default, Debug, Clone, PartialEq, Eq)] +pub struct RawAttrs { + entries: Option>, +} + +impl ops::Deref for RawAttrs { + type Target = [Attr]; + + fn deref(&self) -> &[Attr] { + match &self.entries { + Some(it) => &*it, + None => &[], + } + } +} + +impl RawAttrs { + pub const EMPTY: Self = Self { entries: None }; + + pub fn new(db: &dyn AstDatabase, owner: &dyn ast::HasAttrs, hygiene: &Hygiene) -> Self { + let entries = collect_attrs(owner) + .filter_map(|(id, attr)| match attr { + Either::Left(attr) => { + attr.meta().and_then(|meta| Attr::from_src(db, meta, hygiene, id)) + } + Either::Right(comment) => comment.doc_comment().map(|doc| Attr { + id, + input: Some(Interned::new(AttrInput::Literal(SmolStr::new(doc)))), + path: Interned::new(ModPath::from(crate::name!(doc))), + }), + }) + .collect::>(); + + Self { entries: if entries.is_empty() { None } else { Some(entries) } } + } + + pub fn from_attrs_owner(db: &dyn AstDatabase, owner: InFile<&dyn ast::HasAttrs>) -> Self { + let hygiene = Hygiene::new(db, owner.file_id); + Self::new(db, owner.value, &hygiene) + } + + pub fn merge(&self, other: Self) -> Self { + match (&self.entries, other.entries) { + (None, None) => Self::EMPTY, + (None, entries @ Some(_)) => Self { entries }, + (Some(entries), None) => Self { entries: Some(entries.clone()) }, + (Some(a), Some(b)) => { + let last_ast_index = a.last().map_or(0, |it| it.id.ast_index + 1); + Self { + entries: Some( + a.iter() + .cloned() + .chain(b.iter().map(|it| { + let mut it = it.clone(); + it.id.ast_index += last_ast_index; + it + })) + .collect(), + ), + } + } + } + } + + /// Processes `cfg_attr`s, returning the resulting semantic `Attrs`. + pub fn filter(self, db: &dyn AstDatabase, krate: CrateId) -> RawAttrs { + let has_cfg_attrs = self + .iter() + .any(|attr| attr.path.as_ident().map_or(false, |name| *name == crate::name![cfg_attr])); + if !has_cfg_attrs { + return self; + } + + let crate_graph = db.crate_graph(); + let new_attrs = self + .iter() + .flat_map(|attr| -> SmallVec<[_; 1]> { + let is_cfg_attr = + attr.path.as_ident().map_or(false, |name| *name == crate::name![cfg_attr]); + if !is_cfg_attr { + return smallvec![attr.clone()]; + } + + let subtree = match attr.token_tree_value() { + Some(it) => it, + _ => return smallvec![attr.clone()], + }; + + // Input subtree is: `(cfg, $(attr),+)` + // Split it up into a `cfg` subtree and the `attr` subtrees. + // FIXME: There should be a common API for this. + let mut parts = subtree.token_trees.split(|tt| { + matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(Punct { char: ',', .. 
}))) + }); + let cfg = match parts.next() { + Some(it) => it, + None => return smallvec![], + }; + let cfg = Subtree { delimiter: subtree.delimiter, token_trees: cfg.to_vec() }; + let cfg = CfgExpr::parse(&cfg); + let index = attr.id; + let attrs = parts.filter(|a| !a.is_empty()).filter_map(|attr| { + let tree = Subtree { delimiter: None, token_trees: attr.to_vec() }; + // FIXME hygiene + let hygiene = Hygiene::new_unhygienic(); + Attr::from_tt(db, &tree, &hygiene, index) + }); + + let cfg_options = &crate_graph[krate].cfg_options; + if cfg_options.check(&cfg) == Some(false) { + smallvec![] + } else { + cov_mark::hit!(cfg_attr_active); + + attrs.collect() + } + }) + .collect(); + + RawAttrs { entries: Some(new_attrs) } + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct AttrId { + pub ast_index: u32, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct Attr { + pub id: AttrId, + pub path: Interned, + pub input: Option>, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum AttrInput { + /// `#[attr = "string"]` + Literal(SmolStr), + /// `#[attr(subtree)]` + TokenTree(tt::Subtree, mbe::TokenMap), +} + +impl fmt::Display for AttrInput { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + AttrInput::Literal(lit) => write!(f, " = \"{}\"", lit.escape_debug()), + AttrInput::TokenTree(subtree, _) => subtree.fmt(f), + } + } +} + +impl Attr { + fn from_src( + db: &dyn AstDatabase, + ast: ast::Meta, + hygiene: &Hygiene, + id: AttrId, + ) -> Option { + let path = Interned::new(ModPath::from_src(db, ast.path()?, hygiene)?); + let input = if let Some(ast::Expr::Literal(lit)) = ast.expr() { + let value = match lit.kind() { + ast::LiteralKind::String(string) => string.value()?.into(), + _ => lit.syntax().first_token()?.text().trim_matches('"').into(), + }; + Some(Interned::new(AttrInput::Literal(value))) + } else if let Some(tt) = ast.token_tree() { + let (tree, map) = syntax_node_to_token_tree(tt.syntax()); + Some(Interned::new(AttrInput::TokenTree(tree, map))) + } else { + None + }; + Some(Attr { id, path, input }) + } + + fn from_tt( + db: &dyn AstDatabase, + tt: &tt::Subtree, + hygiene: &Hygiene, + id: AttrId, + ) -> Option { + let (parse, _) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MetaItem); + let ast = ast::Meta::cast(parse.syntax_node())?; + + Self::from_src(db, ast, hygiene, id) + } + + pub fn path(&self) -> &ModPath { + &self.path + } +} + +impl Attr { + /// #[path = "string"] + pub fn string_value(&self) -> Option<&SmolStr> { + match self.input.as_deref()? { + AttrInput::Literal(it) => Some(it), + _ => None, + } + } + + /// #[path(ident)] + pub fn single_ident_value(&self) -> Option<&tt::Ident> { + match self.input.as_deref()? { + AttrInput::TokenTree(subtree, _) => match &*subtree.token_trees { + [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] => Some(ident), + _ => None, + }, + _ => None, + } + } + + /// #[path TokenTree] + pub fn token_tree_value(&self) -> Option<&Subtree> { + match self.input.as_deref()? { + AttrInput::TokenTree(subtree, _) => Some(subtree), + _ => None, + } + } + + /// Parses this attribute as a token tree consisting of comma separated paths. + pub fn parse_path_comma_token_tree(&self) -> Option + '_> { + let args = self.token_tree_value()?; + + if args.delimiter_kind() != Some(DelimiterKind::Parenthesis) { + return None; + } + let paths = args + .token_trees + .split(|tt| matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(Punct { char: ',', .. 
})))) + .filter_map(|tts| { + if tts.is_empty() { + return None; + } + let segments = tts.iter().filter_map(|tt| match tt { + tt::TokenTree::Leaf(tt::Leaf::Ident(id)) => Some(id.as_name()), + _ => None, + }); + Some(ModPath::from_segments(PathKind::Plain, segments)) + }); + + Some(paths) + } +} + +pub fn collect_attrs( + owner: &dyn ast::HasAttrs, +) -> impl Iterator)> { + let inner_attrs = inner_attributes(owner.syntax()).into_iter().flatten(); + let outer_attrs = + ast::AttrDocCommentIter::from_syntax_node(owner.syntax()).filter(|el| match el { + Either::Left(attr) => attr.kind().is_outer(), + Either::Right(comment) => comment.is_outer(), + }); + outer_attrs + .chain(inner_attrs) + .enumerate() + .map(|(id, attr)| (AttrId { ast_index: id as u32 }, attr)) +} + +fn inner_attributes( + syntax: &SyntaxNode, +) -> Option>> { + let node = match_ast! { + match syntax { + ast::SourceFile(_) => syntax.clone(), + ast::ExternBlock(it) => it.extern_item_list()?.syntax().clone(), + ast::Fn(it) => it.body()?.stmt_list()?.syntax().clone(), + ast::Impl(it) => it.assoc_item_list()?.syntax().clone(), + ast::Module(it) => it.item_list()?.syntax().clone(), + ast::BlockExpr(it) => { + use syntax::SyntaxKind::{BLOCK_EXPR , EXPR_STMT}; + // Block expressions accept outer and inner attributes, but only when they are the outer + // expression of an expression statement or the final expression of another block expression. + let may_carry_attributes = matches!( + it.syntax().parent().map(|it| it.kind()), + Some(BLOCK_EXPR | EXPR_STMT) + ); + if !may_carry_attributes { + return None + } + syntax.clone() + }, + _ => return None, + } + }; + + let attrs = ast::AttrDocCommentIter::from_syntax_node(&node).filter(|el| match el { + Either::Left(attr) => attr.kind().is_inner(), + Either::Right(comment) => comment.is_inner(), + }); + Some(attrs) +} diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index bc5f9f3b8a..9d61588a8b 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -17,6 +17,7 @@ pub mod proc_macro; pub mod quote; pub mod eager; pub mod mod_path; +pub mod attrs; mod fixup; pub use mbe::{Origin, ValueResult}; @@ -1031,3 +1032,5 @@ impl ExpandTo { pub struct UnresolvedMacro { pub path: ModPath, } + +intern::impl_internable!(ModPath, attrs::AttrInput); diff --git a/crates/hir-ty/Cargo.toml b/crates/hir-ty/Cargo.toml index ae837ac6dc..75b33da1f2 100644 --- a/crates/hir-ty/Cargo.toml +++ b/crates/hir-ty/Cargo.toml @@ -29,6 +29,7 @@ typed-arena = "2.0.1" rustc_index = { version = "0.0.20221221", package = "hkalbasi-rustc-ap-rustc_index", default-features = false } stdx = { path = "../stdx", version = "0.0.0" } +intern = { path = "../intern", version = "0.0.0" } hir-def = { path = "../hir-def", version = "0.0.0" } hir-expand = { path = "../hir-expand", version = "0.0.0" } base-db = { path = "../base-db", version = "0.0.0" } diff --git a/crates/hir-ty/src/display.rs b/crates/hir-ty/src/display.rs index 66e813eed8..f9642aa747 100644 --- a/crates/hir-ty/src/display.rs +++ b/crates/hir-ty/src/display.rs @@ -11,7 +11,6 @@ use hir_def::{ db::DefDatabase, find_path, generics::{TypeOrConstParamData, TypeParamProvenance}, - intern::{Internable, Interned}, item_scope::ItemInNs, path::{Path, PathKind}, type_ref::{ConstScalar, TraitBoundModifier, TypeBound, TypeRef}, @@ -19,6 +18,7 @@ use hir_def::{ HasModule, ItemContainerId, Lookup, ModuleDefId, ModuleId, TraitId, }; use hir_expand::{hygiene::Hygiene, name::Name}; +use intern::{Internable, Interned}; use itertools::Itertools; 
use smallvec::SmallVec; use syntax::SmolStr; diff --git a/crates/hir-ty/src/interner.rs b/crates/hir-ty/src/interner.rs index 441503a300..7bf73560cb 100644 --- a/crates/hir-ty/src/interner.rs +++ b/crates/hir-ty/src/interner.rs @@ -4,11 +4,8 @@ use crate::{chalk_db, tls, GenericArg}; use base_db::salsa::InternId; use chalk_ir::{Goal, GoalData}; -use hir_def::{ - intern::{impl_internable, InternStorage, Internable, Interned}, - type_ref::ConstScalar, - TypeAliasId, -}; +use hir_def::{type_ref::ConstScalar, TypeAliasId}; +use intern::{impl_internable, Interned}; use smallvec::SmallVec; use std::{fmt, sync::Arc}; diff --git a/crates/hir-ty/src/lower.rs b/crates/hir-ty/src/lower.rs index 592410008a..4b1f40f91d 100644 --- a/crates/hir-ty/src/lower.rs +++ b/crates/hir-ty/src/lower.rs @@ -23,7 +23,6 @@ use hir_def::{ generics::{ TypeOrConstParamData, TypeParamProvenance, WherePredicate, WherePredicateTypeTarget, }, - intern::Interned, lang_item::lang_attr, path::{GenericArg, ModPath, Path, PathKind, PathSegment, PathSegments}, resolver::{HasResolver, Resolver, TypeNs}, @@ -35,6 +34,7 @@ use hir_def::{ TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId, VariantId, }; use hir_expand::{name::Name, ExpandResult}; +use intern::Interned; use itertools::Either; use la_arena::ArenaMap; use rustc_hash::FxHashSet; diff --git a/crates/hir-ty/src/utils.rs b/crates/hir-ty/src/utils.rs index 9893566bd5..4f516e18be 100644 --- a/crates/hir-ty/src/utils.rs +++ b/crates/hir-ty/src/utils.rs @@ -11,13 +11,13 @@ use hir_def::{ GenericParams, TypeOrConstParamData, TypeParamProvenance, WherePredicate, WherePredicateTypeTarget, }, - intern::Interned, resolver::{HasResolver, TypeNs}, type_ref::{TraitBoundModifier, TypeRef}, ConstParamId, FunctionId, GenericDefId, ItemContainerId, Lookup, TraitId, TypeAliasId, TypeOrConstParamId, TypeParamId, }; use hir_expand::name::Name; +use intern::Interned; use itertools::Either; use rustc_hash::FxHashSet; use smallvec::{smallvec, SmallVec}; diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 08fd4453df..ad44e24042 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -107,7 +107,7 @@ pub use { cfg::{CfgAtom, CfgExpr, CfgOptions}, hir_def::{ adt::StructKind, - attr::{Attr, Attrs, AttrsWithOwner, Documentation}, + attr::{Attrs, AttrsWithOwner, Documentation}, builtin_attr::AttributeTemplate, find_path::PrefixKind, import_map, @@ -122,6 +122,7 @@ pub use { ModuleDefId, }, hir_expand::{ + attrs::Attr, name::{known, Name}, ExpandResult, HirFileId, InFile, MacroFile, Origin, }, diff --git a/crates/hir/src/semantics/source_to_def.rs b/crates/hir/src/semantics/source_to_def.rs index fa45e3c12e..2b5bfda1d4 100644 --- a/crates/hir/src/semantics/source_to_def.rs +++ b/crates/hir/src/semantics/source_to_def.rs @@ -87,7 +87,6 @@ use base_db::FileId; use hir_def::{ - attr::AttrId, child_by_source::ChildBySource, dyn_map::DynMap, expr::{LabelId, PatId}, @@ -96,7 +95,7 @@ use hir_def::{ GenericDefId, GenericParamId, ImplId, LifetimeParamId, MacroId, ModuleId, StaticId, StructId, TraitId, TypeAliasId, TypeParamId, UnionId, VariantId, }; -use hir_expand::{name::AsName, HirFileId, MacroCallId}; +use hir_expand::{attrs::AttrId, name::AsName, HirFileId, MacroCallId}; use rustc_hash::FxHashMap; use smallvec::SmallVec; use stdx::impl_from; diff --git a/crates/intern/Cargo.toml b/crates/intern/Cargo.toml new file mode 100644 index 0000000000..dd5110255c --- /dev/null +++ b/crates/intern/Cargo.toml @@ -0,0 +1,13 @@ +[package] +name = "intern" +version = "0.0.0" +edition = "2021" 
+ +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +# We need to freeze the version of the crate, as the raw-api feature is considered unstable +dashmap = { version = "=5.4.0", features = ["raw-api"] } +hashbrown = { version = "0.12.1", default-features = false } +once_cell = "1.15.0" +rustc-hash = "1.1.0" diff --git a/crates/hir-def/src/intern.rs b/crates/intern/src/lib.rs similarity index 92% rename from crates/hir-def/src/intern.rs rename to crates/intern/src/lib.rs index f08521a340..fb2903696b 100644 --- a/crates/hir-def/src/intern.rs +++ b/crates/intern/src/lib.rs @@ -14,8 +14,6 @@ use hashbrown::HashMap; use once_cell::sync::OnceCell; use rustc_hash::FxHasher; -use crate::generics::GenericParams; - type InternMap = DashMap, (), BuildHasherDefault>; type Guard = dashmap::RwLockWriteGuard< 'static, @@ -204,9 +202,9 @@ pub trait Internable: Hash + Eq + 'static { #[doc(hidden)] macro_rules! _impl_internable { ( $($t:path),+ $(,)? ) => { $( - impl Internable for $t { - fn storage() -> &'static InternStorage { - static STORAGE: InternStorage<$t> = InternStorage::new(); + impl $crate::Internable for $t { + fn storage() -> &'static $crate::InternStorage { + static STORAGE: $crate::InternStorage<$t> = $crate::InternStorage::new(); &STORAGE } } @@ -215,13 +213,4 @@ macro_rules! _impl_internable { pub use crate::_impl_internable as impl_internable; -impl_internable!( - crate::type_ref::TypeRef, - crate::type_ref::TraitRef, - crate::type_ref::TypeBound, - crate::path::ModPath, - crate::path::GenericArgs, - crate::attr::AttrInput, - GenericParams, - str, -); +impl_internable!(str,); From 621e96bd6ad0cf35c4dc4d1cfd8f6bc83096946f Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 9 Jan 2023 20:47:51 +0100 Subject: [PATCH 0795/1945] Encode one level of cfg_attr in attr_id --- crates/hir-def/src/attr.rs | 2 +- crates/hir-def/src/child_by_source.rs | 2 +- crates/hir-def/src/lib.rs | 6 +- crates/hir-def/src/nameres/collector.rs | 4 +- crates/hir-def/src/nameres/diagnostics.rs | 8 +-- crates/hir-expand/src/attrs.rs | 79 ++++++++++++++++------- crates/hir-expand/src/db.rs | 9 ++- crates/hir-expand/src/hygiene.rs | 2 +- crates/hir-expand/src/lib.rs | 24 ++++--- crates/hir/src/lib.rs | 8 ++- crates/intern/Cargo.toml | 7 +- 11 files changed, 99 insertions(+), 52 deletions(-) diff --git a/crates/hir-def/src/attr.rs b/crates/hir-def/src/attr.rs index eb88a74e44..a0113fd048 100644 --- a/crates/hir-def/src/attr.rs +++ b/crates/hir-def/src/attr.rs @@ -546,7 +546,7 @@ impl AttrSourceMap { } fn source_of_id(&self, id: AttrId) -> InFile<&Either> { - let ast_idx = id.ast_index as usize; + let ast_idx = id.ast_index(); let file_id = match self.mod_def_site_file_id { Some((file_id, def_site_cut)) if def_site_cut <= ast_idx => file_id, _ => self.file_id, diff --git a/crates/hir-def/src/child_by_source.rs b/crates/hir-def/src/child_by_source.rs index bb13165257..19d2fe956f 100644 --- a/crates/hir-def/src/child_by_source.rs +++ b/crates/hir-def/src/child_by_source.rs @@ -117,7 +117,7 @@ impl ChildBySource for ItemScope { let adt = ast_id.to_node(db.upcast()); calls.for_each(|(attr_id, call_id, calls)| { if let Some(Either::Left(attr)) = - adt.doc_comments_and_attrs().nth(attr_id.ast_index as usize) + adt.doc_comments_and_attrs().nth(attr_id.ast_index()) { res[keys::DERIVE_MACRO_CALL].insert(attr, (attr_id, call_id, calls.into())); } diff --git a/crates/hir-def/src/lib.rs b/crates/hir-def/src/lib.rs index cc0ea14d01..8eae2e92f4 100644 --- 
a/crates/hir-def/src/lib.rs +++ b/crates/hir-def/src/lib.rs @@ -941,7 +941,7 @@ pub fn macro_id_to_def_id(db: &dyn db::DefDatabase, id: MacroId) -> MacroDefId { fn derive_macro_as_call_id( db: &dyn db::DefDatabase, item_attr: &AstIdWithPath, - derive_attr: AttrId, + derive_attr_index: AttrId, derive_pos: u32, krate: CrateId, resolver: impl Fn(path::ModPath) -> Option<(MacroId, MacroDefId)>, @@ -954,7 +954,7 @@ fn derive_macro_as_call_id( MacroCallKind::Derive { ast_id: item_attr.ast_id, derive_index: derive_pos, - derive_attr_index: derive_attr.ast_index, + derive_attr_index, }, ); Ok((macro_id, def_id, call_id)) @@ -982,7 +982,7 @@ fn attr_macro_as_call_id( MacroCallKind::Attr { ast_id: item_attr.ast_id, attr_args: Arc::new(arg), - invoc_attr_index: macro_attr.id.ast_index, + invoc_attr_index: macro_attr.id, is_derive, }, ); diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs index ad31e9aac2..33a787fd9f 100644 --- a/crates/hir-def/src/nameres/collector.rs +++ b/crates/hir-def/src/nameres/collector.rs @@ -452,7 +452,7 @@ impl DefCollector<'_> { MacroCallKind::Attr { ast_id: ast_id.ast_id, attr_args: Default::default(), - invoc_attr_index: attr.id.ast_index, + invoc_attr_index: attr.id, is_derive: false, }, attr.path().clone(), @@ -1407,7 +1407,7 @@ impl DefCollector<'_> { directive.module_id, MacroCallKind::Derive { ast_id: ast_id.ast_id, - derive_attr_index: derive_attr.ast_index, + derive_attr_index: *derive_attr, derive_index: *derive_pos as u32, }, ast_id.path.clone(), diff --git a/crates/hir-def/src/nameres/diagnostics.rs b/crates/hir-def/src/nameres/diagnostics.rs index 74b25f4cc9..b024d7c677 100644 --- a/crates/hir-def/src/nameres/diagnostics.rs +++ b/crates/hir-def/src/nameres/diagnostics.rs @@ -31,9 +31,9 @@ pub enum DefDiagnosticKind { UnimplementedBuiltinMacro { ast: AstId }, - InvalidDeriveTarget { ast: AstId, id: u32 }, + InvalidDeriveTarget { ast: AstId, id: usize }, - MalformedDerive { ast: AstId, id: u32 }, + MalformedDerive { ast: AstId, id: usize }, } #[derive(Debug, PartialEq, Eq)] @@ -119,7 +119,7 @@ impl DefDiagnostic { ) -> Self { Self { in_module: container, - kind: DefDiagnosticKind::InvalidDeriveTarget { ast, id: id.ast_index }, + kind: DefDiagnosticKind::InvalidDeriveTarget { ast, id: id.ast_index() }, } } @@ -130,7 +130,7 @@ impl DefDiagnostic { ) -> Self { Self { in_module: container, - kind: DefDiagnosticKind::MalformedDerive { ast, id: id.ast_index }, + kind: DefDiagnosticKind::MalformedDerive { ast, id: id.ast_index() }, } } } diff --git a/crates/hir-expand/src/attrs.rs b/crates/hir-expand/src/attrs.rs index 6967d15327..c7135732b8 100644 --- a/crates/hir-expand/src/attrs.rs +++ b/crates/hir-expand/src/attrs.rs @@ -1,3 +1,4 @@ +//! A higher level attributes based on TokenTree, with also some shortcuts. 
use std::{fmt, ops, sync::Arc}; use base_db::CrateId; @@ -65,14 +66,16 @@ impl RawAttrs { (None, entries @ Some(_)) => Self { entries }, (Some(entries), None) => Self { entries: Some(entries.clone()) }, (Some(a), Some(b)) => { - let last_ast_index = a.last().map_or(0, |it| it.id.ast_index + 1); + let last_ast_index = a.last().map_or(0, |it| it.id.ast_index() + 1) as u32; Self { entries: Some( a.iter() .cloned() .chain(b.iter().map(|it| { let mut it = it.clone(); - it.id.ast_index += last_ast_index; + it.id.id = it.id.ast_index() as u32 + last_ast_index + | (it.id.cfg_attr_index().unwrap_or(0) as u32) + << AttrId::AST_INDEX_BITS; it })) .collect(), @@ -83,6 +86,7 @@ impl RawAttrs { } /// Processes `cfg_attr`s, returning the resulting semantic `Attrs`. + // FIXME: This should return a different type pub fn filter(self, db: &dyn AstDatabase, krate: CrateId) -> RawAttrs { let has_cfg_attrs = self .iter() @@ -106,27 +110,22 @@ impl RawAttrs { _ => return smallvec![attr.clone()], }; - // Input subtree is: `(cfg, $(attr),+)` - // Split it up into a `cfg` subtree and the `attr` subtrees. - // FIXME: There should be a common API for this. - let mut parts = subtree.token_trees.split(|tt| { - matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(Punct { char: ',', .. }))) - }); - let cfg = match parts.next() { + let (cfg, parts) = match parse_cfg_attr_input(subtree) { Some(it) => it, - None => return smallvec![], + None => return smallvec![attr.clone()], }; - let cfg = Subtree { delimiter: subtree.delimiter, token_trees: cfg.to_vec() }; - let cfg = CfgExpr::parse(&cfg); let index = attr.id; - let attrs = parts.filter(|a| !a.is_empty()).filter_map(|attr| { - let tree = Subtree { delimiter: None, token_trees: attr.to_vec() }; - // FIXME hygiene - let hygiene = Hygiene::new_unhygienic(); - Attr::from_tt(db, &tree, &hygiene, index) - }); + let attrs = + parts.enumerate().take(1 << AttrId::CFG_ATTR_BITS).filter_map(|(idx, attr)| { + let tree = Subtree { delimiter: None, token_trees: attr.to_vec() }; + // FIXME hygiene + let hygiene = Hygiene::new_unhygienic(); + Attr::from_tt(db, &tree, &hygiene, index.with_cfg_attr(idx)) + }); let cfg_options = &crate_graph[krate].cfg_options; + let cfg = Subtree { delimiter: subtree.delimiter, token_trees: cfg.to_vec() }; + let cfg = CfgExpr::parse(&cfg); if cfg_options.check(&cfg) == Some(false) { smallvec![] } else { @@ -143,7 +142,32 @@ impl RawAttrs { #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct AttrId { - pub ast_index: u32, + id: u32, +} + +// FIXME: This only handles a single level of cfg_attr nesting +// that is `#[cfg_attr(all(), cfg_attr(all(), cfg(any())))]` breaks again +impl AttrId { + const CFG_ATTR_BITS: usize = 7; + const AST_INDEX_MASK: usize = 0x00FF_FFFF; + const AST_INDEX_BITS: usize = Self::AST_INDEX_MASK.count_ones() as usize; + const CFG_ATTR_SET_BITS: u32 = 1 << 31; + + pub fn ast_index(&self) -> usize { + self.id as usize & Self::AST_INDEX_MASK + } + + pub fn cfg_attr_index(&self) -> Option { + if self.id & Self::CFG_ATTR_SET_BITS == 0 { + None + } else { + Some(self.id as usize >> Self::AST_INDEX_BITS) + } + } + + pub fn with_cfg_attr(self, idx: usize) -> AttrId { + AttrId { id: self.id | (idx as u32) << Self::AST_INDEX_BITS | Self::CFG_ATTR_SET_BITS } + } } #[derive(Debug, Clone, PartialEq, Eq)] @@ -272,10 +296,7 @@ pub fn collect_attrs( Either::Left(attr) => attr.kind().is_outer(), Either::Right(comment) => comment.is_outer(), }); - outer_attrs - .chain(inner_attrs) - .enumerate() - .map(|(id, attr)| (AttrId { ast_index: id as u32 
}, attr)) + outer_attrs.chain(inner_attrs).enumerate().map(|(id, attr)| (AttrId { id: id as u32 }, attr)) } fn inner_attributes( @@ -311,3 +332,15 @@ fn inner_attributes( }); Some(attrs) } + +// Input subtree is: `(cfg, $(attr),+)` +// Split it up into a `cfg` subtree and the `attr` subtrees. +pub fn parse_cfg_attr_input( + subtree: &Subtree, +) -> Option<(&[tt::TokenTree], impl Iterator)> { + let mut parts = subtree + .token_trees + .split(|tt| matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(Punct { char: ',', .. })))); + let cfg = parts.next()?; + Some((cfg, parts.filter(|it| !it.is_empty()))) +} diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs index b28e60187d..ec5886824f 100644 --- a/crates/hir-expand/src/db.rs +++ b/crates/hir-expand/src/db.rs @@ -168,7 +168,9 @@ pub fn expand_speculative( // Attributes may have an input token tree, build the subtree and map for this as well // then try finding a token id for our token if it is inside this input subtree. let item = ast::Item::cast(speculative_args.clone())?; - item.doc_comments_and_attrs().nth(invoc_attr_index as usize).and_then(Either::left) + item.doc_comments_and_attrs() + .nth(invoc_attr_index.ast_index()) + .and_then(Either::left) }?; match attr.token_tree() { Some(token_tree) => { @@ -321,6 +323,7 @@ fn macro_arg( } fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet { + // FIXME: handle `cfg_attr` (|| { let censor = match loc.kind { MacroCallKind::FnLike { .. } => return None, @@ -328,7 +331,7 @@ fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet FxHashSet { + // FIXME: handle `cfg_attr` let tt = ast_id .to_node(db) .doc_comments_and_attrs() - .nth(invoc_attr_index as usize) + .nth(invoc_attr_index.ast_index()) .and_then(Either::left)? .token_tree()?; Some(InFile::new(ast_id.file_id, tt)) @@ -398,8 +400,7 @@ impl MacroDefId { } } -// FIXME: attribute indices do not account for `cfg_attr`, which means that we'll strip the whole -// `cfg_attr` instead of just one of the attributes it expands to +// FIXME: attribute indices do not account for nested `cfg_attr` impl MacroCallKind { /// Returns the file containing the macro invocation. @@ -420,7 +421,7 @@ impl MacroCallKind { // FIXME: handle `cfg_attr` ast_id.with_value(ast_id.to_node(db)).map(|it| { it.doc_comments_and_attrs() - .nth(*derive_attr_index as usize) + .nth(derive_attr_index.ast_index()) .and_then(|it| match it { Either::Left(attr) => Some(attr.syntax().clone()), Either::Right(_) => None, @@ -432,7 +433,7 @@ impl MacroCallKind { // FIXME: handle `cfg_attr` ast_id.with_value(ast_id.to_node(db)).map(|it| { it.doc_comments_and_attrs() - .nth(*invoc_attr_index as usize) + .nth(invoc_attr_index.ast_index()) .and_then(|it| match it { Either::Left(attr) => Some(attr.syntax().clone()), Either::Right(_) => None, @@ -489,19 +490,21 @@ impl MacroCallKind { MacroCallKind::FnLike { ast_id, .. } => ast_id.to_node(db).syntax().text_range(), MacroCallKind::Derive { ast_id, derive_attr_index, .. } => { // FIXME: should be the range of the macro name, not the whole derive + // FIXME: handle `cfg_attr` ast_id .to_node(db) .doc_comments_and_attrs() - .nth(derive_attr_index as usize) + .nth(derive_attr_index.ast_index()) .expect("missing derive") .expect_left("derive is a doc comment?") .syntax() .text_range() } + // FIXME: handle `cfg_attr` MacroCallKind::Attr { ast_id, invoc_attr_index, .. 
} => ast_id .to_node(db) .doc_comments_and_attrs() - .nth(invoc_attr_index as usize) + .nth(invoc_attr_index.ast_index()) .expect("missing attribute") .expect_left("attribute macro is a doc comment?") .syntax() @@ -593,9 +596,10 @@ impl ExpansionInfo { let token_range = token.value.text_range(); match &loc.kind { MacroCallKind::Attr { attr_args, invoc_attr_index, is_derive, .. } => { + // FIXME: handle `cfg_attr` let attr = item .doc_comments_and_attrs() - .nth(*invoc_attr_index as usize) + .nth(invoc_attr_index.ast_index()) .and_then(Either::left)?; match attr.token_tree() { Some(token_tree) diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index ad44e24042..ec66660f34 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -785,7 +785,7 @@ fn precise_macro_call_location( let token = (|| { let derive_attr = node .doc_comments_and_attrs() - .nth(*derive_attr_index as usize) + .nth(derive_attr_index.ast_index()) .and_then(Either::left)?; let token_tree = derive_attr.meta()?.token_tree()?; let group_by = token_tree @@ -813,9 +813,11 @@ fn precise_macro_call_location( let node = ast_id.to_node(db.upcast()); let attr = node .doc_comments_and_attrs() - .nth((*invoc_attr_index) as usize) + .nth(invoc_attr_index.ast_index()) .and_then(Either::left) - .unwrap_or_else(|| panic!("cannot find attribute #{invoc_attr_index}")); + .unwrap_or_else(|| { + panic!("cannot find attribute #{}", invoc_attr_index.ast_index()) + }); ( ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&attr))), diff --git a/crates/intern/Cargo.toml b/crates/intern/Cargo.toml index dd5110255c..78007fc860 100644 --- a/crates/intern/Cargo.toml +++ b/crates/intern/Cargo.toml @@ -1,9 +1,14 @@ [package] name = "intern" version = "0.0.0" +description = "TBD" +license = "MIT OR Apache-2.0" edition = "2021" +rust-version = "1.65" + +[lib] +doctest = false -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] # We need to freeze the version of the crate, as the raw-api feature is considered unstable From 9a15cc81b4ba96cfc8164ba4848cbf648d40f86a Mon Sep 17 00:00:00 2001 From: bvanjoi Date: Fri, 6 Jan 2023 18:52:41 +0800 Subject: [PATCH 0796/1945] fix(ty): should query impls in nearest block --- crates/hir-ty/src/method_resolution.rs | 9 +++- crates/ide/src/goto_definition.rs | 64 ++++++++++++++++++++++++++ 2 files changed, 71 insertions(+), 2 deletions(-) diff --git a/crates/hir-ty/src/method_resolution.rs b/crates/hir-ty/src/method_resolution.rs index 2f5fa3083c..680e01e26b 100644 --- a/crates/hir-ty/src/method_resolution.rs +++ b/crates/hir-ty/src/method_resolution.rs @@ -1094,13 +1094,13 @@ fn iterate_inherent_methods( None => return ControlFlow::Continue(()), }; - let (module, block) = match visible_from_module { + let (module, mut block) = match visible_from_module { VisibleFromModule::Filter(module) => (Some(module), module.containing_block()), VisibleFromModule::IncludeBlock(block) => (None, Some(block)), VisibleFromModule::None => (None, None), }; - if let Some(block_id) = block { + while let Some(block_id) = block { if let Some(impls) = db.inherent_impls_in_block(block_id) { impls_for_self_ty( &impls, @@ -1113,6 +1113,11 @@ fn iterate_inherent_methods( callback, )?; } + + block = db + .block_def_map(block_id) + .and_then(|map| map.parent()) + .and_then(|module| module.containing_block()); } for krate in def_crates { diff --git a/crates/ide/src/goto_definition.rs b/crates/ide/src/goto_definition.rs index 73fd518a9e..93019527f4 100644 --- 
a/crates/ide/src/goto_definition.rs +++ b/crates/ide/src/goto_definition.rs @@ -1916,4 +1916,68 @@ fn main() { "#, ) } + + #[test] + fn query_impls_in_nearest_block() { + check( + r#" +struct S1; +impl S1 { + fn e() -> () {} +} +fn f1() { + struct S1; + impl S1 { + fn e() -> () {} + //^ + } + fn f2() { + fn f3() { + S1::e$0(); + } + } +} +"#, + ); + + check( + r#" +struct S1; +impl S1 { + fn e() -> () {} +} +fn f1() { + struct S1; + impl S1 { + fn e() -> () {} + //^ + } + fn f2() { + struct S2; + S1::e$0(); + } +} +fn f12() { + struct S1; + impl S1 { + fn e() -> () {} + } +} +"#, + ); + + check( + r#" +struct S1; +impl S1 { + fn e() -> () {} + //^ +} +fn f2() { + struct S2; + S1::e$0(); +} +"#, + ); + } } From ef4debc8b1d9b71de01e966cf17133ae2a1f5c43 Mon Sep 17 00:00:00 2001 From: Daniel Eades Date: Fri, 30 Dec 2022 11:20:48 +0000 Subject: [PATCH 0797/1945] comment out disabled code --- crates/syntax/src/syntax_node.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/crates/syntax/src/syntax_node.rs b/crates/syntax/src/syntax_node.rs index a08c01597d..524be8139d 100644 --- a/crates/syntax/src/syntax_node.rs +++ b/crates/syntax/src/syntax_node.rs @@ -48,10 +48,10 @@ impl SyntaxTreeBuilder { pub fn finish(self) -> Parse { let (green, errors) = self.finish_raw(); // Disable block validation, see https://github.com/rust-lang/rust-analyzer/pull/10357 - if cfg!(debug_assertions) && false { - let node = SyntaxNode::new_root(green.clone()); - crate::validation::validate_block_structure(&node); - } + // if cfg!(debug_assertions) { + // let node = SyntaxNode::new_root(green.clone()); + // crate::validation::validate_block_structure(&node); + // } Parse::new(green, errors) } From b971b5b64f784a08a0d787eb8c9a6a0bb928779b Mon Sep 17 00:00:00 2001 From: Daniel Eades Date: Tue, 10 Jan 2023 07:06:26 +0000 Subject: [PATCH 0798/1945] suppress lint --- crates/syntax/src/syntax_node.rs | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/crates/syntax/src/syntax_node.rs b/crates/syntax/src/syntax_node.rs index 524be8139d..2e9e0bc226 100644 --- a/crates/syntax/src/syntax_node.rs +++ b/crates/syntax/src/syntax_node.rs @@ -48,10 +48,11 @@ impl SyntaxTreeBuilder { pub fn finish(self) -> Parse { let (green, errors) = self.finish_raw(); // Disable block validation, see https://github.com/rust-lang/rust-analyzer/pull/10357 - // if cfg!(debug_assertions) { - // let node = SyntaxNode::new_root(green.clone()); - // crate::validation::validate_block_structure(&node); - // } + #[allow(clippy::overly_complex_bool_expr)] + if cfg!(debug_assertions) && false { + let node = SyntaxNode::new_root(green.clone()); + crate::validation::validate_block_structure(&node); + } Parse::new(green, errors) } From db41e6b40892b89ebb7184ceda8e94896bf8d37f Mon Sep 17 00:00:00 2001 From: Daniel Eades Date: Tue, 10 Jan 2023 07:16:27 +0000 Subject: [PATCH 0799/1945] remove 'deny_clippy' test --- crates/rust-analyzer/tests/slow-tests/tidy.rs | 27 ------------------- 1 file changed, 27 deletions(-) diff --git a/crates/rust-analyzer/tests/slow-tests/tidy.rs b/crates/rust-analyzer/tests/slow-tests/tidy.rs index 35b5af7319..8e3097fce4 100644 --- a/crates/rust-analyzer/tests/slow-tests/tidy.rs +++ b/crates/rust-analyzer/tests/slow-tests/tidy.rs @@ -82,7 +82,6 @@ fn files_are_tidy() { check_dbg(&path, &text); check_test_attrs(&path, &text); check_trailing_ws(&path, &text); - deny_clippy(&path, &text); tidy_docs.visit(&path, &text); tidy_marks.visit(&path, &text); } @@ -144,32 +143,6 @@ fn 
check_cargo_toml(path: &Path, text: String) { } } -fn deny_clippy(path: &Path, text: &str) { - let ignore = &[ - // The documentation in string literals may contain anything for its own purposes - "ide-db/src/generated/lints.rs", - // The tests test clippy lint hovers - "ide/src/hover/tests.rs", - // The tests test clippy lint completions - "ide-completion/src/tests/attribute.rs", - ]; - if ignore.iter().any(|p| path.ends_with(p)) { - return; - } - - if text.contains("\u{61}llow(clippy") { - panic!( - "\n\nallowing lints is forbidden: {}. -rust-analyzer intentionally doesn't check clippy on CI. -You can allow lint globally via `xtask clippy`. -See https://github.com/rust-lang/rust-clippy/issues/5537 for discussion. - -", - path.display() - ) - } -} - #[cfg(not(feature = "in-rust-tree"))] #[test] fn check_licenses() { From 56ffe63c3c108f1c6002ac59b76511a0b81953c6 Mon Sep 17 00:00:00 2001 From: Daniel Eades Date: Tue, 10 Jan 2023 07:18:17 +0000 Subject: [PATCH 0800/1945] derive 'Hash' --- crates/base-db/src/input.rs | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/crates/base-db/src/input.rs b/crates/base-db/src/input.rs index 5fa4a80249..b44a157e25 100644 --- a/crates/base-db/src/input.rs +++ b/crates/base-db/src/input.rs @@ -84,15 +84,10 @@ pub struct CrateGraph { arena: NoHashHashMap, } -#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct CrateId(pub u32); impl stdx::hash::NoHashHashable for CrateId {} -impl std::hash::Hash for CrateId { - fn hash(&self, state: &mut H) { - self.0.hash(state); - } -} #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct CrateName(SmolStr); From 6153ea8dce1dcd60ecff446332c2639be8a6b452 Mon Sep 17 00:00:00 2001 From: Daniel Eades Date: Tue, 10 Jan 2023 08:28:28 +0000 Subject: [PATCH 0801/1945] loop-that-never-loops --- crates/hir-def/src/attr.rs | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/crates/hir-def/src/attr.rs b/crates/hir-def/src/attr.rs index ab5d180e1b..fb1e72b0d0 100644 --- a/crates/hir-def/src/attr.rs +++ b/crates/hir-def/src/attr.rs @@ -251,17 +251,17 @@ impl Attrs { let enum_ = &item_tree[loc.id.value]; let cfg_options = &crate_graph[krate].cfg_options; - let variant = 'tri: loop { - let mut idx = 0; - for variant in enum_.variants.clone() { - let attrs = item_tree.attrs(db, krate, variant.into()); - if attrs.is_cfg_enabled(cfg_options) { - if it.local_id == Idx::from_raw(RawIdx::from(idx)) { - break 'tri variant; - } - idx += 1; + let mut idx = 0; + let Some(variant) = enum_.variants.clone().find(|variant| { + let attrs = item_tree.attrs(db, krate, (*variant).into()); + if attrs.is_cfg_enabled(cfg_options) { + if it.local_id == Idx::from_raw(RawIdx::from(idx)) { + return true } + idx += 1; } + false + }) else { return Arc::new(res); }; (item_tree[variant].fields.clone(), item_tree, krate) From 4267b11c4097b52ec59323c29709a8d06ed7b4cd Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 10 Jan 2023 09:43:08 +0100 Subject: [PATCH 0802/1945] Revert "Use ZWNJ to prevent VSCode from forming ligatures between hints and code" --- editors/code/src/client.ts | 18 ------------------ 1 file changed, 18 deletions(-) diff --git a/editors/code/src/client.ts b/editors/code/src/client.ts index 82cdf0390a..1fbe9d4ea0 100644 --- a/editors/code/src/client.ts +++ b/editors/code/src/client.ts @@ -102,24 +102,6 @@ export async function createClient( } }, }, - async provideInlayHints(document, viewPort, token, 
next) { - const inlays = await next(document, viewPort, token); - if (!inlays) { - return inlays; - } - // U+200C is a zero-width non-joiner to prevent the editor from forming a ligature - // between code and hints - for (const inlay of inlays) { - if (typeof inlay.label === "string") { - inlay.label = `\u{200c}${inlay.label}\u{200c}`; - } else if (Array.isArray(inlay.label)) { - for (const it of inlay.label) { - it.value = `\u{200c}${it.value}\u{200c}`; - } - } - } - return inlays; - }, async handleDiagnostics( uri: vscode.Uri, diagnosticList: vscode.Diagnostic[], From 220996c5c1446add4fb508e60542f383857dc257 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Tue, 10 Jan 2023 17:25:02 +0200 Subject: [PATCH 0803/1945] Tweak wording and remove blank issue template --- .github/ISSUE_TEMPLATE/blank_issue.md | 10 ---------- .github/ISSUE_TEMPLATE/bug_report.md | 2 +- 2 files changed, 1 insertion(+), 11 deletions(-) delete mode 100644 .github/ISSUE_TEMPLATE/blank_issue.md diff --git a/.github/ISSUE_TEMPLATE/blank_issue.md b/.github/ISSUE_TEMPLATE/blank_issue.md deleted file mode 100644 index a08ad07cbf..0000000000 --- a/.github/ISSUE_TEMPLATE/blank_issue.md +++ /dev/null @@ -1,10 +0,0 @@ ---- -name: Blank Issue -about: Create a blank issue. -title: '' -labels: '' -assignees: '' - ---- - - diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index c2e21933c9..b2a2153f09 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -22,4 +22,4 @@ Otherwise please try to provide information which will help us to fix the issue **rustc version**: (eg. output of `rustc -V`) -**relevant settings**: (eg. client settings, or environment variables like `CARGO`, `RUSTUP_HOME` or `CARGO_HOME`) +**relevant settings**: (eg. 
client settings, or environment variables like `CARGO`, `RUSTC`, `RUSTUP_HOME` or `CARGO_HOME`) From 95d14c393c7efd7ccb56076eb2bc799e5a367c68 Mon Sep 17 00:00:00 2001 From: Daniel Eades Date: Tue, 10 Jan 2023 18:20:12 +0000 Subject: [PATCH 0804/1945] avoid 'cloning' types that implement 'Copy' --- crates/hir-ty/src/lower.rs | 2 +- crates/hir/src/lib.rs | 2 +- crates/mbe/src/benchmark.rs | 2 +- crates/mbe/src/expander/transcriber.rs | 2 +- crates/mbe/src/parser.rs | 2 +- crates/mbe/src/tt_iter.rs | 4 ++-- 6 files changed, 7 insertions(+), 7 deletions(-) diff --git a/crates/hir-ty/src/lower.rs b/crates/hir-ty/src/lower.rs index 4b1f40f91d..6a58546982 100644 --- a/crates/hir-ty/src/lower.rs +++ b/crates/hir-ty/src/lower.rs @@ -1931,7 +1931,7 @@ pub(crate) fn const_or_path_to_chalk( debruijn: DebruijnIndex, ) -> Const { match value { - ConstScalarOrPath::Scalar(s) => intern_const_scalar(s.clone(), expected_ty), + ConstScalarOrPath::Scalar(s) => intern_const_scalar(*s, expected_ty), ConstScalarOrPath::Path(n) => { let path = ModPath::from_segments(PathKind::Plain, Some(n.clone())); path_to_const(db, resolver, &path, mode, args, debruijn) diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index ec66660f34..6baf53e35c 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -923,7 +923,7 @@ impl Struct { } pub fn repr(self, db: &dyn HirDatabase) -> Option { - db.struct_data(self.id).repr.clone() + db.struct_data(self.id).repr } pub fn kind(self, db: &dyn HirDatabase) -> StructKind { diff --git a/crates/mbe/src/benchmark.rs b/crates/mbe/src/benchmark.rs index 4b75002501..0fee6dfe43 100644 --- a/crates/mbe/src/benchmark.rs +++ b/crates/mbe/src/benchmark.rs @@ -145,7 +145,7 @@ fn invocation_fixtures(rules: &FxHashMap) -> Vec<(Stri Op::Ident(it) => parent.token_trees.push(tt::Leaf::from(it.clone()).into()), Op::Punct(puncts) => { for punct in puncts { - parent.token_trees.push(tt::Leaf::from(punct.clone()).into()); + parent.token_trees.push(tt::Leaf::from(*punct).into()); } } Op::Repeat { tokens, kind, separator } => { diff --git a/crates/mbe/src/expander/transcriber.rs b/crates/mbe/src/expander/transcriber.rs index db0d327bf4..fb316320ae 100644 --- a/crates/mbe/src/expander/transcriber.rs +++ b/crates/mbe/src/expander/transcriber.rs @@ -138,7 +138,7 @@ fn expand_subtree( Op::Ident(it) => arena.push(tt::Leaf::from(it.clone()).into()), Op::Punct(puncts) => { for punct in puncts { - arena.push(tt::Leaf::from(punct.clone()).into()); + arena.push(tt::Leaf::from(*punct).into()); } } Op::Subtree { tokens, delimiter } => { diff --git a/crates/mbe/src/parser.rs b/crates/mbe/src/parser.rs index fad905e97f..875dc1ad87 100644 --- a/crates/mbe/src/parser.rs +++ b/crates/mbe/src/parser.rs @@ -126,7 +126,7 @@ fn next_op( src.next().expect("first token already peeked"); // Note that the '$' itself is a valid token inside macro_rules. let second = match src.next() { - None => return Ok(Op::Punct(smallvec![p.clone()])), + None => return Ok(Op::Punct(smallvec![*p])), Some(it) => it, }; match second { diff --git a/crates/mbe/src/tt_iter.rs b/crates/mbe/src/tt_iter.rs index bee7b5de6a..7787c74da8 100644 --- a/crates/mbe/src/tt_iter.rs +++ b/crates/mbe/src/tt_iter.rs @@ -114,7 +114,7 @@ impl<'a> TtIter<'a> { ('.', '.', Some('.' | '=')) | ('<', '<', Some('=')) | ('>', '>', Some('=')) => { let _ = self.next().unwrap(); let _ = self.next().unwrap(); - Ok(smallvec![first, second.clone(), third.unwrap().clone()]) + Ok(smallvec![first, *second, *third.unwrap()]) } ('-' | '!' 
| '*' | '/' | '&' | '%' | '^' | '+' | '<' | '=' | '>' | '|', '=', _) | ('-' | '=' | '>', '>', _) @@ -125,7 +125,7 @@ impl<'a> TtIter<'a> { | ('<', '<', _) | ('|', '|', _) => { let _ = self.next().unwrap(); - Ok(smallvec![first, second.clone()]) + Ok(smallvec![first, *second]) } _ => Ok(smallvec![first]), } From ac3844a0bb9a13260bf211354dbc014e892d7491 Mon Sep 17 00:00:00 2001 From: Daniel Eades Date: Tue, 10 Jan 2023 18:48:51 +0000 Subject: [PATCH 0805/1945] a number of code simplifications --- crates/hir-def/src/body/pretty.rs | 2 +- crates/hir-def/src/nameres/collector.rs | 20 ++- crates/hir/src/lib.rs | 4 +- .../src/handlers/generate_default_from_new.rs | 6 +- .../ide-assists/src/handlers/merge_imports.rs | 2 +- .../src/handlers/unmerge_match_arm.rs | 3 +- crates/ide-db/src/rename.rs | 8 +- crates/ide-db/src/search.rs | 40 +++--- .../src/handlers/json_is_not_rust.rs | 122 ++++++++---------- .../src/handlers/private_assoc_item.rs | 5 +- .../src/handlers/unresolved_proc_macro.rs | 5 +- crates/mbe/src/expander/transcriber.rs | 2 +- crates/project-model/src/workspace.rs | 4 +- 13 files changed, 99 insertions(+), 124 deletions(-) diff --git a/crates/hir-def/src/body/pretty.rs b/crates/hir-def/src/body/pretty.rs index 10b9b26bbe..41783081ec 100644 --- a/crates/hir-def/src/body/pretty.rs +++ b/crates/hir-def/src/body/pretty.rs @@ -80,7 +80,7 @@ impl<'a> Write for Printer<'a> { fn write_str(&mut self, s: &str) -> fmt::Result { for line in s.split_inclusive('\n') { if self.needs_indent { - match self.buf.chars().rev().skip_while(|ch| *ch == ' ').next() { + match self.buf.chars().rev().find(|ch| *ch != ' ') { Some('\n') | None => {} _ => self.buf.push('\n'), } diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs index 33a787fd9f..c69599079b 100644 --- a/crates/hir-def/src/nameres/collector.rs +++ b/crates/hir-def/src/nameres/collector.rs @@ -1600,17 +1600,15 @@ impl ModCollector<'_, '_> { FunctionLoc { container, id: ItemTreeId::new(self.tree_id, id) }.intern(db); let vis = resolve_vis(def_map, &self.item_tree[it.visibility]); - if self.def_collector.is_proc_macro { - if self.module_id == def_map.root { - if let Some(proc_macro) = attrs.parse_proc_macro_decl(&it.name) { - let crate_root = def_map.module_id(def_map.root); - self.def_collector.export_proc_macro( - proc_macro, - ItemTreeId::new(self.tree_id, id), - fn_id, - crate_root, - ); - } + if self.def_collector.is_proc_macro && self.module_id == def_map.root { + if let Some(proc_macro) = attrs.parse_proc_macro_decl(&it.name) { + let crate_root = def_map.module_id(def_map.root); + self.def_collector.export_proc_macro( + proc_macro, + ItemTreeId::new(self.tree_id, id), + fn_id, + crate_root, + ); } } diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index ec66660f34..c1e165b638 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -472,8 +472,8 @@ impl Module { let def_map = self.id.def_map(db.upcast()); let children = def_map[self.id.local_id] .children - .iter() - .map(|(_, module_id)| Module { id: def_map.module_id(*module_id) }) + .values() + .map(|module_id| Module { id: def_map.module_id(*module_id) }) .collect::>(); children.into_iter() } diff --git a/crates/ide-assists/src/handlers/generate_default_from_new.rs b/crates/ide-assists/src/handlers/generate_default_from_new.rs index 2d074a33e7..860372941f 100644 --- a/crates/ide-assists/src/handlers/generate_default_from_new.rs +++ b/crates/ide-assists/src/handlers/generate_default_from_new.rs @@ -82,18 +82,18 @@ fn 
generate_trait_impl_text_from_impl(impl_: &ast::Impl, trait_text: &str, code: let generic_params = impl_.generic_param_list().map(|generic_params| { let lifetime_params = generic_params.lifetime_params().map(ast::GenericParam::LifetimeParam); - let ty_or_const_params = generic_params.type_or_const_params().filter_map(|param| { + let ty_or_const_params = generic_params.type_or_const_params().map(|param| { // remove defaults since they can't be specified in impls match param { ast::TypeOrConstParam::Type(param) => { let param = param.clone_for_update(); param.remove_default(); - Some(ast::GenericParam::TypeParam(param)) + ast::GenericParam::TypeParam(param) } ast::TypeOrConstParam::Const(param) => { let param = param.clone_for_update(); param.remove_default(); - Some(ast::GenericParam::ConstParam(param)) + ast::GenericParam::ConstParam(param) } } }); diff --git a/crates/ide-assists/src/handlers/merge_imports.rs b/crates/ide-assists/src/handlers/merge_imports.rs index 2bdbec93b1..d7ddc5f23f 100644 --- a/crates/ide-assists/src/handlers/merge_imports.rs +++ b/crates/ide-assists/src/handlers/merge_imports.rs @@ -92,7 +92,7 @@ trait Merge: AstNode + Clone { fn try_merge_from(self, items: &mut dyn Iterator) -> Option> { let mut edits = Vec::new(); let mut merged = self.clone(); - while let Some(item) = items.next() { + for item in items { merged = merged.try_merge(&item)?; edits.push(Edit::Remove(item.into_either())); } diff --git a/crates/ide-assists/src/handlers/unmerge_match_arm.rs b/crates/ide-assists/src/handlers/unmerge_match_arm.rs index 9565f0ee6f..db789cfa33 100644 --- a/crates/ide-assists/src/handlers/unmerge_match_arm.rs +++ b/crates/ide-assists/src/handlers/unmerge_match_arm.rs @@ -86,8 +86,7 @@ pub(crate) fn unmerge_match_arm(acc: &mut Assists, ctx: &AssistContext<'_>) -> O it.prev_sibling_or_token() }) .map(|it| it.kind()) - .skip_while(|it| it.is_trivia()) - .next() + .find(|it| !it.is_trivia()) == Some(T![,]); let has_arms_after = neighbor(&match_arm, Direction::Next).is_some(); if !has_comma_after && !has_arms_after { diff --git a/crates/ide-db/src/rename.rs b/crates/ide-db/src/rename.rs index cd4a7e1554..8f310b0f42 100644 --- a/crates/ide-db/src/rename.rs +++ b/crates/ide-db/src/rename.rs @@ -334,11 +334,9 @@ pub fn source_edit_from_references( } _ => false, }; - if !has_emitted_edit { - if !edited_ranges.contains(&range.start()) { - edit.replace(range, new_name.to_string()); - edited_ranges.push(range.start()); - } + if !has_emitted_edit && !edited_ranges.contains(&range.start()) { + edit.replace(range, new_name.to_string()); + edited_ranges.push(range.start()); } } diff --git a/crates/ide-db/src/search.rs b/crates/ide-db/src/search.rs index b2b0e49085..f6d4ccc3ce 100644 --- a/crates/ide-db/src/search.rs +++ b/crates/ide-db/src/search.rs @@ -494,20 +494,28 @@ impl<'a> FindUsages<'a> { } // Search for `super` and `crate` resolving to our module - match self.def { - Definition::Module(module) => { - let scope = search_scope - .intersection(&SearchScope::module_and_children(self.sema.db, module)); + if let Definition::Module(module) = self.def { + let scope = + search_scope.intersection(&SearchScope::module_and_children(self.sema.db, module)); - let is_crate_root = - module.is_crate_root(self.sema.db).then(|| Finder::new("crate")); - let finder = &Finder::new("super"); + let is_crate_root = module.is_crate_root(self.sema.db).then(|| Finder::new("crate")); + let finder = &Finder::new("super"); - for (text, file_id, search_range) in scope_files(sema, &scope) { - let tree = 
Lazy::new(move || sema.parse(file_id).syntax().clone()); + for (text, file_id, search_range) in scope_files(sema, &scope) { + let tree = Lazy::new(move || sema.parse(file_id).syntax().clone()); + for offset in match_indices(&text, finder, search_range) { + if let Some(iter) = find_nodes("super", &tree, offset) { + for name_ref in iter.filter_map(ast::NameRef::cast) { + if self.found_name_ref(&name_ref, sink) { + return; + } + } + } + } + if let Some(finder) = &is_crate_root { for offset in match_indices(&text, finder, search_range) { - if let Some(iter) = find_nodes("super", &tree, offset) { + if let Some(iter) = find_nodes("crate", &tree, offset) { for name_ref in iter.filter_map(ast::NameRef::cast) { if self.found_name_ref(&name_ref, sink) { return; @@ -515,20 +523,8 @@ impl<'a> FindUsages<'a> { } } } - if let Some(finder) = &is_crate_root { - for offset in match_indices(&text, finder, search_range) { - if let Some(iter) = find_nodes("crate", &tree, offset) { - for name_ref in iter.filter_map(ast::NameRef::cast) { - if self.found_name_ref(&name_ref, sink) { - return; - } - } - } - } - } } } - _ => (), } // search for module `self` references in our module's definition source diff --git a/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs b/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs index e8df6dcf28..04ce1e0fee 100644 --- a/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs +++ b/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs @@ -99,76 +99,66 @@ pub(crate) fn json_in_items( && node.last_token().map(|x| x.kind()) == Some(SyntaxKind::R_CURLY) { let node_string = node.to_string(); - if let Ok(it) = serde_json::from_str(&node_string) { - if let serde_json::Value::Object(it) = it { - let import_scope = ImportScope::find_insert_use_container(node, sema)?; - let range = node.text_range(); - let mut edit = TextEdit::builder(); - edit.delete(range); - let mut state = State::default(); - let semantics_scope = sema.scope(node)?; - let scope_resolve = - |it| semantics_scope.speculative_resolve(&make::path_from_text(it)); - let scope_has = |it| scope_resolve(it).is_some(); - let deserialize_resolved = scope_resolve("::serde::Deserialize"); - let serialize_resolved = scope_resolve("::serde::Serialize"); - state.has_deserialize = deserialize_resolved.is_some(); - state.has_serialize = serialize_resolved.is_some(); - state.build_struct(&it); - edit.insert(range.start(), state.result); - acc.push( - Diagnostic::new( - "json-is-not-rust", - "JSON syntax is not valid as a Rust item", - range, - ) - .severity(Severity::WeakWarning) - .with_fixes(Some(vec![{ - let mut scb = SourceChangeBuilder::new(file_id); - let scope = match import_scope { - ImportScope::File(it) => ImportScope::File(scb.make_mut(it)), - ImportScope::Module(it) => ImportScope::Module(scb.make_mut(it)), - ImportScope::Block(it) => ImportScope::Block(scb.make_mut(it)), - }; - let current_module = semantics_scope.module(); - if !scope_has("Serialize") { - if let Some(PathResolution::Def(it)) = serialize_resolved { - if let Some(it) = current_module.find_use_path_prefixed( - sema.db, - it, - config.insert_use.prefix_kind, - config.prefer_no_std, - ) { - insert_use( - &scope, - mod_path_to_ast(&it), - &config.insert_use, - ); - } + if let Ok(serde_json::Value::Object(it)) = serde_json::from_str(&node_string) { + let import_scope = ImportScope::find_insert_use_container(node, sema)?; + let range = node.text_range(); + let mut edit = TextEdit::builder(); + edit.delete(range); + let mut state = State::default(); + 
let semantics_scope = sema.scope(node)?; + let scope_resolve = + |it| semantics_scope.speculative_resolve(&make::path_from_text(it)); + let scope_has = |it| scope_resolve(it).is_some(); + let deserialize_resolved = scope_resolve("::serde::Deserialize"); + let serialize_resolved = scope_resolve("::serde::Serialize"); + state.has_deserialize = deserialize_resolved.is_some(); + state.has_serialize = serialize_resolved.is_some(); + state.build_struct(&it); + edit.insert(range.start(), state.result); + acc.push( + Diagnostic::new( + "json-is-not-rust", + "JSON syntax is not valid as a Rust item", + range, + ) + .severity(Severity::WeakWarning) + .with_fixes(Some(vec![{ + let mut scb = SourceChangeBuilder::new(file_id); + let scope = match import_scope { + ImportScope::File(it) => ImportScope::File(scb.make_mut(it)), + ImportScope::Module(it) => ImportScope::Module(scb.make_mut(it)), + ImportScope::Block(it) => ImportScope::Block(scb.make_mut(it)), + }; + let current_module = semantics_scope.module(); + if !scope_has("Serialize") { + if let Some(PathResolution::Def(it)) = serialize_resolved { + if let Some(it) = current_module.find_use_path_prefixed( + sema.db, + it, + config.insert_use.prefix_kind, + config.prefer_no_std, + ) { + insert_use(&scope, mod_path_to_ast(&it), &config.insert_use); } } - if !scope_has("Deserialize") { - if let Some(PathResolution::Def(it)) = deserialize_resolved { - if let Some(it) = current_module.find_use_path_prefixed( - sema.db, - it, - config.insert_use.prefix_kind, - config.prefer_no_std, - ) { - insert_use( - &scope, - mod_path_to_ast(&it), - &config.insert_use, - ); - } + } + if !scope_has("Deserialize") { + if let Some(PathResolution::Def(it)) = deserialize_resolved { + if let Some(it) = current_module.find_use_path_prefixed( + sema.db, + it, + config.insert_use.prefix_kind, + config.prefer_no_std, + ) { + insert_use(&scope, mod_path_to_ast(&it), &config.insert_use); } } - let mut sc = scb.finish(); - sc.insert_source_edit(file_id, edit.finish()); - fix("convert_json_to_struct", "Convert JSON to struct", sc, range) - }])), - ); - } + } + let mut sc = scb.finish(); + sc.insert_source_edit(file_id, edit.finish()); + fix("convert_json_to_struct", "Convert JSON to struct", sc, range) + }])), + ); } } Some(()) diff --git a/crates/ide-diagnostics/src/handlers/private_assoc_item.rs b/crates/ide-diagnostics/src/handlers/private_assoc_item.rs index b363a516dd..0b3121c765 100644 --- a/crates/ide-diagnostics/src/handlers/private_assoc_item.rs +++ b/crates/ide-diagnostics/src/handlers/private_assoc_item.rs @@ -11,10 +11,7 @@ pub(crate) fn private_assoc_item( d: &hir::PrivateAssocItem, ) -> Diagnostic { // FIXME: add quickfix - let name = match d.item.name(ctx.sema.db) { - Some(name) => format!("`{}` ", name), - None => String::new(), - }; + let name = d.item.name(ctx.sema.db).map(|name| format!("`{name}` ")).unwrap_or_default(); Diagnostic::new( "private-assoc-item", format!( diff --git a/crates/ide-diagnostics/src/handlers/unresolved_proc_macro.rs b/crates/ide-diagnostics/src/handlers/unresolved_proc_macro.rs index b2ed19104e..9a984ba6bf 100644 --- a/crates/ide-diagnostics/src/handlers/unresolved_proc_macro.rs +++ b/crates/ide-diagnostics/src/handlers/unresolved_proc_macro.rs @@ -34,10 +34,7 @@ pub(crate) fn unresolved_proc_macro( let message = format!( "{message}: {}", if config_enabled { - match def_map.proc_macro_loading_error() { - Some(e) => e, - None => "proc macro not found in the built dylib", - } + def_map.proc_macro_loading_error().unwrap_or("proc macro not 
found in the built dylib") } else { match d.kind { hir::MacroKind::Attr if proc_macros_enabled => { diff --git a/crates/mbe/src/expander/transcriber.rs b/crates/mbe/src/expander/transcriber.rs index db0d327bf4..605ddd4a06 100644 --- a/crates/mbe/src/expander/transcriber.rs +++ b/crates/mbe/src/expander/transcriber.rs @@ -212,7 +212,7 @@ fn expand_var(ctx: &mut ExpandCtx<'_>, v: &SmolStr, id: tt::TokenId) -> ExpandRe } else { ctx.bindings.get(v, &mut ctx.nesting).map_or_else( |e| ExpandResult { value: Fragment::Tokens(tt::TokenTree::empty()), err: Some(e) }, - |it| ExpandResult::ok(it), + ExpandResult::ok, ) } } diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs index e2382aa37e..5ab767a0e5 100644 --- a/crates/project-model/src/workspace.rs +++ b/crates/project-model/src/workspace.rs @@ -366,11 +366,11 @@ impl ProjectWorkspace { _ => None, }) .collect(); - let ref mut outputs = match WorkspaceBuildScripts::run_once(config, &cargo_ws, progress) { + let outputs = &mut (match WorkspaceBuildScripts::run_once(config, &cargo_ws, progress) { Ok(it) => Ok(it.into_iter()), // io::Error is not Clone? Err(e) => Err(Arc::new(e)), - }; + }); workspaces .iter() From 5b1cd8245f64d6501f0866c49e948e2d8a75038a Mon Sep 17 00:00:00 2001 From: Daniel Eades Date: Tue, 10 Jan 2023 19:44:19 +0000 Subject: [PATCH 0806/1945] fixup --- crates/project-model/src/workspace.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs index 5ab767a0e5..d562066533 100644 --- a/crates/project-model/src/workspace.rs +++ b/crates/project-model/src/workspace.rs @@ -366,11 +366,11 @@ impl ProjectWorkspace { _ => None, }) .collect(); - let outputs = &mut (match WorkspaceBuildScripts::run_once(config, &cargo_ws, progress) { + let outputs = &mut match WorkspaceBuildScripts::run_once(config, &cargo_ws, progress) { Ok(it) => Ok(it.into_iter()), // io::Error is not Clone? 
Err(e) => Err(Arc::new(e)), - }); + }; workspaces .iter() From 95fc3ba41cff006a3ae4c3c730122b0503fdefab Mon Sep 17 00:00:00 2001 From: Daniel Eades Date: Tue, 10 Jan 2023 19:51:21 +0000 Subject: [PATCH 0807/1945] fixup --- crates/hir-def/src/body/pretty.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/hir-def/src/body/pretty.rs b/crates/hir-def/src/body/pretty.rs index 41783081ec..4b4664a1cf 100644 --- a/crates/hir-def/src/body/pretty.rs +++ b/crates/hir-def/src/body/pretty.rs @@ -113,7 +113,7 @@ impl<'a> Printer<'a> { } fn newline(&mut self) { - match self.buf.chars().rev().skip_while(|ch| *ch == ' ').next() { + match self.buf.chars().rev().find(|ch| *ch != ' ') { Some('\n') | None => {} _ => writeln!(self).unwrap(), } From d218b237fd8e5495c5399f0c1651c7dbf3b3daaf Mon Sep 17 00:00:00 2001 From: Daniel Eades Date: Tue, 10 Jan 2023 20:40:08 +0000 Subject: [PATCH 0808/1945] collapse some nested blocks --- crates/hir-ty/src/infer/expr.rs | 34 +++++++++---------- crates/hir-ty/src/layout.rs | 17 ++++------ .../src/handlers/extract_type_alias.rs | 22 ++++++------ crates/ide-db/src/rename.rs | 22 ++++++------ crates/ide-db/src/symbol_index.rs | 8 ++--- crates/ide/src/hover/render.rs | 8 ++--- crates/ide/src/join_lines.rs | 6 ++-- crates/ide/src/syntax_highlighting.rs | 9 +++-- crates/ide/src/typing.rs | 6 ++-- crates/rust-analyzer/src/main_loop.rs | 8 ++--- 10 files changed, 62 insertions(+), 78 deletions(-) diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs index 8f9cdac378..ea04a3d17b 100644 --- a/crates/hir-ty/src/infer/expr.rs +++ b/crates/hir-ty/src/infer/expr.rs @@ -1136,18 +1136,16 @@ impl<'a> InferenceContext<'a> { if self.diverges.is_always() { // we don't even make an attempt at coercion self.table.new_maybe_never_var() - } else { - if let Some(t) = expected.only_has_type(&mut self.table) { - if self.coerce(Some(expr), &TyBuilder::unit(), &t).is_err() { - self.result.type_mismatches.insert( - expr.into(), - TypeMismatch { expected: t.clone(), actual: TyBuilder::unit() }, - ); - } - t - } else { - TyBuilder::unit() + } else if let Some(t) = expected.only_has_type(&mut self.table) { + if self.coerce(Some(expr), &TyBuilder::unit(), &t).is_err() { + self.result.type_mismatches.insert( + expr.into(), + TypeMismatch { expected: t.clone(), actual: TyBuilder::unit() }, + ); } + t + } else { + TyBuilder::unit() } } } @@ -1314,13 +1312,13 @@ impl<'a> InferenceContext<'a> { } else { param_ty }; - if !coercion_target.is_unknown() { - if self.coerce(Some(arg), &ty, &coercion_target).is_err() { - self.result.type_mismatches.insert( - arg.into(), - TypeMismatch { expected: coercion_target, actual: ty.clone() }, - ); - } + if !coercion_target.is_unknown() + && self.coerce(Some(arg), &ty, &coercion_target).is_err() + { + self.result.type_mismatches.insert( + arg.into(), + TypeMismatch { expected: coercion_target, actual: ty.clone() }, + ); } } } diff --git a/crates/hir-ty/src/layout.rs b/crates/hir-ty/src/layout.rs index 7a1cca3143..958d3dabe0 100644 --- a/crates/hir-ty/src/layout.rs +++ b/crates/hir-ty/src/layout.rs @@ -251,17 +251,14 @@ fn layout_of_unit(cx: &LayoutCx<'_>, dl: &TargetDataLayout) -> Result Ty { match pointee.kind(Interner) { - TyKind::Adt(AdtId(adt), subst) => match adt { - &hir_def::AdtId::StructId(i) => { - let data = db.struct_data(i); - let mut it = data.variant_data.fields().iter().rev(); - match it.next() { - Some((f, _)) => field_ty(db, i.into(), f, subst), - None => pointee, - } + 
TyKind::Adt(AdtId(hir_def::AdtId::StructId(i)), subst) => { + let data = db.struct_data(*i); + let mut it = data.variant_data.fields().iter().rev(); + match it.next() { + Some((f, _)) => field_ty(db, (*i).into(), f, subst), + None => pointee, } - _ => pointee, - }, + } _ => pointee, } } diff --git a/crates/ide-assists/src/handlers/extract_type_alias.rs b/crates/ide-assists/src/handlers/extract_type_alias.rs index 0505f5784f..6c0238f35d 100644 --- a/crates/ide-assists/src/handlers/extract_type_alias.rs +++ b/crates/ide-assists/src/handlers/extract_type_alias.rs @@ -161,19 +161,17 @@ fn collect_used_generics<'gp>( .and_then(|lt| known_generics.iter().find(find_lifetime(<.text()))), ), ast::Type::ArrayType(ar) => { - if let Some(expr) = ar.expr() { - if let ast::Expr::PathExpr(p) = expr { - if let Some(path) = p.path() { - if let Some(name_ref) = path.as_single_name_ref() { - if let Some(param) = known_generics.iter().find(|gp| { - if let ast::GenericParam::ConstParam(cp) = gp { - cp.name().map_or(false, |n| n.text() == name_ref.text()) - } else { - false - } - }) { - generics.push(param); + if let Some(ast::Expr::PathExpr(p)) = ar.expr() { + if let Some(path) = p.path() { + if let Some(name_ref) = path.as_single_name_ref() { + if let Some(param) = known_generics.iter().find(|gp| { + if let ast::GenericParam::ConstParam(cp) = gp { + cp.name().map_or(false, |n| n.text() == name_ref.text()) + } else { + false } + }) { + generics.push(param); } } } diff --git a/crates/ide-db/src/rename.rs b/crates/ide-db/src/rename.rs index 8f310b0f42..0e5906097c 100644 --- a/crates/ide-db/src/rename.rs +++ b/crates/ide-db/src/rename.rs @@ -389,19 +389,17 @@ fn source_edit_from_name_ref( edit.delete(TextRange::new(s, e)); return true; } - } else if init == name_ref { - if field_name.text() == new_name { - cov_mark::hit!(test_rename_local_put_init_shorthand); - // Foo { field: local } -> Foo { field } - // ^^^^^^^ delete this + } else if init == name_ref && field_name.text() == new_name { + cov_mark::hit!(test_rename_local_put_init_shorthand); + // Foo { field: local } -> Foo { field } + // ^^^^^^^ delete this - // same names, we can use a shorthand here instead. - // we do not want to erase attributes hence this range start - let s = field_name.syntax().text_range().end(); - let e = init.syntax().text_range().end(); - edit.delete(TextRange::new(s, e)); - return true; - } + // same names, we can use a shorthand here instead. 
+ // we do not want to erase attributes hence this range start + let s = field_name.syntax().text_range().end(); + let e = init.syntax().text_range().end(); + edit.delete(TextRange::new(s, e)); + return true; } } // init shorthand diff --git a/crates/ide-db/src/symbol_index.rs b/crates/ide-db/src/symbol_index.rs index c054cc1597..a91ffd1ec4 100644 --- a/crates/ide-db/src/symbol_index.rs +++ b/crates/ide-db/src/symbol_index.rs @@ -323,10 +323,10 @@ impl Query { if symbol.name != self.query { continue; } - } else if self.case_sensitive { - if self.query.chars().any(|c| !symbol.name.contains(c)) { - continue; - } + } else if self.case_sensitive + && self.query.chars().any(|c| !symbol.name.contains(c)) + { + continue; } res.push(symbol.clone()); diff --git a/crates/ide/src/hover/render.rs b/crates/ide/src/hover/render.rs index 47257f0bfa..cb537d7ef7 100644 --- a/crates/ide/src/hover/render.rs +++ b/crates/ide/src/hover/render.rs @@ -64,12 +64,10 @@ pub(super) fn type_info( bt_end = if config.markdown() { "```\n" } else { "" } ) .into() + } else if config.markdown() { + Markup::fenced_block(&original.display(sema.db)) } else { - if config.markdown() { - Markup::fenced_block(&original.display(sema.db)) - } else { - original.display(sema.db).to_string().into() - } + original.display(sema.db).to_string().into() }; res.actions.push(HoverAction::goto_type_from_targets(sema.db, targets)); Some(res) diff --git a/crates/ide/src/join_lines.rs b/crates/ide/src/join_lines.rs index edc48e84d7..1cfde23624 100644 --- a/crates/ide/src/join_lines.rs +++ b/crates/ide/src/join_lines.rs @@ -161,10 +161,8 @@ fn remove_newline( } } - if config.join_assignments { - if join_assignments(edit, &prev, &next).is_some() { - return; - } + if config.join_assignments && join_assignments(edit, &prev, &next).is_some() { + return; } if config.unwrap_trivial_blocks { diff --git a/crates/ide/src/syntax_highlighting.rs b/crates/ide/src/syntax_highlighting.rs index 50371d620e..454a250f3d 100644 --- a/crates/ide/src/syntax_highlighting.rs +++ b/crates/ide/src/syntax_highlighting.rs @@ -413,11 +413,10 @@ fn traverse( let string = ast::String::cast(token); let string_to_highlight = ast::String::cast(descended_token.clone()); if let Some((string, expanded_string)) = string.zip(string_to_highlight) { - if string.is_raw() { - if inject::ra_fixture(hl, sema, config, &string, &expanded_string).is_some() - { - continue; - } + if string.is_raw() + && inject::ra_fixture(hl, sema, config, &string, &expanded_string).is_some() + { + continue; } highlight_format_string(hl, &string, &expanded_string, range); highlight_escape_string(hl, &string, range.start()); diff --git a/crates/ide/src/typing.rs b/crates/ide/src/typing.rs index eba5a48563..c265487562 100644 --- a/crates/ide/src/typing.rs +++ b/crates/ide/src/typing.rs @@ -205,10 +205,8 @@ fn on_eq_typed(file: &SourceFile, offset: TextSize) -> Option { if expr_stmt.semicolon_token().is_some() { return None; } - } else { - if !ast::StmtList::can_cast(binop.syntax().parent()?.kind()) { - return None; - } + } else if !ast::StmtList::can_cast(binop.syntax().parent()?.kind()) { + return None; } let expr = binop.rhs()?; diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs index 0bc940dfe8..a270049019 100644 --- a/crates/rust-analyzer/src/main_loop.rs +++ b/crates/rust-analyzer/src/main_loop.rs @@ -307,10 +307,10 @@ impl GlobalState { } } - if !was_quiescent || state_changed || memdocs_added_or_removed { - if self.config.publish_diagnostics() { - 
self.update_diagnostics() - } + if (!was_quiescent || state_changed || memdocs_added_or_removed) + && self.config.publish_diagnostics() + { + self.update_diagnostics() } } From a3114c3697c21310dbf8a11feea74712bf1eddd7 Mon Sep 17 00:00:00 2001 From: "daniel.eades" Date: Wed, 11 Jan 2023 16:07:35 +0000 Subject: [PATCH 0809/1945] move loop index inside iterator --- crates/hir-def/src/attr.rs | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/crates/hir-def/src/attr.rs b/crates/hir-def/src/attr.rs index fb1e72b0d0..c209bd9490 100644 --- a/crates/hir-def/src/attr.rs +++ b/crates/hir-def/src/attr.rs @@ -251,19 +251,18 @@ impl Attrs { let enum_ = &item_tree[loc.id.value]; let cfg_options = &crate_graph[krate].cfg_options; - let mut idx = 0; - let Some(variant) = enum_.variants.clone().find(|variant| { + + let Some(variant) = enum_.variants.clone().filter(|variant| { let attrs = item_tree.attrs(db, krate, (*variant).into()); - if attrs.is_cfg_enabled(cfg_options) { - if it.local_id == Idx::from_raw(RawIdx::from(idx)) { - return true - } - idx += 1; - } - false - }) else { + attrs.is_cfg_enabled(cfg_options) + }) + .zip(0u32..) + .find(|(_variant, idx)| it.local_id == Idx::from_raw(RawIdx::from(*idx))) + .map(|(variant, _idx)| variant) + else { return Arc::new(res); }; + (item_tree[variant].fields.clone(), item_tree, krate) } VariantId::StructId(it) => { From bb4e272d8a05c5b6a000808c5c84d722d582603d Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Wed, 11 Jan 2023 17:10:04 +0100 Subject: [PATCH 0810/1945] Refine search for const and function assoc items --- crates/hir-ty/src/method_resolution.rs | 12 +- crates/hir/src/lib.rs | 2 +- crates/hir/src/source_analyzer.rs | 10 +- crates/ide-db/src/search.rs | 62 +++--- crates/ide/src/highlight_related.rs | 2 - crates/ide/src/references.rs | 261 +++++++++++++++++++++++++ crates/rust-analyzer/src/config.rs | 6 +- 7 files changed, 319 insertions(+), 36 deletions(-) diff --git a/crates/hir-ty/src/method_resolution.rs b/crates/hir-ty/src/method_resolution.rs index ae25704f20..64376e10bc 100644 --- a/crates/hir-ty/src/method_resolution.rs +++ b/crates/hir-ty/src/method_resolution.rs @@ -712,17 +712,17 @@ fn lookup_impl_assoc_item_for_trait_ref( let table = InferenceTable::new(db, env); let impl_data = find_matching_impl(impls, table, trait_ref)?; - impl_data.items.iter().find_map(|it| match it { + impl_data.items.iter().find_map(|&it| match it { AssocItemId::FunctionId(f) => { - (db.function_data(*f).name == *name).then_some(AssocItemId::FunctionId(*f)) + (db.function_data(f).name == *name).then_some(AssocItemId::FunctionId(f)) } AssocItemId::ConstId(c) => db - .const_data(*c) + .const_data(c) .name .as_ref() - .map(|n| *n == *name) - .and_then(|result| if result { Some(AssocItemId::ConstId(*c)) } else { None }), - _ => None, + .map(|n| n == name) + .and_then(|result| if result { Some(AssocItemId::ConstId(c)) } else { None }), + AssocItemId::TypeAliasId(_) => None, }) } diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index a1d2ec02f2..2fc2673bd2 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -2129,7 +2129,7 @@ pub enum AssocItem { Const(Const), TypeAlias(TypeAlias), } -#[derive(Debug)] +#[derive(Debug, Clone)] pub enum AssocItemContainer { Trait(Trait), Impl(Impl), diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs index 059b80bcf1..2354eb2c9c 100644 --- a/crates/hir/src/source_analyzer.rs +++ b/crates/hir/src/source_analyzer.rs @@ -504,7 +504,7 @@ impl 
SourceAnalyzer { AssocItemId::ConstId(const_id) => { self.resolve_impl_const_or_trait_def(db, const_id, subs).into() } - _ => assoc, + assoc => assoc, }; return Some(PathResolution::Def(AssocItem::from(assoc).into())); @@ -517,7 +517,13 @@ impl SourceAnalyzer { prefer_value_ns = true; } else if let Some(path_pat) = parent().and_then(ast::PathPat::cast) { let pat_id = self.pat_id(&path_pat.into())?; - if let Some((assoc, _)) = infer.assoc_resolutions_for_pat(pat_id) { + if let Some((assoc, subs)) = infer.assoc_resolutions_for_pat(pat_id) { + let assoc = match assoc { + AssocItemId::ConstId(const_id) => { + self.resolve_impl_const_or_trait_def(db, const_id, subs).into() + } + assoc => assoc, + }; return Some(PathResolution::Def(AssocItem::from(assoc).into())); } if let Some(VariantId::EnumVariantId(variant)) = diff --git a/crates/ide-db/src/search.rs b/crates/ide-db/src/search.rs index f6d4ccc3ce..fd09fdeb0b 100644 --- a/crates/ide-db/src/search.rs +++ b/crates/ide-db/src/search.rs @@ -7,7 +7,9 @@ use std::{mem, sync::Arc}; use base_db::{FileId, FileRange, SourceDatabase, SourceDatabaseExt}; -use hir::{DefWithBody, HasAttrs, HasSource, InFile, ModuleSource, Semantics, Visibility}; +use hir::{ + AsAssocItem, DefWithBody, HasAttrs, HasSource, InFile, ModuleSource, Semantics, Visibility, +}; use memchr::memmem::Finder; use once_cell::unsync::Lazy; use parser::SyntaxKind; @@ -311,15 +313,15 @@ impl Definition { pub fn usages<'a>(self, sema: &'a Semantics<'_, RootDatabase>) -> FindUsages<'a> { FindUsages { + def: self, + assoc_item_container: self.as_assoc_item(sema.db).map(|a| a.container(sema.db)), + sema, + scope: None, + include_self_kw_refs: None, local_repr: match self { Definition::Local(local) => Some(local.representative(sema.db)), _ => None, }, - def: self, - trait_assoc_def: as_trait_assoc_def(sema.db, self), - sema, - scope: None, - include_self_kw_refs: None, search_self_mod: false, } } @@ -328,8 +330,7 @@ impl Definition { #[derive(Clone)] pub struct FindUsages<'a> { def: Definition, - /// If def is an assoc item from a trait or trait impl, this is the corresponding item of the trait definition - trait_assoc_def: Option, + assoc_item_container: Option, sema: &'a Semantics<'a, RootDatabase>, scope: Option, include_self_kw_refs: Option, @@ -380,7 +381,9 @@ impl<'a> FindUsages<'a> { let sema = self.sema; let search_scope = { - let base = self.trait_assoc_def.unwrap_or(self.def).search_scope(sema.db); + // FIXME: Is the trait scope needed for trait impl assoc items? + let base = + as_trait_assoc_def(sema.db, self.def).unwrap_or(self.def).search_scope(sema.db); match &self.scope { None => base, Some(scope) => base.intersection(scope), @@ -651,13 +654,26 @@ impl<'a> FindUsages<'a> { sink(file_id, reference) } Some(NameRefClass::Definition(def)) - if match self.trait_assoc_def { - Some(trait_assoc_def) => { - // we have a trait assoc item, so force resolve all assoc items to their trait version - convert_to_def_in_trait(self.sema.db, def) == trait_assoc_def - } - None => self.def == def, - } => + if self.def == def + // is our def a trait assoc item? 
then we want to find everything + || matches!(self.assoc_item_container, Some(hir::AssocItemContainer::Trait(_))) + && convert_to_def_in_trait(self.sema.db, def) == self.def => + { + let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax()); + let reference = FileReference { + range, + name: ast::NameLike::NameRef(name_ref.clone()), + category: ReferenceCategory::new(&def, name_ref), + }; + sink(file_id, reference) + } + // FIXME: special case type aliases, we can't filter between impl and trait defs here as we lack the substitutions + // so we always resolve all assoc type aliases to both their trait def and impl defs + Some(NameRefClass::Definition(def)) + if self.assoc_item_container.is_some() + && matches!(self.def, Definition::TypeAlias(_)) + && convert_to_def_in_trait(self.sema.db, def) + == convert_to_def_in_trait(self.sema.db, self.def) => { let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax()); let reference = FileReference { @@ -748,12 +764,14 @@ impl<'a> FindUsages<'a> { false } Some(NameClass::Definition(def)) if def != self.def => { - // if the def we are looking for is a trait (impl) assoc item, we'll have to resolve the items to trait definition assoc item - if !matches!( - self.trait_assoc_def, - Some(trait_assoc_def) - if convert_to_def_in_trait(self.sema.db, def) == trait_assoc_def - ) { + // only when looking for trait assoc items, we want to find other assoc items + if !matches!(self.assoc_item_container, Some(hir::AssocItemContainer::Trait(_))) + // FIXME: special case type aliases, we can't filter between impl and trait defs here as we lack the substitutions + // so we always resolve all assoc type aliases to both their trait def and impl defs + && !(matches!(self.def, Definition::TypeAlias(_)) + && convert_to_def_in_trait(self.sema.db, def) + == convert_to_def_in_trait(self.sema.db, self.def)) + { return false; } let FileRange { file_id, range } = self.sema.original_range(name.syntax()); diff --git a/crates/ide/src/highlight_related.rs b/crates/ide/src/highlight_related.rs index 55f8779eed..c889eb930f 100644 --- a/crates/ide/src/highlight_related.rs +++ b/crates/ide/src/highlight_related.rs @@ -1356,7 +1356,6 @@ fn main() { r#" trait Trait { fn func(self) {} - //^^^^ } impl Trait for () { @@ -1376,7 +1375,6 @@ fn main() { r#" trait Trait { fn func(self) {} - //^^^^ } impl Trait for () { diff --git a/crates/ide/src/references.rs b/crates/ide/src/references.rs index 0f758cfa2d..e49f68c57b 100644 --- a/crates/ide/src/references.rs +++ b/crates/ide/src/references.rs @@ -1636,4 +1636,265 @@ pub fn deri$0ve(_stream: TokenStream) -> TokenStream {} "#]], ); } + + #[test] + fn assoc_items_trait_def() { + check( + r#" +trait Trait { + const CONST$0: usize; +} + +impl Trait for () { + const CONST: usize = 0; +} + +impl Trait for ((),) { + const CONST: usize = 0; +} + +fn f() { + let _ = <()>::CONST; + + let _ = T::CONST; +} +"#, + expect![[r#" + CONST Const FileId(0) 18..37 24..29 + + FileId(0) 71..76 + FileId(0) 125..130 + FileId(0) 183..188 + FileId(0) 206..211 + "#]], + ); + check( + r#" +trait Trait { + type TypeAlias$0; +} + +impl Trait for () { + type TypeAlias = (); +} + +impl Trait for ((),) { + type TypeAlias = (); +} + +fn f() { + let _: <() as Trait>::TypeAlias; + + let _: T::TypeAlias; +} +"#, + expect![[r#" + TypeAlias TypeAlias FileId(0) 18..33 23..32 + + FileId(0) 66..75 + FileId(0) 117..126 + FileId(0) 181..190 + FileId(0) 207..216 + "#]], + ); + check( + r#" +trait Trait { + fn function$0() {} +} + +impl Trait for 
() { + fn function() {} +} + +impl Trait for ((),) { + fn function() {} +} + +fn f() { + let _ = <()>::function; + + let _ = T::function; +} +"#, + expect![[r#" + function Function FileId(0) 18..34 21..29 + + FileId(0) 65..73 + FileId(0) 112..120 + FileId(0) 166..174 + FileId(0) 192..200 + "#]], + ); + } + + #[test] + fn assoc_items_trait_impl_def() { + check( + r#" +trait Trait { + const CONST: usize; +} + +impl Trait for () { + const CONST$0: usize = 0; +} + +impl Trait for ((),) { + const CONST: usize = 0; +} + +fn f() { + let _ = <()>::CONST; + + let _ = T::CONST; +} +"#, + expect![[r#" + CONST Const FileId(0) 65..88 71..76 + + FileId(0) 183..188 + "#]], + ); + check( + r#" +trait Trait { + type TypeAlias; +} + +impl Trait for () { + type TypeAlias$0 = (); +} + +impl Trait for ((),) { + type TypeAlias = (); +} + +fn f() { + let _: <() as Trait>::TypeAlias; + + let _: T::TypeAlias; +} +"#, + expect![[r#" + TypeAlias TypeAlias FileId(0) 61..81 66..75 + + FileId(0) 23..32 + FileId(0) 117..126 + FileId(0) 181..190 + FileId(0) 207..216 + "#]], + ); + check( + r#" +trait Trait { + fn function() {} +} + +impl Trait for () { + fn function$0() {} +} + +impl Trait for ((),) { + fn function() {} +} + +fn f() { + let _ = <()>::function; + + let _ = T::function; +} +"#, + expect![[r#" + function Function FileId(0) 62..78 65..73 + + FileId(0) 166..174 + "#]], + ); + } + + #[test] + fn assoc_items_ref() { + check( + r#" +trait Trait { + const CONST: usize; +} + +impl Trait for () { + const CONST: usize = 0; +} + +impl Trait for ((),) { + const CONST: usize = 0; +} + +fn f() { + let _ = <()>::CONST$0; + + let _ = T::CONST; +} +"#, + expect![[r#" + CONST Const FileId(0) 65..88 71..76 + + FileId(0) 183..188 + "#]], + ); + check( + r#" +trait Trait { + type TypeAlias; +} + +impl Trait for () { + type TypeAlias = (); +} + +impl Trait for ((),) { + type TypeAlias = (); +} + +fn f() { + let _: <() as Trait>::TypeAlias$0; + + let _: T::TypeAlias; +} +"#, + expect![[r#" + TypeAlias TypeAlias FileId(0) 18..33 23..32 + + FileId(0) 66..75 + FileId(0) 117..126 + FileId(0) 181..190 + FileId(0) 207..216 + "#]], + ); + check( + r#" +trait Trait { + fn function() {} +} + +impl Trait for () { + fn function() {} +} + +impl Trait for ((),) { + fn function() {} +} + +fn f() { + let _ = <()>::function$0; + + let _ = T::function; +} +"#, + expect![[r#" + function Function FileId(0) 62..78 65..73 + + FileId(0) 166..174 + "#]], + ); + } } diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index b0afbdc9a4..6d6e367e37 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs @@ -1044,7 +1044,7 @@ impl Config { &self.data.cargo_extraEnv } - pub fn check_on_save_extra_env(&self) -> FxHashMap { + pub fn check_extra_env(&self) -> FxHashMap { let mut extra_env = self.data.cargo_extraEnv.clone(); extra_env.extend(self.data.check_extraEnv.clone()); extra_env @@ -1165,7 +1165,7 @@ impl Config { FlycheckConfig::CustomCommand { command, args, - extra_env: self.check_on_save_extra_env(), + extra_env: self.check_extra_env(), invocation_strategy: match self.data.check_invocationStrategy { InvocationStrategy::Once => flycheck::InvocationStrategy::Once, InvocationStrategy::PerWorkspace => { @@ -1210,7 +1210,7 @@ impl Config { CargoFeaturesDef::Selected(it) => it, }, extra_args: self.data.check_extraArgs.clone(), - extra_env: self.check_on_save_extra_env(), + extra_env: self.check_extra_env(), ansi_color_output: self.color_diagnostic_output(), }, } From 
1ce3e820dcb3ca90dec6d3b008e14531c54afc06 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Wed, 11 Jan 2023 22:39:05 +0100 Subject: [PATCH 0811/1945] feat: Make unlinked_file diagnostic quickfixes work for inline modules --- crates/hir-expand/src/lib.rs | 8 + .../src/handlers/unlinked_file.rs | 219 ++++++++++++++---- crates/vfs/src/vfs_path.rs | 17 +- 3 files changed, 202 insertions(+), 42 deletions(-) diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index 5554c7517f..b879eec4cc 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -356,6 +356,14 @@ impl HirFileId { } } + #[inline] + pub fn file_id(self) -> Option { + match self.0 & Self::MACRO_FILE_TAG_MASK { + 0 => Some(FileId(self.0)), + _ => None, + } + } + fn repr(self) -> HirFileIdRepr { match self.0 & Self::MACRO_FILE_TAG_MASK { 0 => HirFileIdRepr::FileId(FileId(self.0)), diff --git a/crates/ide-diagnostics/src/handlers/unlinked_file.rs b/crates/ide-diagnostics/src/handlers/unlinked_file.rs index be70f0ac4f..4bb3789d69 100644 --- a/crates/ide-diagnostics/src/handlers/unlinked_file.rs +++ b/crates/ide-diagnostics/src/handlers/unlinked_file.rs @@ -1,6 +1,8 @@ //! Diagnostic emitted for files that aren't part of any crate. -use hir::db::DefDatabase; +use std::iter; + +use hir::{db::DefDatabase, InFile, ModuleSource}; use ide_db::{ base_db::{FileId, FileLoader, SourceDatabase, SourceDatabaseExt}, source_change::SourceChange, @@ -42,45 +44,106 @@ fn fixes(ctx: &DiagnosticsContext<'_>, file_id: FileId) -> Option> { let source_root = ctx.sema.db.source_root(ctx.sema.db.file_source_root(file_id)); let our_path = source_root.path_for_file(&file_id)?; - let (mut module_name, _) = our_path.name_and_extension()?; - - // Candidates to look for: - // - `mod.rs`, `main.rs` and `lib.rs` in the same folder - // - `$dir.rs` in the parent folder, where `$dir` is the directory containing `self.file_id` let parent = our_path.parent()?; - let paths = { - let parent = if module_name == "mod" { - // for mod.rs we need to actually look up one higher - // and take the parent as our to be module name - let (name, _) = parent.name_and_extension()?; - module_name = name; - parent.parent()? - } else { - parent - }; - let mut paths = - vec![parent.join("mod.rs")?, parent.join("lib.rs")?, parent.join("main.rs")?]; - - // `submod/bla.rs` -> `submod.rs` - let parent_mod = (|| { - let (name, _) = parent.name_and_extension()?; - parent.parent()?.join(&format!("{name}.rs")) - })(); - paths.extend(parent_mod); - paths + let (module_name, _) = our_path.name_and_extension()?; + let (parent, module_name) = if module_name == "mod" { + // for mod.rs we need to actually look up one higher + // and take the parent as our to be module name + let (name, _) = parent.name_and_extension()?; + (parent.parent()?, name.to_owned()) + } else { + (parent, module_name.to_owned()) }; - for &parent_id in paths.iter().filter_map(|path| source_root.file_for_path(path)) { + // check crate roots, i.e. main.rs, lib.rs, ... 
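+    // Illustration (hypothetical paths): for an unlinked `src/foo/bar/baz.rs` in a crate rooted
+    // at `src/main.rs`, `parent` is `src/foo/bar`; stripping the crate root's directory below
+    // leaves `foo/bar`, which is walked as the module children `foo` -> `bar`. If that resolves,
+    // the fix offers to insert a `mod baz;` (or `pub mod baz;`) declaration into the module
+    // definition that was found.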
+ 'outer: for &krate in &*ctx.sema.db.relevant_crates(file_id) { + let crate_def_map = ctx.sema.db.crate_def_map(krate); + if let Some(root_file_id) = crate_def_map[crate_def_map.root()].origin.file_id() { + if let Some(path) = source_root.path_for_file(&root_file_id) { + let parent2 = path.parent()?; + if let Some(rel) = parent.strip_prefix(&parent2) { + let mut current = &crate_def_map[crate_def_map.root()]; + for ele in rel.as_ref().components() { + let seg = match ele { + std::path::Component::Normal(seg) => seg.to_str()?, + std::path::Component::RootDir => continue, + // shouldn't occur + _ => continue 'outer, + }; + match current.children.iter().find(|(name, _)| name.to_smol_str() == seg) { + Some((_, child)) => { + current = &crate_def_map[*child]; + } + None => continue 'outer, + } + } + let InFile { file_id: parent_file_id, value: source } = + current.definition_source(ctx.sema.db); + if let Some(parent_file_id) = parent_file_id.file_id() { + return make_fixes( + ctx.sema.db, + parent_file_id, + source, + &module_name, + file_id, + ); + } + } + } + } + } + // build all parent paths of the form `../module_name/mod.rs` and `../module_name.rs` + let paths = iter::successors(Some(parent.clone()), |prev| prev.parent()).filter_map(|path| { + let parent = path.parent()?; + let (name, _) = path.name_and_extension()?; + Some(([parent.join(&format!("{name}.rs"))?, path.join("mod.rs")?], name.to_owned())) + }); + let mut stack = vec![]; + if let Some(&parent_id) = paths + .inspect(|(_, name)| stack.push(name.clone())) + .find_map(|(paths, _)| paths.into_iter().find_map(|path| source_root.file_for_path(&path))) + { + stack.pop(); for &krate in ctx.sema.db.relevant_crates(parent_id).iter() { let crate_def_map = ctx.sema.db.crate_def_map(krate); - for (_, module) in crate_def_map.modules() { - if module.origin.is_inline() { - // We don't handle inline `mod parent {}`s, they use different paths. 
- continue; - } - + 'outer: for (_, module) in crate_def_map.modules() { if module.origin.file_id() == Some(parent_id) { - return make_fixes(ctx.sema.db, parent_id, module_name, file_id); + if module.origin.is_inline() { + continue; + } + if stack.is_empty() { + return make_fixes( + ctx.sema.db, + parent_id, + module.definition_source(ctx.sema.db).value, + &module_name, + file_id, + ); + } else { + let mut current = module; + for s in stack.iter().rev() { + match module.children.iter().find(|(name, _)| name.to_smol_str() == s) { + Some((_, child)) => { + current = &crate_def_map[*child]; + } + None => break 'outer, + } + } + let InFile { file_id: parent_file_id, value: source } = + current.definition_source(ctx.sema.db); + if let Some(parent_file_id) = parent_file_id.file_id() { + if current.origin.is_inline() { + return make_fixes( + ctx.sema.db, + parent_file_id, + source, + &module_name, + file_id, + ); + } + } + break; + } } } } @@ -92,6 +155,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, file_id: FileId) -> Option> { fn make_fixes( db: &RootDatabase, parent_file_id: FileId, + source: ModuleSource, new_mod_name: &str, added_file_id: FileId, ) -> Option> { @@ -102,14 +166,18 @@ fn make_fixes( let mod_decl = format!("mod {new_mod_name};"); let pub_mod_decl = format!("pub mod {new_mod_name};"); - let ast: ast::SourceFile = db.parse(parent_file_id).tree(); - let mut mod_decl_builder = TextEdit::builder(); let mut pub_mod_decl_builder = TextEdit::builder(); + let mut items = match &source { + ModuleSource::SourceFile(it) => it.items(), + ModuleSource::Module(it) => it.item_list()?.items(), + ModuleSource::BlockExpr(_) => return None, + }; + // If there's an existing `mod m;` statement matching the new one, don't emit a fix (it's // probably `#[cfg]`d out). - for item in ast.items() { + for item in items.clone() { if let ast::Item::Module(m) = item { if let Some(name) = m.name() { if m.item_list().is_none() && name.to_string() == new_mod_name { @@ -121,7 +189,7 @@ fn make_fixes( } // If there are existing `mod m;` items, append after them (after the first group of them, rather). - match ast.items().skip_while(|item| !is_outline_mod(item)).take_while(is_outline_mod).last() { + match items.clone().skip_while(|item| !is_outline_mod(item)).take_while(is_outline_mod).last() { Some(last) => { cov_mark::hit!(unlinked_file_append_to_existing_mods); let offset = last.syntax().text_range().end(); @@ -130,7 +198,7 @@ fn make_fixes( } None => { // Prepend before the first item in the file. - match ast.items().next() { + match items.next() { Some(item) => { cov_mark::hit!(unlinked_file_prepend_before_first_item); let offset = item.syntax().text_range().start(); @@ -140,7 +208,13 @@ fn make_fixes( None => { // No items in the file, so just append at the end. 
cov_mark::hit!(unlinked_file_empty_file); - let offset = ast.syntax().text_range().end(); + let offset = match &source { + ModuleSource::SourceFile(it) => it.syntax().text_range().end(), + ModuleSource::Module(it) => { + it.item_list()?.r_curly_token()?.text_range().start() + } + ModuleSource::BlockExpr(_) => return None, + }; mod_decl_builder.insert(offset, format!("{mod_decl}\n")); pub_mod_decl_builder.insert(offset, format!("{pub_mod_decl}\n")); } @@ -167,7 +241,6 @@ fn make_fixes( #[cfg(test)] mod tests { - use crate::tests::{check_diagnostics, check_fix, check_fixes, check_no_fix}; #[test] @@ -330,6 +403,70 @@ $0 mod foo; //- /foo.rs +"#, + ); + } + + #[test] + fn unlinked_file_insert_into_inline_simple() { + check_fix( + r#" +//- /main.rs +mod bar; +//- /bar.rs +mod foo { + +} +//- /bar/foo/baz.rs +$0 +"#, + r#" +mod foo { + +mod baz; +} +"#, + ); + } + + #[test] + fn unlinked_file_insert_into_inline_simple_modrs() { + check_fix( + r#" +//- /main.rs +mod bar; +//- /bar.rs +mod baz { + +} +//- /bar/baz/foo/mod.rs +$0 +"#, + r#" +mod baz { + +mod foo; +} +"#, + ); + } + + #[test] + fn unlinked_file_insert_into_inline_simple_modrs_main() { + check_fix( + r#" +//- /main.rs +mod bar { + +} +//- /bar/foo/mod.rs +$0 +"#, + r#" +mod bar { + +mod foo; +} "#, ); } diff --git a/crates/vfs/src/vfs_path.rs b/crates/vfs/src/vfs_path.rs index b23c9f1966..38501a8ba5 100644 --- a/crates/vfs/src/vfs_path.rs +++ b/crates/vfs/src/vfs_path.rs @@ -1,7 +1,7 @@ //! Abstract-ish representation of paths for VFS. use std::fmt; -use paths::{AbsPath, AbsPathBuf}; +use paths::{AbsPath, AbsPathBuf, RelPath}; /// Path in [`Vfs`]. /// @@ -84,6 +84,14 @@ impl VfsPath { } } + pub fn strip_prefix(&self, other: &VfsPath) -> Option<&RelPath> { + match (&self.0, &other.0) { + (VfsPathRepr::PathBuf(lhs), VfsPathRepr::PathBuf(rhs)) => lhs.strip_prefix(rhs), + (VfsPathRepr::VirtualPath(lhs), VfsPathRepr::VirtualPath(rhs)) => lhs.strip_prefix(rhs), + (VfsPathRepr::PathBuf(_) | VfsPathRepr::VirtualPath(_), _) => None, + } + } + /// Returns the `VfsPath` without its final component, if there is one. /// /// Returns [`None`] if the path is a root or prefix. @@ -320,6 +328,13 @@ impl VirtualPath { self.0.starts_with(&other.0) } + fn strip_prefix(&self, base: &VirtualPath) -> Option<&RelPath> { + <_ as AsRef>::as_ref(&self.0) + .strip_prefix(&base.0) + .ok() + .map(RelPath::new_unchecked) + } + /// Remove the last component of `self`. 
/// /// This will find the last `'/'` in `self`, and remove everything after it, From 5214a98d9c8030998a233e2308ef371535f8ae86 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Wed, 11 Jan 2023 23:04:35 +0100 Subject: [PATCH 0812/1945] Simplify --- .../src/handlers/unlinked_file.rs | 153 +++++++++--------- 1 file changed, 72 insertions(+), 81 deletions(-) diff --git a/crates/ide-diagnostics/src/handlers/unlinked_file.rs b/crates/ide-diagnostics/src/handlers/unlinked_file.rs index 4bb3789d69..f527aa290c 100644 --- a/crates/ide-diagnostics/src/handlers/unlinked_file.rs +++ b/crates/ide-diagnostics/src/handlers/unlinked_file.rs @@ -46,52 +46,51 @@ fn fixes(ctx: &DiagnosticsContext<'_>, file_id: FileId) -> Option> { let our_path = source_root.path_for_file(&file_id)?; let parent = our_path.parent()?; let (module_name, _) = our_path.name_and_extension()?; - let (parent, module_name) = if module_name == "mod" { + let (parent, module_name) = match module_name { // for mod.rs we need to actually look up one higher // and take the parent as our to be module name - let (name, _) = parent.name_and_extension()?; - (parent.parent()?, name.to_owned()) - } else { - (parent, module_name.to_owned()) + "mod" => { + let (name, _) = parent.name_and_extension()?; + (parent.parent()?, name.to_owned()) + } + _ => (parent, module_name.to_owned()), }; // check crate roots, i.e. main.rs, lib.rs, ... - 'outer: for &krate in &*ctx.sema.db.relevant_crates(file_id) { + 'crates: for &krate in &*ctx.sema.db.relevant_crates(file_id) { let crate_def_map = ctx.sema.db.crate_def_map(krate); - if let Some(root_file_id) = crate_def_map[crate_def_map.root()].origin.file_id() { - if let Some(path) = source_root.path_for_file(&root_file_id) { - let parent2 = path.parent()?; - if let Some(rel) = parent.strip_prefix(&parent2) { - let mut current = &crate_def_map[crate_def_map.root()]; - for ele in rel.as_ref().components() { - let seg = match ele { - std::path::Component::Normal(seg) => seg.to_str()?, - std::path::Component::RootDir => continue, - // shouldn't occur - _ => continue 'outer, - }; - match current.children.iter().find(|(name, _)| name.to_smol_str() == seg) { - Some((_, child)) => { - current = &crate_def_map[*child]; - } - None => continue 'outer, - } - } - let InFile { file_id: parent_file_id, value: source } = - current.definition_source(ctx.sema.db); - if let Some(parent_file_id) = parent_file_id.file_id() { - return make_fixes( - ctx.sema.db, - parent_file_id, - source, - &module_name, - file_id, - ); - } - } + + let root_module = &crate_def_map[crate_def_map.root()]; + let Some(root_file_id) = root_module.origin.file_id() else { continue }; + let Some(crate_root_path) = source_root.path_for_file(&root_file_id) else { continue }; + let Some(rel) = parent.strip_prefix(&crate_root_path.parent()?) 
else { continue }; + + // try resolving the relative difference of the paths as inline modules + let mut current = root_module; + for ele in rel.as_ref().components() { + let seg = match ele { + std::path::Component::Normal(seg) => seg.to_str()?, + std::path::Component::RootDir => continue, + // shouldn't occur + _ => continue 'crates, + }; + match current.children.iter().find(|(name, _)| name.to_smol_str() == seg) { + Some((_, &child)) => current = &crate_def_map[child], + None => continue 'crates, + } + if !current.origin.is_inline() { + continue 'crates; } } + + let InFile { file_id: parent_file_id, value: source } = + current.definition_source(ctx.sema.db); + let parent_file_id = parent_file_id.file_id()?; + return make_fixes(ctx.sema.db, parent_file_id, source, &module_name, file_id); } + + // if we aren't adding to a crate root, walk backwards such that we support `#[path = ...]` overrides if possible + // build all parent paths of the form `../module_name/mod.rs` and `../module_name.rs` let paths = iter::successors(Some(parent.clone()), |prev| prev.parent()).filter_map(|path| { let parent = path.parent()?; @@ -99,53 +98,45 @@ fn fixes(ctx: &DiagnosticsContext<'_>, file_id: FileId) -> Option> { Some(([parent.join(&format!("{name}.rs"))?, path.join("mod.rs")?], name.to_owned())) }); let mut stack = vec![]; - if let Some(&parent_id) = paths - .inspect(|(_, name)| stack.push(name.clone())) - .find_map(|(paths, _)| paths.into_iter().find_map(|path| source_root.file_for_path(&path))) - { - stack.pop(); - for &krate in ctx.sema.db.relevant_crates(parent_id).iter() { - let crate_def_map = ctx.sema.db.crate_def_map(krate); - 'outer: for (_, module) in crate_def_map.modules() { - if module.origin.file_id() == Some(parent_id) { - if module.origin.is_inline() { - continue; - } - if stack.is_empty() { - return make_fixes( - ctx.sema.db, - parent_id, - module.definition_source(ctx.sema.db).value, - &module_name, - file_id, - ); - } else { - let mut current = module; - for s in stack.iter().rev() { - match module.children.iter().find(|(name, _)| name.to_smol_str() == s) { - Some((_, child)) => { - current = &crate_def_map[*child]; - } - None => break 'outer, - } - } - let InFile { file_id: parent_file_id, value: source } = - current.definition_source(ctx.sema.db); - if let Some(parent_file_id) = parent_file_id.file_id() { - if current.origin.is_inline() { - return make_fixes( - ctx.sema.db, - parent_file_id, - source, - &module_name, - file_id, - ); - } - } - break; + let &parent_id = + paths.inspect(|(_, name)| stack.push(name.clone())).find_map(|(paths, _)| { + paths.into_iter().find_map(|path| source_root.file_for_path(&path)) + })?; + stack.pop(); + 'crates: for &krate in ctx.sema.db.relevant_crates(parent_id).iter() { + let crate_def_map = ctx.sema.db.crate_def_map(krate); + let Some((_, module)) = + crate_def_map.modules() + .find(|(_, module)| module.origin.file_id() == Some(parent_id) && !module.origin.is_inline()) + else { continue }; + + if stack.is_empty() { + return make_fixes( + ctx.sema.db, + parent_id, + module.definition_source(ctx.sema.db).value, + &module_name, + file_id, + ); + } else { + // direct parent file is missing, + // try finding a parent that has an inline tree from here on + let mut current = module; + for s in stack.iter().rev() { + match module.children.iter().find(|(name, _)| name.to_smol_str() == s) { + Some((_, child)) => { + current = &crate_def_map[*child]; } + None => continue 'crates, + } + if !current.origin.is_inline() { + continue 'crates; } } + let InFile 
{ file_id: parent_file_id, value: source } = + current.definition_source(ctx.sema.db); + let parent_file_id = parent_file_id.file_id()?; + return make_fixes(ctx.sema.db, parent_file_id, source, &module_name, file_id); } } From 6f201cfc56783beacb98c9e9033ddd58567c4517 Mon Sep 17 00:00:00 2001 From: Roland Fredenhagen Date: Thu, 12 Jan 2023 02:28:13 +0100 Subject: [PATCH 0813/1945] Assist: desugar doc-comment --- .../src/handlers/convert_comment_block.rs | 4 +- .../src/handlers/desugar_doc_comment.rs | 313 ++++++++++++++++++ crates/ide-assists/src/handlers/raw_string.rs | 2 +- crates/ide-assists/src/lib.rs | 2 + crates/ide-assists/src/tests/generated.rs | 15 + 5 files changed, 333 insertions(+), 3 deletions(-) create mode 100644 crates/ide-assists/src/handlers/desugar_doc_comment.rs diff --git a/crates/ide-assists/src/handlers/convert_comment_block.rs b/crates/ide-assists/src/handlers/convert_comment_block.rs index 312cb65abd..282820b167 100644 --- a/crates/ide-assists/src/handlers/convert_comment_block.rs +++ b/crates/ide-assists/src/handlers/convert_comment_block.rs @@ -107,7 +107,7 @@ fn line_to_block(acc: &mut Assists, comment: ast::Comment) -> Option<()> { /// The line -> block assist can be invoked from anywhere within a sequence of line comments. /// relevant_line_comments crawls backwards and forwards finding the complete sequence of comments that will /// be joined. -fn relevant_line_comments(comment: &ast::Comment) -> Vec { +pub fn relevant_line_comments(comment: &ast::Comment) -> Vec { // The prefix identifies the kind of comment we're dealing with let prefix = comment.prefix(); let same_prefix = |c: &ast::Comment| c.prefix() == prefix; @@ -159,7 +159,7 @@ fn relevant_line_comments(comment: &ast::Comment) -> Vec { // */ // // But since such comments aren't idiomatic we're okay with this. -fn line_comment_text(indentation: IndentLevel, comm: ast::Comment) -> String { +pub fn line_comment_text(indentation: IndentLevel, comm: ast::Comment) -> String { let contents_without_prefix = comm.text().strip_prefix(comm.prefix()).unwrap(); let contents = contents_without_prefix.strip_prefix(' ').unwrap_or(contents_without_prefix); diff --git a/crates/ide-assists/src/handlers/desugar_doc_comment.rs b/crates/ide-assists/src/handlers/desugar_doc_comment.rs new file mode 100644 index 0000000000..ea7044a544 --- /dev/null +++ b/crates/ide-assists/src/handlers/desugar_doc_comment.rs @@ -0,0 +1,313 @@ +use either::Either; +use itertools::Itertools; +use syntax::{ + ast::{self, edit::IndentLevel, CommentPlacement, Whitespace}, + AstToken, TextRange, +}; + +use crate::{AssistContext, AssistId, AssistKind, Assists}; + +use super::{ + convert_comment_block::{line_comment_text, relevant_line_comments}, + raw_string::required_hashes, +}; + +// Assist: desugar_doc_comment +// +// Desugars doc-comments to the attribute form. 
+// +// ``` +// /// Multi-line$0 +// /// comment +// ``` +// -> +// ``` +// #[doc = r"Multi-line +// comment"] +// ``` +pub(crate) fn desugar_doc_comment(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { + let comment = ctx.find_token_at_offset::()?; + // Only allow doc comments + let Some(placement) = comment.kind().doc else { return None; }; + + // Only allow comments which are alone on their line + if let Some(prev) = comment.syntax().prev_token() { + if Whitespace::cast(prev).filter(|w| w.text().contains('\n')).is_none() { + return None; + } + } + + let indentation = IndentLevel::from_token(comment.syntax()).to_string(); + + let (target, comments) = match comment.kind().shape { + ast::CommentShape::Block => (comment.syntax().text_range(), Either::Left(comment)), + ast::CommentShape::Line => { + // Find all the comments we'll be desugaring + let comments = relevant_line_comments(&comment); + + // Establish the target of our edit based on the comments we found + ( + TextRange::new( + comments[0].syntax().text_range().start(), + comments.last().unwrap().syntax().text_range().end(), + ), + Either::Right(comments), + ) + } + }; + + acc.add( + AssistId("desugar_doc_comment", AssistKind::RefactorRewrite), + "Desugar doc-comment to attribute macro", + target, + |edit| { + let text = match comments { + Either::Left(comment) => { + let text = comment.text(); + text[comment.prefix().len()..(text.len() - "*/".len())] + .trim() + .lines() + .map(|l| l.strip_prefix(&indentation).unwrap_or(l)) + .join("\n") + } + Either::Right(comments) => { + comments.into_iter().map(|c| line_comment_text(IndentLevel(0), c)).join("\n") + } + }; + + let hashes = "#".repeat(required_hashes(&text)); + + let prefix = match placement { + CommentPlacement::Inner => "#!", + CommentPlacement::Outer => "#", + }; + + let output = format!(r#"{prefix}[doc = r{hashes}"{text}"{hashes}]"#); + + edit.replace(target, output) + }, + ) +} + +#[cfg(test)] +mod tests { + use crate::tests::{check_assist, check_assist_not_applicable}; + + use super::*; + + #[test] + fn single_line() { + check_assist( + desugar_doc_comment, + r#" +/// line$0 comment +fn main() { + foo(); +} +"#, + r#" +#[doc = r"line comment"] +fn main() { + foo(); +} +"#, + ); + check_assist( + desugar_doc_comment, + r#" +//! line$0 comment +fn main() { + foo(); +} +"#, + r#" +#![doc = r"line comment"] +fn main() { + foo(); +} +"#, + ); + } + + #[test] + fn single_line_indented() { + check_assist( + desugar_doc_comment, + r#" +fn main() { + /// line$0 comment + struct Foo; +} +"#, + r#" +fn main() { + #[doc = r"line comment"] + struct Foo; +} +"#, + ); + } + + #[test] + fn multiline() { + check_assist( + desugar_doc_comment, + r#" +fn main() { + /// above + /// line$0 comment + /// + /// below + struct Foo; +} +"#, + r#" +fn main() { + #[doc = r"above +line comment + +below"] + struct Foo; +} +"#, + ); + } + + #[test] + fn end_of_line() { + check_assist_not_applicable( + desugar_doc_comment, + r#" +fn main() { /// end-of-line$0 comment + struct Foo; +} +"#, + ); + } + + #[test] + fn single_line_different_kinds() { + check_assist( + desugar_doc_comment, + r#" +fn main() { + //! different prefix + /// line$0 comment + /// below + struct Foo; +} +"#, + r#" +fn main() { + //! 
different prefix + #[doc = r"line comment +below"] + struct Foo; +} +"#, + ); + } + + #[test] + fn single_line_separate_chunks() { + check_assist( + desugar_doc_comment, + r#" +/// different chunk + +/// line$0 comment +/// below +"#, + r#" +/// different chunk + +#[doc = r"line comment +below"] +"#, + ); + } + + #[test] + fn block_comment() { + check_assist( + desugar_doc_comment, + r#" +/** + hi$0 there +*/ +"#, + r#" +#[doc = r"hi there"] +"#, + ); + } + + #[test] + fn inner_doc_block() { + check_assist( + desugar_doc_comment, + r#" +/*! + hi$0 there +*/ +"#, + r#" +#![doc = r"hi there"] +"#, + ); + } + + #[test] + fn block_indent() { + check_assist( + desugar_doc_comment, + r#" +fn main() { + /*! + hi$0 there + + ``` + code_sample + ``` + */ +} +"#, + r#" +fn main() { + #![doc = r"hi there + +``` + code_sample +```"] +} +"#, + ); + } + + #[test] + fn end_of_line_block() { + check_assist_not_applicable( + desugar_doc_comment, + r#" +fn main() { + foo(); /** end-of-line$0 comment */ +} +"#, + ); + } + + #[test] + fn regular_comment() { + check_assist_not_applicable(desugar_doc_comment, r#"// some$0 comment"#); + check_assist_not_applicable(desugar_doc_comment, r#"/* some$0 comment*/"#); + } + + #[test] + fn quotes_and_escapes() { + check_assist( + desugar_doc_comment, + r###"/// some$0 "\ "## comment"###, + r####"#[doc = r###"some "\ "## comment"###]"####, + ); + } +} diff --git a/crates/ide-assists/src/handlers/raw_string.rs b/crates/ide-assists/src/handlers/raw_string.rs index c9bc25b27a..eff5a1f21f 100644 --- a/crates/ide-assists/src/handlers/raw_string.rs +++ b/crates/ide-assists/src/handlers/raw_string.rs @@ -155,7 +155,7 @@ pub(crate) fn remove_hash(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option< }) } -fn required_hashes(s: &str) -> usize { +pub fn required_hashes(s: &str) -> usize { let mut res = 0usize; for idx in s.match_indices('"').map(|(i, _)| i) { let (_, sub) = s.split_at(idx + 1); diff --git a/crates/ide-assists/src/lib.rs b/crates/ide-assists/src/lib.rs index 7813c9f9cb..546ef96260 100644 --- a/crates/ide-assists/src/lib.rs +++ b/crates/ide-assists/src/lib.rs @@ -126,6 +126,7 @@ mod handlers { mod convert_to_guarded_return; mod convert_two_arm_bool_match_to_matches_macro; mod convert_while_to_loop; + mod desugar_doc_comment; mod destructure_tuple_binding; mod expand_glob_import; mod extract_expressions_from_format_string; @@ -231,6 +232,7 @@ mod handlers { convert_tuple_struct_to_named_struct::convert_tuple_struct_to_named_struct, convert_two_arm_bool_match_to_matches_macro::convert_two_arm_bool_match_to_matches_macro, convert_while_to_loop::convert_while_to_loop, + desugar_doc_comment::desugar_doc_comment, destructure_tuple_binding::destructure_tuple_binding, expand_glob_import::expand_glob_import, extract_expressions_from_format_string::extract_expressions_from_format_string, diff --git a/crates/ide-assists/src/tests/generated.rs b/crates/ide-assists/src/tests/generated.rs index 006ae4b303..16a06b60de 100644 --- a/crates/ide-assists/src/tests/generated.rs +++ b/crates/ide-assists/src/tests/generated.rs @@ -597,6 +597,21 @@ fn main() { ) } +#[test] +fn doctest_desugar_doc_comment() { + check_doc_test( + "desugar_doc_comment", + r#####" +/// Multi-line$0 +/// comment +"#####, + r#####" +#[doc = r"Multi-line +comment"] +"#####, + ) +} + #[test] fn doctest_expand_glob_import() { check_doc_test( From 03bc46f96ba5c0bdf352cf32660d8760172dd010 Mon Sep 17 00:00:00 2001 From: Roland Fredenhagen Date: Thu, 12 Jan 2023 09:54:48 +0100 Subject: [PATCH 0814/1945] Convert pub to 
pub(crate) --- crates/ide-assists/src/handlers/convert_comment_block.rs | 4 ++-- crates/ide-assists/src/handlers/raw_string.rs | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/ide-assists/src/handlers/convert_comment_block.rs b/crates/ide-assists/src/handlers/convert_comment_block.rs index 282820b167..1acd5ee972 100644 --- a/crates/ide-assists/src/handlers/convert_comment_block.rs +++ b/crates/ide-assists/src/handlers/convert_comment_block.rs @@ -107,7 +107,7 @@ fn line_to_block(acc: &mut Assists, comment: ast::Comment) -> Option<()> { /// The line -> block assist can be invoked from anywhere within a sequence of line comments. /// relevant_line_comments crawls backwards and forwards finding the complete sequence of comments that will /// be joined. -pub fn relevant_line_comments(comment: &ast::Comment) -> Vec { +pub(crate) fn relevant_line_comments(comment: &ast::Comment) -> Vec { // The prefix identifies the kind of comment we're dealing with let prefix = comment.prefix(); let same_prefix = |c: &ast::Comment| c.prefix() == prefix; @@ -159,7 +159,7 @@ pub fn relevant_line_comments(comment: &ast::Comment) -> Vec { // */ // // But since such comments aren't idiomatic we're okay with this. -pub fn line_comment_text(indentation: IndentLevel, comm: ast::Comment) -> String { +pub(crate) fn line_comment_text(indentation: IndentLevel, comm: ast::Comment) -> String { let contents_without_prefix = comm.text().strip_prefix(comm.prefix()).unwrap(); let contents = contents_without_prefix.strip_prefix(' ').unwrap_or(contents_without_prefix); diff --git a/crates/ide-assists/src/handlers/raw_string.rs b/crates/ide-assists/src/handlers/raw_string.rs index eff5a1f21f..b1b3bab7e5 100644 --- a/crates/ide-assists/src/handlers/raw_string.rs +++ b/crates/ide-assists/src/handlers/raw_string.rs @@ -155,7 +155,7 @@ pub(crate) fn remove_hash(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option< }) } -pub fn required_hashes(s: &str) -> usize { +pub(crate) fn required_hashes(s: &str) -> usize { let mut res = 0usize; for idx in s.match_indices('"').map(|(i, _)| i) { let (_, sub) = s.split_at(idx + 1); From 14777ce75190d21f52ae068e3d43a7d16de84ca7 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Thu, 12 Jan 2023 11:24:44 +0100 Subject: [PATCH 0815/1945] fix indentation of unlinked_file quickfix --- .../src/handlers/unlinked_file.rs | 38 +++++++++---------- crates/ide/src/goto_declaration.rs | 1 + 2 files changed, 20 insertions(+), 19 deletions(-) diff --git a/crates/ide-diagnostics/src/handlers/unlinked_file.rs b/crates/ide-diagnostics/src/handlers/unlinked_file.rs index f527aa290c..3d45a75913 100644 --- a/crates/ide-diagnostics/src/handlers/unlinked_file.rs +++ b/crates/ide-diagnostics/src/handlers/unlinked_file.rs @@ -9,7 +9,7 @@ use ide_db::{ RootDatabase, }; use syntax::{ - ast::{self, HasModuleItem, HasName}, + ast::{self, edit::IndentLevel, HasModuleItem, HasName}, AstNode, TextRange, TextSize, }; use text_edit::TextEdit; @@ -184,30 +184,36 @@ fn make_fixes( Some(last) => { cov_mark::hit!(unlinked_file_append_to_existing_mods); let offset = last.syntax().text_range().end(); - mod_decl_builder.insert(offset, format!("\n{mod_decl}")); - pub_mod_decl_builder.insert(offset, format!("\n{pub_mod_decl}")); + let indent = IndentLevel::from_node(last.syntax()); + mod_decl_builder.insert(offset, format!("\n{indent}{mod_decl}")); + pub_mod_decl_builder.insert(offset, format!("\n{indent}{pub_mod_decl}")); } None => { // Prepend before the first item in the file. 
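                 // e.g. the `{indent}` appended below re-creates the first item's indentation on
                 // its new line, after the inserted `mod` declaration and blank line, so items
                 // declared inside inline modules stay aligned.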
match items.next() { - Some(item) => { + Some(first) => { cov_mark::hit!(unlinked_file_prepend_before_first_item); - let offset = item.syntax().text_range().start(); - mod_decl_builder.insert(offset, format!("{mod_decl}\n\n")); - pub_mod_decl_builder.insert(offset, format!("{pub_mod_decl}\n\n")); + let offset = first.syntax().text_range().start(); + let indent = IndentLevel::from_node(first.syntax()); + mod_decl_builder.insert(offset, format!("{mod_decl}\n\n{indent}")); + pub_mod_decl_builder.insert(offset, format!("{pub_mod_decl}\n\n{indent}")); } None => { // No items in the file, so just append at the end. cov_mark::hit!(unlinked_file_empty_file); + let mut indent = IndentLevel::from(0); let offset = match &source { ModuleSource::SourceFile(it) => it.syntax().text_range().end(), ModuleSource::Module(it) => { + indent = IndentLevel::from_node(it.syntax()) + 1; it.item_list()?.r_curly_token()?.text_range().start() } - ModuleSource::BlockExpr(_) => return None, + ModuleSource::BlockExpr(it) => { + it.stmt_list()?.r_curly_token()?.text_range().start() + } }; - mod_decl_builder.insert(offset, format!("{mod_decl}\n")); - pub_mod_decl_builder.insert(offset, format!("{pub_mod_decl}\n")); + mod_decl_builder.insert(offset, format!("{indent}{mod_decl}\n")); + pub_mod_decl_builder.insert(offset, format!("{indent}{pub_mod_decl}\n")); } } } @@ -406,15 +412,13 @@ mod foo; mod bar; //- /bar.rs mod foo { - } //- /bar/foo/baz.rs $0 "#, r#" mod foo { - -mod baz; + mod baz; } "#, ); @@ -428,15 +432,13 @@ mod baz; mod bar; //- /bar.rs mod baz { - } //- /bar/baz/foo/mod.rs $0 "#, r#" mod baz { - -mod foo; + mod foo; } "#, ); @@ -448,15 +450,13 @@ mod foo; r#" //- /main.rs mod bar { - } //- /bar/foo/mod.rs $0 "#, r#" mod bar { - -mod foo; + mod foo; } "#, ); diff --git a/crates/ide/src/goto_declaration.rs b/crates/ide/src/goto_declaration.rs index c7130a2a4b..e70bc2ec54 100644 --- a/crates/ide/src/goto_declaration.rs +++ b/crates/ide/src/goto_declaration.rs @@ -17,6 +17,7 @@ use crate::{ // This is the same as `Go to Definition` with the following exceptions: // - outline modules will navigate to the `mod name;` item declaration // - trait assoc items will navigate to the assoc item of the trait declaration opposed to the trait impl +// - fields in patterns will navigate to the field declaration of the struct, union or variant pub(crate) fn goto_declaration( db: &RootDatabase, position: FilePosition, From 7413a9954f2d2967f790fc634509a0850ff81769 Mon Sep 17 00:00:00 2001 From: Daniel Eades Date: Thu, 12 Jan 2023 10:48:10 +0000 Subject: [PATCH 0816/1945] interior-mutable types should be 'static' rather than 'const --- crates/hir-def/src/nameres/mod_resolution.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/hir-def/src/nameres/mod_resolution.rs b/crates/hir-def/src/nameres/mod_resolution.rs index 4c263846d2..51c565fe12 100644 --- a/crates/hir-def/src/nameres/mod_resolution.rs +++ b/crates/hir-def/src/nameres/mod_resolution.rs @@ -7,7 +7,7 @@ use syntax::SmolStr; use crate::{db::DefDatabase, HirFileId}; -const MOD_DEPTH_LIMIT: Limit = Limit::new(32); +static MOD_DEPTH_LIMIT: Limit = Limit::new(32); #[derive(Clone, Debug)] pub(super) struct ModDir { From c4b36b64aae0d5bace5d19e0fbd64d1db9114bf9 Mon Sep 17 00:00:00 2001 From: Daniel Eades Date: Thu, 12 Jan 2023 11:53:45 +0000 Subject: [PATCH 0817/1945] remove recursive 'Display' implementations --- .../src/abis/abi_1_58/proc_macro/mod.rs | 24 +++++++++---------- .../src/abis/abi_1_63/proc_macro/mod.rs | 24 +++++++++---------- 2 files 
changed, 24 insertions(+), 24 deletions(-) diff --git a/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/mod.rs b/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/mod.rs index c5145d00e3..a94b909941 100644 --- a/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/mod.rs +++ b/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/mod.rs @@ -123,8 +123,8 @@ impl FromStr for TokenStream { /// into the same token stream (modulo spans), except for possibly `TokenTree::Group`s /// with `Delimiter::None` delimiters and negative numeric literals. impl fmt::Display for TokenStream { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(&self.to_string()) + fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result { + unimplemented!() } } @@ -522,8 +522,8 @@ impl From for TokenTree { /// into the same token tree (modulo spans), except for possibly `TokenTree::Group`s /// with `Delimiter::None` delimiters and negative numeric literals. impl fmt::Display for TokenTree { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(&self.to_string()) + fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result { + unimplemented!() } } @@ -619,8 +619,8 @@ impl Group { /// into the same group (modulo spans), except for possibly `TokenTree::Group`s /// with `Delimiter::None` delimiters. impl fmt::Display for Group { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(&self.to_string()) + fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result { + unimplemented!() } } @@ -693,8 +693,8 @@ impl Punct { /// Prints the punctuation character as a string that should be losslessly convertible /// back into the same character. impl fmt::Display for Punct { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(&self.to_string()) + fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result { + unimplemented!() } } @@ -771,8 +771,8 @@ impl Ident { /// Prints the identifier as a string that should be losslessly convertible /// back into the same identifier. impl fmt::Display for Ident { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(&self.to_string()) + fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result { + unimplemented!() } } @@ -1014,8 +1014,8 @@ impl FromStr for Literal { /// Prints the literal as a string that should be losslessly convertible /// back into the same literal (except for possible rounding for floating point literals). impl fmt::Display for Literal { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(&self.to_string()) + fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result { + unimplemented!() } } diff --git a/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/mod.rs b/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/mod.rs index 7ab1f421da..89bd10da5e 100644 --- a/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/mod.rs +++ b/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/mod.rs @@ -124,8 +124,8 @@ impl FromStr for TokenStream { /// into the same token stream (modulo spans), except for possibly `TokenTree::Group`s /// with `Delimiter::None` delimiters and negative numeric literals. 
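 // Note: the removed `fmt` bodies below delegated to `f.write_str(&self.to_string())`, but
 // `ToString` is the blanket impl over `Display`, so those implementations recursed without
 // bound; they are replaced with `unimplemented!()` stubs here and in `abi_1_58` above.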
impl fmt::Display for TokenStream { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(&self.to_string()) + fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result { + unimplemented!() } } @@ -572,8 +572,8 @@ impl From for TokenTree { /// into the same token tree (modulo spans), except for possibly `TokenTree::Group`s /// with `Delimiter::None` delimiters and negative numeric literals. impl fmt::Display for TokenTree { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(&self.to_string()) + fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result { + unimplemented!() } } @@ -669,8 +669,8 @@ impl Group { /// into the same group (modulo spans), except for possibly `TokenTree::Group`s /// with `Delimiter::None` delimiters. impl fmt::Display for Group { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(&self.to_string()) + fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result { + unimplemented!() } } @@ -743,8 +743,8 @@ impl Punct { /// Prints the punctuation character as a string that should be losslessly convertible /// back into the same character. impl fmt::Display for Punct { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(&self.to_string()) + fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result { + unimplemented!() } } @@ -821,8 +821,8 @@ impl Ident { /// Prints the identifier as a string that should be losslessly convertible /// back into the same identifier. impl fmt::Display for Ident { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(&self.to_string()) + fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result { + unimplemented!() } } @@ -1064,8 +1064,8 @@ impl FromStr for Literal { /// Prints the literal as a string that should be losslessly convertible /// back into the same literal (except for possible rounding for floating point literals). 
impl fmt::Display for Literal { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(&self.to_string()) + fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result { + unimplemented!() } } From 9721505bf1cc441334c9973525a5c9de826e51bb Mon Sep 17 00:00:00 2001 From: Neel Yadav Date: Wed, 30 Nov 2022 20:14:06 -0600 Subject: [PATCH 0818/1945] Fix panicking Option unwraping in match arm analysis --- crates/ide-assists/src/handlers/convert_match_to_let_else.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/ide-assists/src/handlers/convert_match_to_let_else.rs b/crates/ide-assists/src/handlers/convert_match_to_let_else.rs index 5bf04a3ad3..fbd81c8015 100644 --- a/crates/ide-assists/src/handlers/convert_match_to_let_else.rs +++ b/crates/ide-assists/src/handlers/convert_match_to_let_else.rs @@ -87,7 +87,7 @@ fn find_arms( let mut extracting = None; let mut diverging = None; for arm in arms { - if ctx.sema.type_of_expr(&arm.expr().unwrap()).unwrap().original().is_never() { + if ctx.sema.type_of_expr(&arm.expr()?)?.original().is_never() { diverging = Some(arm); } else { extracting = Some(arm); From aa73366b0b91e1b99a0e6059e7f8dfbcb41cf929 Mon Sep 17 00:00:00 2001 From: Aleksey Kladov Date: Fri, 13 Jan 2023 11:44:02 +0000 Subject: [PATCH 0819/1945] internal: explain the idea behind rust-project.json --- crates/project-model/src/project_json.rs | 44 ++++++++++++++++++++++++ 1 file changed, 44 insertions(+) diff --git a/crates/project-model/src/project_json.rs b/crates/project-model/src/project_json.rs index 9af0eafe9f..a990fb267a 100644 --- a/crates/project-model/src/project_json.rs +++ b/crates/project-model/src/project_json.rs @@ -4,6 +4,50 @@ //! idea here is that people who do not use Cargo, can instead teach their build //! system to generate `rust-project.json` which can be ingested by //! rust-analyzer. +//! +//! This short file is a somewhat big conceptual piece of the architecture of +//! rust-analyzer, so it's worth elaborating on the underlying ideas and +//! motivation. +//! +//! For rust-analyzer to function, it needs some information about the project. +//! Specifically, it maintains an in-memory data structure which lists all the +//! crates (compilation units) and dependencies between them. This is necessary +//! a global singleton, as we do want, eg, find usages to always search across +//! the whole project, rather than just in the "current" crate. +//! +//! Normally, we get this "crate graph" by calling `cargo metadata +//! --message-format=json` for each cargo workspace and merging results. This +//! works for your typical cargo project, but breaks down for large folks who +//! have a monorepo with an infitine amount of Rust which is build with bazel or +//! some such. +//! +//! To support this use-case, we need to make _something_ configurable. To avoid +//! [midlayer mistake](https://lwn.net/Articles/336262/), we allow configuring +//! the lowest possible layer. `ProjectJson` is essentially a hook to just set +//! that global singleton in-memory data structure. It is optimized for power, +//! not for convenience (you'd be using cargo anyway if you wanted nice things, +//! right? :) +//! +//! `rust-project.json` also isn't necessary a file. Architecturally, we support +//! any convenient way to specify this data, which today is: +//! +//! * file on disk +//! * a field in the config (ie, you can send a JSON request with the contents +//! of rust-project.json to rust-analyzer, no need to write anything to disk) +//! +//! 
Another possible thing we don't do today, but which would be totally valid, +//! is to add an extension point to VS Code extension to register custom +//! project. +//! +//! In general, it is assumed that if you are going to use `rust-project.json`, +//! you'd write a fair bit of custom code gluing your build system to ra through +//! this JSON format. This logic can take form of a VS Code extension, or a +//! proxy process which injects data into "configure" LSP request, or maybe just +//! a simple build system rule to generate the file. +//! +//! In particular, the logic for lazily loading parts of the monorepo as the +//! user explores them belongs to that extension (it's totally valid to change +//! rust-project.json over time via configuration request!) use std::path::PathBuf; From 87315ef5c39647d1ddab66d58153840562dab353 Mon Sep 17 00:00:00 2001 From: Alex Kladov Date: Fri, 13 Jan 2023 11:57:02 +0000 Subject: [PATCH 0820/1945] Apply suggestions from code review MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Laurențiu Nicola --- crates/project-model/src/project_json.rs | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/crates/project-model/src/project_json.rs b/crates/project-model/src/project_json.rs index a990fb267a..319897890f 100644 --- a/crates/project-model/src/project_json.rs +++ b/crates/project-model/src/project_json.rs @@ -18,10 +18,11 @@ //! Normally, we get this "crate graph" by calling `cargo metadata //! --message-format=json` for each cargo workspace and merging results. This //! works for your typical cargo project, but breaks down for large folks who -//! have a monorepo with an infitine amount of Rust which is build with bazel or +//! have a monorepo with an infinite amount of Rust code which is built with bazel or //! some such. //! -//! To support this use-case, we need to make _something_ configurable. To avoid +//! To support this use case, we need to make _something_ configurable. To avoid +//! a [midlayer mistake](https://lwn.net/Articles/336262/), we allow configuring //! [midlayer mistake](https://lwn.net/Articles/336262/), we allow configuring //! the lowest possible layer. `ProjectJson` is essentially a hook to just set //! that global singleton in-memory data structure. It is optimized for power, From bd350085f66c5e392a14b11d40ac24b4fd139aca Mon Sep 17 00:00:00 2001 From: Alex Kladov Date: Fri, 13 Jan 2023 12:01:04 +0000 Subject: [PATCH 0821/1945] Update crates/project-model/src/project_json.rs MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Laurențiu Nicola --- crates/project-model/src/project_json.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/crates/project-model/src/project_json.rs b/crates/project-model/src/project_json.rs index 319897890f..4b2448e47f 100644 --- a/crates/project-model/src/project_json.rs +++ b/crates/project-model/src/project_json.rs @@ -23,7 +23,6 @@ //! //! To support this use case, we need to make _something_ configurable. To avoid //! a [midlayer mistake](https://lwn.net/Articles/336262/), we allow configuring -//! [midlayer mistake](https://lwn.net/Articles/336262/), we allow configuring //! the lowest possible layer. `ProjectJson` is essentially a hook to just set //! that global singleton in-memory data structure. It is optimized for power, //! 
not for convenience (you'd be using cargo anyway if you wanted nice things, From aafb0f1f8dd4c57d258374ac554b3162c497847d Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 13 Jan 2023 15:03:37 +0100 Subject: [PATCH 0822/1945] Use smallvec for inlay-hint parts --- Cargo.lock | 1 + crates/ide/Cargo.toml | 1 + crates/ide/src/inlay_hints.rs | 15 +++++++++++---- crates/ide/src/inlay_hints/closing_brace.rs | 5 ++++- 4 files changed, 17 insertions(+), 5 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 5f426d8856..13d8d40ddd 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -645,6 +645,7 @@ dependencies = [ "profile", "pulldown-cmark", "pulldown-cmark-to-cmark", + "smallvec", "stdx", "syntax", "test-utils", diff --git a/crates/ide/Cargo.toml b/crates/ide/Cargo.toml index 73f202630f..397383bc3a 100644 --- a/crates/ide/Cargo.toml +++ b/crates/ide/Cargo.toml @@ -20,6 +20,7 @@ pulldown-cmark-to-cmark = "10.0.4" pulldown-cmark = { version = "0.9.1", default-features = false } url = "2.3.1" dot = "0.1.4" +smallvec = "1.10.0" stdx = { path = "../stdx", version = "0.0.0" } syntax = { path = "../syntax", version = "0.0.0" } diff --git a/crates/ide/src/inlay_hints.rs b/crates/ide/src/inlay_hints.rs index 48a7bbfecf..f4a9d7b653 100644 --- a/crates/ide/src/inlay_hints.rs +++ b/crates/ide/src/inlay_hints.rs @@ -7,6 +7,7 @@ use either::Either; use hir::{known, HasVisibility, HirDisplay, HirWrite, ModuleDef, ModuleDefId, Semantics}; use ide_db::{base_db::FileRange, famous_defs::FamousDefs, RootDatabase}; use itertools::Itertools; +use smallvec::{smallvec, SmallVec}; use stdx::never; use syntax::{ ast::{self, AstNode}, @@ -83,7 +84,7 @@ pub enum AdjustmentHintsMode { PreferPostfix, } -#[derive(Clone, Debug, PartialEq, Eq)] +#[derive(Copy, Clone, Debug, PartialEq, Eq)] pub enum InlayKind { BindingModeHint, ChainingHint, @@ -102,9 +103,15 @@ pub enum InlayKind { #[derive(Debug)] pub struct InlayHint { + /// The text range this inlay hint applies to. pub range: TextRange, + /// The kind of this inlay hint. This is used to determine side and padding of the hint for + /// rendering purposes. pub kind: InlayKind, + /// The actual label to show in the inlay hint. pub label: InlayHintLabel, + /// The tooltip to show when hovering over the inlay hint, this may invoke other actions like + /// hover requests to show. pub tooltip: Option, } @@ -117,7 +124,7 @@ pub enum InlayTooltip { #[derive(Default)] pub struct InlayHintLabel { - pub parts: Vec, + pub parts: SmallVec<[InlayHintLabelPart; 1]>, } impl InlayHintLabel { @@ -145,13 +152,13 @@ impl InlayHintLabel { impl From for InlayHintLabel { fn from(s: String) -> Self { - Self { parts: vec![InlayHintLabelPart { text: s, linked_location: None }] } + Self { parts: smallvec![InlayHintLabelPart { text: s, linked_location: None }] } } } impl From<&str> for InlayHintLabel { fn from(s: &str) -> Self { - Self { parts: vec![InlayHintLabelPart { text: s.into(), linked_location: None }] } + Self { parts: smallvec![InlayHintLabelPart { text: s.into(), linked_location: None }] } } } diff --git a/crates/ide/src/inlay_hints/closing_brace.rs b/crates/ide/src/inlay_hints/closing_brace.rs index e340c64c54..12abefc140 100644 --- a/crates/ide/src/inlay_hints/closing_brace.rs +++ b/crates/ide/src/inlay_hints/closing_brace.rs @@ -5,6 +5,7 @@ //! 
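The switch to `SmallVec<[InlayHintLabelPart; 1]>` above keeps the common single-part label inline instead of heap-allocating a one-element `Vec`; that is presumably the motivation for the dependency. A standalone sketch of the behaviour (the label strings here are made up, only the `smallvec` API is real):

```rust
use smallvec::{smallvec, SmallVec};

fn main() {
    // One part (the common case for inlay-hint labels): stored inline.
    let one: SmallVec<[&str; 1]> = smallvec!["Vec<String>"];
    // Several parts (e.g. "Vec<" + a linked "String" + ">"): spills to the heap.
    let many: SmallVec<[&str; 1]> = smallvec!["Vec<", "String", ">"];
    assert!(!one.spilled());
    assert!(many.spilled());
}
```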
``` use hir::{HirDisplay, Semantics}; use ide_db::{base_db::FileRange, RootDatabase}; +use smallvec::smallvec; use syntax::{ ast::{self, AstNode, HasName}, match_ast, SyntaxKind, SyntaxNode, T, @@ -116,7 +117,9 @@ pub(super) fn hints( acc.push(InlayHint { range: closing_token.text_range(), kind: InlayKind::ClosingBraceHint, - label: InlayHintLabel { parts: vec![InlayHintLabelPart { text: label, linked_location }] }, + label: InlayHintLabel { + parts: smallvec![InlayHintLabelPart { text: label, linked_location }], + }, tooltip: None, // provided by label part location }); From e9724e55dfcbd6142a4a746e36bdd28f685072d6 Mon Sep 17 00:00:00 2001 From: bvanjoi Date: Sat, 14 Jan 2023 18:51:49 +0800 Subject: [PATCH 0823/1945] fix: check orpat in missing match --- .../src/handlers/add_missing_match_arms.rs | 61 +++++++++++++++++++ 1 file changed, 61 insertions(+) diff --git a/crates/ide-assists/src/handlers/add_missing_match_arms.rs b/crates/ide-assists/src/handlers/add_missing_match_arms.rs index 8e4ac69ae6..0461cc790e 100644 --- a/crates/ide-assists/src/handlers/add_missing_match_arms.rs +++ b/crates/ide-assists/src/handlers/add_missing_match_arms.rs @@ -269,6 +269,7 @@ fn does_pat_match_variant(pat: &Pat, var: &Pat) -> bool { (Pat::TuplePat(tpat), Pat::TuplePat(tvar)) => { tpat.fields().zip(tvar.fields()).all(|(p, v)| does_pat_match_variant(&p, &v)) } + (Pat::OrPat(opat), _) => opat.pats().any(|p| does_pat_match_variant(&p, var)), _ => utils::does_pat_match_variant(pat, var), } } @@ -525,6 +526,19 @@ fn foo(a: bool) { add_missing_match_arms, r#" fn foo(a: bool) { + match (a, a)$0 { + (true | false, true) => {} + (true, false) => {} + (false, false) => {} + } +} +"#, + ); + + check_assist_not_applicable( + add_missing_match_arms, + r#" +fn foo(a: bool) { match (a, a)$0 { (true, true) => {} (true, false) => {} @@ -565,6 +579,26 @@ fn foo(a: bool) { add_missing_match_arms, r#" fn foo(a: bool) { + match (a, a)$0 { + (true | false, true) => {} + } +} +"#, + r#" +fn foo(a: bool) { + match (a, a) { + (true | false, true) => {} + $0(true, false) => todo!(), + (false, false) => todo!(), + } +} +"#, + ); + + check_assist( + add_missing_match_arms, + r#" +fn foo(a: bool) { match (a, a)$0 { (false, true) => {} } @@ -882,6 +916,33 @@ fn main() { } "#, ); + + check_assist( + add_missing_match_arms, + r#" +enum E { A, B, C } +fn main() { + use E::*; + match (A, B, C)$0 { + (A | B , A, A | B | C) => (), + (A | B | C , B | C, A | B | C) => (), + } +} +"#, + r#" +enum E { A, B, C } +fn main() { + use E::*; + match (A, B, C) { + (A | B , A, A | B | C) => (), + (A | B | C , B | C, A | B | C) => (), + $0(C, A, A) => todo!(), + (C, A, B) => todo!(), + (C, A, C) => todo!(), + } +} +"#, + ) } #[test] From 60075a6625bb5d4064ef02123b84bd81e574c8e6 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sat, 14 Jan 2023 12:19:29 +0100 Subject: [PATCH 0824/1945] Remove hover inlay tooltips, replace them with location links --- crates/ide/src/inlay_hints.rs | 88 ++++++---- crates/ide/src/inlay_hints/adjustment.rs | 43 +---- crates/ide/src/inlay_hints/bind_pat.rs | 28 +--- crates/ide/src/inlay_hints/binding_mode.rs | 18 +- crates/ide/src/inlay_hints/chaining.rs | 158 +++--------------- crates/ide/src/inlay_hints/closing_brace.rs | 10 +- crates/ide/src/inlay_hints/closure_ret.rs | 5 +- crates/ide/src/inlay_hints/discriminant.rs | 23 ++- crates/ide/src/inlay_hints/fn_lifetime_fn.rs | 5 +- crates/ide/src/inlay_hints/implicit_static.rs | 3 +- crates/ide/src/inlay_hints/param_name.rs | 9 +- crates/ide/src/lib.rs | 3 +- 
crates/rust-analyzer/src/handlers.rs | 50 +----- crates/rust-analyzer/src/lsp_ext.rs | 7 +- crates/rust-analyzer/src/to_proto.rs | 117 ++++++------- 15 files changed, 187 insertions(+), 380 deletions(-) diff --git a/crates/ide/src/inlay_hints.rs b/crates/ide/src/inlay_hints.rs index f4a9d7b653..108696673e 100644 --- a/crates/ide/src/inlay_hints.rs +++ b/crates/ide/src/inlay_hints.rs @@ -11,7 +11,7 @@ use smallvec::{smallvec, SmallVec}; use stdx::never; use syntax::{ ast::{self, AstNode}, - match_ast, NodeOrToken, SyntaxNode, TextRange, TextSize, + match_ast, NodeOrToken, SyntaxNode, TextRange, }; use crate::{navigation_target::TryToNav, FileId}; @@ -110,16 +110,21 @@ pub struct InlayHint { pub kind: InlayKind, /// The actual label to show in the inlay hint. pub label: InlayHintLabel, - /// The tooltip to show when hovering over the inlay hint, this may invoke other actions like - /// hover requests to show. - pub tooltip: Option, +} + +impl InlayHint { + fn closing_paren(range: TextRange) -> InlayHint { + InlayHint { range, kind: InlayKind::ClosingParenthesis, label: InlayHintLabel::from(")") } + } + fn opening_paren(range: TextRange) -> InlayHint { + InlayHint { range, kind: InlayKind::OpeningParenthesis, label: InlayHintLabel::from("(") } + } } #[derive(Debug)] pub enum InlayTooltip { String(String), - HoverRanged(FileId, TextRange), - HoverOffset(FileId, TextSize), + Markdown(String), } #[derive(Default)] @@ -128,37 +133,59 @@ pub struct InlayHintLabel { } impl InlayHintLabel { - pub fn as_simple_str(&self) -> Option<&str> { - match &*self.parts { - [part] => part.as_simple_str(), - _ => None, + pub fn simple( + s: impl Into, + tooltip: Option, + linked_location: Option, + ) -> InlayHintLabel { + InlayHintLabel { + parts: smallvec![InlayHintLabelPart { text: s.into(), linked_location, tooltip }], } } pub fn prepend_str(&mut self, s: &str) { match &mut *self.parts { - [part, ..] if part.as_simple_str().is_some() => part.text = format!("{s}{}", part.text), - _ => self.parts.insert(0, InlayHintLabelPart { text: s.into(), linked_location: None }), + [InlayHintLabelPart { text, linked_location: None, tooltip: None }, ..] => { + text.insert_str(0, s) + } + _ => self.parts.insert( + 0, + InlayHintLabelPart { text: s.into(), linked_location: None, tooltip: None }, + ), } } pub fn append_str(&mut self, s: &str) { match &mut *self.parts { - [.., part] if part.as_simple_str().is_some() => part.text.push_str(s), - _ => self.parts.push(InlayHintLabelPart { text: s.into(), linked_location: None }), + [.., InlayHintLabelPart { text, linked_location: None, tooltip: None }] => { + text.push_str(s) + } + _ => self.parts.push(InlayHintLabelPart { + text: s.into(), + linked_location: None, + tooltip: None, + }), } } } impl From for InlayHintLabel { fn from(s: String) -> Self { - Self { parts: smallvec![InlayHintLabelPart { text: s, linked_location: None }] } + Self { + parts: smallvec![InlayHintLabelPart { text: s, linked_location: None, tooltip: None }], + } } } impl From<&str> for InlayHintLabel { fn from(s: &str) -> Self { - Self { parts: smallvec![InlayHintLabelPart { text: s.into(), linked_location: None }] } + Self { + parts: smallvec![InlayHintLabelPart { + text: s.into(), + linked_location: None, + tooltip: None + }], + } } } @@ -182,25 +209,25 @@ pub struct InlayHintLabelPart { /// When setting this, no tooltip must be set on the containing hint, or VS Code will display /// them both. 
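The rewritten `prepend_str`/`append_str` above use slice patterns to special-case a plain text part at the start or end of the label. A self-contained sketch of the same technique, with made-up types:

```rust
// Match on the first element of a slice and fall through otherwise.
fn first_plain_text(parts: &[(String, Option<u32>)]) -> Option<&str> {
    match parts {
        // First element carries no extra data: its text can be reused directly.
        [(text, None), ..] => Some(text.as_str()),
        _ => None,
    }
}
```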
pub linked_location: Option, -} - -impl InlayHintLabelPart { - pub fn as_simple_str(&self) -> Option<&str> { - match self { - Self { text, linked_location: None } => Some(text), - _ => None, - } - } + /// The tooltip to show when hovering over the inlay hint, this may invoke other actions like + /// hover requests to show. + pub tooltip: Option, } impl fmt::Debug for InlayHintLabelPart { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self.as_simple_str() { - Some(string) => string.fmt(f), - None => f + match self { + Self { text, linked_location: None, tooltip: None } => text.fmt(f), + Self { text, linked_location, tooltip } => f .debug_struct("InlayHintLabelPart") - .field("text", &self.text) - .field("linked_location", &self.linked_location) + .field("text", text) + .field("linked_location", linked_location) + .field( + "tooltip", + &tooltip.as_ref().map_or("", |it| match it { + InlayTooltip::String(it) | InlayTooltip::Markdown(it) => it, + }), + ) .finish(), } } @@ -249,6 +276,7 @@ impl InlayHintLabelBuilder<'_> { self.result.parts.push(InlayHintLabelPart { text: take(&mut self.last_part), linked_location: self.location.take(), + tooltip: None, }); } diff --git a/crates/ide/src/inlay_hints/adjustment.rs b/crates/ide/src/inlay_hints/adjustment.rs index bdd7c05e00..9c4caec605 100644 --- a/crates/ide/src/inlay_hints/adjustment.rs +++ b/crates/ide/src/inlay_hints/adjustment.rs @@ -44,27 +44,12 @@ pub(super) fn hints( mode_and_needs_parens_for_adjustment_hints(expr, config.adjustment_hints_mode); if needs_outer_parens { - acc.push(InlayHint { - range: expr.syntax().text_range(), - kind: InlayKind::OpeningParenthesis, - label: "(".into(), - tooltip: None, - }); + acc.push(InlayHint::opening_paren(expr.syntax().text_range())); } if postfix && needs_inner_parens { - acc.push(InlayHint { - range: expr.syntax().text_range(), - kind: InlayKind::OpeningParenthesis, - label: "(".into(), - tooltip: None, - }); - acc.push(InlayHint { - range: expr.syntax().text_range(), - kind: InlayKind::ClosingParenthesis, - label: ")".into(), - tooltip: None, - }); + acc.push(InlayHint::opening_paren(expr.syntax().text_range())); + acc.push(InlayHint::closing_paren(expr.syntax().text_range())); } let (mut tmp0, mut tmp1); @@ -118,30 +103,14 @@ pub(super) fn hints( InlayKind::AdjustmentHint }, label: if postfix { format!(".{}", text.trim_end()).into() } else { text.into() }, - tooltip: None, }); } if !postfix && needs_inner_parens { - acc.push(InlayHint { - range: expr.syntax().text_range(), - kind: InlayKind::OpeningParenthesis, - label: "(".into(), - tooltip: None, - }); - acc.push(InlayHint { - range: expr.syntax().text_range(), - kind: InlayKind::ClosingParenthesis, - label: ")".into(), - tooltip: None, - }); + acc.push(InlayHint::opening_paren(expr.syntax().text_range())); + acc.push(InlayHint::closing_paren(expr.syntax().text_range())); } if needs_outer_parens { - acc.push(InlayHint { - range: expr.syntax().text_range(), - kind: InlayKind::ClosingParenthesis, - label: ")".into(), - tooltip: None, - }); + acc.push(InlayHint::closing_paren(expr.syntax().text_range())); } Some(()) } diff --git a/crates/ide/src/inlay_hints/bind_pat.rs b/crates/ide/src/inlay_hints/bind_pat.rs index adec19c765..1a4bd353e7 100644 --- a/crates/ide/src/inlay_hints/bind_pat.rs +++ b/crates/ide/src/inlay_hints/bind_pat.rs @@ -12,9 +12,7 @@ use syntax::{ match_ast, }; -use crate::{ - inlay_hints::closure_has_block_body, InlayHint, InlayHintsConfig, InlayKind, InlayTooltip, -}; +use 
crate::{inlay_hints::closure_has_block_body, InlayHint, InlayHintsConfig, InlayKind}; use super::label_of_ty; @@ -22,7 +20,7 @@ pub(super) fn hints( acc: &mut Vec, famous_defs @ FamousDefs(sema, _): &FamousDefs<'_, '_>, config: &InlayHintsConfig, - file_id: FileId, + _file_id: FileId, pat: &ast::IdentPat, ) -> Option<()> { if !config.type_hints { @@ -52,10 +50,6 @@ pub(super) fn hints( }, kind: InlayKind::TypeHint, label, - tooltip: pat - .name() - .map(|it| it.syntax().text_range()) - .map(|it| InlayTooltip::HoverRanged(file_id, it)), }); Some(()) @@ -326,14 +320,6 @@ fn main(a: SliceIter<'_, Container>) { label: [ "impl Iterator>", ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 484..554, - ), - ), }, InlayHint { range: 484..485, @@ -350,6 +336,7 @@ fn main(a: SliceIter<'_, Container>) { range: 289..298, }, ), + tooltip: "", }, "<", InlayHintLabelPart { @@ -362,17 +349,10 @@ fn main(a: SliceIter<'_, Container>) { range: 238..247, }, ), + tooltip: "", }, ">", ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 484..485, - ), - ), }, ] "#]], diff --git a/crates/ide/src/inlay_hints/binding_mode.rs b/crates/ide/src/inlay_hints/binding_mode.rs index a0166d0048..d0e42bc914 100644 --- a/crates/ide/src/inlay_hints/binding_mode.rs +++ b/crates/ide/src/inlay_hints/binding_mode.rs @@ -7,7 +7,7 @@ use ide_db::RootDatabase; use syntax::ast::{self, AstNode}; -use crate::{InlayHint, InlayHintsConfig, InlayKind, InlayTooltip}; +use crate::{InlayHint, InlayHintsConfig, InlayKind}; pub(super) fn hints( acc: &mut Vec, @@ -44,7 +44,6 @@ pub(super) fn hints( range, kind: InlayKind::BindingModeHint, label: r.to_string().into(), - tooltip: Some(InlayTooltip::String("Inferred binding mode".into())), }); }); match pat { @@ -59,22 +58,11 @@ pub(super) fn hints( range: pat.syntax().text_range(), kind: InlayKind::BindingModeHint, label: bm.to_string().into(), - tooltip: Some(InlayTooltip::String("Inferred binding mode".into())), }); } ast::Pat::OrPat(pat) if !pattern_adjustments.is_empty() && outer_paren_pat.is_none() => { - acc.push(InlayHint { - range: pat.syntax().text_range(), - kind: InlayKind::OpeningParenthesis, - label: "(".into(), - tooltip: None, - }); - acc.push(InlayHint { - range: pat.syntax().text_range(), - kind: InlayKind::ClosingParenthesis, - label: ")".into(), - tooltip: None, - }); + acc.push(InlayHint::opening_paren(pat.syntax().text_range())); + acc.push(InlayHint::closing_paren(pat.syntax().text_range())); } _ => (), } diff --git a/crates/ide/src/inlay_hints/chaining.rs b/crates/ide/src/inlay_hints/chaining.rs index 8810d5d34d..c00fb83a88 100644 --- a/crates/ide/src/inlay_hints/chaining.rs +++ b/crates/ide/src/inlay_hints/chaining.rs @@ -5,7 +5,7 @@ use syntax::{ Direction, NodeOrToken, SyntaxKind, T, }; -use crate::{FileId, InlayHint, InlayHintsConfig, InlayKind, InlayTooltip}; +use crate::{FileId, InlayHint, InlayHintsConfig, InlayKind}; use super::label_of_ty; @@ -13,7 +13,7 @@ pub(super) fn hints( acc: &mut Vec, famous_defs @ FamousDefs(sema, _): &FamousDefs<'_, '_>, config: &InlayHintsConfig, - file_id: FileId, + _file_id: FileId, expr: &ast::Expr, ) -> Option<()> { if !config.chaining_hints { @@ -61,7 +61,6 @@ pub(super) fn hints( range: expr.syntax().text_range(), kind: InlayKind::ChainingHint, label: label_of_ty(famous_defs, config, ty)?, - tooltip: Some(InlayTooltip::HoverRanged(file_id, expr.syntax().text_range())), }); } } @@ -124,17 +123,10 @@ fn main() { range: 63..64, }, ), + tooltip: "", }, "", ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 147..172, - 
), - ), }, InlayHint { range: 147..154, @@ -151,17 +143,10 @@ fn main() { range: 7..8, }, ), + tooltip: "", }, "", ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 147..154, - ), - ), }, ] "#]], @@ -214,14 +199,6 @@ fn main() { label: [ "C", ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 143..190, - ), - ), }, InlayHint { range: 143..179, @@ -229,14 +206,6 @@ fn main() { label: [ "B", ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 143..179, - ), - ), }, ] "#]], @@ -282,17 +251,10 @@ fn main() { range: 51..52, }, ), + tooltip: "", }, "", ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 143..190, - ), - ), }, InlayHint { range: 143..179, @@ -309,17 +271,10 @@ fn main() { range: 29..30, }, ), + tooltip: "", }, "", ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 143..179, - ), - ), }, ] "#]], @@ -366,6 +321,7 @@ fn main() { range: 23..24, }, ), + tooltip: "", }, "<", InlayHintLabelPart { @@ -378,17 +334,10 @@ fn main() { range: 55..56, }, ), + tooltip: "", }, ">", ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 246..283, - ), - ), }, InlayHint { range: 246..265, @@ -405,6 +354,7 @@ fn main() { range: 7..8, }, ), + tooltip: "", }, "<", InlayHintLabelPart { @@ -417,17 +367,10 @@ fn main() { range: 55..56, }, ), + tooltip: "", }, ">", ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 246..265, - ), - ), }, ] "#]], @@ -467,14 +410,6 @@ fn main() { label: [ "impl Iterator", ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 174..241, - ), - ), }, InlayHint { range: 174..224, @@ -482,14 +417,6 @@ fn main() { label: [ "impl Iterator", ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 174..224, - ), - ), }, InlayHint { range: 174..206, @@ -497,14 +424,6 @@ fn main() { label: [ "impl Iterator", ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 174..206, - ), - ), }, InlayHint { range: 174..189, @@ -521,17 +440,10 @@ fn main() { range: 24..30, }, ), + tooltip: "", }, "", ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 174..189, - ), - ), }, ] "#]], @@ -577,17 +489,10 @@ fn main() { range: 7..13, }, ), + tooltip: "", }, "", ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 124..130, - ), - ), }, InlayHint { range: 145..185, @@ -604,17 +509,10 @@ fn main() { range: 7..13, }, ), + tooltip: "", }, "", ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 145..185, - ), - ), }, InlayHint { range: 145..168, @@ -631,32 +529,28 @@ fn main() { range: 7..13, }, ), + tooltip: "", }, "", ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 145..168, - ), - ), }, InlayHint { range: 222..228, kind: ParameterHint, label: [ - "self", - ], - tooltip: Some( - HoverOffset( - FileId( - 0, + InlayHintLabelPart { + text: "self", + linked_location: Some( + FileRange { + file_id: FileId( + 0, + ), + range: 42..46, + }, ), - 42, - ), - ), + tooltip: "", + }, + ], }, ] "#]], diff --git a/crates/ide/src/inlay_hints/closing_brace.rs b/crates/ide/src/inlay_hints/closing_brace.rs index 12abefc140..684d74282b 100644 --- a/crates/ide/src/inlay_hints/closing_brace.rs +++ b/crates/ide/src/inlay_hints/closing_brace.rs @@ -5,15 +5,12 @@ //! 
``` use hir::{HirDisplay, Semantics}; use ide_db::{base_db::FileRange, RootDatabase}; -use smallvec::smallvec; use syntax::{ ast::{self, AstNode, HasName}, match_ast, SyntaxKind, SyntaxNode, T, }; -use crate::{ - inlay_hints::InlayHintLabelPart, FileId, InlayHint, InlayHintLabel, InlayHintsConfig, InlayKind, -}; +use crate::{FileId, InlayHint, InlayHintLabel, InlayHintsConfig, InlayKind}; pub(super) fn hints( acc: &mut Vec, @@ -117,10 +114,7 @@ pub(super) fn hints( acc.push(InlayHint { range: closing_token.text_range(), kind: InlayKind::ClosingBraceHint, - label: InlayHintLabel { - parts: smallvec![InlayHintLabelPart { text: label, linked_location }], - }, - tooltip: None, // provided by label part location + label: InlayHintLabel::simple(label, None, linked_location), }); None diff --git a/crates/ide/src/inlay_hints/closure_ret.rs b/crates/ide/src/inlay_hints/closure_ret.rs index d9929beaac..b1aa726ba5 100644 --- a/crates/ide/src/inlay_hints/closure_ret.rs +++ b/crates/ide/src/inlay_hints/closure_ret.rs @@ -4,7 +4,7 @@ use syntax::ast::{self, AstNode}; use crate::{ inlay_hints::closure_has_block_body, ClosureReturnTypeHints, InlayHint, InlayHintsConfig, - InlayKind, InlayTooltip, + InlayKind, }; use super::label_of_ty; @@ -13,7 +13,7 @@ pub(super) fn hints( acc: &mut Vec, famous_defs @ FamousDefs(sema, _): &FamousDefs<'_, '_>, config: &InlayHintsConfig, - file_id: FileId, + _file_id: FileId, closure: ast::ClosureExpr, ) -> Option<()> { if config.closure_return_type_hints == ClosureReturnTypeHints::Never { @@ -43,7 +43,6 @@ pub(super) fn hints( range: param_list.syntax().text_range(), kind: InlayKind::ClosureReturnTypeHint, label: label_of_ty(famous_defs, config, ty)?, - tooltip: Some(InlayTooltip::HoverRanged(file_id, param_list.syntax().text_range())), }); Some(()) } diff --git a/crates/ide/src/inlay_hints/discriminant.rs b/crates/ide/src/inlay_hints/discriminant.rs index f32c4bdf28..6afc4b6330 100644 --- a/crates/ide/src/inlay_hints/discriminant.rs +++ b/crates/ide/src/inlay_hints/discriminant.rs @@ -7,7 +7,9 @@ use ide_db::{base_db::FileId, famous_defs::FamousDefs}; use syntax::ast::{self, AstNode, HasName}; -use crate::{DiscriminantHints, InlayHint, InlayHintsConfig, InlayKind, InlayTooltip}; +use crate::{ + DiscriminantHints, InlayHint, InlayHintLabel, InlayHintsConfig, InlayKind, InlayTooltip, +}; pub(super) fn hints( acc: &mut Vec, @@ -42,14 +44,17 @@ pub(super) fn hints( None => name.syntax().text_range(), }, kind: InlayKind::DiscriminantHint, - label: match &d { - Ok(v) => format!("{}", v).into(), - Err(_) => "?".into(), - }, - tooltip: Some(InlayTooltip::String(match &d { - Ok(_) => "enum variant discriminant".into(), - Err(e) => format!("{e:?}").into(), - })), + label: InlayHintLabel::simple( + match &d { + Ok(v) => format!("{}", v), + Err(_) => "?".into(), + }, + Some(InlayTooltip::String(match &d { + Ok(_) => "enum variant discriminant".into(), + Err(e) => format!("{e:?}").into(), + })), + None, + ), }); Some(()) diff --git a/crates/ide/src/inlay_hints/fn_lifetime_fn.rs b/crates/ide/src/inlay_hints/fn_lifetime_fn.rs index 2aa5e3dc73..ce6f2e486c 100644 --- a/crates/ide/src/inlay_hints/fn_lifetime_fn.rs +++ b/crates/ide/src/inlay_hints/fn_lifetime_fn.rs @@ -10,7 +10,7 @@ use syntax::{ SyntaxToken, }; -use crate::{InlayHint, InlayHintsConfig, InlayKind, InlayTooltip, LifetimeElisionHints}; +use crate::{InlayHint, InlayHintsConfig, InlayKind, LifetimeElisionHints}; pub(super) fn hints( acc: &mut Vec, @@ -25,7 +25,6 @@ pub(super) fn hints( range: t.text_range(), kind: 
InlayKind::LifetimeHint, label: label.into(), - tooltip: Some(InlayTooltip::String("Elided lifetime".into())), }; let param_list = func.param_list()?; @@ -190,14 +189,12 @@ pub(super) fn hints( if is_empty { "" } else { ", " } ) .into(), - tooltip: Some(InlayTooltip::String("Elided lifetimes".into())), }); } (None, allocated_lifetimes) => acc.push(InlayHint { range: func.name()?.syntax().text_range(), kind: InlayKind::GenericParamListHint, label: format!("<{}>", allocated_lifetimes.iter().format(", "),).into(), - tooltip: Some(InlayTooltip::String("Elided lifetimes".into())), }), } Some(()) diff --git a/crates/ide/src/inlay_hints/implicit_static.rs b/crates/ide/src/inlay_hints/implicit_static.rs index 588a0e3b6a..f65b1d09a4 100644 --- a/crates/ide/src/inlay_hints/implicit_static.rs +++ b/crates/ide/src/inlay_hints/implicit_static.rs @@ -8,7 +8,7 @@ use syntax::{ SyntaxKind, }; -use crate::{InlayHint, InlayHintsConfig, InlayKind, InlayTooltip, LifetimeElisionHints}; +use crate::{InlayHint, InlayHintsConfig, InlayKind, LifetimeElisionHints}; pub(super) fn hints( acc: &mut Vec, @@ -34,7 +34,6 @@ pub(super) fn hints( range: t.text_range(), kind: InlayKind::LifetimeHint, label: "'static".to_owned().into(), - tooltip: Some(InlayTooltip::String("Elided static lifetime".into())), }); } } diff --git a/crates/ide/src/inlay_hints/param_name.rs b/crates/ide/src/inlay_hints/param_name.rs index ecee67632e..dbbc35d589 100644 --- a/crates/ide/src/inlay_hints/param_name.rs +++ b/crates/ide/src/inlay_hints/param_name.rs @@ -10,7 +10,7 @@ use ide_db::{base_db::FileRange, RootDatabase}; use stdx::to_lower_snake_case; use syntax::ast::{self, AstNode, HasArgList, HasName, UnaryOp}; -use crate::{InlayHint, InlayHintsConfig, InlayKind, InlayTooltip}; +use crate::{InlayHint, InlayHintLabel, InlayHintsConfig, InlayKind}; pub(super) fn hints( acc: &mut Vec, @@ -43,12 +43,12 @@ pub(super) fn hints( !should_hide_param_name_hint(sema, &callable, param_name, arg) }) .map(|(param, param_name, _, FileRange { range, .. 
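For orientation, the `param_name` module touched here produces the inline parameter-name hints at call sites; the exact rendering shown in the comment below is an assumption, not something this patch specifies. The patch itself only changes how the hint links back to the parameter's definition (a `linked_location` on the label part instead of a hover tooltip), the rendered text stays the same.

```rust
fn set(enabled: bool, retries: u32) {
    let _ = (enabled, retries);
}

fn caller() {
    // Rendered in the editor roughly as: set(enabled: true, retries: 0)
    set(true, 0);
}
```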
})| { - let mut tooltip = None; + let mut linked_location = None; if let Some(name) = param { if let hir::CallableKind::Function(f) = callable.kind() { // assert the file is cached so we can map out of macros if let Some(_) = sema.source(f) { - tooltip = sema.original_range_opt(name.syntax()); + linked_location = sema.original_range_opt(name.syntax()); } } } @@ -56,8 +56,7 @@ pub(super) fn hints( InlayHint { range, kind: InlayKind::ParameterHint, - label: param_name.into(), - tooltip: tooltip.map(|it| InlayTooltip::HoverOffset(it.file_id, it.range.start())), + label: InlayHintLabel::simple(param_name, None, linked_location), } }); diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs index 239456cb28..8424d82aa1 100644 --- a/crates/ide/src/lib.rs +++ b/crates/ide/src/lib.rs @@ -82,7 +82,8 @@ pub use crate::{ hover::{HoverAction, HoverConfig, HoverDocFormat, HoverGotoTypeData, HoverResult}, inlay_hints::{ AdjustmentHints, AdjustmentHintsMode, ClosureReturnTypeHints, DiscriminantHints, InlayHint, - InlayHintLabel, InlayHintsConfig, InlayKind, InlayTooltip, LifetimeElisionHints, + InlayHintLabel, InlayHintLabelPart, InlayHintsConfig, InlayKind, InlayTooltip, + LifetimeElisionHints, }, join_lines::JoinLinesConfig, markup::Markup, diff --git a/crates/rust-analyzer/src/handlers.rs b/crates/rust-analyzer/src/handlers.rs index 59bdd30612..033ef75cca 100644 --- a/crates/rust-analyzer/src/handlers.rs +++ b/crates/rust-analyzer/src/handlers.rs @@ -29,7 +29,6 @@ use project_model::{ManifestPath, ProjectWorkspace, TargetKind}; use serde_json::json; use stdx::{format_to, never}; use syntax::{algo, ast, AstNode, TextRange, TextSize}; -use tracing::error; use vfs::AbsPathBuf; use crate::{ @@ -1360,55 +1359,10 @@ pub(crate) fn handle_inlay_hints( } pub(crate) fn handle_inlay_hints_resolve( - snap: GlobalStateSnapshot, - mut hint: InlayHint, + _snap: GlobalStateSnapshot, + hint: InlayHint, ) -> Result { let _p = profile::span("handle_inlay_hints_resolve"); - let data = match hint.data.take() { - Some(it) => it, - None => return Ok(hint), - }; - - let resolve_data: lsp_ext::InlayHintResolveData = serde_json::from_value(data)?; - - match snap.url_file_version(&resolve_data.text_document.uri) { - Some(version) if version == resolve_data.text_document.version => {} - Some(version) => { - error!( - "attempted inlayHints/resolve of '{}' at version {} while server version is {}", - resolve_data.text_document.uri, resolve_data.text_document.version, version, - ); - return Ok(hint); - } - None => { - error!( - "attempted inlayHints/resolve of unknown file '{}' at version {}", - resolve_data.text_document.uri, resolve_data.text_document.version, - ); - return Ok(hint); - } - } - let file_range = from_proto::file_range_uri( - &snap, - &resolve_data.text_document.uri, - match resolve_data.position { - PositionOrRange::Position(pos) => Range::new(pos, pos), - PositionOrRange::Range(range) => range, - }, - )?; - let info = match snap.analysis.hover(&snap.config.hover(), file_range)? { - None => return Ok(hint), - Some(info) => info, - }; - - let markup_kind = - snap.config.hover().documentation.map_or(ide::HoverDocFormat::Markdown, |kind| kind); - - // FIXME: hover actions? 
- hint.tooltip = Some(lsp_types::InlayHintTooltip::MarkupContent(to_proto::markup_content( - info.info.markup, - markup_kind, - ))); Ok(hint) } diff --git a/crates/rust-analyzer/src/lsp_ext.rs b/crates/rust-analyzer/src/lsp_ext.rs index 65620b4209..b117acd1b0 100644 --- a/crates/rust-analyzer/src/lsp_ext.rs +++ b/crates/rust-analyzer/src/lsp_ext.rs @@ -3,11 +3,11 @@ use std::{collections::HashMap, path::PathBuf}; use lsp_types::request::Request; +use lsp_types::PositionEncodingKind; use lsp_types::{ notification::Notification, CodeActionKind, DocumentOnTypeFormattingParams, PartialResultParams, Position, Range, TextDocumentIdentifier, WorkDoneProgressParams, }; -use lsp_types::{PositionEncodingKind, VersionedTextDocumentIdentifier}; use serde::{Deserialize, Serialize}; pub enum AnalyzerStatus {} @@ -568,10 +568,7 @@ pub struct CompletionResolveData { } #[derive(Debug, Serialize, Deserialize)] -pub struct InlayHintResolveData { - pub text_document: VersionedTextDocumentIdentifier, - pub position: PositionOrRange, -} +pub struct InlayHintResolveData {} #[derive(Debug, Serialize, Deserialize)] pub struct CompletionImport { diff --git a/crates/rust-analyzer/src/to_proto.rs b/crates/rust-analyzer/src/to_proto.rs index e736b2ff9a..d9cbb2bc20 100644 --- a/crates/rust-analyzer/src/to_proto.rs +++ b/crates/rust-analyzer/src/to_proto.rs @@ -9,9 +9,9 @@ use ide::{ Annotation, AnnotationKind, Assist, AssistKind, Cancellable, CompletionItem, CompletionItemKind, CompletionRelevance, Documentation, FileId, FileRange, FileSystemEdit, Fold, FoldKind, Highlight, HlMod, HlOperator, HlPunct, HlRange, HlTag, Indel, InlayHint, - InlayHintLabel, InlayKind, Markup, NavigationTarget, ReferenceCategory, RenameError, Runnable, - Severity, SignatureHelp, SourceChange, StructureNodeKind, SymbolKind, TextEdit, TextRange, - TextSize, + InlayHintLabel, InlayHintLabelPart, InlayKind, Markup, NavigationTarget, ReferenceCategory, + RenameError, Runnable, Severity, SignatureHelp, SourceChange, StructureNodeKind, SymbolKind, + TextEdit, TextRange, TextSize, }; use itertools::Itertools; use serde_json::to_value; @@ -438,6 +438,8 @@ pub(crate) fn inlay_hint( _ => {} } + let (label, tooltip) = inlay_hint_label(snap, inlay_hint.label)?; + Ok(lsp_types::InlayHint { position: match inlay_hint.kind { // before annotated thing @@ -481,7 +483,9 @@ pub(crate) fn inlay_hint( | InlayKind::TypeHint | InlayKind::DiscriminantHint | InlayKind::ClosingBraceHint => false, - InlayKind::BindingModeHint => inlay_hint.label.as_simple_str() != Some("&"), + InlayKind::BindingModeHint => { + matches!(&label, lsp_types::InlayHintLabel::String(s) if s != "&") + } InlayKind::ParameterHint | InlayKind::LifetimeHint => true, }), kind: match inlay_hint.kind { @@ -500,68 +504,67 @@ pub(crate) fn inlay_hint( | InlayKind::ClosingBraceHint => None, }, text_edits: None, - data: (|| match inlay_hint.tooltip { - Some(ide::InlayTooltip::HoverOffset(file_id, offset)) => { - let uri = url(snap, file_id); - let line_index = snap.file_line_index(file_id).ok()?; - - let text_document = lsp_types::VersionedTextDocumentIdentifier { - version: snap.url_file_version(&uri)?, - uri, - }; - to_value(lsp_ext::InlayHintResolveData { - text_document, - position: lsp_ext::PositionOrRange::Position(position(&line_index, offset)), - }) - .ok() - } - Some(ide::InlayTooltip::HoverRanged(file_id, text_range)) => { - let uri = url(snap, file_id); - let text_document = lsp_types::VersionedTextDocumentIdentifier { - version: snap.url_file_version(&uri)?, - uri, - }; - let line_index = 
snap.file_line_index(file_id).ok()?; - to_value(lsp_ext::InlayHintResolveData { - text_document, - position: lsp_ext::PositionOrRange::Range(range(&line_index, text_range)), - }) - .ok() - } - _ => None, - })(), - tooltip: Some(match inlay_hint.tooltip { - Some(ide::InlayTooltip::String(s)) => lsp_types::InlayHintTooltip::String(s), - _ => lsp_types::InlayHintTooltip::String(inlay_hint.label.to_string()), - }), - label: inlay_hint_label(snap, inlay_hint.label)?, + data: None, + tooltip, + label, }) } fn inlay_hint_label( snap: &GlobalStateSnapshot, - label: InlayHintLabel, -) -> Cancellable { - Ok(match label.as_simple_str() { - Some(s) => lsp_types::InlayHintLabel::String(s.into()), - None => lsp_types::InlayHintLabel::LabelParts( - label + mut label: InlayHintLabel, +) -> Cancellable<(lsp_types::InlayHintLabel, Option)> { + let res = match &*label.parts { + [InlayHintLabelPart { linked_location: None, .. }] => { + let InlayHintLabelPart { text, tooltip, .. } = label.parts.pop().unwrap(); + ( + lsp_types::InlayHintLabel::String(text), + match tooltip { + Some(ide::InlayTooltip::String(s)) => { + Some(lsp_types::InlayHintTooltip::String(s)) + } + Some(ide::InlayTooltip::Markdown(s)) => { + Some(lsp_types::InlayHintTooltip::MarkupContent(lsp_types::MarkupContent { + kind: lsp_types::MarkupKind::Markdown, + value: s, + })) + } + None => None, + }, + ) + } + _ => { + let parts = label .parts .into_iter() .map(|part| { - Ok(lsp_types::InlayHintLabelPart { - value: part.text, - tooltip: None, - location: part - .linked_location - .map(|range| location(snap, range)) - .transpose()?, - command: None, - }) + part.linked_location.map(|range| location(snap, range)).transpose().map( + |location| lsp_types::InlayHintLabelPart { + value: part.text, + tooltip: match part.tooltip { + Some(ide::InlayTooltip::String(s)) => { + Some(lsp_types::InlayHintLabelPartTooltip::String(s)) + } + Some(ide::InlayTooltip::Markdown(s)) => { + Some(lsp_types::InlayHintLabelPartTooltip::MarkupContent( + lsp_types::MarkupContent { + kind: lsp_types::MarkupKind::Markdown, + value: s, + }, + )) + } + None => None, + }, + location, + command: None, + }, + ) }) - .collect::>>()?, - ), - }) + .collect::>()?; + (lsp_types::InlayHintLabel::LabelParts(parts), None) + } + }; + Ok(res) } static TOKEN_RESULT_COUNTER: AtomicU32 = AtomicU32::new(1); From f2444b2a402f8d551152cb482686c7839574ac1d Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sat, 14 Jan 2023 12:20:12 +0100 Subject: [PATCH 0825/1945] Remove unnecessary Hint suffix on InlayKind variants --- crates/ide/src/inlay_hints.rs | 22 ++--- crates/ide/src/inlay_hints/adjustment.rs | 6 +- crates/ide/src/inlay_hints/bind_pat.rs | 6 +- crates/ide/src/inlay_hints/binding_mode.rs | 8 +- crates/ide/src/inlay_hints/chaining.rs | 34 +++---- crates/ide/src/inlay_hints/closing_brace.rs | 2 +- crates/ide/src/inlay_hints/closure_ret.rs | 2 +- crates/ide/src/inlay_hints/discriminant.rs | 2 +- crates/ide/src/inlay_hints/fn_lifetime_fn.rs | 6 +- crates/ide/src/inlay_hints/implicit_static.rs | 2 +- crates/ide/src/inlay_hints/param_name.rs | 2 +- crates/rust-analyzer/src/to_proto.rs | 88 +++++++++---------- docs/dev/lsp-extensions.md | 2 +- 13 files changed, 87 insertions(+), 95 deletions(-) diff --git a/crates/ide/src/inlay_hints.rs b/crates/ide/src/inlay_hints.rs index 108696673e..861bf1c66c 100644 --- a/crates/ide/src/inlay_hints.rs +++ b/crates/ide/src/inlay_hints.rs @@ -86,17 +86,17 @@ pub enum AdjustmentHintsMode { #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub enum InlayKind { - 
BindingModeHint, - ChainingHint, - ClosingBraceHint, - ClosureReturnTypeHint, - GenericParamListHint, - AdjustmentHint, - AdjustmentHintPostfix, - LifetimeHint, - ParameterHint, - TypeHint, - DiscriminantHint, + BindingMode, + Chaining, + ClosingBrace, + ClosureReturnType, + GenericParamList, + Adjustment, + AdjustmentPostfix, + Lifetime, + Parameter, + Type, + Discriminant, OpeningParenthesis, ClosingParenthesis, } diff --git a/crates/ide/src/inlay_hints/adjustment.rs b/crates/ide/src/inlay_hints/adjustment.rs index 9c4caec605..581813f3b7 100644 --- a/crates/ide/src/inlay_hints/adjustment.rs +++ b/crates/ide/src/inlay_hints/adjustment.rs @@ -97,11 +97,7 @@ pub(super) fn hints( }; acc.push(InlayHint { range: expr.syntax().text_range(), - kind: if postfix { - InlayKind::AdjustmentHintPostfix - } else { - InlayKind::AdjustmentHint - }, + kind: if postfix { InlayKind::AdjustmentPostfix } else { InlayKind::Adjustment }, label: if postfix { format!(".{}", text.trim_end()).into() } else { text.into() }, }); } diff --git a/crates/ide/src/inlay_hints/bind_pat.rs b/crates/ide/src/inlay_hints/bind_pat.rs index 1a4bd353e7..5227c651ff 100644 --- a/crates/ide/src/inlay_hints/bind_pat.rs +++ b/crates/ide/src/inlay_hints/bind_pat.rs @@ -48,7 +48,7 @@ pub(super) fn hints( Some(name) => name.syntax().text_range(), None => pat.syntax().text_range(), }, - kind: InlayKind::TypeHint, + kind: InlayKind::Type, label, }); @@ -316,14 +316,14 @@ fn main(a: SliceIter<'_, Container>) { [ InlayHint { range: 484..554, - kind: ChainingHint, + kind: Chaining, label: [ "impl Iterator>", ], }, InlayHint { range: 484..485, - kind: ChainingHint, + kind: Chaining, label: [ "", InlayHintLabelPart { diff --git a/crates/ide/src/inlay_hints/binding_mode.rs b/crates/ide/src/inlay_hints/binding_mode.rs index d0e42bc914..11b9cd269b 100644 --- a/crates/ide/src/inlay_hints/binding_mode.rs +++ b/crates/ide/src/inlay_hints/binding_mode.rs @@ -40,11 +40,7 @@ pub(super) fn hints( (true, false) => "&", _ => return, }; - acc.push(InlayHint { - range, - kind: InlayKind::BindingModeHint, - label: r.to_string().into(), - }); + acc.push(InlayHint { range, kind: InlayKind::BindingMode, label: r.to_string().into() }); }); match pat { ast::Pat::IdentPat(pat) if pat.ref_token().is_none() && pat.mut_token().is_none() => { @@ -56,7 +52,7 @@ pub(super) fn hints( }; acc.push(InlayHint { range: pat.syntax().text_range(), - kind: InlayKind::BindingModeHint, + kind: InlayKind::BindingMode, label: bm.to_string().into(), }); } diff --git a/crates/ide/src/inlay_hints/chaining.rs b/crates/ide/src/inlay_hints/chaining.rs index c00fb83a88..e0045a53d7 100644 --- a/crates/ide/src/inlay_hints/chaining.rs +++ b/crates/ide/src/inlay_hints/chaining.rs @@ -59,7 +59,7 @@ pub(super) fn hints( } acc.push(InlayHint { range: expr.syntax().text_range(), - kind: InlayKind::ChainingHint, + kind: InlayKind::Chaining, label: label_of_ty(famous_defs, config, ty)?, }); } @@ -110,7 +110,7 @@ fn main() { [ InlayHint { range: 147..172, - kind: ChainingHint, + kind: Chaining, label: [ "", InlayHintLabelPart { @@ -130,7 +130,7 @@ fn main() { }, InlayHint { range: 147..154, - kind: ChainingHint, + kind: Chaining, label: [ "", InlayHintLabelPart { @@ -195,14 +195,14 @@ fn main() { [ InlayHint { range: 143..190, - kind: ChainingHint, + kind: Chaining, label: [ "C", ], }, InlayHint { range: 143..179, - kind: ChainingHint, + kind: Chaining, label: [ "B", ], @@ -238,7 +238,7 @@ fn main() { [ InlayHint { range: 143..190, - kind: ChainingHint, + kind: Chaining, label: [ "", InlayHintLabelPart 
{ @@ -258,7 +258,7 @@ fn main() { }, InlayHint { range: 143..179, - kind: ChainingHint, + kind: Chaining, label: [ "", InlayHintLabelPart { @@ -308,7 +308,7 @@ fn main() { [ InlayHint { range: 246..283, - kind: ChainingHint, + kind: Chaining, label: [ "", InlayHintLabelPart { @@ -341,7 +341,7 @@ fn main() { }, InlayHint { range: 246..265, - kind: ChainingHint, + kind: Chaining, label: [ "", InlayHintLabelPart { @@ -406,28 +406,28 @@ fn main() { [ InlayHint { range: 174..241, - kind: ChainingHint, + kind: Chaining, label: [ "impl Iterator", ], }, InlayHint { range: 174..224, - kind: ChainingHint, + kind: Chaining, label: [ "impl Iterator", ], }, InlayHint { range: 174..206, - kind: ChainingHint, + kind: Chaining, label: [ "impl Iterator", ], }, InlayHint { range: 174..189, - kind: ChainingHint, + kind: Chaining, label: [ "&mut ", InlayHintLabelPart { @@ -476,7 +476,7 @@ fn main() { [ InlayHint { range: 124..130, - kind: TypeHint, + kind: Type, label: [ "", InlayHintLabelPart { @@ -496,7 +496,7 @@ fn main() { }, InlayHint { range: 145..185, - kind: ChainingHint, + kind: Chaining, label: [ "", InlayHintLabelPart { @@ -516,7 +516,7 @@ fn main() { }, InlayHint { range: 145..168, - kind: ChainingHint, + kind: Chaining, label: [ "", InlayHintLabelPart { @@ -536,7 +536,7 @@ fn main() { }, InlayHint { range: 222..228, - kind: ParameterHint, + kind: Parameter, label: [ InlayHintLabelPart { text: "self", diff --git a/crates/ide/src/inlay_hints/closing_brace.rs b/crates/ide/src/inlay_hints/closing_brace.rs index 684d74282b..aae805f78d 100644 --- a/crates/ide/src/inlay_hints/closing_brace.rs +++ b/crates/ide/src/inlay_hints/closing_brace.rs @@ -113,7 +113,7 @@ pub(super) fn hints( .flatten(); acc.push(InlayHint { range: closing_token.text_range(), - kind: InlayKind::ClosingBraceHint, + kind: InlayKind::ClosingBrace, label: InlayHintLabel::simple(label, None, linked_location), }); diff --git a/crates/ide/src/inlay_hints/closure_ret.rs b/crates/ide/src/inlay_hints/closure_ret.rs index b1aa726ba5..f03a18b8e9 100644 --- a/crates/ide/src/inlay_hints/closure_ret.rs +++ b/crates/ide/src/inlay_hints/closure_ret.rs @@ -41,7 +41,7 @@ pub(super) fn hints( } acc.push(InlayHint { range: param_list.syntax().text_range(), - kind: InlayKind::ClosureReturnTypeHint, + kind: InlayKind::ClosureReturnType, label: label_of_ty(famous_defs, config, ty)?, }); Some(()) diff --git a/crates/ide/src/inlay_hints/discriminant.rs b/crates/ide/src/inlay_hints/discriminant.rs index 6afc4b6330..310295cc37 100644 --- a/crates/ide/src/inlay_hints/discriminant.rs +++ b/crates/ide/src/inlay_hints/discriminant.rs @@ -43,7 +43,7 @@ pub(super) fn hints( Some(field_list) => name.syntax().text_range().cover(field_list.syntax().text_range()), None => name.syntax().text_range(), }, - kind: InlayKind::DiscriminantHint, + kind: InlayKind::Discriminant, label: InlayHintLabel::simple( match &d { Ok(v) => format!("{}", v), diff --git a/crates/ide/src/inlay_hints/fn_lifetime_fn.rs b/crates/ide/src/inlay_hints/fn_lifetime_fn.rs index ce6f2e486c..b7182085b3 100644 --- a/crates/ide/src/inlay_hints/fn_lifetime_fn.rs +++ b/crates/ide/src/inlay_hints/fn_lifetime_fn.rs @@ -23,7 +23,7 @@ pub(super) fn hints( let mk_lt_hint = |t: SyntaxToken, label: String| InlayHint { range: t.text_range(), - kind: InlayKind::LifetimeHint, + kind: InlayKind::Lifetime, label: label.into(), }; @@ -182,7 +182,7 @@ pub(super) fn hints( let is_empty = gpl.generic_params().next().is_none(); acc.push(InlayHint { range: angle_tok.text_range(), - kind: InlayKind::LifetimeHint, + kind: 
InlayKind::Lifetime, label: format!( "{}{}", allocated_lifetimes.iter().format(", "), @@ -193,7 +193,7 @@ pub(super) fn hints( } (None, allocated_lifetimes) => acc.push(InlayHint { range: func.name()?.syntax().text_range(), - kind: InlayKind::GenericParamListHint, + kind: InlayKind::GenericParamList, label: format!("<{}>", allocated_lifetimes.iter().format(", "),).into(), }), } diff --git a/crates/ide/src/inlay_hints/implicit_static.rs b/crates/ide/src/inlay_hints/implicit_static.rs index f65b1d09a4..1122ee2e39 100644 --- a/crates/ide/src/inlay_hints/implicit_static.rs +++ b/crates/ide/src/inlay_hints/implicit_static.rs @@ -32,7 +32,7 @@ pub(super) fn hints( let t = ty.amp_token()?; acc.push(InlayHint { range: t.text_range(), - kind: InlayKind::LifetimeHint, + kind: InlayKind::Lifetime, label: "'static".to_owned().into(), }); } diff --git a/crates/ide/src/inlay_hints/param_name.rs b/crates/ide/src/inlay_hints/param_name.rs index dbbc35d589..9cdae63241 100644 --- a/crates/ide/src/inlay_hints/param_name.rs +++ b/crates/ide/src/inlay_hints/param_name.rs @@ -55,7 +55,7 @@ pub(super) fn hints( InlayHint { range, - kind: InlayKind::ParameterHint, + kind: InlayKind::Parameter, label: InlayHintLabel::simple(param_name, None, linked_location), } }); diff --git a/crates/rust-analyzer/src/to_proto.rs b/crates/rust-analyzer/src/to_proto.rs index d9cbb2bc20..0f0642bb4b 100644 --- a/crates/rust-analyzer/src/to_proto.rs +++ b/crates/rust-analyzer/src/to_proto.rs @@ -431,10 +431,10 @@ pub(crate) fn inlay_hint( mut inlay_hint: InlayHint, ) -> Cancellable { match inlay_hint.kind { - InlayKind::ParameterHint if render_colons => inlay_hint.label.append_str(":"), - InlayKind::TypeHint if render_colons => inlay_hint.label.prepend_str(": "), - InlayKind::ClosureReturnTypeHint => inlay_hint.label.prepend_str(" -> "), - InlayKind::DiscriminantHint => inlay_hint.label.prepend_str(" = "), + InlayKind::Parameter if render_colons => inlay_hint.label.append_str(":"), + InlayKind::Type if render_colons => inlay_hint.label.prepend_str(": "), + InlayKind::ClosureReturnType => inlay_hint.label.prepend_str(" -> "), + InlayKind::Discriminant => inlay_hint.label.prepend_str(" = "), _ => {} } @@ -444,64 +444,64 @@ pub(crate) fn inlay_hint( position: match inlay_hint.kind { // before annotated thing InlayKind::OpeningParenthesis - | InlayKind::ParameterHint - | InlayKind::AdjustmentHint - | InlayKind::BindingModeHint => position(line_index, inlay_hint.range.start()), + | InlayKind::Parameter + | InlayKind::Adjustment + | InlayKind::BindingMode => position(line_index, inlay_hint.range.start()), // after annotated thing - InlayKind::ClosureReturnTypeHint - | InlayKind::TypeHint - | InlayKind::DiscriminantHint - | InlayKind::ChainingHint - | InlayKind::GenericParamListHint + InlayKind::ClosureReturnType + | InlayKind::Type + | InlayKind::Discriminant + | InlayKind::Chaining + | InlayKind::GenericParamList | InlayKind::ClosingParenthesis - | InlayKind::AdjustmentHintPostfix - | InlayKind::LifetimeHint - | InlayKind::ClosingBraceHint => position(line_index, inlay_hint.range.end()), + | InlayKind::AdjustmentPostfix + | InlayKind::Lifetime + | InlayKind::ClosingBrace => position(line_index, inlay_hint.range.end()), }, padding_left: Some(match inlay_hint.kind { - InlayKind::TypeHint => !render_colons, - InlayKind::ChainingHint | InlayKind::ClosingBraceHint => true, + InlayKind::Type => !render_colons, + InlayKind::Chaining | InlayKind::ClosingBrace => true, InlayKind::ClosingParenthesis - | InlayKind::DiscriminantHint + | 
InlayKind::Discriminant | InlayKind::OpeningParenthesis - | InlayKind::BindingModeHint - | InlayKind::ClosureReturnTypeHint - | InlayKind::GenericParamListHint - | InlayKind::AdjustmentHint - | InlayKind::AdjustmentHintPostfix - | InlayKind::LifetimeHint - | InlayKind::ParameterHint => false, + | InlayKind::BindingMode + | InlayKind::ClosureReturnType + | InlayKind::GenericParamList + | InlayKind::Adjustment + | InlayKind::AdjustmentPostfix + | InlayKind::Lifetime + | InlayKind::Parameter => false, }), padding_right: Some(match inlay_hint.kind { InlayKind::ClosingParenthesis | InlayKind::OpeningParenthesis - | InlayKind::ChainingHint - | InlayKind::ClosureReturnTypeHint - | InlayKind::GenericParamListHint - | InlayKind::AdjustmentHint - | InlayKind::AdjustmentHintPostfix - | InlayKind::TypeHint - | InlayKind::DiscriminantHint - | InlayKind::ClosingBraceHint => false, - InlayKind::BindingModeHint => { + | InlayKind::Chaining + | InlayKind::ClosureReturnType + | InlayKind::GenericParamList + | InlayKind::Adjustment + | InlayKind::AdjustmentPostfix + | InlayKind::Type + | InlayKind::Discriminant + | InlayKind::ClosingBrace => false, + InlayKind::BindingMode => { matches!(&label, lsp_types::InlayHintLabel::String(s) if s != "&") } - InlayKind::ParameterHint | InlayKind::LifetimeHint => true, + InlayKind::Parameter | InlayKind::Lifetime => true, }), kind: match inlay_hint.kind { - InlayKind::ParameterHint => Some(lsp_types::InlayHintKind::PARAMETER), - InlayKind::ClosureReturnTypeHint | InlayKind::TypeHint | InlayKind::ChainingHint => { + InlayKind::Parameter => Some(lsp_types::InlayHintKind::PARAMETER), + InlayKind::ClosureReturnType | InlayKind::Type | InlayKind::Chaining => { Some(lsp_types::InlayHintKind::TYPE) } InlayKind::ClosingParenthesis - | InlayKind::DiscriminantHint + | InlayKind::Discriminant | InlayKind::OpeningParenthesis - | InlayKind::BindingModeHint - | InlayKind::GenericParamListHint - | InlayKind::LifetimeHint - | InlayKind::AdjustmentHint - | InlayKind::AdjustmentHintPostfix - | InlayKind::ClosingBraceHint => None, + | InlayKind::BindingMode + | InlayKind::GenericParamList + | InlayKind::Lifetime + | InlayKind::Adjustment + | InlayKind::AdjustmentPostfix + | InlayKind::ClosingBrace => None, }, text_edits: None, data: None, diff --git a/docs/dev/lsp-extensions.md b/docs/dev/lsp-extensions.md index a4780af1a2..0f24ddbbc0 100644 --- a/docs/dev/lsp-extensions.md +++ b/docs/dev/lsp-extensions.md @@ -1,5 +1,5 @@ [^\n]+\n/m, ""); } - let value; - if (errorCode) { - if (typeof diag.code === "string" || typeof diag.code === "number") { - value = diag.code; - } else { - value = diag.code?.value; - } - } diag.code = { target: vscode.Uri.from({ scheme: diagnostics.URI_SCHEME, @@ -153,7 +206,8 @@ export async function createClient( fragment: uri.toString(), query: idx.toString(), }), - value: value ?? "Click for full compiler diagnostic", + value: + errorCode && value ? 
value : "Click for full compiler diagnostic", }; } }); diff --git a/editors/code/src/ctx.ts b/editors/code/src/ctx.ts index c2dca733df..5515921ed1 100644 --- a/editors/code/src/ctx.ts +++ b/editors/code/src/ctx.ts @@ -82,6 +82,7 @@ export class Ctx { private state: PersistentState; private commandFactories: Record; private commandDisposables: Disposable[]; + private unlinkedFiles: vscode.Uri[]; get client() { return this._client; @@ -99,6 +100,7 @@ export class Ctx { this.clientSubscriptions = []; this.commandDisposables = []; this.commandFactories = commandFactories; + this.unlinkedFiles = []; this.state = new PersistentState(extCtx.globalState); this.config = new Config(extCtx); @@ -218,7 +220,8 @@ export class Ctx { this.outputChannel, initializationOptions, serverOptions, - this.config + this.config, + this.unlinkedFiles ); this.pushClientCleanup( this._client.onNotification(ra.serverStatus, (params) => From 6778d1a6ebf865250c83398c0b2a984413284c47 Mon Sep 17 00:00:00 2001 From: feniljain Date: Thu, 16 Mar 2023 22:52:35 +0530 Subject: [PATCH 1181/1945] feat: do not remove other variants, just push them down in list --- crates/ide-completion/src/completions.rs | 6 +----- crates/ide-completion/src/completions/expr.rs | 1 - crates/ide-completion/src/completions/pattern.rs | 1 - crates/ide-completion/src/render.rs | 13 ++++++++++++- crates/ide-completion/src/render/pattern.rs | 11 +++++++++-- crates/ide-completion/src/tests/record.rs | 2 ++ 6 files changed, 24 insertions(+), 10 deletions(-) diff --git a/crates/ide-completion/src/completions.rs b/crates/ide-completion/src/completions.rs index ec8a23e907..50be5c788f 100644 --- a/crates/ide-completion/src/completions.rs +++ b/crates/ide-completion/src/completions.rs @@ -536,7 +536,6 @@ fn enum_variants_with_paths( enum_: hir::Enum, impl_: &Option, cb: impl Fn(&mut Completions, &CompletionContext<'_>, hir::Variant, hir::ModPath), - missing_variants: Option>, ) { let mut process_variant = |variant: Variant| { let self_path = hir::ModPath::from_segments( @@ -547,10 +546,7 @@ fn enum_variants_with_paths( cb(acc, ctx, variant, self_path); }; - let variants = match missing_variants { - Some(missing_variants) => missing_variants, - None => enum_.variants(ctx.db), - }; + let variants = enum_.variants(ctx.db); if let Some(impl_) = impl_.as_ref().and_then(|impl_| ctx.sema.to_def(impl_)) { if impl_.self_ty(ctx.db).as_adt() == Some(hir::Adt::Enum(enum_)) { diff --git a/crates/ide-completion/src/completions/expr.rs b/crates/ide-completion/src/completions/expr.rs index 8946011280..3192b21cfb 100644 --- a/crates/ide-completion/src/completions/expr.rs +++ b/crates/ide-completion/src/completions/expr.rs @@ -208,7 +208,6 @@ pub(crate) fn complete_expr_path( |acc, ctx, variant, path| { acc.add_qualified_enum_variant(ctx, path_ctx, variant, path) }, - None, ); } } diff --git a/crates/ide-completion/src/completions/pattern.rs b/crates/ide-completion/src/completions/pattern.rs index 6ad6a06f11..58d5bf114c 100644 --- a/crates/ide-completion/src/completions/pattern.rs +++ b/crates/ide-completion/src/completions/pattern.rs @@ -58,7 +58,6 @@ pub(crate) fn complete_pattern( |acc, ctx, variant, path| { acc.add_qualified_variant_pat(ctx, pattern_ctx, variant, path); }, - Some(pattern_ctx.missing_variants.clone()), ); } } diff --git a/crates/ide-completion/src/render.rs b/crates/ide-completion/src/render.rs index 86302cb067..eb2df395c4 100644 --- a/crates/ide-completion/src/render.rs +++ b/crates/ide-completion/src/render.rs @@ -32,11 +32,22 @@ pub(crate) struct 
RenderContext<'a> { completion: &'a CompletionContext<'a>, is_private_editable: bool, import_to_add: Option, + // For variants which are missing + // in match completion context + // + // Option -> only applicable for enums + // bool -> is enum variant missing or not? + is_variant_missing: Option, } impl<'a> RenderContext<'a> { pub(crate) fn new(completion: &'a CompletionContext<'a>) -> RenderContext<'a> { - RenderContext { completion, is_private_editable: false, import_to_add: None } + RenderContext { + completion, + is_private_editable: false, + import_to_add: None, + is_variant_missing: None, + } } pub(crate) fn private_editable(mut self, private_editable: bool) -> Self { diff --git a/crates/ide-completion/src/render/pattern.rs b/crates/ide-completion/src/render/pattern.rs index 37c65abba9..d70f02127d 100644 --- a/crates/ide-completion/src/render/pattern.rs +++ b/crates/ide-completion/src/render/pattern.rs @@ -43,7 +43,7 @@ pub(crate) fn render_struct_pat( } pub(crate) fn render_variant_pat( - ctx: RenderContext<'_>, + mut ctx: RenderContext<'_>, pattern_ctx: &PatternContext, path_ctx: Option<&PathCompletionCtx>, variant: hir::Variant, @@ -56,6 +56,11 @@ pub(crate) fn render_variant_pat( let (visible_fields, fields_omitted) = visible_fields(ctx.completion, &fields, variant)?; let enum_ty = variant.parent_enum(ctx.db()).ty(ctx.db()); + // Missing in context of match statement completions + if pattern_ctx.missing_variants.contains(&variant) { + ctx.is_variant_missing = Some(true); + } + let (name, escaped_name) = match path { Some(path) => (path.unescaped().to_string().into(), path.to_string().into()), None => { @@ -97,7 +102,9 @@ fn build_completion( ) -> CompletionItem { let mut relevance = ctx.completion_relevance(); - relevance.type_match = super::compute_type_match(ctx.completion, &adt_ty); + if let Some(true) = ctx.is_variant_missing { + relevance.type_match = super::compute_type_match(ctx.completion, &adt_ty); + } let mut item = CompletionItem::new(CompletionItemKind::Binding, ctx.source_range(), label); item.set_documentation(ctx.docs(def)) diff --git a/crates/ide-completion/src/tests/record.rs b/crates/ide-completion/src/tests/record.rs index 727114610e..2de42e8435 100644 --- a/crates/ide-completion/src/tests/record.rs +++ b/crates/ide-completion/src/tests/record.rs @@ -67,6 +67,7 @@ fn foo(baz: Baz) { ev Err ev Ok bn Baz::Bar Baz::Bar$0 + bn Baz::Foo Baz::Foo$0 bn Err(…) Err($1)$0 bn Ok(…) Ok($1)$0 kw mut @@ -93,6 +94,7 @@ fn foo(baz: Baz) { ev Err ev Ok bn Baz::Bar Baz::Bar$0 + bn Baz::Foo Baz::Foo$0 bn Err(…) Err($1)$0 bn Ok(…) Ok($1)$0 kw mut From b7b9ae59a081f0ecbd11d66e205307328cd5cfbe Mon Sep 17 00:00:00 2001 From: hkalbasi Date: Wed, 8 Mar 2023 20:58:52 +0330 Subject: [PATCH 1182/1945] desugar `? 
operator` --- crates/hir-def/src/body.rs | 5 +- crates/hir-def/src/body/lower.rs | 93 +++++++++++++++++-- crates/hir-def/src/body/pretty.rs | 4 - crates/hir-def/src/expr.rs | 4 - crates/hir-def/src/lang_item.rs | 21 +++-- crates/hir-def/src/path.rs | 83 ++++++++++++----- crates/hir-def/src/path/lower.rs | 9 +- crates/hir-def/src/resolver.rs | 48 ++++++++-- crates/hir-ty/src/consteval.rs | 8 +- crates/hir-ty/src/consteval/tests.rs | 67 +++++++++++++ crates/hir-ty/src/db.rs | 4 + crates/hir-ty/src/diagnostics/unsafe_check.rs | 2 +- crates/hir-ty/src/infer.rs | 36 ++++--- crates/hir-ty/src/infer/expr.rs | 30 +++--- crates/hir-ty/src/infer/path.rs | 2 +- crates/hir-ty/src/lower.rs | 88 ++++++++++++------ crates/hir-ty/src/mir/eval.rs | 62 +++++++------ crates/hir-ty/src/mir/lower.rs | 18 +++- crates/hir-ty/src/mir/lower/as_place.rs | 2 +- crates/hir-ty/src/mir/pretty.rs | 1 + crates/hir-ty/src/utils.rs | 2 +- crates/hir/src/semantics.rs | 5 +- crates/hir/src/source_analyzer.rs | 34 +++---- .../handlers/replace_try_expr_with_match.rs | 6 +- crates/ide-assists/src/tests/generated.rs | 2 +- crates/ide/src/hover/tests.rs | 8 +- crates/test-utils/src/minicore.rs | 64 ++++++++++++- 27 files changed, 517 insertions(+), 191 deletions(-) diff --git a/crates/hir-def/src/body.rs b/crates/hir-def/src/body.rs index b70e658efd..7f9b9476dc 100644 --- a/crates/hir-def/src/body.rs +++ b/crates/hir-def/src/body.rs @@ -391,7 +391,7 @@ impl Body { } }; let expander = Expander::new(db, file_id, module); - let (mut body, source_map) = Body::new(db, expander, params, body); + let (mut body, source_map) = Body::new(db, expander, params, body, module.krate); body.shrink_to_fit(); (Arc::new(body), Arc::new(source_map)) @@ -420,8 +420,9 @@ impl Body { expander: Expander, params: Option<(ast::ParamList, impl Iterator)>, body: Option, + krate: CrateId, ) -> (Body, BodySourceMap) { - lower::lower(db, expander, params, body) + lower::lower(db, expander, params, body, krate) } fn shrink_to_fit(&mut self) { diff --git a/crates/hir-def/src/body/lower.rs b/crates/hir-def/src/body/lower.rs index fedaf39559..348b7589ff 100644 --- a/crates/hir-def/src/body/lower.rs +++ b/crates/hir-def/src/body/lower.rs @@ -3,6 +3,7 @@ use std::{mem, sync::Arc}; +use base_db::CrateId; use either::Either; use hir_expand::{ ast_id_map::AstIdMap, @@ -36,6 +37,7 @@ use crate::{ RecordFieldPat, RecordLitField, Statement, }, item_scope::BuiltinShadowMode, + lang_item::LangItem, path::{GenericArgs, Path}, type_ref::{Mutability, Rawness, TypeRef}, AdtId, BlockId, BlockLoc, ModuleDefId, UnresolvedMacro, @@ -80,9 +82,11 @@ pub(super) fn lower( expander: Expander, params: Option<(ast::ParamList, impl Iterator)>, body: Option, + krate: CrateId, ) -> (Body, BodySourceMap) { ExprCollector { db, + krate, source_map: BodySourceMap::default(), ast_id_map: db.ast_id_map(expander.current_file_id), body: Body { @@ -107,6 +111,7 @@ struct ExprCollector<'a> { expander: Expander, ast_id_map: Arc, body: Body, + krate: CrateId, source_map: BodySourceMap, is_lowering_assignee_expr: bool, is_lowering_generator: bool, @@ -176,8 +181,7 @@ impl ExprCollector<'_> { self.source_map.expr_map.insert(src, id); id } - // desugared exprs don't have ptr, that's wrong and should be fixed - // somehow. + // FIXME: desugared exprs don't have ptr, that's wrong and should be fixed somehow. 
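The lang-item lookups in `collect_try_operator` (in the hunk that follows) chain several `if let`s inside a labelled block and `break 'if_chain (...)` out with the collected paths once everything is found. A standalone sketch of that label-break-value construct, stable since Rust 1.65, with made-up names:

```rust
fn sum_if_both(a: Option<i32>, b: Option<i32>) -> i32 {
    let (a, b) = 'both: {
        if let Some(a) = a {
            if let Some(b) = b {
                // Early-exit the labelled block *with a value*.
                break 'both (a, b);
            }
        }
        // Some piece was missing: bail out of the whole function instead.
        return 0
    };
    a + b
}

fn main() {
    assert_eq!(sum_if_both(Some(2), Some(3)), 5);
    assert_eq!(sum_if_both(Some(2), None), 0);
}
```

This shape is also why the lowering can simply fall back to `Expr::Missing` when any of the four lang items is absent: the labelled block falls through to that fallback path instead of breaking out with the paths.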
fn alloc_expr_desugared(&mut self, expr: Expr) -> ExprId { self.body.exprs.alloc(expr) } @@ -199,6 +203,10 @@ impl ExprCollector<'_> { self.source_map.pat_map.insert(src, id); id } + // FIXME: desugared pats don't have ptr, that's wrong and should be fixed somehow. + fn alloc_pat_desugared(&mut self, pat: Pat) -> PatId { + self.body.pats.alloc(pat) + } fn missing_pat(&mut self) -> PatId { self.body.pats.alloc(Pat::Missing) } @@ -437,10 +445,7 @@ impl ExprCollector<'_> { let expr = self.collect_expr_opt(e.expr()); self.alloc_expr(Expr::Await { expr }, syntax_ptr) } - ast::Expr::TryExpr(e) => { - let expr = self.collect_expr_opt(e.expr()); - self.alloc_expr(Expr::Try { expr }, syntax_ptr) - } + ast::Expr::TryExpr(e) => self.collect_try_operator(syntax_ptr, e), ast::Expr::CastExpr(e) => { let expr = self.collect_expr_opt(e.expr()); let type_ref = Interned::new(TypeRef::from_ast_opt(&self.ctx(), e.ty())); @@ -601,6 +606,82 @@ impl ExprCollector<'_> { }) } + fn collect_try_operator(&mut self, syntax_ptr: AstPtr, e: ast::TryExpr) -> ExprId { + let (try_branch, cf_continue, cf_break, try_from_residual) = 'if_chain: { + if let Some(try_branch) = LangItem::TryTraitBranch.path(self.db, self.krate) { + if let Some(cf_continue) = + LangItem::ControlFlowContinue.path(self.db, self.krate) + { + if let Some(cf_break) = + LangItem::ControlFlowBreak.path(self.db, self.krate) + { + if let Some(try_from_residual) = + LangItem::TryTraitFromResidual.path(self.db, self.krate) + { + break 'if_chain ( + try_branch, + cf_continue, + cf_break, + try_from_residual, + ); + } + } + } + } + // Some of the needed lang items are missing, so we can't desugar + return self.alloc_expr(Expr::Missing, syntax_ptr); + }; + let operand = self.collect_expr_opt(e.expr()); + let try_branch = self.alloc_expr(Expr::Path(try_branch), syntax_ptr.clone()); + let expr = self.alloc_expr( + Expr::Call { + callee: try_branch, + args: Box::new([operand]), + is_assignee_expr: false, + }, + syntax_ptr.clone(), + ); + let continue_binding = + self.alloc_binding(name![v1], BindingAnnotation::Unannotated); + let continue_bpat = + self.alloc_pat_desugared(Pat::Bind { id: continue_binding, subpat: None }); + self.add_definition_to_binding(continue_binding, continue_bpat); + let continue_arm = MatchArm { + pat: self.alloc_pat_desugared(Pat::TupleStruct { + path: Some(Box::new(cf_continue)), + args: Box::new([continue_bpat]), + ellipsis: None, + }), + guard: None, + expr: self.alloc_expr(Expr::Path(Path::from(name![v1])), syntax_ptr.clone()), + }; + let break_binding = self.alloc_binding(name![v1], BindingAnnotation::Unannotated); + let break_bpat = + self.alloc_pat_desugared(Pat::Bind { id: break_binding, subpat: None }); + self.add_definition_to_binding(break_binding, break_bpat); + let break_arm = MatchArm { + pat: self.alloc_pat_desugared(Pat::TupleStruct { + path: Some(Box::new(cf_break)), + args: Box::new([break_bpat]), + ellipsis: None, + }), + guard: None, + expr: { + let x = + self.alloc_expr(Expr::Path(Path::from(name![v1])), syntax_ptr.clone()); + let callee = + self.alloc_expr(Expr::Path(try_from_residual), syntax_ptr.clone()); + let result = self.alloc_expr( + Expr::Call { callee, args: Box::new([x]), is_assignee_expr: false }, + syntax_ptr.clone(), + ); + self.alloc_expr(Expr::Return { expr: Some(result) }, syntax_ptr.clone()) + }, + }; + let arms = Box::new([continue_arm, break_arm]); + self.alloc_expr(Expr::Match { expr, arms }, syntax_ptr) + } + fn collect_macro_call( &mut self, mcall: ast::MacroCall, diff --git 
a/crates/hir-def/src/body/pretty.rs b/crates/hir-def/src/body/pretty.rs index 5a9b825a25..c091ad0d15 100644 --- a/crates/hir-def/src/body/pretty.rs +++ b/crates/hir-def/src/body/pretty.rs @@ -288,10 +288,6 @@ impl<'a> Printer<'a> { self.print_expr(*expr); w!(self, ".await"); } - Expr::Try { expr } => { - self.print_expr(*expr); - w!(self, "?"); - } Expr::Cast { expr, type_ref } => { self.print_expr(*expr); w!(self, " as "); diff --git a/crates/hir-def/src/expr.rs b/crates/hir-def/src/expr.rs index 19fa6b2541..5b87582243 100644 --- a/crates/hir-def/src/expr.rs +++ b/crates/hir-def/src/expr.rs @@ -192,9 +192,6 @@ pub enum Expr { Await { expr: ExprId, }, - Try { - expr: ExprId, - }, Cast { expr: ExprId, type_ref: Interned, @@ -383,7 +380,6 @@ impl Expr { } Expr::Field { expr, .. } | Expr::Await { expr } - | Expr::Try { expr } | Expr::Cast { expr, .. } | Expr::Ref { expr, .. } | Expr::UnaryOp { expr, .. } diff --git a/crates/hir-def/src/lang_item.rs b/crates/hir-def/src/lang_item.rs index 4096e0a382..818054188b 100644 --- a/crates/hir-def/src/lang_item.rs +++ b/crates/hir-def/src/lang_item.rs @@ -8,8 +8,8 @@ use rustc_hash::FxHashMap; use syntax::SmolStr; use crate::{ - db::DefDatabase, AdtId, AssocItemId, AttrDefId, CrateId, EnumId, EnumVariantId, FunctionId, - ImplId, ModuleDefId, StaticId, StructId, TraitId, TypeAliasId, UnionId, + db::DefDatabase, path::Path, AdtId, AssocItemId, AttrDefId, CrateId, EnumId, EnumVariantId, + FunctionId, ImplId, ModuleDefId, StaticId, StructId, TraitId, TypeAliasId, UnionId, }; #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] @@ -220,11 +220,6 @@ macro_rules! language_item_table { } } - /// Opposite of [`LangItem::name`] - pub fn from_name(name: &hir_expand::name::Name) -> Option { - Self::from_str(name.as_str()?) - } - /// Opposite of [`LangItem::name`] pub fn from_str(name: &str) -> Option { match name { @@ -236,6 +231,18 @@ macro_rules! language_item_table { } } +impl LangItem { + /// Opposite of [`LangItem::name`] + pub fn from_name(name: &hir_expand::name::Name) -> Option { + Self::from_str(name.as_str()?) + } + + pub fn path(&self, db: &dyn DefDatabase, start_crate: CrateId) -> Option { + let t = db.lang_item(start_crate, *self)?; + Some(Path::LangItem(t)) + } +} + language_item_table! { // Variant name, Name, Getter method name, Target Generic requirements; Sized, sized, sized_trait, Target::Trait, GenericRequirement::Exact(0); diff --git a/crates/hir-def/src/path.rs b/crates/hir-def/src/path.rs index f3197d1800..c67c29818f 100644 --- a/crates/hir-def/src/path.rs +++ b/crates/hir-def/src/path.rs @@ -8,6 +8,7 @@ use std::{ use crate::{ body::LowerCtx, + lang_item::LangItemTarget, type_ref::{ConstRefOrPath, LifetimeRef}, }; use hir_expand::name::Name; @@ -36,13 +37,19 @@ impl Display for ImportAlias { } #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct Path { - /// Type based path like `::foo`. - /// Note that paths like `::foo` are desugared to `Trait::::foo`. - type_anchor: Option>, - mod_path: Interned, - /// Invariant: the same len as `self.mod_path.segments` or `None` if all segments are `None`. - generic_args: Option>]>>, +pub enum Path { + /// A normal path + Normal { + /// Type based path like `::foo`. + /// Note that paths like `::foo` are desugared to `Trait::::foo`. + type_anchor: Option>, + mod_path: Interned, + /// Invariant: the same len as `self.mod_path.segments` or `None` if all segments are `None`. + generic_args: Option>]>>, + }, + /// A link to a lang item. It is used in desugaring of things like `x?`. 
We can show these + /// links via a normal path since they might be private and not accessible in the usage place. + LangItem(LangItemTarget), } /// Generic arguments to a path segment (e.g. the `i32` in `Option`). This @@ -102,51 +109,77 @@ impl Path { ) -> Path { let generic_args = generic_args.into(); assert_eq!(path.len(), generic_args.len()); - Path { type_anchor: None, mod_path: Interned::new(path), generic_args: Some(generic_args) } + Path::Normal { + type_anchor: None, + mod_path: Interned::new(path), + generic_args: Some(generic_args), + } + } + + /// Converts a known mod path to `Path`. + pub fn from_known_path_with_no_generic(path: ModPath) -> Path { + Path::Normal { type_anchor: None, mod_path: Interned::new(path), generic_args: None } } pub fn kind(&self) -> &PathKind { - &self.mod_path.kind + match self { + Path::Normal { mod_path, .. } => &mod_path.kind, + Path::LangItem(_) => &PathKind::Abs, + } } pub fn type_anchor(&self) -> Option<&TypeRef> { - self.type_anchor.as_deref() + match self { + Path::Normal { type_anchor, .. } => type_anchor.as_deref(), + Path::LangItem(_) => None, + } } pub fn segments(&self) -> PathSegments<'_> { - let s = PathSegments { - segments: self.mod_path.segments(), - generic_args: self.generic_args.as_deref(), + let Path::Normal { mod_path, generic_args, .. } = self else { + return PathSegments { + segments: &[], + generic_args: None, + }; }; + let s = + PathSegments { segments: mod_path.segments(), generic_args: generic_args.as_deref() }; if let Some(generic_args) = s.generic_args { assert_eq!(s.segments.len(), generic_args.len()); } s } - pub fn mod_path(&self) -> &ModPath { - &self.mod_path + pub fn mod_path(&self) -> Option<&ModPath> { + match self { + Path::Normal { mod_path, .. } => Some(&mod_path), + Path::LangItem(_) => None, + } } pub fn qualifier(&self) -> Option { - if self.mod_path.is_ident() { + let Path::Normal { mod_path, generic_args, type_anchor } = self else { + return None; + }; + if mod_path.is_ident() { return None; } - let res = Path { - type_anchor: self.type_anchor.clone(), + let res = Path::Normal { + type_anchor: type_anchor.clone(), mod_path: Interned::new(ModPath::from_segments( - self.mod_path.kind, - self.mod_path.segments()[..self.mod_path.segments().len() - 1].iter().cloned(), + mod_path.kind, + mod_path.segments()[..mod_path.segments().len() - 1].iter().cloned(), )), - generic_args: self.generic_args.as_ref().map(|it| it[..it.len() - 1].to_vec().into()), + generic_args: generic_args.as_ref().map(|it| it[..it.len() - 1].to_vec().into()), }; Some(res) } pub fn is_self_type(&self) -> bool { - self.type_anchor.is_none() - && self.generic_args.as_deref().is_none() - && self.mod_path.is_Self() + let Path::Normal { mod_path, generic_args, type_anchor } = self else { + return false; + }; + type_anchor.is_none() && generic_args.as_deref().is_none() && mod_path.is_Self() } } @@ -222,7 +255,7 @@ impl GenericArgs { impl From for Path { fn from(name: Name) -> Path { - Path { + Path::Normal { type_anchor: None, mod_path: Interned::new(ModPath::from_segments(PathKind::Plain, iter::once(name))), generic_args: None, diff --git a/crates/hir-def/src/path/lower.rs b/crates/hir-def/src/path/lower.rs index b7542bd777..407f38daad 100644 --- a/crates/hir-def/src/path/lower.rs +++ b/crates/hir-def/src/path/lower.rs @@ -75,8 +75,11 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option>::Foo desugars to Trait::Foo Some(trait_ref) => { - let Path { mod_path, generic_args: path_generic_args, .. 
} = - Path::from_src(trait_ref.path()?, ctx)?; + let Path::Normal { mod_path, generic_args: path_generic_args, .. } = + Path::from_src(trait_ref.path()?, ctx)? else + { + return None; + }; let num_segments = mod_path.segments().len(); kind = mod_path.kind; @@ -157,7 +160,7 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option Option<(TypeNs, Option)> { + let path = match path { + Path::Normal { mod_path, .. } => mod_path, + Path::LangItem(l) => { + return Some(( + match *l { + LangItemTarget::Union(x) => TypeNs::AdtId(x.into()), + LangItemTarget::TypeAlias(x) => TypeNs::TypeAliasId(x), + LangItemTarget::Struct(x) => TypeNs::AdtId(x.into()), + LangItemTarget::EnumVariant(x) => TypeNs::EnumVariantId(x), + LangItemTarget::EnumId(x) => TypeNs::AdtId(x.into()), + LangItemTarget::Trait(x) => TypeNs::TraitId(x), + LangItemTarget::Function(_) + | LangItemTarget::ImplDef(_) + | LangItemTarget::Static(_) => return None, + }, + None, + )) + } + }; let first_name = path.segments().first()?; let skip_to_mod = path.kind != PathKind::Plain; if skip_to_mod { @@ -217,7 +237,7 @@ impl Resolver { pub fn resolve_path_in_type_ns_fully( &self, db: &dyn DefDatabase, - path: &ModPath, + path: &Path, ) -> Option { let (res, unresolved) = self.resolve_path_in_type_ns(db, path)?; if unresolved.is_some() { @@ -245,8 +265,24 @@ impl Resolver { pub fn resolve_path_in_value_ns( &self, db: &dyn DefDatabase, - path: &ModPath, + path: &Path, ) -> Option { + let path = match path { + Path::Normal { mod_path, .. } => mod_path, + Path::LangItem(l) => { + return Some(ResolveValueResult::ValueNs(match *l { + LangItemTarget::Function(x) => ValueNs::FunctionId(x), + LangItemTarget::Static(x) => ValueNs::StaticId(x), + LangItemTarget::Struct(x) => ValueNs::StructId(x), + LangItemTarget::EnumVariant(x) => ValueNs::EnumVariantId(x), + LangItemTarget::Union(_) + | LangItemTarget::ImplDef(_) + | LangItemTarget::TypeAlias(_) + | LangItemTarget::Trait(_) + | LangItemTarget::EnumId(_) => return None, + })) + } + }; let n_segments = path.segments().len(); let tmp = name![self]; let first_name = if path.is_self() { &tmp } else { path.segments().first()? }; @@ -340,7 +376,7 @@ impl Resolver { pub fn resolve_path_in_value_ns_fully( &self, db: &dyn DefDatabase, - path: &ModPath, + path: &Path, ) -> Option { match self.resolve_path_in_value_ns(db, path)? 
{ ResolveValueResult::ValueNs(it) => Some(it), @@ -441,7 +477,7 @@ impl Resolver { &Scope::ImplDefScope(impl_) => { if let Some(target_trait) = &db.impl_data(impl_).target_trait { if let Some(TypeNs::TraitId(trait_)) = - self.resolve_path_in_type_ns_fully(db, target_trait.path.mod_path()) + self.resolve_path_in_type_ns_fully(db, &target_trait.path) { traits.insert(trait_); } diff --git a/crates/hir-ty/src/consteval.rs b/crates/hir-ty/src/consteval.rs index 5830c48988..fcb3445a54 100644 --- a/crates/hir-ty/src/consteval.rs +++ b/crates/hir-ty/src/consteval.rs @@ -4,7 +4,7 @@ use base_db::CrateId; use chalk_ir::{BoundVar, DebruijnIndex, GenericArgData}; use hir_def::{ expr::Expr, - path::ModPath, + path::Path, resolver::{Resolver, ValueNs}, type_ref::ConstRef, ConstId, EnumVariantId, @@ -72,7 +72,7 @@ impl From for ConstEvalError { pub(crate) fn path_to_const( db: &dyn HirDatabase, resolver: &Resolver, - path: &ModPath, + path: &Path, mode: ParamLoweringMode, args_lazy: impl FnOnce() -> Generics, debruijn: DebruijnIndex, @@ -89,7 +89,7 @@ pub(crate) fn path_to_const( Some(x) => ConstValue::BoundVar(BoundVar::new(debruijn, x)), None => { never!( - "Generic list doesn't contain this param: {:?}, {}, {:?}", + "Generic list doesn't contain this param: {:?}, {:?}, {:?}", args, path, p @@ -228,7 +228,7 @@ pub(crate) fn eval_to_const( let db = ctx.db; if let Expr::Path(p) = &ctx.body.exprs[expr] { let resolver = &ctx.resolver; - if let Some(c) = path_to_const(db, resolver, p.mod_path(), mode, args, debruijn) { + if let Some(c) = path_to_const(db, resolver, p, mode, args, debruijn) { return c; } } diff --git a/crates/hir-ty/src/consteval/tests.rs b/crates/hir-ty/src/consteval/tests.rs index 6a29e8ce52..3bec2ee88b 100644 --- a/crates/hir-ty/src/consteval/tests.rs +++ b/crates/hir-ty/src/consteval/tests.rs @@ -801,6 +801,73 @@ fn options() { ); } +#[test] +fn from_trait() { + check_number( + r#" + //- minicore: from + struct E1(i32); + struct E2(i32); + + impl From for E2 { + fn from(E1(x): E1) -> Self { + E2(1000 * x) + } + } + const GOAL: i32 = { + let x: E2 = E1(2).into(); + x.0 + }; + "#, + 2000, + ); +} + +#[test] +fn try_operator() { + check_number( + r#" + //- minicore: option, try + const fn f(x: Option, y: Option) -> Option { + Some(x? * y?) + } + const fn g(x: Option, y: Option) -> i32 { + match f(x, y) { + Some(k) => k, + None => 5, + } + } + const GOAL: i32 = g(Some(10), Some(20)) + g(Some(30), None) + g(None, Some(40)) + g(None, None); + "#, + 215, + ); + check_number( + r#" + //- minicore: result, try, from + struct E1(i32); + struct E2(i32); + + impl From for E2 { + fn from(E1(x): E1) -> Self { + E2(1000 * x) + } + } + + const fn f(x: Result) -> Result { + Ok(x? 
* 10) + } + const fn g(x: Result) -> i32 { + match f(x) { + Ok(k) => 7 * k, + Err(E2(k)) => 5 * k, + } + } + const GOAL: i32 = g(Ok(2)) + g(Err(E1(3))); + "#, + 15140, + ); +} + #[test] fn or_pattern() { check_number( diff --git a/crates/hir-ty/src/db.rs b/crates/hir-ty/src/db.rs index 304c78767f..8f1af4c2f8 100644 --- a/crates/hir-ty/src/db.rs +++ b/crates/hir-ty/src/db.rs @@ -97,6 +97,10 @@ pub trait HirDatabase: DefDatabase + Upcast { #[salsa::invoke(crate::lower::generic_predicates_query)] fn generic_predicates(&self, def: GenericDefId) -> Arc<[Binders]>; + #[salsa::invoke(crate::lower::trait_environment_for_body_query)] + #[salsa::transparent] + fn trait_environment_for_body(&self, def: DefWithBodyId) -> Arc; + #[salsa::invoke(crate::lower::trait_environment_query)] fn trait_environment(&self, def: GenericDefId) -> Arc; diff --git a/crates/hir-ty/src/diagnostics/unsafe_check.rs b/crates/hir-ty/src/diagnostics/unsafe_check.rs index d25c0ccf00..664822ee6f 100644 --- a/crates/hir-ty/src/diagnostics/unsafe_check.rs +++ b/crates/hir-ty/src/diagnostics/unsafe_check.rs @@ -73,7 +73,7 @@ fn walk_unsafe( } Expr::Path(path) => { let resolver = resolver_for_expr(db.upcast(), def, current); - let value_or_partial = resolver.resolve_path_in_value_ns(db.upcast(), path.mod_path()); + let value_or_partial = resolver.resolve_path_in_value_ns(db.upcast(), path); if let Some(ResolveValueResult::ValueNs(ValueNs::StaticId(id))) = value_or_partial { if db.static_data(id).mutable { unsafe_expr_cb(UnsafeExpr { expr: current, inside_unsafe_block }); diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs index 7de5b4295f..00b5f7948a 100644 --- a/crates/hir-ty/src/infer.rs +++ b/crates/hir-ty/src/infer.rs @@ -25,16 +25,16 @@ use hir_def::{ expr::{BindingAnnotation, BindingId, ExprId, ExprOrPatId, PatId}, lang_item::{LangItem, LangItemTarget}, layout::Integer, - path::Path, + path::{ModPath, Path}, resolver::{HasResolver, ResolveValueResult, Resolver, TypeNs, ValueNs}, type_ref::TypeRef, - AdtId, AssocItemId, DefWithBodyId, EnumVariantId, FieldId, FunctionId, HasModule, - ItemContainerId, Lookup, TraitId, TypeAliasId, VariantId, + AdtId, AssocItemId, DefWithBodyId, EnumVariantId, FieldId, FunctionId, ItemContainerId, Lookup, + TraitId, TypeAliasId, VariantId, }; use hir_expand::name::{name, Name}; use la_arena::ArenaMap; use rustc_hash::{FxHashMap, FxHashSet}; -use stdx::always; +use stdx::{always, never}; use crate::{ db::HirDatabase, fold_tys, fold_tys_and_consts, infer::coerce::CoerceMany, @@ -110,10 +110,7 @@ pub(crate) fn normalize(db: &dyn HirDatabase, owner: DefWithBodyId, ty: Ty) -> T if !ty.data(Interner).flags.intersects(TypeFlags::HAS_PROJECTION) { return ty; } - let krate = owner.module(db.upcast()).krate(); - let trait_env = owner - .as_generic_def_id() - .map_or_else(|| Arc::new(TraitEnvironment::empty(krate)), |d| db.trait_environment(d)); + let trait_env = db.trait_environment_for_body(owner); let mut table = unify::InferenceTable::new(db, trait_env); let ty_with_vars = table.normalize_associated_types_in(ty); @@ -506,10 +503,7 @@ impl<'a> InferenceContext<'a> { body: &'a Body, resolver: Resolver, ) -> Self { - let krate = owner.module(db.upcast()).krate(); - let trait_env = owner - .as_generic_def_id() - .map_or_else(|| Arc::new(TraitEnvironment::empty(krate)), |d| db.trait_environment(d)); + let trait_env = db.trait_environment_for_body(owner); InferenceContext { result: InferenceResult::default(), table: unify::InferenceTable::new(db, trait_env.clone()), @@ -851,7 +845,7 @@ 
impl<'a> InferenceContext<'a> { // FIXME: this should resolve assoc items as well, see this example: // https://play.rust-lang.org/?gist=087992e9e22495446c01c0d4e2d69521 let (resolution, unresolved) = if value_ns { - match self.resolver.resolve_path_in_value_ns(self.db.upcast(), path.mod_path()) { + match self.resolver.resolve_path_in_value_ns(self.db.upcast(), path) { Some(ResolveValueResult::ValueNs(value)) => match value { ValueNs::EnumVariantId(var) => { let substs = ctx.substs_from_path(path, var.into(), true); @@ -872,11 +866,15 @@ impl<'a> InferenceContext<'a> { None => return (self.err_ty(), None), } } else { - match self.resolver.resolve_path_in_type_ns(self.db.upcast(), path.mod_path()) { + match self.resolver.resolve_path_in_type_ns(self.db.upcast(), path) { Some(it) => it, None => return (self.err_ty(), None), } }; + let Some(mod_path) = path.mod_path() else { + never!("resolver should always resolve lang item paths"); + return (self.err_ty(), None); + }; return match resolution { TypeNs::AdtId(AdtId::StructId(strukt)) => { let substs = ctx.substs_from_path(path, strukt.into(), true); @@ -900,7 +898,7 @@ impl<'a> InferenceContext<'a> { let generics = crate::utils::generics(self.db.upcast(), impl_id.into()); let substs = generics.placeholder_subst(self.db); let ty = self.db.impl_self_ty(impl_id).substitute(Interner, &substs); - self.resolve_variant_on_alias(ty, unresolved, path) + self.resolve_variant_on_alias(ty, unresolved, mod_path) } TypeNs::TypeAliasId(it) => { let container = it.lookup(self.db.upcast()).container; @@ -917,7 +915,7 @@ impl<'a> InferenceContext<'a> { let ty = TyBuilder::def_ty(self.db, it.into(), parent_subst) .fill_with_inference_vars(&mut self.table) .build(); - self.resolve_variant_on_alias(ty, unresolved, path) + self.resolve_variant_on_alias(ty, unresolved, mod_path) } TypeNs::AdtSelfType(_) => { // FIXME this could happen in array size expressions, once we're checking them @@ -953,9 +951,9 @@ impl<'a> InferenceContext<'a> { &mut self, ty: Ty, unresolved: Option, - path: &Path, + path: &ModPath, ) -> (Ty, Option) { - let remaining = unresolved.map(|x| path.segments().skip(x).len()).filter(|x| x > &0); + let remaining = unresolved.map(|x| path.segments()[x..].len()).filter(|x| x > &0); match remaining { None => { let variant = ty.as_adt().and_then(|(adt_id, _)| match adt_id { @@ -969,7 +967,7 @@ impl<'a> InferenceContext<'a> { (ty, variant) } Some(1) => { - let segment = path.mod_path().segments().last().unwrap(); + let segment = path.segments().last().unwrap(); // this could be an enum variant or associated type if let Some((AdtId::EnumId(enum_id), _)) = ty.as_adt() { let enum_data = self.db.enum_data(enum_id); diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs index ee186673ee..7bf227a27f 100644 --- a/crates/hir-ty/src/infer/expr.rs +++ b/crates/hir-ty/src/infer/expr.rs @@ -601,21 +601,21 @@ impl<'a> InferenceContext<'a> { let inner_ty = self.infer_expr_inner(*expr, &Expectation::none()); self.resolve_associated_type(inner_ty, self.resolve_future_future_output()) } - Expr::Try { expr } => { - let inner_ty = self.infer_expr_inner(*expr, &Expectation::none()); - if let Some(trait_) = self.resolve_lang_trait(LangItem::Try) { - if let Some(func) = self.db.trait_data(trait_).method_by_name(&name!(branch)) { - let subst = TyBuilder::subst_for_def(self.db, trait_, None) - .push(inner_ty.clone()) - .build(); - self.write_method_resolution(tgt_expr, func, subst.clone()); - } - let try_output = self.resolve_output_on(trait_); - 
self.resolve_associated_type(inner_ty, try_output) - } else { - self.err_ty() - } - } + // Expr::Try { expr } => { + // let inner_ty = self.infer_expr_inner(*expr, &Expectation::none()); + // if let Some(trait_) = self.resolve_lang_trait(LangItem::Try) { + // if let Some(func) = self.db.trait_data(trait_).method_by_name(&name!(branch)) { + // let subst = TyBuilder::subst_for_def(self.db, trait_, None) + // .push(inner_ty.clone()) + // .build(); + // self.write_method_resolution(tgt_expr, func, subst.clone()); + // } + // let try_output = self.resolve_output_on(trait_); + // self.resolve_associated_type(inner_ty, try_output) + // } else { + // self.err_ty() + // } + // } Expr::Cast { expr, type_ref } => { let cast_ty = self.make_ty(type_ref); // FIXME: propagate the "castable to" expectation diff --git a/crates/hir-ty/src/infer/path.rs b/crates/hir-ty/src/infer/path.rs index 2267fedaa8..266e410187 100644 --- a/crates/hir-ty/src/infer/path.rs +++ b/crates/hir-ty/src/infer/path.rs @@ -39,7 +39,7 @@ impl<'a> InferenceContext<'a> { } else { // FIXME: report error, unresolved first path segment let value_or_partial = - self.resolver.resolve_path_in_value_ns(self.db.upcast(), path.mod_path())?; + self.resolver.resolve_path_in_value_ns(self.db.upcast(), path)?; match value_or_partial { ResolveValueResult::ValueNs(it) => (it, None), diff --git a/crates/hir-ty/src/lower.rs b/crates/hir-ty/src/lower.rs index 23b15087e3..e7490087e7 100644 --- a/crates/hir-ty/src/lower.rs +++ b/crates/hir-ty/src/lower.rs @@ -25,12 +25,12 @@ use hir_def::{ TypeOrConstParamData, TypeParamProvenance, WherePredicate, WherePredicateTypeTarget, }, lang_item::{lang_attr, LangItem}, - path::{GenericArg, ModPath, Path, PathKind, PathSegment, PathSegments}, + path::{GenericArg, GenericArgs, ModPath, Path, PathKind, PathSegment, PathSegments}, resolver::{HasResolver, Resolver, TypeNs}, type_ref::{ConstRefOrPath, TraitBoundModifier, TraitRef as HirTraitRef, TypeBound, TypeRef}, - AdtId, AssocItemId, ConstId, ConstParamId, EnumId, EnumVariantId, FunctionId, GenericDefId, - HasModule, ImplId, ItemContainerId, LocalFieldId, Lookup, ModuleDefId, StaticId, StructId, - TraitId, TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId, VariantId, + AdtId, AssocItemId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, FunctionId, + GenericDefId, HasModule, ImplId, ItemContainerId, LocalFieldId, Lookup, ModuleDefId, StaticId, + StructId, TraitId, TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId, VariantId, }; use hir_expand::{name::Name, ExpandResult}; use intern::Interned; @@ -425,11 +425,10 @@ impl<'a> TyLoweringContext<'a> { if path.segments().len() > 1 { return None; } - let resolution = - match self.resolver.resolve_path_in_type_ns(self.db.upcast(), path.mod_path()) { - Some((it, None)) => it, - _ => return None, - }; + let resolution = match self.resolver.resolve_path_in_type_ns(self.db.upcast(), path) { + Some((it, None)) => it, + _ => return None, + }; match resolution { TypeNs::GenericParam(param_id) => Some(param_id.into()), _ => None, @@ -608,7 +607,7 @@ impl<'a> TyLoweringContext<'a> { } let (resolution, remaining_index) = - match self.resolver.resolve_path_in_type_ns(self.db.upcast(), path.mod_path()) { + match self.resolver.resolve_path_in_type_ns(self.db.upcast(), path) { Some(it) => it, None => return (TyKind::Error.intern(Interner), None), }; @@ -716,7 +715,7 @@ impl<'a> TyLoweringContext<'a> { resolved: ValueTyDefId, infer_args: bool, ) -> Substitution { - let last = path.segments().last().expect("path should 
have at least one segment"); + let last = path.segments().last(); let (segment, generic_def) = match resolved { ValueTyDefId::FunctionId(it) => (last, Some(it.into())), ValueTyDefId::StructId(it) => (last, Some(it.into())), @@ -732,13 +731,20 @@ impl<'a> TyLoweringContext<'a> { let len = path.segments().len(); let penultimate = len.checked_sub(2).and_then(|idx| path.segments().get(idx)); let segment = match penultimate { - Some(segment) if segment.args_and_bindings.is_some() => segment, + Some(segment) if segment.args_and_bindings.is_some() => Some(segment), _ => last, }; (segment, Some(var.parent.into())) } }; - self.substs_from_path_segment(segment, generic_def, infer_args, None) + if let Some(segment) = segment { + self.substs_from_path_segment(segment, generic_def, infer_args, None) + } else if let Some(generic_def) = generic_def { + // lang item + self.substs_from_args_and_bindings(None, Some(generic_def), infer_args, None) + } else { + Substitution::empty(Interner) + } } fn substs_from_path_segment( @@ -747,6 +753,21 @@ impl<'a> TyLoweringContext<'a> { def: Option, infer_args: bool, explicit_self_ty: Option, + ) -> Substitution { + self.substs_from_args_and_bindings( + segment.args_and_bindings, + def, + infer_args, + explicit_self_ty, + ) + } + + fn substs_from_args_and_bindings( + &self, + args_and_bindings: Option<&GenericArgs>, + def: Option, + infer_args: bool, + explicit_self_ty: Option, ) -> Substitution { // Remember that the item's own generic args come before its parent's. let mut substs = Vec::new(); @@ -780,7 +801,7 @@ impl<'a> TyLoweringContext<'a> { }; let mut had_explicit_args = false; - if let Some(generic_args) = &segment.args_and_bindings { + if let Some(generic_args) = &args_and_bindings { if !generic_args.has_self_type { fill_self_params(); } @@ -879,12 +900,11 @@ impl<'a> TyLoweringContext<'a> { path: &Path, explicit_self_ty: Option, ) -> Option { - let resolved = - match self.resolver.resolve_path_in_type_ns_fully(self.db.upcast(), path.mod_path())? { - // FIXME(trait_alias): We need to handle trait alias here. - TypeNs::TraitId(tr) => tr, - _ => return None, - }; + let resolved = match self.resolver.resolve_path_in_type_ns_fully(self.db.upcast(), path)? { + // FIXME(trait_alias): We need to handle trait alias here. + TypeNs::TraitId(tr) => tr, + _ => return None, + }; let segment = path.segments().last().expect("path should have at least one segment"); Some(self.lower_trait_ref_from_resolved_path(resolved, segment, explicit_self_ty)) } @@ -1381,9 +1401,7 @@ pub(crate) fn generic_predicates_for_param_query( Some(it) => it, None => return true, }; - let tr = match resolver - .resolve_path_in_type_ns_fully(db.upcast(), path.mod_path()) - { + let tr = match resolver.resolve_path_in_type_ns_fully(db.upcast(), path) { Some(TypeNs::TraitId(tr)) => tr, _ => return false, }; @@ -1423,6 +1441,17 @@ pub(crate) fn generic_predicates_for_param_recover( Arc::new([]) } +pub(crate) fn trait_environment_for_body_query( + db: &dyn HirDatabase, + def: DefWithBodyId, +) -> Arc { + let Some(def) = def.as_generic_def_id() else { + let krate = def.module(db.upcast()).krate(); + return Arc::new(TraitEnvironment::empty(krate)); + }; + db.trait_environment(def) +} + pub(crate) fn trait_environment_query( db: &dyn HirDatabase, def: GenericDefId, @@ -1948,7 +1977,7 @@ pub(crate) fn generic_arg_to_chalk<'a, T>( // as types. Maybe here is not the best place to do it, but // it works. 
if let TypeRef::Path(p) = t { - let p = p.mod_path(); + let p = p.mod_path()?; if p.kind == PathKind::Plain { if let [n] = p.segments() { let c = ConstRefOrPath::Path(n.clone()); @@ -1977,8 +2006,15 @@ pub(crate) fn const_or_path_to_chalk( ConstRefOrPath::Scalar(s) => intern_const_ref(db, s, expected_ty, resolver.krate()), ConstRefOrPath::Path(n) => { let path = ModPath::from_segments(PathKind::Plain, Some(n.clone())); - path_to_const(db, resolver, &path, mode, args, debruijn) - .unwrap_or_else(|| unknown_const(expected_ty)) + path_to_const( + db, + resolver, + &Path::from_known_path_with_no_generic(path), + mode, + args, + debruijn, + ) + .unwrap_or_else(|| unknown_const(expected_ty)) } } } diff --git a/crates/hir-ty/src/mir/eval.rs b/crates/hir-ty/src/mir/eval.rs index c5d843d9eb..1ff6e10602 100644 --- a/crates/hir-ty/src/mir/eval.rs +++ b/crates/hir-ty/src/mir/eval.rs @@ -1,6 +1,6 @@ //! This module provides a MIR interpreter, which is used in const eval. -use std::{borrow::Cow, collections::HashMap, iter}; +use std::{borrow::Cow, collections::HashMap, iter, sync::Arc}; use base_db::CrateId; use chalk_ir::{ @@ -24,7 +24,8 @@ use crate::{ layout::layout_of_ty, mapping::from_chalk, method_resolution::lookup_impl_method, - CallableDefId, Const, ConstScalar, Interner, MemoryMap, Substitution, Ty, TyBuilder, TyExt, + CallableDefId, Const, ConstScalar, Interner, MemoryMap, Substitution, TraitEnvironment, Ty, + TyBuilder, TyExt, }; use super::{ @@ -34,6 +35,7 @@ use super::{ pub struct Evaluator<'a> { db: &'a dyn HirDatabase, + trait_env: Arc, stack: Vec, heap: Vec, crate_id: CrateId, @@ -217,8 +219,7 @@ pub fn interpret_mir( assert_placeholder_ty_is_unused: bool, ) -> Result { let ty = body.locals[return_slot()].ty.clone(); - let mut evaluator = - Evaluator::new(db, body.owner.module(db.upcast()).krate(), assert_placeholder_ty_is_unused); + let mut evaluator = Evaluator::new(db, body, assert_placeholder_ty_is_unused); let bytes = evaluator.interpret_mir_with_no_arg(&body)?; let memory_map = evaluator.create_memory_map( &bytes, @@ -231,13 +232,16 @@ pub fn interpret_mir( impl Evaluator<'_> { pub fn new<'a>( db: &'a dyn HirDatabase, - crate_id: CrateId, + body: &MirBody, assert_placeholder_ty_is_unused: bool, ) -> Evaluator<'a> { + let crate_id = body.owner.module(db.upcast()).krate(); + let trait_env = db.trait_environment_for_body(body.owner); Evaluator { stack: vec![0], heap: vec![0], db, + trait_env, crate_id, assert_placeholder_ty_is_unused, stack_depth_limit: 100, @@ -500,15 +504,9 @@ impl Evaluator<'_> { } else if let Some(x) = self.detect_lang_function(def) { self.exec_lang_item(x, arg_bytes)? 
} else { - let trait_env = { - let Some(d) = body.owner.as_generic_def_id() else { - not_supported!("trait resolving in non generic def id"); - }; - self.db.trait_environment(d) - }; let (imp, generic_args) = lookup_impl_method( self.db, - trait_env, + self.trait_env.clone(), def, generic_args.clone(), ); @@ -584,7 +582,7 @@ impl Evaluator<'_> { .to_owned()); } Terminator::Unreachable => { - return Err(MirEvalError::UndefinedBehavior("unreachable executed")) + return Err(MirEvalError::UndefinedBehavior("unreachable executed")); } _ => not_supported!("unknown terminator"), } @@ -710,8 +708,24 @@ impl Evaluator<'_> { let ty = self.place_ty(p, locals)?; let bytes = self.eval_place(p, locals)?.get(&self)?; let layout = self.layout(&ty)?; + let enum_id = 'b: { + match ty.kind(Interner) { + TyKind::Adt(e, _) => match e.0 { + AdtId::EnumId(e) => break 'b e, + _ => (), + }, + _ => (), + } + return Ok(Owned(0u128.to_le_bytes().to_vec())); + }; match layout.variants { - Variants::Single { .. } => Owned(0u128.to_le_bytes().to_vec()), + Variants::Single { index } => { + let r = self.db.const_eval_discriminant(EnumVariantId { + parent: enum_id, + local_id: index.0, + })?; + Owned(r.to_le_bytes().to_vec()) + } Variants::Multiple { tag, tag_encoding, .. } => { let Some(target_data_layout) = self.db.target_data_layout(self.crate_id) else { not_supported!("missing target data layout"); @@ -727,13 +741,6 @@ impl Evaluator<'_> { let tag = &bytes[offset..offset + size]; let candidate_discriminant = i128::from_le_bytes(pad16(tag, false)) .wrapping_sub(niche_start as i128); - let enum_id = match ty.kind(Interner) { - TyKind::Adt(e, _) => match e.0 { - AdtId::EnumId(e) => e, - _ => not_supported!("Non enum with multi variant layout"), - }, - _ => not_supported!("Non adt with multi variant layout"), - }; let enum_data = self.db.enum_data(enum_id); let result = 'b: { for (local_id, _) in enum_data.variants.iter() { @@ -790,8 +797,8 @@ impl Evaluator<'_> { Owned(result) } AggregateKind::Adt(x, subst) => { - let (size, variant_layout, tag) = - self.layout_of_variant(*x, subst.clone(), locals)?; + let subst = self.subst_filler(subst, locals); + let (size, variant_layout, tag) = self.layout_of_variant(*x, subst, locals)?; Owned(self.make_by_layout(size, &variant_layout, tag, values, locals)?) 
} }, @@ -1124,12 +1131,13 @@ impl Evaluator<'_> { } fn detect_lang_function(&self, def: FunctionId) -> Option { + use LangItem::*; let candidate = lang_attr(self.db.upcast(), def)?; - // filter normal lang functions out - if [LangItem::IntoIterIntoIter, LangItem::IteratorNext].contains(&candidate) { - return None; + // We want to execute these functions with special logic + if [PanicFmt, BeginPanic, SliceLen].contains(&candidate) { + return Some(candidate); } - Some(candidate) + None } fn create_memory_map(&self, bytes: &[u8], ty: &Ty, locals: &Locals<'_>) -> Result { diff --git a/crates/hir-ty/src/mir/lower.rs b/crates/hir-ty/src/mir/lower.rs index c4dd7c0ace..3b9a31c772 100644 --- a/crates/hir-ty/src/mir/lower.rs +++ b/crates/hir-ty/src/mir/lower.rs @@ -13,7 +13,7 @@ use hir_def::{ layout::LayoutError, path::Path, resolver::{resolver_for_expr, ResolveValueResult, ValueNs}, - DefWithBodyId, EnumVariantId, HasModule, + DefWithBodyId, EnumVariantId, HasModule, ItemContainerId, TraitId, }; use hir_expand::name::Name; use la_arena::ArenaMap; @@ -50,6 +50,8 @@ pub enum MirLowerError { ConstEvalError(Box), LayoutError(LayoutError), IncompleteExpr, + /// Trying to lower a trait function, instead of an implementation + TraitFunctionDefinition(TraitId, Name), UnresolvedName(String), RecordLiteralWithoutPath, UnresolvedMethod, @@ -200,12 +202,21 @@ impl MirLowerCtx<'_> { mut current: BasicBlockId, ) -> Result> { match &self.body.exprs[expr_id] { - Expr::Missing => Err(MirLowerError::IncompleteExpr), + Expr::Missing => { + if let DefWithBodyId::FunctionId(f) = self.owner { + let assoc = self.db.lookup_intern_function(f); + if let ItemContainerId::TraitId(t) = assoc.container { + let name = &self.db.function_data(f).name; + return Err(MirLowerError::TraitFunctionDefinition(t, name.clone())); + } + } + Err(MirLowerError::IncompleteExpr) + }, Expr::Path(p) => { let unresolved_name = || MirLowerError::unresolved_path(self.db, p); let resolver = resolver_for_expr(self.db.upcast(), self.owner, expr_id); let pr = resolver - .resolve_path_in_value_ns(self.db.upcast(), p.mod_path()) + .resolve_path_in_value_ns(self.db.upcast(), p) .ok_or_else(unresolved_name)?; let pr = match pr { ResolveValueResult::ValueNs(v) => v, @@ -608,7 +619,6 @@ impl MirLowerCtx<'_> { } } Expr::Await { .. } => not_supported!("await"), - Expr::Try { .. } => not_supported!("? operator"), Expr::Yeet { .. } => not_supported!("yeet"), Expr::TryBlock { .. } => not_supported!("try block"), Expr::Async { .. 
} => not_supported!("async block"), diff --git a/crates/hir-ty/src/mir/lower/as_place.rs b/crates/hir-ty/src/mir/lower/as_place.rs index fe8147dcd3..b683dd7f90 100644 --- a/crates/hir-ty/src/mir/lower/as_place.rs +++ b/crates/hir-ty/src/mir/lower/as_place.rs @@ -125,7 +125,7 @@ impl MirLowerCtx<'_> { match &self.body.exprs[expr_id] { Expr::Path(p) => { let resolver = resolver_for_expr(self.db.upcast(), self.owner, expr_id); - let Some(pr) = resolver.resolve_path_in_value_ns(self.db.upcast(), p.mod_path()) else { + let Some(pr) = resolver.resolve_path_in_value_ns(self.db.upcast(), p) else { return Err(MirLowerError::unresolved_path(self.db, p)); }; let pr = match pr { diff --git a/crates/hir-ty/src/mir/pretty.rs b/crates/hir-ty/src/mir/pretty.rs index ffc08b7e34..70364d0882 100644 --- a/crates/hir-ty/src/mir/pretty.rs +++ b/crates/hir-ty/src/mir/pretty.rs @@ -77,6 +77,7 @@ impl Display for LocalName { impl<'a> MirPrettyCtx<'a> { fn for_body(&mut self) { + wln!(self, "// {:?}", self.body.owner); self.with_block(|this| { this.locals(); wln!(this); diff --git a/crates/hir-ty/src/utils.rs b/crates/hir-ty/src/utils.rs index 34d957e26e..a6967414aa 100644 --- a/crates/hir-ty/src/utils.rs +++ b/crates/hir-ty/src/utils.rs @@ -130,7 +130,7 @@ fn direct_super_traits(db: &dyn DefDatabase, trait_: TraitId, cb: impl FnMut(Tra WherePredicate::Lifetime { .. } => None, }) .filter(|(_, bound_modifier)| matches!(bound_modifier, TraitBoundModifier::None)) - .filter_map(|(path, _)| match resolver.resolve_path_in_type_ns_fully(db, path.mod_path()) { + .filter_map(|(path, _)| match resolver.resolve_path_in_type_ns_fully(db, path) { Some(TypeNs::TraitId(t)) => Some(t), _ => None, }) diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs index 407ba6f658..9709970db1 100644 --- a/crates/hir/src/semantics.rs +++ b/crates/hir/src/semantics.rs @@ -1076,10 +1076,7 @@ impl<'db> SemanticsImpl<'db> { let hygiene = hir_expand::hygiene::Hygiene::new(self.db.upcast(), analyze.file_id); let ctx = body::LowerCtx::with_hygiene(self.db.upcast(), &hygiene); let hir_path = Path::from_src(path.clone(), &ctx)?; - match analyze - .resolver - .resolve_path_in_type_ns_fully(self.db.upcast(), hir_path.mod_path())? - { + match analyze.resolver.resolve_path_in_type_ns_fully(self.db.upcast(), &hir_path)? { TypeNs::TraitId(id) => Some(Trait { id }), _ => None, } diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs index c24d196e1b..5b18e44572 100644 --- a/crates/hir/src/source_analyzer.rs +++ b/crates/hir/src/source_analyzer.rs @@ -420,7 +420,10 @@ impl SourceAnalyzer { None } else { // Shorthand syntax, resolve to the local - let path = ModPath::from_segments(PathKind::Plain, once(local_name.clone())); + let path = Path::from_known_path_with_no_generic(ModPath::from_segments( + PathKind::Plain, + once(local_name.clone()), + )); match self.resolver.resolve_path_in_value_ns_fully(db.upcast(), &path) { Some(ValueNs::LocalBinding(binding_id)) => { Some(Local { binding_id, parent: self.resolver.body_owner()? 
}) @@ -461,7 +464,7 @@ impl SourceAnalyzer { ) -> Option { let ctx = body::LowerCtx::new(db.upcast(), macro_call.file_id); let path = macro_call.value.path().and_then(|ast| Path::from_src(ast, &ctx))?; - self.resolver.resolve_path_as_macro(db.upcast(), path.mod_path()).map(|it| it.into()) + self.resolver.resolve_path_as_macro(db.upcast(), path.mod_path()?).map(|it| it.into()) } pub(crate) fn resolve_bind_pat_to_const( @@ -801,15 +804,11 @@ impl SourceAnalyzer { func: FunctionId, substs: Substitution, ) -> FunctionId { - let krate = self.resolver.krate(); let owner = match self.resolver.body_owner() { Some(it) => it, None => return func, }; - let env = owner.as_generic_def_id().map_or_else( - || Arc::new(hir_ty::TraitEnvironment::empty(krate)), - |d| db.trait_environment(d), - ); + let env = db.trait_environment_for_body(owner); method_resolution::lookup_impl_method(db, env, func, substs).0 } @@ -819,15 +818,11 @@ impl SourceAnalyzer { const_id: ConstId, subs: Substitution, ) -> ConstId { - let krate = self.resolver.krate(); let owner = match self.resolver.body_owner() { Some(it) => it, None => return const_id, }; - let env = owner.as_generic_def_id().map_or_else( - || Arc::new(hir_ty::TraitEnvironment::empty(krate)), - |d| db.trait_environment(d), - ); + let env = db.trait_environment_for_body(owner); method_resolution::lookup_impl_const(db, env, const_id, subs).0 } @@ -946,7 +941,7 @@ pub(crate) fn resolve_hir_path_as_macro( resolver: &Resolver, path: &Path, ) -> Option { - resolver.resolve_path_as_macro(db.upcast(), path.mod_path()).map(Into::into) + resolver.resolve_path_as_macro(db.upcast(), path.mod_path()?).map(Into::into) } fn resolve_hir_path_( @@ -962,8 +957,7 @@ fn resolve_hir_path_( res.map(|ty_ns| (ty_ns, path.segments().first())) } None => { - let (ty, remaining_idx) = - resolver.resolve_path_in_type_ns(db.upcast(), path.mod_path())?; + let (ty, remaining_idx) = resolver.resolve_path_in_type_ns(db.upcast(), path)?; match remaining_idx { Some(remaining_idx) => { if remaining_idx + 1 == path.segments().len() { @@ -1019,7 +1013,7 @@ fn resolve_hir_path_( let body_owner = resolver.body_owner(); let values = || { - resolver.resolve_path_in_value_ns_fully(db.upcast(), path.mod_path()).and_then(|val| { + resolver.resolve_path_in_value_ns_fully(db.upcast(), path).and_then(|val| { let res = match val { ValueNs::LocalBinding(binding_id) => { let var = Local { parent: body_owner?, binding_id }; @@ -1039,14 +1033,14 @@ fn resolve_hir_path_( let items = || { resolver - .resolve_module_path_in_items(db.upcast(), path.mod_path()) + .resolve_module_path_in_items(db.upcast(), path.mod_path()?) .take_types() .map(|it| PathResolution::Def(it.into())) }; let macros = || { resolver - .resolve_path_as_macro(db.upcast(), path.mod_path()) + .resolve_path_as_macro(db.upcast(), path.mod_path()?) .map(|def| PathResolution::Def(ModuleDef::Macro(def.into()))) }; @@ -1074,7 +1068,7 @@ fn resolve_hir_path_qualifier( path: &Path, ) -> Option { resolver - .resolve_path_in_type_ns_fully(db.upcast(), path.mod_path()) + .resolve_path_in_type_ns_fully(db.upcast(), &path) .map(|ty| match ty { TypeNs::SelfType(it) => PathResolution::SelfType(it.into()), TypeNs::GenericParam(id) => PathResolution::TypeParam(id.into()), @@ -1089,7 +1083,7 @@ fn resolve_hir_path_qualifier( }) .or_else(|| { resolver - .resolve_module_path_in_items(db.upcast(), path.mod_path()) + .resolve_module_path_in_items(db.upcast(), path.mod_path()?) 
.take_types() .map(|it| PathResolution::Def(it.into())) }) diff --git a/crates/ide-assists/src/handlers/replace_try_expr_with_match.rs b/crates/ide-assists/src/handlers/replace_try_expr_with_match.rs index 38fccb3382..2e26f59d03 100644 --- a/crates/ide-assists/src/handlers/replace_try_expr_with_match.rs +++ b/crates/ide-assists/src/handlers/replace_try_expr_with_match.rs @@ -20,7 +20,7 @@ use crate::assist_context::{AssistContext, Assists}; // Replaces a `try` expression with a `match` expression. // // ``` -// # //- minicore:option +// # //- minicore: try, option // fn handle() { // let pat = Some(true)$0?; // } @@ -111,7 +111,7 @@ mod tests { check_assist( replace_try_expr_with_match, r#" -//- minicore:option +//- minicore: try, option fn test() { let pat = Some(true)$0?; } @@ -132,7 +132,7 @@ fn test() { check_assist( replace_try_expr_with_match, r#" -//- minicore:result +//- minicore: try, from, result fn test() { let pat = Ok(true)$0?; } diff --git a/crates/ide-assists/src/tests/generated.rs b/crates/ide-assists/src/tests/generated.rs index e5a8d675a9..aff11367de 100644 --- a/crates/ide-assists/src/tests/generated.rs +++ b/crates/ide-assists/src/tests/generated.rs @@ -2352,7 +2352,7 @@ fn doctest_replace_try_expr_with_match() { check_doc_test( "replace_try_expr_with_match", r#####" -//- minicore:option +//- minicore: try, option fn handle() { let pat = Some(true)$0?; } diff --git a/crates/ide/src/hover/tests.rs b/crates/ide/src/hover/tests.rs index 57bf0f9ad5..70ec915e96 100644 --- a/crates/ide/src/hover/tests.rs +++ b/crates/ide/src/hover/tests.rs @@ -5009,7 +5009,7 @@ fn foo() { fn hover_try_expr_res() { check_hover_range( r#" -//- minicore:result +//- minicore: try, from, result struct FooError; fn foo() -> Result<(), FooError> { @@ -5023,7 +5023,7 @@ fn foo() -> Result<(), FooError> { ); check_hover_range( r#" -//- minicore:result +//- minicore: try, from, result struct FooError; struct BarError; @@ -5044,6 +5044,7 @@ fn foo() -> Result<(), FooError> { fn hover_try_expr() { check_hover_range( r#" +//- minicore: try struct NotResult(T, U); struct Short; struct Looooong; @@ -5061,6 +5062,7 @@ fn foo() -> NotResult<(), Looooong> { ); check_hover_range( r#" +//- minicore: try struct NotResult(T, U); struct Short; struct Looooong; @@ -5092,7 +5094,7 @@ fn foo() -> Option<()> { "#, expect![[r#" ```rust - as Try>::Output + i32 ```"#]], ); } diff --git a/crates/test-utils/src/minicore.rs b/crates/test-utils/src/minicore.rs index ca6de4061a..8dd9f306c8 100644 --- a/crates/test-utils/src/minicore.rs +++ b/crates/test-utils/src/minicore.rs @@ -181,7 +181,7 @@ pub mod convert { } // endregion:as_ref // region:infallible - pub enum Infallibe {} + pub enum Infallible {} // endregion:infallible } @@ -380,11 +380,15 @@ pub mod ops { // endregion:fn // region:try mod try_ { + use super::super::convert::Infallible; + pub enum ControlFlow { + #[lang = "Continue"] Continue(C), + #[lang = "Break"] Break(B), } - pub trait FromResidual { + pub trait FromResidual::Residual> { #[lang = "from_residual"] fn from_residual(residual: R) -> Self; } @@ -400,14 +404,66 @@ pub mod ops { impl Try for ControlFlow { type Output = C; - type Residual = ControlFlow; + type Residual = ControlFlow; fn from_output(output: Self::Output) -> Self {} fn branch(self) -> ControlFlow {} } impl FromResidual for ControlFlow { - fn from_residual(residual: ControlFlow) -> Self {} + fn from_residual(residual: ControlFlow) -> Self {} } + // region:option + impl Try for Option { + type Output = T; + type Residual = Option; + fn 
from_output(output: Self::Output) -> Self { + Some(output) + } + fn branch(self) -> ControlFlow { + match self { + Some(x) => ControlFlow::Continue(x), + None => ControlFlow::Break(None), + } + } + } + + impl FromResidual for Option { + fn from_residual(x: Option) -> Self { + match x { + None => None, + } + } + } + // endregion:option + // region:result + // region:from + use super::super::convert::From; + + impl Try for Result { + type Output = T; + type Residual = Result; + + fn from_output(output: Self::Output) -> Self { + Ok(output) + } + + fn branch(self) -> ControlFlow { + match self { + Ok(v) => ControlFlow::Continue(v), + Err(e) => ControlFlow::Break(Err(e)), + } + } + } + + impl> FromResidual> for Result { + fn from_residual(residual: Result) -> Self { + match residual { + Err(e) => Err(From::from(e)), + } + } + } + // endregion:from + // endregion:result } pub use self::try_::{ControlFlow, FromResidual, Try}; // endregion:try From 1b85b43e6f5283963e901597c67e89b82d90df01 Mon Sep 17 00:00:00 2001 From: hkalbasi Date: Wed, 8 Mar 2023 23:09:58 +0330 Subject: [PATCH 1183/1945] add mir-stats to analysis-stats --- .../rust-analyzer/src/cli/analysis_stats.rs | 23 +++++++++++++++++++ crates/rust-analyzer/src/cli/flags.rs | 3 +++ 2 files changed, 26 insertions(+) diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs index 6ce1de5d32..e1504743bf 100644 --- a/crates/rust-analyzer/src/cli/analysis_stats.rs +++ b/crates/rust-analyzer/src/cli/analysis_stats.rs @@ -2,6 +2,7 @@ //! errors. use std::{ + collections::HashMap, env, time::{SystemTime, UNIX_EPOCH}, }; @@ -153,6 +154,10 @@ impl flags::AnalysisStats { self.run_inference(&host, db, &vfs, &funcs, verbosity); } + if self.mir_stats { + self.lower_mir(db, &funcs); + } + let total_span = analysis_sw.elapsed(); eprintln!("{:<20} {total_span}", "Total:"); report_metric("total time", total_span.time.as_millis() as u64, "ms"); @@ -189,6 +194,24 @@ impl flags::AnalysisStats { Ok(()) } + fn lower_mir(&self, db: &RootDatabase, funcs: &[Function]) { + let all = funcs.len(); + let mut fail = 0; + let mut h: HashMap = HashMap::new(); + for f in funcs { + let f = FunctionId::from(*f); + let Err(e) = db.mir_body(f.into()) else { + continue; + }; + let es = format!("{:?}", e); + *h.entry(es).or_default() += 1; + fail += 1; + } + let h = h.into_iter().sorted_by_key(|x| x.1).collect::>(); + eprintln!("Mir failed reasons: {:#?}", h); + eprintln!("Mir failed bodies: {fail} ({}%)", fail * 100 / all); + } + fn run_inference( &self, host: &AnalysisHost, diff --git a/crates/rust-analyzer/src/cli/flags.rs b/crates/rust-analyzer/src/cli/flags.rs index 770612cc94..b085a0a892 100644 --- a/crates/rust-analyzer/src/cli/flags.rs +++ b/crates/rust-analyzer/src/cli/flags.rs @@ -66,6 +66,8 @@ xflags::xflags! { optional --memory-usage /// Print the total length of all source and macro files (whitespace is not counted). optional --source-stats + /// Print the number of bodies that fail to lower to mir, in addition to failed reasons. + optional --mir-stats /// Only analyze items matching this path. 
optional -o, --only path: String @@ -172,6 +174,7 @@ pub struct AnalysisStats { pub parallel: bool, pub memory_usage: bool, pub source_stats: bool, + pub mir_stats: bool, pub only: Option, pub with_deps: bool, pub no_sysroot: bool, From a063f000ff99989406abd1e6f58a9c2b576ba41a Mon Sep 17 00:00:00 2001 From: hkalbasi Date: Sat, 11 Mar 2023 21:43:53 +0330 Subject: [PATCH 1184/1945] Support function pointer MIR lowering --- crates/hir-expand/src/name.rs | 2 + crates/hir-ty/src/consteval/tests.rs | 102 ++++++ crates/hir-ty/src/infer.rs | 13 +- crates/hir-ty/src/infer/coerce.rs | 17 +- crates/hir-ty/src/infer/expr.rs | 35 +- crates/hir-ty/src/infer/unify.rs | 36 ++- crates/hir-ty/src/lib.rs | 6 +- crates/hir-ty/src/method_resolution.rs | 8 +- crates/hir-ty/src/mir/eval.rs | 299 ++++++++++++------ crates/hir-ty/src/mir/lower.rs | 46 +-- crates/hir-ty/src/mir/pretty.rs | 4 +- crates/hir-ty/src/tests/coercion.rs | 30 ++ crates/hir-ty/src/traits.rs | 9 + .../src/handlers/mutability_errors.rs | 25 ++ 14 files changed, 468 insertions(+), 164 deletions(-) diff --git a/crates/hir-expand/src/name.rs b/crates/hir-expand/src/name.rs index c3462beac7..71eb35d9df 100644 --- a/crates/hir-expand/src/name.rs +++ b/crates/hir-expand/src/name.rs @@ -343,6 +343,8 @@ pub mod known { feature, // known methods of lang items call_once, + call_mut, + call, eq, ne, ge, diff --git a/crates/hir-ty/src/consteval/tests.rs b/crates/hir-ty/src/consteval/tests.rs index 3bec2ee88b..8a9a5d254d 100644 --- a/crates/hir-ty/src/consteval/tests.rs +++ b/crates/hir-ty/src/consteval/tests.rs @@ -906,6 +906,108 @@ fn or_pattern() { ); } +#[test] +fn function_pointer() { + check_number( + r#" + fn add2(x: u8) -> u8 { + x + 2 + } + const GOAL: u8 = { + let plus2 = add2; + plus2(3) + }; + "#, + 5, + ); + check_number( + r#" + fn add2(x: u8) -> u8 { + x + 2 + } + const GOAL: u8 = { + let plus2: fn(u8) -> u8 = add2; + plus2(3) + }; + "#, + 5, + ); + check_number( + r#" + //- minicore: coerce_unsized, index, slice + fn add2(x: u8) -> u8 { + x + 2 + } + fn mult3(x: u8) -> u8 { + x * 3 + } + const GOAL: u8 = { + let x = [add2, mult3]; + x[0](1) + x[1](5) + }; + "#, + 18, + ); +} + +#[test] +fn function_traits() { + check_number( + r#" + //- minicore: fn + fn add2(x: u8) -> u8 { + x + 2 + } + fn call(f: impl Fn(u8) -> u8, x: u8) -> u8 { + f(x) + } + fn call_mut(mut f: impl FnMut(u8) -> u8, x: u8) -> u8 { + f(x) + } + fn call_once(f: impl FnOnce(u8) -> u8, x: u8) -> u8 { + f(x) + } + const GOAL: u8 = call(add2, 3) + call_mut(add2, 3) + call_once(add2, 3); + "#, + 15, + ); + check_number( + r#" + //- minicore: fn + fn add2(x: u8) -> u8 { + x + 2 + } + fn call(f: impl Fn(u8) -> u8, x: u8) -> u8 { + f(x) + } + fn call_mut(mut f: impl FnMut(u8) -> u8, x: u8) -> u8 { + f(x) + } + fn call_once(f: impl FnOnce(u8) -> u8, x: u8) -> u8 { + f(x) + } + const GOAL: u8 = { + let add2: fn(u8) -> u8 = add2; + call(add2, 3) + call_mut(add2, 3) + call_once(add2, 3) + }; + "#, + 15, + ); + check_number( + r#" + //- minicore: fn + fn add2(x: u8) -> u8 { + x + 2 + } + fn call(f: &&&&&impl Fn(u8) -> u8, x: u8) -> u8 { + f(x) + } + const GOAL: u8 = call(&&&&&add2, 3); + "#, + 5, + ); +} + #[test] fn array_and_index() { check_number( diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs index 00b5f7948a..06d74215ff 100644 --- a/crates/hir-ty/src/infer.rs +++ b/crates/hir-ty/src/infer.rs @@ -38,9 +38,9 @@ use stdx::{always, never}; use crate::{ db::HirDatabase, fold_tys, fold_tys_and_consts, infer::coerce::CoerceMany, - lower::ImplTraitLoweringMode, 
to_assoc_type_id, AliasEq, AliasTy, Const, DomainGoal, - GenericArg, Goal, ImplTraitId, InEnvironment, Interner, ProjectionTy, RpitId, Substitution, - TraitEnvironment, TraitRef, Ty, TyBuilder, TyExt, TyKind, + lower::ImplTraitLoweringMode, static_lifetime, to_assoc_type_id, AliasEq, AliasTy, Const, + DomainGoal, GenericArg, Goal, ImplTraitId, InEnvironment, Interner, ProjectionTy, RpitId, + Substitution, TraitEnvironment, TraitRef, Ty, TyBuilder, TyExt, TyKind, }; // This lint has a false positive here. See the link below for details. @@ -273,6 +273,13 @@ pub struct Adjustment { pub target: Ty, } +impl Adjustment { + pub fn borrow(m: Mutability, ty: Ty) -> Self { + let ty = TyKind::Ref(m, static_lifetime(), ty).intern(Interner); + Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(m)), target: ty } + } +} + #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] pub enum Adjust { /// Go from ! to any type. diff --git a/crates/hir-ty/src/infer/coerce.rs b/crates/hir-ty/src/infer/coerce.rs index 48c9153026..6e899249b6 100644 --- a/crates/hir-ty/src/infer/coerce.rs +++ b/crates/hir-ty/src/infer/coerce.rs @@ -51,11 +51,12 @@ fn success( pub(super) struct CoerceMany { expected_ty: Ty, final_ty: Option, + expressions: Vec, } impl CoerceMany { pub(super) fn new(expected: Ty) -> Self { - CoerceMany { expected_ty: expected, final_ty: None } + CoerceMany { expected_ty: expected, final_ty: None, expressions: vec![] } } /// Returns the "expected type" with which this coercion was @@ -125,8 +126,15 @@ impl CoerceMany { let result1 = ctx.table.coerce_inner(self.merged_ty(), &target_ty); let result2 = ctx.table.coerce_inner(expr_ty.clone(), &target_ty); if let (Ok(result1), Ok(result2)) = (result1, result2) { - ctx.table.register_infer_ok(result1); - ctx.table.register_infer_ok(result2); + ctx.table.register_infer_ok(InferOk { value: (), goals: result1.goals }); + for &e in &self.expressions { + ctx.write_expr_adj(e, result1.value.0.clone()); + } + ctx.table.register_infer_ok(InferOk { value: (), goals: result2.goals }); + if let Some(expr) = expr { + ctx.write_expr_adj(expr, result2.value.0); + self.expressions.push(expr); + } return self.final_ty = Some(target_ty); } } @@ -148,6 +156,9 @@ impl CoerceMany { } cov_mark::hit!(coerce_merge_fail_fallback); } + if let Some(expr) = expr { + self.expressions.push(expr); + } } } diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs index 7bf227a27f..ca285f2fec 100644 --- a/crates/hir-ty/src/infer/expr.rs +++ b/crates/hir-ty/src/infer/expr.rs @@ -34,6 +34,7 @@ use crate::{ method_resolution::{self, lang_items_for_bin_op, VisibleFromModule}, primitive::{self, UintTy}, static_lifetime, to_chalk_trait_id, + traits::FnTrait, utils::{generics, Generics}, Adjust, Adjustment, AdtId, AutoBorrow, Binders, CallableDefId, FnPointer, FnSig, FnSubst, Interner, Rawness, Scalar, Substitution, TraitRef, Ty, TyBuilder, TyExt, @@ -385,16 +386,32 @@ impl<'a> InferenceContext<'a> { || res.is_none(); let (param_tys, ret_ty) = match res { Some((func, params, ret_ty)) => { - let adjustments = auto_deref_adjust_steps(&derefs); - // FIXME: Handle call adjustments for Fn/FnMut - self.write_expr_adj(*callee, adjustments); - if let Some((trait_, func)) = func { - let subst = TyBuilder::subst_for_def(self.db, trait_, None) - .push(callee_ty.clone()) - .push(TyBuilder::tuple_with(params.iter().cloned())) - .build(); - self.write_method_resolution(tgt_expr, func, subst.clone()); + let mut adjustments = auto_deref_adjust_steps(&derefs); + if let Some(fn_x) = func { + match 
fn_x { + FnTrait::FnOnce => (), + FnTrait::FnMut => adjustments.push(Adjustment::borrow( + Mutability::Mut, + derefed_callee.clone(), + )), + FnTrait::Fn => adjustments.push(Adjustment::borrow( + Mutability::Not, + derefed_callee.clone(), + )), + } + let trait_ = fn_x + .get_id(self.db, self.trait_env.krate) + .expect("We just used it"); + let trait_data = self.db.trait_data(trait_); + if let Some(func) = trait_data.method_by_name(&fn_x.method_name()) { + let subst = TyBuilder::subst_for_def(self.db, trait_, None) + .push(callee_ty.clone()) + .push(TyBuilder::tuple_with(params.iter().cloned())) + .build(); + self.write_method_resolution(tgt_expr, func, subst.clone()); + } } + self.write_expr_adj(*callee, adjustments); (params, ret_ty) } None => { diff --git a/crates/hir-ty/src/infer/unify.rs b/crates/hir-ty/src/infer/unify.rs index 504f0743aa..2a07dd708c 100644 --- a/crates/hir-ty/src/infer/unify.rs +++ b/crates/hir-ty/src/infer/unify.rs @@ -8,16 +8,15 @@ use chalk_ir::{ }; use chalk_solve::infer::ParameterEnaVariableExt; use ena::unify::UnifyKey; -use hir_def::{FunctionId, TraitId}; use hir_expand::name; use stdx::never; use super::{InferOk, InferResult, InferenceContext, TypeError}; use crate::{ - db::HirDatabase, fold_tys, static_lifetime, traits::FnTrait, AliasEq, AliasTy, BoundVar, - Canonical, Const, DebruijnIndex, GenericArg, GenericArgData, Goal, Guidance, InEnvironment, - InferenceVar, Interner, Lifetime, ParamKind, ProjectionTy, ProjectionTyExt, Scalar, Solution, - Substitution, TraitEnvironment, Ty, TyBuilder, TyExt, TyKind, VariableKind, + db::HirDatabase, fold_tys, static_lifetime, to_chalk_trait_id, traits::FnTrait, AliasEq, + AliasTy, BoundVar, Canonical, Const, DebruijnIndex, GenericArg, GenericArgData, Goal, Guidance, + InEnvironment, InferenceVar, Interner, Lifetime, ParamKind, ProjectionTy, ProjectionTyExt, + Scalar, Solution, Substitution, TraitEnvironment, Ty, TyBuilder, TyExt, TyKind, VariableKind, }; impl<'a> InferenceContext<'a> { @@ -631,7 +630,7 @@ impl<'a> InferenceTable<'a> { &mut self, ty: &Ty, num_args: usize, - ) -> Option<(Option<(TraitId, FunctionId)>, Vec, Ty)> { + ) -> Option<(Option, Vec, Ty)> { match ty.callable_sig(self.db) { Some(sig) => Some((None, sig.params().to_vec(), sig.ret().clone())), None => self.callable_sig_from_fn_trait(ty, num_args), @@ -642,7 +641,7 @@ impl<'a> InferenceTable<'a> { &mut self, ty: &Ty, num_args: usize, - ) -> Option<(Option<(TraitId, FunctionId)>, Vec, Ty)> { + ) -> Option<(Option, Vec, Ty)> { let krate = self.trait_env.krate; let fn_once_trait = FnTrait::FnOnce.get_id(self.db, krate)?; let trait_data = self.db.trait_data(fn_once_trait); @@ -676,19 +675,28 @@ impl<'a> InferenceTable<'a> { }; let trait_env = self.trait_env.env.clone(); + let mut trait_ref = projection.trait_ref(self.db); let obligation = InEnvironment { - goal: projection.trait_ref(self.db).cast(Interner), - environment: trait_env, + goal: trait_ref.clone().cast(Interner), + environment: trait_env.clone(), }; let canonical = self.canonicalize(obligation.clone()); if self.db.trait_solve(krate, canonical.value.cast(Interner)).is_some() { self.register_obligation(obligation.goal); let return_ty = self.normalize_projection_ty(projection); - Some(( - Some(fn_once_trait).zip(trait_data.method_by_name(&name!(call_once))), - arg_tys, - return_ty, - )) + for fn_x in [FnTrait::Fn, FnTrait::FnMut, FnTrait::FnOnce] { + let fn_x_trait = fn_x.get_id(self.db, krate)?; + trait_ref.trait_id = to_chalk_trait_id(fn_x_trait); + let obligation: chalk_ir::InEnvironment> = 
InEnvironment { + goal: trait_ref.clone().cast(Interner), + environment: trait_env.clone(), + }; + let canonical = self.canonicalize(obligation.clone()); + if self.db.trait_solve(krate, canonical.value.cast(Interner)).is_some() { + return Some((Some(fn_x), arg_tys, return_ty)); + } + } + unreachable!("It should at least implement FnOnce at this point"); } else { None } diff --git a/crates/hir-ty/src/lib.rs b/crates/hir-ty/src/lib.rs index 9c63d67ab1..782a8ab4aa 100644 --- a/crates/hir-ty/src/lib.rs +++ b/crates/hir-ty/src/lib.rs @@ -576,10 +576,14 @@ where } pub fn callable_sig_from_fnonce( - self_ty: &Ty, + mut self_ty: &Ty, env: Arc, db: &dyn HirDatabase, ) -> Option { + if let Some((ty, _, _)) = self_ty.as_reference() { + // This will happen when it implements fn or fn mut, since we add a autoborrow adjustment + self_ty = ty; + } let krate = env.krate; let fn_once_trait = FnTrait::FnOnce.get_id(db, krate)?; let output_assoc_type = db.trait_data(fn_once_trait).associated_type_by_name(&name![Output])?; diff --git a/crates/hir-ty/src/method_resolution.rs b/crates/hir-ty/src/method_resolution.rs index f3a27632bf..f105c94086 100644 --- a/crates/hir-ty/src/method_resolution.rs +++ b/crates/hir-ty/src/method_resolution.rs @@ -20,7 +20,7 @@ use crate::{ autoderef::{self, AutoderefKind}, db::HirDatabase, from_chalk_trait_id, from_foreign_def_id, - infer::{unify::InferenceTable, Adjust, Adjustment, AutoBorrow, OverloadedDeref, PointerCast}, + infer::{unify::InferenceTable, Adjust, Adjustment, OverloadedDeref, PointerCast}, primitive::{FloatTy, IntTy, UintTy}, static_lifetime, to_chalk_trait_id, utils::all_super_traits, @@ -600,9 +600,9 @@ impl ReceiverAdjustments { } } if let Some(m) = self.autoref { - ty = TyKind::Ref(m, static_lifetime(), ty).intern(Interner); - adjust - .push(Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(m)), target: ty.clone() }); + let a = Adjustment::borrow(m, ty); + ty = a.target.clone(); + adjust.push(a); } if self.unsize_array { ty = 'x: { diff --git a/crates/hir-ty/src/mir/eval.rs b/crates/hir-ty/src/mir/eval.rs index 1ff6e10602..88ef92a4ae 100644 --- a/crates/hir-ty/src/mir/eval.rs +++ b/crates/hir-ty/src/mir/eval.rs @@ -11,7 +11,7 @@ use hir_def::{ builtin_type::BuiltinType, lang_item::{lang_attr, LangItem}, layout::{Layout, LayoutError, RustcEnumVariantIdx, TagEncoding, Variants}, - AdtId, DefWithBodyId, EnumVariantId, FunctionId, HasModule, Lookup, VariantId, + AdtId, DefWithBodyId, EnumVariantId, FunctionId, HasModule, ItemContainerId, Lookup, VariantId, }; use intern::Interned; use la_arena::ArenaMap; @@ -24,8 +24,9 @@ use crate::{ layout::layout_of_ty, mapping::from_chalk, method_resolution::lookup_impl_method, - CallableDefId, Const, ConstScalar, Interner, MemoryMap, Substitution, TraitEnvironment, Ty, - TyBuilder, TyExt, + traits::FnTrait, + CallableDefId, Const, ConstScalar, FnDefId, Interner, MemoryMap, Substitution, + TraitEnvironment, Ty, TyBuilder, TyExt, }; use super::{ @@ -33,11 +34,37 @@ use super::{ Operand, Place, ProjectionElem, Rvalue, StatementKind, Terminator, UnOp, }; +#[derive(Debug, Default)] +struct VTableMap { + ty_to_id: HashMap, + id_to_ty: Vec, +} + +impl VTableMap { + fn id(&mut self, ty: Ty) -> usize { + if let Some(x) = self.ty_to_id.get(&ty) { + return *x; + } + let id = self.id_to_ty.len(); + self.id_to_ty.push(ty.clone()); + self.ty_to_id.insert(ty, id); + id + } + + fn ty(&self, id: usize) -> Result<&Ty> { + self.id_to_ty.get(id).ok_or(MirEvalError::InvalidVTableId(id)) + } +} + pub struct Evaluator<'a> { db: &'a dyn 
HirDatabase, trait_env: Arc, stack: Vec, heap: Vec, + /// We don't really have function pointers, i.e. pointers to some assembly instructions that we can run. Instead, we + /// store the type as an interned id in place of function and vtable pointers, and we recover back the type at the + /// time of use. + vtable_map: VTableMap, crate_id: CrateId, // FIXME: This is a workaround, see the comment on `interpret_mir` assert_placeholder_ty_is_unused: bool, @@ -147,6 +174,7 @@ pub enum MirEvalError { ExecutionLimitExceeded, StackOverflow, TargetDataLayoutNotAvailable, + InvalidVTableId(usize), } impl std::fmt::Debug for MirEvalError { @@ -168,6 +196,7 @@ impl std::fmt::Debug for MirEvalError { Self::MirLowerError(arg0, arg1) => { f.debug_tuple("MirLowerError").field(arg0).field(arg1).finish() } + Self::InvalidVTableId(arg0) => f.debug_tuple("InvalidVTableId").field(arg0).finish(), Self::NotSupported(arg0) => f.debug_tuple("NotSupported").field(arg0).finish(), Self::InvalidConst(arg0) => { let data = &arg0.data(Interner); @@ -240,6 +269,7 @@ impl Evaluator<'_> { Evaluator { stack: vec![0], heap: vec![0], + vtable_map: VTableMap::default(), db, trait_env, crate_id, @@ -461,108 +491,16 @@ impl Evaluator<'_> { } => { let fn_ty = self.operand_ty(func, &locals)?; match &fn_ty.data(Interner).kind { - TyKind::FnDef(def, generic_args) => { - let def: CallableDefId = from_chalk(self.db, *def); - let generic_args = self.subst_filler(generic_args, &locals); - match def { - CallableDefId::FunctionId(def) => { - let arg_bytes = args - .iter() - .map(|x| { - Ok(self - .eval_operand(x, &locals)? - .get(&self)? - .to_owned()) - }) - .collect::>>()? - .into_iter(); - let function_data = self.db.function_data(def); - let is_intrinsic = match &function_data.abi { - Some(abi) => *abi == Interned::new_str("rust-intrinsic"), - None => match def.lookup(self.db.upcast()).container { - hir_def::ItemContainerId::ExternBlockId(block) => { - let id = block.lookup(self.db.upcast()).id; - id.item_tree(self.db.upcast())[id.value] - .abi - .as_deref() - == Some("rust-intrinsic") - } - _ => false, - }, - }; - let result = if is_intrinsic { - self.exec_intrinsic( - function_data - .name - .as_text() - .unwrap_or_default() - .as_str(), - arg_bytes, - generic_args, - &locals, - )? - } else if let Some(x) = self.detect_lang_function(def) { - self.exec_lang_item(x, arg_bytes)? - } else { - let (imp, generic_args) = lookup_impl_method( - self.db, - self.trait_env.clone(), - def, - generic_args.clone(), - ); - let generic_args = - self.subst_filler(&generic_args, &locals); - let def = imp.into(); - let mir_body = self - .db - .mir_body(def) - .map_err(|e| MirEvalError::MirLowerError(imp, e))?; - self.interpret_mir(&mir_body, arg_bytes, generic_args) - .map_err(|e| { - MirEvalError::InFunction(imp, Box::new(e)) - })? 
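// For illustration, a self-contained sketch of the interning idea behind the
// `VTableMap` introduced above (the name `VTableMapSketch` and the use of
// `String` in place of `Ty` are stand-ins for this sketch only): a type is
// mapped to a small integer id, and the evaluator stores that id where a real
// function or vtable pointer would otherwise live, recovering the type later.
use std::collections::HashMap;

#[derive(Default)]
struct VTableMapSketch {
    ty_to_id: HashMap<String, usize>,
    id_to_ty: Vec<String>,
}

impl VTableMapSketch {
    fn id(&mut self, ty: String) -> usize {
        if let Some(&id) = self.ty_to_id.get(&ty) {
            return id;
        }
        let id = self.id_to_ty.len();
        self.id_to_ty.push(ty.clone());
        self.ty_to_id.insert(ty, id);
        id
    }

    fn ty(&self, id: usize) -> Option<&str> {
        self.id_to_ty.get(id).map(String::as_str)
    }
}

fn main() {
    let mut map = VTableMapSketch::default();
    let a = map.id("fn(u8) -> u8".to_string());
    let b = map.id("fn(u8) -> u8".to_string());
    assert_eq!(a, b); // interning the same type twice yields the same id
    assert_eq!(map.ty(a), Some("fn(u8) -> u8"));
}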
- }; - let dest_addr = self.place_addr(destination, &locals)?; - self.write_memory(dest_addr, &result)?; - } - CallableDefId::StructId(id) => { - let (size, variant_layout, tag) = self.layout_of_variant( - id.into(), - generic_args.clone(), - &locals, - )?; - let result = self.make_by_layout( - size, - &variant_layout, - tag, - args, - &locals, - )?; - let dest_addr = self.place_addr(destination, &locals)?; - self.write_memory(dest_addr, &result)?; - } - CallableDefId::EnumVariantId(id) => { - let (size, variant_layout, tag) = self.layout_of_variant( - id.into(), - generic_args.clone(), - &locals, - )?; - let result = self.make_by_layout( - size, - &variant_layout, - tag, - args, - &locals, - )?; - let dest_addr = self.place_addr(destination, &locals)?; - self.write_memory(dest_addr, &result)?; - } - } - current_block_idx = - target.expect("broken mir, function without target"); + TyKind::Function(_) => { + let bytes = self.eval_operand(func, &locals)?; + self.exec_fn_pointer(bytes, destination, args, &locals)?; } - _ => not_supported!("unknown function type"), + TyKind::FnDef(def, generic_args) => { + self.exec_fn_def(*def, generic_args, destination, args, &locals)?; + } + x => not_supported!("unknown function type {x:?}"), } + current_block_idx = target.expect("broken mir, function without target"); } Terminator::SwitchInt { discr, targets } => { let val = u128::from_le_bytes(pad16( @@ -808,6 +746,16 @@ impl Evaluator<'_> { not_supported!("creating pointer from exposed address") } CastKind::Pointer(cast) => match cast { + PointerCast::ReifyFnPointer => { + let current_ty = self.operand_ty(operand, locals)?; + if let TyKind::FnDef(_, _) = ¤t_ty.data(Interner).kind { + let id = self.vtable_map.id(current_ty); + let ptr_size = self.ptr_size(); + Owned(id.to_le_bytes()[0..ptr_size].to_vec()) + } else { + not_supported!("ReifyFnPointer cast of a non FnDef type"); + } + } PointerCast::Unsize => { let current_ty = self.operand_ty(operand, locals)?; match &target_ty.data(Interner).kind { @@ -920,7 +868,7 @@ impl Evaluator<'_> { size: usize, // Not neccessarily equal to variant_layout.size variant_layout: &Layout, tag: Option<(usize, usize, i128)>, - values: &Vec, + values: &[Operand], locals: &Locals<'_>, ) -> Result> { let mut result = vec![0; size]; @@ -1140,6 +1088,20 @@ impl Evaluator<'_> { None } + fn detect_fn_trait(&self, def: FunctionId) -> Option { + use LangItem::*; + let ItemContainerId::TraitId(parent) = self.db.lookup_intern_function(def).container else { + return None; + }; + let l = lang_attr(self.db.upcast(), parent)?; + match l { + FnOnce => Some(FnTrait::FnOnce), + FnMut => Some(FnTrait::FnMut), + Fn => Some(FnTrait::Fn), + _ => None, + } + } + fn create_memory_map(&self, bytes: &[u8], ty: &Ty, locals: &Locals<'_>) -> Result { // FIXME: support indirect references let mut mm = MemoryMap::default(); @@ -1228,7 +1190,134 @@ impl Evaluator<'_> { } } - pub(crate) fn exec_lang_item( + fn exec_fn_pointer( + &mut self, + bytes: Interval, + destination: &Place, + args: &[Operand], + locals: &Locals<'_>, + ) -> Result<()> { + let id = from_bytes!(usize, bytes.get(self)?); + let next_ty = self.vtable_map.ty(id)?.clone(); + if let TyKind::FnDef(def, generic_args) = &next_ty.data(Interner).kind { + self.exec_fn_def(*def, generic_args, destination, args, &locals)?; + } else { + return Err(MirEvalError::TypeError("function pointer to non function")); + } + Ok(()) + } + + fn exec_fn_def( + &mut self, + def: FnDefId, + generic_args: &Substitution, + destination: &Place, + args: &[Operand], + 
locals: &Locals<'_>, + ) -> Result<()> { + let def: CallableDefId = from_chalk(self.db, def); + let generic_args = self.subst_filler(generic_args, &locals); + match def { + CallableDefId::FunctionId(def) => { + let dest_addr = self.place_addr(destination, &locals)?; + if let Some(x) = self.detect_fn_trait(def) { + self.exec_fn_trait(x, &args, destination, locals)?; + return Ok(()); + } + let arg_bytes = args + .iter() + .map(|x| Ok(self.eval_operand(x, &locals)?.get(&self)?.to_owned())) + .collect::>>()? + .into_iter(); + let function_data = self.db.function_data(def); + let is_intrinsic = match &function_data.abi { + Some(abi) => *abi == Interned::new_str("rust-intrinsic"), + None => match def.lookup(self.db.upcast()).container { + hir_def::ItemContainerId::ExternBlockId(block) => { + let id = block.lookup(self.db.upcast()).id; + id.item_tree(self.db.upcast())[id.value].abi.as_deref() + == Some("rust-intrinsic") + } + _ => false, + }, + }; + let result = if is_intrinsic { + self.exec_intrinsic( + function_data.name.as_text().unwrap_or_default().as_str(), + arg_bytes, + generic_args, + &locals, + )? + } else if let Some(x) = self.detect_lang_function(def) { + self.exec_lang_item(x, arg_bytes)? + } else { + let (imp, generic_args) = lookup_impl_method( + self.db, + self.trait_env.clone(), + def, + generic_args.clone(), + ); + let generic_args = self.subst_filler(&generic_args, &locals); + let def = imp.into(); + let mir_body = + self.db.mir_body(def).map_err(|e| MirEvalError::MirLowerError(imp, e))?; + self.interpret_mir(&mir_body, arg_bytes, generic_args) + .map_err(|e| MirEvalError::InFunction(imp, Box::new(e)))? + }; + self.write_memory(dest_addr, &result)?; + } + CallableDefId::StructId(id) => { + let (size, variant_layout, tag) = + self.layout_of_variant(id.into(), generic_args.clone(), &locals)?; + let result = self.make_by_layout(size, &variant_layout, tag, args, &locals)?; + let dest_addr = self.place_addr(destination, &locals)?; + self.write_memory(dest_addr, &result)?; + } + CallableDefId::EnumVariantId(id) => { + let (size, variant_layout, tag) = + self.layout_of_variant(id.into(), generic_args.clone(), &locals)?; + let result = self.make_by_layout(size, &variant_layout, tag, args, &locals)?; + let dest_addr = self.place_addr(destination, &locals)?; + self.write_memory(dest_addr, &result)?; + } + } + Ok(()) + } + + fn exec_fn_trait( + &mut self, + ft: FnTrait, + args: &[Operand], + destination: &Place, + locals: &Locals<'_>, + ) -> Result<()> { + let func = args.get(0).ok_or(MirEvalError::TypeError("fn trait with no arg"))?; + let ref_func_ty = self.operand_ty(func, locals)?; + let func_ty = match ft { + FnTrait::FnOnce => ref_func_ty, + FnTrait::FnMut | FnTrait::Fn => match ref_func_ty.as_reference() { + Some(x) => x.0.clone(), + None => return Err(MirEvalError::TypeError("fn trait with non-reference arg")), + }, + }; + match &func_ty.data(Interner).kind { + TyKind::FnDef(def, subst) => { + self.exec_fn_def(*def, subst, destination, &args[1..], locals)?; + } + TyKind::Function(_) => { + let mut func_data = self.eval_operand(func, locals)?; + if let FnTrait::FnMut | FnTrait::Fn = ft { + let addr = Address::from_bytes(func_data.get(self)?)?; + func_data = Interval { addr, size: self.ptr_size() }; + } + self.exec_fn_pointer(func_data, destination, &args[1..], locals)?; + } + x => not_supported!("Call {ft:?} trait methods with type {x:?}"), + } + Ok(()) + } + + fn exec_lang_item( &self, x: LangItem, mut args: std::vec::IntoIter>, diff --git a/crates/hir-ty/src/mir/lower.rs 
b/crates/hir-ty/src/mir/lower.rs index 3b9a31c772..7a5ca08942 100644 --- a/crates/hir-ty/src/mir/lower.rs +++ b/crates/hir-ty/src/mir/lower.rs @@ -298,7 +298,7 @@ impl MirLowerCtx<'_> { ); Ok(Some(current)) } - ValueNs::StructId(_) => { + ValueNs::FunctionId(_) | ValueNs::StructId(_) => { // It's probably a unit struct or a zero sized function, so no action is needed. Ok(Some(current)) } @@ -445,36 +445,36 @@ impl MirLowerCtx<'_> { }) }, Expr::Call { callee, args, .. } => { + if let Some((func_id, generic_args)) = + self.infer.method_resolution(expr_id) { + let ty = chalk_ir::TyKind::FnDef( + CallableDefId::FunctionId(func_id).to_chalk(self.db), + generic_args, + ) + .intern(Interner); + let func = Operand::from_bytes(vec![], ty); + return self.lower_call_and_args( + func, + iter::once(*callee).chain(args.iter().copied()), + place, + current, + self.is_uninhabited(expr_id), + ); + } let callee_ty = self.expr_ty_after_adjustments(*callee); match &callee_ty.data(Interner).kind { chalk_ir::TyKind::FnDef(..) => { let func = Operand::from_bytes(vec![], callee_ty.clone()); self.lower_call_and_args(func, args.iter().copied(), place, current, self.is_uninhabited(expr_id)) } - TyKind::Scalar(_) - | TyKind::Tuple(_, _) - | TyKind::Array(_, _) - | TyKind::Adt(_, _) - | TyKind::Str - | TyKind::Foreign(_) - | TyKind::Slice(_) => { - return Err(MirLowerError::TypeError("function call on data type")) + chalk_ir::TyKind::Function(_) => { + let Some((func, current)) = self.lower_expr_to_some_operand(*callee, current)? else { + return Ok(None); + }; + self.lower_call_and_args(func, args.iter().copied(), place, current, self.is_uninhabited(expr_id)) } TyKind::Error => return Err(MirLowerError::MissingFunctionDefinition), - TyKind::AssociatedType(_, _) - | TyKind::Raw(_, _) - | TyKind::Ref(_, _, _) - | TyKind::OpaqueType(_, _) - | TyKind::Never - | TyKind::Closure(_, _) - | TyKind::Generator(_, _) - | TyKind::GeneratorWitness(_, _) - | TyKind::Placeholder(_) - | TyKind::Dyn(_) - | TyKind::Alias(_) - | TyKind::Function(_) - | TyKind::BoundVar(_) - | TyKind::InferenceVar(_, _) => not_supported!("dynamic function call"), + _ => return Err(MirLowerError::TypeError("function call on bad type")), } } Expr::MethodCall { receiver, args, .. 
} => { diff --git a/crates/hir-ty/src/mir/pretty.rs b/crates/hir-ty/src/mir/pretty.rs index 70364d0882..eb0002266d 100644 --- a/crates/hir-ty/src/mir/pretty.rs +++ b/crates/hir-ty/src/mir/pretty.rs @@ -301,9 +301,9 @@ impl<'a> MirPrettyCtx<'a> { w!(self, ")"); } Rvalue::Cast(ck, op, ty) => { - w!(self, "Discriminant({ck:?}"); + w!(self, "Cast({ck:?}, "); self.operand(op); - w!(self, "{})", ty.display(self.db)); + w!(self, ", {})", ty.display(self.db)); } Rvalue::CheckedBinaryOp(b, o1, o2) => { self.operand(o1); diff --git a/crates/hir-ty/src/tests/coercion.rs b/crates/hir-ty/src/tests/coercion.rs index b524922b6c..696bdef03f 100644 --- a/crates/hir-ty/src/tests/coercion.rs +++ b/crates/hir-ty/src/tests/coercion.rs @@ -396,10 +396,40 @@ fn test() { ); } +#[test] +fn coerce_fn_item_to_fn_ptr_in_array() { + check_no_mismatches( + r" +fn foo(x: u32) -> isize { 1 } +fn bar(x: u32) -> isize { 1 } +fn test() { + let f = [foo, bar]; + // ^^^ adjustments: Pointer(ReifyFnPointer) +}", + ); +} + #[test] fn coerce_fn_items_in_match_arms() { cov_mark::check!(coerce_fn_reification); + check_no_mismatches( + r" +fn foo1(x: u32) -> isize { 1 } +fn foo2(x: u32) -> isize { 2 } +fn foo3(x: u32) -> isize { 3 } +fn test() { + let x = match 1 { + 1 => foo1, + // ^^^^ adjustments: Pointer(ReifyFnPointer) + 2 => foo2, + // ^^^^ adjustments: Pointer(ReifyFnPointer) + _ => foo3, + // ^^^^ adjustments: Pointer(ReifyFnPointer) + }; + x; +}", + ); check_types( r" fn foo1(x: u32) -> isize { 1 } diff --git a/crates/hir-ty/src/traits.rs b/crates/hir-ty/src/traits.rs index 3ab85c68f5..aebf59f315 100644 --- a/crates/hir-ty/src/traits.rs +++ b/crates/hir-ty/src/traits.rs @@ -11,6 +11,7 @@ use hir_def::{ lang_item::{LangItem, LangItemTarget}, TraitId, }; +use hir_expand::name::{name, Name}; use stdx::panic_context; use crate::{ @@ -187,6 +188,14 @@ impl FnTrait { } } + pub fn method_name(&self) -> Name { + match self { + FnTrait::FnOnce => name!(call_once), + FnTrait::FnMut => name!(call_mut), + FnTrait::Fn => name!(call), + } + } + pub fn get_id(&self, db: &dyn HirDatabase, krate: CrateId) -> Option { let target = db.lang_item(krate, self.lang_item())?; match target { diff --git a/crates/ide-diagnostics/src/handlers/mutability_errors.rs b/crates/ide-diagnostics/src/handlers/mutability_errors.rs index 96470265d1..03951ea2bf 100644 --- a/crates/ide-diagnostics/src/handlers/mutability_errors.rs +++ b/crates/ide-diagnostics/src/handlers/mutability_errors.rs @@ -631,6 +631,31 @@ fn f(inp: (Foo, Foo, Foo, Foo)) { ); } + #[test] + fn fn_traits() { + check_diagnostics( + r#" +//- minicore: fn +fn fn_ref(mut x: impl Fn(u8) -> u8) -> u8 { + //^^^^^ 💡 weak: variable does not need to be mutable + x(2) +} +fn fn_mut(x: impl FnMut(u8) -> u8) -> u8 { + x(2) + //^ 💡 error: cannot mutate immutable variable `x` +} +fn fn_borrow_mut(mut x: &mut impl FnMut(u8) -> u8) -> u8 { + //^^^^^ 💡 weak: variable does not need to be mutable + x(2) +} +fn fn_once(mut x: impl FnOnce(u8) -> u8) -> u8 { + //^^^^^ 💡 weak: variable does not need to be mutable + x(2) +} +"#, + ); + } + #[test] fn respect_allow_unused_mut() { // FIXME: respect From 7525a38af5ebd9eef404b19a11cefe8a033f9d2d Mon Sep 17 00:00:00 2001 From: hkalbasi Date: Tue, 14 Mar 2023 12:14:02 +0330 Subject: [PATCH 1185/1945] Support evaluating `dyn Trait` methods --- crates/hir-ty/src/consteval/tests.rs | 51 ++++++++ crates/hir-ty/src/method_resolution.rs | 39 +++++- crates/hir-ty/src/mir/eval.rs | 158 ++++++++++++++++--------- crates/hir-ty/src/mir/lower.rs | 9 +- 4 files changed, 197 
insertions(+), 60 deletions(-) diff --git a/crates/hir-ty/src/consteval/tests.rs b/crates/hir-ty/src/consteval/tests.rs index 8a9a5d254d..f7914b578e 100644 --- a/crates/hir-ty/src/consteval/tests.rs +++ b/crates/hir-ty/src/consteval/tests.rs @@ -1008,6 +1008,57 @@ fn function_traits() { ); } +#[test] +fn dyn_trait() { + check_number( + r#" + //- minicore: coerce_unsized, index, slice + trait Foo { + fn foo(&self) -> u8 { 10 } + } + struct S1; + struct S2; + struct S3; + impl Foo for S1 { + fn foo(&self) -> u8 { 1 } + } + impl Foo for S2 { + fn foo(&self) -> u8 { 2 } + } + impl Foo for S3 {} + const GOAL: u8 = { + let x: &[&dyn Foo] = &[&S1, &S2, &S3]; + x[0].foo() + x[1].foo() + x[2].foo() + }; + "#, + 13, + ); + check_number( + r#" + //- minicore: coerce_unsized, index, slice + trait Foo { + fn foo(&self) -> i32 { 10 } + } + trait Bar { + fn bar(&self) -> i32 { 20 } + } + + struct S; + impl Foo for S { + fn foo(&self) -> i32 { 200 } + } + impl Bar for dyn Foo { + fn bar(&self) -> i32 { 700 } + } + const GOAL: i32 = { + let x: &dyn Foo = &S; + x.bar() + x.foo() + }; + "#, + 900, + ); +} + #[test] fn array_and_index() { check_number( diff --git a/crates/hir-ty/src/method_resolution.rs b/crates/hir-ty/src/method_resolution.rs index f105c94086..6244b98104 100644 --- a/crates/hir-ty/src/method_resolution.rs +++ b/crates/hir-ty/src/method_resolution.rs @@ -5,7 +5,7 @@ use std::{ops::ControlFlow, sync::Arc}; use base_db::{CrateId, Edition}; -use chalk_ir::{cast::Cast, Mutability, TyKind, UniverseIndex}; +use chalk_ir::{cast::Cast, Mutability, TyKind, UniverseIndex, WhereClause}; use hir_def::{ data::ImplData, item_scope::ItemScope, lang_item::LangItem, nameres::DefMap, AssocItemId, BlockId, ConstId, FunctionId, HasModule, ImplId, ItemContainerId, Lookup, ModuleDefId, @@ -692,6 +692,38 @@ pub fn lookup_impl_const( .unwrap_or((const_id, subs)) } +/// Checks if the self parameter of `Trait` method is the `dyn Trait` and we should +/// call the method using the vtable. +pub fn is_dyn_method( + db: &dyn HirDatabase, + _env: Arc, + func: FunctionId, + fn_subst: Substitution, +) -> Option { + let ItemContainerId::TraitId(trait_id) = func.lookup(db.upcast()).container else { + return None; + }; + let trait_params = db.generic_params(trait_id.into()).type_or_consts.len(); + let fn_params = fn_subst.len(Interner) - trait_params; + let trait_ref = TraitRef { + trait_id: to_chalk_trait_id(trait_id), + substitution: Substitution::from_iter(Interner, fn_subst.iter(Interner).skip(fn_params)), + }; + let self_ty = trait_ref.self_type_parameter(Interner); + if let TyKind::Dyn(d) = self_ty.kind(Interner) { + let is_my_trait_in_bounds = d.bounds.skip_binders().as_slice(Interner).iter().any(|x| match x.skip_binders() { + // rustc doesn't accept `impl Foo<2> for dyn Foo<5>`, so if the trait id is equal, no matter + // what the generics are, we are sure that the method is come from the vtable. + WhereClause::Implemented(tr) => tr.trait_id == trait_ref.trait_id, + _ => false, + }); + if is_my_trait_in_bounds { + return Some(fn_params); + } + } + None +} + /// Looks up the impl method that actually runs for the trait method `func`. /// /// Returns `func` if it's not a method defined in a trait or the lookup failed. 
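// For illustration, a plain-Rust sketch of the distinction `is_dyn_method`
// draws, mirroring the new `dyn_trait` const-eval test above: a trait method
// whose receiver is the `dyn Trait` of its own trait must be dispatched
// through the vtable, while a method of another trait that happens to be
// implemented *for* the trait object resolves statically.
trait Foo {
    fn foo(&self) -> i32 {
        10
    }
}

trait Bar {
    fn bar(&self) -> i32 {
        20
    }
}

struct S;

impl Foo for S {
    fn foo(&self) -> i32 {
        200
    }
}

impl Bar for dyn Foo {
    fn bar(&self) -> i32 {
        700
    }
}

fn main() {
    let x: &dyn Foo = &S;
    // `foo` goes through the vtable stored in the fat pointer and runs the
    // impl for `S`; `bar` is found statically on `impl Bar for dyn Foo`.
    assert_eq!(x.foo() + x.bar(), 900);
}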
@@ -701,9 +733,8 @@ pub fn lookup_impl_method( func: FunctionId, fn_subst: Substitution, ) -> (FunctionId, Substitution) { - let trait_id = match func.lookup(db.upcast()).container { - ItemContainerId::TraitId(id) => id, - _ => return (func, fn_subst), + let ItemContainerId::TraitId(trait_id) = func.lookup(db.upcast()).container else { + return (func, fn_subst) }; let trait_params = db.generic_params(trait_id.into()).type_or_consts.len(); let fn_params = fn_subst.len(Interner) - trait_params; diff --git a/crates/hir-ty/src/mir/eval.rs b/crates/hir-ty/src/mir/eval.rs index 88ef92a4ae..7293156a97 100644 --- a/crates/hir-ty/src/mir/eval.rs +++ b/crates/hir-ty/src/mir/eval.rs @@ -23,10 +23,10 @@ use crate::{ infer::{normalize, PointerCast}, layout::layout_of_ty, mapping::from_chalk, - method_resolution::lookup_impl_method, + method_resolution::{is_dyn_method, lookup_impl_method}, traits::FnTrait, CallableDefId, Const, ConstScalar, FnDefId, Interner, MemoryMap, Substitution, - TraitEnvironment, Ty, TyBuilder, TyExt, + TraitEnvironment, Ty, TyBuilder, TyExt, GenericArgData, }; use super::{ @@ -34,6 +34,15 @@ use super::{ Operand, Place, ProjectionElem, Rvalue, StatementKind, Terminator, UnOp, }; +macro_rules! from_bytes { + ($ty:tt, $value:expr) => { + ($ty::from_le_bytes(match ($value).try_into() { + Ok(x) => x, + Err(_) => return Err(MirEvalError::TypeError("mismatched size")), + })) + }; +} + #[derive(Debug, Default)] struct VTableMap { ty_to_id: HashMap, @@ -54,6 +63,11 @@ impl VTableMap { fn ty(&self, id: usize) -> Result<&Ty> { self.id_to_ty.get(id).ok_or(MirEvalError::InvalidVTableId(id)) } + + fn ty_of_bytes(&self, bytes: &[u8]) -> Result<&Ty> { + let id = from_bytes!(usize, bytes); + self.ty(id) + } } pub struct Evaluator<'a> { @@ -110,15 +124,6 @@ impl IntervalOrOwned { } } -macro_rules! from_bytes { - ($ty:tt, $value:expr) => { - ($ty::from_le_bytes(match ($value).try_into() { - Ok(x) => x, - Err(_) => return Err(MirEvalError::TypeError("mismatched size")), - })) - }; -} - impl Address { fn from_bytes(x: &[u8]) -> Result { Ok(Address::from_usize(from_bytes!(usize, x))) @@ -781,7 +786,18 @@ impl Evaluator<'_> { } _ => not_supported!("slice unsizing from non pointers"), }, - TyKind::Dyn(_) => not_supported!("dyn pointer unsize cast"), + TyKind::Dyn(_) => match ¤t_ty.data(Interner).kind { + TyKind::Raw(_, ty) | TyKind::Ref(_, _, ty) => { + let vtable = self.vtable_map.id(ty.clone()); + let addr = + self.eval_operand(operand, locals)?.get(&self)?; + let mut r = Vec::with_capacity(16); + r.extend(addr.iter().copied()); + r.extend(vtable.to_le_bytes().into_iter()); + Owned(r) + } + _ => not_supported!("dyn unsizing from non pointers"), + }, _ => not_supported!("unknown unsized cast"), } } @@ -1227,44 +1243,8 @@ impl Evaluator<'_> { let arg_bytes = args .iter() .map(|x| Ok(self.eval_operand(x, &locals)?.get(&self)?.to_owned())) - .collect::>>()? - .into_iter(); - let function_data = self.db.function_data(def); - let is_intrinsic = match &function_data.abi { - Some(abi) => *abi == Interned::new_str("rust-intrinsic"), - None => match def.lookup(self.db.upcast()).container { - hir_def::ItemContainerId::ExternBlockId(block) => { - let id = block.lookup(self.db.upcast()).id; - id.item_tree(self.db.upcast())[id.value].abi.as_deref() - == Some("rust-intrinsic") - } - _ => false, - }, - }; - let result = if is_intrinsic { - self.exec_intrinsic( - function_data.name.as_text().unwrap_or_default().as_str(), - arg_bytes, - generic_args, - &locals, - )? 
- } else if let Some(x) = self.detect_lang_function(def) { - self.exec_lang_item(x, arg_bytes)? - } else { - let (imp, generic_args) = lookup_impl_method( - self.db, - self.trait_env.clone(), - def, - generic_args.clone(), - ); - let generic_args = self.subst_filler(&generic_args, &locals); - let def = imp.into(); - let mir_body = - self.db.mir_body(def).map_err(|e| MirEvalError::MirLowerError(imp, e))?; - self.interpret_mir(&mir_body, arg_bytes, generic_args) - .map_err(|e| MirEvalError::InFunction(imp, Box::new(e)))? - }; - self.write_memory(dest_addr, &result)?; + .collect::>>()?; + self.exec_fn_with_args(def, arg_bytes, generic_args, locals, dest_addr)?; } CallableDefId::StructId(id) => { let (size, variant_layout, tag) = @@ -1284,6 +1264,77 @@ impl Evaluator<'_> { Ok(()) } + fn exec_fn_with_args( + &mut self, + def: FunctionId, + arg_bytes: Vec>, + generic_args: Substitution, + locals: &Locals<'_>, + dest_addr: Address, + ) -> Result<()> { + let function_data = self.db.function_data(def); + let is_intrinsic = match &function_data.abi { + Some(abi) => *abi == Interned::new_str("rust-intrinsic"), + None => match def.lookup(self.db.upcast()).container { + hir_def::ItemContainerId::ExternBlockId(block) => { + let id = block.lookup(self.db.upcast()).id; + id.item_tree(self.db.upcast())[id.value].abi.as_deref() + == Some("rust-intrinsic") + } + _ => false, + }, + }; + let result = if is_intrinsic { + self.exec_intrinsic( + function_data.name.as_text().unwrap_or_default().as_str(), + arg_bytes.iter().cloned(), + generic_args, + &locals, + )? + } else if let Some(x) = self.detect_lang_function(def) { + self.exec_lang_item(x, &arg_bytes)? + } else { + if let Some(self_ty_idx) = + is_dyn_method(self.db, self.trait_env.clone(), def, generic_args.clone()) + { + // In the layout of current possible receiver, which at the moment of writing this code is one of + // `&T`, `&mut T`, `Box`, `Rc`, `Arc`, and `Pin
<P>
` where `P` is one of possible recievers, + // the vtable is exactly in the `[ptr_size..2*ptr_size]` bytes. So we can use it without branching on + // the type. + let ty = self + .vtable_map + .ty_of_bytes(&arg_bytes[0][self.ptr_size()..self.ptr_size() * 2])?; + let ty = GenericArgData::Ty(ty.clone()).intern(Interner); + let mut args_for_target = arg_bytes; + args_for_target[0] = args_for_target[0][0..self.ptr_size()].to_vec(); + let generics_for_target = Substitution::from_iter( + Interner, + generic_args + .iter(Interner) + .enumerate() + .map(|(i, x)| if i == self_ty_idx { &ty } else { x }) + ); + return self.exec_fn_with_args( + def, + args_for_target, + generics_for_target, + locals, + dest_addr, + ); + } + let (imp, generic_args) = + lookup_impl_method(self.db, self.trait_env.clone(), def, generic_args.clone()); + let generic_args = self.subst_filler(&generic_args, &locals); + let def = imp.into(); + let mir_body = + self.db.mir_body(def).map_err(|e| MirEvalError::MirLowerError(imp, e))?; + self.interpret_mir(&mir_body, arg_bytes.iter().cloned(), generic_args) + .map_err(|e| MirEvalError::InFunction(imp, Box::new(e)))? + }; + self.write_memory(dest_addr, &result)?; + Ok(()) + } + fn exec_fn_trait( &mut self, ft: FnTrait, @@ -1317,12 +1368,9 @@ impl Evaluator<'_> { Ok(()) } - fn exec_lang_item( - &self, - x: LangItem, - mut args: std::vec::IntoIter>, - ) -> Result> { + fn exec_lang_item(&self, x: LangItem, args: &[Vec]) -> Result> { use LangItem::*; + let mut args = args.iter(); match x { PanicFmt | BeginPanic => Err(MirEvalError::Panic), SliceLen => { diff --git a/crates/hir-ty/src/mir/lower.rs b/crates/hir-ty/src/mir/lower.rs index 7a5ca08942..4fc3c67a6e 100644 --- a/crates/hir-ty/src/mir/lower.rs +++ b/crates/hir-ty/src/mir/lower.rs @@ -230,7 +230,14 @@ impl MirLowerCtx<'_> { self.lower_const(c, current, place, expr_id.into())?; return Ok(Some(current)) }, - _ => not_supported!("associated functions and types"), + hir_def::AssocItemId::FunctionId(_) => { + // FnDefs are zero sized, no action is needed. + return Ok(Some(current)) + } + hir_def::AssocItemId::TypeAliasId(_) => { + // FIXME: If it is unreachable, use proper error instead of `not_supported`. 
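// For illustration, a standalone sketch of why the `FunctionId` arm above
// needs no MIR statements (`add2` is a hypothetical example function): a path
// that names a function has a zero-sized "fn item" type, so materializing the
// value stores no bytes; a real pointer only appears when the value is
// reified, which is what the `ReifyFnPointer` cast added elsewhere in this
// series handles.
fn add2(x: u8) -> u8 {
    x + 2
}

fn main() {
    let f = add2; // `f` has the zero-sized fn-item type of `add2`
    assert_eq!(std::mem::size_of_val(&f), 0);
    let p: fn(u8) -> u8 = f; // reifying it yields an ordinary function pointer
    assert_eq!(f(1) + p(5), 10);
}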
+ not_supported!("associated functions and types") + }, } } else if let Some(variant) = self .infer From 513e340bd344ee373847953baead3097f0b43815 Mon Sep 17 00:00:00 2001 From: hkalbasi Date: Tue, 14 Mar 2023 12:49:09 +0330 Subject: [PATCH 1186/1945] implement transmute intrinsic --- crates/hir-ty/src/mir/eval.rs | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/crates/hir-ty/src/mir/eval.rs b/crates/hir-ty/src/mir/eval.rs index 7293156a97..3001832d79 100644 --- a/crates/hir-ty/src/mir/eval.rs +++ b/crates/hir-ty/src/mir/eval.rs @@ -1187,7 +1187,7 @@ impl Evaluator<'_> { fn exec_intrinsic( &self, as_str: &str, - _arg_bytes: impl Iterator>, + mut arg_bytes: impl Iterator>, generic_args: Substitution, locals: &Locals<'_>, ) -> Result> { @@ -1202,6 +1202,12 @@ impl Evaluator<'_> { None => return Err(MirEvalError::TypeError("size_of arg is unsized")), } } + "transmute" => { + let Some(arg) = arg_bytes.next() else { + return Err(MirEvalError::TypeError("trasmute arg is not provided")); + }; + Ok(arg) + } _ => not_supported!("unknown intrinsic {as_str}"), } } From 051dae222164a04c703c5c0914a304d35206a62f Mon Sep 17 00:00:00 2001 From: hkalbasi Date: Tue, 14 Mar 2023 17:02:38 +0330 Subject: [PATCH 1187/1945] Support record pattern MIR lowering --- crates/hir-ty/src/consteval/tests.rs | 41 +++++- crates/hir-ty/src/method_resolution.rs | 13 +- crates/hir-ty/src/mir/eval.rs | 15 +- crates/hir-ty/src/mir/lower.rs | 193 +++++++++++++++++++------ lib/la-arena/src/lib.rs | 2 +- 5 files changed, 201 insertions(+), 63 deletions(-) diff --git a/crates/hir-ty/src/consteval/tests.rs b/crates/hir-ty/src/consteval/tests.rs index f7914b578e..1d298f9609 100644 --- a/crates/hir-ty/src/consteval/tests.rs +++ b/crates/hir-ty/src/consteval/tests.rs @@ -555,6 +555,38 @@ fn structs() { "#, 17, ); + check_number( + r#" + struct Point { + x: i32, + y: i32, + } + + const GOAL: i32 = { + let p = Point { x: 5, y: 2 }; + let p2 = Point { x: 3, ..p }; + p.x * 1000 + p.y * 100 + p2.x * 10 + p2.y + }; + "#, + 5232, + ); + check_number( + r#" + struct Point { + x: i32, + y: i32, + } + + const GOAL: i32 = { + let p = Point { x: 5, y: 2 }; + let Point { x, y } = p; + let Point { x: x2, .. } = p; + let Point { y: y2, .. } = p; + x * 1000 + y * 100 + x2 * 10 + y2 + }; + "#, + 5252, + ); } #[test] @@ -599,13 +631,14 @@ fn tuples() { ); check_number( r#" - struct TupleLike(i32, u8, i64, u16); - const GOAL: u8 = { + struct TupleLike(i32, i64, u8, u16); + const GOAL: i64 = { let a = TupleLike(10, 20, 3, 15); - a.1 + let TupleLike(b, .., c) = a; + a.1 * 100 + b as i64 + c as i64 }; "#, - 20, + 2025, ); check_number( r#" diff --git a/crates/hir-ty/src/method_resolution.rs b/crates/hir-ty/src/method_resolution.rs index 6244b98104..2003d24038 100644 --- a/crates/hir-ty/src/method_resolution.rs +++ b/crates/hir-ty/src/method_resolution.rs @@ -711,12 +711,13 @@ pub fn is_dyn_method( }; let self_ty = trait_ref.self_type_parameter(Interner); if let TyKind::Dyn(d) = self_ty.kind(Interner) { - let is_my_trait_in_bounds = d.bounds.skip_binders().as_slice(Interner).iter().any(|x| match x.skip_binders() { - // rustc doesn't accept `impl Foo<2> for dyn Foo<5>`, so if the trait id is equal, no matter - // what the generics are, we are sure that the method is come from the vtable. 
- WhereClause::Implemented(tr) => tr.trait_id == trait_ref.trait_id, - _ => false, - }); + let is_my_trait_in_bounds = + d.bounds.skip_binders().as_slice(Interner).iter().any(|x| match x.skip_binders() { + // rustc doesn't accept `impl Foo<2> for dyn Foo<5>`, so if the trait id is equal, no matter + // what the generics are, we are sure that the method is come from the vtable. + WhereClause::Implemented(tr) => tr.trait_id == trait_ref.trait_id, + _ => false, + }); if is_my_trait_in_bounds { return Some(fn_params); } diff --git a/crates/hir-ty/src/mir/eval.rs b/crates/hir-ty/src/mir/eval.rs index 3001832d79..787665bb63 100644 --- a/crates/hir-ty/src/mir/eval.rs +++ b/crates/hir-ty/src/mir/eval.rs @@ -25,8 +25,8 @@ use crate::{ mapping::from_chalk, method_resolution::{is_dyn_method, lookup_impl_method}, traits::FnTrait, - CallableDefId, Const, ConstScalar, FnDefId, Interner, MemoryMap, Substitution, - TraitEnvironment, Ty, TyBuilder, TyExt, GenericArgData, + CallableDefId, Const, ConstScalar, FnDefId, GenericArgData, Interner, MemoryMap, Substitution, + TraitEnvironment, Ty, TyBuilder, TyExt, }; use super::{ @@ -1315,10 +1315,13 @@ impl Evaluator<'_> { args_for_target[0] = args_for_target[0][0..self.ptr_size()].to_vec(); let generics_for_target = Substitution::from_iter( Interner, - generic_args - .iter(Interner) - .enumerate() - .map(|(i, x)| if i == self_ty_idx { &ty } else { x }) + generic_args.iter(Interner).enumerate().map(|(i, x)| { + if i == self_ty_idx { + &ty + } else { + x + } + }), ); return self.exec_fn_with_args( def, diff --git a/crates/hir-ty/src/mir/lower.rs b/crates/hir-ty/src/mir/lower.rs index 4fc3c67a6e..d36d9e946c 100644 --- a/crates/hir-ty/src/mir/lower.rs +++ b/crates/hir-ty/src/mir/lower.rs @@ -4,16 +4,17 @@ use std::{iter, mem, sync::Arc}; use chalk_ir::{BoundVar, ConstData, DebruijnIndex, TyKind}; use hir_def::{ + adt::VariantData, body::Body, expr::{ Array, BindingAnnotation, BindingId, ExprId, LabelId, Literal, MatchArm, Pat, PatId, - RecordLitField, + RecordFieldPat, RecordLitField, }, lang_item::{LangItem, LangItemTarget}, layout::LayoutError, path::Path, resolver::{resolver_for_expr, ResolveValueResult, ValueNs}, - DefWithBodyId, EnumVariantId, HasModule, ItemContainerId, TraitId, + DefWithBodyId, EnumVariantId, HasModule, ItemContainerId, LocalFieldId, TraitId, }; use hir_expand::name::Name; use la_arena::ArenaMap; @@ -106,6 +107,12 @@ impl MirLowerError { type Result = std::result::Result; +enum AdtPatternShape<'a> { + Tuple { args: &'a [PatId], ellipsis: Option }, + Record { args: &'a [RecordFieldPat] }, + Unit, +} + impl MirLowerCtx<'_> { fn temp(&mut self, ty: Ty) -> Result { if matches!(ty.kind(Interner), TyKind::Slice(_) | TyKind::Dyn(_)) { @@ -444,7 +451,8 @@ impl MirLowerCtx<'_> { current, pat.into(), Some(end), - &[pat], &None)?; + AdtPatternShape::Tuple { args: &[pat], ellipsis: None }, + )?; if let Some((_, block)) = this.lower_expr_as_place(current, body, true)? { this.set_goto(block, begin); } @@ -573,7 +581,17 @@ impl MirLowerCtx<'_> { Ok(None) } Expr::Yield { .. } => not_supported!("yield"), - Expr::RecordLit { fields, path, .. } => { + Expr::RecordLit { fields, path, spread, ellipsis: _, is_assignee_expr: _ } => { + let spread_place = match spread { + &Some(x) => { + let Some((p, c)) = self.lower_expr_as_place(current, x, true)? 
else { + return Ok(None); + }; + current = c; + Some(p) + }, + None => None, + }; let variant_id = self .infer .variant_resolution_for_expr(expr_id) @@ -603,9 +621,24 @@ impl MirLowerCtx<'_> { place, Rvalue::Aggregate( AggregateKind::Adt(variant_id, subst), - operands.into_iter().map(|x| x).collect::>().ok_or( - MirLowerError::TypeError("missing field in record literal"), - )?, + match spread_place { + Some(sp) => operands.into_iter().enumerate().map(|(i, x)| { + match x { + Some(x) => x, + None => { + let mut p = sp.clone(); + p.projection.push(ProjectionElem::Field(FieldId { + parent: variant_id, + local_id: LocalFieldId::from_raw(RawIdx::from(i as u32)), + })); + Operand::Copy(p) + }, + } + }).collect(), + None => operands.into_iter().map(|x| x).collect::>().ok_or( + MirLowerError::TypeError("missing field in record literal"), + )?, + }, ), expr_id.into(), ); @@ -1021,14 +1054,11 @@ impl MirLowerCtx<'_> { self.pattern_match_tuple_like( current, current_else, - args.iter().enumerate().map(|(i, x)| { - ( - PlaceElem::TupleField(i), - *x, - subst.at(Interner, i).assert_ty_ref(Interner).clone(), - ) - }), + args, *ellipsis, + subst.iter(Interner).enumerate().map(|(i, x)| { + (PlaceElem::TupleField(i), x.assert_ty_ref(Interner).clone()) + }), &cond_place, binding_mode, )? @@ -1062,7 +1092,21 @@ impl MirLowerCtx<'_> { } (then_target, current_else) } - Pat::Record { .. } => not_supported!("record pattern"), + Pat::Record { args, .. } => { + let Some(variant) = self.infer.variant_resolution_for_pat(pattern) else { + not_supported!("unresolved variant"); + }; + self.pattern_matching_variant( + cond_ty, + binding_mode, + cond_place, + variant, + current, + pattern.into(), + current_else, + AdtPatternShape::Record { args: &*args }, + )? + } Pat::Range { .. } => not_supported!("range pattern"), Pat::Slice { .. } => not_supported!("slice pattern"), Pat::Path(_) => { @@ -1077,8 +1121,7 @@ impl MirLowerCtx<'_> { current, pattern.into(), current_else, - &[], - &None, + AdtPatternShape::Unit, )? } Pat::Lit(l) => { @@ -1160,8 +1203,7 @@ impl MirLowerCtx<'_> { current, pattern.into(), current_else, - args, - ellipsis, + AdtPatternShape::Tuple { args, ellipsis: *ellipsis }, )? } Pat::Ref { .. } => not_supported!("& pattern"), @@ -1179,15 +1221,13 @@ impl MirLowerCtx<'_> { current: BasicBlockId, span: MirSpan, current_else: Option, - args: &[PatId], - ellipsis: &Option, + shape: AdtPatternShape<'_>, ) -> Result<(BasicBlockId, Option)> { pattern_matching_dereference(&mut cond_ty, &mut binding_mode, &mut cond_place); let subst = match cond_ty.kind(Interner) { TyKind::Adt(_, s) => s, _ => return Err(MirLowerError::TypeError("non adt type matched with tuple struct")), }; - let fields_type = self.db.field_types(variant); Ok(match variant { VariantId::EnumVariantId(v) => { let e = self.db.const_eval_discriminant(v)? as u128; @@ -1208,35 +1248,26 @@ impl MirLowerCtx<'_> { }, ); let enum_data = self.db.enum_data(v.parent); - let fields = - enum_data.variants[v.local_id].variant_data.fields().iter().map(|(x, _)| { - ( - PlaceElem::Field(FieldId { parent: v.into(), local_id: x }), - fields_type[x].clone().substitute(Interner, subst), - ) - }); - self.pattern_match_tuple_like( + self.pattern_matching_variant_fields( + shape, + &enum_data.variants[v.local_id].variant_data, + variant, + subst, next, Some(else_target), - args.iter().zip(fields).map(|(x, y)| (y.0, *x, y.1)), - *ellipsis, &cond_place, binding_mode, )? 
} VariantId::StructId(s) => { let struct_data = self.db.struct_data(s); - let fields = struct_data.variant_data.fields().iter().map(|(x, _)| { - ( - PlaceElem::Field(FieldId { parent: s.into(), local_id: x }), - fields_type[x].clone().substitute(Interner, subst), - ) - }); - self.pattern_match_tuple_like( + self.pattern_matching_variant_fields( + shape, + &struct_data.variant_data, + variant, + subst, current, current_else, - args.iter().zip(fields).map(|(x, y)| (y.0, *x, y.1)), - *ellipsis, &cond_place, binding_mode, )? @@ -1247,18 +1278,69 @@ impl MirLowerCtx<'_> { }) } - fn pattern_match_tuple_like( + fn pattern_matching_variant_fields( + &mut self, + shape: AdtPatternShape<'_>, + variant_data: &VariantData, + v: VariantId, + subst: &Substitution, + current: BasicBlockId, + current_else: Option, + cond_place: &Place, + binding_mode: BindingAnnotation, + ) -> Result<(BasicBlockId, Option)> { + let fields_type = self.db.field_types(v); + Ok(match shape { + AdtPatternShape::Record { args } => { + let it = args + .iter() + .map(|x| { + let field_id = + variant_data.field(&x.name).ok_or(MirLowerError::UnresolvedField)?; + Ok(( + PlaceElem::Field(FieldId { parent: v.into(), local_id: field_id }), + x.pat, + fields_type[field_id].clone().substitute(Interner, subst), + )) + }) + .collect::>>()?; + self.pattern_match_adt( + current, + current_else, + it.into_iter(), + cond_place, + binding_mode, + )? + } + AdtPatternShape::Tuple { args, ellipsis } => { + let fields = variant_data.fields().iter().map(|(x, _)| { + ( + PlaceElem::Field(FieldId { parent: v.into(), local_id: x }), + fields_type[x].clone().substitute(Interner, subst), + ) + }); + self.pattern_match_tuple_like( + current, + current_else, + args, + ellipsis, + fields, + cond_place, + binding_mode, + )? 
+ } + AdtPatternShape::Unit => (current, current_else), + }) + } + + fn pattern_match_adt( &mut self, mut current: BasicBlockId, mut current_else: Option, args: impl Iterator, - ellipsis: Option, cond_place: &Place, binding_mode: BindingAnnotation, ) -> Result<(BasicBlockId, Option)> { - if ellipsis.is_some() { - not_supported!("tuple like pattern with ellipsis"); - } for (proj, arg, ty) in args { let mut cond_place = cond_place.clone(); cond_place.projection.push(proj); @@ -1268,6 +1350,25 @@ impl MirLowerCtx<'_> { Ok((current, current_else)) } + fn pattern_match_tuple_like( + &mut self, + current: BasicBlockId, + current_else: Option, + args: &[PatId], + ellipsis: Option, + fields: impl DoubleEndedIterator + Clone, + cond_place: &Place, + binding_mode: BindingAnnotation, + ) -> Result<(BasicBlockId, Option)> { + let (al, ar) = args.split_at(ellipsis.unwrap_or(args.len())); + let it = al + .iter() + .zip(fields.clone()) + .chain(ar.iter().rev().zip(fields.rev())) + .map(|(x, y)| (y.0, *x, y.1)); + self.pattern_match_adt(current, current_else, it, cond_place, binding_mode) + } + fn discr_temp_place(&mut self) -> Place { match &self.discr_temp { Some(x) => x.clone(), diff --git a/lib/la-arena/src/lib.rs b/lib/la-arena/src/lib.rs index ccaaf39917..f6597efd8f 100644 --- a/lib/la-arena/src/lib.rs +++ b/lib/la-arena/src/lib.rs @@ -295,7 +295,7 @@ impl Arena { /// ``` pub fn iter( &self, - ) -> impl Iterator, &T)> + ExactSizeIterator + DoubleEndedIterator { + ) -> impl Iterator, &T)> + ExactSizeIterator + DoubleEndedIterator + Clone { self.data.iter().enumerate().map(|(idx, value)| (Idx::from_raw(RawIdx(idx as u32)), value)) } From 9564773d5e6dff6d430594028383315ca2e202ef Mon Sep 17 00:00:00 2001 From: hkalbasi Date: Tue, 14 Mar 2023 23:01:46 +0330 Subject: [PATCH 1188/1945] Improve pattern matching MIR lowering --- crates/hir-def/src/body/lower.rs | 19 +- crates/hir-def/src/expr.rs | 10 + crates/hir-ty/src/consteval/tests.rs | 112 +++++ crates/hir-ty/src/mir/eval.rs | 10 +- crates/hir-ty/src/mir/lower.rs | 415 ++---------------- .../hir-ty/src/mir/lower/pattern_matching.rs | 399 +++++++++++++++++ crates/hir-ty/src/mir/pretty.rs | 14 +- crates/syntax/src/ast/generated/nodes.rs | 1 + crates/test-utils/src/minicore.rs | 5 +- 9 files changed, 590 insertions(+), 395 deletions(-) create mode 100644 crates/hir-ty/src/mir/lower/pattern_matching.rs diff --git a/crates/hir-def/src/body/lower.rs b/crates/hir-def/src/body/lower.rs index 348b7589ff..7cce23b531 100644 --- a/crates/hir-def/src/body/lower.rs +++ b/crates/hir-def/src/body/lower.rs @@ -1030,9 +1030,16 @@ impl ExprCollector<'_> { .collect(), } } - ast::Pat::LiteralPat(lit) => { + ast::Pat::LiteralPat(lit) => 'b: { if let Some(ast_lit) = lit.literal() { - let expr = Expr::Literal(ast_lit.kind().into()); + let mut hir_lit: Literal = ast_lit.kind().into(); + if lit.minus_token().is_some() { + let Some(h) = hir_lit.negate() else { + break 'b Pat::Missing; + }; + hir_lit = h; + } + let expr = Expr::Literal(hir_lit); let expr_ptr = AstPtr::new(&ast::Expr::Literal(ast_lit)); let expr_id = self.alloc_expr(expr, expr_ptr); Pat::Lit(expr_id) @@ -1144,11 +1151,11 @@ impl From for Literal { FloatTypeWrapper::new(lit.float_value().unwrap_or(Default::default())), builtin, ) - } else if let builtin @ Some(_) = lit.suffix().and_then(BuiltinInt::from_suffix) { - Literal::Int(lit.value().unwrap_or(0) as i128, builtin) - } else { - let builtin = lit.suffix().and_then(BuiltinUint::from_suffix); + } else if let builtin @ Some(_) = 
lit.suffix().and_then(BuiltinUint::from_suffix) { Literal::Uint(lit.value().unwrap_or(0), builtin) + } else { + let builtin = lit.suffix().and_then(BuiltinInt::from_suffix); + Literal::Int(lit.value().unwrap_or(0) as i128, builtin) } } LiteralKind::FloatNumber(lit) => { diff --git a/crates/hir-def/src/expr.rs b/crates/hir-def/src/expr.rs index 5b87582243..8b1528f81e 100644 --- a/crates/hir-def/src/expr.rs +++ b/crates/hir-def/src/expr.rs @@ -92,6 +92,16 @@ pub enum Literal { Float(FloatTypeWrapper, Option), } +impl Literal { + pub fn negate(self) -> Option { + if let Literal::Int(i, k) = self { + Some(Literal::Int(-i, k)) + } else { + None + } + } +} + #[derive(Debug, Clone, Eq, PartialEq)] pub enum Expr { /// This is produced if the syntax tree does not have a required expression piece. diff --git a/crates/hir-ty/src/consteval/tests.rs b/crates/hir-ty/src/consteval/tests.rs index 1d298f9609..ecc163a415 100644 --- a/crates/hir-ty/src/consteval/tests.rs +++ b/crates/hir-ty/src/consteval/tests.rs @@ -685,6 +685,36 @@ fn path_pattern_matching() { ); } +#[test] +fn pattern_matching_literal() { + check_number( + r#" + const fn f(x: i32) -> i32 { + match x { + -1 => 1, + 1 => 10, + _ => 100, + } + } + const GOAL: i32 = f(-1) + f(1) + f(0) + f(-5); + "#, + 211 + ); + check_number( + r#" + const fn f(x: &str) -> u8 { + match x { + "foo" => 1, + "bar" => 10, + _ => 100, + } + } + const GOAL: u8 = f("foo") + f("bar"); + "#, + 11 + ); +} + #[test] fn pattern_matching_ergonomics() { check_number( @@ -698,6 +728,16 @@ fn pattern_matching_ergonomics() { "#, 5, ); + check_number( + r#" + const GOAL: u8 = { + let a = &(2, 3); + let &(x, y) = a; + x + y + }; + "#, + 5, + ); } #[test] @@ -781,6 +821,33 @@ fn function_param_patterns() { ); } +#[test] +fn match_guards() { + check_number( + r#" + //- minicore: option, eq + impl PartialEq for Option { + fn eq(&self, other: &Rhs) -> bool { + match (self, other) { + (Some(x), Some(y)) => x == y, + (None, None) => true, + _ => false, + } + } + } + fn f(x: Option) -> i32 { + match x { + y if y == Some(42) => 42000, + Some(y) => y, + None => 10 + } + } + const GOAL: i32 = f(Some(42)) + f(Some(2)) + f(None); + "#, + 42012, + ); +} + #[test] fn options() { check_number( @@ -983,6 +1050,51 @@ fn function_pointer() { ); } +#[test] +fn enum_variant_as_function() { + check_number( + r#" + //- minicore: option + const GOAL: u8 = { + let f = Some; + f(3).unwrap_or(2) + }; + "#, + 3, + ); + check_number( + r#" + //- minicore: option + const GOAL: u8 = { + let f: fn(u8) -> Option = Some; + f(3).unwrap_or(2) + }; + "#, + 3, + ); + check_number( + r#" + //- minicore: coerce_unsized, index, slice + enum Foo { + Add2(u8), + Mult3(u8), + } + use Foo::*; + const fn f(x: Foo) -> u8 { + match x { + Add2(x) => x + 2, + Mult3(x) => x * 3, + } + } + const GOAL: u8 = { + let x = [Add2, Mult3]; + f(x[0](1)) + f(x[1](5)) + }; + "#, + 18, + ); +} + #[test] fn function_traits() { check_number( diff --git a/crates/hir-ty/src/mir/eval.rs b/crates/hir-ty/src/mir/eval.rs index 787665bb63..450cd5404e 100644 --- a/crates/hir-ty/src/mir/eval.rs +++ b/crates/hir-ty/src/mir/eval.rs @@ -423,6 +423,7 @@ impl Evaluator<'_> { args: impl Iterator>, subst: Substitution, ) -> Result> { + dbg!(body.dbg(self.db)); if let Some(x) = self.stack_depth_limit.checked_sub(1) { self.stack_depth_limit = x; } else { @@ -581,7 +582,14 @@ impl Evaluator<'_> { let mut ty = self.operand_ty(lhs, locals)?; while let TyKind::Ref(_, _, z) = ty.kind(Interner) { ty = z.clone(); - let size = self.size_of_sized(&ty, locals, 
"operand of binary op")?; + let size = if ty.kind(Interner) == &TyKind::Str { + let ns = from_bytes!(usize, &lc[self.ptr_size()..self.ptr_size() * 2]); + lc = &lc[..self.ptr_size()]; + rc = &rc[..self.ptr_size()]; + ns + } else { + self.size_of_sized(&ty, locals, "operand of binary op")? + }; lc = self.read_memory(Address::from_bytes(lc)?, size)?; rc = self.read_memory(Address::from_bytes(rc)?, size)?; } diff --git a/crates/hir-ty/src/mir/lower.rs b/crates/hir-ty/src/mir/lower.rs index d36d9e946c..4da0f87609 100644 --- a/crates/hir-ty/src/mir/lower.rs +++ b/crates/hir-ty/src/mir/lower.rs @@ -4,7 +4,7 @@ use std::{iter, mem, sync::Arc}; use chalk_ir::{BoundVar, ConstData, DebruijnIndex, TyKind}; use hir_def::{ - adt::VariantData, + adt::{VariantData, StructKind}, body::Body, expr::{ Array, BindingAnnotation, BindingId, ExprId, LabelId, Literal, MatchArm, Pat, PatId, @@ -28,6 +28,9 @@ use crate::{ use super::*; mod as_place; +mod pattern_matching; + +use pattern_matching::AdtPatternShape; #[derive(Debug, Clone, Copy)] struct LoopBlocks { @@ -107,12 +110,6 @@ impl MirLowerError { type Result = std::result::Result; -enum AdtPatternShape<'a> { - Tuple { args: &'a [PatId], ellipsis: Option }, - Record { args: &'a [RecordFieldPat] }, - Unit, -} - impl MirLowerCtx<'_> { fn temp(&mut self, ty: Ty) -> Result { if matches!(ty.kind(Interner), TyKind::Slice(_) | TyKind::Dyn(_)) { @@ -275,15 +272,19 @@ impl MirLowerCtx<'_> { Ok(Some(current)) } ValueNs::EnumVariantId(variant_id) => { - let ty = self.infer.type_of_expr[expr_id].clone(); - let current = self.lower_enum_variant( - variant_id, - current, - place, - ty, - vec![], - expr_id.into(), - )?; + let variant_data = &self.db.enum_data(variant_id.parent).variants[variant_id.local_id]; + if variant_data.variant_data.kind() == StructKind::Unit { + let ty = self.infer.type_of_expr[expr_id].clone(); + current = self.lower_enum_variant( + variant_id, + current, + place, + ty, + vec![], + expr_id.into(), + )?; + } + // Otherwise its a tuple like enum, treated like a zero sized function, so no action is needed Ok(Some(current)) } ValueNs::GenericParam(p) => { @@ -517,10 +518,7 @@ impl MirLowerCtx<'_> { let cond_ty = self.expr_ty_after_adjustments(*expr); let mut end = None; for MatchArm { pat, guard, expr } in arms.iter() { - if guard.is_some() { - not_supported!("pattern matching with guard"); - } - let (then, otherwise) = self.pattern_match( + let (then, mut otherwise) = self.pattern_match( current, None, cond_place.clone(), @@ -528,6 +526,16 @@ impl MirLowerCtx<'_> { *pat, BindingAnnotation::Unannotated, )?; + let then = if let &Some(guard) = guard { + let next = self.new_basic_block(); + let o = otherwise.get_or_insert_with(|| self.new_basic_block()); + if let Some((discr, c)) = self.lower_expr_to_some_operand(guard, then)? { + self.set_terminator(c, Terminator::SwitchInt { discr, targets: SwitchTargets::static_if(1, next, *o) }); + } + next + } else { + then + }; if let Some(block) = self.lower_expr_to_place(*expr, place.clone(), then)? 
{ let r = end.get_or_insert_with(|| self.new_basic_block()); self.set_goto(block, *r); @@ -922,7 +930,7 @@ impl MirLowerCtx<'_> { ) -> Result { let subst = match ty.kind(Interner) { TyKind::Adt(_, subst) => subst.clone(), - _ => not_supported!("Non ADT enum"), + _ => implementation_error!("Non ADT enum"), }; self.push_assignment( prev_block, @@ -1020,355 +1028,6 @@ impl MirLowerCtx<'_> { self.push_statement(block, StatementKind::Assign(place, rvalue).with_span(span)); } - /// It gets a `current` unterminated block, appends some statements and possibly a terminator to it to check if - /// the pattern matches and write bindings, and returns two unterminated blocks, one for the matched path (which - /// can be the `current` block) and one for the mismatched path. If the input pattern is irrefutable, the - /// mismatched path block is `None`. - /// - /// By default, it will create a new block for mismatched path. If you already have one, you can provide it with - /// `current_else` argument to save an unneccessary jump. If `current_else` isn't `None`, the result mismatched path - /// wouldn't be `None` as well. Note that this function will add jumps to the beginning of the `current_else` block, - /// so it should be an empty block. - fn pattern_match( - &mut self, - mut current: BasicBlockId, - mut current_else: Option, - mut cond_place: Place, - mut cond_ty: Ty, - pattern: PatId, - mut binding_mode: BindingAnnotation, - ) -> Result<(BasicBlockId, Option)> { - Ok(match &self.body.pats[pattern] { - Pat::Missing => return Err(MirLowerError::IncompleteExpr), - Pat::Wild => (current, current_else), - Pat::Tuple { args, ellipsis } => { - pattern_matching_dereference(&mut cond_ty, &mut binding_mode, &mut cond_place); - let subst = match cond_ty.kind(Interner) { - TyKind::Tuple(_, s) => s, - _ => { - return Err(MirLowerError::TypeError( - "non tuple type matched with tuple pattern", - )) - } - }; - self.pattern_match_tuple_like( - current, - current_else, - args, - *ellipsis, - subst.iter(Interner).enumerate().map(|(i, x)| { - (PlaceElem::TupleField(i), x.assert_ty_ref(Interner).clone()) - }), - &cond_place, - binding_mode, - )? - } - Pat::Or(pats) => { - let then_target = self.new_basic_block(); - let mut finished = false; - for pat in &**pats { - let (next, next_else) = self.pattern_match( - current, - None, - cond_place.clone(), - cond_ty.clone(), - *pat, - binding_mode, - )?; - self.set_goto(next, then_target); - match next_else { - Some(t) => { - current = t; - } - None => { - finished = true; - break; - } - } - } - if !finished { - let ce = *current_else.get_or_insert_with(|| self.new_basic_block()); - self.set_goto(current, ce); - } - (then_target, current_else) - } - Pat::Record { args, .. } => { - let Some(variant) = self.infer.variant_resolution_for_pat(pattern) else { - not_supported!("unresolved variant"); - }; - self.pattern_matching_variant( - cond_ty, - binding_mode, - cond_place, - variant, - current, - pattern.into(), - current_else, - AdtPatternShape::Record { args: &*args }, - )? - } - Pat::Range { .. } => not_supported!("range pattern"), - Pat::Slice { .. } => not_supported!("slice pattern"), - Pat::Path(_) => { - let Some(variant) = self.infer.variant_resolution_for_pat(pattern) else { - not_supported!("unresolved variant"); - }; - self.pattern_matching_variant( - cond_ty, - binding_mode, - cond_place, - variant, - current, - pattern.into(), - current_else, - AdtPatternShape::Unit, - )? 
- } - Pat::Lit(l) => { - let then_target = self.new_basic_block(); - let else_target = current_else.unwrap_or_else(|| self.new_basic_block()); - match &self.body.exprs[*l] { - Expr::Literal(l) => match l { - hir_def::expr::Literal::Int(x, _) => { - self.set_terminator( - current, - Terminator::SwitchInt { - discr: Operand::Copy(cond_place), - targets: SwitchTargets::static_if( - *x as u128, - then_target, - else_target, - ), - }, - ); - } - hir_def::expr::Literal::Uint(x, _) => { - self.set_terminator( - current, - Terminator::SwitchInt { - discr: Operand::Copy(cond_place), - targets: SwitchTargets::static_if(*x, then_target, else_target), - }, - ); - } - _ => not_supported!("non int path literal"), - }, - _ => not_supported!("expression path literal"), - } - (then_target, Some(else_target)) - } - Pat::Bind { id, subpat } => { - let target_place = self.result.binding_locals[*id]; - let mode = self.body.bindings[*id].mode; - if let Some(subpat) = subpat { - (current, current_else) = self.pattern_match( - current, - current_else, - cond_place.clone(), - cond_ty, - *subpat, - binding_mode, - )? - } - if matches!(mode, BindingAnnotation::Ref | BindingAnnotation::RefMut) { - binding_mode = mode; - } - self.push_storage_live(*id, current); - self.push_assignment( - current, - target_place.into(), - match binding_mode { - BindingAnnotation::Unannotated | BindingAnnotation::Mutable => { - Operand::Copy(cond_place).into() - } - BindingAnnotation::Ref => Rvalue::Ref(BorrowKind::Shared, cond_place), - BindingAnnotation::RefMut => Rvalue::Ref( - BorrowKind::Mut { allow_two_phase_borrow: false }, - cond_place, - ), - }, - pattern.into(), - ); - (current, current_else) - } - Pat::TupleStruct { path: _, args, ellipsis } => { - let Some(variant) = self.infer.variant_resolution_for_pat(pattern) else { - not_supported!("unresolved variant"); - }; - self.pattern_matching_variant( - cond_ty, - binding_mode, - cond_place, - variant, - current, - pattern.into(), - current_else, - AdtPatternShape::Tuple { args, ellipsis: *ellipsis }, - )? - } - Pat::Ref { .. } => not_supported!("& pattern"), - Pat::Box { .. } => not_supported!("box pattern"), - Pat::ConstBlock(_) => not_supported!("const block pattern"), - }) - } - - fn pattern_matching_variant( - &mut self, - mut cond_ty: Ty, - mut binding_mode: BindingAnnotation, - mut cond_place: Place, - variant: VariantId, - current: BasicBlockId, - span: MirSpan, - current_else: Option, - shape: AdtPatternShape<'_>, - ) -> Result<(BasicBlockId, Option)> { - pattern_matching_dereference(&mut cond_ty, &mut binding_mode, &mut cond_place); - let subst = match cond_ty.kind(Interner) { - TyKind::Adt(_, s) => s, - _ => return Err(MirLowerError::TypeError("non adt type matched with tuple struct")), - }; - Ok(match variant { - VariantId::EnumVariantId(v) => { - let e = self.db.const_eval_discriminant(v)? as u128; - let next = self.new_basic_block(); - let tmp = self.discr_temp_place(); - self.push_assignment( - current, - tmp.clone(), - Rvalue::Discriminant(cond_place.clone()), - span, - ); - let else_target = current_else.unwrap_or_else(|| self.new_basic_block()); - self.set_terminator( - current, - Terminator::SwitchInt { - discr: Operand::Copy(tmp), - targets: SwitchTargets::static_if(e, next, else_target), - }, - ); - let enum_data = self.db.enum_data(v.parent); - self.pattern_matching_variant_fields( - shape, - &enum_data.variants[v.local_id].variant_data, - variant, - subst, - next, - Some(else_target), - &cond_place, - binding_mode, - )? 
- } - VariantId::StructId(s) => { - let struct_data = self.db.struct_data(s); - self.pattern_matching_variant_fields( - shape, - &struct_data.variant_data, - variant, - subst, - current, - current_else, - &cond_place, - binding_mode, - )? - } - VariantId::UnionId(_) => { - return Err(MirLowerError::TypeError("pattern matching on union")) - } - }) - } - - fn pattern_matching_variant_fields( - &mut self, - shape: AdtPatternShape<'_>, - variant_data: &VariantData, - v: VariantId, - subst: &Substitution, - current: BasicBlockId, - current_else: Option, - cond_place: &Place, - binding_mode: BindingAnnotation, - ) -> Result<(BasicBlockId, Option)> { - let fields_type = self.db.field_types(v); - Ok(match shape { - AdtPatternShape::Record { args } => { - let it = args - .iter() - .map(|x| { - let field_id = - variant_data.field(&x.name).ok_or(MirLowerError::UnresolvedField)?; - Ok(( - PlaceElem::Field(FieldId { parent: v.into(), local_id: field_id }), - x.pat, - fields_type[field_id].clone().substitute(Interner, subst), - )) - }) - .collect::>>()?; - self.pattern_match_adt( - current, - current_else, - it.into_iter(), - cond_place, - binding_mode, - )? - } - AdtPatternShape::Tuple { args, ellipsis } => { - let fields = variant_data.fields().iter().map(|(x, _)| { - ( - PlaceElem::Field(FieldId { parent: v.into(), local_id: x }), - fields_type[x].clone().substitute(Interner, subst), - ) - }); - self.pattern_match_tuple_like( - current, - current_else, - args, - ellipsis, - fields, - cond_place, - binding_mode, - )? - } - AdtPatternShape::Unit => (current, current_else), - }) - } - - fn pattern_match_adt( - &mut self, - mut current: BasicBlockId, - mut current_else: Option, - args: impl Iterator, - cond_place: &Place, - binding_mode: BindingAnnotation, - ) -> Result<(BasicBlockId, Option)> { - for (proj, arg, ty) in args { - let mut cond_place = cond_place.clone(); - cond_place.projection.push(proj); - (current, current_else) = - self.pattern_match(current, current_else, cond_place, ty, arg, binding_mode)?; - } - Ok((current, current_else)) - } - - fn pattern_match_tuple_like( - &mut self, - current: BasicBlockId, - current_else: Option, - args: &[PatId], - ellipsis: Option, - fields: impl DoubleEndedIterator + Clone, - cond_place: &Place, - binding_mode: BindingAnnotation, - ) -> Result<(BasicBlockId, Option)> { - let (al, ar) = args.split_at(ellipsis.unwrap_or(args.len())); - let it = al - .iter() - .zip(fields.clone()) - .chain(ar.iter().rev().zip(fields.rev())) - .map(|(x, y)| (y.0, *x, y.1)); - self.pattern_match_adt(current, current_else, it, cond_place, binding_mode) - } - fn discr_temp_place(&mut self) -> Place { match &self.discr_temp { Some(x) => x.clone(), @@ -1546,22 +1205,6 @@ impl MirLowerCtx<'_> { } } -fn pattern_matching_dereference( - cond_ty: &mut Ty, - binding_mode: &mut BindingAnnotation, - cond_place: &mut Place, -) { - while let Some((ty, _, mu)) = cond_ty.as_reference() { - if mu == Mutability::Mut && *binding_mode != BindingAnnotation::Ref { - *binding_mode = BindingAnnotation::RefMut; - } else { - *binding_mode = BindingAnnotation::Ref; - } - *cond_ty = ty.clone(); - cond_place.projection.push(ProjectionElem::Deref); - } -} - fn cast_kind(source_ty: &Ty, target_ty: &Ty) -> Result { Ok(match (source_ty.kind(Interner), target_ty.kind(Interner)) { (TyKind::Scalar(s), TyKind::Scalar(t)) => match (s, t) { diff --git a/crates/hir-ty/src/mir/lower/pattern_matching.rs b/crates/hir-ty/src/mir/lower/pattern_matching.rs new file mode 100644 index 0000000000..c3ced82aab --- /dev/null +++ 
b/crates/hir-ty/src/mir/lower/pattern_matching.rs @@ -0,0 +1,399 @@ +//! MIR lowering for patterns + +use super::*; + +macro_rules! not_supported { + ($x: expr) => { + return Err(MirLowerError::NotSupported(format!($x))) + }; +} + +pub(super) enum AdtPatternShape<'a> { + Tuple { args: &'a [PatId], ellipsis: Option }, + Record { args: &'a [RecordFieldPat] }, + Unit, +} + +impl MirLowerCtx<'_> { + /// It gets a `current` unterminated block, appends some statements and possibly a terminator to it to check if + /// the pattern matches and write bindings, and returns two unterminated blocks, one for the matched path (which + /// can be the `current` block) and one for the mismatched path. If the input pattern is irrefutable, the + /// mismatched path block is `None`. + /// + /// By default, it will create a new block for mismatched path. If you already have one, you can provide it with + /// `current_else` argument to save an unneccessary jump. If `current_else` isn't `None`, the result mismatched path + /// wouldn't be `None` as well. Note that this function will add jumps to the beginning of the `current_else` block, + /// so it should be an empty block. + pub(super) fn pattern_match( + &mut self, + mut current: BasicBlockId, + mut current_else: Option, + mut cond_place: Place, + mut cond_ty: Ty, + pattern: PatId, + mut binding_mode: BindingAnnotation, + ) -> Result<(BasicBlockId, Option)> { + Ok(match &self.body.pats[pattern] { + Pat::Missing => return Err(MirLowerError::IncompleteExpr), + Pat::Wild => (current, current_else), + Pat::Tuple { args, ellipsis } => { + pattern_matching_dereference(&mut cond_ty, &mut binding_mode, &mut cond_place); + let subst = match cond_ty.kind(Interner) { + TyKind::Tuple(_, s) => s, + _ => { + return Err(MirLowerError::TypeError( + "non tuple type matched with tuple pattern", + )) + } + }; + self.pattern_match_tuple_like( + current, + current_else, + args, + *ellipsis, + subst.iter(Interner).enumerate().map(|(i, x)| { + (PlaceElem::TupleField(i), x.assert_ty_ref(Interner).clone()) + }), + &cond_place, + binding_mode, + )? + } + Pat::Or(pats) => { + let then_target = self.new_basic_block(); + let mut finished = false; + for pat in &**pats { + let (next, next_else) = self.pattern_match( + current, + None, + cond_place.clone(), + cond_ty.clone(), + *pat, + binding_mode, + )?; + self.set_goto(next, then_target); + match next_else { + Some(t) => { + current = t; + } + None => { + finished = true; + break; + } + } + } + if !finished { + let ce = *current_else.get_or_insert_with(|| self.new_basic_block()); + self.set_goto(current, ce); + } + (then_target, current_else) + } + Pat::Record { args, .. } => { + let Some(variant) = self.infer.variant_resolution_for_pat(pattern) else { + not_supported!("unresolved variant"); + }; + self.pattern_matching_variant( + cond_ty, + binding_mode, + cond_place, + variant, + current, + pattern.into(), + current_else, + AdtPatternShape::Record { args: &*args }, + )? + } + Pat::Range { .. } => not_supported!("range pattern"), + Pat::Slice { .. } => not_supported!("slice pattern"), + Pat::Path(_) => { + let Some(variant) = self.infer.variant_resolution_for_pat(pattern) else { + not_supported!("unresolved variant"); + }; + self.pattern_matching_variant( + cond_ty, + binding_mode, + cond_place, + variant, + current, + pattern.into(), + current_else, + AdtPatternShape::Unit, + )? 
+ } + Pat::Lit(l) => match &self.body.exprs[*l] { + Expr::Literal(l) => { + let c = self.lower_literal_to_operand(cond_ty, l)?; + self.pattern_match_const(current_else, current, c, cond_place, pattern)? + } + _ => not_supported!("expression path literal"), + }, + Pat::Bind { id, subpat } => { + let target_place = self.result.binding_locals[*id]; + let mode = self.body.bindings[*id].mode; + if let Some(subpat) = subpat { + (current, current_else) = self.pattern_match( + current, + current_else, + cond_place.clone(), + cond_ty, + *subpat, + binding_mode, + )? + } + if matches!(mode, BindingAnnotation::Ref | BindingAnnotation::RefMut) { + binding_mode = mode; + } + self.push_storage_live(*id, current); + self.push_assignment( + current, + target_place.into(), + match binding_mode { + BindingAnnotation::Unannotated | BindingAnnotation::Mutable => { + Operand::Copy(cond_place).into() + } + BindingAnnotation::Ref => Rvalue::Ref(BorrowKind::Shared, cond_place), + BindingAnnotation::RefMut => Rvalue::Ref( + BorrowKind::Mut { allow_two_phase_borrow: false }, + cond_place, + ), + }, + pattern.into(), + ); + (current, current_else) + } + Pat::TupleStruct { path: _, args, ellipsis } => { + let Some(variant) = self.infer.variant_resolution_for_pat(pattern) else { + not_supported!("unresolved variant"); + }; + self.pattern_matching_variant( + cond_ty, + binding_mode, + cond_place, + variant, + current, + pattern.into(), + current_else, + AdtPatternShape::Tuple { args, ellipsis: *ellipsis }, + )? + } + Pat::Ref { pat, mutability: _ } => { + if let Some((ty, _, _)) = cond_ty.as_reference() { + cond_ty = ty.clone(); + cond_place.projection.push(ProjectionElem::Deref); + self.pattern_match( + current, + current_else, + cond_place, + cond_ty, + *pat, + binding_mode, + )? + } else { + return Err(MirLowerError::TypeError("& pattern for non reference")); + } + } + Pat::Box { .. } => not_supported!("box pattern"), + Pat::ConstBlock(_) => not_supported!("const block pattern"), + }) + } + + fn pattern_match_const( + &mut self, + current_else: Option, + current: BasicBlockId, + c: Operand, + cond_place: Place, + pattern: Idx, + ) -> Result<(BasicBlockId, Option)> { + let then_target = self.new_basic_block(); + let else_target = current_else.unwrap_or_else(|| self.new_basic_block()); + let discr: Place = self.temp(TyBuilder::bool())?.into(); + self.push_assignment( + current, + discr.clone(), + Rvalue::CheckedBinaryOp(BinOp::Eq, c, Operand::Copy(cond_place)), + pattern.into(), + ); + let discr = Operand::Copy(discr); + self.set_terminator( + current, + Terminator::SwitchInt { + discr, + targets: SwitchTargets::static_if(1, then_target, else_target), + }, + ); + Ok((then_target, Some(else_target))) + } + + pub(super) fn pattern_matching_variant( + &mut self, + mut cond_ty: Ty, + mut binding_mode: BindingAnnotation, + mut cond_place: Place, + variant: VariantId, + current: BasicBlockId, + span: MirSpan, + current_else: Option, + shape: AdtPatternShape<'_>, + ) -> Result<(BasicBlockId, Option)> { + pattern_matching_dereference(&mut cond_ty, &mut binding_mode, &mut cond_place); + let subst = match cond_ty.kind(Interner) { + TyKind::Adt(_, s) => s, + _ => return Err(MirLowerError::TypeError("non adt type matched with tuple struct")), + }; + Ok(match variant { + VariantId::EnumVariantId(v) => { + let e = self.db.const_eval_discriminant(v)? 
as u128; + let next = self.new_basic_block(); + let tmp = self.discr_temp_place(); + self.push_assignment( + current, + tmp.clone(), + Rvalue::Discriminant(cond_place.clone()), + span, + ); + let else_target = current_else.unwrap_or_else(|| self.new_basic_block()); + self.set_terminator( + current, + Terminator::SwitchInt { + discr: Operand::Copy(tmp), + targets: SwitchTargets::static_if(e, next, else_target), + }, + ); + let enum_data = self.db.enum_data(v.parent); + self.pattern_matching_variant_fields( + shape, + &enum_data.variants[v.local_id].variant_data, + variant, + subst, + next, + Some(else_target), + &cond_place, + binding_mode, + )? + } + VariantId::StructId(s) => { + let struct_data = self.db.struct_data(s); + self.pattern_matching_variant_fields( + shape, + &struct_data.variant_data, + variant, + subst, + current, + current_else, + &cond_place, + binding_mode, + )? + } + VariantId::UnionId(_) => { + return Err(MirLowerError::TypeError("pattern matching on union")) + } + }) + } + + fn pattern_matching_variant_fields( + &mut self, + shape: AdtPatternShape<'_>, + variant_data: &VariantData, + v: VariantId, + subst: &Substitution, + current: BasicBlockId, + current_else: Option, + cond_place: &Place, + binding_mode: BindingAnnotation, + ) -> Result<(BasicBlockId, Option)> { + let fields_type = self.db.field_types(v); + Ok(match shape { + AdtPatternShape::Record { args } => { + let it = args + .iter() + .map(|x| { + let field_id = + variant_data.field(&x.name).ok_or(MirLowerError::UnresolvedField)?; + Ok(( + PlaceElem::Field(FieldId { parent: v.into(), local_id: field_id }), + x.pat, + fields_type[field_id].clone().substitute(Interner, subst), + )) + }) + .collect::>>()?; + self.pattern_match_adt( + current, + current_else, + it.into_iter(), + cond_place, + binding_mode, + )? + } + AdtPatternShape::Tuple { args, ellipsis } => { + let fields = variant_data.fields().iter().map(|(x, _)| { + ( + PlaceElem::Field(FieldId { parent: v.into(), local_id: x }), + fields_type[x].clone().substitute(Interner, subst), + ) + }); + self.pattern_match_tuple_like( + current, + current_else, + args, + ellipsis, + fields, + cond_place, + binding_mode, + )? 
+ } + AdtPatternShape::Unit => (current, current_else), + }) + } + + fn pattern_match_adt( + &mut self, + mut current: BasicBlockId, + mut current_else: Option, + args: impl Iterator, + cond_place: &Place, + binding_mode: BindingAnnotation, + ) -> Result<(BasicBlockId, Option)> { + for (proj, arg, ty) in args { + let mut cond_place = cond_place.clone(); + cond_place.projection.push(proj); + (current, current_else) = + self.pattern_match(current, current_else, cond_place, ty, arg, binding_mode)?; + } + Ok((current, current_else)) + } + + fn pattern_match_tuple_like( + &mut self, + current: BasicBlockId, + current_else: Option, + args: &[PatId], + ellipsis: Option, + fields: impl DoubleEndedIterator + Clone, + cond_place: &Place, + binding_mode: BindingAnnotation, + ) -> Result<(BasicBlockId, Option)> { + let (al, ar) = args.split_at(ellipsis.unwrap_or(args.len())); + let it = al + .iter() + .zip(fields.clone()) + .chain(ar.iter().rev().zip(fields.rev())) + .map(|(x, y)| (y.0, *x, y.1)); + self.pattern_match_adt(current, current_else, it, cond_place, binding_mode) + } +} + +fn pattern_matching_dereference( + cond_ty: &mut Ty, + binding_mode: &mut BindingAnnotation, + cond_place: &mut Place, +) { + while let Some((ty, _, mu)) = cond_ty.as_reference() { + if mu == Mutability::Mut && *binding_mode != BindingAnnotation::Ref { + *binding_mode = BindingAnnotation::RefMut; + } else { + *binding_mode = BindingAnnotation::Ref; + } + *cond_ty = ty.clone(); + cond_place.projection.push(ProjectionElem::Deref); + } +} diff --git a/crates/hir-ty/src/mir/pretty.rs b/crates/hir-ty/src/mir/pretty.rs index eb0002266d..ab3bb643e7 100644 --- a/crates/hir-ty/src/mir/pretty.rs +++ b/crates/hir-ty/src/mir/pretty.rs @@ -1,6 +1,6 @@ //! A pretty-printer for MIR. -use std::fmt::{Display, Write}; +use std::fmt::{Display, Write, Debug}; use hir_def::{body::Body, expr::BindingId}; use hir_expand::name::Name; @@ -23,6 +23,18 @@ impl MirBody { ctx.for_body(); ctx.result } + + // String with lines is rendered poorly in `dbg!` macros, which I use very much, so this + // function exists to solve that. 
+ pub fn dbg(&self, db: &dyn HirDatabase) -> impl Debug { + struct StringDbg(String); + impl Debug for StringDbg { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.write_str(&self.0) + } + } + StringDbg(self.pretty_print(db)) + } } struct MirPrettyCtx<'a> { diff --git a/crates/syntax/src/ast/generated/nodes.rs b/crates/syntax/src/ast/generated/nodes.rs index fe32484536..a2124f3ac1 100644 --- a/crates/syntax/src/ast/generated/nodes.rs +++ b/crates/syntax/src/ast/generated/nodes.rs @@ -1376,6 +1376,7 @@ pub struct LiteralPat { } impl LiteralPat { pub fn literal(&self) -> Option { support::child(&self.syntax) } + pub fn minus_token(&self) -> Option { support::token(&self.syntax, T![-]) } } #[derive(Debug, Clone, PartialEq, Eq, Hash)] diff --git a/crates/test-utils/src/minicore.rs b/crates/test-utils/src/minicore.rs index 8dd9f306c8..bf592c1f7b 100644 --- a/crates/test-utils/src/minicore.rs +++ b/crates/test-utils/src/minicore.rs @@ -597,7 +597,10 @@ pub mod option { loop {} } pub fn unwrap_or(self, default: T) -> T { - loop {} + match self { + Some(val) => val, + None => default, + } } // region:fn pub fn and_then(self, f: F) -> Option From eb4939e217960ee77d79ec436a39f3cead646de4 Mon Sep 17 00:00:00 2001 From: hkalbasi Date: Fri, 17 Mar 2023 14:02:55 +0330 Subject: [PATCH 1189/1945] Support overloaded deref MIR lowering --- crates/hir-def/src/body/lower.rs | 42 ++-- crates/hir-ty/src/consteval/tests.rs | 27 ++- crates/hir-ty/src/infer.rs | 3 + crates/hir-ty/src/infer/expr.rs | 47 ++++- crates/hir-ty/src/infer/mutability.rs | 182 ++++++++++++++++++ crates/hir-ty/src/mir/eval.rs | 33 ++-- crates/hir-ty/src/mir/lower.rs | 2 +- crates/hir-ty/src/mir/lower/as_place.rs | 26 ++- crates/hir-ty/src/mir/pretty.rs | 4 +- crates/hir-ty/src/tests/coercion.rs | 3 +- crates/hir-ty/src/tests/method_resolution.rs | 3 +- .../src/handlers/mutability_errors.rs | 31 ++- crates/ide/src/inlay_hints/chaining.rs | 12 +- crates/ide/src/syntax_highlighting/tests.rs | 2 +- crates/syntax/rust.ungram | 2 +- crates/syntax/src/ast/generated/nodes.rs | 2 +- crates/syntax/src/tests/sourcegen_ast.rs | 1 + crates/test-utils/src/minicore.rs | 62 ++++++ 18 files changed, 398 insertions(+), 86 deletions(-) create mode 100644 crates/hir-ty/src/infer/mutability.rs diff --git a/crates/hir-def/src/body/lower.rs b/crates/hir-def/src/body/lower.rs index 7cce23b531..448821d384 100644 --- a/crates/hir-def/src/body/lower.rs +++ b/crates/hir-def/src/body/lower.rs @@ -609,21 +609,12 @@ impl ExprCollector<'_> { fn collect_try_operator(&mut self, syntax_ptr: AstPtr, e: ast::TryExpr) -> ExprId { let (try_branch, cf_continue, cf_break, try_from_residual) = 'if_chain: { if let Some(try_branch) = LangItem::TryTraitBranch.path(self.db, self.krate) { - if let Some(cf_continue) = - LangItem::ControlFlowContinue.path(self.db, self.krate) - { - if let Some(cf_break) = - LangItem::ControlFlowBreak.path(self.db, self.krate) - { + if let Some(cf_continue) = LangItem::ControlFlowContinue.path(self.db, self.krate) { + if let Some(cf_break) = LangItem::ControlFlowBreak.path(self.db, self.krate) { if let Some(try_from_residual) = LangItem::TryTraitFromResidual.path(self.db, self.krate) { - break 'if_chain ( - try_branch, - cf_continue, - cf_break, - try_from_residual, - ); + break 'if_chain (try_branch, cf_continue, cf_break, try_from_residual); } } } @@ -634,15 +625,10 @@ impl ExprCollector<'_> { let operand = self.collect_expr_opt(e.expr()); let try_branch = self.alloc_expr(Expr::Path(try_branch), syntax_ptr.clone()); let expr = 
self.alloc_expr( - Expr::Call { - callee: try_branch, - args: Box::new([operand]), - is_assignee_expr: false, - }, + Expr::Call { callee: try_branch, args: Box::new([operand]), is_assignee_expr: false }, syntax_ptr.clone(), ); - let continue_binding = - self.alloc_binding(name![v1], BindingAnnotation::Unannotated); + let continue_binding = self.alloc_binding(name![v1], BindingAnnotation::Unannotated); let continue_bpat = self.alloc_pat_desugared(Pat::Bind { id: continue_binding, subpat: None }); self.add_definition_to_binding(continue_binding, continue_bpat); @@ -656,8 +642,7 @@ impl ExprCollector<'_> { expr: self.alloc_expr(Expr::Path(Path::from(name![v1])), syntax_ptr.clone()), }; let break_binding = self.alloc_binding(name![v1], BindingAnnotation::Unannotated); - let break_bpat = - self.alloc_pat_desugared(Pat::Bind { id: break_binding, subpat: None }); + let break_bpat = self.alloc_pat_desugared(Pat::Bind { id: break_binding, subpat: None }); self.add_definition_to_binding(break_binding, break_bpat); let break_arm = MatchArm { pat: self.alloc_pat_desugared(Pat::TupleStruct { @@ -667,10 +652,8 @@ impl ExprCollector<'_> { }), guard: None, expr: { - let x = - self.alloc_expr(Expr::Path(Path::from(name![v1])), syntax_ptr.clone()); - let callee = - self.alloc_expr(Expr::Path(try_from_residual), syntax_ptr.clone()); + let x = self.alloc_expr(Expr::Path(Path::from(name![v1])), syntax_ptr.clone()); + let callee = self.alloc_expr(Expr::Path(try_from_residual), syntax_ptr.clone()); let result = self.alloc_expr( Expr::Call { callee, args: Box::new([x]), is_assignee_expr: false }, syntax_ptr.clone(), @@ -1030,8 +1013,9 @@ impl ExprCollector<'_> { .collect(), } } - ast::Pat::LiteralPat(lit) => 'b: { - if let Some(ast_lit) = lit.literal() { + // FIXME: rustfmt removes this label if it is a block and not a loop + ast::Pat::LiteralPat(lit) => 'b: loop { + break if let Some(ast_lit) = lit.literal() { let mut hir_lit: Literal = ast_lit.kind().into(); if lit.minus_token().is_some() { let Some(h) = hir_lit.negate() else { @@ -1045,8 +1029,8 @@ impl ExprCollector<'_> { Pat::Lit(expr_id) } else { Pat::Missing - } - } + }; + }, ast::Pat::RestPat(_) => { // `RestPat` requires special handling and should not be mapped // to a Pat. Here we are using `Pat::Missing` as a fallback for diff --git a/crates/hir-ty/src/consteval/tests.rs b/crates/hir-ty/src/consteval/tests.rs index ecc163a415..a658bfa0c9 100644 --- a/crates/hir-ty/src/consteval/tests.rs +++ b/crates/hir-ty/src/consteval/tests.rs @@ -166,8 +166,7 @@ fn reference_autoderef() { #[test] fn overloaded_deref() { - // FIXME: We should support this. 
- check_fail( + check_number( r#" //- minicore: deref_mut struct Foo; @@ -185,9 +184,7 @@ fn overloaded_deref() { *y + *x }; "#, - ConstEvalError::MirLowerError(MirLowerError::NotSupported( - "explicit overloaded deref".into(), - )), + 10, ); } @@ -698,7 +695,7 @@ fn pattern_matching_literal() { } const GOAL: i32 = f(-1) + f(1) + f(0) + f(-5); "#, - 211 + 211, ); check_number( r#" @@ -711,7 +708,7 @@ fn pattern_matching_literal() { } const GOAL: u8 = f("foo") + f("bar"); "#, - 11 + 11, ); } @@ -1116,6 +1113,22 @@ fn function_traits() { "#, 15, ); + check_number( + r#" + //- minicore: coerce_unsized, fn + fn add2(x: u8) -> u8 { + x + 2 + } + fn call(f: &dyn Fn(u8) -> u8, x: u8) -> u8 { + f(x) + } + fn call_mut(f: &mut dyn FnMut(u8) -> u8, x: u8) -> u8 { + f(x) + } + const GOAL: u8 = call(&add2, 3) + call_mut(&mut add2, 3); + "#, + 10, + ); check_number( r#" //- minicore: fn diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs index 06d74215ff..d1b9aff36d 100644 --- a/crates/hir-ty/src/infer.rs +++ b/crates/hir-ty/src/infer.rs @@ -57,6 +57,7 @@ mod expr; mod pat; mod coerce; mod closure; +mod mutability; /// The entry point of type inference. pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc { @@ -99,6 +100,8 @@ pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc InferenceContext<'a> { if let Some(fn_x) = func { match fn_x { FnTrait::FnOnce => (), - FnTrait::FnMut => adjustments.push(Adjustment::borrow( - Mutability::Mut, - derefed_callee.clone(), - )), - FnTrait::Fn => adjustments.push(Adjustment::borrow( - Mutability::Not, - derefed_callee.clone(), - )), + FnTrait::FnMut => { + if !matches!( + derefed_callee.kind(Interner), + TyKind::Ref(Mutability::Mut, _, _) + ) { + adjustments.push(Adjustment::borrow( + Mutability::Mut, + derefed_callee.clone(), + )); + } + } + FnTrait::Fn => { + if !matches!( + derefed_callee.kind(Interner), + TyKind::Ref(Mutability::Not, _, _) + ) { + adjustments.push(Adjustment::borrow( + Mutability::Not, + derefed_callee.clone(), + )); + } + } } let trait_ = fn_x .get_id(self.db, self.trait_env.krate) @@ -673,6 +687,23 @@ impl<'a> InferenceContext<'a> { // FIXME: Note down method resolution her match op { UnaryOp::Deref => { + if let Some(deref_trait) = self + .db + .lang_item(self.table.trait_env.krate, LangItem::Deref) + .and_then(|l| l.as_trait()) + { + if let Some(deref_fn) = + self.db.trait_data(deref_trait).method_by_name(&name![deref]) + { + // FIXME: this is wrong in multiple ways, subst is empty, and we emit it even for builtin deref (note that + // the mutability is not wrong, and will be fixed in `self.infer_mut`). + self.write_method_resolution( + tgt_expr, + deref_fn, + Substitution::empty(Interner), + ); + } + } autoderef::deref(&mut self.table, inner_ty).unwrap_or_else(|| self.err_ty()) } UnaryOp::Neg => { diff --git a/crates/hir-ty/src/infer/mutability.rs b/crates/hir-ty/src/infer/mutability.rs new file mode 100644 index 0000000000..8e3d71788f --- /dev/null +++ b/crates/hir-ty/src/infer/mutability.rs @@ -0,0 +1,182 @@ +//! Finds if an expression is an immutable context or a mutable context, which is used in selecting +//! between `Deref` and `DerefMut` or `Index` and `IndexMut` or similar. 
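In plain Rust terms, the distinction this new pass draws looks like the following standalone sketch (an editorial illustration with made-up names, not part of the patch): the same dereferenced place resolves to `Deref::deref` in a read-only position and to `DerefMut::deref_mut` once the surrounding expression needs a mutable place.

    use std::ops::{Deref, DerefMut};

    struct Wrapper(String);

    impl Deref for Wrapper {
        type Target = String;
        fn deref(&self) -> &String { &self.0 }
    }

    impl DerefMut for Wrapper {
        fn deref_mut(&mut self) -> &mut String { &mut self.0 }
    }

    fn main() {
        let mut w = Wrapper(String::from("mir"));
        let n = w.len();  // immutable context: autoderef goes through `Deref::deref`
        w.push('!');      // mutable context: autoderef goes through `DerefMut::deref_mut`
        assert_eq!(n, 3);
    }
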
+ +use chalk_ir::Mutability; +use hir_def::{ + expr::{Array, BindingAnnotation, Expr, ExprId, PatId, Statement, UnaryOp}, + lang_item::LangItem, +}; +use hir_expand::name; + +use crate::{lower::lower_to_chalk_mutability, Adjust, AutoBorrow, OverloadedDeref}; + +use super::InferenceContext; + +impl<'a> InferenceContext<'a> { + pub(crate) fn infer_mut_body(&mut self) { + self.infer_mut_expr(self.body.body_expr, Mutability::Not); + } + + fn infer_mut_expr(&mut self, tgt_expr: ExprId, mut mutability: Mutability) { + let mut v = vec![]; + let adjustments = self.result.expr_adjustments.get_mut(&tgt_expr).unwrap_or(&mut v); + for adj in adjustments.iter_mut().rev() { + match &mut adj.kind { + Adjust::NeverToAny | Adjust::Deref(None) | Adjust::Pointer(_) => (), + Adjust::Deref(Some(d)) => *d = OverloadedDeref(Some(mutability)), + Adjust::Borrow(b) => match b { + AutoBorrow::Ref(m) | AutoBorrow::RawPtr(m) => mutability = *m, + }, + } + } + self.infer_mut_expr_without_adjust(tgt_expr, mutability); + } + + fn infer_mut_expr_without_adjust(&mut self, tgt_expr: ExprId, mutability: Mutability) { + match &self.body[tgt_expr] { + Expr::Missing => (), + &Expr::If { condition, then_branch, else_branch } => { + self.infer_mut_expr(condition, Mutability::Not); + self.infer_mut_expr(then_branch, Mutability::Not); + if let Some(else_branch) = else_branch { + self.infer_mut_expr(else_branch, Mutability::Not); + } + } + Expr::Let { pat, expr } => self.infer_mut_expr(*expr, self.pat_bound_mutability(*pat)), + Expr::Block { id: _, statements, tail, label: _ } + | Expr::TryBlock { id: _, statements, tail } + | Expr::Async { id: _, statements, tail } + | Expr::Const { id: _, statements, tail } + | Expr::Unsafe { id: _, statements, tail } => { + for st in statements.iter() { + match st { + Statement::Let { pat, type_ref: _, initializer, else_branch } => { + if let Some(i) = initializer { + self.infer_mut_expr(*i, self.pat_bound_mutability(*pat)); + } + if let Some(e) = else_branch { + self.infer_mut_expr(*e, Mutability::Not); + } + } + Statement::Expr { expr, has_semi: _ } => { + self.infer_mut_expr(*expr, Mutability::Not); + } + } + } + if let Some(tail) = tail { + self.infer_mut_expr(*tail, Mutability::Not); + } + } + &Expr::For { iterable: c, pat: _, body, label: _ } + | &Expr::While { condition: c, body, label: _ } => { + self.infer_mut_expr(c, Mutability::Not); + self.infer_mut_expr(body, Mutability::Not); + } + Expr::MethodCall { receiver: x, method_name: _, args, generic_args: _ } + | Expr::Call { callee: x, args, is_assignee_expr: _ } => { + self.infer_mut_not_expr_iter(args.iter().copied().chain(Some(*x))); + } + Expr::Match { expr, arms } => { + let m = self.pat_iter_bound_mutability(arms.iter().map(|x| x.pat)); + self.infer_mut_expr(*expr, m); + for arm in arms.iter() { + self.infer_mut_expr(arm.expr, Mutability::Not); + } + } + Expr::Yield { expr } + | Expr::Yeet { expr } + | Expr::Return { expr } + | Expr::Break { expr, label: _ } => { + if let &Some(expr) = expr { + self.infer_mut_expr(expr, Mutability::Not); + } + } + Expr::RecordLit { path: _, fields, spread, ellipsis: _, is_assignee_expr: _ } => { + self.infer_mut_not_expr_iter(fields.iter().map(|x| x.expr).chain(*spread)) + } + &Expr::Index { base, index } => { + self.infer_mut_expr(base, mutability); + self.infer_mut_expr(index, Mutability::Not); + } + Expr::UnaryOp { expr, op: UnaryOp::Deref } => { + if let Some((f, _)) = self.result.method_resolutions.get_mut(&tgt_expr) { + if mutability == Mutability::Mut { + if let Some(deref_trait) = self + .db 
+ .lang_item(self.table.trait_env.krate, LangItem::DerefMut) + .and_then(|l| l.as_trait()) + { + if let Some(deref_fn) = + self.db.trait_data(deref_trait).method_by_name(&name![deref_mut]) + { + *f = deref_fn; + } + } + } + } + self.infer_mut_expr(*expr, mutability); + } + Expr::Field { expr, name: _ } => { + self.infer_mut_expr(*expr, mutability); + } + Expr::UnaryOp { expr, op: _ } + | Expr::Range { lhs: Some(expr), rhs: None, range_type: _ } + | Expr::Range { rhs: Some(expr), lhs: None, range_type: _ } + | Expr::Await { expr } + | Expr::Box { expr } + | Expr::Loop { body: expr, label: _ } + | Expr::Cast { expr, type_ref: _ } => { + self.infer_mut_expr(*expr, Mutability::Not); + } + Expr::Ref { expr, rawness: _, mutability } => { + let mutability = lower_to_chalk_mutability(*mutability); + self.infer_mut_expr(*expr, mutability); + } + Expr::Array(Array::Repeat { initializer: lhs, repeat: rhs }) + | Expr::BinaryOp { lhs, rhs, op: _ } + | Expr::Range { lhs: Some(lhs), rhs: Some(rhs), range_type: _ } => { + self.infer_mut_expr(*lhs, Mutability::Not); + self.infer_mut_expr(*rhs, Mutability::Not); + } + // not implemented + Expr::Closure { .. } => (), + Expr::Tuple { exprs, is_assignee_expr: _ } + | Expr::Array(Array::ElementList { elements: exprs, is_assignee_expr: _ }) => { + self.infer_mut_not_expr_iter(exprs.iter().copied()); + } + // These don't need any action, as they don't have sub expressions + Expr::Range { lhs: None, rhs: None, range_type: _ } + | Expr::Literal(_) + | Expr::Path(_) + | Expr::Continue { .. } + | Expr::Underscore => (), + } + } + + fn infer_mut_not_expr_iter(&mut self, exprs: impl Iterator) { + for expr in exprs { + self.infer_mut_expr(expr, Mutability::Not); + } + } + + fn pat_iter_bound_mutability(&self, mut pat: impl Iterator) -> Mutability { + if pat.any(|p| self.pat_bound_mutability(p) == Mutability::Mut) { + Mutability::Mut + } else { + Mutability::Not + } + } + + /// Checks if the pat contains a `ref mut` binding. Such paths makes the context of bounded expressions + /// mutable. For example in `let (ref mut x0, ref x1) = *x;` we need to use `DerefMut` for `*x` but in + /// `let (ref x0, ref x1) = *x;` we should use `Deref`. + fn pat_bound_mutability(&self, pat: PatId) -> Mutability { + let mut r = Mutability::Not; + self.body.walk_bindings_in_pat(pat, |b| { + if self.body.bindings[b].mode == BindingAnnotation::RefMut { + r = Mutability::Mut; + } + }); + r + } +} diff --git a/crates/hir-ty/src/mir/eval.rs b/crates/hir-ty/src/mir/eval.rs index 450cd5404e..f8545e88ad 100644 --- a/crates/hir-ty/src/mir/eval.rs +++ b/crates/hir-ty/src/mir/eval.rs @@ -1,6 +1,6 @@ //! This module provides a MIR interpreter, which is used in const eval. 
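The `pat_bound_mutability` helper above keys this choice off `ref mut` bindings in the pattern. A compilable sketch of the two cases it separates (illustrative names, not taken from the patch):

    use std::ops::{Deref, DerefMut};

    struct Pair((i32, u8));

    impl Deref for Pair {
        type Target = (i32, u8);
        fn deref(&self) -> &(i32, u8) { &self.0 }
    }

    impl DerefMut for Pair {
        fn deref_mut(&mut self) -> &mut (i32, u8) { &mut self.0 }
    }

    fn main() {
        let mut p = Pair((1, 2));
        // A `ref mut` binding makes `*p` a mutable use site, so `deref_mut` is needed here...
        let (ref mut x0, _) = *p;
        *x0 += 1;
        // ...while an all-shared pattern only needs `deref`.
        let (ref y0, ref y1) = *p;
        assert_eq!((*y0, *y1), (2, 2));
    }
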
-use std::{borrow::Cow, collections::HashMap, iter, sync::Arc}; +use std::{borrow::Cow, collections::HashMap, iter, ops::Range, sync::Arc}; use base_db::CrateId; use chalk_ir::{ @@ -109,6 +109,10 @@ impl Interval { fn get<'a>(&self, memory: &'a Evaluator<'a>) -> Result<&'a [u8]> { memory.read_memory(self.addr, self.size) } + + fn slice(self, range: Range) -> Interval { + Interval { addr: self.addr.offset(range.start), size: range.len() } + } } enum IntervalOrOwned { @@ -423,7 +427,6 @@ impl Evaluator<'_> { args: impl Iterator>, subst: Substitution, ) -> Result> { - dbg!(body.dbg(self.db)); if let Some(x) = self.stack_depth_limit.checked_sub(1) { self.stack_depth_limit = x; } else { @@ -1360,24 +1363,24 @@ impl Evaluator<'_> { locals: &Locals<'_>, ) -> Result<()> { let func = args.get(0).ok_or(MirEvalError::TypeError("fn trait with no arg"))?; - let ref_func_ty = self.operand_ty(func, locals)?; - let func_ty = match ft { - FnTrait::FnOnce => ref_func_ty, - FnTrait::FnMut | FnTrait::Fn => match ref_func_ty.as_reference() { - Some(x) => x.0.clone(), - None => return Err(MirEvalError::TypeError("fn trait with non-reference arg")), - }, - }; + let mut func_ty = self.operand_ty(func, locals)?; + let mut func_data = self.eval_operand(func, locals)?; + while let TyKind::Ref(_, _, z) = func_ty.kind(Interner) { + func_ty = z.clone(); + if matches!(func_ty.kind(Interner), TyKind::Dyn(_)) { + let id = + from_bytes!(usize, &func_data.get(self)?[self.ptr_size()..self.ptr_size() * 2]); + func_data = func_data.slice(0..self.ptr_size()); + func_ty = self.vtable_map.ty(id)?.clone(); + } + let size = self.size_of_sized(&func_ty, locals, "self type of fn trait")?; + func_data = Interval { addr: Address::from_bytes(func_data.get(self)?)?, size }; + } match &func_ty.data(Interner).kind { TyKind::FnDef(def, subst) => { self.exec_fn_def(*def, subst, destination, &args[1..], locals)?; } TyKind::Function(_) => { - let mut func_data = self.eval_operand(func, locals)?; - if let FnTrait::FnMut | FnTrait::Fn = ft { - let addr = Address::from_bytes(func_data.get(self)?)?; - func_data = Interval { addr, size: self.ptr_size() }; - } self.exec_fn_pointer(func_data, destination, &args[1..], locals)?; } x => not_supported!("Call {ft:?} trait methods with type {x:?}"), diff --git a/crates/hir-ty/src/mir/lower.rs b/crates/hir-ty/src/mir/lower.rs index 4da0f87609..04175fb4d9 100644 --- a/crates/hir-ty/src/mir/lower.rs +++ b/crates/hir-ty/src/mir/lower.rs @@ -4,7 +4,7 @@ use std::{iter, mem, sync::Arc}; use chalk_ir::{BoundVar, ConstData, DebruijnIndex, TyKind}; use hir_def::{ - adt::{VariantData, StructKind}, + adt::{StructKind, VariantData}, body::Body, expr::{ Array, BindingAnnotation, BindingId, ExprId, LabelId, Literal, MatchArm, Pat, PatId, diff --git a/crates/hir-ty/src/mir/lower/as_place.rs b/crates/hir-ty/src/mir/lower/as_place.rs index b683dd7f90..c6f4f66ada 100644 --- a/crates/hir-ty/src/mir/lower/as_place.rs +++ b/crates/hir-ty/src/mir/lower/as_place.rs @@ -145,10 +145,32 @@ impl MirLowerCtx<'_> { self.expr_ty(*expr).kind(Interner), TyKind::Ref(..) | TyKind::Raw(..) ) { - let Some(_) = self.lower_expr_as_place(current, *expr, true)? else { + let Some((p, current)) = self.lower_expr_as_place(current, *expr, true)? 
else { return Ok(None); }; - not_supported!("explicit overloaded deref"); + return self.lower_overloaded_deref( + current, + p, + self.expr_ty_after_adjustments(*expr), + self.expr_ty(expr_id), + expr_id.into(), + 'b: { + if let Some((f, _)) = self.infer.method_resolution(expr_id) { + if let Some(deref_trait) = + self.resolve_lang_item(LangItem::DerefMut)?.as_trait() + { + if let Some(deref_fn) = self + .db + .trait_data(deref_trait) + .method_by_name(&name![deref_mut]) + { + break 'b deref_fn == f; + } + } + } + false + }, + ); } let Some((mut r, current)) = self.lower_expr_as_place(current, *expr, true)? else { return Ok(None); diff --git a/crates/hir-ty/src/mir/pretty.rs b/crates/hir-ty/src/mir/pretty.rs index ab3bb643e7..9ec2913dce 100644 --- a/crates/hir-ty/src/mir/pretty.rs +++ b/crates/hir-ty/src/mir/pretty.rs @@ -1,6 +1,6 @@ //! A pretty-printer for MIR. -use std::fmt::{Display, Write, Debug}; +use std::fmt::{Debug, Display, Write}; use hir_def::{body::Body, expr::BindingId}; use hir_expand::name::Name; @@ -24,7 +24,7 @@ impl MirBody { ctx.result } - // String with lines is rendered poorly in `dbg!` macros, which I use very much, so this + // String with lines is rendered poorly in `dbg` macros, which I use very much, so this // function exists to solve that. pub fn dbg(&self, db: &dyn HirDatabase) -> impl Debug { struct StringDbg(String); diff --git a/crates/hir-ty/src/tests/coercion.rs b/crates/hir-ty/src/tests/coercion.rs index 696bdef03f..9f624cc32c 100644 --- a/crates/hir-ty/src/tests/coercion.rs +++ b/crates/hir-ty/src/tests/coercion.rs @@ -258,7 +258,6 @@ fn test() { #[test] fn coerce_autoderef_block() { - // FIXME: We should know mutability in overloaded deref check_no_mismatches( r#" //- minicore: deref @@ -268,7 +267,7 @@ fn takes_ref_str(x: &str) {} fn returns_string() -> String { loop {} } fn test() { takes_ref_str(&{ returns_string() }); - // ^^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(None), Deref(Some(OverloadedDeref(None))), Borrow(Ref(Not)) + // ^^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(None), Deref(Some(OverloadedDeref(Some(Not)))), Borrow(Ref(Not)) } "#, ); diff --git a/crates/hir-ty/src/tests/method_resolution.rs b/crates/hir-ty/src/tests/method_resolution.rs index 378d478336..f3ca93672d 100644 --- a/crates/hir-ty/src/tests/method_resolution.rs +++ b/crates/hir-ty/src/tests/method_resolution.rs @@ -1255,7 +1255,6 @@ fn foo(a: &T) { #[test] fn autoderef_visibility_field() { - // FIXME: We should know mutability in overloaded deref check( r#" //- minicore: deref @@ -1277,7 +1276,7 @@ mod a { mod b { fn foo() { let x = super::a::Bar::new().0; - // ^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(Some(OverloadedDeref(None))) + // ^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(Some(OverloadedDeref(Some(Not)))) // ^^^^^^^^^^^^^^^^^^^^^^ type: char } } diff --git a/crates/ide-diagnostics/src/handlers/mutability_errors.rs b/crates/ide-diagnostics/src/handlers/mutability_errors.rs index 03951ea2bf..83c61f73db 100644 --- a/crates/ide-diagnostics/src/handlers/mutability_errors.rs +++ b/crates/ide-diagnostics/src/handlers/mutability_errors.rs @@ -566,7 +566,6 @@ fn f(x: [(i32, u8); 10]) { #[test] fn overloaded_deref() { - // FIXME: check for false negative check_diagnostics( r#" //- minicore: deref_mut @@ -574,22 +573,36 @@ use core::ops::{Deref, DerefMut}; struct Foo; impl Deref for Foo { - type Target = i32; - fn deref(&self) -> &i32 { - &5 + type Target = (i32, u8); + fn deref(&self) -> &(i32, u8) { + &(5, 2) } } impl DerefMut for Foo { - fn deref_mut(&mut self) -> &mut i32 { - &mut 5 + fn 
deref_mut(&mut self) -> &mut (i32, u8) { + &mut (5, 2) } } fn f() { - let x = Foo; + let mut x = Foo; + //^^^^^ 💡 weak: variable does not need to be mutable let y = &*x; let x = Foo; - let mut x = Foo; - let y: &mut i32 = &mut x; + let y = &mut *x; + //^^ 💡 error: cannot mutate immutable variable `x` + let x = Foo; + let x = Foo; + let y: &mut (i32, u8) = &mut x; + //^^^^^^ 💡 error: cannot mutate immutable variable `x` + let ref mut y = *x; + //^^ 💡 error: cannot mutate immutable variable `x` + let (ref mut y, _) = *x; + //^^ 💡 error: cannot mutate immutable variable `x` + match *x { + //^^ 💡 error: cannot mutate immutable variable `x` + (ref y, _) => (), + (_, ref mut y) => (), + } } "#, ); diff --git a/crates/ide/src/inlay_hints/chaining.rs b/crates/ide/src/inlay_hints/chaining.rs index 1e1771259b..11e6dc05fa 100644 --- a/crates/ide/src/inlay_hints/chaining.rs +++ b/crates/ide/src/inlay_hints/chaining.rs @@ -435,7 +435,7 @@ fn main() { file_id: FileId( 1, ), - range: 3415..3423, + range: 5805..5813, }, ), tooltip: "", @@ -448,7 +448,7 @@ fn main() { file_id: FileId( 1, ), - range: 3447..3451, + range: 5837..5841, }, ), tooltip: "", @@ -468,7 +468,7 @@ fn main() { file_id: FileId( 1, ), - range: 3415..3423, + range: 5805..5813, }, ), tooltip: "", @@ -481,7 +481,7 @@ fn main() { file_id: FileId( 1, ), - range: 3447..3451, + range: 5837..5841, }, ), tooltip: "", @@ -501,7 +501,7 @@ fn main() { file_id: FileId( 1, ), - range: 3415..3423, + range: 5805..5813, }, ), tooltip: "", @@ -514,7 +514,7 @@ fn main() { file_id: FileId( 1, ), - range: 3447..3451, + range: 5837..5841, }, ), tooltip: "", diff --git a/crates/ide/src/syntax_highlighting/tests.rs b/crates/ide/src/syntax_highlighting/tests.rs index ac9bd8e39d..5cc3bad04b 100644 --- a/crates/ide/src/syntax_highlighting/tests.rs +++ b/crates/ide/src/syntax_highlighting/tests.rs @@ -1126,5 +1126,5 @@ fn benchmark_syntax_highlighting_parser() { .filter(|it| it.highlight.tag == HlTag::Symbol(SymbolKind::Function)) .count() }; - assert_eq!(hash, 1608); + assert_eq!(hash, 1170); } diff --git a/crates/syntax/rust.ungram b/crates/syntax/rust.ungram index 548b5ba8b8..1c15a606f9 100644 --- a/crates/syntax/rust.ungram +++ b/crates/syntax/rust.ungram @@ -613,7 +613,7 @@ Pat = | ConstBlockPat LiteralPat = - Literal + '-'? Literal IdentPat = Attr* 'ref'? 'mut'? Name ('@' Pat)? diff --git a/crates/syntax/src/ast/generated/nodes.rs b/crates/syntax/src/ast/generated/nodes.rs index a2124f3ac1..0e84aca5c7 100644 --- a/crates/syntax/src/ast/generated/nodes.rs +++ b/crates/syntax/src/ast/generated/nodes.rs @@ -1375,8 +1375,8 @@ pub struct LiteralPat { pub(crate) syntax: SyntaxNode, } impl LiteralPat { - pub fn literal(&self) -> Option { support::child(&self.syntax) } pub fn minus_token(&self) -> Option { support::token(&self.syntax, T![-]) } + pub fn literal(&self) -> Option { support::child(&self.syntax) } } #[derive(Debug, Clone, PartialEq, Eq, Hash)] diff --git a/crates/syntax/src/tests/sourcegen_ast.rs b/crates/syntax/src/tests/sourcegen_ast.rs index e954b58251..77a8363a18 100644 --- a/crates/syntax/src/tests/sourcegen_ast.rs +++ b/crates/syntax/src/tests/sourcegen_ast.rs @@ -535,6 +535,7 @@ impl Field { "!" => "excl", "*" => "star", "&" => "amp", + "-" => "minus", "_" => "underscore", "." => "dot", ".." 
=> "dotdot", diff --git a/crates/test-utils/src/minicore.rs b/crates/test-utils/src/minicore.rs index bf592c1f7b..118b9ad631 100644 --- a/crates/test-utils/src/minicore.rs +++ b/crates/test-utils/src/minicore.rs @@ -375,6 +375,68 @@ pub mod ops { type Output; extern "rust-call" fn call_once(self, args: Args) -> Self::Output; } + + mod impls { + use crate::marker::Tuple; + + #[stable(feature = "rust1", since = "1.0.0")] + #[rustc_const_unstable(feature = "const_fn_trait_ref_impls", issue = "101803")] + impl const Fn for &F + where + F: ~const Fn, + { + extern "rust-call" fn call(&self, args: A) -> F::Output { + (**self).call(args) + } + } + + #[stable(feature = "rust1", since = "1.0.0")] + #[rustc_const_unstable(feature = "const_fn_trait_ref_impls", issue = "101803")] + impl const FnMut for &F + where + F: ~const Fn, + { + extern "rust-call" fn call_mut(&mut self, args: A) -> F::Output { + (**self).call(args) + } + } + + #[stable(feature = "rust1", since = "1.0.0")] + #[rustc_const_unstable(feature = "const_fn_trait_ref_impls", issue = "101803")] + impl const FnOnce for &F + where + F: ~const Fn, + { + type Output = F::Output; + + extern "rust-call" fn call_once(self, args: A) -> F::Output { + (*self).call(args) + } + } + + #[stable(feature = "rust1", since = "1.0.0")] + #[rustc_const_unstable(feature = "const_fn_trait_ref_impls", issue = "101803")] + impl const FnMut for &mut F + where + F: ~const FnMut, + { + extern "rust-call" fn call_mut(&mut self, args: A) -> F::Output { + (*self).call_mut(args) + } + } + + #[stable(feature = "rust1", since = "1.0.0")] + #[rustc_const_unstable(feature = "const_fn_trait_ref_impls", issue = "101803")] + impl const FnOnce for &mut F + where + F: ~const FnMut, + { + type Output = F::Output; + extern "rust-call" fn call_once(self, args: A) -> F::Output { + (*self).call_mut(args) + } + } + } } pub use self::function::{Fn, FnMut, FnOnce}; // endregion:fn From 9ad83deeccd8cb3d375b5558eb7e3d339b1a4e0b Mon Sep 17 00:00:00 2001 From: hkalbasi Date: Fri, 17 Mar 2023 19:10:25 +0330 Subject: [PATCH 1190/1945] Support overloaded index MIR lowering --- crates/hir-ty/src/consteval/tests.rs | 46 +++++++++++++- crates/hir-ty/src/infer/mutability.rs | 15 +++++ crates/hir-ty/src/mir/lower/as_place.rs | 63 ++++++++++++++++++- .../src/handlers/mutability_errors.rs | 48 ++++++++++++++ 4 files changed, 169 insertions(+), 3 deletions(-) diff --git a/crates/hir-ty/src/consteval/tests.rs b/crates/hir-ty/src/consteval/tests.rs index a658bfa0c9..944912b6c2 100644 --- a/crates/hir-ty/src/consteval/tests.rs +++ b/crates/hir-ty/src/consteval/tests.rs @@ -2,7 +2,8 @@ use base_db::fixture::WithFixture; use hir_def::db::DefDatabase; use crate::{ - consteval::try_const_usize, db::HirDatabase, test_db::TestDB, Const, ConstScalar, Interner, + consteval::try_const_usize, db::HirDatabase, mir::pad16, test_db::TestDB, Const, ConstScalar, + Interner, }; use super::{ @@ -30,7 +31,12 @@ fn check_number(ra_fixture: &str, answer: i128) { match &r.data(Interner).value { chalk_ir::ConstValue::Concrete(c) => match &c.interned { ConstScalar::Bytes(b, _) => { - assert_eq!(b, &answer.to_le_bytes()[0..b.len()]); + assert_eq!( + b, + &answer.to_le_bytes()[0..b.len()], + "Bytes differ. 
In decimal form: actual = {}, expected = {answer}", + i128::from_le_bytes(pad16(b, true)) + ); } x => panic!("Expected number but found {:?}", x), }, @@ -215,6 +221,42 @@ fn overloaded_deref_autoref() { ); } +#[test] +fn overloaded_index() { + check_number( + r#" + //- minicore: index + struct Foo; + + impl core::ops::Index for Foo { + type Output = i32; + fn index(&self, index: usize) -> &i32 { + if index == 7 { + &700 + } else { + &1000 + } + } + } + + impl core::ops::IndexMut for Foo { + fn index_mut(&mut self, index: usize) -> &mut i32 { + if index == 7 { + &mut 7 + } else { + &mut 10 + } + } + } + + const GOAL: i32 = { + (Foo[2]) + (Foo[7]) + (*&Foo[2]) + (*&Foo[7]) + (*&mut Foo[2]) + (*&mut Foo[7]) + }; + "#, + 3417, + ); +} + #[test] fn function_call() { check_number( diff --git a/crates/hir-ty/src/infer/mutability.rs b/crates/hir-ty/src/infer/mutability.rs index 8e3d71788f..784725da93 100644 --- a/crates/hir-ty/src/infer/mutability.rs +++ b/crates/hir-ty/src/infer/mutability.rs @@ -95,6 +95,21 @@ impl<'a> InferenceContext<'a> { self.infer_mut_not_expr_iter(fields.iter().map(|x| x.expr).chain(*spread)) } &Expr::Index { base, index } => { + if let Some((f, _)) = self.result.method_resolutions.get_mut(&tgt_expr) { + if mutability == Mutability::Mut { + if let Some(index_trait) = self + .db + .lang_item(self.table.trait_env.krate, LangItem::IndexMut) + .and_then(|l| l.as_trait()) + { + if let Some(index_fn) = + self.db.trait_data(index_trait).method_by_name(&name![index_mut]) + { + *f = index_fn; + } + } + } + } self.infer_mut_expr(base, mutability); self.infer_mut_expr(index, Mutability::Not); } diff --git a/crates/hir-ty/src/mir/lower/as_place.rs b/crates/hir-ty/src/mir/lower/as_place.rs index c6f4f66ada..425904850b 100644 --- a/crates/hir-ty/src/mir/lower/as_place.rs +++ b/crates/hir-ty/src/mir/lower/as_place.rs @@ -1,6 +1,7 @@ //! MIR lowering for places use super::*; +use hir_def::FunctionId; use hir_expand::name; macro_rules! not_supported { @@ -193,7 +194,24 @@ impl MirLowerCtx<'_> { if index_ty != TyBuilder::usize() || !matches!(base_ty.kind(Interner), TyKind::Array(..) | TyKind::Slice(..)) { - not_supported!("overloaded index"); + let Some(index_fn) = self.infer.method_resolution(expr_id) else { + return Err(MirLowerError::UnresolvedMethod); + }; + let Some((base_place, current)) = self.lower_expr_as_place(current, *base, true)? else { + return Ok(None); + }; + let Some((index_operand, current)) = self.lower_expr_to_some_operand(*index, current)? else { + return Ok(None); + }; + return self.lower_overloaded_index( + current, + base_place, + self.expr_ty_after_adjustments(*base), + self.expr_ty(expr_id), + index_operand, + expr_id.into(), + index_fn, + ); } let Some((mut p_base, current)) = self.lower_expr_as_place(current, *base, true)? 
else { @@ -210,6 +228,49 @@ impl MirLowerCtx<'_> { } } + fn lower_overloaded_index( + &mut self, + current: BasicBlockId, + place: Place, + base_ty: Ty, + result_ty: Ty, + index_operand: Operand, + span: MirSpan, + index_fn: (FunctionId, Substitution), + ) -> Result> { + let is_mutable = 'b: { + if let Some(index_mut_trait) = self.resolve_lang_item(LangItem::IndexMut)?.as_trait() { + if let Some(index_mut_fn) = + self.db.trait_data(index_mut_trait).method_by_name(&name![index_mut]) + { + break 'b index_mut_fn == index_fn.0; + } + } + false + }; + let (mutability, borrow_kind) = match is_mutable { + true => (Mutability::Mut, BorrowKind::Mut { allow_two_phase_borrow: false }), + false => (Mutability::Not, BorrowKind::Shared), + }; + let base_ref = TyKind::Ref(mutability, static_lifetime(), base_ty).intern(Interner); + let result_ref = TyKind::Ref(mutability, static_lifetime(), result_ty).intern(Interner); + let ref_place: Place = self.temp(base_ref)?.into(); + self.push_assignment(current, ref_place.clone(), Rvalue::Ref(borrow_kind, place), span); + let mut result: Place = self.temp(result_ref)?.into(); + let index_fn_op = Operand::const_zst( + TyKind::FnDef( + self.db.intern_callable_def(CallableDefId::FunctionId(index_fn.0)).into(), + index_fn.1, + ) + .intern(Interner), + ); + let Some(current) = self.lower_call(index_fn_op, vec![Operand::Copy(ref_place), index_operand], result.clone(), current, false)? else { + return Ok(None); + }; + result.projection.push(ProjectionElem::Deref); + Ok(Some((result, current))) + } + fn lower_overloaded_deref( &mut self, current: BasicBlockId, diff --git a/crates/ide-diagnostics/src/handlers/mutability_errors.rs b/crates/ide-diagnostics/src/handlers/mutability_errors.rs index 83c61f73db..17a70f5701 100644 --- a/crates/ide-diagnostics/src/handlers/mutability_errors.rs +++ b/crates/ide-diagnostics/src/handlers/mutability_errors.rs @@ -564,6 +564,54 @@ fn f(x: [(i32, u8); 10]) { ); } + #[test] + fn overloaded_index() { + check_diagnostics( + r#" +//- minicore: index +use core::ops::{Index, IndexMut}; + +struct Foo; +impl Index for Foo { + type Output = (i32, u8); + fn index(&self, index: usize) -> &(i32, u8) { + &(5, 2) + } +} +impl IndexMut for Foo { + fn index_mut(&mut self, index: usize) -> &mut (i32, u8) { + &mut (5, 2) + } +} +fn f() { + let mut x = Foo; + //^^^^^ 💡 weak: variable does not need to be mutable + let y = &x[2]; + let x = Foo; + let y = &mut x[2]; + //^^^^ 💡 error: cannot mutate immutable variable `x` + let mut x = &mut Foo; + //^^^^^ 💡 weak: variable does not need to be mutable + let y: &mut (i32, u8) = &mut x[2]; + let x = Foo; + let ref mut y = x[7]; + //^^^^ 💡 error: cannot mutate immutable variable `x` + let (ref mut y, _) = x[3]; + //^^^^ 💡 error: cannot mutate immutable variable `x` + match x[10] { + //^^^^^ 💡 error: cannot mutate immutable variable `x` + (ref y, _) => (), + (_, ref mut y) => (), + } + let mut x = Foo; + let mut i = 5; + //^^^^^ 💡 weak: variable does not need to be mutable + let y = &mut x[i]; +} +"#, + ); + } + #[test] fn overloaded_deref() { check_diagnostics( From 453ae2e00e2f3bcd96a2fa78626296e14641dae8 Mon Sep 17 00:00:00 2001 From: hkalbasi Date: Sat, 18 Mar 2023 02:06:36 +0330 Subject: [PATCH 1191/1945] Support range MIR lowering --- crates/hir-ty/src/consteval/tests.rs | 12 +++++++ crates/hir-ty/src/infer/unify.rs | 9 ++++-- crates/hir-ty/src/mir/lower.rs | 48 ++++++++++++++++++++++++++-- crates/hir-ty/src/traits.rs | 4 +-- 4 files changed, 65 insertions(+), 8 deletions(-) diff --git 
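The range syntax being lowered here desugars to plain struct aggregates over the `core::ops` range types; `RangeInclusive` additionally carries a private `exhausted` flag, which the new lowering fills with `false`. A surface-level sketch (illustrative only, not part of the patch):

    use std::ops::{Range, RangeFrom, RangeInclusive};

    fn main() {
        // `1..2` is sugar for `Range { start: 1, end: 2 }`, which is what the
        // aggregate built by the new lowering corresponds to.
        let r: Range<i32> = 1..2;
        assert_eq!((r.start, r.end), (1, 2));

        let rf: RangeFrom<i32> = 10_000..;
        assert_eq!(rf.start, 10_000);

        // `RangeInclusive` keeps an extra `exhausted` field; it is private,
        // starts out as `false`, and is only observable through iteration.
        let mut ri: RangeInclusive<i32> = 100..=200;
        assert_eq!(*ri.start(), 100);
        assert_eq!(ri.next(), Some(100));
    }
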
a/crates/hir-ty/src/consteval/tests.rs b/crates/hir-ty/src/consteval/tests.rs index 944912b6c2..97c8d62860 100644 --- a/crates/hir-ty/src/consteval/tests.rs +++ b/crates/hir-ty/src/consteval/tests.rs @@ -561,6 +561,18 @@ fn for_loops() { ); } +#[test] +fn ranges() { + check_number( + r#" + //- minicore: range + const GOAL: i32 = (1..2).start + (20..10).end + (100..=200).start + (2000..=1000).end + + (10000..).start + (..100000).end + (..=1000000).end; + "#, + 1111111, + ); +} + #[test] fn recursion() { check_number( diff --git a/crates/hir-ty/src/infer/unify.rs b/crates/hir-ty/src/infer/unify.rs index 2a07dd708c..0e516b9399 100644 --- a/crates/hir-ty/src/infer/unify.rs +++ b/crates/hir-ty/src/infer/unify.rs @@ -633,7 +633,10 @@ impl<'a> InferenceTable<'a> { ) -> Option<(Option, Vec, Ty)> { match ty.callable_sig(self.db) { Some(sig) => Some((None, sig.params().to_vec(), sig.ret().clone())), - None => self.callable_sig_from_fn_trait(ty, num_args), + None => { + let (f, args_ty, return_ty) = self.callable_sig_from_fn_trait(ty, num_args)?; + Some((Some(f), args_ty, return_ty)) + } } } @@ -641,7 +644,7 @@ impl<'a> InferenceTable<'a> { &mut self, ty: &Ty, num_args: usize, - ) -> Option<(Option, Vec, Ty)> { + ) -> Option<(FnTrait, Vec, Ty)> { let krate = self.trait_env.krate; let fn_once_trait = FnTrait::FnOnce.get_id(self.db, krate)?; let trait_data = self.db.trait_data(fn_once_trait); @@ -693,7 +696,7 @@ impl<'a> InferenceTable<'a> { }; let canonical = self.canonicalize(obligation.clone()); if self.db.trait_solve(krate, canonical.value.cast(Interner)).is_some() { - return Some((Some(fn_x), arg_tys, return_ty)); + return Some((fn_x, arg_tys, return_ty)); } } unreachable!("It should at least implement FnOnce at this point"); diff --git a/crates/hir-ty/src/mir/lower.rs b/crates/hir-ty/src/mir/lower.rs index 04175fb4d9..4b43e44a8e 100644 --- a/crates/hir-ty/src/mir/lower.rs +++ b/crates/hir-ty/src/mir/lower.rs @@ -14,7 +14,7 @@ use hir_def::{ layout::LayoutError, path::Path, resolver::{resolver_for_expr, ResolveValueResult, ValueNs}, - DefWithBodyId, EnumVariantId, HasModule, ItemContainerId, LocalFieldId, TraitId, + AdtId, DefWithBodyId, EnumVariantId, HasModule, ItemContainerId, LocalFieldId, TraitId, }; use hir_expand::name::Name; use la_arena::ArenaMap; @@ -643,7 +643,7 @@ impl MirLowerCtx<'_> { }, } }).collect(), - None => operands.into_iter().map(|x| x).collect::>().ok_or( + None => operands.into_iter().collect::>().ok_or( MirLowerError::TypeError("missing field in record literal"), )?, }, @@ -761,7 +761,49 @@ impl MirLowerCtx<'_> { ); Ok(Some(current)) } - Expr::Range { .. } => not_supported!("range"), + &Expr::Range { lhs, rhs, range_type: _ } => { + let ty = self.expr_ty(expr_id); + let Some((adt, subst)) = ty.as_adt() else { + return Err(MirLowerError::TypeError("Range type is not adt")); + }; + let AdtId::StructId(st) = adt else { + return Err(MirLowerError::TypeError("Range type is not struct")); + }; + let mut lp = None; + let mut rp = None; + if let Some(x) = lhs { + let Some((o, c)) = self.lower_expr_to_some_operand(x, current)? else { + return Ok(None); + }; + lp = Some(o); + current = c; + } + if let Some(x) = rhs { + let Some((o, c)) = self.lower_expr_to_some_operand(x, current)? 
else { + return Ok(None); + }; + rp = Some(o); + current = c; + } + self.push_assignment( + current, + place, + Rvalue::Aggregate( + AggregateKind::Adt(st.into(), subst.clone()), + self.db.struct_data(st).variant_data.fields().iter().map(|x| { + let o = match x.1.name.as_str() { + Some("start") => lp.take(), + Some("end") => rp.take(), + Some("exhausted") => Some(Operand::from_bytes(vec![0], TyBuilder::bool())), + _ => None, + }; + o.ok_or(MirLowerError::UnresolvedField) + }).collect::>()?, + ), + expr_id.into(), + ); + Ok(Some(current)) + }, Expr::Closure { .. } => not_supported!("closure"), Expr::Tuple { exprs, is_assignee_expr: _ } => { let Some(values) = exprs diff --git a/crates/hir-ty/src/traits.rs b/crates/hir-ty/src/traits.rs index aebf59f315..e7fffc4cc7 100644 --- a/crates/hir-ty/src/traits.rs +++ b/crates/hir-ty/src/traits.rs @@ -188,7 +188,7 @@ impl FnTrait { } } - pub fn method_name(&self) -> Name { + pub fn method_name(self) -> Name { match self { FnTrait::FnOnce => name!(call_once), FnTrait::FnMut => name!(call_mut), @@ -196,7 +196,7 @@ impl FnTrait { } } - pub fn get_id(&self, db: &dyn HirDatabase, krate: CrateId) -> Option { + pub fn get_id(self, db: &dyn HirDatabase, krate: CrateId) -> Option { let target = db.lang_item(krate, self.lang_item())?; match target { LangItemTarget::Trait(t) => Some(t), From f7113685b563f2b20e770ee1e901b2b93ccbc990 Mon Sep 17 00:00:00 2001 From: feniljain Date: Sat, 18 Mar 2023 11:43:45 +0530 Subject: [PATCH 1192/1945] fix: make tests valid rust code --- crates/ide-completion/src/tests/record.rs | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/crates/ide-completion/src/tests/record.rs b/crates/ide-completion/src/tests/record.rs index 2de42e8435..65cefdb085 100644 --- a/crates/ide-completion/src/tests/record.rs +++ b/crates/ide-completion/src/tests/record.rs @@ -81,6 +81,7 @@ fn foo(baz: Baz) { enum Baz { Foo, Bar } fn foo(baz: Baz) { + use Baz::*; match baz { Foo => (), $0 @@ -91,12 +92,14 @@ fn foo(baz: Baz) { en Baz en Result md core + ev Bar ev Err + ev Foo ev Ok - bn Baz::Bar Baz::Bar$0 - bn Baz::Foo Baz::Foo$0 - bn Err(…) Err($1)$0 - bn Ok(…) Ok($1)$0 + bn Bar Bar$0 + bn Err(…) Err($1)$0 + bn Foo Foo$0 + bn Ok(…) Ok($1)$0 kw mut kw ref "#]], From a79a76a94274c22676e2047039cf68ca199c07dd Mon Sep 17 00:00:00 2001 From: feniljain Date: Sat, 18 Mar 2023 11:43:57 +0530 Subject: [PATCH 1193/1945] refactor: pass is_variant_missing as args to build_completion --- crates/ide-completion/src/render.rs | 13 +----------- crates/ide-completion/src/render/pattern.rs | 23 +++++++++++++-------- 2 files changed, 15 insertions(+), 21 deletions(-) diff --git a/crates/ide-completion/src/render.rs b/crates/ide-completion/src/render.rs index eb2df395c4..86302cb067 100644 --- a/crates/ide-completion/src/render.rs +++ b/crates/ide-completion/src/render.rs @@ -32,22 +32,11 @@ pub(crate) struct RenderContext<'a> { completion: &'a CompletionContext<'a>, is_private_editable: bool, import_to_add: Option, - // For variants which are missing - // in match completion context - // - // Option -> only applicable for enums - // bool -> is enum variant missing or not? 
- is_variant_missing: Option, } impl<'a> RenderContext<'a> { pub(crate) fn new(completion: &'a CompletionContext<'a>) -> RenderContext<'a> { - RenderContext { - completion, - is_private_editable: false, - import_to_add: None, - is_variant_missing: None, - } + RenderContext { completion, is_private_editable: false, import_to_add: None } } pub(crate) fn private_editable(mut self, private_editable: bool) -> Self { diff --git a/crates/ide-completion/src/render/pattern.rs b/crates/ide-completion/src/render/pattern.rs index d70f02127d..fcc7899815 100644 --- a/crates/ide-completion/src/render/pattern.rs +++ b/crates/ide-completion/src/render/pattern.rs @@ -39,11 +39,11 @@ pub(crate) fn render_struct_pat( let db = ctx.db(); - Some(build_completion(ctx, label, lookup, pat, strukt, strukt.ty(db))) + Some(build_completion(ctx, label, lookup, pat, strukt, strukt.ty(db), false)) } pub(crate) fn render_variant_pat( - mut ctx: RenderContext<'_>, + ctx: RenderContext<'_>, pattern_ctx: &PatternContext, path_ctx: Option<&PathCompletionCtx>, variant: hir::Variant, @@ -56,11 +56,6 @@ pub(crate) fn render_variant_pat( let (visible_fields, fields_omitted) = visible_fields(ctx.completion, &fields, variant)?; let enum_ty = variant.parent_enum(ctx.db()).ty(ctx.db()); - // Missing in context of match statement completions - if pattern_ctx.missing_variants.contains(&variant) { - ctx.is_variant_missing = Some(true); - } - let (name, escaped_name) = match path { Some(path) => (path.unescaped().to_string().into(), path.to_string().into()), None => { @@ -89,7 +84,15 @@ pub(crate) fn render_variant_pat( } }; - Some(build_completion(ctx, label, lookup, pat, variant, enum_ty)) + Some(build_completion( + ctx, + label, + lookup, + pat, + variant, + enum_ty, + pattern_ctx.missing_variants.contains(&variant), + )) } fn build_completion( @@ -99,10 +102,12 @@ fn build_completion( pat: String, def: impl HasAttrs + Copy, adt_ty: hir::Type, + // Missing in context of match statement completions + is_variant_missing: bool, ) -> CompletionItem { let mut relevance = ctx.completion_relevance(); - if let Some(true) = ctx.is_variant_missing { + if is_variant_missing { relevance.type_match = super::compute_type_match(ctx.completion, &adt_ty); } From 1d1a86f3508581b5867df202c87016520fd221a5 Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Sun, 19 Mar 2023 16:45:26 +0900 Subject: [PATCH 1194/1945] Bump chalk to 0.89 --- Cargo.lock | 16 ++++++++-------- crates/hir-ty/Cargo.toml | 8 ++++---- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index fc77515b63..25242c6028 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -169,9 +169,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chalk-derive" -version = "0.88.0" +version = "0.89.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4df80a3fbc1f0e59f560eeeebca94bf655566a8ad3023c210a109deb6056455a" +checksum = "ea176c50987dc4765961aa165001e8eb5a722a26308c5797a47303ea91686aab" dependencies = [ "proc-macro2", "quote", @@ -181,9 +181,9 @@ dependencies = [ [[package]] name = "chalk-ir" -version = "0.88.0" +version = "0.89.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f39e5272016916956298cceea5147006f897972c274a768ed4d6e074efe5d3fb" +checksum = "473b480241695428c14e8f84f1c9a47ef232450a50faf3a4041e5c9dc11e0a3b" dependencies = [ "bitflags", "chalk-derive", @@ -192,9 +192,9 @@ dependencies = [ [[package]] name = "chalk-recursive" -version = "0.88.0" +version 
= "0.89.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9d60b42ad7478d3e027e2f9ea4e99fbbb8fdee0c8c3cf068be269f57e603618" +checksum = "6764b4fe67cac3a3758185084efbfbd39bf0352795824ba849ddd2b64cd4bb28" dependencies = [ "chalk-derive", "chalk-ir", @@ -205,9 +205,9 @@ dependencies = [ [[package]] name = "chalk-solve" -version = "0.88.0" +version = "0.89.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab30620ea5b36819525eaab2204f4b8e1842fc7ee36826424a28bef59ae7fecf" +checksum = "55a7e6160966eceb6e7dcc2f479a2af4c477aaf5bccbc640d82515995ab1a6cc" dependencies = [ "chalk-derive", "chalk-ir", diff --git a/crates/hir-ty/Cargo.toml b/crates/hir-ty/Cargo.toml index 4572e33486..9b3296df25 100644 --- a/crates/hir-ty/Cargo.toml +++ b/crates/hir-ty/Cargo.toml @@ -22,10 +22,10 @@ either = "1.7.0" tracing = "0.1.35" rustc-hash = "1.1.0" scoped-tls = "1.0.0" -chalk-solve = { version = "0.88.0", default-features = false } -chalk-ir = "0.88.0" -chalk-recursive = { version = "0.88.0", default-features = false } -chalk-derive = "0.88.0" +chalk-solve = { version = "0.89.0", default-features = false } +chalk-ir = "0.89.0" +chalk-recursive = { version = "0.89.0", default-features = false } +chalk-derive = "0.89.0" la-arena = { version = "0.3.0", path = "../../lib/la-arena" } once_cell = "1.17.0" typed-arena = "2.0.1" From e12460bbca48ec354c40caf51603ddc9f97c17a5 Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Sun, 19 Mar 2023 16:58:10 +0900 Subject: [PATCH 1195/1945] Add regression test so that we can catch regressions when we move away from chalk. --- crates/hir-ty/src/tests/regression.rs | 32 +++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) diff --git a/crates/hir-ty/src/tests/regression.rs b/crates/hir-ty/src/tests/regression.rs index e6b4f13c8d..689f0da44f 100644 --- a/crates/hir-ty/src/tests/regression.rs +++ b/crates/hir-ty/src/tests/regression.rs @@ -1756,3 +1756,35 @@ const C: usize = 2 + 2; "#, ); } + +#[test] +fn regression_14164() { + check_types( + r#" +trait Rec { + type K; + type Rebind: Rec; +} + +trait Expr { + type Part: Rec; + fn foo(_: ::Rebind) {} +} + +struct Head(K); +impl Rec for Head { + type K = K; + type Rebind = Head; +} + +fn test() +where + E: Expr>, +{ + let head; + //^^^^ Head + E::foo(head); +} +"#, + ); +} From 8e73ea52537fe5189fac5cd02380592563fe7f0c Mon Sep 17 00:00:00 2001 From: hkalbasi Date: Sun, 19 Mar 2023 13:02:51 +0330 Subject: [PATCH 1196/1945] Desugar try blocks --- crates/hir-def/src/body/lower.rs | 87 +++++++++++++++++++++++---- crates/hir-def/src/body/pretty.rs | 3 - crates/hir-def/src/body/scope.rs | 3 +- crates/hir-def/src/expr.rs | 6 -- crates/hir-expand/src/name.rs | 14 ++++- crates/hir-ty/src/consteval/tests.rs | 54 +++++++++++++++++ crates/hir-ty/src/infer.rs | 4 -- crates/hir-ty/src/infer/expr.rs | 20 ------ crates/hir-ty/src/infer/mutability.rs | 1 - crates/hir-ty/src/mir/lower.rs | 87 ++++++++++++++++++--------- crates/hir-ty/src/tests/traits.rs | 20 ++++-- 11 files changed, 215 insertions(+), 84 deletions(-) diff --git a/crates/hir-def/src/body/lower.rs b/crates/hir-def/src/body/lower.rs index 448821d384..a93fcb3b1d 100644 --- a/crates/hir-def/src/body/lower.rs +++ b/crates/hir-def/src/body/lower.rs @@ -19,7 +19,7 @@ use rustc_hash::FxHashMap; use smallvec::SmallVec; use syntax::{ ast::{ - self, ArrayExprKind, AstChildren, HasArgList, HasLoopBody, HasName, LiteralKind, + self, ArrayExprKind, AstChildren, BlockExpr, HasArgList, HasLoopBody, HasName, LiteralKind, SlicePatComponents, 
}, AstNode, AstPtr, SyntaxNodePtr, @@ -100,6 +100,7 @@ pub(super) fn lower( _c: Count::new(), }, expander, + current_try_block: None, is_lowering_assignee_expr: false, is_lowering_generator: false, } @@ -113,6 +114,7 @@ struct ExprCollector<'a> { body: Body, krate: CrateId, source_map: BodySourceMap, + current_try_block: Option, is_lowering_assignee_expr: bool, is_lowering_generator: bool, } @@ -222,6 +224,10 @@ impl ExprCollector<'_> { self.source_map.label_map.insert(src, id); id } + // FIXME: desugared labels don't have ptr, that's wrong and should be fixed somehow. + fn alloc_label_desugared(&mut self, label: Label) -> LabelId { + self.body.labels.alloc(label) + } fn make_label(&mut self, label: Label, src: LabelSource) -> LabelId { let id = self.body.labels.alloc(label); self.source_map.label_map_back.insert(id, src); @@ -259,13 +265,7 @@ impl ExprCollector<'_> { self.alloc_expr(Expr::Let { pat, expr }, syntax_ptr) } ast::Expr::BlockExpr(e) => match e.modifier() { - Some(ast::BlockModifier::Try(_)) => { - self.collect_block_(e, |id, statements, tail| Expr::TryBlock { - id, - statements, - tail, - }) - } + Some(ast::BlockModifier::Try(_)) => self.collect_try_block(e), Some(ast::BlockModifier::Unsafe(_)) => { self.collect_block_(e, |id, statements, tail| Expr::Unsafe { id, @@ -606,6 +606,59 @@ impl ExprCollector<'_> { }) } + /// Desugar `try { ; }` into `': { ; ::std::ops::Try::from_output() }`, + /// `try { ; }` into `': { ; ::std::ops::Try::from_output(()) }` + /// and save the `` to use it as a break target for desugaring of the `?` operator. + fn collect_try_block(&mut self, e: BlockExpr) -> ExprId { + let Some(try_from_output) = LangItem::TryTraitFromOutput.path(self.db, self.krate) else { + return self.alloc_expr_desugared(Expr::Missing); + }; + let prev_try_block = self.current_try_block.take(); + self.current_try_block = + Some(self.alloc_label_desugared(Label { name: Name::generate_new_name() })); + let expr_id = self.collect_block(e); + let callee = self.alloc_expr_desugared(Expr::Path(try_from_output)); + let Expr::Block { label, tail, .. } = &mut self.body.exprs[expr_id] else { + unreachable!("It is the output of collect block"); + }; + *label = self.current_try_block; + let next_tail = match *tail { + Some(tail) => self.alloc_expr_desugared(Expr::Call { + callee, + args: Box::new([tail]), + is_assignee_expr: false, + }), + None => { + let unit = self.alloc_expr_desugared(Expr::Tuple { + exprs: Box::new([]), + is_assignee_expr: false, + }); + self.alloc_expr_desugared(Expr::Call { + callee, + args: Box::new([unit]), + is_assignee_expr: false, + }) + } + }; + let Expr::Block { tail, .. 
} = &mut self.body.exprs[expr_id] else { + unreachable!("It is the output of collect block"); + }; + *tail = Some(next_tail); + self.current_try_block = prev_try_block; + expr_id + } + + /// Desugar `ast::TryExpr` from: `?` into: + /// ```ignore (pseudo-rust) + /// match Try::branch() { + /// ControlFlow::Continue(val) => val, + /// ControlFlow::Break(residual) => + /// // If there is an enclosing `try {...}`: + /// break 'catch_target Try::from_residual(residual), + /// // Otherwise: + /// return Try::from_residual(residual), + /// } + /// ``` fn collect_try_operator(&mut self, syntax_ptr: AstPtr, e: ast::TryExpr) -> ExprId { let (try_branch, cf_continue, cf_break, try_from_residual) = 'if_chain: { if let Some(try_branch) = LangItem::TryTraitBranch.path(self.db, self.krate) { @@ -628,7 +681,9 @@ impl ExprCollector<'_> { Expr::Call { callee: try_branch, args: Box::new([operand]), is_assignee_expr: false }, syntax_ptr.clone(), ); - let continue_binding = self.alloc_binding(name![v1], BindingAnnotation::Unannotated); + let continue_name = Name::generate_new_name(); + let continue_binding = + self.alloc_binding(continue_name.clone(), BindingAnnotation::Unannotated); let continue_bpat = self.alloc_pat_desugared(Pat::Bind { id: continue_binding, subpat: None }); self.add_definition_to_binding(continue_binding, continue_bpat); @@ -639,9 +694,10 @@ impl ExprCollector<'_> { ellipsis: None, }), guard: None, - expr: self.alloc_expr(Expr::Path(Path::from(name![v1])), syntax_ptr.clone()), + expr: self.alloc_expr(Expr::Path(Path::from(continue_name)), syntax_ptr.clone()), }; - let break_binding = self.alloc_binding(name![v1], BindingAnnotation::Unannotated); + let break_name = Name::generate_new_name(); + let break_binding = self.alloc_binding(break_name.clone(), BindingAnnotation::Unannotated); let break_bpat = self.alloc_pat_desugared(Pat::Bind { id: break_binding, subpat: None }); self.add_definition_to_binding(break_binding, break_bpat); let break_arm = MatchArm { @@ -652,13 +708,18 @@ impl ExprCollector<'_> { }), guard: None, expr: { - let x = self.alloc_expr(Expr::Path(Path::from(name![v1])), syntax_ptr.clone()); + let x = self.alloc_expr(Expr::Path(Path::from(break_name)), syntax_ptr.clone()); let callee = self.alloc_expr(Expr::Path(try_from_residual), syntax_ptr.clone()); let result = self.alloc_expr( Expr::Call { callee, args: Box::new([x]), is_assignee_expr: false }, syntax_ptr.clone(), ); - self.alloc_expr(Expr::Return { expr: Some(result) }, syntax_ptr.clone()) + if let Some(label) = self.current_try_block { + let label = Some(self.body.labels[label].name.clone()); + self.alloc_expr(Expr::Break { expr: Some(result), label }, syntax_ptr.clone()) + } else { + self.alloc_expr(Expr::Return { expr: Some(result) }, syntax_ptr.clone()) + } }, }; let arms = Box::new([continue_arm, break_arm]); diff --git a/crates/hir-def/src/body/pretty.rs b/crates/hir-def/src/body/pretty.rs index c091ad0d15..8c9d77620e 100644 --- a/crates/hir-def/src/body/pretty.rs +++ b/crates/hir-def/src/body/pretty.rs @@ -420,9 +420,6 @@ impl<'a> Printer<'a> { Expr::Unsafe { id: _, statements, tail } => { self.print_block(Some("unsafe "), statements, tail); } - Expr::TryBlock { id: _, statements, tail } => { - self.print_block(Some("try "), statements, tail); - } Expr::Async { id: _, statements, tail } => { self.print_block(Some("async "), statements, tail); } diff --git a/crates/hir-def/src/body/scope.rs b/crates/hir-def/src/body/scope.rs index 12fc1f116d..8ddb89a472 100644 --- a/crates/hir-def/src/body/scope.rs +++ 
b/crates/hir-def/src/body/scope.rs @@ -202,8 +202,7 @@ fn compute_expr_scopes(expr: ExprId, body: &Body, scopes: &mut ExprScopes, scope } Expr::Unsafe { id, statements, tail } | Expr::Async { id, statements, tail } - | Expr::Const { id, statements, tail } - | Expr::TryBlock { id, statements, tail } => { + | Expr::Const { id, statements, tail } => { let mut scope = scopes.new_block_scope(*scope, *id, None); // Overwrite the old scope for the block expr, so that every block scope can be found // via the block itself (important for blocks that only contain items, no expressions). diff --git a/crates/hir-def/src/expr.rs b/crates/hir-def/src/expr.rs index 8b1528f81e..7ede19cc3c 100644 --- a/crates/hir-def/src/expr.rs +++ b/crates/hir-def/src/expr.rs @@ -122,11 +122,6 @@ pub enum Expr { tail: Option, label: Option, }, - TryBlock { - id: BlockId, - statements: Box<[Statement]>, - tail: Option, - }, Async { id: BlockId, statements: Box<[Statement]>, @@ -310,7 +305,6 @@ impl Expr { f(*expr); } Expr::Block { statements, tail, .. } - | Expr::TryBlock { statements, tail, .. } | Expr::Unsafe { statements, tail, .. } | Expr::Async { statements, tail, .. } | Expr::Const { statements, tail, .. } => { diff --git a/crates/hir-expand/src/name.rs b/crates/hir-expand/src/name.rs index 71eb35d9df..8099c20b02 100644 --- a/crates/hir-expand/src/name.rs +++ b/crates/hir-expand/src/name.rs @@ -78,7 +78,7 @@ impl Name { Self::new_text(lt.text().into()) } - /// Shortcut to create inline plain text name + /// Shortcut to create inline plain text name. Panics if `text.len() > 22` const fn new_inline(text: &str) -> Name { Name::new_text(SmolStr::new_inline(text)) } @@ -112,6 +112,18 @@ impl Name { Name::new_inline("[missing name]") } + /// Generates a new name which is only equal to itself, by incrementing a counter. Due + /// its implementation, it should not be used in things that salsa considers, like + /// type names or field names, and it should be only used in names of local variables + /// and labels and similar things. + pub fn generate_new_name() -> Name { + use std::sync::atomic::{AtomicUsize, Ordering}; + static CNT: AtomicUsize = AtomicUsize::new(0); + let c = CNT.fetch_add(1, Ordering::Relaxed); + // FIXME: Currently a `__RA_generated_name` in user code will break our analysis + Name::new_text(format!("__RA_geneated_name_{c}").into()) + } + /// Returns the tuple index this name represents if it is a tuple field. pub fn as_tuple_index(&self) -> Option { match self.0 { diff --git a/crates/hir-ty/src/consteval/tests.rs b/crates/hir-ty/src/consteval/tests.rs index 97c8d62860..2ba0cbd5db 100644 --- a/crates/hir-ty/src/consteval/tests.rs +++ b/crates/hir-ty/src/consteval/tests.rs @@ -522,6 +522,42 @@ fn loops() { "#, 4, ); + check_number( + r#" + const GOAL: u8 = { + let mut x = 0; + loop { + x = x + 1; + if x == 5 { + break x + 2; + } + } + }; + "#, + 7, + ); + check_number( + r#" + const GOAL: u8 = { + 'a: loop { + let x = 'b: loop { + let x = 'c: loop { + let x = 'd: loop { + let x = 'e: loop { + break 'd 1; + }; + break 2 + x; + }; + break 3 + x; + }; + break 'a 4 + x; + }; + break 5 + x; + } + }; + "#, + 8, + ); } #[test] @@ -1019,6 +1055,24 @@ fn try_operator() { ); } +#[test] +fn try_block() { + check_number( + r#" + //- minicore: option, try + const fn g(x: Option, y: Option) -> i32 { + let r = try { x? * y? 
}; + match r { + Some(k) => k, + None => 5, + } + } + const GOAL: i32 = g(Some(10), Some(20)) + g(Some(30), None) + g(None, Some(40)) + g(None, None); + "#, + 215, + ); +} + #[test] fn or_pattern() { check_number( diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs index d1b9aff36d..38b7dee75f 100644 --- a/crates/hir-ty/src/infer.rs +++ b/crates/hir-ty/src/infer.rs @@ -1025,10 +1025,6 @@ impl<'a> InferenceContext<'a> { self.resolve_lang_item(lang)?.as_trait() } - fn resolve_ops_try_output(&self) -> Option { - self.resolve_output_on(self.resolve_lang_trait(LangItem::Try)?) - } - fn resolve_ops_neg_output(&self) -> Option { self.resolve_output_on(self.resolve_lang_trait(LangItem::Neg)?) } diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs index 82119c97ec..6d2aa59ea3 100644 --- a/crates/hir-ty/src/infer/expr.rs +++ b/crates/hir-ty/src/infer/expr.rs @@ -159,26 +159,6 @@ impl<'a> InferenceContext<'a> { }) .1 } - Expr::TryBlock { id: _, statements, tail } => { - // The type that is returned from the try block - let try_ty = self.table.new_type_var(); - if let Some(ty) = expected.only_has_type(&mut self.table) { - self.unify(&try_ty, &ty); - } - - // The ok-ish type that is expected from the last expression - let ok_ty = - self.resolve_associated_type(try_ty.clone(), self.resolve_ops_try_output()); - - self.infer_block( - tgt_expr, - statements, - *tail, - None, - &Expectation::has_type(ok_ty.clone()), - ); - try_ty - } Expr::Async { id: _, statements, tail } => { let ret_ty = self.table.new_type_var(); let prev_diverges = mem::replace(&mut self.diverges, Diverges::Maybe); diff --git a/crates/hir-ty/src/infer/mutability.rs b/crates/hir-ty/src/infer/mutability.rs index 784725da93..7ed21d230c 100644 --- a/crates/hir-ty/src/infer/mutability.rs +++ b/crates/hir-ty/src/infer/mutability.rs @@ -44,7 +44,6 @@ impl<'a> InferenceContext<'a> { } Expr::Let { pat, expr } => self.infer_mut_expr(*expr, self.pat_bound_mutability(*pat)), Expr::Block { id: _, statements, tail, label: _ } - | Expr::TryBlock { id: _, statements, tail } | Expr::Async { id: _, statements, tail } | Expr::Const { id: _, statements, tail } | Expr::Unsafe { id: _, statements, tail } => { diff --git a/crates/hir-ty/src/mir/lower.rs b/crates/hir-ty/src/mir/lower.rs index 4b43e44a8e..5d9ae32072 100644 --- a/crates/hir-ty/src/mir/lower.rs +++ b/crates/hir-ty/src/mir/lower.rs @@ -18,6 +18,7 @@ use hir_def::{ }; use hir_expand::name::Name; use la_arena::ArenaMap; +use rustc_hash::FxHashMap; use crate::{ consteval::ConstEvalError, db::HirDatabase, display::HirDisplay, infer::TypeMismatch, @@ -32,17 +33,21 @@ mod pattern_matching; use pattern_matching::AdtPatternShape; -#[derive(Debug, Clone, Copy)] +#[derive(Debug, Clone)] struct LoopBlocks { begin: BasicBlockId, /// `None` for loops that are not terminating end: Option, + place: Place, } struct MirLowerCtx<'a> { result: MirBody, owner: DefWithBodyId, current_loop_blocks: Option, + // FIXME: we should resolve labels in HIR lowering and always work with label id here, not + // with raw names. + labeled_loop_blocks: FxHashMap, discr_temp: Option, db: &'a dyn HirDatabase, body: &'a Body, @@ -72,6 +77,7 @@ pub enum MirLowerError { ImplementationError(&'static str), LangItemNotFound(LangItem), MutatingRvalue, + UnresolvedLabel, } macro_rules! 
not_supported { @@ -375,19 +381,29 @@ impl MirLowerCtx<'_> { Ok(self.merge_blocks(Some(then_target), else_target)) } Expr::Unsafe { id: _, statements, tail } => { - self.lower_block_to_place(None, statements, current, *tail, place) + self.lower_block_to_place(statements, current, *tail, place) } Expr::Block { id: _, statements, tail, label } => { - self.lower_block_to_place(*label, statements, current, *tail, place) + if let Some(label) = label { + self.lower_loop(current, place.clone(), Some(*label), |this, begin| { + if let Some(block) = this.lower_block_to_place(statements, begin, *tail, place)? { + let end = this.current_loop_end()?; + this.set_goto(block, end); + } + Ok(()) + }) + } else { + self.lower_block_to_place(statements, current, *tail, place) + } } - Expr::Loop { body, label } => self.lower_loop(current, *label, |this, begin| { + Expr::Loop { body, label } => self.lower_loop(current, place, *label, |this, begin| { if let Some((_, block)) = this.lower_expr_as_place(begin, *body, true)? { this.set_goto(block, begin); } Ok(()) }), Expr::While { condition, body, label } => { - self.lower_loop(current, *label, |this, begin| { + self.lower_loop(current, place, *label, |this, begin| { let Some((discr, to_switch)) = this.lower_expr_to_some_operand(*condition, begin)? else { return Ok(()); }; @@ -438,7 +454,7 @@ impl MirLowerCtx<'_> { return Ok(None); }; self.push_assignment(current, ref_mut_iterator_place.clone(), Rvalue::Ref(BorrowKind::Mut { allow_two_phase_borrow: false }, iterator_place), expr_id.into()); - self.lower_loop(current, label, |this, begin| { + self.lower_loop(current, place, label, |this, begin| { let Some(current) = this.lower_call(iter_next_fn_op, vec![Operand::Copy(ref_mut_iterator_place)], option_item_place.clone(), begin, false)? else { return Ok(()); @@ -558,24 +574,28 @@ impl MirLowerCtx<'_> { Some(_) => not_supported!("continue with label"), None => { let loop_data = - self.current_loop_blocks.ok_or(MirLowerError::ContinueWithoutLoop)?; + self.current_loop_blocks.as_ref().ok_or(MirLowerError::ContinueWithoutLoop)?; self.set_goto(current, loop_data.begin); Ok(None) } }, Expr::Break { expr, label } => { - if expr.is_some() { - not_supported!("break with value"); - } - match label { - Some(_) => not_supported!("break with label"), - None => { - let end = - self.current_loop_end()?; - self.set_goto(current, end); - Ok(None) - } + if let Some(expr) = expr { + let loop_data = match label { + Some(l) => self.labeled_loop_blocks.get(l).ok_or(MirLowerError::UnresolvedLabel)?, + None => self.current_loop_blocks.as_ref().ok_or(MirLowerError::BreakWithoutLoop)?, + }; + let Some(c) = self.lower_expr_to_place(*expr, loop_data.place.clone(), current)? else { + return Ok(None); + }; + current = c; } + let end = match label { + Some(l) => self.labeled_loop_blocks.get(l).ok_or(MirLowerError::UnresolvedLabel)?.end.expect("We always generate end for labeled loops"), + None => self.current_loop_end()?, + }; + self.set_goto(current, end); + Ok(None) } Expr::Return { expr } => { if let Some(expr) = expr { @@ -668,7 +688,6 @@ impl MirLowerCtx<'_> { } Expr::Await { .. } => not_supported!("await"), Expr::Yeet { .. } => not_supported!("yeet"), - Expr::TryBlock { .. } => not_supported!("try block"), Expr::Async { .. } => not_supported!("async block"), Expr::Const { .. 
} => not_supported!("anonymous const block"), Expr::Cast { expr, type_ref: _ } => { @@ -1085,19 +1104,34 @@ impl MirLowerCtx<'_> { fn lower_loop( &mut self, prev_block: BasicBlockId, + place: Place, label: Option, f: impl FnOnce(&mut MirLowerCtx<'_>, BasicBlockId) -> Result<()>, ) -> Result> { - if label.is_some() { - not_supported!("loop with label"); - } let begin = self.new_basic_block(); - let prev = - mem::replace(&mut self.current_loop_blocks, Some(LoopBlocks { begin, end: None })); + let prev = mem::replace( + &mut self.current_loop_blocks, + Some(LoopBlocks { begin, end: None, place }), + ); + let prev_label = if let Some(label) = label { + // We should generate the end now, to make sure that it wouldn't change later. It is + // bad as we may emit end (unneccessary unreachable block) for unterminating loop, but + // it should not affect correctness. + self.current_loop_end()?; + self.labeled_loop_blocks.insert( + self.body.labels[label].name.clone(), + self.current_loop_blocks.as_ref().unwrap().clone(), + ) + } else { + None + }; self.set_goto(prev_block, begin); f(self, begin)?; let my = mem::replace(&mut self.current_loop_blocks, prev) .ok_or(MirLowerError::ImplementationError("current_loop_blocks is corrupt"))?; + if let Some(prev) = prev_label { + self.labeled_loop_blocks.insert(self.body.labels[label.unwrap()].name.clone(), prev); + } Ok(my.end) } @@ -1185,15 +1219,11 @@ impl MirLowerCtx<'_> { fn lower_block_to_place( &mut self, - label: Option, statements: &[hir_def::expr::Statement], mut current: BasicBlockId, tail: Option, place: Place, ) -> Result>> { - if label.is_some() { - not_supported!("block with label"); - } for statement in statements.iter() { match statement { hir_def::expr::Statement::Let { pat, initializer, else_branch, type_ref: _ } => { @@ -1355,6 +1385,7 @@ pub fn lower_to_mir( body, owner, current_loop_blocks: None, + labeled_loop_blocks: Default::default(), discr_temp: None, }; let mut current = start_block; diff --git a/crates/hir-ty/src/tests/traits.rs b/crates/hir-ty/src/tests/traits.rs index da76d7fd83..97ec1bb871 100644 --- a/crates/hir-ty/src/tests/traits.rs +++ b/crates/hir-ty/src/tests/traits.rs @@ -206,19 +206,27 @@ fn test() { fn infer_try_trait() { check_types( r#" -//- minicore: try, result +//- minicore: try, result, from fn test() { let r: Result = Result::Ok(1); let v = r?; v; } //^ i32 - -impl core::ops::Try for Result { - type Output = O; - type Error = Result; +"#, + ); } -impl> core::ops::FromResidual> for Result {} +#[test] +fn infer_try_block() { + // FIXME: We should test more cases, but it currently doesn't work, since + // our labeled block type inference is broken. 
+ check_types( + r#" +//- minicore: try, option +fn test() { + let x: Option<_> = try { Some(2)?; }; + //^ Option<()> +} "#, ); } From 3303a6eff5f759d5bc6f9a2e891bddab0d1f21e7 Mon Sep 17 00:00:00 2001 From: hkalbasi Date: Mon, 20 Mar 2023 21:48:01 +0330 Subject: [PATCH 1197/1945] Implement some intrinsics --- crates/hir-ty/src/consteval/tests.rs | 70 +++- .../hir-ty/src/consteval/tests/intrinsics.rs | 162 ++++++++ crates/hir-ty/src/infer/expr.rs | 11 +- crates/hir-ty/src/mir/eval.rs | 383 +++++++++++++----- crates/hir-ty/src/mir/lower.rs | 5 + crates/hir-ty/src/tests/simple.rs | 34 +- crates/ide/src/hover/render.rs | 1 - 7 files changed, 521 insertions(+), 145 deletions(-) create mode 100644 crates/hir-ty/src/consteval/tests/intrinsics.rs diff --git a/crates/hir-ty/src/consteval/tests.rs b/crates/hir-ty/src/consteval/tests.rs index 2ba0cbd5db..47ef26fc58 100644 --- a/crates/hir-ty/src/consteval/tests.rs +++ b/crates/hir-ty/src/consteval/tests.rs @@ -11,6 +11,8 @@ use super::{ ConstEvalError, }; +mod intrinsics; + fn simplify(e: ConstEvalError) -> ConstEvalError { match e { ConstEvalError::MirEvalError(MirEvalError::InFunction(_, e)) => { @@ -82,6 +84,49 @@ fn bit_op() { check_number(r#"const GOAL: i8 = 1 << 8"#, 0); } +#[test] +fn casts() { + check_number(r#"const GOAL: usize = 12 as *const i32 as usize"#, 12); + check_number( + r#" + //- minicore: coerce_unsized, index, slice + const GOAL: i32 = { + let a = [10, 20, 3, 15]; + let x: &[i32] = &a; + let y: *const [i32] = x; + let z = y as *const i32; + unsafe { *z } + }; + "#, + 10, + ); + check_number( + r#" + //- minicore: coerce_unsized, index, slice + const GOAL: i16 = { + let a = &mut 5; + let z = a as *mut _; + unsafe { *z } + }; + "#, + 5, + ); + check_number( + r#" + //- minicore: coerce_unsized, index, slice + const GOAL: usize = { + let a = [10, 20, 3, 15]; + let x: &[i32] = &a; + let y: *const [i32] = x; + let z = y as *const [u8]; // slice fat pointer cast don't touch metadata + let w = unsafe { &*z }; + w.len() + }; + "#, + 4, + ); +} + #[test] fn locals() { check_number( @@ -279,20 +324,6 @@ fn function_call() { ); } -#[test] -fn intrinsics() { - check_number( - r#" - extern "rust-intrinsic" { - pub fn size_of() -> usize; - } - - const GOAL: usize = size_of::(); - "#, - 4, - ); -} - #[test] fn trait_basic() { check_number( @@ -1353,6 +1384,17 @@ fn array_and_index() { check_number( r#" //- minicore: coerce_unsized, index, slice + const GOAL: usize = { + let a = [1, 2, 3]; + let x: &[i32] = &a; + let y = &*x; + y.len() + };"#, + 3, + ); + check_number( + r#" + //- minicore: coerce_unsized, index, slice const GOAL: usize = [1, 2, 3, 4, 5].len();"#, 5, ); diff --git a/crates/hir-ty/src/consteval/tests/intrinsics.rs b/crates/hir-ty/src/consteval/tests/intrinsics.rs new file mode 100644 index 0000000000..371d5cab33 --- /dev/null +++ b/crates/hir-ty/src/consteval/tests/intrinsics.rs @@ -0,0 +1,162 @@ +use super::*; + +#[test] +fn size_of() { + check_number( + r#" + extern "rust-intrinsic" { + pub fn size_of() -> usize; + } + + const GOAL: usize = size_of::(); + "#, + 4, + ); +} + +#[test] +fn transmute() { + check_number( + r#" + extern "rust-intrinsic" { + pub fn transmute(e: T) -> U; + } + + const GOAL: i32 = transmute((1i16, 1i16)); + "#, + 0x00010001, + ); +} + +#[test] +fn const_eval_select() { + check_number( + r#" + extern "rust-intrinsic" { + pub fn const_eval_select(arg: ARG, called_in_const: F, called_at_rt: G) -> RET + where + G: FnOnce, + F: FnOnce; + } + + const fn in_const(x: i32, y: i32) -> i32 { + x + y + } + + fn 
in_rt(x: i32, y: i32) -> i32 { + x + y + } + + const GOAL: i32 = const_eval_select((2, 3), in_const, in_rt); + "#, + 5, + ); +} + +#[test] +fn wrapping_add() { + check_number( + r#" + extern "rust-intrinsic" { + pub fn wrapping_add(a: T, b: T) -> T; + } + + const GOAL: u8 = wrapping_add(10, 250); + "#, + 4, + ); +} + +#[test] +fn offset() { + check_number( + r#" + //- minicore: coerce_unsized, index, slice + extern "rust-intrinsic" { + pub fn offset(dst: *const T, offset: isize) -> *const T; + } + + const GOAL: u8 = unsafe { + let ar: &[(u8, u8, u8)] = &[ + (10, 11, 12), + (20, 21, 22), + (30, 31, 32), + (40, 41, 42), + (50, 51, 52), + ]; + let ar: *const [(u8, u8, u8)] = ar; + let ar = ar as *const (u8, u8, u8); + let element = offset(ar, 2); + element.1 + }; + "#, + 31, + ); +} + +#[test] +fn arith_offset() { + check_number( + r#" + //- minicore: coerce_unsized, index, slice + extern "rust-intrinsic" { + pub fn arith_offset(dst: *const T, offset: isize) -> *const T; + } + + const GOAL: u8 = unsafe { + let ar: &[(u8, u8, u8)] = &[ + (10, 11, 12), + (20, 21, 22), + (30, 31, 32), + (40, 41, 42), + (50, 51, 52), + ]; + let ar: *const [(u8, u8, u8)] = ar; + let ar = ar as *const (u8, u8, u8); + let element = arith_offset(arith_offset(ar, 102), -100); + element.1 + }; + "#, + 31, + ); +} + +#[test] +fn copy_nonoverlapping() { + check_number( + r#" + extern "rust-intrinsic" { + pub fn copy_nonoverlapping(src: *const T, dst: *mut T, count: usize); + } + + const GOAL: u8 = unsafe { + let mut x = 2; + let y = 5; + copy_nonoverlapping(&y, &mut x, 1); + x + }; + "#, + 5, + ); +} + +#[test] +fn copy() { + check_number( + r#" + //- minicore: coerce_unsized, index, slice + extern "rust-intrinsic" { + pub fn copy(src: *const T, dst: *mut T, count: usize); + } + + const GOAL: i32 = unsafe { + let mut x = [1i32, 2, 3, 4, 5]; + let y = (&mut x as *mut _) as *mut i32; + let z = (y as usize + 4) as *const i32; + copy(z, y, 4); + x[0] + x[1] + x[2] + x[3] + x[4] + }; + "#, + 19, + ); +} diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs index 6d2aa59ea3..d14b3f7140 100644 --- a/crates/hir-ty/src/infer/expr.rs +++ b/crates/hir-ty/src/infer/expr.rs @@ -630,8 +630,15 @@ impl<'a> InferenceContext<'a> { Expr::Cast { expr, type_ref } => { let cast_ty = self.make_ty(type_ref); // FIXME: propagate the "castable to" expectation - let _inner_ty = self.infer_expr_no_expect(*expr); - // FIXME check the cast... + let inner_ty = self.infer_expr_no_expect(*expr); + match (inner_ty.kind(Interner), cast_ty.kind(Interner)) { + (TyKind::Ref(_, _, inner), TyKind::Raw(_, cast)) => { + // FIXME: record invalid cast diagnostic in case of mismatch + self.unify(inner, cast); + } + // FIXME check the other kinds of cast... 
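                    // The Ref -> Raw arm above is what lets the pointee flow through a
                    // cast, e.g. (see the `infer_ref_to_raw_cast` test elsewhere in
                    // this patch):
                    //
                    //     struct S;
                    //     let s = &mut S;
                    //     let p = s as *mut _;   // `_` unifies with `S`, so `p: *mut S`
                    //
                    // Every other combination still falls through to the arm below and
                    // the expression simply takes the written target type, with no
                    // castability check yet.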
+ _ => (), + } cast_ty } Expr::Ref { expr, rawness, mutability } => { diff --git a/crates/hir-ty/src/mir/eval.rs b/crates/hir-ty/src/mir/eval.rs index f8545e88ad..26bf877cf0 100644 --- a/crates/hir-ty/src/mir/eval.rs +++ b/crates/hir-ty/src/mir/eval.rs @@ -96,11 +96,18 @@ enum Address { use Address::*; +#[derive(Debug, Clone, Copy)] struct Interval { addr: Address, size: usize, } +#[derive(Debug, Clone)] +struct IntervalAndTy { + interval: Interval, + ty: Ty, +} + impl Interval { fn new(addr: Address, size: usize) -> Self { Self { addr, size } @@ -110,11 +117,37 @@ impl Interval { memory.read_memory(self.addr, self.size) } + fn write_from_bytes(&self, memory: &mut Evaluator<'_>, bytes: &[u8]) -> Result<()> { + memory.write_memory(self.addr, bytes) + } + + fn write_from_interval(&self, memory: &mut Evaluator<'_>, interval: Interval) -> Result<()> { + // FIXME: this could be more efficent + let bytes = &interval.get(memory)?.to_vec(); + memory.write_memory(self.addr, bytes) + } + fn slice(self, range: Range) -> Interval { Interval { addr: self.addr.offset(range.start), size: range.len() } } } +impl IntervalAndTy { + fn get<'a>(&self, memory: &'a Evaluator<'a>) -> Result<&'a [u8]> { + memory.read_memory(self.interval.addr, self.interval.size) + } + + fn new( + addr: Address, + ty: Ty, + evaluator: &Evaluator<'_>, + locals: &Locals<'_>, + ) -> Result { + let size = evaluator.size_of_sized(&ty, locals, "type of interval")?; + Ok(IntervalAndTy { interval: Interval { addr, size }, ty }) + } +} + enum IntervalOrOwned { Owned(Vec), Borrowed(Interval), @@ -135,7 +168,7 @@ impl Address { fn from_usize(x: usize) -> Self { if x > usize::MAX / 2 { - Stack(usize::MAX - x) + Stack(x - usize::MAX / 2) } else { Heap(x) } @@ -147,7 +180,7 @@ impl Address { fn to_usize(&self) -> usize { let as_num = match self { - Stack(x) => usize::MAX - *x, + Stack(x) => *x + usize::MAX / 2, Heap(x) => *x, }; as_num @@ -174,7 +207,7 @@ pub enum MirEvalError { /// Means that code had undefined behavior. We don't try to actively detect UB, but if it was detected /// then use this type of error. UndefinedBehavior(&'static str), - Panic, + Panic(String), MirLowerError(FunctionId, MirLowerError), TypeIsUnsized(Ty, &'static str), NotSupported(String), @@ -197,7 +230,7 @@ impl std::fmt::Debug for MirEvalError { Self::UndefinedBehavior(arg0) => { f.debug_tuple("UndefinedBehavior").field(arg0).finish() } - Self::Panic => write!(f, "Panic"), + Self::Panic(msg) => write!(f, "Panic with message:\n{msg:?}"), Self::TargetDataLayoutNotAvailable => write!(f, "TargetDataLayoutNotAvailable"), Self::TypeIsUnsized(ty, it) => write!(f, "{ty:?} is unsized. {it} should be sized."), Self::ExecutionLimitExceeded => write!(f, "execution limit exceeded"), @@ -289,7 +322,19 @@ impl Evaluator<'_> { } fn place_addr(&self, p: &Place, locals: &Locals<'_>) -> Result
<Address>
{ - Ok(self.place_addr_and_ty(p, locals)?.0) + Ok(self.place_addr_and_ty_and_metadata(p, locals)?.0) + } + + fn place_interval(&self, p: &Place, locals: &Locals<'_>) -> Result { + let place_addr_and_ty = self.place_addr_and_ty_and_metadata(p, locals)?; + Ok(Interval { + addr: place_addr_and_ty.0, + size: self.size_of_sized( + &place_addr_and_ty.1, + locals, + "Type of place that we need its interval", + )?, + }) } fn ptr_size(&self) -> usize { @@ -299,10 +344,15 @@ impl Evaluator<'_> { } } - fn place_addr_and_ty<'a>(&'a self, p: &Place, locals: &'a Locals<'a>) -> Result<(Address, Ty)> { + fn place_addr_and_ty_and_metadata<'a>( + &'a self, + p: &Place, + locals: &'a Locals<'a>, + ) -> Result<(Address, Ty, Option)> { let mut addr = locals.ptr[p.local]; let mut ty: Ty = self.ty_filler(&locals.body.locals[p.local].ty, locals.subst, locals.body.owner)?; + let mut metadata = None; // locals are always sized for proj in &p.projection { match proj { ProjectionElem::Deref => { @@ -314,12 +364,18 @@ impl Evaluator<'_> { )) } }; + metadata = if self.size_of(&ty, locals)?.is_none() { + Some(Interval { addr: addr.offset(self.ptr_size()), size: self.ptr_size() }) + } else { + None + }; let x = from_bytes!(usize, self.read_memory(addr, self.ptr_size())?); addr = Address::from_usize(x); } ProjectionElem::Index(op) => { let offset = from_bytes!(usize, self.read_memory(locals.ptr[*op], self.ptr_size())?); + metadata = None; // Result of index is always sized match &ty.data(Interner).kind { TyKind::Ref(_, _, inner) => match &inner.data(Interner).kind { TyKind::Slice(inner) => { @@ -357,6 +413,7 @@ impl Evaluator<'_> { .clone(); let offset = layout.fields.offset(f).bytes_usize(); addr = addr.offset(offset); + metadata = None; // tuple field is always sized } _ => return Err(MirEvalError::TypeError("Only tuple has tuple fields")), }, @@ -386,6 +443,8 @@ impl Evaluator<'_> { .offset(u32::from(f.local_id.into_raw()) as usize) .bytes_usize(); addr = addr.offset(offset); + // FIXME: support structs with unsized fields + metadata = None; } _ => return Err(MirEvalError::TypeError("Only adt has fields")), }, @@ -396,7 +455,7 @@ impl Evaluator<'_> { ProjectionElem::OpaqueCast(_) => not_supported!("opaque cast"), } } - Ok((addr, ty)) + Ok((addr, ty, metadata)) } fn layout(&self, ty: &Ty) -> Result { @@ -411,16 +470,23 @@ impl Evaluator<'_> { } fn place_ty<'a>(&'a self, p: &Place, locals: &'a Locals<'a>) -> Result { - Ok(self.place_addr_and_ty(p, locals)?.1) + Ok(self.place_addr_and_ty_and_metadata(p, locals)?.1) } - fn operand_ty<'a>(&'a self, o: &'a Operand, locals: &'a Locals<'a>) -> Result { + fn operand_ty(&self, o: &Operand, locals: &Locals<'_>) -> Result { Ok(match o { Operand::Copy(p) | Operand::Move(p) => self.place_ty(p, locals)?, Operand::Constant(c) => c.data(Interner).ty.clone(), }) } + fn operand_ty_and_eval(&mut self, o: &Operand, locals: &Locals<'_>) -> Result { + Ok(IntervalAndTy { + interval: self.eval_operand(o, locals)?, + ty: self.operand_ty(o, locals)?, + }) + } + fn interpret_mir( &mut self, body: &MirBody, @@ -498,14 +564,19 @@ impl Evaluator<'_> { cleanup: _, from_hir_call: _, } => { + let destination = self.place_interval(destination, &locals)?; let fn_ty = self.operand_ty(func, &locals)?; + let args = args + .iter() + .map(|x| self.operand_ty_and_eval(x, &locals)) + .collect::>>()?; match &fn_ty.data(Interner).kind { TyKind::Function(_) => { let bytes = self.eval_operand(func, &locals)?; - self.exec_fn_pointer(bytes, destination, args, &locals)?; + self.exec_fn_pointer(bytes, destination, 
&args, &locals)?; } TyKind::FnDef(def, generic_args) => { - self.exec_fn_def(*def, generic_args, destination, args, &locals)?; + self.exec_fn_def(*def, generic_args, destination, &args, &locals)?; } x => not_supported!("unknown function type {x:?}"), } @@ -545,8 +616,12 @@ impl Evaluator<'_> { Ok(match r { Rvalue::Use(x) => Borrowed(self.eval_operand(x, locals)?), Rvalue::Ref(_, p) => { - let addr = self.place_addr(p, locals)?; - Owned(addr.to_bytes()) + let (addr, _, metadata) = self.place_addr_and_ty_and_metadata(p, locals)?; + let mut r = addr.to_bytes(); + if let Some(metadata) = metadata { + r.extend(metadata.get(self)?); + } + Owned(r) } Rvalue::Len(_) => not_supported!("rvalue len"), Rvalue::UnaryOp(op, val) => { @@ -624,8 +699,12 @@ impl Evaluator<'_> { let r = match op { BinOp::Add => l128.overflowing_add(r128).0, BinOp::Mul => l128.overflowing_mul(r128).0, - BinOp::Div => l128.checked_div(r128).ok_or(MirEvalError::Panic)?, - BinOp::Rem => l128.checked_rem(r128).ok_or(MirEvalError::Panic)?, + BinOp::Div => l128.checked_div(r128).ok_or_else(|| { + MirEvalError::Panic(format!("Overflow in {op:?}")) + })?, + BinOp::Rem => l128.checked_rem(r128).ok_or_else(|| { + MirEvalError::Panic(format!("Overflow in {op:?}")) + })?, BinOp::Sub => l128.overflowing_sub(r128).0, BinOp::BitAnd => l128 & r128, BinOp::BitOr => l128 | r128, @@ -635,16 +714,16 @@ impl Evaluator<'_> { let r = r.to_le_bytes(); for &k in &r[lc.len()..] { if k != 0 && (k != 255 || !is_signed) { - return Err(MirEvalError::Panic); + return Err(MirEvalError::Panic(format!("Overflow in {op:?}"))); } } Owned(r[0..lc.len()].into()) } BinOp::Shl | BinOp::Shr => { let shift_amout = if r128 < 0 { - return Err(MirEvalError::Panic); + return Err(MirEvalError::Panic(format!("Overflow in {op:?}"))); } else if r128 > 128 { - return Err(MirEvalError::Panic); + return Err(MirEvalError::Panic(format!("Overflow in {op:?}"))); } else { r128 as u8 }; @@ -720,47 +799,54 @@ impl Evaluator<'_> { } Rvalue::ShallowInitBox(_, _) => not_supported!("shallow init box"), Rvalue::CopyForDeref(_) => not_supported!("copy for deref"), - Rvalue::Aggregate(kind, values) => match kind { - AggregateKind::Array(_) => { - let mut r = vec![]; - for x in values { - let value = self.eval_operand(x, locals)?.get(&self)?; - r.extend(value); + Rvalue::Aggregate(kind, values) => { + let values = values + .iter() + .map(|x| self.eval_operand(x, locals)) + .collect::>>()?; + match kind { + AggregateKind::Array(_) => { + let mut r = vec![]; + for x in values { + let value = x.get(&self)?; + r.extend(value); + } + Owned(r) + } + AggregateKind::Tuple(ty) => { + let layout = self.layout(&ty)?; + Owned(self.make_by_layout( + layout.size.bytes_usize(), + &layout, + None, + values.iter().copied(), + )?) + } + AggregateKind::Union(x, f) => { + let layout = self.layout_adt((*x).into(), Substitution::empty(Interner))?; + let offset = layout + .fields + .offset(u32::from(f.local_id.into_raw()) as usize) + .bytes_usize(); + let op = values[0].get(&self)?; + let mut result = vec![0; layout.size.bytes_usize()]; + result[offset..offset + op.len()].copy_from_slice(op); + Owned(result) + } + AggregateKind::Adt(x, subst) => { + let subst = self.subst_filler(subst, locals); + let (size, variant_layout, tag) = + self.layout_of_variant(*x, subst, locals)?; + Owned(self.make_by_layout( + size, + &variant_layout, + tag, + values.iter().copied(), + )?) 
} - Owned(r) } - AggregateKind::Tuple(ty) => { - let layout = self.layout(&ty)?; - Owned(self.make_by_layout( - layout.size.bytes_usize(), - &layout, - None, - values, - locals, - )?) - } - AggregateKind::Union(x, f) => { - let layout = self.layout_adt((*x).into(), Substitution::empty(Interner))?; - let offset = layout - .fields - .offset(u32::from(f.local_id.into_raw()) as usize) - .bytes_usize(); - let op = self.eval_operand(&values[0], locals)?.get(&self)?; - let mut result = vec![0; layout.size.bytes_usize()]; - result[offset..offset + op.len()].copy_from_slice(op); - Owned(result) - } - AggregateKind::Adt(x, subst) => { - let subst = self.subst_filler(subst, locals); - let (size, variant_layout, tag) = self.layout_of_variant(*x, subst, locals)?; - Owned(self.make_by_layout(size, &variant_layout, tag, values, locals)?) - } - }, + } Rvalue::Cast(kind, operand, target_ty) => match kind { - CastKind::PointerExposeAddress => not_supported!("exposing pointer address"), - CastKind::PointerFromExposedAddress => { - not_supported!("creating pointer from exposed address") - } CastKind::Pointer(cast) => match cast { PointerCast::ReifyFnPointer => { let current_ty = self.operand_ty(operand, locals)?; @@ -818,7 +904,9 @@ impl Evaluator<'_> { x => not_supported!("pointer cast {x:?}"), }, CastKind::DynStar => not_supported!("dyn star cast"), - CastKind::IntToInt => { + CastKind::IntToInt + | CastKind::PointerExposeAddress + | CastKind::PointerFromExposedAddress => { // FIXME: handle signed cast let current = pad16(self.eval_operand(operand, locals)?.get(&self)?, false); let dest_size = @@ -828,7 +916,12 @@ impl Evaluator<'_> { CastKind::FloatToInt => not_supported!("float to int cast"), CastKind::FloatToFloat => not_supported!("float to float cast"), CastKind::IntToFloat => not_supported!("float to int cast"), - CastKind::PtrToPtr => not_supported!("ptr to ptr cast"), + CastKind::PtrToPtr => { + let current = pad16(self.eval_operand(operand, locals)?.get(&self)?, false); + let dest_size = + self.size_of_sized(target_ty, locals, "destination of ptr to ptr cast")?; + Owned(current[0..dest_size].to_vec()) + } CastKind::FnPtrToPtr => not_supported!("fn ptr to ptr cast"), }, }) @@ -895,16 +988,15 @@ impl Evaluator<'_> { size: usize, // Not neccessarily equal to variant_layout.size variant_layout: &Layout, tag: Option<(usize, usize, i128)>, - values: &[Operand], - locals: &Locals<'_>, + values: impl Iterator, ) -> Result> { let mut result = vec![0; size]; if let Some((offset, size, value)) = tag { result[offset..offset + size].copy_from_slice(&value.to_le_bytes()[0..size]); } - for (i, op) in values.iter().enumerate() { + for (i, op) in values.enumerate() { let offset = variant_layout.fields.offset(i).bytes_usize(); - let op = self.eval_operand(op, locals)?.get(&self)?; + let op = op.get(&self)?; result[offset..offset + op.len()].copy_from_slice(op); } Ok(result) @@ -1196,28 +1288,89 @@ impl Evaluator<'_> { } fn exec_intrinsic( - &self, + &mut self, as_str: &str, - mut arg_bytes: impl Iterator>, + args: &[IntervalAndTy], generic_args: Substitution, + destination: Interval, locals: &Locals<'_>, - ) -> Result> { + ) -> Result<()> { match as_str { "size_of" => { let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else { return Err(MirEvalError::TypeError("size_of generic arg is not provided")); }; - let size = self.size_of(ty, locals)?; - match size { - Some(x) => Ok(x.to_le_bytes().to_vec()), - None => return Err(MirEvalError::TypeError("size_of arg is unsized")), - } + let 
size = self.size_of_sized(ty, locals, "size_of arg")?; + destination.write_from_bytes(self, &size.to_le_bytes()[0..destination.size]) + } + "wrapping_add" => { + let [lhs, rhs] = args else { + return Err(MirEvalError::TypeError("const_eval_select args are not provided")); + }; + let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false)); + let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false)); + let ans = lhs.wrapping_add(rhs); + destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size]) + } + "copy" | "copy_nonoverlapping" => { + let [src, dst, offset] = args else { + return Err(MirEvalError::TypeError("copy_nonoverlapping args are not provided")); + }; + let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else { + return Err(MirEvalError::TypeError("copy_nonoverlapping generic arg is not provided")); + }; + let src = Address::from_bytes(src.get(self)?)?; + let dst = Address::from_bytes(dst.get(self)?)?; + let offset = from_bytes!(usize, offset.get(self)?); + let size = self.size_of_sized(ty, locals, "copy_nonoverlapping ptr type")?; + let size = offset * size; + let src = Interval { addr: src, size }; + let dst = Interval { addr: dst, size }; + dst.write_from_interval(self, src) + } + "offset" | "arith_offset" => { + let [ptr, offset] = args else { + return Err(MirEvalError::TypeError("offset args are not provided")); + }; + let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else { + return Err(MirEvalError::TypeError("offset generic arg is not provided")); + }; + let ptr = u128::from_le_bytes(pad16(ptr.get(self)?, false)); + let offset = u128::from_le_bytes(pad16(offset.get(self)?, false)); + let size = self.size_of_sized(ty, locals, "offset ptr type")? as u128; + let ans = ptr + offset * size; + destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size]) + } + "assert_inhabited" | "assert_zero_valid" | "assert_uninit_valid" => { + // FIXME: We should actually implement these checks + Ok(()) + } + "forget" => { + // We don't call any drop glue yet, so there is nothing here + Ok(()) } "transmute" => { - let Some(arg) = arg_bytes.next() else { + let [arg] = args else { return Err(MirEvalError::TypeError("trasmute arg is not provided")); }; - Ok(arg) + destination.write_from_interval(self, arg.interval) + } + "const_eval_select" => { + let [tuple, const_fn, _] = args else { + return Err(MirEvalError::TypeError("const_eval_select args are not provided")); + }; + let mut args = vec![const_fn.clone()]; + let TyKind::Tuple(_, fields) = tuple.ty.kind(Interner) else { + return Err(MirEvalError::TypeError("const_eval_select arg[0] is not a tuple")); + }; + let layout = self.layout(&tuple.ty)?; + for (i, field) in fields.iter(Interner).enumerate() { + let field = field.assert_ty_ref(Interner).clone(); + let offset = layout.fields.offset(i).bytes_usize(); + let addr = tuple.interval.addr.offset(offset); + args.push(IntervalAndTy::new(addr, field, self, locals)?); + } + self.exec_fn_trait(&args, destination, locals) } _ => not_supported!("unknown intrinsic {as_str}"), } @@ -1226,8 +1379,8 @@ impl Evaluator<'_> { fn exec_fn_pointer( &mut self, bytes: Interval, - destination: &Place, - args: &[Operand], + destination: Interval, + args: &[IntervalAndTy], locals: &Locals<'_>, ) -> Result<()> { let id = from_bytes!(usize, bytes.get(self)?); @@ -1244,38 +1397,41 @@ impl Evaluator<'_> { &mut self, def: FnDefId, generic_args: &Substitution, - destination: &Place, - args: &[Operand], + destination: 
Interval, + args: &[IntervalAndTy], locals: &Locals<'_>, ) -> Result<()> { let def: CallableDefId = from_chalk(self.db, def); let generic_args = self.subst_filler(generic_args, &locals); match def { CallableDefId::FunctionId(def) => { - let dest_addr = self.place_addr(destination, &locals)?; - if let Some(x) = self.detect_fn_trait(def) { - self.exec_fn_trait(x, &args, destination, locals)?; + if let Some(_) = self.detect_fn_trait(def) { + self.exec_fn_trait(&args, destination, locals)?; return Ok(()); } - let arg_bytes = args - .iter() - .map(|x| Ok(self.eval_operand(x, &locals)?.get(&self)?.to_owned())) - .collect::>>()?; - self.exec_fn_with_args(def, arg_bytes, generic_args, locals, dest_addr)?; + self.exec_fn_with_args(def, args, generic_args, locals, destination)?; } CallableDefId::StructId(id) => { let (size, variant_layout, tag) = self.layout_of_variant(id.into(), generic_args.clone(), &locals)?; - let result = self.make_by_layout(size, &variant_layout, tag, args, &locals)?; - let dest_addr = self.place_addr(destination, &locals)?; - self.write_memory(dest_addr, &result)?; + let result = self.make_by_layout( + size, + &variant_layout, + tag, + args.iter().map(|x| x.interval), + )?; + destination.write_from_bytes(self, &result)?; } CallableDefId::EnumVariantId(id) => { let (size, variant_layout, tag) = self.layout_of_variant(id.into(), generic_args.clone(), &locals)?; - let result = self.make_by_layout(size, &variant_layout, tag, args, &locals)?; - let dest_addr = self.place_addr(destination, &locals)?; - self.write_memory(dest_addr, &result)?; + let result = self.make_by_layout( + size, + &variant_layout, + tag, + args.iter().map(|x| x.interval), + )?; + destination.write_from_bytes(self, &result)?; } } Ok(()) @@ -1284,10 +1440,10 @@ impl Evaluator<'_> { fn exec_fn_with_args( &mut self, def: FunctionId, - arg_bytes: Vec>, + args: &[IntervalAndTy], generic_args: Substitution, locals: &Locals<'_>, - dest_addr: Address, + destination: Interval, ) -> Result<()> { let function_data = self.db.function_data(def); let is_intrinsic = match &function_data.abi { @@ -1301,14 +1457,18 @@ impl Evaluator<'_> { _ => false, }, }; - let result = if is_intrinsic { - self.exec_intrinsic( + if is_intrinsic { + return self.exec_intrinsic( function_data.name.as_text().unwrap_or_default().as_str(), - arg_bytes.iter().cloned(), + args, generic_args, + destination, &locals, - )? - } else if let Some(x) = self.detect_lang_function(def) { + ); + } + let arg_bytes = + args.iter().map(|x| Ok(x.get(&self)?.to_owned())).collect::>>()?; + let result = if let Some(x) = self.detect_lang_function(def) { self.exec_lang_item(x, &arg_bytes)? 
} else { if let Some(self_ty_idx) = @@ -1321,9 +1481,12 @@ impl Evaluator<'_> { let ty = self .vtable_map .ty_of_bytes(&arg_bytes[0][self.ptr_size()..self.ptr_size() * 2])?; + let mut args_for_target = args.to_vec(); + args_for_target[0] = IntervalAndTy { + interval: args_for_target[0].interval.slice(0..self.ptr_size()), + ty: ty.clone(), + }; let ty = GenericArgData::Ty(ty.clone()).intern(Interner); - let mut args_for_target = arg_bytes; - args_for_target[0] = args_for_target[0][0..self.ptr_size()].to_vec(); let generics_for_target = Substitution::from_iter( Interner, generic_args.iter(Interner).enumerate().map(|(i, x)| { @@ -1336,10 +1499,10 @@ impl Evaluator<'_> { ); return self.exec_fn_with_args( def, - args_for_target, + &args_for_target, generics_for_target, locals, - dest_addr, + destination, ); } let (imp, generic_args) = @@ -1351,20 +1514,19 @@ impl Evaluator<'_> { self.interpret_mir(&mir_body, arg_bytes.iter().cloned(), generic_args) .map_err(|e| MirEvalError::InFunction(imp, Box::new(e)))? }; - self.write_memory(dest_addr, &result)?; + destination.write_from_bytes(self, &result)?; Ok(()) } fn exec_fn_trait( &mut self, - ft: FnTrait, - args: &[Operand], - destination: &Place, + args: &[IntervalAndTy], + destination: Interval, locals: &Locals<'_>, ) -> Result<()> { let func = args.get(0).ok_or(MirEvalError::TypeError("fn trait with no arg"))?; - let mut func_ty = self.operand_ty(func, locals)?; - let mut func_data = self.eval_operand(func, locals)?; + let mut func_ty = func.ty.clone(); + let mut func_data = func.interval; while let TyKind::Ref(_, _, z) = func_ty.kind(Interner) { func_ty = z.clone(); if matches!(func_ty.kind(Interner), TyKind::Dyn(_)) { @@ -1383,7 +1545,7 @@ impl Evaluator<'_> { TyKind::Function(_) => { self.exec_fn_pointer(func_data, destination, &args[1..], locals)?; } - x => not_supported!("Call {ft:?} trait methods with type {x:?}"), + x => not_supported!("Call FnTrait methods with type {x:?}"), } Ok(()) } @@ -1392,7 +1554,10 @@ impl Evaluator<'_> { use LangItem::*; let mut args = args.iter(); match x { - PanicFmt | BeginPanic => Err(MirEvalError::Panic), + // FIXME: we want to find the panic message from arguments, but it wouldn't work + // currently even if we do that, since macro expansion of panic related macros + // is dummy. + PanicFmt | BeginPanic => Err(MirEvalError::Panic("".to_string())), SliceLen => { let arg = args .next() diff --git a/crates/hir-ty/src/mir/lower.rs b/crates/hir-ty/src/mir/lower.rs index 5d9ae32072..65e3348b21 100644 --- a/crates/hir-ty/src/mir/lower.rs +++ b/crates/hir-ty/src/mir/lower.rs @@ -1285,6 +1285,11 @@ fn cast_kind(source_ty: &Ty, target_ty: &Ty) -> Result { (_, chalk_ir::Scalar::Float(_)) => CastKind::IntToFloat, (_, _) => CastKind::IntToInt, }, + (TyKind::Scalar(_), TyKind::Raw(..)) => CastKind::PointerFromExposedAddress, + (TyKind::Raw(..), TyKind::Scalar(_)) => CastKind::PointerExposeAddress, + (TyKind::Raw(..) | TyKind::Ref(..), TyKind::Raw(..) 
| TyKind::Ref(..)) => { + CastKind::PtrToPtr + } // Enum to int casts (TyKind::Scalar(_), TyKind::Adt(..)) | (TyKind::Adt(..), TyKind::Scalar(_)) => { CastKind::IntToInt diff --git a/crates/hir-ty/src/tests/simple.rs b/crates/hir-ty/src/tests/simple.rs index 13cc3fea52..8322b9e1ca 100644 --- a/crates/hir-ty/src/tests/simple.rs +++ b/crates/hir-ty/src/tests/simple.rs @@ -2696,6 +2696,21 @@ fn f() { ) } +#[test] +fn infer_ref_to_raw_cast() { + check_types( + r#" +struct S; + +fn f() { + let s = &mut S; + let s = s as *mut _; + //^ *mut S +} + "#, + ); +} + #[test] fn infer_missing_type() { check_types( @@ -3258,25 +3273,6 @@ fn f(t: Ark) { ); } -// FIXME -#[test] -fn castable_to2() { - check_infer( - r#" -fn func() { - let x = &0u32 as *const _; -} -"#, - expect![[r#" - 10..44 '{ ...t _; }': () - 20..21 'x': *const {unknown} - 24..29 '&0u32': &u32 - 24..41 '&0u32 ...onst _': *const {unknown} - 25..29 '0u32': u32 - "#]], - ); -} - #[test] fn issue_14275() { // FIXME: evaluate const generic diff --git a/crates/ide/src/hover/render.rs b/crates/ide/src/hover/render.rs index da725ce502..fb7b15e05d 100644 --- a/crates/ide/src/hover/render.rs +++ b/crates/ide/src/hover/render.rs @@ -409,7 +409,6 @@ pub(super) fn definition( } match it.eval(db) { Ok(()) => Some("pass".into()), - Err(MirEvalError::Panic) => Some("fail".into()), Err(MirEvalError::MirLowerError(f, e)) => { let name = &db.function_data(f).name; Some(format!("error: fail to lower {name} due {e:?}")) From 8a3ad7c3d592adac72b1fce6bc208cdf7a40c8ad Mon Sep 17 00:00:00 2001 From: hkalbasi Date: Tue, 21 Mar 2023 02:20:30 +0330 Subject: [PATCH 1198/1945] Support evaluating inherent associated constants with generics --- crates/hir-ty/src/consteval.rs | 10 ++++++---- crates/hir-ty/src/consteval/tests.rs | 8 ++++---- crates/hir-ty/src/db.rs | 2 +- crates/hir-ty/src/infer/expr.rs | 15 --------------- crates/hir-ty/src/mir/eval.rs | 5 +++-- crates/hir-ty/src/mir/lower.rs | 11 ++++++----- crates/hir/src/lib.rs | 4 ++-- 7 files changed, 22 insertions(+), 33 deletions(-) diff --git a/crates/hir-ty/src/consteval.rs b/crates/hir-ty/src/consteval.rs index fcb3445a54..7e69971fee 100644 --- a/crates/hir-ty/src/consteval.rs +++ b/crates/hir-ty/src/consteval.rs @@ -15,7 +15,7 @@ use stdx::never; use crate::{ db::HirDatabase, infer::InferenceContext, layout::layout_of_ty, lower::ParamLoweringMode, to_placeholder_idx, utils::Generics, Const, ConstData, ConstScalar, ConstValue, GenericArg, - Interner, MemoryMap, Ty, TyBuilder, + Interner, MemoryMap, Substitution, Ty, TyBuilder, }; use super::mir::{interpret_mir, lower_to_mir, pad16, MirEvalError, MirLowerError}; @@ -169,6 +169,7 @@ pub(crate) fn const_eval_recover( _: &dyn HirDatabase, _: &[String], _: &ConstId, + _: &Substitution, ) -> Result { Err(ConstEvalError::MirLowerError(MirLowerError::Loop)) } @@ -184,10 +185,11 @@ pub(crate) fn const_eval_discriminant_recover( pub(crate) fn const_eval_query( db: &dyn HirDatabase, const_id: ConstId, + subst: Substitution, ) -> Result { let def = const_id.into(); let body = db.mir_body(def)?; - let c = interpret_mir(db, &body, false)?; + let c = interpret_mir(db, &body, subst, false)?; Ok(c) } @@ -210,7 +212,7 @@ pub(crate) fn const_eval_discriminant_variant( return Ok(value); } let mir_body = db.mir_body(def)?; - let c = interpret_mir(db, &mir_body, false)?; + let c = interpret_mir(db, &mir_body, Substitution::empty(Interner), false)?; let c = try_const_usize(&c).unwrap() as i128; Ok(c) } @@ -234,7 +236,7 @@ pub(crate) fn eval_to_const( } let infer = 
ctx.clone().resolve_all(); if let Ok(mir_body) = lower_to_mir(ctx.db, ctx.owner, &ctx.body, &infer, expr) { - if let Ok(result) = interpret_mir(db, &mir_body, true) { + if let Ok(result) = interpret_mir(db, &mir_body, Substitution::empty(Interner), true) { return result; } } diff --git a/crates/hir-ty/src/consteval/tests.rs b/crates/hir-ty/src/consteval/tests.rs index 47ef26fc58..a0efc7541e 100644 --- a/crates/hir-ty/src/consteval/tests.rs +++ b/crates/hir-ty/src/consteval/tests.rs @@ -1,4 +1,5 @@ use base_db::fixture::WithFixture; +use chalk_ir::Substitution; use hir_def::db::DefDatabase; use crate::{ @@ -64,7 +65,7 @@ fn eval_goal(ra_fixture: &str) -> Result { _ => None, }) .unwrap(); - db.const_eval(const_id) + db.const_eval(const_id, Substitution::empty(Interner)) } #[test] @@ -1519,8 +1520,7 @@ fn const_generic_subst_fn() { #[test] fn const_generic_subst_assoc_const_impl() { - // FIXME: this should evaluate to 5 - check_fail( + check_number( r#" struct Adder; impl Adder { @@ -1528,7 +1528,7 @@ fn const_generic_subst_assoc_const_impl() { } const GOAL: usize = Adder::<2, 3>::VAL; "#, - ConstEvalError::MirEvalError(MirEvalError::TypeError("missing generic arg")), + 5, ); } diff --git a/crates/hir-ty/src/db.rs b/crates/hir-ty/src/db.rs index 8f1af4c2f8..000944e0b5 100644 --- a/crates/hir-ty/src/db.rs +++ b/crates/hir-ty/src/db.rs @@ -57,7 +57,7 @@ pub trait HirDatabase: DefDatabase + Upcast { #[salsa::invoke(crate::consteval::const_eval_query)] #[salsa::cycle(crate::consteval::const_eval_recover)] - fn const_eval(&self, def: ConstId) -> Result; + fn const_eval(&self, def: ConstId, subst: Substitution) -> Result; #[salsa::invoke(crate::consteval::const_eval_discriminant_variant)] #[salsa::cycle(crate::consteval::const_eval_discriminant_recover)] diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs index d14b3f7140..d52188bb28 100644 --- a/crates/hir-ty/src/infer/expr.rs +++ b/crates/hir-ty/src/infer/expr.rs @@ -612,21 +612,6 @@ impl<'a> InferenceContext<'a> { let inner_ty = self.infer_expr_inner(*expr, &Expectation::none()); self.resolve_associated_type(inner_ty, self.resolve_future_future_output()) } - // Expr::Try { expr } => { - // let inner_ty = self.infer_expr_inner(*expr, &Expectation::none()); - // if let Some(trait_) = self.resolve_lang_trait(LangItem::Try) { - // if let Some(func) = self.db.trait_data(trait_).method_by_name(&name!(branch)) { - // let subst = TyBuilder::subst_for_def(self.db, trait_, None) - // .push(inner_ty.clone()) - // .build(); - // self.write_method_resolution(tgt_expr, func, subst.clone()); - // } - // let try_output = self.resolve_output_on(trait_); - // self.resolve_associated_type(inner_ty, try_output) - // } else { - // self.err_ty() - // } - // } Expr::Cast { expr, type_ref } => { let cast_ty = self.make_ty(type_ref); // FIXME: propagate the "castable to" expectation diff --git a/crates/hir-ty/src/mir/eval.rs b/crates/hir-ty/src/mir/eval.rs index 26bf877cf0..7b83645fae 100644 --- a/crates/hir-ty/src/mir/eval.rs +++ b/crates/hir-ty/src/mir/eval.rs @@ -282,6 +282,7 @@ struct Locals<'a> { pub fn interpret_mir( db: &dyn HirDatabase, body: &MirBody, + subst: Substitution, // FIXME: This is workaround. Ideally, const generics should have a separate body (issue #7434), but now // they share their body with their parent, so in MIR lowering we have locals of the parent body, which // might have placeholders. 
With this argument, we (wrongly) assume that every placeholder type has @@ -291,11 +292,11 @@ pub fn interpret_mir( ) -> Result { let ty = body.locals[return_slot()].ty.clone(); let mut evaluator = Evaluator::new(db, body, assert_placeholder_ty_is_unused); - let bytes = evaluator.interpret_mir_with_no_arg(&body)?; + let bytes = evaluator.interpret_mir(&body, None.into_iter(), subst.clone())?; let memory_map = evaluator.create_memory_map( &bytes, &ty, - &Locals { ptr: &ArenaMap::new(), body: &body, subst: &Substitution::empty(Interner) }, + &Locals { ptr: &ArenaMap::new(), body: &body, subst: &subst }, )?; return Ok(intern_const_scalar(ConstScalar::Bytes(bytes, memory_map), ty)); } diff --git a/crates/hir-ty/src/mir/lower.rs b/crates/hir-ty/src/mir/lower.rs index 65e3348b21..1821796be3 100644 --- a/crates/hir-ty/src/mir/lower.rs +++ b/crates/hir-ty/src/mir/lower.rs @@ -231,13 +231,13 @@ impl MirLowerCtx<'_> { let pr = match pr { ResolveValueResult::ValueNs(v) => v, ResolveValueResult::Partial(..) => { - if let Some(assoc) = self + if let Some((assoc, subst)) = self .infer .assoc_resolutions_for_expr(expr_id) { - match assoc.0 { + match assoc { hir_def::AssocItemId::ConstId(c) => { - self.lower_const(c, current, place, expr_id.into())?; + self.lower_const(c, current, place, subst, expr_id.into())?; return Ok(Some(current)) }, hir_def::AssocItemId::FunctionId(_) => { @@ -274,7 +274,7 @@ impl MirLowerCtx<'_> { Ok(Some(current)) } ValueNs::ConstId(const_id) => { - self.lower_const(const_id, current, place, expr_id.into())?; + self.lower_const(const_id, current, place, Substitution::empty(Interner), expr_id.into())?; Ok(Some(current)) } ValueNs::EnumVariantId(variant_id) => { @@ -951,9 +951,10 @@ impl MirLowerCtx<'_> { const_id: hir_def::ConstId, prev_block: BasicBlockId, place: Place, + subst: Substitution, span: MirSpan, ) -> Result<()> { - let c = self.db.const_eval(const_id)?; + let c = self.db.const_eval(const_id, subst)?; self.write_const_to_place(c, prev_block, place, span) } diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 35424feec8..dbf618afa6 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -1801,7 +1801,7 @@ impl Function { let body = db .mir_body(self.id.into()) .map_err(|e| MirEvalError::MirLowerError(self.id.into(), e))?; - interpret_mir(db, &body, false)?; + interpret_mir(db, &body, Substitution::empty(Interner), false)?; Ok(()) } } @@ -1947,7 +1947,7 @@ impl Const { } pub fn render_eval(self, db: &dyn HirDatabase) -> Result { - let c = db.const_eval(self.id)?; + let c = db.const_eval(self.id, Substitution::empty(Interner))?; let r = format!("{}", HexifiedConst(c).display(db)); // We want to see things like `` and `` as they are probably bug in our // implementation, but there is no need to show things like `` or `` to From 9745a25d57fbd9a73aec1306554ac87936a63e7d Mon Sep 17 00:00:00 2001 From: x2cf Date: Tue, 21 Mar 2023 14:56:07 +0800 Subject: [PATCH 1199/1945] Fix VS Code status message formatting error --- editors/code/src/ctx.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/editors/code/src/ctx.ts b/editors/code/src/ctx.ts index c2dca733df..85579453a6 100644 --- a/editors/code/src/ctx.ts +++ b/editors/code/src/ctx.ts @@ -383,7 +383,7 @@ export class Ctx { ); statusBar.tooltip.appendMarkdown("\n\n[Open logs](command:rust-analyzer.openLogs)"); statusBar.tooltip.appendMarkdown("\n\n[Restart server](command:rust-analyzer.startServer)"); - statusBar.tooltip.appendMarkdown("[Stop server](command:rust-analyzer.stopServer)"); + 
statusBar.tooltip.appendMarkdown("\n\n[Stop server](command:rust-analyzer.stopServer)"); if (!status.quiescent) icon = "$(sync~spin) "; statusBar.text = `${icon}rust-analyzer`; } From 28225cc33d60615aaa140dc4b0d32e2a1c21462c Mon Sep 17 00:00:00 2001 From: DropDemBits Date: Sat, 18 Mar 2023 11:41:38 -0400 Subject: [PATCH 1200/1945] internal: Coalesce adjacent Indels --- crates/text-edit/src/lib.rs | 52 +++++++++++++++++++++++++++++++++++++ 1 file changed, 52 insertions(+) diff --git a/crates/text-edit/src/lib.rs b/crates/text-edit/src/lib.rs index 9bb4271b65..4705d18187 100644 --- a/crates/text-edit/src/lib.rs +++ b/crates/text-edit/src/lib.rs @@ -176,6 +176,7 @@ impl TextEditBuilder { pub fn finish(self) -> TextEdit { let mut indels = self.indels; assert_disjoint_or_equal(&mut indels); + indels = coalesce_indels(indels); TextEdit { indels } } pub fn invalidates_offset(&self, offset: TextSize) -> bool { @@ -205,6 +206,21 @@ where indels.clone().zip(indels.skip(1)).all(|(l, r)| l.delete.end() <= r.delete.start() || l == r) } +fn coalesce_indels(indels: Vec) -> Vec { + indels + .into_iter() + .coalesce(|mut a, b| { + if a.delete.end() == b.delete.start() { + a.insert.push_str(&b.insert); + a.delete = TextRange::new(a.delete.start(), b.delete.end()); + Ok(a) + } else { + Err((a, b)) + } + }) + .collect_vec() +} + #[cfg(test)] mod tests { use super::{TextEdit, TextEditBuilder, TextRange}; @@ -261,4 +277,40 @@ mod tests { let edit2 = TextEdit::delete(range(9, 13)); assert!(edit1.union(edit2).is_err()); } + + #[test] + fn test_coalesce_disjoint() { + let mut builder = TextEditBuilder::default(); + builder.replace(range(1, 3), "aa".into()); + builder.replace(range(5, 7), "bb".into()); + let edit = builder.finish(); + + assert_eq!(edit.indels.len(), 2); + } + + #[test] + fn test_coalesce_adjacent() { + let mut builder = TextEditBuilder::default(); + builder.replace(range(1, 3), "aa".into()); + builder.replace(range(3, 5), "bb".into()); + + let edit = builder.finish(); + assert_eq!(edit.indels.len(), 1); + assert_eq!(edit.indels[0].insert, "aabb"); + assert_eq!(edit.indels[0].delete, range(1, 5)); + } + + #[test] + fn test_coalesce_adjacent_series() { + let mut builder = TextEditBuilder::default(); + builder.replace(range(1, 3), "au".into()); + builder.replace(range(3, 5), "www".into()); + builder.replace(range(5, 8), "".into()); + builder.replace(range(8, 9), "ub".into()); + + let edit = builder.finish(); + assert_eq!(edit.indels.len(), 1); + assert_eq!(edit.indels[0].insert, "auwwwub"); + assert_eq!(edit.indels[0].delete, range(1, 9)); + } } From 3622fb645632fcf81af82919259be4b0012a3939 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 20 Mar 2023 21:24:53 +0100 Subject: [PATCH 1201/1945] Fix lints --- editors/code/src/client.ts | 75 ++++++++++++++++++-------------------- editors/code/src/ctx.ts | 4 +- 2 files changed, 37 insertions(+), 42 deletions(-) diff --git a/editors/code/src/client.ts b/editors/code/src/client.ts index 2a1c757dfe..4ca6601a6a 100644 --- a/editors/code/src/client.ts +++ b/editors/code/src/client.ts @@ -121,59 +121,54 @@ export async function createClient( const preview = config.previewRustcOutput; const errorCode = config.useRustcErrorCode; diagnosticList.forEach((diag, idx) => { - let value = + const value = typeof diag.code === "string" || typeof diag.code === "number" ? 
diag.code : diag.code?.value; if (value === "unlinked-file" && !unlinkedFiles.includes(uri)) { - let config = vscode.workspace.getConfiguration("rust-analyzer"); + const config = vscode.workspace.getConfiguration("rust-analyzer"); if (config.get("showUnlinkedFileNotification")) { unlinkedFiles.push(uri); - let folder = vscode.workspace.getWorkspaceFolder(uri)?.uri.fsPath; + const folder = vscode.workspace.getWorkspaceFolder(uri)?.uri.fsPath; if (folder) { - let parent_backslash = uri.fsPath.lastIndexOf( + const parentBackslash = uri.fsPath.lastIndexOf( pathSeparator + "src" ); - let parent = uri.fsPath.substring(0, parent_backslash); + const parent = uri.fsPath.substring(0, parentBackslash); if (parent.startsWith(folder)) { - let path = vscode.Uri.file( + const path = vscode.Uri.file( parent + pathSeparator + "Cargo.toml" ); - void vscode.workspace.fs.stat(path).then(() => { - vscode.window - .showInformationMessage( - `This rust file does not belong to a loaded cargo project. It looks like it might belong to the workspace at ${path}, do you want to add it to the linked Projects?`, - "Yes", - "No", - "Don't show this again" - ) - .then((choice) => { - switch (choice) { - case "Yes": - break; - case "No": - config.update( - "linkedProjects", - config - .get("linkedProjects") - ?.concat( - path.fsPath.substring( - folder!.length - ) - ), - false - ); - break; - case "Don't show this again": - config.update( - "showUnlinkedFileNotification", - false, - false - ); - break; - } - }); + void vscode.workspace.fs.stat(path).then(async () => { + const choice = await vscode.window.showInformationMessage( + `This rust file does not belong to a loaded cargo project. It looks like it might belong to the workspace at ${path}, do you want to add it to the linked Projects?`, + "Yes", + "No", + "Don't show this again" + ); + switch (choice) { + case "Yes": + break; + case "No": + await config.update( + "linkedProjects", + config + .get("linkedProjects") + ?.concat( + path.fsPath.substring(folder.length) + ), + false + ); + break; + case "Don't show this again": + await config.update( + "showUnlinkedFileNotification", + false, + false + ); + break; + } }); } } diff --git a/editors/code/src/ctx.ts b/editors/code/src/ctx.ts index 5515921ed1..8a0f4ea477 100644 --- a/editors/code/src/ctx.ts +++ b/editors/code/src/ctx.ts @@ -95,7 +95,6 @@ export class Ctx { ) { extCtx.subscriptions.push(this); this.statusBar = vscode.window.createStatusBarItem(vscode.StatusBarAlignment.Left); - this.statusBar.show(); this.workspace = workspace; this.clientSubscriptions = []; this.commandDisposables = []; @@ -338,6 +337,7 @@ export class Ctx { setServerStatus(status: ServerStatusParams | { health: "stopped" }) { let icon = ""; const statusBar = this.statusBar; + statusBar.show(); statusBar.tooltip = new vscode.MarkdownString("", true); statusBar.tooltip.isTrusted = true; switch (status.health) { @@ -386,7 +386,7 @@ export class Ctx { ); statusBar.tooltip.appendMarkdown("\n\n[Open logs](command:rust-analyzer.openLogs)"); statusBar.tooltip.appendMarkdown("\n\n[Restart server](command:rust-analyzer.startServer)"); - statusBar.tooltip.appendMarkdown("[Stop server](command:rust-analyzer.stopServer)"); + statusBar.tooltip.appendMarkdown("\n\n[Stop server](command:rust-analyzer.stopServer)"); if (!status.quiescent) icon = "$(sync~spin) "; statusBar.text = `${icon}rust-analyzer`; } From c01ba4a3101e343464093603b12bb5327d8a320f Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sat, 25 Mar 2023 14:22:05 +0100 Subject: [PATCH 1202/1945] 
Reject symlinks in project-json --- crates/project-model/src/workspace.rs | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs index d1e53e12ee..2158485a33 100644 --- a/crates/project-model/src/workspace.rs +++ b/crates/project-model/src/workspace.rs @@ -4,7 +4,7 @@ use std::{collections::VecDeque, fmt, fs, process::Command, sync::Arc}; -use anyhow::{format_err, Context, Result}; +use anyhow::{bail, format_err, Context, Result}; use base_db::{ CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, Edition, Env, FileId, LangCrateOrigin, ProcMacroLoadResult, TargetLayoutLoadResult, @@ -154,6 +154,12 @@ impl ProjectWorkspace { ) -> Result { let res = match manifest { ProjectManifest::ProjectJson(project_json) => { + let metadata = fs::symlink_metadata(&project_json).with_context(|| { + format!("Failed to read json file {}", project_json.display()) + })?; + if metadata.is_symlink() { + bail!("The project-json may not currently point to a symlink"); + } let file = fs::read_to_string(&project_json).with_context(|| { format!("Failed to read json file {}", project_json.display()) })?; From 39e86e78c3e3a8fb35b93ac5e658b3379e1f9e6e Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sat, 25 Mar 2023 14:43:08 +0100 Subject: [PATCH 1203/1945] Bump Cargo.lock --- Cargo.lock | 257 +++++++++++++++---------------- Cargo.toml | 4 + crates/flycheck/Cargo.toml | 4 +- crates/paths/Cargo.toml | 2 +- crates/proc-macro-api/Cargo.toml | 4 +- crates/profile/Cargo.toml | 2 +- crates/project-model/Cargo.toml | 4 +- crates/rust-analyzer/Cargo.toml | 14 +- crates/syntax/Cargo.toml | 2 +- lib/lsp-server/Cargo.toml | 4 +- 10 files changed, 150 insertions(+), 147 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 25242c6028..f97591411e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -28,9 +28,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.68" +version = "1.0.70" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2cb2f989d18dd141ab8ae82f64d1a8cdd37e0840f73a406896cf5e99502fab61" +checksum = "7de8ce5e0f9f8d88245311066a578d72b7af3e7088f32783804676302df237e4" [[package]] name = "anymap" @@ -40,9 +40,9 @@ checksum = "8f1f8f5a6f3d50d89e3797d7593a50f96bb2aaa20ca0cc7be1fb673232c91d72" [[package]] name = "arbitrary" -version = "1.2.2" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0224938f92e7aef515fac2ff2d18bd1115c1394ddf4a092e0c87e8be9499ee5" +checksum = "e2d098ff73c1ca148721f37baad5ea6a465a13f9573aba8641fbbbae8164a54e" [[package]] name = "arrayvec" @@ -111,9 +111,9 @@ checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" [[package]] name = "camino" -version = "1.1.2" +version = "1.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c77df041dc383319cc661b428b6961a005db4d6808d5e12536931b1ca9556055" +checksum = "c530edf18f37068ac2d977409ed5cd50d53d73bc653c7647b48eb78976ac9ae2" dependencies = [ "serde", ] @@ -129,9 +129,9 @@ dependencies = [ [[package]] name = "cargo_metadata" -version = "0.15.2" +version = "0.15.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "982a0cf6a99c350d7246035613882e376d58cebe571785abc5da4f648d53ac0a" +checksum = "08a1ec454bc3eead8719cb56e15dbbfecdbc14e4b3a3ae4936cc6e31f5fc0d07" dependencies = [ "camino", "cargo-platform", @@ -143,9 +143,9 @@ dependencies = [ [[package]] name = "cc" -version = "1.0.78" +version = 
"1.0.79" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a20104e2335ce8a659d6dd92a51a767a0c062599c73b343fd152cb401e828c3d" +checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f" [[package]] name = "cfg" @@ -221,9 +221,9 @@ dependencies = [ [[package]] name = "command-group" -version = "2.0.1" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "026c3922235f9f7d78f21251a026f3acdeb7cce3deba107fe09a4bfa63d850a2" +checksum = "5080df6b0f0ecb76cab30808f00d937ba725cebe266a3da8cd89dff92f2a9916" dependencies = [ "nix", "winapi", @@ -257,9 +257,9 @@ dependencies = [ [[package]] name = "crossbeam-channel" -version = "0.5.6" +version = "0.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2dd04ddaf88237dc3b8d8f9a3c1004b506b54b3313403944054d23c0870c521" +checksum = "cf2b3e8478797446514c91ef04bafcb59faba183e621ad488df88983cc14128c" dependencies = [ "cfg-if", "crossbeam-utils", @@ -267,9 +267,9 @@ dependencies = [ [[package]] name = "crossbeam-deque" -version = "0.8.2" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "715e8152b692bba2d374b53d4875445368fdf21a94751410af607a5ac677d1fc" +checksum = "ce6fd6f855243022dcecf8702fef0c297d4338e226845fe067f6341ad9fa0cef" dependencies = [ "cfg-if", "crossbeam-epoch", @@ -278,22 +278,22 @@ dependencies = [ [[package]] name = "crossbeam-epoch" -version = "0.9.13" +version = "0.9.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01a9af1f4c2ef74bb8aa1f7e19706bc72d03598c8a570bb5de72243c7a9d9d5a" +checksum = "46bd5f3f85273295a9d14aedfb86f6aadbff6d8f5295c4a9edb08e819dcf5695" dependencies = [ "autocfg", "cfg-if", "crossbeam-utils", - "memoffset 0.7.1", + "memoffset", "scopeguard", ] [[package]] name = "crossbeam-utils" -version = "0.8.14" +version = "0.8.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fb766fa798726286dbbb842f174001dab8abc7b627a1dd86e0b7222a95d929f" +checksum = "3c063cd8cc95f5c377ed0d4b49a4b21f632396ff690e8470c29b3359b346984b" dependencies = [ "cfg-if", ] @@ -313,9 +313,9 @@ dependencies = [ [[package]] name = "derive_arbitrary" -version = "1.2.2" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf460bbff5f571bfc762da5102729f59f338be7db17a21fade44c5c4f5005350" +checksum = "f3cdeb9ec472d588e539a818b2dee436825730da08ad0017c4b1a17676bdc8b7" dependencies = [ "proc-macro2", "quote", @@ -342,24 +342,24 @@ checksum = "9bda8e21c04aca2ae33ffc2fd8c23134f3cac46db123ba97bd9d3f3b8a4a85e1" [[package]] name = "either" -version = "1.8.0" +version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90e5c1c8368803113bf0c9584fc495a58b86dc8a29edbf8fe877d21d9507e797" +checksum = "7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91" [[package]] name = "ena" -version = "0.14.0" +version = "0.14.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7402b94a93c24e742487327a7cd839dc9d36fec9de9fb25b09f2dae459f36c3" +checksum = "c533630cf40e9caa44bd91aadc88a75d75a4c3a12b4cfde353cbed41daa1e1f1" dependencies = [ "log", ] [[package]] name = "expect-test" -version = "1.4.0" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d4661aca38d826eb7c72fe128e4238220616de4c0cc00db7bfc38e2e1364dd3" +checksum = "30d9eafeadd538e68fb28016364c9732d78e420b9ff8853fa5e4058861e9f8d3" dependencies = 
[ "dissimilar", "once_cell", @@ -419,12 +419,6 @@ dependencies = [ "percent-encoding", ] -[[package]] -name = "fs_extra" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2022715d62ab30faffd124d40b76f4134a550a87792276512b18d63272333394" - [[package]] name = "fsevent-sys" version = "4.1.0" @@ -442,9 +436,9 @@ checksum = "7ab85b9b05e3978cc9a9cf8fea7f01b494e1a09ed3037e16ba39edc7a29eb61a" [[package]] name = "gimli" -version = "0.27.0" +version = "0.27.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dec7af912d60cdbd3677c1af9352ebae6fb8394d165568a2234df0fa00f87793" +checksum = "ad0a93d233ebf96623465aad4046a8d3aa4da22d4f4beba5388838c8a434bbb4" [[package]] name = "hashbrown" @@ -774,9 +768,9 @@ dependencies = [ [[package]] name = "indexmap" -version = "1.9.2" +version = "1.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1885e79c1fc4b10f0e172c475f458b7f7b93061064d98c3293e98c5ba0c8b399" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" dependencies = [ "autocfg", "hashbrown", @@ -832,9 +826,9 @@ dependencies = [ [[package]] name = "itoa" -version = "1.0.5" +version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fad582f4b9e86b6caa621cabeb0963332d92eea04729ab12892c2533951e6440" +checksum = "453ad9f582a441959e5f0d088b02ce04cfe8d51a8eaf077f12ac6d3e94164ca6" [[package]] name = "jod-thread" @@ -874,9 +868,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" [[package]] name = "libc" -version = "0.2.139" +version = "0.2.140" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "201de327520df007757c1f0adce6e827fe8562fbc28bfd9c15571c66ca1f5f79" +checksum = "99227334921fae1a979cf0bfdfcc6b3e5ce376ef57e16fb6fb3ea2ed6095f80c" [[package]] name = "libloading" @@ -977,27 +971,18 @@ checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" [[package]] name = "memmap2" -version = "0.5.8" +version = "0.5.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b182332558b18d807c4ce1ca8ca983b34c3ee32765e47b3f0f69b90355cc1dc" +checksum = "83faa42c0a078c393f6b29d5db232d8be22776a891f8f56e5284faee4a20b327" dependencies = [ "libc", ] [[package]] name = "memoffset" -version = "0.6.5" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5aa361d4faea93603064a027415f07bd8e1d5c88c9fbf68bf56a285428fd79ce" -dependencies = [ - "autocfg", -] - -[[package]] -name = "memoffset" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5de893c32cde5f383baa4c04c5d6dbdd735cfd4a794b0debdb2bb1b421da5ff4" +checksum = "d61c719bcfbcf5d62b3a09efa6088de8c54bc0bfcd3ea7ae39fcc186108b8de1" dependencies = [ "autocfg", ] @@ -1055,9 +1040,9 @@ dependencies = [ [[package]] name = "notify" -version = "5.0.0" +version = "5.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed2c66da08abae1c024c01d635253e402341b4060a12e99b31c7594063bf490a" +checksum = "58ea850aa68a06e48fdb069c0ec44d0d64c8dbffa49bf3b6f7f0a901fdea1ba9" dependencies = [ "bitflags", "crossbeam-channel", @@ -1068,7 +1053,7 @@ dependencies = [ "libc", "mio", "walkdir", - "winapi", + "windows-sys", ] [[package]] @@ -1093,18 +1078,18 @@ dependencies = [ [[package]] name = "object" -version = "0.30.2" +version = "0.30.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"2b8c786513eb403643f2a88c244c2aaa270ef2153f55094587d0c48a3cf22a83" +checksum = "ea86265d3d3dcb6a27fc51bd29a4bf387fae9d2986b823079d4986af253eb439" dependencies = [ "memchr", ] [[package]] name = "once_cell" -version = "1.17.0" +version = "1.17.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f61fba1741ea2b3d6a1e3178721804bb716a68a6aeba1149b5d52e3d464ea66" +checksum = "b7e5500299e16ebb147ae15a00a942af264cf3688f47923b8fc2cd5858f23ad3" [[package]] name = "oorandom" @@ -1180,9 +1165,9 @@ dependencies = [ [[package]] name = "paste" -version = "1.0.11" +version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d01a5bd0424d00070b0098dd17ebca6f961a959dead1dbcbbbc1d1cd8d3deeba" +checksum = "9f746c4065a8fa3fe23974dd82f15431cc8d40779821001404d10d2e79ca7d79" [[package]] name = "paths" @@ -1282,9 +1267,9 @@ version = "0.0.0" [[package]] name = "proc-macro2" -version = "1.0.50" +version = "1.0.53" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ef7d57beacfaf2d8aee5937dab7b7f28de3cb8b1828479bb5de2a7106f2bae2" +checksum = "ba466839c78239c09faf015484e5cc04860f88242cff4d03eb038f04b4699b73" dependencies = [ "unicode-ident", ] @@ -1366,18 +1351,18 @@ dependencies = [ [[package]] name = "quote" -version = "1.0.23" +version = "1.0.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8856d8364d252a14d474036ea1358d63c9e6965c8e5c1885c18f73d70bff9c7b" +checksum = "4424af4bf778aae2051a77b60283332f386554255d722233d09fbfc7e30da2fc" dependencies = [ "proc-macro2", ] [[package]] name = "rayon" -version = "1.6.1" +version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6db3a213adf02b3bcfd2d3846bb41cb22857d131789e01df434fb7e7bc0759b7" +checksum = "1d2df5196e37bcc87abebc0053e20787d73847bb33134a69841207dd0a47f03b" dependencies = [ "either", "rayon-core", @@ -1385,9 +1370,9 @@ dependencies = [ [[package]] name = "rayon-core" -version = "1.10.1" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cac410af5d00ab6884528b4ab69d1e8e146e8d471201800fa1b4524126de6ad3" +checksum = "4b8f95bd6966f5c87776639160a66bd8ab9895d9d4ab01ddba9fc60661aebe8d" dependencies = [ "crossbeam-channel", "crossbeam-deque", @@ -1406,9 +1391,9 @@ dependencies = [ [[package]] name = "regex" -version = "1.7.1" +version = "1.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48aaa5748ba571fb95cd2c85c09f629215d3a6ece942baa100950af03a34f733" +checksum = "8b1f693b24f6ac912f4893ef08244d70b6067480d2f1a46e950c9691e6749d1d" dependencies = [ "regex-syntax", ] @@ -1424,19 +1409,19 @@ dependencies = [ [[package]] name = "regex-syntax" -version = "0.6.28" +version = "0.6.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "456c603be3e8d448b072f410900c09faf164fbce2d480456f50eea6e25f9c848" +checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" [[package]] name = "rowan" -version = "0.15.10" +version = "0.15.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5811547e7ba31e903fe48c8ceab10d40d70a101f3d15523c847cce91aa71f332" +checksum = "64449cfef9483a475ed56ae30e2da5ee96448789fb2aa240a04beb6a055078bf" dependencies = [ "countme", "hashbrown", - "memoffset 0.6.5", + "memoffset", "rustc-hash", "text-size", ] @@ -1451,6 +1436,7 @@ dependencies = [ "crossbeam-channel", "dissimilar", "expect-test", + "filetime", "flycheck", "hir", "hir-def", @@ -1464,9 
+1450,11 @@ dependencies = [ "lsp-types", "mbe", "mimalloc", + "mio", "num_cpus", "oorandom", "parking_lot 0.12.1", + "parking_lot_core 0.9.6", "proc-macro-api", "proc-macro-srv", "profile", @@ -1476,10 +1464,12 @@ dependencies = [ "scip", "serde", "serde_json", + "serde_repr", "sourcegen", "stdx", "syntax", "test-utils", + "thiserror", "threadpool", "tikv-jemallocator", "toolchain", @@ -1506,9 +1496,9 @@ dependencies = [ [[package]] name = "rustc-demangle" -version = "0.1.21" +version = "0.1.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ef03e0a2b150c7a90d01faf6254c9c48a41e95fb2a8c2ac1c6f0d2b9aefc342" +checksum = "d4a36c42d1873f9a77c53bde094f9664d9891bc604a45b4798fd2c389ed12e5b" [[package]] name = "rustc-hash" @@ -1518,9 +1508,9 @@ checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" [[package]] name = "ryu" -version = "1.0.12" +version = "1.0.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b4b9743ed687d4b4bcedf9ff5eaa7398495ae14e61cba0a295704edbc7decde" +checksum = "f91339c0467de62360649f8d3e185ca8de4224ff281f66000de5eb2a77a79041" [[package]] name = "salsa" @@ -1583,27 +1573,27 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" [[package]] name = "semver" -version = "1.0.16" +version = "1.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58bc9567378fc7690d6b2addae4e60ac2eeea07becb2c64b9f218b53865cba2a" +checksum = "bebd363326d05ec3e2f532ab7660680f3b02130d780c299bca73469d521bc0ed" dependencies = [ "serde", ] [[package]] name = "serde" -version = "1.0.152" +version = "1.0.156" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb7d1f0d3021d347a83e556fc4683dea2ea09d87bccdf88ff5c12545d89d5efb" +checksum = "314b5b092c0ade17c00142951e50ced110ec27cea304b1037c6969246c2469a4" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.152" +version = "1.0.156" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e" +checksum = "d7e29c4601e36bcec74a223228dce795f4cd3616341a4af93520ca1a837c087d" dependencies = [ "proc-macro2", "quote", @@ -1612,9 +1602,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.91" +version = "1.0.94" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "877c235533714907a8c2464236f5c4b2a17262ef1bd71f38f35ea592c8da6883" +checksum = "1c533a59c9d8a93a09c6ab31f0fd5e5f4dd1b8fc9434804029839884765d04ea" dependencies = [ "indexmap", "itoa", @@ -1624,9 +1614,9 @@ dependencies = [ [[package]] name = "serde_repr" -version = "0.1.10" +version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a5ec9fa74a20ebbe5d9ac23dac1fc96ba0ecfe9f50f2843b52e537b10fbcb4e" +checksum = "395627de918015623b32e7669714206363a7fc00382bf477e72c1f7533e8eafc" dependencies = [ "proc-macro2", "quote", @@ -1650,9 +1640,9 @@ checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0" [[package]] name = "smol_str" -version = "0.1.23" +version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7475118a28b7e3a2e157ce0131ba8c5526ea96e90ee601d9f6bb2e286a35ab44" +checksum = "fad6c857cbab2627dcf01ec85a623ca4e7dcb5691cbaa3d7fb7653671f0d09c9" dependencies = [ "serde", ] @@ -1689,9 +1679,9 @@ dependencies = [ [[package]] name = "syn" -version = "1.0.107" +version = "1.0.109" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f4064b5b16e03ae50984a5a8ed5d4f8803e6bc1fd170a3cda91a1be4b18e3f5" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" dependencies = [ "proc-macro2", "quote", @@ -1763,18 +1753,18 @@ checksum = "288cb548dbe72b652243ea797201f3d481a0609a967980fcc5b2315ea811560a" [[package]] name = "thiserror" -version = "1.0.38" +version = "1.0.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a9cd18aa97d5c45c6603caea1da6628790b37f7a34b6ca89522331c5180fed0" +checksum = "a5ab016db510546d856297882807df8da66a16fb8c4101cb8b30054b0d5b2d9c" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.38" +version = "1.0.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fb327af4685e4d03fa8cbcf1716380da910eeb2bb8be417e7f9fd3fb164f36f" +checksum = "5420d42e90af0c38c3290abcca25b9b3bdf379fc9f55c528f53a269d9c9a267e" dependencies = [ "proc-macro2", "quote", @@ -1783,10 +1773,11 @@ dependencies = [ [[package]] name = "thread_local" -version = "1.1.4" +version = "1.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5516c27b78311c50bf42c071425c560ac799b11c30b31f87e3081965fe5e0180" +checksum = "3fdd6f064ccff2d6567adcb3873ca630700f00b5ad3f060c25b5dcfd9a4ce152" dependencies = [ + "cfg-if", "once_cell", ] @@ -1812,12 +1803,11 @@ dependencies = [ [[package]] name = "tikv-jemalloc-sys" -version = "0.5.2+5.3.0-patched" +version = "0.5.3+5.3.0-patched" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec45c14da997d0925c7835883e4d5c181f196fa142f8c19d7643d1e9af2592c3" +checksum = "a678df20055b43e57ef8cddde41cdfda9a3c1a060b67f4c5836dfb1d78543ba8" dependencies = [ "cc", - "fs_extra", "libc", ] @@ -1833,9 +1823,9 @@ dependencies = [ [[package]] name = "time" -version = "0.3.17" +version = "0.3.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a561bf4617eebd33bca6434b988f39ed798e527f51a1e797d0ee4f61c0a38376" +checksum = "cd0cbfecb4d19b5ea75bb31ad904eb5b9fa13f21079c3b92017ebdf4999a5890" dependencies = [ "serde", "time-core", @@ -1858,9 +1848,9 @@ dependencies = [ [[package]] name = "tinyvec_macros" -version = "0.1.0" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "toolchain" @@ -1973,15 +1963,15 @@ dependencies = [ [[package]] name = "unicode-bidi" -version = "0.3.10" +version = "0.3.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d54675592c1dbefd78cbd98db9bacd89886e1ca50692a0692baefffdeb92dd58" +checksum = "92888ba5573ff080736b3648696b70cafad7d250551175acbaa4e0385b3e1460" [[package]] name = "unicode-ident" -version = "1.0.6" +version = "1.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84a22b9f218b40614adcb3f4ff08b703773ad44fa9423e4e0d346d5db86e4ebc" +checksum = "e5464a87b239f13a63a501f2701565754bae92d243d4bb7eb12f6d57d2269bf4" [[package]] name = "unicode-normalization" @@ -1994,9 +1984,9 @@ dependencies = [ [[package]] name = "unicode-segmentation" -version = "1.10.0" +version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fdbf052a0783de01e944a6ce7a8cb939e295b1e7be835a1112c3b9a7f047a5a" +checksum = 
"1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36" [[package]] name = "unicode-xid" @@ -2054,12 +2044,11 @@ dependencies = [ [[package]] name = "walkdir" -version = "2.3.2" +version = "2.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "808cf2735cd4b6866113f648b791c6adc5714537bc222d9347bb203386ffda56" +checksum = "36df944cda56c7d8d8b7496af378e6b16de9284591917d307c9b4d313c44e698" dependencies = [ "same-file", - "winapi", "winapi-util", ] @@ -2117,45 +2106,45 @@ dependencies = [ [[package]] name = "windows_aarch64_gnullvm" -version = "0.42.1" +version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c9864e83243fdec7fc9c5444389dcbbfd258f745e7853198f365e3c4968a608" +checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" [[package]] name = "windows_aarch64_msvc" -version = "0.42.1" +version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c8b1b673ffc16c47a9ff48570a9d85e25d265735c503681332589af6253c6c7" +checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" [[package]] name = "windows_i686_gnu" -version = "0.42.1" +version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "de3887528ad530ba7bdbb1faa8275ec7a1155a45ffa57c37993960277145d640" +checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" [[package]] name = "windows_i686_msvc" -version = "0.42.1" +version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf4d1122317eddd6ff351aa852118a2418ad4214e6613a50e0191f7004372605" +checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" [[package]] name = "windows_x86_64_gnu" -version = "0.42.1" +version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1040f221285e17ebccbc2591ffdc2d44ee1f9186324dd3e84e99ac68d699c45" +checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" [[package]] name = "windows_x86_64_gnullvm" -version = "0.42.1" +version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "628bfdf232daa22b0d64fdb62b09fcc36bb01f05a3939e20ab73aaf9470d0463" +checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" [[package]] name = "windows_x86_64_msvc" -version = "0.42.1" +version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "447660ad36a13288b1db4d4248e857b510e8c3a225c822ba4fb748c0aafecffd" +checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" [[package]] name = "write-json" diff --git a/Cargo.toml b/Cargo.toml index 333f03ce2f..511cb24a61 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -74,5 +74,9 @@ toolchain = { path = "./crates/toolchain", version = "0.0.0" } tt = { path = "./crates/tt", version = "0.0.0" } vfs-notify = { path = "./crates/vfs-notify", version = "0.0.0" } vfs = { path = "./crates/vfs", version = "0.0.0" } + # non-local crates smallvec = { version = "1.10.0", features = ["const_new", "union", "const_generics"] } +# the following crates are pinned to prevent us from pulling in syn 2 until all our dependencies have moved +serde = { version = "=1.0.156", features = ["derive"] } +serde_json = "1.0.94" diff --git a/crates/flycheck/Cargo.toml b/crates/flycheck/Cargo.toml index 609d18c4ee..1e0b3605b1 100644 --- a/crates/flycheck/Cargo.toml +++ b/crates/flycheck/Cargo.toml @@ -16,8 +16,8 @@ crossbeam-channel = 
"0.5.5" tracing = "0.1.37" cargo_metadata = "0.15.0" rustc-hash = "1.1.0" -serde = { version = "1.0.137", features = ["derive"] } -serde_json = "1.0.86" +serde_json.workspace = true +serde.workspace = true jod-thread = "0.1.2" command-group = "2.0.1" diff --git a/crates/paths/Cargo.toml b/crates/paths/Cargo.toml index e24e6eceff..28b54be521 100644 --- a/crates/paths/Cargo.toml +++ b/crates/paths/Cargo.toml @@ -15,4 +15,4 @@ doctest = false # Adding this dep sadly puts a lot of rust-analyzer crates after the # serde-derive crate. Even though we don't activate the derive feature here, # someone else in the crate graph certainly does! -# serde = "1" +# serde.workspace = true diff --git a/crates/proc-macro-api/Cargo.toml b/crates/proc-macro-api/Cargo.toml index 28469b8324..e748421f6d 100644 --- a/crates/proc-macro-api/Cargo.toml +++ b/crates/proc-macro-api/Cargo.toml @@ -19,8 +19,8 @@ object = { version = "0.30.2", default-features = false, features = [ "macho", "pe", ] } -serde = { version = "1.0.137", features = ["derive"] } -serde_json = { version = "1.0.81", features = ["unbounded_depth"] } +serde.workspace = true +serde_json = { workspace = true, features = ["unbounded_depth"] } tracing = "0.1.37" memmap2 = "0.5.4" snap = "1.1.0" diff --git a/crates/profile/Cargo.toml b/crates/profile/Cargo.toml index 6273ea51db..602e742751 100644 --- a/crates/profile/Cargo.toml +++ b/crates/profile/Cargo.toml @@ -20,7 +20,7 @@ countme = { version = "3.0.1", features = ["enable"] } jemalloc-ctl = { version = "0.5.0", package = "tikv-jemalloc-ctl", optional = true } [target.'cfg(target_os = "linux")'.dependencies] -perf-event = "0.4.7" +perf-event = "=0.4.7" [target.'cfg(windows)'.dependencies] winapi = { version = "0.3.9", features = ["processthreadsapi", "psapi"] } diff --git a/crates/project-model/Cargo.toml b/crates/project-model/Cargo.toml index 22d6a6e789..080b224e86 100644 --- a/crates/project-model/Cargo.toml +++ b/crates/project-model/Cargo.toml @@ -16,8 +16,8 @@ tracing = "0.1.35" rustc-hash = "1.1.0" cargo_metadata = "0.15.0" semver = "1.0.14" -serde = { version = "1.0.137", features = ["derive"] } -serde_json = "1.0.86" +serde_json.workspace = true +serde.workspace = true anyhow = "1.0.62" la-arena = { version = "0.3.0", path = "../../lib/la-arena" } diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml index f0f1900c78..1a4155d3bb 100644 --- a/crates/rust-analyzer/Cargo.toml +++ b/crates/rust-analyzer/Cargo.toml @@ -29,8 +29,8 @@ parking_lot = "0.12.1" xflags = "0.3.0" oorandom = "11.1.3" rustc-hash = "1.1.0" -serde = { version = "1.0.137", features = ["derive"] } -serde_json = { version = "1.0.81", features = ["preserve_order"] } +serde_json = { workspace = true, features = ["preserve_order"] } +serde.workspace = true threadpool = "1.8.1" rayon = "1.6.1" num_cpus = "1.15.0" @@ -47,6 +47,16 @@ tracing-log = "0.1.3" tracing-tree = "0.2.1" always-assert = "0.1.2" +# These dependencies are unused, but we pin them to a version here to restrict them for our transitive dependencies +# so that we don't pull in duplicates of their depdendenceies like windows-sys and syn 1 vs 2 +# these would pull in serde 2 +thiserror = "=1.0.39" +serde_repr = "=0.1.11" +# these would pull in windows-sys 0.45.0 +mio = "=0.8.5" +filetime = "=0.2.19" +parking_lot_core = "=0.9.6" + cfg.workspace = true flycheck.workspace = true hir-def.workspace = true diff --git a/crates/syntax/Cargo.toml b/crates/syntax/Cargo.toml index 305cf2d394..7200df0a57 100644 --- a/crates/syntax/Cargo.toml +++ 
b/crates/syntax/Cargo.toml @@ -16,7 +16,7 @@ doctest = false cov-mark = "2.0.0-pre.1" either = "1.7.0" itertools = "0.10.5" -rowan = "0.15.10" +rowan = "0.15.11" rustc_lexer = { version = "727.0.0", package = "rustc-ap-rustc_lexer" } rustc-hash = "1.1.0" once_cell = "1.17.0" diff --git a/lib/lsp-server/Cargo.toml b/lib/lsp-server/Cargo.toml index 6e32e39605..e78a9d2eb1 100644 --- a/lib/lsp-server/Cargo.toml +++ b/lib/lsp-server/Cargo.toml @@ -8,8 +8,8 @@ edition = "2021" [dependencies] log = "0.4.17" -serde_json = "1.0.86" -serde = { version = "1.0.144", features = ["derive"] } +serde_json.workspace = true +serde.workspace = true crossbeam-channel = "0.5.6" [dev-dependencies] From 3ae9bfe2661c4a3542a5b44cb1f9700cb17d8f30 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sat, 25 Mar 2023 15:43:58 +0100 Subject: [PATCH 1204/1945] Remove client side proc-macro version check --- crates/proc-macro-api/src/lib.rs | 14 ++------------ crates/rust-analyzer/src/reload.rs | 3 +-- 2 files changed, 3 insertions(+), 14 deletions(-) diff --git a/crates/proc-macro-api/src/lib.rs b/crates/proc-macro-api/src/lib.rs index 90d06967e8..4525d9dfdd 100644 --- a/crates/proc-macro-api/src/lib.rs +++ b/crates/proc-macro-api/src/lib.rs @@ -54,18 +54,8 @@ pub struct MacroDylib { } impl MacroDylib { - // FIXME: this is buggy due to TOCTOU, we should check the version in the - // macro process instead. - pub fn new(path: AbsPathBuf) -> io::Result { - let _p = profile::span("MacroDylib::new"); - - let info = version::read_dylib_info(&path)?; - if info.version.0 < 1 || info.version.1 < 47 { - let msg = format!("proc-macro {} built by {info:#?} is not supported by rust-analyzer, please update your Rust version.", path.display()); - return Err(io::Error::new(io::ErrorKind::InvalidData, msg)); - } - - Ok(MacroDylib { path }) + pub fn new(path: AbsPathBuf) -> MacroDylib { + MacroDylib { path } } } diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs index 1a6e1af2eb..138b8446b9 100644 --- a/crates/rust-analyzer/src/reload.rs +++ b/crates/rust-analyzer/src/reload.rs @@ -648,8 +648,7 @@ pub(crate) fn load_proc_macro( ) -> ProcMacroLoadResult { let server = server.map_err(ToOwned::to_owned)?; let res: Result, String> = (|| { - let dylib = MacroDylib::new(path.to_path_buf()) - .map_err(|io| format!("Proc-macro dylib loading failed: {io}"))?; + let dylib = MacroDylib::new(path.to_path_buf()); let vec = server.load_dylib(dylib).map_err(|e| format!("{e}"))?; if vec.is_empty() { return Err("proc macro library returned no proc macros".to_string()); From d154ea88f90e570b9d88dba021050f9030238c79 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sat, 25 Mar 2023 16:42:52 +0100 Subject: [PATCH 1205/1945] Split out proc-macros from the CrateGraph --- crates/base-db/src/change.rs | 10 ++- crates/base-db/src/fixture.rs | 17 ++-- crates/base-db/src/input.rs | 24 +++-- crates/base-db/src/lib.rs | 6 +- crates/hir-def/src/nameres/collector.rs | 46 ++++++---- crates/hir-expand/src/proc_macro.rs | 9 +- crates/ide/src/lib.rs | 1 - crates/ide/src/shuffle_crate_graph.rs | 7 +- crates/project-model/src/tests.rs | 100 +++------------------ crates/project-model/src/workspace.rs | 66 ++++++++------ crates/rust-analyzer/src/cli/load_cargo.rs | 18 +++- crates/rust-analyzer/src/reload.rs | 13 +-- 12 files changed, 141 insertions(+), 176 deletions(-) diff --git a/crates/base-db/src/change.rs b/crates/base-db/src/change.rs index b57f234576..b906511dbc 100644 --- a/crates/base-db/src/change.rs +++ b/crates/base-db/src/change.rs 
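The hunks below teach `Change` to carry the per-crate proc-macro table separately from the crate graph. A minimal sketch of the resulting consumer-side call pattern, assuming `Change`, `CrateGraph`, `ProcMacros` and `SourceDatabaseExt` stay reachable from the `base_db` crate root as the re-exports in this patch suggest (the `apply_workspace` helper name is made up for illustration):

    use base_db::{Change, CrateGraph, ProcMacros, SourceDatabaseExt};

    // Apply a freshly loaded workspace: the crate graph and the proc-macro table are
    // now set on the `Change` independently and handed to the database in one go.
    fn apply_workspace(db: &mut dyn SourceDatabaseExt, graph: CrateGraph, macros: ProcMacros) {
        let mut change = Change::default();
        change.set_crate_graph(graph);
        change.set_proc_macros(macros);
        change.apply(db);
    }
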
@@ -6,7 +6,7 @@ use std::{fmt, sync::Arc}; use salsa::Durability; use vfs::FileId; -use crate::{CrateGraph, SourceDatabaseExt, SourceRoot, SourceRootId}; +use crate::{CrateGraph, ProcMacros, SourceDatabaseExt, SourceRoot, SourceRootId}; /// Encapsulate a bunch of raw `.set` calls on the database. #[derive(Default)] @@ -14,6 +14,7 @@ pub struct Change { pub roots: Option>, pub files_changed: Vec<(FileId, Option>)>, pub crate_graph: Option, + pub proc_macros: Option, } impl fmt::Debug for Change { @@ -49,6 +50,10 @@ impl Change { self.crate_graph = Some(graph); } + pub fn set_proc_macros(&mut self, proc_macros: ProcMacros) { + self.proc_macros = Some(proc_macros); + } + pub fn apply(self, db: &mut dyn SourceDatabaseExt) { let _p = profile::span("RootDatabase::apply_change"); if let Some(roots) = self.roots { @@ -73,6 +78,9 @@ impl Change { if let Some(crate_graph) = self.crate_graph { db.set_crate_graph_with_durability(Arc::new(crate_graph), Durability::HIGH) } + if let Some(proc_macros) = self.proc_macros { + db.set_proc_macros_with_durability(Arc::new(proc_macros), Durability::HIGH) + } } } diff --git a/crates/base-db/src/fixture.rs b/crates/base-db/src/fixture.rs index 8a7e9dfadf..7269180a5d 100644 --- a/crates/base-db/src/fixture.rs +++ b/crates/base-db/src/fixture.rs @@ -12,8 +12,8 @@ use vfs::{file_set::FileSet, VfsPath}; use crate::{ input::{CrateName, CrateOrigin, LangCrateOrigin}, Change, CrateDisplayName, CrateGraph, CrateId, Dependency, Edition, Env, FileId, FilePosition, - FileRange, ProcMacro, ProcMacroExpander, ProcMacroExpansionError, SourceDatabaseExt, - SourceRoot, SourceRootId, + FileRange, ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacros, + SourceDatabaseExt, SourceRoot, SourceRootId, }; pub const WORKSPACE: SourceRootId = SourceRootId(0); @@ -100,7 +100,7 @@ impl ChangeFixture { pub fn parse_with_proc_macros( ra_fixture: &str, - mut proc_macros: Vec<(String, ProcMacro)>, + mut proc_macro_defs: Vec<(String, ProcMacro)>, ) -> ChangeFixture { let (mini_core, proc_macro_names, fixture) = Fixture::parse(ra_fixture); let mut change = Change::new(); @@ -160,7 +160,6 @@ impl ChangeFixture { meta.cfg.clone(), meta.cfg, meta.env, - Ok(Vec::new()), false, origin, meta.target_data_layout @@ -200,7 +199,6 @@ impl ChangeFixture { default_cfg.clone(), default_cfg, Env::default(), - Ok(Vec::new()), false, CrateOrigin::CratesIo { repo: None, name: None }, default_target_data_layout @@ -244,7 +242,6 @@ impl ChangeFixture { CfgOptions::default(), CfgOptions::default(), Env::default(), - Ok(Vec::new()), false, CrateOrigin::Lang(LangCrateOrigin::Core), target_layout.clone(), @@ -257,12 +254,13 @@ impl ChangeFixture { } } + let mut proc_macros = ProcMacros::default(); if !proc_macro_names.is_empty() { let proc_lib_file = file_id; file_id.0 += 1; - proc_macros.extend(default_test_proc_macros()); - let (proc_macro, source) = filter_test_proc_macros(&proc_macro_names, proc_macros); + proc_macro_defs.extend(default_test_proc_macros()); + let (proc_macro, source) = filter_test_proc_macros(&proc_macro_names, proc_macro_defs); let mut fs = FileSet::default(); fs.insert( proc_lib_file, @@ -282,11 +280,11 @@ impl ChangeFixture { CfgOptions::default(), CfgOptions::default(), Env::default(), - Ok(proc_macro), true, CrateOrigin::CratesIo { repo: None, name: None }, target_layout, ); + proc_macros.insert(proc_macros_crate, Ok(proc_macro)); for krate in all_crates { crate_graph @@ -305,6 +303,7 @@ impl ChangeFixture { roots.push(root); change.set_roots(roots); 
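        // `proc_macros` was built further up in this function: it maps the synthetic
        // proc-macro crate's `CrateId` to `Ok(expanders)`, and nameres reads it back
        // through `db.proc_macros()` instead of through `CrateData`.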
change.set_crate_graph(crate_graph); + change.set_proc_macros(proc_macros); ChangeFixture { file_position, files, change } } diff --git a/crates/base-db/src/input.rs b/crates/base-db/src/input.rs index 43388e915b..41a2abd803 100644 --- a/crates/base-db/src/input.rs +++ b/crates/base-db/src/input.rs @@ -15,6 +15,8 @@ use syntax::SmolStr; use tt::token_id::Subtree; use vfs::{file_set::FileSet, AnchoredPath, FileId, VfsPath}; +pub type ProcMacros = FxHashMap; + /// Files are grouped into source roots. A source root is a directory on the /// file systems which is watched for changes. Typically it corresponds to a /// Rust crate. Source roots *might* be nested: in this case, a file belongs to @@ -269,7 +271,6 @@ pub struct CrateData { pub target_layout: TargetLayoutLoadResult, pub env: Env, pub dependencies: Vec, - pub proc_macro: ProcMacroLoadResult, pub origin: CrateOrigin, pub is_proc_macro: bool, } @@ -322,7 +323,6 @@ impl CrateGraph { cfg_options: CfgOptions, potential_cfg_options: CfgOptions, env: Env, - proc_macro: ProcMacroLoadResult, is_proc_macro: bool, origin: CrateOrigin, target_layout: Result, Arc>, @@ -335,7 +335,6 @@ impl CrateGraph { cfg_options, potential_cfg_options, env, - proc_macro, dependencies: Vec::new(), origin, target_layout, @@ -460,7 +459,12 @@ impl CrateGraph { /// /// The ids of the crates in the `other` graph are shifted by the return /// amount. - pub fn extend(&mut self, other: CrateGraph) -> u32 { + pub fn extend( + &mut self, + other: CrateGraph, + proc_macros: &mut ProcMacros, + other_proc_macros: ProcMacros, + ) -> u32 { let start = self.arena.len() as u32; self.arena.extend(other.arena.into_iter().map(|(id, mut data)| { let new_id = id.shift(start); @@ -469,6 +473,8 @@ impl CrateGraph { } (new_id, data) })); + proc_macros + .extend(other_proc_macros.into_iter().map(|(id, macros)| (id.shift(start), macros))); start } @@ -645,7 +651,6 @@ mod tests { CfgOptions::default(), CfgOptions::default(), Env::default(), - Ok(Vec::new()), false, CrateOrigin::CratesIo { repo: None, name: None }, Err("".into()), @@ -658,7 +663,6 @@ mod tests { CfgOptions::default(), CfgOptions::default(), Env::default(), - Ok(Vec::new()), false, CrateOrigin::CratesIo { repo: None, name: None }, Err("".into()), @@ -671,7 +675,6 @@ mod tests { CfgOptions::default(), CfgOptions::default(), Env::default(), - Ok(Vec::new()), false, CrateOrigin::CratesIo { repo: None, name: None }, Err("".into()), @@ -698,7 +701,6 @@ mod tests { CfgOptions::default(), CfgOptions::default(), Env::default(), - Ok(Vec::new()), false, CrateOrigin::CratesIo { repo: None, name: None }, Err("".into()), @@ -711,7 +713,6 @@ mod tests { CfgOptions::default(), CfgOptions::default(), Env::default(), - Ok(Vec::new()), false, CrateOrigin::CratesIo { repo: None, name: None }, Err("".into()), @@ -735,7 +736,6 @@ mod tests { CfgOptions::default(), CfgOptions::default(), Env::default(), - Ok(Vec::new()), false, CrateOrigin::CratesIo { repo: None, name: None }, Err("".into()), @@ -748,7 +748,6 @@ mod tests { CfgOptions::default(), CfgOptions::default(), Env::default(), - Ok(Vec::new()), false, CrateOrigin::CratesIo { repo: None, name: None }, Err("".into()), @@ -761,7 +760,6 @@ mod tests { CfgOptions::default(), CfgOptions::default(), Env::default(), - Ok(Vec::new()), false, CrateOrigin::CratesIo { repo: None, name: None }, Err("".into()), @@ -785,7 +783,6 @@ mod tests { CfgOptions::default(), CfgOptions::default(), Env::default(), - Ok(Vec::new()), false, CrateOrigin::CratesIo { repo: None, name: None }, Err("".into()), @@ 
-798,7 +795,6 @@ mod tests { CfgOptions::default(), CfgOptions::default(), Env::default(), - Ok(Vec::new()), false, CrateOrigin::CratesIo { repo: None, name: None }, Err("".into()), diff --git a/crates/base-db/src/lib.rs b/crates/base-db/src/lib.rs index 9720db9d8a..7ab9aa8709 100644 --- a/crates/base-db/src/lib.rs +++ b/crates/base-db/src/lib.rs @@ -16,7 +16,7 @@ pub use crate::{ input::{ CrateData, CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, Edition, Env, LangCrateOrigin, ProcMacro, ProcMacroExpander, ProcMacroExpansionError, - ProcMacroId, ProcMacroKind, ProcMacroLoadResult, SourceRoot, SourceRootId, + ProcMacroId, ProcMacroKind, ProcMacroLoadResult, ProcMacros, SourceRoot, SourceRootId, TargetLayoutLoadResult, }, }; @@ -73,6 +73,10 @@ pub trait SourceDatabase: FileLoader + std::fmt::Debug { /// The crate graph. #[salsa::input] fn crate_graph(&self) -> Arc; + + /// The crate graph. + #[salsa::input] + fn proc_macros(&self) -> Arc; } fn parse_query(db: &dyn SourceDatabase, file_id: FileId) -> Parse { diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs index ddcee77ec4..b3ce913d9a 100644 --- a/crates/hir-def/src/nameres/collector.rs +++ b/crates/hir-def/src/nameres/collector.rs @@ -78,25 +78,35 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, mut def_map: DefMap, tree_id: T } let cfg_options = &krate.cfg_options; - let proc_macros = match &krate.proc_macro { - Ok(proc_macros) => { - proc_macros - .iter() - .enumerate() - .map(|(idx, it)| { - // FIXME: a hacky way to create a Name from string. - let name = - tt::Ident { text: it.name.clone(), span: tt::TokenId::unspecified() }; - (name.as_name(), ProcMacroExpander::new(base_db::ProcMacroId(idx as u32))) - }) - .collect() - } - Err(e) => { - def_map.proc_macro_loading_error = Some(e.clone().into_boxed_str()); - Vec::new() - } - }; + let is_proc_macro = krate.is_proc_macro; + let proc_macros = if is_proc_macro { + match db.proc_macros().get(&def_map.krate) { + Some(Ok(proc_macros)) => { + proc_macros + .iter() + .enumerate() + .map(|(idx, it)| { + // FIXME: a hacky way to create a Name from string. 
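                        // (The string is wrapped in a `tt::Ident` with an unspecified span
                        // purely so that it can be converted into a `Name` via `as_name()`.)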
+ let name = + tt::Ident { text: it.name.clone(), span: tt::TokenId::unspecified() }; + (name.as_name(), ProcMacroExpander::new(base_db::ProcMacroId(idx as u32))) + }) + .collect() + } + Some(Err(e)) => { + def_map.proc_macro_loading_error = Some(e.clone().into_boxed_str()); + Vec::new() + } + None => { + def_map.proc_macro_loading_error = + Some("No proc-macros present for crate".to_owned().into_boxed_str()); + Vec::new() + } + } + } else { + vec![] + }; let mut collector = DefCollector { db, diff --git a/crates/hir-expand/src/proc_macro.rs b/crates/hir-expand/src/proc_macro.rs index d758e9302c..ad98935874 100644 --- a/crates/hir-expand/src/proc_macro.rs +++ b/crates/hir-expand/src/proc_macro.rs @@ -33,10 +33,10 @@ impl ProcMacroExpander { ) -> ExpandResult { match self.proc_macro_id { Some(id) => { - let krate_graph = db.crate_graph(); - let proc_macros = match &krate_graph[def_crate].proc_macro { - Ok(proc_macros) => proc_macros, - Err(_) => { + let proc_macros = db.proc_macros(); + let proc_macros = match proc_macros.get(&def_crate) { + Some(Ok(proc_macros)) => proc_macros, + Some(Err(_)) | None => { never!("Non-dummy expander even though there are no proc macros"); return ExpandResult::with_err( tt::Subtree::empty(), @@ -59,6 +59,7 @@ impl ProcMacroExpander { } }; + let krate_graph = db.crate_graph(); // Proc macros have access to the environment variables of the invoking crate. let env = &krate_graph[calling_crate].env; match proc_macro.expander.expand(tt, attr_arg, env) { diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs index 078b66dd39..8477a8e622 100644 --- a/crates/ide/src/lib.rs +++ b/crates/ide/src/lib.rs @@ -235,7 +235,6 @@ impl Analysis { cfg_options.clone(), cfg_options, Env::default(), - Ok(Vec::new()), false, CrateOrigin::CratesIo { repo: None, name: None }, Err("Analysis::from_single_file has no target layout".into()), diff --git a/crates/ide/src/shuffle_crate_graph.rs b/crates/ide/src/shuffle_crate_graph.rs index e606072a82..471c36dfec 100644 --- a/crates/ide/src/shuffle_crate_graph.rs +++ b/crates/ide/src/shuffle_crate_graph.rs @@ -1,7 +1,7 @@ use std::sync::Arc; use ide_db::{ - base_db::{salsa::Durability, CrateGraph, SourceDatabase}, + base_db::{salsa::Durability, CrateGraph, ProcMacros, SourceDatabase}, FxHashMap, RootDatabase, }; @@ -16,6 +16,7 @@ use ide_db::{ // |=== pub(crate) fn shuffle_crate_graph(db: &mut RootDatabase) { let crate_graph = db.crate_graph(); + let proc_macros = db.proc_macros(); let mut shuffled_ids = crate_graph.iter().collect::>(); @@ -23,6 +24,7 @@ pub(crate) fn shuffle_crate_graph(db: &mut RootDatabase) { stdx::rand::shuffle(&mut shuffled_ids, |i| rng.rand_range(0..i as u32) as usize); let mut new_graph = CrateGraph::default(); + let mut new_proc_macros = ProcMacros::default(); let mut map = FxHashMap::default(); for old_id in shuffled_ids.iter().copied() { @@ -35,11 +37,11 @@ pub(crate) fn shuffle_crate_graph(db: &mut RootDatabase) { data.cfg_options.clone(), data.potential_cfg_options.clone(), data.env.clone(), - data.proc_macro.clone(), data.is_proc_macro, data.origin.clone(), data.target_layout.clone(), ); + new_proc_macros.insert(new_id, proc_macros[&old_id].clone()); map.insert(old_id, new_id); } @@ -53,4 +55,5 @@ pub(crate) fn shuffle_crate_graph(db: &mut RootDatabase) { } db.set_crate_graph_with_durability(Arc::new(new_graph), Durability::HIGH); + db.set_proc_macros_with_durability(Arc::new(new_proc_macros), Durability::HIGH); } diff --git a/crates/project-model/src/tests.rs b/crates/project-model/src/tests.rs index 
3754accbb0..ed78d71a1a 100644 --- a/crates/project-model/src/tests.rs +++ b/crates/project-model/src/tests.rs @@ -3,7 +3,7 @@ use std::{ path::{Path, PathBuf}, }; -use base_db::{CrateGraph, FileId}; +use base_db::{CrateGraph, FileId, ProcMacros}; use cfg::{CfgAtom, CfgDiff}; use expect_test::{expect, Expect}; use paths::{AbsPath, AbsPathBuf}; @@ -14,11 +14,11 @@ use crate::{ WorkspaceBuildScripts, }; -fn load_cargo(file: &str) -> CrateGraph { +fn load_cargo(file: &str) -> (CrateGraph, ProcMacros) { load_cargo_with_overrides(file, CfgOverrides::default()) } -fn load_cargo_with_overrides(file: &str, cfg_overrides: CfgOverrides) -> CrateGraph { +fn load_cargo_with_overrides(file: &str, cfg_overrides: CfgOverrides) -> (CrateGraph, ProcMacros) { let meta = get_test_json_file(file); let cargo_workspace = CargoWorkspace::new(meta); let project_workspace = ProjectWorkspace::Cargo { @@ -34,7 +34,7 @@ fn load_cargo_with_overrides(file: &str, cfg_overrides: CfgOverrides) -> CrateGr to_crate_graph(project_workspace) } -fn load_rust_project(file: &str) -> CrateGraph { +fn load_rust_project(file: &str) -> (CrateGraph, ProcMacros) { let data = get_test_json_file(file); let project = rooted_project_json(data); let sysroot = Ok(get_fake_sysroot()); @@ -92,7 +92,7 @@ fn rooted_project_json(data: ProjectJsonData) -> ProjectJson { ProjectJson::new(base, data) } -fn to_crate_graph(project_workspace: ProjectWorkspace) -> CrateGraph { +fn to_crate_graph(project_workspace: ProjectWorkspace) -> (CrateGraph, ProcMacros) { project_workspace.to_crate_graph( &mut |_, _| Ok(Vec::new()), &mut { @@ -117,7 +117,8 @@ fn cargo_hello_world_project_model_with_wildcard_overrides() { let cfg_overrides = CfgOverrides::Wildcard( CfgDiff::new(Vec::new(), vec![CfgAtom::Flag("test".into())]).unwrap(), ); - let crate_graph = load_cargo_with_overrides("hello-world-metadata.json", cfg_overrides); + let (crate_graph, _proc_macros) = + load_cargo_with_overrides("hello-world-metadata.json", cfg_overrides); check_crate_graph( crate_graph, expect![[r#" @@ -184,9 +185,6 @@ fn cargo_hello_world_project_model_with_wildcard_overrides() { prelude: true, }, ], - proc_macro: Err( - "crate has not (yet) been built", - ), origin: CratesIo { repo: None, name: Some( @@ -265,9 +263,6 @@ fn cargo_hello_world_project_model_with_wildcard_overrides() { prelude: true, }, ], - proc_macro: Err( - "crate has not (yet) been built", - ), origin: CratesIo { repo: None, name: Some( @@ -346,9 +341,6 @@ fn cargo_hello_world_project_model_with_wildcard_overrides() { prelude: true, }, ], - proc_macro: Err( - "crate has not (yet) been built", - ), origin: CratesIo { repo: None, name: Some( @@ -427,9 +419,6 @@ fn cargo_hello_world_project_model_with_wildcard_overrides() { prelude: true, }, ], - proc_macro: Err( - "crate has not (yet) been built", - ), origin: CratesIo { repo: None, name: Some( @@ -498,9 +487,6 @@ fn cargo_hello_world_project_model_with_wildcard_overrides() { }, }, dependencies: [], - proc_macro: Err( - "crate has not (yet) been built", - ), origin: CratesIo { repo: Some( "https://github.com/rust-lang/libc", @@ -527,7 +513,8 @@ fn cargo_hello_world_project_model_with_selective_overrides() { .collect(), ) }; - let crate_graph = load_cargo_with_overrides("hello-world-metadata.json", cfg_overrides); + let (crate_graph, _proc_macros) = + load_cargo_with_overrides("hello-world-metadata.json", cfg_overrides); check_crate_graph( crate_graph, expect![[r#" @@ -596,9 +583,6 @@ fn cargo_hello_world_project_model_with_selective_overrides() { prelude: true, }, ], - 
proc_macro: Err( - "crate has not (yet) been built", - ), origin: CratesIo { repo: None, name: Some( @@ -679,9 +663,6 @@ fn cargo_hello_world_project_model_with_selective_overrides() { prelude: true, }, ], - proc_macro: Err( - "crate has not (yet) been built", - ), origin: CratesIo { repo: None, name: Some( @@ -762,9 +743,6 @@ fn cargo_hello_world_project_model_with_selective_overrides() { prelude: true, }, ], - proc_macro: Err( - "crate has not (yet) been built", - ), origin: CratesIo { repo: None, name: Some( @@ -845,9 +823,6 @@ fn cargo_hello_world_project_model_with_selective_overrides() { prelude: true, }, ], - proc_macro: Err( - "crate has not (yet) been built", - ), origin: CratesIo { repo: None, name: Some( @@ -916,9 +891,6 @@ fn cargo_hello_world_project_model_with_selective_overrides() { }, }, dependencies: [], - proc_macro: Err( - "crate has not (yet) been built", - ), origin: CratesIo { repo: Some( "https://github.com/rust-lang/libc", @@ -936,7 +908,7 @@ fn cargo_hello_world_project_model_with_selective_overrides() { #[test] fn cargo_hello_world_project_model() { - let crate_graph = load_cargo("hello-world-metadata.json"); + let (crate_graph, _proc_macros) = load_cargo("hello-world-metadata.json"); check_crate_graph( crate_graph, expect![[r#" @@ -1005,9 +977,6 @@ fn cargo_hello_world_project_model() { prelude: true, }, ], - proc_macro: Err( - "crate has not (yet) been built", - ), origin: CratesIo { repo: None, name: Some( @@ -1088,9 +1057,6 @@ fn cargo_hello_world_project_model() { prelude: true, }, ], - proc_macro: Err( - "crate has not (yet) been built", - ), origin: CratesIo { repo: None, name: Some( @@ -1171,9 +1137,6 @@ fn cargo_hello_world_project_model() { prelude: true, }, ], - proc_macro: Err( - "crate has not (yet) been built", - ), origin: CratesIo { repo: None, name: Some( @@ -1254,9 +1217,6 @@ fn cargo_hello_world_project_model() { prelude: true, }, ], - proc_macro: Err( - "crate has not (yet) been built", - ), origin: CratesIo { repo: None, name: Some( @@ -1325,9 +1285,6 @@ fn cargo_hello_world_project_model() { }, }, dependencies: [], - proc_macro: Err( - "crate has not (yet) been built", - ), origin: CratesIo { repo: Some( "https://github.com/rust-lang/libc", @@ -1345,7 +1302,7 @@ fn cargo_hello_world_project_model() { #[test] fn rust_project_hello_world_project_model() { - let crate_graph = load_rust_project("hello-world-project.json"); + let (crate_graph, _proc_macros) = load_rust_project("hello-world-project.json"); check_crate_graph( crate_graph, expect![[r#" @@ -1390,9 +1347,6 @@ fn rust_project_hello_world_project_model() { prelude: true, }, ], - proc_macro: Err( - "no proc macro loaded for sysroot crate", - ), origin: Lang( Alloc, ), @@ -1427,9 +1381,6 @@ fn rust_project_hello_world_project_model() { entries: {}, }, dependencies: [], - proc_macro: Err( - "no proc macro loaded for sysroot crate", - ), origin: Lang( Core, ), @@ -1464,9 +1415,6 @@ fn rust_project_hello_world_project_model() { entries: {}, }, dependencies: [], - proc_macro: Err( - "no proc macro loaded for sysroot crate", - ), origin: Lang( Other, ), @@ -1501,9 +1449,6 @@ fn rust_project_hello_world_project_model() { entries: {}, }, dependencies: [], - proc_macro: Err( - "no proc macro loaded for sysroot crate", - ), origin: Lang( Other, ), @@ -1557,9 +1502,6 @@ fn rust_project_hello_world_project_model() { prelude: true, }, ], - proc_macro: Err( - "no proc macro loaded for sysroot crate", - ), origin: Lang( Other, ), @@ -1594,9 +1536,6 @@ fn rust_project_hello_world_project_model() { 
entries: {}, }, dependencies: [], - proc_macro: Err( - "no proc macro loaded for sysroot crate", - ), origin: Lang( Other, ), @@ -1704,9 +1643,6 @@ fn rust_project_hello_world_project_model() { prelude: true, }, ], - proc_macro: Err( - "no proc macro loaded for sysroot crate", - ), origin: Lang( Std, ), @@ -1741,9 +1677,6 @@ fn rust_project_hello_world_project_model() { entries: {}, }, dependencies: [], - proc_macro: Err( - "no proc macro loaded for sysroot crate", - ), origin: Lang( Other, ), @@ -1778,9 +1711,6 @@ fn rust_project_hello_world_project_model() { entries: {}, }, dependencies: [], - proc_macro: Err( - "no proc macro loaded for sysroot crate", - ), origin: Lang( Test, ), @@ -1815,9 +1745,6 @@ fn rust_project_hello_world_project_model() { entries: {}, }, dependencies: [], - proc_macro: Err( - "no proc macro loaded for sysroot crate", - ), origin: Lang( Other, ), @@ -1889,9 +1816,6 @@ fn rust_project_hello_world_project_model() { prelude: false, }, ], - proc_macro: Err( - "no proc macro dylib present", - ), origin: CratesIo { repo: None, name: Some( @@ -1907,7 +1831,7 @@ fn rust_project_hello_world_project_model() { #[test] fn rust_project_is_proc_macro_has_proc_macro_dep() { - let crate_graph = load_rust_project("is-proc-macro-project.json"); + let (crate_graph, _proc_macros) = load_rust_project("is-proc-macro-project.json"); // Since the project only defines one crate (outside the sysroot crates), // it should be the one with the biggest Id. let crate_id = crate_graph.iter().max().unwrap(); diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs index 2158485a33..5766859143 100644 --- a/crates/project-model/src/workspace.rs +++ b/crates/project-model/src/workspace.rs @@ -7,7 +7,7 @@ use std::{collections::VecDeque, fmt, fs, process::Command, sync::Arc}; use anyhow::{bail, format_err, Context, Result}; use base_db::{ CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, Edition, Env, - FileId, LangCrateOrigin, ProcMacroLoadResult, TargetLayoutLoadResult, + FileId, LangCrateOrigin, ProcMacroLoadResult, ProcMacros, TargetLayoutLoadResult, }; use cfg::{CfgDiff, CfgOptions}; use paths::{AbsPath, AbsPathBuf}; @@ -579,10 +579,10 @@ impl ProjectWorkspace { load_proc_macro: &mut dyn FnMut(&str, &AbsPath) -> ProcMacroLoadResult, load: &mut dyn FnMut(&AbsPath) -> Option, extra_env: &FxHashMap, - ) -> CrateGraph { + ) -> (CrateGraph, ProcMacros) { let _p = profile::span("ProjectWorkspace::to_crate_graph"); - let mut crate_graph = match self { + let (mut crate_graph, proc_macros) = match self { ProjectWorkspace::Json { project, sysroot, rustc_cfg } => project_json_to_crate_graph( rustc_cfg.clone(), load_proc_macro, @@ -630,7 +630,7 @@ impl ProjectWorkspace { } else { tracing::debug!("Did not patch std to depend on cfg-if") } - crate_graph + (crate_graph, proc_macros) } pub fn eq_ignore_build_data(&self, other: &Self) -> bool { @@ -685,8 +685,9 @@ fn project_json_to_crate_graph( sysroot: Option<&Sysroot>, extra_env: &FxHashMap, target_layout: TargetLayoutLoadResult, -) -> CrateGraph { +) -> (CrateGraph, ProcMacros) { let mut crate_graph = CrateGraph::default(); + let mut proc_macros = FxHashMap::<_, _>::default(); let sysroot_deps = sysroot.as_ref().map(|sysroot| { sysroot_to_crate_graph( &mut crate_graph, @@ -707,13 +708,15 @@ fn project_json_to_crate_graph( }) .map(|(crate_id, krate, file_id)| { let env = krate.env.clone().into_iter().collect(); - let proc_macro = match krate.proc_macro_dylib_path.clone() { - Some(it) => 
load_proc_macro( - krate.display_name.as_ref().map(|it| it.canonical_name()).unwrap_or(""), - &it, - ), - None => Err("no proc macro dylib present".into()), - }; + if let Some(it) = krate.proc_macro_dylib_path.clone() { + proc_macros.insert( + crate_id, + load_proc_macro( + krate.display_name.as_ref().map(|it| it.canonical_name()).unwrap_or(""), + &it, + ), + ); + } let target_cfgs = match krate.target.as_deref() { Some(target) => cfg_cache @@ -734,7 +737,6 @@ fn project_json_to_crate_graph( cfg_options.clone(), cfg_options, env, - proc_macro, krate.is_proc_macro, if krate.display_name.is_some() { CrateOrigin::CratesIo { @@ -776,7 +778,7 @@ fn project_json_to_crate_graph( } } } - crate_graph + (crate_graph, proc_macros) } fn cargo_to_crate_graph( @@ -789,9 +791,10 @@ fn cargo_to_crate_graph( override_cfg: &CfgOverrides, build_scripts: &WorkspaceBuildScripts, target_layout: TargetLayoutLoadResult, -) -> CrateGraph { +) -> (CrateGraph, ProcMacros) { let _p = profile::span("cargo_to_crate_graph"); let mut crate_graph = CrateGraph::default(); + let mut proc_macros = FxHashMap::default(); let (public_deps, libproc_macro) = match sysroot { Some(sysroot) => sysroot_to_crate_graph( &mut crate_graph, @@ -855,6 +858,7 @@ fn cargo_to_crate_graph( if let Some(file_id) = load(&cargo[tgt].root) { let crate_id = add_target_crate_root( &mut crate_graph, + &mut proc_macros, &cargo[pkg], build_scripts.get_output(pkg), cfg_options.clone(), @@ -931,6 +935,7 @@ fn cargo_to_crate_graph( if let Some((rustc_workspace, rustc_build_scripts)) = rustc { handle_rustc_crates( &mut crate_graph, + &mut proc_macros, &mut pkg_to_lib_crate, load, load_proc_macro, @@ -952,7 +957,7 @@ fn cargo_to_crate_graph( ); } } - crate_graph + (crate_graph, proc_macros) } fn detached_files_to_crate_graph( @@ -961,7 +966,7 @@ fn detached_files_to_crate_graph( detached_files: &[AbsPathBuf], sysroot: Option<&Sysroot>, target_layout: TargetLayoutLoadResult, -) -> CrateGraph { +) -> (CrateGraph, ProcMacros) { let _p = profile::span("detached_files_to_crate_graph"); let mut crate_graph = CrateGraph::default(); let (public_deps, _libproc_macro) = match sysroot { @@ -998,7 +1003,6 @@ fn detached_files_to_crate_graph( cfg_options.clone(), cfg_options.clone(), Env::default(), - Ok(Vec::new()), false, CrateOrigin::CratesIo { repo: None, @@ -1009,11 +1013,12 @@ fn detached_files_to_crate_graph( public_deps.add_to_crate_graph(&mut crate_graph, detached_file_crate); } - crate_graph + (crate_graph, FxHashMap::default()) } fn handle_rustc_crates( crate_graph: &mut CrateGraph, + proc_macros: &mut ProcMacros, pkg_to_lib_crate: &mut FxHashMap, load: &mut dyn FnMut(&AbsPath) -> Option, load_proc_macro: &mut dyn FnMut(&str, &AbsPath) -> ProcMacroLoadResult, @@ -1075,6 +1080,7 @@ fn handle_rustc_crates( if let Some(file_id) = load(&rustc_workspace[tgt].root) { let crate_id = add_target_crate_root( crate_graph, + proc_macros, &rustc_workspace[pkg], build_scripts.get_output(pkg), cfg_options.clone(), @@ -1140,6 +1146,7 @@ fn handle_rustc_crates( fn add_target_crate_root( crate_graph: &mut CrateGraph, + proc_macros: &mut ProcMacros, pkg: &PackageData, build_data: Option<&BuildScriptOutput>, cfg_options: CfgOptions, @@ -1176,14 +1183,8 @@ fn add_target_crate_root( } } - let proc_macro = match build_data.as_ref().map(|it| it.proc_macro_dylib_path.as_ref()) { - Some(Some(it)) => load_proc_macro(it), - Some(None) => Err("no proc macro dylib present".into()), - None => Err("crate has not (yet) been built".into()), - }; - let display_name = 
CrateDisplayName::from_canonical_name(cargo_name.to_string()); - crate_graph.add_crate_root( + let crate_id = crate_graph.add_crate_root( file_id, edition, Some(display_name), @@ -1191,11 +1192,19 @@ fn add_target_crate_root( cfg_options, potential_cfg_options, env, - proc_macro, is_proc_macro, CrateOrigin::CratesIo { repo: pkg.repository.clone(), name: Some(pkg.name.clone()) }, target_layout, - ) + ); + let proc_macro = match build_data.as_ref().map(|it| &it.proc_macro_dylib_path) { + Some(it) => it.as_deref().map(load_proc_macro), + None => Some(Err("crate has not (yet) been built".into())), + }; + if let Some(proc_macro) = proc_macro { + proc_macros.insert(crate_id, proc_macro); + } + + crate_id } #[derive(Default)] @@ -1237,7 +1246,6 @@ fn sysroot_to_crate_graph( cfg_options.clone(), cfg_options.clone(), env, - Err("no proc macro loaded for sysroot crate".into()), false, CrateOrigin::Lang(LangCrateOrigin::from(&*sysroot[krate].name)), target_layout.clone(), diff --git a/crates/rust-analyzer/src/cli/load_cargo.rs b/crates/rust-analyzer/src/cli/load_cargo.rs index 5a958d963e..2d15d673ed 100644 --- a/crates/rust-analyzer/src/cli/load_cargo.rs +++ b/crates/rust-analyzer/src/cli/load_cargo.rs @@ -6,7 +6,10 @@ use anyhow::Result; use crossbeam_channel::{unbounded, Receiver}; use hir::db::DefDatabase; use ide::{AnalysisHost, Change}; -use ide_db::{base_db::CrateGraph, FxHashMap}; +use ide_db::{ + base_db::{CrateGraph, ProcMacros}, + FxHashMap, +}; use proc_macro_api::ProcMacroServer; use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace}; use vfs::{loader::Handle, AbsPath, AbsPathBuf}; @@ -79,7 +82,7 @@ pub fn load_workspace( ProcMacroServerChoice::None => Err("proc macro server disabled".to_owned()), }; - let crate_graph = ws.to_crate_graph( + let (crate_graph, proc_macros) = ws.to_crate_graph( &mut |_, path: &AbsPath| { load_proc_macro(proc_macro_client.as_ref().map_err(|e| &**e), path, &[]) }, @@ -100,8 +103,13 @@ pub fn load_workspace( }); tracing::debug!("crate graph: {:?}", crate_graph); - let host = - load_crate_graph(crate_graph, project_folders.source_root_config, &mut vfs, &receiver); + let host = load_crate_graph( + crate_graph, + proc_macros, + project_folders.source_root_config, + &mut vfs, + &receiver, + ); if load_config.prefill_caches { host.analysis().parallel_prime_caches(1, |_| {})?; @@ -111,6 +119,7 @@ pub fn load_workspace( fn load_crate_graph( crate_graph: CrateGraph, + proc_macros: ProcMacros, source_root_config: SourceRootConfig, vfs: &mut vfs::Vfs, receiver: &Receiver, @@ -149,6 +158,7 @@ fn load_crate_graph( analysis_change.set_roots(source_roots); analysis_change.set_crate_graph(crate_graph); + analysis_change.set_proc_macros(proc_macros); host.apply_change(analysis_change); host diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs index 138b8446b9..65758419de 100644 --- a/crates/rust-analyzer/src/reload.rs +++ b/crates/rust-analyzer/src/reload.rs @@ -20,7 +20,7 @@ use ide::Change; use ide_db::{ base_db::{ CrateGraph, Env, ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind, - ProcMacroLoadResult, SourceRoot, VfsPath, + ProcMacroLoadResult, ProcMacros, SourceRoot, VfsPath, }, FxHashMap, }; @@ -355,7 +355,7 @@ impl GlobalState { }); // Create crate graph from all the workspaces - let crate_graph = { + let (crate_graph, proc_macros) = { let dummy_replacements = self.config.dummy_replacements(); let vfs = &mut self.vfs.write().0; @@ -376,6 +376,7 @@ impl GlobalState { }; let mut crate_graph = 
CrateGraph::default(); + let mut proc_macros = ProcMacros::default(); for (idx, ws) in self.workspaces.iter().enumerate() { let proc_macro_client = match self.proc_macro_clients.get(idx) { Some(res) => res.as_ref().map_err(|e| &**e), @@ -388,15 +389,17 @@ impl GlobalState { dummy_replacements.get(crate_name).map(|v| &**v).unwrap_or_default(), ) }; - crate_graph.extend(ws.to_crate_graph( + let (other, other_proc_macros) = ws.to_crate_graph( &mut load_proc_macro, &mut load, &self.config.cargo().extra_env, - )); + ); + crate_graph.extend(other, &mut proc_macros, other_proc_macros); } - crate_graph + (crate_graph, proc_macros) }; change.set_crate_graph(crate_graph); + change.set_proc_macros(proc_macros); self.source_root_config = project_folders.source_root_config; From e9fb2ffe4572471588b6bd602521a1421baf4dc3 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sat, 25 Mar 2023 16:47:41 +0100 Subject: [PATCH 1206/1945] Add lsp command for rebuilding proc macros --- crates/rust-analyzer/src/handlers.rs | 8 ++++++++ crates/rust-analyzer/src/lsp_ext.rs | 8 ++++++++ crates/rust-analyzer/src/main_loop.rs | 1 + editors/code/src/commands.ts | 4 ++++ editors/code/src/ctx.ts | 5 ++++- editors/code/src/lsp_ext.ts | 1 + editors/code/src/main.ts | 1 + 7 files changed, 27 insertions(+), 1 deletion(-) diff --git a/crates/rust-analyzer/src/handlers.rs b/crates/rust-analyzer/src/handlers.rs index 2fca2ab851..a56c245dca 100644 --- a/crates/rust-analyzer/src/handlers.rs +++ b/crates/rust-analyzer/src/handlers.rs @@ -52,6 +52,14 @@ pub(crate) fn handle_workspace_reload(state: &mut GlobalState, _: ()) -> Result< Ok(()) } +pub(crate) fn handle_proc_macros_reload(state: &mut GlobalState, _: ()) -> Result<()> { + state.proc_macro_clients.clear(); + state.proc_macro_changed = false; + + state.fetch_build_data_queue.request_op("reload proc macros request".to_string()); + Ok(()) +} + pub(crate) fn handle_cancel_flycheck(state: &mut GlobalState, _: ()) -> Result<()> { let _p = profile::span("handle_stop_flycheck"); state.flycheck.iter().for_each(|flycheck| flycheck.cancel()); diff --git a/crates/rust-analyzer/src/lsp_ext.rs b/crates/rust-analyzer/src/lsp_ext.rs index c7b513db98..2f8829ec73 100644 --- a/crates/rust-analyzer/src/lsp_ext.rs +++ b/crates/rust-analyzer/src/lsp_ext.rs @@ -51,6 +51,14 @@ impl Request for ReloadWorkspace { const METHOD: &'static str = "rust-analyzer/reloadWorkspace"; } +pub enum ReloadProcMacros {} + +impl Request for ReloadProcMacros { + type Params = (); + type Result = (); + const METHOD: &'static str = "rust-analyzer/reloadProcMacros"; +} + pub enum SyntaxTree {} impl Request for SyntaxTree { diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs index 67a54cde68..ae7457e347 100644 --- a/crates/rust-analyzer/src/main_loop.rs +++ b/crates/rust-analyzer/src/main_loop.rs @@ -633,6 +633,7 @@ impl GlobalState { dispatcher .on_sync_mut::(handlers::handle_workspace_reload) + .on_sync_mut::(handlers::handle_proc_macros_reload) .on_sync_mut::(handlers::handle_memory_usage) .on_sync_mut::(handlers::handle_shuffle_crate_graph) .on_sync::(handlers::handle_join_lines) diff --git a/editors/code/src/commands.ts b/editors/code/src/commands.ts index 8a953577e9..8ce3466ed4 100644 --- a/editors/code/src/commands.ts +++ b/editors/code/src/commands.ts @@ -749,6 +749,10 @@ export function reloadWorkspace(ctx: CtxInit): Cmd { return async () => ctx.client.sendRequest(ra.reloadWorkspace); } +export function reloadProcMacros(ctx: CtxInit): Cmd { + return async () => 
ctx.client.sendRequest(ra.reloadProcMacros); +} + export function addProject(ctx: CtxInit): Cmd { return async () => { const discoverProjectCommand = ctx.config.discoverProjectCommand; diff --git a/editors/code/src/ctx.ts b/editors/code/src/ctx.ts index 85579453a6..8da8b0d63a 100644 --- a/editors/code/src/ctx.ts +++ b/editors/code/src/ctx.ts @@ -378,10 +378,13 @@ export class Ctx { if (statusBar.tooltip.value) { statusBar.tooltip.appendText("\n\n"); } + statusBar.tooltip.appendMarkdown("\n\n[Open logs](command:rust-analyzer.openLogs)"); statusBar.tooltip.appendMarkdown( "\n\n[Reload Workspace](command:rust-analyzer.reloadWorkspace)" ); - statusBar.tooltip.appendMarkdown("\n\n[Open logs](command:rust-analyzer.openLogs)"); + statusBar.tooltip.appendMarkdown( + "\n\n[Rebuild Proc Macros](command:rust-analyzer.reloadProcMacros)" + ); statusBar.tooltip.appendMarkdown("\n\n[Restart server](command:rust-analyzer.startServer)"); statusBar.tooltip.appendMarkdown("\n\n[Stop server](command:rust-analyzer.stopServer)"); if (!status.quiescent) icon = "$(sync~spin) "; diff --git a/editors/code/src/lsp_ext.ts b/editors/code/src/lsp_ext.ts index 872d7199b8..a03777d1c0 100644 --- a/editors/code/src/lsp_ext.ts +++ b/editors/code/src/lsp_ext.ts @@ -43,6 +43,7 @@ export const relatedTests = new lc.RequestType("rust-analyzer/reloadWorkspace"); +export const reloadProcMacros = new lc.RequestType0("rust-analyzer/reloadProcMacros"); export const runFlycheck = new lc.NotificationType<{ textDocument: lc.TextDocumentIdentifier | null; diff --git a/editors/code/src/main.ts b/editors/code/src/main.ts index d5de00561b..7079f235ca 100644 --- a/editors/code/src/main.ts +++ b/editors/code/src/main.ts @@ -153,6 +153,7 @@ function createCommands(): Record { memoryUsage: { enabled: commands.memoryUsage }, shuffleCrateGraph: { enabled: commands.shuffleCrateGraph }, reloadWorkspace: { enabled: commands.reloadWorkspace }, + reloadProcMacros: { enabled: commands.reloadProcMacros }, addProject: { enabled: commands.addProject }, matchingBrace: { enabled: commands.matchingBrace }, joinLines: { enabled: commands.joinLines }, From 607375dc206259955a386c48d88c45529c316145 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sat, 25 Mar 2023 18:06:06 +0100 Subject: [PATCH 1207/1945] Load proc-macros asynchronously --- crates/base-db/src/input.rs | 21 ++--- crates/base-db/src/lib.rs | 4 +- crates/project-model/src/tests.rs | 14 +-- crates/project-model/src/workspace.rs | 41 +++----- crates/rust-analyzer/src/cli/load_cargo.rs | 22 ++++- crates/rust-analyzer/src/global_state.rs | 10 +- crates/rust-analyzer/src/handlers.rs | 5 +- crates/rust-analyzer/src/main_loop.rs | 18 +++- crates/rust-analyzer/src/reload.rs | 105 +++++++++++++++------ docs/dev/lsp-extensions.md | 2 +- 10 files changed, 154 insertions(+), 88 deletions(-) diff --git a/crates/base-db/src/input.rs b/crates/base-db/src/input.rs index 41a2abd803..9580b76faa 100644 --- a/crates/base-db/src/input.rs +++ b/crates/base-db/src/input.rs @@ -6,15 +6,16 @@ //! actual IO. See `vfs` and `project_model` in the `rust-analyzer` crate for how //! actual IO is done and lowered to input. 
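This commit splits what used to be a single per-crate field into two maps: one describing which dylib to load for each crate, and one holding the result of actually loading it. A rough sketch of that distinction, with stand-in types where the exact base-db definitions are not reproduced here:

    use std::{collections::HashMap, path::PathBuf};

    type CrateId = u32;
    // What to load: an optional crate name plus a dylib path, or an error
    // explaining why nothing can be loaded yet (e.g. "crate has not (yet) been built").
    type ProcMacroPaths = HashMap<CrateId, Result<(Option<String>, PathBuf), String>>;
    // What was loaded: the expanders (represented here as plain names) or a load error.
    type ProcMacros = HashMap<CrateId, Result<Vec<String>, String>>;

    fn load_all(paths: ProcMacroPaths) -> ProcMacros {
        paths
            .into_iter()
            .map(|(krate, entry)| {
                // A real loader would open the dylib; here the name just passes through.
                let loaded = entry.map(|(name, _dylib)| vec![name.unwrap_or_default()]);
                (krate, loaded)
            })
            .collect()
    }

    fn main() {
        let mut paths = ProcMacroPaths::new();
        paths.insert(0, Ok((Some("serde_derive".into()), PathBuf::from("libserde_derive.so"))));
        paths.insert(1, Err("crate has not (yet) been built".into()));
        let loaded = load_all(paths);
        assert!(loaded[&0].is_ok());
        assert!(loaded[&1].is_err());
    }
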
-use std::{fmt, ops, panic::RefUnwindSafe, str::FromStr, sync::Arc}; +use std::{fmt, mem, ops, panic::RefUnwindSafe, str::FromStr, sync::Arc}; use cfg::CfgOptions; use rustc_hash::FxHashMap; use stdx::hash::{NoHashHashMap, NoHashHashSet}; use syntax::SmolStr; use tt::token_id::Subtree; -use vfs::{file_set::FileSet, AnchoredPath, FileId, VfsPath}; +use vfs::{file_set::FileSet, AbsPathBuf, AnchoredPath, FileId, VfsPath}; +pub type ProcMacroPaths = FxHashMap, AbsPathBuf), String>>; pub type ProcMacros = FxHashMap; /// Files are grouped into source roots. A source root is a directory on the @@ -455,16 +456,11 @@ impl CrateGraph { } /// Extends this crate graph by adding a complete disjoint second crate - /// graph. + /// graph and adjust the ids in the [`ProcMacroPaths`] accordingly. /// /// The ids of the crates in the `other` graph are shifted by the return /// amount. - pub fn extend( - &mut self, - other: CrateGraph, - proc_macros: &mut ProcMacros, - other_proc_macros: ProcMacros, - ) -> u32 { + pub fn extend(&mut self, other: CrateGraph, proc_macros: &mut ProcMacroPaths) -> u32 { let start = self.arena.len() as u32; self.arena.extend(other.arena.into_iter().map(|(id, mut data)| { let new_id = id.shift(start); @@ -473,8 +469,11 @@ impl CrateGraph { } (new_id, data) })); - proc_macros - .extend(other_proc_macros.into_iter().map(|(id, macros)| (id.shift(start), macros))); + + *proc_macros = mem::take(proc_macros) + .into_iter() + .map(|(id, macros)| (id.shift(start), macros)) + .collect(); start } diff --git a/crates/base-db/src/lib.rs b/crates/base-db/src/lib.rs index 7ab9aa8709..f6975f2fbd 100644 --- a/crates/base-db/src/lib.rs +++ b/crates/base-db/src/lib.rs @@ -16,8 +16,8 @@ pub use crate::{ input::{ CrateData, CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, Edition, Env, LangCrateOrigin, ProcMacro, ProcMacroExpander, ProcMacroExpansionError, - ProcMacroId, ProcMacroKind, ProcMacroLoadResult, ProcMacros, SourceRoot, SourceRootId, - TargetLayoutLoadResult, + ProcMacroId, ProcMacroKind, ProcMacroLoadResult, ProcMacroPaths, ProcMacros, SourceRoot, + SourceRootId, TargetLayoutLoadResult, }, }; pub use salsa::{self, Cancelled}; diff --git a/crates/project-model/src/tests.rs b/crates/project-model/src/tests.rs index ed78d71a1a..26c4c89f76 100644 --- a/crates/project-model/src/tests.rs +++ b/crates/project-model/src/tests.rs @@ -3,7 +3,7 @@ use std::{ path::{Path, PathBuf}, }; -use base_db::{CrateGraph, FileId, ProcMacros}; +use base_db::{CrateGraph, FileId, ProcMacroPaths}; use cfg::{CfgAtom, CfgDiff}; use expect_test::{expect, Expect}; use paths::{AbsPath, AbsPathBuf}; @@ -14,11 +14,14 @@ use crate::{ WorkspaceBuildScripts, }; -fn load_cargo(file: &str) -> (CrateGraph, ProcMacros) { +fn load_cargo(file: &str) -> (CrateGraph, ProcMacroPaths) { load_cargo_with_overrides(file, CfgOverrides::default()) } -fn load_cargo_with_overrides(file: &str, cfg_overrides: CfgOverrides) -> (CrateGraph, ProcMacros) { +fn load_cargo_with_overrides( + file: &str, + cfg_overrides: CfgOverrides, +) -> (CrateGraph, ProcMacroPaths) { let meta = get_test_json_file(file); let cargo_workspace = CargoWorkspace::new(meta); let project_workspace = ProjectWorkspace::Cargo { @@ -34,7 +37,7 @@ fn load_cargo_with_overrides(file: &str, cfg_overrides: CfgOverrides) -> (CrateG to_crate_graph(project_workspace) } -fn load_rust_project(file: &str) -> (CrateGraph, ProcMacros) { +fn load_rust_project(file: &str) -> (CrateGraph, ProcMacroPaths) { let data = get_test_json_file(file); let project = 
rooted_project_json(data); let sysroot = Ok(get_fake_sysroot()); @@ -92,9 +95,8 @@ fn rooted_project_json(data: ProjectJsonData) -> ProjectJson { ProjectJson::new(base, data) } -fn to_crate_graph(project_workspace: ProjectWorkspace) -> (CrateGraph, ProcMacros) { +fn to_crate_graph(project_workspace: ProjectWorkspace) -> (CrateGraph, ProcMacroPaths) { project_workspace.to_crate_graph( - &mut |_, _| Ok(Vec::new()), &mut { let mut counter = 0; move |_path| { diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs index 5766859143..1fd7c68193 100644 --- a/crates/project-model/src/workspace.rs +++ b/crates/project-model/src/workspace.rs @@ -7,7 +7,7 @@ use std::{collections::VecDeque, fmt, fs, process::Command, sync::Arc}; use anyhow::{bail, format_err, Context, Result}; use base_db::{ CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, Edition, Env, - FileId, LangCrateOrigin, ProcMacroLoadResult, ProcMacros, TargetLayoutLoadResult, + FileId, LangCrateOrigin, ProcMacroPaths, TargetLayoutLoadResult, }; use cfg::{CfgDiff, CfgOptions}; use paths::{AbsPath, AbsPathBuf}; @@ -576,16 +576,14 @@ impl ProjectWorkspace { pub fn to_crate_graph( &self, - load_proc_macro: &mut dyn FnMut(&str, &AbsPath) -> ProcMacroLoadResult, load: &mut dyn FnMut(&AbsPath) -> Option, extra_env: &FxHashMap, - ) -> (CrateGraph, ProcMacros) { + ) -> (CrateGraph, ProcMacroPaths) { let _p = profile::span("ProjectWorkspace::to_crate_graph"); let (mut crate_graph, proc_macros) = match self { ProjectWorkspace::Json { project, sysroot, rustc_cfg } => project_json_to_crate_graph( rustc_cfg.clone(), - load_proc_macro, load, project, sysroot.as_ref().ok(), @@ -602,7 +600,6 @@ impl ProjectWorkspace { toolchain: _, target_layout, } => cargo_to_crate_graph( - load_proc_macro, load, rustc.as_ref().ok(), cargo, @@ -679,15 +676,14 @@ impl ProjectWorkspace { fn project_json_to_crate_graph( rustc_cfg: Vec, - load_proc_macro: &mut dyn FnMut(&str, &AbsPath) -> ProcMacroLoadResult, load: &mut dyn FnMut(&AbsPath) -> Option, project: &ProjectJson, sysroot: Option<&Sysroot>, extra_env: &FxHashMap, target_layout: TargetLayoutLoadResult, -) -> (CrateGraph, ProcMacros) { +) -> (CrateGraph, ProcMacroPaths) { let mut crate_graph = CrateGraph::default(); - let mut proc_macros = FxHashMap::<_, _>::default(); + let mut proc_macros = FxHashMap::default(); let sysroot_deps = sysroot.as_ref().map(|sysroot| { sysroot_to_crate_graph( &mut crate_graph, @@ -708,16 +704,15 @@ fn project_json_to_crate_graph( }) .map(|(crate_id, krate, file_id)| { let env = krate.env.clone().into_iter().collect(); - if let Some(it) = krate.proc_macro_dylib_path.clone() { + if let Some(path) = krate.proc_macro_dylib_path.clone() { proc_macros.insert( crate_id, - load_proc_macro( - krate.display_name.as_ref().map(|it| it.canonical_name()).unwrap_or(""), - &it, - ), + Ok(( + krate.display_name.as_ref().map(|it| it.canonical_name().to_owned()), + path, + )), ); } - let target_cfgs = match krate.target.as_deref() { Some(target) => cfg_cache .entry(target) @@ -782,7 +777,6 @@ fn project_json_to_crate_graph( } fn cargo_to_crate_graph( - load_proc_macro: &mut dyn FnMut(&str, &AbsPath) -> ProcMacroLoadResult, load: &mut dyn FnMut(&AbsPath) -> Option, rustc: Option<&(CargoWorkspace, WorkspaceBuildScripts)>, cargo: &CargoWorkspace, @@ -791,7 +785,7 @@ fn cargo_to_crate_graph( override_cfg: &CfgOverrides, build_scripts: &WorkspaceBuildScripts, target_layout: TargetLayoutLoadResult, -) -> (CrateGraph, ProcMacros) { +) -> (CrateGraph, 
ProcMacroPaths) { let _p = profile::span("cargo_to_crate_graph"); let mut crate_graph = CrateGraph::default(); let mut proc_macros = FxHashMap::default(); @@ -862,7 +856,6 @@ fn cargo_to_crate_graph( &cargo[pkg], build_scripts.get_output(pkg), cfg_options.clone(), - &mut |path| load_proc_macro(&cargo[tgt].name, path), file_id, &cargo[tgt].name, cargo[tgt].is_proc_macro, @@ -938,7 +931,6 @@ fn cargo_to_crate_graph( &mut proc_macros, &mut pkg_to_lib_crate, load, - load_proc_macro, rustc_workspace, cargo, &public_deps, @@ -966,7 +958,7 @@ fn detached_files_to_crate_graph( detached_files: &[AbsPathBuf], sysroot: Option<&Sysroot>, target_layout: TargetLayoutLoadResult, -) -> (CrateGraph, ProcMacros) { +) -> (CrateGraph, ProcMacroPaths) { let _p = profile::span("detached_files_to_crate_graph"); let mut crate_graph = CrateGraph::default(); let (public_deps, _libproc_macro) = match sysroot { @@ -1018,10 +1010,9 @@ fn detached_files_to_crate_graph( fn handle_rustc_crates( crate_graph: &mut CrateGraph, - proc_macros: &mut ProcMacros, + proc_macros: &mut ProcMacroPaths, pkg_to_lib_crate: &mut FxHashMap, load: &mut dyn FnMut(&AbsPath) -> Option, - load_proc_macro: &mut dyn FnMut(&str, &AbsPath) -> ProcMacroLoadResult, rustc_workspace: &CargoWorkspace, cargo: &CargoWorkspace, public_deps: &SysrootPublicDeps, @@ -1084,7 +1075,6 @@ fn handle_rustc_crates( &rustc_workspace[pkg], build_scripts.get_output(pkg), cfg_options.clone(), - &mut |path| load_proc_macro(&rustc_workspace[tgt].name, path), file_id, &rustc_workspace[tgt].name, rustc_workspace[tgt].is_proc_macro, @@ -1146,11 +1136,10 @@ fn handle_rustc_crates( fn add_target_crate_root( crate_graph: &mut CrateGraph, - proc_macros: &mut ProcMacros, + proc_macros: &mut ProcMacroPaths, pkg: &PackageData, build_data: Option<&BuildScriptOutput>, cfg_options: CfgOptions, - load_proc_macro: &mut dyn FnMut(&AbsPath) -> ProcMacroLoadResult, file_id: FileId, cargo_name: &str, is_proc_macro: bool, @@ -1197,11 +1186,11 @@ fn add_target_crate_root( target_layout, ); let proc_macro = match build_data.as_ref().map(|it| &it.proc_macro_dylib_path) { - Some(it) => it.as_deref().map(load_proc_macro), + Some(it) => it.clone().map(Ok), None => Some(Err("crate has not (yet) been built".into())), }; if let Some(proc_macro) = proc_macro { - proc_macros.insert(crate_id, proc_macro); + proc_macros.insert(crate_id, proc_macro.map(|path| (Some(cargo_name.to_owned()), path))); } crate_id diff --git a/crates/rust-analyzer/src/cli/load_cargo.rs b/crates/rust-analyzer/src/cli/load_cargo.rs index 2d15d673ed..f5bc3c12c1 100644 --- a/crates/rust-analyzer/src/cli/load_cargo.rs +++ b/crates/rust-analyzer/src/cli/load_cargo.rs @@ -69,7 +69,7 @@ pub fn load_workspace( Box::new(loader) }; - let proc_macro_client = match &load_config.with_proc_macro_server { + let proc_macro_server = match &load_config.with_proc_macro_server { ProcMacroServerChoice::Sysroot => ws .find_sysroot_proc_macro_srv() .ok_or_else(|| "failed to find sysroot proc-macro server".to_owned()) @@ -83,9 +83,6 @@ pub fn load_workspace( }; let (crate_graph, proc_macros) = ws.to_crate_graph( - &mut |_, path: &AbsPath| { - load_proc_macro(proc_macro_client.as_ref().map_err(|e| &**e), path, &[]) - }, &mut |path: &AbsPath| { let contents = loader.load_sync(path); let path = vfs::VfsPath::from(path.to_path_buf()); @@ -94,6 +91,21 @@ pub fn load_workspace( }, extra_env, ); + let proc_macros = { + let proc_macro_server = match &proc_macro_server { + Ok(it) => Ok(it), + Err(e) => Err(e.as_str()), + }; + proc_macros + .into_iter() + 
.map(|(crate_id, path)| { + ( + crate_id, + path.and_then(|(_, path)| load_proc_macro(proc_macro_server, &path, &[])), + ) + }) + .collect() + }; let project_folders = ProjectFolders::new(&[ws], &[]); loader.set_config(vfs::loader::Config { @@ -114,7 +126,7 @@ pub fn load_workspace( if load_config.prefill_caches { host.analysis().parallel_prime_caches(1, |_| {})?; } - Ok((host, vfs, proc_macro_client.ok())) + Ok((host, vfs, proc_macro_server.ok())) } fn load_crate_graph( diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs index aca6c92357..d02714ad1e 100644 --- a/crates/rust-analyzer/src/global_state.rs +++ b/crates/rust-analyzer/src/global_state.rs @@ -59,10 +59,11 @@ pub(crate) struct GlobalState { pub(crate) mem_docs: MemDocs, pub(crate) semantic_tokens_cache: Arc>>, pub(crate) shutdown_requested: bool, - pub(crate) proc_macro_changed: bool, pub(crate) last_reported_status: Option, pub(crate) source_root_config: SourceRootConfig, - pub(crate) proc_macro_clients: Vec>, + + pub(crate) proc_macro_changed: bool, + pub(crate) proc_macro_clients: Arc<[Result]>, pub(crate) flycheck: Arc<[FlycheckHandle]>, pub(crate) flycheck_sender: Sender, @@ -151,10 +152,11 @@ impl GlobalState { mem_docs: MemDocs::default(), semantic_tokens_cache: Arc::new(Default::default()), shutdown_requested: false, - proc_macro_changed: false, last_reported_status: None, source_root_config: SourceRootConfig::default(), - proc_macro_clients: vec![], + + proc_macro_changed: false, + proc_macro_clients: Arc::new([]), flycheck: Arc::new([]), flycheck_sender, diff --git a/crates/rust-analyzer/src/handlers.rs b/crates/rust-analyzer/src/handlers.rs index a56c245dca..8866515bb9 100644 --- a/crates/rust-analyzer/src/handlers.rs +++ b/crates/rust-analyzer/src/handlers.rs @@ -5,6 +5,7 @@ use std::{ io::Write as _, process::{self, Stdio}, + sync::Arc, }; use anyhow::Context; @@ -44,7 +45,7 @@ use crate::{ }; pub(crate) fn handle_workspace_reload(state: &mut GlobalState, _: ()) -> Result<()> { - state.proc_macro_clients.clear(); + state.proc_macro_clients = Arc::new([]); state.proc_macro_changed = false; state.fetch_workspaces_queue.request_op("reload workspace request".to_string()); @@ -53,7 +54,7 @@ pub(crate) fn handle_workspace_reload(state: &mut GlobalState, _: ()) -> Result< } pub(crate) fn handle_proc_macros_reload(state: &mut GlobalState, _: ()) -> Result<()> { - state.proc_macro_clients.clear(); + state.proc_macro_clients = Arc::new([]); state.proc_macro_changed = false; state.fetch_build_data_queue.request_op("reload proc macros request".to_string()); diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs index ae7457e347..8db526e0b7 100644 --- a/crates/rust-analyzer/src/main_loop.rs +++ b/crates/rust-analyzer/src/main_loop.rs @@ -24,7 +24,7 @@ use crate::{ handlers, lsp_ext, lsp_utils::{apply_document_changes, notification_is, Progress}, mem_docs::DocumentData, - reload::{self, BuildDataProgress, ProjectWorkspaceProgress}, + reload::{self, BuildDataProgress, ProcMacroProgress, ProjectWorkspaceProgress}, Result, }; @@ -68,6 +68,7 @@ pub(crate) enum Task { PrimeCaches(PrimeCachesProgress), FetchWorkspace(ProjectWorkspaceProgress), FetchBuildData(BuildDataProgress), + LoadProcMacros(ProcMacroProgress), } #[derive(Debug)] @@ -487,6 +488,21 @@ impl GlobalState { } }; + if let Some(state) = state { + self.report_progress("Building", state, msg, None, None); + } + } + Task::LoadProcMacros(progress) => { + let (state, msg) = match progress { + 
ProcMacroProgress::Begin => (Some(Progress::Begin), None), + ProcMacroProgress::Report(msg) => (Some(Progress::Report), Some(msg)), + ProcMacroProgress::End(proc_macro_load_result) => { + self.set_proc_macros(proc_macro_load_result); + + (Some(Progress::End), None) + } + }; + if let Some(state) = state { self.report_progress("Loading", state, msg, None, None); } diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs index 65758419de..f8f2cb0932 100644 --- a/crates/rust-analyzer/src/reload.rs +++ b/crates/rust-analyzer/src/reload.rs @@ -12,7 +12,7 @@ //! correct. Instead, we try to provide a best-effort service. Even if the //! project is currently loading and we don't have a full project model, we //! still want to respond to various requests. -use std::{collections::hash_map::Entry, mem, sync::Arc}; +use std::{collections::hash_map::Entry, iter, mem, sync::Arc}; use flycheck::{FlycheckConfig, FlycheckHandle}; use hir::db::DefDatabase; @@ -20,7 +20,7 @@ use ide::Change; use ide_db::{ base_db::{ CrateGraph, Env, ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind, - ProcMacroLoadResult, ProcMacros, SourceRoot, VfsPath, + ProcMacroLoadResult, ProcMacroPaths, ProcMacros, SourceRoot, VfsPath, }, FxHashMap, }; @@ -54,6 +54,13 @@ pub(crate) enum BuildDataProgress { End((Arc>, Vec>)), } +#[derive(Debug)] +pub(crate) enum ProcMacroProgress { + Begin, + Report(String), + End(ProcMacros), +} + impl GlobalState { pub(crate) fn is_quiescent(&self) -> bool { !(self.last_reported_status.is_none() @@ -216,6 +223,59 @@ impl GlobalState { }); } + pub(crate) fn load_proc_macros(&mut self, paths: Vec) { + tracing::info!("will load proc macros"); + let dummy_replacements = self.config.dummy_replacements().clone(); + let proc_macro_clients = self.proc_macro_clients.clone(); + + self.task_pool.handle.spawn_with_sender(move |sender| { + sender.send(Task::LoadProcMacros(ProcMacroProgress::Begin)).unwrap(); + + let dummy_replacements = &dummy_replacements; + let progress = { + let sender = sender.clone(); + &move |msg| { + sender.send(Task::LoadProcMacros(ProcMacroProgress::Report(msg))).unwrap() + } + }; + + let mut res = FxHashMap::default(); + for (client, paths) in proc_macro_clients + .iter() + .map(|res| res.as_ref().map_err(|e| &**e)) + .chain(iter::repeat_with(|| Err("Proc macros are disabled"))) + .zip(paths) + { + res.extend(paths.into_iter().map(move |(crate_id, res)| { + ( + crate_id, + res.and_then(|(crate_name, path)| { + progress(path.display().to_string()); + load_proc_macro( + client, + &path, + crate_name + .as_deref() + .and_then(|crate_name| { + dummy_replacements.get(crate_name).map(|v| &**v) + }) + .unwrap_or_default(), + ) + }), + ) + })); + } + + sender.send(Task::LoadProcMacros(ProcMacroProgress::End(res))).unwrap(); + }); + } + + pub(crate) fn set_proc_macros(&mut self, proc_macros: ProcMacros) { + let mut change = Change::new(); + change.set_proc_macros(proc_macros); + self.analysis_host.apply_change(change); + } + pub(crate) fn switch_workspaces(&mut self, cause: Cause) { let _p = profile::span("GlobalState::switch_workspaces"); tracing::info!(%cause, "will switch workspaces"); @@ -303,8 +363,6 @@ impl GlobalState { ); } - let mut change = Change::new(); - let files_config = self.config.files(); let project_folders = ProjectFolders::new(&self.workspaces, &files_config.exclude); @@ -353,11 +411,10 @@ impl GlobalState { watch, version: self.vfs_config_version, }); + self.source_root_config = project_folders.source_root_config; // Create crate 
graph from all the workspaces - let (crate_graph, proc_macros) = { - let dummy_replacements = self.config.dummy_replacements(); - + let (crate_graph, proc_macro_paths) = { let vfs = &mut self.vfs.write().0; let loader = &mut self.loader; let mem_docs = &self.mem_docs; @@ -376,34 +433,22 @@ impl GlobalState { }; let mut crate_graph = CrateGraph::default(); - let mut proc_macros = ProcMacros::default(); - for (idx, ws) in self.workspaces.iter().enumerate() { - let proc_macro_client = match self.proc_macro_clients.get(idx) { - Some(res) => res.as_ref().map_err(|e| &**e), - None => Err("Proc macros are disabled"), - }; - let mut load_proc_macro = move |crate_name: &str, path: &AbsPath| { - load_proc_macro( - proc_macro_client, - path, - dummy_replacements.get(crate_name).map(|v| &**v).unwrap_or_default(), - ) - }; - let (other, other_proc_macros) = ws.to_crate_graph( - &mut load_proc_macro, - &mut load, - &self.config.cargo().extra_env, - ); - crate_graph.extend(other, &mut proc_macros, other_proc_macros); + let mut proc_macros = Vec::default(); + for ws in &**self.workspaces { + let (other, mut crate_proc_macros) = + ws.to_crate_graph(&mut load, &self.config.cargo().extra_env); + crate_graph.extend(other, &mut crate_proc_macros); + proc_macros.push(crate_proc_macros); } (crate_graph, proc_macros) }; + let mut change = Change::new(); change.set_crate_graph(crate_graph); - change.set_proc_macros(proc_macros); - - self.source_root_config = project_folders.source_root_config; - self.analysis_host.apply_change(change); + + if same_workspaces { + self.load_proc_macros(proc_macro_paths); + } self.process_changes(); self.reload_flycheck(); tracing::info!("did switch workspaces"); diff --git a/docs/dev/lsp-extensions.md b/docs/dev/lsp-extensions.md index de14220320..11eda94f5b 100644 --- a/docs/dev/lsp-extensions.md +++ b/docs/dev/lsp-extensions.md @@ -1,5 +1,5 @@
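In reload.rs the per-workspace path maps are paired with the proc-macro servers, and any workspace beyond the available servers falls back to a "Proc macros are disabled" error. A small self-contained sketch of that pairing, assuming simplified types in place of the real client and path types:

    use std::iter;

    fn main() {
        // One running server, but two workspaces' worth of proc-macro paths.
        let servers: Vec<Result<&str, &str>> = vec![Ok("server for workspace 0")];
        let per_workspace_paths: Vec<Vec<&str>> = vec![vec!["a.so"], vec!["b.so"]];

        let pairs: Vec<_> = servers
            .into_iter()
            // Workspaces without a server get a stable error value instead.
            .chain(iter::repeat(Err("Proc macros are disabled")))
            .zip(per_workspace_paths)
            .collect();

        assert_eq!(pairs.len(), 2);
        assert!(pairs[0].0.is_ok());
        assert!(pairs[1].0.is_err());
    }
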