diff --git a/Cargo.lock b/Cargo.lock index 41c5d36671..d27ae416f0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -112,6 +112,12 @@ version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" +[[package]] +name = "byteorder" +version = "1.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" + [[package]] name = "camino" version = "1.1.1" @@ -171,9 +177,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chalk-derive" -version = "0.86.0" +version = "0.88.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5499d415d855b5094366a824815341893ad3de0ecb6048c430118bdae6d27402" +checksum = "4df80a3fbc1f0e59f560eeeebca94bf655566a8ad3023c210a109deb6056455a" dependencies = [ "proc-macro2", "quote", @@ -183,9 +189,9 @@ dependencies = [ [[package]] name = "chalk-ir" -version = "0.86.0" +version = "0.88.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3800118c76a48507b0eece3a01f3a429b5c478d203c493096e6040c67ab960e1" +checksum = "f39e5272016916956298cceea5147006f897972c274a768ed4d6e074efe5d3fb" dependencies = [ "bitflags", "chalk-derive", @@ -194,9 +200,9 @@ dependencies = [ [[package]] name = "chalk-recursive" -version = "0.86.0" +version = "0.88.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1baf60628fd73104d1f8562586a52d48f37f1e84435aab2e62674b1fd935b8c8" +checksum = "d9d60b42ad7478d3e027e2f9ea4e99fbbb8fdee0c8c3cf068be269f57e603618" dependencies = [ "chalk-derive", "chalk-ir", @@ -207,9 +213,9 @@ dependencies = [ [[package]] name = "chalk-solve" -version = "0.86.0" +version = "0.88.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e9c3c068f9358786348e58a1b94ef0a5cf90a9810fc1f10fda896f0b5d80185" +checksum = "ab30620ea5b36819525eaab2204f4b8e1842fc7ee36826424a28bef59ae7fecf" dependencies = [ "chalk-derive", "chalk-ir", @@ -510,6 +516,8 @@ dependencies = [ "fst", "hashbrown", "hir-expand", + "hkalbasi-rustc-ap-rustc_abi", + "hkalbasi-rustc-ap-rustc_index", "indexmap", "itertools", "la-arena", @@ -555,6 +563,7 @@ version = "0.0.0" dependencies = [ "arrayvec", "base-db", + "bitflags", "chalk-derive", "chalk-ir", "chalk-recursive", @@ -564,6 +573,7 @@ dependencies = [ "expect-test", "hir-def", "hir-expand", + "hkalbasi-rustc-ap-rustc_index", "itertools", "la-arena", "limit", @@ -581,6 +591,27 @@ dependencies = [ "typed-arena", ] +[[package]] +name = "hkalbasi-rustc-ap-rustc_abi" +version = "0.0.20221221" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "adabaadad9aa7576f97af02241cdf5554d62fb3d51a84cb05d77ba28edd3013f" +dependencies = [ + "bitflags", + "hkalbasi-rustc-ap-rustc_index", + "tracing", +] + +[[package]] +name = "hkalbasi-rustc-ap-rustc_index" +version = "0.0.20221221" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4d3c48474e09afb0f5efbd6f758e05411699301a113c47d454d28ec7059d00e" +dependencies = [ + "arrayvec", + "smallvec", +] + [[package]] name = "home" version = "0.5.4" @@ -631,6 +662,7 @@ dependencies = [ "ide-db", "itertools", "profile", + "smallvec", "sourcegen", "stdx", "syntax", @@ -1750,6 +1782,33 @@ dependencies = [ "tikv-jemalloc-sys", ] +[[package]] +name = "time" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"a561bf4617eebd33bca6434b988f39ed798e527f51a1e797d0ee4f61c0a38376" +dependencies = [ + "itoa", + "serde", + "time-core", + "time-macros", +] + +[[package]] +name = "time-core" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e153e1f1acaef8acc537e68b44906d2db6436e2b35ac2c6b42640fff91f00fd" + +[[package]] +name = "time-macros" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d967f99f534ca7e495c575c62638eebc2898a8c84c119b89e250477bc4ba16b2" +dependencies = [ + "time-core", +] + [[package]] name = "tinyvec" version = "1.6.0" @@ -2148,4 +2207,18 @@ dependencies = [ "write-json", "xflags", "xshell", + "zip", +] + +[[package]] +name = "zip" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "537ce7411d25e54e8ae21a7ce0b15840e7bfcff15b51d697ec3266cc76bdf080" +dependencies = [ + "byteorder", + "crc32fast", + "crossbeam-utils", + "flate2", + "time", ] diff --git a/crates/base-db/src/fixture.rs b/crates/base-db/src/fixture.rs index 5b7828a269..6f83ea40e7 100644 --- a/crates/base-db/src/fixture.rs +++ b/crates/base-db/src/fixture.rs @@ -162,6 +162,7 @@ impl ChangeFixture { Ok(Vec::new()), false, origin, + meta.target_data_layout.as_deref().map(Arc::from), ); let prev = crates.insert(crate_name.clone(), crate_id); assert!(prev.is_none()); @@ -197,6 +198,7 @@ impl ChangeFixture { Ok(Vec::new()), false, CrateOrigin::CratesIo { repo: None, name: None }, + None, ); } else { for (from, to, prelude) in crate_deps { @@ -210,6 +212,8 @@ impl ChangeFixture { .unwrap(); } } + let target_layout = + crate_graph.iter().next().and_then(|it| crate_graph[it].target_layout.clone()); if let Some(mini_core) = mini_core { let core_file = file_id; @@ -234,6 +238,7 @@ impl ChangeFixture { Ok(Vec::new()), false, CrateOrigin::Lang(LangCrateOrigin::Core), + target_layout.clone(), ); for krate in all_crates { @@ -271,6 +276,7 @@ impl ChangeFixture { Ok(proc_macro), true, CrateOrigin::CratesIo { repo: None, name: None }, + target_layout, ); for krate in all_crates { @@ -391,6 +397,7 @@ struct FileMeta { edition: Edition, env: Env, introduce_new_source_root: Option, + target_data_layout: Option, } fn parse_crate(crate_str: String) -> (String, CrateOrigin, Option) { @@ -400,9 +407,9 @@ fn parse_crate(crate_str: String) -> (String, CrateOrigin, Option) { Some((version, url)) => { (version, CrateOrigin::CratesIo { repo: Some(url.to_owned()), name: None }) } - _ => panic!("Bad crates.io parameter: {}", data), + _ => panic!("Bad crates.io parameter: {data}"), }, - _ => panic!("Bad string for crate origin: {}", b), + _ => panic!("Bad string for crate origin: {b}"), }; (a.to_owned(), origin, Some(version.to_string())) } else { @@ -432,8 +439,9 @@ impl From for FileMeta { introduce_new_source_root: f.introduce_new_source_root.map(|kind| match &*kind { "local" => SourceRootKind::Local, "library" => SourceRootKind::Library, - invalid => panic!("invalid source root kind '{}'", invalid), + invalid => panic!("invalid source root kind '{invalid}'"), }), + target_data_layout: f.target_data_layout, } } } diff --git a/crates/base-db/src/input.rs b/crates/base-db/src/input.rs index e7f0c4ec29..5fa4a80249 100644 --- a/crates/base-db/src/input.rs +++ b/crates/base-db/src/input.rs @@ -128,7 +128,7 @@ impl fmt::Display for CrateName { impl ops::Deref for CrateName { type Target = str; fn deref(&self) -> &str { - &*self.0 + &self.0 } } @@ -211,7 +211,7 @@ impl fmt::Display for CrateDisplayName { impl ops::Deref 
for CrateDisplayName { type Target = str; fn deref(&self) -> &str { - &*self.crate_name + &self.crate_name } } @@ -270,6 +270,7 @@ pub struct CrateData { pub display_name: Option, pub cfg_options: CfgOptions, pub potential_cfg_options: CfgOptions, + pub target_layout: Option>, pub env: Env, pub dependencies: Vec, pub proc_macro: ProcMacroLoadResult, @@ -328,6 +329,7 @@ impl CrateGraph { proc_macro: ProcMacroLoadResult, is_proc_macro: bool, origin: CrateOrigin, + target_layout: Option>, ) -> CrateId { let data = CrateData { root_file_id, @@ -340,6 +342,7 @@ impl CrateGraph { proc_macro, dependencies: Vec::new(), origin, + target_layout, is_proc_macro, }; let crate_id = CrateId(self.arena.len() as u32); @@ -615,8 +618,8 @@ impl CyclicDependenciesError { impl fmt::Display for CyclicDependenciesError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let render = |(id, name): &(CrateId, Option)| match name { - Some(it) => format!("{}({:?})", it, id), - None => format!("{:?}", id), + Some(it) => format!("{it}({id:?})"), + None => format!("{id:?}"), }; let path = self.path.iter().rev().map(render).collect::>().join(" -> "); write!( @@ -649,6 +652,7 @@ mod tests { Ok(Vec::new()), false, CrateOrigin::CratesIo { repo: None, name: None }, + None, ); let crate2 = graph.add_crate_root( FileId(2u32), @@ -661,6 +665,7 @@ mod tests { Ok(Vec::new()), false, CrateOrigin::CratesIo { repo: None, name: None }, + None, ); let crate3 = graph.add_crate_root( FileId(3u32), @@ -673,6 +678,7 @@ mod tests { Ok(Vec::new()), false, CrateOrigin::CratesIo { repo: None, name: None }, + None, ); assert!(graph .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2)) @@ -699,6 +705,7 @@ mod tests { Ok(Vec::new()), false, CrateOrigin::CratesIo { repo: None, name: None }, + None, ); let crate2 = graph.add_crate_root( FileId(2u32), @@ -711,6 +718,7 @@ mod tests { Ok(Vec::new()), false, CrateOrigin::CratesIo { repo: None, name: None }, + None, ); assert!(graph .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2)) @@ -734,6 +742,7 @@ mod tests { Ok(Vec::new()), false, CrateOrigin::CratesIo { repo: None, name: None }, + None, ); let crate2 = graph.add_crate_root( FileId(2u32), @@ -746,6 +755,7 @@ mod tests { Ok(Vec::new()), false, CrateOrigin::CratesIo { repo: None, name: None }, + None, ); let crate3 = graph.add_crate_root( FileId(3u32), @@ -758,6 +768,7 @@ mod tests { Ok(Vec::new()), false, CrateOrigin::CratesIo { repo: None, name: None }, + None, ); assert!(graph .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2)) @@ -781,6 +792,7 @@ mod tests { Ok(Vec::new()), false, CrateOrigin::CratesIo { repo: None, name: None }, + None, ); let crate2 = graph.add_crate_root( FileId(2u32), @@ -793,6 +805,7 @@ mod tests { Ok(Vec::new()), false, CrateOrigin::CratesIo { repo: None, name: None }, + None, ); assert!(graph .add_dep( diff --git a/crates/base-db/src/lib.rs b/crates/base-db/src/lib.rs index da11e4ae7b..55a51d3bbb 100644 --- a/crates/base-db/src/lib.rs +++ b/crates/base-db/src/lib.rs @@ -75,9 +75,9 @@ pub trait SourceDatabase: FileLoader + std::fmt::Debug { } fn parse_query(db: &dyn SourceDatabase, file_id: FileId) -> Parse { - let _p = profile::span("parse_query").detail(|| format!("{:?}", file_id)); + let _p = profile::span("parse_query").detail(|| format!("{file_id:?}")); let text = db.file_text(file_id); - SourceFile::parse(&*text) + SourceFile::parse(&text) } /// We don't want to give HIR knowledge of source roots, hence we extract these diff --git 
a/crates/cfg/src/cfg_expr.rs b/crates/cfg/src/cfg_expr.rs index fd9e31ed3b..5f4eefa836 100644 --- a/crates/cfg/src/cfg_expr.rs +++ b/crates/cfg/src/cfg_expr.rs @@ -44,7 +44,7 @@ impl fmt::Display for CfgAtom { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { CfgAtom::Flag(name) => name.fmt(f), - CfgAtom::KeyValue { key, value } => write!(f, "{} = {:?}", key, value), + CfgAtom::KeyValue { key, value } => write!(f, "{key} = {value:?}"), } } } diff --git a/crates/cfg/src/lib.rs b/crates/cfg/src/lib.rs index d78ef4fb11..30709c968d 100644 --- a/crates/cfg/src/lib.rs +++ b/crates/cfg/src/lib.rs @@ -37,7 +37,7 @@ impl fmt::Debug for CfgOptions { .iter() .map(|atom| match atom { CfgAtom::Flag(it) => it.to_string(), - CfgAtom::KeyValue { key, value } => format!("{}={}", key, value), + CfgAtom::KeyValue { key, value } => format!("{key}={value}"), }) .collect::>(); items.sort(); @@ -175,7 +175,7 @@ impl fmt::Display for InactiveReason { atom.fmt(f)?; } let is_are = if self.enabled.len() == 1 { "is" } else { "are" }; - write!(f, " {} enabled", is_are)?; + write!(f, " {is_are} enabled")?; if !self.disabled.is_empty() { f.write_str(" and ")?; @@ -194,7 +194,7 @@ impl fmt::Display for InactiveReason { atom.fmt(f)?; } let is_are = if self.disabled.len() == 1 { "is" } else { "are" }; - write!(f, " {} disabled", is_are)?; + write!(f, " {is_are} disabled")?; } Ok(()) diff --git a/crates/flycheck/src/lib.rs b/crates/flycheck/src/lib.rs index 8f93dad06e..11f7b068ec 100644 --- a/crates/flycheck/src/lib.rs +++ b/crates/flycheck/src/lib.rs @@ -47,6 +47,7 @@ pub enum FlycheckConfig { features: Vec, extra_args: Vec, extra_env: FxHashMap, + ansi_color_output: bool, }, CustomCommand { command: String, @@ -60,9 +61,9 @@ pub enum FlycheckConfig { impl fmt::Display for FlycheckConfig { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { - FlycheckConfig::CargoCommand { command, .. } => write!(f, "cargo {}", command), + FlycheckConfig::CargoCommand { command, .. } => write!(f, "cargo {command}"), FlycheckConfig::CustomCommand { command, args, .. } => { - write!(f, "{} {}", command, args.join(" ")) + write!(f, "{command} {}", args.join(" ")) } } } @@ -293,15 +294,24 @@ impl FlycheckActor { extra_args, features, extra_env, + ansi_color_output, } => { let mut cmd = Command::new(toolchain::cargo()); cmd.arg(command); cmd.current_dir(&self.root); - cmd.args(&["--workspace", "--message-format=json", "--manifest-path"]) - .arg(self.root.join("Cargo.toml").as_os_str()); + cmd.arg("--workspace"); + + cmd.arg(if *ansi_color_output { + "--message-format=json-diagnostic-rendered-ansi" + } else { + "--message-format=json" + }); + + cmd.arg("--manifest-path"); + cmd.arg(self.root.join("Cargo.toml").as_os_str()); for target in target_triples { - cmd.args(&["--target", target.as_str()]); + cmd.args(["--target", target.as_str()]); } if *all_targets { cmd.arg("--all-targets"); @@ -360,13 +370,20 @@ impl FlycheckActor { } } -struct JodChild(GroupChild); +struct JodGroupChild(GroupChild); + +impl Drop for JodGroupChild { + fn drop(&mut self) { + _ = self.0.kill(); + _ = self.0.wait(); + } +} /// A handle to a cargo process used for fly-checking. struct CargoHandle { /// The handle to the actual cargo process. As we cannot cancel directly from with /// a read syscall dropping and therefore terminating the process is our best option. 
- child: JodChild, + child: JodGroupChild, thread: jod_thread::JoinHandle>, receiver: Receiver, } @@ -374,7 +391,7 @@ struct CargoHandle { impl CargoHandle { fn spawn(mut command: Command) -> std::io::Result { command.stdout(Stdio::piped()).stderr(Stdio::piped()).stdin(Stdio::null()); - let mut child = command.group_spawn().map(JodChild)?; + let mut child = command.group_spawn().map(JodGroupChild)?; let stdout = child.0.inner().stdout.take().unwrap(); let stderr = child.0.inner().stderr.take().unwrap(); @@ -401,8 +418,7 @@ impl CargoHandle { Ok(()) } else { Err(io::Error::new(io::ErrorKind::Other, format!( - "Cargo watcher failed, the command produced no valid metadata (exit code: {:?}):\n{}", - exit_status, error + "Cargo watcher failed, the command produced no valid metadata (exit code: {exit_status:?}):\n{error}" ))) } } @@ -467,7 +483,7 @@ impl CargoActor { ); match output { Ok(_) => Ok((read_at_least_one_message, error)), - Err(e) => Err(io::Error::new(e.kind(), format!("{:?}: {}", e, error))), + Err(e) => Err(io::Error::new(e.kind(), format!("{e:?}: {error}"))), } } } diff --git a/crates/hir-def/Cargo.toml b/crates/hir-def/Cargo.toml index 22f98ea7cd..698be76656 100644 --- a/crates/hir-def/Cargo.toml +++ b/crates/hir-def/Cargo.toml @@ -33,6 +33,8 @@ base-db = { path = "../base-db", version = "0.0.0" } syntax = { path = "../syntax", version = "0.0.0" } profile = { path = "../profile", version = "0.0.0" } hir-expand = { path = "../hir-expand", version = "0.0.0" } +rustc_abi = { version = "0.0.20221221", package = "hkalbasi-rustc-ap-rustc_abi", default-features = false } +rustc_index = { version = "0.0.20221221", package = "hkalbasi-rustc-ap-rustc_index", default-features = false } mbe = { path = "../mbe", version = "0.0.0" } cfg = { path = "../cfg", version = "0.0.0" } tt = { path = "../tt", version = "0.0.0" } diff --git a/crates/hir-def/src/adt.rs b/crates/hir-def/src/adt.rs index 938db032fb..db3b419488 100644 --- a/crates/hir-def/src/adt.rs +++ b/crates/hir-def/src/adt.rs @@ -1,6 +1,6 @@ //! 
Defines hir-level representation of structs, enums and unions -use std::{num::NonZeroU32, sync::Arc}; +use std::sync::Arc; use base_db::CrateId; use either::Either; @@ -9,6 +9,7 @@ use hir_expand::{ HirFileId, InFile, }; use la_arena::{Arena, ArenaMap}; +use rustc_abi::{Integer, IntegerType}; use syntax::ast::{self, HasName, HasVisibility}; use tt::{Delimiter, DelimiterKind, Leaf, Subtree, TokenTree}; @@ -18,6 +19,7 @@ use crate::{ db::DefDatabase, intern::Interned, item_tree::{AttrOwner, Field, FieldAstId, Fields, ItemTree, ModItem, RawVisibilityId}, + layout::{Align, ReprFlags, ReprOptions}, nameres::diagnostics::DefDiagnostic, src::HasChildSource, src::HasSource, @@ -34,16 +36,18 @@ use cfg::CfgOptions; pub struct StructData { pub name: Name, pub variant_data: Arc, - pub repr: Option, + pub repr: Option, pub visibility: RawVisibility, + pub rustc_has_incoherent_inherent_impls: bool, } #[derive(Debug, Clone, PartialEq, Eq)] pub struct EnumData { pub name: Name, pub variants: Arena, - pub repr: Option, + pub repr: Option, pub visibility: RawVisibility, + pub rustc_has_incoherent_inherent_impls: bool, } #[derive(Debug, Clone, PartialEq, Eq)] @@ -67,80 +71,91 @@ pub struct FieldData { pub visibility: RawVisibility, } -#[derive(Copy, Debug, Clone, PartialEq, Eq)] -pub enum ReprKind { - C, - BuiltinInt { builtin: Either, is_c: bool }, - Transparent, - Default, -} - -#[derive(Copy, Debug, Clone, PartialEq, Eq)] -pub struct ReprData { - pub kind: ReprKind, - pub packed: bool, - pub align: Option, -} - fn repr_from_value( db: &dyn DefDatabase, krate: CrateId, item_tree: &ItemTree, of: AttrOwner, -) -> Option { +) -> Option { item_tree.attrs(db, krate, of).by_key("repr").tt_values().find_map(parse_repr_tt) } -fn parse_repr_tt(tt: &Subtree) -> Option { +fn parse_repr_tt(tt: &Subtree) -> Option { match tt.delimiter { Some(Delimiter { kind: DelimiterKind::Parenthesis, .. }) => {} _ => return None, } - let mut data = ReprData { kind: ReprKind::Default, packed: false, align: None }; + let mut flags = ReprFlags::empty(); + let mut int = None; + let mut max_align: Option = None; + let mut min_pack: Option = None; let mut tts = tt.token_trees.iter().peekable(); while let Some(tt) = tts.next() { if let TokenTree::Leaf(Leaf::Ident(ident)) = tt { - match &*ident.text { + flags.insert(match &*ident.text { "packed" => { - data.packed = true; - if let Some(TokenTree::Subtree(_)) = tts.peek() { + let pack = if let Some(TokenTree::Subtree(tt)) = tts.peek() { tts.next(); - } + if let Some(TokenTree::Leaf(Leaf::Literal(lit))) = tt.token_trees.first() { + lit.text.parse().unwrap_or_default() + } else { + 0 + } + } else { + 0 + }; + let pack = Align::from_bytes(pack).unwrap(); + min_pack = + Some(if let Some(min_pack) = min_pack { min_pack.min(pack) } else { pack }); + ReprFlags::empty() } "align" => { if let Some(TokenTree::Subtree(tt)) = tts.peek() { tts.next(); if let Some(TokenTree::Leaf(Leaf::Literal(lit))) = tt.token_trees.first() { if let Ok(align) = lit.text.parse() { - data.align = Some(align); + let align = Align::from_bytes(align).ok(); + max_align = max_align.max(align); } } } + ReprFlags::empty() } - "C" => { - if let ReprKind::BuiltinInt { is_c, .. 
} = &mut data.kind { - *is_c = true; - } else { - data.kind = ReprKind::C; - } - } - "transparent" => data.kind = ReprKind::Transparent, + "C" => ReprFlags::IS_C, + "transparent" => ReprFlags::IS_TRANSPARENT, repr => { - let is_c = matches!(data.kind, ReprKind::C); if let Some(builtin) = BuiltinInt::from_suffix(repr) .map(Either::Left) .or_else(|| BuiltinUint::from_suffix(repr).map(Either::Right)) { - data.kind = ReprKind::BuiltinInt { builtin, is_c }; + int = Some(match builtin { + Either::Left(bi) => match bi { + BuiltinInt::Isize => IntegerType::Pointer(true), + BuiltinInt::I8 => IntegerType::Fixed(Integer::I8, true), + BuiltinInt::I16 => IntegerType::Fixed(Integer::I16, true), + BuiltinInt::I32 => IntegerType::Fixed(Integer::I32, true), + BuiltinInt::I64 => IntegerType::Fixed(Integer::I64, true), + BuiltinInt::I128 => IntegerType::Fixed(Integer::I128, true), + }, + Either::Right(bu) => match bu { + BuiltinUint::Usize => IntegerType::Pointer(false), + BuiltinUint::U8 => IntegerType::Fixed(Integer::I8, false), + BuiltinUint::U16 => IntegerType::Fixed(Integer::I16, false), + BuiltinUint::U32 => IntegerType::Fixed(Integer::I32, false), + BuiltinUint::U64 => IntegerType::Fixed(Integer::I64, false), + BuiltinUint::U128 => IntegerType::Fixed(Integer::I128, false), + }, + }); } + ReprFlags::empty() } - } + }) } } - Some(data) + Some(ReprOptions { int, align: max_align, pack: min_pack, flags, field_shuffle_seed: 0 }) } impl StructData { @@ -157,6 +172,10 @@ impl StructData { let item_tree = loc.id.item_tree(db); let repr = repr_from_value(db, krate, &item_tree, ModItem::from(loc.id.value).into()); let cfg_options = db.crate_graph()[loc.container.krate].cfg_options.clone(); + let rustc_has_incoherent_inherent_impls = item_tree + .attrs(db, loc.container.krate, ModItem::from(loc.id.value).into()) + .by_key("rustc_has_incoherent_inherent_impls") + .exists(); let strukt = &item_tree[loc.id.value]; let (variant_data, diagnostics) = lower_fields( @@ -175,6 +194,7 @@ impl StructData { variant_data: Arc::new(variant_data), repr, visibility: item_tree[strukt.visibility].clone(), + rustc_has_incoherent_inherent_impls, }), diagnostics.into(), ) @@ -194,6 +214,11 @@ impl StructData { let repr = repr_from_value(db, krate, &item_tree, ModItem::from(loc.id.value).into()); let cfg_options = db.crate_graph()[loc.container.krate].cfg_options.clone(); + let rustc_has_incoherent_inherent_impls = item_tree + .attrs(db, loc.container.krate, ModItem::from(loc.id.value).into()) + .by_key("rustc_has_incoherent_inherent_impls") + .exists(); + let union = &item_tree[loc.id.value]; let (variant_data, diagnostics) = lower_fields( db, @@ -211,6 +236,7 @@ impl StructData { variant_data: Arc::new(variant_data), repr, visibility: item_tree[union.visibility].clone(), + rustc_has_incoherent_inherent_impls, }), diagnostics.into(), ) @@ -231,6 +257,10 @@ impl EnumData { let item_tree = loc.id.item_tree(db); let cfg_options = db.crate_graph()[krate].cfg_options.clone(); let repr = repr_from_value(db, krate, &item_tree, ModItem::from(loc.id.value).into()); + let rustc_has_incoherent_inherent_impls = item_tree + .attrs(db, loc.container.krate, ModItem::from(loc.id.value).into()) + .by_key("rustc_has_incoherent_inherent_impls") + .exists(); let enum_ = &item_tree[loc.id.value]; let mut variants = Arena::new(); @@ -271,6 +301,7 @@ impl EnumData { variants, repr, visibility: item_tree[enum_.visibility].clone(), + rustc_has_incoherent_inherent_impls, }), diagnostics.into(), ) @@ -281,10 +312,10 @@ impl EnumData { Some(id) } - pub fn 
variant_body_type(&self) -> Either { + pub fn variant_body_type(&self) -> IntegerType { match self.repr { - Some(ReprData { kind: ReprKind::BuiltinInt { builtin, .. }, .. }) => builtin, - _ => Either::Left(BuiltinInt::Isize), + Some(ReprOptions { int: Some(builtin), .. }) => builtin, + _ => IntegerType::Pointer(true), } } } diff --git a/crates/hir-def/src/attr.rs b/crates/hir-def/src/attr.rs index 2b39c6f8da..ab5d180e1b 100644 --- a/crates/hir-def/src/attr.rs +++ b/crates/hir-def/src/attr.rs @@ -712,7 +712,7 @@ impl AttrSourceMap { self.source .get(ast_idx) .map(|it| InFile::new(file_id, it)) - .unwrap_or_else(|| panic!("cannot find attr at index {:?}", id)) + .unwrap_or_else(|| panic!("cannot find attr at index {id:?}")) } } diff --git a/crates/hir-def/src/body.rs b/crates/hir-def/src/body.rs index 759f3b8c04..78fbaa9d7d 100644 --- a/crates/hir-def/src/body.rs +++ b/crates/hir-def/src/body.rs @@ -372,7 +372,7 @@ impl Body { /// Retrieves all ident patterns this pattern shares the ident with. pub fn ident_patterns_for<'slf>(&'slf self, pat: &'slf PatId) -> &'slf [PatId] { match self.or_pats.get(pat) { - Some(pats) => &**pats, + Some(pats) => pats, None => std::slice::from_ref(pat), } } diff --git a/crates/hir-def/src/body/lower.rs b/crates/hir-def/src/body/lower.rs index ccc01c3efc..e8da24e3ad 100644 --- a/crates/hir-def/src/body/lower.rs +++ b/crates/hir-def/src/body/lower.rs @@ -371,6 +371,10 @@ impl ExprCollector<'_> { let expr = e.expr().map(|e| self.collect_expr(e)); self.alloc_expr(Expr::Yield { expr }, syntax_ptr) } + ast::Expr::YeetExpr(e) => { + let expr = e.expr().map(|e| self.collect_expr(e)); + self.alloc_expr(Expr::Yeet { expr }, syntax_ptr) + } ast::Expr::RecordExpr(e) => { let path = e.path().and_then(|path| self.expander.parse_path(self.db, path)).map(Box::new); diff --git a/crates/hir-def/src/body/pretty.rs b/crates/hir-def/src/body/pretty.rs index 162d173d52..10b9b26bbe 100644 --- a/crates/hir-def/src/body/pretty.rs +++ b/crates/hir-def/src/body/pretty.rs @@ -32,7 +32,7 @@ pub(super) fn print_body_hir(db: &dyn DefDatabase, body: &Body, owner: DefWithBo Some(name) => name.to_string(), None => "_".to_string(), }; - format!("const {} = ", name) + format!("const {name} = ") } DefWithBodyId::VariantId(it) => { needs_semi = false; @@ -42,7 +42,7 @@ pub(super) fn print_body_hir(db: &dyn DefDatabase, body: &Body, owner: DefWithBo Some(name) => name.to_string(), None => "_".to_string(), }; - format!("{}", name) + format!("{name}") } }; @@ -247,6 +247,15 @@ impl<'a> Printer<'a> { self.print_expr(*expr); } } + Expr::Yeet { expr } => { + w!(self, "do"); + self.whitespace(); + w!(self, "yeet"); + if let Some(expr) = expr { + self.whitespace(); + self.print_expr(*expr); + } + } Expr::RecordLit { path, fields, spread, ellipsis, is_assignee_expr: _ } => { match path { Some(path) => self.print_path(path), diff --git a/crates/hir-def/src/body/scope.rs b/crates/hir-def/src/body/scope.rs index 45f64ebb06..2617d4288a 100644 --- a/crates/hir-def/src/body/scope.rs +++ b/crates/hir-def/src/body/scope.rs @@ -47,7 +47,7 @@ pub struct ScopeData { impl ExprScopes { pub(crate) fn expr_scopes_query(db: &dyn DefDatabase, def: DefWithBodyId) -> Arc { let body = db.body(def); - let mut scopes = ExprScopes::new(&*body); + let mut scopes = ExprScopes::new(&body); scopes.shrink_to_fit(); Arc::new(scopes) } diff --git a/crates/hir-def/src/builtin_attr.rs b/crates/hir-def/src/builtin_attr.rs index 39581b33a8..f7c1e683d0 100644 --- a/crates/hir-def/src/builtin_attr.rs +++ b/crates/hir-def/src/builtin_attr.rs 
@@ -350,6 +350,7 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[ ), ungated!(rustc_const_unstable, Normal, template!(List: r#"feature = "name""#), DuplicatesOk), ungated!(rustc_const_stable, Normal, template!(List: r#"feature = "name""#), DuplicatesOk), + ungated!(rustc_safe_intrinsic, Normal, template!(List: r#"feature = "name""#), DuplicatesOk), gated!( allow_internal_unstable, Normal, template!(Word, List: "feat1, feat2, ..."), DuplicatesOk, "allow_internal_unstable side-steps feature gating and stability checks", diff --git a/crates/hir-def/src/data.rs b/crates/hir-def/src/data.rs index 9c76969086..e6b05f27a5 100644 --- a/crates/hir-def/src/data.rs +++ b/crates/hir-def/src/data.rs @@ -13,7 +13,9 @@ use crate::{ intern::Interned, item_tree::{self, AssocItem, FnFlags, ItemTree, ItemTreeId, ModItem, Param, TreeId}, nameres::{ - attr_resolution::ResolvedAttr, diagnostics::DefDiagnostic, proc_macro::ProcMacroKind, + attr_resolution::ResolvedAttr, + diagnostics::DefDiagnostic, + proc_macro::{parse_macro_name_and_helper_attrs, ProcMacroKind}, DefMap, }, type_ref::{TraitRef, TypeBound, TypeRef}, @@ -168,6 +170,7 @@ pub struct TypeAliasData { pub type_ref: Option>, pub visibility: RawVisibility, pub is_extern: bool, + pub rustc_has_incoherent_inherent_impls: bool, /// Bounds restricting the type alias itself (eg. `type Ty: Bound;` in a trait or impl). pub bounds: Vec>, } @@ -186,11 +189,17 @@ impl TypeAliasData { item_tree[typ.visibility].clone() }; + let rustc_has_incoherent_inherent_impls = item_tree + .attrs(db, loc.container.module(db).krate(), ModItem::from(loc.id.value).into()) + .by_key("rustc_has_incoherent_inherent_impls") + .exists(); + Arc::new(TypeAliasData { name: typ.name.clone(), type_ref: typ.type_ref.clone(), visibility, is_extern: matches!(loc.container, ItemContainerId::ExternBlockId(_)), + rustc_has_incoherent_inherent_impls, bounds: typ.bounds.to_vec(), }) } @@ -202,6 +211,7 @@ pub struct TraitData { pub items: Vec<(Name, AssocItemId)>, pub is_auto: bool, pub is_unsafe: bool, + pub rustc_has_incoherent_inherent_impls: bool, pub visibility: RawVisibility, /// Whether the trait has `#[rust_skip_array_during_method_dispatch]`. 
`hir_ty` will ignore /// method calls to this trait's methods when the receiver is an array and the crate edition is @@ -224,18 +234,17 @@ impl TraitData { let item_tree = tree_id.item_tree(db); let tr_def = &item_tree[tree_id.value]; let _cx = stdx::panic_context::enter(format!( - "trait_data_query({:?} -> {:?} -> {:?})", - tr, tr_loc, tr_def + "trait_data_query({tr:?} -> {tr_loc:?} -> {tr_def:?})" )); let name = tr_def.name.clone(); let is_auto = tr_def.is_auto; let is_unsafe = tr_def.is_unsafe; let visibility = item_tree[tr_def.visibility].clone(); - let skip_array_during_method_dispatch = item_tree - .attrs(db, module_id.krate(), ModItem::from(tree_id.value).into()) - .by_key("rustc_skip_array_during_method_dispatch") - .exists(); - + let attrs = item_tree.attrs(db, module_id.krate(), ModItem::from(tree_id.value).into()); + let skip_array_during_method_dispatch = + attrs.by_key("rustc_skip_array_during_method_dispatch").exists(); + let rustc_has_incoherent_inherent_impls = + attrs.by_key("rustc_has_incoherent_inherent_impls").exists(); let (items, attribute_calls, diagnostics) = match &tr_def.items { Some(items) => { let mut collector = AssocItemCollector::new( @@ -258,6 +267,7 @@ impl TraitData { is_unsafe, visibility, skip_array_during_method_dispatch, + rustc_has_incoherent_inherent_impls, }), diagnostics.into(), ) @@ -339,6 +349,10 @@ impl ImplData { pub struct Macro2Data { pub name: Name, pub visibility: RawVisibility, + // It's a bit wasteful as currently this is only for builtin `Default` derive macro, but macro2 + // are rarely used in practice so I think it's okay for now. + /// Derive helpers, if this is a derive rustc_builtin_macro + pub helpers: Option>, } impl Macro2Data { @@ -347,9 +361,18 @@ impl Macro2Data { let item_tree = loc.id.item_tree(db); let makro = &item_tree[loc.id.value]; + let helpers = item_tree + .attrs(db, loc.container.krate(), ModItem::from(loc.id.value).into()) + .by_key("rustc_builtin_macro") + .tt_values() + .next() + .and_then(|attr| parse_macro_name_and_helper_attrs(&attr.token_trees)) + .map(|(_, helpers)| helpers); + Arc::new(Macro2Data { name: makro.name.clone(), visibility: item_tree[makro.visibility].clone(), + helpers, }) } } @@ -519,7 +542,7 @@ impl<'a> AssocItemCollector<'a> { if !attrs.is_cfg_enabled(self.expander.cfg_options()) { self.inactive_diagnostics.push(DefDiagnostic::unconfigured_code( self.module_id.local_id, - InFile::new(self.expander.current_file_id(), item.ast_id(&item_tree).upcast()), + InFile::new(self.expander.current_file_id(), item.ast_id(item_tree).upcast()), attrs.cfg().unwrap(), self.expander.cfg_options().clone(), )); @@ -528,7 +551,7 @@ impl<'a> AssocItemCollector<'a> { 'attrs: for attr in &*attrs { let ast_id = - AstId::new(self.expander.current_file_id(), item.ast_id(&item_tree).upcast()); + AstId::new(self.expander.current_file_id(), item.ast_id(item_tree).upcast()); let ast_id_with_path = AstIdWithPath { path: (*attr.path).clone(), ast_id }; if let Ok(ResolvedAttr::Macro(call_id)) = self.def_map.resolve_attr_macro( @@ -595,10 +618,8 @@ impl<'a> AssocItemCollector<'a> { let ast_id_map = self.db.ast_id_map(self.expander.current_file_id()); let call = ast_id_map.get(call.ast_id).to_node(&root); - let _cx = stdx::panic_context::enter(format!( - "collect_items MacroCall: {}", - call - )); + let _cx = + stdx::panic_context::enter(format!("collect_items MacroCall: {call}")); let res = self.expander.enter_expand::(self.db, call); if let Ok(ExpandResult { value: Some((mark, _)), .. 
}) = res { diff --git a/crates/hir-def/src/expr.rs b/crates/hir-def/src/expr.rs index 1626465502..7b65694211 100644 --- a/crates/hir-def/src/expr.rs +++ b/crates/hir-def/src/expr.rs @@ -36,6 +36,13 @@ pub(crate) fn dummy_expr_id() -> ExprId { pub type PatId = Idx; +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub enum ExprOrPatId { + ExprId(ExprId), + PatId(PatId), +} +stdx::impl_from!(ExprId, PatId for ExprOrPatId); + #[derive(Debug, Clone, Eq, PartialEq)] pub struct Label { pub name: Name, @@ -137,6 +144,9 @@ pub enum Expr { Yield { expr: Option, }, + Yeet { + expr: Option, + }, RecordLit { path: Option>, fields: Box<[RecordLitField]>, @@ -313,7 +323,10 @@ impl Expr { arms.iter().map(|arm| arm.expr).for_each(f); } Expr::Continue { .. } => {} - Expr::Break { expr, .. } | Expr::Return { expr } | Expr::Yield { expr } => { + Expr::Break { expr, .. } + | Expr::Return { expr } + | Expr::Yield { expr } + | Expr::Yeet { expr } => { if let &Some(expr) = expr { f(expr); } diff --git a/crates/hir-def/src/find_path.rs b/crates/hir-def/src/find_path.rs index c70e6fdccd..ddd7ad99e9 100644 --- a/crates/hir-def/src/find_path.rs +++ b/crates/hir-def/src/find_path.rs @@ -107,7 +107,7 @@ fn find_path_inner( } // - if the item is in the prelude, return the name from there - if let Some(value) = find_in_prelude(db, &crate_root.def_map(db), item, from) { + if let value @ Some(_) = find_in_prelude(db, &crate_root.def_map(db), &def_map, item, from) { return value; } @@ -176,7 +176,7 @@ fn find_path_for_module( // - if relative paths are fine, check if we are searching for a parent if prefixed.filter(PrefixKind::is_absolute).is_none() { - if let modpath @ Some(_) = find_self_super(&def_map, module_id, from) { + if let modpath @ Some(_) = find_self_super(def_map, module_id, from) { return modpath; } } @@ -205,7 +205,8 @@ fn find_path_for_module( } } - if let Some(value) = find_in_prelude(db, &root_def_map, ItemInNs::Types(module_id.into()), from) + if let value @ Some(_) = + find_in_prelude(db, &root_def_map, &def_map, ItemInNs::Types(module_id.into()), from) { return value; } @@ -234,23 +235,41 @@ fn find_in_scope( }) } +/// Returns single-segment path (i.e. without any prefix) if `item` is found in prelude and its +/// name doesn't clash in current scope. fn find_in_prelude( db: &dyn DefDatabase, root_def_map: &DefMap, + local_def_map: &DefMap, item: ItemInNs, from: ModuleId, -) -> Option> { - if let Some(prelude_module) = root_def_map.prelude() { - // Preludes in block DefMaps are ignored, only the crate DefMap is searched - let prelude_def_map = prelude_module.def_map(db); - let prelude_scope = &prelude_def_map[prelude_module.local_id].scope; - if let Some((name, vis)) = prelude_scope.name_of(item) { - if vis.is_visible_from(db, from) { - return Some(Some(ModPath::from_segments(PathKind::Plain, Some(name.clone())))); - } - } +) -> Option { + let prelude_module = root_def_map.prelude()?; + // Preludes in block DefMaps are ignored, only the crate DefMap is searched + let prelude_def_map = prelude_module.def_map(db); + let prelude_scope = &prelude_def_map[prelude_module.local_id].scope; + let (name, vis) = prelude_scope.name_of(item)?; + if !vis.is_visible_from(db, from) { + return None; + } + + // Check if the name is in current scope and it points to the same def. + let found_and_same_def = + local_def_map.with_ancestor_maps(db, from.local_id, &mut |def_map, local_id| { + let per_ns = def_map[local_id].scope.get(name); + let same_def = match item { + ItemInNs::Types(it) => per_ns.take_types()? 
== it, + ItemInNs::Values(it) => per_ns.take_values()? == it, + ItemInNs::Macros(it) => per_ns.take_macros()? == it, + }; + Some(same_def) + }); + + if found_and_same_def.unwrap_or(true) { + Some(ModPath::from_segments(PathKind::Plain, Some(name.clone()))) + } else { + None } - None } fn find_self_super(def_map: &DefMap, item: ModuleId, from: ModuleId) -> Option { @@ -512,7 +531,7 @@ mod tests { fn check_found_path_(ra_fixture: &str, path: &str, prefix_kind: Option) { let (db, pos) = TestDB::with_position(ra_fixture); let module = db.module_at_position(pos); - let parsed_path_file = syntax::SourceFile::parse(&format!("use {};", path)); + let parsed_path_file = syntax::SourceFile::parse(&format!("use {path};")); let ast_path = parsed_path_file.syntax_node().descendants().find_map(syntax::ast::Path::cast).unwrap(); let mod_path = ModPath::from_src(&db, ast_path, &Hygiene::new_unhygienic()).unwrap(); @@ -531,7 +550,7 @@ mod tests { let found_path = find_path_inner(&db, ItemInNs::Types(resolved), module, prefix_kind, false); - assert_eq!(found_path, Some(mod_path), "{:?}", prefix_kind); + assert_eq!(found_path, Some(mod_path), "{prefix_kind:?}"); } fn check_found_path( @@ -808,6 +827,48 @@ pub mod prelude { ); } + #[test] + fn shadowed_prelude() { + check_found_path( + r#" +//- /main.rs crate:main deps:std +struct S; +$0 +//- /std.rs crate:std +pub mod prelude { + pub mod rust_2018 { + pub struct S; + } +} +"#, + "std::prelude::rust_2018::S", + "std::prelude::rust_2018::S", + "std::prelude::rust_2018::S", + "std::prelude::rust_2018::S", + ); + } + + #[test] + fn imported_prelude() { + check_found_path( + r#" +//- /main.rs crate:main deps:std +use S; +$0 +//- /std.rs crate:std +pub mod prelude { + pub mod rust_2018 { + pub struct S; + } +} +"#, + "S", + "S", + "S", + "S", + ); + } + #[test] fn enum_variant_from_prelude() { let code = r#" diff --git a/crates/hir-def/src/generics.rs b/crates/hir-def/src/generics.rs index 469b28c2d9..f74559f5d6 100644 --- a/crates/hir-def/src/generics.rs +++ b/crates/hir-def/src/generics.rs @@ -142,8 +142,8 @@ pub enum WherePredicateTypeTarget { impl GenericParams { /// Iterator of type_or_consts field - pub fn iter<'a>( - &'a self, + pub fn iter( + &self, ) -> impl DoubleEndedIterator, &TypeOrConstParamData)> { self.type_or_consts.iter() } diff --git a/crates/hir-def/src/import_map.rs b/crates/hir-def/src/import_map.rs index 688055e430..1ce191942e 100644 --- a/crates/hir-def/src/import_map.rs +++ b/crates/hir-def/src/import_map.rs @@ -239,7 +239,7 @@ impl fmt::Debug for ImportMap { ItemInNs::Values(_) => "v", ItemInNs::Macros(_) => "m", }; - format!("- {} ({})", info.path, ns) + format!("- {} ({ns})", info.path) }) .collect(); @@ -389,12 +389,12 @@ impl Query { /// Searches dependencies of `krate` for an importable path matching `query`. /// /// This returns a list of items that could be imported from dependencies of `krate`. 
-pub fn search_dependencies<'a>( - db: &'a dyn DefDatabase, +pub fn search_dependencies( + db: &dyn DefDatabase, krate: CrateId, query: Query, ) -> FxHashSet { - let _p = profile::span("search_dependencies").detail(|| format!("{:?}", query)); + let _p = profile::span("search_dependencies").detail(|| format!("{query:?}")); let graph = db.crate_graph(); let import_maps: Vec<_> = @@ -545,7 +545,7 @@ mod tests { None } })?; - return Some(format!("{}::{}", dependency_imports.path_of(trait_)?, assoc_item_name)); + return Some(format!("{}::{assoc_item_name}", dependency_imports.path_of(trait_)?)); } None } @@ -585,7 +585,7 @@ mod tests { let map = db.import_map(krate); - Some(format!("{}:\n{:?}\n", name, map)) + Some(format!("{name}:\n{map:?}\n")) }) .sorted() .collect::(); diff --git a/crates/hir-def/src/item_scope.rs b/crates/hir-def/src/item_scope.rs index 7721221c44..c7b213b7e9 100644 --- a/crates/hir-def/src/item_scope.rs +++ b/crates/hir-def/src/item_scope.rs @@ -96,7 +96,7 @@ pub(crate) enum BuiltinShadowMode { /// Legacy macros can only be accessed through special methods like `get_legacy_macros`. /// Other methods will only resolve values, types and module scoped macros only. impl ItemScope { - pub fn entries<'a>(&'a self) -> impl Iterator + 'a { + pub fn entries(&self) -> impl Iterator + '_ { // FIXME: shadowing self.types .keys() @@ -159,18 +159,17 @@ impl ItemScope { pub(crate) fn name_of(&self, item: ItemInNs) -> Option<(&Name, Visibility)> { let (def, mut iter) = match item { ItemInNs::Macros(def) => { - return self - .macros - .iter() - .find_map(|(name, &(other_def, vis))| (other_def == def).then(|| (name, vis))); + return self.macros.iter().find_map(|(name, &(other_def, vis))| { + (other_def == def).then_some((name, vis)) + }); } ItemInNs::Types(def) => (def, self.types.iter()), ItemInNs::Values(def) => (def, self.values.iter()), }; - iter.find_map(|(name, &(other_def, vis))| (other_def == def).then(|| (name, vis))) + iter.find_map(|(name, &(other_def, vis))| (other_def == def).then_some((name, vis))) } - pub(crate) fn traits<'a>(&'a self) -> impl Iterator + 'a { + pub(crate) fn traits(&self) -> impl Iterator + '_ { self.types .values() .filter_map(|&(def, _)| match def { @@ -327,7 +326,7 @@ impl ItemScope { changed } - pub(crate) fn resolutions<'a>(&'a self) -> impl Iterator, PerNs)> + 'a { + pub(crate) fn resolutions(&self) -> impl Iterator, PerNs)> + '_ { self.entries().map(|(name, res)| (Some(name.clone()), res)).chain( self.unnamed_trait_imports .iter() diff --git a/crates/hir-def/src/item_tree.rs b/crates/hir-def/src/item_tree.rs index 0aa531eff7..80297f8adf 100644 --- a/crates/hir-def/src/item_tree.rs +++ b/crates/hir-def/src/item_tree.rs @@ -105,7 +105,7 @@ pub struct ItemTree { impl ItemTree { pub(crate) fn file_item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) -> Arc { - let _p = profile::span("file_item_tree_query").detail(|| format!("{:?}", file_id)); + let _p = profile::span("file_item_tree_query").detail(|| format!("{file_id:?}")); let syntax = match db.parse_or_expand(file_id) { Some(node) => node, None => return Default::default(), @@ -132,7 +132,7 @@ impl ItemTree { ctx.lower_macro_stmts(stmts) }, _ => { - panic!("cannot create item tree from {:?} {}", syntax, syntax); + panic!("cannot create item tree from {syntax:?} {syntax}"); }, } }; diff --git a/crates/hir-def/src/layout.rs b/crates/hir-def/src/layout.rs new file mode 100644 index 0000000000..6bb4cd94f8 --- /dev/null +++ b/crates/hir-def/src/layout.rs @@ -0,0 +1,96 @@ +//! 
Definitions needed for computing data layout of types. + +use std::cmp; + +use la_arena::{Idx, RawIdx}; +pub use rustc_abi::{ + Abi, AbiAndPrefAlign, AddressSpace, Align, Endian, FieldsShape, Integer, IntegerType, + LayoutCalculator, Niche, Primitive, ReprFlags, ReprOptions, Scalar, Size, StructKind, + TargetDataLayout, TargetDataLayoutErrors, WrappingRange, +}; + +use crate::LocalEnumVariantId; + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct RustcEnumVariantIdx(pub LocalEnumVariantId); + +impl rustc_index::vec::Idx for RustcEnumVariantIdx { + fn new(idx: usize) -> Self { + RustcEnumVariantIdx(Idx::from_raw(RawIdx::from(idx as u32))) + } + + fn index(self) -> usize { + u32::from(self.0.into_raw()) as usize + } +} + +pub type Layout = rustc_abi::LayoutS; +pub type TagEncoding = rustc_abi::TagEncoding; +pub type Variants = rustc_abi::Variants; + +pub trait IntegerExt { + fn repr_discr( + dl: &TargetDataLayout, + repr: &ReprOptions, + min: i128, + max: i128, + ) -> Result<(Integer, bool), LayoutError>; +} + +impl IntegerExt for Integer { + /// Finds the appropriate Integer type and signedness for the given + /// signed discriminant range and `#[repr]` attribute. + /// N.B.: `u128` values above `i128::MAX` will be treated as signed, but + /// that shouldn't affect anything, other than maybe debuginfo. + fn repr_discr( + dl: &TargetDataLayout, + repr: &ReprOptions, + min: i128, + max: i128, + ) -> Result<(Integer, bool), LayoutError> { + // Theoretically, negative values could be larger in unsigned representation + // than the unsigned representation of the signed minimum. However, if there + // are any negative values, the only valid unsigned representation is u128 + // which can fit all i128 values, so the result remains unaffected. + let unsigned_fit = Integer::fit_unsigned(cmp::max(min as u128, max as u128)); + let signed_fit = cmp::max(Integer::fit_signed(min), Integer::fit_signed(max)); + + if let Some(ity) = repr.int { + let discr = Integer::from_attr(dl, ity); + let fit = if ity.is_signed() { signed_fit } else { unsigned_fit }; + if discr < fit { + return Err(LayoutError::UserError( + "Integer::repr_discr: `#[repr]` hint too small for \ + discriminant range of enum " + .to_string(), + )); + } + return Ok((discr, ity.is_signed())); + } + + let at_least = if repr.c() { + // This is usually I32, however it can be different on some platforms, + // notably hexagon and arm-none/thumb-none + dl.c_enum_min_size + } else { + // repr(Rust) enums try to be as small as possible + Integer::I8 + }; + + // If there are no negative values, we can use the unsigned fit. 
+ Ok(if min >= 0 { + (cmp::max(unsigned_fit, at_least), false) + } else { + (cmp::max(signed_fit, at_least), true) + }) + } +} + +#[derive(Debug, PartialEq, Eq, Clone)] +pub enum LayoutError { + UserError(String), + SizeOverflow, + HasPlaceholder, + NotImplemented, + Unknown, +} diff --git a/crates/hir-def/src/lib.rs b/crates/hir-def/src/lib.rs index 5c7aa72349..8267ef09cb 100644 --- a/crates/hir-def/src/lib.rs +++ b/crates/hir-def/src/lib.rs @@ -34,6 +34,7 @@ pub mod adt; pub mod data; pub mod generics; pub mod lang_item; +pub mod layout; pub mod expr; pub mod body; diff --git a/crates/hir-def/src/macro_expansion_tests.rs b/crates/hir-def/src/macro_expansion_tests.rs index 81b9c5c4bf..79c85d1183 100644 --- a/crates/hir-def/src/macro_expansion_tests.rs +++ b/crates/hir-def/src/macro_expansion_tests.rs @@ -170,7 +170,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream } let pp = pretty_print_macro_expansion( parse.syntax_node(), - show_token_ids.then(|| &*token_map), + show_token_ids.then_some(&*token_map), ); let indent = IndentLevel::from_node(call.syntax()); let pp = reindent(indent, pp); @@ -179,7 +179,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream if tree { let tree = format!("{:#?}", parse.syntax_node()) .split_inclusive('\n') - .map(|line| format!("// {}", line)) + .map(|line| format!("// {line}")) .collect::(); format_to!(expn_text, "\n{}", tree) } diff --git a/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs b/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs index c04cd16519..bb45266725 100644 --- a/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs +++ b/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs @@ -163,7 +163,8 @@ macro_rules! compile_error { } // This expands to nothing (since it's in item position), but emits an error. -compile_error!("error!"); +compile_error!("error, with an escaped quote: \""); +compile_error!(r"this is a raw string"); "#, expect![[r##" #[rustc_builtin_macro] @@ -172,7 +173,8 @@ macro_rules! compile_error { ($msg:expr,) => ({ /* compiler built-in */ }) } -/* error: error! */ +/* error: error, with an escaped quote: " */ +/* error: this is a raw string */ "##]], ); } diff --git a/crates/hir-def/src/macro_expansion_tests/mbe.rs b/crates/hir-def/src/macro_expansion_tests/mbe.rs index 457e43925c..2d5f2a692e 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe.rs @@ -1630,3 +1630,48 @@ const _: i32 = -0--1--2; "#]], ); } + +#[test] +fn test_punct_without_space() { + // Puncts are "glued" greedily. + check( + r#" +macro_rules! foo { + (: : :) => { "1 1 1" }; + (: ::) => { "1 2" }; + (:: :) => { "2 1" }; + + (: : : :) => { "1 1 1 1" }; + (:: : :) => { "2 1 1" }; + (: :: :) => { "1 2 1" }; + (: : ::) => { "1 1 2" }; + (:: ::) => { "2 2" }; +} + +fn test() { + foo!(:::); + foo!(: :::); + foo!(::::); +} +"#, + expect![[r#" +macro_rules! 
foo { + (: : :) => { "1 1 1" }; + (: ::) => { "1 2" }; + (:: :) => { "2 1" }; + + (: : : :) => { "1 1 1 1" }; + (:: : :) => { "2 1 1" }; + (: :: :) => { "1 2 1" }; + (: : ::) => { "1 1 2" }; + (:: ::) => { "2 2" }; +} + +fn test() { + "2 1"; + "1 2 1"; + "2 2"; +} +"#]], + ); +} diff --git a/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs b/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs index fc90c6e9f3..26f16542cb 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs @@ -136,3 +136,52 @@ macro_rules! m { ($($i:ident)? $vis:vis) => () } "#]], ) } + +// For this test and the one below, see rust-lang/rust#86730. +#[test] +fn expr_dont_match_let_expr() { + check( + r#" +macro_rules! foo { + ($e:expr) => { $e } +} + +fn test() { + foo!(let a = 3); +} +"#, + expect![[r#" +macro_rules! foo { + ($e:expr) => { $e } +} + +fn test() { + /* error: no rule matches input tokens */missing; +} +"#]], + ); +} + +#[test] +fn expr_dont_match_inline_const() { + check( + r#" +macro_rules! foo { + ($e:expr) => { $e } +} + +fn test() { + foo!(const { 3 }); +} +"#, + expect![[r#" +macro_rules! foo { + ($e:expr) => { $e } +} + +fn test() { + /* error: no rule matches input tokens */missing; +} +"#]], + ); +} diff --git a/crates/hir-def/src/nameres.rs b/crates/hir-def/src/nameres.rs index 9b4ce9f97c..f42b0079d7 100644 --- a/crates/hir-def/src/nameres.rs +++ b/crates/hir-def/src/nameres.rs @@ -457,7 +457,7 @@ impl DefMap { for (name, child) in map.modules[module].children.iter().sorted_by(|a, b| Ord::cmp(&a.0, &b.0)) { - let path = format!("{}::{}", path, name); + let path = format!("{path}::{name}"); buf.push('\n'); go(buf, map, &path, *child); } diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs index b0dd01f9db..160203b778 100644 --- a/crates/hir-def/src/nameres/collector.rs +++ b/crates/hir-def/src/nameres/collector.rs @@ -40,7 +40,7 @@ use crate::{ diagnostics::DefDiagnostic, mod_resolution::ModDir, path_resolution::ReachedFixedPoint, - proc_macro::{ProcMacroDef, ProcMacroKind}, + proc_macro::{parse_macro_name_and_helper_attrs, ProcMacroDef, ProcMacroKind}, BuiltinShadowMode, DefMap, ModuleData, ModuleOrigin, ResolveMode, }, path::{ImportAlias, ModPath, PathKind}, @@ -67,7 +67,7 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, mut def_map: DefMap, tree_id: T let dep_def_map = db.crate_def_map(dep.crate_id); let dep_root = dep_def_map.module_id(dep_def_map.root); - deps.insert(dep.as_name(), dep_root.into()); + deps.insert(dep.as_name(), dep_root); if dep.is_prelude() && !tree_id.is_block() { def_map.extern_prelude.insert(dep.as_name(), dep_root); @@ -1017,7 +1017,7 @@ impl DefCollector<'_> { None => true, Some(old_vis) => { let max_vis = old_vis.max(vis, &self.def_map).unwrap_or_else(|| { - panic!("`Tr as _` imports with unrelated visibilities {:?} and {:?} (trait {:?})", old_vis, vis, tr); + panic!("`Tr as _` imports with unrelated visibilities {old_vis:?} and {vis:?} (trait {tr:?})"); }); if max_vis == old_vis { @@ -1094,7 +1094,7 @@ impl DefCollector<'_> { ast_id, *expand_to, self.def_map.krate, - &resolver_def_id, + resolver_def_id, &mut |_err| (), ); if let Ok(Ok(call_id)) = call_id { @@ -1110,7 +1110,7 @@ impl DefCollector<'_> { *derive_attr, *derive_pos as u32, self.def_map.krate, - &resolver, + resolver, ); if let Ok((macro_id, def_id, call_id)) = id { @@ -1345,7 +1345,7 @@ impl DefCollector<'_> { // Missing proc macros are non-fatal, so they are handled 
specially. DefDiagnostic::unresolved_proc_macro(module_id, loc.kind.clone(), loc.def.krate) } - _ => DefDiagnostic::macro_error(module_id, loc.kind.clone(), err.to_string()), + _ => DefDiagnostic::macro_error(module_id, loc.kind, err.to_string()), }; self.def_map.diagnostics.push(diag); @@ -2005,6 +2005,7 @@ impl ModCollector<'_, '_> { let ast_id = InFile::new(self.file_id(), mac.ast_id.upcast()); // Case 1: builtin macros + let mut helpers_opt = None; let attrs = self.item_tree.attrs(self.def_collector.db, krate, ModItem::from(id).into()); let expander = if attrs.by_key("rustc_builtin_macro").exists() { if let Some(expander) = find_builtin_macro(&mac.name) { @@ -2013,6 +2014,25 @@ impl ModCollector<'_, '_> { Either::Right(it) => MacroExpander::BuiltInEager(it), } } else if let Some(expander) = find_builtin_derive(&mac.name) { + if let Some(attr) = attrs.by_key("rustc_builtin_macro").tt_values().next() { + // NOTE: The item *may* have both `#[rustc_builtin_macro]` and `#[proc_macro_derive]`, + // in which case rustc ignores the helper attributes from the latter, but it + // "doesn't make sense in practice" (see rust-lang/rust#87027). + if let Some((name, helpers)) = + parse_macro_name_and_helper_attrs(&attr.token_trees) + { + // NOTE: rustc overrides the name if the macro name if it's different from the + // macro name, but we assume it isn't as there's no such case yet. FIXME if + // the following assertion fails. + stdx::always!( + name == mac.name, + "built-in macro {} has #[rustc_builtin_macro] which declares different name {}", + mac.name, + name + ); + helpers_opt = Some(helpers); + } + } MacroExpander::BuiltInDerive(expander) } else if let Some(expander) = find_builtin_attr(&mac.name) { MacroExpander::BuiltInAttr(expander) @@ -2037,6 +2057,12 @@ impl ModCollector<'_, '_> { macro_id, &self.item_tree[mac.visibility], ); + if let Some(helpers) = helpers_opt { + self.def_collector + .def_map + .exported_derives + .insert(macro_id_to_def_id(self.def_collector.db, macro_id.into()), helpers); + } } fn collect_macro_call(&mut self, mac: &MacroCall, container: ItemContainerId) { @@ -2059,7 +2085,7 @@ impl ModCollector<'_, '_> { .scope .get_legacy_macro(name) .and_then(|it| it.last()) - .map(|&it| macro_id_to_def_id(self.def_collector.db, it.into())) + .map(|&it| macro_id_to_def_id(self.def_collector.db, it)) }, ) }) diff --git a/crates/hir-def/src/nameres/mod_resolution.rs b/crates/hir-def/src/nameres/mod_resolution.rs index ca7bcc814e..4c263846d2 100644 --- a/crates/hir-def/src/nameres/mod_resolution.rs +++ b/crates/hir-def/src/nameres/mod_resolution.rs @@ -34,7 +34,7 @@ impl ModDir { let path = match attr_path.map(SmolStr::as_str) { None => { let mut path = self.dir_path.clone(); - path.push(&name.to_smol_str()); + path.push(&name.unescaped().to_smol_str()); path } Some(attr_path) => { @@ -74,12 +74,12 @@ impl ModDir { candidate_files.push(self.dir_path.join_attr(attr_path, self.root_non_dir_owner)) } None if file_id.is_include_macro(db.upcast()) => { - candidate_files.push(format!("{}.rs", name)); - candidate_files.push(format!("{}/mod.rs", name)); + candidate_files.push(format!("{name}.rs")); + candidate_files.push(format!("{name}/mod.rs")); } None => { - candidate_files.push(format!("{}{}.rs", self.dir_path.0, name)); - candidate_files.push(format!("{}{}/mod.rs", self.dir_path.0, name)); + candidate_files.push(format!("{}{name}.rs", self.dir_path.0)); + candidate_files.push(format!("{}{name}/mod.rs", self.dir_path.0)); } }; @@ -91,7 +91,7 @@ impl ModDir { let (dir_path, 
root_non_dir_owner) = if is_mod_rs || attr_path.is_some() { (DirPath::empty(), false) } else { - (DirPath::new(format!("{}/", name)), true) + (DirPath::new(format!("{name}/")), true) }; if let Some(mod_dir) = self.child(dir_path, root_non_dir_owner) { return Ok((file_id, is_mod_rs, mod_dir)); @@ -156,7 +156,7 @@ impl DirPath { } else { attr }; - let res = format!("{}{}", base, attr); + let res = format!("{base}{attr}"); res } } diff --git a/crates/hir-def/src/nameres/path_resolution.rs b/crates/hir-def/src/nameres/path_resolution.rs index 20d39ec6cb..1d9d5cccde 100644 --- a/crates/hir-def/src/nameres/path_resolution.rs +++ b/crates/hir-def/src/nameres/path_resolution.rs @@ -170,8 +170,8 @@ impl DefMap { ) -> ResolvePathResult { let graph = db.crate_graph(); let _cx = stdx::panic_context::enter(format!( - "DefMap {:?} crate_name={:?} block={:?} path={}", - self.krate, graph[self.krate].display_name, self.block, path + "DefMap {:?} crate_name={:?} block={:?} path={path}", + self.krate, graph[self.krate].display_name, self.block )); let mut segments = path.segments().iter().enumerate(); @@ -390,7 +390,7 @@ impl DefMap { .get_legacy_macro(name) // FIXME: shadowing .and_then(|it| it.last()) - .map_or_else(PerNs::none, |&m| PerNs::macros(m.into(), Visibility::Public)); + .map_or_else(PerNs::none, |&m| PerNs::macros(m, Visibility::Public)); let from_scope = self[module].scope.get(name); let from_builtin = match self.block { Some(_) => { diff --git a/crates/hir-def/src/nameres/proc_macro.rs b/crates/hir-def/src/nameres/proc_macro.rs index 52b79cd0fd..06b23392cf 100644 --- a/crates/hir-def/src/nameres/proc_macro.rs +++ b/crates/hir-def/src/nameres/proc_macro.rs @@ -37,45 +37,53 @@ impl Attrs { Some(ProcMacroDef { name: func_name.clone(), kind: ProcMacroKind::Attr }) } else if self.by_key("proc_macro_derive").exists() { let derive = self.by_key("proc_macro_derive").tt_values().next()?; + let def = parse_macro_name_and_helper_attrs(&derive.token_trees) + .map(|(name, helpers)| ProcMacroDef { name, kind: ProcMacroKind::CustomDerive { helpers } }); - match &*derive.token_trees { - // `#[proc_macro_derive(Trait)]` - [TokenTree::Leaf(Leaf::Ident(trait_name))] => Some(ProcMacroDef { - name: trait_name.as_name(), - kind: ProcMacroKind::CustomDerive { helpers: Box::new([]) }, - }), - - // `#[proc_macro_derive(Trait, attributes(helper1, helper2, ...))]` - [ - TokenTree::Leaf(Leaf::Ident(trait_name)), - TokenTree::Leaf(Leaf::Punct(comma)), - TokenTree::Leaf(Leaf::Ident(attributes)), - TokenTree::Subtree(helpers) - ] if comma.char == ',' && attributes.text == "attributes" => - { - let helpers = helpers.token_trees.iter() - .filter(|tt| !matches!(tt, TokenTree::Leaf(Leaf::Punct(comma)) if comma.char == ',')) - .map(|tt| { - match tt { - TokenTree::Leaf(Leaf::Ident(helper)) => Some(helper.as_name()), - _ => None - } - }) - .collect::>>()?; - - Some(ProcMacroDef { - name: trait_name.as_name(), - kind: ProcMacroKind::CustomDerive { helpers }, - }) - } - - _ => { - tracing::trace!("malformed `#[proc_macro_derive]`: {}", derive); - None - } + if def.is_none() { + tracing::trace!("malformed `#[proc_macro_derive]`: {}", derive); } + + def } else { None } } } + +// This fn is intended for `#[proc_macro_derive(..)]` and `#[rustc_builtin_macro(..)]`, which have +// the same strucuture. 
+#[rustfmt::skip] +pub(crate) fn parse_macro_name_and_helper_attrs(tt: &[TokenTree]) -> Option<(Name, Box<[Name]>)> { + match tt { + // `#[proc_macro_derive(Trait)]` + // `#[rustc_builtin_macro(Trait)]` + [TokenTree::Leaf(Leaf::Ident(trait_name))] => Some((trait_name.as_name(), Box::new([]))), + + // `#[proc_macro_derive(Trait, attributes(helper1, helper2, ...))]` + // `#[rustc_builtin_macro(Trait, attributes(helper1, helper2, ...))]` + [ + TokenTree::Leaf(Leaf::Ident(trait_name)), + TokenTree::Leaf(Leaf::Punct(comma)), + TokenTree::Leaf(Leaf::Ident(attributes)), + TokenTree::Subtree(helpers) + ] if comma.char == ',' && attributes.text == "attributes" => + { + let helpers = helpers + .token_trees + .iter() + .filter( + |tt| !matches!(tt, TokenTree::Leaf(Leaf::Punct(comma)) if comma.char == ','), + ) + .map(|tt| match tt { + TokenTree::Leaf(Leaf::Ident(helper)) => Some(helper.as_name()), + _ => None, + }) + .collect::>>()?; + + Some((trait_name.as_name(), helpers)) + } + + _ => None, + } +} diff --git a/crates/hir-def/src/nameres/tests/incremental.rs b/crates/hir-def/src/nameres/tests/incremental.rs index 2e8cb3621f..f5190b76db 100644 --- a/crates/hir-def/src/nameres/tests/incremental.rs +++ b/crates/hir-def/src/nameres/tests/incremental.rs @@ -13,7 +13,7 @@ fn check_def_map_is_not_recomputed(ra_fixture_initial: &str, ra_fixture_change: let events = db.log_executed(|| { db.crate_def_map(krate); }); - assert!(format!("{:?}", events).contains("crate_def_map"), "{:#?}", events) + assert!(format!("{events:?}").contains("crate_def_map"), "{events:#?}") } db.set_file_text(pos.file_id, Arc::new(ra_fixture_change.to_string())); @@ -21,7 +21,7 @@ fn check_def_map_is_not_recomputed(ra_fixture_initial: &str, ra_fixture_change: let events = db.log_executed(|| { db.crate_def_map(krate); }); - assert!(!format!("{:?}", events).contains("crate_def_map"), "{:#?}", events) + assert!(!format!("{events:?}").contains("crate_def_map"), "{events:#?}") } } @@ -94,7 +94,7 @@ fn typing_inside_a_macro_should_not_invalidate_def_map() { let (_, module_data) = crate_def_map.modules.iter().last().unwrap(); assert_eq!(module_data.scope.resolutions().count(), 1); }); - assert!(format!("{:?}", events).contains("crate_def_map"), "{:#?}", events) + assert!(format!("{events:?}").contains("crate_def_map"), "{events:#?}") } db.set_file_text(pos.file_id, Arc::new("m!(Y);".to_string())); @@ -104,7 +104,7 @@ fn typing_inside_a_macro_should_not_invalidate_def_map() { let (_, module_data) = crate_def_map.modules.iter().last().unwrap(); assert_eq!(module_data.scope.resolutions().count(), 1); }); - assert!(!format!("{:?}", events).contains("crate_def_map"), "{:#?}", events) + assert!(!format!("{events:?}").contains("crate_def_map"), "{events:#?}") } } diff --git a/crates/hir-def/src/nameres/tests/macros.rs b/crates/hir-def/src/nameres/tests/macros.rs index 3ece1379ad..fe0ad4f386 100644 --- a/crates/hir-def/src/nameres/tests/macros.rs +++ b/crates/hir-def/src/nameres/tests/macros.rs @@ -822,6 +822,28 @@ fn derive() {} ); } +#[test] +fn resolves_derive_helper_rustc_builtin_macro() { + cov_mark::check!(resolved_derive_helper); + // This is NOT the correct usage of `default` helper attribute, but we don't resolve helper + // attributes on non mod items in hir nameres. 
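For reference, an illustrative sketch (hypothetical `MyTrait`, `MyOther`, and `my_helper` names; not part of this patch) of the two `#[proc_macro_derive]` shapes that the new `parse_macro_name_and_helper_attrs` helper recognizes — the same two shapes are now also accepted inside `#[rustc_builtin_macro(..)]`:

// Illustrative only: a proc-macro crate with made-up derive names.
use proc_macro::TokenStream;

// Shape 1: `#[proc_macro_derive(Trait)]` — just a trait name, no helper attributes.
#[proc_macro_derive(MyTrait)]
pub fn my_trait_derive(_input: TokenStream) -> TokenStream {
    TokenStream::new()
}

// Shape 2: `#[proc_macro_derive(Trait, attributes(helper1, ...))]` — the listed
// helpers (here `my_helper`) become inert attributes usable under `#[derive(MyOther)]`.
#[proc_macro_derive(MyOther, attributes(my_helper))]
pub fn my_other_derive(_input: TokenStream) -> TokenStream {
    TokenStream::new()
}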
+ check( + r#" +//- minicore: derive, default +#[derive(Default)] +#[default] +enum E { + A, + B, +} +"#, + expect![[r#" + crate + E: t + "#]], + ); +} + #[test] fn unresolved_attr_with_cfg_attr_hang() { // Another regression test for https://github.com/rust-lang/rust-analyzer/issues/8905 diff --git a/crates/hir-def/src/nameres/tests/mod_resolution.rs b/crates/hir-def/src/nameres/tests/mod_resolution.rs index c575bf7cac..a019312884 100644 --- a/crates/hir-def/src/nameres/tests/mod_resolution.rs +++ b/crates/hir-def/src/nameres/tests/mod_resolution.rs @@ -156,6 +156,43 @@ pub struct Baz; ); } +#[test] +fn module_resolution_works_for_inline_raw_modules() { + check( + r#" +//- /lib.rs +mod r#async { + pub mod a; + pub mod r#async; +} +use self::r#async::a::Foo; +use self::r#async::r#async::Bar; + +//- /async/a.rs +pub struct Foo; + +//- /async/async.rs +pub struct Bar; +"#, + expect![[r#" + crate + Bar: t v + Foo: t v + r#async: t + + crate::r#async + a: t + r#async: t + + crate::r#async::a + Foo: t v + + crate::r#async::r#async + Bar: t v + "#]], + ); +} + #[test] fn module_resolution_decl_path() { check( diff --git a/crates/hir-def/src/pretty.rs b/crates/hir-def/src/pretty.rs index 933970d10e..befd0c5ffa 100644 --- a/crates/hir-def/src/pretty.rs +++ b/crates/hir-def/src/pretty.rs @@ -92,7 +92,7 @@ pub(crate) fn print_generic_args(generics: &GenericArgs, buf: &mut dyn Write) -> pub(crate) fn print_generic_arg(arg: &GenericArg, buf: &mut dyn Write) -> fmt::Result { match arg { GenericArg::Type(ty) => print_type_ref(ty, buf), - GenericArg::Const(c) => write!(buf, "{}", c), + GenericArg::Const(c) => write!(buf, "{c}"), GenericArg::Lifetime(lt) => write!(buf, "{}", lt.name), } } @@ -118,7 +118,7 @@ pub(crate) fn print_type_ref(type_ref: &TypeRef, buf: &mut dyn Write) -> fmt::Re Mutability::Shared => "*const", Mutability::Mut => "*mut", }; - write!(buf, "{} ", mtbl)?; + write!(buf, "{mtbl} ")?; print_type_ref(pointee, buf)?; } TypeRef::Reference(pointee, lt, mtbl) => { @@ -130,13 +130,13 @@ pub(crate) fn print_type_ref(type_ref: &TypeRef, buf: &mut dyn Write) -> fmt::Re if let Some(lt) = lt { write!(buf, "{} ", lt.name)?; } - write!(buf, "{}", mtbl)?; + write!(buf, "{mtbl}")?; print_type_ref(pointee, buf)?; } TypeRef::Array(elem, len) => { write!(buf, "[")?; print_type_ref(elem, buf)?; - write!(buf, "; {}]", len)?; + write!(buf, "; {len}]")?; } TypeRef::Slice(elem) => { write!(buf, "[")?; diff --git a/crates/hir-def/src/resolver.rs b/crates/hir-def/src/resolver.rs index 070f683713..1ef7f9577f 100644 --- a/crates/hir-def/src/resolver.rs +++ b/crates/hir-def/src/resolver.rs @@ -381,7 +381,7 @@ impl Resolver { }); def_map[module_id].scope.legacy_macros().for_each(|(name, macs)| { macs.iter().for_each(|&mac| { - res.add(name, ScopeDef::ModuleDef(ModuleDefId::MacroId(MacroId::from(mac)))); + res.add(name, ScopeDef::ModuleDef(ModuleDefId::MacroId(mac))); }) }); def_map.extern_prelude().for_each(|(name, &def)| { @@ -517,10 +517,7 @@ impl Scope { }); m.def_map[m.module_id].scope.legacy_macros().for_each(|(name, macs)| { macs.iter().for_each(|&mac| { - acc.add( - name, - ScopeDef::ModuleDef(ModuleDefId::MacroId(MacroId::from(mac))), - ); + acc.add(name, ScopeDef::ModuleDef(ModuleDefId::MacroId(mac))); }) }); } diff --git a/crates/hir-expand/src/builtin_attr_macro.rs b/crates/hir-expand/src/builtin_attr_macro.rs index 0c886ac4da..58d192f9fe 100644 --- a/crates/hir-expand/src/builtin_attr_macro.rs +++ b/crates/hir-expand/src/builtin_attr_macro.rs @@ -115,7 +115,8 @@ pub fn pseudo_derive_attr_expansion( }; 
let mut token_trees = Vec::new(); - for tt in (&args.token_trees) + for tt in args + .token_trees .split(|tt| matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', .. })))) { token_trees.push(mk_leaf('#')); diff --git a/crates/hir-expand/src/builtin_fn_macro.rs b/crates/hir-expand/src/builtin_fn_macro.rs index 7b19518e25..5522bdf3b3 100644 --- a/crates/hir-expand/src/builtin_fn_macro.rs +++ b/crates/hir-expand/src/builtin_fn_macro.rs @@ -379,15 +379,10 @@ fn compile_error_expand( tt: &tt::Subtree, ) -> ExpandResult { let err = match &*tt.token_trees { - [tt::TokenTree::Leaf(tt::Leaf::Literal(it))] => { - let text = it.text.as_str(); - if text.starts_with('"') && text.ends_with('"') { - // FIXME: does not handle raw strings - ExpandError::Other(text[1..text.len() - 1].into()) - } else { - ExpandError::Other("`compile_error!` argument must be a string".into()) - } - } + [tt::TokenTree::Leaf(tt::Leaf::Literal(it))] => match unquote_str(it) { + Some(unquoted) => ExpandError::Other(unquoted.into()), + None => ExpandError::Other("`compile_error!` argument must be a string".into()), + }, _ => ExpandError::Other("`compile_error!` argument must be a string".into()), }; @@ -454,7 +449,7 @@ fn concat_bytes_expand( match token.kind() { syntax::SyntaxKind::BYTE => bytes.push(token.text().to_string()), syntax::SyntaxKind::BYTE_STRING => { - let components = unquote_byte_string(lit).unwrap_or_else(Vec::new); + let components = unquote_byte_string(lit).unwrap_or_default(); components.into_iter().for_each(|x| bytes.push(x.to_string())); } _ => { @@ -676,7 +671,7 @@ fn option_env_expand( let expanded = match get_env_inner(db, arg_id, &key) { None => quote! { #DOLLAR_CRATE::option::Option::None::<&str> }, - Some(s) => quote! { #DOLLAR_CRATE::option::Some(#s) }, + Some(s) => quote! 
{ #DOLLAR_CRATE::option::Option::Some(#s) }, }; ExpandResult::ok(ExpandedEager::new(expanded)) diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs index 87e4db0398..b28e60187d 100644 --- a/crates/hir-expand/src/db.rs +++ b/crates/hir-expand/src/db.rs @@ -240,7 +240,7 @@ fn ast_id_map(db: &dyn AstDatabase, file_id: HirFileId) -> Arc { } fn parse_or_expand(db: &dyn AstDatabase, file_id: HirFileId) -> Option { - match file_id.0 { + match file_id.repr() { HirFileIdRepr::FileId(file_id) => Some(db.parse(file_id).tree().syntax().clone()), HirFileIdRepr::MacroFile(macro_file) => { // FIXME: Note how we convert from `Parse` to `SyntaxNode` here, @@ -444,7 +444,7 @@ fn macro_expand(db: &dyn AstDatabase, id: MacroCallId) -> ExpandResult { return ExpandResult::only_err(ExpandError::Other( - format!("invalid macro definition: {}", err).into(), + format!("invalid macro definition: {err}").into(), )) } }; diff --git a/crates/hir-expand/src/eager.rs b/crates/hir-expand/src/eager.rs index 5fd099aea7..a1474c44e6 100644 --- a/crates/hir-expand/src/eager.rs +++ b/crates/hir-expand/src/eager.rs @@ -161,7 +161,7 @@ pub fn expand_eager_macro( Ok(Ok(db.intern_macro_call(loc))) } else { - panic!("called `expand_eager_macro` on non-eager macro def {:?}", def); + panic!("called `expand_eager_macro` on non-eager macro def {def:?}"); } } @@ -208,7 +208,7 @@ fn eager_macro_recur( // Collect replacement for child in children { let def = match child.path().and_then(|path| ModPath::from_src(db, path, hygiene)) { - Some(path) => macro_resolver(path.clone()).ok_or_else(|| UnresolvedMacro { path })?, + Some(path) => macro_resolver(path.clone()).ok_or(UnresolvedMacro { path })?, None => { diagnostic_sink(ExpandError::Other("malformed macro invocation".into())); continue; diff --git a/crates/hir-expand/src/fixup.rs b/crates/hir-expand/src/fixup.rs index a4abe75626..75d364d5f8 100644 --- a/crates/hir-expand/src/fixup.rs +++ b/crates/hir-expand/src/fixup.rs @@ -366,7 +366,7 @@ mod tests { fixups.append, ); - let actual = format!("{}\n", tt); + let actual = format!("{tt}\n"); expect.indent(false); expect.assert_eq(&actual); diff --git a/crates/hir-expand/src/hygiene.rs b/crates/hir-expand/src/hygiene.rs index d60734372c..df1e20256c 100644 --- a/crates/hir-expand/src/hygiene.rs +++ b/crates/hir-expand/src/hygiene.rs @@ -17,7 +17,7 @@ use crate::{ db::{self, AstDatabase}, fixup, name::{AsName, Name}, - HirFileId, HirFileIdRepr, InFile, MacroCallKind, MacroCallLoc, MacroDefKind, MacroFile, + HirFileId, InFile, MacroCallKind, MacroCallLoc, MacroDefKind, MacroFile, }; #[derive(Clone, Debug)] @@ -216,9 +216,9 @@ fn make_hygiene_info( impl HygieneFrame { pub(crate) fn new(db: &dyn AstDatabase, file_id: HirFileId) -> HygieneFrame { - let (info, krate, local_inner) = match file_id.0 { - HirFileIdRepr::FileId(_) => (None, None, false), - HirFileIdRepr::MacroFile(macro_file) => { + let (info, krate, local_inner) = match file_id.macro_file() { + None => (None, None, false), + Some(macro_file) => { let loc = db.lookup_intern_macro_call(macro_file.macro_call_id); let info = make_hygiene_info(db, macro_file, &loc).map(|info| (loc.kind.file_id(), info)); diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index 7352b003a4..bc5f9f3b8a 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -23,7 +23,11 @@ pub use mbe::{Origin, ValueResult}; use std::{fmt, hash::Hash, iter, sync::Arc}; -use base_db::{impl_intern_key, salsa, CrateId, FileId, FileRange, ProcMacroKind}; +use base_db::{ 
+ impl_intern_key, + salsa::{self, InternId}, + CrateId, FileId, FileRange, ProcMacroKind, +}; use either::Either; use syntax::{ algo::{self, skip_trivia_token}, @@ -79,26 +83,12 @@ impl fmt::Display for ExpandError { /// finite (because everything bottoms out at the real `FileId`) and small /// (`MacroCallId` uses the location interning. You can check details here: /// ). +/// +/// The two variants are encoded in a single u32 which are differentiated by the MSB. +/// If the MSB is 0, the value represents a `FileId`, otherwise the remaining 31 bits represent a +/// `MacroCallId`. #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct HirFileId(HirFileIdRepr); - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -enum HirFileIdRepr { - FileId(FileId), - MacroFile(MacroFile), -} - -impl From for HirFileId { - fn from(id: FileId) -> Self { - HirFileId(HirFileIdRepr::FileId(id)) - } -} - -impl From for HirFileId { - fn from(id: MacroFile) -> Self { - HirFileId(HirFileIdRepr::MacroFile(id)) - } -} +pub struct HirFileId(u32); #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct MacroFile { @@ -172,13 +162,37 @@ pub enum MacroCallKind { }, } +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +enum HirFileIdRepr { + FileId(FileId), + MacroFile(MacroFile), +} + +impl From for HirFileId { + fn from(FileId(id): FileId) -> Self { + assert!(id < Self::MAX_FILE_ID); + HirFileId(id) + } +} + +impl From for HirFileId { + fn from(MacroFile { macro_call_id: MacroCallId(id) }: MacroFile) -> Self { + let id = id.as_u32(); + assert!(id < Self::MAX_FILE_ID); + HirFileId(id | Self::MACRO_FILE_TAG_MASK) + } +} + impl HirFileId { + const MAX_FILE_ID: u32 = u32::MAX ^ Self::MACRO_FILE_TAG_MASK; + const MACRO_FILE_TAG_MASK: u32 = 1 << 31; + /// For macro-expansion files, returns the file original source file the /// expansion originated from. pub fn original_file(self, db: &dyn db::AstDatabase) -> FileId { let mut file_id = self; loop { - match file_id.0 { + match file_id.repr() { HirFileIdRepr::FileId(id) => break id, HirFileIdRepr::MacroFile(MacroFile { macro_call_id }) => { let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_call_id); @@ -194,7 +208,7 @@ impl HirFileId { pub fn expansion_level(self, db: &dyn db::AstDatabase) -> u32 { let mut level = 0; let mut curr = self; - while let HirFileIdRepr::MacroFile(macro_file) = curr.0 { + while let Some(macro_file) = curr.macro_file() { let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); level += 1; @@ -205,25 +219,17 @@ impl HirFileId { /// If this is a macro call, returns the syntax node of the call. pub fn call_node(self, db: &dyn db::AstDatabase) -> Option> { - match self.0 { - HirFileIdRepr::FileId(_) => None, - HirFileIdRepr::MacroFile(macro_file) => { - let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); - Some(loc.kind.to_node(db)) - } - } + let macro_file = self.macro_file()?; + let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); + Some(loc.kind.to_node(db)) } /// If this is a macro call, returns the syntax node of the very first macro call this file resides in. 
pub fn original_call_node(self, db: &dyn db::AstDatabase) -> Option<(FileId, SyntaxNode)> { - let mut call = match self.0 { - HirFileIdRepr::FileId(_) => return None, - HirFileIdRepr::MacroFile(MacroFile { macro_call_id }) => { - db.lookup_intern_macro_call(macro_call_id).kind.to_node(db) - } - }; + let mut call = + db.lookup_intern_macro_call(self.macro_file()?.macro_call_id).kind.to_node(db); loop { - match call.file_id.0 { + match call.file_id.repr() { HirFileIdRepr::FileId(file_id) => break Some((file_id, call.value)), HirFileIdRepr::MacroFile(MacroFile { macro_call_id }) => { call = db.lookup_intern_macro_call(macro_call_id).kind.to_node(db); @@ -234,84 +240,74 @@ impl HirFileId { /// Return expansion information if it is a macro-expansion file pub fn expansion_info(self, db: &dyn db::AstDatabase) -> Option { - match self.0 { - HirFileIdRepr::FileId(_) => None, - HirFileIdRepr::MacroFile(macro_file) => { - let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); + let macro_file = self.macro_file()?; + let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); - let arg_tt = loc.kind.arg(db)?; + let arg_tt = loc.kind.arg(db)?; - let macro_def = db.macro_def(loc.def).ok()?; - let (parse, exp_map) = db.parse_macro_expansion(macro_file).value?; - let macro_arg = db.macro_arg(macro_file.macro_call_id)?; + let macro_def = db.macro_def(loc.def).ok()?; + let (parse, exp_map) = db.parse_macro_expansion(macro_file).value?; + let macro_arg = db.macro_arg(macro_file.macro_call_id)?; - let def = loc.def.ast_id().left().and_then(|id| { - let def_tt = match id.to_node(db) { - ast::Macro::MacroRules(mac) => mac.token_tree()?, - ast::Macro::MacroDef(_) - if matches!(*macro_def, TokenExpander::BuiltinAttr(_)) => - { - return None - } - ast::Macro::MacroDef(mac) => mac.body()?, - }; - Some(InFile::new(id.file_id, def_tt)) - }); - let attr_input_or_mac_def = def.or_else(|| match loc.kind { - MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => { - let tt = ast_id - .to_node(db) - .doc_comments_and_attrs() - .nth(invoc_attr_index as usize) - .and_then(Either::left)? - .token_tree()?; - Some(InFile::new(ast_id.file_id, tt)) - } - _ => None, - }); - - Some(ExpansionInfo { - expanded: InFile::new(self, parse.syntax_node()), - arg: InFile::new(loc.kind.file_id(), arg_tt), - attr_input_or_mac_def, - macro_arg_shift: mbe::Shift::new(¯o_arg.0), - macro_arg, - macro_def, - exp_map, - }) + let def = loc.def.ast_id().left().and_then(|id| { + let def_tt = match id.to_node(db) { + ast::Macro::MacroRules(mac) => mac.token_tree()?, + ast::Macro::MacroDef(_) if matches!(*macro_def, TokenExpander::BuiltinAttr(_)) => { + return None + } + ast::Macro::MacroDef(mac) => mac.body()?, + }; + Some(InFile::new(id.file_id, def_tt)) + }); + let attr_input_or_mac_def = def.or_else(|| match loc.kind { + MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => { + let tt = ast_id + .to_node(db) + .doc_comments_and_attrs() + .nth(invoc_attr_index as usize) + .and_then(Either::left)? 
+ .token_tree()?; + Some(InFile::new(ast_id.file_id, tt)) } - } + _ => None, + }); + + Some(ExpansionInfo { + expanded: InFile::new(self, parse.syntax_node()), + arg: InFile::new(loc.kind.file_id(), arg_tt), + attr_input_or_mac_def, + macro_arg_shift: mbe::Shift::new(¯o_arg.0), + macro_arg, + macro_def, + exp_map, + }) } /// Indicate it is macro file generated for builtin derive pub fn is_builtin_derive(&self, db: &dyn db::AstDatabase) -> Option> { - match self.0 { - HirFileIdRepr::FileId(_) => None, - HirFileIdRepr::MacroFile(macro_file) => { - let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); - let attr = match loc.def.kind { - MacroDefKind::BuiltInDerive(..) => loc.kind.to_node(db), - _ => return None, - }; - Some(attr.with_value(ast::Attr::cast(attr.value.clone())?)) - } - } + let macro_file = self.macro_file()?; + let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); + let attr = match loc.def.kind { + MacroDefKind::BuiltInDerive(..) => loc.kind.to_node(db), + _ => return None, + }; + Some(attr.with_value(ast::Attr::cast(attr.value.clone())?)) } pub fn is_custom_derive(&self, db: &dyn db::AstDatabase) -> bool { - match self.0 { - HirFileIdRepr::FileId(_) => false, - HirFileIdRepr::MacroFile(macro_file) => { + match self.macro_file() { + Some(macro_file) => { let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); matches!(loc.def.kind, MacroDefKind::ProcMacro(_, ProcMacroKind::CustomDerive, _)) } + None => false, } } /// Return whether this file is an include macro pub fn is_include_macro(&self, db: &dyn db::AstDatabase) -> bool { - match self.0 { - HirFileIdRepr::MacroFile(macro_file) => { + match self.macro_file() { + Some(macro_file) => { let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); matches!(loc.eager, Some(EagerCallInfo { included_file: Some(_), .. })) } @@ -321,8 +317,8 @@ impl HirFileId { /// Return whether this file is an attr macro pub fn is_attr_macro(&self, db: &dyn db::AstDatabase) -> bool { - match self.0 { - HirFileIdRepr::MacroFile(macro_file) => { + match self.macro_file() { + Some(macro_file) => { let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); matches!(loc.kind, MacroCallKind::Attr { .. }) } @@ -333,23 +329,36 @@ impl HirFileId { /// Return whether this file is the pseudo expansion of the derive attribute. /// See [`crate::builtin_attr_macro::derive_attr_expand`]. pub fn is_derive_attr_pseudo_expansion(&self, db: &dyn db::AstDatabase) -> bool { - match self.0 { - HirFileIdRepr::MacroFile(macro_file) => { + match self.macro_file() { + Some(macro_file) => { let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); matches!(loc.kind, MacroCallKind::Attr { is_derive: true, .. 
}) } - _ => false, + None => false, } } + #[inline] pub fn is_macro(self) -> bool { - matches!(self.0, HirFileIdRepr::MacroFile(_)) + self.0 & Self::MACRO_FILE_TAG_MASK != 0 } + #[inline] pub fn macro_file(self) -> Option { - match self.0 { - HirFileIdRepr::FileId(_) => None, - HirFileIdRepr::MacroFile(m) => Some(m), + match self.0 & Self::MACRO_FILE_TAG_MASK { + 0 => None, + _ => Some(MacroFile { + macro_call_id: MacroCallId(InternId::from(self.0 ^ Self::MACRO_FILE_TAG_MASK)), + }), + } + } + + fn repr(self) -> HirFileIdRepr { + match self.0 & Self::MACRO_FILE_TAG_MASK { + 0 => HirFileIdRepr::FileId(FileId(self.0)), + _ => HirFileIdRepr::MacroFile(MacroFile { + macro_call_id: MacroCallId(InternId::from(self.0 ^ Self::MACRO_FILE_TAG_MASK)), + }), } } } @@ -442,7 +451,7 @@ impl MacroCallKind { pub fn original_call_range_with_body(self, db: &dyn db::AstDatabase) -> FileRange { let mut kind = self; let file_id = loop { - match kind.file_id().0 { + match kind.file_id().repr() { HirFileIdRepr::MacroFile(file) => { kind = db.lookup_intern_macro_call(file.macro_call_id).kind; } @@ -467,7 +476,7 @@ impl MacroCallKind { pub fn original_call_range(self, db: &dyn db::AstDatabase) -> FileRange { let mut kind = self; let file_id = loop { - match kind.file_id().0 { + match kind.file_id().repr() { HirFileIdRepr::MacroFile(file) => { kind = db.lookup_intern_macro_call(file.macro_call_id).kind; } @@ -779,7 +788,7 @@ impl<'a> InFile<&'a SyntaxNode> { /// For attributes and derives, this will point back to the attribute only. /// For the entire item `InFile::use original_file_range_full`. pub fn original_file_range(self, db: &dyn db::AstDatabase) -> FileRange { - match self.file_id.0 { + match self.file_id.repr() { HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() }, HirFileIdRepr::MacroFile(mac_file) => { if let Some(res) = self.original_file_range_opt(db) { @@ -846,7 +855,7 @@ impl InFile { /// Falls back to the macro call range if the node cannot be mapped up fully. pub fn original_file_range(self, db: &dyn db::AstDatabase) -> FileRange { - match self.file_id.0 { + match self.file_id.repr() { HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() }, HirFileIdRepr::MacroFile(mac_file) => { if let Some(res) = self.original_file_range_opt(db) { @@ -861,7 +870,7 @@ impl InFile { /// Attempts to map the syntax node back up its macro calls. 
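As a standalone illustration of the MSB-tagged encoding documented on `HirFileId` above — a minimal sketch using plain u32 payloads instead of the real `FileId`/`MacroCallId` types:

// Sketch of the tagging scheme: MSB = variant tag, low 31 bits = payload.
const MACRO_TAG: u32 = 1 << 31;
const MAX_ID: u32 = u32::MAX ^ MACRO_TAG; // 2^31 - 1

#[derive(Debug, PartialEq)]
enum Repr {
    File(u32),
    Macro(u32),
}

fn encode(repr: Repr) -> u32 {
    match repr {
        Repr::File(id) => { assert!(id < MAX_ID); id }
        Repr::Macro(id) => { assert!(id < MAX_ID); id | MACRO_TAG }
    }
}

fn decode(raw: u32) -> Repr {
    if raw & MACRO_TAG == 0 {
        Repr::File(raw)
    } else {
        Repr::Macro(raw ^ MACRO_TAG) // clear the tag bit to recover the payload
    }
}

fn main() {
    assert_eq!(decode(encode(Repr::File(7))), Repr::File(7));
    assert_eq!(decode(encode(Repr::Macro(7))), Repr::Macro(7));
}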
pub fn original_file_range_opt(self, db: &dyn db::AstDatabase) -> Option { - match self.file_id.0 { + match self.file_id.repr() { HirFileIdRepr::FileId(file_id) => { Some(FileRange { file_id, range: self.value.text_range() }) } diff --git a/crates/hir-expand/src/name.rs b/crates/hir-expand/src/name.rs index 2679a1c360..e8b3e312aa 100644 --- a/crates/hir-expand/src/name.rs +++ b/crates/hir-expand/src/name.rs @@ -62,7 +62,7 @@ impl<'a> UnescapedName<'a> { it.clone() } } - Repr::TupleField(it) => SmolStr::new(&it.to_string()), + Repr::TupleField(it) => SmolStr::new(it.to_string()), } } } @@ -139,7 +139,7 @@ impl Name { pub fn to_smol_str(&self) -> SmolStr { match &self.0 { Repr::Text(it) => it.clone(), - Repr::TupleField(it) => SmolStr::new(&it.to_string()), + Repr::TupleField(it) => SmolStr::new(it.to_string()), } } @@ -338,44 +338,6 @@ pub mod known { test_case, recursion_limit, feature, - // Safe intrinsics - abort, - add_with_overflow, - black_box, - bitreverse, - bswap, - caller_location, - ctlz, - ctpop, - cttz, - discriminant_value, - forget, - likely, - maxnumf32, - maxnumf64, - min_align_of_val, - min_align_of, - minnumf32, - minnumf64, - mul_with_overflow, - needs_drop, - ptr_guaranteed_eq, - ptr_guaranteed_ne, - rotate_left, - rotate_right, - rustc_peek, - saturating_add, - saturating_sub, - size_of_val, - size_of, - sub_with_overflow, - type_id, - type_name, - unlikely, - variant_count, - wrapping_add, - wrapping_mul, - wrapping_sub, // known methods of lang items eq, ne, @@ -419,6 +381,8 @@ pub mod known { shr, sub_assign, sub, + unsafe_cell, + va_list ); // self/Self cannot be used as an identifier diff --git a/crates/hir-expand/src/quote.rs b/crates/hir-expand/src/quote.rs index e839e97bf0..c0a7bc7ca8 100644 --- a/crates/hir-expand/src/quote.rs +++ b/crates/hir-expand/src/quote.rs @@ -233,7 +233,7 @@ mod tests { let quoted = quote!(#a); assert_eq!(quoted.to_string(), "hello"); - let t = format!("{:?}", quoted); + let t = format!("{quoted:?}"); assert_eq!(t, "SUBTREE $\n IDENT hello 4294967295"); } diff --git a/crates/hir-ty/Cargo.toml b/crates/hir-ty/Cargo.toml index a1d6835bfa..ae837ac6dc 100644 --- a/crates/hir-ty/Cargo.toml +++ b/crates/hir-ty/Cargo.toml @@ -13,18 +13,20 @@ doctest = false cov-mark = "2.0.0-pre.1" itertools = "0.10.5" arrayvec = "0.7.2" +bitflags = "1.3.2" smallvec = "1.10.0" ena = "0.14.0" tracing = "0.1.35" rustc-hash = "1.1.0" scoped-tls = "1.0.0" -chalk-solve = { version = "0.86.0", default-features = false } -chalk-ir = "0.86.0" -chalk-recursive = { version = "0.86.0", default-features = false } -chalk-derive = "0.86.0" +chalk-solve = { version = "0.88.0", default-features = false } +chalk-ir = "0.88.0" +chalk-recursive = { version = "0.88.0", default-features = false } +chalk-derive = "0.88.0" la-arena = { version = "0.3.0", path = "../../lib/la-arena" } once_cell = "1.15.0" typed-arena = "2.0.1" +rustc_index = { version = "0.0.20221221", package = "hkalbasi-rustc-ap-rustc_index", default-features = false } stdx = { path = "../stdx", version = "0.0.0" } hir-def = { path = "../hir-def", version = "0.0.0" } diff --git a/crates/hir-ty/src/autoderef.rs b/crates/hir-ty/src/autoderef.rs index 78911d8dc0..cbcf8f74c5 100644 --- a/crates/hir-ty/src/autoderef.rs +++ b/crates/hir-ty/src/autoderef.rs @@ -82,11 +82,11 @@ pub(crate) fn autoderef_step( } // FIXME: replace uses of this with Autoderef above -pub fn autoderef<'a>( - db: &'a dyn HirDatabase, +pub fn autoderef( + db: &dyn HirDatabase, env: Arc, ty: Canonical, -) -> impl Iterator> + 'a { +) -> impl 
Iterator> + '_ { let mut table = InferenceTable::new(db, env); let ty = table.instantiate_canonical(ty); let mut autoderef = Autoderef::new(&mut table, ty); diff --git a/crates/hir-ty/src/builder.rs b/crates/hir-ty/src/builder.rs index 9ae752556d..d5ef0c22de 100644 --- a/crates/hir-ty/src/builder.rs +++ b/crates/hir-ty/src/builder.rs @@ -142,7 +142,7 @@ impl TyBuilder { match (a.data(Interner), e) { (chalk_ir::GenericArgData::Ty(_), ParamKind::Type) | (chalk_ir::GenericArgData::Const(_), ParamKind::Const(_)) => (), - _ => panic!("Mismatched kinds: {:?}, {:?}, {:?}", a, self.vec, self.param_kinds), + _ => panic!("Mismatched kinds: {a:?}, {:?}, {:?}", self.vec, self.param_kinds), } } } diff --git a/crates/hir-ty/src/chalk_db.rs b/crates/hir-ty/src/chalk_db.rs index 43c3451cab..1c2b8de7f7 100644 --- a/crates/hir-ty/src/chalk_db.rs +++ b/crates/hir-ty/src/chalk_db.rs @@ -568,6 +568,7 @@ fn well_known_trait_from_lang_attr(name: &str) -> Option { "sized" => WellKnownTrait::Sized, "unpin" => WellKnownTrait::Unpin, "unsize" => WellKnownTrait::Unsize, + "tuple_trait" => WellKnownTrait::Tuple, _ => return None, }) } @@ -585,6 +586,7 @@ fn lang_attr_from_well_known_trait(attr: WellKnownTrait) -> &'static str { WellKnownTrait::FnOnce => "fn_once", WellKnownTrait::Generator => "generator", WellKnownTrait::Sized => "sized", + WellKnownTrait::Tuple => "tuple_trait", WellKnownTrait::Unpin => "unpin", WellKnownTrait::Unsize => "unsize", } diff --git a/crates/hir-ty/src/consteval.rs b/crates/hir-ty/src/consteval.rs index 2c0c6e0b83..8df70330fa 100644 --- a/crates/hir-ty/src/consteval.rs +++ b/crates/hir-ty/src/consteval.rs @@ -90,14 +90,14 @@ impl Display for ComputedExpr { ComputedExpr::Literal(l) => match l { Literal::Int(x, _) => { if *x >= 10 { - write!(f, "{} ({:#X})", x, x) + write!(f, "{x} ({x:#X})") } else { x.fmt(f) } } Literal::Uint(x, _) => { if *x >= 10 { - write!(f, "{} ({:#X})", x, x) + write!(f, "{x} ({x:#X})") } else { x.fmt(f) } @@ -131,7 +131,7 @@ fn scalar_max(scalar: &Scalar) -> i128 { IntTy::I16 => i16::MAX as i128, IntTy::I32 => i32::MAX as i128, IntTy::I64 => i64::MAX as i128, - IntTy::I128 => i128::MAX as i128, + IntTy::I128 => i128::MAX, }, Scalar::Uint(x) => match x { chalk_ir::UintTy::Usize => usize::MAX as i128, @@ -139,7 +139,7 @@ fn scalar_max(scalar: &Scalar) -> i128 { chalk_ir::UintTy::U16 => u16::MAX as i128, chalk_ir::UintTy::U32 => u32::MAX as i128, chalk_ir::UintTy::U64 => u64::MAX as i128, - chalk_ir::UintTy::U128 => i128::MAX as i128, // ignore too big u128 for now + chalk_ir::UintTy::U128 => i128::MAX, // ignore too big u128 for now }, Scalar::Float(_) => 0, } @@ -351,15 +351,17 @@ pub fn eval_const( .infer .assoc_resolutions_for_expr(expr_id) .ok_or(ConstEvalError::SemanticError("unresolved assoc item"))? 
+ .0 { hir_def::AssocItemId::FunctionId(_) => { Err(ConstEvalError::NotSupported("assoc function")) } + // FIXME use actual impl for trait assoc const hir_def::AssocItemId::ConstId(c) => ctx.db.const_eval(c), hir_def::AssocItemId::TypeAliasId(_) => { Err(ConstEvalError::NotSupported("assoc type alias")) } - } + }; } }; match pr { @@ -402,7 +404,7 @@ pub(crate) fn path_to_const( args_lazy: impl FnOnce() -> Generics, debruijn: DebruijnIndex, ) -> Option { - match resolver.resolve_path_in_value_ns_fully(db.upcast(), &path) { + match resolver.resolve_path_in_value_ns_fully(db.upcast(), path) { Some(ValueNs::GenericParam(p)) => { let ty = db.const_param_ty(p); let args = args_lazy(); @@ -509,10 +511,10 @@ pub(crate) fn const_eval_query_variant( ) } -pub(crate) fn eval_to_const<'a>( +pub(crate) fn eval_to_const( expr: Idx, mode: ParamLoweringMode, - ctx: &mut InferenceContext<'a>, + ctx: &mut InferenceContext<'_>, args: impl FnOnce() -> Generics, debruijn: DebruijnIndex, ) -> Const { diff --git a/crates/hir-ty/src/consteval/tests.rs b/crates/hir-ty/src/consteval/tests.rs index b76506f6eb..3c930c077b 100644 --- a/crates/hir-ty/src/consteval/tests.rs +++ b/crates/hir-ty/src/consteval/tests.rs @@ -14,7 +14,7 @@ fn check_number(ra_fixture: &str, answer: i128) { match r { ComputedExpr::Literal(Literal::Int(r, _)) => assert_eq!(r, answer), ComputedExpr::Literal(Literal::Uint(r, _)) => assert_eq!(r, answer as u128), - x => panic!("Expected number but found {:?}", x), + x => panic!("Expected number but found {x:?}"), } } @@ -25,7 +25,6 @@ fn eval_goal(ra_fixture: &str) -> Result { let scope = &def_map[module_id.local_id].scope; let const_id = scope .declarations() - .into_iter() .find_map(|x| match x { hir_def::ModuleDefId::ConstId(x) => { if db.const_data(x).name.as_ref()?.to_string() == "GOAL" { @@ -126,7 +125,7 @@ fn enums() { assert_eq!(name, "E::A"); assert_eq!(val, 1); } - x => panic!("Expected enum but found {:?}", x), + x => panic!("Expected enum but found {x:?}"), } } diff --git a/crates/hir-ty/src/db.rs b/crates/hir-ty/src/db.rs index 932fce8356..54b244620f 100644 --- a/crates/hir-ty/src/db.rs +++ b/crates/hir-ty/src/db.rs @@ -3,20 +3,23 @@ use std::sync::Arc; -use arrayvec::ArrayVec; use base_db::{impl_intern_key, salsa, CrateId, Upcast}; use hir_def::{ - db::DefDatabase, expr::ExprId, BlockId, ConstId, ConstParamId, DefWithBodyId, EnumVariantId, - FunctionId, GenericDefId, ImplId, LifetimeParamId, LocalFieldId, TypeOrConstParamId, VariantId, + db::DefDatabase, + expr::ExprId, + layout::{Layout, LayoutError, TargetDataLayout}, + AdtId, BlockId, ConstId, ConstParamId, DefWithBodyId, EnumVariantId, FunctionId, GenericDefId, + ImplId, LifetimeParamId, LocalFieldId, TypeOrConstParamId, VariantId, }; use la_arena::ArenaMap; +use smallvec::SmallVec; use crate::{ chalk_db, consteval::{ComputedExpr, ConstEvalError}, method_resolution::{InherentImpls, TraitImpls, TyFingerprint}, Binders, CallableDefId, FnDefId, GenericArg, ImplTraitId, InferenceResult, Interner, PolyFnSig, - QuantifiedWhereClause, ReturnTypeImplTraits, TraitRef, Ty, TyDefId, ValueTyDefId, + QuantifiedWhereClause, ReturnTypeImplTraits, Substitution, TraitRef, Ty, TyDefId, ValueTyDefId, }; use hir_expand::name::Name; @@ -57,6 +60,13 @@ pub trait HirDatabase: DefDatabase + Upcast { #[salsa::invoke(crate::lower::field_types_query)] fn field_types(&self, var: VariantId) -> Arc>>; + #[salsa::invoke(crate::layout::layout_of_adt_query)] + #[salsa::cycle(crate::layout::layout_of_adt_recover)] + fn layout_of_adt(&self, def: AdtId, subst: 
Substitution) -> Result; + + #[salsa::invoke(crate::layout::target_data_layout_query)] + fn target_data_layout(&self, krate: CrateId) -> Arc; + #[salsa::invoke(crate::lower::callable_item_sig)] fn callable_item_signature(&self, def: CallableDefId) -> PolyFnSig; @@ -92,10 +102,15 @@ pub trait HirDatabase: DefDatabase + Upcast { fn inherent_impls_in_block(&self, block: BlockId) -> Option>; /// Collects all crates in the dependency graph that have impls for the - /// given fingerprint. This is only used for primitive types; for - /// user-defined types we just look at the crate where the type is defined. - #[salsa::invoke(crate::method_resolution::inherent_impl_crates_query)] - fn inherent_impl_crates(&self, krate: CrateId, fp: TyFingerprint) -> ArrayVec; + /// given fingerprint. This is only used for primitive types and types + /// annotated with `rustc_has_incoherent_inherent_impls`; for other types + /// we just look at the crate where the type is defined. + #[salsa::invoke(crate::method_resolution::incoherent_inherent_impl_crates)] + fn incoherent_inherent_impl_crates( + &self, + krate: CrateId, + fp: TyFingerprint, + ) -> SmallVec<[CrateId; 2]>; #[salsa::invoke(TraitImpls::trait_impls_in_crate_query)] fn trait_impls_in_crate(&self, krate: CrateId) -> Arc; diff --git a/crates/hir-ty/src/diagnostics/match_check.rs b/crates/hir-ty/src/diagnostics/match_check.rs index d51ad72bd2..8b0f051b46 100644 --- a/crates/hir-ty/src/diagnostics/match_check.rs +++ b/crates/hir-ty/src/diagnostics/match_check.rs @@ -12,16 +12,16 @@ pub(crate) mod usefulness; use chalk_ir::Mutability; use hir_def::{ - adt::VariantData, body::Body, expr::PatId, AdtId, EnumVariantId, HasModule, LocalFieldId, - VariantId, + adt::VariantData, body::Body, expr::PatId, AdtId, EnumVariantId, LocalFieldId, VariantId, }; -use hir_expand::name::{name, Name}; +use hir_expand::name::Name; use stdx::{always, never}; use crate::{ db::HirDatabase, display::{HirDisplay, HirDisplayError, HirFormatter}, infer::BindingMode, + lang_items::is_box, InferenceResult, Interner, Substitution, Ty, TyExt, TyKind, }; @@ -386,7 +386,7 @@ impl HirDisplay for Pat { } subpattern.hir_fmt(f) } - PatKind::LiteralBool { value } => write!(f, "{}", value), + PatKind::LiteralBool { value } => write!(f, "{value}"), PatKind::Or { pats } => f.write_joined(pats.iter(), " | "), } } @@ -405,13 +405,6 @@ where } } -fn is_box(adt: AdtId, db: &dyn HirDatabase) -> bool { - let owned_box = name![owned_box].to_smol_str(); - let krate = adt.module(db.upcast()).krate(); - let box_adt = db.lang_item(krate, owned_box).and_then(|it| it.as_struct()).map(AdtId::from); - Some(adt) == box_adt -} - pub(crate) trait PatternFoldable: Sized { fn fold_with(&self, folder: &mut F) -> Self { self.super_fold_with(folder) diff --git a/crates/hir-ty/src/diagnostics/match_check/deconstruct_pat.rs b/crates/hir-ty/src/diagnostics/match_check/deconstruct_pat.rs index 47d60fc41e..d130827a77 100644 --- a/crates/hir-ty/src/diagnostics/match_check/deconstruct_pat.rs +++ b/crates/hir-ty/src/diagnostics/match_check/deconstruct_pat.rs @@ -372,7 +372,7 @@ impl Constructor { hir_def::AdtId::UnionId(id) => id.into(), } } - _ => panic!("bad constructor {:?} for adt {:?}", self, adt), + _ => panic!("bad constructor {self:?} for adt {adt:?}"), } } diff --git a/crates/hir-ty/src/display.rs b/crates/hir-ty/src/display.rs index a22a4b170f..66e813eed8 100644 --- a/crates/hir-ty/src/display.rs +++ b/crates/hir-ty/src/display.rs @@ -16,7 +16,7 @@ use hir_def::{ path::{Path, PathKind}, type_ref::{ConstScalar, 
TraitBoundModifier, TypeBound, TypeRef}, visibility::Visibility, - HasModule, ItemContainerId, Lookup, ModuleId, TraitId, + HasModule, ItemContainerId, Lookup, ModuleDefId, ModuleId, TraitId, }; use hir_expand::{hygiene::Hygiene, name::Name}; use itertools::Itertools; @@ -35,9 +35,27 @@ use crate::{ TraitRefExt, Ty, TyExt, TyKind, WhereClause, }; +pub trait HirWrite: fmt::Write { + fn start_location_link(&mut self, location: ModuleDefId); + fn end_location_link(&mut self); +} + +// String will ignore link metadata +impl HirWrite for String { + fn start_location_link(&mut self, _: ModuleDefId) {} + + fn end_location_link(&mut self) {} +} + +// `core::Formatter` will ignore metadata +impl HirWrite for fmt::Formatter<'_> { + fn start_location_link(&mut self, _: ModuleDefId) {} + fn end_location_link(&mut self) {} +} + pub struct HirFormatter<'a> { pub db: &'a dyn HirDatabase, - fmt: &'a mut dyn fmt::Write, + fmt: &'a mut dyn HirWrite, buf: String, curr_size: usize, pub(crate) max_size: Option, @@ -45,6 +63,16 @@ pub struct HirFormatter<'a> { display_target: DisplayTarget, } +impl HirFormatter<'_> { + fn start_location_link(&mut self, location: ModuleDefId) { + self.fmt.start_location_link(location); + } + + fn end_location_link(&mut self) { + self.fmt.end_location_link(); + } +} + pub trait HirDisplay { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError>; @@ -148,13 +176,13 @@ impl<'a> HirFormatter<'a> { let mut first = true; for e in iter { if !first { - write!(self, "{}", sep)?; + write!(self, "{sep}")?; } first = false; // Abbreviate multiple omitted types with a single ellipsis. if self.should_truncate() { - return write!(self, "{}", TYPE_HINT_TRUNCATION); + return write!(self, "{TYPE_HINT_TRUNCATION}"); } e.hir_fmt(self)?; @@ -245,12 +273,9 @@ pub struct HirDisplayWrapper<'a, T> { display_target: DisplayTarget, } -impl<'a, T> fmt::Display for HirDisplayWrapper<'a, T> -where - T: HirDisplay, -{ - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self.t.hir_fmt(&mut HirFormatter { +impl HirDisplayWrapper<'_, T> { + pub fn write_to(&self, f: &mut F) -> Result<(), HirDisplayError> { + self.t.hir_fmt(&mut HirFormatter { db: self.db, fmt: f, buf: String::with_capacity(20), @@ -258,7 +283,16 @@ where max_size: self.max_size, omit_verbose_types: self.omit_verbose_types, display_target: self.display_target, - }) { + }) + } +} + +impl<'a, T> fmt::Display for HirDisplayWrapper<'a, T> +where + T: HirDisplay, +{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self.write_to(f) { Ok(()) => Ok(()), Err(HirDisplayError::FmtError) => Err(fmt::Error), Err(HirDisplayError::DisplaySourceCodeError(_)) => { @@ -286,7 +320,7 @@ impl HirDisplay for Interned { impl HirDisplay for ProjectionTy { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { if f.should_truncate() { - return write!(f, "{}", TYPE_HINT_TRUNCATION); + return write!(f, "{TYPE_HINT_TRUNCATION}"); } let trait_ref = self.trait_ref(f.db); @@ -308,7 +342,7 @@ impl HirDisplay for ProjectionTy { impl HirDisplay for OpaqueTy { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { if f.should_truncate() { - return write!(f, "{}", TYPE_HINT_TRUNCATION); + return write!(f, "{TYPE_HINT_TRUNCATION}"); } self.substitution.at(Interner, 0).hir_fmt(f) @@ -351,7 +385,7 @@ impl HirDisplay for BoundVar { impl HirDisplay for Ty { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { if f.should_truncate() { - return write!(f, "{}", 
TYPE_HINT_TRUNCATION); + return write!(f, "{TYPE_HINT_TRUNCATION}"); } match self.kind(Interner) { @@ -530,6 +564,7 @@ impl HirDisplay for Ty { } } TyKind::Adt(AdtId(def_id), parameters) => { + f.start_location_link((*def_id).into()); match f.display_target { DisplayTarget::Diagnostics | DisplayTarget::Test => { let name = match *def_id { @@ -537,7 +572,7 @@ impl HirDisplay for Ty { hir_def::AdtId::UnionId(it) => f.db.union_data(it).name.clone(), hir_def::AdtId::EnumId(it) => f.db.enum_data(it).name.clone(), }; - write!(f, "{}", name)?; + write!(f, "{name}")?; } DisplayTarget::SourceCode { module_id } => { if let Some(path) = find_path::find_path( @@ -546,7 +581,7 @@ impl HirDisplay for Ty { module_id, false, ) { - write!(f, "{}", path)?; + write!(f, "{path}")?; } else { return Err(HirDisplayError::DisplaySourceCodeError( DisplaySourceCodeError::PathNotFound, @@ -554,6 +589,7 @@ impl HirDisplay for Ty { } } } + f.end_location_link(); if parameters.len(Interner) > 0 { let parameters_to_write = if f.display_target.is_source_code() @@ -701,7 +737,7 @@ impl HirDisplay for Ty { if sig.params().is_empty() { write!(f, "||")?; } else if f.should_truncate() { - write!(f, "|{}|", TYPE_HINT_TRUNCATION)?; + write!(f, "|{TYPE_HINT_TRUNCATION}|")?; } else { write!(f, "|")?; f.write_joined(sig.params(), ", ")?; @@ -892,7 +928,7 @@ pub fn write_bounds_like_dyn_trait_with_prefix( default_sized: SizedByDefault, f: &mut HirFormatter<'_>, ) -> Result<(), HirDisplayError> { - write!(f, "{}", prefix)?; + write!(f, "{prefix}")?; if !predicates.is_empty() || predicates.is_empty() && matches!(default_sized, SizedByDefault::Sized { .. }) { @@ -1020,7 +1056,7 @@ fn fmt_trait_ref( use_as: bool, ) -> Result<(), HirDisplayError> { if f.should_truncate() { - return write!(f, "{}", TYPE_HINT_TRUNCATION); + return write!(f, "{TYPE_HINT_TRUNCATION}"); } tr.self_type_parameter(Interner).hir_fmt(f)?; @@ -1047,7 +1083,7 @@ impl HirDisplay for TraitRef { impl HirDisplay for WhereClause { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { if f.should_truncate() { - return write!(f, "{}", TYPE_HINT_TRUNCATION); + return write!(f, "{TYPE_HINT_TRUNCATION}"); } match self { @@ -1098,7 +1134,6 @@ impl HirDisplay for LifetimeData { write!(f, "{}", param_data.name) } LifetimeData::Static => write!(f, "'static"), - LifetimeData::Empty(_) => Ok(()), LifetimeData::Erased => Ok(()), LifetimeData::Phantom(_, _) => Ok(()), } @@ -1162,7 +1197,7 @@ impl HirDisplay for TypeRef { hir_def::type_ref::Mutability::Shared => "*const ", hir_def::type_ref::Mutability::Mut => "*mut ", }; - write!(f, "{}", mutability)?; + write!(f, "{mutability}")?; inner.hir_fmt(f)?; } TypeRef::Reference(inner, lifetime, mutability) => { @@ -1174,13 +1209,13 @@ impl HirDisplay for TypeRef { if let Some(lifetime) = lifetime { write!(f, "{} ", lifetime.name)?; } - write!(f, "{}", mutability)?; + write!(f, "{mutability}")?; inner.hir_fmt(f)?; } TypeRef::Array(inner, len) => { write!(f, "[")?; inner.hir_fmt(f)?; - write!(f, "; {}]", len)?; + write!(f, "; {len}]")?; } TypeRef::Slice(inner) => { write!(f, "[")?; @@ -1197,7 +1232,7 @@ impl HirDisplay for TypeRef { for index in 0..function_parameters.len() { let (param_name, param_type) = &function_parameters[index]; if let Some(name) = param_name { - write!(f, "{}: ", name)?; + write!(f, "{name}: ")?; } param_type.hir_fmt(f)?; @@ -1373,7 +1408,7 @@ impl HirDisplay for hir_def::path::GenericArg { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { match self { 
hir_def::path::GenericArg::Type(ty) => ty.hir_fmt(f), - hir_def::path::GenericArg::Const(c) => write!(f, "{}", c), + hir_def::path::GenericArg::Const(c) => write!(f, "{c}"), hir_def::path::GenericArg::Lifetime(lifetime) => write!(f, "{}", lifetime.name), } } diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs index 0b3c23f574..6b59f1c20d 100644 --- a/crates/hir-ty/src/infer.rs +++ b/crates/hir-ty/src/infer.rs @@ -19,10 +19,11 @@ use std::sync::Arc; use chalk_ir::{cast::Cast, ConstValue, DebruijnIndex, Mutability, Safety, Scalar, TypeFlags}; use hir_def::{ body::Body, - builtin_type::BuiltinType, + builtin_type::{BuiltinInt, BuiltinType, BuiltinUint}, data::{ConstData, StaticData}, - expr::{BindingAnnotation, ExprId, PatId}, + expr::{BindingAnnotation, ExprId, ExprOrPatId, PatId}, lang_item::LangItemTarget, + layout::Integer, path::{path, Path}, resolver::{HasResolver, ResolveValueResult, Resolver, TypeNs, ValueNs}, type_ref::TypeRef, @@ -33,7 +34,7 @@ use hir_expand::name::{name, Name}; use itertools::Either; use la_arena::ArenaMap; use rustc_hash::FxHashMap; -use stdx::{always, impl_from}; +use stdx::always; use crate::{ db::HirDatabase, fold_tys, fold_tys_and_consts, infer::coerce::CoerceMany, @@ -70,8 +71,26 @@ pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc ctx.collect_static(&db.static_data(s)), DefWithBodyId::VariantId(v) => { ctx.return_ty = TyBuilder::builtin(match db.enum_data(v.parent).variant_body_type() { - Either::Left(builtin) => BuiltinType::Int(builtin), - Either::Right(builtin) => BuiltinType::Uint(builtin), + hir_def::layout::IntegerType::Pointer(signed) => match signed { + true => BuiltinType::Int(BuiltinInt::Isize), + false => BuiltinType::Uint(BuiltinUint::Usize), + }, + hir_def::layout::IntegerType::Fixed(size, signed) => match signed { + true => BuiltinType::Int(match size { + Integer::I8 => BuiltinInt::I8, + Integer::I16 => BuiltinInt::I16, + Integer::I32 => BuiltinInt::I32, + Integer::I64 => BuiltinInt::I64, + Integer::I128 => BuiltinInt::I128, + }), + false => BuiltinType::Uint(match size { + Integer::I8 => BuiltinUint::U8, + Integer::I16 => BuiltinUint::U16, + Integer::I32 => BuiltinUint::U32, + Integer::I64 => BuiltinUint::U64, + Integer::I128 => BuiltinUint::U128, + }), + }, }); } } @@ -101,13 +120,6 @@ pub(crate) fn normalize(db: &dyn HirDatabase, owner: DefWithBodyId, ty: Ty) -> T table.resolve_completely(ty_with_vars) } -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] -enum ExprOrPatId { - ExprId(ExprId), - PatId(PatId), -} -impl_from!(ExprId, PatId for ExprOrPatId); - /// Binding modes inferred for patterns. /// #[derive(Copy, Clone, Debug, Eq, PartialEq)] @@ -189,6 +201,8 @@ pub(crate) type InferResult = Result, TypeError>; #[derive(Debug, PartialEq, Eq, Clone)] pub enum InferenceDiagnostic { NoSuchField { expr: ExprId }, + PrivateField { expr: ExprId, field: FieldId }, + PrivateAssocItem { id: ExprOrPatId, item: AssocItemId }, BreakOutsideOfLoop { expr: ExprId, is_break: bool }, MismatchedArgCount { call_expr: ExprId, expected: usize, found: usize }, } @@ -330,7 +344,7 @@ pub struct InferenceResult { /// For each struct literal or pattern, records the variant it resolves to. variant_resolutions: FxHashMap, /// For each associated item record what it resolves to - assoc_resolutions: FxHashMap, + assoc_resolutions: FxHashMap, pub diagnostics: Vec, pub type_of_expr: ArenaMap, /// For each pattern record the type it resolves to. 
@@ -360,11 +374,11 @@ impl InferenceResult { pub fn variant_resolution_for_pat(&self, id: PatId) -> Option { self.variant_resolutions.get(&id.into()).copied() } - pub fn assoc_resolutions_for_expr(&self, id: ExprId) -> Option { - self.assoc_resolutions.get(&id.into()).copied() + pub fn assoc_resolutions_for_expr(&self, id: ExprId) -> Option<(AssocItemId, Substitution)> { + self.assoc_resolutions.get(&id.into()).cloned() } - pub fn assoc_resolutions_for_pat(&self, id: PatId) -> Option { - self.assoc_resolutions.get(&id.into()).copied() + pub fn assoc_resolutions_for_pat(&self, id: PatId) -> Option<(AssocItemId, Substitution)> { + self.assoc_resolutions.get(&id.into()).cloned() } pub fn type_mismatch_for_expr(&self, expr: ExprId) -> Option<&TypeMismatch> { self.type_mismatches.get(&expr.into()) @@ -484,7 +498,7 @@ impl<'a> InferenceContext<'a> { result: InferenceResult::default(), table: unify::InferenceTable::new(db, trait_env.clone()), trait_env, - return_ty: TyKind::Error.intern(Interner), // set in collect_fn_signature + return_ty: TyKind::Error.intern(Interner), // set in collect_* calls resume_yield_tys: None, db, owner, @@ -498,6 +512,8 @@ impl<'a> InferenceContext<'a> { fn resolve_all(self) -> InferenceResult { let InferenceContext { mut table, mut result, .. } = self; + table.fallback_if_possible(); + // FIXME resolve obligations as well (use Guidance if necessary) table.resolve_obligations_as_possible(); @@ -516,6 +532,9 @@ impl<'a> InferenceContext<'a> { for (_, subst) in result.method_resolutions.values_mut() { *subst = table.resolve_completely(subst.clone()); } + for (_, subst) in result.assoc_resolutions.values_mut() { + *subst = table.resolve_completely(subst.clone()); + } for adjustment in result.expr_adjustments.values_mut().flatten() { adjustment.target = table.resolve_completely(adjustment.target.clone()); } @@ -537,8 +556,20 @@ impl<'a> InferenceContext<'a> { let data = self.db.function_data(func); let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver) .with_impl_trait_mode(ImplTraitLoweringMode::Param); - let param_tys = + let mut param_tys = data.params.iter().map(|(_, type_ref)| ctx.lower_ty(type_ref)).collect::>(); + // Check if function contains a va_list, if it does then we append it to the parameter types + // that are collected from the function data + if data.is_varargs() { + let va_list_ty = match self.resolve_va_list() { + Some(va_list) => TyBuilder::adt(self.db, va_list) + .fill_with_defaults(self.db, || self.table.new_type_var()) + .build(), + None => self.err_ty(), + }; + + param_tys.push(va_list_ty) + } for (ty, pat) in param_tys.into_iter().zip(self.body.params.iter()) { let ty = self.insert_type_vars(ty); let ty = self.normalize_associated_types_in(ty); @@ -551,14 +582,17 @@ impl<'a> InferenceContext<'a> { } else { &*data.ret_type }; - let return_ty = self.make_ty_with_mode(return_ty, ImplTraitLoweringMode::Opaque); - self.return_ty = return_ty; - if let Some(rpits) = self.db.return_type_impl_traits(func) { + let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver) + .with_impl_trait_mode(ImplTraitLoweringMode::Opaque); + let return_ty = ctx.lower_ty(return_ty); + let return_ty = self.insert_type_vars(return_ty); + + let return_ty = if let Some(rpits) = self.db.return_type_impl_traits(func) { // RPIT opaque types use substitution of their parent function. 
let fn_placeholders = TyBuilder::placeholder_subst(self.db, func); - self.return_ty = fold_tys( - self.return_ty.clone(), + fold_tys( + return_ty, |ty, _| { let opaque_ty_id = match ty.kind(Interner) { TyKind::OpaqueType(opaque_ty_id, _) => *opaque_ty_id, @@ -579,14 +613,18 @@ impl<'a> InferenceContext<'a> { let (var_predicate, binders) = predicate .substitute(Interner, &var_subst) .into_value_and_skipped_binders(); - always!(binders.len(Interner) == 0); // quantified where clauses not yet handled + always!(binders.is_empty(Interner)); // quantified where clauses not yet handled self.push_obligation(var_predicate.cast(Interner)); } var }, DebruijnIndex::INNERMOST, - ); - } + ) + } else { + return_ty + }; + + self.return_ty = self.normalize_associated_types_in(return_ty); } fn infer_body(&mut self) { @@ -609,8 +647,8 @@ impl<'a> InferenceContext<'a> { self.result.variant_resolutions.insert(id, variant); } - fn write_assoc_resolution(&mut self, id: ExprOrPatId, item: AssocItemId) { - self.result.assoc_resolutions.insert(id, item); + fn write_assoc_resolution(&mut self, id: ExprOrPatId, item: AssocItemId, subs: Substitution) { + self.result.assoc_resolutions.insert(id, (item, subs)); } fn write_pat_ty(&mut self, pat: PatId, ty: Ty) { @@ -621,23 +659,14 @@ impl<'a> InferenceContext<'a> { self.result.diagnostics.push(diagnostic); } - fn make_ty_with_mode( - &mut self, - type_ref: &TypeRef, - impl_trait_mode: ImplTraitLoweringMode, - ) -> Ty { + fn make_ty(&mut self, type_ref: &TypeRef) -> Ty { // FIXME use right resolver for block - let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver) - .with_impl_trait_mode(impl_trait_mode); + let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver); let ty = ctx.lower_ty(type_ref); let ty = self.insert_type_vars(ty); self.normalize_associated_types_in(ty) } - fn make_ty(&mut self, type_ref: &TypeRef) -> Ty { - self.make_ty_with_mode(type_ref, ImplTraitLoweringMode::Disallowed) - } - fn err_ty(&self) -> Ty { self.result.standard_types.unknown.clone() } @@ -656,7 +685,7 @@ impl<'a> InferenceContext<'a> { } } - /// Replaces Ty::Unknown by a new type var, so we can maybe still infer it. + /// Replaces `Ty::Error` by a new type var, so we can maybe still infer it. fn insert_type_vars_shallow(&mut self, ty: Ty) -> Ty { match ty.kind(Interner) { TyKind::Error => self.table.new_type_var(), @@ -983,6 +1012,11 @@ impl<'a> InferenceContext<'a> { let trait_ = self.resolve_ops_index()?; self.db.trait_data(trait_).associated_type_by_name(&name![Output]) } + + fn resolve_va_list(&self) -> Option { + let struct_ = self.resolve_lang_item(name![va_list])?.as_struct()?; + Some(struct_.into()) + } } /// When inferring an expression, we propagate downward whatever type hint we diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs index b1f4de8260..8f9cdac378 100644 --- a/crates/hir-ty/src/infer/expr.rs +++ b/crates/hir-ty/src/infer/expr.rs @@ -1,13 +1,12 @@ //! Type inference for expressions. 
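The `is_varargs()` handling above appends the `va_list` lang item type for the trailing `...` parameter of C-variadic functions. A rough sketch of the kind of definition this targets (requires nightly `#![feature(c_variadic)]`; hypothetical `add` function, not part of this patch):

#![feature(c_variadic)]

// Sketch only: the trailing `...` parameter is typed via the `va_list` lang
// item (`VaListImpl`), which the new inference code resolves and pushes as
// the final parameter type.
pub unsafe extern "C" fn add(n: usize, mut args: ...) -> usize {
    (0..n).map(|_| args.arg::<usize>()).sum()
}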
use std::{ - collections::hash_map::Entry, iter::{repeat, repeat_with}, mem, }; use chalk_ir::{ - cast::Cast, fold::Shift, DebruijnIndex, GenericArgData, Mutability, TyVariableKind, + cast::Cast, fold::Shift, DebruijnIndex, GenericArgData, Mutability, TyKind, TyVariableKind, }; use hir_def::{ expr::{ @@ -35,8 +34,8 @@ use crate::{ primitive::{self, UintTy}, static_lifetime, to_chalk_trait_id, utils::{generics, Generics}, - AdtId, Binders, CallableDefId, FnPointer, FnSig, FnSubst, Interner, Rawness, Scalar, - Substitution, TraitRef, Ty, TyBuilder, TyExt, TyKind, + Adjust, Adjustment, AdtId, AutoBorrow, Binders, CallableDefId, FnPointer, FnSig, FnSubst, + Interner, Rawness, Scalar, Substitution, TraitRef, Ty, TyBuilder, TyExt, }; use super::{ @@ -152,11 +151,20 @@ impl<'a> InferenceContext<'a> { .1 } Expr::TryBlock { body } => { - self.with_breakable_ctx(BreakableKind::Border, self.err_ty(), None, |this| { - let _inner = this.infer_expr(*body, expected); + // The type that is returned from the try block + let try_ty = self.table.new_type_var(); + if let Some(ty) = expected.only_has_type(&mut self.table) { + self.unify(&try_ty, &ty); + } + + // The ok-ish type that is expected from the last expression + let ok_ty = self.resolve_associated_type(try_ty.clone(), self.resolve_ops_try_ok()); + + self.with_breakable_ctx(BreakableKind::Block, ok_ty.clone(), None, |this| { + this.infer_expr(*body, &Expectation::has_type(ok_ty)); }); - // FIXME should be std::result::Result<{inner}, _> - self.err_ty() + + try_ty } Expr::Async { body } => { let ret_ty = self.table.new_type_var(); @@ -326,6 +334,7 @@ impl<'a> InferenceContext<'a> { let (param_tys, ret_ty) = match res { Some(res) => { let adjustments = auto_deref_adjust_steps(&derefs); + // FIXME: Handle call adjustments for Fn/FnMut self.write_expr_adj(*callee, adjustments); res } @@ -465,6 +474,12 @@ impl<'a> InferenceContext<'a> { TyKind::Error.intern(Interner) } } + Expr::Yeet { expr } => { + if let &Some(expr) = expr { + self.infer_expr_inner(expr, &Expectation::None); + } + TyKind::Never.intern(Interner) + } Expr::RecordLit { path, fields, spread, .. 
} => { let (ty, def_id) = self.resolve_variant(path.as_deref(), false); if let Some(variant) = def_id { @@ -506,6 +521,7 @@ impl<'a> InferenceContext<'a> { let receiver_ty = self.infer_expr_inner(*expr, &Expectation::none()); let mut autoderef = Autoderef::new(&mut self.table, receiver_ty); + let mut private_field = None; let ty = autoderef.by_ref().find_map(|(derefed_ty, _)| { let (field_id, parameters) = match derefed_ty.kind(Interner) { TyKind::Tuple(_, substs) => { @@ -532,13 +548,8 @@ impl<'a> InferenceContext<'a> { let is_visible = self.db.field_visibilities(field_id.parent)[field_id.local_id] .is_visible_from(self.db.upcast(), self.resolver.module()); if !is_visible { - // Write down the first field resolution even if it is not visible - // This aids IDE features for private fields like goto def and in - // case of autoderef finding an applicable field, this will be - // overwritten in a following cycle - if let Entry::Vacant(entry) = self.result.field_resolutions.entry(tgt_expr) - { - entry.insert(field_id); + if private_field.is_none() { + private_field = Some(field_id); } return None; } @@ -557,7 +568,17 @@ impl<'a> InferenceContext<'a> { let ty = self.normalize_associated_types_in(ty); ty } - _ => self.err_ty(), + _ => { + // Write down the first private field resolution if we found no field + // This aids IDE features for private fields like goto def + if let Some(field) = private_field { + self.result.field_resolutions.insert(tgt_expr, field); + self.result + .diagnostics + .push(InferenceDiagnostic::PrivateField { expr: tgt_expr, field }); + } + self.err_ty() + } }; ty } @@ -940,7 +961,7 @@ impl<'a> InferenceContext<'a> { Expr::RecordLit { path, fields, .. } => { let subs = fields.iter().map(|f| (f.name.clone(), f.expr)); - self.infer_record_pat_like(path.as_deref(), &rhs_ty, (), lhs.into(), subs) + self.infer_record_pat_like(path.as_deref(), &rhs_ty, (), lhs, subs) } Expr::Underscore => rhs_ty.clone(), _ => { @@ -1018,14 +1039,38 @@ impl<'a> InferenceContext<'a> { self.infer_expr_coerce(rhs, &Expectation::has_type(rhs_ty.clone())); let ret_ty = match method_ty.callable_sig(self.db) { - Some(sig) => sig.ret().clone(), + Some(sig) => { + let p_left = &sig.params()[0]; + if matches!(op, BinaryOp::CmpOp(..) | BinaryOp::Assignment { .. 
}) { + if let &TyKind::Ref(mtbl, _, _) = p_left.kind(Interner) { + self.write_expr_adj( + lhs, + vec![Adjustment { + kind: Adjust::Borrow(AutoBorrow::Ref(mtbl)), + target: p_left.clone(), + }], + ); + } + } + let p_right = &sig.params()[1]; + if matches!(op, BinaryOp::CmpOp(..)) { + if let &TyKind::Ref(mtbl, _, _) = p_right.kind(Interner) { + self.write_expr_adj( + rhs, + vec![Adjustment { + kind: Adjust::Borrow(AutoBorrow::Ref(mtbl)), + target: p_right.clone(), + }], + ); + } + } + sig.ret().clone() + } None => self.err_ty(), }; let ret_ty = self.normalize_associated_types_in(ret_ty); - // FIXME: record autoref adjustments - // use knowledge of built-in binary ops, which can sometimes help inference if let Some(builtin_rhs) = self.builtin_binary_op_rhs_expectation(op, lhs_ty.clone()) { self.unify(&builtin_rhs, &rhs_ty); @@ -1122,20 +1167,26 @@ impl<'a> InferenceContext<'a> { let traits_in_scope = self.resolver.traits_in_scope(self.db.upcast()); let resolved = method_resolution::lookup_method( - &canonicalized_receiver.value, self.db, + &canonicalized_receiver.value, self.trait_env.clone(), &traits_in_scope, VisibleFromModule::Filter(self.resolver.module()), method_name, ); let (receiver_ty, method_ty, substs) = match resolved { - Some((adjust, func)) => { + Some((adjust, func, visible)) => { let (ty, adjustments) = adjust.apply(&mut self.table, receiver_ty); let generics = generics(self.db.upcast(), func.into()); let substs = self.substs_for_method_call(generics, generic_args); self.write_expr_adj(receiver, adjustments); self.write_method_resolution(tgt_expr, func, substs.clone()); + if !visible { + self.push_diagnostic(InferenceDiagnostic::PrivateAssocItem { + id: tgt_expr.into(), + item: func.into(), + }) + } (ty, self.db.value_ty(func.into()), substs) } None => ( @@ -1309,7 +1360,7 @@ impl<'a> InferenceContext<'a> { ty, c, ParamLoweringMode::Placeholder, - || generics(this.db.upcast(), (&this.resolver).generic_def().unwrap()), + || generics(this.db.upcast(), this.resolver.generic_def().unwrap()), DebruijnIndex::INNERMOST, ) }, diff --git a/crates/hir-ty/src/infer/pat.rs b/crates/hir-ty/src/infer/pat.rs index 53259d66de..f154dac8e8 100644 --- a/crates/hir-ty/src/infer/pat.rs +++ b/crates/hir-ty/src/infer/pat.rs @@ -153,7 +153,7 @@ impl<'a> InferenceContext<'a> { ) -> Ty { let mut expected = self.resolve_ty_shallow(expected); - if is_non_ref_pat(&self.body, pat) { + if is_non_ref_pat(self.body, pat) { let mut pat_adjustments = Vec::new(); while let Some((inner, _lifetime, mutability)) = expected.as_reference() { pat_adjustments.push(expected.clone()); @@ -220,7 +220,7 @@ impl<'a> InferenceContext<'a> { ), Pat::Record { path: p, args: fields, ellipsis: _ } => { let subs = fields.iter().map(|f| (f.name.clone(), f.pat)); - self.infer_record_pat_like(p.as_deref(), &expected, default_bm, pat.into(), subs) + self.infer_record_pat_like(p.as_deref(), &expected, default_bm, pat, subs) } Pat::Path(path) => { // FIXME use correct resolver for the surrounding expression diff --git a/crates/hir-ty/src/infer/path.rs b/crates/hir-ty/src/infer/path.rs index ebe9d6fb5e..8bd17c0f39 100644 --- a/crates/hir-ty/src/infer/path.rs +++ b/crates/hir-ty/src/infer/path.rs @@ -7,13 +7,15 @@ use hir_def::{ AdtId, AssocItemId, EnumVariantId, ItemContainerId, Lookup, }; use hir_expand::name::Name; +use stdx::never; use crate::{ builder::ParamKind, consteval, method_resolution::{self, VisibleFromModule}, utils::generics, - Interner, Substitution, TraitRefExt, Ty, TyBuilder, TyExt, TyKind, ValueTyDefId, + 
InferenceDiagnostic, Interner, Substitution, TraitRefExt, Ty, TyBuilder, TyExt, TyKind, + ValueTyDefId, }; use super::{ExprOrPatId, InferenceContext, TraitRef}; @@ -212,7 +214,7 @@ impl<'a> InferenceContext<'a> { AssocItemId::TypeAliasId(_) => unreachable!(), }; - self.write_assoc_resolution(id, item); + self.write_assoc_resolution(id, item, trait_ref.substitution.clone()); Some((def, Some(trait_ref.substitution))) } @@ -233,7 +235,8 @@ impl<'a> InferenceContext<'a> { let canonical_ty = self.canonicalize(ty.clone()); let traits_in_scope = self.resolver.traits_in_scope(self.db.upcast()); - method_resolution::iterate_method_candidates( + let mut not_visible = None; + let res = method_resolution::iterate_method_candidates( &canonical_ty.value, self.db, self.table.trait_env.clone(), @@ -241,7 +244,7 @@ impl<'a> InferenceContext<'a> { VisibleFromModule::Filter(self.resolver.module()), Some(name), method_resolution::LookupMode::Path, - move |_ty, item| { + |_ty, item, visible| { let (def, container) = match item { AssocItemId::FunctionId(f) => { (ValueNs::FunctionId(f), f.lookup(self.db.upcast()).container) @@ -259,7 +262,7 @@ impl<'a> InferenceContext<'a> { let impl_self_ty = self.db.impl_self_ty(impl_id).substitute(Interner, &impl_substs); self.unify(&impl_self_ty, &ty); - Some(impl_substs) + impl_substs } ItemContainerId::TraitId(trait_) => { // we're picking this method @@ -268,15 +271,32 @@ impl<'a> InferenceContext<'a> { .fill_with_inference_vars(&mut self.table) .build(); self.push_obligation(trait_ref.clone().cast(Interner)); - Some(trait_ref.substitution) + trait_ref.substitution + } + ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => { + never!("assoc item contained in module/extern block"); + return None; } - ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => None, }; - self.write_assoc_resolution(id, item); - Some((def, substs)) + if visible { + Some((def, item, Some(substs), true)) + } else { + if not_visible.is_none() { + not_visible = Some((def, item, Some(substs), false)); + } + None + } }, - ) + ); + let res = res.or(not_visible); + if let Some((_, item, Some(ref substs), visible)) = res { + self.write_assoc_resolution(id, item, substs.clone()); + if !visible { + self.push_diagnostic(InferenceDiagnostic::PrivateAssocItem { id, item }) + } + } + res.map(|(def, _, substs, _)| (def, substs)) } fn resolve_enum_variant_on_ty( diff --git a/crates/hir-ty/src/infer/unify.rs b/crates/hir-ty/src/infer/unify.rs index 12f45f00f9..e7ddd1591f 100644 --- a/crates/hir-ty/src/infer/unify.rs +++ b/crates/hir-ty/src/infer/unify.rs @@ -1,6 +1,6 @@ //! Unification and canonicalization logic. -use std::{fmt, mem, sync::Arc}; +use std::{fmt, iter, mem, sync::Arc}; use chalk_ir::{ cast::Cast, fold::TypeFoldable, interner::HasInterner, zip::Zip, CanonicalVarKind, FloatTy, @@ -128,9 +128,13 @@ pub(crate) fn unify( )) } -#[derive(Copy, Clone, Debug)] -pub(crate) struct TypeVariableData { - diverging: bool, +bitflags::bitflags! 
{ + #[derive(Default)] + pub(crate) struct TypeVariableFlags: u8 { + const DIVERGING = 1 << 0; + const INTEGER = 1 << 1; + const FLOAT = 1 << 2; + } } type ChalkInferenceTable = chalk_solve::infer::InferenceTable<Interner>; @@ -140,14 +144,14 @@ pub(crate) struct InferenceTable<'a> { pub(crate) db: &'a dyn HirDatabase, pub(crate) trait_env: Arc<TraitEnvironment>, var_unification_table: ChalkInferenceTable, - type_variable_table: Vec<TypeVariableData>, + type_variable_table: Vec<TypeVariableFlags>, pending_obligations: Vec<Canonicalized<InEnvironment<Goal>>>, } pub(crate) struct InferenceTableSnapshot { var_table_snapshot: chalk_solve::infer::InferenceSnapshot<Interner>, pending_obligations: Vec<Canonicalized<InEnvironment<Goal>>>, - type_variable_table_snapshot: Vec<TypeVariableData>, + type_variable_table_snapshot: Vec<TypeVariableFlags>, } impl<'a> InferenceTable<'a> { @@ -169,19 +173,19 @@ impl<'a> InferenceTable<'a> { /// result. pub(super) fn propagate_diverging_flag(&mut self) { for i in 0..self.type_variable_table.len() { - if !self.type_variable_table[i].diverging { + if !self.type_variable_table[i].contains(TypeVariableFlags::DIVERGING) { continue; } let v = InferenceVar::from(i as u32); let root = self.var_unification_table.inference_var_root(v); if let Some(data) = self.type_variable_table.get_mut(root.index() as usize) { - data.diverging = true; + *data |= TypeVariableFlags::DIVERGING; } } } pub(super) fn set_diverging(&mut self, iv: InferenceVar, diverging: bool) { - self.type_variable_table[iv.index() as usize].diverging = diverging; + self.type_variable_table[iv.index() as usize].set(TypeVariableFlags::DIVERGING, diverging); } fn fallback_value(&self, iv: InferenceVar, kind: TyVariableKind) -> Ty { @@ -189,7 +193,7 @@ impl<'a> InferenceTable<'a> { _ if self .type_variable_table .get(iv.index() as usize) - .map_or(false, |data| data.diverging) => + .map_or(false, |data| data.contains(TypeVariableFlags::DIVERGING)) => { TyKind::Never } @@ -247,10 +251,8 @@ impl<'a> InferenceTable<'a> { } fn extend_type_variable_table(&mut self, to_index: usize) { - self.type_variable_table.extend( - (0..1 + to_index - self.type_variable_table.len()) - .map(|_| TypeVariableData { diverging: false }), - ); + let count = to_index - self.type_variable_table.len() + 1; + self.type_variable_table.extend(iter::repeat(TypeVariableFlags::default()).take(count)); } fn new_var(&mut self, kind: TyVariableKind, diverging: bool) -> Ty { @@ -258,7 +260,15 @@ impl<'a> InferenceTable<'a> { // Chalk might have created some type variables for its own purposes that we don't know about... self.extend_type_variable_table(var.index() as usize); assert_eq!(var.index() as usize, self.type_variable_table.len() - 1); - self.type_variable_table[var.index() as usize].diverging = diverging; + let flags = self.type_variable_table.get_mut(var.index() as usize).unwrap(); + if diverging { + *flags |= TypeVariableFlags::DIVERGING; + } + if matches!(kind, TyVariableKind::Integer) { + *flags |= TypeVariableFlags::INTEGER; + } else if matches!(kind, TyVariableKind::Float) { + *flags |= TypeVariableFlags::FLOAT; + } var.to_ty_with_kind(Interner, kind) } @@ -340,6 +350,51 @@ impl<'a> InferenceTable<'a> { self.resolve_with_fallback(t, &|_, _, d, _| d) } + /// Apply a fallback to unresolved scalar types. Integer type variables and float type + /// variables are replaced with i32 and f64, respectively. + /// + /// This method is only intended to be called just before returning inference results (i.e. in + /// `InferenceContext::resolve_all()`). + /// + /// FIXME: This method currently doesn't apply fallback to unconstrained general type variables + /// whereas rustc replaces them with `()` or `!`.
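A minimal sketch, separate from this diff, of the user-visible effect of the scalar fallback that the doc comment above describes and that `fallback_if_possible` below implements; the function name `scalar_fallback_demo` is invented for illustration only:

fn scalar_fallback_demo() {
    // With no other constraints, an integer literal falls back to `i32` and a
    // float literal to `f64`, matching rustc's defaulting rules.
    let n = 1;
    let x = 1.0;
    assert_eq!(std::mem::size_of_val(&n), 4); // i32
    assert_eq!(std::mem::size_of_val(&x), 8); // f64
}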
+ pub(super) fn fallback_if_possible(&mut self) { + let int_fallback = TyKind::Scalar(Scalar::Int(IntTy::I32)).intern(Interner); + let float_fallback = TyKind::Scalar(Scalar::Float(FloatTy::F64)).intern(Interner); + + let scalar_vars: Vec<_> = self + .type_variable_table + .iter() + .enumerate() + .filter_map(|(index, flags)| { + let kind = if flags.contains(TypeVariableFlags::INTEGER) { + TyVariableKind::Integer + } else if flags.contains(TypeVariableFlags::FLOAT) { + TyVariableKind::Float + } else { + return None; + }; + + // FIXME: This is not really the nicest way to get `InferenceVar`s. Can we get them + // without directly constructing them from `index`? + let var = InferenceVar::from(index as u32).to_ty(Interner, kind); + Some(var) + }) + .collect(); + + for var in scalar_vars { + let maybe_resolved = self.resolve_ty_shallow(&var); + if let TyKind::InferenceVar(_, kind) = maybe_resolved.kind(Interner) { + let fallback = match kind { + TyVariableKind::Integer => &int_fallback, + TyVariableKind::Float => &float_fallback, + TyVariableKind::General => unreachable!(), + }; + self.unify(&var, fallback); + } + } + } + /// Unify two relatable values (e.g. `Ty`) and register new trait goals that arise from that. pub(crate) fn unify>(&mut self, ty1: &T, ty2: &T) -> bool { let result = match self.try_unify(ty1, ty2) { diff --git a/crates/hir-ty/src/interner.rs b/crates/hir-ty/src/interner.rs index ca76e08fdd..441503a300 100644 --- a/crates/hir-ty/src/interner.rs +++ b/crates/hir-ty/src/interner.rs @@ -143,7 +143,7 @@ impl chalk_ir::interner::Interner for Interner { fn debug_goal(goal: &Goal, fmt: &mut fmt::Formatter<'_>) -> Option { let goal_data = goal.data(Interner); - Some(write!(fmt, "{:?}", goal_data)) + Some(write!(fmt, "{goal_data:?}")) } fn debug_goals( @@ -228,7 +228,7 @@ impl chalk_ir::interner::Interner for Interner { Interned::new(InternedWrapper(chalk_ir::TyData { kind, flags })) } - fn ty_data<'a>(self, ty: &'a Self::InternedType) -> &'a chalk_ir::TyData { + fn ty_data(self, ty: &Self::InternedType) -> &chalk_ir::TyData { &ty.0 } @@ -236,10 +236,7 @@ impl chalk_ir::interner::Interner for Interner { Interned::new(InternedWrapper(lifetime)) } - fn lifetime_data<'a>( - self, - lifetime: &'a Self::InternedLifetime, - ) -> &'a chalk_ir::LifetimeData { + fn lifetime_data(self, lifetime: &Self::InternedLifetime) -> &chalk_ir::LifetimeData { &lifetime.0 } @@ -247,7 +244,7 @@ impl chalk_ir::interner::Interner for Interner { Interned::new(InternedWrapper(constant)) } - fn const_data<'a>(self, constant: &'a Self::InternedConst) -> &'a chalk_ir::ConstData { + fn const_data(self, constant: &Self::InternedConst) -> &chalk_ir::ConstData { &constant.0 } @@ -267,10 +264,10 @@ impl chalk_ir::interner::Interner for Interner { parameter } - fn generic_arg_data<'a>( + fn generic_arg_data( self, - parameter: &'a Self::InternedGenericArg, - ) -> &'a chalk_ir::GenericArgData { + parameter: &Self::InternedGenericArg, + ) -> &chalk_ir::GenericArgData { parameter } @@ -285,11 +282,11 @@ impl chalk_ir::interner::Interner for Interner { data.into_iter().collect() } - fn goal_data<'a>(self, goal: &'a Self::InternedGoal) -> &'a GoalData { + fn goal_data(self, goal: &Self::InternedGoal) -> &GoalData { goal } - fn goals_data<'a>(self, goals: &'a Self::InternedGoals) -> &'a [Goal] { + fn goals_data(self, goals: &Self::InternedGoals) -> &[Goal] { goals } @@ -300,10 +297,7 @@ impl chalk_ir::interner::Interner for Interner { Ok(Interned::new(InternedWrapper(data.into_iter().collect::>()?))) } - fn 
substitution_data<'a>( - self, - substitution: &'a Self::InternedSubstitution, - ) -> &'a [GenericArg] { + fn substitution_data(self, substitution: &Self::InternedSubstitution) -> &[GenericArg] { &substitution.as_ref().0 } @@ -314,10 +308,10 @@ impl chalk_ir::interner::Interner for Interner { data } - fn program_clause_data<'a>( + fn program_clause_data( self, - clause: &'a Self::InternedProgramClause, - ) -> &'a chalk_ir::ProgramClauseData { + clause: &Self::InternedProgramClause, + ) -> &chalk_ir::ProgramClauseData { clause } @@ -328,10 +322,10 @@ impl chalk_ir::interner::Interner for Interner { Ok(Interned::new(InternedWrapper(data.into_iter().collect::>()?))) } - fn program_clauses_data<'a>( + fn program_clauses_data( self, - clauses: &'a Self::InternedProgramClauses, - ) -> &'a [chalk_ir::ProgramClause] { + clauses: &Self::InternedProgramClauses, + ) -> &[chalk_ir::ProgramClause] { clauses } @@ -342,10 +336,10 @@ impl chalk_ir::interner::Interner for Interner { Ok(Interned::new(InternedWrapper(data.into_iter().collect::>()?))) } - fn quantified_where_clauses_data<'a>( + fn quantified_where_clauses_data( self, - clauses: &'a Self::InternedQuantifiedWhereClauses, - ) -> &'a [chalk_ir::QuantifiedWhereClause] { + clauses: &Self::InternedQuantifiedWhereClauses, + ) -> &[chalk_ir::QuantifiedWhereClause] { clauses } @@ -356,10 +350,10 @@ impl chalk_ir::interner::Interner for Interner { Ok(Interned::new(InternedWrapper(data.into_iter().collect::>()?))) } - fn variable_kinds_data<'a>( + fn variable_kinds_data( self, - parameter_kinds: &'a Self::InternedVariableKinds, - ) -> &'a [chalk_ir::VariableKind] { + parameter_kinds: &Self::InternedVariableKinds, + ) -> &[chalk_ir::VariableKind] { ¶meter_kinds.as_ref().0 } @@ -370,10 +364,10 @@ impl chalk_ir::interner::Interner for Interner { Ok(Interned::new(InternedWrapper(data.into_iter().collect::>()?))) } - fn canonical_var_kinds_data<'a>( + fn canonical_var_kinds_data( self, - canonical_var_kinds: &'a Self::InternedCanonicalVarKinds, - ) -> &'a [chalk_ir::CanonicalVarKind] { + canonical_var_kinds: &Self::InternedCanonicalVarKinds, + ) -> &[chalk_ir::CanonicalVarKind] { canonical_var_kinds } @@ -384,10 +378,10 @@ impl chalk_ir::interner::Interner for Interner { data.into_iter().collect() } - fn constraints_data<'a>( + fn constraints_data( self, - constraints: &'a Self::InternedConstraints, - ) -> &'a [chalk_ir::InEnvironment>] { + constraints: &Self::InternedConstraints, + ) -> &[chalk_ir::InEnvironment>] { constraints } fn debug_closure_id( @@ -410,10 +404,7 @@ impl chalk_ir::interner::Interner for Interner { Ok(Interned::new(InternedWrapper(data.into_iter().collect::>()?))) } - fn variances_data<'a>( - self, - variances: &'a Self::InternedVariances, - ) -> &'a [chalk_ir::Variance] { + fn variances_data(self, variances: &Self::InternedVariances) -> &[chalk_ir::Variance] { variances } } diff --git a/crates/hir-ty/src/lang_items.rs b/crates/hir-ty/src/lang_items.rs new file mode 100644 index 0000000000..afc54e729f --- /dev/null +++ b/crates/hir-ty/src/lang_items.rs @@ -0,0 +1,20 @@ +//! 
Functions to detect special lang items + +use hir_def::{AdtId, HasModule}; +use hir_expand::name; + +use crate::db::HirDatabase; + +pub fn is_box(adt: AdtId, db: &dyn HirDatabase) -> bool { + let owned_box = name![owned_box].to_smol_str(); + let krate = adt.module(db.upcast()).krate(); + let box_adt = db.lang_item(krate, owned_box).and_then(|it| it.as_struct()).map(AdtId::from); + Some(adt) == box_adt +} + +pub fn is_unsafe_cell(adt: AdtId, db: &dyn HirDatabase) -> bool { + let owned_box = name![unsafe_cell].to_smol_str(); + let krate = adt.module(db.upcast()).krate(); + let box_adt = db.lang_item(krate, owned_box).and_then(|it| it.as_struct()).map(AdtId::from); + Some(adt) == box_adt +} diff --git a/crates/hir-ty/src/layout.rs b/crates/hir-ty/src/layout.rs new file mode 100644 index 0000000000..7a1cca3143 --- /dev/null +++ b/crates/hir-ty/src/layout.rs @@ -0,0 +1,279 @@ +//! Compute the binary representation of a type + +use std::sync::Arc; + +use base_db::CrateId; +use chalk_ir::{AdtId, TyKind}; +use hir_def::{ + layout::{ + Abi, FieldsShape, Integer, Layout, LayoutCalculator, LayoutError, Primitive, ReprOptions, + RustcEnumVariantIdx, Scalar, Size, StructKind, TargetDataLayout, Variants, WrappingRange, + }, + LocalFieldId, +}; +use stdx::never; + +use crate::{db::HirDatabase, Interner, Substitution, Ty}; + +use self::adt::struct_variant_idx; +pub use self::{ + adt::{layout_of_adt_query, layout_of_adt_recover}, + target::target_data_layout_query, +}; + +macro_rules! user_error { + ($x: expr) => { + return Err(LayoutError::UserError(format!($x))) + }; +} + +mod adt; +mod target; + +struct LayoutCx<'a> { + db: &'a dyn HirDatabase, + krate: CrateId, +} + +impl LayoutCalculator for LayoutCx<'_> { + type TargetDataLayoutRef = Arc; + + fn delay_bug(&self, txt: &str) { + never!("{}", txt); + } + + fn current_data_layout(&self) -> Arc { + self.db.target_data_layout(self.krate) + } +} + +fn scalar_unit(dl: &TargetDataLayout, value: Primitive) -> Scalar { + Scalar::Initialized { value, valid_range: WrappingRange::full(value.size(dl)) } +} + +fn scalar(dl: &TargetDataLayout, value: Primitive) -> Layout { + Layout::scalar(dl, scalar_unit(dl, value)) +} + +pub fn layout_of_ty(db: &dyn HirDatabase, ty: &Ty, krate: CrateId) -> Result { + let cx = LayoutCx { db, krate }; + let dl = &*cx.current_data_layout(); + Ok(match ty.kind(Interner) { + TyKind::Adt(AdtId(def), subst) => db.layout_of_adt(*def, subst.clone())?, + TyKind::Scalar(s) => match s { + chalk_ir::Scalar::Bool => Layout::scalar( + dl, + Scalar::Initialized { + value: Primitive::Int(Integer::I8, false), + valid_range: WrappingRange { start: 0, end: 1 }, + }, + ), + chalk_ir::Scalar::Char => Layout::scalar( + dl, + Scalar::Initialized { + value: Primitive::Int(Integer::I32, false), + valid_range: WrappingRange { start: 0, end: 0x10FFFF }, + }, + ), + chalk_ir::Scalar::Int(i) => scalar( + dl, + Primitive::Int( + match i { + chalk_ir::IntTy::Isize => dl.ptr_sized_integer(), + chalk_ir::IntTy::I8 => Integer::I8, + chalk_ir::IntTy::I16 => Integer::I16, + chalk_ir::IntTy::I32 => Integer::I32, + chalk_ir::IntTy::I64 => Integer::I64, + chalk_ir::IntTy::I128 => Integer::I128, + }, + true, + ), + ), + chalk_ir::Scalar::Uint(i) => scalar( + dl, + Primitive::Int( + match i { + chalk_ir::UintTy::Usize => dl.ptr_sized_integer(), + chalk_ir::UintTy::U8 => Integer::I8, + chalk_ir::UintTy::U16 => Integer::I16, + chalk_ir::UintTy::U32 => Integer::I32, + chalk_ir::UintTy::U64 => Integer::I64, + chalk_ir::UintTy::U128 => Integer::I128, + }, + false, + ), + ), + 
chalk_ir::Scalar::Float(f) => scalar( + dl, + match f { + chalk_ir::FloatTy::F32 => Primitive::F32, + chalk_ir::FloatTy::F64 => Primitive::F64, + }, + ), + }, + TyKind::Tuple(len, tys) => { + let kind = if *len == 0 { StructKind::AlwaysSized } else { StructKind::MaybeUnsized }; + + let fields = tys + .iter(Interner) + .map(|k| layout_of_ty(db, k.assert_ty_ref(Interner), krate)) + .collect::, _>>()?; + let fields = fields.iter().collect::>(); + let fields = fields.iter().collect::>(); + cx.univariant(dl, &fields, &ReprOptions::default(), kind).ok_or(LayoutError::Unknown)? + } + TyKind::Array(element, count) => { + let count = match count.data(Interner).value { + chalk_ir::ConstValue::Concrete(c) => match c.interned { + hir_def::type_ref::ConstScalar::Int(x) => x as u64, + hir_def::type_ref::ConstScalar::UInt(x) => x as u64, + hir_def::type_ref::ConstScalar::Unknown => { + user_error!("unknown const generic parameter") + } + _ => user_error!("mismatched type of const generic parameter"), + }, + _ => return Err(LayoutError::HasPlaceholder), + }; + let element = layout_of_ty(db, element, krate)?; + let size = element.size.checked_mul(count, dl).ok_or(LayoutError::SizeOverflow)?; + + let abi = if count != 0 && matches!(element.abi, Abi::Uninhabited) { + Abi::Uninhabited + } else { + Abi::Aggregate { sized: true } + }; + + let largest_niche = if count != 0 { element.largest_niche } else { None }; + + Layout { + variants: Variants::Single { index: struct_variant_idx() }, + fields: FieldsShape::Array { stride: element.size, count }, + abi, + largest_niche, + align: element.align, + size, + } + } + TyKind::Slice(element) => { + let element = layout_of_ty(db, element, krate)?; + Layout { + variants: Variants::Single { index: struct_variant_idx() }, + fields: FieldsShape::Array { stride: element.size, count: 0 }, + abi: Abi::Aggregate { sized: false }, + largest_niche: None, + align: element.align, + size: Size::ZERO, + } + } + // Potentially-wide pointers. + TyKind::Ref(_, _, pointee) | TyKind::Raw(_, pointee) => { + let mut data_ptr = scalar_unit(dl, Primitive::Pointer); + if matches!(ty.kind(Interner), TyKind::Ref(..)) { + data_ptr.valid_range_mut().start = 1; + } + + // let pointee = tcx.normalize_erasing_regions(param_env, pointee); + // if pointee.is_sized(tcx.at(DUMMY_SP), param_env) { + // return Ok(tcx.intern_layout(LayoutS::scalar(cx, data_ptr))); + // } + + let unsized_part = struct_tail_erasing_lifetimes(db, pointee.clone()); + let metadata = match unsized_part.kind(Interner) { + TyKind::Slice(_) | TyKind::Str => { + scalar_unit(dl, Primitive::Int(dl.ptr_sized_integer(), false)) + } + TyKind::Dyn(..) => { + let mut vtable = scalar_unit(dl, Primitive::Pointer); + vtable.valid_range_mut().start = 1; + vtable + } + _ => { + // pointee is sized + return Ok(Layout::scalar(dl, data_ptr)); + } + }; + + // Effectively a (ptr, meta) tuple. 
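A minimal sketch, separate from this diff, of what the (ptr, meta) pair produced by the `scalar_pair` call that follows means for pointer sizes; `wide_pointer_demo` is an invented name and the assertions reflect how current rustc lays these types out:

fn wide_pointer_demo() {
    use std::mem::size_of;
    // A reference to a sized type is a thin pointer: the data pointer only.
    assert_eq!(size_of::<&u8>(), size_of::<usize>());
    // Slice and str references carry a length, and trait-object references a
    // vtable pointer, as metadata next to the data pointer: two words each.
    assert_eq!(size_of::<&[u8]>(), 2 * size_of::<usize>());
    assert_eq!(size_of::<&str>(), 2 * size_of::<usize>());
    assert_eq!(size_of::<&dyn std::fmt::Debug>(), 2 * size_of::<usize>());
}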
+ cx.scalar_pair(data_ptr, metadata) + } + TyKind::FnDef(_, _) => layout_of_unit(&cx, dl)?, + TyKind::Str => Layout { + variants: Variants::Single { index: struct_variant_idx() }, + fields: FieldsShape::Array { stride: Size::from_bytes(1), count: 0 }, + abi: Abi::Aggregate { sized: false }, + largest_niche: None, + align: dl.i8_align, + size: Size::ZERO, + }, + TyKind::Never => Layout { + variants: Variants::Single { index: struct_variant_idx() }, + fields: FieldsShape::Primitive, + abi: Abi::Uninhabited, + largest_niche: None, + align: dl.i8_align, + size: Size::ZERO, + }, + TyKind::Dyn(_) | TyKind::Foreign(_) => { + let mut unit = layout_of_unit(&cx, dl)?; + match unit.abi { + Abi::Aggregate { ref mut sized } => *sized = false, + _ => user_error!("bug"), + } + unit + } + TyKind::Function(_) => { + let mut ptr = scalar_unit(dl, Primitive::Pointer); + ptr.valid_range_mut().start = 1; + Layout::scalar(dl, ptr) + } + TyKind::Closure(_, _) + | TyKind::OpaqueType(_, _) + | TyKind::Generator(_, _) + | TyKind::GeneratorWitness(_, _) => return Err(LayoutError::NotImplemented), + TyKind::AssociatedType(_, _) + | TyKind::Error + | TyKind::Alias(_) + | TyKind::Placeholder(_) + | TyKind::BoundVar(_) + | TyKind::InferenceVar(_, _) => return Err(LayoutError::HasPlaceholder), + }) +} + +fn layout_of_unit(cx: &LayoutCx<'_>, dl: &TargetDataLayout) -> Result { + cx.univariant::( + dl, + &[], + &ReprOptions::default(), + StructKind::AlwaysSized, + ) + .ok_or(LayoutError::Unknown) +} + +fn struct_tail_erasing_lifetimes(db: &dyn HirDatabase, pointee: Ty) -> Ty { + match pointee.kind(Interner) { + TyKind::Adt(AdtId(adt), subst) => match adt { + &hir_def::AdtId::StructId(i) => { + let data = db.struct_data(i); + let mut it = data.variant_data.fields().iter().rev(); + match it.next() { + Some((f, _)) => field_ty(db, i.into(), f, subst), + None => pointee, + } + } + _ => pointee, + }, + _ => pointee, + } +} + +fn field_ty( + db: &dyn HirDatabase, + def: hir_def::VariantId, + fd: LocalFieldId, + subst: &Substitution, +) -> Ty { + db.field_types(def)[fd].clone().substitute(Interner, subst) +} + +#[cfg(test)] +mod tests; diff --git a/crates/hir-ty/src/layout/adt.rs b/crates/hir-ty/src/layout/adt.rs new file mode 100644 index 0000000000..23166a5a52 --- /dev/null +++ b/crates/hir-ty/src/layout/adt.rs @@ -0,0 +1,134 @@ +//! 
Compute the binary representation of structs, unions and enums + +use std::ops::Bound; + +use hir_def::{ + adt::VariantData, + layout::{Integer, IntegerExt, Layout, LayoutCalculator, LayoutError, RustcEnumVariantIdx}, + AdtId, EnumVariantId, HasModule, LocalEnumVariantId, VariantId, +}; +use la_arena::RawIdx; +use smallvec::SmallVec; + +use crate::{db::HirDatabase, lang_items::is_unsafe_cell, layout::field_ty, Substitution}; + +use super::{layout_of_ty, LayoutCx}; + +pub(crate) fn struct_variant_idx() -> RustcEnumVariantIdx { + RustcEnumVariantIdx(LocalEnumVariantId::from_raw(RawIdx::from(0))) +} + +pub fn layout_of_adt_query( + db: &dyn HirDatabase, + def: AdtId, + subst: Substitution, +) -> Result { + let cx = LayoutCx { db, krate: def.module(db.upcast()).krate() }; + let dl = cx.current_data_layout(); + let handle_variant = |def: VariantId, var: &VariantData| { + var.fields() + .iter() + .map(|(fd, _)| layout_of_ty(db, &field_ty(db, def, fd, &subst), cx.krate)) + .collect::, _>>() + }; + let (variants, is_enum, is_union, repr) = match def { + AdtId::StructId(s) => { + let data = db.struct_data(s); + let mut r = SmallVec::<[_; 1]>::new(); + r.push(handle_variant(s.into(), &data.variant_data)?); + (r, false, false, data.repr.unwrap_or_default()) + } + AdtId::UnionId(id) => { + let data = db.union_data(id); + let mut r = SmallVec::new(); + r.push(handle_variant(id.into(), &data.variant_data)?); + (r, false, true, data.repr.unwrap_or_default()) + } + AdtId::EnumId(e) => { + let data = db.enum_data(e); + let r = data + .variants + .iter() + .map(|(idx, v)| { + handle_variant( + EnumVariantId { parent: e, local_id: idx }.into(), + &v.variant_data, + ) + }) + .collect::, _>>()?; + (r, true, false, data.repr.unwrap_or_default()) + } + }; + let variants = + variants.iter().map(|x| x.iter().collect::>()).collect::>(); + let variants = variants.iter().map(|x| x.iter().collect()).collect(); + if is_union { + cx.layout_of_union(&repr, &variants).ok_or(LayoutError::Unknown) + } else { + cx.layout_of_struct_or_enum( + &repr, + &variants, + is_enum, + is_unsafe_cell(def, db), + layout_scalar_valid_range(db, def), + |min, max| Integer::repr_discr(&dl, &repr, min, max).unwrap_or((Integer::I8, false)), + variants.iter_enumerated().filter_map(|(id, _)| { + let AdtId::EnumId(e) = def else { return None }; + let d = match db + .const_eval_variant(EnumVariantId { parent: e, local_id: id.0 }) + .ok()? + { + crate::consteval::ComputedExpr::Literal(l) => match l { + hir_def::expr::Literal::Int(i, _) => i, + hir_def::expr::Literal::Uint(i, _) => i as i128, + _ => return None, + }, + _ => return None, + }; + Some((id, d)) + }), + // FIXME: The current code for niche-filling relies on variant indices + // instead of actual discriminants, so enums with + // explicit discriminants (RFC #2363) would misbehave and we should disable + // niche optimization for them. 
+ // The code that do it in rustc: + // repr.inhibit_enum_layout_opt() || def + // .variants() + // .iter_enumerated() + // .any(|(i, v)| v.discr != ty::VariantDiscr::Relative(i.as_u32())) + repr.inhibit_enum_layout_opt(), + !is_enum + && variants + .iter() + .next() + .and_then(|x| x.last().map(|x| x.is_unsized())) + .unwrap_or(true), + ) + .ok_or(LayoutError::SizeOverflow) + } +} + +fn layout_scalar_valid_range(db: &dyn HirDatabase, def: AdtId) -> (Bound, Bound) { + let attrs = db.attrs(def.into()); + let get = |name| { + let attr = attrs.by_key(name).tt_values(); + for tree in attr { + if let Some(x) = tree.token_trees.first() { + if let Ok(x) = x.to_string().parse() { + return Bound::Included(x); + } + } + } + Bound::Unbounded + }; + (get("rustc_layout_scalar_valid_range_start"), get("rustc_layout_scalar_valid_range_end")) +} + +pub fn layout_of_adt_recover( + _: &dyn HirDatabase, + _: &[String], + _: &AdtId, + _: &Substitution, +) -> Result { + user_error!("infinite sized recursive type"); +} diff --git a/crates/hir-ty/src/layout/target.rs b/crates/hir-ty/src/layout/target.rs new file mode 100644 index 0000000000..37b831652f --- /dev/null +++ b/crates/hir-ty/src/layout/target.rs @@ -0,0 +1,36 @@ +//! Target dependent parameters needed for layouts + +use std::sync::Arc; + +use base_db::CrateId; +use hir_def::layout::{Endian, Size, TargetDataLayout}; + +use crate::db::HirDatabase; + +pub fn target_data_layout_query(db: &dyn HirDatabase, krate: CrateId) -> Arc { + let crate_graph = db.crate_graph(); + let target_layout = &crate_graph[krate].target_layout; + let cfg_options = &crate_graph[krate].cfg_options; + Arc::new( + target_layout + .as_ref() + .and_then(|it| TargetDataLayout::parse_from_llvm_datalayout_string(it).ok()) + .unwrap_or_else(|| { + let endian = match cfg_options.get_cfg_values("target_endian").next() { + Some(x) if x.as_str() == "big" => Endian::Big, + _ => Endian::Little, + }; + let pointer_size = Size::from_bytes( + match cfg_options.get_cfg_values("target_pointer_width").next() { + Some(x) => match x.as_str() { + "16" => 2, + "32" => 4, + _ => 8, + }, + _ => 8, + }, + ); + TargetDataLayout { endian, pointer_size, ..TargetDataLayout::default() } + }), + ) +} diff --git a/crates/hir-ty/src/layout/tests.rs b/crates/hir-ty/src/layout/tests.rs new file mode 100644 index 0000000000..53838cf41d --- /dev/null +++ b/crates/hir-ty/src/layout/tests.rs @@ -0,0 +1,208 @@ +use base_db::fixture::WithFixture; +use chalk_ir::{AdtId, TyKind}; +use hir_def::{ + db::DefDatabase, + layout::{Layout, LayoutError}, +}; + +use crate::{test_db::TestDB, Interner, Substitution}; + +use super::layout_of_ty; + +fn eval_goal(ra_fixture: &str, minicore: &str) -> Result { + // using unstable cargo features failed, fall back to using plain rustc + let mut cmd = std::process::Command::new("rustc"); + cmd.args(["-Z", "unstable-options", "--print", "target-spec-json"]).env("RUSTC_BOOTSTRAP", "1"); + let output = cmd.output().unwrap(); + assert!(output.status.success(), "{}", output.status); + let stdout = String::from_utf8(output.stdout).unwrap(); + let target_data_layout = + stdout.split_once(r#""data-layout": ""#).unwrap().1.split_once('"').unwrap().0.to_owned(); + + let ra_fixture = format!( + "{minicore}//- /main.rs crate:test target_data_layout:{target_data_layout}\n{ra_fixture}", + ); + + let (db, file_id) = TestDB::with_single_file(&ra_fixture); + let module_id = db.module_for_file(file_id); + let def_map = module_id.def_map(&db); + let scope = &def_map[module_id.local_id].scope; + let adt_id = 
scope + .declarations() + .find_map(|x| match x { + hir_def::ModuleDefId::AdtId(x) => { + let name = match x { + hir_def::AdtId::StructId(x) => db.struct_data(x).name.to_smol_str(), + hir_def::AdtId::UnionId(x) => db.union_data(x).name.to_smol_str(), + hir_def::AdtId::EnumId(x) => db.enum_data(x).name.to_smol_str(), + }; + (name == "Goal").then_some(x) + } + _ => None, + }) + .unwrap(); + let goal_ty = TyKind::Adt(AdtId(adt_id), Substitution::empty(Interner)).intern(Interner); + layout_of_ty(&db, &goal_ty, module_id.krate()) +} + +#[track_caller] +fn check_size_and_align(ra_fixture: &str, minicore: &str, size: u64, align: u64) { + let l = eval_goal(ra_fixture, minicore).unwrap(); + assert_eq!(l.size.bytes(), size); + assert_eq!(l.align.abi.bytes(), align); +} + +#[track_caller] +fn check_fail(ra_fixture: &str, e: LayoutError) { + let r = eval_goal(ra_fixture, ""); + assert_eq!(r, Err(e)); +} + +macro_rules! size_and_align { + (minicore: $($x:tt),*;$($t:tt)*) => { + { + #[allow(dead_code)] + $($t)* + check_size_and_align( + stringify!($($t)*), + &format!("//- minicore: {}\n", stringify!($($x),*)), + ::std::mem::size_of::() as u64, + ::std::mem::align_of::() as u64, + ); + } + }; + ($($t:tt)*) => { + { + #[allow(dead_code)] + $($t)* + check_size_and_align( + stringify!($($t)*), + "", + ::std::mem::size_of::() as u64, + ::std::mem::align_of::() as u64, + ); + } + }; +} + +#[test] +fn hello_world() { + size_and_align! { + struct Goal(i32); + } +} + +#[test] +fn field_order_optimization() { + size_and_align! { + struct Goal(u8, i32, u8); + } + size_and_align! { + #[repr(C)] + struct Goal(u8, i32, u8); + } +} + +#[test] +fn recursive() { + size_and_align! { + struct Goal { + left: &'static Goal, + right: &'static Goal, + } + } + size_and_align! { + struct BoxLike(*mut T); + struct Goal(BoxLike); + } + check_fail( + r#"struct Goal(Goal);"#, + LayoutError::UserError("infinite sized recursive type".to_string()), + ); + check_fail( + r#" + struct Foo(Foo); + struct Goal(Foo); + "#, + LayoutError::UserError("infinite sized recursive type".to_string()), + ); +} + +#[test] +fn generic() { + size_and_align! { + struct Pair(A, B); + struct Goal(Pair, i64>); + } + size_and_align! { + struct X { + field1: [i32; N], + field2: [u8; N], + } + struct Goal(X<1000>); + } +} + +#[test] +fn enums() { + size_and_align! { + enum Goal { + Quit, + Move { x: i32, y: i32 }, + ChangeColor(i32, i32, i32), + } + } +} + +#[test] +fn primitives() { + size_and_align! { + struct Goal(i32, i128, isize, usize, f32, f64, bool, char); + } +} + +#[test] +fn tuple() { + size_and_align! { + struct Goal((), (i32, u64, bool)); + } +} + +#[test] +fn non_zero() { + size_and_align! { + minicore: non_zero, option; + use core::num::NonZeroU8; + struct Goal(Option); + } +} + +#[test] +fn niche_optimization() { + size_and_align! { + minicore: option; + struct Goal(Option<&'static i32>); + } + size_and_align! { + minicore: option; + struct Goal(Option>); + } +} + +#[test] +fn enums_with_discriminants() { + size_and_align! { + enum Goal { + A = 1000, + B = 2000, + C = 3000, + } + } + size_and_align! 
{ + enum Goal { + A = 254, + B, + C, // implicitly becomes 256, so we need two bytes + } + } +} diff --git a/crates/hir-ty/src/lib.rs b/crates/hir-ty/src/lib.rs index 39514fc44e..cbe6873c7d 100644 --- a/crates/hir-ty/src/lib.rs +++ b/crates/hir-ty/src/lib.rs @@ -27,6 +27,8 @@ pub mod display; pub mod method_resolution; pub mod primitive; pub mod traits; +pub mod layout; +pub mod lang_items; #[cfg(test)] mod tests; @@ -38,7 +40,7 @@ use std::sync::Arc; use chalk_ir::{ fold::{Shift, TypeFoldable}, interner::HasInterner, - NoSolution, UniverseIndex, + NoSolution, }; use hir_def::{expr::ExprId, type_ref::Rawness, TypeOrConstParamId}; use hir_expand::name; @@ -46,7 +48,9 @@ use itertools::Either; use traits::FnTrait; use utils::Generics; -use crate::{consteval::unknown_const, db::HirDatabase, utils::generics}; +use crate::{ + consteval::unknown_const, db::HirDatabase, infer::unify::InferenceTable, utils::generics, +}; pub use autoderef::autoderef; pub use builder::{ParamKind, TyBuilder}; @@ -511,7 +515,7 @@ where let mut error_replacer = ErrorReplacer { vars: 0 }; let value = match t.clone().try_fold_with(&mut error_replacer, DebruijnIndex::INNERMOST) { Ok(t) => t, - Err(_) => panic!("Encountered unbound or inference vars in {:?}", t), + Err(_) => panic!("Encountered unbound or inference vars in {t:?}"), }; let kinds = (0..error_replacer.vars).map(|_| { chalk_ir::CanonicalVarKind::new( @@ -531,54 +535,31 @@ pub fn callable_sig_from_fnonce( let fn_once_trait = FnTrait::FnOnce.get_id(db, krate)?; let output_assoc_type = db.trait_data(fn_once_trait).associated_type_by_name(&name![Output])?; + let mut table = InferenceTable::new(db, env.clone()); let b = TyBuilder::trait_ref(db, fn_once_trait); if b.remaining() != 2 { return None; } - let fn_once = b.push(self_ty.clone()).fill_with_bound_vars(DebruijnIndex::INNERMOST, 0).build(); - let kinds = fn_once - .substitution - .iter(Interner) - .skip(1) - .map(|x| { - let vk = match x.data(Interner) { - chalk_ir::GenericArgData::Ty(_) => { - chalk_ir::VariableKind::Ty(chalk_ir::TyVariableKind::General) - } - chalk_ir::GenericArgData::Lifetime(_) => chalk_ir::VariableKind::Lifetime, - chalk_ir::GenericArgData::Const(c) => { - chalk_ir::VariableKind::Const(c.data(Interner).ty.clone()) - } - }; - chalk_ir::WithKind::new(vk, UniverseIndex::ROOT) - }) - .collect::>(); - // FIXME: chalk refuses to solve `>::Output == ^0.1`, so we first solve - // `>` and then replace `^0.0` with the concrete argument tuple. 
- let trait_env = env.env.clone(); - let obligation = InEnvironment { goal: fn_once.cast(Interner), environment: trait_env }; - let canonical = - Canonical { binders: CanonicalVarKinds::from_iter(Interner, kinds), value: obligation }; - let subst = match db.trait_solve(krate, canonical) { - Some(Solution::Unique(vars)) => vars.value.subst, - _ => return None, - }; - let args = subst.at(Interner, 0).ty(Interner)?; - let params = match args.kind(Interner) { - chalk_ir::TyKind::Tuple(_, subst) => { - subst.iter(Interner).filter_map(|arg| arg.ty(Interner).cloned()).collect::>() - } - _ => return None, - }; + // Register two obligations: + // - Self: FnOnce + // - >::Output == ?ret_ty + let args_ty = table.new_type_var(); + let trait_ref = b.push(self_ty.clone()).push(args_ty.clone()).build(); + let projection = TyBuilder::assoc_type_projection( + db, + output_assoc_type, + Some(trait_ref.substitution.clone()), + ) + .build(); + table.register_obligation(trait_ref.cast(Interner)); + let ret_ty = table.normalize_projection_ty(projection); - let fn_once = - TyBuilder::trait_ref(db, fn_once_trait).push(self_ty.clone()).push(args.clone()).build(); - let projection = - TyBuilder::assoc_type_projection(db, output_assoc_type, Some(fn_once.substitution.clone())) - .build(); + let ret_ty = table.resolve_completely(ret_ty); + let args_ty = table.resolve_completely(args_ty); - let ret_ty = db.normalize_projection(projection, env); + let params = + args_ty.as_tuple()?.iter(Interner).map(|it| it.assert_ty_ref(Interner)).cloned().collect(); - Some(CallableSig::from_params_and_return(params, ret_ty.clone(), false, Safety::Safe)) + Some(CallableSig::from_params_and_return(params, ret_ty, false, Safety::Safe)) } diff --git a/crates/hir-ty/src/lower.rs b/crates/hir-ty/src/lower.rs index baf9842d5f..592410008a 100644 --- a/crates/hir-ty/src/lower.rs +++ b/crates/hir-ty/src/lower.rs @@ -603,9 +603,8 @@ impl<'a> TyLoweringContext<'a> { } fn select_associated_type(&self, res: Option, segment: PathSegment<'_>) -> Ty { - let (def, res) = match (self.resolver.generic_def(), res) { - (Some(def), Some(res)) => (def, res), - _ => return TyKind::Error.intern(Interner), + let Some((def, res)) = self.resolver.generic_def().zip(res) else { + return TyKind::Error.intern(Interner); }; let ty = named_associated_type_shorthand_candidates( self.db, @@ -617,6 +616,21 @@ impl<'a> TyLoweringContext<'a> { return None; } + let parent_subst = t.substitution.clone(); + let parent_subst = match self.type_param_mode { + ParamLoweringMode::Placeholder => { + // if we're lowering to placeholders, we have to put them in now. + let generics = generics(self.db.upcast(), def); + let s = generics.placeholder_subst(self.db); + s.apply(parent_subst, Interner) + } + ParamLoweringMode::Variable => { + // We need to shift in the bound vars, since + // `named_associated_type_shorthand_candidates` does not do that. + parent_subst.shifted_in_from(Interner, self.in_binders) + } + }; + // FIXME: `substs_from_path_segment()` pushes `TyKind::Error` for every parent // generic params. 
It's inefficient to splice the `Substitution`s, so we may want // that method to optionally take parent `Substitution` as we already know them at @@ -632,22 +646,9 @@ impl<'a> TyLoweringContext<'a> { let substs = Substitution::from_iter( Interner, - substs.iter(Interner).take(len_self).chain(t.substitution.iter(Interner)), + substs.iter(Interner).take(len_self).chain(parent_subst.iter(Interner)), ); - let substs = match self.type_param_mode { - ParamLoweringMode::Placeholder => { - // if we're lowering to placeholders, we have to put - // them in now - let generics = generics(self.db.upcast(), def); - let s = generics.placeholder_subst(self.db); - s.apply(substs, Interner) - } - ParamLoweringMode::Variable => substs, - }; - // We need to shift in the bound vars, since - // associated_type_shorthand_candidates does not do that - let substs = substs.shifted_in_from(Interner, self.in_binders); Some( TyKind::Alias(AliasTy::Projection(ProjectionTy { associated_ty_id: to_assoc_type_id(associated_ty), @@ -779,7 +780,7 @@ impl<'a> TyLoweringContext<'a> { |_, c, ty| { const_or_path_to_chalk( self.db, - &self.resolver, + self.resolver, ty, c, self.type_param_mode, @@ -1190,9 +1191,9 @@ pub fn associated_type_shorthand_candidates( db: &dyn HirDatabase, def: GenericDefId, res: TypeNs, - cb: impl FnMut(&Name, &TraitRef, TypeAliasId) -> Option, + mut cb: impl FnMut(&Name, TypeAliasId) -> Option, ) -> Option { - named_associated_type_shorthand_candidates(db, def, res, None, cb) + named_associated_type_shorthand_candidates(db, def, res, None, |name, _, id| cb(name, id)) } fn named_associated_type_shorthand_candidates( @@ -1202,6 +1203,9 @@ fn named_associated_type_shorthand_candidates( def: GenericDefId, res: TypeNs, assoc_name: Option, + // Do NOT let `cb` touch `TraitRef` outside of `TyLoweringContext`. Its substitution contains + // free `BoundVar`s that need to be shifted and only `TyLoweringContext` knows how to do that + // properly (see `TyLoweringContext::select_associated_type()`). 
mut cb: impl FnMut(&Name, &TraitRef, TypeAliasId) -> Option, ) -> Option { let mut search = |t| { @@ -1792,8 +1796,7 @@ pub(crate) fn impl_self_ty_query(db: &dyn HirDatabase, impl_id: ImplId) -> Binde let impl_data = db.impl_data(impl_id); let resolver = impl_id.resolver(db.upcast()); let _cx = stdx::panic_context::enter(format!( - "impl_self_ty_query({:?} -> {:?} -> {:?})", - impl_id, impl_loc, impl_data + "impl_self_ty_query({impl_id:?} -> {impl_loc:?} -> {impl_data:?})" )); let generics = generics(db.upcast(), impl_id.into()); let ctx = @@ -1830,8 +1833,7 @@ pub(crate) fn impl_trait_query(db: &dyn HirDatabase, impl_id: ImplId) -> Option< let impl_data = db.impl_data(impl_id); let resolver = impl_id.resolver(db.upcast()); let _cx = stdx::panic_context::enter(format!( - "impl_trait_query({:?} -> {:?} -> {:?})", - impl_id, impl_loc, impl_data + "impl_trait_query({impl_id:?} -> {impl_loc:?} -> {impl_data:?})" )); let ctx = TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable); @@ -1850,7 +1852,7 @@ pub(crate) fn return_type_impl_traits( let ctx_ret = TyLoweringContext::new(db, &resolver) .with_impl_trait_mode(ImplTraitLoweringMode::Opaque) .with_type_param_mode(ParamLoweringMode::Variable); - let _ret = (&ctx_ret).lower_ty(&data.ret_type); + let _ret = ctx_ret.lower_ty(&data.ret_type); let generics = generics(db.upcast(), def.into()); let return_type_impl_traits = ReturnTypeImplTraits { impl_traits: ctx_ret.opaque_type_data.into_inner() }; @@ -1979,7 +1981,7 @@ fn fallback_bound_vars + HasInterner ArrayVec { +) -> SmallVec<[CrateId; 2]> { let _p = profile::span("inherent_impl_crates_query"); - let mut res = ArrayVec::new(); + let mut res = SmallVec::new(); let crate_graph = db.crate_graph(); + // should pass crate for finger print and do reverse deps + for krate in crate_graph.transitive_deps(krate) { - if res.is_full() { - // we don't currently look for or store more than two crates here, - // so don't needlessly look at more crates than necessary. 
- break; - } let impls = db.inherent_impls_in_crate(krate); if impls.map.get(&fp).map_or(false, |v| !v.is_empty()) { res.push(krate); @@ -392,19 +388,40 @@ pub fn def_crates( db: &dyn HirDatabase, ty: &Ty, cur_crate: CrateId, -) -> Option> { - let mod_to_crate_ids = |module: ModuleId| Some(iter::once(module.krate()).collect()); - - let fp = TyFingerprint::for_inherent_impl(ty); - +) -> Option> { match ty.kind(Interner) { - TyKind::Adt(AdtId(def_id), _) => mod_to_crate_ids(def_id.module(db.upcast())), - TyKind::Foreign(id) => { - mod_to_crate_ids(from_foreign_def_id(*id).lookup(db.upcast()).module(db.upcast())) + &TyKind::Adt(AdtId(def_id), _) => { + let rustc_has_incoherent_inherent_impls = match def_id { + hir_def::AdtId::StructId(id) => { + db.struct_data(id).rustc_has_incoherent_inherent_impls + } + hir_def::AdtId::UnionId(id) => { + db.union_data(id).rustc_has_incoherent_inherent_impls + } + hir_def::AdtId::EnumId(id) => db.enum_data(id).rustc_has_incoherent_inherent_impls, + }; + Some(if rustc_has_incoherent_inherent_impls { + db.incoherent_inherent_impl_crates(cur_crate, TyFingerprint::Adt(def_id)) + } else { + smallvec![def_id.module(db.upcast()).krate()] + }) + } + &TyKind::Foreign(id) => { + let alias = from_foreign_def_id(id); + Some(if db.type_alias_data(alias).rustc_has_incoherent_inherent_impls { + db.incoherent_inherent_impl_crates(cur_crate, TyFingerprint::ForeignType(id)) + } else { + smallvec![alias.module(db.upcast()).krate()] + }) + } + TyKind::Dyn(_) => { + let trait_id = ty.dyn_trait()?; + Some(if db.trait_data(trait_id).rustc_has_incoherent_inherent_impls { + db.incoherent_inherent_impl_crates(cur_crate, TyFingerprint::Dyn(trait_id)) + } else { + smallvec![trait_id.module(db.upcast()).krate()] + }) } - TyKind::Dyn(_) => ty - .dyn_trait() - .and_then(|trait_| mod_to_crate_ids(GenericDefId::TraitId(trait_).module(db.upcast()))), // for primitives, there may be impls in various places (core and alloc // mostly). We just check the whole crate graph for crates with impls // (cached behind a query). @@ -412,10 +429,11 @@ pub fn def_crates( | TyKind::Str | TyKind::Slice(_) | TyKind::Array(..) - | TyKind::Raw(..) => { - Some(db.inherent_impl_crates(cur_crate, fp.expect("fingerprint for primitive"))) - } - _ => return None, + | TyKind::Raw(..) => Some(db.incoherent_inherent_impl_crates( + cur_crate, + TyFingerprint::for_inherent_impl(ty).expect("fingerprint for primitive"), + )), + _ => None, } } @@ -470,14 +488,15 @@ pub fn lang_names_for_bin_op(op: syntax::ast::BinaryOp) -> Option<(Name, Name)> /// Look up the method with the given name. 
pub(crate) fn lookup_method( - ty: &Canonical, db: &dyn HirDatabase, + ty: &Canonical, env: Arc, traits_in_scope: &FxHashSet, visible_from_module: VisibleFromModule, name: &Name, -) -> Option<(ReceiverAdjustments, FunctionId)> { - iterate_method_candidates( +) -> Option<(ReceiverAdjustments, FunctionId, bool)> { + let mut not_visible = None; + let res = iterate_method_candidates( ty, db, env, @@ -485,11 +504,16 @@ pub(crate) fn lookup_method( visible_from_module, Some(name), LookupMode::MethodCall, - |adjustments, f| match f { - AssocItemId::FunctionId(f) => Some((adjustments, f)), + |adjustments, f, visible| match f { + AssocItemId::FunctionId(f) if visible => Some((adjustments, f, true)), + AssocItemId::FunctionId(f) if not_visible.is_none() => { + not_visible = Some((adjustments, f, false)); + None + } _ => None, }, - ) + ); + res.or(not_visible) } /// Whether we're looking up a dotted method call (like `v.len()`) or a path @@ -601,7 +625,7 @@ pub(crate) fn iterate_method_candidates( visible_from_module: VisibleFromModule, name: Option<&Name>, mode: LookupMode, - mut callback: impl FnMut(ReceiverAdjustments, AssocItemId) -> Option, + mut callback: impl FnMut(ReceiverAdjustments, AssocItemId, bool) -> Option, ) -> Option { let mut slot = None; iterate_method_candidates_dyn( @@ -612,9 +636,9 @@ pub(crate) fn iterate_method_candidates( visible_from_module, name, mode, - &mut |adj, item| { + &mut |adj, item, visible| { assert!(slot.is_none()); - if let Some(it) = callback(adj, item) { + if let Some(it) = callback(adj, item, visible) { slot = Some(it); return ControlFlow::Break(()); } @@ -624,6 +648,30 @@ pub(crate) fn iterate_method_candidates( slot } +pub fn lookup_impl_const( + db: &dyn HirDatabase, + env: Arc, + const_id: ConstId, + subs: Substitution, +) -> ConstId { + let trait_id = match const_id.lookup(db.upcast()).container { + ItemContainerId::TraitId(id) => id, + _ => return const_id, + }; + let substitution = Substitution::from_iter(Interner, subs.iter(Interner)); + let trait_ref = TraitRef { trait_id: to_chalk_trait_id(trait_id), substitution }; + + let const_data = db.const_data(const_id); + let name = match const_data.name.as_ref() { + Some(name) => name, + None => return const_id, + }; + + lookup_impl_assoc_item_for_trait_ref(trait_ref, db, env, name) + .and_then(|assoc| if let AssocItemId::ConstId(id) = assoc { Some(id) } else { None }) + .unwrap_or(const_id) +} + /// Looks up the impl method that actually runs for the trait method `func`. /// /// Returns `func` if it's not a method defined in a trait or the lookup failed. 
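The `lookup_impl_const` added above mirrors `lookup_impl_method`: starting from a trait's associated const and the substitution for a concrete self type, it finds the const item in the matching impl. A minimal sketch, separate from this diff, of the language-level situation it resolves; the names `Answer`, `S`, and `VALUE` are invented for illustration:

trait Answer {
    const VALUE: u32;
}

struct S;

impl Answer for S {
    // `<S as Answer>::VALUE` should resolve to this item rather than to the
    // trait declaration, which is what the impl lookup enables (e.g. for
    // go-to-definition and const evaluation).
    const VALUE: u32 = 42;
}

fn main() {
    assert_eq!(<S as Answer>::VALUE, 42);
}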
@@ -645,15 +693,17 @@ pub fn lookup_impl_method( }; let name = &db.function_data(func).name; - lookup_impl_method_for_trait_ref(trait_ref, db, env, name).unwrap_or(func) + lookup_impl_assoc_item_for_trait_ref(trait_ref, db, env, name) + .and_then(|assoc| if let AssocItemId::FunctionId(id) = assoc { Some(id) } else { None }) + .unwrap_or(func) } -fn lookup_impl_method_for_trait_ref( +fn lookup_impl_assoc_item_for_trait_ref( trait_ref: TraitRef, db: &dyn HirDatabase, env: Arc, name: &Name, -) -> Option { +) -> Option { let self_ty = trait_ref.self_type_parameter(Interner); let self_ty_fp = TyFingerprint::for_trait_impl(&self_ty)?; let impls = db.trait_impls_in_deps(env.krate); @@ -663,7 +713,15 @@ fn lookup_impl_method_for_trait_ref( let impl_data = find_matching_impl(impls, table, trait_ref)?; impl_data.items.iter().find_map(|it| match it { - AssocItemId::FunctionId(f) => (db.function_data(*f).name == *name).then(|| *f), + AssocItemId::FunctionId(f) => { + (db.function_data(*f).name == *name).then_some(AssocItemId::FunctionId(*f)) + } + AssocItemId::ConstId(c) => db + .const_data(*c) + .name + .as_ref() + .map(|n| *n == *name) + .and_then(|result| if result { Some(AssocItemId::ConstId(*c)) } else { None }), _ => None, }) } @@ -719,7 +777,7 @@ pub fn iterate_path_candidates( name, LookupMode::Path, // the adjustments are not relevant for path lookup - &mut |_, id| callback(id), + &mut |_, id, _| callback(id), ) } @@ -731,7 +789,7 @@ pub fn iterate_method_candidates_dyn( visible_from_module: VisibleFromModule, name: Option<&Name>, mode: LookupMode, - callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>, + callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId, bool) -> ControlFlow<()>, ) -> ControlFlow<()> { match mode { LookupMode::MethodCall => { @@ -795,7 +853,7 @@ fn iterate_method_candidates_with_autoref( traits_in_scope: &FxHashSet, visible_from_module: VisibleFromModule, name: Option<&Name>, - mut callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>, + mut callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId, bool) -> ControlFlow<()>, ) -> ControlFlow<()> { if receiver_ty.value.is_general_var(Interner, &receiver_ty.binders) { // don't try to resolve methods on unknown types @@ -856,7 +914,7 @@ fn iterate_method_candidates_by_receiver( traits_in_scope: &FxHashSet, visible_from_module: VisibleFromModule, name: Option<&Name>, - mut callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>, + mut callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId, bool) -> ControlFlow<()>, ) -> ControlFlow<()> { let mut table = InferenceTable::new(db, env); let receiver_ty = table.instantiate_canonical(receiver_ty.clone()); @@ -868,7 +926,7 @@ fn iterate_method_candidates_by_receiver( while let Some((self_ty, _)) = autoderef.next() { iterate_inherent_methods( &self_ty, - &mut autoderef.table, + autoderef.table, name, Some(&receiver_ty), Some(receiver_adjustments.clone()), @@ -883,7 +941,7 @@ fn iterate_method_candidates_by_receiver( while let Some((self_ty, _)) = autoderef.next() { iterate_trait_method_candidates( &self_ty, - &mut autoderef.table, + autoderef.table, traits_in_scope, name, Some(&receiver_ty), @@ -902,7 +960,7 @@ fn iterate_method_candidates_for_self_ty( traits_in_scope: &FxHashSet, visible_from_module: VisibleFromModule, name: Option<&Name>, - mut callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>, + mut callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId, bool) -> 
ControlFlow<()>, ) -> ControlFlow<()> { let mut table = InferenceTable::new(db, env); let self_ty = table.instantiate_canonical(self_ty.clone()); @@ -933,7 +991,7 @@ fn iterate_trait_method_candidates( name: Option<&Name>, receiver_ty: Option<&Ty>, receiver_adjustments: Option, - callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>, + callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId, bool) -> ControlFlow<()>, ) -> ControlFlow<()> { let db = table.db; let env = table.trait_env.clone(); @@ -964,9 +1022,11 @@ fn iterate_trait_method_candidates( for &(_, item) in data.items.iter() { // Don't pass a `visible_from_module` down to `is_valid_candidate`, // since only inherent methods should be included into visibility checking. - if !is_valid_candidate(table, name, receiver_ty, item, self_ty, None) { - continue; - } + let visible = match is_valid_candidate(table, name, receiver_ty, item, self_ty, None) { + IsValidCandidate::Yes => true, + IsValidCandidate::NotVisible => false, + IsValidCandidate::No => continue, + }; if !known_implemented { let goal = generic_implements_goal(db, env.clone(), t, &canonical_self_ty); if db.trait_solve(env.krate, goal.cast(Interner)).is_none() { @@ -974,7 +1034,7 @@ fn iterate_trait_method_candidates( } } known_implemented = true; - callback(receiver_adjustments.clone().unwrap_or_default(), item)?; + callback(receiver_adjustments.clone().unwrap_or_default(), item, visible)?; } } ControlFlow::Continue(()) @@ -987,7 +1047,7 @@ fn iterate_inherent_methods( receiver_ty: Option<&Ty>, receiver_adjustments: Option, visible_from_module: VisibleFromModule, - callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>, + callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId, bool) -> ControlFlow<()>, ) -> ControlFlow<()> { let db = table.db; let env = table.trait_env.clone(); @@ -1076,7 +1136,7 @@ fn iterate_inherent_methods( name: Option<&Name>, receiver_ty: Option<&Ty>, receiver_adjustments: Option, - callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>, + callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId, bool) -> ControlFlow<()>, traits: impl Iterator, ) -> ControlFlow<()> { let db = table.db; @@ -1084,9 +1144,13 @@ fn iterate_inherent_methods( let data = db.trait_data(t); for &(_, item) in data.items.iter() { // We don't pass `visible_from_module` as all trait items should be visible. 
- if is_valid_candidate(table, name, receiver_ty, item, self_ty, None) { - callback(receiver_adjustments.clone().unwrap_or_default(), item)?; - } + let visible = + match is_valid_candidate(table, name, receiver_ty, item, self_ty, None) { + IsValidCandidate::Yes => true, + IsValidCandidate::NotVisible => false, + IsValidCandidate::No => continue, + }; + callback(receiver_adjustments.clone().unwrap_or_default(), item, visible)?; } } ControlFlow::Continue(()) @@ -1100,17 +1164,25 @@ fn iterate_inherent_methods( receiver_ty: Option<&Ty>, receiver_adjustments: Option, visible_from_module: Option, - callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>, + callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId, bool) -> ControlFlow<()>, ) -> ControlFlow<()> { let db = table.db; let impls_for_self_ty = impls.for_self_ty(self_ty); for &impl_def in impls_for_self_ty { for &item in &db.impl_data(impl_def).items { - if !is_valid_candidate(table, name, receiver_ty, item, self_ty, visible_from_module) - { - continue; - } - callback(receiver_adjustments.clone().unwrap_or_default(), item)?; + let visible = match is_valid_candidate( + table, + name, + receiver_ty, + item, + self_ty, + visible_from_module, + ) { + IsValidCandidate::Yes => true, + IsValidCandidate::NotVisible => false, + IsValidCandidate::No => continue, + }; + callback(receiver_adjustments.clone().unwrap_or_default(), item, visible)?; } } ControlFlow::Continue(()) @@ -1139,7 +1211,7 @@ pub fn resolve_indexing_op( macro_rules! check_that { ($cond:expr) => { if !$cond { - return false; + return IsValidCandidate::No; } }; } @@ -1151,7 +1223,7 @@ fn is_valid_candidate( item: AssocItemId, self_ty: &Ty, visible_from_module: Option, -) -> bool { +) -> IsValidCandidate { let db = table.db; match item { AssocItemId::FunctionId(m) => { @@ -1162,31 +1234,37 @@ fn is_valid_candidate( check_that!(receiver_ty.is_none()); check_that!(name.map_or(true, |n| data.name.as_ref() == Some(n))); - check_that!(visible_from_module.map_or(true, |from_module| { - let v = db.const_visibility(c).is_visible_from(db.upcast(), from_module); - if !v { + + if let Some(from_module) = visible_from_module { + if !db.const_visibility(c).is_visible_from(db.upcast(), from_module) { cov_mark::hit!(const_candidate_not_visible); + return IsValidCandidate::NotVisible; } - v - })); + } if let ItemContainerId::ImplId(impl_id) = c.lookup(db.upcast()).container { let self_ty_matches = table.run_in_snapshot(|table| { let expected_self_ty = TyBuilder::impl_self_ty(db, impl_id) .fill_with_inference_vars(table) .build(); - table.unify(&expected_self_ty, &self_ty) + table.unify(&expected_self_ty, self_ty) }); if !self_ty_matches { cov_mark::hit!(const_candidate_self_type_mismatch); - return false; + return IsValidCandidate::No; } } - true + IsValidCandidate::Yes } - _ => false, + _ => IsValidCandidate::No, } } +enum IsValidCandidate { + Yes, + No, + NotVisible, +} + fn is_valid_fn_candidate( table: &mut InferenceTable<'_>, fn_id: FunctionId, @@ -1194,19 +1272,17 @@ fn is_valid_fn_candidate( receiver_ty: Option<&Ty>, self_ty: &Ty, visible_from_module: Option, -) -> bool { +) -> IsValidCandidate { let db = table.db; let data = db.function_data(fn_id); check_that!(name.map_or(true, |n| n == &data.name)); - check_that!(visible_from_module.map_or(true, |from_module| { - let v = db.function_visibility(fn_id).is_visible_from(db.upcast(), from_module); - if !v { + if let Some(from_module) = visible_from_module { + if 
!db.function_visibility(fn_id).is_visible_from(db.upcast(), from_module) { cov_mark::hit!(autoderef_candidate_not_visible); + return IsValidCandidate::NotVisible; } - v - })); - + } table.run_in_snapshot(|table| { let container = fn_id.lookup(db.upcast()).container; let (impl_subst, expect_self_ty) = match container { @@ -1245,7 +1321,7 @@ fn is_valid_fn_candidate( // We need to consider the bounds on the impl to distinguish functions of the same name // for a type. let predicates = db.generic_predicates(impl_id.into()); - predicates + let valid = predicates .iter() .map(|predicate| { let (p, b) = predicate @@ -1260,12 +1336,16 @@ fn is_valid_fn_candidate( // It's ok to get ambiguity here, as we may not have enough information to prove // obligations. We'll check if the user is calling the selected method properly // later anyway. - .all(|p| table.try_obligation(p.cast(Interner)).is_some()) + .all(|p| table.try_obligation(p.cast(Interner)).is_some()); + match valid { + true => IsValidCandidate::Yes, + false => IsValidCandidate::No, + } } else { // For `ItemContainerId::TraitId`, we check if `self_ty` implements the trait in // `iterate_trait_method_candidates()`. // For others, this function shouldn't be called. - true + IsValidCandidate::Yes } }) } diff --git a/crates/hir-ty/src/tests.rs b/crates/hir-ty/src/tests.rs index ebbc541014..ba5d9c2412 100644 --- a/crates/hir-ty/src/tests.rs +++ b/crates/hir-ty/src/tests.rs @@ -94,18 +94,19 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour types.insert(file_range, expected.trim_start_matches("type: ").to_string()); } else if expected.starts_with("expected") { mismatches.insert(file_range, expected); - } else if expected.starts_with("adjustments: ") { + } else if expected.starts_with("adjustments:") { adjustments.insert( file_range, expected - .trim_start_matches("adjustments: ") + .trim_start_matches("adjustments:") + .trim() .split(',') .map(|it| it.trim().to_string()) .filter(|it| !it.is_empty()) .collect(), ); } else { - panic!("unexpected annotation: {}", expected); + panic!("unexpected annotation: {expected}"); } had_annotations = true; } @@ -176,17 +177,17 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour assert_eq!(actual, expected); } if let Some(expected) = adjustments.remove(&range) { - if let Some(adjustments) = inference_result.expr_adjustments.get(&expr) { - assert_eq!( - expected, - adjustments - .iter() - .map(|Adjustment { kind, .. }| format!("{:?}", kind)) - .collect::>() - ); - } else { - panic!("expected {:?} adjustments, found none", expected); - } + let adjustments = inference_result + .expr_adjustments + .get(&expr) + .map_or_else(Default::default, |it| &**it); + assert_eq!( + expected, + adjustments + .iter() + .map(|Adjustment { kind, .. 
}| format!("{kind:?}")) + .collect::>() + ); } } diff --git a/crates/hir-ty/src/tests/coercion.rs b/crates/hir-ty/src/tests/coercion.rs index 7e3aecc2ae..3e110abaf4 100644 --- a/crates/hir-ty/src/tests/coercion.rs +++ b/crates/hir-ty/src/tests/coercion.rs @@ -807,3 +807,37 @@ fn main() { "#, ); } + +#[test] +fn adjust_comparison_arguments() { + check_no_mismatches( + r" +//- minicore: eq +struct Struct; +impl core::cmp::PartialEq for Struct { + fn eq(&self, other: &Self) -> bool { true } +} +fn test() { + Struct == Struct; + // ^^^^^^ adjustments: Borrow(Ref(Not)) + // ^^^^^^ adjustments: Borrow(Ref(Not)) +}", + ); +} + +#[test] +fn adjust_assign_lhs() { + check_no_mismatches( + r" +//- minicore: add +struct Struct; +impl core::ops::AddAssign for Struct { + fn add_assign(&mut self, other: Self) {} +} +fn test() { + Struct += Struct; + // ^^^^^^ adjustments: Borrow(Ref(Mut)) + // ^^^^^^ adjustments: +}", + ); +} diff --git a/crates/hir-ty/src/tests/incremental.rs b/crates/hir-ty/src/tests/incremental.rs index 3e08e83e89..073d6d9be2 100644 --- a/crates/hir-ty/src/tests/incremental.rs +++ b/crates/hir-ty/src/tests/incremental.rs @@ -24,7 +24,7 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() { db.infer(def); }); }); - assert!(format!("{:?}", events).contains("infer")) + assert!(format!("{events:?}").contains("infer")) } let new_text = " @@ -46,6 +46,6 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() { db.infer(def); }); }); - assert!(!format!("{:?}", events).contains("infer"), "{:#?}", events) + assert!(!format!("{events:?}").contains("infer"), "{events:#?}") } } diff --git a/crates/hir-ty/src/tests/macros.rs b/crates/hir-ty/src/tests/macros.rs index b3adafaafd..8b75ec842a 100644 --- a/crates/hir-ty/src/tests/macros.rs +++ b/crates/hir-ty/src/tests/macros.rs @@ -849,7 +849,7 @@ fn main() { //^^^^^^^^^^^^^^^^^ RegisterBlock } "#; - let fixture = format!("{}\n//- /foo.rs\n{}", fixture, data); + let fixture = format!("{fixture}\n//- /foo.rs\n{data}"); { let _b = bench("include macro"); diff --git a/crates/hir-ty/src/tests/method_resolution.rs b/crates/hir-ty/src/tests/method_resolution.rs index 5d76d185ff..6c7a532997 100644 --- a/crates/hir-ty/src/tests/method_resolution.rs +++ b/crates/hir-ty/src/tests/method_resolution.rs @@ -1867,3 +1867,53 @@ fn g(a: T) { "#, ); } + +#[test] +fn incoherent_impls() { + check( + r#" +//- minicore: error, send +pub struct Box(T); +use core::error::Error; + +#[rustc_allow_incoherent_impl] +impl dyn Error { + pub fn downcast(self: Box) -> Result, Box> { + loop {} + } +} +#[rustc_allow_incoherent_impl] +impl dyn Error + Send { + /// Attempts to downcast the box to a concrete type. 
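The `incoherent_impls` fixture here relies on inherent impls for `dyn Trait` types; ordinarily those are only allowed in the crate that defines the trait, and `#[rustc_allow_incoherent_impl]` is the internal escape hatch the standard library uses for `dyn Error`. A small illustration of the ordinary, stable case with a made-up trait:

trait Animal {
    fn name(&self) -> String;
}

// Inherent (non-trait) methods on a trait object are allowed in the crate that
// defines the trait, which is what `impl dyn Error { .. }` does in std.
impl dyn Animal {
    fn shout(&self) -> String {
        format!("{}!", self.name())
    }
}

struct Dog;
impl Animal for Dog {
    fn name(&self) -> String {
        "Rex".to_string()
    }
}

fn demo() -> String {
    let dog: &dyn Animal = &Dog;
    dog.shout()
}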
+ pub fn downcast(self: Box) -> Result, Box> { + let err: Box = self; + // ^^^^ expected Box, got Box + // FIXME, type mismatch should not occur + ::downcast(err).map_err(|_| loop {}) + //^^^^^^^^^^^^^^^^^^^^^ type: fn downcast<{unknown}>(Box) -> Result, Box> + } +} +"#, + ); +} + +#[test] +fn fallback_private_methods() { + check( + r#" +mod module { + pub struct Struct; + + impl Struct { + fn func(&self) {} + } +} + +fn foo() { + let s = module::Struct; + s.func(); + //^^^^^^^^ type: () +} +"#, + ); +} diff --git a/crates/hir-ty/src/tests/patterns.rs b/crates/hir-ty/src/tests/patterns.rs index 74de33117e..9333e26935 100644 --- a/crates/hir-ty/src/tests/patterns.rs +++ b/crates/hir-ty/src/tests/patterns.rs @@ -1080,3 +1080,15 @@ fn my_fn(#[cfg(feature = "feature")] u8: u8, u32: u32) {} "#, ); } + +#[test] +fn var_args() { + check_types( + r#" +#[lang = "va_list"] +pub struct VaListImpl<'f>; +fn my_fn(foo: ...) {} + //^^^ VaListImpl +"#, + ); +} diff --git a/crates/hir-ty/src/tests/regression.rs b/crates/hir-ty/src/tests/regression.rs index 4e46397459..de6ae7fff8 100644 --- a/crates/hir-ty/src/tests/regression.rs +++ b/crates/hir-ty/src/tests/regression.rs @@ -1723,3 +1723,24 @@ fn bar() -> ControlFlow<(), ()> { "#, ); } + +#[test] +fn assoc_type_shorthand_with_gats_in_binders() { + // c.f. test `issue_4885()` + check_no_mismatches( + r#" +trait Gats { + type Assoc; +} +trait Foo {} + +struct Bar<'a, B: Gats, A> { + field: &'a dyn Foo>, +} + +fn foo(b: Bar) { + let _ = b.field; +} +"#, + ); +} diff --git a/crates/hir-ty/src/tests/simple.rs b/crates/hir-ty/src/tests/simple.rs index d7431443b8..146145523b 100644 --- a/crates/hir-ty/src/tests/simple.rs +++ b/crates/hir-ty/src/tests/simple.rs @@ -2064,17 +2064,17 @@ fn fn_pointer_return() { fn block_modifiers_smoke_test() { check_infer( r#" -//- minicore: future +//- minicore: future, try async fn main() { let x = unsafe { 92 }; let y = async { async { () }.await }; - let z = try { () }; + let z: core::ops::ControlFlow<(), _> = try { () }; let w = const { 92 }; let t = 'a: { 92 }; } "#, expect![[r#" - 16..162 '{ ...2 }; }': () + 16..193 '{ ...2 }; }': () 26..27 'x': i32 30..43 'unsafe { 92 }': i32 30..43 'unsafe { 92 }': i32 @@ -2086,17 +2086,17 @@ async fn main() { 65..77 'async { () }': impl Future 65..83 'async ....await': () 73..75 '()': () - 95..96 'z': {unknown} - 99..109 'try { () }': () - 99..109 'try { () }': {unknown} - 105..107 '()': () - 119..120 'w': i32 - 123..135 'const { 92 }': i32 - 123..135 'const { 92 }': i32 - 131..133 '92': i32 - 145..146 't': i32 - 149..159 ''a: { 92 }': i32 - 155..157 '92': i32 + 95..96 'z': ControlFlow<(), ()> + 130..140 'try { () }': () + 130..140 'try { () }': ControlFlow<(), ()> + 136..138 '()': () + 150..151 'w': i32 + 154..166 'const { 92 }': i32 + 154..166 'const { 92 }': i32 + 162..164 '92': i32 + 176..177 't': i32 + 180..190 ''a: { 92 }': i32 + 186..188 '92': i32 "#]], ) } diff --git a/crates/hir-ty/src/tests/traits.rs b/crates/hir-ty/src/tests/traits.rs index 3d7194b6f4..d01fe06328 100644 --- a/crates/hir-ty/src/tests/traits.rs +++ b/crates/hir-ty/src/tests/traits.rs @@ -1388,6 +1388,22 @@ fn foo() -> (impl FnOnce(&str, T), impl Trait) { ); } +#[test] +fn return_pos_impl_trait_in_projection() { + // Note that the unused type param `X` is significant; see #13307. 
+ check_no_mismatches( + r#" +//- minicore: sized +trait Future { type Output; } +impl Future for () { type Output = i32; } +type Foo = (::Output, F); +fn foo() -> Foo> { + (0, ()) +} +"#, + ) +} + #[test] fn dyn_trait() { check_infer( @@ -4084,3 +4100,68 @@ where "#, ); } + +#[test] +fn bin_op_with_scalar_fallback() { + // Extra impls are significant so that chalk doesn't give us definite guidances. + check_types( + r#" +//- minicore: add +use core::ops::Add; + +struct Vec2(T, T); + +impl Add for Vec2 { + type Output = Self; + fn add(self, rhs: Self) -> Self::Output { loop {} } +} +impl Add for Vec2 { + type Output = Self; + fn add(self, rhs: Self) -> Self::Output { loop {} } +} +impl Add for Vec2 { + type Output = Self; + fn add(self, rhs: Self) -> Self::Output { loop {} } +} +impl Add for Vec2 { + type Output = Self; + fn add(self, rhs: Self) -> Self::Output { loop {} } +} + +fn test() { + let a = Vec2(1, 2); + let b = Vec2(3, 4); + let c = a + b; + //^ Vec2 + let a = Vec2(1., 2.); + let b = Vec2(3., 4.); + let c = a + b; + //^ Vec2 +} +"#, + ); +} + +#[test] +fn trait_method_with_scalar_fallback() { + check_types( + r#" +trait Trait { + type Output; + fn foo(&self) -> Self::Output; +} +impl Trait for T { + type Output = T; + fn foo(&self) -> Self::Output { loop {} } +} +fn test() { + let a = 42; + let b = a.foo(); + //^ i32 + let a = 3.14; + let b = a.foo(); + //^ f64 +} +"#, + ); +} diff --git a/crates/hir-ty/src/tls.rs b/crates/hir-ty/src/tls.rs index 92711a24fe..b7e6ee6740 100644 --- a/crates/hir-ty/src/tls.rs +++ b/crates/hir-ty/src/tls.rs @@ -67,12 +67,12 @@ impl DebugContext<'_> { let trait_ref = projection_ty.trait_ref(self.0); let trait_params = trait_ref.substitution.as_slice(Interner); let self_ty = trait_ref.self_type_parameter(Interner); - write!(fmt, "<{:?} as {}", self_ty, trait_name)?; + write!(fmt, "<{self_ty:?} as {trait_name}")?; if trait_params.len() > 1 { write!( fmt, "<{}>", - trait_params[1..].iter().format_with(", ", |x, f| f(&format_args!("{:?}", x))), + trait_params[1..].iter().format_with(", ", |x, f| f(&format_args!("{x:?}"))), )?; } write!(fmt, ">::{}", type_alias_data.name)?; @@ -83,7 +83,7 @@ impl DebugContext<'_> { write!( fmt, "<{}>", - proj_params.iter().format_with(", ", |x, f| f(&format_args!("{:?}", x))), + proj_params.iter().format_with(", ", |x, f| f(&format_args!("{x:?}"))), )?; } @@ -105,9 +105,9 @@ impl DebugContext<'_> { } }; match def { - CallableDefId::FunctionId(_) => write!(fmt, "{{fn {}}}", name), + CallableDefId::FunctionId(_) => write!(fmt, "{{fn {name}}}"), CallableDefId::StructId(_) | CallableDefId::EnumVariantId(_) => { - write!(fmt, "{{ctor {}}}", name) + write!(fmt, "{{ctor {name}}}") } } } diff --git a/crates/hir-ty/src/traits.rs b/crates/hir-ty/src/traits.rs index c425f35acf..778a6b8204 100644 --- a/crates/hir-ty/src/traits.rs +++ b/crates/hir-ty/src/traits.rs @@ -18,7 +18,7 @@ use crate::{ }; /// This controls how much 'time' we give the Chalk solver before giving up. 
-const CHALK_SOLVER_FUEL: i32 = 100; +const CHALK_SOLVER_FUEL: i32 = 1000; #[derive(Debug, Copy, Clone)] pub(crate) struct ChalkContext<'a> { @@ -55,13 +55,10 @@ impl TraitEnvironment { } } - pub fn traits_in_scope_from_clauses<'a>( - &'a self, - ty: Ty, - ) -> impl Iterator + 'a { + pub fn traits_in_scope_from_clauses(&self, ty: Ty) -> impl Iterator + '_ { self.traits_from_clauses .iter() - .filter_map(move |(self_ty, trait_id)| (*self_ty == ty).then(|| *trait_id)) + .filter_map(move |(self_ty, trait_id)| (*self_ty == ty).then_some(*trait_id)) } } @@ -130,7 +127,7 @@ fn solve( let mut solve = || { let _ctx = if is_chalk_debug() || is_chalk_print() { - Some(panic_context::enter(format!("solving {:?}", goal))) + Some(panic_context::enter(format!("solving {goal:?}"))) } else { None }; diff --git a/crates/hir-ty/src/utils.rs b/crates/hir-ty/src/utils.rs index e54bcb421a..9893566bd5 100644 --- a/crates/hir-ty/src/utils.rs +++ b/crates/hir-ty/src/utils.rs @@ -17,7 +17,7 @@ use hir_def::{ ConstParamId, FunctionId, GenericDefId, ItemContainerId, Lookup, TraitId, TypeAliasId, TypeOrConstParamId, TypeParamId, }; -use hir_expand::name::{known, Name}; +use hir_expand::name::Name; use itertools::Either; use rustc_hash::FxHashSet; use smallvec::{smallvec, SmallVec}; @@ -184,9 +184,7 @@ pub(crate) struct Generics { } impl Generics { - pub(crate) fn iter_id<'a>( - &'a self, - ) -> impl Iterator> + 'a { + pub(crate) fn iter_id(&self) -> impl Iterator> + '_ { self.iter().map(|(id, data)| match data { TypeOrConstParamData::TypeParamData(_) => Either::Left(TypeParamId::from_unchecked(id)), TypeOrConstParamData::ConstParamData(_) => { @@ -216,9 +214,9 @@ impl Generics { } /// Iterator over types and const params of parent. - pub(crate) fn iter_parent<'a>( - &'a self, - ) -> impl DoubleEndedIterator + 'a { + pub(crate) fn iter_parent( + &self, + ) -> impl DoubleEndedIterator { self.parent_generics().into_iter().flat_map(|it| { let to_toc_id = move |(local_id, p)| (TypeOrConstParamId { parent: it.def, local_id }, p); @@ -335,54 +333,18 @@ pub fn is_fn_unsafe_to_call(db: &dyn HirDatabase, func: FunctionId) -> bool { // Function in an `extern` block are always unsafe to call, except when it has // `"rust-intrinsic"` ABI there are a few exceptions. let id = block.lookup(db.upcast()).id; - !matches!( - id.item_tree(db.upcast())[id.value].abi.as_deref(), - Some("rust-intrinsic") if !is_intrinsic_fn_unsafe(&data.name) - ) + + let is_intrinsic = + id.item_tree(db.upcast())[id.value].abi.as_deref() == Some("rust-intrinsic"); + + if is_intrinsic { + // Intrinsics are unsafe unless they have the rustc_safe_intrinsic attribute + !data.attrs.by_key("rustc_safe_intrinsic").exists() + } else { + // Extern items are always unsafe + true + } } _ => false, } } - -/// Returns `true` if the given intrinsic is unsafe to call, or false otherwise. 
-fn is_intrinsic_fn_unsafe(name: &Name) -> bool { - // Should be kept in sync with https://github.com/rust-lang/rust/blob/532d2b14c05f9bc20b2d27cbb5f4550d28343a36/compiler/rustc_typeck/src/check/intrinsic.rs#L72-L106 - ![ - known::abort, - known::add_with_overflow, - known::bitreverse, - known::black_box, - known::bswap, - known::caller_location, - known::ctlz, - known::ctpop, - known::cttz, - known::discriminant_value, - known::forget, - known::likely, - known::maxnumf32, - known::maxnumf64, - known::min_align_of, - known::minnumf32, - known::minnumf64, - known::mul_with_overflow, - known::needs_drop, - known::ptr_guaranteed_eq, - known::ptr_guaranteed_ne, - known::rotate_left, - known::rotate_right, - known::rustc_peek, - known::saturating_add, - known::saturating_sub, - known::size_of, - known::sub_with_overflow, - known::type_id, - known::type_name, - known::unlikely, - known::variant_count, - known::wrapping_add, - known::wrapping_mul, - known::wrapping_sub, - ] - .contains(name) -} diff --git a/crates/hir/src/attrs.rs b/crates/hir/src/attrs.rs index 0bd3793400..54425d69b6 100644 --- a/crates/hir/src/attrs.rs +++ b/crates/hir/src/attrs.rs @@ -148,7 +148,7 @@ fn resolve_doc_path( let modpath = { // FIXME: this is not how we should get a mod path here - let ast_path = ast::SourceFile::parse(&format!("type T = {};", link)) + let ast_path = ast::SourceFile::parse(&format!("type T = {link};")) .syntax_node() .descendants() .find_map(ast::Path::cast)?; diff --git a/crates/hir/src/diagnostics.rs b/crates/hir/src/diagnostics.rs index c5dc60f1ec..54d43fa8dc 100644 --- a/crates/hir/src/diagnostics.rs +++ b/crates/hir/src/diagnostics.rs @@ -10,7 +10,7 @@ use hir_def::path::ModPath; use hir_expand::{name::Name, HirFileId, InFile}; use syntax::{ast, AstPtr, SyntaxNodePtr, TextRange}; -use crate::{MacroKind, Type}; +use crate::{AssocItem, Field, MacroKind, Type}; macro_rules! diagnostics { ($($diag:ident,)*) => { @@ -41,6 +41,8 @@ diagnostics![ MissingMatchArms, MissingUnsafe, NoSuchField, + PrivateAssocItem, + PrivateField, ReplaceFilterMapNextWithFindMap, TypeMismatch, UnimplementedBuiltinMacro, @@ -121,6 +123,19 @@ pub struct NoSuchField { pub field: InFile>, } +#[derive(Debug)] +pub struct PrivateAssocItem { + pub expr_or_pat: + InFile, Either, AstPtr>>>, + pub item: AssocItem, +} + +#[derive(Debug)] +pub struct PrivateField { + pub expr: InFile>, + pub field: Field, +} + #[derive(Debug)] pub struct BreakOutsideOfLoop { pub expr: InFile>, diff --git a/crates/hir/src/display.rs b/crates/hir/src/display.rs index 27b2f445d7..5a4b2f3344 100644 --- a/crates/hir/src/display.rs +++ b/crates/hir/src/display.rs @@ -79,7 +79,7 @@ impl HirDisplay for Function { } } match name { - Some(name) => write!(f, "{}: ", name)?, + Some(name) => write!(f, "{name}: ")?, None => f.write_str("_: ")?, } // FIXME: Use resolved `param.ty` or raw `type_ref`? 
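A recurring mechanical change in these `display.rs` and related hunks is the move to captured identifiers in format strings: a plain variable can be named directly inside the braces, while arbitrary expressions still go in the positional argument list. A tiny illustration:

fn describe(name: &str, age: u32) -> String {
    let next = age + 1;
    // `name` and `next` are captured straight from the surrounding scope;
    // `age * 2` is an expression, so it stays as a positional argument.
    format!("{name} will turn {next}; twice their age is {}", age * 2)
}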
@@ -327,7 +327,7 @@ fn write_generic_params( continue; } delim(f)?; - write!(f, "{}", name)?; + write!(f, "{name}")?; if let Some(default) = &ty.default { f.write_str(" = ")?; default.hir_fmt(f)?; @@ -335,7 +335,7 @@ fn write_generic_params( } TypeOrConstParamData::ConstParamData(c) => { delim(f)?; - write!(f, "const {}: ", name)?; + write!(f, "const {name}: ")?; c.ty.hir_fmt(f)?; } } @@ -372,7 +372,7 @@ fn write_where_clause(def: GenericDefId, f: &mut HirFormatter<'_>) -> Result<(), WherePredicateTypeTarget::TypeRef(ty) => ty.hir_fmt(f), WherePredicateTypeTarget::TypeOrConstParam(id) => { match ¶ms.type_or_consts[*id].name() { - Some(name) => write!(f, "{}", name), + Some(name) => write!(f, "{name}"), None => f.write_str("{unnamed}"), } } @@ -424,7 +424,7 @@ fn write_where_clause(def: GenericDefId, f: &mut HirFormatter<'_>) -> Result<(), if idx != 0 { f.write_str(", ")?; } - write!(f, "{}", lifetime)?; + write!(f, "{lifetime}")?; } f.write_str("> ")?; write_target(target, f)?; @@ -447,7 +447,7 @@ impl HirDisplay for Const { let data = f.db.const_data(self.id); f.write_str("const ")?; match &data.name { - Some(name) => write!(f, "{}: ", name)?, + Some(name) => write!(f, "{name}: ")?, None => f.write_str("_: ")?, } data.type_ref.hir_fmt(f)?; @@ -511,9 +511,9 @@ impl HirDisplay for Module { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { // FIXME: Module doesn't have visibility saved in data. match self.name(f.db) { - Some(name) => write!(f, "mod {}", name), + Some(name) => write!(f, "mod {name}"), None if self.is_crate_root(f.db) => match self.krate(f.db).display_name(f.db) { - Some(name) => write!(f, "extern crate {}", name), + Some(name) => write!(f, "extern crate {name}"), None => f.write_str("extern crate {unknown}"), }, None => f.write_str("mod {unnamed}"), diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index cbbcaebb42..08fd4453df 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -39,12 +39,13 @@ use arrayvec::ArrayVec; use base_db::{CrateDisplayName, CrateId, CrateOrigin, Edition, FileId, ProcMacroKind}; use either::Either; use hir_def::{ - adt::{ReprData, VariantData}, + adt::VariantData, body::{BodyDiagnostic, SyntheticSyntax}, - expr::{BindingAnnotation, LabelId, Pat, PatId}, + expr::{BindingAnnotation, ExprOrPatId, LabelId, Pat, PatId}, generics::{TypeOrConstParamData, TypeParamProvenance}, item_tree::ItemTreeNode, lang_item::LangItemTarget, + layout::{Layout, LayoutError, ReprOptions}, nameres::{self, diagnostics::DefDiagnostic}, per_ns::PerNs, resolver::{HasResolver, Resolver}, @@ -59,6 +60,7 @@ use hir_ty::{ all_super_traits, autoderef, consteval::{unknown_const_as_generic, ComputedExpr, ConstEvalError, ConstExt}, diagnostics::BodyValidationDiagnostic, + layout::layout_of_ty, method_resolution::{self, TyFingerprint}, primitive::UintTy, traits::FnTrait, @@ -72,7 +74,7 @@ use once_cell::unsync::Lazy; use rustc_hash::FxHashSet; use stdx::{impl_from, never}; use syntax::{ - ast::{self, Expr, HasAttrs as _, HasDocComments, HasName}, + ast::{self, HasAttrs as _, HasDocComments, HasName}, AstNode, AstPtr, SmolStr, SyntaxNodePtr, TextRange, T, }; @@ -83,9 +85,10 @@ pub use crate::{ diagnostics::{ AnyDiagnostic, BreakOutsideOfLoop, InactiveCode, IncorrectCase, InvalidDeriveTarget, MacroError, MalformedDerive, MismatchedArgCount, MissingFields, MissingMatchArms, - MissingUnsafe, NoSuchField, ReplaceFilterMapNextWithFindMap, TypeMismatch, - UnimplementedBuiltinMacro, UnresolvedExternCrate, UnresolvedImport, UnresolvedMacroCall, - 
UnresolvedModule, UnresolvedProcMacro, + MissingUnsafe, NoSuchField, PrivateAssocItem, PrivateField, + ReplaceFilterMapNextWithFindMap, TypeMismatch, UnimplementedBuiltinMacro, + UnresolvedExternCrate, UnresolvedImport, UnresolvedMacroCall, UnresolvedModule, + UnresolvedProcMacro, }, has_source::HasSource, semantics::{PathResolution, Semantics, SemanticsScope, TypeInfo, VisibleTraits}, @@ -112,12 +115,20 @@ pub use { path::{ModPath, PathKind}, type_ref::{Mutability, TypeRef}, visibility::Visibility, + // FIXME: This is here since it is input of a method in `HirWrite` + // and things outside of hir need to implement that trait. We probably + // should move whole `hir_ty::display` to this crate so we will become + // able to use `ModuleDef` or `Definition` instead of `ModuleDefId`. + ModuleDefId, }, hir_expand::{ name::{known, Name}, ExpandResult, HirFileId, InFile, MacroFile, Origin, }, - hir_ty::{display::HirDisplay, PointerCast, Safety}, + hir_ty::{ + display::{HirDisplay, HirWrite}, + PointerCast, Safety, + }, }; // These are negative re-exports: pub using these names is forbidden, they @@ -597,7 +608,7 @@ impl Module { pub fn legacy_macros(self, db: &dyn HirDatabase) -> Vec { let def_map = self.id.def_map(db.upcast()); let scope = &def_map[self.id.local_id].scope; - scope.legacy_macros().flat_map(|(_, it)| it).map(|&it| MacroId::from(it).into()).collect() + scope.legacy_macros().flat_map(|(_, it)| it).map(|&it| it.into()).collect() } pub fn impl_defs(self, db: &dyn HirDatabase) -> Vec { @@ -803,7 +814,7 @@ fn precise_macro_call_location( .doc_comments_and_attrs() .nth((*invoc_attr_index) as usize) .and_then(Either::left) - .unwrap_or_else(|| panic!("cannot find attribute #{}", invoc_attr_index)); + .unwrap_or_else(|| panic!("cannot find attribute #{invoc_attr_index}")); ( ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&attr))), @@ -844,6 +855,10 @@ impl Field { self.parent.variant_data(db).fields()[self.id].name.clone() } + pub fn index(&self) -> usize { + u32::from(self.id.into_raw()) as usize + } + /// Returns the type as in the signature of the struct (i.e., with /// placeholder types for type parameters). Only use this in the context of /// the field definition. 
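The new `Field::index` accessor above converts an arena id into a positional index. A rough sketch of that newtype-index pattern with plain types (the real code goes through `la_arena`'s raw index via `into_raw`, not a bare `u32`):

#[derive(Clone, Copy, PartialEq, Eq, Hash)]
struct FieldId(u32);

impl FieldId {
    fn index(self) -> usize {
        self.0 as usize
    }
}

struct VariantData {
    // field names, addressed positionally by `FieldId`
    field_names: Vec<String>,
}

impl VariantData {
    fn name(&self, field: FieldId) -> &str {
        &self.field_names[field.index()]
    }
}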
@@ -859,6 +874,10 @@ impl Field { Type::new(db, var_id, ty) } + pub fn layout(&self, db: &dyn HirDatabase) -> Result { + layout_of_ty(db, &self.ty(db).ty, self.parent.module(db).krate().into()) + } + pub fn parent_def(&self, _db: &dyn HirDatabase) -> VariantDef { self.parent } @@ -900,7 +919,7 @@ impl Struct { Type::from_def(db, self.id) } - pub fn repr(self, db: &dyn HirDatabase) -> Option { + pub fn repr(self, db: &dyn HirDatabase) -> Option { db.struct_data(self.id).repr.clone() } @@ -984,8 +1003,30 @@ impl Enum { Type::new_for_crate( self.id.lookup(db.upcast()).container.krate(), TyBuilder::builtin(match db.enum_data(self.id).variant_body_type() { - Either::Left(builtin) => hir_def::builtin_type::BuiltinType::Int(builtin), - Either::Right(builtin) => hir_def::builtin_type::BuiltinType::Uint(builtin), + hir_def::layout::IntegerType::Pointer(sign) => match sign { + true => hir_def::builtin_type::BuiltinType::Int( + hir_def::builtin_type::BuiltinInt::Isize, + ), + false => hir_def::builtin_type::BuiltinType::Uint( + hir_def::builtin_type::BuiltinUint::Usize, + ), + }, + hir_def::layout::IntegerType::Fixed(i, sign) => match sign { + true => hir_def::builtin_type::BuiltinType::Int(match i { + hir_def::layout::Integer::I8 => hir_def::builtin_type::BuiltinInt::I8, + hir_def::layout::Integer::I16 => hir_def::builtin_type::BuiltinInt::I16, + hir_def::layout::Integer::I32 => hir_def::builtin_type::BuiltinInt::I32, + hir_def::layout::Integer::I64 => hir_def::builtin_type::BuiltinInt::I64, + hir_def::layout::Integer::I128 => hir_def::builtin_type::BuiltinInt::I128, + }), + false => hir_def::builtin_type::BuiltinType::Uint(match i { + hir_def::layout::Integer::I8 => hir_def::builtin_type::BuiltinUint::U8, + hir_def::layout::Integer::I16 => hir_def::builtin_type::BuiltinUint::U16, + hir_def::layout::Integer::I32 => hir_def::builtin_type::BuiltinUint::U32, + hir_def::layout::Integer::I64 => hir_def::builtin_type::BuiltinUint::U64, + hir_def::layout::Integer::I128 => hir_def::builtin_type::BuiltinUint::U128, + }), + }, }), ) } @@ -1042,7 +1083,7 @@ impl Variant { db.enum_data(self.parent.id).variants[self.id].variant_data.clone() } - pub fn value(self, db: &dyn HirDatabase) -> Option { + pub fn value(self, db: &dyn HirDatabase) -> Option { self.source(db)?.value.expr() } @@ -1076,6 +1117,13 @@ impl Adt { }) } + pub fn layout(self, db: &dyn HirDatabase) -> Result { + if db.generic_params(self.into()).iter().count() != 0 { + return Err(LayoutError::HasPlaceholder); + } + db.layout_of_adt(self.into(), Substitution::empty(Interner)) + } + /// Turns this ADT into a type. Any type parameters of the ADT will be /// turned into unknown types, which is good for e.g. finding the most /// general set of completions, but will not look very nice when printed. 
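The large match above maps the layout crate's integer description (pointer-sized or fixed width, plus a signedness flag) onto the corresponding builtin type for the enum discriminant. A condensed sketch using local stand-ins for the `hir_def::layout` types:

#[derive(Clone, Copy)]
enum Integer { I8, I16, I32, I64, I128 }

#[derive(Clone, Copy)]
enum IntegerType {
    Pointer(bool),        // true = signed, i.e. isize vs usize
    Fixed(Integer, bool), // fixed width plus signedness
}

fn builtin_name(ty: IntegerType) -> &'static str {
    match ty {
        IntegerType::Pointer(true) => "isize",
        IntegerType::Pointer(false) => "usize",
        IntegerType::Fixed(i, signed) => match (i, signed) {
            (Integer::I8, true) => "i8",
            (Integer::I16, true) => "i16",
            (Integer::I32, true) => "i32",
            (Integer::I64, true) => "i64",
            (Integer::I128, true) => "i128",
            (Integer::I8, false) => "u8",
            (Integer::I16, false) => "u16",
            (Integer::I32, false) => "u32",
            (Integer::I64, false) => "u64",
            (Integer::I128, false) => "u128",
        },
    }
}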
@@ -1306,6 +1354,25 @@ impl DefWithBody { Err(SyntheticSyntax) => (), } } + &hir_ty::InferenceDiagnostic::PrivateField { expr, field } => { + let expr = source_map.expr_syntax(expr).expect("unexpected synthetic"); + let field = field.into(); + acc.push(PrivateField { expr, field }.into()) + } + &hir_ty::InferenceDiagnostic::PrivateAssocItem { id, item } => { + let expr_or_pat = match id { + ExprOrPatId::ExprId(expr) => source_map + .expr_syntax(expr) + .expect("unexpected synthetic") + .map(Either::Left), + ExprOrPatId::PatId(pat) => source_map + .pat_syntax(pat) + .expect("unexpected synthetic") + .map(Either::Right), + }; + let item = item.into(); + acc.push(PrivateAssocItem { expr_or_pat, item }.into()) + } } } for (expr, mismatch) in infer.expr_type_mismatches() { @@ -1492,7 +1559,7 @@ impl Function { } pub fn self_param(self, db: &dyn HirDatabase) -> Option { - self.has_self_param(db).then(|| SelfParam { func: self.id }) + self.has_self_param(db).then_some(SelfParam { func: self.id }) } pub fn assoc_fn_params(self, db: &dyn HirDatabase) -> Vec { @@ -2344,17 +2411,19 @@ pub struct DeriveHelper { impl DeriveHelper { pub fn derive(&self) -> Macro { - Macro { id: self.derive.into() } + Macro { id: self.derive } } pub fn name(&self, db: &dyn HirDatabase) -> Name { match self.derive { - MacroId::Macro2Id(_) => None, + MacroId::Macro2Id(it) => { + db.macro2_data(it).helpers.as_deref().and_then(|it| it.get(self.idx)).cloned() + } MacroId::MacroRulesId(_) => None, MacroId::ProcMacroId(proc_macro) => db .proc_macro_data(proc_macro) .helpers - .as_ref() + .as_deref() .and_then(|it| it.get(self.idx)) .cloned(), } @@ -2712,7 +2781,7 @@ impl Impl { pub fn all_for_trait(db: &dyn HirDatabase, trait_: Trait) -> Vec { let krate = trait_.module(db).krate(); let mut all = Vec::new(); - for Crate { id } in krate.transitive_reverse_dependencies(db).into_iter() { + for Crate { id } in krate.transitive_reverse_dependencies(db) { let impls = db.trait_impls_in_crate(id); all.extend(impls.for_trait(trait_.id).map(Self::from)) } @@ -2857,6 +2926,13 @@ impl Type { matches!(self.ty.kind(Interner), TyKind::Scalar(Scalar::Uint(UintTy::Usize))) } + pub fn is_int_or_uint(&self) -> bool { + match self.ty.kind(Interner) { + TyKind::Scalar(Scalar::Int(_) | Scalar::Uint(_)) => true, + _ => false, + } + } + pub fn remove_ref(&self) -> Option { match &self.ty.kind(Interner) { TyKind::Ref(.., ty) => Some(self.derived(ty.clone())), @@ -3031,7 +3107,7 @@ impl Type { let adt = adt_id.into(); match adt { - Adt::Struct(s) => matches!(s.repr(db), Some(ReprData { packed: true, .. })), + Adt::Struct(s) => s.repr(db).unwrap_or_default().pack.is_some(), _ => false, } } @@ -3225,7 +3301,7 @@ impl Type { with_local_impls.and_then(|b| b.id.containing_block()).into(), name, method_resolution::LookupMode::MethodCall, - &mut |_adj, id| callback(id), + &mut |_adj, id, _| callback(id), ); } @@ -3650,6 +3726,13 @@ impl From for ScopeDef { } } +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct Adjustment { + pub source: Type, + pub target: Type, + pub kind: Adjust, +} + #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] pub enum Adjust { /// Go from ! to any type. 
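The new `Adjustment` record above carries both the type before and after each coercion step; the `semantics.rs` hunks below build the chain by making each step's `target` the next step's `source`. A small sketch of that threading, with types reduced to strings for illustration:

use std::mem;

#[derive(Debug, Clone)]
struct Step {
    source: String,
    target: String,
    kind: String,
}

fn thread_types(original: &str, raw_steps: &[(&str, &str)]) -> Vec<Step> {
    let mut source_ty = original.to_string();
    raw_steps
        .iter()
        .map(|&(kind, target)| {
            // the previous target becomes the next source
            let source = mem::replace(&mut source_ty, target.to_string());
            Step { source, target: target.to_string(), kind: kind.to_string() }
        })
        .collect()
}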
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs index 2e1f88ba09..e0d2610391 100644 --- a/crates/hir/src/semantics.rs +++ b/crates/hir/src/semantics.rs @@ -2,14 +2,17 @@ mod source_to_def; -use std::{cell::RefCell, fmt, iter, ops}; +use std::{cell::RefCell, fmt, iter, mem, ops}; use base_db::{FileId, FileRange}; +use either::Either; use hir_def::{ - body, macro_id_to_def_id, + body, + expr::Expr, + macro_id_to_def_id, resolver::{self, HasResolver, Resolver, TypeNs}, type_ref::Mutability, - AsMacroCall, FunctionId, MacroId, TraitId, VariantId, + AsMacroCall, DefWithBodyId, FunctionId, MacroId, TraitId, VariantId, }; use hir_expand::{ db::AstDatabase, @@ -29,7 +32,7 @@ use crate::{ db::HirDatabase, semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx}, source_analyzer::{resolve_hir_path, SourceAnalyzer}, - Access, Adjust, AutoBorrow, BindingMode, BuiltinAttr, Callable, ConstParam, Crate, + Access, Adjust, Adjustment, AutoBorrow, BindingMode, BuiltinAttr, Callable, ConstParam, Crate, DeriveHelper, Field, Function, HasSource, HirFileId, Impl, InFile, Label, LifetimeParam, Local, Macro, Module, ModuleDef, Name, OverloadedDeref, Path, ScopeDef, ToolModule, Trait, Type, TypeAlias, TypeParam, VariantDef, @@ -334,7 +337,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> { self.imp.resolve_trait(trait_) } - pub fn expr_adjustments(&self, expr: &ast::Expr) -> Option> { + pub fn expr_adjustments(&self, expr: &ast::Expr) -> Option> { self.imp.expr_adjustments(expr) } @@ -438,8 +441,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> { } pub fn to_def(&self, src: &T) -> Option { - let src = self.imp.find_file(src.syntax()).with_value(src).cloned(); - T::to_def(&self.imp, src) + self.imp.to_def(src) } pub fn to_module_def(&self, file: FileId) -> Option { @@ -481,6 +483,11 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> { pub fn is_unsafe_ident_pat(&self, ident_pat: &ast::IdentPat) -> bool { self.imp.is_unsafe_ident_pat(ident_pat) } + + /// Returns `true` if the `node` is inside an `unsafe` context. 
+ pub fn is_inside_unsafe(&self, expr: &ast::Expr) -> bool { + self.imp.is_inside_unsafe(expr) + } } impl<'db> SemanticsImpl<'db> { @@ -788,7 +795,7 @@ impl<'db> SemanticsImpl<'db> { // requeue the tokens we got from mapping our current token down stack.extend(mapped_tokens); // if the length changed we have found a mapping for the token - (stack.len() != len).then(|| ()) + (stack.len() != len).then_some(()) }; // Remap the next token in the queue into a macro call its in, if it is not being remapped @@ -840,7 +847,7 @@ impl<'db> SemanticsImpl<'db> { } }; process_expansion_for_token(&mut stack, file_id, None, token.as_ref()) - } else if let Some(meta) = ast::Meta::cast(parent.clone()) { + } else if let Some(meta) = ast::Meta::cast(parent) { // attribute we failed expansion for earlier, this might be a derive invocation // or derive helper attribute let attr = meta.parent_attr()?; @@ -1067,26 +1074,42 @@ impl<'db> SemanticsImpl<'db> { } } - fn expr_adjustments(&self, expr: &ast::Expr) -> Option> { + fn expr_adjustments(&self, expr: &ast::Expr) -> Option> { let mutability = |m| match m { hir_ty::Mutability::Not => Mutability::Shared, hir_ty::Mutability::Mut => Mutability::Mut, }; - self.analyze(expr.syntax())?.expr_adjustments(self.db, expr).map(|it| { + + let analyzer = self.analyze(expr.syntax())?; + + let (mut source_ty, _) = analyzer.type_of_expr(self.db, expr)?; + + analyzer.expr_adjustments(self.db, expr).map(|it| { it.iter() - .map(|adjust| match adjust.kind { - hir_ty::Adjust::NeverToAny => Adjust::NeverToAny, - hir_ty::Adjust::Deref(Some(hir_ty::OverloadedDeref(m))) => { - Adjust::Deref(Some(OverloadedDeref(mutability(m)))) - } - hir_ty::Adjust::Deref(None) => Adjust::Deref(None), - hir_ty::Adjust::Borrow(hir_ty::AutoBorrow::RawPtr(m)) => { - Adjust::Borrow(AutoBorrow::RawPtr(mutability(m))) - } - hir_ty::Adjust::Borrow(hir_ty::AutoBorrow::Ref(m)) => { - Adjust::Borrow(AutoBorrow::Ref(mutability(m))) - } - hir_ty::Adjust::Pointer(pc) => Adjust::Pointer(pc), + .map(|adjust| { + let target = + Type::new_with_resolver(self.db, &analyzer.resolver, adjust.target.clone()); + let kind = match adjust.kind { + hir_ty::Adjust::NeverToAny => Adjust::NeverToAny, + hir_ty::Adjust::Deref(Some(hir_ty::OverloadedDeref(m))) => { + Adjust::Deref(Some(OverloadedDeref(mutability(m)))) + } + hir_ty::Adjust::Deref(None) => Adjust::Deref(None), + hir_ty::Adjust::Borrow(hir_ty::AutoBorrow::RawPtr(m)) => { + Adjust::Borrow(AutoBorrow::RawPtr(mutability(m))) + } + hir_ty::Adjust::Borrow(hir_ty::AutoBorrow::Ref(m)) => { + Adjust::Borrow(AutoBorrow::Ref(mutability(m))) + } + hir_ty::Adjust::Pointer(pc) => Adjust::Pointer(pc), + }; + + // Update `source_ty` for the next adjustment + let source = mem::replace(&mut source_ty, target.clone()); + + let adjustment = Adjustment { source, target, kind }; + + adjustment }) .collect() }) @@ -1198,7 +1221,7 @@ impl<'db> SemanticsImpl<'db> { krate .dependencies(self.db) .into_iter() - .find_map(|dep| (dep.name == name).then(|| dep.krate)) + .find_map(|dep| (dep.name == name).then_some(dep.krate)) } fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option { @@ -1223,10 +1246,15 @@ impl<'db> SemanticsImpl<'db> { fn with_ctx) -> T, T>(&self, f: F) -> T { let mut cache = self.s2d_cache.borrow_mut(); - let mut ctx = SourceToDefCtx { db: self.db, cache: &mut *cache }; + let mut ctx = SourceToDefCtx { db: self.db, cache: &mut cache }; f(&mut ctx) } + fn to_def(&self, src: &T) -> Option { + let src = self.find_file(src.syntax()).with_value(src).cloned(); + T::to_def(self, 
src) + } + fn to_module_def(&self, file: FileId) -> impl Iterator { self.with_ctx(|ctx| ctx.file_to_def(file)).into_iter().map(Module::from) } @@ -1350,7 +1378,7 @@ impl<'db> SemanticsImpl<'db> { self.cache .borrow() .keys() - .map(|it| format!("{:?}", it)) + .map(|it| format!("{it:?}")) .collect::>() .join(", ") ) @@ -1442,6 +1470,56 @@ impl<'db> SemanticsImpl<'db> { .map(|ty| ty.original.is_packed(self.db)) .unwrap_or(false) } + + fn is_inside_unsafe(&self, expr: &ast::Expr) -> bool { + let item_or_variant = |ancestor: SyntaxNode| { + if ast::Item::can_cast(ancestor.kind()) { + ast::Item::cast(ancestor).map(Either::Left) + } else { + ast::Variant::cast(ancestor).map(Either::Right) + } + }; + let Some(enclosing_item) = expr.syntax().ancestors().find_map(item_or_variant) else { return false }; + + let def = match &enclosing_item { + Either::Left(ast::Item::Fn(it)) if it.unsafe_token().is_some() => return true, + Either::Left(ast::Item::Fn(it)) => { + self.to_def(it).map(<_>::into).map(DefWithBodyId::FunctionId) + } + Either::Left(ast::Item::Const(it)) => { + self.to_def(it).map(<_>::into).map(DefWithBodyId::ConstId) + } + Either::Left(ast::Item::Static(it)) => { + self.to_def(it).map(<_>::into).map(DefWithBodyId::StaticId) + } + Either::Left(_) => None, + Either::Right(it) => self.to_def(it).map(<_>::into).map(DefWithBodyId::VariantId), + }; + let Some(def) = def else { return false }; + let enclosing_node = enclosing_item.as_ref().either(|i| i.syntax(), |v| v.syntax()); + + let (body, source_map) = self.db.body_with_source_map(def); + + let file_id = self.find_file(expr.syntax()).file_id; + + let Some(mut parent) = expr.syntax().parent() else { return false }; + loop { + if &parent == enclosing_node { + break false; + } + + if let Some(parent) = ast::Expr::cast(parent.clone()) { + if let Some(expr_id) = source_map.node_expr(InFile { file_id, value: &parent }) { + if let Expr::Unsafe { .. } = body[expr_id] { + break true; + } + } + } + + let Some(parent_) = parent.parent() else { break false }; + parent = parent_; + } + } } fn macro_call_to_macro_id( @@ -1600,7 +1678,7 @@ impl<'a> SemanticsScope<'a> { self.db, def, resolution.in_type_ns()?, - |name, _, id| cb(name, id.into()), + |name, id| cb(name, id.into()), ) } } diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs index 91ea1c24d1..059b80bcf1 100644 --- a/crates/hir/src/source_analyzer.rs +++ b/crates/hir/src/source_analyzer.rs @@ -21,8 +21,8 @@ use hir_def::{ path::{ModPath, Path, PathKind}, resolver::{resolver_for_scope, Resolver, TypeNs, ValueNs}, type_ref::Mutability, - AsMacroCall, AssocItemId, DefWithBodyId, FieldId, FunctionId, ItemContainerId, LocalFieldId, - Lookup, ModuleDefId, TraitId, VariantId, + AsMacroCall, AssocItemId, ConstId, DefWithBodyId, FieldId, FunctionId, ItemContainerId, + LocalFieldId, Lookup, ModuleDefId, TraitId, VariantId, }; use hir_expand::{ builtin_fn_macro::BuiltinFnLikeExpander, @@ -118,7 +118,7 @@ impl SourceAnalyzer { fn expr_id(&self, db: &dyn HirDatabase, expr: &ast::Expr) -> Option { let src = match expr { ast::Expr::MacroExpr(expr) => { - self.expand_expr(db, InFile::new(self.file_id, expr.macro_call()?.clone()))? + self.expand_expr(db, InFile::new(self.file_id, expr.macro_call()?))? 
} _ => InFile::new(self.file_id, expr.clone()), }; @@ -228,7 +228,7 @@ impl SourceAnalyzer { db: &dyn HirDatabase, pat: &ast::Pat, ) -> Option> { - let pat_id = self.pat_id(&pat)?; + let pat_id = self.pat_id(pat)?; let infer = self.infer.as_ref()?; Some( infer @@ -270,7 +270,7 @@ impl SourceAnalyzer { db: &dyn HirDatabase, await_expr: &ast::AwaitExpr, ) -> Option { - let mut ty = self.ty_of_expr(db, &await_expr.expr()?.into())?.clone(); + let mut ty = self.ty_of_expr(db, &await_expr.expr()?)?.clone(); let into_future_trait = self .resolver @@ -316,7 +316,7 @@ impl SourceAnalyzer { ast::UnaryOp::Not => name![not], ast::UnaryOp::Neg => name![neg], }; - let ty = self.ty_of_expr(db, &prefix_expr.expr()?.into())?; + let ty = self.ty_of_expr(db, &prefix_expr.expr()?)?; let (op_trait, op_fn) = self.lang_trait_fn(db, &lang_item_name, &lang_item_name)?; // HACK: subst for all methods coincides with that for their trait because the methods @@ -331,8 +331,8 @@ impl SourceAnalyzer { db: &dyn HirDatabase, index_expr: &ast::IndexExpr, ) -> Option { - let base_ty = self.ty_of_expr(db, &index_expr.base()?.into())?; - let index_ty = self.ty_of_expr(db, &index_expr.index()?.into())?; + let base_ty = self.ty_of_expr(db, &index_expr.base()?)?; + let index_ty = self.ty_of_expr(db, &index_expr.index()?)?; let lang_item_name = name![index]; @@ -352,8 +352,8 @@ impl SourceAnalyzer { binop_expr: &ast::BinExpr, ) -> Option { let op = binop_expr.op_kind()?; - let lhs = self.ty_of_expr(db, &binop_expr.lhs()?.into())?; - let rhs = self.ty_of_expr(db, &binop_expr.rhs()?.into())?; + let lhs = self.ty_of_expr(db, &binop_expr.lhs()?)?; + let rhs = self.ty_of_expr(db, &binop_expr.rhs()?)?; let (op_trait, op_fn) = lang_names_for_bin_op(op) .and_then(|(name, lang_item)| self.lang_trait_fn(db, &lang_item, &name))?; @@ -372,7 +372,7 @@ impl SourceAnalyzer { db: &dyn HirDatabase, try_expr: &ast::TryExpr, ) -> Option { - let ty = self.ty_of_expr(db, &try_expr.expr()?.into())?; + let ty = self.ty_of_expr(db, &try_expr.expr()?)?; let op_fn = db.lang_item(self.resolver.krate(), name![branch].to_smol_str())?.as_function()?; @@ -482,7 +482,7 @@ impl SourceAnalyzer { let infer = self.infer.as_deref()?; if let Some(path_expr) = parent().and_then(ast::PathExpr::cast) { let expr_id = self.expr_id(db, &path_expr.into())?; - if let Some(assoc) = infer.assoc_resolutions_for_expr(expr_id) { + if let Some((assoc, subs)) = infer.assoc_resolutions_for_expr(expr_id) { let assoc = match assoc { AssocItemId::FunctionId(f_in_trait) => { match infer.type_of_expr.get(expr_id) { @@ -501,7 +501,9 @@ impl SourceAnalyzer { } } } - + AssocItemId::ConstId(const_id) => { + self.resolve_impl_const_or_trait_def(db, const_id, subs).into() + } _ => assoc, }; @@ -515,7 +517,7 @@ impl SourceAnalyzer { prefer_value_ns = true; } else if let Some(path_pat) = parent().and_then(ast::PathPat::cast) { let pat_id = self.pat_id(&path_pat.into())?; - if let Some(assoc) = infer.assoc_resolutions_for_pat(pat_id) { + if let Some((assoc, _)) = infer.assoc_resolutions_for_pat(pat_id) { return Some(PathResolution::Def(AssocItem::from(assoc).into())); } if let Some(VariantId::EnumVariantId(variant)) = @@ -792,6 +794,24 @@ impl SourceAnalyzer { method_resolution::lookup_impl_method(db, env, func, substs) } + fn resolve_impl_const_or_trait_def( + &self, + db: &dyn HirDatabase, + const_id: ConstId, + subs: Substitution, + ) -> ConstId { + let krate = self.resolver.krate(); + let owner = match self.resolver.body_owner() { + Some(it) => it, + None => return const_id, + }; + let env = 
owner.as_generic_def_id().map_or_else( + || Arc::new(hir_ty::TraitEnvironment::empty(krate)), + |d| db.trait_environment(d), + ); + method_resolution::lookup_impl_const(db, env, const_id, subs) + } + fn lang_trait_fn( &self, db: &dyn HirDatabase, @@ -804,7 +824,7 @@ impl SourceAnalyzer { } fn ty_of_expr(&self, db: &dyn HirDatabase, expr: &ast::Expr) -> Option<&Ty> { - self.infer.as_ref()?.type_of_expr.get(self.expr_id(db, &expr)?) + self.infer.as_ref()?.type_of_expr.get(self.expr_id(db, expr)?) } } @@ -967,7 +987,7 @@ fn resolve_hir_path_( db, def, res.in_type_ns()?, - |name, _, id| (name == unresolved.name).then(|| id), + |name, id| (name == unresolved.name).then_some(id), ) }) .map(TypeAlias::from) diff --git a/crates/ide-assists/Cargo.toml b/crates/ide-assists/Cargo.toml index e781c0a016..b9260473b1 100644 --- a/crates/ide-assists/Cargo.toml +++ b/crates/ide-assists/Cargo.toml @@ -14,6 +14,7 @@ cov-mark = "2.0.0-pre.1" itertools = "0.10.5" either = "1.7.0" +smallvec = "1.10.0" stdx = { path = "../stdx", version = "0.0.0" } syntax = { path = "../syntax", version = "0.0.0" } diff --git a/crates/ide-assists/src/handlers/add_explicit_type.rs b/crates/ide-assists/src/handlers/add_explicit_type.rs index b5f99726fe..0057f439f1 100644 --- a/crates/ide-assists/src/handlers/add_explicit_type.rs +++ b/crates/ide-assists/src/handlers/add_explicit_type.rs @@ -47,7 +47,10 @@ pub(crate) fn add_explicit_type(acc: &mut Assists, ctx: &AssistContext<'_>) -> O // Don't enable the assist if there is a type ascription without any placeholders if let Some(ty) = &ascribed_ty { let mut contains_infer_ty = false; - walk_ty(ty, &mut |ty| contains_infer_ty |= matches!(ty, ast::Type::InferType(_))); + walk_ty(ty, &mut |ty| { + contains_infer_ty |= matches!(ty, ast::Type::InferType(_)); + false + }); if !contains_infer_ty { cov_mark::hit!(add_explicit_type_not_applicable_if_ty_already_specified); return None; diff --git a/crates/ide-assists/src/handlers/add_missing_impl_members.rs b/crates/ide-assists/src/handlers/add_missing_impl_members.rs index 2b3793659c..161bcc5c8d 100644 --- a/crates/ide-assists/src/handlers/add_missing_impl_members.rs +++ b/crates/ide-assists/src/handlers/add_missing_impl_members.rs @@ -107,6 +107,14 @@ fn add_missing_impl_members_inner( ) -> Option<()> { let _p = profile::span("add_missing_impl_members_inner"); let impl_def = ctx.find_node_at_offset::()?; + + if ctx.token_at_offset().all(|t| { + t.parent_ancestors() + .any(|s| ast::BlockExpr::can_cast(s.kind()) || ast::ParamList::can_cast(s.kind())) + }) { + return None; + } + let target_scope = ctx.sema.scope(impl_def.syntax())?; let trait_ = resolve_target_trait(&ctx.sema, &impl_def)?; @@ -1343,4 +1351,95 @@ impl PartialEq for SomeStruct { "#, ); } + + #[test] + fn test_ignore_function_body() { + check_assist_not_applicable( + add_missing_default_members, + r#" +trait Trait { + type X; + fn foo(&self); + fn bar(&self) {} +} + +impl Trait for () { + type X = u8; + fn foo(&self) {$0 + let x = 5; + } +}"#, + ) + } + + #[test] + fn test_ignore_param_list() { + check_assist_not_applicable( + add_missing_impl_members, + r#" +trait Trait { + type X; + fn foo(&self); + fn bar(&self); +} + +impl Trait for () { + type X = u8; + fn foo(&self$0) { + let x = 5; + } +}"#, + ) + } + + #[test] + fn test_ignore_scope_inside_function() { + check_assist_not_applicable( + add_missing_impl_members, + r#" +trait Trait { + type X; + fn foo(&self); + fn bar(&self); +} + +impl Trait for () { + type X = u8; + fn foo(&self) { + let x = async {$0 5 }; + } +}"#, + ) + 
} + + #[test] + fn test_apply_outside_function() { + check_assist( + add_missing_default_members, + r#" +trait Trait { + type X; + fn foo(&self); + fn bar(&self) {} +} + +impl Trait for () { + type X = u8; + fn foo(&self)$0 {} +}"#, + r#" +trait Trait { + type X; + fn foo(&self); + fn bar(&self) {} +} + +impl Trait for () { + type X = u8; + fn foo(&self) {} + + $0fn bar(&self) {} +}"#, + ) + } } diff --git a/crates/ide-assists/src/handlers/add_missing_match_arms.rs b/crates/ide-assists/src/handlers/add_missing_match_arms.rs index 73f4db4e5f..8e4ac69ae6 100644 --- a/crates/ide-assists/src/handlers/add_missing_match_arms.rs +++ b/crates/ide-assists/src/handlers/add_missing_match_arms.rs @@ -326,7 +326,7 @@ impl ExtendedEnum { fn resolve_enum_def(sema: &Semantics<'_, RootDatabase>, expr: &ast::Expr) -> Option { sema.type_of_expr(expr)?.adjusted().autoderef(sema.db).find_map(|ty| match ty.as_adt() { Some(Adt::Enum(e)) => Some(ExtendedEnum::Enum(e)), - _ => ty.is_bool().then(|| ExtendedEnum::Bool), + _ => ty.is_bool().then_some(ExtendedEnum::Bool), }) } @@ -344,7 +344,7 @@ fn resolve_tuple_of_enum_def( // For now we only handle expansion for a tuple of enums. Here // we map non-enum items to None and rely on `collect` to // convert Vec> into Option>. - _ => ty.is_bool().then(|| ExtendedEnum::Bool), + _ => ty.is_bool().then_some(ExtendedEnum::Bool), }) }) .collect() diff --git a/crates/ide-assists/src/handlers/add_return_type.rs b/crates/ide-assists/src/handlers/add_return_type.rs index 89040a8569..879c478acf 100644 --- a/crates/ide-assists/src/handlers/add_return_type.rs +++ b/crates/ide-assists/src/handlers/add_return_type.rs @@ -35,16 +35,16 @@ pub(crate) fn add_return_type(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opt match builder_edit_pos { InsertOrReplace::Insert(insert_pos, needs_whitespace) => { let preceeding_whitespace = if needs_whitespace { " " } else { "" }; - builder.insert(insert_pos, &format!("{preceeding_whitespace}-> {ty} ")) + builder.insert(insert_pos, format!("{preceeding_whitespace}-> {ty} ")) } InsertOrReplace::Replace(text_range) => { - builder.replace(text_range, &format!("-> {ty}")) + builder.replace(text_range, format!("-> {ty}")) } } if let FnType::Closure { wrap_expr: true } = fn_type { cov_mark::hit!(wrap_closure_non_block_expr); // `|x| x` becomes `|x| -> T x` which is invalid, so wrap it in a block - builder.replace(tail_expr.syntax().text_range(), &format!("{{{tail_expr}}}")); + builder.replace(tail_expr.syntax().text_range(), format!("{{{tail_expr}}}")); } }, ) diff --git a/crates/ide-assists/src/handlers/auto_import.rs b/crates/ide-assists/src/handlers/auto_import.rs index a689270bc0..698ad78cce 100644 --- a/crates/ide-assists/src/handlers/auto_import.rs +++ b/crates/ide-assists/src/handlers/auto_import.rs @@ -203,7 +203,7 @@ fn relevance_score( // get the distance between the imported path and the current module // (prefer items that are more local) Some((item_module, current_module)) => { - score -= module_distance_hueristic(db, ¤t_module, &item_module) as i32; + score -= module_distance_hueristic(db, current_module, &item_module) as i32; } // could not find relevant modules, so just use the length of the path as an estimate diff --git a/crates/ide-assists/src/handlers/convert_iter_for_each_to_for.rs b/crates/ide-assists/src/handlers/convert_iter_for_each_to_for.rs index 80eecf4a09..f32ef2d59d 100644 --- a/crates/ide-assists/src/handlers/convert_iter_for_each_to_for.rs +++ b/crates/ide-assists/src/handlers/convert_iter_for_each_to_for.rs @@ -216,7 
+216,7 @@ fn validate_method_call_expr( let krate = module.krate(); let iter_trait = FamousDefs(sema, krate).core_iter_Iterator()?; - it_type.impls_trait(sema.db, iter_trait, &[]).then(|| (expr, receiver)) + it_type.impls_trait(sema.db, iter_trait, &[]).then_some((expr, receiver)) } #[cfg(test)] diff --git a/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs b/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs index 92e091fca1..b0383291e7 100644 --- a/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs +++ b/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs @@ -168,7 +168,7 @@ fn edit_struct_references( let arg_list = call_expr.syntax().descendants().find_map(ast::ArgList::cast)?; edit.replace( - call_expr.syntax().text_range(), + ctx.sema.original_range(&node).range, ast::make::record_expr( path, ast::make::record_expr_field_list(arg_list.args().zip(names).map( @@ -249,6 +249,24 @@ mod tests { ); check_assist_not_applicable(convert_tuple_struct_to_named_struct, r#"struct Foo$0;"#); } + #[test] + fn convert_in_macro_args() { + check_assist( + convert_tuple_struct_to_named_struct, + r#" +macro_rules! foo {($i:expr) => {$i} } +struct T$0(u8); +fn test() { + foo!(T(1)); +}"#, + r#" +macro_rules! foo {($i:expr) => {$i} } +struct T { field1: u8 } +fn test() { + foo!(T { field1: 1 }); +}"#, + ); + } #[test] fn convert_simple_struct() { @@ -554,6 +572,29 @@ where ); } + #[test] + fn convert_variant_in_macro_args() { + check_assist( + convert_tuple_struct_to_named_struct, + r#" +macro_rules! foo {($i:expr) => {$i} } +enum T { + V$0(u8) +} +fn test() { + foo!(T::V(1)); +}"#, + r#" +macro_rules! foo {($i:expr) => {$i} } +enum T { + V { field1: u8 } +} +fn test() { + foo!(T::V { field1: 1 }); +}"#, + ); + } + #[test] fn convert_simple_variant() { check_assist( diff --git a/crates/ide-assists/src/handlers/move_format_string_arg.rs b/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs similarity index 84% rename from crates/ide-assists/src/handlers/move_format_string_arg.rs rename to crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs index 11db6ae7f7..4f3b6e0c28 100644 --- a/crates/ide-assists/src/handlers/move_format_string_arg.rs +++ b/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs @@ -10,7 +10,7 @@ use itertools::Itertools; use stdx::format_to; use syntax::{ast, AstNode, AstToken, NodeOrToken, SyntaxKind::COMMA, TextRange}; -// Assist: move_format_string_arg +// Assist: extract_expressions_from_format_string // // Move an expression out of a format string. 
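The renamed assist distinguishes three kinds of content inside format-string braces: bare `{}` placeholders keep their existing arguments, plain identifiers stay inline as captured args, and only full expressions are moved out into the argument list. A rough, illustrative classifier (not the actual ide-db format-string parser):

enum Arg {
    Placeholder,
    Ident(String),
    Expr(String),
}

fn classify(field: &str) -> Arg {
    // Strip a trailing format spec like `:?` before classifying.
    let field = field.split(':').next().unwrap_or("").trim();
    if field.is_empty() {
        Arg::Placeholder
    } else if field.chars().all(|c| c.is_alphanumeric() || c == '_')
        && !field.starts_with(|c: char| c.is_ascii_digit())
    {
        Arg::Ident(field.to_string())
    } else {
        Arg::Expr(field.to_string())
    }
}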
// @@ -23,7 +23,7 @@ use syntax::{ast, AstNode, AstToken, NodeOrToken, SyntaxKind::COMMA, TextRange}; // } // // fn main() { -// print!("{x + 1}$0"); +// print!("{var} {x + 1}$0"); // } // ``` // -> @@ -36,11 +36,14 @@ use syntax::{ast, AstNode, AstToken, NodeOrToken, SyntaxKind::COMMA, TextRange}; // } // // fn main() { -// print!("{}"$0, x + 1); +// print!("{var} {}"$0, x + 1); // } // ``` -pub(crate) fn move_format_string_arg(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { +pub(crate) fn extract_expressions_from_format_string( + acc: &mut Assists, + ctx: &AssistContext<'_>, +) -> Option<()> { let fmt_string = ctx.find_token_at_offset::()?; let tt = fmt_string.syntax().parent().and_then(ast::TokenTree::cast)?; @@ -58,7 +61,7 @@ pub(crate) fn move_format_string_arg(acc: &mut Assists, ctx: &AssistContext<'_>) acc.add( AssistId( - "move_format_string_arg", + "extract_expressions_from_format_string", // if there aren't any expressions, then make the assist a RefactorExtract if extracted_args.iter().filter(|f| matches!(f, Arg::Expr(_))).count() == 0 { AssistKind::RefactorExtract @@ -66,7 +69,7 @@ pub(crate) fn move_format_string_arg(acc: &mut Assists, ctx: &AssistContext<'_>) AssistKind::QuickFix }, ), - "Extract format args", + "Extract format expressions", tt.syntax().text_range(), |edit| { let fmt_range = fmt_string.syntax().text_range(); @@ -118,15 +121,14 @@ pub(crate) fn move_format_string_arg(acc: &mut Assists, ctx: &AssistContext<'_>) let mut placeholder_idx = 1; for extracted_args in extracted_args { - // remove expr from format string - args.push_str(", "); - match extracted_args { - Arg::Ident(s) | Arg::Expr(s) => { + Arg::Expr(s)=> { + args.push_str(", "); // insert arg args.push_str(&s); } Arg::Placeholder => { + args.push_str(", "); // try matching with existing argument match existing_args.next() { Some(ea) => { @@ -139,6 +141,7 @@ pub(crate) fn move_format_string_arg(acc: &mut Assists, ctx: &AssistContext<'_>) } } } + Arg::Ident(_s) => (), } } @@ -171,7 +174,7 @@ macro_rules! 
print { #[test] fn multiple_middle_arg() { check_assist( - move_format_string_arg, + extract_expressions_from_format_string, &add_macro_decl( r#" fn main() { @@ -192,7 +195,7 @@ fn main() { #[test] fn single_arg() { check_assist( - move_format_string_arg, + extract_expressions_from_format_string, &add_macro_decl( r#" fn main() { @@ -213,7 +216,7 @@ fn main() { #[test] fn multiple_middle_placeholders_arg() { check_assist( - move_format_string_arg, + extract_expressions_from_format_string, &add_macro_decl( r#" fn main() { @@ -234,7 +237,7 @@ fn main() { #[test] fn multiple_trailing_args() { check_assist( - move_format_string_arg, + extract_expressions_from_format_string, &add_macro_decl( r#" fn main() { @@ -255,7 +258,7 @@ fn main() { #[test] fn improper_commas() { check_assist( - move_format_string_arg, + extract_expressions_from_format_string, &add_macro_decl( r#" fn main() { @@ -276,7 +279,7 @@ fn main() { #[test] fn nested_tt() { check_assist( - move_format_string_arg, + extract_expressions_from_format_string, &add_macro_decl( r#" fn main() { @@ -289,6 +292,29 @@ fn main() { fn main() { print!("My name is {} {}"$0, stringify!(Paperino), x + x) } +"#, + ), + ); + } + + #[test] + fn extract_only_expressions() { + check_assist( + extract_expressions_from_format_string, + &add_macro_decl( + r#" +fn main() { + let var = 1 + 1; + print!("foobar {var} {var:?} {x$0 + x}") +} +"#, + ), + &add_macro_decl( + r#" +fn main() { + let var = 1 + 1; + print!("foobar {var} {var:?} {}"$0, x + x) +} "#, ), ); diff --git a/crates/ide-assists/src/handlers/extract_function.rs b/crates/ide-assists/src/handlers/extract_function.rs index c1e2f19ab1..e04a1dabb2 100644 --- a/crates/ide-assists/src/handlers/extract_function.rs +++ b/crates/ide-assists/src/handlers/extract_function.rs @@ -11,7 +11,9 @@ use ide_db::{ helpers::mod_path_to_ast, imports::insert_use::{insert_use, ImportScope}, search::{FileReference, ReferenceCategory, SearchScope}, - syntax_helpers::node_ext::{preorder_expr, walk_expr, walk_pat, walk_patterns_in_expr}, + syntax_helpers::node_ext::{ + for_each_tail_expr, preorder_expr, walk_expr, walk_pat, walk_patterns_in_expr, + }, FxIndexSet, RootDatabase, }; use itertools::Itertools; @@ -78,7 +80,7 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op }; let body = extraction_target(&node, range)?; - let container_info = body.analyze_container(&ctx.sema)?; + let (container_info, contains_tail_expr) = body.analyze_container(&ctx.sema)?; let (locals_used, self_param) = body.analyze(&ctx.sema); @@ -119,6 +121,7 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op ret_ty, body, outliving_locals, + contains_tail_expr, mods: container_info, }; @@ -245,6 +248,8 @@ struct Function { ret_ty: RetType, body: FunctionBody, outliving_locals: Vec, + /// Whether at least one of the container's tail expr is contained in the range we're extracting. + contains_tail_expr: bool, mods: ContainerInfo, } @@ -265,7 +270,7 @@ enum ParamKind { MutRef, } -#[derive(Debug, Eq, PartialEq)] +#[derive(Debug)] enum FunType { Unit, Single(hir::Type), @@ -294,7 +299,6 @@ struct ControlFlow { #[derive(Clone, Debug)] struct ContainerInfo { is_const: bool, - is_in_tail: bool, parent_loop: Option, /// The function's return type, const's type etc. 
ret_type: Option, @@ -584,7 +588,7 @@ impl FunctionBody { FunctionBody::Expr(expr) => Some(expr.clone()), FunctionBody::Span { parent, text_range } => { let tail_expr = parent.tail_expr()?; - text_range.contains_range(tail_expr.syntax().text_range()).then(|| tail_expr) + text_range.contains_range(tail_expr.syntax().text_range()).then_some(tail_expr) } } } @@ -743,7 +747,10 @@ impl FunctionBody { (res, self_param) } - fn analyze_container(&self, sema: &Semantics<'_, RootDatabase>) -> Option { + fn analyze_container( + &self, + sema: &Semantics<'_, RootDatabase>, + ) -> Option<(ContainerInfo, bool)> { let mut ancestors = self.parent()?.ancestors(); let infer_expr_opt = |expr| sema.type_of_expr(&expr?).map(TypeInfo::adjusted); let mut parent_loop = None; @@ -815,28 +822,36 @@ impl FunctionBody { } }; }; - let container_tail = match expr? { - ast::Expr::BlockExpr(block) => block.tail_expr(), - expr => Some(expr), - }; - let is_in_tail = - container_tail.zip(self.tail_expr()).map_or(false, |(container_tail, body_tail)| { - container_tail.syntax().text_range().contains_range(body_tail.syntax().text_range()) + + let expr = expr?; + let contains_tail_expr = if let Some(body_tail) = self.tail_expr() { + let mut contains_tail_expr = false; + let tail_expr_range = body_tail.syntax().text_range(); + for_each_tail_expr(&expr, &mut |e| { + if tail_expr_range.contains_range(e.syntax().text_range()) { + contains_tail_expr = true; + } }); + contains_tail_expr + } else { + false + }; let parent = self.parent()?; let parents = generic_parents(&parent); let generic_param_lists = parents.iter().filter_map(|it| it.generic_param_list()).collect(); let where_clauses = parents.iter().filter_map(|it| it.where_clause()).collect(); - Some(ContainerInfo { - is_in_tail, - is_const, - parent_loop, - ret_type: ty, - generic_param_lists, - where_clauses, - }) + Some(( + ContainerInfo { + is_const, + parent_loop, + ret_type: ty, + generic_param_lists, + where_clauses, + }, + contains_tail_expr, + )) } fn return_ty(&self, ctx: &AssistContext<'_>) -> Option { @@ -1368,7 +1383,7 @@ impl FlowHandler { None => FlowHandler::None, Some(flow_kind) => { let action = flow_kind.clone(); - if *ret_ty == FunType::Unit { + if let FunType::Unit = ret_ty { match flow_kind { FlowKind::Return(None) | FlowKind::Break(_, None) @@ -1633,7 +1648,7 @@ impl Function { fn make_ret_ty(&self, ctx: &AssistContext<'_>, module: hir::Module) -> Option { let fun_ty = self.return_type(ctx); - let handler = if self.mods.is_in_tail { + let handler = if self.contains_tail_expr { FlowHandler::None } else { FlowHandler::from_ret_ty(self, &fun_ty) @@ -1707,7 +1722,7 @@ fn make_body( fun: &Function, ) -> ast::BlockExpr { let ret_ty = fun.return_type(ctx); - let handler = if fun.mods.is_in_tail { + let handler = if fun.contains_tail_expr { FlowHandler::None } else { FlowHandler::from_ret_ty(fun, &ret_ty) @@ -1785,7 +1800,7 @@ fn make_body( .collect::>(); let tail_expr = tail_expr.map(|expr| expr.dedent(old_indent).indent(body_indent)); - make::hacky_block_expr_with_comments(elements, tail_expr) + make::hacky_block_expr(elements, tail_expr) } }; @@ -1845,9 +1860,29 @@ fn with_default_tail_expr(block: ast::BlockExpr, tail_expr: ast::Expr) -> ast::B } fn with_tail_expr(block: ast::BlockExpr, tail_expr: ast::Expr) -> ast::BlockExpr { - let stmt_tail = block.tail_expr().map(|expr| make::expr_stmt(expr).into()); - let stmts = block.statements().chain(stmt_tail); - make::block_expr(stmts, Some(tail_expr)) + let stmt_tail_opt: Option = + block.tail_expr().map(|expr| 
make::expr_stmt(expr).into()); + + let mut elements: Vec = vec![]; + + block.statements().for_each(|stmt| { + elements.push(syntax::NodeOrToken::Node(stmt.syntax().clone())); + }); + + if let Some(stmt_list) = block.stmt_list() { + stmt_list.syntax().children_with_tokens().for_each(|node_or_token| { + match &node_or_token { + syntax::NodeOrToken::Token(_) => elements.push(node_or_token), + _ => (), + }; + }); + } + + if let Some(stmt_tail) = stmt_tail_opt { + elements.push(syntax::NodeOrToken::Node(stmt_tail.syntax().clone())); + } + + make::hacky_block_expr(elements, Some(tail_expr)) } fn format_type(ty: &hir::Type, ctx: &AssistContext<'_>, module: hir::Module) -> String { @@ -1946,7 +1981,7 @@ fn update_external_control_flow(handler: &FlowHandler, syntax: &SyntaxNode) { if nested_scope.is_none() { if let Some(expr) = ast::Expr::cast(e.clone()) { match expr { - ast::Expr::ReturnExpr(return_expr) if nested_scope.is_none() => { + ast::Expr::ReturnExpr(return_expr) => { let expr = return_expr.expr(); if let Some(replacement) = make_rewritten_flow(handler, expr) { ted::replace(return_expr.syntax(), replacement.syntax()) @@ -4944,9 +4979,8 @@ fn $0fun_name() { ); } - // FIXME: we do want to preserve whitespace #[test] - fn extract_function_does_not_preserve_whitespace() { + fn extract_function_does_preserve_whitespace() { check_assist( extract_function, r#" @@ -4965,6 +4999,7 @@ fn func() { fn $0fun_name() { let a = 0; + let x = 0; } "#, @@ -5582,6 +5617,193 @@ impl Struct where T: Into + Copy, U: Debug { fn $0fun_name(t: T, v: V) -> i32 where T: Into + Copy, V: Into { t.into() + v.into() } +"#, + ); + } + + #[test] + fn non_tail_expr_of_tail_expr_loop() { + check_assist( + extract_function, + r#" +pub fn f() { + loop { + $0if true { + continue; + }$0 + + if false { + break; + } + } +} +"#, + r#" +pub fn f() { + loop { + if let ControlFlow::Break(_) = fun_name() { + continue; + } + + if false { + break; + } + } +} + +fn $0fun_name() -> ControlFlow<()> { + if true { + return ControlFlow::Break(()); + } + ControlFlow::Continue(()) +} +"#, + ); + } + + #[test] + fn non_tail_expr_of_tail_if_block() { + // FIXME: double semicolon + check_assist( + extract_function, + r#" +//- minicore: option, try +impl core::ops::Try for Option { + type Output = T; + type Residual = Option; +} +impl core::ops::FromResidual for Option {} + +fn f() -> Option<()> { + if true { + let a = $0if true { + Some(())? + } else { + () + }$0; + Some(a) + } else { + None + } +} +"#, + r#" +impl core::ops::Try for Option { + type Output = T; + type Residual = Option; +} +impl core::ops::FromResidual for Option {} + +fn f() -> Option<()> { + if true { + let a = fun_name()?;; + Some(a) + } else { + None + } +} + +fn $0fun_name() -> Option<()> { + Some(if true { + Some(())? + } else { + () + }) +} +"#, + ); + } + + #[test] + fn tail_expr_of_tail_block_nested() { + check_assist( + extract_function, + r#" +//- minicore: option, try +impl core::ops::Try for Option { + type Output = T; + type Residual = Option; +} +impl core::ops::FromResidual for Option {} + +fn f() -> Option<()> { + if true { + $0{ + let a = if true { + Some(())? + } else { + () + }; + Some(a) + }$0 + } else { + None + } +} +"#, + r#" +impl core::ops::Try for Option { + type Output = T; + type Residual = Option; +} +impl core::ops::FromResidual for Option {} + +fn f() -> Option<()> { + if true { + fun_name()? + } else { + None + } +} + +fn $0fun_name() -> Option<()> { + let a = if true { + Some(())? 
+ } else { + () + }; + Some(a) +} +"#, + ); + } + + #[test] + fn non_tail_expr_with_comment_of_tail_expr_loop() { + check_assist( + extract_function, + r#" +pub fn f() { + loop { + $0// A comment + if true { + continue; + }$0 + if false { + break; + } + } +} +"#, + r#" +pub fn f() { + loop { + if let ControlFlow::Break(_) = fun_name() { + continue; + } + if false { + break; + } + } +} + +fn $0fun_name() -> ControlFlow<()> { + // A comment + if true { + return ControlFlow::Break(()); + } + ControlFlow::Continue(()) +} "#, ); } diff --git a/crates/ide-assists/src/handlers/extract_module.rs b/crates/ide-assists/src/handlers/extract_module.rs index 56834394ae..0fa7bd558b 100644 --- a/crates/ide-assists/src/handlers/extract_module.rs +++ b/crates/ide-assists/src/handlers/extract_module.rs @@ -10,6 +10,8 @@ use ide_db::{ defs::{Definition, NameClass, NameRefClass}, search::{FileReference, SearchScope}, }; +use itertools::Itertools; +use smallvec::SmallVec; use stdx::format_to; use syntax::{ algo::find_node_at_range, @@ -116,13 +118,13 @@ pub(crate) fn extract_module(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti let mut body_items: Vec = Vec::new(); let mut items_to_be_processed: Vec = module.body_items.clone(); - let mut new_item_indent = old_item_indent + 1; - if impl_parent.is_some() { - new_item_indent = old_item_indent + 2; + let new_item_indent = if impl_parent.is_some() { + old_item_indent + 2 } else { items_to_be_processed = [module.use_items.clone(), items_to_be_processed].concat(); - } + old_item_indent + 1 + }; for item in items_to_be_processed { let item = item.indent(IndentLevel(1)); @@ -657,28 +659,23 @@ impl Module { fn check_intersection_and_push( import_paths_to_be_removed: &mut Vec, - import_path: TextRange, + mut import_path: TextRange, ) { - if import_paths_to_be_removed.len() > 0 { - // Text ranges received here for imports are extended to the - // next/previous comma which can cause intersections among them - // and later deletion of these can cause panics similar - // to reported in #11766. So to mitigate it, we - // check for intersection between all current members - // and if it exists we combine both text ranges into - // one - let r = import_paths_to_be_removed - .into_iter() - .position(|it| it.intersect(import_path).is_some()); - match r { - Some(it) => { - import_paths_to_be_removed[it] = import_paths_to_be_removed[it].cover(import_path) - } - None => import_paths_to_be_removed.push(import_path), - } - } else { - import_paths_to_be_removed.push(import_path); + // Text ranges received here for imports are extended to the + // next/previous comma which can cause intersections among them + // and later deletion of these can cause panics similar + // to reported in #11766. So to mitigate it, we + // check for intersection between all current members + // and combine all such ranges into one. 
+ let s: SmallVec<[_; 2]> = import_paths_to_be_removed + .into_iter() + .positions(|it| it.intersect(import_path).is_some()) + .collect(); + for pos in s.into_iter().rev() { + let intersecting_path = import_paths_to_be_removed.swap_remove(pos); + import_path = import_path.cover(intersecting_path); } + import_paths_to_be_removed.push(import_path); } fn does_source_exists_outside_sel_in_same_mod( @@ -1766,4 +1763,49 @@ mod modname { ", ) } + + #[test] + fn test_merge_multiple_intersections() { + check_assist( + extract_module, + r#" +mod dep { + pub struct A; + pub struct B; + pub struct C; +} + +use dep::{A, B, C}; + +$0struct S { + inner: A, + state: C, + condvar: B, +}$0 +"#, + r#" +mod dep { + pub struct A; + pub struct B; + pub struct C; +} + +use dep::{}; + +mod modname { + use super::dep::B; + + use super::dep::C; + + use super::dep::A; + + pub(crate) struct S { + pub(crate) inner: A, + pub(crate) state: C, + pub(crate) condvar: B, + } +} +"#, + ); + } } diff --git a/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs b/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs index b4e10667b0..49debafe1a 100644 --- a/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs +++ b/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs @@ -178,7 +178,7 @@ fn extract_generic_params( .fold(false, |tagged, ty| tag_generics_in_variant(&ty, &mut generics) || tagged), }; - let generics = generics.into_iter().filter_map(|(param, tag)| tag.then(|| param)); + let generics = generics.into_iter().filter_map(|(param, tag)| tag.then_some(param)); tagged_one.then(|| make::generic_param_list(generics)) } diff --git a/crates/ide-assists/src/handlers/extract_type_alias.rs b/crates/ide-assists/src/handlers/extract_type_alias.rs index 3116935fc5..0505f5784f 100644 --- a/crates/ide-assists/src/handlers/extract_type_alias.rs +++ b/crates/ide-assists/src/handlers/extract_type_alias.rs @@ -108,76 +108,80 @@ fn collect_used_generics<'gp>( } let mut generics = Vec::new(); - walk_ty(ty, &mut |ty| match ty { - ast::Type::PathType(ty) => { - if let Some(path) = ty.path() { - if let Some(name_ref) = path.as_single_name_ref() { - if let Some(param) = known_generics.iter().find(|gp| { - match gp { - ast::GenericParam::ConstParam(cp) => cp.name(), - ast::GenericParam::TypeParam(tp) => tp.name(), - _ => None, - } - .map_or(false, |n| n.text() == name_ref.text()) - }) { - generics.push(param); - } - } - generics.extend( - path.segments() - .filter_map(|seg| seg.generic_arg_list()) - .flat_map(|it| it.generic_args()) - .filter_map(|it| match it { - ast::GenericArg::LifetimeArg(lt) => { - let lt = lt.lifetime()?; - known_generics.iter().find(find_lifetime(<.text())) + walk_ty(ty, &mut |ty| { + match ty { + ast::Type::PathType(ty) => { + if let Some(path) = ty.path() { + if let Some(name_ref) = path.as_single_name_ref() { + if let Some(param) = known_generics.iter().find(|gp| { + match gp { + ast::GenericParam::ConstParam(cp) => cp.name(), + ast::GenericParam::TypeParam(tp) => tp.name(), + _ => None, } - _ => None, - }), - ); - } - } - ast::Type::ImplTraitType(impl_ty) => { - if let Some(it) = impl_ty.type_bound_list() { - generics.extend( - it.bounds() - .filter_map(|it| it.lifetime()) - .filter_map(|lt| known_generics.iter().find(find_lifetime(<.text()))), - ); - } - } - ast::Type::DynTraitType(dyn_ty) => { - if let Some(it) = dyn_ty.type_bound_list() { - generics.extend( - it.bounds() - .filter_map(|it| it.lifetime()) - .filter_map(|lt| 
known_generics.iter().find(find_lifetime(<.text()))), - ); - } - } - ast::Type::RefType(ref_) => generics.extend( - ref_.lifetime().and_then(|lt| known_generics.iter().find(find_lifetime(<.text()))), - ), - ast::Type::ArrayType(ar) => { - if let Some(expr) = ar.expr() { - if let ast::Expr::PathExpr(p) = expr { - if let Some(path) = p.path() { - if let Some(name_ref) = path.as_single_name_ref() { - if let Some(param) = known_generics.iter().find(|gp| { - if let ast::GenericParam::ConstParam(cp) = gp { - cp.name().map_or(false, |n| n.text() == name_ref.text()) - } else { - false + .map_or(false, |n| n.text() == name_ref.text()) + }) { + generics.push(param); + } + } + generics.extend( + path.segments() + .filter_map(|seg| seg.generic_arg_list()) + .flat_map(|it| it.generic_args()) + .filter_map(|it| match it { + ast::GenericArg::LifetimeArg(lt) => { + let lt = lt.lifetime()?; + known_generics.iter().find(find_lifetime(<.text())) + } + _ => None, + }), + ); + } + } + ast::Type::ImplTraitType(impl_ty) => { + if let Some(it) = impl_ty.type_bound_list() { + generics.extend( + it.bounds() + .filter_map(|it| it.lifetime()) + .filter_map(|lt| known_generics.iter().find(find_lifetime(<.text()))), + ); + } + } + ast::Type::DynTraitType(dyn_ty) => { + if let Some(it) = dyn_ty.type_bound_list() { + generics.extend( + it.bounds() + .filter_map(|it| it.lifetime()) + .filter_map(|lt| known_generics.iter().find(find_lifetime(<.text()))), + ); + } + } + ast::Type::RefType(ref_) => generics.extend( + ref_.lifetime() + .and_then(|lt| known_generics.iter().find(find_lifetime(<.text()))), + ), + ast::Type::ArrayType(ar) => { + if let Some(expr) = ar.expr() { + if let ast::Expr::PathExpr(p) = expr { + if let Some(path) = p.path() { + if let Some(name_ref) = path.as_single_name_ref() { + if let Some(param) = known_generics.iter().find(|gp| { + if let ast::GenericParam::ConstParam(cp) = gp { + cp.name().map_or(false, |n| n.text() == name_ref.text()) + } else { + false + } + }) { + generics.push(param); } - }) { - generics.push(param); } } } } } - } - _ => (), + _ => (), + }; + false }); // stable resort to lifetime, type, const generics.sort_by_key(|gp| match gp { diff --git a/crates/ide-assists/src/handlers/generate_default_from_new.rs b/crates/ide-assists/src/handlers/generate_default_from_new.rs index 49d9fd707f..2d074a33e7 100644 --- a/crates/ide-assists/src/handlers/generate_default_from_new.rs +++ b/crates/ide-assists/src/handlers/generate_default_from_new.rs @@ -53,7 +53,7 @@ pub(crate) fn generate_default_from_new(acc: &mut Assists, ctx: &AssistContext<' return None; } - let impl_ = fn_node.syntax().ancestors().into_iter().find_map(ast::Impl::cast)?; + let impl_ = fn_node.syntax().ancestors().find_map(ast::Impl::cast)?; if is_default_implemented(ctx, &impl_) { cov_mark::hit!(default_block_is_already_present); cov_mark::hit!(struct_in_module_with_default); diff --git a/crates/ide-assists/src/handlers/generate_delegate_methods.rs b/crates/ide-assists/src/handlers/generate_delegate_methods.rs index ceae807550..c8d0493d09 100644 --- a/crates/ide-assists/src/handlers/generate_delegate_methods.rs +++ b/crates/ide-assists/src/handlers/generate_delegate_methods.rs @@ -81,7 +81,7 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<' acc.add_group( &GroupLabel("Generate delegate methods…".to_owned()), AssistId("generate_delegate_methods", AssistKind::Generate), - format!("Generate delegate for `{}.{}()`", field_name, method.name(ctx.db())), + format!("Generate delegate for 
`{field_name}.{}()`", method.name(ctx.db())), target, |builder| { // Create the function @@ -104,9 +104,11 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<' make::name_ref(&method_name.to_string()), arg_list, ); - let body = make::block_expr([], Some(tail_expr)); let ret_type = method_source.ret_type(); let is_async = method_source.async_token().is_some(); + let tail_expr_finished = + if is_async { make::expr_await(tail_expr) } else { tail_expr }; + let body = make::block_expr([], Some(tail_expr_finished)); let f = make::fn_(vis, name, type_params, params, body, ret_type, is_async) .indent(ast::edit::IndentLevel(1)) .clone_for_update(); @@ -306,7 +308,7 @@ struct Person { impl Person { $0pub(crate) async fn age(&'a mut self, ty: T, arg: J) -> T { - self.age.age(ty, arg) + self.age.age(ty, arg).await } }"#, ); diff --git a/crates/ide-assists/src/handlers/generate_deref.rs b/crates/ide-assists/src/handlers/generate_deref.rs index 55b7afb3d3..b6958e2919 100644 --- a/crates/ide-assists/src/handlers/generate_deref.rs +++ b/crates/ide-assists/src/handlers/generate_deref.rs @@ -85,8 +85,7 @@ fn generate_tuple_deref(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<() let strukt = ctx.find_node_at_offset::()?; let field = ctx.find_node_at_offset::()?; let field_list = ctx.find_node_at_offset::()?; - let field_list_index = - field_list.syntax().children().into_iter().position(|s| &s == field.syntax())?; + let field_list_index = field_list.syntax().children().position(|s| &s == field.syntax())?; let deref_type_to_generate = match existing_deref_impl(&ctx.sema, &strukt) { None => DerefType::Deref, diff --git a/crates/ide-assists/src/handlers/generate_enum_projection_method.rs b/crates/ide-assists/src/handlers/generate_enum_projection_method.rs index c9aa41c845..ee643ce9a4 100644 --- a/crates/ide-assists/src/handlers/generate_enum_projection_method.rs +++ b/crates/ide-assists/src/handlers/generate_enum_projection_method.rs @@ -157,7 +157,7 @@ fn generate_enum_projection_method( assist_description, target, |builder| { - let vis = parent_enum.visibility().map_or(String::new(), |v| format!("{} ", v)); + let vis = parent_enum.visibility().map_or(String::new(), |v| format!("{v} ")); let field_type_syntax = field_type.syntax(); diff --git a/crates/ide-assists/src/handlers/generate_enum_variant.rs b/crates/ide-assists/src/handlers/generate_enum_variant.rs index 0bcb572831..cd037f7492 100644 --- a/crates/ide-assists/src/handlers/generate_enum_variant.rs +++ b/crates/ide-assists/src/handlers/generate_enum_variant.rs @@ -180,7 +180,7 @@ fn make_tuple_field_list( ) -> Option { let args = call_expr.arg_list()?.args(); let tuple_fields = args.map(|arg| { - let ty = expr_ty(ctx, arg, &scope).unwrap_or_else(make::ty_placeholder); + let ty = expr_ty(ctx, arg, scope).unwrap_or_else(make::ty_placeholder); make::tuple_field(None, ty) }); Some(make::tuple_field_list(tuple_fields).into()) diff --git a/crates/ide-assists/src/handlers/generate_from_impl_for_enum.rs b/crates/ide-assists/src/handlers/generate_from_impl_for_enum.rs index 7c81d2c6a6..742f1f78c2 100644 --- a/crates/ide-assists/src/handlers/generate_from_impl_for_enum.rs +++ b/crates/ide-assists/src/handlers/generate_from_impl_for_enum.rs @@ -1,7 +1,9 @@ use ide_db::{famous_defs::FamousDefs, RootDatabase}; use syntax::ast::{self, AstNode, HasName}; -use crate::{utils::generate_trait_impl_text, AssistContext, AssistId, AssistKind, Assists}; +use crate::{ + utils::generate_trait_impl_text_intransitive, AssistContext, AssistId, 
AssistKind, Assists, +}; // Assist: generate_from_impl_for_enum // @@ -70,7 +72,7 @@ pub(crate) fn generate_from_impl_for_enum( }}"# ) }; - let from_impl = generate_trait_impl_text(&enum_, &from_trait, &impl_code); + let from_impl = generate_trait_impl_text_intransitive(&enum_, &from_trait, &impl_code); edit.insert(start_offset, from_impl); }, ) diff --git a/crates/ide-assists/src/handlers/generate_function.rs b/crates/ide-assists/src/handlers/generate_function.rs index 57f198748c..da9b0cda5b 100644 --- a/crates/ide-assists/src/handlers/generate_function.rs +++ b/crates/ide-assists/src/handlers/generate_function.rs @@ -514,7 +514,7 @@ fn fn_args( /// vec!["foo_1".into(), "foo_2".into(), "bar_1".into(), "baz".into(), "bar_2".into()]; /// assert_eq!(names, expected); /// ``` -fn deduplicate_arg_names(arg_names: &mut Vec) { +fn deduplicate_arg_names(arg_names: &mut [String]) { let mut arg_name_counts = FxHashMap::default(); for name in arg_names.iter() { *arg_name_counts.entry(name).or_insert(0) += 1; diff --git a/crates/ide-assists/src/handlers/generate_getter.rs b/crates/ide-assists/src/handlers/generate_getter.rs index 5e71914283..15641b448d 100644 --- a/crates/ide-assists/src/handlers/generate_getter.rs +++ b/crates/ide-assists/src/handlers/generate_getter.rs @@ -176,7 +176,7 @@ pub(crate) fn generate_getter_impl( // for separating it from other assoc items, that needs // to be handled spearately let mut getter_buf = - generate_getter_from_info(ctx, &getter_info, &record_field_info); + generate_getter_from_info(ctx, &getter_info, record_field_info); // Insert `$0` only for last getter we generate if i == record_fields_count - 1 { @@ -235,7 +235,7 @@ fn generate_getter_from_info( ) -> String { let mut buf = String::with_capacity(512); - let vis = info.strukt.visibility().map_or(String::new(), |v| format!("{} ", v)); + let vis = info.strukt.visibility().map_or(String::new(), |v| format!("{v} ")); let (ty, body) = if info.mutable { ( format!("&mut {}", record_field_info.field_ty), @@ -271,7 +271,7 @@ fn generate_getter_from_info( }}", vis, record_field_info.fn_name, - info.mutable.then(|| "mut ").unwrap_or_default(), + info.mutable.then_some("mut ").unwrap_or_default(), ty, body, ); diff --git a/crates/ide-assists/src/handlers/generate_impl.rs b/crates/ide-assists/src/handlers/generate_impl.rs index 9af26c04eb..9ad14a819d 100644 --- a/crates/ide-assists/src/handlers/generate_impl.rs +++ b/crates/ide-assists/src/handlers/generate_impl.rs @@ -1,14 +1,17 @@ use syntax::ast::{self, AstNode, HasName}; -use crate::{utils::generate_impl_text, AssistContext, AssistId, AssistKind, Assists}; +use crate::{ + utils::{generate_impl_text, generate_trait_impl_text_intransitive}, + AssistContext, AssistId, AssistKind, Assists, +}; // Assist: generate_impl // // Adds a new inherent impl for a type. // // ``` -// struct Ctx { -// data: T,$0 +// struct Ctx$0 { +// data: T, // } // ``` // -> @@ -26,6 +29,10 @@ pub(crate) fn generate_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> Optio let name = nominal.name()?; let target = nominal.syntax().text_range(); + if let Some(_) = ctx.find_node_at_offset::() { + return None; + } + acc.add( AssistId("generate_impl", AssistKind::Generate), format!("Generate impl for `{name}`"), @@ -46,130 +53,211 @@ pub(crate) fn generate_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> Optio ) } +// Assist: generate_trait_impl +// +// Adds a new trait impl for a type. 
+// +// ``` +// struct $0Ctx { +// data: T, +// } +// ``` +// -> +// ``` +// struct Ctx { +// data: T, +// } +// +// impl $0 for Ctx { +// +// } +// ``` +pub(crate) fn generate_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { + let nominal = ctx.find_node_at_offset::()?; + let name = nominal.name()?; + let target = nominal.syntax().text_range(); + + if let Some(_) = ctx.find_node_at_offset::() { + return None; + } + + acc.add( + AssistId("generate_trait_impl", AssistKind::Generate), + format!("Generate trait impl for `{name}`"), + target, + |edit| { + let start_offset = nominal.syntax().text_range().end(); + match ctx.config.snippet_cap { + Some(cap) => { + let snippet = generate_trait_impl_text_intransitive(&nominal, "$0", ""); + edit.insert_snippet(cap, start_offset, snippet); + } + None => { + let text = generate_trait_impl_text_intransitive(&nominal, "", ""); + edit.insert(start_offset, text); + } + } + }, + ) +} + #[cfg(test)] mod tests { use crate::tests::{check_assist, check_assist_target}; use super::*; - // FIXME: break up into separate test fns #[test] fn test_add_impl() { check_assist( generate_impl, - "struct Foo {$0}\n", - "struct Foo {}\n\nimpl Foo {\n $0\n}\n", - ); - check_assist( - generate_impl, - "struct Foo {$0}", - "struct Foo {}\n\nimpl Foo {\n $0\n}", - ); - check_assist( - generate_impl, - "struct Foo<'a, T: Foo<'a>> {$0}", - "struct Foo<'a, T: Foo<'a>> {}\n\nimpl<'a, T: Foo<'a>> Foo<'a, T> {\n $0\n}", + r#" + struct Foo$0 {} + "#, + r#" + struct Foo {} + + impl Foo { + $0 + } + "#, ); + } + + #[test] + fn test_add_impl_with_generics() { check_assist( generate_impl, r#" - struct MyOwnArray {}$0"#, + struct Foo$0 {} + "#, r#" - struct MyOwnArray {} + struct Foo {} - impl MyOwnArray { - $0 - }"#, + impl Foo { + $0 + } + "#, ); + } + + #[test] + fn test_add_impl_with_generics_and_lifetime_parameters() { check_assist( generate_impl, r#" - #[cfg(feature = "foo")] - struct Foo<'a, T: Foo<'a>> {$0}"#, + struct Foo<'a, T: Foo<'a>>$0 {} + "#, r#" - #[cfg(feature = "foo")] - struct Foo<'a, T: Foo<'a>> {} + struct Foo<'a, T: Foo<'a>> {} - #[cfg(feature = "foo")] - impl<'a, T: Foo<'a>> Foo<'a, T> { - $0 - }"#, + impl<'a, T: Foo<'a>> Foo<'a, T> { + $0 + } + "#, ); + } + #[test] + fn test_add_impl_with_attributes() { check_assist( generate_impl, r#" - #[cfg(not(feature = "foo"))] - struct Foo<'a, T: Foo<'a>> {$0}"#, + #[cfg(feature = "foo")] + struct Foo<'a, T: Foo$0<'a>> {} + "#, r#" - #[cfg(not(feature = "foo"))] - struct Foo<'a, T: Foo<'a>> {} + #[cfg(feature = "foo")] + struct Foo<'a, T: Foo<'a>> {} - #[cfg(not(feature = "foo"))] - impl<'a, T: Foo<'a>> Foo<'a, T> { - $0 - }"#, + #[cfg(feature = "foo")] + impl<'a, T: Foo<'a>> Foo<'a, T> { + $0 + } + "#, ); + } + #[test] + fn test_add_impl_with_default_generic() { check_assist( generate_impl, r#" - struct Defaulted {}$0"#, + struct Defaulted$0 {} + "#, r#" - struct Defaulted {} + struct Defaulted {} - impl Defaulted { - $0 - }"#, + impl Defaulted { + $0 + } + "#, ); + } + #[test] + fn test_add_impl_with_constrained_default_generic() { check_assist( generate_impl, r#" - struct Defaulted<'a, 'b: 'a, T: Debug + Clone + 'a + 'b = String, const S: usize> {}$0"#, + struct Defaulted$0<'a, 'b: 'a, T: Debug + Clone + 'a + 'b = String, const S: usize> {} + "#, r#" - struct Defaulted<'a, 'b: 'a, T: Debug + Clone + 'a + 'b = String, const S: usize> {} + struct Defaulted<'a, 'b: 'a, T: Debug + Clone + 'a + 'b = String, const S: usize> {} - impl<'a, 'b: 'a, T: Debug + Clone + 'a + 'b, const S: usize> Defaulted<'a, 'b, 
T, S> { - $0 - }"#, + impl<'a, 'b: 'a, T: Debug + Clone + 'a + 'b, const S: usize> Defaulted<'a, 'b, T, S> { + $0 + } + "#, ); + } + #[test] + fn test_add_impl_with_const_defaulted_generic() { check_assist( generate_impl, r#" - struct Defaulted {}$0"#, + struct Defaulted$0 {} + "#, r#" - struct Defaulted {} + struct Defaulted {} - impl Defaulted { - $0 - }"#, + impl Defaulted { + $0 + } + "#, ); + } + #[test] + fn test_add_impl_with_trait_constraint() { check_assist( generate_impl, - r#"pub trait Trait {} -struct Struct$0 -where - T: Trait, -{ - inner: T, -}"#, - r#"pub trait Trait {} -struct Struct -where - T: Trait, -{ - inner: T, -} + r#" + pub trait Trait {} + struct Struct$0 + where + T: Trait, + { + inner: T, + } + "#, + r#" + pub trait Trait {} + struct Struct + where + T: Trait, + { + inner: T, + } -impl Struct -where - T: Trait, -{ - $0 -}"#, + impl Struct + where + T: Trait, + { + $0 + } + "#, ); } @@ -177,14 +265,181 @@ where fn add_impl_target() { check_assist_target( generate_impl, - " -struct SomeThingIrrelevant; -/// Has a lifetime parameter -struct Foo<'a, T: Foo<'a>> {$0} -struct EvenMoreIrrelevant; -", - "/// Has a lifetime parameter -struct Foo<'a, T: Foo<'a>> {}", + r#" + struct SomeThingIrrelevant; + /// Has a lifetime parameter + struct Foo$0<'a, T: Foo<'a>> {} + struct EvenMoreIrrelevant; + "#, + "/// Has a lifetime parameter\nstruct Foo<'a, T: Foo<'a>> {}", + ); + } + + #[test] + fn test_add_trait_impl() { + check_assist( + generate_trait_impl, + r#" + struct Foo$0 {} + "#, + r#" + struct Foo {} + + impl $0 for Foo { + + } + "#, + ); + } + + #[test] + fn test_add_trait_impl_with_generics() { + check_assist( + generate_trait_impl, + r#" + struct Foo$0 {} + "#, + r#" + struct Foo {} + + impl $0 for Foo { + + } + "#, + ); + } + + #[test] + fn test_add_trait_impl_with_generics_and_lifetime_parameters() { + check_assist( + generate_trait_impl, + r#" + struct Foo<'a, T: Foo<'a>>$0 {} + "#, + r#" + struct Foo<'a, T: Foo<'a>> {} + + impl<'a, T: Foo<'a>> $0 for Foo<'a, T> { + + } + "#, + ); + } + + #[test] + fn test_add_trait_impl_with_attributes() { + check_assist( + generate_trait_impl, + r#" + #[cfg(feature = "foo")] + struct Foo<'a, T: Foo$0<'a>> {} + "#, + r#" + #[cfg(feature = "foo")] + struct Foo<'a, T: Foo<'a>> {} + + #[cfg(feature = "foo")] + impl<'a, T: Foo<'a>> $0 for Foo<'a, T> { + + } + "#, + ); + } + + #[test] + fn test_add_trait_impl_with_default_generic() { + check_assist( + generate_trait_impl, + r#" + struct Defaulted$0 {} + "#, + r#" + struct Defaulted {} + + impl $0 for Defaulted { + + } + "#, + ); + } + + #[test] + fn test_add_trait_impl_with_constrained_default_generic() { + check_assist( + generate_trait_impl, + r#" + struct Defaulted$0<'a, 'b: 'a, T: Debug + Clone + 'a + 'b = String, const S: usize> {} + "#, + r#" + struct Defaulted<'a, 'b: 'a, T: Debug + Clone + 'a + 'b = String, const S: usize> {} + + impl<'a, 'b: 'a, T: Debug + Clone + 'a + 'b, const S: usize> $0 for Defaulted<'a, 'b, T, S> { + + } + "#, + ); + } + + #[test] + fn test_add_trait_impl_with_const_defaulted_generic() { + check_assist( + generate_trait_impl, + r#" + struct Defaulted$0 {} + "#, + r#" + struct Defaulted {} + + impl $0 for Defaulted { + + } + "#, + ); + } + + #[test] + fn test_add_trait_impl_with_trait_constraint() { + check_assist( + generate_trait_impl, + r#" + pub trait Trait {} + struct Struct$0 + where + T: Trait, + { + inner: T, + } + "#, + r#" + pub trait Trait {} + struct Struct + where + T: Trait, + { + inner: T, + } + + impl $0 for Struct + where + T: Trait, + { 
+ + } + "#, + ); + } + + #[test] + fn add_trait_impl_target() { + check_assist_target( + generate_trait_impl, + r#" + struct SomeThingIrrelevant; + /// Has a lifetime parameter + struct Foo$0<'a, T: Foo<'a>> {} + struct EvenMoreIrrelevant; + "#, + "/// Has a lifetime parameter\nstruct Foo<'a, T: Foo<'a>> {}", ); } } diff --git a/crates/ide-assists/src/handlers/generate_new.rs b/crates/ide-assists/src/handlers/generate_new.rs index 17fadea0ea..8d311262a7 100644 --- a/crates/ide-assists/src/handlers/generate_new.rs +++ b/crates/ide-assists/src/handlers/generate_new.rs @@ -70,7 +70,7 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option )?; let expr = use_trivial_constructor( - &ctx.sema.db, + ctx.sema.db, ide_db::helpers::mod_path_to_ast(&type_path), &ty, )?; diff --git a/crates/ide-assists/src/handlers/inline_call.rs b/crates/ide-assists/src/handlers/inline_call.rs index 0c546ce5d4..5ac18727c1 100644 --- a/crates/ide-assists/src/handlers/inline_call.rs +++ b/crates/ide-assists/src/handlers/inline_call.rs @@ -394,7 +394,7 @@ fn inline( // Inline parameter expressions or generate `let` statements depending on whether inlining works or not. for ((pat, param_ty, _), usages, expr) in izip!(params, param_use_nodes, arguments).rev() { // izip confuses RA due to our lack of hygiene info currently losing us type info causing incorrect errors - let usages: &[ast::PathExpr] = &*usages; + let usages: &[ast::PathExpr] = &usages; let expr: &ast::Expr = expr; let insert_let_stmt = || { diff --git a/crates/ide-assists/src/handlers/inline_macro.rs b/crates/ide-assists/src/handlers/inline_macro.rs new file mode 100644 index 0000000000..9d03f03d20 --- /dev/null +++ b/crates/ide-assists/src/handlers/inline_macro.rs @@ -0,0 +1,233 @@ +use syntax::ast::{self, AstNode}; + +use crate::{AssistContext, AssistId, AssistKind, Assists}; + +// Assist: inline_macro +// +// Takes a macro and inlines it one step. +// +// ``` +// macro_rules! num { +// (+$($t:tt)+) => (1 + num!($($t )+)); +// (-$($t:tt)+) => (-1 + num!($($t )+)); +// (+) => (1); +// (-) => (-1); +// } +// +// fn main() { +// let number = num$0!(+ + + - + +); +// println!("{number}"); +// } +// ``` +// -> +// ``` +// macro_rules! num { +// (+$($t:tt)+) => (1 + num!($($t )+)); +// (-$($t:tt)+) => (-1 + num!($($t )+)); +// (+) => (1); +// (-) => (-1); +// } +// +// fn main() { +// let number = 1+num!(+ + - + +); +// println!("{number}"); +// } +// ``` +pub(crate) fn inline_macro(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { + let unexpanded = ctx.find_node_at_offset::()?; + let expanded = ctx.sema.expand(&unexpanded)?.clone_for_update(); + + let text_range = unexpanded.syntax().text_range(); + + acc.add( + AssistId("inline_macro", AssistKind::RefactorRewrite), + format!("Inline macro"), + text_range, + |builder| builder.replace(text_range, expanded.to_string()), + ) +} + +#[cfg(test)] +mod tests { + use super::*; + + use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target}; + + macro_rules! simple_macro { + () => { + r#" +macro_rules! foo { + (foo) => (true); + () => (false); +} +"# + }; + } + macro_rules! double_macro { + () => { + r#" +macro_rules! bar { + (bar) => (true); + ($($tt:tt)?) => (false); +} +macro_rules! foo { + (foo) => (true); + (bar) => (bar!(bar)); + ($($tt:tt)?) => (bar!($($tt)?)); +} +"# + }; + } + + macro_rules! complex_macro { + () => { + r#" +macro_rules! 
num { + (+$($t:tt)+) => (1 + num!($($t )+)); + (-$($t:tt)+) => (-1 + num!($($t )+)); + (+) => (1); + (-) => (-1); +} +"# + }; + } + #[test] + fn inline_macro_target() { + check_assist_target( + inline_macro, + concat!(simple_macro!(), r#"fn f() { let a = foo$0!(foo); }"#), + "foo!(foo)", + ); + } + + #[test] + fn inline_macro_target_start() { + check_assist_target( + inline_macro, + concat!(simple_macro!(), r#"fn f() { let a = $0foo!(foo); }"#), + "foo!(foo)", + ); + } + + #[test] + fn inline_macro_target_end() { + check_assist_target( + inline_macro, + concat!(simple_macro!(), r#"fn f() { let a = foo!(foo$0); }"#), + "foo!(foo)", + ); + } + + #[test] + fn inline_macro_simple_case1() { + check_assist( + inline_macro, + concat!(simple_macro!(), r#"fn f() { let result = foo$0!(foo); }"#), + concat!(simple_macro!(), r#"fn f() { let result = true; }"#), + ); + } + + #[test] + fn inline_macro_simple_case2() { + check_assist( + inline_macro, + concat!(simple_macro!(), r#"fn f() { let result = foo$0!(); }"#), + concat!(simple_macro!(), r#"fn f() { let result = false; }"#), + ); + } + + #[test] + fn inline_macro_simple_not_applicable() { + check_assist_not_applicable( + inline_macro, + concat!(simple_macro!(), r#"fn f() { let result$0 = foo!(foo); }"#), + ); + } + + #[test] + fn inline_macro_simple_not_applicable_broken_macro() { + // FIXME: This is a bug. The macro should not expand, but it's + // the same behaviour as the "Expand Macro Recursively" commmand + // so it's presumably OK for the time being. + check_assist( + inline_macro, + concat!(simple_macro!(), r#"fn f() { let result = foo$0!(asdfasdf); }"#), + concat!(simple_macro!(), r#"fn f() { let result = true; }"#), + ); + } + + #[test] + fn inline_macro_double_case1() { + check_assist( + inline_macro, + concat!(double_macro!(), r#"fn f() { let result = foo$0!(bar); }"#), + concat!(double_macro!(), r#"fn f() { let result = bar!(bar); }"#), + ); + } + + #[test] + fn inline_macro_double_case2() { + check_assist( + inline_macro, + concat!(double_macro!(), r#"fn f() { let result = foo$0!(asdf); }"#), + concat!(double_macro!(), r#"fn f() { let result = bar!(asdf); }"#), + ); + } + + #[test] + fn inline_macro_complex_case1() { + check_assist( + inline_macro, + concat!(complex_macro!(), r#"fn f() { let result = num!(+ +$0 + - +); }"#), + concat!(complex_macro!(), r#"fn f() { let result = 1+num!(+ + - +); }"#), + ); + } + + #[test] + fn inline_macro_complex_case2() { + check_assist( + inline_macro, + concat!(complex_macro!(), r#"fn f() { let result = n$0um!(- + + - +); }"#), + concat!(complex_macro!(), r#"fn f() { let result = -1+num!(+ + - +); }"#), + ); + } + + #[test] + fn inline_macro_recursive_macro() { + check_assist( + inline_macro, + r#" +macro_rules! foo { + () => {foo!()} +} +fn f() { let result = foo$0!(); } +"#, + r#" +macro_rules! 
foo { + () => {foo!()} +} +fn f() { let result = foo!(); } +"#, + ); + } + + #[test] + fn inline_macro_unknown_macro() { + check_assist_not_applicable( + inline_macro, + r#" +fn f() { let result = foo$0!(); } +"#, + ); + } + + #[test] + fn inline_macro_function_call_not_applicable() { + check_assist_not_applicable( + inline_macro, + r#" +fn f() { let result = foo$0(); } +"#, + ); + } +} diff --git a/crates/ide-assists/src/handlers/inline_type_alias.rs b/crates/ide-assists/src/handlers/inline_type_alias.rs index 353d467ed1..5982e9d61d 100644 --- a/crates/ide-assists/src/handlers/inline_type_alias.rs +++ b/crates/ide-assists/src/handlers/inline_type_alias.rs @@ -138,7 +138,7 @@ pub(crate) fn inline_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>) -> O replacement = Replacement::Plain; } _ => { - let alias = get_type_alias(&ctx, &alias_instance)?; + let alias = get_type_alias(ctx, &alias_instance)?; concrete_type = alias.ty()?; replacement = inline(&alias, &alias_instance)?; } @@ -158,7 +158,7 @@ impl Replacement { fn to_text(&self, concrete_type: &ast::Type) -> String { match self { Replacement::Generic { lifetime_map, const_and_type_map } => { - create_replacement(&lifetime_map, &const_and_type_map, &concrete_type) + create_replacement(lifetime_map, const_and_type_map, concrete_type) } Replacement::Plain => concrete_type.to_string(), } @@ -240,7 +240,7 @@ impl ConstAndTypeMap { ) -> Option { let mut inner = HashMap::new(); let instance_generics = generic_args_to_const_and_type_generics(instance_args); - let alias_generics = generic_param_list_to_const_and_type_generics(&alias_generics); + let alias_generics = generic_param_list_to_const_and_type_generics(alias_generics); if instance_generics.len() > alias_generics.len() { cov_mark::hit!(too_many_generic_args); diff --git a/crates/ide-assists/src/handlers/move_const_to_impl.rs b/crates/ide-assists/src/handlers/move_const_to_impl.rs new file mode 100644 index 0000000000..0e3a1e652b --- /dev/null +++ b/crates/ide-assists/src/handlers/move_const_to_impl.rs @@ -0,0 +1,481 @@ +use hir::{AsAssocItem, AssocItemContainer, HasCrate, HasSource}; +use ide_db::{assists::AssistId, base_db::FileRange, defs::Definition, search::SearchScope}; +use syntax::{ + ast::{self, edit::IndentLevel, edit_in_place::Indent, AstNode}, + SyntaxKind, +}; + +use crate::{ + assist_context::{AssistContext, Assists}, + utils, +}; + +// NOTE: Code may break if the self type implements a trait that has associated const with the same +// name, but it's pretty expensive to check that (`hir::Impl::all_for_type()`) and we assume that's +// pretty rare case. + +// Assist: move_const_to_impl +// +// Move a local constant item in a method to impl's associated constant. All the references will be +// qualified with `Self::`. +// +// ``` +// struct S; +// impl S { +// fn foo() -> usize { +// /// The answer. +// const C$0: usize = 42; +// +// C * C +// } +// } +// ``` +// -> +// ``` +// struct S; +// impl S { +// /// The answer. +// const C: usize = 42; +// +// fn foo() -> usize { +// Self::C * Self::C +// } +// } +// ``` +pub(crate) fn move_const_to_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { + let db = ctx.db(); + let const_: ast::Const = ctx.find_node_at_offset()?; + // Don't show the assist when the cursor is at the const's body. 
+ if let Some(body) = const_.body() { + if body.syntax().text_range().contains(ctx.offset()) { + return None; + } + } + + let parent_fn = const_.syntax().ancestors().find_map(ast::Fn::cast)?; + + // NOTE: We can technically provide this assist for default methods in trait definitions, but + // it's somewhat complex to handle it correctly when the const's name conflicts with + // supertrait's item. We may want to consider implementing it in the future. + let AssocItemContainer::Impl(impl_) = ctx.sema.to_def(&parent_fn)?.as_assoc_item(db)?.container(db) else { return None; }; + if impl_.trait_(db).is_some() { + return None; + } + + let def = ctx.sema.to_def(&const_)?; + let name = def.name(db)?; + let items = impl_.source(db)?.value.assoc_item_list()?; + + let ty = impl_.self_ty(db); + // If there exists another associated item with the same name, skip the assist. + if ty + .iterate_assoc_items(db, ty.krate(db), |assoc| { + // Type aliases wouldn't conflict due to different namespaces, but we're only checking + // the items in inherent impls, so we assume `assoc` is never type alias for the sake + // of brevity (inherent associated types exist in nightly Rust, but it's *very* + // unstable and we don't support them either). + assoc.name(db).filter(|it| it == &name) + }) + .is_some() + { + return None; + } + + let usages = + Definition::Const(def).usages(&ctx.sema).in_scope(SearchScope::file_range(FileRange { + file_id: ctx.file_id(), + range: parent_fn.syntax().text_range(), + })); + + acc.add( + AssistId("move_const_to_impl", crate::AssistKind::RefactorRewrite), + "Move const to impl block", + const_.syntax().text_range(), + |builder| { + let range_to_delete = match const_.syntax().next_sibling_or_token() { + Some(s) if matches!(s.kind(), SyntaxKind::WHITESPACE) => { + // Remove following whitespaces too. + const_.syntax().text_range().cover(s.text_range()) + } + _ => const_.syntax().text_range(), + }; + builder.delete(range_to_delete); + + let const_ref = format!("Self::{name}"); + for range in usages.all().file_ranges().map(|it| it.range) { + builder.replace(range, const_ref.clone()); + } + + // Heuristically inserting the extracted const after the consecutive existing consts + // from the beginning of assoc items. We assume there are no inherent assoc type as + // above. + let last_const = + items.assoc_items().take_while(|it| matches!(it, ast::AssocItem::Const(_))).last(); + let insert_offset = match &last_const { + Some(it) => it.syntax().text_range().end(), + None => match items.l_curly_token() { + Some(l_curly) => l_curly.text_range().end(), + // Not sure if this branch is ever reachable, but it wouldn't hurt to have a + // fallback. + None => items.syntax().text_range().start(), + }, + }; + + // If the moved const will be the first item of the impl, add a new line after that. + // + // We're assuming the code is formatted according to Rust's standard style guidelines + // (i.e. no empty lines between impl's `{` token and its first assoc item). 
+ let fixup = if last_const.is_none() { "\n" } else { "" }; + let indent = IndentLevel::from_node(parent_fn.syntax()); + + let const_ = const_.clone_for_update(); + const_.reindent_to(indent); + let mut const_text = format!("\n{indent}{const_}{fixup}"); + utils::escape_non_snippet(&mut const_text); + builder.insert(insert_offset, const_text); + }, + ) +} + +#[cfg(test)] +mod tests { + use crate::tests::{check_assist, check_assist_not_applicable}; + + use super::*; + + #[test] + fn not_applicable_to_top_level_const() { + check_assist_not_applicable( + move_const_to_impl, + r#" +const C$0: () = (); +"#, + ); + } + + #[test] + fn not_applicable_to_free_fn() { + check_assist_not_applicable( + move_const_to_impl, + r#" +fn f() { + const C$0: () = (); +} +"#, + ); + } + + #[test] + fn not_applicable_when_at_const_body() { + check_assist_not_applicable( + move_const_to_impl, + r#" +struct S; +impl S { + fn f() { + const C: () = ($0); + } +} + "#, + ); + } + + #[test] + fn not_applicable_when_inside_const_body_block() { + check_assist_not_applicable( + move_const_to_impl, + r#" +struct S; +impl S { + fn f() { + const C: () = { + ($0) + }; + } +} + "#, + ); + } + + #[test] + fn not_applicable_to_trait_impl_fn() { + check_assist_not_applicable( + move_const_to_impl, + r#" +trait Trait { + fn f(); +} +impl Trait for () { + fn f() { + const C$0: () = (); + } +} +"#, + ); + } + + #[test] + fn not_applicable_to_non_assoc_fn_inside_impl() { + check_assist_not_applicable( + move_const_to_impl, + r#" +struct S; +impl S { + fn f() { + fn g() { + const C$0: () = (); + } + } +} +"#, + ); + } + + #[test] + fn not_applicable_when_const_with_same_name_exists() { + check_assist_not_applicable( + move_const_to_impl, + r#" +struct S; +impl S { + const C: usize = 42; + fn f() { + const C$0: () = (); + } +"#, + ); + + check_assist_not_applicable( + move_const_to_impl, + r#" +struct S; +impl S { + const C: usize = 42; +} +impl S { + fn f() { + const C$0: () = (); + } +"#, + ); + } + + #[test] + fn move_const_simple_body() { + check_assist( + move_const_to_impl, + r#" +struct S; +impl S { + fn f() -> usize { + /// doc comment + const C$0: usize = 42; + + C * C + } +} +"#, + r#" +struct S; +impl S { + /// doc comment + const C: usize = 42; + + fn f() -> usize { + Self::C * Self::C + } +} +"#, + ); + } + + #[test] + fn move_const_simple_body_existing_const() { + check_assist( + move_const_to_impl, + r#" +struct S; +impl S { + const X: () = (); + const Y: () = (); + + fn f() -> usize { + /// doc comment + const C$0: usize = 42; + + C * C + } +} +"#, + r#" +struct S; +impl S { + const X: () = (); + const Y: () = (); + /// doc comment + const C: usize = 42; + + fn f() -> usize { + Self::C * Self::C + } +} +"#, + ); + } + + #[test] + fn move_const_block_body() { + check_assist( + move_const_to_impl, + r#" +struct S; +impl S { + fn f() -> usize { + /// doc comment + const C$0: usize = { + let a = 3; + let b = 4; + a * b + }; + + C * C + } +} +"#, + r#" +struct S; +impl S { + /// doc comment + const C: usize = { + let a = 3; + let b = 4; + a * b + }; + + fn f() -> usize { + Self::C * Self::C + } +} +"#, + ); + } + + #[test] + fn correct_indent_when_nested() { + check_assist( + move_const_to_impl, + r#" +fn main() { + struct S; + impl S { + fn f() -> usize { + /// doc comment + const C$0: usize = 42; + + C * C + } + } +} +"#, + r#" +fn main() { + struct S; + impl S { + /// doc comment + const C: usize = 42; + + fn f() -> usize { + Self::C * Self::C + } + } +} +"#, + ) + } + + #[test] + fn 
move_const_in_nested_scope_with_same_name_in_other_scope() { + check_assist( + move_const_to_impl, + r#" +struct S; +impl S { + fn f() -> usize { + const C: &str = "outer"; + + let n = { + /// doc comment + const C$0: usize = 42; + + let m = { + const C: &str = "inner"; + C.len() + }; + + C * m + }; + + n + C.len() + } +} +"#, + r#" +struct S; +impl S { + /// doc comment + const C: usize = 42; + + fn f() -> usize { + const C: &str = "outer"; + + let n = { + let m = { + const C: &str = "inner"; + C.len() + }; + + Self::C * m + }; + + n + C.len() + } +} +"#, + ); + } + + #[test] + fn moved_const_body_is_escaped() { + // Note that the last argument is what *lsp clients would see* rather than + // what users would see. Unescaping happens thereafter. + check_assist( + move_const_to_impl, + r#" +struct S; +impl S { + fn f() -> usize { + /// doc comment + /// \\ + /// ${snippet} + const C$0: &str = "\ and $1"; + + C.len() + } +} +"#, + r#" +struct S; +impl S { + /// doc comment + /// \\\\ + /// \${snippet} + const C: &str = "\\ and \$1"; + + fn f() -> usize { + Self::C.len() + } +} +"#, + ) + } +} diff --git a/crates/ide-assists/src/handlers/qualify_method_call.rs b/crates/ide-assists/src/handlers/qualify_method_call.rs index 1ea87429c5..e7014597a1 100644 --- a/crates/ide-assists/src/handlers/qualify_method_call.rs +++ b/crates/ide-assists/src/handlers/qualify_method_call.rs @@ -53,7 +53,7 @@ pub(crate) fn qualify_method_call(acc: &mut Assists, ctx: &AssistContext<'_>) -> let qualify_candidate = QualifyCandidate::ImplMethod(ctx.sema.db, call, resolved_call); acc.add( - AssistId("qualify_method_call", AssistKind::RefactorInline), + AssistId("qualify_method_call", AssistKind::RefactorRewrite), format!("Qualify `{ident}` method call"), range, |builder| { diff --git a/crates/ide-assists/src/handlers/remove_dbg.rs b/crates/ide-assists/src/handlers/remove_dbg.rs index 99ae60e07b..52dd670ec2 100644 --- a/crates/ide-assists/src/handlers/remove_dbg.rs +++ b/crates/ide-assists/src/handlers/remove_dbg.rs @@ -64,7 +64,7 @@ fn compute_dbg_replacement(macro_call: ast::MacroCall) -> Option<(TextRange, Str let input_expressions = mac_input.group_by(|tok| tok.kind() == T![,]); let input_expressions = input_expressions .into_iter() - .filter_map(|(is_sep, group)| (!is_sep).then(|| group)) + .filter_map(|(is_sep, group)| (!is_sep).then_some(group)) .map(|mut tokens| syntax::hacks::parse_expr_from_str(&tokens.join(""))) .collect::>>()?; diff --git a/crates/ide-assists/src/handlers/remove_parentheses.rs b/crates/ide-assists/src/handlers/remove_parentheses.rs new file mode 100644 index 0000000000..e9c7c6bae9 --- /dev/null +++ b/crates/ide-assists/src/handlers/remove_parentheses.rs @@ -0,0 +1,221 @@ +use syntax::{ast, AstNode}; + +use crate::{AssistContext, AssistId, AssistKind, Assists}; + +// Assist: remove_parentheses +// +// Removes redundant parentheses. 
+// +// ``` +// fn main() { +// _ = $0(2) + 2; +// } +// ``` +// -> +// ``` +// fn main() { +// _ = 2 + 2; +// } +// ``` +pub(crate) fn remove_parentheses(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { + let parens = ctx.find_node_at_offset::()?; + + let cursor_in_range = + parens.l_paren_token()?.text_range().contains_range(ctx.selection_trimmed()) + || parens.r_paren_token()?.text_range().contains_range(ctx.selection_trimmed()); + if !cursor_in_range { + return None; + } + + let expr = parens.expr()?; + + let parent = parens.syntax().parent()?; + if expr.needs_parens_in(parent) { + return None; + } + + let target = parens.syntax().text_range(); + acc.add( + AssistId("remove_parentheses", AssistKind::Refactor), + "Remove redundant parentheses", + target, + |builder| builder.replace_ast(parens.into(), expr), + ) +} + +#[cfg(test)] +mod tests { + use crate::tests::{check_assist, check_assist_not_applicable}; + + use super::*; + + #[test] + fn remove_parens_simple() { + check_assist(remove_parentheses, r#"fn f() { $0(2) + 2; }"#, r#"fn f() { 2 + 2; }"#); + check_assist(remove_parentheses, r#"fn f() { ($02) + 2; }"#, r#"fn f() { 2 + 2; }"#); + check_assist(remove_parentheses, r#"fn f() { (2)$0 + 2; }"#, r#"fn f() { 2 + 2; }"#); + check_assist(remove_parentheses, r#"fn f() { (2$0) + 2; }"#, r#"fn f() { 2 + 2; }"#); + } + + #[test] + fn remove_parens_closure() { + check_assist(remove_parentheses, r#"fn f() { &$0(|| 42) }"#, r#"fn f() { &|| 42 }"#); + + check_assist_not_applicable(remove_parentheses, r#"fn f() { $0(|| 42).f() }"#); + } + + #[test] + fn remove_parens_if_let_chains() { + check_assist_not_applicable( + remove_parentheses, + r#"fn f() { if let true = $0(true && true) {} }"#, + ); + } + + #[test] + fn remove_parens_associativity() { + check_assist( + remove_parentheses, + r#"fn f() { $0(2 + 2) + 2; }"#, + r#"fn f() { 2 + 2 + 2; }"#, + ); + check_assist_not_applicable(remove_parentheses, r#"fn f() { 2 + $0(2 + 2); }"#); + } + + #[test] + fn remove_parens_precedence() { + check_assist( + remove_parentheses, + r#"fn f() { $0(2 * 3) + 1; }"#, + r#"fn f() { 2 * 3 + 1; }"#, + ); + check_assist(remove_parentheses, r#"fn f() { ( $0(2) ); }"#, r#"fn f() { ( 2 ); }"#); + check_assist(remove_parentheses, r#"fn f() { $0(2?)?; }"#, r#"fn f() { 2??; }"#); + check_assist(remove_parentheses, r#"fn f() { f(($02 + 2)); }"#, r#"fn f() { f(2 + 2); }"#); + check_assist( + remove_parentheses, + r#"fn f() { (1<2)&&$0(3>4); }"#, + r#"fn f() { (1<2)&&3>4; }"#, + ); + } + + #[test] + fn remove_parens_doesnt_apply_precedence() { + check_assist_not_applicable(remove_parentheses, r#"fn f() { $0(2 + 2) * 8; }"#); + check_assist_not_applicable(remove_parentheses, r#"fn f() { $0(2 + 2).f(); }"#); + check_assist_not_applicable(remove_parentheses, r#"fn f() { $0(2 + 2).await; }"#); + check_assist_not_applicable(remove_parentheses, r#"fn f() { $0!(2..2); }"#); + } + + #[test] + fn remove_parens_doesnt_apply_with_cursor_not_on_paren() { + check_assist_not_applicable(remove_parentheses, r#"fn f() { (2 +$0 2) }"#); + check_assist_not_applicable(remove_parentheses, r#"fn f() {$0 (2 + 2) }"#); + } + + #[test] + fn remove_parens_doesnt_apply_when_expr_would_be_turned_into_a_statement() { + check_assist_not_applicable(remove_parentheses, r#"fn x() -> u8 { $0({ 0 } + 1) }"#); + check_assist_not_applicable( + remove_parentheses, + r#"fn x() -> u8 { $0(if true { 0 } else { 1 } + 1) }"#, + ); + check_assist_not_applicable(remove_parentheses, r#"fn x() -> u8 { $0(loop {} + 1) }"#); + } + + #[test] + fn 
remove_parens_doesnt_apply_weird_syntax_and_adge_cases() { + // removing `()` would break code because {} would be counted as the loop/if body + check_assist_not_applicable(remove_parentheses, r#"fn f() { for _ in $0(0..{3}) {} }"#); + check_assist_not_applicable(remove_parentheses, r#"fn f() { for _ in $0(S {}) {} }"#); + check_assist_not_applicable(remove_parentheses, r#"fn f() { if $0(S {} == 2) {} }"#); + check_assist_not_applicable(remove_parentheses, r#"fn f() { if $0(return) {} }"#); + } + + #[test] + fn remove_parens_return_with_value_followed_by_block() { + check_assist( + remove_parentheses, + r#"fn f() { if $0(return ()) {} }"#, + r#"fn f() { if return () {} }"#, + ); + } + + #[test] + fn remove_exprs_let_else_restrictions() { + // `}` is not allowed before `else` here + check_assist_not_applicable( + remove_parentheses, + r#"fn f() { let _ = $0(S{}) else { return }; }"#, + ); + + // logic operators can't directly appear in the let-else + check_assist_not_applicable( + remove_parentheses, + r#"fn f() { let _ = $0(false || false) else { return }; }"#, + ); + check_assist_not_applicable( + remove_parentheses, + r#"fn f() { let _ = $0(true && true) else { return }; }"#, + ); + } + + #[test] + fn remove_parens_weird_places() { + check_assist( + remove_parentheses, + r#"fn f() { match () { _=>$0(()) } }"#, + r#"fn f() { match () { _=>() } }"#, + ); + + check_assist( + remove_parentheses, + r#"fn x() -> u8 { { [$0({ 0 } + 1)] } }"#, + r#"fn x() -> u8 { { [{ 0 } + 1] } }"#, + ); + } + + #[test] + fn remove_parens_return_dot_f() { + check_assist( + remove_parentheses, + r#"fn f() { $0(return).f() }"#, + r#"fn f() { return.f() }"#, + ); + } + + #[test] + fn remove_parens_prefix_then_return_something() { + check_assist( + remove_parentheses, + r#"fn f() { &$0(return ()) }"#, + r#"fn f() { &return () }"#, + ); + } + + #[test] + fn remove_parens_double_paren_stmt() { + check_assist( + remove_parentheses, + r#"fn x() -> u8 { $0(({ 0 } + 1)) }"#, + r#"fn x() -> u8 { ({ 0 } + 1) }"#, + ); + + check_assist( + remove_parentheses, + r#"fn x() -> u8 { (($0{ 0 } + 1)) }"#, + r#"fn x() -> u8 { ({ 0 } + 1) }"#, + ); + } + + #[test] + fn remove_parens_im_tired_of_naming_tests() { + check_assist( + remove_parentheses, + r#"fn f() { 2 + $0(return 2) }"#, + r#"fn f() { 2 + return 2 }"#, + ); + + check_assist_not_applicable(remove_parentheses, r#"fn f() { $0(return 2) + 2 }"#); + } +} diff --git a/crates/ide-assists/src/handlers/replace_arith_op.rs b/crates/ide-assists/src/handlers/replace_arith_op.rs new file mode 100644 index 0000000000..f1ca35cafc --- /dev/null +++ b/crates/ide-assists/src/handlers/replace_arith_op.rs @@ -0,0 +1,226 @@ +use ide_db::assists::{AssistId, AssistKind, GroupLabel}; +use syntax::{ + ast::{self, ArithOp, BinaryOp}, + AstNode, TextRange, +}; + +use crate::assist_context::{AssistContext, Assists}; + +// Assist: replace_arith_with_checked +// +// Replaces arithmetic on integers with the `checked_*` equivalent. +// +// ``` +// fn main() { +// let x = 1 $0+ 2; +// } +// ``` +// -> +// ``` +// fn main() { +// let x = 1.checked_add(2); +// } +// ``` +pub(crate) fn replace_arith_with_checked(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { + replace_arith(acc, ctx, ArithKind::Checked) +} + +// Assist: replace_arith_with_saturating +// +// Replaces arithmetic on integers with the `saturating_*` equivalent. 
+// +// ``` +// fn main() { +// let x = 1 $0+ 2; +// } +// ``` +// -> +// ``` +// fn main() { +// let x = 1.saturating_add(2); +// } +// ``` +pub(crate) fn replace_arith_with_saturating( + acc: &mut Assists, + ctx: &AssistContext<'_>, +) -> Option<()> { + replace_arith(acc, ctx, ArithKind::Saturating) +} + +// Assist: replace_arith_with_wrapping +// +// Replaces arithmetic on integers with the `wrapping_*` equivalent. +// +// ``` +// fn main() { +// let x = 1 $0+ 2; +// } +// ``` +// -> +// ``` +// fn main() { +// let x = 1.wrapping_add(2); +// } +// ``` +pub(crate) fn replace_arith_with_wrapping( + acc: &mut Assists, + ctx: &AssistContext<'_>, +) -> Option<()> { + replace_arith(acc, ctx, ArithKind::Wrapping) +} + +fn replace_arith(acc: &mut Assists, ctx: &AssistContext<'_>, kind: ArithKind) -> Option<()> { + let (lhs, op, rhs) = parse_binary_op(ctx)?; + + if !is_primitive_int(ctx, &lhs) || !is_primitive_int(ctx, &rhs) { + return None; + } + + let start = lhs.syntax().text_range().start(); + let end = rhs.syntax().text_range().end(); + let range = TextRange::new(start, end); + + acc.add_group( + &GroupLabel("replace_arith".into()), + kind.assist_id(), + kind.label(), + range, + |builder| { + let method_name = kind.method_name(op); + + builder.replace(range, format!("{lhs}.{method_name}({rhs})")) + }, + ) +} + +fn is_primitive_int(ctx: &AssistContext<'_>, expr: &ast::Expr) -> bool { + match ctx.sema.type_of_expr(expr) { + Some(ty) => ty.adjusted().is_int_or_uint(), + _ => false, + } +} + +/// Extract the operands of an arithmetic expression (e.g. `1 + 2` or `1.checked_add(2)`) +fn parse_binary_op(ctx: &AssistContext<'_>) -> Option<(ast::Expr, ArithOp, ast::Expr)> { + let expr = ctx.find_node_at_offset::()?; + + let op = match expr.op_kind() { + Some(BinaryOp::ArithOp(ArithOp::Add)) => ArithOp::Add, + Some(BinaryOp::ArithOp(ArithOp::Sub)) => ArithOp::Sub, + Some(BinaryOp::ArithOp(ArithOp::Mul)) => ArithOp::Mul, + Some(BinaryOp::ArithOp(ArithOp::Div)) => ArithOp::Div, + _ => return None, + }; + + let lhs = expr.lhs()?; + let rhs = expr.rhs()?; + + Some((lhs, op, rhs)) +} + +pub(crate) enum ArithKind { + Saturating, + Wrapping, + Checked, +} + +impl ArithKind { + fn assist_id(&self) -> AssistId { + let s = match self { + ArithKind::Saturating => "replace_arith_with_saturating", + ArithKind::Checked => "replace_arith_with_checked", + ArithKind::Wrapping => "replace_arith_with_wrapping", + }; + + AssistId(s, AssistKind::RefactorRewrite) + } + + fn label(&self) -> &'static str { + match self { + ArithKind::Saturating => "Replace arithmetic with call to saturating_*", + ArithKind::Checked => "Replace arithmetic with call to checked_*", + ArithKind::Wrapping => "Replace arithmetic with call to wrapping_*", + } + } + + fn method_name(&self, op: ArithOp) -> String { + let prefix = match self { + ArithKind::Checked => "checked_", + ArithKind::Wrapping => "wrapping_", + ArithKind::Saturating => "saturating_", + }; + + let suffix = match op { + ArithOp::Add => "add", + ArithOp::Sub => "sub", + ArithOp::Mul => "mul", + ArithOp::Div => "div", + _ => unreachable!("this function should only be called with +, -, / or *"), + }; + format!("{prefix}{suffix}") + } +} + +#[cfg(test)] +mod tests { + use crate::tests::check_assist; + + use super::*; + + #[test] + fn arith_kind_method_name() { + assert_eq!(ArithKind::Saturating.method_name(ArithOp::Add), "saturating_add"); + assert_eq!(ArithKind::Checked.method_name(ArithOp::Sub), "checked_sub"); + } + + #[test] + fn replace_arith_with_checked_add() { + 
check_assist( + replace_arith_with_checked, + r#" +fn main() { + let x = 1 $0+ 2; +} +"#, + r#" +fn main() { + let x = 1.checked_add(2); +} +"#, + ) + } + + #[test] + fn replace_arith_with_saturating_add() { + check_assist( + replace_arith_with_saturating, + r#" +fn main() { + let x = 1 $0+ 2; +} +"#, + r#" +fn main() { + let x = 1.saturating_add(2); +} +"#, + ) + } + + #[test] + fn replace_arith_with_wrapping_add() { + check_assist( + replace_arith_with_wrapping, + r#" +fn main() { + let x = 1 $0+ 2; +} +"#, + r#" +fn main() { + let x = 1.wrapping_add(2); +} +"#, + ) + } +} diff --git a/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs b/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs index 6fa15b28e4..a6693d7d79 100644 --- a/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs +++ b/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs @@ -907,7 +907,34 @@ impl PartialEq for Foo { } #[test] - fn add_custom_impl_partial_eq_tuple_enum() { + fn add_custom_impl_partial_eq_single_variant_tuple_enum() { + check_assist( + replace_derive_with_manual_impl, + r#" +//- minicore: eq, derive +#[derive(Partial$0Eq)] +enum Foo { + Bar(String), +} +"#, + r#" +enum Foo { + Bar(String), +} + +impl PartialEq for Foo { + $0fn eq(&self, other: &Self) -> bool { + match (self, other) { + (Self::Bar(l0), Self::Bar(r0)) => l0 == r0, + } + } +} +"#, + ) + } + + #[test] + fn add_custom_impl_partial_eq_partial_tuple_enum() { check_assist( replace_derive_with_manual_impl, r#" @@ -936,6 +963,99 @@ impl PartialEq for Foo { ) } + #[test] + fn add_custom_impl_partial_eq_tuple_enum() { + check_assist( + replace_derive_with_manual_impl, + r#" +//- minicore: eq, derive +#[derive(Partial$0Eq)] +enum Foo { + Bar(String), + Baz(i32), +} +"#, + r#" +enum Foo { + Bar(String), + Baz(i32), +} + +impl PartialEq for Foo { + $0fn eq(&self, other: &Self) -> bool { + match (self, other) { + (Self::Bar(l0), Self::Bar(r0)) => l0 == r0, + (Self::Baz(l0), Self::Baz(r0)) => l0 == r0, + _ => false, + } + } +} +"#, + ) + } + + #[test] + fn add_custom_impl_partial_eq_tuple_enum_generic() { + check_assist( + replace_derive_with_manual_impl, + r#" +//- minicore: eq, derive +#[derive(Partial$0Eq)] +enum Either { + Left(T), + Right(U), +} +"#, + r#" +enum Either { + Left(T), + Right(U), +} + +impl PartialEq for Either { + $0fn eq(&self, other: &Self) -> bool { + match (self, other) { + (Self::Left(l0), Self::Left(r0)) => l0 == r0, + (Self::Right(l0), Self::Right(r0)) => l0 == r0, + _ => false, + } + } +} +"#, + ) + } + + #[test] + fn add_custom_impl_partial_eq_tuple_enum_generic_existing_bounds() { + check_assist( + replace_derive_with_manual_impl, + r#" +//- minicore: eq, derive +#[derive(Partial$0Eq)] +enum Either { + Left(T), + Right(U), +} +"#, + r#" +enum Either { + Left(T), + Right(U), +} + +impl PartialEq for Either { + $0fn eq(&self, other: &Self) -> bool { + match (self, other) { + (Self::Left(l0), Self::Left(r0)) => l0 == r0, + (Self::Right(l0), Self::Right(r0)) => l0 == r0, + _ => false, + } + } +} +"#, + ) + } + #[test] fn add_custom_impl_partial_eq_record_enum() { check_assist( @@ -1112,7 +1232,7 @@ struct Foo { bar: U, } -impl Default for Foo { +impl Default for Foo { $0fn default() -> Self { Self { foo: Default::default(), bar: Default::default() } } diff --git a/crates/ide-assists/src/handlers/replace_or_with_or_else.rs b/crates/ide-assists/src/handlers/replace_or_with_or_else.rs index 77382056c1..f0ed3c4fe6 100644 --- 
a/crates/ide-assists/src/handlers/replace_or_with_or_else.rs +++ b/crates/ide-assists/src/handlers/replace_or_with_or_else.rs @@ -75,7 +75,7 @@ fn into_closure(param: &Expr) -> Expr { (|| { if let ast::Expr::CallExpr(call) = param { if call.arg_list()?.args().count() == 0 { - Some(call.expr()?.clone()) + Some(call.expr()?) } else { None } @@ -151,7 +151,7 @@ fn into_call(param: &Expr) -> Expr { (|| { if let ast::Expr::ClosureExpr(closure) = param { if closure.param_list()?.params().count() == 0 { - Some(closure.body()?.clone()) + Some(closure.body()?) } else { None } diff --git a/crates/ide-assists/src/handlers/replace_turbofish_with_explicit_type.rs b/crates/ide-assists/src/handlers/replace_turbofish_with_explicit_type.rs index c177adc7a1..6626ce0795 100644 --- a/crates/ide-assists/src/handlers/replace_turbofish_with_explicit_type.rs +++ b/crates/ide-assists/src/handlers/replace_turbofish_with_explicit_type.rs @@ -42,7 +42,7 @@ pub(crate) fn replace_turbofish_with_explicit_type( let r_angle = generic_args.r_angle_token()?; let turbofish_range = TextRange::new(colon2.text_range().start(), r_angle.text_range().end()); - let turbofish_args: Vec = generic_args.generic_args().into_iter().collect(); + let turbofish_args: Vec = generic_args.generic_args().collect(); // Find type of ::<_> if turbofish_args.len() != 1 { diff --git a/crates/ide-assists/src/handlers/unnecessary_async.rs b/crates/ide-assists/src/handlers/unnecessary_async.rs index 0439883225..7f612c2a14 100644 --- a/crates/ide-assists/src/handlers/unnecessary_async.rs +++ b/crates/ide-assists/src/handlers/unnecessary_async.rs @@ -107,7 +107,7 @@ fn find_all_references( /// If no await expression is found, returns None. fn find_await_expression(ctx: &AssistContext<'_>, nameref: &NameRef) -> Option { // From the nameref, walk up the tree to the await expression. - let await_expr = if let Some(path) = full_path_of_name_ref(&nameref) { + let await_expr = if let Some(path) = full_path_of_name_ref(nameref) { // Function calls. path.syntax() .parent() diff --git a/crates/ide-assists/src/handlers/unqualify_method_call.rs b/crates/ide-assists/src/handlers/unqualify_method_call.rs new file mode 100644 index 0000000000..e9d4e270cd --- /dev/null +++ b/crates/ide-assists/src/handlers/unqualify_method_call.rs @@ -0,0 +1,211 @@ +use syntax::{ + ast::{self, make, AstNode, HasArgList}, + TextRange, +}; + +use crate::{AssistContext, AssistId, AssistKind, Assists}; + +// Assist: unqualify_method_call +// +// Transforms universal function call syntax into a method call. +// +// ``` +// fn main() { +// std::ops::Add::add$0(1, 2); +// } +// # mod std { pub mod ops { pub trait Add { fn add(self, _: Self) {} } impl Add for i32 {} } } +// ``` +// -> +// ``` +// fn main() { +// 1.add(2); +// } +// # mod std { pub mod ops { pub trait Add { fn add(self, _: Self) {} } impl Add for i32 {} } } +// ``` +pub(crate) fn unqualify_method_call(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { + let call = ctx.find_node_at_offset::()?; + let ast::Expr::PathExpr(path_expr) = call.expr()? 
else { return None }; + let path = path_expr.path()?; + + let cursor_in_range = path.syntax().text_range().contains_range(ctx.selection_trimmed()); + if !cursor_in_range { + return None; + } + + let args = call.arg_list()?; + let l_paren = args.l_paren_token()?; + let mut args_iter = args.args(); + let first_arg = args_iter.next()?; + let second_arg = args_iter.next(); + + _ = path.qualifier()?; + let method_name = path.segment()?.name_ref()?; + + let res = ctx.sema.resolve_path(&path)?; + let hir::PathResolution::Def(hir::ModuleDef::Function(fun)) = res else { return None }; + if !fun.has_self_param(ctx.sema.db) { + return None; + } + + // `core::ops::Add::add(` -> `` + let delete_path = + TextRange::new(path.syntax().text_range().start(), l_paren.text_range().end()); + + // Parens around `expr` if needed + let parens = needs_parens_as_receiver(&first_arg).then(|| { + let range = first_arg.syntax().text_range(); + (range.start(), range.end()) + }); + + // `, ` -> `.add(` + let replace_comma = TextRange::new( + first_arg.syntax().text_range().end(), + second_arg + .map(|a| a.syntax().text_range().start()) + .unwrap_or_else(|| first_arg.syntax().text_range().end()), + ); + + acc.add( + AssistId("unqualify_method_call", AssistKind::RefactorRewrite), + "Unqualify method call", + call.syntax().text_range(), + |edit| { + edit.delete(delete_path); + if let Some((open, close)) = parens { + edit.insert(open, "("); + edit.insert(close, ")"); + } + edit.replace(replace_comma, format!(".{method_name}(")); + }, + ) +} + +fn needs_parens_as_receiver(expr: &ast::Expr) -> bool { + // Make `(expr).dummy()` + let dummy_call = make::expr_method_call( + make::expr_paren(expr.clone()), + make::name_ref("dummy"), + make::arg_list([]), + ); + + // Get the `expr` clone with the right parent back + // (unreachable!s are fine since we've just constructed the expression) + let ast::Expr::MethodCallExpr(call) = &dummy_call else { unreachable!() }; + let Some(receiver) = call.receiver() else { unreachable!() }; + let ast::Expr::ParenExpr(parens) = receiver else { unreachable!() }; + let Some(expr) = parens.expr() else { unreachable!() }; + + expr.needs_parens_in(dummy_call.syntax().clone()) +} + +#[cfg(test)] +mod tests { + use crate::tests::{check_assist, check_assist_not_applicable}; + + use super::*; + + #[test] + fn unqualify_method_call_simple() { + check_assist( + unqualify_method_call, + r#" +struct S; +impl S { fn f(self, S: S) {} } +fn f() { S::$0f(S, S); }"#, + r#" +struct S; +impl S { fn f(self, S: S) {} } +fn f() { S.f(S); }"#, + ); + } + + #[test] + fn unqualify_method_call_trait() { + check_assist( + unqualify_method_call, + r#" +//- minicore: add +fn f() { ::$0add(2, 2); }"#, + r#" +fn f() { 2.add(2); }"#, + ); + + check_assist( + unqualify_method_call, + r#" +//- minicore: add +fn f() { core::ops::Add::$0add(2, 2); }"#, + r#" +fn f() { 2.add(2); }"#, + ); + + check_assist( + unqualify_method_call, + r#" +//- minicore: add +use core::ops::Add; +fn f() { <_>::$0add(2, 2); }"#, + r#" +use core::ops::Add; +fn f() { 2.add(2); }"#, + ); + } + + #[test] + fn unqualify_method_call_single_arg() { + check_assist( + unqualify_method_call, + r#" + struct S; + impl S { fn f(self) {} } + fn f() { S::$0f(S); }"#, + r#" + struct S; + impl S { fn f(self) {} } + fn f() { S.f(); }"#, + ); + } + + #[test] + fn unqualify_method_call_parens() { + check_assist( + unqualify_method_call, + r#" +//- minicore: deref +struct S; +impl core::ops::Deref for S { + type Target = S; + fn deref(&self) -> &S { self } +} +fn f() { 
core::ops::Deref::$0deref(&S); }"#, + r#" +struct S; +impl core::ops::Deref for S { + type Target = S; + fn deref(&self) -> &S { self } +} +fn f() { (&S).deref(); }"#, + ); + } + + #[test] + fn unqualify_method_call_doesnt_apply_with_cursor_not_on_path() { + check_assist_not_applicable( + unqualify_method_call, + r#" +//- minicore: add +fn f() { core::ops::Add::add(2,$0 2); }"#, + ); + } + + #[test] + fn unqualify_method_call_doesnt_apply_with_no_self() { + check_assist_not_applicable( + unqualify_method_call, + r#" +struct S; +impl S { fn assoc(S: S, S: S) {} } +fn f() { S::assoc$0(S, S); }"#, + ); + } +} diff --git a/crates/ide-assists/src/handlers/unwrap_block.rs b/crates/ide-assists/src/handlers/unwrap_block.rs index 7969a49182..53cdac03a3 100644 --- a/crates/ide-assists/src/handlers/unwrap_block.rs +++ b/crates/ide-assists/src/handlers/unwrap_block.rs @@ -37,7 +37,8 @@ pub(crate) fn unwrap_block(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option parent = parent.ancestors().find(|it| ast::MatchExpr::can_cast(it.kind()))? } - if matches!(parent.kind(), SyntaxKind::STMT_LIST | SyntaxKind::EXPR_STMT) { + if matches!(parent.kind(), SyntaxKind::STMT_LIST | SyntaxKind::EXPR_STMT | SyntaxKind::LET_STMT) + { return acc.add(assist_id, assist_label, target, |builder| { builder.replace(block.syntax().text_range(), update_expr_string(block.to_string())); }); @@ -713,6 +714,50 @@ fn main() -> i32 { return 3; 5 } +"#, + ); + } + + #[test] + fn unwrap_block_in_let_initializers() { + // https://github.com/rust-lang/rust-analyzer/issues/13679 + check_assist( + unwrap_block, + r#" +fn main() { + let x = {$0 + bar + }; +} +"#, + r#" +fn main() { + let x = bar; +} +"#, + ); + } + + #[test] + fn unwrap_if_in_let_initializers() { + // https://github.com/rust-lang/rust-analyzer/issues/13679 + check_assist( + unwrap_block, + r#" +fn main() { + let a = 1; + let x = if a - 1 == 0 {$0 + foo + } else { + bar + }; +} +"#, + r#" +fn main() { + let a = 1; + let x = foo; +} "#, ); } diff --git a/crates/ide-assists/src/lib.rs b/crates/ide-assists/src/lib.rs index 387cc63142..7813c9f9cb 100644 --- a/crates/ide-assists/src/lib.rs +++ b/crates/ide-assists/src/lib.rs @@ -128,6 +128,7 @@ mod handlers { mod convert_while_to_loop; mod destructure_tuple_binding; mod expand_glob_import; + mod extract_expressions_from_format_string; mod extract_function; mod extract_module; mod extract_struct_from_enum_variant; @@ -138,7 +139,6 @@ mod handlers { mod flip_binexpr; mod flip_comma; mod flip_trait_bound; - mod move_format_string_arg; mod generate_constant; mod generate_default_from_enum_variant; mod generate_default_from_new; @@ -159,12 +159,14 @@ mod handlers { mod add_return_type; mod inline_call; mod inline_local_variable; + mod inline_macro; mod inline_type_alias; mod introduce_named_lifetime; mod invert_if; mod merge_imports; mod merge_match_arms; mod move_bounds; + mod move_const_to_impl; mod move_guard; mod move_module_to_file; mod move_to_mod_rs; @@ -178,12 +180,14 @@ mod handlers { mod remove_dbg; mod remove_mut; mod remove_unused_param; + mod remove_parentheses; mod reorder_fields; mod reorder_impl_items; mod replace_try_expr_with_match; mod replace_derive_with_manual_impl; mod replace_if_let_with_match; mod replace_or_with_or_else; + mod replace_arith_op; mod introduce_named_generic; mod replace_let_with_if_let; mod replace_qualified_name_with_use; @@ -198,6 +202,7 @@ mod handlers { mod unnecessary_async; mod unwrap_block; mod unwrap_result_return_type; + mod unqualify_method_call; mod wrap_return_type_in_result; 
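The newly registered `unqualify_method_call` handler leans on the fact that a fully qualified (UFCS) call and a method call resolve to the same function when the first argument is the receiver. A small sketch of that equivalence (example code only, not part of the handler):

fn ufcs_equivalence_sketch() {
    use std::ops::Add;
    // `Add::add(1, 2)` is the fully qualified spelling of `1.add(2)`,
    // which is exactly the rewrite the assist performs.
    assert_eq!(Add::add(1, 2), 1.add(2));
    assert_eq!(<i32 as Add>::add(1, 2), 1 + 2);
}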
pub(crate) fn all() -> &'static [Handler] { @@ -228,6 +233,7 @@ mod handlers { convert_while_to_loop::convert_while_to_loop, destructure_tuple_binding::destructure_tuple_binding, expand_glob_import::expand_glob_import, + extract_expressions_from_format_string::extract_expressions_from_format_string, extract_struct_from_enum_variant::extract_struct_from_enum_variant, extract_type_alias::extract_type_alias, fix_visibility::fix_visibility, @@ -247,6 +253,7 @@ mod handlers { generate_from_impl_for_enum::generate_from_impl_for_enum, generate_function::generate_function, generate_impl::generate_impl, + generate_impl::generate_trait_impl, generate_is_empty_from_len::generate_is_empty_from_len, generate_new::generate_new, inline_call::inline_call, @@ -254,13 +261,14 @@ mod handlers { inline_local_variable::inline_local_variable, inline_type_alias::inline_type_alias, inline_type_alias::inline_type_alias_uses, + inline_macro::inline_macro, introduce_named_generic::introduce_named_generic, introduce_named_lifetime::introduce_named_lifetime, invert_if::invert_if, merge_imports::merge_imports, merge_match_arms::merge_match_arms, move_bounds::move_bounds_to_where_clause, - move_format_string_arg::move_format_string_arg, + move_const_to_impl::move_const_to_impl, move_guard::move_arm_cond_to_match_guard, move_guard::move_guard_to_arm_body, move_module_to_file::move_module_to_file, @@ -277,6 +285,7 @@ mod handlers { remove_dbg::remove_dbg, remove_mut::remove_mut, remove_unused_param::remove_unused_param, + remove_parentheses::remove_parentheses, reorder_fields::reorder_fields, reorder_impl_items::reorder_impl_items, replace_try_expr_with_match::replace_try_expr_with_match, @@ -288,6 +297,9 @@ mod handlers { replace_or_with_or_else::replace_or_with_or_else, replace_turbofish_with_explicit_type::replace_turbofish_with_explicit_type, replace_qualified_name_with_use::replace_qualified_name_with_use, + replace_arith_op::replace_arith_with_wrapping, + replace_arith_op::replace_arith_with_checked, + replace_arith_op::replace_arith_with_saturating, sort_items::sort_items, split_import::split_import, toggle_ignore::toggle_ignore, @@ -297,6 +309,7 @@ mod handlers { unwrap_block::unwrap_block, unwrap_result_return_type::unwrap_result_return_type, unwrap_tuple::unwrap_tuple, + unqualify_method_call::unqualify_method_call, wrap_return_type_in_result::wrap_return_type_in_result, // These are manually sorted for better priorities. By default, // priority is determined by the size of the target range (smaller diff --git a/crates/ide-assists/src/tests.rs b/crates/ide-assists/src/tests.rs index 92ced27c78..fca268a1f0 100644 --- a/crates/ide-assists/src/tests.rs +++ b/crates/ide-assists/src/tests.rs @@ -171,7 +171,7 @@ fn check(handler: Handler, before: &str, expected: ExpectedResult<'_>, assist_la } FileSystemEdit::MoveDir { src, src_id, dst } => { // temporary placeholder for MoveDir since we are not using MoveDir in ide assists yet. - (dst, format!("{:?}\n{:?}", src_id, src)) + (dst, format!("{src_id:?}\n{src:?}")) } }; let sr = db.file_source_root(dst.anchor); diff --git a/crates/ide-assists/src/tests/generated.rs b/crates/ide-assists/src/tests/generated.rs index c09317572a..006ae4b303 100644 --- a/crates/ide-assists/src/tests/generated.rs +++ b/crates/ide-assists/src/tests/generated.rs @@ -624,6 +624,37 @@ fn qux(bar: Bar, baz: Baz) {} ) } +#[test] +fn doctest_extract_expressions_from_format_string() { + check_doc_test( + "extract_expressions_from_format_string", + r#####" +macro_rules! 
format_args { + ($lit:literal $(tt:tt)*) => { 0 }, +} +macro_rules! print { + ($($arg:tt)*) => (std::io::_print(format_args!($($arg)*))); +} + +fn main() { + print!("{var} {x + 1}$0"); +} +"#####, + r#####" +macro_rules! format_args { + ($lit:literal $(tt:tt)*) => { 0 }, +} +macro_rules! print { + ($($arg:tt)*) => (std::io::_print(format_args!($($arg)*))); +} + +fn main() { + print!("{var} {}"$0, x + 1); +} +"#####, + ) +} + #[test] fn doctest_extract_function() { check_doc_test( @@ -1249,8 +1280,8 @@ fn doctest_generate_impl() { check_doc_test( "generate_impl", r#####" -struct Ctx { - data: T,$0 +struct Ctx$0 { + data: T, } "#####, r#####" @@ -1341,6 +1372,27 @@ impl Person { ) } +#[test] +fn doctest_generate_trait_impl() { + check_doc_test( + "generate_trait_impl", + r#####" +struct $0Ctx { + data: T, +} +"#####, + r#####" +struct Ctx { + data: T, +} + +impl $0 for Ctx { + +} +"#####, + ) +} + #[test] fn doctest_inline_call() { check_doc_test( @@ -1417,6 +1469,39 @@ fn main() { ) } +#[test] +fn doctest_inline_macro() { + check_doc_test( + "inline_macro", + r#####" +macro_rules! num { + (+$($t:tt)+) => (1 + num!($($t )+)); + (-$($t:tt)+) => (-1 + num!($($t )+)); + (+) => (1); + (-) => (-1); +} + +fn main() { + let number = num$0!(+ + + - + +); + println!("{number}"); +} +"#####, + r#####" +macro_rules! num { + (+$($t:tt)+) => (1 + num!($($t )+)); + (-$($t:tt)+) => (-1 + num!($($t )+)); + (+) => (1); + (-) => (-1); +} + +fn main() { + let number = 1+num!(+ + - + +); + println!("{number}"); +} +"#####, + ) +} + #[test] fn doctest_inline_type_alias() { check_doc_test( @@ -1654,31 +1739,29 @@ fn apply(f: F, x: T) -> U where F: FnOnce(T) -> U { } #[test] -fn doctest_move_format_string_arg() { +fn doctest_move_const_to_impl() { check_doc_test( - "move_format_string_arg", + "move_const_to_impl", r#####" -macro_rules! format_args { - ($lit:literal $(tt:tt)*) => { 0 }, -} -macro_rules! print { - ($($arg:tt)*) => (std::io::_print(format_args!($($arg)*))); -} +struct S; +impl S { + fn foo() -> usize { + /// The answer. + const C$0: usize = 42; -fn main() { - print!("{x + 1}$0"); + C * C + } } "#####, r#####" -macro_rules! format_args { - ($lit:literal $(tt:tt)*) => { 0 }, -} -macro_rules! print { - ($($arg:tt)*) => (std::io::_print(format_args!($($arg)*))); -} +struct S; +impl S { + /// The answer. 
+ const C: usize = 42; -fn main() { - print!("{}"$0, x + 1); + fn foo() -> usize { + Self::C * Self::C + } } "#####, ) @@ -1928,6 +2011,23 @@ impl Walrus { ) } +#[test] +fn doctest_remove_parentheses() { + check_doc_test( + "remove_parentheses", + r#####" +fn main() { + _ = $0(2) + 2; +} +"#####, + r#####" +fn main() { + _ = 2 + 2; +} +"#####, + ) +} + #[test] fn doctest_remove_unused_param() { check_doc_test( @@ -1999,6 +2099,57 @@ impl Foo for Bar { ) } +#[test] +fn doctest_replace_arith_with_checked() { + check_doc_test( + "replace_arith_with_checked", + r#####" +fn main() { + let x = 1 $0+ 2; +} +"#####, + r#####" +fn main() { + let x = 1.checked_add(2); +} +"#####, + ) +} + +#[test] +fn doctest_replace_arith_with_saturating() { + check_doc_test( + "replace_arith_with_saturating", + r#####" +fn main() { + let x = 1 $0+ 2; +} +"#####, + r#####" +fn main() { + let x = 1.saturating_add(2); +} +"#####, + ) +} + +#[test] +fn doctest_replace_arith_with_wrapping() { + check_doc_test( + "replace_arith_with_wrapping", + r#####" +fn main() { + let x = 1 $0+ 2; +} +"#####, + r#####" +fn main() { + let x = 1.wrapping_add(2); +} +"#####, + ) +} + #[test] fn doctest_replace_char_with_string() { check_doc_test( @@ -2415,6 +2566,25 @@ pub async fn bar() { foo() } ) } +#[test] +fn doctest_unqualify_method_call() { + check_doc_test( + "unqualify_method_call", + r#####" +fn main() { + std::ops::Add::add$0(1, 2); +} +mod std { pub mod ops { pub trait Add { fn add(self, _: Self) {} } impl Add for i32 {} } } +"#####, + r#####" +fn main() { + 1.add(2); +} +mod std { pub mod ops { pub trait Add { fn add(self, _: Self) {} } impl Add for i32 {} } } +"#####, + ) +} + #[test] fn doctest_unwrap_block() { check_doc_test( diff --git a/crates/ide-assists/src/tests/sourcegen.rs b/crates/ide-assists/src/tests/sourcegen.rs index 070b83d3c1..b4f50c7fb2 100644 --- a/crates/ide-assists/src/tests/sourcegen.rs +++ b/crates/ide-assists/src/tests/sourcegen.rs @@ -18,7 +18,7 @@ use super::check_doc_test; for assist in assists.iter() { for (idx, section) in assist.sections.iter().enumerate() { let test_id = - if idx == 0 { assist.id.clone() } else { format!("{}_{}", &assist.id, idx) }; + if idx == 0 { assist.id.clone() } else { format!("{}_{idx}", &assist.id) }; let test = format!( r######" #[test] @@ -95,8 +95,7 @@ impl Assist { let id = block.id; assert!( id.chars().all(|it| it.is_ascii_lowercase() || it == '_'), - "invalid assist id: {:?}", - id + "invalid assist id: {id:?}" ); let mut lines = block.contents.iter().peekable(); let location = sourcegen::Location { file: path.to_path_buf(), line: block.line }; @@ -175,7 +174,7 @@ impl fmt::Display for Assist { fn hide_hash_comments(text: &str) -> String { text.split('\n') // want final newline .filter(|&it| !(it.starts_with("# ") || it == "#")) - .map(|it| format!("{}\n", it)) + .map(|it| format!("{it}\n")) .collect() } @@ -190,6 +189,6 @@ fn reveal_hash_comments(text: &str) -> String { it } }) - .map(|it| format!("{}\n", it)) + .map(|it| format!("{it}\n")) .collect() } diff --git a/crates/ide-assists/src/utils.rs b/crates/ide-assists/src/utils.rs index 68c31b4f8e..7add660649 100644 --- a/crates/ide-assists/src/utils.rs +++ b/crates/ide-assists/src/utils.rs @@ -208,6 +208,23 @@ pub(crate) fn render_snippet(_cap: SnippetCap, node: &SyntaxNode, cursor: Cursor } } +/// Escapes text that should be rendered as-is, typically those that we're copy-pasting what the +/// users wrote. 
+/// +/// This function should only be used when the text doesn't contain snippet **AND** the text +/// wouldn't be included in a snippet. +pub(crate) fn escape_non_snippet(text: &mut String) { + // While we *can* escape `}`, we don't really have to in this specific case. We only need to + // escape it inside `${}` to disambiguate it from the ending token of the syntax, but after we + // escape every occurrence of `$`, we wouldn't have `${}` in the first place. + // + // This will break if the text contains snippet or it will be included in a snippet (hence doc + // comment). Compare `fn escape(buf)` in `render_snippet()` above, where the escaped text is + // included in a snippet. + stdx::replace(text, '\\', r"\\"); + stdx::replace(text, '$', r"\$"); +} + pub(crate) fn vis_offset(node: &SyntaxNode) -> TextSize { node.children_with_tokens() .find(|it| !matches!(it.kind(), WHITESPACE | COMMENT | ATTR)) @@ -417,35 +434,67 @@ pub(crate) fn find_impl_block_end(impl_def: ast::Impl, buf: &mut String) -> Opti Some(end) } -// Generates the surrounding `impl Type { }` including type and lifetime -// parameters +/// Generates the surrounding `impl Type { }` including type and lifetime +/// parameters. pub(crate) fn generate_impl_text(adt: &ast::Adt, code: &str) -> String { - generate_impl_text_inner(adt, None, code) + generate_impl_text_inner(adt, None, true, code) } -// Generates the surrounding `impl for Type { }` including type -// and lifetime parameters +/// Generates the surrounding `impl for Type { }` including type +/// and lifetime parameters, with `` appended to `impl`'s generic parameters' bounds. +/// +/// This is useful for traits like `PartialEq`, since `impl PartialEq for U` often requires `T: PartialEq`. pub(crate) fn generate_trait_impl_text(adt: &ast::Adt, trait_text: &str, code: &str) -> String { - generate_impl_text_inner(adt, Some(trait_text), code) + generate_impl_text_inner(adt, Some(trait_text), true, code) } -fn generate_impl_text_inner(adt: &ast::Adt, trait_text: Option<&str>, code: &str) -> String { +/// Generates the surrounding `impl for Type { }` including type +/// and lifetime parameters, with `impl`'s generic parameters' bounds kept as-is. +/// +/// This is useful for traits like `From`, since `impl From for U` doesn't require `T: From`. +pub(crate) fn generate_trait_impl_text_intransitive( + adt: &ast::Adt, + trait_text: &str, + code: &str, +) -> String { + generate_impl_text_inner(adt, Some(trait_text), false, code) +} + +fn generate_impl_text_inner( + adt: &ast::Adt, + trait_text: Option<&str>, + trait_is_transitive: bool, + code: &str, +) -> String { // Ensure lifetime params are before type & const params let generic_params = adt.generic_param_list().map(|generic_params| { let lifetime_params = generic_params.lifetime_params().map(ast::GenericParam::LifetimeParam); - let ty_or_const_params = generic_params.type_or_const_params().filter_map(|param| { - // remove defaults since they can't be specified in impls + let ty_or_const_params = generic_params.type_or_const_params().map(|param| { match param { ast::TypeOrConstParam::Type(param) => { let param = param.clone_for_update(); + // remove defaults since they can't be specified in impls param.remove_default(); - Some(ast::GenericParam::TypeParam(param)) + let mut bounds = + param.type_bound_list().map_or_else(Vec::new, |it| it.bounds().collect()); + if let Some(trait_) = trait_text { + // Add the current trait to `bounds` if the trait is transitive, + // meaning `impl Trait for U` requires `T: Trait`. 
+ if trait_is_transitive { + bounds.push(make::type_bound(trait_)); + } + }; + // `{ty_param}: {bounds}` + let param = + make::type_param(param.name().unwrap(), make::type_bound_list(bounds)); + ast::GenericParam::TypeParam(param) } ast::TypeOrConstParam::Const(param) => { let param = param.clone_for_update(); + // remove defaults since they can't be specified in impls param.remove_default(); - Some(ast::GenericParam::ConstParam(param)) + ast::GenericParam::ConstParam(param) } } }); @@ -596,7 +645,7 @@ pub(crate) fn convert_reference_type( } fn handle_copy(ty: &hir::Type, db: &dyn HirDatabase) -> Option { - ty.is_copy(db).then(|| ReferenceConversionType::Copy) + ty.is_copy(db).then_some(ReferenceConversionType::Copy) } fn handle_as_ref_str( @@ -607,7 +656,7 @@ fn handle_as_ref_str( let str_type = hir::BuiltinType::str().ty(db); ty.impls_trait(db, famous_defs.core_convert_AsRef()?, &[str_type]) - .then(|| ReferenceConversionType::AsRefStr) + .then_some(ReferenceConversionType::AsRefStr) } fn handle_as_ref_slice( @@ -619,7 +668,7 @@ fn handle_as_ref_slice( let slice_type = hir::Type::new_slice(type_argument); ty.impls_trait(db, famous_defs.core_convert_AsRef()?, &[slice_type]) - .then(|| ReferenceConversionType::AsRefSlice) + .then_some(ReferenceConversionType::AsRefSlice) } fn handle_dereferenced( @@ -630,7 +679,7 @@ fn handle_dereferenced( let type_argument = ty.type_arguments().next()?; ty.impls_trait(db, famous_defs.core_convert_AsRef()?, &[type_argument]) - .then(|| ReferenceConversionType::Dereferenced) + .then_some(ReferenceConversionType::Dereferenced) } fn handle_option_as_ref( diff --git a/crates/ide-assists/src/utils/gen_trait_fn_body.rs b/crates/ide-assists/src/utils/gen_trait_fn_body.rs index 6c87e66c13..d4abb51259 100644 --- a/crates/ide-assists/src/utils/gen_trait_fn_body.rs +++ b/crates/ide-assists/src/utils/gen_trait_fn_body.rs @@ -419,7 +419,7 @@ fn gen_partial_eq(adt: &ast::Adt, func: &ast::Fn) -> Option<()> { make::ext::path_from_idents(["Self", &variant.name()?.to_string()]) } - fn gen_tuple_field(field_name: &String) -> ast::Pat { + fn gen_tuple_field(field_name: &str) -> ast::Pat { ast::Pat::IdentPat(make::ident_pat(false, false, make::name(field_name))) } @@ -516,10 +516,18 @@ fn gen_partial_eq(adt: &ast::Adt, func: &ast::Fn) -> Option<()> { let expr = match arms.len() { 0 => eq_check, - _ => { - if n_cases > arms.len() { + arms_len => { + // Generate the fallback arm when this enum has >1 variants. + // The fallback arm will be `_ => false,` if we've already gone through every case where the variants of self and other match, + // and `_ => std::mem::discriminant(self) == std::mem::discriminant(other),` otherwise. 
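For reference, the two fallback shapes this branch emits, sketched by hand (the enum and impl below are example code mirroring the expected outputs in `replace_derive_with_manual_impl`'s tests, not part of this diff): when every variant pair already has its own arm the wildcard can simply be `false`, and when some variant (for example a fieldless one) gets no arm, the wildcard must still equate identical variants, hence the discriminant comparison.

enum Foo {
    Bar(String),
    Unit,
}

impl PartialEq for Foo {
    fn eq(&self, other: &Self) -> bool {
        match (self, other) {
            (Self::Bar(l0), Self::Bar(r0)) => l0 == r0,
            // `Unit` has no fields and therefore no arm of its own, so the fallback
            // compares discriminants to keep `Foo::Unit == Foo::Unit` true.
            _ => std::mem::discriminant(self) == std::mem::discriminant(other),
        }
    }
}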
+ if n_cases > 1 { let lhs = make::wildcard_pat().into(); - arms.push(make::match_arm(Some(lhs), None, eq_check)); + let rhs = if arms_len == n_cases { + make::expr_literal("false").into() + } else { + eq_check + }; + arms.push(make::match_arm(Some(lhs), None, rhs)); } let match_target = make::expr_tuple(vec![lhs_name, rhs_name]); diff --git a/crates/ide-completion/src/completions.rs b/crates/ide-completion/src/completions.rs index 296dfc1425..eb87d6c582 100644 --- a/crates/ide-completion/src/completions.rs +++ b/crates/ide-completion/src/completions.rs @@ -133,7 +133,7 @@ impl Completions { if incomplete_let && snippet.ends_with('}') { // complete block expression snippets with a trailing semicolon, if inside an incomplete let cov_mark::hit!(let_semi); - item.insert_snippet(cap, format!("{};", snippet)); + item.insert_snippet(cap, format!("{snippet};")); } else { item.insert_snippet(cap, snippet); } @@ -494,7 +494,7 @@ impl Completions { pattern_ctx, path_ctx, variant, - local_name.clone(), + local_name, None, )); } diff --git a/crates/ide-completion/src/completions/attribute.rs b/crates/ide-completion/src/completions/attribute.rs index d9fe94cb44..bb950c76f8 100644 --- a/crates/ide-completion/src/completions/attribute.rs +++ b/crates/ide-completion/src/completions/attribute.rs @@ -357,7 +357,7 @@ fn parse_comma_sep_expr(input: ast::TokenTree) -> Option> { Some( input_expressions .into_iter() - .filter_map(|(is_sep, group)| (!is_sep).then(|| group)) + .filter_map(|(is_sep, group)| (!is_sep).then_some(group)) .filter_map(|mut tokens| syntax::hacks::parse_expr_from_str(&tokens.join(""))) .collect::>(), ) @@ -371,9 +371,7 @@ fn attributes_are_sorted() { attrs.for_each(|next| { assert!( prev < next, - r#"ATTRIBUTES array is not sorted, "{}" should come after "{}""#, - prev, - next + r#"ATTRIBUTES array is not sorted, "{prev}" should come after "{next}""# ); prev = next; }); diff --git a/crates/ide-completion/src/completions/attribute/cfg.rs b/crates/ide-completion/src/completions/attribute/cfg.rs index 311060143b..7ef4ff30b5 100644 --- a/crates/ide-completion/src/completions/attribute/cfg.rs +++ b/crates/ide-completion/src/completions/attribute/cfg.rs @@ -11,7 +11,7 @@ use crate::{completions::Completions, context::CompletionContext, CompletionItem pub(crate) fn complete_cfg(acc: &mut Completions, ctx: &CompletionContext<'_>) { let add_completion = |item: &str| { let mut completion = CompletionItem::new(SymbolKind::BuiltinAttr, ctx.source_range(), item); - completion.insert_text(format!(r#""{}""#, item)); + completion.insert_text(format!(r#""{item}""#)); acc.add(completion.build()); }; @@ -29,7 +29,7 @@ pub(crate) fn complete_cfg(acc: &mut Completions, ctx: &CompletionContext<'_>) { Some("target_vendor") => KNOWN_VENDOR.iter().copied().for_each(add_completion), Some("target_endian") => ["little", "big"].into_iter().for_each(add_completion), Some(name) => ctx.krate.potential_cfg(ctx.db).get_cfg_values(name).cloned().for_each(|s| { - let insert_text = format!(r#""{}""#, s); + let insert_text = format!(r#""{s}""#); let mut item = CompletionItem::new(SymbolKind::BuiltinAttr, ctx.source_range(), s); item.insert_text(insert_text); diff --git a/crates/ide-completion/src/completions/attribute/lint.rs b/crates/ide-completion/src/completions/attribute/lint.rs index 967f6ddd9a..818c3cfd5f 100644 --- a/crates/ide-completion/src/completions/attribute/lint.rs +++ b/crates/ide-completion/src/completions/attribute/lint.rs @@ -51,7 +51,7 @@ pub(super) fn complete_lint( continue; } let label = match qual { - 
Some(qual) if !is_qualified => format!("{}::{}", qual, name), + Some(qual) if !is_qualified => format!("{qual}::{name}"), _ => name.to_owned(), }; let mut item = CompletionItem::new(SymbolKind::Attribute, ctx.source_range(), label); diff --git a/crates/ide-completion/src/completions/dot.rs b/crates/ide-completion/src/completions/dot.rs index 02004ff7b6..7c6e5e100f 100644 --- a/crates/ide-completion/src/completions/dot.rs +++ b/crates/ide-completion/src/completions/dot.rs @@ -32,12 +32,12 @@ pub(crate) fn complete_dot( complete_fields( acc, ctx, - &receiver_ty, + receiver_ty, |acc, field, ty| acc.add_field(ctx, dot_access, None, field, &ty), |acc, field, ty| acc.add_tuple_field(ctx, None, field, &ty), ); } - complete_methods(ctx, &receiver_ty, |func| acc.add_method(ctx, dot_access, func, None, None)); + complete_methods(ctx, receiver_ty, |func| acc.add_method(ctx, dot_access, func, None, None)); } pub(crate) fn complete_undotted_self( diff --git a/crates/ide-completion/src/completions/env_vars.rs b/crates/ide-completion/src/completions/env_vars.rs index 09e95e53de..1002be2113 100644 --- a/crates/ide-completion/src/completions/env_vars.rs +++ b/crates/ide-completion/src/completions/env_vars.rs @@ -68,28 +68,26 @@ mod tests { &format!( r#" #[rustc_builtin_macro] - macro_rules! {} {{ + macro_rules! {macro_name} {{ ($var:literal) => {{ 0 }} }} fn main() {{ - let foo = {}!("CAR$0"); + let foo = {macro_name}!("CAR$0"); }} - "#, - macro_name, macro_name + "# ), &format!( r#" #[rustc_builtin_macro] - macro_rules! {} {{ + macro_rules! {macro_name} {{ ($var:literal) => {{ 0 }} }} fn main() {{ - let foo = {}!("CARGO_BIN_NAME"); + let foo = {macro_name}!("CARGO_BIN_NAME"); }} - "#, - macro_name, macro_name + "# ), ); } @@ -112,7 +110,7 @@ mod tests { "#; let completions = completion_list(fixture); - assert!(completions.is_empty(), "Completions weren't empty: {}", completions); + assert!(completions.is_empty(), "Completions weren't empty: {completions}"); } #[test] @@ -129,7 +127,7 @@ mod tests { "#; let completions = completion_list(fixture); - assert!(completions.is_empty(), "Completions weren't empty: {}", completions); + assert!(completions.is_empty(), "Completions weren't empty: {completions}"); } #[test] @@ -145,6 +143,6 @@ mod tests { "#; let completions = completion_list(fixture); - assert!(completions.is_empty(), "Completions weren't empty: {}", completions) + assert!(completions.is_empty(), "Completions weren't empty: {completions}") } } diff --git a/crates/ide-completion/src/completions/expr.rs b/crates/ide-completion/src/completions/expr.rs index 3192b21cfb..cfe4787f73 100644 --- a/crates/ide-completion/src/completions/expr.rs +++ b/crates/ide-completion/src/completions/expr.rs @@ -64,7 +64,7 @@ pub(crate) fn complete_expr_path( acc.add_enum_variants(ctx, path_ctx, e); } - ctx.iterate_path_candidates(&ty, |item| { + ctx.iterate_path_candidates(ty, |item| { add_assoc_item(acc, item); }); diff --git a/crates/ide-completion/src/completions/fn_param.rs b/crates/ide-completion/src/completions/fn_param.rs index f0ecc595af..d8b8a190eb 100644 --- a/crates/ide-completion/src/completions/fn_param.rs +++ b/crates/ide-completion/src/completions/fn_param.rs @@ -192,5 +192,5 @@ fn comma_wrapper(ctx: &CompletionContext<'_>) -> Option<(impl Fn(&str) -> String matches!(prev_token_kind, SyntaxKind::COMMA | SyntaxKind::L_PAREN | SyntaxKind::PIPE); let leading = if has_leading_comma { "" } else { ", " }; - Some((move |label: &_| (format!("{}{}{}", leading, label, trailing)), param.text_range())) + Some((move 
|label: &_| (format!("{leading}{label}{trailing}")), param.text_range())) } diff --git a/crates/ide-completion/src/completions/format_string.rs b/crates/ide-completion/src/completions/format_string.rs index 038bdb4279..5c46c5806e 100644 --- a/crates/ide-completion/src/completions/format_string.rs +++ b/crates/ide-completion/src/completions/format_string.rs @@ -13,7 +13,7 @@ pub(crate) fn format_string( original: &ast::String, expanded: &ast::String, ) { - if !is_format_string(&expanded) { + if !is_format_string(expanded) { return; } let cursor = ctx.position.offset; diff --git a/crates/ide-completion/src/completions/item_list/trait_impl.rs b/crates/ide-completion/src/completions/item_list/trait_impl.rs index 7384a3f2d8..9a060857e9 100644 --- a/crates/ide-completion/src/completions/item_list/trait_impl.rs +++ b/crates/ide-completion/src/completions/item_list/trait_impl.rs @@ -37,7 +37,7 @@ use ide_db::{ traits::get_missing_assoc_items, SymbolKind, }; use syntax::{ - ast::{self, edit_in_place::AttrsOwnerEdit}, + ast::{self, edit_in_place::AttrsOwnerEdit, HasTypeBounds}, AstNode, SyntaxElement, SyntaxKind, TextRange, T, }; use text_edit::TextEdit; @@ -190,7 +190,7 @@ fn add_function_impl( }; let mut item = CompletionItem::new(completion_kind, replacement_range, label); - item.lookup_by(format!("fn {}", fn_name)) + item.lookup_by(format!("fn {fn_name}")) .set_documentation(func.docs(ctx.db)) .set_relevance(CompletionRelevance { is_item_from_trait: true, ..Default::default() }); @@ -205,11 +205,11 @@ fn add_function_impl( let function_decl = function_declaration(&transformed_fn, source.file_id.is_macro()); match ctx.config.snippet_cap { Some(cap) => { - let snippet = format!("{} {{\n $0\n}}", function_decl); + let snippet = format!("{function_decl} {{\n $0\n}}"); item.snippet_edit(cap, TextEdit::replace(replacement_range, snippet)); } None => { - let header = format!("{} {{", function_decl); + let header = format!("{function_decl} {{"); item.text_edit(TextEdit::replace(replacement_range, header)); } }; @@ -249,10 +249,10 @@ fn add_type_alias_impl( ) { let alias_name = type_alias.name(ctx.db).unescaped().to_smol_str(); - let label = format!("type {} =", alias_name); + let label = format!("type {alias_name} ="); let mut item = CompletionItem::new(SymbolKind::TypeAlias, replacement_range, label); - item.lookup_by(format!("type {}", alias_name)) + item.lookup_by(format!("type {alias_name}")) .set_documentation(type_alias.docs(ctx.db)) .set_relevance(CompletionRelevance { is_item_from_trait: true, ..Default::default() }); @@ -265,10 +265,21 @@ fn add_type_alias_impl( }; let start = transformed_ty.syntax().text_range().start(); - let Some(end) = transformed_ty - .eq_token() - .map(|tok| tok.text_range().start()) - .or(transformed_ty.semicolon_token().map(|tok| tok.text_range().start())) else { return }; + + let end = if let Some(end) = + transformed_ty.colon_token().map(|tok| tok.text_range().start()) + { + end + } else if let Some(end) = transformed_ty.eq_token().map(|tok| tok.text_range().start()) + { + end + } else if let Some(end) = + transformed_ty.semicolon_token().map(|tok| tok.text_range().start()) + { + end + } else { + return; + }; let len = end - start; let mut decl = transformed_ty.syntax().text().slice(..len).to_string(); @@ -279,7 +290,7 @@ fn add_type_alias_impl( match ctx.config.snippet_cap { Some(cap) => { - let snippet = format!("{}$0;", decl); + let snippet = format!("{decl}$0;"); item.snippet_edit(cap, TextEdit::replace(replacement_range, snippet)); } None => { @@ -310,10 +321,10 
@@ fn add_const_impl( }; let label = make_const_compl_syntax(&transformed_const, source.file_id.is_macro()); - let replacement = format!("{} ", label); + let replacement = format!("{label} "); let mut item = CompletionItem::new(SymbolKind::Const, replacement_range, label); - item.lookup_by(format!("const {}", const_name)) + item.lookup_by(format!("const {const_name}")) .set_documentation(const_.docs(ctx.db)) .set_relevance(CompletionRelevance { is_item_from_trait: true, @@ -322,7 +333,7 @@ fn add_const_impl( match ctx.config.snippet_cap { Some(cap) => item.snippet_edit( cap, - TextEdit::replace(replacement_range, format!("{}$0;", replacement)), + TextEdit::replace(replacement_range, format!("{replacement}$0;")), ), None => item.text_edit(TextEdit::replace(replacement_range, replacement)), }; @@ -834,11 +845,10 @@ trait Test {{ struct T; impl Test for T {{ - {} - {} + {hint} + {next_sibling} }} -"#, - hint, next_sibling +"# ), &format!( r#" @@ -850,11 +860,10 @@ trait Test {{ struct T; impl Test for T {{ - {} - {} + {completed} + {next_sibling} }} -"#, - completed, next_sibling +"# ), ) }; @@ -894,10 +903,9 @@ struct T; impl Foo for T {{ // Comment #[bar] - {} + {hint} }} -"#, - hint +"# ), &format!( r#" @@ -911,10 +919,9 @@ struct T; impl Foo for T {{ // Comment #[bar] - {} + {completed} }} -"#, - completed +"# ), ) }; diff --git a/crates/ide-completion/src/completions/postfix.rs b/crates/ide-completion/src/completions/postfix.rs index b9bd47f7da..f4f37d77d8 100644 --- a/crates/ide-completion/src/completions/postfix.rs +++ b/crates/ide-completion/src/completions/postfix.rs @@ -5,7 +5,7 @@ mod format_like; use hir::{Documentation, HasAttrs}; use ide_db::{imports::insert_use::ImportScope, ty_filter::TryEnum, SnippetCap}; use syntax::{ - ast::{self, AstNode, AstToken}, + ast::{self, make, AstNode, AstToken}, SyntaxKind::{EXPR_STMT, STMT_LIST}, TextRange, TextSize, }; @@ -61,7 +61,7 @@ pub(crate) fn complete_postfix( let mut item = postfix_snippet( "drop", "fn drop(&mut self)", - &format!("drop($0{})", receiver_text), + &format!("drop($0{receiver_text})"), ); item.set_documentation(drop_fn.docs(ctx.db)); item.add_to(acc); @@ -76,14 +76,14 @@ pub(crate) fn complete_postfix( postfix_snippet( "ifl", "if let Ok {}", - &format!("if let Ok($1) = {} {{\n $0\n}}", receiver_text), + &format!("if let Ok($1) = {receiver_text} {{\n $0\n}}"), ) .add_to(acc); postfix_snippet( "while", "while let Ok {}", - &format!("while let Ok($1) = {} {{\n $0\n}}", receiver_text), + &format!("while let Ok($1) = {receiver_text} {{\n $0\n}}"), ) .add_to(acc); } @@ -91,46 +91,44 @@ pub(crate) fn complete_postfix( postfix_snippet( "ifl", "if let Some {}", - &format!("if let Some($1) = {} {{\n $0\n}}", receiver_text), + &format!("if let Some($1) = {receiver_text} {{\n $0\n}}"), ) .add_to(acc); postfix_snippet( "while", "while let Some {}", - &format!("while let Some($1) = {} {{\n $0\n}}", receiver_text), + &format!("while let Some($1) = {receiver_text} {{\n $0\n}}"), ) .add_to(acc); } } } else if receiver_ty.is_bool() || receiver_ty.is_unknown() { - postfix_snippet("if", "if expr {}", &format!("if {} {{\n $0\n}}", receiver_text)) + postfix_snippet("if", "if expr {}", &format!("if {receiver_text} {{\n $0\n}}")) .add_to(acc); - postfix_snippet( - "while", - "while expr {}", - &format!("while {} {{\n $0\n}}", receiver_text), - ) - .add_to(acc); - postfix_snippet("not", "!expr", &format!("!{}", receiver_text)).add_to(acc); + postfix_snippet("while", "while expr {}", &format!("while {receiver_text} {{\n $0\n}}")) + .add_to(acc); + 
postfix_snippet("not", "!expr", &format!("!{receiver_text}")).add_to(acc); } else if let Some(trait_) = ctx.famous_defs().core_iter_IntoIterator() { if receiver_ty.impls_trait(ctx.db, trait_, &[]) { postfix_snippet( "for", "for ele in expr {}", - &format!("for ele in {} {{\n $0\n}}", receiver_text), + &format!("for ele in {receiver_text} {{\n $0\n}}"), ) .add_to(acc); } } - postfix_snippet("ref", "&expr", &format!("&{}", receiver_text)).add_to(acc); - postfix_snippet("refm", "&mut expr", &format!("&mut {}", receiver_text)).add_to(acc); + postfix_snippet("ref", "&expr", &format!("&{receiver_text}")).add_to(acc); + postfix_snippet("refm", "&mut expr", &format!("&mut {receiver_text}")).add_to(acc); // The rest of the postfix completions create an expression that moves an argument, // so it's better to consider references now to avoid breaking the compilation - let dot_receiver = include_references(dot_receiver); - let receiver_text = get_receiver_text(&dot_receiver, receiver_is_ambiguous_float_literal); + + let (dot_receiver, node_to_replace_with) = include_references(dot_receiver); + let receiver_text = + get_receiver_text(&node_to_replace_with, receiver_is_ambiguous_float_literal); let postfix_snippet = match build_postfix_snippet_builder(ctx, cap, &dot_receiver) { Some(it) => it, None => return, @@ -146,7 +144,7 @@ pub(crate) fn complete_postfix( postfix_snippet( "match", "match expr {}", - &format!("match {} {{\n Ok(${{1:_}}) => {{$2}},\n Err(${{3:_}}) => {{$0}},\n}}", receiver_text), + &format!("match {receiver_text} {{\n Ok(${{1:_}}) => {{$2}},\n Err(${{3:_}}) => {{$0}},\n}}"), ) .add_to(acc); } @@ -155,8 +153,7 @@ pub(crate) fn complete_postfix( "match", "match expr {}", &format!( - "match {} {{\n Some(${{1:_}}) => {{$2}},\n None => {{$0}},\n}}", - receiver_text + "match {receiver_text} {{\n Some(${{1:_}}) => {{$2}},\n None => {{$0}},\n}}" ), ) .add_to(acc); @@ -166,21 +163,21 @@ pub(crate) fn complete_postfix( postfix_snippet( "match", "match expr {}", - &format!("match {} {{\n ${{1:_}} => {{$0}},\n}}", receiver_text), + &format!("match {receiver_text} {{\n ${{1:_}} => {{$0}},\n}}"), ) .add_to(acc); } } - postfix_snippet("box", "Box::new(expr)", &format!("Box::new({})", receiver_text)).add_to(acc); - postfix_snippet("dbg", "dbg!(expr)", &format!("dbg!({})", receiver_text)).add_to(acc); // fixme - postfix_snippet("dbgr", "dbg!(&expr)", &format!("dbg!(&{})", receiver_text)).add_to(acc); - postfix_snippet("call", "function(expr)", &format!("${{1}}({})", receiver_text)).add_to(acc); + postfix_snippet("box", "Box::new(expr)", &format!("Box::new({receiver_text})")).add_to(acc); + postfix_snippet("dbg", "dbg!(expr)", &format!("dbg!({receiver_text})")).add_to(acc); // fixme + postfix_snippet("dbgr", "dbg!(&expr)", &format!("dbg!(&{receiver_text})")).add_to(acc); + postfix_snippet("call", "function(expr)", &format!("${{1}}({receiver_text})")).add_to(acc); if let Some(parent) = dot_receiver.syntax().parent().and_then(|p| p.parent()) { if matches!(parent.kind(), STMT_LIST | EXPR_STMT) { - postfix_snippet("let", "let", &format!("let $0 = {};", receiver_text)).add_to(acc); - postfix_snippet("letm", "let mut", &format!("let mut $0 = {};", receiver_text)) + postfix_snippet("let", "let", &format!("let $0 = {receiver_text};")).add_to(acc); + postfix_snippet("letm", "let mut", &format!("let mut $0 = {receiver_text};")) .add_to(acc); } } @@ -210,14 +207,35 @@ fn get_receiver_text(receiver: &ast::Expr, receiver_is_ambiguous_float_literal: text.replace('\\', "\\\\").replace('$', "\\$") } -fn 
include_references(initial_element: &ast::Expr) -> ast::Expr { +fn include_references(initial_element: &ast::Expr) -> (ast::Expr, ast::Expr) { let mut resulting_element = initial_element.clone(); - while let Some(parent_ref_element) = - resulting_element.syntax().parent().and_then(ast::RefExpr::cast) + + while let Some(field_expr) = resulting_element.syntax().parent().and_then(ast::FieldExpr::cast) { - resulting_element = ast::Expr::from(parent_ref_element); + resulting_element = ast::Expr::from(field_expr); } - resulting_element + + let mut new_element_opt = initial_element.clone(); + + if let Some(first_ref_expr) = resulting_element.syntax().parent().and_then(ast::RefExpr::cast) { + if let Some(expr) = first_ref_expr.expr() { + resulting_element = expr; + } + + while let Some(parent_ref_element) = + resulting_element.syntax().parent().and_then(ast::RefExpr::cast) + { + resulting_element = ast::Expr::from(parent_ref_element); + + new_element_opt = make::expr_ref(new_element_opt, false); + } + } else { + // If we do not find any ref expressions, restore + // all the progress of tree climbing + resulting_element = initial_element.clone(); + } + + (resulting_element, new_element_opt) } fn build_postfix_snippet_builder<'ctx>( @@ -225,8 +243,7 @@ fn build_postfix_snippet_builder<'ctx>( cap: SnippetCap, receiver: &'ctx ast::Expr, ) -> Option Builder + 'ctx> { - let receiver_syntax = receiver.syntax(); - let receiver_range = ctx.sema.original_range_opt(receiver_syntax)?.range; + let receiver_range = ctx.sema.original_range_opt(receiver.syntax())?.range; if ctx.source_range().end() < receiver_range.start() { // This shouldn't happen, yet it does. I assume this might be due to an incorrect token mapping. return None; @@ -278,7 +295,7 @@ fn add_custom_postfix_completions( let body = snippet.postfix_snippet(receiver_text); let mut builder = postfix_snippet(trigger, snippet.description.as_deref().unwrap_or_default(), &body); - builder.documentation(Documentation::new(format!("```rust\n{}\n```", body))); + builder.documentation(Documentation::new(format!("```rust\n{body}\n```"))); for import in imports.into_iter() { builder.add_import(import); } @@ -549,7 +566,7 @@ fn main() { ControlFlow::Break('\\\\') } ); check_edit_with_config( - config.clone(), + config, "break", r#" //- minicore: try @@ -578,12 +595,12 @@ fn main() { check_edit( "format", r#"fn main() { "{some_var:?}".$0 }"#, - r#"fn main() { format!("{:?}", some_var) }"#, + r#"fn main() { format!("{some_var:?}") }"#, ); check_edit( "panic", r#"fn main() { "Panic with {a}".$0 }"#, - r#"fn main() { panic!("Panic with {}", a) }"#, + r#"fn main() { panic!("Panic with {a}") }"#, ); check_edit( "println", @@ -616,22 +633,55 @@ fn main() { #[test] fn postfix_custom_snippets_completion_for_references() { + // https://github.com/rust-lang/rust-analyzer/issues/7929 + + let snippet = Snippet::new( + &[], + &["ok".into()], + &["Ok(${receiver})".into()], + "", + &[], + crate::SnippetScope::Expr, + ) + .unwrap(); + check_edit_with_config( - CompletionConfig { - snippets: vec![Snippet::new( - &[], - &["ok".into()], - &["Ok(${receiver})".into()], - "", - &[], - crate::SnippetScope::Expr, - ) - .unwrap()], - ..TEST_CONFIG - }, + CompletionConfig { snippets: vec![snippet.clone()], ..TEST_CONFIG }, + "ok", + r#"fn main() { &&42.o$0 }"#, + r#"fn main() { Ok(&&42) }"#, + ); + + check_edit_with_config( + CompletionConfig { snippets: vec![snippet.clone()], ..TEST_CONFIG }, "ok", r#"fn main() { &&42.$0 }"#, r#"fn main() { Ok(&&42) }"#, ); + + check_edit_with_config( + 
CompletionConfig { snippets: vec![snippet], ..TEST_CONFIG }, + "ok", + r#" +struct A { + a: i32, +} + +fn main() { + let a = A {a :1}; + &a.a.$0 +} + "#, + r#" +struct A { + a: i32, +} + +fn main() { + let a = A {a :1}; + Ok(&a.a) +} + "#, + ); } } diff --git a/crates/ide-completion/src/completions/postfix/format_like.rs b/crates/ide-completion/src/completions/postfix/format_like.rs index b43bdb9ab9..dfcc78e923 100644 --- a/crates/ide-completion/src/completions/postfix/format_like.rs +++ b/crates/ide-completion/src/completions/postfix/format_like.rs @@ -54,7 +54,11 @@ pub(crate) fn add_format_like_completions( if let Ok((out, exprs)) = parse_format_exprs(receiver_text.text()) { let exprs = with_placeholders(exprs); for (label, macro_name) in KINDS { - let snippet = format!(r#"{}({}, {})"#, macro_name, out, exprs.join(", ")); + let snippet = if exprs.is_empty() { + format!(r#"{}({})"#, macro_name, out) + } else { + format!(r#"{}({}, {})"#, macro_name, out, exprs.join(", ")) + }; postfix_snippet(label, macro_name, &snippet).add_to(acc); } @@ -72,16 +76,29 @@ mod tests { ("eprintln!", "{}", r#"eprintln!("{}", $1)"#), ( "log::info!", - "{} {expr} {} {2 + 2}", - r#"log::info!("{} {} {} {}", $1, expr, $2, 2 + 2)"#, + "{} {ident} {} {2 + 2}", + r#"log::info!("{} {ident} {} {}", $1, $2, 2 + 2)"#, ), - ("format!", "{expr:?}", r#"format!("{:?}", expr)"#), ]; for (kind, input, output) in test_vector { let (parsed_string, exprs) = parse_format_exprs(input).unwrap(); let exprs = with_placeholders(exprs); - let snippet = format!(r#"{}("{}", {})"#, kind, parsed_string, exprs.join(", ")); + let snippet = format!(r#"{kind}("{parsed_string}", {})"#, exprs.join(", ")); + assert_eq!(&snippet, output); + } + } + + #[test] + fn test_into_suggestion_no_epxrs() { + let test_vector = &[ + ("println!", "{ident}", r#"println!("{ident}")"#), + ("format!", "{ident:?}", r#"format!("{ident:?}")"#), + ]; + + for (kind, input, output) in test_vector { + let (parsed_string, _exprs) = parse_format_exprs(input).unwrap(); + let snippet = format!(r#"{}("{}")"#, kind, parsed_string); assert_eq!(&snippet, output); } } diff --git a/crates/ide-completion/src/completions/record.rs b/crates/ide-completion/src/completions/record.rs index 5d96fbd30a..0521e735de 100644 --- a/crates/ide-completion/src/completions/record.rs +++ b/crates/ide-completion/src/completions/record.rs @@ -124,7 +124,12 @@ fn complete_fields( #[cfg(test)] mod tests { - use crate::tests::check_edit; + use ide_db::SnippetCap; + + use crate::{ + tests::{check_edit, check_edit_with_config, TEST_CONFIG}, + CompletionConfig, + }; #[test] fn literal_struct_completion_edit() { @@ -151,6 +156,66 @@ fn baz() { ) } + #[test] + fn enum_variant_no_snippets() { + let conf = CompletionConfig { snippet_cap: SnippetCap::new(false), ..TEST_CONFIG }; + // tuple variant + check_edit_with_config( + conf.clone(), + "Variant()", + r#" +enum Enum { + Variant(usize), +} + +impl Enum { + fn new(u: usize) -> Self { + Self::Va$0 + } +} +"#, + r#" +enum Enum { + Variant(usize), +} + +impl Enum { + fn new(u: usize) -> Self { + Self::Variant + } +} +"#, + ); + + // record variant + check_edit_with_config( + conf, + "Variant{}", + r#" +enum Enum { + Variant{u: usize}, +} + +impl Enum { + fn new(u: usize) -> Self { + Self::Va$0 + } +} +"#, + r#" +enum Enum { + Variant{u: usize}, +} + +impl Enum { + fn new(u: usize) -> Self { + Self::Variant + } +} +"#, + ) + } + #[test] fn literal_struct_impl_self_completion() { check_edit( diff --git a/crates/ide-completion/src/completions/snippet.rs 
b/crates/ide-completion/src/completions/snippet.rs index 66adb42863..da1f0542d2 100644 --- a/crates/ide-completion/src/completions/snippet.rs +++ b/crates/ide-completion/src/completions/snippet.rs @@ -141,7 +141,7 @@ fn add_custom_completions( }; let body = snip.snippet(); let mut builder = snippet(ctx, cap, trigger, &body); - builder.documentation(Documentation::new(format!("```rust\n{}\n```", body))); + builder.documentation(Documentation::new(format!("```rust\n{body}\n```"))); for import in imports.into_iter() { builder.add_import(import); } diff --git a/crates/ide-completion/src/completions/type.rs b/crates/ide-completion/src/completions/type.rs index 8f9db2f94c..37849c251a 100644 --- a/crates/ide-completion/src/completions/type.rs +++ b/crates/ide-completion/src/completions/type.rs @@ -58,7 +58,7 @@ pub(crate) fn complete_type_path( trait_.items(ctx.sema.db).into_iter().for_each(|item| add_assoc_item(acc, item)) } Qualified::TypeAnchor { ty: Some(ty), trait_: None } => { - ctx.iterate_path_candidates(&ty, |item| { + ctx.iterate_path_candidates(ty, |item| { add_assoc_item(acc, item); }); diff --git a/crates/ide-completion/src/context.rs b/crates/ide-completion/src/context.rs index 9850813a0c..aa77f44953 100644 --- a/crates/ide-completion/src/context.rs +++ b/crates/ide-completion/src/context.rs @@ -19,7 +19,7 @@ use syntax::{ ast::{self, AttrKind, NameOrNameRef}, AstNode, SyntaxKind::{self, *}, - SyntaxToken, TextRange, TextSize, + SyntaxToken, TextRange, TextSize, T, }; use text_edit::Indel; @@ -569,6 +569,32 @@ impl<'a> CompletionContext<'a> { // completing on let original_token = original_file.syntax().token_at_offset(offset).left_biased()?; + // try to skip completions on path with invalid colons + // this approach works in normal path and inside token tree + match original_token.kind() { + T![:] => { + // return if no prev token before colon + let prev_token = original_token.prev_token()?; + + // only has a single colon + if prev_token.kind() != T![:] { + return None; + } + + // has 3 colon or 2 coloncolon in a row + // special casing this as per discussion in https://github.com/rust-lang/rust-analyzer/pull/13611#discussion_r1031845205 + // and https://github.com/rust-lang/rust-analyzer/pull/13611#discussion_r1032812751 + if prev_token + .prev_token() + .map(|t| t.kind() == T![:] || t.kind() == T![::]) + .unwrap_or(false) + { + return None; + } + } + _ => {} + } + let AnalysisResult { analysis, expected: (expected_type, expected_name), diff --git a/crates/ide-completion/src/context/analysis.rs b/crates/ide-completion/src/context/analysis.rs index c142a7305f..e34824e22e 100644 --- a/crates/ide-completion/src/context/analysis.rs +++ b/crates/ide-completion/src/context/analysis.rs @@ -226,7 +226,7 @@ fn analyze( find_node_at_offset(&file_with_fake_ident, offset) { let parent = name_ref.syntax().parent()?; - let (mut nameref_ctx, _) = classify_name_ref(&sema, &original_file, name_ref, parent)?; + let (mut nameref_ctx, _) = classify_name_ref(sema, &original_file, name_ref, parent)?; if let NameRefKind::Path(path_ctx) = &mut nameref_ctx.kind { path_ctx.kind = PathKind::Derive { existing_derives: sema @@ -277,7 +277,7 @@ fn analyze( return Some((analysis, (None, None), QualifierCtx::default())); } }; - let expected = expected_type_and_name(sema, &self_token, &name_like); + let expected = expected_type_and_name(sema, self_token, &name_like); let mut qual_ctx = QualifierCtx::default(); let analysis = match name_like { ast::NameLike::Lifetime(lifetime) => { @@ -286,7 +286,7 @@ fn analyze( 
ast::NameLike::NameRef(name_ref) => { let parent = name_ref.syntax().parent()?; let (nameref_ctx, qualifier_ctx) = - classify_name_ref(sema, &original_file, name_ref, parent.clone())?; + classify_name_ref(sema, &original_file, name_ref, parent)?; qual_ctx = qualifier_ctx; CompletionAnalysis::NameRef(nameref_ctx) } @@ -374,7 +374,7 @@ fn expected_type_and_name( ast::ArgList(_) => { cov_mark::hit!(expected_type_fn_param); ActiveParameter::at_token( - &sema, + sema, token.clone(), ).map(|ap| { let name = ap.ident().map(NameOrNameRef::Name); @@ -507,7 +507,7 @@ fn classify_lifetime( _ => LifetimeKind::Lifetime, } }; - let lifetime = find_node_at_offset(&original_file, lifetime.syntax().text_range().start()); + let lifetime = find_node_at_offset(original_file, lifetime.syntax().text_range().start()); Some(LifetimeContext { lifetime, kind }) } @@ -548,7 +548,7 @@ fn classify_name( _ => return None, } }; - let name = find_node_at_offset(&original_file, name.syntax().text_range().start()); + let name = find_node_at_offset(original_file, name.syntax().text_range().start()); Some(NameContext { name, kind }) } @@ -558,7 +558,7 @@ fn classify_name_ref( name_ref: ast::NameRef, parent: SyntaxNode, ) -> Option<(NameRefContext, QualifierCtx)> { - let nameref = find_node_at_offset(&original_file, name_ref.syntax().text_range().start()); + let nameref = find_node_at_offset(original_file, name_ref.syntax().text_range().start()); let make_res = |kind| (NameRefContext { nameref: nameref.clone(), kind }, Default::default()); @@ -585,11 +585,7 @@ fn classify_name_ref( original_file, &record_field.parent_record_pat(), ), - ..pattern_context_for( - sema, - original_file, - record_field.parent_record_pat().clone().into(), - ) + ..pattern_context_for(sema, original_file, record_field.parent_record_pat().into()) }); return Some(make_res(kind)); } diff --git a/crates/ide-completion/src/context/tests.rs b/crates/ide-completion/src/context/tests.rs index 50845b3881..a654a5db57 100644 --- a/crates/ide-completion/src/context/tests.rs +++ b/crates/ide-completion/src/context/tests.rs @@ -19,7 +19,7 @@ fn check_expected_type_and_name(ra_fixture: &str, expect: Expect) { let name = completion_context.expected_name.map_or_else(|| "?".to_owned(), |name| name.to_string()); - expect.assert_eq(&format!("ty: {}, name: {}", ty, name)); + expect.assert_eq(&format!("ty: {ty}, name: {name}")); } #[test] diff --git a/crates/ide-completion/src/item.rs b/crates/ide-completion/src/item.rs index 27c3ccb35a..657eab5b1b 100644 --- a/crates/ide-completion/src/item.rs +++ b/crates/ide-completion/src/item.rs @@ -453,10 +453,10 @@ impl Builder { // snippets can have multiple imports, but normal completions only have up to one if let Some(original_path) = import_edit.original_path.as_ref() { lookup = lookup.or_else(|| Some(label.clone())); - label = SmolStr::from(format!("{} (use {})", label, original_path)); + label = SmolStr::from(format!("{label} (use {original_path})")); } } else if let Some(trait_name) = self.trait_name { - label = SmolStr::from(format!("{} (as {})", label, trait_name)); + label = SmolStr::from(format!("{label} (as {trait_name})")); } let text_edit = match self.text_edit { diff --git a/crates/ide-completion/src/lib.rs b/crates/ide-completion/src/lib.rs index 9d0044e55f..4b48ec6bc3 100644 --- a/crates/ide-completion/src/lib.rs +++ b/crates/ide-completion/src/lib.rs @@ -164,7 +164,6 @@ pub fn completions( completions::vis::complete_vis_path(&mut completions, ctx, path_ctx, has_in_token); } } - // prevent `(` from triggering 
unwanted completion noise return Some(completions.into()); } diff --git a/crates/ide-completion/src/render.rs b/crates/ide-completion/src/render.rs index 86302cb067..e48d1aecd0 100644 --- a/crates/ide-completion/src/render.rs +++ b/crates/ide-completion/src/render.rs @@ -131,7 +131,7 @@ pub(crate) fn render_field( item.detail(ty.display(ctx.db()).to_string()) .set_documentation(field.docs(ctx.db())) .set_deprecated(is_deprecated) - .lookup_by(name.clone()); + .lookup_by(name); item.insert_text(field_with_receiver(receiver.as_ref(), &escaped_name)); if let Some(receiver) = &dot_access.receiver { if let Some(original) = ctx.completion.sema.original_ast_node(receiver.clone()) { @@ -144,8 +144,7 @@ pub(crate) fn render_field( } fn field_with_receiver(receiver: Option<&hir::Name>, field_name: &str) -> SmolStr { - receiver - .map_or_else(|| field_name.into(), |receiver| format!("{}.{}", receiver, field_name).into()) + receiver.map_or_else(|| field_name.into(), |receiver| format!("{receiver}.{field_name}").into()) } pub(crate) fn render_tuple_field( @@ -306,7 +305,7 @@ fn render_resolution_path( item.lookup_by(name.clone()) .label(SmolStr::from_iter([&name, "<…>"])) .trigger_call_info() - .insert_snippet(cap, format!("{}<$0>", local_name)); + .insert_snippet(cap, format!("{local_name}<$0>")); } } } @@ -528,13 +527,13 @@ mod tests { let tag = it.kind().tag(); let relevance = display_relevance(it.relevance()); - items.push(format!("{} {} {}\n", tag, it.label(), relevance)); + items.push(format!("{tag} {} {relevance}\n", it.label())); if let Some((mutability, _offset, relevance)) = it.ref_match() { let label = format!("&{}{}", mutability.as_keyword_for_ref(), it.label()); let relevance = display_relevance(relevance); - items.push(format!("{} {} {}\n", tag, label, relevance)); + items.push(format!("{tag} {label} {relevance}\n")); } items @@ -563,7 +562,7 @@ mod tests { .filter_map(|(cond, desc)| if cond { Some(desc) } else { None }) .join("+"); - format!("[{}]", relevance_factors) + format!("[{relevance_factors}]") } } diff --git a/crates/ide-completion/src/render/const_.rs b/crates/ide-completion/src/render/const_.rs index 93ea825e00..70b19988ca 100644 --- a/crates/ide-completion/src/render/const_.rs +++ b/crates/ide-completion/src/render/const_.rs @@ -16,7 +16,7 @@ fn render(ctx: RenderContext<'_>, const_: hir::Const) -> Option let (name, escaped_name) = (name.unescaped().to_smol_str(), name.to_smol_str()); let detail = const_.display(db).to_string(); - let mut item = CompletionItem::new(SymbolKind::Const, ctx.source_range(), name.clone()); + let mut item = CompletionItem::new(SymbolKind::Const, ctx.source_range(), name); item.set_documentation(ctx.docs(const_)) .set_deprecated(ctx.is_deprecated(const_) || ctx.is_deprecated_assoc_item(const_)) .detail(detail) diff --git a/crates/ide-completion/src/render/function.rs b/crates/ide-completion/src/render/function.rs index 3761208460..197592e78c 100644 --- a/crates/ide-completion/src/render/function.rs +++ b/crates/ide-completion/src/render/function.rs @@ -53,7 +53,7 @@ fn render( let (call, escaped_call) = match &func_kind { FuncKind::Method(_, Some(receiver)) => ( format!("{}.{}", receiver.unescaped(), name.unescaped()).into(), - format!("{}.{}", receiver, name).into(), + format!("{receiver}.{name}").into(), ), _ => (name.unescaped().to_smol_str(), name.to_smol_str()), }; @@ -162,7 +162,7 @@ pub(super) fn add_call_parens<'b>( cov_mark::hit!(inserts_parens_for_function_calls); let (snippet, label_suffix) = if self_param.is_none() && params.is_empty() { 
- (format!("{}()$0", escaped_name), "()") + (format!("{escaped_name}()$0"), "()") } else { builder.trigger_call_info(); let snippet = if let Some(CallableSnippets::FillArguments) = ctx.config.callable { @@ -174,7 +174,7 @@ pub(super) fn add_call_parens<'b>( let smol_str = n.to_smol_str(); let text = smol_str.as_str().trim_start_matches('_'); let ref_ = ref_of_param(ctx, text, param.ty()); - f(&format_args!("${{{}:{}{}}}", index + offset, ref_, text)) + f(&format_args!("${{{}:{ref_}{text}}}", index + offset)) } None => { let name = match param.ty().as_adt() { @@ -185,7 +185,7 @@ pub(super) fn add_call_parens<'b>( .map(|s| to_lower_snake_case(s.as_str())) .unwrap_or_else(|| "_".to_string()), }; - f(&format_args!("${{{}:{}}}", index + offset, name)) + f(&format_args!("${{{}:{name}}}", index + offset)) } } }); @@ -200,12 +200,12 @@ pub(super) fn add_call_parens<'b>( ) } None => { - format!("{}({})$0", escaped_name, function_params_snippet) + format!("{escaped_name}({function_params_snippet})$0") } } } else { cov_mark::hit!(suppress_arg_snippets); - format!("{}($0)", escaped_name) + format!("{escaped_name}($0)") }; (snippet, "(…)") diff --git a/crates/ide-completion/src/render/literal.rs b/crates/ide-completion/src/render/literal.rs index 0c791ac570..64dab02f7c 100644 --- a/crates/ide-completion/src/render/literal.rs +++ b/crates/ide-completion/src/render/literal.rs @@ -84,7 +84,7 @@ fn render( } _ => RenderedLiteral { literal: escaped_qualified_name.clone(), - detail: escaped_qualified_name.clone(), + detail: escaped_qualified_name, }, }; @@ -96,7 +96,7 @@ fn render( if !should_add_parens { kind = StructKind::Unit; } - let label = format_literal_label(&qualified_name, kind); + let label = format_literal_label(&qualified_name, kind, snippet_cap); let lookup = if qualified { format_literal_lookup(&short_qualified_name.to_string(), kind) } else { diff --git a/crates/ide-completion/src/render/macro_.rs b/crates/ide-completion/src/render/macro_.rs index eabd0bd17d..ffcad1185a 100644 --- a/crates/ide-completion/src/render/macro_.rs +++ b/crates/ide-completion/src/render/macro_.rs @@ -66,7 +66,7 @@ fn render( match ctx.snippet_cap() { Some(cap) if needs_bang && !has_call_parens => { - let snippet = format!("{}!{}$0{}", escaped_name, bra, ket); + let snippet = format!("{escaped_name}!{bra}$0{ket}"); let lookup = banged_name(&name); item.insert_snippet(cap, snippet).lookup_by(lookup); } diff --git a/crates/ide-completion/src/render/pattern.rs b/crates/ide-completion/src/render/pattern.rs index c845ff21aa..21b4bc2174 100644 --- a/crates/ide-completion/src/render/pattern.rs +++ b/crates/ide-completion/src/render/pattern.rs @@ -33,7 +33,7 @@ pub(crate) fn render_struct_pat( let name = local_name.unwrap_or_else(|| strukt.name(ctx.db())); let (name, escaped_name) = (name.unescaped().to_smol_str(), name.to_smol_str()); let kind = strukt.kind(ctx.db()); - let label = format_literal_label(name.as_str(), kind); + let label = format_literal_label(name.as_str(), kind, ctx.snippet_cap()); let lookup = format_literal_lookup(name.as_str(), kind); let pat = render_pat(&ctx, pattern_ctx, &escaped_name, kind, &visible_fields, fields_omitted)?; @@ -67,7 +67,7 @@ pub(crate) fn render_variant_pat( } _ => { let kind = variant.kind(ctx.db()); - let label = format_literal_label(name.as_str(), kind); + let label = format_literal_label(name.as_str(), kind, ctx.snippet_cap()); let lookup = format_literal_lookup(name.as_str(), kind); let pat = render_pat( &ctx, diff --git a/crates/ide-completion/src/render/type_alias.rs 
b/crates/ide-completion/src/render/type_alias.rs index de919429f2..fbe120d2ac 100644 --- a/crates/ide-completion/src/render/type_alias.rs +++ b/crates/ide-completion/src/render/type_alias.rs @@ -40,7 +40,7 @@ fn render( }; let detail = type_alias.display(db).to_string(); - let mut item = CompletionItem::new(SymbolKind::TypeAlias, ctx.source_range(), name.clone()); + let mut item = CompletionItem::new(SymbolKind::TypeAlias, ctx.source_range(), name); item.set_documentation(ctx.docs(type_alias)) .set_deprecated(ctx.is_deprecated(type_alias) || ctx.is_deprecated_assoc_item(type_alias)) .detail(detail) diff --git a/crates/ide-completion/src/render/union_literal.rs b/crates/ide-completion/src/render/union_literal.rs index 54e97dd57b..1b09ad1731 100644 --- a/crates/ide-completion/src/render/union_literal.rs +++ b/crates/ide-completion/src/render/union_literal.rs @@ -24,7 +24,7 @@ pub(crate) fn render_union_literal( Some(p) => (p.unescaped().to_string(), p.to_string()), None => (name.unescaped().to_string(), name.to_string()), }; - let label = format_literal_label(&name.to_smol_str(), StructKind::Record); + let label = format_literal_label(&name.to_smol_str(), StructKind::Record, ctx.snippet_cap()); let lookup = format_literal_lookup(&name.to_smol_str(), StructKind::Record); let mut item = CompletionItem::new( CompletionItemKind::SymbolKind(SymbolKind::Union), @@ -68,7 +68,7 @@ pub(crate) fn render_union_literal( item.set_documentation(ctx.docs(un)) .set_deprecated(ctx.is_deprecated(un)) - .detail(&detail) + .detail(detail) .set_relevance(ctx.completion_relevance()); match ctx.snippet_cap() { diff --git a/crates/ide-completion/src/render/variant.rs b/crates/ide-completion/src/render/variant.rs index 24e6abdc9a..55c55725be 100644 --- a/crates/ide-completion/src/render/variant.rs +++ b/crates/ide-completion/src/render/variant.rs @@ -22,6 +22,9 @@ pub(crate) fn render_record_lit( fields: &[hir::Field], path: &str, ) -> RenderedLiteral { + if snippet_cap.is_none() { + return RenderedLiteral { literal: path.to_string(), detail: path.to_string() }; + } let completions = fields.iter().enumerate().format_with(", ", |(idx, field), f| { if snippet_cap.is_some() { f(&format_args!("{}: ${{{}:()}}", field.name(db), idx + 1)) @@ -35,8 +38,8 @@ pub(crate) fn render_record_lit( }); RenderedLiteral { - literal: format!("{} {{ {} }}", path, completions), - detail: format!("{} {{ {} }}", path, types), + literal: format!("{path} {{ {completions} }}"), + detail: format!("{path} {{ {types} }}"), } } @@ -48,6 +51,9 @@ pub(crate) fn render_tuple_lit( fields: &[hir::Field], path: &str, ) -> RenderedLiteral { + if snippet_cap.is_none() { + return RenderedLiteral { literal: path.to_string(), detail: path.to_string() }; + } let completions = fields.iter().enumerate().format_with(", ", |(idx, _), f| { if snippet_cap.is_some() { f(&format_args!("${{{}:()}}", idx + 1)) @@ -59,8 +65,8 @@ pub(crate) fn render_tuple_lit( let types = fields.iter().format_with(", ", |field, f| f(&field.ty(db).display(db))); RenderedLiteral { - literal: format!("{}({})", path, completions), - detail: format!("{}({})", path, types), + literal: format!("{path}({completions})"), + detail: format!("{path}({types})"), } } @@ -87,7 +93,14 @@ pub(crate) fn visible_fields( } /// Format a struct, etc. literal option for display in the completions menu. 
-pub(crate) fn format_literal_label(name: &str, kind: StructKind) -> SmolStr { +pub(crate) fn format_literal_label( + name: &str, + kind: StructKind, + snippet_cap: Option, +) -> SmolStr { + if snippet_cap.is_none() { + return name.into(); + } match kind { StructKind::Tuple => SmolStr::from_iter([name, "(…)"]), StructKind::Record => SmolStr::from_iter([name, " {…}"]), diff --git a/crates/ide-completion/src/snippet.rs b/crates/ide-completion/src/snippet.rs index f3b8eae4fe..343719c536 100644 --- a/crates/ide-completion/src/snippet.rs +++ b/crates/ide-completion/src/snippet.rs @@ -199,7 +199,7 @@ fn validate_snippet( ) -> Option<(Box<[GreenNode]>, String, Option>)> { let mut imports = Vec::with_capacity(requires.len()); for path in requires.iter() { - let use_path = ast::SourceFile::parse(&format!("use {};", path)) + let use_path = ast::SourceFile::parse(&format!("use {path};")) .syntax_node() .descendants() .find_map(ast::Path::cast)?; diff --git a/crates/ide-completion/src/tests.rs b/crates/ide-completion/src/tests.rs index 9e2beb9ee3..abe14e48e2 100644 --- a/crates/ide-completion/src/tests.rs +++ b/crates/ide-completion/src/tests.rs @@ -86,7 +86,7 @@ pub(crate) fn completion_list_no_kw(ra_fixture: &str) -> String { } pub(crate) fn completion_list_no_kw_with_private_editable(ra_fixture: &str) -> String { - let mut config = TEST_CONFIG.clone(); + let mut config = TEST_CONFIG; config.enable_private_editable = true; completion_list_with_config(config, ra_fixture, false, None) } @@ -153,7 +153,7 @@ fn render_completion_list(completions: Vec) -> String { .into_iter() .map(|it| { let tag = it.kind().tag(); - let var_name = format!("{} {}", tag, it.label()); + let var_name = format!("{tag} {}", it.label()); let mut buf = var_name; if let Some(detail) = it.detail() { let width = label_width.saturating_sub(monospace_width(it.label())); @@ -183,12 +183,12 @@ pub(crate) fn check_edit_with_config( let ra_fixture_after = trim_indent(ra_fixture_after); let (db, position) = position(ra_fixture_before); let completions: Vec = - crate::completions(&db, &config, position, None).unwrap().into(); + crate::completions(&db, &config, position, None).unwrap(); let (completion,) = completions .iter() .filter(|it| it.lookup() == what) .collect_tuple() - .unwrap_or_else(|| panic!("can't find {:?} completion in {:#?}", what, completions)); + .unwrap_or_else(|| panic!("can't find {what:?} completion in {completions:#?}")); let mut actual = db.file_text(position.file_id).to_string(); let mut combined_edit = completion.text_edit().to_owned(); diff --git a/crates/ide-completion/src/tests/attribute.rs b/crates/ide-completion/src/tests/attribute.rs index 1578ba2c37..4e60820dd6 100644 --- a/crates/ide-completion/src/tests/attribute.rs +++ b/crates/ide-completion/src/tests/attribute.rs @@ -607,6 +607,30 @@ fn attr_in_source_file_end() { ); } +#[test] +fn invalid_path() { + check( + r#" +//- proc_macros: identity +#[proc_macros:::$0] +struct Foo; +"#, + expect![[r#""#]], + ); + + check( + r#" +//- minicore: derive, copy +mod foo { + pub use Copy as Bar; +} +#[derive(foo:::::$0)] +struct Foo; +"#, + expect![""], + ); +} + mod cfg { use super::*; diff --git a/crates/ide-completion/src/tests/expression.rs b/crates/ide-completion/src/tests/expression.rs index 8e26d889f9..043f552bd8 100644 --- a/crates/ide-completion/src/tests/expression.rs +++ b/crates/ide-completion/src/tests/expression.rs @@ -4,7 +4,7 @@ use expect_test::{expect, Expect}; use crate::tests::{check_edit, completion_list, BASE_ITEMS_FIXTURE}; fn check(ra_fixture: 
&str, expect: Expect) { - let actual = completion_list(&format!("{}{}", BASE_ITEMS_FIXTURE, ra_fixture)); + let actual = completion_list(&format!("{BASE_ITEMS_FIXTURE}{ra_fixture}")); expect.assert_eq(&actual) } diff --git a/crates/ide-completion/src/tests/item.rs b/crates/ide-completion/src/tests/item.rs index 409413c1dc..3ef2a7c942 100644 --- a/crates/ide-completion/src/tests/item.rs +++ b/crates/ide-completion/src/tests/item.rs @@ -7,7 +7,7 @@ use expect_test::{expect, Expect}; use crate::tests::{completion_list, BASE_ITEMS_FIXTURE}; fn check(ra_fixture: &str, expect: Expect) { - let actual = completion_list(&format!("{}{}", BASE_ITEMS_FIXTURE, ra_fixture)); + let actual = completion_list(&format!("{BASE_ITEMS_FIXTURE}{ra_fixture}")); expect.assert_eq(&actual) } diff --git a/crates/ide-completion/src/tests/item_list.rs b/crates/ide-completion/src/tests/item_list.rs index 8ed6cb3cf8..b62b988885 100644 --- a/crates/ide-completion/src/tests/item_list.rs +++ b/crates/ide-completion/src/tests/item_list.rs @@ -1,10 +1,10 @@ //! Completion tests for item list position. use expect_test::{expect, Expect}; -use crate::tests::{completion_list, BASE_ITEMS_FIXTURE}; +use crate::tests::{check_edit, completion_list, BASE_ITEMS_FIXTURE}; fn check(ra_fixture: &str, expect: Expect) { - let actual = completion_list(&format!("{}{}", BASE_ITEMS_FIXTURE, ra_fixture)); + let actual = completion_list(&format!("{BASE_ITEMS_FIXTURE}{ra_fixture}")); expect.assert_eq(&actual) } @@ -277,3 +277,91 @@ fn after_unit_struct() { "#]], ); } + +#[test] +fn type_in_impl_trait() { + check_edit( + "type O", + r" +struct A; +trait B { +type O: ?Sized; +} +impl B for A { +$0 +} +", + r#" +struct A; +trait B { +type O: ?Sized; +} +impl B for A { +type O = $0; +} +"#, + ); + check_edit( + "type O", + r" +struct A; +trait B { +type O; +} +impl B for A { +$0 +} +", + r#" +struct A; +trait B { +type O; +} +impl B for A { +type O = $0; +} +"#, + ); + check_edit( + "type O", + r" +struct A; +trait B { +type O: ?Sized = u32; +} +impl B for A { +$0 +} +", + r#" +struct A; +trait B { +type O: ?Sized = u32; +} +impl B for A { +type O = $0; +} +"#, + ); + check_edit( + "type O", + r" +struct A; +trait B { +type O = u32; +} +impl B for A { +$0 +} +", + r" +struct A; +trait B { +type O = u32; +} +impl B for A { +type O = $0; +} +", + ) +} diff --git a/crates/ide-completion/src/tests/pattern.rs b/crates/ide-completion/src/tests/pattern.rs index db8bef6640..ad9254e7f2 100644 --- a/crates/ide-completion/src/tests/pattern.rs +++ b/crates/ide-completion/src/tests/pattern.rs @@ -9,7 +9,7 @@ fn check_empty(ra_fixture: &str, expect: Expect) { } fn check(ra_fixture: &str, expect: Expect) { - let actual = completion_list(&format!("{}\n{}", BASE_ITEMS_FIXTURE, ra_fixture)); + let actual = completion_list(&format!("{BASE_ITEMS_FIXTURE}\n{ra_fixture}")); expect.assert_eq(&actual) } diff --git a/crates/ide-completion/src/tests/predicate.rs b/crates/ide-completion/src/tests/predicate.rs index a8676e2f24..2656a4d545 100644 --- a/crates/ide-completion/src/tests/predicate.rs +++ b/crates/ide-completion/src/tests/predicate.rs @@ -4,7 +4,7 @@ use expect_test::{expect, Expect}; use crate::tests::{completion_list, BASE_ITEMS_FIXTURE}; fn check(ra_fixture: &str, expect: Expect) { - let actual = completion_list(&format!("{}\n{}", BASE_ITEMS_FIXTURE, ra_fixture)); + let actual = completion_list(&format!("{BASE_ITEMS_FIXTURE}\n{ra_fixture}")); expect.assert_eq(&actual) } diff --git a/crates/ide-completion/src/tests/special.rs 
b/crates/ide-completion/src/tests/special.rs index 033dc99c26..cad4af4937 100644 --- a/crates/ide-completion/src/tests/special.rs +++ b/crates/ide-completion/src/tests/special.rs @@ -2,13 +2,22 @@ use expect_test::{expect, Expect}; -use crate::tests::{check_edit, completion_list_no_kw}; +use crate::tests::{check_edit, completion_list_no_kw, completion_list_with_trigger_character}; fn check(ra_fixture: &str, expect: Expect) { let actual = completion_list_no_kw(ra_fixture); expect.assert_eq(&actual) } +pub(crate) fn check_with_trigger_character( + ra_fixture: &str, + trigger_character: Option, + expect: Expect, +) { + let actual = completion_list_with_trigger_character(ra_fixture, trigger_character); + expect.assert_eq(&actual) +} + #[test] fn completes_if_prefix_is_keyword() { check_edit( @@ -893,3 +902,82 @@ fn f() { "#]], ); } + +#[test] +fn completes_after_colon_with_trigger() { + check_with_trigger_character( + r#" +//- minicore: option +fn foo { ::$0 } +"#, + Some(':'), + expect![[r#" + md core + "#]], + ); + check_with_trigger_character( + r#" +//- minicore: option +fn foo { /* test */::$0 } +"#, + Some(':'), + expect![[r#" + md core + "#]], + ); + + check_with_trigger_character( + r#" +fn foo { crate::$0 } +"#, + Some(':'), + expect![[r#" + fn foo() fn() + "#]], + ); + + check_with_trigger_character( + r#" +fn foo { crate:$0 } +"#, + Some(':'), + expect![""], + ); +} + +#[test] +fn completes_after_colon_without_trigger() { + check_with_trigger_character( + r#" +fn foo { crate::$0 } +"#, + None, + expect![[r#" + fn foo() fn() + "#]], + ); + + check_with_trigger_character( + r#" +fn foo { crate:$0 } +"#, + None, + expect![""], + ); +} + +#[test] +fn no_completions_in_invalid_path() { + check( + r#" +fn foo { crate:::$0 } +"#, + expect![""], + ); + check( + r#" +fn foo { crate::::$0 } +"#, + expect![""], + ) +} diff --git a/crates/ide-completion/src/tests/type_pos.rs b/crates/ide-completion/src/tests/type_pos.rs index f0b7726c51..c3f4fb4d18 100644 --- a/crates/ide-completion/src/tests/type_pos.rs +++ b/crates/ide-completion/src/tests/type_pos.rs @@ -4,7 +4,7 @@ use expect_test::{expect, Expect}; use crate::tests::{completion_list, BASE_ITEMS_FIXTURE}; fn check(ra_fixture: &str, expect: Expect) { - let actual = completion_list(&format!("{}\n{}", BASE_ITEMS_FIXTURE, ra_fixture)); + let actual = completion_list(&format!("{BASE_ITEMS_FIXTURE}\n{ra_fixture}")); expect.assert_eq(&actual) } diff --git a/crates/ide-db/src/assists.rs b/crates/ide-db/src/assists.rs index da23763dc2..8c6c1c44aa 100644 --- a/crates/ide-db/src/assists.rs +++ b/crates/ide-db/src/assists.rs @@ -88,7 +88,7 @@ impl FromStr for AssistKind { "RefactorExtract" => Ok(AssistKind::RefactorExtract), "RefactorInline" => Ok(AssistKind::RefactorInline), "RefactorRewrite" => Ok(AssistKind::RefactorRewrite), - unknown => Err(format!("Unknown AssistKind: '{}'", unknown)), + unknown => Err(format!("Unknown AssistKind: '{unknown}'")), } } } diff --git a/crates/ide-db/src/imports/import_assets.rs b/crates/ide-db/src/imports/import_assets.rs index 40a6a3e897..994d48385a 100644 --- a/crates/ide-db/src/imports/import_assets.rs +++ b/crates/ide-db/src/imports/import_assets.rs @@ -367,7 +367,7 @@ fn import_for_item( let expected_import_end = if item_as_assoc(db, original_item).is_some() { unresolved_qualifier.to_string() } else { - format!("{}::{}", unresolved_qualifier, item_name(db, original_item)?) + format!("{unresolved_qualifier}::{}", item_name(db, original_item)?) 
}; if !import_path_string.contains(unresolved_first_segment) || !import_path_string.ends_with(&expected_import_end) diff --git a/crates/ide-db/src/imports/insert_use/tests.rs b/crates/ide-db/src/imports/insert_use/tests.rs index 59673af320..b92e367f7e 100644 --- a/crates/ide-db/src/imports/insert_use/tests.rs +++ b/crates/ide-db/src/imports/insert_use/tests.rs @@ -1014,7 +1014,7 @@ fn check_with_config( .and_then(|it| ImportScope::find_insert_use_container(&it, sema)) .or_else(|| ImportScope::from(syntax)) .unwrap(); - let path = ast::SourceFile::parse(&format!("use {};", path)) + let path = ast::SourceFile::parse(&format!("use {path};")) .tree() .syntax() .descendants() diff --git a/crates/ide-db/src/imports/merge_imports.rs b/crates/ide-db/src/imports/merge_imports.rs index 371d642c15..27b6321f3a 100644 --- a/crates/ide-db/src/imports/merge_imports.rs +++ b/crates/ide-db/src/imports/merge_imports.rs @@ -91,7 +91,7 @@ fn recursive_merge(lhs: &ast::UseTree, rhs: &ast::UseTree, merge: MergeBehavior) .flat_map(|list| list.use_trees()) // We use Option here to early return from this function(this is not the // same as a `filter` op). - .map(|tree| merge.is_tree_allowed(&tree).then(|| tree)) + .map(|tree| merge.is_tree_allowed(&tree).then_some(tree)) .collect::>()?; use_trees.sort_unstable_by(|a, b| path_cmp_for_sort(a.path(), b.path())); for rhs_t in rhs.use_tree_list().into_iter().flat_map(|list| list.use_trees()) { diff --git a/crates/ide-db/src/lib.rs b/crates/ide-db/src/lib.rs index e0bc0f89f0..156bbb634e 100644 --- a/crates/ide-db/src/lib.rs +++ b/crates/ide-db/src/lib.rs @@ -165,7 +165,7 @@ pub trait LineIndexDatabase: base_db::SourceDatabase { fn line_index(db: &dyn LineIndexDatabase, file_id: FileId) -> Arc { let text = db.file_text(file_id); - Arc::new(LineIndex::new(&*text)) + Arc::new(LineIndex::new(&text)) } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] diff --git a/crates/ide-db/src/rename.rs b/crates/ide-db/src/rename.rs index 49b81265ea..cd4a7e1554 100644 --- a/crates/ide-db/src/rename.rs +++ b/crates/ide-db/src/rename.rs @@ -197,7 +197,7 @@ fn rename_mod( // Module exists in a named file if !is_mod_rs { - let path = format!("{}.rs", new_name); + let path = format!("{new_name}.rs"); let dst = AnchoredPathBuf { anchor, path }; source_change.push_file_system_edit(FileSystemEdit::MoveFile { src: anchor, dst }) } @@ -207,9 +207,7 @@ fn rename_mod( // - Module has submodules defined in separate files let dir_paths = match (is_mod_rs, has_detached_child, module.name(sema.db)) { // Go up one level since the anchor is inside the dir we're trying to rename - (true, _, Some(mod_name)) => { - Some((format!("../{}", mod_name), format!("../{}", new_name))) - } + (true, _, Some(mod_name)) => Some((format!("../{mod_name}"), format!("../{new_name}"))), // The anchor is on the same level as target dir (false, true, Some(mod_name)) => Some((mod_name.to_string(), new_name.to_string())), _ => None, @@ -356,7 +354,7 @@ fn source_edit_from_name(edit: &mut TextEditBuilder, name: &ast::Name, new_name: // FIXME: instead of splitting the shorthand, recursively trigger a rename of the // other name https://github.com/rust-lang/rust-analyzer/issues/6547 - edit.insert(ident_pat.syntax().text_range().start(), format!("{}: ", new_name)); + edit.insert(ident_pat.syntax().text_range().start(), format!("{new_name}: ")); return true; } } @@ -414,7 +412,7 @@ fn source_edit_from_name_ref( // Foo { field } -> Foo { new_name: field } // ^ insert `new_name: ` let offset = 
name_ref.syntax().text_range().start(); - edit.insert(offset, format!("{}: ", new_name)); + edit.insert(offset, format!("{new_name}: ")); return true; } (None, Some(_)) if matches!(def, Definition::Local(_)) => { @@ -422,7 +420,7 @@ fn source_edit_from_name_ref( // Foo { field } -> Foo { field: new_name } // ^ insert `: new_name` let offset = name_ref.syntax().text_range().end(); - edit.insert(offset, format!(": {}", new_name)); + edit.insert(offset, format!(": {new_name}")); return true; } _ => (), diff --git a/crates/ide-db/src/search.rs b/crates/ide-db/src/search.rs index aa5d7e9beb..b2b0e49085 100644 --- a/crates/ide-db/src/search.rs +++ b/crates/ide-db/src/search.rs @@ -608,7 +608,7 @@ impl<'a> FindUsages<'a> { let reference = FileReference { range, name: ast::NameLike::NameRef(name_ref.clone()), - category: is_name_ref_in_import(name_ref).then(|| ReferenceCategory::Import), + category: is_name_ref_in_import(name_ref).then_some(ReferenceCategory::Import), }; sink(file_id, reference) } @@ -787,7 +787,7 @@ impl ReferenceCategory { fn new(def: &Definition, r: &ast::NameRef) -> Option { // Only Locals and Fields have accesses for now. if !matches!(def, Definition::Local(_) | Definition::Field(_)) { - return is_name_ref_in_import(r).then(|| ReferenceCategory::Import); + return is_name_ref_in_import(r).then_some(ReferenceCategory::Import); } let mode = r.syntax().ancestors().find_map(|node| { diff --git a/crates/ide-db/src/symbol_index.rs b/crates/ide-db/src/symbol_index.rs index bfb0031277..c054cc1597 100644 --- a/crates/ide-db/src/symbol_index.rs +++ b/crates/ide-db/src/symbol_index.rs @@ -206,7 +206,7 @@ pub fn world_symbols(db: &RootDatabase, query: Query) -> Vec { } pub fn crate_symbols(db: &RootDatabase, krate: Crate, query: Query) -> Vec { - let _p = profile::span("crate_symbols").detail(|| format!("{:?}", query)); + let _p = profile::span("crate_symbols").detail(|| format!("{query:?}")); let modules = krate.modules(db); let indices: Vec<_> = modules diff --git a/crates/ide-db/src/syntax_helpers/format_string_exprs.rs b/crates/ide-db/src/syntax_helpers/format_string_exprs.rs index 313346ee13..fcef71fb74 100644 --- a/crates/ide-db/src/syntax_helpers/format_string_exprs.rs +++ b/crates/ide-db/src/syntax_helpers/format_string_exprs.rs @@ -140,8 +140,8 @@ pub fn parse_format_exprs(input: &str) -> Result<(String, Vec), ()> { output.push_str(trimmed); } else if matches!(state, State::Expr) { extracted_expressions.push(Arg::Expr(trimmed.into())); - } else { - extracted_expressions.push(Arg::Ident(trimmed.into())); + } else if matches!(state, State::Ident) { + output.push_str(trimmed); } output.push(chr); @@ -205,7 +205,7 @@ mod tests { fn check(input: &str, expect: &Expect) { let (output, exprs) = parse_format_exprs(input).unwrap_or(("-".to_string(), vec![])); let outcome_repr = if !exprs.is_empty() { - format!("{}; {}", output, with_placeholders(exprs).join(", ")) + format!("{output}; {}", with_placeholders(exprs).join(", ")) } else { output }; @@ -218,9 +218,9 @@ mod tests { let test_vector = &[ ("no expressions", expect![["no expressions"]]), (r"no expressions with \$0$1", expect![r"no expressions with \\\$0\$1"]), - ("{expr} is {2 + 2}", expect![["{} is {}; expr, 2 + 2"]]), - ("{expr:?}", expect![["{:?}; expr"]]), - ("{expr:1$}", expect![[r"{:1\$}; expr"]]), + ("{expr} is {2 + 2}", expect![["{expr} is {}; 2 + 2"]]), + ("{expr:?}", expect![["{expr:?}"]]), + ("{expr:1$}", expect![[r"{expr:1\$}"]]), ("{:1$}", expect![[r"{:1\$}; $1"]]), ("{:>padding$}", expect![[r"{:>padding\$}; $1"]]), 
("{}, {}, {0}", expect![[r"{}, {}, {0}; $1, $2"]]), @@ -230,8 +230,8 @@ mod tests { ("malformed}", expect![["-"]]), ("{{correct", expect![["{{correct"]]), ("correct}}", expect![["correct}}"]]), - ("{correct}}}", expect![["{}}}; correct"]]), - ("{correct}}}}}", expect![["{}}}}}; correct"]]), + ("{correct}}}", expect![["{correct}}}"]]), + ("{correct}}}}}", expect![["{correct}}}}}"]]), ("{incorrect}}", expect![["-"]]), ("placeholders {} {}", expect![["placeholders {} {}; $1, $2"]]), ("mixed {} {2 + 2} {}", expect![["mixed {} {} {}; $1, 2 + 2, $2"]]), @@ -239,7 +239,7 @@ mod tests { "{SomeStruct { val_a: 0, val_b: 1 }}", expect![["{}; SomeStruct { val_a: 0, val_b: 1 }"]], ), - ("{expr:?} is {2.32f64:.5}", expect![["{:?} is {:.5}; expr, 2.32f64"]]), + ("{expr:?} is {2.32f64:.5}", expect![["{expr:?} is {:.5}; 2.32f64"]]), ( "{SomeStruct { val_a: 0, val_b: 1 }:?}", expect![["{:?}; SomeStruct { val_a: 0, val_b: 1 }"]], @@ -262,8 +262,6 @@ mod tests { .unwrap() .1, vec![ - Arg::Ident("_ident".to_owned()), - Arg::Ident("r#raw_ident".to_owned()), Arg::Expr("expr.obj".to_owned()), Arg::Expr("name {thing: 42}".to_owned()), Arg::Placeholder diff --git a/crates/ide-db/src/syntax_helpers/node_ext.rs b/crates/ide-db/src/syntax_helpers/node_ext.rs index 39710b8f13..a34dc1b695 100644 --- a/crates/ide-db/src/syntax_helpers/node_ext.rs +++ b/crates/ide-db/src/syntax_helpers/node_ext.rs @@ -173,7 +173,8 @@ pub fn walk_pat(pat: &ast::Pat, cb: &mut dyn FnMut(ast::Pat)) { } /// Preorder walk all the type's sub types. -pub fn walk_ty(ty: &ast::Type, cb: &mut dyn FnMut(ast::Type)) { +// FIXME: Make the control flow more proper +pub fn walk_ty(ty: &ast::Type, cb: &mut dyn FnMut(ast::Type) -> bool) { let mut preorder = ty.syntax().preorder(); while let Some(event) = preorder.next() { let node = match event { @@ -184,10 +185,12 @@ pub fn walk_ty(ty: &ast::Type, cb: &mut dyn FnMut(ast::Type)) { match ast::Type::cast(node) { Some(ty @ ast::Type::MacroType(_)) => { preorder.skip_subtree(); - cb(ty) + cb(ty); } Some(ty) => { - cb(ty); + if cb(ty) { + preorder.skip_subtree(); + } } // skip const args None if ast::ConstArg::can_cast(kind) => { @@ -252,6 +255,11 @@ pub fn is_pattern_cond(expr: ast::Expr) -> bool { /// Note that modifying the tree while iterating it will cause undefined iteration which might /// potentially results in an out of bounds panic. 
pub fn for_each_tail_expr(expr: &ast::Expr, cb: &mut dyn FnMut(&ast::Expr)) { + let walk_loop = |cb: &mut dyn FnMut(&ast::Expr), label, body: Option| { + for_each_break_expr(label, body.and_then(|it| it.stmt_list()), &mut |b| { + cb(&ast::Expr::BreakExpr(b)) + }) + }; match expr { ast::Expr::BlockExpr(b) => { match b.modifier() { @@ -291,11 +299,9 @@ pub fn for_each_tail_expr(expr: &ast::Expr, cb: &mut dyn FnMut(&ast::Expr)) { } } } - ast::Expr::LoopExpr(l) => { - for_each_break_expr(l.label(), l.loop_body().and_then(|it| it.stmt_list()), &mut |b| { - cb(&ast::Expr::BreakExpr(b)) - }) - } + ast::Expr::LoopExpr(l) => walk_loop(cb, l.label(), l.loop_body()), + ast::Expr::WhileExpr(w) => walk_loop(cb, w.label(), w.loop_body()), + ast::Expr::ForExpr(f) => walk_loop(cb, f.label(), f.loop_body()), ast::Expr::MatchExpr(m) => { if let Some(arms) = m.match_arm_list() { arms.arms().filter_map(|arm| arm.expr()).for_each(|e| for_each_tail_expr(&e, cb)); @@ -311,7 +317,6 @@ pub fn for_each_tail_expr(expr: &ast::Expr, cb: &mut dyn FnMut(&ast::Expr)) { | ast::Expr::ClosureExpr(_) | ast::Expr::ContinueExpr(_) | ast::Expr::FieldExpr(_) - | ast::Expr::ForExpr(_) | ast::Expr::IndexExpr(_) | ast::Expr::Literal(_) | ast::Expr::MacroExpr(_) @@ -325,10 +330,10 @@ pub fn for_each_tail_expr(expr: &ast::Expr, cb: &mut dyn FnMut(&ast::Expr)) { | ast::Expr::ReturnExpr(_) | ast::Expr::TryExpr(_) | ast::Expr::TupleExpr(_) - | ast::Expr::WhileExpr(_) | ast::Expr::LetExpr(_) | ast::Expr::UnderscoreExpr(_) - | ast::Expr::YieldExpr(_) => cb(expr), + | ast::Expr::YieldExpr(_) + | ast::Expr::YeetExpr(_) => cb(expr), } } @@ -447,7 +452,7 @@ pub fn parse_tt_as_comma_sep_paths(input: ast::TokenTree) -> Option it.path(), diff --git a/crates/ide-db/src/test_data/test_symbol_index_collection.txt b/crates/ide-db/src/test_data/test_symbol_index_collection.txt index 2f531ca0c7..8c11408dec 100644 --- a/crates/ide-db/src/test_data/test_symbol_index_collection.txt +++ b/crates/ide-db/src/test_data/test_symbol_index_collection.txt @@ -14,11 +14,7 @@ name: "Alias", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: TYPE_ALIAS, @@ -36,11 +32,7 @@ name: "CONST", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: CONST, @@ -58,11 +50,7 @@ name: "CONST_WITH_INNER", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: CONST, @@ -80,11 +68,7 @@ name: "Enum", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: ENUM, @@ -102,11 +86,7 @@ name: "Macro", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: MACRO_DEF, @@ -124,11 +104,7 @@ name: "STATIC", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: STATIC, @@ -146,11 +122,7 @@ name: "Struct", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -168,13 +140,7 @@ name: "StructFromMacro", loc: DeclarationLocation { hir_file_id: HirFileId( - MacroFile( - MacroFile { - macro_call_id: MacroCallId( - 0, - ), - }, - ), + 2147483648, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -192,11 +158,7 @@ name: "StructInFn", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: 
SyntaxNodePtr { kind: STRUCT, @@ -216,11 +178,7 @@ name: "StructInNamedConst", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -240,11 +198,7 @@ name: "StructInUnnamedConst", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -262,11 +216,7 @@ name: "Trait", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: TRAIT, @@ -284,11 +234,7 @@ name: "Union", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: UNION, @@ -306,11 +252,7 @@ name: "a_mod", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: MODULE, @@ -328,11 +270,7 @@ name: "b_mod", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: MODULE, @@ -350,11 +288,7 @@ name: "define_struct", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: MACRO_RULES, @@ -372,11 +306,7 @@ name: "impl_fn", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: FN, @@ -394,11 +324,7 @@ name: "macro_rules_macro", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: MACRO_RULES, @@ -416,11 +342,7 @@ name: "main", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: FN, @@ -438,11 +360,7 @@ name: "trait_fn", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: FN, @@ -475,11 +393,7 @@ name: "StructInModA", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -510,11 +424,7 @@ name: "StructInModB", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 1, - ), - ), + 1, ), ptr: SyntaxNodePtr { kind: STRUCT, diff --git a/crates/ide-db/src/tests/sourcegen_lints.rs b/crates/ide-db/src/tests/sourcegen_lints.rs index 5042f6d815..c7d5f3613d 100644 --- a/crates/ide-db/src/tests/sourcegen_lints.rs +++ b/crates/ide-db/src/tests/sourcegen_lints.rs @@ -241,9 +241,9 @@ fn generate_descriptor_clippy(buf: &mut String, path: &Path) { buf.push_str(r#"pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &["#); for (id, children) in clippy_groups { - let children = children.iter().map(|id| format!("clippy::{}", id)).collect::>(); + let children = children.iter().map(|id| format!("clippy::{id}")).collect::>(); if !children.is_empty() { - let lint_ident = format!("clippy::{}", id); + let lint_ident = format!("clippy::{id}"); let description = format!("lint group for: {}", children.iter().join(", ")); push_lint_group(buf, &lint_ident, &description, &children); } @@ -273,7 +273,7 @@ fn push_lint_group(buf: &mut String, label: &str, description: &str, children: & push_lint_completion(buf, label, description); - let children = format!("&[{}]", children.iter().map(|it| format!("\"{}\"", it)).join(", ")); + let children = format!("&[{}]", children.iter().map(|it| format!("\"{it}\"")).join(", ")); format_to!( buf, r###" diff --git a/crates/ide-diagnostics/src/handlers/break_outside_of_loop.rs 
b/crates/ide-diagnostics/src/handlers/break_outside_of_loop.rs index 0c92e706b3..10e637979f 100644 --- a/crates/ide-diagnostics/src/handlers/break_outside_of_loop.rs +++ b/crates/ide-diagnostics/src/handlers/break_outside_of_loop.rs @@ -37,35 +37,13 @@ fn foo() { ); } - #[test] - fn try_blocks_are_borders() { - check_diagnostics( - r#" -fn foo() { - 'a: loop { - try { - break; - //^^^^^ error: break outside of loop - break 'a; - //^^^^^^^^ error: break outside of loop - continue; - //^^^^^^^^ error: continue outside of loop - continue 'a; - //^^^^^^^^^^^ error: continue outside of loop - }; - } -} -"#, - ); - } - #[test] fn async_blocks_are_borders() { check_diagnostics( r#" fn foo() { 'a: loop { - try { + async { break; //^^^^^ error: break outside of loop break 'a; @@ -87,7 +65,7 @@ fn foo() { r#" fn foo() { 'a: loop { - try { + || { break; //^^^^^ error: break outside of loop break 'a; @@ -121,6 +99,24 @@ fn foo() { ); } + #[test] + fn try_blocks_pass_through() { + check_diagnostics( + r#" +fn foo() { + 'a: loop { + try { + break; + break 'a; + continue; + continue 'a; + }; + } +} +"#, + ); + } + #[test] fn label_blocks() { check_diagnostics( diff --git a/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs b/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs index 3034295196..e8df6dcf28 100644 --- a/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs +++ b/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs @@ -125,7 +125,7 @@ pub(crate) fn json_in_items( .severity(Severity::WeakWarning) .with_fixes(Some(vec![{ let mut scb = SourceChangeBuilder::new(file_id); - let scope = match import_scope.clone() { + let scope = match import_scope { ImportScope::File(it) => ImportScope::File(scb.make_mut(it)), ImportScope::Module(it) => ImportScope::Module(scb.make_mut(it)), ImportScope::Block(it) => ImportScope::Block(scb.make_mut(it)), diff --git a/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs b/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs index 5f8b3e543b..c5db8c3741 100644 --- a/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs +++ b/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs @@ -13,7 +13,7 @@ pub(crate) fn mismatched_arg_count( d: &hir::MismatchedArgCount, ) -> Diagnostic { let s = if d.expected == 1 { "" } else { "s" }; - let message = format!("expected {} argument{}, found {}", d.expected, s, d.found); + let message = format!("expected {} argument{s}, found {}", d.expected, d.found); Diagnostic::new("mismatched-arg-count", message, invalid_args_range(ctx, d)) } diff --git a/crates/ide-diagnostics/src/handlers/missing_fields.rs b/crates/ide-diagnostics/src/handlers/missing_fields.rs index 7f140eb6a7..43af4d4f16 100644 --- a/crates/ide-diagnostics/src/handlers/missing_fields.rs +++ b/crates/ide-diagnostics/src/handlers/missing_fields.rs @@ -128,9 +128,9 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingFields) -> Option u32; // Safe intrinsic pub fn floorf32(x: f32) -> f32; // Unsafe intrinsic } diff --git a/crates/ide-diagnostics/src/handlers/no_such_field.rs b/crates/ide-diagnostics/src/handlers/no_such_field.rs index d8f2a9de98..8da04e628d 100644 --- a/crates/ide-diagnostics/src/handlers/no_such_field.rs +++ b/crates/ide-diagnostics/src/handlers/no_such_field.rs @@ -68,7 +68,7 @@ fn missing_record_expr_field_fixes( } let new_field = make::record_field( None, - make::name(&record_expr_field.field_name()?.ident_token()?.text()), + make::name(record_expr_field.field_name()?.ident_token()?.text()), 
make::ty(&new_field_type.display_source_code(sema.db, module.into()).ok()?), ); @@ -78,13 +78,13 @@ fn missing_record_expr_field_fixes( let mut new_field = new_field.to_string(); if usage_file_id != def_file_id { - new_field = format!("pub(crate) {}", new_field); + new_field = format!("pub(crate) {new_field}"); } - new_field = format!("\n{}{}", indent, new_field); + new_field = format!("\n{indent}{new_field}"); let needs_comma = !last_field_syntax.to_string().ends_with(','); if needs_comma { - new_field = format!(",{}", new_field); + new_field = format!(",{new_field}"); } let source_change = SourceChange::from_text_edit( diff --git a/crates/ide-diagnostics/src/handlers/private_assoc_item.rs b/crates/ide-diagnostics/src/handlers/private_assoc_item.rs new file mode 100644 index 0000000000..b363a516dd --- /dev/null +++ b/crates/ide-diagnostics/src/handlers/private_assoc_item.rs @@ -0,0 +1,124 @@ +use either::Either; + +use crate::{Diagnostic, DiagnosticsContext}; + +// Diagnostic: private-assoc-item +// +// This diagnostic is triggered if the referenced associated item is not visible from the current +// module. +pub(crate) fn private_assoc_item( + ctx: &DiagnosticsContext<'_>, + d: &hir::PrivateAssocItem, +) -> Diagnostic { + // FIXME: add quickfix + let name = match d.item.name(ctx.sema.db) { + Some(name) => format!("`{}` ", name), + None => String::new(), + }; + Diagnostic::new( + "private-assoc-item", + format!( + "{} {}is private", + match d.item { + hir::AssocItem::Function(_) => "function", + hir::AssocItem::Const(_) => "const", + hir::AssocItem::TypeAlias(_) => "type alias", + }, + name, + ), + ctx.sema + .diagnostics_display_range(d.expr_or_pat.clone().map(|it| match it { + Either::Left(it) => it.into(), + Either::Right(it) => match it { + Either::Left(it) => it.into(), + Either::Right(it) => it.into(), + }, + })) + .range, + ) +} + +#[cfg(test)] +mod tests { + use crate::tests::check_diagnostics; + + #[test] + fn private_method() { + check_diagnostics( + r#" +mod module { + pub struct Struct; + impl Struct { + fn method(&self) {} + } +} +fn main(s: module::Struct) { + s.method(); + //^^^^^^^^^^ error: function `method` is private +} +"#, + ); + } + + #[test] + fn private_func() { + check_diagnostics( + r#" +mod module { + pub struct Struct; + impl Struct { + fn func() {} + } +} +fn main() { + module::Struct::func(); + //^^^^^^^^^^^^^^^^^^^^ error: function `func` is private +} +"#, + ); + } + + #[test] + fn private_const() { + check_diagnostics( + r#" +mod module { + pub struct Struct; + impl Struct { + const CONST: u32 = 0; + } +} +fn main() { + module::Struct::CONST; + //^^^^^^^^^^^^^^^^^^^^^ error: const `CONST` is private +} +"#, + ); + } + + #[test] + fn private_but_shadowed_in_deref() { + check_diagnostics( + r#" +//- minicore: deref +mod module { + pub struct Struct { field: Inner } + pub struct Inner; + impl core::ops::Deref for Struct { + type Target = Inner; + fn deref(&self) -> &Inner { &self.field } + } + impl Struct { + fn method(&self) {} + } + impl Inner { + pub fn method(&self) {} + } +} +fn main(s: module::Struct) { + s.method(); +} +"#, + ); + } +} diff --git a/crates/ide-diagnostics/src/handlers/private_field.rs b/crates/ide-diagnostics/src/handlers/private_field.rs new file mode 100644 index 0000000000..e630ae3686 --- /dev/null +++ b/crates/ide-diagnostics/src/handlers/private_field.rs @@ -0,0 +1,68 @@ +use crate::{Diagnostic, DiagnosticsContext}; + +// Diagnostic: private-field +// +// This diagnostic is triggered if the accessed field is not visible from the 
current module. +pub(crate) fn private_field(ctx: &DiagnosticsContext<'_>, d: &hir::PrivateField) -> Diagnostic { + // FIXME: add quickfix + Diagnostic::new( + "private-field", + format!( + "field `{}` of `{}` is private", + d.field.name(ctx.sema.db), + d.field.parent_def(ctx.sema.db).name(ctx.sema.db) + ), + ctx.sema.diagnostics_display_range(d.expr.clone().map(|it| it.into())).range, + ) +} + +#[cfg(test)] +mod tests { + use crate::tests::check_diagnostics; + + #[test] + fn private_field() { + check_diagnostics( + r#" +mod module { pub struct Struct { field: u32 } } +fn main(s: module::Struct) { + s.field; + //^^^^^^^ error: field `field` of `Struct` is private +} +"#, + ); + } + + #[test] + fn private_tuple_field() { + check_diagnostics( + r#" +mod module { pub struct Struct(u32); } +fn main(s: module::Struct) { + s.0; + //^^^ error: field `0` of `Struct` is private +} +"#, + ); + } + + #[test] + fn private_but_shadowed_in_deref() { + check_diagnostics( + r#" +//- minicore: deref +mod module { + pub struct Struct { field: Inner } + pub struct Inner { pub field: u32 } + impl core::ops::Deref for Struct { + type Target = Inner; + fn deref(&self) -> &Inner { &self.field } + } +} +fn main(s: module::Struct) { + s.field; +} +"#, + ); + } +} diff --git a/crates/ide-diagnostics/src/handlers/type_mismatch.rs b/crates/ide-diagnostics/src/handlers/type_mismatch.rs index 62c69f90ba..2adae165e4 100644 --- a/crates/ide-diagnostics/src/handlers/type_mismatch.rs +++ b/crates/ide-diagnostics/src/handlers/type_mismatch.rs @@ -106,11 +106,11 @@ fn add_missing_ok_or_some( } let mut builder = TextEdit::builder(); - builder.insert(expr.syntax().text_range().start(), format!("{}(", variant_name)); + builder.insert(expr.syntax().text_range().start(), format!("{variant_name}(")); builder.insert(expr.syntax().text_range().end(), ")".to_string()); let source_change = SourceChange::from_text_edit(d.expr.file_id.original_file(ctx.sema.db), builder.finish()); - let name = format!("Wrap in {}", variant_name); + let name = format!("Wrap in {variant_name}"); acc.push(fix("wrap_in_constructor", &name, source_change, expr_range)); Some(()) } diff --git a/crates/ide-diagnostics/src/handlers/unlinked_file.rs b/crates/ide-diagnostics/src/handlers/unlinked_file.rs index c626932f19..be70f0ac4f 100644 --- a/crates/ide-diagnostics/src/handlers/unlinked_file.rs +++ b/crates/ide-diagnostics/src/handlers/unlinked_file.rs @@ -64,7 +64,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, file_id: FileId) -> Option> { // `submod/bla.rs` -> `submod.rs` let parent_mod = (|| { let (name, _) = parent.name_and_extension()?; - parent.parent()?.join(&format!("{}.rs", name)) + parent.parent()?.join(&format!("{name}.rs")) })(); paths.extend(parent_mod); paths @@ -99,8 +99,8 @@ fn make_fixes( matches!(item, ast::Item::Module(m) if m.item_list().is_none()) } - let mod_decl = format!("mod {};", new_mod_name); - let pub_mod_decl = format!("pub mod {};", new_mod_name); + let mod_decl = format!("mod {new_mod_name};"); + let pub_mod_decl = format!("pub mod {new_mod_name};"); let ast: ast::SourceFile = db.parse(parent_file_id).tree(); @@ -125,8 +125,8 @@ fn make_fixes( Some(last) => { cov_mark::hit!(unlinked_file_append_to_existing_mods); let offset = last.syntax().text_range().end(); - mod_decl_builder.insert(offset, format!("\n{}", mod_decl)); - pub_mod_decl_builder.insert(offset, format!("\n{}", pub_mod_decl)); + mod_decl_builder.insert(offset, format!("\n{mod_decl}")); + pub_mod_decl_builder.insert(offset, format!("\n{pub_mod_decl}")); } None => { // 
Prepend before the first item in the file. @@ -134,15 +134,15 @@ fn make_fixes( Some(item) => { cov_mark::hit!(unlinked_file_prepend_before_first_item); let offset = item.syntax().text_range().start(); - mod_decl_builder.insert(offset, format!("{}\n\n", mod_decl)); - pub_mod_decl_builder.insert(offset, format!("{}\n\n", pub_mod_decl)); + mod_decl_builder.insert(offset, format!("{mod_decl}\n\n")); + pub_mod_decl_builder.insert(offset, format!("{pub_mod_decl}\n\n")); } None => { // No items in the file, so just append at the end. cov_mark::hit!(unlinked_file_empty_file); let offset = ast.syntax().text_range().end(); - mod_decl_builder.insert(offset, format!("{}\n", mod_decl)); - pub_mod_decl_builder.insert(offset, format!("{}\n", pub_mod_decl)); + mod_decl_builder.insert(offset, format!("{mod_decl}\n")); + pub_mod_decl_builder.insert(offset, format!("{pub_mod_decl}\n")); } } } @@ -152,13 +152,13 @@ fn make_fixes( Some(vec![ fix( "add_mod_declaration", - &format!("Insert `{}`", mod_decl), + &format!("Insert `{mod_decl}`"), SourceChange::from_text_edit(parent_file_id, mod_decl_builder.finish()), trigger_range, ), fix( "add_pub_mod_declaration", - &format!("Insert `{}`", pub_mod_decl), + &format!("Insert `{pub_mod_decl}`"), SourceChange::from_text_edit(parent_file_id, pub_mod_decl_builder.finish()), trigger_range, ), diff --git a/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs b/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs index 87531f4acf..1a5efff2c0 100644 --- a/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs +++ b/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs @@ -13,7 +13,7 @@ pub(crate) fn unresolved_macro_call( let bang = if d.is_bang { "!" } else { "" }; Diagnostic::new( "unresolved-macro-call", - format!("unresolved macro `{}{}`", d.path, bang), + format!("unresolved macro `{}{bang}`", d.path), display_range, ) .experimental() diff --git a/crates/ide-diagnostics/src/handlers/unresolved_module.rs b/crates/ide-diagnostics/src/handlers/unresolved_module.rs index b8f2a9e94a..91395f1d84 100644 --- a/crates/ide-diagnostics/src/handlers/unresolved_module.rs +++ b/crates/ide-diagnostics/src/handlers/unresolved_module.rs @@ -16,7 +16,7 @@ pub(crate) fn unresolved_module( "unresolved-module", match &*d.candidates { [] => "unresolved module".to_string(), - [candidate] => format!("unresolved module, can't find module file: {}", candidate), + [candidate] => format!("unresolved module, can't find module file: {candidate}"), [candidates @ .., last] => { format!( "unresolved module, can't find module file: {}, or {}", diff --git a/crates/ide-diagnostics/src/handlers/unresolved_proc_macro.rs b/crates/ide-diagnostics/src/handlers/unresolved_proc_macro.rs index 23818d883f..b2ed19104e 100644 --- a/crates/ide-diagnostics/src/handlers/unresolved_proc_macro.rs +++ b/crates/ide-diagnostics/src/handlers/unresolved_proc_macro.rs @@ -26,7 +26,7 @@ pub(crate) fn unresolved_proc_macro( }; let message = match &d.macro_name { - Some(name) => format!("proc macro `{}` not expanded", name), + Some(name) => format!("proc macro `{name}` not expanded"), None => "proc macro not expanded".to_string(), }; let severity = if config_enabled { Severity::Error } else { Severity::WeakWarning }; diff --git a/crates/ide-diagnostics/src/lib.rs b/crates/ide-diagnostics/src/lib.rs index d81e36a1f8..64ba08ac88 100644 --- a/crates/ide-diagnostics/src/lib.rs +++ b/crates/ide-diagnostics/src/lib.rs @@ -37,6 +37,8 @@ mod handlers { pub(crate) mod missing_match_arms; pub(crate) mod 
missing_unsafe; pub(crate) mod no_such_field; + pub(crate) mod private_assoc_item; + pub(crate) mod private_field; pub(crate) mod replace_filter_map_next_with_find_map; pub(crate) mod type_mismatch; pub(crate) mod unimplemented_builtin_macro; @@ -218,7 +220,7 @@ pub fn diagnostics( // [#34344] Only take first 128 errors to prevent slowing down editor/ide, the number 128 is chosen arbitrarily. res.extend( parse.errors().iter().take(128).map(|err| { - Diagnostic::new("syntax-error", format!("Syntax Error: {}", err), err.range()) + Diagnostic::new("syntax-error", format!("Syntax Error: {err}"), err.range()) }), ); @@ -227,7 +229,7 @@ pub fn diagnostics( for node in parse.syntax().descendants() { handlers::useless_braces::useless_braces(&mut res, file_id, &node); handlers::field_shorthand::field_shorthand(&mut res, file_id, &node); - handlers::json_is_not_rust::json_in_items(&sema, &mut res, file_id, &node, &config); + handlers::json_is_not_rust::json_in_items(&sema, &mut res, file_id, &node, config); } let module = sema.to_module_def(file_id); @@ -254,6 +256,8 @@ pub fn diagnostics( AnyDiagnostic::MissingMatchArms(d) => handlers::missing_match_arms::missing_match_arms(&ctx, &d), AnyDiagnostic::MissingUnsafe(d) => handlers::missing_unsafe::missing_unsafe(&ctx, &d), AnyDiagnostic::NoSuchField(d) => handlers::no_such_field::no_such_field(&ctx, &d), + AnyDiagnostic::PrivateAssocItem(d) => handlers::private_assoc_item::private_assoc_item(&ctx, &d), + AnyDiagnostic::PrivateField(d) => handlers::private_field::private_field(&ctx, &d), AnyDiagnostic::ReplaceFilterMapNextWithFindMap(d) => handlers::replace_filter_map_next_with_find_map::replace_filter_map_next_with_find_map(&ctx, &d), AnyDiagnostic::TypeMismatch(d) => handlers::type_mismatch::type_mismatch(&ctx, &d), AnyDiagnostic::UnimplementedBuiltinMacro(d) => handlers::unimplemented_builtin_macro::unimplemented_builtin_macro(&ctx, &d), diff --git a/crates/ide-diagnostics/src/tests.rs b/crates/ide-diagnostics/src/tests.rs index 729619cfde..afa641c733 100644 --- a/crates/ide-diagnostics/src/tests.rs +++ b/crates/ide-diagnostics/src/tests.rs @@ -75,7 +75,7 @@ pub(crate) fn check_no_fix(ra_fixture: &str) { ) .pop() .unwrap(); - assert!(diagnostic.fixes.is_none(), "got a fix when none was expected: {:?}", diagnostic); + assert!(diagnostic.fixes.is_none(), "got a fix when none was expected: {diagnostic:?}"); } pub(crate) fn check_expect(ra_fixture: &str, expect: Expect) { @@ -102,7 +102,7 @@ pub(crate) fn check_diagnostics_with_config(config: DiagnosticsConfig, ra_fixtur for file_id in files { let diagnostics = super::diagnostics(&db, &config, &AssistResolveStrategy::All, file_id); - let expected = extract_annotations(&*db.file_text(file_id)); + let expected = extract_annotations(&db.file_text(file_id)); let mut actual = diagnostics .into_iter() .map(|d| { diff --git a/crates/ide-diagnostics/src/tests/sourcegen.rs b/crates/ide-diagnostics/src/tests/sourcegen.rs index ec6558a46e..9e7fcfc590 100644 --- a/crates/ide-diagnostics/src/tests/sourcegen.rs +++ b/crates/ide-diagnostics/src/tests/sourcegen.rs @@ -11,7 +11,7 @@ fn sourcegen_diagnostic_docs() { diagnostics.into_iter().map(|it| it.to_string()).collect::>().join("\n\n"); let contents = sourcegen::add_preamble("sourcegen_diagnostic_docs", contents); let dst = project_root().join("docs/user/generated_diagnostic.adoc"); - fs::write(&dst, &contents).unwrap(); + fs::write(dst, contents).unwrap(); } #[derive(Debug)] @@ -39,7 +39,7 @@ impl Diagnostic { for block in comment_blocks { let id = block.id; if let 
Err(msg) = is_valid_diagnostic_name(&id) { - panic!("invalid diagnostic name: {:?}:\n {}", id, msg) + panic!("invalid diagnostic name: {id:?}:\n {msg}") } let doc = block.contents.join("\n"); let location = sourcegen::Location { file: path.clone(), line: block.line }; diff --git a/crates/ide-ssr/src/parsing.rs b/crates/ide-ssr/src/parsing.rs index f6220b928a..d78d009681 100644 --- a/crates/ide-ssr/src/parsing.rs +++ b/crates/ide-ssr/src/parsing.rs @@ -352,7 +352,7 @@ impl NodeKind { impl Placeholder { fn new(name: SmolStr, constraints: Vec) -> Self { Self { - stand_in_name: format!("__placeholder_{}", name), + stand_in_name: format!("__placeholder_{name}"), constraints, ident: Var(name.to_string()), } diff --git a/crates/ide-ssr/src/tests.rs b/crates/ide-ssr/src/tests.rs index 1ecb7aa9aa..61698fca80 100644 --- a/crates/ide-ssr/src/tests.rs +++ b/crates/ide-ssr/src/tests.rs @@ -121,7 +121,7 @@ fn print_match_debug_info(match_finder: &MatchFinder<'_>, file_id: FileId, snipp snippet ); for (index, d) in debug_info.iter().enumerate() { - println!("Node #{}\n{:#?}\n", index, d); + println!("Node #{index}\n{d:#?}\n"); } } @@ -144,7 +144,7 @@ fn assert_no_match(pattern: &str, code: &str) { let matches = match_finder.matches().flattened().matches; if !matches.is_empty() { print_match_debug_info(&match_finder, position.file_id, &matches[0].matched_text()); - panic!("Got {} matches when we expected none: {:#?}", matches.len(), matches); + panic!("Got {} matches when we expected none: {matches:#?}", matches.len()); } } diff --git a/crates/ide/src/call_hierarchy.rs b/crates/ide/src/call_hierarchy.rs index 5a8cda8fb3..48bcd37b62 100644 --- a/crates/ide/src/call_hierarchy.rs +++ b/crates/ide/src/call_hierarchy.rs @@ -57,7 +57,8 @@ pub(crate) fn incoming_calls( .flat_map(|func| func.usages(sema).all()); for (_, references) in references { - let references = references.into_iter().map(|FileReference { name, .. }| name); + let references = + references.iter().filter_map(|FileReference { name, .. 
}| name.as_name_ref()); for name in references { // This target is the containing function let nav = sema.ancestors_with_macros(name.syntax().clone()).find_map(|node| { @@ -457,4 +458,28 @@ fn caller$0() { expect![[]], ); } + + #[test] + fn test_trait_method_call_hierarchy() { + check_hierarchy( + r#" +trait T1 { + fn call$0ee(); +} + +struct S1; + +impl T1 for S1 { + fn callee() {} +} + +fn caller() { + S1::callee(); +} +"#, + expect![["callee Function FileId(0) 15..27 18..24"]], + expect![["caller Function FileId(0) 82..115 85..91 : [104..110]"]], + expect![[]], + ); + } } diff --git a/crates/ide/src/doc_links.rs b/crates/ide/src/doc_links.rs index d96827326c..b4a7f2b918 100644 --- a/crates/ide/src/doc_links.rs +++ b/crates/ide/src/doc_links.rs @@ -273,7 +273,7 @@ impl DocCommentToken { let (in_expansion_range, link, ns) = extract_definitions_from_docs(&docs).into_iter().find_map(|(range, link, ns)| { let mapped = doc_mapping.map(range)?; - (mapped.value.contains(abs_in_expansion_offset)).then(|| (mapped.value, link, ns)) + (mapped.value.contains(abs_in_expansion_offset)).then_some((mapped.value, link, ns)) })?; // get the relative range to the doc/attribute in the expansion let in_expansion_relative_range = in_expansion_range - descended_prefix_len - token_start; @@ -285,7 +285,7 @@ impl DocCommentToken { } } -fn broken_link_clone_cb<'a>(link: BrokenLink<'a>) -> Option<(CowStr<'a>, CowStr<'a>)> { +fn broken_link_clone_cb(link: BrokenLink<'_>) -> Option<(CowStr<'_>, CowStr<'_>)> { Some((/*url*/ link.reference.clone(), /*title*/ link.reference)) } @@ -453,7 +453,7 @@ fn get_doc_base_url(db: &RootDatabase, def: Definition) -> Option { })? } }; - Url::parse(&base).ok()?.join(&format!("{}/", display_name)).ok() + Url::parse(&base).ok()?.join(&format!("{display_name}/")).ok() } /// Get the filename and extension generated for a symbol by rustdoc. 
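// Several hunks in this part of the patch (doc_links.rs above, goto_implementation.rs and
// highlight_related.rs below) replace `bool::then(|| value)` with `bool::then_some(value)`.
// A minimal, self-contained sketch of the difference between the two std methods follows;
// the `build_label` helper is purely illustrative and not part of the patch.

fn build_label() -> String {
    String::from("write")
}

fn category(is_write: bool) -> (Option<&'static str>, Option<String>) {
    // `then_some` is eager: its argument is evaluated before the flag is checked,
    // which is fine when the value is already at hand, as in these hunks.
    let eager = is_write.then_some("write");
    // `then` is lazy: the closure only runs when the flag is true, so it stays the
    // better fit when building the value has a real cost.
    let lazy = is_write.then(build_label);
    (eager, lazy)
}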
@@ -488,7 +488,7 @@ fn filename_and_frag_for_def( Some(kw) => { format!("keyword.{}.html", kw.trim_matches('"')) } - None => format!("{}/index.html", name), + None => format!("{name}/index.html"), }, None => String::from("index.html"), }, diff --git a/crates/ide/src/doc_links/intra_doc_links.rs b/crates/ide/src/doc_links/intra_doc_links.rs index 1df9aaae28..13088bdc3b 100644 --- a/crates/ide/src/doc_links/intra_doc_links.rs +++ b/crates/ide/src/doc_links/intra_doc_links.rs @@ -63,8 +63,8 @@ mod tests { fn check(link: &str, expected: Expect) { let (l, a) = parse_intra_doc_link(link); - let a = a.map_or_else(String::new, |a| format!(" ({:?})", a)); - expected.assert_eq(&format!("{}{}", l, a)); + let a = a.map_or_else(String::new, |a| format!(" ({a:?})")); + expected.assert_eq(&format!("{l}{a}")); } #[test] diff --git a/crates/ide/src/doc_links/tests.rs b/crates/ide/src/doc_links/tests.rs index c6bfb6b9d0..104181a33e 100644 --- a/crates/ide/src/doc_links/tests.rs +++ b/crates/ide/src/doc_links/tests.rs @@ -40,7 +40,7 @@ fn check_doc_links(ra_fixture: &str) { .into_iter() .map(|(_, link, ns)| { let def = resolve_doc_path_for_def(sema.db, cursor_def, &link, ns) - .unwrap_or_else(|| panic!("Failed to resolve {}", link)); + .unwrap_or_else(|| panic!("Failed to resolve {link}")); let nav_target = def.try_to_nav(sema.db).unwrap(); let range = FileRange { file_id: nav_target.file_id, range: nav_target.focus_or_full_range() }; diff --git a/crates/ide/src/expand_macro.rs b/crates/ide/src/expand_macro.rs index 93252339cd..418043d679 100644 --- a/crates/ide/src/expand_macro.rs +++ b/crates/ide/src/expand_macro.rs @@ -163,7 +163,7 @@ fn _format( ) -> Option { use ide_db::base_db::{FileLoader, SourceDatabase}; // hack until we get hygiene working (same character amount to preserve formatting as much as possible) - const DOLLAR_CRATE_REPLACE: &str = &"__r_a_"; + const DOLLAR_CRATE_REPLACE: &str = "__r_a_"; let expansion = expansion.replace("$crate", DOLLAR_CRATE_REPLACE); let (prefix, suffix) = match kind { SyntaxKind::MACRO_PAT => ("fn __(", ": u32);"), diff --git a/crates/ide/src/extend_selection.rs b/crates/ide/src/extend_selection.rs index 45f1fd7484..9f78c75e90 100644 --- a/crates/ide/src/extend_selection.rs +++ b/crates/ide/src/extend_selection.rs @@ -205,7 +205,7 @@ fn extend_single_word_in_comment_or_string( } let start_idx = before.rfind(non_word_char)? as u32; - let end_idx = after.find(non_word_char).unwrap_or_else(|| after.len()) as u32; + let end_idx = after.find(non_word_char).unwrap_or(after.len()) as u32; let from: TextSize = (start_idx + 1).into(); let to: TextSize = (cursor_position + end_idx).into(); diff --git a/crates/ide/src/goto_declaration.rs b/crates/ide/src/goto_declaration.rs index 926292c9b3..c7130a2a4b 100644 --- a/crates/ide/src/goto_declaration.rs +++ b/crates/ide/src/goto_declaration.rs @@ -1,18 +1,22 @@ -use hir::Semantics; +use hir::{AsAssocItem, Semantics}; use ide_db::{ defs::{Definition, NameClass, NameRefClass}, RootDatabase, }; use syntax::{ast, match_ast, AstNode, SyntaxKind::*, T}; -use crate::{FilePosition, NavigationTarget, RangeInfo}; +use crate::{ + goto_definition::goto_definition, navigation_target::TryToNav, FilePosition, NavigationTarget, + RangeInfo, +}; // Feature: Go to Declaration // // Navigates to the declaration of an identifier. // -// This is currently the same as `Go to Definition` with the exception of outline modules where it -// will navigate to the `mod name;` item declaration. 
+// This is the same as `Go to Definition` with the following exceptions: +// - outline modules will navigate to the `mod name;` item declaration +// - trait assoc items will navigate to the assoc item of the trait declaration opposed to the trait impl pub(crate) fn goto_declaration( db: &RootDatabase, position: FilePosition, @@ -32,25 +36,37 @@ pub(crate) fn goto_declaration( match parent { ast::NameRef(name_ref) => match NameRefClass::classify(&sema, &name_ref)? { NameRefClass::Definition(it) => Some(it), - _ => None + NameRefClass::FieldShorthand { field_ref, .. } => return field_ref.try_to_nav(db), }, ast::Name(name) => match NameClass::classify(&sema, &name)? { - NameClass::Definition(it) => Some(it), - _ => None + NameClass::Definition(it) | NameClass::ConstReference(it) => Some(it), + NameClass::PatFieldShorthand { field_ref, .. } => return field_ref.try_to_nav(db), }, _ => None } }; - match def? { + let assoc = match def? { Definition::Module(module) => { - Some(NavigationTarget::from_module_to_decl(db, module)) + return Some(NavigationTarget::from_module_to_decl(db, module)) } + Definition::Const(c) => c.as_assoc_item(db), + Definition::TypeAlias(ta) => ta.as_assoc_item(db), + Definition::Function(f) => f.as_assoc_item(db), _ => None, - } + }?; + + let trait_ = assoc.containing_trait_impl(db)?; + let name = Some(assoc.name(db)?); + let item = trait_.items(db).into_iter().find(|it| it.name(db) == name)?; + item.try_to_nav(db) }) .collect(); - Some(RangeInfo::new(range, info)) + if info.is_empty() { + goto_definition(db, position) + } else { + Some(RangeInfo::new(range, info)) + } } #[cfg(test)] @@ -109,4 +125,89 @@ mod foo { "#, ) } + + #[test] + fn goto_decl_goto_def_fallback() { + check( + r#" +struct Foo; + // ^^^ +impl Foo$0 {} +"#, + ); + } + + #[test] + fn goto_decl_assoc_item_no_impl_item() { + check( + r#" +trait Trait { + const C: () = (); + // ^ +} +impl Trait for () {} + +fn main() { + <()>::C$0; +} +"#, + ); + } + + #[test] + fn goto_decl_assoc_item() { + check( + r#" +trait Trait { + const C: () = (); + // ^ +} +impl Trait for () { + const C: () = (); +} + +fn main() { + <()>::C$0; +} +"#, + ); + check( + r#" +trait Trait { + const C: () = (); + // ^ +} +impl Trait for () { + const C$0: () = (); +} +"#, + ); + } + + #[test] + fn goto_decl_field_pat_shorthand() { + check( + r#" +struct Foo { field: u32 } + //^^^^^ +fn main() { + let Foo { field$0 }; +} +"#, + ); + } + + #[test] + fn goto_decl_constructor_shorthand() { + check( + r#" +struct Foo { field: u32 } + //^^^^^ +fn main() { + let field = 0; + Foo { field$0 }; +} +"#, + ); + } } diff --git a/crates/ide/src/goto_definition.rs b/crates/ide/src/goto_definition.rs index 43f7a529bc..73fd518a9e 100644 --- a/crates/ide/src/goto_definition.rs +++ b/crates/ide/src/goto_definition.rs @@ -187,7 +187,7 @@ mod tests { let (analysis, position) = fixture::position(ra_fixture); let navs = analysis.goto_definition(position).unwrap().expect("no definition found").info; - assert!(navs.is_empty(), "didn't expect this to resolve anywhere: {:?}", navs) + assert!(navs.is_empty(), "didn't expect this to resolve anywhere: {navs:?}") } #[test] diff --git a/crates/ide/src/goto_implementation.rs b/crates/ide/src/goto_implementation.rs index b3f711b6b8..190ab80ba0 100644 --- a/crates/ide/src/goto_implementation.rs +++ b/crates/ide/src/goto_implementation.rs @@ -110,7 +110,7 @@ fn impls_for_trait_item( .filter_map(|imp| { let item = imp.items(sema.db).iter().find_map(|itm| { let itm_name = itm.name(sema.db)?; - (itm_name == fun_name).then(|| 
*itm) + (itm_name == fun_name).then_some(*itm) })?; item.try_to_nav(sema.db) }) diff --git a/crates/ide/src/highlight_related.rs b/crates/ide/src/highlight_related.rs index 540a115832..55f8779eed 100644 --- a/crates/ide/src/highlight_related.rs +++ b/crates/ide/src/highlight_related.rs @@ -110,7 +110,7 @@ fn highlight_references( .and_then(|decl| decl.focus_range) .map(|range| { let category = - references::decl_mutability(&def, node, range).then(|| ReferenceCategory::Write); + references::decl_mutability(&def, node, range).then_some(ReferenceCategory::Write); HighlightedRange { range, category } }); if let Some(hl_range) = hl_range { @@ -365,7 +365,7 @@ mod tests { let mut expected = annotations .into_iter() - .map(|(r, access)| (r.range, (!access.is_empty()).then(|| access))) + .map(|(r, access)| (r.range, (!access.is_empty()).then_some(access))) .collect::>(); let mut actual = hls @@ -765,6 +765,23 @@ fn foo() ->$0 u32 { ); } + #[test] + fn test_hl_inner_tail_exit_points_loops() { + check( + r#" +fn foo() ->$0 u32 { + 'foo: while { return 0; true } { + // ^^^^^^ + break 'foo 0; + // ^^^^^ + return 0; + // ^^^^^^ + } +} +"#, + ); + } + #[test] fn test_hl_break_loop() { check( diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs index 838fb18c3d..b214fa12a4 100644 --- a/crates/ide/src/hover.rs +++ b/crates/ide/src/hover.rs @@ -127,6 +127,7 @@ pub(crate) fn hover( original_token.parent().and_then(ast::TokenTree::cast), Some(tt) if tt.syntax().ancestors().any(|it| ast::Meta::can_cast(it.kind())) ); + // prefer descending the same token kind in attribute expansions, in normal macros text // equivalency is more important let descended = if in_attr { @@ -135,54 +136,67 @@ pub(crate) fn hover( sema.descend_into_macros_with_same_text(original_token.clone()) }; - // FIXME: Definition should include known lints and the like instead of having this special case here - let hovered_lint = descended.iter().find_map(|token| { - let attr = token.parent_ancestors().find_map(ast::Attr::cast)?; - render::try_for_lint(&attr, token) - }); - if let Some(res) = hovered_lint { - return Some(RangeInfo::new(original_token.text_range(), res)); - } - + // try lint hover let result = descended .iter() - .filter_map(|token| { - let node = token.parent()?; - let class = IdentClass::classify_token(sema, token)?; - if let IdentClass::Operator(OperatorClass::Await(_)) = class { - // It's better for us to fall back to the keyword hover here, - // rendering poll is very confusing - return None; - } - Some(class.definitions().into_iter().zip(iter::once(node).cycle())) + .find_map(|token| { + // FIXME: Definition should include known lints and the like instead of having this special case here + let attr = token.parent_ancestors().find_map(ast::Attr::cast)?; + render::try_for_lint(&attr, token) }) - .flatten() - .unique_by(|&(def, _)| def) - .filter_map(|(def, node)| hover_for_definition(sema, file_id, def, &node, config)) - .reduce(|mut acc: HoverResult, HoverResult { markup, actions }| { - acc.actions.extend(actions); - acc.markup = Markup::from(format!("{}\n---\n{}", acc.markup, markup)); - acc + // try item definitions + .or_else(|| { + descended + .iter() + .filter_map(|token| { + let node = token.parent()?; + let class = IdentClass::classify_token(sema, token)?; + if let IdentClass::Operator(OperatorClass::Await(_)) = class { + // It's better for us to fall back to the keyword hover here, + // rendering poll is very confusing + return None; + } + 
Some(class.definitions().into_iter().zip(iter::once(node).cycle())) + }) + .flatten() + .unique_by(|&(def, _)| def) + .filter_map(|(def, node)| hover_for_definition(sema, file_id, def, &node, config)) + .reduce(|mut acc: HoverResult, HoverResult { markup, actions }| { + acc.actions.extend(actions); + acc.markup = Markup::from(format!("{}\n---\n{markup}", acc.markup)); + acc + }) + }) + // try keywords + .or_else(|| descended.iter().find_map(|token| render::keyword(sema, config, token))) + // try rest item hover + .or_else(|| { + descended.iter().find_map(|token| { + if token.kind() != DOT2 { + return None; + } + + let rest_pat = token.parent().and_then(ast::RestPat::cast)?; + let record_pat_field_list = + rest_pat.syntax().parent().and_then(ast::RecordPatFieldList::cast)?; + + let record_pat = + record_pat_field_list.syntax().parent().and_then(ast::RecordPat::cast)?; + + Some(render::struct_rest_pat(sema, config, &record_pat)) + }) }); - if result.is_none() { - // fallbacks, show keywords or types - - let res = descended.iter().find_map(|token| render::keyword(sema, config, token)); - if let Some(res) = res { - return Some(RangeInfo::new(original_token.text_range(), res)); - } - let res = descended - .iter() - .find_map(|token| hover_type_fallback(sema, config, token, &original_token)); - if let Some(_) = res { - return res; - } - } - result.map(|mut res: HoverResult| { - res.actions = dedupe_or_merge_hover_actions(res.actions); - RangeInfo::new(original_token.text_range(), res) - }) + result + .map(|mut res: HoverResult| { + res.actions = dedupe_or_merge_hover_actions(res.actions); + RangeInfo::new(original_token.text_range(), res) + }) + // fallback to type hover if there aren't any other suggestions + // this finds its own range instead of using the closest token's range + .or_else(|| { + descended.iter().find_map(|token| hover_type_fallback(sema, config, token, token)) + }) } pub(crate) fn hover_for_definition( @@ -269,6 +283,7 @@ fn hover_type_fallback( }; let res = render::type_info(sema, config, &expr_or_pat)?; + let range = sema .original_range_opt(&node) .map(|frange| frange.range) diff --git a/crates/ide/src/hover/render.rs b/crates/ide/src/hover/render.rs index d109c07691..47257f0bfa 100644 --- a/crates/ide/src/hover/render.rs +++ b/crates/ide/src/hover/render.rs @@ -2,7 +2,9 @@ use std::fmt::Display; use either::Either; -use hir::{AsAssocItem, AttributeTemplate, HasAttrs, HasSource, HirDisplay, Semantics, TypeInfo}; +use hir::{ + Adt, AsAssocItem, AttributeTemplate, HasAttrs, HasSource, HirDisplay, Semantics, TypeInfo, +}; use ide_db::{ base_db::SourceDatabase, defs::Definition, @@ -14,7 +16,9 @@ use ide_db::{ use itertools::Itertools; use stdx::format_to; use syntax::{ - algo, ast, match_ast, AstNode, Direction, + algo, + ast::{self, RecordPat}, + match_ast, AstNode, Direction, SyntaxKind::{LET_EXPR, LET_STMT}, SyntaxToken, T, }; @@ -250,6 +254,50 @@ pub(super) fn keyword( Some(HoverResult { markup, actions }) } +/// Returns missing types in a record pattern. +/// Only makes sense when there's a rest pattern in the record pattern. +/// i.e. `let S {a, ..} = S {a: 1, b: 2}` +pub(super) fn struct_rest_pat( + sema: &Semantics<'_, RootDatabase>, + config: &HoverConfig, + pattern: &RecordPat, +) -> HoverResult { + let missing_fields = sema.record_pattern_missing_fields(pattern); + + // if there are no missing fields, the end result is a hover that shows ".." 
+ // should be left in to indicate that there are no more fields in the pattern + // example, S {a: 1, b: 2, ..} when struct S {a: u32, b: u32} + + let mut res = HoverResult::default(); + let mut targets: Vec = Vec::new(); + let mut push_new_def = |item: hir::ModuleDef| { + if !targets.contains(&item) { + targets.push(item); + } + }; + for (_, t) in &missing_fields { + walk_and_push_ty(sema.db, t, &mut push_new_def); + } + + res.markup = { + let mut s = String::from(".., "); + for (f, _) in &missing_fields { + s += f.display(sema.db).to_string().as_ref(); + s += ", "; + } + // get rid of trailing comma + s.truncate(s.len() - 2); + + if config.markdown() { + Markup::fenced_block(&s) + } else { + s.into() + } + }; + res.actions.push(HoverAction::goto_type_from_targets(sema.db, targets)); + res +} + pub(super) fn try_for_lint(attr: &ast::Attr, token: &SyntaxToken) -> Option { let (path, tt) = attr.as_simple_call()?; if !tt.syntax().text_range().contains(token.text_range().start()) { @@ -342,15 +390,35 @@ pub(super) fn definition( let mod_path = definition_mod_path(db, &def); let (label, docs) = match def { Definition::Macro(it) => label_and_docs(db, it), - Definition::Field(it) => label_and_docs(db, it), + Definition::Field(it) => label_and_layout_info_and_docs(db, it, |&it| { + let var_def = it.parent_def(db); + let id = it.index(); + let layout = it.layout(db).ok()?; + let offset = match var_def { + hir::VariantDef::Struct(s) => Adt::from(s) + .layout(db) + .ok() + .map(|layout| format!(", offset = {}", layout.fields.offset(id).bytes())), + _ => None, + }; + Some(format!( + "size = {}, align = {}{}", + layout.size.bytes(), + layout.align.abi.bytes(), + offset.as_deref().unwrap_or_default() + )) + }), Definition::Module(it) => label_and_docs(db, it), Definition::Function(it) => label_and_docs(db, it), - Definition::Adt(it) => label_and_docs(db, it), + Definition::Adt(it) => label_and_layout_info_and_docs(db, it, |&it| { + let layout = it.layout(db).ok()?; + Some(format!("size = {}, align = {}", layout.size.bytes(), layout.align.abi.bytes())) + }), Definition::Variant(it) => label_value_and_docs(db, it, |&it| { if !it.parent_enum(db).is_data_carrying(db) { match it.eval(db) { - Ok(x) => Some(format!("{}", x)), - Err(_) => it.value(db).map(|x| format!("{:?}", x)), + Ok(x) => Some(format!("{x}")), + Err(_) => it.value(db).map(|x| format!("{x:?}")), } } else { None @@ -359,7 +427,7 @@ pub(super) fn definition( Definition::Const(it) => label_value_and_docs(db, it, |it| { let body = it.eval(db); match body { - Ok(x) => Some(format!("{}", x)), + Ok(x) => Some(format!("{x}")), Err(_) => { let source = it.source(db)?; let mut body = source.value.body()?.syntax().clone(); @@ -415,7 +483,7 @@ pub(super) fn definition( fn render_builtin_attr(db: &RootDatabase, attr: hir::BuiltinAttr) -> Option { let name = attr.name(db); - let desc = format!("#[{}]", name); + let desc = format!("#[{name}]"); let AttributeTemplate { word, list, name_value_str } = match attr.template(db) { Some(template) => template, @@ -443,6 +511,25 @@ where (label, docs) } +fn label_and_layout_info_and_docs( + db: &RootDatabase, + def: D, + value_extractor: E, +) -> (String, Option) +where + D: HasAttrs + HirDisplay, + E: Fn(&D) -> Option, + V: Display, +{ + let label = if let Some(value) = value_extractor(&def) { + format!("{} // {value}", def.display(db)) + } else { + def.display(db).to_string() + }; + let docs = def.attrs(db).docs(); + (label, docs) +} + fn label_value_and_docs( db: &RootDatabase, def: D, @@ -454,7 +541,7 @@ where 
V: Display, { let label = if let Some(value) = value_extractor(&def) { - format!("{} = {}", def.display(db), value) + format!("{} = {value}", def.display(db)) } else { def.display(db).to_string() }; @@ -518,9 +605,9 @@ fn local(db: &RootDatabase, it: hir::Local) -> Option { } else { "" }; - format!("{}{}{}: {}", let_kw, is_mut, name, ty) + format!("{let_kw}{is_mut}{name}: {ty}") } - Either::Right(_) => format!("{}self: {}", is_mut, ty), + Either::Right(_) => format!("{is_mut}self: {ty}"), }; markup(None, desc, None) } diff --git a/crates/ide/src/hover/tests.rs b/crates/ide/src/hover/tests.rs index eb997e6fef..c7f241f2fe 100644 --- a/crates/ide/src/hover/tests.rs +++ b/crates/ide/src/hover/tests.rs @@ -37,7 +37,7 @@ fn check(ra_fixture: &str, expect: Expect) { let content = analysis.db.file_text(position.file_id); let hovered_element = &content[hover.range]; - let actual = format!("*{}*\n{}\n", hovered_element, hover.info.markup); + let actual = format!("*{hovered_element}*\n{}\n", hover.info.markup); expect.assert_eq(&actual) } @@ -58,7 +58,7 @@ fn check_hover_no_links(ra_fixture: &str, expect: Expect) { let content = analysis.db.file_text(position.file_id); let hovered_element = &content[hover.range]; - let actual = format!("*{}*\n{}\n", hovered_element, hover.info.markup); + let actual = format!("*{hovered_element}*\n{}\n", hover.info.markup); expect.assert_eq(&actual) } @@ -79,7 +79,7 @@ fn check_hover_no_markdown(ra_fixture: &str, expect: Expect) { let content = analysis.db.file_text(position.file_id); let hovered_element = &content[hover.range]; - let actual = format!("*{}*\n{}\n", hovered_element, hover.info.markup); + let actual = format!("*{hovered_element}*\n{}\n", hover.info.markup); expect.assert_eq(&actual) } @@ -522,6 +522,27 @@ fn main() { } ); } +#[test] +fn hover_field_offset() { + // Hovering over the field when instantiating + check( + r#" +struct Foo { fiel$0d_a: u8, field_b: i32, field_c: i16 } +"#, + expect![[r#" + *field_a* + + ```rust + test::Foo + ``` + + ```rust + field_a: u8 // size = 1, align = 1, offset = 4 + ``` + "#]], + ); +} + #[test] fn hover_shows_struct_field_info() { // Hovering over the field when instantiating @@ -534,16 +555,16 @@ fn main() { } "#, expect![[r#" - *field_a* + *field_a* - ```rust - test::Foo - ``` + ```rust + test::Foo + ``` - ```rust - field_a: u32 - ``` - "#]], + ```rust + field_a: u32 // size = 4, align = 4, offset = 0 + ``` + "#]], ); // Hovering over the field in the definition @@ -556,16 +577,16 @@ fn main() { } "#, expect![[r#" - *field_a* + *field_a* - ```rust - test::Foo - ``` + ```rust + test::Foo + ``` - ```rust - field_a: u32 - ``` - "#]], + ```rust + field_a: u32 // size = 4, align = 4, offset = 0 + ``` + "#]], ); } @@ -1508,30 +1529,30 @@ struct Bar; fn foo() { let bar = Ba$0r; } "#, - expect![[r##" - *Bar* + expect![[r#" + *Bar* - ```rust - test - ``` + ```rust + test + ``` - ```rust - struct Bar - ``` + ```rust + struct Bar // size = 0, align = 1 + ``` - --- + --- - This is an example - multiline doc + This is an example + multiline doc - # Example + # Example - ``` - let five = 5; + ``` + let five = 5; - assert_eq!(6, my_crate::add_one(5)); - ``` - "##]], + assert_eq!(6, my_crate::add_one(5)); + ``` + "#]], ); } @@ -1545,20 +1566,20 @@ struct Bar; fn foo() { let bar = Ba$0r; } "#, expect![[r#" - *Bar* + *Bar* - ```rust - test - ``` + ```rust + test + ``` - ```rust - struct Bar - ``` + ```rust + struct Bar // size = 0, align = 1 + ``` - --- + --- - bar docs - "#]], + bar docs + "#]], ); } @@ -1574,22 +1595,22 @@ struct 
Bar; fn foo() { let bar = Ba$0r; } "#, expect![[r#" - *Bar* + *Bar* - ```rust - test - ``` + ```rust + test + ``` - ```rust - struct Bar - ``` + ```rust + struct Bar // size = 0, align = 1 + ``` - --- + --- - bar docs 0 - bar docs 1 - bar docs 2 - "#]], + bar docs 0 + bar docs 1 + bar docs 2 + "#]], ); } @@ -1602,20 +1623,20 @@ pub struct Foo; pub struct B$0ar "#, expect![[r#" - *Bar* + *Bar* - ```rust - test - ``` + ```rust + test + ``` - ```rust - pub struct Bar - ``` + ```rust + pub struct Bar // size = 0, align = 1 + ``` - --- + --- - [external](https://www.google.com) - "#]], + [external](https://www.google.com) + "#]], ); } @@ -1629,20 +1650,20 @@ pub struct Foo; pub struct B$0ar "#, expect![[r#" - *Bar* + *Bar* - ```rust - test - ``` + ```rust + test + ``` - ```rust - pub struct Bar - ``` + ```rust + pub struct Bar // size = 0, align = 1 + ``` - --- + --- - [baz](Baz) - "#]], + [baz](Baz) + "#]], ); } @@ -2960,7 +2981,7 @@ fn main() { ``` ```rust - f: i32 + f: i32 // size = 4, align = 4, offset = 0 ``` "#]], ); @@ -3636,6 +3657,163 @@ enum E { #[test] fn hover_const_eval() { + check( + r#" +trait T { + const B: bool = false; +} +impl T for <()> { + /// true + const B: bool = true; +} +fn main() { + <()>::B$0; +} +"#, + expect![[r#" + *B* + + ```rust + test + ``` + + ```rust + const B: bool = true + ``` + + --- + + true + "#]], + ); + + check( + r#" +struct A { + i: i32 +}; + +trait T { + const AA: A = A { + i: 1 + }; +} +impl T for i32 { + const AA: A = A { + i: 2 + } +} +fn main() { + ::AA$0; +} +"#, + expect![[r#" + *AA* + + ```rust + test + ``` + + ```rust + const AA: A = A { + i: 2 + } + ``` + "#]], + ); + + check( + r#" +trait T { + /// false + const B: bool = false; +} +impl T for () { + /// true + const B: bool = true; +} +fn main() { + T::B$0; +} +"#, + expect![[r#" + *B* + + ```rust + test + ``` + + ```rust + const B: bool = false + ``` + + --- + + false + "#]], + ); + + check( + r#" +trait T { + /// false + const B: bool = false; +} +impl T for () { +} +fn main() { + <()>::B$0; +} +"#, + expect![[r#" + *B* + + ```rust + test + ``` + + ```rust + const B: bool = false + ``` + + --- + + false + "#]], + ); + + check( + r#" +trait T { + /// false + const B: bool = false; +} +impl T for () { + /// true + const B: bool = true; +} +impl T for i32 {} +fn main() { + ::B$0; +} +"#, + expect![[r#" + *B* + + ```rust + test + ``` + + ```rust + const B: bool = false + ``` + + --- + + false + "#]], + ); + // show hex for <10 check( r#" @@ -3901,6 +4079,37 @@ const FOO$0: f64 = 1.0f64; ); } +#[test] +fn hover_const_eval_in_generic_trait() { + // Doesn't compile, but we shouldn't crash. + check( + r#" +trait Trait { + const FOO: bool = false; +} +struct S(T); +impl Trait for S { + const FOO: bool = true; +} + +fn test() { + S::FOO$0; +} +"#, + expect![[r#" + *FOO* + + ```rust + test + ``` + + ```rust + const FOO: bool = true + ``` + "#]], + ); +} + #[test] fn hover_const_pat() { check( @@ -4203,20 +4412,20 @@ pub fn gimme() -> theitem::TheItem { } "#, expect![[r#" - *[`TheItem`]* + *[`TheItem`]* - ```rust - test::theitem - ``` + ```rust + test::theitem + ``` - ```rust - pub struct TheItem - ``` + ```rust + pub struct TheItem // size = 0, align = 1 + ``` - --- + --- - This is the item. Cool! - "#]], + This is the item. Cool! + "#]], ); } @@ -4351,20 +4560,20 @@ mod string { } "#, expect![[r#" - *String* + *String* - ```rust - main - ``` + ```rust + main + ``` - ```rust - struct String - ``` + ```rust + struct String // size = 0, align = 1 + ``` - --- + --- - Custom `String` type. 
- "#]], + Custom `String` type. + "#]], ) } @@ -5025,7 +5234,7 @@ foo_macro!( ``` ```rust - pub struct Foo + pub struct Foo // size = 0, align = 1 ``` --- @@ -5040,7 +5249,7 @@ fn hover_intra_in_attr() { check( r#" #[doc = "Doc comment for [`Foo$0`]"] -pub struct Foo; +pub struct Foo(i32); "#, expect![[r#" *[`Foo`]* @@ -5050,7 +5259,7 @@ pub struct Foo; ``` ```rust - pub struct Foo + pub struct Foo // size = 4, align = 4 ``` --- @@ -5155,6 +5364,28 @@ enum Enum { ); } +#[test] +fn hover_record_variant_field() { + check( + r#" +enum Enum { + RecordV { field$0: u32 } +} +"#, + expect![[r#" + *field* + + ```rust + test::RecordV + ``` + + ```rust + field: u32 // size = 4, align = 4 + ``` + "#]], + ); +} + #[test] fn hover_trait_impl_assoc_item_def_doc_forwarding() { check( @@ -5307,3 +5538,38 @@ fn main() { $0V; } "#]], ); } + +#[test] +fn hover_rest_pat() { + check( + r#" +struct Struct {a: u32, b: u32, c: u8, d: u16}; + +fn main() { + let Struct {a, c, .$0.} = Struct {a: 1, b: 2, c: 3, d: 4}; +} +"#, + expect![[r#" + *..* + ```rust + .., b: u32, d: u16 + ``` + "#]], + ); + + check( + r#" +struct Struct {a: u32, b: u32, c: u8, d: u16}; + +fn main() { + let Struct {a, b, c, d, .$0.} = Struct {a: 1, b: 2, c: 3, d: 4}; +} +"#, + expect![[r#" + *..* + ```rust + .. + ``` + "#]], + ); +} diff --git a/crates/ide/src/inlay_hints.rs b/crates/ide/src/inlay_hints.rs index 37384c4e7e..48a7bbfecf 100644 --- a/crates/ide/src/inlay_hints.rs +++ b/crates/ide/src/inlay_hints.rs @@ -1,31 +1,42 @@ -use std::fmt; +use std::{ + fmt::{self, Write}, + mem::take, +}; use either::Either; -use hir::{ - known, Adjust, AutoBorrow, Callable, HasVisibility, HirDisplay, Mutability, OverloadedDeref, - PointerCast, Safety, Semantics, TypeInfo, -}; -use ide_db::{ - base_db::FileRange, famous_defs::FamousDefs, syntax_helpers::node_ext::walk_ty, FxHashMap, - RootDatabase, -}; +use hir::{known, HasVisibility, HirDisplay, HirWrite, ModuleDef, ModuleDefId, Semantics}; +use ide_db::{base_db::FileRange, famous_defs::FamousDefs, RootDatabase}; use itertools::Itertools; -use stdx::to_lower_snake_case; +use stdx::never; use syntax::{ - ast::{self, AstNode, HasArgList, HasGenericParams, HasName, UnaryOp}, - match_ast, Direction, NodeOrToken, SmolStr, SyntaxKind, SyntaxNode, SyntaxToken, TextRange, - TextSize, T, + ast::{self, AstNode}, + match_ast, NodeOrToken, SyntaxNode, TextRange, TextSize, }; -use crate::FileId; +use crate::{navigation_target::TryToNav, FileId}; + +mod closing_brace; +mod implicit_static; +mod fn_lifetime_fn; +mod closure_ret; +mod adjustment; +mod chaining; +mod param_name; +mod binding_mode; +mod bind_pat; +mod discriminant; #[derive(Clone, Debug, PartialEq, Eq)] pub struct InlayHintsConfig { + pub location_links: bool, pub render_colons: bool, pub type_hints: bool, + pub discriminant_hints: DiscriminantHints, pub parameter_hints: bool, pub chaining_hints: bool, pub adjustment_hints: AdjustmentHints, + pub adjustment_hints_mode: AdjustmentHintsMode, + pub adjustment_hints_hide_outside_unsafe: bool, pub closure_return_type_hints: ClosureReturnTypeHints, pub binding_mode_hints: bool, pub lifetime_elision_hints: LifetimeElisionHints, @@ -43,6 +54,13 @@ pub enum ClosureReturnTypeHints { Never, } +#[derive(Clone, Debug, PartialEq, Eq)] +pub enum DiscriminantHints { + Always, + Never, + Fieldless, +} + #[derive(Clone, Debug, PartialEq, Eq)] pub enum LifetimeElisionHints { Always, @@ -57,6 +75,14 @@ pub enum AdjustmentHints { Never, } +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub enum AdjustmentHintsMode { + Prefix, 
+ Postfix, + PreferPrefix, + PreferPostfix, +} + #[derive(Clone, Debug, PartialEq, Eq)] pub enum InlayKind { BindingModeHint, @@ -65,10 +91,13 @@ pub enum InlayKind { ClosureReturnTypeHint, GenericParamListHint, AdjustmentHint, - AdjustmentHintClosingParenthesis, + AdjustmentHintPostfix, LifetimeHint, ParameterHint, TypeHint, + DiscriminantHint, + OpeningParenthesis, + ClosingParenthesis, } #[derive(Debug)] @@ -86,6 +115,7 @@ pub enum InlayTooltip { HoverOffset(FileId, TextSize), } +#[derive(Default)] pub struct InlayHintLabel { pub parts: Vec, } @@ -169,6 +199,101 @@ impl fmt::Debug for InlayHintLabelPart { } } +#[derive(Debug)] +struct InlayHintLabelBuilder<'a> { + db: &'a RootDatabase, + result: InlayHintLabel, + last_part: String, + location_link_enabled: bool, + location: Option, +} + +impl fmt::Write for InlayHintLabelBuilder<'_> { + fn write_str(&mut self, s: &str) -> fmt::Result { + self.last_part.write_str(s) + } +} + +impl HirWrite for InlayHintLabelBuilder<'_> { + fn start_location_link(&mut self, def: ModuleDefId) { + if !self.location_link_enabled { + return; + } + if self.location.is_some() { + never!("location link is already started"); + } + self.make_new_part(); + let Some(location) = ModuleDef::from(def).try_to_nav(self.db) else { return }; + let location = + FileRange { file_id: location.file_id, range: location.focus_or_full_range() }; + self.location = Some(location); + } + + fn end_location_link(&mut self) { + if !self.location_link_enabled { + return; + } + self.make_new_part(); + } +} + +impl InlayHintLabelBuilder<'_> { + fn make_new_part(&mut self) { + self.result.parts.push(InlayHintLabelPart { + text: take(&mut self.last_part), + linked_location: self.location.take(), + }); + } + + fn finish(mut self) -> InlayHintLabel { + self.make_new_part(); + self.result + } +} + +fn label_of_ty( + famous_defs @ FamousDefs(sema, _): &FamousDefs<'_, '_>, + config: &InlayHintsConfig, + ty: hir::Type, +) -> Option { + fn rec( + sema: &Semantics<'_, RootDatabase>, + famous_defs: &FamousDefs<'_, '_>, + mut max_length: Option, + ty: hir::Type, + label_builder: &mut InlayHintLabelBuilder<'_>, + ) { + let iter_item_type = hint_iterator(sema, famous_defs, &ty); + match iter_item_type { + Some(ty) => { + const LABEL_START: &str = "impl Iterator { + let _ = ty.display_truncated(sema.db, max_length).write_to(label_builder); + } + }; + } + + let mut label_builder = InlayHintLabelBuilder { + db: sema.db, + last_part: String::new(), + location: None, + location_link_enabled: config.location_links, + result: InlayHintLabel::default(), + }; + rec(sema, famous_defs, config.max_length, ty, &mut label_builder); + let r = label_builder.finish(); + Some(r) +} + // Feature: Inlay Hints // // rust-analyzer shows additional information inline with the source code. @@ -200,7 +325,7 @@ pub(crate) fn inlay_hints( let mut acc = Vec::new(); - if let Some(scope) = sema.scope(&file) { + if let Some(scope) = sema.scope(file) { let famous_defs = FamousDefs(&sema, scope.krate()); let hints = |node| hints(&mut acc, &famous_defs, config, file_id, node); @@ -226,18 +351,18 @@ fn hints( file_id: FileId, node: SyntaxNode, ) { - closing_brace_hints(hints, sema, config, file_id, node.clone()); + closing_brace::hints(hints, sema, config, file_id, node.clone()); match_ast! 
{ match node { ast::Expr(expr) => { - chaining_hints(hints, sema, &famous_defs, config, file_id, &expr); - adjustment_hints(hints, sema, config, &expr); + chaining::hints(hints, famous_defs, config, file_id, &expr); + adjustment::hints(hints, sema, config, &expr); match expr { - ast::Expr::CallExpr(it) => param_name_hints(hints, sema, config, ast::Expr::from(it)), + ast::Expr::CallExpr(it) => param_name::hints(hints, sema, config, ast::Expr::from(it)), ast::Expr::MethodCallExpr(it) => { - param_name_hints(hints, sema, config, ast::Expr::from(it)) + param_name::hints(hints, sema, config, ast::Expr::from(it)) } - ast::Expr::ClosureExpr(it) => closure_ret_hints(hints, sema, &famous_defs, config, file_id, it), + ast::Expr::ClosureExpr(it) => closure_ret::hints(hints, famous_defs, config, file_id, it), // We could show reborrows for all expressions, but usually that is just noise to the user // and the main point here is to show why "moving" a mutable reference doesn't necessarily move it // ast::Expr::PathExpr(_) => reborrow_hints(hints, sema, config, &expr), @@ -245,21 +370,24 @@ fn hints( } }, ast::Pat(it) => { - binding_mode_hints(hints, sema, config, &it); + binding_mode::hints(hints, sema, config, &it); if let ast::Pat::IdentPat(it) = it { - bind_pat_hints(hints, sema, config, file_id, &it); + bind_pat::hints(hints, famous_defs, config, file_id, &it); } Some(()) }, ast::Item(it) => match it { // FIXME: record impl lifetimes so they aren't being reused in assoc item lifetime inlay hints ast::Item::Impl(_) => None, - ast::Item::Fn(it) => fn_lifetime_fn_hints(hints, config, it), + ast::Item::Fn(it) => fn_lifetime_fn::hints(hints, config, it), // static type elisions - ast::Item::Static(it) => implicit_static_hints(hints, config, Either::Left(it)), - ast::Item::Const(it) => implicit_static_hints(hints, config, Either::Right(it)), + ast::Item::Static(it) => implicit_static::hints(hints, config, Either::Left(it)), + ast::Item::Const(it) => implicit_static::hints(hints, config, Either::Right(it)), _ => None, }, + ast::Variant(v) => { + discriminant::hints(hints, famous_defs, config, file_id, &v) + }, // FIXME: fn-ptr type, dyn fn type, and trait object type elisions ast::Type(_) => None, _ => None, @@ -267,733 +395,12 @@ fn hints( }; } -fn closing_brace_hints( - acc: &mut Vec, - sema: &Semantics<'_, RootDatabase>, - config: &InlayHintsConfig, - file_id: FileId, - node: SyntaxNode, -) -> Option<()> { - let min_lines = config.closing_brace_hints_min_lines?; - - let name = |it: ast::Name| it.syntax().text_range(); - - let mut closing_token; - let (label, name_range) = if let Some(item_list) = ast::AssocItemList::cast(node.clone()) { - closing_token = item_list.r_curly_token()?; - - let parent = item_list.syntax().parent()?; - match_ast! 
{ - match parent { - ast::Impl(imp) => { - let imp = sema.to_def(&imp)?; - let ty = imp.self_ty(sema.db); - let trait_ = imp.trait_(sema.db); - let hint_text = match trait_ { - Some(tr) => format!("impl {} for {}", tr.name(sema.db), ty.display_truncated(sema.db, config.max_length)), - None => format!("impl {}", ty.display_truncated(sema.db, config.max_length)), - }; - (hint_text, None) - }, - ast::Trait(tr) => { - (format!("trait {}", tr.name()?), tr.name().map(name)) - }, - _ => return None, - } - } - } else if let Some(list) = ast::ItemList::cast(node.clone()) { - closing_token = list.r_curly_token()?; - - let module = ast::Module::cast(list.syntax().parent()?)?; - (format!("mod {}", module.name()?), module.name().map(name)) - } else if let Some(block) = ast::BlockExpr::cast(node.clone()) { - closing_token = block.stmt_list()?.r_curly_token()?; - - let parent = block.syntax().parent()?; - match_ast! { - match parent { - ast::Fn(it) => { - // FIXME: this could include parameters, but `HirDisplay` prints too much info - // and doesn't respect the max length either, so the hints end up way too long - (format!("fn {}", it.name()?), it.name().map(name)) - }, - ast::Static(it) => (format!("static {}", it.name()?), it.name().map(name)), - ast::Const(it) => { - if it.underscore_token().is_some() { - ("const _".into(), None) - } else { - (format!("const {}", it.name()?), it.name().map(name)) - } - }, - _ => return None, - } - } - } else if let Some(mac) = ast::MacroCall::cast(node.clone()) { - let last_token = mac.syntax().last_token()?; - if last_token.kind() != T![;] && last_token.kind() != SyntaxKind::R_CURLY { - return None; - } - closing_token = last_token; - - ( - format!("{}!", mac.path()?), - mac.path().and_then(|it| it.segment()).map(|it| it.syntax().text_range()), - ) - } else { - return None; - }; - - if let Some(mut next) = closing_token.next_token() { - if next.kind() == T![;] { - if let Some(tok) = next.next_token() { - closing_token = next; - next = tok; - } - } - if !(next.kind() == SyntaxKind::WHITESPACE && next.text().contains('\n')) { - // Only display the hint if the `}` is the last token on the line - return None; - } - } - - let mut lines = 1; - node.text().for_each_chunk(|s| lines += s.matches('\n').count()); - if lines < min_lines { - return None; - } - - let linked_location = name_range.map(|range| FileRange { file_id, range }); - acc.push(InlayHint { - range: closing_token.text_range(), - kind: InlayKind::ClosingBraceHint, - label: InlayHintLabel { parts: vec![InlayHintLabelPart { text: label, linked_location }] }, - tooltip: None, // provided by label part location - }); - - None -} - -fn implicit_static_hints( - acc: &mut Vec, - config: &InlayHintsConfig, - statik_or_const: Either, -) -> Option<()> { - if config.lifetime_elision_hints != LifetimeElisionHints::Always { - return None; - } - - if let Either::Right(it) = &statik_or_const { - if ast::AssocItemList::can_cast( - it.syntax().parent().map_or(SyntaxKind::EOF, |it| it.kind()), - ) { - return None; - } - } - - if let Some(ast::Type::RefType(ty)) = statik_or_const.either(|it| it.ty(), |it| it.ty()) { - if ty.lifetime().is_none() { - let t = ty.amp_token()?; - acc.push(InlayHint { - range: t.text_range(), - kind: InlayKind::LifetimeHint, - label: "'static".to_owned().into(), - tooltip: Some(InlayTooltip::String("Elided static lifetime".into())), - }); - } - } - - Some(()) -} - -fn fn_lifetime_fn_hints( - acc: &mut Vec, - config: &InlayHintsConfig, - func: ast::Fn, -) -> Option<()> { - if 
config.lifetime_elision_hints == LifetimeElisionHints::Never { - return None; - } - - let mk_lt_hint = |t: SyntaxToken, label: String| InlayHint { - range: t.text_range(), - kind: InlayKind::LifetimeHint, - label: label.into(), - tooltip: Some(InlayTooltip::String("Elided lifetime".into())), - }; - - let param_list = func.param_list()?; - let generic_param_list = func.generic_param_list(); - let ret_type = func.ret_type(); - let self_param = param_list.self_param().filter(|it| it.amp_token().is_some()); - - let is_elided = |lt: &Option| match lt { - Some(lt) => matches!(lt.text().as_str(), "'_"), - None => true, - }; - - let potential_lt_refs = { - let mut acc: Vec<_> = vec![]; - if let Some(self_param) = &self_param { - let lifetime = self_param.lifetime(); - let is_elided = is_elided(&lifetime); - acc.push((None, self_param.amp_token(), lifetime, is_elided)); - } - param_list.params().filter_map(|it| Some((it.pat(), it.ty()?))).for_each(|(pat, ty)| { - // FIXME: check path types - walk_ty(&ty, &mut |ty| match ty { - ast::Type::RefType(r) => { - let lifetime = r.lifetime(); - let is_elided = is_elided(&lifetime); - acc.push(( - pat.as_ref().and_then(|it| match it { - ast::Pat::IdentPat(p) => p.name(), - _ => None, - }), - r.amp_token(), - lifetime, - is_elided, - )) - } - _ => (), - }) - }); - acc - }; - - // allocate names - let mut gen_idx_name = { - let mut gen = (0u8..).map(|idx| match idx { - idx if idx < 10 => SmolStr::from_iter(['\'', (idx + 48) as char]), - idx => format!("'{idx}").into(), - }); - move || gen.next().unwrap_or_default() - }; - let mut allocated_lifetimes = vec![]; - - let mut used_names: FxHashMap = - match config.param_names_for_lifetime_elision_hints { - true => generic_param_list - .iter() - .flat_map(|gpl| gpl.lifetime_params()) - .filter_map(|param| param.lifetime()) - .filter_map(|lt| Some((SmolStr::from(lt.text().as_str().get(1..)?), 0))) - .collect(), - false => Default::default(), - }; - { - let mut potential_lt_refs = potential_lt_refs.iter().filter(|&&(.., is_elided)| is_elided); - if let Some(_) = &self_param { - if let Some(_) = potential_lt_refs.next() { - allocated_lifetimes.push(if config.param_names_for_lifetime_elision_hints { - // self can't be used as a lifetime, so no need to check for collisions - "'self".into() - } else { - gen_idx_name() - }); - } - } - potential_lt_refs.for_each(|(name, ..)| { - let name = match name { - Some(it) if config.param_names_for_lifetime_elision_hints => { - if let Some(c) = used_names.get_mut(it.text().as_str()) { - *c += 1; - SmolStr::from(format!("'{text}{c}", text = it.text().as_str())) - } else { - used_names.insert(it.text().as_str().into(), 0); - SmolStr::from_iter(["\'", it.text().as_str()]) - } - } - _ => gen_idx_name(), - }; - allocated_lifetimes.push(name); - }); - } - - // fetch output lifetime if elision rule applies - let output = match potential_lt_refs.as_slice() { - [(_, _, lifetime, _), ..] if self_param.is_some() || potential_lt_refs.len() == 1 => { - match lifetime { - Some(lt) => match lt.text().as_str() { - "'_" => allocated_lifetimes.get(0).cloned(), - "'static" => None, - name => Some(name.into()), - }, - None => allocated_lifetimes.get(0).cloned(), - } - } - [..] 
=> None, - }; - - if allocated_lifetimes.is_empty() && output.is_none() { - return None; - } - - // apply hints - // apply output if required - let mut is_trivial = true; - if let (Some(output_lt), Some(r)) = (&output, ret_type) { - if let Some(ty) = r.ty() { - walk_ty(&ty, &mut |ty| match ty { - ast::Type::RefType(ty) if ty.lifetime().is_none() => { - if let Some(amp) = ty.amp_token() { - is_trivial = false; - acc.push(mk_lt_hint(amp, output_lt.to_string())); - } - } - _ => (), - }) - } - } - - if config.lifetime_elision_hints == LifetimeElisionHints::SkipTrivial && is_trivial { - return None; - } - - let mut a = allocated_lifetimes.iter(); - for (_, amp_token, _, is_elided) in potential_lt_refs { - if is_elided { - let t = amp_token?; - let lt = a.next()?; - acc.push(mk_lt_hint(t, lt.to_string())); - } - } - - // generate generic param list things - match (generic_param_list, allocated_lifetimes.as_slice()) { - (_, []) => (), - (Some(gpl), allocated_lifetimes) => { - let angle_tok = gpl.l_angle_token()?; - let is_empty = gpl.generic_params().next().is_none(); - acc.push(InlayHint { - range: angle_tok.text_range(), - kind: InlayKind::LifetimeHint, - label: format!( - "{}{}", - allocated_lifetimes.iter().format(", "), - if is_empty { "" } else { ", " } - ) - .into(), - tooltip: Some(InlayTooltip::String("Elided lifetimes".into())), - }); - } - (None, allocated_lifetimes) => acc.push(InlayHint { - range: func.name()?.syntax().text_range(), - kind: InlayKind::GenericParamListHint, - label: format!("<{}>", allocated_lifetimes.iter().format(", "),).into(), - tooltip: Some(InlayTooltip::String("Elided lifetimes".into())), - }), - } - Some(()) -} - -fn closure_ret_hints( - acc: &mut Vec, - sema: &Semantics<'_, RootDatabase>, - famous_defs: &FamousDefs<'_, '_>, - config: &InlayHintsConfig, - file_id: FileId, - closure: ast::ClosureExpr, -) -> Option<()> { - if config.closure_return_type_hints == ClosureReturnTypeHints::Never { - return None; - } - - if closure.ret_type().is_some() { - return None; - } - - if !closure_has_block_body(&closure) - && config.closure_return_type_hints == ClosureReturnTypeHints::WithBlock - { - return None; - } - - let param_list = closure.param_list()?; - - let closure = sema.descend_node_into_attributes(closure.clone()).pop()?; - let ty = sema.type_of_expr(&ast::Expr::ClosureExpr(closure))?.adjusted(); - let callable = ty.as_callable(sema.db)?; - let ty = callable.return_type(); - if ty.is_unit() { - return None; - } - acc.push(InlayHint { - range: param_list.syntax().text_range(), - kind: InlayKind::ClosureReturnTypeHint, - label: hint_iterator(sema, &famous_defs, config, &ty) - .unwrap_or_else(|| ty.display_truncated(sema.db, config.max_length).to_string()) - .into(), - tooltip: Some(InlayTooltip::HoverRanged(file_id, param_list.syntax().text_range())), - }); - Some(()) -} - -fn adjustment_hints( - acc: &mut Vec, - sema: &Semantics<'_, RootDatabase>, - config: &InlayHintsConfig, - expr: &ast::Expr, -) -> Option<()> { - if config.adjustment_hints == AdjustmentHints::Never { - return None; - } - - if let ast::Expr::ParenExpr(_) = expr { - // These inherit from the inner expression which would result in duplicate hints - return None; - } - - let parent = expr.syntax().parent().and_then(ast::Expr::cast); - let descended = sema.descend_node_into_attributes(expr.clone()).pop(); - let desc_expr = descended.as_ref().unwrap_or(expr); - let adjustments = sema.expr_adjustments(desc_expr).filter(|it| !it.is_empty())?; - let needs_parens = match parent { - Some(parent) => { - 
match parent { - ast::Expr::AwaitExpr(_) - | ast::Expr::CallExpr(_) - | ast::Expr::CastExpr(_) - | ast::Expr::FieldExpr(_) - | ast::Expr::MethodCallExpr(_) - | ast::Expr::TryExpr(_) => true, - // FIXME: shorthands need special casing, though not sure if adjustments are even valid there - ast::Expr::RecordExpr(_) => false, - ast::Expr::IndexExpr(index) => index.base().as_ref() == Some(expr), - _ => false, - } - } - None => false, - }; - if needs_parens { - acc.push(InlayHint { - range: expr.syntax().text_range(), - kind: InlayKind::AdjustmentHint, - label: "(".into(), - tooltip: None, - }); - } - for adjustment in adjustments.into_iter().rev() { - // FIXME: Add some nicer tooltips to each of these - let text = match adjustment { - Adjust::NeverToAny if config.adjustment_hints == AdjustmentHints::Always => { - "" - } - Adjust::Deref(None) => "*", - Adjust::Deref(Some(OverloadedDeref(Mutability::Mut))) => "*", - Adjust::Deref(Some(OverloadedDeref(Mutability::Shared))) => "*", - Adjust::Borrow(AutoBorrow::Ref(Mutability::Shared)) => "&", - Adjust::Borrow(AutoBorrow::Ref(Mutability::Mut)) => "&mut ", - Adjust::Borrow(AutoBorrow::RawPtr(Mutability::Shared)) => "&raw const ", - Adjust::Borrow(AutoBorrow::RawPtr(Mutability::Mut)) => "&raw mut ", - // some of these could be represented via `as` casts, but that's not too nice and - // handling everything as a prefix expr makes the `(` and `)` insertion easier - Adjust::Pointer(cast) if config.adjustment_hints == AdjustmentHints::Always => { - match cast { - PointerCast::ReifyFnPointer => "", - PointerCast::UnsafeFnPointer => "", - PointerCast::ClosureFnPointer(Safety::Unsafe) => { - "" - } - PointerCast::ClosureFnPointer(Safety::Safe) => "", - PointerCast::MutToConstPointer => "", - PointerCast::ArrayToPointer => "", - PointerCast::Unsize => "", - } - } - _ => continue, - }; - acc.push(InlayHint { - range: expr.syntax().text_range(), - kind: InlayKind::AdjustmentHint, - label: text.into(), - tooltip: None, - }); - } - if needs_parens { - acc.push(InlayHint { - range: expr.syntax().text_range(), - kind: InlayKind::AdjustmentHintClosingParenthesis, - label: ")".into(), - tooltip: None, - }); - } - Some(()) -} - -fn chaining_hints( - acc: &mut Vec, - sema: &Semantics<'_, RootDatabase>, - famous_defs: &FamousDefs<'_, '_>, - config: &InlayHintsConfig, - file_id: FileId, - expr: &ast::Expr, -) -> Option<()> { - if !config.chaining_hints { - return None; - } - - if matches!(expr, ast::Expr::RecordExpr(_)) { - return None; - } - - let descended = sema.descend_node_into_attributes(expr.clone()).pop(); - let desc_expr = descended.as_ref().unwrap_or(expr); - - let mut tokens = expr - .syntax() - .siblings_with_tokens(Direction::Next) - .filter_map(NodeOrToken::into_token) - .filter(|t| match t.kind() { - SyntaxKind::WHITESPACE if !t.text().contains('\n') => false, - SyntaxKind::COMMENT => false, - _ => true, - }); - - // Chaining can be defined as an expression whose next sibling tokens are newline and dot - // Ignoring extra whitespace and comments - let next = tokens.next()?.kind(); - if next == SyntaxKind::WHITESPACE { - let mut next_next = tokens.next()?.kind(); - while next_next == SyntaxKind::WHITESPACE { - next_next = tokens.next()?.kind(); - } - if next_next == T![.] 
{ - let ty = sema.type_of_expr(desc_expr)?.original; - if ty.is_unknown() { - return None; - } - if matches!(expr, ast::Expr::PathExpr(_)) { - if let Some(hir::Adt::Struct(st)) = ty.as_adt() { - if st.fields(sema.db).is_empty() { - return None; - } - } - } - acc.push(InlayHint { - range: expr.syntax().text_range(), - kind: InlayKind::ChainingHint, - label: hint_iterator(sema, &famous_defs, config, &ty) - .unwrap_or_else(|| ty.display_truncated(sema.db, config.max_length).to_string()) - .into(), - tooltip: Some(InlayTooltip::HoverRanged(file_id, expr.syntax().text_range())), - }); - } - } - Some(()) -} - -fn param_name_hints( - acc: &mut Vec, - sema: &Semantics<'_, RootDatabase>, - config: &InlayHintsConfig, - expr: ast::Expr, -) -> Option<()> { - if !config.parameter_hints { - return None; - } - - let (callable, arg_list) = get_callable(sema, &expr)?; - let hints = callable - .params(sema.db) - .into_iter() - .zip(arg_list.args()) - .filter_map(|((param, _ty), arg)| { - // Only annotate hints for expressions that exist in the original file - let range = sema.original_range_opt(arg.syntax())?; - let (param_name, name_syntax) = match param.as_ref()? { - Either::Left(pat) => ("self".to_string(), pat.name()), - Either::Right(pat) => match pat { - ast::Pat::IdentPat(it) => (it.name()?.to_string(), it.name()), - _ => return None, - }, - }; - Some((name_syntax, param_name, arg, range)) - }) - .filter(|(_, param_name, arg, _)| { - !should_hide_param_name_hint(sema, &callable, param_name, arg) - }) - .map(|(param, param_name, _, FileRange { range, .. })| { - let mut tooltip = None; - if let Some(name) = param { - if let hir::CallableKind::Function(f) = callable.kind() { - // assert the file is cached so we can map out of macros - if let Some(_) = sema.source(f) { - tooltip = sema.original_range_opt(name.syntax()); - } - } - } - - InlayHint { - range, - kind: InlayKind::ParameterHint, - label: param_name.into(), - tooltip: tooltip.map(|it| InlayTooltip::HoverOffset(it.file_id, it.range.start())), - } - }); - - acc.extend(hints); - Some(()) -} - -fn binding_mode_hints( - acc: &mut Vec, - sema: &Semantics<'_, RootDatabase>, - config: &InlayHintsConfig, - pat: &ast::Pat, -) -> Option<()> { - if !config.binding_mode_hints { - return None; - } - - let range = pat.syntax().text_range(); - sema.pattern_adjustments(&pat).iter().for_each(|ty| { - let reference = ty.is_reference(); - let mut_reference = ty.is_mutable_reference(); - let r = match (reference, mut_reference) { - (true, true) => "&mut", - (true, false) => "&", - _ => return, - }; - acc.push(InlayHint { - range, - kind: InlayKind::BindingModeHint, - label: r.to_string().into(), - tooltip: Some(InlayTooltip::String("Inferred binding mode".into())), - }); - }); - match pat { - ast::Pat::IdentPat(pat) if pat.ref_token().is_none() && pat.mut_token().is_none() => { - let bm = sema.binding_mode_of_pat(pat)?; - let bm = match bm { - hir::BindingMode::Move => return None, - hir::BindingMode::Ref(Mutability::Mut) => "ref mut", - hir::BindingMode::Ref(Mutability::Shared) => "ref", - }; - acc.push(InlayHint { - range, - kind: InlayKind::BindingModeHint, - label: bm.to_string().into(), - tooltip: Some(InlayTooltip::String("Inferred binding mode".into())), - }); - } - _ => (), - } - - Some(()) -} - -fn bind_pat_hints( - acc: &mut Vec, - sema: &Semantics<'_, RootDatabase>, - config: &InlayHintsConfig, - file_id: FileId, - pat: &ast::IdentPat, -) -> Option<()> { - if !config.type_hints { - return None; - } - - let descended = 
sema.descend_node_into_attributes(pat.clone()).pop(); - let desc_pat = descended.as_ref().unwrap_or(pat); - let ty = sema.type_of_pat(&desc_pat.clone().into())?.original; - - if should_not_display_type_hint(sema, config, pat, &ty) { - return None; - } - - let krate = sema.scope(desc_pat.syntax())?.krate(); - let famous_defs = FamousDefs(sema, krate); - let label = hint_iterator(sema, &famous_defs, config, &ty); - - let label = match label { - Some(label) => label, - None => { - let ty_name = ty.display_truncated(sema.db, config.max_length).to_string(); - if config.hide_named_constructor_hints - && is_named_constructor(sema, pat, &ty_name).is_some() - { - return None; - } - ty_name - } - }; - - acc.push(InlayHint { - range: match pat.name() { - Some(name) => name.syntax().text_range(), - None => pat.syntax().text_range(), - }, - kind: InlayKind::TypeHint, - label: label.into(), - tooltip: pat - .name() - .map(|it| it.syntax().text_range()) - .map(|it| InlayTooltip::HoverRanged(file_id, it)), - }); - - Some(()) -} - -fn is_named_constructor( - sema: &Semantics<'_, RootDatabase>, - pat: &ast::IdentPat, - ty_name: &str, -) -> Option<()> { - let let_node = pat.syntax().parent()?; - let expr = match_ast! { - match let_node { - ast::LetStmt(it) => it.initializer(), - ast::LetExpr(it) => it.expr(), - _ => None, - } - }?; - - let expr = sema.descend_node_into_attributes(expr.clone()).pop().unwrap_or(expr); - // unwrap postfix expressions - let expr = match expr { - ast::Expr::TryExpr(it) => it.expr(), - ast::Expr::AwaitExpr(it) => it.expr(), - expr => Some(expr), - }?; - let expr = match expr { - ast::Expr::CallExpr(call) => match call.expr()? { - ast::Expr::PathExpr(path) => path, - _ => return None, - }, - ast::Expr::PathExpr(path) => path, - _ => return None, - }; - let path = expr.path()?; - - let callable = sema.type_of_expr(&ast::Expr::PathExpr(expr))?.original.as_callable(sema.db); - let callable_kind = callable.map(|it| it.kind()); - let qual_seg = match callable_kind { - Some(hir::CallableKind::Function(_) | hir::CallableKind::TupleEnumVariant(_)) => { - path.qualifier()?.segment() - } - _ => path.segment(), - }?; - - let ctor_name = match qual_seg.kind()? { - ast::PathSegmentKind::Name(name_ref) => { - match qual_seg.generic_arg_list().map(|it| it.generic_args()) { - Some(generics) => format!("{}<{}>", name_ref, generics.format(", ")), - None => name_ref.to_string(), - } - } - ast::PathSegmentKind::Type { type_ref: Some(ty), trait_ref: None } => ty.to_string(), - _ => return None, - }; - (ctor_name == ty_name).then(|| ()) -} - -/// Checks if the type is an Iterator from std::iter and replaces its hint with an `impl Iterator`. +/// Checks if the type is an Iterator from std::iter and returns its item type. 
fn hint_iterator( sema: &Semantics<'_, RootDatabase>, famous_defs: &FamousDefs<'_, '_>, - config: &InlayHintsConfig, ty: &hir::Type, -) -> Option { +) -> Option { let db = sema.db; let strukt = ty.strip_references().as_adt()?; let krate = strukt.module(db).krate(); @@ -1016,289 +423,32 @@ fn hint_iterator( _ => None, })?; if let Some(ty) = ty.normalize_trait_assoc_type(db, &[], assoc_type_item) { - const LABEL_START: &str = "impl Iterator bool { - if let Some(hir::Adt::Enum(enum_data)) = pat_ty.as_adt() { - let pat_text = bind_pat.to_string(); - enum_data - .variants(db) - .into_iter() - .map(|variant| variant.name(db).to_smol_str()) - .any(|enum_name| enum_name == pat_text) - } else { - false - } -} - -fn should_not_display_type_hint( - sema: &Semantics<'_, RootDatabase>, - config: &InlayHintsConfig, - bind_pat: &ast::IdentPat, - pat_ty: &hir::Type, -) -> bool { - let db = sema.db; - - if pat_ty.is_unknown() { - return true; - } - - if let Some(hir::Adt::Struct(s)) = pat_ty.as_adt() { - if s.fields(db).is_empty() && s.name(db).to_smol_str() == bind_pat.to_string() { - return true; - } - } - - if config.hide_closure_initialization_hints { - if let Some(parent) = bind_pat.syntax().parent() { - if let Some(it) = ast::LetStmt::cast(parent.clone()) { - if let Some(ast::Expr::ClosureExpr(closure)) = it.initializer() { - if closure_has_block_body(&closure) { - return true; - } - } - } - } - } - - for node in bind_pat.syntax().ancestors() { - match_ast! { - match node { - ast::LetStmt(it) => return it.ty().is_some(), - // FIXME: We might wanna show type hints in parameters for non-top level patterns as well - ast::Param(it) => return it.ty().is_some(), - ast::MatchArm(_) => return pat_is_enum_variant(db, bind_pat, pat_ty), - ast::LetExpr(_) => return pat_is_enum_variant(db, bind_pat, pat_ty), - ast::IfExpr(_) => return false, - ast::WhileExpr(_) => return false, - ast::ForExpr(it) => { - // We *should* display hint only if user provided "in {expr}" and we know the type of expr (and it's not unit). - // Type of expr should be iterable. 
- return it.in_token().is_none() || - it.iterable() - .and_then(|iterable_expr| sema.type_of_expr(&iterable_expr)) - .map(TypeInfo::original) - .map_or(true, |iterable_ty| iterable_ty.is_unknown() || iterable_ty.is_unit()) - }, - _ => (), - } - } - } - false -} - fn closure_has_block_body(closure: &ast::ClosureExpr) -> bool { matches!(closure.body(), Some(ast::Expr::BlockExpr(_))) } -fn should_hide_param_name_hint( - sema: &Semantics<'_, RootDatabase>, - callable: &hir::Callable, - param_name: &str, - argument: &ast::Expr, -) -> bool { - // These are to be tested in the `parameter_hint_heuristics` test - // hide when: - // - the parameter name is a suffix of the function's name - // - the argument is a qualified constructing or call expression where the qualifier is an ADT - // - exact argument<->parameter match(ignoring leading underscore) or parameter is a prefix/suffix - // of argument with _ splitting it off - // - param starts with `ra_fixture` - // - param is a well known name in a unary function - - let param_name = param_name.trim_start_matches('_'); - if param_name.is_empty() { - return true; - } - - if matches!(argument, ast::Expr::PrefixExpr(prefix) if prefix.op_kind() == Some(UnaryOp::Not)) { - return false; - } - - let fn_name = match callable.kind() { - hir::CallableKind::Function(it) => Some(it.name(sema.db).to_smol_str()), - _ => None, - }; - let fn_name = fn_name.as_deref(); - is_param_name_suffix_of_fn_name(param_name, callable, fn_name) - || is_argument_similar_to_param_name(argument, param_name) - || param_name.starts_with("ra_fixture") - || (callable.n_params() == 1 && is_obvious_param(param_name)) - || is_adt_constructor_similar_to_param_name(sema, argument, param_name) -} - -fn is_argument_similar_to_param_name(argument: &ast::Expr, param_name: &str) -> bool { - // check whether param_name and argument are the same or - // whether param_name is a prefix/suffix of argument(split at `_`) - let argument = match get_string_representation(argument) { - Some(argument) => argument, - None => return false, - }; - - // std is honestly too panic happy... - let str_split_at = |str: &str, at| str.is_char_boundary(at).then(|| argument.split_at(at)); - - let param_name = param_name.trim_start_matches('_'); - let argument = argument.trim_start_matches('_'); - - match str_split_at(argument, param_name.len()) { - Some((prefix, rest)) if prefix.eq_ignore_ascii_case(param_name) => { - return rest.is_empty() || rest.starts_with('_'); - } - _ => (), - } - match argument.len().checked_sub(param_name.len()).and_then(|at| str_split_at(argument, at)) { - Some((rest, suffix)) if param_name.eq_ignore_ascii_case(suffix) => { - return rest.is_empty() || rest.ends_with('_'); - } - _ => (), - } - false -} - -/// Hide the parameter name of a unary function if it is a `_` - prefixed suffix of the function's name, or equal. -/// -/// `fn strip_suffix(suffix)` will be hidden. -/// `fn stripsuffix(suffix)` will not be hidden. 
-fn is_param_name_suffix_of_fn_name( - param_name: &str, - callable: &Callable, - fn_name: Option<&str>, -) -> bool { - match (callable.n_params(), fn_name) { - (1, Some(function)) => { - function == param_name - || function - .len() - .checked_sub(param_name.len()) - .and_then(|at| function.is_char_boundary(at).then(|| function.split_at(at))) - .map_or(false, |(prefix, suffix)| { - suffix.eq_ignore_ascii_case(param_name) && prefix.ends_with('_') - }) - } - _ => false, - } -} - -fn is_adt_constructor_similar_to_param_name( - sema: &Semantics<'_, RootDatabase>, - argument: &ast::Expr, - param_name: &str, -) -> bool { - let path = match argument { - ast::Expr::CallExpr(c) => c.expr().and_then(|e| match e { - ast::Expr::PathExpr(p) => p.path(), - _ => None, - }), - ast::Expr::PathExpr(p) => p.path(), - ast::Expr::RecordExpr(r) => r.path(), - _ => return false, - }; - let path = match path { - Some(it) => it, - None => return false, - }; - (|| match sema.resolve_path(&path)? { - hir::PathResolution::Def(hir::ModuleDef::Adt(_)) => { - Some(to_lower_snake_case(&path.segment()?.name_ref()?.text()) == param_name) - } - hir::PathResolution::Def(hir::ModuleDef::Function(_) | hir::ModuleDef::Variant(_)) => { - if to_lower_snake_case(&path.segment()?.name_ref()?.text()) == param_name { - return Some(true); - } - let qual = path.qualifier()?; - match sema.resolve_path(&qual)? { - hir::PathResolution::Def(hir::ModuleDef::Adt(_)) => { - Some(to_lower_snake_case(&qual.segment()?.name_ref()?.text()) == param_name) - } - _ => None, - } - } - _ => None, - })() - .unwrap_or(false) -} - -fn get_string_representation(expr: &ast::Expr) -> Option { - match expr { - ast::Expr::MethodCallExpr(method_call_expr) => { - let name_ref = method_call_expr.name_ref()?; - match name_ref.text().as_str() { - "clone" | "as_ref" => method_call_expr.receiver().map(|rec| rec.to_string()), - name_ref => Some(name_ref.to_owned()), - } - } - ast::Expr::MacroExpr(macro_expr) => { - Some(macro_expr.macro_call()?.path()?.segment()?.to_string()) - } - ast::Expr::FieldExpr(field_expr) => Some(field_expr.name_ref()?.to_string()), - ast::Expr::PathExpr(path_expr) => Some(path_expr.path()?.segment()?.to_string()), - ast::Expr::PrefixExpr(prefix_expr) => get_string_representation(&prefix_expr.expr()?), - ast::Expr::RefExpr(ref_expr) => get_string_representation(&ref_expr.expr()?), - ast::Expr::CastExpr(cast_expr) => get_string_representation(&cast_expr.expr()?), - _ => None, - } -} - -fn is_obvious_param(param_name: &str) -> bool { - // avoid displaying hints for common functions like map, filter, etc. 
- // or other obvious words used in std - let is_obvious_param_name = - matches!(param_name, "predicate" | "value" | "pat" | "rhs" | "other"); - param_name.len() == 1 || is_obvious_param_name -} - -fn get_callable( - sema: &Semantics<'_, RootDatabase>, - expr: &ast::Expr, -) -> Option<(hir::Callable, ast::ArgList)> { - match expr { - ast::Expr::CallExpr(expr) => { - let descended = sema.descend_node_into_attributes(expr.clone()).pop(); - let expr = descended.as_ref().unwrap_or(expr); - sema.type_of_expr(&expr.expr()?)?.original.as_callable(sema.db).zip(expr.arg_list()) - } - ast::Expr::MethodCallExpr(expr) => { - let descended = sema.descend_node_into_attributes(expr.clone()).pop(); - let expr = descended.as_ref().unwrap_or(expr); - sema.resolve_method_call_as_callable(expr).zip(expr.arg_list()) - } - _ => None, - } -} - #[cfg(test)] mod tests { - use expect_test::{expect, Expect}; + use expect_test::Expect; use itertools::Itertools; - use syntax::{TextRange, TextSize}; use test_utils::extract_annotations; - use crate::inlay_hints::AdjustmentHints; + use crate::inlay_hints::{AdjustmentHints, AdjustmentHintsMode}; + use crate::DiscriminantHints; use crate::{fixture, inlay_hints::InlayHintsConfig, LifetimeElisionHints}; use super::ClosureReturnTypeHints; - const DISABLED_CONFIG: InlayHintsConfig = InlayHintsConfig { + pub(super) const DISABLED_CONFIG: InlayHintsConfig = InlayHintsConfig { + location_links: false, + discriminant_hints: DiscriminantHints::Never, render_colons: false, type_hints: false, parameter_hints: false, @@ -1306,6 +456,8 @@ mod tests { lifetime_elision_hints: LifetimeElisionHints::Never, closure_return_type_hints: ClosureReturnTypeHints::Never, adjustment_hints: AdjustmentHints::Never, + adjustment_hints_mode: AdjustmentHintsMode::Prefix, + adjustment_hints_hide_outside_unsafe: false, binding_mode_hints: false, hide_named_constructor_hints: false, hide_closure_initialization_hints: false, @@ -1313,43 +465,27 @@ mod tests { max_length: None, closing_brace_hints_min_lines: None, }; - const TEST_CONFIG: InlayHintsConfig = InlayHintsConfig { + pub(super) const DISABLED_CONFIG_WITH_LINKS: InlayHintsConfig = + InlayHintsConfig { location_links: true, ..DISABLED_CONFIG }; + pub(super) const TEST_CONFIG: InlayHintsConfig = InlayHintsConfig { type_hints: true, parameter_hints: true, chaining_hints: true, closure_return_type_hints: ClosureReturnTypeHints::WithBlock, binding_mode_hints: true, lifetime_elision_hints: LifetimeElisionHints::Always, - ..DISABLED_CONFIG + ..DISABLED_CONFIG_WITH_LINKS }; #[track_caller] - fn check(ra_fixture: &str) { + pub(super) fn check(ra_fixture: &str) { check_with_config(TEST_CONFIG, ra_fixture); } #[track_caller] - fn check_params(ra_fixture: &str) { - check_with_config( - InlayHintsConfig { parameter_hints: true, ..DISABLED_CONFIG }, - ra_fixture, - ); - } - - #[track_caller] - fn check_types(ra_fixture: &str) { - check_with_config(InlayHintsConfig { type_hints: true, ..DISABLED_CONFIG }, ra_fixture); - } - - #[track_caller] - fn check_chains(ra_fixture: &str) { - check_with_config(InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG }, ra_fixture); - } - - #[track_caller] - fn check_with_config(config: InlayHintsConfig, ra_fixture: &str) { + pub(super) fn check_with_config(config: InlayHintsConfig, ra_fixture: &str) { let (analysis, file_id) = fixture::file(ra_fixture); - let mut expected = extract_annotations(&*analysis.file_text(file_id).unwrap()); + let mut expected = extract_annotations(&analysis.file_text(file_id).unwrap()); let 
inlay_hints = analysis.inlay_hints(&config, file_id, None).unwrap(); let actual = inlay_hints .into_iter() @@ -1358,11 +494,11 @@ mod tests { .collect::>(); expected.sort_by_key(|(range, _)| range.start()); - assert_eq!(expected, actual, "\nExpected:\n{:#?}\n\nActual:\n{:#?}", expected, actual); + assert_eq!(expected, actual, "\nExpected:\n{expected:#?}\n\nActual:\n{actual:#?}"); } #[track_caller] - fn check_expect(config: InlayHintsConfig, ra_fixture: &str, expect: Expect) { + pub(super) fn check_expect(config: InlayHintsConfig, ra_fixture: &str, expect: Expect) { let (analysis, file_id) = fixture::file(ra_fixture); let inlay_hints = analysis.inlay_hints(&config, file_id, None).unwrap(); expect.assert_debug_eq(&inlay_hints) @@ -1379,1720 +515,4 @@ fn main() { }"#, ); } - - // Parameter hint tests - - #[test] - fn param_hints_only() { - check_params( - r#" -fn foo(a: i32, b: i32) -> i32 { a + b } -fn main() { - let _x = foo( - 4, - //^ a - 4, - //^ b - ); -}"#, - ); - } - - #[test] - fn param_hints_on_closure() { - check_params( - r#" -fn main() { - let clo = |a: u8, b: u8| a + b; - clo( - 1, - //^ a - 2, - //^ b - ); -} - "#, - ); - } - - #[test] - fn param_name_similar_to_fn_name_still_hints() { - check_params( - r#" -fn max(x: i32, y: i32) -> i32 { x + y } -fn main() { - let _x = max( - 4, - //^ x - 4, - //^ y - ); -}"#, - ); - } - - #[test] - fn param_name_similar_to_fn_name() { - check_params( - r#" -fn param_with_underscore(with_underscore: i32) -> i32 { with_underscore } -fn main() { - let _x = param_with_underscore( - 4, - ); -}"#, - ); - check_params( - r#" -fn param_with_underscore(underscore: i32) -> i32 { underscore } -fn main() { - let _x = param_with_underscore( - 4, - ); -}"#, - ); - } - - #[test] - fn param_name_same_as_fn_name() { - check_params( - r#" -fn foo(foo: i32) -> i32 { foo } -fn main() { - let _x = foo( - 4, - ); -}"#, - ); - } - - #[test] - fn never_hide_param_when_multiple_params() { - check_params( - r#" -fn foo(foo: i32, bar: i32) -> i32 { bar + baz } -fn main() { - let _x = foo( - 4, - //^ foo - 8, - //^ bar - ); -}"#, - ); - } - - #[test] - fn param_hints_look_through_as_ref_and_clone() { - check_params( - r#" -fn foo(bar: i32, baz: f32) {} - -fn main() { - let bar = 3; - let baz = &"baz"; - let fez = 1.0; - foo(bar.clone(), bar.clone()); - //^^^^^^^^^^^ baz - foo(bar.as_ref(), bar.as_ref()); - //^^^^^^^^^^^^ baz -} -"#, - ); - } - - #[test] - fn self_param_hints() { - check_params( - r#" -struct Foo; - -impl Foo { - fn foo(self: Self) {} - fn bar(self: &Self) {} -} - -fn main() { - Foo::foo(Foo); - //^^^ self - Foo::bar(&Foo); - //^^^^ self -} -"#, - ) - } - - #[test] - fn param_name_hints_show_for_literals() { - check_params( - r#"pub fn test(a: i32, b: i32) -> [i32; 2] { [a, b] } -fn main() { - test( - 0xa_b, - //^^^^^ a - 0xa_b, - //^^^^^ b - ); -}"#, - ) - } - - #[test] - fn function_call_parameter_hint() { - check_params( - r#" -//- minicore: option -struct FileId {} -struct SmolStr {} - -struct TextRange {} -struct SyntaxKind {} -struct NavigationTarget {} - -struct Test {} - -impl Test { - fn method(&self, mut param: i32) -> i32 { param * 2 } - - fn from_syntax( - file_id: FileId, - name: SmolStr, - focus_range: Option, - full_range: TextRange, - kind: SyntaxKind, - docs: Option, - ) -> NavigationTarget { - NavigationTarget {} - } -} - -fn test_func(mut foo: i32, bar: i32, msg: &str, _: i32, last: i32) -> i32 { - foo + bar -} - -fn main() { - let not_literal = 1; - let _: i32 = test_func(1, 2, "hello", 3, not_literal); - //^ foo ^ bar ^^^^^^^ msg 
^^^^^^^^^^^ last - let t: Test = Test {}; - t.method(123); - //^^^ param - Test::method(&t, 3456); - //^^ self ^^^^ param - Test::from_syntax( - FileId {}, - "impl".into(), - //^^^^^^^^^^^^^ name - None, - //^^^^ focus_range - TextRange {}, - //^^^^^^^^^^^^ full_range - SyntaxKind {}, - //^^^^^^^^^^^^^ kind - None, - //^^^^ docs - ); -}"#, - ); - } - - #[test] - fn parameter_hint_heuristics() { - check_params( - r#" -fn check(ra_fixture_thing: &str) {} - -fn map(f: i32) {} -fn filter(predicate: i32) {} - -fn strip_suffix(suffix: &str) {} -fn stripsuffix(suffix: &str) {} -fn same(same: u32) {} -fn same2(_same2: u32) {} - -fn enum_matches_param_name(completion_kind: CompletionKind) {} - -fn foo(param: u32) {} -fn bar(param_eter: u32) {} - -enum CompletionKind { - Keyword, -} - -fn non_ident_pat((a, b): (u32, u32)) {} - -fn main() { - const PARAM: u32 = 0; - foo(PARAM); - foo(!PARAM); - // ^^^^^^ param - check(""); - - map(0); - filter(0); - - strip_suffix(""); - stripsuffix(""); - //^^ suffix - same(0); - same2(0); - - enum_matches_param_name(CompletionKind::Keyword); - - let param = 0; - foo(param); - foo(param as _); - let param_end = 0; - foo(param_end); - let start_param = 0; - foo(start_param); - let param2 = 0; - foo(param2); - //^^^^^^ param - - macro_rules! param { - () => {}; - }; - foo(param!()); - - let param_eter = 0; - bar(param_eter); - let param_eter_end = 0; - bar(param_eter_end); - let start_param_eter = 0; - bar(start_param_eter); - let param_eter2 = 0; - bar(param_eter2); - //^^^^^^^^^^^ param_eter - - non_ident_pat((0, 0)); -}"#, - ); - } - - // Type-Hint tests - - #[test] - fn type_hints_only() { - check_types( - r#" -fn foo(a: i32, b: i32) -> i32 { a + b } -fn main() { - let _x = foo(4, 4); - //^^ i32 -}"#, - ); - } - - #[test] - fn type_hints_bindings_after_at() { - check_types( - r#" -//- minicore: option -fn main() { - let ref foo @ bar @ ref mut baz = 0; - //^^^ &i32 - //^^^ i32 - //^^^ &mut i32 - let [x @ ..] = [0]; - //^ [i32; 1] - if let x @ Some(_) = Some(0) {} - //^ Option - let foo @ (bar, baz) = (3, 3); - //^^^ (i32, i32) - //^^^ i32 - //^^^ i32 -}"#, - ); - } - - #[test] - fn default_generic_types_should_not_be_displayed() { - check( - r#" -struct Test { k: K, t: T } - -fn main() { - let zz = Test { t: 23u8, k: 33 }; - //^^ Test - let zz_ref = &zz; - //^^^^^^ &Test - let test = || zz; - //^^^^ || -> Test -}"#, - ); - } - - #[test] - fn shorten_iterators_in_associated_params() { - check_types( - r#" -//- minicore: iterators -use core::iter; - -pub struct SomeIter {} - -impl SomeIter { - pub fn new() -> Self { SomeIter {} } - pub fn push(&mut self, t: T) {} -} - -impl Iterator for SomeIter { - type Item = T; - fn next(&mut self) -> Option { - None - } -} - -fn main() { - let mut some_iter = SomeIter::new(); - //^^^^^^^^^ SomeIter>> - some_iter.push(iter::repeat(2).take(2)); - let iter_of_iters = some_iter.take(2); - //^^^^^^^^^^^^^ impl Iterator> -} -"#, - ); - } - - #[test] - fn iterator_hint_regression_issue_12674() { - // Ensure we don't crash while solving the projection type of iterators. 
- check_expect( - InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG }, - r#" -//- minicore: iterators -struct S(T); -impl S { - fn iter(&self) -> Iter<'_, T> { loop {} } -} -struct Iter<'a, T: 'a>(&'a T); -impl<'a, T> Iterator for Iter<'a, T> { - type Item = &'a T; - fn next(&mut self) -> Option { loop {} } -} -struct Container<'a> { - elements: S<&'a str>, -} -struct SliceIter<'a, T>(&'a T); -impl<'a, T> Iterator for SliceIter<'a, T> { - type Item = &'a T; - fn next(&mut self) -> Option { loop {} } -} - -fn main(a: SliceIter<'_, Container>) { - a - .filter_map(|c| Some(c.elements.iter().filter_map(|v| Some(v)))) - .map(|e| e); -} - "#, - expect![[r#" - [ - InlayHint { - range: 484..554, - kind: ChainingHint, - label: [ - "impl Iterator>", - ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 484..554, - ), - ), - }, - InlayHint { - range: 484..485, - kind: ChainingHint, - label: [ - "SliceIter", - ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 484..485, - ), - ), - }, - ] - "#]], - ); - } - - #[test] - fn infer_call_method_return_associated_types_with_generic() { - check_types( - r#" - pub trait Default { - fn default() -> Self; - } - pub trait Foo { - type Bar: Default; - } - - pub fn quux() -> T::Bar { - let y = Default::default(); - //^ ::Bar - - y - } - "#, - ); - } - - #[test] - fn fn_hints() { - check_types( - r#" -//- minicore: fn, sized -fn foo() -> impl Fn() { loop {} } -fn foo1() -> impl Fn(f64) { loop {} } -fn foo2() -> impl Fn(f64, f64) { loop {} } -fn foo3() -> impl Fn(f64, f64) -> u32 { loop {} } -fn foo4() -> &'static dyn Fn(f64, f64) -> u32 { loop {} } -fn foo5() -> &'static dyn Fn(&'static dyn Fn(f64, f64) -> u32, f64) -> u32 { loop {} } -fn foo6() -> impl Fn(f64, f64) -> u32 + Sized { loop {} } -fn foo7() -> *const (impl Fn(f64, f64) -> u32 + Sized) { loop {} } - -fn main() { - let foo = foo(); - // ^^^ impl Fn() - let foo = foo1(); - // ^^^ impl Fn(f64) - let foo = foo2(); - // ^^^ impl Fn(f64, f64) - let foo = foo3(); - // ^^^ impl Fn(f64, f64) -> u32 - let foo = foo4(); - // ^^^ &dyn Fn(f64, f64) -> u32 - let foo = foo5(); - // ^^^ &dyn Fn(&dyn Fn(f64, f64) -> u32, f64) -> u32 - let foo = foo6(); - // ^^^ impl Fn(f64, f64) -> u32 - let foo = foo7(); - // ^^^ *const impl Fn(f64, f64) -> u32 -} -"#, - ) - } - - #[test] - fn check_hint_range_limit() { - let fixture = r#" - //- minicore: fn, sized - fn foo() -> impl Fn() { loop {} } - fn foo1() -> impl Fn(f64) { loop {} } - fn foo2() -> impl Fn(f64, f64) { loop {} } - fn foo3() -> impl Fn(f64, f64) -> u32 { loop {} } - fn foo4() -> &'static dyn Fn(f64, f64) -> u32 { loop {} } - fn foo5() -> &'static dyn Fn(&'static dyn Fn(f64, f64) -> u32, f64) -> u32 { loop {} } - fn foo6() -> impl Fn(f64, f64) -> u32 + Sized { loop {} } - fn foo7() -> *const (impl Fn(f64, f64) -> u32 + Sized) { loop {} } - - fn main() { - let foo = foo(); - let foo = foo1(); - let foo = foo2(); - // ^^^ impl Fn(f64, f64) - let foo = foo3(); - // ^^^ impl Fn(f64, f64) -> u32 - let foo = foo4(); - let foo = foo5(); - let foo = foo6(); - let foo = foo7(); - } - "#; - let (analysis, file_id) = fixture::file(fixture); - let expected = extract_annotations(&*analysis.file_text(file_id).unwrap()); - let inlay_hints = analysis - .inlay_hints( - &InlayHintsConfig { type_hints: true, ..DISABLED_CONFIG }, - file_id, - Some(TextRange::new(TextSize::from(500), TextSize::from(600))), - ) - .unwrap(); - let actual = - inlay_hints.into_iter().map(|it| (it.range, it.label.to_string())).collect::>(); - assert_eq!(expected, actual, 
"\nExpected:\n{:#?}\n\nActual:\n{:#?}", expected, actual); - } - - #[test] - fn fn_hints_ptr_rpit_fn_parentheses() { - check_types( - r#" -//- minicore: fn, sized -trait Trait {} - -fn foo1() -> *const impl Fn() { loop {} } -fn foo2() -> *const (impl Fn() + Sized) { loop {} } -fn foo3() -> *const (impl Fn() + ?Sized) { loop {} } -fn foo4() -> *const (impl Sized + Fn()) { loop {} } -fn foo5() -> *const (impl ?Sized + Fn()) { loop {} } -fn foo6() -> *const (impl Fn() + Trait) { loop {} } -fn foo7() -> *const (impl Fn() + Sized + Trait) { loop {} } -fn foo8() -> *const (impl Fn() + ?Sized + Trait) { loop {} } -fn foo9() -> *const (impl Fn() -> u8 + ?Sized) { loop {} } -fn foo10() -> *const (impl Fn() + Sized + ?Sized) { loop {} } - -fn main() { - let foo = foo1(); - // ^^^ *const impl Fn() - let foo = foo2(); - // ^^^ *const impl Fn() - let foo = foo3(); - // ^^^ *const (impl Fn() + ?Sized) - let foo = foo4(); - // ^^^ *const impl Fn() - let foo = foo5(); - // ^^^ *const (impl Fn() + ?Sized) - let foo = foo6(); - // ^^^ *const (impl Fn() + Trait) - let foo = foo7(); - // ^^^ *const (impl Fn() + Trait) - let foo = foo8(); - // ^^^ *const (impl Fn() + Trait + ?Sized) - let foo = foo9(); - // ^^^ *const (impl Fn() -> u8 + ?Sized) - let foo = foo10(); - // ^^^ *const impl Fn() -} -"#, - ) - } - - #[test] - fn unit_structs_have_no_type_hints() { - check_types( - r#" -//- minicore: result -struct SyntheticSyntax; - -fn main() { - match Ok(()) { - Ok(_) => (), - Err(SyntheticSyntax) => (), - } -}"#, - ); - } - - #[test] - fn let_statement() { - check_types( - r#" -#[derive(PartialEq)] -enum Option { None, Some(T) } - -#[derive(PartialEq)] -struct Test { a: Option, b: u8 } - -fn main() { - struct InnerStruct {} - - let test = 54; - //^^^^ i32 - let test: i32 = 33; - let mut test = 33; - //^^^^ i32 - let _ = 22; - let test = "test"; - //^^^^ &str - let test = InnerStruct {}; - //^^^^ InnerStruct - - let test = unresolved(); - - let test = (42, 'a'); - //^^^^ (i32, char) - let (a, (b, (c,)) = (2, (3, (9.2,)); - //^ i32 ^ i32 ^ f64 - let &x = &92; - //^ i32 -}"#, - ); - } - - #[test] - fn if_expr() { - check_types( - r#" -//- minicore: option -struct Test { a: Option, b: u8 } - -fn main() { - let test = Some(Test { a: Some(3), b: 1 }); - //^^^^ Option - if let None = &test {}; - if let test = &test {}; - //^^^^ &Option - if let Some(test) = &test {}; - //^^^^ &Test - if let Some(Test { a, b }) = &test {}; - //^ &Option ^ &u8 - if let Some(Test { a: x, b: y }) = &test {}; - //^ &Option ^ &u8 - if let Some(Test { a: Some(x), b: y }) = &test {}; - //^ &u32 ^ &u8 - if let Some(Test { a: None, b: y }) = &test {}; - //^ &u8 - if let Some(Test { b: y, .. 
}) = &test {}; - //^ &u8 - if test == None {} -}"#, - ); - } - - #[test] - fn while_expr() { - check_types( - r#" -//- minicore: option -struct Test { a: Option, b: u8 } - -fn main() { - let test = Some(Test { a: Some(3), b: 1 }); - //^^^^ Option - while let Some(Test { a: Some(x), b: y }) = &test {}; - //^ &u32 ^ &u8 -}"#, - ); - } - - #[test] - fn match_arm_list() { - check_types( - r#" -//- minicore: option -struct Test { a: Option, b: u8 } - -fn main() { - match Some(Test { a: Some(3), b: 1 }) { - None => (), - test => (), - //^^^^ Option - Some(Test { a: Some(x), b: y }) => (), - //^ u32 ^ u8 - _ => {} - } -}"#, - ); - } - - #[test] - fn complete_for_hint() { - check_types( - r#" -//- minicore: iterator -pub struct Vec {} - -impl Vec { - pub fn new() -> Self { Vec {} } - pub fn push(&mut self, t: T) {} -} - -impl IntoIterator for Vec { - type Item = T; - type IntoIter = IntoIter; -} - -struct IntoIter {} - -impl Iterator for IntoIter { - type Item = T; -} - -fn main() { - let mut data = Vec::new(); - //^^^^ Vec<&str> - data.push("foo"); - for i in data { - //^ &str - let z = i; - //^ &str - } -} -"#, - ); - } - - #[test] - fn multi_dyn_trait_bounds() { - check_types( - r#" -pub struct Vec {} - -impl Vec { - pub fn new() -> Self { Vec {} } -} - -pub struct Box {} - -trait Display {} -auto trait Sync {} - -fn main() { - // The block expression wrapping disables the constructor hint hiding logic - let _v = { Vec::>::new() }; - //^^ Vec> - let _v = { Vec::>::new() }; - //^^ Vec> - let _v = { Vec::>::new() }; - //^^ Vec> -} -"#, - ); - } - - #[test] - fn shorten_iterator_hints() { - check_types( - r#" -//- minicore: iterators -use core::iter; - -struct MyIter; - -impl Iterator for MyIter { - type Item = (); - fn next(&mut self) -> Option { - None - } -} - -fn main() { - let _x = MyIter; - //^^ MyIter - let _x = iter::repeat(0); - //^^ impl Iterator - fn generic(t: T) { - let _x = iter::repeat(t); - //^^ impl Iterator - let _chained = iter::repeat(t).take(10); - //^^^^^^^^ impl Iterator - } -} -"#, - ); - } - - #[test] - fn skip_constructor_and_enum_type_hints() { - check_with_config( - InlayHintsConfig { - type_hints: true, - hide_named_constructor_hints: true, - ..DISABLED_CONFIG - }, - r#" -//- minicore: try, option -use core::ops::ControlFlow; - -mod x { - pub mod y { pub struct Foo; } - pub struct Foo; - pub enum AnotherEnum { - Variant() - }; -} -struct Struct; -struct TupleStruct(); - -impl Struct { - fn new() -> Self { - Struct - } - fn try_new() -> ControlFlow<(), Self> { - ControlFlow::Continue(Struct) - } -} - -struct Generic(T); -impl Generic { - fn new() -> Self { - Generic(0) - } -} - -enum Enum { - Variant(u32) -} - -fn times2(value: i32) -> i32 { - 2 * value -} - -fn main() { - let enumb = Enum::Variant(0); - - let strukt = x::Foo; - let strukt = x::y::Foo; - let strukt = Struct; - let strukt = Struct::new(); - - let tuple_struct = TupleStruct(); - - let generic0 = Generic::new(); - // ^^^^^^^^ Generic - let generic1 = Generic(0); - // ^^^^^^^^ Generic - let generic2 = Generic::::new(); - let generic3 = >::new(); - let generic4 = Generic::(0); - - - let option = Some(0); - // ^^^^^^ Option - let func = times2; - // ^^^^ fn times2(i32) -> i32 - let closure = |x: i32| x * 2; - // ^^^^^^^ |i32| -> i32 -} - -fn fallible() -> ControlFlow<()> { - let strukt = Struct::try_new()?; -} -"#, - ); - } - - #[test] - fn shows_constructor_type_hints_when_enabled() { - check_types( - r#" -//- minicore: try -use core::ops::ControlFlow; - -struct Struct; -struct TupleStruct(); - -impl Struct 
{ - fn new() -> Self { - Struct - } - fn try_new() -> ControlFlow<(), Self> { - ControlFlow::Continue(Struct) - } -} - -struct Generic(T); -impl Generic { - fn new() -> Self { - Generic(0) - } -} - -fn main() { - let strukt = Struct::new(); - // ^^^^^^ Struct - let tuple_struct = TupleStruct(); - // ^^^^^^^^^^^^ TupleStruct - let generic0 = Generic::new(); - // ^^^^^^^^ Generic - let generic1 = Generic::::new(); - // ^^^^^^^^ Generic - let generic2 = >::new(); - // ^^^^^^^^ Generic -} - -fn fallible() -> ControlFlow<()> { - let strukt = Struct::try_new()?; - // ^^^^^^ Struct -} -"#, - ); - } - - #[test] - fn closures() { - check( - r#" -fn main() { - let mut start = 0; - //^^^^^ i32 - (0..2).for_each(|increment | { start += increment; }); - //^^^^^^^^^ i32 - - let multiply = - //^^^^^^^^ |i32, i32| -> i32 - | a, b| a * b - //^ i32 ^ i32 - - ; - - let _: i32 = multiply(1, 2); - //^ a ^ b - let multiply_ref = &multiply; - //^^^^^^^^^^^^ &|i32, i32| -> i32 - - let return_42 = || 42; - //^^^^^^^^^ || -> i32 - || { 42 }; - //^^ i32 -}"#, - ); - } - - #[test] - fn return_type_hints_for_closure_without_block() { - check_with_config( - InlayHintsConfig { - closure_return_type_hints: ClosureReturnTypeHints::Always, - ..DISABLED_CONFIG - }, - r#" -fn main() { - let a = || { 0 }; - //^^ i32 - let b = || 0; - //^^ i32 -}"#, - ); - } - - #[test] - fn skip_closure_type_hints() { - check_with_config( - InlayHintsConfig { - type_hints: true, - hide_closure_initialization_hints: true, - ..DISABLED_CONFIG - }, - r#" -//- minicore: fn -fn main() { - let multiple_2 = |x: i32| { x * 2 }; - - let multiple_2 = |x: i32| x * 2; - // ^^^^^^^^^^ |i32| -> i32 - - let (not) = (|x: bool| { !x }); - // ^^^ |bool| -> bool - - let (is_zero, _b) = (|x: usize| { x == 0 }, false); - // ^^^^^^^ |usize| -> bool - // ^^ bool - - let plus_one = |x| { x + 1 }; - // ^ u8 - foo(plus_one); - - let add_mul = bar(|x: u8| { x + 1 }); - // ^^^^^^^ impl FnOnce(u8) -> u8 + ?Sized - - let closure = if let Some(6) = add_mul(2).checked_sub(1) { - // ^^^^^^^ fn(i32) -> i32 - |x: i32| { x * 2 } - } else { - |x: i32| { x * 3 } - }; -} - -fn foo(f: impl FnOnce(u8) -> u8) {} - -fn bar(f: impl FnOnce(u8) -> u8) -> impl FnOnce(u8) -> u8 { - move |x: u8| f(x) * 2 -} -"#, - ); - } - - #[test] - fn hint_truncation() { - check_with_config( - InlayHintsConfig { max_length: Some(8), ..TEST_CONFIG }, - r#" -struct Smol(T); - -struct VeryLongOuterName(T); - -fn main() { - let a = Smol(0u32); - //^ Smol - let b = VeryLongOuterName(0usize); - //^ VeryLongOuterName<…> - let c = Smol(Smol(0u32)) - //^ Smol> -}"#, - ); - } - - // Chaining hint tests - - #[test] - fn chaining_hints_ignore_comments() { - check_expect( - InlayHintsConfig { type_hints: false, chaining_hints: true, ..DISABLED_CONFIG }, - r#" -struct A(B); -impl A { fn into_b(self) -> B { self.0 } } -struct B(C); -impl B { fn into_c(self) -> C { self.0 } } -struct C; - -fn main() { - let c = A(B(C)) - .into_b() // This is a comment - // This is another comment - .into_c(); -} -"#, - expect![[r#" - [ - InlayHint { - range: 147..172, - kind: ChainingHint, - label: [ - "B", - ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 147..172, - ), - ), - }, - InlayHint { - range: 147..154, - kind: ChainingHint, - label: [ - "A", - ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 147..154, - ), - ), - }, - ] - "#]], - ); - } - - #[test] - fn chaining_hints_without_newlines() { - check_chains( - r#" -struct A(B); -impl A { fn into_b(self) -> B { self.0 } } -struct B(C); -impl B { fn into_c(self) -> 
C { self.0 } } -struct C; - -fn main() { - let c = A(B(C)).into_b().into_c(); -}"#, - ); - } - - #[test] - fn struct_access_chaining_hints() { - check_expect( - InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG }, - r#" -struct A { pub b: B } -struct B { pub c: C } -struct C(pub bool); -struct D; - -impl D { - fn foo(&self) -> i32 { 42 } -} - -fn main() { - let x = A { b: B { c: C(true) } } - .b - .c - .0; - let x = D - .foo(); -}"#, - expect![[r#" - [ - InlayHint { - range: 143..190, - kind: ChainingHint, - label: [ - "C", - ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 143..190, - ), - ), - }, - InlayHint { - range: 143..179, - kind: ChainingHint, - label: [ - "B", - ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 143..179, - ), - ), - }, - ] - "#]], - ); - } - - #[test] - fn generic_chaining_hints() { - check_expect( - InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG }, - r#" -struct A(T); -struct B(T); -struct C(T); -struct X(T, R); - -impl A { - fn new(t: T) -> Self { A(t) } - fn into_b(self) -> B { B(self.0) } -} -impl B { - fn into_c(self) -> C { C(self.0) } -} -fn main() { - let c = A::new(X(42, true)) - .into_b() - .into_c(); -} -"#, - expect![[r#" - [ - InlayHint { - range: 246..283, - kind: ChainingHint, - label: [ - "B>", - ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 246..283, - ), - ), - }, - InlayHint { - range: 246..265, - kind: ChainingHint, - label: [ - "A>", - ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 246..265, - ), - ), - }, - ] - "#]], - ); - } - - #[test] - fn shorten_iterator_chaining_hints() { - check_expect( - InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG }, - r#" -//- minicore: iterators -use core::iter; - -struct MyIter; - -impl Iterator for MyIter { - type Item = (); - fn next(&mut self) -> Option { - None - } -} - -fn main() { - let _x = MyIter.by_ref() - .take(5) - .by_ref() - .take(5) - .by_ref(); -} -"#, - expect![[r#" - [ - InlayHint { - range: 174..241, - kind: ChainingHint, - label: [ - "impl Iterator", - ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 174..241, - ), - ), - }, - InlayHint { - range: 174..224, - kind: ChainingHint, - label: [ - "impl Iterator", - ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 174..224, - ), - ), - }, - InlayHint { - range: 174..206, - kind: ChainingHint, - label: [ - "impl Iterator", - ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 174..206, - ), - ), - }, - InlayHint { - range: 174..189, - kind: ChainingHint, - label: [ - "&mut MyIter", - ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 174..189, - ), - ), - }, - ] - "#]], - ); - } - - #[test] - fn hints_in_attr_call() { - check_expect( - TEST_CONFIG, - r#" -//- proc_macros: identity, input_replace -struct Struct; -impl Struct { - fn chain(self) -> Self { - self - } -} -#[proc_macros::identity] -fn main() { - let strukt = Struct; - strukt - .chain() - .chain() - .chain(); - Struct::chain(strukt); -} -"#, - expect![[r#" - [ - InlayHint { - range: 124..130, - kind: TypeHint, - label: [ - "Struct", - ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 124..130, - ), - ), - }, - InlayHint { - range: 145..185, - kind: ChainingHint, - label: [ - "Struct", - ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 145..185, - ), - ), - }, - InlayHint { - range: 145..168, - kind: ChainingHint, - label: [ - "Struct", - ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 145..168, - ), - ), - }, - InlayHint { - range: 222..228, - kind: ParameterHint, - 
label: [ - "self", - ], - tooltip: Some( - HoverOffset( - FileId( - 0, - ), - 42, - ), - ), - }, - ] - "#]], - ); - } - - #[test] - fn hints_lifetimes() { - check( - r#" -fn empty() {} - -fn no_gpl(a: &()) {} - //^^^^^^<'0> - // ^'0 -fn empty_gpl<>(a: &()) {} - // ^'0 ^'0 -fn partial<'b>(a: &(), b: &'b ()) {} -// ^'0, $ ^'0 -fn partial<'a>(a: &'a (), b: &()) {} -// ^'0, $ ^'0 - -fn single_ret(a: &()) -> &() {} -// ^^^^^^^^^^<'0> - // ^'0 ^'0 -fn full_mul(a: &(), b: &()) {} -// ^^^^^^^^<'0, '1> - // ^'0 ^'1 - -fn foo<'c>(a: &'c ()) -> &() {} - // ^'c - -fn nested_in(a: & &X< &()>) {} -// ^^^^^^^^^<'0, '1, '2> - //^'0 ^'1 ^'2 -fn nested_out(a: &()) -> & &X< &()>{} -// ^^^^^^^^^^<'0> - //^'0 ^'0 ^'0 ^'0 - -impl () { - fn foo(&self) {} - // ^^^<'0> - // ^'0 - fn foo(&self) -> &() {} - // ^^^<'0> - // ^'0 ^'0 - fn foo(&self, a: &()) -> &() {} - // ^^^<'0, '1> - // ^'0 ^'1 ^'0 -} -"#, - ); - } - - #[test] - fn hints_lifetimes_named() { - check_with_config( - InlayHintsConfig { param_names_for_lifetime_elision_hints: true, ..TEST_CONFIG }, - r#" -fn nested_in<'named>(named: & &X< &()>) {} -// ^'named1, 'named2, 'named3, $ - //^'named1 ^'named2 ^'named3 -"#, - ); - } - - #[test] - fn hints_lifetimes_trivial_skip() { - check_with_config( - InlayHintsConfig { - lifetime_elision_hints: LifetimeElisionHints::SkipTrivial, - ..TEST_CONFIG - }, - r#" -fn no_gpl(a: &()) {} -fn empty_gpl<>(a: &()) {} -fn partial<'b>(a: &(), b: &'b ()) {} -fn partial<'a>(a: &'a (), b: &()) {} - -fn single_ret(a: &()) -> &() {} -// ^^^^^^^^^^<'0> - // ^'0 ^'0 -fn full_mul(a: &(), b: &()) {} - -fn foo<'c>(a: &'c ()) -> &() {} - // ^'c - -fn nested_in(a: & &X< &()>) {} -fn nested_out(a: &()) -> & &X< &()>{} -// ^^^^^^^^^^<'0> - //^'0 ^'0 ^'0 ^'0 - -impl () { - fn foo(&self) {} - fn foo(&self) -> &() {} - // ^^^<'0> - // ^'0 ^'0 - fn foo(&self, a: &()) -> &() {} - // ^^^<'0, '1> - // ^'0 ^'1 ^'0 -} -"#, - ); - } - - #[test] - fn hints_lifetimes_static() { - check_with_config( - InlayHintsConfig { - lifetime_elision_hints: LifetimeElisionHints::Always, - ..TEST_CONFIG - }, - r#" -trait Trait {} -static S: &str = ""; -// ^'static -const C: &str = ""; -// ^'static -const C: &dyn Trait = panic!(); -// ^'static - -impl () { - const C: &str = ""; - const C: &dyn Trait = panic!(); -} -"#, - ); - } - - #[test] - fn hints_binding_modes() { - check_with_config( - InlayHintsConfig { binding_mode_hints: true, ..DISABLED_CONFIG }, - r#" -fn __( - (x,): (u32,), - (x,): &(u32,), - //^^^^& - //^ ref - (x,): &mut (u32,) - //^^^^&mut - //^ ref mut -) { - let (x,) = (0,); - let (x,) = &(0,); - //^^^^ & - //^ ref - let (x,) = &mut (0,); - //^^^^ &mut - //^ ref mut - let &mut (x,) = &mut (0,); - let (ref mut x,) = &mut (0,); - //^^^^^^^^^^^^ &mut - let &mut (ref mut x,) = &mut (0,); - let (mut x,) = &mut (0,); - //^^^^^^^^ &mut - match (0,) { - (x,) => () - } - match &(0,) { - (x,) => () - //^^^^ & - //^ ref - } - match &mut (0,) { - (x,) => () - //^^^^ &mut - //^ ref mut - } -}"#, - ); - } - - #[test] - fn hints_closing_brace() { - check_with_config( - InlayHintsConfig { closing_brace_hints_min_lines: Some(2), ..DISABLED_CONFIG }, - r#" -fn a() {} - -fn f() { -} // no hint unless `}` is the last token on the line - -fn g() { - } -//^ fn g - -fn h(with: T, arguments: u8, ...) 
{ - } -//^ fn h - -trait Tr { - fn f(); - fn g() { - } - //^ fn g - } -//^ trait Tr -impl Tr for () { - } -//^ impl Tr for () -impl dyn Tr { - } -//^ impl dyn Tr - -static S0: () = 0; -static S1: () = {}; -static S2: () = { - }; -//^ static S2 -const _: () = { - }; -//^ const _ - -mod m { - } -//^ mod m - -m! {} -m!(); -m!( - ); -//^ m! - -m! { - } -//^ m! - -fn f() { - let v = vec![ - ]; - } -//^ fn f -"#, - ); - } - - #[test] - fn adjustment_hints() { - check_with_config( - InlayHintsConfig { adjustment_hints: AdjustmentHints::Always, ..DISABLED_CONFIG }, - r#" -//- minicore: coerce_unsized -fn main() { - let _: u32 = loop {}; - //^^^^^^^ - let _: &u32 = &mut 0; - //^^^^^^& - //^^^^^^* - let _: &mut u32 = &mut 0; - //^^^^^^&mut $ - //^^^^^^* - let _: *const u32 = &mut 0; - //^^^^^^&raw const $ - //^^^^^^* - let _: *mut u32 = &mut 0; - //^^^^^^&raw mut $ - //^^^^^^* - let _: fn() = main; - //^^^^ - let _: unsafe fn() = main; - //^^^^ - //^^^^ - let _: unsafe fn() = main as fn(); - //^^^^^^^^^^^^ - let _: fn() = || {}; - //^^^^^ - let _: unsafe fn() = || {}; - //^^^^^ - let _: *const u32 = &mut 0u32 as *mut u32; - //^^^^^^^^^^^^^^^^^^^^^ - let _: &mut [_] = &mut [0; 0]; - //^^^^^^^^^^^ - //^^^^^^^^^^^&mut $ - //^^^^^^^^^^^* - - Struct.consume(); - Struct.by_ref(); - //^^^^^^( - //^^^^^^& - //^^^^^^) - Struct.by_ref_mut(); - //^^^^^^( - //^^^^^^&mut $ - //^^^^^^) - - (&Struct).consume(); - //^^^^^^^* - (&Struct).by_ref(); - - (&mut Struct).consume(); - //^^^^^^^^^^^* - (&mut Struct).by_ref(); - //^^^^^^^^^^^& - //^^^^^^^^^^^* - (&mut Struct).by_ref_mut(); -} - -#[derive(Copy, Clone)] -struct Struct; -impl Struct { - fn consume(self) {} - fn by_ref(&self) {} - fn by_ref_mut(&mut self) {} -} -"#, - ) - } } diff --git a/crates/ide/src/inlay_hints/adjustment.rs b/crates/ide/src/inlay_hints/adjustment.rs new file mode 100644 index 0000000000..bdd7c05e00 --- /dev/null +++ b/crates/ide/src/inlay_hints/adjustment.rs @@ -0,0 +1,630 @@ +//! Implementation of "adjustment" inlay hints: +//! ```no_run +//! let _: u32 = /* */ loop {}; +//! let _: &u32 = /* &* */ &mut 0; +//! 
``` +use hir::{Adjust, AutoBorrow, Mutability, OverloadedDeref, PointerCast, Safety, Semantics}; +use ide_db::RootDatabase; + +use syntax::{ + ast::{self, make, AstNode}, + ted, +}; + +use crate::{AdjustmentHints, AdjustmentHintsMode, InlayHint, InlayHintsConfig, InlayKind}; + +pub(super) fn hints( + acc: &mut Vec, + sema: &Semantics<'_, RootDatabase>, + config: &InlayHintsConfig, + expr: &ast::Expr, +) -> Option<()> { + if config.adjustment_hints_hide_outside_unsafe && !sema.is_inside_unsafe(expr) { + return None; + } + + if config.adjustment_hints == AdjustmentHints::Never { + return None; + } + + // These inherit from the inner expression which would result in duplicate hints + if let ast::Expr::ParenExpr(_) + | ast::Expr::IfExpr(_) + | ast::Expr::BlockExpr(_) + | ast::Expr::MatchExpr(_) = expr + { + return None; + } + + let descended = sema.descend_node_into_attributes(expr.clone()).pop(); + let desc_expr = descended.as_ref().unwrap_or(expr); + let adjustments = sema.expr_adjustments(desc_expr).filter(|it| !it.is_empty())?; + + let (postfix, needs_outer_parens, needs_inner_parens) = + mode_and_needs_parens_for_adjustment_hints(expr, config.adjustment_hints_mode); + + if needs_outer_parens { + acc.push(InlayHint { + range: expr.syntax().text_range(), + kind: InlayKind::OpeningParenthesis, + label: "(".into(), + tooltip: None, + }); + } + + if postfix && needs_inner_parens { + acc.push(InlayHint { + range: expr.syntax().text_range(), + kind: InlayKind::OpeningParenthesis, + label: "(".into(), + tooltip: None, + }); + acc.push(InlayHint { + range: expr.syntax().text_range(), + kind: InlayKind::ClosingParenthesis, + label: ")".into(), + tooltip: None, + }); + } + + let (mut tmp0, mut tmp1); + let iter: &mut dyn Iterator = if postfix { + tmp0 = adjustments.into_iter(); + &mut tmp0 + } else { + tmp1 = adjustments.into_iter().rev(); + &mut tmp1 + }; + + for adjustment in iter { + if adjustment.source == adjustment.target { + continue; + } + + // FIXME: Add some nicer tooltips to each of these + let text = match adjustment.kind { + Adjust::NeverToAny if config.adjustment_hints == AdjustmentHints::Always => { + "" + } + Adjust::Deref(None) => "*", + Adjust::Deref(Some(OverloadedDeref(Mutability::Mut))) => "*", + Adjust::Deref(Some(OverloadedDeref(Mutability::Shared))) => "*", + Adjust::Borrow(AutoBorrow::Ref(Mutability::Shared)) => "&", + Adjust::Borrow(AutoBorrow::Ref(Mutability::Mut)) => "&mut ", + Adjust::Borrow(AutoBorrow::RawPtr(Mutability::Shared)) => "&raw const ", + Adjust::Borrow(AutoBorrow::RawPtr(Mutability::Mut)) => "&raw mut ", + // some of these could be represented via `as` casts, but that's not too nice and + // handling everything as a prefix expr makes the `(` and `)` insertion easier + Adjust::Pointer(cast) if config.adjustment_hints == AdjustmentHints::Always => { + match cast { + PointerCast::ReifyFnPointer => "", + PointerCast::UnsafeFnPointer => "", + PointerCast::ClosureFnPointer(Safety::Unsafe) => { + "" + } + PointerCast::ClosureFnPointer(Safety::Safe) => "", + PointerCast::MutToConstPointer => "", + PointerCast::ArrayToPointer => "", + PointerCast::Unsize => "", + } + } + _ => continue, + }; + acc.push(InlayHint { + range: expr.syntax().text_range(), + kind: if postfix { + InlayKind::AdjustmentHintPostfix + } else { + InlayKind::AdjustmentHint + }, + label: if postfix { format!(".{}", text.trim_end()).into() } else { text.into() }, + tooltip: None, + }); + } + if !postfix && needs_inner_parens { + acc.push(InlayHint { + range: expr.syntax().text_range(), + kind: 
InlayKind::OpeningParenthesis, + label: "(".into(), + tooltip: None, + }); + acc.push(InlayHint { + range: expr.syntax().text_range(), + kind: InlayKind::ClosingParenthesis, + label: ")".into(), + tooltip: None, + }); + } + if needs_outer_parens { + acc.push(InlayHint { + range: expr.syntax().text_range(), + kind: InlayKind::ClosingParenthesis, + label: ")".into(), + tooltip: None, + }); + } + Some(()) +} + +/// Returns whether the hint should be postfix and whether we need to add parentheses on the inside and/or outside of `expr`, +/// if we are going to add (`postfix`) adjustment hints to it. +fn mode_and_needs_parens_for_adjustment_hints( + expr: &ast::Expr, + mode: AdjustmentHintsMode, +) -> (bool, bool, bool) { + use {std::cmp::Ordering::*, AdjustmentHintsMode::*}; + + match mode { + Prefix | Postfix => { + let postfix = matches!(mode, Postfix); + let (inside, outside) = needs_parens_for_adjustment_hints(expr, postfix); + (postfix, inside, outside) + } + PreferPrefix | PreferPostfix => { + let prefer_postfix = matches!(mode, PreferPostfix); + + let (pre_inside, pre_outside) = needs_parens_for_adjustment_hints(expr, false); + let prefix = (false, pre_inside, pre_outside); + let pre_count = pre_inside as u8 + pre_outside as u8; + + let (post_inside, post_outside) = needs_parens_for_adjustment_hints(expr, true); + let postfix = (true, post_inside, post_outside); + let post_count = post_inside as u8 + post_outside as u8; + + match pre_count.cmp(&post_count) { + Less => prefix, + Greater => postfix, + Equal if prefer_postfix => postfix, + Equal => prefix, + } + } + } +} + +/// Returns whether we need to add parentheses on the inside and/or outside of `expr`, +/// if we are going to add (`postfix`) adjustment hints to it. +fn needs_parens_for_adjustment_hints(expr: &ast::Expr, postfix: bool) -> (bool, bool) { + // This is a very miserable pile of hacks... + // + // `Expr::needs_parens_in` requires that the expression is the child of the other expression, + // that is supposed to be its parent. + // + // But we want to check what would happen if we add `*`/`.*` to the inner expression. + // To check for inner we need `` expr.needs_parens_in(`*expr`) ``, + // to check for outer we need `` `*expr`.needs_parens_in(parent) ``, + // where "expr" is the `expr` parameter, `*expr` is the edited `expr`, + // and "parent" is the parent of the original expression... + // + // For this we utilize mutable trees, which is a HACK, but it works. + // + // FIXME: come up with a better API for `needs_parens_in`, so that we don't have to do *this* + + // Make `&expr`/`expr?` + let dummy_expr = { + // `make::*` functions go through a string, so they parse wrongly. + // For example, `` make::expr_try(`|| a`) `` would result in a + // `|| (a?)` and not `(|| a)?`. + // + // Thus we need dummy parens to preserve the relationship we want. + // The parens are then simply ignored by the following code. + let dummy_paren = make::expr_paren(expr.clone()); + if postfix { + make::expr_try(dummy_paren) + } else { + make::expr_ref(dummy_paren, false) + } + }; + + // Do the dark mutable tree magic. + // This essentially makes `dummy_expr` and `expr` switch places (families), + // so that `expr`'s parent is not `dummy_expr`'s parent.
+ let dummy_expr = dummy_expr.clone_for_update(); + let expr = expr.clone_for_update(); + ted::replace(expr.syntax(), dummy_expr.syntax()); + + let parent = dummy_expr.syntax().parent(); + let expr = if postfix { + let ast::Expr::TryExpr(e) = &dummy_expr else { unreachable!() }; + let Some(ast::Expr::ParenExpr(e)) = e.expr() else { unreachable!() }; + + e.expr().unwrap() + } else { + let ast::Expr::RefExpr(e) = &dummy_expr else { unreachable!() }; + let Some(ast::Expr::ParenExpr(e)) = e.expr() else { unreachable!() }; + + e.expr().unwrap() + }; + + // At this point + // - `parent` is the parrent of the original expression + // - `dummy_expr` is the original expression wrapped in the operator we want (`*`/`.*`) + // - `expr` is the clone of the original expression (with `dummy_expr` as the parent) + + let needs_outer_parens = parent.map_or(false, |p| dummy_expr.needs_parens_in(p)); + let needs_inner_parens = expr.needs_parens_in(dummy_expr.syntax().clone()); + + (needs_outer_parens, needs_inner_parens) +} + +#[cfg(test)] +mod tests { + use crate::{ + inlay_hints::tests::{check_with_config, DISABLED_CONFIG}, + AdjustmentHints, AdjustmentHintsMode, InlayHintsConfig, + }; + + #[test] + fn adjustment_hints() { + check_with_config( + InlayHintsConfig { adjustment_hints: AdjustmentHints::Always, ..DISABLED_CONFIG }, + r#" +//- minicore: coerce_unsized, fn +fn main() { + let _: u32 = loop {}; + //^^^^^^^ + let _: &u32 = &mut 0; + //^^^^^^& + //^^^^^^* + let _: &mut u32 = &mut 0; + //^^^^^^&mut $ + //^^^^^^* + let _: *const u32 = &mut 0; + //^^^^^^&raw const $ + //^^^^^^* + let _: *mut u32 = &mut 0; + //^^^^^^&raw mut $ + //^^^^^^* + let _: fn() = main; + //^^^^ + let _: unsafe fn() = main; + //^^^^ + //^^^^ + let _: unsafe fn() = main as fn(); + //^^^^^^^^^^^^ + //^^^^^^^^^^^^( + //^^^^^^^^^^^^) + let _: fn() = || {}; + //^^^^^ + let _: unsafe fn() = || {}; + //^^^^^ + let _: *const u32 = &mut 0u32 as *mut u32; + //^^^^^^^^^^^^^^^^^^^^^ + //^^^^^^^^^^^^^^^^^^^^^( + //^^^^^^^^^^^^^^^^^^^^^) + let _: &mut [_] = &mut [0; 0]; + //^^^^^^^^^^^ + //^^^^^^^^^^^&mut $ + //^^^^^^^^^^^* + + Struct.consume(); + Struct.by_ref(); + //^^^^^^( + //^^^^^^& + //^^^^^^) + Struct.by_ref_mut(); + //^^^^^^( + //^^^^^^&mut $ + //^^^^^^) + + (&Struct).consume(); + //^^^^^^^* + (&Struct).by_ref(); + + (&mut Struct).consume(); + //^^^^^^^^^^^* + (&mut Struct).by_ref(); + //^^^^^^^^^^^& + //^^^^^^^^^^^* + (&mut Struct).by_ref_mut(); + + // Check that block-like expressions don't duplicate hints + let _: &mut [u32] = (&mut []); + //^^^^^^^ + //^^^^^^^&mut $ + //^^^^^^^* + let _: &mut [u32] = { &mut [] }; + //^^^^^^^ + //^^^^^^^&mut $ + //^^^^^^^* + let _: &mut [u32] = unsafe { &mut [] }; + //^^^^^^^ + //^^^^^^^&mut $ + //^^^^^^^* + let _: &mut [u32] = if true { + &mut [] + //^^^^^^^ + //^^^^^^^&mut $ + //^^^^^^^* + } else { + loop {} + //^^^^^^^ + }; + let _: &mut [u32] = match () { () => &mut [] } + //^^^^^^^ + //^^^^^^^&mut $ + //^^^^^^^* + + let _: &mut dyn Fn() = &mut || (); + //^^^^^^^^^^ + //^^^^^^^^^^&mut $ + //^^^^^^^^^^* +} + +#[derive(Copy, Clone)] +struct Struct; +impl Struct { + fn consume(self) {} + fn by_ref(&self) {} + fn by_ref_mut(&mut self) {} +} +"#, + ) + } + + #[test] + fn adjustment_hints_postfix() { + check_with_config( + InlayHintsConfig { + adjustment_hints: AdjustmentHints::Always, + adjustment_hints_mode: AdjustmentHintsMode::Postfix, + ..DISABLED_CONFIG + }, + r#" +//- minicore: coerce_unsized, fn +fn main() { + + Struct.consume(); + Struct.by_ref(); + //^^^^^^.& + Struct.by_ref_mut(); + 
//^^^^^^.&mut + + (&Struct).consume(); + //^^^^^^^( + //^^^^^^^) + //^^^^^^^.* + (&Struct).by_ref(); + + (&mut Struct).consume(); + //^^^^^^^^^^^( + //^^^^^^^^^^^) + //^^^^^^^^^^^.* + (&mut Struct).by_ref(); + //^^^^^^^^^^^( + //^^^^^^^^^^^) + //^^^^^^^^^^^.* + //^^^^^^^^^^^.& + (&mut Struct).by_ref_mut(); + + // Check that block-like expressions don't duplicate hints + let _: &mut [u32] = (&mut []); + //^^^^^^^( + //^^^^^^^) + //^^^^^^^.* + //^^^^^^^.&mut + //^^^^^^^. + let _: &mut [u32] = { &mut [] }; + //^^^^^^^( + //^^^^^^^) + //^^^^^^^.* + //^^^^^^^.&mut + //^^^^^^^. + let _: &mut [u32] = unsafe { &mut [] }; + //^^^^^^^( + //^^^^^^^) + //^^^^^^^.* + //^^^^^^^.&mut + //^^^^^^^. + let _: &mut [u32] = if true { + &mut [] + //^^^^^^^( + //^^^^^^^) + //^^^^^^^.* + //^^^^^^^.&mut + //^^^^^^^. + } else { + loop {} + //^^^^^^^. + }; + let _: &mut [u32] = match () { () => &mut [] } + //^^^^^^^( + //^^^^^^^) + //^^^^^^^.* + //^^^^^^^.&mut + //^^^^^^^. + + let _: &mut dyn Fn() = &mut || (); + //^^^^^^^^^^( + //^^^^^^^^^^) + //^^^^^^^^^^.* + //^^^^^^^^^^.&mut + //^^^^^^^^^^. +} + +#[derive(Copy, Clone)] +struct Struct; +impl Struct { + fn consume(self) {} + fn by_ref(&self) {} + fn by_ref_mut(&mut self) {} +} +"#, + ); + } + + #[test] + fn adjustment_hints_prefer_prefix() { + check_with_config( + InlayHintsConfig { + adjustment_hints: AdjustmentHints::Always, + adjustment_hints_mode: AdjustmentHintsMode::PreferPrefix, + ..DISABLED_CONFIG + }, + r#" +fn main() { + let _: u32 = loop {}; + //^^^^^^^ + + Struct.by_ref(); + //^^^^^^.& + + let (): () = return (); + //^^^^^^^^^ + + struct Struct; + impl Struct { fn by_ref(&self) {} } +} + "#, + ) + } + + #[test] + fn adjustment_hints_prefer_postfix() { + check_with_config( + InlayHintsConfig { + adjustment_hints: AdjustmentHints::Always, + adjustment_hints_mode: AdjustmentHintsMode::PreferPostfix, + ..DISABLED_CONFIG + }, + r#" +fn main() { + let _: u32 = loop {}; + //^^^^^^^. + + Struct.by_ref(); + //^^^^^^.& + + let (): () = return (); + //^^^^^^^^^ + + struct Struct; + impl Struct { fn by_ref(&self) {} } +} + "#, + ) + } + + #[test] + fn never_to_never_is_never_shown() { + check_with_config( + InlayHintsConfig { adjustment_hints: AdjustmentHints::Always, ..DISABLED_CONFIG }, + r#" +fn never() -> ! 
{ + return loop {}; +} + +fn or_else() { + let () = () else { return }; +} + "#, + ) + } + + #[test] + fn adjustment_hints_unsafe_only() { + check_with_config( + InlayHintsConfig { + adjustment_hints: AdjustmentHints::Always, + adjustment_hints_hide_outside_unsafe: true, + ..DISABLED_CONFIG + }, + r#" +unsafe fn enabled() { + f(&&()); + //^^^^& + //^^^^* + //^^^^* +} + +fn disabled() { + f(&&()); +} + +fn mixed() { + f(&&()); + + unsafe { + f(&&()); + //^^^^& + //^^^^* + //^^^^* + } +} + +const _: () = { + f(&&()); + + unsafe { + f(&&()); + //^^^^& + //^^^^* + //^^^^* + } +}; + +static STATIC: () = { + f(&&()); + + unsafe { + f(&&()); + //^^^^& + //^^^^* + //^^^^* + } +}; + +enum E { + Disable = { f(&&()); 0 }, + Enable = unsafe { f(&&()); 1 }, + //^^^^& + //^^^^* + //^^^^* +} + +const fn f(_: &()) {} + "#, + ) + } + + #[test] + fn adjustment_hints_unsafe_only_with_item() { + check_with_config( + InlayHintsConfig { + adjustment_hints: AdjustmentHints::Always, + adjustment_hints_hide_outside_unsafe: true, + ..DISABLED_CONFIG + }, + r#" +fn a() { + struct Struct; + impl Struct { + fn by_ref(&self) {} + } + + _ = Struct.by_ref(); + + _ = unsafe { Struct.by_ref() }; + //^^^^^^( + //^^^^^^& + //^^^^^^) +} + "#, + ); + } + + #[test] + fn bug() { + check_with_config( + InlayHintsConfig { adjustment_hints: AdjustmentHints::Always, ..DISABLED_CONFIG }, + r#" +fn main() { + // These should be identical, but they are not... + + let () = return; + let (): () = return; + //^^^^^^ +} + "#, + ) + } +} diff --git a/crates/ide/src/inlay_hints/bind_pat.rs b/crates/ide/src/inlay_hints/bind_pat.rs new file mode 100644 index 0000000000..adec19c765 --- /dev/null +++ b/crates/ide/src/inlay_hints/bind_pat.rs @@ -0,0 +1,978 @@ +//! Implementation of "type" inlay hints: +//! ```no_run +//! fn f(a: i32, b: i32) -> i32 { a + b } +//! let _x /* i32 */= f(4, 4); +//! 
``` +use hir::{Semantics, TypeInfo}; +use ide_db::{base_db::FileId, famous_defs::FamousDefs, RootDatabase}; + +use itertools::Itertools; +use syntax::{ + ast::{self, AstNode, HasName}, + match_ast, +}; + +use crate::{ + inlay_hints::closure_has_block_body, InlayHint, InlayHintsConfig, InlayKind, InlayTooltip, +}; + +use super::label_of_ty; + +pub(super) fn hints( + acc: &mut Vec, + famous_defs @ FamousDefs(sema, _): &FamousDefs<'_, '_>, + config: &InlayHintsConfig, + file_id: FileId, + pat: &ast::IdentPat, +) -> Option<()> { + if !config.type_hints { + return None; + } + + let descended = sema.descend_node_into_attributes(pat.clone()).pop(); + let desc_pat = descended.as_ref().unwrap_or(pat); + let ty = sema.type_of_pat(&desc_pat.clone().into())?.original; + + if should_not_display_type_hint(sema, config, pat, &ty) { + return None; + } + + let label = label_of_ty(famous_defs, config, ty)?; + + if config.hide_named_constructor_hints + && is_named_constructor(sema, pat, &label.to_string()).is_some() + { + return None; + } + + acc.push(InlayHint { + range: match pat.name() { + Some(name) => name.syntax().text_range(), + None => pat.syntax().text_range(), + }, + kind: InlayKind::TypeHint, + label, + tooltip: pat + .name() + .map(|it| it.syntax().text_range()) + .map(|it| InlayTooltip::HoverRanged(file_id, it)), + }); + + Some(()) +} + +fn should_not_display_type_hint( + sema: &Semantics<'_, RootDatabase>, + config: &InlayHintsConfig, + bind_pat: &ast::IdentPat, + pat_ty: &hir::Type, +) -> bool { + let db = sema.db; + + if pat_ty.is_unknown() { + return true; + } + + if let Some(hir::Adt::Struct(s)) = pat_ty.as_adt() { + if s.fields(db).is_empty() && s.name(db).to_smol_str() == bind_pat.to_string() { + return true; + } + } + + if config.hide_closure_initialization_hints { + if let Some(parent) = bind_pat.syntax().parent() { + if let Some(it) = ast::LetStmt::cast(parent) { + if let Some(ast::Expr::ClosureExpr(closure)) = it.initializer() { + if closure_has_block_body(&closure) { + return true; + } + } + } + } + } + + for node in bind_pat.syntax().ancestors() { + match_ast! { + match node { + ast::LetStmt(it) => return it.ty().is_some(), + // FIXME: We might wanna show type hints in parameters for non-top level patterns as well + ast::Param(it) => return it.ty().is_some(), + ast::MatchArm(_) => return pat_is_enum_variant(db, bind_pat, pat_ty), + ast::LetExpr(_) => return pat_is_enum_variant(db, bind_pat, pat_ty), + ast::IfExpr(_) => return false, + ast::WhileExpr(_) => return false, + ast::ForExpr(it) => { + // We *should* display hint only if user provided "in {expr}" and we know the type of expr (and it's not unit). + // Type of expr should be iterable. + return it.in_token().is_none() || + it.iterable() + .and_then(|iterable_expr| sema.type_of_expr(&iterable_expr)) + .map(TypeInfo::original) + .map_or(true, |iterable_ty| iterable_ty.is_unknown() || iterable_ty.is_unit()) + }, + _ => (), + } + } + } + false +} + +fn is_named_constructor( + sema: &Semantics<'_, RootDatabase>, + pat: &ast::IdentPat, + ty_name: &str, +) -> Option<()> { + let let_node = pat.syntax().parent()?; + let expr = match_ast! 
{ + match let_node { + ast::LetStmt(it) => it.initializer(), + ast::LetExpr(it) => it.expr(), + _ => None, + } + }?; + + let expr = sema.descend_node_into_attributes(expr.clone()).pop().unwrap_or(expr); + // unwrap postfix expressions + let expr = match expr { + ast::Expr::TryExpr(it) => it.expr(), + ast::Expr::AwaitExpr(it) => it.expr(), + expr => Some(expr), + }?; + let expr = match expr { + ast::Expr::CallExpr(call) => match call.expr()? { + ast::Expr::PathExpr(path) => path, + _ => return None, + }, + ast::Expr::PathExpr(path) => path, + _ => return None, + }; + let path = expr.path()?; + + let callable = sema.type_of_expr(&ast::Expr::PathExpr(expr))?.original.as_callable(sema.db); + let callable_kind = callable.map(|it| it.kind()); + let qual_seg = match callable_kind { + Some(hir::CallableKind::Function(_) | hir::CallableKind::TupleEnumVariant(_)) => { + path.qualifier()?.segment() + } + _ => path.segment(), + }?; + + let ctor_name = match qual_seg.kind()? { + ast::PathSegmentKind::Name(name_ref) => { + match qual_seg.generic_arg_list().map(|it| it.generic_args()) { + Some(generics) => format!("{name_ref}<{}>", generics.format(", ")), + None => name_ref.to_string(), + } + } + ast::PathSegmentKind::Type { type_ref: Some(ty), trait_ref: None } => ty.to_string(), + _ => return None, + }; + (ctor_name == ty_name).then_some(()) +} + +fn pat_is_enum_variant(db: &RootDatabase, bind_pat: &ast::IdentPat, pat_ty: &hir::Type) -> bool { + if let Some(hir::Adt::Enum(enum_data)) = pat_ty.as_adt() { + let pat_text = bind_pat.to_string(); + enum_data + .variants(db) + .into_iter() + .map(|variant| variant.name(db).to_smol_str()) + .any(|enum_name| enum_name == pat_text) + } else { + false + } +} + +#[cfg(test)] +mod tests { + // This module also contains tests for super::closure_ret + + use expect_test::expect; + use syntax::{TextRange, TextSize}; + use test_utils::extract_annotations; + + use crate::{fixture, inlay_hints::InlayHintsConfig}; + + use crate::inlay_hints::tests::{ + check, check_expect, check_with_config, DISABLED_CONFIG, DISABLED_CONFIG_WITH_LINKS, + TEST_CONFIG, + }; + use crate::ClosureReturnTypeHints; + + #[track_caller] + fn check_types(ra_fixture: &str) { + check_with_config(InlayHintsConfig { type_hints: true, ..DISABLED_CONFIG }, ra_fixture); + } + + #[test] + fn type_hints_only() { + check_types( + r#" +fn foo(a: i32, b: i32) -> i32 { a + b } +fn main() { + let _x = foo(4, 4); + //^^ i32 +}"#, + ); + } + + #[test] + fn type_hints_bindings_after_at() { + check_types( + r#" +//- minicore: option +fn main() { + let ref foo @ bar @ ref mut baz = 0; + //^^^ &i32 + //^^^ i32 + //^^^ &mut i32 + let [x @ ..] 
= [0]; + //^ [i32; 1] + if let x @ Some(_) = Some(0) {} + //^ Option + let foo @ (bar, baz) = (3, 3); + //^^^ (i32, i32) + //^^^ i32 + //^^^ i32 +}"#, + ); + } + + #[test] + fn default_generic_types_should_not_be_displayed() { + check( + r#" +struct Test { k: K, t: T } + +fn main() { + let zz = Test { t: 23u8, k: 33 }; + //^^ Test + let zz_ref = &zz; + //^^^^^^ &Test + let test = || zz; + //^^^^ || -> Test +}"#, + ); + } + + #[test] + fn shorten_iterators_in_associated_params() { + check_types( + r#" +//- minicore: iterators +use core::iter; + +pub struct SomeIter {} + +impl SomeIter { + pub fn new() -> Self { SomeIter {} } + pub fn push(&mut self, t: T) {} +} + +impl Iterator for SomeIter { + type Item = T; + fn next(&mut self) -> Option { + None + } +} + +fn main() { + let mut some_iter = SomeIter::new(); + //^^^^^^^^^ SomeIter>> + some_iter.push(iter::repeat(2).take(2)); + let iter_of_iters = some_iter.take(2); + //^^^^^^^^^^^^^ impl Iterator> +} +"#, + ); + } + + #[test] + fn iterator_hint_regression_issue_12674() { + // Ensure we don't crash while solving the projection type of iterators. + check_expect( + InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG_WITH_LINKS }, + r#" +//- minicore: iterators +struct S(T); +impl S { + fn iter(&self) -> Iter<'_, T> { loop {} } +} +struct Iter<'a, T: 'a>(&'a T); +impl<'a, T> Iterator for Iter<'a, T> { + type Item = &'a T; + fn next(&mut self) -> Option { loop {} } +} +struct Container<'a> { + elements: S<&'a str>, +} +struct SliceIter<'a, T>(&'a T); +impl<'a, T> Iterator for SliceIter<'a, T> { + type Item = &'a T; + fn next(&mut self) -> Option { loop {} } +} + +fn main(a: SliceIter<'_, Container>) { + a + .filter_map(|c| Some(c.elements.iter().filter_map(|v| Some(v)))) + .map(|e| e); +} + "#, + expect![[r#" + [ + InlayHint { + range: 484..554, + kind: ChainingHint, + label: [ + "impl Iterator>", + ], + tooltip: Some( + HoverRanged( + FileId( + 0, + ), + 484..554, + ), + ), + }, + InlayHint { + range: 484..485, + kind: ChainingHint, + label: [ + "", + InlayHintLabelPart { + text: "SliceIter", + linked_location: Some( + FileRange { + file_id: FileId( + 0, + ), + range: 289..298, + }, + ), + }, + "<", + InlayHintLabelPart { + text: "Container", + linked_location: Some( + FileRange { + file_id: FileId( + 0, + ), + range: 238..247, + }, + ), + }, + ">", + ], + tooltip: Some( + HoverRanged( + FileId( + 0, + ), + 484..485, + ), + ), + }, + ] + "#]], + ); + } + + #[test] + fn infer_call_method_return_associated_types_with_generic() { + check_types( + r#" + pub trait Default { + fn default() -> Self; + } + pub trait Foo { + type Bar: Default; + } + + pub fn quux() -> T::Bar { + let y = Default::default(); + //^ ::Bar + + y + } + "#, + ); + } + + #[test] + fn fn_hints() { + check_types( + r#" +//- minicore: fn, sized +fn foo() -> impl Fn() { loop {} } +fn foo1() -> impl Fn(f64) { loop {} } +fn foo2() -> impl Fn(f64, f64) { loop {} } +fn foo3() -> impl Fn(f64, f64) -> u32 { loop {} } +fn foo4() -> &'static dyn Fn(f64, f64) -> u32 { loop {} } +fn foo5() -> &'static dyn Fn(&'static dyn Fn(f64, f64) -> u32, f64) -> u32 { loop {} } +fn foo6() -> impl Fn(f64, f64) -> u32 + Sized { loop {} } +fn foo7() -> *const (impl Fn(f64, f64) -> u32 + Sized) { loop {} } + +fn main() { + let foo = foo(); + // ^^^ impl Fn() + let foo = foo1(); + // ^^^ impl Fn(f64) + let foo = foo2(); + // ^^^ impl Fn(f64, f64) + let foo = foo3(); + // ^^^ impl Fn(f64, f64) -> u32 + let foo = foo4(); + // ^^^ &dyn Fn(f64, f64) -> u32 + let foo = foo5(); + // ^^^ &dyn Fn(&dyn Fn(f64, 
f64) -> u32, f64) -> u32 + let foo = foo6(); + // ^^^ impl Fn(f64, f64) -> u32 + let foo = foo7(); + // ^^^ *const impl Fn(f64, f64) -> u32 +} +"#, + ) + } + + #[test] + fn check_hint_range_limit() { + let fixture = r#" + //- minicore: fn, sized + fn foo() -> impl Fn() { loop {} } + fn foo1() -> impl Fn(f64) { loop {} } + fn foo2() -> impl Fn(f64, f64) { loop {} } + fn foo3() -> impl Fn(f64, f64) -> u32 { loop {} } + fn foo4() -> &'static dyn Fn(f64, f64) -> u32 { loop {} } + fn foo5() -> &'static dyn Fn(&'static dyn Fn(f64, f64) -> u32, f64) -> u32 { loop {} } + fn foo6() -> impl Fn(f64, f64) -> u32 + Sized { loop {} } + fn foo7() -> *const (impl Fn(f64, f64) -> u32 + Sized) { loop {} } + + fn main() { + let foo = foo(); + let foo = foo1(); + let foo = foo2(); + // ^^^ impl Fn(f64, f64) + let foo = foo3(); + // ^^^ impl Fn(f64, f64) -> u32 + let foo = foo4(); + let foo = foo5(); + let foo = foo6(); + let foo = foo7(); + } + "#; + let (analysis, file_id) = fixture::file(fixture); + let expected = extract_annotations(&analysis.file_text(file_id).unwrap()); + let inlay_hints = analysis + .inlay_hints( + &InlayHintsConfig { type_hints: true, ..DISABLED_CONFIG }, + file_id, + Some(TextRange::new(TextSize::from(500), TextSize::from(600))), + ) + .unwrap(); + let actual = + inlay_hints.into_iter().map(|it| (it.range, it.label.to_string())).collect::>(); + assert_eq!(expected, actual, "\nExpected:\n{expected:#?}\n\nActual:\n{actual:#?}"); + } + + #[test] + fn fn_hints_ptr_rpit_fn_parentheses() { + check_types( + r#" +//- minicore: fn, sized +trait Trait {} + +fn foo1() -> *const impl Fn() { loop {} } +fn foo2() -> *const (impl Fn() + Sized) { loop {} } +fn foo3() -> *const (impl Fn() + ?Sized) { loop {} } +fn foo4() -> *const (impl Sized + Fn()) { loop {} } +fn foo5() -> *const (impl ?Sized + Fn()) { loop {} } +fn foo6() -> *const (impl Fn() + Trait) { loop {} } +fn foo7() -> *const (impl Fn() + Sized + Trait) { loop {} } +fn foo8() -> *const (impl Fn() + ?Sized + Trait) { loop {} } +fn foo9() -> *const (impl Fn() -> u8 + ?Sized) { loop {} } +fn foo10() -> *const (impl Fn() + Sized + ?Sized) { loop {} } + +fn main() { + let foo = foo1(); + // ^^^ *const impl Fn() + let foo = foo2(); + // ^^^ *const impl Fn() + let foo = foo3(); + // ^^^ *const (impl Fn() + ?Sized) + let foo = foo4(); + // ^^^ *const impl Fn() + let foo = foo5(); + // ^^^ *const (impl Fn() + ?Sized) + let foo = foo6(); + // ^^^ *const (impl Fn() + Trait) + let foo = foo7(); + // ^^^ *const (impl Fn() + Trait) + let foo = foo8(); + // ^^^ *const (impl Fn() + Trait + ?Sized) + let foo = foo9(); + // ^^^ *const (impl Fn() -> u8 + ?Sized) + let foo = foo10(); + // ^^^ *const impl Fn() +} +"#, + ) + } + + #[test] + fn unit_structs_have_no_type_hints() { + check_types( + r#" +//- minicore: result +struct SyntheticSyntax; + +fn main() { + match Ok(()) { + Ok(_) => (), + Err(SyntheticSyntax) => (), + } +}"#, + ); + } + + #[test] + fn let_statement() { + check_types( + r#" +#[derive(PartialEq)] +enum Option { None, Some(T) } + +#[derive(PartialEq)] +struct Test { a: Option, b: u8 } + +fn main() { + struct InnerStruct {} + + let test = 54; + //^^^^ i32 + let test: i32 = 33; + let mut test = 33; + //^^^^ i32 + let _ = 22; + let test = "test"; + //^^^^ &str + let test = InnerStruct {}; + //^^^^ InnerStruct + + let test = unresolved(); + + let test = (42, 'a'); + //^^^^ (i32, char) + let (a, (b, (c,)) = (2, (3, (9.2,)); + //^ i32 ^ i32 ^ f64 + let &x = &92; + //^ i32 +}"#, + ); + } + + #[test] + fn if_expr() { + check_types( + r#" +//- 
minicore: option +struct Test { a: Option, b: u8 } + +fn main() { + let test = Some(Test { a: Some(3), b: 1 }); + //^^^^ Option + if let None = &test {}; + if let test = &test {}; + //^^^^ &Option + if let Some(test) = &test {}; + //^^^^ &Test + if let Some(Test { a, b }) = &test {}; + //^ &Option ^ &u8 + if let Some(Test { a: x, b: y }) = &test {}; + //^ &Option ^ &u8 + if let Some(Test { a: Some(x), b: y }) = &test {}; + //^ &u32 ^ &u8 + if let Some(Test { a: None, b: y }) = &test {}; + //^ &u8 + if let Some(Test { b: y, .. }) = &test {}; + //^ &u8 + if test == None {} +}"#, + ); + } + + #[test] + fn while_expr() { + check_types( + r#" +//- minicore: option +struct Test { a: Option, b: u8 } + +fn main() { + let test = Some(Test { a: Some(3), b: 1 }); + //^^^^ Option + while let Some(Test { a: Some(x), b: y }) = &test {}; + //^ &u32 ^ &u8 +}"#, + ); + } + + #[test] + fn match_arm_list() { + check_types( + r#" +//- minicore: option +struct Test { a: Option, b: u8 } + +fn main() { + match Some(Test { a: Some(3), b: 1 }) { + None => (), + test => (), + //^^^^ Option + Some(Test { a: Some(x), b: y }) => (), + //^ u32 ^ u8 + _ => {} + } +}"#, + ); + } + + #[test] + fn complete_for_hint() { + check_types( + r#" +//- minicore: iterator +pub struct Vec {} + +impl Vec { + pub fn new() -> Self { Vec {} } + pub fn push(&mut self, t: T) {} +} + +impl IntoIterator for Vec { + type Item = T; + type IntoIter = IntoIter; +} + +struct IntoIter {} + +impl Iterator for IntoIter { + type Item = T; +} + +fn main() { + let mut data = Vec::new(); + //^^^^ Vec<&str> + data.push("foo"); + for i in data { + //^ &str + let z = i; + //^ &str + } +} +"#, + ); + } + + #[test] + fn multi_dyn_trait_bounds() { + check_types( + r#" +pub struct Vec {} + +impl Vec { + pub fn new() -> Self { Vec {} } +} + +pub struct Box {} + +trait Display {} +auto trait Sync {} + +fn main() { + // The block expression wrapping disables the constructor hint hiding logic + let _v = { Vec::>::new() }; + //^^ Vec> + let _v = { Vec::>::new() }; + //^^ Vec> + let _v = { Vec::>::new() }; + //^^ Vec> +} +"#, + ); + } + + #[test] + fn shorten_iterator_hints() { + check_types( + r#" +//- minicore: iterators +use core::iter; + +struct MyIter; + +impl Iterator for MyIter { + type Item = (); + fn next(&mut self) -> Option { + None + } +} + +fn main() { + let _x = MyIter; + //^^ MyIter + let _x = iter::repeat(0); + //^^ impl Iterator + fn generic(t: T) { + let _x = iter::repeat(t); + //^^ impl Iterator + let _chained = iter::repeat(t).take(10); + //^^^^^^^^ impl Iterator + } +} +"#, + ); + } + + #[test] + fn skip_constructor_and_enum_type_hints() { + check_with_config( + InlayHintsConfig { + type_hints: true, + hide_named_constructor_hints: true, + ..DISABLED_CONFIG + }, + r#" +//- minicore: try, option +use core::ops::ControlFlow; + +mod x { + pub mod y { pub struct Foo; } + pub struct Foo; + pub enum AnotherEnum { + Variant() + }; +} +struct Struct; +struct TupleStruct(); + +impl Struct { + fn new() -> Self { + Struct + } + fn try_new() -> ControlFlow<(), Self> { + ControlFlow::Continue(Struct) + } +} + +struct Generic(T); +impl Generic { + fn new() -> Self { + Generic(0) + } +} + +enum Enum { + Variant(u32) +} + +fn times2(value: i32) -> i32 { + 2 * value +} + +fn main() { + let enumb = Enum::Variant(0); + + let strukt = x::Foo; + let strukt = x::y::Foo; + let strukt = Struct; + let strukt = Struct::new(); + + let tuple_struct = TupleStruct(); + + let generic0 = Generic::new(); + // ^^^^^^^^ Generic + let generic1 = Generic(0); + // ^^^^^^^^ Generic 
+ let generic2 = Generic::::new(); + let generic3 = >::new(); + let generic4 = Generic::(0); + + + let option = Some(0); + // ^^^^^^ Option + let func = times2; + // ^^^^ fn times2(i32) -> i32 + let closure = |x: i32| x * 2; + // ^^^^^^^ |i32| -> i32 +} + +fn fallible() -> ControlFlow<()> { + let strukt = Struct::try_new()?; +} +"#, + ); + } + + #[test] + fn shows_constructor_type_hints_when_enabled() { + check_types( + r#" +//- minicore: try +use core::ops::ControlFlow; + +struct Struct; +struct TupleStruct(); + +impl Struct { + fn new() -> Self { + Struct + } + fn try_new() -> ControlFlow<(), Self> { + ControlFlow::Continue(Struct) + } +} + +struct Generic(T); +impl Generic { + fn new() -> Self { + Generic(0) + } +} + +fn main() { + let strukt = Struct::new(); + // ^^^^^^ Struct + let tuple_struct = TupleStruct(); + // ^^^^^^^^^^^^ TupleStruct + let generic0 = Generic::new(); + // ^^^^^^^^ Generic + let generic1 = Generic::::new(); + // ^^^^^^^^ Generic + let generic2 = >::new(); + // ^^^^^^^^ Generic +} + +fn fallible() -> ControlFlow<()> { + let strukt = Struct::try_new()?; + // ^^^^^^ Struct +} +"#, + ); + } + + #[test] + fn closures() { + check( + r#" +fn main() { + let mut start = 0; + //^^^^^ i32 + (0..2).for_each(|increment | { start += increment; }); + //^^^^^^^^^ i32 + + let multiply = + //^^^^^^^^ |i32, i32| -> i32 + | a, b| a * b + //^ i32 ^ i32 + + ; + + let _: i32 = multiply(1, 2); + //^ a ^ b + let multiply_ref = &multiply; + //^^^^^^^^^^^^ &|i32, i32| -> i32 + + let return_42 = || 42; + //^^^^^^^^^ || -> i32 + || { 42 }; + //^^ i32 +}"#, + ); + } + + #[test] + fn return_type_hints_for_closure_without_block() { + check_with_config( + InlayHintsConfig { + closure_return_type_hints: ClosureReturnTypeHints::Always, + ..DISABLED_CONFIG + }, + r#" +fn main() { + let a = || { 0 }; + //^^ i32 + let b = || 0; + //^^ i32 +}"#, + ); + } + + #[test] + fn skip_closure_type_hints() { + check_with_config( + InlayHintsConfig { + type_hints: true, + hide_closure_initialization_hints: true, + ..DISABLED_CONFIG + }, + r#" +//- minicore: fn +fn main() { + let multiple_2 = |x: i32| { x * 2 }; + + let multiple_2 = |x: i32| x * 2; + // ^^^^^^^^^^ |i32| -> i32 + + let (not) = (|x: bool| { !x }); + // ^^^ |bool| -> bool + + let (is_zero, _b) = (|x: usize| { x == 0 }, false); + // ^^^^^^^ |usize| -> bool + // ^^ bool + + let plus_one = |x| { x + 1 }; + // ^ u8 + foo(plus_one); + + let add_mul = bar(|x: u8| { x + 1 }); + // ^^^^^^^ impl FnOnce(u8) -> u8 + ?Sized + + let closure = if let Some(6) = add_mul(2).checked_sub(1) { + // ^^^^^^^ fn(i32) -> i32 + |x: i32| { x * 2 } + } else { + |x: i32| { x * 3 } + }; +} + +fn foo(f: impl FnOnce(u8) -> u8) {} + +fn bar(f: impl FnOnce(u8) -> u8) -> impl FnOnce(u8) -> u8 { + move |x: u8| f(x) * 2 +} +"#, + ); + } + + #[test] + fn hint_truncation() { + check_with_config( + InlayHintsConfig { max_length: Some(8), ..TEST_CONFIG }, + r#" +struct Smol(T); + +struct VeryLongOuterName(T); + +fn main() { + let a = Smol(0u32); + //^ Smol + let b = VeryLongOuterName(0usize); + //^ VeryLongOuterName<…> + let c = Smol(Smol(0u32)) + //^ Smol> +}"#, + ); + } +} diff --git a/crates/ide/src/inlay_hints/binding_mode.rs b/crates/ide/src/inlay_hints/binding_mode.rs new file mode 100644 index 0000000000..a0166d0048 --- /dev/null +++ b/crates/ide/src/inlay_hints/binding_mode.rs @@ -0,0 +1,142 @@ +//! Implementation of "binding mode" inlay hints: +//! ```no_run +//! let /* & */ (/* ref */ x,) = &(0,); +//! 
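+//! // Hypothetical mutable counterpart, for illustration only:
+//! let /* &mut */ (/* ref mut */ y,) = &mut (0,);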
``` +use hir::{Mutability, Semantics}; +use ide_db::RootDatabase; + +use syntax::ast::{self, AstNode}; + +use crate::{InlayHint, InlayHintsConfig, InlayKind, InlayTooltip}; + +pub(super) fn hints( + acc: &mut Vec, + sema: &Semantics<'_, RootDatabase>, + config: &InlayHintsConfig, + pat: &ast::Pat, +) -> Option<()> { + if !config.binding_mode_hints { + return None; + } + + let outer_paren_pat = pat + .syntax() + .ancestors() + .skip(1) + .map_while(ast::Pat::cast) + .map_while(|pat| match pat { + ast::Pat::ParenPat(pat) => Some(pat), + _ => None, + }) + .last(); + let range = + outer_paren_pat.as_ref().map_or_else(|| pat.syntax(), |it| it.syntax()).text_range(); + let pattern_adjustments = sema.pattern_adjustments(pat); + pattern_adjustments.iter().for_each(|ty| { + let reference = ty.is_reference(); + let mut_reference = ty.is_mutable_reference(); + let r = match (reference, mut_reference) { + (true, true) => "&mut", + (true, false) => "&", + _ => return, + }; + acc.push(InlayHint { + range, + kind: InlayKind::BindingModeHint, + label: r.to_string().into(), + tooltip: Some(InlayTooltip::String("Inferred binding mode".into())), + }); + }); + match pat { + ast::Pat::IdentPat(pat) if pat.ref_token().is_none() && pat.mut_token().is_none() => { + let bm = sema.binding_mode_of_pat(pat)?; + let bm = match bm { + hir::BindingMode::Move => return None, + hir::BindingMode::Ref(Mutability::Mut) => "ref mut", + hir::BindingMode::Ref(Mutability::Shared) => "ref", + }; + acc.push(InlayHint { + range: pat.syntax().text_range(), + kind: InlayKind::BindingModeHint, + label: bm.to_string().into(), + tooltip: Some(InlayTooltip::String("Inferred binding mode".into())), + }); + } + ast::Pat::OrPat(pat) if !pattern_adjustments.is_empty() && outer_paren_pat.is_none() => { + acc.push(InlayHint { + range: pat.syntax().text_range(), + kind: InlayKind::OpeningParenthesis, + label: "(".into(), + tooltip: None, + }); + acc.push(InlayHint { + range: pat.syntax().text_range(), + kind: InlayKind::ClosingParenthesis, + label: ")".into(), + tooltip: None, + }); + } + _ => (), + } + + Some(()) +} + +#[cfg(test)] +mod tests { + use crate::{ + inlay_hints::tests::{check_with_config, DISABLED_CONFIG}, + InlayHintsConfig, + }; + + #[test] + fn hints_binding_modes() { + check_with_config( + InlayHintsConfig { binding_mode_hints: true, ..DISABLED_CONFIG }, + r#" +fn __( + (x,): (u32,), + (x,): &(u32,), + //^^^^& + //^ ref + (x,): &mut (u32,) + //^^^^&mut + //^ ref mut +) { + let (x,) = (0,); + let (x,) = &(0,); + //^^^^ & + //^ ref + let (x,) = &mut (0,); + //^^^^ &mut + //^ ref mut + let &mut (x,) = &mut (0,); + let (ref mut x,) = &mut (0,); + //^^^^^^^^^^^^ &mut + let &mut (ref mut x,) = &mut (0,); + let (mut x,) = &mut (0,); + //^^^^^^^^ &mut + match (0,) { + (x,) => () + } + match &(0,) { + (x,) | (x,) => (), + //^^^^^^^^^^^& + //^ ref + //^ ref + //^^^^^^^^^^^( + //^^^^^^^^^^^) + ((x,) | (x,)) => (), + //^^^^^^^^^^^^^& + //^ ref + //^ ref + } + match &mut (0,) { + (x,) => () + //^^^^ &mut + //^ ref mut + } +}"#, + ); + } +} diff --git a/crates/ide/src/inlay_hints/chaining.rs b/crates/ide/src/inlay_hints/chaining.rs new file mode 100644 index 0000000000..8810d5d34d --- /dev/null +++ b/crates/ide/src/inlay_hints/chaining.rs @@ -0,0 +1,665 @@ +//! Implementation of "chaining" inlay hints. 
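+//! A rough sketch of the effect (illustrative snippet, not taken from the tests below);
+//! the block comments show the type that would be rendered at the end of each chained line:
+//! ```no_run
+//! let _n = "1,2,3"/* &str */
+//!     .split(',')/* Split<'_, char> */
+//!     .count();
+//! ```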
+use ide_db::famous_defs::FamousDefs; +use syntax::{ + ast::{self, AstNode}, + Direction, NodeOrToken, SyntaxKind, T, +}; + +use crate::{FileId, InlayHint, InlayHintsConfig, InlayKind, InlayTooltip}; + +use super::label_of_ty; + +pub(super) fn hints( + acc: &mut Vec, + famous_defs @ FamousDefs(sema, _): &FamousDefs<'_, '_>, + config: &InlayHintsConfig, + file_id: FileId, + expr: &ast::Expr, +) -> Option<()> { + if !config.chaining_hints { + return None; + } + + if matches!(expr, ast::Expr::RecordExpr(_)) { + return None; + } + + let descended = sema.descend_node_into_attributes(expr.clone()).pop(); + let desc_expr = descended.as_ref().unwrap_or(expr); + + let mut tokens = expr + .syntax() + .siblings_with_tokens(Direction::Next) + .filter_map(NodeOrToken::into_token) + .filter(|t| match t.kind() { + SyntaxKind::WHITESPACE if !t.text().contains('\n') => false, + SyntaxKind::COMMENT => false, + _ => true, + }); + + // Chaining can be defined as an expression whose next sibling tokens are newline and dot + // Ignoring extra whitespace and comments + let next = tokens.next()?.kind(); + if next == SyntaxKind::WHITESPACE { + let mut next_next = tokens.next()?.kind(); + while next_next == SyntaxKind::WHITESPACE { + next_next = tokens.next()?.kind(); + } + if next_next == T![.] { + let ty = sema.type_of_expr(desc_expr)?.original; + if ty.is_unknown() { + return None; + } + if matches!(expr, ast::Expr::PathExpr(_)) { + if let Some(hir::Adt::Struct(st)) = ty.as_adt() { + if st.fields(sema.db).is_empty() { + return None; + } + } + } + acc.push(InlayHint { + range: expr.syntax().text_range(), + kind: InlayKind::ChainingHint, + label: label_of_ty(famous_defs, config, ty)?, + tooltip: Some(InlayTooltip::HoverRanged(file_id, expr.syntax().text_range())), + }); + } + } + Some(()) +} + +#[cfg(test)] +mod tests { + use expect_test::expect; + + use crate::{ + inlay_hints::tests::{ + check_expect, check_with_config, DISABLED_CONFIG, DISABLED_CONFIG_WITH_LINKS, + TEST_CONFIG, + }, + InlayHintsConfig, + }; + + #[track_caller] + fn check_chains(ra_fixture: &str) { + check_with_config(InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG }, ra_fixture); + } + + #[test] + fn chaining_hints_ignore_comments() { + check_expect( + InlayHintsConfig { + type_hints: false, + chaining_hints: true, + ..DISABLED_CONFIG_WITH_LINKS + }, + r#" +struct A(B); +impl A { fn into_b(self) -> B { self.0 } } +struct B(C); +impl B { fn into_c(self) -> C { self.0 } } +struct C; + +fn main() { + let c = A(B(C)) + .into_b() // This is a comment + // This is another comment + .into_c(); +} +"#, + expect![[r#" + [ + InlayHint { + range: 147..172, + kind: ChainingHint, + label: [ + "", + InlayHintLabelPart { + text: "B", + linked_location: Some( + FileRange { + file_id: FileId( + 0, + ), + range: 63..64, + }, + ), + }, + "", + ], + tooltip: Some( + HoverRanged( + FileId( + 0, + ), + 147..172, + ), + ), + }, + InlayHint { + range: 147..154, + kind: ChainingHint, + label: [ + "", + InlayHintLabelPart { + text: "A", + linked_location: Some( + FileRange { + file_id: FileId( + 0, + ), + range: 7..8, + }, + ), + }, + "", + ], + tooltip: Some( + HoverRanged( + FileId( + 0, + ), + 147..154, + ), + ), + }, + ] + "#]], + ); + } + + #[test] + fn chaining_hints_without_newlines() { + check_chains( + r#" +struct A(B); +impl A { fn into_b(self) -> B { self.0 } } +struct B(C); +impl B { fn into_c(self) -> C { self.0 } } +struct C; + +fn main() { + let c = A(B(C)).into_b().into_c(); +}"#, + ); + } + + #[test] + fn disabled_location_links() { + 
check_expect( + InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG }, + r#" + struct A { pub b: B } + struct B { pub c: C } + struct C(pub bool); + struct D; + + impl D { + fn foo(&self) -> i32 { 42 } + } + + fn main() { + let x = A { b: B { c: C(true) } } + .b + .c + .0; + let x = D + .foo(); + }"#, + expect![[r#" + [ + InlayHint { + range: 143..190, + kind: ChainingHint, + label: [ + "C", + ], + tooltip: Some( + HoverRanged( + FileId( + 0, + ), + 143..190, + ), + ), + }, + InlayHint { + range: 143..179, + kind: ChainingHint, + label: [ + "B", + ], + tooltip: Some( + HoverRanged( + FileId( + 0, + ), + 143..179, + ), + ), + }, + ] + "#]], + ); + } + + #[test] + fn struct_access_chaining_hints() { + check_expect( + InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG_WITH_LINKS }, + r#" +struct A { pub b: B } +struct B { pub c: C } +struct C(pub bool); +struct D; + +impl D { + fn foo(&self) -> i32 { 42 } +} + +fn main() { + let x = A { b: B { c: C(true) } } + .b + .c + .0; + let x = D + .foo(); +}"#, + expect![[r#" + [ + InlayHint { + range: 143..190, + kind: ChainingHint, + label: [ + "", + InlayHintLabelPart { + text: "C", + linked_location: Some( + FileRange { + file_id: FileId( + 0, + ), + range: 51..52, + }, + ), + }, + "", + ], + tooltip: Some( + HoverRanged( + FileId( + 0, + ), + 143..190, + ), + ), + }, + InlayHint { + range: 143..179, + kind: ChainingHint, + label: [ + "", + InlayHintLabelPart { + text: "B", + linked_location: Some( + FileRange { + file_id: FileId( + 0, + ), + range: 29..30, + }, + ), + }, + "", + ], + tooltip: Some( + HoverRanged( + FileId( + 0, + ), + 143..179, + ), + ), + }, + ] + "#]], + ); + } + + #[test] + fn generic_chaining_hints() { + check_expect( + InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG_WITH_LINKS }, + r#" +struct A(T); +struct B(T); +struct C(T); +struct X(T, R); + +impl A { + fn new(t: T) -> Self { A(t) } + fn into_b(self) -> B { B(self.0) } +} +impl B { + fn into_c(self) -> C { C(self.0) } +} +fn main() { + let c = A::new(X(42, true)) + .into_b() + .into_c(); +} +"#, + expect![[r#" + [ + InlayHint { + range: 246..283, + kind: ChainingHint, + label: [ + "", + InlayHintLabelPart { + text: "B", + linked_location: Some( + FileRange { + file_id: FileId( + 0, + ), + range: 23..24, + }, + ), + }, + "<", + InlayHintLabelPart { + text: "X", + linked_location: Some( + FileRange { + file_id: FileId( + 0, + ), + range: 55..56, + }, + ), + }, + ">", + ], + tooltip: Some( + HoverRanged( + FileId( + 0, + ), + 246..283, + ), + ), + }, + InlayHint { + range: 246..265, + kind: ChainingHint, + label: [ + "", + InlayHintLabelPart { + text: "A", + linked_location: Some( + FileRange { + file_id: FileId( + 0, + ), + range: 7..8, + }, + ), + }, + "<", + InlayHintLabelPart { + text: "X", + linked_location: Some( + FileRange { + file_id: FileId( + 0, + ), + range: 55..56, + }, + ), + }, + ">", + ], + tooltip: Some( + HoverRanged( + FileId( + 0, + ), + 246..265, + ), + ), + }, + ] + "#]], + ); + } + + #[test] + fn shorten_iterator_chaining_hints() { + check_expect( + InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG_WITH_LINKS }, + r#" +//- minicore: iterators +use core::iter; + +struct MyIter; + +impl Iterator for MyIter { + type Item = (); + fn next(&mut self) -> Option { + None + } +} + +fn main() { + let _x = MyIter.by_ref() + .take(5) + .by_ref() + .take(5) + .by_ref(); +} +"#, + expect![[r#" + [ + InlayHint { + range: 174..241, + kind: ChainingHint, + label: [ + "impl Iterator", + ], + tooltip: Some( + HoverRanged( + FileId( + 0, 
+ ), + 174..241, + ), + ), + }, + InlayHint { + range: 174..224, + kind: ChainingHint, + label: [ + "impl Iterator", + ], + tooltip: Some( + HoverRanged( + FileId( + 0, + ), + 174..224, + ), + ), + }, + InlayHint { + range: 174..206, + kind: ChainingHint, + label: [ + "impl Iterator", + ], + tooltip: Some( + HoverRanged( + FileId( + 0, + ), + 174..206, + ), + ), + }, + InlayHint { + range: 174..189, + kind: ChainingHint, + label: [ + "&mut ", + InlayHintLabelPart { + text: "MyIter", + linked_location: Some( + FileRange { + file_id: FileId( + 0, + ), + range: 24..30, + }, + ), + }, + "", + ], + tooltip: Some( + HoverRanged( + FileId( + 0, + ), + 174..189, + ), + ), + }, + ] + "#]], + ); + } + + #[test] + fn hints_in_attr_call() { + check_expect( + TEST_CONFIG, + r#" +//- proc_macros: identity, input_replace +struct Struct; +impl Struct { + fn chain(self) -> Self { + self + } +} +#[proc_macros::identity] +fn main() { + let strukt = Struct; + strukt + .chain() + .chain() + .chain(); + Struct::chain(strukt); +} +"#, + expect![[r#" + [ + InlayHint { + range: 124..130, + kind: TypeHint, + label: [ + "", + InlayHintLabelPart { + text: "Struct", + linked_location: Some( + FileRange { + file_id: FileId( + 0, + ), + range: 7..13, + }, + ), + }, + "", + ], + tooltip: Some( + HoverRanged( + FileId( + 0, + ), + 124..130, + ), + ), + }, + InlayHint { + range: 145..185, + kind: ChainingHint, + label: [ + "", + InlayHintLabelPart { + text: "Struct", + linked_location: Some( + FileRange { + file_id: FileId( + 0, + ), + range: 7..13, + }, + ), + }, + "", + ], + tooltip: Some( + HoverRanged( + FileId( + 0, + ), + 145..185, + ), + ), + }, + InlayHint { + range: 145..168, + kind: ChainingHint, + label: [ + "", + InlayHintLabelPart { + text: "Struct", + linked_location: Some( + FileRange { + file_id: FileId( + 0, + ), + range: 7..13, + }, + ), + }, + "", + ], + tooltip: Some( + HoverRanged( + FileId( + 0, + ), + 145..168, + ), + ), + }, + InlayHint { + range: 222..228, + kind: ParameterHint, + label: [ + "self", + ], + tooltip: Some( + HoverOffset( + FileId( + 0, + ), + 42, + ), + ), + }, + ] + "#]], + ); + } +} diff --git a/crates/ide/src/inlay_hints/closing_brace.rs b/crates/ide/src/inlay_hints/closing_brace.rs new file mode 100644 index 0000000000..e340c64c54 --- /dev/null +++ b/crates/ide/src/inlay_hints/closing_brace.rs @@ -0,0 +1,196 @@ +//! Implementation of "closing brace" inlay hints: +//! ```no_run +//! fn g() { +//! } /* fn g */ +//! ``` +use hir::{HirDisplay, Semantics}; +use ide_db::{base_db::FileRange, RootDatabase}; +use syntax::{ + ast::{self, AstNode, HasName}, + match_ast, SyntaxKind, SyntaxNode, T, +}; + +use crate::{ + inlay_hints::InlayHintLabelPart, FileId, InlayHint, InlayHintLabel, InlayHintsConfig, InlayKind, +}; + +pub(super) fn hints( + acc: &mut Vec, + sema: &Semantics<'_, RootDatabase>, + config: &InlayHintsConfig, + file_id: FileId, + node: SyntaxNode, +) -> Option<()> { + let min_lines = config.closing_brace_hints_min_lines?; + + let name = |it: ast::Name| it.syntax().text_range(); + + let mut closing_token; + let (label, name_range) = if let Some(item_list) = ast::AssocItemList::cast(node.clone()) { + closing_token = item_list.r_curly_token()?; + + let parent = item_list.syntax().parent()?; + match_ast! 
{ + match parent { + ast::Impl(imp) => { + let imp = sema.to_def(&imp)?; + let ty = imp.self_ty(sema.db); + let trait_ = imp.trait_(sema.db); + let hint_text = match trait_ { + Some(tr) => format!("impl {} for {}", tr.name(sema.db), ty.display_truncated(sema.db, config.max_length)), + None => format!("impl {}", ty.display_truncated(sema.db, config.max_length)), + }; + (hint_text, None) + }, + ast::Trait(tr) => { + (format!("trait {}", tr.name()?), tr.name().map(name)) + }, + _ => return None, + } + } + } else if let Some(list) = ast::ItemList::cast(node.clone()) { + closing_token = list.r_curly_token()?; + + let module = ast::Module::cast(list.syntax().parent()?)?; + (format!("mod {}", module.name()?), module.name().map(name)) + } else if let Some(block) = ast::BlockExpr::cast(node.clone()) { + closing_token = block.stmt_list()?.r_curly_token()?; + + let parent = block.syntax().parent()?; + match_ast! { + match parent { + ast::Fn(it) => { + // FIXME: this could include parameters, but `HirDisplay` prints too much info + // and doesn't respect the max length either, so the hints end up way too long + (format!("fn {}", it.name()?), it.name().map(name)) + }, + ast::Static(it) => (format!("static {}", it.name()?), it.name().map(name)), + ast::Const(it) => { + if it.underscore_token().is_some() { + ("const _".into(), None) + } else { + (format!("const {}", it.name()?), it.name().map(name)) + } + }, + _ => return None, + } + } + } else if let Some(mac) = ast::MacroCall::cast(node.clone()) { + let last_token = mac.syntax().last_token()?; + if last_token.kind() != T![;] && last_token.kind() != SyntaxKind::R_CURLY { + return None; + } + closing_token = last_token; + + ( + format!("{}!", mac.path()?), + mac.path().and_then(|it| it.segment()).map(|it| it.syntax().text_range()), + ) + } else { + return None; + }; + + if let Some(mut next) = closing_token.next_token() { + if next.kind() == T![;] { + if let Some(tok) = next.next_token() { + closing_token = next; + next = tok; + } + } + if !(next.kind() == SyntaxKind::WHITESPACE && next.text().contains('\n')) { + // Only display the hint if the `}` is the last token on the line + return None; + } + } + + let mut lines = 1; + node.text().for_each_chunk(|s| lines += s.matches('\n').count()); + if lines < min_lines { + return None; + } + + let linked_location = config + .location_links + .then(|| name_range.map(|range| FileRange { file_id, range })) + .flatten(); + acc.push(InlayHint { + range: closing_token.text_range(), + kind: InlayKind::ClosingBraceHint, + label: InlayHintLabel { parts: vec![InlayHintLabelPart { text: label, linked_location }] }, + tooltip: None, // provided by label part location + }); + + None +} + +#[cfg(test)] +mod tests { + use crate::{ + inlay_hints::tests::{check_with_config, DISABLED_CONFIG}, + InlayHintsConfig, + }; + + #[test] + fn hints_closing_brace() { + check_with_config( + InlayHintsConfig { closing_brace_hints_min_lines: Some(2), ..DISABLED_CONFIG }, + r#" +fn a() {} + +fn f() { +} // no hint unless `}` is the last token on the line + +fn g() { + } +//^ fn g + +fn h(with: T, arguments: u8, ...) { + } +//^ fn h + +trait Tr { + fn f(); + fn g() { + } + //^ fn g + } +//^ trait Tr +impl Tr for () { + } +//^ impl Tr for () +impl dyn Tr { + } +//^ impl dyn Tr + +static S0: () = 0; +static S1: () = {}; +static S2: () = { + }; +//^ static S2 +const _: () = { + }; +//^ const _ + +mod m { + } +//^ mod m + +m! {} +m!(); +m!( + ); +//^ m! + +m! { + } +//^ m! 
+ +fn f() { + let v = vec![ + ]; + } +//^ fn f +"#, + ); + } +} diff --git a/crates/ide/src/inlay_hints/closure_ret.rs b/crates/ide/src/inlay_hints/closure_ret.rs new file mode 100644 index 0000000000..d9929beaac --- /dev/null +++ b/crates/ide/src/inlay_hints/closure_ret.rs @@ -0,0 +1,49 @@ +//! Implementation of "closure return type" inlay hints. +use ide_db::{base_db::FileId, famous_defs::FamousDefs}; +use syntax::ast::{self, AstNode}; + +use crate::{ + inlay_hints::closure_has_block_body, ClosureReturnTypeHints, InlayHint, InlayHintsConfig, + InlayKind, InlayTooltip, +}; + +use super::label_of_ty; + +pub(super) fn hints( + acc: &mut Vec, + famous_defs @ FamousDefs(sema, _): &FamousDefs<'_, '_>, + config: &InlayHintsConfig, + file_id: FileId, + closure: ast::ClosureExpr, +) -> Option<()> { + if config.closure_return_type_hints == ClosureReturnTypeHints::Never { + return None; + } + + if closure.ret_type().is_some() { + return None; + } + + if !closure_has_block_body(&closure) + && config.closure_return_type_hints == ClosureReturnTypeHints::WithBlock + { + return None; + } + + let param_list = closure.param_list()?; + + let closure = sema.descend_node_into_attributes(closure).pop()?; + let ty = sema.type_of_expr(&ast::Expr::ClosureExpr(closure))?.adjusted(); + let callable = ty.as_callable(sema.db)?; + let ty = callable.return_type(); + if ty.is_unit() { + return None; + } + acc.push(InlayHint { + range: param_list.syntax().text_range(), + kind: InlayKind::ClosureReturnTypeHint, + label: label_of_ty(famous_defs, config, ty)?, + tooltip: Some(InlayTooltip::HoverRanged(file_id, param_list.syntax().text_range())), + }); + Some(()) +} diff --git a/crates/ide/src/inlay_hints/discriminant.rs b/crates/ide/src/inlay_hints/discriminant.rs new file mode 100644 index 0000000000..f32c4bdf28 --- /dev/null +++ b/crates/ide/src/inlay_hints/discriminant.rs @@ -0,0 +1,142 @@ +//! Implementation of "enum variant discriminant" inlay hints: +//! ```no_run +//! enum Foo { +//! Bar/* = 0*/, +//! } +//! 
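+//! // Hypothetical second enum, for illustration: implicit discriminants continue
+//! // counting from the last explicit value, which is what the hints surface.
+//! enum Bar {
+//!     A = 5,
+//!     B/* = 6*/,
+//! }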
``` +use ide_db::{base_db::FileId, famous_defs::FamousDefs}; +use syntax::ast::{self, AstNode, HasName}; + +use crate::{DiscriminantHints, InlayHint, InlayHintsConfig, InlayKind, InlayTooltip}; + +pub(super) fn hints( + acc: &mut Vec, + FamousDefs(sema, _): &FamousDefs<'_, '_>, + config: &InlayHintsConfig, + _: FileId, + variant: &ast::Variant, +) -> Option<()> { + let field_list = match config.discriminant_hints { + DiscriminantHints::Always => variant.field_list(), + DiscriminantHints::Fieldless => match variant.field_list() { + Some(_) => return None, + None => None, + }, + DiscriminantHints::Never => return None, + }; + + if variant.eq_token().is_some() { + return None; + } + + let name = variant.name()?; + + let descended = sema.descend_node_into_attributes(variant.clone()).pop(); + let desc_pat = descended.as_ref().unwrap_or(variant); + let v = sema.to_def(desc_pat)?; + let d = v.eval(sema.db); + + acc.push(InlayHint { + range: match field_list { + Some(field_list) => name.syntax().text_range().cover(field_list.syntax().text_range()), + None => name.syntax().text_range(), + }, + kind: InlayKind::DiscriminantHint, + label: match &d { + Ok(v) => format!("{}", v).into(), + Err(_) => "?".into(), + }, + tooltip: Some(InlayTooltip::String(match &d { + Ok(_) => "enum variant discriminant".into(), + Err(e) => format!("{e:?}").into(), + })), + }); + + Some(()) +} + +#[cfg(test)] +mod tests { + use crate::inlay_hints::{ + tests::{check_with_config, DISABLED_CONFIG}, + DiscriminantHints, InlayHintsConfig, + }; + + #[track_caller] + fn check_discriminants(ra_fixture: &str) { + check_with_config( + InlayHintsConfig { discriminant_hints: DiscriminantHints::Always, ..DISABLED_CONFIG }, + ra_fixture, + ); + } + + #[track_caller] + fn check_discriminants_fieldless(ra_fixture: &str) { + check_with_config( + InlayHintsConfig { + discriminant_hints: DiscriminantHints::Fieldless, + ..DISABLED_CONFIG + }, + ra_fixture, + ); + } + + #[test] + fn fieldless() { + check_discriminants( + r#" +enum Enum { + Variant, + //^^^^^^^0 + Variant1, + //^^^^^^^^1 + Variant2, + //^^^^^^^^2 + Variant5 = 5, + Variant6, + //^^^^^^^^6 +} +"#, + ); + } + + #[test] + fn datacarrying_mixed() { + check_discriminants( + r#" +enum Enum { + Variant(), + //^^^^^^^^^0 + Variant1, + //^^^^^^^^1 + Variant2 {}, + //^^^^^^^^^^^2 + Variant3, + //^^^^^^^^3 + Variant5 = 5, + Variant6, + //^^^^^^^^6 +} +"#, + ); + } + + #[test] + fn datacarrying_mixed_fieldless_set() { + check_discriminants_fieldless( + r#" +enum Enum { + Variant(), + Variant1, + //^^^^^^^^1 + Variant2 {}, + Variant3, + //^^^^^^^^3 + Variant5 = 5, + Variant6, + //^^^^^^^^6 +} +"#, + ); + } +} diff --git a/crates/ide/src/inlay_hints/fn_lifetime_fn.rs b/crates/ide/src/inlay_hints/fn_lifetime_fn.rs new file mode 100644 index 0000000000..2aa5e3dc73 --- /dev/null +++ b/crates/ide/src/inlay_hints/fn_lifetime_fn.rs @@ -0,0 +1,325 @@ +//! Implementation of "lifetime elision" inlay hints: +//! ```no_run +//! fn example/* <'0> */(a: &/* '0 */()) {} +//! 
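+//! // Hypothetical case with an elided output lifetime, for illustration: with a single
+//! // input lifetime the returned reference is tied to it, and the hints make that explicit.
+//! fn ret/* <'0> */(s: &/* '0 */str) -> &/* '0 */str { s }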
``` +use ide_db::{syntax_helpers::node_ext::walk_ty, FxHashMap}; +use itertools::Itertools; +use syntax::SmolStr; +use syntax::{ + ast::{self, AstNode, HasGenericParams, HasName}, + SyntaxToken, +}; + +use crate::{InlayHint, InlayHintsConfig, InlayKind, InlayTooltip, LifetimeElisionHints}; + +pub(super) fn hints( + acc: &mut Vec, + config: &InlayHintsConfig, + func: ast::Fn, +) -> Option<()> { + if config.lifetime_elision_hints == LifetimeElisionHints::Never { + return None; + } + + let mk_lt_hint = |t: SyntaxToken, label: String| InlayHint { + range: t.text_range(), + kind: InlayKind::LifetimeHint, + label: label.into(), + tooltip: Some(InlayTooltip::String("Elided lifetime".into())), + }; + + let param_list = func.param_list()?; + let generic_param_list = func.generic_param_list(); + let ret_type = func.ret_type(); + let self_param = param_list.self_param().filter(|it| it.amp_token().is_some()); + + let is_elided = |lt: &Option| match lt { + Some(lt) => matches!(lt.text().as_str(), "'_"), + None => true, + }; + + let potential_lt_refs = { + let mut acc: Vec<_> = vec![]; + if let Some(self_param) = &self_param { + let lifetime = self_param.lifetime(); + let is_elided = is_elided(&lifetime); + acc.push((None, self_param.amp_token(), lifetime, is_elided)); + } + param_list.params().filter_map(|it| Some((it.pat(), it.ty()?))).for_each(|(pat, ty)| { + // FIXME: check path types + walk_ty(&ty, &mut |ty| match ty { + ast::Type::RefType(r) => { + let lifetime = r.lifetime(); + let is_elided = is_elided(&lifetime); + acc.push(( + pat.as_ref().and_then(|it| match it { + ast::Pat::IdentPat(p) => p.name(), + _ => None, + }), + r.amp_token(), + lifetime, + is_elided, + )); + false + } + ast::Type::FnPtrType(_) => true, + ast::Type::PathType(t) => { + t.path().and_then(|it| it.segment()).and_then(|it| it.param_list()).is_some() + } + _ => false, + }) + }); + acc + }; + + // allocate names + let mut gen_idx_name = { + let mut gen = (0u8..).map(|idx| match idx { + idx if idx < 10 => SmolStr::from_iter(['\'', (idx + 48) as char]), + idx => format!("'{idx}").into(), + }); + move || gen.next().unwrap_or_default() + }; + let mut allocated_lifetimes = vec![]; + + let mut used_names: FxHashMap = + match config.param_names_for_lifetime_elision_hints { + true => generic_param_list + .iter() + .flat_map(|gpl| gpl.lifetime_params()) + .filter_map(|param| param.lifetime()) + .filter_map(|lt| Some((SmolStr::from(lt.text().as_str().get(1..)?), 0))) + .collect(), + false => Default::default(), + }; + { + let mut potential_lt_refs = potential_lt_refs.iter().filter(|&&(.., is_elided)| is_elided); + if let Some(_) = &self_param { + if let Some(_) = potential_lt_refs.next() { + allocated_lifetimes.push(if config.param_names_for_lifetime_elision_hints { + // self can't be used as a lifetime, so no need to check for collisions + "'self".into() + } else { + gen_idx_name() + }); + } + } + potential_lt_refs.for_each(|(name, ..)| { + let name = match name { + Some(it) if config.param_names_for_lifetime_elision_hints => { + if let Some(c) = used_names.get_mut(it.text().as_str()) { + *c += 1; + SmolStr::from(format!("'{text}{c}", text = it.text().as_str())) + } else { + used_names.insert(it.text().as_str().into(), 0); + SmolStr::from_iter(["\'", it.text().as_str()]) + } + } + _ => gen_idx_name(), + }; + allocated_lifetimes.push(name); + }); + } + + // fetch output lifetime if elision rule applies + let output = match potential_lt_refs.as_slice() { + [(_, _, lifetime, _), ..] 
if self_param.is_some() || potential_lt_refs.len() == 1 => { + match lifetime { + Some(lt) => match lt.text().as_str() { + "'_" => allocated_lifetimes.get(0).cloned(), + "'static" => None, + name => Some(name.into()), + }, + None => allocated_lifetimes.get(0).cloned(), + } + } + [..] => None, + }; + + if allocated_lifetimes.is_empty() && output.is_none() { + return None; + } + + // apply hints + // apply output if required + let mut is_trivial = true; + if let (Some(output_lt), Some(r)) = (&output, ret_type) { + if let Some(ty) = r.ty() { + walk_ty(&ty, &mut |ty| match ty { + ast::Type::RefType(ty) if ty.lifetime().is_none() => { + if let Some(amp) = ty.amp_token() { + is_trivial = false; + acc.push(mk_lt_hint(amp, output_lt.to_string())); + } + false + } + ast::Type::FnPtrType(_) => true, + ast::Type::PathType(t) => { + t.path().and_then(|it| it.segment()).and_then(|it| it.param_list()).is_some() + } + _ => false, + }) + } + } + + if config.lifetime_elision_hints == LifetimeElisionHints::SkipTrivial && is_trivial { + return None; + } + + let mut a = allocated_lifetimes.iter(); + for (_, amp_token, _, is_elided) in potential_lt_refs { + if is_elided { + let t = amp_token?; + let lt = a.next()?; + acc.push(mk_lt_hint(t, lt.to_string())); + } + } + + // generate generic param list things + match (generic_param_list, allocated_lifetimes.as_slice()) { + (_, []) => (), + (Some(gpl), allocated_lifetimes) => { + let angle_tok = gpl.l_angle_token()?; + let is_empty = gpl.generic_params().next().is_none(); + acc.push(InlayHint { + range: angle_tok.text_range(), + kind: InlayKind::LifetimeHint, + label: format!( + "{}{}", + allocated_lifetimes.iter().format(", "), + if is_empty { "" } else { ", " } + ) + .into(), + tooltip: Some(InlayTooltip::String("Elided lifetimes".into())), + }); + } + (None, allocated_lifetimes) => acc.push(InlayHint { + range: func.name()?.syntax().text_range(), + kind: InlayKind::GenericParamListHint, + label: format!("<{}>", allocated_lifetimes.iter().format(", "),).into(), + tooltip: Some(InlayTooltip::String("Elided lifetimes".into())), + }), + } + Some(()) +} + +#[cfg(test)] +mod tests { + use crate::{ + inlay_hints::tests::{check, check_with_config, TEST_CONFIG}, + InlayHintsConfig, LifetimeElisionHints, + }; + + #[test] + fn hints_lifetimes() { + check( + r#" +fn empty() {} + +fn no_gpl(a: &()) {} + //^^^^^^<'0> + // ^'0 +fn empty_gpl<>(a: &()) {} + // ^'0 ^'0 +fn partial<'b>(a: &(), b: &'b ()) {} +// ^'0, $ ^'0 +fn partial<'a>(a: &'a (), b: &()) {} +// ^'0, $ ^'0 + +fn single_ret(a: &()) -> &() {} +// ^^^^^^^^^^<'0> + // ^'0 ^'0 +fn full_mul(a: &(), b: &()) {} +// ^^^^^^^^<'0, '1> + // ^'0 ^'1 + +fn foo<'c>(a: &'c ()) -> &() {} + // ^'c + +fn nested_in(a: & &X< &()>) {} +// ^^^^^^^^^<'0, '1, '2> + //^'0 ^'1 ^'2 +fn nested_out(a: &()) -> & &X< &()>{} +// ^^^^^^^^^^<'0> + //^'0 ^'0 ^'0 ^'0 + +impl () { + fn foo(&self) {} + // ^^^<'0> + // ^'0 + fn foo(&self) -> &() {} + // ^^^<'0> + // ^'0 ^'0 + fn foo(&self, a: &()) -> &() {} + // ^^^<'0, '1> + // ^'0 ^'1 ^'0 +} +"#, + ); + } + + #[test] + fn hints_lifetimes_named() { + check_with_config( + InlayHintsConfig { param_names_for_lifetime_elision_hints: true, ..TEST_CONFIG }, + r#" +fn nested_in<'named>(named: & &X< &()>) {} +// ^'named1, 'named2, 'named3, $ + //^'named1 ^'named2 ^'named3 +"#, + ); + } + + #[test] + fn hints_lifetimes_trivial_skip() { + check_with_config( + InlayHintsConfig { + lifetime_elision_hints: LifetimeElisionHints::SkipTrivial, + ..TEST_CONFIG + }, + r#" +fn no_gpl(a: &()) {} +fn empty_gpl<>(a: &()) 
{} +fn partial<'b>(a: &(), b: &'b ()) {} +fn partial<'a>(a: &'a (), b: &()) {} + +fn single_ret(a: &()) -> &() {} +// ^^^^^^^^^^<'0> + // ^'0 ^'0 +fn full_mul(a: &(), b: &()) {} + +fn foo<'c>(a: &'c ()) -> &() {} + // ^'c + +fn nested_in(a: & &X< &()>) {} +fn nested_out(a: &()) -> & &X< &()>{} +// ^^^^^^^^^^<'0> + //^'0 ^'0 ^'0 ^'0 + +impl () { + fn foo(&self) {} + fn foo(&self) -> &() {} + // ^^^<'0> + // ^'0 ^'0 + fn foo(&self, a: &()) -> &() {} + // ^^^<'0, '1> + // ^'0 ^'1 ^'0 +} +"#, + ); + } + + #[test] + fn hints_lifetimes_skip_fn_likes() { + check_with_config( + InlayHintsConfig { + lifetime_elision_hints: LifetimeElisionHints::Always, + ..TEST_CONFIG + }, + r#" +fn fn_ptr(a: fn(&()) -> &()) {} +fn fn_trait<>(a: impl Fn(&()) -> &()) {} +"#, + ); + } +} diff --git a/crates/ide/src/inlay_hints/implicit_static.rs b/crates/ide/src/inlay_hints/implicit_static.rs new file mode 100644 index 0000000000..588a0e3b6a --- /dev/null +++ b/crates/ide/src/inlay_hints/implicit_static.rs @@ -0,0 +1,75 @@ +//! Implementation of "implicit static" inlay hints: +//! ```no_run +//! static S: &/* 'static */str = ""; +//! ``` +use either::Either; +use syntax::{ + ast::{self, AstNode}, + SyntaxKind, +}; + +use crate::{InlayHint, InlayHintsConfig, InlayKind, InlayTooltip, LifetimeElisionHints}; + +pub(super) fn hints( + acc: &mut Vec, + config: &InlayHintsConfig, + statik_or_const: Either, +) -> Option<()> { + if config.lifetime_elision_hints != LifetimeElisionHints::Always { + return None; + } + + if let Either::Right(it) = &statik_or_const { + if ast::AssocItemList::can_cast( + it.syntax().parent().map_or(SyntaxKind::EOF, |it| it.kind()), + ) { + return None; + } + } + + if let Some(ast::Type::RefType(ty)) = statik_or_const.either(|it| it.ty(), |it| it.ty()) { + if ty.lifetime().is_none() { + let t = ty.amp_token()?; + acc.push(InlayHint { + range: t.text_range(), + kind: InlayKind::LifetimeHint, + label: "'static".to_owned().into(), + tooltip: Some(InlayTooltip::String("Elided static lifetime".into())), + }); + } + } + + Some(()) +} + +#[cfg(test)] +mod tests { + use crate::{ + inlay_hints::tests::{check_with_config, TEST_CONFIG}, + InlayHintsConfig, LifetimeElisionHints, + }; + + #[test] + fn hints_lifetimes_static() { + check_with_config( + InlayHintsConfig { + lifetime_elision_hints: LifetimeElisionHints::Always, + ..TEST_CONFIG + }, + r#" +trait Trait {} +static S: &str = ""; +// ^'static +const C: &str = ""; +// ^'static +const C: &dyn Trait = panic!(); +// ^'static + +impl () { + const C: &str = ""; + const C: &dyn Trait = panic!(); +} +"#, + ); + } +} diff --git a/crates/ide/src/inlay_hints/param_name.rs b/crates/ide/src/inlay_hints/param_name.rs new file mode 100644 index 0000000000..ecee67632e --- /dev/null +++ b/crates/ide/src/inlay_hints/param_name.rs @@ -0,0 +1,546 @@ +//! Implementation of "param name" inlay hints: +//! ```no_run +//! fn max(x: i32, y: i32) -> i32 { x + y } +//! _ = max(/*x*/4, /*y*/4); +//! 
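+//! // Hypothetical extra call, for illustration: when the argument already spells out the
+//! // parameter name, the heuristics below suppress the hint for it.
+//! let x = 2;
+//! _ = max(x, /*y*/3);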
``` +use either::Either; +use hir::{Callable, Semantics}; +use ide_db::{base_db::FileRange, RootDatabase}; + +use stdx::to_lower_snake_case; +use syntax::ast::{self, AstNode, HasArgList, HasName, UnaryOp}; + +use crate::{InlayHint, InlayHintsConfig, InlayKind, InlayTooltip}; + +pub(super) fn hints( + acc: &mut Vec, + sema: &Semantics<'_, RootDatabase>, + config: &InlayHintsConfig, + expr: ast::Expr, +) -> Option<()> { + if !config.parameter_hints { + return None; + } + + let (callable, arg_list) = get_callable(sema, &expr)?; + let hints = callable + .params(sema.db) + .into_iter() + .zip(arg_list.args()) + .filter_map(|((param, _ty), arg)| { + // Only annotate hints for expressions that exist in the original file + let range = sema.original_range_opt(arg.syntax())?; + let (param_name, name_syntax) = match param.as_ref()? { + Either::Left(pat) => ("self".to_string(), pat.name()), + Either::Right(pat) => match pat { + ast::Pat::IdentPat(it) => (it.name()?.to_string(), it.name()), + _ => return None, + }, + }; + Some((name_syntax, param_name, arg, range)) + }) + .filter(|(_, param_name, arg, _)| { + !should_hide_param_name_hint(sema, &callable, param_name, arg) + }) + .map(|(param, param_name, _, FileRange { range, .. })| { + let mut tooltip = None; + if let Some(name) = param { + if let hir::CallableKind::Function(f) = callable.kind() { + // assert the file is cached so we can map out of macros + if let Some(_) = sema.source(f) { + tooltip = sema.original_range_opt(name.syntax()); + } + } + } + + InlayHint { + range, + kind: InlayKind::ParameterHint, + label: param_name.into(), + tooltip: tooltip.map(|it| InlayTooltip::HoverOffset(it.file_id, it.range.start())), + } + }); + + acc.extend(hints); + Some(()) +} + +fn get_callable( + sema: &Semantics<'_, RootDatabase>, + expr: &ast::Expr, +) -> Option<(hir::Callable, ast::ArgList)> { + match expr { + ast::Expr::CallExpr(expr) => { + let descended = sema.descend_node_into_attributes(expr.clone()).pop(); + let expr = descended.as_ref().unwrap_or(expr); + sema.type_of_expr(&expr.expr()?)?.original.as_callable(sema.db).zip(expr.arg_list()) + } + ast::Expr::MethodCallExpr(expr) => { + let descended = sema.descend_node_into_attributes(expr.clone()).pop(); + let expr = descended.as_ref().unwrap_or(expr); + sema.resolve_method_call_as_callable(expr).zip(expr.arg_list()) + } + _ => None, + } +} + +fn should_hide_param_name_hint( + sema: &Semantics<'_, RootDatabase>, + callable: &hir::Callable, + param_name: &str, + argument: &ast::Expr, +) -> bool { + // These are to be tested in the `parameter_hint_heuristics` test + // hide when: + // - the parameter name is a suffix of the function's name + // - the argument is a qualified constructing or call expression where the qualifier is an ADT + // - exact argument<->parameter match(ignoring leading underscore) or parameter is a prefix/suffix + // of argument with _ splitting it off + // - param starts with `ra_fixture` + // - param is a well known name in a unary function + + let param_name = param_name.trim_start_matches('_'); + if param_name.is_empty() { + return true; + } + + if matches!(argument, ast::Expr::PrefixExpr(prefix) if prefix.op_kind() == Some(UnaryOp::Not)) { + return false; + } + + let fn_name = match callable.kind() { + hir::CallableKind::Function(it) => Some(it.name(sema.db).to_smol_str()), + _ => None, + }; + let fn_name = fn_name.as_deref(); + is_param_name_suffix_of_fn_name(param_name, callable, fn_name) + || is_argument_similar_to_param_name(argument, param_name) + || 
param_name.starts_with("ra_fixture") + || (callable.n_params() == 1 && is_obvious_param(param_name)) + || is_adt_constructor_similar_to_param_name(sema, argument, param_name) +} + +/// Hide the parameter name of a unary function if it is a `_` - prefixed suffix of the function's name, or equal. +/// +/// `fn strip_suffix(suffix)` will be hidden. +/// `fn stripsuffix(suffix)` will not be hidden. +fn is_param_name_suffix_of_fn_name( + param_name: &str, + callable: &Callable, + fn_name: Option<&str>, +) -> bool { + match (callable.n_params(), fn_name) { + (1, Some(function)) => { + function == param_name + || function + .len() + .checked_sub(param_name.len()) + .and_then(|at| function.is_char_boundary(at).then(|| function.split_at(at))) + .map_or(false, |(prefix, suffix)| { + suffix.eq_ignore_ascii_case(param_name) && prefix.ends_with('_') + }) + } + _ => false, + } +} + +fn is_argument_similar_to_param_name(argument: &ast::Expr, param_name: &str) -> bool { + // check whether param_name and argument are the same or + // whether param_name is a prefix/suffix of argument(split at `_`) + let argument = match get_string_representation(argument) { + Some(argument) => argument, + None => return false, + }; + + // std is honestly too panic happy... + let str_split_at = |str: &str, at| str.is_char_boundary(at).then(|| argument.split_at(at)); + + let param_name = param_name.trim_start_matches('_'); + let argument = argument.trim_start_matches('_'); + + match str_split_at(argument, param_name.len()) { + Some((prefix, rest)) if prefix.eq_ignore_ascii_case(param_name) => { + return rest.is_empty() || rest.starts_with('_'); + } + _ => (), + } + match argument.len().checked_sub(param_name.len()).and_then(|at| str_split_at(argument, at)) { + Some((rest, suffix)) if param_name.eq_ignore_ascii_case(suffix) => { + return rest.is_empty() || rest.ends_with('_'); + } + _ => (), + } + false +} + +fn get_string_representation(expr: &ast::Expr) -> Option { + match expr { + ast::Expr::MethodCallExpr(method_call_expr) => { + let name_ref = method_call_expr.name_ref()?; + match name_ref.text().as_str() { + "clone" | "as_ref" => method_call_expr.receiver().map(|rec| rec.to_string()), + name_ref => Some(name_ref.to_owned()), + } + } + ast::Expr::MacroExpr(macro_expr) => { + Some(macro_expr.macro_call()?.path()?.segment()?.to_string()) + } + ast::Expr::FieldExpr(field_expr) => Some(field_expr.name_ref()?.to_string()), + ast::Expr::PathExpr(path_expr) => Some(path_expr.path()?.segment()?.to_string()), + ast::Expr::PrefixExpr(prefix_expr) => get_string_representation(&prefix_expr.expr()?), + ast::Expr::RefExpr(ref_expr) => get_string_representation(&ref_expr.expr()?), + ast::Expr::CastExpr(cast_expr) => get_string_representation(&cast_expr.expr()?), + _ => None, + } +} + +fn is_obvious_param(param_name: &str) -> bool { + // avoid displaying hints for common functions like map, filter, etc. 
+ // or other obvious words used in std + let is_obvious_param_name = + matches!(param_name, "predicate" | "value" | "pat" | "rhs" | "other"); + param_name.len() == 1 || is_obvious_param_name +} + +fn is_adt_constructor_similar_to_param_name( + sema: &Semantics<'_, RootDatabase>, + argument: &ast::Expr, + param_name: &str, +) -> bool { + let path = match argument { + ast::Expr::CallExpr(c) => c.expr().and_then(|e| match e { + ast::Expr::PathExpr(p) => p.path(), + _ => None, + }), + ast::Expr::PathExpr(p) => p.path(), + ast::Expr::RecordExpr(r) => r.path(), + _ => return false, + }; + let path = match path { + Some(it) => it, + None => return false, + }; + (|| match sema.resolve_path(&path)? { + hir::PathResolution::Def(hir::ModuleDef::Adt(_)) => { + Some(to_lower_snake_case(&path.segment()?.name_ref()?.text()) == param_name) + } + hir::PathResolution::Def(hir::ModuleDef::Function(_) | hir::ModuleDef::Variant(_)) => { + if to_lower_snake_case(&path.segment()?.name_ref()?.text()) == param_name { + return Some(true); + } + let qual = path.qualifier()?; + match sema.resolve_path(&qual)? { + hir::PathResolution::Def(hir::ModuleDef::Adt(_)) => { + Some(to_lower_snake_case(&qual.segment()?.name_ref()?.text()) == param_name) + } + _ => None, + } + } + _ => None, + })() + .unwrap_or(false) +} + +#[cfg(test)] +mod tests { + use crate::{ + inlay_hints::tests::{check_with_config, DISABLED_CONFIG}, + InlayHintsConfig, + }; + + #[track_caller] + fn check_params(ra_fixture: &str) { + check_with_config( + InlayHintsConfig { parameter_hints: true, ..DISABLED_CONFIG }, + ra_fixture, + ); + } + + #[test] + fn param_hints_only() { + check_params( + r#" +fn foo(a: i32, b: i32) -> i32 { a + b } +fn main() { + let _x = foo( + 4, + //^ a + 4, + //^ b + ); +}"#, + ); + } + + #[test] + fn param_hints_on_closure() { + check_params( + r#" +fn main() { + let clo = |a: u8, b: u8| a + b; + clo( + 1, + //^ a + 2, + //^ b + ); +} + "#, + ); + } + + #[test] + fn param_name_similar_to_fn_name_still_hints() { + check_params( + r#" +fn max(x: i32, y: i32) -> i32 { x + y } +fn main() { + let _x = max( + 4, + //^ x + 4, + //^ y + ); +}"#, + ); + } + + #[test] + fn param_name_similar_to_fn_name() { + check_params( + r#" +fn param_with_underscore(with_underscore: i32) -> i32 { with_underscore } +fn main() { + let _x = param_with_underscore( + 4, + ); +}"#, + ); + check_params( + r#" +fn param_with_underscore(underscore: i32) -> i32 { underscore } +fn main() { + let _x = param_with_underscore( + 4, + ); +}"#, + ); + } + + #[test] + fn param_name_same_as_fn_name() { + check_params( + r#" +fn foo(foo: i32) -> i32 { foo } +fn main() { + let _x = foo( + 4, + ); +}"#, + ); + } + + #[test] + fn never_hide_param_when_multiple_params() { + check_params( + r#" +fn foo(foo: i32, bar: i32) -> i32 { bar + baz } +fn main() { + let _x = foo( + 4, + //^ foo + 8, + //^ bar + ); +}"#, + ); + } + + #[test] + fn param_hints_look_through_as_ref_and_clone() { + check_params( + r#" +fn foo(bar: i32, baz: f32) {} + +fn main() { + let bar = 3; + let baz = &"baz"; + let fez = 1.0; + foo(bar.clone(), bar.clone()); + //^^^^^^^^^^^ baz + foo(bar.as_ref(), bar.as_ref()); + //^^^^^^^^^^^^ baz +} +"#, + ); + } + + #[test] + fn self_param_hints() { + check_params( + r#" +struct Foo; + +impl Foo { + fn foo(self: Self) {} + fn bar(self: &Self) {} +} + +fn main() { + Foo::foo(Foo); + //^^^ self + Foo::bar(&Foo); + //^^^^ self +} +"#, + ) + } + + #[test] + fn param_name_hints_show_for_literals() { + check_params( + r#"pub fn test(a: i32, b: i32) -> [i32; 2] { [a, b] 
} +fn main() { + test( + 0xa_b, + //^^^^^ a + 0xa_b, + //^^^^^ b + ); +}"#, + ) + } + + #[test] + fn function_call_parameter_hint() { + check_params( + r#" +//- minicore: option +struct FileId {} +struct SmolStr {} + +struct TextRange {} +struct SyntaxKind {} +struct NavigationTarget {} + +struct Test {} + +impl Test { + fn method(&self, mut param: i32) -> i32 { param * 2 } + + fn from_syntax( + file_id: FileId, + name: SmolStr, + focus_range: Option, + full_range: TextRange, + kind: SyntaxKind, + docs: Option, + ) -> NavigationTarget { + NavigationTarget {} + } +} + +fn test_func(mut foo: i32, bar: i32, msg: &str, _: i32, last: i32) -> i32 { + foo + bar +} + +fn main() { + let not_literal = 1; + let _: i32 = test_func(1, 2, "hello", 3, not_literal); + //^ foo ^ bar ^^^^^^^ msg ^^^^^^^^^^^ last + let t: Test = Test {}; + t.method(123); + //^^^ param + Test::method(&t, 3456); + //^^ self ^^^^ param + Test::from_syntax( + FileId {}, + "impl".into(), + //^^^^^^^^^^^^^ name + None, + //^^^^ focus_range + TextRange {}, + //^^^^^^^^^^^^ full_range + SyntaxKind {}, + //^^^^^^^^^^^^^ kind + None, + //^^^^ docs + ); +}"#, + ); + } + + #[test] + fn parameter_hint_heuristics() { + check_params( + r#" +fn check(ra_fixture_thing: &str) {} + +fn map(f: i32) {} +fn filter(predicate: i32) {} + +fn strip_suffix(suffix: &str) {} +fn stripsuffix(suffix: &str) {} +fn same(same: u32) {} +fn same2(_same2: u32) {} + +fn enum_matches_param_name(completion_kind: CompletionKind) {} + +fn foo(param: u32) {} +fn bar(param_eter: u32) {} + +enum CompletionKind { + Keyword, +} + +fn non_ident_pat((a, b): (u32, u32)) {} + +fn main() { + const PARAM: u32 = 0; + foo(PARAM); + foo(!PARAM); + // ^^^^^^ param + check(""); + + map(0); + filter(0); + + strip_suffix(""); + stripsuffix(""); + //^^ suffix + same(0); + same2(0); + + enum_matches_param_name(CompletionKind::Keyword); + + let param = 0; + foo(param); + foo(param as _); + let param_end = 0; + foo(param_end); + let start_param = 0; + foo(start_param); + let param2 = 0; + foo(param2); + //^^^^^^ param + + macro_rules! 
param { + () => {}; + }; + foo(param!()); + + let param_eter = 0; + bar(param_eter); + let param_eter_end = 0; + bar(param_eter_end); + let start_param_eter = 0; + bar(start_param_eter); + let param_eter2 = 0; + bar(param_eter2); + //^^^^^^^^^^^ param_eter + + non_ident_pat((0, 0)); +}"#, + ); + } +} diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs index 7402e86f36..239456cb28 100644 --- a/crates/ide/src/lib.rs +++ b/crates/ide/src/lib.rs @@ -81,8 +81,8 @@ pub use crate::{ highlight_related::{HighlightRelatedConfig, HighlightedRange}, hover::{HoverAction, HoverConfig, HoverDocFormat, HoverGotoTypeData, HoverResult}, inlay_hints::{ - AdjustmentHints, ClosureReturnTypeHints, InlayHint, InlayHintLabel, InlayHintsConfig, - InlayKind, InlayTooltip, LifetimeElisionHints, + AdjustmentHints, AdjustmentHintsMode, ClosureReturnTypeHints, DiscriminantHints, InlayHint, + InlayHintLabel, InlayHintsConfig, InlayKind, InlayTooltip, LifetimeElisionHints, }, join_lines::JoinLinesConfig, markup::Markup, @@ -236,6 +236,7 @@ impl Analysis { Ok(Vec::new()), false, CrateOrigin::CratesIo { repo: None, name: None }, + None, ); change.change_file(file_id, Some(Arc::new(text))); change.set_crate_graph(crate_graph); diff --git a/crates/ide/src/markup.rs b/crates/ide/src/markup.rs index 60c193c40a..de9fef61a7 100644 --- a/crates/ide/src/markup.rs +++ b/crates/ide/src/markup.rs @@ -33,6 +33,6 @@ impl Markup { self.text.as_str() } pub fn fenced_block(contents: &impl fmt::Display) -> Markup { - format!("```rust\n{}\n```", contents).into() + format!("```rust\n{contents}\n```").into() } } diff --git a/crates/ide/src/moniker.rs b/crates/ide/src/moniker.rs index fcbf6d8e58..af5e96d238 100644 --- a/crates/ide/src/moniker.rs +++ b/crates/ide/src/moniker.rs @@ -273,7 +273,7 @@ mod tests { fn no_moniker(ra_fixture: &str) { let (analysis, position) = fixture::position(ra_fixture); if let Some(x) = analysis.moniker(position).unwrap() { - assert_eq!(x.info.len(), 0, "Moniker founded but no moniker expected: {:?}", x); + assert_eq!(x.info.len(), 0, "Moniker founded but no moniker expected: {x:?}"); } } diff --git a/crates/ide/src/navigation_target.rs b/crates/ide/src/navigation_target.rs index 9f049e298a..3aa799d43a 100644 --- a/crates/ide/src/navigation_target.rs +++ b/crates/ide/src/navigation_target.rs @@ -117,10 +117,10 @@ impl NavigationTarget { self.full_range ); if let Some(focus_range) = self.focus_range { - buf.push_str(&format!(" {:?}", focus_range)) + buf.push_str(&format!(" {focus_range:?}")) } if let Some(container_name) = &self.container_name { - buf.push_str(&format!(" {}", container_name)) + buf.push_str(&format!(" {container_name}")) } buf } diff --git a/crates/ide/src/rename.rs b/crates/ide/src/rename.rs index b4df043705..15bdf14fb9 100644 --- a/crates/ide/src/rename.rs +++ b/crates/ide/src/rename.rs @@ -345,7 +345,7 @@ mod tests { let (analysis, position) = fixture::position(ra_fixture_before); let rename_result = analysis .rename(position, new_name) - .unwrap_or_else(|err| panic!("Rename to '{}' was cancelled: {}", new_name, err)); + .unwrap_or_else(|err| panic!("Rename to '{new_name}' was cancelled: {err}")); match rename_result { Ok(source_change) => { let mut text_edit_builder = TextEdit::builder(); @@ -364,14 +364,11 @@ mod tests { } Err(err) => { if ra_fixture_after.starts_with("error:") { - let error_message = ra_fixture_after - .chars() - .into_iter() - .skip("error:".len()) - .collect::(); + let error_message = + ra_fixture_after.chars().skip("error:".len()).collect::(); 
assert_eq!(error_message.trim(), err.to_string()); } else { - panic!("Rename to '{}' failed unexpectedly: {}", new_name, err) + panic!("Rename to '{new_name}' failed unexpectedly: {err}") } } }; @@ -397,11 +394,11 @@ mod tests { let (analysis, position) = fixture::position(ra_fixture); let result = analysis .prepare_rename(position) - .unwrap_or_else(|err| panic!("PrepareRename was cancelled: {}", err)); + .unwrap_or_else(|err| panic!("PrepareRename was cancelled: {err}")); match result { Ok(RangeInfo { range, info: () }) => { let source = analysis.file_text(position.file_id).unwrap(); - expect.assert_eq(&format!("{:?}: {}", range, &source[range])) + expect.assert_eq(&format!("{range:?}: {}", &source[range])) } Err(RenameError(err)) => expect.assert_eq(&err), }; diff --git a/crates/ide/src/runnables.rs b/crates/ide/src/runnables.rs index 0181c6b8e4..5b35262aab 100644 --- a/crates/ide/src/runnables.rs +++ b/crates/ide/src/runnables.rs @@ -66,12 +66,12 @@ impl Runnable { // test package::module::testname pub fn label(&self, target: Option) -> String { match &self.kind { - RunnableKind::Test { test_id, .. } => format!("test {}", test_id), - RunnableKind::TestMod { path } => format!("test-mod {}", path), - RunnableKind::Bench { test_id } => format!("bench {}", test_id), - RunnableKind::DocTest { test_id, .. } => format!("doctest {}", test_id), + RunnableKind::Test { test_id, .. } => format!("test {test_id}"), + RunnableKind::TestMod { path } => format!("test-mod {path}"), + RunnableKind::Bench { test_id } => format!("bench {test_id}"), + RunnableKind::DocTest { test_id, .. } => format!("doctest {test_id}"), RunnableKind::Bin => { - target.map_or_else(|| "run binary".to_string(), |t| format!("run {}", t)) + target.map_or_else(|| "run binary".to_string(), |t| format!("run {t}")) } } } @@ -377,7 +377,7 @@ pub(crate) fn runnable_impl( } else { String::new() }; - let mut test_id = format!("{}{}", adt_name, params); + let mut test_id = format!("{adt_name}{params}"); test_id.retain(|c| c != ' '); let test_id = TestId::Path(test_id); diff --git a/crates/ide/src/shuffle_crate_graph.rs b/crates/ide/src/shuffle_crate_graph.rs index 2d86627643..ae539a5d39 100644 --- a/crates/ide/src/shuffle_crate_graph.rs +++ b/crates/ide/src/shuffle_crate_graph.rs @@ -36,6 +36,7 @@ pub(crate) fn shuffle_crate_graph(db: &mut RootDatabase) { data.proc_macro.clone(), data.is_proc_macro, data.origin.clone(), + data.target_layout.clone(), ); map.insert(old_id, new_id); } diff --git a/crates/ide/src/signature_help.rs b/crates/ide/src/signature_help.rs index e7412d27fa..f807ba30f4 100644 --- a/crates/ide/src/signature_help.rs +++ b/crates/ide/src/signature_help.rs @@ -74,20 +74,28 @@ pub(crate) fn signature_help(db: &RootDatabase, position: FilePosition) -> Optio ast::ArgList(arg_list) => { let cursor_outside = arg_list.r_paren_token().as_ref() == Some(&token); if cursor_outside { - return None; + continue; } - return signature_help_for_call(&sema, token); + return signature_help_for_call(&sema, arg_list, token); }, ast::GenericArgList(garg_list) => { let cursor_outside = garg_list.r_angle_token().as_ref() == Some(&token); if cursor_outside { - return None; + continue; } - return signature_help_for_generics(&sema, token); + return signature_help_for_generics(&sema, garg_list, token); }, _ => (), } } + + // Stop at multi-line expressions, since the signature of the outer call is not very + // helpful inside them. 
+ if let Some(expr) = ast::Expr::cast(node.clone()) { + if expr.syntax().text().contains_char('\n') { + return None; + } + } } None @@ -95,10 +103,11 @@ pub(crate) fn signature_help(db: &RootDatabase, position: FilePosition) -> Optio fn signature_help_for_call( sema: &Semantics<'_, RootDatabase>, + arg_list: ast::ArgList, token: SyntaxToken, ) -> Option { // Find the calling expression and its NameRef - let mut node = token.parent()?; + let mut node = arg_list.syntax().parent()?; let calling_node = loop { if let Some(callable) = ast::CallableExpr::cast(node.clone()) { if callable @@ -109,14 +118,6 @@ fn signature_help_for_call( } } - // Stop at multi-line expressions, since the signature of the outer call is not very - // helpful inside them. - if let Some(expr) = ast::Expr::cast(node.clone()) { - if expr.syntax().text().contains_char('\n') { - return None; - } - } - node = node.parent()?; }; @@ -200,10 +201,11 @@ fn signature_help_for_call( fn signature_help_for_generics( sema: &Semantics<'_, RootDatabase>, + garg_list: ast::GenericArgList, token: SyntaxToken, ) -> Option { - let parent = token.parent()?; - let arg_list = parent + let arg_list = garg_list + .syntax() .ancestors() .filter_map(ast::GenericArgList::cast) .find(|list| list.syntax().text_range().contains(token.text_range().start()))?; @@ -644,7 +646,7 @@ pub fn add_one(x: i32) -> i32 { x + 1 } -pub fn do() { +pub fn r#do() { add_one($0 }"#, expect![[r##" @@ -770,6 +772,32 @@ fn f() { "#, expect![[]], ); + check( + r#" +fn foo(a: u8) -> u8 {a} +fn bar(a: u8) -> u8 {a} +fn f() { + foo(bar(123)$0) +} +"#, + expect![[r#" + fn foo(a: u8) -> u8 + ^^^^^ + "#]], + ); + check( + r#" +struct Vec(T); +struct Vec2(T); +fn f() { + let _: Vec2$0> +} +"#, + expect![[r#" + struct Vec2 + ^ + "#]], + ); } #[test] diff --git a/crates/ide/src/static_index.rs b/crates/ide/src/static_index.rs index 2380cf7381..a6b30ba139 100644 --- a/crates/ide/src/static_index.rs +++ b/crates/ide/src/static_index.rs @@ -13,6 +13,7 @@ use syntax::{AstNode, SyntaxKind::*, SyntaxToken, TextRange, T}; use crate::{ hover::hover_for_definition, + inlay_hints::AdjustmentHintsMode, moniker::{def_to_moniker, MonikerResult}, parent_module::crates_for, Analysis, Fold, HoverConfig, HoverDocFormat, HoverResult, InlayHint, InlayHintsConfig, @@ -106,13 +107,17 @@ impl StaticIndex<'_> { .analysis .inlay_hints( &InlayHintsConfig { + location_links: true, render_colons: true, + discriminant_hints: crate::DiscriminantHints::Fieldless, type_hints: true, parameter_hints: true, chaining_hints: true, closure_return_type_hints: crate::ClosureReturnTypeHints::WithBlock, lifetime_elision_hints: crate::LifetimeElisionHints::Never, adjustment_hints: crate::AdjustmentHints::Never, + adjustment_hints_mode: AdjustmentHintsMode::Prefix, + adjustment_hints_hide_outside_unsafe: false, hide_named_constructor_hints: false, hide_closure_initialization_hints: false, param_names_for_lifetime_elision_hints: false, @@ -231,13 +236,13 @@ mod tests { for (range, _) in f.tokens { let x = FileRange { file_id: f.file_id, range }; if !range_set.contains(&x) { - panic!("additional range {:?}", x); + panic!("additional range {x:?}"); } range_set.remove(&x); } } if !range_set.is_empty() { - panic!("unfound ranges {:?}", range_set); + panic!("unfound ranges {range_set:?}"); } } @@ -252,13 +257,13 @@ mod tests { continue; } if !range_set.contains(&x) { - panic!("additional definition {:?}", x); + panic!("additional definition {x:?}"); } range_set.remove(&x); } } if !range_set.is_empty() { - panic!("unfound 
definitions {:?}", range_set); + panic!("unfound definitions {range_set:?}"); } } diff --git a/crates/ide/src/status.rs b/crates/ide/src/status.rs index 20810c25b3..7ce782f93b 100644 --- a/crates/ide/src/status.rs +++ b/crates/ide/src/status.rs @@ -52,8 +52,8 @@ pub(crate) fn status(db: &RootDatabase, file_id: Option) -> String { let crate_graph = db.crate_graph(); for krate in crates { let display_crate = |krate: CrateId| match &crate_graph[krate].display_name { - Some(it) => format!("{}({:?})", it, krate), - None => format!("{:?}", krate), + Some(it) => format!("{it}({krate:?})"), + None => format!("{krate:?}"), }; format_to!(buf, "Crate: {}\n", display_crate(krate)); let deps = crate_graph[krate] diff --git a/crates/ide/src/syntax_highlighting/highlight.rs b/crates/ide/src/syntax_highlighting/highlight.rs index e7d0a8be7f..892e6a9bb0 100644 --- a/crates/ide/src/syntax_highlighting/highlight.rs +++ b/crates/ide/src/syntax_highlighting/highlight.rs @@ -111,7 +111,7 @@ fn punctuation( let is_raw_ptr = (|| { let prefix_expr = parent.and_then(ast::PrefixExpr::cast)?; let expr = prefix_expr.expr()?; - sema.type_of_expr(&expr)?.original.is_raw_ptr().then(|| ()) + sema.type_of_expr(&expr)?.original.is_raw_ptr().then_some(()) })(); if let Some(()) = is_raw_ptr { HlTag::Operator(HlOperator::Other) | HlMod::Unsafe @@ -174,6 +174,7 @@ fn keyword( | T![return] | T![while] | T![yield] => h | HlMod::ControlFlow, + T![do] | T![yeet] if parent_matches::(&token) => h | HlMod::ControlFlow, T![for] if parent_matches::(&token) => h | HlMod::ControlFlow, T![unsafe] => h | HlMod::Unsafe, T![true] | T![false] => HlTag::BoolLiteral.into(), diff --git a/crates/ide/src/syntax_highlighting/html.rs b/crates/ide/src/syntax_highlighting/html.rs index e91fd7f125..2c7823069b 100644 --- a/crates/ide/src/syntax_highlighting/html.rs +++ b/crates/ide/src/syntax_highlighting/html.rs @@ -52,7 +52,7 @@ pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: boo let class = r.highlight.to_string().replace('.', " "); let color = match (rainbow, r.binding_hash) { (true, Some(hash)) => { - format!(" data-binding-hash=\"{}\" style=\"color: {};\"", hash, rainbowify(hash)) + format!(" data-binding-hash=\"{hash}\" style=\"color: {};\"", rainbowify(hash)) } _ => "".into(), }; diff --git a/crates/ide/src/syntax_highlighting/tests.rs b/crates/ide/src/syntax_highlighting/tests.rs index 46cc667fc4..2f870d769c 100644 --- a/crates/ide/src/syntax_highlighting/tests.rs +++ b/crates/ide/src/syntax_highlighting/tests.rs @@ -1028,6 +1028,26 @@ macro_rules! test {} let _ = analysis.highlight(HL_CONFIG, file_id).unwrap(); } +#[test] +fn highlight_callable_no_crash() { + // regression test for #13838. + let (analysis, file_id) = fixture::file( + r#" +//- minicore: fn, sized +impl FnOnce for &F +where + F: Fn, +{ + type Output = F::Output; +} + +trait Trait {} +fn foo(x: &fn(&dyn Trait)) {} +"#, + ); + let _ = analysis.highlight(HL_CONFIG, file_id).unwrap(); +} + /// Highlights the code given by the `ra_fixture` argument, renders the /// result as HTML, and compares it with the HTML file given as `snapshot`. /// Note that the `snapshot` file is overwritten by the rendered HTML. 
diff --git a/crates/ide/src/syntax_tree.rs b/crates/ide/src/syntax_tree.rs index 4256fea0f8..bb6827e8a4 100644 --- a/crates/ide/src/syntax_tree.rs +++ b/crates/ide/src/syntax_tree.rs @@ -32,7 +32,7 @@ pub(crate) fn syntax_tree( } }; - format!("{:#?}", node) + format!("{node:#?}") } else { format!("{:#?}", parse.tree().syntax()) } diff --git a/crates/ide/src/typing.rs b/crates/ide/src/typing.rs index 9118f3c699..eba5a48563 100644 --- a/crates/ide/src/typing.rs +++ b/crates/ide/src/typing.rs @@ -397,7 +397,7 @@ mod tests { fn type_char(char_typed: char, ra_fixture_before: &str, ra_fixture_after: &str) { let actual = do_type_char(char_typed, ra_fixture_before) - .unwrap_or_else(|| panic!("typing `{}` did nothing", char_typed)); + .unwrap_or_else(|| panic!("typing `{char_typed}` did nothing")); assert_eq_text!(ra_fixture_after, &actual); } diff --git a/crates/ide/src/typing/on_enter.rs b/crates/ide/src/typing/on_enter.rs index 48c1713270..298482f2ab 100644 --- a/crates/ide/src/typing/on_enter.rs +++ b/crates/ide/src/typing/on_enter.rs @@ -108,7 +108,7 @@ fn on_enter_in_comment( } let indent = node_indent(file, comment.syntax())?; - let inserted = format!("\n{}{} $0", indent, prefix); + let inserted = format!("\n{indent}{prefix} $0"); let delete = if remove_trailing_whitespace { let trimmed_len = comment.text().trim_end().len() as u32; let trailing_whitespace_len = comment.text().len() as u32 - trimmed_len; @@ -129,7 +129,7 @@ fn on_enter_in_block(block: ast::BlockExpr, position: FilePosition) -> Option let indent = IndentLevel::from_node(list.syntax()); let mut edit = TextEdit::insert(position.offset, format!("\n{}$0", indent + 1)); - edit.union(TextEdit::insert( - list.r_curly_token()?.text_range().start(), - format!("\n{}", indent), - )) - .ok()?; + edit.union(TextEdit::insert(list.r_curly_token()?.text_range().start(), format!("\n{indent}"))) + .ok()?; Some(edit) } diff --git a/crates/limit/src/lib.rs b/crates/limit/src/lib.rs index d6a706a7cd..6b2534aa46 100644 --- a/crates/limit/src/lib.rs +++ b/crates/limit/src/lib.rs @@ -59,7 +59,7 @@ impl Limit { .compare_exchange_weak(old_max, other, Ordering::Relaxed, Ordering::Relaxed) .is_ok() { - eprintln!("new max: {}", other); + eprintln!("new max: {other}"); } } diff --git a/crates/mbe/src/benchmark.rs b/crates/mbe/src/benchmark.rs index 9c92bae6a1..4b75002501 100644 --- a/crates/mbe/src/benchmark.rs +++ b/crates/mbe/src/benchmark.rs @@ -101,7 +101,7 @@ fn invocation_fixtures(rules: &FxHashMap) -> Vec<(Stri } try_cnt += 1; if try_cnt > 100 { - panic!("invocaton fixture {} cannot be generated.\n", name); + panic!("invocaton fixture {name} cannot be generated.\n"); } } } @@ -139,9 +139,15 @@ fn invocation_fixtures(rules: &FxHashMap) -> Vec<(Stri } None => (), - Some(kind) => panic!("Unhandled kind {:?}", kind), + Some(kind) => panic!("Unhandled kind {kind:?}"), }, - Op::Leaf(leaf) => parent.token_trees.push(leaf.clone().into()), + Op::Literal(it) => parent.token_trees.push(tt::Leaf::from(it.clone()).into()), + Op::Ident(it) => parent.token_trees.push(tt::Leaf::from(it.clone()).into()), + Op::Punct(puncts) => { + for punct in puncts { + parent.token_trees.push(tt::Leaf::from(punct.clone()).into()); + } + } Op::Repeat { tokens, kind, separator } => { let max = 10; let cnt = match kind { diff --git a/crates/mbe/src/expander/matcher.rs b/crates/mbe/src/expander/matcher.rs index 3f656df25f..88eae136f7 100644 --- a/crates/mbe/src/expander/matcher.rs +++ b/crates/mbe/src/expander/matcher.rs @@ -68,7 +68,7 @@ use crate::{ expander::{Binding, Bindings, 
ExpandResult, Fragment}, parser::{MetaVarKind, Op, RepeatKind, Separator}, tt_iter::TtIter, - ExpandError, MetaTemplate, + ExpandError, MetaTemplate, ValueResult, }; impl Bindings { @@ -321,8 +321,8 @@ struct MatchState<'t> { /// The KleeneOp of this sequence if we are in a repetition. sep_kind: Option, - /// Number of tokens of separator parsed - sep_parsed: Option, + /// Whether we already matched separator token. + sep_matched: bool, /// Matched meta variables bindings bindings: BindingsIdx, @@ -387,7 +387,7 @@ fn match_loop_inner<'t>( None => { // We are at or past the end of the matcher of `item`. if let Some(up) = &item.up { - if item.sep_parsed.is_none() { + if !item.sep_matched { // Get the `up` matcher let mut new_pos = (**up).clone(); new_pos.bindings = bindings_builder.copy(&new_pos.bindings); @@ -401,14 +401,17 @@ fn match_loop_inner<'t>( } // Check if we need a separator. - // We check the separator one by one - let sep_idx = item.sep_parsed.unwrap_or(0); - let sep_len = item.sep.as_ref().map_or(0, Separator::tt_count); - if item.sep.is_some() && sep_idx != sep_len { + if item.sep.is_some() && !item.sep_matched { let sep = item.sep.as_ref().unwrap(); - if src.clone().expect_separator(sep, sep_idx) { + let mut fork = src.clone(); + if fork.expect_separator(sep) { + // HACK: here we use `meta_result` to pass `TtIter` back to caller because + // it might have been advanced multiple times. `ValueResult` is + // insignificant. + item.meta_result = Some((fork, ValueResult::ok(None))); item.dot.next(); - item.sep_parsed = Some(sep_idx + 1); + // item.sep_parsed = Some(sep_len); + item.sep_matched = true; try_push!(next_items, item); } } @@ -416,7 +419,7 @@ fn match_loop_inner<'t>( // and try to match again UNLESS we are only allowed to have _one_ repetition. 
else if item.sep_kind != Some(RepeatKind::ZeroOrOne) { item.dot = item.dot.reset(); - item.sep_parsed = None; + item.sep_matched = false; bindings_builder.push_default(&mut item.bindings); cur_items.push(item); } @@ -451,7 +454,7 @@ fn match_loop_inner<'t>( up: Some(Box::new(item)), sep: separator.clone(), sep_kind: Some(*kind), - sep_parsed: None, + sep_matched: false, bindings: bindings_builder.alloc(), meta_result: None, is_error: false, @@ -500,18 +503,69 @@ fn match_loop_inner<'t>( } } } - OpDelimited::Op(Op::Leaf(leaf)) => { - if let Err(err) = match_leaf(leaf, &mut src.clone()) { - res.add_err(err); - item.is_error = true; + OpDelimited::Op(Op::Literal(lhs)) => { + if let Ok(rhs) = src.clone().expect_leaf() { + if matches!(rhs, tt::Leaf::Literal(it) if it.text == lhs.text) { + item.dot.next(); + } else { + res.add_err(ExpandError::UnexpectedToken); + item.is_error = true; + } } else { - item.dot.next(); + res.add_err(ExpandError::binding_error(format!("expected literal: `{lhs}`"))); + item.is_error = true; } try_push!(next_items, item); } + OpDelimited::Op(Op::Ident(lhs)) => { + if let Ok(rhs) = src.clone().expect_leaf() { + if matches!(rhs, tt::Leaf::Ident(it) if it.text == lhs.text) { + item.dot.next(); + } else { + res.add_err(ExpandError::UnexpectedToken); + item.is_error = true; + } + } else { + res.add_err(ExpandError::binding_error(format!("expected ident: `{lhs}`"))); + item.is_error = true; + } + try_push!(next_items, item); + } + OpDelimited::Op(Op::Punct(lhs)) => { + let mut fork = src.clone(); + let error = if let Ok(rhs) = fork.expect_glued_punct() { + let first_is_single_quote = rhs[0].char == '\''; + let lhs = lhs.iter().map(|it| it.char); + let rhs = rhs.iter().map(|it| it.char); + if lhs.clone().eq(rhs) { + // HACK: here we use `meta_result` to pass `TtIter` back to caller because + // it might have been advanced multiple times. `ValueResult` is + // insignificant. + item.meta_result = Some((fork, ValueResult::ok(None))); + item.dot.next(); + next_items.push(item); + continue; + } + + if first_is_single_quote { + // If the first punct token is a single quote, that's a part of a lifetime + // ident, not a punct. + ExpandError::UnexpectedToken + } else { + let lhs: SmolStr = lhs.collect(); + ExpandError::binding_error(format!("expected punct: `{lhs}`")) + } + } else { + ExpandError::UnexpectedToken + }; + + res.add_err(error); + item.is_error = true; + error_items.push(item); + } OpDelimited::Op(Op::Ignore { .. } | Op::Index { .. }) => {} OpDelimited::Open => { - if matches!(src.clone().next(), Some(tt::TokenTree::Subtree(..))) { + if matches!(src.peek_n(0), Some(tt::TokenTree::Subtree(..))) { item.dot.next(); try_push!(next_items, item); } @@ -541,7 +595,7 @@ fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree) -> Match { up: None, sep: None, sep_kind: None, - sep_parsed: None, + sep_matched: false, bindings: bindings_builder.alloc(), is_error: false, meta_result: None, @@ -616,21 +670,33 @@ fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree) -> Match { } // Dump all possible `next_items` into `cur_items` for the next iteration. else if !next_items.is_empty() { + if let Some((iter, _)) = next_items[0].meta_result.take() { + // We've matched a possibly "glued" punct. The matched punct (hence + // `meta_result` also) must be the same for all items. + // FIXME: If there are multiple items, it's definitely redundant (and it's hacky! + // `meta_result` isn't supposed to be used this way). 
+ + // We already bumped, so no need to call `.next()` like in the other branch. + src = iter; + for item in next_items.iter_mut() { + item.meta_result = None; + } + } else { + match src.next() { + Some(tt::TokenTree::Subtree(subtree)) => { + stack.push(src.clone()); + src = TtIter::new(subtree); + } + None => { + if let Some(iter) = stack.pop() { + src = iter; + } + } + _ => (), + } + } // Now process the next token cur_items.extend(next_items.drain(..)); - - match src.next() { - Some(tt::TokenTree::Subtree(subtree)) => { - stack.push(src.clone()); - src = TtIter::new(subtree); - } - None => { - if let Some(iter) = stack.pop() { - src = iter; - } - } - _ => (), - } } // Finally, we have the case where we need to call the black-box parser to get some // nonterminal. @@ -663,27 +729,6 @@ fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree) -> Match { } } -fn match_leaf(lhs: &tt::Leaf, src: &mut TtIter<'_>) -> Result<(), ExpandError> { - let rhs = src - .expect_leaf() - .map_err(|()| ExpandError::binding_error(format!("expected leaf: `{lhs}`")))?; - match (lhs, rhs) { - ( - tt::Leaf::Punct(tt::Punct { char: lhs, .. }), - tt::Leaf::Punct(tt::Punct { char: rhs, .. }), - ) if lhs == rhs => Ok(()), - ( - tt::Leaf::Ident(tt::Ident { text: lhs, .. }), - tt::Leaf::Ident(tt::Ident { text: rhs, .. }), - ) if lhs == rhs => Ok(()), - ( - tt::Leaf::Literal(tt::Literal { text: lhs, .. }), - tt::Leaf::Literal(tt::Literal { text: rhs, .. }), - ) if lhs == rhs => Ok(()), - _ => Err(ExpandError::UnexpectedToken), - } -} - fn match_meta_var(kind: MetaVarKind, input: &mut TtIter<'_>) -> ExpandResult> { let fragment = match kind { MetaVarKind::Path => parser::PrefixEntryPoint::Path, @@ -698,12 +743,16 @@ fn match_meta_var(kind: MetaVarKind, input: &mut TtIter<'_>) -> ExpandResult parser::PrefixEntryPoint::Item, MetaVarKind::Vis => parser::PrefixEntryPoint::Vis, MetaVarKind::Expr => { - // `expr` should not match underscores. + // `expr` should not match underscores, let expressions, or inline const. The latter + // two are for [backwards compatibility][0]. // HACK: Macro expansion should not be done using "rollback and try another alternative". - // rustc [explicitly checks the next token][0]. - // [0]: https://github.com/rust-lang/rust/blob/f0c4da499/compiler/rustc_expand/src/mbe/macro_parser.rs#L576 + // rustc [explicitly checks the next token][1]. + // [0]: https://github.com/rust-lang/rust/issues/86730 + // [1]: https://github.com/rust-lang/rust/blob/f0c4da499/compiler/rustc_expand/src/mbe/macro_parser.rs#L576 match input.peek_n(0) { - Some(tt::TokenTree::Leaf(tt::Leaf::Ident(it))) if it.text == "_" => { + Some(tt::TokenTree::Leaf(tt::Leaf::Ident(it))) + if it.text == "_" || it.text == "let" || it.text == "const" => + { return ExpandResult::only_err(ExpandError::NoMatchingRule) } _ => {} @@ -752,10 +801,10 @@ fn collect_vars(collector_fun: &mut impl FnMut(SmolStr), pattern: &MetaTemplate) for op in pattern.iter() { match op { Op::Var { name, .. } => collector_fun(name.clone()), - Op::Leaf(_) => (), Op::Subtree { tokens, .. } => collect_vars(collector_fun, tokens), Op::Repeat { tokens, .. } => collect_vars(collector_fun, tokens), - Op::Ignore { .. } | Op::Index { .. } => {} + Op::Ignore { .. } | Op::Index { .. 
} | Op::Literal(_) | Op::Ident(_) | Op::Punct(_) => { + } } } } @@ -818,14 +867,14 @@ impl<'a> Iterator for OpDelimitedIter<'a> { } impl<'a> TtIter<'a> { - fn expect_separator(&mut self, separator: &Separator, idx: usize) -> bool { + fn expect_separator(&mut self, separator: &Separator) -> bool { let mut fork = self.clone(); let ok = match separator { - Separator::Ident(lhs) if idx == 0 => match fork.expect_ident_or_underscore() { + Separator::Ident(lhs) => match fork.expect_ident_or_underscore() { Ok(rhs) => rhs.text == lhs.text, Err(_) => false, }, - Separator::Literal(lhs) if idx == 0 => match fork.expect_literal() { + Separator::Literal(lhs) => match fork.expect_literal() { Ok(rhs) => match rhs { tt::Leaf::Literal(rhs) => rhs.text == lhs.text, tt::Leaf::Ident(rhs) => rhs.text == lhs.text, @@ -833,11 +882,14 @@ impl<'a> TtIter<'a> { }, Err(_) => false, }, - Separator::Puncts(lhss) if idx < lhss.len() => match fork.expect_punct() { - Ok(rhs) => rhs.char == lhss[idx].char, + Separator::Puncts(lhs) => match fork.expect_glued_punct() { + Ok(rhs) => { + let lhs = lhs.iter().map(|it| it.char); + let rhs = rhs.iter().map(|it| it.char); + lhs.eq(rhs) + } Err(_) => false, }, - _ => false, }; if ok { *self = fork; @@ -846,52 +898,21 @@ impl<'a> TtIter<'a> { } fn expect_tt(&mut self) -> Result { - match self.peek_n(0) { - Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) if punct.char == '\'' => { - return self.expect_lifetime(); + if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) = self.peek_n(0) { + if punct.char == '\'' { + self.expect_lifetime() + } else { + let puncts = self.expect_glued_punct()?; + let token_trees = puncts.into_iter().map(|p| tt::Leaf::Punct(p).into()).collect(); + Ok(tt::TokenTree::Subtree(tt::Subtree { delimiter: None, token_trees })) } - _ => (), - } - - let tt = self.next().ok_or(())?.clone(); - let punct = match tt { - tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if punct.spacing == tt::Spacing::Joint => { - punct - } - _ => return Ok(tt), - }; - - let (second, third) = match (self.peek_n(0), self.peek_n(1)) { - ( - Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p2))), - Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p3))), - ) if p2.spacing == tt::Spacing::Joint => (p2.char, Some(p3.char)), - (Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p2))), _) => (p2.char, None), - _ => return Ok(tt), - }; - - match (punct.char, second, third) { - ('.', '.', Some('.' | '=')) | ('<', '<', Some('=')) | ('>', '>', Some('=')) => { - let tt2 = self.next().unwrap().clone(); - let tt3 = self.next().unwrap().clone(); - Ok(tt::Subtree { delimiter: None, token_trees: vec![tt, tt2, tt3] }.into()) - } - ('-' | '!' 
| '*' | '/' | '&' | '%' | '^' | '+' | '<' | '=' | '>' | '|', '=', _) - | ('-' | '=' | '>', '>', _) - | (':', ':', _) - | ('.', '.', _) - | ('&', '&', _) - | ('<', '<', _) - | ('|', '|', _) => { - let tt2 = self.next().unwrap().clone(); - Ok(tt::Subtree { delimiter: None, token_trees: vec![tt, tt2] }.into()) - } - _ => Ok(tt), + } else { + self.next().ok_or(()).cloned() } } fn expect_lifetime(&mut self) -> Result { - let punct = self.expect_punct()?; + let punct = self.expect_single_punct()?; if punct.char != '\'' { return Err(()); } diff --git a/crates/mbe/src/expander/transcriber.rs b/crates/mbe/src/expander/transcriber.rs index cbb59ab8e6..db0d327bf4 100644 --- a/crates/mbe/src/expander/transcriber.rs +++ b/crates/mbe/src/expander/transcriber.rs @@ -134,7 +134,13 @@ fn expand_subtree( let mut err = None; for op in template.iter() { match op { - Op::Leaf(tt) => arena.push(tt.clone().into()), + Op::Literal(it) => arena.push(tt::Leaf::from(it.clone()).into()), + Op::Ident(it) => arena.push(tt::Leaf::from(it.clone()).into()), + Op::Punct(puncts) => { + for punct in puncts { + arena.push(tt::Leaf::from(punct.clone()).into()); + } + } Op::Subtree { tokens, delimiter } => { let ExpandResult { value: tt, err: e } = expand_subtree(ctx, tokens, *delimiter, arena); diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs index c4f0fa20d6..2373db97a3 100644 --- a/crates/mbe/src/lib.rs +++ b/crates/mbe/src/lib.rs @@ -140,7 +140,7 @@ impl Shift { | tt::Leaf::Punct(tt::Punct { id, .. }) | tt::Leaf::Literal(tt::Literal { id, .. })) = leaf; - (id != tt::TokenId::unspecified()).then(|| id.0) + (id != tt::TokenId::unspecified()).then_some(id.0) } }; subtree.token_trees.iter().filter_map(filter).max() diff --git a/crates/mbe/src/parser.rs b/crates/mbe/src/parser.rs index 351c359b73..fad905e97f 100644 --- a/crates/mbe/src/parser.rs +++ b/crates/mbe/src/parser.rs @@ -1,7 +1,7 @@ //! Parser recognizes special macro syntax, `$var` and `$(repeat)*`, in token //! trees. -use smallvec::SmallVec; +use smallvec::{smallvec, SmallVec}; use syntax::SmolStr; use crate::{tt_iter::TtIter, ParseError}; @@ -39,7 +39,7 @@ impl MetaTemplate { let mut src = TtIter::new(tt); let mut res = Vec::new(); - while let Some(first) = src.next() { + while let Some(first) = src.peek_n(0) { let op = next_op(first, &mut src, mode)?; res.push(op); } @@ -54,8 +54,10 @@ pub(crate) enum Op { Ignore { name: SmolStr, id: tt::TokenId }, Index { depth: u32 }, Repeat { tokens: MetaTemplate, kind: RepeatKind, separator: Option }, - Leaf(tt::Leaf), Subtree { tokens: MetaTemplate, delimiter: Option }, + Literal(tt::Literal), + Punct(SmallVec<[tt::Punct; 3]>), + Ident(tt::Ident), } #[derive(Copy, Clone, Debug, PartialEq, Eq)] @@ -108,28 +110,23 @@ impl PartialEq for Separator { } } -impl Separator { - pub(crate) fn tt_count(&self) -> usize { - match self { - Separator::Literal(_) => 1, - Separator::Ident(_) => 1, - Separator::Puncts(it) => it.len(), - } - } -} - #[derive(Clone, Copy)] enum Mode { Pattern, Template, } -fn next_op<'a>(first: &tt::TokenTree, src: &mut TtIter<'a>, mode: Mode) -> Result { - let res = match first { - tt::TokenTree::Leaf(leaf @ tt::Leaf::Punct(tt::Punct { char: '$', .. })) => { +fn next_op( + first_peeked: &tt::TokenTree, + src: &mut TtIter<'_>, + mode: Mode, +) -> Result { + let res = match first_peeked { + tt::TokenTree::Leaf(tt::Leaf::Punct(p @ tt::Punct { char: '$', .. })) => { + src.next().expect("first token already peeked"); // Note that the '$' itself is a valid token inside macro_rules. 
let second = match src.next() { - None => return Ok(Op::Leaf(leaf.clone())), + None => return Ok(Op::Punct(smallvec![p.clone()])), Some(it) => it, }; match second { @@ -160,7 +157,7 @@ fn next_op<'a>(first: &tt::TokenTree, src: &mut TtIter<'a>, mode: Mode) -> Resul tt::TokenTree::Leaf(leaf) => match leaf { tt::Leaf::Ident(ident) if ident.text == "crate" => { // We simply produce identifier `$crate` here. And it will be resolved when lowering ast to Path. - Op::Leaf(tt::Leaf::from(tt::Ident { text: "$crate".into(), id: ident.id })) + Op::Ident(tt::Ident { text: "$crate".into(), id: ident.id }) } tt::Leaf::Ident(ident) => { let kind = eat_fragment_kind(src, mode)?; @@ -180,7 +177,7 @@ fn next_op<'a>(first: &tt::TokenTree, src: &mut TtIter<'a>, mode: Mode) -> Resul "`$$` is not allowed on the pattern side", )) } - Mode::Template => Op::Leaf(tt::Leaf::Punct(*punct)), + Mode::Template => Op::Punct(smallvec![*punct]), }, tt::Leaf::Punct(_) | tt::Leaf::Literal(_) => { return Err(ParseError::expected("expected ident")) @@ -188,8 +185,25 @@ fn next_op<'a>(first: &tt::TokenTree, src: &mut TtIter<'a>, mode: Mode) -> Resul }, } } - tt::TokenTree::Leaf(tt) => Op::Leaf(tt.clone()), + + tt::TokenTree::Leaf(tt::Leaf::Literal(it)) => { + src.next().expect("first token already peeked"); + Op::Literal(it.clone()) + } + + tt::TokenTree::Leaf(tt::Leaf::Ident(it)) => { + src.next().expect("first token already peeked"); + Op::Ident(it.clone()) + } + + tt::TokenTree::Leaf(tt::Leaf::Punct(_)) => { + // There's at least one punct so this shouldn't fail. + let puncts = src.expect_glued_punct().unwrap(); + Op::Punct(puncts) + } + tt::TokenTree::Subtree(subtree) => { + src.next().expect("first token already peeked"); let tokens = MetaTemplate::parse(subtree, mode)?; Op::Subtree { tokens, delimiter: subtree.delimiter } } @@ -259,7 +273,7 @@ fn parse_repeat(src: &mut TtIter<'_>) -> Result<(Option, RepeatKind), _ => return Err(ParseError::InvalidRepeat), }, }; - return Ok((has_sep.then(|| separator), repeat_kind)); + return Ok((has_sep.then_some(separator), repeat_kind)); } } } diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs index cf53c16726..5c96505563 100644 --- a/crates/mbe/src/syntax_bridge.rs +++ b/crates/mbe/src/syntax_bridge.rs @@ -145,7 +145,7 @@ pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec { } if iter.peek_n(0).is_some() { - res.push(tt::Subtree { delimiter: None, token_trees: iter.into_iter().cloned().collect() }); + res.push(tt::Subtree { delimiter: None, token_trees: iter.cloned().collect() }); } res @@ -237,7 +237,7 @@ fn convert_tokens(conv: &mut C) -> tt::Subtree { let char = match token.to_char(conv) { Some(c) => c, None => { - panic!("Token from lexer must be single char: token = {:#?}", token); + panic!("Token from lexer must be single char: token = {token:#?}"); } }; tt::Leaf::from(tt::Punct { char, spacing, id: conv.id_alloc().alloc(range, synth_id) }) diff --git a/crates/mbe/src/syntax_bridge/tests.rs b/crates/mbe/src/syntax_bridge/tests.rs index 4e04d2bc1c..c1a6083655 100644 --- a/crates/mbe/src/syntax_bridge/tests.rs +++ b/crates/mbe/src/syntax_bridge/tests.rs @@ -19,7 +19,7 @@ fn check_punct_spacing(fixture: &str) { let spacing = match annotation.as_str() { "Alone" => Spacing::Alone, "Joint" => Spacing::Joint, - a => panic!("unknown annotation: {}", a), + a => panic!("unknown annotation: {a}"), }; (token, spacing) }) @@ -30,7 +30,7 @@ fn check_punct_spacing(fixture: &str) { while !cursor.eof() { while let Some(token_tree) = cursor.token_tree() { if 
let TokenTreeRef::Leaf(Leaf::Punct(Punct { spacing, id, .. }), _) = token_tree { - if let Some(expected) = annotations.remove(&id) { + if let Some(expected) = annotations.remove(id) { assert_eq!(expected, *spacing); } } @@ -39,7 +39,7 @@ fn check_punct_spacing(fixture: &str) { cursor = cursor.bump(); } - assert!(annotations.is_empty(), "unchecked annotations: {:?}", annotations); + assert!(annotations.is_empty(), "unchecked annotations: {annotations:?}"); } #[test] diff --git a/crates/mbe/src/to_parser_input.rs b/crates/mbe/src/to_parser_input.rs index 783c3ca4a8..7013aa58b5 100644 --- a/crates/mbe/src/to_parser_input.rs +++ b/crates/mbe/src/to_parser_input.rs @@ -60,7 +60,7 @@ pub(crate) fn to_parser_input(buffer: &TokenBuffer<'_>) -> parser::Input { }, tt::Leaf::Punct(punct) => { let kind = SyntaxKind::from_char(punct.char) - .unwrap_or_else(|| panic!("{:#?} is not a valid punct", punct)); + .unwrap_or_else(|| panic!("{punct:#?} is not a valid punct")); res.push(kind); if punct.spacing == tt::Spacing::Joint { res.was_joint(); diff --git a/crates/mbe/src/tt_iter.rs b/crates/mbe/src/tt_iter.rs index 7aceb676c7..bee7b5de6a 100644 --- a/crates/mbe/src/tt_iter.rs +++ b/crates/mbe/src/tt_iter.rs @@ -1,6 +1,7 @@ //! A "Parser" structure for token trees. We use this when parsing a declarative //! macro definition into a list of patterns and templates. +use smallvec::{smallvec, SmallVec}; use syntax::SyntaxKind; use tt::buffer::TokenBuffer; @@ -80,13 +81,56 @@ impl<'a> TtIter<'a> { } } - pub(crate) fn expect_punct(&mut self) -> Result<&'a tt::Punct, ()> { + pub(crate) fn expect_single_punct(&mut self) -> Result<&'a tt::Punct, ()> { match self.expect_leaf()? { tt::Leaf::Punct(it) => Ok(it), _ => Err(()), } } + /// Returns consecutive `Punct`s that can be glued together. + /// + /// This method currently may return a single quotation, which is part of lifetime ident and + /// conceptually not a punct in the context of mbe. Callers should handle this. + pub(crate) fn expect_glued_punct(&mut self) -> Result, ()> { + let tt::TokenTree::Leaf(tt::Leaf::Punct(first)) = self.next().ok_or(())?.clone() else { + return Err(()); + }; + + if first.spacing == tt::Spacing::Alone { + return Ok(smallvec![first]); + } + + let (second, third) = match (self.peek_n(0), self.peek_n(1)) { + ( + Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p2))), + Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p3))), + ) if p2.spacing == tt::Spacing::Joint => (p2, Some(p3)), + (Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p2))), _) => (p2, None), + _ => return Ok(smallvec![first]), + }; + + match (first.char, second.char, third.map(|it| it.char)) { + ('.', '.', Some('.' | '=')) | ('<', '<', Some('=')) | ('>', '>', Some('=')) => { + let _ = self.next().unwrap(); + let _ = self.next().unwrap(); + Ok(smallvec![first, second.clone(), third.unwrap().clone()]) + } + ('-' | '!' 
| '*' | '/' | '&' | '%' | '^' | '+' | '<' | '=' | '>' | '|', '=', _) + | ('-' | '=' | '>', '>', _) + | ('<', '-', _) + | (':', ':', _) + | ('.', '.', _) + | ('&', '&', _) + | ('<', '<', _) + | ('|', '|', _) => { + let _ = self.next().unwrap(); + Ok(smallvec![first, second.clone()]) + } + _ => Ok(smallvec![first]), + } + } + pub(crate) fn expect_fragment( &mut self, entry_point: parser::PrefixEntryPoint, @@ -141,7 +185,7 @@ impl<'a> TtIter<'a> { ExpandResult { value: res, err } } - pub(crate) fn peek_n(&self, n: usize) -> Option<&tt::TokenTree> { + pub(crate) fn peek_n(&self, n: usize) -> Option<&'a tt::TokenTree> { self.inner.as_slice().get(n) } } diff --git a/crates/parser/src/grammar.rs b/crates/parser/src/grammar.rs index b746832961..485b612f08 100644 --- a/crates/parser/src/grammar.rs +++ b/crates/parser/src/grammar.rs @@ -51,7 +51,7 @@ pub(crate) mod entry { use super::*; pub(crate) fn vis(p: &mut Parser<'_>) { - let _ = opt_visibility(p, false); + opt_visibility(p, false); } pub(crate) fn block(p: &mut Parser<'_>) { @@ -70,10 +70,10 @@ pub(crate) mod entry { types::type_(p); } pub(crate) fn expr(p: &mut Parser<'_>) { - let _ = expressions::expr(p); + expressions::expr(p); } pub(crate) fn path(p: &mut Parser<'_>) { - let _ = paths::type_path(p); + paths::type_path(p); } pub(crate) fn item(p: &mut Parser<'_>) { items::item_or_macro(p, true); diff --git a/crates/parser/src/grammar/expressions.rs b/crates/parser/src/grammar/expressions.rs index dcaceade65..8932330b82 100644 --- a/crates/parser/src/grammar/expressions.rs +++ b/crates/parser/src/grammar/expressions.rs @@ -288,7 +288,7 @@ fn expr_bp( } const LHS_FIRST: TokenSet = - atom::ATOM_EXPR_FIRST.union(TokenSet::new(&[T![&], T![*], T![!], T![.], T![-]])); + atom::ATOM_EXPR_FIRST.union(TokenSet::new(&[T![&], T![*], T![!], T![.], T![-], T![_]])); fn lhs(p: &mut Parser<'_>, r: Restrictions) -> Option<(CompletedMarker, BlockLike)> { let m; diff --git a/crates/parser/src/grammar/expressions/atom.rs b/crates/parser/src/grammar/expressions/atom.rs index 99f42a2662..efa3997353 100644 --- a/crates/parser/src/grammar/expressions/atom.rs +++ b/crates/parser/src/grammar/expressions/atom.rs @@ -48,6 +48,7 @@ pub(super) const ATOM_EXPR_FIRST: TokenSet = T![unsafe], T![return], T![yield], + T![do], T![break], T![continue], T![async], @@ -93,6 +94,7 @@ pub(super) fn atom_expr( T![match] => match_expr(p), T![return] => return_expr(p), T![yield] => yield_expr(p), + T![do] if p.nth_at_contextual_kw(1, T![yeet]) => yeet_expr(p), T![continue] => continue_expr(p), T![break] => break_expr(p, r), @@ -278,6 +280,8 @@ fn closure_expr(p: &mut Parser<'_>) -> CompletedMarker { // fn main() { || -> i32 { 92 }(); } block_expr(p); } else if p.at_ts(EXPR_FIRST) { + // test closure_body_underscore_assignment + // fn main() { || _ = 0; } expr(p); } else { p.error("expected expression"); @@ -531,6 +535,7 @@ fn return_expr(p: &mut Parser<'_>) -> CompletedMarker { } m.complete(p, RETURN_EXPR) } + // test yield_expr // fn foo() { // yield; @@ -546,6 +551,23 @@ fn yield_expr(p: &mut Parser<'_>) -> CompletedMarker { m.complete(p, YIELD_EXPR) } +// test yeet_expr +// fn foo() { +// do yeet; +// do yeet 1 +// } +fn yeet_expr(p: &mut Parser<'_>) -> CompletedMarker { + assert!(p.at(T![do])); + assert!(p.nth_at_contextual_kw(1, T![yeet])); + let m = p.start(); + p.bump(T![do]); + p.bump_remap(T![yeet]); + if p.at_ts(EXPR_FIRST) { + expr(p); + } + m.complete(p, YEET_EXPR) +} + // test continue_expr // fn foo() { // loop { diff --git a/crates/parser/src/grammar/paths.rs 
b/crates/parser/src/grammar/paths.rs index 5dc9c6c82a..af3b6f63cf 100644 --- a/crates/parser/src/grammar/paths.rs +++ b/crates/parser/src/grammar/paths.rs @@ -83,11 +83,12 @@ fn path_segment(p: &mut Parser<'_>, mode: Mode, first: bool) { } p.expect(T![>]); } else { - let mut empty = true; - if first { + let empty = if first { p.eat(T![::]); - empty = false; - } + false + } else { + true + }; match p.current() { IDENT => { name_ref(p); diff --git a/crates/parser/src/grammar/patterns.rs b/crates/parser/src/grammar/patterns.rs index bc1224af9b..abcefffa23 100644 --- a/crates/parser/src/grammar/patterns.rs +++ b/crates/parser/src/grammar/patterns.rs @@ -62,39 +62,50 @@ fn pattern_r(p: &mut Parser<'_>, recovery_set: TokenSet) { } fn pattern_single_r(p: &mut Parser<'_>, recovery_set: TokenSet) { - if let Some(lhs) = atom_pat(p, recovery_set) { - // test range_pat - // fn main() { - // match 92 { - // 0 ... 100 => (), - // 101 ..= 200 => (), - // 200 .. 301 => (), - // 302 .. => (), - // } - // - // match Some(10 as u8) { - // Some(0) | None => (), - // Some(1..) => () - // } - // - // match () { - // S { a: 0 } => (), - // S { a: 1.. } => (), - // } - // - // match () { - // [0] => (), - // [1..] => (), - // } - // - // match (10 as u8, 5 as u8) { - // (0, _) => (), - // (1.., _) => () - // } - // } + // test range_pat + // fn main() { + // match 92 { + // 0 ... 100 => (), + // 101 ..= 200 => (), + // 200 .. 301 => (), + // 302 .. => (), + // ..= 303 => (), + // } + // + // match Some(10 as u8) { + // Some(0) | None => (), + // Some(1..) => (), + // Some(..=2) => (), + // } + // + // match () { + // S { a: 0 } => (), + // S { a: 1.. } => (), + // S { a: ..=2 } => (), + // } + // + // match () { + // [0] => (), + // [1..] => (), + // [..=2] => (), + // } + // + // match (10 as u8, 5 as u8) { + // (0, _) => (), + // (1.., _) => (), + // (..=2, _) => (), + // } + // } - // FIXME: support half_open_range_patterns (`..=2`), - // exclusive_range_pattern (`..5`) with missing lhs + if p.at(T![..=]) { + let m = p.start(); + p.bump(T![..=]); + atom_pat(p, recovery_set); + m.complete(p, RANGE_PAT); + return; + } + + if let Some(lhs) = atom_pat(p, recovery_set) { for range_op in [T![...], T![..=], T![..]] { if p.at(range_op) { let m = lhs.precede(p); @@ -115,11 +126,21 @@ fn pattern_single_r(p: &mut Parser<'_>, recovery_set: TokenSet) { // ^ // `[0..]` // ^ - if matches!(p.current(), T![=] | T![,] | T![:] | T![')'] | T!['}'] | T![']']) { + // `0 .. if` + // ^ + if matches!( + p.current(), + T![=] | T![,] | T![:] | T![')'] | T!['}'] | T![']'] | T![if] + ) { // test half_open_range_pat // fn f() { // let 0 .. = 1u32; // let 0..: _ = 1u32; + // + // match 42 { + // 0 .. 
if true => (), + // _ => (), + // } // } } else { atom_pat(p, recovery_set); diff --git a/crates/parser/src/lexed_str.rs b/crates/parser/src/lexed_str.rs index f4b9988eac..b48921f191 100644 --- a/crates/parser/src/lexed_str.rs +++ b/crates/parser/src/lexed_str.rs @@ -57,7 +57,7 @@ impl<'a> LexedStr<'a> { let mut conv = Converter::new(text); conv.extend_token(&token.kind, text); match &*conv.res.kind { - [kind] => Some((*kind, conv.res.error.pop().map(|it| it.msg.clone()))), + [kind] => Some((*kind, conv.res.error.pop().map(|it| it.msg))), _ => None, } } diff --git a/crates/parser/src/output.rs b/crates/parser/src/output.rs index e9ec9822d6..6ca841cfe0 100644 --- a/crates/parser/src/output.rs +++ b/crates/parser/src/output.rs @@ -54,7 +54,7 @@ impl Output { } pub(crate) fn token(&mut self, kind: SyntaxKind, n_tokens: u8) { - let e = ((kind as u16 as u32) << 16) | ((n_tokens as u32) << 8) | (0 << 4) | 1; + let e = ((kind as u16 as u32) << 16) | ((n_tokens as u32) << 8) | 1; self.event.push(e) } diff --git a/crates/parser/src/parser.rs b/crates/parser/src/parser.rs index 48d8350e07..48aecb35be 100644 --- a/crates/parser/src/parser.rs +++ b/crates/parser/src/parser.rs @@ -148,11 +148,16 @@ impl<'t> Parser<'t> { kinds.contains(self.current()) } - /// Checks if the current token is contextual keyword with text `t`. + /// Checks if the current token is contextual keyword `kw`. pub(crate) fn at_contextual_kw(&self, kw: SyntaxKind) -> bool { self.inp.contextual_kind(self.pos) == kw } + /// Checks if the nth token is contextual keyword `kw`. + pub(crate) fn nth_at_contextual_kw(&self, n: usize, kw: SyntaxKind) -> bool { + self.inp.contextual_kind(self.pos + n) == kw + } + /// Starts a new node in the syntax tree. All nodes and tokens /// consumed between the `start` and the corresponding `Marker::complete` /// belong to the same node. @@ -162,7 +167,7 @@ impl<'t> Parser<'t> { Marker::new(pos) } - /// Consume the next token if `kind` matches. + /// Consume the next token. Panics if the parser isn't currently at `kind`. pub(crate) fn bump(&mut self, kind: SyntaxKind) { assert!(self.eat(kind)); } @@ -205,7 +210,7 @@ impl<'t> Parser<'t> { if self.eat(kind) { return true; } - self.error(format!("expected {:?}", kind)); + self.error(format!("expected {kind:?}")); false } @@ -237,6 +242,7 @@ impl<'t> Parser<'t> { fn do_bump(&mut self, kind: SyntaxKind, n_raw_tokens: u8) { self.pos += n_raw_tokens as usize; + self.steps.set(0); self.push_event(Event::Token { kind, n_raw_tokens }); } diff --git a/crates/parser/src/shortcuts.rs b/crates/parser/src/shortcuts.rs index 4b805faddc..2be4050d13 100644 --- a/crates/parser/src/shortcuts.rs +++ b/crates/parser/src/shortcuts.rs @@ -80,8 +80,8 @@ impl<'a> LexedStr<'a> { State::PendingEnter | State::Normal => unreachable!(), } - let is_eof = builder.pos == builder.lexed.len(); - is_eof + // is_eof? 
+ builder.pos == builder.lexed.len() } } diff --git a/crates/parser/src/syntax_kind/generated.rs b/crates/parser/src/syntax_kind/generated.rs index c84f45f1f8..52b3fc23d5 100644 --- a/crates/parser/src/syntax_kind/generated.rs +++ b/crates/parser/src/syntax_kind/generated.rs @@ -69,6 +69,7 @@ pub enum SyntaxKind { CONST_KW, CONTINUE_KW, CRATE_KW, + DO_KW, DYN_KW, ELSE_KW, ENUM_KW, @@ -109,6 +110,7 @@ pub enum SyntaxKind { UNION_KW, RAW_KW, MACRO_RULES_KW, + YEET_KW, INT_NUMBER, FLOAT_NUMBER, CHAR, @@ -188,6 +190,7 @@ pub enum SyntaxKind { STMT_LIST, RETURN_EXPR, YIELD_EXPR, + YEET_EXPR, LET_EXPR, UNDERSCORE_EXPR, MACRO_EXPR, @@ -272,6 +275,7 @@ impl SyntaxKind { | CONST_KW | CONTINUE_KW | CRATE_KW + | DO_KW | DYN_KW | ELSE_KW | ENUM_KW @@ -312,6 +316,7 @@ impl SyntaxKind { | UNION_KW | RAW_KW | MACRO_RULES_KW + | YEET_KW ) } pub fn is_punct(self) -> bool { @@ -384,6 +389,7 @@ impl SyntaxKind { "const" => CONST_KW, "continue" => CONTINUE_KW, "crate" => CRATE_KW, + "do" => DO_KW, "dyn" => DYN_KW, "else" => ELSE_KW, "enum" => ENUM_KW, @@ -430,6 +436,7 @@ impl SyntaxKind { "union" => UNION_KW, "raw" => RAW_KW, "macro_rules" => MACRO_RULES_KW, + "yeet" => YEET_KW, _ => return None, }; Some(kw) @@ -470,5 +477,5 @@ impl SyntaxKind { } } #[macro_export] -macro_rules ! T { [;] => { $ crate :: SyntaxKind :: SEMICOLON } ; [,] => { $ crate :: SyntaxKind :: COMMA } ; ['('] => { $ crate :: SyntaxKind :: L_PAREN } ; [')'] => { $ crate :: SyntaxKind :: R_PAREN } ; ['{'] => { $ crate :: SyntaxKind :: L_CURLY } ; ['}'] => { $ crate :: SyntaxKind :: R_CURLY } ; ['['] => { $ crate :: SyntaxKind :: L_BRACK } ; [']'] => { $ crate :: SyntaxKind :: R_BRACK } ; [<] => { $ crate :: SyntaxKind :: L_ANGLE } ; [>] => { $ crate :: SyntaxKind :: R_ANGLE } ; [@] => { $ crate :: SyntaxKind :: AT } ; [#] => { $ crate :: SyntaxKind :: POUND } ; [~] => { $ crate :: SyntaxKind :: TILDE } ; [?] => { $ crate :: SyntaxKind :: QUESTION } ; [$] => { $ crate :: SyntaxKind :: DOLLAR } ; [&] => { $ crate :: SyntaxKind :: AMP } ; [|] => { $ crate :: SyntaxKind :: PIPE } ; [+] => { $ crate :: SyntaxKind :: PLUS } ; [*] => { $ crate :: SyntaxKind :: STAR } ; [/] => { $ crate :: SyntaxKind :: SLASH } ; [^] => { $ crate :: SyntaxKind :: CARET } ; [%] => { $ crate :: SyntaxKind :: PERCENT } ; [_] => { $ crate :: SyntaxKind :: UNDERSCORE } ; [.] => { $ crate :: SyntaxKind :: DOT } ; [..] => { $ crate :: SyntaxKind :: DOT2 } ; [...] => { $ crate :: SyntaxKind :: DOT3 } ; [..=] => { $ crate :: SyntaxKind :: DOT2EQ } ; [:] => { $ crate :: SyntaxKind :: COLON } ; [::] => { $ crate :: SyntaxKind :: COLON2 } ; [=] => { $ crate :: SyntaxKind :: EQ } ; [==] => { $ crate :: SyntaxKind :: EQ2 } ; [=>] => { $ crate :: SyntaxKind :: FAT_ARROW } ; [!] 
=> { $ crate :: SyntaxKind :: BANG } ; [!=] => { $ crate :: SyntaxKind :: NEQ } ; [-] => { $ crate :: SyntaxKind :: MINUS } ; [->] => { $ crate :: SyntaxKind :: THIN_ARROW } ; [<=] => { $ crate :: SyntaxKind :: LTEQ } ; [>=] => { $ crate :: SyntaxKind :: GTEQ } ; [+=] => { $ crate :: SyntaxKind :: PLUSEQ } ; [-=] => { $ crate :: SyntaxKind :: MINUSEQ } ; [|=] => { $ crate :: SyntaxKind :: PIPEEQ } ; [&=] => { $ crate :: SyntaxKind :: AMPEQ } ; [^=] => { $ crate :: SyntaxKind :: CARETEQ } ; [/=] => { $ crate :: SyntaxKind :: SLASHEQ } ; [*=] => { $ crate :: SyntaxKind :: STAREQ } ; [%=] => { $ crate :: SyntaxKind :: PERCENTEQ } ; [&&] => { $ crate :: SyntaxKind :: AMP2 } ; [||] => { $ crate :: SyntaxKind :: PIPE2 } ; [<<] => { $ crate :: SyntaxKind :: SHL } ; [>>] => { $ crate :: SyntaxKind :: SHR } ; [<<=] => { $ crate :: SyntaxKind :: SHLEQ } ; [>>=] => { $ crate :: SyntaxKind :: SHREQ } ; [as] => { $ crate :: SyntaxKind :: AS_KW } ; [async] => { $ crate :: SyntaxKind :: ASYNC_KW } ; [await] => { $ crate :: SyntaxKind :: AWAIT_KW } ; [box] => { $ crate :: SyntaxKind :: BOX_KW } ; [break] => { $ crate :: SyntaxKind :: BREAK_KW } ; [const] => { $ crate :: SyntaxKind :: CONST_KW } ; [continue] => { $ crate :: SyntaxKind :: CONTINUE_KW } ; [crate] => { $ crate :: SyntaxKind :: CRATE_KW } ; [dyn] => { $ crate :: SyntaxKind :: DYN_KW } ; [else] => { $ crate :: SyntaxKind :: ELSE_KW } ; [enum] => { $ crate :: SyntaxKind :: ENUM_KW } ; [extern] => { $ crate :: SyntaxKind :: EXTERN_KW } ; [false] => { $ crate :: SyntaxKind :: FALSE_KW } ; [fn] => { $ crate :: SyntaxKind :: FN_KW } ; [for] => { $ crate :: SyntaxKind :: FOR_KW } ; [if] => { $ crate :: SyntaxKind :: IF_KW } ; [impl] => { $ crate :: SyntaxKind :: IMPL_KW } ; [in] => { $ crate :: SyntaxKind :: IN_KW } ; [let] => { $ crate :: SyntaxKind :: LET_KW } ; [loop] => { $ crate :: SyntaxKind :: LOOP_KW } ; [macro] => { $ crate :: SyntaxKind :: MACRO_KW } ; [match] => { $ crate :: SyntaxKind :: MATCH_KW } ; [mod] => { $ crate :: SyntaxKind :: MOD_KW } ; [move] => { $ crate :: SyntaxKind :: MOVE_KW } ; [mut] => { $ crate :: SyntaxKind :: MUT_KW } ; [pub] => { $ crate :: SyntaxKind :: PUB_KW } ; [ref] => { $ crate :: SyntaxKind :: REF_KW } ; [return] => { $ crate :: SyntaxKind :: RETURN_KW } ; [self] => { $ crate :: SyntaxKind :: SELF_KW } ; [Self] => { $ crate :: SyntaxKind :: SELF_TYPE_KW } ; [static] => { $ crate :: SyntaxKind :: STATIC_KW } ; [struct] => { $ crate :: SyntaxKind :: STRUCT_KW } ; [super] => { $ crate :: SyntaxKind :: SUPER_KW } ; [trait] => { $ crate :: SyntaxKind :: TRAIT_KW } ; [true] => { $ crate :: SyntaxKind :: TRUE_KW } ; [try] => { $ crate :: SyntaxKind :: TRY_KW } ; [type] => { $ crate :: SyntaxKind :: TYPE_KW } ; [unsafe] => { $ crate :: SyntaxKind :: UNSAFE_KW } ; [use] => { $ crate :: SyntaxKind :: USE_KW } ; [where] => { $ crate :: SyntaxKind :: WHERE_KW } ; [while] => { $ crate :: SyntaxKind :: WHILE_KW } ; [yield] => { $ crate :: SyntaxKind :: YIELD_KW } ; [auto] => { $ crate :: SyntaxKind :: AUTO_KW } ; [default] => { $ crate :: SyntaxKind :: DEFAULT_KW } ; [existential] => { $ crate :: SyntaxKind :: EXISTENTIAL_KW } ; [union] => { $ crate :: SyntaxKind :: UNION_KW } ; [raw] => { $ crate :: SyntaxKind :: RAW_KW } ; [macro_rules] => { $ crate :: SyntaxKind :: MACRO_RULES_KW } ; [lifetime_ident] => { $ crate :: SyntaxKind :: LIFETIME_IDENT } ; [ident] => { $ crate :: SyntaxKind :: IDENT } ; [shebang] => { $ crate :: SyntaxKind :: SHEBANG } ; } +macro_rules ! 
T { [;] => { $ crate :: SyntaxKind :: SEMICOLON } ; [,] => { $ crate :: SyntaxKind :: COMMA } ; ['('] => { $ crate :: SyntaxKind :: L_PAREN } ; [')'] => { $ crate :: SyntaxKind :: R_PAREN } ; ['{'] => { $ crate :: SyntaxKind :: L_CURLY } ; ['}'] => { $ crate :: SyntaxKind :: R_CURLY } ; ['['] => { $ crate :: SyntaxKind :: L_BRACK } ; [']'] => { $ crate :: SyntaxKind :: R_BRACK } ; [<] => { $ crate :: SyntaxKind :: L_ANGLE } ; [>] => { $ crate :: SyntaxKind :: R_ANGLE } ; [@] => { $ crate :: SyntaxKind :: AT } ; [#] => { $ crate :: SyntaxKind :: POUND } ; [~] => { $ crate :: SyntaxKind :: TILDE } ; [?] => { $ crate :: SyntaxKind :: QUESTION } ; [$] => { $ crate :: SyntaxKind :: DOLLAR } ; [&] => { $ crate :: SyntaxKind :: AMP } ; [|] => { $ crate :: SyntaxKind :: PIPE } ; [+] => { $ crate :: SyntaxKind :: PLUS } ; [*] => { $ crate :: SyntaxKind :: STAR } ; [/] => { $ crate :: SyntaxKind :: SLASH } ; [^] => { $ crate :: SyntaxKind :: CARET } ; [%] => { $ crate :: SyntaxKind :: PERCENT } ; [_] => { $ crate :: SyntaxKind :: UNDERSCORE } ; [.] => { $ crate :: SyntaxKind :: DOT } ; [..] => { $ crate :: SyntaxKind :: DOT2 } ; [...] => { $ crate :: SyntaxKind :: DOT3 } ; [..=] => { $ crate :: SyntaxKind :: DOT2EQ } ; [:] => { $ crate :: SyntaxKind :: COLON } ; [::] => { $ crate :: SyntaxKind :: COLON2 } ; [=] => { $ crate :: SyntaxKind :: EQ } ; [==] => { $ crate :: SyntaxKind :: EQ2 } ; [=>] => { $ crate :: SyntaxKind :: FAT_ARROW } ; [!] => { $ crate :: SyntaxKind :: BANG } ; [!=] => { $ crate :: SyntaxKind :: NEQ } ; [-] => { $ crate :: SyntaxKind :: MINUS } ; [->] => { $ crate :: SyntaxKind :: THIN_ARROW } ; [<=] => { $ crate :: SyntaxKind :: LTEQ } ; [>=] => { $ crate :: SyntaxKind :: GTEQ } ; [+=] => { $ crate :: SyntaxKind :: PLUSEQ } ; [-=] => { $ crate :: SyntaxKind :: MINUSEQ } ; [|=] => { $ crate :: SyntaxKind :: PIPEEQ } ; [&=] => { $ crate :: SyntaxKind :: AMPEQ } ; [^=] => { $ crate :: SyntaxKind :: CARETEQ } ; [/=] => { $ crate :: SyntaxKind :: SLASHEQ } ; [*=] => { $ crate :: SyntaxKind :: STAREQ } ; [%=] => { $ crate :: SyntaxKind :: PERCENTEQ } ; [&&] => { $ crate :: SyntaxKind :: AMP2 } ; [||] => { $ crate :: SyntaxKind :: PIPE2 } ; [<<] => { $ crate :: SyntaxKind :: SHL } ; [>>] => { $ crate :: SyntaxKind :: SHR } ; [<<=] => { $ crate :: SyntaxKind :: SHLEQ } ; [>>=] => { $ crate :: SyntaxKind :: SHREQ } ; [as] => { $ crate :: SyntaxKind :: AS_KW } ; [async] => { $ crate :: SyntaxKind :: ASYNC_KW } ; [await] => { $ crate :: SyntaxKind :: AWAIT_KW } ; [box] => { $ crate :: SyntaxKind :: BOX_KW } ; [break] => { $ crate :: SyntaxKind :: BREAK_KW } ; [const] => { $ crate :: SyntaxKind :: CONST_KW } ; [continue] => { $ crate :: SyntaxKind :: CONTINUE_KW } ; [crate] => { $ crate :: SyntaxKind :: CRATE_KW } ; [do] => { $ crate :: SyntaxKind :: DO_KW } ; [dyn] => { $ crate :: SyntaxKind :: DYN_KW } ; [else] => { $ crate :: SyntaxKind :: ELSE_KW } ; [enum] => { $ crate :: SyntaxKind :: ENUM_KW } ; [extern] => { $ crate :: SyntaxKind :: EXTERN_KW } ; [false] => { $ crate :: SyntaxKind :: FALSE_KW } ; [fn] => { $ crate :: SyntaxKind :: FN_KW } ; [for] => { $ crate :: SyntaxKind :: FOR_KW } ; [if] => { $ crate :: SyntaxKind :: IF_KW } ; [impl] => { $ crate :: SyntaxKind :: IMPL_KW } ; [in] => { $ crate :: SyntaxKind :: IN_KW } ; [let] => { $ crate :: SyntaxKind :: LET_KW } ; [loop] => { $ crate :: SyntaxKind :: LOOP_KW } ; [macro] => { $ crate :: SyntaxKind :: MACRO_KW } ; [match] => { $ crate :: SyntaxKind :: MATCH_KW } ; [mod] => { $ crate :: SyntaxKind :: MOD_KW } ; [move] => { $ crate 
:: SyntaxKind :: MOVE_KW } ; [mut] => { $ crate :: SyntaxKind :: MUT_KW } ; [pub] => { $ crate :: SyntaxKind :: PUB_KW } ; [ref] => { $ crate :: SyntaxKind :: REF_KW } ; [return] => { $ crate :: SyntaxKind :: RETURN_KW } ; [self] => { $ crate :: SyntaxKind :: SELF_KW } ; [Self] => { $ crate :: SyntaxKind :: SELF_TYPE_KW } ; [static] => { $ crate :: SyntaxKind :: STATIC_KW } ; [struct] => { $ crate :: SyntaxKind :: STRUCT_KW } ; [super] => { $ crate :: SyntaxKind :: SUPER_KW } ; [trait] => { $ crate :: SyntaxKind :: TRAIT_KW } ; [true] => { $ crate :: SyntaxKind :: TRUE_KW } ; [try] => { $ crate :: SyntaxKind :: TRY_KW } ; [type] => { $ crate :: SyntaxKind :: TYPE_KW } ; [unsafe] => { $ crate :: SyntaxKind :: UNSAFE_KW } ; [use] => { $ crate :: SyntaxKind :: USE_KW } ; [where] => { $ crate :: SyntaxKind :: WHERE_KW } ; [while] => { $ crate :: SyntaxKind :: WHILE_KW } ; [yield] => { $ crate :: SyntaxKind :: YIELD_KW } ; [auto] => { $ crate :: SyntaxKind :: AUTO_KW } ; [default] => { $ crate :: SyntaxKind :: DEFAULT_KW } ; [existential] => { $ crate :: SyntaxKind :: EXISTENTIAL_KW } ; [union] => { $ crate :: SyntaxKind :: UNION_KW } ; [raw] => { $ crate :: SyntaxKind :: RAW_KW } ; [macro_rules] => { $ crate :: SyntaxKind :: MACRO_RULES_KW } ; [yeet] => { $ crate :: SyntaxKind :: YEET_KW } ; [lifetime_ident] => { $ crate :: SyntaxKind :: LIFETIME_IDENT } ; [ident] => { $ crate :: SyntaxKind :: IDENT } ; [shebang] => { $ crate :: SyntaxKind :: SHEBANG } ; } pub use T; diff --git a/crates/parser/src/tests.rs b/crates/parser/src/tests.rs index 735c0b3e40..c1b4e9a7d8 100644 --- a/crates/parser/src/tests.rs +++ b/crates/parser/src/tests.rs @@ -37,8 +37,8 @@ fn lex(text: &str) -> String { let text = lexed.text(i); let error = lexed.error(i); - let error = error.map(|err| format!(" error: {}", err)).unwrap_or_default(); - writeln!(res, "{:?} {:?}{}", kind, text, error).unwrap(); + let error = error.map(|err| format!(" error: {err}")).unwrap_or_default(); + writeln!(res, "{kind:?} {text:?}{error}").unwrap(); } res } @@ -47,7 +47,7 @@ fn lex(text: &str) -> String { fn parse_ok() { for case in TestCase::list("parser/ok") { let (actual, errors) = parse(TopEntryPoint::SourceFile, &case.text); - assert!(!errors, "errors in an OK file {}:\n{}", case.rs.display(), actual); + assert!(!errors, "errors in an OK file {}:\n{actual}", case.rs.display()); expect_file![case.rast].assert_eq(&actual); } } @@ -56,7 +56,7 @@ fn parse_ok() { fn parse_inline_ok() { for case in TestCase::list("parser/inline/ok") { let (actual, errors) = parse(TopEntryPoint::SourceFile, &case.text); - assert!(!errors, "errors in an OK file {}:\n{}", case.rs.display(), actual); + assert!(!errors, "errors in an OK file {}:\n{actual}", case.rs.display()); expect_file![case.rast].assert_eq(&actual); } } @@ -65,7 +65,7 @@ fn parse_inline_ok() { fn parse_err() { for case in TestCase::list("parser/err") { let (actual, errors) = parse(TopEntryPoint::SourceFile, &case.text); - assert!(errors, "no errors in an ERR file {}:\n{}", case.rs.display(), actual); + assert!(errors, "no errors in an ERR file {}:\n{actual}", case.rs.display()); expect_file![case.rast].assert_eq(&actual) } } @@ -74,7 +74,7 @@ fn parse_err() { fn parse_inline_err() { for case in TestCase::list("parser/inline/err") { let (actual, errors) = parse(TopEntryPoint::SourceFile, &case.text); - assert!(errors, "no errors in an ERR file {}:\n{}", case.rs.display(), actual); + assert!(errors, "no errors in an ERR file {}:\n{actual}", case.rs.display()); 
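// The test hunks here (and many hunks later in this patch) replace positional
// `format!("{}", err)` calls with Rust 2021 captured identifiers such as
// `format!("{err}")`. A minimal standalone illustration of the equivalence,
// using hypothetical values rather than anything from this diff:
fn _inline_format_args_demo() {
    let err = "unexpected token";
    let pos = 42;
    // Positional arguments and captured identifiers render identically.
    assert_eq!(format!("error {}: {}", pos, err), format!("error {pos}: {err}"));
}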
expect_file![case.rast].assert_eq(&actual) } } @@ -93,14 +93,12 @@ fn parse(entry: TopEntryPoint, text: &str) -> (String, bool) { crate::StrStep::Token { kind, text } => { assert!(depth > 0); len += text.len(); - write!(buf, "{}", indent).unwrap(); - write!(buf, "{:?} {:?}\n", kind, text).unwrap(); + writeln!(buf, "{indent}{kind:?} {text:?}").unwrap(); } crate::StrStep::Enter { kind } => { assert!(depth > 0 || len == 0); depth += 1; - write!(buf, "{}", indent).unwrap(); - write!(buf, "{:?}\n", kind).unwrap(); + writeln!(buf, "{indent}{kind:?}").unwrap(); indent.push_str(" "); } crate::StrStep::Exit => { @@ -111,7 +109,7 @@ fn parse(entry: TopEntryPoint, text: &str) -> (String, bool) { } crate::StrStep::Error { msg, pos } => { assert!(depth > 0); - errors.push(format!("error {}: {}\n", pos, msg)) + errors.push(format!("error {pos}: {msg}\n")) } }); assert_eq!( @@ -124,7 +122,7 @@ fn parse(entry: TopEntryPoint, text: &str) -> (String, bool) { for (token, msg) in lexed.errors() { let pos = lexed.text_start(token); - errors.push(format!("error {}: {}\n", pos, msg)); + errors.push(format!("error {pos}: {msg}\n")); } let has_errors = !errors.is_empty(); @@ -149,7 +147,7 @@ impl TestCase { let mut res = Vec::new(); let read_dir = fs::read_dir(&dir) - .unwrap_or_else(|err| panic!("can't `read_dir` {}: {}", dir.display(), err)); + .unwrap_or_else(|err| panic!("can't `read_dir` {}: {err}", dir.display())); for file in read_dir { let file = file.unwrap(); let path = file.path(); diff --git a/crates/parser/src/tests/sourcegen_inline_tests.rs b/crates/parser/src/tests/sourcegen_inline_tests.rs index 7b2b703deb..54e85c0734 100644 --- a/crates/parser/src/tests/sourcegen_inline_tests.rs +++ b/crates/parser/src/tests/sourcegen_inline_tests.rs @@ -23,7 +23,7 @@ fn sourcegen_parser_tests() { // ok is never actually read, but it needs to be specified to create a Test in existing_tests let existing = existing_tests(&tests_dir, true); for t in existing.keys().filter(|&t| !tests.contains_key(t)) { - panic!("Test is deleted: {}", t); + panic!("Test is deleted: {t}"); } let mut new_idx = existing.len() + 1; @@ -31,7 +31,7 @@ fn sourcegen_parser_tests() { let path = match existing.get(name) { Some((path, _test)) => path.clone(), None => { - let file_name = format!("{:04}_{}.rs", new_idx, name); + let file_name = format!("{new_idx:04}_{name}.rs"); new_idx += 1; tests_dir.join(file_name) } @@ -116,7 +116,7 @@ fn existing_tests(dir: &Path, ok: bool) -> HashMap { let text = fs::read_to_string(&path).unwrap(); let test = Test { name: name.clone(), text, ok }; if let Some(old) = res.insert(name, (path, test)) { - println!("Duplicate test: {:?}", old); + println!("Duplicate test: {old:?}"); } } res diff --git a/crates/parser/test_data/parser/inline/ok/0058_range_pat.rast b/crates/parser/test_data/parser/inline/ok/0058_range_pat.rast index cfef5d3f95..d9981c5071 100644 --- a/crates/parser/test_data/parser/inline/ok/0058_range_pat.rast +++ b/crates/parser/test_data/parser/inline/ok/0058_range_pat.rast @@ -93,6 +93,21 @@ SOURCE_FILE L_PAREN "(" R_PAREN ")" COMMA "," + WHITESPACE "\n " + MATCH_ARM + RANGE_PAT + DOT2EQ "..=" + WHITESPACE " " + LITERAL_PAT + LITERAL + INT_NUMBER "303" + WHITESPACE " " + FAT_ARROW "=>" + WHITESPACE " " + TUPLE_EXPR + L_PAREN "(" + R_PAREN ")" + COMMA "," WHITESPACE "\n " R_CURLY "}" WHITESPACE "\n\n " @@ -169,6 +184,28 @@ SOURCE_FILE TUPLE_EXPR L_PAREN "(" R_PAREN ")" + COMMA "," + WHITESPACE "\n " + MATCH_ARM + TUPLE_STRUCT_PAT + PATH + PATH_SEGMENT + NAME_REF + IDENT "Some" + L_PAREN "(" + 
RANGE_PAT + DOT2EQ "..=" + LITERAL_PAT + LITERAL + INT_NUMBER "2" + R_PAREN ")" + WHITESPACE " " + FAT_ARROW "=>" + WHITESPACE " " + TUPLE_EXPR + L_PAREN "(" + R_PAREN ")" + COMMA "," WHITESPACE "\n " R_CURLY "}" WHITESPACE "\n\n " @@ -240,6 +277,36 @@ SOURCE_FILE L_PAREN "(" R_PAREN ")" COMMA "," + WHITESPACE "\n " + MATCH_ARM + RECORD_PAT + PATH + PATH_SEGMENT + NAME_REF + IDENT "S" + WHITESPACE " " + RECORD_PAT_FIELD_LIST + L_CURLY "{" + WHITESPACE " " + RECORD_PAT_FIELD + NAME_REF + IDENT "a" + COLON ":" + WHITESPACE " " + RANGE_PAT + DOT2EQ "..=" + LITERAL_PAT + LITERAL + INT_NUMBER "2" + WHITESPACE " " + R_CURLY "}" + WHITESPACE " " + FAT_ARROW "=>" + WHITESPACE " " + TUPLE_EXPR + L_PAREN "(" + R_PAREN ")" + COMMA "," WHITESPACE "\n " R_CURLY "}" WHITESPACE "\n\n " @@ -285,6 +352,23 @@ SOURCE_FILE L_PAREN "(" R_PAREN ")" COMMA "," + WHITESPACE "\n " + MATCH_ARM + SLICE_PAT + L_BRACK "[" + RANGE_PAT + DOT2EQ "..=" + LITERAL_PAT + LITERAL + INT_NUMBER "2" + R_BRACK "]" + WHITESPACE " " + FAT_ARROW "=>" + WHITESPACE " " + TUPLE_EXPR + L_PAREN "(" + R_PAREN ")" + COMMA "," WHITESPACE "\n " R_CURLY "}" WHITESPACE "\n\n " @@ -360,6 +444,28 @@ SOURCE_FILE TUPLE_EXPR L_PAREN "(" R_PAREN ")" + COMMA "," + WHITESPACE "\n " + MATCH_ARM + TUPLE_PAT + L_PAREN "(" + RANGE_PAT + DOT2EQ "..=" + LITERAL_PAT + LITERAL + INT_NUMBER "2" + COMMA "," + WHITESPACE " " + WILDCARD_PAT + UNDERSCORE "_" + R_PAREN ")" + WHITESPACE " " + FAT_ARROW "=>" + WHITESPACE " " + TUPLE_EXPR + L_PAREN "(" + R_PAREN ")" + COMMA "," WHITESPACE "\n " R_CURLY "}" WHITESPACE "\n" diff --git a/crates/parser/test_data/parser/inline/ok/0058_range_pat.rs b/crates/parser/test_data/parser/inline/ok/0058_range_pat.rs index 2411d51096..b54354211d 100644 --- a/crates/parser/test_data/parser/inline/ok/0058_range_pat.rs +++ b/crates/parser/test_data/parser/inline/ok/0058_range_pat.rs @@ -4,25 +4,30 @@ fn main() { 101 ..= 200 => (), 200 .. 301 => (), 302 .. => (), + ..= 303 => (), } match Some(10 as u8) { Some(0) | None => (), - Some(1..) => () + Some(1..) => (), + Some(..=2) => (), } match () { S { a: 0 } => (), S { a: 1.. } => (), + S { a: ..=2 } => (), } match () { [0] => (), [1..] => (), + [..=2] => (), } match (10 as u8, 5 as u8) { (0, _) => (), - (1.., _) => () + (1.., _) => (), + (..=2, _) => (), } } diff --git a/crates/parser/test_data/parser/inline/ok/0166_half_open_range_pat.rast b/crates/parser/test_data/parser/inline/ok/0166_half_open_range_pat.rast index 4b401b60df..c85a685991 100644 --- a/crates/parser/test_data/parser/inline/ok/0166_half_open_range_pat.rast +++ b/crates/parser/test_data/parser/inline/ok/0166_half_open_range_pat.rast @@ -46,6 +46,49 @@ SOURCE_FILE LITERAL INT_NUMBER "1u32" SEMICOLON ";" + WHITESPACE "\n\n " + MATCH_EXPR + MATCH_KW "match" + WHITESPACE " " + LITERAL + INT_NUMBER "42" + WHITESPACE " " + MATCH_ARM_LIST + L_CURLY "{" + WHITESPACE "\n " + MATCH_ARM + RANGE_PAT + LITERAL_PAT + LITERAL + INT_NUMBER "0" + WHITESPACE " " + DOT2 ".." 
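// The new .rs/.rast fixtures around this hunk exercise `..=X` range patterns in
// nested positions (tuple-struct, record, slice, tuple patterns) and `X ..`
// half-open patterns followed by a match guard. A hedged sketch of the source
// forms these trees correspond to (identifiers are illustrative, not copied
// verbatim from the fixtures):
fn _range_pat_demo(x: Option<u8>, s: [u8; 1]) {
    match x {
        Some(..=2) => (), // `..=` inside a tuple-struct pattern
        _ => (),
    }
    match s {
        [..=2] => (), // `..=` inside a slice pattern
        _ => (),
    }
    match 42u8 {
        0.. if true => (), // half-open range pattern with a match guard
        _ => (),
    }
}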
+ WHITESPACE " " + MATCH_GUARD + IF_KW "if" + WHITESPACE " " + LITERAL + TRUE_KW "true" + WHITESPACE " " + FAT_ARROW "=>" + WHITESPACE " " + TUPLE_EXPR + L_PAREN "(" + R_PAREN ")" + COMMA "," + WHITESPACE "\n " + MATCH_ARM + WILDCARD_PAT + UNDERSCORE "_" + WHITESPACE " " + FAT_ARROW "=>" + WHITESPACE " " + TUPLE_EXPR + L_PAREN "(" + R_PAREN ")" + COMMA "," + WHITESPACE "\n " + R_CURLY "}" WHITESPACE "\n" R_CURLY "}" WHITESPACE "\n" diff --git a/crates/parser/test_data/parser/inline/ok/0166_half_open_range_pat.rs b/crates/parser/test_data/parser/inline/ok/0166_half_open_range_pat.rs index c9386a221a..f7e2d07922 100644 --- a/crates/parser/test_data/parser/inline/ok/0166_half_open_range_pat.rs +++ b/crates/parser/test_data/parser/inline/ok/0166_half_open_range_pat.rs @@ -1,4 +1,9 @@ fn f() { let 0 .. = 1u32; let 0..: _ = 1u32; + + match 42 { + 0 .. if true => (), + _ => (), + } } diff --git a/crates/parser/test_data/parser/inline/ok/0203_closure_body_underscore_assignment.rast b/crates/parser/test_data/parser/inline/ok/0203_closure_body_underscore_assignment.rast new file mode 100644 index 0000000000..8970922c97 --- /dev/null +++ b/crates/parser/test_data/parser/inline/ok/0203_closure_body_underscore_assignment.rast @@ -0,0 +1,32 @@ +SOURCE_FILE + FN + FN_KW "fn" + WHITESPACE " " + NAME + IDENT "main" + PARAM_LIST + L_PAREN "(" + R_PAREN ")" + WHITESPACE " " + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + WHITESPACE " " + EXPR_STMT + CLOSURE_EXPR + PARAM_LIST + PIPE "|" + PIPE "|" + WHITESPACE " " + BIN_EXPR + UNDERSCORE_EXPR + UNDERSCORE "_" + WHITESPACE " " + EQ "=" + WHITESPACE " " + LITERAL + INT_NUMBER "0" + SEMICOLON ";" + WHITESPACE " " + R_CURLY "}" + WHITESPACE "\n" diff --git a/crates/parser/test_data/parser/inline/ok/0203_closure_body_underscore_assignment.rs b/crates/parser/test_data/parser/inline/ok/0203_closure_body_underscore_assignment.rs new file mode 100644 index 0000000000..9a34b63d29 --- /dev/null +++ b/crates/parser/test_data/parser/inline/ok/0203_closure_body_underscore_assignment.rs @@ -0,0 +1 @@ +fn main() { || _ = 0; } diff --git a/crates/parser/test_data/parser/inline/ok/0204_yeet_expr.rast b/crates/parser/test_data/parser/inline/ok/0204_yeet_expr.rast new file mode 100644 index 0000000000..24931bfcd7 --- /dev/null +++ b/crates/parser/test_data/parser/inline/ok/0204_yeet_expr.rast @@ -0,0 +1,31 @@ +SOURCE_FILE + FN + FN_KW "fn" + WHITESPACE " " + NAME + IDENT "foo" + PARAM_LIST + L_PAREN "(" + R_PAREN ")" + WHITESPACE " " + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + WHITESPACE "\n " + EXPR_STMT + YEET_EXPR + DO_KW "do" + WHITESPACE " " + YEET_KW "yeet" + SEMICOLON ";" + WHITESPACE "\n " + YEET_EXPR + DO_KW "do" + WHITESPACE " " + YEET_KW "yeet" + WHITESPACE " " + LITERAL + INT_NUMBER "1" + WHITESPACE "\n" + R_CURLY "}" + WHITESPACE "\n" diff --git a/crates/parser/test_data/parser/inline/ok/0204_yeet_expr.rs b/crates/parser/test_data/parser/inline/ok/0204_yeet_expr.rs new file mode 100644 index 0000000000..624f86c9dc --- /dev/null +++ b/crates/parser/test_data/parser/inline/ok/0204_yeet_expr.rs @@ -0,0 +1,4 @@ +fn foo() { + do yeet; + do yeet 1 +} diff --git a/crates/proc-macro-api/src/lib.rs b/crates/proc-macro-api/src/lib.rs index a3ea05f4af..7921fda331 100644 --- a/crates/proc-macro-api/src/lib.rs +++ b/crates/proc-macro-api/src/lib.rs @@ -60,7 +60,7 @@ impl MacroDylib { let info = version::read_dylib_info(&path)?; if info.version.0 < 1 || info.version.1 < 47 { - let msg = format!("proc-macro {} built by {:#?} is not supported by rust-analyzer, please update your Rust 
version.", path.display(), info); + let msg = format!("proc-macro {} built by {info:#?} is not supported by rust-analyzer, please update your Rust version.", path.display()); return Err(io::Error::new(io::ErrorKind::InvalidData, msg)); } diff --git a/crates/proc-macro-api/src/msg/flat.rs b/crates/proc-macro-api/src/msg/flat.rs index 268a03bb53..b178c46263 100644 --- a/crates/proc-macro-api/src/msg/flat.rs +++ b/crates/proc-macro-api/src/msg/flat.rs @@ -137,7 +137,7 @@ impl SubtreeRepr { 1 => Some(tt::DelimiterKind::Parenthesis), 2 => Some(tt::DelimiterKind::Brace), 3 => Some(tt::DelimiterKind::Bracket), - other => panic!("bad kind {}", other), + other => panic!("bad kind {other}"), }; SubtreeRepr { id: TokenId(id), kind, tt: [lo, len] } } @@ -164,7 +164,7 @@ impl PunctRepr { let spacing = match spacing { 0 => tt::Spacing::Alone, 1 => tt::Spacing::Joint, - other => panic!("bad spacing {}", other), + other => panic!("bad spacing {other}"), }; PunctRepr { id: TokenId(id), char: char.try_into().unwrap(), spacing } } @@ -210,7 +210,7 @@ impl<'a> Writer<'a> { let idx_tag = match child { tt::TokenTree::Subtree(it) => { let idx = self.enqueue(it); - idx << 2 | 0b00 + idx << 2 } tt::TokenTree::Leaf(leaf) => match leaf { tt::Leaf::Literal(lit) => { @@ -312,7 +312,7 @@ impl Reader { }) .into() } - other => panic!("bad tag: {}", other), + other => panic!("bad tag: {other}"), } }) .collect(), diff --git a/crates/proc-macro-api/src/process.rs b/crates/proc-macro-api/src/process.rs index c4018d3b39..54dcb17f4e 100644 --- a/crates/proc-macro-api/src/process.rs +++ b/crates/proc-macro-api/src/process.rs @@ -67,7 +67,7 @@ impl Process { args: impl IntoIterator>, ) -> io::Result { let args: Vec = args.into_iter().map(|s| s.as_ref().into()).collect(); - let child = JodChild(mk_child(&path, &args)?); + let child = JodChild(mk_child(&path, args)?); Ok(Process { child }) } diff --git a/crates/proc-macro-api/src/version.rs b/crates/proc-macro-api/src/version.rs index 030531b80d..40125c2a51 100644 --- a/crates/proc-macro-api/src/version.rs +++ b/crates/proc-macro-api/src/version.rs @@ -125,7 +125,7 @@ pub fn read_version(dylib_path: &AbsPath) -> io::Result { _ => { return Err(io::Error::new( io::ErrorKind::InvalidData, - format!("unsupported metadata version {}", version), + format!("unsupported metadata version {version}"), )); } } diff --git a/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/client.rs b/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/client.rs index ed0e91da36..e78842f5c3 100644 --- a/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/client.rs +++ b/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/client.rs @@ -286,7 +286,7 @@ impl BridgeState<'_> { BRIDGE_STATE.with(|state| { state.replace(BridgeState::InUse, |mut state| { // FIXME(#52812) pass `f` directly to `replace` when `RefMutL` is gone - f(&mut *state) + f(&mut state) }) }) } diff --git a/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/mod.rs b/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/mod.rs index a405497f3c..c5145d00e3 100644 --- a/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/mod.rs +++ b/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/mod.rs @@ -877,7 +877,7 @@ impl Literal { /// example if it is infinity or NaN this function will panic. 
pub fn f32_unsuffixed(n: f32) -> Literal { if !n.is_finite() { - panic!("Invalid float literal {}", n); + panic!("Invalid float literal {n}"); } let mut repr = n.to_string(); if !repr.contains('.') { @@ -901,7 +901,7 @@ impl Literal { /// example if it is infinity or NaN this function will panic. pub fn f32_suffixed(n: f32) -> Literal { if !n.is_finite() { - panic!("Invalid float literal {}", n); + panic!("Invalid float literal {n}"); } Literal(bridge::client::Literal::f32(&n.to_string())) } @@ -920,7 +920,7 @@ impl Literal { /// example if it is infinity or NaN this function will panic. pub fn f64_unsuffixed(n: f64) -> Literal { if !n.is_finite() { - panic!("Invalid float literal {}", n); + panic!("Invalid float literal {n}"); } let mut repr = n.to_string(); if !repr.contains('.') { @@ -944,7 +944,7 @@ impl Literal { /// example if it is infinity or NaN this function will panic. pub fn f64_suffixed(n: f64) -> Literal { if !n.is_finite() { - panic!("Invalid float literal {}", n); + panic!("Invalid float literal {n}"); } Literal(bridge::client::Literal::f64(&n.to_string())) } diff --git a/crates/proc-macro-srv/src/abis/abi_1_58/ra_server.rs b/crates/proc-macro-srv/src/abis/abi_1_58/ra_server.rs index b1e982f477..22d4ad94f7 100644 --- a/crates/proc-macro-srv/src/abis/abi_1_58/ra_server.rs +++ b/crates/proc-macro-srv/src/abis/abi_1_58/ra_server.rs @@ -471,8 +471,12 @@ impl server::Punct for RustAnalyzer { } impl server::Ident for RustAnalyzer { - fn new(&mut self, string: &str, span: Self::Span, _is_raw: bool) -> Self::Ident { - IdentId(self.ident_interner.intern(&IdentData(tt::Ident { text: string.into(), id: span }))) + fn new(&mut self, string: &str, span: Self::Span, is_raw: bool) -> Self::Ident { + IdentId(self.ident_interner.intern(&IdentData(tt::Ident::new_with_is_raw( + string.into(), + span, + is_raw, + )))) } fn span(&mut self, ident: Self::Ident) -> Self::Span { @@ -544,13 +548,13 @@ impl server::Literal for RustAnalyzer { fn f32(&mut self, n: &str) -> Self::Literal { let n: f32 = n.parse().unwrap(); - let text = format!("{}f32", n); + let text = format!("{n}f32"); Literal { text: text.into(), id: tt::TokenId::unspecified() } } fn f64(&mut self, n: &str) -> Self::Literal { let n: f64 = n.parse().unwrap(); - let text = format!("{}f64", n); + let text = format!("{n}f64"); Literal { text: text.into(), id: tt::TokenId::unspecified() } } @@ -559,11 +563,11 @@ impl server::Literal for RustAnalyzer { for ch in string.chars() { escaped.extend(ch.escape_debug()); } - Literal { text: format!("\"{}\"", escaped).into(), id: tt::TokenId::unspecified() } + Literal { text: format!("\"{escaped}\"").into(), id: tt::TokenId::unspecified() } } fn character(&mut self, ch: char) -> Self::Literal { - Literal { text: format!("'{}'", ch).into(), id: tt::TokenId::unspecified() } + Literal { text: format!("'{ch}'").into(), id: tt::TokenId::unspecified() } } fn byte_string(&mut self, bytes: &[u8]) -> Self::Literal { @@ -574,7 +578,7 @@ impl server::Literal for RustAnalyzer { .map(Into::::into) .collect::(); - Literal { text: format!("b\"{}\"", string).into(), id: tt::TokenId::unspecified() } + Literal { text: format!("b\"{string}\"").into(), id: tt::TokenId::unspecified() } } fn span(&mut self, literal: &Self::Literal) -> Self::Span { diff --git a/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/client.rs b/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/client.rs index 102027d14a..b346c2c189 100644 --- a/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/client.rs +++ 
b/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/client.rs @@ -301,7 +301,7 @@ impl BridgeState<'_> { BRIDGE_STATE.with(|state| { state.replace(BridgeState::InUse, |mut state| { // FIXME(#52812) pass `f` directly to `replace` when `RefMutL` is gone - f(&mut *state) + f(&mut state) }) }) } diff --git a/crates/proc-macro-srv/src/abis/abi_1_63/ra_server.rs b/crates/proc-macro-srv/src/abis/abi_1_63/ra_server.rs index ed49cc7596..f82f20c37b 100644 --- a/crates/proc-macro-srv/src/abis/abi_1_63/ra_server.rs +++ b/crates/proc-macro-srv/src/abis/abi_1_63/ra_server.rs @@ -486,8 +486,12 @@ impl server::Punct for RustAnalyzer { } impl server::Ident for RustAnalyzer { - fn new(&mut self, string: &str, span: Self::Span, _is_raw: bool) -> Self::Ident { - IdentId(self.ident_interner.intern(&IdentData(tt::Ident { text: string.into(), id: span }))) + fn new(&mut self, string: &str, span: Self::Span, is_raw: bool) -> Self::Ident { + IdentId(self.ident_interner.intern(&IdentData(tt::Ident::new_with_is_raw( + string.into(), + span, + is_raw, + )))) } fn span(&mut self, ident: Self::Ident) -> Self::Span { @@ -559,13 +563,13 @@ impl server::Literal for RustAnalyzer { fn f32(&mut self, n: &str) -> Self::Literal { let n: f32 = n.parse().unwrap(); - let text = format!("{}f32", n); + let text = format!("{n}f32"); Literal { text: text.into(), id: tt::TokenId::unspecified() } } fn f64(&mut self, n: &str) -> Self::Literal { let n: f64 = n.parse().unwrap(); - let text = format!("{}f64", n); + let text = format!("{n}f64"); Literal { text: text.into(), id: tt::TokenId::unspecified() } } @@ -574,11 +578,11 @@ impl server::Literal for RustAnalyzer { for ch in string.chars() { escaped.extend(ch.escape_debug()); } - Literal { text: format!("\"{}\"", escaped).into(), id: tt::TokenId::unspecified() } + Literal { text: format!("\"{escaped}\"").into(), id: tt::TokenId::unspecified() } } fn character(&mut self, ch: char) -> Self::Literal { - Literal { text: format!("'{}'", ch).into(), id: tt::TokenId::unspecified() } + Literal { text: format!("'{ch}'").into(), id: tt::TokenId::unspecified() } } fn byte_string(&mut self, bytes: &[u8]) -> Self::Literal { @@ -589,7 +593,7 @@ impl server::Literal for RustAnalyzer { .map(Into::::into) .collect::(); - Literal { text: format!("b\"{}\"", string).into(), id: tt::TokenId::unspecified() } + Literal { text: format!("b\"{string}\"").into(), id: tt::TokenId::unspecified() } } fn span(&mut self, literal: &Self::Literal) -> Self::Span { diff --git a/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server.rs b/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server.rs index e4e43e97dd..068f79f824 100644 --- a/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server.rs +++ b/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server.rs @@ -107,8 +107,8 @@ impl server::TokenStream for RustAnalyzer { } bridge::TokenTree::Ident(ident) => { - // FIXME: handle raw idents let text = ident.sym.text(); + let text = if ident.is_raw { tt::SmolStr::from_iter(["r#", &text]) } else { text }; let ident: tt::Ident = tt::Ident { text, id: ident.span }; let leaf = tt::Leaf::from(ident); let tree = TokenTree::from(leaf); @@ -182,9 +182,8 @@ impl server::TokenStream for RustAnalyzer { .map(|tree| match tree { tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => { bridge::TokenTree::Ident(bridge::Ident { - sym: Symbol::intern(&ident.text), - // FIXME: handle raw idents - is_raw: false, + sym: Symbol::intern(ident.text.trim_start_matches("r#")), + is_raw: ident.text.starts_with("r#"), span: ident.id, }) } diff --git 
a/crates/proc-macro-srv/src/abis/mod.rs b/crates/proc-macro-srv/src/abis/mod.rs index 0ce099ae0b..5b8aca4d81 100644 --- a/crates/proc-macro-srv/src/abis/mod.rs +++ b/crates/proc-macro-srv/src/abis/mod.rs @@ -117,7 +117,7 @@ impl Abi { let inner = unsafe { Abi_1_63::from_lib(lib, symbol_name) }?; Ok(Abi::Abi1_63(inner)) } - _ => Err(LoadProcMacroDylibError::UnsupportedABI(info.version_string.clone())), + _ => Err(LoadProcMacroDylibError::UnsupportedABI(info.version_string)), } } diff --git a/crates/proc-macro-srv/src/lib.rs b/crates/proc-macro-srv/src/lib.rs index b4f5ebd157..2eb939a7ce 100644 --- a/crates/proc-macro-srv/src/lib.rs +++ b/crates/proc-macro-srv/src/lib.rs @@ -48,7 +48,7 @@ impl ProcMacroSrv { pub fn expand(&mut self, task: ExpandMacro) -> Result { let expander = self.expander(task.lib.as_ref()).map_err(|err| { debug_assert!(false, "should list macros before asking to expand"); - PanicMessage(format!("failed to load macro: {}", err)) + PanicMessage(format!("failed to load macro: {err}")) })?; let prev_env = EnvSnapshot::new(); @@ -59,7 +59,7 @@ impl ProcMacroSrv { Some(dir) => { let prev_working_dir = std::env::current_dir().ok(); if let Err(err) = std::env::set_current_dir(&dir) { - eprintln!("Failed to set the current working dir to {}. Error: {:?}", dir, err) + eprintln!("Failed to set the current working dir to {dir}. Error: {err:?}") } prev_working_dir } @@ -112,14 +112,16 @@ impl ProcMacroSrv { } fn expander(&mut self, path: &Path) -> Result<&dylib::Expander, String> { - let time = fs::metadata(path).and_then(|it| it.modified()).map_err(|err| { - format!("Failed to get file metadata for {}: {}", path.display(), err) - })?; + let time = fs::metadata(path) + .and_then(|it| it.modified()) + .map_err(|err| format!("Failed to get file metadata for {}: {err}", path.display()))?; Ok(match self.expanders.entry((path.to_path_buf(), time)) { - Entry::Vacant(v) => v.insert(dylib::Expander::new(path).map_err(|err| { - format!("Cannot create expander for {}: {}", path.display(), err) - })?), + Entry::Vacant(v) => { + v.insert(dylib::Expander::new(path).map_err(|err| { + format!("Cannot create expander for {}: {err}", path.display()) + })?) + } Entry::Occupied(e) => e.into_mut(), }) } diff --git a/crates/proc-macro-srv/src/tests/mod.rs b/crates/proc-macro-srv/src/tests/mod.rs index cc0fc91fe9..1ccc170f42 100644 --- a/crates/proc-macro-srv/src/tests/mod.rs +++ b/crates/proc-macro-srv/src/tests/mod.rs @@ -63,7 +63,7 @@ fn test_fn_like_macro_clone_raw_ident() { "r#async", expect![[r#" SUBTREE $ - IDENT async 4294967295"#]], + IDENT r#async 4294967295"#]], ); } @@ -86,15 +86,13 @@ fn test_fn_like_mk_literals() { #[test] fn test_fn_like_mk_idents() { - // FIXME: this test is wrong: raw should be 'r#raw' but ABIs 1.64 and below - // simply ignore `is_raw` when implementing the `Ident` interface. 
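// The sysroot-ABI server changes above round-trip raw identifiers by encoding
// `is_raw` as an `r#` prefix on the identifier text, which is why the expected
// test output below changes from `IDENT raw` to `IDENT r#raw`. A hedged,
// standalone sketch of that mapping (helper names are illustrative, not
// rust-analyzer API):
fn _encode_raw_ident(text: &str, is_raw: bool) -> String {
    if is_raw { format!("r#{text}") } else { text.to_string() }
}
fn _decode_raw_ident(text: &str) -> (&str, bool) {
    // Mirrors the `trim_start_matches("r#")` / `starts_with("r#")` pair in the hunk above.
    (text.trim_start_matches("r#"), text.starts_with("r#"))
}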
assert_expand( "fn_like_mk_idents", r#""#, expect![[r#" SUBTREE $ IDENT standard 4294967295 - IDENT raw 4294967295"#]], + IDENT r#raw 4294967295"#]], ); } diff --git a/crates/proc-macro-srv/src/tests/utils.rs b/crates/proc-macro-srv/src/tests/utils.rs index 44b1b6588d..efbeb90ca9 100644 --- a/crates/proc-macro-srv/src/tests/utils.rs +++ b/crates/proc-macro-srv/src/tests/utils.rs @@ -30,12 +30,12 @@ fn assert_expand_impl(macro_name: &str, input: &str, attr: Option<&str>, expect: let attr = attr.map(|attr| parse_string(attr).unwrap().into_subtree()); let res = expander.expand(macro_name, &fixture.into_subtree(), attr.as_ref()).unwrap(); - expect.assert_eq(&format!("{:?}", res)); + expect.assert_eq(&format!("{res:?}")); } pub(crate) fn list() -> Vec { let dylib_path = proc_macro_test_dylib_path(); let mut srv = ProcMacroSrv::default(); let res = srv.list_macros(&dylib_path).unwrap(); - res.into_iter().map(|(name, kind)| format!("{} [{:?}]", name, kind)).collect() + res.into_iter().map(|(name, kind)| format!("{name} [{kind:?}]")).collect() } diff --git a/crates/proc-macro-test/build.rs b/crates/proc-macro-test/build.rs index a80c962617..19a5caa4cc 100644 --- a/crates/proc-macro-test/build.rs +++ b/crates/proc-macro-test/build.rs @@ -63,7 +63,7 @@ fn main() { }; cmd.current_dir(&staging_dir) - .args(&["build", "-p", "proc-macro-test-impl", "--message-format", "json"]) + .args(["build", "-p", "proc-macro-test-impl", "--message-format", "json"]) // Explicit override the target directory to avoid using the same one which the parent // cargo is using, or we'll deadlock. // This can happen when `CARGO_TARGET_DIR` is set or global config forces all cargo @@ -71,7 +71,7 @@ fn main() { .arg("--target-dir") .arg(&target_dir); - println!("Running {:?}", cmd); + println!("Running {cmd:?}"); let output = cmd.output().unwrap(); if !output.status.success() { @@ -85,16 +85,13 @@ fn main() { let mut artifact_path = None; for message in Message::parse_stream(output.stdout.as_slice()) { - match message.unwrap() { - Message::CompilerArtifact(artifact) => { - if artifact.target.kind.contains(&"proc-macro".to_string()) { - let repr = format!("{} {}", name, version); - if artifact.package_id.repr.starts_with(&repr) { - artifact_path = Some(PathBuf::from(&artifact.filenames[0])); - } + if let Message::CompilerArtifact(artifact) = message.unwrap() { + if artifact.target.kind.contains(&"proc-macro".to_string()) { + let repr = format!("{name} {version}"); + if artifact.package_id.repr.starts_with(&repr) { + artifact_path = Some(PathBuf::from(&artifact.filenames[0])); } } - _ => (), // Unknown message } } diff --git a/crates/profile/src/hprof.rs b/crates/profile/src/hprof.rs index b562c193e7..ea89a89c5c 100644 --- a/crates/profile/src/hprof.rs +++ b/crates/profile/src/hprof.rs @@ -133,7 +133,7 @@ static FILTER: Lazy> = Lazy::new(Default::default); fn with_profile_stack(f: impl FnOnce(&mut ProfileStack) -> T) -> T { thread_local!(static STACK: RefCell = RefCell::new(ProfileStack::new())); - STACK.with(|it| f(&mut *it.borrow_mut())) + STACK.with(|it| f(&mut it.borrow_mut())) } #[derive(Default, Clone, Debug)] @@ -238,7 +238,7 @@ impl ProfileStack { self.heartbeat(frame.heartbeats); let avg_span = duration / (frame.heartbeats + 1); if avg_span > self.filter.heartbeat_longer_than { - eprintln!("Too few heartbeats {} ({}/{:?})?", label, frame.heartbeats, duration); + eprintln!("Too few heartbeats {label} ({}/{duration:?})?", frame.heartbeats); } } @@ -275,7 +275,7 @@ fn print( out: &mut impl Write, ) { let current_indent = " 
".repeat(level as usize); - let detail = tree[curr].detail.as_ref().map(|it| format!(" @ {}", it)).unwrap_or_default(); + let detail = tree[curr].detail.as_ref().map(|it| format!(" @ {it}")).unwrap_or_default(); writeln!( out, "{}{} - {}{}", @@ -302,13 +302,13 @@ fn print( } for (child_msg, (duration, count)) in &short_children { - writeln!(out, " {}{} - {} ({} calls)", current_indent, ms(*duration), child_msg, count) + writeln!(out, " {current_indent}{} - {child_msg} ({count} calls)", ms(*duration)) .expect("printing profiling info"); } let unaccounted = tree[curr].duration - accounted_for; if tree.children(curr).next().is_some() && unaccounted > longer_than { - writeln!(out, " {}{} - ???", current_indent, ms(unaccounted)) + writeln!(out, " {current_indent}{} - ???", ms(unaccounted)) .expect("printing profiling info"); } } @@ -320,7 +320,7 @@ impl fmt::Display for ms { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self.0.as_millis() { 0 => f.write_str(" 0 "), - n => write!(f, "{:5}ms", n), + n => write!(f, "{n:5}ms"), } } } diff --git a/crates/profile/src/lib.rs b/crates/profile/src/lib.rs index 00f7952e80..7ca3c7d629 100644 --- a/crates/profile/src/lib.rs +++ b/crates/profile/src/lib.rs @@ -114,11 +114,11 @@ impl Drop for CpuSpan { match out { Ok(out) if out.status.success() => { let svg = profile_data.with_extension("svg"); - std::fs::write(&svg, &out.stdout).unwrap(); + std::fs::write(&svg, out.stdout).unwrap(); eprintln!("Profile rendered to:\n\n {}\n", svg.display()); } _ => { - eprintln!("Failed to run:\n\n {:?}\n", cmd); + eprintln!("Failed to run:\n\n {cmd:?}\n"); } } } diff --git a/crates/profile/src/memory_usage.rs b/crates/profile/src/memory_usage.rs index ee882b4cb4..8017f86579 100644 --- a/crates/profile/src/memory_usage.rs +++ b/crates/profile/src/memory_usage.rs @@ -109,7 +109,7 @@ impl fmt::Display for Bytes { suffix = "mb"; } } - f.pad(&format!("{}{}", value, suffix)) + f.pad(&format!("{value}{suffix}")) } } diff --git a/crates/profile/src/stop_watch.rs b/crates/profile/src/stop_watch.rs index 6258328482..71303d5a63 100644 --- a/crates/profile/src/stop_watch.rs +++ b/crates/profile/src/stop_watch.rs @@ -33,11 +33,11 @@ impl StopWatch { if *PERF_ENABLED { let mut counter = perf_event::Builder::new() .build() - .map_err(|err| eprintln!("Failed to create perf counter: {}", err)) + .map_err(|err| eprintln!("Failed to create perf counter: {err}")) .ok(); if let Some(counter) = &mut counter { if let Err(err) = counter.enable() { - eprintln!("Failed to start perf counter: {}", err) + eprintln!("Failed to start perf counter: {err}") } } counter @@ -64,7 +64,7 @@ impl StopWatch { #[cfg(target_os = "linux")] let instructions = self.counter.as_mut().and_then(|it| { - it.read().map_err(|err| eprintln!("Failed to read perf counter: {}", err)).ok() + it.read().map_err(|err| eprintln!("Failed to read perf counter: {err}")).ok() }); #[cfg(not(target_os = "linux"))] let instructions = None; @@ -91,10 +91,10 @@ impl fmt::Display for StopWatchSpan { instructions /= 1000; prefix = "g"; } - write!(f, ", {}{}instr", instructions, prefix)?; + write!(f, ", {instructions}{prefix}instr")?; } if let Some(memory) = self.memory { - write!(f, ", {}", memory)?; + write!(f, ", {memory}")?; } Ok(()) } diff --git a/crates/project-model/src/build_scripts.rs b/crates/project-model/src/build_scripts.rs index ae2b41f27d..6550cf27e9 100644 --- a/crates/project-model/src/build_scripts.rs +++ b/crates/project-model/src/build_scripts.rs @@ -66,7 +66,7 @@ impl WorkspaceBuildScripts { _ => { let 
mut cmd = Command::new(toolchain::cargo()); - cmd.args(&["check", "--quiet", "--workspace", "--message-format=json"]); + cmd.args(["check", "--quiet", "--workspace", "--message-format=json"]); // --all-targets includes tests, benches and examples in addition to the // default lib and bins. This is an independent concept from the --target @@ -74,7 +74,7 @@ impl WorkspaceBuildScripts { cmd.arg("--all-targets"); if let Some(target) = &config.target { - cmd.args(&["--target", target]); + cmd.args(["--target", target]); } match &config.features { @@ -122,7 +122,7 @@ impl WorkspaceBuildScripts { InvocationLocation::Root(root) if config.run_build_script_command.is_some() => { root.as_path() } - _ => &workspace.workspace_root(), + _ => workspace.workspace_root(), } .as_ref(); @@ -133,7 +133,7 @@ impl WorkspaceBuildScripts { // building build scripts failed, attempt to build with --keep-going so // that we potentially get more build data let mut cmd = Self::build_command(config)?; - cmd.args(&["-Z", "unstable-options", "--keep-going"]).env("RUSTC_BOOTSTRAP", "1"); + cmd.args(["-Z", "unstable-options", "--keep-going"]).env("RUSTC_BOOTSTRAP", "1"); let mut res = Self::run_per_ws(cmd, workspace, current_dir, progress)?; res.error = Some(error); Ok(res) @@ -295,7 +295,7 @@ impl WorkspaceBuildScripts { match message { Message::BuildScriptExecuted(mut message) => { with_output_for(&message.package_id.repr, &mut |name, data| { - progress(format!("running build-script: {}", name)); + progress(format!("running build-script: {name}")); let cfgs = { let mut acc = Vec::new(); for cfg in &message.cfgs { @@ -303,8 +303,7 @@ impl WorkspaceBuildScripts { Ok(it) => acc.push(it), Err(err) => { push_err(&format!( - "invalid cfg from cargo-metadata: {}", - err + "invalid cfg from cargo-metadata: {err}" )); return; } @@ -334,7 +333,7 @@ impl WorkspaceBuildScripts { } Message::CompilerArtifact(message) => { with_output_for(&message.package_id.repr, &mut |name, data| { - progress(format!("building proc-macros: {}", name)); + progress(format!("building proc-macros: {name}")); if message.target.kind.iter().any(|k| k == "proc-macro") { // Skip rmeta file if let Some(filename) = diff --git a/crates/project-model/src/cargo_workspace.rs b/crates/project-model/src/cargo_workspace.rs index 02ec7a4f6f..467cf09178 100644 --- a/crates/project-model/src/cargo_workspace.rs +++ b/crates/project-model/src/cargo_workspace.rs @@ -411,7 +411,7 @@ impl CargoWorkspace { CargoWorkspace { packages, targets, workspace_root } } - pub fn packages<'a>(&'a self) -> impl Iterator + ExactSizeIterator + 'a { + pub fn packages(&self) -> impl Iterator + ExactSizeIterator + '_ { self.packages.iter().map(|(id, _pkg)| id) } @@ -427,7 +427,7 @@ impl CargoWorkspace { } pub fn package_flag(&self, package: &PackageData) -> String { - if self.is_unique(&*package.name) { + if self.is_unique(&package.name) { package.name.clone() } else { format!("{}:{}", package.name, package.version) @@ -517,7 +517,7 @@ fn cargo_config_build_target( cargo_config.envs(extra_env); cargo_config .current_dir(cargo_toml.parent()) - .args(&["-Z", "unstable-options", "config", "get", "build.target"]) + .args(["-Z", "unstable-options", "config", "get", "build.target"]) .env("RUSTC_BOOTSTRAP", "1"); // if successful we receive `build.target = "target-triple"` // or `build.target = ["", ..]` diff --git a/crates/project-model/src/cfg_flag.rs b/crates/project-model/src/cfg_flag.rs index f3dd8f5133..c134b78ab3 100644 --- a/crates/project-model/src/cfg_flag.rs +++ 
b/crates/project-model/src/cfg_flag.rs @@ -17,7 +17,7 @@ impl FromStr for CfgFlag { let res = match s.split_once('=') { Some((key, value)) => { if !(value.starts_with('"') && value.ends_with('"')) { - return Err(format!("Invalid cfg ({:?}), value should be in quotes", s)); + return Err(format!("Invalid cfg ({s:?}), value should be in quotes")); } let key = key.to_string(); let value = value[1..value.len() - 1].to_string(); diff --git a/crates/project-model/src/lib.rs b/crates/project-model/src/lib.rs index 575581fa54..e2f09bad2d 100644 --- a/crates/project-model/src/lib.rs +++ b/crates/project-model/src/lib.rs @@ -25,6 +25,7 @@ mod sysroot; mod workspace; mod rustc_cfg; mod build_scripts; +mod target_data_layout; #[cfg(test)] mod tests; @@ -145,7 +146,7 @@ impl ProjectManifest { } fn utf8_stdout(mut cmd: Command) -> Result { - let output = cmd.output().with_context(|| format!("{:?} failed", cmd))?; + let output = cmd.output().with_context(|| format!("{cmd:?} failed"))?; if !output.status.success() { match String::from_utf8(output.stderr) { Ok(stderr) if !stderr.is_empty() => { diff --git a/crates/project-model/src/manifest_path.rs b/crates/project-model/src/manifest_path.rs index 4910fd3d11..980d92d3df 100644 --- a/crates/project-model/src/manifest_path.rs +++ b/crates/project-model/src/manifest_path.rs @@ -40,7 +40,7 @@ impl ops::Deref for ManifestPath { type Target = AbsPath; fn deref(&self) -> &Self::Target { - &*self.file + &self.file } } diff --git a/crates/project-model/src/project_json.rs b/crates/project-model/src/project_json.rs index 5133a14d53..9af0eafe9f 100644 --- a/crates/project-model/src/project_json.rs +++ b/crates/project-model/src/project_json.rs @@ -197,5 +197,5 @@ where D: de::Deserializer<'de>, { let name = String::deserialize(de)?; - CrateName::new(&name).map_err(|err| de::Error::custom(format!("invalid crate name: {:?}", err))) + CrateName::new(&name).map_err(|err| de::Error::custom(format!("invalid crate name: {err:?}"))) } diff --git a/crates/project-model/src/rustc_cfg.rs b/crates/project-model/src/rustc_cfg.rs index 3231361836..0066f6717e 100644 --- a/crates/project-model/src/rustc_cfg.rs +++ b/crates/project-model/src/rustc_cfg.rs @@ -50,10 +50,10 @@ fn get_rust_cfgs( cargo_config.envs(extra_env); cargo_config .current_dir(cargo_toml.parent()) - .args(&["-Z", "unstable-options", "rustc", "--print", "cfg"]) + .args(["rustc", "-Z", "unstable-options", "--print", "cfg"]) .env("RUSTC_BOOTSTRAP", "1"); if let Some(target) = target { - cargo_config.args(&["--target", target]); + cargo_config.args(["--target", target]); } match utf8_stdout(cargo_config) { Ok(it) => return Ok(it), @@ -63,9 +63,9 @@ fn get_rust_cfgs( // using unstable cargo features failed, fall back to using plain rustc let mut cmd = Command::new(toolchain::rustc()); cmd.envs(extra_env); - cmd.args(&["--print", "cfg", "-O"]); + cmd.args(["--print", "cfg", "-O"]); if let Some(target) = target { - cmd.args(&["--target", target]); + cmd.args(["--target", target]); } utf8_stdout(cmd) } diff --git a/crates/project-model/src/sysroot.rs b/crates/project-model/src/sysroot.rs index f6c09a27c9..8d5ab0061e 100644 --- a/crates/project-model/src/sysroot.rs +++ b/crates/project-model/src/sysroot.rs @@ -104,7 +104,7 @@ impl Sysroot { for path in SYSROOT_CRATES.trim().lines() { let name = path.split('/').last().unwrap(); - let root = [format!("{}/src/lib.rs", path), format!("lib{}/lib.rs", path)] + let root = [format!("{path}/src/lib.rs"), format!("lib{path}/lib.rs")] .into_iter() .map(|it| sysroot.src_root.join(it)) 
.filter_map(|it| ManifestPath::try_from(it).ok()) @@ -171,7 +171,7 @@ fn discover_sysroot_dir( ) -> Result { let mut rustc = Command::new(toolchain::rustc()); rustc.envs(extra_env); - rustc.current_dir(current_dir).args(&["--print", "sysroot"]); + rustc.current_dir(current_dir).args(["--print", "sysroot"]); tracing::debug!("Discovering sysroot by {:?}", rustc); let stdout = utf8_stdout(rustc)?; Ok(AbsPathBuf::assert(PathBuf::from(stdout))) @@ -203,7 +203,7 @@ fn discover_sysroot_src_dir_or_add_component( .or_else(|| { let mut rustup = Command::new(toolchain::rustup()); rustup.envs(extra_env); - rustup.current_dir(current_dir).args(&["component", "add", "rust-src"]); + rustup.current_dir(current_dir).args(["component", "add", "rust-src"]); tracing::info!("adding rust-src component by {:?}", rustup); utf8_stdout(rustup).ok()?; get_rust_src(sysroot_path) diff --git a/crates/project-model/src/target_data_layout.rs b/crates/project-model/src/target_data_layout.rs new file mode 100644 index 0000000000..40cf47c3f5 --- /dev/null +++ b/crates/project-model/src/target_data_layout.rs @@ -0,0 +1,40 @@ +//! Runs `rustc --print target-spec-json` to get the target_data_layout. +use std::process::Command; + +use rustc_hash::FxHashMap; + +use crate::{utf8_stdout, ManifestPath}; + +pub(super) fn get( + cargo_toml: Option<&ManifestPath>, + target: Option<&str>, + extra_env: &FxHashMap, +) -> Option { + let output = (|| { + if let Some(cargo_toml) = cargo_toml { + let mut cmd = Command::new(toolchain::rustc()); + cmd.envs(extra_env); + cmd.current_dir(cargo_toml.parent()) + .args(["-Z", "unstable-options", "rustc", "--print", "target-spec-json"]) + .env("RUSTC_BOOTSTRAP", "1"); + if let Some(target) = target { + cmd.args(["--target", target]); + } + match utf8_stdout(cmd) { + Ok(it) => return Ok(it), + Err(e) => tracing::debug!("{e:?}: falling back to querying rustc for cfgs"), + } + } + // using unstable cargo features failed, fall back to using plain rustc + let mut cmd = Command::new(toolchain::rustc()); + cmd.envs(extra_env) + .args(["-Z", "unstable-options", "rustc", "--print", "target-spec-json"]) + .env("RUSTC_BOOTSTRAP", "1"); + if let Some(target) = target { + cmd.args(["--target", target]); + } + utf8_stdout(cmd) + })() + .ok()?; + Some(output.split_once(r#""data-layout": ""#)?.1.split_once('"')?.0.to_owned()) +} diff --git a/crates/project-model/src/tests.rs b/crates/project-model/src/tests.rs index a1cb438bdd..2bb9ebf998 100644 --- a/crates/project-model/src/tests.rs +++ b/crates/project-model/src/tests.rs @@ -29,6 +29,7 @@ fn load_cargo_with_overrides(file: &str, cfg_overrides: CfgOverrides) -> CrateGr rustc_cfg: Vec::new(), cfg_overrides, toolchain: None, + target_layout: None, }; to_crate_graph(project_workspace) } @@ -106,7 +107,7 @@ fn to_crate_graph(project_workspace: ProjectWorkspace) -> CrateGraph { } fn check_crate_graph(crate_graph: CrateGraph, expect: Expect) { - let mut crate_graph = format!("{:#?}", crate_graph); + let mut crate_graph = format!("{crate_graph:#?}"); replace_root(&mut crate_graph, false); expect.assert_eq(&crate_graph); } @@ -150,6 +151,7 @@ fn cargo_hello_world_project_model_with_wildcard_overrides() { "debug_assertions", ], ), + target_layout: None, env: Env { entries: { "CARGO_PKG_LICENSE": "", @@ -219,6 +221,7 @@ fn cargo_hello_world_project_model_with_wildcard_overrides() { "debug_assertions", ], ), + target_layout: None, env: Env { entries: { "CARGO_PKG_LICENSE": "", @@ -297,6 +300,7 @@ fn cargo_hello_world_project_model_with_wildcard_overrides() { 
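// The new `target_data_layout::get` above pulls the `data-layout` string out of
// the RUSTC_BOOTSTRAP-gated `--print target-spec-json` output with plain string
// splitting rather than a JSON parser. A hedged sketch of that extraction,
// applied to a hypothetical spec snippet (not real rustc output):
fn _extract_data_layout(spec_json: &str) -> Option<String> {
    Some(spec_json.split_once(r#""data-layout": ""#)?.1.split_once('"')?.0.to_owned())
}
// _extract_data_layout(r#"{ "data-layout": "e-m:e-i64:64", "llvm-target": "x86_64" }"#)
// would yield Some("e-m:e-i64:64".to_owned()).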
"debug_assertions", ], ), + target_layout: None, env: Env { entries: { "CARGO_PKG_LICENSE": "", @@ -375,6 +379,7 @@ fn cargo_hello_world_project_model_with_wildcard_overrides() { "debug_assertions", ], ), + target_layout: None, env: Env { entries: { "CARGO_PKG_LICENSE": "", @@ -462,6 +467,7 @@ fn cargo_hello_world_project_model_with_wildcard_overrides() { "feature=use_std", ], ), + target_layout: None, env: Env { entries: { "CARGO_PKG_LICENSE": "", @@ -547,6 +553,7 @@ fn cargo_hello_world_project_model_with_selective_overrides() { "test", ], ), + target_layout: None, env: Env { entries: { "CARGO_PKG_LICENSE": "", @@ -618,6 +625,7 @@ fn cargo_hello_world_project_model_with_selective_overrides() { "test", ], ), + target_layout: None, env: Env { entries: { "CARGO_PKG_LICENSE": "", @@ -698,6 +706,7 @@ fn cargo_hello_world_project_model_with_selective_overrides() { "test", ], ), + target_layout: None, env: Env { entries: { "CARGO_PKG_LICENSE": "", @@ -778,6 +787,7 @@ fn cargo_hello_world_project_model_with_selective_overrides() { "test", ], ), + target_layout: None, env: Env { entries: { "CARGO_PKG_LICENSE": "", @@ -865,6 +875,7 @@ fn cargo_hello_world_project_model_with_selective_overrides() { "feature=use_std", ], ), + target_layout: None, env: Env { entries: { "CARGO_PKG_LICENSE": "", @@ -941,6 +952,7 @@ fn cargo_hello_world_project_model() { "test", ], ), + target_layout: None, env: Env { entries: { "CARGO_PKG_LICENSE": "", @@ -1012,6 +1024,7 @@ fn cargo_hello_world_project_model() { "test", ], ), + target_layout: None, env: Env { entries: { "CARGO_PKG_LICENSE": "", @@ -1092,6 +1105,7 @@ fn cargo_hello_world_project_model() { "test", ], ), + target_layout: None, env: Env { entries: { "CARGO_PKG_LICENSE": "", @@ -1172,6 +1186,7 @@ fn cargo_hello_world_project_model() { "test", ], ), + target_layout: None, env: Env { entries: { "CARGO_PKG_LICENSE": "", @@ -1259,6 +1274,7 @@ fn cargo_hello_world_project_model() { "feature=use_std", ], ), + target_layout: None, env: Env { entries: { "CARGO_PKG_LICENSE": "", @@ -1327,6 +1343,7 @@ fn rust_project_hello_world_project_model() { potential_cfg_options: CfgOptions( [], ), + target_layout: None, env: Env { entries: {}, }, @@ -1371,6 +1388,7 @@ fn rust_project_hello_world_project_model() { potential_cfg_options: CfgOptions( [], ), + target_layout: None, env: Env { entries: {}, }, @@ -1405,6 +1423,7 @@ fn rust_project_hello_world_project_model() { potential_cfg_options: CfgOptions( [], ), + target_layout: None, env: Env { entries: {}, }, @@ -1439,6 +1458,7 @@ fn rust_project_hello_world_project_model() { potential_cfg_options: CfgOptions( [], ), + target_layout: None, env: Env { entries: {}, }, @@ -1473,6 +1493,7 @@ fn rust_project_hello_world_project_model() { potential_cfg_options: CfgOptions( [], ), + target_layout: None, env: Env { entries: {}, }, @@ -1517,6 +1538,7 @@ fn rust_project_hello_world_project_model() { potential_cfg_options: CfgOptions( [], ), + target_layout: None, env: Env { entries: {}, }, @@ -1551,6 +1573,7 @@ fn rust_project_hello_world_project_model() { potential_cfg_options: CfgOptions( [], ), + target_layout: None, env: Env { entries: {}, }, @@ -1658,6 +1681,7 @@ fn rust_project_hello_world_project_model() { potential_cfg_options: CfgOptions( [], ), + target_layout: None, env: Env { entries: {}, }, @@ -1692,6 +1716,7 @@ fn rust_project_hello_world_project_model() { potential_cfg_options: CfgOptions( [], ), + target_layout: None, env: Env { entries: {}, }, @@ -1726,6 +1751,7 @@ fn rust_project_hello_world_project_model() { 
potential_cfg_options: CfgOptions( [], ), + target_layout: None, env: Env { entries: {}, }, @@ -1760,6 +1786,7 @@ fn rust_project_hello_world_project_model() { potential_cfg_options: CfgOptions( [], ), + target_layout: None, env: Env { entries: {}, }, diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs index 3d199ed24a..e2382aa37e 100644 --- a/crates/project-model/src/workspace.rs +++ b/crates/project-model/src/workspace.rs @@ -21,8 +21,8 @@ use crate::{ cfg_flag::CfgFlag, rustc_cfg, sysroot::SysrootCrate, - utf8_stdout, CargoConfig, CargoWorkspace, InvocationStrategy, ManifestPath, Package, - ProjectJson, ProjectManifest, Sysroot, TargetKind, WorkspaceBuildScripts, + target_data_layout, utf8_stdout, CargoConfig, CargoWorkspace, InvocationStrategy, ManifestPath, + Package, ProjectJson, ProjectManifest, Sysroot, TargetKind, WorkspaceBuildScripts, }; /// A set of cfg-overrides per crate. @@ -79,6 +79,7 @@ pub enum ProjectWorkspace { rustc_cfg: Vec, cfg_overrides: CfgOverrides, toolchain: Option, + target_layout: Option, }, /// Project workspace was manually specified using a `rust-project.json` file. Json { project: ProjectJson, sysroot: Option, rustc_cfg: Vec }, @@ -93,7 +94,7 @@ pub enum ProjectWorkspace { // // /// Project with a set of disjoint files, not belonging to any particular workspace. /// Backed by basic sysroot crates for basic completion and highlighting. - DetachedFiles { files: Vec, sysroot: Sysroot, rustc_cfg: Vec }, + DetachedFiles { files: Vec, sysroot: Option, rustc_cfg: Vec }, } impl fmt::Debug for ProjectWorkspace { @@ -108,6 +109,7 @@ impl fmt::Debug for ProjectWorkspace { rustc_cfg, cfg_overrides, toolchain, + target_layout: data_layout, } => f .debug_struct("Cargo") .field("root", &cargo.workspace_root().file_name()) @@ -120,6 +122,7 @@ impl fmt::Debug for ProjectWorkspace { .field("n_rustc_cfg", &rustc_cfg.len()) .field("n_cfg_overrides", &cfg_overrides.len()) .field("toolchain", &toolchain) + .field("data_layout", &data_layout) .finish(), ProjectWorkspace::Json { project, sysroot, rustc_cfg } => { let mut debug_struct = f.debug_struct("Json"); @@ -133,7 +136,7 @@ impl fmt::Debug for ProjectWorkspace { ProjectWorkspace::DetachedFiles { files, sysroot, rustc_cfg } => f .debug_struct("DetachedFiles") .field("n_files", &files.len()) - .field("n_sysroot_crates", &sysroot.crates().len()) + .field("sysroot", &sysroot.is_some()) .field("n_rustc_cfg", &rustc_cfg.len()) .finish(), } @@ -191,10 +194,7 @@ impl ProjectWorkspace { let sysroot = match &config.sysroot { Some(RustcSource::Path(path)) => { Some(Sysroot::with_sysroot_dir(path.clone()).with_context(|| { - format!( - "Failed to find sysroot for Cargo.toml file {}.", - cargo_toml.display() - ) + format!("Failed to find sysroot at {}.", path.display()) })?) 
} Some(RustcSource::Discover) => Some( @@ -244,6 +244,11 @@ impl ProjectWorkspace { rustc_cfg::get(Some(&cargo_toml), config.target.as_deref(), &config.extra_env); let cfg_overrides = config.cfg_overrides(); + let data_layout = target_data_layout::get( + Some(&cargo_toml), + config.target.as_deref(), + &config.extra_env, + ); ProjectWorkspace::Cargo { cargo, build_scripts: WorkspaceBuildScripts::default(), @@ -252,6 +257,7 @@ impl ProjectWorkspace { rustc_cfg, cfg_overrides, toolchain, + target_layout: data_layout, } } }; @@ -291,14 +297,29 @@ impl ProjectWorkspace { Ok(ProjectWorkspace::Json { project: project_json, sysroot, rustc_cfg }) } - pub fn load_detached_files(detached_files: Vec) -> Result { - let sysroot = Sysroot::discover( - detached_files - .first() - .and_then(|it| it.parent()) - .ok_or_else(|| format_err!("No detached files to load"))?, - &Default::default(), - )?; + pub fn load_detached_files( + detached_files: Vec, + config: &CargoConfig, + ) -> Result { + let sysroot = match &config.sysroot { + Some(RustcSource::Path(path)) => Some( + Sysroot::with_sysroot_dir(path.clone()) + .with_context(|| format!("Failed to find sysroot at {}.", path.display()))?, + ), + Some(RustcSource::Discover) => { + let dir = &detached_files + .first() + .and_then(|it| it.parent()) + .ok_or_else(|| format_err!("No detached files to load"))?; + Some(Sysroot::discover(dir, &config.extra_env).with_context(|| { + format!("Failed to find sysroot in {}. Is rust-src installed?", dir.display()) + })?) + } + None => None, + }; + if let Some(sysroot) = &sysroot { + tracing::info!(src_root = %sysroot.src_root().display(), root = %sysroot.root().display(), "Using sysroot"); + } let rustc_cfg = rustc_cfg::get(None, None, &Default::default()); Ok(ProjectWorkspace::DetachedFiles { files: detached_files, sysroot, rustc_cfg }) } @@ -386,7 +407,7 @@ impl ProjectWorkspace { ["libexec", "lib"] .into_iter() .map(|segment| sysroot.root().join(segment).join(&standalone_server_name)) - .find(|server_path| std::fs::metadata(&server_path).is_ok()) + .find(|server_path| std::fs::metadata(server_path).is_ok()) } _ => None, } @@ -423,6 +444,7 @@ impl ProjectWorkspace { cfg_overrides: _, build_scripts, toolchain: _, + target_layout: _, } => { cargo .packages() @@ -479,21 +501,25 @@ impl ProjectWorkspace { include: vec![detached_file.clone()], exclude: Vec::new(), }) - .chain(mk_sysroot(Some(sysroot))) + .chain(mk_sysroot(sysroot.as_ref())) .collect(), } } pub fn n_packages(&self) -> usize { match self { - ProjectWorkspace::Json { project, .. } => project.n_crates(), + ProjectWorkspace::Json { project, sysroot, .. } => { + let sysroot_package_len = sysroot.as_ref().map_or(0, |it| it.crates().len()); + sysroot_package_len + project.n_crates() + } ProjectWorkspace::Cargo { cargo, sysroot, rustc, .. } => { let rustc_package_len = rustc.as_ref().map_or(0, |it| it.packages().len()); let sysroot_package_len = sysroot.as_ref().map_or(0, |it| it.crates().len()); cargo.packages().len() + sysroot_package_len + rustc_package_len } ProjectWorkspace::DetachedFiles { sysroot, files, .. 
} => { - sysroot.crates().len() + files.len() + let sysroot_package_len = sysroot.as_ref().map_or(0, |it| it.crates().len()); + sysroot_package_len + files.len() } } } @@ -514,6 +540,7 @@ impl ProjectWorkspace { project, sysroot, extra_env, + None, ), ProjectWorkspace::Cargo { cargo, @@ -523,6 +550,7 @@ impl ProjectWorkspace { cfg_overrides, build_scripts, toolchain: _, + target_layout, } => cargo_to_crate_graph( load_proc_macro, load, @@ -532,9 +560,10 @@ impl ProjectWorkspace { rustc_cfg.clone(), cfg_overrides, build_scripts, + target_layout.as_deref().map(Arc::from), ), ProjectWorkspace::DetachedFiles { files, sysroot, rustc_cfg } => { - detached_files_to_crate_graph(rustc_cfg.clone(), load, files, sysroot) + detached_files_to_crate_graph(rustc_cfg.clone(), load, files, sysroot, None) } }; if crate_graph.patch_cfg_if() { @@ -553,11 +582,18 @@ fn project_json_to_crate_graph( project: &ProjectJson, sysroot: &Option, extra_env: &FxHashMap, + target_layout: Option>, ) -> CrateGraph { let mut crate_graph = CrateGraph::default(); - let sysroot_deps = sysroot - .as_ref() - .map(|sysroot| sysroot_to_crate_graph(&mut crate_graph, sysroot, rustc_cfg.clone(), load)); + let sysroot_deps = sysroot.as_ref().map(|sysroot| { + sysroot_to_crate_graph( + &mut crate_graph, + sysroot, + rustc_cfg.clone(), + target_layout.clone(), + load, + ) + }); let mut cfg_cache: FxHashMap<&str, Vec> = FxHashMap::default(); let crates: NoHashHashMap = project @@ -609,6 +645,7 @@ fn project_json_to_crate_graph( } else { CrateOrigin::CratesIo { repo: None, name: None } }, + target_layout.clone(), ), ) }) @@ -649,11 +686,18 @@ fn cargo_to_crate_graph( rustc_cfg: Vec, override_cfg: &CfgOverrides, build_scripts: &WorkspaceBuildScripts, + target_layout: Option>, ) -> CrateGraph { let _p = profile::span("cargo_to_crate_graph"); let mut crate_graph = CrateGraph::default(); let (public_deps, libproc_macro) = match sysroot { - Some(sysroot) => sysroot_to_crate_graph(&mut crate_graph, sysroot, rustc_cfg.clone(), load), + Some(sysroot) => sysroot_to_crate_graph( + &mut crate_graph, + sysroot, + rustc_cfg.clone(), + target_layout.clone(), + load, + ), None => (SysrootPublicDeps::default(), None), }; @@ -716,6 +760,7 @@ fn cargo_to_crate_graph( file_id, &cargo[tgt].name, cargo[tgt].is_proc_macro, + target_layout.clone(), ); if cargo[tgt].kind == TargetKind::Lib { lib_tgt = Some((crate_id, cargo[tgt].name.clone())); @@ -795,6 +840,7 @@ fn cargo_to_crate_graph( &cfg_options, override_cfg, build_scripts, + target_layout, ); } } @@ -805,12 +851,21 @@ fn detached_files_to_crate_graph( rustc_cfg: Vec, load: &mut dyn FnMut(&AbsPath) -> Option, detached_files: &[AbsPathBuf], - sysroot: &Sysroot, + sysroot: &Option, + target_layout: Option>, ) -> CrateGraph { let _p = profile::span("detached_files_to_crate_graph"); let mut crate_graph = CrateGraph::default(); - let (public_deps, _libproc_macro) = - sysroot_to_crate_graph(&mut crate_graph, sysroot, rustc_cfg.clone(), load); + let (public_deps, _libproc_macro) = match sysroot { + Some(sysroot) => sysroot_to_crate_graph( + &mut crate_graph, + sysroot, + rustc_cfg.clone(), + target_layout.clone(), + load, + ), + None => (SysrootPublicDeps::default(), None), + }; let mut cfg_options = CfgOptions::default(); cfg_options.extend(rustc_cfg); @@ -841,6 +896,7 @@ fn detached_files_to_crate_graph( repo: None, name: display_name.map(|n| n.canonical_name().to_string()), }, + target_layout.clone(), ); public_deps.add_to_crate_graph(&mut crate_graph, detached_file_crate); @@ -861,6 +917,7 @@ fn 
handle_rustc_crates( cfg_options: &CfgOptions, override_cfg: &CfgOverrides, build_scripts: &WorkspaceBuildScripts, + target_layout: Option>, ) { let mut rustc_pkg_crates = FxHashMap::default(); // The root package of the rustc-dev component is rustc_driver, so we match that @@ -917,6 +974,7 @@ fn handle_rustc_crates( file_id, &rustc_workspace[tgt].name, rustc_workspace[tgt].is_proc_macro, + target_layout.clone(), ); pkg_to_lib_crate.insert(pkg, crate_id); // Add dependencies on core / std / alloc for this crate @@ -981,6 +1039,7 @@ fn add_target_crate_root( file_id: FileId, cargo_name: &str, is_proc_macro: bool, + target_layout: Option>, ) -> CrateId { let edition = pkg.edition; let mut potential_cfg_options = cfg_options.clone(); @@ -1027,6 +1086,7 @@ fn add_target_crate_root( proc_macro, is_proc_macro, CrateOrigin::CratesIo { repo: pkg.repository.clone(), name: Some(pkg.name.clone()) }, + target_layout, ) } @@ -1048,6 +1108,7 @@ fn sysroot_to_crate_graph( crate_graph: &mut CrateGraph, sysroot: &Sysroot, rustc_cfg: Vec, + target_layout: Option>, load: &mut dyn FnMut(&AbsPath) -> Option, ) -> (SysrootPublicDeps, Option) { let _p = profile::span("sysroot_to_crate_graph"); @@ -1071,6 +1132,7 @@ fn sysroot_to_crate_graph( Err("no proc macro loaded for sysroot crate".into()), false, CrateOrigin::Lang(LangCrateOrigin::from(&*sysroot[krate].name)), + target_layout.clone(), ); Some((krate, crate_id)) }) diff --git a/crates/rust-analyzer/src/bin/logger.rs b/crates/rust-analyzer/src/bin/logger.rs index ac10721d95..8caadecd85 100644 --- a/crates/rust-analyzer/src/bin/logger.rs +++ b/crates/rust-analyzer/src/bin/logger.rs @@ -81,9 +81,9 @@ impl Logger { Registry::default() .with( self.filter - .add_directive(format!("chalk_solve={}", val).parse()?) - .add_directive(format!("chalk_ir={}", val).parse()?) - .add_directive(format!("chalk_recursive={}", val).parse()?), + .add_directive(format!("chalk_solve={val}").parse()?) + .add_directive(format!("chalk_ir={val}").parse()?) 
+ .add_directive(format!("chalk_recursive={val}").parse()?), ) .with(ra_fmt_layer) .with(chalk_layer) @@ -124,7 +124,7 @@ where Some(log) => log.target(), None => event.metadata().target(), }; - write!(writer, "[{} {}] ", level, target)?; + write!(writer, "[{level} {target}] ")?; // Write spans and fields of each span ctx.visit_spans(|span| { @@ -140,7 +140,7 @@ where let fields = &ext.get::>().expect("will never be `None`"); if !fields.is_empty() { - write!(writer, "{{{}}}", fields)?; + write!(writer, "{{{fields}}}")?; } write!(writer, ": ")?; diff --git a/crates/rust-analyzer/src/bin/main.rs b/crates/rust-analyzer/src/bin/main.rs index 7bf595d2a4..53710749de 100644 --- a/crates/rust-analyzer/src/bin/main.rs +++ b/crates/rust-analyzer/src/bin/main.rs @@ -30,7 +30,7 @@ fn main() { let code = match rustc_wrapper::run_rustc_skipping_cargo_checking(rustc, args.collect()) { Ok(rustc_wrapper::ExitCode(code)) => code.unwrap_or(102), Err(err) => { - eprintln!("{}", err); + eprintln!("{err}"); 101 } }; @@ -40,7 +40,7 @@ fn main() { let flags = flags::RustAnalyzer::from_env_or_exit(); if let Err(err) = try_main(flags) { tracing::error!("Unexpected error: {}", err); - eprintln!("{}", err); + eprintln!("{err}"); process::exit(101); } } @@ -183,6 +183,8 @@ fn run_server() -> Result<()> { } } + config.client_specific_adjustments(&initialize_params.client_info); + let server_capabilities = rust_analyzer::server_capabilities(&config); let initialize_result = lsp_types::InitializeResult { diff --git a/crates/rust-analyzer/src/caps.rs b/crates/rust-analyzer/src/caps.rs index 723b888d9a..122d2e6ff1 100644 --- a/crates/rust-analyzer/src/caps.rs +++ b/crates/rust-analyzer/src/caps.rs @@ -42,7 +42,7 @@ pub fn server_capabilities(config: &Config) -> ServerCapabilities { "(".to_string(), ]), all_commit_characters: None, - completion_item: completion_item(&config), + completion_item: completion_item(config), work_done_progress_options: WorkDoneProgressOptions { work_done_progress: None }, }), signature_help_provider: Some(SignatureHelpOptions { @@ -67,7 +67,7 @@ pub fn server_capabilities(config: &Config) -> ServerCapabilities { }, document_on_type_formatting_provider: Some(DocumentOnTypeFormattingOptions { first_trigger_character: "=".to_string(), - more_trigger_character: Some(more_trigger_character(&config)), + more_trigger_character: Some(more_trigger_character(config)), }), selection_range_provider: Some(SelectionRangeProviderCapability::Simple(true)), folding_range_provider: Some(FoldingRangeProviderCapability::Simple(true)), diff --git a/crates/rust-analyzer/src/cli.rs b/crates/rust-analyzer/src/cli.rs index 60ba67e25f..d5d877680a 100644 --- a/crates/rust-analyzer/src/cli.rs +++ b/crates/rust-analyzer/src/cli.rs @@ -46,7 +46,7 @@ fn report_metric(metric: &str, value: u64, unit: &str) { if std::env::var("RA_METRICS").is_err() { return; } - println!("METRIC:{}:{}:{}", metric, value, unit) + println!("METRIC:{metric}:{value}:{unit}") } fn print_memory_usage(mut host: AnalysisHost, vfs: Vfs) { @@ -65,6 +65,6 @@ fn print_memory_usage(mut host: AnalysisHost, vfs: Vfs) { for (name, bytes) in mem { // NOTE: Not a debug print, so avoid going through the `eprintln` defined above. 
- eprintln!("{:>8} {}", bytes, name); + eprintln!("{bytes:>8} {name}"); } } diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs index 01fccc83e8..053db5fc53 100644 --- a/crates/rust-analyzer/src/cli/analysis_stats.rs +++ b/crates/rust-analyzer/src/cli/analysis_stats.rs @@ -87,9 +87,9 @@ impl flags::AnalysisStats { load_workspace(workspace, &cargo_config.extra_env, &load_cargo_config)?; let db = host.raw_database(); eprint!("{:<20} {}", "Database loaded:", db_load_sw.elapsed()); - eprint!(" (metadata {}", metadata_time); + eprint!(" (metadata {metadata_time}"); if let Some(build_scripts_time) = build_scripts_time { - eprint!("; build {}", build_scripts_time); + eprint!("; build {build_scripts_time}"); } eprintln!(")"); @@ -118,7 +118,7 @@ impl flags::AnalysisStats { shuffle(&mut rng, &mut visit_queue); } - eprint!(" crates: {}", num_crates); + eprint!(" crates: {num_crates}"); let mut num_decls = 0; let mut funcs = Vec::new(); while let Some(module) = visit_queue.pop() { @@ -142,7 +142,7 @@ impl flags::AnalysisStats { } } } - eprintln!(", mods: {}, decls: {}, fns: {}", visited_modules.len(), num_decls, funcs.len()); + eprintln!(", mods: {}, decls: {num_decls}, fns: {}", visited_modules.len(), funcs.len()); eprintln!("{:<20} {}", "Item Collection:", analysis_sw.elapsed()); if self.randomize { @@ -154,7 +154,7 @@ impl flags::AnalysisStats { } let total_span = analysis_sw.elapsed(); - eprintln!("{:<20} {}", "Total:", total_span); + eprintln!("{:<20} {total_span}", "Total:"); report_metric("total time", total_span.time.as_millis() as u64, "ms"); if let Some(instructions) = total_span.instructions { report_metric("total instructions", instructions, "#instr"); @@ -179,7 +179,7 @@ impl flags::AnalysisStats { total_macro_file_size += syntax_len(val.syntax_node()) } } - eprintln!("source files: {}, macro files: {}", total_file_size, total_macro_file_size); + eprintln!("source files: {total_file_size}, macro files: {total_macro_file_size}"); } if self.memory_usage && verbosity.is_verbose() { @@ -239,7 +239,7 @@ impl flags::AnalysisStats { continue; } } - let mut msg = format!("processing: {}", full_name); + let mut msg = format!("processing: {full_name}"); if verbosity.is_verbose() { if let Some(src) = f.source(db) { let original_file = src.file_id.original_file(db); @@ -275,7 +275,7 @@ impl flags::AnalysisStats { end.col, )); } else { - bar.println(format!("{}: Unknown type", name,)); + bar.println(format!("{name}: Unknown type",)); } } true @@ -402,7 +402,7 @@ fn location_csv( let text_range = original_range.range; let (start, end) = (line_index.line_col(text_range.start()), line_index.line_col(text_range.end())); - format!("{},{}:{},{}:{}", path, start.line + 1, start.col, end.line + 1, end.col) + format!("{path},{}:{},{}:{}", start.line + 1, start.col, end.line + 1, end.col) } fn expr_syntax_range( diff --git a/crates/rust-analyzer/src/cli/diagnostics.rs b/crates/rust-analyzer/src/cli/diagnostics.rs index 247007db0a..fd5b3ce61f 100644 --- a/crates/rust-analyzer/src/cli/diagnostics.rs +++ b/crates/rust-analyzer/src/cli/diagnostics.rs @@ -40,7 +40,7 @@ impl flags::Diagnostics { if !visited_files.contains(&file_id) { let crate_name = module.krate().display_name(db).as_deref().unwrap_or("unknown").to_string(); - println!("processing crate: {}, module: {}", crate_name, _vfs.file_path(file_id)); + println!("processing crate: {crate_name}, module: {}", _vfs.file_path(file_id)); for diagnostic in analysis .diagnostics( 
&DiagnosticsConfig::test_sample(), @@ -53,7 +53,7 @@ impl flags::Diagnostics { found_error = true; } - println!("{:?}", diagnostic); + println!("{diagnostic:?}"); } visited_files.insert(file_id); diff --git a/crates/rust-analyzer/src/cli/flags.rs b/crates/rust-analyzer/src/cli/flags.rs index 5bcc97e226..770612cc94 100644 --- a/crates/rust-analyzer/src/cli/flags.rs +++ b/crates/rust-analyzer/src/cli/flags.rs @@ -255,7 +255,7 @@ impl FromStr for OutputFormat { fn from_str(s: &str) -> Result { match s { "csv" => Ok(Self::Csv), - _ => Err(format!("unknown output format `{}`", s)), + _ => Err(format!("unknown output format `{s}`")), } } } diff --git a/crates/rust-analyzer/src/cli/highlight.rs b/crates/rust-analyzer/src/cli/highlight.rs index 4f9b362f1b..84607b9fd5 100644 --- a/crates/rust-analyzer/src/cli/highlight.rs +++ b/crates/rust-analyzer/src/cli/highlight.rs @@ -8,7 +8,7 @@ impl flags::Highlight { pub fn run(self) -> anyhow::Result<()> { let (analysis, file_id) = Analysis::from_single_file(read_stdin()?); let html = analysis.highlight_as_html(file_id, self.rainbow).unwrap(); - println!("{}", html); + println!("{html}"); Ok(()) } } diff --git a/crates/rust-analyzer/src/cli/lsif.rs b/crates/rust-analyzer/src/cli/lsif.rs index c74ddabb17..af8356d041 100644 --- a/crates/rust-analyzer/src/cli/lsif.rs +++ b/crates/rust-analyzer/src/cli/lsif.rs @@ -83,7 +83,7 @@ impl LsifManager<'_> { // FIXME: support file in addition to stdout here fn emit(&self, data: &str) { - println!("{}", data); + println!("{data}"); } fn get_token_id(&mut self, id: TokenId) -> Id { @@ -253,7 +253,7 @@ impl LsifManager<'_> { }; let result = folds .into_iter() - .map(|it| to_proto::folding_range(&*text, &line_index, false, it)) + .map(|it| to_proto::folding_range(&text, &line_index, false, it)) .collect(); let folding_id = self.add_vertex(lsif::Vertex::FoldingRangeResult { result }); self.add_edge(lsif::Edge::FoldingRange(lsif::EdgeData { diff --git a/crates/rust-analyzer/src/cli/progress_report.rs b/crates/rust-analyzer/src/cli/progress_report.rs index 5a2dc39d52..d459dd115c 100644 --- a/crates/rust-analyzer/src/cli/progress_report.rs +++ b/crates/rust-analyzer/src/cli/progress_report.rs @@ -67,7 +67,7 @@ impl ProgressReport { return; } let percent = (self.curr * 100.0) as u32; - let text = format!("{}/{} {:3>}% {}", self.pos, self.len, percent, self.msg); + let text = format!("{}/{} {percent:3>}% {}", self.pos, self.len, self.msg); self.update_text(&text); } @@ -114,7 +114,7 @@ impl ProgressReport { // Fill all last text to space and return the cursor let spaces = " ".repeat(self.text.len()); let backspaces = "\x08".repeat(self.text.len()); - print!("{}{}{}", backspaces, spaces, backspaces); + print!("{backspaces}{spaces}{backspaces}"); let _ = io::stdout().flush(); self.text = String::new(); diff --git a/crates/rust-analyzer/src/cli/scip.rs b/crates/rust-analyzer/src/cli/scip.rs index 9edd045ab0..b050d1e95a 100644 --- a/crates/rust-analyzer/src/cli/scip.rs +++ b/crates/rust-analyzer/src/cli/scip.rs @@ -28,7 +28,7 @@ impl flags::Scip { let now = Instant::now(); let cargo_config = CargoConfig::default(); - let no_progress = &|s| (eprintln!("rust-analyzer: Loading {}", s)); + let no_progress = &|s| (eprintln!("rust-analyzer: Loading {s}")); let load_cargo_config = LoadCargoConfig { load_out_dirs_from_check: true, with_proc_macro: true, @@ -102,7 +102,7 @@ impl flags::Scip { let symbol = tokens_to_symbol .entry(id) .or_insert_with(|| { - let symbol = token_to_symbol(&token).unwrap_or_else(&mut new_local_symbol); + let 
symbol = token_to_symbol(token).unwrap_or_else(&mut new_local_symbol); scip::symbol::format_symbol(symbol) }) .clone(); @@ -176,7 +176,7 @@ fn get_relative_filepath( rootpath: &vfs::AbsPathBuf, file_id: ide::FileId, ) -> Option { - Some(vfs.file_path(file_id).as_path()?.strip_prefix(&rootpath)?.as_ref().to_str()?.to_string()) + Some(vfs.file_path(file_id).as_path()?.strip_prefix(rootpath)?.as_ref().to_str()?.to_string()) } // SCIP Ranges have a (very large) optimization that ranges if they are on the same line @@ -209,7 +209,7 @@ fn new_descriptor_str( fn new_descriptor(name: Name, suffix: scip_types::descriptor::Suffix) -> scip_types::Descriptor { let mut name = name.to_string(); if name.contains("'") { - name = format!("`{}`", name); + name = format!("`{name}`"); } new_descriptor_str(name.as_str(), suffix) @@ -303,11 +303,11 @@ mod test { } if expected == "" { - assert!(found_symbol.is_none(), "must have no symbols {:?}", found_symbol); + assert!(found_symbol.is_none(), "must have no symbols {found_symbol:?}"); return; } - assert!(found_symbol.is_some(), "must have one symbol {:?}", found_symbol); + assert!(found_symbol.is_some(), "must have one symbol {found_symbol:?}"); let res = found_symbol.unwrap(); let formatted = format_symbol(res); assert_eq!(formatted, expected); diff --git a/crates/rust-analyzer/src/cli/ssr.rs b/crates/rust-analyzer/src/cli/ssr.rs index e8291782b7..84c4891716 100644 --- a/crates/rust-analyzer/src/cli/ssr.rs +++ b/crates/rust-analyzer/src/cli/ssr.rs @@ -70,7 +70,7 @@ impl flags::Search { let sr = db.source_root(root); for file_id in sr.iter() { for debug_info in match_finder.debug_where_text_equal(file_id, debug_snippet) { - println!("{:#?}", debug_info); + println!("{debug_info:#?}"); } } } diff --git a/crates/rust-analyzer/src/cli/symbols.rs b/crates/rust-analyzer/src/cli/symbols.rs index 84659b5ea9..9fad6723af 100644 --- a/crates/rust-analyzer/src/cli/symbols.rs +++ b/crates/rust-analyzer/src/cli/symbols.rs @@ -9,7 +9,7 @@ impl flags::Symbols { let (analysis, file_id) = Analysis::from_single_file(text); let structure = analysis.file_structure(file_id).unwrap(); for s in structure { - println!("{:?}", s); + println!("{s:?}"); } Ok(()) } diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index 6b2f22faa7..b0afbdc9a4 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs @@ -20,7 +20,7 @@ use ide_db::{ SnippetCap, }; use itertools::Itertools; -use lsp_types::{ClientCapabilities, MarkupKind}; +use lsp_types::{ClientCapabilities, ClientInfo, MarkupKind}; use project_model::{ CargoConfig, CargoFeatures, ProjectJson, ProjectJsonData, ProjectManifest, RustcSource, UnsetTestCrates, @@ -124,22 +124,23 @@ config_data! { /// Unsets `#[cfg(test)]` for the specified crates. cargo_unsetTest: Vec = "[\"core\"]", + /// Run the check command for diagnostics on save. + checkOnSave | checkOnSave_enable: bool = "true", + /// Check all targets and tests (`--all-targets`). - checkOnSave_allTargets: bool = "true", + check_allTargets | checkOnSave_allTargets: bool = "true", /// Cargo command to use for `cargo check`. - checkOnSave_command: String = "\"check\"", - /// Run specified `cargo check` command for diagnostics on save. - checkOnSave_enable: bool = "true", + check_command | checkOnSave_command: String = "\"check\"", /// Extra arguments for `cargo check`. 
- checkOnSave_extraArgs: Vec = "[]", + check_extraArgs | checkOnSave_extraArgs: Vec = "[]", /// Extra environment variables that will be set when running `cargo check`. /// Extends `#rust-analyzer.cargo.extraEnv#`. - checkOnSave_extraEnv: FxHashMap = "{}", + check_extraEnv | checkOnSave_extraEnv: FxHashMap = "{}", /// List of features to activate. Defaults to /// `#rust-analyzer.cargo.features#`. /// /// Set to `"all"` to pass `--all-features` to Cargo. - checkOnSave_features: Option = "null", + check_features | checkOnSave_features: Option = "null", /// Specifies the working directory for running checks. /// - "workspace": run checks for workspaces in the corresponding workspaces' root directories. // FIXME: Ideally we would support this in some way @@ -147,19 +148,21 @@ config_data! { /// - "root": run checks in the project's root directory. /// This config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#` /// is set. - checkOnSave_invocationLocation: InvocationLocation = "\"workspace\"", + check_invocationLocation | checkOnSave_invocationLocation: InvocationLocation = "\"workspace\"", /// Specifies the invocation strategy to use when running the checkOnSave command. /// If `per_workspace` is set, the command will be executed for each workspace. /// If `once` is set, the command will be executed once. /// This config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#` /// is set. - checkOnSave_invocationStrategy: InvocationStrategy = "\"per_workspace\"", + check_invocationStrategy | checkOnSave_invocationStrategy: InvocationStrategy = "\"per_workspace\"", /// Whether to pass `--no-default-features` to Cargo. Defaults to /// `#rust-analyzer.cargo.noDefaultFeatures#`. - checkOnSave_noDefaultFeatures: Option = "null", + check_noDefaultFeatures | checkOnSave_noDefaultFeatures: Option = "null", /// Override the command rust-analyzer uses instead of `cargo check` for /// diagnostics on save. The command is required to output json and - /// should therefore include `--message-format=json` or a similar option. + /// should therefore include `--message-format=json` or a similar option + /// (if your client supports the `colorDiagnosticOutput` experimental + /// capability, you can use `--message-format=json-diagnostic-rendered-ansi`). /// /// If you're changing this because you're using some tool wrapping /// Cargo, you might also want to change @@ -175,14 +178,14 @@ config_data! { /// cargo check --workspace --message-format=json --all-targets /// ``` /// . - checkOnSave_overrideCommand: Option> = "null", + check_overrideCommand | checkOnSave_overrideCommand: Option> = "null", /// Check for specific targets. Defaults to `#rust-analyzer.cargo.target#` if empty. /// /// Can be a single target, e.g. `"x86_64-unknown-linux-gnu"` or a list of targets, e.g. /// `["aarch64-apple-darwin", "x86_64-apple-darwin"]`. /// /// Aliased as `"checkOnSave.targets"`. - checkOnSave_target | checkOnSave_targets: CheckOnSaveTargets = "[]", + check_targets | checkOnSave_targets | checkOnSave_target: Option = "null", /// Toggles the additional completions that automatically add imports when completed. /// Note that your client must specify the `additionalTextEdits` LSP client capability to truly have this feature enabled. @@ -327,12 +330,20 @@ config_data! { inlayHints_closingBraceHints_minLines: usize = "25", /// Whether to show inlay type hints for return types of closures. 
inlayHints_closureReturnTypeHints_enable: ClosureReturnTypeHintsDef = "\"never\"", + /// Whether to show enum variant discriminant hints. + inlayHints_discriminantHints_enable: DiscriminantHintsDef = "\"never\"", /// Whether to show inlay hints for type adjustments. inlayHints_expressionAdjustmentHints_enable: AdjustmentHintsDef = "\"never\"", + /// Whether to hide inlay hints for type adjustments outside of `unsafe` blocks. + inlayHints_expressionAdjustmentHints_hideOutsideUnsafe: bool = "false", + /// Whether to show inlay hints as postfix ops (`.*` instead of `*`, etc). + inlayHints_expressionAdjustmentHints_mode: AdjustmentHintsModeDef = "\"prefix\"", /// Whether to show inlay type hints for elided lifetimes in function signatures. inlayHints_lifetimeElisionHints_enable: LifetimeElisionDef = "\"never\"", /// Whether to prefer using parameter names as the name for elided lifetime hints if possible. inlayHints_lifetimeElisionHints_useParameterNames: bool = "false", + /// Whether to use location links for parts of type mentioned in inlay hints. + inlayHints_locationLinks: bool = "true", /// Maximum length for inlay hints. Set to null to have an unlimited length. inlayHints_maxLength: Option = "25", /// Whether to show function parameter name inlay hints at the call @@ -403,6 +414,9 @@ config_data! { /// Whether to show `can't find Cargo.toml` error message. notifications_cargoTomlNotFound: bool = "true", + /// How many worker threads in the main loop. The default `null` means to pick automatically. + numThreads: Option = "null", + /// Expand attribute macros. Requires `#rust-analyzer.procMacro.enable#` to be set. procMacro_attributes_enable: bool = "true", /// Enable support for procedural macros, implies `#rust-analyzer.cargo.buildScripts.enable#`. @@ -714,6 +728,19 @@ impl Config { } } + pub fn client_specific_adjustments(&mut self, client_info: &Option) { + // FIXME: remove this when we drop support for vscode 1.65 and below + if let Some(client) = client_info { + if client.name.contains("Code") || client.name.contains("Codium") { + if let Some(version) = &client.version { + if version.as_str() < "1.76" { + self.data.inlayHints_locationLinks = false; + } + } + } + } + } + pub fn update(&mut self, mut json: serde_json::Value) -> Result<(), ConfigUpdateError> { tracing::info!("updating config from JSON: {:#}", json); if json.is_null() || json.as_object().map_or(false, |it| it.is_empty()) { @@ -767,9 +794,9 @@ impl Config { fn validate(&self, error_sink: &mut Vec<(String, serde_json::Error)>) { use serde::de::Error; - if self.data.checkOnSave_command.is_empty() { + if self.data.check_command.is_empty() { error_sink.push(( - "/checkOnSave/command".to_string(), + "/check/command".to_string(), serde_json::Error::custom("expected a non-empty string"), )); } @@ -981,6 +1008,11 @@ impl Config { self.experimental("serverStatusNotification") } + /// Whether the client supports colored output for full diagnostics from `checkOnSave`. 
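The `client_specific_adjustments` hunk above disables inlay-hint location links for VS Code / VSCodium clients older than 1.76, using a plain string comparison on the reported client version. A small sketch of that decision in isolation; the function name and test values are illustrative, not part of the diff:

```rust
// Returns true when location links should be turned off for this client.
// Mirrors the shape of the check above: only VS Code-like clients are
// affected, and the version strings are compared lexicographically.
fn disable_location_links(client_name: &str, client_version: Option<&str>) -> bool {
    let is_code_like = client_name.contains("Code") || client_name.contains("Codium");
    matches!((is_code_like, client_version), (true, Some(v)) if v < "1.76")
}

fn main() {
    assert!(disable_location_links("Visual Studio Code", Some("1.65.2")));
    assert!(!disable_location_links("Visual Studio Code", Some("1.76.0")));
    assert!(!disable_location_links("helix", Some("22.12")));
    println!("ok");
}
```

The hunk carries a FIXME to drop the whole adjustment once VS Code 1.65 and older no longer need to be supported.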
+ pub fn color_diagnostic_output(&self) -> bool { + self.experimental("colorDiagnosticOutput") + } + pub fn publish_diagnostics(&self) -> bool { self.data.diagnostics_enable } @@ -1014,7 +1046,7 @@ impl Config { pub fn check_on_save_extra_env(&self) -> FxHashMap { let mut extra_env = self.data.cargo_extraEnv.clone(); - extra_env.extend(self.data.checkOnSave_extraEnv.clone()); + extra_env.extend(self.data.check_extraEnv.clone()); extra_env } @@ -1125,11 +1157,8 @@ impl Config { } } - pub fn flycheck(&self) -> Option { - if !self.data.checkOnSave_enable { - return None; - } - let flycheck_config = match &self.data.checkOnSave_overrideCommand { + pub fn flycheck(&self) -> FlycheckConfig { + match &self.data.check_overrideCommand { Some(args) if !args.is_empty() => { let mut args = args.clone(); let command = args.remove(0); @@ -1137,13 +1166,13 @@ impl Config { command, args, extra_env: self.check_on_save_extra_env(), - invocation_strategy: match self.data.checkOnSave_invocationStrategy { + invocation_strategy: match self.data.check_invocationStrategy { InvocationStrategy::Once => flycheck::InvocationStrategy::Once, InvocationStrategy::PerWorkspace => { flycheck::InvocationStrategy::PerWorkspace } }, - invocation_location: match self.data.checkOnSave_invocationLocation { + invocation_location: match self.data.check_invocationLocation { InvocationLocation::Root => { flycheck::InvocationLocation::Root(self.root_path.clone()) } @@ -1152,34 +1181,43 @@ impl Config { } } Some(_) | None => FlycheckConfig::CargoCommand { - command: self.data.checkOnSave_command.clone(), - target_triples: match &self.data.checkOnSave_target.0[..] { - [] => self.data.cargo_target.clone().into_iter().collect(), - targets => targets.into(), - }, - all_targets: self.data.checkOnSave_allTargets, + command: self.data.check_command.clone(), + target_triples: self + .data + .check_targets + .clone() + .and_then(|targets| match &targets.0[..] 
{ + [] => None, + targets => Some(targets.into()), + }) + .unwrap_or_else(|| self.data.cargo_target.clone().into_iter().collect()), + all_targets: self.data.check_allTargets, no_default_features: self .data - .checkOnSave_noDefaultFeatures + .check_noDefaultFeatures .unwrap_or(self.data.cargo_noDefaultFeatures), all_features: matches!( - self.data.checkOnSave_features.as_ref().unwrap_or(&self.data.cargo_features), + self.data.check_features.as_ref().unwrap_or(&self.data.cargo_features), CargoFeaturesDef::All ), features: match self .data - .checkOnSave_features + .check_features .clone() .unwrap_or_else(|| self.data.cargo_features.clone()) { CargoFeaturesDef::All => vec![], CargoFeaturesDef::Selected(it) => it, }, - extra_args: self.data.checkOnSave_extraArgs.clone(), + extra_args: self.data.check_extraArgs.clone(), extra_env: self.check_on_save_extra_env(), + ansi_color_output: self.color_diagnostic_output(), }, - }; - Some(flycheck_config) + } + } + + pub fn check_on_save(&self) -> bool { + self.data.checkOnSave } pub fn runnables(&self) -> RunnablesConfig { @@ -1191,10 +1229,16 @@ impl Config { pub fn inlay_hints(&self) -> InlayHintsConfig { InlayHintsConfig { + location_links: self.data.inlayHints_locationLinks, render_colons: self.data.inlayHints_renderColons, type_hints: self.data.inlayHints_typeHints_enable, parameter_hints: self.data.inlayHints_parameterHints_enable, chaining_hints: self.data.inlayHints_chainingHints_enable, + discriminant_hints: match self.data.inlayHints_discriminantHints_enable { + DiscriminantHintsDef::Always => ide::DiscriminantHints::Always, + DiscriminantHintsDef::Never => ide::DiscriminantHints::Never, + DiscriminantHintsDef::Fieldless => ide::DiscriminantHints::Fieldless, + }, closure_return_type_hints: match self.data.inlayHints_closureReturnTypeHints_enable { ClosureReturnTypeHintsDef::Always => ide::ClosureReturnTypeHints::Always, ClosureReturnTypeHintsDef::Never => ide::ClosureReturnTypeHints::Never, @@ -1219,6 +1263,15 @@ impl Config { }, AdjustmentHintsDef::Reborrow => ide::AdjustmentHints::ReborrowOnly, }, + adjustment_hints_mode: match self.data.inlayHints_expressionAdjustmentHints_mode { + AdjustmentHintsModeDef::Prefix => ide::AdjustmentHintsMode::Prefix, + AdjustmentHintsModeDef::Postfix => ide::AdjustmentHintsMode::Postfix, + AdjustmentHintsModeDef::PreferPrefix => ide::AdjustmentHintsMode::PreferPrefix, + AdjustmentHintsModeDef::PreferPostfix => ide::AdjustmentHintsMode::PreferPostfix, + }, + adjustment_hints_hide_outside_unsafe: self + .data + .inlayHints_expressionAdjustmentHints_hideOutsideUnsafe, binding_mode_hints: self.data.inlayHints_bindingModeHints_enable, param_names_for_lifetime_elision_hints: self .data @@ -1449,6 +1502,10 @@ impl Config { } } + pub fn main_loop_num_threads(&self) -> usize { + self.data.numThreads.unwrap_or(num_cpus::get_physical().try_into().unwrap_or(1)) + } + pub fn typing_autoclose_angle(&self) -> bool { self.data.typing_autoClosingAngleBrackets_enable } @@ -1553,6 +1610,7 @@ mod de_unit_v { named_unit_variant!(skip_trivial); named_unit_variant!(mutable); named_unit_variant!(reborrow); + named_unit_variant!(fieldless); named_unit_variant!(with_block); } @@ -1716,6 +1774,26 @@ enum AdjustmentHintsDef { Reborrow, } +#[derive(Deserialize, Debug, Clone)] +#[serde(untagged)] +enum DiscriminantHintsDef { + #[serde(deserialize_with = "true_or_always")] + Always, + #[serde(deserialize_with = "false_or_never")] + Never, + #[serde(deserialize_with = "de_unit_v::fieldless")] + Fieldless, +} + +#[derive(Deserialize, Debug, 
Clone)] +#[serde(rename_all = "snake_case")] +enum AdjustmentHintsModeDef { + Prefix, + Postfix, + PreferPrefix, + PreferPostfix, +} + #[derive(Deserialize, Debug, Clone)] #[serde(rename_all = "snake_case")] enum FilesWatcherDef { @@ -1817,40 +1895,35 @@ fn get_field( alias: Option<&'static str>, default: &str, ) -> T { - let default = serde_json::from_str(default).unwrap(); // XXX: check alias first, to work-around the VS Code where it pre-fills the // defaults instead of sending an empty object. alias .into_iter() .chain(iter::once(field)) - .find_map(move |field| { + .filter_map(move |field| { let mut pointer = field.replace('_', "/"); pointer.insert(0, '/'); - json.pointer_mut(&pointer).and_then(|it| match serde_json::from_value(it.take()) { - Ok(it) => Some(it), - Err(e) => { - tracing::warn!("Failed to deserialize config field at {}: {:?}", pointer, e); - error_sink.push((pointer, e)); - None - } - }) + json.pointer_mut(&pointer) + .map(|it| serde_json::from_value(it.take()).map_err(|e| (e, pointer))) }) - .unwrap_or(default) + .find(Result::is_ok) + .and_then(|res| match res { + Ok(it) => Some(it), + Err((e, pointer)) => { + tracing::warn!("Failed to deserialize config field at {}: {:?}", pointer, e); + error_sink.push((pointer, e)); + None + } + }) + .unwrap_or_else(|| serde_json::from_str(default).unwrap()) } fn schema(fields: &[(&'static str, &'static str, &[&str], &str)]) -> serde_json::Value { - for ((f1, ..), (f2, ..)) in fields.iter().zip(&fields[1..]) { - fn key(f: &str) -> &str { - f.splitn(2, '_').next().unwrap() - } - assert!(key(f1) <= key(f2), "wrong field order: {:?} {:?}", f1, f2); - } - let map = fields .iter() .map(|(field, ty, doc, default)| { let name = field.replace('_', "."); - let name = format!("rust-analyzer.{}", name); + let name = format!("rust-analyzer.{name}"); let props = field_props(field, ty, doc, default); (name, props) }) @@ -1863,9 +1936,7 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json let doc = doc.trim_end_matches('\n'); assert!( doc.ends_with('.') && doc.starts_with(char::is_uppercase), - "bad docs for {}: {:?}", - field, - doc + "bad docs for {field}: {doc:?}" ); let default = default.parse::().unwrap(); @@ -1921,15 +1992,6 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json "type": ["null", "array"], "items": { "type": "string" }, }, - "MergeBehaviorDef" => set! { - "type": "string", - "enum": ["none", "crate", "module"], - "enumDescriptions": [ - "Do not merge imports at all.", - "Merge imports from the same crate into a single `use` statement.", - "Merge imports from the same module into a single `use` statement." - ], - }, "ExprFillDefaultDef" => set! { "type": "string", "enum": ["todo", "default"], @@ -2038,6 +2100,34 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json "Only show auto borrow and dereference adjustment hints." ] }, + "DiscriminantHintsDef" => set! { + "type": "string", + "enum": [ + "always", + "never", + "fieldless" + ], + "enumDescriptions": [ + "Always show all discriminant hints.", + "Never show discriminant hints.", + "Only show discriminant hints on fieldless enum variants." + ] + }, + "AdjustmentHintsModeDef" => set! 
{ + "type": "string", + "enum": [ + "prefix", + "postfix", + "prefer_prefix", + "prefer_postfix", + ], + "enumDescriptions": [ + "Always show adjustment hints as prefix (`*expr`).", + "Always show adjustment hints as postfix (`expr.*`).", + "Show prefix or postfix depending on which uses less parenthesis, prefering prefix.", + "Show prefix or postfix depending on which uses less parenthesis, prefering postfix.", + ] + }, "CargoFeaturesDef" => set! { "anyOf": [ { @@ -2126,8 +2216,11 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json "The command will be executed in the project root." ], }, - "CheckOnSaveTargets" => set! { + "Option" => set! { "anyOf": [ + { + "type": "null" + }, { "type": "string", }, @@ -2137,7 +2230,7 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json }, ], }, - _ => panic!("missing entry for {}: {}", ty, default), + _ => panic!("missing entry for {ty}: {default}"), } map.into() @@ -2149,30 +2242,29 @@ fn manual(fields: &[(&'static str, &'static str, &[&str], &str)]) -> String { .iter() .map(|(field, _ty, doc, default)| { let name = format!("rust-analyzer.{}", field.replace('_', ".")); - let doc = doc_comment_to_string(*doc); + let doc = doc_comment_to_string(doc); if default.contains('\n') { format!( - r#"[[{}]]{}:: + r#"[[{name}]]{name}:: + -- Default: ---- -{} +{default} ---- -{} +{doc} -- -"#, - name, name, default, doc +"# ) } else { - format!("[[{}]]{} (default: `{}`)::\n+\n--\n{}--\n", name, name, default, doc) + format!("[[{name}]]{name} (default: `{default}`)::\n+\n--\n{doc}--\n") } }) .collect::() } fn doc_comment_to_string(doc: &[&str]) -> String { - doc.iter().map(|it| it.strip_prefix(' ').unwrap_or(it)).map(|it| format!("{}\n", it)).collect() + doc.iter().map(|it| it.strip_prefix(' ').unwrap_or(it)).map(|it| format!("{it}\n")).collect() } #[cfg(test)] @@ -2186,7 +2278,7 @@ mod tests { #[test] fn generate_package_json_config() { let s = Config::json_schema(); - let schema = format!("{:#}", s); + let schema = format!("{s:#}"); let mut schema = schema .trim_start_matches('{') .trim_end_matches('}') diff --git a/crates/rust-analyzer/src/config/patch_old_style.rs b/crates/rust-analyzer/src/config/patch_old_style.rs index 472e2e0eee..de6ac946a6 100644 --- a/crates/rust-analyzer/src/config/patch_old_style.rs +++ b/crates/rust-analyzer/src/config/patch_old_style.rs @@ -4,6 +4,9 @@ use serde_json::{json, Value}; /// This function patches the json config to the new expected keys. /// That is we try to load old known config keys here and convert them to the new ones. /// See https://github.com/rust-lang/rust-analyzer/pull/12010 +/// +/// We already have an alias system for simple cases, but if we make structural changes +/// the alias infra fails down. 
pub(super) fn patch_json_for_outdated_configs(json: &mut Value) { let copy = json.clone(); @@ -105,9 +108,9 @@ pub(super) fn patch_json_for_outdated_configs(json: &mut Value) { merge(json, json!({ "cargo": { "features": "all" } })); } - // checkOnSave_allFeatures, checkOnSave_features -> checkOnSave_features + // checkOnSave_allFeatures, checkOnSave_features -> check_features if let Some(Value::Bool(true)) = copy.pointer("/checkOnSave/allFeatures") { - merge(json, json!({ "checkOnSave": { "features": "all" } })); + merge(json, json!({ "check": { "features": "all" } })); } // completion_addCallArgumentSnippets completion_addCallParenthesis -> completion_callable_snippets @@ -116,11 +119,21 @@ pub(super) fn patch_json_for_outdated_configs(json: &mut Value) { copy.pointer("/completion/addCallParenthesis"), ) { (Some(Value::Bool(true)), Some(Value::Bool(true))) => json!("fill_arguments"), - (Some(Value::Bool(true)), _) => json!("add_parentheses"), + (_, Some(Value::Bool(true))) => json!("add_parentheses"), (Some(Value::Bool(false)), Some(Value::Bool(false))) => json!("none"), (_, _) => return, }; merge(json, json!({ "completion": { "callable": {"snippets": res }} })); + + // We need to do this due to the checkOnSave_enable -> checkOnSave change, as that key now can either be an object or a bool + // checkOnSave_* -> check_* + if let Some(Value::Object(obj)) = copy.pointer("/checkOnSave") { + // checkOnSave_enable -> checkOnSave + if let Some(b @ Value::Bool(_)) = obj.get("enable") { + merge(json, json!({ "checkOnSave": b })); + } + merge(json, json!({ "check": obj })); + } } fn merge(dst: &mut Value, src: Value) { diff --git a/crates/rust-analyzer/src/diagnostics.rs b/crates/rust-analyzer/src/diagnostics.rs index f516c194da..83b03fe473 100644 --- a/crates/rust-analyzer/src/diagnostics.rs +++ b/crates/rust-analyzer/src/diagnostics.rs @@ -101,8 +101,7 @@ impl DiagnosticCollection { file_id: FileId, ) -> impl Iterator { let native = self.native.get(&file_id).into_iter().flatten(); - let check = - self.check.values().filter_map(move |it| it.get(&file_id)).into_iter().flatten(); + let check = self.check.values().filter_map(move |it| it.get(&file_id)).flatten(); native.chain(check) } diff --git a/crates/rust-analyzer/src/diagnostics/to_proto.rs b/crates/rust-analyzer/src/diagnostics/to_proto.rs index beb23c54c9..acb416a068 100644 --- a/crates/rust-analyzer/src/diagnostics/to_proto.rs +++ b/crates/rust-analyzer/src/diagnostics/to_proto.rs @@ -161,7 +161,7 @@ fn resolve_path( .iter() .find_map(|(from, to)| file_name.strip_prefix(from).map(|file_name| (to, file_name))) { - Some((to, file_name)) => workspace_root.join(format!("{}{}", to, file_name)), + Some((to, file_name)) => workspace_root.join(format!("{to}{file_name}")), None => workspace_root.join(file_name), } } @@ -191,6 +191,7 @@ fn map_rust_child_diagnostic( let mut edit_map: HashMap> = HashMap::new(); let mut suggested_replacements = Vec::new(); + let mut is_preferred = true; for &span in &spans { if let Some(suggested_replacement) = &span.suggested_replacement { if !suggested_replacement.is_empty() { @@ -209,6 +210,8 @@ fn map_rust_child_diagnostic( ) { edit_map.entry(location.uri).or_default().push(edit); } + is_preferred &= + matches!(span.suggestion_applicability, Some(Applicability::MachineApplicable)); } } @@ -218,7 +221,7 @@ fn map_rust_child_diagnostic( if !suggested_replacements.is_empty() { message.push_str(": "); let suggestions = - suggested_replacements.iter().map(|suggestion| format!("`{}`", suggestion)).join(", "); + 
suggested_replacements.iter().map(|suggestion| format!("`{suggestion}`")).join(", "); message.push_str(&suggestions); } @@ -251,7 +254,7 @@ fn map_rust_child_diagnostic( document_changes: None, change_annotations: None, }), - is_preferred: Some(true), + is_preferred: Some(is_preferred), data: None, command: None, }, @@ -493,7 +496,7 @@ fn rustc_code_description(code: Option<&str>) -> Option) -> Option) -> Option { code.and_then(|code| { lsp_types::Url::parse(&format!( - "https://rust-lang.github.io/rust-clippy/master/index.html#{}", - code + "https://rust-lang.github.io/rust-clippy/master/index.html#{code}" )) .ok() .map(|href| lsp_types::CodeDescription { href }) diff --git a/crates/rust-analyzer/src/dispatch.rs b/crates/rust-analyzer/src/dispatch.rs index 57899b5991..715804449a 100644 --- a/crates/rust-analyzer/src/dispatch.rs +++ b/crates/rust-analyzer/src/dispatch.rs @@ -145,7 +145,7 @@ impl<'a> RequestDispatcher<'a> { match res { Ok(params) => { let panic_context = - format!("\nversion: {}\nrequest: {} {:#?}", version(), R::METHOD, params); + format!("\nversion: {}\nrequest: {} {params:#?}", version(), R::METHOD); Some((req, params, panic_context)) } Err(err) => { diff --git a/crates/rust-analyzer/src/from_proto.rs b/crates/rust-analyzer/src/from_proto.rs index dd433b0f4d..2dbb14fcd9 100644 --- a/crates/rust-analyzer/src/from_proto.rs +++ b/crates/rust-analyzer/src/from_proto.rs @@ -25,12 +25,9 @@ pub(crate) fn vfs_path(url: &lsp_types::Url) -> Result { pub(crate) fn offset(line_index: &LineIndex, position: lsp_types::Position) -> Result { let line_col = match line_index.encoding { - PositionEncoding::Utf8 => { - LineCol { line: position.line as u32, col: position.character as u32 } - } + PositionEncoding::Utf8 => LineCol { line: position.line, col: position.character }, PositionEncoding::Utf16 => { - let line_col = - LineColUtf16 { line: position.line as u32, col: position.character as u32 }; + let line_col = LineColUtf16 { line: position.line, col: position.character }; line_index.index.to_utf8(line_col) } }; @@ -67,7 +64,15 @@ pub(crate) fn file_range( text_document_identifier: lsp_types::TextDocumentIdentifier, range: lsp_types::Range, ) -> Result { - let file_id = file_id(snap, &text_document_identifier.uri)?; + file_range_uri(snap, &text_document_identifier.uri, range) +} + +pub(crate) fn file_range_uri( + snap: &GlobalStateSnapshot, + document: &lsp_types::Url, + range: lsp_types::Range, +) -> Result { + let file_id = file_id(snap, document)?; let line_index = snap.file_line_index(file_id)?; let range = text_range(&line_index, range)?; Ok(FileRange { file_id, range }) diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs index 4e8bc8d646..c6f4e9ce07 100644 --- a/crates/rust-analyzer/src/global_state.rs +++ b/crates/rust-analyzer/src/global_state.rs @@ -134,7 +134,7 @@ impl GlobalState { let task_pool = { let (sender, receiver) = unbounded(); - let handle = TaskPool::new(sender); + let handle = TaskPool::new_with_threads(sender, config.main_loop_num_threads()); Handle { handle, receiver } }; @@ -429,6 +429,6 @@ pub(crate) fn file_id_to_url(vfs: &vfs::Vfs, id: FileId) -> Url { pub(crate) fn url_to_file_id(vfs: &vfs::Vfs, url: &Url) -> Result { let path = from_proto::vfs_path(url)?; - let res = vfs.file_id(&path).ok_or_else(|| format!("file not found: {}", path))?; + let res = vfs.file_id(&path).ok_or_else(|| format!("file not found: {path}"))?; Ok(res) } diff --git a/crates/rust-analyzer/src/handlers.rs 
b/crates/rust-analyzer/src/handlers.rs index d190a9f4e2..59bdd30612 100644 --- a/crates/rust-analyzer/src/handlers.rs +++ b/crates/rust-analyzer/src/handlers.rs @@ -28,7 +28,8 @@ use lsp_types::{ use project_model::{ManifestPath, ProjectWorkspace, TargetKind}; use serde_json::json; use stdx::{format_to, never}; -use syntax::{algo, ast, AstNode, TextRange, TextSize, T}; +use syntax::{algo, ast, AstNode, TextRange, TextSize}; +use tracing::error; use vfs::AbsPathBuf; use crate::{ @@ -729,7 +730,7 @@ pub(crate) fn handle_runnables( Some(spec) => { for cmd in ["check", "test"] { res.push(lsp_ext::Runnable { - label: format!("cargo {} -p {} --all-targets", cmd, spec.package), + label: format!("cargo {cmd} -p {} --all-targets", spec.package), location: None, kind: lsp_ext::RunnableKind::Cargo, args: lsp_ext::CargoRunnable { @@ -812,18 +813,6 @@ pub(crate) fn handle_completion( let completion_trigger_character = params.context.and_then(|ctx| ctx.trigger_character).and_then(|s| s.chars().next()); - if Some(':') == completion_trigger_character { - let source_file = snap.analysis.parse(position.file_id)?; - let left_token = source_file.syntax().token_at_offset(position.offset).left_biased(); - let completion_triggered_after_single_colon = match left_token { - Some(left_token) => left_token.kind() == T![:], - None => true, - }; - if completion_triggered_after_single_colon { - return Ok(None); - } - } - let completion_config = &snap.config.completion(); let items = match snap.analysis.completions( completion_config, @@ -910,7 +899,7 @@ pub(crate) fn handle_folding_range( let line_folding_only = snap.config.line_folding_only(); let res = folds .into_iter() - .map(|it| to_proto::folding_range(&*text, &line_index, line_folding_only, it)) + .map(|it| to_proto::folding_range(&text, &line_index, line_folding_only, it)) .collect(); Ok(Some(res)) } @@ -990,7 +979,7 @@ pub(crate) fn handle_rename( let position = from_proto::file_position(&snap, params.text_document_position)?; let mut change = - snap.analysis.rename(position, &*params.new_name)?.map_err(to_proto::rename_error)?; + snap.analysis.rename(position, ¶ms.new_name)?.map_err(to_proto::rename_error)?; // this is kind of a hack to prevent double edits from happening when moving files // When a module gets renamed by renaming the mod declaration this causes the file to move @@ -1112,9 +1101,7 @@ pub(crate) fn handle_code_action( } // Fixes from `cargo check`. - for fix in - snap.check_fixes.values().filter_map(|it| it.get(&frange.file_id)).into_iter().flatten() - { + for fix in snap.check_fixes.values().filter_map(|it| it.get(&frange.file_id)).flatten() { // FIXME: this mapping is awkward and shouldn't exist. Refactor // `snap.check_fixes` to not convert to LSP prematurely. 
let intersect_fix_range = fix @@ -1157,8 +1144,8 @@ pub(crate) fn handle_code_action_resolve( Ok(parsed_data) => parsed_data, Err(e) => { return Err(invalid_params_error(format!( - "Failed to parse action id string '{}': {}", - params.id, e + "Failed to parse action id string '{}': {e}", + params.id )) .into()) } @@ -1202,7 +1189,7 @@ fn parse_action_id(action_id: &str) -> Result<(usize, SingleResolve), String> { let assist_kind: AssistKind = assist_kind_string.parse()?; let index: usize = match index_string.parse() { Ok(index) => index, - Err(e) => return Err(format!("Incorrect index string: {}", e)), + Err(e) => return Err(format!("Incorrect index string: {e}")), }; Ok((index, SingleResolve { assist_id: assist_id_string.to_string(), assist_kind })) } @@ -1384,9 +1371,26 @@ pub(crate) fn handle_inlay_hints_resolve( let resolve_data: lsp_ext::InlayHintResolveData = serde_json::from_value(data)?; - let file_range = from_proto::file_range( + match snap.url_file_version(&resolve_data.text_document.uri) { + Some(version) if version == resolve_data.text_document.version => {} + Some(version) => { + error!( + "attempted inlayHints/resolve of '{}' at version {} while server version is {}", + resolve_data.text_document.uri, resolve_data.text_document.version, version, + ); + return Ok(hint); + } + None => { + error!( + "attempted inlayHints/resolve of unknown file '{}' at version {}", + resolve_data.text_document.uri, resolve_data.text_document.version, + ); + return Ok(hint); + } + } + let file_range = from_proto::file_range_uri( &snap, - resolve_data.text_document, + &resolve_data.text_document.uri, match resolve_data.position { PositionOrRange::Position(pos) => Range::new(pos, pos), PositionOrRange::Range(range) => range, @@ -1782,14 +1786,15 @@ fn run_rustfmt( let file_id = from_proto::file_id(snap, &text_document.uri)?; let file = snap.analysis.file_text(file_id)?; - // find the edition of the package the file belongs to - // (if it belongs to multiple we'll just pick the first one and pray) - let edition = snap + // Determine the edition of the crate the file belongs to (if there's multiple, we pick the + // highest edition). + let editions = snap .analysis .relevant_crates_for(file_id)? 
.into_iter() - .find_map(|crate_id| snap.cargo_target_for_crate_root(crate_id)) - .map(|(ws, target)| ws[ws[target].package].edition); + .map(|crate_id| snap.analysis.crate_edition(crate_id)) + .collect::, _>>()?; + let edition = editions.iter().copied().max(); let line_index = snap.file_line_index(file_id)?; @@ -1863,7 +1868,7 @@ fn run_rustfmt( .stdout(Stdio::piped()) .stderr(Stdio::piped()) .spawn() - .context(format!("Failed to spawn {:?}", command))?; + .context(format!("Failed to spawn {command:?}"))?; rustfmt.stdin.as_mut().unwrap().write_all(file.as_bytes())?; @@ -1896,9 +1901,9 @@ fn run_rustfmt( format!( r#"rustfmt exited with: Status: {} - stdout: {} - stderr: {}"#, - output.status, captured_stdout, captured_stderr, + stdout: {captured_stdout} + stderr: {captured_stderr}"#, + output.status, ), ) .into()) diff --git a/crates/rust-analyzer/src/integrated_benchmarks.rs b/crates/rust-analyzer/src/integrated_benchmarks.rs index 96b1cb6b12..405d261db6 100644 --- a/crates/rust-analyzer/src/integrated_benchmarks.rs +++ b/crates/rust-analyzer/src/integrated_benchmarks.rs @@ -48,7 +48,7 @@ fn integrated_highlighting_benchmark() { let file_id = { let file = workspace_to_load.join(file); let path = VfsPath::from(AbsPathBuf::assert(file)); - vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {}", path)) + vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {path}")) }; { @@ -102,7 +102,7 @@ fn integrated_completion_benchmark() { let file_id = { let file = workspace_to_load.join(file); let path = VfsPath::from(AbsPathBuf::assert(file)); - vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {}", path)) + vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {path}")) }; { diff --git a/crates/rust-analyzer/src/lib.rs b/crates/rust-analyzer/src/lib.rs index 552379752f..32dc3750fd 100644 --- a/crates/rust-analyzer/src/lib.rs +++ b/crates/rust-analyzer/src/lib.rs @@ -55,7 +55,7 @@ pub type Result = std::result::Result; pub fn from_json(what: &'static str, json: &serde_json::Value) -> Result { let res = serde_json::from_value(json.clone()) - .map_err(|e| format!("Failed to deserialize {}: {}; {}", what, e, json))?; + .map_err(|e| format!("Failed to deserialize {what}: {e}; {json}"))?; Ok(res) } diff --git a/crates/rust-analyzer/src/lsp_ext.rs b/crates/rust-analyzer/src/lsp_ext.rs index 8cc5648f3c..65620b4209 100644 --- a/crates/rust-analyzer/src/lsp_ext.rs +++ b/crates/rust-analyzer/src/lsp_ext.rs @@ -3,11 +3,11 @@ use std::{collections::HashMap, path::PathBuf}; use lsp_types::request::Request; -use lsp_types::PositionEncodingKind; use lsp_types::{ notification::Notification, CodeActionKind, DocumentOnTypeFormattingParams, PartialResultParams, Position, Range, TextDocumentIdentifier, WorkDoneProgressParams, }; +use lsp_types::{PositionEncodingKind, VersionedTextDocumentIdentifier}; use serde::{Deserialize, Serialize}; pub enum AnalyzerStatus {} @@ -132,12 +132,31 @@ pub struct ExpandedMacro { pub enum CancelFlycheck {} -impl Request for CancelFlycheck { +impl Notification for CancelFlycheck { type Params = (); - type Result = (); const METHOD: &'static str = "rust-analyzer/cancelFlycheck"; } +pub enum RunFlycheck {} + +impl Notification for RunFlycheck { + type Params = RunFlycheckParams; + const METHOD: &'static str = "rust-analyzer/runFlycheck"; +} + +pub enum ClearFlycheck {} + +impl Notification for ClearFlycheck { + type Params = (); + const METHOD: &'static str = "rust-analyzer/clearFlycheck"; +} + 
+#[derive(Deserialize, Serialize, Debug)] +#[serde(rename_all = "camelCase")] +pub struct RunFlycheckParams { + pub text_document: Option, +} + pub enum MatchingBrace {} impl Request for MatchingBrace { @@ -550,7 +569,7 @@ pub struct CompletionResolveData { #[derive(Debug, Serialize, Deserialize)] pub struct InlayHintResolveData { - pub text_document: TextDocumentIdentifier, + pub text_document: VersionedTextDocumentIdentifier, pub position: PositionOrRange, } diff --git a/crates/rust-analyzer/src/lsp_utils.rs b/crates/rust-analyzer/src/lsp_utils.rs index 0971dc36f3..dcaee92857 100644 --- a/crates/rust-analyzer/src/lsp_utils.rs +++ b/crates/rust-analyzer/src/lsp_utils.rs @@ -98,7 +98,7 @@ impl GlobalState { }); let cancellable = Some(cancel_token.is_some()); let token = lsp_types::ProgressToken::String( - cancel_token.unwrap_or_else(|| format!("rustAnalyzer/{}", title)), + cancel_token.unwrap_or_else(|| format!("rustAnalyzer/{title}")), ); let work_done_progress = match state { Progress::Begin => { diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs index 274588ce0e..0bc940dfe8 100644 --- a/crates/rust-analyzer/src/main_loop.rs +++ b/crates/rust-analyzer/src/main_loop.rs @@ -229,8 +229,8 @@ impl GlobalState { message = match &report.crates_currently_indexing[..] { [crate_name] => Some(format!( - "{}/{} ({})", - report.crates_done, report.crates_total, crate_name + "{}/{} ({crate_name})", + report.crates_done, report.crates_total )), [crate_name, rest @ ..] => Some(format!( "{}/{} ({} + {} more)", @@ -414,10 +414,7 @@ impl GlobalState { let loop_duration = loop_start.elapsed(); if loop_duration > Duration::from_millis(100) && was_quiescent { tracing::warn!("overly long loop turn: {:?}", loop_duration); - self.poke_rust_analyzer_developer(format!( - "overly long loop turn: {:?}", - loop_duration - )); + self.poke_rust_analyzer_developer(format!("overly long loop turn: {loop_duration:?}")); } Ok(()) } @@ -516,7 +513,7 @@ impl GlobalState { self.report_progress( "Roots Scanned", state, - Some(format!("{}/{}", n_done, n_total)), + Some(format!("{n_done}/{n_total}")), Some(Progress::fraction(n_done, n_total)), None, ) @@ -561,10 +558,7 @@ impl GlobalState { flycheck::Progress::DidCheckCrate(target) => (Progress::Report, Some(target)), flycheck::Progress::DidCancel => (Progress::End, None), flycheck::Progress::DidFailToRestart(err) => { - self.show_and_log_error( - "cargo check failed".to_string(), - Some(err.to_string()), - ); + self.show_and_log_error("cargo check failed".to_string(), Some(err)); return; } flycheck::Progress::DidFinish(result) => { @@ -581,10 +575,7 @@ impl GlobalState { // When we're running multiple flychecks, we have to include a disambiguator in // the title, or the editor complains. Note that this is a user-facing string. 
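`RunFlycheck` and `ClearFlycheck` above are plain LSP extension notifications, and `CancelFlycheck` changes from a request to a notification. A sketch of the JSON a client could send to trigger a check for one saved file, or for everything; the URI is made up, and the `params` shape simply follows `RunFlycheckParams` with its camelCase renaming:

```rust
use serde_json::json;

fn main() {
    // Run flycheck for the workspace(s) containing one specific file.
    let run_for_one_file = json!({
        "jsonrpc": "2.0",
        "method": "rust-analyzer/runFlycheck",
        "params": { "textDocument": { "uri": "file:///home/user/project/src/lib.rs" } }
    });

    // Run flycheck for every workspace (no document given).
    let run_everything = json!({
        "jsonrpc": "2.0",
        "method": "rust-analyzer/runFlycheck",
        "params": { "textDocument": null }
    });

    println!("{run_for_one_file:#}\n{run_everything:#}");
}
```

Notifications carry no `id`, so unlike the old `CancelFlycheck` request the server never sends a response for them.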
let title = if self.flycheck.len() == 1 { - match self.config.flycheck() { - Some(config) => format!("{}", config), - None => "cargo check".to_string(), - } + format!("{}", self.config.flycheck()) } else { format!("cargo check (#{})", id + 1) }; @@ -593,7 +584,7 @@ impl GlobalState { state, message, None, - Some(format!("rust-analyzer/checkOnSave/{}", id)), + Some(format!("rust-analyzer/flycheck/{id}")), ); } } @@ -638,7 +629,6 @@ impl GlobalState { .on_sync_mut::(handlers::handle_workspace_reload) .on_sync_mut::(handlers::handle_memory_usage) .on_sync_mut::(handlers::handle_shuffle_crate_graph) - .on_sync_mut::(handlers::handle_cancel_flycheck) .on_sync::(handlers::handle_join_lines) .on_sync::(handlers::handle_on_enter) .on_sync::(handlers::handle_selection_range) @@ -703,6 +693,88 @@ impl GlobalState { /// Handles an incoming notification. fn on_notification(&mut self, not: Notification) -> Result<()> { + // FIXME: Move these implementations out into a module similar to on_request + fn run_flycheck(this: &mut GlobalState, vfs_path: VfsPath) -> bool { + let file_id = this.vfs.read().0.file_id(&vfs_path); + if let Some(file_id) = file_id { + let world = this.snapshot(); + let mut updated = false; + let task = move || -> std::result::Result<(), ide::Cancelled> { + // Trigger flychecks for all workspaces that depend on the saved file + // Crates containing or depending on the saved file + let crate_ids: Vec<_> = world + .analysis + .crates_for(file_id)? + .into_iter() + .flat_map(|id| world.analysis.transitive_rev_deps(id)) + .flatten() + .sorted() + .unique() + .collect(); + + let crate_root_paths: Vec<_> = crate_ids + .iter() + .filter_map(|&crate_id| { + world + .analysis + .crate_root(crate_id) + .map(|file_id| { + world + .file_id_to_file_path(file_id) + .as_path() + .map(ToOwned::to_owned) + }) + .transpose() + }) + .collect::>()?; + let crate_root_paths: Vec<_> = + crate_root_paths.iter().map(Deref::deref).collect(); + + // Find all workspaces that have at least one target containing the saved file + let workspace_ids = + world.workspaces.iter().enumerate().filter(|(_, ws)| match ws { + project_model::ProjectWorkspace::Cargo { cargo, .. } => { + cargo.packages().any(|pkg| { + cargo[pkg].targets.iter().any(|&it| { + crate_root_paths.contains(&cargo[it].root.as_path()) + }) + }) + } + project_model::ProjectWorkspace::Json { project, .. } => project + .crates() + .any(|(c, _)| crate_ids.iter().any(|&crate_id| crate_id == c)), + project_model::ProjectWorkspace::DetachedFiles { .. } => false, + }); + + // Find and trigger corresponding flychecks + for flycheck in world.flycheck.iter() { + for (id, _) in workspace_ids.clone() { + if id == flycheck.id() { + updated = true; + flycheck.restart(); + continue; + } + } + } + // No specific flycheck was triggered, so let's trigger all of them. + if !updated { + for flycheck in world.flycheck.iter() { + flycheck.restart(); + } + } + Ok(()) + }; + this.task_pool.handle.spawn_with_sender(move |_| { + if let Err(e) = std::panic::catch_unwind(task) { + tracing::error!("flycheck task panicked: {e:?}") + } + }); + true + } else { + false + } + } + NotificationDispatcher { not: Some(not), global_state: self } .on::(|this, params| { let id: lsp_server::RequestId = match params.id { @@ -714,7 +786,7 @@ impl GlobalState { })? 
.on::(|this, params| { if let lsp_types::NumberOrString::String(s) = ¶ms.token { - if let Some(id) = s.strip_prefix("rust-analyzer/checkOnSave/") { + if let Some(id) = s.strip_prefix("rust-analyzer/flycheck/") { if let Ok(id) = u32::from_str_radix(id, 10) { if let Some(flycheck) = this.flycheck.get(id as usize) { flycheck.cancel(); @@ -743,6 +815,7 @@ impl GlobalState { } Ok(()) })? + .on::(handlers::handle_cancel_flycheck)? .on::(|this, params| { if let Ok(path) = from_proto::vfs_path(¶ms.text_document.uri) { match this.mem_docs.get_mut(&path) { @@ -782,99 +855,42 @@ impl GlobalState { } Ok(()) })? + .on::(|this, ()| { + this.diagnostics.clear_check_all(); + Ok(()) + })? + .on::(|this, params| { + if let Some(text_document) = params.text_document { + if let Ok(vfs_path) = from_proto::vfs_path(&text_document.uri) { + if run_flycheck(this, vfs_path) { + return Ok(()); + } + } + } + // No specific flycheck was triggered, so let's trigger all of them. + for flycheck in this.flycheck.iter() { + flycheck.restart(); + } + Ok(()) + })? .on::(|this, params| { if let Ok(vfs_path) = from_proto::vfs_path(¶ms.text_document.uri) { // Re-fetch workspaces if a workspace related file has changed if let Some(abs_path) = vfs_path.as_path() { - if reload::should_refresh_for_change(&abs_path, ChangeKind::Modify) { + if reload::should_refresh_for_change(abs_path, ChangeKind::Modify) { this.fetch_workspaces_queue .request_op(format!("DidSaveTextDocument {}", abs_path.display())); } } - let file_id = this.vfs.read().0.file_id(&vfs_path); - if let Some(file_id) = file_id { - let world = this.snapshot(); - let mut updated = false; - let task = move || -> std::result::Result<(), ide::Cancelled> { - // Trigger flychecks for all workspaces that depend on the saved file - // Crates containing or depending on the saved file - let crate_ids: Vec<_> = world - .analysis - .crates_for(file_id)? - .into_iter() - .flat_map(|id| world.analysis.transitive_rev_deps(id)) - .flatten() - .sorted() - .unique() - .collect(); - - let crate_root_paths: Vec<_> = crate_ids - .iter() - .filter_map(|&crate_id| { - world - .analysis - .crate_root(crate_id) - .map(|file_id| { - world - .file_id_to_file_path(file_id) - .as_path() - .map(ToOwned::to_owned) - }) - .transpose() - }) - .collect::>()?; - let crate_root_paths: Vec<_> = - crate_root_paths.iter().map(Deref::deref).collect(); - - // Find all workspaces that have at least one target containing the saved file - let workspace_ids = - world.workspaces.iter().enumerate().filter(|(_, ws)| match ws { - project_model::ProjectWorkspace::Cargo { cargo, .. } => { - cargo.packages().any(|pkg| { - cargo[pkg].targets.iter().any(|&it| { - crate_root_paths.contains(&cargo[it].root.as_path()) - }) - }) - } - project_model::ProjectWorkspace::Json { project, .. } => { - project.crates().any(|(c, _)| { - crate_ids.iter().any(|&crate_id| crate_id == c) - }) - } - project_model::ProjectWorkspace::DetachedFiles { .. } => false, - }); - - // Find and trigger corresponding flychecks - for flycheck in world.flycheck.iter() { - for (id, _) in workspace_ids.clone() { - if id == flycheck.id() { - updated = true; - flycheck.restart(); - continue; - } - } - } - // No specific flycheck was triggered, so let's trigger all of them. 
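The `run_flycheck` helper introduced above restarts only the flycheck instances whose workspace contains (or depends on) the saved file, and falls back to restarting all of them when nothing matches. A distilled sketch of that fallback, with a stand-in `Flycheck` type instead of the real `FlycheckHandle` and plain workspace indices instead of crate-graph queries:

```rust
struct Flycheck {
    id: usize,
}

impl Flycheck {
    fn restart(&self) {
        println!("restarting flycheck #{}", self.id);
    }
}

// `matching_workspaces` stands in for the set of workspace ids that were
// found to contain a crate affected by the saved file.
fn restart_for_save(flychecks: &[Flycheck], matching_workspaces: &[usize]) {
    let mut updated = false;
    for flycheck in flychecks {
        if matching_workspaces.contains(&flycheck.id) {
            flycheck.restart();
            updated = true;
        }
    }
    // Nothing matched (e.g. a detached file): restart everything.
    if !updated {
        for flycheck in flychecks {
            flycheck.restart();
        }
    }
}

fn main() {
    let flychecks = [Flycheck { id: 0 }, Flycheck { id: 1 }];
    restart_for_save(&flychecks, &[1]); // only #1 restarts
    restart_for_save(&flychecks, &[]);  // no match: both restart
}
```

In the real handler this work is spawned on the task pool so that computing reverse dependencies does not block the main loop.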
- if !updated { - for flycheck in world.flycheck.iter() { - flycheck.restart(); - } - } - Ok(()) - }; - this.task_pool.handle.spawn_with_sender(move |_| { - if let Err(e) = std::panic::catch_unwind(task) { - tracing::error!("DidSaveTextDocument flycheck task panicked: {e:?}") - } - }); + if !this.config.check_on_save() || run_flycheck(this, vfs_path) { return Ok(()); } - } - - // No specific flycheck was triggered, so let's trigger all of them. - for flycheck in this.flycheck.iter() { - flycheck.restart(); + } else if this.config.check_on_save() { + // No specific flycheck was triggered, so let's trigger all of them. + for flycheck in this.flycheck.iter() { + flycheck.restart(); + } } Ok(()) })? diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs index fcfe4be0b8..9bbce70ec0 100644 --- a/crates/rust-analyzer/src/reload.rs +++ b/crates/rust-analyzer/src/reload.rs @@ -158,8 +158,10 @@ impl GlobalState { .collect::>(); if !detached_files.is_empty() { - workspaces - .push(project_model::ProjectWorkspace::load_detached_files(detached_files)); + workspaces.push(project_model::ProjectWorkspace::load_detached_files( + detached_files, + &cargo_config, + )); } tracing::info!("did fetch workspaces {:?}", workspaces); @@ -224,6 +226,7 @@ impl GlobalState { build_scripts: _, toolchain: _, + target_layout: _, } => Some((cargo, sysroot, rustc, rustc_cfg, cfg_overrides)), _ => None, }; @@ -447,15 +450,7 @@ impl GlobalState { fn reload_flycheck(&mut self) { let _p = profile::span("GlobalState::reload_flycheck"); - let config = match self.config.flycheck() { - Some(it) => it, - None => { - self.flycheck = Arc::new([]); - self.diagnostics.clear_check_all(); - return; - } - }; - + let config = self.config.flycheck(); let sender = self.flycheck_sender.clone(); let invocation_strategy = match config { FlycheckConfig::CargoCommand { .. 
} => flycheck::InvocationStrategy::PerWorkspace, @@ -466,7 +461,7 @@ impl GlobalState { flycheck::InvocationStrategy::Once => vec![FlycheckHandle::spawn( 0, Box::new(move |msg| sender.send(msg).unwrap()), - config.clone(), + config, self.config.root_path().clone(), )], flycheck::InvocationStrategy::PerWorkspace => { diff --git a/crates/rust-analyzer/src/semantic_tokens.rs b/crates/rust-analyzer/src/semantic_tokens.rs index c48410ed55..c2cc3f422d 100644 --- a/crates/rust-analyzer/src/semantic_tokens.rs +++ b/crates/rust-analyzer/src/semantic_tokens.rs @@ -161,8 +161,8 @@ impl SemanticTokensBuilder { /// Push a new token onto the builder pub(crate) fn push(&mut self, range: Range, token_index: u32, modifier_bitset: u32) { - let mut push_line = range.start.line as u32; - let mut push_char = range.start.character as u32; + let mut push_line = range.start.line; + let mut push_char = range.start.character; if !self.data.is_empty() { push_line -= self.prev_line; @@ -177,15 +177,15 @@ impl SemanticTokensBuilder { let token = SemanticToken { delta_line: push_line, delta_start: push_char, - length: token_len as u32, + length: token_len, token_type: token_index, token_modifiers_bitset: modifier_bitset, }; self.data.push(token); - self.prev_line = range.start.line as u32; - self.prev_char = range.start.character as u32; + self.prev_line = range.start.line; + self.prev_char = range.start.character; } pub(crate) fn build(self) -> SemanticTokens { diff --git a/crates/rust-analyzer/src/task_pool.rs b/crates/rust-analyzer/src/task_pool.rs index aeeb3b7c58..616e449984 100644 --- a/crates/rust-analyzer/src/task_pool.rs +++ b/crates/rust-analyzer/src/task_pool.rs @@ -8,12 +8,13 @@ pub(crate) struct TaskPool { } impl TaskPool { - pub(crate) fn new(sender: Sender) -> TaskPool { + pub(crate) fn new_with_threads(sender: Sender, threads: usize) -> TaskPool { const STACK_SIZE: usize = 8 * 1024 * 1024; let inner = threadpool::Builder::new() .thread_name("Worker".into()) .thread_stack_size(STACK_SIZE) + .num_threads(threads) .build(); TaskPool { sender, inner } } diff --git a/crates/rust-analyzer/src/to_proto.rs b/crates/rust-analyzer/src/to_proto.rs index 81cc1952ba..e736b2ff9a 100644 --- a/crates/rust-analyzer/src/to_proto.rs +++ b/crates/rust-analyzer/src/to_proto.rs @@ -228,7 +228,7 @@ fn completion_item( max_relevance: u32, item: CompletionItem, ) { - let insert_replace_support = config.insert_replace_support().then(|| tdpp.position); + let insert_replace_support = config.insert_replace_support().then_some(tdpp.position); let mut additional_text_edits = Vec::new(); // LSP does not allow arbitrary edits in completion, so we have to do a @@ -258,7 +258,7 @@ fn completion_item( text_edit.unwrap() }; - let insert_text_format = item.is_snippet().then(|| lsp_types::InsertTextFormat::SNIPPET); + let insert_text_format = item.is_snippet().then_some(lsp_types::InsertTextFormat::SNIPPET); let tags = item.deprecated().then(|| vec![lsp_types::CompletionItemTag::DEPRECATED]); let command = if item.trigger_call_info() && config.client_commands().trigger_parameter_hints { Some(command::trigger_parameter_hints()) @@ -342,7 +342,7 @@ fn completion_item( // by the client. Hex format is used because it is easier to // visually compare very large values, which the sort text // tends to be since it is the opposite of the score. 
- res.sort_text = Some(format!("{:08x}", sort_score)); + res.sort_text = Some(format!("{sort_score:08x}")); } } @@ -434,42 +434,52 @@ pub(crate) fn inlay_hint( InlayKind::ParameterHint if render_colons => inlay_hint.label.append_str(":"), InlayKind::TypeHint if render_colons => inlay_hint.label.prepend_str(": "), InlayKind::ClosureReturnTypeHint => inlay_hint.label.prepend_str(" -> "), + InlayKind::DiscriminantHint => inlay_hint.label.prepend_str(" = "), _ => {} } Ok(lsp_types::InlayHint { position: match inlay_hint.kind { // before annotated thing - InlayKind::ParameterHint | InlayKind::AdjustmentHint | InlayKind::BindingModeHint => { - position(line_index, inlay_hint.range.start()) - } + InlayKind::OpeningParenthesis + | InlayKind::ParameterHint + | InlayKind::AdjustmentHint + | InlayKind::BindingModeHint => position(line_index, inlay_hint.range.start()), // after annotated thing InlayKind::ClosureReturnTypeHint | InlayKind::TypeHint + | InlayKind::DiscriminantHint | InlayKind::ChainingHint | InlayKind::GenericParamListHint - | InlayKind::AdjustmentHintClosingParenthesis + | InlayKind::ClosingParenthesis + | InlayKind::AdjustmentHintPostfix | InlayKind::LifetimeHint | InlayKind::ClosingBraceHint => position(line_index, inlay_hint.range.end()), }, padding_left: Some(match inlay_hint.kind { InlayKind::TypeHint => !render_colons, InlayKind::ChainingHint | InlayKind::ClosingBraceHint => true, - InlayKind::AdjustmentHintClosingParenthesis + InlayKind::ClosingParenthesis + | InlayKind::DiscriminantHint + | InlayKind::OpeningParenthesis | InlayKind::BindingModeHint | InlayKind::ClosureReturnTypeHint | InlayKind::GenericParamListHint | InlayKind::AdjustmentHint + | InlayKind::AdjustmentHintPostfix | InlayKind::LifetimeHint | InlayKind::ParameterHint => false, }), padding_right: Some(match inlay_hint.kind { - InlayKind::AdjustmentHintClosingParenthesis + InlayKind::ClosingParenthesis + | InlayKind::OpeningParenthesis | InlayKind::ChainingHint | InlayKind::ClosureReturnTypeHint | InlayKind::GenericParamListHint | InlayKind::AdjustmentHint + | InlayKind::AdjustmentHintPostfix | InlayKind::TypeHint + | InlayKind::DiscriminantHint | InlayKind::ClosingBraceHint => false, InlayKind::BindingModeHint => inlay_hint.label.as_simple_str() != Some("&"), InlayKind::ParameterHint | InlayKind::LifetimeHint => true, @@ -479,11 +489,14 @@ pub(crate) fn inlay_hint( InlayKind::ClosureReturnTypeHint | InlayKind::TypeHint | InlayKind::ChainingHint => { Some(lsp_types::InlayHintKind::TYPE) } - InlayKind::AdjustmentHintClosingParenthesis + InlayKind::ClosingParenthesis + | InlayKind::DiscriminantHint + | InlayKind::OpeningParenthesis | InlayKind::BindingModeHint | InlayKind::GenericParamListHint | InlayKind::LifetimeHint | InlayKind::AdjustmentHint + | InlayKind::AdjustmentHintPostfix | InlayKind::ClosingBraceHint => None, }, text_edits: None, @@ -492,7 +505,10 @@ pub(crate) fn inlay_hint( let uri = url(snap, file_id); let line_index = snap.file_line_index(file_id).ok()?; - let text_document = lsp_types::TextDocumentIdentifier { uri }; + let text_document = lsp_types::VersionedTextDocumentIdentifier { + version: snap.url_file_version(&uri)?, + uri, + }; to_value(lsp_ext::InlayHintResolveData { text_document, position: lsp_ext::PositionOrRange::Position(position(&line_index, offset)), @@ -501,7 +517,10 @@ pub(crate) fn inlay_hint( } Some(ide::InlayTooltip::HoverRanged(file_id, text_range)) => { let uri = url(snap, file_id); - let text_document = lsp_types::TextDocumentIdentifier { uri }; + let text_document = 
lsp_types::VersionedTextDocumentIdentifier { + version: snap.url_file_version(&uri)?, + uri, + }; let line_index = snap.file_line_index(file_id).ok()?; to_value(lsp_ext::InlayHintResolveData { text_document, @@ -1103,7 +1122,7 @@ pub(crate) fn code_action( (Some(it), _) => res.edit = Some(snippet_workspace_edit(snap, it)?), (None, Some((index, code_action_params))) => { res.data = Some(lsp_ext::CodeActionData { - id: format!("{}:{}:{}", assist.id.0, assist.id.1.name(), index), + id: format!("{}:{}:{index}", assist.id.0, assist.id.1.name()), code_action_params, }); } @@ -1164,7 +1183,10 @@ pub(crate) fn code_lens( let r = runnable(snap, run)?; let lens_config = snap.config.lens(); - if lens_config.run && client_commands_config.run_single { + if lens_config.run + && client_commands_config.run_single + && r.args.workspace_root.is_some() + { let command = command::run_single(&r, &title); acc.push(lsp_types::CodeLens { range: annotation_range, @@ -1339,7 +1361,7 @@ pub(crate) fn implementation_title(count: usize) -> String { if count == 1 { "1 implementation".into() } else { - format!("{} implementations", count) + format!("{count} implementations") } } @@ -1347,7 +1369,7 @@ pub(crate) fn reference_title(count: usize) -> String { if count == 1 { "1 reference".into() } else { - format!("{} references", count) + format!("{count} references") } } diff --git a/crates/rust-analyzer/tests/slow-tests/main.rs b/crates/rust-analyzer/tests/slow-tests/main.rs index fa55f7d90c..5e3e19d44d 100644 --- a/crates/rust-analyzer/tests/slow-tests/main.rs +++ b/crates/rust-analyzer/tests/slow-tests/main.rs @@ -263,7 +263,7 @@ mod tests { for runnable in ["consumer", "dependency", "devdependency"] { server.request::( RunnablesParams { - text_document: server.doc_id(&format!("{}/src/lib.rs", runnable)), + text_document: server.doc_id(&format!("{runnable}/src/lib.rs")), position: None, }, json!([ @@ -528,14 +528,13 @@ fn test_missing_module_code_action_in_json_project() { let code = format!( r#" //- /rust-project.json -{PROJECT} +{project} //- /src/lib.rs mod bar; fn main() {{}} "#, - PROJECT = project, ); let server = @@ -595,8 +594,8 @@ fn diagnostics_dont_block_typing() { return; } - let librs: String = (0..10).map(|i| format!("mod m{};", i)).collect(); - let libs: String = (0..10).map(|i| format!("//- /src/m{}.rs\nfn foo() {{}}\n\n", i)).collect(); + let librs: String = (0..10).map(|i| format!("mod m{i};")).collect(); + let libs: String = (0..10).map(|i| format!("//- /src/m{i}.rs\nfn foo() {{}}\n\n")).collect(); let server = Project::with_fixture(&format!( r#" //- /Cargo.toml @@ -605,13 +604,12 @@ name = "foo" version = "0.0.0" //- /src/lib.rs -{} +{librs} -{} +{libs} fn main() {{}} -"#, - librs, libs +"# )) .with_config(serde_json::json!({ "cargo": { "sysroot": "discover" } @@ -622,7 +620,7 @@ fn main() {{}} for i in 0..10 { server.notification::(DidOpenTextDocumentParams { text_document: TextDocumentItem { - uri: server.doc_id(&format!("src/m{}.rs", i)).uri, + uri: server.doc_id(&format!("src/m{i}.rs")).uri, language_id: "rust".to_string(), version: 0, text: "/// Docs\nfn foo() {}".to_string(), @@ -645,7 +643,7 @@ fn main() {{}} }]), ); let elapsed = start.elapsed(); - assert!(elapsed.as_millis() < 2000, "typing enter took {:?}", elapsed); + assert!(elapsed.as_millis() < 2000, "typing enter took {elapsed:?}"); } #[test] @@ -942,7 +940,7 @@ fn test_will_rename_files_same_level() { let tmp_dir = TestDir::new(); let tmp_dir_path = tmp_dir.path().to_owned(); let tmp_dir_str = tmp_dir_path.to_str().unwrap(); - let 
base_path = PathBuf::from(format!("file://{}", tmp_dir_str)); + let base_path = PathBuf::from(format!("file://{tmp_dir_str}")); let code = r#" //- /Cargo.toml diff --git a/crates/rust-analyzer/tests/slow-tests/sourcegen.rs b/crates/rust-analyzer/tests/slow-tests/sourcegen.rs index e6ac018a05..2eafb0da69 100644 --- a/crates/rust-analyzer/tests/slow-tests/sourcegen.rs +++ b/crates/rust-analyzer/tests/slow-tests/sourcegen.rs @@ -14,7 +14,7 @@ fn sourcegen_feature_docs() { contents.trim() ); let dst = sourcegen::project_root().join("docs/user/generated_features.adoc"); - fs::write(&dst, &contents).unwrap(); + fs::write(dst, contents).unwrap(); } #[derive(Debug)] @@ -42,7 +42,7 @@ impl Feature { for block in comment_blocks { let id = block.id; if let Err(msg) = is_valid_feature_name(&id) { - panic!("invalid feature name: {:?}:\n {}", id, msg) + panic!("invalid feature name: {id:?}:\n {msg}") } let doc = block.contents.join("\n"); let location = sourcegen::Location { file: path.clone(), line: block.line }; @@ -63,11 +63,11 @@ fn is_valid_feature_name(feature: &str) -> Result<(), String> { } for short in ["To", "And"] { if word == short { - return Err(format!("Don't capitalize {:?}", word)); + return Err(format!("Don't capitalize {word:?}")); } } if !word.starts_with(char::is_uppercase) { - return Err(format!("Capitalize {:?}", word)); + return Err(format!("Capitalize {word:?}")); } } Ok(()) diff --git a/crates/rust-analyzer/tests/slow-tests/support.rs b/crates/rust-analyzer/tests/slow-tests/support.rs index 7257445dab..269212ebb9 100644 --- a/crates/rust-analyzer/tests/slow-tests/support.rs +++ b/crates/rust-analyzer/tests/slow-tests/support.rs @@ -216,7 +216,7 @@ impl Server { fn send_request_(&self, r: Request) -> Value { let id = r.id.clone(); self.client.sender.send(r.clone().into()).unwrap(); - while let Some(msg) = self.recv().unwrap_or_else(|Timeout| panic!("timeout: {:?}", r)) { + while let Some(msg) = self.recv().unwrap_or_else(|Timeout| panic!("timeout: {r:?}")) { match msg { Message::Request(req) => { if req.method == "client/registerCapability" { @@ -228,19 +228,19 @@ impl Server { continue; } } - panic!("unexpected request: {:?}", req) + panic!("unexpected request: {req:?}") } Message::Notification(_) => (), Message::Response(res) => { assert_eq!(res.id, id); if let Some(err) = res.error { - panic!("error response: {:#?}", err); + panic!("error response: {err:#?}"); } return res.result.unwrap(); } } } - panic!("no response for {:?}", r); + panic!("no response for {r:?}"); } pub(crate) fn wait_until_workspace_is_loaded(self) -> Server { self.wait_for_message_cond(1, &|msg: &Message| match msg { diff --git a/crates/rust-analyzer/tests/slow-tests/testdir.rs b/crates/rust-analyzer/tests/slow-tests/testdir.rs index 3bec23a911..f7fceb5888 100644 --- a/crates/rust-analyzer/tests/slow-tests/testdir.rs +++ b/crates/rust-analyzer/tests/slow-tests/testdir.rs @@ -28,7 +28,7 @@ impl TestDir { static CNT: AtomicUsize = AtomicUsize::new(0); for _ in 0..100 { let cnt = CNT.fetch_add(1, Ordering::Relaxed); - let path = base.join(format!("{}_{}", pid, cnt)); + let path = base.join(format!("{pid}_{cnt}")); if path.is_dir() { continue; } @@ -53,7 +53,7 @@ impl Drop for TestDir { return; } remove_dir_all(&self.path).unwrap_or_else(|err| { - panic!("failed to remove temporary directory {}: {}", self.path.display(), err) + panic!("failed to remove temporary directory {}: {err}", self.path.display()) }) } } diff --git a/crates/rust-analyzer/tests/slow-tests/tidy.rs 
b/crates/rust-analyzer/tests/slow-tests/tidy.rs index 24e68eca67..35b5af7319 100644 --- a/crates/rust-analyzer/tests/slow-tests/tidy.rs +++ b/crates/rust-analyzer/tests/slow-tests/tidy.rs @@ -56,12 +56,11 @@ fn check_lsp_extensions_docs() { " lsp_ext.rs was changed without touching lsp-extensions.md. -Expected hash: {:x} -Actual hash: {:x} +Expected hash: {expected_hash:x} +Actual hash: {actual_hash:x} Please adjust docs/dev/lsp-extensions.md. -", - expected_hash, actual_hash +" ) } } @@ -194,6 +193,7 @@ MIT OR Apache-2.0 MIT OR Apache-2.0 OR Zlib MIT OR Zlib OR Apache-2.0 MIT/Apache-2.0 +Unlicense OR MIT Unlicense/MIT Zlib OR Apache-2.0 OR MIT " @@ -216,18 +216,18 @@ Zlib OR Apache-2.0 OR MIT diff.push_str("New Licenses:\n"); for &l in licenses.iter() { if !expected.contains(&l) { - diff += &format!(" {}\n", l) + diff += &format!(" {l}\n") } } diff.push_str("\nMissing Licenses:\n"); for &l in expected.iter() { if !licenses.contains(&l) { - diff += &format!(" {}\n", l) + diff += &format!(" {l}\n") } } - panic!("different set of licenses!\n{}", diff); + panic!("different set of licenses!\n{diff}"); } assert_eq!(licenses, expected); } @@ -316,7 +316,7 @@ fn check_test_attrs(path: &Path, text: &str) { "ide-assists/src/tests/generated.rs", ]; if text.contains("#[ignore") && !need_ignore.iter().any(|p| path.ends_with(p)) { - panic!("\ndon't `#[ignore]` tests, see:\n\n {}\n\n {}\n", ignore_rule, path.display(),) + panic!("\ndon't `#[ignore]` tests, see:\n\n {ignore_rule}\n\n {}\n", path.display(),) } let panic_rule = @@ -438,7 +438,7 @@ impl TidyMarks { self.hits.symmetric_difference(&self.checks).map(|it| it.as_str()).collect(); if !diff.is_empty() { - panic!("unpaired marks: {:?}", diff) + panic!("unpaired marks: {diff:?}") } } } diff --git a/crates/sourcegen/src/lib.rs b/crates/sourcegen/src/lib.rs index 4e0ee63f32..72d26635c3 100644 --- a/crates/sourcegen/src/lib.rs +++ b/crates/sourcegen/src/lib.rs @@ -57,7 +57,7 @@ impl CommentBlock { pub fn extract(tag: &str, text: &str) -> Vec { assert!(tag.starts_with(char::is_uppercase)); - let tag = format!("{}:", tag); + let tag = format!("{tag}:"); // Would be nice if we had `.retain_mut` here! CommentBlock::extract_untagged(text) .into_iter() @@ -65,10 +65,7 @@ impl CommentBlock { let first = block.contents.remove(0); first.strip_prefix(&tag).map(|id| { if block.is_doc { - panic!( - "Use plain (non-doc) comments with tags like {}:\n {}", - tag, first - ); + panic!("Use plain (non-doc) comments with tags like {tag}:\n {first}"); } block.id = id.trim().to_string(); @@ -122,7 +119,7 @@ pub struct Location { impl fmt::Display for Location { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let path = self.file.strip_prefix(&project_root()).unwrap().display().to_string(); + let path = self.file.strip_prefix(project_root()).unwrap().display().to_string(); let path = path.replace('\\', "/"); let name = self.file.file_name().unwrap(); write!( @@ -163,7 +160,7 @@ pub fn reformat(text: String) -> String { } pub fn add_preamble(generator: &'static str, mut text: String) -> String { - let preamble = format!("//! Generated by `{}`, do not edit by hand.\n\n", generator); + let preamble = format!("//! 
Generated by `{generator}`, do not edit by hand.\n\n"); text.insert_str(0, &preamble); text } @@ -178,7 +175,7 @@ pub fn ensure_file_contents(file: &Path, contents: &str) { } } - let display_path = file.strip_prefix(&project_root()).unwrap_or(file); + let display_path = file.strip_prefix(project_root()).unwrap_or(file); eprintln!( "\n\x1b[31;1merror\x1b[0m: {} was not up-to-date, updating\n", display_path.display() diff --git a/crates/stdx/src/hash.rs b/crates/stdx/src/hash.rs index 9909d71bdf..0c21d2674b 100644 --- a/crates/stdx/src/hash.rs +++ b/crates/stdx/src/hash.rs @@ -51,7 +51,7 @@ impl Hasher for NoHashHasher { } fn write_u64(&mut self, i: u64) { - self.0 = i as u64; + self.0 = i; } fn write_usize(&mut self, i: usize) { diff --git a/crates/stdx/src/panic_context.rs b/crates/stdx/src/panic_context.rs index f8fafc5a67..c3e8813b0e 100644 --- a/crates/stdx/src/panic_context.rs +++ b/crates/stdx/src/panic_context.rs @@ -25,7 +25,7 @@ impl PanicContext { if !ctx.is_empty() { eprintln!("Panic context:"); for frame in ctx.iter() { - eprintln!("> {}\n", frame); + eprintln!("> {frame}\n"); } } default_hook(panic_info); @@ -45,5 +45,5 @@ fn with_ctx(f: impl FnOnce(&mut Vec)) { thread_local! { static CTX: RefCell> = RefCell::new(Vec::new()); } - CTX.with(|ctx| f(&mut *ctx.borrow_mut())); + CTX.with(|ctx| f(&mut ctx.borrow_mut())); } diff --git a/crates/syntax/rust.ungram b/crates/syntax/rust.ungram index 0a0cb0290d..2c67586a39 100644 --- a/crates/syntax/rust.ungram +++ b/crates/syntax/rust.ungram @@ -359,6 +359,7 @@ Expr = | TupleExpr | WhileExpr | YieldExpr +| YeetExpr | LetExpr | UnderscoreExpr @@ -503,6 +504,9 @@ ReturnExpr = YieldExpr = Attr* 'yield' Expr? +YeetExpr = + Attr* 'do' 'yeet' Expr? + LetExpr = Attr* 'let' Pat '=' Expr diff --git a/crates/syntax/src/algo.rs b/crates/syntax/src/algo.rs index 8b14789dd9..c402a7bcea 100644 --- a/crates/syntax/src/algo.rs +++ b/crates/syntax/src/algo.rs @@ -616,7 +616,7 @@ fn main() { let fmt_syntax = |syn: &SyntaxElement| match syn.kind() { SyntaxKind::WHITESPACE => format!("{:?}", syn.to_string()), - _ => format!("{}", syn), + _ => format!("{syn}"), }; let insertions = @@ -637,7 +637,7 @@ fn main() { .iter() .sorted_by_key(|(syntax, _)| syntax.text_range().start()) .format_with("\n", |(k, v), f| { - f(&format!("Line {}: {:?} -> {}", line_number(k), k, fmt_syntax(v))) + f(&format!("Line {}: {k:?} -> {}", line_number(k), fmt_syntax(v))) }); let deletions = diff @@ -646,8 +646,7 @@ fn main() { .format_with("\n", |v, f| f(&format!("Line {}: {}", line_number(v), &fmt_syntax(v)))); let actual = format!( - "insertions:\n\n{}\n\nreplacements:\n\n{}\n\ndeletions:\n\n{}\n", - insertions, replacements, deletions + "insertions:\n\n{insertions}\n\nreplacements:\n\n{replacements}\n\ndeletions:\n\n{deletions}\n" ); expected_diff.assert_eq(&actual); diff --git a/crates/syntax/src/ast.rs b/crates/syntax/src/ast.rs index 4aa64d0d6e..10c0457583 100644 --- a/crates/syntax/src/ast.rs +++ b/crates/syntax/src/ast.rs @@ -9,6 +9,7 @@ mod operators; pub mod edit; pub mod edit_in_place; pub mod make; +pub mod prec; use std::marker::PhantomData; diff --git a/crates/syntax/src/ast/edit.rs b/crates/syntax/src/ast/edit.rs index 15805dfc86..5bc6b780e4 100644 --- a/crates/syntax/src/ast/edit.rs +++ b/crates/syntax/src/ast/edit.rs @@ -87,7 +87,7 @@ impl IndentLevel { for token in tokens { if let Some(ws) = ast::Whitespace::cast(token) { if ws.text().contains('\n') { - let new_ws = make::tokens::whitespace(&format!("{}{}", ws.syntax(), self)); + let new_ws = 
make::tokens::whitespace(&format!("{}{self}", ws.syntax())); ted::replace(ws.syntax(), &new_ws); } } @@ -103,7 +103,7 @@ impl IndentLevel { if let Some(ws) = ast::Whitespace::cast(token) { if ws.text().contains('\n') { let new_ws = make::tokens::whitespace( - &ws.syntax().text().replace(&format!("\n{}", self), "\n"), + &ws.syntax().text().replace(&format!("\n{self}"), "\n"), ); ted::replace(ws.syntax(), &new_ws); } diff --git a/crates/syntax/src/ast/edit_in_place.rs b/crates/syntax/src/ast/edit_in_place.rs index 660c057e99..a493c92e7d 100644 --- a/crates/syntax/src/ast/edit_in_place.rs +++ b/crates/syntax/src/ast/edit_in_place.rs @@ -481,7 +481,7 @@ impl ast::AssocItemList { }, }; let elements: Vec> = vec![ - make::tokens::whitespace(&format!("{}{}", whitespace, indent)).into(), + make::tokens::whitespace(&format!("{whitespace}{indent}")).into(), item.syntax().clone().into(), ]; ted::insert_all(position, elements); @@ -537,7 +537,7 @@ impl ast::MatchArmList { }, }; let indent = IndentLevel::from_node(self.syntax()) + 1; - elements.push(make::tokens::whitespace(&format!("\n{}", indent)).into()); + elements.push(make::tokens::whitespace(&format!("\n{indent}")).into()); elements.push(arm.syntax().clone().into()); if needs_comma(&arm) { ted::append_child(arm.syntax(), make::token(SyntaxKind::COMMA)); @@ -555,7 +555,7 @@ impl ast::RecordExprFieldList { let is_multiline = self.syntax().text().contains_char('\n'); let whitespace = if is_multiline { let indent = IndentLevel::from_node(self.syntax()) + 1; - make::tokens::whitespace(&format!("\n{}", indent)) + make::tokens::whitespace(&format!("\n{indent}")) } else { make::tokens::single_space() }; @@ -616,7 +616,7 @@ impl ast::RecordPatFieldList { let is_multiline = self.syntax().text().contains_char('\n'); let whitespace = if is_multiline { let indent = IndentLevel::from_node(self.syntax()) + 1; - make::tokens::whitespace(&format!("\n{}", indent)) + make::tokens::whitespace(&format!("\n{indent}")) } else { make::tokens::single_space() }; @@ -681,7 +681,7 @@ impl ast::VariantList { }, }; let elements: Vec> = vec![ - make::tokens::whitespace(&format!("{}{}", "\n", indent)).into(), + make::tokens::whitespace(&format!("{}{indent}", "\n")).into(), variant.syntax().clone().into(), ast::make::token(T![,]).into(), ]; @@ -704,11 +704,11 @@ fn normalize_ws_between_braces(node: &SyntaxNode) -> Option<()> { match l.next_sibling_or_token() { Some(ws) if ws.kind() == SyntaxKind::WHITESPACE => { if ws.next_sibling_or_token()?.into_token()? 
== r { - ted::replace(ws, make::tokens::whitespace(&format!("\n{}", indent))); + ted::replace(ws, make::tokens::whitespace(&format!("\n{indent}"))); } } Some(ws) if ws.kind() == T!['}'] => { - ted::insert(Position::after(l), make::tokens::whitespace(&format!("\n{}", indent))); + ted::insert(Position::after(l), make::tokens::whitespace(&format!("\n{indent}"))); } _ => (), } @@ -888,6 +888,6 @@ enum Foo { let enum_ = ast_mut_from_text::(before); enum_.variant_list().map(|it| it.add_variant(variant)); let after = enum_.to_string(); - assert_eq_text!(&trim_indent(expected.trim()), &trim_indent(&after.trim())); + assert_eq_text!(&trim_indent(expected.trim()), &trim_indent(after.trim())); } } diff --git a/crates/syntax/src/ast/generated/nodes.rs b/crates/syntax/src/ast/generated/nodes.rs index 2ea715f47f..a214a5e446 100644 --- a/crates/syntax/src/ast/generated/nodes.rs +++ b/crates/syntax/src/ast/generated/nodes.rs @@ -1063,6 +1063,17 @@ impl YieldExpr { pub fn expr(&self) -> Option { support::child(&self.syntax) } } +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct YeetExpr { + pub(crate) syntax: SyntaxNode, +} +impl ast::HasAttrs for YeetExpr {} +impl YeetExpr { + pub fn do_token(&self) -> Option { support::token(&self.syntax, T![do]) } + pub fn yeet_token(&self) -> Option { support::token(&self.syntax, T![yeet]) } + pub fn expr(&self) -> Option { support::child(&self.syntax) } +} + #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct LetExpr { pub(crate) syntax: SyntaxNode, @@ -1541,6 +1552,7 @@ pub enum Expr { TupleExpr(TupleExpr), WhileExpr(WhileExpr), YieldExpr(YieldExpr), + YeetExpr(YeetExpr), LetExpr(LetExpr), UnderscoreExpr(UnderscoreExpr), } @@ -2694,6 +2706,17 @@ impl AstNode for YieldExpr { } fn syntax(&self) -> &SyntaxNode { &self.syntax } } +impl AstNode for YeetExpr { + fn can_cast(kind: SyntaxKind) -> bool { kind == YEET_EXPR } + fn cast(syntax: SyntaxNode) -> Option { + if Self::can_cast(syntax.kind()) { + Some(Self { syntax }) + } else { + None + } + } + fn syntax(&self) -> &SyntaxNode { &self.syntax } +} impl AstNode for LetExpr { fn can_cast(kind: SyntaxKind) -> bool { kind == LET_EXPR } fn cast(syntax: SyntaxNode) -> Option { @@ -3382,6 +3405,9 @@ impl From for Expr { impl From for Expr { fn from(node: YieldExpr) -> Expr { Expr::YieldExpr(node) } } +impl From for Expr { + fn from(node: YeetExpr) -> Expr { Expr::YeetExpr(node) } +} impl From for Expr { fn from(node: LetExpr) -> Expr { Expr::LetExpr(node) } } @@ -3422,6 +3448,7 @@ impl AstNode for Expr { | TUPLE_EXPR | WHILE_EXPR | YIELD_EXPR + | YEET_EXPR | LET_EXPR | UNDERSCORE_EXPR ) @@ -3458,6 +3485,7 @@ impl AstNode for Expr { TUPLE_EXPR => Expr::TupleExpr(TupleExpr { syntax }), WHILE_EXPR => Expr::WhileExpr(WhileExpr { syntax }), YIELD_EXPR => Expr::YieldExpr(YieldExpr { syntax }), + YEET_EXPR => Expr::YeetExpr(YeetExpr { syntax }), LET_EXPR => Expr::LetExpr(LetExpr { syntax }), UNDERSCORE_EXPR => Expr::UnderscoreExpr(UnderscoreExpr { syntax }), _ => return None, @@ -3496,6 +3524,7 @@ impl AstNode for Expr { Expr::TupleExpr(it) => &it.syntax, Expr::WhileExpr(it) => &it.syntax, Expr::YieldExpr(it) => &it.syntax, + Expr::YeetExpr(it) => &it.syntax, Expr::LetExpr(it) => &it.syntax, Expr::UnderscoreExpr(it) => &it.syntax, } @@ -3892,7 +3921,7 @@ impl AnyHasArgList { impl AstNode for AnyHasArgList { fn can_cast(kind: SyntaxKind) -> bool { matches!(kind, CALL_EXPR | METHOD_CALL_EXPR) } fn cast(syntax: SyntaxNode) -> Option { - Self::can_cast(syntax.kind()).then(|| AnyHasArgList { syntax }) + 
Self::can_cast(syntax.kind()).then_some(AnyHasArgList { syntax }) } fn syntax(&self) -> &SyntaxNode { &self.syntax } } @@ -3963,6 +3992,7 @@ impl AstNode for AnyHasAttrs { | TUPLE_EXPR | WHILE_EXPR | YIELD_EXPR + | YEET_EXPR | LET_EXPR | UNDERSCORE_EXPR | STMT_LIST @@ -3976,7 +4006,7 @@ impl AstNode for AnyHasAttrs { ) } fn cast(syntax: SyntaxNode) -> Option { - Self::can_cast(syntax.kind()).then(|| AnyHasAttrs { syntax }) + Self::can_cast(syntax.kind()).then_some(AnyHasAttrs { syntax }) } fn syntax(&self) -> &SyntaxNode { &self.syntax } } @@ -4013,7 +4043,7 @@ impl AstNode for AnyHasDocComments { ) } fn cast(syntax: SyntaxNode) -> Option { - Self::can_cast(syntax.kind()).then(|| AnyHasDocComments { syntax }) + Self::can_cast(syntax.kind()).then_some(AnyHasDocComments { syntax }) } fn syntax(&self) -> &SyntaxNode { &self.syntax } } @@ -4028,7 +4058,7 @@ impl AstNode for AnyHasGenericParams { matches!(kind, ENUM | FN | IMPL | STRUCT | TRAIT | TYPE_ALIAS | UNION) } fn cast(syntax: SyntaxNode) -> Option { - Self::can_cast(syntax.kind()).then(|| AnyHasGenericParams { syntax }) + Self::can_cast(syntax.kind()).then_some(AnyHasGenericParams { syntax }) } fn syntax(&self) -> &SyntaxNode { &self.syntax } } @@ -4041,7 +4071,7 @@ impl AnyHasLoopBody { impl AstNode for AnyHasLoopBody { fn can_cast(kind: SyntaxKind) -> bool { matches!(kind, FOR_EXPR | LOOP_EXPR | WHILE_EXPR) } fn cast(syntax: SyntaxNode) -> Option { - Self::can_cast(syntax.kind()).then(|| AnyHasLoopBody { syntax }) + Self::can_cast(syntax.kind()).then_some(AnyHasLoopBody { syntax }) } fn syntax(&self) -> &SyntaxNode { &self.syntax } } @@ -4054,7 +4084,7 @@ impl AnyHasModuleItem { impl AstNode for AnyHasModuleItem { fn can_cast(kind: SyntaxKind) -> bool { matches!(kind, MACRO_ITEMS | SOURCE_FILE | ITEM_LIST) } fn cast(syntax: SyntaxNode) -> Option { - Self::can_cast(syntax.kind()).then(|| AnyHasModuleItem { syntax }) + Self::can_cast(syntax.kind()).then_some(AnyHasModuleItem { syntax }) } fn syntax(&self) -> &SyntaxNode { &self.syntax } } @@ -4089,7 +4119,7 @@ impl AstNode for AnyHasName { ) } fn cast(syntax: SyntaxNode) -> Option { - Self::can_cast(syntax.kind()).then(|| AnyHasName { syntax }) + Self::can_cast(syntax.kind()).then_some(AnyHasName { syntax }) } fn syntax(&self) -> &SyntaxNode { &self.syntax } } @@ -4107,7 +4137,7 @@ impl AstNode for AnyHasTypeBounds { ) } fn cast(syntax: SyntaxNode) -> Option { - Self::can_cast(syntax.kind()).then(|| AnyHasTypeBounds { syntax }) + Self::can_cast(syntax.kind()).then_some(AnyHasTypeBounds { syntax }) } fn syntax(&self) -> &SyntaxNode { &self.syntax } } @@ -4141,7 +4171,7 @@ impl AstNode for AnyHasVisibility { ) } fn cast(syntax: SyntaxNode) -> Option { - Self::can_cast(syntax.kind()).then(|| AnyHasVisibility { syntax }) + Self::can_cast(syntax.kind()).then_some(AnyHasVisibility { syntax }) } fn syntax(&self) -> &SyntaxNode { &self.syntax } } @@ -4655,6 +4685,11 @@ impl std::fmt::Display for YieldExpr { std::fmt::Display::fmt(self.syntax(), f) } } +impl std::fmt::Display for YeetExpr { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + std::fmt::Display::fmt(self.syntax(), f) + } +} impl std::fmt::Display for LetExpr { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { std::fmt::Display::fmt(self.syntax(), f) diff --git a/crates/syntax/src/ast/make.rs b/crates/syntax/src/ast/make.rs index 8c26009add..d5b3296980 100644 --- a/crates/syntax/src/ast/make.rs +++ b/crates/syntax/src/ast/make.rs @@ -339,10 +339,10 @@ pub fn tail_only_block_expr(tail_expr: 
ast::Expr) -> ast::BlockExpr { } /// Ideally this function wouldn't exist since it involves manual indenting. -/// It differs from `make::block_expr` by also supporting comments. +/// It differs from `make::block_expr` by also supporting comments and whitespace. /// /// FIXME: replace usages of this with the mutable syntax tree API -pub fn hacky_block_expr_with_comments( +pub fn hacky_block_expr( elements: impl IntoIterator, tail_expr: Option, ) -> ast::BlockExpr { @@ -350,10 +350,17 @@ pub fn hacky_block_expr_with_comments( for node_or_token in elements.into_iter() { match node_or_token { rowan::NodeOrToken::Node(n) => format_to!(buf, " {n}\n"), - rowan::NodeOrToken::Token(t) if t.kind() == SyntaxKind::COMMENT => { - format_to!(buf, " {t}\n") + rowan::NodeOrToken::Token(t) => { + let kind = t.kind(); + if kind == SyntaxKind::COMMENT { + format_to!(buf, " {t}\n") + } else if kind == SyntaxKind::WHITESPACE { + let content = t.text().trim_matches(|c| c != '\n'); + if content.len() >= 1 { + format_to!(buf, "{}", &content[1..]) + } + } } - _ => (), } } if let Some(tail_expr) = tail_expr { @@ -719,12 +726,23 @@ pub fn param_list( ast_from_text(&list) } -pub fn type_param(name: ast::Name, ty: Option) -> ast::TypeParam { - let bound = match ty { - Some(it) => format!(": {it}"), - None => String::new(), - }; - ast_from_text(&format!("fn f<{name}{bound}>() {{ }}")) +pub fn type_bound(bound: &str) -> ast::TypeBound { + ast_from_text(&format!("fn f() {{ }}")) +} + +pub fn type_bound_list( + bounds: impl IntoIterator, +) -> Option { + let bounds = bounds.into_iter().map(|it| it.to_string()).unique().join(" + "); + if bounds.is_empty() { + return None; + } + Some(ast_from_text(&format!("fn f() {{ }}"))) +} + +pub fn type_param(name: ast::Name, bounds: Option) -> ast::TypeParam { + let bounds = bounds.map_or_else(String::new, |it| format!(": {it}")); + ast_from_text(&format!("fn f<{name}{bounds}>() {{ }}")) } pub fn lifetime_param(lifetime: ast::Lifetime) -> ast::LifetimeParam { diff --git a/crates/syntax/src/ast/prec.rs b/crates/syntax/src/ast/prec.rs new file mode 100644 index 0000000000..4ec388914e --- /dev/null +++ b/crates/syntax/src/ast/prec.rs @@ -0,0 +1,328 @@ +//! Precedence representation. + +use crate::{ + ast::{self, BinaryOp, Expr, HasArgList}, + match_ast, AstNode, SyntaxNode, +}; + +impl Expr { + // Implementation is based on + // - https://doc.rust-lang.org/reference/expressions.html#expression-precedence + // - https://matklad.github.io/2020/04/13/simple-but-powerful-pratt-parsing.html + // - rustc source, including, but not limited to + // - https://github.com/rust-lang/rust/blob/b6852428a8ea9728369b64b9964cad8e258403d3/compiler/rustc_ast/src/util/parser.rs#L296 + + /// Returns `true` if `self` would need to be wrapped in parentheses given that its parent is `parent`. + pub fn needs_parens_in(&self, parent: SyntaxNode) -> bool { + match_ast! 
{ + match parent { + ast::Expr(e) => self.needs_parens_in_expr(&e), + ast::Stmt(e) => self.needs_parens_in_stmt(Some(&e)), + ast::StmtList(_) => self.needs_parens_in_stmt(None), + ast::ArgList(_) => false, + ast::MatchArm(_) => false, + _ => false, + } + } + } + + fn needs_parens_in_expr(&self, parent: &Expr) -> bool { + // Special-case block weirdness + if parent.child_is_followed_by_a_block() { + use Expr::*; + match self { + // Cases like `if return {}` (need parens or else `{}` is returned, instead of being `if`'s body) + ReturnExpr(e) if e.expr().is_none() => return true, + BreakExpr(e) if e.expr().is_none() => return true, + YieldExpr(e) if e.expr().is_none() => return true, + + // Same but with `..{}` + RangeExpr(e) if matches!(e.end(), Some(BlockExpr(..))) => return true, + + // Similarly with struct literals, e.g. `if S{} == 1 {}` + _ if self.contains_exterior_struct_lit() => return true, + _ => {} + } + } + + // Special-case `return.f()` + if self.is_ret_like_with_no_value() && parent.is_postfix() { + return false; + } + + if self.is_paren_like() + || parent.is_paren_like() + || self.is_prefix() && (parent.is_prefix() || !self.is_ordered_before(parent)) + || self.is_postfix() && (parent.is_postfix() || self.is_ordered_before(parent)) + { + return false; + } + + let (left, right, inv) = match self.is_ordered_before(parent) { + true => (self, parent, false), + false => (parent, self, true), + }; + + let (_, left_right_bp) = left.binding_power(); + let (right_left_bp, _) = right.binding_power(); + + (left_right_bp < right_left_bp) ^ inv + } + + fn needs_parens_in_stmt(&self, stmt: Option<&ast::Stmt>) -> bool { + use Expr::*; + + // Prevent false-positives in cases like `fn x() -> u8 { ({ 0 } + 1) }`, + // `{ { 0 } + 1 }` won't parse -- `{ 0 }` would be parsed as a self-contained stmt, + // leaving `+ 1` as a parse error. + let mut innermost = self.clone(); + loop { + let next = match &innermost { + BinExpr(e) => e.lhs(), + CallExpr(e) => e.expr(), + CastExpr(e) => e.expr(), + IndexExpr(e) => e.base(), + _ => break, + }; + + if let Some(next) = next { + innermost = next; + if !innermost.requires_semi_to_be_stmt() { + return true; + } + } else { + break; + } + } + + // Not every expression can be followed by `else` in the `let-else` + if let Some(ast::Stmt::LetStmt(e)) = stmt { + if e.let_else().is_some() { + match self { + BinExpr(e) + if e.op_kind() + .map(|op| matches!(op, BinaryOp::LogicOp(_))) + .unwrap_or(false) => + { + return true + } + _ if self.clone().trailing_brace().is_some() => return true, + _ => {} + } + } + } + + false + } + + /// Returns left and right so-called "binding powers" of this expression. + fn binding_power(&self) -> (u8, u8) { + use ast::{ArithOp::*, BinaryOp::*, Expr::*, LogicOp::*}; + + match self { + // (0, 0) -- paren-like/nullary + // (0, N) -- prefix + // (N, 0) -- postfix + // (N, N) -- infix, requires parens + // (N, N+1) -- infix, left to right associative + // (N+1, N) -- infix, right to left associative + // N is odd + // + ContinueExpr(_) => (0, 0), + + ClosureExpr(_) | ReturnExpr(_) | YieldExpr(_) | YeetExpr(_) | BreakExpr(_) => (0, 1), + + RangeExpr(_) => (5, 5), + + BinExpr(e) => { + // Return a dummy value if we don't know the op + let Some(op) = e.op_kind() else { return (0, 0) }; + match op { + Assignment { .. 
} => (4, 3), + // + // Ranges are here in order :) + // + LogicOp(op) => match op { + Or => (7, 8), + And => (9, 10), + }, + CmpOp(_) => (11, 11), + ArithOp(op) => match op { + BitOr => (13, 14), + BitXor => (15, 16), + BitAnd => (17, 18), + Shl | Shr => (19, 20), + Add | Sub => (21, 22), + Mul | Div | Rem => (23, 24), + }, + } + } + + CastExpr(_) => (25, 26), + + BoxExpr(_) | RefExpr(_) | LetExpr(_) | PrefixExpr(_) => (0, 27), + + AwaitExpr(_) | CallExpr(_) | MethodCallExpr(_) | IndexExpr(_) | TryExpr(_) + | MacroExpr(_) => (29, 0), + + FieldExpr(_) => (31, 32), + + ArrayExpr(_) | TupleExpr(_) | Literal(_) | PathExpr(_) | ParenExpr(_) | IfExpr(_) + | WhileExpr(_) | ForExpr(_) | LoopExpr(_) | MatchExpr(_) | BlockExpr(_) + | RecordExpr(_) | UnderscoreExpr(_) => (0, 0), + } + } + + fn is_paren_like(&self) -> bool { + matches!(self.binding_power(), (0, 0)) + } + + fn is_prefix(&self) -> bool { + matches!(self.binding_power(), (0, 1..)) + } + + fn is_postfix(&self) -> bool { + matches!(self.binding_power(), (1.., 0)) + } + + /// Returns `true` if this expression can't be a standalone statement. + fn requires_semi_to_be_stmt(&self) -> bool { + use Expr::*; + !matches!( + self, + IfExpr(..) | MatchExpr(..) | BlockExpr(..) | WhileExpr(..) | LoopExpr(..) | ForExpr(..) + ) + } + + /// If an expression ends with `}`, returns the innermost expression ending in this `}`. + fn trailing_brace(mut self) -> Option { + use Expr::*; + + loop { + let rhs = match self { + RefExpr(e) => e.expr(), + BinExpr(e) => e.rhs(), + BoxExpr(e) => e.expr(), + BreakExpr(e) => e.expr(), + LetExpr(e) => e.expr(), + RangeExpr(e) => e.end(), + ReturnExpr(e) => e.expr(), + PrefixExpr(e) => e.expr(), + YieldExpr(e) => e.expr(), + ClosureExpr(e) => e.body(), + + BlockExpr(..) | ForExpr(..) | IfExpr(..) | LoopExpr(..) | MatchExpr(..) + | RecordExpr(..) | WhileExpr(..) => break Some(self), + _ => break None, + }; + + self = rhs?; + } + } + + /// Expressions that syntactically contain an "exterior" struct literal i.e., not surrounded by any + /// parens or other delimiters, e.g., `X { y: 1 }`, `X { y: 1 }.method()`, `foo == X { y: 1 }` and + /// `X { y: 1 } == foo` all do, but `(X { y: 1 }) == foo` does not. + fn contains_exterior_struct_lit(&self) -> bool { + return contains_exterior_struct_lit_inner(self).is_some(); + + fn contains_exterior_struct_lit_inner(expr: &Expr) -> Option<()> { + use Expr::*; + + match expr { + RecordExpr(..) => Some(()), + + // X { y: 1 } + X { y: 2 } + BinExpr(e) => e + .lhs() + .as_ref() + .and_then(contains_exterior_struct_lit_inner) + .or_else(|| e.rhs().as_ref().and_then(contains_exterior_struct_lit_inner)), + + // `&X { y: 1 }`, `X { y: 1 }.y`, `X { y: 1 }.bar(...)`, etc + IndexExpr(e) => contains_exterior_struct_lit_inner(&e.base()?), + AwaitExpr(e) => contains_exterior_struct_lit_inner(&e.expr()?), + PrefixExpr(e) => contains_exterior_struct_lit_inner(&e.expr()?), + CastExpr(e) => contains_exterior_struct_lit_inner(&e.expr()?), + FieldExpr(e) => contains_exterior_struct_lit_inner(&e.expr()?), + MethodCallExpr(e) => contains_exterior_struct_lit_inner(&e.receiver()?), + + _ => None, + } + } + } + + /// Returns true if self is one of `return`, `break`, `continue` or `yield` with **no associated value**. 
+ fn is_ret_like_with_no_value(&self) -> bool { + use Expr::*; + + match self { + ReturnExpr(e) => e.expr().is_none(), + BreakExpr(e) => e.expr().is_none(), + ContinueExpr(_) => true, + YieldExpr(e) => e.expr().is_none(), + _ => false, + } + } + + fn is_ordered_before(&self, other: &Expr) -> bool { + use Expr::*; + + return order(self) < order(other); + + /// Returns text range that can be used to compare two expression for order (which goes first). + fn order(this: &Expr) -> rowan::TextSize { + // For non-paren-like operators: get the operator itself + let token = match this { + RangeExpr(e) => e.op_token(), + BinExpr(e) => e.op_token(), + CastExpr(e) => e.as_token(), + FieldExpr(e) => e.dot_token(), + AwaitExpr(e) => e.dot_token(), + BoxExpr(e) => e.box_token(), + BreakExpr(e) => e.break_token(), + CallExpr(e) => e.arg_list().and_then(|args| args.l_paren_token()), + ClosureExpr(e) => e.param_list().and_then(|params| params.l_paren_token()), + ContinueExpr(e) => e.continue_token(), + IndexExpr(e) => e.l_brack_token(), + MethodCallExpr(e) => e.dot_token(), + PrefixExpr(e) => e.op_token(), + RefExpr(e) => e.amp_token(), + ReturnExpr(e) => e.return_token(), + TryExpr(e) => e.question_mark_token(), + YieldExpr(e) => e.yield_token(), + YeetExpr(e) => e.do_token(), + LetExpr(e) => e.let_token(), + + ArrayExpr(_) | TupleExpr(_) | Literal(_) | PathExpr(_) | ParenExpr(_) + | IfExpr(_) | WhileExpr(_) | ForExpr(_) | LoopExpr(_) | MatchExpr(_) + | BlockExpr(_) | RecordExpr(_) | UnderscoreExpr(_) | MacroExpr(_) => None, + }; + + token.map(|t| t.text_range()).unwrap_or_else(|| this.syntax().text_range()).start() + } + } + + fn child_is_followed_by_a_block(&self) -> bool { + use Expr::*; + + match self { + ArrayExpr(_) | AwaitExpr(_) | BlockExpr(_) | CallExpr(_) | CastExpr(_) + | ClosureExpr(_) | FieldExpr(_) | IndexExpr(_) | Literal(_) | LoopExpr(_) + | MacroExpr(_) | MethodCallExpr(_) | ParenExpr(_) | PathExpr(_) | RecordExpr(_) + | TryExpr(_) | TupleExpr(_) | UnderscoreExpr(_) => false, + + // For BinExpr and RangeExpr this is technically wrong -- the child can be on the left... 
+ BinExpr(_) | RangeExpr(_) | BoxExpr(_) | BreakExpr(_) | ContinueExpr(_) + | PrefixExpr(_) | RefExpr(_) | ReturnExpr(_) | YieldExpr(_) | YeetExpr(_) + | LetExpr(_) => self + .syntax() + .parent() + .and_then(Expr::cast) + .map(|e| e.child_is_followed_by_a_block()) + .unwrap_or(false), + + ForExpr(_) | IfExpr(_) | MatchExpr(_) | WhileExpr(_) => true, + } + } +} diff --git a/crates/syntax/src/ast/token_ext.rs b/crates/syntax/src/ast/token_ext.rs index 8990f7a7d4..2cd312e7f4 100644 --- a/crates/syntax/src/ast/token_ext.rs +++ b/crates/syntax/src/ast/token_ext.rs @@ -436,9 +436,7 @@ mod tests { fn check_string_value<'a>(lit: &str, expected: impl Into>) { assert_eq!( - ast::String { syntax: make::tokens::literal(&format!("\"{}\"", lit)) } - .value() - .as_deref(), + ast::String { syntax: make::tokens::literal(&format!("\"{lit}\"")) }.value().as_deref(), expected.into() ); } @@ -461,7 +459,7 @@ bcde", "abcde", expected: impl Into>, ) { assert_eq!( - ast::ByteString { syntax: make::tokens::literal(&format!("b\"{}\"", lit)) } + ast::ByteString { syntax: make::tokens::literal(&format!("b\"{lit}\"")) } .value() .as_deref(), expected.into().map(|value| &value[..]) @@ -483,7 +481,7 @@ bcde", b"abcde", #[test] fn test_value_underscores() { - check_float_value("3.141592653589793_f64", 3.141592653589793_f64); + check_float_value("1.234567891011121_f64", 1.234567891011121_f64); check_float_value("1__0.__0__f32", 10.0); check_int_value("0b__1_0_", 2); check_int_value("1_1_1_1_1_1", 111111); diff --git a/crates/syntax/src/fuzz.rs b/crates/syntax/src/fuzz.rs index 7c7a60d629..239a89f9b2 100644 --- a/crates/syntax/src/fuzz.rs +++ b/crates/syntax/src/fuzz.rs @@ -36,7 +36,7 @@ impl CheckReparse { let delete_len = usize::from_str(lines.next()?).ok()?; let insert = lines.next()?.to_string(); let text = lines.collect::>().join("\n"); - let text = format!("{}{}{}", PREFIX, text, SUFFIX); + let text = format!("{PREFIX}{text}{SUFFIX}"); text.get(delete_start..delete_start.checked_add(delete_len)?)?; // make sure delete is a valid range let delete = TextRange::at(delete_start.try_into().unwrap(), delete_len.try_into().unwrap()); @@ -60,8 +60,8 @@ impl CheckReparse { eprint!("reparsed:\n{:#?}", new_parse.tree().syntax()); eprint!("full reparse:\n{:#?}", full_reparse.tree().syntax()); assert_eq!( - format!("{:?}", a), - format!("{:?}", b), + format!("{a:?}"), + format!("{b:?}"), "different syntax tree produced by the full reparse" ); } diff --git a/crates/syntax/src/hacks.rs b/crates/syntax/src/hacks.rs index ec3d3d444c..a3023c3195 100644 --- a/crates/syntax/src/hacks.rs +++ b/crates/syntax/src/hacks.rs @@ -6,7 +6,7 @@ use crate::{ast, AstNode}; pub fn parse_expr_from_str(s: &str) -> Option { let s = s.trim(); - let file = ast::SourceFile::parse(&format!("const _: () = {};", s)); + let file = ast::SourceFile::parse(&format!("const _: () = {s};")); let expr = file.syntax_node().descendants().find_map(ast::Expr::cast)?; if expr.syntax().text() != s { return None; diff --git a/crates/syntax/src/ptr.rs b/crates/syntax/src/ptr.rs index a886972fff..1d4a89201a 100644 --- a/crates/syntax/src/ptr.rs +++ b/crates/syntax/src/ptr.rs @@ -82,7 +82,7 @@ impl AstPtr { /// Like `SyntaxNodePtr::cast` but the trait bounds work out. 
pub fn try_from_raw(raw: SyntaxNodePtr) -> Option> { - N::can_cast(raw.kind()).then(|| AstPtr { raw, _ty: PhantomData }) + N::can_cast(raw.kind()).then_some(AstPtr { raw, _ty: PhantomData }) } } diff --git a/crates/syntax/src/ted.rs b/crates/syntax/src/ted.rs index a47b4b11c0..29788d05e8 100644 --- a/crates/syntax/src/ted.rs +++ b/crates/syntax/src/ted.rs @@ -157,7 +157,7 @@ fn ws_before(position: &Position, new: &SyntaxElement) -> Option { if let Some(item_list) = prev.parent().and_then(ast::ItemList::cast) { let mut indent = IndentLevel::from_element(&item_list.syntax().clone().into()); indent.0 += 1; - return Some(make::tokens::whitespace(&format!("\n{}", indent))); + return Some(make::tokens::whitespace(&format!("\n{indent}"))); } } @@ -165,7 +165,7 @@ fn ws_before(position: &Position, new: &SyntaxElement) -> Option { if let Some(stmt_list) = prev.parent().and_then(ast::StmtList::cast) { let mut indent = IndentLevel::from_element(&stmt_list.syntax().clone().into()); indent.0 += 1; - return Some(make::tokens::whitespace(&format!("\n{}", indent))); + return Some(make::tokens::whitespace(&format!("\n{indent}"))); } } @@ -200,7 +200,7 @@ fn ws_between(left: &SyntaxElement, right: &SyntaxElement) -> Option(); - panic!("Parsing errors:\n{}\n", errors); + panic!("Parsing errors:\n{errors}\n"); } } @@ -157,7 +157,7 @@ fn collect_rust_files(root_dir: &Path, paths: &[&str]) -> Vec<(PathBuf, String)> /// Collects paths to all `.rs` files from `dir` in a sorted `Vec`. fn rust_files_in_dir(dir: &Path) -> Vec { let mut acc = Vec::new(); - for file in fs::read_dir(&dir).unwrap() { + for file in fs::read_dir(dir).unwrap() { let file = file.unwrap(); let path = file.path(); if path.extension().unwrap_or_default() == "rs" { @@ -181,6 +181,6 @@ fn rust_files_in_dir(dir: &Path) -> Vec { /// so this should always be correct. 
fn read_text(path: &Path) -> String { fs::read_to_string(path) - .unwrap_or_else(|_| panic!("File at {:?} should be valid", path)) + .unwrap_or_else(|_| panic!("File at {path:?} should be valid")) .replace("\r\n", "\n") } diff --git a/crates/syntax/src/tests/ast_src.rs b/crates/syntax/src/tests/ast_src.rs index cf5be1c30f..3ff6e03006 100644 --- a/crates/syntax/src/tests/ast_src.rs +++ b/crates/syntax/src/tests/ast_src.rs @@ -65,12 +65,12 @@ pub(crate) const KINDS_SRC: KindsSrc<'_> = KindsSrc { (">>=", "SHREQ"), ], keywords: &[ - "as", "async", "await", "box", "break", "const", "continue", "crate", "dyn", "else", + "as", "async", "await", "box", "break", "const", "continue", "crate", "do", "dyn", "else", "enum", "extern", "false", "fn", "for", "if", "impl", "in", "let", "loop", "macro", "match", "mod", "move", "mut", "pub", "ref", "return", "self", "Self", "static", "struct", "super", "trait", "true", "try", "type", "unsafe", "use", "where", "while", "yield", ], - contextual_keywords: &["auto", "default", "existential", "union", "raw", "macro_rules"], + contextual_keywords: &["auto", "default", "existential", "union", "raw", "macro_rules", "yeet"], literals: &["INT_NUMBER", "FLOAT_NUMBER", "CHAR", "BYTE", "STRING", "BYTE_STRING"], tokens: &["ERROR", "IDENT", "WHITESPACE", "LIFETIME_IDENT", "COMMENT", "SHEBANG"], nodes: &[ @@ -142,6 +142,7 @@ pub(crate) const KINDS_SRC: KindsSrc<'_> = KindsSrc { "STMT_LIST", "RETURN_EXPR", "YIELD_EXPR", + "YEET_EXPR", "LET_EXPR", "UNDERSCORE_EXPR", "MACRO_EXPR", diff --git a/crates/syntax/src/tests/sourcegen_ast.rs b/crates/syntax/src/tests/sourcegen_ast.rs index 712ef5f63b..03aa2c451e 100644 --- a/crates/syntax/src/tests/sourcegen_ast.rs +++ b/crates/syntax/src/tests/sourcegen_ast.rs @@ -253,7 +253,7 @@ fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> String { matches!(kind, #(#kinds)|*) } fn cast(syntax: SyntaxNode) -> Option { - Self::can_cast(syntax.kind()).then(|| #name { syntax }) + Self::can_cast(syntax.kind()).then_some(#name { syntax }) } fn syntax(&self) -> &SyntaxNode { &self.syntax @@ -328,7 +328,7 @@ fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> String { fn write_doc_comment(contents: &[String], dest: &mut String) { for line in contents { - writeln!(dest, "///{}", line).unwrap(); + writeln!(dest, "///{line}").unwrap(); } } @@ -501,7 +501,7 @@ fn to_pascal_case(s: &str) -> String { } fn pluralize(s: &str) -> String { - format!("{}s", s) + format!("{s}s") } impl Field { @@ -637,7 +637,7 @@ fn lower_rule(acc: &mut Vec, grammar: &Grammar, label: Option<&String>, r let mut name = grammar[*token].name.clone(); if name != "int_number" && name != "string" { if "[]{}()".contains(&name) { - name = format!("'{}'", name); + name = format!("'{name}'"); } let field = Field::Token(name); acc.push(field); @@ -651,7 +651,7 @@ fn lower_rule(acc: &mut Vec, grammar: &Grammar, label: Option<&String>, r acc.push(field); return; } - panic!("unhandled rule: {:?}", rule) + panic!("unhandled rule: {rule:?}") } Rule::Labeled { label: l, rule } => { assert!(label.is_none()); diff --git a/crates/syntax/src/validation.rs b/crates/syntax/src/validation.rs index 1eea234645..fb2381110b 100644 --- a/crates/syntax/src/validation.rs +++ b/crates/syntax/src/validation.rs @@ -196,7 +196,7 @@ pub(crate) fn validate_block_structure(root: &SyntaxNode) { fn validate_numeric_name(name_ref: Option, errors: &mut Vec) { if let Some(int_token) = int_token(name_ref) { - if int_token.text().chars().any(|c| !c.is_digit(10)) { + if int_token.text().chars().any(|c| 
!c.is_ascii_digit()) { errors.push(SyntaxError::new( "Tuple (struct) field access is only allowed through \ decimal integers with no underscores or suffix", diff --git a/crates/test-utils/src/assert_linear.rs b/crates/test-utils/src/assert_linear.rs index 24502ddb41..15c30c52a5 100644 --- a/crates/test-utils/src/assert_linear.rs +++ b/crates/test-utils/src/assert_linear.rs @@ -83,7 +83,7 @@ impl Round { let a = mean_y - b * mean_x; - self.plot = format!("y_pred = {:.3} + {:.3} * x\n\nx y y_pred\n", a, b); + self.plot = format!("y_pred = {a:.3} + {b:.3} * x\n\nx y y_pred\n"); let mut se = 0.0; let mut max_error = 0.0f64; @@ -100,7 +100,7 @@ impl Round { self.linear = rmse < 0.05 && max_error < 0.1 && a > -0.1; - fn normalize(xs: &mut Vec) { + fn normalize(xs: &mut [f64]) { let max = xs.iter().copied().max_by(|a, b| a.partial_cmp(b).unwrap()).unwrap(); xs.iter_mut().for_each(|it| *it /= max); } diff --git a/crates/test-utils/src/bench_fixture.rs b/crates/test-utils/src/bench_fixture.rs index 979156263d..9296fd2e68 100644 --- a/crates/test-utils/src/bench_fixture.rs +++ b/crates/test-utils/src/bench_fixture.rs @@ -36,10 +36,10 @@ struct S{} {{ pub fn glorious_old_parser() -> String { let path = project_root().join("bench_data/glorious_old_parser"); - fs::read_to_string(&path).unwrap() + fs::read_to_string(path).unwrap() } pub fn numerous_macro_rules() -> String { let path = project_root().join("bench_data/numerous_macro_rules"); - fs::read_to_string(&path).unwrap() + fs::read_to_string(path).unwrap() } diff --git a/crates/test-utils/src/fixture.rs b/crates/test-utils/src/fixture.rs index c824f5af72..d1afd0039a 100644 --- a/crates/test-utils/src/fixture.rs +++ b/crates/test-utils/src/fixture.rs @@ -78,6 +78,7 @@ pub struct Fixture { pub edition: Option, pub env: FxHashMap, pub introduce_new_source_root: Option, + pub target_data_layout: Option, } pub struct MiniCore { @@ -134,11 +135,9 @@ impl Fixture { if line.contains("//-") { assert!( line.starts_with("//-"), - "Metadata line {} has invalid indentation. \ + "Metadata line {ix} has invalid indentation. 
\ All metadata lines need to have the same indentation.\n\ - The offending line: {:?}", - ix, - line + The offending line: {line:?}" ); } @@ -152,7 +151,7 @@ impl Fixture { && !line.contains('.') && line.chars().all(|it| !it.is_uppercase()) { - panic!("looks like invalid metadata line: {:?}", line); + panic!("looks like invalid metadata line: {line:?}"); } if let Some(entry) = res.last_mut() { @@ -171,7 +170,7 @@ impl Fixture { let components = meta.split_ascii_whitespace().collect::>(); let path = components[0].to_string(); - assert!(path.starts_with('/'), "fixture path does not start with `/`: {:?}", path); + assert!(path.starts_with('/'), "fixture path does not start with `/`: {path:?}"); let mut krate = None; let mut deps = Vec::new(); @@ -181,10 +180,10 @@ impl Fixture { let mut cfg_key_values = Vec::new(); let mut env = FxHashMap::default(); let mut introduce_new_source_root = None; + let mut target_data_layout = None; for component in components[1..].iter() { - let (key, value) = component - .split_once(':') - .unwrap_or_else(|| panic!("invalid meta line: {:?}", meta)); + let (key, value) = + component.split_once(':').unwrap_or_else(|| panic!("invalid meta line: {meta:?}")); match key { "crate" => krate = Some(value.to_string()), "deps" => deps = value.split(',').map(|it| it.to_string()).collect(), @@ -213,16 +212,15 @@ impl Fixture { } } "new_source_root" => introduce_new_source_root = Some(value.to_string()), - _ => panic!("bad component: {:?}", component), + "target_data_layout" => target_data_layout = Some(value.to_string()), + _ => panic!("bad component: {component:?}"), } } for prelude_dep in extern_prelude.iter().flatten() { assert!( deps.contains(prelude_dep), - "extern-prelude {:?} must be a subset of deps {:?}", - extern_prelude, - deps + "extern-prelude {extern_prelude:?} must be a subset of deps {deps:?}" ); } @@ -237,6 +235,7 @@ impl Fixture { edition, env, introduce_new_source_root, + target_data_layout, } } } @@ -249,7 +248,7 @@ impl MiniCore { #[track_caller] fn assert_valid_flag(&self, flag: &str) { if !self.valid_flags.iter().any(|it| it == flag) { - panic!("invalid flag: {:?}, valid flags: {:?}", flag, self.valid_flags); + panic!("invalid flag: {flag:?}, valid flags: {:?}", self.valid_flags); } } @@ -259,7 +258,7 @@ impl MiniCore { let line = line.strip_prefix("//- minicore:").unwrap().trim(); for entry in line.split(", ") { if res.has_flag(entry) { - panic!("duplicate minicore flag: {:?}", entry); + panic!("duplicate minicore flag: {entry:?}"); } res.activated_flags.push(entry.to_owned()); } @@ -345,11 +344,7 @@ impl MiniCore { let mut keep = true; for ®ion in &active_regions { - assert!( - !region.starts_with(' '), - "region marker starts with a space: {:?}", - region - ); + assert!(!region.starts_with(' '), "region marker starts with a space: {region:?}"); self.assert_valid_flag(region); seen_regions.push(region); keep &= self.has_flag(region); @@ -365,7 +360,7 @@ impl MiniCore { for flag in &self.valid_flags { if !seen_regions.iter().any(|it| it == flag) { - panic!("unused minicore flag: {:?}", flag); + panic!("unused minicore flag: {flag:?}"); } } buf diff --git a/crates/test-utils/src/lib.rs b/crates/test-utils/src/lib.rs index 8a9cfb6c22..a7a52e08e7 100644 --- a/crates/test-utils/src/lib.rs +++ b/crates/test-utils/src/lib.rs @@ -146,8 +146,8 @@ pub fn extract_range_or_offset(text: &str) -> (RangeOrOffset, String) { /// Extracts ranges, marked with ` ` pairs from the `text` pub fn extract_tags(mut text: &str, tag: &str) -> (Vec<(TextRange, Option)>, String) 
{ - let open = format!("<{}", tag); - let close = format!("", tag); + let open = format!("<{tag}"); + let close = format!(""); let mut ranges = Vec::new(); let mut res = String::new(); let mut stack = Vec::new(); @@ -169,8 +169,7 @@ pub fn extract_tags(mut text: &str, tag: &str) -> (Vec<(TextRange, Option", tag)); + let (from, attr) = stack.pop().unwrap_or_else(|| panic!("unmatched ")); let to = TextSize::of(&res); ranges.push((TextRange::new(from, to), attr)); } else { @@ -180,7 +179,7 @@ pub fn extract_tags(mut text: &str, tag: &str) -> (Vec<(TextRange, Option", tag); + assert!(stack.is_empty(), "unmatched <{tag}>"); ranges.sort_by_key(|r| (r.0.start(), r.0.end())); (ranges, res) } @@ -397,7 +396,7 @@ pub fn skip_slow_tests() -> bool { eprintln!("ignoring slow test"); } else { let path = project_root().join("./target/.slow_tests_cookie"); - fs::write(&path, ".").unwrap(); + fs::write(path, ".").unwrap(); } should_skip } @@ -413,8 +412,8 @@ pub fn format_diff(chunks: Vec>) -> String { for chunk in chunks { let formatted = match chunk { dissimilar::Chunk::Equal(text) => text.into(), - dissimilar::Chunk::Delete(text) => format!("\x1b[41m{}\x1b[0m", text), - dissimilar::Chunk::Insert(text) => format!("\x1b[42m{}\x1b[0m", text), + dissimilar::Chunk::Delete(text) => format!("\x1b[41m{text}\x1b[0m"), + dissimilar::Chunk::Insert(text) => format!("\x1b[42m{text}\x1b[0m"), }; buf.push_str(&formatted); } @@ -480,7 +479,7 @@ pub fn try_ensure_file_contents(file: &Path, contents: &str) -> Result<(), ()> { } _ => (), } - let display_path = file.strip_prefix(&project_root()).unwrap_or(file); + let display_path = file.strip_prefix(project_root()).unwrap_or(file); eprintln!( "\n\x1b[31;1merror\x1b[0m: {} was not up-to-date, updating\n", display_path.display() diff --git a/crates/test-utils/src/minicore.rs b/crates/test-utils/src/minicore.rs index 69d2e62b25..3ca63fcab9 100644 --- a/crates/test-utils/src/minicore.rs +++ b/crates/test-utils/src/minicore.rs @@ -20,6 +20,7 @@ //! derive: //! drop: //! eq: sized +//! error: fmt //! fmt: result //! fn: //! from: sized @@ -29,13 +30,16 @@ //! index: sized //! iterator: option //! iterators: iterator, fn +//! non_zero: //! option: //! ord: eq, option //! pin: //! range: //! result: +//! send: sized //! sized: //! slice: +//! sync: sized //! try: //! 
unsize: sized @@ -47,6 +51,24 @@ pub mod marker { pub trait Sized {} // endregion:sized + // region:send + pub unsafe auto trait Send {} + + impl !Send for *const T {} + impl !Send for *mut T {} + // region:sync + unsafe impl Send for &T {} + unsafe impl Send for &mut T {} + // endregion:sync + // endregion:send + + // region:sync + pub unsafe auto trait Sync {} + + impl !Sync for *const T {} + impl !Sync for *mut T {} + // endregion:sync + // region:unsize #[lang = "unsize"] pub trait Unsize {} @@ -91,7 +113,7 @@ pub mod default { fn default() -> Self; } // region:derive - #[rustc_builtin_macro] + #[rustc_builtin_macro(Default, attributes(default))] pub macro Default($item:item) {} // endregion:derive } @@ -360,6 +382,12 @@ pub mod ops { type Output; fn add(self, rhs: Rhs) -> Self::Output; } + + #[lang = "add_assign"] + #[const_trait] + pub trait AddAssign { + fn add_assign(&mut self, rhs: Rhs); + } // endregion:add // region:generator @@ -438,6 +466,9 @@ pub mod fmt { pub trait Debug { fn fmt(&self, f: &mut Formatter<'_>) -> Result; } + pub trait Display { + fn fmt(&self, f: &mut Formatter<'_>) -> Result; + } } // endregion:fmt @@ -680,6 +711,15 @@ mod macros { } // endregion:derive +// region:non_zero +pub mod num { + #[repr(transparent)] + #[rustc_layout_scalar_valid_range_start(1)] + #[rustc_nonnull_optimization_guaranteed] + pub struct NonZeroU8(u8); +} +// endregion:non_zero + // region:bool_impl #[lang = "bool"] impl bool { @@ -693,6 +733,17 @@ impl bool { } // endregion:bool_impl +// region:error +pub mod error { + #[rustc_has_incoherent_inherent_impls] + pub trait Error: crate::fmt::Debug + crate::fmt::Display { + fn source(&self) -> Option<&(dyn Error + 'static)> { + None + } + } +} +// endregion:error + pub mod prelude { pub mod v1 { pub use crate::{ @@ -705,7 +756,9 @@ pub mod prelude { iter::{IntoIterator, Iterator}, // :iterator macros::builtin::derive, // :derive marker::Copy, // :copy + marker::Send, // :send marker::Sized, // :sized + marker::Sync, // :sync mem::drop, // :drop ops::Drop, // :drop ops::{Fn, FnMut, FnOnce}, // :fn diff --git a/crates/toolchain/src/lib.rs b/crates/toolchain/src/lib.rs index b05da76916..67bdad2aad 100644 --- a/crates/toolchain/src/lib.rs +++ b/crates/toolchain/src/lib.rs @@ -35,7 +35,7 @@ fn get_path_for_executable(executable_name: &'static str) -> PathBuf { // example: for cargo, this tries ~/.cargo/bin/cargo // It seems that this is a reasonable place to try for cargo, rustc, and rustup let env_var = executable_name.to_ascii_uppercase(); - if let Some(path) = env::var_os(&env_var) { + if let Some(path) = env::var_os(env_var) { return path.into(); } diff --git a/crates/tt/src/buffer.rs b/crates/tt/src/buffer.rs index 69226bd4c4..d27a7aa0d4 100644 --- a/crates/tt/src/buffer.rs +++ b/crates/tt/src/buffer.rs @@ -190,7 +190,7 @@ impl<'a> Cursor<'a> { pub fn token_tree(self) -> Option> { match self.entry() { Some(Entry::Leaf(tt)) => match tt { - TokenTree::Leaf(leaf) => Some(TokenTreeRef::Leaf(leaf, *tt)), + TokenTree::Leaf(leaf) => Some(TokenTreeRef::Leaf(leaf, tt)), TokenTree::Subtree(subtree) => Some(TokenTreeRef::Subtree(subtree, Some(tt))), }, Some(Entry::Subtree(tt, subtree, _)) => Some(TokenTreeRef::Subtree(subtree, *tt)), diff --git a/crates/tt/src/lib.rs b/crates/tt/src/lib.rs index a54861de95..353b09fd8c 100644 --- a/crates/tt/src/lib.rs +++ b/crates/tt/src/lib.rs @@ -86,10 +86,20 @@ pub enum Spacing { #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Ident { + /// Identifier or keyword. 
Unlike rustc, we keep "r#" prefix when it represents a raw identifier. pub text: SmolStr, pub id: TokenId, } +impl Ident { + /// Constructor intended to be used only by proc macro server. `text` should not contain raw + /// identifier prefix. + pub fn new_with_is_raw(text: SmolStr, id: TokenId, is_raw: bool) -> Self { + let text = if is_raw { SmolStr::from_iter(["r#", &text]) } else { text }; + Ident { text, id } + } +} + impl Leaf { pub fn id(&self) -> TokenId { match self { @@ -105,15 +115,15 @@ fn print_debug_subtree(f: &mut fmt::Formatter<'_>, subtree: &Subtree, level: usi let aux = match subtree.delimiter.map(|it| (it.kind, it.id.0)) { None => "$".to_string(), - Some((DelimiterKind::Parenthesis, id)) => format!("() {}", id), - Some((DelimiterKind::Brace, id)) => format!("{{}} {}", id), - Some((DelimiterKind::Bracket, id)) => format!("[] {}", id), + Some((DelimiterKind::Parenthesis, id)) => format!("() {id}"), + Some((DelimiterKind::Brace, id)) => format!("{{}} {id}"), + Some((DelimiterKind::Bracket, id)) => format!("[] {id}"), }; if subtree.token_trees.is_empty() { - write!(f, "{}SUBTREE {}", align, aux)?; + write!(f, "{align}SUBTREE {aux}")?; } else { - writeln!(f, "{}SUBTREE {}", align, aux)?; + writeln!(f, "{align}SUBTREE {aux}")?; for (idx, child) in subtree.token_trees.iter().enumerate() { print_debug_token(f, child, level + 1)?; if idx != subtree.token_trees.len() - 1 { @@ -130,7 +140,7 @@ fn print_debug_token(f: &mut fmt::Formatter<'_>, tkn: &TokenTree, level: usize) match tkn { TokenTree::Leaf(leaf) => match leaf { - Leaf::Literal(lit) => write!(f, "{}LITERAL {} {}", align, lit.text, lit.id.0)?, + Leaf::Literal(lit) => write!(f, "{align}LITERAL {} {}", lit.text, lit.id.0)?, Leaf::Punct(punct) => write!( f, "{}PUNCH {} [{}] {}", @@ -139,7 +149,7 @@ fn print_debug_token(f: &mut fmt::Formatter<'_>, tkn: &TokenTree, level: usize) if punct.spacing == Spacing::Alone { "alone" } else { "joint" }, punct.id.0 )?, - Leaf::Ident(ident) => write!(f, "{}IDENT {} {}", align, ident.text, ident.id.0)?, + Leaf::Ident(ident) => write!(f, "{align}IDENT {} {}", ident.text, ident.id.0)?, }, TokenTree::Subtree(subtree) => { print_debug_subtree(f, subtree, level)?; @@ -302,7 +312,7 @@ pub fn pretty(tkns: &[TokenTree]) -> String { Some(DelimiterKind::Parenthesis) => ("(", ")"), Some(DelimiterKind::Bracket) => ("[", "]"), }; - format!("{}{}{}", open, content, close) + format!("{open}{content}{close}") } } } diff --git a/crates/vfs/src/file_set.rs b/crates/vfs/src/file_set.rs index e0ef737b3f..700aebe0b3 100644 --- a/crates/vfs/src/file_set.rs +++ b/crates/vfs/src/file_set.rs @@ -140,16 +140,11 @@ impl FileSetConfig { } /// Builder for [`FileSetConfig`]. +#[derive(Default)] pub struct FileSetConfigBuilder { roots: Vec>, } -impl Default for FileSetConfigBuilder { - fn default() -> Self { - FileSetConfigBuilder { roots: Vec::new() } - } -} - impl FileSetConfigBuilder { /// Returns the number of sets currently held. pub fn len(&self) -> usize { diff --git a/crates/vfs/src/lib.rs b/crates/vfs/src/lib.rs index afc9a0fa6f..c61f30387b 100644 --- a/crates/vfs/src/lib.rs +++ b/crates/vfs/src/lib.rs @@ -59,15 +59,10 @@ pub use paths::{AbsPath, AbsPathBuf}; /// Handle to a file in [`Vfs`] /// /// Most functions in rust-analyzer use this when they need to refer to a file. 
-#[derive(Copy, Clone, Debug, Ord, PartialOrd, Eq, PartialEq)] +#[derive(Copy, Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)] pub struct FileId(pub u32); impl stdx::hash::NoHashHashable for FileId {} -impl std::hash::Hash for FileId { - fn hash(&self, state: &mut H) { - self.0.hash(state); - } -} /// Storage for all files read by rust-analyzer. /// diff --git a/crates/vfs/src/path_interner.rs b/crates/vfs/src/path_interner.rs index 6e049f0d40..64f5197605 100644 --- a/crates/vfs/src/path_interner.rs +++ b/crates/vfs/src/path_interner.rs @@ -9,16 +9,11 @@ use rustc_hash::FxHasher; use crate::{FileId, VfsPath}; /// Structure to map between [`VfsPath`] and [`FileId`]. +#[derive(Default)] pub(crate) struct PathInterner { map: IndexSet>, } -impl Default for PathInterner { - fn default() -> Self { - Self { map: IndexSet::default() } - } -} - impl PathInterner { /// Get the id corresponding to `path`. /// diff --git a/crates/vfs/src/vfs_path.rs b/crates/vfs/src/vfs_path.rs index 668c7320d4..b23c9f1966 100644 --- a/crates/vfs/src/vfs_path.rs +++ b/crates/vfs/src/vfs_path.rs @@ -364,7 +364,7 @@ impl VirtualPath { path = &path["../".len()..]; } path = path.trim_start_matches("./"); - res.0 = format!("{}/{}", res.0, path); + res.0 = format!("{}/{path}", res.0); Some(res) } diff --git a/docs/dev/README.md b/docs/dev/README.md index 4ac75b4bbf..cdab6b0992 100644 --- a/docs/dev/README.md +++ b/docs/dev/README.md @@ -200,7 +200,7 @@ Look for `fn benchmark_xxx` tests for a quick way to reproduce performance probl ## Release Process -Release process is handled by `release`, `dist` and `promote` xtasks, `release` being the main one. +Release process is handled by `release`, `dist`, `publish-release-notes` and `promote` xtasks, `release` being the main one. `release` assumes that you have checkouts of `rust-analyzer`, `rust-analyzer.github.io`, and `rust-lang/rust` in the same directory: @@ -231,8 +231,9 @@ Release steps: * create a new changelog in `rust-analyzer.github.io` 3. While the release is in progress, fill in the changelog 4. Commit & push the changelog -5. Tweet -6. Inside `rust-analyzer`, run `cargo xtask promote` -- this will create a PR to rust-lang/rust updating rust-analyzer's subtree. +5. Run `cargo xtask publish-release-notes ` -- this will convert the changelog entry in AsciiDoc to Markdown and update the body of GitHub Releases entry. +6. Tweet +7. Inside `rust-analyzer`, run `cargo xtask promote` -- this will create a PR to rust-lang/rust updating rust-analyzer's subtree. Self-approve the PR. If the GitHub Actions release fails because of a transient problem like a timeout, you can re-run the job from the Actions console. diff --git a/docs/dev/lsp-extensions.md b/docs/dev/lsp-extensions.md index fe316fcae9..a4780af1a2 100644 --- a/docs/dev/lsp-extensions.md +++ b/docs/dev/lsp-extensions.md @@ -1,5 +1,5 @@ [^\n]+\n/m, ""); } + let value; + if (errorCode) { + if (typeof diag.code === "string" || typeof diag.code === "number") { + value = diag.code; + } else { + value = diag.code?.value; + } + } diag.code = { target: vscode.Uri.from({ - scheme: "rust-analyzer-diagnostics-view", - path: "/diagnostic message", + scheme: diagnostics.URI_SCHEME, + path: `/diagnostic message [${idx.toString()}]`, fragment: uri.toString(), query: idx.toString(), }), - value: "Click for full compiler diagnostic", + value: value ?? 
"Click for full compiler diagnostic", }; } }); - return next(uri, diagnostics); + return next(uri, diagnosticList); }, async provideHover( document: vscode.TextDocument, @@ -302,6 +333,7 @@ class ExperimentalFeatures implements lc.StaticFeature { caps.codeActionGroup = true; caps.hoverActions = true; caps.serverStatusNotification = true; + caps.colorDiagnosticOutput = true; caps.commands = { commands: [ "rust-analyzer.runSingle", diff --git a/editors/code/src/commands.ts b/editors/code/src/commands.ts index 312087e4cf..cb4e13e2c6 100644 --- a/editors/code/src/commands.ts +++ b/editors/code/src/commands.ts @@ -788,8 +788,23 @@ export function openDocs(ctx: CtxInit): Cmd { export function cancelFlycheck(ctx: CtxInit): Cmd { return async () => { + await ctx.client.sendNotification(ra.cancelFlycheck); + }; +} + +export function clearFlycheck(ctx: CtxInit): Cmd { + return async () => { + await ctx.client.sendNotification(ra.clearFlycheck); + }; +} + +export function runFlycheck(ctx: CtxInit): Cmd { + return async () => { + const editor = ctx.activeRustEditor; const client = ctx.client; - await client.sendRequest(ra.cancelFlycheck); + const params = editor ? { uri: editor.document.uri.toString() } : null; + + await client.sendNotification(ra.runFlycheck, { textDocument: params }); }; } @@ -797,12 +812,12 @@ export function resolveCodeAction(ctx: CtxInit): Cmd { return async (params: lc.CodeAction) => { const client = ctx.client; params.command = undefined; - const item = await client?.sendRequest(lc.CodeActionResolveRequest.type, params); + const item = await client.sendRequest(lc.CodeActionResolveRequest.type, params); if (!item?.edit) { return; } const itemEdit = item.edit; - const edit = await client?.protocol2CodeConverter.asWorkspaceEdit(itemEdit); + const edit = await client.protocol2CodeConverter.asWorkspaceEdit(itemEdit); // filter out all text edits and recreate the WorkspaceEdit without them so we can apply // snippet edits on our own const lcFileSystemEdit = { diff --git a/editors/code/src/config.ts b/editors/code/src/config.ts index d8dbd1df16..eb4f965291 100644 --- a/editors/code/src/config.ts +++ b/editors/code/src/config.ts @@ -241,6 +241,10 @@ export class Config { get previewRustcOutput() { return this.get("diagnostics.previewRustcOutput"); } + + get useRustcErrorCode() { + return this.get("diagnostics.useRustcErrorCode"); + } } const VarRegex = new RegExp(/\$\{(.+?)\}/g); diff --git a/editors/code/src/diagnostics.ts b/editors/code/src/diagnostics.ts new file mode 100644 index 0000000000..9695d8bf26 --- /dev/null +++ b/editors/code/src/diagnostics.ts @@ -0,0 +1,212 @@ +import * as anser from "anser"; +import * as vscode from "vscode"; +import { ProviderResult, Range, TextEditorDecorationType, ThemeColor, window } from "vscode"; +import { Ctx } from "./ctx"; + +export const URI_SCHEME = "rust-analyzer-diagnostics-view"; + +export class TextDocumentProvider implements vscode.TextDocumentContentProvider { + private _onDidChange = new vscode.EventEmitter(); + + public constructor(private readonly ctx: Ctx) {} + + get onDidChange(): vscode.Event { + return this._onDidChange.event; + } + + triggerUpdate(uri: vscode.Uri) { + if (uri.scheme === URI_SCHEME) { + this._onDidChange.fire(uri); + } + } + + dispose() { + this._onDidChange.dispose(); + } + + async provideTextDocumentContent(uri: vscode.Uri): Promise { + const contents = getRenderedDiagnostic(this.ctx, uri); + return anser.ansiToText(contents); + } +} + +function getRenderedDiagnostic(ctx: Ctx, uri: vscode.Uri): string { + const 
diags = ctx.client?.diagnostics?.get(vscode.Uri.parse(uri.fragment, true)); + if (!diags) { + return "Unable to find original rustc diagnostic"; + } + + const diag = diags[parseInt(uri.query)]; + if (!diag) { + return "Unable to find original rustc diagnostic"; + } + const rendered = (diag as unknown as { data?: { rendered?: string } }).data?.rendered; + + if (!rendered) { + return "Unable to find original rustc diagnostic"; + } + + return rendered; +} + +interface AnserStyle { + fg: string; + bg: string; + fg_truecolor: string; + bg_truecolor: string; + decorations: Array; +} + +export class AnsiDecorationProvider implements vscode.Disposable { + private _decorationTypes = new Map(); + + public constructor(private readonly ctx: Ctx) {} + + dispose(): void { + for (const decorationType of this._decorationTypes.values()) { + decorationType.dispose(); + } + + this._decorationTypes.clear(); + } + + async provideDecorations(editor: vscode.TextEditor) { + if (editor.document.uri.scheme !== URI_SCHEME) { + return; + } + + const decorations = (await this._getDecorations(editor.document.uri)) || []; + for (const [decorationType, ranges] of decorations) { + editor.setDecorations(decorationType, ranges); + } + } + + private _getDecorations( + uri: vscode.Uri + ): ProviderResult<[TextEditorDecorationType, Range[]][]> { + const stringContents = getRenderedDiagnostic(this.ctx, uri); + const lines = stringContents.split("\n"); + + const result = new Map(); + // Populate all known decoration types in the result. This forces any + // lingering decorations to be cleared if the text content changes to + // something without ANSI codes for a given decoration type. + for (const decorationType of this._decorationTypes.values()) { + result.set(decorationType, []); + } + + for (const [lineNumber, line] of lines.entries()) { + const totalEscapeLength = 0; + + // eslint-disable-next-line camelcase + const parsed = anser.ansiToJson(line, { use_classes: true }); + + let offset = 0; + + for (const span of parsed) { + const { content, ...style } = span; + + const range = new Range( + lineNumber, + offset - totalEscapeLength, + lineNumber, + offset + content.length - totalEscapeLength + ); + + offset += content.length; + + const decorationType = this._getDecorationType(style); + + if (!result.has(decorationType)) { + result.set(decorationType, []); + } + + result.get(decorationType)!.push(range); + } + } + + return [...result]; + } + + private _getDecorationType(style: AnserStyle): TextEditorDecorationType { + let decorationType = this._decorationTypes.get(style); + + if (decorationType) { + return decorationType; + } + + const fontWeight = style.decorations.find((s) => s === "bold"); + const fontStyle = style.decorations.find((s) => s === "italic"); + const textDecoration = style.decorations.find((s) => s === "underline"); + + decorationType = window.createTextEditorDecorationType({ + backgroundColor: AnsiDecorationProvider._convertColor(style.bg, style.bg_truecolor), + color: AnsiDecorationProvider._convertColor(style.fg, style.fg_truecolor), + fontWeight, + fontStyle, + textDecoration, + }); + + this._decorationTypes.set(style, decorationType); + + return decorationType; + } + + // NOTE: This could just be a kebab-case to camelCase conversion, but I think it's + // a short enough list to just write these by hand + static readonly _anserToThemeColor: Record = { + "ansi-black": "ansiBlack", + "ansi-white": "ansiWhite", + "ansi-red": "ansiRed", + "ansi-green": "ansiGreen", + "ansi-yellow": "ansiYellow", + "ansi-blue": 
"ansiBlue", + "ansi-magenta": "ansiMagenta", + "ansi-cyan": "ansiCyan", + + "ansi-bright-black": "ansiBrightBlack", + "ansi-bright-white": "ansiBrightWhite", + "ansi-bright-red": "ansiBrightRed", + "ansi-bright-green": "ansiBrightGreen", + "ansi-bright-yellow": "ansiBrightYellow", + "ansi-bright-blue": "ansiBrightBlue", + "ansi-bright-magenta": "ansiBrightMagenta", + "ansi-bright-cyan": "ansiBrightCyan", + }; + + private static _convertColor( + color?: string, + truecolor?: string + ): ThemeColor | string | undefined { + if (!color) { + return undefined; + } + + if (color === "ansi-truecolor") { + if (!truecolor) { + return undefined; + } + return `rgb(${truecolor})`; + } + + const paletteMatch = color.match(/ansi-palette-(.+)/); + if (paletteMatch) { + const paletteColor = paletteMatch[1]; + // anser won't return both the RGB and the color name at the same time, + // so just fake a single foreground control char with the palette number: + const spans = anser.ansiToJson(`\x1b[38;5;${paletteColor}m`); + const rgb = spans[1].fg; + + if (rgb) { + return `rgb(${rgb})`; + } + } + + const themeColor = AnsiDecorationProvider._anserToThemeColor[color]; + if (themeColor) { + return new ThemeColor("terminal." + themeColor); + } + + return undefined; + } +} diff --git a/editors/code/src/lsp_ext.ts b/editors/code/src/lsp_ext.ts index 875261c48a..29349cc20f 100644 --- a/editors/code/src/lsp_ext.ts +++ b/editors/code/src/lsp_ext.ts @@ -79,7 +79,11 @@ export const relatedTests = new lc.RequestType("rust-analyzer/cancelFlycheck"); +export const cancelFlycheck = new lc.NotificationType0("rust-analyzer/cancelFlycheck"); +export const clearFlycheck = new lc.NotificationType0("rust-analyzer/clearFlycheck"); +export const runFlycheck = new lc.NotificationType<{ + textDocument: lc.TextDocumentIdentifier | null; +}>("rust-analyzer/runFlycheck"); // Experimental extensions diff --git a/editors/code/src/main.ts b/editors/code/src/main.ts index 25f1e83d10..dd439317c7 100644 --- a/editors/code/src/main.ts +++ b/editors/code/src/main.ts @@ -3,6 +3,7 @@ import * as lc from "vscode-languageclient/node"; import * as commands from "./commands"; import { CommandFactory, Ctx, fetchWorkspace } from "./ctx"; +import * as diagnostics from "./diagnostics"; import { activateTaskProvider } from "./tasks"; import { setContextValue } from "./util"; @@ -48,30 +49,52 @@ async function activateServer(ctx: Ctx): Promise { ctx.pushExtCleanup(activateTaskProvider(ctx.config)); } + const diagnosticProvider = new diagnostics.TextDocumentProvider(ctx); ctx.pushExtCleanup( vscode.workspace.registerTextDocumentContentProvider( - "rust-analyzer-diagnostics-view", - new (class implements vscode.TextDocumentContentProvider { - async provideTextDocumentContent(uri: vscode.Uri): Promise { - const diags = ctx.client?.diagnostics?.get( - vscode.Uri.parse(uri.fragment, true) - ); - if (!diags) { - return "Unable to find original rustc diagnostic"; - } - - const diag = diags[parseInt(uri.query)]; - if (!diag) { - return "Unable to find original rustc diagnostic"; - } - const rendered = (diag as unknown as { data?: { rendered?: string } }).data - ?.rendered; - return rendered ?? 
"Unable to find original rustc diagnostic"; - } - })() + diagnostics.URI_SCHEME, + diagnosticProvider ) ); + const decorationProvider = new diagnostics.AnsiDecorationProvider(ctx); + ctx.pushExtCleanup(decorationProvider); + + async function decorateVisibleEditors(document: vscode.TextDocument) { + for (const editor of vscode.window.visibleTextEditors) { + if (document === editor.document) { + await decorationProvider.provideDecorations(editor); + } + } + } + + vscode.workspace.onDidChangeTextDocument( + async (event) => await decorateVisibleEditors(event.document), + null, + ctx.subscriptions + ); + vscode.workspace.onDidOpenTextDocument(decorateVisibleEditors, null, ctx.subscriptions); + vscode.window.onDidChangeActiveTextEditor( + async (editor) => { + if (editor) { + diagnosticProvider.triggerUpdate(editor.document.uri); + await decorateVisibleEditors(editor.document); + } + }, + null, + ctx.subscriptions + ); + vscode.window.onDidChangeVisibleTextEditors( + async (visibleEditors) => { + for (const editor of visibleEditors) { + diagnosticProvider.triggerUpdate(editor.document.uri); + await decorationProvider.provideDecorations(editor); + } + }, + null, + ctx.subscriptions + ); + vscode.workspace.onDidChangeWorkspaceFolders( async (_) => ctx.onWorkspaceFolderChanges(), null, @@ -79,7 +102,7 @@ async function activateServer(ctx: Ctx): Promise { ); vscode.workspace.onDidChangeConfiguration( async (_) => { - await ctx.client?.sendNotification("workspace/didChangeConfiguration", { + await ctx.client?.sendNotification(lc.DidChangeConfigurationNotification.type, { settings: "", }); }, @@ -150,6 +173,8 @@ function createCommands(): Record { moveItemUp: { enabled: commands.moveItemUp }, moveItemDown: { enabled: commands.moveItemDown }, cancelFlycheck: { enabled: commands.cancelFlycheck }, + clearFlycheck: { enabled: commands.clearFlycheck }, + runFlycheck: { enabled: commands.runFlycheck }, ssr: { enabled: commands.ssr }, serverVersion: { enabled: commands.serverVersion }, // Internal commands which are invoked by the server. diff --git a/lib/la-arena/src/map.rs b/lib/la-arena/src/map.rs index 5f347e2745..b9d491da3c 100644 --- a/lib/la-arena/src/map.rs +++ b/lib/la-arena/src/map.rs @@ -86,6 +86,14 @@ impl ArenaMap, V> { self.v.iter().enumerate().filter_map(|(idx, o)| Some((Self::from_idx(idx), o.as_ref()?))) } + /// Returns an iterator over the arena indexes and values in the map. + pub fn iter_mut(&mut self) -> impl Iterator, &mut V)> { + self.v + .iter_mut() + .enumerate() + .filter_map(|(idx, o)| Some((Self::from_idx(idx), o.as_mut()?))) + } + /// Gets the given key's corresponding entry in the map for in-place manipulation. pub fn entry(&mut self, idx: Idx) -> Entry<'_, Idx, V> { let idx = Self::to_idx(idx); diff --git a/lib/lsp-server/examples/goto_def.rs b/lib/lsp-server/examples/goto_def.rs index ca7ad0b536..2f270afbbf 100644 --- a/lib/lsp-server/examples/goto_def.rs +++ b/lib/lsp-server/examples/goto_def.rs @@ -80,32 +80,32 @@ fn main_loop( let _params: InitializeParams = serde_json::from_value(params).unwrap(); eprintln!("starting example main loop"); for msg in &connection.receiver { - eprintln!("got msg: {:?}", msg); + eprintln!("got msg: {msg:?}"); match msg { Message::Request(req) => { if connection.handle_shutdown(&req)? 
{ return Ok(()); } - eprintln!("got request: {:?}", req); + eprintln!("got request: {req:?}"); match cast::(req) { Ok((id, params)) => { - eprintln!("got gotoDefinition request #{}: {:?}", id, params); + eprintln!("got gotoDefinition request #{id}: {params:?}"); let result = Some(GotoDefinitionResponse::Array(Vec::new())); let result = serde_json::to_value(&result).unwrap(); let resp = Response { id, result: Some(result), error: None }; connection.sender.send(Message::Response(resp))?; continue; } - Err(err @ ExtractError::JsonError { .. }) => panic!("{:?}", err), + Err(err @ ExtractError::JsonError { .. }) => panic!("{err:?}"), Err(ExtractError::MethodMismatch(req)) => req, }; // ... } Message::Response(resp) => { - eprintln!("got response: {:?}", resp); + eprintln!("got response: {resp:?}"); } Message::Notification(not) => { - eprintln!("got notification: {:?}", not); + eprintln!("got notification: {not:?}"); } } } diff --git a/lib/lsp-server/src/lib.rs b/lib/lsp-server/src/lib.rs index d567077d4a..beccde40a8 100644 --- a/lib/lsp-server/src/lib.rs +++ b/lib/lsp-server/src/lib.rs @@ -114,30 +114,21 @@ impl Connection { /// ``` pub fn initialize_start(&self) -> Result<(RequestId, serde_json::Value), ProtocolError> { loop { - match self.receiver.recv() { - Ok(Message::Request(req)) if req.is_initialize() => { - return Ok((req.id, req.params)) - } + break match self.receiver.recv() { + Ok(Message::Request(req)) if req.is_initialize() => Ok((req.id, req.params)), // Respond to non-initialize requests with ServerNotInitialized Ok(Message::Request(req)) => { let resp = Response::new_err( req.id.clone(), ErrorCode::ServerNotInitialized as i32, - format!("expected initialize request, got {:?}", req), + format!("expected initialize request, got {req:?}"), ); self.sender.send(resp.into()).unwrap(); + continue; } - Ok(msg) => { - return Err(ProtocolError(format!( - "expected initialize request, got {:?}", - msg - ))) - } + Ok(msg) => Err(ProtocolError(format!("expected initialize request, got {msg:?}"))), Err(e) => { - return Err(ProtocolError(format!( - "expected initialize request, got error: {}", - e - ))) + Err(ProtocolError(format!("expected initialize request, got error: {e}"))) } }; } @@ -152,21 +143,14 @@ impl Connection { let resp = Response::new_ok(initialize_id, initialize_result); self.sender.send(resp.into()).unwrap(); match &self.receiver.recv() { - Ok(Message::Notification(n)) if n.is_initialized() => (), + Ok(Message::Notification(n)) if n.is_initialized() => Ok(()), Ok(msg) => { - return Err(ProtocolError(format!( - "expected Message::Notification, got: {:?}", - msg, - ))) + Err(ProtocolError(format!(r#"expected initialized notification, got: {msg:?}"#))) } Err(e) => { - return Err(ProtocolError(format!( - "expected initialized notification, got error: {}", - e, - ))) + Err(ProtocolError(format!("expected initialized notification, got error: {e}",))) } } - Ok(()) } /// Initialize the connection. 
Sends the server capabilities @@ -221,11 +205,9 @@ impl Connection { match &self.receiver.recv_timeout(std::time::Duration::from_secs(30)) { Ok(Message::Notification(n)) if n.is_exit() => (), Ok(msg) => { - return Err(ProtocolError(format!("unexpected message during shutdown: {:?}", msg))) - } - Err(e) => { - return Err(ProtocolError(format!("unexpected error during shutdown: {}", e))) + return Err(ProtocolError(format!("unexpected message during shutdown: {msg:?}"))) } + Err(e) => return Err(ProtocolError(format!("unexpected error during shutdown: {e}"))), } Ok(true) } diff --git a/xtask/Cargo.toml b/xtask/Cargo.toml index 95e27beab5..2dd01796c6 100644 --- a/xtask/Cargo.toml +++ b/xtask/Cargo.toml @@ -12,4 +12,5 @@ flate2 = "1.0.24" write-json = "0.1.2" xshell = "0.2.2" xflags = "0.3.0" +zip = { version = "0.6", default-features = false, features = ["deflate", "time"] } # Avoid adding more dependencies to this crate diff --git a/xtask/src/dist.rs b/xtask/src/dist.rs index 686aec4ae5..74715c53ea 100644 --- a/xtask/src/dist.rs +++ b/xtask/src/dist.rs @@ -1,12 +1,13 @@ use std::{ env, fs::File, - io, + io::{self, BufWriter}, path::{Path, PathBuf}, }; use flate2::{write::GzEncoder, Compression}; use xshell::{cmd, Shell}; +use zip::{write::FileOptions, DateTime, ZipWriter}; use crate::{date_iso, flags, project_root}; @@ -26,10 +27,10 @@ impl flags::Dist { if let Some(patch_version) = self.client_patch_version { let version = if stable { - format!("{}.{}", VERSION_STABLE, patch_version) + format!("{VERSION_STABLE}.{patch_version}") } else { // A hack to make VS Code prefer nightly over stable. - format!("{}.{}", VERSION_NIGHTLY, patch_version) + format!("{VERSION_NIGHTLY}.{patch_version}") }; dist_server(sh, &format!("{version}-standalone"), &target)?; let release_tag = if stable { date_iso(sh)? 
} else { "nightly".to_string() }; @@ -59,10 +60,10 @@ fn dist_client( let mut patch = Patch::new(sh, "./package.json")?; patch .replace( - &format!(r#""version": "{}.0-dev""#, VERSION_DEV), - &format!(r#""version": "{}""#, version), + &format!(r#""version": "{VERSION_DEV}.0-dev""#), + &format!(r#""version": "{version}""#), ) - .replace(r#""releaseTag": null"#, &format!(r#""releaseTag": "{}""#, release_tag)) + .replace(r#""releaseTag": null"#, &format!(r#""releaseTag": "{release_tag}""#)) .replace(r#""$generated-start": {},"#, "") .replace(",\n \"$generated-end\": {}", "") .replace(r#""enabledApiProposals": [],"#, r#""#); @@ -89,6 +90,9 @@ fn dist_server(sh: &Shell, release: &str, target: &Target) -> anyhow::Result<()> let dst = Path::new("dist").join(&target.artifact_name); gzip(&target.server_path, &dst.with_extension("gz"))?; + if target_name.contains("-windows-") { + zip(&target.server_path, target.symbols_path.as_ref(), &dst.with_extension("zip"))?; + } Ok(()) } @@ -101,6 +105,38 @@ fn gzip(src_path: &Path, dest_path: &Path) -> anyhow::Result<()> { Ok(()) } +fn zip(src_path: &Path, symbols_path: Option<&PathBuf>, dest_path: &Path) -> anyhow::Result<()> { + let file = File::create(dest_path)?; + let mut writer = ZipWriter::new(BufWriter::new(file)); + writer.start_file( + src_path.file_name().unwrap().to_str().unwrap(), + FileOptions::default() + .last_modified_time( + DateTime::from_time(std::fs::metadata(src_path)?.modified()?.into()).unwrap(), + ) + .unix_permissions(0o755) + .compression_method(zip::CompressionMethod::Deflated) + .compression_level(Some(9)), + )?; + let mut input = io::BufReader::new(File::open(src_path)?); + io::copy(&mut input, &mut writer)?; + if let Some(symbols_path) = symbols_path { + writer.start_file( + symbols_path.file_name().unwrap().to_str().unwrap(), + FileOptions::default() + .last_modified_time( + DateTime::from_time(std::fs::metadata(src_path)?.modified()?.into()).unwrap(), + ) + .compression_method(zip::CompressionMethod::Deflated) + .compression_level(Some(9)), + )?; + let mut input = io::BufReader::new(File::open(symbols_path)?); + io::copy(&mut input, &mut writer)?; + } + writer.finish()?; + Ok(()) +} + struct Target { name: String, server_path: PathBuf, @@ -130,8 +166,8 @@ impl Target { } else { (String::new(), None) }; - let server_path = out_path.join(format!("rust-analyzer{}", exe_suffix)); - let artifact_name = format!("rust-analyzer-{}{}", name, exe_suffix); + let server_path = out_path.join(format!("rust-analyzer{exe_suffix}")); + let artifact_name = format!("rust-analyzer-{name}{exe_suffix}"); Self { name, server_path, symbols_path, artifact_name } } } diff --git a/xtask/src/flags.rs b/xtask/src/flags.rs index 0fce488983..2100479701 100644 --- a/xtask/src/flags.rs +++ b/xtask/src/flags.rs @@ -34,6 +34,13 @@ xflags::xflags! { cmd dist { optional --client-patch-version version: String } + /// Read a changelog AsciiDoc file and update the GitHub Releases entry in Markdown. + cmd publish-release-notes { + /// Only run conversion and show the result. + optional --dry-run + /// Target changelog file. 
+ required changelog: String + } cmd metrics { optional --dry-run } @@ -59,6 +66,7 @@ pub enum XtaskCmd { Release(Release), Promote(Promote), Dist(Dist), + PublishReleaseNotes(PublishReleaseNotes), Metrics(Metrics), Bb(Bb), } @@ -90,6 +98,13 @@ pub struct Dist { pub client_patch_version: Option, } +#[derive(Debug)] +pub struct PublishReleaseNotes { + pub changelog: String, + + pub dry_run: bool, +} + #[derive(Debug)] pub struct Metrics { pub dry_run: bool, diff --git a/xtask/src/install.rs b/xtask/src/install.rs index ae978d5512..83223a551d 100644 --- a/xtask/src/install.rs +++ b/xtask/src/install.rs @@ -62,7 +62,7 @@ fn fix_path_for_mac(sh: &Shell) -> Result<()> { let mut paths = env::split_paths(&vars).collect::>(); paths.append(&mut vscode_path); let new_paths = env::join_paths(paths).context("build env PATH")?; - sh.set_var("PATH", &new_paths); + sh.set_var("PATH", new_paths); } Ok(()) diff --git a/xtask/src/main.rs b/xtask/src/main.rs index a37f469adc..6a45033ada 100644 --- a/xtask/src/main.rs +++ b/xtask/src/main.rs @@ -15,6 +15,7 @@ mod flags; mod install; mod release; mod dist; +mod publish; mod metrics; use anyhow::bail; @@ -36,6 +37,7 @@ fn main() -> anyhow::Result<()> { flags::XtaskCmd::Release(cmd) => cmd.run(sh), flags::XtaskCmd::Promote(cmd) => cmd.run(sh), flags::XtaskCmd::Dist(cmd) => cmd.run(sh), + flags::XtaskCmd::PublishReleaseNotes(cmd) => cmd.run(sh), flags::XtaskCmd::Metrics(cmd) => cmd.run(sh), flags::XtaskCmd::Bb(cmd) => { { diff --git a/xtask/src/metrics.rs b/xtask/src/metrics.rs index ebeb873463..b6f730dbf1 100644 --- a/xtask/src/metrics.rs +++ b/xtask/src/metrics.rs @@ -87,7 +87,7 @@ impl Metrics { self.measure_analysis_stats_path( sh, bench, - &format!("./target/rustc-perf/collector/benchmarks/{}", bench), + &format!("./target/rustc-perf/collector/benchmarks/{bench}"), ) } fn measure_analysis_stats_path( diff --git a/xtask/src/publish.rs b/xtask/src/publish.rs new file mode 100644 index 0000000000..79b5f3d2f6 --- /dev/null +++ b/xtask/src/publish.rs @@ -0,0 +1,109 @@ +mod notes; + +use crate::flags; +use anyhow::{anyhow, bail, Result}; +use std::env; +use xshell::{cmd, Shell}; + +impl flags::PublishReleaseNotes { + pub(crate) fn run(self, sh: &Shell) -> Result<()> { + let asciidoc = sh.read_file(&self.changelog)?; + let mut markdown = notes::convert_asciidoc_to_markdown(std::io::Cursor::new(&asciidoc))?; + let file_name = check_file_name(self.changelog)?; + let tag_name = &file_name[0..10]; + let original_changelog_url = create_original_changelog_url(&file_name); + let additional_paragraph = + format!("\nSee also [original changelog]({original_changelog_url})."); + markdown.push_str(&additional_paragraph); + if self.dry_run { + println!("{markdown}"); + } else { + update_release(sh, tag_name, &markdown)?; + } + Ok(()) + } +} + +fn check_file_name>(path: P) -> Result { + let file_name = path + .as_ref() + .file_name() + .ok_or_else(|| anyhow!("file name is not specified as `changelog`"))? 
+ .to_string_lossy(); + + let mut chars = file_name.chars(); + if file_name.len() >= 10 + && chars.next().unwrap().is_ascii_digit() + && chars.next().unwrap().is_ascii_digit() + && chars.next().unwrap().is_ascii_digit() + && chars.next().unwrap().is_ascii_digit() + && chars.next().unwrap() == '-' + && chars.next().unwrap().is_ascii_digit() + && chars.next().unwrap().is_ascii_digit() + && chars.next().unwrap() == '-' + && chars.next().unwrap().is_ascii_digit() + && chars.next().unwrap().is_ascii_digit() + { + Ok(file_name.to_string()) + } else { + bail!("unexpected file name format; no date information prefixed") + } +} + +fn create_original_changelog_url(file_name: &str) -> String { + let year = &file_name[0..4]; + let month = &file_name[5..7]; + let day = &file_name[8..10]; + let mut stem = &file_name[11..]; + if let Some(stripped) = stem.strip_suffix(".adoc") { + stem = stripped; + } + format!("https://rust-analyzer.github.io/thisweek/{year}/{month}/{day}/{stem}.html") +} + +fn update_release(sh: &Shell, tag_name: &str, release_notes: &str) -> Result<()> { + let token = match env::var("GITHUB_TOKEN") { + Ok(token) => token, + Err(_) => bail!("Please obtain a personal access token from https://github.com/settings/tokens and set the `GITHUB_TOKEN` environment variable."), + }; + let accept = "Accept: application/vnd.github+json"; + let authorization = format!("Authorization: Bearer {token}"); + let api_version = "X-GitHub-Api-Version: 2022-11-28"; + let release_url = "https://api.github.com/repos/rust-lang/rust-analyzer/releases"; + + let release_json = cmd!( + sh, + "curl -sf -H {accept} -H {authorization} -H {api_version} {release_url}/tags/{tag_name}" + ) + .read()?; + let release_id = cmd!(sh, "jq .id").stdin(release_json).read()?; + + let mut patch = String::new(); + write_json::object(&mut patch) + .string("tag_name", tag_name) + .string("target_commitish", "master") + .string("name", tag_name) + .string("body", release_notes) + .bool("draft", false) + .bool("prerelease", false); + let _ = cmd!( + sh, + "curl -sf -X PATCH -H {accept} -H {authorization} -H {api_version} {release_url}/{release_id} -d {patch}" + ) + .read()?; + + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn original_changelog_url_creation() { + let input = "2019-07-24-changelog-0.adoc"; + let actual = create_original_changelog_url(input); + let expected = "https://rust-analyzer.github.io/thisweek/2019/07/24/changelog-0.html"; + assert_eq!(actual, expected); + } +} diff --git a/xtask/src/publish/notes.rs b/xtask/src/publish/notes.rs new file mode 100644 index 0000000000..c30267295b --- /dev/null +++ b/xtask/src/publish/notes.rs @@ -0,0 +1,631 @@ +use anyhow::{anyhow, bail}; +use std::{ + borrow::Cow, + io::{BufRead, Lines}, + iter::Peekable, +}; + +const LISTING_DELIMITER: &str = "----"; +const IMAGE_BLOCK_PREFIX: &str = "image::"; +const VIDEO_BLOCK_PREFIX: &str = "video::"; + +struct Converter<'a, 'b, R: BufRead> { + iter: &'a mut Peekable>, + output: &'b mut String, +} + +impl<'a, 'b, R: BufRead> Converter<'a, 'b, R> { + fn new(iter: &'a mut Peekable>, output: &'b mut String) -> Self { + Self { iter, output } + } + + fn process(&mut self) -> anyhow::Result<()> { + self.process_document_header()?; + self.skip_blank_lines()?; + self.output.push('\n'); + + loop { + let line = self.iter.peek().unwrap().as_deref().map_err(|e| anyhow!("{e}"))?; + if get_title(line).is_some() { + let line = self.iter.next().unwrap().unwrap(); + let (level, title) = get_title(&line).unwrap(); + 
self.write_title(level, title); + } else if get_list_item(line).is_some() { + self.process_list()?; + } else if line.starts_with('[') { + self.process_source_code_block(0)?; + } else if line.starts_with(LISTING_DELIMITER) { + self.process_listing_block(None, 0)?; + } else if line.starts_with('.') { + self.process_block_with_title(0)?; + } else if line.starts_with(IMAGE_BLOCK_PREFIX) { + self.process_image_block(None, 0)?; + } else if line.starts_with(VIDEO_BLOCK_PREFIX) { + self.process_video_block(None, 0)?; + } else { + self.process_paragraph(0, |line| line.is_empty())?; + } + + self.skip_blank_lines()?; + if self.iter.peek().is_none() { + break; + } + self.output.push('\n'); + } + Ok(()) + } + + fn process_document_header(&mut self) -> anyhow::Result<()> { + self.process_document_title()?; + + while let Some(line) = self.iter.next() { + let line = line?; + if line.is_empty() { + break; + } + if !line.starts_with(':') { + self.write_line(&line, 0) + } + } + + Ok(()) + } + + fn process_document_title(&mut self) -> anyhow::Result<()> { + if let Some(Ok(line)) = self.iter.next() { + if let Some((level, title)) = get_title(&line) { + let title = process_inline_macros(title)?; + if level == 1 { + self.write_title(level, &title); + return Ok(()); + } + } + } + bail!("document title not found") + } + + fn process_list(&mut self) -> anyhow::Result<()> { + let mut nesting = ListNesting::new(); + while let Some(line) = self.iter.peek() { + let line = line.as_deref().map_err(|e| anyhow!("{e}"))?; + + if get_list_item(line).is_some() { + let line = self.iter.next().unwrap()?; + let line = process_inline_macros(&line)?; + let (marker, item) = get_list_item(&line).unwrap(); + nesting.set_current(marker); + self.write_list_item(item, &nesting); + self.process_paragraph(nesting.indent(), |line| { + line.is_empty() || get_list_item(line).is_some() || line == "+" + })?; + } else if line == "+" { + let _ = self.iter.next().unwrap()?; + let line = self + .iter + .peek() + .ok_or_else(|| anyhow!("list continuation unexpectedly terminated"))?; + let line = line.as_deref().map_err(|e| anyhow!("{e}"))?; + + let indent = nesting.indent(); + if line.starts_with('[') { + self.write_line("", 0); + self.process_source_code_block(indent)?; + } else if line.starts_with(LISTING_DELIMITER) { + self.write_line("", 0); + self.process_listing_block(None, indent)?; + } else if line.starts_with('.') { + self.write_line("", 0); + self.process_block_with_title(indent)?; + } else if line.starts_with(IMAGE_BLOCK_PREFIX) { + self.write_line("", 0); + self.process_image_block(None, indent)?; + } else if line.starts_with(VIDEO_BLOCK_PREFIX) { + self.write_line("", 0); + self.process_video_block(None, indent)?; + } else { + self.write_line("", 0); + let current = nesting.current().unwrap(); + self.process_paragraph(indent, |line| { + line.is_empty() + || get_list_item(line).filter(|(m, _)| m == current).is_some() + || line == "+" + })?; + } + } else { + break; + } + self.skip_blank_lines()?; + } + + Ok(()) + } + + fn process_source_code_block(&mut self, level: usize) -> anyhow::Result<()> { + if let Some(Ok(line)) = self.iter.next() { + if let Some(styles) = line.strip_prefix("[source").and_then(|s| s.strip_suffix(']')) { + let mut styles = styles.split(','); + if !styles.next().unwrap().is_empty() { + bail!("not a source code block"); + } + let language = styles.next(); + return self.process_listing_block(language, level); + } + } + bail!("not a source code block") + } + + fn process_listing_block(&mut self, style: Option<&str>, 
level: usize) -> anyhow::Result<()> { + if let Some(Ok(line)) = self.iter.next() { + if line == LISTING_DELIMITER { + self.write_indent(level); + self.output.push_str("```"); + if let Some(style) = style { + self.output.push_str(style); + } + self.output.push('\n'); + while let Some(line) = self.iter.next() { + let line = line?; + if line == LISTING_DELIMITER { + self.write_line("```", level); + return Ok(()); + } else { + self.write_line(&line, level); + } + } + bail!("listing block is not terminated") + } + } + bail!("not a listing block") + } + + fn process_block_with_title(&mut self, level: usize) -> anyhow::Result<()> { + if let Some(Ok(line)) = self.iter.next() { + let title = + line.strip_prefix('.').ok_or_else(|| anyhow!("extraction of the title failed"))?; + + let line = self + .iter + .peek() + .ok_or_else(|| anyhow!("target block for the title is not found"))?; + let line = line.as_deref().map_err(|e| anyhow!("{e}"))?; + if line.starts_with(IMAGE_BLOCK_PREFIX) { + return self.process_image_block(Some(title), level); + } else if line.starts_with(VIDEO_BLOCK_PREFIX) { + return self.process_video_block(Some(title), level); + } else { + bail!("title for that block type is not supported"); + } + } + bail!("not a title") + } + + fn process_image_block(&mut self, caption: Option<&str>, level: usize) -> anyhow::Result<()> { + if let Some(Ok(line)) = self.iter.next() { + if let Some((url, attrs)) = parse_media_block(&line, IMAGE_BLOCK_PREFIX) { + let alt = if let Some(stripped) = + attrs.strip_prefix('"').and_then(|s| s.strip_suffix('"')) + { + stripped + } else { + attrs + }; + if let Some(caption) = caption { + self.write_caption_line(caption, level); + } + self.write_indent(level); + self.output.push_str("!["); + self.output.push_str(alt); + self.output.push_str("]("); + self.output.push_str(url); + self.output.push_str(")\n"); + return Ok(()); + } + } + bail!("not a image block") + } + + fn process_video_block(&mut self, caption: Option<&str>, level: usize) -> anyhow::Result<()> { + if let Some(Ok(line)) = self.iter.next() { + if let Some((url, attrs)) = parse_media_block(&line, VIDEO_BLOCK_PREFIX) { + let html_attrs = match attrs { + "options=loop" => "controls loop", + r#"options="autoplay,loop""# => "autoplay controls loop", + _ => bail!("unsupported video syntax"), + }; + if let Some(caption) = caption { + self.write_caption_line(caption, level); + } + self.write_indent(level); + self.output.push_str(r#"\n"); + return Ok(()); + } + } + bail!("not a video block") + } + + fn process_paragraph
<P>
(&mut self, level: usize, predicate: P) -> anyhow::Result<()> + where + P: Fn(&str) -> bool, + { + while let Some(line) = self.iter.peek() { + let line = line.as_deref().map_err(|e| anyhow!("{e}"))?; + if predicate(line) { + break; + } + + self.write_indent(level); + let line = self.iter.next().unwrap()?; + let line = line.trim_start(); + let line = process_inline_macros(line)?; + if let Some(stripped) = line.strip_suffix('+') { + self.output.push_str(stripped); + self.output.push('\\'); + } else { + self.output.push_str(&line); + } + self.output.push('\n'); + } + + Ok(()) + } + + fn skip_blank_lines(&mut self) -> anyhow::Result<()> { + while let Some(line) = self.iter.peek() { + if !line.as_deref().unwrap().is_empty() { + break; + } + self.iter.next().unwrap()?; + } + Ok(()) + } + + fn write_title(&mut self, indent: usize, title: &str) { + for _ in 0..indent { + self.output.push('#'); + } + self.output.push(' '); + self.output.push_str(title); + self.output.push('\n'); + } + + fn write_list_item(&mut self, item: &str, nesting: &ListNesting) { + let (marker, indent) = nesting.marker(); + self.write_indent(indent); + self.output.push_str(marker); + self.output.push_str(item); + self.output.push('\n'); + } + + fn write_caption_line(&mut self, caption: &str, indent: usize) { + self.write_indent(indent); + self.output.push('_'); + self.output.push_str(caption); + self.output.push_str("_\\\n"); + } + + fn write_indent(&mut self, indent: usize) { + for _ in 0..indent { + self.output.push(' '); + } + } + + fn write_line(&mut self, line: &str, indent: usize) { + self.write_indent(indent); + self.output.push_str(line); + self.output.push('\n'); + } +} + +pub(crate) fn convert_asciidoc_to_markdown(input: R) -> anyhow::Result +where + R: BufRead, +{ + let mut output = String::new(); + let mut iter = input.lines().peekable(); + + let mut converter = Converter::new(&mut iter, &mut output); + converter.process()?; + + Ok(output) +} + +fn get_title(line: &str) -> Option<(usize, &str)> { + strip_prefix_symbol(line, '=') +} + +fn get_list_item(line: &str) -> Option<(ListMarker, &str)> { + const HYPHEN_MARKER: &str = "- "; + if let Some(text) = line.strip_prefix(HYPHEN_MARKER) { + Some((ListMarker::Hyphen, text)) + } else if let Some((count, text)) = strip_prefix_symbol(line, '*') { + Some((ListMarker::Asterisk(count), text)) + } else if let Some((count, text)) = strip_prefix_symbol(line, '.') { + Some((ListMarker::Dot(count), text)) + } else { + None + } +} + +fn strip_prefix_symbol(line: &str, symbol: char) -> Option<(usize, &str)> { + let mut iter = line.chars(); + if iter.next()? 
!= symbol { + return None; + } + let mut count = 1; + loop { + match iter.next() { + Some(ch) if ch == symbol => { + count += 1; + } + Some(' ') => { + break; + } + _ => return None, + } + } + Some((count, iter.as_str())) +} + +fn parse_media_block<'a>(line: &'a str, prefix: &str) -> Option<(&'a str, &'a str)> { + if let Some(line) = line.strip_prefix(prefix) { + if let Some((url, rest)) = line.split_once('[') { + if let Some(attrs) = rest.strip_suffix(']') { + return Some((url, attrs)); + } + } + } + None +} + +#[derive(Debug)] +struct ListNesting(Vec); + +impl ListNesting { + fn new() -> Self { + Self(Vec::::with_capacity(6)) + } + + fn current(&mut self) -> Option<&ListMarker> { + self.0.last() + } + + fn set_current(&mut self, marker: ListMarker) { + let Self(markers) = self; + if let Some(index) = markers.iter().position(|m| *m == marker) { + markers.truncate(index + 1); + } else { + markers.push(marker); + } + } + + fn indent(&self) -> usize { + self.0.iter().map(|m| m.in_markdown().len()).sum() + } + + fn marker(&self) -> (&str, usize) { + let Self(markers) = self; + let indent = markers.iter().take(markers.len() - 1).map(|m| m.in_markdown().len()).sum(); + let marker = match markers.last() { + None => "", + Some(marker) => marker.in_markdown(), + }; + (marker, indent) + } +} + +#[derive(Debug, PartialEq, Eq)] +enum ListMarker { + Asterisk(usize), + Hyphen, + Dot(usize), +} + +impl ListMarker { + fn in_markdown(&self) -> &str { + match self { + ListMarker::Asterisk(_) => "- ", + ListMarker::Hyphen => "- ", + ListMarker::Dot(_) => "1. ", + } + } +} + +fn process_inline_macros(line: &str) -> anyhow::Result> { + let mut chars = line.char_indices(); + loop { + let (start, end, a_macro) = match get_next_line_component(&mut chars) { + Component::None => break, + Component::Text => continue, + Component::Macro(s, e, m) => (s, e, m), + }; + let mut src = line.chars(); + let mut processed = String::new(); + for _ in 0..start { + processed.push(src.next().unwrap()); + } + processed.push_str(a_macro.process()?.as_str()); + for _ in start..end { + let _ = src.next().unwrap(); + } + let mut pos = end; + + loop { + let (start, end, a_macro) = match get_next_line_component(&mut chars) { + Component::None => break, + Component::Text => continue, + Component::Macro(s, e, m) => (s, e, m), + }; + for _ in pos..start { + processed.push(src.next().unwrap()); + } + processed.push_str(a_macro.process()?.as_str()); + for _ in start..end { + let _ = src.next().unwrap(); + } + pos = end; + } + for ch in src { + processed.push(ch); + } + return Ok(Cow::Owned(processed)); + } + Ok(Cow::Borrowed(line)) +} + +fn get_next_line_component(chars: &mut std::str::CharIndices<'_>) -> Component { + let (start, mut macro_name) = match chars.next() { + None => return Component::None, + Some((_, ch)) if ch == ' ' || !ch.is_ascii() => return Component::Text, + Some((pos, ch)) => (pos, String::from(ch)), + }; + loop { + match chars.next() { + None => return Component::None, + Some((_, ch)) if ch == ' ' || !ch.is_ascii() => return Component::Text, + Some((_, ':')) => break, + Some((_, ch)) => macro_name.push(ch), + } + } + + let mut macro_target = String::new(); + loop { + match chars.next() { + None => return Component::None, + Some((_, ' ')) => return Component::Text, + Some((_, '[')) => break, + Some((_, ch)) => macro_target.push(ch), + } + } + + let mut attr_value = String::new(); + let end = loop { + match chars.next() { + None => return Component::None, + Some((pos, ']')) => break pos + 1, + Some((_, ch)) => 
attr_value.push(ch), + } + }; + + Component::Macro(start, end, Macro::new(macro_name, macro_target, attr_value)) +} + +enum Component { + None, + Text, + Macro(usize, usize, Macro), +} + +struct Macro { + name: String, + target: String, + attrs: String, +} + +impl Macro { + fn new(name: String, target: String, attrs: String) -> Self { + Self { name, target, attrs } + } + + fn process(&self) -> anyhow::Result { + let name = &self.name; + let text = match name.as_str() { + "https" => { + let url = &self.target; + let anchor_text = &self.attrs; + format!("[{anchor_text}](https:{url})") + } + "image" => { + let url = &self.target; + let alt = &self.attrs; + format!("![{alt}]({url})") + } + "kbd" => { + let keys = self.attrs.split('+').map(|k| Cow::Owned(format!("{k}"))); + keys.collect::>().join("+") + } + "pr" => { + let pr = &self.target; + let url = format!("https://github.com/rust-analyzer/rust-analyzer/pull/{pr}"); + format!("[`#{pr}`]({url})") + } + "commit" => { + let hash = &self.target; + let short = &hash[0..7]; + let url = format!("https://github.com/rust-analyzer/rust-analyzer/commit/{hash}"); + format!("[`{short}`]({url})") + } + "release" => { + let date = &self.target; + let url = format!("https://github.com/rust-analyzer/rust-analyzer/releases/{date}"); + format!("[`{date}`]({url})") + } + _ => bail!("macro not supported: {name}"), + }; + Ok(text) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use std::fs::read_to_string; + + #[test] + fn test_asciidoc_to_markdown_conversion() { + let input = read_to_string("test_data/input.adoc").unwrap(); + let expected = read_to_string("test_data/expected.md").unwrap(); + let actual = convert_asciidoc_to_markdown(std::io::Cursor::new(&input)).unwrap(); + + assert_eq!(actual, expected); + } + + macro_rules! test_inline_macro_processing { + ($(( + $name:ident, + $input:expr, + $expected:expr + ),)*) => ($( + #[test] + fn $name() { + let input = $input; + let actual = process_inline_macros(&input).unwrap(); + let expected = $expected; + assert_eq!(actual, expected) + } + )*); + } + + test_inline_macro_processing! 
{ + (inline_macro_processing_for_empty_line, "", ""), + (inline_macro_processing_for_line_with_no_macro, "foo bar", "foo bar"), + ( + inline_macro_processing_for_macro_in_line_start, + "kbd::[Ctrl+T] foo", + "Ctrl+T foo" + ), + ( + inline_macro_processing_for_macro_in_line_end, + "foo kbd::[Ctrl+T]", + "foo Ctrl+T" + ), + ( + inline_macro_processing_for_macro_in_the_middle_of_line, + "foo kbd::[Ctrl+T] foo", + "foo Ctrl+T foo" + ), + ( + inline_macro_processing_for_several_macros, + "foo kbd::[Ctrl+T] foo kbd::[Enter] foo", + "foo Ctrl+T foo Enter foo" + ), + ( + inline_macro_processing_for_several_macros_without_text_in_between, + "foo kbd::[Ctrl+T]kbd::[Enter] foo", + "foo Ctrl+TEnter foo" + ), + } +} diff --git a/xtask/src/release.rs b/xtask/src/release.rs index eda8fceef0..4a30691477 100644 --- a/xtask/src/release.rs +++ b/xtask/src/release.rs @@ -64,8 +64,8 @@ impl flags::Release { let prev_tag = tags.lines().filter(|line| is_release_tag(line)).last().unwrap(); let contents = changelog::get_changelog(sh, changelog_n, &commit, prev_tag, &today)?; - let path = changelog_dir.join(format!("{}-changelog-{}.adoc", today, changelog_n)); - sh.write_file(&path, &contents)?; + let path = changelog_dir.join(format!("{today}-changelog-{changelog_n}.adoc")); + sh.write_file(path, contents)?; Ok(()) } diff --git a/xtask/src/release/changelog.rs b/xtask/src/release/changelog.rs index 2647f7794f..d2a1483e38 100644 --- a/xtask/src/release/changelog.rs +++ b/xtask/src/release/changelog.rs @@ -23,9 +23,9 @@ pub(crate) fn get_changelog( let mut others = String::new(); for line in git_log.lines() { let line = line.trim_start(); - if let Some(pr_num) = parse_pr_number(&line) { + if let Some(pr_num) = parse_pr_number(line) { let accept = "Accept: application/vnd.github.v3+json"; - let authorization = format!("Authorization: token {}", token); + let authorization = format!("Authorization: token {token}"); let pr_url = "https://api.github.com/repos/rust-lang/rust-analyzer/issues"; // we don't use an HTTPS client or JSON parser to keep the build times low @@ -57,36 +57,36 @@ pub(crate) fn get_changelog( PrKind::Other => &mut others, PrKind::Skip => continue, }; - writeln!(s, "* pr:{}[] {}", pr_num, l.message.as_deref().unwrap_or(&pr_title)).unwrap(); + writeln!(s, "* pr:{pr_num}[] {}", l.message.as_deref().unwrap_or(&pr_title)).unwrap(); } } let contents = format!( "\ -= Changelog #{} += Changelog #{changelog_n} :sectanchors: +:experimental: :page-layout: post -Commit: commit:{}[] + -Release: release:{}[] +Commit: commit:{commit}[] + +Release: release:{today}[] == New Features -{} +{features} == Fixes -{} +{fixes} == Internal Improvements -{} +{internal} == Others -{} -", - changelog_n, commit, today, features, fixes, internal, others +{others} +" ); Ok(contents) } @@ -112,11 +112,9 @@ fn unescape(s: &str) -> String { fn parse_pr_number(s: &str) -> Option { const BORS_PREFIX: &str = "Merge #"; const HOMU_PREFIX: &str = "Auto merge of #"; - if s.starts_with(BORS_PREFIX) { - let s = &s[BORS_PREFIX.len()..]; + if let Some(s) = s.strip_prefix(BORS_PREFIX) { s.parse().ok() - } else if s.starts_with(HOMU_PREFIX) { - let s = &s[HOMU_PREFIX.len()..]; + } else if let Some(s) = s.strip_prefix(HOMU_PREFIX) { if let Some(space) = s.find(' ') { s[..space].parse().ok() } else { diff --git a/xtask/test_data/expected.md b/xtask/test_data/expected.md new file mode 100644 index 0000000000..19c940c67b --- /dev/null +++ b/xtask/test_data/expected.md @@ -0,0 +1,81 @@ +# Changelog #256 + +Hello! 
+ +Commit: [`0123456`](https://github.com/rust-analyzer/rust-analyzer/commit/0123456789abcdef0123456789abcdef01234567) \ +Release: [`2022-01-01`](https://github.com/rust-analyzer/rust-analyzer/releases/2022-01-01) + +## New Features + +- **BREAKING** [`#1111`](https://github.com/rust-analyzer/rust-analyzer/pull/1111) shortcut ctrl+r + - hyphen-prefixed list item +- nested list item + - `foo` -> `foofoo` + - `bar` -> `barbar` +- listing in the secondary level + 1. install + 1. add to config + + ```json + {"foo":"bar"} + ``` +- list item with continuation + + ![](https://example.com/animation.gif) + + ![alt text](https://example.com/animation.gif) + + + + + + _Image_\ + ![](https://example.com/animation.gif) + + _Video_\ + + + ```bash + rustup update nightly + ``` + + ``` + This is a plain listing. + ``` +- single line item followed by empty lines +- multiline list + item followed by empty lines +- multiline list + item with indent +- multiline list + item not followed by empty lines +- multiline list + item followed by different marker + - foo + - bar +- multiline list + item followed by list continuation + + paragraph + paragraph + +## Another Section + +- foo bar baz +- list item with an inline image + ![](https://example.com/animation.gif) + +The highlight of the month is probably [`#1111`](https://github.com/rust-analyzer/rust-analyzer/pull/1111). +See [online manual](https://example.com/manual) for more information. + +```bash +rustup update nightly +``` + +``` +rustup update nightly +``` + +``` +This is a plain listing. +``` diff --git a/xtask/test_data/input.adoc b/xtask/test_data/input.adoc new file mode 100644 index 0000000000..105bd8df0d --- /dev/null +++ b/xtask/test_data/input.adoc @@ -0,0 +1,90 @@ += Changelog #256 +:sectanchors: +:page-layout: post + +Hello! + +Commit: commit:0123456789abcdef0123456789abcdef01234567[] + +Release: release:2022-01-01[] + +== New Features + +* **BREAKING** pr:1111[] shortcut kbd:[ctrl+r] +- hyphen-prefixed list item +* nested list item +** `foo` -> `foofoo` +** `bar` -> `barbar` +* listing in the secondary level +. install +. add to config ++ +[source,json] +---- +{"foo":"bar"} +---- +* list item with continuation ++ +image::https://example.com/animation.gif[] ++ +image::https://example.com/animation.gif["alt text"] ++ +video::https://example.com/movie.mp4[options=loop] ++ +video::https://example.com/movie.mp4[options="autoplay,loop"] ++ +.Image +image::https://example.com/animation.gif[] ++ +.Video +video::https://example.com/movie.mp4[options=loop] ++ +[source,bash] +---- +rustup update nightly +---- ++ +---- +This is a plain listing. +---- +* single line item followed by empty lines + +* multiline list +item followed by empty lines + +* multiline list + item with indent + +* multiline list +item not followed by empty lines +* multiline list +item followed by different marker +** foo +** bar +* multiline list +item followed by list continuation ++ +paragraph +paragraph + +== Another Section + +* foo bar baz +* list item with an inline image + image:https://example.com/animation.gif[] + +The highlight of the month is probably pr:1111[]. +See https://example.com/manual[online manual] for more information. + +[source,bash] +---- +rustup update nightly +---- + +[source] +---- +rustup update nightly +---- + +---- +This is a plain listing. +----
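
A minimal usage sketch for the AsciiDoc-to-Markdown converter introduced in `xtask/src/publish/notes.rs` above. This is not part of the patch: the module name, test name, input changelog, and assertions below are illustrative, and the snippet assumes it sits next to the existing unit tests in that file (the converter is `pub(crate)`, so it is only reachable from inside the xtask crate).

```rust
#[cfg(test)]
mod conversion_sketch {
    use super::*;

    #[test]
    fn converts_a_tiny_changelog() {
        // A made-up AsciiDoc changelog: document title, a header attribute,
        // a paragraph, a section title, and one list item using the `pr:` macro.
        let adoc = "\
= Changelog #1
:sectanchors:

Hello!

== New Features

* pr:1111[] example entry
";
        let md = convert_asciidoc_to_markdown(std::io::Cursor::new(adoc)).unwrap();

        // `=` titles become `#` headings and `pr:1111[]` expands to a PR link,
        // mirroring what the committed test_data fixtures exercise at full scale.
        assert!(md.starts_with("# Changelog #1"));
        assert!(md.contains("## New Features"));
        assert!(md.contains(
            "[`#1111`](https://github.com/rust-analyzer/rust-analyzer/pull/1111)"
        ));
    }
}
```

The full round trip, including images, videos, and nested lists, is what the committed `xtask/test_data/input.adoc` and `expected.md` fixtures above cover.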
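
Likewise, a small sketch (not taken from the patch) of the `Ident::new_with_is_raw` helper added in `crates/tt/src/lib.rs`: the identifier values are arbitrary, and `TokenId::unspecified()` is assumed to be available as a placeholder id.

```rust
// Sketch only: constructing identifiers through the new helper instead of
// prepending "r#" by hand on the proc-macro server side. Values are arbitrary.
use tt::{Ident, TokenId};

fn main() {
    let plain = Ident::new_with_is_raw("foo".into(), TokenId::unspecified(), false);
    let raw = Ident::new_with_is_raw("struct".into(), TokenId::unspecified(), true);

    assert_eq!(plain.text.as_str(), "foo");
    // The "r#" prefix is re-attached when the identifier is raw.
    assert_eq!(raw.text.as_str(), "r#struct");
}
```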