From bfcdeb2a2461c35562196248e764405e8688d33e Mon Sep 17 00:00:00 2001 From: Yukang Date: Fri, 21 Jun 2024 00:13:34 +0800 Subject: [PATCH 01/94] Fix diagnostic name in macro_error.rs --- crates/ide-diagnostics/src/handlers/macro_error.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/ide-diagnostics/src/handlers/macro_error.rs b/crates/ide-diagnostics/src/handlers/macro_error.rs index f8780fc0da..2cd6a71c00 100644 --- a/crates/ide-diagnostics/src/handlers/macro_error.rs +++ b/crates/ide-diagnostics/src/handlers/macro_error.rs @@ -13,7 +13,7 @@ pub(crate) fn macro_error(ctx: &DiagnosticsContext<'_>, d: &hir::MacroError) -> ) } -// Diagnostic: macro-error +// Diagnostic: macro-def-error // // This diagnostic is shown for macro expansion errors. pub(crate) fn macro_def_error(ctx: &DiagnosticsContext<'_>, d: &hir::MacroDefError) -> Diagnostic { From e62bc2d1f5d097140eb1143a29dc6b1a50076869 Mon Sep 17 00:00:00 2001 From: Wilfred Hughes Date: Wed, 12 Jun 2024 17:31:39 -0700 Subject: [PATCH 02/94] fix: Improve hover text in unlinked file diagnostics Use full sentences, and mention how to disable the diagnostic if users are intentionally working on unowned files. --- crates/ide-diagnostics/src/handlers/unlinked_file.rs | 6 ++++-- editors/code/src/client.ts | 3 ++- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/crates/ide-diagnostics/src/handlers/unlinked_file.rs b/crates/ide-diagnostics/src/handlers/unlinked_file.rs index cbf50d13f5..77ffd0fd96 100644 --- a/crates/ide-diagnostics/src/handlers/unlinked_file.rs +++ b/crates/ide-diagnostics/src/handlers/unlinked_file.rs @@ -30,11 +30,13 @@ pub(crate) fn unlinked_file( // FIXME: This is a hack for the vscode extension to notice whether there is an autofix or not before having to resolve diagnostics. // This is to prevent project linking popups from appearing when there is an autofix. https://github.com/rust-lang/rust-analyzer/issues/14523 let message = if fixes.is_none() { - "file not included in crate hierarchy" + "This file is not included in any crates, so rust-analyzer can't offer IDE services." } else { - "file not included in module tree" + "This file is not included anywhere in the module tree, so rust-analyzer can't offer IDE services." 
}; + let message = format!("{message}\n\nIf you're intentionally working on unowned files, you can silence this warning by adding \"unlinked-file\" to rust-analyzer.diagnostics.disabled in your settings."); + let mut range = ctx.sema.db.parse(file_id).syntax_node().text_range(); let mut unused = true; diff --git a/editors/code/src/client.ts b/editors/code/src/client.ts index 1c2a34b484..542233e7b9 100644 --- a/editors/code/src/client.ts +++ b/editors/code/src/client.ts @@ -76,7 +76,8 @@ export async function createClient( // value === "unlinked-file" && value === "temporary-disabled" && !unlinkedFiles.includes(uri) && - diag.message !== "file not included in module tree" + (diag.message === "file not included in crate hierarchy" || + diag.message.startsWith("This file is not included in any crates")) ) { const config = vscode.workspace.getConfiguration("rust-analyzer"); if (config.get("showUnlinkedFileNotification")) { From d6d45a23d3ceb2f218837122a85588f96f015638 Mon Sep 17 00:00:00 2001 From: davidsemakula Date: Fri, 21 Jun 2024 17:50:14 +0300 Subject: [PATCH 03/94] fix: don't remove parentheses for calls of function-like pointers that are members of a struct or union --- .../src/handlers/remove_parentheses.rs | 29 +++++++++++++++++++ crates/syntax/src/ast/prec.rs | 8 +++++ 2 files changed, 37 insertions(+) diff --git a/crates/ide-assists/src/handlers/remove_parentheses.rs b/crates/ide-assists/src/handlers/remove_parentheses.rs index 799d36be93..f74fc26112 100644 --- a/crates/ide-assists/src/handlers/remove_parentheses.rs +++ b/crates/ide-assists/src/handlers/remove_parentheses.rs @@ -239,4 +239,33 @@ mod tests { check_assist_not_applicable(remove_parentheses, r#"fn f() { $0(return 2) + 2 }"#); } + + #[test] + fn remove_parens_indirect_calls() { + check_assist( + remove_parentheses, + r#"fn f(call: fn(usize), arg: usize) { $0(call)(arg); }"#, + r#"fn f(call: fn(usize), arg: usize) { call(arg); }"#, + ); + check_assist( + remove_parentheses, + r#"fn f(call: F, arg: usize) where F: Fn(usize) { $0(call)(arg); }"#, + r#"fn f(call: F, arg: usize) where F: Fn(usize) { call(arg); }"#, + ); + + // Parentheses are necessary when calling a function-like pointer that is a member of a struct or union. + check_assist_not_applicable( + remove_parentheses, + r#" +struct Foo { + t: T, +} + +impl Foo { + fn foo(&self, arg: usize) { + $0(self.t)(arg); + } +}"#, + ); + } } diff --git a/crates/syntax/src/ast/prec.rs b/crates/syntax/src/ast/prec.rs index 9131cd2f17..28089ffb37 100644 --- a/crates/syntax/src/ast/prec.rs +++ b/crates/syntax/src/ast/prec.rs @@ -27,6 +27,14 @@ impl Expr { } fn needs_parens_in_expr(&self, parent: &Expr) -> bool { + // Parentheses are necessary when calling a function-like pointer that is a member of a struct or union + // (e.g. `(a.f)()`). 
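The precedence fact behind this check is plain Rust: for a callable stored in a field, `a.f(x)` parses as a method call named `f`, so the field access has to be parenthesized before it can be called. A minimal standalone sketch of that behaviour (the `Foo`/`t`/`foo` names mirror the new test; the `Fn(usize)` bound is an assumption made for the example):

    struct Foo<T> {
        t: T,
    }

    impl<T: Fn(usize)> Foo<T> {
        fn foo(&self, arg: usize) {
            // `self.t(arg)` would resolve as a method named `t` and fail to compile;
            // the parentheses force the field access to happen before the call.
            (self.t)(arg);
        }
    }

    fn main() {
        let foo = Foo { t: |n: usize| println!("called with {n}") };
        foo.foo(3);
    }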
+ let is_parent_call_expr = matches!(parent, ast::Expr::CallExpr(_)); + let is_field_expr = matches!(self, ast::Expr::FieldExpr(_)); + if is_parent_call_expr && is_field_expr { + return true; + } + // Special-case block weirdness if parent.child_is_followed_by_a_block() { use Expr::*; From 957325a5fe5ea46484882caec63364b453adfc7b Mon Sep 17 00:00:00 2001 From: Tavo Annus Date: Fri, 21 Jun 2024 21:12:26 +0300 Subject: [PATCH 04/94] Run `data_constructor` tactic only backwards --- crates/hir/src/term_search/tactics.rs | 288 ++++++------------ .../ide-assists/src/handlers/term_search.rs | 2 +- crates/ide-completion/src/render.rs | 7 +- .../src/handlers/typed_hole.rs | 4 +- 4 files changed, 93 insertions(+), 208 deletions(-) diff --git a/crates/hir/src/term_search/tactics.rs b/crates/hir/src/term_search/tactics.rs index b738e6af77..d64f60cb94 100644 --- a/crates/hir/src/term_search/tactics.rs +++ b/crates/hir/src/term_search/tactics.rs @@ -17,11 +17,11 @@ use itertools::Itertools; use rustc_hash::FxHashSet; use crate::{ - Adt, AssocItem, Enum, GenericDef, GenericParam, HasVisibility, Impl, ModuleDef, ScopeDef, Type, - TypeParam, Variant, + Adt, AssocItem, GenericDef, GenericParam, HasVisibility, Impl, ModuleDef, ScopeDef, Type, + TypeParam, }; -use crate::term_search::{Expr, TermSearchConfig}; +use crate::term_search::Expr; use super::{LookupTable, NewTypesKey, TermSearchCtx}; @@ -151,163 +151,27 @@ pub(super) fn assoc_const<'a, DB: HirDatabase>( /// * `should_continue` - Function that indicates when to stop iterating pub(super) fn data_constructor<'a, DB: HirDatabase>( ctx: &'a TermSearchCtx<'a, DB>, - defs: &'a FxHashSet, + _defs: &'a FxHashSet, lookup: &'a mut LookupTable, should_continue: &'a dyn std::ops::Fn() -> bool, ) -> impl Iterator + 'a { let db = ctx.sema.db; let module = ctx.scope.module(); - fn variant_helper( - db: &dyn HirDatabase, - lookup: &mut LookupTable, - should_continue: &dyn std::ops::Fn() -> bool, - parent_enum: Enum, - variant: Variant, - config: &TermSearchConfig, - ) -> Vec<(Type, Vec)> { - // Ignore unstable - if variant.is_unstable(db) { - return Vec::new(); - } - - let generics = GenericDef::from(variant.parent_enum(db)); - let Some(type_params) = generics - .type_or_const_params(db) - .into_iter() - .map(|it| it.as_type_param(db)) - .collect::>>() - else { - // Ignore enums with const generics - return Vec::new(); - }; - - // We currently do not check lifetime bounds so ignore all types that have something to do - // with them - if !generics.lifetime_params(db).is_empty() { - return Vec::new(); - } - - // Only account for stable type parameters for now, unstable params can be default - // tho, for example in `Box` - if type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none()) { - return Vec::new(); - } - - let non_default_type_params_len = - type_params.iter().filter(|it| it.default(db).is_none()).count(); - - let enum_ty_shallow = Adt::from(parent_enum).ty(db); - let generic_params = lookup - .types_wishlist() - .clone() - .into_iter() - .filter(|ty| ty.could_unify_with(db, &enum_ty_shallow)) - .map(|it| it.type_arguments().collect::>()) - .chain((non_default_type_params_len == 0).then_some(Vec::new())); - - generic_params - .filter(|_| should_continue()) - .filter_map(move |generics| { - // Insert default type params - let mut g = generics.into_iter(); - let generics: Vec<_> = type_params - .iter() - .map(|it| it.default(db).or_else(|| g.next())) - .collect::>()?; - - let enum_ty = Adt::from(parent_enum).ty_with_args(db, 
generics.iter().cloned()); - - // Ignore types that have something to do with lifetimes - if config.enable_borrowcheck && enum_ty.contains_reference(db) { + lookup + .types_wishlist() + .clone() + .into_iter() + .chain(iter::once(ctx.goal.clone())) + .filter_map(|ty| ty.as_adt().map(|adt| (adt, ty))) + .filter(|_| should_continue()) + .filter_map(move |(adt, ty)| match adt { + Adt::Struct(strukt) => { + // Ignore unstable or not visible + if strukt.is_unstable(db) || !strukt.is_visible_from(db, module) { return None; } - // Early exit if some param cannot be filled from lookup - let param_exprs: Vec> = variant - .fields(db) - .into_iter() - .map(|field| lookup.find(db, &field.ty_with_args(db, generics.iter().cloned()))) - .collect::>()?; - - // Note that we need special case for 0 param constructors because of multi cartesian - // product - let variant_exprs: Vec = if param_exprs.is_empty() { - vec![Expr::Variant { variant, generics, params: Vec::new() }] - } else { - param_exprs - .into_iter() - .multi_cartesian_product() - .map(|params| Expr::Variant { variant, generics: generics.clone(), params }) - .collect() - }; - lookup.insert(enum_ty.clone(), variant_exprs.iter().cloned()); - - Some((enum_ty, variant_exprs)) - }) - .collect() - } - defs.iter() - .filter_map(move |def| match def { - ScopeDef::ModuleDef(ModuleDef::Variant(it)) => { - let variant_exprs = variant_helper( - db, - lookup, - should_continue, - it.parent_enum(db), - *it, - &ctx.config, - ); - if variant_exprs.is_empty() { - return None; - } - if GenericDef::from(it.parent_enum(db)) - .type_or_const_params(db) - .into_iter() - .filter_map(|it| it.as_type_param(db)) - .all(|it| it.default(db).is_some()) - { - lookup.mark_fulfilled(ScopeDef::ModuleDef(ModuleDef::Variant(*it))); - } - Some(variant_exprs) - } - ScopeDef::ModuleDef(ModuleDef::Adt(Adt::Enum(enum_))) => { - let exprs: Vec<(Type, Vec)> = enum_ - .variants(db) - .into_iter() - .flat_map(|it| { - variant_helper(db, lookup, should_continue, *enum_, it, &ctx.config) - }) - .collect(); - - if exprs.is_empty() { - return None; - } - - if GenericDef::from(*enum_) - .type_or_const_params(db) - .into_iter() - .filter_map(|it| it.as_type_param(db)) - .all(|it| it.default(db).is_some()) - { - lookup.mark_fulfilled(ScopeDef::ModuleDef(ModuleDef::Adt(Adt::Enum(*enum_)))); - } - - Some(exprs) - } - ScopeDef::ModuleDef(ModuleDef::Adt(Adt::Struct(it))) => { - // Ignore unstable and not visible - if it.is_unstable(db) || !it.is_visible_from(db, module) { - return None; - } - - let generics = GenericDef::from(*it); - - // Ignore const params for now - let type_params = generics - .type_or_const_params(db) - .into_iter() - .map(|it| it.as_type_param(db)) - .collect::>>()?; + let generics = GenericDef::from(strukt); // We currently do not check lifetime bounds so ignore all types that have something to do // with them @@ -315,48 +179,73 @@ pub(super) fn data_constructor<'a, DB: HirDatabase>( return None; } - // Only account for stable type parameters for now, unstable params can be default - // tho, for example in `Box` - if type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none()) { + if ty.contains_unknown() { return None; } - let non_default_type_params_len = - type_params.iter().filter(|it| it.default(db).is_none()).count(); + // Ignore types that have something to do with lifetimes + if ctx.config.enable_borrowcheck && ty.contains_reference(db) { + return None; + } + let fields = strukt.fields(db); + // Check if all fields are visible, otherwise we cannot fill 
them + if fields.iter().any(|it| !it.is_visible_from(db, module)) { + return None; + } - let struct_ty_shallow = Adt::from(*it).ty(db); - let generic_params = lookup - .types_wishlist() - .clone() + let generics: Vec<_> = ty.type_arguments().collect(); + + // Early exit if some param cannot be filled from lookup + let param_exprs: Vec> = fields .into_iter() - .filter(|ty| ty.could_unify_with(db, &struct_ty_shallow)) - .map(|it| it.type_arguments().collect::>()) - .chain((non_default_type_params_len == 0).then_some(Vec::new())); + .map(|field| lookup.find(db, &field.ty_with_args(db, generics.iter().cloned()))) + .collect::>()?; - let exprs = generic_params - .filter(|_| should_continue()) - .filter_map(|generics| { - // Insert default type params - let mut g = generics.into_iter(); - let generics: Vec<_> = type_params - .iter() - .map(|it| it.default(db).or_else(|| g.next())) - .collect::>()?; + // Note that we need special case for 0 param constructors because of multi cartesian + // product + let exprs: Vec = if param_exprs.is_empty() { + vec![Expr::Struct { strukt, generics, params: Vec::new() }] + } else { + param_exprs + .into_iter() + .multi_cartesian_product() + .map(|params| Expr::Struct { strukt, generics: generics.clone(), params }) + .collect() + }; - let struct_ty = Adt::from(*it).ty_with_args(db, generics.iter().cloned()); + lookup.insert(ty.clone(), exprs.iter().cloned()); + Some((ty, exprs)) + } + Adt::Enum(enum_) => { + // Ignore unstable or not visible + if enum_.is_unstable(db) || !enum_.is_visible_from(db, module) { + return None; + } - // Ignore types that have something to do with lifetimes - if ctx.config.enable_borrowcheck && struct_ty.contains_reference(db) { - return None; - } - let fields = it.fields(db); - // Check if all fields are visible, otherwise we cannot fill them - if fields.iter().any(|it| !it.is_visible_from(db, module)) { - return None; - } + let generics = GenericDef::from(enum_); + // We currently do not check lifetime bounds so ignore all types that have something to do + // with them + if !generics.lifetime_params(db).is_empty() { + return None; + } + if ty.contains_unknown() { + return None; + } + + // Ignore types that have something to do with lifetimes + if ctx.config.enable_borrowcheck && ty.contains_reference(db) { + return None; + } + + let generics: Vec<_> = ty.type_arguments().collect(); + let exprs = enum_ + .variants(db) + .into_iter() + .filter_map(|variant| { // Early exit if some param cannot be filled from lookup - let param_exprs: Vec> = fields + let param_exprs: Vec> = variant + .fields(db) .into_iter() .map(|field| { lookup.find(db, &field.ty_with_args(db, generics.iter().cloned())) @@ -365,36 +254,33 @@ pub(super) fn data_constructor<'a, DB: HirDatabase>( // Note that we need special case for 0 param constructors because of multi cartesian // product - let struct_exprs: Vec = if param_exprs.is_empty() { - vec![Expr::Struct { strukt: *it, generics, params: Vec::new() }] + let variant_exprs: Vec = if param_exprs.is_empty() { + vec![Expr::Variant { + variant, + generics: generics.clone(), + params: Vec::new(), + }] } else { param_exprs .into_iter() .multi_cartesian_product() - .map(|params| Expr::Struct { - strukt: *it, + .map(|params| Expr::Variant { + variant, generics: generics.clone(), params, }) .collect() }; - - if non_default_type_params_len == 0 { - // Fulfilled only if there are no generic parameters - lookup.mark_fulfilled(ScopeDef::ModuleDef(ModuleDef::Adt( - Adt::Struct(*it), - ))); - } - lookup.insert(struct_ty.clone(), 
struct_exprs.iter().cloned()); - - Some((struct_ty, struct_exprs)) + lookup.insert(ty.clone(), variant_exprs.iter().cloned()); + Some(variant_exprs) }) + .flatten() .collect(); - Some(exprs) + + Some((ty, exprs)) } - _ => None, + Adt::Union(_) => None, }) - .flatten() .filter_map(|(ty, exprs)| ty.could_unify_with_deeply(db, &ctx.goal).then_some(exprs)) .flatten() } diff --git a/crates/ide-assists/src/handlers/term_search.rs b/crates/ide-assists/src/handlers/term_search.rs index 8a9229c549..7f1274cade 100644 --- a/crates/ide-assists/src/handlers/term_search.rs +++ b/crates/ide-assists/src/handlers/term_search.rs @@ -144,7 +144,7 @@ fn f() { let a = A { x: 1, y: true }; let b: i32 = a.x; }"#, term_search, r#"//- minicore: todo, unimplemented, option fn f() { let a: i32 = 1; let b: Option = todo$0!(); }"#, - r#"fn f() { let a: i32 = 1; let b: Option = None; }"#, + r#"fn f() { let a: i32 = 1; let b: Option = Some(a); }"#, ) } diff --git a/crates/ide-completion/src/render.rs b/crates/ide-completion/src/render.rs index ebdc813f3d..b98f745f17 100644 --- a/crates/ide-completion/src/render.rs +++ b/crates/ide-completion/src/render.rs @@ -764,6 +764,7 @@ fn main() { "#, expect![[r#" st dep::test_mod_b::Struct {…} [type_could_unify] + ex dep::test_mod_b::Struct { } [type_could_unify] st Struct (use dep::test_mod_b::Struct) [type_could_unify+requires_import] fn main() [] fn test(…) [] @@ -839,6 +840,7 @@ fn main() { "#, expect![[r#" ev dep::test_mod_b::Enum::variant [type_could_unify] + ex dep::test_mod_b::Enum::variant [type_could_unify] en Enum (use dep::test_mod_b::Enum) [type_could_unify+requires_import] fn main() [] fn test(…) [] @@ -876,6 +878,7 @@ fn main() { "#, expect![[r#" ev dep::test_mod_b::Enum::Variant [type_could_unify] + ex dep::test_mod_b::Enum::Variant [type_could_unify] fn main() [] fn test(…) [] md dep [] @@ -1839,7 +1842,6 @@ fn f() { A { bar: b$0 }; } fn baz() [type] ex baz() [type] ex bar() [type] - ex A { bar: ... 
}.bar [type] st A [] fn f() [] "#]], @@ -1978,7 +1980,6 @@ fn main() { "#, expect![[r#" ex core::ops::Deref::deref(&t) (use core::ops::Deref) [type_could_unify] - ex core::ops::Deref::deref(&T(S)) (use core::ops::Deref) [type_could_unify] lc m [local] lc t [local] lc &t [type+local] @@ -2028,7 +2029,6 @@ fn main() { "#, expect![[r#" ex core::ops::DerefMut::deref_mut(&mut t) (use core::ops::DerefMut) [type_could_unify] - ex core::ops::DerefMut::deref_mut(&mut T(S)) (use core::ops::DerefMut) [type_could_unify] lc m [local] lc t [local] lc &mut t [type+local] @@ -2132,7 +2132,6 @@ fn main() { } "#, expect![[r#" - ex core::ops::Deref::deref(&T(S)) (use core::ops::Deref) [type_could_unify] ex core::ops::Deref::deref(&bar()) (use core::ops::Deref) [type_could_unify] st S [] st &S [type] diff --git a/crates/ide-diagnostics/src/handlers/typed_hole.rs b/crates/ide-diagnostics/src/handlers/typed_hole.rs index 9651ce6106..71917e09f0 100644 --- a/crates/ide-diagnostics/src/handlers/typed_hole.rs +++ b/crates/ide-diagnostics/src/handlers/typed_hole.rs @@ -276,7 +276,7 @@ impl Foo for Baz { } fn asd() -> Bar { let a = Baz; - Foo::foo(_) + Foo::foo(a) } ", ); @@ -365,7 +365,7 @@ impl Foo for A { } fn main() { let a = A; - let c: Bar = Foo::foo(_); + let c: Bar = Foo::foo(&a); }"#, ); } From a3315fe02895b1ca1c9e89f62c8c3a320702e537 Mon Sep 17 00:00:00 2001 From: Tavo Annus Date: Sat, 22 Jun 2024 10:17:33 +0300 Subject: [PATCH 05/94] Remove remove not-very-helpful optimizations --- crates/hir/src/term_search.rs | 44 --------------------------- crates/hir/src/term_search/tactics.rs | 3 -- 2 files changed, 47 deletions(-) diff --git a/crates/hir/src/term_search.rs b/crates/hir/src/term_search.rs index aa046b02e2..0f0b7a67f5 100644 --- a/crates/hir/src/term_search.rs +++ b/crates/hir/src/term_search.rs @@ -93,12 +93,6 @@ struct LookupTable { data: FxHashMap, /// New types reached since last query by the `NewTypesKey` new_types: FxHashMap>, - /// ScopeDefs that are not interesting any more - exhausted_scopedefs: FxHashSet, - /// ScopeDefs that were used in current round - round_scopedef_hits: FxHashSet, - /// Amount of rounds since scopedef was first used. - rounds_since_sopedef_hit: FxHashMap, /// Types queried but not present types_wishlist: FxHashSet, /// Threshold to squash trees to `Many` @@ -212,37 +206,6 @@ impl LookupTable { } } - /// Mark `ScopeDef` as exhausted meaning it is not interesting for us any more - fn mark_exhausted(&mut self, def: ScopeDef) { - self.exhausted_scopedefs.insert(def); - } - - /// Mark `ScopeDef` as used meaning we managed to produce something useful from it - fn mark_fulfilled(&mut self, def: ScopeDef) { - self.round_scopedef_hits.insert(def); - } - - /// Start new round (meant to be called at the beginning of iteration in `term_search`) - /// - /// This functions marks some `ScopeDef`s as exhausted if there have been - /// `MAX_ROUNDS_AFTER_HIT` rounds after first using a `ScopeDef`. 
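Several of the tactics above collect one list of candidate expressions per constructor or function parameter and then take their cartesian product to enumerate every possible call; the zero-parameter case is special-cased because, as the in-code comments note, the product of an empty set of candidate lists would otherwise produce no call at all. A standalone sketch of that shape using only the standard library (the candidate strings are illustrative):

    fn combinations<'a>(per_param: &[Vec<&'a str>]) -> Vec<Vec<&'a str>> {
        // Fold each parameter's candidate list into the accumulated combinations.
        let mut acc: Vec<Vec<&'a str>> = vec![Vec::new()];
        for candidates in per_param {
            let mut next = Vec::new();
            for prefix in &acc {
                for candidate in candidates {
                    let mut row = prefix.clone();
                    row.push(*candidate);
                    next.push(row);
                }
            }
            acc = next;
        }
        acc
    }

    fn main() {
        // Two candidates for the first argument, two for the second: four calls.
        let per_param = vec![vec!["a", "b()"], vec!["0", "len"]];
        let combos = combinations(&per_param);
        assert_eq!(combos.len(), 4);
        println!("{combos:?}");
    }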
- fn new_round(&mut self) { - for def in &self.round_scopedef_hits { - let hits = - self.rounds_since_sopedef_hit.entry(*def).and_modify(|n| *n += 1).or_insert(0); - const MAX_ROUNDS_AFTER_HIT: u32 = 2; - if *hits > MAX_ROUNDS_AFTER_HIT { - self.exhausted_scopedefs.insert(*def); - } - } - self.round_scopedef_hits.clear(); - } - - /// Get exhausted `ScopeDef`s - fn exhausted_scopedefs(&self) -> &FxHashSet { - &self.exhausted_scopedefs - } - /// Types queried but not found fn types_wishlist(&mut self) -> &FxHashSet { &self.types_wishlist @@ -328,19 +291,12 @@ pub fn term_search(ctx: &TermSearchCtx<'_, DB>) -> Vec { solutions.extend(tactics::assoc_const(ctx, &defs, &mut lookup)); while should_continue() { - lookup.new_round(); - solutions.extend(tactics::data_constructor(ctx, &defs, &mut lookup, should_continue)); solutions.extend(tactics::free_function(ctx, &defs, &mut lookup, should_continue)); solutions.extend(tactics::impl_method(ctx, &defs, &mut lookup, should_continue)); solutions.extend(tactics::struct_projection(ctx, &defs, &mut lookup, should_continue)); solutions.extend(tactics::impl_static_method(ctx, &defs, &mut lookup, should_continue)); solutions.extend(tactics::make_tuple(ctx, &defs, &mut lookup, should_continue)); - - // Discard not interesting `ScopeDef`s for speedup - for def in lookup.exhausted_scopedefs() { - defs.remove(def); - } } solutions.into_iter().filter(|it| !it.is_many()).unique().collect() diff --git a/crates/hir/src/term_search/tactics.rs b/crates/hir/src/term_search/tactics.rs index d64f60cb94..04af72c441 100644 --- a/crates/hir/src/term_search/tactics.rs +++ b/crates/hir/src/term_search/tactics.rs @@ -74,8 +74,6 @@ pub(super) fn trivial<'a, DB: HirDatabase>( _ => None, }?; - lookup.mark_exhausted(*def); - let ty = expr.ty(db); lookup.insert(ty.clone(), std::iter::once(expr.clone())); @@ -401,7 +399,6 @@ pub(super) fn free_function<'a, DB: HirDatabase>( .collect() }; - lookup.mark_fulfilled(ScopeDef::ModuleDef(ModuleDef::Function(*it))); lookup.insert(ret_ty.clone(), fn_exprs.iter().cloned()); Some((ret_ty, fn_exprs)) }) From 23d3ac70e9278fc8982518ee91935b2fc73b4f2b Mon Sep 17 00:00:00 2001 From: Tavo Annus Date: Sat, 22 Jun 2024 14:14:42 +0300 Subject: [PATCH 06/94] Simplify `impl_static_method` tactic --- crates/hir/src/term_search/tactics.rs | 132 ++++++-------------------- 1 file changed, 29 insertions(+), 103 deletions(-) diff --git a/crates/hir/src/term_search/tactics.rs b/crates/hir/src/term_search/tactics.rs index 04af72c441..a0caebb50c 100644 --- a/crates/hir/src/term_search/tactics.rs +++ b/crates/hir/src/term_search/tactics.rs @@ -688,6 +688,7 @@ pub(super) fn impl_static_method<'a, DB: HirDatabase>( .clone() .into_iter() .chain(iter::once(ctx.goal.clone())) + .filter(|ty| !ty.type_arguments().any(|it| it.contains_unknown())) .filter(|_| should_continue()) .flat_map(|ty| { Impl::all_for_type(db, ty.clone()).into_iter().map(move |imp| (ty.clone(), imp)) @@ -702,20 +703,6 @@ pub(super) fn impl_static_method<'a, DB: HirDatabase>( let fn_generics = GenericDef::from(it); let imp_generics = GenericDef::from(imp); - // Ignore const params for now - let imp_type_params = imp_generics - .type_or_const_params(db) - .into_iter() - .map(|it| it.as_type_param(db)) - .collect::>>()?; - - // Ignore const params for now - let fn_type_params = fn_generics - .type_or_const_params(db) - .into_iter() - .map(|it| it.as_type_param(db)) - .collect::>>()?; - // Ignore all functions that have something to do with lifetimes as we don't check them if 
!fn_generics.lifetime_params(db).is_empty() || !imp_generics.lifetime_params(db).is_empty() @@ -733,104 +720,43 @@ pub(super) fn impl_static_method<'a, DB: HirDatabase>( return None; } - // Only account for stable type parameters for now, unstable params can be default - // tho, for example in `Box` - if imp_type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none()) - || fn_type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none()) + // Ignore functions with generics for now as they kill the performance + // Also checking bounds for generics is problematic + if fn_generics.type_or_const_params(db).len() > 0 { + return None; + } + + let ret_ty = it.ret_type_with_args(db, ty.type_arguments()); + // Filter out functions that return references + if ctx.config.enable_borrowcheck && ret_ty.contains_reference(db) || ret_ty.is_raw_ptr() { return None; } - // Double check that we have fully known type - if ty.type_arguments().any(|it| it.contains_unknown()) { - return None; - } - - let non_default_fn_type_params_len = - fn_type_params.iter().filter(|it| it.default(db).is_none()).count(); - - // Ignore functions with generics for now as they kill the performance - // Also checking bounds for generics is problematic - if non_default_fn_type_params_len > 0 { - return None; - } - - let generic_params = lookup - .iter_types() - .collect::>() // Force take ownership + // Early exit if some param cannot be filled from lookup + let param_exprs: Vec> = it + .params_without_self_with_args(db, ty.type_arguments()) .into_iter() - .permutations(non_default_fn_type_params_len); + .map(|field| lookup.find_autoref(db, field.ty())) + .collect::>()?; - let exprs: Vec<_> = generic_params - .filter(|_| should_continue()) - .filter_map(|generics| { - // Insert default type params - let mut g = generics.into_iter(); - let generics: Vec<_> = ty - .type_arguments() - .map(Some) - .chain(fn_type_params.iter().map(|it| match it.default(db) { - Some(ty) => Some(ty), - None => { - let generic = g.next().expect("Missing type param"); - it.trait_bounds(db) - .into_iter() - .all(|bound| generic.impls_trait(db, bound, &[])); - // Filter out generics that do not unify due to trait bounds - it.ty(db).could_unify_with(db, &generic).then_some(generic) - } - })) - .collect::>()?; + // Note that we need special case for 0 param constructors because of multi cartesian + // product + let generics = ty.type_arguments().collect(); + let fn_exprs: Vec = if param_exprs.is_empty() { + vec![Expr::Function { func: it, generics, params: Vec::new() }] + } else { + param_exprs + .into_iter() + .multi_cartesian_product() + .map(|params| Expr::Function { func: it, generics: generics.clone(), params }) + .collect() + }; - let ret_ty = it.ret_type_with_args( - db, - ty.type_arguments().chain(generics.iter().cloned()), - ); - // Filter out functions that return references - if ctx.config.enable_borrowcheck && ret_ty.contains_reference(db) - || ret_ty.is_raw_ptr() - { - return None; - } + lookup.insert(ret_ty.clone(), fn_exprs.iter().cloned()); - // Ignore functions that do not change the type - // if ty.could_unify_with_deeply(db, &ret_ty) { - // return None; - // } - - // Early exit if some param cannot be filled from lookup - let param_exprs: Vec> = it - .params_without_self_with_args( - db, - ty.type_arguments().chain(generics.iter().cloned()), - ) - .into_iter() - .map(|field| lookup.find_autoref(db, field.ty())) - .collect::>()?; - - // Note that we need special case for 0 param constructors because of multi 
cartesian - // product - let fn_exprs: Vec = if param_exprs.is_empty() { - vec![Expr::Function { func: it, generics, params: Vec::new() }] - } else { - param_exprs - .into_iter() - .multi_cartesian_product() - .map(|params| Expr::Function { - func: it, - generics: generics.clone(), - params, - }) - .collect() - }; - - lookup.insert(ret_ty.clone(), fn_exprs.iter().cloned()); - Some((ret_ty, fn_exprs)) - }) - .collect(); - Some(exprs) + Some((ret_ty, fn_exprs)) }) - .flatten() .filter_map(|(ty, exprs)| ty.could_unify_with_deeply(db, &ctx.goal).then_some(exprs)) .flatten() } From b4f3eb48db49bac187e374f1eb2c583df8b241b4 Mon Sep 17 00:00:00 2001 From: Tavo Annus Date: Sat, 22 Jun 2024 14:22:56 +0300 Subject: [PATCH 07/94] Simplify `impl_method` tactic --- crates/hir/src/term_search/tactics.rs | 145 ++++++++------------------ 1 file changed, 41 insertions(+), 104 deletions(-) diff --git a/crates/hir/src/term_search/tactics.rs b/crates/hir/src/term_search/tactics.rs index a0caebb50c..09f687655f 100644 --- a/crates/hir/src/term_search/tactics.rs +++ b/crates/hir/src/term_search/tactics.rs @@ -438,6 +438,8 @@ pub(super) fn impl_method<'a, DB: HirDatabase>( lookup .new_types(NewTypesKey::ImplMethod) .into_iter() + .filter(|ty| !ty.type_arguments().any(|it| it.contains_unknown())) + .filter(|_| should_continue()) .flat_map(|ty| { Impl::all_for_type(db, ty.clone()).into_iter().map(move |imp| (ty.clone(), imp)) }) @@ -450,22 +452,10 @@ pub(super) fn impl_method<'a, DB: HirDatabase>( let fn_generics = GenericDef::from(it); let imp_generics = GenericDef::from(imp); - // Ignore const params for now - let imp_type_params = imp_generics - .type_or_const_params(db) - .into_iter() - .map(|it| it.as_type_param(db)) - .collect::>>()?; - - // Ignore const params for now - let fn_type_params = fn_generics - .type_or_const_params(db) - .into_iter() - .map(|it| it.as_type_param(db)) - .collect::>>()?; - // Ignore all functions that have something to do with lifetimes as we don't check them - if !fn_generics.lifetime_params(db).is_empty() { + if !fn_generics.lifetime_params(db).is_empty() + || !imp_generics.lifetime_params(db).is_empty() + { return None; } @@ -479,112 +469,59 @@ pub(super) fn impl_method<'a, DB: HirDatabase>( return None; } - // Only account for stable type parameters for now, unstable params can be default - // tho, for example in `Box` - if imp_type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none()) - || fn_type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none()) + // Ignore functions with generics for now as they kill the performance + // Also checking bounds for generics is problematic + if fn_generics.type_or_const_params(db).len() > 0 { + return None; + } + + let ret_ty = it.ret_type_with_args(db, ty.type_arguments()); + // Filter out functions that return references + if ctx.config.enable_borrowcheck && ret_ty.contains_reference(db) || ret_ty.is_raw_ptr() { return None; } - // Double check that we have fully known type - if ty.type_arguments().any(|it| it.contains_unknown()) { + // Ignore functions that do not change the type + if ty.could_unify_with_deeply(db, &ret_ty) { return None; } - let non_default_fn_type_params_len = - fn_type_params.iter().filter(|it| it.default(db).is_none()).count(); + let self_ty = + it.self_param(db).expect("No self param").ty_with_args(db, ty.type_arguments()); - // Ignore functions with generics for now as they kill the performance - // Also checking bounds for generics is problematic - if non_default_fn_type_params_len > 
0 { + // Ignore functions that have different self type + if !self_ty.autoderef(db).any(|s_ty| ty == s_ty) { return None; } - let generic_params = lookup - .iter_types() - .collect::>() // Force take ownership + let target_type_exprs = lookup.find(db, &ty).expect("Type not in lookup"); + + // Early exit if some param cannot be filled from lookup + let param_exprs: Vec> = it + .params_without_self_with_args(db, ty.type_arguments()) .into_iter() - .permutations(non_default_fn_type_params_len); + .map(|field| lookup.find_autoref(db, field.ty())) + .collect::>()?; - let exprs: Vec<_> = generic_params - .filter(|_| should_continue()) - .filter_map(|generics| { - // Insert default type params - let mut g = generics.into_iter(); - let generics: Vec<_> = ty - .type_arguments() - .map(Some) - .chain(fn_type_params.iter().map(|it| match it.default(db) { - Some(ty) => Some(ty), - None => { - let generic = g.next().expect("Missing type param"); - // Filter out generics that do not unify due to trait bounds - it.ty(db).could_unify_with(db, &generic).then_some(generic) - } - })) - .collect::>()?; - - let ret_ty = it.ret_type_with_args( - db, - ty.type_arguments().chain(generics.iter().cloned()), - ); - // Filter out functions that return references - if ctx.config.enable_borrowcheck && ret_ty.contains_reference(db) - || ret_ty.is_raw_ptr() - { - return None; + let generics: Vec<_> = ty.type_arguments().collect(); + let fn_exprs: Vec = std::iter::once(target_type_exprs) + .chain(param_exprs) + .multi_cartesian_product() + .map(|params| { + let mut params = params.into_iter(); + let target = Box::new(params.next().unwrap()); + Expr::Method { + func: it, + generics: generics.clone(), + target, + params: params.collect(), } - - // Ignore functions that do not change the type - if ty.could_unify_with_deeply(db, &ret_ty) { - return None; - } - - let self_ty = it - .self_param(db) - .expect("No self param") - .ty_with_args(db, ty.type_arguments().chain(generics.iter().cloned())); - - // Ignore functions that have different self type - if !self_ty.autoderef(db).any(|s_ty| ty == s_ty) { - return None; - } - - let target_type_exprs = lookup.find(db, &ty).expect("Type not in lookup"); - - // Early exit if some param cannot be filled from lookup - let param_exprs: Vec> = it - .params_without_self_with_args( - db, - ty.type_arguments().chain(generics.iter().cloned()), - ) - .into_iter() - .map(|field| lookup.find_autoref(db, field.ty())) - .collect::>()?; - - let fn_exprs: Vec = std::iter::once(target_type_exprs) - .chain(param_exprs) - .multi_cartesian_product() - .map(|params| { - let mut params = params.into_iter(); - let target = Box::new(params.next().unwrap()); - Expr::Method { - func: it, - generics: generics.clone(), - target, - params: params.collect(), - } - }) - .collect(); - - lookup.insert(ret_ty.clone(), fn_exprs.iter().cloned()); - Some((ret_ty, fn_exprs)) }) .collect(); - Some(exprs) + + Some((ret_ty, fn_exprs)) }) - .flatten() .filter_map(|(ty, exprs)| ty.could_unify_with_deeply(db, &ctx.goal).then_some(exprs)) .flatten() } From 3825d8bd0f8b09d9b82ae23d28de74ea2337991d Mon Sep 17 00:00:00 2001 From: Tavo Annus Date: Sat, 22 Jun 2024 15:06:07 +0300 Subject: [PATCH 08/94] Increase search depth to account for more granual steps --- crates/hir/src/term_search.rs | 2 +- crates/hir/src/term_search/tactics.rs | 2 ++ crates/rust-analyzer/src/config.rs | 8 ++++---- 3 files changed, 7 insertions(+), 5 deletions(-) diff --git a/crates/hir/src/term_search.rs b/crates/hir/src/term_search.rs index 
0f0b7a67f5..6f84513708 100644 --- a/crates/hir/src/term_search.rs +++ b/crates/hir/src/term_search.rs @@ -238,7 +238,7 @@ pub struct TermSearchConfig { impl Default for TermSearchConfig { fn default() -> Self { - Self { enable_borrowcheck: true, many_alternatives_threshold: 1, fuel: 400 } + Self { enable_borrowcheck: true, many_alternatives_threshold: 1, fuel: 1200 } } } diff --git a/crates/hir/src/term_search/tactics.rs b/crates/hir/src/term_search/tactics.rs index 09f687655f..a4cb4aa012 100644 --- a/crates/hir/src/term_search/tactics.rs +++ b/crates/hir/src/term_search/tactics.rs @@ -448,6 +448,7 @@ pub(super) fn impl_method<'a, DB: HirDatabase>( AssocItem::Function(f) => Some((imp, ty, f)), _ => None, }) + .filter(|_| should_continue()) .filter_map(move |(imp, ty, it)| { let fn_generics = GenericDef::from(it); let imp_generics = GenericDef::from(imp); @@ -636,6 +637,7 @@ pub(super) fn impl_static_method<'a, DB: HirDatabase>( AssocItem::Function(f) => Some((imp, ty, f)), _ => None, }) + .filter(|_| should_continue()) .filter_map(move |(imp, ty, it)| { let fn_generics = GenericDef::from(it); let imp_generics = GenericDef::from(imp); diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index e8504979be..f152ac542a 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs @@ -341,8 +341,8 @@ config_data! { assist_emitMustUse: bool = false, /// Placeholder expression to use for missing expressions in assists. assist_expressionFillDefault: ExprFillDefaultDef = ExprFillDefaultDef::Todo, - /// Term search fuel in "units of work" for assists (Defaults to 400). - assist_termSearch_fuel: usize = 400, + /// Term search fuel in "units of work" for assists (Defaults to 1800). + assist_termSearch_fuel: usize = 1800, /// Whether to enforce the import granularity setting for all files. If set to false rust-analyzer will try to keep import styles consistent per file. imports_granularity_enforce: bool = false, @@ -426,8 +426,8 @@ config_data! { }"#).unwrap(), /// Whether to enable term search based snippets like `Some(foo.bar().baz())`. completion_termSearch_enable: bool = false, - /// Term search fuel in "units of work" for autocompletion (Defaults to 200). - completion_termSearch_fuel: usize = 200, + /// Term search fuel in "units of work" for autocompletion (Defaults to 1000). + completion_termSearch_fuel: usize = 1000, /// Controls file watching implementation. 
files_watcher: FilesWatcherDef = FilesWatcherDef::Client, From 51c3bd215a43f85877fc8855c4e8ce8222829a7e Mon Sep 17 00:00:00 2001 From: Tavo Annus Date: Sat, 22 Jun 2024 15:40:27 +0300 Subject: [PATCH 09/94] Fix suggestions of unstable constants --- crates/hir/src/term_search/tactics.rs | 12 ++++++++---- docs/user/generated_config.adoc | 8 ++++---- editors/code/package.json | 8 ++++---- 3 files changed, 16 insertions(+), 12 deletions(-) diff --git a/crates/hir/src/term_search/tactics.rs b/crates/hir/src/term_search/tactics.rs index a4cb4aa012..d1fc68d009 100644 --- a/crates/hir/src/term_search/tactics.rs +++ b/crates/hir/src/term_search/tactics.rs @@ -17,8 +17,8 @@ use itertools::Itertools; use rustc_hash::FxHashSet; use crate::{ - Adt, AssocItem, GenericDef, GenericParam, HasVisibility, Impl, ModuleDef, ScopeDef, Type, - TypeParam, + Adt, AssocItem, GenericDef, GenericParam, HasAttrs, HasVisibility, Impl, ModuleDef, ScopeDef, + Type, TypeParam, }; use crate::term_search::Expr; @@ -122,6 +122,10 @@ pub(super) fn assoc_const<'a, DB: HirDatabase>( .filter(move |it| it.is_visible_from(db, module)) .filter_map(AssocItem::as_const) .filter_map(|it| { + if it.attrs(db).is_unstable() { + return None; + } + let expr = Expr::Const(it); let ty = it.ty(db); @@ -472,7 +476,7 @@ pub(super) fn impl_method<'a, DB: HirDatabase>( // Ignore functions with generics for now as they kill the performance // Also checking bounds for generics is problematic - if fn_generics.type_or_const_params(db).len() > 0 { + if !fn_generics.type_or_const_params(db).is_empty() { return None; } @@ -661,7 +665,7 @@ pub(super) fn impl_static_method<'a, DB: HirDatabase>( // Ignore functions with generics for now as they kill the performance // Also checking bounds for generics is problematic - if fn_generics.type_or_const_params(db).len() > 0 { + if !fn_generics.type_or_const_params(db).is_empty() { return None; } diff --git a/docs/user/generated_config.adoc b/docs/user/generated_config.adoc index 14aae91741..81f30dc56e 100644 --- a/docs/user/generated_config.adoc +++ b/docs/user/generated_config.adoc @@ -9,10 +9,10 @@ for enum variants. -- Placeholder expression to use for missing expressions in assists. -- -[[rust-analyzer.assist.termSearch.fuel]]rust-analyzer.assist.termSearch.fuel (default: `400`):: +[[rust-analyzer.assist.termSearch.fuel]]rust-analyzer.assist.termSearch.fuel (default: `1800`):: + -- -Term search fuel in "units of work" for assists (Defaults to 400). +Term search fuel in "units of work" for assists (Defaults to 1800). -- [[rust-analyzer.cachePriming.enable]]rust-analyzer.cachePriming.enable (default: `true`):: + @@ -378,10 +378,10 @@ Custom completion snippets. -- Whether to enable term search based snippets like `Some(foo.bar().baz())`. -- -[[rust-analyzer.completion.termSearch.fuel]]rust-analyzer.completion.termSearch.fuel (default: `200`):: +[[rust-analyzer.completion.termSearch.fuel]]rust-analyzer.completion.termSearch.fuel (default: `1000`):: + -- -Term search fuel in "units of work" for autocompletion (Defaults to 200). +Term search fuel in "units of work" for autocompletion (Defaults to 1000). 
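The "fuel" in these settings is a simple budget of work units that the search burns as it explores, checked through the `should_continue` callbacks threaded into every tactic. A standalone sketch of the idea (the 1200 figure mirrors the new `TermSearchConfig` default above; the closure shape is illustrative, not the actual rust-analyzer implementation):

    use std::cell::Cell;

    fn main() {
        // A budget of "units of work"; every check burns one unit.
        let fuel = Cell::new(1200u64);
        let should_continue = || {
            let left = fuel.get();
            fuel.set(left.saturating_sub(1));
            left > 0
        };

        let mut steps = 0u64;
        while should_continue() {
            steps += 1; // one unit of search work
        }
        assert_eq!(steps, 1200);
    }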
-- [[rust-analyzer.diagnostics.disabled]]rust-analyzer.diagnostics.disabled (default: `[]`):: + diff --git a/editors/code/package.json b/editors/code/package.json index db2a989106..0da5b920a7 100644 --- a/editors/code/package.json +++ b/editors/code/package.json @@ -592,8 +592,8 @@ "title": "assist", "properties": { "rust-analyzer.assist.termSearch.fuel": { - "markdownDescription": "Term search fuel in \"units of work\" for assists (Defaults to 400).", - "default": 400, + "markdownDescription": "Term search fuel in \"units of work\" for assists (Defaults to 1800).", + "default": 1800, "type": "integer", "minimum": 0 } @@ -1224,8 +1224,8 @@ "title": "completion", "properties": { "rust-analyzer.completion.termSearch.fuel": { - "markdownDescription": "Term search fuel in \"units of work\" for autocompletion (Defaults to 200).", - "default": 200, + "markdownDescription": "Term search fuel in \"units of work\" for autocompletion (Defaults to 1000).", + "default": 1000, "type": "integer", "minimum": 0 } From 291e96a5628179b0298523f018f928369cd4a673 Mon Sep 17 00:00:00 2001 From: roife Date: Sun, 23 Jun 2024 22:26:01 +0800 Subject: [PATCH 10/94] fix: pattern completions in let-stmt --- crates/ide-completion/src/context/analysis.rs | 2 +- crates/ide-completion/src/tests/pattern.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/ide-completion/src/context/analysis.rs b/crates/ide-completion/src/context/analysis.rs index 80ce5bd4cf..743aa00570 100644 --- a/crates/ide-completion/src/context/analysis.rs +++ b/crates/ide-completion/src/context/analysis.rs @@ -1334,7 +1334,7 @@ fn pattern_context_for( .map_or((PatternRefutability::Irrefutable, false), |node| { let refutability = match_ast! { match node { - ast::LetStmt(let_) => return (PatternRefutability::Irrefutable, let_.ty().is_some()), + ast::LetStmt(let_) => return (PatternRefutability::Refutable, let_.ty().is_some()), ast::Param(param) => { let has_type_ascription = param.ty().is_some(); param_ctx = (|| { diff --git a/crates/ide-completion/src/tests/pattern.rs b/crates/ide-completion/src/tests/pattern.rs index 8720cb555a..6a0b67e291 100644 --- a/crates/ide-completion/src/tests/pattern.rs +++ b/crates/ide-completion/src/tests/pattern.rs @@ -146,7 +146,7 @@ enum SingleVariantEnum { } use SingleVariantEnum::Variant; fn foo() { - let a$0 + for a$0 } "#, expect![[r#" From 3168ab5b99e575ca2757ded1009bb37bd6fac61d Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sun, 23 Jun 2024 15:22:42 +0200 Subject: [PATCH 11/94] Enum variants are not generic def ids --- crates/hir-def/src/generics.rs | 12 ++-- crates/hir-def/src/lib.rs | 62 ++++++++++++++----- crates/hir-def/src/resolver.rs | 1 - crates/hir-ty/src/builder.rs | 7 ++- crates/hir-ty/src/chalk_db.rs | 24 ++++---- crates/hir-ty/src/chalk_ext.rs | 7 ++- crates/hir-ty/src/db.rs | 13 ++-- crates/hir-ty/src/display.rs | 12 ++-- crates/hir-ty/src/generics.rs | 1 - crates/hir-ty/src/infer/expr.rs | 5 +- crates/hir-ty/src/infer/path.rs | 74 +++++++++++++++-------- crates/hir-ty/src/lib.rs | 6 +- crates/hir-ty/src/lower.rs | 68 ++++----------------- crates/hir-ty/src/mir/lower.rs | 4 +- crates/hir-ty/src/mir/monomorphization.rs | 6 +- crates/hir/src/from_id.rs | 2 - crates/hir/src/has_source.rs | 4 +- crates/hir/src/lib.rs | 28 ++++----- crates/hir/src/term_search/expr.rs | 2 +- crates/ide-db/src/active_parameter.rs | 10 ++- crates/ide-db/src/search.rs | 1 - crates/ide/src/signature_help.rs | 17 +++--- 22 files changed, 188 insertions(+), 178 deletions(-) diff --git 
a/crates/hir-def/src/generics.rs b/crates/hir-def/src/generics.rs index b9f8082391..ca02501567 100644 --- a/crates/hir-def/src/generics.rs +++ b/crates/hir-def/src/generics.rs @@ -582,13 +582,11 @@ impl GenericParams { GenericDefId::TraitAliasId(id) => id_to_generics(db, id, enabled_params), GenericDefId::TypeAliasId(id) => id_to_generics(db, id, enabled_params), GenericDefId::ImplId(id) => id_to_generics(db, id, enabled_params), - GenericDefId::EnumVariantId(_) | GenericDefId::ConstId(_) => { - Interned::new(GenericParams { - type_or_consts: Default::default(), - lifetimes: Default::default(), - where_predicates: Default::default(), - }) - } + GenericDefId::ConstId(_) => Interned::new(GenericParams { + type_or_consts: Default::default(), + lifetimes: Default::default(), + where_predicates: Default::default(), + }), } } diff --git a/crates/hir-def/src/lib.rs b/crates/hir-def/src/lib.rs index f6fe0c618a..b628137ef5 100644 --- a/crates/hir-def/src/lib.rs +++ b/crates/hir-def/src/lib.rs @@ -689,7 +689,7 @@ pub enum TypeOwnerId { } impl TypeOwnerId { - fn as_generic_def_id(self) -> Option { + fn as_generic_def_id(self, db: &dyn DefDatabase) -> Option { Some(match self { TypeOwnerId::FunctionId(it) => GenericDefId::FunctionId(it), TypeOwnerId::ConstId(it) => GenericDefId::ConstId(it), @@ -698,7 +698,9 @@ impl TypeOwnerId { TypeOwnerId::TraitAliasId(it) => GenericDefId::TraitAliasId(it), TypeOwnerId::TypeAliasId(it) => GenericDefId::TypeAliasId(it), TypeOwnerId::ImplId(it) => GenericDefId::ImplId(it), - TypeOwnerId::EnumVariantId(it) => GenericDefId::EnumVariantId(it), + TypeOwnerId::EnumVariantId(it) => { + GenericDefId::AdtId(AdtId::EnumId(it.lookup(db).parent)) + } TypeOwnerId::InTypeConstId(_) | TypeOwnerId::StaticId(_) => return None, }) } @@ -740,7 +742,6 @@ impl From for TypeOwnerId { GenericDefId::TraitAliasId(it) => it.into(), GenericDefId::TypeAliasId(it) => it.into(), GenericDefId::ImplId(it) => it.into(), - GenericDefId::EnumVariantId(it) => it.into(), GenericDefId::ConstId(it) => it.into(), } } @@ -849,8 +850,8 @@ impl GeneralConstId { pub fn generic_def(self, db: &dyn DefDatabase) -> Option { match self { GeneralConstId::ConstId(it) => Some(it.into()), - GeneralConstId::ConstBlockId(it) => it.lookup(db).parent.as_generic_def_id(), - GeneralConstId::InTypeConstId(it) => it.lookup(db).owner.as_generic_def_id(), + GeneralConstId::ConstBlockId(it) => it.lookup(db).parent.as_generic_def_id(db), + GeneralConstId::InTypeConstId(it) => it.lookup(db).owner.as_generic_def_id(db), } } @@ -888,12 +889,12 @@ impl From for DefWithBodyId { } impl DefWithBodyId { - pub fn as_generic_def_id(self) -> Option { + pub fn as_generic_def_id(self, db: &dyn DefDatabase) -> Option { match self { DefWithBodyId::FunctionId(f) => Some(f.into()), DefWithBodyId::StaticId(_) => None, DefWithBodyId::ConstId(c) => Some(c.into()), - DefWithBodyId::VariantId(c) => Some(c.into()), + DefWithBodyId::VariantId(c) => Some(c.lookup(db).parent.into()), // FIXME: stable rust doesn't allow generics in constants, but we should // use `TypeOwnerId::as_generic_def_id` when it does. DefWithBodyId::InTypeConstId(_) => None, @@ -921,10 +922,6 @@ pub enum GenericDefId { TraitAliasId(TraitAliasId), TypeAliasId(TypeAliasId), ImplId(ImplId), - // enum variants cannot have generics themselves, but their parent enums - // can, and this makes some code easier to write - // FIXME: Try to remove this as that will reduce the amount of query slots generated per enum? 
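The comment being removed states the underlying language fact: an enum variant never introduces generic parameters of its own, and whatever arguments its constructor takes come from the parent enum. A short stable-Rust illustration (the `Either` type is made up for the example):

    #[allow(dead_code)]
    enum Either<L, R> {
        Left(L),
        Right(R),
    }

    fn main() {
        // The turbofish goes on the enum: the variant is generic only through
        // `Either`'s own parameters.
        let e = Either::<i32, &str>::Left(1);
        match e {
            Either::Left(n) => println!("left {n}"),
            Either::Right(s) => println!("right {s}"),
        }
    }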
- EnumVariantId(EnumVariantId), // consts can have type parameters from their parents (i.e. associated consts of traits) ConstId(ConstId), } @@ -935,7 +932,6 @@ impl_from!( TraitAliasId, TypeAliasId, ImplId, - EnumVariantId, ConstId for GenericDefId ); @@ -967,7 +963,6 @@ impl GenericDefId { GenericDefId::TraitAliasId(it) => file_id_and_params_of_item_loc(db, it), GenericDefId::ImplId(it) => file_id_and_params_of_item_loc(db, it), GenericDefId::ConstId(it) => (it.lookup(db).id.file_id(), None), - GenericDefId::EnumVariantId(it) => (it.lookup(db).id.file_id(), None), } } @@ -994,6 +989,46 @@ impl From for GenericDefId { } } +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +pub enum CallableDefId { + FunctionId(FunctionId), + StructId(StructId), + EnumVariantId(EnumVariantId), +} + +impl InternValueTrivial for CallableDefId {} + +impl_from!(FunctionId, StructId, EnumVariantId for CallableDefId); +impl From for ModuleDefId { + fn from(def: CallableDefId) -> ModuleDefId { + match def { + CallableDefId::FunctionId(f) => ModuleDefId::FunctionId(f), + CallableDefId::StructId(s) => ModuleDefId::AdtId(AdtId::StructId(s)), + CallableDefId::EnumVariantId(e) => ModuleDefId::EnumVariantId(e), + } + } +} + +impl CallableDefId { + pub fn krate(self, db: &dyn DefDatabase) -> CrateId { + match self { + CallableDefId::FunctionId(f) => f.krate(db), + CallableDefId::StructId(s) => s.krate(db), + CallableDefId::EnumVariantId(e) => e.krate(db), + } + } +} + +impl GenericDefId { + pub fn from(db: &dyn DefDatabase, def: CallableDefId) -> GenericDefId { + match def { + CallableDefId::FunctionId(f) => f.into(), + CallableDefId::StructId(s) => s.into(), + CallableDefId::EnumVariantId(e) => e.lookup(db).parent.into(), + } + } +} + #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] pub enum AttrDefId { ModuleId(ModuleId), @@ -1310,7 +1345,6 @@ impl HasModule for GenericDefId { GenericDefId::TraitAliasId(it) => it.module(db), GenericDefId::TypeAliasId(it) => it.module(db), GenericDefId::ImplId(it) => it.module(db), - GenericDefId::EnumVariantId(it) => it.module(db), GenericDefId::ConstId(it) => it.module(db), } } diff --git a/crates/hir-def/src/resolver.rs b/crates/hir-def/src/resolver.rs index 9794963203..a83157415c 100644 --- a/crates/hir-def/src/resolver.rs +++ b/crates/hir-def/src/resolver.rs @@ -1164,7 +1164,6 @@ impl HasResolver for GenericDefId { GenericDefId::TraitAliasId(inner) => inner.resolver(db), GenericDefId::TypeAliasId(inner) => inner.resolver(db), GenericDefId::ImplId(inner) => inner.resolver(db), - GenericDefId::EnumVariantId(inner) => inner.resolver(db), GenericDefId::ConstId(inner) => inner.resolver(db), } } diff --git a/crates/hir-ty/src/builder.rs b/crates/hir-ty/src/builder.rs index 52411f94ad..bccdc9a6c5 100644 --- a/crates/hir-ty/src/builder.rs +++ b/crates/hir-ty/src/builder.rs @@ -252,8 +252,9 @@ impl TyBuilder<()> { /// This method prepopulates the builder with placeholder substitution of `parent`, so you /// should only push exactly 3 `GenericArg`s before building. pub fn subst_for_coroutine(db: &dyn HirDatabase, parent: DefWithBodyId) -> TyBuilder<()> { - let parent_subst = - parent.as_generic_def_id().map(|p| generics(db.upcast(), p).placeholder_subst(db)); + let parent_subst = parent + .as_generic_def_id(db.upcast()) + .map(|p| generics(db.upcast(), p).placeholder_subst(db)); // These represent resume type, yield type, and return type of coroutine. 
let params = std::iter::repeat(ParamKind::Type).take(3).collect(); TyBuilder::new((), params, parent_subst) @@ -266,7 +267,7 @@ impl TyBuilder<()> { ) -> Substitution { let sig_ty = sig_ty.cast(Interner); let self_subst = iter::once(&sig_ty); - let Some(parent) = parent.as_generic_def_id() else { + let Some(parent) = parent.as_generic_def_id(db.upcast()) else { return Substitution::from_iter(Interner, self_subst); }; Substitution::from_iter( diff --git a/crates/hir-ty/src/chalk_db.rs b/crates/hir-ty/src/chalk_db.rs index debae1fe12..43c34a7eda 100644 --- a/crates/hir-ty/src/chalk_db.rs +++ b/crates/hir-ty/src/chalk_db.rs @@ -13,7 +13,8 @@ use hir_def::{ data::adt::StructFlags, hir::Movability, lang_item::{LangItem, LangItemTarget}, - AssocItemId, BlockId, GenericDefId, HasModule, ItemContainerId, Lookup, TypeAliasId, VariantId, + AssocItemId, BlockId, CallableDefId, GenericDefId, HasModule, ItemContainerId, Lookup, + TypeAliasId, VariantId, }; use hir_expand::name::name; @@ -28,9 +29,9 @@ use crate::{ to_assoc_type_id, to_chalk_trait_id, traits::ChalkContext, utils::ClosureSubst, - wrap_empty_binders, AliasEq, AliasTy, BoundVar, CallableDefId, DebruijnIndex, FnDefId, - Interner, ProjectionTy, ProjectionTyExt, QuantifiedWhereClause, Substitution, TraitRef, - TraitRefExt, Ty, TyBuilder, TyExt, TyKind, WhereClause, + wrap_empty_binders, AliasEq, AliasTy, BoundVar, DebruijnIndex, FnDefId, Interner, ProjectionTy, + ProjectionTyExt, QuantifiedWhereClause, Substitution, TraitRef, TraitRefExt, Ty, TyBuilder, + TyExt, TyKind, WhereClause, }; pub(crate) type AssociatedTyDatum = chalk_solve::rust_ir::AssociatedTyDatum; @@ -102,7 +103,7 @@ impl chalk_solve::RustIrDatabase for ChalkContext<'_> { &self, fn_def_id: chalk_ir::FnDefId, ) -> Arc> { - self.db.fn_def_datum(self.krate, fn_def_id) + self.db.fn_def_datum(fn_def_id) } fn impls_for_trait( @@ -912,16 +913,13 @@ fn type_alias_associated_ty_value( Arc::new(value) } -pub(crate) fn fn_def_datum_query( - db: &dyn HirDatabase, - _krate: CrateId, - fn_def_id: FnDefId, -) -> Arc { +pub(crate) fn fn_def_datum_query(db: &dyn HirDatabase, fn_def_id: FnDefId) -> Arc { let callable_def: CallableDefId = from_chalk(db, fn_def_id); - let generic_params = generics(db.upcast(), callable_def.into()); + let generic_def = GenericDefId::from(db.upcast(), callable_def); + let generic_params = generics(db.upcast(), generic_def); let (sig, binders) = db.callable_item_signature(callable_def).into_value_and_skipped_binders(); let bound_vars = generic_params.bound_vars_subst(db, DebruijnIndex::INNERMOST); - let where_clauses = convert_where_clauses(db, callable_def.into(), &bound_vars); + let where_clauses = convert_where_clauses(db, generic_def, &bound_vars); let bound = rust_ir::FnDefDatumBound { // Note: Chalk doesn't actually use this information yet as far as I am aware, but we provide it anyway inputs_and_output: chalk_ir::Binders::empty( @@ -948,7 +946,7 @@ pub(crate) fn fn_def_datum_query( pub(crate) fn fn_def_variance_query(db: &dyn HirDatabase, fn_def_id: FnDefId) -> Variances { let callable_def: CallableDefId = from_chalk(db, fn_def_id); - let generic_params = generics(db.upcast(), callable_def.into()); + let generic_params = generics(db.upcast(), GenericDefId::from(db.upcast(), callable_def)); Variances::from_iter( Interner, std::iter::repeat(chalk_ir::Variance::Invariant).take(generic_params.len()), diff --git a/crates/hir-ty/src/chalk_ext.rs b/crates/hir-ty/src/chalk_ext.rs index 4279c75651..117fbd7bab 100644 --- a/crates/hir-ty/src/chalk_ext.rs +++ 
b/crates/hir-ty/src/chalk_ext.rs @@ -188,9 +188,10 @@ impl TyExt for Ty { fn as_generic_def(&self, db: &dyn HirDatabase) -> Option { match *self.kind(Interner) { TyKind::Adt(AdtId(adt), ..) => Some(adt.into()), - TyKind::FnDef(callable, ..) => { - Some(db.lookup_intern_callable_def(callable.into()).into()) - } + TyKind::FnDef(callable, ..) => Some(GenericDefId::from( + db.upcast(), + db.lookup_intern_callable_def(callable.into()), + )), TyKind::AssociatedType(type_alias, ..) => Some(from_assoc_type_id(type_alias).into()), TyKind::Foreign(type_alias, ..) => Some(from_foreign_def_id(type_alias).into()), _ => None, diff --git a/crates/hir-ty/src/db.rs b/crates/hir-ty/src/db.rs index e951048021..164c244483 100644 --- a/crates/hir-ty/src/db.rs +++ b/crates/hir-ty/src/db.rs @@ -9,8 +9,8 @@ use base_db::{ CrateId, Upcast, }; use hir_def::{ - db::DefDatabase, hir::ExprId, layout::TargetDataLayout, AdtId, BlockId, ConstParamId, - DefWithBodyId, EnumVariantId, FunctionId, GeneralConstId, GenericDefId, ImplId, + db::DefDatabase, hir::ExprId, layout::TargetDataLayout, AdtId, BlockId, CallableDefId, + ConstParamId, DefWithBodyId, EnumVariantId, FunctionId, GeneralConstId, GenericDefId, ImplId, LifetimeParamId, LocalFieldId, StaticId, TypeAliasId, TypeOrConstParamId, VariantId, }; use la_arena::ArenaMap; @@ -24,9 +24,8 @@ use crate::{ lower::{GenericDefaults, GenericPredicates}, method_resolution::{InherentImpls, TraitImpls, TyFingerprint}, mir::{BorrowckResult, MirBody, MirLowerError}, - Binders, CallableDefId, ClosureId, Const, FnDefId, ImplTraitId, ImplTraits, InferenceResult, - Interner, PolyFnSig, QuantifiedWhereClause, Substitution, TraitEnvironment, TraitRef, Ty, - TyDefId, ValueTyDefId, + Binders, ClosureId, Const, FnDefId, ImplTraitId, ImplTraits, InferenceResult, Interner, + PolyFnSig, Substitution, TraitEnvironment, TraitRef, Ty, TyDefId, ValueTyDefId, }; use hir_expand::name::Name; @@ -145,7 +144,7 @@ pub trait HirDatabase: DefDatabase + Upcast { def: GenericDefId, param_id: TypeOrConstParamId, assoc_name: Option, - ) -> Arc<[Binders]>; + ) -> GenericPredicates; #[salsa::invoke(crate::lower::generic_predicates_query)] fn generic_predicates(&self, def: GenericDefId) -> GenericPredicates; @@ -232,7 +231,7 @@ pub trait HirDatabase: DefDatabase + Upcast { ) -> sync::Arc; #[salsa::invoke(chalk_db::fn_def_datum_query)] - fn fn_def_datum(&self, krate: CrateId, fn_def_id: FnDefId) -> sync::Arc; + fn fn_def_datum(&self, fn_def_id: FnDefId) -> sync::Arc; #[salsa::invoke(chalk_db::fn_def_variance_query)] fn fn_def_variance(&self, fn_def_id: FnDefId) -> chalk_db::Variances; diff --git a/crates/hir-ty/src/display.rs b/crates/hir-ty/src/display.rs index 66b5398b88..70b670f5e8 100644 --- a/crates/hir-ty/src/display.rs +++ b/crates/hir-ty/src/display.rs @@ -21,8 +21,8 @@ use hir_def::{ path::{Path, PathKind}, type_ref::{TraitBoundModifier, TypeBound, TypeRef}, visibility::Visibility, - HasModule, ImportPathConfig, ItemContainerId, LocalFieldId, Lookup, ModuleDefId, ModuleId, - TraitId, + GenericDefId, HasModule, ImportPathConfig, ItemContainerId, LocalFieldId, Lookup, ModuleDefId, + ModuleId, TraitId, }; use hir_expand::name::Name; use intern::{Internable, Interned}; @@ -988,7 +988,8 @@ impl HirDisplay for Ty { f.end_location_link(); if parameters.len(Interner) > 0 { - let generics = generics(db.upcast(), def.into()); + let generic_def_id = GenericDefId::from(db.upcast(), def); + let generics = generics(db.upcast(), generic_def_id); let (parent_len, self_param, type_, const_, impl_, lifetime) = 
generics.provenance_split(); let parameters = parameters.as_slice(Interner); @@ -1002,8 +1003,9 @@ impl HirDisplay for Ty { debug_assert_eq!(parent_params.len(), parent_len); let parent_params = - generic_args_sans_defaults(f, Some(def.into()), parent_params); - let fn_params = generic_args_sans_defaults(f, Some(def.into()), fn_params); + generic_args_sans_defaults(f, Some(generic_def_id), parent_params); + let fn_params = + generic_args_sans_defaults(f, Some(generic_def_id), fn_params); write!(f, "<")?; hir_fmt_generic_arguments(f, parent_params, None)?; diff --git a/crates/hir-ty/src/generics.rs b/crates/hir-ty/src/generics.rs index ea10e6881e..7f8dd920e6 100644 --- a/crates/hir-ty/src/generics.rs +++ b/crates/hir-ty/src/generics.rs @@ -216,7 +216,6 @@ fn parent_generic_def(db: &dyn DefDatabase, def: GenericDefId) -> Option it.lookup(db).container, GenericDefId::TypeAliasId(it) => it.lookup(db).container, GenericDefId::ConstId(it) => it.lookup(db).container, - GenericDefId::EnumVariantId(it) => return Some(it.lookup(db).parent.into()), GenericDefId::AdtId(_) | GenericDefId::TraitId(_) | GenericDefId::ImplId(_) diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs index 95f28531ac..e053b0c1a4 100644 --- a/crates/hir-ty/src/infer/expr.rs +++ b/crates/hir-ty/src/infer/expr.rs @@ -13,7 +13,7 @@ use hir_def::{ }, lang_item::{LangItem, LangItemTarget}, path::{GenericArgs, Path}, - BlockId, FieldId, GenericParamId, ItemContainerId, Lookup, TupleFieldId, TupleId, + BlockId, FieldId, GenericDefId, GenericParamId, ItemContainerId, Lookup, TupleFieldId, TupleId, }; use hir_expand::name::{name, Name}; use stdx::always; @@ -1895,7 +1895,8 @@ impl InferenceContext<'_> { let callable_ty = self.resolve_ty_shallow(callable_ty); if let TyKind::FnDef(fn_def, parameters) = callable_ty.kind(Interner) { let def: CallableDefId = from_chalk(self.db, *fn_def); - let generic_predicates = self.db.generic_predicates(def.into()); + let generic_predicates = + self.db.generic_predicates(GenericDefId::from(self.db.upcast(), def)); for predicate in generic_predicates.iter() { let (predicate, binders) = predicate .clone() diff --git a/crates/hir-ty/src/infer/path.rs b/crates/hir-ty/src/infer/path.rs index d876008cd5..490ecfd7fa 100644 --- a/crates/hir-ty/src/infer/path.rs +++ b/crates/hir-ty/src/infer/path.rs @@ -41,14 +41,7 @@ impl InferenceContext<'_> { fn resolve_value_path(&mut self, path: &Path, id: ExprOrPatId) -> Option { let (value, self_subst) = self.resolve_value_path_inner(path, id)?; - let value_def = match value { - ValueNs::LocalBinding(pat) => match self.result.type_of_binding.get(pat) { - Some(ty) => return Some(ValuePathResolution::NonGeneric(ty.clone())), - None => { - never!("uninferred pattern?"); - return None; - } - }, + let value_def: ValueTyDefId = match value { ValueNs::FunctionId(it) => it.into(), ValueNs::ConstId(it) => it.into(), ValueNs::StaticId(it) => it.into(), @@ -62,48 +55,79 @@ impl InferenceContext<'_> { it.into() } + ValueNs::LocalBinding(pat) => { + return match self.result.type_of_binding.get(pat) { + Some(ty) => Some(ValuePathResolution::NonGeneric(ty.clone())), + None => { + never!("uninferred pattern?"); + None + } + } + } ValueNs::ImplSelf(impl_id) => { let generics = crate::generics::generics(self.db.upcast(), impl_id.into()); let substs = generics.placeholder_subst(self.db); let ty = self.db.impl_self_ty(impl_id).substitute(Interner, &substs); - if let Some((AdtId::StructId(struct_id), substs)) = ty.as_adt() { - return 
Some(ValuePathResolution::GenericDef( + return if let Some((AdtId::StructId(struct_id), substs)) = ty.as_adt() { + Some(ValuePathResolution::GenericDef( struct_id.into(), struct_id.into(), substs.clone(), - )); + )) } else { // FIXME: report error, invalid Self reference - return None; - } + None + }; } ValueNs::GenericParam(it) => { return Some(ValuePathResolution::NonGeneric(self.db.const_param_ty(it))) } }; + let generic_def_id = value_def.to_generic_def_id(self.db); + let Some(generic_def) = generic_def_id else { + // `value_def` is the kind of item that can never be generic (i.e. statics, at least + // currently). We can just skip the binders to get its type. + let (ty, binders) = self.db.value_ty(value_def)?.into_value_and_skipped_binders(); + stdx::always!(binders.is_empty(Interner), "non-empty binders for non-generic def",); + return Some(ValuePathResolution::NonGeneric(ty)); + }; + let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver, self.owner.into()); let substs = ctx.substs_from_path(path, value_def, true); let substs = substs.as_slice(Interner); + + if let ValueNs::EnumVariantId(_) = value { + let mut it = self_subst + .as_ref() + .map_or(&[][..], |s| s.as_slice(Interner)) + .iter() + .chain(substs) + .cloned(); + let builder = TyBuilder::subst_for_def(self.db, generic_def, None); + let substs = builder + .fill(|x| { + it.next().unwrap_or_else(|| match x { + ParamKind::Type => { + self.result.standard_types.unknown.clone().cast(Interner) + } + ParamKind::Const(ty) => consteval::unknown_const_as_generic(ty.clone()), + ParamKind::Lifetime => error_lifetime().cast(Interner), + }) + }) + .build(); + + return Some(ValuePathResolution::GenericDef(value_def, generic_def, substs)); + } + let parent_substs = self_subst.or_else(|| { - let generics = generics(self.db.upcast(), value_def.to_generic_def_id()?); + let generics = generics(self.db.upcast(), generic_def_id?); let parent_params_len = generics.parent_generics()?.len(); let parent_args = &substs[substs.len() - parent_params_len..]; Some(Substitution::from_iter(Interner, parent_args)) }); let parent_substs_len = parent_substs.as_ref().map_or(0, |s| s.len(Interner)); let mut it = substs.iter().take(substs.len() - parent_substs_len).cloned(); - - let Some(generic_def) = value_def.to_generic_def_id() else { - // `value_def` is the kind of item that can never be generic (i.e. statics, at least - // currently). We can just skip the binders to get its type. 
- let (ty, binders) = self.db.value_ty(value_def)?.into_value_and_skipped_binders(); - stdx::always!( - parent_substs.is_none() && binders.is_empty(Interner), - "non-empty binders for non-generic def", - ); - return Some(ValuePathResolution::NonGeneric(ty)); - }; let builder = TyBuilder::subst_for_def(self.db, generic_def, parent_substs); let substs = builder .fill(|x| { diff --git a/crates/hir-ty/src/lib.rs b/crates/hir-ty/src/lib.rs index 5e33e1285e..07c0cf920f 100644 --- a/crates/hir-ty/src/lib.rs +++ b/crates/hir-ty/src/lib.rs @@ -60,7 +60,7 @@ use chalk_ir::{ NoSolution, }; use either::Either; -use hir_def::{hir::ExprId, type_ref::Rawness, GeneralConstId, TypeOrConstParamId}; +use hir_def::{hir::ExprId, type_ref::Rawness, CallableDefId, GeneralConstId, TypeOrConstParamId}; use hir_expand::name; use la_arena::{Arena, Idx}; use mir::{MirEvalError, VTableMap}; @@ -84,8 +84,8 @@ pub use infer::{ }; pub use interner::Interner; pub use lower::{ - associated_type_shorthand_candidates, CallableDefId, ImplTraitLoweringMode, ParamLoweringMode, - TyDefId, TyLoweringContext, ValueTyDefId, + associated_type_shorthand_candidates, ImplTraitLoweringMode, ParamLoweringMode, TyDefId, + TyLoweringContext, ValueTyDefId, }; pub use mapping::{ from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id, from_placeholder_idx, diff --git a/crates/hir-ty/src/lower.rs b/crates/hir-ty/src/lower.rs index 96f545415e..fd215adde2 100644 --- a/crates/hir-ty/src/lower.rs +++ b/crates/hir-ty/src/lower.rs @@ -11,10 +11,7 @@ use std::{ ops::{self, Not as _}, }; -use base_db::{ - salsa::{Cycle, InternValueTrivial}, - CrateId, -}; +use base_db::{salsa::Cycle, CrateId}; use chalk_ir::{ cast::Cast, fold::{Shift, TypeFoldable}, @@ -38,10 +35,10 @@ use hir_def::{ type_ref::{ ConstRef, LifetimeRef, TraitBoundModifier, TraitRef as HirTraitRef, TypeBound, TypeRef, }, - AdtId, AssocItemId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, FunctionId, - GenericDefId, GenericParamId, HasModule, ImplId, InTypeConstLoc, ItemContainerId, LocalFieldId, - Lookup, ModuleDefId, StaticId, StructId, TraitId, TypeAliasId, TypeOrConstParamId, TypeOwnerId, - UnionId, VariantId, + AdtId, AssocItemId, CallableDefId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, + FunctionId, GenericDefId, GenericParamId, HasModule, ImplId, InTypeConstLoc, ItemContainerId, + LocalFieldId, Lookup, StaticId, StructId, TraitId, TypeAliasId, TypeOrConstParamId, + TypeOwnerId, UnionId, VariantId, }; use hir_expand::{name::Name, ExpandResult}; use intern::Interned; @@ -1535,7 +1532,7 @@ pub(crate) fn generic_predicates_for_param_query( def: GenericDefId, param_id: TypeOrConstParamId, assoc_name: Option, -) -> Arc<[Binders]> { +) -> GenericPredicates { let resolver = def.resolver(db.upcast()); let ctx = if let GenericDefId::FunctionId(_) = def { TyLoweringContext::new(db, &resolver, def.into()) @@ -1611,7 +1608,7 @@ pub(crate) fn generic_predicates_for_param_query( ); }; } - predicates.into() + GenericPredicates(predicates.is_empty().not().then(|| predicates.into())) } pub(crate) fn generic_predicates_for_param_recover( @@ -1620,15 +1617,15 @@ pub(crate) fn generic_predicates_for_param_recover( _def: &GenericDefId, _param_id: &TypeOrConstParamId, _assoc_name: &Option, -) -> Arc<[Binders]> { - Arc::from_iter(None) +) -> GenericPredicates { + GenericPredicates(None) } pub(crate) fn trait_environment_for_body_query( db: &dyn HirDatabase, def: DefWithBodyId, ) -> Arc { - let Some(def) = def.as_generic_def_id() else { + let Some(def) = 
def.as_generic_def_id(db.upcast()) else { let krate = def.module(db.upcast()).krate(); return TraitEnvironment::empty(krate); }; @@ -1995,47 +1992,6 @@ fn type_for_type_alias(db: &dyn HirDatabase, t: TypeAliasId) -> Binders { } } -#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] -pub enum CallableDefId { - FunctionId(FunctionId), - StructId(StructId), - EnumVariantId(EnumVariantId), -} - -impl InternValueTrivial for CallableDefId {} - -impl_from!(FunctionId, StructId, EnumVariantId for CallableDefId); -impl From for ModuleDefId { - fn from(def: CallableDefId) -> ModuleDefId { - match def { - CallableDefId::FunctionId(f) => ModuleDefId::FunctionId(f), - CallableDefId::StructId(s) => ModuleDefId::AdtId(AdtId::StructId(s)), - CallableDefId::EnumVariantId(e) => ModuleDefId::EnumVariantId(e), - } - } -} - -impl CallableDefId { - pub fn krate(self, db: &dyn HirDatabase) -> CrateId { - let db = db.upcast(); - match self { - CallableDefId::FunctionId(f) => f.krate(db), - CallableDefId::StructId(s) => s.krate(db), - CallableDefId::EnumVariantId(e) => e.krate(db), - } - } -} - -impl From for GenericDefId { - fn from(def: CallableDefId) -> GenericDefId { - match def { - CallableDefId::FunctionId(f) => f.into(), - CallableDefId::StructId(s) => s.into(), - CallableDefId::EnumVariantId(e) => e.into(), - } - } -} - #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum TyDefId { BuiltinType(BuiltinType), @@ -2056,12 +2012,12 @@ pub enum ValueTyDefId { impl_from!(FunctionId, StructId, UnionId, EnumVariantId, ConstId, StaticId for ValueTyDefId); impl ValueTyDefId { - pub(crate) fn to_generic_def_id(self) -> Option { + pub(crate) fn to_generic_def_id(self, db: &dyn HirDatabase) -> Option { match self { Self::FunctionId(id) => Some(id.into()), Self::StructId(id) => Some(id.into()), Self::UnionId(id) => Some(id.into()), - Self::EnumVariantId(var) => Some(var.into()), + Self::EnumVariantId(var) => Some(var.lookup(db.upcast()).parent.into()), Self::ConstId(id) => Some(id.into()), Self::StaticId(_) => None, } diff --git a/crates/hir-ty/src/mir/lower.rs b/crates/hir-ty/src/mir/lower.rs index 09302846f1..0569d06695 100644 --- a/crates/hir-ty/src/mir/lower.rs +++ b/crates/hir-ty/src/mir/lower.rs @@ -483,7 +483,7 @@ impl<'ctx> MirLowerCtx<'ctx> { Ok(Some(current)) } ValueNs::GenericParam(p) => { - let Some(def) = self.owner.as_generic_def_id() else { + let Some(def) = self.owner.as_generic_def_id(self.db.upcast()) else { not_supported!("owner without generic def id"); }; let gen = generics(self.db.upcast(), def); @@ -1330,7 +1330,7 @@ impl<'ctx> MirLowerCtx<'ctx> { } fn placeholder_subst(&mut self) -> Substitution { - match self.owner.as_generic_def_id() { + match self.owner.as_generic_def_id(self.db.upcast()) { Some(it) => TyBuilder::placeholder_subst(self.db, it), None => Substitution::empty(Interner), } diff --git a/crates/hir-ty/src/mir/monomorphization.rs b/crates/hir-ty/src/mir/monomorphization.rs index 43afa61504..172dea02e6 100644 --- a/crates/hir-ty/src/mir/monomorphization.rs +++ b/crates/hir-ty/src/mir/monomorphization.rs @@ -302,7 +302,7 @@ pub fn monomorphized_mir_body_query( subst: Substitution, trait_env: Arc, ) -> Result, MirLowerError> { - let generics = owner.as_generic_def_id().map(|g_def| generics(db.upcast(), g_def)); + let generics = owner.as_generic_def_id(db.upcast()).map(|g_def| generics(db.upcast(), g_def)); let filler = &mut Filler { db, subst: &subst, trait_env, generics, owner }; let body = db.mir_body(owner)?; let mut body = (*body).clone(); @@ -327,7 +327,7 @@ pub fn 
monomorphized_mir_body_for_closure_query( trait_env: Arc, ) -> Result, MirLowerError> { let InternedClosure(owner, _) = db.lookup_intern_closure(closure.into()); - let generics = owner.as_generic_def_id().map(|g_def| generics(db.upcast(), g_def)); + let generics = owner.as_generic_def_id(db.upcast()).map(|g_def| generics(db.upcast(), g_def)); let filler = &mut Filler { db, subst: &subst, trait_env, generics, owner }; let body = db.mir_body_for_closure(closure)?; let mut body = (*body).clone(); @@ -343,7 +343,7 @@ pub fn monomorphize_mir_body_bad( trait_env: Arc, ) -> Result { let owner = body.owner; - let generics = owner.as_generic_def_id().map(|g_def| generics(db.upcast(), g_def)); + let generics = owner.as_generic_def_id(db.upcast()).map(|g_def| generics(db.upcast(), g_def)); let filler = &mut Filler { db, subst: &subst, trait_env, generics, owner }; filler.fill_body(&mut body)?; Ok(body) diff --git a/crates/hir/src/from_id.rs b/crates/hir/src/from_id.rs index 887227bf4d..2ad39817b2 100644 --- a/crates/hir/src/from_id.rs +++ b/crates/hir/src/from_id.rs @@ -182,7 +182,6 @@ impl From for GenericDefId { GenericDef::TraitAlias(it) => GenericDefId::TraitAliasId(it.id), GenericDef::TypeAlias(it) => GenericDefId::TypeAliasId(it.id), GenericDef::Impl(it) => GenericDefId::ImplId(it.id), - GenericDef::Variant(it) => GenericDefId::EnumVariantId(it.into()), GenericDef::Const(it) => GenericDefId::ConstId(it.id), } } @@ -197,7 +196,6 @@ impl From for GenericDef { GenericDefId::TraitAliasId(it) => GenericDef::TraitAlias(it.into()), GenericDefId::TypeAliasId(it) => GenericDef::TypeAlias(it.into()), GenericDefId::ImplId(it) => GenericDef::Impl(it.into()), - GenericDefId::EnumVariantId(it) => GenericDef::Variant(it.into()), GenericDefId::ConstId(it) => GenericDef::Const(it.into()), } } diff --git a/crates/hir/src/has_source.rs b/crates/hir/src/has_source.rs index 929c8b3c09..18e27130f3 100644 --- a/crates/hir/src/has_source.rs +++ b/crates/hir/src/has_source.rs @@ -5,10 +5,10 @@ use either::Either; use hir_def::{ nameres::{ModuleOrigin, ModuleSource}, src::{HasChildSource, HasSource as _}, - Lookup, MacroId, VariantId, + CallableDefId, Lookup, MacroId, VariantId, }; use hir_expand::{HirFileId, InFile}; -use hir_ty::{db::InternedClosure, CallableDefId}; +use hir_ty::db::InternedClosure; use syntax::ast; use tt::TextRange; diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index c1fe8a8b31..c742cefe89 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -52,11 +52,11 @@ use hir_def::{ path::ImportAlias, per_ns::PerNs, resolver::{HasResolver, Resolver}, - AssocItemId, AssocItemLoc, AttrDefId, ConstId, ConstParamId, CrateRootModuleId, DefWithBodyId, - EnumId, EnumVariantId, ExternCrateId, FunctionId, GenericDefId, GenericParamId, HasModule, - ImplId, InTypeConstId, ItemContainerId, LifetimeParamId, LocalFieldId, Lookup, MacroExpander, - ModuleId, StaticId, StructId, TraitAliasId, TraitId, TupleId, TypeAliasId, TypeOrConstParamId, - TypeParamId, UnionId, + AssocItemId, AssocItemLoc, AttrDefId, CallableDefId, ConstId, ConstParamId, CrateRootModuleId, + DefWithBodyId, EnumId, EnumVariantId, ExternCrateId, FunctionId, GenericDefId, GenericParamId, + HasModule, ImplId, InTypeConstId, ItemContainerId, LifetimeParamId, LocalFieldId, Lookup, + MacroExpander, ModuleId, StaticId, StructId, TraitAliasId, TraitId, TupleId, TypeAliasId, + TypeOrConstParamId, TypeParamId, UnionId, }; use hir_expand::{ attrs::collect_attrs, name::name, proc_macro::ProcMacroKind, AstId, MacroCallKind, ValueResult, @@ 
-71,7 +71,7 @@ use hir_ty::{ mir::{interpret_mir, MutBorrowKind}, primitive::UintTy, traits::FnTrait, - AliasTy, CallableDefId, CallableSig, Canonical, CanonicalVarKinds, Cast, ClosureId, GenericArg, + AliasTy, CallableSig, Canonical, CanonicalVarKinds, Cast, ClosureId, GenericArg, GenericArgData, Interner, ParamKind, QuantifiedWhereClause, Scalar, Substitution, TraitEnvironment, TraitRefExt, Ty, TyBuilder, TyDefId, TyExt, TyKind, ValueTyDefId, WhereClause, @@ -1144,7 +1144,7 @@ impl Field { let generic_def_id: GenericDefId = match self.parent { VariantDef::Struct(it) => it.id.into(), VariantDef::Union(it) => it.id.into(), - VariantDef::Variant(it) => it.id.into(), + VariantDef::Variant(it) => it.id.lookup(db.upcast()).parent.into(), }; let substs = TyBuilder::placeholder_subst(db, generic_def_id); let ty = db.field_types(var_id)[self.id].clone().substitute(Interner, &substs); @@ -1177,7 +1177,9 @@ impl Field { db.layout_of_ty( self.ty(db).ty, db.trait_environment(match hir_def::VariantId::from(self.parent) { - hir_def::VariantId::EnumVariantId(id) => GenericDefId::EnumVariantId(id), + hir_def::VariantId::EnumVariantId(id) => { + GenericDefId::AdtId(id.lookup(db.upcast()).parent.into()) + } hir_def::VariantId::StructId(id) => GenericDefId::AdtId(id.into()), hir_def::VariantId::UnionId(id) => GenericDefId::AdtId(id.into()), }), @@ -2501,7 +2503,7 @@ impl Trait { db: &dyn HirDatabase, count_required_only: bool, ) -> usize { - db.generic_params(GenericDefId::from(self.id)) + db.generic_params(self.id.into()) .type_or_consts .iter() .filter(|(_, ty)| !matches!(ty, TypeOrConstParamData::TypeParamData(ty) if ty.provenance != TypeParamProvenance::TypeParamList)) @@ -3107,9 +3109,6 @@ pub enum GenericDef { TraitAlias(TraitAlias), TypeAlias(TypeAlias), Impl(Impl), - // enum variants cannot have generics themselves, but their parent enums - // can, and this makes some code easier to write - Variant(Variant), // consts can have type parameters from their parents (i.e. 
associated consts of traits) Const(Const), } @@ -3120,7 +3119,6 @@ impl_from!( TraitAlias, TypeAlias, Impl, - Variant, Const for GenericDef ); @@ -4052,7 +4050,9 @@ impl Type { ValueTyDefId::FunctionId(it) => GenericDefId::FunctionId(it), ValueTyDefId::StructId(it) => GenericDefId::AdtId(AdtId::StructId(it)), ValueTyDefId::UnionId(it) => GenericDefId::AdtId(AdtId::UnionId(it)), - ValueTyDefId::EnumVariantId(it) => GenericDefId::EnumVariantId(it), + ValueTyDefId::EnumVariantId(it) => { + GenericDefId::AdtId(AdtId::EnumId(it.lookup(db.upcast()).parent)) + } ValueTyDefId::StaticId(_) => return Type::new(db, def, ty.skip_binders().clone()), }, ); diff --git a/crates/hir/src/term_search/expr.rs b/crates/hir/src/term_search/expr.rs index bb687f5e73..a6faa97196 100644 --- a/crates/hir/src/term_search/expr.rs +++ b/crates/hir/src/term_search/expr.rs @@ -209,7 +209,7 @@ impl Expr { } } Expr::Variant { variant, generics, params } => { - let generics = non_default_generics(db, (*variant).into(), generics); + let generics = non_default_generics(db, variant.parent_enum(db).into(), generics); let generics_str = match generics.is_empty() { true => String::new(), false => { diff --git a/crates/ide-db/src/active_parameter.rs b/crates/ide-db/src/active_parameter.rs index 088d2ec5e3..42a80d63b1 100644 --- a/crates/ide-db/src/active_parameter.rs +++ b/crates/ide-db/src/active_parameter.rs @@ -79,8 +79,9 @@ pub fn generic_def_for_node( sema: &Semantics<'_, RootDatabase>, generic_arg_list: &ast::GenericArgList, token: &SyntaxToken, -) -> Option<(hir::GenericDef, usize, bool)> { +) -> Option<(hir::GenericDef, usize, bool, Option)> { let parent = generic_arg_list.syntax().parent()?; + let mut variant = None; let def = match_ast! { match parent { ast::PathSegment(ps) => { @@ -91,7 +92,10 @@ pub fn generic_def_for_node( hir::PathResolution::Def(hir::ModuleDef::Trait(it)) => it.into(), hir::PathResolution::Def(hir::ModuleDef::TraitAlias(it)) => it.into(), hir::PathResolution::Def(hir::ModuleDef::TypeAlias(it)) => it.into(), - hir::PathResolution::Def(hir::ModuleDef::Variant(it)) => it.into(), + hir::PathResolution::Def(hir::ModuleDef::Variant(it)) => { + variant = Some(it); + it.parent_enum(sema.db).into() + }, hir::PathResolution::Def(hir::ModuleDef::BuiltinType(_)) | hir::PathResolution::Def(hir::ModuleDef::Const(_)) | hir::PathResolution::Def(hir::ModuleDef::Macro(_)) @@ -134,5 +138,5 @@ pub fn generic_def_for_node( .next() .map_or(false, |arg| !matches!(arg, ast::GenericArg::LifetimeArg(_))); - Some((def, active_param, first_arg_is_non_lifetime)) + Some((def, active_param, first_arg_is_non_lifetime, variant)) } diff --git a/crates/ide-db/src/search.rs b/crates/ide-db/src/search.rs index b62f34f415..e1cfe04898 100644 --- a/crates/ide-db/src/search.rs +++ b/crates/ide-db/src/search.rs @@ -320,7 +320,6 @@ impl Definition { hir::GenericDef::TraitAlias(it) => it.source(db).map(|src| src.syntax().cloned()), hir::GenericDef::TypeAlias(it) => it.source(db).map(|src| src.syntax().cloned()), hir::GenericDef::Impl(it) => it.source(db).map(|src| src.syntax().cloned()), - hir::GenericDef::Variant(it) => it.source(db).map(|src| src.syntax().cloned()), hir::GenericDef::Const(it) => it.source(db).map(|src| src.syntax().cloned()), }; return match def { diff --git a/crates/ide/src/signature_help.rs b/crates/ide/src/signature_help.rs index 89c725a6c4..c5eaacdb10 100644 --- a/crates/ide/src/signature_help.rs +++ b/crates/ide/src/signature_help.rs @@ -272,7 +272,7 @@ fn signature_help_for_generics( arg_list: ast::GenericArgList, 
token: SyntaxToken, ) -> Option { - let (mut generics_def, mut active_parameter, first_arg_is_non_lifetime) = + let (generics_def, mut active_parameter, first_arg_is_non_lifetime, variant) = generic_def_for_node(sema, &arg_list, &token)?; let mut res = SignatureHelp { doc: None, @@ -290,6 +290,12 @@ fn signature_help_for_generics( hir::GenericDef::Adt(hir::Adt::Enum(it)) => { res.doc = it.docs(db); format_to!(res.signature, "enum {}", it.name(db).display(db)); + if let Some(variant) = variant { + // In paths, generics of an enum can be specified *after* one of its variants. + // eg. `None::` + // We'll use the signature of the enum, but include the docs of the variant. + res.doc = variant.docs(db); + } } hir::GenericDef::Adt(hir::Adt::Struct(it)) => { res.doc = it.docs(db); @@ -311,15 +317,6 @@ fn signature_help_for_generics( res.doc = it.docs(db); format_to!(res.signature, "type {}", it.name(db).display(db)); } - hir::GenericDef::Variant(it) => { - // In paths, generics of an enum can be specified *after* one of its variants. - // eg. `None::` - // We'll use the signature of the enum, but include the docs of the variant. - res.doc = it.docs(db); - let enum_ = it.parent_enum(db); - format_to!(res.signature, "enum {}", enum_.name(db).display(db)); - generics_def = enum_.into(); - } // These don't have generic args that can be specified hir::GenericDef::Impl(_) | hir::GenericDef::Const(_) => return None, } From 5548aecdca79763438bfd0d685c5228d9888e645 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sun, 23 Jun 2024 16:13:01 +0200 Subject: [PATCH 12/94] Save a bit on empty item trees by deduplicating them --- crates/hir-def/src/db.rs | 3 +++ crates/hir-def/src/item_tree.rs | 39 ++++++++++++++++++++++++--- crates/hir-def/src/lib.rs | 18 ++++++------- crates/hir-ty/src/chalk_db.rs | 5 ++-- crates/hir-ty/src/chalk_ext.rs | 2 +- crates/hir-ty/src/db.rs | 48 ++++++++++++++++----------------- crates/hir-ty/src/display.rs | 2 +- crates/hir-ty/src/infer/expr.rs | 2 +- 8 files changed, 76 insertions(+), 43 deletions(-) diff --git a/crates/hir-def/src/db.rs b/crates/hir-def/src/db.rs index 61fed71218..eb45ad343e 100644 --- a/crates/hir-def/src/db.rs +++ b/crates/hir-def/src/db.rs @@ -80,9 +80,11 @@ pub trait InternDatabase: SourceDatabase { #[salsa::query_group(DefDatabaseStorage)] pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast { + /// Whether to expand procedural macros during name resolution. #[salsa::input] fn expand_proc_attr_macros(&self) -> bool; + /// Computes an [`ItemTree`] for the given file or macro expansion. #[salsa::invoke(ItemTree::file_item_tree_query)] fn file_item_tree(&self, file_id: HirFileId) -> Arc; @@ -96,6 +98,7 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast Arc; + /// Turns a MacroId into a MacroDefId, describing the macro's definition post name resolution. 
fn macro_def(&self, m: MacroId) -> MacroDefId; // region:data diff --git a/crates/hir-def/src/item_tree.rs b/crates/hir-def/src/item_tree.rs index c3b7a78301..be74d15814 100644 --- a/crates/hir-def/src/item_tree.rs +++ b/crates/hir-def/src/item_tree.rs @@ -48,6 +48,7 @@ use either::Either; use hir_expand::{attrs::RawAttrs, name::Name, ExpandTo, HirFileId, InFile}; use intern::Interned; use la_arena::{Arena, Idx, IdxRange, RawIdx}; +use once_cell::sync::OnceCell; use rustc_hash::FxHashMap; use smallvec::SmallVec; use span::{AstIdNode, FileAstId, SyntaxContextId}; @@ -100,6 +101,7 @@ pub struct ItemTree { impl ItemTree { pub(crate) fn file_item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) -> Arc { let _p = tracing::info_span!("file_item_tree_query", ?file_id).entered(); + static EMPTY: OnceCell> = OnceCell::new(); let syntax = db.parse_or_expand(file_id); @@ -131,18 +133,47 @@ impl ItemTree { if let Some(attrs) = top_attrs { item_tree.attrs.insert(AttrOwner::TopLevel, attrs); } - item_tree.shrink_to_fit(); - Arc::new(item_tree) + if item_tree.data.is_none() && item_tree.top_level.is_empty() && item_tree.attrs.is_empty() + { + EMPTY + .get_or_init(|| { + Arc::new(ItemTree { + top_level: SmallVec::new_const(), + attrs: FxHashMap::default(), + data: None, + }) + }) + .clone() + } else { + item_tree.shrink_to_fit(); + Arc::new(item_tree) + } } pub(crate) fn block_item_tree_query(db: &dyn DefDatabase, block: BlockId) -> Arc { + let _p = tracing::info_span!("block_item_tree_query", ?block).entered(); + static EMPTY: OnceCell> = OnceCell::new(); + let loc = block.lookup(db); let block = loc.ast_id.to_node(db.upcast()); let ctx = lower::Ctx::new(db, loc.ast_id.file_id); let mut item_tree = ctx.lower_block(&block); - item_tree.shrink_to_fit(); - Arc::new(item_tree) + if item_tree.data.is_none() && item_tree.top_level.is_empty() && item_tree.attrs.is_empty() + { + EMPTY + .get_or_init(|| { + Arc::new(ItemTree { + top_level: SmallVec::new_const(), + attrs: FxHashMap::default(), + data: None, + }) + }) + .clone() + } else { + item_tree.shrink_to_fit(); + Arc::new(item_tree) + } } /// Returns an iterator over all items located at the top level of the `HirFileId` this diff --git a/crates/hir-def/src/lib.rs b/crates/hir-def/src/lib.rs index b628137ef5..3fa98efbb5 100644 --- a/crates/hir-def/src/lib.rs +++ b/crates/hir-def/src/lib.rs @@ -977,6 +977,14 @@ impl GenericDefId { _ => None, } } + + pub fn from_callable(db: &dyn DefDatabase, def: CallableDefId) -> GenericDefId { + match def { + CallableDefId::FunctionId(f) => f.into(), + CallableDefId::StructId(s) => s.into(), + CallableDefId::EnumVariantId(e) => e.lookup(db).parent.into(), + } + } } impl From for GenericDefId { @@ -1019,16 +1027,6 @@ impl CallableDefId { } } -impl GenericDefId { - pub fn from(db: &dyn DefDatabase, def: CallableDefId) -> GenericDefId { - match def { - CallableDefId::FunctionId(f) => f.into(), - CallableDefId::StructId(s) => s.into(), - CallableDefId::EnumVariantId(e) => e.lookup(db).parent.into(), - } - } -} - #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] pub enum AttrDefId { ModuleId(ModuleId), diff --git a/crates/hir-ty/src/chalk_db.rs b/crates/hir-ty/src/chalk_db.rs index 43c34a7eda..3ac8cbaaf8 100644 --- a/crates/hir-ty/src/chalk_db.rs +++ b/crates/hir-ty/src/chalk_db.rs @@ -915,7 +915,7 @@ fn type_alias_associated_ty_value( pub(crate) fn fn_def_datum_query(db: &dyn HirDatabase, fn_def_id: FnDefId) -> Arc { let callable_def: CallableDefId = from_chalk(db, fn_def_id); - let generic_def = 
GenericDefId::from(db.upcast(), callable_def); + let generic_def = GenericDefId::from_callable(db.upcast(), callable_def); let generic_params = generics(db.upcast(), generic_def); let (sig, binders) = db.callable_item_signature(callable_def).into_value_and_skipped_binders(); let bound_vars = generic_params.bound_vars_subst(db, DebruijnIndex::INNERMOST); @@ -946,7 +946,8 @@ pub(crate) fn fn_def_datum_query(db: &dyn HirDatabase, fn_def_id: FnDefId) -> Ar pub(crate) fn fn_def_variance_query(db: &dyn HirDatabase, fn_def_id: FnDefId) -> Variances { let callable_def: CallableDefId = from_chalk(db, fn_def_id); - let generic_params = generics(db.upcast(), GenericDefId::from(db.upcast(), callable_def)); + let generic_params = + generics(db.upcast(), GenericDefId::from_callable(db.upcast(), callable_def)); Variances::from_iter( Interner, std::iter::repeat(chalk_ir::Variance::Invariant).take(generic_params.len()), diff --git a/crates/hir-ty/src/chalk_ext.rs b/crates/hir-ty/src/chalk_ext.rs index 117fbd7bab..320e5bd8a2 100644 --- a/crates/hir-ty/src/chalk_ext.rs +++ b/crates/hir-ty/src/chalk_ext.rs @@ -188,7 +188,7 @@ impl TyExt for Ty { fn as_generic_def(&self, db: &dyn HirDatabase) -> Option { match *self.kind(Interner) { TyKind::Adt(AdtId(adt), ..) => Some(adt.into()), - TyKind::FnDef(callable, ..) => Some(GenericDefId::from( + TyKind::FnDef(callable, ..) => Some(GenericDefId::from_callable( db.upcast(), db.lookup_intern_callable_def(callable.into()), )), diff --git a/crates/hir-ty/src/db.rs b/crates/hir-ty/src/db.rs index 164c244483..734aad4945 100644 --- a/crates/hir-ty/src/db.rs +++ b/crates/hir-ty/src/db.rs @@ -80,8 +80,32 @@ pub trait HirDatabase: DefDatabase + Upcast { #[salsa::cycle(crate::consteval::const_eval_discriminant_recover)] fn const_eval_discriminant(&self, def: EnumVariantId) -> Result; + #[salsa::invoke(crate::method_resolution::lookup_impl_method_query)] + fn lookup_impl_method( + &self, + env: Arc, + func: FunctionId, + fn_subst: Substitution, + ) -> (FunctionId, Substitution); + // endregion:mir + #[salsa::invoke(crate::layout::layout_of_adt_query)] + #[salsa::cycle(crate::layout::layout_of_adt_recover)] + fn layout_of_adt( + &self, + def: AdtId, + subst: Substitution, + env: Arc, + ) -> Result, LayoutError>; + + #[salsa::invoke(crate::layout::layout_of_ty_query)] + #[salsa::cycle(crate::layout::layout_of_ty_recover)] + fn layout_of_ty(&self, ty: Ty, env: Arc) -> Result, LayoutError>; + + #[salsa::invoke(crate::layout::target_data_layout_query)] + fn target_data_layout(&self, krate: CrateId) -> Result, Arc>; + #[salsa::invoke(crate::lower::ty_query)] #[salsa::cycle(crate::lower::ty_recover)] fn ty(&self, def: TyDefId) -> Binders; @@ -104,30 +128,6 @@ pub trait HirDatabase: DefDatabase + Upcast { #[salsa::invoke(crate::lower::field_types_query)] fn field_types(&self, var: VariantId) -> Arc>>; - #[salsa::invoke(crate::layout::layout_of_adt_query)] - #[salsa::cycle(crate::layout::layout_of_adt_recover)] - fn layout_of_adt( - &self, - def: AdtId, - subst: Substitution, - env: Arc, - ) -> Result, LayoutError>; - - #[salsa::invoke(crate::layout::layout_of_ty_query)] - #[salsa::cycle(crate::layout::layout_of_ty_recover)] - fn layout_of_ty(&self, ty: Ty, env: Arc) -> Result, LayoutError>; - - #[salsa::invoke(crate::layout::target_data_layout_query)] - fn target_data_layout(&self, krate: CrateId) -> Result, Arc>; - - #[salsa::invoke(crate::method_resolution::lookup_impl_method_query)] - fn lookup_impl_method( - &self, - env: Arc, - func: FunctionId, - fn_subst: Substitution, - ) -> 
(FunctionId, Substitution); - #[salsa::invoke(crate::lower::callable_item_sig)] fn callable_item_signature(&self, def: CallableDefId) -> PolyFnSig; diff --git a/crates/hir-ty/src/display.rs b/crates/hir-ty/src/display.rs index 70b670f5e8..458970aa70 100644 --- a/crates/hir-ty/src/display.rs +++ b/crates/hir-ty/src/display.rs @@ -988,7 +988,7 @@ impl HirDisplay for Ty { f.end_location_link(); if parameters.len(Interner) > 0 { - let generic_def_id = GenericDefId::from(db.upcast(), def); + let generic_def_id = GenericDefId::from_callable(db.upcast(), def); let generics = generics(db.upcast(), generic_def_id); let (parent_len, self_param, type_, const_, impl_, lifetime) = generics.provenance_split(); diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs index e053b0c1a4..3647243537 100644 --- a/crates/hir-ty/src/infer/expr.rs +++ b/crates/hir-ty/src/infer/expr.rs @@ -1896,7 +1896,7 @@ impl InferenceContext<'_> { if let TyKind::FnDef(fn_def, parameters) = callable_ty.kind(Interner) { let def: CallableDefId = from_chalk(self.db, *fn_def); let generic_predicates = - self.db.generic_predicates(GenericDefId::from(self.db.upcast(), def)); + self.db.generic_predicates(GenericDefId::from_callable(self.db.upcast(), def)); for predicate in generic_predicates.iter() { let (predicate, binders) = predicate .clone() From e052b3e9a623b694e1c424559311498e3b90f975 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sun, 23 Jun 2024 16:32:16 +0200 Subject: [PATCH 13/94] Intern ModPath within RawVisibility --- crates/hir-def/src/data.rs | 2 +- crates/hir-def/src/db.rs | 1 + crates/hir-def/src/item_tree.rs | 36 +++++++++++++++++++------------- crates/hir-def/src/visibility.rs | 12 +++++++---- 4 files changed, 31 insertions(+), 20 deletions(-) diff --git a/crates/hir-def/src/data.rs b/crates/hir-def/src/data.rs index 4338163672..8cf1e15f31 100644 --- a/crates/hir-def/src/data.rs +++ b/crates/hir-def/src/data.rs @@ -323,7 +323,7 @@ impl TraitAliasData { pub struct ImplData { pub target_trait: Option>, pub self_ty: Interned, - pub items: Vec, + pub items: Box<[AssocItemId]>, pub is_negative: bool, pub is_unsafe: bool, // box it as the vec is usually empty anyways diff --git a/crates/hir-def/src/db.rs b/crates/hir-def/src/db.rs index eb45ad343e..0eb9e7d30b 100644 --- a/crates/hir-def/src/db.rs +++ b/crates/hir-def/src/db.rs @@ -193,6 +193,7 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast Arc>; + // should this really be a query? 
#[salsa::invoke(crate::attr::fields_attrs_source_map)] fn fields_attrs_source_map( &self, diff --git a/crates/hir-def/src/item_tree.rs b/crates/hir-def/src/item_tree.rs index be74d15814..7650dfe9f3 100644 --- a/crates/hir-def/src/item_tree.rs +++ b/crates/hir-def/src/item_tree.rs @@ -616,24 +616,30 @@ impl Index for ItemTree { type Output = RawVisibility; fn index(&self, index: RawVisibilityId) -> &Self::Output { static VIS_PUB: RawVisibility = RawVisibility::Public; - static VIS_PRIV_IMPLICIT: RawVisibility = RawVisibility::Module( - ModPath::from_kind(PathKind::SELF), - VisibilityExplicitness::Implicit, - ); - static VIS_PRIV_EXPLICIT: RawVisibility = RawVisibility::Module( - ModPath::from_kind(PathKind::SELF), - VisibilityExplicitness::Explicit, - ); - static VIS_PUB_CRATE: RawVisibility = RawVisibility::Module( - ModPath::from_kind(PathKind::Crate), - VisibilityExplicitness::Explicit, - ); + static VIS_PRIV_IMPLICIT: OnceCell = OnceCell::new(); + static VIS_PRIV_EXPLICIT: OnceCell = OnceCell::new(); + static VIS_PUB_CRATE: OnceCell = OnceCell::new(); match index { - RawVisibilityId::PRIV_IMPLICIT => &VIS_PRIV_IMPLICIT, - RawVisibilityId::PRIV_EXPLICIT => &VIS_PRIV_EXPLICIT, + RawVisibilityId::PRIV_IMPLICIT => VIS_PRIV_IMPLICIT.get_or_init(|| { + RawVisibility::Module( + Interned::new(ModPath::from_kind(PathKind::SELF)), + VisibilityExplicitness::Implicit, + ) + }), + RawVisibilityId::PRIV_EXPLICIT => VIS_PRIV_EXPLICIT.get_or_init(|| { + RawVisibility::Module( + Interned::new(ModPath::from_kind(PathKind::SELF)), + VisibilityExplicitness::Explicit, + ) + }), RawVisibilityId::PUB => &VIS_PUB, - RawVisibilityId::PUB_CRATE => &VIS_PUB_CRATE, + RawVisibilityId::PUB_CRATE => VIS_PUB_CRATE.get_or_init(|| { + RawVisibility::Module( + Interned::new(ModPath::from_kind(PathKind::Crate)), + VisibilityExplicitness::Explicit, + ) + }), _ => &self.data().vis.arena[Idx::from_raw(index.0.into())], } } diff --git a/crates/hir-def/src/visibility.rs b/crates/hir-def/src/visibility.rs index e08718fc83..11d91513f1 100644 --- a/crates/hir-def/src/visibility.rs +++ b/crates/hir-def/src/visibility.rs @@ -2,6 +2,7 @@ use std::iter; +use intern::Interned; use la_arena::ArenaMap; use span::SyntaxContextId; use syntax::ast; @@ -20,14 +21,17 @@ use crate::{ pub enum RawVisibility { /// `pub(in module)`, `pub(crate)` or `pub(super)`. Also private, which is /// equivalent to `pub(self)`. - Module(ModPath, VisibilityExplicitness), + Module(Interned, VisibilityExplicitness), /// `pub`. 
Public, } impl RawVisibility { - pub(crate) const fn private() -> RawVisibility { - RawVisibility::Module(ModPath::from_kind(PathKind::SELF), VisibilityExplicitness::Implicit) + pub(crate) fn private() -> RawVisibility { + RawVisibility::Module( + Interned::new(ModPath::from_kind(PathKind::SELF)), + VisibilityExplicitness::Implicit, + ) } pub(crate) fn from_ast( @@ -60,7 +64,7 @@ impl RawVisibility { ast::VisibilityKind::PubSelf => ModPath::from_kind(PathKind::SELF), ast::VisibilityKind::Pub => return RawVisibility::Public, }; - RawVisibility::Module(path, VisibilityExplicitness::Explicit) + RawVisibility::Module(Interned::new(path), VisibilityExplicitness::Explicit) } pub fn resolve( From 8df034d45339a2f66c0cfa20dd0c84b61f285a1a Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sun, 23 Jun 2024 17:27:31 +0200 Subject: [PATCH 14/94] Shrink mbe's Op --- Cargo.lock | 1 + crates/hir-def/src/nameres.rs | 4 +++- crates/hir-expand/src/change.rs | 3 ++- crates/hir-ty/src/method_resolution.rs | 2 +- crates/hir/src/lib.rs | 2 +- crates/hir/src/symbols.rs | 2 +- crates/mbe/Cargo.toml | 1 + crates/mbe/src/benchmark.rs | 4 ++-- crates/mbe/src/expander/matcher.rs | 4 ++-- crates/mbe/src/expander/transcriber.rs | 6 ++--- crates/mbe/src/parser.rs | 32 ++++++++++++++++++-------- crates/mbe/src/tt_iter.rs | 23 ++++++++++++------ 12 files changed, 55 insertions(+), 29 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 57d43dad3f..ce71a89897 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1046,6 +1046,7 @@ checksum = "75761162ae2b0e580d7e7c390558127e5f01b4194debd6221fd8c207fc80e3f5" name = "mbe" version = "0.0.0" dependencies = [ + "arrayvec", "cov-mark", "parser", "rustc-hash", diff --git a/crates/hir-def/src/nameres.rs b/crates/hir-def/src/nameres.rs index 162b6429c3..8e7ef48112 100644 --- a/crates/hir-def/src/nameres.rs +++ b/crates/hir-def/src/nameres.rs @@ -103,12 +103,13 @@ const PREDEFINED_TOOLS: &[SmolStr] = &[ /// is computed by the `block_def_map` query. #[derive(Debug, PartialEq, Eq)] pub struct DefMap { + /// The crate this `DefMap` belongs to. + krate: CrateId, /// When this is a block def map, this will hold the block id of the block and module that /// contains this block. block: Option, /// The modules and their data declared in this crate. pub modules: Arena, - krate: CrateId, /// The prelude module for this crate. This either comes from an import /// marked with the `prelude_import` attribute, or (in the normal case) from /// a dependency (`std` or `core`). @@ -124,6 +125,7 @@ pub struct DefMap { /// Tracks which custom derives are in scope for an item, to allow resolution of derive helper /// attributes. + // FIXME: Figure out a better way for the IDE layer to resolve these? derive_helpers_in_scope: FxHashMap, Vec<(Name, MacroId, MacroCallId)>>, /// The diagnostics that need to be emitted for this crate. 
diff --git a/crates/hir-expand/src/change.rs b/crates/hir-expand/src/change.rs index 1a3dd0e7dd..08491db372 100644 --- a/crates/hir-expand/src/change.rs +++ b/crates/hir-expand/src/change.rs @@ -25,7 +25,8 @@ impl ChangeWithProcMacros { pub fn apply(self, db: &mut (impl ExpandDatabase + SourceDatabaseExt)) { self.source_change.apply(db); - if let Some(proc_macros) = self.proc_macros { + if let Some(mut proc_macros) = self.proc_macros { + proc_macros.shrink_to_fit(); db.set_proc_macros_with_durability(Arc::new(proc_macros), Durability::HIGH); } if let Some(target_data_layouts) = self.target_data_layouts { diff --git a/crates/hir-ty/src/method_resolution.rs b/crates/hir-ty/src/method_resolution.rs index 5ce124d6d2..ac11da789a 100644 --- a/crates/hir-ty/src/method_resolution.rs +++ b/crates/hir-ty/src/method_resolution.rs @@ -1322,7 +1322,7 @@ fn iterate_inherent_methods( callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId, bool) -> ControlFlow<()>, ) -> ControlFlow<()> { for &impl_id in impls.for_self_ty(self_ty) { - for &item in &table.db.impl_data(impl_id).items { + for &item in table.db.impl_data(impl_id).items.iter() { let visible = match is_valid_impl_method_candidate( table, self_ty, diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index c742cefe89..eea25ac18a 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -760,7 +760,7 @@ impl Module { impl_assoc_items_scratch.clear(); } - for &item in &db.impl_data(impl_def.id).items { + for &item in db.impl_data(impl_def.id).items.iter() { AssocItem::from(item).diagnostics(db, acc, style_lints); } } diff --git a/crates/hir/src/symbols.rs b/crates/hir/src/symbols.rs index 3b88836c24..02905ca2ce 100644 --- a/crates/hir/src/symbols.rs +++ b/crates/hir/src/symbols.rs @@ -231,7 +231,7 @@ impl<'a> SymbolCollector<'a> { let impl_data = self.db.impl_data(impl_id); let impl_name = Some(SmolStr::new(impl_data.self_ty.display(self.db).to_string())); self.with_container_name(impl_name, |s| { - for &assoc_item_id in &impl_data.items { + for &assoc_item_id in impl_data.items.iter() { s.push_assoc_item(assoc_item_id) } }) diff --git a/crates/mbe/Cargo.toml b/crates/mbe/Cargo.toml index 2046fa943a..18444018e1 100644 --- a/crates/mbe/Cargo.toml +++ b/crates/mbe/Cargo.toml @@ -16,6 +16,7 @@ cov-mark = "2.0.0-pre.1" rustc-hash.workspace = true smallvec.workspace = true tracing.workspace = true +arrayvec.workspace = true # local deps syntax.workspace = true diff --git a/crates/mbe/src/benchmark.rs b/crates/mbe/src/benchmark.rs index 19ba5c7a15..27dbc84a2b 100644 --- a/crates/mbe/src/benchmark.rs +++ b/crates/mbe/src/benchmark.rs @@ -170,7 +170,7 @@ fn invocation_fixtures( Op::Literal(it) => token_trees.push(tt::Leaf::from(it.clone()).into()), Op::Ident(it) => token_trees.push(tt::Leaf::from(it.clone()).into()), Op::Punct(puncts) => { - for punct in puncts { + for punct in puncts.as_slice() { token_trees.push(tt::Leaf::from(*punct).into()); } } @@ -187,7 +187,7 @@ fn invocation_fixtures( } if i + 1 != cnt { if let Some(sep) = separator { - match sep { + match &**sep { Separator::Literal(it) => { token_trees.push(tt::Leaf::Literal(it.clone()).into()) } diff --git a/crates/mbe/src/expander/matcher.rs b/crates/mbe/src/expander/matcher.rs index 0cec4e70da..972ad188ac 100644 --- a/crates/mbe/src/expander/matcher.rs +++ b/crates/mbe/src/expander/matcher.rs @@ -59,7 +59,7 @@ //! eof: [a $( a )* a b ·] //! 
``` -use std::rc::Rc; +use std::{rc::Rc, sync::Arc}; use smallvec::{smallvec, SmallVec}; use span::{Edition, Span}; @@ -315,7 +315,7 @@ struct MatchState<'t> { up: Option>>, /// The separator if we are in a repetition. - sep: Option, + sep: Option>, /// The KleeneOp of this sequence if we are in a repetition. sep_kind: Option, diff --git a/crates/mbe/src/expander/transcriber.rs b/crates/mbe/src/expander/transcriber.rs index 0f689a2692..c09cbd1d07 100644 --- a/crates/mbe/src/expander/transcriber.rs +++ b/crates/mbe/src/expander/transcriber.rs @@ -195,7 +195,7 @@ fn expand_subtree( .into(), ), Op::Punct(puncts) => { - for punct in puncts { + for punct in puncts.as_slice() { arena.push( tt::Leaf::from({ let mut it = *punct; @@ -222,7 +222,7 @@ fn expand_subtree( } Op::Repeat { tokens: subtree, kind, separator } => { let ExpandResult { value: fragment, err: e } = - expand_repeat(ctx, subtree, *kind, separator, arena, marker); + expand_repeat(ctx, subtree, *kind, separator.as_deref(), arena, marker); err = err.or(e); push_fragment(ctx, arena, fragment) } @@ -383,7 +383,7 @@ fn expand_repeat( ctx: &mut ExpandCtx<'_>, template: &MetaTemplate, kind: RepeatKind, - separator: &Option, + separator: Option<&Separator>, arena: &mut Vec>, marker: impl Fn(&mut Span) + Copy, ) -> ExpandResult { diff --git a/crates/mbe/src/parser.rs b/crates/mbe/src/parser.rs index bbe00f0afc..256068a899 100644 --- a/crates/mbe/src/parser.rs +++ b/crates/mbe/src/parser.rs @@ -1,7 +1,9 @@ //! Parser recognizes special macro syntax, `$var` and `$(repeat)*`, in token //! trees. -use smallvec::{smallvec, SmallVec}; +use std::sync::Arc; + +use arrayvec::ArrayVec; use span::{Edition, Span, SyntaxContextId}; use syntax::SmolStr; @@ -86,14 +88,14 @@ pub(crate) enum Op { Repeat { tokens: MetaTemplate, kind: RepeatKind, - separator: Option, + separator: Option>, }, Subtree { tokens: MetaTemplate, delimiter: tt::Delimiter, }, Literal(tt::Literal), - Punct(SmallVec<[tt::Punct; 3]>), + Punct(Box, 3>>), Ident(tt::Ident), } @@ -126,7 +128,7 @@ pub(crate) enum MetaVarKind { pub(crate) enum Separator { Literal(tt::Literal), Ident(tt::Ident), - Puncts(SmallVec<[tt::Punct; 3]>), + Puncts(ArrayVec, 3>), } // Note that when we compare a Separator, we just care about its textual value. @@ -165,7 +167,13 @@ fn next_op( src.next().expect("first token already peeked"); // Note that the '$' itself is a valid token inside macro_rules. let second = match src.next() { - None => return Ok(Op::Punct(smallvec![*p])), + None => { + return Ok(Op::Punct({ + let mut res = ArrayVec::new(); + res.push(*p); + Box::new(res) + })) + } Some(it) => it, }; match second { @@ -173,7 +181,7 @@ fn next_op( tt::DelimiterKind::Parenthesis => { let (separator, kind) = parse_repeat(src)?; let tokens = MetaTemplate::parse(edition, subtree, mode, new_meta_vars)?; - Op::Repeat { tokens, separator, kind } + Op::Repeat { tokens, separator: separator.map(Arc::new), kind } } tt::DelimiterKind::Brace => match mode { Mode::Template => { @@ -216,7 +224,11 @@ fn next_op( "`$$` is not allowed on the pattern side", )) } - Mode::Template => Op::Punct(smallvec![*punct]), + Mode::Template => Op::Punct({ + let mut res = ArrayVec::new(); + res.push(*punct); + Box::new(res) + }), }, tt::Leaf::Punct(_) | tt::Leaf::Literal(_) => { return Err(ParseError::expected("expected ident")) @@ -238,7 +250,7 @@ fn next_op( tt::TokenTree::Leaf(tt::Leaf::Punct(_)) => { // There's at least one punct so this shouldn't fail. 
let puncts = src.expect_glued_punct().unwrap(); - Op::Punct(puncts) + Op::Punct(Box::new(puncts)) } tt::TokenTree::Subtree(subtree) => { @@ -290,7 +302,7 @@ fn is_boolean_literal(lit: &tt::Literal) -> bool { } fn parse_repeat(src: &mut TtIter<'_, Span>) -> Result<(Option, RepeatKind), ParseError> { - let mut separator = Separator::Puncts(SmallVec::new()); + let mut separator = Separator::Puncts(ArrayVec::new()); for tt in src { let tt = match tt { tt::TokenTree::Leaf(leaf) => leaf, @@ -312,7 +324,7 @@ fn parse_repeat(src: &mut TtIter<'_, Span>) -> Result<(Option, Repeat '+' => RepeatKind::OneOrMore, '?' => RepeatKind::ZeroOrOne, _ => match &mut separator { - Separator::Puncts(puncts) if puncts.len() != 3 => { + Separator::Puncts(puncts) if puncts.len() < 3 => { puncts.push(*punct); continue; } diff --git a/crates/mbe/src/tt_iter.rs b/crates/mbe/src/tt_iter.rs index 9c7d7af7b1..3581fbbfd8 100644 --- a/crates/mbe/src/tt_iter.rs +++ b/crates/mbe/src/tt_iter.rs @@ -3,7 +3,7 @@ use core::fmt; -use smallvec::{smallvec, SmallVec}; +use arrayvec::ArrayVec; use syntax::SyntaxKind; use crate::{to_parser_input::to_parser_input, ExpandError, ExpandResult}; @@ -93,13 +93,15 @@ impl<'a, S: Copy> TtIter<'a, S> { /// /// This method currently may return a single quotation, which is part of lifetime ident and /// conceptually not a punct in the context of mbe. Callers should handle this. - pub(crate) fn expect_glued_punct(&mut self) -> Result; 3]>, ()> { + pub(crate) fn expect_glued_punct(&mut self) -> Result, 3>, ()> { let tt::TokenTree::Leaf(tt::Leaf::Punct(first)) = self.next().ok_or(())?.clone() else { return Err(()); }; + let mut res = ArrayVec::new(); if first.spacing == tt::Spacing::Alone { - return Ok(smallvec![first]); + res.push(first); + return Ok(res); } let (second, third) = match (self.peek_n(0), self.peek_n(1)) { @@ -108,14 +110,19 @@ impl<'a, S: Copy> TtIter<'a, S> { Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p3))), ) if p2.spacing == tt::Spacing::Joint => (p2, Some(p3)), (Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p2))), _) => (p2, None), - _ => return Ok(smallvec![first]), + _ => { + res.push(first); + return Ok(res); + } }; match (first.char, second.char, third.map(|it| it.char)) { ('.', '.', Some('.' | '=')) | ('<', '<', Some('=')) | ('>', '>', Some('=')) => { let _ = self.next().unwrap(); let _ = self.next().unwrap(); - Ok(smallvec![first, *second, *third.unwrap()]) + res.push(first); + res.push(*second); + res.push(*third.unwrap()); } ('-' | '!' 
| '*' | '/' | '&' | '%' | '^' | '+' | '<' | '=' | '>' | '|', '=', _) | ('-' | '=' | '>', '>', _) @@ -126,10 +133,12 @@ impl<'a, S: Copy> TtIter<'a, S> { | ('<', '<', _) | ('|', '|', _) => { let _ = self.next().unwrap(); - Ok(smallvec![first, *second]) + res.push(first); + res.push(*second); } - _ => Ok(smallvec![first]), + _ => res.push(first), } + Ok(res) } pub(crate) fn peek_n(&self, n: usize) -> Option<&'a tt::TokenTree> { self.inner.as_slice().get(n) From db056b4a69014461ce93325dcf432c76f374ba9a Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 24 Jun 2024 09:28:03 +0200 Subject: [PATCH 15/94] Fix term_search filtering enum variant generics incorrectly --- crates/ide-assists/src/handlers/term_search.rs | 8 ++++---- crates/ide-completion/src/render.rs | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/crates/ide-assists/src/handlers/term_search.rs b/crates/ide-assists/src/handlers/term_search.rs index 7f1274cade..b62c3b3323 100644 --- a/crates/ide-assists/src/handlers/term_search.rs +++ b/crates/ide-assists/src/handlers/term_search.rs @@ -144,7 +144,7 @@ fn f() { let a = A { x: 1, y: true }; let b: i32 = a.x; }"#, term_search, r#"//- minicore: todo, unimplemented, option fn f() { let a: i32 = 1; let b: Option = todo$0!(); }"#, - r#"fn f() { let a: i32 = 1; let b: Option = Some(a); }"#, + r#"fn f() { let a: i32 = 1; let b: Option = Some::(a); }"#, ) } @@ -156,7 +156,7 @@ fn f() { let a: i32 = 1; let b: Option = todo$0!(); }"#, enum Option { None, Some(T) } fn f() { let a: i32 = 1; let b: Option = todo$0!(); }"#, r#"enum Option { None, Some(T) } -fn f() { let a: i32 = 1; let b: Option = Option::Some(a); }"#, +fn f() { let a: i32 = 1; let b: Option = Option::Some::(a); }"#, ) } @@ -168,7 +168,7 @@ fn f() { let a: i32 = 1; let b: Option = Option::Some(a); }"#, enum Option { None, Some(T) } fn f() { let a: Option = Option::None; let b: Option> = todo$0!(); }"#, r#"enum Option { None, Some(T) } -fn f() { let a: Option = Option::None; let b: Option> = Option::Some(a); }"#, +fn f() { let a: Option = Option::None; let b: Option> = Option::Some::>(a); }"#, ) } @@ -180,7 +180,7 @@ fn f() { let a: Option = Option::None; let b: Option> = Option: enum Foo { Foo(T) } fn f() { let a = 0; let b: Foo = todo$0!(); }"#, r#"enum Foo { Foo(T) } -fn f() { let a = 0; let b: Foo = Foo::Foo(a); }"#, +fn f() { let a = 0; let b: Foo = Foo::Foo::(a); }"#, ); check_assist( diff --git a/crates/ide-completion/src/render.rs b/crates/ide-completion/src/render.rs index b98f745f17..ff60a7669e 100644 --- a/crates/ide-completion/src/render.rs +++ b/crates/ide-completion/src/render.rs @@ -2641,7 +2641,7 @@ fn foo() { expect![[r#" lc foo [type+local] ex foo [type] - ex Foo::B [type] + ex Foo::B:: [type] ev Foo::A(…) [type_could_unify] ev Foo::B [type_could_unify] en Foo [type_could_unify] From b68200ee6f48fe7875bd067335546a3621a3c762 Mon Sep 17 00:00:00 2001 From: Ali Bektas Date: Mon, 24 Jun 2024 12:31:00 +0200 Subject: [PATCH 16/94] minor : Fix duplicate snippets showing up on hover. With each `config::apply_change` duplicate configs were being added. Now we first drain the vec that holds these and then start adding. 
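As a minimal sketch of the pattern being fixed (hypothetical `Config` type and field names, not rust-analyzer's real ones): anything derived from client settings has to be rebuilt from scratch on each change, otherwise every `apply_change` call appends another copy of the same entries.

    #[derive(Default)]
    struct Config {
        /// Snippets derived from the client-provided settings.
        snippets: Vec<String>,
    }

    impl Config {
        fn apply_change(&mut self, incoming: &[String]) {
            // Without this `clear`, repeated calls keep appending duplicates
            // of the same snippets instead of replacing the previous set.
            self.snippets.clear();
            self.snippets.extend(incoming.iter().cloned());
        }
    }

    fn main() {
        let mut config = Config::default();
        let client_snippets = vec!["Arc::new".to_owned(), "Rc::new".to_owned()];
        config.apply_change(&client_snippets);
        config.apply_change(&client_snippets);
        // Stays at 2 entries instead of growing to 4 on the second call.
        assert_eq!(config.snippets.len(), 2);
    }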
--- crates/rust-analyzer/src/config.rs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index e8504979be..96e377d365 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs @@ -844,6 +844,9 @@ impl Config { config.source_root_parent_map = source_root_map; } + // IMPORTANT : This holds as long as ` completion_snippets_custom` is declared `client`. + config.snippets = vec![]; + let snips = self.completion_snippets_custom().to_owned(); for (name, def) in snips.iter() { From 443ba254f943b51f732ce579c3288c3c6f2a249f Mon Sep 17 00:00:00 2001 From: Ali Bektas Date: Mon, 24 Jun 2024 13:13:53 +0200 Subject: [PATCH 17/94] Apply minor changes --- crates/rust-analyzer/src/config.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index 96e377d365..8e302709aa 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs @@ -845,7 +845,7 @@ impl Config { } // IMPORTANT : This holds as long as ` completion_snippets_custom` is declared `client`. - config.snippets = vec![]; + config.snippets.clear(); let snips = self.completion_snippets_custom().to_owned(); From dc39e87b79985e786b96fad4c9c91038f6fbefcf Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 24 Jun 2024 14:47:12 +0200 Subject: [PATCH 18/94] move tt-iter into tt crate --- Cargo.lock | 1 + .../src/handlers/incoherent_impl.rs | 21 +- crates/mbe/src/expander/matcher.rs | 173 +++++++------- crates/mbe/src/lib.rs | 68 +++++- crates/mbe/src/parser.rs | 3 +- crates/mbe/src/syntax_bridge.rs | 9 +- crates/mbe/src/tt_iter.rs | 217 ------------------ crates/tt/Cargo.toml | 1 + crates/tt/src/iter.rs | 161 +++++++++++++ crates/tt/src/lib.rs | 5 +- 10 files changed, 336 insertions(+), 323 deletions(-) delete mode 100644 crates/mbe/src/tt_iter.rs create mode 100644 crates/tt/src/iter.rs diff --git a/Cargo.lock b/Cargo.lock index ce71a89897..94734da9d3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2252,6 +2252,7 @@ dependencies = [ name = "tt" version = "0.0.0" dependencies = [ + "arrayvec", "smol_str", "stdx", "text-size", diff --git a/crates/ide-diagnostics/src/handlers/incoherent_impl.rs b/crates/ide-diagnostics/src/handlers/incoherent_impl.rs index 9f56e10414..d3f3020775 100644 --- a/crates/ide-diagnostics/src/handlers/incoherent_impl.rs +++ b/crates/ide-diagnostics/src/handlers/incoherent_impl.rs @@ -1,16 +1,23 @@ use hir::InFile; +use syntax::{AstNode, TextRange}; -use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext}; +use crate::{adjusted_display_range, Diagnostic, DiagnosticCode, DiagnosticsContext}; // Diagnostic: incoherent-impl // // This diagnostic is triggered if the targe type of an impl is from a foreign crate. 
pub(crate) fn incoherent_impl(ctx: &DiagnosticsContext<'_>, d: &hir::IncoherentImpl) -> Diagnostic { - Diagnostic::new_with_syntax_node_ptr( - ctx, + let display_range = adjusted_display_range(ctx, InFile::new(d.file_id, d.impl_), &|node| { + Some(TextRange::new( + node.syntax().text_range().start(), + node.self_ty()?.syntax().text_range().end(), + )) + }); + + Diagnostic::new( DiagnosticCode::RustcHardError("E0210"), "cannot define inherent `impl` for foreign type".to_owned(), - InFile::new(d.file_id, d.impl_.into()), + display_range, ) } @@ -23,7 +30,7 @@ mod change_case { check_diagnostics( r#" impl bool {} -//^^^^^^^^^^^^ error: cannot define inherent `impl` for foreign type +//^^^^^^^^^ error: cannot define inherent `impl` for foreign type "#, ); } @@ -60,7 +67,7 @@ impl foo::S { pub struct S; //- /main.rs crate:main deps:foo impl foo::S { #[rustc_allow_incoherent_impl] fn func(self) {} } -//^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: cannot define inherent `impl` for foreign type +//^^^^^^^^^^^ error: cannot define inherent `impl` for foreign type "#, ); check_diagnostics( @@ -70,7 +77,7 @@ pub struct S; pub struct S; //- /main.rs crate:main deps:foo impl foo::S { fn func(self) {} } -//^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: cannot define inherent `impl` for foreign type +//^^^^^^^^^^^ error: cannot define inherent `impl` for foreign type "#, ); } diff --git a/crates/mbe/src/expander/matcher.rs b/crates/mbe/src/expander/matcher.rs index 972ad188ac..b20d5579ca 100644 --- a/crates/mbe/src/expander/matcher.rs +++ b/crates/mbe/src/expander/matcher.rs @@ -64,12 +64,12 @@ use std::{rc::Rc, sync::Arc}; use smallvec::{smallvec, SmallVec}; use span::{Edition, Span}; use syntax::SmolStr; -use tt::DelimSpan; +use tt::{iter::TtIter, DelimSpan}; use crate::{ expander::{Binding, Bindings, ExpandResult, Fragment}, + expect_fragment, parser::{MetaVarKind, Op, RepeatKind, Separator}, - tt_iter::TtIter, ExpandError, MetaTemplate, ValueResult, }; @@ -406,7 +406,7 @@ fn match_loop_inner<'t>( if item.sep.is_some() && !item.sep_matched { let sep = item.sep.as_ref().unwrap(); let mut fork = src.clone(); - if fork.expect_separator(sep) { + if expect_separator(&mut fork, sep) { // HACK: here we use `meta_result` to pass `TtIter` back to caller because // it might have been advanced multiple times. `ValueResult` is // insignificant. 
@@ -746,7 +746,7 @@ fn match_meta_var( ) -> ExpandResult> { let fragment = match kind { MetaVarKind::Path => { - return input.expect_fragment(parser::PrefixEntryPoint::Path, edition).map(|it| { + return expect_fragment(input, parser::PrefixEntryPoint::Path, edition).map(|it| { it.map(|it| tt::TokenTree::subtree_or_wrap(it, delim_span)).map(Fragment::Path) }); } @@ -765,7 +765,7 @@ fn match_meta_var( } _ => {} }; - return input.expect_fragment(parser::PrefixEntryPoint::Expr, edition).map(|tt| { + return expect_fragment(input, parser::PrefixEntryPoint::Expr, edition).map(|tt| { tt.map(|tt| match tt { tt::TokenTree::Leaf(leaf) => tt::Subtree { delimiter: tt::Delimiter::invisible_spanned(*leaf.span()), @@ -787,14 +787,13 @@ fn match_meta_var( .expect_ident() .map(|ident| tt::Leaf::from(ident.clone()).into()) .map_err(|()| ExpandError::binding_error("expected ident")), - MetaVarKind::Tt => input - .expect_tt() - .map_err(|()| ExpandError::binding_error("expected token tree")), - MetaVarKind::Lifetime => input - .expect_lifetime() + MetaVarKind::Tt => { + expect_tt(input).map_err(|()| ExpandError::binding_error("expected token tree")) + } + MetaVarKind::Lifetime => expect_lifetime(input) .map_err(|()| ExpandError::binding_error("expected lifetime")), MetaVarKind::Literal => { - let neg = input.eat_char('-'); + let neg = eat_char(input, '-'); input .expect_literal() .map(|literal| { @@ -822,7 +821,7 @@ fn match_meta_var( MetaVarKind::Item => parser::PrefixEntryPoint::Item, MetaVarKind::Vis => parser::PrefixEntryPoint::Vis, }; - input.expect_fragment(fragment, edition).map(|it| it.map(Fragment::Tokens)) + expect_fragment(input, fragment, edition).map(|it| it.map(Fragment::Tokens)) } fn collect_vars(collector_fun: &mut impl FnMut(SmolStr), pattern: &MetaTemplate) { @@ -905,86 +904,84 @@ impl<'a> Iterator for OpDelimitedIter<'a> { } } -impl TtIter<'_, Span> { - fn expect_separator(&mut self, separator: &Separator) -> bool { - let mut fork = self.clone(); - let ok = match separator { - Separator::Ident(lhs) => match fork.expect_ident_or_underscore() { - Ok(rhs) => rhs.text == lhs.text, - Err(_) => false, +fn expect_separator(iter: &mut TtIter<'_, S>, separator: &Separator) -> bool { + let mut fork = iter.clone(); + let ok = match separator { + Separator::Ident(lhs) => match fork.expect_ident_or_underscore() { + Ok(rhs) => rhs.text == lhs.text, + Err(_) => false, + }, + Separator::Literal(lhs) => match fork.expect_literal() { + Ok(rhs) => match rhs { + tt::Leaf::Literal(rhs) => rhs.text == lhs.text, + tt::Leaf::Ident(rhs) => rhs.text == lhs.text, + tt::Leaf::Punct(_) => false, }, - Separator::Literal(lhs) => match fork.expect_literal() { - Ok(rhs) => match rhs { - tt::Leaf::Literal(rhs) => rhs.text == lhs.text, - tt::Leaf::Ident(rhs) => rhs.text == lhs.text, - tt::Leaf::Punct(_) => false, - }, - Err(_) => false, - }, - Separator::Puncts(lhs) => match fork.expect_glued_punct() { - Ok(rhs) => { - let lhs = lhs.iter().map(|it| it.char); - let rhs = rhs.iter().map(|it| it.char); - lhs.eq(rhs) - } - Err(_) => false, - }, - }; - if ok { - *self = fork; - } - ok - } - - fn expect_tt(&mut self) -> Result, ()> { - if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) = self.peek_n(0) { - if punct.char == '\'' { - self.expect_lifetime() - } else { - let puncts = self.expect_glued_punct()?; - let delimiter = tt::Delimiter { - open: puncts.first().unwrap().span, - close: puncts.last().unwrap().span, - kind: tt::DelimiterKind::Invisible, - }; - let token_trees = puncts.into_iter().map(|p| 
tt::Leaf::Punct(p).into()).collect(); - Ok(tt::TokenTree::Subtree(tt::Subtree { delimiter, token_trees })) + Err(_) => false, + }, + Separator::Puncts(lhs) => match fork.expect_glued_punct() { + Ok(rhs) => { + let lhs = lhs.iter().map(|it| it.char); + let rhs = rhs.iter().map(|it| it.char); + lhs.eq(rhs) } + Err(_) => false, + }, + }; + if ok { + *iter = fork; + } + ok +} + +fn expect_tt(iter: &mut TtIter<'_, S>) -> Result, ()> { + if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) = iter.peek_n(0) { + if punct.char == '\'' { + expect_lifetime(iter) } else { - self.next().ok_or(()).cloned() - } - } - - fn expect_lifetime(&mut self) -> Result, ()> { - let punct = self.expect_single_punct()?; - if punct.char != '\'' { - return Err(()); - } - let ident = self.expect_ident_or_underscore()?; - - Ok(tt::Subtree { - delimiter: tt::Delimiter { - open: punct.span, - close: ident.span, + let puncts = iter.expect_glued_punct()?; + let delimiter = tt::Delimiter { + open: puncts.first().unwrap().span, + close: puncts.last().unwrap().span, kind: tt::DelimiterKind::Invisible, - }, - token_trees: Box::new([ - tt::Leaf::Punct(*punct).into(), - tt::Leaf::Ident(ident.clone()).into(), - ]), - } - .into()) - } - - fn eat_char(&mut self, c: char) -> Option> { - let mut fork = self.clone(); - match fork.expect_char(c) { - Ok(_) => { - let tt = self.next().cloned(); - *self = fork; - tt - } - Err(_) => None, + }; + let token_trees = puncts.into_iter().map(|p| tt::Leaf::Punct(p).into()).collect(); + Ok(tt::TokenTree::Subtree(tt::Subtree { delimiter, token_trees })) } + } else { + iter.next().ok_or(()).cloned() + } +} + +fn expect_lifetime(iter: &mut TtIter<'_, S>) -> Result, ()> { + let punct = iter.expect_single_punct()?; + if punct.char != '\'' { + return Err(()); + } + let ident = iter.expect_ident_or_underscore()?; + + Ok(tt::Subtree { + delimiter: tt::Delimiter { + open: punct.span, + close: ident.span, + kind: tt::DelimiterKind::Invisible, + }, + token_trees: Box::new([ + tt::Leaf::Punct(*punct).into(), + tt::Leaf::Ident(ident.clone()).into(), + ]), + } + .into()) +} + +fn eat_char(iter: &mut TtIter<'_, S>, c: char) -> Option> { + let mut fork = iter.clone(); + match fork.expect_char(c) { + Ok(_) => { + let tt = iter.next().cloned(); + *iter = fork; + tt + } + Err(_) => None, } } diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs index ed3200964d..5f0598dfe0 100644 --- a/crates/mbe/src/lib.rs +++ b/crates/mbe/src/lib.rs @@ -12,20 +12,17 @@ mod expander; mod parser; mod syntax_bridge; mod to_parser_input; -mod tt_iter; #[cfg(test)] mod benchmark; use span::{Edition, Span, SyntaxContextId}; use stdx::impl_from; +use tt::iter::TtIter; use std::fmt; -use crate::{ - parser::{MetaTemplate, MetaVarKind, Op}, - tt_iter::TtIter, -}; +use crate::parser::{MetaTemplate, MetaVarKind, Op}; // FIXME: we probably should re-think `token_tree_to_syntax_node` interfaces pub use ::parser::TopEntryPoint; @@ -247,6 +244,10 @@ impl DeclarativeMacro { self.err.as_deref() } + pub fn num_rules(&self) -> usize { + self.rules.len() + } + pub fn expand( &self, tt: &tt::Subtree, @@ -361,3 +362,60 @@ impl From> for ValueResult { result.map_or_else(Self::only_err, Self::ok) } } + +fn expect_fragment( + tt_iter: &mut TtIter<'_, S>, + entry_point: ::parser::PrefixEntryPoint, + edition: ::parser::Edition, +) -> ExpandResult>> { + use ::parser; + let buffer = tt::buffer::TokenBuffer::from_tokens(tt_iter.as_slice()); + let parser_input = to_parser_input::to_parser_input(&buffer); + let tree_traversal = 
entry_point.parse(&parser_input, edition); + let mut cursor = buffer.begin(); + let mut error = false; + for step in tree_traversal.iter() { + match step { + parser::Step::Token { kind, mut n_input_tokens } => { + if kind == ::parser::SyntaxKind::LIFETIME_IDENT { + n_input_tokens = 2; + } + for _ in 0..n_input_tokens { + cursor = cursor.bump_subtree(); + } + } + parser::Step::FloatSplit { .. } => { + // FIXME: We need to split the tree properly here, but mutating the token trees + // in the buffer is somewhat tricky to pull off. + cursor = cursor.bump_subtree(); + } + parser::Step::Enter { .. } | parser::Step::Exit => (), + parser::Step::Error { .. } => error = true, + } + } + + let err = if error || !cursor.is_root() { + Some(ExpandError::binding_error(format!("expected {entry_point:?}"))) + } else { + None + }; + + let mut curr = buffer.begin(); + let mut res = vec![]; + + while curr != cursor { + let Some(token) = curr.token_tree() else { break }; + res.push(token.cloned()); + curr = curr.bump(); + } + + *tt_iter = TtIter::new_iter(tt_iter.as_slice()[res.len()..].iter()); + let res = match &*res { + [] | [_] => res.pop(), + [first, ..] => Some(tt::TokenTree::Subtree(tt::Subtree { + delimiter: Delimiter::invisible_spanned(first.first_span()), + token_trees: res.into_boxed_slice(), + })), + }; + ExpandResult { value: res, err } +} diff --git a/crates/mbe/src/parser.rs b/crates/mbe/src/parser.rs index 256068a899..5c499c06b1 100644 --- a/crates/mbe/src/parser.rs +++ b/crates/mbe/src/parser.rs @@ -6,8 +6,9 @@ use std::sync::Arc; use arrayvec::ArrayVec; use span::{Edition, Span, SyntaxContextId}; use syntax::SmolStr; +use tt::iter::TtIter; -use crate::{tt_iter::TtIter, ParseError}; +use crate::ParseError; /// Consider /// diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs index c8ff8c35e9..73a04f00d9 100644 --- a/crates/mbe/src/syntax_bridge.rs +++ b/crates/mbe/src/syntax_bridge.rs @@ -11,9 +11,12 @@ use syntax::{ SyntaxKind::*, SyntaxNode, SyntaxToken, SyntaxTreeBuilder, TextRange, TextSize, WalkEvent, T, }; -use tt::buffer::{Cursor, TokenBuffer}; +use tt::{ + buffer::{Cursor, TokenBuffer}, + iter::TtIter, +}; -use crate::{to_parser_input::to_parser_input, tt_iter::TtIter}; +use crate::to_parser_input::to_parser_input; #[cfg(test)] mod tests; @@ -213,7 +216,7 @@ where let mut res = Vec::new(); while iter.peek_n(0).is_some() { - let expanded = iter.expect_fragment(parser::PrefixEntryPoint::Expr, edition); + let expanded = crate::expect_fragment(&mut iter, parser::PrefixEntryPoint::Expr, edition); res.push(match expanded.value { None => break, diff --git a/crates/mbe/src/tt_iter.rs b/crates/mbe/src/tt_iter.rs deleted file mode 100644 index 3581fbbfd8..0000000000 --- a/crates/mbe/src/tt_iter.rs +++ /dev/null @@ -1,217 +0,0 @@ -//! A "Parser" structure for token trees. We use this when parsing a declarative -//! macro definition into a list of patterns and templates. - -use core::fmt; - -use arrayvec::ArrayVec; -use syntax::SyntaxKind; - -use crate::{to_parser_input::to_parser_input, ExpandError, ExpandResult}; - -#[derive(Debug, Clone)] -pub(crate) struct TtIter<'a, S> { - pub(crate) inner: std::slice::Iter<'a, tt::TokenTree>, -} - -impl<'a, S: Copy> TtIter<'a, S> { - pub(crate) fn new(subtree: &'a tt::Subtree) -> TtIter<'a, S> { - TtIter { inner: subtree.token_trees.iter() } - } - - pub(crate) fn expect_char(&mut self, char: char) -> Result<(), ()> { - match self.next() { - Some(&tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: c, .. 
}))) if c == char => { - Ok(()) - } - _ => Err(()), - } - } - - pub(crate) fn expect_any_char(&mut self, chars: &[char]) -> Result<(), ()> { - match self.next() { - Some(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: c, .. }))) - if chars.contains(c) => - { - Ok(()) - } - _ => Err(()), - } - } - - pub(crate) fn expect_subtree(&mut self) -> Result<&'a tt::Subtree, ()> { - match self.next() { - Some(tt::TokenTree::Subtree(it)) => Ok(it), - _ => Err(()), - } - } - - pub(crate) fn expect_leaf(&mut self) -> Result<&'a tt::Leaf, ()> { - match self.next() { - Some(tt::TokenTree::Leaf(it)) => Ok(it), - _ => Err(()), - } - } - - pub(crate) fn expect_dollar(&mut self) -> Result<(), ()> { - match self.expect_leaf()? { - tt::Leaf::Punct(tt::Punct { char: '$', .. }) => Ok(()), - _ => Err(()), - } - } - - pub(crate) fn expect_ident(&mut self) -> Result<&'a tt::Ident, ()> { - match self.expect_leaf()? { - tt::Leaf::Ident(it) if it.text != "_" => Ok(it), - _ => Err(()), - } - } - - pub(crate) fn expect_ident_or_underscore(&mut self) -> Result<&'a tt::Ident, ()> { - match self.expect_leaf()? { - tt::Leaf::Ident(it) => Ok(it), - _ => Err(()), - } - } - - pub(crate) fn expect_literal(&mut self) -> Result<&'a tt::Leaf, ()> { - let it = self.expect_leaf()?; - match it { - tt::Leaf::Literal(_) => Ok(it), - tt::Leaf::Ident(ident) if ident.text == "true" || ident.text == "false" => Ok(it), - _ => Err(()), - } - } - - pub(crate) fn expect_single_punct(&mut self) -> Result<&'a tt::Punct, ()> { - match self.expect_leaf()? { - tt::Leaf::Punct(it) => Ok(it), - _ => Err(()), - } - } - - /// Returns consecutive `Punct`s that can be glued together. - /// - /// This method currently may return a single quotation, which is part of lifetime ident and - /// conceptually not a punct in the context of mbe. Callers should handle this. - pub(crate) fn expect_glued_punct(&mut self) -> Result, 3>, ()> { - let tt::TokenTree::Leaf(tt::Leaf::Punct(first)) = self.next().ok_or(())?.clone() else { - return Err(()); - }; - - let mut res = ArrayVec::new(); - if first.spacing == tt::Spacing::Alone { - res.push(first); - return Ok(res); - } - - let (second, third) = match (self.peek_n(0), self.peek_n(1)) { - ( - Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p2))), - Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p3))), - ) if p2.spacing == tt::Spacing::Joint => (p2, Some(p3)), - (Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p2))), _) => (p2, None), - _ => { - res.push(first); - return Ok(res); - } - }; - - match (first.char, second.char, third.map(|it| it.char)) { - ('.', '.', Some('.' | '=')) | ('<', '<', Some('=')) | ('>', '>', Some('=')) => { - let _ = self.next().unwrap(); - let _ = self.next().unwrap(); - res.push(first); - res.push(*second); - res.push(*third.unwrap()); - } - ('-' | '!' 
| '*' | '/' | '&' | '%' | '^' | '+' | '<' | '=' | '>' | '|', '=', _) - | ('-' | '=' | '>', '>', _) - | ('<', '-', _) - | (':', ':', _) - | ('.', '.', _) - | ('&', '&', _) - | ('<', '<', _) - | ('|', '|', _) => { - let _ = self.next().unwrap(); - res.push(first); - res.push(*second); - } - _ => res.push(first), - } - Ok(res) - } - pub(crate) fn peek_n(&self, n: usize) -> Option<&'a tt::TokenTree> { - self.inner.as_slice().get(n) - } -} - -impl<'a, S: Copy + fmt::Debug> TtIter<'a, S> { - pub(crate) fn expect_fragment( - &mut self, - entry_point: parser::PrefixEntryPoint, - edition: parser::Edition, - ) -> ExpandResult>> { - let buffer = tt::buffer::TokenBuffer::from_tokens(self.inner.as_slice()); - let parser_input = to_parser_input(&buffer); - let tree_traversal = entry_point.parse(&parser_input, edition); - let mut cursor = buffer.begin(); - let mut error = false; - for step in tree_traversal.iter() { - match step { - parser::Step::Token { kind, mut n_input_tokens } => { - if kind == SyntaxKind::LIFETIME_IDENT { - n_input_tokens = 2; - } - for _ in 0..n_input_tokens { - cursor = cursor.bump_subtree(); - } - } - parser::Step::FloatSplit { .. } => { - // FIXME: We need to split the tree properly here, but mutating the token trees - // in the buffer is somewhat tricky to pull off. - cursor = cursor.bump_subtree(); - } - parser::Step::Enter { .. } | parser::Step::Exit => (), - parser::Step::Error { .. } => error = true, - } - } - - let err = if error || !cursor.is_root() { - Some(ExpandError::binding_error(format!("expected {entry_point:?}"))) - } else { - None - }; - - let mut curr = buffer.begin(); - let mut res = vec![]; - - while curr != cursor { - let Some(token) = curr.token_tree() else { break }; - res.push(token.cloned()); - curr = curr.bump(); - } - - self.inner = self.inner.as_slice()[res.len()..].iter(); - let res = match &*res { - [] | [_] => res.pop(), - [first, ..] => Some(tt::TokenTree::Subtree(tt::Subtree { - delimiter: tt::Delimiter::invisible_spanned(first.first_span()), - token_trees: res.into_boxed_slice(), - })), - }; - ExpandResult { value: res, err } - } -} - -impl<'a, S> Iterator for TtIter<'a, S> { - type Item = &'a tt::TokenTree; - fn next(&mut self) -> Option { - self.inner.next() - } - - fn size_hint(&self) -> (usize, Option) { - self.inner.size_hint() - } -} - -impl std::iter::ExactSizeIterator for TtIter<'_, S> {} diff --git a/crates/tt/Cargo.toml b/crates/tt/Cargo.toml index c96f088cdc..1311e2ddf8 100644 --- a/crates/tt/Cargo.toml +++ b/crates/tt/Cargo.toml @@ -12,6 +12,7 @@ rust-version.workspace = true doctest = false [dependencies] +arrayvec.workspace = true smol_str.workspace = true text-size.workspace = true diff --git a/crates/tt/src/iter.rs b/crates/tt/src/iter.rs new file mode 100644 index 0000000000..175259a3e4 --- /dev/null +++ b/crates/tt/src/iter.rs @@ -0,0 +1,161 @@ +//! A "Parser" structure for token trees. We use this when parsing a declarative +//! macro definition into a list of patterns and templates. 
+ +use arrayvec::ArrayVec; + +use crate::{Ident, Leaf, Punct, Spacing, Subtree, TokenTree}; + +#[derive(Debug, Clone)] +pub struct TtIter<'a, S> { + inner: std::slice::Iter<'a, TokenTree>, +} + +impl<'a, S: Copy> TtIter<'a, S> { + pub fn new(subtree: &'a Subtree) -> TtIter<'a, S> { + TtIter { inner: subtree.token_trees.iter() } + } + + pub fn new_iter(iter: std::slice::Iter<'a, TokenTree>) -> TtIter<'a, S> { + TtIter { inner: iter } + } + + pub fn expect_char(&mut self, char: char) -> Result<(), ()> { + match self.next() { + Some(&TokenTree::Leaf(Leaf::Punct(Punct { char: c, .. }))) if c == char => Ok(()), + _ => Err(()), + } + } + + pub fn expect_any_char(&mut self, chars: &[char]) -> Result<(), ()> { + match self.next() { + Some(TokenTree::Leaf(Leaf::Punct(Punct { char: c, .. }))) if chars.contains(c) => { + Ok(()) + } + _ => Err(()), + } + } + + pub fn expect_subtree(&mut self) -> Result<&'a Subtree, ()> { + match self.next() { + Some(TokenTree::Subtree(it)) => Ok(it), + _ => Err(()), + } + } + + pub fn expect_leaf(&mut self) -> Result<&'a Leaf, ()> { + match self.next() { + Some(TokenTree::Leaf(it)) => Ok(it), + _ => Err(()), + } + } + + pub fn expect_dollar(&mut self) -> Result<(), ()> { + match self.expect_leaf()? { + Leaf::Punct(Punct { char: '$', .. }) => Ok(()), + _ => Err(()), + } + } + + pub fn expect_ident(&mut self) -> Result<&'a Ident, ()> { + match self.expect_leaf()? { + Leaf::Ident(it) if it.text != "_" => Ok(it), + _ => Err(()), + } + } + + pub fn expect_ident_or_underscore(&mut self) -> Result<&'a Ident, ()> { + match self.expect_leaf()? { + Leaf::Ident(it) => Ok(it), + _ => Err(()), + } + } + + pub fn expect_literal(&mut self) -> Result<&'a Leaf, ()> { + let it = self.expect_leaf()?; + match it { + Leaf::Literal(_) => Ok(it), + Leaf::Ident(ident) if ident.text == "true" || ident.text == "false" => Ok(it), + _ => Err(()), + } + } + + pub fn expect_single_punct(&mut self) -> Result<&'a Punct, ()> { + match self.expect_leaf()? { + Leaf::Punct(it) => Ok(it), + _ => Err(()), + } + } + + /// Returns consecutive `Punct`s that can be glued together. + /// + /// This method currently may return a single quotation, which is part of lifetime ident and + /// conceptually not a punct in the context of mbe. Callers should handle this. + pub fn expect_glued_punct(&mut self) -> Result, 3>, ()> { + let TokenTree::Leaf(Leaf::Punct(first)) = self.next().ok_or(())?.clone() else { + return Err(()); + }; + + let mut res = ArrayVec::new(); + if first.spacing == Spacing::Alone { + res.push(first); + return Ok(res); + } + + let (second, third) = match (self.peek_n(0), self.peek_n(1)) { + (Some(TokenTree::Leaf(Leaf::Punct(p2))), Some(TokenTree::Leaf(Leaf::Punct(p3)))) + if p2.spacing == Spacing::Joint => + { + (p2, Some(p3)) + } + (Some(TokenTree::Leaf(Leaf::Punct(p2))), _) => (p2, None), + _ => { + res.push(first); + return Ok(res); + } + }; + + match (first.char, second.char, third.map(|it| it.char)) { + ('.', '.', Some('.' | '=')) | ('<', '<', Some('=')) | ('>', '>', Some('=')) => { + let _ = self.next().unwrap(); + let _ = self.next().unwrap(); + res.push(first); + res.push(*second); + res.push(*third.unwrap()); + } + ('-' | '!' 
| '*' | '/' | '&' | '%' | '^' | '+' | '<' | '=' | '>' | '|', '=', _) + | ('-' | '=' | '>', '>', _) + | ('<', '-', _) + | (':', ':', _) + | ('.', '.', _) + | ('&', '&', _) + | ('<', '<', _) + | ('|', '|', _) => { + let _ = self.next().unwrap(); + res.push(first); + res.push(*second); + } + _ => res.push(first), + } + Ok(res) + } + pub fn peek_n(&self, n: usize) -> Option<&'a TokenTree> { + self.inner.as_slice().get(n) + } + + pub fn as_slice(&self) -> &'a [TokenTree] { + self.inner.as_slice() + } +} + +impl<'a, S> Iterator for TtIter<'a, S> { + type Item = &'a TokenTree; + fn next(&mut self) -> Option { + self.inner.next() + } + + fn size_hint(&self) -> (usize, Option) { + self.inner.size_hint() + } +} + +impl std::iter::ExactSizeIterator for TtIter<'_, S> {} diff --git a/crates/tt/src/lib.rs b/crates/tt/src/lib.rs index e9de3f97b0..b2a21b1913 100644 --- a/crates/tt/src/lib.rs +++ b/crates/tt/src/lib.rs @@ -4,6 +4,9 @@ #![warn(rust_2018_idioms, unused_lifetimes)] +pub mod buffer; +pub mod iter; + use std::fmt; use stdx::impl_from; @@ -365,8 +368,6 @@ impl Subtree { } } -pub mod buffer; - pub fn pretty(tkns: &[TokenTree]) -> String { fn tokentree_to_text(tkn: &TokenTree) -> String { match tkn { From 21c63abec7cbefd129e4301efdad04a61829798f Mon Sep 17 00:00:00 2001 From: winstxnhdw Date: Fri, 21 Jun 2024 05:11:34 +0800 Subject: [PATCH 19/94] tests: add test for param transformation --- .../ide-assists/src/handlers/bool_to_enum.rs | 93 ++++++++++++++++++- 1 file changed, 92 insertions(+), 1 deletion(-) diff --git a/crates/ide-assists/src/handlers/bool_to_enum.rs b/crates/ide-assists/src/handlers/bool_to_enum.rs index c95e24693d..437e633723 100644 --- a/crates/ide-assists/src/handlers/bool_to_enum.rs +++ b/crates/ide-assists/src/handlers/bool_to_enum.rs @@ -517,6 +517,98 @@ mod tests { use crate::tests::{check_assist, check_assist_not_applicable}; + #[test] + fn parameter_with_first_param_usage() { + check_assist( + bool_to_enum, + r#" +fn function($0foo: bool, bar: bool) { + if foo { + println!("foo"); + } +} +"#, + r#" +#[derive(PartialEq, Eq)] +enum Bool { True, False } + +fn function(foo: Bool, bar: bool) { + if foo == Bool::True { + println!("foo"); + } +} +"#, + ) + } + + #[test] + fn parameter_with_last_param_usage() { + check_assist( + bool_to_enum, + r#" +fn function(foo: bool, $0bar: bool) { + if bar { + println!("bar"); + } +} +"#, + r#" +#[derive(PartialEq, Eq)] +enum Bool { True, False } + +fn function(foo: bool, bar: Bool) { + if bar == Bool::True { + println!("bar"); + } +} +"#, + ) + } + + #[test] + fn parameter_with_middle_param_usage() { + check_assist( + bool_to_enum, + r#" +fn function(foo: bool, $0bar: bool, baz: bool) { + if bar { + println!("bar"); + } +} +"#, + r#" +#[derive(PartialEq, Eq)] +enum Bool { True, False } + +fn function(foo: bool, bar: Bool, baz: bool) { + if bar == Bool::True { + println!("bar"); + } +} +"#, + ) + } + + #[test] + fn parameter_with_closure_usage() { + check_assist( + bool_to_enum, + r#" +fn main() { + let foo = |$0bar: bool| bar; +} +"#, + r#" +#[derive(PartialEq, Eq)] +enum Bool { True, False } + +fn main() { + let foo = |bar: Bool| bar == Bool::True; +} +"#, + ) + } + #[test] fn local_variable_with_usage() { check_assist( @@ -784,7 +876,6 @@ fn main() { #[test] fn local_variable_non_ident_pat() { - cov_mark::check!(not_applicable_in_non_ident_pat); check_assist_not_applicable( bool_to_enum, r#" From d468746075f812c8fb54e6ad9af76367704cbd07 Mon Sep 17 00:00:00 2001 From: winstxnhdw Date: Fri, 21 Jun 2024 00:52:30 +0800 Subject: [PATCH 20/94] 
feat: add bool_to_enum assist for parameters --- .../ide-assists/src/handlers/bool_to_enum.rs | 36 ++++++++++--------- 1 file changed, 20 insertions(+), 16 deletions(-) diff --git a/crates/ide-assists/src/handlers/bool_to_enum.rs b/crates/ide-assists/src/handlers/bool_to_enum.rs index 437e633723..a07ece38ae 100644 --- a/crates/ide-assists/src/handlers/bool_to_enum.rs +++ b/crates/ide-assists/src/handlers/bool_to_enum.rs @@ -1,3 +1,4 @@ +use either::Either; use hir::{ImportPathConfig, ModuleDef}; use ide_db::{ assists::{AssistId, AssistKind}, @@ -93,27 +94,30 @@ struct BoolNodeData { fn find_bool_node(ctx: &AssistContext<'_>) -> Option { let name: ast::Name = ctx.find_node_at_offset()?; - if let Some(let_stmt) = name.syntax().ancestors().find_map(ast::LetStmt::cast) { - let bind_pat = match let_stmt.pat()? { - ast::Pat::IdentPat(pat) => pat, - _ => { - cov_mark::hit!(not_applicable_in_non_ident_pat); - return None; - } - }; - let def = ctx.sema.to_def(&bind_pat)?; + if let Some(ident_pat) = name.syntax().parent().and_then(ast::IdentPat::cast) { + let def = ctx.sema.to_def(&ident_pat)?; if !def.ty(ctx.db()).is_bool() { cov_mark::hit!(not_applicable_non_bool_local); return None; } - Some(BoolNodeData { - target_node: let_stmt.syntax().clone(), - name, - ty_annotation: let_stmt.ty(), - initializer: let_stmt.initializer(), - definition: Definition::Local(def), - }) + let local_definition = Definition::Local(def); + match ident_pat.syntax().parent().and_then(Either::::cast)? { + Either::Left(param) => Some(BoolNodeData { + target_node: param.syntax().clone(), + name, + ty_annotation: param.ty(), + initializer: None, + definition: local_definition, + }), + Either::Right(let_stmt) => Some(BoolNodeData { + target_node: let_stmt.syntax().clone(), + name, + ty_annotation: let_stmt.ty(), + initializer: let_stmt.initializer(), + definition: local_definition, + }), + } } else if let Some(const_) = name.syntax().parent().and_then(ast::Const::cast) { let def = ctx.sema.to_def(&const_)?; if !def.ty(ctx.db()).is_bool() { From cb523fc88a35731279925a418dd6828df6acde92 Mon Sep 17 00:00:00 2001 From: Andrei Listochkin Date: Tue, 25 Jun 2024 18:13:08 +0100 Subject: [PATCH 21/94] pass cargo extra args when debug fixes #17128 --- editors/code/src/debug.ts | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/editors/code/src/debug.ts b/editors/code/src/debug.ts index 58fe1df51f..c2da2ea4e3 100644 --- a/editors/code/src/debug.ts +++ b/editors/code/src/debug.ts @@ -5,7 +5,7 @@ import type * as ra from "./lsp_ext"; import { Cargo, getRustcId, getSysroot } from "./toolchain"; import type { Ctx } from "./ctx"; -import { prepareEnv } from "./run"; +import { createCargoArgs, prepareEnv } from "./run"; import { isCargoRunnableArgs, unwrapUndefinable } from "./util"; const debugOutput = vscode.window.createOutputChannel("Debug"); @@ -180,7 +180,8 @@ async function getDebugExecutable( env: Record, ): Promise { const cargo = new Cargo(runnableArgs.workspaceRoot || ".", debugOutput, env); - const executable = await cargo.executableFromArgs(runnableArgs.cargoArgs); + const args = createCargoArgs(runnableArgs); + const executable = await cargo.executableFromArgs(args); // if we are here, there were no compilation errors. 
return executable; From eedba041307a4f86984c8bd49c8bd89c918ba0b1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Tue, 25 Jun 2024 20:32:09 +0300 Subject: [PATCH 22/94] Bump actions/download-artifact and upload-artifact --- .github/workflows/release.yaml | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 11014338d7..e11d6e15d1 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -132,7 +132,7 @@ jobs: run: target/${{ matrix.target }}/release/rust-analyzer analysis-stats --with-deps $(rustc --print sysroot)/lib/rustlib/src/rust/library/std - name: Upload artifacts - uses: actions/upload-artifact@v1 + uses: actions/upload-artifact@v4 with: name: dist-${{ matrix.target }} path: ./dist @@ -177,7 +177,7 @@ jobs: - run: rm -rf editors/code/server - name: Upload artifacts - uses: actions/upload-artifact@v1 + uses: actions/upload-artifact@v4 with: name: dist-x86_64-unknown-linux-musl path: ./dist @@ -206,39 +206,39 @@ jobs: - run: echo "HEAD_SHA=$(git rev-parse HEAD)" >> $GITHUB_ENV - run: 'echo "HEAD_SHA: $HEAD_SHA"' - - uses: actions/download-artifact@v1 + - uses: actions/download-artifact@v4 with: name: dist-aarch64-apple-darwin path: dist - - uses: actions/download-artifact@v1 + - uses: actions/download-artifact@v4 with: name: dist-x86_64-apple-darwin path: dist - - uses: actions/download-artifact@v1 + - uses: actions/download-artifact@v4 with: name: dist-x86_64-unknown-linux-gnu path: dist - - uses: actions/download-artifact@v1 + - uses: actions/download-artifact@v4 with: name: dist-x86_64-unknown-linux-musl path: dist - - uses: actions/download-artifact@v1 + - uses: actions/download-artifact@v4 with: name: dist-aarch64-unknown-linux-gnu path: dist - - uses: actions/download-artifact@v1 + - uses: actions/download-artifact@v4 with: name: dist-arm-unknown-linux-gnueabihf path: dist - - uses: actions/download-artifact@v1 + - uses: actions/download-artifact@v4 with: name: dist-x86_64-pc-windows-msvc path: dist - - uses: actions/download-artifact@v1 + - uses: actions/download-artifact@v4 with: name: dist-i686-pc-windows-msvc path: dist - - uses: actions/download-artifact@v1 + - uses: actions/download-artifact@v4 with: name: dist-aarch64-pc-windows-msvc path: dist From 23965e45121454472706ebd262e454ba4239ee65 Mon Sep 17 00:00:00 2001 From: Harry Sarson Date: Tue, 25 Jun 2024 17:24:37 +0100 Subject: [PATCH 23/94] do not normalize `use foo::{self}` to `use foo` It changes behaviour and can cause collisions. E.g. for the following snippet ```rs mod foo { pub mod bar {} pub const bar: i32 = 8; } // tranforming the below to `use foo::bar;` causes the error: // // the name `bar` is defined multiple times use foo::bar::{self}; const bar: u32 = 99; fn main() { let local_bar = bar; } ``` we still normalize ```rs use foo::bar; use foo::bar::{self}; ``` to `use foo::bar;` because this cannot cause collisions. 
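The difference comes down to namespaces: `use foo::bar;` imports every
item named `bar` from `foo` (both the module `bar` in the type
namespace and the const `bar` in the value namespace), so the imported
const collides with the locally defined `const bar: u32 = 99;`, whereas
`use foo::bar::{self};` only brings the module `bar` into scope. As a
minimal sketch of how the plain import resolves:

```rs
mod foo {
    pub mod bar {}

    pub const bar: i32 = 8;
}

// the plain import pulls in both the module and the const
use foo::bar;

fn main() {
    let _: i32 = bar; // resolves to the const `foo::bar`
}
```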
See: https://github.com/rust-lang/rust-analyzer/pull/17140#issuecomment-2079189725 --- .../ide-assists/src/handlers/merge_imports.rs | 19 +++++++++++ .../src/handlers/normalize_import.rs | 31 ++++++++++++++++- crates/ide-db/src/imports/merge_imports.rs | 33 +++++++++++++++++-- 3 files changed, 79 insertions(+), 4 deletions(-) diff --git a/crates/ide-assists/src/handlers/merge_imports.rs b/crates/ide-assists/src/handlers/merge_imports.rs index 797c5c0653..7f751c93e4 100644 --- a/crates/ide-assists/src/handlers/merge_imports.rs +++ b/crates/ide-assists/src/handlers/merge_imports.rs @@ -489,6 +489,25 @@ use foo::bar; ); } + #[test] + fn test_merge_nested_empty_and_self_with_other() { + check_assist( + merge_imports, + r" +use foo::$0{bar}; +use foo::{bar::{self, other}}; +", + r" +use foo::bar::{self, other}; +", + ); + check_assist_import_one_variations!( + "foo::$0{bar}", + "foo::{bar::{self, other}}", + "use {foo::bar::{self, other}};" + ); + } + #[test] fn test_merge_nested_list_self_and_glob() { check_assist( diff --git a/crates/ide-assists/src/handlers/normalize_import.rs b/crates/ide-assists/src/handlers/normalize_import.rs index 7d003efe72..0b91eb676d 100644 --- a/crates/ide-assists/src/handlers/normalize_import.rs +++ b/crates/ide-assists/src/handlers/normalize_import.rs @@ -119,10 +119,39 @@ mod tests { ); } + #[test] + fn test_braces_kept() { + check_assist_not_applicable_variations!("foo::bar::{$0self}"); + + // This code compiles but transforming "bar::{self}" into "bar" causes a + // compilation error (the name `bar` is defined multiple times). + // Therefore, the normalize_input assist must not apply here. + check_assist_not_applicable( + normalize_import, + r" +mod foo { + + pub mod bar {} + + pub const bar: i32 = 8; +} + +use foo::bar::{$0self}; + +const bar: u32 = 99; + +fn main() { + let local_bar = bar; +} + +", + ); + } + #[test] fn test_redundant_braces() { check_assist_variations!("foo::{bar::{baz, Qux}}", "foo::bar::{baz, Qux}"); - check_assist_variations!("foo::{bar::{self}}", "foo::bar"); + check_assist_variations!("foo::{bar::{self}}", "foo::bar::{self}"); check_assist_variations!("foo::{bar::{*}}", "foo::bar::*"); check_assist_variations!("foo::{bar::{Qux as Quux}}", "foo::bar::Qux as Quux"); check_assist_variations!( diff --git a/crates/ide-db/src/imports/merge_imports.rs b/crates/ide-db/src/imports/merge_imports.rs index b153aafa0e..9cacb6b1a6 100644 --- a/crates/ide-db/src/imports/merge_imports.rs +++ b/crates/ide-db/src/imports/merge_imports.rs @@ -157,10 +157,14 @@ fn recursive_merge(lhs: &ast::UseTree, rhs: &ast::UseTree, merge: MergeBehavior) } match (tree_contains_self(lhs_t), tree_contains_self(&rhs_t)) { - (Some(true), None) => continue, + (Some(true), None) => { + remove_subtree_if_only_self(lhs_t); + continue; + } (None, Some(true)) => { ted::replace(lhs_t.syntax(), rhs_t.syntax()); *lhs_t = rhs_t; + remove_subtree_if_only_self(lhs_t); continue; } _ => (), @@ -278,14 +282,20 @@ pub fn try_normalize_use_tree_mut( fn recursive_normalize(use_tree: &ast::UseTree, style: NormalizationStyle) -> Option<()> { let use_tree_list = use_tree.use_tree_list()?; let merge_subtree_into_parent_tree = |single_subtree: &ast::UseTree| { + let subtree_is_only_self = single_subtree.path().as_ref().map_or(false, path_is_self); + let merged_path = match (use_tree.path(), single_subtree.path()) { + // If the subtree is `{self}` then we cannot merge: `use + // foo::bar::{self}` is not equivalent to `use foo::bar`. 
See + // https://github.com/rust-lang/rust-analyzer/pull/17140#issuecomment-2079189725. + _ if subtree_is_only_self => None, + (None, None) => None, (Some(outer), None) => Some(outer), - (None, Some(inner)) if path_is_self(&inner) => None, (None, Some(inner)) => Some(inner), - (Some(outer), Some(inner)) if path_is_self(&inner) => Some(outer), (Some(outer), Some(inner)) => Some(make::path_concat(outer, inner).clone_for_update()), }; + if merged_path.is_some() || single_subtree.use_tree_list().is_some() || single_subtree.star_token().is_some() @@ -706,3 +716,20 @@ fn path_is_self(path: &ast::Path) -> bool { fn path_len(path: ast::Path) -> usize { path.segments().count() } + +fn get_single_subtree(use_tree: &ast::UseTree) -> Option { + use_tree + .use_tree_list() + .and_then(|tree_list| tree_list.use_trees().collect_tuple()) + .map(|(single_subtree,)| single_subtree) +} + +fn remove_subtree_if_only_self(use_tree: &ast::UseTree) { + let Some(single_subtree) = get_single_subtree(use_tree) else { return }; + match (use_tree.path(), single_subtree.path()) { + (Some(_), Some(inner)) if path_is_self(&inner) => { + ted::remove_all_iter(single_subtree.syntax().children_with_tokens()); + } + _ => (), + } +} From 4aeae854e11aec504b0279329ed71dc6a073da90 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Thu, 27 Jun 2024 10:32:13 +0200 Subject: [PATCH 24/94] Update triagebot.toml --- triagebot.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/triagebot.toml b/triagebot.toml index 95eed3ee17..d62adb9144 100644 --- a/triagebot.toml +++ b/triagebot.toml @@ -5,6 +5,7 @@ [relabel] allow-unauthenticated = [ "S-*", + "A-*", ] [autolabel."S-waiting-on-review"] From 50b767862147ce60d5e1baf48536f663fa8db4c6 Mon Sep 17 00:00:00 2001 From: Shoyu Vanilla Date: Thu, 27 Jun 2024 23:48:15 +0900 Subject: [PATCH 25/94] Add a regression test for issue 17199 that causes stack overflow --- crates/hir-ty/src/tests/regression.rs | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/crates/hir-ty/src/tests/regression.rs b/crates/hir-ty/src/tests/regression.rs index 3aa94be755..4fcb6062b6 100644 --- a/crates/hir-ty/src/tests/regression.rs +++ b/crates/hir-ty/src/tests/regression.rs @@ -1999,3 +1999,22 @@ where "#, ); } + +#[test] +fn tait_async_stack_overflow_17199() { + check_types( + r#" + //- minicore: fmt, future + type Foo = impl core::fmt::Debug; + + async fn foo() -> Foo { + () + } + + async fn test() { + let t = foo().await; + // ^ impl Debug + } +"#, + ); +} From 7e9da2d67d6a0a46342371d72b63783794374a07 Mon Sep 17 00:00:00 2001 From: Shoyu Vanilla Date: Thu, 27 Jun 2024 23:51:33 +0900 Subject: [PATCH 26/94] Use proper `ImplTraits` in `insert_inference_vars_for_impl_trait` --- crates/hir-ty/src/infer.rs | 50 +++++++++++++++----------------------- 1 file changed, 19 insertions(+), 31 deletions(-) diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs index 96431ba4ce..f3ad4e6869 100644 --- a/crates/hir-ty/src/infer.rs +++ b/crates/hir-ty/src/infer.rs @@ -835,11 +835,7 @@ impl<'a> InferenceContext<'a> { let return_ty = if let Some(rpits) = self.db.return_type_impl_traits(func) { // RPIT opaque types use substitution of their parent function. 
let fn_placeholders = TyBuilder::placeholder_subst(self.db, func); - let result = self.insert_inference_vars_for_impl_trait( - return_ty, - rpits.clone(), - fn_placeholders, - ); + let result = self.insert_inference_vars_for_impl_trait(return_ty, fn_placeholders); let rpits = rpits.skip_binders(); for (id, _) in rpits.impl_traits.iter() { if let Entry::Vacant(e) = self.result.type_of_rpit.entry(id) { @@ -862,12 +858,7 @@ impl<'a> InferenceContext<'a> { self.insert_atpit_coercion_table(params_and_ret_tys.iter()); } - fn insert_inference_vars_for_impl_trait( - &mut self, - t: T, - rpits: Arc>, - placeholders: Substitution, - ) -> T + fn insert_inference_vars_for_impl_trait(&mut self, t: T, placeholders: Substitution) -> T where T: crate::HasInterner + crate::TypeFoldable, { @@ -878,13 +869,21 @@ impl<'a> InferenceContext<'a> { TyKind::OpaqueType(opaque_ty_id, _) => *opaque_ty_id, _ => return ty, }; - let idx = match self.db.lookup_intern_impl_trait_id(opaque_ty_id.into()) { - ImplTraitId::ReturnTypeImplTrait(_, idx) => idx, - ImplTraitId::AssociatedTypeImplTrait(_, idx) => idx, - _ => unreachable!(), + let (impl_traits, idx) = + match self.db.lookup_intern_impl_trait_id(opaque_ty_id.into()) { + ImplTraitId::ReturnTypeImplTrait(def, idx) => { + (self.db.return_type_impl_traits(def), idx) + } + ImplTraitId::AssociatedTypeImplTrait(def, idx) => { + (self.db.type_alias_impl_traits(def), idx) + } + _ => unreachable!(), + }; + let Some(impl_traits) = impl_traits else { + return ty; }; - let bounds = - (*rpits).map_ref(|rpits| rpits.impl_traits[idx].bounds.map_ref(|it| it.iter())); + let bounds = (*impl_traits) + .map_ref(|rpits| rpits.impl_traits[idx].bounds.map_ref(|it| it.iter())); let var = self.table.new_type_var(); let var_subst = Substitution::from1(Interner, var.clone()); for bound in bounds { @@ -892,11 +891,8 @@ impl<'a> InferenceContext<'a> { let (var_predicate, binders) = predicate.substitute(Interner, &var_subst).into_value_and_skipped_binders(); always!(binders.is_empty(Interner)); // quantified where clauses not yet handled - let var_predicate = self.insert_inference_vars_for_impl_trait( - var_predicate, - rpits.clone(), - placeholders.clone(), - ); + let var_predicate = self + .insert_inference_vars_for_impl_trait(var_predicate, placeholders.clone()); self.push_obligation(var_predicate.cast(Interner)); } self.result.type_of_rpit.insert(idx, var.clone()); @@ -983,16 +979,8 @@ impl<'a> InferenceContext<'a> { self.db.lookup_intern_impl_trait_id(opaque_ty_id.into()) { if assoc_tys.contains(&alias_id) { - let atpits = self - .db - .type_alias_impl_traits(alias_id) - .expect("Marked as ATPIT but no impl traits!"); let alias_placeholders = TyBuilder::placeholder_subst(self.db, alias_id); - let ty = self.insert_inference_vars_for_impl_trait( - ty, - atpits, - alias_placeholders, - ); + let ty = self.insert_inference_vars_for_impl_trait(ty, alias_placeholders); return Some((opaque_ty_id, ty)); } } From 57497645f8cb6916fe6c866ba1f7ec679de2eb0c Mon Sep 17 00:00:00 2001 From: Joel Daniel Rico Date: Fri, 28 Jun 2024 05:30:13 -0700 Subject: [PATCH 27/94] Add landing/faq walkthroughs and commands for quick access --- editors/code/package.json | 106 +++++++++++++++++++++++++++++++++++ editors/code/src/commands.ts | 20 +++++++ editors/code/src/main.ts | 2 + 3 files changed, 128 insertions(+) diff --git a/editors/code/package.json b/editors/code/package.json index 0da5b920a7..99759330e6 100644 --- a/editors/code/package.json +++ b/editors/code/package.json @@ -305,6 +305,16 @@ "command": 
"rust-analyzer.toggleLSPLogs", "title": "Toggle LSP Logs", "category": "rust-analyzer" + }, + { + "command": "rust-analyzer.openWalkthrough", + "title": "Open Walkthrough", + "category": "rust-analyzer" + }, + { + "command": "rust-analyzer.openFAQ", + "title": "Open FAQ", + "category": "rust-analyzer" } ], "keybindings": [ @@ -3132,6 +3142,14 @@ { "command": "rust-analyzer.toggleLSPLogs", "when": "inRustProject" + }, + { + "command": "rust-analyzer.openWalkthrough", + "when": "inRustProject" + }, + { + "command": "rust-analyzer.openFAQ", + "when": "inRustProject" } ], "editor/context": [ @@ -3161,6 +3179,94 @@ "fileMatch": "rust-project.json", "url": "https://json.schemastore.org/rust-project.json" } + ], + "walkthroughs": [ + { + "id": "landing", + "title": "Learn about rust-analyzer", + "description": "A brief introduction to get started with rust-analyzer. Learn about key features and resources to help you get the most out of the extension.", + "steps": [ + { + "id": "docs", + "title": "Visit the docs!", + "description": "Confused about configurations? Want to learn more about rust-analyzer? Visit the [User Manual](https://rust-analyzer.github.io/manual.html)!", + "media": { + "image": "./icon.png", + "altText": "rust-analyzer logo" + }, + "completionEvents": [ + "onLink:https://rust-analyzer.github.io/manual.html" + ] + }, + { + "id": "faq", + "title": "FAQ", + "description": "Have questions about rust-analyzer? Check out the [FAQ Walkthrough](command:rust-analyzer.openFAQ)!", + "media": { + "image": "icon.png", + "altText": "rust-analyzer logo" + } + }, + { + "id": "changelog", + "title": "Changelog", + "description": "Stay up-to-date with the latest changes in rust-analyzer. Check out the changelog [here](https://rust-analyzer.github.io/thisweek)!", + "media": { + "image": "icon.png", + "altText": "rust-analyzer logo" + }, + "completionEvents": [ + "onLink:https://rust-analyzer.github.io/thisweek" + ] + }, + { + "id": "revisit", + "title": "Want to revisit a walkthrough?", + "description": "Use the ``Welcome: Open Walkthrough`` command to revisit any walkthrough!", + "media": { + "image": "icon.png", + "altText": "rust-analyzer logo" + } + } + ] + }, + { + "id": "faq", + "title": "FAQ", + "description": "Here are some frequently asked questions about rust-analyzer.", + "steps": [ + { + "id": "faq1", + "title": "What is rust-analyzer?", + "description": "rust-analyzer is a language server for Rust. It provides features like code completion, find references, and more.", + "media": { + "image": "icon.png", + "altText": "rust-analyzer logo" + } + }, + { + "id": "faq2", + "title": "Why are all of these type hints showing up in my code?", + "description": "By default, rust-analyzer displays __inlay hints__ to help you understand your code better. 
You can disable them in your settings.json file with ``\"editor.inlayHints.enabled\": \"off\"``", + "media": { + "image": "icon.png", + "altText": "rust-analyzer logo" + } + }, + { + "id": "faq3", + "title": "Where can I find more information about rust-analyzer?", + "description": "You can find more information about rust-analyzer in the [User Manual](https://rust-analyzer.github.io/manual.html).", + "media": { + "image": "icon.png", + "altText": "rust-analyzer logo" + }, + "completionEvents": [ + "onLink:https://rust-analyzer.github.io/manual.html" + ] + } + ] + } ] } } diff --git a/editors/code/src/commands.ts b/editors/code/src/commands.ts index f0f9fab1c6..2b0b300106 100644 --- a/editors/code/src/commands.ts +++ b/editors/code/src/commands.ts @@ -1502,3 +1502,23 @@ export function toggleLSPLogs(ctx: Ctx): Cmd { } }; } + +export function openWalkthrough(_: Ctx): Cmd { + return async () => { + await vscode.commands.executeCommand( + "workbench.action.openWalkthrough", + "rust-lang.rust-analyzer#landing", + false, + ); + }; +} + +export function openFAQ(_: Ctx): Cmd { + return async () => { + await vscode.commands.executeCommand( + "workbench.action.openWalkthrough", + "rust-lang.rust-analyzer#faq", + true, + ); + }; +} diff --git a/editors/code/src/main.ts b/editors/code/src/main.ts index ff67bb7bd5..c96f2ae869 100644 --- a/editors/code/src/main.ts +++ b/editors/code/src/main.ts @@ -178,6 +178,8 @@ function createCommands(): Record { viewMemoryLayout: { enabled: commands.viewMemoryLayout }, toggleCheckOnSave: { enabled: commands.toggleCheckOnSave }, toggleLSPLogs: { enabled: commands.toggleLSPLogs }, + openWalkthrough: { enabled: commands.openWalkthrough }, + openFAQ: { enabled: commands.openFAQ }, // Internal commands which are invoked by the server. 
applyActionGroup: { enabled: commands.applyActionGroup }, applySnippetWorkspaceEdit: { enabled: commands.applySnippetWorkspaceEditCommand }, From 1389312871e682bccd5119221633c6fe5e86a16f Mon Sep 17 00:00:00 2001 From: Tavo Annus Date: Sat, 29 Jun 2024 12:24:08 +0300 Subject: [PATCH 28/94] Make borrow checking configurable for term search --- crates/ide-assists/src/assist_config.rs | 1 + crates/ide-assists/src/handlers/term_search.rs | 6 +++++- crates/ide-assists/src/tests.rs | 3 +++ crates/ide-diagnostics/src/handlers/typed_hole.rs | 7 ++++++- crates/ide-diagnostics/src/lib.rs | 2 ++ crates/rust-analyzer/src/cli/analysis_stats.rs | 1 + crates/rust-analyzer/src/config.rs | 4 ++++ crates/rust-analyzer/src/integrated_benchmarks.rs | 1 + docs/user/generated_config.adoc | 5 +++++ editors/code/package.json | 10 ++++++++++ 10 files changed, 38 insertions(+), 2 deletions(-) diff --git a/crates/ide-assists/src/assist_config.rs b/crates/ide-assists/src/assist_config.rs index 5d76cb0432..410237f9ca 100644 --- a/crates/ide-assists/src/assist_config.rs +++ b/crates/ide-assists/src/assist_config.rs @@ -17,4 +17,5 @@ pub struct AssistConfig { pub prefer_prelude: bool, pub assist_emit_must_use: bool, pub term_search_fuel: u64, + pub term_search_borrowck: bool, } diff --git a/crates/ide-assists/src/handlers/term_search.rs b/crates/ide-assists/src/handlers/term_search.rs index b62c3b3323..148b951a4b 100644 --- a/crates/ide-assists/src/handlers/term_search.rs +++ b/crates/ide-assists/src/handlers/term_search.rs @@ -37,7 +37,11 @@ pub(crate) fn term_search(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option< sema: &ctx.sema, scope: &scope, goal: target_ty, - config: TermSearchConfig { fuel: ctx.config.term_search_fuel, ..Default::default() }, + config: TermSearchConfig { + fuel: ctx.config.term_search_fuel, + enable_borrowcheck: ctx.config.term_search_borrowck, + ..Default::default() + }, }; let paths = hir::term_search::term_search(&term_search_ctx); diff --git a/crates/ide-assists/src/tests.rs b/crates/ide-assists/src/tests.rs index 3b6c951251..bd10b5481b 100644 --- a/crates/ide-assists/src/tests.rs +++ b/crates/ide-assists/src/tests.rs @@ -32,6 +32,7 @@ pub(crate) const TEST_CONFIG: AssistConfig = AssistConfig { prefer_prelude: true, assist_emit_must_use: false, term_search_fuel: 400, + term_search_borrowck: true, }; pub(crate) const TEST_CONFIG_NO_SNIPPET_CAP: AssistConfig = AssistConfig { @@ -48,6 +49,7 @@ pub(crate) const TEST_CONFIG_NO_SNIPPET_CAP: AssistConfig = AssistConfig { prefer_prelude: true, assist_emit_must_use: false, term_search_fuel: 400, + term_search_borrowck: true, }; pub(crate) const TEST_CONFIG_IMPORT_ONE: AssistConfig = AssistConfig { @@ -64,6 +66,7 @@ pub(crate) const TEST_CONFIG_IMPORT_ONE: AssistConfig = AssistConfig { prefer_prelude: true, assist_emit_must_use: false, term_search_fuel: 400, + term_search_borrowck: true, }; pub(crate) fn with_single_file(text: &str) -> (RootDatabase, FileId) { diff --git a/crates/ide-diagnostics/src/handlers/typed_hole.rs b/crates/ide-diagnostics/src/handlers/typed_hole.rs index 71917e09f0..a64bac2949 100644 --- a/crates/ide-diagnostics/src/handlers/typed_hole.rs +++ b/crates/ide-diagnostics/src/handlers/typed_hole.rs @@ -47,7 +47,12 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole) -> Option sema: &ctx.sema, scope: &scope, goal: d.expected.clone(), - config: TermSearchConfig { fuel: ctx.config.term_search_fuel, ..Default::default() }, + config: TermSearchConfig { + fuel: ctx.config.term_search_fuel, + enable_borrowcheck: 
ctx.config.term_search_borrowck, + + ..Default::default() + }, }; let paths = term_search(&term_search_ctx); diff --git a/crates/ide-diagnostics/src/lib.rs b/crates/ide-diagnostics/src/lib.rs index a419f04bfa..f176c30c4a 100644 --- a/crates/ide-diagnostics/src/lib.rs +++ b/crates/ide-diagnostics/src/lib.rs @@ -234,6 +234,7 @@ pub struct DiagnosticsConfig { pub prefer_no_std: bool, pub prefer_prelude: bool, pub term_search_fuel: u64, + pub term_search_borrowck: bool, } impl DiagnosticsConfig { @@ -260,6 +261,7 @@ impl DiagnosticsConfig { prefer_no_std: false, prefer_prelude: true, term_search_fuel: 400, + term_search_borrowck: true, } } } diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs index 90b81d0a80..a429f75568 100644 --- a/crates/rust-analyzer/src/cli/analysis_stats.rs +++ b/crates/rust-analyzer/src/cli/analysis_stats.rs @@ -994,6 +994,7 @@ impl flags::AnalysisStats { prefer_prelude: true, style_lints: false, term_search_fuel: 400, + term_search_borrowck: true, }, ide::AssistResolveStrategy::All, file_id, diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index f719a47248..86b61629e3 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs @@ -341,6 +341,8 @@ config_data! { assist_emitMustUse: bool = false, /// Placeholder expression to use for missing expressions in assists. assist_expressionFillDefault: ExprFillDefaultDef = ExprFillDefaultDef::Todo, + /// Enable borrow checking for term search code assists. If set to false, also there will be more suggestions, but some of them may not borrow-check. + assist_termSearch_borrowcheck: bool = true, /// Term search fuel in "units of work" for assists (Defaults to 1800). assist_termSearch_fuel: usize = 1800, @@ -1269,6 +1271,7 @@ impl Config { assist_emit_must_use: self.assist_emitMustUse(source_root).to_owned(), prefer_prelude: self.imports_preferPrelude(source_root).to_owned(), term_search_fuel: self.assist_termSearch_fuel(source_root).to_owned() as u64, + term_search_borrowck: self.assist_termSearch_borrowcheck(source_root).to_owned(), } } @@ -1328,6 +1331,7 @@ impl Config { prefer_prelude: self.imports_preferPrelude(source_root).to_owned(), style_lints: self.diagnostics_styleLints_enable().to_owned(), term_search_fuel: self.assist_termSearch_fuel(source_root).to_owned() as u64, + term_search_borrowck: self.assist_termSearch_borrowcheck(source_root).to_owned(), } } pub fn expand_proc_attr_macros(&self) -> bool { diff --git a/crates/rust-analyzer/src/integrated_benchmarks.rs b/crates/rust-analyzer/src/integrated_benchmarks.rs index 1e2cd4339b..65fbc96dad 100644 --- a/crates/rust-analyzer/src/integrated_benchmarks.rs +++ b/crates/rust-analyzer/src/integrated_benchmarks.rs @@ -300,6 +300,7 @@ fn integrated_diagnostics_benchmark() { prefer_no_std: false, prefer_prelude: false, term_search_fuel: 400, + term_search_borrowck: true, }; host.analysis() .diagnostics(&diagnostics_config, ide::AssistResolveStrategy::None, file_id) diff --git a/docs/user/generated_config.adoc b/docs/user/generated_config.adoc index 81f30dc56e..25d9430621 100644 --- a/docs/user/generated_config.adoc +++ b/docs/user/generated_config.adoc @@ -9,6 +9,11 @@ for enum variants. -- Placeholder expression to use for missing expressions in assists. -- +[[rust-analyzer.assist.termSearch.borrowcheck]]rust-analyzer.assist.termSearch.borrowcheck (default: `true`):: ++ +-- +Enable borrow checking for term search code assists. 
If set to false, also there will be more suggestions, but some of them may not borrow-check. +-- [[rust-analyzer.assist.termSearch.fuel]]rust-analyzer.assist.termSearch.fuel (default: `1800`):: + -- diff --git a/editors/code/package.json b/editors/code/package.json index 0da5b920a7..7f9987b935 100644 --- a/editors/code/package.json +++ b/editors/code/package.json @@ -588,6 +588,16 @@ } } }, + { + "title": "assist", + "properties": { + "rust-analyzer.assist.termSearch.borrowcheck": { + "markdownDescription": "Enable borrow checking for term search code assists. If set to false, also there will be more suggestions, but some of them may not borrow-check.", + "default": true, + "type": "boolean" + } + } + }, { "title": "assist", "properties": { From 278425919d7a3eb38ff6ddeb5d34968371fe02ae Mon Sep 17 00:00:00 2001 From: roife Date: Sat, 29 Jun 2024 22:18:36 +0800 Subject: [PATCH 29/94] fix: completions after async kw --- .../src/completions/item_list.rs | 19 ++++++++++++++++--- .../ide-completion/src/completions/keyword.rs | 1 + crates/ide-completion/src/context.rs | 4 +++- crates/ide-completion/src/context/analysis.rs | 13 +++++++++---- crates/ide-completion/src/tests/item_list.rs | 19 +++++++++++++++++++ 5 files changed, 48 insertions(+), 8 deletions(-) diff --git a/crates/ide-completion/src/completions/item_list.rs b/crates/ide-completion/src/completions/item_list.rs index 02298b1e9b..1883377408 100644 --- a/crates/ide-completion/src/completions/item_list.rs +++ b/crates/ide-completion/src/completions/item_list.rs @@ -82,17 +82,30 @@ fn add_keywords(acc: &mut Completions, ctx: &CompletionContext<'_>, kind: Option let no_vis_qualifiers = ctx.qualifier_ctx.vis_node.is_none(); let in_block = kind.is_none(); + let missing_qualifiers = [ + ctx.qualifier_ctx.unsafe_tok.is_none().then_some(("unsafe", "unsafe $0")), + ctx.qualifier_ctx.async_tok.is_none().then_some(("async", "async $0")), + ]; + if !in_trait_impl { - if ctx.qualifier_ctx.unsafe_tok.is_some() { + // handle qualifier tokens + if missing_qualifiers.iter().any(Option::is_none) { + // only complete missing qualifiers + missing_qualifiers.iter().filter_map(|x| *x).for_each(|(kw, snippet)| { + add_keyword(kw, snippet); + }); + if in_item_list || in_assoc_non_trait_impl { add_keyword("fn", "fn $1($2) {\n $0\n}"); } - if in_item_list { + + if ctx.qualifier_ctx.unsafe_tok.is_some() && in_item_list { add_keyword("trait", "trait $1 {\n $0\n}"); if no_vis_qualifiers { add_keyword("impl", "impl $1 {\n $0\n}"); } } + return; } @@ -100,7 +113,6 @@ fn add_keywords(acc: &mut Completions, ctx: &CompletionContext<'_>, kind: Option add_keyword("enum", "enum $1 {\n $0\n}"); add_keyword("mod", "mod $0"); add_keyword("static", "static $0"); - add_keyword("async", "async $0"); add_keyword("struct", "struct $0"); add_keyword("trait", "trait $1 {\n $0\n}"); add_keyword("union", "union $1 {\n $0\n}"); @@ -129,6 +141,7 @@ fn add_keywords(acc: &mut Completions, ctx: &CompletionContext<'_>, kind: Option add_keyword("fn", "fn $1($2) {\n $0\n}"); add_keyword("unsafe", "unsafe $0"); add_keyword("const", "const $0"); + add_keyword("async", "async $0"); } } } diff --git a/crates/ide-completion/src/completions/keyword.rs b/crates/ide-completion/src/completions/keyword.rs index d79b539882..3f50cd55cb 100644 --- a/crates/ide-completion/src/completions/keyword.rs +++ b/crates/ide-completion/src/completions/keyword.rs @@ -57,6 +57,7 @@ mod tests { check( r"fn my_fn() { unsafe $0 }", expect![[r#" + kw async kw fn kw impl kw trait diff --git 
a/crates/ide-completion/src/context.rs b/crates/ide-completion/src/context.rs index 992ca18bb0..5782a4423a 100644 --- a/crates/ide-completion/src/context.rs +++ b/crates/ide-completion/src/context.rs @@ -46,13 +46,15 @@ pub(crate) enum Visible { /// Existing qualifiers for the thing we are currently completing. #[derive(Debug, Default)] pub(crate) struct QualifierCtx { + // TODO: Add try_tok and default_tok + pub(crate) async_tok: Option, pub(crate) unsafe_tok: Option, pub(crate) vis_node: Option, } impl QualifierCtx { pub(crate) fn none(&self) -> bool { - self.unsafe_tok.is_none() && self.vis_node.is_none() + self.async_tok.is_none() && self.unsafe_tok.is_none() && self.vis_node.is_none() } } diff --git a/crates/ide-completion/src/context/analysis.rs b/crates/ide-completion/src/context/analysis.rs index 743aa00570..80dcfd2f52 100644 --- a/crates/ide-completion/src/context/analysis.rs +++ b/crates/ide-completion/src/context/analysis.rs @@ -1287,10 +1287,15 @@ fn classify_name_ref( syntax::algo::non_trivia_sibling(top.clone().into(), syntax::Direction::Prev) { if error_node.kind() == SyntaxKind::ERROR { - qualifier_ctx.unsafe_tok = error_node - .children_with_tokens() - .filter_map(NodeOrToken::into_token) - .find(|it| it.kind() == T![unsafe]); + for token in + error_node.children_with_tokens().filter_map(NodeOrToken::into_token) + { + match token.kind() { + SyntaxKind::UNSAFE_KW => qualifier_ctx.unsafe_tok = Some(token), + SyntaxKind::ASYNC_KW => qualifier_ctx.async_tok = Some(token), + _ => {} + } + } qualifier_ctx.vis_node = error_node.children().find_map(ast::Visibility::cast); } } diff --git a/crates/ide-completion/src/tests/item_list.rs b/crates/ide-completion/src/tests/item_list.rs index c37900478e..f138938b02 100644 --- a/crates/ide-completion/src/tests/item_list.rs +++ b/crates/ide-completion/src/tests/item_list.rs @@ -123,6 +123,7 @@ fn after_unsafe_token() { check( r#"unsafe $0"#, expect![[r#" + kw async kw fn kw impl kw trait @@ -130,6 +131,17 @@ fn after_unsafe_token() { ); } +#[test] +fn after_async_token() { + check( + r#"async $0"#, + expect![[r#" + kw fn + kw unsafe + "#]], + ); +} + #[test] fn after_visibility() { check( @@ -157,6 +169,7 @@ fn after_visibility_unsafe() { check( r#"pub unsafe $0"#, expect![[r#" + kw async kw fn kw trait "#]], @@ -170,6 +183,7 @@ fn in_impl_assoc_item_list() { expect![[r#" ma makro!(…) macro_rules! makro md module + kw async kw const kw crate:: kw fn @@ -189,6 +203,7 @@ fn in_impl_assoc_item_list_after_attr() { expect![[r#" ma makro!(…) macro_rules! makro md module + kw async kw const kw crate:: kw fn @@ -208,6 +223,7 @@ fn in_trait_assoc_item_list() { expect![[r#" ma makro!(…) macro_rules! makro md module + kw async kw const kw crate:: kw fn @@ -225,6 +241,7 @@ fn in_trait_assoc_fn_missing_body() { expect![[r#" ma makro!(…) macro_rules! makro md module + kw async kw const kw crate:: kw fn @@ -242,6 +259,7 @@ fn in_trait_assoc_const_missing_body() { expect![[r#" ma makro!(…) macro_rules! makro md module + kw async kw const kw crate:: kw fn @@ -259,6 +277,7 @@ fn in_trait_assoc_type_aliases_missing_ty() { expect![[r#" ma makro!(…) macro_rules! 
makro md module + kw async kw const kw crate:: kw fn From 882ae7105dc9a29dca14c5dc7a0c39c34d33ef0d Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sun, 30 Jun 2024 13:26:13 +0200 Subject: [PATCH 30/94] Simplify unresolved proc-macro handling --- crates/hir-def/src/data.rs | 4 -- crates/hir-def/src/nameres/collector.rs | 14 ++---- crates/hir-def/src/nameres/diagnostics.rs | 55 +++++++++++++++++------ 3 files changed, 44 insertions(+), 29 deletions(-) diff --git a/crates/hir-def/src/data.rs b/crates/hir-def/src/data.rs index 8cf1e15f31..55043fdc4b 100644 --- a/crates/hir-def/src/data.rs +++ b/crates/hir-def/src/data.rs @@ -637,10 +637,6 @@ impl<'a> AssocItemCollector<'a> { attr, ) { Ok(ResolvedAttr::Macro(call_id)) => { - // If proc attribute macro expansion is disabled, skip expanding it here - if !self.db.expand_proc_attr_macros() { - continue 'attrs; - } let loc = self.db.lookup_intern_macro_call(call_id); if let MacroDefKind::ProcMacro(_, exp, _) = loc.def.kind { // If there's no expander for the proc macro (e.g. the diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs index 6d2eb71549..b5045efb62 100644 --- a/crates/hir-def/src/nameres/collector.rs +++ b/crates/hir-def/src/nameres/collector.rs @@ -83,7 +83,9 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeI let name = Name::new_text_dont_use(it.name.clone()); ( name, - if it.disabled { + if !db.expand_proc_attr_macros() { + CustomProcMacroExpander::dummy() + } else if it.disabled { CustomProcMacroExpander::disabled() } else { CustomProcMacroExpander::new(hir_expand::proc_macro::ProcMacroId::new( @@ -1331,16 +1333,6 @@ impl DefCollector<'_> { let call_id = call_id(); if let MacroDefKind::ProcMacro(_, exp, _) = def.kind { - // If proc attribute macro expansion is disabled, skip expanding it here - if !self.db.expand_proc_attr_macros() { - self.def_map.diagnostics.push(DefDiagnostic::unresolved_proc_macro( - directive.module_id, - self.db.lookup_intern_macro_call(call_id).kind, - def.krate, - )); - return recollect_without(self); - } - // If there's no expander for the proc macro (e.g. // because proc macros are disabled, or building the // proc macro crate failed), report this and skip diff --git a/crates/hir-def/src/nameres/diagnostics.rs b/crates/hir-def/src/nameres/diagnostics.rs index 523a4c107b..4ab53d20b5 100644 --- a/crates/hir-def/src/nameres/diagnostics.rs +++ b/crates/hir-def/src/nameres/diagnostics.rs @@ -17,16 +17,47 @@ use crate::{ #[derive(Debug, PartialEq, Eq)] pub enum DefDiagnosticKind { - UnresolvedModule { ast: AstId, candidates: Box<[String]> }, - UnresolvedExternCrate { ast: AstId }, - UnresolvedImport { id: ItemTreeId, index: Idx }, - UnconfiguredCode { ast: ErasedAstId, cfg: CfgExpr, opts: CfgOptions }, - UnresolvedProcMacro { ast: MacroCallKind, krate: CrateId }, - UnresolvedMacroCall { ast: MacroCallKind, path: ModPath }, - UnimplementedBuiltinMacro { ast: AstId }, - InvalidDeriveTarget { ast: AstId, id: usize }, - MalformedDerive { ast: AstId, id: usize }, - MacroDefError { ast: AstId, message: String }, + UnresolvedModule { + ast: AstId, + candidates: Box<[String]>, + }, + UnresolvedExternCrate { + ast: AstId, + }, + UnresolvedImport { + id: ItemTreeId, + index: Idx, + }, + UnconfiguredCode { + ast: ErasedAstId, + cfg: CfgExpr, + opts: CfgOptions, + }, + /// A proc-macro that is lacking an expander, this might be due to build scripts not yet having + /// run or proc-macro expansion being disabled. 
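
Aside — a minimal sketch, not part of the patch series, of the pattern this simplification relies on: instead of re-checking an "is proc-macro expansion enabled?" flag at every call site, the unavailable state is baked into the expander once, and every caller gets back a uniform error it can turn into a diagnostic. All names here (`Expander`, the `EXPAND_PROC_MACROS` variable) are invented for illustration.

    // Hypothetical sketch: encode "expansion unavailable" in the expander itself.
    #[derive(Clone, Copy)]
    enum Expander {
        Real(fn(&str) -> String),
        Dummy, // selected once, e.g. when proc-macro expansion is disabled
    }

    impl Expander {
        fn expand(self, input: &str) -> Result<String, &'static str> {
            match self {
                Expander::Real(f) => Ok(f(input)),
                // Callers no longer carry their own "is expansion enabled?" checks;
                // they just report the error they get back.
                Expander::Dummy => Err("proc-macro expansion is disabled"),
            }
        }
    }

    fn main() {
        let exp = if std::env::var_os("EXPAND_PROC_MACROS").is_some() {
            Expander::Real(|s| s.to_uppercase())
        } else {
            Expander::Dummy
        };
        match exp.expand("foo") {
            Ok(out) => println!("expanded: {out}"),
            Err(msg) => println!("diagnostic: {msg}"),
        }
    }
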
+ UnresolvedProcMacro { + ast: MacroCallKind, + krate: CrateId, + }, + UnresolvedMacroCall { + ast: MacroCallKind, + path: ModPath, + }, + UnimplementedBuiltinMacro { + ast: AstId, + }, + InvalidDeriveTarget { + ast: AstId, + id: usize, + }, + MalformedDerive { + ast: AstId, + id: usize, + }, + MacroDefError { + ast: AstId, + message: String, + }, } #[derive(Clone, Debug, PartialEq, Eq)] @@ -92,10 +123,6 @@ impl DefDiagnostic { Self { in_module: container, kind: DefDiagnosticKind::UnconfiguredCode { ast, cfg, opts } } } - // FIXME: Whats the difference between this and unresolved_macro_call - // FIXME: This is used for a lot of things, unresolved proc macros, disabled proc macros, etc - // yet the diagnostic handler in ide-diagnostics has to figure out what happened because this - // struct loses all that information! pub fn unresolved_proc_macro( container: LocalModuleId, ast: MacroCallKind, From bfb187aacd23e3af4d074322625c84a263a63b6f Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sun, 30 Jun 2024 13:26:17 +0200 Subject: [PATCH 31/94] Fix expression scope calculation when within macro expansions --- Cargo.toml | 4 +- crates/hir-expand/src/files.rs | 34 ++++++++++++ crates/hir/src/source_analyzer.rs | 52 +++++++++++-------- .../ide-assists/src/handlers/bool_to_enum.rs | 11 +++- crates/ide/src/runnables.rs | 11 ++-- .../test_data/highlight_macros.html | 10 ++++ crates/ide/src/syntax_highlighting/tests.rs | 10 ++++ 7 files changed, 99 insertions(+), 33 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 583c7bbe33..18c9bb5ce0 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -10,9 +10,7 @@ license = "MIT OR Apache-2.0" authors = ["rust-analyzer team"] [profile.dev] -# Disabling debug info speeds up builds a bunch, -# and we don't rely on it for debugging that much. -debug = 0 +debug = 1 [profile.dev.package] # These speed up local tests. diff --git a/crates/hir-expand/src/files.rs b/crates/hir-expand/src/files.rs index 743fac50f4..90d9e61a69 100644 --- a/crates/hir-expand/src/files.rs +++ b/crates/hir-expand/src/files.rs @@ -158,6 +158,40 @@ impl InFileWrapper { // region:specific impls impl InFile<&SyntaxNode> { + pub fn parent_ancestors_with_macros( + self, + db: &dyn db::ExpandDatabase, + ) -> impl Iterator> + '_ { + let succ = move |node: &InFile| match node.value.parent() { + Some(parent) => Some(node.with_value(parent)), + None => db + .lookup_intern_macro_call(node.file_id.macro_file()?.macro_call_id) + .to_node_item(db) + .syntax() + .cloned() + .map(|node| node.parent()) + .transpose(), + }; + std::iter::successors(succ(&self.cloned()), succ) + } + + pub fn ancestors_with_macros( + self, + db: &dyn db::ExpandDatabase, + ) -> impl Iterator> + '_ { + let succ = move |node: &InFile| match node.value.parent() { + Some(parent) => Some(node.with_value(parent)), + None => db + .lookup_intern_macro_call(node.file_id.macro_file()?.macro_call_id) + .to_node_item(db) + .syntax() + .cloned() + .map(|node| node.parent()) + .transpose(), + }; + std::iter::successors(Some(self.cloned()), succ) + } + /// Falls back to the macro call range if the node cannot be mapped up fully. /// /// For attributes and derives, this will point back to the attribute only. diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs index 8e71a54f80..be8e9c49a0 100644 --- a/crates/hir/src/source_analyzer.rs +++ b/crates/hir/src/source_analyzer.rs @@ -68,38 +68,44 @@ impl SourceAnalyzer { pub(crate) fn new_for_body( db: &dyn HirDatabase, def: DefWithBodyId, - node @ InFile { file_id, .. 
}: InFile<&SyntaxNode>, + node: InFile<&SyntaxNode>, offset: Option, ) -> SourceAnalyzer { - let (body, source_map) = db.body_with_source_map(def); - let scopes = db.expr_scopes(def); - let scope = match offset { - None => scope_for(&scopes, &source_map, node), - Some(offset) => scope_for_offset(db, &scopes, &source_map, node.file_id, offset), - }; - let resolver = resolver_for_scope(db.upcast(), def, scope); - SourceAnalyzer { - resolver, - def: Some((def, body, source_map)), - infer: Some(db.infer(def)), - file_id, - } + Self::new_for_body_(db, def, node, offset, Some(db.infer(def))) } pub(crate) fn new_for_body_no_infer( + db: &dyn HirDatabase, + def: DefWithBodyId, + node: InFile<&SyntaxNode>, + offset: Option, + ) -> SourceAnalyzer { + Self::new_for_body_(db, def, node, offset, None) + } + + pub(crate) fn new_for_body_( db: &dyn HirDatabase, def: DefWithBodyId, node @ InFile { file_id, .. }: InFile<&SyntaxNode>, offset: Option, + infer: Option>, ) -> SourceAnalyzer { let (body, source_map) = db.body_with_source_map(def); let scopes = db.expr_scopes(def); let scope = match offset { - None => scope_for(&scopes, &source_map, node), - Some(offset) => scope_for_offset(db, &scopes, &source_map, node.file_id, offset), + None => scope_for(db, &scopes, &source_map, node), + Some(offset) => { + debug_assert!( + node.value.text_range().contains_inclusive(offset), + "{:?} not in {:?}", + offset, + node.value.text_range() + ); + scope_for_offset(db, &scopes, &source_map, node.file_id, offset) + } }; let resolver = resolver_for_scope(db.upcast(), def, scope); - SourceAnalyzer { resolver, def: Some((def, body, source_map)), infer: None, file_id } + SourceAnalyzer { resolver, def: Some((def, body, source_map)), infer, file_id } } pub(crate) fn new_for_resolver( @@ -662,7 +668,6 @@ impl SourceAnalyzer { return resolved; } - // This must be a normal source file rather than macro file. 
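
Aside — the ancestor walks added to `files.rs` above are built on `std::iter::successors`: follow the in-file parent while one exists, otherwise hop to the node the current subtree was expanded from and continue. A minimal stand-alone sketch of that traversal shape, with the arena-based `Node` type invented for illustration:

    // Hypothetical arena of nodes: `parent` is the parent within the same tree,
    // `call_site` points at the node that "expanded" this subtree, if any.
    struct Node {
        name: &'static str,
        parent: Option<usize>,
        call_site: Option<usize>,
    }

    fn ancestors<'a>(arena: &'a [Node], start: usize) -> impl Iterator<Item = &'a Node> + 'a {
        // Same shape as the `iter::successors` chains above: prefer the in-tree
        // parent, otherwise jump to the call site and keep walking from there.
        std::iter::successors(Some(start), move |&idx| {
            arena[idx].parent.or(arena[idx].call_site)
        })
        .map(move |idx| &arena[idx])
    }

    fn main() {
        let arena = vec![
            Node { name: "fn main", parent: None, call_site: None },              // 0
            Node { name: "macro call", parent: Some(0), call_site: None },        // 1
            Node { name: "expansion root", parent: None, call_site: Some(1) },    // 2
            Node { name: "expr in expansion", parent: Some(2), call_site: None }, // 3
        ];
        for node in ancestors(&arena, 3) {
            println!("{}", node.name);
        }
        // Prints: expr in expansion, expansion root, macro call, fn main
    }

Modelling the jump out of an expansion as just another `successors` step is what lets one iterator serve plain files and macro expansions alike.
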
let ctx = LowerCtx::new(db.upcast(), self.file_id); let hir_path = Path::from_src(&ctx, path.clone())?; @@ -955,14 +960,17 @@ impl SourceAnalyzer { } fn scope_for( + db: &dyn HirDatabase, scopes: &ExprScopes, source_map: &BodySourceMap, node: InFile<&SyntaxNode>, ) -> Option { - node.value - .ancestors() - .filter_map(ast::Expr::cast) - .filter_map(|it| source_map.node_expr(InFile::new(node.file_id, &it))) + node.ancestors_with_macros(db.upcast()) + .take_while(|it| { + !ast::Item::can_cast(it.value.kind()) || ast::MacroCall::can_cast(it.value.kind()) + }) + .filter_map(|it| it.map(ast::Expr::cast).transpose()) + .filter_map(|it| source_map.node_expr(it.as_ref())) .find_map(|it| scopes.scope_for(it)) } diff --git a/crates/ide-assists/src/handlers/bool_to_enum.rs b/crates/ide-assists/src/handlers/bool_to_enum.rs index c95e24693d..2aeca0bae0 100644 --- a/crates/ide-assists/src/handlers/bool_to_enum.rs +++ b/crates/ide-assists/src/handlers/bool_to_enum.rs @@ -76,7 +76,11 @@ pub(crate) fn bool_to_enum(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option let usages = definition.usages(&ctx.sema).all(); add_enum_def(edit, ctx, &usages, target_node, &target_module); - replace_usages(edit, ctx, usages, definition, &target_module); + let mut delayed_mutations = Vec::new(); + replace_usages(edit, ctx, usages, definition, &target_module, &mut delayed_mutations); + for (scope, path) in delayed_mutations { + insert_use(&scope, path, &ctx.config.insert_use); + } }, ) } @@ -197,6 +201,7 @@ fn replace_usages( usages: UsageSearchResult, target_definition: Definition, target_module: &hir::Module, + delayed_mutations: &mut Vec<(ImportScope, ast::Path)>, ) { for (file_id, references) in usages { edit.edit_file(file_id); @@ -217,6 +222,7 @@ fn replace_usages( def.usages(&ctx.sema).all(), target_definition, target_module, + delayed_mutations, ) } } else if let Some(initializer) = find_assignment_usage(&name) { @@ -255,6 +261,7 @@ fn replace_usages( def.usages(&ctx.sema).all(), target_definition, target_module, + delayed_mutations, ) } } @@ -306,7 +313,7 @@ fn replace_usages( ImportScope::Module(it) => ImportScope::Module(edit.make_mut(it)), ImportScope::Block(it) => ImportScope::Block(edit.make_mut(it)), }; - insert_use(&scope, path, &ctx.config.insert_use); + delayed_mutations.push((scope, path)); } }, ) diff --git a/crates/ide/src/runnables.rs b/crates/ide/src/runnables.rs index 2feea09840..a68ee4f867 100644 --- a/crates/ide/src/runnables.rs +++ b/crates/ide/src/runnables.rs @@ -78,7 +78,6 @@ impl RunnableKind { } impl Runnable { - // test package::module::testname pub fn label(&self, target: Option<&str>) -> String { match &self.kind { RunnableKind::Test { test_id, .. } => format!("test {test_id}"), @@ -86,7 +85,7 @@ impl Runnable { RunnableKind::Bench { test_id } => format!("bench {test_id}"), RunnableKind::DocTest { test_id, .. 
} => format!("doctest {test_id}"), RunnableKind::Bin => { - target.map_or_else(|| "run binary".to_owned(), |t| format!("run {t}")) + format!("run {}", target.unwrap_or("binary")) } } } @@ -513,11 +512,11 @@ impl TestAttr { } } -const RUSTDOC_FENCES: [&str; 2] = ["```", "~~~"]; -const RUSTDOC_CODE_BLOCK_ATTRIBUTES_RUNNABLE: &[&str] = - &["", "rust", "should_panic", "edition2015", "edition2018", "edition2021"]; - fn has_runnable_doc_test(attrs: &hir::Attrs) -> bool { + const RUSTDOC_FENCES: [&str; 2] = ["```", "~~~"]; + const RUSTDOC_CODE_BLOCK_ATTRIBUTES_RUNNABLE: &[&str] = + &["", "rust", "should_panic", "edition2015", "edition2018", "edition2021"]; + docs_from_attrs(attrs).map_or(false, |doc| { let mut in_code_block = false; diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html b/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html index 573b3d4bd5..17411fefbd 100644 --- a/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html +++ b/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html @@ -94,10 +94,20 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd } } +macro_rules! id { + ($($tt:tt)*) => { + $($tt)* + }; +} include!(concat!("foo/", "foo.rs")); +struct S<T>(T); fn main() { + struct TestLocal; + // regression test, TestLocal here used to not resolve + let _: S<id![TestLocal]>; + format_args!("Hello, {}!", (92,).0); dont_color_me_braces!(); noop!(noop!(1)); diff --git a/crates/ide/src/syntax_highlighting/tests.rs b/crates/ide/src/syntax_highlighting/tests.rs index 08acfca2cb..5f711600a2 100644 --- a/crates/ide/src/syntax_highlighting/tests.rs +++ b/crates/ide/src/syntax_highlighting/tests.rs @@ -102,10 +102,20 @@ macro without_args { } } +macro_rules! id { + ($($tt:tt)*) => { + $($tt)* + }; +} include!(concat!("foo/", "foo.rs")); +struct S(T); fn main() { + struct TestLocal; + // regression test, TestLocal here used to not resolve + let _: S; + format_args!("Hello, {}!", (92,).0); dont_color_me_braces!(); noop!(noop!(1)); From 5374ebbf36a42f3c358c4daf901fd6c01b85be33 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sun, 30 Jun 2024 13:44:33 +0200 Subject: [PATCH 32/94] Simplify --- crates/hir-expand/src/files.rs | 61 +++++++++++++------ crates/hir/src/source_analyzer.rs | 12 ++-- crates/hir/src/term_search/tactics.rs | 3 +- crates/ide-db/src/rust_doc.rs | 6 +- .../trait_impl_redundant_assoc_item.rs | 15 +---- .../rust-analyzer/src/cli/analysis_stats.rs | 4 +- crates/rust-analyzer/src/config.rs | 2 +- .../rust-analyzer/tests/slow-tests/support.rs | 3 +- crates/rust-analyzer/tests/slow-tests/tidy.rs | 2 +- crates/test-utils/src/lib.rs | 2 +- xtask/src/codegen/diagnostics_docs.rs | 2 +- xtask/src/release.rs | 3 +- 12 files changed, 62 insertions(+), 53 deletions(-) diff --git a/crates/hir-expand/src/files.rs b/crates/hir-expand/src/files.rs index 90d9e61a69..fc9fa93268 100644 --- a/crates/hir-expand/src/files.rs +++ b/crates/hir-expand/src/files.rs @@ -1,4 +1,6 @@ //! Things to wrap other things in file ids. 
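
Aside — the `Borrow` import added below is what lets a single `impl` block serve both borrowed and owned payloads instead of being written only for `InFile<&SyntaxNode>`. A minimal sketch of that generalization on an invented `Wrapper` type:

    use std::borrow::Borrow;

    // Hypothetical stand-in for a file-tagged wrapper: a value plus extra context.
    struct Wrapper<T> {
        file: u32,
        value: T,
    }

    // One impl covers Wrapper<String>, Wrapper<&String>, Wrapper<&str>, ...
    impl<T: Borrow<str>> Wrapper<T> {
        fn len_in_file(&self) -> (u32, usize) {
            // `borrow()` yields a `&str` view regardless of how the value is owned.
            (self.file, self.value.borrow().len())
        }
    }

    fn main() {
        let owned = Wrapper { file: 1, value: String::from("fn main() {}") };
        let borrowed = Wrapper { file: 2, value: "struct S;" };
        assert_eq!(owned.len_in_file(), (1, 12));
        assert_eq!(borrowed.len_in_file(), (2, 9));
    }

The single `T: Borrow<str>` bound replaces what would otherwise be duplicate impls for the borrowed and owned forms.
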
+use std::borrow::Borrow; + use either::Either; use span::{ AstIdNode, ErasedFileAstId, FileAstId, FileId, FileRange, HirFileId, HirFileIdRepr, @@ -76,6 +78,13 @@ impl InFileWrapper { pub fn as_ref(&self) -> InFileWrapper { self.with_value(&self.value) } + + pub fn borrow(&self) -> InFileWrapper + where + T: Borrow, + { + self.with_value(self.value.borrow()) + } } impl InFileWrapper { @@ -156,8 +165,13 @@ impl InFileWrapper { } // region:specific impls +impl> InRealFile { + pub fn file_range(&self) -> FileRange { + FileRange { file_id: self.file_id, range: self.value.borrow().text_range() } + } +} -impl InFile<&SyntaxNode> { +impl> InFile { pub fn parent_ancestors_with_macros( self, db: &dyn db::ExpandDatabase, @@ -172,7 +186,7 @@ impl InFile<&SyntaxNode> { .map(|node| node.parent()) .transpose(), }; - std::iter::successors(succ(&self.cloned()), succ) + std::iter::successors(succ(&self.borrow().cloned()), succ) } pub fn ancestors_with_macros( @@ -189,7 +203,15 @@ impl InFile<&SyntaxNode> { .map(|node| node.parent()) .transpose(), }; - std::iter::successors(Some(self.cloned()), succ) + std::iter::successors(Some(self.borrow().cloned()), succ) + } + + pub fn kind(&self) -> parser::SyntaxKind { + self.value.borrow().kind() + } + + pub fn text_range(&self) -> TextRange { + self.value.borrow().text_range() } /// Falls back to the macro call range if the node cannot be mapped up fully. @@ -197,7 +219,7 @@ impl InFile<&SyntaxNode> { /// For attributes and derives, this will point back to the attribute only. /// For the entire item use [`InFile::original_file_range_full`]. pub fn original_file_range_rooted(self, db: &dyn db::ExpandDatabase) -> FileRange { - self.map(SyntaxNode::text_range).original_node_file_range_rooted(db) + self.borrow().map(SyntaxNode::text_range).original_node_file_range_rooted(db) } /// Falls back to the macro call range if the node cannot be mapped up fully. @@ -205,15 +227,7 @@ impl InFile<&SyntaxNode> { self, db: &dyn db::ExpandDatabase, ) -> FileRange { - self.map(SyntaxNode::text_range).original_node_file_range_with_macro_call_body(db) - } - - /// Attempts to map the syntax node back up its macro calls. - pub fn original_file_range_opt( - self, - db: &dyn db::ExpandDatabase, - ) -> Option<(FileRange, SyntaxContextId)> { - self.map(SyntaxNode::text_range).original_node_file_range_opt(db) + self.borrow().map(SyntaxNode::text_range).original_node_file_range_with_macro_call_body(db) } pub fn original_syntax_node_rooted( @@ -224,16 +238,19 @@ impl InFile<&SyntaxNode> { // as we don't have node inputs otherwise and therefore can't find an `N` node in the input let file_id = match self.file_id.repr() { HirFileIdRepr::FileId(file_id) => { - return Some(InRealFile { file_id, value: self.value.clone() }) + return Some(InRealFile { file_id, value: self.value.borrow().clone() }) } HirFileIdRepr::MacroFile(m) if m.is_attr_macro(db) => m, _ => return None, }; - let FileRange { file_id, range } = - map_node_range_up_rooted(db, &db.expansion_span_map(file_id), self.value.text_range())?; + let FileRange { file_id, range } = map_node_range_up_rooted( + db, + &db.expansion_span_map(file_id), + self.value.borrow().text_range(), + )?; - let kind = self.value.kind(); + let kind = self.kind(); let value = db .parse(file_id) .syntax_node() @@ -245,6 +262,16 @@ impl InFile<&SyntaxNode> { } } +impl InFile<&SyntaxNode> { + /// Attempts to map the syntax node back up its macro calls. 
+ pub fn original_file_range_opt( + self, + db: &dyn db::ExpandDatabase, + ) -> Option<(FileRange, SyntaxContextId)> { + self.borrow().map(SyntaxNode::text_range).original_node_file_range_opt(db) + } +} + impl InMacroFile { pub fn upmap_once( self, diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs index be8e9c49a0..81c57f6cae 100644 --- a/crates/hir/src/source_analyzer.rs +++ b/crates/hir/src/source_analyzer.rs @@ -96,10 +96,10 @@ impl SourceAnalyzer { None => scope_for(db, &scopes, &source_map, node), Some(offset) => { debug_assert!( - node.value.text_range().contains_inclusive(offset), + node.text_range().contains_inclusive(offset), "{:?} not in {:?}", offset, - node.value.text_range() + node.text_range() ); scope_for_offset(db, &scopes, &source_map, node.file_id, offset) } @@ -966,9 +966,7 @@ fn scope_for( node: InFile<&SyntaxNode>, ) -> Option { node.ancestors_with_macros(db.upcast()) - .take_while(|it| { - !ast::Item::can_cast(it.value.kind()) || ast::MacroCall::can_cast(it.value.kind()) - }) + .take_while(|it| !ast::Item::can_cast(it.kind()) || ast::MacroCall::can_cast(it.kind())) .filter_map(|it| it.map(ast::Expr::cast).transpose()) .filter_map(|it| source_map.node_expr(it.as_ref())) .find_map(|it| scopes.scope_for(it)) @@ -996,8 +994,8 @@ fn scope_for_offset( Some(it.file_id.macro_file()?.call_node(db.upcast())) }) .find(|it| it.file_id == from_file) - .filter(|it| it.value.kind() == SyntaxKind::MACRO_CALL)?; - Some((source.value.text_range(), scope)) + .filter(|it| it.kind() == SyntaxKind::MACRO_CALL)?; + Some((source.text_range(), scope)) }) .filter(|(expr_range, _scope)| expr_range.start() <= offset && offset <= expr_range.end()) // find containing scope diff --git a/crates/hir/src/term_search/tactics.rs b/crates/hir/src/term_search/tactics.rs index d1fc68d009..1b0e6f8bd5 100644 --- a/crates/hir/src/term_search/tactics.rs +++ b/crates/hir/src/term_search/tactics.rs @@ -598,9 +598,8 @@ pub(super) fn famous_types<'a, DB: HirDatabase>( Expr::FamousType { ty: Type::new(db, module.id, TyBuilder::unit()), value: "()" }, ] .into_iter() - .map(|exprs| { + .inspect(|exprs| { lookup.insert(exprs.ty(db), std::iter::once(exprs.clone())); - exprs }) .filter(|expr| expr.ty(db).could_unify_with_deeply(db, &ctx.goal)) } diff --git a/crates/ide-db/src/rust_doc.rs b/crates/ide-db/src/rust_doc.rs index ab2a250289..eacd9b9b4d 100644 --- a/crates/ide-db/src/rust_doc.rs +++ b/crates/ide-db/src/rust_doc.rs @@ -7,11 +7,7 @@ pub fn is_rust_fence(s: &str) -> bool { let mut seen_rust_tags = false; let mut seen_other_tags = false; - let tokens = s - .trim() - .split(|c| c == ',' || c == ' ' || c == '\t') - .map(str::trim) - .filter(|t| !t.is_empty()); + let tokens = s.trim().split([',', ' ', '\t']).map(str::trim).filter(|t| !t.is_empty()); for token in tokens { match token { diff --git a/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs b/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs index be1e6ed572..a470ce72fc 100644 --- a/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs +++ b/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs @@ -28,10 +28,7 @@ pub(crate) fn trait_impl_redundant_assoc_item( let function = id; ( format!("`fn {redundant_assoc_item_name}`"), - function - .source(db) - .map(|it| it.syntax().value.text_range()) - .unwrap_or(default_range), + function.source(db).map(|it| it.syntax().text_range()).unwrap_or(default_range), format!("\n {};", function.display(db)), ) } @@ 
-39,10 +36,7 @@ pub(crate) fn trait_impl_redundant_assoc_item( let constant = id; ( format!("`const {redundant_assoc_item_name}`"), - constant - .source(db) - .map(|it| it.syntax().value.text_range()) - .unwrap_or(default_range), + constant.source(db).map(|it| it.syntax().text_range()).unwrap_or(default_range), format!("\n {};", constant.display(db)), ) } @@ -50,10 +44,7 @@ pub(crate) fn trait_impl_redundant_assoc_item( let type_alias = id; ( format!("`type {redundant_assoc_item_name}`"), - type_alias - .source(db) - .map(|it| it.syntax().value.text_range()) - .unwrap_or(default_range), + type_alias.source(db).map(|it| it.syntax().text_range()).unwrap_or(default_range), format!("\n type {};", type_alias.name(ctx.sema.db).to_smol_str()), ) } diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs index 90b81d0a80..a188adbe35 100644 --- a/crates/rust-analyzer/src/cli/analysis_stats.rs +++ b/crates/rust-analyzer/src/cli/analysis_stats.rs @@ -651,7 +651,7 @@ impl flags::AnalysisStats { if let Some(src) = source { let original_file = src.file_id.original_file(db); let path = vfs.file_path(original_file); - let syntax_range = src.value.text_range(); + let syntax_range = src.text_range(); format!("processing: {} ({} {:?})", full_name(), path, syntax_range) } else { format!("processing: {}", full_name()) @@ -945,7 +945,7 @@ impl flags::AnalysisStats { if let Some(src) = source { let original_file = src.file_id.original_file(db); let path = vfs.file_path(original_file); - let syntax_range = src.value.text_range(); + let syntax_range = src.text_range(); format!("processing: {} ({} {:?})", full_name(), path, syntax_range) } else { format!("processing: {}", full_name()) diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index f719a47248..42ba162e4f 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs @@ -3367,7 +3367,7 @@ mod tests { for idx in url_offsets { let link = &schema[idx..]; // matching on whitespace to ignore normal links - if let Some(link_end) = link.find(|c| c == ' ' || c == '[') { + if let Some(link_end) = link.find([' ', '[']) { if link.chars().nth(link_end) == Some('[') { if let Some(link_text_end) = link.find(']') { let link_text = link[link_end..(link_text_end + 1)].to_string(); diff --git a/crates/rust-analyzer/tests/slow-tests/support.rs b/crates/rust-analyzer/tests/slow-tests/support.rs index c438325532..66100971fb 100644 --- a/crates/rust-analyzer/tests/slow-tests/support.rs +++ b/crates/rust-analyzer/tests/slow-tests/support.rs @@ -388,9 +388,8 @@ impl Server { } fn recv(&self) -> Result, Timeout> { let msg = recv_timeout(&self.client.receiver)?; - let msg = msg.map(|msg| { + let msg = msg.inspect(|msg| { self.messages.borrow_mut().push(msg.clone()); - msg }); Ok(msg) } diff --git a/crates/rust-analyzer/tests/slow-tests/tidy.rs b/crates/rust-analyzer/tests/slow-tests/tidy.rs index 7dd6382cfa..8cd5cbf1c7 100644 --- a/crates/rust-analyzer/tests/slow-tests/tidy.rs +++ b/crates/rust-analyzer/tests/slow-tests/tidy.rs @@ -155,7 +155,7 @@ Zlib OR Apache-2.0 OR MIT let meta = cmd!(sh, "cargo metadata --format-version 1").read().unwrap(); let mut licenses = meta - .split(|c| c == ',' || c == '{' || c == '}') + .split([',', '{', '}']) .filter(|it| it.contains(r#""license""#)) .map(|it| it.trim()) .map(|it| it[r#""license":"#.len()..].trim_matches('"')) diff --git a/crates/test-utils/src/lib.rs b/crates/test-utils/src/lib.rs index 43f62d0d1e..53d4a28bc3 100644 --- 
a/crates/test-utils/src/lib.rs +++ b/crates/test-utils/src/lib.rs @@ -305,7 +305,7 @@ fn extract_line_annotations(mut line: &str) -> Vec { } let range = TextRange::at(offset, len.try_into().unwrap()); let line_no_caret = &line[len..]; - let end_marker = line_no_caret.find(|c| c == '$'); + let end_marker = line_no_caret.find('$'); let next = line_no_caret.find(marker).map_or(line.len(), |it| it + len); let cond = |end_marker| { diff --git a/xtask/src/codegen/diagnostics_docs.rs b/xtask/src/codegen/diagnostics_docs.rs index cf30531e7f..dcc9c76a50 100644 --- a/xtask/src/codegen/diagnostics_docs.rs +++ b/xtask/src/codegen/diagnostics_docs.rs @@ -63,7 +63,7 @@ fn is_valid_diagnostic_name(diagnostic: &str) -> Result<(), String> { if diagnostic.chars().any(|c| c.is_ascii_uppercase()) { return Err("Diagnostic names can't contain uppercase symbols".into()); } - if diagnostic.chars().any(|c| !c.is_ascii()) { + if !diagnostic.is_ascii() { return Err("Diagnostic can't contain non-ASCII symbols".into()); } diff --git a/xtask/src/release.rs b/xtask/src/release.rs index 5699053a23..b936876b52 100644 --- a/xtask/src/release.rs +++ b/xtask/src/release.rs @@ -119,12 +119,11 @@ impl flags::RustcPull { // Fetch given rustc commit. cmd!(sh, "git fetch http://localhost:{JOSH_PORT}/rust-lang/rust.git@{commit}{JOSH_FILTER}.git") .run() - .map_err(|e| { + .inspect_err(|_| { // Try to un-do the previous `git commit`, to leave the repo in the state we found it it. cmd!(sh, "git reset --hard HEAD^") .run() .expect("FAILED to clean up again after failed `git fetch`, sorry for that"); - e }) .context("FAILED to fetch new commits, something went wrong (committing the rust-version file has been undone)")?; From c19458270da94f1a77f7b52050da4e1f9f2cd37a Mon Sep 17 00:00:00 2001 From: Tavo Annus Date: Sat, 29 Jun 2024 21:22:26 +0300 Subject: [PATCH 33/94] Do not explicit generics to generated expressions --- crates/hir/src/term_search/expr.rs | 78 ++----------------- .../ide-assists/src/handlers/term_search.rs | 8 +- crates/ide-completion/src/render.rs | 2 +- 3 files changed, 13 insertions(+), 75 deletions(-) diff --git a/crates/hir/src/term_search/expr.rs b/crates/hir/src/term_search/expr.rs index a6faa97196..0c8f6932c7 100644 --- a/crates/hir/src/term_search/expr.rs +++ b/crates/hir/src/term_search/expr.rs @@ -9,8 +9,8 @@ use hir_ty::{ use itertools::Itertools; use crate::{ - Adt, AsAssocItem, AssocItemContainer, Const, ConstParam, Field, Function, GenericDef, Local, - ModuleDef, SemanticsScope, Static, Struct, StructKind, Trait, Type, Variant, + Adt, AsAssocItem, AssocItemContainer, Const, ConstParam, Field, Function, Local, ModuleDef, + SemanticsScope, Static, Struct, StructKind, Trait, Type, Variant, }; /// Helper function to get path to `ModuleDef` @@ -35,43 +35,6 @@ fn mod_item_path_str( .ok_or(DisplaySourceCodeError::PathNotFound) } -/// Helper function to get path to `Type` -fn type_path( - sema_scope: &SemanticsScope<'_>, - ty: &Type, - cfg: ImportPathConfig, -) -> Result { - let db = sema_scope.db; - let m = sema_scope.module(); - - match ty.as_adt() { - Some(adt) => { - let ty_name = ty.display_source_code(db, m.id, true)?; - - let mut path = mod_item_path(sema_scope, &ModuleDef::Adt(adt), cfg).unwrap(); - path.pop_segment(); - let path = path.display(db.upcast()).to_string(); - let res = match path.is_empty() { - true => ty_name, - false => format!("{path}::{ty_name}"), - }; - Ok(res) - } - None => ty.display_source_code(db, m.id, true), - } -} - -/// Helper function to filter out generic parameters that are 
default -fn non_default_generics(db: &dyn HirDatabase, def: GenericDef, generics: &[Type]) -> Vec { - def.type_or_const_params(db) - .into_iter() - .filter_map(|it| it.as_type_param(db)) - .zip(generics) - .filter(|(tp, arg)| tp.default(db).as_ref() != Some(arg)) - .map(|(_, arg)| arg.clone()) - .collect() -} - /// Type tree shows how can we get from set of types to some type. /// /// Consider the following code as an example @@ -208,20 +171,7 @@ impl Expr { None => Ok(format!("{target_str}.{func_name}({args})")), } } - Expr::Variant { variant, generics, params } => { - let generics = non_default_generics(db, variant.parent_enum(db).into(), generics); - let generics_str = match generics.is_empty() { - true => String::new(), - false => { - let generics = generics - .iter() - .map(|it| type_path(sema_scope, it, cfg)) - .collect::, DisplaySourceCodeError>>()? - .into_iter() - .join(", "); - format!("::<{generics}>") - } - }; + Expr::Variant { variant, params, .. } => { let inner = match variant.kind(db) { StructKind::Tuple => { let args = params @@ -230,7 +180,7 @@ impl Expr { .collect::, DisplaySourceCodeError>>()? .into_iter() .join(", "); - format!("{generics_str}({args})") + format!("({args})") } StructKind::Record => { let fields = variant.fields(db); @@ -248,16 +198,15 @@ impl Expr { .collect::, DisplaySourceCodeError>>()? .into_iter() .join(", "); - format!("{generics_str}{{ {args} }}") + format!("{{ {args} }}") } - StructKind::Unit => generics_str, + StructKind::Unit => String::new(), }; let prefix = mod_item_path_str(sema_scope, &ModuleDef::Variant(*variant))?; Ok(format!("{prefix}{inner}")) } - Expr::Struct { strukt, generics, params } => { - let generics = non_default_generics(db, (*strukt).into(), generics); + Expr::Struct { strukt, params, .. } => { let inner = match strukt.kind(db) { StructKind::Tuple => { let args = params @@ -286,18 +235,7 @@ impl Expr { .join(", "); format!(" {{ {args} }}") } - StructKind::Unit => match generics.is_empty() { - true => String::new(), - false => { - let generics = generics - .iter() - .map(|it| type_path(sema_scope, it, cfg)) - .collect::, DisplaySourceCodeError>>()? 
- .into_iter() - .join(", "); - format!("::<{generics}>") - } - }, + StructKind::Unit => String::new(), }; let prefix = mod_item_path_str(sema_scope, &ModuleDef::Adt(Adt::Struct(*strukt)))?; diff --git a/crates/ide-assists/src/handlers/term_search.rs b/crates/ide-assists/src/handlers/term_search.rs index 148b951a4b..874b42f51b 100644 --- a/crates/ide-assists/src/handlers/term_search.rs +++ b/crates/ide-assists/src/handlers/term_search.rs @@ -148,7 +148,7 @@ fn f() { let a = A { x: 1, y: true }; let b: i32 = a.x; }"#, term_search, r#"//- minicore: todo, unimplemented, option fn f() { let a: i32 = 1; let b: Option = todo$0!(); }"#, - r#"fn f() { let a: i32 = 1; let b: Option = Some::(a); }"#, + r#"fn f() { let a: i32 = 1; let b: Option = Some(a); }"#, ) } @@ -160,7 +160,7 @@ fn f() { let a: i32 = 1; let b: Option = todo$0!(); }"#, enum Option { None, Some(T) } fn f() { let a: i32 = 1; let b: Option = todo$0!(); }"#, r#"enum Option { None, Some(T) } -fn f() { let a: i32 = 1; let b: Option = Option::Some::(a); }"#, +fn f() { let a: i32 = 1; let b: Option = Option::Some(a); }"#, ) } @@ -172,7 +172,7 @@ fn f() { let a: i32 = 1; let b: Option = Option::Some::(a); }"#, enum Option { None, Some(T) } fn f() { let a: Option = Option::None; let b: Option> = todo$0!(); }"#, r#"enum Option { None, Some(T) } -fn f() { let a: Option = Option::None; let b: Option> = Option::Some::>(a); }"#, +fn f() { let a: Option = Option::None; let b: Option> = Option::Some(a); }"#, ) } @@ -184,7 +184,7 @@ fn f() { let a: Option = Option::None; let b: Option> = Option: enum Foo { Foo(T) } fn f() { let a = 0; let b: Foo = todo$0!(); }"#, r#"enum Foo { Foo(T) } -fn f() { let a = 0; let b: Foo = Foo::Foo::(a); }"#, +fn f() { let a = 0; let b: Foo = Foo::Foo(a); }"#, ); check_assist( diff --git a/crates/ide-completion/src/render.rs b/crates/ide-completion/src/render.rs index ff60a7669e..b98f745f17 100644 --- a/crates/ide-completion/src/render.rs +++ b/crates/ide-completion/src/render.rs @@ -2641,7 +2641,7 @@ fn foo() { expect![[r#" lc foo [type+local] ex foo [type] - ex Foo::B:: [type] + ex Foo::B [type] ev Foo::A(…) [type_could_unify] ev Foo::B [type_could_unify] en Foo [type_could_unify] From db15273d4d012864847e63fbc9251e09f4e3e8c4 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sun, 30 Jun 2024 15:05:35 +0200 Subject: [PATCH 34/94] Move dylib version stuff to proc-macro-srv --- Cargo.lock | 4 +--- Cargo.toml | 4 ++++ crates/proc-macro-api/Cargo.toml | 5 +---- crates/proc-macro-api/src/lib.rs | 3 --- crates/proc-macro-srv/Cargo.toml | 5 +++-- crates/proc-macro-srv/src/dylib.rs | 13 ++++++++---- .../src/dylib}/version.rs | 14 +++++++++++++ crates/proc-macro-srv/src/proc_macros.rs | 21 ++++--------------- 8 files changed, 36 insertions(+), 33 deletions(-) rename crates/{proc-macro-api/src => proc-macro-srv/src/dylib}/version.rs (92%) diff --git a/Cargo.lock b/Cargo.lock index 94734da9d3..790bccea19 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1329,13 +1329,10 @@ dependencies = [ "base-db", "indexmap", "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", - "memmap2", - "object 0.33.0", "paths", "rustc-hash", "serde", "serde_json", - "snap", "span", "stdx", "text-size", @@ -1358,6 +1355,7 @@ dependencies = [ "proc-macro-api", "proc-macro-test", "ra-ap-rustc_lexer", + "snap", "span", "stdx", "tt", diff --git a/Cargo.toml b/Cargo.toml index 18c9bb5ce0..d77804a418 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -120,6 +120,8 @@ hashbrown = { version = "0.14", features = [ indexmap = "2.1.0" itertools = "0.12.0" 
libc = "0.2.150" +libloading = "0.8.0" +memmap2 = "0.5.4" nohash-hasher = "0.2.0" oorandom = "11.1.3" object = { version = "0.33.0", default-features = false, features = [ @@ -143,6 +145,7 @@ smallvec = { version = "1.10.0", features = [ "const_generics", ] } smol_str = "0.2.1" +snap = "1.1.0" text-size = "1.1.1" tracing = "0.1.40" tracing-tree = "0.3.0" @@ -156,6 +159,7 @@ url = "2.3.1" xshell = "0.2.5" + # We need to freeze the version of the crate, as the raw-api feature is considered unstable dashmap = { version = "=5.5.3", features = ["raw-api"] } diff --git a/crates/proc-macro-api/Cargo.toml b/crates/proc-macro-api/Cargo.toml index f30f6a0f23..11fd076a6f 100644 --- a/crates/proc-macro-api/Cargo.toml +++ b/crates/proc-macro-api/Cargo.toml @@ -12,15 +12,12 @@ rust-version.workspace = true doctest = false [dependencies] -object.workspace = true serde.workspace = true serde_json = { workspace = true, features = ["unbounded_depth"] } tracing.workspace = true triomphe.workspace = true rustc-hash.workspace = true -memmap2 = "0.5.4" -snap = "1.1.0" -indexmap = "2.1.0" +indexmap.workspace = true # local deps paths = { workspace = true, features = ["serde1"] } diff --git a/crates/proc-macro-api/src/lib.rs b/crates/proc-macro-api/src/lib.rs index 8970bdd012..0de76689ab 100644 --- a/crates/proc-macro-api/src/lib.rs +++ b/crates/proc-macro-api/src/lib.rs @@ -9,7 +9,6 @@ pub mod msg; mod process; -mod version; use base_db::Env; use indexmap::IndexSet; @@ -31,8 +30,6 @@ use crate::{ process::ProcMacroProcessSrv, }; -pub use version::{read_dylib_info, read_version, RustCInfo}; - #[derive(Copy, Clone, Eq, PartialEq, Debug, Serialize, Deserialize)] pub enum ProcMacroKind { CustomDerive, diff --git a/crates/proc-macro-srv/Cargo.toml b/crates/proc-macro-srv/Cargo.toml index f8db1c6a30..735f781c43 100644 --- a/crates/proc-macro-srv/Cargo.toml +++ b/crates/proc-macro-srv/Cargo.toml @@ -13,8 +13,9 @@ doctest = false [dependencies] object.workspace = true -libloading = "0.8.0" -memmap2 = "0.5.4" +libloading.workspace = true +memmap2.workspace = true +snap.workspace = true stdx.workspace = true tt.workspace = true diff --git a/crates/proc-macro-srv/src/dylib.rs b/crates/proc-macro-srv/src/dylib.rs index 22c34ff167..474f00a574 100644 --- a/crates/proc-macro-srv/src/dylib.rs +++ b/crates/proc-macro-srv/src/dylib.rs @@ -1,5 +1,7 @@ //! 
Handles dynamic library loading for proc macro +mod version; + use std::{fmt, fs::File, io}; use libloading::Library; @@ -7,7 +9,7 @@ use memmap2::Mmap; use object::Object; use paths::{AbsPath, Utf8Path, Utf8PathBuf}; use proc_macro::bridge; -use proc_macro_api::{read_dylib_info, ProcMacroKind}; +use proc_macro_api::ProcMacroKind; use crate::ProcMacroSrvSpan; @@ -119,11 +121,14 @@ impl ProcMacroLibraryLibloading { let abs_file: &AbsPath = file .try_into() .map_err(|_| invalid_data_err(format!("expected an absolute path, got {file}")))?; - let version_info = read_dylib_info(abs_file)?; + let version_info = version::read_dylib_info(abs_file)?; let lib = load_library(file).map_err(invalid_data_err)?; - let proc_macros = - crate::proc_macros::ProcMacros::from_lib(&lib, symbol_name, version_info)?; + let proc_macros = crate::proc_macros::ProcMacros::from_lib( + &lib, + symbol_name, + &version_info.version_string, + )?; Ok(ProcMacroLibraryLibloading { _lib: lib, proc_macros }) } } diff --git a/crates/proc-macro-api/src/version.rs b/crates/proc-macro-srv/src/dylib/version.rs similarity index 92% rename from crates/proc-macro-api/src/version.rs rename to crates/proc-macro-srv/src/dylib/version.rs index 09b8125071..1f7ef7914b 100644 --- a/crates/proc-macro-api/src/version.rs +++ b/crates/proc-macro-srv/src/dylib/version.rs @@ -11,6 +11,7 @@ use paths::AbsPath; use snap::read::FrameDecoder as SnapDecoder; #[derive(Debug)] +#[allow(dead_code)] pub struct RustCInfo { pub version: (usize, usize, usize), pub channel: String, @@ -164,3 +165,16 @@ pub fn read_version(dylib_path: &AbsPath) -> io::Result { let version_string = String::from_utf8(version_string_utf8); version_string.map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e)) } + +#[test] +fn test_version_check() { + let path = paths::AbsPathBuf::assert(crate::proc_macro_test_dylib_path()); + let info = read_dylib_info(&path).unwrap(); + assert_eq!( + info.version_string, + crate::RUSTC_VERSION_STRING, + "sysroot ABI mismatch: dylib rustc version (read from .rustc section): {:?} != proc-macro-srv version (read from 'rustc --version'): {:?}", + info.version_string, + crate::RUSTC_VERSION_STRING, + ); +} diff --git a/crates/proc-macro-srv/src/proc_macros.rs b/crates/proc-macro-srv/src/proc_macros.rs index 631fd84aa2..b963a8a029 100644 --- a/crates/proc-macro-srv/src/proc_macros.rs +++ b/crates/proc-macro-srv/src/proc_macros.rs @@ -2,7 +2,7 @@ use libloading::Library; use proc_macro::bridge; -use proc_macro_api::{ProcMacroKind, RustCInfo}; +use proc_macro_api::ProcMacroKind; use crate::{dylib::LoadProcMacroDylibError, ProcMacroSrvSpan}; @@ -29,15 +29,15 @@ impl ProcMacros { pub(crate) fn from_lib( lib: &Library, symbol_name: String, - info: RustCInfo, + version_string: &str, ) -> Result { - if info.version_string == crate::RUSTC_VERSION_STRING { + if version_string == crate::RUSTC_VERSION_STRING { let macros = unsafe { lib.get::<&&[bridge::client::ProcMacro]>(symbol_name.as_bytes()) }?; return Ok(Self { exported_macros: macros.to_vec() }); } - Err(LoadProcMacroDylibError::AbiMismatch(info.version_string)) + Err(LoadProcMacroDylibError::AbiMismatch(version_string.to_owned())) } pub(crate) fn expand( @@ -117,16 +117,3 @@ impl ProcMacros { .collect() } } - -#[test] -fn test_version_check() { - let path = paths::AbsPathBuf::assert(crate::proc_macro_test_dylib_path()); - let info = proc_macro_api::read_dylib_info(&path).unwrap(); - assert_eq!( - info.version_string, - crate::RUSTC_VERSION_STRING, - "sysroot ABI mismatch: dylib rustc version (read from 
.rustc section): {:?} != proc-macro-srv version (read from 'rustc --version'): {:?}", - info.version_string, - crate::RUSTC_VERSION_STRING, - ); -} From e92646962a4580799796608f02e4c410b489cb7e Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sun, 30 Jun 2024 15:22:39 +0200 Subject: [PATCH 35/94] Move interior mutability into ProcMacroSrvProcess --- crates/proc-macro-api/src/lib.rs | 37 ++++++++-------------------- crates/proc-macro-api/src/process.rs | 37 ++++++++++++++++------------ 2 files changed, 31 insertions(+), 43 deletions(-) diff --git a/crates/proc-macro-api/src/lib.rs b/crates/proc-macro-api/src/lib.rs index 0de76689ab..c64ec77487 100644 --- a/crates/proc-macro-api/src/lib.rs +++ b/crates/proc-macro-api/src/lib.rs @@ -15,10 +15,8 @@ use indexmap::IndexSet; use paths::{AbsPath, AbsPathBuf}; use rustc_hash::FxHashMap; use span::Span; -use std::{ - fmt, io, - sync::{Arc, Mutex}, -}; +use std::{fmt, io, sync::Arc}; +use tt::SmolStr; use serde::{Deserialize, Serialize}; @@ -48,9 +46,7 @@ pub struct ProcMacroServer { /// /// That means that concurrent salsa requests may block each other when expanding proc macros, /// which is unfortunate, but simple and good enough for the time being. - /// - /// Therefore, we just wrap the `ProcMacroProcessSrv` in a mutex here. - process: Arc>, + process: Arc, path: AbsPathBuf, } @@ -70,9 +66,9 @@ impl MacroDylib { /// we share a single expander process for all macros. #[derive(Debug, Clone)] pub struct ProcMacro { - process: Arc>, + process: Arc, dylib_path: AbsPathBuf, - name: String, + name: SmolStr, kind: ProcMacroKind, } @@ -89,7 +85,6 @@ impl PartialEq for ProcMacro { #[derive(Clone, Debug)] pub struct ServerError { pub message: String, - // io::Error isn't Clone for some reason pub io: Option>, } @@ -104,10 +99,6 @@ impl fmt::Display for ServerError { } } -pub struct MacroPanic { - pub message: String, -} - impl ProcMacroServer { /// Spawns an external process as the proc macro server and returns a client connected to it. 
pub fn spawn( @@ -115,10 +106,7 @@ impl ProcMacroServer { env: &FxHashMap, ) -> io::Result { let process = ProcMacroProcessSrv::run(process_path, env)?; - Ok(ProcMacroServer { - process: Arc::new(Mutex::new(process)), - path: process_path.to_owned(), - }) + Ok(ProcMacroServer { process: Arc::new(process), path: process_path.to_owned() }) } pub fn path(&self) -> &AbsPath { @@ -127,15 +115,14 @@ impl ProcMacroServer { pub fn load_dylib(&self, dylib: MacroDylib) -> Result, ServerError> { let _p = tracing::info_span!("ProcMacroServer::load_dylib").entered(); - let macros = - self.process.lock().unwrap_or_else(|e| e.into_inner()).find_proc_macros(&dylib.path)?; + let macros = self.process.find_proc_macros(&dylib.path)?; match macros { Ok(macros) => Ok(macros .into_iter() .map(|(name, kind)| ProcMacro { process: self.process.clone(), - name, + name: name.into(), kind, dylib_path: dylib.path.clone(), }) @@ -163,7 +150,7 @@ impl ProcMacro { call_site: Span, mixed_site: Span, ) -> Result, PanicMessage>, ServerError> { - let version = self.process.lock().unwrap_or_else(|e| e.into_inner()).version(); + let version = self.process.version(); let current_dir = env.get("CARGO_MANIFEST_DIR"); let mut span_data_table = IndexSet::default(); @@ -190,11 +177,7 @@ impl ProcMacro { }, }; - let response = self - .process - .lock() - .unwrap_or_else(|e| e.into_inner()) - .send_task(msg::Request::ExpandMacro(Box::new(task)))?; + let response = self.process.send_task(msg::Request::ExpandMacro(Box::new(task)))?; match response { msg::Response::ExpandMacro(it) => { diff --git a/crates/proc-macro-api/src/process.rs b/crates/proc-macro-api/src/process.rs index 718a96dc80..1c37082bb2 100644 --- a/crates/proc-macro-api/src/process.rs +++ b/crates/proc-macro-api/src/process.rs @@ -3,7 +3,7 @@ use std::{ io::{self, BufRead, BufReader, Read, Write}, process::{Child, ChildStdin, ChildStdout, Command, Stdio}, - sync::Arc, + sync::{Arc, Mutex}, }; use paths::AbsPath; @@ -17,13 +17,20 @@ use crate::{ #[derive(Debug)] pub(crate) struct ProcMacroProcessSrv { + /// The state of the proc-macro server process, the protocol is currently strictly sequential + /// hence the lock on the state. + state: Mutex, + version: u32, + mode: SpanMode, +} + +#[derive(Debug)] +struct ProcessSrvState { process: Process, stdin: ChildStdin, stdout: BufReader, /// Populated when the server exits. 
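
Aside — a minimal sketch, with invented types, of the design choice above: only the strictly sequential request state sits behind a `Mutex`, so the server handle itself can be shared and its methods take `&self` instead of requiring an external lock around the whole object.

    use std::sync::Mutex;

    // Hypothetical server: only the request/response plumbing needs exclusion,
    // while immutable data stays lock-free.
    struct Server {
        version: u32,        // read freely, no lock needed
        state: Mutex<State>, // the strictly sequential part
    }

    struct State {
        counter: u64,
    }

    impl Server {
        // Note `&self`: the interior Mutex replaces `&mut self` / an outer Mutex.
        fn send(&self, request: &str) -> String {
            let state = &mut *self.state.lock().unwrap();
            state.counter += 1;
            format!("v{} #{}: {request}", self.version, state.counter)
        }
    }

    fn main() {
        let server = Server { version: 4, state: Mutex::new(State { counter: 0 }) };
        println!("{}", server.send("list-macros"));
        println!("{}", server.send("expand"));
    }
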
server_exited: Option, - version: u32, - mode: SpanMode, } impl ProcMacroProcessSrv { @@ -36,10 +43,7 @@ impl ProcMacroProcessSrv { let (stdin, stdout) = process.stdio().expect("couldn't access child stdio"); io::Result::Ok(ProcMacroProcessSrv { - process, - stdin, - stdout, - server_exited: None, + state: Mutex::new(ProcessSrvState { process, stdin, stdout, server_exited: None }), version: 0, mode: SpanMode::Id, }) @@ -76,7 +80,7 @@ impl ProcMacroProcessSrv { self.version } - pub(crate) fn version_check(&mut self) -> Result { + fn version_check(&self) -> Result { let request = Request::ApiVersionCheck {}; let response = self.send_task(request)?; @@ -86,7 +90,7 @@ impl ProcMacroProcessSrv { } } - fn enable_rust_analyzer_spans(&mut self) -> Result { + fn enable_rust_analyzer_spans(&self) -> Result { let request = Request::SetConfig(crate::msg::ServerConfig { span_mode: crate::msg::SpanMode::RustAnalyzer, }); @@ -99,7 +103,7 @@ impl ProcMacroProcessSrv { } pub(crate) fn find_proc_macros( - &mut self, + &self, dylib_path: &AbsPath, ) -> Result, String>, ServerError> { let request = Request::ListMacros { dylib_path: dylib_path.to_path_buf().into() }; @@ -112,20 +116,21 @@ impl ProcMacroProcessSrv { } } - pub(crate) fn send_task(&mut self, req: Request) -> Result { - if let Some(server_error) = &self.server_exited { + pub(crate) fn send_task(&self, req: Request) -> Result { + let state = &mut *self.state.lock().unwrap(); + if let Some(server_error) = &state.server_exited { return Err(server_error.clone()); } let mut buf = String::new(); - send_request(&mut self.stdin, &mut self.stdout, req, &mut buf).map_err(|e| { + send_request(&mut state.stdin, &mut state.stdout, req, &mut buf).map_err(|e| { if e.io.as_ref().map(|it| it.kind()) == Some(io::ErrorKind::BrokenPipe) { - match self.process.child.try_wait() { + match state.process.child.try_wait() { Ok(None) => e, Ok(Some(status)) => { let mut msg = String::new(); if !status.success() { - if let Some(stderr) = self.process.child.stderr.as_mut() { + if let Some(stderr) = state.process.child.stderr.as_mut() { _ = stderr.read_to_string(&mut msg); } } @@ -133,7 +138,7 @@ impl ProcMacroProcessSrv { message: format!("server exited with {status}: {msg}"), io: None, }; - self.server_exited = Some(server_error.clone()); + state.server_exited = Some(server_error.clone()); server_error } Err(_) => e, From 21a3d01875a71872ff4f1ae7e9b2ef2e14f8eec2 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sun, 30 Jun 2024 15:23:54 +0200 Subject: [PATCH 36/94] Remove inline `rust_2018_idioms, unused_lifetimes` lint warn, Cargo.toml already enforces this --- crates/base-db/src/lib.rs | 2 -- crates/cfg/src/lib.rs | 2 -- crates/flycheck/src/lib.rs | 2 -- crates/hir-def/src/lib.rs | 1 - crates/hir-expand/src/lib.rs | 1 - crates/hir-ty/src/lib.rs | 2 +- crates/hir/src/lib.rs | 1 - crates/ide-assists/src/lib.rs | 2 -- crates/ide-completion/src/lib.rs | 2 -- crates/ide-db/src/lib.rs | 2 -- crates/ide-diagnostics/src/lib.rs | 2 -- crates/ide-ssr/src/lib.rs | 2 -- crates/ide/src/lib.rs | 2 +- crates/limit/src/lib.rs | 2 -- crates/mbe/src/lib.rs | 2 -- crates/parser/src/lib.rs | 1 - crates/paths/src/lib.rs | 2 -- crates/proc-macro-api/src/lib.rs | 2 -- crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs | 2 +- crates/proc-macro-srv/proc-macro-test/src/lib.rs | 2 -- crates/proc-macro-srv/src/lib.rs | 1 - crates/profile/src/lib.rs | 2 -- crates/project-model/src/lib.rs | 2 -- crates/rust-analyzer/src/bin/main.rs | 1 - crates/rust-analyzer/src/lib.rs | 2 -- 
crates/rust-analyzer/tests/slow-tests/main.rs | 1 - crates/sourcegen/src/lib.rs | 2 -- crates/stdx/src/lib.rs | 2 -- crates/syntax/src/lib.rs | 1 - crates/test-utils/src/lib.rs | 1 - crates/text-edit/src/lib.rs | 2 -- crates/toolchain/src/lib.rs | 2 -- crates/tt/src/lib.rs | 2 -- crates/vfs-notify/src/lib.rs | 2 -- crates/vfs/src/lib.rs | 2 -- 35 files changed, 3 insertions(+), 58 deletions(-) diff --git a/crates/base-db/src/lib.rs b/crates/base-db/src/lib.rs index f5165ea8a7..96fbbc317d 100644 --- a/crates/base-db/src/lib.rs +++ b/crates/base-db/src/lib.rs @@ -1,7 +1,5 @@ //! base_db defines basic database traits. The concrete DB is defined by ide. -#![warn(rust_2018_idioms, unused_lifetimes)] - mod change; mod input; diff --git a/crates/cfg/src/lib.rs b/crates/cfg/src/lib.rs index 9a365889e6..8b30286a0a 100644 --- a/crates/cfg/src/lib.rs +++ b/crates/cfg/src/lib.rs @@ -1,7 +1,5 @@ //! cfg defines conditional compiling options, `cfg` attribute parser and evaluator -#![warn(rust_2018_idioms, unused_lifetimes)] - mod cfg_expr; mod dnf; #[cfg(test)] diff --git a/crates/flycheck/src/lib.rs b/crates/flycheck/src/lib.rs index 4584400e66..a257633516 100644 --- a/crates/flycheck/src/lib.rs +++ b/crates/flycheck/src/lib.rs @@ -6,8 +6,6 @@ // addition to `cargo check`. Either split it into 3 crates (one for test, one for check // and one common utilities) or change its name and docs to reflect the current state. -#![warn(rust_2018_idioms, unused_lifetimes)] - use std::{fmt, io, process::Command, time::Duration}; use crossbeam_channel::{never, select, unbounded, Receiver, Sender}; diff --git a/crates/hir-def/src/lib.rs b/crates/hir-def/src/lib.rs index 3fa98efbb5..211cdd35fd 100644 --- a/crates/hir-def/src/lib.rs +++ b/crates/hir-def/src/lib.rs @@ -7,7 +7,6 @@ //! Note that `hir_def` is a work in progress, so not all of the above is //! actually true. -#![warn(rust_2018_idioms, unused_lifetimes)] #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))] #[cfg(feature = "in-rust-tree")] diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index a7150cf308..e7c34e51e8 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -4,7 +4,6 @@ //! tree originates not from the text of some `FileId`, but from some macro //! expansion. #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))] -#![warn(rust_2018_idioms, unused_lifetimes)] pub mod attrs; pub mod builtin_attr_macro; diff --git a/crates/hir-ty/src/lib.rs b/crates/hir-ty/src/lib.rs index 07c0cf920f..bd650869bb 100644 --- a/crates/hir-ty/src/lib.rs +++ b/crates/hir-ty/src/lib.rs @@ -1,6 +1,6 @@ //! The type system. We currently use this to infer types for completion, hover //! information and various assists. -#![warn(rust_2018_idioms, unused_lifetimes)] + #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))] #[cfg(feature = "in-rust-tree")] diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index eea25ac18a..a9ae8d5649 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -17,7 +17,6 @@ //! from the ide with completions, hovers, etc. It is a (soft, internal) boundary: //! . -#![warn(rust_2018_idioms, unused_lifetimes)] #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))] #![recursion_limit = "512"] diff --git a/crates/ide-assists/src/lib.rs b/crates/ide-assists/src/lib.rs index abebdec1d1..685d230dc6 100644 --- a/crates/ide-assists/src/lib.rs +++ b/crates/ide-assists/src/lib.rs @@ -58,8 +58,6 @@ //! See also this post: //! 
-#![warn(rust_2018_idioms, unused_lifetimes)] - mod assist_config; mod assist_context; #[cfg(test)] diff --git a/crates/ide-completion/src/lib.rs b/crates/ide-completion/src/lib.rs index 8323b8f933..7150fe3f01 100644 --- a/crates/ide-completion/src/lib.rs +++ b/crates/ide-completion/src/lib.rs @@ -1,7 +1,5 @@ //! `completions` crate provides utilities for generating completions of user input. -#![warn(rust_2018_idioms, unused_lifetimes)] - mod completions; mod config; mod context; diff --git a/crates/ide-db/src/lib.rs b/crates/ide-db/src/lib.rs index 357209ceb0..8fac5baa57 100644 --- a/crates/ide-db/src/lib.rs +++ b/crates/ide-db/src/lib.rs @@ -2,8 +2,6 @@ //! //! It is mainly a `HirDatabase` for semantic analysis, plus a `SymbolsDatabase`, for fuzzy search. -#![warn(rust_2018_idioms, unused_lifetimes)] - mod apply_change; pub mod active_parameter; diff --git a/crates/ide-diagnostics/src/lib.rs b/crates/ide-diagnostics/src/lib.rs index a419f04bfa..c696faf270 100644 --- a/crates/ide-diagnostics/src/lib.rs +++ b/crates/ide-diagnostics/src/lib.rs @@ -23,8 +23,6 @@ //! There are also a couple of ad-hoc diagnostics implemented directly here, we //! don't yet have a great pattern for how to do them properly. -#![warn(rust_2018_idioms, unused_lifetimes)] - mod handlers { pub(crate) mod break_outside_of_loop; pub(crate) mod expected_function; diff --git a/crates/ide-ssr/src/lib.rs b/crates/ide-ssr/src/lib.rs index b5bf510aee..407433ed19 100644 --- a/crates/ide-ssr/src/lib.rs +++ b/crates/ide-ssr/src/lib.rs @@ -3,8 +3,6 @@ //! Allows searching the AST for code that matches one or more patterns and then replacing that code //! based on a template. -#![warn(rust_2018_idioms, unused_lifetimes)] - // Feature: Structural Search and Replace // // Search and replace with named wildcards that will match any expression, type, path, pattern or item. diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs index a2ac62341d..28ae0dce98 100644 --- a/crates/ide/src/lib.rs +++ b/crates/ide/src/lib.rs @@ -8,7 +8,7 @@ //! in this crate. // For proving that RootDatabase is RefUnwindSafe. -#![warn(rust_2018_idioms, unused_lifetimes)] + #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))] #![recursion_limit = "128"] diff --git a/crates/limit/src/lib.rs b/crates/limit/src/lib.rs index 27471db6a3..c1caeed2f8 100644 --- a/crates/limit/src/lib.rs +++ b/crates/limit/src/lib.rs @@ -1,7 +1,5 @@ //! limit defines a struct to enforce limits. -#![warn(rust_2018_idioms, unused_lifetimes)] - #[cfg(feature = "tracking")] use std::sync::atomic::AtomicUsize; diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs index 5f0598dfe0..b2fd91f6ce 100644 --- a/crates/mbe/src/lib.rs +++ b/crates/mbe/src/lib.rs @@ -6,8 +6,6 @@ //! The tests for this functionality live in another crate: //! `hir_def::macro_expansion_tests::mbe`. -#![warn(rust_2018_idioms, unused_lifetimes)] - mod expander; mod parser; mod syntax_bridge; diff --git a/crates/parser/src/lib.rs b/crates/parser/src/lib.rs index c2f9ea4772..738ed239a7 100644 --- a/crates/parser/src/lib.rs +++ b/crates/parser/src/lib.rs @@ -17,7 +17,6 @@ //! //! [`Parser`]: crate::parser::Parser -#![warn(rust_2018_idioms, unused_lifetimes)] #![allow(rustdoc::private_intra_doc_links)] #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))] diff --git a/crates/paths/src/lib.rs b/crates/paths/src/lib.rs index 33c3f83db5..885f071889 100644 --- a/crates/paths/src/lib.rs +++ b/crates/paths/src/lib.rs @@ -1,8 +1,6 @@ //! 
Thin wrappers around `std::path`/`camino::path`, distinguishing between absolute and //! relative paths. -#![warn(rust_2018_idioms, unused_lifetimes)] - use std::{ borrow::Borrow, ffi::OsStr, diff --git a/crates/proc-macro-api/src/lib.rs b/crates/proc-macro-api/src/lib.rs index c64ec77487..039509d87c 100644 --- a/crates/proc-macro-api/src/lib.rs +++ b/crates/proc-macro-api/src/lib.rs @@ -5,8 +5,6 @@ //! is used to provide basic infrastructure for communication between two //! processes: Client (RA itself), Server (the external program) -#![warn(rust_2018_idioms, unused_lifetimes)] - pub mod msg; mod process; diff --git a/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs b/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs index 5f8530d08c..a1707364f3 100644 --- a/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs +++ b/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs @@ -1,6 +1,6 @@ //! Exports a few trivial procedural macros for testing. -#![warn(rust_2018_idioms, unused_lifetimes)] + #![feature(proc_macro_span, proc_macro_def_site)] #![allow(clippy::all)] diff --git a/crates/proc-macro-srv/proc-macro-test/src/lib.rs b/crates/proc-macro-srv/proc-macro-test/src/lib.rs index 739c6ec6f4..2edf23a634 100644 --- a/crates/proc-macro-srv/proc-macro-test/src/lib.rs +++ b/crates/proc-macro-srv/proc-macro-test/src/lib.rs @@ -1,6 +1,4 @@ //! Exports a few trivial procedural macros for testing. -#![warn(rust_2018_idioms, unused_lifetimes)] - pub static PROC_MACRO_TEST_LOCATION: &str = include_str!(concat!(env!("OUT_DIR"), "/proc_macro_test_location.txt")); diff --git a/crates/proc-macro-srv/src/lib.rs b/crates/proc-macro-srv/src/lib.rs index 2472c1e311..b86dceeb50 100644 --- a/crates/proc-macro-srv/src/lib.rs +++ b/crates/proc-macro-srv/src/lib.rs @@ -13,7 +13,6 @@ #![cfg(any(feature = "sysroot-abi", rust_analyzer))] #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))] #![feature(proc_macro_internals, proc_macro_diagnostic, proc_macro_span)] -#![warn(rust_2018_idioms, unused_lifetimes)] #![allow(unreachable_pub, internal_features)] extern crate proc_macro; diff --git a/crates/profile/src/lib.rs b/crates/profile/src/lib.rs index 2ccb1cd06a..c1ae0d43d0 100644 --- a/crates/profile/src/lib.rs +++ b/crates/profile/src/lib.rs @@ -1,7 +1,5 @@ //! A collection of tools for profiling rust-analyzer. -#![warn(rust_2018_idioms, unused_lifetimes)] - #[cfg(feature = "cpu_profiler")] mod google_cpu_profiler; mod memory_usage; diff --git a/crates/project-model/src/lib.rs b/crates/project-model/src/lib.rs index 35643dcc02..92bf6a08f8 100644 --- a/crates/project-model/src/lib.rs +++ b/crates/project-model/src/lib.rs @@ -15,8 +15,6 @@ //! procedural macros). //! * Lowering of concrete model to a [`base_db::CrateGraph`] -#![warn(rust_2018_idioms, unused_lifetimes)] - mod build_scripts; mod cargo_workspace; mod cfg; diff --git a/crates/rust-analyzer/src/bin/main.rs b/crates/rust-analyzer/src/bin/main.rs index 774784f37b..1985093bc5 100644 --- a/crates/rust-analyzer/src/bin/main.rs +++ b/crates/rust-analyzer/src/bin/main.rs @@ -2,7 +2,6 @@ //! //! Based on cli flags, either spawns an LSP server, or runs a batch analysis -#![warn(rust_2018_idioms, unused_lifetimes)] #![allow(clippy::print_stdout, clippy::print_stderr)] #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))] diff --git a/crates/rust-analyzer/src/lib.rs b/crates/rust-analyzer/src/lib.rs index a398e98f09..a8e6657c24 100644 --- a/crates/rust-analyzer/src/lib.rs +++ b/crates/rust-analyzer/src/lib.rs @@ -9,8 +9,6 @@ //! 
The `cli` submodule implements some batch-processing analysis, primarily as //! a debugging aid. -#![warn(rust_2018_idioms, unused_lifetimes)] - pub mod cli; mod caps; diff --git a/crates/rust-analyzer/tests/slow-tests/main.rs b/crates/rust-analyzer/tests/slow-tests/main.rs index f886df60e6..51b3076b46 100644 --- a/crates/rust-analyzer/tests/slow-tests/main.rs +++ b/crates/rust-analyzer/tests/slow-tests/main.rs @@ -8,7 +8,6 @@ //! specific JSON shapes here -- there's little value in such tests, as we can't //! be sure without a real client anyway. -#![warn(rust_2018_idioms, unused_lifetimes)] #![allow(clippy::disallowed_types)] mod ratoml; diff --git a/crates/sourcegen/src/lib.rs b/crates/sourcegen/src/lib.rs index 829b4d5b0f..307c214c61 100644 --- a/crates/sourcegen/src/lib.rs +++ b/crates/sourcegen/src/lib.rs @@ -6,8 +6,6 @@ //! //! This crate contains utilities to make this kind of source-gen easy. -#![warn(rust_2018_idioms, unused_lifetimes)] - use std::{ fmt, fs, mem, path::{Path, PathBuf}, diff --git a/crates/stdx/src/lib.rs b/crates/stdx/src/lib.rs index 54f10df42a..76dbd42ff6 100644 --- a/crates/stdx/src/lib.rs +++ b/crates/stdx/src/lib.rs @@ -1,7 +1,5 @@ //! Missing batteries for standard libraries. -#![warn(rust_2018_idioms, unused_lifetimes)] - use std::io as sio; use std::process::Command; use std::{cmp::Ordering, ops, time::Instant}; diff --git a/crates/syntax/src/lib.rs b/crates/syntax/src/lib.rs index b5d816b0ce..177f48b986 100644 --- a/crates/syntax/src/lib.rs +++ b/crates/syntax/src/lib.rs @@ -20,7 +20,6 @@ //! [Swift]: #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))] -#![warn(rust_2018_idioms, unused_lifetimes)] #[cfg(not(feature = "in-rust-tree"))] extern crate ra_ap_rustc_lexer as rustc_lexer; diff --git a/crates/test-utils/src/lib.rs b/crates/test-utils/src/lib.rs index 53d4a28bc3..088817b835 100644 --- a/crates/test-utils/src/lib.rs +++ b/crates/test-utils/src/lib.rs @@ -6,7 +6,6 @@ //! * Extracting markup (mainly, `$0` markers) out of fixture strings. //! * marks (see the eponymous module). -#![warn(rust_2018_idioms, unused_lifetimes)] #![allow(clippy::print_stderr)] mod assert_linear; diff --git a/crates/text-edit/src/lib.rs b/crates/text-edit/src/lib.rs index e2ff373c1b..3efe0850d8 100644 --- a/crates/text-edit/src/lib.rs +++ b/crates/text-edit/src/lib.rs @@ -4,8 +4,6 @@ //! so `TextEdit` is the ultimate representation of the work done by //! rust-analyzer. -#![warn(rust_2018_idioms, unused_lifetimes)] - use itertools::Itertools; use std::cmp::max; pub use text_size::{TextRange, TextSize}; diff --git a/crates/toolchain/src/lib.rs b/crates/toolchain/src/lib.rs index 2591ed1691..a0603e35a0 100644 --- a/crates/toolchain/src/lib.rs +++ b/crates/toolchain/src/lib.rs @@ -1,7 +1,5 @@ //! Discovery of `cargo` & `rustc` executables. -#![warn(rust_2018_idioms, unused_lifetimes)] - use std::{env, iter, path::PathBuf}; use camino::{Utf8Path, Utf8PathBuf}; diff --git a/crates/tt/src/lib.rs b/crates/tt/src/lib.rs index b2a21b1913..369744d0e9 100644 --- a/crates/tt/src/lib.rs +++ b/crates/tt/src/lib.rs @@ -2,8 +2,6 @@ //! input and output) of macros. It closely mirrors `proc_macro` crate's //! `TokenTree`. -#![warn(rust_2018_idioms, unused_lifetimes)] - pub mod buffer; pub mod iter; diff --git a/crates/vfs-notify/src/lib.rs b/crates/vfs-notify/src/lib.rs index 1dbccab370..7e0f9af7af 100644 --- a/crates/vfs-notify/src/lib.rs +++ b/crates/vfs-notify/src/lib.rs @@ -7,8 +7,6 @@ //! Hopefully, one day a reliable file watching/walking crate appears on //! 
crates.io, and we can reduce this to trivial glue code. -#![warn(rust_2018_idioms, unused_lifetimes)] - use std::{ fs, path::{Component, Path}, diff --git a/crates/vfs/src/lib.rs b/crates/vfs/src/lib.rs index 18c8699dd4..b3aa6e2fe1 100644 --- a/crates/vfs/src/lib.rs +++ b/crates/vfs/src/lib.rs @@ -38,8 +38,6 @@ //! [`Handle`]: loader::Handle //! [`Entries`]: loader::Entry -#![warn(rust_2018_idioms, unused_lifetimes)] - mod anchored_path; pub mod file_set; pub mod loader; From 23b043a622c17c9c514a5cb1401a95aa52ae315e Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sun, 30 Jun 2024 15:28:31 +0200 Subject: [PATCH 37/94] Move proc-macro-srv RUSTC_VERSION fetching from include to env var --- crates/proc-macro-srv/build.rs | 16 ++-------------- crates/proc-macro-srv/src/lib.rs | 3 +-- 2 files changed, 3 insertions(+), 16 deletions(-) diff --git a/crates/proc-macro-srv/build.rs b/crates/proc-macro-srv/build.rs index 874d1c6cd3..44511d44b3 100644 --- a/crates/proc-macro-srv/build.rs +++ b/crates/proc-macro-srv/build.rs @@ -4,24 +4,12 @@ use std::{env, fs::File, io::Write, path::PathBuf, process::Command}; fn main() { - println!("cargo:rustc-check-cfg=cfg(rust_analyzer)"); - - let mut path = PathBuf::from(env::var_os("OUT_DIR").unwrap()); - path.push("rustc_version.rs"); - let mut f = File::create(&path).unwrap(); + println!("cargo::rustc-check-cfg=cfg(rust_analyzer)"); let rustc = env::var("RUSTC").expect("proc-macro-srv's build script expects RUSTC to be set"); let output = Command::new(rustc).arg("--version").output().expect("rustc --version must run"); let version_string = std::str::from_utf8(&output.stdout[..]) .expect("rustc --version output must be UTF-8") .trim(); - - write!( - f, - " - #[allow(dead_code)] - pub(crate) const RUSTC_VERSION_STRING: &str = {version_string:?}; - " - ) - .unwrap(); + println!("cargo::rustc-env=RUSTC_VERSION={}", version_string); } diff --git a/crates/proc-macro-srv/src/lib.rs b/crates/proc-macro-srv/src/lib.rs index b86dceeb50..8bc308fce9 100644 --- a/crates/proc-macro-srv/src/lib.rs +++ b/crates/proc-macro-srv/src/lib.rs @@ -48,8 +48,7 @@ use span::Span; use crate::server::TokenStream; -// see `build.rs` -include!(concat!(env!("OUT_DIR"), "/rustc_version.rs")); +pub const RUSTC_VERSION_STRING: &str = env!("RUSTC_VERSION"); trait ProcMacroSrvSpan: Copy { type Server: proc_macro::bridge::server::Server>; From 0b88dfc8b17468e4ca542cd90d5decefe7a35af8 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sun, 30 Jun 2024 15:34:43 +0200 Subject: [PATCH 38/94] Rename proc-macro-srv::server to server_impl --- crates/proc-macro-srv/src/dylib.rs | 2 +- crates/proc-macro-srv/src/lib.rs | 56 +++++++++---------- crates/proc-macro-srv/src/proc_macros.rs | 9 +-- .../src/{server.rs => server_impl.rs} | 0 .../rust_analyzer_span.rs | 4 +- .../src/{server => server_impl}/symbol.rs | 0 .../src/{server => server_impl}/token_id.rs | 4 +- .../{server => server_impl}/token_stream.rs | 0 crates/proc-macro-srv/src/tests/utils.rs | 8 +-- 9 files changed, 42 insertions(+), 41 deletions(-) rename crates/proc-macro-srv/src/{server.rs => server_impl.rs} (100%) rename crates/proc-macro-srv/src/{server => server_impl}/rust_analyzer_span.rs (99%) rename crates/proc-macro-srv/src/{server => server_impl}/symbol.rs (100%) rename crates/proc-macro-srv/src/{server => server_impl}/token_id.rs (99%) rename crates/proc-macro-srv/src/{server => server_impl}/token_stream.rs (100%) diff --git a/crates/proc-macro-srv/src/dylib.rs b/crates/proc-macro-srv/src/dylib.rs index 474f00a574..7fa560fb85 100644 --- 
a/crates/proc-macro-srv/src/dylib.rs +++ b/crates/proc-macro-srv/src/dylib.rs @@ -2,13 +2,13 @@ mod version; +use proc_macro::bridge; use std::{fmt, fs::File, io}; use libloading::Library; use memmap2::Mmap; use object::Object; use paths::{AbsPath, Utf8Path, Utf8PathBuf}; -use proc_macro::bridge; use proc_macro_api::ProcMacroKind; use crate::ProcMacroSrvSpan; diff --git a/crates/proc-macro-srv/src/lib.rs b/crates/proc-macro-srv/src/lib.rs index 8bc308fce9..3d309155d5 100644 --- a/crates/proc-macro-srv/src/lib.rs +++ b/crates/proc-macro-srv/src/lib.rs @@ -26,7 +26,7 @@ extern crate rustc_lexer; mod dylib; mod proc_macros; -mod server; +mod server_impl; use std::{ collections::{hash_map::Entry, HashMap}, @@ -46,36 +46,10 @@ use proc_macro_api::{ }; use span::Span; -use crate::server::TokenStream; +use crate::server_impl::TokenStream; pub const RUSTC_VERSION_STRING: &str = env!("RUSTC_VERSION"); -trait ProcMacroSrvSpan: Copy { - type Server: proc_macro::bridge::server::Server>; - fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server; -} - -impl ProcMacroSrvSpan for TokenId { - type Server = server::token_id::TokenIdServer; - - fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server { - Self::Server { interner: &server::SYMBOL_INTERNER, call_site, def_site, mixed_site } - } -} -impl ProcMacroSrvSpan for Span { - type Server = server::rust_analyzer_span::RaSpanServer; - fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server { - Self::Server { - interner: &server::SYMBOL_INTERNER, - call_site, - def_site, - mixed_site, - tracked_env_vars: Default::default(), - tracked_paths: Default::default(), - } - } -} - #[derive(Default)] pub struct ProcMacroSrv { expanders: HashMap<(Utf8PathBuf, SystemTime), dylib::Expander>, @@ -167,6 +141,32 @@ impl ProcMacroSrv { } } +trait ProcMacroSrvSpan: Copy { + type Server: proc_macro::bridge::server::Server>; + fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server; +} + +impl ProcMacroSrvSpan for TokenId { + type Server = server_impl::token_id::TokenIdServer; + + fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server { + Self::Server { interner: &server_impl::SYMBOL_INTERNER, call_site, def_site, mixed_site } + } +} +impl ProcMacroSrvSpan for Span { + type Server = server_impl::rust_analyzer_span::RaSpanServer; + fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server { + Self::Server { + interner: &server_impl::SYMBOL_INTERNER, + call_site, + def_site, + mixed_site, + tracked_env_vars: Default::default(), + tracked_paths: Default::default(), + } + } +} + fn expand_id( task: msg::ExpandMacro, expander: &dylib::Expander, diff --git a/crates/proc-macro-srv/src/proc_macros.rs b/crates/proc-macro-srv/src/proc_macros.rs index b963a8a029..c5c3dff9db 100644 --- a/crates/proc-macro-srv/src/proc_macros.rs +++ b/crates/proc-macro-srv/src/proc_macros.rs @@ -49,11 +49,12 @@ impl ProcMacros { call_site: S, mixed_site: S, ) -> Result, crate::PanicMessage> { - let parsed_body = crate::server::TokenStream::with_subtree(macro_body); + let parsed_body = crate::server_impl::TokenStream::with_subtree(macro_body); - let parsed_attributes = attributes.map_or_else(crate::server::TokenStream::new, |attr| { - crate::server::TokenStream::with_subtree(attr) - }); + let parsed_attributes = attributes + .map_or_else(crate::server_impl::TokenStream::new, |attr| { + crate::server_impl::TokenStream::with_subtree(attr) + }); for 
proc_macro in &self.exported_macros { match proc_macro { diff --git a/crates/proc-macro-srv/src/server.rs b/crates/proc-macro-srv/src/server_impl.rs similarity index 100% rename from crates/proc-macro-srv/src/server.rs rename to crates/proc-macro-srv/src/server_impl.rs diff --git a/crates/proc-macro-srv/src/server/rust_analyzer_span.rs b/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs similarity index 99% rename from crates/proc-macro-srv/src/server/rust_analyzer_span.rs rename to crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs index 0350bde412..bb174ba1b2 100644 --- a/crates/proc-macro-srv/src/server/rust_analyzer_span.rs +++ b/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs @@ -14,7 +14,7 @@ use proc_macro::bridge::{self, server}; use span::{Span, FIXUP_ERASED_FILE_AST_ID_MARKER}; use tt::{TextRange, TextSize}; -use crate::server::{ +use crate::server_impl::{ delim_to_external, delim_to_internal, literal_with_stringify_parts, token_stream::TokenStreamBuilder, Symbol, SymbolInternerRef, SYMBOL_INTERNER, }; @@ -29,7 +29,7 @@ mod tt { pub type Ident = ::tt::Ident; } -type TokenStream = crate::server::TokenStream; +type TokenStream = crate::server_impl::TokenStream; #[derive(Clone)] pub struct SourceFile; diff --git a/crates/proc-macro-srv/src/server/symbol.rs b/crates/proc-macro-srv/src/server_impl/symbol.rs similarity index 100% rename from crates/proc-macro-srv/src/server/symbol.rs rename to crates/proc-macro-srv/src/server_impl/symbol.rs diff --git a/crates/proc-macro-srv/src/server/token_id.rs b/crates/proc-macro-srv/src/server_impl/token_id.rs similarity index 99% rename from crates/proc-macro-srv/src/server/token_id.rs rename to crates/proc-macro-srv/src/server_impl/token_id.rs index ad7bd954cf..12edacbe39 100644 --- a/crates/proc-macro-srv/src/server/token_id.rs +++ b/crates/proc-macro-srv/src/server_impl/token_id.rs @@ -7,7 +7,7 @@ use std::{ use proc_macro::bridge::{self, server}; -use crate::server::{ +use crate::server_impl::{ delim_to_external, delim_to_internal, literal_with_stringify_parts, token_stream::TokenStreamBuilder, Symbol, SymbolInternerRef, SYMBOL_INTERNER, }; @@ -31,7 +31,7 @@ type Spacing = tt::Spacing; #[allow(unused)] type Literal = tt::Literal; type Span = tt::TokenId; -type TokenStream = crate::server::TokenStream; +type TokenStream = crate::server_impl::TokenStream; #[derive(Clone)] pub struct SourceFile; diff --git a/crates/proc-macro-srv/src/server/token_stream.rs b/crates/proc-macro-srv/src/server_impl/token_stream.rs similarity index 100% rename from crates/proc-macro-srv/src/server/token_stream.rs rename to crates/proc-macro-srv/src/server_impl/token_stream.rs diff --git a/crates/proc-macro-srv/src/tests/utils.rs b/crates/proc-macro-srv/src/tests/utils.rs index 6050bc9e36..0579c426d0 100644 --- a/crates/proc-macro-srv/src/tests/utils.rs +++ b/crates/proc-macro-srv/src/tests/utils.rs @@ -7,8 +7,8 @@ use tt::TextRange; use crate::{dylib, proc_macro_test_dylib_path, ProcMacroSrv}; -fn parse_string(call_site: TokenId, src: &str) -> crate::server::TokenStream { - crate::server::TokenStream::with_subtree( +fn parse_string(call_site: TokenId, src: &str) -> crate::server_impl::TokenStream { + crate::server_impl::TokenStream::with_subtree( mbe::parse_to_token_tree_static_span(call_site, src).unwrap(), ) } @@ -17,8 +17,8 @@ fn parse_string_spanned( anchor: SpanAnchor, call_site: SyntaxContextId, src: &str, -) -> crate::server::TokenStream { - crate::server::TokenStream::with_subtree( +) -> crate::server_impl::TokenStream { + 
crate::server_impl::TokenStream::with_subtree( mbe::parse_to_token_tree(anchor, call_site, src).unwrap(), ) } From 678420e66ac7d30709d3b172cbe201693f4dbc6c Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sun, 30 Jun 2024 15:37:00 +0200 Subject: [PATCH 39/94] Move proc-macro-test test path fetching from include to env var --- crates/proc-macro-srv/build.rs | 2 +- crates/proc-macro-srv/proc-macro-test/build.rs | 3 +-- crates/proc-macro-srv/proc-macro-test/src/lib.rs | 3 +-- 3 files changed, 3 insertions(+), 5 deletions(-) diff --git a/crates/proc-macro-srv/build.rs b/crates/proc-macro-srv/build.rs index 44511d44b3..9a17cfc9f3 100644 --- a/crates/proc-macro-srv/build.rs +++ b/crates/proc-macro-srv/build.rs @@ -1,7 +1,7 @@ //! Determine rustc version `proc-macro-srv` (and thus the sysroot ABI) is //! build with and make it accessible at runtime for ABI selection. -use std::{env, fs::File, io::Write, path::PathBuf, process::Command}; +use std::{env, process::Command}; fn main() { println!("cargo::rustc-check-cfg=cfg(rust_analyzer)"); diff --git a/crates/proc-macro-srv/proc-macro-test/build.rs b/crates/proc-macro-srv/proc-macro-test/build.rs index 6a0ae362d8..fe1fce3da7 100644 --- a/crates/proc-macro-srv/proc-macro-test/build.rs +++ b/crates/proc-macro-srv/proc-macro-test/build.rs @@ -121,6 +121,5 @@ fn main() { // This file is under `target_dir` and is already under `OUT_DIR`. let artifact_path = artifact_path.expect("no dylib for proc-macro-test-impl found"); - let info_path = out_dir.join("proc_macro_test_location.txt"); - fs::write(info_path, artifact_path.to_str().unwrap()).unwrap(); + println!("cargo::rustc-env=PROC_MACRO_TEST_LOCATION={}", artifact_path.display()); } diff --git a/crates/proc-macro-srv/proc-macro-test/src/lib.rs b/crates/proc-macro-srv/proc-macro-test/src/lib.rs index 2edf23a634..6464adb2ca 100644 --- a/crates/proc-macro-srv/proc-macro-test/src/lib.rs +++ b/crates/proc-macro-srv/proc-macro-test/src/lib.rs @@ -1,4 +1,3 @@ //! Exports a few trivial procedural macros for testing. 
-pub static PROC_MACRO_TEST_LOCATION: &str = - include_str!(concat!(env!("OUT_DIR"), "/proc_macro_test_location.txt")); +pub static PROC_MACRO_TEST_LOCATION: &str = env!("PROC_MACRO_TEST_LOCATION"); From 2fb38ceb66d5b0f4a37abd7ed4f9644a16f4dfa5 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sun, 30 Jun 2024 16:05:46 +0200 Subject: [PATCH 40/94] Faster env snapshotting in proc-macro-srv --- crates/proc-macro-api/src/lib.rs | 31 ++-- crates/proc-macro-api/src/msg.rs | 44 +++--- crates/proc-macro-srv-cli/src/main.rs | 4 +- crates/proc-macro-srv/src/lib.rs | 177 ++++++++++++----------- crates/proc-macro-srv/src/proc_macros.rs | 3 +- crates/proc-macro-srv/src/tests/utils.rs | 5 +- 6 files changed, 147 insertions(+), 117 deletions(-) diff --git a/crates/proc-macro-api/src/lib.rs b/crates/proc-macro-api/src/lib.rs index 039509d87c..9ba6a09cb7 100644 --- a/crates/proc-macro-api/src/lib.rs +++ b/crates/proc-macro-api/src/lib.rs @@ -156,23 +156,26 @@ impl ProcMacro { let call_site = span_data_table.insert_full(call_site).0; let mixed_site = span_data_table.insert_full(mixed_site).0; let task = ExpandMacro { - macro_body: FlatTree::new(subtree, version, &mut span_data_table), - macro_name: self.name.to_string(), - attributes: attr.map(|subtree| FlatTree::new(subtree, version, &mut span_data_table)), + data: msg::ExpandMacroData { + macro_body: FlatTree::new(subtree, version, &mut span_data_table), + macro_name: self.name.to_string(), + attributes: attr + .map(|subtree| FlatTree::new(subtree, version, &mut span_data_table)), + has_global_spans: ExpnGlobals { + serialize: version >= HAS_GLOBAL_SPANS, + def_site, + call_site, + mixed_site, + }, + span_data_table: if version >= RUST_ANALYZER_SPAN_SUPPORT { + serialize_span_data_index_map(&span_data_table) + } else { + Vec::new() + }, + }, lib: self.dylib_path.to_path_buf().into(), env: env.into(), current_dir, - has_global_spans: ExpnGlobals { - serialize: version >= HAS_GLOBAL_SPANS, - def_site, - call_site, - mixed_site, - }, - span_data_table: if version >= RUST_ANALYZER_SPAN_SUPPORT { - serialize_span_data_index_map(&span_data_table) - } else { - Vec::new() - }, }; let response = self.process.send_task(msg::Request::ExpandMacro(Box::new(task)))?; diff --git a/crates/proc-macro-api/src/msg.rs b/crates/proc-macro-api/src/msg.rs index ad0e1f187b..49ebedba7d 100644 --- a/crates/proc-macro-api/src/msg.rs +++ b/crates/proc-macro-api/src/msg.rs @@ -72,6 +72,16 @@ pub struct PanicMessage(pub String); #[derive(Debug, Serialize, Deserialize)] pub struct ExpandMacro { + pub lib: Utf8PathBuf, + /// Environment variables to set during macro expansion. + pub env: Vec<(String, String)>, + pub current_dir: Option, + #[serde(flatten)] + pub data: ExpandMacroData, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct ExpandMacroData { /// Argument of macro call. /// /// In custom derive this will be a struct or enum; in attribute-like macro - underlying @@ -86,13 +96,6 @@ pub struct ExpandMacro { /// Possible attributes for the attribute-like macros. pub attributes: Option, - - pub lib: Utf8PathBuf, - - /// Environment variables to set during macro expansion. 
- pub env: Vec<(String, String)>, - - pub current_dir: Option, /// marker for serde skip stuff #[serde(skip_serializing_if = "ExpnGlobals::skip_serializing_if")] #[serde(default)] @@ -268,25 +271,30 @@ mod tests { let tt = fixture_token_tree(); let mut span_data_table = Default::default(); let task = ExpandMacro { - macro_body: FlatTree::new(&tt, CURRENT_API_VERSION, &mut span_data_table), - macro_name: Default::default(), - attributes: None, + data: ExpandMacroData { + macro_body: FlatTree::new(&tt, CURRENT_API_VERSION, &mut span_data_table), + macro_name: Default::default(), + attributes: None, + has_global_spans: ExpnGlobals { + serialize: true, + def_site: 0, + call_site: 0, + mixed_site: 0, + }, + span_data_table: Vec::new(), + }, lib: Utf8PathBuf::from_path_buf(std::env::current_dir().unwrap()).unwrap(), env: Default::default(), current_dir: Default::default(), - has_global_spans: ExpnGlobals { - serialize: true, - def_site: 0, - call_site: 0, - mixed_site: 0, - }, - span_data_table: Vec::new(), }; let json = serde_json::to_string(&task).unwrap(); // println!("{}", json); let back: ExpandMacro = serde_json::from_str(&json).unwrap(); - assert_eq!(tt, back.macro_body.to_subtree_resolved(CURRENT_API_VERSION, &span_data_table)); + assert_eq!( + tt, + back.data.macro_body.to_subtree_resolved(CURRENT_API_VERSION, &span_data_table) + ); } } diff --git a/crates/proc-macro-srv-cli/src/main.rs b/crates/proc-macro-srv-cli/src/main.rs index df0ae3171f..f9689712ad 100644 --- a/crates/proc-macro-srv-cli/src/main.rs +++ b/crates/proc-macro-srv-cli/src/main.rs @@ -33,12 +33,14 @@ fn run() -> io::Result<()> { #[cfg(any(feature = "sysroot-abi", rust_analyzer))] fn run() -> io::Result<()> { use proc_macro_api::msg::{self, Message}; + use proc_macro_srv::EnvSnapshot; let read_request = |buf: &mut String| msg::Request::read(&mut io::stdin().lock(), buf); let write_response = |msg: msg::Response| msg.write(&mut io::stdout().lock()); - let mut srv = proc_macro_srv::ProcMacroSrv::default(); + let env = EnvSnapshot::new(); + let mut srv = proc_macro_srv::ProcMacroSrv::new(&env); let mut buf = String::new(); while let Some(req) = read_request(&mut buf)? 
{ diff --git a/crates/proc-macro-srv/src/lib.rs b/crates/proc-macro-srv/src/lib.rs index 3d309155d5..34851ee0be 100644 --- a/crates/proc-macro-srv/src/lib.rs +++ b/crates/proc-macro-srv/src/lib.rs @@ -32,7 +32,9 @@ use std::{ collections::{hash_map::Entry, HashMap}, env, ffi::OsString, - fs, thread, + fs, + path::{Path, PathBuf}, + thread, time::SystemTime, }; @@ -50,15 +52,21 @@ use crate::server_impl::TokenStream; pub const RUSTC_VERSION_STRING: &str = env!("RUSTC_VERSION"); -#[derive(Default)] -pub struct ProcMacroSrv { +pub struct ProcMacroSrv<'env> { expanders: HashMap<(Utf8PathBuf, SystemTime), dylib::Expander>, span_mode: SpanMode, + env: &'env EnvSnapshot, +} + +impl<'env> ProcMacroSrv<'env> { + pub fn new(env: &'env EnvSnapshot) -> Self { + Self { expanders: Default::default(), span_mode: Default::default(), env } + } } const EXPANDER_STACK_SIZE: usize = 8 * 1024 * 1024; -impl ProcMacroSrv { +impl<'env> ProcMacroSrv<'env> { pub fn set_span_mode(&mut self, span_mode: SpanMode) { self.span_mode = span_mode; } @@ -69,52 +77,24 @@ impl ProcMacroSrv { pub fn expand( &mut self, - task: msg::ExpandMacro, + msg::ExpandMacro { lib, env, current_dir, data }: msg::ExpandMacro, ) -> Result<(msg::FlatTree, Vec), msg::PanicMessage> { let span_mode = self.span_mode; - let expander = self.expander(task.lib.as_ref()).map_err(|err| { + let snapped_env = self.env; + let expander = self.expander(lib.as_ref()).map_err(|err| { debug_assert!(false, "should list macros before asking to expand"); msg::PanicMessage(format!("failed to load macro: {err}")) })?; - let prev_env = EnvSnapshot::new(); - for (k, v) in &task.env { - env::set_var(k, v); - } - let prev_working_dir = match &task.current_dir { - Some(dir) => { - let prev_working_dir = std::env::current_dir().ok(); - if let Err(err) = std::env::set_current_dir(dir) { - eprintln!("Failed to set the current working dir to {dir}. Error: {err:?}") - } - prev_working_dir - } - None => None, - }; - - let ExpnGlobals { def_site, call_site, mixed_site, .. } = task.has_global_spans; + let prev_env = EnvChange::apply(snapped_env, env, current_dir.as_ref().map(<_>::as_ref)); let result = match span_mode { - SpanMode::Id => { - expand_id(task, expander, def_site, call_site, mixed_site).map(|it| (it, vec![])) - } - SpanMode::RustAnalyzer => { - expand_ra_span(task, expander, def_site, call_site, mixed_site) - } + SpanMode::Id => expand_id(data, expander).map(|it| (it, vec![])), + SpanMode::RustAnalyzer => expand_ra_span(data, expander), }; prev_env.rollback(); - if let Some(dir) = prev_working_dir { - if let Err(err) = std::env::set_current_dir(&dir) { - eprintln!( - "Failed to set the current working dir to {}. 
Error: {:?}", - dir.display(), - err - ) - } - } - result.map_err(msg::PanicMessage) } @@ -168,32 +148,28 @@ impl ProcMacroSrvSpan for Span { } fn expand_id( - task: msg::ExpandMacro, + msg::ExpandMacroData { + macro_body, + macro_name, + attributes, + has_global_spans: ExpnGlobals { serialize: _, def_site, call_site, mixed_site }, + span_data_table: _, + }: msg::ExpandMacroData, expander: &dylib::Expander, - def_site: usize, - call_site: usize, - mixed_site: usize, ) -> Result { let def_site = TokenId(def_site as u32); let call_site = TokenId(call_site as u32); let mixed_site = TokenId(mixed_site as u32); - let macro_body = task.macro_body.to_subtree_unresolved(CURRENT_API_VERSION); - let attributes = task.attributes.map(|it| it.to_subtree_unresolved(CURRENT_API_VERSION)); + let macro_body = macro_body.to_subtree_unresolved(CURRENT_API_VERSION); + let attributes = attributes.map(|it| it.to_subtree_unresolved(CURRENT_API_VERSION)); let result = thread::scope(|s| { let thread = thread::Builder::new() .stack_size(EXPANDER_STACK_SIZE) - .name(task.macro_name.clone()) + .name(macro_name.clone()) .spawn_scoped(s, || { expander - .expand( - &task.macro_name, - macro_body, - attributes, - def_site, - call_site, - mixed_site, - ) + .expand(¯o_name, macro_body, attributes, def_site, call_site, mixed_site) .map(|it| msg::FlatTree::new_raw(&it, CURRENT_API_VERSION)) }); let res = match thread { @@ -210,35 +186,31 @@ fn expand_id( } fn expand_ra_span( - task: msg::ExpandMacro, + msg::ExpandMacroData { + macro_body, + macro_name, + attributes, + has_global_spans: ExpnGlobals { serialize: _, def_site, call_site, mixed_site }, + span_data_table, + }: msg::ExpandMacroData, expander: &dylib::Expander, - def_site: usize, - call_site: usize, - mixed_site: usize, ) -> Result<(msg::FlatTree, Vec), String> { - let mut span_data_table = deserialize_span_data_index_map(&task.span_data_table); + let mut span_data_table = deserialize_span_data_index_map(&span_data_table); let def_site = span_data_table[def_site]; let call_site = span_data_table[call_site]; let mixed_site = span_data_table[mixed_site]; - let macro_body = task.macro_body.to_subtree_resolved(CURRENT_API_VERSION, &span_data_table); + let macro_body = macro_body.to_subtree_resolved(CURRENT_API_VERSION, &span_data_table); let attributes = - task.attributes.map(|it| it.to_subtree_resolved(CURRENT_API_VERSION, &span_data_table)); + attributes.map(|it| it.to_subtree_resolved(CURRENT_API_VERSION, &span_data_table)); let result = thread::scope(|s| { let thread = thread::Builder::new() .stack_size(EXPANDER_STACK_SIZE) - .name(task.macro_name.clone()) + .name(macro_name.clone()) .spawn_scoped(s, || { expander - .expand( - &task.macro_name, - macro_body, - attributes, - def_site, - call_site, - mixed_site, - ) + .expand(¯o_name, macro_body, attributes, def_site, call_site, mixed_site) .map(|it| { ( msg::FlatTree::new(&it, CURRENT_API_VERSION, &mut span_data_table), @@ -269,31 +241,74 @@ impl PanicMessage { } } -struct EnvSnapshot { +pub struct EnvSnapshot { vars: HashMap, } impl EnvSnapshot { - fn new() -> EnvSnapshot { + pub fn new() -> EnvSnapshot { EnvSnapshot { vars: env::vars_os().collect() } } +} + +struct EnvChange<'snap> { + changed_vars: Vec, + prev_working_dir: Option, + snap: &'snap EnvSnapshot, +} + +impl<'snap> EnvChange<'snap> { + fn apply( + snap: &'snap EnvSnapshot, + new_vars: Vec<(String, String)>, + current_dir: Option<&Path>, + ) -> EnvChange<'snap> { + let prev_working_dir = match current_dir { + Some(dir) => { + let prev_working_dir = 
std::env::current_dir().ok(); + if let Err(err) = std::env::set_current_dir(dir) { + eprintln!( + "Failed to set the current working dir to {}. Error: {err:?}", + dir.display() + ) + } + prev_working_dir + } + None => None, + }; + EnvChange { + snap, + changed_vars: new_vars + .into_iter() + .map(|(k, v)| { + env::set_var(&k, v); + k + }) + .collect(), + prev_working_dir, + } + } fn rollback(self) {} } -impl Drop for EnvSnapshot { +impl Drop for EnvChange<'_> { fn drop(&mut self) { - for (name, value) in env::vars_os() { - let old_value = self.vars.remove(&name); - if old_value != Some(value) { - match old_value { - None => env::remove_var(name), - Some(old_value) => env::set_var(name, old_value), - } + for name in self.changed_vars.drain(..) { + match self.snap.vars.get::(name.as_ref()) { + Some(prev_val) => env::set_var(name, prev_val), + None => env::remove_var(name), } } - for (name, old_value) in self.vars.drain() { - env::set_var(name, old_value) + + if let Some(dir) = &self.prev_working_dir { + if let Err(err) = std::env::set_current_dir(&dir) { + eprintln!( + "Failed to set the current working dir to {}. Error: {:?}", + dir.display(), + err + ) + } } } } diff --git a/crates/proc-macro-srv/src/proc_macros.rs b/crates/proc-macro-srv/src/proc_macros.rs index c5c3dff9db..d48c5b30de 100644 --- a/crates/proc-macro-srv/src/proc_macros.rs +++ b/crates/proc-macro-srv/src/proc_macros.rs @@ -1,9 +1,10 @@ //! Proc macro ABI -use libloading::Library; use proc_macro::bridge; use proc_macro_api::ProcMacroKind; +use libloading::Library; + use crate::{dylib::LoadProcMacroDylibError, ProcMacroSrvSpan}; pub(crate) struct ProcMacros { diff --git a/crates/proc-macro-srv/src/tests/utils.rs b/crates/proc-macro-srv/src/tests/utils.rs index 0579c426d0..03b1117a5b 100644 --- a/crates/proc-macro-srv/src/tests/utils.rs +++ b/crates/proc-macro-srv/src/tests/utils.rs @@ -5,7 +5,7 @@ use proc_macro_api::msg::TokenId; use span::{ErasedFileAstId, FileId, Span, SpanAnchor, SyntaxContextId}; use tt::TextRange; -use crate::{dylib, proc_macro_test_dylib_path, ProcMacroSrv}; +use crate::{dylib, proc_macro_test_dylib_path, EnvSnapshot, ProcMacroSrv}; fn parse_string(call_site: TokenId, src: &str) -> crate::server_impl::TokenStream { crate::server_impl::TokenStream::with_subtree( @@ -96,7 +96,8 @@ fn assert_expand_impl( pub(crate) fn list() -> Vec { let dylib_path = proc_macro_test_dylib_path(); - let mut srv = ProcMacroSrv::default(); + let env = EnvSnapshot::new(); + let mut srv = ProcMacroSrv::new(&env); let res = srv.list_macros(&dylib_path).unwrap(); res.into_iter().map(|(name, kind)| format!("{name} [{kind:?}]")).collect() } From b0c4bc4972e46e22271227893d72e902e78313c6 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sun, 30 Jun 2024 16:26:39 +0200 Subject: [PATCH 41/94] Fix proc-macro-test build script --- .../proc-macro-srv/proc-macro-test/build.rs | 3 +-- crates/proc-macro-srv/src/dylib.rs | 24 ++++++++++++------- 2 files changed, 17 insertions(+), 10 deletions(-) diff --git a/crates/proc-macro-srv/proc-macro-test/build.rs b/crates/proc-macro-srv/proc-macro-test/build.rs index fe1fce3da7..dd4709a1f7 100644 --- a/crates/proc-macro-srv/proc-macro-test/build.rs +++ b/crates/proc-macro-srv/proc-macro-test/build.rs @@ -30,8 +30,7 @@ fn main() { if !has_features { println!("proc-macro-test testing only works on nightly toolchains"); - let info_path = out_dir.join("proc_macro_test_location.txt"); - fs::File::create(info_path).unwrap(); + println!("cargo::rustc-env=PROC_MACRO_TEST_LOCATION=\"\""); return; } diff --git 
a/crates/proc-macro-srv/src/dylib.rs b/crates/proc-macro-srv/src/dylib.rs index 7fa560fb85..c2309cb3d1 100644 --- a/crates/proc-macro-srv/src/dylib.rs +++ b/crates/proc-macro-srv/src/dylib.rs @@ -133,24 +133,32 @@ impl ProcMacroLibraryLibloading { } } -pub struct Expander { +pub(crate) struct Expander { inner: ProcMacroLibraryLibloading, + path: Utf8PathBuf, +} + +impl Drop for Expander { + fn drop(&mut self) { + #[cfg(windows)] + std::fs::remove_file(&self.path).ok(); + } } impl Expander { - pub fn new(lib: &Utf8Path) -> Result { + pub(crate) fn new(lib: &Utf8Path) -> Result { // Some libraries for dynamic loading require canonicalized path even when it is // already absolute let lib = lib.canonicalize_utf8()?; - let lib = ensure_file_with_lock_free_access(&lib)?; + let path = ensure_file_with_lock_free_access(&lib)?; - let library = ProcMacroLibraryLibloading::open(lib.as_ref())?; + let library = ProcMacroLibraryLibloading::open(path.as_ref())?; - Ok(Expander { inner: library }) + Ok(Expander { inner: library, path }) } - pub fn expand( + pub(crate) fn expand( &self, macro_name: &str, macro_body: tt::Subtree, @@ -169,7 +177,7 @@ impl Expander { result.map_err(|e| e.into_string().unwrap_or_default()) } - pub fn list_macros(&self) -> Vec<(String, ProcMacroKind)> { + pub(crate) fn list_macros(&self) -> Vec<(String, ProcMacroKind)> { self.inner.proc_macros.list_macros() } } @@ -198,7 +206,7 @@ fn ensure_file_with_lock_free_access(path: &Utf8Path) -> io::Result unique_name.push_str(file_name); to.push(unique_name); - std::fs::copy(path, &to).unwrap(); + std::fs::copy(path, &to)?; Ok(to) } From 9d09bc06198d10c5a27f3cb025c85325568e1f86 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sun, 30 Jun 2024 16:33:30 +0200 Subject: [PATCH 42/94] Actual dummy server for the server cli --- crates/proc-macro-srv-cli/src/main.rs | 30 +++++++++++++++++-- .../proc-macro-srv/proc-macro-test/build.rs | 2 +- 2 files changed, 29 insertions(+), 3 deletions(-) diff --git a/crates/proc-macro-srv-cli/src/main.rs b/crates/proc-macro-srv-cli/src/main.rs index f9689712ad..f6f6fdc864 100644 --- a/crates/proc-macro-srv-cli/src/main.rs +++ b/crates/proc-macro-srv-cli/src/main.rs @@ -8,6 +8,8 @@ extern crate rustc_driver as _; use std::io; +use proc_macro_api::msg::ServerConfig; + fn main() -> std::io::Result<()> { let v = std::env::var("RUST_ANALYZER_INTERNALS_DO_NOT_USE"); match v.as_deref() { @@ -26,8 +28,32 @@ fn main() -> std::io::Result<()> { #[cfg(not(any(feature = "sysroot-abi", rust_analyzer)))] fn run() -> io::Result<()> { - eprintln!("proc-macro-srv-cli requires the `sysroot-abi` feature to be enabled"); - std::process::exit(70); + let err = "proc-macro-srv-cli needs to be compiled with the `sysroot-abi` feature to function"; + eprintln!("{err}"); + use proc_macro_api::msg::{self, Message}; + + let read_request = |buf: &mut String| msg::Request::read(&mut io::stdin().lock(), buf); + + let write_response = |msg: msg::Response| msg.write(&mut io::stdout().lock()); + + let mut buf = String::new(); + + while let Some(req) = read_request(&mut buf)? { + let res = match req { + msg::Request::ListMacros { .. 
} => msg::Response::ListMacros(Err(err.to_owned())), + msg::Request::ExpandMacro(_) => { + msg::Response::ExpandMacro(Err(msg::PanicMessage(err.to_owned()))) + } + msg::Request::ApiVersionCheck {} => { + msg::Response::ApiVersionCheck(proc_macro_api::msg::CURRENT_API_VERSION) + } + msg::Request::SetConfig(_) => { + msg::Response::SetConfig(ServerConfig { span_mode: msg::SpanMode::Id }) + } + }; + write_response(res)? + } + Ok(()) } #[cfg(any(feature = "sysroot-abi", rust_analyzer))] diff --git a/crates/proc-macro-srv/proc-macro-test/build.rs b/crates/proc-macro-srv/proc-macro-test/build.rs index dd4709a1f7..ff2f5d1863 100644 --- a/crates/proc-macro-srv/proc-macro-test/build.rs +++ b/crates/proc-macro-srv/proc-macro-test/build.rs @@ -8,7 +8,7 @@ //! 1.58) and future ABIs (stage1, nightly) use std::{ - env, fs, + env, path::{Path, PathBuf}, process::Command, }; From 7c7c0cbffb5f5392b5942ea513e70836c15035ea Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sun, 30 Jun 2024 16:41:52 +0200 Subject: [PATCH 43/94] Simplify --- Cargo.lock | 1 - crates/proc-macro-api/Cargo.toml | 1 - crates/proc-macro-api/src/lib.rs | 10 +++++----- crates/proc-macro-api/src/msg/flat.rs | 6 +++--- crates/proc-macro-api/src/process.rs | 10 +++++----- crates/proc-macro-srv-cli/src/main.rs | 6 +++--- crates/rust-analyzer/src/reload.rs | 2 +- 7 files changed, 17 insertions(+), 19 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 790bccea19..56deb1b7a1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1337,7 +1337,6 @@ dependencies = [ "stdx", "text-size", "tracing", - "triomphe", "tt", ] diff --git a/crates/proc-macro-api/Cargo.toml b/crates/proc-macro-api/Cargo.toml index 11fd076a6f..7f633d91ec 100644 --- a/crates/proc-macro-api/Cargo.toml +++ b/crates/proc-macro-api/Cargo.toml @@ -15,7 +15,6 @@ doctest = false serde.workspace = true serde_json = { workspace = true, features = ["unbounded_depth"] } tracing.workspace = true -triomphe.workspace = true rustc-hash.workspace = true indexmap.workspace = true diff --git a/crates/proc-macro-api/src/lib.rs b/crates/proc-macro-api/src/lib.rs index 9ba6a09cb7..f21411bc81 100644 --- a/crates/proc-macro-api/src/lib.rs +++ b/crates/proc-macro-api/src/lib.rs @@ -9,9 +9,7 @@ pub mod msg; mod process; use base_db::Env; -use indexmap::IndexSet; use paths::{AbsPath, AbsPathBuf}; -use rustc_hash::FxHashMap; use span::Span; use std::{fmt, io, sync::Arc}; use tt::SmolStr; @@ -21,7 +19,8 @@ use serde::{Deserialize, Serialize}; use crate::{ msg::{ deserialize_span_data_index_map, flat::serialize_span_data_index_map, ExpandMacro, - ExpnGlobals, FlatTree, PanicMessage, HAS_GLOBAL_SPANS, RUST_ANALYZER_SPAN_SUPPORT, + ExpnGlobals, FlatTree, PanicMessage, SpanDataIndexMap, HAS_GLOBAL_SPANS, + RUST_ANALYZER_SPAN_SUPPORT, }, process::ProcMacroProcessSrv, }; @@ -101,7 +100,8 @@ impl ProcMacroServer { /// Spawns an external process as the proc macro server and returns a client connected to it. 
pub fn spawn( process_path: &AbsPath, - env: &FxHashMap, + env: impl IntoIterator, impl AsRef)> + + Clone, ) -> io::Result { let process = ProcMacroProcessSrv::run(process_path, env)?; Ok(ProcMacroServer { process: Arc::new(process), path: process_path.to_owned() }) @@ -151,7 +151,7 @@ impl ProcMacro { let version = self.process.version(); let current_dir = env.get("CARGO_MANIFEST_DIR"); - let mut span_data_table = IndexSet::default(); + let mut span_data_table = SpanDataIndexMap::default(); let def_site = span_data_table.insert_full(def_site).0; let call_site = span_data_table.insert_full(call_site).0; let mixed_site = span_data_table.insert_full(mixed_site).0; diff --git a/crates/proc-macro-api/src/msg/flat.rs b/crates/proc-macro-api/src/msg/flat.rs index 99cdbd930e..11fd7596f2 100644 --- a/crates/proc-macro-api/src/msg/flat.rs +++ b/crates/proc-macro-api/src/msg/flat.rs @@ -37,7 +37,6 @@ use std::collections::VecDeque; -use indexmap::IndexSet; use la_arena::RawIdx; use rustc_hash::FxHashMap; use serde::{Deserialize, Serialize}; @@ -46,7 +45,8 @@ use text_size::TextRange; use crate::msg::ENCODE_CLOSE_SPAN_VERSION; -pub type SpanDataIndexMap = IndexSet; +pub type SpanDataIndexMap = + indexmap::IndexSet>; pub fn serialize_span_data_index_map(map: &SpanDataIndexMap) -> Vec { map.iter() @@ -328,7 +328,7 @@ impl InternableSpan for TokenId { } } impl InternableSpan for Span { - type Table = IndexSet; + type Table = SpanDataIndexMap; fn token_id_of(table: &mut Self::Table, span: Self) -> TokenId { TokenId(table.insert_full(span).0 as u32) } diff --git a/crates/proc-macro-api/src/process.rs b/crates/proc-macro-api/src/process.rs index 1c37082bb2..3a1a4cfa13 100644 --- a/crates/proc-macro-api/src/process.rs +++ b/crates/proc-macro-api/src/process.rs @@ -7,7 +7,6 @@ use std::{ }; use paths::AbsPath; -use rustc_hash::FxHashMap; use stdx::JodChild; use crate::{ @@ -36,10 +35,11 @@ struct ProcessSrvState { impl ProcMacroProcessSrv { pub(crate) fn run( process_path: &AbsPath, - env: &FxHashMap, + env: impl IntoIterator, impl AsRef)> + + Clone, ) -> io::Result { let create_srv = |null_stderr| { - let mut process = Process::run(process_path, env, null_stderr)?; + let mut process = Process::run(process_path, env.clone(), null_stderr)?; let (stdin, stdout) = process.stdio().expect("couldn't access child stdio"); io::Result::Ok(ProcMacroProcessSrv { @@ -158,7 +158,7 @@ struct Process { impl Process { fn run( path: &AbsPath, - env: &FxHashMap, + env: impl IntoIterator, impl AsRef)>, null_stderr: bool, ) -> io::Result { let child = JodChild(mk_child(path, env, null_stderr)?); @@ -176,7 +176,7 @@ impl Process { fn mk_child( path: &AbsPath, - env: &FxHashMap, + env: impl IntoIterator, impl AsRef)>, null_stderr: bool, ) -> io::Result { let mut cmd = Command::new(path); diff --git a/crates/proc-macro-srv-cli/src/main.rs b/crates/proc-macro-srv-cli/src/main.rs index f6f6fdc864..407c1969cb 100644 --- a/crates/proc-macro-srv-cli/src/main.rs +++ b/crates/proc-macro-srv-cli/src/main.rs @@ -8,8 +8,6 @@ extern crate rustc_driver as _; use std::io; -use proc_macro_api::msg::ServerConfig; - fn main() -> std::io::Result<()> { let v = std::env::var("RUST_ANALYZER_INTERNALS_DO_NOT_USE"); match v.as_deref() { @@ -48,7 +46,9 @@ fn run() -> io::Result<()> { msg::Response::ApiVersionCheck(proc_macro_api::msg::CURRENT_API_VERSION) } msg::Request::SetConfig(_) => { - msg::Response::SetConfig(ServerConfig { span_mode: msg::SpanMode::Id }) + msg::Response::SetConfig(proc_macro_api::msg::ServerConfig { + span_mode: 
msg::SpanMode::Id, + }) } }; write_response(res)? diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs index bd0f733ef3..7b9a10fdb8 100644 --- a/crates/rust-analyzer/src/reload.rs +++ b/crates/rust-analyzer/src/reload.rs @@ -529,7 +529,7 @@ impl GlobalState { None => ws.find_sysroot_proc_macro_srv()?, }; - let env = match &ws.kind { + let env: FxHashMap<_, _> = match &ws.kind { ProjectWorkspaceKind::Cargo { cargo_config_extra_env, .. } | ProjectWorkspaceKind::DetachedFile { cargo: Some(_), From c236190b60c4e9e35149ac99088b0107b30723ab Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sun, 30 Jun 2024 16:54:31 +0200 Subject: [PATCH 44/94] Abstract proc-macro-srv protocol format --- crates/proc-macro-api/src/json.rs | 34 ++++++++++++++++++++ crates/proc-macro-api/src/lib.rs | 1 + crates/proc-macro-api/src/msg.rs | 46 ++++++++------------------- crates/proc-macro-api/src/process.rs | 5 +-- crates/proc-macro-srv-cli/src/main.rs | 12 ++++--- crates/proc-macro-srv/src/dylib.rs | 1 + 6 files changed, 60 insertions(+), 39 deletions(-) create mode 100644 crates/proc-macro-api/src/json.rs diff --git a/crates/proc-macro-api/src/json.rs b/crates/proc-macro-api/src/json.rs new file mode 100644 index 0000000000..415845c0bd --- /dev/null +++ b/crates/proc-macro-api/src/json.rs @@ -0,0 +1,34 @@ +use std::io::{self, BufRead, Write}; + +pub fn read_json<'a>( + inp: &mut impl BufRead, + buf: &'a mut String, +) -> io::Result> { + loop { + buf.clear(); + + inp.read_line(buf)?; + buf.pop(); // Remove trailing '\n' + + if buf.is_empty() { + return Ok(None); + } + + // Some ill behaved macro try to use stdout for debugging + // We ignore it here + if !buf.starts_with('{') { + tracing::error!("proc-macro tried to print : {}", buf); + continue; + } + + return Ok(Some(buf)); + } +} + +pub fn write_json(out: &mut impl Write, msg: &str) -> io::Result<()> { + tracing::debug!("> {}", msg); + out.write_all(msg.as_bytes())?; + out.write_all(b"\n")?; + out.flush()?; + Ok(()) +} diff --git a/crates/proc-macro-api/src/lib.rs b/crates/proc-macro-api/src/lib.rs index f21411bc81..d7f49013ae 100644 --- a/crates/proc-macro-api/src/lib.rs +++ b/crates/proc-macro-api/src/lib.rs @@ -5,6 +5,7 @@ //! is used to provide basic infrastructure for communication between two //! processes: Client (RA itself), Server (the external program) +pub mod json; pub mod msg; mod process; diff --git a/crates/proc-macro-api/src/msg.rs b/crates/proc-macro-api/src/msg.rs index 49ebedba7d..fa3ba9bbfc 100644 --- a/crates/proc-macro-api/src/msg.rs +++ b/crates/proc-macro-api/src/msg.rs @@ -122,8 +122,12 @@ impl ExpnGlobals { } pub trait Message: Serialize + DeserializeOwned { - fn read(inp: &mut impl BufRead, buf: &mut String) -> io::Result> { - Ok(match read_json(inp, buf)? { + fn read( + from_proto: ProtocolRead, + inp: &mut R, + buf: &mut String, + ) -> io::Result> { + Ok(match from_proto(inp, buf)? 
{ None => None, Some(text) => { let mut deserializer = serde_json::Deserializer::from_str(text); @@ -134,44 +138,20 @@ pub trait Message: Serialize + DeserializeOwned { } }) } - fn write(self, out: &mut impl Write) -> io::Result<()> { + fn write(self, to_proto: ProtocolWrite, out: &mut W) -> io::Result<()> { let text = serde_json::to_string(&self)?; - write_json(out, &text) + to_proto(out, &text) } } impl Message for Request {} impl Message for Response {} -fn read_json<'a>(inp: &mut impl BufRead, buf: &'a mut String) -> io::Result> { - loop { - buf.clear(); - - inp.read_line(buf)?; - buf.pop(); // Remove trailing '\n' - - if buf.is_empty() { - return Ok(None); - } - - // Some ill behaved macro try to use stdout for debugging - // We ignore it here - if !buf.starts_with('{') { - tracing::error!("proc-macro tried to print : {}", buf); - continue; - } - - return Ok(Some(buf)); - } -} - -fn write_json(out: &mut impl Write, msg: &str) -> io::Result<()> { - tracing::debug!("> {}", msg); - out.write_all(msg.as_bytes())?; - out.write_all(b"\n")?; - out.flush()?; - Ok(()) -} +#[allow(type_alias_bounds)] +type ProtocolRead = + for<'i, 'buf> fn(inp: &'i mut R, buf: &'buf mut String) -> io::Result>; +#[allow(type_alias_bounds)] +type ProtocolWrite = for<'o, 'msg> fn(out: &'o mut W, msg: &'msg str) -> io::Result<()>; #[cfg(test)] mod tests { diff --git a/crates/proc-macro-api/src/process.rs b/crates/proc-macro-api/src/process.rs index 3a1a4cfa13..2b1a791827 100644 --- a/crates/proc-macro-api/src/process.rs +++ b/crates/proc-macro-api/src/process.rs @@ -10,6 +10,7 @@ use paths::AbsPath; use stdx::JodChild; use crate::{ + json::{read_json, write_json}, msg::{Message, Request, Response, SpanMode, CURRENT_API_VERSION, RUST_ANALYZER_SPAN_SUPPORT}, ProcMacroKind, ServerError, }; @@ -201,11 +202,11 @@ fn send_request( req: Request, buf: &mut String, ) -> Result { - req.write(&mut writer).map_err(|err| ServerError { + req.write(write_json, &mut writer).map_err(|err| ServerError { message: "failed to write request".into(), io: Some(Arc::new(err)), })?; - let res = Response::read(&mut reader, buf).map_err(|err| ServerError { + let res = Response::read(read_json, &mut reader, buf).map_err(|err| ServerError { message: "failed to read response".into(), io: Some(Arc::new(err)), })?; diff --git a/crates/proc-macro-srv-cli/src/main.rs b/crates/proc-macro-srv-cli/src/main.rs index 407c1969cb..174f9c5246 100644 --- a/crates/proc-macro-srv-cli/src/main.rs +++ b/crates/proc-macro-srv-cli/src/main.rs @@ -6,6 +6,8 @@ #[cfg(feature = "in-rust-tree")] extern crate rustc_driver as _; +use proc_macro_api::json::{read_json, write_json}; + use std::io; fn main() -> std::io::Result<()> { @@ -30,9 +32,10 @@ fn run() -> io::Result<()> { eprintln!("{err}"); use proc_macro_api::msg::{self, Message}; - let read_request = |buf: &mut String| msg::Request::read(&mut io::stdin().lock(), buf); + let read_request = + |buf: &mut String| msg::Request::read(read_json, &mut io::stdin().lock(), buf); - let write_response = |msg: msg::Response| msg.write(&mut io::stdout().lock()); + let write_response = |msg: msg::Response| msg.write(write_json, &mut io::stdout().lock()); let mut buf = String::new(); @@ -61,9 +64,10 @@ fn run() -> io::Result<()> { use proc_macro_api::msg::{self, Message}; use proc_macro_srv::EnvSnapshot; - let read_request = |buf: &mut String| msg::Request::read(&mut io::stdin().lock(), buf); + let read_request = + |buf: &mut String| msg::Request::read(read_json, &mut io::stdin().lock(), buf); - let write_response = |msg: 
msg::Response| msg.write(&mut io::stdout().lock()); + let write_response = |msg: msg::Response| msg.write(write_json, &mut io::stdout().lock()); let env = EnvSnapshot::new(); let mut srv = proc_macro_srv::ProcMacroSrv::new(&env); diff --git a/crates/proc-macro-srv/src/dylib.rs b/crates/proc-macro-srv/src/dylib.rs index c2309cb3d1..78ae4574c4 100644 --- a/crates/proc-macro-srv/src/dylib.rs +++ b/crates/proc-macro-srv/src/dylib.rs @@ -142,6 +142,7 @@ impl Drop for Expander { fn drop(&mut self) { #[cfg(windows)] std::fs::remove_file(&self.path).ok(); + _ = self.path; } } From 956c8521a9fb0b4fe8e01ae0203223c024da552c Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sun, 30 Jun 2024 17:03:03 +0200 Subject: [PATCH 45/94] Arc proc-macro expander paths --- crates/proc-macro-api/src/json.rs | 1 + crates/proc-macro-api/src/lib.rs | 7 ++++--- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/crates/proc-macro-api/src/json.rs b/crates/proc-macro-api/src/json.rs index 415845c0bd..ec89f6a9e6 100644 --- a/crates/proc-macro-api/src/json.rs +++ b/crates/proc-macro-api/src/json.rs @@ -1,3 +1,4 @@ +//! Protocol functions for json. use std::io::{self, BufRead, Write}; pub fn read_json<'a>( diff --git a/crates/proc-macro-api/src/lib.rs b/crates/proc-macro-api/src/lib.rs index d7f49013ae..4c65dd5969 100644 --- a/crates/proc-macro-api/src/lib.rs +++ b/crates/proc-macro-api/src/lib.rs @@ -65,7 +65,7 @@ impl MacroDylib { #[derive(Debug, Clone)] pub struct ProcMacro { process: Arc, - dylib_path: AbsPathBuf, + dylib_path: Arc, name: SmolStr, kind: ProcMacroKind, } @@ -75,7 +75,7 @@ impl PartialEq for ProcMacro { fn eq(&self, other: &Self) -> bool { self.name == other.name && self.kind == other.kind - && self.dylib_path == other.dylib_path + && Arc::ptr_eq(&self.dylib_path, &other.dylib_path) && Arc::ptr_eq(&self.process, &other.process) } } @@ -116,6 +116,7 @@ impl ProcMacroServer { let _p = tracing::info_span!("ProcMacroServer::load_dylib").entered(); let macros = self.process.find_proc_macros(&dylib.path)?; + let dylib_path = Arc::new(dylib.path); match macros { Ok(macros) => Ok(macros .into_iter() @@ -123,7 +124,7 @@ impl ProcMacroServer { process: self.process.clone(), name: name.into(), kind, - dylib_path: dylib.path.clone(), + dylib_path: dylib_path.clone(), }) .collect()), Err(message) => Err(ServerError { message, io: None }), From b60b27bd22fc4e24d9b4c36c960f3ff3d42ec395 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sun, 30 Jun 2024 18:14:53 +0200 Subject: [PATCH 46/94] Remove serde flag from indexmap dependency --- Cargo.lock | 2 -- crates/rust-analyzer/Cargo.toml | 1 - crates/rust-analyzer/src/config.rs | 28 +++++++++++++++++++--------- docs/user/generated_config.adoc | 12 ++++++------ editors/code/package.json | 12 ++++++------ 5 files changed, 31 insertions(+), 24 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 56deb1b7a1..78270e6d19 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -809,7 +809,6 @@ checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26" dependencies = [ "equivalent", "hashbrown", - "serde", ] [[package]] @@ -1683,7 +1682,6 @@ dependencies = [ "ide", "ide-db", "ide-ssr", - "indexmap", "itertools", "load-cargo", "lsp-server 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)", diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml index 8ff7235b8f..c207c42b6d 100644 --- a/crates/rust-analyzer/Cargo.toml +++ b/crates/rust-analyzer/Cargo.toml @@ -47,7 +47,6 @@ always-assert = "0.2.0" walkdir = "2.3.2" semver.workspace 
= true memchr = "2.7.1" -indexmap = { workspace = true, features = ["serde"] } cfg.workspace = true flycheck.workspace = true diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index 42ba162e4f..5aa7b15eb8 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs @@ -18,7 +18,6 @@ use ide_db::{ imports::insert_use::{ImportGranularity, InsertUseConfig, PrefixKind}, SnippetCap, }; -use indexmap::IndexMap; use itertools::Itertools; use lsp_types::{ClientCapabilities, MarkupKind}; use paths::{Utf8Path, Utf8PathBuf}; @@ -382,8 +381,7 @@ config_data! { /// Enables completions of private items and fields that are defined in the current workspace even if they are not visible at the current position. completion_privateEditable_enable: bool = false, /// Custom completion snippets. - // NOTE: we use IndexMap for deterministic serialization ordering - completion_snippets_custom: IndexMap = serde_json::from_str(r#"{ + completion_snippets_custom: FxHashMap = serde_json::from_str(r#"{ "Arc::new": { "postfix": "arc", "body": "Arc::new(${receiver})", @@ -1243,7 +1241,19 @@ impl Config { } pub fn json_schema() -> serde_json::Value { - FullConfigInput::json_schema() + let mut s = FullConfigInput::json_schema(); + + fn sort_objects_by_field(json: &mut serde_json::Value) { + if let serde_json::Value::Object(object) = json { + let old = std::mem::take(object); + old.into_iter().sorted_by(|(k, _), (k2, _)| k.cmp(k2)).for_each(|(k, mut v)| { + sort_objects_by_field(&mut v); + object.insert(k, v); + }); + } + } + sort_objects_by_field(&mut s); + s } pub fn root_path(&self) -> &AbsPathBuf { @@ -2640,9 +2650,8 @@ macro_rules! _config_data { /// All fields `Option`, `None` representing fields not set in a particular JSON/TOML blob. #[allow(non_snake_case)] - #[derive(Clone, Serialize, Default)] + #[derive(Clone, Default)] struct $input { $( - #[serde(skip_serializing_if = "Option::is_none")] $field: Option<$ty>, )* } @@ -2725,7 +2734,7 @@ struct DefaultConfigData { /// All of the config levels, all fields `Option`, to describe fields that are actually set by /// some rust-analyzer.toml file or JSON blob. An empty rust-analyzer.toml corresponds to /// all fields being None. -#[derive(Debug, Clone, Default, Serialize)] +#[derive(Debug, Clone, Default)] struct FullConfigInput { global: GlobalConfigInput, local: LocalConfigInput, @@ -2770,7 +2779,7 @@ impl FullConfigInput { /// All of the config levels, all fields `Option`, to describe fields that are actually set by /// some rust-analyzer.toml file or JSON blob. An empty rust-analyzer.toml corresponds to /// all fields being None. -#[derive(Debug, Clone, Default, Serialize)] +#[derive(Debug, Clone, Default)] struct GlobalLocalConfigInput { global: GlobalConfigInput, local: LocalConfigInput, @@ -2932,7 +2941,7 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json "FxHashMap, Box<[Box]>>" => set! { "type": "object", }, - "IndexMap" => set! { + "FxHashMap" => set! { "type": "object", }, "FxHashMap" => set! 
{ @@ -3347,6 +3356,7 @@ mod tests { #[test] fn generate_package_json_config() { let s = Config::json_schema(); + let schema = format!("{s:#}"); let mut schema = schema .trim_start_matches('[') diff --git a/docs/user/generated_config.adoc b/docs/user/generated_config.adoc index 81f30dc56e..a9b3b9eb21 100644 --- a/docs/user/generated_config.adoc +++ b/docs/user/generated_config.adoc @@ -350,12 +350,6 @@ Default: "description": "Put the expression into a pinned `Box`", "scope": "expr" }, - "Ok": { - "postfix": "ok", - "body": "Ok(${receiver})", - "description": "Wrap the expression in a `Result::Ok`", - "scope": "expr" - }, "Err": { "postfix": "err", "body": "Err(${receiver})", @@ -367,6 +361,12 @@ Default: "body": "Some(${receiver})", "description": "Wrap the expression in an `Option::Some`", "scope": "expr" + }, + "Ok": { + "postfix": "ok", + "body": "Ok(${receiver})", + "description": "Wrap the expression in a `Result::Ok`", + "scope": "expr" } } ---- diff --git a/editors/code/package.json b/editors/code/package.json index 0da5b920a7..871704fac0 100644 --- a/editors/code/package.json +++ b/editors/code/package.json @@ -1187,12 +1187,6 @@ "description": "Put the expression into a pinned `Box`", "scope": "expr" }, - "Ok": { - "postfix": "ok", - "body": "Ok(${receiver})", - "description": "Wrap the expression in a `Result::Ok`", - "scope": "expr" - }, "Err": { "postfix": "err", "body": "Err(${receiver})", @@ -1204,6 +1198,12 @@ "body": "Some(${receiver})", "description": "Wrap the expression in an `Option::Some`", "scope": "expr" + }, + "Ok": { + "postfix": "ok", + "body": "Ok(${receiver})", + "description": "Wrap the expression in a `Result::Ok`", + "scope": "expr" } }, "type": "object" From d859e40db9bd0e8b54e72bc984f175993a448467 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sun, 30 Jun 2024 18:41:55 +0200 Subject: [PATCH 47/94] Bump rustc_pattern_analysis --- Cargo.lock | 37 +----- Cargo.toml | 2 +- crates/hir-ty/src/diagnostics/match_check.rs | 2 + .../diagnostics/match_check/pat_analysis.rs | 125 +++++++++--------- 4 files changed, 75 insertions(+), 91 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 78270e6d19..f30f69eb6e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -616,7 +616,7 @@ dependencies = [ "oorandom", "project-model", "ra-ap-rustc_abi", - "ra-ap-rustc_index 0.53.0", + "ra-ap-rustc_index", "ra-ap-rustc_pattern_analysis", "rustc-hash", "scoped-tls", @@ -1489,21 +1489,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "80b1d613eee933486c0613a7bc26e515e46f43adf479d1edd5e537f983e9ce46" dependencies = [ "bitflags 2.5.0", - "ra-ap-rustc_index 0.53.0", + "ra-ap-rustc_index", "tracing", ] -[[package]] -name = "ra-ap-rustc_index" -version = "0.44.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ad68bacffb87dcdbb23a3ce11261375078aaa06b85d348c49f39ffd5510dc20" -dependencies = [ - "arrayvec", - "ra-ap-rustc_index_macros 0.44.0", - "smallvec", -] - [[package]] name = "ra-ap-rustc_index" version = "0.53.0" @@ -1511,22 +1500,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f072060ac77e9e1a02cc20028095993af7e72cc0804779c68bcbf47b16de49c9" dependencies = [ "arrayvec", - "ra-ap-rustc_index_macros 0.53.0", + "ra-ap-rustc_index_macros", "smallvec", ] -[[package]] -name = "ra-ap-rustc_index_macros" -version = "0.44.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8782aaf3a113837c533dfb1c45df91cd17e1fdd1d2f9a20c2e0d1976025c4f1f" -dependencies = [ - 
"proc-macro2", - "quote", - "syn", - "synstructure", -] - [[package]] name = "ra-ap-rustc_index_macros" version = "0.53.0" @@ -1555,17 +1532,17 @@ version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "70dad7a491c2554590222e0c9212dcb7c2e7aceb668875075012a35ea780d135" dependencies = [ - "ra-ap-rustc_index 0.53.0", + "ra-ap-rustc_index", "ra-ap-rustc_lexer", ] [[package]] name = "ra-ap-rustc_pattern_analysis" -version = "0.44.0" +version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d63d1e1d5b2a13273cee1a10011147418f40e12b70f70578ce1dee0f1cafc334" +checksum = "34768e1faf88c31f2e9ad57b48318a52b507dafac0cddbf01b5d63bfc0b0a365" dependencies = [ - "ra-ap-rustc_index 0.44.0", + "ra-ap-rustc_index", "rustc-hash", "rustc_apfloat", "smallvec", diff --git a/Cargo.toml b/Cargo.toml index d77804a418..eea73ac380 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -87,7 +87,7 @@ ra-ap-rustc_lexer = { version = "0.53.0", default-features = false } ra-ap-rustc_parse_format = { version = "0.53.0", default-features = false } ra-ap-rustc_index = { version = "0.53.0", default-features = false } ra-ap-rustc_abi = { version = "0.53.0", default-features = false } -ra-ap-rustc_pattern_analysis = { version = "0.44.0", default-features = false } +ra-ap-rustc_pattern_analysis = { version = "0.53.0", default-features = false } # local crates that aren't published to crates.io. These should not have versions. sourcegen = { path = "./crates/sourcegen" } diff --git a/crates/hir-ty/src/diagnostics/match_check.rs b/crates/hir-ty/src/diagnostics/match_check.rs index 8d6e502c6a..8dcc14feb2 100644 --- a/crates/hir-ty/src/diagnostics/match_check.rs +++ b/crates/hir-ty/src/diagnostics/match_check.rs @@ -51,6 +51,7 @@ pub(crate) struct Pat { #[derive(Clone, Debug, PartialEq)] pub(crate) enum PatKind { Wild, + Never, /// `x`, `ref x`, `x @ P`, etc. Binding { @@ -294,6 +295,7 @@ impl HirDisplay for Pat { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { match &*self.kind { PatKind::Wild => write!(f, "_"), + PatKind::Never => write!(f, "!"), PatKind::Binding { name, subpattern } => { write!(f, "{}", name.display(f.db.upcast()))?; if let Some(subpattern) = subpattern { diff --git a/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs b/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs index c171dbc170..01e43a67e4 100644 --- a/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs +++ b/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs @@ -4,10 +4,9 @@ use std::fmt; use hir_def::{DefWithBodyId, EnumId, EnumVariantId, HasModule, LocalFieldId, ModuleId, VariantId}; use once_cell::unsync::Lazy; -use rustc_hash::FxHashMap; +use rustc_index::IndexVec; use rustc_pattern_analysis::{ constructor::{Constructor, ConstructorSet, VariantVisibility}, - index::IdxContainer, usefulness::{compute_match_usefulness, PlaceValidity, UsefulnessReport}, Captures, PatCx, PrivateUninhabitedField, }; @@ -26,10 +25,10 @@ use super::{is_box, FieldPat, Pat, PatKind}; use Constructor::*; // Re-export r-a-specific versions of all these types. 
-pub(crate) type DeconstructedPat<'p> = - rustc_pattern_analysis::pat::DeconstructedPat>; -pub(crate) type MatchArm<'p> = rustc_pattern_analysis::MatchArm<'p, MatchCheckCtx<'p>>; -pub(crate) type WitnessPat<'p> = rustc_pattern_analysis::pat::WitnessPat>; +pub(crate) type DeconstructedPat<'db> = + rustc_pattern_analysis::pat::DeconstructedPat>; +pub(crate) type MatchArm<'db> = rustc_pattern_analysis::MatchArm<'db, MatchCheckCtx<'db>>; +pub(crate) type WitnessPat<'db> = rustc_pattern_analysis::pat::WitnessPat>; /// [Constructor] uses this in unimplemented variants. /// It allows porting match expressions from upstream algorithm without losing semantics. @@ -54,23 +53,27 @@ impl EnumVariantContiguousIndex { } } +impl rustc_index::Idx for EnumVariantContiguousIndex { + fn new(idx: usize) -> Self { + EnumVariantContiguousIndex(idx) + } + + fn index(self) -> usize { + self.0 + } +} + #[derive(Clone)] -pub(crate) struct MatchCheckCtx<'p> { +pub(crate) struct MatchCheckCtx<'db> { module: ModuleId, body: DefWithBodyId, - pub(crate) db: &'p dyn HirDatabase, + pub(crate) db: &'db dyn HirDatabase, exhaustive_patterns: bool, min_exhaustive_patterns: bool, } -#[derive(Clone)] -pub(crate) struct PatData<'p> { - /// Keep db around so that we can print variant names in `Debug`. - pub(crate) db: &'p dyn HirDatabase, -} - -impl<'p> MatchCheckCtx<'p> { - pub(crate) fn new(module: ModuleId, body: DefWithBodyId, db: &'p dyn HirDatabase) -> Self { +impl<'db> MatchCheckCtx<'db> { + pub(crate) fn new(module: ModuleId, body: DefWithBodyId, db: &'db dyn HirDatabase) -> Self { let def_map = db.crate_def_map(module.krate()); let exhaustive_patterns = def_map.is_unstable_feature_enabled("exhaustive_patterns"); let min_exhaustive_patterns = @@ -80,9 +83,9 @@ impl<'p> MatchCheckCtx<'p> { pub(crate) fn compute_match_usefulness( &self, - arms: &[MatchArm<'p>], + arms: &[MatchArm<'db>], scrut_ty: Ty, - ) -> Result, ()> { + ) -> Result, ()> { // FIXME: Determine place validity correctly. For now, err on the safe side. let place_validity = PlaceValidity::MaybeInvalid; // Measured to take ~100ms on modern hardware. 
@@ -101,7 +104,7 @@ impl<'p> MatchCheckCtx<'p> { } fn variant_id_for_adt( - db: &'p dyn HirDatabase, + db: &'db dyn HirDatabase, ctor: &Constructor, adt: hir_def::AdtId, ) -> Option { @@ -126,7 +129,7 @@ impl<'p> MatchCheckCtx<'p> { &'a self, ty: &'a Ty, variant: VariantId, - ) -> impl Iterator + Captures<'a> + Captures<'p> { + ) -> impl Iterator + Captures<'a> + Captures<'db> { let (_, substs) = ty.as_adt().unwrap(); let field_tys = self.db.field_types(variant); @@ -139,8 +142,8 @@ impl<'p> MatchCheckCtx<'p> { }) } - pub(crate) fn lower_pat(&self, pat: &Pat) -> DeconstructedPat<'p> { - let singleton = |pat: DeconstructedPat<'p>| vec![pat.at_index(0)]; + pub(crate) fn lower_pat(&self, pat: &Pat) -> DeconstructedPat<'db> { + let singleton = |pat: DeconstructedPat<'db>| vec![pat.at_index(0)]; let ctor; let mut fields: Vec<_>; let arity; @@ -228,6 +231,11 @@ impl<'p> MatchCheckCtx<'p> { fields = Vec::new(); arity = 0; } + PatKind::Never => { + ctor = Never; + fields = Vec::new(); + arity = 0; + } PatKind::Or { pats } => { ctor = Or; fields = pats @@ -238,11 +246,10 @@ impl<'p> MatchCheckCtx<'p> { arity = pats.len(); } } - let data = PatData { db: self.db }; - DeconstructedPat::new(ctor, fields, arity, pat.ty.clone(), data) + DeconstructedPat::new(ctor, fields, arity, pat.ty.clone(), ()) } - pub(crate) fn hoist_witness_pat(&self, pat: &WitnessPat<'p>) -> Pat { + pub(crate) fn hoist_witness_pat(&self, pat: &WitnessPat<'db>) -> Pat { let mut subpatterns = pat.iter_fields().map(|p| self.hoist_witness_pat(p)); let kind = match pat.ctor() { &Bool(value) => PatKind::LiteralBool { value }, @@ -290,6 +297,7 @@ impl<'p> MatchCheckCtx<'p> { Slice(_) => unimplemented!(), &Str(void) => match void {}, Wildcard | NonExhaustive | Hidden | PrivateUninhabited => PatKind::Wild, + Never => PatKind::Never, Missing | F32Range(..) | F64Range(..) | Opaque(..) | Or => { never!("can't convert to pattern: {:?}", pat.ctor()); PatKind::Wild @@ -299,13 +307,13 @@ impl<'p> MatchCheckCtx<'p> { } } -impl<'p> PatCx for MatchCheckCtx<'p> { +impl<'db> PatCx for MatchCheckCtx<'db> { type Error = (); type Ty = Ty; type VariantIdx = EnumVariantContiguousIndex; type StrLit = Void; type ArmData = (); - type PatData = PatData<'p>; + type PatData = (); fn is_exhaustive_patterns_feature_on(&self) -> bool { self.exhaustive_patterns @@ -339,8 +347,8 @@ impl<'p> PatCx for MatchCheckCtx<'p> { }, Ref => 1, Slice(..) => unimplemented!(), - Bool(..) | IntRange(..) | F32Range(..) | F64Range(..) | Str(..) | Opaque(..) - | NonExhaustive | PrivateUninhabited | Hidden | Missing | Wildcard => 0, + Never | Bool(..) | IntRange(..) | F32Range(..) | F64Range(..) | Str(..) + | Opaque(..) | NonExhaustive | PrivateUninhabited | Hidden | Missing | Wildcard => 0, Or => { never!("The `Or` constructor doesn't have a fixed arity"); 0 @@ -402,8 +410,10 @@ impl<'p> PatCx for MatchCheckCtx<'p> { } }, Slice(_) => unreachable!("Found a `Slice` constructor in match checking"), - Bool(..) | IntRange(..) | F32Range(..) | F64Range(..) | Str(..) | Opaque(..) - | NonExhaustive | PrivateUninhabited | Hidden | Missing | Wildcard => smallvec![], + Never | Bool(..) | IntRange(..) | F32Range(..) | F64Range(..) | Str(..) + | Opaque(..) 
| NonExhaustive | PrivateUninhabited | Hidden | Missing | Wildcard => { + smallvec![] + } Or => { never!("called `Fields::wildcards` on an `Or` ctor"); smallvec![] @@ -442,11 +452,8 @@ impl<'p> PatCx for MatchCheckCtx<'p> { if enum_data.variants.is_empty() && !is_declared_nonexhaustive { ConstructorSet::NoConstructors } else { - let mut variants = FxHashMap::with_capacity_and_hasher( - enum_data.variants.len(), - Default::default(), - ); - for (i, &(variant, _)) in enum_data.variants.iter().enumerate() { + let mut variants = IndexVec::with_capacity(enum_data.variants.len()); + for &(variant, _) in enum_data.variants.iter() { let is_uninhabited = is_enum_variant_uninhabited_from(cx.db, variant, subst, cx.module); let visibility = if is_uninhabited { @@ -454,13 +461,10 @@ impl<'p> PatCx for MatchCheckCtx<'p> { } else { VariantVisibility::Visible }; - variants.insert(EnumVariantContiguousIndex(i), visibility); + variants.push(visibility); } - ConstructorSet::Variants { - variants: IdxContainer(variants), - non_exhaustive: is_declared_nonexhaustive, - } + ConstructorSet::Variants { variants, non_exhaustive: is_declared_nonexhaustive } } } TyKind::Adt(AdtId(hir_def::AdtId::UnionId(_)), _) => ConstructorSet::Union, @@ -476,26 +480,27 @@ impl<'p> PatCx for MatchCheckCtx<'p> { fn write_variant_name( f: &mut fmt::Formatter<'_>, - pat: &rustc_pattern_analysis::pat::DeconstructedPat, + _ctor: &Constructor, + _ty: &Self::Ty, ) -> fmt::Result { - let db = pat.data().db; - let variant = - pat.ty().as_adt().and_then(|(adt, _)| Self::variant_id_for_adt(db, pat.ctor(), adt)); + write!(f, "") + // We lack the database here ... + // let variant = ty.as_adt().and_then(|(adt, _)| Self::variant_id_for_adt(db, ctor, adt)); - if let Some(variant) = variant { - match variant { - VariantId::EnumVariantId(v) => { - write!(f, "{}", db.enum_variant_data(v).name.display(db.upcast()))?; - } - VariantId::StructId(s) => { - write!(f, "{}", db.struct_data(s).name.display(db.upcast()))? - } - VariantId::UnionId(u) => { - write!(f, "{}", db.union_data(u).name.display(db.upcast()))? - } - } - } - Ok(()) + // if let Some(variant) = variant { + // match variant { + // VariantId::EnumVariantId(v) => { + // write!(f, "{}", db.enum_variant_data(v).name.display(db.upcast()))?; + // } + // VariantId::StructId(s) => { + // write!(f, "{}", db.struct_data(s).name.display(db.upcast()))? + // } + // VariantId::UnionId(u) => { + // write!(f, "{}", db.union_data(u).name.display(db.upcast()))? 
+ // } + // } + // } + // Ok(()) } fn bug(&self, fmt: fmt::Arguments<'_>) { @@ -507,7 +512,7 @@ impl<'p> PatCx for MatchCheckCtx<'p> { } } -impl<'p> fmt::Debug for MatchCheckCtx<'p> { +impl<'db> fmt::Debug for MatchCheckCtx<'db> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("MatchCheckCtx").finish() } From c6709ffe05e0e8bec5f75a0e0fc0c469ff8cb7c0 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 1 Jul 2024 14:30:21 +0200 Subject: [PATCH 48/94] Improve error message when the proc-macro server unexpectedly exits --- crates/proc-macro-api/src/lib.rs | 4 ++ crates/proc-macro-api/src/process.rs | 84 ++++++++++++++++++---------- crates/proc-macro-srv/src/lib.rs | 2 + crates/rust-analyzer/src/reload.rs | 30 +++++++--- 4 files changed, 81 insertions(+), 39 deletions(-) diff --git a/crates/proc-macro-api/src/lib.rs b/crates/proc-macro-api/src/lib.rs index 4c65dd5969..3a915e668b 100644 --- a/crates/proc-macro-api/src/lib.rs +++ b/crates/proc-macro-api/src/lib.rs @@ -130,6 +130,10 @@ impl ProcMacroServer { Err(message) => Err(ServerError { message, io: None }), } } + + pub fn exited(&self) -> Option<&ServerError> { + self.process.exited() + } } impl ProcMacro { diff --git a/crates/proc-macro-api/src/process.rs b/crates/proc-macro-api/src/process.rs index 2b1a791827..c965257a5c 100644 --- a/crates/proc-macro-api/src/process.rs +++ b/crates/proc-macro-api/src/process.rs @@ -2,8 +2,9 @@ use std::{ io::{self, BufRead, BufReader, Read, Write}, + panic::AssertUnwindSafe, process::{Child, ChildStdin, ChildStdout, Command, Stdio}, - sync::{Arc, Mutex}, + sync::{Arc, Mutex, OnceLock}, }; use paths::AbsPath; @@ -22,6 +23,8 @@ pub(crate) struct ProcMacroProcessSrv { state: Mutex, version: u32, mode: SpanMode, + /// Populated when the server exits. + exited: OnceLock>, } #[derive(Debug)] @@ -29,8 +32,6 @@ struct ProcessSrvState { process: Process, stdin: ChildStdin, stdout: BufReader, - /// Populated when the server exits. 
- server_exited: Option, } impl ProcMacroProcessSrv { @@ -44,9 +45,10 @@ impl ProcMacroProcessSrv { let (stdin, stdout) = process.stdio().expect("couldn't access child stdio"); io::Result::Ok(ProcMacroProcessSrv { - state: Mutex::new(ProcessSrvState { process, stdin, stdout, server_exited: None }), + state: Mutex::new(ProcessSrvState { process, stdin, stdout }), version: 0, mode: SpanMode::Id, + exited: OnceLock::new(), }) }; let mut srv = create_srv(true)?; @@ -77,6 +79,10 @@ impl ProcMacroProcessSrv { } } + pub(crate) fn exited(&self) -> Option<&ServerError> { + self.exited.get().map(|it| &it.0) + } + pub(crate) fn version(&self) -> u32 { self.version } @@ -118,36 +124,52 @@ impl ProcMacroProcessSrv { } pub(crate) fn send_task(&self, req: Request) -> Result { - let state = &mut *self.state.lock().unwrap(); - if let Some(server_error) = &state.server_exited { - return Err(server_error.clone()); + if let Some(server_error) = self.exited.get() { + return Err(server_error.0.clone()); } + let state = &mut *self.state.lock().unwrap(); let mut buf = String::new(); - send_request(&mut state.stdin, &mut state.stdout, req, &mut buf).map_err(|e| { - if e.io.as_ref().map(|it| it.kind()) == Some(io::ErrorKind::BrokenPipe) { - match state.process.child.try_wait() { - Ok(None) => e, - Ok(Some(status)) => { - let mut msg = String::new(); - if !status.success() { - if let Some(stderr) = state.process.child.stderr.as_mut() { - _ = stderr.read_to_string(&mut msg); - } - } - let server_error = ServerError { - message: format!("server exited with {status}: {msg}"), - io: None, - }; - state.server_exited = Some(server_error.clone()); - server_error + send_request(&mut state.stdin, &mut state.stdout, req, &mut buf) + .and_then(|res| { + res.ok_or_else(|| { + let message = "proc-macro server did not respond with data".to_owned(); + ServerError { + io: Some(Arc::new(io::Error::new( + io::ErrorKind::BrokenPipe, + message.clone(), + ))), + message, } - Err(_) => e, + }) + }) + .map_err(|e| { + if e.io.as_ref().map(|it| it.kind()) == Some(io::ErrorKind::BrokenPipe) { + match state.process.child.try_wait() { + Ok(None) | Err(_) => e, + Ok(Some(status)) => { + let mut msg = String::new(); + if !status.success() { + if let Some(stderr) = state.process.child.stderr.as_mut() { + _ = stderr.read_to_string(&mut msg); + } + } + let server_error = ServerError { + message: format!( + "proc-macro server exited with {status}{}{msg}", + if msg.is_empty() { "" } else { ": " } + ), + io: None, + }; + // `AssertUnwindSafe` is fine here, we already correct initialized + // server_error at this point. 
+ self.exited.get_or_init(|| AssertUnwindSafe(server_error)).0.clone() + } + } + } else { + e } - } else { - e - } - }) + }) } } @@ -201,7 +223,7 @@ fn send_request( mut reader: &mut impl BufRead, req: Request, buf: &mut String, -) -> Result { +) -> Result, ServerError> { req.write(write_json, &mut writer).map_err(|err| ServerError { message: "failed to write request".into(), io: Some(Arc::new(err)), @@ -210,5 +232,5 @@ fn send_request( message: "failed to read response".into(), io: Some(Arc::new(err)), })?; - res.ok_or_else(|| ServerError { message: "server exited".into(), io: None }) + Ok(res) } diff --git a/crates/proc-macro-srv/src/lib.rs b/crates/proc-macro-srv/src/lib.rs index 34851ee0be..e6281035e1 100644 --- a/crates/proc-macro-srv/src/lib.rs +++ b/crates/proc-macro-srv/src/lib.rs @@ -204,6 +204,8 @@ fn expand_ra_span( let macro_body = macro_body.to_subtree_resolved(CURRENT_API_VERSION, &span_data_table); let attributes = attributes.map(|it| it.to_subtree_resolved(CURRENT_API_VERSION, &span_data_table)); + // Note, we spawn a new thread here so that thread locals allocation don't accumulate (this + // includes the proc-macro symbol interner) let result = thread::scope(|s| { let thread = thread::Builder::new() .stack_size(EXPANDER_STACK_SIZE) diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs index 7b9a10fdb8..1039daf850 100644 --- a/crates/rust-analyzer/src/reload.rs +++ b/crates/rust-analyzer/src/reload.rs @@ -184,14 +184,28 @@ impl GlobalState { message.push_str(err); message.push_str("\n\n"); }; - if let Some(Err(err)) = proc_macro_client { - status.health |= lsp_ext::Health::Warning; - format_to!( - message, - "Failed spawning proc-macro server for workspace `{}`: {err}", - ws.manifest_or_root() - ); - message.push_str("\n\n"); + match proc_macro_client { + Some(Err(err)) => { + status.health |= lsp_ext::Health::Warning; + format_to!( + message, + "Failed spawning proc-macro server for workspace `{}`: {err}", + ws.manifest_or_root() + ); + message.push_str("\n\n"); + } + Some(Ok(client)) => { + if let Some(err) = client.exited() { + status.health |= lsp_ext::Health::Warning; + format_to!( + message, + "proc-macro server for workspace `{}` exited: {err}", + ws.manifest_or_root() + ); + message.push_str("\n\n"); + } + } + _ => (), } } } From 3725ab3146a8dea5eed631cfa03652e9d6b0a3ba Mon Sep 17 00:00:00 2001 From: Shohei Wada Date: Tue, 2 Jul 2024 01:52:34 +0900 Subject: [PATCH 49/94] squash. 
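This patch (see the diff below) adds support for absolute import paths: a new `imports.prefixExternPrelude` setting (internally `ImportPathConfig::prefer_absolute`) which, when enabled, prefixes paths to items from external crates, including `std` and `core`, with `::`. The flag is threaded through `find_path`, the import-related assists, completions, diagnostics, SSR, path transforms, and the generated config docs and VS Code schema.

A minimal illustration of the intended effect, using the example from the option's documentation in the diff (the settings key is shown only as a comment for context, it is not Rust source):

    // with "rust-analyzer.imports.prefixExternPrelude": true, an inserted import looks like
    use ::std::io::Read;
    // with the default (false) it would be
    // use std::io::Read;

As covered by the new `partially_imported_with_prefer_absolute` test, a path that starts from a name that is already imported into scope is still rendered without the leading `::`.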
--- crates/hir-def/src/find_path.rs | 90 ++++++++++++++++++- crates/hir-def/src/lib.rs | 2 + crates/hir-ty/src/display.rs | 6 +- crates/ide-assists/src/assist_config.rs | 1 + .../src/handlers/add_missing_match_arms.rs | 1 + .../ide-assists/src/handlers/auto_import.rs | 1 + .../ide-assists/src/handlers/bool_to_enum.rs | 1 + .../src/handlers/convert_into_to_from.rs | 1 + .../convert_tuple_return_type_to_struct.rs | 1 + .../handlers/destructure_struct_binding.rs | 1 + .../src/handlers/extract_function.rs | 1 + .../extract_struct_from_enum_variant.rs | 1 + .../src/handlers/generate_deref.rs | 2 + .../ide-assists/src/handlers/generate_new.rs | 1 + .../src/handlers/qualify_method_call.rs | 1 + .../ide-assists/src/handlers/qualify_path.rs | 1 + .../replace_derive_with_manual_impl.rs | 1 + .../replace_qualified_name_with_use.rs | 1 + .../ide-assists/src/handlers/term_search.rs | 1 + .../src/handlers/toggle_async_sugar.rs | 1 + crates/ide-assists/src/tests.rs | 3 + crates/ide-completion/src/completions.rs | 1 + crates/ide-completion/src/completions/expr.rs | 2 + .../src/completions/flyimport.rs | 3 + .../ide-completion/src/completions/postfix.rs | 1 + crates/ide-completion/src/config.rs | 1 + crates/ide-completion/src/lib.rs | 1 + crates/ide-completion/src/render.rs | 1 + crates/ide-completion/src/snippet.rs | 1 + crates/ide-completion/src/tests.rs | 1 + crates/ide-completion/src/tests/flyimport.rs | 32 +++++++ crates/ide-db/src/imports/insert_use/tests.rs | 20 +++++ crates/ide-db/src/path_transform.rs | 19 +++- .../src/handlers/json_is_not_rust.rs | 1 + .../src/handlers/missing_fields.rs | 1 + .../src/handlers/typed_hole.rs | 1 + crates/ide-diagnostics/src/lib.rs | 2 + crates/ide-ssr/src/matching.rs | 6 +- .../rust-analyzer/src/cli/analysis_stats.rs | 7 +- crates/rust-analyzer/src/config.rs | 5 ++ .../src/integrated_benchmarks.rs | 4 + docs/user/generated_config.adoc | 5 ++ editors/code/package.json | 10 +++ 43 files changed, 233 insertions(+), 11 deletions(-) diff --git a/crates/hir-def/src/find_path.rs b/crates/hir-def/src/find_path.rs index 58a1872ef2..9a3c049541 100644 --- a/crates/hir-def/src/find_path.rs +++ b/crates/hir-def/src/find_path.rs @@ -183,6 +183,8 @@ fn find_path_for_module( let kind = if name_already_occupied_in_type_ns { cov_mark::hit!(ambiguous_crate_start); PathKind::Abs + } else if ctx.cfg.prefer_absolute { + PathKind::Abs } else { PathKind::Plain }; @@ -564,7 +566,13 @@ mod tests { /// item the `path` refers to returns that same path when called from the /// module the cursor is in. 
#[track_caller] - fn check_found_path_(ra_fixture: &str, path: &str, prefer_prelude: bool, expect: Expect) { + fn check_found_path_( + ra_fixture: &str, + path: &str, + prefer_prelude: bool, + prefer_absolute: bool, + expect: Expect, + ) { let (db, pos) = TestDB::with_position(ra_fixture); let module = db.module_at_position(pos); let parsed_path_file = @@ -604,7 +612,7 @@ mod tests { module, prefix, ignore_local_imports, - ImportPathConfig { prefer_no_std: false, prefer_prelude }, + ImportPathConfig { prefer_no_std: false, prefer_prelude, prefer_absolute }, ); format_to!( res, @@ -619,11 +627,15 @@ mod tests { } fn check_found_path(ra_fixture: &str, path: &str, expect: Expect) { - check_found_path_(ra_fixture, path, false, expect); + check_found_path_(ra_fixture, path, false, false, expect); } fn check_found_path_prelude(ra_fixture: &str, path: &str, expect: Expect) { - check_found_path_(ra_fixture, path, true, expect); + check_found_path_(ra_fixture, path, true, false, expect); + } + + fn check_found_path_absolute(ra_fixture: &str, path: &str, expect: Expect) { + check_found_path_(ra_fixture, path, false, true, expect); } #[test] @@ -870,6 +882,39 @@ pub mod ast { ); } + #[test] + fn partially_imported_with_prefer_absolute() { + cov_mark::check!(partially_imported); + // Similar to partially_imported test case above, but with prefer_absolute enabled. + // Even if the actual imported item is in external crate, if the path to that item + // is starting from the imported name, then the path should not start from "::". + // i.e. The first line in the expected output should not start from "::". + check_found_path_absolute( + r#" +//- /main.rs crate:main deps:syntax + +use syntax::ast; +$0 + +//- /lib.rs crate:syntax +pub mod ast { + pub enum ModuleItem { + A, B, C, + } +} + "#, + "syntax::ast::ModuleItem", + expect![[r#" + Plain (imports ✔): ast::ModuleItem + Plain (imports ✖): ::syntax::ast::ModuleItem + ByCrate(imports ✔): crate::ast::ModuleItem + ByCrate(imports ✖): ::syntax::ast::ModuleItem + BySelf (imports ✔): self::ast::ModuleItem + BySelf (imports ✖): ::syntax::ast::ModuleItem + "#]], + ); + } + #[test] fn same_crate_reexport() { check_found_path( @@ -1769,6 +1814,43 @@ pub mod foo { ); } + #[test] + fn respects_absolute_setting() { + let ra_fixture = r#" +//- /main.rs crate:main deps:krate +$0 +//- /krate.rs crate:krate +pub mod foo { + pub struct Foo; +} +"#; + check_found_path( + ra_fixture, + "krate::foo::Foo", + expect![[r#" + Plain (imports ✔): krate::foo::Foo + Plain (imports ✖): krate::foo::Foo + ByCrate(imports ✔): krate::foo::Foo + ByCrate(imports ✖): krate::foo::Foo + BySelf (imports ✔): krate::foo::Foo + BySelf (imports ✖): krate::foo::Foo + "#]], + ); + + check_found_path_absolute( + ra_fixture, + "krate::foo::Foo", + expect![[r#" + Plain (imports ✔): ::krate::foo::Foo + Plain (imports ✖): ::krate::foo::Foo + ByCrate(imports ✔): ::krate::foo::Foo + ByCrate(imports ✖): ::krate::foo::Foo + BySelf (imports ✔): ::krate::foo::Foo + BySelf (imports ✖): ::krate::foo::Foo + "#]], + ); + } + #[test] fn respect_segment_length() { check_found_path( diff --git a/crates/hir-def/src/lib.rs b/crates/hir-def/src/lib.rs index 211cdd35fd..fc026a14d4 100644 --- a/crates/hir-def/src/lib.rs +++ b/crates/hir-def/src/lib.rs @@ -116,6 +116,8 @@ pub struct ImportPathConfig { pub prefer_no_std: bool, /// If true, prefer import paths containing a prelude module. pub prefer_prelude: bool, + /// If true, prefer abs path (starting with `::`) where it is available. 
+ pub prefer_absolute: bool, } #[derive(Debug)] diff --git a/crates/hir-ty/src/display.rs b/crates/hir-ty/src/display.rs index 458970aa70..75508707e9 100644 --- a/crates/hir-ty/src/display.rs +++ b/crates/hir-ty/src/display.rs @@ -1043,7 +1043,11 @@ impl HirDisplay for Ty { module_id, PrefixKind::Plain, false, - ImportPathConfig { prefer_no_std: false, prefer_prelude: true }, + ImportPathConfig { + prefer_no_std: false, + prefer_prelude: true, + prefer_absolute: false, + }, ) { write!(f, "{}", path.display(f.db.upcast()))?; } else { diff --git a/crates/ide-assists/src/assist_config.rs b/crates/ide-assists/src/assist_config.rs index 410237f9ca..f1de6aba05 100644 --- a/crates/ide-assists/src/assist_config.rs +++ b/crates/ide-assists/src/assist_config.rs @@ -15,6 +15,7 @@ pub struct AssistConfig { pub insert_use: InsertUseConfig, pub prefer_no_std: bool, pub prefer_prelude: bool, + pub prefer_absolute: bool, pub assist_emit_must_use: bool, pub term_search_fuel: u64, pub term_search_borrowck: bool, diff --git a/crates/ide-assists/src/handlers/add_missing_match_arms.rs b/crates/ide-assists/src/handlers/add_missing_match_arms.rs index 22a4674fd4..4eb29a2378 100644 --- a/crates/ide-assists/src/handlers/add_missing_match_arms.rs +++ b/crates/ide-assists/src/handlers/add_missing_match_arms.rs @@ -74,6 +74,7 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>) let cfg = ImportPathConfig { prefer_no_std: ctx.config.prefer_no_std, prefer_prelude: ctx.config.prefer_prelude, + prefer_absolute: ctx.config.prefer_absolute, }; let module = ctx.sema.scope(expr.syntax())?.module(); diff --git a/crates/ide-assists/src/handlers/auto_import.rs b/crates/ide-assists/src/handlers/auto_import.rs index fe895eb259..f17635972b 100644 --- a/crates/ide-assists/src/handlers/auto_import.rs +++ b/crates/ide-assists/src/handlers/auto_import.rs @@ -93,6 +93,7 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option< let cfg = ImportPathConfig { prefer_no_std: ctx.config.prefer_no_std, prefer_prelude: ctx.config.prefer_prelude, + prefer_absolute: ctx.config.prefer_absolute, }; let (import_assets, syntax_under_caret) = find_importable_node(ctx)?; diff --git a/crates/ide-assists/src/handlers/bool_to_enum.rs b/crates/ide-assists/src/handlers/bool_to_enum.rs index 0aa23ccc84..f094c5c09f 100644 --- a/crates/ide-assists/src/handlers/bool_to_enum.rs +++ b/crates/ide-assists/src/handlers/bool_to_enum.rs @@ -340,6 +340,7 @@ fn augment_references_with_imports( let cfg = ImportPathConfig { prefer_no_std: ctx.config.prefer_no_std, prefer_prelude: ctx.config.prefer_prelude, + prefer_absolute: ctx.config.prefer_absolute, }; references diff --git a/crates/ide-assists/src/handlers/convert_into_to_from.rs b/crates/ide-assists/src/handlers/convert_into_to_from.rs index be433c3333..92da2678f9 100644 --- a/crates/ide-assists/src/handlers/convert_into_to_from.rs +++ b/crates/ide-assists/src/handlers/convert_into_to_from.rs @@ -47,6 +47,7 @@ pub(crate) fn convert_into_to_from(acc: &mut Assists, ctx: &AssistContext<'_>) - let cfg = ImportPathConfig { prefer_no_std: ctx.config.prefer_no_std, prefer_prelude: ctx.config.prefer_prelude, + prefer_absolute: ctx.config.prefer_absolute, }; let src_type_path = { diff --git a/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs b/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs index 241fc3b7a3..c55ff24ae3 100644 --- a/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs +++ 
b/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs @@ -186,6 +186,7 @@ fn augment_references_with_imports( let cfg = ImportPathConfig { prefer_no_std: ctx.config.prefer_no_std, prefer_prelude: ctx.config.prefer_prelude, + prefer_absolute: ctx.config.prefer_absolute, }; references diff --git a/crates/ide-assists/src/handlers/destructure_struct_binding.rs b/crates/ide-assists/src/handlers/destructure_struct_binding.rs index 7618871552..666e1a1496 100644 --- a/crates/ide-assists/src/handlers/destructure_struct_binding.rs +++ b/crates/ide-assists/src/handlers/destructure_struct_binding.rs @@ -90,6 +90,7 @@ fn collect_data(ident_pat: ast::IdentPat, ctx: &AssistContext<'_>) -> Option) -> Op ImportPathConfig { prefer_no_std: ctx.config.prefer_no_std, prefer_prelude: ctx.config.prefer_prelude, + prefer_absolute: ctx.config.prefer_absolute, }, ); diff --git a/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs b/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs index 3c6d73b62e..54323e2928 100644 --- a/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs +++ b/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs @@ -393,6 +393,7 @@ fn process_references( ImportPathConfig { prefer_no_std: ctx.config.prefer_no_std, prefer_prelude: ctx.config.prefer_prelude, + prefer_absolute: ctx.config.prefer_absolute, }, ); if let Some(mut mod_path) = mod_path { diff --git a/crates/ide-assists/src/handlers/generate_deref.rs b/crates/ide-assists/src/handlers/generate_deref.rs index 9a441fc5eb..cc33439dd5 100644 --- a/crates/ide-assists/src/handlers/generate_deref.rs +++ b/crates/ide-assists/src/handlers/generate_deref.rs @@ -64,6 +64,7 @@ fn generate_record_deref(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<( ImportPathConfig { prefer_no_std: ctx.config.prefer_no_std, prefer_prelude: ctx.config.prefer_prelude, + prefer_absolute: ctx.config.prefer_absolute, }, )?; @@ -111,6 +112,7 @@ fn generate_tuple_deref(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<() ImportPathConfig { prefer_no_std: ctx.config.prefer_no_std, prefer_prelude: ctx.config.prefer_prelude, + prefer_absolute: ctx.config.prefer_absolute, }, )?; diff --git a/crates/ide-assists/src/handlers/generate_new.rs b/crates/ide-assists/src/handlers/generate_new.rs index 52007e0e29..6056c80888 100644 --- a/crates/ide-assists/src/handlers/generate_new.rs +++ b/crates/ide-assists/src/handlers/generate_new.rs @@ -65,6 +65,7 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option ImportPathConfig { prefer_no_std: ctx.config.prefer_no_std, prefer_prelude: ctx.config.prefer_prelude, + prefer_absolute: ctx.config.prefer_absolute, }, )?; diff --git a/crates/ide-assists/src/handlers/qualify_method_call.rs b/crates/ide-assists/src/handlers/qualify_method_call.rs index 5d1140d57a..89e24fafc5 100644 --- a/crates/ide-assists/src/handlers/qualify_method_call.rs +++ b/crates/ide-assists/src/handlers/qualify_method_call.rs @@ -53,6 +53,7 @@ pub(crate) fn qualify_method_call(acc: &mut Assists, ctx: &AssistContext<'_>) -> ImportPathConfig { prefer_no_std: ctx.config.prefer_no_std, prefer_prelude: ctx.config.prefer_prelude, + prefer_absolute: ctx.config.prefer_absolute, }, )?; diff --git a/crates/ide-assists/src/handlers/qualify_path.rs b/crates/ide-assists/src/handlers/qualify_path.rs index 978b719c30..ca6c7c58b7 100644 --- a/crates/ide-assists/src/handlers/qualify_path.rs +++ b/crates/ide-assists/src/handlers/qualify_path.rs @@ -40,6 +40,7 @@ 
pub(crate) fn qualify_path(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option let cfg = ImportPathConfig { prefer_no_std: ctx.config.prefer_no_std, prefer_prelude: ctx.config.prefer_prelude, + prefer_absolute: ctx.config.prefer_absolute, }; let mut proposed_imports: Vec<_> = diff --git a/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs b/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs index e792debaa5..5582256a17 100644 --- a/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs +++ b/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs @@ -89,6 +89,7 @@ pub(crate) fn replace_derive_with_manual_impl( ImportPathConfig { prefer_no_std: ctx.config.prefer_no_std, prefer_prelude: ctx.config.prefer_prelude, + prefer_absolute: ctx.config.prefer_absolute, }, ) .as_ref() diff --git a/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs b/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs index 188165e776..f1467837dd 100644 --- a/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs +++ b/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs @@ -70,6 +70,7 @@ pub(crate) fn replace_qualified_name_with_use( ImportPathConfig { prefer_no_std: ctx.config.prefer_no_std, prefer_prelude: ctx.config.prefer_prelude, + prefer_absolute: ctx.config.prefer_absolute, }, ) }) diff --git a/crates/ide-assists/src/handlers/term_search.rs b/crates/ide-assists/src/handlers/term_search.rs index 874b42f51b..7a91179975 100644 --- a/crates/ide-assists/src/handlers/term_search.rs +++ b/crates/ide-assists/src/handlers/term_search.rs @@ -60,6 +60,7 @@ pub(crate) fn term_search(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option< ImportPathConfig { prefer_no_std: ctx.config.prefer_no_std, prefer_prelude: ctx.config.prefer_prelude, + prefer_absolute: ctx.config.prefer_absolute, }, ) .ok() diff --git a/crates/ide-assists/src/handlers/toggle_async_sugar.rs b/crates/ide-assists/src/handlers/toggle_async_sugar.rs index 30e09648ea..f79f87db9a 100644 --- a/crates/ide-assists/src/handlers/toggle_async_sugar.rs +++ b/crates/ide-assists/src/handlers/toggle_async_sugar.rs @@ -142,6 +142,7 @@ pub(crate) fn desugar_async_into_impl_future( ImportPathConfig { prefer_no_std: ctx.config.prefer_no_std, prefer_prelude: ctx.config.prefer_prelude, + prefer_absolute: ctx.config.prefer_absolute, }, )?; let trait_path = trait_path.display(ctx.db()); diff --git a/crates/ide-assists/src/tests.rs b/crates/ide-assists/src/tests.rs index bd10b5481b..2dcfda334b 100644 --- a/crates/ide-assists/src/tests.rs +++ b/crates/ide-assists/src/tests.rs @@ -30,6 +30,7 @@ pub(crate) const TEST_CONFIG: AssistConfig = AssistConfig { }, prefer_no_std: false, prefer_prelude: true, + prefer_absolute: false, assist_emit_must_use: false, term_search_fuel: 400, term_search_borrowck: true, @@ -47,6 +48,7 @@ pub(crate) const TEST_CONFIG_NO_SNIPPET_CAP: AssistConfig = AssistConfig { }, prefer_no_std: false, prefer_prelude: true, + prefer_absolute: false, assist_emit_must_use: false, term_search_fuel: 400, term_search_borrowck: true, @@ -64,6 +66,7 @@ pub(crate) const TEST_CONFIG_IMPORT_ONE: AssistConfig = AssistConfig { }, prefer_no_std: false, prefer_prelude: true, + prefer_absolute: false, assist_emit_must_use: false, term_search_fuel: 400, term_search_borrowck: true, diff --git a/crates/ide-completion/src/completions.rs b/crates/ide-completion/src/completions.rs index 11ffc8bc44..995a4443ed 100644 --- a/crates/ide-completion/src/completions.rs +++ 
b/crates/ide-completion/src/completions.rs @@ -639,6 +639,7 @@ fn enum_variants_with_paths( ImportPathConfig { prefer_no_std: ctx.config.prefer_no_std, prefer_prelude: ctx.config.prefer_prelude, + prefer_absolute: ctx.config.prefer_absolute, }, ) { // Variants with trivial paths are already added by the existing completion logic, diff --git a/crates/ide-completion/src/completions/expr.rs b/crates/ide-completion/src/completions/expr.rs index 7281c607da..01f9368aa4 100644 --- a/crates/ide-completion/src/completions/expr.rs +++ b/crates/ide-completion/src/completions/expr.rs @@ -177,6 +177,7 @@ pub(crate) fn complete_expr_path( ImportPathConfig { prefer_no_std: ctx.config.prefer_no_std, prefer_prelude: ctx.config.prefer_prelude, + prefer_absolute: ctx.config.prefer_absolute, }, ) .filter(|it| it.len() > 1); @@ -202,6 +203,7 @@ pub(crate) fn complete_expr_path( ImportPathConfig { prefer_no_std: ctx.config.prefer_no_std, prefer_prelude: ctx.config.prefer_prelude, + prefer_absolute: ctx.config.prefer_absolute, }, ) .filter(|it| it.len() > 1); diff --git a/crates/ide-completion/src/completions/flyimport.rs b/crates/ide-completion/src/completions/flyimport.rs index 71d44a57cb..3a8b9c0cb9 100644 --- a/crates/ide-completion/src/completions/flyimport.rs +++ b/crates/ide-completion/src/completions/flyimport.rs @@ -259,6 +259,7 @@ fn import_on_the_fly( let import_cfg = ImportPathConfig { prefer_no_std: ctx.config.prefer_no_std, prefer_prelude: ctx.config.prefer_prelude, + prefer_absolute: ctx.config.prefer_absolute, }; import_assets @@ -309,6 +310,7 @@ fn import_on_the_fly_pat_( let cfg = ImportPathConfig { prefer_no_std: ctx.config.prefer_no_std, prefer_prelude: ctx.config.prefer_prelude, + prefer_absolute: ctx.config.prefer_absolute, }; import_assets @@ -354,6 +356,7 @@ fn import_on_the_fly_method( let cfg = ImportPathConfig { prefer_no_std: ctx.config.prefer_no_std, prefer_prelude: ctx.config.prefer_prelude, + prefer_absolute: ctx.config.prefer_absolute, }; import_assets diff --git a/crates/ide-completion/src/completions/postfix.rs b/crates/ide-completion/src/completions/postfix.rs index 5041ef8d8a..d919609237 100644 --- a/crates/ide-completion/src/completions/postfix.rs +++ b/crates/ide-completion/src/completions/postfix.rs @@ -63,6 +63,7 @@ pub(crate) fn complete_postfix( let cfg = ImportPathConfig { prefer_no_std: ctx.config.prefer_no_std, prefer_prelude: ctx.config.prefer_prelude, + prefer_absolute: ctx.config.prefer_absolute, }; if let Some(drop_trait) = ctx.famous_defs().core_ops_Drop() { diff --git a/crates/ide-completion/src/config.rs b/crates/ide-completion/src/config.rs index 809c305ed8..7d062cb23e 100644 --- a/crates/ide-completion/src/config.rs +++ b/crates/ide-completion/src/config.rs @@ -22,6 +22,7 @@ pub struct CompletionConfig { pub insert_use: InsertUseConfig, pub prefer_no_std: bool, pub prefer_prelude: bool, + pub prefer_absolute: bool, pub snippets: Vec, pub limit: Option, } diff --git a/crates/ide-completion/src/lib.rs b/crates/ide-completion/src/lib.rs index 7150fe3f01..7d9c2c7c60 100644 --- a/crates/ide-completion/src/lib.rs +++ b/crates/ide-completion/src/lib.rs @@ -253,6 +253,7 @@ pub fn resolve_completion_edits( let cfg = ImportPathConfig { prefer_no_std: config.prefer_no_std, prefer_prelude: config.prefer_prelude, + prefer_absolute: config.prefer_absolute, }; imports.into_iter().for_each(|(full_import_path, imported_name)| { diff --git a/crates/ide-completion/src/render.rs b/crates/ide-completion/src/render.rs index b98f745f17..fe9e2e5268 100644 --- 
a/crates/ide-completion/src/render.rs +++ b/crates/ide-completion/src/render.rs @@ -298,6 +298,7 @@ pub(crate) fn render_expr( let cfg = ImportPathConfig { prefer_no_std: ctx.config.prefer_no_std, prefer_prelude: ctx.config.prefer_prelude, + prefer_absolute: ctx.config.prefer_absolute, }; let label = expr.gen_source_code(&ctx.scope, &mut label_formatter, cfg).ok()?; diff --git a/crates/ide-completion/src/snippet.rs b/crates/ide-completion/src/snippet.rs index 07836040b4..5885b74e09 100644 --- a/crates/ide-completion/src/snippet.rs +++ b/crates/ide-completion/src/snippet.rs @@ -172,6 +172,7 @@ fn import_edits(ctx: &CompletionContext<'_>, requires: &[GreenNode]) -> Option { parent.segment()?.name_ref()?, ) .and_then(|trait_ref| { - let cfg = - ImportPathConfig { prefer_no_std: false, prefer_prelude: true }; + let cfg = ImportPathConfig { + prefer_no_std: false, + prefer_prelude: true, + prefer_absolute: false, + }; let found_path = self.target_module.find_path( self.source_scope.db.upcast(), hir::ModuleDef::Trait(trait_ref), @@ -348,7 +351,11 @@ impl Ctx<'_> { } } - let cfg = ImportPathConfig { prefer_no_std: false, prefer_prelude: true }; + let cfg = ImportPathConfig { + prefer_no_std: false, + prefer_prelude: true, + prefer_absolute: false, + }; let found_path = self.target_module.find_path(self.source_scope.db.upcast(), def, cfg)?; let res = mod_path_to_ast(&found_path).clone_for_update(); @@ -383,7 +390,11 @@ impl Ctx<'_> { if let Some(adt) = ty.as_adt() { if let ast::Type::PathType(path_ty) = &ast_ty { - let cfg = ImportPathConfig { prefer_no_std: false, prefer_prelude: true }; + let cfg = ImportPathConfig { + prefer_no_std: false, + prefer_prelude: true, + prefer_absolute: false, + }; let found_path = self.target_module.find_path( self.source_scope.db.upcast(), ModuleDef::from(adt), diff --git a/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs b/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs index 2b8779044f..a9c0e3b731 100644 --- a/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs +++ b/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs @@ -146,6 +146,7 @@ pub(crate) fn json_in_items( let cfg = ImportPathConfig { prefer_no_std: config.prefer_no_std, prefer_prelude: config.prefer_prelude, + prefer_absolute: config.prefer_absolute, }; if !scope_has("Serialize") { diff --git a/crates/ide-diagnostics/src/handlers/missing_fields.rs b/crates/ide-diagnostics/src/handlers/missing_fields.rs index 9eff84b898..6a809cb0ce 100644 --- a/crates/ide-diagnostics/src/handlers/missing_fields.rs +++ b/crates/ide-diagnostics/src/handlers/missing_fields.rs @@ -128,6 +128,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingFields) -> Option, d: &hir::TypedHole) -> Option ImportPathConfig { prefer_no_std: ctx.config.prefer_no_std, prefer_prelude: ctx.config.prefer_prelude, + prefer_absolute: ctx.config.prefer_absolute, }, ) .ok() diff --git a/crates/ide-diagnostics/src/lib.rs b/crates/ide-diagnostics/src/lib.rs index 2bf3f5f0ce..ccb5440513 100644 --- a/crates/ide-diagnostics/src/lib.rs +++ b/crates/ide-diagnostics/src/lib.rs @@ -231,6 +231,7 @@ pub struct DiagnosticsConfig { pub insert_use: InsertUseConfig, pub prefer_no_std: bool, pub prefer_prelude: bool, + pub prefer_absolute: bool, pub term_search_fuel: u64, pub term_search_borrowck: bool, } @@ -258,6 +259,7 @@ impl DiagnosticsConfig { }, prefer_no_std: false, prefer_prelude: true, + prefer_absolute: false, term_search_fuel: 400, term_search_borrowck: true, } diff --git a/crates/ide-ssr/src/matching.rs 
b/crates/ide-ssr/src/matching.rs index b29053c0c2..0d22bc94c6 100644 --- a/crates/ide-ssr/src/matching.rs +++ b/crates/ide-ssr/src/matching.rs @@ -663,7 +663,11 @@ impl Match { .module(); for (path, resolved_path) in &template.resolved_paths { if let hir::PathResolution::Def(module_def) = resolved_path.resolution { - let cfg = ImportPathConfig { prefer_no_std: false, prefer_prelude: true }; + let cfg = ImportPathConfig { + prefer_no_std: false, + prefer_prelude: true, + prefer_absolute: false, + }; let mod_path = module.find_path(sema.db, module_def, cfg).ok_or_else(|| { match_error!("Failed to render template path `{}` at match location") })?; diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs index a934e14ddb..31dd2635ac 100644 --- a/crates/rust-analyzer/src/cli/analysis_stats.rs +++ b/crates/rust-analyzer/src/cli/analysis_stats.rs @@ -443,7 +443,11 @@ impl flags::AnalysisStats { .gen_source_code( &scope, &mut formatter, - ImportPathConfig { prefer_no_std: false, prefer_prelude: true }, + ImportPathConfig { + prefer_no_std: false, + prefer_prelude: true, + prefer_absolute: false, + }, ) .unwrap(); syntax_hit_found |= trim(&original_text) == trim(&generated); @@ -992,6 +996,7 @@ impl flags::AnalysisStats { }, prefer_no_std: false, prefer_prelude: true, + prefer_absolute: false, style_lints: false, term_search_fuel: 400, term_search_borrowck: true, diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index ef80d83837..e2d3426a27 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs @@ -359,6 +359,8 @@ config_data! { imports_preferPrelude: bool = false, /// The path structure for newly inserted paths to use. imports_prefix: ImportPrefixDef = ImportPrefixDef::Plain, + /// Whether to prefix external (including std, core) crate imports with `::`. e.g. "use ::std::io::Read;". 
+ imports_prefixExternPrelude: bool = false, } } @@ -1280,6 +1282,7 @@ impl Config { prefer_no_std: self.imports_preferNoStd(source_root).to_owned(), assist_emit_must_use: self.assist_emitMustUse(source_root).to_owned(), prefer_prelude: self.imports_preferPrelude(source_root).to_owned(), + prefer_absolute: self.imports_prefixExternPrelude(source_root).to_owned(), term_search_fuel: self.assist_termSearch_fuel(source_root).to_owned() as u64, term_search_borrowck: self.assist_termSearch_borrowcheck(source_root).to_owned(), } @@ -1311,6 +1314,7 @@ impl Config { insert_use: self.insert_use_config(source_root), prefer_no_std: self.imports_preferNoStd(source_root).to_owned(), prefer_prelude: self.imports_preferPrelude(source_root).to_owned(), + prefer_absolute: self.imports_prefixExternPrelude(source_root).to_owned(), snippets: self.snippets.clone().to_vec(), limit: self.completion_limit().to_owned(), enable_term_search: self.completion_termSearch_enable().to_owned(), @@ -1339,6 +1343,7 @@ impl Config { insert_use: self.insert_use_config(source_root), prefer_no_std: self.imports_preferNoStd(source_root).to_owned(), prefer_prelude: self.imports_preferPrelude(source_root).to_owned(), + prefer_absolute: self.imports_prefixExternPrelude(source_root).to_owned(), style_lints: self.diagnostics_styleLints_enable().to_owned(), term_search_fuel: self.assist_termSearch_fuel(source_root).to_owned() as u64, term_search_borrowck: self.assist_termSearch_borrowcheck(source_root).to_owned(), diff --git a/crates/rust-analyzer/src/integrated_benchmarks.rs b/crates/rust-analyzer/src/integrated_benchmarks.rs index 65fbc96dad..ff8eb6c861 100644 --- a/crates/rust-analyzer/src/integrated_benchmarks.rs +++ b/crates/rust-analyzer/src/integrated_benchmarks.rs @@ -152,6 +152,7 @@ fn integrated_completion_benchmark() { }, prefer_no_std: false, prefer_prelude: true, + prefer_absolute: false, snippets: Vec::new(), limit: None, }; @@ -197,6 +198,7 @@ fn integrated_completion_benchmark() { }, prefer_no_std: false, prefer_prelude: true, + prefer_absolute: false, snippets: Vec::new(), limit: None, }; @@ -240,6 +242,7 @@ fn integrated_completion_benchmark() { }, prefer_no_std: false, prefer_prelude: true, + prefer_absolute: false, snippets: Vec::new(), limit: None, }; @@ -299,6 +302,7 @@ fn integrated_diagnostics_benchmark() { }, prefer_no_std: false, prefer_prelude: false, + prefer_absolute: false, term_search_fuel: 400, term_search_borrowck: true, }; diff --git a/docs/user/generated_config.adoc b/docs/user/generated_config.adoc index ea86d21e47..a95c897991 100644 --- a/docs/user/generated_config.adoc +++ b/docs/user/generated_config.adoc @@ -594,6 +594,11 @@ Whether to prefer import paths containing a `prelude` module. -- The path structure for newly inserted paths to use. -- +[[rust-analyzer.imports.prefixExternPrelude]]rust-analyzer.imports.prefixExternPrelude (default: `false`):: ++ +-- +Whether to prefix external (including std, core) crate imports with `::`. e.g. "use ::std::io::Read;". +-- [[rust-analyzer.inlayHints.bindingModeHints.enable]]rust-analyzer.inlayHints.bindingModeHints.enable (default: `false`):: + -- diff --git a/editors/code/package.json b/editors/code/package.json index 1c41114239..eea72345f8 100644 --- a/editors/code/package.json +++ b/editors/code/package.json @@ -1732,6 +1732,16 @@ } } }, + { + "title": "imports", + "properties": { + "rust-analyzer.imports.prefixExternPrelude": { + "markdownDescription": "Whether to prefix external (including std, core) crate imports with `::`. e.g. 
\"use ::std::io::Read;\".", + "default": false, + "type": "boolean" + } + } + }, { "title": "inlayHints", "properties": { From 1a929d64856bf8bc9a73579a1905747eb04fcb35 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 2 Jul 2024 10:13:02 +0200 Subject: [PATCH 50/94] Fix lifetime parameters moving paramter defaults --- crates/hir-ty/src/builder.rs | 12 ++- crates/hir-ty/src/generics.rs | 13 ++- crates/hir-ty/src/lower.rs | 129 +++++++++++++------------- crates/hir-ty/src/tests/regression.rs | 23 +++++ crates/hir/src/lib.rs | 4 +- 5 files changed, 113 insertions(+), 68 deletions(-) diff --git a/crates/hir-ty/src/builder.rs b/crates/hir-ty/src/builder.rs index bccdc9a6c5..76d9c60f6f 100644 --- a/crates/hir-ty/src/builder.rs +++ b/crates/hir-ty/src/builder.rs @@ -63,7 +63,14 @@ impl TyBuilder { } fn build_internal(self) -> (D, Substitution) { - assert_eq!(self.vec.len(), self.param_kinds.len(), "{:?}", &self.param_kinds); + assert_eq!( + self.vec.len(), + self.param_kinds.len(), + "{} args received, {} expected ({:?})", + self.vec.len(), + self.param_kinds.len(), + &self.param_kinds + ); for (a, e) in self.vec.iter().zip(self.param_kinds.iter()) { self.assert_match_kind(a, e); } @@ -297,7 +304,8 @@ impl TyBuilder { ) -> Self { // Note that we're building ADT, so we never have parent generic parameters. let defaults = db.generic_defaults(self.data.into()); - for default_ty in defaults.iter().skip(self.vec.len()) { + + for default_ty in &defaults[self.vec.len()..] { // NOTE(skip_binders): we only check if the arg type is error type. if let Some(x) = default_ty.skip_binders().ty(Interner) { if x.is_unknown() { diff --git a/crates/hir-ty/src/generics.rs b/crates/hir-ty/src/generics.rs index 7f8dd920e6..2685dc0ef8 100644 --- a/crates/hir-ty/src/generics.rs +++ b/crates/hir-ty/src/generics.rs @@ -20,6 +20,7 @@ use hir_def::{ LocalLifetimeParamId, LocalTypeOrConstParamId, Lookup, TypeOrConstParamId, TypeParamId, }; use intern::Interned; +use stdx::TupleExt; use crate::{db::HirDatabase, lt_to_placeholder_idx, to_placeholder_idx, Interner, Substitution}; @@ -57,7 +58,7 @@ impl Generics { self.iter_self().map(|(id, _)| id) } - fn iter_parent_id(&self) -> impl Iterator + '_ { + pub(crate) fn iter_parent_id(&self) -> impl Iterator + '_ { self.iter_parent().map(|(id, _)| id) } @@ -67,6 +68,16 @@ impl Generics { self.params.iter_type_or_consts() } + pub(crate) fn iter_self_type_or_consts_id( + &self, + ) -> impl DoubleEndedIterator + '_ { + self.params.iter_type_or_consts().map(from_toc_id(self)).map(TupleExt::head) + } + + pub(crate) fn iter_self_lt_id(&self) -> impl DoubleEndedIterator + '_ { + self.params.iter_lt().map(from_lt_id(self)).map(TupleExt::head) + } + /// Iterate over the params followed by the parent params. pub(crate) fn iter( &self, diff --git a/crates/hir-ty/src/lower.rs b/crates/hir-ty/src/lower.rs index fd215adde2..54f4ee782f 100644 --- a/crates/hir-ty/src/lower.rs +++ b/crates/hir-ty/src/lower.rs @@ -812,13 +812,13 @@ impl<'a> TyLoweringContext<'a> { infer_args: bool, explicit_self_ty: Option, ) -> Substitution { - // Remember that the item's own generic args come before its parent's. 
- let mut substs = Vec::new(); - let def = if let Some(d) = def { - d - } else { - return Substitution::empty(Interner); - }; + let Some(def) = def else { return Substitution::empty(Interner) }; + + // Order is + // - Optional Self parameter + // - Type or Const parameters + // - Lifetime parameters + // - Parent parameters let def_generics = generics(self.db.upcast(), def); let ( parent_params, @@ -832,45 +832,46 @@ impl<'a> TyLoweringContext<'a> { self_param as usize + type_params + const_params + impl_trait_params + lifetime_params; let total_len = parent_params + item_len; - let ty_error = TyKind::Error.intern(Interner).cast(Interner); + let mut substs = Vec::new(); - let mut def_generic_iter = def_generics.iter_id(); + // we need to iterate the lifetime and type/const params separately as our order of them + // differs from the supplied syntax - let fill_self_params = || { + let ty_error = || TyKind::Error.intern(Interner).cast(Interner); + let mut def_toc_iter = def_generics.iter_self_type_or_consts_id(); + let mut def_lt_iter = def_generics.iter_self_lt_id(); + let fill_self_param = || { if self_param { - let self_ty = - explicit_self_ty.map(|x| x.cast(Interner)).unwrap_or_else(|| ty_error.clone()); + let self_ty = explicit_self_ty.map(|x| x.cast(Interner)).unwrap_or_else(ty_error); - if let Some(id) = def_generic_iter.next() { - assert!(matches!( - id, - GenericParamId::TypeParamId(_) | GenericParamId::LifetimeParamId(_) - )); + if let Some(id) = def_toc_iter.next() { + assert!(matches!(id, GenericParamId::TypeParamId(_))); substs.push(self_ty); } } }; let mut had_explicit_args = false; - if let Some(generic_args) = &args_and_bindings { - if !generic_args.has_self_type { - fill_self_params(); + let mut lifetimes = SmallVec::<[_; 1]>::new(); + if let Some(&GenericArgs { ref args, has_self_type, .. 
}) = args_and_bindings { + if !has_self_type { + fill_self_param(); } - let expected_num = if generic_args.has_self_type { + let expected_num = if has_self_type { self_param as usize + type_params + const_params } else { type_params + const_params }; - let skip = if generic_args.has_self_type && !self_param { 1 } else { 0 }; - // if args are provided, it should be all of them, but we can't rely on that - for arg in generic_args - .args + let skip = if has_self_type && !self_param { 1 } else { 0 }; + // if non-lifetime args are provided, it should be all of them, but we can't rely on that + for arg in args .iter() .filter(|arg| !matches!(arg, GenericArg::Lifetime(_))) .skip(skip) .take(expected_num) { - if let Some(id) = def_generic_iter.next() { + if let Some(id) = def_toc_iter.next() { + had_explicit_args = true; let arg = generic_arg_to_chalk( self.db, id, @@ -880,20 +881,16 @@ impl<'a> TyLoweringContext<'a> { |_, const_ref, ty| self.lower_const(const_ref, ty), |_, lifetime_ref| self.lower_lifetime(lifetime_ref), ); - had_explicit_args = true; substs.push(arg); } } - for arg in generic_args - .args + for arg in args .iter() .filter(|arg| matches!(arg, GenericArg::Lifetime(_))) .take(lifetime_params) { - // Taking into the fact that def_generic_iter will always have lifetimes at the end - // Should have some test cases tho to test this behaviour more properly - if let Some(id) = def_generic_iter.next() { + if let Some(id) = def_lt_iter.next() { let arg = generic_arg_to_chalk( self.db, id, @@ -903,59 +900,65 @@ impl<'a> TyLoweringContext<'a> { |_, const_ref, ty| self.lower_const(const_ref, ty), |_, lifetime_ref| self.lower_lifetime(lifetime_ref), ); - had_explicit_args = true; - substs.push(arg); + lifetimes.push(arg); } } } else { - fill_self_params(); + fill_self_param(); } - // These params include those of parent. - let remaining_params: SmallVec<[_; 2]> = def_generic_iter - .map(|id| match id { - GenericParamId::ConstParamId(x) => { - unknown_const_as_generic(self.db.const_param_ty(x)) - } - GenericParamId::TypeParamId(_) => ty_error.clone(), - GenericParamId::LifetimeParamId(_) => error_lifetime().cast(Interner), - }) - .collect(); - assert_eq!(remaining_params.len() + substs.len(), total_len); - + let param_to_err = |id| match id { + GenericParamId::ConstParamId(x) => unknown_const_as_generic(self.db.const_param_ty(x)), + GenericParamId::TypeParamId(_) => ty_error(), + GenericParamId::LifetimeParamId(_) => error_lifetime().cast(Interner), + }; // handle defaults. In expression or pattern path segments without // explicitly specified type arguments, missing type arguments are inferred // (i.e. defaults aren't used). // Generic parameters for associated types are not supposed to have defaults, so we just // ignore them. 
- let is_assoc_ty = if let GenericDefId::TypeAliasId(id) = def { - let container = id.lookup(self.db.upcast()).container; - matches!(container, ItemContainerId::TraitId(_)) - } else { - false + let is_assoc_ty = || match def { + GenericDefId::TypeAliasId(id) => { + matches!(id.lookup(self.db.upcast()).container, ItemContainerId::TraitId(_)) + } + _ => false, }; - if !is_assoc_ty && (!infer_args || had_explicit_args) { - let defaults = self.db.generic_defaults(def); - assert_eq!(total_len, defaults.len()); + if (!infer_args || had_explicit_args) && !is_assoc_ty() { + let defaults = &*self.db.generic_defaults(def); + let (item, _parent) = defaults.split_at(item_len); + let (toc, lt) = item.split_at(item_len - lifetime_params); let parent_from = item_len - substs.len(); - for (idx, default_ty) in defaults[substs.len()..item_len].iter().enumerate() { + let mut rem = + def_generics.iter_id().skip(substs.len()).map(param_to_err).collect::>(); + // Fill in defaults for type/const params + for (idx, default_ty) in toc[substs.len()..].iter().enumerate() { // each default can depend on the previous parameters let substs_so_far = Substitution::from_iter( Interner, - substs.iter().cloned().chain(remaining_params[idx..].iter().cloned()), + substs.iter().cloned().chain(rem[idx..].iter().cloned()), ); substs.push(default_ty.clone().substitute(Interner, &substs_so_far)); } - - // Keep parent's params as unknown. - let mut remaining_params = remaining_params; - substs.extend(remaining_params.drain(parent_from..)); + let n_lifetimes = lifetimes.len(); + // Fill in deferred lifetimes + substs.extend(lifetimes); + // Fill in defaults for lifetime params + for default_ty in <[n_lifetimes..] { + // these are always errors so skipping is fine + substs.push(default_ty.skip_binders().clone()); + } + // Fill in remaining def params and parent params + substs.extend(rem.drain(parent_from..)); } else { - substs.extend(remaining_params); + substs.extend(def_toc_iter.map(param_to_err)); + // Fill in deferred lifetimes + substs.extend(lifetimes); + // Fill in remaining def params and parent params + substs.extend(def_generics.iter_id().skip(substs.len()).map(param_to_err)); } - assert_eq!(substs.len(), total_len); + assert_eq!(substs.len(), total_len, "expected {} substs, got {}", total_len, substs.len()); Substitution::from_iter(Interner, substs) } diff --git a/crates/hir-ty/src/tests/regression.rs b/crates/hir-ty/src/tests/regression.rs index 4fcb6062b6..aa7b00b8de 100644 --- a/crates/hir-ty/src/tests/regression.rs +++ b/crates/hir-ty/src/tests/regression.rs @@ -2018,3 +2018,26 @@ fn tait_async_stack_overflow_17199() { "#, ); } + +#[test] +fn lifetime_params_move_param_defaults() { + check_types( + r#" +pub struct Thing<'s, T = u32>; + +impl <'s> Thing<'s> { + pub fn new() -> Thing<'s> { + Thing + //^^^^^ Thing<'?, u32> + } +} + +fn main() { + let scope = + //^^^^^ &'? 
Thing<'?, u32> + &Thing::new(); + //^^^^^^^^^^^^ Thing<'?, u32> +} +"#, + ); +} diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index a9ae8d5649..cd916330c2 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -3602,9 +3602,9 @@ impl ConstParam { } fn generic_arg_from_param(db: &dyn HirDatabase, id: TypeOrConstParamId) -> Option { - let params = db.generic_defaults(id.parent); let local_idx = hir_ty::param_idx(db, id)?; - let ty = params.get(local_idx)?.clone(); + let defaults = db.generic_defaults(id.parent); + let ty = defaults.get(local_idx)?.clone(); let subst = TyBuilder::placeholder_subst(db, id.parent); Some(ty.substitute(Interner, &subst)) } From 372e2d22e61e99f45711b226efc803d344aa66d6 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 2 Jul 2024 11:25:21 +0200 Subject: [PATCH 51/94] Make GenericParams::type_or_consts private --- crates/hir-def/src/child_by_source.rs | 2 +- crates/hir-def/src/generics.rs | 407 +++++++++++++------------ crates/hir-def/src/item_tree/pretty.rs | 12 +- crates/hir-def/src/resolver.rs | 4 +- crates/hir-def/src/src.rs | 2 +- crates/hir-ty/src/chalk_ext.rs | 2 +- crates/hir-ty/src/generics.rs | 4 +- crates/hir-ty/src/lower.rs | 2 +- crates/hir-ty/src/mir/lower.rs | 2 +- crates/hir/src/display.rs | 15 +- crates/hir/src/lib.rs | 20 +- 11 files changed, 239 insertions(+), 233 deletions(-) diff --git a/crates/hir-def/src/child_by_source.rs b/crates/hir-def/src/child_by_source.rs index 106109eb18..6ee6d313e9 100644 --- a/crates/hir-def/src/child_by_source.rs +++ b/crates/hir-def/src/child_by_source.rs @@ -214,7 +214,7 @@ impl ChildBySource for GenericDefId { } let generic_params = db.generic_params(*self); - let mut toc_idx_iter = generic_params.type_or_consts.iter().map(|(idx, _)| idx); + let mut toc_idx_iter = generic_params.iter_type_or_consts().map(|(idx, _)| idx); let lts_idx_iter = generic_params.lifetimes.iter().map(|(idx, _)| idx); // For traits the first type index is `Self`, skip it. diff --git a/crates/hir-def/src/generics.rs b/crates/hir-def/src/generics.rs index ca02501567..8570bac9a9 100644 --- a/crates/hir-def/src/generics.rs +++ b/crates/hir-def/src/generics.rs @@ -28,6 +28,7 @@ use crate::{ LocalLifetimeParamId, LocalTypeOrConstParamId, Lookup, TypeOrConstParamId, TypeParamId, }; +/// The index of the self param in the generic of the non-parent definition. const SELF_PARAM_ID_IN_SELF: la_arena::Idx = LocalTypeOrConstParamId::from_raw(RawIdx::from_u32(0)); @@ -158,7 +159,7 @@ pub enum GenericParamDataRef<'a> { /// Data about the generic parameters of a function, struct, impl, etc. 
#[derive(Clone, PartialEq, Eq, Debug, Hash)] pub struct GenericParams { - pub type_or_consts: Arena, + type_or_consts: Arena, pub lifetimes: Arena, pub where_predicates: Box<[WherePredicate]>, } @@ -205,6 +206,213 @@ pub enum WherePredicateTypeTarget { TypeOrConstParam(LocalTypeOrConstParamId), } +impl GenericParams { + /// Number of Generic parameters (type_or_consts + lifetimes) + #[inline] + pub fn len(&self) -> usize { + self.type_or_consts.len() + self.lifetimes.len() + } + + #[inline] + pub fn len_lifetimes(&self) -> usize { + self.lifetimes.len() + } + + #[inline] + pub fn len_type_or_consts(&self) -> usize { + self.type_or_consts.len() + } + + #[inline] + pub fn is_empty(&self) -> bool { + self.len() == 0 + } + + /// Iterator of type_or_consts field + #[inline] + pub fn iter_type_or_consts( + &self, + ) -> impl DoubleEndedIterator { + self.type_or_consts.iter() + } + + /// Iterator of lifetimes field + #[inline] + pub fn iter_lt( + &self, + ) -> impl DoubleEndedIterator { + self.lifetimes.iter() + } + + pub(crate) fn generic_params_query( + db: &dyn DefDatabase, + def: GenericDefId, + ) -> Interned { + let _p = tracing::info_span!("generic_params_query").entered(); + + let krate = def.module(db).krate; + let cfg_options = db.crate_graph(); + let cfg_options = &cfg_options[krate].cfg_options; + + // Returns the generic parameters that are enabled under the current `#[cfg]` options + let enabled_params = + |params: &Interned, item_tree: &ItemTree, parent: GenericModItem| { + let enabled = |param| item_tree.attrs(db, krate, param).is_cfg_enabled(cfg_options); + let attr_owner_ct = |param| AttrOwner::TypeOrConstParamData(parent, param); + let attr_owner_lt = |param| AttrOwner::LifetimeParamData(parent, param); + + // In the common case, no parameters will by disabled by `#[cfg]` attributes. + // Therefore, make a first pass to check if all parameters are enabled and, if so, + // clone the `Interned` instead of recreating an identical copy. 
+ let all_type_or_consts_enabled = + params.type_or_consts.iter().all(|(idx, _)| enabled(attr_owner_ct(idx))); + let all_lifetimes_enabled = + params.lifetimes.iter().all(|(idx, _)| enabled(attr_owner_lt(idx))); + + if all_type_or_consts_enabled && all_lifetimes_enabled { + params.clone() + } else { + Interned::new(GenericParams { + type_or_consts: all_type_or_consts_enabled + .then(|| params.type_or_consts.clone()) + .unwrap_or_else(|| { + params + .type_or_consts + .iter() + .filter(|&(idx, _)| enabled(attr_owner_ct(idx))) + .map(|(_, param)| param.clone()) + .collect() + }), + lifetimes: all_lifetimes_enabled + .then(|| params.lifetimes.clone()) + .unwrap_or_else(|| { + params + .lifetimes + .iter() + .filter(|&(idx, _)| enabled(attr_owner_lt(idx))) + .map(|(_, param)| param.clone()) + .collect() + }), + where_predicates: params.where_predicates.clone(), + }) + } + }; + fn id_to_generics( + db: &dyn DefDatabase, + id: impl for<'db> Lookup< + Database<'db> = dyn DefDatabase + 'db, + Data = impl ItemTreeLoc, + >, + enabled_params: impl Fn( + &Interned, + &ItemTree, + GenericModItem, + ) -> Interned, + ) -> Interned + where + FileItemTreeId: Into, + { + let id = id.lookup(db).item_tree_id(); + let tree = id.item_tree(db); + let item = &tree[id.value]; + enabled_params(item.generic_params(), &tree, id.value.into()) + } + + match def { + GenericDefId::FunctionId(id) => { + let loc = id.lookup(db); + let tree = loc.id.item_tree(db); + let item = &tree[loc.id.value]; + + let enabled_params = + enabled_params(&item.explicit_generic_params, &tree, loc.id.value.into()); + + let module = loc.container.module(db); + let func_data = db.function_data(id); + if func_data.params.is_empty() { + enabled_params + } else { + let mut generic_params = GenericParamsCollector { + type_or_consts: enabled_params.type_or_consts.clone(), + lifetimes: enabled_params.lifetimes.clone(), + where_predicates: enabled_params.where_predicates.clone().into(), + }; + + // Don't create an `Expander` if not needed since this + // could cause a reparse after the `ItemTree` has been created due to the spanmap. 
+ let mut expander = Lazy::new(|| { + (module.def_map(db), Expander::new(db, loc.id.file_id(), module)) + }); + for param in func_data.params.iter() { + generic_params.fill_implicit_impl_trait_args(db, &mut expander, param); + } + Interned::new(generic_params.finish()) + } + } + GenericDefId::AdtId(AdtId::StructId(id)) => id_to_generics(db, id, enabled_params), + GenericDefId::AdtId(AdtId::EnumId(id)) => id_to_generics(db, id, enabled_params), + GenericDefId::AdtId(AdtId::UnionId(id)) => id_to_generics(db, id, enabled_params), + GenericDefId::TraitId(id) => id_to_generics(db, id, enabled_params), + GenericDefId::TraitAliasId(id) => id_to_generics(db, id, enabled_params), + GenericDefId::TypeAliasId(id) => id_to_generics(db, id, enabled_params), + GenericDefId::ImplId(id) => id_to_generics(db, id, enabled_params), + GenericDefId::ConstId(_) => Interned::new(GenericParams { + type_or_consts: Default::default(), + lifetimes: Default::default(), + where_predicates: Default::default(), + }), + } + } + + pub fn find_type_by_name(&self, name: &Name, parent: GenericDefId) -> Option { + self.type_or_consts.iter().find_map(|(id, p)| { + if p.name().as_ref() == Some(&name) && p.type_param().is_some() { + Some(TypeParamId::from_unchecked(TypeOrConstParamId { local_id: id, parent })) + } else { + None + } + }) + } + + pub fn find_const_by_name(&self, name: &Name, parent: GenericDefId) -> Option { + self.type_or_consts.iter().find_map(|(id, p)| { + if p.name().as_ref() == Some(&name) && p.const_param().is_some() { + Some(ConstParamId::from_unchecked(TypeOrConstParamId { local_id: id, parent })) + } else { + None + } + }) + } + + pub fn trait_self_param(&self) -> Option { + if self.type_or_consts.is_empty() { + return None; + } + matches!( + self.type_or_consts[SELF_PARAM_ID_IN_SELF], + TypeOrConstParamData::TypeParamData(TypeParamData { + provenance: TypeParamProvenance::TraitSelf, + .. 
+ }) + ) + .then(|| SELF_PARAM_ID_IN_SELF) + } + + pub fn find_lifetime_by_name( + &self, + name: &Name, + parent: GenericDefId, + ) -> Option { + self.lifetimes.iter().find_map(|(id, p)| { + if &p.name == name { + Some(LifetimeParamId { local_id: id, parent }) + } else { + None + } + }) + } +} + #[derive(Clone, Default)] pub(crate) struct GenericParamsCollector { pub(crate) type_or_consts: Arena, @@ -441,200 +649,3 @@ impl GenericParamsCollector { } } } - -impl GenericParams { - /// Number of Generic parameters (type_or_consts + lifetimes) - #[inline] - pub fn len(&self) -> usize { - self.type_or_consts.len() + self.lifetimes.len() - } - - #[inline] - pub fn is_empty(&self) -> bool { - self.len() == 0 - } - - /// Iterator of type_or_consts field - #[inline] - pub fn iter_type_or_consts( - &self, - ) -> impl DoubleEndedIterator { - self.type_or_consts.iter() - } - - /// Iterator of lifetimes field - #[inline] - pub fn iter_lt( - &self, - ) -> impl DoubleEndedIterator { - self.lifetimes.iter() - } - - pub(crate) fn generic_params_query( - db: &dyn DefDatabase, - def: GenericDefId, - ) -> Interned { - let _p = tracing::info_span!("generic_params_query").entered(); - - let krate = def.module(db).krate; - let cfg_options = db.crate_graph(); - let cfg_options = &cfg_options[krate].cfg_options; - - // Returns the generic parameters that are enabled under the current `#[cfg]` options - let enabled_params = - |params: &Interned, item_tree: &ItemTree, parent: GenericModItem| { - let enabled = |param| item_tree.attrs(db, krate, param).is_cfg_enabled(cfg_options); - let attr_owner_ct = |param| AttrOwner::TypeOrConstParamData(parent, param); - let attr_owner_lt = |param| AttrOwner::LifetimeParamData(parent, param); - - // In the common case, no parameters will by disabled by `#[cfg]` attributes. - // Therefore, make a first pass to check if all parameters are enabled and, if so, - // clone the `Interned` instead of recreating an identical copy. 
- let all_type_or_consts_enabled = - params.type_or_consts.iter().all(|(idx, _)| enabled(attr_owner_ct(idx))); - let all_lifetimes_enabled = - params.lifetimes.iter().all(|(idx, _)| enabled(attr_owner_lt(idx))); - - if all_type_or_consts_enabled && all_lifetimes_enabled { - params.clone() - } else { - Interned::new(GenericParams { - type_or_consts: all_type_or_consts_enabled - .then(|| params.type_or_consts.clone()) - .unwrap_or_else(|| { - params - .type_or_consts - .iter() - .filter(|&(idx, _)| enabled(attr_owner_ct(idx))) - .map(|(_, param)| param.clone()) - .collect() - }), - lifetimes: all_lifetimes_enabled - .then(|| params.lifetimes.clone()) - .unwrap_or_else(|| { - params - .lifetimes - .iter() - .filter(|&(idx, _)| enabled(attr_owner_lt(idx))) - .map(|(_, param)| param.clone()) - .collect() - }), - where_predicates: params.where_predicates.clone(), - }) - } - }; - fn id_to_generics( - db: &dyn DefDatabase, - id: impl for<'db> Lookup< - Database<'db> = dyn DefDatabase + 'db, - Data = impl ItemTreeLoc, - >, - enabled_params: impl Fn( - &Interned, - &ItemTree, - GenericModItem, - ) -> Interned, - ) -> Interned - where - FileItemTreeId: Into, - { - let id = id.lookup(db).item_tree_id(); - let tree = id.item_tree(db); - let item = &tree[id.value]; - enabled_params(item.generic_params(), &tree, id.value.into()) - } - - match def { - GenericDefId::FunctionId(id) => { - let loc = id.lookup(db); - let tree = loc.id.item_tree(db); - let item = &tree[loc.id.value]; - - let enabled_params = - enabled_params(&item.explicit_generic_params, &tree, loc.id.value.into()); - - let module = loc.container.module(db); - let func_data = db.function_data(id); - if func_data.params.is_empty() { - enabled_params - } else { - let mut generic_params = GenericParamsCollector { - type_or_consts: enabled_params.type_or_consts.clone(), - lifetimes: enabled_params.lifetimes.clone(), - where_predicates: enabled_params.where_predicates.clone().into(), - }; - - // Don't create an `Expander` if not needed since this - // could cause a reparse after the `ItemTree` has been created due to the spanmap. 
- let mut expander = Lazy::new(|| { - (module.def_map(db), Expander::new(db, loc.id.file_id(), module)) - }); - for param in func_data.params.iter() { - generic_params.fill_implicit_impl_trait_args(db, &mut expander, param); - } - Interned::new(generic_params.finish()) - } - } - GenericDefId::AdtId(AdtId::StructId(id)) => id_to_generics(db, id, enabled_params), - GenericDefId::AdtId(AdtId::EnumId(id)) => id_to_generics(db, id, enabled_params), - GenericDefId::AdtId(AdtId::UnionId(id)) => id_to_generics(db, id, enabled_params), - GenericDefId::TraitId(id) => id_to_generics(db, id, enabled_params), - GenericDefId::TraitAliasId(id) => id_to_generics(db, id, enabled_params), - GenericDefId::TypeAliasId(id) => id_to_generics(db, id, enabled_params), - GenericDefId::ImplId(id) => id_to_generics(db, id, enabled_params), - GenericDefId::ConstId(_) => Interned::new(GenericParams { - type_or_consts: Default::default(), - lifetimes: Default::default(), - where_predicates: Default::default(), - }), - } - } - - pub fn find_type_by_name(&self, name: &Name, parent: GenericDefId) -> Option { - self.type_or_consts.iter().find_map(|(id, p)| { - if p.name().as_ref() == Some(&name) && p.type_param().is_some() { - Some(TypeParamId::from_unchecked(TypeOrConstParamId { local_id: id, parent })) - } else { - None - } - }) - } - - pub fn find_const_by_name(&self, name: &Name, parent: GenericDefId) -> Option { - self.type_or_consts.iter().find_map(|(id, p)| { - if p.name().as_ref() == Some(&name) && p.const_param().is_some() { - Some(ConstParamId::from_unchecked(TypeOrConstParamId { local_id: id, parent })) - } else { - None - } - }) - } - - pub fn trait_self_param(&self) -> Option { - if self.type_or_consts.is_empty() { - return None; - } - matches!( - self.type_or_consts[SELF_PARAM_ID_IN_SELF], - TypeOrConstParamData::TypeParamData(TypeParamData { - provenance: TypeParamProvenance::TraitSelf, - .. 
- }) - ) - .then(|| SELF_PARAM_ID_IN_SELF) - } - - pub fn find_lifetime_by_name( - &self, - name: &Name, - parent: GenericDefId, - ) -> Option { - self.lifetimes.iter().find_map(|(id, p)| { - if &p.name == name { - Some(LifetimeParamId { local_id: id, parent }) - } else { - None - } - }) - } -} diff --git a/crates/hir-def/src/item_tree/pretty.rs b/crates/hir-def/src/item_tree/pretty.rs index 2803678a33..59031824e1 100644 --- a/crates/hir-def/src/item_tree/pretty.rs +++ b/crates/hir-def/src/item_tree/pretty.rs @@ -540,7 +540,7 @@ impl Printer<'_> { self.print_attrs_of(AttrOwner::LifetimeParamData(parent, idx), " "); w!(self, "{}", lt.name.display(self.db.upcast())); } - for (idx, x) in params.type_or_consts.iter() { + for (idx, x) in params.iter_type_or_consts() { if !first { w!(self, ", "); } @@ -607,12 +607,10 @@ impl Printer<'_> { match target { WherePredicateTypeTarget::TypeRef(ty) => this.print_type_ref(ty), - WherePredicateTypeTarget::TypeOrConstParam(id) => { - match ¶ms.type_or_consts[*id].name() { - Some(name) => w!(this, "{}", name.display(self.db.upcast())), - None => w!(this, "_anon_{}", id.into_raw()), - } - } + WherePredicateTypeTarget::TypeOrConstParam(id) => match params[*id].name() { + Some(name) => w!(this, "{}", name.display(self.db.upcast())), + None => w!(this, "_anon_{}", id.into_raw()), + }, } w!(this, ": "); this.print_type_bounds(std::slice::from_ref(bound)); diff --git a/crates/hir-def/src/resolver.rs b/crates/hir-def/src/resolver.rs index a83157415c..c573411309 100644 --- a/crates/hir-def/src/resolver.rs +++ b/crates/hir-def/src/resolver.rs @@ -758,10 +758,10 @@ impl Scope { } Scope::GenericParams { params, def: parent } => { let parent = *parent; - for (local_id, param) in params.type_or_consts.iter() { + for (local_id, param) in params.iter_type_or_consts() { if let Some(name) = ¶m.name() { let id = TypeOrConstParamId { parent, local_id }; - let data = &db.generic_params(parent).type_or_consts[local_id]; + let data = &db.generic_params(parent)[local_id]; acc.add( name, ScopeDef::GenericParam(match data { diff --git a/crates/hir-def/src/src.rs b/crates/hir-def/src/src.rs index 2b1da8c34e..278388945c 100644 --- a/crates/hir-def/src/src.rs +++ b/crates/hir-def/src/src.rs @@ -64,7 +64,7 @@ impl HasChildSource for GenericDefId { db: &dyn DefDatabase, ) -> InFile> { let generic_params = db.generic_params(*self); - let mut idx_iter = generic_params.type_or_consts.iter().map(|(idx, _)| idx); + let mut idx_iter = generic_params.iter_type_or_consts().map(|(idx, _)| idx); let (file_id, generic_params_list) = self.file_id_and_params_of(db); diff --git a/crates/hir-ty/src/chalk_ext.rs b/crates/hir-ty/src/chalk_ext.rs index 320e5bd8a2..d0d217d270 100644 --- a/crates/hir-ty/src/chalk_ext.rs +++ b/crates/hir-ty/src/chalk_ext.rs @@ -309,7 +309,7 @@ impl TyExt for Ty { TyKind::Placeholder(idx) => { let id = from_placeholder_idx(db, *idx); let generic_params = db.generic_params(id.parent); - let param_data = &generic_params.type_or_consts[id.local_id]; + let param_data = &generic_params[id.local_id]; match param_data { TypeOrConstParamData::TypeParamData(p) => match p.provenance { hir_def::generics::TypeParamProvenance::ArgumentImplTrait => { diff --git a/crates/hir-ty/src/generics.rs b/crates/hir-ty/src/generics.rs index 2685dc0ef8..6d20791f6e 100644 --- a/crates/hir-ty/src/generics.rs +++ b/crates/hir-ty/src/generics.rs @@ -145,7 +145,7 @@ impl Generics { fn find_type_or_const_param(&self, param: TypeOrConstParamId) -> Option { if param.parent == self.def { let idx = 
param.local_id.into_raw().into_u32() as usize; - debug_assert!(idx <= self.params.type_or_consts.len()); + debug_assert!(idx <= self.params.len_type_or_consts()); Some(idx) } else { debug_assert_eq!(self.parent_generics().map(|it| it.def), Some(param.parent)); @@ -164,7 +164,7 @@ impl Generics { if lifetime.parent == self.def { let idx = lifetime.local_id.into_raw().into_u32() as usize; debug_assert!(idx <= self.params.lifetimes.len()); - Some(self.params.type_or_consts.len() + idx) + Some(self.params.len_type_or_consts() + idx) } else { debug_assert_eq!(self.parent_generics().map(|it| it.def), Some(lifetime.parent)); self.parent_generics() diff --git a/crates/hir-ty/src/lower.rs b/crates/hir-ty/src/lower.rs index 54f4ee782f..a6dd41ae20 100644 --- a/crates/hir-ty/src/lower.rs +++ b/crates/hir-ty/src/lower.rs @@ -2071,7 +2071,7 @@ pub(crate) fn impl_self_ty_query(db: &dyn HirDatabase, impl_id: ImplId) -> Binde // returns None if def is a type arg pub(crate) fn const_param_ty_query(db: &dyn HirDatabase, def: ConstParamId) -> Ty { let parent_data = db.generic_params(def.parent()); - let data = &parent_data.type_or_consts[def.local_id()]; + let data = &parent_data[def.local_id()]; let resolver = def.parent().resolver(db.upcast()); let ctx = TyLoweringContext::new(db, &resolver, def.parent().into()); match data { diff --git a/crates/hir-ty/src/mir/lower.rs b/crates/hir-ty/src/mir/lower.rs index 0569d06695..f7119c303a 100644 --- a/crates/hir-ty/src/mir/lower.rs +++ b/crates/hir-ty/src/mir/lower.rs @@ -183,7 +183,7 @@ impl MirLowerError { }, MirLowerError::GenericArgNotProvided(id, subst) => { let parent = id.parent; - let param = &db.generic_params(parent).type_or_consts[id.local_id]; + let param = &db.generic_params(parent)[id.local_id]; writeln!( f, "Generic arg not provided for {}", diff --git a/crates/hir/src/display.rs b/crates/hir/src/display.rs index 79069ed66b..6c2cb55cc4 100644 --- a/crates/hir/src/display.rs +++ b/crates/hir/src/display.rs @@ -452,7 +452,7 @@ impl HirDisplay for TypeOrConstParam { impl HirDisplay for TypeParam { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { let params = f.db.generic_params(self.id.parent()); - let param_data = ¶ms.type_or_consts[self.id.local_id()]; + let param_data = ¶ms[self.id.local_id()]; let substs = TyBuilder::placeholder_subst(f.db, self.id.parent()); let krate = self.id.parent().krate(f.db).id; let ty = @@ -540,10 +540,9 @@ fn write_generic_params( ) -> Result<(), HirDisplayError> { let params = f.db.generic_params(def); if params.lifetimes.is_empty() - && params.type_or_consts.iter().all(|it| it.1.const_param().is_none()) + && params.iter_type_or_consts().all(|it| it.1.const_param().is_none()) && params - .type_or_consts - .iter() + .iter_type_or_consts() .filter_map(|it| it.1.type_param()) .all(|param| !matches!(param.provenance, TypeParamProvenance::TypeParamList)) { @@ -564,7 +563,7 @@ fn write_generic_params( delim(f)?; write!(f, "{}", lifetime.name.display(f.db.upcast()))?; } - for (_, ty) in params.type_or_consts.iter() { + for (_, ty) in params.iter_type_or_consts() { if let Some(name) = &ty.name() { match ty { TypeOrConstParamData::TypeParamData(ty) => { @@ -616,7 +615,7 @@ fn has_disaplayable_predicates(params: &Interned) -> bool { !matches!( pred, WherePredicate::TypeBound { target: WherePredicateTypeTarget::TypeOrConstParam(id), .. 
} - if params.type_or_consts[*id].name().is_none() + if params[*id].name().is_none() ) }) } @@ -631,13 +630,13 @@ fn write_where_predicates( let is_unnamed_type_target = |params: &Interned, target: &WherePredicateTypeTarget| { matches!(target, - WherePredicateTypeTarget::TypeOrConstParam(id) if params.type_or_consts[*id].name().is_none() + WherePredicateTypeTarget::TypeOrConstParam(id) if params[*id].name().is_none() ) }; let write_target = |target: &WherePredicateTypeTarget, f: &mut HirFormatter<'_>| match target { WherePredicateTypeTarget::TypeRef(ty) => ty.hir_fmt(f), - WherePredicateTypeTarget::TypeOrConstParam(id) => match params.type_or_consts[*id].name() { + WherePredicateTypeTarget::TypeOrConstParam(id) => match params[*id].name() { Some(name) => write!(f, "{}", name.display(f.db.upcast())), None => f.write_str("{unnamed}"), }, diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index cd916330c2..19d91f44a7 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -2503,8 +2503,7 @@ impl Trait { count_required_only: bool, ) -> usize { db.generic_params(self.id.into()) - .type_or_consts - .iter() + .iter_type_or_consts() .filter(|(_, ty)| !matches!(ty, TypeOrConstParamData::TypeParamData(ty) if ty.provenance != TypeParamProvenance::TypeParamList)) .filter(|(_, ty)| !count_required_only || !ty.has_default()) .count() @@ -3125,7 +3124,7 @@ impl_from!( impl GenericDef { pub fn params(self, db: &dyn HirDatabase) -> Vec { let generics = db.generic_params(self.into()); - let ty_params = generics.type_or_consts.iter().map(|(local_id, _)| { + let ty_params = generics.iter_type_or_consts().map(|(local_id, _)| { let toc = TypeOrConstParam { id: TypeOrConstParamId { parent: self.into(), local_id } }; match toc.split(db) { Either::Left(it) => GenericParam::ConstParam(it), @@ -3153,8 +3152,7 @@ impl GenericDef { pub fn type_or_const_params(self, db: &dyn HirDatabase) -> Vec { let generics = db.generic_params(self.into()); generics - .type_or_consts - .iter() + .iter_type_or_consts() .map(|(local_id, _)| TypeOrConstParam { id: TypeOrConstParamId { parent: self.into(), local_id }, }) @@ -3496,7 +3494,7 @@ impl TypeParam { /// argument)? 
pub fn is_implicit(self, db: &dyn HirDatabase) -> bool { let params = db.generic_params(self.id.parent()); - let data = ¶ms.type_or_consts[self.id.local_id()]; + let data = ¶ms[self.id.local_id()]; match data.type_param().unwrap().provenance { hir_def::generics::TypeParamProvenance::TypeParamList => false, hir_def::generics::TypeParamProvenance::TraitSelf @@ -3574,7 +3572,7 @@ impl ConstParam { pub fn name(self, db: &dyn HirDatabase) -> Name { let params = db.generic_params(self.id.parent()); - match params.type_or_consts[self.id.local_id()].name() { + match params[self.id.local_id()].name() { Some(it) => it.clone(), None => { never!(); @@ -3617,7 +3615,7 @@ pub struct TypeOrConstParam { impl TypeOrConstParam { pub fn name(self, db: &dyn HirDatabase) -> Name { let params = db.generic_params(self.id.parent); - match params.type_or_consts[self.id.local_id].name() { + match params[self.id.local_id].name() { Some(n) => n.clone(), _ => Name::missing(), } @@ -3633,7 +3631,7 @@ impl TypeOrConstParam { pub fn split(self, db: &dyn HirDatabase) -> Either { let params = db.generic_params(self.id.parent); - match ¶ms.type_or_consts[self.id.local_id] { + match ¶ms[self.id.local_id] { hir_def::generics::TypeOrConstParamData::TypeParamData(_) => { Either::Right(TypeParam { id: TypeParamId::from_unchecked(self.id) }) } @@ -3652,7 +3650,7 @@ impl TypeOrConstParam { pub fn as_type_param(self, db: &dyn HirDatabase) -> Option { let params = db.generic_params(self.id.parent); - match ¶ms.type_or_consts[self.id.local_id] { + match ¶ms[self.id.local_id] { hir_def::generics::TypeOrConstParamData::TypeParamData(_) => { Some(TypeParam { id: TypeParamId::from_unchecked(self.id) }) } @@ -3662,7 +3660,7 @@ impl TypeOrConstParam { pub fn as_const_param(self, db: &dyn HirDatabase) -> Option { let params = db.generic_params(self.id.parent); - match ¶ms.type_or_consts[self.id.local_id] { + match ¶ms[self.id.local_id] { hir_def::generics::TypeOrConstParamData::TypeParamData(_) => None, hir_def::generics::TypeOrConstParamData::ConstParamData(_) => { Some(ConstParam { id: ConstParamId::from_unchecked(self.id) }) From be1ea4028bca01e8f181da65680360d2bf2515be Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 2 Jul 2024 11:27:56 +0200 Subject: [PATCH 52/94] Make GenericParams::where_predicates private --- crates/hir-def/src/generics.rs | 7 ++++++- crates/hir-def/src/item_tree/pretty.rs | 4 ++-- crates/hir-def/src/resolver.rs | 2 +- crates/hir-ty/src/utils.rs | 3 +-- crates/hir/src/display.rs | 4 ++-- 5 files changed, 12 insertions(+), 8 deletions(-) diff --git a/crates/hir-def/src/generics.rs b/crates/hir-def/src/generics.rs index 8570bac9a9..fe9004fcc6 100644 --- a/crates/hir-def/src/generics.rs +++ b/crates/hir-def/src/generics.rs @@ -161,7 +161,7 @@ pub enum GenericParamDataRef<'a> { pub struct GenericParams { type_or_consts: Arena, pub lifetimes: Arena, - pub where_predicates: Box<[WherePredicate]>, + where_predicates: Box<[WherePredicate]>, } impl ops::Index for GenericParams { @@ -228,6 +228,11 @@ impl GenericParams { self.len() == 0 } + #[inline] + pub fn where_predicates(&self) -> std::slice::Iter<'_, WherePredicate> { + self.where_predicates.iter() + } + /// Iterator of type_or_consts field #[inline] pub fn iter_type_or_consts( diff --git a/crates/hir-def/src/item_tree/pretty.rs b/crates/hir-def/src/item_tree/pretty.rs index 59031824e1..ad260df80f 100644 --- a/crates/hir-def/src/item_tree/pretty.rs +++ b/crates/hir-def/src/item_tree/pretty.rs @@ -570,13 +570,13 @@ impl Printer<'_> { } fn print_where_clause(&mut 
self, params: &GenericParams) -> bool { - if params.where_predicates.is_empty() { + if params.where_predicates().next().is_none() { return false; } w!(self, "\nwhere"); self.indented(|this| { - for (i, pred) in params.where_predicates.iter().enumerate() { + for (i, pred) in params.where_predicates().enumerate() { if i != 0 { wln!(this, ","); } diff --git a/crates/hir-def/src/resolver.rs b/crates/hir-def/src/resolver.rs index c573411309..a8cedd7421 100644 --- a/crates/hir-def/src/resolver.rs +++ b/crates/hir-def/src/resolver.rs @@ -596,7 +596,7 @@ impl Resolver { Scope::GenericParams { params, def } => Some((params, def)), _ => None, }) - .flat_map(|(params, def)| params.where_predicates.iter().zip(iter::repeat(def))) + .flat_map(|(params, def)| params.where_predicates().zip(iter::repeat(def))) } pub fn generic_def(&self) -> Option { diff --git a/crates/hir-ty/src/utils.rs b/crates/hir-ty/src/utils.rs index 969999cdb8..738e842146 100644 --- a/crates/hir-ty/src/utils.rs +++ b/crates/hir-ty/src/utils.rs @@ -157,8 +157,7 @@ fn direct_super_traits(db: &dyn DefDatabase, trait_: TraitId, cb: impl FnMut(Tra let generic_params = db.generic_params(trait_.into()); let trait_self = generic_params.trait_self_param(); generic_params - .where_predicates - .iter() + .where_predicates() .filter_map(|pred| match pred { WherePredicate::ForLifetime { target, bound, .. } | WherePredicate::TypeBound { target, bound } => { diff --git a/crates/hir/src/display.rs b/crates/hir/src/display.rs index 6c2cb55cc4..05047450af 100644 --- a/crates/hir/src/display.rs +++ b/crates/hir/src/display.rs @@ -611,7 +611,7 @@ fn write_where_clause( } fn has_disaplayable_predicates(params: &Interned) -> bool { - params.where_predicates.iter().any(|pred| { + params.where_predicates().any(|pred| { !matches!( pred, WherePredicate::TypeBound { target: WherePredicateTypeTarget::TypeOrConstParam(id), .. } @@ -652,7 +652,7 @@ fn write_where_predicates( _ => false, }; - let mut iter = params.where_predicates.iter().peekable(); + let mut iter = params.where_predicates().peekable(); while let Some(pred) = iter.next() { if matches!(pred, TypeBound { target, .. } if is_unnamed_type_target(params, target)) { continue; From 966798b7ba0bd314b0050b76bbe9d7a315f8510e Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 2 Jul 2024 11:31:02 +0200 Subject: [PATCH 53/94] Make GenericParams::lifetimes private --- crates/hir-def/src/child_by_source.rs | 2 +- crates/hir-def/src/generics.rs | 2 +- crates/hir-def/src/item_tree/pretty.rs | 2 +- crates/hir-def/src/resolver.rs | 2 +- crates/hir-def/src/src.rs | 2 +- crates/hir-ty/src/generics.rs | 2 +- crates/hir/src/display.rs | 4 ++-- crates/hir/src/lib.rs | 10 ++++------ 8 files changed, 12 insertions(+), 14 deletions(-) diff --git a/crates/hir-def/src/child_by_source.rs b/crates/hir-def/src/child_by_source.rs index 6ee6d313e9..0438278ca2 100644 --- a/crates/hir-def/src/child_by_source.rs +++ b/crates/hir-def/src/child_by_source.rs @@ -215,7 +215,7 @@ impl ChildBySource for GenericDefId { let generic_params = db.generic_params(*self); let mut toc_idx_iter = generic_params.iter_type_or_consts().map(|(idx, _)| idx); - let lts_idx_iter = generic_params.lifetimes.iter().map(|(idx, _)| idx); + let lts_idx_iter = generic_params.iter_lt().map(|(idx, _)| idx); // For traits the first type index is `Self`, skip it. 
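For a sense of how downstream code looks once all three fields are private (patches 51-53), here is a hypothetical caller — not code from these patches, names other than the accessors are illustrative — going through the new accessors and the `Index` impls instead of the struct fields:

    use hir_def::{db::DefDatabase, GenericDefId};

    // Hypothetical usage sketch of the new GenericParams API.
    fn dump_generics(db: &dyn DefDatabase, def: GenericDefId) {
        let params = db.generic_params(def);
        for (local_id, _data) in params.iter_type_or_consts() {
            // Indexing replaces the old `params.type_or_consts[local_id]` field access.
            let _param = &params[local_id];
        }
        for (_local_id, lt) in params.iter_lt() {
            // Replaces iterating `params.lifetimes` directly.
            let _name = &lt.name;
        }
        // Replaces `params.where_predicates.iter()`.
        let _predicate_count = params.where_predicates().count();
    }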
if let GenericDefId::TraitId(_) = *self { diff --git a/crates/hir-def/src/generics.rs b/crates/hir-def/src/generics.rs index fe9004fcc6..1d880c9eee 100644 --- a/crates/hir-def/src/generics.rs +++ b/crates/hir-def/src/generics.rs @@ -160,7 +160,7 @@ pub enum GenericParamDataRef<'a> { #[derive(Clone, PartialEq, Eq, Debug, Hash)] pub struct GenericParams { type_or_consts: Arena, - pub lifetimes: Arena, + lifetimes: Arena, where_predicates: Box<[WherePredicate]>, } diff --git a/crates/hir-def/src/item_tree/pretty.rs b/crates/hir-def/src/item_tree/pretty.rs index ad260df80f..6283ae23b5 100644 --- a/crates/hir-def/src/item_tree/pretty.rs +++ b/crates/hir-def/src/item_tree/pretty.rs @@ -532,7 +532,7 @@ impl Printer<'_> { w!(self, "<"); let mut first = true; - for (idx, lt) in params.lifetimes.iter() { + for (idx, lt) in params.iter_lt() { if !first { w!(self, ", "); } diff --git a/crates/hir-def/src/resolver.rs b/crates/hir-def/src/resolver.rs index a8cedd7421..e5c1f93bbd 100644 --- a/crates/hir-def/src/resolver.rs +++ b/crates/hir-def/src/resolver.rs @@ -775,7 +775,7 @@ impl Scope { ); } } - for (local_id, param) in params.lifetimes.iter() { + for (local_id, param) in params.iter_lt() { let id = LifetimeParamId { parent, local_id }; acc.add(¶m.name, ScopeDef::GenericParam(id.into())) } diff --git a/crates/hir-def/src/src.rs b/crates/hir-def/src/src.rs index 278388945c..a0d2079e0d 100644 --- a/crates/hir-def/src/src.rs +++ b/crates/hir-def/src/src.rs @@ -103,7 +103,7 @@ impl HasChildSource for GenericDefId { db: &dyn DefDatabase, ) -> InFile> { let generic_params = db.generic_params(*self); - let idx_iter = generic_params.lifetimes.iter().map(|(idx, _)| idx); + let idx_iter = generic_params.iter_lt().map(|(idx, _)| idx); let (file_id, generic_params_list) = self.file_id_and_params_of(db); diff --git a/crates/hir-ty/src/generics.rs b/crates/hir-ty/src/generics.rs index 6d20791f6e..7fc60c41d7 100644 --- a/crates/hir-ty/src/generics.rs +++ b/crates/hir-ty/src/generics.rs @@ -163,7 +163,7 @@ impl Generics { fn find_lifetime(&self, lifetime: LifetimeParamId) -> Option { if lifetime.parent == self.def { let idx = lifetime.local_id.into_raw().into_u32() as usize; - debug_assert!(idx <= self.params.lifetimes.len()); + debug_assert!(idx <= self.params.len_lifetimes()); Some(self.params.len_type_or_consts() + idx) } else { debug_assert_eq!(self.parent_generics().map(|it| it.def), Some(lifetime.parent)); diff --git a/crates/hir/src/display.rs b/crates/hir/src/display.rs index 05047450af..72e79af75d 100644 --- a/crates/hir/src/display.rs +++ b/crates/hir/src/display.rs @@ -539,7 +539,7 @@ fn write_generic_params( f: &mut HirFormatter<'_>, ) -> Result<(), HirDisplayError> { let params = f.db.generic_params(def); - if params.lifetimes.is_empty() + if params.iter_lt().next().is_none() && params.iter_type_or_consts().all(|it| it.1.const_param().is_none()) && params .iter_type_or_consts() @@ -559,7 +559,7 @@ fn write_generic_params( f.write_str(", ") } }; - for (_, lifetime) in params.lifetimes.iter() { + for (_, lifetime) in params.iter_lt() { delim(f)?; write!(f, "{}", lifetime.name.display(f.db.upcast()))?; } diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 19d91f44a7..18a5325db6 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -665,7 +665,7 @@ impl Module { } let parent = impl_def.id.into(); let generic_params = db.generic_params(parent); - let lifetime_params = generic_params.lifetimes.iter().map(|(local_id, _)| { + let lifetime_params = 
generic_params.iter_lt().map(|(local_id, _)| { GenericParamId::LifetimeParamId(LifetimeParamId { parent, local_id }) }); let type_params = generic_params @@ -1540,8 +1540,7 @@ impl Adt { resolver .generic_params() .and_then(|gp| { - gp.lifetimes - .iter() + gp.iter_lt() // there should only be a single lifetime // but `Arena` requires to use an iterator .nth(0) @@ -3141,8 +3140,7 @@ impl GenericDef { pub fn lifetime_params(self, db: &dyn HirDatabase) -> Vec { let generics = db.generic_params(self.into()); generics - .lifetimes - .iter() + .iter_lt() .map(|(local_id, _)| LifetimeParam { id: LifetimeParamId { parent: self.into(), local_id }, }) @@ -3548,7 +3546,7 @@ pub struct LifetimeParam { impl LifetimeParam { pub fn name(self, db: &dyn HirDatabase) -> Name { let params = db.generic_params(self.id.parent); - params.lifetimes[self.id.local_id].name.clone() + params[self.id.local_id].name.clone() } pub fn module(self, db: &dyn HirDatabase) -> Module { From baa959fa99b094b8795126f2937bd1da2e1a4e47 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 2 Jul 2024 13:42:27 +0200 Subject: [PATCH 54/94] Move lifetimes in front of type and const params but after self --- crates/hir-def/src/generics.rs | 99 +++++++++++++-------------- crates/hir-ty/src/generics.rs | 31 +++++---- crates/hir-ty/src/lower.rs | 118 +++++++++++++++------------------ 3 files changed, 122 insertions(+), 126 deletions(-) diff --git a/crates/hir-def/src/generics.rs b/crates/hir-def/src/generics.rs index 1d880c9eee..ebaaef66db 100644 --- a/crates/hir-def/src/generics.rs +++ b/crates/hir-def/src/generics.rs @@ -249,13 +249,62 @@ impl GenericParams { self.lifetimes.iter() } + pub fn find_type_by_name(&self, name: &Name, parent: GenericDefId) -> Option { + self.type_or_consts.iter().find_map(|(id, p)| { + if p.name().as_ref() == Some(&name) && p.type_param().is_some() { + Some(TypeParamId::from_unchecked(TypeOrConstParamId { local_id: id, parent })) + } else { + None + } + }) + } + + pub fn find_const_by_name(&self, name: &Name, parent: GenericDefId) -> Option { + self.type_or_consts.iter().find_map(|(id, p)| { + if p.name().as_ref() == Some(&name) && p.const_param().is_some() { + Some(ConstParamId::from_unchecked(TypeOrConstParamId { local_id: id, parent })) + } else { + None + } + }) + } + + #[inline] + pub fn trait_self_param(&self) -> Option { + if self.type_or_consts.is_empty() { + return None; + } + matches!( + self.type_or_consts[SELF_PARAM_ID_IN_SELF], + TypeOrConstParamData::TypeParamData(TypeParamData { + provenance: TypeParamProvenance::TraitSelf, + .. 
+ }) + ) + .then(|| SELF_PARAM_ID_IN_SELF) + } + + pub fn find_lifetime_by_name( + &self, + name: &Name, + parent: GenericDefId, + ) -> Option { + self.lifetimes.iter().find_map(|(id, p)| { + if &p.name == name { + Some(LifetimeParamId { local_id: id, parent }) + } else { + None + } + }) + } + pub(crate) fn generic_params_query( db: &dyn DefDatabase, def: GenericDefId, ) -> Interned { let _p = tracing::info_span!("generic_params_query").entered(); - let krate = def.module(db).krate; + let krate = def.krate(db); let cfg_options = db.crate_graph(); let cfg_options = &cfg_options[krate].cfg_options; @@ -368,54 +417,6 @@ impl GenericParams { }), } } - - pub fn find_type_by_name(&self, name: &Name, parent: GenericDefId) -> Option { - self.type_or_consts.iter().find_map(|(id, p)| { - if p.name().as_ref() == Some(&name) && p.type_param().is_some() { - Some(TypeParamId::from_unchecked(TypeOrConstParamId { local_id: id, parent })) - } else { - None - } - }) - } - - pub fn find_const_by_name(&self, name: &Name, parent: GenericDefId) -> Option { - self.type_or_consts.iter().find_map(|(id, p)| { - if p.name().as_ref() == Some(&name) && p.const_param().is_some() { - Some(ConstParamId::from_unchecked(TypeOrConstParamId { local_id: id, parent })) - } else { - None - } - }) - } - - pub fn trait_self_param(&self) -> Option { - if self.type_or_consts.is_empty() { - return None; - } - matches!( - self.type_or_consts[SELF_PARAM_ID_IN_SELF], - TypeOrConstParamData::TypeParamData(TypeParamData { - provenance: TypeParamProvenance::TraitSelf, - .. - }) - ) - .then(|| SELF_PARAM_ID_IN_SELF) - } - - pub fn find_lifetime_by_name( - &self, - name: &Name, - parent: GenericDefId, - ) -> Option { - self.lifetimes.iter().find_map(|(id, p)| { - if &p.name == name { - Some(LifetimeParamId { local_id: id, parent }) - } else { - None - } - }) - } } #[derive(Clone, Default)] diff --git a/crates/hir-ty/src/generics.rs b/crates/hir-ty/src/generics.rs index 7fc60c41d7..a96c101a38 100644 --- a/crates/hir-ty/src/generics.rs +++ b/crates/hir-ty/src/generics.rs @@ -2,8 +2,8 @@ //! //! The layout for generics as expected by chalk are as follows: //! - Optional Self parameter -//! - Type or Const parameters //! - Lifetime parameters +//! - Type or Const parameters //! - Parent parameters //! //! where parent follows the same scheme. @@ -20,19 +20,23 @@ use hir_def::{ LocalLifetimeParamId, LocalTypeOrConstParamId, Lookup, TypeOrConstParamId, TypeParamId, }; use intern::Interned; +use itertools::chain; use stdx::TupleExt; use crate::{db::HirDatabase, lt_to_placeholder_idx, to_placeholder_idx, Interner, Substitution}; pub(crate) fn generics(db: &dyn DefDatabase, def: GenericDefId) -> Generics { let parent_generics = parent_generic_def(db, def).map(|def| Box::new(generics(db, def))); - Generics { def, params: db.generic_params(def), parent_generics } + let params = db.generic_params(def); + let has_trait_self_param = params.trait_self_param().is_some(); + Generics { def, params, parent_generics, has_trait_self_param } } #[derive(Clone, Debug)] pub(crate) struct Generics { def: GenericDefId, params: Interned, parent_generics: Option>, + has_trait_self_param: bool, } impl ops::Index for Generics @@ -74,10 +78,6 @@ impl Generics { self.params.iter_type_or_consts().map(from_toc_id(self)).map(TupleExt::head) } - pub(crate) fn iter_self_lt_id(&self) -> impl DoubleEndedIterator + '_ { - self.params.iter_lt().map(from_lt_id(self)).map(TupleExt::head) - } - /// Iterate over the params followed by the parent params. 
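To make the reordering concrete, here is an illustrative sketch (assumed from the module comment above, not code in the patch) of how the parameters of a trait method are now laid out — own parameters first, then the parent's, each group ordered Self, lifetimes, then types/consts:

    // Illustration only: assumed layout after this change.
    trait Foo<'a, T, const N: usize> {
        fn method<'b, U>(&self);
    }
    // Generics of `method`: own params come first, ordered lifetimes then types/consts:
    //     'b, U
    // followed by the parent (`Foo`) params, ordered Self, lifetimes, then types/consts:
    //     Self, 'a, T, N
    // Before this patch, the lifetimes of each group came after its types/consts instead.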
pub(crate) fn iter( &self, @@ -89,10 +89,9 @@ impl Generics { pub(crate) fn iter_self( &self, ) -> impl DoubleEndedIterator)> + '_ { - self.params - .iter_type_or_consts() - .map(from_toc_id(self)) - .chain(self.params.iter_lt().map(from_lt_id(self))) + let mut toc = self.params.iter_type_or_consts().map(from_toc_id(self)); + let trait_self_param = self.has_trait_self_param.then(|| toc.next()).flatten(); + chain!(trait_self_param, self.params.iter_lt().map(from_lt_id(self)), toc) } /// Iterator over types and const params of parent. @@ -100,8 +99,9 @@ impl Generics { &self, ) -> impl DoubleEndedIterator)> + '_ { self.parent_generics().into_iter().flat_map(|it| { - let lt_iter = it.params.iter_lt().map(from_lt_id(it)); - it.params.iter_type_or_consts().map(from_toc_id(it)).chain(lt_iter) + let mut toc = it.params.iter_type_or_consts().map(from_toc_id(it)); + let trait_self_param = it.has_trait_self_param.then(|| toc.next()).flatten(); + chain!(trait_self_param, it.params.iter_lt().map(from_lt_id(it)), toc) }) } @@ -146,7 +146,10 @@ impl Generics { if param.parent == self.def { let idx = param.local_id.into_raw().into_u32() as usize; debug_assert!(idx <= self.params.len_type_or_consts()); - Some(idx) + if self.params.trait_self_param() == Some(param.local_id) { + return Some(idx); + } + Some(self.params.len_lifetimes() + idx) } else { debug_assert_eq!(self.parent_generics().map(|it| it.def), Some(param.parent)); self.parent_generics() @@ -164,7 +167,7 @@ impl Generics { if lifetime.parent == self.def { let idx = lifetime.local_id.into_raw().into_u32() as usize; debug_assert!(idx <= self.params.len_lifetimes()); - Some(self.params.len_type_or_consts() + idx) + Some(self.params.trait_self_param().is_some() as usize + idx) } else { debug_assert_eq!(self.parent_generics().map(|it| it.def), Some(lifetime.parent)); self.parent_generics() diff --git a/crates/hir-ty/src/lower.rs b/crates/hir-ty/src/lower.rs index a6dd41ae20..d421e72d36 100644 --- a/crates/hir-ty/src/lower.rs +++ b/crates/hir-ty/src/lower.rs @@ -384,14 +384,18 @@ impl<'a> TyLoweringContext<'a> { type_params, const_params, _impl_trait_params, - _lifetime_params, + lifetime_params, ) = self .generics() .expect("variable impl trait lowering must be in a generic def") .provenance_split(); TyKind::BoundVar(BoundVar::new( self.in_binders, - idx as usize + self_param as usize + type_params + const_params, + idx as usize + + self_param as usize + + type_params + + const_params + + lifetime_params, )) .intern(Interner) } @@ -816,8 +820,8 @@ impl<'a> TyLoweringContext<'a> { // Order is // - Optional Self parameter - // - Type or Const parameters // - Lifetime parameters + // - Type or Const parameters // - Parent parameters let def_generics = generics(self.db.upcast(), def); let ( @@ -839,7 +843,6 @@ impl<'a> TyLoweringContext<'a> { let ty_error = || TyKind::Error.intern(Interner).cast(Interner); let mut def_toc_iter = def_generics.iter_self_type_or_consts_id(); - let mut def_lt_iter = def_generics.iter_self_lt_id(); let fill_self_param = || { if self_param { let self_ty = explicit_self_ty.map(|x| x.cast(Interner)).unwrap_or_else(ty_error); @@ -852,56 +855,56 @@ impl<'a> TyLoweringContext<'a> { }; let mut had_explicit_args = false; - let mut lifetimes = SmallVec::<[_; 1]>::new(); if let Some(&GenericArgs { ref args, has_self_type, .. 
}) = args_and_bindings { - if !has_self_type { - fill_self_param(); - } - let expected_num = if has_self_type { - self_param as usize + type_params + const_params + // Fill in the self param first + if has_self_type && self_param { + had_explicit_args = true; + if let Some(id) = def_toc_iter.next() { + assert!(matches!(id, GenericParamId::TypeParamId(_))); + had_explicit_args = true; + if let GenericArg::Type(ty) = &args[0] { + substs.push(self.lower_ty(ty).cast(Interner)); + } + } } else { - type_params + const_params + fill_self_param() }; - let skip = if has_self_type && !self_param { 1 } else { 0 }; - // if non-lifetime args are provided, it should be all of them, but we can't rely on that + + // Then fill in the supplied lifetime args, or error lifetimes if there are too few + // (default lifetimes aren't a thing) for arg in args + .iter() + .filter_map(|arg| match arg { + GenericArg::Lifetime(arg) => Some(self.lower_lifetime(arg)), + _ => None, + }) + .chain(iter::repeat(error_lifetime())) + .take(lifetime_params) + { + substs.push(arg.cast(Interner)); + } + + let skip = if has_self_type { 1 } else { 0 }; + // Fill in supplied type and const args + // Note if non-lifetime args are provided, it should be all of them, but we can't rely on that + for (arg, id) in args .iter() .filter(|arg| !matches!(arg, GenericArg::Lifetime(_))) .skip(skip) - .take(expected_num) + .take(type_params + const_params) + .zip(def_toc_iter) { - if let Some(id) = def_toc_iter.next() { - had_explicit_args = true; - let arg = generic_arg_to_chalk( - self.db, - id, - arg, - &mut (), - |_, type_ref| self.lower_ty(type_ref), - |_, const_ref, ty| self.lower_const(const_ref, ty), - |_, lifetime_ref| self.lower_lifetime(lifetime_ref), - ); - substs.push(arg); - } - } - - for arg in args - .iter() - .filter(|arg| matches!(arg, GenericArg::Lifetime(_))) - .take(lifetime_params) - { - if let Some(id) = def_lt_iter.next() { - let arg = generic_arg_to_chalk( - self.db, - id, - arg, - &mut (), - |_, type_ref| self.lower_ty(type_ref), - |_, const_ref, ty| self.lower_const(const_ref, ty), - |_, lifetime_ref| self.lower_lifetime(lifetime_ref), - ); - lifetimes.push(arg); - } + had_explicit_args = true; + let arg = generic_arg_to_chalk( + self.db, + id, + arg, + &mut (), + |_, type_ref| self.lower_ty(type_ref), + |_, const_ref, ty| self.lower_const(const_ref, ty), + |_, lifetime_ref| self.lower_lifetime(lifetime_ref), + ); + substs.push(arg); } } else { fill_self_param(); @@ -923,16 +926,16 @@ impl<'a> TyLoweringContext<'a> { } _ => false, }; - if (!infer_args || had_explicit_args) && !is_assoc_ty() { + let fill_defaults = (!infer_args || had_explicit_args) && !is_assoc_ty(); + if fill_defaults { let defaults = &*self.db.generic_defaults(def); let (item, _parent) = defaults.split_at(item_len); - let (toc, lt) = item.split_at(item_len - lifetime_params); let parent_from = item_len - substs.len(); let mut rem = def_generics.iter_id().skip(substs.len()).map(param_to_err).collect::>(); // Fill in defaults for type/const params - for (idx, default_ty) in toc[substs.len()..].iter().enumerate() { + for (idx, default_ty) in item[substs.len()..].iter().enumerate() { // each default can depend on the previous parameters let substs_so_far = Substitution::from_iter( Interner, @@ -940,20 +943,9 @@ impl<'a> TyLoweringContext<'a> { ); substs.push(default_ty.clone().substitute(Interner, &substs_so_far)); } - let n_lifetimes = lifetimes.len(); - // Fill in deferred lifetimes - substs.extend(lifetimes); - // Fill in defaults for lifetime params 
- for default_ty in <[n_lifetimes..] { - // these are always errors so skipping is fine - substs.push(default_ty.skip_binders().clone()); - } - // Fill in remaining def params and parent params + // Fill in remaining parent params substs.extend(rem.drain(parent_from..)); } else { - substs.extend(def_toc_iter.map(param_to_err)); - // Fill in deferred lifetimes - substs.extend(lifetimes); // Fill in remaining def params and parent params substs.extend(def_generics.iter_id().skip(substs.len()).map(param_to_err)); } @@ -1725,8 +1717,8 @@ pub(crate) fn generic_predicates_query( }) .collect::>(); - let subst = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST); - if !subst.is_empty(Interner) { + if generics.len() > 0 { + let subst = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST); let explicitly_unsized_tys = ctx.unsized_types.into_inner(); if let Some(implicitly_sized_predicates) = implicitly_sized_clauses(db, def, &explicitly_unsized_tys, &subst, &resolver) From 26c7bfd0b498abf313cf1cea10c992be2e606999 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Wed, 3 Jul 2024 08:31:40 +0200 Subject: [PATCH 55/94] Skip match exhaustiveness checking if pattern type contains errors --- crates/hir-ty/src/diagnostics/expr.rs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/crates/hir-ty/src/diagnostics/expr.rs b/crates/hir-ty/src/diagnostics/expr.rs index ce3fa53f7a..c28ab2e98a 100644 --- a/crates/hir-ty/src/diagnostics/expr.rs +++ b/crates/hir-ty/src/diagnostics/expr.rs @@ -196,6 +196,9 @@ impl ExprValidator { let Some(pat_ty) = self.infer.type_of_pat.get(arm.pat) else { return; }; + if pat_ty.contains_unknown() { + return; + } // We only include patterns whose type matches the type // of the scrutinee expression. If we had an InvalidMatchArmPattern From 013b6a883f6efe1638d78852309eaf96ead26ca0 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Wed, 3 Jul 2024 10:41:19 +0200 Subject: [PATCH 56/94] Fix up the syntax tree for macro 2.0 --- crates/hir-expand/src/declarative.rs | 25 +- crates/mbe/src/lib.rs | 45 ++-- crates/parser/src/grammar/items.rs | 2 - .../parser/inline/ok/0147_macro_def.rast | 21 +- .../test_data/parser/ok/0012_visibility.rast | 19 +- .../test_data/parser/ok/0062_macro_2.0.rast | 216 +++++++++--------- crates/syntax/src/ast/expr_ext.rs | 57 ++++- crates/syntax/src/ast/generated/nodes.rs | 8 - xtask/src/codegen/grammar.rs | 2 + 9 files changed, 226 insertions(+), 169 deletions(-) diff --git a/crates/hir-expand/src/declarative.rs b/crates/hir-expand/src/declarative.rs index 7c3bf995b1..29408902f1 100644 --- a/crates/hir-expand/src/declarative.rs +++ b/crates/hir-expand/src/declarative.rs @@ -172,15 +172,30 @@ impl DeclarativeMacroExpander { ), ast::Macro::MacroDef(macro_def) => ( match macro_def.body() { - Some(arg) => { - let tt = mbe::syntax_node_to_token_tree( - arg.syntax(), + Some(body) => { + let span = + map.span_for_range(macro_def.macro_token().unwrap().text_range()); + let args = macro_def.args().map(|args| { + mbe::syntax_node_to_token_tree( + args.syntax(), + map.as_ref(), + span, + DocCommentDesugarMode::Mbe, + ) + }); + let body = mbe::syntax_node_to_token_tree( + body.syntax(), map.as_ref(), - map.span_for_range(macro_def.macro_token().unwrap().text_range()), + span, DocCommentDesugarMode::Mbe, ); - mbe::DeclarativeMacro::parse_macro2(&tt, edition, new_meta_vars) + mbe::DeclarativeMacro::parse_macro2( + args.as_ref(), + &body, + edition, + new_meta_vars, + ) } None => mbe::DeclarativeMacro::from_err(mbe::ParseError::Expected( "expected a token tree".into(), 
diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs index b2fd91f6ce..b06c6cee12 100644 --- a/crates/mbe/src/lib.rs +++ b/crates/mbe/src/lib.rs @@ -156,7 +156,7 @@ impl DeclarativeMacro { let mut err = None; while src.len() > 0 { - let rule = match Rule::parse(edition, &mut src, true, new_meta_vars) { + let rule = match Rule::parse(edition, &mut src, new_meta_vars) { Ok(it) => it, Err(e) => { err = Some(Box::new(e)); @@ -184,19 +184,34 @@ impl DeclarativeMacro { /// The new, unstable `macro m {}` flavor. pub fn parse_macro2( - tt: &tt::Subtree, + args: Option<&tt::Subtree>, + body: &tt::Subtree, edition: impl Copy + Fn(SyntaxContextId) -> Edition, // FIXME: Remove this once we drop support for rust 1.76 (defaults to true then) new_meta_vars: bool, ) -> DeclarativeMacro { - let mut src = TtIter::new(tt); let mut rules = Vec::new(); let mut err = None; - if tt::DelimiterKind::Brace == tt.delimiter.kind { + if let Some(args) = args { + cov_mark::hit!(parse_macro_def_simple); + + let rule = (|| { + let lhs = MetaTemplate::parse_pattern(edition, args)?; + let rhs = MetaTemplate::parse_template(edition, body, new_meta_vars)?; + + Ok(crate::Rule { lhs, rhs }) + })(); + + match rule { + Ok(rule) => rules.push(rule), + Err(e) => err = Some(Box::new(e)), + } + } else { cov_mark::hit!(parse_macro_def_rules); + let mut src = TtIter::new(body); while src.len() > 0 { - let rule = match Rule::parse(edition, &mut src, true, new_meta_vars) { + let rule = match Rule::parse(edition, &mut src, new_meta_vars) { Ok(it) => it, Err(e) => { err = Some(Box::new(e)); @@ -213,19 +228,6 @@ impl DeclarativeMacro { break; } } - } else { - cov_mark::hit!(parse_macro_def_simple); - match Rule::parse(edition, &mut src, false, new_meta_vars) { - Ok(rule) => { - if src.len() != 0 { - err = Some(Box::new(ParseError::expected("remaining tokens in macro def"))); - } - rules.push(rule); - } - Err(e) => { - err = Some(Box::new(e)); - } - } } for Rule { lhs, .. 
} in &rules { @@ -262,14 +264,11 @@ impl Rule { fn parse( edition: impl Copy + Fn(SyntaxContextId) -> Edition, src: &mut TtIter<'_, Span>, - expect_arrow: bool, new_meta_vars: bool, ) -> Result { let lhs = src.expect_subtree().map_err(|()| ParseError::expected("expected subtree"))?; - if expect_arrow { - src.expect_char('=').map_err(|()| ParseError::expected("expected `=`"))?; - src.expect_char('>').map_err(|()| ParseError::expected("expected `>`"))?; - } + src.expect_char('=').map_err(|()| ParseError::expected("expected `=`"))?; + src.expect_char('>').map_err(|()| ParseError::expected("expected `>`"))?; let rhs = src.expect_subtree().map_err(|()| ParseError::expected("expected subtree"))?; let lhs = MetaTemplate::parse_pattern(edition, lhs)?; diff --git a/crates/parser/src/grammar/items.rs b/crates/parser/src/grammar/items.rs index 25c00ccf5f..99bbf47654 100644 --- a/crates/parser/src/grammar/items.rs +++ b/crates/parser/src/grammar/items.rs @@ -372,13 +372,11 @@ fn macro_def(p: &mut Parser<'_>, m: Marker) { // macro m { ($i:ident) => {} } token_tree(p); } else if p.at(T!['(']) { - let m = p.start(); token_tree(p); match p.current() { T!['{'] | T!['['] | T!['('] => token_tree(p), _ => p.error("expected `{`, `[`, `(`"), } - m.complete(p, TOKEN_TREE); } else { p.error("unmatched `(`"); } diff --git a/crates/parser/test_data/parser/inline/ok/0147_macro_def.rast b/crates/parser/test_data/parser/inline/ok/0147_macro_def.rast index 01de13a907..f73229b2e3 100644 --- a/crates/parser/test_data/parser/inline/ok/0147_macro_def.rast +++ b/crates/parser/test_data/parser/inline/ok/0147_macro_def.rast @@ -5,15 +5,14 @@ SOURCE_FILE NAME IDENT "m" TOKEN_TREE - TOKEN_TREE - L_PAREN "(" - DOLLAR "$" - IDENT "i" - COLON ":" - IDENT "ident" - R_PAREN ")" - WHITESPACE " " - TOKEN_TREE - L_CURLY "{" - R_CURLY "}" + L_PAREN "(" + DOLLAR "$" + IDENT "i" + COLON ":" + IDENT "ident" + R_PAREN ")" + WHITESPACE " " + TOKEN_TREE + L_CURLY "{" + R_CURLY "}" WHITESPACE "\n" diff --git a/crates/parser/test_data/parser/ok/0012_visibility.rast b/crates/parser/test_data/parser/ok/0012_visibility.rast index a95bc23016..3d9322947b 100644 --- a/crates/parser/test_data/parser/ok/0012_visibility.rast +++ b/crates/parser/test_data/parser/ok/0012_visibility.rast @@ -39,16 +39,15 @@ SOURCE_FILE NAME IDENT "m" TOKEN_TREE - TOKEN_TREE - L_PAREN "(" - DOLLAR "$" - COLON ":" - IDENT "ident" - R_PAREN ")" - WHITESPACE " " - TOKEN_TREE - L_CURLY "{" - R_CURLY "}" + L_PAREN "(" + DOLLAR "$" + COLON ":" + IDENT "ident" + R_PAREN ")" + WHITESPACE " " + TOKEN_TREE + L_CURLY "{" + R_CURLY "}" WHITESPACE "\n" FN VISIBILITY diff --git a/crates/parser/test_data/parser/ok/0062_macro_2.0.rast b/crates/parser/test_data/parser/ok/0062_macro_2.0.rast index 3915ed7506..1415a866b6 100644 --- a/crates/parser/test_data/parser/ok/0062_macro_2.0.rast +++ b/crates/parser/test_data/parser/ok/0062_macro_2.0.rast @@ -5,51 +5,50 @@ SOURCE_FILE NAME IDENT "parse_use_trees" TOKEN_TREE + L_PAREN "(" + DOLLAR "$" TOKEN_TREE L_PAREN "(" DOLLAR "$" - TOKEN_TREE - L_PAREN "(" - DOLLAR "$" - IDENT "s" - COLON ":" - IDENT "expr" - R_PAREN ")" + IDENT "s" + COLON ":" + IDENT "expr" + R_PAREN ")" + COMMA "," + STAR "*" + WHITESPACE " " + DOLLAR "$" + TOKEN_TREE + L_PAREN "(" COMMA "," - STAR "*" - WHITESPACE " " + R_PAREN ")" + STAR "*" + R_PAREN ")" + WHITESPACE " " + TOKEN_TREE + L_CURLY "{" + WHITESPACE "\n " + IDENT "vec" + BANG "!" 
+ TOKEN_TREE + L_BRACK "[" + WHITESPACE "\n " DOLLAR "$" TOKEN_TREE L_PAREN "(" + IDENT "parse_use_tree" + TOKEN_TREE + L_PAREN "(" + DOLLAR "$" + IDENT "s" + R_PAREN ")" COMMA "," R_PAREN ")" STAR "*" - R_PAREN ")" - WHITESPACE " " - TOKEN_TREE - L_CURLY "{" WHITESPACE "\n " - IDENT "vec" - BANG "!" - TOKEN_TREE - L_BRACK "[" - WHITESPACE "\n " - DOLLAR "$" - TOKEN_TREE - L_PAREN "(" - IDENT "parse_use_tree" - TOKEN_TREE - L_PAREN "(" - DOLLAR "$" - IDENT "s" - R_PAREN ")" - COMMA "," - R_PAREN ")" - STAR "*" - WHITESPACE "\n " - R_BRACK "]" - WHITESPACE "\n" - R_CURLY "}" + R_BRACK "]" + WHITESPACE "\n" + R_CURLY "}" WHITESPACE "\n\n" FN ATTR @@ -80,79 +79,62 @@ SOURCE_FILE NAME IDENT "test_merge" TOKEN_TREE + L_PAREN "(" TOKEN_TREE - L_PAREN "(" - TOKEN_TREE - L_BRACK "[" - DOLLAR "$" - TOKEN_TREE - L_PAREN "(" - DOLLAR "$" - IDENT "input" - COLON ":" - IDENT "expr" - R_PAREN ")" - COMMA "," - STAR "*" - WHITESPACE " " - DOLLAR "$" - TOKEN_TREE - L_PAREN "(" - COMMA "," - R_PAREN ")" - STAR "*" - R_BRACK "]" - COMMA "," - WHITESPACE " " - TOKEN_TREE - L_BRACK "[" - DOLLAR "$" - TOKEN_TREE - L_PAREN "(" - DOLLAR "$" - IDENT "output" - COLON ":" - IDENT "expr" - R_PAREN ")" - COMMA "," - STAR "*" - WHITESPACE " " - DOLLAR "$" - TOKEN_TREE - L_PAREN "(" - COMMA "," - R_PAREN ")" - STAR "*" - R_BRACK "]" - R_PAREN ")" - WHITESPACE " " - TOKEN_TREE - L_CURLY "{" - WHITESPACE "\n " - IDENT "assert_eq" - BANG "!" + L_BRACK "[" + DOLLAR "$" + TOKEN_TREE + L_PAREN "(" + DOLLAR "$" + IDENT "input" + COLON ":" + IDENT "expr" + R_PAREN ")" + COMMA "," + STAR "*" + WHITESPACE " " + DOLLAR "$" TOKEN_TREE L_PAREN "(" - WHITESPACE "\n " - IDENT "merge_use_trees" - TOKEN_TREE - L_PAREN "(" - IDENT "parse_use_trees" - BANG "!" - TOKEN_TREE - L_PAREN "(" - DOLLAR "$" - TOKEN_TREE - L_PAREN "(" - DOLLAR "$" - IDENT "input" - COMMA "," - R_PAREN ")" - STAR "*" - R_PAREN ")" - R_PAREN ")" COMMA "," - WHITESPACE "\n " + R_PAREN ")" + STAR "*" + R_BRACK "]" + COMMA "," + WHITESPACE " " + TOKEN_TREE + L_BRACK "[" + DOLLAR "$" + TOKEN_TREE + L_PAREN "(" + DOLLAR "$" + IDENT "output" + COLON ":" + IDENT "expr" + R_PAREN ")" + COMMA "," + STAR "*" + WHITESPACE " " + DOLLAR "$" + TOKEN_TREE + L_PAREN "(" + COMMA "," + R_PAREN ")" + STAR "*" + R_BRACK "]" + R_PAREN ")" + WHITESPACE " " + TOKEN_TREE + L_CURLY "{" + WHITESPACE "\n " + IDENT "assert_eq" + BANG "!" + TOKEN_TREE + L_PAREN "(" + WHITESPACE "\n " + IDENT "merge_use_trees" + TOKEN_TREE + L_PAREN "(" IDENT "parse_use_trees" BANG "!" TOKEN_TREE @@ -161,17 +143,33 @@ SOURCE_FILE TOKEN_TREE L_PAREN "(" DOLLAR "$" - IDENT "output" + IDENT "input" COMMA "," R_PAREN ")" STAR "*" R_PAREN ")" - COMMA "," - WHITESPACE "\n " R_PAREN ")" - SEMICOLON ";" - WHITESPACE "\n " - R_CURLY "}" + COMMA "," + WHITESPACE "\n " + IDENT "parse_use_trees" + BANG "!" 
+ TOKEN_TREE + L_PAREN "(" + DOLLAR "$" + TOKEN_TREE + L_PAREN "(" + DOLLAR "$" + IDENT "output" + COMMA "," + R_PAREN ")" + STAR "*" + R_PAREN ")" + COMMA "," + WHITESPACE "\n " + R_PAREN ")" + SEMICOLON ";" + WHITESPACE "\n " + R_CURLY "}" WHITESPACE "\n" R_CURLY "}" WHITESPACE "\n" diff --git a/crates/syntax/src/ast/expr_ext.rs b/crates/syntax/src/ast/expr_ext.rs index 28a9dadace..b0ee9dfd50 100644 --- a/crates/syntax/src/ast/expr_ext.rs +++ b/crates/syntax/src/ast/expr_ext.rs @@ -6,7 +6,8 @@ use crate::{ ast::{ self, operators::{ArithOp, BinaryOp, CmpOp, LogicOp, Ordering, RangeOp, UnaryOp}, - support, AstChildren, AstNode, + support, ArgList, AstChildren, AstNode, BlockExpr, ClosureExpr, Const, Expr, Fn, + FormatArgsArg, FormatArgsExpr, MacroDef, Static, TokenTree, }, AstToken, SyntaxKind::*, @@ -435,3 +436,57 @@ impl AstNode for CallableExpr { } } } + +impl MacroDef { + fn tts(&self) -> (Option, Option) { + let mut types = support::children(self.syntax()); + let first = types.next(); + let second = types.next(); + (first, second) + } + + pub fn args(&self) -> Option { + match self.tts() { + (Some(args), Some(_)) => Some(args), + _ => None, + } + } + + pub fn body(&self) -> Option { + match self.tts() { + (Some(body), None) | (_, Some(body)) => Some(body), + _ => None, + } + } +} + +impl ClosureExpr { + pub fn body(&self) -> Option { + support::child(&self.syntax) + } +} +impl Const { + pub fn body(&self) -> Option { + support::child(&self.syntax) + } +} +impl Fn { + pub fn body(&self) -> Option { + support::child(&self.syntax) + } +} +impl Static { + pub fn body(&self) -> Option { + support::child(&self.syntax) + } +} +impl FormatArgsExpr { + pub fn args(&self) -> AstChildren { + support::children(&self.syntax) + } +} +impl ArgList { + pub fn args(&self) -> AstChildren { + support::children(&self.syntax) + } +} diff --git a/crates/syntax/src/ast/generated/nodes.rs b/crates/syntax/src/ast/generated/nodes.rs index 98186c5473..bae529a2c8 100644 --- a/crates/syntax/src/ast/generated/nodes.rs +++ b/crates/syntax/src/ast/generated/nodes.rs @@ -20,7 +20,6 @@ pub struct ArgList { pub(crate) syntax: SyntaxNode, } impl ArgList { - pub fn args(&self) -> AstChildren { support::children(&self.syntax) } pub fn l_paren_token(&self) -> Option { support::token(&self.syntax, T!['(']) } pub fn r_paren_token(&self) -> Option { support::token(&self.syntax, T![')']) } } @@ -191,7 +190,6 @@ pub struct ClosureExpr { } impl ast::HasAttrs for ClosureExpr {} impl ClosureExpr { - pub fn body(&self) -> Option { support::child(&self.syntax) } pub fn generic_param_list(&self) -> Option { support::child(&self.syntax) } pub fn param_list(&self) -> Option { support::child(&self.syntax) } pub fn ret_type(&self) -> Option { support::child(&self.syntax) } @@ -211,7 +209,6 @@ impl ast::HasDocComments for Const {} impl ast::HasName for Const {} impl ast::HasVisibility for Const {} impl Const { - pub fn body(&self) -> Option { support::child(&self.syntax) } pub fn ty(&self) -> Option { support::child(&self.syntax) } pub fn colon_token(&self) -> Option { support::token(&self.syntax, T![:]) } pub fn semicolon_token(&self) -> Option { support::token(&self.syntax, T![;]) } @@ -356,7 +353,6 @@ impl ast::HasName for Fn {} impl ast::HasVisibility for Fn {} impl Fn { pub fn abi(&self) -> Option { support::child(&self.syntax) } - pub fn body(&self) -> Option { support::child(&self.syntax) } pub fn param_list(&self) -> Option { support::child(&self.syntax) } pub fn ret_type(&self) -> Option { support::child(&self.syntax) } pub fn 
semicolon_token(&self) -> Option { support::token(&self.syntax, T![;]) } @@ -418,7 +414,6 @@ pub struct FormatArgsExpr { } impl ast::HasAttrs for FormatArgsExpr {} impl FormatArgsExpr { - pub fn args(&self) -> AstChildren { support::children(&self.syntax) } pub fn template(&self) -> Option { support::child(&self.syntax) } pub fn pound_token(&self) -> Option { support::token(&self.syntax, T![#]) } pub fn l_paren_token(&self) -> Option { support::token(&self.syntax, T!['(']) } @@ -652,8 +647,6 @@ impl ast::HasDocComments for MacroDef {} impl ast::HasName for MacroDef {} impl ast::HasVisibility for MacroDef {} impl MacroDef { - pub fn args(&self) -> Option { support::child(&self.syntax) } - pub fn body(&self) -> Option { support::child(&self.syntax) } pub fn macro_token(&self) -> Option { support::token(&self.syntax, T![macro]) } } @@ -1224,7 +1217,6 @@ impl ast::HasDocComments for Static {} impl ast::HasName for Static {} impl ast::HasVisibility for Static {} impl Static { - pub fn body(&self) -> Option { support::child(&self.syntax) } pub fn ty(&self) -> Option { support::child(&self.syntax) } pub fn colon_token(&self) -> Option { support::token(&self.syntax, T![:]) } pub fn semicolon_token(&self) -> Option { support::token(&self.syntax, T![;]) } diff --git a/xtask/src/codegen/grammar.rs b/xtask/src/codegen/grammar.rs index cc2fadc975..0fc97ad654 100644 --- a/xtask/src/codegen/grammar.rs +++ b/xtask/src/codegen/grammar.rs @@ -692,6 +692,8 @@ fn lower_rule(acc: &mut Vec, grammar: &Grammar, label: Option<&String>, r | "self_ty" | "iterable" | "condition" + | "args" + | "body" ); if manually_implemented { return; From cbcb9779f52a5d7f2de6272432077dc21a3524f1 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Wed, 3 Jul 2024 10:59:46 +0200 Subject: [PATCH 57/94] fix: Don't emit semantic diagnostics in files with a lot of syntax errors --- crates/ide-diagnostics/src/lib.rs | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/crates/ide-diagnostics/src/lib.rs b/crates/ide-diagnostics/src/lib.rs index 2bf3f5f0ce..eb8e8e91d0 100644 --- a/crates/ide-diagnostics/src/lib.rs +++ b/crates/ide-diagnostics/src/lib.rs @@ -311,9 +311,13 @@ pub fn diagnostics( FileRange { file_id, range: err.range() }, ) })); + let parse_errors = res.len(); let parse = sema.parse(file_id); + // FIXME: This iterates the entire file which is a rather expensive operation. + // We should implement these differently in some form? + // Salsa caching + incremental re-parse would be better here for node in parse.syntax().descendants() { handlers::useless_braces::useless_braces(&mut res, file_id, &node); handlers::field_shorthand::field_shorthand(&mut res, file_id, &node); @@ -326,7 +330,10 @@ pub fn diagnostics( let mut diags = Vec::new(); match module { - Some(m) => m.diagnostics(db, &mut diags, config.style_lints), + // A bunch of parse errors in a file indicate some bigger structural parse changes in the + // file, so we skip semantic diagnostics so we can show these faster. 
+ Some(m) if parse_errors < 16 => m.diagnostics(db, &mut diags, config.style_lints), + Some(_) => (), None => handlers::unlinked_file::unlinked_file(&ctx, &mut res, file_id), } From 4bb623decb15a82a053b54c2ee0b8ce0987dc3d8 Mon Sep 17 00:00:00 2001 From: Shoyu Vanilla Date: Thu, 4 Jul 2024 23:31:55 +0900 Subject: [PATCH 58/94] Disallow nested impl traits --- crates/hir-def/src/hir/type_ref.rs | 8 +++- crates/hir-def/src/lower.rs | 30 ++++++++++++ crates/hir-def/src/path/lower.rs | 2 + crates/hir-ty/src/tests/traits.rs | 73 ++++++++++++++++++++++++++++++ 4 files changed, 112 insertions(+), 1 deletion(-) diff --git a/crates/hir-def/src/hir/type_ref.rs b/crates/hir-def/src/hir/type_ref.rs index ec207a7f96..741ae41c74 100644 --- a/crates/hir-def/src/hir/type_ref.rs +++ b/crates/hir-def/src/hir/type_ref.rs @@ -245,7 +245,13 @@ impl TypeRef { // for types are close enough for our purposes to the inner type for now... ast::Type::ForType(inner) => TypeRef::from_ast_opt(ctx, inner.ty()), ast::Type::ImplTraitType(inner) => { - TypeRef::ImplTrait(type_bounds_from_ast(ctx, inner.type_bound_list())) + if ctx.outer_impl_trait() { + // Disallow nested impl traits + TypeRef::Error + } else { + let _guard = ctx.outer_impl_trait_scope(true); + TypeRef::ImplTrait(type_bounds_from_ast(ctx, inner.type_bound_list())) + } } ast::Type::DynTraitType(inner) => { TypeRef::DynTrait(type_bounds_from_ast(ctx, inner.type_bound_list())) diff --git a/crates/hir-def/src/lower.rs b/crates/hir-def/src/lower.rs index ecd8d79f20..e4786a1dd4 100644 --- a/crates/hir-def/src/lower.rs +++ b/crates/hir-def/src/lower.rs @@ -18,6 +18,26 @@ pub struct LowerCtx<'a> { span_map: OnceCell, ast_id_map: OnceCell>, impl_trait_bounds: RefCell>>>, + // Prevent nested impl traits like `impl Foo`. + outer_impl_trait: RefCell, +} + +pub(crate) struct OuterImplTraitGuard<'a> { + ctx: &'a LowerCtx<'a>, + old: bool, +} + +impl<'a> OuterImplTraitGuard<'a> { + fn new(ctx: &'a LowerCtx<'a>, impl_trait: bool) -> Self { + let old = ctx.outer_impl_trait.replace(impl_trait); + Self { ctx, old } + } +} + +impl<'a> Drop for OuterImplTraitGuard<'a> { + fn drop(&mut self) { + self.ctx.outer_impl_trait.replace(self.old); + } } impl<'a> LowerCtx<'a> { @@ -28,6 +48,7 @@ impl<'a> LowerCtx<'a> { span_map: OnceCell::new(), ast_id_map: OnceCell::new(), impl_trait_bounds: RefCell::new(Vec::new()), + outer_impl_trait: RefCell::default(), } } @@ -42,6 +63,7 @@ impl<'a> LowerCtx<'a> { span_map, ast_id_map: OnceCell::new(), impl_trait_bounds: RefCell::new(Vec::new()), + outer_impl_trait: RefCell::default(), } } @@ -67,4 +89,12 @@ impl<'a> LowerCtx<'a> { pub fn take_impl_traits_bounds(&self) -> Vec>> { self.impl_trait_bounds.take() } + + pub(crate) fn outer_impl_trait(&self) -> bool { + *self.outer_impl_trait.borrow() + } + + pub(crate) fn outer_impl_trait_scope(&'a self, impl_trait: bool) -> OuterImplTraitGuard<'a> { + OuterImplTraitGuard::new(self, impl_trait) + } } diff --git a/crates/hir-def/src/path/lower.rs b/crates/hir-def/src/path/lower.rs index 2b555b3998..a710c2daca 100644 --- a/crates/hir-def/src/path/lower.rs +++ b/crates/hir-def/src/path/lower.rs @@ -202,6 +202,8 @@ pub(super) fn lower_generic_args( continue; } if let Some(name_ref) = assoc_type_arg.name_ref() { + // Nested impl traits like `impl Foo` are allowed + let _guard = lower_ctx.outer_impl_trait_scope(false); let name = name_ref.as_name(); let args = assoc_type_arg .generic_arg_list() diff --git a/crates/hir-ty/src/tests/traits.rs b/crates/hir-ty/src/tests/traits.rs index 18fc8afd18..fb07e718d1 100644 
--- a/crates/hir-ty/src/tests/traits.rs +++ b/crates/hir-ty/src/tests/traits.rs @@ -4824,3 +4824,76 @@ fn foo() { "#, ) } + +#[test] +fn nested_impl_traits() { + check_infer( + r#" +//- minicore: fn +trait Foo {} + +trait Bar {} + +trait Baz { + type Assoc; +} + +struct Qux { + qux: T, +} + +struct S; + +impl Foo for S {} + +fn not_allowed1(f: impl Fn(impl Foo)) { + let foo = S; + f(foo); +} + +// This caused stack overflow in #17498 +fn not_allowed2(f: impl Fn(&impl Foo)) { + let foo = S; + f(&foo); +} + +fn not_allowed3(bar: impl Bar) {} + +// This also caused stack overflow +fn not_allowed4(bar: impl Bar<&impl Foo>) {} + +fn allowed1(baz: impl Baz) {} + +fn allowed2<'a>(baz: impl Baz) {} + +fn allowed3(baz: impl Baz>) {} +"#, + expect![[r#" + 139..140 'f': impl Fn({unknown}) + ?Sized + 161..193 '{ ...oo); }': () + 171..174 'foo': S + 177..178 'S': S + 184..185 'f': impl Fn({unknown}) + ?Sized + 184..190 'f(foo)': () + 186..189 'foo': S + 251..252 'f': impl Fn(&'? {unknown}) + ?Sized + 274..307 '{ ...oo); }': () + 284..287 'foo': S + 290..291 'S': S + 297..298 'f': impl Fn(&'? {unknown}) + ?Sized + 297..304 'f(&foo)': () + 299..303 '&foo': &'? S + 300..303 'foo': S + 325..328 'bar': impl Bar<{unknown}> + ?Sized + 350..352 '{}': () + 405..408 'bar': impl Bar<&'? {unknown}> + ?Sized + 431..433 '{}': () + 447..450 'baz': impl Baz + ?Sized + 480..482 '{}': () + 500..503 'baz': impl Baz + ?Sized + 544..546 '{}': () + 560..563 'baz': impl Baz> + ?Sized + 598..600 '{}': () + "#]], + ) +} From 4420e7148fa7f1a6e9dc6626045741e315a13633 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sat, 6 Jul 2024 15:44:12 +0200 Subject: [PATCH 59/94] Diagnose unresolved self value in path expression --- crates/hir-ty/src/infer/expr.rs | 3 ++- .../src/handlers/unresolved_ident.rs | 16 ++++++++++++++++ 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs index 3647243537..7a0f7872a6 100644 --- a/crates/hir-ty/src/infer/expr.rs +++ b/crates/hir-ty/src/infer/expr.rs @@ -440,7 +440,8 @@ impl InferenceContext<'_> { let ty = match self.infer_path(p, tgt_expr.into()) { Some(ty) => ty, None => { - if matches!(p, Path::Normal { mod_path, .. } if mod_path.is_ident()) { + if matches!(p, Path::Normal { mod_path, .. 
} if mod_path.is_ident() || mod_path.is_self()) + { self.push_diagnostic(InferenceDiagnostic::UnresolvedIdent { expr: tgt_expr, }); diff --git a/crates/ide-diagnostics/src/handlers/unresolved_ident.rs b/crates/ide-diagnostics/src/handlers/unresolved_ident.rs index 7aa3e16536..9a81682aae 100644 --- a/crates/ide-diagnostics/src/handlers/unresolved_ident.rs +++ b/crates/ide-diagnostics/src/handlers/unresolved_ident.rs @@ -53,6 +53,22 @@ fn main() { let x = 5; let _ = x; } +"#, + ); + } + + #[test] + fn unresolved_self_val() { + check_diagnostics( + r#" +fn main() { + self.a; + //^^^^ error: no such value in this scope + let self: + self = + self; + //^^^^ error: no such value in this scope +} "#, ); } From fcddcf2ee5233254d49ec4cbbcd9da5e9667a5a1 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sat, 6 Jul 2024 16:15:06 +0200 Subject: [PATCH 60/94] Add environment to runnable lsp extension --- crates/rust-analyzer/src/handlers/request.rs | 10 +++--- crates/rust-analyzer/src/lsp/ext.rs | 11 +++--- crates/rust-analyzer/src/lsp/to_proto.rs | 11 +++--- docs/dev/lsp-extensions.md | 37 +++++++++++++++++--- editors/code/src/lsp_ext.ts | 34 +++++++++++++++--- editors/code/src/run.ts | 14 ++++---- editors/code/tests/unit/runnable_env.test.ts | 1 + 7 files changed, 85 insertions(+), 33 deletions(-) diff --git a/crates/rust-analyzer/src/handlers/request.rs b/crates/rust-analyzer/src/handlers/request.rs index 8e39b15da3..7e0db8edd8 100644 --- a/crates/rust-analyzer/src/handlers/request.rs +++ b/crates/rust-analyzer/src/handlers/request.rs @@ -847,7 +847,7 @@ pub(crate) fn handle_runnables( if expect_test { if let lsp_ext::RunnableArgs::Cargo(r) = &mut runnable.args { runnable.label = format!("{} + expect", runnable.label); - r.expect_test = Some(true); + r.environment.insert("UPDATE_EXPECT".to_owned(), "1".to_owned()); } } res.push(runnable); @@ -884,12 +884,12 @@ pub(crate) fn handle_runnables( kind: lsp_ext::RunnableKind::Cargo, args: lsp_ext::RunnableArgs::Cargo(lsp_ext::CargoRunnableArgs { workspace_root: Some(spec.workspace_root.clone().into()), - cwd: Some(cwd.into()), + cwd: cwd.into(), override_cargo: config.override_cargo.clone(), cargo_args, cargo_extra_args: config.cargo_extra_args.clone(), executable_args: Vec::new(), - expect_test: None, + environment: Default::default(), }), }) } @@ -903,12 +903,12 @@ pub(crate) fn handle_runnables( kind: lsp_ext::RunnableKind::Cargo, args: lsp_ext::RunnableArgs::Cargo(lsp_ext::CargoRunnableArgs { workspace_root: None, - cwd: None, + cwd: ".".into(), override_cargo: config.override_cargo, cargo_args: vec!["check".to_owned(), "--workspace".to_owned()], cargo_extra_args: config.cargo_extra_args, executable_args: Vec::new(), - expect_test: None, + environment: Default::default(), }), }); } diff --git a/crates/rust-analyzer/src/lsp/ext.rs b/crates/rust-analyzer/src/lsp/ext.rs index b82ba44190..3766e3f46a 100644 --- a/crates/rust-analyzer/src/lsp/ext.rs +++ b/crates/rust-analyzer/src/lsp/ext.rs @@ -460,28 +460,27 @@ pub enum RunnableKind { #[derive(Deserialize, Serialize, Debug)] #[serde(rename_all = "camelCase")] pub struct CargoRunnableArgs { - // command to be executed instead of cargo + pub environment: FxHashMap, + pub cwd: Utf8PathBuf, + /// Command to be executed instead of cargo pub override_cargo: Option, #[serde(skip_serializing_if = "Option::is_none")] pub workspace_root: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub cwd: Option, // command, --package and --lib stuff pub cargo_args: Vec, // user-specified additional cargo args, like 
`--release`. pub cargo_extra_args: Vec, // stuff after -- pub executable_args: Vec, - #[serde(skip_serializing_if = "Option::is_none")] - pub expect_test: Option, } #[derive(Deserialize, Serialize, Debug)] #[serde(rename_all = "camelCase")] pub struct ShellRunnableArgs { + pub environment: FxHashMap, + pub cwd: Utf8PathBuf, pub program: String, pub args: Vec, - pub cwd: Utf8PathBuf, } pub enum RelatedTests {} diff --git a/crates/rust-analyzer/src/lsp/to_proto.rs b/crates/rust-analyzer/src/lsp/to_proto.rs index db5f666a5b..fe65d1fc4f 100644 --- a/crates/rust-analyzer/src/lsp/to_proto.rs +++ b/crates/rust-analyzer/src/lsp/to_proto.rs @@ -15,7 +15,7 @@ use ide::{ }; use ide_db::{rust_doc::format_docs, FxHasher}; use itertools::Itertools; -use paths::{Utf8Component, Utf8Prefix}; +use paths::{Utf8Component, Utf8PathBuf, Utf8Prefix}; use semver::VersionReq; use serde_json::to_value; use vfs::AbsPath; @@ -1390,10 +1390,10 @@ pub(crate) fn runnable( workspace_root: Some(workspace_root.into()), override_cargo: config.override_cargo, cargo_args, - cwd: Some(cwd.into()), + cwd: cwd.into(), cargo_extra_args: config.cargo_extra_args, executable_args, - expect_test: None, + environment: Default::default(), }), })) } @@ -1407,6 +1407,7 @@ pub(crate) fn runnable( program: json_shell_runnable_args.program, args: json_shell_runnable_args.args, cwd: json_shell_runnable_args.cwd, + environment: Default::default(), }; Ok(Some(lsp_ext::Runnable { label, @@ -1433,10 +1434,10 @@ pub(crate) fn runnable( workspace_root: None, override_cargo: config.override_cargo, cargo_args, - cwd: None, + cwd: Utf8PathBuf::from("."), cargo_extra_args: config.cargo_extra_args, executable_args, - expect_test: None, + environment: Default::default(), }), })) } diff --git a/docs/dev/lsp-extensions.md b/docs/dev/lsp-extensions.md index 695fec7e8e..1cafcae7bb 100644 --- a/docs/dev/lsp-extensions.md +++ b/docs/dev/lsp-extensions.md @@ -1,5 +1,5 @@ src/main.rs:24:18 - | -24 | table.insert(element); - | ------ ^^^^^^^ cannot be put into a table - | | - | required by a bound introduced by this call - | - = help: the trait `Element` is not implemented for `` - = note: see for more information about the Table api -note: required by a bound in `Table::insert` - --> src/main.rs:15:18 - | -15 | fn insert(&self, element: T) { - | ^^^^^^^ required by this bound in `Table::insert` - -For more information about this error, try `rustc --explain E0277`. -``` - -See [RFC 3368] for more information. - -[#114452]: https://github.com/rust-lang/rust/pull/114452 -[RFC 3368]: https://github.com/rust-lang/rfcs/blob/master/text/3368-diagnostic-attribute-namespace.md "##, }, Lint { @@ -4453,17 +4552,6 @@ This feature has no tracking issue, and is therefore likely internal to the comp This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use. 
------------------------- -"##, - }, - Lint { - label: "div_duration", - description: r##"# `div_duration` - -The tracking issue for this feature is: [#63139] - -[#63139]: https://github.com/rust-lang/rust/issues/63139 - ------------------------ "##, }, @@ -4645,17 +4733,6 @@ The tracking issue for this feature is: [#34761] [#34761]: https://github.com/rust-lang/rust/issues/34761 ------------------------- -"##, - }, - Lint { - label: "duration_abs_diff", - description: r##"# `duration_abs_diff` - -The tracking issue for this feature is: [#117618] - -[#117618]: https://github.com/rust-lang/rust/issues/117618 - ------------------------ "##, }, @@ -4691,6 +4768,17 @@ The tracking issue for this feature is: [#72440] [#72440]: https://github.com/rust-lang/rust/issues/72440 +------------------------ +"##, + }, + Lint { + label: "duration_millis_float", + description: r##"# `duration_millis_float` + +The tracking issue for this feature is: [#122451] + +[#122451]: https://github.com/rust-lang/rust/issues/122451 + ------------------------ "##, }, @@ -4722,6 +4810,15 @@ The tracking issue for this feature is: [#102425] This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use. +------------------------ +"##, + }, + Lint { + label: "effect_types", + description: r##"# `effect_types` + +This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use. + ------------------------ "##, }, @@ -4766,17 +4863,6 @@ The tracking issue for this feature is: [#99301] [#99301]: https://github.com/rust-lang/rust/issues/99301 ------------------------- -"##, - }, - Lint { - label: "error_in_core", - description: r##"# `error_in_core` - -The tracking issue for this feature is: [#103765] - -[#103765]: https://github.com/rust-lang/rust/issues/103765 - ------------------------ "##, }, @@ -4822,36 +4908,6 @@ The tracking issue for this feature is: [#35428] [#35428]: https://github.com/rust-lang/rust/issues/35428 ------------------------ -"##, - }, - Lint { - label: "exclusive_range_pattern", - description: r##"# `exclusive_range_pattern` - -The tracking issue for this feature is: [#37854]. - - -[#67264]: https://github.com/rust-lang/rust/issues/67264 -[#37854]: https://github.com/rust-lang/rust/issues/37854 ------ - -The `exclusive_range_pattern` feature allows non-inclusive range -patterns (`0..10`) to be used in appropriate pattern matching -contexts. It also can be combined with `#![feature(half_open_range_patterns]` -to be able to use RangeTo patterns (`..10`). - -It also enabled RangeFrom patterns but that has since been -stabilized. 
- -```rust -#![feature(exclusive_range_pattern)] - let x = 5; - match x { - 0..10 => println!("single digit"), - 10 => println!("ten isn't part of the above range"), - _ => println!("nor is everything else.") - } -``` "##, }, Lint { @@ -4917,6 +4973,17 @@ The tracking issue for this feature is: [#95228] [#95228]: https://github.com/rust-lang/rust/issues/95228 +------------------------ +"##, + }, + Lint { + label: "expr_fragment_specifier_2024", + description: r##"# `expr_fragment_specifier_2024` + +The tracking issue for this feature is: [#123742] + +[#123742]: https://github.com/rust-lang/rust/issues/123742 + ------------------------ "##, }, @@ -4965,6 +5032,32 @@ The tracking issue for this feature is: [#43244] [#43244]: https://github.com/rust-lang/rust/issues/43244 ------------------------ +"##, + }, + Lint { + label: "f128", + description: r##"# `f128` + +The tracking issue for this feature is: [#116909] + +[#116909]: https://github.com/rust-lang/rust/issues/116909 + +--- + +Enable the `f128` type for IEEE 128-bit floating numbers (quad precision). +"##, + }, + Lint { + label: "f16", + description: r##"# `f16` + +The tracking issue for this feature is: [#116909] + +[#116909]: https://github.com/rust-lang/rust/issues/116909 + +--- + +Enable the `f16` type for IEEE 16-bit floating numbers (half precision). "##, }, Lint { @@ -5250,12 +5343,23 @@ This feature has no tracking issue, and is therefore likely internal to the comp "##, }, Lint { - label: "fs_try_exists", - description: r##"# `fs_try_exists` + label: "freeze", + description: r##"# `freeze` -The tracking issue for this feature is: [#83186] +The tracking issue for this feature is: [#121675] -[#83186]: https://github.com/rust-lang/rust/issues/83186 +[#121675]: https://github.com/rust-lang/rust/issues/121675 + +------------------------ +"##, + }, + Lint { + label: "freeze_impls", + description: r##"# `freeze_impls` + +The tracking issue for this feature is: [#121675] + +[#121675]: https://github.com/rust-lang/rust/issues/121675 ------------------------ "##, @@ -5365,17 +5469,6 @@ The tracking issue for this feature is: [#113521] [#113521]: https://github.com/rust-lang/rust/issues/113521 ------------------------- -"##, - }, - Lint { - label: "generic_nonzero", - description: r##"# `generic_nonzero` - -The tracking issue for this feature is: [#120257] - -[#120257]: https://github.com/rust-lang/rust/issues/120257 - ------------------------ "##, }, @@ -5398,6 +5491,17 @@ The tracking issue for this feature is: [#63292] [#63292]: https://github.com/rust-lang/rust/issues/63292 +------------------------ +"##, + }, + Lint { + label: "global_registration", + description: r##"# `global_registration` + +The tracking issue for this feature is: [#125119] + +[#125119]: https://github.com/rust-lang/rust/issues/125119 + ------------------------ "##, }, @@ -5406,7 +5510,7 @@ The tracking issue for this feature is: [#63292] description: r##"# `half_open_range_patterns_in_slices` The tracking issue for this feature is: [#67264] -It is part of the `exclusive_range_pattern` feature, +It is a future part of the `exclusive_range_pattern` feature, tracked at [#37854]. [#67264]: https://github.com/rust-lang/rust/issues/67264 @@ -5417,7 +5521,6 @@ This feature allow using top-level half-open range patterns in slices. 
```rust #![feature(half_open_range_patterns_in_slices)] -#![feature(exclusive_range_pattern)] fn main() { let xs = [13, 1, 5, 2, 3, 1, 21, 8]; @@ -5496,17 +5599,6 @@ The tracking issue for this feature is: [#44839] [#44839]: https://github.com/rust-lang/rust/issues/44839 ------------------------- -"##, - }, - Lint { - label: "hint_assert_unchecked", - description: r##"# `hint_assert_unchecked` - -The tracking issue for this feature is: [#119131] - -[#119131]: https://github.com/rust-lang/rust/issues/119131 - ------------------------ "##, }, @@ -5551,17 +5643,6 @@ The tracking issue for this feature is: [#99697] [#99697]: https://github.com/rust-lang/rust/issues/99697 ------------------------- -"##, - }, - Lint { - label: "imported_main", - description: r##"# `imported_main` - -The tracking issue for this feature is: [#28937] - -[#28937]: https://github.com/rust-lang/rust/issues/28937 - ------------------------ "##, }, @@ -5574,42 +5655,6 @@ The tracking issue for this feature is: [#8995] [#8995]: https://github.com/rust-lang/rust/issues/8995 ------------------------ -"##, - }, - Lint { - label: "inline_const", - description: r##"# `inline_const` - -The tracking issue for this feature is: [#76001] - -See also [`inline_const_pat`](inline-const-pat.md) - ------- - -This feature allows you to use inline constant expressions. For example, you can -turn this code: - -```rust -# fn add_one(x: i32) -> i32 { x + 1 } -const MY_COMPUTATION: i32 = 1 + 2 * 3 / 4; - -fn main() { - let x = add_one(MY_COMPUTATION); -} -``` - -into this code: - -```rust -#![feature(inline_const)] - -# fn add_one(x: i32) -> i32 { x + 1 } -fn main() { - let x = add_one(const { 1 + 2 * 3 / 4 }); -} -``` - -[#76001]: https://github.com/rust-lang/rust/issues/76001 "##, }, Lint { @@ -5618,8 +5663,6 @@ fn main() { The tracking issue for this feature is: [#76001] -See also [`inline_const`](inline-const.md) - ------ This feature allows you to use inline constant expressions in pattern position: @@ -5668,6 +5711,17 @@ The tracking issue for this feature is: [#99069] [#99069]: https://github.com/rust-lang/rust/issues/99069 +------------------------ +"##, + }, + Lint { + label: "integer_sign_cast", + description: r##"# `integer_sign_cast` + +The tracking issue for this feature is: [#125882] + +[#125882]: https://github.com/rust-lang/rust/issues/125882 + ------------------------ "##, }, @@ -5730,7 +5784,7 @@ All intrinsic fallback bodies are automatically made cross-crate inlineable (lik by the codegen backend, but not the MIR inliner. ```rust -#![feature(rustc_attrs, effects)] +#![feature(rustc_attrs)] #![allow(internal_features)] #[rustc_intrinsic] @@ -5740,7 +5794,7 @@ const unsafe fn const_deallocate(_ptr: *mut u8, _size: usize, _align: usize) {} Since these are just regular functions, it is perfectly ok to create the intrinsic twice: ```rust -#![feature(rustc_attrs, effects)] +#![feature(rustc_attrs)] #![allow(internal_features)] #[rustc_intrinsic] @@ -5764,12 +5818,23 @@ with any regular function. Various intrinsics have native MIR operations that they correspond to. Instead of requiring backends to implement both the intrinsic and the MIR operation, the `lower_intrinsics` pass will convert the calls to the MIR operation. Backends do not need to know about these intrinsics -at all. +at all. These intrinsics only make sense without a body, and can either be declared as a "rust-intrinsic" +or as a `#[rustc_intrinsic]`. The body is never used, as calls to the intrinsic do not exist +anymore after MIR analyses. 
## Intrinsics without fallback logic These must be implemented by all backends. +### `#[rustc_intrinsic]` declarations + +These are written like intrinsics with fallback bodies, but the body is irrelevant. +Use `loop {}` for the body or call the intrinsic recursively and add +`#[rustc_intrinsic_must_be_overridden]` to the function to ensure that backends don't +invoke the body. + +### Legacy extern ABI based intrinsics + These are imported as if they were FFI functions, with the special `rust-intrinsic` ABI. For example, if one was in a freestanding context, but wished to be able to `transmute` between types, and @@ -5790,17 +5855,6 @@ extern "rust-intrinsic" { As with any other FFI functions, these are by default always `unsafe` to call. You can add `#[rustc_safe_intrinsic]` to the intrinsic to make it safe to call. -"##, - }, - Lint { - label: "io_error_downcast", - description: r##"# `io_error_downcast` - -The tracking issue for this feature is: [#99262] - -[#99262]: https://github.com/rust-lang/rust/issues/99262 - ------------------------- "##, }, Lint { @@ -5842,17 +5896,6 @@ The tracking issue for this feature is: [#27709] [#27709]: https://github.com/rust-lang/rust/issues/27709 ------------------------- -"##, - }, - Lint { - label: "ip_bits", - description: r##"# `ip_bits` - -The tracking issue for this feature is: [#113744] - -[#113744]: https://github.com/rust-lang/rust/issues/113744 - ------------------------ "##, }, @@ -5864,6 +5907,17 @@ The tracking issue for this feature is: [#101288] [#101288]: https://github.com/rust-lang/rust/issues/101288 +------------------------ +"##, + }, + Lint { + label: "is_none_or", + description: r##"# `is_none_or` + +The tracking issue for this feature is: [#126383] + +[#126383]: https://github.com/rust-lang/rust/issues/126383 + ------------------------ "##, }, @@ -5932,6 +5986,17 @@ The tracking issue for this feature is: [#100450] [#100450]: https://github.com/rust-lang/rust/issues/100450 +------------------------ +"##, + }, + Lint { + label: "iter_chain", + description: r##"# `iter_chain` + +The tracking issue for this feature is: [#125964] + +[#125964]: https://github.com/rust-lang/rust/issues/125964 + ------------------------ "##, }, @@ -6053,6 +6118,17 @@ The tracking issue for this feature is: [#87053] [#87053]: https://github.com/rust-lang/rust/issues/87053 +------------------------ +"##, + }, + Lint { + label: "junction_point", + description: r##"# `junction_point` + +The tracking issue for this feature is: [#121709] + +[#121709]: https://github.com/rust-lang/rust/issues/121709 + ------------------------ "##, }, @@ -6205,17 +6281,6 @@ The tracking issue for this feature is: [#69835] [#69835]: https://github.com/rust-lang/rust/issues/69835 ------------------------- -"##, - }, - Lint { - label: "lazy_cell", - description: r##"# `lazy_cell` - -The tracking issue for this feature is: [#109736] - -[#109736]: https://github.com/rust-lang/rust/issues/109736 - ------------------------ "##, }, @@ -6223,9 +6288,9 @@ The tracking issue for this feature is: [#109736] label: "lazy_cell_consume", description: r##"# `lazy_cell_consume` -The tracking issue for this feature is: [#109736] +The tracking issue for this feature is: [#125623] -[#109736]: https://github.com/rust-lang/rust/issues/109736 +[#125623]: https://github.com/rust-lang/rust/issues/125623 ------------------------ "##, @@ -6365,17 +6430,6 @@ The tracking issue for this feature is: [#114135] [#114135]: https://github.com/rust-lang/rust/issues/114135 ------------------------- -"##, - }, - 
Lint { - label: "lint_reasons", - description: r##"# `lint_reasons` - -The tracking issue for this feature is: [#54503] - -[#54503]: https://github.com/rust-lang/rust/issues/54503 - ------------------------ "##, }, @@ -6431,6 +6485,17 @@ The tracking issue for this feature is: [#83527] [#83527]: https://github.com/rust-lang/rust/issues/83527 +------------------------ +"##, + }, + Lint { + label: "macro_metavar_expr_concat", + description: r##"# `macro_metavar_expr_concat` + +The tracking issue for this feature is: [#124225] + +[#124225]: https://github.com/rust-lang/rust/issues/124225 + ------------------------ "##, }, @@ -6536,6 +6601,17 @@ The tracking issue for this feature is: [#93092] [#93092]: https://github.com/rust-lang/rust/issues/93092 +------------------------ +"##, + }, + Lint { + label: "maybe_uninit_fill", + description: r##"# `maybe_uninit_fill` + +The tracking issue for this feature is: [#117428] + +[#117428]: https://github.com/rust-lang/rust/issues/117428 + ------------------------ "##, }, @@ -6688,6 +6764,17 @@ The tracking issue for this feature is: [#83310] [#83310]: https://github.com/rust-lang/rust/issues/83310 +------------------------ +"##, + }, + Lint { + label: "mut_ref", + description: r##"# `mut_ref` + +The tracking issue for this feature is: [#123076] + +[#123076]: https://github.com/rust-lang/rust/issues/123076 + ------------------------ "##, }, @@ -6835,6 +6922,17 @@ The tracking issue for this feature is: [#65992] [#65992]: https://github.com/rust-lang/rust/issues/65992 +------------------------ +"##, + }, + Lint { + label: "new_range_api", + description: r##"# `new_range_api` + +The tracking issue for this feature is: [#125687] + +[#125687]: https://github.com/rust-lang/rust/issues/125687 + ------------------------ "##, }, @@ -6912,17 +7010,6 @@ The tracking issue for this feature is: [#108185] [#108185]: https://github.com/rust-lang/rust/issues/108185 ------------------------- -"##, - }, - Lint { - label: "non_null_convenience", - description: r##"# `non_null_convenience` - -The tracking issue for this feature is: [#117691] - -[#117691]: https://github.com/rust-lang/rust/issues/117691 - ------------------------ "##, }, @@ -7029,6 +7116,17 @@ The tracking issue for this feature is: [#120140] [#120140]: https://github.com/rust-lang/rust/issues/120140 +------------------------ +"##, + }, + Lint { + label: "offset_of_slice", + description: r##"# `offset_of_slice` + +The tracking issue for this feature is: [#126151] + +[#126151]: https://github.com/rust-lang/rust/issues/126151 + ------------------------ "##, }, @@ -7038,6 +7136,17 @@ The tracking issue for this feature is: [#120140] This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use. 
+------------------------ +"##, + }, + Lint { + label: "once_cell_get_mut", + description: r##"# `once_cell_get_mut` + +The tracking issue for this feature is: [#121641] + +[#121641]: https://github.com/rust-lang/rust/issues/121641 + ------------------------ "##, }, @@ -7093,17 +7202,6 @@ The tracking issue for this feature is: [#82901] [#82901]: https://github.com/rust-lang/rust/issues/82901 ------------------------- -"##, - }, - Lint { - label: "option_take_if", - description: r##"# `option_take_if` - -The tracking issue for this feature is: [#98934] - -[#98934]: https://github.com/rust-lang/rust/issues/98934 - ------------------------ "##, }, @@ -7137,6 +7235,17 @@ The tracking issue for this feature is: [#118485] [#118485]: https://github.com/rust-lang/rust/issues/118485 +------------------------ +"##, + }, + Lint { + label: "os_string_pathbuf_leak", + description: r##"# `os_string_pathbuf_leak` + +The tracking issue for this feature is: [#125965] + +[#125965]: https://github.com/rust-lang/rust/issues/125965 + ------------------------ "##, }, @@ -7185,21 +7294,21 @@ The tracking issue for this feature is: [#92988] "##, }, Lint { - label: "panic_info_message", - description: r##"# `panic_info_message` + label: "panic_internals", + description: r##"# `panic_internals` -The tracking issue for this feature is: [#66745] - -[#66745]: https://github.com/rust-lang/rust/issues/66745 +This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use. ------------------------ "##, }, Lint { - label: "panic_internals", - description: r##"# `panic_internals` + label: "panic_payload_as_str", + description: r##"# `panic_payload_as_str` -This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use. +The tracking issue for this feature is: [#125175] + +[#125175]: https://github.com/rust-lang/rust/issues/125175 ------------------------ "##, @@ -7234,6 +7343,28 @@ The tracking issue for this feature is: [#92649] [#92649]: https://github.com/rust-lang/rust/issues/92649 +------------------------ +"##, + }, + Lint { + label: "patchable_function_entry", + description: r##"# `patchable_function_entry` + +The tracking issue for this feature is: [#123115] + +[#123115]: https://github.com/rust-lang/rust/issues/123115 + +------------------------ +"##, + }, + Lint { + label: "path_add_extension", + description: r##"# `path_add_extension` + +The tracking issue for this feature is: [#127292] + +[#127292]: https://github.com/rust-lang/rust/issues/127292 + ------------------------ "##, }, @@ -7265,6 +7396,17 @@ The tracking issue for this feature is: [#27721] This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use. 
+------------------------ +"##, + }, + Lint { + label: "pattern_types", + description: r##"# `pattern_types` + +The tracking issue for this feature is: [#123646] + +[#123646]: https://github.com/rust-lang/rust/issues/123646 + ------------------------ "##, }, @@ -7291,8 +7433,8 @@ The tracking issue for this feature is: [#86918] "##, }, Lint { - label: "pointer_is_aligned", - description: r##"# `pointer_is_aligned` + label: "pointer_is_aligned_to", + description: r##"# `pointer_is_aligned_to` The tracking issue for this feature is: [#96284] @@ -7319,6 +7461,32 @@ The tracking issue for this feature is: [#86656] [#86656]: https://github.com/rust-lang/rust/issues/86656 ------------------------ +"##, + }, + Lint { + label: "postfix_match", + description: r##"# `postfix-match` + +`postfix-match` adds the feature for matching upon values postfix +the expressions that generate the values. + +```rust,edition2021 +#![feature(postfix_match)] + +enum Foo { + Bar, + Baz +} + +fn get_foo() -> Foo { + Foo::Bar +} + +get_foo().match { + Foo::Bar => {}, + Foo::Baz => panic!(), +} +``` "##, }, Lint { @@ -7329,6 +7497,17 @@ The tracking issue for this feature is: [#44839] [#44839]: https://github.com/rust-lang/rust/issues/44839 +------------------------ +"##, + }, + Lint { + label: "precise_capturing", + description: r##"# `precise_capturing` + +The tracking issue for this feature is: [#123432] + +[#123432]: https://github.com/rust-lang/rust/issues/123432 + ------------------------ "##, }, @@ -7369,28 +7548,6 @@ The tracking issue for this feature is: [#44839] This feature is internal to the Rust compiler and is not intended for general use. ------------------------- -"##, - }, - Lint { - label: "proc_macro_byte_character", - description: r##"# `proc_macro_byte_character` - -The tracking issue for this feature is: [#115268] - -[#115268]: https://github.com/rust-lang/rust/issues/115268 - ------------------------- -"##, - }, - Lint { - label: "proc_macro_c_str_literals", - description: r##"# `proc_macro_c_str_literals` - -The tracking issue for this feature is: [#119750] - -[#119750]: https://github.com/rust-lang/rust/issues/119750 - ------------------------ "##, }, @@ -7526,6 +7683,17 @@ The tracking issue for this feature is: [#102070] [#102070]: https://github.com/rust-lang/rust/issues/102070 +------------------------ +"##, + }, + Lint { + label: "ptr_as_ref_unchecked", + description: r##"# `ptr_as_ref_unchecked` + +The tracking issue for this feature is: [#122034] + +[#122034]: https://github.com/rust-lang/rust/issues/122034 + ------------------------ "##, }, @@ -7579,17 +7747,6 @@ The tracking issue for this feature is: [#95892] [#95892]: https://github.com/rust-lang/rust/issues/95892 ------------------------- -"##, - }, - Lint { - label: "ptr_to_from_bits", - description: r##"# `ptr_to_from_bits` - -The tracking issue for this feature is: [#91126] - -[#91126]: https://github.com/rust-lang/rust/issues/91126 - ------------------------ "##, }, @@ -7683,6 +7840,28 @@ The tracking issue for this feature is: [#121440] [#121440]: https://github.com/rust-lang/rust/issues/121440 +------------------------ +"##, + }, + Lint { + label: "ref_pat_eat_one_layer_2024", + description: r##"# `ref_pat_eat_one_layer_2024` + +The tracking issue for this feature is: [#123076] + +[#123076]: https://github.com/rust-lang/rust/issues/123076 + +------------------------ +"##, + }, + Lint { + label: "ref_pat_eat_one_layer_2024_structural", + description: r##"# `ref_pat_eat_one_layer_2024_structural` + +The tracking issue for this 
feature is: [#123076] + +[#123076]: https://github.com/rust-lang/rust/issues/123076 + ------------------------ "##, }, @@ -7737,6 +7916,24 @@ The tracking issue for this feature is: [#27731] This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use. ------------------------ +"##, + }, + Lint { + label: "result_ffi_guarantees", + description: r##"# `result_ffi_guarantees` + +The tracking issue for this feature is: [#110503] + +[#110503]: https://github.com/rust-lang/rust/issues/110503 + +------------------------ + +This feature adds the possibility of using `Result` in FFI if T's niche +value can be used to describe E or vise-versa. + +See [RFC 3391] for more information. + +[RFC 3391]: https://github.com/rust-lang/rfcs/blob/master/text/3391-result_ffi_guarantees.md "##, }, Lint { @@ -7880,6 +8077,15 @@ error: size: Size { raw: 16 } error: aborting due to 2 previous errors ``` +"##, + }, + Lint { + label: "rustc_encodable_decodable", + description: r##"# `rustc_encodable_decodable` + +This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use. + +------------------------ "##, }, Lint { @@ -7921,17 +8127,6 @@ The tracking issue for this feature is: [#101730] This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use. ------------------------- -"##, - }, - Lint { - label: "seek_seek_relative", - description: r##"# `seek_seek_relative` - -The tracking issue for this feature is: [#117374] - -[#117374]: https://github.com/rust-lang/rust/issues/117374 - ------------------------ "##, }, @@ -7976,6 +8171,17 @@ The tracking issue for this feature is: [#56975] [#56975]: https://github.com/rust-lang/rust/issues/56975 +------------------------ +"##, + }, + Lint { + label: "shorter_tail_lifetimes", + description: r##"# `shorter_tail_lifetimes` + +The tracking issue for this feature is: [#123739] + +[#123739]: https://github.com/rust-lang/rust/issues/123739 + ------------------------ "##, }, @@ -8029,17 +8235,6 @@ The tracking issue for this feature is: [#27747] [#27747]: https://github.com/rust-lang/rust/issues/27747 ------------------------- -"##, - }, - Lint { - label: "slice_flatten", - description: r##"# `slice_flatten` - -The tracking issue for this feature is: [#95629] - -[#95629]: https://github.com/rust-lang/rust/issues/95629 - ------------------------ "##, }, @@ -8113,17 +8308,6 @@ The tracking issue for this feature is: [#74265] [#74265]: https://github.com/rust-lang/rust/issues/74265 ------------------------- -"##, - }, - Lint { - label: "slice_ptr_len", - description: r##"# `slice_ptr_len` - -The tracking issue for this feature is: [#71146] - -[#71146]: https://github.com/rust-lang/rust/issues/71146 - ------------------------ "##, }, @@ -8135,17 +8319,6 @@ The tracking issue for this feature is: [#76393] [#76393]: https://github.com/rust-lang/rust/issues/76393 ------------------------- -"##, - }, - Lint { - label: "slice_split_at_unchecked", - description: r##"# `slice_split_at_unchecked` - -The tracking issue for this feature is: [#76014] - -[#76014]: https://github.com/rust-lang/rust/issues/76014 - ------------------------ "##, }, @@ -8199,24 +8372,6 @@ The tracking issue for this feature is: [#93396] [#93396]: https://github.com/rust-lang/rust/issues/93396 ------------------------- -"##, - }, - Lint { - label: "sort_internals", - description: r##"# `sort_internals` - -This feature is internal to the Rust 
compiler and is not intended for general use. - ------------------------- -"##, - }, - Lint { - label: "spec_option_partial_eq", - description: r##"# `spec_option_partial_eq` - -This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use. - ------------------------ "##, }, @@ -8250,17 +8405,6 @@ The tracking issue for this feature is: [#96137] [#96137]: https://github.com/rust-lang/rust/issues/96137 ------------------------- -"##, - }, - Lint { - label: "split_at_checked", - description: r##"# `split_at_checked` - -The tracking issue for this feature is: [#119128] - -[#119128]: https://github.com/rust-lang/rust/issues/119128 - ------------------------ "##, }, @@ -8697,9 +8841,9 @@ The tracking issue for this feature is: [#96256] label: "tcplistener_into_incoming", description: r##"# `tcplistener_into_incoming` -The tracking issue for this feature is: [#88339] +The tracking issue for this feature is: [#88373] -[#88339]: https://github.com/rust-lang/rust/issues/88339 +[#88373]: https://github.com/rust-lang/rust/issues/88373 ------------------------ "##, @@ -9319,6 +9463,17 @@ The tracking issue for this feature is: [#96374] [#96374]: https://github.com/rust-lang/rust/issues/96374 +------------------------ +"##, + }, + Lint { + label: "try_with_capacity", + description: r##"# `try_with_capacity` + +The tracking issue for this feature is: [#91913] + +[#91913]: https://github.com/rust-lang/rust/issues/91913 + ------------------------ "##, }, @@ -9391,12 +9546,10 @@ fn main () { "##, }, Lint { - label: "type_privacy_lints", - description: r##"# `type_privacy_lints` + label: "ub_checks", + description: r##"# `ub_checks` -The tracking issue for this feature is: [#48054] - -[#48054]: https://github.com/rust-lang/rust/issues/48054 +This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use. ------------------------ "##, @@ -9439,17 +9592,6 @@ extern "rust-call" fn add_args(args: (u32, u32)) -> u32 { fn main() {} ``` -"##, - }, - Lint { - label: "unchecked_math", - description: r##"# `unchecked_math` - -The tracking issue for this feature is: [#85122] - -[#85122]: https://github.com/rust-lang/rust/issues/85122 - ------------------------- "##, }, Lint { @@ -9514,72 +9656,6 @@ The tracking issue for this feature is: [#96467] [#96467]: https://github.com/rust-lang/rust/issues/96467 ------------------------ -"##, - }, - Lint { - label: "unix_sigpipe", - description: r##"# `unix_sigpipe` - -The tracking issue for this feature is: [#97889] - -[#97889]: https://github.com/rust-lang/rust/issues/97889 - ---- - -The `#[unix_sigpipe = "..."]` attribute on `fn main()` can be used to specify how libstd shall setup `SIGPIPE` on Unix platforms before invoking `fn main()`. This attribute is ignored on non-Unix targets. There are three variants: -* `#[unix_sigpipe = "inherit"]` -* `#[unix_sigpipe = "sig_dfl"]` -* `#[unix_sigpipe = "sig_ign"]` - -## `#[unix_sigpipe = "inherit"]` - -Leave `SIGPIPE` untouched before entering `fn main()`. Unless the parent process has changed the default `SIGPIPE` handler from `SIG_DFL` to something else, this will behave the same as `#[unix_sigpipe = "sig_dfl"]`. - -## `#[unix_sigpipe = "sig_dfl"]` - -Set the `SIGPIPE` handler to `SIG_DFL`. This will result in your program getting killed if it tries to write to a closed pipe. This is normally what you want if your program produces textual output. 
- -### Example - -```rust,no_run -#![feature(unix_sigpipe)] -#[unix_sigpipe = "sig_dfl"] -fn main() { loop { println!("hello world"); } } -``` - -```bash -% ./main | head -n 1 -hello world -``` - -## `#[unix_sigpipe = "sig_ign"]` - -Set the `SIGPIPE` handler to `SIG_IGN` before invoking `fn main()`. This will result in `ErrorKind::BrokenPipe` errors if you program tries to write to a closed pipe. This is normally what you want if you for example write socket servers, socket clients, or pipe peers. - -This is what libstd has done by default since 2014. (However, see the note on child processes below.) - -### Example - -```rust,no_run -#![feature(unix_sigpipe)] -#[unix_sigpipe = "sig_ign"] -fn main() { loop { println!("hello world"); } } -``` - -```bash -% ./main | head -n 1 -hello world -thread 'main' panicked at 'failed printing to stdout: Broken pipe (os error 32)', library/std/src/io/stdio.rs:1016:9 -note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace -``` - -### Note on child processes - -When spawning child processes, the legacy Rust behavior if `#[unix_sigpipe]` is not specified is to -reset `SIGPIPE` to `SIG_DFL`. - -If `#[unix_sigpipe = "..."]` is specified, no matter what its value is, the signal disposition of -`SIGPIPE` is no longer reset. This means that the child inherits the parent's `SIGPIPE` behavior. "##, }, Lint { @@ -9612,6 +9688,17 @@ The tracking issue for this feature is: [#49804] [#49804]: https://github.com/rust-lang/rust/issues/49804 +------------------------ +"##, + }, + Lint { + label: "unsafe_attributes", + description: r##"# `unsafe_attributes` + +The tracking issue for this feature is: [#123757] + +[#123757]: https://github.com/rust-lang/rust/issues/123757 + ------------------------ "##, }, @@ -9623,6 +9710,17 @@ The tracking issue for this feature is: [#111645] [#111645]: https://github.com/rust-lang/rust/issues/111645 +------------------------ +"##, + }, + Lint { + label: "unsafe_extern_blocks", + description: r##"# `unsafe_extern_blocks` + +The tracking issue for this feature is: [#123743] + +[#123743]: https://github.com/rust-lang/rust/issues/123743 + ------------------------ "##, }, @@ -9917,17 +10015,6 @@ The tracking issue for this feature is: [#94919] [#94919]: https://github.com/rust-lang/rust/issues/94919 ------------------------- -"##, - }, - Lint { - label: "utf8_chunks", - description: r##"# `utf8_chunks` - -The tracking issue for this feature is: [#99543] - -[#99543]: https://github.com/rust-lang/rust/issues/99543 - ------------------------ "##, }, @@ -9950,6 +10037,17 @@ The tracking issue for this feature is: [#65816] [#65816]: https://github.com/rust-lang/rust/issues/65816 +------------------------ +"##, + }, + Lint { + label: "vec_pop_if", + description: r##"# `vec_pop_if` + +The tracking issue for this feature is: [#122741] + +[#122741]: https://github.com/rust-lang/rust/issues/122741 + ------------------------ "##, }, @@ -10225,18 +10323,12 @@ checked."##, description: r##"Checks for usage of the `#[allow]` attribute and suggests replacing it with the `#[expect]` (See [RFC 2383](https://rust-lang.github.io/rfcs/2383-lint-reasons.html)) -The expect attribute is still unstable and requires the `lint_reasons` -on nightly. It can be enabled by adding `#![feature(lint_reasons)]` to -the crate root. 
- This lint only warns outer attributes (`#[allow]`), as inner attributes (`#![allow]`) are usually used to enable or disable lints on a global scale."##, }, Lint { label: "clippy::allow_attributes_without_reason", - description: r##"Checks for attributes that allow lints without a reason. - -(This requires the `lint_reasons` feature)"##, + description: r##"Checks for attributes that allow lints without a reason."##, }, Lint { label: "clippy::almost_complete_range", @@ -10309,6 +10401,10 @@ patterns."##, label: "clippy::assign_ops", description: r##"Nothing. This lint has been deprecated."##, }, + Lint { + label: "clippy::assigning_clones", + description: r##"Checks for code like `foo = bar.clone();`"##, + }, Lint { label: "clippy::async_yields_async", description: r##"Checks for async blocks that yield values of types @@ -10402,8 +10498,8 @@ Check the [Box documentation](https://doc.rust-lang.org/std/boxed/index.html) fo }, Lint { label: "clippy::box_default", - description: r##"checks for `Box::new(T::default())`, which is better written as -`Box::::default()`."##, + description: r##"checks for `Box::new(Default::default())`, which can be written as +`Box::default()`."##, }, Lint { label: "clippy::boxed_local", @@ -10419,6 +10515,11 @@ moved out of the blocks."##, label: "clippy::builtin_type_shadow", description: r##"Warns if a generic shadows a built-in type."##, }, + Lint { + label: "clippy::byte_char_slices", + description: r##"Checks for hard to read slices of byte characters, that could be more easily expressed as a +byte string."##, + }, Lint { label: "clippy::bytes_count_to_len", description: r##"It checks for `str::bytes().count()` and suggests replacing it with @@ -10507,6 +10608,10 @@ defined, this lint is `Allow` by default."##, label: "clippy::cast_slice_from_raw_parts", description: r##"Checks for a raw slice being cast to a slice pointer"##, }, + Lint { + label: "clippy::cfg_not_test", + description: r##"Checks for usage of `cfg` that excludes code from `test` builds. (i.e., `#{cfg(not(test))]`)"##, + }, Lint { label: "clippy::char_lit_as_u8", description: r##"Checks for expressions where a character literal is cast @@ -10595,6 +10700,10 @@ rewritten with `match` and `cmp`."##, description: r##"Checks for comparing to an empty slice such as `` or `[]`, and suggests using `.is_empty()` where applicable."##, }, + Lint { + label: "clippy::const_is_empty", + description: r##"It identifies calls to `.is_empty()` on constant values."##, + }, Lint { label: "clippy::copy_iterator", description: r##"Checks for types that implement `Copy` as well as @@ -10747,6 +10856,13 @@ types are defined in the clippy.toml file."##, label: "clippy::diverging_sub_expression", description: r##"Checks for diverging calls that are not match arms or statements."##, + }, + Lint { + label: "clippy::doc_lazy_continuation", + description: r##"In CommonMark Markdown, the language used to write doc comments, a +paragraph nested within a list or block quote does not need any line +after the first one to be indented or marked. 
The specification calls +this a lazy paragraph continuation."##, }, Lint { label: "clippy::doc_link_with_quotes", @@ -10796,6 +10912,10 @@ marked as `#[must_use]`."##, description: r##"Checks for function arguments having the similar names differing by an underscore."##, }, + Lint { + label: "clippy::duplicated_attributes", + description: r##"Checks for attributes that appear two or more times."##, + }, Lint { label: "clippy::duration_subsec", description: r##"Checks for calculation of subsecond microseconds or milliseconds @@ -10818,11 +10938,11 @@ but without a final `else` branch."##, }, Lint { label: "clippy::empty_enum", - description: r##"Checks for `enum`s with no variants. + description: r##"Checks for `enum`s with no variants, which therefore are uninhabited types +(cannot be instantiated). -As of this writing, the `never_type` is still a -nightly-only experimental API. Therefore, this lint is only triggered -if the `never_type` is enabled."##, +As of this writing, the `never_type` is still a nightly-only experimental API. +Therefore, this lint is only triggered if `#![feature(never_type)]` is enabled."##, }, Lint { label: "clippy::empty_enum_variants_with_brackets", @@ -10891,7 +11011,7 @@ than that supported by the underlying type."##, }, Lint { label: "clippy::exhaustive_structs", - description: r##"Warns on any exported `structs`s that are not tagged `#[non_exhaustive]`"##, + description: r##"Warns on any exported `struct`s that are not tagged `#[non_exhaustive]`"##, }, Lint { label: "clippy::exit", @@ -10964,6 +11084,11 @@ anywhere else."##, label: "clippy::field_reassign_with_default", description: r##"Checks for immediate reassignment of fields initialized with Default::default()."##, + }, + Lint { + label: "clippy::field_scoped_visibility_modifiers", + description: r##"Checks for usage of scoped visibility modifiers, like `pub(crate)`, on fields. These +make a field visible within a scope between public and private."##, }, Lint { label: "clippy::filetype_is_file", @@ -11297,6 +11422,11 @@ unless the annotated function is empty or simply panics."##, description: r##"Checks for usage of `x >= y + 1` or `x - 1 >= y` (and `<=`) in a block"##, }, Lint { label: "clippy::integer_division", description: r##"Checks for division of integers"## }, + Lint { + label: "clippy::integer_division_remainder_used", + description: r##"Checks for the usage of division (`/`) and remainder (`%`) operations +when performed on any integer types using the default `Div` and `Rem` trait implementations."##, + }, Lint { label: "clippy::into_iter_on_ref", description: r##"Checks for `into_iter` calls on references which should be replaced by `iter` @@ -11354,12 +11484,12 @@ create a `Vec`."##, Lint { label: "clippy::iter_filter_is_ok", description: r##"Checks for usage of `.filter(Result::is_ok)` that may be replaced with a `.flatten()` call. -This lint will require additional changes to the follow-up calls as it appects the type."##, +This lint will require additional changes to the follow-up calls as it affects the type."##, }, Lint { label: "clippy::iter_filter_is_some", description: r##"Checks for usage of `.filter(Option::is_some)` that may be replaced with a `.flatten()` call. 
-This lint will require additional changes to the follow-up calls as it appects the type."##, +This lint will require additional changes to the follow-up calls as it affects the type."##, }, Lint { label: "clippy::iter_kv_map", @@ -11377,8 +11507,8 @@ ignoring either the keys or values."##, }, Lint { label: "clippy::iter_nth", - description: r##"Checks for usage of `.iter().nth()` (and the related -`.iter_mut().nth()`) on standard library types with *O*(1) element access."##, + description: r##"Checks for usage of `.iter().nth()`/`.iter_mut().nth()` on standard library types that have +equivalent `.get()`/`.get_mut()` methods."##, }, Lint { label: "clippy::iter_nth_zero", @@ -11457,7 +11587,7 @@ are too large."##, Lint { label: "clippy::large_include_file", description: r##"Checks for the inclusion of large files via `include_bytes!()` -and `include_str!()`"##, +or `include_str!()`."##, }, Lint { label: "clippy::large_stack_arrays", @@ -11480,6 +11610,11 @@ the argument type is `Copy` and large enough to be worth considering passing by reference. Does not trigger if the function is being exported, because that might induce API breakage, if the parameter is declared as mutable, or if the argument is a `self`."##, + }, + Lint { + label: "clippy::legacy_numeric_constants", + description: r##"Checks for usage of `::max_value()`, `std::::MAX`, +`std::::EPSILON`, etc."##, }, Lint { label: "clippy::len_without_is_empty", @@ -11547,6 +11682,10 @@ is resolved."##, description: r##"Checks for whole number float literals that cannot be represented as the underlying type without loss."##, }, + Lint { + label: "clippy::macro_metavars_in_unsafe", + description: r##"Looks for macros that expand metavariables in an unsafe block."##, + }, Lint { label: "clippy::macro_use_imports", description: r##"Checks for `#[macro_use] use...`."##, @@ -11568,7 +11707,12 @@ cannot be represented as the underlying type without loss."##, description: r##"Checks for usage of `std::mem::size_of::() * 8` when `T::BITS` is available."##, }, - Lint { label: "clippy::manual_c_str_literals", description: r##""## }, + Lint { + label: "clippy::manual_c_str_literals", + description: r##"Checks for the manual creation of C strings (a string with a `NUL` byte at the end), either +through one of the `CStr` constructor functions, or more plainly by calling `.as_ptr()` +on a (byte) string literal with a hardcoded `\\0` byte at the end."##, + }, Lint { label: "clippy::manual_clamp", description: r##"Identifies good opportunities for a clamp function from std or core, and suggests using it."##, @@ -11602,6 +11746,10 @@ where only the `Some` or `Ok` variant of the iterator element is used."##, [`BuildHasher::hash_one`]: https://doc.rust-lang.org/std/hash/trait.BuildHasher.html#method.hash_one"##, }, + Lint { + label: "clippy::manual_inspect", + description: r##"Checks for uses of `map` which return the original item."##, + }, Lint { label: "clippy::manual_instant_elapsed", description: r##"Lints subtraction between `Instant::now()` and another `Instant`."##, @@ -11622,7 +11770,10 @@ ascii range"##, description: r##"Checks for manual `is_infinite` reimplementations (i.e., `x == ::INFINITY || x == ::NEG_INFINITY`)."##, }, - Lint { label: "clippy::manual_is_variant_and", description: r##""## }, + Lint { + label: "clippy::manual_is_variant_and", + description: r##"Checks for usage of `option.map(f).unwrap_or_default()` and `result.map(f).unwrap_or_default()` where f is a function or closure that returns the `bool` type."##, + }, Lint { label: 
"clippy::manual_let_else", description: r##"Warn of cases where `let...else` could be used"##, @@ -11653,6 +11804,10 @@ slices that could be optimized by having a memcpy."##, label: "clippy::manual_ok_or", description: r##"Finds patterns that reimplement `Option::ok_or`."##, }, + Lint { + label: "clippy::manual_pattern_char_comparison", + description: r##"Checks for manual `char` comparison in string patterns"##, + }, Lint { label: "clippy::manual_range_contains", description: r##"Checks for expressions like `x >= 3 && x < 8` that could @@ -11672,6 +11827,11 @@ of `x.rem_euclid(4)`."##, label: "clippy::manual_retain", description: r##"Checks for code to be replaced by `.retain()`."##, }, + Lint { + label: "clippy::manual_rotate", + description: r##"It detects manual bit rotations that could be rewritten using standard +functions `rotate_left` or `rotate_right`."##, + }, Lint { label: "clippy::manual_saturating_arithmetic", description: r##"Checks for `.checked_add/sub(x).unwrap_or(MAX/MIN)`."##, @@ -11713,6 +11873,11 @@ Note that the lint will not be emitted in const blocks, as the suggestion would label: "clippy::manual_unwrap_or", description: r##"Finds patterns that reimplement `Option::unwrap_or` or `Result::unwrap_or`."##, }, + Lint { + label: "clippy::manual_unwrap_or_default", + description: r##"Checks if a `match` or `if let` expression can be simplified using +`.unwrap_or_default()`."##, + }, Lint { label: "clippy::manual_while_let_some", description: r##"Looks for loops that check for emptiness of a `Vec` in the condition and pop an element @@ -11818,10 +11983,7 @@ and take drastic actions like `panic!`."##, }, Lint { label: "clippy::maybe_misused_cfg", - description: r##"Checks for `#[cfg(features = ...)]` and suggests to replace it with -`#[cfg(feature = ...)]`. - -It also checks if `cfg(test)` was misspelled."##, + description: r##"Nothing. This lint has been deprecated."##, }, Lint { label: "clippy::mem_forget", @@ -11845,7 +12007,7 @@ and `mem::replace(&mut _, mem::zeroed())`."##, }, Lint { label: "clippy::min_ident_chars", - description: r##"Checks for idents which comprise of a single letter. + description: r##"Checks for identifiers which consist of a single character (or fewer than the configured threshold). Note: This lint can be very noisy when enabled; it may be desirable to only enable it temporarily."##, @@ -11861,7 +12023,7 @@ used to clamp values, but switched so that the result is constant."##, }, Lint { label: "clippy::mismatched_target_os", - description: r##"Checks for cfg attributes having operating systems used in target family position."##, + description: r##"Nothing. 
This lint has been deprecated."##, }, Lint { label: "clippy::mismatching_type_param_order", @@ -11892,9 +12054,13 @@ is greater than the largest index used to index into the slice."##, label: "clippy::missing_const_for_fn", description: r##"Suggests the use of `const` in functions and methods where possible."##, }, + Lint { + label: "clippy::missing_const_for_thread_local", + description: r##"Suggests to use `const` in `thread_local!` macro if possible."##, + }, Lint { label: "clippy::missing_docs_in_private_items", - description: r##"Warns if there is missing doc for any private documentable item"##, + description: r##"Warns if there is missing documentation for any private documentable item."##, }, Lint { label: "clippy::missing_enforced_import_renames", @@ -11932,12 +12098,11 @@ unsafe functions and warns if there is no `# Safety` section."##, Lint { label: "clippy::missing_trait_methods", description: r##"Checks if a provided method is used implicitly by a trait -implementation. A usage example would be a wrapper where every method -should perform some operation before delegating to the inner type's -implementation. - -This lint should typically be enabled on a specific trait `impl` item -rather than globally."##, +implementation."##, + }, + Lint { + label: "clippy::missing_transmute_annotations", + description: r##"Checks if transmute calls have all generics specified."##, }, Lint { label: "clippy::mistyped_literal_suffixes", @@ -11945,7 +12110,7 @@ rather than globally."##, }, Lint { label: "clippy::mixed_attributes_style", - description: r##"Checks that an item has only one kind of attributes."##, + description: r##"Checks for items that have the same kind of attributes with mixed styles (inner/outer)."##, }, Lint { label: "clippy::mixed_case_hex_literals", @@ -11960,7 +12125,7 @@ order of sub-expressions."##, }, Lint { label: "clippy::mod_module_files", - description: r##"Checks that module layout uses only self named module files, bans `mod.rs` files."##, + description: r##"Checks that module layout uses only self named module files; bans `mod.rs` files."##, }, Lint { label: "clippy::module_inception", @@ -12026,7 +12191,7 @@ reference with the output lifetime, this lint will not trigger."##, }, Lint { label: "clippy::mut_range_bound", - description: r##"Checks for loops which have a range bound that is a mutable variable"##, + description: r##"Checks for loops with a range bound that is a mutable variable."##, }, Lint { label: "clippy::mutable_key_type", @@ -12075,9 +12240,13 @@ value with `&ref`."##, }, Lint { label: "clippy::needless_borrows_for_generic_args", - description: r##"Checks for borrow operations (`&`) that used as a generic argument to a + description: r##"Checks for borrow operations (`&`) that are used as a generic argument to a function when the borrowed value could be used."##, }, + Lint { + label: "clippy::needless_character_iteration", + description: r##"Checks if an iterator is used to check if a string is ascii."##, + }, Lint { label: "clippy::needless_collect", description: r##"Checks for functions collecting an iterator when collect @@ -12119,6 +12288,10 @@ relying on lifetime elision."##, description: r##"Checks for unnecessary `match` or match-like `if let` returns for `Option` and `Result` when function signatures are the same."##, }, + Lint { + label: "clippy::needless_maybe_sized", + description: r##"Lints `?Sized` bounds applied to type parameters that cannot be unsized"##, + }, Lint { label: "clippy::needless_option_as_deref", description: 
r##"Checks for no-op uses of `Option::{as_deref, as_deref_mut}`, @@ -12138,7 +12311,7 @@ superfluous."##, description: r##"Check if a `&mut` function argument is actually used mutably. Be careful if the function is publicly reexported as it would break compatibility with -users of this function."##, +users of this function, when the users pass this function as an argument."##, }, Lint { label: "clippy::needless_pass_by_value", @@ -12370,7 +12543,7 @@ can be eliminated."##, Lint { label: "clippy::panic", description: r##"Checks for usage of `panic!`."## }, Lint { label: "clippy::panic_in_result_fn", - description: r##"Checks for usage of `panic!` or assertions in a function of type result."##, + description: r##"Checks for usage of `panic!` or assertions in a function whose return type is `Result`."##, }, Lint { label: "clippy::panicking_unwrap", @@ -12378,7 +12551,7 @@ can be eliminated."##, }, Lint { label: "clippy::partial_pub_fields", - description: r##"Checks whether partial fields of a struct are public. + description: r##"Checks whether some but not all fields of a `struct` are public. Either make all fields of a type public, or make none of them public"##, }, @@ -12685,6 +12858,11 @@ and suggests `std::ptr::from_ref` and `std::ptr::from_mut` instead."##, label: "clippy::regex_macro", description: r##"Nothing. This lint has been deprecated."##, }, + Lint { + label: "clippy::renamed_function_params", + description: r##"Lints when the name of function parameters from trait impl is +different than its default implementation."##, + }, Lint { label: "clippy::repeat_once", description: r##"Checks for usage of `.repeat(1)` and suggest the following method for each types. @@ -12766,8 +12944,8 @@ one from a trait, another not from trait."##, }, Lint { label: "clippy::seek_from_current", - description: r##"Checks an argument of `seek` method of `Seek` trait -and if it start seek from `SeekFrom::Current(0)`, suggests `stream_position` instead."##, + description: r##"Checks if the `seek` method of the `Seek` trait is called with `SeekFrom::Current(0)`, +and if it is, suggests using `stream_position` instead."##, }, Lint { label: "clippy::seek_to_start_instead_of_rewind", @@ -12810,6 +12988,11 @@ see the `unseparated_literal_suffix` lint."##, label: "clippy::serde_api_misuse", description: r##"Checks for misuses of the serde API."##, }, + Lint { + label: "clippy::set_contains_or_insert", + description: r##"Checks for usage of `contains` to see if a value is not +present on `HashSet` followed by a `insert`."##, + }, Lint { label: "clippy::shadow_reuse", description: r##"Checks for bindings that shadow other bindings already in @@ -13100,10 +13283,6 @@ either `ignore`, `no_run` or `compile_fail`."##, description: r##"Triggers when a testing function (marked with the `#[test]` attribute) isn't inside a testing module (marked with `#[cfg(test)]`)."##, }, - Lint { - label: "clippy::thread_local_initializer_can_be_made_const", - description: r##"Suggests to use `const` in `thread_local!` macro if possible."##, - }, Lint { label: "clippy::to_digit_is_some", description: r##"Checks for `.to_digit(..).is_some()` on `char`s."##, @@ -13163,7 +13342,7 @@ syntax specifications for trait bounds are used simultaneously."##, }, Lint { label: "clippy::transmute_int_to_non_zero", - description: r##"Checks for transmutes from integers to `NonZero*` types, and suggests their `new_unchecked` + description: r##"Checks for transmutes from `T` to `NonZero`, and suggests the `new_unchecked` method instead."##, }, Lint { 
@@ -13223,7 +13402,7 @@ declarations above a certain complexity threshold."##, }, Lint { label: "clippy::type_id_on_box", - description: r##"Looks for calls to ` as Any>::type_id`."##, + description: r##"Looks for calls to `.type_id()` on a `Box`."##, }, Lint { label: "clippy::type_repetition_in_bounds", @@ -13235,8 +13414,8 @@ declarations above a certain complexity threshold."##, }, Lint { label: "clippy::unconditional_recursion", - description: r##"Checks that there isn't an infinite recursion in `PartialEq` trait -implementation."##, + description: r##"Checks that there isn't an infinite recursion in trait +implementations."##, }, Lint { label: "clippy::undocumented_unsafe_blocks", @@ -13379,6 +13558,12 @@ simpler code: label: "clippy::unnecessary_map_on_constructor", description: r##"Suggests removing the use of a `map()` (or `map_err()`) method when an `Option` or `Result` is being constructed."##, + }, + Lint { + label: "clippy::unnecessary_min_or_max", + description: r##"Checks for unnecessary calls to `min()` or `max()` in the following cases +- Either both side is constant +- One side is clearly larger than the other, like i32::MIN and an i32 variable"##, }, Lint { label: "clippy::unnecessary_mut_passed", @@ -13587,11 +13772,21 @@ lint attributes. This lint permits lint attributes for lints emitted on the items themself. For `use` items these lints are: +* ambiguous_glob_reexports +* dead_code * deprecated +* hidden_glob_reexports * unreachable_pub -* unused_imports +* unused +* unused_braces +* unused_import_braces +* clippy::disallowed_types * clippy::enum_glob_use * clippy::macro_use_imports +* clippy::module_name_repetitions +* clippy::redundant_pub_crate +* clippy::single_component_path_imports +* clippy::unsafe_removed_from_name * clippy::wildcard_imports For `extern crate` items these lints are: @@ -13655,6 +13850,10 @@ to `trailing_zeros`"##, label: "clippy::waker_clone_wake", description: r##"Checks for usage of `waker.clone().wake()`"##, }, + Lint { + label: "clippy::while_float", + description: r##"Checks for while loops comparing floating point values."##, + }, Lint { label: "clippy::while_immutable_condition", description: r##"Checks whether variables used within while loop condition @@ -13746,6 +13945,11 @@ architecture."##, label: "clippy::zero_ptr", description: r##"Catch casts from `0` to some pointer type"##, }, + Lint { + label: "clippy::zero_repeat_side_effects", + description: r##"Checks for array or vec initializations which call a function or method, +but which have a repeat count of zero."##, + }, Lint { label: "clippy::zero_sized_map_values", description: r##"Checks for maps with zero-sized value types anywhere in the code."##, @@ -13773,7 +13977,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ LintGroup { lint: Lint { label: "clippy::complexity", - description: r##"lint group for: clippy::bind_instead_of_map, clippy::bool_comparison, clippy::borrow_deref_ref, clippy::borrowed_box, clippy::bytes_count_to_len, clippy::char_lit_as_u8, clippy::clone_on_copy, clippy::crosspointer_transmute, clippy::default_constructed_unit_structs, clippy::deprecated_cfg_attr, clippy::deref_addrof, clippy::derivable_impls, clippy::diverging_sub_expression, clippy::double_comparisons, clippy::double_parens, clippy::duration_subsec, clippy::excessive_nesting, clippy::explicit_auto_deref, clippy::explicit_counter_loop, clippy::explicit_write, clippy::extra_unused_lifetimes, clippy::extra_unused_type_parameters, clippy::filter_map_identity, clippy::filter_next, 
clippy::flat_map_identity, clippy::get_last_with_len, clippy::identity_op, clippy::implied_bounds_in_impls, clippy::inspect_for_each, clippy::int_plus_one, clippy::iter_count, clippy::iter_kv_map, clippy::let_with_type_underscore, clippy::manual_filter, clippy::manual_filter_map, clippy::manual_find, clippy::manual_find_map, clippy::manual_flatten, clippy::manual_hash_one, clippy::manual_main_separator_str, clippy::manual_range_patterns, clippy::manual_rem_euclid, clippy::manual_slice_size_calculation, clippy::manual_split_once, clippy::manual_strip, clippy::manual_swap, clippy::manual_unwrap_or, clippy::map_flatten, clippy::map_identity, clippy::match_as_ref, clippy::match_single_binding, clippy::needless_arbitrary_self_type, clippy::needless_bool, clippy::needless_bool_assign, clippy::needless_borrowed_reference, clippy::needless_if, clippy::needless_lifetimes, clippy::needless_match, clippy::needless_option_as_deref, clippy::needless_option_take, clippy::needless_question_mark, clippy::needless_splitn, clippy::needless_update, clippy::neg_cmp_op_on_partial_ord, clippy::no_effect, clippy::nonminimal_bool, clippy::only_used_in_recursion, clippy::option_as_ref_deref, clippy::option_filter_map, clippy::option_map_unit_fn, clippy::or_then_unwrap, clippy::overflow_check_conditional, clippy::partialeq_ne_impl, clippy::precedence, clippy::ptr_offset_with_cast, clippy::range_zip_with_len, clippy::redundant_as_str, clippy::redundant_async_block, clippy::redundant_at_rest_pattern, clippy::redundant_closure_call, clippy::redundant_guards, clippy::redundant_slicing, clippy::repeat_once, clippy::reserve_after_initialization, clippy::result_filter_map, clippy::result_map_unit_fn, clippy::search_is_some, clippy::seek_from_current, clippy::seek_to_start_instead_of_rewind, clippy::short_circuit_statement, clippy::single_element_loop, clippy::skip_while_next, clippy::string_from_utf8_as_bytes, clippy::strlen_on_c_strings, clippy::temporary_assignment, clippy::too_many_arguments, clippy::transmute_bytes_to_str, clippy::transmute_float_to_int, clippy::transmute_int_to_bool, clippy::transmute_int_to_char, clippy::transmute_int_to_float, clippy::transmute_int_to_non_zero, clippy::transmute_num_to_bytes, clippy::transmute_ptr_to_ref, clippy::transmutes_expressible_as_ptr_casts, clippy::type_complexity, clippy::unit_arg, clippy::unnecessary_cast, clippy::unnecessary_filter_map, clippy::unnecessary_find_map, clippy::unnecessary_literal_unwrap, clippy::unnecessary_map_on_constructor, clippy::unnecessary_operation, clippy::unnecessary_sort_by, clippy::unnecessary_unwrap, clippy::unneeded_wildcard_pattern, clippy::unused_format_specs, clippy::useless_asref, clippy::useless_conversion, clippy::useless_format, clippy::useless_transmute, clippy::vec_box, clippy::while_let_loop, clippy::wildcard_in_or_patterns, clippy::zero_divided_by_zero, clippy::zero_prefixed_literal"##, + description: r##"lint group for: clippy::bind_instead_of_map, clippy::bool_comparison, clippy::borrow_deref_ref, clippy::borrowed_box, clippy::bytes_count_to_len, clippy::char_lit_as_u8, clippy::clone_on_copy, clippy::crosspointer_transmute, clippy::default_constructed_unit_structs, clippy::deprecated_cfg_attr, clippy::deref_addrof, clippy::derivable_impls, clippy::diverging_sub_expression, clippy::double_comparisons, clippy::double_parens, clippy::duration_subsec, clippy::excessive_nesting, clippy::explicit_auto_deref, clippy::explicit_counter_loop, clippy::explicit_write, clippy::extra_unused_lifetimes, clippy::extra_unused_type_parameters, 
clippy::filter_map_identity, clippy::filter_next, clippy::flat_map_identity, clippy::get_last_with_len, clippy::identity_op, clippy::implied_bounds_in_impls, clippy::inspect_for_each, clippy::int_plus_one, clippy::iter_count, clippy::iter_kv_map, clippy::let_with_type_underscore, clippy::manual_clamp, clippy::manual_filter, clippy::manual_filter_map, clippy::manual_find, clippy::manual_find_map, clippy::manual_flatten, clippy::manual_hash_one, clippy::manual_inspect, clippy::manual_main_separator_str, clippy::manual_range_patterns, clippy::manual_rem_euclid, clippy::manual_slice_size_calculation, clippy::manual_split_once, clippy::manual_strip, clippy::manual_swap, clippy::manual_unwrap_or, clippy::map_flatten, clippy::map_identity, clippy::match_as_ref, clippy::match_single_binding, clippy::needless_arbitrary_self_type, clippy::needless_bool, clippy::needless_bool_assign, clippy::needless_borrowed_reference, clippy::needless_if, clippy::needless_lifetimes, clippy::needless_match, clippy::needless_option_as_deref, clippy::needless_option_take, clippy::needless_question_mark, clippy::needless_splitn, clippy::needless_update, clippy::neg_cmp_op_on_partial_ord, clippy::no_effect, clippy::nonminimal_bool, clippy::only_used_in_recursion, clippy::option_as_ref_deref, clippy::option_filter_map, clippy::option_map_unit_fn, clippy::or_then_unwrap, clippy::overflow_check_conditional, clippy::partialeq_ne_impl, clippy::precedence, clippy::ptr_offset_with_cast, clippy::range_zip_with_len, clippy::redundant_as_str, clippy::redundant_async_block, clippy::redundant_at_rest_pattern, clippy::redundant_closure_call, clippy::redundant_guards, clippy::redundant_slicing, clippy::repeat_once, clippy::reserve_after_initialization, clippy::result_filter_map, clippy::result_map_unit_fn, clippy::search_is_some, clippy::seek_from_current, clippy::seek_to_start_instead_of_rewind, clippy::short_circuit_statement, clippy::single_element_loop, clippy::skip_while_next, clippy::string_from_utf8_as_bytes, clippy::strlen_on_c_strings, clippy::temporary_assignment, clippy::too_many_arguments, clippy::transmute_bytes_to_str, clippy::transmute_float_to_int, clippy::transmute_int_to_bool, clippy::transmute_int_to_char, clippy::transmute_int_to_float, clippy::transmute_int_to_non_zero, clippy::transmute_num_to_bytes, clippy::transmute_ptr_to_ref, clippy::transmutes_expressible_as_ptr_casts, clippy::type_complexity, clippy::unit_arg, clippy::unnecessary_cast, clippy::unnecessary_filter_map, clippy::unnecessary_find_map, clippy::unnecessary_literal_unwrap, clippy::unnecessary_map_on_constructor, clippy::unnecessary_min_or_max, clippy::unnecessary_operation, clippy::unnecessary_sort_by, clippy::unnecessary_unwrap, clippy::unneeded_wildcard_pattern, clippy::unused_format_specs, clippy::useless_asref, clippy::useless_conversion, clippy::useless_format, clippy::useless_transmute, clippy::vec_box, clippy::while_let_loop, clippy::wildcard_in_or_patterns, clippy::zero_divided_by_zero, clippy::zero_prefixed_literal"##, }, children: &[ "clippy::bind_instead_of_map", @@ -13809,12 +14013,14 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::iter_count", "clippy::iter_kv_map", "clippy::let_with_type_underscore", + "clippy::manual_clamp", "clippy::manual_filter", "clippy::manual_filter_map", "clippy::manual_find", "clippy::manual_find_map", "clippy::manual_flatten", "clippy::manual_hash_one", + "clippy::manual_inspect", "clippy::manual_main_separator_str", "clippy::manual_range_patterns", "clippy::manual_rem_euclid", @@ -13888,6 
+14094,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::unnecessary_find_map", "clippy::unnecessary_literal_unwrap", "clippy::unnecessary_map_on_constructor", + "clippy::unnecessary_min_or_max", "clippy::unnecessary_operation", "clippy::unnecessary_sort_by", "clippy::unnecessary_unwrap", @@ -13907,7 +14114,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ LintGroup { lint: Lint { label: "clippy::correctness", - description: r##"lint group for: clippy::absurd_extreme_comparisons, clippy::almost_swapped, clippy::approx_constant, clippy::async_yields_async, clippy::bad_bit_mask, clippy::cast_slice_different_sizes, clippy::deprecated_semver, clippy::derive_ord_xor_partial_ord, clippy::derived_hash_with_manual_eq, clippy::eager_transmute, clippy::enum_clike_unportable_variant, clippy::eq_op, clippy::erasing_op, clippy::fn_address_comparisons, clippy::if_let_mutex, clippy::ifs_same_cond, clippy::impl_hash_borrow_with_str_and_bytes, clippy::impossible_comparisons, clippy::ineffective_bit_mask, clippy::infinite_iter, clippy::inherent_to_string_shadow_display, clippy::inline_fn_without_body, clippy::invalid_null_ptr_usage, clippy::invalid_regex, clippy::invisible_characters, clippy::iter_next_loop, clippy::iter_skip_zero, clippy::iterator_step_by_zero, clippy::let_underscore_lock, clippy::lint_groups_priority, clippy::match_str_case_mismatch, clippy::mem_replace_with_uninit, clippy::min_max, clippy::mismatched_target_os, clippy::mistyped_literal_suffixes, clippy::modulo_one, clippy::mut_from_ref, clippy::never_loop, clippy::non_octal_unix_permissions, clippy::nonsensical_open_options, clippy::not_unsafe_ptr_arg_deref, clippy::option_env_unwrap, clippy::out_of_bounds_indexing, clippy::overly_complex_bool_expr, clippy::panicking_unwrap, clippy::possible_missing_comma, clippy::read_line_without_trim, clippy::recursive_format_impl, clippy::redundant_comparisons, clippy::redundant_locals, clippy::reversed_empty_ranges, clippy::self_assignment, clippy::serde_api_misuse, clippy::size_of_in_element_count, clippy::suspicious_splitn, clippy::transmute_null_to_fn, clippy::transmuting_null, clippy::uninit_assumed_init, clippy::uninit_vec, clippy::unit_cmp, clippy::unit_hash, clippy::unit_return_expecting_ord, clippy::unsound_collection_transmute, clippy::unused_io_amount, clippy::useless_attribute, clippy::vec_resize_to_zero, clippy::while_immutable_condition, clippy::wrong_transmute, clippy::zst_offset"##, + description: r##"lint group for: clippy::absurd_extreme_comparisons, clippy::almost_swapped, clippy::approx_constant, clippy::async_yields_async, clippy::bad_bit_mask, clippy::cast_slice_different_sizes, clippy::deprecated_semver, clippy::derive_ord_xor_partial_ord, clippy::derived_hash_with_manual_eq, clippy::eager_transmute, clippy::enum_clike_unportable_variant, clippy::eq_op, clippy::erasing_op, clippy::fn_address_comparisons, clippy::if_let_mutex, clippy::ifs_same_cond, clippy::impl_hash_borrow_with_str_and_bytes, clippy::impossible_comparisons, clippy::ineffective_bit_mask, clippy::infinite_iter, clippy::inherent_to_string_shadow_display, clippy::inline_fn_without_body, clippy::invalid_null_ptr_usage, clippy::invalid_regex, clippy::invisible_characters, clippy::iter_next_loop, clippy::iter_skip_zero, clippy::iterator_step_by_zero, clippy::let_underscore_lock, clippy::lint_groups_priority, clippy::match_str_case_mismatch, clippy::mem_replace_with_uninit, clippy::min_max, clippy::mistyped_literal_suffixes, clippy::modulo_one, clippy::mut_from_ref, clippy::never_loop, 
clippy::non_octal_unix_permissions, clippy::nonsensical_open_options, clippy::not_unsafe_ptr_arg_deref, clippy::option_env_unwrap, clippy::out_of_bounds_indexing, clippy::overly_complex_bool_expr, clippy::panicking_unwrap, clippy::possible_missing_comma, clippy::read_line_without_trim, clippy::recursive_format_impl, clippy::redundant_comparisons, clippy::redundant_locals, clippy::reversed_empty_ranges, clippy::self_assignment, clippy::serde_api_misuse, clippy::size_of_in_element_count, clippy::suspicious_splitn, clippy::transmute_null_to_fn, clippy::transmuting_null, clippy::uninit_assumed_init, clippy::uninit_vec, clippy::unit_cmp, clippy::unit_hash, clippy::unit_return_expecting_ord, clippy::unsound_collection_transmute, clippy::unused_io_amount, clippy::useless_attribute, clippy::vec_resize_to_zero, clippy::while_immutable_condition, clippy::wrong_transmute, clippy::zst_offset"##, }, children: &[ "clippy::absurd_extreme_comparisons", @@ -13943,7 +14150,6 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::match_str_case_mismatch", "clippy::mem_replace_with_uninit", "clippy::min_max", - "clippy::mismatched_target_os", "clippy::mistyped_literal_suffixes", "clippy::modulo_one", "clippy::mut_from_ref", @@ -13984,7 +14190,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ LintGroup { lint: Lint { label: "clippy::deprecated", - description: r##"lint group for: clippy::assign_ops, clippy::extend_from_slice, clippy::filter_map, clippy::find_map, clippy::if_let_redundant_pattern_matching, clippy::misaligned_transmute, clippy::pub_enum_variant_names, clippy::range_step_by_zero, clippy::regex_macro, clippy::replace_consts, clippy::should_assert_eq, clippy::unsafe_vector_initialization, clippy::unstable_as_mut_slice, clippy::unstable_as_slice, clippy::unused_collect, clippy::wrong_pub_self_convention"##, + description: r##"lint group for: clippy::assign_ops, clippy::extend_from_slice, clippy::filter_map, clippy::find_map, clippy::if_let_redundant_pattern_matching, clippy::maybe_misused_cfg, clippy::misaligned_transmute, clippy::mismatched_target_os, clippy::pub_enum_variant_names, clippy::range_step_by_zero, clippy::regex_macro, clippy::replace_consts, clippy::should_assert_eq, clippy::unsafe_vector_initialization, clippy::unstable_as_mut_slice, clippy::unstable_as_slice, clippy::unused_collect, clippy::wrong_pub_self_convention"##, }, children: &[ "clippy::assign_ops", @@ -13992,7 +14198,9 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::filter_map", "clippy::find_map", "clippy::if_let_redundant_pattern_matching", + "clippy::maybe_misused_cfg", "clippy::misaligned_transmute", + "clippy::mismatched_target_os", "clippy::pub_enum_variant_names", "clippy::range_step_by_zero", "clippy::regex_macro", @@ -14008,7 +14216,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ LintGroup { lint: Lint { label: "clippy::nursery", - description: r##"lint group for: clippy::as_ptr_cast_mut, clippy::branches_sharing_code, clippy::clear_with_drain, clippy::cognitive_complexity, clippy::collection_is_never_read, clippy::debug_assert_with_mut_call, clippy::derive_partial_eq_without_eq, clippy::empty_line_after_doc_comments, clippy::empty_line_after_outer_attr, clippy::equatable_if_let, clippy::fallible_impl_from, clippy::future_not_send, clippy::imprecise_flops, clippy::iter_on_empty_collections, clippy::iter_on_single_items, clippy::iter_with_drain, clippy::large_stack_frames, clippy::manual_clamp, clippy::missing_const_for_fn, clippy::mutex_integer, clippy::needless_collect, 
clippy::needless_pass_by_ref_mut, clippy::non_send_fields_in_send_ty, clippy::nonstandard_macro_braces, clippy::option_if_let_else, clippy::or_fun_call, clippy::path_buf_push_overwrite, clippy::read_zero_byte_vec, clippy::readonly_write_lock, clippy::redundant_clone, clippy::redundant_pub_crate, clippy::significant_drop_in_scrutinee, clippy::significant_drop_tightening, clippy::string_lit_as_bytes, clippy::suboptimal_flops, clippy::suspicious_operation_groupings, clippy::trailing_empty_array, clippy::trait_duplication_in_bounds, clippy::transmute_undefined_repr, clippy::trivial_regex, clippy::tuple_array_conversions, clippy::type_repetition_in_bounds, clippy::uninhabited_references, clippy::unnecessary_struct_initialization, clippy::unused_peekable, clippy::unused_rounding, clippy::use_self, clippy::useless_let_if_seq"##, + description: r##"lint group for: clippy::as_ptr_cast_mut, clippy::branches_sharing_code, clippy::clear_with_drain, clippy::cognitive_complexity, clippy::collection_is_never_read, clippy::debug_assert_with_mut_call, clippy::derive_partial_eq_without_eq, clippy::empty_line_after_doc_comments, clippy::empty_line_after_outer_attr, clippy::equatable_if_let, clippy::fallible_impl_from, clippy::future_not_send, clippy::imprecise_flops, clippy::iter_on_empty_collections, clippy::iter_on_single_items, clippy::iter_with_drain, clippy::large_stack_frames, clippy::missing_const_for_fn, clippy::mutex_integer, clippy::needless_collect, clippy::needless_pass_by_ref_mut, clippy::non_send_fields_in_send_ty, clippy::nonstandard_macro_braces, clippy::option_if_let_else, clippy::or_fun_call, clippy::path_buf_push_overwrite, clippy::read_zero_byte_vec, clippy::redundant_clone, clippy::redundant_pub_crate, clippy::set_contains_or_insert, clippy::significant_drop_in_scrutinee, clippy::significant_drop_tightening, clippy::string_lit_as_bytes, clippy::suboptimal_flops, clippy::suspicious_operation_groupings, clippy::trailing_empty_array, clippy::trait_duplication_in_bounds, clippy::transmute_undefined_repr, clippy::trivial_regex, clippy::tuple_array_conversions, clippy::type_repetition_in_bounds, clippy::uninhabited_references, clippy::unnecessary_struct_initialization, clippy::unused_peekable, clippy::unused_rounding, clippy::use_self, clippy::useless_let_if_seq, clippy::while_float"##, }, children: &[ "clippy::as_ptr_cast_mut", @@ -14028,7 +14236,6 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::iter_on_single_items", "clippy::iter_with_drain", "clippy::large_stack_frames", - "clippy::manual_clamp", "clippy::missing_const_for_fn", "clippy::mutex_integer", "clippy::needless_collect", @@ -14039,9 +14246,9 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::or_fun_call", "clippy::path_buf_push_overwrite", "clippy::read_zero_byte_vec", - "clippy::readonly_write_lock", "clippy::redundant_clone", "clippy::redundant_pub_crate", + "clippy::set_contains_or_insert", "clippy::significant_drop_in_scrutinee", "clippy::significant_drop_tightening", "clippy::string_lit_as_bytes", @@ -14059,14 +14266,16 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::unused_rounding", "clippy::use_self", "clippy::useless_let_if_seq", + "clippy::while_float", ], }, LintGroup { lint: Lint { label: "clippy::pedantic", - description: r##"lint group for: clippy::bool_to_int_with_if, clippy::borrow_as_ptr, clippy::case_sensitive_file_extension_comparisons, clippy::cast_lossless, clippy::cast_possible_truncation, clippy::cast_possible_wrap, clippy::cast_precision_loss, clippy::cast_ptr_alignment, 
clippy::cast_sign_loss, clippy::checked_conversions, clippy::cloned_instead_of_copied, clippy::copy_iterator, clippy::default_trait_access, clippy::doc_link_with_quotes, clippy::doc_markdown, clippy::empty_enum, clippy::enum_glob_use, clippy::expl_impl_clone_on_copy, clippy::explicit_deref_methods, clippy::explicit_into_iter_loop, clippy::explicit_iter_loop, clippy::filter_map_next, clippy::flat_map_option, clippy::float_cmp, clippy::fn_params_excessive_bools, clippy::from_iter_instead_of_collect, clippy::if_not_else, clippy::ignored_unit_patterns, clippy::implicit_clone, clippy::implicit_hasher, clippy::inconsistent_struct_constructor, clippy::index_refutable_slice, clippy::inefficient_to_string, clippy::inline_always, clippy::into_iter_without_iter, clippy::invalid_upcast_comparisons, clippy::items_after_statements, clippy::iter_filter_is_ok, clippy::iter_filter_is_some, clippy::iter_not_returning_iterator, clippy::iter_without_into_iter, clippy::large_digit_groups, clippy::large_futures, clippy::large_stack_arrays, clippy::large_types_passed_by_value, clippy::linkedlist, clippy::macro_use_imports, clippy::manual_assert, clippy::manual_c_str_literals, clippy::manual_instant_elapsed, clippy::manual_is_variant_and, clippy::manual_let_else, clippy::manual_ok_or, clippy::manual_string_new, clippy::many_single_char_names, clippy::map_unwrap_or, clippy::match_bool, clippy::match_on_vec_items, clippy::match_same_arms, clippy::match_wild_err_arm, clippy::match_wildcard_for_single_variants, clippy::maybe_infinite_iter, clippy::mismatching_type_param_order, clippy::missing_errors_doc, clippy::missing_fields_in_debug, clippy::missing_panics_doc, clippy::module_name_repetitions, clippy::must_use_candidate, clippy::mut_mut, clippy::naive_bytecount, clippy::needless_bitwise_bool, clippy::needless_continue, clippy::needless_for_each, clippy::needless_pass_by_value, clippy::needless_raw_string_hashes, clippy::no_effect_underscore_binding, clippy::no_mangle_with_rust_abi, clippy::option_as_ref_cloned, clippy::option_option, clippy::ptr_as_ptr, clippy::ptr_cast_constness, clippy::pub_underscore_fields, clippy::range_minus_one, clippy::range_plus_one, clippy::redundant_closure_for_method_calls, clippy::redundant_else, clippy::ref_as_ptr, clippy::ref_binding_to_reference, clippy::ref_option_ref, clippy::return_self_not_must_use, clippy::same_functions_in_if_condition, clippy::semicolon_if_nothing_returned, clippy::should_panic_without_expect, clippy::similar_names, clippy::single_match_else, clippy::stable_sort_primitive, clippy::str_split_at_newline, clippy::string_add_assign, clippy::struct_excessive_bools, clippy::struct_field_names, clippy::too_many_lines, clippy::transmute_ptr_to_ptr, clippy::trivially_copy_pass_by_ref, clippy::unchecked_duration_subtraction, clippy::unicode_not_nfc, clippy::uninlined_format_args, clippy::unnecessary_box_returns, clippy::unnecessary_join, clippy::unnecessary_wraps, clippy::unnested_or_patterns, clippy::unreadable_literal, clippy::unsafe_derive_deserialize, clippy::unused_async, clippy::unused_self, clippy::used_underscore_binding, clippy::verbose_bit_mask, clippy::wildcard_imports, clippy::zero_sized_map_values"##, + description: r##"lint group for: clippy::assigning_clones, clippy::bool_to_int_with_if, clippy::borrow_as_ptr, clippy::case_sensitive_file_extension_comparisons, clippy::cast_lossless, clippy::cast_possible_truncation, clippy::cast_possible_wrap, clippy::cast_precision_loss, clippy::cast_ptr_alignment, clippy::cast_sign_loss, clippy::checked_conversions, 
clippy::cloned_instead_of_copied, clippy::copy_iterator, clippy::default_trait_access, clippy::doc_link_with_quotes, clippy::doc_markdown, clippy::empty_enum, clippy::enum_glob_use, clippy::expl_impl_clone_on_copy, clippy::explicit_deref_methods, clippy::explicit_into_iter_loop, clippy::explicit_iter_loop, clippy::filter_map_next, clippy::flat_map_option, clippy::float_cmp, clippy::fn_params_excessive_bools, clippy::from_iter_instead_of_collect, clippy::if_not_else, clippy::ignored_unit_patterns, clippy::implicit_clone, clippy::implicit_hasher, clippy::inconsistent_struct_constructor, clippy::index_refutable_slice, clippy::inefficient_to_string, clippy::inline_always, clippy::into_iter_without_iter, clippy::invalid_upcast_comparisons, clippy::items_after_statements, clippy::iter_filter_is_ok, clippy::iter_filter_is_some, clippy::iter_not_returning_iterator, clippy::iter_without_into_iter, clippy::large_digit_groups, clippy::large_futures, clippy::large_stack_arrays, clippy::large_types_passed_by_value, clippy::linkedlist, clippy::macro_use_imports, clippy::manual_assert, clippy::manual_c_str_literals, clippy::manual_instant_elapsed, clippy::manual_is_variant_and, clippy::manual_let_else, clippy::manual_ok_or, clippy::manual_string_new, clippy::many_single_char_names, clippy::map_unwrap_or, clippy::match_bool, clippy::match_on_vec_items, clippy::match_same_arms, clippy::match_wild_err_arm, clippy::match_wildcard_for_single_variants, clippy::maybe_infinite_iter, clippy::mismatching_type_param_order, clippy::missing_errors_doc, clippy::missing_fields_in_debug, clippy::missing_panics_doc, clippy::module_name_repetitions, clippy::must_use_candidate, clippy::mut_mut, clippy::naive_bytecount, clippy::needless_bitwise_bool, clippy::needless_continue, clippy::needless_for_each, clippy::needless_pass_by_value, clippy::needless_raw_string_hashes, clippy::no_effect_underscore_binding, clippy::no_mangle_with_rust_abi, clippy::option_as_ref_cloned, clippy::option_option, clippy::ptr_as_ptr, clippy::ptr_cast_constness, clippy::pub_underscore_fields, clippy::range_minus_one, clippy::range_plus_one, clippy::redundant_closure_for_method_calls, clippy::redundant_else, clippy::ref_as_ptr, clippy::ref_binding_to_reference, clippy::ref_option_ref, clippy::return_self_not_must_use, clippy::same_functions_in_if_condition, clippy::semicolon_if_nothing_returned, clippy::should_panic_without_expect, clippy::similar_names, clippy::single_char_pattern, clippy::single_match_else, clippy::stable_sort_primitive, clippy::str_split_at_newline, clippy::string_add_assign, clippy::struct_excessive_bools, clippy::struct_field_names, clippy::too_many_lines, clippy::transmute_ptr_to_ptr, clippy::trivially_copy_pass_by_ref, clippy::unchecked_duration_subtraction, clippy::unicode_not_nfc, clippy::uninlined_format_args, clippy::unnecessary_box_returns, clippy::unnecessary_join, clippy::unnecessary_wraps, clippy::unnested_or_patterns, clippy::unreadable_literal, clippy::unsafe_derive_deserialize, clippy::unused_async, clippy::unused_self, clippy::used_underscore_binding, clippy::verbose_bit_mask, clippy::wildcard_imports, clippy::zero_sized_map_values"##, }, children: &[ + "clippy::assigning_clones", "clippy::bool_to_int_with_if", "clippy::borrow_as_ptr", "clippy::case_sensitive_file_extension_comparisons", @@ -14161,6 +14370,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::semicolon_if_nothing_returned", "clippy::should_panic_without_expect", "clippy::similar_names", + "clippy::single_char_pattern", 
"clippy::single_match_else", "clippy::stable_sort_primitive", "clippy::str_split_at_newline", @@ -14190,11 +14400,10 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ LintGroup { lint: Lint { label: "clippy::perf", - description: r##"lint group for: clippy::box_collection, clippy::box_default, clippy::boxed_local, clippy::cmp_owned, clippy::collapsible_str_replace, clippy::drain_collect, clippy::expect_fun_call, clippy::extend_with_drain, clippy::format_collect, clippy::format_in_format_args, clippy::iter_nth, clippy::iter_overeager_cloned, clippy::large_const_arrays, clippy::large_enum_variant, clippy::manual_memcpy, clippy::manual_retain, clippy::manual_str_repeat, clippy::manual_try_fold, clippy::map_entry, clippy::missing_spin_loop, clippy::redundant_allocation, clippy::result_large_err, clippy::single_char_pattern, clippy::slow_vector_initialization, clippy::thread_local_initializer_can_be_made_const, clippy::to_string_in_format_args, clippy::unnecessary_to_owned, clippy::useless_vec, clippy::vec_init_then_push, clippy::waker_clone_wake"##, + description: r##"lint group for: clippy::box_collection, clippy::boxed_local, clippy::cmp_owned, clippy::collapsible_str_replace, clippy::drain_collect, clippy::expect_fun_call, clippy::extend_with_drain, clippy::format_collect, clippy::format_in_format_args, clippy::iter_overeager_cloned, clippy::large_const_arrays, clippy::large_enum_variant, clippy::manual_memcpy, clippy::manual_retain, clippy::manual_str_repeat, clippy::manual_try_fold, clippy::map_entry, clippy::missing_const_for_thread_local, clippy::missing_spin_loop, clippy::readonly_write_lock, clippy::redundant_allocation, clippy::result_large_err, clippy::slow_vector_initialization, clippy::to_string_in_format_args, clippy::unnecessary_to_owned, clippy::useless_vec, clippy::vec_init_then_push, clippy::waker_clone_wake"##, }, children: &[ "clippy::box_collection", - "clippy::box_default", "clippy::boxed_local", "clippy::cmp_owned", "clippy::collapsible_str_replace", @@ -14203,7 +14412,6 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::extend_with_drain", "clippy::format_collect", "clippy::format_in_format_args", - "clippy::iter_nth", "clippy::iter_overeager_cloned", "clippy::large_const_arrays", "clippy::large_enum_variant", @@ -14212,12 +14420,12 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::manual_str_repeat", "clippy::manual_try_fold", "clippy::map_entry", + "clippy::missing_const_for_thread_local", "clippy::missing_spin_loop", + "clippy::readonly_write_lock", "clippy::redundant_allocation", "clippy::result_large_err", - "clippy::single_char_pattern", "clippy::slow_vector_initialization", - "clippy::thread_local_initializer_can_be_made_const", "clippy::to_string_in_format_args", "clippy::unnecessary_to_owned", "clippy::useless_vec", @@ -14228,7 +14436,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ LintGroup { lint: Lint { label: "clippy::restriction", - description: r##"lint group for: clippy::absolute_paths, clippy::alloc_instead_of_core, clippy::allow_attributes, clippy::allow_attributes_without_reason, clippy::arithmetic_side_effects, clippy::as_conversions, clippy::as_underscore, clippy::assertions_on_result_states, clippy::big_endian_bytes, clippy::clone_on_ref_ptr, clippy::create_dir, clippy::dbg_macro, clippy::decimal_literal_representation, clippy::default_numeric_fallback, clippy::default_union_representation, clippy::deref_by_slicing, clippy::disallowed_script_idents, clippy::else_if_without_else, clippy::empty_drop, 
clippy::empty_enum_variants_with_brackets, clippy::empty_structs_with_brackets, clippy::error_impl_error, clippy::exhaustive_enums, clippy::exhaustive_structs, clippy::exit, clippy::expect_used, clippy::filetype_is_file, clippy::float_arithmetic, clippy::float_cmp_const, clippy::fn_to_numeric_cast_any, clippy::format_push_string, clippy::get_unwrap, clippy::host_endian_bytes, clippy::if_then_some_else_none, clippy::impl_trait_in_params, clippy::implicit_return, clippy::indexing_slicing, clippy::infinite_loop, clippy::inline_asm_x86_att_syntax, clippy::inline_asm_x86_intel_syntax, clippy::integer_division, clippy::iter_over_hash_type, clippy::large_include_file, clippy::let_underscore_must_use, clippy::let_underscore_untyped, clippy::little_endian_bytes, clippy::lossy_float_literal, clippy::map_err_ignore, clippy::mem_forget, clippy::min_ident_chars, clippy::missing_assert_message, clippy::missing_asserts_for_indexing, clippy::missing_docs_in_private_items, clippy::missing_inline_in_public_items, clippy::missing_trait_methods, clippy::mixed_read_write_in_expression, clippy::mod_module_files, clippy::modulo_arithmetic, clippy::multiple_inherent_impl, clippy::multiple_unsafe_ops_per_block, clippy::mutex_atomic, clippy::needless_raw_strings, clippy::non_ascii_literal, clippy::panic, clippy::panic_in_result_fn, clippy::partial_pub_fields, clippy::pattern_type_mismatch, clippy::print_stderr, clippy::print_stdout, clippy::pub_use, clippy::pub_with_shorthand, clippy::pub_without_shorthand, clippy::question_mark_used, clippy::rc_buffer, clippy::rc_mutex, clippy::redundant_type_annotations, clippy::ref_patterns, clippy::rest_pat_in_fully_bound_structs, clippy::same_name_method, clippy::self_named_module_files, clippy::semicolon_inside_block, clippy::semicolon_outside_block, clippy::separated_literal_suffix, clippy::shadow_reuse, clippy::shadow_same, clippy::shadow_unrelated, clippy::single_call_fn, clippy::single_char_lifetime_names, clippy::std_instead_of_alloc, clippy::std_instead_of_core, clippy::str_to_string, clippy::string_add, clippy::string_lit_chars_any, clippy::string_slice, clippy::string_to_string, clippy::suspicious_xor_used_as_pow, clippy::tests_outside_test_module, clippy::todo, clippy::try_err, clippy::undocumented_unsafe_blocks, clippy::unimplemented, clippy::unnecessary_safety_comment, clippy::unnecessary_safety_doc, clippy::unnecessary_self_imports, clippy::unneeded_field_pattern, clippy::unreachable, clippy::unseparated_literal_suffix, clippy::unwrap_in_result, clippy::unwrap_used, clippy::use_debug, clippy::verbose_file_reads, clippy::wildcard_enum_match_arm"##, + description: r##"lint group for: clippy::absolute_paths, clippy::alloc_instead_of_core, clippy::allow_attributes, clippy::allow_attributes_without_reason, clippy::arithmetic_side_effects, clippy::as_conversions, clippy::as_underscore, clippy::assertions_on_result_states, clippy::big_endian_bytes, clippy::cfg_not_test, clippy::clone_on_ref_ptr, clippy::create_dir, clippy::dbg_macro, clippy::decimal_literal_representation, clippy::default_numeric_fallback, clippy::default_union_representation, clippy::deref_by_slicing, clippy::disallowed_script_idents, clippy::else_if_without_else, clippy::empty_drop, clippy::empty_enum_variants_with_brackets, clippy::empty_structs_with_brackets, clippy::error_impl_error, clippy::exhaustive_enums, clippy::exhaustive_structs, clippy::exit, clippy::expect_used, clippy::field_scoped_visibility_modifiers, clippy::filetype_is_file, clippy::float_arithmetic, clippy::float_cmp_const, 
clippy::fn_to_numeric_cast_any, clippy::format_push_string, clippy::get_unwrap, clippy::host_endian_bytes, clippy::if_then_some_else_none, clippy::impl_trait_in_params, clippy::implicit_return, clippy::indexing_slicing, clippy::infinite_loop, clippy::inline_asm_x86_att_syntax, clippy::inline_asm_x86_intel_syntax, clippy::integer_division, clippy::integer_division_remainder_used, clippy::iter_over_hash_type, clippy::large_include_file, clippy::let_underscore_must_use, clippy::let_underscore_untyped, clippy::little_endian_bytes, clippy::lossy_float_literal, clippy::map_err_ignore, clippy::mem_forget, clippy::min_ident_chars, clippy::missing_assert_message, clippy::missing_asserts_for_indexing, clippy::missing_docs_in_private_items, clippy::missing_inline_in_public_items, clippy::missing_trait_methods, clippy::mixed_read_write_in_expression, clippy::mod_module_files, clippy::modulo_arithmetic, clippy::multiple_inherent_impl, clippy::multiple_unsafe_ops_per_block, clippy::mutex_atomic, clippy::needless_raw_strings, clippy::non_ascii_literal, clippy::panic, clippy::panic_in_result_fn, clippy::partial_pub_fields, clippy::pattern_type_mismatch, clippy::print_stderr, clippy::print_stdout, clippy::pub_use, clippy::pub_with_shorthand, clippy::pub_without_shorthand, clippy::question_mark_used, clippy::rc_buffer, clippy::rc_mutex, clippy::redundant_type_annotations, clippy::ref_patterns, clippy::renamed_function_params, clippy::rest_pat_in_fully_bound_structs, clippy::same_name_method, clippy::self_named_module_files, clippy::semicolon_inside_block, clippy::semicolon_outside_block, clippy::separated_literal_suffix, clippy::shadow_reuse, clippy::shadow_same, clippy::shadow_unrelated, clippy::single_call_fn, clippy::single_char_lifetime_names, clippy::std_instead_of_alloc, clippy::std_instead_of_core, clippy::str_to_string, clippy::string_add, clippy::string_lit_chars_any, clippy::string_slice, clippy::string_to_string, clippy::suspicious_xor_used_as_pow, clippy::tests_outside_test_module, clippy::todo, clippy::try_err, clippy::undocumented_unsafe_blocks, clippy::unimplemented, clippy::unnecessary_safety_comment, clippy::unnecessary_safety_doc, clippy::unnecessary_self_imports, clippy::unneeded_field_pattern, clippy::unreachable, clippy::unseparated_literal_suffix, clippy::unwrap_in_result, clippy::unwrap_used, clippy::use_debug, clippy::verbose_file_reads, clippy::wildcard_enum_match_arm"##, }, children: &[ "clippy::absolute_paths", @@ -14240,6 +14448,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::as_underscore", "clippy::assertions_on_result_states", "clippy::big_endian_bytes", + "clippy::cfg_not_test", "clippy::clone_on_ref_ptr", "clippy::create_dir", "clippy::dbg_macro", @@ -14257,6 +14466,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::exhaustive_structs", "clippy::exit", "clippy::expect_used", + "clippy::field_scoped_visibility_modifiers", "clippy::filetype_is_file", "clippy::float_arithmetic", "clippy::float_cmp_const", @@ -14272,6 +14482,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::inline_asm_x86_att_syntax", "clippy::inline_asm_x86_intel_syntax", "clippy::integer_division", + "clippy::integer_division_remainder_used", "clippy::iter_over_hash_type", "clippy::large_include_file", "clippy::let_underscore_must_use", @@ -14308,6 +14519,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::rc_mutex", "clippy::redundant_type_annotations", "clippy::ref_patterns", + "clippy::renamed_function_params", "clippy::rest_pat_in_fully_bound_structs", 
"clippy::same_name_method", "clippy::self_named_module_files", @@ -14348,7 +14560,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ LintGroup { lint: Lint { label: "clippy::style", - description: r##"lint group for: clippy::assertions_on_constants, clippy::assign_op_pattern, clippy::blocks_in_conditions, clippy::bool_assert_comparison, clippy::borrow_interior_mutable_const, clippy::builtin_type_shadow, clippy::bytes_nth, clippy::chars_last_cmp, clippy::chars_next_cmp, clippy::cmp_null, clippy::collapsible_else_if, clippy::collapsible_if, clippy::collapsible_match, clippy::comparison_chain, clippy::comparison_to_empty, clippy::declare_interior_mutable_const, clippy::default_instead_of_iter_empty, clippy::disallowed_macros, clippy::disallowed_methods, clippy::disallowed_names, clippy::disallowed_types, clippy::double_must_use, clippy::double_neg, clippy::duplicate_underscore_argument, clippy::enum_variant_names, clippy::err_expect, clippy::excessive_precision, clippy::field_reassign_with_default, clippy::filter_map_bool_then, clippy::fn_to_numeric_cast, clippy::fn_to_numeric_cast_with_truncation, clippy::for_kv_map, clippy::from_over_into, clippy::from_str_radix_10, clippy::get_first, clippy::if_same_then_else, clippy::implicit_saturating_add, clippy::implicit_saturating_sub, clippy::inconsistent_digit_grouping, clippy::infallible_destructuring_match, clippy::inherent_to_string, clippy::init_numbered_fields, clippy::into_iter_on_ref, clippy::is_digit_ascii_radix, clippy::items_after_test_module, clippy::iter_cloned_collect, clippy::iter_next_slice, clippy::iter_nth_zero, clippy::iter_skip_next, clippy::just_underscores_and_digits, clippy::len_without_is_empty, clippy::len_zero, clippy::let_and_return, clippy::let_unit_value, clippy::main_recursion, clippy::manual_async_fn, clippy::manual_bits, clippy::manual_is_ascii_check, clippy::manual_is_finite, clippy::manual_is_infinite, clippy::manual_map, clippy::manual_next_back, clippy::manual_non_exhaustive, clippy::manual_range_contains, clippy::manual_saturating_arithmetic, clippy::manual_while_let_some, clippy::map_clone, clippy::map_collect_result_unit, clippy::match_like_matches_macro, clippy::match_overlapping_arm, clippy::match_ref_pats, clippy::match_result_ok, clippy::mem_replace_option_with_none, clippy::mem_replace_with_default, clippy::missing_enforced_import_renames, clippy::missing_safety_doc, clippy::mixed_case_hex_literals, clippy::module_inception, clippy::must_use_unit, clippy::mut_mutex_lock, clippy::needless_borrow, clippy::needless_borrows_for_generic_args, clippy::needless_doctest_main, clippy::needless_else, clippy::needless_late_init, clippy::needless_parens_on_range_literals, clippy::needless_pub_self, clippy::needless_range_loop, clippy::needless_return, clippy::needless_return_with_question_mark, clippy::neg_multiply, clippy::new_ret_no_self, clippy::new_without_default, clippy::non_minimal_cfg, clippy::obfuscated_if_else, clippy::ok_expect, clippy::op_ref, clippy::option_map_or_err_ok, clippy::option_map_or_none, clippy::partialeq_to_none, clippy::print_literal, clippy::print_with_newline, clippy::println_empty_string, clippy::ptr_arg, clippy::ptr_eq, clippy::question_mark, clippy::redundant_closure, clippy::redundant_field_names, clippy::redundant_pattern, clippy::redundant_pattern_matching, clippy::redundant_static_lifetimes, clippy::result_map_or_into_option, clippy::result_unit_err, clippy::same_item_push, clippy::self_named_constructors, clippy::should_implement_trait, clippy::single_char_add_str, 
clippy::single_component_path_imports, clippy::single_match, clippy::string_extend_chars, clippy::tabs_in_doc_comments, clippy::to_digit_is_some, clippy::to_string_trait_impl, clippy::toplevel_ref_arg, clippy::trim_split_whitespace, clippy::unnecessary_fallible_conversions, clippy::unnecessary_fold, clippy::unnecessary_lazy_evaluations, clippy::unnecessary_mut_passed, clippy::unnecessary_owned_empty_strings, clippy::unsafe_removed_from_name, clippy::unused_enumerate_index, clippy::unused_unit, clippy::unusual_byte_groupings, clippy::unwrap_or_default, clippy::upper_case_acronyms, clippy::while_let_on_iterator, clippy::write_literal, clippy::write_with_newline, clippy::writeln_empty_string, clippy::wrong_self_convention, clippy::zero_ptr"##, + description: r##"lint group for: clippy::assertions_on_constants, clippy::assign_op_pattern, clippy::blocks_in_conditions, clippy::bool_assert_comparison, clippy::borrow_interior_mutable_const, clippy::box_default, clippy::builtin_type_shadow, clippy::byte_char_slices, clippy::bytes_nth, clippy::chars_last_cmp, clippy::chars_next_cmp, clippy::cmp_null, clippy::collapsible_else_if, clippy::collapsible_if, clippy::collapsible_match, clippy::comparison_chain, clippy::comparison_to_empty, clippy::declare_interior_mutable_const, clippy::default_instead_of_iter_empty, clippy::disallowed_macros, clippy::disallowed_methods, clippy::disallowed_names, clippy::disallowed_types, clippy::doc_lazy_continuation, clippy::double_must_use, clippy::double_neg, clippy::duplicate_underscore_argument, clippy::enum_variant_names, clippy::err_expect, clippy::excessive_precision, clippy::field_reassign_with_default, clippy::filter_map_bool_then, clippy::fn_to_numeric_cast, clippy::fn_to_numeric_cast_with_truncation, clippy::for_kv_map, clippy::from_over_into, clippy::from_str_radix_10, clippy::get_first, clippy::if_same_then_else, clippy::implicit_saturating_add, clippy::implicit_saturating_sub, clippy::inconsistent_digit_grouping, clippy::infallible_destructuring_match, clippy::inherent_to_string, clippy::init_numbered_fields, clippy::into_iter_on_ref, clippy::is_digit_ascii_radix, clippy::items_after_test_module, clippy::iter_cloned_collect, clippy::iter_next_slice, clippy::iter_nth, clippy::iter_nth_zero, clippy::iter_skip_next, clippy::just_underscores_and_digits, clippy::legacy_numeric_constants, clippy::len_without_is_empty, clippy::len_zero, clippy::let_and_return, clippy::let_unit_value, clippy::main_recursion, clippy::manual_async_fn, clippy::manual_bits, clippy::manual_is_ascii_check, clippy::manual_is_finite, clippy::manual_is_infinite, clippy::manual_map, clippy::manual_next_back, clippy::manual_non_exhaustive, clippy::manual_pattern_char_comparison, clippy::manual_range_contains, clippy::manual_rotate, clippy::manual_saturating_arithmetic, clippy::manual_while_let_some, clippy::map_clone, clippy::map_collect_result_unit, clippy::match_like_matches_macro, clippy::match_overlapping_arm, clippy::match_ref_pats, clippy::match_result_ok, clippy::mem_replace_option_with_none, clippy::mem_replace_with_default, clippy::missing_enforced_import_renames, clippy::missing_safety_doc, clippy::mixed_attributes_style, clippy::mixed_case_hex_literals, clippy::module_inception, clippy::must_use_unit, clippy::mut_mutex_lock, clippy::needless_borrow, clippy::needless_borrows_for_generic_args, clippy::needless_doctest_main, clippy::needless_else, clippy::needless_late_init, clippy::needless_parens_on_range_literals, clippy::needless_pub_self, clippy::needless_range_loop, 
clippy::needless_return, clippy::needless_return_with_question_mark, clippy::neg_multiply, clippy::new_ret_no_self, clippy::new_without_default, clippy::non_minimal_cfg, clippy::obfuscated_if_else, clippy::ok_expect, clippy::op_ref, clippy::option_map_or_err_ok, clippy::option_map_or_none, clippy::partialeq_to_none, clippy::print_literal, clippy::print_with_newline, clippy::println_empty_string, clippy::ptr_arg, clippy::ptr_eq, clippy::question_mark, clippy::redundant_closure, clippy::redundant_field_names, clippy::redundant_pattern, clippy::redundant_pattern_matching, clippy::redundant_static_lifetimes, clippy::result_map_or_into_option, clippy::result_unit_err, clippy::same_item_push, clippy::self_named_constructors, clippy::should_implement_trait, clippy::single_char_add_str, clippy::single_component_path_imports, clippy::single_match, clippy::string_extend_chars, clippy::tabs_in_doc_comments, clippy::to_digit_is_some, clippy::to_string_trait_impl, clippy::toplevel_ref_arg, clippy::trim_split_whitespace, clippy::unnecessary_fallible_conversions, clippy::unnecessary_fold, clippy::unnecessary_lazy_evaluations, clippy::unnecessary_mut_passed, clippy::unnecessary_owned_empty_strings, clippy::unsafe_removed_from_name, clippy::unused_enumerate_index, clippy::unused_unit, clippy::unusual_byte_groupings, clippy::unwrap_or_default, clippy::upper_case_acronyms, clippy::while_let_on_iterator, clippy::write_literal, clippy::write_with_newline, clippy::writeln_empty_string, clippy::wrong_self_convention, clippy::zero_ptr"##, }, children: &[ "clippy::assertions_on_constants", @@ -14356,7 +14568,9 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::blocks_in_conditions", "clippy::bool_assert_comparison", "clippy::borrow_interior_mutable_const", + "clippy::box_default", "clippy::builtin_type_shadow", + "clippy::byte_char_slices", "clippy::bytes_nth", "clippy::chars_last_cmp", "clippy::chars_next_cmp", @@ -14372,6 +14586,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::disallowed_methods", "clippy::disallowed_names", "clippy::disallowed_types", + "clippy::doc_lazy_continuation", "clippy::double_must_use", "clippy::double_neg", "clippy::duplicate_underscore_argument", @@ -14398,9 +14613,11 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::items_after_test_module", "clippy::iter_cloned_collect", "clippy::iter_next_slice", + "clippy::iter_nth", "clippy::iter_nth_zero", "clippy::iter_skip_next", "clippy::just_underscores_and_digits", + "clippy::legacy_numeric_constants", "clippy::len_without_is_empty", "clippy::len_zero", "clippy::let_and_return", @@ -14414,7 +14631,9 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::manual_map", "clippy::manual_next_back", "clippy::manual_non_exhaustive", + "clippy::manual_pattern_char_comparison", "clippy::manual_range_contains", + "clippy::manual_rotate", "clippy::manual_saturating_arithmetic", "clippy::manual_while_let_some", "clippy::map_clone", @@ -14427,6 +14646,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::mem_replace_with_default", "clippy::missing_enforced_import_renames", "clippy::missing_safety_doc", + "clippy::mixed_attributes_style", "clippy::mixed_case_hex_literals", "clippy::module_inception", "clippy::must_use_unit", @@ -14498,7 +14718,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ LintGroup { lint: Lint { label: "clippy::suspicious", - description: r##"lint group for: clippy::almost_complete_range, clippy::arc_with_non_send_sync, clippy::await_holding_invalid_type, clippy::await_holding_lock, 
clippy::await_holding_refcell_ref, clippy::blanket_clippy_restriction_lints, clippy::cast_abs_to_unsigned, clippy::cast_enum_constructor, clippy::cast_enum_truncation, clippy::cast_nan_to_int, clippy::cast_slice_from_raw_parts, clippy::crate_in_macro_def, clippy::deprecated_clippy_cfg_attr, clippy::drop_non_drop, clippy::duplicate_mod, clippy::empty_docs, clippy::empty_loop, clippy::float_equality_without_abs, clippy::forget_non_drop, clippy::four_forward_slashes, clippy::from_raw_with_void_ptr, clippy::incompatible_msrv, clippy::ineffective_open_options, clippy::iter_out_of_bounds, clippy::join_absolute_paths, clippy::let_underscore_future, clippy::lines_filter_map_ok, clippy::maybe_misused_cfg, clippy::misnamed_getters, clippy::misrefactored_assign_op, clippy::mixed_attributes_style, clippy::multi_assignments, clippy::multiple_bound_locations, clippy::mut_range_bound, clippy::mutable_key_type, clippy::no_effect_replace, clippy::non_canonical_clone_impl, clippy::non_canonical_partial_ord_impl, clippy::octal_escapes, clippy::path_ends_with_ext, clippy::permissions_set_readonly_false, clippy::print_in_format_impl, clippy::rc_clone_in_vec_init, clippy::repeat_vec_with_capacity, clippy::single_range_in_vec_init, clippy::size_of_ref, clippy::suspicious_arithmetic_impl, clippy::suspicious_assignment_formatting, clippy::suspicious_command_arg_space, clippy::suspicious_doc_comments, clippy::suspicious_else_formatting, clippy::suspicious_map, clippy::suspicious_op_assign_impl, clippy::suspicious_open_options, clippy::suspicious_to_owned, clippy::suspicious_unary_op_formatting, clippy::swap_ptr_to_ref, clippy::test_attr_in_doctest, clippy::type_id_on_box, clippy::unconditional_recursion, clippy::unnecessary_clippy_cfg, clippy::unnecessary_get_then_check, clippy::unnecessary_result_map_or_else"##, + description: r##"lint group for: clippy::almost_complete_range, clippy::arc_with_non_send_sync, clippy::await_holding_invalid_type, clippy::await_holding_lock, clippy::await_holding_refcell_ref, clippy::blanket_clippy_restriction_lints, clippy::cast_abs_to_unsigned, clippy::cast_enum_constructor, clippy::cast_enum_truncation, clippy::cast_nan_to_int, clippy::cast_slice_from_raw_parts, clippy::const_is_empty, clippy::crate_in_macro_def, clippy::deprecated_clippy_cfg_attr, clippy::drop_non_drop, clippy::duplicate_mod, clippy::duplicated_attributes, clippy::empty_docs, clippy::empty_loop, clippy::float_equality_without_abs, clippy::forget_non_drop, clippy::four_forward_slashes, clippy::from_raw_with_void_ptr, clippy::incompatible_msrv, clippy::ineffective_open_options, clippy::iter_out_of_bounds, clippy::join_absolute_paths, clippy::let_underscore_future, clippy::lines_filter_map_ok, clippy::macro_metavars_in_unsafe, clippy::manual_unwrap_or_default, clippy::misnamed_getters, clippy::misrefactored_assign_op, clippy::missing_transmute_annotations, clippy::multi_assignments, clippy::multiple_bound_locations, clippy::mut_range_bound, clippy::mutable_key_type, clippy::needless_character_iteration, clippy::needless_maybe_sized, clippy::no_effect_replace, clippy::non_canonical_clone_impl, clippy::non_canonical_partial_ord_impl, clippy::octal_escapes, clippy::path_ends_with_ext, clippy::permissions_set_readonly_false, clippy::print_in_format_impl, clippy::rc_clone_in_vec_init, clippy::repeat_vec_with_capacity, clippy::single_range_in_vec_init, clippy::size_of_ref, clippy::suspicious_arithmetic_impl, clippy::suspicious_assignment_formatting, clippy::suspicious_command_arg_space, clippy::suspicious_doc_comments, 
clippy::suspicious_else_formatting, clippy::suspicious_map, clippy::suspicious_op_assign_impl, clippy::suspicious_open_options, clippy::suspicious_to_owned, clippy::suspicious_unary_op_formatting, clippy::swap_ptr_to_ref, clippy::test_attr_in_doctest, clippy::type_id_on_box, clippy::unconditional_recursion, clippy::unnecessary_clippy_cfg, clippy::unnecessary_get_then_check, clippy::unnecessary_result_map_or_else, clippy::zero_repeat_side_effects"##, }, children: &[ "clippy::almost_complete_range", @@ -14512,10 +14732,12 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::cast_enum_truncation", "clippy::cast_nan_to_int", "clippy::cast_slice_from_raw_parts", + "clippy::const_is_empty", "clippy::crate_in_macro_def", "clippy::deprecated_clippy_cfg_attr", "clippy::drop_non_drop", "clippy::duplicate_mod", + "clippy::duplicated_attributes", "clippy::empty_docs", "clippy::empty_loop", "clippy::float_equality_without_abs", @@ -14528,14 +14750,17 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::join_absolute_paths", "clippy::let_underscore_future", "clippy::lines_filter_map_ok", - "clippy::maybe_misused_cfg", + "clippy::macro_metavars_in_unsafe", + "clippy::manual_unwrap_or_default", "clippy::misnamed_getters", "clippy::misrefactored_assign_op", - "clippy::mixed_attributes_style", + "clippy::missing_transmute_annotations", "clippy::multi_assignments", "clippy::multiple_bound_locations", "clippy::mut_range_bound", "clippy::mutable_key_type", + "clippy::needless_character_iteration", + "clippy::needless_maybe_sized", "clippy::no_effect_replace", "clippy::non_canonical_clone_impl", "clippy::non_canonical_partial_ord_impl", @@ -14564,6 +14789,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[ "clippy::unnecessary_clippy_cfg", "clippy::unnecessary_get_then_check", "clippy::unnecessary_result_map_or_else", + "clippy::zero_repeat_side_effects", ], }, ]; diff --git a/xtask/src/codegen.rs b/xtask/src/codegen.rs index b23d700263..744f524ff8 100644 --- a/xtask/src/codegen.rs +++ b/xtask/src/codegen.rs @@ -18,6 +18,7 @@ impl flags::Codegen { flags::CodegenType::All => { diagnostics_docs::generate(self.check); assists_doc_tests::generate(self.check); + // diagnostics_docs::generate(self.check) doesn't generate any tests // lints::generate(self.check) Updating clones the rust repo, so don't run it unless // explicitly asked for } @@ -175,7 +176,8 @@ fn reformat(text: String) -> String { } fn add_preamble(generator: &'static str, mut text: String) -> String { - let preamble = format!("//! Generated by `{generator}`, do not edit by hand.\n\n"); + let preamble = + format!("//! 
Generated by `cargo codegen {generator}`, do not edit by hand.\n\n"); text.insert_str(0, &preamble); text } diff --git a/xtask/src/codegen/assists_doc_tests.rs b/xtask/src/codegen/assists_doc_tests.rs index b2d89dde76..4edf1fdaf3 100644 --- a/xtask/src/codegen/assists_doc_tests.rs +++ b/xtask/src/codegen/assists_doc_tests.rs @@ -45,7 +45,7 @@ r#####" buf.push_str(&test) } } - let buf = add_preamble("sourcegen_assists_docs", reformat(buf)); + let buf = add_preamble("assists-doc-tests", reformat(buf)); ensure_file_contents( &project_root().join("crates/ide-assists/src/tests/generated.rs"), &buf, diff --git a/xtask/src/codegen/diagnostics_docs.rs b/xtask/src/codegen/diagnostics_docs.rs index dcc9c76a50..ba26ffc4da 100644 --- a/xtask/src/codegen/diagnostics_docs.rs +++ b/xtask/src/codegen/diagnostics_docs.rs @@ -12,7 +12,7 @@ pub(crate) fn generate(check: bool) { if !check { let contents = diagnostics.into_iter().map(|it| it.to_string()).collect::>().join("\n\n"); - let contents = add_preamble("sourcegen_diagnostic_docs", contents); + let contents = add_preamble("diagnostics-docs", contents); let dst = project_root().join("docs/user/generated_diagnostic.adoc"); fs::write(dst, contents).unwrap(); } diff --git a/xtask/src/codegen/grammar.rs b/xtask/src/codegen/grammar.rs index 0fc97ad654..e3d2bfcfef 100644 --- a/xtask/src/codegen/grammar.rs +++ b/xtask/src/codegen/grammar.rs @@ -69,7 +69,7 @@ fn generate_tokens(grammar: &AstSrc) -> String { }); add_preamble( - "sourcegen_ast", + "grammar", reformat( quote! { use crate::{SyntaxKind::{self, *}, SyntaxToken, ast::AstToken}; @@ -328,7 +328,7 @@ fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> String { } } - let res = add_preamble("sourcegen_ast", reformat(res)); + let res = add_preamble("grammar", reformat(res)); res.replace("#[derive", "\n#[derive") } @@ -458,7 +458,7 @@ fn generate_syntax_kinds(grammar: KindsSrc<'_>) -> String { } }; - add_preamble("sourcegen_ast", reformat(ast.to_string())) + add_preamble("grammar", reformat(ast.to_string())) } fn to_upper_snake_case(s: &str) -> String { diff --git a/xtask/src/codegen/lints.rs b/xtask/src/codegen/lints.rs index 6975f9328e..d4cc0d01a7 100644 --- a/xtask/src/codegen/lints.rs +++ b/xtask/src/codegen/lints.rs @@ -28,7 +28,7 @@ pub(crate) fn generate(check: bool) { cmd!( sh, "git -C {rust_repo} submodule update --init --recursive --depth=1 -- - compiler library src/tools" + compiler library src/tools src/doc/book" ) .run() .unwrap(); @@ -73,7 +73,7 @@ pub struct LintGroup { .unwrap(); generate_descriptor_clippy(&mut contents, &lints_json); - let contents = add_preamble("sourcegen_lints", reformat(contents)); + let contents = add_preamble("lint-definitions", reformat(contents)); let destination = project_root().join(DESTINATION); ensure_file_contents(destination.as_path(), &contents, check); diff --git a/xtask/src/flags.rs b/xtask/src/flags.rs index dd7bfd0bda..00fa3117b3 100644 --- a/xtask/src/flags.rs +++ b/xtask/src/flags.rs @@ -195,7 +195,7 @@ impl FromStr for CodegenType { "grammar" => Ok(Self::Grammar), "assists-doc-tests" => Ok(Self::AssistsDocTests), "diagnostics-docs" => Ok(Self::DiagnosticsDocs), - "lints-definitions" => Ok(Self::LintDefinitions), + "lint-definitions" => Ok(Self::LintDefinitions), _ => Err("Invalid option".to_owned()), } } From 15973f1a55d1ba018ef7c974b8280402cf552bda Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sun, 7 Jul 2024 08:39:08 +0200 Subject: [PATCH 70/94] Fix stop_watch on linux --- crates/ide/src/status.rs | 1 - crates/profile/src/stop_watch.rs | 7 
+++---- xtask/src/codegen.rs | 10 ++++++---- xtask/src/codegen/assists_doc_tests.rs | 4 ++-- xtask/src/codegen/diagnostics_docs.rs | 2 +- xtask/src/codegen/grammar.rs | 6 +++--- xtask/src/codegen/lints.rs | 2 +- xtask/src/flags.rs | 14 +++++++++++++- 8 files changed, 29 insertions(+), 17 deletions(-) diff --git a/crates/ide/src/status.rs b/crates/ide/src/status.rs index 69526ddef9..b998c0bfc6 100644 --- a/crates/ide/src/status.rs +++ b/crates/ide/src/status.rs @@ -20,7 +20,6 @@ use ide_db::{ }; use itertools::Itertools; use profile::{memory_usage, Bytes}; -use std::env; use stdx::format_to; use syntax::{ast, Parse, SyntaxNode}; use triomphe::Arc; diff --git a/crates/profile/src/stop_watch.rs b/crates/profile/src/stop_watch.rs index 990b59cad4..0a803959ee 100644 --- a/crates/profile/src/stop_watch.rs +++ b/crates/profile/src/stop_watch.rs @@ -29,11 +29,10 @@ impl StopWatch { // When debugging rust-analyzer using rr, the perf-related syscalls cause it to abort. // We allow disabling perf by setting the env var `RA_DISABLE_PERF`. - use once_cell::sync::Lazy; - static PERF_ENABLED: Lazy = - Lazy::new(|| std::env::var_os("RA_DISABLE_PERF").is_none()); + use std::sync::OnceLock; + static PERF_ENABLED: OnceLock = OnceLock::new(); - if *PERF_ENABLED { + if *PERF_ENABLED.get_or_init(|| std::env::var_os("RA_DISABLE_PERF").is_none()) { let mut counter = perf_event::Builder::new() .build() .map_err(|err| eprintln!("Failed to create perf counter: {err}")) diff --git a/xtask/src/codegen.rs b/xtask/src/codegen.rs index 744f524ff8..26cdf40a6c 100644 --- a/xtask/src/codegen.rs +++ b/xtask/src/codegen.rs @@ -5,7 +5,10 @@ use std::{ use xshell::{cmd, Shell}; -use crate::{flags, project_root}; +use crate::{ + flags::{self, CodegenType}, + project_root, +}; pub(crate) mod assists_doc_tests; pub(crate) mod diagnostics_docs; @@ -175,9 +178,8 @@ fn reformat(text: String) -> String { stdout } -fn add_preamble(generator: &'static str, mut text: String) -> String { - let preamble = - format!("//! Generated by `cargo codegen {generator}`, do not edit by hand.\n\n"); +fn add_preamble(cg: CodegenType, mut text: String) -> String { + let preamble = format!("//! Generated by `cargo codegen {cg}`, do not edit by hand.\n\n"); text.insert_str(0, &preamble); text } diff --git a/xtask/src/codegen/assists_doc_tests.rs b/xtask/src/codegen/assists_doc_tests.rs index 4edf1fdaf3..eab288c11b 100644 --- a/xtask/src/codegen/assists_doc_tests.rs +++ b/xtask/src/codegen/assists_doc_tests.rs @@ -45,7 +45,7 @@ r#####" buf.push_str(&test) } } - let buf = add_preamble("assists-doc-tests", reformat(buf)); + let buf = add_preamble(crate::flags::CodegenType::AssistsDocTests, reformat(buf)); ensure_file_contents( &project_root().join("crates/ide-assists/src/tests/generated.rs"), &buf, @@ -59,7 +59,7 @@ r#####" // a release. 
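The StopWatch change above swaps `once_cell::sync::Lazy` for the standard library's `std::sync::OnceLock`, which has no `Deref` to the cached value and therefore needs an explicit `get_or_init` at the point of use. A minimal sketch of the same pattern; the `perf_enabled` wrapper is an illustration, only the `RA_DISABLE_PERF` variable comes from the patch:

use std::sync::OnceLock;

// Illustrative wrapper, not part of the patch: same lazy-init pattern as the StopWatch code above.
fn perf_enabled() -> bool {
    static PERF_ENABLED: OnceLock<bool> = OnceLock::new();
    // Initialized once on first call; later calls return the cached value.
    *PERF_ENABLED.get_or_init(|| std::env::var_os("RA_DISABLE_PERF").is_none())
}

Dropping `once_cell` here costs nothing but the explicit initializer at the call site: `OnceLock::get_or_init` gives the same one-time, thread-safe initialization that `Lazy` provided.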
let contents = add_preamble( - "sourcegen_assists_docs", + crate::flags::CodegenType::AssistsDocTests, assists.into_iter().map(|it| it.to_string()).collect::>().join("\n\n"), ); let dst = project_root().join("docs/user/generated_assists.adoc"); diff --git a/xtask/src/codegen/diagnostics_docs.rs b/xtask/src/codegen/diagnostics_docs.rs index ba26ffc4da..316ae80f4c 100644 --- a/xtask/src/codegen/diagnostics_docs.rs +++ b/xtask/src/codegen/diagnostics_docs.rs @@ -12,7 +12,7 @@ pub(crate) fn generate(check: bool) { if !check { let contents = diagnostics.into_iter().map(|it| it.to_string()).collect::>().join("\n\n"); - let contents = add_preamble("diagnostics-docs", contents); + let contents = add_preamble(crate::flags::CodegenType::DiagnosticsDocs, contents); let dst = project_root().join("docs/user/generated_diagnostic.adoc"); fs::write(dst, contents).unwrap(); } diff --git a/xtask/src/codegen/grammar.rs b/xtask/src/codegen/grammar.rs index e3d2bfcfef..05c7132c0d 100644 --- a/xtask/src/codegen/grammar.rs +++ b/xtask/src/codegen/grammar.rs @@ -69,7 +69,7 @@ fn generate_tokens(grammar: &AstSrc) -> String { }); add_preamble( - "grammar", + crate::flags::CodegenType::Grammar, reformat( quote! { use crate::{SyntaxKind::{self, *}, SyntaxToken, ast::AstToken}; @@ -328,7 +328,7 @@ fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> String { } } - let res = add_preamble("grammar", reformat(res)); + let res = add_preamble(crate::flags::CodegenType::Grammar, reformat(res)); res.replace("#[derive", "\n#[derive") } @@ -458,7 +458,7 @@ fn generate_syntax_kinds(grammar: KindsSrc<'_>) -> String { } }; - add_preamble("grammar", reformat(ast.to_string())) + add_preamble(crate::flags::CodegenType::Grammar, reformat(ast.to_string())) } fn to_upper_snake_case(s: &str) -> String { diff --git a/xtask/src/codegen/lints.rs b/xtask/src/codegen/lints.rs index d4cc0d01a7..5b933933c3 100644 --- a/xtask/src/codegen/lints.rs +++ b/xtask/src/codegen/lints.rs @@ -73,7 +73,7 @@ pub struct LintGroup { .unwrap(); generate_descriptor_clippy(&mut contents, &lints_json); - let contents = add_preamble("lint-definitions", reformat(contents)); + let contents = add_preamble(crate::flags::CodegenType::LintDefinitions, reformat(contents)); let destination = project_root().join(DESTINATION); ensure_file_contents(destination.as_path(), &contents, check); diff --git a/xtask/src/flags.rs b/xtask/src/flags.rs index 00fa3117b3..4bc844ab7e 100644 --- a/xtask/src/flags.rs +++ b/xtask/src/flags.rs @@ -1,6 +1,6 @@ #![allow(unreachable_pub)] -use std::str::FromStr; +use std::{fmt, str::FromStr}; use crate::install::{ClientOpt, ServerOpt}; @@ -187,6 +187,18 @@ pub enum CodegenType { LintDefinitions, } +impl fmt::Display for CodegenType { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::All => write!(f, "all"), + Self::Grammar => write!(f, "grammar"), + Self::AssistsDocTests => write!(f, "assists-doc-tests"), + Self::DiagnosticsDocs => write!(f, "diagnostics-docs"), + Self::LintDefinitions => write!(f, "lint-definitions"), + } + } +} + impl FromStr for CodegenType { type Err = String; fn from_str(s: &str) -> Result { From b9c1c42959e024efaffdaea32a851e3e48434195 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sun, 7 Jul 2024 08:41:41 +0200 Subject: [PATCH 71/94] Allow new clippy lint in test --- crates/hir-ty/src/consteval/tests/intrinsics.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/crates/hir-ty/src/consteval/tests/intrinsics.rs b/crates/hir-ty/src/consteval/tests/intrinsics.rs index 
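The `Display` implementation added here deliberately mirrors the string literals matched in `FromStr`, so the name written into each generated file's preamble (`cargo codegen {cg}`) is always a name that `cargo codegen` will accept back. A hypothetical test pinning down that round-trip property; neither the module nor the test exists in the patch, and it assumes it sits next to `CodegenType` in xtask/src/flags.rs:

#[cfg(test)]
mod codegen_type_names {
    use super::CodegenType;
    use std::str::FromStr;

    // Hypothetical test, not part of the patch.
    #[test]
    fn display_round_trips_through_from_str() {
        for ty in [
            CodegenType::All,
            CodegenType::Grammar,
            CodegenType::AssistsDocTests,
            CodegenType::DiagnosticsDocs,
            CodegenType::LintDefinitions,
        ] {
            assert!(CodegenType::from_str(&ty.to_string()).is_ok());
        }
    }
}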
44a4ac27af..830042d0ed 100644 --- a/crates/hir-ty/src/consteval/tests/intrinsics.rs +++ b/crates/hir-ty/src/consteval/tests/intrinsics.rs @@ -426,6 +426,7 @@ fn floating_point() { true, )), ); + #[allow(unknown_lints, unnecessary_min_or_max)] check_number( r#" extern "rust-intrinsic" { From 58026439001545ed876138f71539bad141eb1a5f Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sun, 7 Jul 2024 08:51:19 +0200 Subject: [PATCH 72/94] Move parser test generation to xtask --- crates/parser/src/tests.rs | 1 - xtask/src/codegen.rs | 11 ++++++--- xtask/src/codegen/assists_doc_tests.rs | 1 + xtask/src/codegen/grammar.rs | 21 +++++++++++++--- xtask/src/codegen/lints.rs | 7 +++++- .../src/codegen/parser_inline_tests.rs | 24 +++++++++++-------- xtask/src/flags.rs | 3 +++ 7 files changed, 50 insertions(+), 18 deletions(-) rename crates/parser/src/tests/sourcegen_inline_tests.rs => xtask/src/codegen/parser_inline_tests.rs (86%) diff --git a/crates/parser/src/tests.rs b/crates/parser/src/tests.rs index 0e04096526..a38689791c 100644 --- a/crates/parser/src/tests.rs +++ b/crates/parser/src/tests.rs @@ -1,5 +1,4 @@ mod prefix_entries; -mod sourcegen_inline_tests; mod top_entries; use std::{ diff --git a/xtask/src/codegen.rs b/xtask/src/codegen.rs index 26cdf40a6c..607cfdf83e 100644 --- a/xtask/src/codegen.rs +++ b/xtask/src/codegen.rs @@ -14,6 +14,7 @@ pub(crate) mod assists_doc_tests; pub(crate) mod diagnostics_docs; mod grammar; mod lints; +mod parser_inline_tests; impl flags::Codegen { pub(crate) fn run(self, _sh: &Shell) -> anyhow::Result<()> { @@ -21,6 +22,7 @@ impl flags::Codegen { flags::CodegenType::All => { diagnostics_docs::generate(self.check); assists_doc_tests::generate(self.check); + parser_inline_tests::generate(self.check); // diagnostics_docs::generate(self.check) doesn't generate any tests // lints::generate(self.check) Updating clones the rust repo, so don't run it unless // explicitly asked for @@ -29,6 +31,7 @@ impl flags::Codegen { flags::CodegenType::AssistsDocTests => assists_doc_tests::generate(self.check), flags::CodegenType::DiagnosticsDocs => diagnostics_docs::generate(self.check), flags::CodegenType::LintDefinitions => lints::generate(self.check), + flags::CodegenType::ParserTests => parser_inline_tests::generate(self.check), } Ok(()) } @@ -187,7 +190,7 @@ fn add_preamble(cg: CodegenType, mut text: String) -> String { /// Checks that the `file` has the specified `contents`. If that is not the /// case, updates the file and then fails the test. #[allow(clippy::print_stderr)] -fn ensure_file_contents(file: &Path, contents: &str, check: bool) { +fn ensure_file_contents(cg: CodegenType, file: &Path, contents: &str, check: bool) { if let Ok(old_contents) = fs::read_to_string(file) { if normalize_newlines(&old_contents) == normalize_newlines(contents) { // File is already up to date. 
@@ -201,9 +204,11 @@ fn ensure_file_contents(file: &Path, contents: &str, check: bool) { "{} was not up-to-date{}", file.display(), if std::env::var("CI").is_ok() { - "\n NOTE: run `cargo codegen` locally and commit the updated files\n" + format!( + "\n NOTE: run `cargo codegen {cg}` locally and commit the updated files\n" + ) } else { - "" + "".to_owned() } ); } else { diff --git a/xtask/src/codegen/assists_doc_tests.rs b/xtask/src/codegen/assists_doc_tests.rs index eab288c11b..c868f70209 100644 --- a/xtask/src/codegen/assists_doc_tests.rs +++ b/xtask/src/codegen/assists_doc_tests.rs @@ -47,6 +47,7 @@ r#####" } let buf = add_preamble(crate::flags::CodegenType::AssistsDocTests, reformat(buf)); ensure_file_contents( + crate::flags::CodegenType::AssistsDocTests, &project_root().join("crates/ide-assists/src/tests/generated.rs"), &buf, check, diff --git a/xtask/src/codegen/grammar.rs b/xtask/src/codegen/grammar.rs index 05c7132c0d..89db39411b 100644 --- a/xtask/src/codegen/grammar.rs +++ b/xtask/src/codegen/grammar.rs @@ -27,7 +27,12 @@ use self::ast_src::{AstEnumSrc, AstNodeSrc, AstSrc, Cardinality, Field, KindsSrc pub(crate) fn generate(check: bool) { let syntax_kinds = generate_syntax_kinds(KINDS_SRC); let syntax_kinds_file = project_root().join("crates/parser/src/syntax_kind/generated.rs"); - ensure_file_contents(syntax_kinds_file.as_path(), &syntax_kinds, check); + ensure_file_contents( + crate::flags::CodegenType::Grammar, + syntax_kinds_file.as_path(), + &syntax_kinds, + check, + ); let grammar = fs::read_to_string(project_root().join("crates/syntax/rust.ungram")) .unwrap() @@ -37,11 +42,21 @@ pub(crate) fn generate(check: bool) { let ast_tokens = generate_tokens(&ast); let ast_tokens_file = project_root().join("crates/syntax/src/ast/generated/tokens.rs"); - ensure_file_contents(ast_tokens_file.as_path(), &ast_tokens, check); + ensure_file_contents( + crate::flags::CodegenType::Grammar, + ast_tokens_file.as_path(), + &ast_tokens, + check, + ); let ast_nodes = generate_nodes(KINDS_SRC, &ast); let ast_nodes_file = project_root().join("crates/syntax/src/ast/generated/nodes.rs"); - ensure_file_contents(ast_nodes_file.as_path(), &ast_nodes, check); + ensure_file_contents( + crate::flags::CodegenType::Grammar, + ast_nodes_file.as_path(), + &ast_nodes, + check, + ); } fn generate_tokens(grammar: &AstSrc) -> String { diff --git a/xtask/src/codegen/lints.rs b/xtask/src/codegen/lints.rs index 5b933933c3..fafc87a0e2 100644 --- a/xtask/src/codegen/lints.rs +++ b/xtask/src/codegen/lints.rs @@ -76,7 +76,12 @@ pub struct LintGroup { let contents = add_preamble(crate::flags::CodegenType::LintDefinitions, reformat(contents)); let destination = project_root().join(DESTINATION); - ensure_file_contents(destination.as_path(), &contents, check); + ensure_file_contents( + crate::flags::CodegenType::LintDefinitions, + destination.as_path(), + &contents, + check, + ); } /// Parses the output of `rustdoc -Whelp` and prints `Lint` and `LintGroup` constants into `buf`. 
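For context, this generator combines two inputs: the lint and lint-group tables printed by `rustdoc -Whelp`, and the clippy metadata loaded into `lints_json` from the checked-out rust repository. A rough sketch of the first step in the same `xshell` style as the surrounding code; the function name and the use of `anyhow` for error propagation are illustrative choices, not taken from the patch:

use xshell::{cmd, Shell};

// Illustrative helper: capture the text that the generator parses into
// `Lint` / `LintGroup` constants.
fn rustdoc_lint_help(sh: &Shell) -> anyhow::Result<String> {
    let stdout = cmd!(sh, "rustdoc -Whelp").read()?;
    Ok(stdout)
}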
diff --git a/crates/parser/src/tests/sourcegen_inline_tests.rs b/xtask/src/codegen/parser_inline_tests.rs similarity index 86% rename from crates/parser/src/tests/sourcegen_inline_tests.rs rename to xtask/src/codegen/parser_inline_tests.rs index 5a71bfd82b..45be64c005 100644 --- a/crates/parser/src/tests/sourcegen_inline_tests.rs +++ b/xtask/src/codegen/parser_inline_tests.rs @@ -8,16 +8,20 @@ use std::{ path::{Path, PathBuf}, }; -#[test] -fn sourcegen_parser_tests() { - let grammar_dir = sourcegen::project_root().join(Path::new("crates/parser/src/grammar")); +use crate::{ + codegen::{ensure_file_contents, list_rust_files, CommentBlock}, + project_root, +}; + +pub(crate) fn generate(check: bool) { + let grammar_dir = project_root().join(Path::new("crates/parser/src/grammar")); let tests = tests_from_dir(&grammar_dir); - install_tests(&tests.ok, "crates/parser/test_data/parser/inline/ok"); - install_tests(&tests.err, "crates/parser/test_data/parser/inline/err"); + install_tests(&tests.ok, "crates/parser/test_data/parser/inline/ok", check); + install_tests(&tests.err, "crates/parser/test_data/parser/inline/err", check); - fn install_tests(tests: &HashMap, into: &str) { - let tests_dir = sourcegen::project_root().join(into); + fn install_tests(tests: &HashMap, into: &str, check: bool) { + let tests_dir = project_root().join(into); if !tests_dir.is_dir() { fs::create_dir_all(&tests_dir).unwrap(); } @@ -37,7 +41,7 @@ fn sourcegen_parser_tests() { tests_dir.join(file_name) } }; - sourcegen::ensure_file_contents(&path, &test.text); + ensure_file_contents(crate::flags::CodegenType::ParserTests, &path, &test.text, check); } } } @@ -57,7 +61,7 @@ struct Tests { fn collect_tests(s: &str) -> Vec { let mut res = Vec::new(); - for comment_block in sourcegen::CommentBlock::extract_untagged(s) { + for comment_block in CommentBlock::extract_untagged(s) { let first_line = &comment_block.contents[0]; let (name, ok) = if let Some(name) = first_line.strip_prefix("test ") { (name.to_owned(), true) @@ -80,7 +84,7 @@ fn collect_tests(s: &str) -> Vec { fn tests_from_dir(dir: &Path) -> Tests { let mut res = Tests::default(); - for entry in sourcegen::list_rust_files(dir) { + for entry in list_rust_files(dir) { process_file(&mut res, entry.as_path()); } let grammar_rs = dir.parent().unwrap().join("grammar.rs"); diff --git a/xtask/src/flags.rs b/xtask/src/flags.rs index 4bc844ab7e..f99ee1abb2 100644 --- a/xtask/src/flags.rs +++ b/xtask/src/flags.rs @@ -185,6 +185,7 @@ pub enum CodegenType { AssistsDocTests, DiagnosticsDocs, LintDefinitions, + ParserTests, } impl fmt::Display for CodegenType { @@ -195,6 +196,7 @@ impl fmt::Display for CodegenType { Self::AssistsDocTests => write!(f, "assists-doc-tests"), Self::DiagnosticsDocs => write!(f, "diagnostics-docs"), Self::LintDefinitions => write!(f, "lint-definitions"), + Self::ParserTests => write!(f, "parser-tests"), } } } @@ -208,6 +210,7 @@ impl FromStr for CodegenType { "assists-doc-tests" => Ok(Self::AssistsDocTests), "diagnostics-docs" => Ok(Self::DiagnosticsDocs), "lint-definitions" => Ok(Self::LintDefinitions), + "parser-tests" => Ok(Self::ParserTests), _ => Err("Invalid option".to_owned()), } } From 09932d9cd4adcfdc69cff52b94f87c8750928a4e Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sun, 7 Jul 2024 08:55:10 +0200 Subject: [PATCH 73/94] Update hover test fixture --- crates/ide/src/hover/tests.rs | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/crates/ide/src/hover/tests.rs b/crates/ide/src/hover/tests.rs index 
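The relocated inline-test generator keeps its original contract: a comment block in the parser grammar that starts with `test name` is written out as a fixture under crates/parser/test_data/parser/inline/ok, and a `test_err name` block goes under inline/err. A small illustrative helper, not present in the patch, showing where a given block lands:

use std::path::{Path, PathBuf};

// Illustrative only; the real install_tests derives the directory from its `into` argument.
fn inline_test_path(name: &str, ok: bool) -> PathBuf {
    let kind = if ok { "ok" } else { "err" };
    Path::new("crates/parser/test_data/parser/inline")
        .join(kind)
        .join(format!("{name}.rs"))
}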
20d07bf991..8479374647 100644 --- a/crates/ide/src/hover/tests.rs +++ b/crates/ide/src/hover/tests.rs @@ -6138,7 +6138,7 @@ fn hover_feature() { by the codegen backend, but not the MIR inliner. ```rust - #![feature(rustc_attrs, effects)] + #![feature(rustc_attrs)] #![allow(internal_features)] #[rustc_intrinsic] @@ -6148,7 +6148,7 @@ fn hover_feature() { Since these are just regular functions, it is perfectly ok to create the intrinsic twice: ```rust - #![feature(rustc_attrs, effects)] + #![feature(rustc_attrs)] #![allow(internal_features)] #[rustc_intrinsic] @@ -6172,12 +6172,23 @@ fn hover_feature() { Various intrinsics have native MIR operations that they correspond to. Instead of requiring backends to implement both the intrinsic and the MIR operation, the `lower_intrinsics` pass will convert the calls to the MIR operation. Backends do not need to know about these intrinsics - at all. + at all. These intrinsics only make sense without a body, and can either be declared as a "rust-intrinsic" + or as a `#[rustc_intrinsic]`. The body is never used, as calls to the intrinsic do not exist + anymore after MIR analyses. ## Intrinsics without fallback logic These must be implemented by all backends. + ### `#[rustc_intrinsic]` declarations + + These are written like intrinsics with fallback bodies, but the body is irrelevant. + Use `loop {}` for the body or call the intrinsic recursively and add + `#[rustc_intrinsic_must_be_overridden]` to the function to ensure that backends don't + invoke the body. + + ### Legacy extern ABI based intrinsics + These are imported as if they were FFI functions, with the special `rust-intrinsic` ABI. For example, if one was in a freestanding context, but wished to be able to `transmute` between types, and From 986b9cf022ca0c71ba27c37261057372ae5b909d Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sun, 7 Jul 2024 08:58:51 +0200 Subject: [PATCH 74/94] Move feature-doc generation to xtask codegen --- Cargo.lock | 1 - crates/parser/Cargo.toml | 1 - xtask/src/codegen.rs | 2 ++ .../src/codegen/feature_docs.rs | 20 +++++++++++-------- xtask/src/flags.rs | 3 +++ 5 files changed, 17 insertions(+), 10 deletions(-) rename crates/rust-analyzer/tests/slow-tests/sourcegen.rs => xtask/src/codegen/feature_docs.rs (80%) diff --git a/Cargo.lock b/Cargo.lock index c63d659543..ca442542c8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1244,7 +1244,6 @@ dependencies = [ "expect-test", "limit", "ra-ap-rustc_lexer", - "sourcegen", "stdx", "tracing", ] diff --git a/crates/parser/Cargo.toml b/crates/parser/Cargo.toml index 1f84e3f3af..54b57c201b 100644 --- a/crates/parser/Cargo.toml +++ b/crates/parser/Cargo.toml @@ -21,7 +21,6 @@ tracing = { workspace = true, optional = true } expect-test = "1.4.0" stdx.workspace = true -sourcegen.workspace = true [features] default = ["tracing"] diff --git a/xtask/src/codegen.rs b/xtask/src/codegen.rs index 607cfdf83e..f164abdbb1 100644 --- a/xtask/src/codegen.rs +++ b/xtask/src/codegen.rs @@ -12,6 +12,7 @@ use crate::{ pub(crate) mod assists_doc_tests; pub(crate) mod diagnostics_docs; +mod feature_docs; mod grammar; mod lints; mod parser_inline_tests; @@ -32,6 +33,7 @@ impl flags::Codegen { flags::CodegenType::DiagnosticsDocs => diagnostics_docs::generate(self.check), flags::CodegenType::LintDefinitions => lints::generate(self.check), flags::CodegenType::ParserTests => parser_inline_tests::generate(self.check), + flags::CodegenType::FeatureDocs => feature_docs::generate(self.check), } Ok(()) } diff --git 
a/crates/rust-analyzer/tests/slow-tests/sourcegen.rs b/xtask/src/codegen/feature_docs.rs similarity index 80% rename from crates/rust-analyzer/tests/slow-tests/sourcegen.rs rename to xtask/src/codegen/feature_docs.rs index 2eafb0da69..0c0fa838dd 100644 --- a/crates/rust-analyzer/tests/slow-tests/sourcegen.rs +++ b/xtask/src/codegen/feature_docs.rs @@ -2,8 +2,12 @@ use std::{fmt, fs, io, path::PathBuf}; -#[test] -fn sourcegen_feature_docs() { +use crate::{ + codegen::{list_rust_files, CommentBlock, Location}, + project_root, +}; + +pub(crate) fn generate(_check: bool) { let features = Feature::collect().unwrap(); let contents = features.into_iter().map(|it| it.to_string()).collect::>().join("\n\n"); let contents = format!( @@ -13,23 +17,23 @@ fn sourcegen_feature_docs() { ", contents.trim() ); - let dst = sourcegen::project_root().join("docs/user/generated_features.adoc"); + let dst = project_root().join("docs/user/generated_features.adoc"); fs::write(dst, contents).unwrap(); } #[derive(Debug)] struct Feature { id: String, - location: sourcegen::Location, + location: Location, doc: String, } impl Feature { fn collect() -> io::Result> { - let crates_dir = sourcegen::project_root().join("crates"); + let crates_dir = project_root().join("crates"); let mut res = Vec::new(); - for path in sourcegen::list_rust_files(&crates_dir) { + for path in list_rust_files(&crates_dir) { collect_file(&mut res, path)?; } res.sort_by(|lhs, rhs| lhs.id.cmp(&rhs.id)); @@ -37,7 +41,7 @@ impl Feature { fn collect_file(acc: &mut Vec, path: PathBuf) -> io::Result<()> { let text = std::fs::read_to_string(&path)?; - let comment_blocks = sourcegen::CommentBlock::extract("Feature", &text); + let comment_blocks = CommentBlock::extract("Feature", &text); for block in comment_blocks { let id = block.id; @@ -45,7 +49,7 @@ impl Feature { panic!("invalid feature name: {id:?}:\n {msg}") } let doc = block.contents.join("\n"); - let location = sourcegen::Location { file: path.clone(), line: block.line }; + let location = Location { file: path.clone(), line: block.line }; acc.push(Feature { id, location, doc }) } diff --git a/xtask/src/flags.rs b/xtask/src/flags.rs index f99ee1abb2..9b3a2a034e 100644 --- a/xtask/src/flags.rs +++ b/xtask/src/flags.rs @@ -186,6 +186,7 @@ pub enum CodegenType { DiagnosticsDocs, LintDefinitions, ParserTests, + FeatureDocs, } impl fmt::Display for CodegenType { @@ -197,6 +198,7 @@ impl fmt::Display for CodegenType { Self::DiagnosticsDocs => write!(f, "diagnostics-docs"), Self::LintDefinitions => write!(f, "lint-definitions"), Self::ParserTests => write!(f, "parser-tests"), + Self::FeatureDocs => write!(f, "feature-docs"), } } } @@ -211,6 +213,7 @@ impl FromStr for CodegenType { "diagnostics-docs" => Ok(Self::DiagnosticsDocs), "lint-definitions" => Ok(Self::LintDefinitions), "parser-tests" => Ok(Self::ParserTests), + "feature-docs" => Ok(Self::FeatureDocs), _ => Err("Invalid option".to_owned()), } } From 210b74890900f3365825f615006c231139a4f196 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sun, 7 Jul 2024 09:00:19 +0200 Subject: [PATCH 75/94] Drop sourcegen --- Cargo.lock | 8 -- crates/rust-analyzer/Cargo.toml | 1 - crates/sourcegen/Cargo.toml | 19 --- crates/sourcegen/src/lib.rs | 201 -------------------------------- 4 files changed, 229 deletions(-) delete mode 100644 crates/sourcegen/Cargo.toml delete mode 100644 crates/sourcegen/src/lib.rs diff --git a/Cargo.lock b/Cargo.lock index ca442542c8..0c105eb996 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1670,7 +1670,6 @@ dependencies = [ "semver", 
"serde", "serde_json", - "sourcegen", "stdx", "syntax", "test-fixture", @@ -1869,13 +1868,6 @@ version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b6b67fb9a61334225b5b790716f609cd58395f895b3fe8b328786812a40bc3b" -[[package]] -name = "sourcegen" -version = "0.0.0" -dependencies = [ - "xshell", -] - [[package]] name = "span" version = "0.0.0" diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml index c207c42b6d..93fb55ede8 100644 --- a/crates/rust-analyzer/Cargo.toml +++ b/crates/rust-analyzer/Cargo.toml @@ -81,7 +81,6 @@ xshell.workspace = true test-utils.workspace = true test-fixture.workspace = true -sourcegen.workspace = true mbe.workspace = true [features] diff --git a/crates/sourcegen/Cargo.toml b/crates/sourcegen/Cargo.toml deleted file mode 100644 index d5ea4c39aa..0000000000 --- a/crates/sourcegen/Cargo.toml +++ /dev/null @@ -1,19 +0,0 @@ -[package] -name = "sourcegen" -version = "0.0.0" -description = "TBD" -publish = false - -authors.workspace = true -edition.workspace = true -license.workspace = true -rust-version.workspace = true - -[lib] -doctest = false - -[dependencies] -xshell.workspace = true - -[lints] -workspace = true \ No newline at end of file diff --git a/crates/sourcegen/src/lib.rs b/crates/sourcegen/src/lib.rs deleted file mode 100644 index 307c214c61..0000000000 --- a/crates/sourcegen/src/lib.rs +++ /dev/null @@ -1,201 +0,0 @@ -//! rust-analyzer relies heavily on source code generation. -//! -//! Things like feature documentation or assist tests are implemented by -//! processing rust-analyzer's own source code and generating the appropriate -//! output. See `sourcegen_` tests in various crates. -//! -//! This crate contains utilities to make this kind of source-gen easy. 
- -use std::{ - fmt, fs, mem, - path::{Path, PathBuf}, -}; - -use xshell::{cmd, Shell}; - -pub fn list_rust_files(dir: &Path) -> Vec { - let mut res = list_files(dir); - res.retain(|it| { - it.file_name().unwrap_or_default().to_str().unwrap_or_default().ends_with(".rs") - }); - res -} - -pub fn list_files(dir: &Path) -> Vec { - let mut res = Vec::new(); - let mut work = vec![dir.to_path_buf()]; - while let Some(dir) = work.pop() { - for entry in dir.read_dir().unwrap() { - let entry = entry.unwrap(); - let file_type = entry.file_type().unwrap(); - let path = entry.path(); - let is_hidden = - path.file_name().unwrap_or_default().to_str().unwrap_or_default().starts_with('.'); - if !is_hidden { - if file_type.is_dir() { - work.push(path); - } else if file_type.is_file() { - res.push(path); - } - } - } - } - res -} - -#[derive(Clone)] -pub struct CommentBlock { - pub id: String, - pub line: usize, - pub contents: Vec, - is_doc: bool, -} - -impl CommentBlock { - pub fn extract(tag: &str, text: &str) -> Vec { - assert!(tag.starts_with(char::is_uppercase)); - - let tag = format!("{tag}:"); - let mut blocks = CommentBlock::extract_untagged(text); - blocks.retain_mut(|block| { - let first = block.contents.remove(0); - let Some(id) = first.strip_prefix(&tag) else { - return false; - }; - - if block.is_doc { - panic!("Use plain (non-doc) comments with tags like {tag}:\n {first}"); - } - - id.trim().clone_into(&mut block.id); - true - }); - blocks - } - - pub fn extract_untagged(text: &str) -> Vec { - let mut res = Vec::new(); - - let lines = text.lines().map(str::trim_start); - - let dummy_block = - CommentBlock { id: String::new(), line: 0, contents: Vec::new(), is_doc: false }; - let mut block = dummy_block.clone(); - for (line_num, line) in lines.enumerate() { - match line.strip_prefix("//") { - Some(mut contents) => { - if let Some('/' | '!') = contents.chars().next() { - contents = &contents[1..]; - block.is_doc = true; - } - if let Some(' ') = contents.chars().next() { - contents = &contents[1..]; - } - block.contents.push(contents.to_owned()); - } - None => { - if !block.contents.is_empty() { - let block = mem::replace(&mut block, dummy_block.clone()); - res.push(block); - } - block.line = line_num + 2; - } - } - } - if !block.contents.is_empty() { - res.push(block); - } - res - } -} - -#[derive(Debug)] -pub struct Location { - pub file: PathBuf, - pub line: usize, -} - -impl fmt::Display for Location { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let path = self.file.strip_prefix(project_root()).unwrap().display().to_string(); - let path = path.replace('\\', "/"); - let name = self.file.file_name().unwrap(); - write!( - f, - "https://github.com/rust-lang/rust-analyzer/blob/master/{}#L{}[{}]", - path, - self.line, - name.to_str().unwrap() - ) - } -} - -fn ensure_rustfmt(sh: &Shell) { - let version = cmd!(sh, "rustup run stable rustfmt --version").read().unwrap_or_default(); - if !version.contains("stable") { - panic!( - "Failed to run rustfmt from toolchain 'stable'. 
\ - Please run `rustup component add rustfmt --toolchain stable` to install it.", - ); - } -} - -pub fn reformat(text: String) -> String { - let sh = Shell::new().unwrap(); - ensure_rustfmt(&sh); - let rustfmt_toml = project_root().join("rustfmt.toml"); - let mut stdout = cmd!( - sh, - "rustup run stable rustfmt --config-path {rustfmt_toml} --config fn_single_line=true" - ) - .stdin(text) - .read() - .unwrap(); - if !stdout.ends_with('\n') { - stdout.push('\n'); - } - stdout -} - -pub fn add_preamble(generator: &'static str, mut text: String) -> String { - let preamble = format!("//! Generated by `{generator}`, do not edit by hand.\n\n"); - text.insert_str(0, &preamble); - text -} - -/// Checks that the `file` has the specified `contents`. If that is not the -/// case, updates the file and then fails the test. -#[allow(clippy::print_stderr)] -pub fn ensure_file_contents(file: &Path, contents: &str) { - if let Ok(old_contents) = fs::read_to_string(file) { - if normalize_newlines(&old_contents) == normalize_newlines(contents) { - // File is already up to date. - return; - } - } - - let display_path = file.strip_prefix(project_root()).unwrap_or(file); - eprintln!( - "\n\x1b[31;1merror\x1b[0m: {} was not up-to-date, updating\n", - display_path.display() - ); - if std::env::var("CI").is_ok() { - eprintln!(" NOTE: run `cargo test` locally and commit the updated files\n"); - } - if let Some(parent) = file.parent() { - let _ = fs::create_dir_all(parent); - } - fs::write(file, contents).unwrap(); - panic!("some file was not up to date and has been updated, simply re-run the tests"); -} - -fn normalize_newlines(s: &str) -> String { - s.replace("\r\n", "\n") -} - -pub fn project_root() -> PathBuf { - let dir = env!("CARGO_MANIFEST_DIR"); - let res = PathBuf::from(dir).parent().unwrap().parent().unwrap().to_owned(); - assert!(res.join("triagebot.toml").exists()); - res -} From e752517814871d0aa64f49535229760ff686cb47 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sun, 7 Jul 2024 09:01:35 +0200 Subject: [PATCH 76/94] re-generate feature docs in release --- xtask/src/codegen.rs | 2 +- xtask/src/release.rs | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/xtask/src/codegen.rs b/xtask/src/codegen.rs index f164abdbb1..eb757c83d1 100644 --- a/xtask/src/codegen.rs +++ b/xtask/src/codegen.rs @@ -12,7 +12,7 @@ use crate::{ pub(crate) mod assists_doc_tests; pub(crate) mod diagnostics_docs; -mod feature_docs; +pub(crate) mod feature_docs; mod grammar; mod lints; mod parser_inline_tests; diff --git a/xtask/src/release.rs b/xtask/src/release.rs index b936876b52..3eda2bc027 100644 --- a/xtask/src/release.rs +++ b/xtask/src/release.rs @@ -32,6 +32,7 @@ impl flags::Release { // Generates bits of manual.adoc. 
codegen::diagnostics_docs::generate(false); codegen::assists_doc_tests::generate(false); + codegen::feature_docs::generate(false); let website_root = project_root().join("../rust-analyzer.github.io"); { From 866102cdaf9f4f5ec0cb8f276fd6ec4822a1ba72 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sun, 7 Jul 2024 09:11:46 +0200 Subject: [PATCH 77/94] Re-implement tidy as an xtask action --- .../hir-ty/src/consteval/tests/intrinsics.rs | 2 +- crates/rust-analyzer/tests/slow-tests/main.rs | 3 -- xtask/src/codegen.rs | 30 --------------- xtask/src/codegen/assists_doc_tests.rs | 5 +-- xtask/src/codegen/diagnostics_docs.rs | 3 +- xtask/src/codegen/feature_docs.rs | 3 +- xtask/src/codegen/lints.rs | 3 +- xtask/src/codegen/parser_inline_tests.rs | 3 +- xtask/src/flags.rs | 6 +++ xtask/src/main.rs | 3 ++ .../tests/slow-tests => xtask/src}/tidy.rs | 37 +++++++++---------- xtask/src/util.rs | 31 ++++++++++++++++ 12 files changed, 69 insertions(+), 60 deletions(-) rename {crates/rust-analyzer/tests/slow-tests => xtask/src}/tidy.rs (93%) create mode 100644 xtask/src/util.rs diff --git a/crates/hir-ty/src/consteval/tests/intrinsics.rs b/crates/hir-ty/src/consteval/tests/intrinsics.rs index 830042d0ed..543328a57e 100644 --- a/crates/hir-ty/src/consteval/tests/intrinsics.rs +++ b/crates/hir-ty/src/consteval/tests/intrinsics.rs @@ -426,7 +426,7 @@ fn floating_point() { true, )), ); - #[allow(unknown_lints, unnecessary_min_or_max)] + #[allow(unknown_lints, clippy::unnecessary_min_or_max)] check_number( r#" extern "rust-intrinsic" { diff --git a/crates/rust-analyzer/tests/slow-tests/main.rs b/crates/rust-analyzer/tests/slow-tests/main.rs index c6236d8b4f..56f416a0b6 100644 --- a/crates/rust-analyzer/tests/slow-tests/main.rs +++ b/crates/rust-analyzer/tests/slow-tests/main.rs @@ -11,11 +11,8 @@ #![allow(clippy::disallowed_types)] mod ratoml; -#[cfg(not(feature = "in-rust-tree"))] -mod sourcegen; mod support; mod testdir; -mod tidy; use std::{collections::HashMap, path::PathBuf, time::Instant}; diff --git a/xtask/src/codegen.rs b/xtask/src/codegen.rs index eb757c83d1..acaa65129d 100644 --- a/xtask/src/codegen.rs +++ b/xtask/src/codegen.rs @@ -39,36 +39,6 @@ impl flags::Codegen { } } -fn list_rust_files(dir: &Path) -> Vec { - let mut res = list_files(dir); - res.retain(|it| { - it.file_name().unwrap_or_default().to_str().unwrap_or_default().ends_with(".rs") - }); - res -} - -fn list_files(dir: &Path) -> Vec { - let mut res = Vec::new(); - let mut work = vec![dir.to_path_buf()]; - while let Some(dir) = work.pop() { - for entry in dir.read_dir().unwrap() { - let entry = entry.unwrap(); - let file_type = entry.file_type().unwrap(); - let path = entry.path(); - let is_hidden = - path.file_name().unwrap_or_default().to_str().unwrap_or_default().starts_with('.'); - if !is_hidden { - if file_type.is_dir() { - work.push(path); - } else if file_type.is_file() { - res.push(path); - } - } - } - } - res -} - #[derive(Clone)] pub(crate) struct CommentBlock { pub(crate) id: String, diff --git a/xtask/src/codegen/assists_doc_tests.rs b/xtask/src/codegen/assists_doc_tests.rs index c868f70209..1256232287 100644 --- a/xtask/src/codegen/assists_doc_tests.rs +++ b/xtask/src/codegen/assists_doc_tests.rs @@ -5,10 +5,9 @@ use std::{fmt, fs, path::Path}; use stdx::format_to_acc; use crate::{ - codegen::{ - add_preamble, ensure_file_contents, list_rust_files, reformat, CommentBlock, Location, - }, + codegen::{add_preamble, ensure_file_contents, reformat, CommentBlock, Location}, project_root, + util::list_rust_files, }; pub(crate) fn 
generate(check: bool) { diff --git a/xtask/src/codegen/diagnostics_docs.rs b/xtask/src/codegen/diagnostics_docs.rs index 316ae80f4c..4cb8f3f259 100644 --- a/xtask/src/codegen/diagnostics_docs.rs +++ b/xtask/src/codegen/diagnostics_docs.rs @@ -3,8 +3,9 @@ use std::{fmt, fs, io, path::PathBuf}; use crate::{ - codegen::{add_preamble, list_rust_files, CommentBlock, Location}, + codegen::{add_preamble, CommentBlock, Location}, project_root, + util::list_rust_files, }; pub(crate) fn generate(check: bool) { diff --git a/xtask/src/codegen/feature_docs.rs b/xtask/src/codegen/feature_docs.rs index 0c0fa838dd..c6451d888b 100644 --- a/xtask/src/codegen/feature_docs.rs +++ b/xtask/src/codegen/feature_docs.rs @@ -3,8 +3,9 @@ use std::{fmt, fs, io, path::PathBuf}; use crate::{ - codegen::{list_rust_files, CommentBlock, Location}, + codegen::{CommentBlock, Location}, project_root, + util::list_rust_files, }; pub(crate) fn generate(_check: bool) { diff --git a/xtask/src/codegen/lints.rs b/xtask/src/codegen/lints.rs index fafc87a0e2..f097b5817b 100644 --- a/xtask/src/codegen/lints.rs +++ b/xtask/src/codegen/lints.rs @@ -6,8 +6,9 @@ use stdx::format_to; use xshell::{cmd, Shell}; use crate::{ - codegen::{add_preamble, ensure_file_contents, list_files, reformat}, + codegen::{add_preamble, ensure_file_contents, reformat}, project_root, + util::list_files, }; const DESTINATION: &str = "crates/ide-db/src/generated/lints.rs"; diff --git a/xtask/src/codegen/parser_inline_tests.rs b/xtask/src/codegen/parser_inline_tests.rs index 45be64c005..7ad530ab01 100644 --- a/xtask/src/codegen/parser_inline_tests.rs +++ b/xtask/src/codegen/parser_inline_tests.rs @@ -9,8 +9,9 @@ use std::{ }; use crate::{ - codegen::{ensure_file_contents, list_rust_files, CommentBlock}, + codegen::{ensure_file_contents, CommentBlock}, project_root, + util::list_rust_files, }; pub(crate) fn generate(check: bool) { diff --git a/xtask/src/flags.rs b/xtask/src/flags.rs index 9b3a2a034e..cf4a22d476 100644 --- a/xtask/src/flags.rs +++ b/xtask/src/flags.rs @@ -73,6 +73,8 @@ xflags::xflags! 
{ optional codegen_type: CodegenType optional --check } + + cmd tidy {} } } @@ -96,8 +98,12 @@ pub enum XtaskCmd { Metrics(Metrics), Bb(Bb), Codegen(Codegen), + Tidy(Tidy), } +#[derive(Debug)] +pub struct Tidy {} + #[derive(Debug)] pub struct Install { pub client: bool, diff --git a/xtask/src/main.rs b/xtask/src/main.rs index e070576303..5c312da1dd 100644 --- a/xtask/src/main.rs +++ b/xtask/src/main.rs @@ -19,6 +19,8 @@ mod install; mod metrics; mod publish; mod release; +mod tidy; +mod util; use anyhow::bail; use std::{env, path::PathBuf}; @@ -51,6 +53,7 @@ fn main() -> anyhow::Result<()> { )?; Ok(()) } + flags::XtaskCmd::Tidy(cmd) => cmd.run(sh), } } diff --git a/crates/rust-analyzer/tests/slow-tests/tidy.rs b/xtask/src/tidy.rs similarity index 93% rename from crates/rust-analyzer/tests/slow-tests/tidy.rs rename to xtask/src/tidy.rs index 8cd5cbf1c7..98e52e7e97 100644 --- a/crates/rust-analyzer/tests/slow-tests/tidy.rs +++ b/xtask/src/tidy.rs @@ -6,23 +6,29 @@ use std::{ use xshell::Shell; -#[cfg(not(feature = "in-rust-tree"))] use xshell::cmd; -#[test] -fn check_lsp_extensions_docs() { - let sh = &Shell::new().unwrap(); +use crate::{flags::Tidy, project_root, util::list_files}; +impl Tidy { + pub(crate) fn run(&self, sh: &Shell) -> anyhow::Result<()> { + check_lsp_extensions_docs(sh); + files_are_tidy(sh); + check_licenses(sh); + Ok(()) + } +} + +fn check_lsp_extensions_docs(sh: &Shell) { let expected_hash = { - let lsp_ext_rs = sh - .read_file(sourcegen::project_root().join("crates/rust-analyzer/src/lsp/ext.rs")) - .unwrap(); + let lsp_ext_rs = + sh.read_file(project_root().join("crates/rust-analyzer/src/lsp/ext.rs")).unwrap(); stable_hash(lsp_ext_rs.as_str()) }; let actual_hash = { let lsp_extensions_md = - sh.read_file(sourcegen::project_root().join("docs/dev/lsp-extensions.md")).unwrap(); + sh.read_file(project_root().join("docs/dev/lsp-extensions.md")).unwrap(); let text = lsp_extensions_md .lines() .find_map(|line| line.strip_prefix("lsp/ext.rs hash:")) @@ -45,11 +51,8 @@ Please adjust docs/dev/lsp-extensions.md. 
} } -#[test] -fn files_are_tidy() { - let sh = &Shell::new().unwrap(); - - let files = sourcegen::list_files(&sourcegen::project_root().join("crates")); +fn files_are_tidy(sh: &Shell) { + let files = list_files(&project_root().join("crates")); let mut tidy_docs = TidyDocs::default(); let mut tidy_marks = TidyMarks::default(); @@ -121,11 +124,7 @@ fn check_cargo_toml(path: &Path, text: String) { } } -#[cfg(not(feature = "in-rust-tree"))] -#[test] -fn check_licenses() { - let sh = &Shell::new().unwrap(); - +fn check_licenses(sh: &Shell) { let expected = " (MIT OR Apache-2.0) AND Unicode-DFS-2016 0BSD OR MIT OR Apache-2.0 @@ -277,7 +276,7 @@ impl TidyDocs { } fn is_exclude_dir(p: &Path, dirs_to_exclude: &[&str]) -> bool { - p.strip_prefix(sourcegen::project_root()) + p.strip_prefix(project_root()) .unwrap() .components() .rev() diff --git a/xtask/src/util.rs b/xtask/src/util.rs new file mode 100644 index 0000000000..39f52938c8 --- /dev/null +++ b/xtask/src/util.rs @@ -0,0 +1,31 @@ +use std::path::{Path, PathBuf}; + +pub(crate) fn list_rust_files(dir: &Path) -> Vec { + let mut res = list_files(dir); + res.retain(|it| { + it.file_name().unwrap_or_default().to_str().unwrap_or_default().ends_with(".rs") + }); + res +} + +pub(crate) fn list_files(dir: &Path) -> Vec { + let mut res = Vec::new(); + let mut work = vec![dir.to_path_buf()]; + while let Some(dir) = work.pop() { + for entry in dir.read_dir().unwrap() { + let entry = entry.unwrap(); + let file_type = entry.file_type().unwrap(); + let path = entry.path(); + let is_hidden = + path.file_name().unwrap_or_default().to_str().unwrap_or_default().starts_with('.'); + if !is_hidden { + if file_type.is_dir() { + work.push(path); + } else if file_type.is_file() { + res.push(path); + } + } + } + } + res +} From ce5046be50cb650f37bfd97c0f60ab534dfcfd96 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sun, 7 Jul 2024 09:14:50 +0200 Subject: [PATCH 78/94] Run codegen commands as tests if their results are commited --- crates/parser/src/syntax_kind/generated.rs | 2 +- crates/syntax/src/ast/generated/nodes.rs | 2 +- crates/syntax/src/ast/generated/tokens.rs | 2 +- xtask/src/codegen/assists_doc_tests.rs | 5 +++++ xtask/src/codegen/grammar.rs | 5 +++++ xtask/src/codegen/parser_inline_tests.rs | 5 +++++ xtask/src/tidy.rs | 5 +++++ 7 files changed, 23 insertions(+), 3 deletions(-) diff --git a/crates/parser/src/syntax_kind/generated.rs b/crates/parser/src/syntax_kind/generated.rs index ef83420c52..ad3398453b 100644 --- a/crates/parser/src/syntax_kind/generated.rs +++ b/crates/parser/src/syntax_kind/generated.rs @@ -1,4 +1,4 @@ -//! Generated by `sourcegen_ast`, do not edit by hand. +//! Generated by `cargo codegen grammar`, do not edit by hand. #![allow(bad_style, missing_docs, unreachable_pub)] #[doc = r" The kind of syntax node, e.g. `IDENT`, `USE_KW`, or `STRUCT`."] diff --git a/crates/syntax/src/ast/generated/nodes.rs b/crates/syntax/src/ast/generated/nodes.rs index bae529a2c8..ce189acd68 100644 --- a/crates/syntax/src/ast/generated/nodes.rs +++ b/crates/syntax/src/ast/generated/nodes.rs @@ -1,4 +1,4 @@ -//! Generated by `sourcegen_ast`, do not edit by hand. +//! Generated by `cargo codegen grammar`, do not edit by hand. #![allow(non_snake_case)] use crate::{ diff --git a/crates/syntax/src/ast/generated/tokens.rs b/crates/syntax/src/ast/generated/tokens.rs index 651a8ebbf7..85d20c2bd8 100644 --- a/crates/syntax/src/ast/generated/tokens.rs +++ b/crates/syntax/src/ast/generated/tokens.rs @@ -1,4 +1,4 @@ -//! Generated by `sourcegen_ast`, do not edit by hand. 
+//! Generated by `cargo codegen grammar`, do not edit by hand. use crate::{ ast::AstToken, diff --git a/xtask/src/codegen/assists_doc_tests.rs b/xtask/src/codegen/assists_doc_tests.rs index 1256232287..d06c9d65df 100644 --- a/xtask/src/codegen/assists_doc_tests.rs +++ b/xtask/src/codegen/assists_doc_tests.rs @@ -195,3 +195,8 @@ fn reveal_hash_comments(text: &str) -> String { }) .fold(String::new(), |mut acc, it| format_to_acc!(acc, "{it}\n")) } + +#[test] +fn test() { + generate(true); +} diff --git a/xtask/src/codegen/grammar.rs b/xtask/src/codegen/grammar.rs index 89db39411b..2d492f79f1 100644 --- a/xtask/src/codegen/grammar.rs +++ b/xtask/src/codegen/grammar.rs @@ -890,3 +890,8 @@ impl AstNodeSrc { }); } } + +#[test] +fn test() { + generate(true); +} diff --git a/xtask/src/codegen/parser_inline_tests.rs b/xtask/src/codegen/parser_inline_tests.rs index 7ad530ab01..5983b06e1b 100644 --- a/xtask/src/codegen/parser_inline_tests.rs +++ b/xtask/src/codegen/parser_inline_tests.rs @@ -127,3 +127,8 @@ fn existing_tests(dir: &Path, ok: bool) -> HashMap { } res } + +#[test] +fn test() { + generate(true); +} diff --git a/xtask/src/tidy.rs b/xtask/src/tidy.rs index 98e52e7e97..e85f518286 100644 --- a/xtask/src/tidy.rs +++ b/xtask/src/tidy.rs @@ -338,3 +338,8 @@ fn find_marks(set: &mut HashSet, text: &str, mark: &str) { } } } + +#[test] +fn test() { + Tidy {}.run(&Shell::new().unwrap()).unwrap(); +} From 8a4b1fd96edf6a8e6b98d3e38c08b607ceb414f5 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sun, 7 Jul 2024 09:19:09 +0200 Subject: [PATCH 79/94] Fix stale reference in architecture.md --- Cargo.toml | 1 - docs/dev/architecture.md | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index eea73ac380..bea77b9c85 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -90,7 +90,6 @@ ra-ap-rustc_abi = { version = "0.53.0", default-features = false } ra-ap-rustc_pattern_analysis = { version = "0.53.0", default-features = false } # local crates that aren't published to crates.io. These should not have versions. -sourcegen = { path = "./crates/sourcegen" } test-fixture = { path = "./crates/test-fixture" } test-utils = { path = "./crates/test-utils" } diff --git a/docs/dev/architecture.md b/docs/dev/architecture.md index 4303a800a0..f4e7263868 100644 --- a/docs/dev/architecture.md +++ b/docs/dev/architecture.md @@ -368,7 +368,7 @@ In particular, we generate: * Documentation tests for assists -See the `sourcegen` crate for details. +See the `xtask\src\codegen\assists_doc_tests.rs` module for details. **Architecture Invariant:** we avoid bootstrapping. For codegen we need to parse Rust code. 
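Taken together, these patches give every generated artifact a single lifecycle: `cargo codegen <kind>` rewrites the checked-in file, while a plain `#[test]` in the corresponding xtask module calls the same entry point in check mode and fails when the committed copy is stale. A condensed sketch of that contract; the file path, preamble text, and messages below are placeholders, not the real generator:

use std::{fs, path::Path};

// Hypothetical stand-in for one of the real generators.
pub(crate) fn generate(check: bool) {
    let generated =
        "//! Generated by `cargo codegen example`, do not edit by hand.\n".to_owned();
    let path = Path::new("crates/example/src/generated.rs");
    let on_disk = fs::read_to_string(path).unwrap_or_default();
    if on_disk == generated {
        return;
    }
    if check {
        // Check mode never writes; it only reports staleness, e.g. in CI.
        panic!("{} is stale; run `cargo codegen example` and commit it", path.display());
    }
    fs::write(path, generated).unwrap();
}

#[test]
fn generated_file_is_fresh() {
    generate(true);
}

Because the test simply re-runs the generator in check mode, a contributor who forgets to regenerate gets the exact `cargo codegen` command to run straight from the failure message.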
From 6c8c49b01bbbe62a832545c2238633cb09ace183 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sun, 7 Jul 2024 10:14:47 +0200 Subject: [PATCH 80/94] fix: Fix callHierarchy LSP violation --- crates/ide/src/call_hierarchy.rs | 210 ++++++++++++++++--- crates/rust-analyzer/src/handlers/request.rs | 8 +- 2 files changed, 187 insertions(+), 31 deletions(-) diff --git a/crates/ide/src/call_hierarchy.rs b/crates/ide/src/call_hierarchy.rs index f7e5b40dde..3c29f2f427 100644 --- a/crates/ide/src/call_hierarchy.rs +++ b/crates/ide/src/call_hierarchy.rs @@ -9,14 +9,15 @@ use ide_db::{ search::FileReference, FxIndexMap, RootDatabase, }; -use syntax::{ast, AstNode, SyntaxKind::IDENT, TextRange}; +use span::FileRange; +use syntax::{ast, AstNode, SyntaxKind::IDENT}; use crate::{goto_definition, FilePosition, NavigationTarget, RangeInfo, TryToNav}; #[derive(Debug, Clone)] pub struct CallItem { pub target: NavigationTarget, - pub ranges: Vec, + pub ranges: Vec, } pub(crate) fn call_hierarchy( @@ -61,9 +62,10 @@ pub(crate) fn incoming_calls( def.try_to_nav(sema.db) }); if let Some(nav) = nav { - calls.add(nav.call_site, sema.original_range(name.syntax()).range); + let range = sema.original_range(name.syntax()); + calls.add(nav.call_site, range); if let Some(other) = nav.def_site { - calls.add(other, sema.original_range(name.syntax()).range); + calls.add(other, range); } } } @@ -107,12 +109,13 @@ pub(crate) fn outgoing_calls( hir::CallableKind::TupleStruct(it) => it.try_to_nav(db), _ => None, } - .zip(Some(expr.syntax().text_range())) + .zip(Some(sema.original_range(expr.syntax()))) } ast::CallableExpr::MethodCall(expr) => { - let range = expr.name_ref()?.syntax().text_range(); let function = sema.resolve_method_call(&expr)?; - function.try_to_nav(db).zip(Some(range)) + function + .try_to_nav(db) + .zip(Some(sema.original_range(expr.name_ref()?.syntax()))) } }?; Some(nav_target.into_iter().zip(iter::repeat(range))) @@ -125,11 +128,11 @@ pub(crate) fn outgoing_calls( #[derive(Default)] struct CallLocations { - funcs: FxIndexMap>, + funcs: FxIndexMap>, } impl CallLocations { - fn add(&mut self, target: NavigationTarget, range: TextRange) { + fn add(&mut self, target: NavigationTarget, range: FileRange) { self.funcs.entry(target).or_default().push(range); } @@ -153,7 +156,14 @@ mod tests { expected_outgoing: Expect, ) { fn debug_render(item: crate::CallItem) -> String { - format!("{} : {:?}", item.target.debug_render(), item.ranges) + format!( + "{} : {}", + item.target.debug_render(), + item.ranges.iter().format_with(", ", |range, f| f(&format_args!( + "{:?}:{:?}", + range.file_id, range.range + ))) + ) } let (analysis, pos) = fixture::position(ra_fixture); @@ -183,7 +193,7 @@ fn caller() { } "#, expect![["callee Function FileId(0) 0..14 3..9"]], - expect![["caller Function FileId(0) 15..44 18..24 : [33..39]"]], + expect!["caller Function FileId(0) 15..44 18..24 : FileId(0):33..39"], expect![[]], ); } @@ -199,7 +209,7 @@ fn caller() { } "#, expect![["callee Function FileId(0) 0..14 3..9"]], - expect![["caller Function FileId(0) 15..44 18..24 : [33..39]"]], + expect!["caller Function FileId(0) 15..44 18..24 : FileId(0):33..39"], expect![[]], ); } @@ -216,7 +226,7 @@ fn caller() { } "#, expect![["callee Function FileId(0) 0..14 3..9"]], - expect![["caller Function FileId(0) 15..58 18..24 : [33..39, 47..53]"]], + expect!["caller Function FileId(0) 15..58 18..24 : FileId(0):33..39, FileId(0):47..53"], expect![[]], ); } @@ -236,9 +246,9 @@ fn caller2() { } "#, expect![["callee Function FileId(0) 0..14 3..9"]], - 
expect![[" - caller1 Function FileId(0) 15..45 18..25 : [34..40] - caller2 Function FileId(0) 47..77 50..57 : [66..72]"]], + expect![[r#" + caller1 Function FileId(0) 15..45 18..25 : FileId(0):34..40 + caller2 Function FileId(0) 47..77 50..57 : FileId(0):66..72"#]], expect![[]], ); } @@ -265,8 +275,8 @@ mod tests { "#, expect![["callee Function FileId(0) 0..14 3..9"]], expect![[r#" - caller1 Function FileId(0) 15..45 18..25 : [34..40] - test_caller Function FileId(0) 95..149 110..121 tests : [134..140]"#]], + caller1 Function FileId(0) 15..45 18..25 : FileId(0):34..40 + test_caller Function FileId(0) 95..149 110..121 tests : FileId(0):134..140"#]], expect![[]], ); } @@ -287,7 +297,7 @@ fn caller() { pub fn callee() {} "#, expect!["callee Function FileId(1) 0..18 7..13 foo"], - expect![["caller Function FileId(0) 27..56 30..36 : [45..51]"]], + expect!["caller Function FileId(0) 27..56 30..36 : FileId(0):45..51"], expect![[]], ); } @@ -305,7 +315,7 @@ fn call$0er() { "#, expect![["caller Function FileId(0) 15..58 18..24"]], expect![[]], - expect![["callee Function FileId(0) 0..14 3..9 : [33..39, 47..53]"]], + expect!["callee Function FileId(0) 0..14 3..9 : FileId(0):33..39, FileId(0):47..53"], ); } @@ -326,7 +336,7 @@ pub fn callee() {} "#, expect![["caller Function FileId(0) 27..56 30..36"]], expect![[]], - expect!["callee Function FileId(1) 0..18 7..13 foo : [45..51]"], + expect!["callee Function FileId(1) 0..18 7..13 foo : FileId(0):45..51"], ); } @@ -348,8 +358,8 @@ fn caller3() { } "#, expect![["caller2 Function FileId(0) 33..64 36..43"]], - expect![["caller1 Function FileId(0) 0..31 3..10 : [19..26]"]], - expect![["caller3 Function FileId(0) 66..83 69..76 : [52..59]"]], + expect!["caller1 Function FileId(0) 0..31 3..10 : FileId(0):19..26"], + expect!["caller3 Function FileId(0) 66..83 69..76 : FileId(0):52..59"], ); } @@ -368,8 +378,8 @@ fn main() { } "#, expect![["a Function FileId(0) 0..18 3..4"]], - expect![["main Function FileId(0) 31..52 34..38 : [47..48]"]], - expect![["b Function FileId(0) 20..29 23..24 : [13..14]"]], + expect!["main Function FileId(0) 31..52 34..38 : FileId(0):47..48"], + expect!["b Function FileId(0) 20..29 23..24 : FileId(0):13..14"], ); check_hierarchy( @@ -385,7 +395,7 @@ fn main() { } "#, expect![["b Function FileId(0) 20..29 23..24"]], - expect![["a Function FileId(0) 0..18 3..4 : [13..14]"]], + expect!["a Function FileId(0) 0..18 3..4 : FileId(0):13..14"], expect![[]], ); } @@ -410,7 +420,7 @@ fn caller() { } "#, expect![[r#"callee Function FileId(0) 144..159 152..158"#]], - expect![[r#"caller Function FileId(0) 160..194 163..169 : [184..190]"#]], + expect!["caller Function FileId(0) 160..194 163..169 : FileId(0):184..190"], expect![[]], ); check_hierarchy( @@ -431,7 +441,7 @@ fn caller() { } "#, expect![[r#"callee Function FileId(0) 144..159 152..158"#]], - expect![[r#"caller Function FileId(0) 160..194 163..169 : [184..190]"#]], + expect!["caller Function FileId(0) 160..194 163..169 : FileId(0):184..190"], expect![[]], ); } @@ -461,6 +471,148 @@ fn caller$0() { expect![[]], ); } + #[test] + fn test_call_hierarchy_in_macros_incoming_different_files() { + check_hierarchy( + r#" +//- /lib.rs +#[macro_use] +mod foo; +define!(callee) +fn caller() { + call!(call$0ee); +} +//- /foo.rs +macro_rules! define { + ($ident:ident) => { + fn $ident {} + } +} +macro_rules! 
call { + ($ident:ident) => { + $ident() + } +} +"#, + expect!["callee Function FileId(0) 22..37 30..36"], + expect!["caller Function FileId(0) 38..72 41..47 : FileId(0):62..68"], + expect![[]], + ); + check_hierarchy( + r#" +//- /lib.rs +#[macro_use] +mod foo; +define!(cal$0lee) +fn caller() { + call!(callee); +} +//- /foo.rs +macro_rules! define { + ($ident:ident) => { + fn $ident {} + } +} +macro_rules! call { + ($ident:ident) => { + $ident() + } +} +"#, + expect!["callee Function FileId(0) 22..37 30..36"], + expect!["caller Function FileId(0) 38..72 41..47 : FileId(0):62..68"], + expect![[]], + ); + check_hierarchy( + r#" +//- /lib.rs +#[macro_use] +mod foo; +define!(cal$0lee) +call!(callee); +//- /foo.rs +macro_rules! define { + ($ident:ident) => { + fn $ident {} + } +} +macro_rules! call { + ($ident:ident) => { + fn caller() { + $ident() + } + fn $ident() { + $ident() + } + } +} +"#, + expect!["callee Function FileId(0) 22..37 30..36"], + expect![[r#" + callee Function FileId(0) 38..52 44..50 : FileId(0):44..50 + caller Function FileId(0) 38..52 : FileId(0):44..50 + caller Function FileId(1) 130..136 130..136 : FileId(0):44..50"#]], + expect![[]], + ); + } + + #[test] + fn test_call_hierarchy_in_macros_outgoing_different_files() { + check_hierarchy( + r#" +//- /lib.rs +#[macro_use] +mod foo; +define!(callee) +fn caller$0() { + call!(callee); +} +//- /foo.rs +macro_rules! define { + ($ident:ident) => { + fn $ident {} + } +} +macro_rules! call { + ($ident:ident) => { + $ident() + callee() + } +} +"#, + expect!["caller Function FileId(0) 38..72 41..47"], + expect![[]], + // FIXME + expect![[]], + ); + check_hierarchy( + r#" +//- /lib.rs +#[macro_use] +mod foo; +define!(callee) +fn caller$0() { + call!(callee); +} +//- /foo.rs +macro_rules! define { + () => { + fn callee {} + } +} +macro_rules! 
call { + ($ident:ident) => { + $ident() + callee() + } +} +"#, + expect!["caller Function FileId(0) 38..72 41..47"], + expect![[]], + // FIXME + expect![[]], + ); + } #[test] fn test_trait_method_call_hierarchy() { @@ -481,7 +633,7 @@ fn caller() { } "#, expect!["callee Function FileId(0) 15..27 18..24 T1"], - expect![["caller Function FileId(0) 82..115 85..91 : [104..110]"]], + expect!["caller Function FileId(0) 82..115 85..91 : FileId(0):104..110"], expect![[]], ); } diff --git a/crates/rust-analyzer/src/handlers/request.rs b/crates/rust-analyzer/src/handlers/request.rs index 40ca9c3fa9..e19f7a4898 100644 --- a/crates/rust-analyzer/src/handlers/request.rs +++ b/crates/rust-analyzer/src/handlers/request.rs @@ -1640,7 +1640,9 @@ pub(crate) fn handle_call_hierarchy_incoming( from_ranges: call_item .ranges .into_iter() - .map(|it| to_proto::range(&line_index, it)) + // This is the range relative to the item + .filter(|it| it.file_id == file_id) + .map(|it| to_proto::range(&line_index, it.range)) .collect(), }); } @@ -1675,7 +1677,9 @@ pub(crate) fn handle_call_hierarchy_outgoing( from_ranges: call_item .ranges .into_iter() - .map(|it| to_proto::range(&line_index, it)) + // This is the range relative to the caller + .filter(|it| it.file_id == fpos.file_id) + .map(|it| to_proto::range(&line_index, it.range)) .collect(), }); } From c08d419fba1c7ad3c6e80f38d9412942c8516b9a Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sun, 7 Jul 2024 11:00:28 +0200 Subject: [PATCH 81/94] HasGenericArgs syntax trait --- crates/hir-def/src/body/lower.rs | 4 +-- crates/hir-def/src/hir/type_ref.rs | 2 +- crates/hir-def/src/path/lower.rs | 2 +- .../src/handlers/add_turbo_fish.rs | 2 +- .../src/handlers/convert_from_to_tryfrom.rs | 2 +- .../src/handlers/convert_into_to_from.rs | 2 +- .../src/handlers/extract_type_alias.rs | 2 +- .../src/handlers/generate_delegate_trait.rs | 5 ++-- .../generate_documentation_template.rs | 2 +- .../ide-assists/src/handlers/inline_call.rs | 4 ++- .../ide-assists/src/handlers/qualify_path.rs | 1 + .../replace_qualified_name_with_use.rs | 2 +- .../replace_turbofish_with_explicit_type.rs | 3 +-- .../src/handlers/toggle_async_sugar.rs | 2 +- .../src/handlers/unwrap_result_return_type.rs | 2 +- crates/ide-completion/src/context/analysis.rs | 5 +++- crates/ide-db/src/path_transform.rs | 2 +- crates/ide-ssr/src/matching.rs | 2 +- crates/ide-ssr/src/resolving.rs | 5 +++- crates/ide/src/inlay_hints/bind_pat.rs | 2 +- crates/syntax/src/ast.rs | 7 +++-- crates/syntax/src/ast/edit_in_place.rs | 2 +- crates/syntax/src/ast/generated/nodes.rs | 27 ++++++++++++++++--- crates/syntax/src/ast/node_ext.rs | 5 +++- crates/syntax/src/ast/traits.rs | 5 ++++ xtask/src/codegen/grammar.rs | 25 ++++++++--------- 26 files changed, 83 insertions(+), 41 deletions(-) diff --git a/crates/hir-def/src/body/lower.rs b/crates/hir-def/src/body/lower.rs index faba9050fc..be7068c807 100644 --- a/crates/hir-def/src/body/lower.rs +++ b/crates/hir-def/src/body/lower.rs @@ -15,8 +15,8 @@ use span::AstIdMap; use stdx::never; use syntax::{ ast::{ - self, ArrayExprKind, AstChildren, BlockExpr, HasArgList, HasAttrs, HasLoopBody, HasName, - RangeItem, SlicePatComponents, + self, ArrayExprKind, AstChildren, BlockExpr, HasArgList, HasAttrs, HasGenericArgs, + HasLoopBody, HasName, RangeItem, SlicePatComponents, }, AstNode, AstPtr, AstToken as _, SyntaxNodePtr, }; diff --git a/crates/hir-def/src/hir/type_ref.rs b/crates/hir-def/src/hir/type_ref.rs index 741ae41c74..7272ed98ce 100644 --- a/crates/hir-def/src/hir/type_ref.rs +++ 
b/crates/hir-def/src/hir/type_ref.rs @@ -10,7 +10,7 @@ use hir_expand::{ AstId, }; use intern::Interned; -use syntax::ast::{self, HasName, IsString}; +use syntax::ast::{self, HasGenericArgs, HasName, IsString}; use crate::{ builtin_type::{BuiltinInt, BuiltinType, BuiltinUint}, diff --git a/crates/hir-def/src/path/lower.rs b/crates/hir-def/src/path/lower.rs index a710c2daca..cee9e05545 100644 --- a/crates/hir-def/src/path/lower.rs +++ b/crates/hir-def/src/path/lower.rs @@ -9,7 +9,7 @@ use hir_expand::{ name::{name, AsName}, }; use intern::Interned; -use syntax::ast::{self, AstNode, HasTypeBounds}; +use syntax::ast::{self, AstNode, HasGenericArgs, HasTypeBounds}; use crate::{ path::{AssociatedTypeBinding, GenericArg, GenericArgs, ModPath, Path, PathKind}, diff --git a/crates/ide-assists/src/handlers/add_turbo_fish.rs b/crates/ide-assists/src/handlers/add_turbo_fish.rs index 363aa142b2..327709b28a 100644 --- a/crates/ide-assists/src/handlers/add_turbo_fish.rs +++ b/crates/ide-assists/src/handlers/add_turbo_fish.rs @@ -1,7 +1,7 @@ use either::Either; use ide_db::defs::{Definition, NameRefClass}; use syntax::{ - ast::{self, make, HasArgList}, + ast::{self, make, HasArgList, HasGenericArgs}, ted, AstNode, }; diff --git a/crates/ide-assists/src/handlers/convert_from_to_tryfrom.rs b/crates/ide-assists/src/handlers/convert_from_to_tryfrom.rs index 5459bd334c..67c72a93da 100644 --- a/crates/ide-assists/src/handlers/convert_from_to_tryfrom.rs +++ b/crates/ide-assists/src/handlers/convert_from_to_tryfrom.rs @@ -1,7 +1,7 @@ use ide_db::{famous_defs::FamousDefs, traits::resolve_target_trait}; use itertools::Itertools; use syntax::{ - ast::{self, make, AstNode, HasName}, + ast::{self, make, AstNode, HasGenericArgs, HasName}, ted, }; diff --git a/crates/ide-assists/src/handlers/convert_into_to_from.rs b/crates/ide-assists/src/handlers/convert_into_to_from.rs index be433c3333..7a3055b90a 100644 --- a/crates/ide-assists/src/handlers/convert_into_to_from.rs +++ b/crates/ide-assists/src/handlers/convert_into_to_from.rs @@ -1,6 +1,6 @@ use hir::ImportPathConfig; use ide_db::{famous_defs::FamousDefs, helpers::mod_path_to_ast, traits::resolve_target_trait}; -use syntax::ast::{self, AstNode, HasName}; +use syntax::ast::{self, AstNode, HasGenericArgs, HasName}; use crate::{AssistContext, AssistId, AssistKind, Assists}; diff --git a/crates/ide-assists/src/handlers/extract_type_alias.rs b/crates/ide-assists/src/handlers/extract_type_alias.rs index 3612eda784..dcf16e89b2 100644 --- a/crates/ide-assists/src/handlers/extract_type_alias.rs +++ b/crates/ide-assists/src/handlers/extract_type_alias.rs @@ -1,7 +1,7 @@ use either::Either; use ide_db::syntax_helpers::node_ext::walk_ty; use syntax::{ - ast::{self, edit::IndentLevel, make, AstNode, HasGenericParams, HasName}, + ast::{self, edit::IndentLevel, make, AstNode, HasGenericArgs, HasGenericParams, HasName}, ted, }; diff --git a/crates/ide-assists/src/handlers/generate_delegate_trait.rs b/crates/ide-assists/src/handlers/generate_delegate_trait.rs index 19521b8a4b..78def51a4a 100644 --- a/crates/ide-assists/src/handlers/generate_delegate_trait.rs +++ b/crates/ide-assists/src/handlers/generate_delegate_trait.rs @@ -17,8 +17,9 @@ use syntax::{ self, edit::{self, AstNodeEdit}, edit_in_place::AttrsOwnerEdit, - make, AssocItem, GenericArgList, GenericParamList, HasAttrs, HasGenericParams, HasName, - HasTypeBounds, HasVisibility as astHasVisibility, Path, WherePred, + make, AssocItem, GenericArgList, GenericParamList, HasAttrs, HasGenericArgs, + HasGenericParams, HasName, 
HasTypeBounds, HasVisibility as astHasVisibility, Path, + WherePred, }, ted::{self, Position}, AstNode, NodeOrToken, SmolStr, SyntaxKind, diff --git a/crates/ide-assists/src/handlers/generate_documentation_template.rs b/crates/ide-assists/src/handlers/generate_documentation_template.rs index 38b24fd19c..51dd488454 100644 --- a/crates/ide-assists/src/handlers/generate_documentation_template.rs +++ b/crates/ide-assists/src/handlers/generate_documentation_template.rs @@ -4,7 +4,7 @@ use itertools::Itertools; use stdx::{format_to, to_lower_snake_case}; use syntax::{ algo::skip_whitespace_token, - ast::{self, edit::IndentLevel, HasDocComments, HasName}, + ast::{self, edit::IndentLevel, HasDocComments, HasGenericArgs, HasName}, match_ast, AstNode, AstToken, }; diff --git a/crates/ide-assists/src/handlers/inline_call.rs b/crates/ide-assists/src/handlers/inline_call.rs index 88fa6dc745..8c9fe23bb0 100644 --- a/crates/ide-assists/src/handlers/inline_call.rs +++ b/crates/ide-assists/src/handlers/inline_call.rs @@ -15,7 +15,9 @@ use ide_db::{ }; use itertools::{izip, Itertools}; use syntax::{ - ast::{self, edit::IndentLevel, edit_in_place::Indent, HasArgList, Pat, PathExpr}, + ast::{ + self, edit::IndentLevel, edit_in_place::Indent, HasArgList, HasGenericArgs, Pat, PathExpr, + }, ted, AstNode, NodeOrToken, SyntaxKind, }; diff --git a/crates/ide-assists/src/handlers/qualify_path.rs b/crates/ide-assists/src/handlers/qualify_path.rs index 978b719c30..f64a68f384 100644 --- a/crates/ide-assists/src/handlers/qualify_path.rs +++ b/crates/ide-assists/src/handlers/qualify_path.rs @@ -6,6 +6,7 @@ use ide_db::{ helpers::mod_path_to_ast, imports::import_assets::{ImportCandidate, LocatedImport}, }; +use syntax::ast::HasGenericArgs; use syntax::{ ast, ast::{make, HasArgList}, diff --git a/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs b/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs index 188165e776..a3ffdc9c53 100644 --- a/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs +++ b/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs @@ -4,7 +4,7 @@ use ide_db::{ imports::insert_use::{insert_use, ImportScope}, }; use syntax::{ - ast::{self, make}, + ast::{self, make, HasGenericArgs}, match_ast, ted, AstNode, SyntaxNode, }; diff --git a/crates/ide-assists/src/handlers/replace_turbofish_with_explicit_type.rs b/crates/ide-assists/src/handlers/replace_turbofish_with_explicit_type.rs index 1794c88743..3a6391cd38 100644 --- a/crates/ide-assists/src/handlers/replace_turbofish_with_explicit_type.rs +++ b/crates/ide-assists/src/handlers/replace_turbofish_with_explicit_type.rs @@ -1,7 +1,6 @@ use hir::HirDisplay; use syntax::{ - ast::{Expr, GenericArg, GenericArgList}, - ast::{LetStmt, Type::InferType}, + ast::{Expr, GenericArg, GenericArgList, HasGenericArgs, LetStmt, Type::InferType}, AstNode, TextRange, }; diff --git a/crates/ide-assists/src/handlers/toggle_async_sugar.rs b/crates/ide-assists/src/handlers/toggle_async_sugar.rs index 30e09648ea..f8adb2ea08 100644 --- a/crates/ide-assists/src/handlers/toggle_async_sugar.rs +++ b/crates/ide-assists/src/handlers/toggle_async_sugar.rs @@ -4,7 +4,7 @@ use ide_db::{ famous_defs::FamousDefs, }; use syntax::{ - ast::{self, HasVisibility}, + ast::{self, HasGenericArgs, HasVisibility}, AstNode, NodeOrToken, SyntaxKind, SyntaxNode, SyntaxToken, TextRange, }; diff --git a/crates/ide-assists/src/handlers/unwrap_result_return_type.rs b/crates/ide-assists/src/handlers/unwrap_result_return_type.rs index 
8a9e669630..a1987247cb 100644 --- a/crates/ide-assists/src/handlers/unwrap_result_return_type.rs +++ b/crates/ide-assists/src/handlers/unwrap_result_return_type.rs @@ -4,7 +4,7 @@ use ide_db::{ }; use itertools::Itertools; use syntax::{ - ast::{self, Expr}, + ast::{self, Expr, HasGenericArgs}, match_ast, AstNode, NodeOrToken, SyntaxKind, TextRange, }; diff --git a/crates/ide-completion/src/context/analysis.rs b/crates/ide-completion/src/context/analysis.rs index 80dcfd2f52..a14fe24fa7 100644 --- a/crates/ide-completion/src/context/analysis.rs +++ b/crates/ide-completion/src/context/analysis.rs @@ -6,7 +6,10 @@ use ide_db::{active_parameter::ActiveParameter, RootDatabase}; use itertools::Either; use syntax::{ algo::{ancestors_at_offset, find_node_at_offset, non_trivia_sibling}, - ast::{self, AttrKind, HasArgList, HasGenericParams, HasLoopBody, HasName, NameOrNameRef}, + ast::{ + self, AttrKind, HasArgList, HasGenericArgs, HasGenericParams, HasLoopBody, HasName, + NameOrNameRef, + }, match_ast, AstNode, AstToken, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken, TextRange, TextSize, T, }; diff --git a/crates/ide-db/src/path_transform.rs b/crates/ide-db/src/path_transform.rs index 7c11dd3e2a..a36f30a645 100644 --- a/crates/ide-db/src/path_transform.rs +++ b/crates/ide-db/src/path_transform.rs @@ -6,7 +6,7 @@ use hir::{AsAssocItem, HirDisplay, ImportPathConfig, ModuleDef, SemanticsScope}; use itertools::Itertools; use rustc_hash::FxHashMap; use syntax::{ - ast::{self, make, AstNode}, + ast::{self, make, AstNode, HasGenericArgs}, ted, SyntaxNode, }; diff --git a/crates/ide-ssr/src/matching.rs b/crates/ide-ssr/src/matching.rs index b29053c0c2..9fa2ee84fd 100644 --- a/crates/ide-ssr/src/matching.rs +++ b/crates/ide-ssr/src/matching.rs @@ -10,7 +10,7 @@ use hir::{ImportPathConfig, Semantics}; use ide_db::{base_db::FileRange, FxHashMap}; use std::{cell::Cell, iter::Peekable}; use syntax::{ - ast::{self, AstNode, AstToken}, + ast::{self, AstNode, AstToken, HasGenericArgs}, SmolStr, SyntaxElement, SyntaxElementChildren, SyntaxKind, SyntaxNode, SyntaxToken, }; diff --git a/crates/ide-ssr/src/resolving.rs b/crates/ide-ssr/src/resolving.rs index 4731f14f4e..d3c1af1f31 100644 --- a/crates/ide-ssr/src/resolving.rs +++ b/crates/ide-ssr/src/resolving.rs @@ -3,7 +3,10 @@ use hir::AsAssocItem; use ide_db::{base_db::FilePosition, FxHashMap}; use parsing::Placeholder; -use syntax::{ast, SmolStr, SyntaxKind, SyntaxNode, SyntaxToken}; +use syntax::{ + ast::{self, HasGenericArgs}, + SmolStr, SyntaxKind, SyntaxNode, SyntaxToken, +}; use crate::{errors::error, parsing, SsrError}; diff --git a/crates/ide/src/inlay_hints/bind_pat.rs b/crates/ide/src/inlay_hints/bind_pat.rs index 3311bb48ad..1118f11d99 100644 --- a/crates/ide/src/inlay_hints/bind_pat.rs +++ b/crates/ide/src/inlay_hints/bind_pat.rs @@ -8,7 +8,7 @@ use ide_db::{base_db::FileId, famous_defs::FamousDefs, RootDatabase}; use itertools::Itertools; use syntax::{ - ast::{self, AstNode, HasName}, + ast::{self, AstNode, HasGenericArgs, HasName}, match_ast, }; diff --git a/crates/syntax/src/ast.rs b/crates/syntax/src/ast.rs index 168ca9f132..3282bd6eff 100644 --- a/crates/syntax/src/ast.rs +++ b/crates/syntax/src/ast.rs @@ -31,8 +31,8 @@ pub use self::{ operators::{ArithOp, BinaryOp, CmpOp, LogicOp, Ordering, RangeOp, UnaryOp}, token_ext::{CommentKind, CommentPlacement, CommentShape, IsString, QuoteOffsets, Radix}, traits::{ - AttrDocCommentIter, DocCommentIter, HasArgList, HasAttrs, HasDocComments, HasGenericParams, - HasLoopBody, 
HasModuleItem, HasName, HasTypeBounds, HasVisibility, + AttrDocCommentIter, DocCommentIter, HasArgList, HasAttrs, HasDocComments, HasGenericArgs, + HasGenericParams, HasLoopBody, HasModuleItem, HasName, HasTypeBounds, HasVisibility, }, }; @@ -149,14 +149,17 @@ pub trait RangeItem { mod support { use super::{AstChildren, AstNode, SyntaxKind, SyntaxNode, SyntaxToken}; + #[inline] pub(super) fn child(parent: &SyntaxNode) -> Option { parent.children().find_map(N::cast) } + #[inline] pub(super) fn children(parent: &SyntaxNode) -> AstChildren { AstChildren::new(parent) } + #[inline] pub(super) fn token(parent: &SyntaxNode, kind: SyntaxKind) -> Option { parent.children_with_tokens().filter_map(|it| it.into_token()).find(|it| it.kind() == kind) } diff --git a/crates/syntax/src/ast/edit_in_place.rs b/crates/syntax/src/ast/edit_in_place.rs index 2445e4f1a3..f1286e7aa2 100644 --- a/crates/syntax/src/ast/edit_in_place.rs +++ b/crates/syntax/src/ast/edit_in_place.rs @@ -6,7 +6,7 @@ use parser::{SyntaxKind, T}; use crate::{ algo::{self, neighbor}, - ast::{self, edit::IndentLevel, make, HasGenericParams}, + ast::{self, edit::IndentLevel, make, HasGenericArgs, HasGenericParams}, ted::{self, Position}, AstNode, AstToken, Direction, SyntaxElement, SyntaxKind::{ATTR, COMMENT, WHITESPACE}, diff --git a/crates/syntax/src/ast/generated/nodes.rs b/crates/syntax/src/ast/generated/nodes.rs index ce189acd68..0d01e0e73c 100644 --- a/crates/syntax/src/ast/generated/nodes.rs +++ b/crates/syntax/src/ast/generated/nodes.rs @@ -78,10 +78,10 @@ impl AssocItemList { pub struct AssocTypeArg { pub(crate) syntax: SyntaxNode, } +impl ast::HasGenericArgs for AssocTypeArg {} impl ast::HasTypeBounds for AssocTypeArg {} impl AssocTypeArg { pub fn const_arg(&self) -> Option { support::child(&self.syntax) } - pub fn generic_arg_list(&self) -> Option { support::child(&self.syntax) } pub fn name_ref(&self) -> Option { support::child(&self.syntax) } pub fn param_list(&self) -> Option { support::child(&self.syntax) } pub fn ret_type(&self) -> Option { support::child(&self.syntax) } @@ -783,8 +783,8 @@ pub struct MethodCallExpr { } impl ast::HasArgList for MethodCallExpr {} impl ast::HasAttrs for MethodCallExpr {} +impl ast::HasGenericArgs for MethodCallExpr {} impl MethodCallExpr { - pub fn generic_arg_list(&self) -> Option { support::child(&self.syntax) } pub fn name_ref(&self) -> Option { support::child(&self.syntax) } pub fn receiver(&self) -> Option { support::child(&self.syntax) } pub fn dot_token(&self) -> Option { support::token(&self.syntax, T![.]) } @@ -946,8 +946,8 @@ impl PathPat { pub struct PathSegment { pub(crate) syntax: SyntaxNode, } +impl ast::HasGenericArgs for PathSegment {} impl PathSegment { - pub fn generic_arg_list(&self) -> Option { support::child(&self.syntax) } pub fn name_ref(&self) -> Option { support::child(&self.syntax) } pub fn param_list(&self) -> Option { support::child(&self.syntax) } pub fn path_type(&self) -> Option { support::child(&self.syntax) } @@ -1763,6 +1763,12 @@ pub struct AnyHasDocComments { } impl ast::HasDocComments for AnyHasDocComments {} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct AnyHasGenericArgs { + pub(crate) syntax: SyntaxNode, +} +impl ast::HasGenericArgs for AnyHasGenericArgs {} + #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct AnyHasGenericParams { pub(crate) syntax: SyntaxNode, @@ -4235,6 +4241,21 @@ impl AstNode for AnyHasDocComments { } fn syntax(&self) -> &SyntaxNode { &self.syntax } } +impl AnyHasGenericArgs { + #[inline] + pub fn new(node: T) -> 
AnyHasGenericArgs { + AnyHasGenericArgs { syntax: node.syntax().clone() } + } +} +impl AstNode for AnyHasGenericArgs { + fn can_cast(kind: SyntaxKind) -> bool { + matches!(kind, ASSOC_TYPE_ARG | METHOD_CALL_EXPR | PATH_SEGMENT) + } + fn cast(syntax: SyntaxNode) -> Option { + Self::can_cast(syntax.kind()).then_some(AnyHasGenericArgs { syntax }) + } + fn syntax(&self) -> &SyntaxNode { &self.syntax } +} impl AnyHasGenericParams { #[inline] pub fn new(node: T) -> AnyHasGenericParams { diff --git a/crates/syntax/src/ast/node_ext.rs b/crates/syntax/src/ast/node_ext.rs index b0fbe7101c..911e3d823d 100644 --- a/crates/syntax/src/ast/node_ext.rs +++ b/crates/syntax/src/ast/node_ext.rs @@ -10,7 +10,10 @@ use parser::SyntaxKind; use rowan::{GreenNodeData, GreenTokenData}; use crate::{ - ast::{self, support, AstNode, AstToken, HasAttrs, HasGenericParams, HasName, SyntaxNode}, + ast::{ + self, support, AstNode, AstToken, HasAttrs, HasGenericArgs, HasGenericParams, HasName, + SyntaxNode, + }, ted, NodeOrToken, SmolStr, SyntaxElement, SyntaxToken, TokenText, T, }; diff --git a/crates/syntax/src/ast/traits.rs b/crates/syntax/src/ast/traits.rs index 16f7356b1e..152b0cb98c 100644 --- a/crates/syntax/src/ast/traits.rs +++ b/crates/syntax/src/ast/traits.rs @@ -52,6 +52,11 @@ pub trait HasGenericParams: AstNode { support::child(self.syntax()) } } +pub trait HasGenericArgs: AstNode { + fn generic_arg_list(&self) -> Option { + support::child(self.syntax()) + } +} pub trait HasTypeBounds: AstNode { fn type_bound_list(&self) -> Option { diff --git a/xtask/src/codegen/grammar.rs b/xtask/src/codegen/grammar.rs index 2d492f79f1..201c4cc45d 100644 --- a/xtask/src/codegen/grammar.rs +++ b/xtask/src/codegen/grammar.rs @@ -797,20 +797,21 @@ fn extract_enums(ast: &mut AstSrc) { } } -fn extract_struct_traits(ast: &mut AstSrc) { - let traits: &[(&str, &[&str])] = &[ - ("HasAttrs", &["attrs"]), - ("HasName", &["name"]), - ("HasVisibility", &["visibility"]), - ("HasGenericParams", &["generic_param_list", "where_clause"]), - ("HasTypeBounds", &["type_bound_list", "colon_token"]), - ("HasModuleItem", &["items"]), - ("HasLoopBody", &["label", "loop_body"]), - ("HasArgList", &["arg_list"]), - ]; +const TRAITS: &[(&str, &[&str])] = &[ + ("HasAttrs", &["attrs"]), + ("HasName", &["name"]), + ("HasVisibility", &["visibility"]), + ("HasGenericParams", &["generic_param_list", "where_clause"]), + ("HasGenericArgs", &["generic_arg_list"]), + ("HasTypeBounds", &["type_bound_list", "colon_token"]), + ("HasModuleItem", &["items"]), + ("HasLoopBody", &["label", "loop_body"]), + ("HasArgList", &["arg_list"]), +]; +fn extract_struct_traits(ast: &mut AstSrc) { for node in &mut ast.nodes { - for (name, methods) in traits { + for (name, methods) in TRAITS { extract_struct_trait(node, name, methods); } } From 35aa238020498c7e72c3372761411efb09471a4a Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sun, 7 Jul 2024 11:18:40 +0200 Subject: [PATCH 82/94] Inline all the things --- crates/syntax/src/ast/generated/nodes.rs | 1019 ++++++++++++++++++++++ xtask/src/codegen/grammar.rs | 13 + 2 files changed, 1032 insertions(+) diff --git a/crates/syntax/src/ast/generated/nodes.rs b/crates/syntax/src/ast/generated/nodes.rs index 0d01e0e73c..0373e7c552 100644 --- a/crates/syntax/src/ast/generated/nodes.rs +++ b/crates/syntax/src/ast/generated/nodes.rs @@ -12,6 +12,7 @@ pub struct Abi { pub(crate) syntax: SyntaxNode, } impl Abi { + #[inline] pub fn extern_token(&self) -> Option { support::token(&self.syntax, T![extern]) } } @@ -20,7 +21,9 @@ pub struct ArgList 
{ pub(crate) syntax: SyntaxNode, } impl ArgList { + #[inline] pub fn l_paren_token(&self) -> Option { support::token(&self.syntax, T!['(']) } + #[inline] pub fn r_paren_token(&self) -> Option { support::token(&self.syntax, T![')']) } } @@ -30,10 +33,15 @@ pub struct ArrayExpr { } impl ast::HasAttrs for ArrayExpr {} impl ArrayExpr { + #[inline] pub fn expr(&self) -> Option { support::child(&self.syntax) } + #[inline] pub fn exprs(&self) -> AstChildren { support::children(&self.syntax) } + #[inline] pub fn l_brack_token(&self) -> Option { support::token(&self.syntax, T!['[']) } + #[inline] pub fn r_brack_token(&self) -> Option { support::token(&self.syntax, T![']']) } + #[inline] pub fn semicolon_token(&self) -> Option { support::token(&self.syntax, T![;]) } } @@ -42,10 +50,15 @@ pub struct ArrayType { pub(crate) syntax: SyntaxNode, } impl ArrayType { + #[inline] pub fn const_arg(&self) -> Option { support::child(&self.syntax) } + #[inline] pub fn ty(&self) -> Option { support::child(&self.syntax) } + #[inline] pub fn l_brack_token(&self) -> Option { support::token(&self.syntax, T!['[']) } + #[inline] pub fn r_brack_token(&self) -> Option { support::token(&self.syntax, T![']']) } + #[inline] pub fn semicolon_token(&self) -> Option { support::token(&self.syntax, T![;]) } } @@ -55,11 +68,17 @@ pub struct AsmExpr { } impl ast::HasAttrs for AsmExpr {} impl AsmExpr { + #[inline] pub fn expr(&self) -> Option { support::child(&self.syntax) } + #[inline] pub fn pound_token(&self) -> Option { support::token(&self.syntax, T![#]) } + #[inline] pub fn l_paren_token(&self) -> Option { support::token(&self.syntax, T!['(']) } + #[inline] pub fn r_paren_token(&self) -> Option { support::token(&self.syntax, T![')']) } + #[inline] pub fn asm_token(&self) -> Option { support::token(&self.syntax, T![asm]) } + #[inline] pub fn builtin_token(&self) -> Option { support::token(&self.syntax, T![builtin]) } } @@ -69,8 +88,11 @@ pub struct AssocItemList { } impl ast::HasAttrs for AssocItemList {} impl AssocItemList { + #[inline] pub fn assoc_items(&self) -> AstChildren { support::children(&self.syntax) } + #[inline] pub fn l_curly_token(&self) -> Option { support::token(&self.syntax, T!['{']) } + #[inline] pub fn r_curly_token(&self) -> Option { support::token(&self.syntax, T!['}']) } } @@ -81,11 +103,17 @@ pub struct AssocTypeArg { impl ast::HasGenericArgs for AssocTypeArg {} impl ast::HasTypeBounds for AssocTypeArg {} impl AssocTypeArg { + #[inline] pub fn const_arg(&self) -> Option { support::child(&self.syntax) } + #[inline] pub fn name_ref(&self) -> Option { support::child(&self.syntax) } + #[inline] pub fn param_list(&self) -> Option { support::child(&self.syntax) } + #[inline] pub fn ret_type(&self) -> Option { support::child(&self.syntax) } + #[inline] pub fn ty(&self) -> Option { support::child(&self.syntax) } + #[inline] pub fn eq_token(&self) -> Option { support::token(&self.syntax, T![=]) } } @@ -94,10 +122,15 @@ pub struct Attr { pub(crate) syntax: SyntaxNode, } impl Attr { + #[inline] pub fn meta(&self) -> Option { support::child(&self.syntax) } + #[inline] pub fn excl_token(&self) -> Option { support::token(&self.syntax, T![!]) } + #[inline] pub fn pound_token(&self) -> Option { support::token(&self.syntax, T![#]) } + #[inline] pub fn l_brack_token(&self) -> Option { support::token(&self.syntax, T!['[']) } + #[inline] pub fn r_brack_token(&self) -> Option { support::token(&self.syntax, T![']']) } } @@ -107,8 +140,11 @@ pub struct AwaitExpr { } impl ast::HasAttrs for AwaitExpr {} impl AwaitExpr { + 
#[inline] pub fn expr(&self) -> Option { support::child(&self.syntax) } + #[inline] pub fn dot_token(&self) -> Option { support::token(&self.syntax, T![.]) } + #[inline] pub fn await_token(&self) -> Option { support::token(&self.syntax, T![await]) } } @@ -118,7 +154,9 @@ pub struct BecomeExpr { } impl ast::HasAttrs for BecomeExpr {} impl BecomeExpr { + #[inline] pub fn expr(&self) -> Option { support::child(&self.syntax) } + #[inline] pub fn become_token(&self) -> Option { support::token(&self.syntax, T![become]) } } @@ -135,11 +173,17 @@ pub struct BlockExpr { } impl ast::HasAttrs for BlockExpr {} impl BlockExpr { + #[inline] pub fn label(&self) -> Option