diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index e11d6e15d1..39ac652de0 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -16,7 +16,7 @@ env: RUSTFLAGS: "-D warnings -W unreachable-pub" RUSTUP_MAX_RETRIES: 10 FETCH_DEPTH: 0 # pull in the tags for the version string - MACOSX_DEPLOYMENT_TARGET: 10.15 + MACOSX_DEPLOYMENT_TARGET: 13.0 CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER: aarch64-linux-gnu-gcc CARGO_TARGET_ARM_UNKNOWN_LINUX_GNUEABIHF_LINKER: arm-linux-gnueabihf-gcc @@ -43,10 +43,10 @@ jobs: - os: ubuntu-20.04 target: arm-unknown-linux-gnueabihf code-target: linux-armhf - - os: macos-12 + - os: macos-13 target: x86_64-apple-darwin code-target: darwin-x64 - - os: macos-12 + - os: macos-13 target: aarch64-apple-darwin code-target: darwin-arm64 diff --git a/Cargo.lock b/Cargo.lock index dc820fcb28..7891edc244 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -145,9 +145,12 @@ dependencies = [ [[package]] name = "cc" -version = "1.1.10" +version = "1.1.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9e8aabfac534be767c909e0690571677d49f41bd8465ae876fe043d52ba5292" +checksum = "9540e661f81799159abee814118cc139a2004b3a3aa3ea37724a1b66530b90e0" +dependencies = [ + "shlex", +] [[package]] name = "cfg" @@ -1852,6 +1855,12 @@ dependencies = [ "lazy_static", ] +[[package]] +name = "shlex" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" + [[package]] name = "smallvec" version = "1.13.2" diff --git a/Cargo.toml b/Cargo.toml index b4587a3796..0b3d6e2a1e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -4,7 +4,7 @@ exclude = ["crates/proc-macro-srv/proc-macro-test/imp"] resolver = "2" [workspace.package] -rust-version = "1.80" +rust-version = "1.81" edition = "2021" license = "MIT OR Apache-2.0" authors = ["rust-analyzer team"] diff --git a/crates/cfg/src/lib.rs b/crates/cfg/src/lib.rs index e9daaf7de3..c2d4008605 100644 --- a/crates/cfg/src/lib.rs +++ b/crates/cfg/src/lib.rs @@ -49,6 +49,10 @@ impl CfgOptions { cfg.fold(&|atom| self.enabled.contains(atom)) } + pub fn check_atom(&self, cfg: &CfgAtom) -> bool { + self.enabled.contains(cfg) + } + pub fn insert_atom(&mut self, key: Symbol) { self.enabled.insert(CfgAtom::Flag(key)); } diff --git a/crates/hir-def/src/macro_expansion_tests/mbe.rs b/crates/hir-def/src/macro_expansion_tests/mbe.rs index 85fb90fdfb..d568f6faa7 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe.rs @@ -36,7 +36,7 @@ macro_rules! 
f { } struct#0:1@58..64#1# MyTraitMap2#0:2@31..42#0# {#0:1@72..73#1# - map#0:1@86..89#1#:#0:1@89..90#1# #0:1@89..90#1#::#0:1@91..92#1#std#0:1@93..96#1#::#0:1@96..97#1#collections#0:1@98..109#1#::#0:1@109..110#1#HashSet#0:1@111..118#1#<#0:1@118..119#1#(#0:1@119..120#1#)#0:1@120..121#1#>#0:1@121..122#1#,#0:1@122..123#1# + map#0:1@86..89#1#:#0:1@89..90#1# #0:1@89..90#1#::#0:1@91..93#1#std#0:1@93..96#1#::#0:1@96..98#1#collections#0:1@98..109#1#::#0:1@109..111#1#HashSet#0:1@111..118#1#<#0:1@118..119#1#(#0:1@119..120#1#)#0:1@120..121#1#>#0:1@121..122#1#,#0:1@122..123#1# }#0:1@132..133#1# "#]], ); diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs index e09ef4f205..7f1d19719d 100644 --- a/crates/hir-def/src/nameres/collector.rs +++ b/crates/hir-def/src/nameres/collector.rs @@ -6,7 +6,7 @@ use std::{cmp::Ordering, iter, mem, ops::Not}; use base_db::{CrateId, CrateOrigin, Dependency, LangCrateOrigin}; -use cfg::{CfgExpr, CfgOptions}; +use cfg::{CfgAtom, CfgExpr, CfgOptions}; use either::Either; use hir_expand::{ attrs::{Attr, AttrId}, @@ -1324,13 +1324,21 @@ impl DefCollector<'_> { }; // Skip #[test]/#[bench] expansion, which would merely result in more memory usage - // due to duplicating functions into macro expansions + // due to duplicating functions into macro expansions, but only if `cfg(test)` is active, + // otherwise they are expanded to nothing and this can impact e.g. diagnostics (due to things + // being cfg'ed out). + // Ideally we will just expand them to nothing here. But we are only collecting macro calls, + // not expanding them, so we have no way to do that. if matches!( def.kind, MacroDefKind::BuiltInAttr(_, expander) if expander.is_test() || expander.is_bench() ) { - return recollect_without(self); + let test_is_active = + self.cfg_options.check_atom(&CfgAtom::Flag(sym::test.clone())); + if test_is_active { + return recollect_without(self); + } } let call_id = || { diff --git a/crates/hir-expand/src/builtin/attr_macro.rs b/crates/hir-expand/src/builtin/attr_macro.rs index b9afc666f7..2a8691b461 100644 --- a/crates/hir-expand/src/builtin/attr_macro.rs +++ b/crates/hir-expand/src/builtin/attr_macro.rs @@ -4,6 +4,8 @@ use span::{MacroCallId, Span}; use crate::{db::ExpandDatabase, name, tt, ExpandResult, MacroCallKind}; +use super::quote; + macro_rules! register_builtin { ($(($name:ident, $variant:ident) => $expand:ident),* ) => { #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] @@ -52,15 +54,15 @@ impl BuiltinAttrExpander { } register_builtin! { - (bench, Bench) => dummy_attr_expand, + (bench, Bench) => dummy_gate_test_expand, (cfg_accessible, CfgAccessible) => dummy_attr_expand, (cfg_eval, CfgEval) => dummy_attr_expand, (derive, Derive) => derive_expand, // derive const is equivalent to derive for our proposes. (derive_const, DeriveConst) => derive_expand, (global_allocator, GlobalAllocator) => dummy_attr_expand, - (test, Test) => dummy_attr_expand, - (test_case, TestCase) => dummy_attr_expand + (test, Test) => dummy_gate_test_expand, + (test_case, TestCase) => dummy_gate_test_expand } pub fn find_builtin_attr(ident: &name::Name) -> Option { @@ -76,6 +78,19 @@ fn dummy_attr_expand( ExpandResult::ok(tt.clone()) } +fn dummy_gate_test_expand( + _db: &dyn ExpandDatabase, + _id: MacroCallId, + tt: &tt::Subtree, + span: Span, +) -> ExpandResult { + let result = quote::quote! 
{ span=> + #[cfg(test)] + #tt + }; + ExpandResult::ok(result) +} + /// We generate a very specific expansion here, as we do not actually expand the `#[derive]` attribute /// itself in name res, but we do want to expand it to something for the IDE layer, so that the input /// derive attributes can be downmapped, and resolved as proper paths. diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs index 584f9631e3..484a8662eb 100644 --- a/crates/hir-expand/src/db.rs +++ b/crates/hir-expand/src/db.rs @@ -16,7 +16,10 @@ use crate::{ cfg_process, declarative::DeclarativeMacroExpander, fixup::{self, SyntaxFixupUndoInfo}, - hygiene::{span_with_call_site_ctxt, span_with_def_site_ctxt, span_with_mixed_site_ctxt}, + hygiene::{ + span_with_call_site_ctxt, span_with_def_site_ctxt, span_with_mixed_site_ctxt, + SyntaxContextExt as _, + }, proc_macro::ProcMacros, span_map::{RealSpanMap, SpanMap, SpanMapRef}, tt, AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, @@ -300,14 +303,16 @@ pub fn expand_speculative( token_tree_to_syntax_node(&speculative_expansion.value, expand_to, loc.def.edition); let syntax_node = node.syntax_node(); - let token = rev_tmap + let (token, _) = rev_tmap .ranges_with_span(span_map.span_for_range(token_to_map.text_range())) - .filter_map(|range| syntax_node.covering_element(range).into_token()) - .min_by_key(|t| { - // prefer tokens of the same kind and text + .filter_map(|(range, ctx)| syntax_node.covering_element(range).into_token().zip(Some(ctx))) + .min_by_key(|(t, ctx)| { + // prefer tokens of the same kind and text, as well as non opaque marked ones // Note the inversion of the score here, as we want to prefer the first token in case // of all tokens having the same score - (t.kind() != token_to_map.kind()) as u8 + 2 * ((t.text() != token_to_map.text()) as u8) + ctx.is_opaque(db) as u8 + + 2 * (t.kind() != token_to_map.kind()) as u8 + + 4 * ((t.text() != token_to_map.text()) as u8) })?; Some((node.syntax_node(), token)) } diff --git a/crates/hir-expand/src/hygiene.rs b/crates/hir-expand/src/hygiene.rs index cc02332207..5e1448f795 100644 --- a/crates/hir-expand/src/hygiene.rs +++ b/crates/hir-expand/src/hygiene.rs @@ -151,6 +151,7 @@ pub trait SyntaxContextExt { fn remove_mark(&mut self, db: &dyn ExpandDatabase) -> (Option, Transparency); fn outer_mark(self, db: &dyn ExpandDatabase) -> (Option, Transparency); fn marks(self, db: &dyn ExpandDatabase) -> Vec<(MacroCallId, Transparency)>; + fn is_opaque(self, db: &dyn ExpandDatabase) -> bool; } impl SyntaxContextExt for SyntaxContextId { @@ -177,6 +178,9 @@ impl SyntaxContextExt for SyntaxContextId { marks.reverse(); marks } + fn is_opaque(self, db: &dyn ExpandDatabase) -> bool { + !self.is_root() && db.lookup_intern_syntax_context(self).outer_transparency.is_opaque() + } } // FIXME: Make this a SyntaxContextExt method once we have RPIT diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index 9538097949..56cb5fd375 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -25,6 +25,7 @@ mod prettify_macro_expansion_; use attrs::collect_attrs; use rustc_hash::FxHashMap; +use stdx::TupleExt; use triomphe::Arc; use std::hash::Hash; @@ -772,14 +773,15 @@ impl ExpansionInfo { /// Maps the passed in file range down into a macro expansion if it is the input to a macro call. /// /// Note this does a linear search through the entire backing vector of the spanmap. 
+ // FIXME: Consider adding a reverse map to ExpansionInfo to get rid of the linear search which + // potentially results in quadratic look ups (notably this might improve semantic highlighting perf) pub fn map_range_down_exact( &self, span: Span, - ) -> Option + '_>> { - let tokens = self - .exp_map - .ranges_with_span_exact(span) - .flat_map(move |range| self.expanded.value.covering_element(range).into_token()); + ) -> Option + '_>> { + let tokens = self.exp_map.ranges_with_span_exact(span).flat_map(move |(range, ctx)| { + self.expanded.value.covering_element(range).into_token().zip(Some(ctx)) + }); Some(InMacroFile::new(self.expanded.file_id, tokens)) } @@ -791,11 +793,10 @@ impl ExpansionInfo { pub fn map_range_down( &self, span: Span, - ) -> Option + '_>> { - let tokens = self - .exp_map - .ranges_with_span(span) - .flat_map(move |range| self.expanded.value.covering_element(range).into_token()); + ) -> Option + '_>> { + let tokens = self.exp_map.ranges_with_span(span).flat_map(move |(range, ctx)| { + self.expanded.value.covering_element(range).into_token().zip(Some(ctx)) + }); Some(InMacroFile::new(self.expanded.file_id, tokens)) } @@ -845,7 +846,8 @@ impl ExpansionInfo { self.arg.file_id, arg_map .ranges_with_span_exact(span) - .filter(|range| range.intersect(arg_range).is_some()) + .filter(|(range, _)| range.intersect(arg_range).is_some()) + .map(TupleExt::head) .collect(), ) } diff --git a/crates/hir-ty/src/chalk_db.rs b/crates/hir-ty/src/chalk_db.rs index e74e3d7898..f7bacbd49b 100644 --- a/crates/hir-ty/src/chalk_db.rs +++ b/crates/hir-ty/src/chalk_db.rs @@ -382,8 +382,9 @@ impl chalk_solve::RustIrDatabase for ChalkContext<'_> { } fn is_object_safe(&self, trait_id: chalk_ir::TraitId) -> bool { + // FIXME: When cargo is updated, change to dyn_compatibility let trait_ = from_chalk_trait_id(trait_id); - crate::object_safety::object_safety(self.db, trait_).is_none() + crate::dyn_compatibility::dyn_compatibility(self.db, trait_).is_none() } fn closure_kind( diff --git a/crates/hir-ty/src/db.rs b/crates/hir-ty/src/db.rs index ce5a821ea2..5620d80adb 100644 --- a/crates/hir-ty/src/db.rs +++ b/crates/hir-ty/src/db.rs @@ -20,11 +20,11 @@ use triomphe::Arc; use crate::{ chalk_db, consteval::ConstEvalError, + dyn_compatibility::DynCompatibilityViolation, layout::{Layout, LayoutError}, lower::{GenericDefaults, GenericPredicates}, method_resolution::{InherentImpls, TraitImpls, TyFingerprint}, mir::{BorrowckResult, MirBody, MirLowerError}, - object_safety::ObjectSafetyViolation, Binders, ClosureId, Const, FnDefId, ImplTraitId, ImplTraits, InferenceResult, Interner, PolyFnSig, Substitution, TraitEnvironment, TraitRef, Ty, TyDefId, ValueTyDefId, }; @@ -108,8 +108,8 @@ pub trait HirDatabase: DefDatabase + Upcast { #[salsa::invoke(crate::layout::target_data_layout_query)] fn target_data_layout(&self, krate: CrateId) -> Result, Arc>; - #[salsa::invoke(crate::object_safety::object_safety_of_trait_query)] - fn object_safety_of_trait(&self, trait_: TraitId) -> Option; + #[salsa::invoke(crate::dyn_compatibility::dyn_compatibility_of_trait_query)] + fn dyn_compatibility_of_trait(&self, trait_: TraitId) -> Option; #[salsa::invoke(crate::lower::ty_query)] #[salsa::cycle(crate::lower::ty_recover)] @@ -280,8 +280,8 @@ pub trait HirDatabase: DefDatabase + Upcast { } #[test] -fn hir_database_is_object_safe() { - fn _assert_object_safe(_: &dyn HirDatabase) {} +fn hir_database_is_dyn_compatible() { + fn _assert_dyn_compatible(_: &dyn HirDatabase) {} } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] diff --git 
a/crates/hir-ty/src/diagnostics/decl_check.rs b/crates/hir-ty/src/diagnostics/decl_check.rs index 82517e6991..7f6b7e392b 100644 --- a/crates/hir-ty/src/diagnostics/decl_check.rs +++ b/crates/hir-ty/src/diagnostics/decl_check.rs @@ -58,7 +58,7 @@ impl fmt::Display for CaseType { let repr = match self { CaseType::LowerSnakeCase => "snake_case", CaseType::UpperSnakeCase => "UPPER_SNAKE_CASE", - CaseType::UpperCamelCase => "CamelCase", + CaseType::UpperCamelCase => "UpperCamelCase", }; repr.fmt(f) diff --git a/crates/hir-ty/src/diagnostics/decl_check/case_conv.rs b/crates/hir-ty/src/diagnostics/decl_check/case_conv.rs index cbe1af1570..aa0c9e30be 100644 --- a/crates/hir-ty/src/diagnostics/decl_check/case_conv.rs +++ b/crates/hir-ty/src/diagnostics/decl_check/case_conv.rs @@ -111,7 +111,7 @@ mod tests { check(to_lower_snake_case, "lower_snake_case", expect![[""]]); check(to_lower_snake_case, "UPPER_SNAKE_CASE", expect![["upper_snake_case"]]); check(to_lower_snake_case, "Weird_Case", expect![["weird_case"]]); - check(to_lower_snake_case, "CamelCase", expect![["camel_case"]]); + check(to_lower_snake_case, "UpperCamelCase", expect![["upper_camel_case"]]); check(to_lower_snake_case, "lowerCamelCase", expect![["lower_camel_case"]]); check(to_lower_snake_case, "a", expect![[""]]); check(to_lower_snake_case, "abc", expect![[""]]); @@ -121,8 +121,8 @@ mod tests { #[test] fn test_to_camel_case() { - check(to_camel_case, "CamelCase", expect![[""]]); - check(to_camel_case, "CamelCase_", expect![[""]]); + check(to_camel_case, "UpperCamelCase", expect![[""]]); + check(to_camel_case, "UpperCamelCase_", expect![[""]]); check(to_camel_case, "_CamelCase", expect![[""]]); check(to_camel_case, "lowerCamelCase", expect![["LowerCamelCase"]]); check(to_camel_case, "lower_snake_case", expect![["LowerSnakeCase"]]); @@ -143,7 +143,7 @@ mod tests { check(to_upper_snake_case, "UPPER_SNAKE_CASE", expect![[""]]); check(to_upper_snake_case, "lower_snake_case", expect![["LOWER_SNAKE_CASE"]]); check(to_upper_snake_case, "Weird_Case", expect![["WEIRD_CASE"]]); - check(to_upper_snake_case, "CamelCase", expect![["CAMEL_CASE"]]); + check(to_upper_snake_case, "UpperCamelCase", expect![["UPPER_CAMEL_CASE"]]); check(to_upper_snake_case, "lowerCamelCase", expect![["LOWER_CAMEL_CASE"]]); check(to_upper_snake_case, "A", expect![[""]]); check(to_upper_snake_case, "ABC", expect![[""]]); diff --git a/crates/hir-ty/src/object_safety.rs b/crates/hir-ty/src/dyn_compatibility.rs similarity index 92% rename from crates/hir-ty/src/object_safety.rs rename to crates/hir-ty/src/dyn_compatibility.rs index a4c6626855..e0d1758210 100644 --- a/crates/hir-ty/src/object_safety.rs +++ b/crates/hir-ty/src/dyn_compatibility.rs @@ -1,4 +1,4 @@ -//! Compute the object-safety of a trait +//! 
Compute the dyn-compatibility of a trait use std::ops::ControlFlow; @@ -28,14 +28,14 @@ use crate::{ }; #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub enum ObjectSafetyViolation { +pub enum DynCompatibilityViolation { SizedSelf, SelfReferential, Method(FunctionId, MethodViolationCode), AssocConst(ConstId), GAT(TypeAliasId), // This doesn't exist in rustc, but added for better visualization - HasNonSafeSuperTrait(TraitId), + HasNonCompatibleSuperTrait(TraitId), } #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -50,70 +50,73 @@ pub enum MethodViolationCode { UndispatchableReceiver, } -pub fn object_safety(db: &dyn HirDatabase, trait_: TraitId) -> Option { +pub fn dyn_compatibility( + db: &dyn HirDatabase, + trait_: TraitId, +) -> Option { for super_trait in all_super_traits(db.upcast(), trait_).into_iter().skip(1).rev() { - if db.object_safety_of_trait(super_trait).is_some() { - return Some(ObjectSafetyViolation::HasNonSafeSuperTrait(super_trait)); + if db.dyn_compatibility_of_trait(super_trait).is_some() { + return Some(DynCompatibilityViolation::HasNonCompatibleSuperTrait(super_trait)); } } - db.object_safety_of_trait(trait_) + db.dyn_compatibility_of_trait(trait_) } -pub fn object_safety_with_callback( +pub fn dyn_compatibility_with_callback( db: &dyn HirDatabase, trait_: TraitId, cb: &mut F, ) -> ControlFlow<()> where - F: FnMut(ObjectSafetyViolation) -> ControlFlow<()>, + F: FnMut(DynCompatibilityViolation) -> ControlFlow<()>, { for super_trait in all_super_traits(db.upcast(), trait_).into_iter().skip(1).rev() { - if db.object_safety_of_trait(super_trait).is_some() { - cb(ObjectSafetyViolation::HasNonSafeSuperTrait(trait_))?; + if db.dyn_compatibility_of_trait(super_trait).is_some() { + cb(DynCompatibilityViolation::HasNonCompatibleSuperTrait(trait_))?; } } - object_safety_of_trait_with_callback(db, trait_, cb) + dyn_compatibility_of_trait_with_callback(db, trait_, cb) } -pub fn object_safety_of_trait_with_callback( +pub fn dyn_compatibility_of_trait_with_callback( db: &dyn HirDatabase, trait_: TraitId, cb: &mut F, ) -> ControlFlow<()> where - F: FnMut(ObjectSafetyViolation) -> ControlFlow<()>, + F: FnMut(DynCompatibilityViolation) -> ControlFlow<()>, { // Check whether this has a `Sized` bound if generics_require_sized_self(db, trait_.into()) { - cb(ObjectSafetyViolation::SizedSelf)?; + cb(DynCompatibilityViolation::SizedSelf)?; } // Check if there exist bounds that referencing self if predicates_reference_self(db, trait_) { - cb(ObjectSafetyViolation::SelfReferential)?; + cb(DynCompatibilityViolation::SelfReferential)?; } if bounds_reference_self(db, trait_) { - cb(ObjectSafetyViolation::SelfReferential)?; + cb(DynCompatibilityViolation::SelfReferential)?; } // rustc checks for non-lifetime binders here, but we don't support HRTB yet let trait_data = db.trait_data(trait_); for (_, assoc_item) in &trait_data.items { - object_safety_violation_for_assoc_item(db, trait_, *assoc_item, cb)?; + dyn_compatibility_violation_for_assoc_item(db, trait_, *assoc_item, cb)?; } ControlFlow::Continue(()) } -pub fn object_safety_of_trait_query( +pub fn dyn_compatibility_of_trait_query( db: &dyn HirDatabase, trait_: TraitId, -) -> Option { +) -> Option { let mut res = None; - object_safety_of_trait_with_callback(db, trait_, &mut |osv| { + dyn_compatibility_of_trait_with_callback(db, trait_, &mut |osv| { res = Some(osv); ControlFlow::Break(()) }); @@ -321,14 +324,14 @@ fn contains_illegal_self_type_reference>( t.visit_with(visitor.as_dyn(), outer_binder).is_break() } -fn 
object_safety_violation_for_assoc_item( +fn dyn_compatibility_violation_for_assoc_item( db: &dyn HirDatabase, trait_: TraitId, item: AssocItemId, cb: &mut F, ) -> ControlFlow<()> where - F: FnMut(ObjectSafetyViolation) -> ControlFlow<()>, + F: FnMut(DynCompatibilityViolation) -> ControlFlow<()>, { // Any item that has a `Self : Sized` requisite is otherwise // exempt from the regulations. @@ -337,10 +340,10 @@ where } match item { - AssocItemId::ConstId(it) => cb(ObjectSafetyViolation::AssocConst(it)), + AssocItemId::ConstId(it) => cb(DynCompatibilityViolation::AssocConst(it)), AssocItemId::FunctionId(it) => { virtual_call_violations_for_method(db, trait_, it, &mut |mvc| { - cb(ObjectSafetyViolation::Method(it, mvc)) + cb(DynCompatibilityViolation::Method(it, mvc)) }) } AssocItemId::TypeAliasId(it) => { @@ -350,7 +353,7 @@ where } else { let generic_params = db.generic_params(item.into()); if !generic_params.is_empty() { - cb(ObjectSafetyViolation::GAT(it)) + cb(DynCompatibilityViolation::GAT(it)) } else { ControlFlow::Continue(()) } @@ -469,7 +472,7 @@ fn receiver_is_dispatchable( return false; }; - // `self: Self` can't be dispatched on, but this is already considered object safe. + // `self: Self` can't be dispatched on, but this is already considered dyn compatible // See rustc's comment on https://github.com/rust-lang/rust/blob/3f121b9461cce02a703a0e7e450568849dfaa074/compiler/rustc_trait_selection/src/traits/object_safety.rs#L433-L437 if sig .skip_binders() diff --git a/crates/hir-ty/src/object_safety/tests.rs b/crates/hir-ty/src/dyn_compatibility/tests.rs similarity index 78% rename from crates/hir-ty/src/object_safety/tests.rs rename to crates/hir-ty/src/dyn_compatibility/tests.rs index c2a9117c5b..3f3e68eeb1 100644 --- a/crates/hir-ty/src/object_safety/tests.rs +++ b/crates/hir-ty/src/dyn_compatibility/tests.rs @@ -5,29 +5,29 @@ use rustc_hash::{FxHashMap, FxHashSet}; use syntax::ToSmolStr; use test_fixture::WithFixture; -use crate::{object_safety::object_safety_with_callback, test_db::TestDB}; +use crate::{dyn_compatibility::dyn_compatibility_with_callback, test_db::TestDB}; use super::{ + DynCompatibilityViolation, MethodViolationCode::{self, *}, - ObjectSafetyViolation, }; -use ObjectSafetyViolationKind::*; +use DynCompatibilityViolationKind::*; #[allow(clippy::upper_case_acronyms)] #[derive(Debug, Clone, PartialEq, Eq, Hash)] -enum ObjectSafetyViolationKind { +enum DynCompatibilityViolationKind { SizedSelf, SelfReferential, Method(MethodViolationCode), AssocConst, GAT, - HasNonSafeSuperTrait, + HasNonCompatibleSuperTrait, } -fn check_object_safety<'a>( +fn check_dyn_compatibility<'a>( ra_fixture: &str, - expected: impl IntoIterator)>, + expected: impl IntoIterator)>, ) { let mut expected: FxHashMap<_, _> = expected.into_iter().map(|(id, osvs)| (id, FxHashSet::from_iter(osvs))).collect(); @@ -53,18 +53,20 @@ fn check_object_safety<'a>( continue; }; let mut osvs = FxHashSet::default(); - object_safety_with_callback(&db, trait_id, &mut |osv| { + dyn_compatibility_with_callback(&db, trait_id, &mut |osv| { osvs.insert(match osv { - ObjectSafetyViolation::SizedSelf => SizedSelf, - ObjectSafetyViolation::SelfReferential => SelfReferential, - ObjectSafetyViolation::Method(_, mvc) => Method(mvc), - ObjectSafetyViolation::AssocConst(_) => AssocConst, - ObjectSafetyViolation::GAT(_) => GAT, - ObjectSafetyViolation::HasNonSafeSuperTrait(_) => HasNonSafeSuperTrait, + DynCompatibilityViolation::SizedSelf => SizedSelf, + DynCompatibilityViolation::SelfReferential => SelfReferential, + 
DynCompatibilityViolation::Method(_, mvc) => Method(mvc), + DynCompatibilityViolation::AssocConst(_) => AssocConst, + DynCompatibilityViolation::GAT(_) => GAT, + DynCompatibilityViolation::HasNonCompatibleSuperTrait(_) => { + HasNonCompatibleSuperTrait + } }); ControlFlow::Continue(()) }); - assert_eq!(osvs, expected, "Object safety violations for `{name}` do not match;"); + assert_eq!(osvs, expected, "Dyn Compatibility violations for `{name}` do not match;"); } let remains: Vec<_> = expected.keys().collect(); @@ -73,7 +75,7 @@ fn check_object_safety<'a>( #[test] fn item_bounds_can_reference_self() { - check_object_safety( + check_dyn_compatibility( r#" //- minicore: eq pub trait Foo { @@ -88,7 +90,7 @@ pub trait Foo { #[test] fn associated_consts() { - check_object_safety( + check_dyn_compatibility( r#" trait Bar { const X: usize; @@ -100,7 +102,7 @@ trait Bar { #[test] fn bounds_reference_self() { - check_object_safety( + check_dyn_compatibility( r#" //- minicore: eq trait X { @@ -113,7 +115,7 @@ trait X { #[test] fn by_value_self() { - check_object_safety( + check_dyn_compatibility( r#" //- minicore: dispatch_from_dyn trait Bar { @@ -135,7 +137,7 @@ trait Quux { #[test] fn generic_methods() { - check_object_safety( + check_dyn_compatibility( r#" //- minicore: dispatch_from_dyn trait Bar { @@ -157,7 +159,7 @@ trait Qax { #[test] fn mentions_self() { - check_object_safety( + check_dyn_compatibility( r#" //- minicore: dispatch_from_dyn trait Bar { @@ -182,7 +184,7 @@ trait Quux { #[test] fn no_static() { - check_object_safety( + check_dyn_compatibility( r#" //- minicore: dispatch_from_dyn trait Foo { @@ -195,7 +197,7 @@ trait Foo { #[test] fn sized_self() { - check_object_safety( + check_dyn_compatibility( r#" //- minicore: dispatch_from_dyn trait Bar: Sized { @@ -205,7 +207,7 @@ trait Bar: Sized { [("Bar", vec![SizedSelf])], ); - check_object_safety( + check_dyn_compatibility( r#" //- minicore: dispatch_from_dyn trait Bar @@ -220,7 +222,7 @@ trait Bar #[test] fn supertrait_gat() { - check_object_safety( + check_dyn_compatibility( r#" //- minicore: dispatch_from_dyn trait GatTrait { @@ -229,13 +231,13 @@ trait GatTrait { trait SuperTrait: GatTrait {} "#, - [("GatTrait", vec![GAT]), ("SuperTrait", vec![HasNonSafeSuperTrait])], + [("GatTrait", vec![GAT]), ("SuperTrait", vec![HasNonCompatibleSuperTrait])], ); } #[test] fn supertrait_mentions_self() { - check_object_safety( + check_dyn_compatibility( r#" //- minicore: dispatch_from_dyn trait Bar { @@ -251,7 +253,7 @@ trait Baz : Bar { #[test] fn rustc_issue_19538() { - check_object_safety( + check_dyn_compatibility( r#" //- minicore: dispatch_from_dyn trait Foo { @@ -260,13 +262,13 @@ trait Foo { trait Bar: Foo {} "#, - [("Foo", vec![Method(Generic)]), ("Bar", vec![HasNonSafeSuperTrait])], + [("Foo", vec![Method(Generic)]), ("Bar", vec![HasNonCompatibleSuperTrait])], ); } #[test] fn rustc_issue_22040() { - check_object_safety( + check_dyn_compatibility( r#" //- minicore: fmt, eq, dispatch_from_dyn use core::fmt::Debug; @@ -281,7 +283,7 @@ trait Expr: Debug + PartialEq { #[test] fn rustc_issue_102762() { - check_object_safety( + check_dyn_compatibility( r#" //- minicore: future, send, sync, dispatch_from_dyn, deref use core::pin::Pin; @@ -313,7 +315,7 @@ pub trait Fetcher: Send + Sync { #[test] fn rustc_issue_102933() { - check_object_safety( + check_dyn_compatibility( r#" //- minicore: future, dispatch_from_dyn, deref use core::future::Future; @@ -351,7 +353,7 @@ pub trait B2: Service + B1 { #[test] fn rustc_issue_106247() { - 
check_object_safety( + check_dyn_compatibility( r#" //- minicore: sync, dispatch_from_dyn pub trait Trait { @@ -363,8 +365,8 @@ pub trait Trait { } #[test] -fn std_error_is_object_safe() { - check_object_safety( +fn std_error_is_dyn_compatible() { + check_dyn_compatibility( r#" //- minicore: fmt, dispatch_from_dyn trait Erased<'a>: 'a {} @@ -380,14 +382,14 @@ pub trait Error: core::fmt::Debug + core::fmt::Display { } #[test] -fn lifetime_gat_is_object_unsafe() { - check_object_safety( +fn lifetime_gat_is_dyn_incompatible() { + check_dyn_compatibility( r#" //- minicore: dispatch_from_dyn trait Foo { type Bar<'a>; } "#, - [("Foo", vec![ObjectSafetyViolationKind::GAT])], + [("Foo", vec![DynCompatibilityViolationKind::GAT])], ); } diff --git a/crates/hir-ty/src/lib.rs b/crates/hir-ty/src/lib.rs index 5ed41b99ba..ef570a2055 100644 --- a/crates/hir-ty/src/lib.rs +++ b/crates/hir-ty/src/lib.rs @@ -38,11 +38,11 @@ pub mod consteval; pub mod db; pub mod diagnostics; pub mod display; +pub mod dyn_compatibility; pub mod lang_items; pub mod layout; pub mod method_resolution; pub mod mir; -pub mod object_safety; pub mod primitive; pub mod traits; diff --git a/crates/hir-ty/src/mir/borrowck.rs b/crates/hir-ty/src/mir/borrowck.rs index 878d584a4e..9830fa1ca7 100644 --- a/crates/hir-ty/src/mir/borrowck.rs +++ b/crates/hir-ty/src/mir/borrowck.rs @@ -386,82 +386,91 @@ fn ever_initialized_map( fn dfs( db: &dyn HirDatabase, body: &MirBody, - b: BasicBlockId, l: LocalId, + stack: &mut Vec, result: &mut ArenaMap>, ) { - let mut is_ever_initialized = result[b][l]; // It must be filled, as we use it as mark for dfs - let block = &body.basic_blocks[b]; - for statement in &block.statements { - match &statement.kind { - StatementKind::Assign(p, _) => { - if p.projection.lookup(&body.projection_store).is_empty() && p.local == l { + while let Some(b) = stack.pop() { + let mut is_ever_initialized = result[b][l]; // It must be filled, as we use it as mark for dfs + let block = &body.basic_blocks[b]; + for statement in &block.statements { + match &statement.kind { + StatementKind::Assign(p, _) => { + if p.projection.lookup(&body.projection_store).is_empty() && p.local == l { + is_ever_initialized = true; + } + } + StatementKind::StorageDead(p) => { + if *p == l { + is_ever_initialized = false; + } + } + StatementKind::Deinit(_) + | StatementKind::FakeRead(_) + | StatementKind::Nop + | StatementKind::StorageLive(_) => (), + } + } + let Some(terminator) = &block.terminator else { + never!( + "Terminator should be none only in construction.\nThe body:\n{}", + body.pretty_print(db) + ); + return; + }; + let mut process = |target, is_ever_initialized| { + if !result[target].contains_idx(l) || !result[target][l] && is_ever_initialized { + result[target].insert(l, is_ever_initialized); + stack.push(target); + } + }; + match &terminator.kind { + TerminatorKind::Goto { target } => process(*target, is_ever_initialized), + TerminatorKind::SwitchInt { targets, .. } => { + targets.all_targets().iter().for_each(|&it| process(it, is_ever_initialized)); + } + TerminatorKind::UnwindResume + | TerminatorKind::Abort + | TerminatorKind::Return + | TerminatorKind::Unreachable => (), + TerminatorKind::Call { target, cleanup, destination, .. 
} => { + if destination.projection.lookup(&body.projection_store).is_empty() + && destination.local == l + { is_ever_initialized = true; } + target.iter().chain(cleanup).for_each(|&it| process(it, is_ever_initialized)); } - StatementKind::StorageDead(p) => { - if *p == l { - is_ever_initialized = false; - } + TerminatorKind::Drop { target, unwind, place: _ } => { + iter::once(target) + .chain(unwind) + .for_each(|&it| process(it, is_ever_initialized)); } - StatementKind::Deinit(_) - | StatementKind::FakeRead(_) - | StatementKind::Nop - | StatementKind::StorageLive(_) => (), - } - } - let Some(terminator) = &block.terminator else { - never!( - "Terminator should be none only in construction.\nThe body:\n{}", - body.pretty_print(db) - ); - return; - }; - let mut process = |target, is_ever_initialized| { - if !result[target].contains_idx(l) || !result[target][l] && is_ever_initialized { - result[target].insert(l, is_ever_initialized); - dfs(db, body, target, l, result); - } - }; - match &terminator.kind { - TerminatorKind::Goto { target } => process(*target, is_ever_initialized), - TerminatorKind::SwitchInt { targets, .. } => { - targets.all_targets().iter().for_each(|&it| process(it, is_ever_initialized)); - } - TerminatorKind::UnwindResume - | TerminatorKind::Abort - | TerminatorKind::Return - | TerminatorKind::Unreachable => (), - TerminatorKind::Call { target, cleanup, destination, .. } => { - if destination.projection.lookup(&body.projection_store).is_empty() - && destination.local == l - { - is_ever_initialized = true; + TerminatorKind::DropAndReplace { .. } + | TerminatorKind::Assert { .. } + | TerminatorKind::Yield { .. } + | TerminatorKind::CoroutineDrop + | TerminatorKind::FalseEdge { .. } + | TerminatorKind::FalseUnwind { .. } => { + never!("We don't emit these MIR terminators yet"); } - target.iter().chain(cleanup).for_each(|&it| process(it, is_ever_initialized)); - } - TerminatorKind::Drop { target, unwind, place: _ } => { - iter::once(target).chain(unwind).for_each(|&it| process(it, is_ever_initialized)); - } - TerminatorKind::DropAndReplace { .. } - | TerminatorKind::Assert { .. } - | TerminatorKind::Yield { .. } - | TerminatorKind::CoroutineDrop - | TerminatorKind::FalseEdge { .. } - | TerminatorKind::FalseUnwind { .. } => { - never!("We don't emit these MIR terminators yet"); } } } + let mut stack = Vec::new(); for &l in &body.param_locals { result[body.start_block].insert(l, true); - dfs(db, body, body.start_block, l, &mut result); + stack.clear(); + stack.push(body.start_block); + dfs(db, body, l, &mut stack, &mut result); } for l in body.locals.iter().map(|it| it.0) { db.unwind_if_cancelled(); if !result[body.start_block].contains_idx(l) { result[body.start_block].insert(l, false); - dfs(db, body, body.start_block, l, &mut result); + stack.clear(); + stack.push(body.start_block); + dfs(db, body, l, &mut stack, &mut result); } } result diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 8f5db32f95..30e023e1a4 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -144,9 +144,9 @@ pub use { hir_ty::{ consteval::ConstEvalError, display::{ClosureStyle, HirDisplay, HirDisplayError, HirWrite}, + dyn_compatibility::{DynCompatibilityViolation, MethodViolationCode}, layout::LayoutError, mir::{MirEvalError, MirLowerError}, - object_safety::{MethodViolationCode, ObjectSafetyViolation}, CastError, FnAbi, PointerCast, Safety, }, // FIXME: Properly encapsulate mir @@ -497,10 +497,9 @@ impl Module { /// Finds a parent module. 
pub fn parent(self, db: &dyn HirDatabase) -> Option { - // FIXME: handle block expressions as modules (their parent is in a different DefMap) let def_map = self.id.def_map(db.upcast()); - let parent_id = def_map[self.id.local_id].parent?; - Some(Module { id: def_map.module_id(parent_id) }) + let parent_id = def_map.containing_module(self.id.local_id)?; + Some(Module { id: parent_id }) } /// Finds nearest non-block ancestor `Module` (`self` included). @@ -557,7 +556,7 @@ impl Module { acc: &mut Vec, style_lints: bool, ) { - let _p = tracing::info_span!("Module::diagnostics", name = ?self.name(db)).entered(); + let _p = tracing::info_span!("diagnostics", name = ?self.name(db)).entered(); let edition = db.crate_graph()[self.id.krate()].edition; let def_map = self.id.def_map(db.upcast()); for diag in def_map.diagnostics() { @@ -2690,8 +2689,8 @@ impl Trait { .count() } - pub fn object_safety(&self, db: &dyn HirDatabase) -> Option { - hir_ty::object_safety::object_safety(db, self.id) + pub fn dyn_compatibility(&self, db: &dyn HirDatabase) -> Option { + hir_ty::dyn_compatibility::dyn_compatibility(db, self.id) } fn all_macro_calls(&self, db: &dyn HirDatabase) -> Box<[(AstId, MacroCallId)]> { diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs index fa14b53dbc..b27f1fbb5d 100644 --- a/crates/hir/src/semantics.rs +++ b/crates/hir/src/semantics.rs @@ -24,6 +24,7 @@ use hir_expand::{ builtin::{BuiltinFnLikeExpander, EagerExpander}, db::ExpandDatabase, files::InRealFile, + hygiene::SyntaxContextExt as _, inert_attr_macro::find_builtin_attr_idx, name::AsName, FileRange, InMacroFile, MacroCallId, MacroFileId, MacroFileIdExt, @@ -32,13 +33,13 @@ use intern::Symbol; use itertools::Itertools; use rustc_hash::{FxHashMap, FxHashSet}; use smallvec::{smallvec, SmallVec}; -use span::{EditionedFileId, FileId, HirFileIdRepr}; +use span::{EditionedFileId, FileId, HirFileIdRepr, SyntaxContextId}; use stdx::TupleExt; use syntax::{ algo::skip_trivia_token, - ast::{self, HasAttrs as _, HasGenericParams, HasLoopBody, IsString as _}, - match_ast, AstNode, AstToken, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, - TextRange, TextSize, + ast::{self, HasAttrs as _, HasGenericParams, IsString as _}, + AstNode, AstToken, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange, + TextSize, }; use crate::{ @@ -608,7 +609,7 @@ impl<'db> SemanticsImpl<'db> { let quote = string.open_quote_text_range()?; let token = self.wrap_token_infile(string.syntax().clone()).into_real_file().ok()?; - self.descend_into_macros_breakable(token, |token| { + self.descend_into_macros_breakable(token, |token, _| { (|| { let token = token.value; let string = ast::String::cast(token)?; @@ -655,7 +656,7 @@ impl<'db> SemanticsImpl<'db> { let original_string = ast::String::cast(original_token.clone())?; let original_token = self.wrap_token_infile(original_token).into_real_file().ok()?; let quote = original_string.open_quote_text_range()?; - self.descend_into_macros_breakable(original_token, |token| { + self.descend_into_macros_breakable(original_token, |token, _| { (|| { let token = token.value; self.resolve_offset_in_format_args( @@ -718,7 +719,7 @@ impl<'db> SemanticsImpl<'db> { // node is just the token, so descend the token self.descend_into_macros_impl( InRealFile::new(file_id, first), - &mut |InFile { value, .. }| { + &mut |InFile { value, .. 
}, _ctx| { if let Some(node) = value .parent_ancestors() .take_while(|it| it.text_range() == value.text_range()) @@ -732,7 +733,7 @@ impl<'db> SemanticsImpl<'db> { } else { // Descend first and last token, then zip them to look for the node they belong to let mut scratch: SmallVec<[_; 1]> = smallvec![]; - self.descend_into_macros_impl(InRealFile::new(file_id, first), &mut |token| { + self.descend_into_macros_impl(InRealFile::new(file_id, first), &mut |token, _ctx| { scratch.push(token); CONTINUE_NO_BREAKS }); @@ -740,7 +741,7 @@ impl<'db> SemanticsImpl<'db> { let mut scratch = scratch.into_iter(); self.descend_into_macros_impl( InRealFile::new(file_id, last), - &mut |InFile { value: last, file_id: last_fid }| { + &mut |InFile { value: last, file_id: last_fid }, _ctx| { if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() { if first_fid == last_fid { if let Some(p) = first.parent() { @@ -763,7 +764,9 @@ impl<'db> SemanticsImpl<'db> { res } - fn is_inside_macro_call(token: &SyntaxToken) -> bool { + // FIXME: This isn't quite right wrt to inner attributes + /// Does a syntactic traversal to check whether this token might be inside a macro call + pub fn might_be_inside_macro_call(&self, token: &SyntaxToken) -> bool { token.parent_ancestors().any(|ancestor| { if ast::MacroCall::can_cast(ancestor.kind()) { return true; @@ -781,25 +784,14 @@ impl<'db> SemanticsImpl<'db> { }) } - pub fn descend_into_macros_exact_if_in_macro( - &self, - token: SyntaxToken, - ) -> SmallVec<[SyntaxToken; 1]> { - if Self::is_inside_macro_call(&token) { - self.descend_into_macros_exact(token) - } else { - smallvec![token] - } - } - pub fn descend_into_macros_cb( &self, token: SyntaxToken, - mut cb: impl FnMut(InFile), + mut cb: impl FnMut(InFile, SyntaxContextId), ) { if let Ok(token) = self.wrap_token_infile(token).into_real_file() { - self.descend_into_macros_impl(token, &mut |t| { - cb(t); + self.descend_into_macros_impl(token, &mut |t, ctx| { + cb(t, ctx); CONTINUE_NO_BREAKS }); } @@ -808,7 +800,7 @@ impl<'db> SemanticsImpl<'db> { pub fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> { let mut res = smallvec![]; if let Ok(token) = self.wrap_token_infile(token.clone()).into_real_file() { - self.descend_into_macros_impl(token, &mut |t| { + self.descend_into_macros_impl(token, &mut |t, _ctx| { res.push(t.value); CONTINUE_NO_BREAKS }); @@ -819,10 +811,27 @@ impl<'db> SemanticsImpl<'db> { res } + pub fn descend_into_macros_no_opaque(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> { + let mut res = smallvec![]; + if let Ok(token) = self.wrap_token_infile(token.clone()).into_real_file() { + self.descend_into_macros_impl(token, &mut |t, ctx| { + if !ctx.is_opaque(self.db.upcast()) { + // Don't descend into opaque contexts + res.push(t.value); + } + CONTINUE_NO_BREAKS + }); + } + if res.is_empty() { + res.push(token); + } + res + } + pub fn descend_into_macros_breakable( &self, token: InRealFile, - mut cb: impl FnMut(InFile) -> ControlFlow, + mut cb: impl FnMut(InFile, SyntaxContextId) -> ControlFlow, ) -> Option { self.descend_into_macros_impl(token.clone(), &mut cb) } @@ -834,10 +843,12 @@ impl<'db> SemanticsImpl<'db> { let text = token.text(); let kind = token.kind(); - self.descend_into_macros_cb(token.clone(), |InFile { value, file_id: _ }| { + self.descend_into_macros_cb(token.clone(), |InFile { value, file_id: _ }, ctx| { let mapped_kind = value.kind(); let any_ident_match = || kind.is_any_identifier() && value.kind().is_any_identifier(); - let matches = 
(kind == mapped_kind || any_ident_match()) && text == value.text(); + let matches = (kind == mapped_kind || any_ident_match()) + && text == value.text() + && !ctx.is_opaque(self.db.upcast()); if matches { r.push(value); } @@ -854,17 +865,21 @@ impl<'db> SemanticsImpl<'db> { let text = token.text(); let kind = token.kind(); if let Ok(token) = self.wrap_token_infile(token.clone()).into_real_file() { - self.descend_into_macros_breakable(token.clone(), |InFile { value, file_id: _ }| { - let mapped_kind = value.kind(); - let any_ident_match = - || kind.is_any_identifier() && value.kind().is_any_identifier(); - let matches = (kind == mapped_kind || any_ident_match()) && text == value.text(); - if matches { - ControlFlow::Break(value) - } else { - ControlFlow::Continue(()) - } - }) + self.descend_into_macros_breakable( + token.clone(), + |InFile { value, file_id: _ }, _ctx| { + let mapped_kind = value.kind(); + let any_ident_match = + || kind.is_any_identifier() && value.kind().is_any_identifier(); + let matches = + (kind == mapped_kind || any_ident_match()) && text == value.text(); + if matches { + ControlFlow::Break(value) + } else { + ControlFlow::Continue(()) + } + }, + ) } else { None } @@ -874,7 +889,7 @@ impl<'db> SemanticsImpl<'db> { fn descend_into_macros_impl( &self, InRealFile { value: token, file_id }: InRealFile, - f: &mut dyn FnMut(InFile) -> ControlFlow, + f: &mut dyn FnMut(InFile, SyntaxContextId) -> ControlFlow, ) -> Option { let _p = tracing::info_span!("descend_into_macros_impl").entered(); let (sa, span, file_id) = token @@ -898,7 +913,8 @@ impl<'db> SemanticsImpl<'db> { // These are tracked to know which macro calls we still have to look into // the tokens themselves aren't that interesting as the span that is being used to map // things down never changes. 
- let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![(file_id, smallvec![token])]; + let mut stack: Vec<(_, SmallVec<[_; 2]>)> = + vec![(file_id, smallvec![(token, SyntaxContextId::ROOT)])]; // Process the expansion of a call, pushing all tokens with our span in the expansion back onto our stack let process_expansion_for_token = |stack: &mut Vec<_>, macro_file| { @@ -921,11 +937,11 @@ impl<'db> SemanticsImpl<'db> { // Filters out all tokens that contain the given range (usually the macro call), any such // token is redundant as the corresponding macro call has already been processed let filter_duplicates = |tokens: &mut SmallVec<_>, range: TextRange| { - tokens.retain(|t: &mut SyntaxToken| !range.contains_range(t.text_range())) + tokens.retain(|(t, _): &mut (SyntaxToken, _)| !range.contains_range(t.text_range())) }; while let Some((expansion, ref mut tokens)) = stack.pop() { - while let Some(token) = tokens.pop() { + while let Some((token, ctx)) = tokens.pop() { let was_not_remapped = (|| { // First expand into attribute invocations let containing_attribute_macro_call = self.with_ctx(|ctx| { @@ -1036,7 +1052,7 @@ impl<'db> SemanticsImpl<'db> { let text_range = attr.syntax().text_range(); // remove any other token in this macro input, all their mappings are the // same as this - tokens.retain(|t| { + tokens.retain(|(t, _)| { !text_range.contains_range(t.text_range()) }); return process_expansion_for_token( @@ -1093,7 +1109,7 @@ impl<'db> SemanticsImpl<'db> { .is_none(); if was_not_remapped { - if let ControlFlow::Break(b) = f(InFile::new(expansion, token)) { + if let ControlFlow::Break(b) = f(InFile::new(expansion, token), ctx) { return Some(b); } } @@ -1221,26 +1237,10 @@ impl<'db> SemanticsImpl<'db> { ToDef::to_def(self, src.as_ref()) } - pub fn resolve_label(&self, lifetime: &ast::Lifetime) -> Option
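
A minimal, self-contained sketch (not part of the patch itself) of the recursion-to-worklist pattern that the `ever_initialized_map` hunk applies: the recursive `dfs` over basic blocks is replaced by an explicit `stack`, the usual way to keep deeply nested control-flow graphs from overflowing the call stack while computing the same fixpoint. The names below (`reachable`, `adj`, `seen`) are illustrative only and do not appear in the patch.

// Iterative graph traversal with an explicit worklist instead of recursion.
fn reachable(adj: &[Vec<usize>], start: usize) -> Vec<bool> {
    let mut seen = vec![false; adj.len()];
    let mut stack = vec![start]; // explicit stack replaces the recursive call
    seen[start] = true;
    while let Some(node) = stack.pop() {
        for &next in &adj[node] {
            if !seen[next] {
                // Mark before pushing, analogous to how the patched `process`
                // closure records `result[target]` before pushing `target`,
                // so each block is revisited only when its state changes.
                seen[next] = true;
                stack.push(next);
            }
        }
    }
    seen
}

fn main() {
    // 0 -> 1, 0 -> 2, 1 -> 2
    let adj = vec![vec![1, 2], vec![2], vec![]];
    assert_eq!(reachable(&adj, 0), vec![true, true, true]);
}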