Auto merge of #131399 - lnicola:sync-from-ra, r=lnicola

Subtree update of `rust-analyzer`

r? `@ghost`
bors 2024-10-08 12:43:18 +00:00
commit 0746cb0185
89 changed files with 1374 additions and 663 deletions


@ -16,7 +16,7 @@ env:
RUSTFLAGS: "-D warnings -W unreachable-pub"
RUSTUP_MAX_RETRIES: 10
FETCH_DEPTH: 0 # pull in the tags for the version string
- MACOSX_DEPLOYMENT_TARGET: 10.15
+ MACOSX_DEPLOYMENT_TARGET: 13.0
CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER: aarch64-linux-gnu-gcc
CARGO_TARGET_ARM_UNKNOWN_LINUX_GNUEABIHF_LINKER: arm-linux-gnueabihf-gcc
@ -43,10 +43,10 @@ jobs:
- os: ubuntu-20.04
target: arm-unknown-linux-gnueabihf
code-target: linux-armhf
- - os: macos-12
+ - os: macos-13
target: x86_64-apple-darwin
code-target: darwin-x64
- - os: macos-12
+ - os: macos-13
target: aarch64-apple-darwin
code-target: darwin-arm64

Cargo.lock (generated; 13 lines changed)

@ -145,9 +145,12 @@ dependencies = [
[[package]]
name = "cc"
version = "1.1.10"
version = "1.1.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e9e8aabfac534be767c909e0690571677d49f41bd8465ae876fe043d52ba5292"
checksum = "9540e661f81799159abee814118cc139a2004b3a3aa3ea37724a1b66530b90e0"
dependencies = [
"shlex",
]
[[package]]
name = "cfg"
@ -1852,6 +1855,12 @@ dependencies = [
"lazy_static",
]
+ [[package]]
+ name = "shlex"
+ version = "1.3.0"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
[[package]]
name = "smallvec"
version = "1.13.2"


@ -4,7 +4,7 @@ exclude = ["crates/proc-macro-srv/proc-macro-test/imp"]
resolver = "2"
[workspace.package]
rust-version = "1.80"
rust-version = "1.81"
edition = "2021"
license = "MIT OR Apache-2.0"
authors = ["rust-analyzer team"]


@ -49,6 +49,10 @@ impl CfgOptions {
cfg.fold(&|atom| self.enabled.contains(atom))
}
+ pub fn check_atom(&self, cfg: &CfgAtom) -> bool {
+     self.enabled.contains(cfg)
+ }
pub fn insert_atom(&mut self, key: Symbol) {
self.enabled.insert(CfgAtom::Flag(key));
}
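The new `check_atom` accessor answers whether a single cfg atom is enabled, without building a full `CfgExpr`. A minimal sketch of a call site, mirroring the def-collector change further down (illustrative only; `sym::test` is the interned "test" symbol):

    // Is `cfg(test)` enabled for this crate's CfgOptions?
    fn test_cfg_is_active(opts: &CfgOptions) -> bool {
        opts.check_atom(&CfgAtom::Flag(sym::test.clone()))
    }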


@ -36,7 +36,7 @@ macro_rules! f {
}
struct#0:1@58..64#1# MyTraitMap2#0:2@31..42#0# {#0:1@72..73#1#
- map#0:1@86..89#1#:#0:1@89..90#1# #0:1@89..90#1#::#0:1@91..92#1#std#0:1@93..96#1#::#0:1@96..97#1#collections#0:1@98..109#1#::#0:1@109..110#1#HashSet#0:1@111..118#1#<#0:1@118..119#1#(#0:1@119..120#1#)#0:1@120..121#1#>#0:1@121..122#1#,#0:1@122..123#1#
+ map#0:1@86..89#1#:#0:1@89..90#1# #0:1@89..90#1#::#0:1@91..93#1#std#0:1@93..96#1#::#0:1@96..98#1#collections#0:1@98..109#1#::#0:1@109..111#1#HashSet#0:1@111..118#1#<#0:1@118..119#1#(#0:1@119..120#1#)#0:1@120..121#1#>#0:1@121..122#1#,#0:1@122..123#1#
}#0:1@132..133#1#
"#]],
);


@ -6,7 +6,7 @@
use std::{cmp::Ordering, iter, mem, ops::Not};
use base_db::{CrateId, CrateOrigin, Dependency, LangCrateOrigin};
- use cfg::{CfgExpr, CfgOptions};
+ use cfg::{CfgAtom, CfgExpr, CfgOptions};
use either::Either;
use hir_expand::{
attrs::{Attr, AttrId},
@ -1324,13 +1324,21 @@ impl DefCollector<'_> {
};
// Skip #[test]/#[bench] expansion, which would merely result in more memory usage
- // due to duplicating functions into macro expansions
+ // due to duplicating functions into macro expansions, but only if `cfg(test)` is active,
+ // otherwise they are expanded to nothing and this can impact e.g. diagnostics (due to things
+ // being cfg'ed out).
+ // Ideally we will just expand them to nothing here. But we are only collecting macro calls,
+ // not expanding them, so we have no way to do that.
if matches!(
def.kind,
MacroDefKind::BuiltInAttr(_, expander)
if expander.is_test() || expander.is_bench()
) {
- return recollect_without(self);
+ let test_is_active =
+     self.cfg_options.check_atom(&CfgAtom::Flag(sym::test.clone()));
+ if test_is_active {
+     return recollect_without(self);
+ }
}
let call_id = || {


@ -4,6 +4,8 @@ use span::{MacroCallId, Span};
use crate::{db::ExpandDatabase, name, tt, ExpandResult, MacroCallKind};
use super::quote;
macro_rules! register_builtin {
($(($name:ident, $variant:ident) => $expand:ident),* ) => {
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@ -52,15 +54,15 @@ impl BuiltinAttrExpander {
}
register_builtin! {
- (bench, Bench) => dummy_attr_expand,
+ (bench, Bench) => dummy_gate_test_expand,
(cfg_accessible, CfgAccessible) => dummy_attr_expand,
(cfg_eval, CfgEval) => dummy_attr_expand,
(derive, Derive) => derive_expand,
// derive const is equivalent to derive for our purposes.
(derive_const, DeriveConst) => derive_expand,
(global_allocator, GlobalAllocator) => dummy_attr_expand,
- (test, Test) => dummy_attr_expand,
- (test_case, TestCase) => dummy_attr_expand
+ (test, Test) => dummy_gate_test_expand,
+ (test_case, TestCase) => dummy_gate_test_expand
}
pub fn find_builtin_attr(ident: &name::Name) -> Option<BuiltinAttrExpander> {
@ -76,6 +78,19 @@ fn dummy_attr_expand(
ExpandResult::ok(tt.clone())
}
+ fn dummy_gate_test_expand(
+     _db: &dyn ExpandDatabase,
+     _id: MacroCallId,
+     tt: &tt::Subtree,
+     span: Span,
+ ) -> ExpandResult<tt::Subtree> {
+     let result = quote::quote! { span=>
+         #[cfg(test)]
+         #tt
+     };
+     ExpandResult::ok(result)
+ }
/// We generate a very specific expansion here, as we do not actually expand the `#[derive]` attribute
/// itself in name res, but we do want to expand it to something for the IDE layer, so that the input
/// derive attributes can be downmapped, and resolved as proper paths.
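In effect, the `#[test]`, `#[bench]`, and `#[test_case]` attribute expanders no longer pass the annotated item through unchanged; they re-emit it behind `#[cfg(test)]`. A hedged before/after sketch (token trees simplified for illustration):

    // Item handed to the expander (the expanded attribute itself is consumed):
    fn check_addition() { assert_eq!(1 + 1, 2); }

    // What dummy_gate_test_expand emits: the same item gated behind cfg(test),
    // so outside test builds it is cfg'd out instead of being duplicated.
    #[cfg(test)]
    fn check_addition() { assert_eq!(1 + 1, 2); }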


@ -16,7 +16,10 @@ use crate::{
cfg_process,
declarative::DeclarativeMacroExpander,
fixup::{self, SyntaxFixupUndoInfo},
- hygiene::{span_with_call_site_ctxt, span_with_def_site_ctxt, span_with_mixed_site_ctxt},
+ hygiene::{
+     span_with_call_site_ctxt, span_with_def_site_ctxt, span_with_mixed_site_ctxt,
+     SyntaxContextExt as _,
+ },
proc_macro::ProcMacros,
span_map::{RealSpanMap, SpanMap, SpanMapRef},
tt, AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander,
@ -300,14 +303,16 @@ pub fn expand_speculative(
token_tree_to_syntax_node(&speculative_expansion.value, expand_to, loc.def.edition);
let syntax_node = node.syntax_node();
- let token = rev_tmap
+ let (token, _) = rev_tmap
.ranges_with_span(span_map.span_for_range(token_to_map.text_range()))
- .filter_map(|range| syntax_node.covering_element(range).into_token())
- .min_by_key(|t| {
-     // prefer tokens of the same kind and text
+ .filter_map(|(range, ctx)| syntax_node.covering_element(range).into_token().zip(Some(ctx)))
+ .min_by_key(|(t, ctx)| {
+     // prefer tokens of the same kind and text, as well as non opaque marked ones
// Note the inversion of the score here, as we want to prefer the first token in case
// of all tokens having the same score
- (t.kind() != token_to_map.kind()) as u8 + 2 * ((t.text() != token_to_map.text()) as u8)
+ ctx.is_opaque(db) as u8
+     + 2 * (t.kind() != token_to_map.kind()) as u8
+     + 4 * ((t.text() != token_to_map.text()) as u8)
})?;
Some((node.syntax_node(), token))
}
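The weights make the new ranking explicit: a text mismatch costs 4, a kind mismatch 2, and an opaque hygiene context 1, with the minimum score winning, so a same-text token always beats a merely same-kind one, which in turn beats transparency alone. The same idea as a standalone sketch (illustrative names):

    // Lower is better: text match dominates, then kind, then non-opaque hygiene.
    fn score(is_opaque: bool, kind_matches: bool, text_matches: bool) -> u8 {
        is_opaque as u8 + 2 * ((!kind_matches) as u8) + 4 * ((!text_matches) as u8)
    }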


@ -151,6 +151,7 @@ pub trait SyntaxContextExt {
fn remove_mark(&mut self, db: &dyn ExpandDatabase) -> (Option<MacroCallId>, Transparency);
fn outer_mark(self, db: &dyn ExpandDatabase) -> (Option<MacroCallId>, Transparency);
fn marks(self, db: &dyn ExpandDatabase) -> Vec<(MacroCallId, Transparency)>;
+ fn is_opaque(self, db: &dyn ExpandDatabase) -> bool;
}
impl SyntaxContextExt for SyntaxContextId {
@ -177,6 +178,9 @@ impl SyntaxContextExt for SyntaxContextId {
marks.reverse();
marks
}
+ fn is_opaque(self, db: &dyn ExpandDatabase) -> bool {
+     !self.is_root() && db.lookup_intern_syntax_context(self).outer_transparency.is_opaque()
+ }
}
// FIXME: Make this a SyntaxContextExt method once we have RPIT


@ -25,6 +25,7 @@ mod prettify_macro_expansion_;
use attrs::collect_attrs;
use rustc_hash::FxHashMap;
+ use stdx::TupleExt;
use triomphe::Arc;
use std::hash::Hash;
@ -772,14 +773,15 @@ impl ExpansionInfo {
/// Maps the passed in file range down into a macro expansion if it is the input to a macro call.
///
/// Note this does a linear search through the entire backing vector of the spanmap.
+ // FIXME: Consider adding a reverse map to ExpansionInfo to get rid of the linear search which
+ // potentially results in quadratic look ups (notably this might improve semantic highlighting perf)
pub fn map_range_down_exact(
&self,
span: Span,
- ) -> Option<InMacroFile<impl Iterator<Item = SyntaxToken> + '_>> {
-     let tokens = self
-         .exp_map
-         .ranges_with_span_exact(span)
-         .flat_map(move |range| self.expanded.value.covering_element(range).into_token());
+ ) -> Option<InMacroFile<impl Iterator<Item = (SyntaxToken, SyntaxContextId)> + '_>> {
+     let tokens = self.exp_map.ranges_with_span_exact(span).flat_map(move |(range, ctx)| {
+         self.expanded.value.covering_element(range).into_token().zip(Some(ctx))
+     });
Some(InMacroFile::new(self.expanded.file_id, tokens))
}
@ -791,11 +793,10 @@ impl ExpansionInfo {
pub fn map_range_down(
&self,
span: Span,
- ) -> Option<InMacroFile<impl Iterator<Item = SyntaxToken> + '_>> {
-     let tokens = self
-         .exp_map
-         .ranges_with_span(span)
-         .flat_map(move |range| self.expanded.value.covering_element(range).into_token());
+ ) -> Option<InMacroFile<impl Iterator<Item = (SyntaxToken, SyntaxContextId)> + '_>> {
+     let tokens = self.exp_map.ranges_with_span(span).flat_map(move |(range, ctx)| {
+         self.expanded.value.covering_element(range).into_token().zip(Some(ctx))
+     });
Some(InMacroFile::new(self.expanded.file_id, tokens))
}
@ -845,7 +846,8 @@ impl ExpansionInfo {
self.arg.file_id,
arg_map
.ranges_with_span_exact(span)
- .filter(|range| range.intersect(arg_range).is_some())
+ .filter(|(range, _)| range.intersect(arg_range).is_some())
+ .map(TupleExt::head)
.collect(),
)
}


@ -382,8 +382,9 @@ impl chalk_solve::RustIrDatabase<Interner> for ChalkContext<'_> {
}
fn is_object_safe(&self, trait_id: chalk_ir::TraitId<Interner>) -> bool {
+ // FIXME: When cargo is updated, change to dyn_compatibility
let trait_ = from_chalk_trait_id(trait_id);
- crate::object_safety::object_safety(self.db, trait_).is_none()
+ crate::dyn_compatibility::dyn_compatibility(self.db, trait_).is_none()
}
fn closure_kind(


@ -20,11 +20,11 @@ use triomphe::Arc;
use crate::{
chalk_db,
consteval::ConstEvalError,
+ dyn_compatibility::DynCompatibilityViolation,
layout::{Layout, LayoutError},
lower::{GenericDefaults, GenericPredicates},
method_resolution::{InherentImpls, TraitImpls, TyFingerprint},
mir::{BorrowckResult, MirBody, MirLowerError},
- object_safety::ObjectSafetyViolation,
Binders, ClosureId, Const, FnDefId, ImplTraitId, ImplTraits, InferenceResult, Interner,
PolyFnSig, Substitution, TraitEnvironment, TraitRef, Ty, TyDefId, ValueTyDefId,
};
@ -108,8 +108,8 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
#[salsa::invoke(crate::layout::target_data_layout_query)]
fn target_data_layout(&self, krate: CrateId) -> Result<Arc<TargetDataLayout>, Arc<str>>;
- #[salsa::invoke(crate::object_safety::object_safety_of_trait_query)]
- fn object_safety_of_trait(&self, trait_: TraitId) -> Option<ObjectSafetyViolation>;
+ #[salsa::invoke(crate::dyn_compatibility::dyn_compatibility_of_trait_query)]
+ fn dyn_compatibility_of_trait(&self, trait_: TraitId) -> Option<DynCompatibilityViolation>;
#[salsa::invoke(crate::lower::ty_query)]
#[salsa::cycle(crate::lower::ty_recover)]
@ -280,8 +280,8 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
}
#[test]
- fn hir_database_is_object_safe() {
-     fn _assert_object_safe(_: &dyn HirDatabase) {}
+ fn hir_database_is_dyn_compatible() {
+     fn _assert_dyn_compatible(_: &dyn HirDatabase) {}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]


@ -58,7 +58,7 @@ impl fmt::Display for CaseType {
let repr = match self {
CaseType::LowerSnakeCase => "snake_case",
CaseType::UpperSnakeCase => "UPPER_SNAKE_CASE",
CaseType::UpperCamelCase => "CamelCase",
CaseType::UpperCamelCase => "UpperCamelCase",
};
repr.fmt(f)


@ -111,7 +111,7 @@ mod tests {
check(to_lower_snake_case, "lower_snake_case", expect![[""]]);
check(to_lower_snake_case, "UPPER_SNAKE_CASE", expect![["upper_snake_case"]]);
check(to_lower_snake_case, "Weird_Case", expect![["weird_case"]]);
check(to_lower_snake_case, "CamelCase", expect![["camel_case"]]);
check(to_lower_snake_case, "UpperCamelCase", expect![["upper_camel_case"]]);
check(to_lower_snake_case, "lowerCamelCase", expect![["lower_camel_case"]]);
check(to_lower_snake_case, "a", expect![[""]]);
check(to_lower_snake_case, "abc", expect![[""]]);
@ -121,8 +121,8 @@ mod tests {
#[test]
fn test_to_camel_case() {
check(to_camel_case, "CamelCase", expect![[""]]);
check(to_camel_case, "CamelCase_", expect![[""]]);
check(to_camel_case, "UpperCamelCase", expect![[""]]);
check(to_camel_case, "UpperCamelCase_", expect![[""]]);
check(to_camel_case, "_CamelCase", expect![[""]]);
check(to_camel_case, "lowerCamelCase", expect![["LowerCamelCase"]]);
check(to_camel_case, "lower_snake_case", expect![["LowerSnakeCase"]]);
@ -143,7 +143,7 @@ mod tests {
check(to_upper_snake_case, "UPPER_SNAKE_CASE", expect![[""]]);
check(to_upper_snake_case, "lower_snake_case", expect![["LOWER_SNAKE_CASE"]]);
check(to_upper_snake_case, "Weird_Case", expect![["WEIRD_CASE"]]);
check(to_upper_snake_case, "CamelCase", expect![["CAMEL_CASE"]]);
check(to_upper_snake_case, "UpperCamelCase", expect![["UPPER_CAMEL_CASE"]]);
check(to_upper_snake_case, "lowerCamelCase", expect![["LOWER_CAMEL_CASE"]]);
check(to_upper_snake_case, "A", expect![[""]]);
check(to_upper_snake_case, "ABC", expect![[""]]);


@ -1,4 +1,4 @@
- //! Compute the object-safety of a trait
+ //! Compute the dyn-compatibility of a trait
use std::ops::ControlFlow;
@ -28,14 +28,14 @@ use crate::{
};
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
- pub enum ObjectSafetyViolation {
+ pub enum DynCompatibilityViolation {
SizedSelf,
SelfReferential,
Method(FunctionId, MethodViolationCode),
AssocConst(ConstId),
GAT(TypeAliasId),
// This doesn't exist in rustc, but added for better visualization
- HasNonSafeSuperTrait(TraitId),
+ HasNonCompatibleSuperTrait(TraitId),
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
@ -50,70 +50,73 @@ pub enum MethodViolationCode {
UndispatchableReceiver,
}
- pub fn object_safety(db: &dyn HirDatabase, trait_: TraitId) -> Option<ObjectSafetyViolation> {
+ pub fn dyn_compatibility(
+     db: &dyn HirDatabase,
+     trait_: TraitId,
+ ) -> Option<DynCompatibilityViolation> {
for super_trait in all_super_traits(db.upcast(), trait_).into_iter().skip(1).rev() {
- if db.object_safety_of_trait(super_trait).is_some() {
-     return Some(ObjectSafetyViolation::HasNonSafeSuperTrait(super_trait));
+ if db.dyn_compatibility_of_trait(super_trait).is_some() {
+     return Some(DynCompatibilityViolation::HasNonCompatibleSuperTrait(super_trait));
}
}
- db.object_safety_of_trait(trait_)
+ db.dyn_compatibility_of_trait(trait_)
}
- pub fn object_safety_with_callback<F>(
+ pub fn dyn_compatibility_with_callback<F>(
db: &dyn HirDatabase,
trait_: TraitId,
cb: &mut F,
) -> ControlFlow<()>
where
- F: FnMut(ObjectSafetyViolation) -> ControlFlow<()>,
+ F: FnMut(DynCompatibilityViolation) -> ControlFlow<()>,
{
for super_trait in all_super_traits(db.upcast(), trait_).into_iter().skip(1).rev() {
- if db.object_safety_of_trait(super_trait).is_some() {
-     cb(ObjectSafetyViolation::HasNonSafeSuperTrait(trait_))?;
+ if db.dyn_compatibility_of_trait(super_trait).is_some() {
+     cb(DynCompatibilityViolation::HasNonCompatibleSuperTrait(trait_))?;
}
}
- object_safety_of_trait_with_callback(db, trait_, cb)
+ dyn_compatibility_of_trait_with_callback(db, trait_, cb)
}
- pub fn object_safety_of_trait_with_callback<F>(
+ pub fn dyn_compatibility_of_trait_with_callback<F>(
db: &dyn HirDatabase,
trait_: TraitId,
cb: &mut F,
) -> ControlFlow<()>
where
- F: FnMut(ObjectSafetyViolation) -> ControlFlow<()>,
+ F: FnMut(DynCompatibilityViolation) -> ControlFlow<()>,
{
// Check whether this has a `Sized` bound
if generics_require_sized_self(db, trait_.into()) {
- cb(ObjectSafetyViolation::SizedSelf)?;
+ cb(DynCompatibilityViolation::SizedSelf)?;
}
// Check if there exist bounds that reference self
if predicates_reference_self(db, trait_) {
- cb(ObjectSafetyViolation::SelfReferential)?;
+ cb(DynCompatibilityViolation::SelfReferential)?;
}
if bounds_reference_self(db, trait_) {
- cb(ObjectSafetyViolation::SelfReferential)?;
+ cb(DynCompatibilityViolation::SelfReferential)?;
}
// rustc checks for non-lifetime binders here, but we don't support HRTB yet
let trait_data = db.trait_data(trait_);
for (_, assoc_item) in &trait_data.items {
- object_safety_violation_for_assoc_item(db, trait_, *assoc_item, cb)?;
+ dyn_compatibility_violation_for_assoc_item(db, trait_, *assoc_item, cb)?;
}
ControlFlow::Continue(())
}
- pub fn object_safety_of_trait_query(
+ pub fn dyn_compatibility_of_trait_query(
db: &dyn HirDatabase,
trait_: TraitId,
- ) -> Option<ObjectSafetyViolation> {
+ ) -> Option<DynCompatibilityViolation> {
let mut res = None;
- object_safety_of_trait_with_callback(db, trait_, &mut |osv| {
+ dyn_compatibility_of_trait_with_callback(db, trait_, &mut |osv| {
res = Some(osv);
ControlFlow::Break(())
});
@ -321,14 +324,14 @@ fn contains_illegal_self_type_reference<T: TypeVisitable<Interner>>(
t.visit_with(visitor.as_dyn(), outer_binder).is_break()
}
- fn object_safety_violation_for_assoc_item<F>(
+ fn dyn_compatibility_violation_for_assoc_item<F>(
db: &dyn HirDatabase,
trait_: TraitId,
item: AssocItemId,
cb: &mut F,
) -> ControlFlow<()>
where
- F: FnMut(ObjectSafetyViolation) -> ControlFlow<()>,
+ F: FnMut(DynCompatibilityViolation) -> ControlFlow<()>,
{
// Any item that has a `Self : Sized` requisite is otherwise
// exempt from the regulations.
@ -337,10 +340,10 @@ where
}
match item {
- AssocItemId::ConstId(it) => cb(ObjectSafetyViolation::AssocConst(it)),
+ AssocItemId::ConstId(it) => cb(DynCompatibilityViolation::AssocConst(it)),
AssocItemId::FunctionId(it) => {
virtual_call_violations_for_method(db, trait_, it, &mut |mvc| {
- cb(ObjectSafetyViolation::Method(it, mvc))
+ cb(DynCompatibilityViolation::Method(it, mvc))
})
}
AssocItemId::TypeAliasId(it) => {
@ -350,7 +353,7 @@ where
} else {
let generic_params = db.generic_params(item.into());
if !generic_params.is_empty() {
- cb(ObjectSafetyViolation::GAT(it))
+ cb(DynCompatibilityViolation::GAT(it))
} else {
ControlFlow::Continue(())
}
@ -469,7 +472,7 @@ fn receiver_is_dispatchable(
return false;
};
- // `self: Self` can't be dispatched on, but this is already considered object safe.
+ // `self: Self` can't be dispatched on, but this is already considered dyn compatible
// See rustc's comment on https://github.com/rust-lang/rust/blob/3f121b9461cce02a703a0e7e450568849dfaa074/compiler/rustc_trait_selection/src/traits/object_safety.rs#L433-L437
if sig
.skip_binders()
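To ground the renamed violations in source terms, two minimal traits that would be flagged (illustrative; the test fixtures in the next file exercise the same cases):

    // Reported as DynCompatibilityViolation::GAT: a generic associated type
    // cannot be projected through a `dyn` object.
    trait GatTrait {
        type Assoc<'a>;
    }

    // Reported as DynCompatibilityViolation::AssocConst: associated consts
    // have no vtable slot.
    trait HasConst {
        const X: usize;
    }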


@ -5,29 +5,29 @@ use rustc_hash::{FxHashMap, FxHashSet};
use syntax::ToSmolStr;
use test_fixture::WithFixture;
- use crate::{object_safety::object_safety_with_callback, test_db::TestDB};
+ use crate::{dyn_compatibility::dyn_compatibility_with_callback, test_db::TestDB};
use super::{
+ DynCompatibilityViolation,
MethodViolationCode::{self, *},
- ObjectSafetyViolation,
};
- use ObjectSafetyViolationKind::*;
+ use DynCompatibilityViolationKind::*;
#[allow(clippy::upper_case_acronyms)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
- enum ObjectSafetyViolationKind {
+ enum DynCompatibilityViolationKind {
SizedSelf,
SelfReferential,
Method(MethodViolationCode),
AssocConst,
GAT,
- HasNonSafeSuperTrait,
+ HasNonCompatibleSuperTrait,
}
- fn check_object_safety<'a>(
+ fn check_dyn_compatibility<'a>(
ra_fixture: &str,
- expected: impl IntoIterator<Item = (&'a str, Vec<ObjectSafetyViolationKind>)>,
+ expected: impl IntoIterator<Item = (&'a str, Vec<DynCompatibilityViolationKind>)>,
) {
let mut expected: FxHashMap<_, _> =
expected.into_iter().map(|(id, osvs)| (id, FxHashSet::from_iter(osvs))).collect();
@ -53,18 +53,20 @@ fn check_object_safety<'a>(
continue;
};
let mut osvs = FxHashSet::default();
- object_safety_with_callback(&db, trait_id, &mut |osv| {
+ dyn_compatibility_with_callback(&db, trait_id, &mut |osv| {
osvs.insert(match osv {
- ObjectSafetyViolation::SizedSelf => SizedSelf,
- ObjectSafetyViolation::SelfReferential => SelfReferential,
- ObjectSafetyViolation::Method(_, mvc) => Method(mvc),
- ObjectSafetyViolation::AssocConst(_) => AssocConst,
- ObjectSafetyViolation::GAT(_) => GAT,
- ObjectSafetyViolation::HasNonSafeSuperTrait(_) => HasNonSafeSuperTrait,
+ DynCompatibilityViolation::SizedSelf => SizedSelf,
+ DynCompatibilityViolation::SelfReferential => SelfReferential,
+ DynCompatibilityViolation::Method(_, mvc) => Method(mvc),
+ DynCompatibilityViolation::AssocConst(_) => AssocConst,
+ DynCompatibilityViolation::GAT(_) => GAT,
+ DynCompatibilityViolation::HasNonCompatibleSuperTrait(_) => {
+     HasNonCompatibleSuperTrait
+ }
});
ControlFlow::Continue(())
});
assert_eq!(osvs, expected, "Object safety violations for `{name}` do not match;");
assert_eq!(osvs, expected, "Dyn Compatibility violations for `{name}` do not match;");
}
let remains: Vec<_> = expected.keys().collect();
@ -73,7 +75,7 @@ fn check_object_safety<'a>(
#[test]
fn item_bounds_can_reference_self() {
- check_object_safety(
+ check_dyn_compatibility(
r#"
//- minicore: eq
pub trait Foo {
@ -88,7 +90,7 @@ pub trait Foo {
#[test]
fn associated_consts() {
- check_object_safety(
+ check_dyn_compatibility(
r#"
trait Bar {
const X: usize;
@ -100,7 +102,7 @@ trait Bar {
#[test]
fn bounds_reference_self() {
- check_object_safety(
+ check_dyn_compatibility(
r#"
//- minicore: eq
trait X {
@ -113,7 +115,7 @@ trait X {
#[test]
fn by_value_self() {
- check_object_safety(
+ check_dyn_compatibility(
r#"
//- minicore: dispatch_from_dyn
trait Bar {
@ -135,7 +137,7 @@ trait Quux {
#[test]
fn generic_methods() {
- check_object_safety(
+ check_dyn_compatibility(
r#"
//- minicore: dispatch_from_dyn
trait Bar {
@ -157,7 +159,7 @@ trait Qax {
#[test]
fn mentions_self() {
- check_object_safety(
+ check_dyn_compatibility(
r#"
//- minicore: dispatch_from_dyn
trait Bar {
@ -182,7 +184,7 @@ trait Quux {
#[test]
fn no_static() {
- check_object_safety(
+ check_dyn_compatibility(
r#"
//- minicore: dispatch_from_dyn
trait Foo {
@ -195,7 +197,7 @@ trait Foo {
#[test]
fn sized_self() {
- check_object_safety(
+ check_dyn_compatibility(
r#"
//- minicore: dispatch_from_dyn
trait Bar: Sized {
@ -205,7 +207,7 @@ trait Bar: Sized {
[("Bar", vec![SizedSelf])],
);
- check_object_safety(
+ check_dyn_compatibility(
r#"
//- minicore: dispatch_from_dyn
trait Bar
@ -220,7 +222,7 @@ trait Bar
#[test]
fn supertrait_gat() {
- check_object_safety(
+ check_dyn_compatibility(
r#"
//- minicore: dispatch_from_dyn
trait GatTrait {
@ -229,13 +231,13 @@ trait GatTrait {
trait SuperTrait<T>: GatTrait {}
"#,
[("GatTrait", vec![GAT]), ("SuperTrait", vec![HasNonSafeSuperTrait])],
[("GatTrait", vec![GAT]), ("SuperTrait", vec![HasNonCompatibleSuperTrait])],
);
}
#[test]
fn supertrait_mentions_self() {
- check_object_safety(
+ check_dyn_compatibility(
r#"
//- minicore: dispatch_from_dyn
trait Bar<T> {
@ -251,7 +253,7 @@ trait Baz : Bar<Self> {
#[test]
fn rustc_issue_19538() {
- check_object_safety(
+ check_dyn_compatibility(
r#"
//- minicore: dispatch_from_dyn
trait Foo {
@ -260,13 +262,13 @@ trait Foo {
trait Bar: Foo {}
"#,
[("Foo", vec![Method(Generic)]), ("Bar", vec![HasNonSafeSuperTrait])],
[("Foo", vec![Method(Generic)]), ("Bar", vec![HasNonCompatibleSuperTrait])],
);
}
#[test]
fn rustc_issue_22040() {
- check_object_safety(
+ check_dyn_compatibility(
r#"
//- minicore: fmt, eq, dispatch_from_dyn
use core::fmt::Debug;
@ -281,7 +283,7 @@ trait Expr: Debug + PartialEq {
#[test]
fn rustc_issue_102762() {
- check_object_safety(
+ check_dyn_compatibility(
r#"
//- minicore: future, send, sync, dispatch_from_dyn, deref
use core::pin::Pin;
@ -313,7 +315,7 @@ pub trait Fetcher: Send + Sync {
#[test]
fn rustc_issue_102933() {
- check_object_safety(
+ check_dyn_compatibility(
r#"
//- minicore: future, dispatch_from_dyn, deref
use core::future::Future;
@ -351,7 +353,7 @@ pub trait B2: Service<Response = i32> + B1 {
#[test]
fn rustc_issue_106247() {
- check_object_safety(
+ check_dyn_compatibility(
r#"
//- minicore: sync, dispatch_from_dyn
pub trait Trait {
@ -363,8 +365,8 @@ pub trait Trait {
}
#[test]
- fn std_error_is_object_safe() {
-     check_object_safety(
+ fn std_error_is_dyn_compatible() {
+     check_dyn_compatibility(
r#"
//- minicore: fmt, dispatch_from_dyn
trait Erased<'a>: 'a {}
@ -380,14 +382,14 @@ pub trait Error: core::fmt::Debug + core::fmt::Display {
}
#[test]
- fn lifetime_gat_is_object_unsafe() {
-     check_object_safety(
+ fn lifetime_gat_is_dyn_incompatible() {
+     check_dyn_compatibility(
r#"
//- minicore: dispatch_from_dyn
trait Foo {
type Bar<'a>;
}
"#,
[("Foo", vec![ObjectSafetyViolationKind::GAT])],
[("Foo", vec![DynCompatibilityViolationKind::GAT])],
);
}


@ -38,11 +38,11 @@ pub mod consteval;
pub mod db;
pub mod diagnostics;
pub mod display;
+ pub mod dyn_compatibility;
pub mod lang_items;
pub mod layout;
pub mod method_resolution;
pub mod mir;
- pub mod object_safety;
pub mod primitive;
pub mod traits;


@ -386,82 +386,91 @@ fn ever_initialized_map(
    fn dfs(
        db: &dyn HirDatabase,
        body: &MirBody,
-       b: BasicBlockId,
        l: LocalId,
+       stack: &mut Vec<BasicBlockId>,
        result: &mut ArenaMap<BasicBlockId, ArenaMap<LocalId, bool>>,
    ) {
-       let mut is_ever_initialized = result[b][l]; // It must be filled, as we use it as mark for dfs
-       let block = &body.basic_blocks[b];
-       for statement in &block.statements {
-           match &statement.kind {
-               StatementKind::Assign(p, _) => {
-                   if p.projection.lookup(&body.projection_store).is_empty() && p.local == l {
-                       is_ever_initialized = true;
-                   }
-               }
-               StatementKind::StorageDead(p) => {
-                   if *p == l {
-                       is_ever_initialized = false;
-                   }
-               }
-               StatementKind::Deinit(_)
-               | StatementKind::FakeRead(_)
-               | StatementKind::Nop
-               | StatementKind::StorageLive(_) => (),
-           }
-       }
-       let Some(terminator) = &block.terminator else {
-           never!(
-               "Terminator should be none only in construction.\nThe body:\n{}",
-               body.pretty_print(db)
-           );
-           return;
-       };
-       let mut process = |target, is_ever_initialized| {
-           if !result[target].contains_idx(l) || !result[target][l] && is_ever_initialized {
-               result[target].insert(l, is_ever_initialized);
-               dfs(db, body, target, l, result);
-           }
-       };
-       match &terminator.kind {
-           TerminatorKind::Goto { target } => process(*target, is_ever_initialized),
-           TerminatorKind::SwitchInt { targets, .. } => {
-               targets.all_targets().iter().for_each(|&it| process(it, is_ever_initialized));
-           }
-           TerminatorKind::UnwindResume
-           | TerminatorKind::Abort
-           | TerminatorKind::Return
-           | TerminatorKind::Unreachable => (),
-           TerminatorKind::Call { target, cleanup, destination, .. } => {
-               if destination.projection.lookup(&body.projection_store).is_empty()
-                   && destination.local == l
-               {
-                   is_ever_initialized = true;
-               }
-               target.iter().chain(cleanup).for_each(|&it| process(it, is_ever_initialized));
-           }
-           TerminatorKind::Drop { target, unwind, place: _ } => {
-               iter::once(target).chain(unwind).for_each(|&it| process(it, is_ever_initialized));
-           }
-           TerminatorKind::DropAndReplace { .. }
-           | TerminatorKind::Assert { .. }
-           | TerminatorKind::Yield { .. }
-           | TerminatorKind::CoroutineDrop
-           | TerminatorKind::FalseEdge { .. }
-           | TerminatorKind::FalseUnwind { .. } => {
-               never!("We don't emit these MIR terminators yet");
-           }
-       }
+       while let Some(b) = stack.pop() {
+           let mut is_ever_initialized = result[b][l]; // It must be filled, as we use it as mark for dfs
+           let block = &body.basic_blocks[b];
+           for statement in &block.statements {
+               match &statement.kind {
+                   StatementKind::Assign(p, _) => {
+                       if p.projection.lookup(&body.projection_store).is_empty() && p.local == l {
+                           is_ever_initialized = true;
+                       }
+                   }
+                   StatementKind::StorageDead(p) => {
+                       if *p == l {
+                           is_ever_initialized = false;
+                       }
+                   }
+                   StatementKind::Deinit(_)
+                   | StatementKind::FakeRead(_)
+                   | StatementKind::Nop
+                   | StatementKind::StorageLive(_) => (),
+               }
+           }
+           let Some(terminator) = &block.terminator else {
+               never!(
+                   "Terminator should be none only in construction.\nThe body:\n{}",
+                   body.pretty_print(db)
+               );
+               return;
+           };
+           let mut process = |target, is_ever_initialized| {
+               if !result[target].contains_idx(l) || !result[target][l] && is_ever_initialized {
+                   result[target].insert(l, is_ever_initialized);
+                   stack.push(target);
+               }
+           };
+           match &terminator.kind {
+               TerminatorKind::Goto { target } => process(*target, is_ever_initialized),
+               TerminatorKind::SwitchInt { targets, .. } => {
+                   targets.all_targets().iter().for_each(|&it| process(it, is_ever_initialized));
+               }
+               TerminatorKind::UnwindResume
+               | TerminatorKind::Abort
+               | TerminatorKind::Return
+               | TerminatorKind::Unreachable => (),
+               TerminatorKind::Call { target, cleanup, destination, .. } => {
+                   if destination.projection.lookup(&body.projection_store).is_empty()
+                       && destination.local == l
+                   {
+                       is_ever_initialized = true;
+                   }
+                   target.iter().chain(cleanup).for_each(|&it| process(it, is_ever_initialized));
+               }
+               TerminatorKind::Drop { target, unwind, place: _ } => {
+                   iter::once(target)
+                       .chain(unwind)
+                       .for_each(|&it| process(it, is_ever_initialized));
+               }
+               TerminatorKind::DropAndReplace { .. }
+               | TerminatorKind::Assert { .. }
+               | TerminatorKind::Yield { .. }
+               | TerminatorKind::CoroutineDrop
+               | TerminatorKind::FalseEdge { .. }
+               | TerminatorKind::FalseUnwind { .. } => {
+                   never!("We don't emit these MIR terminators yet");
+               }
+           }
+       }
    }
+   let mut stack = Vec::new();
    for &l in &body.param_locals {
        result[body.start_block].insert(l, true);
-       dfs(db, body, body.start_block, l, &mut result);
+       stack.clear();
+       stack.push(body.start_block);
+       dfs(db, body, l, &mut stack, &mut result);
    }
    for l in body.locals.iter().map(|it| it.0) {
        db.unwind_if_cancelled();
        if !result[body.start_block].contains_idx(l) {
            result[body.start_block].insert(l, false);
-           dfs(db, body, body.start_block, l, &mut result);
+           stack.clear();
+           stack.push(body.start_block);
+           dfs(db, body, l, &mut stack, &mut result);
        }
    }
    result
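The rewrite replaces recursion with an explicit worklist: `process` now pushes successor blocks onto `stack` instead of recursing, so deeply nested control flow can no longer overflow the call stack. A minimal self-contained sketch of the same transformation on a generic graph (illustrative names):

    // Iterative depth-first traversal with an explicit stack,
    // equivalent to the recursive form but with bounded call depth.
    struct Graph { edges: Vec<Vec<usize>> }

    fn visit(g: &Graph, start: usize, seen: &mut Vec<bool>) {
        let mut stack = vec![start];
        seen[start] = true;
        while let Some(n) = stack.pop() {
            for &m in &g.edges[n] {
                if !seen[m] {
                    seen[m] = true; // mark before pushing, mirroring result[target].insert
                    stack.push(m);
                }
            }
        }
    }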


@ -144,9 +144,9 @@ pub use {
hir_ty::{
consteval::ConstEvalError,
display::{ClosureStyle, HirDisplay, HirDisplayError, HirWrite},
+ dyn_compatibility::{DynCompatibilityViolation, MethodViolationCode},
layout::LayoutError,
mir::{MirEvalError, MirLowerError},
- object_safety::{MethodViolationCode, ObjectSafetyViolation},
CastError, FnAbi, PointerCast, Safety,
},
// FIXME: Properly encapsulate mir
@ -497,10 +497,9 @@ impl Module {
/// Finds a parent module.
pub fn parent(self, db: &dyn HirDatabase) -> Option<Module> {
- // FIXME: handle block expressions as modules (their parent is in a different DefMap)
let def_map = self.id.def_map(db.upcast());
- let parent_id = def_map[self.id.local_id].parent?;
- Some(Module { id: def_map.module_id(parent_id) })
+ let parent_id = def_map.containing_module(self.id.local_id)?;
+ Some(Module { id: parent_id })
}
/// Finds nearest non-block ancestor `Module` (`self` included).
@ -557,7 +556,7 @@ impl Module {
acc: &mut Vec<AnyDiagnostic>,
style_lints: bool,
) {
let _p = tracing::info_span!("Module::diagnostics", name = ?self.name(db)).entered();
let _p = tracing::info_span!("diagnostics", name = ?self.name(db)).entered();
let edition = db.crate_graph()[self.id.krate()].edition;
let def_map = self.id.def_map(db.upcast());
for diag in def_map.diagnostics() {
@ -2690,8 +2689,8 @@ impl Trait {
.count()
}
- pub fn object_safety(&self, db: &dyn HirDatabase) -> Option<ObjectSafetyViolation> {
-     hir_ty::object_safety::object_safety(db, self.id)
+ pub fn dyn_compatibility(&self, db: &dyn HirDatabase) -> Option<DynCompatibilityViolation> {
+     hir_ty::dyn_compatibility::dyn_compatibility(db, self.id)
}
fn all_macro_calls(&self, db: &dyn HirDatabase) -> Box<[(AstId<ast::Item>, MacroCallId)]> {


@ -24,6 +24,7 @@ use hir_expand::{
builtin::{BuiltinFnLikeExpander, EagerExpander},
db::ExpandDatabase,
files::InRealFile,
+ hygiene::SyntaxContextExt as _,
inert_attr_macro::find_builtin_attr_idx,
name::AsName,
FileRange, InMacroFile, MacroCallId, MacroFileId, MacroFileIdExt,
@ -32,13 +33,13 @@ use intern::Symbol;
use itertools::Itertools;
use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::{smallvec, SmallVec};
- use span::{EditionedFileId, FileId, HirFileIdRepr};
+ use span::{EditionedFileId, FileId, HirFileIdRepr, SyntaxContextId};
use stdx::TupleExt;
use syntax::{
algo::skip_trivia_token,
- ast::{self, HasAttrs as _, HasGenericParams, HasLoopBody, IsString as _},
- match_ast, AstNode, AstToken, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken,
- TextRange, TextSize,
+ ast::{self, HasAttrs as _, HasGenericParams, IsString as _},
+ AstNode, AstToken, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange,
+ TextSize,
};
use crate::{
@ -608,7 +609,7 @@ impl<'db> SemanticsImpl<'db> {
let quote = string.open_quote_text_range()?;
let token = self.wrap_token_infile(string.syntax().clone()).into_real_file().ok()?;
- self.descend_into_macros_breakable(token, |token| {
+ self.descend_into_macros_breakable(token, |token, _| {
(|| {
let token = token.value;
let string = ast::String::cast(token)?;
@ -655,7 +656,7 @@ impl<'db> SemanticsImpl<'db> {
let original_string = ast::String::cast(original_token.clone())?;
let original_token = self.wrap_token_infile(original_token).into_real_file().ok()?;
let quote = original_string.open_quote_text_range()?;
- self.descend_into_macros_breakable(original_token, |token| {
+ self.descend_into_macros_breakable(original_token, |token, _| {
(|| {
let token = token.value;
self.resolve_offset_in_format_args(
@ -718,7 +719,7 @@ impl<'db> SemanticsImpl<'db> {
// node is just the token, so descend the token
self.descend_into_macros_impl(
InRealFile::new(file_id, first),
- &mut |InFile { value, .. }| {
+ &mut |InFile { value, .. }, _ctx| {
if let Some(node) = value
.parent_ancestors()
.take_while(|it| it.text_range() == value.text_range())
@ -732,7 +733,7 @@ impl<'db> SemanticsImpl<'db> {
} else {
// Descend first and last token, then zip them to look for the node they belong to
let mut scratch: SmallVec<[_; 1]> = smallvec![];
- self.descend_into_macros_impl(InRealFile::new(file_id, first), &mut |token| {
+ self.descend_into_macros_impl(InRealFile::new(file_id, first), &mut |token, _ctx| {
scratch.push(token);
CONTINUE_NO_BREAKS
});
@ -740,7 +741,7 @@ impl<'db> SemanticsImpl<'db> {
let mut scratch = scratch.into_iter();
self.descend_into_macros_impl(
InRealFile::new(file_id, last),
- &mut |InFile { value: last, file_id: last_fid }| {
+ &mut |InFile { value: last, file_id: last_fid }, _ctx| {
if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() {
if first_fid == last_fid {
if let Some(p) = first.parent() {
@ -763,7 +764,9 @@ impl<'db> SemanticsImpl<'db> {
res
}
- fn is_inside_macro_call(token: &SyntaxToken) -> bool {
+ // FIXME: This isn't quite right wrt to inner attributes
+ /// Does a syntactic traversal to check whether this token might be inside a macro call
+ pub fn might_be_inside_macro_call(&self, token: &SyntaxToken) -> bool {
token.parent_ancestors().any(|ancestor| {
if ast::MacroCall::can_cast(ancestor.kind()) {
return true;
@ -781,25 +784,14 @@ impl<'db> SemanticsImpl<'db> {
})
}
- pub fn descend_into_macros_exact_if_in_macro(
-     &self,
-     token: SyntaxToken,
- ) -> SmallVec<[SyntaxToken; 1]> {
-     if Self::is_inside_macro_call(&token) {
-         self.descend_into_macros_exact(token)
-     } else {
-         smallvec![token]
-     }
- }
pub fn descend_into_macros_cb(
&self,
token: SyntaxToken,
- mut cb: impl FnMut(InFile<SyntaxToken>),
+ mut cb: impl FnMut(InFile<SyntaxToken>, SyntaxContextId),
) {
if let Ok(token) = self.wrap_token_infile(token).into_real_file() {
- self.descend_into_macros_impl(token, &mut |t| {
-     cb(t);
+ self.descend_into_macros_impl(token, &mut |t, ctx| {
+     cb(t, ctx);
CONTINUE_NO_BREAKS
});
}
@ -808,7 +800,7 @@ impl<'db> SemanticsImpl<'db> {
pub fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
let mut res = smallvec![];
if let Ok(token) = self.wrap_token_infile(token.clone()).into_real_file() {
- self.descend_into_macros_impl(token, &mut |t| {
+ self.descend_into_macros_impl(token, &mut |t, _ctx| {
res.push(t.value);
CONTINUE_NO_BREAKS
});
@ -819,10 +811,27 @@ impl<'db> SemanticsImpl<'db> {
res
}
+ pub fn descend_into_macros_no_opaque(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
+     let mut res = smallvec![];
+     if let Ok(token) = self.wrap_token_infile(token.clone()).into_real_file() {
+         self.descend_into_macros_impl(token, &mut |t, ctx| {
+             if !ctx.is_opaque(self.db.upcast()) {
+                 // Don't descend into opaque contexts
+                 res.push(t.value);
+             }
+             CONTINUE_NO_BREAKS
+         });
+     }
+     if res.is_empty() {
+         res.push(token);
+     }
+     res
+ }
pub fn descend_into_macros_breakable<T>(
&self,
token: InRealFile<SyntaxToken>,
- mut cb: impl FnMut(InFile<SyntaxToken>) -> ControlFlow<T>,
+ mut cb: impl FnMut(InFile<SyntaxToken>, SyntaxContextId) -> ControlFlow<T>,
) -> Option<T> {
self.descend_into_macros_impl(token.clone(), &mut cb)
}
@ -834,10 +843,12 @@ impl<'db> SemanticsImpl<'db> {
let text = token.text();
let kind = token.kind();
- self.descend_into_macros_cb(token.clone(), |InFile { value, file_id: _ }| {
+ self.descend_into_macros_cb(token.clone(), |InFile { value, file_id: _ }, ctx| {
let mapped_kind = value.kind();
let any_ident_match = || kind.is_any_identifier() && value.kind().is_any_identifier();
- let matches = (kind == mapped_kind || any_ident_match()) && text == value.text();
+ let matches = (kind == mapped_kind || any_ident_match())
+     && text == value.text()
+     && !ctx.is_opaque(self.db.upcast());
if matches {
r.push(value);
}
@ -854,17 +865,21 @@ impl<'db> SemanticsImpl<'db> {
let text = token.text();
let kind = token.kind();
if let Ok(token) = self.wrap_token_infile(token.clone()).into_real_file() {
- self.descend_into_macros_breakable(token.clone(), |InFile { value, file_id: _ }| {
-     let mapped_kind = value.kind();
-     let any_ident_match =
-         || kind.is_any_identifier() && value.kind().is_any_identifier();
-     let matches = (kind == mapped_kind || any_ident_match()) && text == value.text();
-     if matches {
-         ControlFlow::Break(value)
-     } else {
-         ControlFlow::Continue(())
-     }
- })
+ self.descend_into_macros_breakable(
+     token.clone(),
+     |InFile { value, file_id: _ }, _ctx| {
+         let mapped_kind = value.kind();
+         let any_ident_match =
+             || kind.is_any_identifier() && value.kind().is_any_identifier();
+         let matches =
+             (kind == mapped_kind || any_ident_match()) && text == value.text();
+         if matches {
+             ControlFlow::Break(value)
+         } else {
+             ControlFlow::Continue(())
+         }
+     },
+ )
} else {
None
}
@ -874,7 +889,7 @@ impl<'db> SemanticsImpl<'db> {
fn descend_into_macros_impl<T>(
&self,
InRealFile { value: token, file_id }: InRealFile<SyntaxToken>,
- f: &mut dyn FnMut(InFile<SyntaxToken>) -> ControlFlow<T>,
+ f: &mut dyn FnMut(InFile<SyntaxToken>, SyntaxContextId) -> ControlFlow<T>,
) -> Option<T> {
let _p = tracing::info_span!("descend_into_macros_impl").entered();
let (sa, span, file_id) = token
@ -898,7 +913,8 @@ impl<'db> SemanticsImpl<'db> {
// These are tracked to know which macro calls we still have to look into
// the tokens themselves aren't that interesting as the span that is being used to map
// things down never changes.
- let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![(file_id, smallvec![token])];
+ let mut stack: Vec<(_, SmallVec<[_; 2]>)> =
+     vec![(file_id, smallvec![(token, SyntaxContextId::ROOT)])];
// Process the expansion of a call, pushing all tokens with our span in the expansion back onto our stack
let process_expansion_for_token = |stack: &mut Vec<_>, macro_file| {
@ -921,11 +937,11 @@ impl<'db> SemanticsImpl<'db> {
// Filters out all tokens that contain the given range (usually the macro call), any such
// token is redundant as the corresponding macro call has already been processed
let filter_duplicates = |tokens: &mut SmallVec<_>, range: TextRange| {
- tokens.retain(|t: &mut SyntaxToken| !range.contains_range(t.text_range()))
+ tokens.retain(|(t, _): &mut (SyntaxToken, _)| !range.contains_range(t.text_range()))
};
while let Some((expansion, ref mut tokens)) = stack.pop() {
- while let Some(token) = tokens.pop() {
+ while let Some((token, ctx)) = tokens.pop() {
let was_not_remapped = (|| {
// First expand into attribute invocations
let containing_attribute_macro_call = self.with_ctx(|ctx| {
@ -1036,7 +1052,7 @@ impl<'db> SemanticsImpl<'db> {
let text_range = attr.syntax().text_range();
// remove any other token in this macro input, all their mappings are the
// same as this
tokens.retain(|t| {
tokens.retain(|(t, _)| {
!text_range.contains_range(t.text_range())
});
return process_expansion_for_token(
@ -1093,7 +1109,7 @@ impl<'db> SemanticsImpl<'db> {
.is_none();
if was_not_remapped {
if let ControlFlow::Break(b) = f(InFile::new(expansion, token)) {
if let ControlFlow::Break(b) = f(InFile::new(expansion, token), ctx) {
return Some(b);
}
}
@ -1221,26 +1237,10 @@ impl<'db> SemanticsImpl<'db> {
ToDef::to_def(self, src.as_ref())
}
- pub fn resolve_label(&self, lifetime: &ast::Lifetime) -> Option<Label> {
-     let text = lifetime.text();
-     let label = lifetime.syntax().ancestors().find_map(|syn| {
-         let label = match_ast! {
-             match syn {
-                 ast::ForExpr(it) => it.label(),
-                 ast::WhileExpr(it) => it.label(),
-                 ast::LoopExpr(it) => it.label(),
-                 ast::BlockExpr(it) => it.label(),
-                 _ => None,
-             }
-         };
-         label.filter(|l| {
-             l.lifetime()
-                 .and_then(|lt| lt.lifetime_ident_token())
-                 .map_or(false, |lt| lt.text() == text)
-         })
-     })?;
-     let src = self.wrap_node_infile(label);
-     ToDef::to_def(self, src.as_ref())
+ pub fn resolve_label(&self, label: &ast::Lifetime) -> Option<Label> {
+     let (parent, label_id) = self
+         .with_ctx(|ctx| ctx.label_ref_to_def(self.wrap_node_infile(label.clone()).as_ref()))?;
+     Some(Label { parent, label_id })
}
pub fn resolve_type(&self, ty: &ast::Type) -> Option<Type> {


@ -92,7 +92,7 @@ use hir_def::{
keys::{self, Key},
DynMap,
},
- hir::{BindingId, LabelId},
+ hir::{BindingId, Expr, LabelId},
AdtId, BlockId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, ExternCrateId,
FieldId, FunctionId, GenericDefId, GenericParamId, ImplId, LifetimeParamId, Lookup, MacroId,
ModuleId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId, TypeParamId, UnionId, UseId,
@ -343,6 +343,20 @@ impl SourceToDefCtx<'_, '_> {
Some((container, label_id))
}
+ pub(super) fn label_ref_to_def(
+     &mut self,
+     src: InFile<&ast::Lifetime>,
+ ) -> Option<(DefWithBodyId, LabelId)> {
+     let break_or_continue = ast::Expr::cast(src.value.syntax().parent()?)?;
+     let container = self.find_pat_or_label_container(src.syntax_ref())?;
+     let (body, source_map) = self.db.body_with_source_map(container);
+     let break_or_continue = source_map.node_expr(src.with_value(&break_or_continue))?;
+     let (Expr::Break { label, .. } | Expr::Continue { label }) = body[break_or_continue] else {
+         return None;
+     };
+     Some((container, label?))
+ }
pub(super) fn item_to_macro_call(&mut self, src: InFile<&ast::Item>) -> Option<MacroCallId> {
let map = self.dyn_map(src)?;
map[keys::ATTR_MACRO_CALL].get(&AstPtr::new(src.value)).copied()


@ -1,8 +1,12 @@
use hir::TypeInfo;
use ide_db::syntax_helpers::suggest_name;
use syntax::{
- ast::{self, edit::IndentLevel, edit_in_place::Indent, make, AstNode, HasName},
- ted, NodeOrToken,
+ ast::{
+     self, edit::IndentLevel, edit_in_place::Indent, make, syntax_factory::SyntaxFactory,
+     AstNode,
+ },
+ syntax_editor::Position,
+ NodeOrToken,
SyntaxKind::{BLOCK_EXPR, BREAK_EXPR, COMMENT, LOOP_EXPR, MATCH_GUARD, PATH_EXPR, RETURN_EXPR},
SyntaxNode, T,
};
@ -105,39 +109,46 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
),
};
+ let make = SyntaxFactory::new();
+ let mut editor = edit.make_editor(&expr_replace);
+ let pat_name = make.name(&var_name);
+ let name_expr = make.expr_path(make::ext::ident_path(&var_name));
+ if let Some(cap) = ctx.config.snippet_cap {
+     let tabstop = edit.make_tabstop_before(cap);
+     editor.add_annotation(pat_name.syntax().clone(), tabstop);
+ }
let ident_pat = match parent {
Some(ast::Expr::RefExpr(expr)) if expr.mut_token().is_some() => {
- make::ident_pat(false, true, make::name(&var_name))
+ make.ident_pat(false, true, pat_name)
}
_ if needs_adjust
&& !needs_ref
&& ty.as_ref().is_some_and(|ty| ty.is_mutable_reference()) =>
{
- make::ident_pat(false, true, make::name(&var_name))
+ make.ident_pat(false, true, pat_name)
}
- _ => make::ident_pat(false, false, make::name(&var_name)),
+ _ => make.ident_pat(false, false, pat_name),
};
let to_extract_no_ref = match ty.as_ref().filter(|_| needs_ref) {
Some(receiver_type) if receiver_type.is_mutable_reference() => {
- make::expr_ref(to_extract_no_ref, true)
+ make.expr_ref(to_extract_no_ref, true)
}
Some(receiver_type) if receiver_type.is_reference() => {
- make::expr_ref(to_extract_no_ref, false)
+ make.expr_ref(to_extract_no_ref, false)
}
_ => to_extract_no_ref,
};
- let expr_replace = edit.make_syntax_mut(expr_replace);
- let let_stmt =
-     make::let_stmt(ident_pat.into(), None, Some(to_extract_no_ref)).clone_for_update();
- let name_expr = make::expr_path(make::ext::ident_path(&var_name)).clone_for_update();
+ let let_stmt = make.let_stmt(ident_pat.into(), None, Some(to_extract_no_ref));
match anchor {
Anchor::Before(place) => {
let prev_ws = place.prev_sibling_or_token().and_then(|it| it.into_token());
let indent_to = IndentLevel::from_node(&place);
- let insert_place = edit.make_syntax_mut(place);
// Adjust ws to insert depending on if this is all inline or on separate lines
let trailing_ws = if prev_ws.is_some_and(|it| it.text().starts_with('\n')) {
@ -146,37 +157,20 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
" ".to_owned()
};
- ted::insert_all_raw(
-     ted::Position::before(insert_place),
+ editor.insert_all(
+     Position::before(place),
vec![
let_stmt.syntax().clone().into(),
make::tokens::whitespace(&trailing_ws).into(),
],
);
- ted::replace(expr_replace, name_expr.syntax());
- if let Some(cap) = ctx.config.snippet_cap {
-     if let Some(ast::Pat::IdentPat(ident_pat)) = let_stmt.pat() {
-         if let Some(name) = ident_pat.name() {
-             edit.add_tabstop_before(cap, name);
-         }
-     }
- }
+ editor.replace(expr_replace, name_expr.syntax());
}
Anchor::Replace(stmt) => {
cov_mark::hit!(test_extract_var_expr_stmt);
- let stmt_replace = edit.make_mut(stmt);
- ted::replace(stmt_replace.syntax(), let_stmt.syntax());
- if let Some(cap) = ctx.config.snippet_cap {
-     if let Some(ast::Pat::IdentPat(ident_pat)) = let_stmt.pat() {
-         if let Some(name) = ident_pat.name() {
-             edit.add_tabstop_before(cap, name);
-         }
-     }
- }
+ editor.replace(stmt.syntax(), let_stmt.syntax());
}
Anchor::WrapInBlock(to_wrap) => {
let indent_to = to_wrap.indent_level();
@ -184,47 +178,22 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
let block = if to_wrap.syntax() == &expr_replace {
// Since `expr_replace` is the same that needs to be wrapped in a block,
// we can just directly replace it with a block
- let block =
-     make::block_expr([let_stmt.into()], Some(name_expr)).clone_for_update();
- ted::replace(expr_replace, block.syntax());
- block
+ make.block_expr([let_stmt.into()], Some(name_expr))
} else {
- // `expr_replace` is a descendant of `to_wrap`, so both steps need to be
- // handled separately, otherwise we wrap the wrong expression
- let to_wrap = edit.make_mut(to_wrap);
- // Replace the target expr first so that we don't need to find where
- // `expr_replace` is in the wrapped `to_wrap`
- ted::replace(expr_replace, name_expr.syntax());
- // Wrap `to_wrap` in a block
- let block = make::block_expr([let_stmt.into()], Some(to_wrap.clone()))
-     .clone_for_update();
- ted::replace(to_wrap.syntax(), block.syntax());
- block
+ // `expr_replace` is a descendant of `to_wrap`, so we just replace it with `name_expr`.
+ editor.replace(expr_replace, name_expr.syntax());
+ make.block_expr([let_stmt.into()], Some(to_wrap.clone()))
};
- if let Some(cap) = ctx.config.snippet_cap {
-     // Adding a tabstop to `name` requires finding the let stmt again, since
-     // the existing `let_stmt` is not actually added to the tree
-     let pat = block.statements().find_map(|stmt| {
-         let ast::Stmt::LetStmt(let_stmt) = stmt else { return None };
-         let_stmt.pat()
-     });
-     if let Some(ast::Pat::IdentPat(ident_pat)) = pat {
-         if let Some(name) = ident_pat.name() {
-             edit.add_tabstop_before(cap, name);
-         }
-     }
- }
+ editor.replace(to_wrap.syntax(), block.syntax());
// fixup indentation of block
block.indent(indent_to);
}
}
+ editor.add_mappings(make.finish_with_mappings());
+ edit.add_file_edits(ctx.file_id(), editor);
edit.rename();
},
)


@ -7,7 +7,7 @@
use hir::ImportPathConfig;
use ide_db::{imports::insert_use::InsertUseConfig, SnippetCap};
- use crate::snippet::Snippet;
+ use crate::{snippet::Snippet, CompletionFieldsToResolve};
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct CompletionConfig {
@ -27,6 +27,7 @@ pub struct CompletionConfig {
pub prefer_absolute: bool,
pub snippets: Vec<Snippet>,
pub limit: Option<usize>,
+ pub fields_to_resolve: CompletionFieldsToResolve,
}
#[derive(Clone, Debug, PartialEq, Eq)]


@ -7,7 +7,8 @@ mod tests;
use std::{iter, ops::ControlFlow};
use hir::{
- HasAttrs, Local, Name, PathResolution, ScopeDef, Semantics, SemanticsScope, Type, TypeInfo,
+ HasAttrs, Local, ModuleSource, Name, PathResolution, ScopeDef, Semantics, SemanticsScope, Type,
+ TypeInfo,
};
use ide_db::{
base_db::SourceDatabase, famous_defs::FamousDefs, helpers::is_editable_crate, FilePosition,
@ -743,7 +744,12 @@ impl<'a> CompletionContext<'a> {
}
});
- let depth_from_crate_root = iter::successors(module.parent(db), |m| m.parent(db)).count();
+ let depth_from_crate_root = iter::successors(Some(module), |m| m.parent(db))
+     // `BlockExpr` modules do not count toward module depth
+     .filter(|m| !matches!(m.definition_source(db).value, ModuleSource::BlockExpr(_)))
+     .count()
+     // exclude `m` itself
+     .saturating_sub(1);
let complete_semicolon = if config.add_semicolon_to_unit {
let inside_closure_ret = token.parent_ancestors().try_for_each(|ancestor| {


@ -37,6 +37,31 @@ pub use crate::{
snippet::{Snippet, SnippetScope},
};
+ #[derive(Copy, Clone, Debug, PartialEq, Eq)]
+ pub struct CompletionFieldsToResolve {
+     pub resolve_label_details: bool,
+     pub resolve_tags: bool,
+     pub resolve_detail: bool,
+     pub resolve_documentation: bool,
+     pub resolve_filter_text: bool,
+     pub resolve_text_edit: bool,
+     pub resolve_command: bool,
+ }
+ impl CompletionFieldsToResolve {
+     pub const fn empty() -> Self {
+         Self {
+             resolve_label_details: false,
+             resolve_tags: false,
+             resolve_detail: false,
+             resolve_documentation: false,
+             resolve_filter_text: false,
+             resolve_text_edit: false,
+             resolve_command: false,
+         }
+     }
+ }
//FIXME: split the following feature into fine-grained features.
// Feature: Magic Completions
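A hedged sketch of the intended use of `CompletionFieldsToResolve`: compute cheap completion items eagerly and defer the listed fields to a later `completionItem/resolve` round-trip when the client supports it (the capability flag below is an assumption for illustration):

    // Illustrative: resolve fields lazily only if the client can re-request them.
    let fields_to_resolve = if client_supports_item_resolve {
        CompletionFieldsToResolve {
            resolve_label_details: true,
            resolve_tags: true,
            resolve_detail: true,
            resolve_documentation: true,
            resolve_filter_text: true,
            resolve_text_edit: true,
            resolve_command: true,
        }
    } else {
        CompletionFieldsToResolve::empty()
    };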


@ -37,8 +37,8 @@ use test_fixture::ChangeFixture;
use test_utils::assert_eq_text;
use crate::{
- resolve_completion_edits, CallableSnippets, CompletionConfig, CompletionItem,
- CompletionItemKind,
+ resolve_completion_edits, CallableSnippets, CompletionConfig, CompletionFieldsToResolve,
+ CompletionItem, CompletionItemKind,
};
/// Lots of basic item definitions
@ -84,6 +84,7 @@ pub(crate) const TEST_CONFIG: CompletionConfig = CompletionConfig {
prefer_absolute: false,
snippets: Vec::new(),
limit: None,
+ fields_to_resolve: CompletionFieldsToResolve::empty(),
};
pub(crate) fn completion_list(ra_fixture: &str) -> String {


@ -7081,8 +7081,8 @@ This feature has no tracking issue, and is therefore likely internal to the comp
"##,
},
Lint {
label: "object_safe_for_dispatch",
description: r##"# `object_safe_for_dispatch`
label: "dyn_compatible_for_dispatch",
description: r##"# `dyn_compatible_for_dispatch`
The tracking issue for this feature is: [#43561]


@ -614,7 +614,7 @@ fn path_segment_cmp(a: &ast::PathSegment, b: &ast::PathSegment) -> Ordering {
(Some(_), None) => Ordering::Greater,
(None, Some(_)) => Ordering::Less,
(Some(a_name), Some(b_name)) => {
- // snake_case < CamelCase < UPPER_SNAKE_CASE
+ // snake_case < UpperCamelCase < UPPER_SNAKE_CASE
let a_text = a_name.as_str().trim_start_matches("r#");
let b_text = b_name.as_str().trim_start_matches("r#");
if a_text.starts_with(char::is_lowercase)


@ -13,6 +13,7 @@ use crate::{
salsa::{Database, ParallelDatabase, Snapshot},
Cancelled, CrateId, SourceDatabase, SourceRootDatabase,
},
+ symbol_index::SymbolsDatabase,
FxIndexMap, RootDatabase,
};
@ -54,11 +55,13 @@ pub fn parallel_prime_caches(
let (progress_sender, progress_receiver) = crossbeam_channel::unbounded();
let (work_sender, work_receiver) = crossbeam_channel::unbounded();
let graph = graph.clone();
+ let local_roots = db.local_roots();
let prime_caches_worker = move |db: Snapshot<RootDatabase>| {
while let Ok((crate_id, crate_name)) = work_receiver.recv() {
progress_sender
.send(ParallelPrimeCacheWorkerProgress::BeginCrate { crate_id, crate_name })?;
+ // Compute the DefMap and possibly ImportMap
let file_id = graph[crate_id].root_file_id;
let root_id = db.file_source_root(file_id);
if db.source_root(root_id).is_library {
@ -68,6 +71,19 @@ pub fn parallel_prime_caches(
db.import_map(crate_id);
}
+ // Compute the symbol search index.
+ // This primes the cache for `ide_db::symbol_index::world_symbols()`.
+ //
+ // We do this for workspace crates only (members of local_roots), because doing it
+ // for all dependencies could be *very* unnecessarily slow in a large project.
+ //
+ // FIXME: We should do it unconditionally if the configuration is set to default to
+ // searching dependencies (rust-analyzer.workspace.symbol.search.scope), but we
+ // would need to pipe that configuration information down here.
+ if local_roots.contains(&root_id) {
+     db.crate_symbols(crate_id.into());
+ }
progress_sender.send(ParallelPrimeCacheWorkerProgress::EndCrate { crate_id })?;
}


@ -529,9 +529,13 @@ impl<'a> FindUsages<'a> {
})
.into_iter()
.flat_map(move |token| {
- sema.descend_into_macros_exact_if_in_macro(token)
-     .into_iter()
-     .filter_map(|it| it.parent())
+ if sema.might_be_inside_macro_call(&token) {
+     sema.descend_into_macros_exact(token)
+ } else {
+     <_>::from([token])
+ }
+ .into_iter()
+ .filter_map(|it| it.parent())
})
}


@ -229,10 +229,10 @@ fn foo() {
check_diagnostics(
r#"
struct non_camel_case_name {}
- // ^^^^^^^^^^^^^^^^^^^ 💡 warn: Structure `non_camel_case_name` should have CamelCase name, e.g. `NonCamelCaseName`
+ // ^^^^^^^^^^^^^^^^^^^ 💡 warn: Structure `non_camel_case_name` should have UpperCamelCase name, e.g. `NonCamelCaseName`
struct SCREAMING_CASE {}
- // ^^^^^^^^^^^^^^ 💡 warn: Structure `SCREAMING_CASE` should have CamelCase name, e.g. `ScreamingCase`
+ // ^^^^^^^^^^^^^^ 💡 warn: Structure `SCREAMING_CASE` should have UpperCamelCase name, e.g. `ScreamingCase`
"#,
);
}
@ -261,10 +261,10 @@ struct SomeStruct { SomeField: u8 }
check_diagnostics(
r#"
enum some_enum { Val(u8) }
- // ^^^^^^^^^ 💡 warn: Enum `some_enum` should have CamelCase name, e.g. `SomeEnum`
+ // ^^^^^^^^^ 💡 warn: Enum `some_enum` should have UpperCamelCase name, e.g. `SomeEnum`
enum SOME_ENUM {}
- // ^^^^^^^^^ 💡 warn: Enum `SOME_ENUM` should have CamelCase name, e.g. `SomeEnum`
+ // ^^^^^^^^^ 💡 warn: Enum `SOME_ENUM` should have UpperCamelCase name, e.g. `SomeEnum`
"#,
);
}
@ -283,7 +283,7 @@ enum AABB {}
check_diagnostics(
r#"
enum SomeEnum { SOME_VARIANT(u8) }
- // ^^^^^^^^^^^^ 💡 warn: Variant `SOME_VARIANT` should have CamelCase name, e.g. `SomeVariant`
+ // ^^^^^^^^^^^^ 💡 warn: Variant `SOME_VARIANT` should have UpperCamelCase name, e.g. `SomeVariant`
"#,
);
}
@ -313,7 +313,7 @@ static some_weird_const: u8 = 10;
check_diagnostics(
r#"
struct someStruct;
- // ^^^^^^^^^^ 💡 warn: Structure `someStruct` should have CamelCase name, e.g. `SomeStruct`
+ // ^^^^^^^^^^ 💡 warn: Structure `someStruct` should have UpperCamelCase name, e.g. `SomeStruct`
impl someStruct {
fn SomeFunc(&self) {
@ -530,11 +530,11 @@ extern {
check_diagnostics(
r#"
trait BAD_TRAIT {
- // ^^^^^^^^^ 💡 warn: Trait `BAD_TRAIT` should have CamelCase name, e.g. `BadTrait`
+ // ^^^^^^^^^ 💡 warn: Trait `BAD_TRAIT` should have UpperCamelCase name, e.g. `BadTrait`
const bad_const: u8;
// ^^^^^^^^^ 💡 warn: Constant `bad_const` should have UPPER_SNAKE_CASE name, e.g. `BAD_CONST`
type BAD_TYPE;
- // ^^^^^^^^ 💡 warn: Type alias `BAD_TYPE` should have CamelCase name, e.g. `BadType`
+ // ^^^^^^^^ 💡 warn: Type alias `BAD_TYPE` should have UpperCamelCase name, e.g. `BadType`
fn BAD_FUNCTION();
// ^^^^^^^^^^^^ 💡 warn: Function `BAD_FUNCTION` should have snake_case name, e.g. `bad_function`
fn BadFunction();
@ -552,11 +552,11 @@ trait BAD_TRAIT {
check_diagnostics_with_disabled(
r#"
trait BAD_TRAIT {
- // ^^^^^^^^^ 💡 warn: Trait `BAD_TRAIT` should have CamelCase name, e.g. `BadTrait`
+ // ^^^^^^^^^ 💡 warn: Trait `BAD_TRAIT` should have UpperCamelCase name, e.g. `BadTrait`
const bad_const: u8;
// ^^^^^^^^^ 💡 warn: Constant `bad_const` should have UPPER_SNAKE_CASE name, e.g. `BAD_CONST`
type BAD_TYPE;
- // ^^^^^^^^ 💡 warn: Type alias `BAD_TYPE` should have CamelCase name, e.g. `BadType`
+ // ^^^^^^^^ 💡 warn: Type alias `BAD_TYPE` should have UpperCamelCase name, e.g. `BadType`
fn BAD_FUNCTION(BAD_PARAM: u8);
// ^^^^^^^^^^^^ 💡 warn: Function `BAD_FUNCTION` should have snake_case name, e.g. `bad_function`
// ^^^^^^^^^ 💡 warn: Parameter `BAD_PARAM` should have snake_case name, e.g. `bad_param`
@ -664,7 +664,7 @@ mod CheckNonstandardStyle {
mod CheckBadStyle {
//^^^^^^^^^^^^^ 💡 error: Module `CheckBadStyle` should have snake_case name, e.g. `check_bad_style`
struct fooo;
- //^^^^ 💡 error: Structure `fooo` should have CamelCase name, e.g. `Fooo`
+ //^^^^ 💡 error: Structure `fooo` should have UpperCamelCase name, e.g. `Fooo`
}
mod F {
@ -676,7 +676,7 @@ mod F {
#[deny(non_snake_case, non_camel_case_types)]
pub struct some_type {
- //^^^^^^^^^ 💡 error: Structure `some_type` should have CamelCase name, e.g. `SomeType`
+ //^^^^^^^^^ 💡 error: Structure `some_type` should have UpperCamelCase name, e.g. `SomeType`
SOME_FIELD: u8,
//^^^^^^^^^^ 💡 error: Field `SOME_FIELD` should have snake_case name, e.g. `some_field`
SomeField: u16,
@ -693,11 +693,11 @@ pub static SomeStatic: u8 = 10;
#[deny(non_snake_case, non_camel_case_types, non_upper_case_globals)]
trait BAD_TRAIT {
// ^^^^^^^^^ 💡 error: Trait `BAD_TRAIT` should have CamelCase name, e.g. `BadTrait`
// ^^^^^^^^^ 💡 error: Trait `BAD_TRAIT` should have UpperCamelCase name, e.g. `BadTrait`
const bad_const: u8;
// ^^^^^^^^^ 💡 error: Constant `bad_const` should have UPPER_SNAKE_CASE name, e.g. `BAD_CONST`
type BAD_TYPE;
// ^^^^^^^^ 💡 error: Type alias `BAD_TYPE` should have CamelCase name, e.g. `BadType`
// ^^^^^^^^ 💡 error: Type alias `BAD_TYPE` should have UpperCamelCase name, e.g. `BadType`
fn BAD_FUNCTION(BAD_PARAM: u8);
// ^^^^^^^^^^^^ 💡 error: Function `BAD_FUNCTION` should have snake_case name, e.g. `bad_function`
// ^^^^^^^^^ 💡 error: Parameter `BAD_PARAM` should have snake_case name, e.g. `bad_param`
@ -952,7 +952,7 @@ fn foo() {
let FOO;
#[allow(non_snake_case)]
struct qux;
// ^^^ 💡 warn: Structure `qux` should have CamelCase name, e.g. `Qux`
// ^^^ 💡 warn: Structure `qux` should have UpperCamelCase name, e.g. `Qux`
fn BAZ() {
// ^^^ 💡 error: Function `BAZ` should have snake_case name, e.g. `baz`
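For reference, a minimal sketch (not part of the diff) of the casing styles the renamed messages refer to:

// Illustrative only: the naming conventions these diagnostics check.
struct UpperCamelCase;            // structs, enums, variants, traits, type aliases
const UPPER_SNAKE_CASE: u8 = 0;   // consts and statics
fn snake_case_name() {}           // functions, methods, modules, parameters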

View file

@ -29,7 +29,7 @@ pub(crate) fn goto_declaration(
.find(|it| matches!(it.kind(), IDENT | T![self] | T![super] | T![crate] | T![Self]))?;
let range = original_token.text_range();
let info: Vec<NavigationTarget> = sema
.descend_into_macros(original_token)
.descend_into_macros_no_opaque(original_token)
.iter()
.filter_map(|token| {
let parent = token.parent()?;

View file

@ -83,7 +83,7 @@ pub(crate) fn goto_definition(
}
let navs = sema
.descend_into_macros(original_token.clone())
.descend_into_macros_no_opaque(original_token.clone())
.into_iter()
.filter_map(|token| {
let parent = token.parent()?;
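Both goto handlers now call `descend_into_macros_no_opaque`, which stops descending once a token's hygiene context becomes opaque (def-site hygiene). A rough sketch of the distinction, assuming the unstable `decl_macro` feature for the def-site case:

macro_rules! call_site {
    ($name:ident) => { fn $name() {} };   // call-site hygiene: still descended into
}

macro def_site() {
    fn hidden() {}   // def-site (opaque) hygiene: descents here are skipped
}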
@ -2660,6 +2660,24 @@ fn foo() {
);
}
#[test]
fn label_inside_macro() {
check(
r#"
macro_rules! m {
($s:stmt) => { $s };
}
fn foo() {
'label: loop {
// ^^^^^^
m!(continue 'label$0);
}
}
"#,
);
}
#[test]
fn goto_def_on_return_in_try() {
check(

View file

@ -69,7 +69,7 @@ pub(crate) fn goto_type_definition(
}
let range = token.text_range();
sema.descend_into_macros(token)
sema.descend_into_macros_no_opaque(token)
.into_iter()
.filter_map(|token| {
let ty = sema

View file

@ -3,9 +3,9 @@ use std::{mem, ops::Not};
use either::Either;
use hir::{
db::ExpandDatabase, Adt, AsAssocItem, AsExternAssocItem, CaptureKind, HasCrate, HasSource,
HirDisplay, Layout, LayoutError, MethodViolationCode, Name, ObjectSafetyViolation, Semantics,
Trait, Type, TypeInfo,
db::ExpandDatabase, Adt, AsAssocItem, AsExternAssocItem, AssocItemContainer, CaptureKind,
DynCompatibilityViolation, HasCrate, HasSource, HirDisplay, Layout, LayoutError,
MethodViolationCode, Name, Semantics, Trait, Type, TypeInfo,
};
use ide_db::{
base_db::SourceDatabase,
@ -529,10 +529,10 @@ pub(super) fn definition(
_ => None,
};
let object_safety_info = if let Definition::Trait(it) = def {
let mut object_safety_info = String::new();
render_object_safety(db, &mut object_safety_info, it.object_safety(db));
Some(object_safety_info)
let dyn_compatibility_info = if let Definition::Trait(it) = def {
let mut dyn_compatibility_info = String::new();
render_dyn_compatibility(db, &mut dyn_compatibility_info, it.dyn_compatibility(db));
Some(dyn_compatibility_info)
} else {
None
};
@ -546,8 +546,8 @@ pub(super) fn definition(
desc.push_str(&layout_info);
desc.push('\n');
}
if let Some(object_safety_info) = object_safety_info {
desc.push_str(&object_safety_info);
if let Some(dyn_compatibility_info) = dyn_compatibility_info {
desc.push_str(&dyn_compatibility_info);
desc.push('\n');
}
desc.push_str(&label);
@ -813,7 +813,15 @@ fn definition_mod_path(db: &RootDatabase, def: &Definition, edition: Edition) ->
if matches!(def, Definition::GenericParam(_) | Definition::Local(_) | Definition::Label(_)) {
return None;
}
def.module(db).map(|module| path(db, module, definition_owner_name(db, def, edition), edition))
let container: Option<Definition> =
def.as_assoc_item(db).and_then(|assoc| match assoc.container(db) {
AssocItemContainer::Trait(trait_) => Some(trait_.into()),
AssocItemContainer::Impl(impl_) => impl_.self_ty(db).as_adt().map(|adt| adt.into()),
});
container
.unwrap_or(*def)
.module(db)
.map(|module| path(db, module, definition_owner_name(db, def, edition), edition))
}
fn markup(docs: Option<String>, desc: String, mod_path: Option<String>) -> Markup {
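The practical effect on hover, sketched with the `test` crate name used by the fixtures (example hypothetical): an associated item now reports the module of its container instead of omitting the path.

mod m {
    pub struct S;
    impl S {
        pub fn f() {}   // hovering `f` shows `test::m`, the module of `S`
    }
}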
@ -980,24 +988,24 @@ fn keyword_hints(
}
}
fn render_object_safety(
fn render_dyn_compatibility(
db: &RootDatabase,
buf: &mut String,
safety: Option<ObjectSafetyViolation>,
safety: Option<DynCompatibilityViolation>,
) {
let Some(osv) = safety else {
buf.push_str("// Object Safety: Yes");
buf.push_str("// Dyn Compatible: Yes");
return;
};
buf.push_str("// Object Safety: No\n// - Reason: ");
buf.push_str("// Dyn Compatible: No\n// - Reason: ");
match osv {
ObjectSafetyViolation::SizedSelf => {
DynCompatibilityViolation::SizedSelf => {
buf.push_str("has a `Self: Sized` bound");
}
ObjectSafetyViolation::SelfReferential => {
DynCompatibilityViolation::SelfReferential => {
buf.push_str("has a bound that references `Self`");
}
ObjectSafetyViolation::Method(func, mvc) => {
DynCompatibilityViolation::Method(func, mvc) => {
let name = hir::Function::from(func).name(db);
format_to!(
buf,
@ -1020,7 +1028,7 @@ fn render_object_safety(
};
buf.push_str(desc);
}
ObjectSafetyViolation::AssocConst(const_) => {
DynCompatibilityViolation::AssocConst(const_) => {
let name = hir::Const::from(const_).name(db);
if let Some(name) = name {
format_to!(buf, "has an associated constant `{}`", name.as_str());
@ -1028,11 +1036,11 @@ fn render_object_safety(
buf.push_str("has an associated constant");
}
}
ObjectSafetyViolation::GAT(alias) => {
DynCompatibilityViolation::GAT(alias) => {
let name = hir::TypeAlias::from(alias).name(db);
format_to!(buf, "has a generic associated type `{}`", name.as_str());
}
ObjectSafetyViolation::HasNonSafeSuperTrait(super_trait) => {
DynCompatibilityViolation::HasNonCompatibleSuperTrait(super_trait) => {
let name = hir::Trait::from(super_trait).name(db);
format_to!(buf, "has a object unsafe supertrait `{}`", name.as_str());
}
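For context, a minimal example of a trait the branches above could report on; the `Self: Sized` bound alone already makes it dyn-incompatible:

trait NotDynCompatible: Sized {}

// fn take(_: &dyn NotDynCompatible) {}   // error: the trait is not dyn compatible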

View file

@ -7175,7 +7175,7 @@ impl T$0 for () {}
```
```rust
// Object Safety: Yes
// Dyn Compatible: Yes
trait T {}
```
"#]],
@ -7195,7 +7195,7 @@ impl T$0 for () {}
```
```rust
// Object Safety: Yes
// Dyn Compatible: Yes
trait T {}
```
"#]],
@ -7219,7 +7219,7 @@ impl T$0 for () {}
```
```rust
// Object Safety: No
// Dyn Compatible: No
// - Reason: has a method `func` that is non dispatchable because of:
// - missing a receiver
trait T { /**/ }
@ -7245,7 +7245,7 @@ impl T$0 for () {}
```
```rust
// Object Safety: No
// Dyn Compatible: No
// - Reason: has a method `func` that is non dispatchable because of:
// - missing a receiver
trait T {
@ -7275,7 +7275,7 @@ impl T$0 for () {}
```
```rust
// Object Safety: No
// Dyn Compatible: No
// - Reason: has a method `func` that is non dispatchable because of:
// - missing a receiver
trait T {
@ -7305,7 +7305,7 @@ impl T$0 for () {}
```
```rust
// Object Safety: No
// Dyn Compatible: No
// - Reason: has a method `func` that is non dispatchable because of:
// - missing a receiver
trait T {
@ -8962,3 +8962,29 @@ fn test_hover_function_with_pat_param() {
"#]],
);
}
#[test]
fn hover_path_inside_block_scope() {
check(
r#"
mod m {
const _: () = {
mod m2 {
const C$0: () = ();
}
};
}
"#,
expect![[r#"
*C*
```rust
test::m::m2
```
```rust
const C: () = ()
```
"#]],
);
}

View file

@ -119,8 +119,8 @@ pub use ide_assists::{
Assist, AssistConfig, AssistId, AssistKind, AssistResolveStrategy, SingleResolve,
};
pub use ide_completion::{
CallableSnippets, CompletionConfig, CompletionItem, CompletionItemKind, CompletionRelevance,
Snippet, SnippetScope,
CallableSnippets, CompletionConfig, CompletionFieldsToResolve, CompletionItem,
CompletionItemKind, CompletionRelevance, Snippet, SnippetScope,
};
pub use ide_db::{
base_db::{Cancelled, CrateGraph, CrateId, FileChange, SourceRoot, SourceRootId},

View file

@ -409,7 +409,8 @@ fn traverse(
let mut r = 0;
sema.descend_into_macros_breakable(
InRealFile::new(file_id, token.clone()),
|tok| {
|tok, _ctx| {
// FIXME: Consider checking ctx transparency for being opaque?
let tok = tok.value;
let tok_kind = tok.kind();

View file

@ -265,6 +265,11 @@ impl ProjectFolders {
entries.push(manifest.to_owned());
}
for buildfile in ws.buildfiles() {
file_set_roots.push(VfsPath::from(buildfile.to_owned()));
entries.push(buildfile.to_owned());
}
// In case of detached files we do **not** look for a rust-analyzer.toml.
if !matches!(ws.kind, ProjectWorkspaceKind::DetachedFile { .. }) {
let ws_root = ws.workspace_root();
@ -521,7 +526,7 @@ mod tests {
#[test]
fn test_loading_rust_analyzer() {
let path = Path::new(env!("CARGO_MANIFEST_DIR")).parent().unwrap().parent().unwrap();
let cargo_config = CargoConfig::default();
let cargo_config = CargoConfig { set_test: true, ..CargoConfig::default() };
let load_cargo_config = LoadCargoConfig {
load_out_dirs_from_check: false,
with_proc_macro_server: ProcMacroServerChoice::None,

View file

@ -100,6 +100,7 @@ pub struct CargoConfig {
pub invocation_strategy: InvocationStrategy,
/// Optional path to use instead of `target` when building
pub target_dir: Option<Utf8PathBuf>,
pub set_test: bool,
}
pub type Package = Idx<PackageData>;

View file

@ -35,6 +35,7 @@ fn load_cargo_with_overrides(
rustc: Err(None),
cargo_config_extra_env: Default::default(),
error: None,
set_test: true,
},
cfg_overrides,
sysroot: Sysroot::empty(),
@ -242,6 +243,7 @@ fn smoke_test_real_sysroot_cargo() {
rustc: Err(None),
cargo_config_extra_env: Default::default(),
error: None,
set_test: true,
},
sysroot,
rustc_cfg: Vec::new(),

View file

@ -78,6 +78,7 @@ pub enum ProjectWorkspaceKind {
rustc: Result<Box<(CargoWorkspace, WorkspaceBuildScripts)>, Option<String>>,
/// Environment variables set in the `.cargo/config` file.
cargo_config_extra_env: FxHashMap<String, String>,
set_test: bool,
},
/// Project workspace was specified using a `rust-project.json` file.
Json(ProjectJson),
@ -98,6 +99,7 @@ pub enum ProjectWorkspaceKind {
cargo: Option<(CargoWorkspace, WorkspaceBuildScripts, Option<Arc<anyhow::Error>>)>,
/// Environment variables set in the `.cargo/config` file.
cargo_config_extra_env: FxHashMap<String, String>,
set_test: bool,
},
}
@ -112,6 +114,7 @@ impl fmt::Debug for ProjectWorkspace {
build_scripts,
rustc,
cargo_config_extra_env,
set_test,
} => f
.debug_struct("Cargo")
.field("root", &cargo.workspace_root().file_name())
@ -126,6 +129,7 @@ impl fmt::Debug for ProjectWorkspace {
.field("toolchain", &toolchain)
.field("data_layout", &target_layout)
.field("cargo_config_extra_env", &cargo_config_extra_env)
.field("set_test", set_test)
.field("build_scripts", &build_scripts.error().unwrap_or("ok"))
.finish(),
ProjectWorkspaceKind::Json(project) => {
@ -137,12 +141,14 @@ impl fmt::Debug for ProjectWorkspace {
.field("toolchain", &toolchain)
.field("data_layout", &target_layout)
.field("n_cfg_overrides", &cfg_overrides.len());
debug_struct.finish()
}
ProjectWorkspaceKind::DetachedFile {
file,
cargo: cargo_script,
cargo_config_extra_env,
set_test,
} => f
.debug_struct("DetachedFiles")
.field("file", &file)
@ -154,6 +160,7 @@ impl fmt::Debug for ProjectWorkspace {
.field("data_layout", &target_layout)
.field("n_cfg_overrides", &cfg_overrides.len())
.field("cargo_config_extra_env", &cargo_config_extra_env)
.field("set_test", set_test)
.finish(),
}
}
@ -329,6 +336,7 @@ impl ProjectWorkspace {
rustc,
cargo_config_extra_env,
error: error.map(Arc::new),
set_test: config.set_test,
},
sysroot,
rustc_cfg,
@ -423,6 +431,7 @@ impl ProjectWorkspace {
file: detached_file.to_owned(),
cargo: cargo_script,
cargo_config_extra_env,
set_test: config.set_test,
},
sysroot,
rustc_cfg,
@ -539,6 +548,17 @@ impl ProjectWorkspace {
}
}
pub fn buildfiles(&self) -> Vec<AbsPathBuf> {
match &self.kind {
ProjectWorkspaceKind::Json(project) => project
.crates()
.filter_map(|(_, krate)| krate.build.as_ref().map(|build| build.build_file.clone()))
.map(AbsPathBuf::assert)
.collect(),
_ => vec![],
}
}
pub fn find_sysroot_proc_macro_srv(&self) -> anyhow::Result<AbsPathBuf> {
self.sysroot.discover_proc_macro_srv()
}
@ -598,6 +618,7 @@ impl ProjectWorkspace {
build_scripts,
cargo_config_extra_env: _,
error: _,
set_test: _,
} => {
cargo
.packages()
@ -739,6 +760,7 @@ impl ProjectWorkspace {
build_scripts,
cargo_config_extra_env: _,
error: _,
set_test,
} => (
cargo_to_crate_graph(
load,
@ -748,10 +770,11 @@ impl ProjectWorkspace {
rustc_cfg.clone(),
cfg_overrides,
build_scripts,
*set_test,
),
sysroot,
),
ProjectWorkspaceKind::DetachedFile { file, cargo: cargo_script, .. } => (
ProjectWorkspaceKind::DetachedFile { file, cargo: cargo_script, set_test, .. } => (
if let Some((cargo, build_scripts, _)) = cargo_script {
cargo_to_crate_graph(
&mut |path| load(path),
@ -761,6 +784,7 @@ impl ProjectWorkspace {
rustc_cfg.clone(),
cfg_overrides,
build_scripts,
*set_test,
)
} else {
detached_file_to_crate_graph(
@ -769,6 +793,7 @@ impl ProjectWorkspace {
file,
sysroot,
cfg_overrides,
*set_test,
)
},
sysroot,
@ -802,6 +827,7 @@ impl ProjectWorkspace {
cargo_config_extra_env,
build_scripts: _,
error: _,
set_test: _,
},
ProjectWorkspaceKind::Cargo {
cargo: o_cargo,
@ -809,6 +835,7 @@ impl ProjectWorkspace {
cargo_config_extra_env: o_cargo_config_extra_env,
build_scripts: _,
error: _,
set_test: _,
},
) => {
cargo == o_cargo
@ -823,11 +850,13 @@ impl ProjectWorkspace {
file,
cargo: Some((cargo_script, _, _)),
cargo_config_extra_env,
set_test: _,
},
ProjectWorkspaceKind::DetachedFile {
file: o_file,
cargo: Some((o_cargo_script, _, _)),
cargo_config_extra_env: o_cargo_config_extra_env,
set_test: _,
},
) => {
file == o_file
@ -976,6 +1005,7 @@ fn cargo_to_crate_graph(
rustc_cfg: Vec<CfgAtom>,
override_cfg: &CfgOverrides,
build_scripts: &WorkspaceBuildScripts,
set_test: bool,
) -> (CrateGraph, ProcMacroPaths) {
let _p = tracing::info_span!("cargo_to_crate_graph").entered();
let mut res = (CrateGraph::default(), ProcMacroPaths::default());
@ -1000,8 +1030,10 @@ fn cargo_to_crate_graph(
let mut cfg_options = cfg_options.clone();
if cargo[pkg].is_local {
// Add test cfg for local crates
cfg_options.insert_atom(sym::test.clone());
if set_test {
// Add test cfg for local crates
cfg_options.insert_atom(sym::test.clone());
}
cfg_options.insert_atom(sym::rust_analyzer.clone());
}
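The practical effect of disabling `set_test` (exposed as `rust-analyzer.cfg.setTest` later in this diff), sketched on a local crate:

#[cfg(test)]
mod tests {
    // With `set_test == false`, `cfg(test)` is never inserted for this crate,
    // so this module is analyzed as inactive and contributes no diagnostics,
    // completions, or type inference results.
    fn helper() {}
}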
@ -1162,6 +1194,7 @@ fn detached_file_to_crate_graph(
detached_file: &ManifestPath,
sysroot: &Sysroot,
override_cfg: &CfgOverrides,
set_test: bool,
) -> (CrateGraph, ProcMacroPaths) {
let _p = tracing::info_span!("detached_file_to_crate_graph").entered();
let mut crate_graph = CrateGraph::default();
@ -1169,7 +1202,9 @@ fn detached_file_to_crate_graph(
sysroot_to_crate_graph(&mut crate_graph, sysroot, rustc_cfg.clone(), load);
let mut cfg_options = CfgOptions::from_iter(rustc_cfg);
cfg_options.insert_atom(sym::test.clone());
if set_test {
cfg_options.insert_atom(sym::test.clone());
}
cfg_options.insert_atom(sym::rust_analyzer.clone());
override_cfg.apply(&mut cfg_options, "");
let cfg_options = Arc::new(cfg_options);
@ -1415,6 +1450,7 @@ fn sysroot_to_crate_graph(
..Default::default()
},
&WorkspaceBuildScripts::default(),
false,
);
let mut pub_deps = vec![];

View file

@ -137,6 +137,7 @@ fn setup_logging(log_file_flag: Option<PathBuf>) -> anyhow::Result<()> {
filter: env::var("RA_LOG").ok().unwrap_or_else(|| "error".to_owned()),
chalk_filter: env::var("CHALK_DEBUG").ok(),
profile_filter: env::var("RA_PROFILE").ok(),
json_profile_filter: std::env::var("RA_PROFILE_JSON").ok(),
}
.init()?;

View file

@ -65,6 +65,7 @@ impl flags::AnalysisStats {
false => Some(RustLibSource::Discover),
},
all_targets: true,
set_test: true,
..Default::default()
};
let no_progress = &|_| ();
@ -81,7 +82,13 @@ impl flags::AnalysisStats {
with_proc_macro_server: if self.disable_proc_macros {
ProcMacroServerChoice::None
} else {
ProcMacroServerChoice::Sysroot
match self.proc_macro_srv {
Some(ref path) => {
let path = vfs::AbsPathBuf::assert_utf8(path.to_owned());
ProcMacroServerChoice::Explicit(path)
}
None => ProcMacroServerChoice::Sysroot,
}
},
prefill_caches: false,
};

View file

@ -76,6 +76,8 @@ xflags::xflags! {
optional --disable-build-scripts
/// Don't expand proc macros.
optional --disable-proc-macros
/// Run the proc-macro-srv binary at the specified path.
optional --proc-macro-srv path: PathBuf
/// Skip body lowering.
optional --skip-lowering
/// Skip type inference.
@ -120,7 +122,7 @@ xflags::xflags! {
optional --disable-build-scripts
/// Don't expand proc macros.
optional --disable-proc-macros
/// Run a custom proc-macro-srv binary.
/// Run the proc-macro-srv binary at the specified path.
optional --proc-macro-srv path: PathBuf
}
@ -133,7 +135,7 @@ xflags::xflags! {
optional --disable-build-scripts
/// Don't expand proc macros.
optional --disable-proc-macros
/// Run a custom proc-macro-srv binary.
/// Run the proc-macro-srv binary at the specified path.
optional --proc-macro-srv path: PathBuf
}
@ -233,6 +235,7 @@ pub struct AnalysisStats {
pub no_sysroot: bool,
pub disable_build_scripts: bool,
pub disable_proc_macros: bool,
pub proc_macro_srv: Option<PathBuf>,
pub skip_lowering: bool,
pub skip_inference: bool,
pub skip_mir_stats: bool,
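With the new flag, an invocation would look roughly like `rust-analyzer analysis-stats --proc-macro-srv ./target/debug/proc-macro-srv .` (path hypothetical); without it, the sysroot server is discovered as before.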

View file

@ -277,6 +277,7 @@ impl flags::Lsif {
let cargo_config = &CargoConfig {
sysroot: Some(RustLibSource::Discover),
all_targets: true,
set_test: true,
..Default::default()
};
let no_progress = &|_| ();

View file

@ -16,6 +16,7 @@ impl flags::RunTests {
let cargo_config = CargoConfig {
sysroot: Some(RustLibSource::Discover),
all_targets: true,
set_test: true,
..Default::default()
};
let load_cargo_config = LoadCargoConfig {

View file

@ -70,6 +70,7 @@ impl Tester {
let cargo_config = CargoConfig {
sysroot: Some(RustLibSource::Discover),
all_targets: true,
set_test: true,
..Default::default()
};
@ -85,6 +86,7 @@ impl Tester {
file: ManifestPath::try_from(tmp_file).unwrap(),
cargo: None,
cargo_config_extra_env: Default::default(),
set_test: true,
},
sysroot,
rustc_cfg: vec![],

View file

@ -24,11 +24,6 @@ impl flags::Scip {
let now = Instant::now();
let no_progress = &|s| (eprintln!("rust-analyzer: Loading {s}"));
let load_cargo_config = LoadCargoConfig {
load_out_dirs_from_check: true,
with_proc_macro_server: ProcMacroServerChoice::Sysroot,
prefill_caches: true,
};
let root =
vfs::AbsPathBuf::assert_utf8(std::env::current_dir()?.join(&self.path)).normalize();
@ -51,6 +46,11 @@ impl flags::Scip {
// FIXME @alibektas : What happens to errors without logging?
error!(?error_sink, "Config Error(s)");
}
let load_cargo_config = LoadCargoConfig {
load_out_dirs_from_check: true,
with_proc_macro_server: ProcMacroServerChoice::Sysroot,
prefill_caches: true,
};
let cargo_config = config.cargo(None);
let (db, vfs, _) = load_workspace_at(
root.as_path().as_ref(),

View file

@ -13,6 +13,7 @@ impl flags::Ssr {
let cargo_config = CargoConfig {
sysroot: Some(RustLibSource::Discover),
all_targets: true,
set_test: true,
..Default::default()
};
let load_cargo_config = LoadCargoConfig {
@ -50,7 +51,8 @@ impl flags::Search {
pub fn run(self) -> anyhow::Result<()> {
use ide_db::base_db::SourceRootDatabase;
use ide_db::symbol_index::SymbolsDatabase;
let cargo_config = CargoConfig::default();
let cargo_config =
CargoConfig { all_targets: true, set_test: true, ..CargoConfig::default() };
let load_cargo_config = LoadCargoConfig {
load_out_dirs_from_check: true,
with_proc_macro_server: ProcMacroServerChoice::Sysroot,

View file

@ -12,10 +12,10 @@ use std::{
use cfg::{CfgAtom, CfgDiff};
use hir::Symbol;
use ide::{
AssistConfig, CallableSnippets, CompletionConfig, DiagnosticsConfig, ExprFillDefaultMode,
GenericParameterHints, HighlightConfig, HighlightRelatedConfig, HoverConfig, HoverDocFormat,
InlayFieldsToResolve, InlayHintsConfig, JoinLinesConfig, MemoryLayoutHoverConfig,
MemoryLayoutHoverRenderKind, Snippet, SnippetScope, SourceRootId,
AssistConfig, CallableSnippets, CompletionConfig, CompletionFieldsToResolve, DiagnosticsConfig,
ExprFillDefaultMode, GenericParameterHints, HighlightConfig, HighlightRelatedConfig,
HoverConfig, HoverDocFormat, InlayFieldsToResolve, InlayHintsConfig, JoinLinesConfig,
MemoryLayoutHoverConfig, MemoryLayoutHoverRenderKind, Snippet, SnippetScope, SourceRootId,
};
use ide_db::{
imports::insert_use::{ImportGranularity, InsertUseConfig, PrefixKind},
@ -574,6 +574,9 @@ config_data! {
/// set to a path relative to the workspace to use that path.
cargo_targetDir | rust_analyzerTargetDir: Option<TargetDirectory> = None,
/// Set `cfg(test)` for local crates. Defaults to true.
cfg_setTest: bool = true,
/// Run the check command for diagnostics on save.
checkOnSave | checkOnSave_enable: bool = true,
@ -695,7 +698,6 @@ config_data! {
workspace_symbol_search_limit: usize = 128,
/// Workspace symbol search scope.
workspace_symbol_search_scope: WorkspaceSymbolSearchScopeDef = WorkspaceSymbolSearchScopeDef::Workspace,
}
}
@ -1391,6 +1393,7 @@ impl Config {
}
pub fn completion(&self, source_root: Option<SourceRootId>) -> CompletionConfig {
let client_capability_fields = self.completion_resolve_support_properties();
CompletionConfig {
enable_postfix_completions: self.completion_postfix_enable(source_root).to_owned(),
enable_imports_on_the_fly: self.completion_autoimport_enable(source_root).to_owned()
@ -1415,6 +1418,15 @@ impl Config {
limit: self.completion_limit(source_root).to_owned(),
enable_term_search: self.completion_termSearch_enable(source_root).to_owned(),
term_search_fuel: self.completion_termSearch_fuel(source_root).to_owned() as u64,
fields_to_resolve: CompletionFieldsToResolve {
resolve_label_details: client_capability_fields.contains("labelDetails"),
resolve_tags: client_capability_fields.contains("tags"),
resolve_detail: client_capability_fields.contains("detail"),
resolve_documentation: client_capability_fields.contains("documentation"),
resolve_filter_text: client_capability_fields.contains("filterText"),
resolve_text_edit: client_capability_fields.contains("textEdit"),
resolve_command: client_capability_fields.contains("command"),
},
}
}
@ -1859,9 +1871,14 @@ impl Config {
extra_args: self.cargo_extraArgs(source_root).clone(),
extra_env: self.cargo_extraEnv(source_root).clone(),
target_dir: self.target_dir_from_config(source_root),
set_test: *self.cfg_setTest(source_root),
}
}
pub fn cfg_set_test(&self, source_root: Option<SourceRootId>) -> bool {
*self.cfg_setTest(source_root)
}
pub(crate) fn completion_snippets_default() -> FxHashMap<String, SnippetDef> {
serde_json::from_str(
r#"{

View file

@ -7,6 +7,7 @@ use paths::{AbsPathBuf, Utf8Path, Utf8PathBuf};
use project_model::ProjectJsonData;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use tracing::{info_span, span::EnteredSpan};
use crate::command::{CommandHandle, ParseFromLine};
@ -60,7 +61,10 @@ impl DiscoverCommand {
let mut cmd = Command::new(command);
cmd.args(args);
Ok(DiscoverHandle { _handle: CommandHandle::spawn(cmd, self.sender.clone())? })
Ok(DiscoverHandle {
_handle: CommandHandle::spawn(cmd, self.sender.clone())?,
span: info_span!("discover_command").entered(),
})
}
}
@ -68,6 +72,8 @@ impl DiscoverCommand {
#[derive(Debug)]
pub(crate) struct DiscoverHandle {
_handle: CommandHandle<DiscoverProjectMessage>,
#[allow(dead_code)] // not accessed, but used to log on drop.
span: EnteredSpan,
}
/// An enum containing either progress messages, an error,

View file

@ -46,6 +46,11 @@ pub(crate) struct FetchWorkspaceRequest {
pub(crate) force_crate_graph_reload: bool,
}
pub(crate) struct FetchWorkspaceResponse {
pub(crate) workspaces: Vec<anyhow::Result<ProjectWorkspace>>,
pub(crate) force_crate_graph_reload: bool,
}
// Enforces drop order
pub(crate) struct Handle<H, C> {
pub(crate) handle: H,
@ -111,6 +116,9 @@ pub(crate) struct GlobalState {
pub(crate) vfs_config_version: u32,
pub(crate) vfs_progress_config_version: u32,
pub(crate) vfs_done: bool,
// Used to track how long VFS loading takes. This can't be on `vfs::loader::Handle`,
// as that handle's lifetime is the same as `GlobalState` itself.
pub(crate) vfs_span: Option<tracing::span::EnteredSpan>,
pub(crate) wants_to_switch: Option<Cause>,
/// `workspaces` field stores the data we actually use, while the `OpQueue`
@ -143,8 +151,7 @@ pub(crate) struct GlobalState {
pub(crate) detached_files: FxHashSet<ManifestPath>,
// op queues
pub(crate) fetch_workspaces_queue:
OpQueue<FetchWorkspaceRequest, Option<(Vec<anyhow::Result<ProjectWorkspace>>, bool)>>,
pub(crate) fetch_workspaces_queue: OpQueue<FetchWorkspaceRequest, FetchWorkspaceResponse>,
pub(crate) fetch_build_data_queue:
OpQueue<(), (Arc<Vec<ProjectWorkspace>>, Vec<anyhow::Result<WorkspaceBuildScripts>>)>,
pub(crate) fetch_proc_macros_queue: OpQueue<Vec<ProcMacroPaths>, bool>,
@ -253,6 +260,7 @@ impl GlobalState {
vfs: Arc::new(RwLock::new((vfs::Vfs::default(), IntMap::default()))),
vfs_config_version: 0,
vfs_progress_config_version: 0,
vfs_span: None,
vfs_done: true,
wants_to_switch: None,
@ -498,7 +506,7 @@ impl GlobalState {
mem_docs: self.mem_docs.clone(),
semantic_tokens_cache: Arc::clone(&self.semantic_tokens_cache),
proc_macros_loaded: !self.config.expand_proc_macros()
|| *self.fetch_proc_macros_queue.last_op_result(),
|| self.fetch_proc_macros_queue.last_op_result().copied().unwrap_or(false),
flycheck: self.flycheck.clone(),
}
}

View file

@ -380,7 +380,7 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool {
if id == flycheck.id() {
updated = true;
match package.filter(|_| {
!world.config.flycheck_workspace(source_root_id) || target.is_some()
!world.config.flycheck_workspace(source_root_id) && target.is_some()
}) {
Some(package) => flycheck
.restart_for_package(package, target.clone().map(TupleExt::head)),
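The operator change from `||` to `&&` narrows package-level restarts; a minimal sketch of the corrected condition (values hypothetical):

let flycheck_workspace = false;
let target: Option<&str> = Some("bin");
let package = Some("pkg");
// With `&&`, a package-level restart happens only when workspace-wide checking
// is disabled AND a concrete target is known; in every other case the filter
// yields `None` and the whole workspace is rechecked.
let per_package = package.filter(|_| !flycheck_workspace && target.is_some());
assert_eq!(per_package, Some("pkg"));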

View file

@ -10,9 +10,9 @@ use std::{
use anyhow::Context;
use ide::{
AnnotationConfig, AssistKind, AssistResolveStrategy, Cancellable, FilePosition, FileRange,
HoverAction, HoverGotoTypeData, InlayFieldsToResolve, Query, RangeInfo, ReferenceCategory,
Runnable, RunnableKind, SingleResolve, SourceChange, TextEdit,
AnnotationConfig, AssistKind, AssistResolveStrategy, Cancellable, CompletionFieldsToResolve,
FilePosition, FileRange, HoverAction, HoverGotoTypeData, InlayFieldsToResolve, Query,
RangeInfo, ReferenceCategory, Runnable, RunnableKind, SingleResolve, SourceChange, TextEdit,
};
use ide_db::SymbolKind;
use itertools::Itertools;
@ -1019,9 +1019,11 @@ pub(crate) fn handle_completion(
let items = to_proto::completion_items(
&snap.config,
&completion_config.fields_to_resolve,
&line_index,
snap.file_version(position.file_id),
text_document_position,
completion_trigger_character,
items,
);
@ -1054,36 +1056,70 @@ pub(crate) fn handle_completion_resolve(
};
let source_root = snap.analysis.source_root_id(file_id)?;
let additional_edits = snap
.analysis
.resolve_completion_edits(
&snap.config.completion(Some(source_root)),
FilePosition { file_id, offset },
resolve_data
.imports
.into_iter()
.map(|import| (import.full_import_path, import.imported_name)),
)?
.into_iter()
.flat_map(|edit| edit.into_iter().map(|indel| to_proto::text_edit(&line_index, indel)))
.collect::<Vec<_>>();
let mut forced_resolve_completions_config = snap.config.completion(Some(source_root));
forced_resolve_completions_config.fields_to_resolve = CompletionFieldsToResolve::empty();
if !all_edits_are_disjoint(&original_completion, &additional_edits) {
return Err(LspError::new(
ErrorCode::InternalError as i32,
"Import edit overlaps with the original completion edits, this is not LSP-compliant"
.into(),
)
.into());
let position = FilePosition { file_id, offset };
let Some(resolved_completions) = snap.analysis.completions(
&forced_resolve_completions_config,
position,
resolve_data.trigger_character,
)?
else {
return Ok(original_completion);
};
let resolved_completions = to_proto::completion_items(
&snap.config,
&forced_resolve_completions_config.fields_to_resolve,
&line_index,
snap.file_version(position.file_id),
resolve_data.position,
resolve_data.trigger_character,
resolved_completions,
);
let Some(mut resolved_completion) = resolved_completions.into_iter().find(|completion| {
completion.label == original_completion.label
&& completion.kind == original_completion.kind
&& completion.deprecated == original_completion.deprecated
&& completion.preselect == original_completion.preselect
&& completion.sort_text == original_completion.sort_text
}) else {
return Ok(original_completion);
};
if !resolve_data.imports.is_empty() {
let additional_edits = snap
.analysis
.resolve_completion_edits(
&forced_resolve_completions_config,
position,
resolve_data
.imports
.into_iter()
.map(|import| (import.full_import_path, import.imported_name)),
)?
.into_iter()
.flat_map(|edit| edit.into_iter().map(|indel| to_proto::text_edit(&line_index, indel)))
.collect::<Vec<_>>();
if !all_edits_are_disjoint(&resolved_completion, &additional_edits) {
return Err(LspError::new(
ErrorCode::InternalError as i32,
"Import edit overlaps with the original completion edits, this is not LSP-compliant"
.into(),
)
.into());
}
if let Some(original_additional_edits) = resolved_completion.additional_text_edits.as_mut()
{
original_additional_edits.extend(additional_edits)
} else {
resolved_completion.additional_text_edits = Some(additional_edits);
}
}
if let Some(original_additional_edits) = original_completion.additional_text_edits.as_mut() {
original_additional_edits.extend(additional_edits)
} else {
original_completion.additional_text_edits = Some(additional_edits);
}
Ok(original_completion)
Ok(resolved_completion)
}
pub(crate) fn handle_folding_range(

View file

@ -12,7 +12,8 @@
use hir::ChangeWithProcMacros;
use ide::{
AnalysisHost, CallableSnippets, CompletionConfig, DiagnosticsConfig, FilePosition, TextSize,
AnalysisHost, CallableSnippets, CompletionConfig, CompletionFieldsToResolve, DiagnosticsConfig,
FilePosition, TextSize,
};
use ide_db::{
imports::insert_use::{ImportGranularity, InsertUseConfig},
@ -36,6 +37,8 @@ fn integrated_highlighting_benchmark() {
let cargo_config = CargoConfig {
sysroot: Some(project_model::RustLibSource::Discover),
all_targets: true,
set_test: true,
..CargoConfig::default()
};
let load_cargo_config = LoadCargoConfig {
@ -102,6 +105,8 @@ fn integrated_completion_benchmark() {
let cargo_config = CargoConfig {
sysroot: Some(project_model::RustLibSource::Discover),
all_targets: true,
set_test: true,
..CargoConfig::default()
};
let load_cargo_config = LoadCargoConfig {
@ -168,6 +173,7 @@ fn integrated_completion_benchmark() {
snippets: Vec::new(),
limit: None,
add_semicolon_to_unit: true,
fields_to_resolve: CompletionFieldsToResolve::empty(),
};
let position =
FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() };
@ -215,6 +221,7 @@ fn integrated_completion_benchmark() {
snippets: Vec::new(),
limit: None,
add_semicolon_to_unit: true,
fields_to_resolve: CompletionFieldsToResolve::empty(),
};
let position =
FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() };
@ -260,6 +267,7 @@ fn integrated_completion_benchmark() {
snippets: Vec::new(),
limit: None,
add_semicolon_to_unit: true,
fields_to_resolve: CompletionFieldsToResolve::empty(),
};
let position =
FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() };
@ -279,6 +287,8 @@ fn integrated_diagnostics_benchmark() {
let cargo_config = CargoConfig {
sysroot: Some(project_model::RustLibSource::Discover),
all_targets: true,
set_test: true,
..CargoConfig::default()
};
let load_cargo_config = LoadCargoConfig {

View file

@ -34,6 +34,7 @@ mod handlers {
pub mod tracing {
pub mod config;
pub mod json;
pub use config::Config;
pub mod hprof;
}

View file

@ -448,7 +448,7 @@ impl ClientCapabilities {
.unwrap_or_default()
}
pub fn inlay_hint_resolve_support_properties(&self) -> FxHashSet<String> {
pub fn inlay_hint_resolve_support_properties(&self) -> FxHashSet<&str> {
self.0
.text_document
.as_ref()
@ -457,8 +457,22 @@ impl ClientCapabilities {
.map(|inlay_resolve| inlay_resolve.properties.iter())
.into_iter()
.flatten()
.cloned()
.collect::<FxHashSet<_>>()
.map(|s| s.as_str())
.collect()
}
pub fn completion_resolve_support_properties(&self) -> FxHashSet<&str> {
self.0
.text_document
.as_ref()
.and_then(|text| text.completion.as_ref())
.and_then(|completion_caps| completion_caps.completion_item.as_ref())
.and_then(|completion_item_caps| completion_item_caps.resolve_support.as_ref())
.map(|resolve_support| resolve_support.properties.iter())
.into_iter()
.flatten()
.map(|s| s.as_str())
.collect()
}
pub fn hover_markdown_support(&self) -> bool {

View file

@ -825,6 +825,7 @@ pub struct CompletionResolveData {
pub position: lsp_types::TextDocumentPositionParams,
pub imports: Vec<CompletionImport>,
pub version: Option<i32>,
pub trigger_character: Option<char>,
}
#[derive(Debug, Serialize, Deserialize)]

View file

@ -6,9 +6,9 @@ use std::{
};
use ide::{
Annotation, AnnotationKind, Assist, AssistKind, Cancellable, CompletionItem,
CompletionItemKind, CompletionRelevance, Documentation, FileId, FileRange, FileSystemEdit,
Fold, FoldKind, Highlight, HlMod, HlOperator, HlPunct, HlRange, HlTag, Indel,
Annotation, AnnotationKind, Assist, AssistKind, Cancellable, CompletionFieldsToResolve,
CompletionItem, CompletionItemKind, CompletionRelevance, Documentation, FileId, FileRange,
FileSystemEdit, Fold, FoldKind, Highlight, HlMod, HlOperator, HlPunct, HlRange, HlTag, Indel,
InlayFieldsToResolve, InlayHint, InlayHintLabel, InlayHintLabelPart, InlayKind, Markup,
NavigationTarget, ReferenceCategory, RenameError, Runnable, Severity, SignatureHelp,
SnippetEdit, SourceChange, StructureNodeKind, SymbolKind, TextEdit, TextRange, TextSize,
@ -227,9 +227,11 @@ pub(crate) fn snippet_text_edit_vec(
pub(crate) fn completion_items(
config: &Config,
fields_to_resolve: &CompletionFieldsToResolve,
line_index: &LineIndex,
version: Option<i32>,
tdpp: lsp_types::TextDocumentPositionParams,
completion_trigger_character: Option<char>,
mut items: Vec<CompletionItem>,
) -> Vec<lsp_types::CompletionItem> {
if config.completion_hide_deprecated() {
@ -239,7 +241,17 @@ pub(crate) fn completion_items(
let max_relevance = items.iter().map(|it| it.relevance.score()).max().unwrap_or_default();
let mut res = Vec::with_capacity(items.len());
for item in items {
completion_item(&mut res, config, line_index, version, &tdpp, max_relevance, item);
completion_item(
&mut res,
config,
fields_to_resolve,
line_index,
version,
&tdpp,
max_relevance,
completion_trigger_character,
item,
);
}
if let Some(limit) = config.completion(None).limit {
@ -253,21 +265,33 @@ pub(crate) fn completion_items(
fn completion_item(
acc: &mut Vec<lsp_types::CompletionItem>,
config: &Config,
fields_to_resolve: &CompletionFieldsToResolve,
line_index: &LineIndex,
version: Option<i32>,
tdpp: &lsp_types::TextDocumentPositionParams,
max_relevance: u32,
completion_trigger_character: Option<char>,
item: CompletionItem,
) {
let insert_replace_support = config.insert_replace_support().then_some(tdpp.position);
let ref_match = item.ref_match();
let lookup = item.lookup().to_owned();
let mut additional_text_edits = Vec::new();
let mut something_to_resolve = false;
// LSP does not allow arbitrary edits in completion, so we have to do a
// non-trivial mapping here.
let text_edit = {
let filter_text = if fields_to_resolve.resolve_filter_text {
something_to_resolve = !item.lookup().is_empty();
None
} else {
Some(item.lookup().to_owned())
};
let text_edit = if fields_to_resolve.resolve_text_edit {
something_to_resolve = true;
None
} else {
// LSP does not allow arbitrary edits in completion, so we have to do a
// non-trivial mapping here.
let mut text_edit = None;
let source_range = item.source_range;
for indel in item.text_edit {
@ -290,25 +314,49 @@ fn completion_item(
additional_text_edits.push(text_edit);
}
}
text_edit.unwrap()
Some(text_edit.unwrap())
};
let insert_text_format = item.is_snippet.then_some(lsp_types::InsertTextFormat::SNIPPET);
let tags = item.deprecated.then(|| vec![lsp_types::CompletionItemTag::DEPRECATED]);
let tags = if fields_to_resolve.resolve_tags {
something_to_resolve |= item.deprecated;
None
} else {
item.deprecated.then(|| vec![lsp_types::CompletionItemTag::DEPRECATED])
};
let command = if item.trigger_call_info && config.client_commands().trigger_parameter_hints {
Some(command::trigger_parameter_hints())
if fields_to_resolve.resolve_command {
something_to_resolve = true;
None
} else {
Some(command::trigger_parameter_hints())
}
} else {
None
};
let detail = if fields_to_resolve.resolve_detail {
something_to_resolve |= item.detail.is_some();
None
} else {
item.detail
};
let documentation = if fields_to_resolve.resolve_documentation {
something_to_resolve |= item.documentation.is_some();
None
} else {
item.documentation.map(documentation)
};
let mut lsp_item = lsp_types::CompletionItem {
label: item.label.to_string(),
detail: item.detail,
filter_text: Some(lookup),
detail,
filter_text,
kind: Some(completion_item_kind(item.kind)),
text_edit: Some(text_edit),
text_edit,
additional_text_edits: Some(additional_text_edits),
documentation: item.documentation.map(documentation),
documentation,
deprecated: Some(item.deprecated),
tags,
command,
@ -317,29 +365,40 @@ fn completion_item(
};
if config.completion_label_details_support() {
lsp_item.label_details = Some(lsp_types::CompletionItemLabelDetails {
detail: item.label_detail.as_ref().map(ToString::to_string),
description: lsp_item.detail.clone(),
});
if fields_to_resolve.resolve_label_details {
something_to_resolve = true;
} else {
lsp_item.label_details = Some(lsp_types::CompletionItemLabelDetails {
detail: item.label_detail.as_ref().map(ToString::to_string),
description: lsp_item.detail.clone(),
});
}
} else if let Some(label_detail) = item.label_detail {
lsp_item.label.push_str(label_detail.as_str());
}
set_score(&mut lsp_item, max_relevance, item.relevance);
if config.completion(None).enable_imports_on_the_fly && !item.import_to_add.is_empty() {
let imports = item
.import_to_add
.into_iter()
.map(|(import_path, import_name)| lsp_ext::CompletionImport {
full_import_path: import_path,
imported_name: import_name,
})
.collect::<Vec<_>>();
if !imports.is_empty() {
let data = lsp_ext::CompletionResolveData { position: tdpp.clone(), imports, version };
lsp_item.data = Some(to_value(data).unwrap());
}
let imports =
if config.completion(None).enable_imports_on_the_fly && !item.import_to_add.is_empty() {
item.import_to_add
.into_iter()
.map(|(import_path, import_name)| lsp_ext::CompletionImport {
full_import_path: import_path,
imported_name: import_name,
})
.collect()
} else {
Vec::new()
};
if something_to_resolve || !imports.is_empty() {
let data = lsp_ext::CompletionResolveData {
position: tdpp.clone(),
imports,
version,
trigger_character: completion_trigger_character,
};
lsp_item.data = Some(to_value(data).unwrap());
}
if let Some((label, indel, relevance)) = ref_match {
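A sketch of how client capabilities map onto deferred fields, assuming `CompletionFieldsToResolve` is the plain struct of `bool`s constructed in `Config::completion` earlier in this diff:

// Hypothetical client advertising resolveSupport.properties = ["documentation", "detail"]:
let fields = CompletionFieldsToResolve {
    resolve_documentation: true,   // omitted up front, filled in `completionItem/resolve`
    resolve_detail: true,
    ..CompletionFieldsToResolve::empty()   // every other field is sent eagerly
};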

View file

@ -22,7 +22,9 @@ use crate::{
diagnostics::{fetch_native_diagnostics, DiagnosticsGeneration, NativeDiagnosticsFetchKind},
discover::{DiscoverArgument, DiscoverCommand, DiscoverProjectMessage},
flycheck::{self, FlycheckMessage},
global_state::{file_id_to_url, url_to_file_id, FetchWorkspaceRequest, GlobalState},
global_state::{
file_id_to_url, url_to_file_id, FetchWorkspaceRequest, FetchWorkspaceResponse, GlobalState,
},
hack_recover_crate_name,
handlers::dispatch::{NotificationDispatcher, RequestDispatcher},
lsp::{
@ -695,9 +697,9 @@ impl GlobalState {
let (state, msg) = match progress {
ProjectWorkspaceProgress::Begin => (Progress::Begin, None),
ProjectWorkspaceProgress::Report(msg) => (Progress::Report, Some(msg)),
ProjectWorkspaceProgress::End(workspaces, force_reload_crate_graph) => {
self.fetch_workspaces_queue
.op_completed(Some((workspaces, force_reload_crate_graph)));
ProjectWorkspaceProgress::End(workspaces, force_crate_graph_reload) => {
let resp = FetchWorkspaceResponse { workspaces, force_crate_graph_reload };
self.fetch_workspaces_queue.op_completed(resp);
if let Err(e) = self.fetch_workspace_error() {
error!("FetchWorkspaceError: {e}");
}
@ -794,13 +796,20 @@ impl GlobalState {
}
}
vfs::loader::Message::Progress { n_total, n_done, dir, config_version } => {
let _p = tracing::info_span!("GlobalState::handle_vfs_mgs/progress").entered();
let _p = span!(Level::INFO, "GlobalState::handle_vfs_mgs/progress").entered();
always!(config_version <= self.vfs_config_version);
let (n_done, state) = match n_done {
LoadingProgress::Started => (0, Progress::Begin),
LoadingProgress::Started => {
self.vfs_span =
Some(span!(Level::INFO, "vfs_load", total = n_total).entered());
(0, Progress::Begin)
}
LoadingProgress::Progress(n_done) => (n_done.min(n_total), Progress::Report),
LoadingProgress::Finished => (n_total, Progress::End),
LoadingProgress::Finished => {
self.vfs_span = None;
(n_total, Progress::End)
}
};
self.vfs_progress_config_version = config_version;
@ -881,6 +890,7 @@ impl GlobalState {
.expect("No title could be found; this is a bug");
match message {
DiscoverProjectMessage::Finished { project, buildfile } => {
self.discover_handle = None;
self.report_progress(&title, Progress::End, None, None, None);
self.discover_workspace_queue.op_completed(());
@ -892,6 +902,7 @@ impl GlobalState {
self.report_progress(&title, Progress::Report, Some(message), None, None)
}
DiscoverProjectMessage::Error { error, source } => {
self.discover_handle = None;
let message = format!("Project discovery failed: {error}");
self.discover_workspace_queue.op_completed(());
self.show_and_log_error(message.clone(), source);

View file

@ -27,12 +27,12 @@ pub(crate) type Cause = String;
pub(crate) struct OpQueue<Args = (), Output = ()> {
op_requested: Option<(Cause, Args)>,
op_in_progress: bool,
last_op_result: Output,
last_op_result: Option<Output>,
}
impl<Args, Output: Default> Default for OpQueue<Args, Output> {
impl<Args, Output> Default for OpQueue<Args, Output> {
fn default() -> Self {
Self { op_requested: None, op_in_progress: false, last_op_result: Default::default() }
Self { op_requested: None, op_in_progress: false, last_op_result: None }
}
}
@ -56,12 +56,12 @@ impl<Args, Output> OpQueue<Args, Output> {
pub(crate) fn op_completed(&mut self, result: Output) {
assert!(self.op_in_progress);
self.op_in_progress = false;
self.last_op_result = result;
self.last_op_result = Some(result);
}
/// Get the result of the last operation.
pub(crate) fn last_op_result(&self) -> &Output {
&self.last_op_result
pub(crate) fn last_op_result(&self) -> Option<&Output> {
self.last_op_result.as_ref()
}
// Is there an operation that has started, but hasn't yet finished?
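The `Option` wrapper drops the `Default` bound on `Output` and distinguishes "never ran" from a default value. A small usage sketch (`request_op` and `should_start_op` are existing methods of this file, not shown in the hunk):

let mut queue: OpQueue<(), u32> = OpQueue::default();
assert!(queue.last_op_result().is_none());    // nothing has completed yet

queue.request_op("cause".to_owned(), ());
let _req = queue.should_start_op();           // marks the op as in progress
queue.op_completed(42);
assert_eq!(queue.last_op_result(), Some(&42));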

View file

@ -33,7 +33,7 @@ use vfs::{AbsPath, AbsPathBuf, ChangeKind};
use crate::{
config::{Config, FilesWatcher, LinkedProject},
flycheck::{FlycheckConfig, FlycheckHandle},
global_state::{FetchWorkspaceRequest, GlobalState},
global_state::{FetchWorkspaceRequest, FetchWorkspaceResponse, GlobalState},
lsp_ext,
main_loop::{DiscoverProjectParam, Task},
op_queue::Cause,
@ -448,15 +448,15 @@ impl GlobalState {
let _p = tracing::info_span!("GlobalState::switch_workspaces").entered();
tracing::info!(%cause, "will switch workspaces");
let Some((workspaces, force_reload_crate_graph)) =
let Some(FetchWorkspaceResponse { workspaces, force_crate_graph_reload }) =
self.fetch_workspaces_queue.last_op_result()
else {
return;
};
info!(%cause, ?force_reload_crate_graph);
info!(%cause, ?force_crate_graph_reload);
if self.fetch_workspace_error().is_err() && !self.workspaces.is_empty() {
if *force_reload_crate_graph {
if *force_crate_graph_reload {
self.recreate_crate_graph(cause);
}
// It only makes sense to switch to a partially broken workspace
@ -474,8 +474,12 @@ impl GlobalState {
.all(|(l, r)| l.eq_ignore_build_data(r));
if same_workspaces {
let (workspaces, build_scripts) = self.fetch_build_data_queue.last_op_result();
if Arc::ptr_eq(workspaces, &self.workspaces) {
let (workspaces, build_scripts) = match self.fetch_build_data_queue.last_op_result() {
Some((workspaces, build_scripts)) => (workspaces.clone(), build_scripts.as_slice()),
None => (Default::default(), Default::default()),
};
if Arc::ptr_eq(&workspaces, &self.workspaces) {
info!("set build scripts to workspaces");
let workspaces = workspaces
@ -492,7 +496,7 @@ impl GlobalState {
self.workspaces = Arc::new(workspaces);
} else {
info!("build scripts do not match the version of the active workspace");
if *force_reload_crate_graph {
if *force_crate_graph_reload {
self.recreate_crate_graph(cause);
}
@ -739,22 +743,18 @@ impl GlobalState {
pub(super) fn fetch_workspace_error(&self) -> Result<(), String> {
let mut buf = String::new();
let Some((last_op_result, _)) = self.fetch_workspaces_queue.last_op_result() else {
let Some(FetchWorkspaceResponse { workspaces, .. }) =
self.fetch_workspaces_queue.last_op_result()
else {
return Ok(());
};
if !self.discover_workspace_queue.op_in_progress() {
if last_op_result.is_empty() {
stdx::format_to!(buf, "rust-analyzer failed to discover workspace");
} else {
for ws in last_op_result {
if let Err(err) = ws {
stdx::format_to!(
buf,
"rust-analyzer failed to load workspace: {:#}\n",
err
);
}
if workspaces.is_empty() && self.config.discover_workspace_config().is_none() {
stdx::format_to!(buf, "rust-analyzer failed to fetch workspace");
} else {
for ws in workspaces {
if let Err(err) = ws {
stdx::format_to!(buf, "rust-analyzer failed to load workspace: {:#}\n", err);
}
}
}
@ -769,7 +769,11 @@ impl GlobalState {
pub(super) fn fetch_build_data_error(&self) -> Result<(), String> {
let mut buf = String::new();
for ws in &self.fetch_build_data_queue.last_op_result().1 {
let Some((_, ws)) = &self.fetch_build_data_queue.last_op_result() else {
return Ok(());
};
for ws in ws {
match ws {
Ok(data) => {
if let Some(stderr) = data.error() {

View file

@ -1,17 +1,20 @@
//! Simple logger that logs either to stderr or to a file, using `tracing_subscriber`
//! filter syntax and `tracing_appender` for non blocking output.
use std::io;
use std::io::{self};
use anyhow::Context;
use tracing::level_filters::LevelFilter;
use tracing_subscriber::{
filter::Targets, fmt::MakeWriter, layer::SubscriberExt, util::SubscriberInitExt, Layer,
Registry,
filter::{filter_fn, Targets},
fmt::MakeWriter,
layer::SubscriberExt,
Layer, Registry,
};
use tracing_tree::HierarchicalLayer;
use crate::tracing::hprof;
use crate::tracing::json;
#[derive(Debug)]
pub struct Config<T> {
@ -34,6 +37,12 @@ pub struct Config<T> {
/// env RA_PROFILE=*@3>10 // dump everything, up to depth 3, if it takes more than 10 ms
/// ```
pub profile_filter: Option<String>,
/// Filtering syntax, set in a shell:
/// ```
/// env RA_PROFILE_JSON=foo|bar|baz
/// ```
pub json_profile_filter: Option<String>,
}
impl<T> Config<T>
@ -41,7 +50,7 @@ where
T: for<'writer> MakeWriter<'writer> + Send + Sync + 'static,
{
pub fn init(self) -> anyhow::Result<()> {
let filter: Targets = self
let targets_filter: Targets = self
.filter
.parse()
.with_context(|| format!("invalid log filter: `{}`", self.filter))?;
@ -50,31 +59,60 @@ where
let ra_fmt_layer = tracing_subscriber::fmt::layer()
.with_target(false)
.with_ansi(false)
.with_writer(writer)
.with_filter(filter);
.with_filter(targets_filter);
let mut chalk_layer = None;
if let Some(chalk_filter) = self.chalk_filter {
let level: LevelFilter =
chalk_filter.parse().with_context(|| "invalid chalk log filter")?;
let chalk_layer = match self.chalk_filter {
Some(chalk_filter) => {
let level: LevelFilter =
chalk_filter.parse().with_context(|| "invalid chalk log filter")?;
let chalk_filter = Targets::new()
.with_target("chalk_solve", level)
.with_target("chalk_ir", level)
.with_target("chalk_recursive", level);
chalk_layer = Some(
let chalk_filter = Targets::new()
.with_target("chalk_solve", level)
.with_target("chalk_ir", level)
.with_target("chalk_recursive", level);
// TODO: remove `.with_filter(LevelFilter::OFF)` on the `None` branch.
HierarchicalLayer::default()
.with_indent_lines(true)
.with_ansi(false)
.with_indent_amount(2)
.with_writer(io::stderr)
.with_filter(chalk_filter),
);
.with_filter(chalk_filter)
.boxed()
}
None => None::<HierarchicalLayer>.with_filter(LevelFilter::OFF).boxed(),
};
let profiler_layer = self.profile_filter.map(|spec| hprof::layer(&spec));
// TODO: remove `.with_filter(LevelFilter::OFF)` on the `None` branch.
let profiler_layer = match self.profile_filter {
Some(spec) => Some(hprof::SpanTree::new(&spec)).with_filter(LevelFilter::INFO),
None => None.with_filter(LevelFilter::OFF),
};
Registry::default().with(ra_fmt_layer).with(chalk_layer).with(profiler_layer).try_init()?;
let json_profiler_layer = match self.json_profile_filter {
Some(spec) => {
let filter = json::JsonFilter::from_spec(&spec);
let filter = filter_fn(move |metadata| {
let allowed = match &filter.allowed_names {
Some(names) => names.contains(metadata.name()),
None => true,
};
allowed && metadata.is_span()
});
Some(json::TimingLayer::new(std::io::stderr).with_filter(filter))
}
None => None,
};
let subscriber = Registry::default()
.with(ra_fmt_layer)
.with(json_profiler_layer)
.with(profiler_layer)
.with(chalk_layer);
tracing::subscriber::set_global_default(subscriber)?;
Ok(())
}

View file

@ -33,6 +33,7 @@
use std::{
fmt::Write,
marker::PhantomData,
mem,
time::{Duration, Instant},
};
@ -50,53 +51,42 @@ use tracing_subscriber::{
Layer, Registry,
};
use crate::tracing::hprof;
pub fn init(spec: &str) -> tracing::subscriber::DefaultGuard {
let subscriber = Registry::default().with(layer(spec));
let subscriber = Registry::default().with(SpanTree::new(spec));
tracing::subscriber::set_default(subscriber)
}
pub fn layer<S>(spec: &str) -> impl Layer<S>
#[derive(Debug)]
pub(crate) struct SpanTree<S> {
aggregate: bool,
write_filter: WriteFilter,
_inner: PhantomData<fn(S)>,
}
impl<S> SpanTree<S>
where
S: Subscriber + for<'span> tracing_subscriber::registry::LookupSpan<'span>,
{
let (write_filter, allowed_names) = WriteFilter::from_spec(spec);
pub(crate) fn new(spec: &str) -> impl Layer<S> {
let (write_filter, allowed_names) = WriteFilter::from_spec(spec);
// this filter the first pass for `tracing`: these are all the "profiling" spans, but things like
// span depth or duration are not filtered here: that only occurs at write time.
let profile_filter = filter::filter_fn(move |metadata| {
let allowed = match &allowed_names {
Some(names) => names.contains(metadata.name()),
None => true,
};
// this filter the first pass for `tracing`: these are all the "profiling" spans, but things like
// span depth or duration are not filtered here: that only occurs at write time.
let profile_filter = filter::filter_fn(move |metadata| {
let allowed = match &allowed_names {
Some(names) => names.contains(metadata.name()),
None => true,
};
allowed
&& metadata.is_span()
&& metadata.level() >= &Level::INFO
&& !metadata.target().starts_with("salsa")
&& metadata.name() != "compute_exhaustiveness_and_usefulness"
&& !metadata.target().starts_with("chalk")
});
allowed
&& metadata.is_span()
&& metadata.level() >= &Level::INFO
&& !metadata.target().starts_with("salsa")
&& metadata.name() != "compute_exhaustiveness_and_usefulness"
&& !metadata.target().starts_with("chalk")
});
hprof::SpanTree::default().aggregate(true).spec_filter(write_filter).with_filter(profile_filter)
}
#[derive(Default, Debug)]
pub(crate) struct SpanTree {
aggregate: bool,
write_filter: WriteFilter,
}
impl SpanTree {
/// Merge identical sibling spans together.
pub(crate) fn aggregate(self, yes: bool) -> SpanTree {
SpanTree { aggregate: yes, ..self }
}
/// Add a write-time filter for span duration or tree depth.
pub(crate) fn spec_filter(self, write_filter: WriteFilter) -> SpanTree {
SpanTree { write_filter, ..self }
Self { aggregate: true, write_filter, _inner: PhantomData }.with_filter(profile_filter)
}
}
@ -136,7 +126,7 @@ impl<'a> Visit for DataVisitor<'a> {
}
}
impl<S> Layer<S> for SpanTree
impl<S> Layer<S> for SpanTree<S>
where
S: Subscriber + for<'span> LookupSpan<'span>,
{

View file

@ -0,0 +1,90 @@
//! A [tracing_subscriber::layer::Layer] that exports newline-delimited JSON.
//!
//! Usage:
//!
//! ```rust
//! let layer = json::TimingLayer::new(std::io::stderr);
//! Registry::default().with(layer).init();
//! ```
use std::{io::Write as _, marker::PhantomData, time::Instant};
use ide_db::FxHashSet;
use tracing::{
span::{Attributes, Id},
Event, Subscriber,
};
use tracing_subscriber::{fmt::MakeWriter, layer::Context, registry::LookupSpan, Layer};
struct JsonData {
name: &'static str,
start: std::time::Instant,
}
impl JsonData {
fn new(name: &'static str) -> Self {
Self { name, start: Instant::now() }
}
}
#[derive(Debug)]
pub(crate) struct TimingLayer<S, W> {
writer: W,
_inner: PhantomData<fn(S)>,
}
impl<S, W> TimingLayer<S, W> {
pub(crate) fn new(writer: W) -> Self {
Self { writer, _inner: PhantomData }
}
}
impl<S, W> Layer<S> for TimingLayer<S, W>
where
S: Subscriber + for<'span> LookupSpan<'span>,
W: for<'writer> MakeWriter<'writer> + Send + Sync + 'static,
{
fn on_new_span(&self, attrs: &Attributes<'_>, id: &Id, ctx: Context<'_, S>) {
let span = ctx.span(id).unwrap();
let data = JsonData::new(attrs.metadata().name());
span.extensions_mut().insert(data);
}
fn on_event(&self, _event: &Event<'_>, _ctx: Context<'_, S>) {}
fn on_close(&self, id: Id, ctx: Context<'_, S>) {
#[derive(serde::Serialize)]
struct JsonDataInner {
name: &'static str,
elapsed_ms: u128,
}
let span = ctx.span(&id).unwrap();
let Some(data) = span.extensions_mut().remove::<JsonData>() else {
return;
};
let data = JsonDataInner { name: data.name, elapsed_ms: data.start.elapsed().as_millis() };
let mut out = serde_json::to_string(&data).expect("Unable to serialize data");
out.push('\n');
self.writer.make_writer().write_all(out.as_bytes()).expect("Unable to write data");
}
}
#[derive(Default, Clone, Debug)]
pub(crate) struct JsonFilter {
pub(crate) allowed_names: Option<FxHashSet<String>>,
}
impl JsonFilter {
pub(crate) fn from_spec(spec: &str) -> Self {
let allowed_names = if spec == "*" {
None
} else {
Some(FxHashSet::from_iter(spec.split('|').map(String::from)))
};
Self { allowed_names }
}
}
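Filter behavior, illustrated (spec strings hypothetical):

let some = JsonFilter::from_spec("parse|expand");
assert!(some.allowed_names.unwrap().contains("parse"));

let all = JsonFilter::from_spec("*");
assert!(all.allowed_names.is_none());   // `*` disables name filtering entirely

// Each closed span then emits one newline-delimited record on stderr, e.g.:
// {"name":"parse","elapsed_ms":12}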

View file

@ -103,6 +103,7 @@ impl Project<'_> {
filter: std::env::var("RA_LOG").ok().unwrap_or_else(|| "error".to_owned()),
chalk_filter: std::env::var("CHALK_DEBUG").ok(),
profile_filter: std::env::var("RA_PROFILE").ok(),
json_profile_filter: std::env::var("RA_PROFILE_JSON").ok(),
};
});

View file

@ -142,3 +142,12 @@ pub enum Transparency {
/// Def-site spans in procedural macros, identifiers from `macro` by default use this.
Opaque,
}
impl Transparency {
/// Returns `true` if the transparency is [`Opaque`].
///
/// [`Opaque`]: Transparency::Opaque
pub fn is_opaque(&self) -> bool {
matches!(self, Self::Opaque)
}
}

View file

@ -55,7 +55,10 @@ where
/// Returns all [`TextRange`]s that correspond to the given span.
///
/// Note this does a linear search through the entire backing vector.
pub fn ranges_with_span_exact(&self, span: SpanData<S>) -> impl Iterator<Item = TextRange> + '_
pub fn ranges_with_span_exact(
&self,
span: SpanData<S>,
) -> impl Iterator<Item = (TextRange, S)> + '_
where
S: Copy,
{
@ -64,14 +67,14 @@ where
return None;
}
let start = idx.checked_sub(1).map_or(TextSize::new(0), |prev| self.spans[prev].0);
Some(TextRange::new(start, end))
Some((TextRange::new(start, end), s.ctx))
})
}
/// Returns all [`TextRange`]s whose spans contain the given span.
///
/// Note this does a linear search through the entire backing vector.
pub fn ranges_with_span(&self, span: SpanData<S>) -> impl Iterator<Item = TextRange> + '_
pub fn ranges_with_span(&self, span: SpanData<S>) -> impl Iterator<Item = (TextRange, S)> + '_
where
S: Copy,
{
@ -83,7 +86,7 @@ where
return None;
}
let start = idx.checked_sub(1).map_or(TextSize::new(0), |prev| self.spans[prev].0);
Some(TextRange::new(start, end))
Some((TextRange::new(start, end), s.ctx))
})
}
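Callers now receive the syntax context alongside each range and can filter by hygiene before using the ranges; a hedged usage sketch (variable names hypothetical, requires `S: PartialEq`):

for (range, ctx) in span_map.ranges_with_span(query_span) {
    if ctx != query_span.ctx {
        continue;   // e.g. drop ranges from a different hygiene context
    }
    ranges.push(range);
}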

View file

@ -148,6 +148,7 @@ pub fn token_tree_to_syntax_node<Ctx>(
) -> (Parse<SyntaxNode>, SpanMap<Ctx>)
where
SpanData<Ctx>: Copy + fmt::Debug,
Ctx: PartialEq,
{
let buffer = match tt {
tt::Subtree {
@ -892,6 +893,7 @@ fn delim_to_str(d: tt::DelimiterKind, closing: bool) -> Option<&'static str> {
impl<Ctx> TtTreeSink<'_, Ctx>
where
SpanData<Ctx>: Copy + fmt::Debug,
Ctx: PartialEq,
{
/// Parses a float literal as if it were one or two name ref nodes with a dot in between.
/// This occurs when a float literal is used as a field access.
@ -949,6 +951,7 @@ where
}
let mut last = self.cursor;
let mut combined_span = None;
'tokens: for _ in 0..n_tokens {
let tmp: u8;
if self.cursor.eof() {
@ -982,7 +985,10 @@ where
format_to!(self.buf, "{lit}");
debug_assert_ne!(self.buf.len() - buf_l, 0);
self.text_pos += TextSize::new((self.buf.len() - buf_l) as u32);
self.token_map.push(self.text_pos, lit.span);
combined_span = match combined_span {
None => Some(lit.span),
Some(prev_span) => Some(Self::merge_spans(prev_span, lit.span)),
};
self.cursor = self.cursor.bump();
continue 'tokens;
}
@ -1006,9 +1012,13 @@ where
};
self.buf += text;
self.text_pos += TextSize::of(text);
self.token_map.push(self.text_pos, span);
combined_span = match combined_span {
None => Some(span),
Some(prev_span) => Some(Self::merge_spans(prev_span, span)),
}
}
self.token_map.push(self.text_pos, combined_span.expect("expected at least one token"));
self.inner.token(kind, self.buf.as_str());
self.buf.clear();
// FIXME: Emitting whitespace for this is really just a hack, we should get rid of it.
@ -1043,4 +1053,22 @@ where
fn error(&mut self, error: String) {
self.inner.error(error, self.text_pos)
}
fn merge_spans(a: SpanData<Ctx>, b: SpanData<Ctx>) -> SpanData<Ctx> {
// We don't do exactly what rustc does: rustc does something clever when the spans have different
// syntax contexts, but that runs afoul of our separation between `span` and `hir-expand`.
SpanData {
range: if a.ctx == b.ctx {
TextRange::new(
std::cmp::min(a.range.start(), b.range.start()),
std::cmp::max(a.range.end(), b.range.end()),
)
} else {
// Combining ranges makes no sense when they come from different syntax contexts.
a.range
},
anchor: a.anchor,
ctx: a.ctx,
}
}
}
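// Editor's note, not part of this commit: the merge semantics in brief, for
// spans `a` and `b` (ranges illustrative):
//
//     same ctx:      the covering range, min(starts)..max(ends)
//     different ctx: `a.range` unchanged, `b`'s range is ignored
//
// In both cases the anchor and ctx of the first span win.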

View file

@ -8,6 +8,7 @@ pub mod make;
mod node_ext;
mod operators;
pub mod prec;
pub mod syntax_factory;
mod token_ext;
mod traits;
@ -166,7 +167,7 @@ mod support {
}
#[test]
fn assert_ast_is_object_safe() {
fn assert_ast_is_dyn_compatible() {
fn _f(_: &dyn AstNode, _: &dyn HasName) {}
}

View file

@ -0,0 +1,45 @@
//! Builds upon [`crate::ast::make`] constructors to create ast fragments with
//! optional syntax mappings.
//!
//! Rather than forcing make constructors to perform syntax mapping, we
//! let [`SyntaxFactory`] handle constructing the mappings. Care must be taken
//! to remember to feed the syntax mappings into a [`SyntaxEditor`](crate::syntax_editor::SyntaxEditor),
//! if applicable.
mod constructors;
use std::cell::{RefCell, RefMut};
use crate::syntax_editor::SyntaxMapping;
pub struct SyntaxFactory {
// Stored in a `RefCell` so that the factory methods can take `&self`
mappings: Option<RefCell<SyntaxMapping>>,
}
impl SyntaxFactory {
/// Creates a new [`SyntaxFactory`], generating mappings between input nodes and generated nodes.
pub fn new() -> Self {
Self { mappings: Some(RefCell::new(SyntaxMapping::new())) }
}
/// Creates a [`SyntaxFactory`] without generating mappings.
pub fn without_mappings() -> Self {
Self { mappings: None }
}
/// Gets all of the tracked syntax mappings, if any.
pub fn finish_with_mappings(self) -> SyntaxMapping {
self.mappings.unwrap_or_default().into_inner()
}
fn mappings(&self) -> Option<RefMut<'_, SyntaxMapping>> {
self.mappings.as_ref().map(|it| it.borrow_mut())
}
}
impl Default for SyntaxFactory {
fn default() -> Self {
Self::without_mappings()
}
}
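// Editor's sketch, not part of this commit: the intended usage pattern, as
// exercised by the `syntax_editor` tests updated in this commit. Build nodes
// through the factory, then hand the collected mappings to the editor
// (`root`, `to_wrap`, and `tail_expr` are placeholders):
//
//     let mut editor = SyntaxEditor::new(root.syntax().clone());
//     let make = SyntaxFactory::new();
//     let new_block = make.block_expr([], Some(tail_expr));
//     editor.replace(to_wrap.syntax(), new_block.syntax());
//     editor.add_mappings(make.finish_with_mappings());
//     let edit = editor.finish();
//
// Use `SyntaxFactory::without_mappings()` when no editor needs the mappings.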

View file

@ -0,0 +1,110 @@
//! Wrappers over [`make`] constructors
use itertools::Itertools;
use crate::{
ast::{self, make, HasName},
syntax_editor::SyntaxMappingBuilder,
AstNode,
};
use super::SyntaxFactory;
impl SyntaxFactory {
pub fn name(&self, name: &str) -> ast::Name {
make::name(name).clone_for_update()
}
pub fn ident_pat(&self, ref_: bool, mut_: bool, name: ast::Name) -> ast::IdentPat {
let ast = make::ident_pat(ref_, mut_, name.clone()).clone_for_update();
if let Some(mut mapping) = self.mappings() {
let mut builder = SyntaxMappingBuilder::new(ast.syntax().clone());
builder.map_node(name.syntax().clone(), ast.name().unwrap().syntax().clone());
builder.finish(&mut mapping);
}
ast
}
pub fn block_expr(
&self,
stmts: impl IntoIterator<Item = ast::Stmt>,
tail_expr: Option<ast::Expr>,
) -> ast::BlockExpr {
let stmts = stmts.into_iter().collect_vec();
let input = stmts.iter().map(|it| it.syntax().clone()).collect_vec();
let ast = make::block_expr(stmts, tail_expr.clone()).clone_for_update();
if let Some((mut mapping, stmt_list)) = self.mappings().zip(ast.stmt_list()) {
let mut builder = SyntaxMappingBuilder::new(stmt_list.syntax().clone());
builder.map_children(
input.into_iter(),
stmt_list.statements().map(|it| it.syntax().clone()),
);
if let Some((input, output)) = tail_expr.zip(stmt_list.tail_expr()) {
builder.map_node(input.syntax().clone(), output.syntax().clone());
}
builder.finish(&mut mapping);
}
ast
}
pub fn expr_path(&self, path: ast::Path) -> ast::Expr {
let ast::Expr::PathExpr(ast) = make::expr_path(path.clone()).clone_for_update() else {
unreachable!()
};
if let Some(mut mapping) = self.mappings() {
let mut builder = SyntaxMappingBuilder::new(ast.syntax().clone());
builder.map_node(path.syntax().clone(), ast.path().unwrap().syntax().clone());
builder.finish(&mut mapping);
}
ast.into()
}
pub fn expr_ref(&self, expr: ast::Expr, exclusive: bool) -> ast::Expr {
let ast::Expr::RefExpr(ast) = make::expr_ref(expr.clone(), exclusive).clone_for_update()
else {
unreachable!()
};
if let Some(mut mapping) = self.mappings() {
let mut builder = SyntaxMappingBuilder::new(ast.syntax().clone());
builder.map_node(expr.syntax().clone(), ast.expr().unwrap().syntax().clone());
builder.finish(&mut mapping);
}
ast.into()
}
pub fn let_stmt(
&self,
pattern: ast::Pat,
ty: Option<ast::Type>,
initializer: Option<ast::Expr>,
) -> ast::LetStmt {
let ast =
make::let_stmt(pattern.clone(), ty.clone(), initializer.clone()).clone_for_update();
if let Some(mut mapping) = self.mappings() {
let mut builder = SyntaxMappingBuilder::new(ast.syntax().clone());
builder.map_node(pattern.syntax().clone(), ast.pat().unwrap().syntax().clone());
if let Some(input) = ty {
builder.map_node(input.syntax().clone(), ast.ty().unwrap().syntax().clone());
}
if let Some(input) = initializer {
builder
.map_node(input.syntax().clone(), ast.initializer().unwrap().syntax().clone());
}
builder.finish(&mut mapping);
}
ast
}
}
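// Editor's sketch, not part of this commit: these constructors compose, so a
// whole `let` statement can be built while every input node stays mapped
// (`init_expr` is a placeholder for a pre-existing `ast::Expr`):
//
//     let make = SyntaxFactory::new();
//     let pat = make.ident_pat(false, false, make.name("x"));
//     let stmt = make.let_stmt(pat.into(), None, Some(init_expr));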

View file

@ -100,6 +100,10 @@ impl SyntaxEditor {
pub fn finish(self) -> SyntaxEdit {
edit_algo::apply_edits(self)
}
pub fn add_mappings(&mut self, other: SyntaxMapping) {
self.mappings.merge(other);
}
}
/// Represents a completed [`SyntaxEditor`] operation.
@ -319,85 +323,14 @@ fn is_ancestor_or_self_of_element(node: &SyntaxElement, ancestor: &SyntaxNode) -
#[cfg(test)]
mod tests {
use expect_test::expect;
use itertools::Itertools;
use crate::{
ast::{self, make, HasName},
ast::{self, make, syntax_factory::SyntaxFactory},
AstNode,
};
use super::*;
fn make_ident_pat(
editor: Option<&mut SyntaxEditor>,
ref_: bool,
mut_: bool,
name: ast::Name,
) -> ast::IdentPat {
let ast = make::ident_pat(ref_, mut_, name.clone()).clone_for_update();
if let Some(editor) = editor {
let mut mapping = SyntaxMappingBuilder::new(ast.syntax().clone());
mapping.map_node(name.syntax().clone(), ast.name().unwrap().syntax().clone());
mapping.finish(editor);
}
ast
}
fn make_let_stmt(
editor: Option<&mut SyntaxEditor>,
pattern: ast::Pat,
ty: Option<ast::Type>,
initializer: Option<ast::Expr>,
) -> ast::LetStmt {
let ast =
make::let_stmt(pattern.clone(), ty.clone(), initializer.clone()).clone_for_update();
if let Some(editor) = editor {
let mut mapping = SyntaxMappingBuilder::new(ast.syntax().clone());
mapping.map_node(pattern.syntax().clone(), ast.pat().unwrap().syntax().clone());
if let Some(input) = ty {
mapping.map_node(input.syntax().clone(), ast.ty().unwrap().syntax().clone());
}
if let Some(input) = initializer {
mapping
.map_node(input.syntax().clone(), ast.initializer().unwrap().syntax().clone());
}
mapping.finish(editor);
}
ast
}
fn make_block_expr(
editor: Option<&mut SyntaxEditor>,
stmts: impl IntoIterator<Item = ast::Stmt>,
tail_expr: Option<ast::Expr>,
) -> ast::BlockExpr {
let stmts = stmts.into_iter().collect_vec();
let input = stmts.iter().map(|it| it.syntax().clone()).collect_vec();
let ast = make::block_expr(stmts, tail_expr.clone()).clone_for_update();
if let Some((editor, stmt_list)) = editor.zip(ast.stmt_list()) {
let mut mapping = SyntaxMappingBuilder::new(stmt_list.syntax().clone());
mapping.map_children(
input.into_iter(),
stmt_list.statements().map(|it| it.syntax().clone()),
);
if let Some((input, output)) = tail_expr.zip(stmt_list.tail_expr()) {
mapping.map_node(input.syntax().clone(), output.syntax().clone());
}
mapping.finish(editor);
}
ast
}
#[test]
fn basic_usage() {
let root = make::match_arm(
@ -417,6 +350,7 @@ mod tests {
let to_replace = root.syntax().descendants().find_map(ast::BinExpr::cast).unwrap();
let mut editor = SyntaxEditor::new(root.syntax().clone());
let make = SyntaxFactory::new();
let name = make::name("var_name");
let name_ref = make::name_ref("var_name").clone_for_update();
@ -425,21 +359,20 @@ mod tests {
editor.add_annotation(name.syntax(), placeholder_snippet);
editor.add_annotation(name_ref.syntax(), placeholder_snippet);
let make_ident_pat = make_ident_pat(Some(&mut editor), false, false, name);
let make_let_stmt = make_let_stmt(
Some(&mut editor),
make_ident_pat.into(),
None,
Some(to_replace.clone().into()),
);
let new_block = make_block_expr(
Some(&mut editor),
[make_let_stmt.into()],
let new_block = make.block_expr(
[make
.let_stmt(
make.ident_pat(false, false, name.clone()).into(),
None,
Some(to_replace.clone().into()),
)
.into()],
Some(to_wrap.clone().into()),
);
editor.replace(to_replace.syntax(), name_ref.syntax());
editor.replace(to_wrap.syntax(), new_block.syntax());
editor.add_mappings(make.finish_with_mappings());
let edit = editor.finish();
@ -473,11 +406,11 @@ mod tests {
let second_let = root.syntax().descendants().find_map(ast::LetStmt::cast).unwrap();
let mut editor = SyntaxEditor::new(root.syntax().clone());
let make = SyntaxFactory::without_mappings();
editor.insert(
Position::first_child_of(root.stmt_list().unwrap().syntax()),
make_let_stmt(
None,
make.let_stmt(
make::ext::simple_ident_pat(make::name("first")).into(),
None,
Some(make::expr_literal("1").into()),
@ -487,8 +420,7 @@ mod tests {
editor.insert(
Position::after(second_let.syntax()),
make_let_stmt(
None,
make.let_stmt(
make::ext::simple_ident_pat(make::name("third")).into(),
None,
Some(make::expr_literal("3").into()),
@ -528,19 +460,17 @@ mod tests {
let second_let = root.syntax().descendants().find_map(ast::LetStmt::cast).unwrap();
let mut editor = SyntaxEditor::new(root.syntax().clone());
let make = SyntaxFactory::new();
let new_block_expr =
make_block_expr(Some(&mut editor), [], Some(ast::Expr::BlockExpr(inner_block.clone())));
let new_block_expr = make.block_expr([], Some(ast::Expr::BlockExpr(inner_block.clone())));
let first_let = make_let_stmt(
Some(&mut editor),
let first_let = make.let_stmt(
make::ext::simple_ident_pat(make::name("first")).into(),
None,
Some(make::expr_literal("1").into()),
);
let third_let = make_let_stmt(
Some(&mut editor),
let third_let = make.let_stmt(
make::ext::simple_ident_pat(make::name("third")).into(),
None,
Some(make::expr_literal("3").into()),
@ -552,6 +482,7 @@ mod tests {
);
editor.insert(Position::after(second_let.syntax()), third_let.syntax());
editor.replace(inner_block.syntax(), new_block_expr.syntax());
editor.add_mappings(make.finish_with_mappings());
let edit = editor.finish();
@ -581,12 +512,11 @@ mod tests {
let inner_block = root.clone();
let mut editor = SyntaxEditor::new(root.syntax().clone());
let make = SyntaxFactory::new();
let new_block_expr =
make_block_expr(Some(&mut editor), [], Some(ast::Expr::BlockExpr(inner_block.clone())));
let new_block_expr = make.block_expr([], Some(ast::Expr::BlockExpr(inner_block.clone())));
let first_let = make_let_stmt(
Some(&mut editor),
let first_let = make.let_stmt(
make::ext::simple_ident_pat(make::name("first")).into(),
None,
Some(make::expr_literal("1").into()),
@ -597,6 +527,7 @@ mod tests {
first_let.syntax(),
);
editor.replace(inner_block.syntax(), new_block_expr.syntax());
editor.add_mappings(make.finish_with_mappings());
let edit = editor.finish();

View file

@ -7,8 +7,6 @@ use rustc_hash::FxHashMap;
use crate::{SyntaxElement, SyntaxNode};
use super::SyntaxEditor;
#[derive(Debug, Default)]
pub struct SyntaxMapping {
// important information to keep track of:
@ -209,7 +207,7 @@ impl SyntaxMapping {
Some(output)
}
fn add_mapping(&mut self, syntax_mapping: SyntaxMappingBuilder) {
pub fn add_mapping(&mut self, syntax_mapping: SyntaxMappingBuilder) {
let SyntaxMappingBuilder { parent_node, node_mappings } = syntax_mapping;
let parent_entry: u32 = self.entry_parents.len().try_into().unwrap();
@ -257,8 +255,8 @@ impl SyntaxMappingBuilder {
}
}
pub fn finish(self, editor: &mut SyntaxEditor) {
editor.mappings.add_mapping(self);
pub fn finish(self, mappings: &mut SyntaxMapping) {
mappings.add_mapping(self);
}
}
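// Editor's note, not part of this commit: after this change the builder no
// longer needs a live editor and is driven as in `syntax_factory::constructors`
// above:
//
//     let mut builder = SyntaxMappingBuilder::new(ast.syntax().clone());
//     builder.map_node(input.syntax().clone(), output.syntax().clone());
//     builder.finish(&mut mapping); // `mapping` derefs to `&mut SyntaxMapping`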

View file

@ -1,4 +1,4 @@
//! Object safe interface for file watching and reading.
//! Dynamically compatible interface for file watching and reading.
use std::fmt;
use paths::{AbsPath, AbsPathBuf};
@ -232,6 +232,6 @@ impl fmt::Debug for Message {
}
#[test]
fn handle_is_object_safe() {
fn handle_is_dyn_compatible() {
fn _assert(_: &dyn Handle) {}
}

View file

@ -178,7 +178,15 @@ RA_PROFILE=foo|bar|baz // enable only selected entries
RA_PROFILE=*@3>10 // dump everything, up to depth 3, if it takes more than 10 ms
```
In particular, I have `export RA_PROFILE='*>10'` in my shell profile.
Some rust-analyzer contributors have `export RA_PROFILE='*>10'` in their shell profiles.
For machine-readable JSON output, we have the `RA_PROFILE_JSON` env variable. We support
filtering only by span name:
```
RA_PROFILE_JSON=* // dump everything
RA_PROFILE_JSON="vfs_load|parallel_prime_caches|discover_command" // dump selected spans
```
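Judging from the JSON layer added in this commit, each completed span is then
written out as a single JSON line whose fields mirror `JsonDataInner` (the
values below are illustrative):
```
{"name":"vfs_load","elapsed_ms":12}
```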
We also have a "counting" profiler which counts number of instances of popular structs.
It is enabled by `RA_COUNT=1`.

View file

@ -42,7 +42,7 @@ The underlying engine makes sure that model is computed lazily (on-demand) and c
`crates/rust-analyzer/src/bin/main.rs` contains the main function which spawns LSP.
This is *the* entry point, but it front-loads a lot of complexity, so it's fine to just skim through it.
`crates/rust-analyzer/src/handlers.rs` implements all LSP requests and is a great place to start if you are already familiar with LSP.
`crates/rust-analyzer/src/handlers/request.rs` implements all LSP requests and is a great place to start if you are already familiar with LSP.
`Analysis` and `AnalysisHost` types define the main API for consumers of IDE services.

View file

@ -1,5 +1,5 @@
<!---
lsp/ext.rs hash: 6292ee8d88d4c9ec
lsp/ext.rs hash: 90cf7718d54fe3c2
If you need to change the above hash to make the test pass, please check if you
need to adjust this doc as well and ping this issue:

View file

@ -378,7 +378,7 @@ impl AstNode for AssocItem {
}
```
Shared AST substructures are modeled via (object safe) traits:
Shared AST substructures are modeled via (dynamically compatible) traits:
```rust
trait HasVisibility: AstNode {

View file

@ -158,6 +158,11 @@ building from locking the `Cargo.lock` at the expense of duplicating build artif
Set to `true` to use a subdirectory of the existing target directory or
set to a path relative to the workspace to use that path.
--
[[rust-analyzer.cfg.setTest]]rust-analyzer.cfg.setTest (default: `true`)::
+
--
Set `cfg(test)` for local crates. Defaults to true.
--
[[rust-analyzer.checkOnSave]]rust-analyzer.checkOnSave (default: `true`)::
+
--

View file

@ -512,6 +512,11 @@
"type": "boolean",
"default": false
},
"rust-analyzer.debug.buildBeforeRestart": {
"markdownDescription": "Whether to rebuild the project modules before debugging the same test again",
"type": "boolean",
"default": false
},
"rust-analyzer.debug.engineSettings": {
"type": "object",
"default": {},
@ -848,6 +853,16 @@
}
}
},
{
"title": "cfg",
"properties": {
"rust-analyzer.cfg.setTest": {
"markdownDescription": "Set `cfg(test)` for local crates. Defaults to true.",
"default": true,
"type": "boolean"
}
}
},
{
"title": "general",
"properties": {

View file

@ -23,10 +23,11 @@ export async function bootstrap(
if (!isValidExecutable(path, config.serverExtraEnv)) {
throw new Error(
`Failed to execute ${path} --version.` + config.serverPath
? `\`config.server.path\` or \`config.serverPath\` has been set explicitly.\
`Failed to execute ${path} --version.` +
(config.serverPath
? `\`config.server.path\` or \`config.serverPath\` has been set explicitly.\
Consider removing this config or making a valid server binary available at that path.`
: "",
: ""),
);
}

View file

@ -24,6 +24,7 @@ export class Config {
"serverPath",
"server",
"files",
"cfg",
].map((opt) => `${this.rootSection}.${opt}`);
private readonly requiresWindowReloadOpts = ["testExplorer"].map(
@ -299,6 +300,7 @@ export class Config {
engine: this.get<string>("debug.engine"),
engineSettings: this.get<object>("debug.engineSettings") ?? {},
openDebugPane: this.get<boolean>("debug.openDebugPane"),
buildBeforeRestart: this.get<boolean>("debug.buildBeforeRestart"),
sourceFileMap: sourceFileMap,
};
}

View file

@ -5,12 +5,15 @@ import type * as ra from "./lsp_ext";
import { Cargo } from "./toolchain";
import type { Ctx } from "./ctx";
import { prepareEnv } from "./run";
import { createTaskFromRunnable, prepareEnv } from "./run";
import { execute, isCargoRunnableArgs, unwrapUndefinable } from "./util";
import type { Config } from "./config";
const debugOutput = vscode.window.createOutputChannel("Debug");
// Here we want to keep track of everything that's currently running
const activeDebugSessionIds: string[] = [];
export async function makeDebugConfig(ctx: Ctx, runnable: ra.Runnable): Promise<void> {
const scope = ctx.activeRustEditor?.document.uri;
if (!scope) return;
@ -45,6 +48,8 @@ export async function startDebugSession(ctx: Ctx, runnable: ra.Runnable): Promis
const wsLaunchSection = vscode.workspace.getConfiguration("launch");
const configurations = wsLaunchSection.get<any[]>("configurations") || [];
// The runnable label is the name of the test with the "test" prefix,
// e.g. `test test_feature_x`
const index = configurations.findIndex((c) => c.name === runnable.label);
if (-1 !== index) {
debugConfig = configurations[index];
@ -168,6 +173,8 @@ async function getDebugConfiguration(
if (debugConfig.name === "run binary") {
// The LSP side: crates\rust-analyzer\src\main_loop\handlers.rs,
// fn to_lsp_runnable(...) with RunnableKind::Bin
// FIXME: Neither crates\rust-analyzer\src\main_loop\handlers.rs
// nor to_lsp_runnable exists anymore
debugConfig.name = `run ${path.basename(executable)}`;
}
@ -359,3 +366,49 @@ function quote(xs: string[]) {
})
.join(" ");
}
async function recompileTestFromDebuggingSession(session: vscode.DebugSession, ctx: Ctx) {
const { cwd, args: sessionArgs }: vscode.DebugConfiguration = session.configuration;
const args: ra.CargoRunnableArgs = {
cwd: cwd,
cargoArgs: ["test", "--no-run", "--test", "lib"],
// The first element of the debug configuration args is the test path, e.g. "test_bar::foo::test_a::test_b"
executableArgs: sessionArgs,
};
const runnable: ra.Runnable = {
kind: "cargo",
label: "compile-test",
args,
};
const task: vscode.Task = await createTaskFromRunnable(runnable, ctx.config);
// There is no need to call the language server, since the test path is already resolved in the
// configuration. We can simply execute a task for the runnable, which compiles the test via `--no-run`.
await vscode.tasks.executeTask(task);
}
export function initializeDebugSessionTrackingAndRebuild(ctx: Ctx) {
vscode.debug.onDidStartDebugSession((session: vscode.DebugSession) => {
if (!activeDebugSessionIds.includes(session.id)) {
activeDebugSessionIds.push(session.id);
}
});
vscode.debug.onDidTerminateDebugSession(async (session: vscode.DebugSession) => {
// The id of the session stays the same when the restart button is pressed
if (activeDebugSessionIds.find((s) => s === session.id)) {
await recompileTestFromDebuggingSession(session, ctx);
}
removeActiveSession(session);
});
}
function removeActiveSession(session: vscode.DebugSession) {
const activeSessionId = activeDebugSessionIds.findIndex((id) => id === session.id);
if (activeSessionId !== -1) {
activeDebugSessionIds.splice(activeSessionId, 1);
}
}

View file

@ -6,6 +6,7 @@ import { type CommandFactory, Ctx, fetchWorkspace } from "./ctx";
import * as diagnostics from "./diagnostics";
import { activateTaskProvider } from "./tasks";
import { setContextValue } from "./util";
import { initializeDebugSessionTrackingAndRebuild } from "./debug";
const RUST_PROJECT_CONTEXT_NAME = "inRustProject";
@ -102,6 +103,10 @@ async function activateServer(ctx: Ctx): Promise<RustAnalyzerExtensionApi> {
ctx.subscriptions,
);
if (ctx.config.debug.buildBeforeRestart) {
initializeDebugSessionTrackingAndRebuild(ctx);
}
await ctx.start();
return ctx;
}

View file

@ -36,7 +36,7 @@ export async function selectRunnable(
if (runnables.length === 0) {
// it is the debug case, run always has at least 'cargo check ...'
// see crates\rust-analyzer\src\main_loop\handlers.rs, handle_runnables
// see crates\rust-analyzer\src\handlers\request.rs, handle_runnables
await vscode.window.showErrorMessage("There's no debug target!");
quickPick.dispose();
return;

View file

@ -29,7 +29,7 @@ export class Cargo {
static artifactSpec(cargoArgs: string[], executableArgs?: string[]): ArtifactSpec {
cargoArgs = [...cargoArgs, "--message-format=json"];
// arguments for a runnable from the quick pick should be updated.
// see crates\rust-analyzer\src\main_loop\handlers.rs, handle_code_lens
// see crates\rust-analyzer\src\handlers\request.rs, handle_code_lens
switch (cargoArgs[0]) {
case "run":
cargoArgs[0] = "build";

View file

@ -1 +1 @@
1b5aa96d6016bafe50e071b45d4d2e3c90fd766f
cf24c73141a77db730f4b7fda69dcd7e8b113b51