mirror of https://github.com/rust-lang/rust-analyzer, synced 2024-12-28 05:53:45 +00:00

Auto merge of #131399 - lnicola:sync-from-ra, r=lnicola

Subtree update of `rust-analyzer`

r? `@ghost`

commit 0746cb0185
89 changed files with 1374 additions and 663 deletions
.github/workflows/release.yaml (vendored): 6 changes
@@ -16,7 +16,7 @@ env:
   RUSTFLAGS: "-D warnings -W unreachable-pub"
   RUSTUP_MAX_RETRIES: 10
   FETCH_DEPTH: 0 # pull in the tags for the version string
-  MACOSX_DEPLOYMENT_TARGET: 10.15
+  MACOSX_DEPLOYMENT_TARGET: 13.0
   CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER: aarch64-linux-gnu-gcc
   CARGO_TARGET_ARM_UNKNOWN_LINUX_GNUEABIHF_LINKER: arm-linux-gnueabihf-gcc
 
@@ -43,10 +43,10 @@ jobs:
          - os: ubuntu-20.04
            target: arm-unknown-linux-gnueabihf
            code-target: linux-armhf
-         - os: macos-12
+         - os: macos-13
           target: x86_64-apple-darwin
            code-target: darwin-x64
-         - os: macos-12
+         - os: macos-13
           target: aarch64-apple-darwin
            code-target: darwin-arm64
 
Cargo.lock (generated): 13 changes
@@ -145,9 +145,12 @@ dependencies = [
 
 [[package]]
 name = "cc"
-version = "1.1.10"
+version = "1.1.22"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e9e8aabfac534be767c909e0690571677d49f41bd8465ae876fe043d52ba5292"
+checksum = "9540e661f81799159abee814118cc139a2004b3a3aa3ea37724a1b66530b90e0"
+dependencies = [
+ "shlex",
+]
 
 [[package]]
 name = "cfg"
@@ -1852,6 +1855,12 @@ dependencies = [
  "lazy_static",
 ]
 
+[[package]]
+name = "shlex"
+version = "1.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
+
 [[package]]
 name = "smallvec"
 version = "1.13.2"
@@ -4,7 +4,7 @@ exclude = ["crates/proc-macro-srv/proc-macro-test/imp"]
 resolver = "2"
 
 [workspace.package]
-rust-version = "1.80"
+rust-version = "1.81"
 edition = "2021"
 license = "MIT OR Apache-2.0"
 authors = ["rust-analyzer team"]
@@ -49,6 +49,10 @@ impl CfgOptions {
         cfg.fold(&|atom| self.enabled.contains(atom))
     }
 
+    pub fn check_atom(&self, cfg: &CfgAtom) -> bool {
+        self.enabled.contains(cfg)
+    }
+
     pub fn insert_atom(&mut self, key: Symbol) {
         self.enabled.insert(CfgAtom::Flag(key));
     }
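The new `check_atom` accessor exposes a direct membership test on the enabled cfg set, instead of routing through `cfg.fold`; the def-map collector later in this diff uses it to ask whether `cfg(test)` is active. A minimal usage sketch mirroring that call site (the helper name and the `intern::sym` import path are assumptions, not shown in the diff):

    use cfg::{CfgAtom, CfgOptions};
    use intern::sym; // assumed import path for the `sym` constants used below

    /// True only if a bare `test` cfg atom was inserted into `opts`,
    /// e.g. via `opts.insert_atom(sym::test.clone())`.
    fn test_cfg_is_active(opts: &CfgOptions) -> bool {
        opts.check_atom(&CfgAtom::Flag(sym::test.clone()))
    }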
@@ -36,7 +36,7 @@ macro_rules! f {
 }
 
 struct#0:1@58..64#1# MyTraitMap2#0:2@31..42#0# {#0:1@72..73#1#
-map#0:1@86..89#1#:#0:1@89..90#1# #0:1@89..90#1#::#0:1@91..92#1#std#0:1@93..96#1#::#0:1@96..97#1#collections#0:1@98..109#1#::#0:1@109..110#1#HashSet#0:1@111..118#1#<#0:1@118..119#1#(#0:1@119..120#1#)#0:1@120..121#1#>#0:1@121..122#1#,#0:1@122..123#1#
+map#0:1@86..89#1#:#0:1@89..90#1# #0:1@89..90#1#::#0:1@91..93#1#std#0:1@93..96#1#::#0:1@96..98#1#collections#0:1@98..109#1#::#0:1@109..111#1#HashSet#0:1@111..118#1#<#0:1@118..119#1#(#0:1@119..120#1#)#0:1@120..121#1#>#0:1@121..122#1#,#0:1@122..123#1#
 }#0:1@132..133#1#
 "#]],
 );
@@ -6,7 +6,7 @@
 use std::{cmp::Ordering, iter, mem, ops::Not};
 
 use base_db::{CrateId, CrateOrigin, Dependency, LangCrateOrigin};
-use cfg::{CfgExpr, CfgOptions};
+use cfg::{CfgAtom, CfgExpr, CfgOptions};
 use either::Either;
 use hir_expand::{
     attrs::{Attr, AttrId},
@@ -1324,13 +1324,21 @@ impl DefCollector<'_> {
         };
 
         // Skip #[test]/#[bench] expansion, which would merely result in more memory usage
-        // due to duplicating functions into macro expansions
+        // due to duplicating functions into macro expansions, but only if `cfg(test)` is active,
+        // otherwise they are expanded to nothing and this can impact e.g. diagnostics (due to things
+        // being cfg'ed out).
+        // Ideally we will just expand them to nothing here. But we are only collecting macro calls,
+        // not expanding them, so we have no way to do that.
         if matches!(
             def.kind,
             MacroDefKind::BuiltInAttr(_, expander)
                 if expander.is_test() || expander.is_bench()
         ) {
-            return recollect_without(self);
+            let test_is_active =
+                self.cfg_options.check_atom(&CfgAtom::Flag(sym::test.clone()));
+            if test_is_active {
+                return recollect_without(self);
+            }
         }
 
         let call_id = || {
@@ -4,6 +4,8 @@ use span::{MacroCallId, Span};
 
 use crate::{db::ExpandDatabase, name, tt, ExpandResult, MacroCallKind};
 
+use super::quote;
+
 macro_rules! register_builtin {
     ($(($name:ident, $variant:ident) => $expand:ident),* ) => {
         #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -52,15 +54,15 @@ impl BuiltinAttrExpander {
 }
 
 register_builtin! {
-    (bench, Bench) => dummy_attr_expand,
+    (bench, Bench) => dummy_gate_test_expand,
     (cfg_accessible, CfgAccessible) => dummy_attr_expand,
     (cfg_eval, CfgEval) => dummy_attr_expand,
     (derive, Derive) => derive_expand,
     // derive const is equivalent to derive for our proposes.
     (derive_const, DeriveConst) => derive_expand,
     (global_allocator, GlobalAllocator) => dummy_attr_expand,
-    (test, Test) => dummy_attr_expand,
-    (test_case, TestCase) => dummy_attr_expand
+    (test, Test) => dummy_gate_test_expand,
+    (test_case, TestCase) => dummy_gate_test_expand
 }
 
 pub fn find_builtin_attr(ident: &name::Name) -> Option<BuiltinAttrExpander> {
@@ -76,6 +78,19 @@ fn dummy_attr_expand(
     ExpandResult::ok(tt.clone())
 }
 
+fn dummy_gate_test_expand(
+    _db: &dyn ExpandDatabase,
+    _id: MacroCallId,
+    tt: &tt::Subtree,
+    span: Span,
+) -> ExpandResult<tt::Subtree> {
+    let result = quote::quote! { span=>
+        #[cfg(test)]
+        #tt
+    };
+    ExpandResult::ok(result)
+}
+
 /// We generate a very specific expansion here, as we do not actually expand the `#[derive]` attribute
 /// itself in name res, but we do want to expand it to something for the IDE layer, so that the input
 /// derive attributes can be downmapped, and resolved as proper paths.
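The effect of `dummy_gate_test_expand` is that test-like attributes no longer expand to their input unchanged; the item comes back wrapped in `#[cfg(test)]`, so name resolution drops it whenever `test` is not enabled. Roughly, at the source level (an illustrative before/after, not the expander's literal token output):

    // What the attribute's input item looks like to the expander:
    //     fn it_works() { assert!(true); }
    //
    // And what `dummy_gate_test_expand` hands back, per the quote! above:
    #[cfg(test)]
    fn it_works() {
        assert!(true);
    }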
@@ -16,7 +16,10 @@ use crate::{
     cfg_process,
     declarative::DeclarativeMacroExpander,
     fixup::{self, SyntaxFixupUndoInfo},
-    hygiene::{span_with_call_site_ctxt, span_with_def_site_ctxt, span_with_mixed_site_ctxt},
+    hygiene::{
+        span_with_call_site_ctxt, span_with_def_site_ctxt, span_with_mixed_site_ctxt,
+        SyntaxContextExt as _,
+    },
     proc_macro::ProcMacros,
     span_map::{RealSpanMap, SpanMap, SpanMapRef},
     tt, AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander,
@@ -300,14 +303,16 @@ pub fn expand_speculative(
         token_tree_to_syntax_node(&speculative_expansion.value, expand_to, loc.def.edition);
 
     let syntax_node = node.syntax_node();
-    let token = rev_tmap
+    let (token, _) = rev_tmap
         .ranges_with_span(span_map.span_for_range(token_to_map.text_range()))
-        .filter_map(|range| syntax_node.covering_element(range).into_token())
-        .min_by_key(|t| {
-            // prefer tokens of the same kind and text
+        .filter_map(|(range, ctx)| syntax_node.covering_element(range).into_token().zip(Some(ctx)))
+        .min_by_key(|(t, ctx)| {
+            // prefer tokens of the same kind and text, as well as non opaque marked ones
             // Note the inversion of the score here, as we want to prefer the first token in case
             // of all tokens having the same score
-            (t.kind() != token_to_map.kind()) as u8 + 2 * ((t.text() != token_to_map.text()) as u8)
+            ctx.is_opaque(db) as u8
+                + 2 * (t.kind() != token_to_map.kind()) as u8
+                + 4 * ((t.text() != token_to_map.text()) as u8)
         })?;
     Some((node.syntax_node(), token))
 }
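The token chosen is the one minimizing a weighted score: hygiene opacity costs 1, a kind mismatch 2, and a text mismatch 4, so text agreement dominates kind agreement, which in turn dominates opacity. The same ranking in isolation (a sketch; the names here are illustrative, not the crate's API):

    /// Rank candidate tokens: smaller score wins; ties keep the first candidate.
    fn score(is_opaque: bool, kind_matches: bool, text_matches: bool) -> u8 {
        is_opaque as u8 + 2 * (!kind_matches as u8) + 4 * (!text_matches as u8)
    }

    fn main() {
        // A non-opaque token with matching kind and text scores 0 and is preferred.
        assert_eq!(score(false, true, true), 0);
        // A text mismatch outweighs a kind mismatch and opacity combined.
        assert!(score(false, true, false) > score(true, false, true));
    }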
@@ -151,6 +151,7 @@ pub trait SyntaxContextExt {
     fn remove_mark(&mut self, db: &dyn ExpandDatabase) -> (Option<MacroCallId>, Transparency);
     fn outer_mark(self, db: &dyn ExpandDatabase) -> (Option<MacroCallId>, Transparency);
     fn marks(self, db: &dyn ExpandDatabase) -> Vec<(MacroCallId, Transparency)>;
+    fn is_opaque(self, db: &dyn ExpandDatabase) -> bool;
 }
 
 impl SyntaxContextExt for SyntaxContextId {

@@ -177,6 +178,9 @@ impl SyntaxContextExt for SyntaxContextId {
         marks.reverse();
         marks
     }
+    fn is_opaque(self, db: &dyn ExpandDatabase) -> bool {
+        !self.is_root() && db.lookup_intern_syntax_context(self).outer_transparency.is_opaque()
+    }
 }
 
 // FIXME: Make this a SyntaxContextExt method once we have RPIT
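A syntax context counts as opaque when it is not the root context and its outermost hygiene mark was applied with opaque transparency. A toy model of that check, with the three transparency levels rustc-style hygiene distinguishes (the types here are illustrative stand-ins, not the crate's):

    #[derive(Clone, Copy, PartialEq)]
    enum Transparency {
        Transparent,     // spans behave as if written at the call site
        SemiTransparent, // `macro_rules!` default: mixed-site behavior
        Opaque,          // fully hygienic, e.g. def-site spans
    }

    struct SyntaxContext {
        outer_transparency: Transparency,
        is_root: bool, // the context of plain source text outside any macro
    }

    impl SyntaxContext {
        fn is_opaque(&self) -> bool {
            !self.is_root && self.outer_transparency == Transparency::Opaque
        }
    }

    fn main() {
        let ctx = SyntaxContext { outer_transparency: Transparency::Opaque, is_root: false };
        assert!(ctx.is_opaque());
    }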
@@ -25,6 +25,7 @@ mod prettify_macro_expansion_;
 
 use attrs::collect_attrs;
 use rustc_hash::FxHashMap;
+use stdx::TupleExt;
 use triomphe::Arc;
 
 use std::hash::Hash;
@@ -772,14 +773,15 @@ impl ExpansionInfo {
     /// Maps the passed in file range down into a macro expansion if it is the input to a macro call.
     ///
     /// Note this does a linear search through the entire backing vector of the spanmap.
+    // FIXME: Consider adding a reverse map to ExpansionInfo to get rid of the linear search which
+    // potentially results in quadratic look ups (notably this might improve semantic highlighting perf)
     pub fn map_range_down_exact(
         &self,
         span: Span,
-    ) -> Option<InMacroFile<impl Iterator<Item = SyntaxToken> + '_>> {
-        let tokens = self
-            .exp_map
-            .ranges_with_span_exact(span)
-            .flat_map(move |range| self.expanded.value.covering_element(range).into_token());
+    ) -> Option<InMacroFile<impl Iterator<Item = (SyntaxToken, SyntaxContextId)> + '_>> {
+        let tokens = self.exp_map.ranges_with_span_exact(span).flat_map(move |(range, ctx)| {
+            self.expanded.value.covering_element(range).into_token().zip(Some(ctx))
+        });
 
         Some(InMacroFile::new(self.expanded.file_id, tokens))
     }
@@ -791,11 +793,10 @@
     pub fn map_range_down(
         &self,
         span: Span,
-    ) -> Option<InMacroFile<impl Iterator<Item = SyntaxToken> + '_>> {
-        let tokens = self
-            .exp_map
-            .ranges_with_span(span)
-            .flat_map(move |range| self.expanded.value.covering_element(range).into_token());
+    ) -> Option<InMacroFile<impl Iterator<Item = (SyntaxToken, SyntaxContextId)> + '_>> {
+        let tokens = self.exp_map.ranges_with_span(span).flat_map(move |(range, ctx)| {
+            self.expanded.value.covering_element(range).into_token().zip(Some(ctx))
+        });
 
         Some(InMacroFile::new(self.expanded.file_id, tokens))
     }
@@ -845,7 +846,8 @@
             self.arg.file_id,
             arg_map
                 .ranges_with_span_exact(span)
-                .filter(|range| range.intersect(arg_range).is_some())
+                .filter(|(range, _)| range.intersect(arg_range).is_some())
+                .map(TupleExt::head)
                 .collect(),
         )
     }
@@ -382,8 +382,9 @@ impl chalk_solve::RustIrDatabase<Interner> for ChalkContext<'_> {
     }
 
     fn is_object_safe(&self, trait_id: chalk_ir::TraitId<Interner>) -> bool {
+        // FIXME: When cargo is updated, change to dyn_compatibility
         let trait_ = from_chalk_trait_id(trait_id);
-        crate::object_safety::object_safety(self.db, trait_).is_none()
+        crate::dyn_compatibility::dyn_compatibility(self.db, trait_).is_none()
     }
 
     fn closure_kind(
@@ -20,11 +20,11 @@ use triomphe::Arc;
 use crate::{
     chalk_db,
     consteval::ConstEvalError,
+    dyn_compatibility::DynCompatibilityViolation,
     layout::{Layout, LayoutError},
     lower::{GenericDefaults, GenericPredicates},
     method_resolution::{InherentImpls, TraitImpls, TyFingerprint},
     mir::{BorrowckResult, MirBody, MirLowerError},
-    object_safety::ObjectSafetyViolation,
     Binders, ClosureId, Const, FnDefId, ImplTraitId, ImplTraits, InferenceResult, Interner,
     PolyFnSig, Substitution, TraitEnvironment, TraitRef, Ty, TyDefId, ValueTyDefId,
 };

@@ -108,8 +108,8 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
     #[salsa::invoke(crate::layout::target_data_layout_query)]
     fn target_data_layout(&self, krate: CrateId) -> Result<Arc<TargetDataLayout>, Arc<str>>;
 
-    #[salsa::invoke(crate::object_safety::object_safety_of_trait_query)]
-    fn object_safety_of_trait(&self, trait_: TraitId) -> Option<ObjectSafetyViolation>;
+    #[salsa::invoke(crate::dyn_compatibility::dyn_compatibility_of_trait_query)]
+    fn dyn_compatibility_of_trait(&self, trait_: TraitId) -> Option<DynCompatibilityViolation>;
 
     #[salsa::invoke(crate::lower::ty_query)]
     #[salsa::cycle(crate::lower::ty_recover)]

@@ -280,8 +280,8 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
 }
 
 #[test]
-fn hir_database_is_object_safe() {
-    fn _assert_object_safe(_: &dyn HirDatabase) {}
+fn hir_database_is_dyn_compatible() {
+    fn _assert_dyn_compatible(_: &dyn HirDatabase) {}
 }
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -58,7 +58,7 @@ impl fmt::Display for CaseType {
         let repr = match self {
             CaseType::LowerSnakeCase => "snake_case",
             CaseType::UpperSnakeCase => "UPPER_SNAKE_CASE",
-            CaseType::UpperCamelCase => "CamelCase",
+            CaseType::UpperCamelCase => "UpperCamelCase",
         };
 
         repr.fmt(f)

@@ -111,7 +111,7 @@ mod tests {
         check(to_lower_snake_case, "lower_snake_case", expect![[""]]);
         check(to_lower_snake_case, "UPPER_SNAKE_CASE", expect![["upper_snake_case"]]);
         check(to_lower_snake_case, "Weird_Case", expect![["weird_case"]]);
-        check(to_lower_snake_case, "CamelCase", expect![["camel_case"]]);
+        check(to_lower_snake_case, "UpperCamelCase", expect![["upper_camel_case"]]);
        check(to_lower_snake_case, "lowerCamelCase", expect![["lower_camel_case"]]);
         check(to_lower_snake_case, "a", expect![[""]]);
         check(to_lower_snake_case, "abc", expect![[""]]);

@@ -121,8 +121,8 @@ mod tests {
 
     #[test]
     fn test_to_camel_case() {
-        check(to_camel_case, "CamelCase", expect![[""]]);
-        check(to_camel_case, "CamelCase_", expect![[""]]);
+        check(to_camel_case, "UpperCamelCase", expect![[""]]);
+        check(to_camel_case, "UpperCamelCase_", expect![[""]]);
         check(to_camel_case, "_CamelCase", expect![[""]]);
         check(to_camel_case, "lowerCamelCase", expect![["LowerCamelCase"]]);
         check(to_camel_case, "lower_snake_case", expect![["LowerSnakeCase"]]);

@@ -143,7 +143,7 @@ mod tests {
         check(to_upper_snake_case, "UPPER_SNAKE_CASE", expect![[""]]);
         check(to_upper_snake_case, "lower_snake_case", expect![["LOWER_SNAKE_CASE"]]);
         check(to_upper_snake_case, "Weird_Case", expect![["WEIRD_CASE"]]);
-        check(to_upper_snake_case, "CamelCase", expect![["CAMEL_CASE"]]);
+        check(to_upper_snake_case, "UpperCamelCase", expect![["UPPER_CAMEL_CASE"]]);
         check(to_upper_snake_case, "lowerCamelCase", expect![["LOWER_CAMEL_CASE"]]);
         check(to_upper_snake_case, "A", expect![[""]]);
         check(to_upper_snake_case, "ABC", expect![[""]]);
@@ -1,4 +1,4 @@
-//! Compute the object-safety of a trait
+//! Compute the dyn-compatibility of a trait
 
 use std::ops::ControlFlow;
 

@@ -28,14 +28,14 @@ use crate::{
 };
 
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub enum ObjectSafetyViolation {
+pub enum DynCompatibilityViolation {
     SizedSelf,
     SelfReferential,
     Method(FunctionId, MethodViolationCode),
     AssocConst(ConstId),
     GAT(TypeAliasId),
     // This doesn't exist in rustc, but added for better visualization
-    HasNonSafeSuperTrait(TraitId),
+    HasNonCompatibleSuperTrait(TraitId),
 }
 
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
@@ -50,70 +50,73 @@ pub enum MethodViolationCode {
     UndispatchableReceiver,
 }
 
-pub fn object_safety(db: &dyn HirDatabase, trait_: TraitId) -> Option<ObjectSafetyViolation> {
+pub fn dyn_compatibility(
+    db: &dyn HirDatabase,
+    trait_: TraitId,
+) -> Option<DynCompatibilityViolation> {
     for super_trait in all_super_traits(db.upcast(), trait_).into_iter().skip(1).rev() {
-        if db.object_safety_of_trait(super_trait).is_some() {
-            return Some(ObjectSafetyViolation::HasNonSafeSuperTrait(super_trait));
+        if db.dyn_compatibility_of_trait(super_trait).is_some() {
+            return Some(DynCompatibilityViolation::HasNonCompatibleSuperTrait(super_trait));
         }
     }
 
-    db.object_safety_of_trait(trait_)
+    db.dyn_compatibility_of_trait(trait_)
 }
 
-pub fn object_safety_with_callback<F>(
+pub fn dyn_compatibility_with_callback<F>(
     db: &dyn HirDatabase,
     trait_: TraitId,
     cb: &mut F,
 ) -> ControlFlow<()>
 where
-    F: FnMut(ObjectSafetyViolation) -> ControlFlow<()>,
+    F: FnMut(DynCompatibilityViolation) -> ControlFlow<()>,
 {
     for super_trait in all_super_traits(db.upcast(), trait_).into_iter().skip(1).rev() {
-        if db.object_safety_of_trait(super_trait).is_some() {
-            cb(ObjectSafetyViolation::HasNonSafeSuperTrait(trait_))?;
+        if db.dyn_compatibility_of_trait(super_trait).is_some() {
+            cb(DynCompatibilityViolation::HasNonCompatibleSuperTrait(trait_))?;
         }
     }
 
-    object_safety_of_trait_with_callback(db, trait_, cb)
+    dyn_compatibility_of_trait_with_callback(db, trait_, cb)
 }
 
-pub fn object_safety_of_trait_with_callback<F>(
+pub fn dyn_compatibility_of_trait_with_callback<F>(
     db: &dyn HirDatabase,
     trait_: TraitId,
     cb: &mut F,
 ) -> ControlFlow<()>
 where
-    F: FnMut(ObjectSafetyViolation) -> ControlFlow<()>,
+    F: FnMut(DynCompatibilityViolation) -> ControlFlow<()>,
 {
     // Check whether this has a `Sized` bound
     if generics_require_sized_self(db, trait_.into()) {
-        cb(ObjectSafetyViolation::SizedSelf)?;
+        cb(DynCompatibilityViolation::SizedSelf)?;
     }
 
     // Check if there exist bounds that referencing self
     if predicates_reference_self(db, trait_) {
-        cb(ObjectSafetyViolation::SelfReferential)?;
+        cb(DynCompatibilityViolation::SelfReferential)?;
     }
     if bounds_reference_self(db, trait_) {
-        cb(ObjectSafetyViolation::SelfReferential)?;
+        cb(DynCompatibilityViolation::SelfReferential)?;
     }
 
     // rustc checks for non-lifetime binders here, but we don't support HRTB yet
 
     let trait_data = db.trait_data(trait_);
     for (_, assoc_item) in &trait_data.items {
-        object_safety_violation_for_assoc_item(db, trait_, *assoc_item, cb)?;
+        dyn_compatibility_violation_for_assoc_item(db, trait_, *assoc_item, cb)?;
     }
 
     ControlFlow::Continue(())
 }
 
-pub fn object_safety_of_trait_query(
+pub fn dyn_compatibility_of_trait_query(
     db: &dyn HirDatabase,
     trait_: TraitId,
-) -> Option<ObjectSafetyViolation> {
+) -> Option<DynCompatibilityViolation> {
     let mut res = None;
-    object_safety_of_trait_with_callback(db, trait_, &mut |osv| {
+    dyn_compatibility_of_trait_with_callback(db, trait_, &mut |osv| {
         res = Some(osv);
         ControlFlow::Break(())
     });
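Everything in this file is a mechanical rename from the object-safety terminology to rustc's newer dyn-compatibility terminology; the analysis itself is unchanged. For orientation, a plain-Rust reminder of the kind of trait this checker accepts and rejects (independent of this crate's API):

    // Dyn-compatible: every method is dispatchable through a vtable.
    trait Draw {
        fn draw(&self);
    }

    // Dyn-incompatible: a generic method cannot be placed in a vtable
    // (Method(_, Generic) in this checker's terms), and an associated
    // const (AssocConst) has no dynamic-dispatch representation.
    trait Bad {
        fn generic<T>(&self, value: T);
        const ID: usize;
    }

    fn main() {
        let _ok: Option<Box<dyn Draw>> = None;
        // let _bad: Option<Box<dyn Bad>> = None; // error: `Bad` is not dyn compatible
    }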
@@ -321,14 +324,14 @@ fn contains_illegal_self_type_reference<T: TypeVisitable<Interner>>(
     t.visit_with(visitor.as_dyn(), outer_binder).is_break()
 }
 
-fn object_safety_violation_for_assoc_item<F>(
+fn dyn_compatibility_violation_for_assoc_item<F>(
     db: &dyn HirDatabase,
     trait_: TraitId,
     item: AssocItemId,
     cb: &mut F,
 ) -> ControlFlow<()>
 where
-    F: FnMut(ObjectSafetyViolation) -> ControlFlow<()>,
+    F: FnMut(DynCompatibilityViolation) -> ControlFlow<()>,
 {
     // Any item that has a `Self : Sized` requisite is otherwise
     // exempt from the regulations.

@@ -337,10 +340,10 @@ where
     }
 
     match item {
-        AssocItemId::ConstId(it) => cb(ObjectSafetyViolation::AssocConst(it)),
+        AssocItemId::ConstId(it) => cb(DynCompatibilityViolation::AssocConst(it)),
         AssocItemId::FunctionId(it) => {
             virtual_call_violations_for_method(db, trait_, it, &mut |mvc| {
-                cb(ObjectSafetyViolation::Method(it, mvc))
+                cb(DynCompatibilityViolation::Method(it, mvc))
             })
         }
         AssocItemId::TypeAliasId(it) => {

@@ -350,7 +353,7 @@ where
         } else {
             let generic_params = db.generic_params(item.into());
             if !generic_params.is_empty() {
-                cb(ObjectSafetyViolation::GAT(it))
+                cb(DynCompatibilityViolation::GAT(it))
             } else {
                 ControlFlow::Continue(())
             }

@@ -469,7 +472,7 @@ fn receiver_is_dispatchable(
         return false;
     };
 
-    // `self: Self` can't be dispatched on, but this is already considered object safe.
+    // `self: Self` can't be dispatched on, but this is already considered dyn compatible
     // See rustc's comment on https://github.com/rust-lang/rust/blob/3f121b9461cce02a703a0e7e450568849dfaa074/compiler/rustc_trait_selection/src/traits/object_safety.rs#L433-L437
     if sig
         .skip_binders()
@@ -5,29 +5,29 @@ use rustc_hash::{FxHashMap, FxHashSet};
 use syntax::ToSmolStr;
 use test_fixture::WithFixture;
 
-use crate::{object_safety::object_safety_with_callback, test_db::TestDB};
+use crate::{dyn_compatibility::dyn_compatibility_with_callback, test_db::TestDB};
 
 use super::{
+    DynCompatibilityViolation,
     MethodViolationCode::{self, *},
-    ObjectSafetyViolation,
 };
 
-use ObjectSafetyViolationKind::*;
+use DynCompatibilityViolationKind::*;
 
 #[allow(clippy::upper_case_acronyms)]
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
-enum ObjectSafetyViolationKind {
+enum DynCompatibilityViolationKind {
     SizedSelf,
     SelfReferential,
     Method(MethodViolationCode),
     AssocConst,
     GAT,
-    HasNonSafeSuperTrait,
+    HasNonCompatibleSuperTrait,
 }
 
-fn check_object_safety<'a>(
+fn check_dyn_compatibility<'a>(
     ra_fixture: &str,
-    expected: impl IntoIterator<Item = (&'a str, Vec<ObjectSafetyViolationKind>)>,
+    expected: impl IntoIterator<Item = (&'a str, Vec<DynCompatibilityViolationKind>)>,
 ) {
     let mut expected: FxHashMap<_, _> =
         expected.into_iter().map(|(id, osvs)| (id, FxHashSet::from_iter(osvs))).collect();

@@ -53,18 +53,20 @@ fn check_object_safety<'a>(
             continue;
         };
         let mut osvs = FxHashSet::default();
-        object_safety_with_callback(&db, trait_id, &mut |osv| {
+        dyn_compatibility_with_callback(&db, trait_id, &mut |osv| {
             osvs.insert(match osv {
-                ObjectSafetyViolation::SizedSelf => SizedSelf,
-                ObjectSafetyViolation::SelfReferential => SelfReferential,
-                ObjectSafetyViolation::Method(_, mvc) => Method(mvc),
-                ObjectSafetyViolation::AssocConst(_) => AssocConst,
-                ObjectSafetyViolation::GAT(_) => GAT,
-                ObjectSafetyViolation::HasNonSafeSuperTrait(_) => HasNonSafeSuperTrait,
+                DynCompatibilityViolation::SizedSelf => SizedSelf,
+                DynCompatibilityViolation::SelfReferential => SelfReferential,
+                DynCompatibilityViolation::Method(_, mvc) => Method(mvc),
+                DynCompatibilityViolation::AssocConst(_) => AssocConst,
+                DynCompatibilityViolation::GAT(_) => GAT,
+                DynCompatibilityViolation::HasNonCompatibleSuperTrait(_) => {
+                    HasNonCompatibleSuperTrait
+                }
             });
             ControlFlow::Continue(())
         });
-        assert_eq!(osvs, expected, "Object safety violations for `{name}` do not match;");
+        assert_eq!(osvs, expected, "Dyn Compatibility violations for `{name}` do not match;");
     }
 
     let remains: Vec<_> = expected.keys().collect();

@@ -73,7 +75,7 @@ fn check_object_safety<'a>(
 
 #[test]
 fn item_bounds_can_reference_self() {
-    check_object_safety(
+    check_dyn_compatibility(
         r#"
 //- minicore: eq
 pub trait Foo {

@@ -88,7 +90,7 @@ pub trait Foo {
 
 #[test]
 fn associated_consts() {
-    check_object_safety(
+    check_dyn_compatibility(
         r#"
 trait Bar {
     const X: usize;

@@ -100,7 +102,7 @@ trait Bar {
 
 #[test]
 fn bounds_reference_self() {
-    check_object_safety(
+    check_dyn_compatibility(
         r#"
 //- minicore: eq
 trait X {

@@ -113,7 +115,7 @@ trait X {
 
 #[test]
 fn by_value_self() {
-    check_object_safety(
+    check_dyn_compatibility(
         r#"
 //- minicore: dispatch_from_dyn
 trait Bar {

@@ -135,7 +137,7 @@ trait Quux {
 
 #[test]
 fn generic_methods() {
-    check_object_safety(
+    check_dyn_compatibility(
         r#"
 //- minicore: dispatch_from_dyn
 trait Bar {

@@ -157,7 +159,7 @@ trait Qax {
 
 #[test]
 fn mentions_self() {
-    check_object_safety(
+    check_dyn_compatibility(
         r#"
 //- minicore: dispatch_from_dyn
 trait Bar {

@@ -182,7 +184,7 @@ trait Quux {
 
 #[test]
 fn no_static() {
-    check_object_safety(
+    check_dyn_compatibility(
         r#"
 //- minicore: dispatch_from_dyn
 trait Foo {

@@ -195,7 +197,7 @@ trait Foo {
 
 #[test]
 fn sized_self() {
-    check_object_safety(
+    check_dyn_compatibility(
         r#"
 //- minicore: dispatch_from_dyn
 trait Bar: Sized {

@@ -205,7 +207,7 @@ trait Bar: Sized {
         [("Bar", vec![SizedSelf])],
     );
 
-    check_object_safety(
+    check_dyn_compatibility(
         r#"
 //- minicore: dispatch_from_dyn
 trait Bar

@@ -220,7 +222,7 @@ trait Bar
 
 #[test]
 fn supertrait_gat() {
-    check_object_safety(
+    check_dyn_compatibility(
         r#"
 //- minicore: dispatch_from_dyn
 trait GatTrait {

@@ -229,13 +231,13 @@ trait GatTrait {
 
 trait SuperTrait<T>: GatTrait {}
 "#,
-        [("GatTrait", vec![GAT]), ("SuperTrait", vec![HasNonSafeSuperTrait])],
+        [("GatTrait", vec![GAT]), ("SuperTrait", vec![HasNonCompatibleSuperTrait])],
     );
 }
 
 #[test]
 fn supertrait_mentions_self() {
-    check_object_safety(
+    check_dyn_compatibility(
         r#"
 //- minicore: dispatch_from_dyn
 trait Bar<T> {

@@ -251,7 +253,7 @@ trait Baz : Bar<Self> {
 
 #[test]
 fn rustc_issue_19538() {
-    check_object_safety(
+    check_dyn_compatibility(
         r#"
 //- minicore: dispatch_from_dyn
 trait Foo {

@@ -260,13 +262,13 @@ trait Foo {
 
 trait Bar: Foo {}
 "#,
-        [("Foo", vec![Method(Generic)]), ("Bar", vec![HasNonSafeSuperTrait])],
+        [("Foo", vec![Method(Generic)]), ("Bar", vec![HasNonCompatibleSuperTrait])],
     );
 }
 
 #[test]
 fn rustc_issue_22040() {
-    check_object_safety(
+    check_dyn_compatibility(
         r#"
 //- minicore: fmt, eq, dispatch_from_dyn
 use core::fmt::Debug;

@@ -281,7 +283,7 @@ trait Expr: Debug + PartialEq {
 
 #[test]
 fn rustc_issue_102762() {
-    check_object_safety(
+    check_dyn_compatibility(
         r#"
 //- minicore: future, send, sync, dispatch_from_dyn, deref
 use core::pin::Pin;

@@ -313,7 +315,7 @@ pub trait Fetcher: Send + Sync {
 
 #[test]
 fn rustc_issue_102933() {
-    check_object_safety(
+    check_dyn_compatibility(
         r#"
 //- minicore: future, dispatch_from_dyn, deref
 use core::future::Future;

@@ -351,7 +353,7 @@ pub trait B2: Service<Response = i32> + B1 {
 
 #[test]
 fn rustc_issue_106247() {
-    check_object_safety(
+    check_dyn_compatibility(
         r#"
 //- minicore: sync, dispatch_from_dyn
 pub trait Trait {

@@ -363,8 +365,8 @@ pub trait Trait {
 }
 
 #[test]
-fn std_error_is_object_safe() {
-    check_object_safety(
+fn std_error_is_dyn_compatible() {
+    check_dyn_compatibility(
         r#"
 //- minicore: fmt, dispatch_from_dyn
 trait Erased<'a>: 'a {}

@@ -380,14 +382,14 @@ pub trait Error: core::fmt::Debug + core::fmt::Display {
 }
 
 #[test]
-fn lifetime_gat_is_object_unsafe() {
-    check_object_safety(
+fn lifetime_gat_is_dyn_incompatible() {
+    check_dyn_compatibility(
         r#"
 //- minicore: dispatch_from_dyn
 trait Foo {
     type Bar<'a>;
 }
 "#,
-        [("Foo", vec![ObjectSafetyViolationKind::GAT])],
+        [("Foo", vec![DynCompatibilityViolationKind::GAT])],
     );
 }
@@ -38,11 +38,11 @@ pub mod consteval;
 pub mod db;
 pub mod diagnostics;
 pub mod display;
+pub mod dyn_compatibility;
 pub mod lang_items;
 pub mod layout;
 pub mod method_resolution;
 pub mod mir;
-pub mod object_safety;
 pub mod primitive;
 pub mod traits;
 
@@ -386,82 +386,91 @@ fn ever_initialized_map(
     fn dfs(
         db: &dyn HirDatabase,
         body: &MirBody,
-        b: BasicBlockId,
         l: LocalId,
+        stack: &mut Vec<BasicBlockId>,
         result: &mut ArenaMap<BasicBlockId, ArenaMap<LocalId, bool>>,
     ) {
-        let mut is_ever_initialized = result[b][l]; // It must be filled, as we use it as mark for dfs
-        let block = &body.basic_blocks[b];
-        for statement in &block.statements {
-            match &statement.kind {
-                StatementKind::Assign(p, _) => {
-                    if p.projection.lookup(&body.projection_store).is_empty() && p.local == l {
-                        is_ever_initialized = true;
-                    }
-                }
-                StatementKind::StorageDead(p) => {
-                    if *p == l {
-                        is_ever_initialized = false;
-                    }
-                }
-                StatementKind::Deinit(_)
-                | StatementKind::FakeRead(_)
-                | StatementKind::Nop
-                | StatementKind::StorageLive(_) => (),
-            }
-        }
-        let Some(terminator) = &block.terminator else {
-            never!(
-                "Terminator should be none only in construction.\nThe body:\n{}",
-                body.pretty_print(db)
-            );
-            return;
-        };
-        let mut process = |target, is_ever_initialized| {
-            if !result[target].contains_idx(l) || !result[target][l] && is_ever_initialized {
-                result[target].insert(l, is_ever_initialized);
-                dfs(db, body, target, l, result);
-            }
-        };
-        match &terminator.kind {
-            TerminatorKind::Goto { target } => process(*target, is_ever_initialized),
-            TerminatorKind::SwitchInt { targets, .. } => {
-                targets.all_targets().iter().for_each(|&it| process(it, is_ever_initialized));
-            }
-            TerminatorKind::UnwindResume
-            | TerminatorKind::Abort
-            | TerminatorKind::Return
-            | TerminatorKind::Unreachable => (),
-            TerminatorKind::Call { target, cleanup, destination, .. } => {
-                if destination.projection.lookup(&body.projection_store).is_empty()
-                    && destination.local == l
-                {
-                    is_ever_initialized = true;
-                }
-                target.iter().chain(cleanup).for_each(|&it| process(it, is_ever_initialized));
-            }
-            TerminatorKind::Drop { target, unwind, place: _ } => {
-                iter::once(target).chain(unwind).for_each(|&it| process(it, is_ever_initialized));
-            }
-            TerminatorKind::DropAndReplace { .. }
-            | TerminatorKind::Assert { .. }
-            | TerminatorKind::Yield { .. }
-            | TerminatorKind::CoroutineDrop
-            | TerminatorKind::FalseEdge { .. }
-            | TerminatorKind::FalseUnwind { .. } => {
-                never!("We don't emit these MIR terminators yet");
-            }
-        }
-    }
+        while let Some(b) = stack.pop() {
+            let mut is_ever_initialized = result[b][l]; // It must be filled, as we use it as mark for dfs
+            let block = &body.basic_blocks[b];
+            for statement in &block.statements {
+                match &statement.kind {
+                    StatementKind::Assign(p, _) => {
+                        if p.projection.lookup(&body.projection_store).is_empty() && p.local == l {
+                            is_ever_initialized = true;
+                        }
+                    }
+                    StatementKind::StorageDead(p) => {
+                        if *p == l {
+                            is_ever_initialized = false;
+                        }
+                    }
+                    StatementKind::Deinit(_)
+                    | StatementKind::FakeRead(_)
+                    | StatementKind::Nop
+                    | StatementKind::StorageLive(_) => (),
+                }
+            }
+            let Some(terminator) = &block.terminator else {
+                never!(
+                    "Terminator should be none only in construction.\nThe body:\n{}",
+                    body.pretty_print(db)
+                );
+                return;
+            };
+            let mut process = |target, is_ever_initialized| {
+                if !result[target].contains_idx(l) || !result[target][l] && is_ever_initialized {
+                    result[target].insert(l, is_ever_initialized);
+                    stack.push(target);
+                }
+            };
+            match &terminator.kind {
+                TerminatorKind::Goto { target } => process(*target, is_ever_initialized),
+                TerminatorKind::SwitchInt { targets, .. } => {
+                    targets.all_targets().iter().for_each(|&it| process(it, is_ever_initialized));
+                }
+                TerminatorKind::UnwindResume
+                | TerminatorKind::Abort
+                | TerminatorKind::Return
+                | TerminatorKind::Unreachable => (),
+                TerminatorKind::Call { target, cleanup, destination, .. } => {
+                    if destination.projection.lookup(&body.projection_store).is_empty()
+                        && destination.local == l
+                    {
+                        is_ever_initialized = true;
+                    }
+                    target.iter().chain(cleanup).for_each(|&it| process(it, is_ever_initialized));
+                }
+                TerminatorKind::Drop { target, unwind, place: _ } => {
+                    iter::once(target)
+                        .chain(unwind)
+                        .for_each(|&it| process(it, is_ever_initialized));
+                }
+                TerminatorKind::DropAndReplace { .. }
+                | TerminatorKind::Assert { .. }
+                | TerminatorKind::Yield { .. }
+                | TerminatorKind::CoroutineDrop
+                | TerminatorKind::FalseEdge { .. }
+                | TerminatorKind::FalseUnwind { .. } => {
+                    never!("We don't emit these MIR terminators yet");
+                }
+            }
+        }
+    }
+    let mut stack = Vec::new();
     for &l in &body.param_locals {
         result[body.start_block].insert(l, true);
-        dfs(db, body, body.start_block, l, &mut result);
+        stack.clear();
+        stack.push(body.start_block);
+        dfs(db, body, l, &mut stack, &mut result);
     }
     for l in body.locals.iter().map(|it| it.0) {
         db.unwind_if_cancelled();
         if !result[body.start_block].contains_idx(l) {
             result[body.start_block].insert(l, false);
-            dfs(db, body, body.start_block, l, &mut result);
+            stack.clear();
+            stack.push(body.start_block);
+            dfs(db, body, l, &mut stack, &mut result);
         }
     }
     result
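This hunk converts the recursive depth-first search into an iterative one driven by an explicit work stack, so deeply nested MIR control flow can no longer overflow the call stack; the `process` closure now pushes discovered blocks instead of recursing. The general pattern, reduced to a plain self-contained graph walk (illustrative names, not this crate's types):

    use std::collections::HashMap;

    /// Iterative DFS over a block graph: an explicit Vec replaces the call
    /// stack, and `visited` plays the role the `result` map plays above.
    fn reachable(edges: &HashMap<u32, Vec<u32>>, start: u32) -> Vec<u32> {
        let mut visited = vec![start];
        let mut stack = vec![start];
        while let Some(block) = stack.pop() {
            for &next in edges.get(&block).into_iter().flatten() {
                if !visited.contains(&next) {
                    visited.push(next);
                    stack.push(next); // was: a recursive call
                }
            }
        }
        visited
    }

    fn main() {
        let edges = HashMap::from([(0, vec![1, 2]), (1, vec![2])]);
        assert_eq!(reachable(&edges, 0).len(), 3);
    }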
@@ -144,9 +144,9 @@ pub use {
     hir_ty::{
         consteval::ConstEvalError,
         display::{ClosureStyle, HirDisplay, HirDisplayError, HirWrite},
+        dyn_compatibility::{DynCompatibilityViolation, MethodViolationCode},
         layout::LayoutError,
         mir::{MirEvalError, MirLowerError},
-        object_safety::{MethodViolationCode, ObjectSafetyViolation},
         CastError, FnAbi, PointerCast, Safety,
     },
     // FIXME: Properly encapsulate mir

@@ -497,10 +497,9 @@ impl Module {
 
     /// Finds a parent module.
     pub fn parent(self, db: &dyn HirDatabase) -> Option<Module> {
-        // FIXME: handle block expressions as modules (their parent is in a different DefMap)
         let def_map = self.id.def_map(db.upcast());
-        let parent_id = def_map[self.id.local_id].parent?;
-        Some(Module { id: def_map.module_id(parent_id) })
+        let parent_id = def_map.containing_module(self.id.local_id)?;
+        Some(Module { id: parent_id })
     }
 
     /// Finds nearest non-block ancestor `Module` (`self` included).

@@ -557,7 +556,7 @@ impl Module {
         acc: &mut Vec<AnyDiagnostic>,
         style_lints: bool,
     ) {
-        let _p = tracing::info_span!("Module::diagnostics", name = ?self.name(db)).entered();
+        let _p = tracing::info_span!("diagnostics", name = ?self.name(db)).entered();
         let edition = db.crate_graph()[self.id.krate()].edition;
         let def_map = self.id.def_map(db.upcast());
         for diag in def_map.diagnostics() {

@@ -2690,8 +2689,8 @@ impl Trait {
             .count()
     }
 
-    pub fn object_safety(&self, db: &dyn HirDatabase) -> Option<ObjectSafetyViolation> {
-        hir_ty::object_safety::object_safety(db, self.id)
+    pub fn dyn_compatibility(&self, db: &dyn HirDatabase) -> Option<DynCompatibilityViolation> {
+        hir_ty::dyn_compatibility::dyn_compatibility(db, self.id)
    }
 
     fn all_macro_calls(&self, db: &dyn HirDatabase) -> Box<[(AstId<ast::Item>, MacroCallId)]> {
@@ -24,6 +24,7 @@ use hir_expand::{
     builtin::{BuiltinFnLikeExpander, EagerExpander},
     db::ExpandDatabase,
     files::InRealFile,
+    hygiene::SyntaxContextExt as _,
     inert_attr_macro::find_builtin_attr_idx,
     name::AsName,
     FileRange, InMacroFile, MacroCallId, MacroFileId, MacroFileIdExt,

@@ -32,13 +33,13 @@ use intern::Symbol;
 use itertools::Itertools;
 use rustc_hash::{FxHashMap, FxHashSet};
 use smallvec::{smallvec, SmallVec};
-use span::{EditionedFileId, FileId, HirFileIdRepr};
+use span::{EditionedFileId, FileId, HirFileIdRepr, SyntaxContextId};
 use stdx::TupleExt;
 use syntax::{
     algo::skip_trivia_token,
-    ast::{self, HasAttrs as _, HasGenericParams, HasLoopBody, IsString as _},
-    match_ast, AstNode, AstToken, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken,
-    TextRange, TextSize,
+    ast::{self, HasAttrs as _, HasGenericParams, IsString as _},
+    AstNode, AstToken, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange,
+    TextSize,
 };
 
 use crate::{

@@ -608,7 +609,7 @@ impl<'db> SemanticsImpl<'db> {
         let quote = string.open_quote_text_range()?;
 
         let token = self.wrap_token_infile(string.syntax().clone()).into_real_file().ok()?;
-        self.descend_into_macros_breakable(token, |token| {
+        self.descend_into_macros_breakable(token, |token, _| {
             (|| {
                 let token = token.value;
                 let string = ast::String::cast(token)?;

@@ -655,7 +656,7 @@ impl<'db> SemanticsImpl<'db> {
         let original_string = ast::String::cast(original_token.clone())?;
         let original_token = self.wrap_token_infile(original_token).into_real_file().ok()?;
         let quote = original_string.open_quote_text_range()?;
-        self.descend_into_macros_breakable(original_token, |token| {
+        self.descend_into_macros_breakable(original_token, |token, _| {
             (|| {
                 let token = token.value;
                 self.resolve_offset_in_format_args(

@@ -718,7 +719,7 @@ impl<'db> SemanticsImpl<'db> {
             // node is just the token, so descend the token
             self.descend_into_macros_impl(
                 InRealFile::new(file_id, first),
-                &mut |InFile { value, .. }| {
+                &mut |InFile { value, .. }, _ctx| {
                     if let Some(node) = value
                         .parent_ancestors()
                         .take_while(|it| it.text_range() == value.text_range())

@@ -732,7 +733,7 @@ impl<'db> SemanticsImpl<'db> {
         } else {
             // Descend first and last token, then zip them to look for the node they belong to
             let mut scratch: SmallVec<[_; 1]> = smallvec![];
-            self.descend_into_macros_impl(InRealFile::new(file_id, first), &mut |token| {
+            self.descend_into_macros_impl(InRealFile::new(file_id, first), &mut |token, _ctx| {
                 scratch.push(token);
                 CONTINUE_NO_BREAKS
             });

@@ -740,7 +741,7 @@ impl<'db> SemanticsImpl<'db> {
             let mut scratch = scratch.into_iter();
             self.descend_into_macros_impl(
                 InRealFile::new(file_id, last),
-                &mut |InFile { value: last, file_id: last_fid }| {
+                &mut |InFile { value: last, file_id: last_fid }, _ctx| {
                     if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() {
                         if first_fid == last_fid {
                             if let Some(p) = first.parent() {

@@ -763,7 +764,9 @@ impl<'db> SemanticsImpl<'db> {
         res
     }
 
-    fn is_inside_macro_call(token: &SyntaxToken) -> bool {
+    // FIXME: This isn't quite right wrt to inner attributes
+    /// Does a syntactic traversal to check whether this token might be inside a macro call
+    pub fn might_be_inside_macro_call(&self, token: &SyntaxToken) -> bool {
         token.parent_ancestors().any(|ancestor| {
             if ast::MacroCall::can_cast(ancestor.kind()) {
                 return true;

@@ -781,25 +784,14 @@ impl<'db> SemanticsImpl<'db> {
         })
     }
 
-    pub fn descend_into_macros_exact_if_in_macro(
-        &self,
-        token: SyntaxToken,
-    ) -> SmallVec<[SyntaxToken; 1]> {
-        if Self::is_inside_macro_call(&token) {
-            self.descend_into_macros_exact(token)
-        } else {
-            smallvec![token]
-        }
-    }
-
     pub fn descend_into_macros_cb(
         &self,
         token: SyntaxToken,
-        mut cb: impl FnMut(InFile<SyntaxToken>),
+        mut cb: impl FnMut(InFile<SyntaxToken>, SyntaxContextId),
     ) {
         if let Ok(token) = self.wrap_token_infile(token).into_real_file() {
-            self.descend_into_macros_impl(token, &mut |t| {
-                cb(t);
+            self.descend_into_macros_impl(token, &mut |t, ctx| {
+                cb(t, ctx);
                 CONTINUE_NO_BREAKS
             });
         }

@@ -808,7 +800,7 @@ impl<'db> SemanticsImpl<'db> {
     pub fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
         let mut res = smallvec![];
         if let Ok(token) = self.wrap_token_infile(token.clone()).into_real_file() {
-            self.descend_into_macros_impl(token, &mut |t| {
+            self.descend_into_macros_impl(token, &mut |t, _ctx| {
                 res.push(t.value);
                 CONTINUE_NO_BREAKS
             });

@@ -819,10 +811,27 @@ impl<'db> SemanticsImpl<'db> {
         res
     }
 
+    pub fn descend_into_macros_no_opaque(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
+        let mut res = smallvec![];
+        if let Ok(token) = self.wrap_token_infile(token.clone()).into_real_file() {
+            self.descend_into_macros_impl(token, &mut |t, ctx| {
+                if !ctx.is_opaque(self.db.upcast()) {
+                    // Don't descend into opaque contexts
+                    res.push(t.value);
+                }
+                CONTINUE_NO_BREAKS
+            });
+        }
+        if res.is_empty() {
+            res.push(token);
+        }
+        res
+    }
+
     pub fn descend_into_macros_breakable<T>(
         &self,
         token: InRealFile<SyntaxToken>,
-        mut cb: impl FnMut(InFile<SyntaxToken>) -> ControlFlow<T>,
+        mut cb: impl FnMut(InFile<SyntaxToken>, SyntaxContextId) -> ControlFlow<T>,
     ) -> Option<T> {
         self.descend_into_macros_impl(token.clone(), &mut cb)
     }
@ -834,10 +843,12 @@ impl<'db> SemanticsImpl<'db> {
|
||||||
let text = token.text();
|
let text = token.text();
|
||||||
let kind = token.kind();
|
let kind = token.kind();
|
||||||
|
|
||||||
self.descend_into_macros_cb(token.clone(), |InFile { value, file_id: _ }| {
|
self.descend_into_macros_cb(token.clone(), |InFile { value, file_id: _ }, ctx| {
|
||||||
let mapped_kind = value.kind();
|
let mapped_kind = value.kind();
|
||||||
let any_ident_match = || kind.is_any_identifier() && value.kind().is_any_identifier();
|
let any_ident_match = || kind.is_any_identifier() && value.kind().is_any_identifier();
|
||||||
let matches = (kind == mapped_kind || any_ident_match()) && text == value.text();
|
let matches = (kind == mapped_kind || any_ident_match())
|
||||||
|
&& text == value.text()
|
||||||
|
&& !ctx.is_opaque(self.db.upcast());
|
||||||
if matches {
|
if matches {
|
||||||
r.push(value);
|
r.push(value);
|
||||||
}
|
}
|
||||||
|
@ -854,17 +865,21 @@ impl<'db> SemanticsImpl<'db> {
|
||||||
let text = token.text();
|
let text = token.text();
|
||||||
let kind = token.kind();
|
let kind = token.kind();
|
||||||
if let Ok(token) = self.wrap_token_infile(token.clone()).into_real_file() {
|
if let Ok(token) = self.wrap_token_infile(token.clone()).into_real_file() {
|
||||||
self.descend_into_macros_breakable(token.clone(), |InFile { value, file_id: _ }| {
|
self.descend_into_macros_breakable(
|
||||||
let mapped_kind = value.kind();
|
token.clone(),
|
||||||
let any_ident_match =
|
|InFile { value, file_id: _ }, _ctx| {
|
||||||
|| kind.is_any_identifier() && value.kind().is_any_identifier();
|
let mapped_kind = value.kind();
|
||||||
let matches = (kind == mapped_kind || any_ident_match()) && text == value.text();
|
let any_ident_match =
|
||||||
if matches {
|
|| kind.is_any_identifier() && value.kind().is_any_identifier();
|
||||||
ControlFlow::Break(value)
|
let matches =
|
||||||
} else {
|
(kind == mapped_kind || any_ident_match()) && text == value.text();
|
||||||
ControlFlow::Continue(())
|
if matches {
|
||||||
}
|
ControlFlow::Break(value)
|
||||||
})
|
} else {
|
||||||
|
ControlFlow::Continue(())
|
||||||
|
}
|
||||||
|
},
|
||||||
|
)
|
||||||
} else {
|
} else {
|
||||||
None
|
None
|
||||||
}
|
}
|
||||||
|
@ -874,7 +889,7 @@ impl<'db> SemanticsImpl<'db> {
|
||||||
fn descend_into_macros_impl<T>(
|
fn descend_into_macros_impl<T>(
|
||||||
&self,
|
&self,
|
||||||
InRealFile { value: token, file_id }: InRealFile<SyntaxToken>,
|
InRealFile { value: token, file_id }: InRealFile<SyntaxToken>,
|
||||||
f: &mut dyn FnMut(InFile<SyntaxToken>) -> ControlFlow<T>,
|
f: &mut dyn FnMut(InFile<SyntaxToken>, SyntaxContextId) -> ControlFlow<T>,
|
||||||
) -> Option<T> {
|
) -> Option<T> {
|
||||||
let _p = tracing::info_span!("descend_into_macros_impl").entered();
|
let _p = tracing::info_span!("descend_into_macros_impl").entered();
|
||||||
let (sa, span, file_id) = token
|
let (sa, span, file_id) = token
|
||||||
|
@ -898,7 +913,8 @@ impl<'db> SemanticsImpl<'db> {
|
||||||
// These are tracked to know which macro calls we still have to look into
|
// These are tracked to know which macro calls we still have to look into
|
||||||
// the tokens themselves aren't that interesting as the span that is being used to map
|
// the tokens themselves aren't that interesting as the span that is being used to map
|
||||||
// things down never changes.
|
// things down never changes.
|
||||||
let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![(file_id, smallvec![token])];
|
let mut stack: Vec<(_, SmallVec<[_; 2]>)> =
|
||||||
|
vec![(file_id, smallvec![(token, SyntaxContextId::ROOT)])];
|
||||||
|
|
||||||
// Process the expansion of a call, pushing all tokens with our span in the expansion back onto our stack
|
// Process the expansion of a call, pushing all tokens with our span in the expansion back onto our stack
|
||||||
let process_expansion_for_token = |stack: &mut Vec<_>, macro_file| {
|
let process_expansion_for_token = |stack: &mut Vec<_>, macro_file| {
|
||||||
|
@ -921,11 +937,11 @@ impl<'db> SemanticsImpl<'db> {
|
||||||
// Filters out all tokens that contain the given range (usually the macro call), any such
|
// Filters out all tokens that contain the given range (usually the macro call), any such
|
||||||
// token is redundant as the corresponding macro call has already been processed
|
// token is redundant as the corresponding macro call has already been processed
|
||||||
let filter_duplicates = |tokens: &mut SmallVec<_>, range: TextRange| {
|
let filter_duplicates = |tokens: &mut SmallVec<_>, range: TextRange| {
|
||||||
tokens.retain(|t: &mut SyntaxToken| !range.contains_range(t.text_range()))
|
tokens.retain(|(t, _): &mut (SyntaxToken, _)| !range.contains_range(t.text_range()))
|
||||||
};
|
};
|
||||||
|
|
||||||
while let Some((expansion, ref mut tokens)) = stack.pop() {
|
while let Some((expansion, ref mut tokens)) = stack.pop() {
|
||||||
while let Some(token) = tokens.pop() {
|
while let Some((token, ctx)) = tokens.pop() {
|
||||||
let was_not_remapped = (|| {
|
let was_not_remapped = (|| {
|
||||||
// First expand into attribute invocations
|
// First expand into attribute invocations
|
||||||
let containing_attribute_macro_call = self.with_ctx(|ctx| {
|
let containing_attribute_macro_call = self.with_ctx(|ctx| {
|
||||||
|
@ -1036,7 +1052,7 @@ impl<'db> SemanticsImpl<'db> {
|
||||||
let text_range = attr.syntax().text_range();
|
let text_range = attr.syntax().text_range();
|
||||||
// remove any other token in this macro input, all their mappings are the
|
// remove any other token in this macro input, all their mappings are the
|
||||||
// same as this
|
// same as this
|
||||||
tokens.retain(|t| {
|
tokens.retain(|(t, _)| {
|
||||||
!text_range.contains_range(t.text_range())
|
!text_range.contains_range(t.text_range())
|
||||||
});
|
});
|
||||||
return process_expansion_for_token(
|
return process_expansion_for_token(
|
||||||
|
@ -1093,7 +1109,7 @@ impl<'db> SemanticsImpl<'db> {
|
||||||
.is_none();
|
.is_none();
|
||||||
|
|
||||||
if was_not_remapped {
|
if was_not_remapped {
|
||||||
if let ControlFlow::Break(b) = f(InFile::new(expansion, token)) {
|
if let ControlFlow::Break(b) = f(InFile::new(expansion, token), ctx) {
|
||||||
return Some(b);
|
return Some(b);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1221,26 +1237,10 @@ impl<'db> SemanticsImpl<'db> {
|
||||||
ToDef::to_def(self, src.as_ref())
|
ToDef::to_def(self, src.as_ref())
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn resolve_label(&self, lifetime: &ast::Lifetime) -> Option<Label> {
|
pub fn resolve_label(&self, label: &ast::Lifetime) -> Option<Label> {
|
||||||
let text = lifetime.text();
|
let (parent, label_id) = self
|
||||||
let label = lifetime.syntax().ancestors().find_map(|syn| {
|
.with_ctx(|ctx| ctx.label_ref_to_def(self.wrap_node_infile(label.clone()).as_ref()))?;
|
||||||
let label = match_ast! {
|
Some(Label { parent, label_id })
|
||||||
match syn {
|
|
||||||
ast::ForExpr(it) => it.label(),
|
|
||||||
ast::WhileExpr(it) => it.label(),
|
|
||||||
ast::LoopExpr(it) => it.label(),
|
|
||||||
ast::BlockExpr(it) => it.label(),
|
|
||||||
_ => None,
|
|
||||||
}
|
|
||||||
};
|
|
||||||
label.filter(|l| {
|
|
||||||
l.lifetime()
|
|
||||||
.and_then(|lt| lt.lifetime_ident_token())
|
|
||||||
.map_or(false, |lt| lt.text() == text)
|
|
||||||
})
|
|
||||||
})?;
|
|
||||||
let src = self.wrap_node_infile(label);
|
|
||||||
ToDef::to_def(self, src.as_ref())
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn resolve_type(&self, ty: &ast::Type) -> Option<Type> {
|
pub fn resolve_type(&self, ty: &ast::Type) -> Option<Type> {
|
||||||
|
|
|
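Taken together, the hunks above thread a `SyntaxContextId` through every descend callback so callers can filter tokens by hygiene. A minimal sketch of a caller using the new signature (the helper function and its `db` parameter are hypothetical; `descend_into_macros_cb` and `is_opaque` are the APIs changed above):

// Hypothetical helper: collect only tokens mapped through transparent
// (non-opaque) syntax contexts, mirroring descend_into_macros_no_opaque.
fn transparent_descendants(
    sema: &Semantics<'_, RootDatabase>,
    db: &RootDatabase,
    token: SyntaxToken,
) -> Vec<SyntaxToken> {
    let mut out = Vec::new();
    sema.descend_into_macros_cb(token, |in_file, ctx| {
        // `ctx` identifies the syntax context the token was mapped through;
        // opaque contexts come from hygienic, non-transparent expansions.
        if !ctx.is_opaque(db) {
            out.push(in_file.value);
        }
    });
    out
}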
@@ -92,7 +92,7 @@ use hir_def::{
         keys::{self, Key},
         DynMap,
     },
-    hir::{BindingId, LabelId},
+    hir::{BindingId, Expr, LabelId},
     AdtId, BlockId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, ExternCrateId,
     FieldId, FunctionId, GenericDefId, GenericParamId, ImplId, LifetimeParamId, Lookup, MacroId,
     ModuleId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId, TypeParamId, UnionId, UseId,
@@ -343,6 +343,20 @@ impl SourceToDefCtx<'_, '_> {
         Some((container, label_id))
     }

+    pub(super) fn label_ref_to_def(
+        &mut self,
+        src: InFile<&ast::Lifetime>,
+    ) -> Option<(DefWithBodyId, LabelId)> {
+        let break_or_continue = ast::Expr::cast(src.value.syntax().parent()?)?;
+        let container = self.find_pat_or_label_container(src.syntax_ref())?;
+        let (body, source_map) = self.db.body_with_source_map(container);
+        let break_or_continue = source_map.node_expr(src.with_value(&break_or_continue))?;
+        let (Expr::Break { label, .. } | Expr::Continue { label }) = body[break_or_continue] else {
+            return None;
+        };
+        Some((container, label?))
+    }
+
     pub(super) fn item_to_macro_call(&mut self, src: InFile<&ast::Item>) -> Option<MacroCallId> {
         let map = self.dyn_map(src)?;
         map[keys::ATTR_MACRO_CALL].get(&AstPtr::new(src.value)).copied()
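The new `label_ref_to_def` resolves a label reference by lowering its parent expression instead of walking ancestors syntactically, which is what makes label targets reachable through macro expansions. A tiny, self-contained example of the shape of code this handles (hypothetical user code, not from the diff):

fn example() {
    'outer: for i in 0..10 {
        // The `'outer` below is an ast::Lifetime whose parent lowers to
        // Expr::Continue { label }; the body source map now links it back
        // to the `'outer` label on the `for` loop, even through macros.
        if i == 5 {
            continue 'outer;
        }
    }
}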
@@ -1,8 +1,12 @@
 use hir::TypeInfo;
 use ide_db::syntax_helpers::suggest_name;
 use syntax::{
-    ast::{self, edit::IndentLevel, edit_in_place::Indent, make, AstNode, HasName},
-    ted, NodeOrToken,
+    ast::{
+        self, edit::IndentLevel, edit_in_place::Indent, make, syntax_factory::SyntaxFactory,
+        AstNode,
+    },
+    syntax_editor::Position,
+    NodeOrToken,
     SyntaxKind::{BLOCK_EXPR, BREAK_EXPR, COMMENT, LOOP_EXPR, MATCH_GUARD, PATH_EXPR, RETURN_EXPR},
     SyntaxNode, T,
 };
@@ -105,39 +109,46 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
                 ),
             };

+            let make = SyntaxFactory::new();
+            let mut editor = edit.make_editor(&expr_replace);
+
+            let pat_name = make.name(&var_name);
+            let name_expr = make.expr_path(make::ext::ident_path(&var_name));
+
+            if let Some(cap) = ctx.config.snippet_cap {
+                let tabstop = edit.make_tabstop_before(cap);
+                editor.add_annotation(pat_name.syntax().clone(), tabstop);
+            }
+
             let ident_pat = match parent {
                 Some(ast::Expr::RefExpr(expr)) if expr.mut_token().is_some() => {
-                    make::ident_pat(false, true, make::name(&var_name))
+                    make.ident_pat(false, true, pat_name)
                 }
                 _ if needs_adjust
                     && !needs_ref
                     && ty.as_ref().is_some_and(|ty| ty.is_mutable_reference()) =>
                 {
-                    make::ident_pat(false, true, make::name(&var_name))
+                    make.ident_pat(false, true, pat_name)
                 }
-                _ => make::ident_pat(false, false, make::name(&var_name)),
+                _ => make.ident_pat(false, false, pat_name),
             };

             let to_extract_no_ref = match ty.as_ref().filter(|_| needs_ref) {
                 Some(receiver_type) if receiver_type.is_mutable_reference() => {
-                    make::expr_ref(to_extract_no_ref, true)
+                    make.expr_ref(to_extract_no_ref, true)
                 }
                 Some(receiver_type) if receiver_type.is_reference() => {
-                    make::expr_ref(to_extract_no_ref, false)
+                    make.expr_ref(to_extract_no_ref, false)
                 }
                 _ => to_extract_no_ref,
             };

-            let expr_replace = edit.make_syntax_mut(expr_replace);
-            let let_stmt =
-                make::let_stmt(ident_pat.into(), None, Some(to_extract_no_ref)).clone_for_update();
-            let name_expr = make::expr_path(make::ext::ident_path(&var_name)).clone_for_update();
+            let let_stmt = make.let_stmt(ident_pat.into(), None, Some(to_extract_no_ref));

             match anchor {
                 Anchor::Before(place) => {
                     let prev_ws = place.prev_sibling_or_token().and_then(|it| it.into_token());
                     let indent_to = IndentLevel::from_node(&place);
-                    let insert_place = edit.make_syntax_mut(place);
-
                     // Adjust ws to insert depending on if this is all inline or on separate lines
                     let trailing_ws = if prev_ws.is_some_and(|it| it.text().starts_with('\n')) {
@@ -146,37 +157,20 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
                         " ".to_owned()
                     };

-                    ted::insert_all_raw(
-                        ted::Position::before(insert_place),
+                    editor.insert_all(
+                        Position::before(place),
                         vec![
                             let_stmt.syntax().clone().into(),
                             make::tokens::whitespace(&trailing_ws).into(),
                         ],
                     );

-                    ted::replace(expr_replace, name_expr.syntax());
-
-                    if let Some(cap) = ctx.config.snippet_cap {
-                        if let Some(ast::Pat::IdentPat(ident_pat)) = let_stmt.pat() {
-                            if let Some(name) = ident_pat.name() {
-                                edit.add_tabstop_before(cap, name);
-                            }
-                        }
-                    }
+                    editor.replace(expr_replace, name_expr.syntax());
                 }
                 Anchor::Replace(stmt) => {
                     cov_mark::hit!(test_extract_var_expr_stmt);

-                    let stmt_replace = edit.make_mut(stmt);
-                    ted::replace(stmt_replace.syntax(), let_stmt.syntax());
-
-                    if let Some(cap) = ctx.config.snippet_cap {
-                        if let Some(ast::Pat::IdentPat(ident_pat)) = let_stmt.pat() {
-                            if let Some(name) = ident_pat.name() {
-                                edit.add_tabstop_before(cap, name);
-                            }
-                        }
-                    }
+                    editor.replace(stmt.syntax(), let_stmt.syntax());
                 }
                 Anchor::WrapInBlock(to_wrap) => {
                     let indent_to = to_wrap.indent_level();
@@ -184,47 +178,22 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
                     let block = if to_wrap.syntax() == &expr_replace {
                         // Since `expr_replace` is the same that needs to be wrapped in a block,
                         // we can just directly replace it with a block
-                        let block =
-                            make::block_expr([let_stmt.into()], Some(name_expr)).clone_for_update();
-                        ted::replace(expr_replace, block.syntax());
-
-                        block
+                        make.block_expr([let_stmt.into()], Some(name_expr))
                     } else {
-                        // `expr_replace` is a descendant of `to_wrap`, so both steps need to be
-                        // handled separately, otherwise we wrap the wrong expression
-                        let to_wrap = edit.make_mut(to_wrap);
-
-                        // Replace the target expr first so that we don't need to find where
-                        // `expr_replace` is in the wrapped `to_wrap`
-                        ted::replace(expr_replace, name_expr.syntax());
-
-                        // Wrap `to_wrap` in a block
-                        let block = make::block_expr([let_stmt.into()], Some(to_wrap.clone()))
-                            .clone_for_update();
-                        ted::replace(to_wrap.syntax(), block.syntax());
-
-                        block
+                        // `expr_replace` is a descendant of `to_wrap`, so we just replace it with `name_expr`.
+                        editor.replace(expr_replace, name_expr.syntax());
+                        make.block_expr([let_stmt.into()], Some(to_wrap.clone()))
                     };

-                    if let Some(cap) = ctx.config.snippet_cap {
-                        // Adding a tabstop to `name` requires finding the let stmt again, since
-                        // the existing `let_stmt` is not actually added to the tree
-                        let pat = block.statements().find_map(|stmt| {
-                            let ast::Stmt::LetStmt(let_stmt) = stmt else { return None };
-                            let_stmt.pat()
-                        });
-
-                        if let Some(ast::Pat::IdentPat(ident_pat)) = pat {
-                            if let Some(name) = ident_pat.name() {
-                                edit.add_tabstop_before(cap, name);
-                            }
-                        }
-                    }
+                    editor.replace(to_wrap.syntax(), block.syntax());

                     // fixup indentation of block
                     block.indent(indent_to);
                 }
             }

+            editor.add_mappings(make.finish_with_mappings());
+            edit.add_file_edits(ctx.file_id(), editor);
             edit.rename();
         },
     )
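This assist migrates from in-place `ted` mutation to the newer `SyntaxFactory`/`SyntaxEditor` pipeline. A condensed sketch of that idiom under the same assumptions as the hunk above (`edit`, `ctx`, `expr_replace`, `to_extract`, and `var_name` come from the assist context; treat the exact call shapes as a sketch, not a verbatim API reference):

// Build nodes through the factory so edits can be mapped back for annotations.
let make = SyntaxFactory::new();
let mut editor = edit.make_editor(&expr_replace);

let pat_name = make.name(&var_name);
let let_stmt = make.let_stmt(
    make.ident_pat(false, false, pat_name).into(),
    None,
    Some(to_extract),
);

// Record the edit; nothing is mutated until the editor is applied.
editor.insert(Position::before(&expr_replace), let_stmt.syntax());

// Snippet tabstops survive because the factory's mappings are handed over.
editor.add_mappings(make.finish_with_mappings());
edit.add_file_edits(ctx.file_id(), editor);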
@@ -7,7 +7,7 @@
 use hir::ImportPathConfig;
 use ide_db::{imports::insert_use::InsertUseConfig, SnippetCap};

-use crate::snippet::Snippet;
+use crate::{snippet::Snippet, CompletionFieldsToResolve};

 #[derive(Clone, Debug, PartialEq, Eq)]
 pub struct CompletionConfig {
@@ -27,6 +27,7 @@ pub struct CompletionConfig {
     pub prefer_absolute: bool,
     pub snippets: Vec<Snippet>,
     pub limit: Option<usize>,
+    pub fields_to_resolve: CompletionFieldsToResolve,
 }

 #[derive(Clone, Debug, PartialEq, Eq)]

@@ -7,7 +7,8 @@ mod tests;
 use std::{iter, ops::ControlFlow};

 use hir::{
-    HasAttrs, Local, Name, PathResolution, ScopeDef, Semantics, SemanticsScope, Type, TypeInfo,
+    HasAttrs, Local, ModuleSource, Name, PathResolution, ScopeDef, Semantics, SemanticsScope, Type,
+    TypeInfo,
 };
 use ide_db::{
     base_db::SourceDatabase, famous_defs::FamousDefs, helpers::is_editable_crate, FilePosition,
@@ -743,7 +744,12 @@ impl<'a> CompletionContext<'a> {
             }
         });

-        let depth_from_crate_root = iter::successors(module.parent(db), |m| m.parent(db)).count();
+        let depth_from_crate_root = iter::successors(Some(module), |m| m.parent(db))
+            // `BlockExpr` modules are not count as module depth
+            .filter(|m| !matches!(m.definition_source(db).value, ModuleSource::BlockExpr(_)))
+            .count()
+            // exclude `m` itself
+            .saturating_sub(1);

         let complete_semicolon = if config.add_semicolon_to_unit {
             let inside_closure_ret = token.parent_ancestors().try_for_each(|ancestor| {

@@ -37,6 +37,31 @@ pub use crate::{
     snippet::{Snippet, SnippetScope},
 };

+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub struct CompletionFieldsToResolve {
+    pub resolve_label_details: bool,
+    pub resolve_tags: bool,
+    pub resolve_detail: bool,
+    pub resolve_documentation: bool,
+    pub resolve_filter_text: bool,
+    pub resolve_text_edit: bool,
+    pub resolve_command: bool,
+}
+
+impl CompletionFieldsToResolve {
+    pub const fn empty() -> Self {
+        Self {
+            resolve_label_details: false,
+            resolve_tags: false,
+            resolve_detail: false,
+            resolve_documentation: false,
+            resolve_filter_text: false,
+            resolve_text_edit: false,
+            resolve_command: false,
+        }
+    }
+}
+
 //FIXME: split the following feature into fine-grained features.

 // Feature: Magic Completions
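Since `CompletionFieldsToResolve` is `Copy` with a `const` constructor, a config can start from `empty()` and opt individual fields into deferred handling. A hedged sketch of constructing one (`base_config` is hypothetical, and the reading that `true` marks a field as lazily resolved via `completionItem/resolve` follows from the field naming, not from code shown here):

// Defer the expensive fields to completionItem/resolve; fill the rest eagerly.
let fields_to_resolve = CompletionFieldsToResolve {
    resolve_documentation: true,
    resolve_text_edit: true,
    ..CompletionFieldsToResolve::empty()
};
let config = CompletionConfig { fields_to_resolve, ..base_config };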
@@ -37,8 +37,8 @@ use test_fixture::ChangeFixture;
 use test_utils::assert_eq_text;

 use crate::{
-    resolve_completion_edits, CallableSnippets, CompletionConfig, CompletionItem,
-    CompletionItemKind,
+    resolve_completion_edits, CallableSnippets, CompletionConfig, CompletionFieldsToResolve,
+    CompletionItem, CompletionItemKind,
 };

 /// Lots of basic item definitions
@@ -84,6 +84,7 @@ pub(crate) const TEST_CONFIG: CompletionConfig = CompletionConfig {
     prefer_absolute: false,
     snippets: Vec::new(),
     limit: None,
+    fields_to_resolve: CompletionFieldsToResolve::empty(),
 };

 pub(crate) fn completion_list(ra_fixture: &str) -> String {

@@ -7081,8 +7081,8 @@ This feature has no tracking issue, and is therefore likely internal to the comp
         "##,
     },
     Lint {
-        label: "object_safe_for_dispatch",
-        description: r##"# `object_safe_for_dispatch`
+        label: "dyn_compatible_for_dispatch",
+        description: r##"# `dyn_compatible_for_dispatch`

 The tracking issue for this feature is: [#43561]

@@ -614,7 +614,7 @@ fn path_segment_cmp(a: &ast::PathSegment, b: &ast::PathSegment) -> Ordering {
         (Some(_), None) => Ordering::Greater,
         (None, Some(_)) => Ordering::Less,
         (Some(a_name), Some(b_name)) => {
-            // snake_case < CamelCase < UPPER_SNAKE_CASE
+            // snake_case < UpperCamelCase < UPPER_SNAKE_CASE
             let a_text = a_name.as_str().trim_start_matches("r#");
             let b_text = b_name.as_str().trim_start_matches("r#");
             if a_text.starts_with(char::is_lowercase)

@@ -13,6 +13,7 @@ use crate::{
         salsa::{Database, ParallelDatabase, Snapshot},
         Cancelled, CrateId, SourceDatabase, SourceRootDatabase,
     },
+    symbol_index::SymbolsDatabase,
     FxIndexMap, RootDatabase,
 };

@@ -54,11 +55,13 @@ pub fn parallel_prime_caches(
     let (progress_sender, progress_receiver) = crossbeam_channel::unbounded();
     let (work_sender, work_receiver) = crossbeam_channel::unbounded();
     let graph = graph.clone();
+    let local_roots = db.local_roots();
     let prime_caches_worker = move |db: Snapshot<RootDatabase>| {
         while let Ok((crate_id, crate_name)) = work_receiver.recv() {
             progress_sender
                 .send(ParallelPrimeCacheWorkerProgress::BeginCrate { crate_id, crate_name })?;

+            // Compute the DefMap and possibly ImportMap
             let file_id = graph[crate_id].root_file_id;
             let root_id = db.file_source_root(file_id);
             if db.source_root(root_id).is_library {
@@ -68,6 +71,19 @@ pub fn parallel_prime_caches(
                 db.import_map(crate_id);
             }

+            // Compute the symbol search index.
+            // This primes the cache for `ide_db::symbol_index::world_symbols()`.
+            //
+            // We do this for workspace crates only (members of local_roots), because doing it
+            // for all dependencies could be *very* unnecessarily slow in a large project.
+            //
+            // FIXME: We should do it unconditionally if the configuration is set to default to
+            // searching dependencies (rust-analyzer.workspace.symbol.search.scope), but we
+            // would need to pipe that configuration information down here.
+            if local_roots.contains(&root_id) {
+                db.crate_symbols(crate_id.into());
+            }
+
             progress_sender.send(ParallelPrimeCacheWorkerProgress::EndCrate { crate_id })?;
         }

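The comment block above gives the rationale; the mechanism is simply that the worker and the workspace-symbol request hit the same salsa query. A rough sketch of the consumer side (assuming `Query` and `world_symbols` keep their current `ide_db::symbol_index` shapes):

use ide_db::{symbol_index, RootDatabase};

// After priming, the per-crate symbol indexes for workspace members are
// already computed, so the first workspace-symbol request is served warm.
fn workspace_symbol_count(db: &RootDatabase, text: &str) -> usize {
    let query = symbol_index::Query::new(text.to_owned());
    symbol_index::world_symbols(db, query).len()
}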
@@ -529,9 +529,13 @@ impl<'a> FindUsages<'a> {
             })
             .into_iter()
             .flat_map(move |token| {
-                sema.descend_into_macros_exact_if_in_macro(token)
-                    .into_iter()
-                    .filter_map(|it| it.parent())
+                if sema.might_be_inside_macro_call(&token) {
+                    sema.descend_into_macros_exact(token)
+                } else {
+                    <_>::from([token])
+                }
+                .into_iter()
+                .filter_map(|it| it.parent())
             })
     }

@@ -229,10 +229,10 @@ fn foo() {
     check_diagnostics(
         r#"
 struct non_camel_case_name {}
-// ^^^^^^^^^^^^^^^^^^^ 💡 warn: Structure `non_camel_case_name` should have CamelCase name, e.g. `NonCamelCaseName`
+// ^^^^^^^^^^^^^^^^^^^ 💡 warn: Structure `non_camel_case_name` should have UpperCamelCase name, e.g. `NonCamelCaseName`

 struct SCREAMING_CASE {}
-// ^^^^^^^^^^^^^^ 💡 warn: Structure `SCREAMING_CASE` should have CamelCase name, e.g. `ScreamingCase`
+// ^^^^^^^^^^^^^^ 💡 warn: Structure `SCREAMING_CASE` should have UpperCamelCase name, e.g. `ScreamingCase`
 "#,
     );
 }
@@ -261,10 +261,10 @@ struct SomeStruct { SomeField: u8 }
     check_diagnostics(
         r#"
 enum some_enum { Val(u8) }
-// ^^^^^^^^^ 💡 warn: Enum `some_enum` should have CamelCase name, e.g. `SomeEnum`
+// ^^^^^^^^^ 💡 warn: Enum `some_enum` should have UpperCamelCase name, e.g. `SomeEnum`

 enum SOME_ENUM {}
-// ^^^^^^^^^ 💡 warn: Enum `SOME_ENUM` should have CamelCase name, e.g. `SomeEnum`
+// ^^^^^^^^^ 💡 warn: Enum `SOME_ENUM` should have UpperCamelCase name, e.g. `SomeEnum`
 "#,
     );
 }
@@ -283,7 +283,7 @@ enum AABB {}
     check_diagnostics(
         r#"
 enum SomeEnum { SOME_VARIANT(u8) }
-// ^^^^^^^^^^^^ 💡 warn: Variant `SOME_VARIANT` should have CamelCase name, e.g. `SomeVariant`
+// ^^^^^^^^^^^^ 💡 warn: Variant `SOME_VARIANT` should have UpperCamelCase name, e.g. `SomeVariant`
 "#,
     );
 }
@@ -313,7 +313,7 @@ static some_weird_const: u8 = 10;
     check_diagnostics(
         r#"
 struct someStruct;
-// ^^^^^^^^^^ 💡 warn: Structure `someStruct` should have CamelCase name, e.g. `SomeStruct`
+// ^^^^^^^^^^ 💡 warn: Structure `someStruct` should have UpperCamelCase name, e.g. `SomeStruct`

 impl someStruct {
     fn SomeFunc(&self) {
@@ -530,11 +530,11 @@ extern {
     check_diagnostics(
         r#"
 trait BAD_TRAIT {
-// ^^^^^^^^^ 💡 warn: Trait `BAD_TRAIT` should have CamelCase name, e.g. `BadTrait`
+// ^^^^^^^^^ 💡 warn: Trait `BAD_TRAIT` should have UpperCamelCase name, e.g. `BadTrait`
 const bad_const: u8;
 // ^^^^^^^^^ 💡 warn: Constant `bad_const` should have UPPER_SNAKE_CASE name, e.g. `BAD_CONST`
 type BAD_TYPE;
-// ^^^^^^^^ 💡 warn: Type alias `BAD_TYPE` should have CamelCase name, e.g. `BadType`
+// ^^^^^^^^ 💡 warn: Type alias `BAD_TYPE` should have UpperCamelCase name, e.g. `BadType`
 fn BAD_FUNCTION();
 // ^^^^^^^^^^^^ 💡 warn: Function `BAD_FUNCTION` should have snake_case name, e.g. `bad_function`
 fn BadFunction();
@@ -552,11 +552,11 @@ trait BAD_TRAIT {
     check_diagnostics_with_disabled(
         r#"
 trait BAD_TRAIT {
-// ^^^^^^^^^ 💡 warn: Trait `BAD_TRAIT` should have CamelCase name, e.g. `BadTrait`
+// ^^^^^^^^^ 💡 warn: Trait `BAD_TRAIT` should have UpperCamelCase name, e.g. `BadTrait`
 const bad_const: u8;
 // ^^^^^^^^^ 💡 warn: Constant `bad_const` should have UPPER_SNAKE_CASE name, e.g. `BAD_CONST`
 type BAD_TYPE;
-// ^^^^^^^^ 💡 warn: Type alias `BAD_TYPE` should have CamelCase name, e.g. `BadType`
+// ^^^^^^^^ 💡 warn: Type alias `BAD_TYPE` should have UpperCamelCase name, e.g. `BadType`
 fn BAD_FUNCTION(BAD_PARAM: u8);
 // ^^^^^^^^^^^^ 💡 warn: Function `BAD_FUNCTION` should have snake_case name, e.g. `bad_function`
 // ^^^^^^^^^ 💡 warn: Parameter `BAD_PARAM` should have snake_case name, e.g. `bad_param`
@@ -664,7 +664,7 @@ mod CheckNonstandardStyle {
 mod CheckBadStyle {
 //^^^^^^^^^^^^^ 💡 error: Module `CheckBadStyle` should have snake_case name, e.g. `check_bad_style`
 struct fooo;
-//^^^^ 💡 error: Structure `fooo` should have CamelCase name, e.g. `Fooo`
+//^^^^ 💡 error: Structure `fooo` should have UpperCamelCase name, e.g. `Fooo`
 }

 mod F {
@@ -676,7 +676,7 @@ mod F {

 #[deny(non_snake_case, non_camel_case_types)]
 pub struct some_type {
-//^^^^^^^^^ 💡 error: Structure `some_type` should have CamelCase name, e.g. `SomeType`
+//^^^^^^^^^ 💡 error: Structure `some_type` should have UpperCamelCase name, e.g. `SomeType`
 SOME_FIELD: u8,
 //^^^^^^^^^^ 💡 error: Field `SOME_FIELD` should have snake_case name, e.g. `some_field`
 SomeField: u16,
@@ -693,11 +693,11 @@ pub static SomeStatic: u8 = 10;

 #[deny(non_snake_case, non_camel_case_types, non_upper_case_globals)]
 trait BAD_TRAIT {
-// ^^^^^^^^^ 💡 error: Trait `BAD_TRAIT` should have CamelCase name, e.g. `BadTrait`
+// ^^^^^^^^^ 💡 error: Trait `BAD_TRAIT` should have UpperCamelCase name, e.g. `BadTrait`
 const bad_const: u8;
 // ^^^^^^^^^ 💡 error: Constant `bad_const` should have UPPER_SNAKE_CASE name, e.g. `BAD_CONST`
 type BAD_TYPE;
-// ^^^^^^^^ 💡 error: Type alias `BAD_TYPE` should have CamelCase name, e.g. `BadType`
+// ^^^^^^^^ 💡 error: Type alias `BAD_TYPE` should have UpperCamelCase name, e.g. `BadType`
 fn BAD_FUNCTION(BAD_PARAM: u8);
 // ^^^^^^^^^^^^ 💡 error: Function `BAD_FUNCTION` should have snake_case name, e.g. `bad_function`
 // ^^^^^^^^^ 💡 error: Parameter `BAD_PARAM` should have snake_case name, e.g. `bad_param`
@@ -952,7 +952,7 @@ fn foo() {
 let FOO;
 #[allow(non_snake_case)]
 struct qux;
-// ^^^ 💡 warn: Structure `qux` should have CamelCase name, e.g. `Qux`
+// ^^^ 💡 warn: Structure `qux` should have UpperCamelCase name, e.g. `Qux`

 fn BAZ() {
 // ^^^ 💡 error: Function `BAZ` should have snake_case name, e.g. `baz`

@@ -29,7 +29,7 @@ pub(crate) fn goto_declaration(
         .find(|it| matches!(it.kind(), IDENT | T![self] | T![super] | T![crate] | T![Self]))?;
     let range = original_token.text_range();
     let info: Vec<NavigationTarget> = sema
-        .descend_into_macros(original_token)
+        .descend_into_macros_no_opaque(original_token)
         .iter()
         .filter_map(|token| {
             let parent = token.parent()?;

@@ -83,7 +83,7 @@ pub(crate) fn goto_definition(
     }

     let navs = sema
-        .descend_into_macros(original_token.clone())
+        .descend_into_macros_no_opaque(original_token.clone())
         .into_iter()
         .filter_map(|token| {
             let parent = token.parent()?;
@@ -2660,6 +2660,24 @@ fn foo() {
     );
 }

+#[test]
+fn label_inside_macro() {
+    check(
+        r#"
+macro_rules! m {
+    ($s:stmt) => { $s };
+}
+
+fn foo() {
+    'label: loop {
+ // ^^^^^^
+        m!(continue 'label$0);
+    }
+}
+"#,
+    );
+}
+
 #[test]
 fn goto_def_on_return_in_try() {
     check(

@@ -69,7 +69,7 @@ pub(crate) fn goto_type_definition(
     }

     let range = token.text_range();
-    sema.descend_into_macros(token)
+    sema.descend_into_macros_no_opaque(token)
         .into_iter()
         .filter_map(|token| {
             let ty = sema

@@ -3,9 +3,9 @@ use std::{mem, ops::Not};

 use either::Either;
 use hir::{
-    db::ExpandDatabase, Adt, AsAssocItem, AsExternAssocItem, CaptureKind, HasCrate, HasSource,
-    HirDisplay, Layout, LayoutError, MethodViolationCode, Name, ObjectSafetyViolation, Semantics,
-    Trait, Type, TypeInfo,
+    db::ExpandDatabase, Adt, AsAssocItem, AsExternAssocItem, AssocItemContainer, CaptureKind,
+    DynCompatibilityViolation, HasCrate, HasSource, HirDisplay, Layout, LayoutError,
+    MethodViolationCode, Name, Semantics, Trait, Type, TypeInfo,
 };
 use ide_db::{
     base_db::SourceDatabase,
@@ -529,10 +529,10 @@ pub(super) fn definition(
         _ => None,
     };

-    let object_safety_info = if let Definition::Trait(it) = def {
-        let mut object_safety_info = String::new();
-        render_object_safety(db, &mut object_safety_info, it.object_safety(db));
-        Some(object_safety_info)
+    let dyn_compatibility_info = if let Definition::Trait(it) = def {
+        let mut dyn_compatibility_info = String::new();
+        render_dyn_compatibility(db, &mut dyn_compatibility_info, it.dyn_compatibility(db));
+        Some(dyn_compatibility_info)
     } else {
         None
     };
@@ -546,8 +546,8 @@ pub(super) fn definition(
         desc.push_str(&layout_info);
         desc.push('\n');
     }
-    if let Some(object_safety_info) = object_safety_info {
-        desc.push_str(&object_safety_info);
+    if let Some(dyn_compatibility_info) = dyn_compatibility_info {
+        desc.push_str(&dyn_compatibility_info);
         desc.push('\n');
     }
     desc.push_str(&label);
@@ -813,7 +813,15 @@ fn definition_mod_path(db: &RootDatabase, def: &Definition, edition: Edition) ->
     if matches!(def, Definition::GenericParam(_) | Definition::Local(_) | Definition::Label(_)) {
         return None;
     }
-    def.module(db).map(|module| path(db, module, definition_owner_name(db, def, edition), edition))
+    let container: Option<Definition> =
+        def.as_assoc_item(db).and_then(|assoc| match assoc.container(db) {
+            AssocItemContainer::Trait(trait_) => Some(trait_.into()),
+            AssocItemContainer::Impl(impl_) => impl_.self_ty(db).as_adt().map(|adt| adt.into()),
+        });
+    container
+        .unwrap_or(*def)
+        .module(db)
+        .map(|module| path(db, module, definition_owner_name(db, def, edition), edition))
 }

 fn markup(docs: Option<String>, desc: String, mod_path: Option<String>) -> Markup {
@@ -980,24 +988,24 @@ fn keyword_hints(
         }
     }
 }

-fn render_object_safety(
+fn render_dyn_compatibility(
     db: &RootDatabase,
     buf: &mut String,
-    safety: Option<ObjectSafetyViolation>,
+    safety: Option<DynCompatibilityViolation>,
 ) {
     let Some(osv) = safety else {
-        buf.push_str("// Object Safety: Yes");
+        buf.push_str("// Dyn Compatible: Yes");
         return;
     };
-    buf.push_str("// Object Safety: No\n// - Reason: ");
+    buf.push_str("// Dyn Compatible: No\n// - Reason: ");
     match osv {
-        ObjectSafetyViolation::SizedSelf => {
+        DynCompatibilityViolation::SizedSelf => {
             buf.push_str("has a `Self: Sized` bound");
         }
-        ObjectSafetyViolation::SelfReferential => {
+        DynCompatibilityViolation::SelfReferential => {
             buf.push_str("has a bound that references `Self`");
         }
-        ObjectSafetyViolation::Method(func, mvc) => {
+        DynCompatibilityViolation::Method(func, mvc) => {
             let name = hir::Function::from(func).name(db);
             format_to!(
                 buf,
@@ -1020,7 +1028,7 @@ fn render_object_safety(
             };
             buf.push_str(desc);
         }
-        ObjectSafetyViolation::AssocConst(const_) => {
+        DynCompatibilityViolation::AssocConst(const_) => {
             let name = hir::Const::from(const_).name(db);
             if let Some(name) = name {
                 format_to!(buf, "has an associated constant `{}`", name.as_str());
@@ -1028,11 +1036,11 @@ fn render_object_safety(
                 buf.push_str("has an associated constant");
             }
         }
-        ObjectSafetyViolation::GAT(alias) => {
+        DynCompatibilityViolation::GAT(alias) => {
             let name = hir::TypeAlias::from(alias).name(db);
             format_to!(buf, "has a generic associated type `{}`", name.as_str());
         }
-        ObjectSafetyViolation::HasNonSafeSuperTrait(super_trait) => {
+        DynCompatibilityViolation::HasNonCompatibleSuperTrait(super_trait) => {
             let name = hir::Trait::from(super_trait).name(db);
             format_to!(buf, "has a object unsafe supertrait `{}`", name.as_str());
         }
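The `definition_mod_path` hunk changes which module a hover reports for associated items: it now prefers the container (the trait, or the impl's self type) over the module that merely hosts the item. An illustrative example (hypothetical user code; the annotated path is the intended outcome, not output captured from the tool):

mod shapes {
    pub struct Circle;
}

mod impls {
    impl crate::shapes::Circle {
        // Hovering `area` now reports the module path owning `Circle`
        // (`crate::shapes`) instead of the impl's own module (`crate::impls`).
        pub fn area(&self) -> f64 {
            0.0
        }
    }
}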
@@ -7175,7 +7175,7 @@ impl T$0 for () {}
             ```

             ```rust
-            // Object Safety: Yes
+            // Dyn Compatible: Yes
             trait T {}
             ```
         "#]],
@@ -7195,7 +7195,7 @@ impl T$0 for () {}
             ```

             ```rust
-            // Object Safety: Yes
+            // Dyn Compatible: Yes
             trait T {}
             ```
         "#]],
@@ -7219,7 +7219,7 @@ impl T$0 for () {}
             ```

             ```rust
-            // Object Safety: No
+            // Dyn Compatible: No
             // - Reason: has a method `func` that is non dispatchable because of:
             //     - missing a receiver
             trait T { /* … */ }
@@ -7245,7 +7245,7 @@ impl T$0 for () {}
             ```

             ```rust
-            // Object Safety: No
+            // Dyn Compatible: No
             // - Reason: has a method `func` that is non dispatchable because of:
             //     - missing a receiver
             trait T {
@@ -7275,7 +7275,7 @@ impl T$0 for () {}
             ```

             ```rust
-            // Object Safety: No
+            // Dyn Compatible: No
             // - Reason: has a method `func` that is non dispatchable because of:
             //     - missing a receiver
             trait T {
@@ -7305,7 +7305,7 @@ impl T$0 for () {}
             ```

             ```rust
-            // Object Safety: No
+            // Dyn Compatible: No
             // - Reason: has a method `func` that is non dispatchable because of:
             //     - missing a receiver
             trait T {
@@ -8962,3 +8962,29 @@ fn test_hover_function_with_pat_param() {
         "#]],
     );
 }
+
+#[test]
+fn hover_path_inside_block_scope() {
+    check(
+        r#"
+mod m {
+    const _: () = {
+        mod m2 {
+            const C$0: () = ();
+        }
+    };
+}
+"#,
+        expect![[r#"
+            *C*
+
+            ```rust
+            test::m::m2
+            ```
+
+            ```rust
+            const C: () = ()
+            ```
+        "#]],
+    );
+}

@@ -119,8 +119,8 @@ pub use ide_assists::{
     Assist, AssistConfig, AssistId, AssistKind, AssistResolveStrategy, SingleResolve,
 };
 pub use ide_completion::{
-    CallableSnippets, CompletionConfig, CompletionItem, CompletionItemKind, CompletionRelevance,
-    Snippet, SnippetScope,
+    CallableSnippets, CompletionConfig, CompletionFieldsToResolve, CompletionItem,
+    CompletionItemKind, CompletionRelevance, Snippet, SnippetScope,
 };
 pub use ide_db::{
     base_db::{Cancelled, CrateGraph, CrateId, FileChange, SourceRoot, SourceRootId},

@@ -409,7 +409,8 @@ fn traverse(
                 let mut r = 0;
                 sema.descend_into_macros_breakable(
                     InRealFile::new(file_id, token.clone()),
-                    |tok| {
+                    |tok, _ctx| {
+                        // FIXME: Consider checking ctx transparency for being opaque?
                         let tok = tok.value;
                         let tok_kind = tok.kind();

@@ -265,6 +265,11 @@ impl ProjectFolders {
                 entries.push(manifest.to_owned());
             }

+            for buildfile in ws.buildfiles() {
+                file_set_roots.push(VfsPath::from(buildfile.to_owned()));
+                entries.push(buildfile.to_owned());
+            }
+
             // In case of detached files we do **not** look for a rust-analyzer.toml.
             if !matches!(ws.kind, ProjectWorkspaceKind::DetachedFile { .. }) {
                 let ws_root = ws.workspace_root();
@@ -521,7 +526,7 @@ mod tests {
     #[test]
     fn test_loading_rust_analyzer() {
         let path = Path::new(env!("CARGO_MANIFEST_DIR")).parent().unwrap().parent().unwrap();
-        let cargo_config = CargoConfig::default();
+        let cargo_config = CargoConfig { set_test: true, ..CargoConfig::default() };
         let load_cargo_config = LoadCargoConfig {
             load_out_dirs_from_check: false,
             with_proc_macro_server: ProcMacroServerChoice::None,

@@ -100,6 +100,7 @@ pub struct CargoConfig {
     pub invocation_strategy: InvocationStrategy,
     /// Optional path to use instead of `target` when building
     pub target_dir: Option<Utf8PathBuf>,
+    pub set_test: bool,
 }

 pub type Package = Idx<PackageData>;
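`set_test` rides along from `CargoConfig` into `ProjectWorkspaceKind` and finally into `cargo_to_crate_graph` (see the workspace hunks below). The intent, hedged from the field's routing rather than from code shown here, is to let clients decide whether workspace crates are analyzed with the `test` cfg enabled:

// Hypothetical loader setup: keep cfg(test) analysis on, as the updated
// test_loading_rust_analyzer() above does.
let cargo_config = CargoConfig { set_test: true, ..CargoConfig::default() };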
@ -35,6 +35,7 @@ fn load_cargo_with_overrides(
|
||||||
rustc: Err(None),
|
rustc: Err(None),
|
||||||
cargo_config_extra_env: Default::default(),
|
cargo_config_extra_env: Default::default(),
|
||||||
error: None,
|
error: None,
|
||||||
|
set_test: true,
|
||||||
},
|
},
|
||||||
cfg_overrides,
|
cfg_overrides,
|
||||||
sysroot: Sysroot::empty(),
|
sysroot: Sysroot::empty(),
|
||||||
|
@ -242,6 +243,7 @@ fn smoke_test_real_sysroot_cargo() {
|
||||||
rustc: Err(None),
|
rustc: Err(None),
|
||||||
cargo_config_extra_env: Default::default(),
|
cargo_config_extra_env: Default::default(),
|
||||||
error: None,
|
error: None,
|
||||||
|
set_test: true,
|
||||||
},
|
},
|
||||||
sysroot,
|
sysroot,
|
||||||
rustc_cfg: Vec::new(),
|
rustc_cfg: Vec::new(),
|
||||||
|
|
|
@@ -78,6 +78,7 @@ pub enum ProjectWorkspaceKind {
         rustc: Result<Box<(CargoWorkspace, WorkspaceBuildScripts)>, Option<String>>,
         /// Environment variables set in the `.cargo/config` file.
         cargo_config_extra_env: FxHashMap<String, String>,
+        set_test: bool,
     },
     /// Project workspace was specified using a `rust-project.json` file.
     Json(ProjectJson),
@@ -98,6 +99,7 @@ pub enum ProjectWorkspaceKind {
         cargo: Option<(CargoWorkspace, WorkspaceBuildScripts, Option<Arc<anyhow::Error>>)>,
         /// Environment variables set in the `.cargo/config` file.
         cargo_config_extra_env: FxHashMap<String, String>,
+        set_test: bool,
     },
 }

@@ -112,6 +114,7 @@ impl fmt::Debug for ProjectWorkspace {
                 build_scripts,
                 rustc,
                 cargo_config_extra_env,
+                set_test,
             } => f
                 .debug_struct("Cargo")
                 .field("root", &cargo.workspace_root().file_name())
@@ -126,6 +129,7 @@ impl fmt::Debug for ProjectWorkspace {
                 .field("toolchain", &toolchain)
                 .field("data_layout", &target_layout)
                 .field("cargo_config_extra_env", &cargo_config_extra_env)
+                .field("set_test", set_test)
                 .field("build_scripts", &build_scripts.error().unwrap_or("ok"))
                 .finish(),
             ProjectWorkspaceKind::Json(project) => {
@@ -137,12 +141,14 @@ impl fmt::Debug for ProjectWorkspace {
                     .field("toolchain", &toolchain)
                     .field("data_layout", &target_layout)
                     .field("n_cfg_overrides", &cfg_overrides.len());

                 debug_struct.finish()
             }
             ProjectWorkspaceKind::DetachedFile {
                 file,
                 cargo: cargo_script,
                 cargo_config_extra_env,
+                set_test,
             } => f
                 .debug_struct("DetachedFiles")
                 .field("file", &file)
@@ -154,6 +160,7 @@ impl fmt::Debug for ProjectWorkspace {
                 .field("data_layout", &target_layout)
                 .field("n_cfg_overrides", &cfg_overrides.len())
                 .field("cargo_config_extra_env", &cargo_config_extra_env)
+                .field("set_test", set_test)
                 .finish(),
         }
     }
@@ -329,6 +336,7 @@ impl ProjectWorkspace {
                 rustc,
                 cargo_config_extra_env,
                 error: error.map(Arc::new),
+                set_test: config.set_test,
             },
             sysroot,
             rustc_cfg,
@@ -423,6 +431,7 @@ impl ProjectWorkspace {
                 file: detached_file.to_owned(),
                 cargo: cargo_script,
                 cargo_config_extra_env,
+                set_test: config.set_test,
             },
             sysroot,
             rustc_cfg,
@@ -539,6 +548,17 @@ impl ProjectWorkspace {
         }
     }

+    pub fn buildfiles(&self) -> Vec<AbsPathBuf> {
+        match &self.kind {
+            ProjectWorkspaceKind::Json(project) => project
+                .crates()
+                .filter_map(|(_, krate)| krate.build.as_ref().map(|build| build.build_file.clone()))
+                .map(AbsPathBuf::assert)
+                .collect(),
+            _ => vec![],
+        }
+    }
+
     pub fn find_sysroot_proc_macro_srv(&self) -> anyhow::Result<AbsPathBuf> {
         self.sysroot.discover_proc_macro_srv()
     }
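Note: the new `buildfiles` accessor only yields paths for `rust-project.json` workspaces, where a crate can declare an explicit build file. A minimal sketch of the same filter-map shape, using hypothetical stand-in types rather than rust-analyzer's `ProjectJson`:

// Stand-ins for the project-json crate data; only the fields needed to
// show the shape of `buildfiles` above.
struct Build {
    build_file: String,
}

struct Crate {
    build: Option<Build>,
}

fn buildfiles(crates: &[Crate]) -> Vec<String> {
    // Keep only crates that declare a build file; other workspace kinds
    // return an empty Vec, as in the hunk above.
    crates
        .iter()
        .filter_map(|krate| krate.build.as_ref().map(|build| build.build_file.clone()))
        .collect()
}

fn main() {
    let crates = vec![
        Crate { build: Some(Build { build_file: "BUILD.bazel".into() }) },
        Crate { build: None },
    ];
    assert_eq!(buildfiles(&crates), vec!["BUILD.bazel".to_owned()]);
}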
@@ -598,6 +618,7 @@ impl ProjectWorkspace {
                 build_scripts,
                 cargo_config_extra_env: _,
                 error: _,
+                set_test: _,
             } => {
                 cargo
                     .packages()
@@ -739,6 +760,7 @@ impl ProjectWorkspace {
                 build_scripts,
                 cargo_config_extra_env: _,
                 error: _,
+                set_test,
             } => (
                 cargo_to_crate_graph(
                     load,
@@ -748,10 +770,11 @@ impl ProjectWorkspace {
                     rustc_cfg.clone(),
                     cfg_overrides,
                     build_scripts,
+                    *set_test,
                 ),
                 sysroot,
             ),
-            ProjectWorkspaceKind::DetachedFile { file, cargo: cargo_script, .. } => (
+            ProjectWorkspaceKind::DetachedFile { file, cargo: cargo_script, set_test, .. } => (
                 if let Some((cargo, build_scripts, _)) = cargo_script {
                     cargo_to_crate_graph(
                         &mut |path| load(path),
@@ -761,6 +784,7 @@ impl ProjectWorkspace {
                         rustc_cfg.clone(),
                         cfg_overrides,
                         build_scripts,
+                        *set_test,
                     )
                 } else {
                     detached_file_to_crate_graph(
@@ -769,6 +793,7 @@ impl ProjectWorkspace {
                         file,
                         sysroot,
                         cfg_overrides,
+                        *set_test,
                     )
                 },
                 sysroot,
@@ -802,6 +827,7 @@ impl ProjectWorkspace {
                 cargo_config_extra_env,
                 build_scripts: _,
                 error: _,
+                set_test: _,
             },
             ProjectWorkspaceKind::Cargo {
                 cargo: o_cargo,
@@ -809,6 +835,7 @@ impl ProjectWorkspace {
                 cargo_config_extra_env: o_cargo_config_extra_env,
                 build_scripts: _,
                 error: _,
+                set_test: _,
             },
         ) => {
             cargo == o_cargo
@@ -823,11 +850,13 @@ impl ProjectWorkspace {
                 file,
                 cargo: Some((cargo_script, _, _)),
                 cargo_config_extra_env,
+                set_test: _,
             },
             ProjectWorkspaceKind::DetachedFile {
                 file: o_file,
                 cargo: Some((o_cargo_script, _, _)),
                 cargo_config_extra_env: o_cargo_config_extra_env,
+                set_test: _,
             },
         ) => {
             file == o_file
@@ -976,6 +1005,7 @@ fn cargo_to_crate_graph(
     rustc_cfg: Vec<CfgAtom>,
     override_cfg: &CfgOverrides,
     build_scripts: &WorkspaceBuildScripts,
+    set_test: bool,
 ) -> (CrateGraph, ProcMacroPaths) {
     let _p = tracing::info_span!("cargo_to_crate_graph").entered();
     let mut res = (CrateGraph::default(), ProcMacroPaths::default());
@@ -1000,8 +1030,10 @@ fn cargo_to_crate_graph(
         let mut cfg_options = cfg_options.clone();

         if cargo[pkg].is_local {
-            // Add test cfg for local crates
-            cfg_options.insert_atom(sym::test.clone());
+            if set_test {
+                // Add test cfg for local crates
+                cfg_options.insert_atom(sym::test.clone());
+            }
             cfg_options.insert_atom(sym::rust_analyzer.clone());
         }

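All of the `set_test` plumbing above funnels into this one decision: whether local crates get `cfg(test)`. A minimal sketch of the gating, assuming a simplified `CfgOptions` backed by a `HashSet<String>` instead of rust-analyzer's interned symbols:

use std::collections::HashSet;

// Simplified stand-in for rust-analyzer's `CfgOptions`; a set of strings is
// enough to show the gating (assumption: the real type interns symbols).
#[derive(Default, Debug)]
struct CfgOptions {
    enabled: HashSet<String>,
}

impl CfgOptions {
    fn insert_atom(&mut self, atom: &str) {
        self.enabled.insert(atom.to_owned());
    }
}

fn cfg_for_local_crate(set_test: bool) -> CfgOptions {
    let mut cfg_options = CfgOptions::default();
    // `test` is now opt-in per workspace, mirroring the hunk above...
    if set_test {
        cfg_options.insert_atom("test");
    }
    // ...while `rust_analyzer` is always set for local crates.
    cfg_options.insert_atom("rust_analyzer");
    cfg_options
}

fn main() {
    assert!(cfg_for_local_crate(true).enabled.contains("test"));
    assert!(!cfg_for_local_crate(false).enabled.contains("test"));
}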
@@ -1162,6 +1194,7 @@ fn detached_file_to_crate_graph(
    detached_file: &ManifestPath,
    sysroot: &Sysroot,
    override_cfg: &CfgOverrides,
+    set_test: bool,
 ) -> (CrateGraph, ProcMacroPaths) {
     let _p = tracing::info_span!("detached_file_to_crate_graph").entered();
     let mut crate_graph = CrateGraph::default();
@@ -1169,7 +1202,9 @@ fn detached_file_to_crate_graph(
     sysroot_to_crate_graph(&mut crate_graph, sysroot, rustc_cfg.clone(), load);

     let mut cfg_options = CfgOptions::from_iter(rustc_cfg);
-    cfg_options.insert_atom(sym::test.clone());
+    if set_test {
+        cfg_options.insert_atom(sym::test.clone());
+    }
     cfg_options.insert_atom(sym::rust_analyzer.clone());
     override_cfg.apply(&mut cfg_options, "");
     let cfg_options = Arc::new(cfg_options);
@@ -1415,6 +1450,7 @@ fn sysroot_to_crate_graph(
             ..Default::default()
         },
         &WorkspaceBuildScripts::default(),
+        false,
     );

     let mut pub_deps = vec![];

@@ -137,6 +137,7 @@ fn setup_logging(log_file_flag: Option<PathBuf>) -> anyhow::Result<()> {
         filter: env::var("RA_LOG").ok().unwrap_or_else(|| "error".to_owned()),
         chalk_filter: env::var("CHALK_DEBUG").ok(),
         profile_filter: env::var("RA_PROFILE").ok(),
+        json_profile_filter: std::env::var("RA_PROFILE_JSON").ok(),
     }
     .init()?;

@@ -65,6 +65,7 @@ impl flags::AnalysisStats {
                 false => Some(RustLibSource::Discover),
             },
             all_targets: true,
+            set_test: true,
             ..Default::default()
         };
         let no_progress = &|_| ();
@@ -81,7 +82,13 @@ impl flags::AnalysisStats {
             with_proc_macro_server: if self.disable_proc_macros {
                 ProcMacroServerChoice::None
             } else {
-                ProcMacroServerChoice::Sysroot
+                match self.proc_macro_srv {
+                    Some(ref path) => {
+                        let path = vfs::AbsPathBuf::assert_utf8(path.to_owned());
+                        ProcMacroServerChoice::Explicit(path)
+                    }
+                    None => ProcMacroServerChoice::Sysroot,
+                }
             },
             prefill_caches: false,
         };

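The flag-to-choice mapping is small enough to test in isolation. A sketch under stand-in types (the real `ProcMacroServerChoice` lives in rust-analyzer's load-cargo layer; this enum is illustrative):

use std::path::PathBuf;

#[derive(Debug, PartialEq)]
enum ProcMacroServerChoice {
    Sysroot,
    Explicit(PathBuf),
    None,
}

fn choose(disable: bool, explicit: Option<PathBuf>) -> ProcMacroServerChoice {
    if disable {
        ProcMacroServerChoice::None
    } else {
        // The new `--proc-macro-srv` path takes precedence over sysroot discovery.
        match explicit {
            Some(path) => ProcMacroServerChoice::Explicit(path),
            None => ProcMacroServerChoice::Sysroot,
        }
    }
}

fn main() {
    assert_eq!(choose(true, None), ProcMacroServerChoice::None);
    assert_eq!(choose(false, None), ProcMacroServerChoice::Sysroot);
    assert_eq!(
        choose(false, Some(PathBuf::from("/opt/ra/proc-macro-srv"))),
        ProcMacroServerChoice::Explicit(PathBuf::from("/opt/ra/proc-macro-srv"))
    );
}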
@@ -76,6 +76,8 @@ xflags::xflags! {
            optional --disable-build-scripts
            /// Don't use expand proc macros.
            optional --disable-proc-macros
+           /// Run the proc-macro-srv binary at the specified path.
+           optional --proc-macro-srv path: PathBuf
            /// Skip body lowering.
            optional --skip-lowering
            /// Skip type inference.
@@ -120,7 +122,7 @@ xflags::xflags! {
            optional --disable-build-scripts
            /// Don't use expand proc macros.
            optional --disable-proc-macros
-           /// Run a custom proc-macro-srv binary.
+           /// Run the proc-macro-srv binary at the specified path.
            optional --proc-macro-srv path: PathBuf
        }

@@ -133,7 +135,7 @@ xflags::xflags! {
            optional --disable-build-scripts
            /// Don't use expand proc macros.
            optional --disable-proc-macros
-           /// Run a custom proc-macro-srv binary.
+           /// Run the proc-macro-srv binary at the specified path.
            optional --proc-macro-srv path: PathBuf
        }

@@ -233,6 +235,7 @@ pub struct AnalysisStats {
    pub no_sysroot: bool,
    pub disable_build_scripts: bool,
    pub disable_proc_macros: bool,
+    pub proc_macro_srv: Option<PathBuf>,
    pub skip_lowering: bool,
    pub skip_inference: bool,
    pub skip_mir_stats: bool,

@@ -277,6 +277,7 @@ impl flags::Lsif {
        let cargo_config = &CargoConfig {
            sysroot: Some(RustLibSource::Discover),
            all_targets: true,
+            set_test: true,
            ..Default::default()
        };
        let no_progress = &|_| ();

@@ -16,6 +16,7 @@ impl flags::RunTests {
        let cargo_config = CargoConfig {
            sysroot: Some(RustLibSource::Discover),
            all_targets: true,
+            set_test: true,
            ..Default::default()
        };
        let load_cargo_config = LoadCargoConfig {

@@ -70,6 +70,7 @@ impl Tester {
        let cargo_config = CargoConfig {
            sysroot: Some(RustLibSource::Discover),
            all_targets: true,
+            set_test: true,
            ..Default::default()
        };

@@ -85,6 +86,7 @@ impl Tester {
                file: ManifestPath::try_from(tmp_file).unwrap(),
                cargo: None,
                cargo_config_extra_env: Default::default(),
+                set_test: true,
            },
            sysroot,
            rustc_cfg: vec![],

@@ -24,11 +24,6 @@ impl flags::Scip {
        let now = Instant::now();

        let no_progress = &|s| (eprintln!("rust-analyzer: Loading {s}"));
-        let load_cargo_config = LoadCargoConfig {
-            load_out_dirs_from_check: true,
-            with_proc_macro_server: ProcMacroServerChoice::Sysroot,
-            prefill_caches: true,
-        };
        let root =
            vfs::AbsPathBuf::assert_utf8(std::env::current_dir()?.join(&self.path)).normalize();

@@ -51,6 +46,11 @@ impl flags::Scip {
            // FIXME @alibektas : What happens to errors without logging?
            error!(?error_sink, "Config Error(s)");
        }
+        let load_cargo_config = LoadCargoConfig {
+            load_out_dirs_from_check: true,
+            with_proc_macro_server: ProcMacroServerChoice::Sysroot,
+            prefill_caches: true,
+        };
        let cargo_config = config.cargo(None);
        let (db, vfs, _) = load_workspace_at(
            root.as_path().as_ref(),

@@ -13,6 +13,7 @@ impl flags::Ssr {
        let cargo_config = CargoConfig {
            sysroot: Some(RustLibSource::Discover),
            all_targets: true,
+            set_test: true,
            ..Default::default()
        };
        let load_cargo_config = LoadCargoConfig {
@@ -50,7 +51,8 @@ impl flags::Search {
    pub fn run(self) -> anyhow::Result<()> {
        use ide_db::base_db::SourceRootDatabase;
        use ide_db::symbol_index::SymbolsDatabase;
-        let cargo_config = CargoConfig::default();
+        let cargo_config =
+            CargoConfig { all_targets: true, set_test: true, ..CargoConfig::default() };
        let load_cargo_config = LoadCargoConfig {
            load_out_dirs_from_check: true,
            with_proc_macro_server: ProcMacroServerChoice::Sysroot,

@@ -12,10 +12,10 @@ use std::{
 use cfg::{CfgAtom, CfgDiff};
 use hir::Symbol;
 use ide::{
-    AssistConfig, CallableSnippets, CompletionConfig, DiagnosticsConfig, ExprFillDefaultMode,
-    GenericParameterHints, HighlightConfig, HighlightRelatedConfig, HoverConfig, HoverDocFormat,
-    InlayFieldsToResolve, InlayHintsConfig, JoinLinesConfig, MemoryLayoutHoverConfig,
-    MemoryLayoutHoverRenderKind, Snippet, SnippetScope, SourceRootId,
+    AssistConfig, CallableSnippets, CompletionConfig, CompletionFieldsToResolve, DiagnosticsConfig,
+    ExprFillDefaultMode, GenericParameterHints, HighlightConfig, HighlightRelatedConfig,
+    HoverConfig, HoverDocFormat, InlayFieldsToResolve, InlayHintsConfig, JoinLinesConfig,
+    MemoryLayoutHoverConfig, MemoryLayoutHoverRenderKind, Snippet, SnippetScope, SourceRootId,
 };
 use ide_db::{
     imports::insert_use::{ImportGranularity, InsertUseConfig, PrefixKind},
@@ -574,6 +574,9 @@ config_data! {
        /// set to a path relative to the workspace to use that path.
        cargo_targetDir | rust_analyzerTargetDir: Option<TargetDirectory> = None,

+        /// Set `cfg(test)` for local crates. Defaults to true.
+        cfg_setTest: bool = true,
+
        /// Run the check command for diagnostics on save.
        checkOnSave | checkOnSave_enable: bool = true,

@@ -695,7 +698,6 @@ config_data! {
        workspace_symbol_search_limit: usize = 128,
        /// Workspace symbol search scope.
        workspace_symbol_search_scope: WorkspaceSymbolSearchScopeDef = WorkspaceSymbolSearchScopeDef::Workspace,
-
    }
 }

@@ -1391,6 +1393,7 @@ impl Config {
     }

     pub fn completion(&self, source_root: Option<SourceRootId>) -> CompletionConfig {
+        let client_capability_fields = self.completion_resolve_support_properties();
         CompletionConfig {
             enable_postfix_completions: self.completion_postfix_enable(source_root).to_owned(),
             enable_imports_on_the_fly: self.completion_autoimport_enable(source_root).to_owned()
@@ -1415,6 +1418,15 @@ impl Config {
             limit: self.completion_limit(source_root).to_owned(),
             enable_term_search: self.completion_termSearch_enable(source_root).to_owned(),
             term_search_fuel: self.completion_termSearch_fuel(source_root).to_owned() as u64,
+            fields_to_resolve: CompletionFieldsToResolve {
+                resolve_label_details: client_capability_fields.contains("labelDetails"),
+                resolve_tags: client_capability_fields.contains("tags"),
+                resolve_detail: client_capability_fields.contains("detail"),
+                resolve_documentation: client_capability_fields.contains("documentation"),
+                resolve_filter_text: client_capability_fields.contains("filterText"),
+                resolve_text_edit: client_capability_fields.contains("textEdit"),
+                resolve_command: client_capability_fields.contains("command"),
+            },
         }
     }

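The capability-to-flags mapping added here is mechanical. A minimal sketch with a plain `HashSet<&str>` standing in for the parsed client capabilities (the constructor is illustrative, not rust-analyzer's API):

use std::collections::HashSet;

// Mirrors the shape of `CompletionFieldsToResolve` in the diff.
#[derive(Debug, Default)]
struct CompletionFieldsToResolve {
    resolve_label_details: bool,
    resolve_tags: bool,
    resolve_detail: bool,
    resolve_documentation: bool,
    resolve_filter_text: bool,
    resolve_text_edit: bool,
    resolve_command: bool,
}

impl CompletionFieldsToResolve {
    fn from_client_capabilities(caps: &HashSet<&str>) -> Self {
        Self {
            resolve_label_details: caps.contains("labelDetails"),
            resolve_tags: caps.contains("tags"),
            resolve_detail: caps.contains("detail"),
            resolve_documentation: caps.contains("documentation"),
            resolve_filter_text: caps.contains("filterText"),
            resolve_text_edit: caps.contains("textEdit"),
            resolve_command: caps.contains("command"),
        }
    }
}

fn main() {
    let caps: HashSet<&str> = ["documentation", "detail"].into_iter().collect();
    let fields = CompletionFieldsToResolve::from_client_capabilities(&caps);
    assert!(fields.resolve_documentation && fields.resolve_detail);
    assert!(!fields.resolve_command);
}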
@@ -1859,9 +1871,14 @@ impl Config {
             extra_args: self.cargo_extraArgs(source_root).clone(),
             extra_env: self.cargo_extraEnv(source_root).clone(),
             target_dir: self.target_dir_from_config(source_root),
+            set_test: *self.cfg_setTest(source_root),
         }
     }

+    pub fn cfg_set_test(&self, source_root: Option<SourceRootId>) -> bool {
+        *self.cfg_setTest(source_root)
+    }
+
     pub(crate) fn completion_snippets_default() -> FxHashMap<String, SnippetDef> {
         serde_json::from_str(
             r#"{

@@ -7,6 +7,7 @@ use paths::{AbsPathBuf, Utf8Path, Utf8PathBuf};
 use project_model::ProjectJsonData;
 use serde::{Deserialize, Serialize};
 use serde_json::Value;
+use tracing::{info_span, span::EnteredSpan};

 use crate::command::{CommandHandle, ParseFromLine};

@@ -60,7 +61,10 @@ impl DiscoverCommand {
         let mut cmd = Command::new(command);
         cmd.args(args);

-        Ok(DiscoverHandle { _handle: CommandHandle::spawn(cmd, self.sender.clone())? })
+        Ok(DiscoverHandle {
+            _handle: CommandHandle::spawn(cmd, self.sender.clone())?,
+            span: info_span!("discover_command").entered(),
+        })
     }
 }

@@ -68,6 +72,8 @@ impl DiscoverCommand {
 #[derive(Debug)]
 pub(crate) struct DiscoverHandle {
     _handle: CommandHandle<DiscoverProjectMessage>,
+    #[allow(dead_code)] // not accessed, but used to log on drop.
+    span: EnteredSpan,
 }

 /// An enum containing either progress messages, an error,

@@ -46,6 +46,11 @@ pub(crate) struct FetchWorkspaceRequest {
     pub(crate) force_crate_graph_reload: bool,
 }

+pub(crate) struct FetchWorkspaceResponse {
+    pub(crate) workspaces: Vec<anyhow::Result<ProjectWorkspace>>,
+    pub(crate) force_crate_graph_reload: bool,
+}
+
 // Enforces drop order
 pub(crate) struct Handle<H, C> {
     pub(crate) handle: H,
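Replacing the `(Vec<_>, bool)` tuple with a named `FetchWorkspaceResponse` makes the later `let`-`else` destructurings self-documenting. A compressed sketch of the pattern, with a `String`-based stand-in for `anyhow::Result<ProjectWorkspace>`:

struct FetchWorkspaceResponse {
    workspaces: Vec<Result<String, String>>, // stand-in result type
    force_crate_graph_reload: bool,
}

fn switch_workspaces(last: Option<&FetchWorkspaceResponse>) {
    // Named fields read better than `resp.0` / `resp.1` at every use site.
    let Some(FetchWorkspaceResponse { workspaces, force_crate_graph_reload }) = last else {
        return; // no fetch has completed yet
    };
    println!("{} workspaces, force reload: {force_crate_graph_reload}", workspaces.len());
}

fn main() {
    switch_workspaces(None);
    let resp = FetchWorkspaceResponse { workspaces: vec![], force_crate_graph_reload: true };
    switch_workspaces(Some(&resp));
}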
@@ -111,6 +116,9 @@ pub(crate) struct GlobalState {
     pub(crate) vfs_config_version: u32,
     pub(crate) vfs_progress_config_version: u32,
     pub(crate) vfs_done: bool,
+    // used to track how long VFS loading takes. this can't be on `vfs::loader::Handle`,
+    // as that handle's lifetime is the same as `GlobalState` itself.
+    pub(crate) vfs_span: Option<tracing::span::EnteredSpan>,
     pub(crate) wants_to_switch: Option<Cause>,

     /// `workspaces` field stores the data we actually use, while the `OpQueue`
@@ -143,8 +151,7 @@ pub(crate) struct GlobalState {
     pub(crate) detached_files: FxHashSet<ManifestPath>,

     // op queues
-    pub(crate) fetch_workspaces_queue:
-        OpQueue<FetchWorkspaceRequest, Option<(Vec<anyhow::Result<ProjectWorkspace>>, bool)>>,
+    pub(crate) fetch_workspaces_queue: OpQueue<FetchWorkspaceRequest, FetchWorkspaceResponse>,
     pub(crate) fetch_build_data_queue:
         OpQueue<(), (Arc<Vec<ProjectWorkspace>>, Vec<anyhow::Result<WorkspaceBuildScripts>>)>,
     pub(crate) fetch_proc_macros_queue: OpQueue<Vec<ProcMacroPaths>, bool>,
@@ -253,6 +260,7 @@ impl GlobalState {
             vfs: Arc::new(RwLock::new((vfs::Vfs::default(), IntMap::default()))),
             vfs_config_version: 0,
             vfs_progress_config_version: 0,
+            vfs_span: None,
             vfs_done: true,
             wants_to_switch: None,

@@ -498,7 +506,7 @@ impl GlobalState {
             mem_docs: self.mem_docs.clone(),
             semantic_tokens_cache: Arc::clone(&self.semantic_tokens_cache),
             proc_macros_loaded: !self.config.expand_proc_macros()
-                || *self.fetch_proc_macros_queue.last_op_result(),
+                || self.fetch_proc_macros_queue.last_op_result().copied().unwrap_or(false),
             flycheck: self.flycheck.clone(),
         }
     }

@@ -380,7 +380,7 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool {
                 if id == flycheck.id() {
                     updated = true;
                     match package.filter(|_| {
-                        !world.config.flycheck_workspace(source_root_id) || target.is_some()
+                        !world.config.flycheck_workspace(source_root_id) && target.is_some()
                     }) {
                         Some(package) => flycheck
                             .restart_for_package(package, target.clone().map(TupleExt::head)),

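The operator flip changes when a package-scoped flycheck restart is chosen: with `&&`, it now requires workspace-wide checking to be off and a known build target, instead of firing whenever a target was known. A truth-table sketch (the helper name is hypothetical; the reading of the surrounding match is inferred from the hunk):

fn keep_package(flycheck_workspace: bool, has_target: bool) -> bool {
    // Old (buggy): !flycheck_workspace || has_target
    // New: restrict the check to one package only when workspace-wide
    // checking is disabled and we know which target to restrict to.
    !flycheck_workspace && has_target
}

fn main() {
    assert!(!keep_package(true, true)); // workspace-wide check covers it
    assert!(keep_package(false, true));
    assert!(!keep_package(false, false)); // no target to restrict to
}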
@@ -10,9 +10,9 @@ use std::{
 use anyhow::Context;

 use ide::{
-    AnnotationConfig, AssistKind, AssistResolveStrategy, Cancellable, FilePosition, FileRange,
-    HoverAction, HoverGotoTypeData, InlayFieldsToResolve, Query, RangeInfo, ReferenceCategory,
-    Runnable, RunnableKind, SingleResolve, SourceChange, TextEdit,
+    AnnotationConfig, AssistKind, AssistResolveStrategy, Cancellable, CompletionFieldsToResolve,
+    FilePosition, FileRange, HoverAction, HoverGotoTypeData, InlayFieldsToResolve, Query,
+    RangeInfo, ReferenceCategory, Runnable, RunnableKind, SingleResolve, SourceChange, TextEdit,
 };
 use ide_db::SymbolKind;
 use itertools::Itertools;
@@ -1019,9 +1019,11 @@ pub(crate) fn handle_completion(

     let items = to_proto::completion_items(
         &snap.config,
+        &completion_config.fields_to_resolve,
         &line_index,
         snap.file_version(position.file_id),
         text_document_position,
+        completion_trigger_character,
         items,
     );

@@ -1054,36 +1056,70 @@ pub(crate) fn handle_completion_resolve(
     };
     let source_root = snap.analysis.source_root_id(file_id)?;

-    let additional_edits = snap
-        .analysis
-        .resolve_completion_edits(
-            &snap.config.completion(Some(source_root)),
-            FilePosition { file_id, offset },
-            resolve_data
-                .imports
-                .into_iter()
-                .map(|import| (import.full_import_path, import.imported_name)),
-        )?
-        .into_iter()
-        .flat_map(|edit| edit.into_iter().map(|indel| to_proto::text_edit(&line_index, indel)))
-        .collect::<Vec<_>>();
-
-    if !all_edits_are_disjoint(&original_completion, &additional_edits) {
-        return Err(LspError::new(
-            ErrorCode::InternalError as i32,
-            "Import edit overlaps with the original completion edits, this is not LSP-compliant"
-                .into(),
-        )
-        .into());
-    }
-
-    if let Some(original_additional_edits) = original_completion.additional_text_edits.as_mut() {
-        original_additional_edits.extend(additional_edits)
-    } else {
-        original_completion.additional_text_edits = Some(additional_edits);
-    }
-
-    Ok(original_completion)
+    let mut forced_resolve_completions_config = snap.config.completion(Some(source_root));
+    forced_resolve_completions_config.fields_to_resolve = CompletionFieldsToResolve::empty();
+
+    let position = FilePosition { file_id, offset };
+    let Some(resolved_completions) = snap.analysis.completions(
+        &forced_resolve_completions_config,
+        position,
+        resolve_data.trigger_character,
+    )?
+    else {
+        return Ok(original_completion);
+    };
+    let resolved_completions = to_proto::completion_items(
+        &snap.config,
+        &forced_resolve_completions_config.fields_to_resolve,
+        &line_index,
+        snap.file_version(position.file_id),
+        resolve_data.position,
+        resolve_data.trigger_character,
+        resolved_completions,
+    );
+    let Some(mut resolved_completion) = resolved_completions.into_iter().find(|completion| {
+        completion.label == original_completion.label
+            && completion.kind == original_completion.kind
+            && completion.deprecated == original_completion.deprecated
+            && completion.preselect == original_completion.preselect
+            && completion.sort_text == original_completion.sort_text
+    }) else {
+        return Ok(original_completion);
+    };
+
+    if !resolve_data.imports.is_empty() {
+        let additional_edits = snap
+            .analysis
+            .resolve_completion_edits(
+                &forced_resolve_completions_config,
+                position,
+                resolve_data
+                    .imports
+                    .into_iter()
+                    .map(|import| (import.full_import_path, import.imported_name)),
+            )?
+            .into_iter()
+            .flat_map(|edit| edit.into_iter().map(|indel| to_proto::text_edit(&line_index, indel)))
+            .collect::<Vec<_>>();
+
+        if !all_edits_are_disjoint(&resolved_completion, &additional_edits) {
+            return Err(LspError::new(
+                ErrorCode::InternalError as i32,
+                "Import edit overlaps with the original completion edits, this is not LSP-compliant"
+                    .into(),
+            )
+            .into());
+        }
+
+        if let Some(original_additional_edits) = resolved_completion.additional_text_edits.as_mut()
+        {
+            original_additional_edits.extend(additional_edits)
+        } else {
+            resolved_completion.additional_text_edits = Some(additional_edits);
+        }
+    }
+
+    Ok(resolved_completion)
 }

 pub(crate) fn handle_folding_range(

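The resolve handler now recomputes completions with every lazy field forced on, then maps the result back to the item the client sent by comparing the fields that survive the round trip (label, kind, deprecated, preselect, sort_text). A toy sketch of that re-identification step, over a hypothetical simplified item type:

// Hypothetical, simplified completion item: only a stable identity field
// plus one lazily-resolved payload field.
#[derive(Clone, Debug, PartialEq)]
struct Item {
    label: String,
    sort_text: Option<String>,
    documentation: Option<String>, // filled in only on resolve
}

// Find the freshly recomputed item that corresponds to the one the client
// asked to resolve, as the new handler does with its five-field comparison.
fn find_original(resolved: Vec<Item>, original: &Item) -> Option<Item> {
    resolved
        .into_iter()
        .find(|it| it.label == original.label && it.sort_text == original.sort_text)
}

fn main() {
    let original = Item { label: "foo()".into(), sort_text: None, documentation: None };
    let recomputed = vec![Item {
        label: "foo()".into(),
        sort_text: None,
        documentation: Some("Does foo.".into()),
    }];
    let resolved = find_original(recomputed, &original).expect("match by stable fields");
    assert_eq!(resolved.documentation.as_deref(), Some("Does foo."));
}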
@@ -12,7 +12,8 @@

 use hir::ChangeWithProcMacros;
 use ide::{
-    AnalysisHost, CallableSnippets, CompletionConfig, DiagnosticsConfig, FilePosition, TextSize,
+    AnalysisHost, CallableSnippets, CompletionConfig, CompletionFieldsToResolve, DiagnosticsConfig,
+    FilePosition, TextSize,
 };
 use ide_db::{
     imports::insert_use::{ImportGranularity, InsertUseConfig},
@@ -36,6 +37,8 @@ fn integrated_highlighting_benchmark() {

     let cargo_config = CargoConfig {
         sysroot: Some(project_model::RustLibSource::Discover),
+        all_targets: true,
+        set_test: true,
         ..CargoConfig::default()
     };
     let load_cargo_config = LoadCargoConfig {
@@ -102,6 +105,8 @@ fn integrated_completion_benchmark() {

     let cargo_config = CargoConfig {
         sysroot: Some(project_model::RustLibSource::Discover),
+        all_targets: true,
+        set_test: true,
         ..CargoConfig::default()
     };
     let load_cargo_config = LoadCargoConfig {
@@ -168,6 +173,7 @@ fn integrated_completion_benchmark() {
         snippets: Vec::new(),
         limit: None,
         add_semicolon_to_unit: true,
+        fields_to_resolve: CompletionFieldsToResolve::empty(),
     };
     let position =
         FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() };
@@ -215,6 +221,7 @@ fn integrated_completion_benchmark() {
         snippets: Vec::new(),
         limit: None,
         add_semicolon_to_unit: true,
+        fields_to_resolve: CompletionFieldsToResolve::empty(),
     };
     let position =
         FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() };
@@ -260,6 +267,7 @@ fn integrated_completion_benchmark() {
         snippets: Vec::new(),
         limit: None,
         add_semicolon_to_unit: true,
+        fields_to_resolve: CompletionFieldsToResolve::empty(),
     };
     let position =
         FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() };
@@ -279,6 +287,8 @@ fn integrated_diagnostics_benchmark() {

     let cargo_config = CargoConfig {
         sysroot: Some(project_model::RustLibSource::Discover),
+        all_targets: true,
+        set_test: true,
         ..CargoConfig::default()
     };
     let load_cargo_config = LoadCargoConfig {

@@ -34,6 +34,7 @@ mod handlers {

 pub mod tracing {
     pub mod config;
+    pub mod json;
     pub use config::Config;
     pub mod hprof;
 }

@@ -448,7 +448,7 @@ impl ClientCapabilities {
             .unwrap_or_default()
     }

-    pub fn inlay_hint_resolve_support_properties(&self) -> FxHashSet<String> {
+    pub fn inlay_hint_resolve_support_properties(&self) -> FxHashSet<&str> {
         self.0
             .text_document
             .as_ref()
@@ -457,8 +457,22 @@ impl ClientCapabilities {
             .map(|inlay_resolve| inlay_resolve.properties.iter())
             .into_iter()
             .flatten()
-            .cloned()
-            .collect::<FxHashSet<_>>()
+            .map(|s| s.as_str())
+            .collect()
+    }
+
+    pub fn completion_resolve_support_properties(&self) -> FxHashSet<&str> {
+        self.0
+            .text_document
+            .as_ref()
+            .and_then(|text| text.completion.as_ref())
+            .and_then(|completion_caps| completion_caps.completion_item.as_ref())
+            .and_then(|completion_item_caps| completion_item_caps.resolve_support.as_ref())
+            .map(|resolve_support| resolve_support.properties.iter())
+            .into_iter()
+            .flatten()
+            .map(|s| s.as_str())
+            .collect()
     }

     pub fn hover_markdown_support(&self) -> bool {

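Both capability walks share one shape: a chain of `and_then`s over nested `Option`s, flattened into an iterator of borrowed `&str` properties. A compact sketch over simplified stand-in types, showing why the return type could drop the `String` clones:

use std::collections::HashSet;

// Stand-ins for the nested lsp-types capability structs.
struct ResolveSupport {
    properties: Vec<String>,
}

struct CompletionItemCaps {
    resolve_support: Option<ResolveSupport>,
}

struct CompletionCaps {
    completion_item: Option<CompletionItemCaps>,
}

struct TextDocument {
    completion: Option<CompletionCaps>,
}

// Borrowing `&str` out of the capabilities (instead of cloning `String`s)
// is what the return-type change in the diff is about.
fn completion_resolve_support_properties(td: Option<&TextDocument>) -> HashSet<&str> {
    td.and_then(|text| text.completion.as_ref())
        .and_then(|caps| caps.completion_item.as_ref())
        .and_then(|item| item.resolve_support.as_ref())
        .map(|rs| rs.properties.iter())
        .into_iter()
        .flatten()
        .map(|s| s.as_str())
        .collect()
}

fn main() {
    let td = TextDocument {
        completion: Some(CompletionCaps {
            completion_item: Some(CompletionItemCaps {
                resolve_support: Some(ResolveSupport { properties: vec!["detail".into()] }),
            }),
        }),
    };
    assert!(completion_resolve_support_properties(Some(&td)).contains("detail"));
}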
@@ -825,6 +825,7 @@ pub struct CompletionResolveData {
     pub position: lsp_types::TextDocumentPositionParams,
     pub imports: Vec<CompletionImport>,
     pub version: Option<i32>,
+    pub trigger_character: Option<char>,
 }

 #[derive(Debug, Serialize, Deserialize)]

@@ -6,9 +6,9 @@ use std::{
 };

 use ide::{
-    Annotation, AnnotationKind, Assist, AssistKind, Cancellable, CompletionItem,
-    CompletionItemKind, CompletionRelevance, Documentation, FileId, FileRange, FileSystemEdit,
-    Fold, FoldKind, Highlight, HlMod, HlOperator, HlPunct, HlRange, HlTag, Indel,
+    Annotation, AnnotationKind, Assist, AssistKind, Cancellable, CompletionFieldsToResolve,
+    CompletionItem, CompletionItemKind, CompletionRelevance, Documentation, FileId, FileRange,
+    FileSystemEdit, Fold, FoldKind, Highlight, HlMod, HlOperator, HlPunct, HlRange, HlTag, Indel,
     InlayFieldsToResolve, InlayHint, InlayHintLabel, InlayHintLabelPart, InlayKind, Markup,
     NavigationTarget, ReferenceCategory, RenameError, Runnable, Severity, SignatureHelp,
     SnippetEdit, SourceChange, StructureNodeKind, SymbolKind, TextEdit, TextRange, TextSize,
@@ -227,9 +227,11 @@ pub(crate) fn snippet_text_edit_vec(

 pub(crate) fn completion_items(
     config: &Config,
+    fields_to_resolve: &CompletionFieldsToResolve,
     line_index: &LineIndex,
     version: Option<i32>,
     tdpp: lsp_types::TextDocumentPositionParams,
+    completion_trigger_character: Option<char>,
     mut items: Vec<CompletionItem>,
 ) -> Vec<lsp_types::CompletionItem> {
     if config.completion_hide_deprecated() {
@@ -239,7 +241,17 @@ pub(crate) fn completion_items(
     let max_relevance = items.iter().map(|it| it.relevance.score()).max().unwrap_or_default();
     let mut res = Vec::with_capacity(items.len());
     for item in items {
-        completion_item(&mut res, config, line_index, version, &tdpp, max_relevance, item);
+        completion_item(
+            &mut res,
+            config,
+            fields_to_resolve,
+            line_index,
+            version,
+            &tdpp,
+            max_relevance,
+            completion_trigger_character,
+            item,
+        );
     }

     if let Some(limit) = config.completion(None).limit {
@@ -253,21 +265,33 @@ pub(crate) fn completion_items(
 fn completion_item(
     acc: &mut Vec<lsp_types::CompletionItem>,
     config: &Config,
+    fields_to_resolve: &CompletionFieldsToResolve,
     line_index: &LineIndex,
     version: Option<i32>,
     tdpp: &lsp_types::TextDocumentPositionParams,
     max_relevance: u32,
+    completion_trigger_character: Option<char>,
     item: CompletionItem,
 ) {
     let insert_replace_support = config.insert_replace_support().then_some(tdpp.position);
     let ref_match = item.ref_match();
-    let lookup = item.lookup().to_owned();

     let mut additional_text_edits = Vec::new();
+    let mut something_to_resolve = false;

-    // LSP does not allow arbitrary edits in completion, so we have to do a
-    // non-trivial mapping here.
-    let text_edit = {
+    let filter_text = if fields_to_resolve.resolve_filter_text {
+        something_to_resolve = !item.lookup().is_empty();
+        None
+    } else {
+        Some(item.lookup().to_owned())
+    };
+
+    let text_edit = if fields_to_resolve.resolve_text_edit {
+        something_to_resolve = true;
+        None
+    } else {
+        // LSP does not allow arbitrary edits in completion, so we have to do a
+        // non-trivial mapping here.
         let mut text_edit = None;
         let source_range = item.source_range;
         for indel in item.text_edit {
@@ -290,25 +314,49 @@ fn completion_item(
                 additional_text_edits.push(text_edit);
             }
         }
-        text_edit.unwrap()
+        Some(text_edit.unwrap())
     };

     let insert_text_format = item.is_snippet.then_some(lsp_types::InsertTextFormat::SNIPPET);
-    let tags = item.deprecated.then(|| vec![lsp_types::CompletionItemTag::DEPRECATED]);
+    let tags = if fields_to_resolve.resolve_tags {
+        something_to_resolve = item.deprecated;
+        None
+    } else {
+        item.deprecated.then(|| vec![lsp_types::CompletionItemTag::DEPRECATED])
+    };
     let command = if item.trigger_call_info && config.client_commands().trigger_parameter_hints {
-        Some(command::trigger_parameter_hints())
+        if fields_to_resolve.resolve_command {
+            something_to_resolve = true;
+            None
+        } else {
+            Some(command::trigger_parameter_hints())
+        }
     } else {
         None
     };
+
+    let detail = if fields_to_resolve.resolve_detail {
+        something_to_resolve = item.detail.is_some();
+        None
+    } else {
+        item.detail
+    };
+
+    let documentation = if fields_to_resolve.resolve_documentation {
+        something_to_resolve = item.documentation.is_some();
+        None
+    } else {
+        item.documentation.map(documentation)
+    };

     let mut lsp_item = lsp_types::CompletionItem {
         label: item.label.to_string(),
-        detail: item.detail,
-        filter_text: Some(lookup),
+        detail,
+        filter_text,
         kind: Some(completion_item_kind(item.kind)),
-        text_edit: Some(text_edit),
+        text_edit,
         additional_text_edits: Some(additional_text_edits),
-        documentation: item.documentation.map(documentation),
+        documentation,
         deprecated: Some(item.deprecated),
         tags,
         command,
@@ -317,29 +365,40 @@ fn completion_item(
     };

     if config.completion_label_details_support() {
-        lsp_item.label_details = Some(lsp_types::CompletionItemLabelDetails {
-            detail: item.label_detail.as_ref().map(ToString::to_string),
-            description: lsp_item.detail.clone(),
-        });
+        if fields_to_resolve.resolve_label_details {
+            something_to_resolve = true;
+        } else {
+            lsp_item.label_details = Some(lsp_types::CompletionItemLabelDetails {
+                detail: item.label_detail.as_ref().map(ToString::to_string),
+                description: lsp_item.detail.clone(),
+            });
+        }
     } else if let Some(label_detail) = item.label_detail {
         lsp_item.label.push_str(label_detail.as_str());
     }

     set_score(&mut lsp_item, max_relevance, item.relevance);

-    if config.completion(None).enable_imports_on_the_fly && !item.import_to_add.is_empty() {
-        let imports = item
-            .import_to_add
-            .into_iter()
-            .map(|(import_path, import_name)| lsp_ext::CompletionImport {
-                full_import_path: import_path,
-                imported_name: import_name,
-            })
-            .collect::<Vec<_>>();
-        if !imports.is_empty() {
-            let data = lsp_ext::CompletionResolveData { position: tdpp.clone(), imports, version };
-            lsp_item.data = Some(to_value(data).unwrap());
-        }
-    }
+    let imports =
+        if config.completion(None).enable_imports_on_the_fly && !item.import_to_add.is_empty() {
+            item.import_to_add
+                .into_iter()
+                .map(|(import_path, import_name)| lsp_ext::CompletionImport {
+                    full_import_path: import_path,
+                    imported_name: import_name,
+                })
+                .collect()
+        } else {
+            Vec::new()
+        };
+    if something_to_resolve || !imports.is_empty() {
+        let data = lsp_ext::CompletionResolveData {
+            position: tdpp.clone(),
+            imports,
+            version,
+            trigger_character: completion_trigger_character,
+        };
+        lsp_item.data = Some(to_value(data).unwrap());
+    }

     if let Some((label, indel, relevance)) = ref_match {

@@ -22,7 +22,9 @@ use crate::{
     diagnostics::{fetch_native_diagnostics, DiagnosticsGeneration, NativeDiagnosticsFetchKind},
     discover::{DiscoverArgument, DiscoverCommand, DiscoverProjectMessage},
     flycheck::{self, FlycheckMessage},
-    global_state::{file_id_to_url, url_to_file_id, FetchWorkspaceRequest, GlobalState},
+    global_state::{
+        file_id_to_url, url_to_file_id, FetchWorkspaceRequest, FetchWorkspaceResponse, GlobalState,
+    },
     hack_recover_crate_name,
     handlers::dispatch::{NotificationDispatcher, RequestDispatcher},
     lsp::{
@@ -695,9 +697,9 @@ impl GlobalState {
                 let (state, msg) = match progress {
                     ProjectWorkspaceProgress::Begin => (Progress::Begin, None),
                     ProjectWorkspaceProgress::Report(msg) => (Progress::Report, Some(msg)),
-                    ProjectWorkspaceProgress::End(workspaces, force_reload_crate_graph) => {
-                        self.fetch_workspaces_queue
-                            .op_completed(Some((workspaces, force_reload_crate_graph)));
+                    ProjectWorkspaceProgress::End(workspaces, force_crate_graph_reload) => {
+                        let resp = FetchWorkspaceResponse { workspaces, force_crate_graph_reload };
+                        self.fetch_workspaces_queue.op_completed(resp);
                         if let Err(e) = self.fetch_workspace_error() {
                             error!("FetchWorkspaceError: {e}");
                         }
@@ -794,13 +796,20 @@ impl GlobalState {
                     }
                 }
                 vfs::loader::Message::Progress { n_total, n_done, dir, config_version } => {
-                    let _p = tracing::info_span!("GlobalState::handle_vfs_mgs/progress").entered();
+                    let _p = span!(Level::INFO, "GlobalState::handle_vfs_mgs/progress").entered();
                     always!(config_version <= self.vfs_config_version);

                     let (n_done, state) = match n_done {
-                        LoadingProgress::Started => (0, Progress::Begin),
+                        LoadingProgress::Started => {
+                            self.vfs_span =
+                                Some(span!(Level::INFO, "vfs_load", total = n_total).entered());
+                            (0, Progress::Begin)
+                        }
                         LoadingProgress::Progress(n_done) => (n_done.min(n_total), Progress::Report),
-                        LoadingProgress::Finished => (n_total, Progress::End),
+                        LoadingProgress::Finished => {
+                            self.vfs_span = None;
+                            (n_total, Progress::End)
+                        }
                     };

                     self.vfs_progress_config_version = config_version;
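Holding an `EnteredSpan` in an `Option` is a lightweight way to time a multi-event load: the span opens on the first progress message and closes (stamping its duration for subscribers such as tracing-tree) when the guard is dropped. A minimal sketch, assuming the `tracing` and `tracing-subscriber` crates; the `Loader` type is illustrative:

use tracing::{span, Level};

struct Loader {
    vfs_span: Option<tracing::span::EnteredSpan>,
}

impl Loader {
    fn on_progress(&mut self, done: usize, total: usize) {
        if done == 0 {
            // Enter the span when loading starts; keep the guard alive.
            self.vfs_span = Some(span!(Level::INFO, "vfs_load", total).entered());
        } else if done == total {
            // Dropping the guard exits the span, recording its duration.
            self.vfs_span = None;
        }
    }
}

fn main() {
    tracing_subscriber::fmt().init(); // any subscriber works
    let mut loader = Loader { vfs_span: None };
    loader.on_progress(0, 10);
    loader.on_progress(10, 10);
}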
@@ -881,6 +890,7 @@ impl GlobalState {
                     .expect("No title could be found; this is a bug");
                 match message {
                     DiscoverProjectMessage::Finished { project, buildfile } => {
+                        self.discover_handle = None;
                         self.report_progress(&title, Progress::End, None, None, None);
                         self.discover_workspace_queue.op_completed(());

@@ -892,6 +902,7 @@ impl GlobalState {
                         self.report_progress(&title, Progress::Report, Some(message), None, None)
                     }
                     DiscoverProjectMessage::Error { error, source } => {
+                        self.discover_handle = None;
                         let message = format!("Project discovery failed: {error}");
                         self.discover_workspace_queue.op_completed(());
                         self.show_and_log_error(message.clone(), source);

@@ -27,12 +27,12 @@ pub(crate) type Cause = String;
 pub(crate) struct OpQueue<Args = (), Output = ()> {
     op_requested: Option<(Cause, Args)>,
     op_in_progress: bool,
-    last_op_result: Output,
+    last_op_result: Option<Output>,
 }

-impl<Args, Output: Default> Default for OpQueue<Args, Output> {
+impl<Args, Output> Default for OpQueue<Args, Output> {
     fn default() -> Self {
-        Self { op_requested: None, op_in_progress: false, last_op_result: Default::default() }
+        Self { op_requested: None, op_in_progress: false, last_op_result: None }
     }
 }

@@ -56,12 +56,12 @@ impl<Args, Output> OpQueue<Args, Output> {
     pub(crate) fn op_completed(&mut self, result: Output) {
         assert!(self.op_in_progress);
         self.op_in_progress = false;
-        self.last_op_result = result;
+        self.last_op_result = Some(result);
     }

     /// Get the result of the last operation.
-    pub(crate) fn last_op_result(&self) -> &Output {
-        &self.last_op_result
+    pub(crate) fn last_op_result(&self) -> Option<&Output> {
+        self.last_op_result.as_ref()
     }

     // Is there an operation that has started, but hasn't yet finished?
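With `last_op_result` stored as `Option<Output>`, the queue no longer needs `Output: Default`, and callers can tell "never completed" apart from a default value. A self-contained sketch of the new shape plus the caller-side `copied().unwrap_or(false)` idiom seen in `GlobalState` (simplified: `Cause` is a plain `String` here, and the driver methods around `op_in_progress` are omitted):

#[allow(dead_code)]
pub struct OpQueue<Args = (), Output = ()> {
    op_requested: Option<(String, Args)>,
    op_in_progress: bool,
    last_op_result: Option<Output>,
}

impl<Args, Output> Default for OpQueue<Args, Output> {
    // No `Output: Default` bound needed anymore: the "no result yet"
    // state is `None` instead of `Output::default()`.
    fn default() -> Self {
        Self { op_requested: None, op_in_progress: false, last_op_result: None }
    }
}

impl<Args, Output> OpQueue<Args, Output> {
    pub fn op_completed(&mut self, result: Output) {
        assert!(self.op_in_progress);
        self.op_in_progress = false;
        self.last_op_result = Some(result);
    }

    pub fn last_op_result(&self) -> Option<&Output> {
        self.last_op_result.as_ref()
    }
}

fn main() {
    let mut queue = OpQueue::<(), bool>::default();
    // Before any operation completes, there is no result to read.
    assert_eq!(queue.last_op_result(), None);

    queue.op_in_progress = true; // normally set by the queue's driver
    queue.op_completed(true);
    // Callers now unwrap explicitly instead of relying on a default.
    assert!(queue.last_op_result().copied().unwrap_or(false));
}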
@ -33,7 +33,7 @@ use vfs::{AbsPath, AbsPathBuf, ChangeKind};
|
||||||
use crate::{
|
use crate::{
|
||||||
config::{Config, FilesWatcher, LinkedProject},
|
config::{Config, FilesWatcher, LinkedProject},
|
||||||
flycheck::{FlycheckConfig, FlycheckHandle},
|
flycheck::{FlycheckConfig, FlycheckHandle},
|
||||||
global_state::{FetchWorkspaceRequest, GlobalState},
|
global_state::{FetchWorkspaceRequest, FetchWorkspaceResponse, GlobalState},
|
||||||
lsp_ext,
|
lsp_ext,
|
||||||
main_loop::{DiscoverProjectParam, Task},
|
main_loop::{DiscoverProjectParam, Task},
|
||||||
op_queue::Cause,
|
op_queue::Cause,
|
||||||
|
@ -448,15 +448,15 @@ impl GlobalState {
|
||||||
let _p = tracing::info_span!("GlobalState::switch_workspaces").entered();
|
let _p = tracing::info_span!("GlobalState::switch_workspaces").entered();
|
||||||
tracing::info!(%cause, "will switch workspaces");
|
tracing::info!(%cause, "will switch workspaces");
|
||||||
|
|
||||||
let Some((workspaces, force_reload_crate_graph)) =
|
let Some(FetchWorkspaceResponse { workspaces, force_crate_graph_reload }) =
|
||||||
self.fetch_workspaces_queue.last_op_result()
|
self.fetch_workspaces_queue.last_op_result()
|
||||||
else {
|
else {
|
||||||
return;
|
return;
|
||||||
};
|
};
|
||||||
|
|
||||||
info!(%cause, ?force_reload_crate_graph);
|
info!(%cause, ?force_crate_graph_reload);
|
||||||
if self.fetch_workspace_error().is_err() && !self.workspaces.is_empty() {
|
if self.fetch_workspace_error().is_err() && !self.workspaces.is_empty() {
|
||||||
if *force_reload_crate_graph {
|
if *force_crate_graph_reload {
|
||||||
self.recreate_crate_graph(cause);
|
self.recreate_crate_graph(cause);
|
||||||
}
|
}
|
||||||
// It only makes sense to switch to a partially broken workspace
|
// It only makes sense to switch to a partially broken workspace
|
||||||
|
@@ -474,8 +474,12 @@ impl GlobalState {
             .all(|(l, r)| l.eq_ignore_build_data(r));

         if same_workspaces {
-            let (workspaces, build_scripts) = self.fetch_build_data_queue.last_op_result();
-            if Arc::ptr_eq(workspaces, &self.workspaces) {
+            let (workspaces, build_scripts) = match self.fetch_build_data_queue.last_op_result() {
+                Some((workspaces, build_scripts)) => (workspaces.clone(), build_scripts.as_slice()),
+                None => (Default::default(), Default::default()),
+            };
+
+            if Arc::ptr_eq(&workspaces, &self.workspaces) {
                 info!("set build scripts to workspaces");

                 let workspaces = workspaces
@@ -492,7 +496,7 @@ impl GlobalState {
                 self.workspaces = Arc::new(workspaces);
             } else {
                 info!("build scripts do not match the version of the active workspace");
-                if *force_reload_crate_graph {
+                if *force_crate_graph_reload {
                     self.recreate_crate_graph(cause);
                 }
@@ -739,22 +743,18 @@ impl GlobalState {
     pub(super) fn fetch_workspace_error(&self) -> Result<(), String> {
         let mut buf = String::new();

-        let Some((last_op_result, _)) = self.fetch_workspaces_queue.last_op_result() else {
+        let Some(FetchWorkspaceResponse { workspaces, .. }) =
+            self.fetch_workspaces_queue.last_op_result()
+        else {
             return Ok(());
         };

-        if !self.discover_workspace_queue.op_in_progress() {
-            if last_op_result.is_empty() {
-                stdx::format_to!(buf, "rust-analyzer failed to discover workspace");
-            } else {
-                for ws in last_op_result {
-                    if let Err(err) = ws {
-                        stdx::format_to!(
-                            buf,
-                            "rust-analyzer failed to load workspace: {:#}\n",
-                            err
-                        );
-                    }
-                }
-            }
-        }
+        if workspaces.is_empty() && self.config.discover_workspace_config().is_none() {
+            stdx::format_to!(buf, "rust-analyzer failed to fetch workspace");
+        } else {
+            for ws in workspaces {
+                if let Err(err) = ws {
+                    stdx::format_to!(buf, "rust-analyzer failed to load workspace: {:#}\n", err);
+                }
+            }
+        }
@@ -769,7 +769,11 @@ impl GlobalState {
     pub(super) fn fetch_build_data_error(&self) -> Result<(), String> {
         let mut buf = String::new();

-        for ws in &self.fetch_build_data_queue.last_op_result().1 {
+        let Some((_, ws)) = &self.fetch_build_data_queue.last_op_result() else {
+            return Ok(());
+        };
+
+        for ws in ws {
             match ws {
                 Ok(data) => {
                     if let Some(stderr) = data.error() {
@@ -1,17 +1,20 @@
 //! Simple logger that logs either to stderr or to a file, using `tracing_subscriber`
 //! filter syntax and `tracing_appender` for non blocking output.

-use std::io;
+use std::io::{self};

 use anyhow::Context;
 use tracing::level_filters::LevelFilter;
 use tracing_subscriber::{
-    filter::Targets, fmt::MakeWriter, layer::SubscriberExt, util::SubscriberInitExt, Layer,
-    Registry,
+    filter::{filter_fn, Targets},
+    fmt::MakeWriter,
+    layer::SubscriberExt,
+    Layer, Registry,
 };
 use tracing_tree::HierarchicalLayer;

 use crate::tracing::hprof;
+use crate::tracing::json;

 #[derive(Debug)]
 pub struct Config<T> {
@@ -34,6 +37,12 @@ pub struct Config<T> {
     /// env RA_PROFILE=*@3>10   // dump everything, up to depth 3, if it takes more than 10
     /// ```
     pub profile_filter: Option<String>,
+
+    /// Filtering syntax, set in a shell:
+    /// ```
+    /// env RA_PROFILE_JSON=foo|bar|baz
+    /// ```
+    pub json_profile_filter: Option<String>,
 }

 impl<T> Config<T>
@@ -41,7 +50,7 @@ where
     T: for<'writer> MakeWriter<'writer> + Send + Sync + 'static,
 {
     pub fn init(self) -> anyhow::Result<()> {
-        let filter: Targets = self
+        let targets_filter: Targets = self
             .filter
             .parse()
             .with_context(|| format!("invalid log filter: `{}`", self.filter))?;
@@ -50,31 +59,60 @@ where

         let ra_fmt_layer = tracing_subscriber::fmt::layer()
             .with_target(false)
+            .with_ansi(false)
             .with_writer(writer)
-            .with_filter(filter);
+            .with_filter(targets_filter);

-        let mut chalk_layer = None;
-        if let Some(chalk_filter) = self.chalk_filter {
+        let chalk_layer = match self.chalk_filter {
+            Some(chalk_filter) => {
                 let level: LevelFilter =
                     chalk_filter.parse().with_context(|| "invalid chalk log filter")?;

                 let chalk_filter = Targets::new()
                     .with_target("chalk_solve", level)
                     .with_target("chalk_ir", level)
                     .with_target("chalk_recursive", level);
-            chalk_layer = Some(
+                // TODO: remove `.with_filter(LevelFilter::OFF)` on the `None` branch.
                 HierarchicalLayer::default()
                     .with_indent_lines(true)
                     .with_ansi(false)
                     .with_indent_amount(2)
                     .with_writer(io::stderr)
-                    .with_filter(chalk_filter),
-            );
+                    .with_filter(chalk_filter)
+                    .boxed()
+            }
+            None => None::<HierarchicalLayer>.with_filter(LevelFilter::OFF).boxed(),
         };

-        let profiler_layer = self.profile_filter.map(|spec| hprof::layer(&spec));
+        // TODO: remove `.with_filter(LevelFilter::OFF)` on the `None` branch.
+        let profiler_layer = match self.profile_filter {
+            Some(spec) => Some(hprof::SpanTree::new(&spec)).with_filter(LevelFilter::INFO),
+            None => None.with_filter(LevelFilter::OFF),
+        };

-        Registry::default().with(ra_fmt_layer).with(chalk_layer).with(profiler_layer).try_init()?;
+        let json_profiler_layer = match self.json_profile_filter {
+            Some(spec) => {
+                let filter = json::JsonFilter::from_spec(&spec);
+                let filter = filter_fn(move |metadata| {
+                    let allowed = match &filter.allowed_names {
+                        Some(names) => names.contains(metadata.name()),
+                        None => true,
+                    };
+
+                    allowed && metadata.is_span()
+                });
+                Some(json::TimingLayer::new(std::io::stderr).with_filter(filter))
+            }
+            None => None,
+        };
+
+        let subscriber = Registry::default()
+            .with(ra_fmt_layer)
+            .with(json_profiler_layer)
+            .with(profiler_layer)
+            .with(chalk_layer);
+
+        tracing::subscriber::set_global_default(subscriber)?;

         Ok(())
     }
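The `chalk_layer` rewrite uses a common `tracing-subscriber` trick: both match arms are boxed so the variable has one concrete type, and the disabled branch is an `Option` layer filtered off entirely. A self-contained sketch of the same pattern, assuming `tracing` and `tracing-subscriber` 0.3 with default features (the `VERBOSE` variable is only for illustration):

```rust
use tracing_subscriber::{filter::LevelFilter, fmt, layer::SubscriberExt, Layer, Registry};

fn main() {
    let verbose = std::env::var("VERBOSE").is_ok();
    let layer = match verbose {
        // Both arms produce `Box<dyn Layer<Registry> + Send + Sync>`.
        true => fmt::layer().with_filter(LevelFilter::DEBUG).boxed(),
        // `Option<L>` implements `Layer`, so the "off" branch still type-checks.
        false => None::<fmt::Layer<Registry>>.with_filter(LevelFilter::OFF).boxed(),
    };

    let subscriber = Registry::default().with(layer);
    tracing::subscriber::set_global_default(subscriber).expect("subscriber already set");

    tracing::debug!("only emitted when VERBOSE is set");
}
```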
@@ -33,6 +33,7 @@
 use std::{
     fmt::Write,
+    marker::PhantomData,
     mem,
     time::{Duration, Instant},
 };
@@ -50,53 +51,42 @@ use tracing_subscriber::{
     Layer, Registry,
 };

-use crate::tracing::hprof;
-
 pub fn init(spec: &str) -> tracing::subscriber::DefaultGuard {
-    let subscriber = Registry::default().with(layer(spec));
+    let subscriber = Registry::default().with(SpanTree::new(spec));
     tracing::subscriber::set_default(subscriber)
 }

-pub fn layer<S>(spec: &str) -> impl Layer<S>
+#[derive(Debug)]
+pub(crate) struct SpanTree<S> {
+    aggregate: bool,
+    write_filter: WriteFilter,
+    _inner: PhantomData<fn(S)>,
+}
+
+impl<S> SpanTree<S>
 where
     S: Subscriber + for<'span> tracing_subscriber::registry::LookupSpan<'span>,
 {
-    let (write_filter, allowed_names) = WriteFilter::from_spec(spec);
+    pub(crate) fn new(spec: &str) -> impl Layer<S> {
+        let (write_filter, allowed_names) = WriteFilter::from_spec(spec);

-    // this filter the first pass for `tracing`: these are all the "profiling" spans, but things like
-    // span depth or duration are not filtered here: that only occurs at write time.
-    let profile_filter = filter::filter_fn(move |metadata| {
-        let allowed = match &allowed_names {
-            Some(names) => names.contains(metadata.name()),
-            None => true,
-        };
-
-        allowed
-            && metadata.is_span()
-            && metadata.level() >= &Level::INFO
-            && !metadata.target().starts_with("salsa")
-            && metadata.name() != "compute_exhaustiveness_and_usefulness"
-            && !metadata.target().starts_with("chalk")
-    });
-
-    hprof::SpanTree::default().aggregate(true).spec_filter(write_filter).with_filter(profile_filter)
-}
-
-#[derive(Default, Debug)]
-pub(crate) struct SpanTree {
-    aggregate: bool,
-    write_filter: WriteFilter,
-}
-
-impl SpanTree {
-    /// Merge identical sibling spans together.
-    pub(crate) fn aggregate(self, yes: bool) -> SpanTree {
-        SpanTree { aggregate: yes, ..self }
-    }
-
-    /// Add a write-time filter for span duration or tree depth.
-    pub(crate) fn spec_filter(self, write_filter: WriteFilter) -> SpanTree {
-        SpanTree { write_filter, ..self }
-    }
+        // this filter the first pass for `tracing`: these are all the "profiling" spans, but things like
+        // span depth or duration are not filtered here: that only occurs at write time.
+        let profile_filter = filter::filter_fn(move |metadata| {
+            let allowed = match &allowed_names {
+                Some(names) => names.contains(metadata.name()),
+                None => true,
+            };
+
+            allowed
+                && metadata.is_span()
+                && metadata.level() >= &Level::INFO
+                && !metadata.target().starts_with("salsa")
+                && metadata.name() != "compute_exhaustiveness_and_usefulness"
+                && !metadata.target().starts_with("chalk")
+        });
+
+        Self { aggregate: true, write_filter, _inner: PhantomData }.with_filter(profile_filter)
+    }
 }
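`SpanTree` gains a `PhantomData<fn(S)>` field here because the layer is now generic over the subscriber type `S` without ever owning an `S`. Using `fn(S)` rather than `S` keeps the marker `Send + Sync` regardless of what `S` is. A minimal sketch of just that shape, std only:

```rust
use std::marker::PhantomData;

struct SpanTree<S> {
    aggregate: bool,
    // Ties the type to `S` without storing one; `fn(S)` is always Send + Sync.
    _inner: PhantomData<fn(S)>,
}

impl<S> SpanTree<S> {
    fn new() -> Self {
        Self { aggregate: true, _inner: PhantomData }
    }
}

fn main() {
    fn assert_send_sync<T: Send + Sync>(_: &T) {}
    let tree = SpanTree::<()>::new();
    assert_send_sync(&tree); // compiles even though `S` is never stored
    assert!(tree.aggregate);
}
```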
@@ -136,7 +126,7 @@ impl<'a> Visit for DataVisitor<'a> {
     }
 }

-impl<S> Layer<S> for SpanTree
+impl<S> Layer<S> for SpanTree<S>
 where
     S: Subscriber + for<'span> LookupSpan<'span>,
 {
90 crates/rust-analyzer/src/tracing/json.rs (new file)
@@ -0,0 +1,90 @@
+//! A [tracing_subscriber::layer::Layer] that exports new-line delinated JSON.
+//!
+//! Usage:
+//!
+//! ```rust
+//! let layer = json::TimingLayer::new(std::io::stderr);
+//! Registry::default().with(layer).init();
+//! ```
+
+use std::{io::Write as _, marker::PhantomData, time::Instant};
+
+use ide_db::FxHashSet;
+use tracing::{
+    span::{Attributes, Id},
+    Event, Subscriber,
+};
+use tracing_subscriber::{fmt::MakeWriter, layer::Context, registry::LookupSpan, Layer};
+
+struct JsonData {
+    name: &'static str,
+    start: std::time::Instant,
+}
+
+impl JsonData {
+    fn new(name: &'static str) -> Self {
+        Self { name, start: Instant::now() }
+    }
+}
+
+#[derive(Debug)]
+pub(crate) struct TimingLayer<S, W> {
+    writer: W,
+    _inner: PhantomData<fn(S)>,
+}
+
+impl<S, W> TimingLayer<S, W> {
+    pub(crate) fn new(writer: W) -> Self {
+        Self { writer, _inner: PhantomData }
+    }
+}
+
+impl<S, W> Layer<S> for TimingLayer<S, W>
+where
+    S: Subscriber + for<'span> LookupSpan<'span>,
+    W: for<'writer> MakeWriter<'writer> + Send + Sync + 'static,
+{
+    fn on_new_span(&self, attrs: &Attributes<'_>, id: &Id, ctx: Context<'_, S>) {
+        let span = ctx.span(id).unwrap();
+
+        let data = JsonData::new(attrs.metadata().name());
+        span.extensions_mut().insert(data);
+    }
+
+    fn on_event(&self, _event: &Event<'_>, _ctx: Context<'_, S>) {}
+
+    fn on_close(&self, id: Id, ctx: Context<'_, S>) {
+        #[derive(serde::Serialize)]
+        struct JsonDataInner {
+            name: &'static str,
+            elapsed_ms: u128,
+        }
+
+        let span = ctx.span(&id).unwrap();
+        let Some(data) = span.extensions_mut().remove::<JsonData>() else {
+            return;
+        };
+
+        let data = JsonDataInner { name: data.name, elapsed_ms: data.start.elapsed().as_millis() };
+        let mut out = serde_json::to_string(&data).expect("Unable to serialize data");
+        out.push('\n');
+        self.writer.make_writer().write_all(out.as_bytes()).expect("Unable to write data");
+    }
+}
+
+#[derive(Default, Clone, Debug)]
+pub(crate) struct JsonFilter {
+    pub(crate) allowed_names: Option<FxHashSet<String>>,
+}
+
+impl JsonFilter {
+    pub(crate) fn from_spec(spec: &str) -> Self {
+        let allowed_names = if spec == "*" {
+            None
+        } else {
+            Some(FxHashSet::from_iter(spec.split('|').map(String::from)))
+        };
+
+        Self { allowed_names }
+    }
+}
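For reference, the layer emits one JSON object per line, shaped by the `JsonDataInner` struct in `on_close` above. A minimal sketch of producing such a line, assuming the `serde` (with the `derive` feature) and `serde_json` crates as the new file does:

```rust
use std::time::Instant;

#[derive(serde::Serialize)]
struct JsonDataInner {
    name: &'static str,
    elapsed_ms: u128,
}

fn main() {
    let start = Instant::now();
    let data = JsonDataInner { name: "vfs_load", elapsed_ms: start.elapsed().as_millis() };
    let mut out = serde_json::to_string(&data).expect("Unable to serialize data");
    out.push('\n');
    // Prints something like: {"name":"vfs_load","elapsed_ms":0}
    print!("{out}");
}
```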
@@ -103,6 +103,7 @@ impl Project<'_> {
             filter: std::env::var("RA_LOG").ok().unwrap_or_else(|| "error".to_owned()),
             chalk_filter: std::env::var("CHALK_DEBUG").ok(),
             profile_filter: std::env::var("RA_PROFILE").ok(),
+            json_profile_filter: std::env::var("RA_PROFILE_JSON").ok(),
         };
     });
@@ -142,3 +142,12 @@ pub enum Transparency {
     /// Def-site spans in procedural macros, identifiers from `macro` by default use this.
     Opaque,
 }
+
+impl Transparency {
+    /// Returns `true` if the transparency is [`Opaque`].
+    ///
+    /// [`Opaque`]: Transparency::Opaque
+    pub fn is_opaque(&self) -> bool {
+        matches!(self, Self::Opaque)
+    }
+}
@@ -55,7 +55,10 @@ where
     /// Returns all [`TextRange`]s that correspond to the given span.
     ///
     /// Note this does a linear search through the entire backing vector.
-    pub fn ranges_with_span_exact(&self, span: SpanData<S>) -> impl Iterator<Item = TextRange> + '_
+    pub fn ranges_with_span_exact(
+        &self,
+        span: SpanData<S>,
+    ) -> impl Iterator<Item = (TextRange, S)> + '_
     where
         S: Copy,
     {
@@ -64,14 +67,14 @@ where
                 return None;
             }
             let start = idx.checked_sub(1).map_or(TextSize::new(0), |prev| self.spans[prev].0);
-            Some(TextRange::new(start, end))
+            Some((TextRange::new(start, end), s.ctx))
         })
     }

     /// Returns all [`TextRange`]s whose spans contain the given span.
     ///
     /// Note this does a linear search through the entire backing vector.
-    pub fn ranges_with_span(&self, span: SpanData<S>) -> impl Iterator<Item = TextRange> + '_
+    pub fn ranges_with_span(&self, span: SpanData<S>) -> impl Iterator<Item = (TextRange, S)> + '_
     where
         S: Copy,
     {
@@ -83,7 +86,7 @@ where
                 return None;
             }
             let start = idx.checked_sub(1).map_or(TextSize::new(0), |prev| self.spans[prev].0);
-            Some(TextRange::new(start, end))
+            Some((TextRange::new(start, end), s.ctx))
         })
    }
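The lookup logic these hunks touch stores only the *end* offset of each range, so a range's start is the previous entry's end (or zero), and the iterator now yields the syntax context alongside each range. A miniature of the same computation with plain tuples standing in for `TextRange`/`SpanData` (names are illustrative):

```rust
/// `spans` holds `(end_offset, ctx)` entries; return `((start, end), ctx)` for
/// every entry whose context matches, deriving `start` from the previous entry.
fn ranges_with_ctx(spans: &[(u32, u32)], want_ctx: u32) -> Vec<((u32, u32), u32)> {
    spans
        .iter()
        .enumerate()
        .filter(|(_, &(_, ctx))| ctx == want_ctx)
        .map(|(idx, &(end, ctx))| {
            let start = idx.checked_sub(1).map_or(0, |prev| spans[prev].0);
            ((start, end), ctx)
        })
        .collect()
}

fn main() {
    // Entries covering [0,5) in ctx 0, [5,9) in ctx 1, [9,12) in ctx 0.
    let spans = [(5, 0), (9, 1), (12, 0)];
    assert_eq!(ranges_with_ctx(&spans, 0), vec![((0, 5), 0), ((9, 12), 0)]);
}
```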
@@ -148,6 +148,7 @@ pub fn token_tree_to_syntax_node<Ctx>(
 ) -> (Parse<SyntaxNode>, SpanMap<Ctx>)
 where
     SpanData<Ctx>: Copy + fmt::Debug,
+    Ctx: PartialEq,
 {
     let buffer = match tt {
         tt::Subtree {
@@ -892,6 +893,7 @@ fn delim_to_str(d: tt::DelimiterKind, closing: bool) -> Option<&'static str> {
 impl<Ctx> TtTreeSink<'_, Ctx>
 where
     SpanData<Ctx>: Copy + fmt::Debug,
+    Ctx: PartialEq,
 {
     /// Parses a float literal as if it was a one to two name ref nodes with a dot inbetween.
     /// This occurs when a float literal is used as a field access.
@@ -949,6 +951,7 @@ where
         }

         let mut last = self.cursor;
+        let mut combined_span = None;
         'tokens: for _ in 0..n_tokens {
             let tmp: u8;
             if self.cursor.eof() {
@@ -982,7 +985,10 @@ where
                     format_to!(self.buf, "{lit}");
                     debug_assert_ne!(self.buf.len() - buf_l, 0);
                     self.text_pos += TextSize::new((self.buf.len() - buf_l) as u32);
-                    self.token_map.push(self.text_pos, lit.span);
+                    combined_span = match combined_span {
+                        None => Some(lit.span),
+                        Some(prev_span) => Some(Self::merge_spans(prev_span, lit.span)),
+                    };
                     self.cursor = self.cursor.bump();
                     continue 'tokens;
                 }
@@ -1006,9 +1012,13 @@ where
             };
             self.buf += text;
             self.text_pos += TextSize::of(text);
-            self.token_map.push(self.text_pos, span);
+            combined_span = match combined_span {
+                None => Some(span),
+                Some(prev_span) => Some(Self::merge_spans(prev_span, span)),
+            }
         }

+        self.token_map.push(self.text_pos, combined_span.expect("expected at least one token"));
         self.inner.token(kind, self.buf.as_str());
         self.buf.clear();
         // FIXME: Emitting whitespace for this is really just a hack, we should get rid of it.
@@ -1043,4 +1053,22 @@ where
     fn error(&mut self, error: String) {
         self.inner.error(error, self.text_pos)
     }
+
+    fn merge_spans(a: SpanData<Ctx>, b: SpanData<Ctx>) -> SpanData<Ctx> {
+        // We don't do what rustc does exactly, rustc does something clever when the spans have different syntax contexts
+        // but this runs afoul of our separation between `span` and `hir-expand`.
+        SpanData {
+            range: if a.ctx == b.ctx {
+                TextRange::new(
+                    std::cmp::min(a.range.start(), b.range.start()),
+                    std::cmp::max(a.range.end(), b.range.end()),
+                )
+            } else {
+                // Combining ranges make no sense when they come from different syntax contexts.
+                a.range
+            },
+            anchor: a.anchor,
+            ctx: a.ctx,
+        }
+    }
 }
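The merge rule in `merge_spans` is simple but easy to misread in diff form: ranges are unioned only when both spans share a syntax context; otherwise the first span's range wins. A standalone sketch with plain tuples standing in for `SpanData`/`TextRange`:

```rust
#[derive(Debug, Clone, Copy, PartialEq)]
struct Span {
    range: (u32, u32), // (start, end)
    ctx: u32,          // syntax context
}

fn merge_spans(a: Span, b: Span) -> Span {
    Span {
        range: if a.ctx == b.ctx {
            // Same context: take the union of the two ranges.
            (a.range.0.min(b.range.0), a.range.1.max(b.range.1))
        } else {
            // Different contexts: combining ranges makes no sense, keep `a`.
            a.range
        },
        ctx: a.ctx,
    }
}

fn main() {
    let a = Span { range: (4, 10), ctx: 0 };
    let b = Span { range: (8, 16), ctx: 0 };
    assert_eq!(merge_spans(a, b).range, (4, 16));

    let c = Span { range: (8, 16), ctx: 1 };
    assert_eq!(merge_spans(a, c).range, (4, 10));
}
```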
@@ -8,6 +8,7 @@ pub mod make;
 mod node_ext;
 mod operators;
 pub mod prec;
+pub mod syntax_factory;
 mod token_ext;
 mod traits;
@@ -166,7 +167,7 @@ mod support {
 }

 #[test]
-fn assert_ast_is_object_safe() {
+fn assert_ast_is_dyn_compatible() {
     fn _f(_: &dyn AstNode, _: &dyn HasName) {}
 }
45 crates/syntax/src/ast/syntax_factory.rs (new file)
@@ -0,0 +1,45 @@
+//! Builds upon [`crate::ast::make`] constructors to create ast fragments with
+//! optional syntax mappings.
+//!
+//! Instead of forcing make constructors to perform syntax mapping, we instead
+//! let [`SyntaxFactory`] handle constructing the mappings. Care must be taken
+//! to remember to feed the syntax mappings into a [`SyntaxEditor`](crate::syntax_editor::SyntaxEditor),
+//! if applicable.
+
+mod constructors;
+
+use std::cell::{RefCell, RefMut};
+
+use crate::syntax_editor::SyntaxMapping;
+
+pub struct SyntaxFactory {
+    // Stored in a refcell so that the factory methods can be &self
+    mappings: Option<RefCell<SyntaxMapping>>,
+}
+
+impl SyntaxFactory {
+    /// Creates a new [`SyntaxFactory`], generating mappings between input nodes and generated nodes.
+    pub fn new() -> Self {
+        Self { mappings: Some(RefCell::new(SyntaxMapping::new())) }
+    }
+
+    /// Creates a [`SyntaxFactory`] without generating mappings.
+    pub fn without_mappings() -> Self {
+        Self { mappings: None }
+    }
+
+    /// Gets all of the tracked syntax mappings, if any.
+    pub fn finish_with_mappings(self) -> SyntaxMapping {
+        self.mappings.unwrap_or_default().into_inner()
+    }
+
+    fn mappings(&self) -> Option<RefMut<'_, SyntaxMapping>> {
+        self.mappings.as_ref().map(|it| it.borrow_mut())
+    }
+}
+
+impl Default for SyntaxFactory {
+    fn default() -> Self {
+        Self::without_mappings()
+    }
+}
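The interesting design choice in this new file is the `RefCell`: constructor methods take `&self` yet still record mappings, by borrowing the cell mutably on demand. A minimal sketch of the same pattern, std only; `Mapping` and the method bodies are illustrative stand-ins for `SyntaxMapping` and the real constructors:

```rust
use std::cell::{RefCell, RefMut};

#[derive(Default)]
struct Mapping(Vec<String>);

struct Factory {
    // `Option` distinguishes "track mappings" from "don't bother".
    mappings: Option<RefCell<Mapping>>,
}

impl Factory {
    fn new() -> Self {
        Self { mappings: Some(RefCell::new(Mapping::default())) }
    }

    fn mappings(&self) -> Option<RefMut<'_, Mapping>> {
        self.mappings.as_ref().map(|it| it.borrow_mut())
    }

    fn make_node(&self, name: &str) -> String {
        // `&self`, but we can still record the mapping through the RefCell.
        if let Some(mut m) = self.mappings() {
            m.0.push(format!("mapped {name}"));
        }
        name.to_uppercase()
    }

    fn finish(self) -> Mapping {
        self.mappings.unwrap_or_default().into_inner()
    }
}

fn main() {
    let factory = Factory::new();
    let node = factory.make_node("var_name");
    assert_eq!(node, "VAR_NAME");
    assert_eq!(factory.finish().0, vec!["mapped var_name".to_string()]);
}
```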
110 crates/syntax/src/ast/syntax_factory/constructors.rs (new file)
@@ -0,0 +1,110 @@
+//! Wrappers over [`make`] constructors
+use itertools::Itertools;
+
+use crate::{
+    ast::{self, make, HasName},
+    syntax_editor::SyntaxMappingBuilder,
+    AstNode,
+};
+
+use super::SyntaxFactory;
+
+impl SyntaxFactory {
+    pub fn name(&self, name: &str) -> ast::Name {
+        make::name(name).clone_for_update()
+    }
+
+    pub fn ident_pat(&self, ref_: bool, mut_: bool, name: ast::Name) -> ast::IdentPat {
+        let ast = make::ident_pat(ref_, mut_, name.clone()).clone_for_update();
+
+        if let Some(mut mapping) = self.mappings() {
+            let mut builder = SyntaxMappingBuilder::new(ast.syntax().clone());
+            builder.map_node(name.syntax().clone(), ast.name().unwrap().syntax().clone());
+            builder.finish(&mut mapping);
+        }
+
+        ast
+    }
+
+    pub fn block_expr(
+        &self,
+        stmts: impl IntoIterator<Item = ast::Stmt>,
+        tail_expr: Option<ast::Expr>,
+    ) -> ast::BlockExpr {
+        let stmts = stmts.into_iter().collect_vec();
+        let input = stmts.iter().map(|it| it.syntax().clone()).collect_vec();
+
+        let ast = make::block_expr(stmts, tail_expr.clone()).clone_for_update();
+
+        if let Some((mut mapping, stmt_list)) = self.mappings().zip(ast.stmt_list()) {
+            let mut builder = SyntaxMappingBuilder::new(stmt_list.syntax().clone());
+
+            builder.map_children(
+                input.into_iter(),
+                stmt_list.statements().map(|it| it.syntax().clone()),
+            );
+
+            if let Some((input, output)) = tail_expr.zip(stmt_list.tail_expr()) {
+                builder.map_node(input.syntax().clone(), output.syntax().clone());
+            }
+
+            builder.finish(&mut mapping);
+        }
+
+        ast
+    }
+
+    pub fn expr_path(&self, path: ast::Path) -> ast::Expr {
+        let ast::Expr::PathExpr(ast) = make::expr_path(path.clone()).clone_for_update() else {
+            unreachable!()
+        };
+
+        if let Some(mut mapping) = self.mappings() {
+            let mut builder = SyntaxMappingBuilder::new(ast.syntax().clone());
+            builder.map_node(path.syntax().clone(), ast.path().unwrap().syntax().clone());
+            builder.finish(&mut mapping);
+        }
+
+        ast.into()
+    }
+
+    pub fn expr_ref(&self, expr: ast::Expr, exclusive: bool) -> ast::Expr {
+        let ast::Expr::RefExpr(ast) = make::expr_ref(expr.clone(), exclusive).clone_for_update()
+        else {
+            unreachable!()
+        };
+
+        if let Some(mut mapping) = self.mappings() {
+            let mut builder = SyntaxMappingBuilder::new(ast.syntax().clone());
+            builder.map_node(expr.syntax().clone(), ast.expr().unwrap().syntax().clone());
+            builder.finish(&mut mapping);
+        }
+
+        ast.into()
+    }
+
+    pub fn let_stmt(
+        &self,
+        pattern: ast::Pat,
+        ty: Option<ast::Type>,
+        initializer: Option<ast::Expr>,
+    ) -> ast::LetStmt {
+        let ast =
+            make::let_stmt(pattern.clone(), ty.clone(), initializer.clone()).clone_for_update();
+
+        if let Some(mut mapping) = self.mappings() {
+            let mut builder = SyntaxMappingBuilder::new(ast.syntax().clone());
+            builder.map_node(pattern.syntax().clone(), ast.pat().unwrap().syntax().clone());
+            if let Some(input) = ty {
+                builder.map_node(input.syntax().clone(), ast.ty().unwrap().syntax().clone());
+            }
+            if let Some(input) = initializer {
+                builder
+                    .map_node(input.syntax().clone(), ast.initializer().unwrap().syntax().clone());
+            }
+            builder.finish(&mut mapping);
+        }
+
+        ast
+    }
+}
@@ -100,6 +100,10 @@ impl SyntaxEditor {
     pub fn finish(self) -> SyntaxEdit {
         edit_algo::apply_edits(self)
     }
+
+    pub fn add_mappings(&mut self, other: SyntaxMapping) {
+        self.mappings.merge(other);
+    }
 }

 /// Represents a completed [`SyntaxEditor`] operation.
@@ -319,85 +323,14 @@ fn is_ancestor_or_self_of_element(node: &SyntaxElement, ancestor: &SyntaxNode) -
 #[cfg(test)]
 mod tests {
     use expect_test::expect;
-    use itertools::Itertools;

     use crate::{
-        ast::{self, make, HasName},
+        ast::{self, make, syntax_factory::SyntaxFactory},
         AstNode,
     };

     use super::*;

-    fn make_ident_pat(
-        editor: Option<&mut SyntaxEditor>,
-        ref_: bool,
-        mut_: bool,
-        name: ast::Name,
-    ) -> ast::IdentPat {
-        let ast = make::ident_pat(ref_, mut_, name.clone()).clone_for_update();
-
-        if let Some(editor) = editor {
-            let mut mapping = SyntaxMappingBuilder::new(ast.syntax().clone());
-            mapping.map_node(name.syntax().clone(), ast.name().unwrap().syntax().clone());
-            mapping.finish(editor);
-        }
-
-        ast
-    }
-
-    fn make_let_stmt(
-        editor: Option<&mut SyntaxEditor>,
-        pattern: ast::Pat,
-        ty: Option<ast::Type>,
-        initializer: Option<ast::Expr>,
-    ) -> ast::LetStmt {
-        let ast =
-            make::let_stmt(pattern.clone(), ty.clone(), initializer.clone()).clone_for_update();
-
-        if let Some(editor) = editor {
-            let mut mapping = SyntaxMappingBuilder::new(ast.syntax().clone());
-            mapping.map_node(pattern.syntax().clone(), ast.pat().unwrap().syntax().clone());
-            if let Some(input) = ty {
-                mapping.map_node(input.syntax().clone(), ast.ty().unwrap().syntax().clone());
-            }
-            if let Some(input) = initializer {
-                mapping
-                    .map_node(input.syntax().clone(), ast.initializer().unwrap().syntax().clone());
-            }
-            mapping.finish(editor);
-        }
-
-        ast
-    }
-
-    fn make_block_expr(
-        editor: Option<&mut SyntaxEditor>,
-        stmts: impl IntoIterator<Item = ast::Stmt>,
-        tail_expr: Option<ast::Expr>,
-    ) -> ast::BlockExpr {
-        let stmts = stmts.into_iter().collect_vec();
-        let input = stmts.iter().map(|it| it.syntax().clone()).collect_vec();
-
-        let ast = make::block_expr(stmts, tail_expr.clone()).clone_for_update();
-
-        if let Some((editor, stmt_list)) = editor.zip(ast.stmt_list()) {
-            let mut mapping = SyntaxMappingBuilder::new(stmt_list.syntax().clone());
-
-            mapping.map_children(
-                input.into_iter(),
-                stmt_list.statements().map(|it| it.syntax().clone()),
-            );
-
-            if let Some((input, output)) = tail_expr.zip(stmt_list.tail_expr()) {
-                mapping.map_node(input.syntax().clone(), output.syntax().clone());
-            }
-
-            mapping.finish(editor);
-        }
-
-        ast
-    }
-
     #[test]
     fn basic_usage() {
         let root = make::match_arm(
@@ -417,6 +350,7 @@ mod tests {
         let to_replace = root.syntax().descendants().find_map(ast::BinExpr::cast).unwrap();

         let mut editor = SyntaxEditor::new(root.syntax().clone());
+        let make = SyntaxFactory::new();

         let name = make::name("var_name");
         let name_ref = make::name_ref("var_name").clone_for_update();
@@ -425,21 +359,20 @@ mod tests {
         editor.add_annotation(name.syntax(), placeholder_snippet);
         editor.add_annotation(name_ref.syntax(), placeholder_snippet);

-        let make_ident_pat = make_ident_pat(Some(&mut editor), false, false, name);
-        let make_let_stmt = make_let_stmt(
-            Some(&mut editor),
-            make_ident_pat.into(),
-            None,
-            Some(to_replace.clone().into()),
-        );
-        let new_block = make_block_expr(
-            Some(&mut editor),
-            [make_let_stmt.into()],
+        let new_block = make.block_expr(
+            [make
+                .let_stmt(
+                    make.ident_pat(false, false, name.clone()).into(),
+                    None,
+                    Some(to_replace.clone().into()),
+                )
+                .into()],
             Some(to_wrap.clone().into()),
         );

         editor.replace(to_replace.syntax(), name_ref.syntax());
         editor.replace(to_wrap.syntax(), new_block.syntax());
+        editor.add_mappings(make.finish_with_mappings());

         let edit = editor.finish();
@@ -473,11 +406,11 @@ mod tests {
         let second_let = root.syntax().descendants().find_map(ast::LetStmt::cast).unwrap();

         let mut editor = SyntaxEditor::new(root.syntax().clone());
+        let make = SyntaxFactory::without_mappings();

         editor.insert(
             Position::first_child_of(root.stmt_list().unwrap().syntax()),
-            make_let_stmt(
-                None,
+            make.let_stmt(
                 make::ext::simple_ident_pat(make::name("first")).into(),
                 None,
                 Some(make::expr_literal("1").into()),
@@ -487,8 +420,7 @@ mod tests {

         editor.insert(
             Position::after(second_let.syntax()),
-            make_let_stmt(
-                None,
+            make.let_stmt(
                 make::ext::simple_ident_pat(make::name("third")).into(),
                 None,
                 Some(make::expr_literal("3").into()),
@@ -528,19 +460,17 @@ mod tests {
         let second_let = root.syntax().descendants().find_map(ast::LetStmt::cast).unwrap();

         let mut editor = SyntaxEditor::new(root.syntax().clone());
+        let make = SyntaxFactory::new();

-        let new_block_expr =
-            make_block_expr(Some(&mut editor), [], Some(ast::Expr::BlockExpr(inner_block.clone())));
+        let new_block_expr = make.block_expr([], Some(ast::Expr::BlockExpr(inner_block.clone())));

-        let first_let = make_let_stmt(
-            Some(&mut editor),
+        let first_let = make.let_stmt(
             make::ext::simple_ident_pat(make::name("first")).into(),
             None,
             Some(make::expr_literal("1").into()),
         );

-        let third_let = make_let_stmt(
-            Some(&mut editor),
+        let third_let = make.let_stmt(
             make::ext::simple_ident_pat(make::name("third")).into(),
             None,
             Some(make::expr_literal("3").into()),
@@ -552,6 +482,7 @@ mod tests {
         );
         editor.insert(Position::after(second_let.syntax()), third_let.syntax());
         editor.replace(inner_block.syntax(), new_block_expr.syntax());
+        editor.add_mappings(make.finish_with_mappings());

         let edit = editor.finish();
@@ -581,12 +512,11 @@ mod tests {
         let inner_block = root.clone();

         let mut editor = SyntaxEditor::new(root.syntax().clone());
+        let make = SyntaxFactory::new();

-        let new_block_expr =
-            make_block_expr(Some(&mut editor), [], Some(ast::Expr::BlockExpr(inner_block.clone())));
+        let new_block_expr = make.block_expr([], Some(ast::Expr::BlockExpr(inner_block.clone())));

-        let first_let = make_let_stmt(
-            Some(&mut editor),
+        let first_let = make.let_stmt(
             make::ext::simple_ident_pat(make::name("first")).into(),
             None,
             Some(make::expr_literal("1").into()),
@@ -597,6 +527,7 @@ mod tests {
             first_let.syntax(),
         );
         editor.replace(inner_block.syntax(), new_block_expr.syntax());
+        editor.add_mappings(make.finish_with_mappings());

         let edit = editor.finish();
@@ -7,8 +7,6 @@ use rustc_hash::FxHashMap;

 use crate::{SyntaxElement, SyntaxNode};

-use super::SyntaxEditor;
-
 #[derive(Debug, Default)]
 pub struct SyntaxMapping {
     // important information to keep track of:
@@ -209,7 +207,7 @@ impl SyntaxMapping {
         Some(output)
     }

-    fn add_mapping(&mut self, syntax_mapping: SyntaxMappingBuilder) {
+    pub fn add_mapping(&mut self, syntax_mapping: SyntaxMappingBuilder) {
         let SyntaxMappingBuilder { parent_node, node_mappings } = syntax_mapping;

         let parent_entry: u32 = self.entry_parents.len().try_into().unwrap();
@@ -257,8 +255,8 @@ impl SyntaxMappingBuilder {
         }
     }

-    pub fn finish(self, editor: &mut SyntaxEditor) {
-        editor.mappings.add_mapping(self);
+    pub fn finish(self, mappings: &mut SyntaxMapping) {
+        mappings.add_mapping(self);
     }
 }
@@ -1,4 +1,4 @@
-//! Object safe interface for file watching and reading.
+//! Dynamically compatible interface for file watching and reading.
 use std::fmt;

 use paths::{AbsPath, AbsPathBuf};
@@ -232,6 +232,6 @@ impl fmt::Debug for Message {
 }

 #[test]
-fn handle_is_object_safe() {
+fn handle_is_dyn_compatible() {
     fn _assert(_: &dyn Handle) {}
 }
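These renames track Rust's terminology change from "object safe" to "dyn compatible"; the test itself is just a compile-time assertion. The pattern in isolation, with an illustrative trait:

```rust
trait Handle {
    fn invalidate(&self, n: usize);
}

// Compiling at all is the test: `dyn Handle` must be a valid type. A generic
// method or a `Self: Sized` bound on the trait would break this function.
fn _assert(_: &dyn Handle) {}

fn main() {}
```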
@@ -178,7 +178,15 @@ RA_PROFILE=foo|bar|baz   // enabled only selected entries
 RA_PROFILE=*@3>10        // dump everything, up to depth 3, if it takes more than 10 ms
 ```

-In particular, I have `export RA_PROFILE='*>10'` in my shell profile.
+Some rust-analyzer contributors have `export RA_PROFILE='*>10'` in their shell profile.
+
+For machine-readable JSON output, we have the `RA_PROFILE_JSON` env variable. We support
+filtering only by span name:
+
+```
+RA_PROFILE=*                                    // dump everything
+RA_PROFILE_JSON="vfs_load|parallel_prime_caches|discover_command" // dump selected spans
+```

 We also have a "counting" profiler which counts number of instances of popular structs.
 It is enabled by `RA_COUNT=1`.
@@ -42,7 +42,7 @@ The underlying engine makes sure that model is computed lazily (on-demand) and c
 `crates/rust-analyzer/src/bin/main.rs` contains the main function which spawns LSP.
 This is *the* entry point, but it front-loads a lot of complexity, so it's fine to just skim through it.

-`crates/rust-analyzer/src/handlers.rs` implements all LSP requests and is a great place to start if you are already familiar with LSP.
+`crates/rust-analyzer/src/handlers/requests.rs` implements all LSP requests and is a great place to start if you are already familiar with LSP.

 `Analysis` and `AnalysisHost` types define the main API for consumers of IDE services.
@@ -1,5 +1,5 @@
 <!---
-lsp/ext.rs hash: 6292ee8d88d4c9ec
+lsp/ext.rs hash: 90cf7718d54fe3c2

 If you need to change the above hash to make the test pass, please check if you
 need to adjust this doc as well and ping this issue:
@@ -378,7 +378,7 @@ impl AstNode for AssocItem {
 }
 ```

-Shared AST substructures are modeled via (object safe) traits:
+Shared AST substructures are modeled via (dynamically compatible) traits:

 ```rust
 trait HasVisibility: AstNode {
@@ -158,6 +158,11 @@ building from locking the `Cargo.lock` at the expense of duplicating build artif
 Set to `true` to use a subdirectory of the existing target directory or
 set to a path relative to the workspace to use that path.
 --
+[[rust-analyzer.cfg.setTest]]rust-analyzer.cfg.setTest (default: `true`)::
++
+--
+Set `cfg(test)` for local crates. Defaults to true.
+--
 [[rust-analyzer.checkOnSave]]rust-analyzer.checkOnSave (default: `true`)::
 +
 --
@@ -512,6 +512,11 @@
                     "type": "boolean",
                     "default": false
                 },
+                "rust-analyzer.debug.buildBeforeRestart": {
+                    "markdownDescription": "Whether to rebuild the project modules before debugging the same test again",
+                    "type": "boolean",
+                    "default": false
+                },
                 "rust-analyzer.debug.engineSettings": {
                     "type": "object",
                     "default": {},
@@ -848,6 +853,16 @@
                     }
                 }
             },
+            {
+                "title": "cfg",
+                "properties": {
+                    "rust-analyzer.cfg.setTest": {
+                        "markdownDescription": "Set `cfg(test)` for local crates. Defaults to true.",
+                        "default": true,
+                        "type": "boolean"
+                    }
+                }
+            },
             {
                 "title": "general",
                 "properties": {
@@ -23,10 +23,11 @@ export async function bootstrap(

     if (!isValidExecutable(path, config.serverExtraEnv)) {
         throw new Error(
-            `Failed to execute ${path} --version.` + config.serverPath
-                ? `\`config.server.path\` or \`config.serverPath\` has been set explicitly.\
+            `Failed to execute ${path} --version.` +
+                (config.serverPath
+                    ? `\`config.server.path\` or \`config.serverPath\` has been set explicitly.\
             Consider removing this config or making a valid server binary available at that path.`
-                : "",
+                    : ""),
         );
     }
@@ -24,6 +24,7 @@ export class Config {
         "serverPath",
         "server",
         "files",
+        "cfg",
     ].map((opt) => `${this.rootSection}.${opt}`);

     private readonly requiresWindowReloadOpts = ["testExplorer"].map(
@@ -299,6 +300,7 @@ export class Config {
             engine: this.get<string>("debug.engine"),
             engineSettings: this.get<object>("debug.engineSettings") ?? {},
             openDebugPane: this.get<boolean>("debug.openDebugPane"),
+            buildBeforeRestart: this.get<boolean>("debug.buildBeforeRestart"),
             sourceFileMap: sourceFileMap,
         };
     }
@@ -5,12 +5,15 @@ import type * as ra from "./lsp_ext";

 import { Cargo } from "./toolchain";
 import type { Ctx } from "./ctx";
-import { prepareEnv } from "./run";
+import { createTaskFromRunnable, prepareEnv } from "./run";
 import { execute, isCargoRunnableArgs, unwrapUndefinable } from "./util";
 import type { Config } from "./config";

 const debugOutput = vscode.window.createOutputChannel("Debug");

+// Here we want to keep track on everything that's currently running
+const activeDebugSessionIds: string[] = [];
+
 export async function makeDebugConfig(ctx: Ctx, runnable: ra.Runnable): Promise<void> {
     const scope = ctx.activeRustEditor?.document.uri;
     if (!scope) return;
@@ -45,6 +48,8 @@ export async function startDebugSession(ctx: Ctx, runnable: ra.Runnable): Promis
     const wsLaunchSection = vscode.workspace.getConfiguration("launch");
     const configurations = wsLaunchSection.get<any[]>("configurations") || [];

+    // The runnable label is the name of the test with the "test prefix"
+    // e.g. test test_feature_x
     const index = configurations.findIndex((c) => c.name === runnable.label);
     if (-1 !== index) {
         debugConfig = configurations[index];
@@ -168,6 +173,8 @@ async function getDebugConfiguration(
     if (debugConfig.name === "run binary") {
         // The LSP side: crates\rust-analyzer\src\main_loop\handlers.rs,
         // fn to_lsp_runnable(...) with RunnableKind::Bin
+        // FIXME: Neither crates\rust-analyzer\src\main_loop\handlers.rs
+        // nor to_lsp_runnable exist anymore
         debugConfig.name = `run ${path.basename(executable)}`;
     }
@@ -359,3 +366,49 @@ function quote(xs: string[]) {
         })
         .join(" ");
 }
+
+async function recompileTestFromDebuggingSession(session: vscode.DebugSession, ctx: Ctx) {
+    const { cwd, args: sessionArgs }: vscode.DebugConfiguration = session.configuration;
+
+    const args: ra.CargoRunnableArgs = {
+        cwd: cwd,
+        cargoArgs: ["test", "--no-run", "--test", "lib"],
+
+        // The first element of the debug configuration args is the test path e.g. "test_bar::foo::test_a::test_b"
+        executableArgs: sessionArgs,
+    };
+    const runnable: ra.Runnable = {
+        kind: "cargo",
+        label: "compile-test",
+        args,
+    };
+    const task: vscode.Task = await createTaskFromRunnable(runnable, ctx.config);
+
+    // It is not needed to call the language server, since the test path is already resolved in the
+    // configuration option. We can simply call a debug configuration with the --no-run option to compile
+    await vscode.tasks.executeTask(task);
+}
+
+export function initializeDebugSessionTrackingAndRebuild(ctx: Ctx) {
+    vscode.debug.onDidStartDebugSession((session: vscode.DebugSession) => {
+        if (!activeDebugSessionIds.includes(session.id)) {
+            activeDebugSessionIds.push(session.id);
+        }
+    });

+    vscode.debug.onDidTerminateDebugSession(async (session: vscode.DebugSession) => {
+        // The id of the session will be the same when pressing restart the restart button
+        if (activeDebugSessionIds.find((s) => s === session.id)) {
+            await recompileTestFromDebuggingSession(session, ctx);
+        }
+        removeActiveSession(session);
+    });
+}
+
+function removeActiveSession(session: vscode.DebugSession) {
+    const activeSessionId = activeDebugSessionIds.findIndex((id) => id === session.id);
+
+    if (activeSessionId !== -1) {
+        activeDebugSessionIds.splice(activeSessionId, 1);
+    }
+}
@@ -6,6 +6,7 @@ import { type CommandFactory, Ctx, fetchWorkspace } from "./ctx";
 import * as diagnostics from "./diagnostics";
 import { activateTaskProvider } from "./tasks";
 import { setContextValue } from "./util";
+import { initializeDebugSessionTrackingAndRebuild } from "./debug";

 const RUST_PROJECT_CONTEXT_NAME = "inRustProject";
@@ -102,6 +103,10 @@ async function activateServer(ctx: Ctx): Promise<RustAnalyzerExtensionApi> {
         ctx.subscriptions,
     );

+    if (ctx.config.debug.buildBeforeRestart) {
+        initializeDebugSessionTrackingAndRebuild(ctx);
+    }
+
     await ctx.start();
     return ctx;
 }
@@ -36,7 +36,7 @@ export async function selectRunnable(

     if (runnables.length === 0) {
         // it is the debug case, run always has at least 'cargo check ...'
-        // see crates\rust-analyzer\src\main_loop\handlers.rs, handle_runnables
+        // see crates\rust-analyzer\src\handlers\request.rs, handle_runnables
         await vscode.window.showErrorMessage("There's no debug target!");
         quickPick.dispose();
         return;
@@ -29,7 +29,7 @@ export class Cargo {
     static artifactSpec(cargoArgs: string[], executableArgs?: string[]): ArtifactSpec {
         cargoArgs = [...cargoArgs, "--message-format=json"];
         // arguments for a runnable from the quick pick should be updated.
-        // see crates\rust-analyzer\src\main_loop\handlers.rs, handle_code_lens
+        // see crates\rust-analyzer\src\handlers\request.rs, handle_code_lens
         switch (cargoArgs[0]) {
             case "run":
                 cargoArgs[0] = "build";
@@ -1 +1 @@
-1b5aa96d6016bafe50e071b45d4d2e3c90fd766f
+cf24c73141a77db730f4b7fda69dcd7e8b113b51