mirror of https://github.com/rust-lang/rust-analyzer
synced 2024-12-27 13:33:31 +00:00

Auto merge of #17941 - ChayimFriedman2:pre-closure-to-fn, r=Veykril

Preliminary work for #17940. I split the PR as requested and made small commits.

This commit is contained in: commit 0ad26e6025

26 changed files with 816 additions and 167 deletions

@@ -74,6 +74,13 @@ impl LangItemTarget {
             _ => None,
         }
     }
+
+    pub fn as_type_alias(self) -> Option<TypeAliasId> {
+        match self {
+            LangItemTarget::TypeAlias(id) => Some(id),
+            _ => None,
+        }
+    }
 }

 #[derive(Default, Debug, Clone, PartialEq, Eq)]

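Aside (not part of the commit): the accessor added above follows the same shape as the other LangItemTarget helpers. A self-contained toy version of the pattern, using a made-up Target enum rather than the real LangItemTarget, looks roughly like this:

// Illustrative sketch only; `Target` and its variants are stand-ins, not rust-analyzer types.
#[derive(Clone, Copy, Debug, PartialEq)]
enum Target {
    TypeAlias(u32),
    Other,
}

impl Target {
    fn as_type_alias(self) -> Option<u32> {
        match self {
            Target::TypeAlias(id) => Some(id),
            _ => None,
        }
    }
}

fn main() {
    assert_eq!(Target::TypeAlias(7).as_type_alias(), Some(7));
    assert_eq!(Target::Other.as_type_alias(), None);
}
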
@@ -117,11 +124,19 @@ impl LangItems {
             match def {
                 ModuleDefId::TraitId(trait_) => {
                     lang_items.collect_lang_item(db, trait_, LangItemTarget::Trait);
-                    db.trait_data(trait_).items.iter().for_each(|&(_, assoc_id)| {
-                        if let AssocItemId::FunctionId(f) = assoc_id {
-                            lang_items.collect_lang_item(db, f, LangItemTarget::Function);
-                        }
-                    });
+                    db.trait_data(trait_).items.iter().for_each(
+                        |&(_, assoc_id)| match assoc_id {
+                            AssocItemId::FunctionId(f) => {
+                                lang_items.collect_lang_item(db, f, LangItemTarget::Function);
+                            }
+                            AssocItemId::TypeAliasId(alias) => lang_items.collect_lang_item(
+                                db,
+                                alias,
+                                LangItemTarget::TypeAlias,
+                            ),
+                            AssocItemId::ConstId(_) => {}
+                        },
+                    );
                 }
                 ModuleDefId::AdtId(AdtId::EnumId(e)) => {
                     lang_items.collect_lang_item(db, e, LangItemTarget::EnumId);

@@ -453,6 +468,7 @@ language_item_table! {

     Context, sym::Context, context, Target::Struct, GenericRequirement::None;
     FuturePoll, sym::poll, future_poll_fn, Target::Method(MethodKind::Trait { body: false }), GenericRequirement::None;
+    FutureOutput, sym::future_output, future_output, Target::TypeAlias, GenericRequirement::None;

     Option, sym::Option, option_type, Target::Enum, GenericRequirement::None;
     OptionSome, sym::Some, option_some_variant, Target::Variant, GenericRequirement::None;

@@ -467,6 +483,7 @@ language_item_table! {
     IntoFutureIntoFuture, sym::into_future, into_future_fn, Target::Method(MethodKind::Trait { body: false }), GenericRequirement::None;
     IntoIterIntoIter, sym::into_iter, into_iter_fn, Target::Method(MethodKind::Trait { body: false }), GenericRequirement::None;
     IteratorNext, sym::next, next_fn, Target::Method(MethodKind::Trait { body: false}), GenericRequirement::None;
+    Iterator, sym::iterator, iterator, Target::Trait, GenericRequirement::None;

     PinNewUnchecked, sym::new_unchecked, new_unchecked_fn, Target::Method(MethodKind::Inherent), GenericRequirement::None;

@@ -59,6 +59,7 @@ use crate::{
     generics::Generics,
     infer::{coerce::CoerceMany, unify::InferenceTable},
     lower::ImplTraitLoweringMode,
+    mir::MirSpan,
     to_assoc_type_id,
     traits::FnTrait,
     utils::{InTypeConstIdMetadata, UnevaluatedConstEvaluatorFolder},

@@ -553,6 +554,12 @@ pub(crate) struct InferenceContext<'a> {

     // fields related to closure capture
     current_captures: Vec<CapturedItemWithoutTy>,
+    /// A stack that has an entry for each projection in the current capture.
+    ///
+    /// For example, in `a.b.c`, we capture the spans of `a`, `a.b`, and `a.b.c`.
+    /// We do that because sometimes we truncate projections (when a closure captures
+    /// both `a.b` and `a.b.c`), and we want to provide accurate spans in this case.
+    current_capture_span_stack: Vec<MirSpan>,
     current_closure: Option<ClosureId>,
     /// Stores the list of closure ids that need to be analyzed before this closure. See the
     /// comment on `InferenceContext::sort_closures`

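The doc comment above is the heart of this PR: for a captured place like `a.b.c`, the analysis keeps one span per projection step so that, if the capture is later truncated to `a.b`, it can still report the right source range. The snippet below (not part of the commit) is a minimal standalone sketch of that idea, using plain strings instead of rust-analyzer's MirSpan; the names SpanStack and truncate_projections are illustrative only.

/// Minimal model: one recorded "span" per projection step of a captured place.
#[derive(Debug, Clone)]
struct SpanStack(Vec<&'static str>);

impl SpanStack {
    /// Keep the identifier plus `projections` further steps, mirroring how the
    /// real analysis truncates `a.b.c` down to `a.b` while keeping `a.b`'s span.
    fn truncate_projections(&mut self, projections: usize) {
        self.0.truncate(projections + 1);
    }

    /// The reported span is always the last (most inclusive) one, like
    /// `CapturedItem::spans` in this PR.
    fn reported(&self) -> &'static str {
        self.0.last().copied().unwrap_or("<unknown>")
    }
}

fn main() {
    // Spans recorded while walking the expression `a.b.c`.
    let mut stack = SpanStack(vec!["a", "a.b", "a.b.c"]);
    // The closure also captures `a.b`, so the capture is truncated to one projection.
    stack.truncate_projections(1);
    assert_eq!(stack.reported(), "a.b");
    println!("reported span: {}", stack.reported());
}
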
@@ -634,6 +641,7 @@ impl<'a> InferenceContext<'a> {
             breakables: Vec::new(),
             deferred_cast_checks: Vec::new(),
             current_captures: Vec::new(),
+            current_capture_span_stack: Vec::new(),
             current_closure: None,
             deferred_closures: FxHashMap::default(),
             closure_dependencies: FxHashMap::default(),

@@ -18,7 +18,7 @@ use hir_def::{
 use hir_expand::name::Name;
 use intern::sym;
 use rustc_hash::FxHashMap;
-use smallvec::SmallVec;
+use smallvec::{smallvec, SmallVec};
 use stdx::never;

 use crate::{

|
@ -236,7 +236,13 @@ pub enum CaptureKind {
|
||||||
pub struct CapturedItem {
|
pub struct CapturedItem {
|
||||||
pub(crate) place: HirPlace,
|
pub(crate) place: HirPlace,
|
||||||
pub(crate) kind: CaptureKind,
|
pub(crate) kind: CaptureKind,
|
||||||
pub(crate) span: MirSpan,
|
/// The inner vec is the stacks; the outer vec is for each capture reference.
|
||||||
|
///
|
||||||
|
/// Even though we always report only the last span (i.e. the most inclusive span),
|
||||||
|
/// we need to keep them all, since when a closure occurs inside a closure, we
|
||||||
|
/// copy all captures of the inner closure to the outer closure, and then we may
|
||||||
|
/// truncate them, and we want the correct span to be reported.
|
||||||
|
span_stacks: SmallVec<[SmallVec<[MirSpan; 3]>; 3]>,
|
||||||
pub(crate) ty: Binders<Ty>,
|
pub(crate) ty: Binders<Ty>,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -253,6 +259,10 @@ impl CapturedItem {
         self.kind
     }

+    pub fn spans(&self) -> SmallVec<[MirSpan; 3]> {
+        self.span_stacks.iter().map(|stack| *stack.last().expect("empty span stack")).collect()
+    }
+
     pub fn display_place(&self, owner: DefWithBodyId, db: &dyn HirDatabase) -> String {
         let body = db.body(owner);
         let krate = owner.krate(db.upcast());

@@ -314,7 +324,8 @@ impl CapturedItem {
 pub(crate) struct CapturedItemWithoutTy {
     pub(crate) place: HirPlace,
     pub(crate) kind: CaptureKind,
-    pub(crate) span: MirSpan,
+    /// The inner vec is the stacks; the outer vec is for each capture reference.
+    pub(crate) span_stacks: SmallVec<[SmallVec<[MirSpan; 3]>; 3]>,
 }

 impl CapturedItemWithoutTy {

@@ -333,7 +344,7 @@ impl CapturedItemWithoutTy {
         return CapturedItem {
             place: self.place,
             kind: self.kind,
-            span: self.span,
+            span_stacks: self.span_stacks,
             ty: replace_placeholder_with_binder(ctx, ty),
         };

@@ -393,22 +404,26 @@ impl InferenceContext<'_> {
         let r = self.place_of_expr_without_adjust(tgt_expr)?;
         let default = vec![];
         let adjustments = self.result.expr_adjustments.get(&tgt_expr).unwrap_or(&default);
-        apply_adjusts_to_place(r, adjustments)
+        apply_adjusts_to_place(&mut self.current_capture_span_stack, r, adjustments)
     }

+    /// Changes `current_capture_span_stack` to contain the stack of spans for this expr.
     fn place_of_expr_without_adjust(&mut self, tgt_expr: ExprId) -> Option<HirPlace> {
+        self.current_capture_span_stack.clear();
         match &self.body[tgt_expr] {
             Expr::Path(p) => {
                 let resolver = resolver_for_expr(self.db.upcast(), self.owner, tgt_expr);
                 if let Some(ResolveValueResult::ValueNs(ValueNs::LocalBinding(b), _)) =
                     resolver.resolve_path_in_value_ns(self.db.upcast(), p)
                 {
+                    self.current_capture_span_stack.push(MirSpan::ExprId(tgt_expr));
                     return Some(HirPlace { local: b, projections: vec![] });
                 }
             }
             Expr::Field { expr, name: _ } => {
                 let mut place = self.place_of_expr(*expr)?;
                 let field = self.result.field_resolution(tgt_expr)?;
+                self.current_capture_span_stack.push(MirSpan::ExprId(tgt_expr));
                 place.projections.push(ProjectionElem::Field(field));
                 return Some(place);
             }

@@ -418,6 +433,7 @@ impl InferenceContext<'_> {
                     TyKind::Ref(..) | TyKind::Raw(..)
                 ) {
                     let mut place = self.place_of_expr(*expr)?;
+                    self.current_capture_span_stack.push(MirSpan::ExprId(tgt_expr));
                     place.projections.push(ProjectionElem::Deref);
                     return Some(place);
                 }

@@ -427,29 +443,65 @@ impl InferenceContext<'_> {
         None
     }

-    fn push_capture(&mut self, capture: CapturedItemWithoutTy) {
-        self.current_captures.push(capture);
+    fn push_capture(&mut self, place: HirPlace, kind: CaptureKind) {
+        self.current_captures.push(CapturedItemWithoutTy {
+            place,
+            kind,
+            span_stacks: smallvec![self.current_capture_span_stack.iter().copied().collect()],
+        });
     }

-    fn ref_expr(&mut self, expr: ExprId) {
-        if let Some(place) = self.place_of_expr(expr) {
-            self.add_capture(place, CaptureKind::ByRef(BorrowKind::Shared), expr.into());
+    fn is_ref_span(&self, span: MirSpan) -> bool {
+        match span {
+            MirSpan::ExprId(expr) => matches!(self.body[expr], Expr::Ref { .. }),
+            MirSpan::BindingId(_) => true,
+            MirSpan::PatId(_) | MirSpan::SelfParam | MirSpan::Unknown => false,
+        }
+    }
+
+    fn truncate_capture_spans(&self, capture: &mut CapturedItemWithoutTy, mut truncate_to: usize) {
+        // The first span is the identifier, and it must always remain.
+        truncate_to += 1;
+        for span_stack in &mut capture.span_stacks {
+            let mut remained = truncate_to;
+            let mut actual_truncate_to = 0;
+            for &span in &*span_stack {
+                actual_truncate_to += 1;
+                if !self.is_ref_span(span) {
+                    remained -= 1;
+                    if remained == 0 {
+                        break;
+                    }
+                }
+            }
+            if actual_truncate_to < span_stack.len()
+                && self.is_ref_span(span_stack[actual_truncate_to])
+            {
+                // Include the ref operator if there is one, we will fix it later (in `strip_captures_ref_span()`) if it's incorrect.
+                actual_truncate_to += 1;
+            }
+            span_stack.truncate(actual_truncate_to);
+        }
+    }
+
+    fn ref_expr(&mut self, expr: ExprId, place: Option<HirPlace>) {
+        if let Some(place) = place {
+            self.add_capture(place, CaptureKind::ByRef(BorrowKind::Shared));
         }
         self.walk_expr(expr);
     }

-    fn add_capture(&mut self, place: HirPlace, kind: CaptureKind, span: MirSpan) {
+    fn add_capture(&mut self, place: HirPlace, kind: CaptureKind) {
         if self.is_upvar(&place) {
-            self.push_capture(CapturedItemWithoutTy { place, kind, span });
+            self.push_capture(place, kind);
         }
     }

-    fn mutate_expr(&mut self, expr: ExprId) {
-        if let Some(place) = self.place_of_expr(expr) {
+    fn mutate_expr(&mut self, expr: ExprId, place: Option<HirPlace>) {
+        if let Some(place) = place {
             self.add_capture(
                 place,
                 CaptureKind::ByRef(BorrowKind::Mut { kind: MutBorrowKind::Default }),
-                expr.into(),
             );
         }
         self.walk_expr(expr);

@@ -457,12 +509,12 @@ impl InferenceContext<'_> {

     fn consume_expr(&mut self, expr: ExprId) {
         if let Some(place) = self.place_of_expr(expr) {
-            self.consume_place(place, expr.into());
+            self.consume_place(place);
         }
         self.walk_expr(expr);
     }

-    fn consume_place(&mut self, place: HirPlace, span: MirSpan) {
+    fn consume_place(&mut self, place: HirPlace) {
         if self.is_upvar(&place) {
             let ty = place.ty(self);
             let kind = if self.is_ty_copy(ty) {

@@ -470,7 +522,7 @@ impl InferenceContext<'_> {
             } else {
                 CaptureKind::ByValue
             };
-            self.push_capture(CapturedItemWithoutTy { place, kind, span });
+            self.push_capture(place, kind);
         }
     }

@@ -501,8 +553,10 @@ impl InferenceContext<'_> {
             Mutability::Not => CaptureKind::ByRef(BorrowKind::Shared),
         };
         if let Some(place) = self.place_of_expr_without_adjust(tgt_expr) {
-            if let Some(place) = apply_adjusts_to_place(place, rest) {
-                self.add_capture(place, capture_kind, tgt_expr.into());
+            if let Some(place) =
+                apply_adjusts_to_place(&mut self.current_capture_span_stack, place, rest)
+            {
+                self.add_capture(place, capture_kind);
             }
         }
         self.walk_expr_with_adjust(tgt_expr, rest);

@@ -584,11 +638,7 @@ impl InferenceContext<'_> {
                     self.walk_pat(&mut capture_mode, arm.pat);
                 }
                 if let Some(c) = capture_mode {
-                    self.push_capture(CapturedItemWithoutTy {
-                        place: discr_place,
-                        kind: c,
-                        span: (*expr).into(),
-                    })
+                    self.push_capture(discr_place, c);
                 }
             }
         }

@@ -632,10 +682,11 @@ impl InferenceContext<'_> {
                     }
                     false
                 };
+                let place = self.place_of_expr(*expr);
                 if mutability {
-                    self.mutate_expr(*expr);
+                    self.mutate_expr(*expr, place);
                 } else {
-                    self.ref_expr(*expr);
+                    self.ref_expr(*expr, place);
                 }
             } else {
                 self.select_from_expr(*expr);

@@ -650,16 +701,22 @@ impl InferenceContext<'_> {
             | Expr::Cast { expr, type_ref: _ } => {
                 self.consume_expr(*expr);
             }
-            Expr::Ref { expr, rawness: _, mutability } => match mutability {
-                hir_def::type_ref::Mutability::Shared => self.ref_expr(*expr),
-                hir_def::type_ref::Mutability::Mut => self.mutate_expr(*expr),
-            },
+            Expr::Ref { expr, rawness: _, mutability } => {
+                // We need to do this before we push the span so the order will be correct.
+                let place = self.place_of_expr(*expr);
+                self.current_capture_span_stack.push(MirSpan::ExprId(tgt_expr));
+                match mutability {
+                    hir_def::type_ref::Mutability::Shared => self.ref_expr(*expr, place),
+                    hir_def::type_ref::Mutability::Mut => self.mutate_expr(*expr, place),
+                }
+            }
             Expr::BinaryOp { lhs, rhs, op } => {
                 let Some(op) = op else {
                     return;
                 };
                 if matches!(op, BinaryOp::Assignment { .. }) {
-                    self.mutate_expr(*lhs);
+                    let place = self.place_of_expr(*lhs);
+                    self.mutate_expr(*lhs, place);
                     self.consume_expr(*rhs);
                     return;
                 }

@@ -690,7 +747,11 @@ impl InferenceContext<'_> {
                 );
                 let mut cc = mem::take(&mut self.current_captures);
                 cc.extend(captures.iter().filter(|it| self.is_upvar(&it.place)).map(|it| {
-                    CapturedItemWithoutTy { place: it.place.clone(), kind: it.kind, span: it.span }
+                    CapturedItemWithoutTy {
+                        place: it.place.clone(),
+                        kind: it.kind,
+                        span_stacks: it.span_stacks.clone(),
+                    }
                 }));
                 self.current_captures = cc;
             }

@@ -812,10 +873,13 @@ impl InferenceContext<'_> {
     }

     fn restrict_precision_for_unsafe(&mut self) {
-        for capture in &mut self.current_captures {
+        // FIXME: Borrow checker problems without this.
+        let mut current_captures = std::mem::take(&mut self.current_captures);
+        for capture in &mut current_captures {
             let mut ty = self.table.resolve_completely(self.result[capture.place.local].clone());
             if ty.as_raw_ptr().is_some() || ty.is_union() {
                 capture.kind = CaptureKind::ByRef(BorrowKind::Shared);
+                self.truncate_capture_spans(capture, 0);
                 capture.place.projections.truncate(0);
                 continue;
             }

@@ -830,29 +894,35 @@ impl InferenceContext<'_> {
                 );
                 if ty.as_raw_ptr().is_some() || ty.is_union() {
                     capture.kind = CaptureKind::ByRef(BorrowKind::Shared);
+                    self.truncate_capture_spans(capture, i + 1);
                     capture.place.projections.truncate(i + 1);
                     break;
                 }
             }
         }
+        self.current_captures = current_captures;
     }

     fn adjust_for_move_closure(&mut self) {
-        for capture in &mut self.current_captures {
+        // FIXME: Borrow checker won't allow without this.
+        let mut current_captures = std::mem::take(&mut self.current_captures);
+        for capture in &mut current_captures {
             if let Some(first_deref) =
                 capture.place.projections.iter().position(|proj| *proj == ProjectionElem::Deref)
             {
+                self.truncate_capture_spans(capture, first_deref);
                 capture.place.projections.truncate(first_deref);
             }
             capture.kind = CaptureKind::ByValue;
         }
+        self.current_captures = current_captures;
     }

     fn minimize_captures(&mut self) {
-        self.current_captures.sort_by_key(|it| it.place.projections.len());
+        self.current_captures.sort_unstable_by_key(|it| it.place.projections.len());
         let mut hash_map = FxHashMap::<HirPlace, usize>::default();
         let result = mem::take(&mut self.current_captures);
-        for item in result {
+        for mut item in result {
             let mut lookup_place = HirPlace { local: item.place.local, projections: vec![] };
             let mut it = item.place.projections.iter();
             let prev_index = loop {

@@ -860,12 +930,17 @@ impl InferenceContext<'_> {
                     break Some(*k);
                 }
                 match it.next() {
-                    Some(it) => lookup_place.projections.push(it.clone()),
+                    Some(it) => {
+                        lookup_place.projections.push(it.clone());
+                    }
                     None => break None,
                 }
             };
             match prev_index {
                 Some(p) => {
+                    let prev_projections_len = self.current_captures[p].place.projections.len();
+                    self.truncate_capture_spans(&mut item, prev_projections_len);
+                    self.current_captures[p].span_stacks.extend(item.span_stacks);
                     let len = self.current_captures[p].place.projections.len();
                     let kind_after_truncate =
                         item.place.capture_kind_of_truncated_place(item.kind, len);

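Note (not part of the commit): `minimize_captures` folds a more-projected capture (e.g. `a.b.c`) into an already-recorded prefix capture (e.g. `a.b`), and with this PR it now also merges their span stacks. A self-contained sketch of that prefix-merging idea, using strings for places and spans rather than the real HirPlace/MirSpan types, might look like this:

use std::collections::HashMap;

#[derive(Debug)]
struct Capture {
    place: Vec<&'static str>, // projection path, e.g. ["a", "b", "c"]
    span_stacks: Vec<Vec<&'static str>>,
}

/// Keep only the shortest recorded prefix of each path, accumulating the span
/// stacks of the longer paths (a simplified analogue of `minimize_captures`).
fn minimize(mut captures: Vec<Capture>) -> Vec<Capture> {
    captures.sort_unstable_by_key(|c| c.place.len());
    let mut index: HashMap<Vec<&'static str>, usize> = HashMap::new();
    let mut result: Vec<Capture> = Vec::new();
    for capture in captures {
        // Look for an already-kept capture that is a prefix of this one.
        let prefix = (0..=capture.place.len())
            .find_map(|n| index.get(&capture.place[..n].to_vec()).copied());
        match prefix {
            Some(i) => result[i].span_stacks.extend(capture.span_stacks),
            None => {
                index.insert(capture.place.clone(), result.len());
                result.push(capture);
            }
        }
    }
    result
}

fn main() {
    let merged = minimize(vec![
        Capture { place: vec!["a", "b", "c"], span_stacks: vec![vec!["a.b.c"]] },
        Capture { place: vec!["a", "b"], span_stacks: vec![vec!["a.b"]] },
    ]);
    assert_eq!(merged.len(), 1);
    assert_eq!(merged[0].place, vec!["a", "b"]);
    assert_eq!(merged[0].span_stacks.len(), 2);
    println!("{merged:?}");
}
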
@@ -880,31 +955,32 @@ impl InferenceContext<'_> {
         }
     }

-    fn consume_with_pat(&mut self, mut place: HirPlace, pat: PatId) {
-        let cnt = self.result.pat_adjustments.get(&pat).map(|it| it.len()).unwrap_or_default();
-        place.projections = place
-            .projections
-            .iter()
-            .cloned()
-            .chain((0..cnt).map(|_| ProjectionElem::Deref))
-            .collect::<Vec<_>>();
-        match &self.body[pat] {
+    fn consume_with_pat(&mut self, mut place: HirPlace, tgt_pat: PatId) {
+        let adjustments_count =
+            self.result.pat_adjustments.get(&tgt_pat).map(|it| it.len()).unwrap_or_default();
+        place.projections.extend((0..adjustments_count).map(|_| ProjectionElem::Deref));
+        self.current_capture_span_stack
+            .extend((0..adjustments_count).map(|_| MirSpan::PatId(tgt_pat)));
+        'reset_span_stack: {
+        match &self.body[tgt_pat] {
             Pat::Missing | Pat::Wild => (),
             Pat::Tuple { args, ellipsis } => {
                 let (al, ar) = args.split_at(ellipsis.map_or(args.len(), |it| it as usize));
-                let field_count = match self.result[pat].kind(Interner) {
+                let field_count = match self.result[tgt_pat].kind(Interner) {
                     TyKind::Tuple(_, s) => s.len(Interner),
-                    _ => return,
+                    _ => break 'reset_span_stack,
                 };
                 let fields = 0..field_count;
                 let it = al.iter().zip(fields.clone()).chain(ar.iter().rev().zip(fields.rev()));
-                for (arg, i) in it {
+                for (&arg, i) in it {
                     let mut p = place.clone();
+                    self.current_capture_span_stack.push(MirSpan::PatId(arg));
                     p.projections.push(ProjectionElem::Field(Either::Right(TupleFieldId {
                         tuple: TupleId(!0), // dummy this, as its unused anyways
                         index: i as u32,
                     })));
-                    self.consume_with_pat(p, *arg);
+                    self.consume_with_pat(p, arg);
+                    self.current_capture_span_stack.pop();
                 }
             }
             Pat::Or(pats) => {

@@ -913,12 +989,12 @@ impl InferenceContext<'_> {
                 }
             }
             Pat::Record { args, .. } => {
-                let Some(variant) = self.result.variant_resolution_for_pat(pat) else {
-                    return;
+                let Some(variant) = self.result.variant_resolution_for_pat(tgt_pat) else {
+                    break 'reset_span_stack;
                 };
                 match variant {
                     VariantId::EnumVariantId(_) | VariantId::UnionId(_) => {
-                        self.consume_place(place, pat.into())
+                        self.consume_place(place)
                     }
                     VariantId::StructId(s) => {
                         let vd = &*self.db.struct_data(s).variant_data;

@@ -928,11 +1004,13 @@ impl InferenceContext<'_> {
                             continue;
                         };
                         let mut p = place.clone();
+                        self.current_capture_span_stack.push(MirSpan::PatId(arg));
                         p.projections.push(ProjectionElem::Field(Either::Left(FieldId {
                             parent: variant,
                             local_id,
                         })));
                         self.consume_with_pat(p, arg);
+                        self.current_capture_span_stack.pop();
                     }
                 }
             }

@@ -941,53 +1019,65 @@ impl InferenceContext<'_> {
             | Pat::Slice { .. }
             | Pat::ConstBlock(_)
             | Pat::Path(_)
-            | Pat::Lit(_) => self.consume_place(place, pat.into()),
-            Pat::Bind { id: _, subpat: _ } => {
-                let mode = self.result.binding_modes[pat];
+            | Pat::Lit(_) => self.consume_place(place),
+            &Pat::Bind { id, subpat: _ } => {
+                let mode = self.result.binding_modes[tgt_pat];
                 let capture_kind = match mode {
                     BindingMode::Move => {
-                        self.consume_place(place, pat.into());
-                        return;
+                        self.consume_place(place);
+                        break 'reset_span_stack;
                     }
                     BindingMode::Ref(Mutability::Not) => BorrowKind::Shared,
                     BindingMode::Ref(Mutability::Mut) => {
                         BorrowKind::Mut { kind: MutBorrowKind::Default }
                     }
                 };
-                self.add_capture(place, CaptureKind::ByRef(capture_kind), pat.into());
+                self.current_capture_span_stack.push(MirSpan::BindingId(id));
+                self.add_capture(place, CaptureKind::ByRef(capture_kind));
+                self.current_capture_span_stack.pop();
             }
             Pat::TupleStruct { path: _, args, ellipsis } => {
-                let Some(variant) = self.result.variant_resolution_for_pat(pat) else {
-                    return;
+                let Some(variant) = self.result.variant_resolution_for_pat(tgt_pat) else {
+                    break 'reset_span_stack;
                 };
                 match variant {
                     VariantId::EnumVariantId(_) | VariantId::UnionId(_) => {
-                        self.consume_place(place, pat.into())
+                        self.consume_place(place)
                     }
                     VariantId::StructId(s) => {
                         let vd = &*self.db.struct_data(s).variant_data;
-                        let (al, ar) = args.split_at(ellipsis.map_or(args.len(), |it| it as usize));
+                        let (al, ar) =
+                            args.split_at(ellipsis.map_or(args.len(), |it| it as usize));
                         let fields = vd.fields().iter();
-                        let it =
-                            al.iter().zip(fields.clone()).chain(ar.iter().rev().zip(fields.rev()));
-                        for (arg, (i, _)) in it {
+                        let it = al
+                            .iter()
+                            .zip(fields.clone())
+                            .chain(ar.iter().rev().zip(fields.rev()));
+                        for (&arg, (i, _)) in it {
                             let mut p = place.clone();
+                            self.current_capture_span_stack.push(MirSpan::PatId(arg));
                             p.projections.push(ProjectionElem::Field(Either::Left(FieldId {
                                 parent: variant,
                                 local_id: i,
                             })));
-                            self.consume_with_pat(p, *arg);
+                            self.consume_with_pat(p, arg);
+                            self.current_capture_span_stack.pop();
                         }
                     }
                 }
             }
             Pat::Ref { pat, mutability: _ } => {
+                self.current_capture_span_stack.push(MirSpan::PatId(tgt_pat));
                 place.projections.push(ProjectionElem::Deref);
-                self.consume_with_pat(place, *pat)
+                self.consume_with_pat(place, *pat);
+                self.current_capture_span_stack.pop();
             }
             Pat::Box { .. } => (), // not supported
         }
+        }
+        self.current_capture_span_stack
+            .truncate(self.current_capture_span_stack.len() - adjustments_count);
     }

     fn consume_exprs(&mut self, exprs: impl Iterator<Item = ExprId>) {
         for expr in exprs {

@@ -1044,12 +1134,28 @@ impl InferenceContext<'_> {
             CaptureBy::Ref => (),
         }
         self.minimize_captures();
+        self.strip_captures_ref_span();
         let result = mem::take(&mut self.current_captures);
         let captures = result.into_iter().map(|it| it.with_ty(self)).collect::<Vec<_>>();
         self.result.closure_info.insert(closure, (captures, closure_kind));
         closure_kind
     }

+    fn strip_captures_ref_span(&mut self) {
+        // FIXME: Borrow checker won't allow without this.
+        let mut captures = std::mem::take(&mut self.current_captures);
+        for capture in &mut captures {
+            if matches!(capture.kind, CaptureKind::ByValue) {
+                for span_stack in &mut capture.span_stacks {
+                    if self.is_ref_span(span_stack[span_stack.len() - 1]) {
+                        span_stack.truncate(span_stack.len() - 1);
+                    }
+                }
+            }
+        }
+        self.current_captures = captures;
+    }
+
     pub(crate) fn infer_closures(&mut self) {
         let deferred_closures = self.sort_closures();
         for (closure, exprs) in deferred_closures.into_iter().rev() {

@@ -1110,10 +1216,17 @@ impl InferenceContext<'_> {
     }
 }

-fn apply_adjusts_to_place(mut r: HirPlace, adjustments: &[Adjustment]) -> Option<HirPlace> {
+/// Call this only when the last span in the stack isn't a split.
+fn apply_adjusts_to_place(
+    current_capture_span_stack: &mut Vec<MirSpan>,
+    mut r: HirPlace,
+    adjustments: &[Adjustment],
+) -> Option<HirPlace> {
+    let span = *current_capture_span_stack.last().expect("empty capture span stack");
     for adj in adjustments {
         match &adj.kind {
             Adjust::Deref(None) => {
+                current_capture_span_stack.push(span);
                 r.projections.push(ProjectionElem::Deref);
             }
             _ => return None,

@@ -636,6 +636,7 @@ pub enum TerminatorKind {
     },
 }

+// Order of variants in this enum matter: they are used to compare borrow kinds.
 #[derive(Debug, PartialEq, Eq, Clone, Copy, PartialOrd, Ord)]
 pub enum BorrowKind {
     /// Data must be immutable and is aliasable.

@@ -666,15 +667,16 @@ pub enum BorrowKind {
     Mut { kind: MutBorrowKind },
 }

+// Order of variants in this enum matter: they are used to compare borrow kinds.
 #[derive(Debug, PartialEq, Eq, Clone, Copy, PartialOrd, Ord)]
 pub enum MutBorrowKind {
+    /// Data must be immutable but not aliasable. This kind of borrow cannot currently
+    /// be expressed by the user and is used only in implicit closure bindings.
+    ClosureCapture,
     Default,
     /// This borrow arose from method-call auto-ref
     /// (i.e., adjustment::Adjust::Borrow).
     TwoPhasedBorrow,
-    /// Data must be immutable but not aliasable. This kind of borrow cannot currently
-    /// be expressed by the user and is used only in implicit closure bindings.
-    ClosureCapture,
 }

 impl BorrowKind {

@@ -1180,8 +1180,15 @@ impl<'ctx> MirLowerCtx<'ctx> {
                         let placeholder_subst = self.placeholder_subst();
                         let tmp_ty =
                             capture.ty.clone().substitute(Interner, &placeholder_subst);
-                        let tmp: Place = self.temp(tmp_ty, current, capture.span)?.into();
-                        self.push_assignment(current, tmp, Rvalue::Ref(*bk, p), capture.span);
+                        // FIXME: Handle more than one span.
+                        let capture_spans = capture.spans();
+                        let tmp: Place = self.temp(tmp_ty, current, capture_spans[0])?.into();
+                        self.push_assignment(
+                            current,
+                            tmp,
+                            Rvalue::Ref(*bk, p),
+                            capture_spans[0],
+                        );
                         operands.push(Operand::Move(tmp));
                     }
                     CaptureKind::ByValue => operands.push(Operand::Move(p)),

@@ -1,3 +1,4 @@
+mod closure_captures;
 mod coercion;
 mod diagnostics;
 mod display_source_code;

crates/hir-ty/src/tests/closure_captures.rs (new file, 433 lines)

@@ -0,0 +1,433 @@
use base_db::salsa::InternKey;
use expect_test::{expect, Expect};
use hir_def::db::DefDatabase;
use hir_expand::files::InFileWrapper;
use itertools::Itertools;
use span::{HirFileId, TextRange};
use syntax::{AstNode, AstPtr};
use test_fixture::WithFixture;

use crate::db::{HirDatabase, InternedClosureId};
use crate::display::HirDisplay;
use crate::mir::MirSpan;
use crate::test_db::TestDB;

use super::visit_module;

fn check_closure_captures(ra_fixture: &str, expect: Expect) {
    let (db, file_id) = TestDB::with_single_file(ra_fixture);
    let module = db.module_for_file(file_id);
    let def_map = module.def_map(&db);

    let mut defs = Vec::new();
    visit_module(&db, &def_map, module.local_id, &mut |it| defs.push(it));

    let mut captures_info = Vec::new();
    for def in defs {
        let infer = db.infer(def);
        let db = &db;
        captures_info.extend(infer.closure_info.iter().flat_map(|(closure_id, (captures, _))| {
            let closure = db.lookup_intern_closure(InternedClosureId::from_intern_id(closure_id.0));
            let (_, source_map) = db.body_with_source_map(closure.0);
            let closure_text_range = source_map
                .expr_syntax(closure.1)
                .expect("failed to map closure to SyntaxNode")
                .value
                .text_range();
            captures.iter().map(move |capture| {
                fn text_range<N: AstNode>(
                    db: &TestDB,
                    syntax: InFileWrapper<HirFileId, AstPtr<N>>,
                ) -> TextRange {
                    let root = syntax.file_syntax(db);
                    syntax.value.to_node(&root).syntax().text_range()
                }

                // FIXME: Deduplicate this with hir::Local::sources().
                let (body, source_map) = db.body_with_source_map(closure.0);
                let local_text_range = match body.self_param.zip(source_map.self_param_syntax()) {
                    Some((param, source)) if param == capture.local() => {
                        format!("{:?}", text_range(db, source))
                    }
                    _ => source_map
                        .patterns_for_binding(capture.local())
                        .iter()
                        .map(|&definition| {
                            text_range(db, source_map.pat_syntax(definition).unwrap())
                        })
                        .map(|it| format!("{it:?}"))
                        .join(", "),
                };
                let place = capture.display_place(closure.0, db);
                let capture_ty = capture.ty.skip_binders().display_test(db).to_string();
                let spans = capture
                    .spans()
                    .iter()
                    .flat_map(|span| match *span {
                        MirSpan::ExprId(expr) => {
                            vec![text_range(db, source_map.expr_syntax(expr).unwrap())]
                        }
                        MirSpan::PatId(pat) => {
                            vec![text_range(db, source_map.pat_syntax(pat).unwrap())]
                        }
                        MirSpan::BindingId(binding) => source_map
                            .patterns_for_binding(binding)
                            .iter()
                            .map(|pat| text_range(db, source_map.pat_syntax(*pat).unwrap()))
                            .collect(),
                        MirSpan::SelfParam => {
                            vec![text_range(db, source_map.self_param_syntax().unwrap())]
                        }
                        MirSpan::Unknown => Vec::new(),
                    })
                    .sorted_by_key(|it| it.start())
                    .map(|it| format!("{it:?}"))
                    .join(",");

                (closure_text_range, local_text_range, spans, place, capture_ty, capture.kind())
            })
        }));
    }
    captures_info.sort_unstable_by_key(|(closure_text_range, local_text_range, ..)| {
        (closure_text_range.start(), local_text_range.clone())
    });

    let rendered = captures_info
        .iter()
        .map(|(closure_text_range, local_text_range, spans, place, capture_ty, capture_kind)| {
            format!(
                "{closure_text_range:?};{local_text_range};{spans} {capture_kind:?} {place} {capture_ty}"
            )
        })
        .join("\n");

    expect.assert_eq(&rendered);
}

#[test]
fn deref_in_let() {
    check_closure_captures(
        r#"
//- minicore:copy
fn main() {
    let a = &mut true;
    let closure = || { let b = *a; };
}
"#,
        expect!["53..71;20..21;66..68 ByRef(Shared) *a &'? bool"],
    );
}

#[test]
fn deref_then_ref_pattern() {
    check_closure_captures(
        r#"
//- minicore:copy
fn main() {
    let a = &mut true;
    let closure = || { let &mut ref b = a; };
}
"#,
        expect!["53..79;20..21;67..72 ByRef(Shared) *a &'? bool"],
    );
    check_closure_captures(
        r#"
//- minicore:copy
fn main() {
    let a = &mut true;
    let closure = || { let &mut ref mut b = a; };
}
"#,
        expect!["53..83;20..21;67..76 ByRef(Mut { kind: Default }) *a &'? mut bool"],
    );
}

#[test]
fn unique_borrow() {
    check_closure_captures(
        r#"
//- minicore:copy
fn main() {
    let a = &mut true;
    let closure = || { *a = false; };
}
"#,
        expect!["53..71;20..21;58..60 ByRef(Mut { kind: Default }) *a &'? mut bool"],
    );
}

#[test]
fn deref_ref_mut() {
    check_closure_captures(
        r#"
//- minicore:copy
fn main() {
    let a = &mut true;
    let closure = || { let ref mut b = *a; };
}
"#,
        expect!["53..79;20..21;62..71 ByRef(Mut { kind: Default }) *a &'? mut bool"],
    );
}

#[test]
fn let_else_not_consuming() {
    check_closure_captures(
        r#"
//- minicore:copy
fn main() {
    let a = &mut true;
    let closure = || { let _ = *a else { return; }; };
}
"#,
        expect!["53..88;20..21;66..68 ByRef(Shared) *a &'? bool"],
    );
}

#[test]
fn consume() {
    check_closure_captures(
        r#"
//- minicore:copy
struct NonCopy;
fn main() {
    let a = NonCopy;
    let closure = || { let b = a; };
}
"#,
        expect!["67..84;36..37;80..81 ByValue a NonCopy"],
    );
}

#[test]
fn ref_to_upvar() {
    check_closure_captures(
        r#"
//- minicore:copy
struct NonCopy;
fn main() {
    let mut a = NonCopy;
    let closure = || { let b = &a; };
    let closure = || { let c = &mut a; };
}
"#,
        expect![[r#"
            71..89;36..41;84..86 ByRef(Shared) a &'? NonCopy
            109..131;36..41;122..128 ByRef(Mut { kind: Default }) a &'? mut NonCopy"#]],
    );
}

#[test]
fn field() {
    check_closure_captures(
        r#"
//- minicore:copy
struct Foo { a: i32, b: i32 }
fn main() {
    let a = Foo { a: 0, b: 0 };
    let closure = || { let b = a.a; };
}
"#,
        expect!["92..111;50..51;105..108 ByRef(Shared) a.a &'? i32"],
    );
}

#[test]
fn fields_different_mode() {
    check_closure_captures(
        r#"
//- minicore:copy
struct NonCopy;
struct Foo { a: i32, b: i32, c: NonCopy, d: bool }
fn main() {
    let mut a = Foo { a: 0, b: 0 };
    let closure = || {
        let b = &a.a;
        let c = &mut a.b;
        let d = a.c;
    };
}
"#,
        expect![[r#"
            133..212;87..92;154..158 ByRef(Shared) a.a &'? i32
            133..212;87..92;176..184 ByRef(Mut { kind: Default }) a.b &'? mut i32
            133..212;87..92;202..205 ByValue a.c NonCopy"#]],
    );
}

#[test]
fn autoref() {
    check_closure_captures(
        r#"
//- minicore:copy
struct Foo;
impl Foo {
    fn imm(&self) {}
    fn mut_(&mut self) {}
}
fn main() {
    let mut a = Foo;
    let closure = || a.imm();
    let closure = || a.mut_();
}
"#,
        expect![[r#"
            123..133;92..97;126..127 ByRef(Shared) a &'? Foo
            153..164;92..97;156..157 ByRef(Mut { kind: Default }) a &'? mut Foo"#]],
    );
}

#[test]
fn captures_priority() {
    check_closure_captures(
        r#"
//- minicore:copy
struct NonCopy;
fn main() {
    let mut a = &mut true;
    // Max ByRef(Mut { kind: Default })
    let closure = || {
        *a = false;
        let b = &mut a;
    };
    // Max ByValue
    let mut a = NonCopy;
    let closure = || {
        let b = a;
        let c = &mut a;
        let d = &a;
    };
}
"#,
        expect![[r#"
            113..167;36..41;127..128,154..160 ByRef(Mut { kind: Default }) a &'? mut &'? mut bool
            231..304;196..201;252..253,276..277,296..297 ByValue a NonCopy"#]],
    );
}

#[test]
fn let_underscore() {
    check_closure_captures(
        r#"
//- minicore:copy
fn main() {
    let mut a = true;
    let closure = || { let _ = a; };
}
"#,
        expect![""],
    );
}

#[test]
fn match_wildcard() {
    check_closure_captures(
        r#"
//- minicore:copy
struct NonCopy;
fn main() {
    let mut a = NonCopy;
    let closure = || match a {
        _ => {}
    };
    let closure = || match a {
        ref b => {}
    };
    let closure = || match a {
        ref mut b => {}
    };
}
"#,
        expect![[r#"
            125..163;36..41;134..135 ByRef(Shared) a &'? NonCopy
            183..225;36..41;192..193 ByRef(Mut { kind: Default }) a &'? mut NonCopy"#]],
    );
}

#[test]
fn multiple_bindings() {
    check_closure_captures(
        r#"
//- minicore:copy
fn main() {
    let mut a = false;
    let mut closure = || { let (b | b) = a; };
}
"#,
        expect!["57..80;20..25;76..77,76..77 ByRef(Shared) a &'? bool"],
    );
}

#[test]
fn multiple_usages() {
    check_closure_captures(
        r#"
//- minicore:copy
fn main() {
    let mut a = false;
    let mut closure = || {
        let b = &a;
        let c = &a;
        let d = &mut a;
        a = true;
    };
}
"#,
        expect!["57..149;20..25;78..80,98..100,118..124,134..135 ByRef(Mut { kind: Default }) a &'? mut bool"],
    );
}

#[test]
fn ref_then_deref() {
    check_closure_captures(
        r#"
//- minicore:copy
fn main() {
    let mut a = false;
    let mut closure = || { let b = *&mut a; };
}
"#,
        expect!["57..80;20..25;71..77 ByRef(Mut { kind: Default }) a &'? mut bool"],
    );
}

#[test]
fn ref_of_ref() {
    check_closure_captures(
        r#"
//- minicore:copy
fn main() {
    let mut a = &false;
    let closure = || { let b = &a; };
    let closure = || { let b = &mut a; };
    let a = &mut false;
    let closure = || { let b = &a; };
    let closure = || { let b = &mut a; };
}
"#,
        expect![[r#"
            54..72;20..25;67..69 ByRef(Shared) a &'? &'? bool
            92..114;20..25;105..111 ByRef(Mut { kind: Default }) a &'? mut &'? bool
            158..176;124..125;171..173 ByRef(Shared) a &'? &'? mut bool
            196..218;124..125;209..215 ByRef(Mut { kind: Default }) a &'? mut &'? mut bool"#]],
    );
}

#[test]
fn multiple_capture_usages() {
    check_closure_captures(
        r#"
//- minicore:copy
struct A { a: i32, b: bool }
fn main() {
    let mut a = A { a: 123, b: false };
    let closure = |$0| {
        let b = a.b;
        a = A { a: 456, b: true };
    };
    closure();
}
"#,
        expect!["99..165;49..54;120..121,133..134 ByRef(Mut { kind: Default }) a &'? mut A"],
    );
}

@@ -78,7 +78,7 @@ use hir_ty::{
 use itertools::Itertools;
 use nameres::diagnostics::DefDiagnosticKind;
 use rustc_hash::FxHashSet;
-use span::{Edition, EditionedFileId, FileId, MacroCallId};
+use span::{Edition, EditionedFileId, FileId, MacroCallId, SyntaxContextId};
 use stdx::{impl_from, never};
 use syntax::{
     ast::{self, HasAttrs as _, HasGenericParams, HasName},

@@ -4133,6 +4133,7 @@ impl ClosureCapture {
     }
 }

+#[derive(Clone, Copy, PartialEq, Eq)]
 pub enum CaptureKind {
     SharedRef,
     UniqueSharedRef,

@@ -4378,6 +4379,22 @@ impl Type {
         method_resolution::implements_trait(&canonical_ty, db, &self.env, trait_)
     }

+    /// This does **not** resolve `IntoFuture`, only `Future`.
+    pub fn future_output(self, db: &dyn HirDatabase) -> Option<Type> {
+        let future_output =
+            db.lang_item(self.env.krate, LangItem::FutureOutput)?.as_type_alias()?;
+        self.normalize_trait_assoc_type(db, &[], future_output.into())
+    }
+
+    /// This does **not** resolve `IntoIterator`, only `Iterator`.
+    pub fn iterator_item(self, db: &dyn HirDatabase) -> Option<Type> {
+        let iterator_trait = db.lang_item(self.env.krate, LangItem::Iterator)?.as_trait()?;
+        let iterator_item = db
+            .trait_data(iterator_trait)
+            .associated_type_by_name(&Name::new_symbol(sym::Item.clone(), SyntaxContextId::ROOT))?;
+        self.normalize_trait_assoc_type(db, &[], iterator_item.into())
+    }
+
     /// Checks that particular type `ty` implements `std::ops::FnOnce`.
     ///
     /// This function can be used to check if a particular type is callable, since FnOnce is a

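Note (not part of the commit): the two helpers above normalize `<T as Future>::Output` and `<T as Iterator>::Item` directly, without first resolving `IntoFuture`/`IntoIterator`. As a plain-Rust illustration of what "the Output of a future" means, independent of rust-analyzer's Type API:

use std::future::Future;
use std::marker::PhantomData;

async fn get_number() -> u32 {
    42
}

/// Capture a future's `Output` type at compile time, analogous to what
/// `Type::future_output` computes for a `Type` during analysis.
fn output_of<F: Future>(_fut: &F) -> PhantomData<F::Output> {
    PhantomData
}

fn main() {
    let fut = get_number();
    // `<impl Future as Future>::Output` is `u32` here; `Type::future_output`
    // answers the same question for types seen by the IDE.
    let _proof: PhantomData<u32> = output_of(&fut);
}
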
@@ -1617,6 +1617,7 @@ fn format_function(
         fun.control_flow.is_async,
         fun.mods.is_const,
         fun.control_flow.is_unsafe,
+        false,
     )
 }

@@ -122,6 +122,7 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
     let is_async = method_source.async_token().is_some();
     let is_const = method_source.const_token().is_some();
     let is_unsafe = method_source.unsafe_token().is_some();
+    let is_gen = method_source.gen_token().is_some();

     let fn_name = make::name(&name);

@@ -154,6 +155,7 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
         is_async,
         is_const,
         is_unsafe,
+        is_gen,
     )
     .clone_for_update();

@@ -740,6 +740,7 @@ fn func_assoc_item(
         item.async_token().is_some(),
         item.const_token().is_some(),
         item.unsafe_token().is_some(),
+        item.gen_token().is_some(),
     )
     .clone_for_update();

@@ -365,6 +365,7 @@ impl FunctionBuilder {
         self.is_async,
         false, // FIXME : const and unsafe are not handled yet.
         false,
+        false,
     )
     .clone_for_update();

@@ -261,7 +261,19 @@ fn generate_getter_from_info(
     let ret_type = Some(make::ret_type(ty));
     let body = make::block_expr([], Some(body));

-    make::fn_(strukt.visibility(), fn_name, None, None, params, body, ret_type, false, false, false)
+    make::fn_(
+        strukt.visibility(),
+        fn_name,
+        None,
+        None,
+        params,
+        body,
+        ret_type,
+        false,
+        false,
+        false,
+        false,
+    )
 }

 fn generate_setter_from_info(info: &AssistInfo, record_field_info: &RecordFieldInfo) -> ast::Fn {
@@ -285,7 +297,19 @@ fn generate_setter_from_info(info: &AssistInfo, record_field_info: &RecordFieldI
     let body = make::block_expr([assign_stmt.into()], None);

     // Make the setter fn
-    make::fn_(strukt.visibility(), fn_name, None, None, params, body, None, false, false, false)
+    make::fn_(
+        strukt.visibility(),
+        fn_name,
+        None,
+        None,
+        params,
+        body,
+        None,
+        false,
+        false,
+        false,
+        false,
+    )
 }

 fn extract_and_parse(
@@ -115,6 +115,7 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
         false,
         false,
         false,
+        false,
     )
     .clone_for_update();
     fn_.indent(1.into());
@@ -1,7 +1,7 @@
 use itertools::Itertools;
 use syntax::{
     ast::{self, make, AstNode, AstToken},
-    match_ast, ted, NodeOrToken, SyntaxElement, TextRange, TextSize, T,
+    match_ast, ted, Edition, NodeOrToken, SyntaxElement, TextRange, TextSize, T,
 };

 use crate::{AssistContext, AssistId, AssistKind, Assists};

@@ -77,7 +77,7 @@ fn compute_dbg_replacement(macro_expr: ast::MacroExpr) -> Option<(TextRange, Opt
     let input_expressions = input_expressions
         .into_iter()
         .filter_map(|(is_sep, group)| (!is_sep).then_some(group))
-        .map(|mut tokens| syntax::hacks::parse_expr_from_str(&tokens.join("")))
+        .map(|mut tokens| syntax::hacks::parse_expr_from_str(&tokens.join(""), Edition::CURRENT))
         .collect::<Option<Vec<ast::Expr>>>()?;

     let parent = macro_expr.syntax().parent()?;
@@ -14,7 +14,7 @@ use ide_db::{
 use itertools::Itertools;
 use syntax::{
     ast::{self, AttrKind},
-    AstNode, SyntaxKind, T,
+    AstNode, Edition, SyntaxKind, T,
 };

 use crate::{

@@ -373,7 +373,9 @@ fn parse_comma_sep_expr(input: ast::TokenTree) -> Option<Vec<ast::Expr>> {
         input_expressions
             .into_iter()
             .filter_map(|(is_sep, group)| (!is_sep).then_some(group))
-            .filter_map(|mut tokens| syntax::hacks::parse_expr_from_str(&tokens.join("")))
+            .filter_map(|mut tokens| {
+                syntax::hacks::parse_expr_from_str(&tokens.join(""), Edition::CURRENT)
+            })
             .collect::<Vec<ast::Expr>>(),
     )
 }
@@ -477,10 +477,12 @@ pub fn parse_tt_as_comma_sep_paths(
         .into_iter()
         .filter_map(|(is_sep, group)| (!is_sep).then_some(group))
         .filter_map(|mut tokens| {
-            syntax::hacks::parse_expr_from_str(&tokens.join("")).and_then(|expr| match expr {
+            syntax::hacks::parse_expr_from_str(&tokens.join(""), Edition::CURRENT).and_then(
+                |expr| match expr {
                     ast::Expr::PathExpr(it) => it.path(),
                     _ => None,
-            })
+                },
+            )
         })
         .collect();
     Some(paths)
@@ -8465,7 +8465,7 @@ impl Iterator for S {
         file_id: FileId(
             1,
         ),
-        full_range: 7800..8008,
+        full_range: 7800..8042,
         focus_range: 7865..7871,
         name: "Future",
         kind: Trait,

@@ -8479,8 +8479,8 @@ impl Iterator for S {
         file_id: FileId(
             1,
         ),
-        full_range: 8638..9104,
-        focus_range: 8682..8690,
+        full_range: 8672..9171,
+        focus_range: 8749..8757,
         name: "Iterator",
         kind: Trait,
         container_name: "iterator",
@@ -240,6 +240,7 @@ define_symbols! {
     fundamental,
     future_trait,
     future,
+    future_output,
     Future,
     ge,
     get_context,

@@ -274,6 +275,7 @@ define_symbols! {
     iter_mut,
     iter,
     Iterator,
+    iterator,
     keyword,
     lang,
     le,
@@ -190,7 +190,7 @@ UseTreeList =

 Fn =
   Attr* Visibility?
-  'default'? 'const'? 'async'? 'unsafe'? Abi?
+  'default'? 'const'? 'async'? 'gen'? 'unsafe'? Abi?
   'fn' Name GenericParamList? ParamList RetType? WhereClause?
   (body:BlockExpr | ';')

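The grammar change only affects what the parser accepts: `gen` becomes a recognized function modifier between `async` and `unsafe`. A hedged, parse-level illustration (`gen` functions are an experimental Rust feature, so this shows token order the rule covers, not code that compiles on stable):

    // Accepted by the updated Fn rule; modifier order: default? const? async? gen? unsafe?
    gen fn numbers() {
        // body elided
    }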
@@ -8,7 +8,7 @@ use crate::{
     ted, AstToken, NodeOrToken, SyntaxElement, SyntaxNode, SyntaxToken,
 };

-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
 pub struct IndentLevel(pub u8);

 impl From<u8> for IndentLevel {
@@ -486,6 +486,8 @@ impl Fn {
     #[inline]
     pub fn fn_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![fn]) }
     #[inline]
+    pub fn gen_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![gen]) }
+    #[inline]
     pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
 }

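The generated accessor mirrors the other modifier tokens on `ast::Fn`. A small sketch of how a caller might query it; the helper name is chosen only for illustration:

    // Illustrative only: true when the parsed function carries a `gen` modifier.
    fn is_gen_fn(func: &ast::Fn) -> bool {
        func.gen_token().is_some()
    }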
@@ -1035,6 +1035,7 @@ pub fn fn_(
     is_async: bool,
     is_const: bool,
     is_unsafe: bool,
+    is_gen: bool,
 ) -> ast::Fn {
     let type_params = match type_params {
         Some(type_params) => format!("{type_params}"),

@@ -1056,9 +1057,10 @@ pub fn fn_(
     let async_literal = if is_async { "async " } else { "" };
     let const_literal = if is_const { "const " } else { "" };
     let unsafe_literal = if is_unsafe { "unsafe " } else { "" };
+    let gen_literal = if is_gen { "gen " } else { "" };

     ast_from_text(&format!(
-        "{visibility}{async_literal}{const_literal}{unsafe_literal}fn {fn_name}{type_params}{params} {ret_type}{where_clause}{body}",
+        "{visibility}{const_literal}{async_literal}{gen_literal}{unsafe_literal}fn {fn_name}{type_params}{params} {ret_type}{where_clause}{body}",
     ))
 }
 pub fn struct_(
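A hedged sketch of a call through the extended `make::fn_` signature. The `params` and `body` bindings are assumed to be built with the other `make::` constructors elsewhere; only the trailing booleans are relevant to this change, and the rendered modifier order is now `const async gen unsafe`:

    // Illustrative only: the four trailing flags are is_async, is_const, is_unsafe, is_gen.
    let gen_fn = make::fn_(
        None,                   // visibility
        make::name("numbers"),
        None,                   // generic params
        None,                   // where clause
        params,                 // assumed ast::ParamList
        body,                   // assumed ast::BlockExpr
        None,                   // return type
        false,                  // is_async
        false,                  // is_const
        false,                  // is_unsafe
        true,                   // is_gen -> renders "gen fn numbers() { ... }"
    );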
@@ -17,7 +17,7 @@ use crate::{
     ted, NodeOrToken, SmolStr, SyntaxElement, SyntaxToken, TokenText, T,
 };

-use super::{RangeItem, RangeOp};
+use super::{GenericParam, RangeItem, RangeOp};

 impl ast::Lifetime {
     pub fn text(&self) -> TokenText<'_> {

@@ -822,6 +822,15 @@ pub enum TypeOrConstParam {
     Const(ast::ConstParam),
 }

+impl From<TypeOrConstParam> for GenericParam {
+    fn from(value: TypeOrConstParam) -> Self {
+        match value {
+            TypeOrConstParam::Type(it) => GenericParam::TypeParam(it),
+            TypeOrConstParam::Const(it) => GenericParam::ConstParam(it),
+        }
+    }
+}
+
 impl TypeOrConstParam {
     pub fn name(&self) -> Option<ast::Name> {
         match self {
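A hedged usage sketch for the new conversion; `param` stands in for an `ast::TypeOrConstParam` obtained from an existing generic-parameter list:

    // Illustrative only: widen a type-or-const parameter into the general GenericParam enum.
    let generic: ast::GenericParam = param.into();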
@@ -6,9 +6,9 @@ use parser::Edition;

 use crate::{ast, AstNode};

-pub fn parse_expr_from_str(s: &str) -> Option<ast::Expr> {
+pub fn parse_expr_from_str(s: &str, edition: Edition) -> Option<ast::Expr> {
     let s = s.trim();
-    let file = ast::SourceFile::parse(&format!("const _: () = {s};"), Edition::CURRENT);
+    let file = ast::SourceFile::parse(&format!("const _: () = {s};"), edition);
     let expr = file.syntax_node().descendants().find_map(ast::Expr::cast)?;
     if expr.syntax().text() != s {
         return None;
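Callers now pass the edition explicitly instead of the helper hard-coding `Edition::CURRENT`. A minimal sketch of the updated call shape, with an arbitrary expression string:

    // Illustrative only: parse a standalone expression under an explicit edition.
    let expr = syntax::hacks::parse_expr_from_str("1 + 1", Edition::CURRENT);
    assert!(expr.is_some());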
@@ -1196,6 +1196,7 @@ pub mod future {
         #[doc(notable_trait)]
         #[lang = "future_trait"]
         pub trait Future {
+            #[lang = "future_output"]
             type Output;
             #[lang = "poll"]
             fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output>;

@@ -1293,6 +1294,7 @@ pub mod iter {
     mod traits {
         mod iterator {
             #[doc(notable_trait)]
+            #[lang = "iterator"]
             pub trait Iterator {
                 type Item;
                 #[lang = "next"]
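With `future_output` registered on `Future::Output` in the test fixture, the associated type can be resolved through the lang-item table instead of a by-name lookup. A hypothetical helper mirroring `iterator_item` above, assuming a `LangItem::FutureOutput` variant and an `as_type_alias` accessor on the lang-item target; this exact function is not part of the diff:

    // Hypothetical sketch: resolve Future::Output via the future_output lang item.
    pub fn future_output(self, db: &dyn HirDatabase) -> Option<Type> {
        let output = db.lang_item(self.env.krate, LangItem::FutureOutput)?.as_type_alias()?;
        self.normalize_trait_assoc_type(db, &[], output.into())
    }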