⬆️ rust-analyzer

Laurențiu Nicola 2022-11-09 21:49:10 +02:00
parent c60b1f6414
commit 79923c382a
74 changed files with 1216 additions and 422 deletions

View file

@@ -13,8 +13,7 @@ Forum for questions: https://users.rust-lang.org/c/ide/14
Before submitting, please make sure that you're not running into one of these known issues:
-1. extension doesn't load in VSCodium: #11080
-2. on-the-fly diagnostics are mostly unimplemented (`cargo check` diagnostics will be shown when saving a file): #3107
+1. on-the-fly diagnostics are mostly unimplemented (`cargo check` diagnostics will be shown when saving a file): #3107
Otherwise please try to provide information which will help us to fix the issue faster. Minimal reproducible examples with few dependencies are especially lovely <3.
-->

View file

@@ -2,8 +2,8 @@
name: Critical Nightly Regression
about: You are using nightly rust-analyzer and the latest version is unusable.
title: ''
-labels: ''
+labels: 'Broken Window'
-assignees: 'matklad'
+assignees: ''
---
@@ -14,4 +14,3 @@ Please try to provide information which will help us to fix the issue faster. Mi
-->
This is a serious regression in nightly and it's important to fix it before the next release.
-@matklad, please take a look.

View file

@@ -257,8 +257,7 @@ jobs:
- name: Publish Extension (OpenVSX, release)
  if: github.ref == 'refs/heads/release' && (github.repository == 'rust-analyzer/rust-analyzer' || github.repository == 'rust-lang/rust-analyzer')
  working-directory: ./editors/code
-# token from https://dev.azure.com/rust-analyzer/
-run: npx ovsx publish --pat ${{ secrets.OPENVSX_TOKEN }} --packagePath ../../dist/rust-analyzer-*.vsix || true
+run: npx ovsx publish --pat ${{ secrets.OPENVSX_TOKEN }} --packagePath ../../dist/rust-analyzer-*.vsix
  timeout-minutes: 2
- name: Publish Extension (Code Marketplace, nightly)
@@ -269,5 +268,5 @@ jobs:
- name: Publish Extension (OpenVSX, nightly)
  if: github.ref != 'refs/heads/release' && (github.repository == 'rust-analyzer/rust-analyzer' || github.repository == 'rust-lang/rust-analyzer')
  working-directory: ./editors/code
-run: npx ovsx publish --pat ${{ secrets.OPENVSX_TOKEN }} --packagePath ../../dist/rust-analyzer-*.vsix || true
+run: npx ovsx publish --pat ${{ secrets.OPENVSX_TOKEN }} --packagePath ../../dist/rust-analyzer-*.vsix
  timeout-minutes: 2

View file

@@ -143,9 +143,12 @@ pub(crate) fn print_type_ref(type_ref: &TypeRef, buf: &mut dyn Write) -> fmt::Re
print_type_ref(elem, buf)?;
write!(buf, "]")?;
}
-TypeRef::Fn(args_and_ret, varargs) => {
+TypeRef::Fn(args_and_ret, varargs, is_unsafe) => {
let ((_, return_type), args) =
args_and_ret.split_last().expect("TypeRef::Fn is missing return type");
+if *is_unsafe {
+write!(buf, "unsafe ")?;
+}
write!(buf, "fn(")?;
for (i, (_, typeref)) in args.iter().enumerate() {
if i != 0 {

View file

@@ -119,7 +119,7 @@ pub enum TypeRef {
Array(Box<TypeRef>, ConstScalarOrPath),
Slice(Box<TypeRef>),
/// A fn pointer. Last element of the vector is the return type.
-Fn(Vec<(Option<Name>, TypeRef)>, bool /*varargs*/),
+Fn(Vec<(Option<Name>, TypeRef)>, bool /*varargs*/, bool /*is_unsafe*/),
ImplTrait(Vec<Interned<TypeBound>>),
DynTrait(Vec<Interned<TypeBound>>),
Macro(AstId<ast::MacroCall>),
@@ -229,7 +229,7 @@ impl TypeRef {
Vec::new()
};
params.push((None, ret_ty));
-TypeRef::Fn(params, is_varargs)
+TypeRef::Fn(params, is_varargs, inner.unsafe_token().is_some())
}
// for types are close enough for our purposes to the inner type for now...
ast::Type::ForType(inner) => TypeRef::from_ast_opt(ctx, inner.ty()),
@@ -263,7 +263,7 @@ impl TypeRef {
fn go(type_ref: &TypeRef, f: &mut impl FnMut(&TypeRef)) {
f(type_ref);
match type_ref {
-TypeRef::Fn(params, _) => {
+TypeRef::Fn(params, _, _) => {
params.iter().for_each(|(_, param_type)| go(param_type, f))
}
TypeRef::Tuple(types) => types.iter().for_each(|t| go(t, f)),
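Not part of the diff: a minimal illustration of the source-level distinction the new `is_unsafe` flag tracks, using a made-up alias name.

// An unsafe fn pointer type. With this change, rust-analyzer's TypeRef keeps
// the `unsafe` qualifier when lowering the written type instead of dropping it.
type UnsafeCallback = unsafe fn(*const u8, usize) -> bool;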

View file

@@ -1187,8 +1187,11 @@ impl HirDisplay for TypeRef {
inner.hir_fmt(f)?;
write!(f, "]")?;
}
-TypeRef::Fn(parameters, is_varargs) => {
+&TypeRef::Fn(ref parameters, is_varargs, is_unsafe) => {
// FIXME: Function pointer qualifiers.
+if is_unsafe {
+write!(f, "unsafe ")?;
+}
write!(f, "fn(")?;
if let Some(((_, return_type), function_parameters)) = parameters.split_last() {
for index in 0..function_parameters.len() {
@@ -1203,7 +1206,7 @@ impl HirDisplay for TypeRef {
write!(f, ", ")?;
}
}
-if *is_varargs {
+if is_varargs {
write!(f, "{}...", if parameters.len() == 1 { "" } else { ", " })?;
}
write!(f, ")")?;

View file

@@ -1020,7 +1020,7 @@ impl Expectation {
/// The primary use case is where the expected type is a fat pointer,
/// like `&[isize]`. For example, consider the following statement:
///
/// let x: &[isize] = &[1, 2, 3];
///
/// In this case, the expected type for the `&[1, 2, 3]` expression is
/// `&[isize]`. If however we were to say that `[1, 2, 3]` has the

View file

@@ -85,6 +85,7 @@ impl<'a> InferenceContext<'a> {
let ty = match &self.body[tgt_expr] {
Expr::Missing => self.err_ty(),
&Expr::If { condition, then_branch, else_branch } => {
+let expected = &expected.adjust_for_branches(&mut self.table);
self.infer_expr(
condition,
&Expectation::has_type(TyKind::Scalar(Scalar::Bool).intern(Interner)),
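Not part of the diff: a rough illustration of the scenario this expectation adjustment targets (the same shape as the `adjust_for_branches` tests added later in this commit), with made-up names.

fn demo() {
    // Both branches must agree on one type. Adjusting the branch expectation lets
    // `&""` (a `&&str`) and `""` (a `&str`) coerce to `&str` instead of mismatching.
    let f = || if true { &"" } else { "" };
    let _: &str = f();
}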

View file

@@ -38,10 +38,12 @@ use std::sync::Arc;
use chalk_ir::{
fold::{Shift, TypeFoldable},
interner::HasInterner,
-NoSolution,
+NoSolution, UniverseIndex,
};
use hir_def::{expr::ExprId, type_ref::Rawness, TypeOrConstParamId};
+use hir_expand::name;
use itertools::Either;
+use traits::FnTrait;
use utils::Generics;
use crate::{consteval::unknown_const, db::HirDatabase, utils::generics};
@@ -208,6 +210,7 @@ pub(crate) fn make_binders<T: HasInterner<Interner = Interner>>(
pub struct CallableSig {
params_and_return: Arc<[Ty]>,
is_varargs: bool,
+safety: Safety,
}
has_interner!(CallableSig);
@@ -216,9 +219,14 @@ has_interner!(CallableSig);
pub type PolyFnSig = Binders<CallableSig>;
impl CallableSig {
-pub fn from_params_and_return(mut params: Vec<Ty>, ret: Ty, is_varargs: bool) -> CallableSig {
+pub fn from_params_and_return(
+mut params: Vec<Ty>,
+ret: Ty,
+is_varargs: bool,
+safety: Safety,
+) -> CallableSig {
params.push(ret);
-CallableSig { params_and_return: params.into(), is_varargs }
+CallableSig { params_and_return: params.into(), is_varargs, safety }
}
pub fn from_fn_ptr(fn_ptr: &FnPointer) -> CallableSig {
@@ -235,13 +243,14 @@ impl CallableSig {
.map(|arg| arg.assert_ty_ref(Interner).clone())
.collect(),
is_varargs: fn_ptr.sig.variadic,
+safety: fn_ptr.sig.safety,
}
}
pub fn to_fn_ptr(&self) -> FnPointer {
FnPointer {
num_binders: 0,
-sig: FnSig { abi: (), safety: Safety::Safe, variadic: self.is_varargs },
+sig: FnSig { abi: (), safety: self.safety, variadic: self.is_varargs },
substitution: FnSubst(Substitution::from_iter(
Interner,
self.params_and_return.iter().cloned(),
@@ -266,7 +275,11 @@ impl TypeFoldable<Interner> for CallableSig {
) -> Result<Self, E> {
let vec = self.params_and_return.to_vec();
let folded = vec.try_fold_with(folder, outer_binder)?;
-Ok(CallableSig { params_and_return: folded.into(), is_varargs: self.is_varargs })
+Ok(CallableSig {
+params_and_return: folded.into(),
+is_varargs: self.is_varargs,
+safety: self.safety,
+})
}
}
@@ -508,3 +521,68 @@ where
});
Canonical { value, binders: chalk_ir::CanonicalVarKinds::from_iter(Interner, kinds) }
}
pub fn callable_sig_from_fnonce(
self_ty: &Canonical<Ty>,
env: Arc<TraitEnvironment>,
db: &dyn HirDatabase,
) -> Option<CallableSig> {
let krate = env.krate;
let fn_once_trait = FnTrait::FnOnce.get_id(db, krate)?;
let output_assoc_type = db.trait_data(fn_once_trait).associated_type_by_name(&name![Output])?;
let mut kinds = self_ty.binders.interned().to_vec();
let b = TyBuilder::trait_ref(db, fn_once_trait);
if b.remaining() != 2 {
return None;
}
let fn_once = b
.push(self_ty.value.clone())
.fill_with_bound_vars(DebruijnIndex::INNERMOST, kinds.len())
.build();
kinds.extend(fn_once.substitution.iter(Interner).skip(1).map(|x| {
let vk = match x.data(Interner) {
chalk_ir::GenericArgData::Ty(_) => {
chalk_ir::VariableKind::Ty(chalk_ir::TyVariableKind::General)
}
chalk_ir::GenericArgData::Lifetime(_) => chalk_ir::VariableKind::Lifetime,
chalk_ir::GenericArgData::Const(c) => {
chalk_ir::VariableKind::Const(c.data(Interner).ty.clone())
}
};
chalk_ir::WithKind::new(vk, UniverseIndex::ROOT)
}));
// FIXME: chalk refuses to solve `<Self as FnOnce<^0.0>>::Output == ^0.1`, so we first solve
// `<Self as FnOnce<^0.0>>` and then replace `^0.0` with the concrete argument tuple.
let trait_env = env.env.clone();
let obligation = InEnvironment { goal: fn_once.cast(Interner), environment: trait_env };
let canonical =
Canonical { binders: CanonicalVarKinds::from_iter(Interner, kinds), value: obligation };
let subst = match db.trait_solve(krate, canonical) {
Some(Solution::Unique(vars)) => vars.value.subst,
_ => return None,
};
let args = subst.at(Interner, self_ty.binders.interned().len()).ty(Interner)?;
let params = match args.kind(Interner) {
chalk_ir::TyKind::Tuple(_, subst) => {
subst.iter(Interner).filter_map(|arg| arg.ty(Interner).cloned()).collect::<Vec<_>>()
}
_ => return None,
};
if params.iter().any(|ty| ty.is_unknown()) {
return None;
}
let fn_once = TyBuilder::trait_ref(db, fn_once_trait)
.push(self_ty.value.clone())
.push(args.clone())
.build();
let projection =
TyBuilder::assoc_type_projection(db, output_assoc_type, Some(fn_once.substitution.clone()))
.build();
let ret_ty = db.normalize_projection(projection, env);
Some(CallableSig::from_params_and_return(params, ret_ty.clone(), false, Safety::Safe))
}

View file

@@ -227,13 +227,17 @@ impl<'a> TyLoweringContext<'a> {
.intern(Interner)
}
TypeRef::Placeholder => TyKind::Error.intern(Interner),
-TypeRef::Fn(params, is_varargs) => {
+&TypeRef::Fn(ref params, variadic, is_unsafe) => {
let substs = self.with_shifted_in(DebruijnIndex::ONE, |ctx| {
Substitution::from_iter(Interner, params.iter().map(|(_, tr)| ctx.lower_ty(tr)))
});
TyKind::Function(FnPointer {
num_binders: 0, // FIXME lower `for<'a> fn()` correctly
-sig: FnSig { abi: (), safety: Safety::Safe, variadic: *is_varargs },
+sig: FnSig {
+abi: (),
+safety: if is_unsafe { Safety::Unsafe } else { Safety::Safe },
+variadic,
+},
substitution: FnSubst(substs),
})
.intern(Interner)
@@ -1573,7 +1577,12 @@ fn fn_sig_for_fn(db: &dyn HirDatabase, def: FunctionId) -> PolyFnSig {
.with_type_param_mode(ParamLoweringMode::Variable);
let ret = ctx_ret.lower_ty(&data.ret_type);
let generics = generics(db.upcast(), def.into());
-let sig = CallableSig::from_params_and_return(params, ret, data.is_varargs());
+let sig = CallableSig::from_params_and_return(
+params,
+ret,
+data.is_varargs(),
+if data.has_unsafe_kw() { Safety::Unsafe } else { Safety::Safe },
+);
make_binders(db, &generics, sig)
}
@@ -1617,7 +1626,7 @@ fn fn_sig_for_struct_constructor(db: &dyn HirDatabase, def: StructId) -> PolyFnS
TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
let params = fields.iter().map(|(_, field)| ctx.lower_ty(&field.type_ref)).collect::<Vec<_>>();
let (ret, binders) = type_for_adt(db, def.into()).into_value_and_skipped_binders();
-Binders::new(binders, CallableSig::from_params_and_return(params, ret, false))
+Binders::new(binders, CallableSig::from_params_and_return(params, ret, false, Safety::Safe))
}
/// Build the type of a tuple struct constructor.
@@ -1644,7 +1653,7 @@ fn fn_sig_for_enum_variant_constructor(db: &dyn HirDatabase, def: EnumVariantId)
TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
let params = fields.iter().map(|(_, field)| ctx.lower_ty(&field.type_ref)).collect::<Vec<_>>();
let (ret, binders) = type_for_adt(db, def.parent.into()).into_value_and_skipped_binders();
-Binders::new(binders, CallableSig::from_params_and_return(params, ret, false))
+Binders::new(binders, CallableSig::from_params_and_return(params, ret, false, Safety::Safe))
}
/// Build the type of a tuple enum variant constructor.
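Not part of the diff: a small example of the distinction the signature lowering now preserves, with assumed names.

// Because fn_sig_for_fn now records Safety::Unsafe for `unsafe fn` items, the
// fn-pointer type obtained from `read_byte` keeps its `unsafe fn(*const u8) -> u8` form.
unsafe fn read_byte(p: *const u8) -> u8 {
    *p
}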

View file

@@ -122,6 +122,23 @@ fn test() {
)
}
#[test]
fn if_else_adjust_for_branches_discard_type_var() {
check_no_mismatches(
r#"
fn test() {
let f = || {
if true {
&""
} else {
""
}
};
}
"#,
);
}
#[test]
fn match_first_coerce() {
check_no_mismatches(
@@ -182,6 +199,22 @@ fn test() {
);
}
#[test]
fn match_adjust_for_branches_discard_type_var() {
check_no_mismatches(
r#"
fn test() {
let f = || {
match 0i32 {
0i32 => &"",
_ => "",
}
};
}
"#,
);
}
#[test]
fn return_coerce_unknown() {
check_types(
@@ -357,7 +390,7 @@ fn test() {
let f: fn(u32) -> isize = foo;
// ^^^ adjustments: Pointer(ReifyFnPointer)
let f: unsafe fn(u32) -> isize = foo;
-// ^^^ adjustments: Pointer(ReifyFnPointer)
+// ^^^ adjustments: Pointer(ReifyFnPointer), Pointer(UnsafeFnPointer)
}",
);
}
@@ -388,7 +421,10 @@ fn coerce_closure_to_fn_ptr() {
check_no_mismatches(
r"
fn test() {
-let f: fn(u32) -> isize = |x| { 1 };
+let f: fn(u32) -> u32 = |x| x;
+// ^^^^^ adjustments: Pointer(ClosureFnPointer(Safe))
+let f: unsafe fn(u32) -> u32 = |x| x;
+// ^^^^^ adjustments: Pointer(ClosureFnPointer(Unsafe))
}",
);
}

View file

@@ -2995,7 +2995,17 @@ impl Type {
let callee = match self.ty.kind(Interner) {
TyKind::Closure(id, _) => Callee::Closure(*id),
TyKind::Function(_) => Callee::FnPtr,
-_ => Callee::Def(self.ty.callable_def(db)?),
+TyKind::FnDef(..) => Callee::Def(self.ty.callable_def(db)?),
+_ => {
+let ty = hir_ty::replace_errors_with_variables(&self.ty);
+let sig = hir_ty::callable_sig_from_fnonce(&ty, self.env.clone(), db)?;
+return Some(Callable {
+ty: self.clone(),
+sig,
+callee: Callee::Other,
+is_bound_method: false,
+});
+}
};
let sig = self.ty.callable_sig(db)?;
@@ -3464,6 +3474,7 @@ enum Callee {
Def(CallableDefId),
Closure(ClosureId),
FnPtr,
+Other,
}
pub enum CallableKind {
@@ -3472,6 +3483,8 @@ pub enum CallableKind {
TupleEnumVariant(Variant),
Closure,
FnPtr,
+/// Some other type that implements `FnOnce`.
+Other,
}
impl Callable {
@@ -3483,6 +3496,7 @@ impl Callable {
Def(CallableDefId::EnumVariantId(it)) => CallableKind::TupleEnumVariant(it.into()),
Closure(_) => CallableKind::Closure,
FnPtr => CallableKind::FnPtr,
+Other => CallableKind::Other,
}
}
pub fn receiver_param(&self, db: &dyn HirDatabase) -> Option<ast::SelfParam> {

View file

@@ -14,4 +14,5 @@ pub struct AssistConfig {
pub allowed: Option<Vec<AssistKind>>,
pub insert_use: InsertUseConfig,
pub prefer_no_std: bool,
+pub assist_emit_must_use: bool,
}

View file

@@ -69,14 +69,14 @@ pub(crate) fn add_explicit_type(acc: &mut Assists, ctx: &AssistContext<'_>) -> O
let inferred_type = ty.display_source_code(ctx.db(), module.into()).ok()?;
acc.add(
AssistId("add_explicit_type", AssistKind::RefactorRewrite),
-format!("Insert explicit type `{}`", inferred_type),
+format!("Insert explicit type `{inferred_type}`"),
pat_range,
|builder| match ascribed_ty {
Some(ascribed_ty) => {
builder.replace(ascribed_ty.syntax().text_range(), inferred_type);
}
None => {
-builder.insert(pat_range.end(), format!(": {}", inferred_type));
+builder.insert(pat_range.end(), format!(": {inferred_type}"));
}
},
)
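Not part of the diff: most of the remaining assist hunks are the same mechanical cleanup, switching to inline format arguments (identifiers captured directly in the format string, stable since Rust 1.58). A minimal sketch with a made-up variable:

fn main() {
    let inferred_type = "i32";
    // Positional argument (old style) and inline capture (new style) produce the same string.
    let old = format!("Insert explicit type `{}`", inferred_type);
    let new = format!("Insert explicit type `{inferred_type}`");
    assert_eq!(old, new);
}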

View file

@@ -35,16 +35,16 @@ pub(crate) fn add_return_type(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opt
match builder_edit_pos {
InsertOrReplace::Insert(insert_pos, needs_whitespace) => {
let preceeding_whitespace = if needs_whitespace { " " } else { "" };
-builder.insert(insert_pos, &format!("{}-> {} ", preceeding_whitespace, ty))
+builder.insert(insert_pos, &format!("{preceeding_whitespace}-> {ty} "))
}
InsertOrReplace::Replace(text_range) => {
-builder.replace(text_range, &format!("-> {}", ty))
+builder.replace(text_range, &format!("-> {ty}"))
}
}
if let FnType::Closure { wrap_expr: true } = fn_type {
cov_mark::hit!(wrap_closure_non_block_expr);
// `|x| x` becomes `|x| -> T x` which is invalid, so wrap it in a block
-builder.replace(tail_expr.syntax().text_range(), &format!("{{{}}}", tail_expr));
+builder.replace(tail_expr.syntax().text_range(), &format!("{{{tail_expr}}}"));
}
},
)

View file

@@ -93,12 +93,13 @@ pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
builder.trigger_signature_help();
match ctx.config.snippet_cap {
Some(cap) => {
-let snip = format!("::<{}>", get_snippet_fish_head(number_of_arguments));
+let fish_head = get_snippet_fish_head(number_of_arguments);
+let snip = format!("::<{fish_head}>");
builder.insert_snippet(cap, ident.text_range().end(), snip)
}
None => {
let fish_head = std::iter::repeat("_").take(number_of_arguments).format(", ");
-let snip = format!("::<{}>", fish_head);
+let snip = format!("::<{fish_head}>");
builder.insert(ident.text_range().end(), snip);
}
}
@@ -109,7 +110,7 @@ pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
/// This will create a snippet string with tabstops marked
fn get_snippet_fish_head(number_of_arguments: usize) -> String {
let mut fish_head = (1..number_of_arguments)
-.format_with("", |i, f| f(&format_args!("${{{}:_}}, ", i)))
+.format_with("", |i, f| f(&format_args!("${{{i}:_}}, ")))
.to_string();
// tabstop 0 is a special case and always the last one

View file

@@ -123,20 +123,20 @@ pub(crate) fn apply_demorgan(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
let lhs_range = lhs.syntax().text_range();
let not_lhs = invert_boolean_expression(lhs);
-edit.replace(lhs_range, format!("!({}", not_lhs.syntax().text()));
+edit.replace(lhs_range, format!("!({not_lhs}"));
}
if let Some(rhs) = terms.pop_back() {
let rhs_range = rhs.syntax().text_range();
let not_rhs = invert_boolean_expression(rhs);
-edit.replace(rhs_range, format!("{})", not_rhs.syntax().text()));
+edit.replace(rhs_range, format!("{not_rhs})"));
}
for term in terms {
let term_range = term.syntax().text_range();
let not_term = invert_boolean_expression(term);
-edit.replace(term_range, not_term.syntax().text());
+edit.replace(term_range, not_term.to_string());
}
}
},

View file

@@ -127,10 +127,12 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
.sort_by_key(|import| Reverse(relevance_score(ctx, import, current_module.as_ref())));
for import in proposed_imports {
+let import_path = import.import_path;
acc.add_group(
&group_label,
AssistId("auto_import", AssistKind::QuickFix),
-format!("Import `{}`", import.import_path),
+format!("Import `{import_path}`"),
range,
|builder| {
let scope = match scope.clone() {
@@ -138,7 +140,7 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
ImportScope::Module(it) => ImportScope::Module(builder.make_mut(it)),
ImportScope::Block(it) => ImportScope::Block(builder.make_mut(it)),
};
-insert_use(&scope, mod_path_to_ast(&import.import_path), &ctx.config.insert_use);
+insert_use(&scope, mod_path_to_ast(&import_path), &ctx.config.insert_use);
},
);
}

View file

@@ -54,16 +54,17 @@ fn block_to_line(acc: &mut Assists, comment: ast::Comment) -> Option<()> {
let indent_spaces = indentation.to_string();
let output = lines
-.map(|l| l.trim_start_matches(&indent_spaces))
-.map(|l| {
+.map(|line| {
+let line = line.trim_start_matches(&indent_spaces);
// Don't introduce trailing whitespace
-if l.is_empty() {
+if line.is_empty() {
line_prefix.to_string()
} else {
-format!("{} {}", line_prefix, l.trim_start_matches(&indent_spaces))
+format!("{line_prefix} {line}")
}
})
-.join(&format!("\n{}", indent_spaces));
+.join(&format!("\n{indent_spaces}"));
edit.replace(target, output)
},
@@ -96,7 +97,7 @@ fn line_to_block(acc: &mut Assists, comment: ast::Comment) -> Option<()> {
let block_prefix =
CommentKind { shape: CommentShape::Block, ..comment.kind() }.prefix();
-let output = format!("{}\n{}\n{}*/", block_prefix, block_comment_body, indentation);
+let output = format!("{block_prefix}\n{block_comment_body}\n{indentation}*/");
edit.replace(target, output)
},

View file

@@ -32,19 +32,19 @@ pub(crate) fn convert_integer_literal(acc: &mut Assists, ctx: &AssistContext<'_>
}
let mut converted = match target_radix {
-Radix::Binary => format!("0b{:b}", value),
+Radix::Binary => format!("0b{value:b}"),
-Radix::Octal => format!("0o{:o}", value),
+Radix::Octal => format!("0o{value:o}"),
Radix::Decimal => value.to_string(),
-Radix::Hexadecimal => format!("0x{:X}", value),
+Radix::Hexadecimal => format!("0x{value:X}"),
};
-let label = format!("Convert {} to {}{}", literal, converted, suffix.unwrap_or_default());
// Appends the type suffix back into the new literal if it exists.
if let Some(suffix) = suffix {
converted.push_str(suffix);
}
+let label = format!("Convert {literal} to {converted}");
acc.add_group(
&group_id,
AssistId("convert_integer_literal", AssistKind::RefactorInline),

View file

@@ -86,9 +86,9 @@ pub(crate) fn convert_into_to_from(acc: &mut Assists, ctx: &AssistContext<'_>) -
impl_.syntax().text_range(),
|builder| {
builder.replace(src_type.syntax().text_range(), dest_type.to_string());
-builder.replace(ast_trait.syntax().text_range(), format!("From<{}>", src_type));
+builder.replace(ast_trait.syntax().text_range(), format!("From<{src_type}>"));
builder.replace(into_fn_return.syntax().text_range(), "-> Self");
-builder.replace(into_fn_params.syntax().text_range(), format!("(val: {})", src_type));
+builder.replace(into_fn_params.syntax().text_range(), format!("(val: {src_type})"));
builder.replace(into_fn_name.syntax().text_range(), "from");
for s in selfs {

View file

@@ -119,19 +119,19 @@ pub(crate) fn convert_for_loop_with_for_each(
{
// We have either "for x in &col" and col implements a method called iter
// or "for x in &mut col" and col implements a method called iter_mut
-format_to!(buf, "{}.{}()", expr_behind_ref, method);
+format_to!(buf, "{expr_behind_ref}.{method}()");
} else if let ast::Expr::RangeExpr(..) = iterable {
// range expressions need to be parenthesized for the syntax to be correct
-format_to!(buf, "({})", iterable);
+format_to!(buf, "({iterable})");
} else if impls_core_iter(&ctx.sema, &iterable) {
-format_to!(buf, "{}", iterable);
+format_to!(buf, "{iterable}");
} else if let ast::Expr::RefExpr(_) = iterable {
-format_to!(buf, "({}).into_iter()", iterable);
+format_to!(buf, "({iterable}).into_iter()");
} else {
-format_to!(buf, "{}.into_iter()", iterable);
+format_to!(buf, "{iterable}.into_iter()");
}
-format_to!(buf, ".for_each(|{}| {});", pat, body);
+format_to!(buf, ".for_each(|{pat}| {body});");
builder.replace(for_loop.syntax().text_range(), buf)
},

View file

@@ -80,7 +80,7 @@ fn binders_to_str(binders: &[(Name, bool)], addmut: bool) -> String {
.map(
|(ident, ismut)| {
if *ismut && addmut {
-format!("mut {}", ident)
+format!("mut {ident}")
} else {
ident.to_string()
}
@@ -93,7 +93,7 @@ fn binders_to_str(binders: &[(Name, bool)], addmut: bool) -> String {
} else if binders.len() == 1 {
vars
} else {
-format!("({})", vars)
+format!("({vars})")
}
}
@@ -153,7 +153,7 @@ pub(crate) fn convert_let_else_to_match(acc: &mut Assists, ctx: &AssistContext<'
let only_expr = let_else_block.statements().next().is_none();
let branch2 = match &let_else_block.tail_expr() {
-Some(tail) if only_expr => format!("{},", tail.syntax().text()),
+Some(tail) if only_expr => format!("{tail},"),
_ => let_else_block.syntax().text().to_string(),
};
let replace = if binders.is_empty() {

View file

@@ -0,0 +1,413 @@
use ide_db::defs::{Definition, NameRefClass};
use syntax::{
ast::{self, HasName},
ted, AstNode, SyntaxNode,
};
use crate::{
assist_context::{AssistContext, Assists},
AssistId, AssistKind,
};
// Assist: convert_match_to_let_else
//
// Converts let statement with match initializer to let-else statement.
//
// ```
// # //- minicore: option
// fn foo(opt: Option<()>) {
// let val = $0match opt {
// Some(it) => it,
// None => return,
// };
// }
// ```
// ->
// ```
// fn foo(opt: Option<()>) {
// let Some(val) = opt else { return };
// }
// ```
pub(crate) fn convert_match_to_let_else(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let let_stmt: ast::LetStmt = ctx.find_node_at_offset()?;
let binding = find_binding(let_stmt.pat()?)?;
let initializer = match let_stmt.initializer() {
Some(ast::Expr::MatchExpr(it)) => it,
_ => return None,
};
let initializer_expr = initializer.expr()?;
let (extracting_arm, diverging_arm) = match find_arms(ctx, &initializer) {
Some(it) => it,
None => return None,
};
if extracting_arm.guard().is_some() {
cov_mark::hit!(extracting_arm_has_guard);
return None;
}
let diverging_arm_expr = diverging_arm.expr()?;
let extracting_arm_pat = extracting_arm.pat()?;
let extracted_variable = find_extracted_variable(ctx, &extracting_arm)?;
acc.add(
AssistId("convert_match_to_let_else", AssistKind::RefactorRewrite),
"Convert match to let-else",
let_stmt.syntax().text_range(),
|builder| {
let extracting_arm_pat = rename_variable(&extracting_arm_pat, extracted_variable, binding);
builder.replace(
let_stmt.syntax().text_range(),
format!("let {extracting_arm_pat} = {initializer_expr} else {{ {diverging_arm_expr} }};")
)
},
)
}
// Given a pattern, find the name introduced to the surrounding scope.
fn find_binding(pat: ast::Pat) -> Option<ast::IdentPat> {
if let ast::Pat::IdentPat(ident) = pat {
Some(ident)
} else {
None
}
}
// Given a match expression, find extracting and diverging arms.
fn find_arms(
ctx: &AssistContext<'_>,
match_expr: &ast::MatchExpr,
) -> Option<(ast::MatchArm, ast::MatchArm)> {
let arms = match_expr.match_arm_list()?.arms().collect::<Vec<_>>();
if arms.len() != 2 {
return None;
}
let mut extracting = None;
let mut diverging = None;
for arm in arms {
if ctx.sema.type_of_expr(&arm.expr().unwrap()).unwrap().original().is_never() {
diverging = Some(arm);
} else {
extracting = Some(arm);
}
}
match (extracting, diverging) {
(Some(extracting), Some(diverging)) => Some((extracting, diverging)),
_ => {
cov_mark::hit!(non_diverging_match);
None
}
}
}
// Given an extracting arm, find the extracted variable.
fn find_extracted_variable(ctx: &AssistContext<'_>, arm: &ast::MatchArm) -> Option<ast::Name> {
match arm.expr()? {
ast::Expr::PathExpr(path) => {
let name_ref = path.syntax().descendants().find_map(ast::NameRef::cast)?;
match NameRefClass::classify(&ctx.sema, &name_ref)? {
NameRefClass::Definition(Definition::Local(local)) => {
let source = local.source(ctx.db()).value.left()?;
Some(source.name()?)
}
_ => None,
}
}
_ => {
cov_mark::hit!(extracting_arm_is_not_an_identity_expr);
return None;
}
}
}
// Rename `extracted` with `binding` in `pat`.
fn rename_variable(pat: &ast::Pat, extracted: ast::Name, binding: ast::IdentPat) -> SyntaxNode {
let syntax = pat.syntax().clone_for_update();
let extracted_syntax = syntax.covering_element(extracted.syntax().text_range());
// If `extracted` variable is a record field, we should rename it to `binding`,
// otherwise we just need to replace `extracted` with `binding`.
if let Some(record_pat_field) = extracted_syntax.ancestors().find_map(ast::RecordPatField::cast)
{
if let Some(name_ref) = record_pat_field.field_name() {
ted::replace(
record_pat_field.syntax(),
ast::make::record_pat_field(ast::make::name_ref(&name_ref.text()), binding.into())
.syntax()
.clone_for_update(),
);
}
} else {
ted::replace(extracted_syntax, binding.syntax().clone_for_update());
}
syntax
}
#[cfg(test)]
mod tests {
use crate::tests::{check_assist, check_assist_not_applicable};
use super::*;
#[test]
fn should_not_be_applicable_for_non_diverging_match() {
cov_mark::check!(non_diverging_match);
check_assist_not_applicable(
convert_match_to_let_else,
r#"
//- minicore: option
fn foo(opt: Option<()>) {
let val = $0match opt {
Some(it) => it,
None => (),
};
}
"#,
);
}
#[test]
fn should_not_be_applicable_if_extracting_arm_is_not_an_identity_expr() {
cov_mark::check_count!(extracting_arm_is_not_an_identity_expr, 2);
check_assist_not_applicable(
convert_match_to_let_else,
r#"
//- minicore: option
fn foo(opt: Option<i32>) {
let val = $0match opt {
Some(it) => it + 1,
None => return,
};
}
"#,
);
check_assist_not_applicable(
convert_match_to_let_else,
r#"
//- minicore: option
fn foo(opt: Option<()>) {
let val = $0match opt {
Some(it) => {
let _ = 1 + 1;
it
},
None => return,
};
}
"#,
);
}
#[test]
fn should_not_be_applicable_if_extracting_arm_has_guard() {
cov_mark::check!(extracting_arm_has_guard);
check_assist_not_applicable(
convert_match_to_let_else,
r#"
//- minicore: option
fn foo(opt: Option<()>) {
let val = $0match opt {
Some(it) if 2 > 1 => it,
None => return,
};
}
"#,
);
}
#[test]
fn basic_pattern() {
check_assist(
convert_match_to_let_else,
r#"
//- minicore: option
fn foo(opt: Option<()>) {
let val = $0match opt {
Some(it) => it,
None => return,
};
}
"#,
r#"
fn foo(opt: Option<()>) {
let Some(val) = opt else { return };
}
"#,
);
}
#[test]
fn keeps_modifiers() {
check_assist(
convert_match_to_let_else,
r#"
//- minicore: option
fn foo(opt: Option<()>) {
let ref mut val = $0match opt {
Some(it) => it,
None => return,
};
}
"#,
r#"
fn foo(opt: Option<()>) {
let Some(ref mut val) = opt else { return };
}
"#,
);
}
#[test]
fn nested_pattern() {
check_assist(
convert_match_to_let_else,
r#"
//- minicore: option, result
fn foo(opt: Option<Result<()>>) {
let val = $0match opt {
Some(Ok(it)) => it,
_ => return,
};
}
"#,
r#"
fn foo(opt: Option<Result<()>>) {
let Some(Ok(val)) = opt else { return };
}
"#,
);
}
#[test]
fn works_with_any_diverging_block() {
check_assist(
convert_match_to_let_else,
r#"
//- minicore: option
fn foo(opt: Option<()>) {
loop {
let val = $0match opt {
Some(it) => it,
None => break,
};
}
}
"#,
r#"
fn foo(opt: Option<()>) {
loop {
let Some(val) = opt else { break };
}
}
"#,
);
check_assist(
convert_match_to_let_else,
r#"
//- minicore: option
fn foo(opt: Option<()>) {
loop {
let val = $0match opt {
Some(it) => it,
None => continue,
};
}
}
"#,
r#"
fn foo(opt: Option<()>) {
loop {
let Some(val) = opt else { continue };
}
}
"#,
);
check_assist(
convert_match_to_let_else,
r#"
//- minicore: option
fn panic() -> ! {}
fn foo(opt: Option<()>) {
loop {
let val = $0match opt {
Some(it) => it,
None => panic(),
};
}
}
"#,
r#"
fn panic() -> ! {}
fn foo(opt: Option<()>) {
loop {
let Some(val) = opt else { panic() };
}
}
"#,
);
}
#[test]
fn struct_pattern() {
check_assist(
convert_match_to_let_else,
r#"
//- minicore: option
struct Point {
x: i32,
y: i32,
}
fn foo(opt: Option<Point>) {
let val = $0match opt {
Some(Point { x: 0, y }) => y,
_ => return,
};
}
"#,
r#"
struct Point {
x: i32,
y: i32,
}
fn foo(opt: Option<Point>) {
let Some(Point { x: 0, y: val }) = opt else { return };
}
"#,
);
}
#[test]
fn renames_whole_binding() {
check_assist(
convert_match_to_let_else,
r#"
//- minicore: option
fn foo(opt: Option<i32>) -> Option<i32> {
let val = $0match opt {
it @ Some(42) => it,
_ => return None,
};
val
}
"#,
r#"
fn foo(opt: Option<i32>) -> Option<i32> {
let val @ Some(42) = opt else { return None };
val
}
"#,
);
}
}

View file

@@ -129,32 +129,15 @@ pub(crate) fn convert_to_guarded_return(acc: &mut Assists, ctx: &AssistContext<'
}
Some((path, bound_ident)) => {
// If-let.
-let match_expr = {
-let happy_arm = {
-let pat = make::tuple_struct_pat(
-path,
-once(make::ext::simple_ident_pat(make::name("it")).into()),
-);
-let expr = {
-let path = make::ext::ident_path("it");
-make::expr_path(path)
-};
-make::match_arm(once(pat.into()), None, expr)
-};
-let sad_arm = make::match_arm(
-// FIXME: would be cool to use `None` or `Err(_)` if appropriate
-once(make::wildcard_pat().into()),
-None,
-early_expression,
-);
-make::expr_match(cond_expr, make::match_arm_list(vec![happy_arm, sad_arm]))
-};
-let let_stmt = make::let_stmt(bound_ident, None, Some(match_expr));
-let let_stmt = let_stmt.indent(if_indent_level);
-let_stmt.syntax().clone_for_update()
+let pat = make::tuple_struct_pat(path, once(bound_ident));
+let let_else_stmt = make::let_else_stmt(
+pat.into(),
+None,
+cond_expr,
+ast::make::tail_only_block_expr(early_expression),
+);
+let let_else_stmt = let_else_stmt.indent(if_indent_level);
+let_else_stmt.syntax().clone_for_update()
}
};
@@ -238,10 +221,7 @@ fn main(n: Option<String>) {
r#"
fn main(n: Option<String>) {
bar();
-let n = match n {
-Some(it) => it,
-_ => return,
-};
+let Some(n) = n else { return };
foo(n);
// comment
@@ -264,10 +244,7 @@ fn main() {
"#,
r#"
fn main() {
-let x = match Err(92) {
-Ok(it) => it,
-_ => return,
-};
+let Ok(x) = Err(92) else { return };
foo(x);
}
"#,
@@ -292,10 +269,7 @@ fn main(n: Option<String>) {
r#"
fn main(n: Option<String>) {
bar();
-let n = match n {
-Some(it) => it,
-_ => return,
-};
+let Some(n) = n else { return };
foo(n);
// comment
@@ -323,10 +297,7 @@ fn main(n: Option<String>) {
r#"
fn main(n: Option<String>) {
bar();
-let mut n = match n {
-Some(it) => it,
-_ => return,
-};
+let Some(mut n) = n else { return };
foo(n);
// comment
@@ -354,10 +325,7 @@ fn main(n: Option<&str>) {
r#"
fn main(n: Option<&str>) {
bar();
-let ref n = match n {
-Some(it) => it,
-_ => return,
-};
+let Some(ref n) = n else { return };
foo(n);
// comment
@@ -412,10 +380,7 @@ fn main() {
r#"
fn main() {
while true {
-let n = match n {
-Some(it) => it,
-_ => continue,
-};
+let Some(n) = n else { continue };
foo(n);
bar();
}
@@ -469,10 +434,7 @@ fn main() {
r#"
fn main() {
loop {
-let n = match n {
-Some(it) => it,
-_ => continue,
-};
+let Some(n) = n else { continue };
foo(n);
bar();
}

View file

@@ -226,7 +226,13 @@ fn edit_field_references(
}
fn generate_names(fields: impl Iterator<Item = ast::TupleField>) -> Vec<ast::Name> {
-fields.enumerate().map(|(i, _)| ast::make::name(&format!("field{}", i + 1))).collect()
+fields
+.enumerate()
+.map(|(i, _)| {
+let idx = i + 1;
+ast::make::name(&format!("field{idx}"))
+})
+.collect()
}
#[cfg(test)]

View file

@@ -58,16 +58,16 @@ pub(crate) fn convert_two_arm_bool_match_to_matches_macro(
target_range,
|builder| {
let mut arm_str = String::new();
-if let Some(ref pat) = first_arm.pat() {
+if let Some(pat) = &first_arm.pat() {
arm_str += &pat.to_string();
}
-if let Some(ref guard) = first_arm.guard() {
+if let Some(guard) = &first_arm.guard() {
-arm_str += &format!(" {}", &guard.to_string());
+arm_str += &format!(" {guard}");
}
if invert_matches {
-builder.replace(target_range, format!("!matches!({}, {})", expr, arm_str));
+builder.replace(target_range, format!("!matches!({expr}, {arm_str})"));
} else {
-builder.replace(target_range, format!("matches!({}, {})", expr, arm_str));
+builder.replace(target_range, format!("matches!({expr}, {arm_str})"));
}
},
)

View file

@@ -133,7 +133,7 @@ fn generate_name(
_usages: &Option<UsageSearchResult>,
) -> String {
// FIXME: detect if name already used
-format!("_{}", index)
+format!("_{index}")
}
enum RefType {
@@ -168,12 +168,12 @@ fn edit_tuple_assignment(
let add_cursor = |text: &str| {
// place cursor on first tuple item
let first_tuple = &data.field_names[0];
-text.replacen(first_tuple, &format!("$0{}", first_tuple), 1)
+text.replacen(first_tuple, &format!("$0{first_tuple}"), 1)
};
// with sub_pattern: keep original tuple and add subpattern: `tup @ (_0, _1)`
if in_sub_pattern {
-let text = format!(" @ {}", tuple_pat);
+let text = format!(" @ {tuple_pat}");
match ctx.config.snippet_cap {
Some(cap) => {
let snip = add_cursor(&text);
@@ -314,9 +314,9 @@ struct RefData {
impl RefData {
fn format(&self, field_name: &str) -> String {
match (self.needs_deref, self.needs_parentheses) {
-(true, true) => format!("(*{})", field_name),
+(true, true) => format!("(*{field_name})"),
-(true, false) => format!("*{}", field_name),
+(true, false) => format!("*{field_name}"),
-(false, true) => format!("({})", field_name),
+(false, true) => format!("({field_name})"),
(false, false) => field_name.to_string(),
}
}

View file

@@ -181,7 +181,7 @@ fn make_function_name(semantics_scope: &hir::SemanticsScope<'_>) -> ast::NameRef
let mut counter = 0;
while names_in_scope.contains(&name) {
counter += 1;
-name = format!("{}{}", &default_name, counter)
+name = format!("{default_name}{counter}")
}
make::name_ref(&name)
}
@@ -1291,19 +1291,23 @@ fn make_call(ctx: &AssistContext<'_>, fun: &Function, indent: IndentLevel) -> St
match fun.outliving_locals.as_slice() {
[] => {}
[var] => {
-format_to!(buf, "let {}{} = ", mut_modifier(var), var.local.name(ctx.db()))
+let modifier = mut_modifier(var);
+let name = var.local.name(ctx.db());
+format_to!(buf, "let {modifier}{name} = ")
}
vars => {
buf.push_str("let (");
let bindings = vars.iter().format_with(", ", |local, f| {
-f(&format_args!("{}{}", mut_modifier(local), local.local.name(ctx.db())))
+let modifier = mut_modifier(local);
+let name = local.local.name(ctx.db());
+f(&format_args!("{modifier}{name}"))
});
-format_to!(buf, "{}", bindings);
+format_to!(buf, "{bindings}");
buf.push_str(") = ");
}
}
-format_to!(buf, "{}", expr);
+format_to!(buf, "{expr}");
let insert_comma = fun
.body
.parent()
@@ -1447,6 +1451,8 @@ fn format_function(
new_indent: IndentLevel,
) -> String {
let mut fn_def = String::new();
+let fun_name = &fun.name;
let params = fun.make_param_list(ctx, module);
let ret_ty = fun.make_ret_ty(ctx, module);
let body = make_body(ctx, old_indent, new_indent, fun);
@@ -1454,42 +1460,28 @@ fn format_function(
let async_kw = if fun.control_flow.is_async { "async " } else { "" };
let unsafe_kw = if fun.control_flow.is_unsafe { "unsafe " } else { "" };
let (generic_params, where_clause) = make_generic_params_and_where_clause(ctx, fun);
+format_to!(fn_def, "\n\n{new_indent}{const_kw}{async_kw}{unsafe_kw}");
match ctx.config.snippet_cap {
-Some(_) => format_to!(
-fn_def,
-"\n\n{}{}{}{}fn $0{}",
-new_indent,
-const_kw,
-async_kw,
-unsafe_kw,
-fun.name,
-),
-None => format_to!(
-fn_def,
-"\n\n{}{}{}{}fn {}",
-new_indent,
-const_kw,
-async_kw,
-unsafe_kw,
-fun.name,
-),
+Some(_) => format_to!(fn_def, "fn $0{fun_name}"),
+None => format_to!(fn_def, "fn {fun_name}"),
}
if let Some(generic_params) = generic_params {
-format_to!(fn_def, "{}", generic_params);
+format_to!(fn_def, "{generic_params}");
}
-format_to!(fn_def, "{}", params);
+format_to!(fn_def, "{params}");
if let Some(ret_ty) = ret_ty {
-format_to!(fn_def, " {}", ret_ty);
+format_to!(fn_def, " {ret_ty}");
}
if let Some(where_clause) = where_clause {
-format_to!(fn_def, " {}", where_clause);
+format_to!(fn_def, " {where_clause}");
}
-format_to!(fn_def, " {}", body);
+format_to!(fn_def, " {body}");
fn_def
}

View file

@@ -127,7 +127,7 @@ pub(crate) fn extract_module(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
for item in items_to_be_processed {
let item = item.indent(IndentLevel(1));
let mut indented_item = String::new();
-format_to!(indented_item, "{}{}", new_item_indent, item.to_string());
+format_to!(indented_item, "{new_item_indent}{item}");
body_items.push(indented_item);
}
@@ -137,30 +137,28 @@ pub(crate) fn extract_module(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
let mut impl_body_def = String::new();
if let Some(self_ty) = impl_.self_ty() {
-format_to!(
-impl_body_def,
-"{}impl {} {{\n{}\n{}}}",
-old_item_indent + 1,
-self_ty.to_string(),
-body,
-old_item_indent + 1
-);
+{
+let impl_indent = old_item_indent + 1;
+format_to!(
+impl_body_def,
+"{impl_indent}impl {self_ty} {{\n{body}\n{impl_indent}}}",
+);
+}
body = impl_body_def;
// Add the import for enum/struct corresponding to given impl block
module.make_use_stmt_of_node_with_super(self_ty.syntax());
for item in module.use_items {
-let mut indented_item = String::new();
-format_to!(indented_item, "{}{}", old_item_indent + 1, item.to_string());
-body = format!("{}\n\n{}", indented_item, body);
+let item_indent = old_item_indent + 1;
+body = format!("{item_indent}{item}\n\n{body}");
}
}
}
let mut module_def = String::new();
-format_to!(module_def, "mod {} {{\n{}\n{}}}", module.name, body, old_item_indent);
+let module_name = module.name;
+format_to!(module_def, "mod {module_name} {{\n{body}\n{old_item_indent}}}");
let mut usages_to_be_updated_for_curr_file = vec![];
for usages_to_be_updated_for_file in usages_to_be_processed {
@@ -199,7 +197,7 @@ pub(crate) fn extract_module(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
builder.delete(range);
}
-builder.insert(impl_.syntax().text_range().end(), format!("\n\n{}", module_def));
+builder.insert(impl_.syntax().text_range().end(), format!("\n\n{module_def}"));
} else {
builder.replace(module.text_range, module_def)
}
@@ -343,9 +341,10 @@ impl Module {
&& !self.text_range.contains_range(desc.text_range())
{
if let Some(name_ref) = ast::NameRef::cast(desc) {
+let mod_name = self.name;
return Some((
name_ref.syntax().text_range(),
-format!("{}::{}", self.name, name_ref),
+format!("{mod_name}::{name_ref}"),
));
}
}

View file

@@ -296,10 +296,14 @@ fn create_struct_def(
fn update_variant(variant: &ast::Variant, generics: Option<ast::GenericParamList>) -> Option<()> {
let name = variant.name()?;
-let ty = generics
+let generic_args = generics
.filter(|generics| generics.generic_params().count() > 0)
-.map(|generics| make::ty(&format!("{}{}", &name.text(), generics.to_generic_args())))
-.unwrap_or_else(|| make::ty(&name.text()));
+.map(|generics| generics.to_generic_args());
+// FIXME: replace with a `ast::make` constructor
+let ty = match generic_args {
+Some(generic_args) => make::ty(&format!("{name}{generic_args}")),
+None => make::ty(&name.text()),
+};
// change from a record to a tuple field list
let tuple_field = make::tuple_field(None, ty);

View file

@@ -1,8 +1,7 @@
use either::Either;
use ide_db::syntax_helpers::node_ext::walk_ty;
-use itertools::Itertools;
use syntax::{
-ast::{self, edit::IndentLevel, AstNode, HasGenericParams, HasName},
+ast::{self, edit::IndentLevel, make, AstNode, HasGenericParams, HasName},
match_ast,
};
@@ -64,41 +63,29 @@ pub(crate) fn extract_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>) ->
known_generics.extend(it.generic_params());
}
let generics = collect_used_generics(&ty, &known_generics);
+let generic_params =
+generics.map(|it| make::generic_param_list(it.into_iter().cloned()));
-let replacement = if !generics.is_empty() {
-format!(
-"Type<{}>",
-generics.iter().format_with(", ", |generic, f| {
-match generic {
-ast::GenericParam::ConstParam(cp) => f(&cp.name().unwrap()),
-ast::GenericParam::LifetimeParam(lp) => f(&lp.lifetime().unwrap()),
-ast::GenericParam::TypeParam(tp) => f(&tp.name().unwrap()),
-}
-})
-)
-} else {
-String::from("Type")
-};
+let ty_args = generic_params
+.as_ref()
+.map_or(String::new(), |it| it.to_generic_args().to_string());
+let replacement = format!("Type{ty_args}");
builder.replace(target, replacement);
let indent = IndentLevel::from_node(node);
-let generics = if !generics.is_empty() {
-format!("<{}>", generics.iter().format(", "))
-} else {
-String::new()
-};
+let generic_params = generic_params.map_or(String::new(), |it| it.to_string());
match ctx.config.snippet_cap {
Some(cap) => {
builder.insert_snippet(
cap,
insert_pos,
-format!("type $0Type{} = {};\n\n{}", generics, ty, indent),
+format!("type $0Type{generic_params} = {ty};\n\n{indent}"),
);
}
None => {
builder.insert(
insert_pos,
-format!("type Type{} = {};\n\n{}", generics, ty, indent),
+format!("type Type{generic_params} = {ty};\n\n{indent}"),
);
}
}
@@ -109,7 +96,7 @@ fn collect_used_generics<'gp>(
fn collect_used_generics<'gp>(
ty: &ast::Type,
known_generics: &'gp [ast::GenericParam],
-) -> Vec<&'gp ast::GenericParam> {
+) -> Option<Vec<&'gp ast::GenericParam>> {
// can't use a closure -> closure here cause lifetime inference fails for that
fn find_lifetime(text: &str) -> impl Fn(&&ast::GenericParam) -> bool + '_ {
move |gp: &&ast::GenericParam| match gp {
@@ -198,7 +185,8 @@ fn collect_used_generics<'gp>(
ast::GenericParam::LifetimeParam(_) => 0,
ast::GenericParam::TypeParam(_) => 1,
});
-generics
+Some(generics).filter(|it| it.len() > 0)
}
#[cfg(test)]

View file

@ -91,13 +91,13 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
match anchor { match anchor {
Anchor::Before(_) | Anchor::Replace(_) => { Anchor::Before(_) | Anchor::Replace(_) => {
format_to!(buf, "let {}{} = {}", var_modifier, var_name, reference_modifier) format_to!(buf, "let {var_modifier}{var_name} = {reference_modifier}")
} }
Anchor::WrapInBlock(_) => { Anchor::WrapInBlock(_) => {
format_to!(buf, "{{ let {} = {}", var_name, reference_modifier) format_to!(buf, "{{ let {var_name} = {reference_modifier}")
} }
}; };
format_to!(buf, "{}", to_extract.syntax()); format_to!(buf, "{to_extract}");
if let Anchor::Replace(stmt) = anchor { if let Anchor::Replace(stmt) = anchor {
cov_mark::hit!(test_extract_var_expr_stmt); cov_mark::hit!(test_extract_var_expr_stmt);
@ -107,8 +107,8 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
match ctx.config.snippet_cap { match ctx.config.snippet_cap {
Some(cap) => { Some(cap) => {
let snip = buf.replace( let snip = buf.replace(
&format!("let {}{}", var_modifier, var_name), &format!("let {var_modifier}{var_name}"),
&format!("let {}$0{}", var_modifier, var_name), &format!("let {var_modifier}$0{var_name}"),
); );
edit.replace_snippet(cap, expr_range, snip) edit.replace_snippet(cap, expr_range, snip)
} }
@ -135,8 +135,8 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
match ctx.config.snippet_cap { match ctx.config.snippet_cap {
Some(cap) => { Some(cap) => {
let snip = buf.replace( let snip = buf.replace(
&format!("let {}{}", var_modifier, var_name), &format!("let {var_modifier}{var_name}"),
&format!("let {}$0{}", var_modifier, var_name), &format!("let {var_modifier}$0{var_name}"),
); );
edit.insert_snippet(cap, offset, snip) edit.insert_snippet(cap, offset, snip)
} }
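Most of the mechanical churn in this sync follows the same pattern as the hunk above: moving from positional `format!` arguments to the captured-identifier syntax stabilized in Rust 1.58. A minimal standalone sketch of the two equivalent forms:

```rust
fn main() {
    let var_modifier = "mut ";
    let var_name = "x";

    // Positional arguments, as the code read before this sync.
    let old = format!("let {}{}", var_modifier, var_name);
    // Captured identifiers, as the code reads after it.
    let new = format!("let {var_modifier}{var_name}");

    assert_eq!(old, new);
    println!("{new}");
}
```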


@ -57,8 +57,8 @@ fn add_vis_to_referenced_module_def(acc: &mut Assists, ctx: &AssistContext<'_>)
if current_module.krate() == target_module.krate() { "pub(crate)" } else { "pub" }; if current_module.krate() == target_module.krate() { "pub(crate)" } else { "pub" };
let assist_label = match target_name { let assist_label = match target_name {
None => format!("Change visibility to {}", missing_visibility), None => format!("Change visibility to {missing_visibility}"),
Some(name) => format!("Change visibility of {} to {}", name, missing_visibility), Some(name) => format!("Change visibility of {name} to {missing_visibility}"),
}; };
acc.add(AssistId("fix_visibility", AssistKind::QuickFix), assist_label, target, |builder| { acc.add(AssistId("fix_visibility", AssistKind::QuickFix), assist_label, target, |builder| {
@ -68,15 +68,15 @@ fn add_vis_to_referenced_module_def(acc: &mut Assists, ctx: &AssistContext<'_>)
Some(current_visibility) => builder.replace_snippet( Some(current_visibility) => builder.replace_snippet(
cap, cap,
current_visibility.syntax().text_range(), current_visibility.syntax().text_range(),
format!("$0{}", missing_visibility), format!("$0{missing_visibility}"),
), ),
None => builder.insert_snippet(cap, offset, format!("$0{} ", missing_visibility)), None => builder.insert_snippet(cap, offset, format!("$0{missing_visibility} ")),
}, },
None => match current_visibility { None => match current_visibility {
Some(current_visibility) => { Some(current_visibility) => {
builder.replace(current_visibility.syntax().text_range(), missing_visibility) builder.replace(current_visibility.syntax().text_range(), missing_visibility)
} }
None => builder.insert(offset, format!("{} ", missing_visibility)), None => builder.insert(offset, format!("{missing_visibility} ")),
}, },
} }
}) })
@ -114,7 +114,7 @@ fn add_vis_to_referenced_record_field(acc: &mut Assists, ctx: &AssistContext<'_>
let target_name = record_field_def.name(ctx.db()); let target_name = record_field_def.name(ctx.db());
let assist_label = let assist_label =
format!("Change visibility of {}.{} to {}", parent_name, target_name, missing_visibility); format!("Change visibility of {parent_name}.{target_name} to {missing_visibility}");
acc.add(AssistId("fix_visibility", AssistKind::QuickFix), assist_label, target, |builder| { acc.add(AssistId("fix_visibility", AssistKind::QuickFix), assist_label, target, |builder| {
builder.edit_file(target_file); builder.edit_file(target_file);
@ -123,15 +123,15 @@ fn add_vis_to_referenced_record_field(acc: &mut Assists, ctx: &AssistContext<'_>
Some(current_visibility) => builder.replace_snippet( Some(current_visibility) => builder.replace_snippet(
cap, cap,
current_visibility.syntax().text_range(), current_visibility.syntax().text_range(),
format!("$0{}", missing_visibility), format!("$0{missing_visibility}"),
), ),
None => builder.insert_snippet(cap, offset, format!("$0{} ", missing_visibility)), None => builder.insert_snippet(cap, offset, format!("$0{missing_visibility} ")),
}, },
None => match current_visibility { None => match current_visibility {
Some(current_visibility) => { Some(current_visibility) => {
builder.replace(current_visibility.syntax().text_range(), missing_visibility) builder.replace(current_visibility.syntax().text_range(), missing_visibility)
} }
None => builder.insert(offset, format!("{} ", missing_visibility)), None => builder.insert(offset, format!("{missing_visibility} ")),
}, },
} }
}) })


@ -124,6 +124,7 @@ fn generate_enum_projection_method(
happy_case, happy_case,
sad_case, sad_case,
} = props; } = props;
let variant = ctx.find_node_at_offset::<ast::Variant>()?; let variant = ctx.find_node_at_offset::<ast::Variant>()?;
let variant_name = variant.name()?; let variant_name = variant.name()?;
let parent_enum = ast::Adt::Enum(variant.parent_enum()); let parent_enum = ast::Adt::Enum(variant.parent_enum());
@ -144,7 +145,7 @@ fn generate_enum_projection_method(
ast::StructKind::Unit => return None, ast::StructKind::Unit => return None,
}; };
let fn_name = format!("{}_{}", fn_name_prefix, &to_lower_snake_case(&variant_name.text())); let fn_name = format!("{fn_name_prefix}_{}", &to_lower_snake_case(&variant_name.text()));
// Return early if we've found an existing new fn // Return early if we've found an existing new fn
let impl_def = find_struct_impl(ctx, &parent_enum, &[fn_name.clone()])?; let impl_def = find_struct_impl(ctx, &parent_enum, &[fn_name.clone()])?;
@ -156,15 +157,25 @@ fn generate_enum_projection_method(
assist_description, assist_description,
target, target,
|builder| { |builder| {
let vis = parent_enum.visibility().map_or(String::new(), |v| format!("{v} ")); let vis = parent_enum.visibility().map_or(String::new(), |v| format!("{} ", v));
let field_type_syntax = field_type.syntax();
let must_use = if ctx.config.assist_emit_must_use {
"#[must_use]\n "
} else {
""
};
let method = format!( let method = format!(
" {vis}fn {fn_name}({self_param}) -> {return_prefix}{field_type}{return_suffix} {{ " {must_use}{vis}fn {fn_name}({self_param}) -> {return_prefix}{field_type_syntax}{return_suffix} {{
if let Self::{variant_name}{pattern_suffix} = self {{ if let Self::{variant_name}{pattern_suffix} = self {{
{happy_case}({bound_name}) {happy_case}({bound_name})
}} else {{ }} else {{
{sad_case} {sad_case}
}} }}
}}"); }}"
);
add_method_to_adt(builder, &parent_enum, impl_def, &method); add_method_to_adt(builder, &parent_enum, impl_def, &method);
}, },
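The hunk above makes the projection assist emit `#[must_use]` when the new `assist.emitMustUse` setting (added in `config.rs` later in this commit) is enabled. A hand-written sketch of the shape of the generated method; the enum and method names are invented:

```rust
enum Value {
    Text(String),
    Number(i32),
}

impl Value {
    // With `assist.emitMustUse` enabled, the assist now prefixes the generated
    // method with #[must_use]; the body is unchanged.
    #[must_use]
    fn as_text(&self) -> Option<&String> {
        if let Self::Text(v) = self {
            Some(v)
        } else {
            None
        }
    }
}

fn main() {
    let v = Value::Text("hi".to_string());
    // Discarding the result of a #[must_use] method now triggers a lint,
    // which is the point of emitting the attribute.
    if let Some(text) = v.as_text() {
        println!("{text}");
    }
    let _ = Value::Number(1);
}
```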


@ -1,3 +1,5 @@
use std::collections::BTreeSet;
use ast::make; use ast::make;
use either::Either; use either::Either;
use hir::{db::HirDatabase, PathResolution, Semantics, TypeInfo}; use hir::{db::HirDatabase, PathResolution, Semantics, TypeInfo};
@ -190,10 +192,10 @@ pub(crate) fn inline_call(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
PathResolution::Def(hir::ModuleDef::Function(f)) => f, PathResolution::Def(hir::ModuleDef::Function(f)) => f,
_ => return None, _ => return None,
}; };
(function, format!("Inline `{}`", path)) (function, format!("Inline `{path}`"))
} }
ast::CallableExpr::MethodCall(call) => { ast::CallableExpr::MethodCall(call) => {
(ctx.sema.resolve_method_call(call)?, format!("Inline `{}`", name_ref)) (ctx.sema.resolve_method_call(call)?, format!("Inline `{name_ref}`"))
} }
}; };
@ -373,8 +375,44 @@ fn inline(
}) })
} }
} }
let mut func_let_vars: BTreeSet<String> = BTreeSet::new();
// grab all of the local variable declarations in the function
for stmt in fn_body.statements() {
if let Some(let_stmt) = ast::LetStmt::cast(stmt.syntax().to_owned()) {
for has_token in let_stmt.syntax().children_with_tokens() {
if let Some(node) = has_token.as_node() {
if let Some(ident_pat) = ast::IdentPat::cast(node.to_owned()) {
func_let_vars.insert(ident_pat.syntax().text().to_string());
}
}
}
}
}
// Inline parameter expressions or generate `let` statements depending on whether inlining works or not. // Inline parameter expressions or generate `let` statements depending on whether inlining works or not.
for ((pat, param_ty, _), usages, expr) in izip!(params, param_use_nodes, arguments).rev() { for ((pat, param_ty, _), usages, expr) in izip!(params, param_use_nodes, arguments).rev() {
// izip confuses RA due to our lack of hygiene info currently losing us type info causing incorrect errors
let usages: &[ast::PathExpr] = &*usages;
let expr: &ast::Expr = expr;
let insert_let_stmt = || {
let ty = sema.type_of_expr(expr).filter(TypeInfo::has_adjustment).and(param_ty.clone());
if let Some(stmt_list) = body.stmt_list() {
stmt_list.push_front(
make::let_stmt(pat.clone(), ty, Some(expr.clone())).clone_for_update().into(),
)
}
};
// check if there is a local var in the function that conflicts with parameter
// if it does then emit a let statement and continue
if func_let_vars.contains(&expr.syntax().text().to_string()) {
insert_let_stmt();
continue;
}
let inline_direct = |usage, replacement: &ast::Expr| { let inline_direct = |usage, replacement: &ast::Expr| {
if let Some(field) = path_expr_as_record_field(usage) { if let Some(field) = path_expr_as_record_field(usage) {
cov_mark::hit!(inline_call_inline_direct_field); cov_mark::hit!(inline_call_inline_direct_field);
@ -383,9 +421,7 @@ fn inline(
ted::replace(usage.syntax(), &replacement.syntax().clone_for_update()); ted::replace(usage.syntax(), &replacement.syntax().clone_for_update());
} }
}; };
// izip confuses RA due to our lack of hygiene info currently losing us type info causing incorrect errors
let usages: &[ast::PathExpr] = &*usages;
let expr: &ast::Expr = expr;
match usages { match usages {
// inline single use closure arguments // inline single use closure arguments
[usage] [usage]
@ -408,18 +444,11 @@ fn inline(
} }
// can't inline, emit a let statement // can't inline, emit a let statement
_ => { _ => {
let ty = insert_let_stmt();
sema.type_of_expr(expr).filter(TypeInfo::has_adjustment).and(param_ty.clone());
if let Some(stmt_list) = body.stmt_list() {
stmt_list.push_front(
make::let_stmt(pat.clone(), ty, Some(expr.clone()))
.clone_for_update()
.into(),
)
}
} }
} }
} }
if let Some(generic_arg_list) = generic_arg_list.clone() { if let Some(generic_arg_list) = generic_arg_list.clone() {
if let Some((target, source)) = &sema.scope(node.syntax()).zip(sema.scope(fn_body.syntax())) if let Some((target, source)) = &sema.scope(node.syntax()).zip(sema.scope(fn_body.syntax()))
{ {
@ -1256,4 +1285,37 @@ impl A {
"#, "#,
) )
} }
#[test]
fn local_variable_shadowing_callers_argument() {
check_assist(
inline_call,
r#"
fn foo(bar: u32, baz: u32) -> u32 {
let a = 1;
bar * baz * a * 6
}
fn main() {
let a = 7;
let b = 1;
let res = foo$0(a, b);
}
"#,
r#"
fn foo(bar: u32, baz: u32) -> u32 {
let a = 1;
bar * baz * a * 6
}
fn main() {
let a = 7;
let b = 1;
let res = {
let bar = a;
let a = 1;
bar * b * a * 6
};
}
"#,
);
}
} }
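The new `func_let_vars` pass above exists because textual inlining is unsound when a callee-local binding shadows a caller argument; the added test demonstrates it, and the same hazard can be seen in plain Rust:

```rust
// Plain-Rust illustration of the shadowing hazard inline_call now guards
// against: substituting the argument `a` directly for `bar` would let the
// callee's own `let a = 1;` capture it.
fn foo(bar: u32, baz: u32) -> u32 {
    let a = 1;
    bar * baz * a * 6
}

fn main() {
    let a = 7;
    let b = 1;

    // Correct inlining binds the argument to `bar` *before* the callee's
    // `let a = 1;` shadows the caller's `a`.
    let inlined = {
        let bar = a;
        let a = 1;
        bar * b * a * 6
    };

    assert_eq!(inlined, foo(a, b));
}
```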


@ -113,7 +113,7 @@ pub(crate) fn inline_local_variable(acc: &mut Assists, ctx: &AssistContext<'_>)
.collect::<Option<Vec<_>>>()?; .collect::<Option<Vec<_>>>()?;
let init_str = initializer_expr.syntax().text().to_string(); let init_str = initializer_expr.syntax().text().to_string();
let init_in_paren = format!("({})", &init_str); let init_in_paren = format!("({init_str})");
let target = match target { let target = match target {
ast::NameOrNameRef::Name(it) => it.syntax().text_range(), ast::NameOrNameRef::Name(it) => it.syntax().text_range(),
@ -132,7 +132,7 @@ pub(crate) fn inline_local_variable(acc: &mut Assists, ctx: &AssistContext<'_>)
let replacement = if should_wrap { &init_in_paren } else { &init_str }; let replacement = if should_wrap { &init_in_paren } else { &init_str };
if ast::RecordExprField::for_field_name(&name).is_some() { if ast::RecordExprField::for_field_name(&name).is_some() {
cov_mark::hit!(inline_field_shorthand); cov_mark::hit!(inline_field_shorthand);
builder.insert(range.end(), format!(": {}", replacement)); builder.insert(range.end(), format!(": {replacement}"));
} else { } else {
builder.replace(range, replacement.clone()) builder.replace(range, replacement.clone())
} }


@ -127,7 +127,7 @@ fn generate_unique_lifetime_param_name(
Some(type_params) => { Some(type_params) => {
let used_lifetime_params: FxHashSet<_> = let used_lifetime_params: FxHashSet<_> =
type_params.lifetime_params().map(|p| p.syntax().text().to_string()).collect(); type_params.lifetime_params().map(|p| p.syntax().text().to_string()).collect();
('a'..='z').map(|it| format!("'{}", it)).find(|it| !used_lifetime_params.contains(it)) ('a'..='z').map(|it| format!("'{it}")).find(|it| !used_lifetime_params.contains(it))
} }
None => Some("'a".to_string()), None => Some("'a".to_string()),
} }


@ -78,7 +78,7 @@ pub(crate) fn merge_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
.join(" | ") .join(" | ")
}; };
let arm = format!("{} => {},", pats, current_expr.syntax().text()); let arm = format!("{pats} => {current_expr},");
if let [first, .., last] = &*arms_to_merge { if let [first, .., last] = &*arms_to_merge {
let start = first.syntax().text_range().start(); let start = first.syntax().text_range().start();


@ -40,11 +40,11 @@ pub(crate) fn move_from_mod_rs(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
let target = source_file.syntax().text_range(); let target = source_file.syntax().text_range();
let module_name = module.name(ctx.db())?.to_string(); let module_name = module.name(ctx.db())?.to_string();
let path = format!("../{}.rs", module_name); let path = format!("../{module_name}.rs");
let dst = AnchoredPathBuf { anchor: ctx.file_id(), path }; let dst = AnchoredPathBuf { anchor: ctx.file_id(), path };
acc.add( acc.add(
AssistId("move_from_mod_rs", AssistKind::Refactor), AssistId("move_from_mod_rs", AssistKind::Refactor),
format!("Convert {}/mod.rs to {}.rs", module_name, module_name), format!("Convert {module_name}/mod.rs to {module_name}.rs"),
target, target,
|builder| { |builder| {
builder.move_file(ctx.file_id(), dst); builder.move_file(ctx.file_id(), dst);


@ -133,16 +133,16 @@ pub(crate) fn move_arm_cond_to_match_guard(
}; };
let then_arm_end = match_arm.syntax().text_range().end(); let then_arm_end = match_arm.syntax().text_range().end();
let indent_level = match_arm.indent_level(); let indent_level = match_arm.indent_level();
let spaces = " ".repeat(indent_level.0 as _); let spaces = indent_level;
let mut first = true; let mut first = true;
for (cond, block) in conds_blocks { for (cond, block) in conds_blocks {
if !first { if !first {
edit.insert(then_arm_end, format!("\n{}", spaces)); edit.insert(then_arm_end, format!("\n{spaces}"));
} else { } else {
first = false; first = false;
} }
let guard = format!("{} if {} => ", match_pat, cond.syntax().text()); let guard = format!("{match_pat} if {cond} => ");
edit.insert(then_arm_end, guard); edit.insert(then_arm_end, guard);
let only_expr = block.statements().next().is_none(); let only_expr = block.statements().next().is_none();
match &block.tail_expr() { match &block.tail_expr() {
@ -158,7 +158,7 @@ pub(crate) fn move_arm_cond_to_match_guard(
} }
if let Some(e) = tail { if let Some(e) = tail {
cov_mark::hit!(move_guard_ifelse_else_tail); cov_mark::hit!(move_guard_ifelse_else_tail);
let guard = format!("\n{}{} => ", spaces, match_pat); let guard = format!("\n{spaces}{match_pat} => ");
edit.insert(then_arm_end, guard); edit.insert(then_arm_end, guard);
let only_expr = e.statements().next().is_none(); let only_expr = e.statements().next().is_none();
match &e.tail_expr() { match &e.tail_expr() {
@ -183,7 +183,7 @@ pub(crate) fn move_arm_cond_to_match_guard(
{ {
cov_mark::hit!(move_guard_ifelse_has_wildcard); cov_mark::hit!(move_guard_ifelse_has_wildcard);
} }
_ => edit.insert(then_arm_end, format!("\n{}{} => {{}}", spaces, match_pat)), _ => edit.insert(then_arm_end, format!("\n{spaces}{match_pat} => {{}}")),
} }
} }
}, },
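`let spaces = indent_level;` works because `IndentLevel` implements `Display`, so the indentation can be interpolated directly instead of being rebuilt with `" ".repeat(..)`. A self-contained sketch of that pattern, using a stand-in newtype (the real `syntax::IndentLevel` may differ in width per level):

```rust
use std::fmt;

// Hypothetical stand-in for syntax::IndentLevel, shown only to illustrate the
// Display-based interpolation used above.
#[derive(Clone, Copy)]
struct IndentLevel(u8);

impl fmt::Display for IndentLevel {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Assume four spaces per level here.
        for _ in 0..self.0 {
            f.write_str("    ")?;
        }
        Ok(())
    }
}

fn main() {
    let indent = IndentLevel(2);
    let guard = format!("\n{indent}Pat if cond => ");
    assert_eq!(guard, "\n        Pat if cond => ");
}
```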


@ -52,7 +52,7 @@ pub(crate) fn move_module_to_file(acc: &mut Assists, ctx: &AssistContext<'_>) ->
let mut buf = String::from("./"); let mut buf = String::from("./");
match parent_module.name(ctx.db()) { match parent_module.name(ctx.db()) {
Some(name) if !parent_module.is_mod_rs(ctx.db()) => { Some(name) if !parent_module.is_mod_rs(ctx.db()) => {
format_to!(buf, "{}/", name) format_to!(buf, "{name}/")
} }
_ => (), _ => (),
} }
@ -82,7 +82,7 @@ pub(crate) fn move_module_to_file(acc: &mut Assists, ctx: &AssistContext<'_>) ->
items items
}; };
let buf = format!("mod {};", module_name); let buf = format!("mod {module_name};");
let replacement_start = match module_ast.mod_token() { let replacement_start = match module_ast.mod_token() {
Some(mod_token) => mod_token.text_range(), Some(mod_token) => mod_token.text_range(),


@ -40,11 +40,11 @@ pub(crate) fn move_to_mod_rs(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
let target = source_file.syntax().text_range(); let target = source_file.syntax().text_range();
let module_name = module.name(ctx.db())?.to_string(); let module_name = module.name(ctx.db())?.to_string();
let path = format!("./{}/mod.rs", module_name); let path = format!("./{module_name}/mod.rs");
let dst = AnchoredPathBuf { anchor: ctx.file_id(), path }; let dst = AnchoredPathBuf { anchor: ctx.file_id(), path };
acc.add( acc.add(
AssistId("move_to_mod_rs", AssistKind::Refactor), AssistId("move_to_mod_rs", AssistKind::Refactor),
format!("Convert {}.rs to {}/mod.rs", module_name, module_name), format!("Convert {module_name}.rs to {module_name}/mod.rs"),
target, target,
|builder| { |builder| {
builder.move_file(ctx.file_id(), dst); builder.move_file(ctx.file_id(), dst);


@ -38,7 +38,7 @@ pub(crate) fn reformat_number_literal(acc: &mut Assists, ctx: &AssistContext<'_>
converted.push_str(suffix); converted.push_str(suffix);
let group_id = GroupLabel("Reformat number literal".into()); let group_id = GroupLabel("Reformat number literal".into());
let label = format!("Convert {} to {}", literal, converted); let label = format!("Convert {literal} to {converted}");
let range = literal.syntax().text_range(); let range = literal.syntax().text_range();
acc.add_group( acc.add_group(
&group_id, &group_id,


@ -54,7 +54,7 @@ pub(crate) fn qualify_method_call(acc: &mut Assists, ctx: &AssistContext<'_>) ->
acc.add( acc.add(
AssistId("qualify_method_call", AssistKind::RefactorInline), AssistId("qualify_method_call", AssistKind::RefactorInline),
format!("Qualify `{}` method call", ident.text()), format!("Qualify `{ident}` method call"),
range, range,
|builder| { |builder| {
qualify_candidate.qualify( qualify_candidate.qualify(


@ -118,14 +118,14 @@ impl QualifyCandidate<'_> {
match self { match self {
QualifyCandidate::QualifierStart(segment, generics) => { QualifyCandidate::QualifierStart(segment, generics) => {
let generics = generics.as_ref().map_or_else(String::new, ToString::to_string); let generics = generics.as_ref().map_or_else(String::new, ToString::to_string);
replacer(format!("{}{}::{}", import, generics, segment)); replacer(format!("{import}{generics}::{segment}"));
} }
QualifyCandidate::UnqualifiedName(generics) => { QualifyCandidate::UnqualifiedName(generics) => {
let generics = generics.as_ref().map_or_else(String::new, ToString::to_string); let generics = generics.as_ref().map_or_else(String::new, ToString::to_string);
replacer(format!("{}{}", import, generics)); replacer(format!("{import}{generics}"));
} }
QualifyCandidate::TraitAssocItem(qualifier, segment) => { QualifyCandidate::TraitAssocItem(qualifier, segment) => {
replacer(format!("<{} as {}>::{}", qualifier, import, segment)); replacer(format!("<{qualifier} as {import}>::{segment}"));
} }
QualifyCandidate::TraitMethod(db, mcall_expr) => { QualifyCandidate::TraitMethod(db, mcall_expr) => {
Self::qualify_trait_method(db, mcall_expr, replacer, import, item); Self::qualify_trait_method(db, mcall_expr, replacer, import, item);
@ -155,16 +155,11 @@ impl QualifyCandidate<'_> {
hir::Access::Exclusive => make::expr_ref(receiver, true), hir::Access::Exclusive => make::expr_ref(receiver, true),
hir::Access::Owned => receiver, hir::Access::Owned => receiver,
}; };
replacer(format!( let arg_list = match arg_list {
"{}::{}{}{}", Some(args) => make::arg_list(iter::once(receiver).chain(args)),
import, None => make::arg_list(iter::once(receiver)),
method_name, };
generics, replacer(format!("{import}::{method_name}{generics}{arg_list}"));
match arg_list {
Some(args) => make::arg_list(iter::once(receiver).chain(args)),
None => make::arg_list(iter::once(receiver)),
}
));
} }
Some(()) Some(())
} }
@ -218,15 +213,17 @@ fn group_label(candidate: &ImportCandidate) -> GroupLabel {
} }
} }
.text(); .text();
GroupLabel(format!("Qualify {}", name)) GroupLabel(format!("Qualify {name}"))
} }
fn label(candidate: &ImportCandidate, import: &LocatedImport) -> String { fn label(candidate: &ImportCandidate, import: &LocatedImport) -> String {
let import_path = &import.import_path;
match candidate { match candidate {
ImportCandidate::Path(candidate) if candidate.qualifier.is_none() => { ImportCandidate::Path(candidate) if candidate.qualifier.is_none() => {
format!("Qualify as `{}`", import.import_path) format!("Qualify as `{import_path}`")
} }
_ => format!("Qualify with `{}`", import.import_path), _ => format!("Qualify with `{import_path}`"),
} }
} }


@ -34,13 +34,10 @@ pub(crate) fn make_raw_string(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opt
let hashes = "#".repeat(required_hashes(&value).max(1)); let hashes = "#".repeat(required_hashes(&value).max(1));
if matches!(value, Cow::Borrowed(_)) { if matches!(value, Cow::Borrowed(_)) {
// Avoid replacing the whole string to better position the cursor. // Avoid replacing the whole string to better position the cursor.
edit.insert(token.syntax().text_range().start(), format!("r{}", hashes)); edit.insert(token.syntax().text_range().start(), format!("r{hashes}"));
edit.insert(token.syntax().text_range().end(), hashes); edit.insert(token.syntax().text_range().end(), hashes);
} else { } else {
edit.replace( edit.replace(token.syntax().text_range(), format!("r{hashes}\"{value}\"{hashes}"));
token.syntax().text_range(),
format!("r{}\"{}\"{}", hashes, value, hashes),
);
} }
}, },
) )
@ -83,7 +80,7 @@ pub(crate) fn make_usual_string(acc: &mut Assists, ctx: &AssistContext<'_>) -> O
} }
} }
edit.replace(token.syntax().text_range(), format!("\"{}\"", escaped)); edit.replace(token.syntax().text_range(), format!("\"{escaped}\""));
}, },
) )
} }
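The `required_hashes(&value).max(1)` call that both branches above rely on exists because a raw string needs more `#`s than any `"#…` run inside its contents. A tiny standalone demonstration:

```rust
fn main() {
    // The contents contain `"#`, so a single-hash raw string would terminate
    // early; two hashes are needed, which is what required_hashes computes.
    let usual = "say \"#hello\"";
    let raw = r##"say "#hello""##;
    assert_eq!(usual, raw);
}
```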


@ -102,7 +102,7 @@ pub(crate) fn remove_dbg(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<(
}; };
( (
macro_call.syntax().text_range(), macro_call.syntax().text_range(),
if wrap { format!("({})", expr) } else { expr.to_string() }, if wrap { format!("({expr})") } else { expr.to_string() },
) )
} }
// dbg!(expr0, expr1, ...) // dbg!(expr0, expr1, ...)
@ -127,8 +127,8 @@ mod tests {
fn check(ra_fixture_before: &str, ra_fixture_after: &str) { fn check(ra_fixture_before: &str, ra_fixture_after: &str) {
check_assist( check_assist(
remove_dbg, remove_dbg,
&format!("fn main() {{\n{}\n}}", ra_fixture_before), &format!("fn main() {{\n{ra_fixture_before}\n}}"),
&format!("fn main() {{\n{}\n}}", ra_fixture_after), &format!("fn main() {{\n{ra_fixture_after}\n}}"),
); );
} }
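The `wrap` branch kept above matters because removing `dbg!` without parentheses can change operator precedence, for example:

```rust
fn main() {
    let with_dbg = dbg!(1 + 2) * 3; // before the assist
    let wrapped = (1 + 2) * 3;      // after: the inner expression is kept as `({expr})`
    let unwrapped = 1 + 2 * 3;      // what a naive, unwrapped removal would mean
    assert_eq!(with_dbg, wrapped);
    assert_ne!(wrapped, unwrapped);
}
```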


@ -124,7 +124,7 @@ fn add_assist(
) -> Option<()> { ) -> Option<()> {
let target = attr.syntax().text_range(); let target = attr.syntax().text_range();
let annotated_name = adt.name()?; let annotated_name = adt.name()?;
let label = format!("Convert to manual `impl {} for {}`", replace_trait_path, annotated_name); let label = format!("Convert to manual `impl {replace_trait_path} for {annotated_name}`");
acc.add( acc.add(
AssistId("replace_derive_with_manual_impl", AssistKind::Refactor), AssistId("replace_derive_with_manual_impl", AssistKind::Refactor),
@ -158,11 +158,8 @@ fn add_assist(
} }
} }
builder.insert_snippet( let rendered = render_snippet(cap, impl_def.syntax(), cursor);
cap, builder.insert_snippet(cap, insert_pos, format!("\n\n{rendered}"))
insert_pos,
format!("\n\n{}", render_snippet(cap, impl_def.syntax(), cursor)),
)
} }
}; };
}, },


@ -62,7 +62,7 @@ pub(crate) fn replace_or_with_or_else(acc: &mut Assists, ctx: &AssistContext<'_>
acc.add( acc.add(
AssistId("replace_or_with_or_else", AssistKind::RefactorRewrite), AssistId("replace_or_with_or_else", AssistKind::RefactorRewrite),
format!("Replace {} with {}", name.text(), replace), format!("Replace {name} with {replace}"),
call.syntax().text_range(), call.syntax().text_range(),
|builder| { |builder| {
builder.replace(name.syntax().text_range(), replace); builder.replace(name.syntax().text_range(), replace);
@ -138,7 +138,7 @@ pub(crate) fn replace_or_else_with_or(acc: &mut Assists, ctx: &AssistContext<'_>
acc.add( acc.add(
AssistId("replace_or_else_with_or", AssistKind::RefactorRewrite), AssistId("replace_or_else_with_or", AssistKind::RefactorRewrite),
format!("Replace {} with {}", name.text(), replace), format!("Replace {name} with {replace}"),
call.syntax().text_range(), call.syntax().text_range(),
|builder| { |builder| {
builder.replace(name.syntax().text_range(), replace); builder.replace(name.syntax().text_range(), replace);


@ -79,7 +79,7 @@ pub(crate) fn replace_turbofish_with_explicit_type(
"Replace turbofish with explicit type", "Replace turbofish with explicit type",
TextRange::new(initializer_start, turbofish_range.end()), TextRange::new(initializer_start, turbofish_range.end()),
|builder| { |builder| {
builder.insert(ident_range.end(), format!(": {}", returned_type)); builder.insert(ident_range.end(), format!(": {returned_type}"));
builder.delete(turbofish_range); builder.delete(turbofish_range);
}, },
); );


@ -44,6 +44,12 @@ pub(crate) fn unnecessary_async(acc: &mut Assists, ctx: &AssistContext<'_>) -> O
if function.body()?.syntax().descendants().find_map(ast::AwaitExpr::cast).is_some() { if function.body()?.syntax().descendants().find_map(ast::AwaitExpr::cast).is_some() {
return None; return None;
} }
// Do nothing if the method is a member of trait.
if let Some(impl_) = function.syntax().ancestors().nth(2).and_then(ast::Impl::cast) {
if let Some(_) = impl_.trait_() {
return None;
}
}
// Remove the `async` keyword plus whitespace after it, if any. // Remove the `async` keyword plus whitespace after it, if any.
let async_range = { let async_range = {
@ -254,4 +260,18 @@ pub async fn f(s: &S) { s.f2() }"#,
fn does_not_apply_when_not_on_prototype() { fn does_not_apply_when_not_on_prototype() {
check_assist_not_applicable(unnecessary_async, "pub async fn f() { $0f2() }") check_assist_not_applicable(unnecessary_async, "pub async fn f() { $0f2() }")
} }
#[test]
fn does_not_apply_on_async_trait_method() {
check_assist_not_applicable(
unnecessary_async,
r#"
trait Trait {
async fn foo();
}
impl Trait for () {
$0async fn foo() {}
}"#,
);
}
} }


@ -69,13 +69,13 @@ pub(crate) fn unwrap_tuple(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
for (pat, ty, expr) in for (pat, ty, expr) in
itertools::izip!(tuple_pat.fields(), tys.fields(), tuple_init.fields()) itertools::izip!(tuple_pat.fields(), tys.fields(), tuple_init.fields())
{ {
zipped_decls.push_str(&format!("{}let {pat}: {ty} = {expr};\n", indents)) zipped_decls.push_str(&format!("{indents}let {pat}: {ty} = {expr};\n"))
} }
edit.replace(parent.text_range(), zipped_decls.trim()); edit.replace(parent.text_range(), zipped_decls.trim());
} else { } else {
let mut zipped_decls = String::new(); let mut zipped_decls = String::new();
for (pat, expr) in itertools::izip!(tuple_pat.fields(), tuple_init.fields()) { for (pat, expr) in itertools::izip!(tuple_pat.fields(), tuple_init.fields()) {
zipped_decls.push_str(&format!("{}let {pat} = {expr};\n", indents)); zipped_decls.push_str(&format!("{indents}let {pat} = {expr};\n"));
} }
edit.replace(parent.text_range(), zipped_decls.trim()); edit.replace(parent.text_range(), zipped_decls.trim());
} }


@ -76,11 +76,11 @@ pub(crate) fn wrap_return_type_in_result(acc: &mut Assists, ctx: &AssistContext<
match ctx.config.snippet_cap { match ctx.config.snippet_cap {
Some(cap) => { Some(cap) => {
let snippet = format!("Result<{}, ${{0:_}}>", type_ref); let snippet = format!("Result<{type_ref}, ${{0:_}}>");
builder.replace_snippet(cap, type_ref.syntax().text_range(), snippet) builder.replace_snippet(cap, type_ref.syntax().text_range(), snippet)
} }
None => builder None => builder
.replace(type_ref.syntax().text_range(), format!("Result<{}, _>", type_ref)), .replace(type_ref.syntax().text_range(), format!("Result<{type_ref}, _>")),
} }
}, },
) )


@ -120,6 +120,7 @@ mod handlers {
mod convert_into_to_from; mod convert_into_to_from;
mod convert_iter_for_each_to_for; mod convert_iter_for_each_to_for;
mod convert_let_else_to_match; mod convert_let_else_to_match;
mod convert_match_to_let_else;
mod convert_tuple_struct_to_named_struct; mod convert_tuple_struct_to_named_struct;
mod convert_named_struct_to_tuple_struct; mod convert_named_struct_to_tuple_struct;
mod convert_to_guarded_return; mod convert_to_guarded_return;
@ -220,6 +221,7 @@ mod handlers {
convert_iter_for_each_to_for::convert_for_loop_with_for_each, convert_iter_for_each_to_for::convert_for_loop_with_for_each,
convert_let_else_to_match::convert_let_else_to_match, convert_let_else_to_match::convert_let_else_to_match,
convert_named_struct_to_tuple_struct::convert_named_struct_to_tuple_struct, convert_named_struct_to_tuple_struct::convert_named_struct_to_tuple_struct,
convert_match_to_let_else::convert_match_to_let_else,
convert_to_guarded_return::convert_to_guarded_return, convert_to_guarded_return::convert_to_guarded_return,
convert_tuple_struct_to_named_struct::convert_tuple_struct_to_named_struct, convert_tuple_struct_to_named_struct::convert_tuple_struct_to_named_struct,
convert_two_arm_bool_match_to_matches_macro::convert_two_arm_bool_match_to_matches_macro, convert_two_arm_bool_match_to_matches_macro::convert_two_arm_bool_match_to_matches_macro,


@ -30,6 +30,7 @@ pub(crate) const TEST_CONFIG: AssistConfig = AssistConfig {
skip_glob_imports: true, skip_glob_imports: true,
}, },
prefer_no_std: false, prefer_no_std: false,
assist_emit_must_use: false,
}; };
pub(crate) fn with_single_file(text: &str) -> (RootDatabase, FileId) { pub(crate) fn with_single_file(text: &str) -> (RootDatabase, FileId) {


@ -407,6 +407,27 @@ fn main() {
) )
} }
#[test]
fn doctest_convert_match_to_let_else() {
check_doc_test(
"convert_match_to_let_else",
r#####"
//- minicore: option
fn foo(opt: Option<()>) {
let val = $0match opt {
Some(it) => it,
None => return,
};
}
"#####,
r#####"
fn foo(opt: Option<()>) {
let Some(val) = opt else { return };
}
"#####,
)
}
#[test] #[test]
fn doctest_convert_named_struct_to_tuple_struct() { fn doctest_convert_named_struct_to_tuple_struct() {
check_doc_test( check_doc_test(


@ -189,8 +189,8 @@ pub(crate) fn render_snippet(_cap: SnippetCap, node: &SyntaxNode, cursor: Cursor
let mut placeholder = cursor.node().to_string(); let mut placeholder = cursor.node().to_string();
escape(&mut placeholder); escape(&mut placeholder);
let tab_stop = match cursor { let tab_stop = match cursor {
Cursor::Replace(placeholder) => format!("${{0:{}}}", placeholder), Cursor::Replace(placeholder) => format!("${{0:{placeholder}}}"),
Cursor::Before(placeholder) => format!("$0{}", placeholder), Cursor::Before(placeholder) => format!("$0{placeholder}"),
}; };
let mut buf = node.to_string(); let mut buf = node.to_string();
@ -539,17 +539,17 @@ impl ReferenceConversion {
ReferenceConversionType::AsRefSlice => { ReferenceConversionType::AsRefSlice => {
let type_argument_name = let type_argument_name =
self.ty.type_arguments().next().unwrap().display(db).to_string(); self.ty.type_arguments().next().unwrap().display(db).to_string();
format!("&[{}]", type_argument_name) format!("&[{type_argument_name}]")
} }
ReferenceConversionType::Dereferenced => { ReferenceConversionType::Dereferenced => {
let type_argument_name = let type_argument_name =
self.ty.type_arguments().next().unwrap().display(db).to_string(); self.ty.type_arguments().next().unwrap().display(db).to_string();
format!("&{}", type_argument_name) format!("&{type_argument_name}")
} }
ReferenceConversionType::Option => { ReferenceConversionType::Option => {
let type_argument_name = let type_argument_name =
self.ty.type_arguments().next().unwrap().display(db).to_string(); self.ty.type_arguments().next().unwrap().display(db).to_string();
format!("Option<&{}>", type_argument_name) format!("Option<&{type_argument_name}>")
} }
ReferenceConversionType::Result => { ReferenceConversionType::Result => {
let mut type_arguments = self.ty.type_arguments(); let mut type_arguments = self.ty.type_arguments();
@ -557,19 +557,19 @@ impl ReferenceConversion {
type_arguments.next().unwrap().display(db).to_string(); type_arguments.next().unwrap().display(db).to_string();
let second_type_argument_name = let second_type_argument_name =
type_arguments.next().unwrap().display(db).to_string(); type_arguments.next().unwrap().display(db).to_string();
format!("Result<&{}, &{}>", first_type_argument_name, second_type_argument_name) format!("Result<&{first_type_argument_name}, &{second_type_argument_name}>")
} }
} }
} }
pub(crate) fn getter(&self, field_name: String) -> String { pub(crate) fn getter(&self, field_name: String) -> String {
match self.conversion { match self.conversion {
ReferenceConversionType::Copy => format!("self.{}", field_name), ReferenceConversionType::Copy => format!("self.{field_name}"),
ReferenceConversionType::AsRefStr ReferenceConversionType::AsRefStr
| ReferenceConversionType::AsRefSlice | ReferenceConversionType::AsRefSlice
| ReferenceConversionType::Dereferenced | ReferenceConversionType::Dereferenced
| ReferenceConversionType::Option | ReferenceConversionType::Option
| ReferenceConversionType::Result => format!("self.{}.as_ref()", field_name), | ReferenceConversionType::Result => format!("self.{field_name}.as_ref()"),
} }
} }
} }


@ -41,7 +41,7 @@ fn gen_clone_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
let mut arms = vec![]; let mut arms = vec![];
for variant in list.variants() { for variant in list.variants() {
let name = variant.name()?; let name = variant.name()?;
let variant_name = make::ext::path_from_idents(["Self", &format!("{}", name)])?; let variant_name = make::ext::path_from_idents(["Self", &format!("{name}")])?;
match variant.field_list() { match variant.field_list() {
// => match self { Self::Name { x } => Self::Name { x: x.clone() } } // => match self { Self::Name { x } => Self::Name { x: x.clone() } }
@ -70,7 +70,7 @@ fn gen_clone_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
let mut pats = vec![]; let mut pats = vec![];
let mut fields = vec![]; let mut fields = vec![];
for (i, _) in list.fields().enumerate() { for (i, _) in list.fields().enumerate() {
let field_name = format!("arg{}", i); let field_name = format!("arg{i}");
let pat = make::ident_pat(false, false, make::name(&field_name)); let pat = make::ident_pat(false, false, make::name(&field_name));
pats.push(pat.into()); pats.push(pat.into());
@ -118,7 +118,7 @@ fn gen_clone_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
let mut fields = vec![]; let mut fields = vec![];
for (i, _) in field_list.fields().enumerate() { for (i, _) in field_list.fields().enumerate() {
let f_path = make::expr_path(make::ext::ident_path("self")); let f_path = make::expr_path(make::ext::ident_path("self"));
let target = make::expr_field(f_path, &format!("{}", i)); let target = make::expr_field(f_path, &format!("{i}"));
fields.push(gen_clone_call(target)); fields.push(gen_clone_call(target));
} }
let struct_name = make::expr_path(make::ext::ident_path("Self")); let struct_name = make::expr_path(make::ext::ident_path("Self"));
@ -151,7 +151,7 @@ fn gen_debug_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
let mut arms = vec![]; let mut arms = vec![];
for variant in list.variants() { for variant in list.variants() {
let name = variant.name()?; let name = variant.name()?;
let variant_name = make::ext::path_from_idents(["Self", &format!("{}", name)])?; let variant_name = make::ext::path_from_idents(["Self", &format!("{name}")])?;
let target = make::expr_path(make::ext::ident_path("f")); let target = make::expr_path(make::ext::ident_path("f"));
match variant.field_list() { match variant.field_list() {
@ -159,7 +159,7 @@ fn gen_debug_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
// => f.debug_struct(name) // => f.debug_struct(name)
let target = make::expr_path(make::ext::ident_path("f")); let target = make::expr_path(make::ext::ident_path("f"));
let method = make::name_ref("debug_struct"); let method = make::name_ref("debug_struct");
let struct_name = format!("\"{}\"", name); let struct_name = format!("\"{name}\"");
let args = make::arg_list(Some(make::expr_literal(&struct_name).into())); let args = make::arg_list(Some(make::expr_literal(&struct_name).into()));
let mut expr = make::expr_method_call(target, method, args); let mut expr = make::expr_method_call(target, method, args);
@ -173,8 +173,8 @@ fn gen_debug_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
// => <expr>.field("field_name", field) // => <expr>.field("field_name", field)
let method_name = make::name_ref("field"); let method_name = make::name_ref("field");
let name = make::expr_literal(&(format!("\"{}\"", field_name))).into(); let name = make::expr_literal(&(format!("\"{field_name}\""))).into();
let path = &format!("{}", field_name); let path = &format!("{field_name}");
let path = make::expr_path(make::ext::ident_path(path)); let path = make::expr_path(make::ext::ident_path(path));
let args = make::arg_list(vec![name, path]); let args = make::arg_list(vec![name, path]);
expr = make::expr_method_call(expr, method_name, args); expr = make::expr_method_call(expr, method_name, args);
@ -192,13 +192,13 @@ fn gen_debug_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
// => f.debug_tuple(name) // => f.debug_tuple(name)
let target = make::expr_path(make::ext::ident_path("f")); let target = make::expr_path(make::ext::ident_path("f"));
let method = make::name_ref("debug_tuple"); let method = make::name_ref("debug_tuple");
let struct_name = format!("\"{}\"", name); let struct_name = format!("\"{name}\"");
let args = make::arg_list(Some(make::expr_literal(&struct_name).into())); let args = make::arg_list(Some(make::expr_literal(&struct_name).into()));
let mut expr = make::expr_method_call(target, method, args); let mut expr = make::expr_method_call(target, method, args);
let mut pats = vec![]; let mut pats = vec![];
for (i, _) in list.fields().enumerate() { for (i, _) in list.fields().enumerate() {
let name = format!("arg{}", i); let name = format!("arg{i}");
// create a field pattern for use in `MyStruct(fields..)` // create a field pattern for use in `MyStruct(fields..)`
let field_name = make::name(&name); let field_name = make::name(&name);
@ -222,7 +222,7 @@ fn gen_debug_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
arms.push(make::match_arm(Some(pat.into()), None, expr)); arms.push(make::match_arm(Some(pat.into()), None, expr));
} }
None => { None => {
let fmt_string = make::expr_literal(&(format!("\"{}\"", name))).into(); let fmt_string = make::expr_literal(&(format!("\"{name}\""))).into();
let args = make::arg_list([target, fmt_string]); let args = make::arg_list([target, fmt_string]);
let macro_name = make::expr_path(make::ext::ident_path("write")); let macro_name = make::expr_path(make::ext::ident_path("write"));
let macro_call = make::expr_macro_call(macro_name, args); let macro_call = make::expr_macro_call(macro_name, args);
@ -244,7 +244,7 @@ fn gen_debug_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
} }
ast::Adt::Struct(strukt) => { ast::Adt::Struct(strukt) => {
let name = format!("\"{}\"", annotated_name); let name = format!("\"{annotated_name}\"");
let args = make::arg_list(Some(make::expr_literal(&name).into())); let args = make::arg_list(Some(make::expr_literal(&name).into()));
let target = make::expr_path(make::ext::ident_path("f")); let target = make::expr_path(make::ext::ident_path("f"));
@ -258,10 +258,10 @@ fn gen_debug_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
let mut expr = make::expr_method_call(target, method, args); let mut expr = make::expr_method_call(target, method, args);
for field in field_list.fields() { for field in field_list.fields() {
let name = field.name()?; let name = field.name()?;
let f_name = make::expr_literal(&(format!("\"{}\"", name))).into(); let f_name = make::expr_literal(&(format!("\"{name}\""))).into();
let f_path = make::expr_path(make::ext::ident_path("self")); let f_path = make::expr_path(make::ext::ident_path("self"));
let f_path = make::expr_ref(f_path, false); let f_path = make::expr_ref(f_path, false);
let f_path = make::expr_field(f_path, &format!("{}", name)); let f_path = make::expr_field(f_path, &format!("{name}"));
let args = make::arg_list([f_name, f_path]); let args = make::arg_list([f_name, f_path]);
expr = make::expr_method_call(expr, make::name_ref("field"), args); expr = make::expr_method_call(expr, make::name_ref("field"), args);
} }
@ -275,7 +275,7 @@ fn gen_debug_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
for (i, _) in field_list.fields().enumerate() { for (i, _) in field_list.fields().enumerate() {
let f_path = make::expr_path(make::ext::ident_path("self")); let f_path = make::expr_path(make::ext::ident_path("self"));
let f_path = make::expr_ref(f_path, false); let f_path = make::expr_ref(f_path, false);
let f_path = make::expr_field(f_path, &format!("{}", i)); let f_path = make::expr_field(f_path, &format!("{i}"));
let method = make::name_ref("field"); let method = make::name_ref("field");
expr = make::expr_method_call(expr, method, make::arg_list(Some(f_path))); expr = make::expr_method_call(expr, method, make::arg_list(Some(f_path)));
} }
@ -379,7 +379,7 @@ fn gen_hash_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
let mut stmts = vec![]; let mut stmts = vec![];
for (i, _) in field_list.fields().enumerate() { for (i, _) in field_list.fields().enumerate() {
let base = make::expr_path(make::ext::ident_path("self")); let base = make::expr_path(make::ext::ident_path("self"));
let target = make::expr_field(base, &format!("{}", i)); let target = make::expr_field(base, &format!("{i}"));
stmts.push(gen_hash_call(target)); stmts.push(gen_hash_call(target));
} }
make::block_expr(stmts, None).indent(ast::edit::IndentLevel(1)) make::block_expr(stmts, None).indent(ast::edit::IndentLevel(1))
@ -453,10 +453,10 @@ fn gen_partial_eq(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
for field in list.fields() { for field in list.fields() {
let field_name = field.name()?.to_string(); let field_name = field.name()?.to_string();
let l_name = &format!("l_{}", field_name); let l_name = &format!("l_{field_name}");
l_fields.push(gen_record_pat_field(&field_name, l_name)); l_fields.push(gen_record_pat_field(&field_name, l_name));
let r_name = &format!("r_{}", field_name); let r_name = &format!("r_{field_name}");
r_fields.push(gen_record_pat_field(&field_name, r_name)); r_fields.push(gen_record_pat_field(&field_name, r_name));
let lhs = make::expr_path(make::ext::ident_path(l_name)); let lhs = make::expr_path(make::ext::ident_path(l_name));
@ -484,12 +484,12 @@ fn gen_partial_eq(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
let mut r_fields = vec![]; let mut r_fields = vec![];
for (i, _) in list.fields().enumerate() { for (i, _) in list.fields().enumerate() {
let field_name = format!("{}", i); let field_name = format!("{i}");
let l_name = format!("l{}", field_name); let l_name = format!("l{field_name}");
l_fields.push(gen_tuple_field(&l_name)); l_fields.push(gen_tuple_field(&l_name));
let r_name = format!("r{}", field_name); let r_name = format!("r{field_name}");
r_fields.push(gen_tuple_field(&r_name)); r_fields.push(gen_tuple_field(&r_name));
let lhs = make::expr_path(make::ext::ident_path(&l_name)); let lhs = make::expr_path(make::ext::ident_path(&l_name));
@ -548,7 +548,7 @@ fn gen_partial_eq(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
Some(ast::FieldList::TupleFieldList(field_list)) => { Some(ast::FieldList::TupleFieldList(field_list)) => {
let mut expr = None; let mut expr = None;
for (i, _) in field_list.fields().enumerate() { for (i, _) in field_list.fields().enumerate() {
let idx = format!("{}", i); let idx = format!("{i}");
let lhs = make::expr_path(make::ext::ident_path("self")); let lhs = make::expr_path(make::ext::ident_path("self"));
let lhs = make::expr_field(lhs, &idx); let lhs = make::expr_field(lhs, &idx);
let rhs = make::expr_path(make::ext::ident_path("other")); let rhs = make::expr_path(make::ext::ident_path("other"));
@ -628,7 +628,7 @@ fn gen_partial_ord(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
Some(ast::FieldList::TupleFieldList(field_list)) => { Some(ast::FieldList::TupleFieldList(field_list)) => {
let mut exprs = vec![]; let mut exprs = vec![];
for (i, _) in field_list.fields().enumerate() { for (i, _) in field_list.fields().enumerate() {
let idx = format!("{}", i); let idx = format!("{i}");
let lhs = make::expr_path(make::ext::ident_path("self")); let lhs = make::expr_path(make::ext::ident_path("self"));
let lhs = make::expr_field(lhs, &idx); let lhs = make::expr_field(lhs, &idx);
let rhs = make::expr_path(make::ext::ident_path("other")); let rhs = make::expr_path(make::ext::ident_path("other"));


@ -69,10 +69,6 @@ pub(crate) fn complete_postfix(
} }
} }
if !ctx.config.snippets.is_empty() {
add_custom_postfix_completions(acc, ctx, &postfix_snippet, &receiver_text);
}
let try_enum = TryEnum::from_ty(&ctx.sema, &receiver_ty.strip_references()); let try_enum = TryEnum::from_ty(&ctx.sema, &receiver_ty.strip_references());
if let Some(try_enum) = &try_enum { if let Some(try_enum) = &try_enum {
match try_enum { match try_enum {
@ -140,6 +136,10 @@ pub(crate) fn complete_postfix(
None => return, None => return,
}; };
if !ctx.config.snippets.is_empty() {
add_custom_postfix_completions(acc, ctx, &postfix_snippet, &receiver_text);
}
match try_enum { match try_enum {
Some(try_enum) => match try_enum { Some(try_enum) => match try_enum {
TryEnum::Result => { TryEnum::Result => {
@ -613,4 +613,25 @@ fn main() {
r#"fn main() { log::error!("{}", 2+2) }"#, r#"fn main() { log::error!("{}", 2+2) }"#,
); );
} }
#[test]
fn postfix_custom_snippets_completion_for_references() {
check_edit_with_config(
CompletionConfig {
snippets: vec![Snippet::new(
&[],
&["ok".into()],
&["Ok(${receiver})".into()],
"",
&[],
crate::SnippetScope::Expr,
)
.unwrap()],
..TEST_CONFIG
},
"ok",
r#"fn main() { &&42.$0 }"#,
r#"fn main() { Ok(&&42) }"#,
);
}
} }


@ -446,33 +446,47 @@ impl<'a> FindUsages<'a> {
}) })
} }
// FIXME: There should be optimization potential here let find_nodes = move |name: &str, node: &syntax::SyntaxNode, offset: TextSize| {
// Currently we try to descend everything we find which node.token_at_offset(offset).find(|it| it.text() == name).map(|token| {
// means we call `Semantics::descend_into_macros` on // FIXME: There should be optimization potential here
// every textual hit. That function is notoriously // Currently we try to descend everything we find which
// expensive even for things that do not get down mapped // means we call `Semantics::descend_into_macros` on
// into macros. // every textual hit. That function is notoriously
// expensive even for things that do not get down mapped
// into macros.
sema.descend_into_macros(token).into_iter().filter_map(|it| it.parent())
})
};
for (text, file_id, search_range) in scope_files(sema, &search_scope) { for (text, file_id, search_range) in scope_files(sema, &search_scope) {
let tree = Lazy::new(move || sema.parse(file_id).syntax().clone()); let tree = Lazy::new(move || sema.parse(file_id).syntax().clone());
// Search for occurrences of the items name // Search for occurrences of the items name
for offset in match_indices(&text, finder, search_range) { for offset in match_indices(&text, finder, search_range) {
for name in sema.find_nodes_at_offset_with_descend(&tree, offset) { if let Some(iter) = find_nodes(name, &tree, offset) {
if match name { for name in iter.filter_map(ast::NameLike::cast) {
ast::NameLike::NameRef(name_ref) => self.found_name_ref(&name_ref, sink), if match name {
ast::NameLike::Name(name) => self.found_name(&name, sink), ast::NameLike::NameRef(name_ref) => {
ast::NameLike::Lifetime(lifetime) => self.found_lifetime(&lifetime, sink), self.found_name_ref(&name_ref, sink)
} { }
return; ast::NameLike::Name(name) => self.found_name(&name, sink),
ast::NameLike::Lifetime(lifetime) => {
self.found_lifetime(&lifetime, sink)
}
} {
return;
}
} }
} }
} }
// Search for occurrences of the `Self` referring to our type // Search for occurrences of the `Self` referring to our type
if let Some((self_ty, finder)) = &include_self_kw_refs { if let Some((self_ty, finder)) = &include_self_kw_refs {
for offset in match_indices(&text, finder, search_range) { for offset in match_indices(&text, finder, search_range) {
for name_ref in sema.find_nodes_at_offset_with_descend(&tree, offset) { if let Some(iter) = find_nodes("Self", &tree, offset) {
if self.found_self_ty_name_ref(self_ty, &name_ref, sink) { for name_ref in iter.filter_map(ast::NameRef::cast) {
return; if self.found_self_ty_name_ref(self_ty, &name_ref, sink) {
return;
}
} }
} }
} }
@ -493,17 +507,21 @@ impl<'a> FindUsages<'a> {
let tree = Lazy::new(move || sema.parse(file_id).syntax().clone()); let tree = Lazy::new(move || sema.parse(file_id).syntax().clone());
for offset in match_indices(&text, finder, search_range) { for offset in match_indices(&text, finder, search_range) {
for name_ref in sema.find_nodes_at_offset_with_descend(&tree, offset) { if let Some(iter) = find_nodes("super", &tree, offset) {
if self.found_name_ref(&name_ref, sink) { for name_ref in iter.filter_map(ast::NameRef::cast) {
return; if self.found_name_ref(&name_ref, sink) {
return;
}
} }
} }
} }
if let Some(finder) = &is_crate_root { if let Some(finder) = &is_crate_root {
for offset in match_indices(&text, finder, search_range) { for offset in match_indices(&text, finder, search_range) {
for name_ref in sema.find_nodes_at_offset_with_descend(&tree, offset) { if let Some(iter) = find_nodes("crate", &tree, offset) {
if self.found_name_ref(&name_ref, sink) { for name_ref in iter.filter_map(ast::NameRef::cast) {
return; if self.found_name_ref(&name_ref, sink) {
return;
}
} }
} }
} }
@ -544,9 +562,11 @@ impl<'a> FindUsages<'a> {
let finder = &Finder::new("self"); let finder = &Finder::new("self");
for offset in match_indices(&text, finder, search_range) { for offset in match_indices(&text, finder, search_range) {
for name_ref in sema.find_nodes_at_offset_with_descend(&tree, offset) { if let Some(iter) = find_nodes("self", &tree, offset) {
if self.found_self_module_name_ref(&name_ref, sink) { for name_ref in iter.filter_map(ast::NameRef::cast) {
return; if self.found_self_module_name_ref(&name_ref, sink) {
return;
}
} }
} }
} }
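The restructuring above adds a cheap textual check (`token_at_offset(..).find(|it| it.text() == name)`) before paying for `descend_into_macros` on every index hit. A stripped-down sketch of that filtering idea in plain Rust; the helper names and the `expensive_analysis` closure are invented stand-ins, not the real `Semantics` API:

```rust
// Rough stand-in for `node.token_at_offset(..)`: the whitespace-separated word
// covering `offset`, if any.
fn token_at_offset(text: &str, offset: usize) -> Option<&str> {
    text.split_whitespace().find(|word| {
        let start = word.as_ptr() as usize - text.as_ptr() as usize;
        (start..start + word.len()).contains(&offset)
    })
}

// Only offsets whose token really spells `name` pay for the expensive step.
fn find_usages(text: &str, name: &str, offsets: &[usize], mut expensive_analysis: impl FnMut(usize)) {
    for &offset in offsets {
        if token_at_offset(text, offset) == Some(name) {
            expensive_analysis(offset);
        }
    }
}

fn main() {
    // "foobar" at offset 8 is a raw-text match for the substring finder but
    // not a real token match, so it is filtered out before any expensive work.
    let text = "foo bar foobar foo";
    let mut hits = Vec::new();
    find_usages(text, "foo", &[0, 4, 8, 15], |offset| hits.push(offset));
    assert_eq!(hits, vec![0, 15]);
}
```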


@ -73,8 +73,8 @@ impl MonikerResult {
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct PackageInformation { pub struct PackageInformation {
pub name: String, pub name: String,
pub repo: String, pub repo: Option<String>,
pub version: String, pub version: Option<String>,
} }
pub(crate) fn crate_for_file(db: &RootDatabase, file_id: FileId) -> Option<Crate> { pub(crate) fn crate_for_file(db: &RootDatabase, file_id: FileId) -> Option<Crate> {
@ -256,18 +256,18 @@ pub(crate) fn def_to_moniker(
let (name, repo, version) = match krate.origin(db) { let (name, repo, version) = match krate.origin(db) {
CrateOrigin::CratesIo { repo, name } => ( CrateOrigin::CratesIo { repo, name } => (
name.unwrap_or(krate.display_name(db)?.canonical_name().to_string()), name.unwrap_or(krate.display_name(db)?.canonical_name().to_string()),
repo?, repo,
krate.version(db)?, krate.version(db),
), ),
CrateOrigin::Lang(lang) => ( CrateOrigin::Lang(lang) => (
krate.display_name(db)?.canonical_name().to_string(), krate.display_name(db)?.canonical_name().to_string(),
"https://github.com/rust-lang/rust/".to_string(), Some("https://github.com/rust-lang/rust/".to_string()),
match lang { Some(match lang {
LangCrateOrigin::Other => { LangCrateOrigin::Other => {
"https://github.com/rust-lang/rust/library/".into() "https://github.com/rust-lang/rust/library/".into()
} }
lang => format!("https://github.com/rust-lang/rust/library/{lang}",), lang => format!("https://github.com/rust-lang/rust/library/{lang}",),
}, }),
), ),
}; };
PackageInformation { name, repo, version } PackageInformation { name, repo, version }
@ -315,7 +315,7 @@ pub mod module {
} }
"#, "#,
"foo::module::func", "foo::module::func",
r#"PackageInformation { name: "foo", repo: "https://a.b/foo.git", version: "0.1.0" }"#, r#"PackageInformation { name: "foo", repo: Some("https://a.b/foo.git"), version: Some("0.1.0") }"#,
MonikerKind::Import, MonikerKind::Import,
); );
check_moniker( check_moniker(
@ -331,7 +331,7 @@ pub mod module {
} }
"#, "#,
"foo::module::func", "foo::module::func",
r#"PackageInformation { name: "foo", repo: "https://a.b/foo.git", version: "0.1.0" }"#, r#"PackageInformation { name: "foo", repo: Some("https://a.b/foo.git"), version: Some("0.1.0") }"#,
MonikerKind::Export, MonikerKind::Export,
); );
} }
@ -348,7 +348,7 @@ pub mod module {
} }
"#, "#,
"foo::module::MyTrait::func", "foo::module::MyTrait::func",
r#"PackageInformation { name: "foo", repo: "https://a.b/foo.git", version: "0.1.0" }"#, r#"PackageInformation { name: "foo", repo: Some("https://a.b/foo.git"), version: Some("0.1.0") }"#,
MonikerKind::Export, MonikerKind::Export,
); );
} }
@ -365,7 +365,7 @@ pub mod module {
} }
"#, "#,
"foo::module::MyTrait::MY_CONST", "foo::module::MyTrait::MY_CONST",
r#"PackageInformation { name: "foo", repo: "https://a.b/foo.git", version: "0.1.0" }"#, r#"PackageInformation { name: "foo", repo: Some("https://a.b/foo.git"), version: Some("0.1.0") }"#,
MonikerKind::Export, MonikerKind::Export,
); );
} }
@ -382,7 +382,7 @@ pub mod module {
} }
"#, "#,
"foo::module::MyTrait::MyType", "foo::module::MyTrait::MyType",
r#"PackageInformation { name: "foo", repo: "https://a.b/foo.git", version: "0.1.0" }"#, r#"PackageInformation { name: "foo", repo: Some("https://a.b/foo.git"), version: Some("0.1.0") }"#,
MonikerKind::Export, MonikerKind::Export,
); );
} }
@ -405,7 +405,7 @@ pub mod module {
} }
"#, "#,
"foo::module::MyStruct::MyTrait::func", "foo::module::MyStruct::MyTrait::func",
r#"PackageInformation { name: "foo", repo: "https://a.b/foo.git", version: "0.1.0" }"#, r#"PackageInformation { name: "foo", repo: Some("https://a.b/foo.git"), version: Some("0.1.0") }"#,
MonikerKind::Export, MonikerKind::Export,
); );
} }
@ -425,7 +425,7 @@ pub struct St {
} }
"#, "#,
"foo::St::a", "foo::St::a",
r#"PackageInformation { name: "foo", repo: "https://a.b/foo.git", version: "0.1.0" }"#, r#"PackageInformation { name: "foo", repo: Some("https://a.b/foo.git"), version: Some("0.1.0") }"#,
MonikerKind::Import, MonikerKind::Import,
); );
} }
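Making `repo` and `version` optional means crates without that metadata still get a `PackageInformation` instead of being discarded by the old `repo?` / `version?` early returns. A minimal sketch of the new shape, with values taken from the tests above plus an invented metadata-less crate:

```rust
#[derive(Debug)]
struct PackageInformation {
    name: String,
    repo: Option<String>,
    version: Option<String>,
}

fn main() {
    // A crates.io dependency with full metadata, as in the updated tests.
    let with_meta = PackageInformation {
        name: "foo".into(),
        repo: Some("https://a.b/foo.git".into()),
        version: Some("0.1.0".into()),
    };
    // A crate without repository or version: previously unrepresentable here.
    let without_meta = PackageInformation { name: "local".into(), repo: None, version: None };
    println!("{with_meta:?}\n{without_meta:?}");
}
```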


@ -40,7 +40,9 @@ pub(crate) fn prepare_rename(
if def.range_for_rename(&sema).is_none() { if def.range_for_rename(&sema).is_none() {
bail!("No references found at position") bail!("No references found at position")
} }
let frange = sema.original_range(name_like.syntax()); let Some(frange) = sema.original_range_opt(name_like.syntax()) else {
bail!("No references found at position");
};
always!( always!(
frange.range.contains_inclusive(position.offset) frange.range.contains_inclusive(position.offset)
@ -51,7 +53,7 @@ pub(crate) fn prepare_rename(
.reduce(|acc, cur| match (acc, cur) { .reduce(|acc, cur| match (acc, cur) {
// ensure all ranges are the same // ensure all ranges are the same
(Ok(acc_inner), Ok(cur_inner)) if acc_inner == cur_inner => Ok(acc_inner), (Ok(acc_inner), Ok(cur_inner)) if acc_inner == cur_inner => Ok(acc_inner),
(Err(e), _) => Err(e), (e @ Err(_), _) | (_, e @ Err(_)) => e,
_ => bail!("inconsistent text range"), _ => bail!("inconsistent text range"),
}); });
@ -2249,4 +2251,33 @@ fn foo((bar | bar | bar): ()) {
"#, "#,
); );
} }
#[test]
fn regression_13498() {
check(
"Testing",
r"
mod foo {
pub struct Test$0;
}
use foo::Test as Tester;
fn main() {
let t = Tester;
}
",
r"
mod foo {
pub struct Testing;
}
use foo::Testing as Tester;
fn main() {
let t = Tester;
}
",
)
}
} }
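
The fix swaps the panicking `original_range` for the fallible `original_range_opt` and bails out with a `let ... else` guard when the name cannot be mapped back to an original file range; the `reduce` arm likewise now forwards an `Err` from either side. A minimal sketch of that let-else shape, with a hypothetical `lookup` standing in for `sema.original_range_opt`:

    // Hypothetical `lookup` in place of the semantics layer; only the
    // control flow of the guard is of interest here.
    fn lookup(key: &str) -> Option<usize> {
        if key.is_empty() { None } else { Some(key.len()) }
    }

    fn prepare(key: &str) -> Result<usize, String> {
        // Diverge early when the mapping fails, instead of panicking later.
        let Some(range) = lookup(key) else {
            return Err("No references found at position".to_string());
        };
        Ok(range)
    }

    fn main() {
        assert_eq!(prepare("Test"), Ok(4));
        assert!(prepare("").is_err());
    }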

View file

@ -149,7 +149,7 @@ fn signature_help_for_call(
variant.name(db) variant.name(db)
); );
} }
hir::CallableKind::Closure | hir::CallableKind::FnPtr => (), hir::CallableKind::Closure | hir::CallableKind::FnPtr | hir::CallableKind::Other => (),
} }
res.signature.push('('); res.signature.push('(');
@ -189,9 +189,10 @@ fn signature_help_for_call(
hir::CallableKind::Function(func) if callable.return_type().contains_unknown() => { hir::CallableKind::Function(func) if callable.return_type().contains_unknown() => {
render(func.ret_type(db)) render(func.ret_type(db))
} }
hir::CallableKind::Function(_) | hir::CallableKind::Closure | hir::CallableKind::FnPtr => { hir::CallableKind::Function(_)
render(callable.return_type()) | hir::CallableKind::Closure
} | hir::CallableKind::FnPtr
| hir::CallableKind::Other => render(callable.return_type()),
hir::CallableKind::TupleStruct(_) | hir::CallableKind::TupleEnumVariant(_) => {} hir::CallableKind::TupleStruct(_) | hir::CallableKind::TupleEnumVariant(_) => {}
} }
Some(res) Some(res)
@ -387,10 +388,9 @@ mod tests {
} }
fn check(ra_fixture: &str, expect: Expect) { fn check(ra_fixture: &str, expect: Expect) {
// Implicitly add `Sized` to avoid noisy `T: ?Sized` in the results.
let fixture = format!( let fixture = format!(
r#" r#"
#[lang = "sized"] trait Sized {{}} //- minicore: sized, fn
{ra_fixture} {ra_fixture}
"# "#
); );
@ -1331,4 +1331,19 @@ fn f() {
"#]], "#]],
); );
} }
#[test]
fn help_for_generic_call() {
check(
r#"
fn f<F: FnOnce(u8, u16) -> i32>(f: F) {
f($0)
}
"#,
expect![[r#"
(u8, u16) -> i32
^^ ---
"#]],
);
}
} }
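
The new `CallableKind::Other` arm and the `minicore: sized, fn` fixture let signature help fire on callees that are neither named functions nor closures, such as a generic parameter bounded by an `Fn*` trait, which is what `help_for_generic_call` exercises. Outside the fixture the same shape is ordinary Rust; the names below are illustrative only:

    // Calling a generic `FnOnce(u8, u16) -> i32` parameter is the case that
    // signature help now renders as `(u8, u16) -> i32`.
    fn apply<F: FnOnce(u8, u16) -> i32>(f: F) -> i32 {
        f(1, 2)
    }

    fn main() {
        let result = apply(|a, b| i32::from(a) + i32::from(b));
        assert_eq!(result, 3);
    }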

View file

@ -106,12 +106,12 @@ impl LsifManager<'_> {
manager: "cargo".to_string(), manager: "cargo".to_string(),
uri: None, uri: None,
content: None, content: None,
repository: Some(lsif::Repository { repository: pi.repo.map(|url| lsif::Repository {
url: pi.repo, url,
r#type: "git".to_string(), r#type: "git".to_string(),
commit_id: None, commit_id: None,
}), }),
version: Some(pi.version), version: pi.version,
})); }));
self.package_map.insert(package_information, result_set_id); self.package_map.insert(package_information, result_set_id);
result_set_id result_set_id
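
With `repo` and `version` now optional, the LSIF exporter only emits a `Repository` when a URL is actually present. A dependency-free sketch of that `Option::map` plumbing, using a simplified stand-in for the `lsif` type:

    // Simplified stand-in for lsif::Repository; only the Option handling matters.
    struct Repository {
        url: String,
        r#type: String,
    }

    fn repository_for(repo: Option<String>) -> Option<Repository> {
        // `map` keeps `None` as `None` and only builds the struct when a URL
        // exists, mirroring `pi.repo.map(|url| lsif::Repository { .. })` above.
        repo.map(|url| Repository { url, r#type: "git".to_string() })
    }

    fn main() {
        assert!(repository_for(None).is_none());
        let r = repository_for(Some("https://a.b/foo.git".to_string())).unwrap();
        assert_eq!(r.r#type, "git");
        assert_eq!(r.url, "https://a.b/foo.git");
    }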

View file

@ -231,7 +231,7 @@ fn token_to_symbol(token: &TokenStaticData) -> Option<scip_types::Symbol> {
package: Some(scip_types::Package { package: Some(scip_types::Package {
manager: "cargo".to_string(), manager: "cargo".to_string(),
name: package_name, name: package_name,
version, version: version.unwrap_or_else(|| ".".to_string()),
..Default::default() ..Default::default()
}) })
.into(), .into(),
@ -415,4 +415,42 @@ pub mod module {
"", "",
); );
} }
#[test]
fn global_symbol_for_pub_struct() {
check_symbol(
r#"
//- /lib.rs crate:main
mod foo;
fn main() {
let _bar = foo::Bar { i: 0 };
}
//- /foo.rs
pub struct Bar$0 {
pub i: i32,
}
"#,
"rust-analyzer cargo main . foo/Bar#",
);
}
#[test]
fn global_symbol_for_pub_struct_reference() {
check_symbol(
r#"
//- /lib.rs crate:main
mod foo;
fn main() {
let _bar = foo::Bar$0 { i: 0 };
}
//- /foo.rs
pub struct Bar {
pub i: i32,
}
"#,
"rust-analyzer cargo main . foo/Bar#",
);
}
} }
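
On the SCIP side, a crate without a version gets `.` as a placeholder segment, which is where the `.` in the expected `rust-analyzer cargo main . foo/Bar#` symbols comes from (the fixtures declare `crate:main` with no version). A sketch of the fallback:

    // Fallback used for the SCIP package version field.
    fn package_version(version: Option<String>) -> String {
        version.unwrap_or_else(|| ".".to_string())
    }

    fn main() {
        assert_eq!(package_version(Some("0.1.0".to_string())), "0.1.0");
        // A versionless crate yields ".", so the emitted symbol reads
        // `rust-analyzer cargo main . foo/Bar#`.
        assert_eq!(package_version(None), ".");
    }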

View file

@ -56,6 +56,9 @@ mod patch_old_style;
// parsing the old name. // parsing the old name.
config_data! { config_data! {
struct ConfigData { struct ConfigData {
/// Whether to insert #[must_use] when generating `as_` methods
/// for enum variants.
assist_emitMustUse: bool = "false",
/// Placeholder expression to use for missing expressions in assists. /// Placeholder expression to use for missing expressions in assists.
assist_expressionFillDefault: ExprFillDefaultDef = "\"todo\"", assist_expressionFillDefault: ExprFillDefaultDef = "\"todo\"",
@ -1276,6 +1279,7 @@ impl Config {
allowed: None, allowed: None,
insert_use: self.insert_use_config(), insert_use: self.insert_use_config(),
prefer_no_std: self.data.imports_prefer_no_std, prefer_no_std: self.data.imports_prefer_no_std,
assist_emit_must_use: self.data.assist_emitMustUse,
} }
} }
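
The new `assist.emitMustUse` setting flows from `ConfigData` into the assist config and controls whether generated `as_` accessors carry a `#[must_use]` attribute. A hedged sketch of the kind of code such an assist might produce with the option enabled; the exact output is defined by the assists themselves and is not reproduced here:

    // Roughly the shape of a generated `as_` accessor when emitMustUse is on:
    // the same method, now annotated with #[must_use]. Illustration only.
    enum Value {
        Number(i32),
        Text(String),
    }

    impl Value {
        #[must_use]
        fn as_number(&self) -> Option<&i32> {
            if let Value::Number(v) = self {
                Some(v)
            } else {
                None
            }
        }
    }

    fn main() {
        let v = Value::Number(7);
        assert_eq!(v.as_number(), Some(&7));
        assert!(Value::Text("x".into()).as_number().is_none());
    }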

View file

@ -334,6 +334,10 @@ pub fn block_expr(
ast_from_text(&format!("fn f() {buf}")) ast_from_text(&format!("fn f() {buf}"))
} }
pub fn tail_only_block_expr(tail_expr: ast::Expr) -> ast::BlockExpr {
ast_from_text(&format!("fn f() {{ {tail_expr} }}"))
}
/// Ideally this function wouldn't exist since it involves manual indenting. /// Ideally this function wouldn't exist since it involves manual indenting.
/// It differs from `make::block_expr` by also supporting comments. /// It differs from `make::block_expr` by also supporting comments.
/// ///
@ -656,6 +660,22 @@ pub fn let_stmt(
}; };
ast_from_text(&format!("fn f() {{ {text} }}")) ast_from_text(&format!("fn f() {{ {text} }}"))
} }
pub fn let_else_stmt(
pattern: ast::Pat,
ty: Option<ast::Type>,
expr: ast::Expr,
diverging: ast::BlockExpr,
) -> ast::LetStmt {
let mut text = String::new();
format_to!(text, "let {pattern}");
if let Some(ty) = ty {
format_to!(text, ": {ty}");
}
format_to!(text, " = {expr} else {diverging};");
ast_from_text(&format!("fn f() {{ {text} }}"))
}
pub fn expr_stmt(expr: ast::Expr) -> ast::ExprStmt { pub fn expr_stmt(expr: ast::Expr) -> ast::ExprStmt {
let semi = if expr.is_block_like() { "" } else { ";" }; let semi = if expr.is_block_like() { "" } else { ";" };
ast_from_text(&format!("fn f() {{ {expr}{semi} (); }}")) ast_from_text(&format!("fn f() {{ {expr}{semi} (); }}"))
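
Like the other `make::*` constructors, `let_else_stmt` works purely by formatting source text and reparsing it via `ast_from_text`. A dependency-free sketch of just the string assembly, with plain `&str` arguments in place of the `ast` node types:

    // Text layout produced by `make::let_else_stmt`, minus the reparse step.
    fn let_else_text(pattern: &str, ty: Option<&str>, expr: &str, diverging: &str) -> String {
        let mut text = format!("let {pattern}");
        if let Some(ty) = ty {
            text.push_str(&format!(": {ty}"));
        }
        text.push_str(&format!(" = {expr} else {diverging};"));
        text
    }

    fn main() {
        assert_eq!(
            let_else_text("Some(x)", None, "opt", "{ return; }"),
            "let Some(x) = opt else { return; };"
        );
        assert_eq!(
            let_else_text("x", Some("i32"), "f()", "{ return; }"),
            "let x: i32 = f() else { return; };"
        );
    }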

View file

@ -338,7 +338,7 @@ The algorithm for building a tree of modules is to start with a crate root
declarations and recursively process child modules. This is handled by the declarations and recursively process child modules. This is handled by the
[`module_tree_query`], with two slight variations. [`module_tree_query`], with two slight variations.
[`module_tree_query`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/module_tree.rs#L116-L123 [`module_tree_query`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_hir/src/module_tree.rs#L115-L133
First, rust-analyzer builds a module tree for all crates in a source root First, rust-analyzer builds a module tree for all crates in a source root
simultaneously. The main reason for this is historical (`module_tree` predates simultaneously. The main reason for this is historical (`module_tree` predates
@ -361,7 +361,7 @@ the same, we don't have to re-execute [`module_tree_query`]. In fact, we only
need to re-execute it when we add/remove new files or when we change mod need to re-execute it when we add/remove new files or when we change mod
declarations. declarations.
[`submodules_query`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/module_tree.rs#L41 [`submodules_query`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_hir/src/module_tree.rs#L41
We store the resulting modules in a `Vec`-based indexed arena. The indices in We store the resulting modules in a `Vec`-based indexed arena. The indices in
the arena becomes module IDs. And this brings us to the next topic: the arena becomes module IDs. And this brings us to the next topic:
@ -389,8 +389,8 @@ integers which can "intern" a location and return an integer ID back. The salsa
database we use includes a couple of [interners]. How to "garbage collect" database we use includes a couple of [interners]. How to "garbage collect"
unused locations is an open question. unused locations is an open question.
[`LocationInterner`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/base_db/src/loc2id.rs#L65-L71 [`LocationInterner`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_db/src/loc2id.rs#L65-L71
[interners]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/db.rs#L22-L23 [interners]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_hir/src/db.rs#L22-L23
For example, we use `LocationInterner` to assign IDs to definitions of functions, For example, we use `LocationInterner` to assign IDs to definitions of functions,
structs, enums, etc. The location, [`DefLoc`] contains two bits of information: structs, enums, etc. The location, [`DefLoc`] contains two bits of information:
@ -404,7 +404,7 @@ using offsets, text ranges or syntax trees as keys and values for queries. What
we do instead is we store "index" of the item among all of the items of a file we do instead is we store "index" of the item among all of the items of a file
(so, a positional based ID, but localized to a single file). (so, a positional based ID, but localized to a single file).
[`DefLoc`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/ids.rs#L127-L139 [`DefLoc`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_hir/src/ids.rs#L129-L139
One thing we've glossed over for the time being is support for macros. We have One thing we've glossed over for the time being is support for macros. We have
only proof of concept handling of macros at the moment, but they are extremely only proof of concept handling of macros at the moment, but they are extremely
@ -437,7 +437,7 @@ terms of `HirFileId`! This does not recur infinitely though: any chain of
`HirFileId`s bottoms out in `HirFileId::FileId`, that is, some source file `HirFileId`s bottoms out in `HirFileId::FileId`, that is, some source file
actually written by the user. actually written by the user.
[`HirFileId`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/ids.rs#L18-L125 [`HirFileId`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_hir/src/ids.rs#L31-L93
Now that we understand how to identify a definition, in a source or in a Now that we understand how to identify a definition, in a source or in a
macro-generated file, we can discuss name resolution a bit. macro-generated file, we can discuss name resolution a bit.
@ -451,14 +451,13 @@ each module into a position-independent representation which does not change if
we modify bodies of the items. After that we [loop] resolving all imports until we modify bodies of the items. After that we [loop] resolving all imports until
we've reached a fixed point. we've reached a fixed point.
[lower]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/nameres/lower.rs#L113-L117 [lower]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_hir/src/nameres/lower.rs#L113-L147
[loop]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/nameres.rs#L186-L196 [loop]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_hir/src/nameres.rs#L186-L196
And, given all our preparation with IDs and a position-independent representation, And, given all our preparation with IDs and a position-independent representation,
it is satisfying to [test] that typing inside function body does not invalidate it is satisfying to [test] that typing inside function body does not invalidate
name resolution results. name resolution results.
[test]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/nameres/tests.rs#L376 [test]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_hir/src/nameres/tests.rs#L376
An interesting fact about name resolution is that it "erases" all of the An interesting fact about name resolution is that it "erases" all of the
intermediate paths from the imports: in the end, we know which items are defined intermediate paths from the imports: in the end, we know which items are defined
@ -493,10 +492,10 @@ there's an intermediate [projection query] which returns only the first
position-independent part of the lowering. The result of this query is stable. position-independent part of the lowering. The result of this query is stable.
Naturally, name resolution [uses] this stable projection query. Naturally, name resolution [uses] this stable projection query.
[imports]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/nameres/lower.rs#L52-L59 [imports]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_hir/src/nameres/lower.rs#L52-L59
[`SourceMap`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/nameres/lower.rs#L52-L59 [`SourceMap`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_hir/src/nameres/lower.rs#L52-L59
[projection query]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/nameres/lower.rs#L97-L103 [projection query]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_hir/src/nameres/lower.rs#L97-L103
[uses]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/query_definitions.rs#L49 [uses]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_hir/src/query_definitions.rs#L49
## Type inference ## Type inference
@ -518,10 +517,10 @@ construct a mapping from `ExprId`s to types.
[@flodiebold]: https://github.com/flodiebold [@flodiebold]: https://github.com/flodiebold
[#327]: https://github.com/rust-lang/rust-analyzer/pull/327 [#327]: https://github.com/rust-lang/rust-analyzer/pull/327
[lower the AST]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/expr.rs [lower the AST]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_hir/src/expr.rs
[positional ID]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/expr.rs#L13-L15 [positional ID]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_hir/src/expr.rs#L13-L15
[a source map]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/expr.rs#L41-L44 [a source map]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_hir/src/expr.rs#L41-L44
[type inference]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/ty.rs#L1208-L1223 [type inference]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_hir/src/ty.rs#L1208-L1223
## Tying it all together: completion ## Tying it all together: completion
@ -563,10 +562,11 @@ the type to completion.
[catch]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/main_loop.rs#L436-L442 [catch]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/main_loop.rs#L436-L442
[the handler]: https://salsa.zulipchat.com/#narrow/stream/181542-rfcs.2Fsalsa-query-group/topic/design.20next.20steps [the handler]: https://salsa.zulipchat.com/#narrow/stream/181542-rfcs.2Fsalsa-query-group/topic/design.20next.20steps
[ask analysis for completion]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/lib.rs#L439-L444 [ask analysis for completion]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/lib.rs#L439-L444
[completion implementation]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/completion.rs#L46-L62 [ask analysis for completion]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_ide_api/src/lib.rs#L439-L444
[`CompletionContext`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/completion/completion_context.rs#L14-L37 [completion implementation]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_ide_api/src/completion.rs#L46-L62
["IntelliJ Trick"]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/completion/completion_context.rs#L72-L75 [`CompletionContext`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_ide_api/src/completion/completion_context.rs#L14-L37
[find an ancestor `fn` node]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/completion/completion_context.rs#L116-L120 ["IntelliJ Trick"]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_ide_api/src/completion/completion_context.rs#L72-L75
[semantic model]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/completion/completion_context.rs#L123 [find an ancestor `fn` node]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_ide_api/src/completion/completion_context.rs#L116-L120
[series of independent completion routines]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/completion.rs#L52-L59 [semantic model]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_ide_api/src/completion/completion_context.rs#L123
[`complete_dot`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/completion/complete_dot.rs#L6-L22 [series of independent completion routines]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_ide_api/src/completion.rs#L52-L59
[`complete_dot`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_ide_api/src/completion/complete_dot.rs#L6-L22
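
The guide passage above (only its links change here) describes keeping modules in a `Vec`-based arena whose indices double as module IDs, with an interner assigning IDs to definition locations. A minimal arena sketch in that spirit, independent of rust-analyzer's real arena and interner types:

    // Minimal Vec-backed arena: pushing an item returns its index, and that
    // index is the item's stable ID, as the guide describes for modules.
    struct Arena<T> {
        items: Vec<T>,
    }

    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    struct Idx(u32);

    impl<T> Arena<T> {
        fn new() -> Self {
            Arena { items: Vec::new() }
        }
        fn alloc(&mut self, item: T) -> Idx {
            let idx = Idx(self.items.len() as u32);
            self.items.push(item);
            idx
        }
        fn get(&self, idx: Idx) -> &T {
            &self.items[idx.0 as usize]
        }
    }

    fn main() {
        let mut modules = Arena::new();
        let root = modules.alloc("crate root");
        let child = modules.alloc("mod foo");
        assert_eq!(*modules.get(root), "crate root");
        assert_eq!(*modules.get(child), "mod foo");
    }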

View file

@ -1,3 +1,9 @@
[[rust-analyzer.assist.emitMustUse]]rust-analyzer.assist.emitMustUse (default: `false`)::
+
--
Whether to insert #[must_use] when generating `as_` methods
for enum variants.
--
[[rust-analyzer.assist.expressionFillDefault]]rust-analyzer.assist.expressionFillDefault (default: `"todo"`):: [[rust-analyzer.assist.expressionFillDefault]]rust-analyzer.assist.expressionFillDefault (default: `"todo"`)::
+ +
-- --

View file

@ -487,6 +487,12 @@ https://docs.helix-editor.com/[Helix] supports LSP by default.
However, it won't install `rust-analyzer` automatically. However, it won't install `rust-analyzer` automatically.
You can follow instructions for installing <<rust-analyzer-language-server-binary,`rust-analyzer` binary>>. You can follow instructions for installing <<rust-analyzer-language-server-binary,`rust-analyzer` binary>>.
=== Crates
There is a package named `ra_ap_rust_analyzer` available on https://crates.io/crates/ra_ap_rust-analyzer[crates.io], for someone who wants to use it programmatically.
For more details, see https://github.com/rust-lang/rust-analyzer/blob/master/.github/workflows/publish.yml[the publish workflow].
== Troubleshooting == Troubleshooting
Start with looking at the rust-analyzer version. Start with looking at the rust-analyzer version.

View file

@ -23,7 +23,7 @@
"esbuild": "^0.14.48", "esbuild": "^0.14.48",
"eslint": "^8.19.0", "eslint": "^8.19.0",
"eslint-config-prettier": "^8.5.0", "eslint-config-prettier": "^8.5.0",
"ovsx": "^0.5.1", "ovsx": "^0.5.2",
"prettier": "^2.7.1", "prettier": "^2.7.1",
"tslib": "^2.4.0", "tslib": "^2.4.0",
"typescript": "^4.7.4", "typescript": "^4.7.4",
@ -2874,9 +2874,9 @@
} }
}, },
"node_modules/ovsx": { "node_modules/ovsx": {
"version": "0.5.1", "version": "0.5.2",
"resolved": "https://registry.npmjs.org/ovsx/-/ovsx-0.5.1.tgz", "resolved": "https://registry.npmjs.org/ovsx/-/ovsx-0.5.2.tgz",
"integrity": "sha512-3OWq0l7DuVHi2bd2aQe5+QVQlFIqvrcw3/2vGXL404L6Tr+R4QHtzfnYYghv8CCa85xJHjU0RhcaC7pyXkAUbg==", "integrity": "sha512-UbLultRCk46WddeA0Cly4hoRhzBJUiLgbIEViXlgOvV54LbsppClDkMLoCevUUBHoiNdMX2NuiSgURAEXgCZdw==",
"dev": true, "dev": true,
"dependencies": { "dependencies": {
"commander": "^6.1.0", "commander": "^6.1.0",
@ -5958,9 +5958,9 @@
} }
}, },
"ovsx": { "ovsx": {
"version": "0.5.1", "version": "0.5.2",
"resolved": "https://registry.npmjs.org/ovsx/-/ovsx-0.5.1.tgz", "resolved": "https://registry.npmjs.org/ovsx/-/ovsx-0.5.2.tgz",
"integrity": "sha512-3OWq0l7DuVHi2bd2aQe5+QVQlFIqvrcw3/2vGXL404L6Tr+R4QHtzfnYYghv8CCa85xJHjU0RhcaC7pyXkAUbg==", "integrity": "sha512-UbLultRCk46WddeA0Cly4hoRhzBJUiLgbIEViXlgOvV54LbsppClDkMLoCevUUBHoiNdMX2NuiSgURAEXgCZdw==",
"dev": true, "dev": true,
"requires": { "requires": {
"commander": "^6.1.0", "commander": "^6.1.0",

View file

@ -49,7 +49,7 @@
"esbuild": "^0.14.48", "esbuild": "^0.14.48",
"eslint": "^8.19.0", "eslint": "^8.19.0",
"eslint-config-prettier": "^8.5.0", "eslint-config-prettier": "^8.5.0",
"ovsx": "^0.5.1", "ovsx": "^0.5.2",
"prettier": "^2.7.1", "prettier": "^2.7.1",
"tslib": "^2.4.0", "tslib": "^2.4.0",
"typescript": "^4.7.4", "typescript": "^4.7.4",
@ -100,22 +100,32 @@
{ {
"command": "rust-analyzer.syntaxTree", "command": "rust-analyzer.syntaxTree",
"title": "Show Syntax Tree", "title": "Show Syntax Tree",
"category": "rust-analyzer" "category": "rust-analyzer (debug command)"
}, },
{ {
"command": "rust-analyzer.viewHir", "command": "rust-analyzer.viewHir",
"title": "View Hir", "title": "View Hir",
"category": "rust-analyzer" "category": "rust-analyzer (debug command)"
}, },
{ {
"command": "rust-analyzer.viewFileText", "command": "rust-analyzer.viewFileText",
"title": "View File Text (as seen by the server)", "title": "View File Text (as seen by the server)",
"category": "rust-analyzer" "category": "rust-analyzer (debug command)"
}, },
{ {
"command": "rust-analyzer.viewItemTree", "command": "rust-analyzer.viewItemTree",
"title": "Debug ItemTree", "title": "Debug ItemTree",
"category": "rust-analyzer" "category": "rust-analyzer (debug command)"
},
{
"command": "rust-analyzer.shuffleCrateGraph",
"title": "Shuffle Crate Graph",
"category": "rust-analyzer (debug command)"
},
{
"command": "rust-analyzer.memoryUsage",
"title": "Memory Usage (Clears Database)",
"category": "rust-analyzer (debug command)"
}, },
{ {
"command": "rust-analyzer.viewCrateGraph", "command": "rust-analyzer.viewCrateGraph",
@ -172,16 +182,6 @@
"title": "Status", "title": "Status",
"category": "rust-analyzer" "category": "rust-analyzer"
}, },
{
"command": "rust-analyzer.memoryUsage",
"title": "Memory Usage (Clears Database)",
"category": "rust-analyzer"
},
{
"command": "rust-analyzer.shuffleCrateGraph",
"title": "Shuffle Crate Graph",
"category": "rust-analyzer"
},
{ {
"command": "rust-analyzer.reloadWorkspace", "command": "rust-analyzer.reloadWorkspace",
"title": "Reload workspace", "title": "Reload workspace",
@ -397,6 +397,11 @@
"type": "boolean" "type": "boolean"
}, },
"$generated-start": {}, "$generated-start": {},
"rust-analyzer.assist.emitMustUse": {
"markdownDescription": "Whether to insert #[must_use] when generating `as_` methods\nfor enum variants.",
"default": false,
"type": "boolean"
},
"rust-analyzer.assist.expressionFillDefault": { "rust-analyzer.assist.expressionFillDefault": {
"markdownDescription": "Placeholder expression to use for missing expressions in assists.", "markdownDescription": "Placeholder expression to use for missing expressions in assists.",
"default": "todo", "default": "todo",

View file

@ -1 +1,11 @@
[assign] [assign]
[shortcut]
[relabel]
allow-unauthenticated = [
"S-*",
]
[autolabel."S-waiting-on-review"]
new_pr = true