cargo clippy --fix

Johann Hemmann 2024-01-18 13:59:49 +01:00
parent 1ab8c7fd27
commit fad4fa163c
178 changed files with 595 additions and 738 deletions

View file

@ -639,7 +639,7 @@ impl CrateGraph {
let res = self.arena.iter().find_map(|(id, data)| {
match (&data.origin, &crate_data.origin) {
(a, b) if a == b => {
if data.eq_ignoring_origin_and_deps(&crate_data, false) {
if data.eq_ignoring_origin_and_deps(crate_data, false) {
return Some((id, false));
}
}
@ -651,8 +651,8 @@ impl CrateGraph {
// version and discard the library one as the local version may have
// dev-dependencies that we want to keep resolving. See #15656 for more
// information.
if data.eq_ignoring_origin_and_deps(&crate_data, true) {
return Some((id, if a.is_local() { false } else { true }));
if data.eq_ignoring_origin_and_deps(crate_data, true) {
return Some((id, !a.is_local()));
}
}
(_, _) => return None,
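The hunk above shows two of the most common rewrites `--fix` applies in this commit: `clippy::needless_borrow` (dropping an `&` where the value is already a reference) and `clippy::needless_bool` (`if cond { false } else { true }` becomes `!cond`). A minimal sketch with stand-in types, not the real rust-analyzer ones:

```rust
// Sketch of `clippy::needless_borrow` and `clippy::needless_bool`;
// `CrateData` and `eq_ignoring_deps` are hypothetical stand-ins.
#[derive(PartialEq)]
struct CrateData {
    local: bool,
}

fn eq_ignoring_deps(a: &CrateData, b: &CrateData) -> bool {
    a == b
}

fn main() {
    let data = CrateData { local: true };
    let crate_data: &CrateData = &CrateData { local: false };

    // Before: `eq_ignoring_deps(&data, &crate_data)` -- `crate_data` is already a
    // `&CrateData`, so the extra borrow only adds a level of indirection.
    let equal = eq_ignoring_deps(&data, crate_data);

    // Before: `if crate_data.local { false } else { true }` -- clippy folds this
    // into a plain negation.
    let replace = !crate_data.local;

    println!("equal = {equal}, replace = {replace}");
}
```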

View file

@ -191,7 +191,7 @@ impl AstIdMap {
/// The [`AstId`] of the root node
pub fn root(&self) -> SyntaxNodePtr {
self.arena[Idx::from_raw(RawIdx::from_u32(0))].clone()
self.arena[Idx::from_raw(RawIdx::from_u32(0))]
}
pub fn ast_id<N: AstIdNode>(&self, item: &N) -> FileAstId<N> {
@ -213,11 +213,11 @@ impl AstIdMap {
}
pub fn get<N: AstIdNode>(&self, id: FileAstId<N>) -> AstPtr<N> {
AstPtr::try_from_raw(self.arena[id.raw].clone()).unwrap()
AstPtr::try_from_raw(self.arena[id.raw]).unwrap()
}
pub fn get_erased(&self, id: ErasedFileAstId) -> SyntaxNodePtr {
self.arena[id].clone()
self.arena[id]
}
fn erased_ast_id(&self, item: &SyntaxNode) -> ErasedFileAstId {
@ -239,9 +239,7 @@ impl AstIdMap {
}
fn hash_ptr(ptr: &SyntaxNodePtr) -> u64 {
let mut hasher = BuildHasherDefault::<FxHasher>::default().build_hasher();
ptr.hash(&mut hasher);
hasher.finish()
BuildHasherDefault::<FxHasher>::default().hash_one(ptr)
}
#[derive(Copy, Clone, PartialEq, Eq)]
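Two more recurring rewrites appear here: `clippy::clone_on_copy` (a `.clone()` on a `Copy` type is just a copy, so the call is dropped) and the manual build-hasher/hash/finish sequence replaced by `BuildHasher::hash_one` (clippy's `manual_hash_one`). A sketch using std types in place of rust-analyzer's arena and `FxHasher`:

```rust
// Sketch only: `NodePtr` stands in for the `Copy` SyntaxNodePtr, and the std
// DefaultHasher stands in for FxHasher.
use std::collections::hash_map::DefaultHasher;
use std::hash::{BuildHasher, BuildHasherDefault};

#[derive(Copy, Clone, Debug, Hash)]
struct NodePtr(u32);

fn hash_ptr(ptr: &NodePtr) -> u64 {
    // Before:
    //   let mut hasher = BuildHasherDefault::<DefaultHasher>::default().build_hasher();
    //   ptr.hash(&mut hasher);
    //   hasher.finish()
    BuildHasherDefault::<DefaultHasher>::default().hash_one(ptr)
}

fn main() {
    let arena = [NodePtr(0), NodePtr(1)];
    // Before: `arena[0].clone()` -- cloning a `Copy` value is the same as copying it.
    let root = arena[0];
    println!("{root:?} hashes to {}", hash_ptr(&root));
}
```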

View file

@ -31,7 +31,7 @@ impl ops::Deref for RawAttrs {
fn deref(&self) -> &[Attr] {
match &self.entries {
Some(it) => &*it,
Some(it) => it,
None => &[],
}
}
@ -79,7 +79,7 @@ impl RawAttrs {
Self {
entries: Some(Arc::from_iter(a.iter().cloned().chain(b.iter().map(|it| {
let mut it = it.clone();
it.id.id = it.id.ast_index() as u32 + last_ast_index
it.id.id = (it.id.ast_index() as u32 + last_ast_index)
| (it.id.cfg_attr_index().unwrap_or(0) as u32)
<< AttrId::AST_INDEX_BITS;
it
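The attribute-id packing above is touched by `clippy::precedence`: when arithmetic and bitwise operators are mixed, the lint asks for explicit parentheses even though `+` already binds tighter than `|`, so the value is unchanged. A reduced sketch in which the constant and packing layout are invented:

```rust
// Illustrative only; AST_INDEX_BITS and the field layout are hypothetical.
const AST_INDEX_BITS: u32 = 16;

fn pack_attr_id(ast_index: u32, last_ast_index: u32, cfg_attr_index: u32) -> u32 {
    // Before (same result, implicit precedence):
    //   ast_index + last_ast_index | cfg_attr_index << AST_INDEX_BITS
    (ast_index + last_ast_index) | (cfg_attr_index << AST_INDEX_BITS)
}

fn main() {
    println!("{:#010x}", pack_attr_id(3, 4, 2));
}
```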

View file

@ -425,7 +425,7 @@ fn clone_expand(span: Span, tt: &ast::Adt, tm: SpanMapRef<'_>) -> ExpandResult<t
let name = &adt.name;
let patterns = adt.shape.as_pattern(span, name);
let exprs = adt.shape.as_pattern_map(name, |it| quote! {span => #it .clone() }, span);
let arms = patterns.into_iter().zip(exprs.into_iter()).map(|(pat, expr)| {
let arms = patterns.into_iter().zip(exprs).map(|(pat, expr)| {
let fat_arrow = fat_arrow(span);
quote! {span =>
#pat #fat_arrow #expr,

View file

@ -125,7 +125,7 @@ fn mk_pound(span: Span) -> tt::Subtree {
vec![crate::tt::Leaf::Punct(crate::tt::Punct {
char: '#',
spacing: crate::tt::Spacing::Alone,
span: span,
span,
})
.into()],
span,
@ -279,9 +279,9 @@ fn format_args_expand_general(
let pound = mk_pound(span);
let mut tt = tt.clone();
tt.delimiter.kind = tt::DelimiterKind::Parenthesis;
return ExpandResult::ok(quote! {span =>
ExpandResult::ok(quote! {span =>
builtin #pound format_args #tt
});
})
}
fn asm_expand(
@ -624,7 +624,7 @@ fn relative_file(
fn parse_string(tt: &tt::Subtree) -> Result<String, ExpandError> {
tt.token_trees
.get(0)
.first()
.and_then(|tt| match tt {
tt::TokenTree::Leaf(tt::Leaf::Literal(it)) => unquote_str(it),
_ => None,
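Three small lints show up in this file: `clippy::redundant_field_names` (`span: span` becomes the field-init shorthand `span`), `clippy::needless_return` (the trailing `return` around the `ExpandResult::ok(...)` expression goes away), and `clippy::get_first` (`.get(0)` becomes `.first()`). A combined sketch with hypothetical types:

```rust
// Sketch; `Punct` is a stand-in for the tt::Punct being built above.
struct Punct {
    char: char,
    span: u32,
}

fn mk_pound(span: u32) -> Punct {
    // Before: `Punct { char: '#', span: span }`
    Punct { char: '#', span }
}

fn first_token(tokens: &[char]) -> Option<char> {
    // Before: `return tokens.get(0).copied();`
    tokens.first().copied()
}

fn main() {
    let p = mk_pound(7);
    println!("{} @ {}; first = {:?}", p.char, p.span, first_token(&['#', '!']));
}
```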

View file

@ -633,8 +633,8 @@ fn decl_macro_expander(
map.as_ref(),
map.span_for_range(macro_rules.macro_rules_token().unwrap().text_range()),
);
let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021, new_meta_vars);
mac
mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021, new_meta_vars)
}
None => mbe::DeclarativeMacro::from_err(
mbe::ParseError::Expected("expected a token tree".into()),
@ -651,8 +651,8 @@ fn decl_macro_expander(
map.as_ref(),
map.span_for_range(macro_def.macro_token().unwrap().text_range()),
);
let mac = mbe::DeclarativeMacro::parse_macro2(&tt, is_2021, new_meta_vars);
mac
mbe::DeclarativeMacro::parse_macro2(&tt, is_2021, new_meta_vars)
}
None => mbe::DeclarativeMacro::from_err(
mbe::ParseError::Expected("expected a token tree".into()),
@ -722,7 +722,7 @@ fn macro_expand(
db.decl_macro_expander(loc.def.krate, id).expand(db, arg.clone(), macro_call_id)
}
MacroDefKind::BuiltIn(it, _) => {
it.expand(db, macro_call_id, &arg).map_err(Into::into)
it.expand(db, macro_call_id, arg).map_err(Into::into)
}
// This might look a bit odd, but we do not expand the inputs to eager macros here.
// Eager macros inputs are expanded, well, eagerly when we collect the macro calls.
@ -746,10 +746,10 @@ fn macro_expand(
};
}
MacroDefKind::BuiltInEager(it, _) => {
it.expand(db, macro_call_id, &arg).map_err(Into::into)
it.expand(db, macro_call_id, arg).map_err(Into::into)
}
MacroDefKind::BuiltInAttr(it, _) => {
let mut res = it.expand(db, macro_call_id, &arg);
let mut res = it.expand(db, macro_call_id, arg);
fixup::reverse_fixups(&mut res.value, &undo_info);
res
}
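Both `let mac = ...; mac` hunks in this file are the same `clippy::let_and_return` fix: binding a value to a local only to return it on the next line is collapsed into a tail expression. Sketch (the parser call is faked with a string length):

```rust
// Sketch of `clippy::let_and_return`.
fn parse_macro_rules(tt: &str) -> usize {
    // Before:
    //   let mac = tt.trim().len();
    //   mac
    tt.trim().len()
}

fn main() {
    println!("{}", parse_macro_rules("  macro_rules! m { () => {} }  "));
}
```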

View file

@ -68,7 +68,7 @@ pub(crate) fn fixup_syntax(
let node_range = node.text_range();
if can_handle_error(&node) && has_error_to_handle(&node) {
remove.insert(node.clone().into());
remove.insert(node.clone());
// the node contains an error node, we have to completely replace it by something valid
let original_tree = mbe::syntax_node_to_token_tree(&node, span_map, call_site);
let idx = original.len() as u32;

View file

@ -77,7 +77,7 @@ macro_rules! impl_intern_lookup {
impl $crate::Intern for $loc {
type Database<'db> = dyn $db + 'db;
type ID = $id;
fn intern<'db>(self, db: &Self::Database<'db>) -> $id {
fn intern(self, db: &Self::Database<'_>) -> $id {
db.$intern(self)
}
}
@ -85,7 +85,7 @@ macro_rules! impl_intern_lookup {
impl $crate::Lookup for $id {
type Database<'db> = dyn $db + 'db;
type Data = $loc;
fn lookup<'db>(&self, db: &Self::Database<'db>) -> $loc {
fn lookup(&self, db: &Self::Database<'_>) -> $loc {
db.$lookup(*self)
}
}
@ -96,13 +96,13 @@ macro_rules! impl_intern_lookup {
pub trait Intern {
type Database<'db>: ?Sized;
type ID;
fn intern<'db>(self, db: &Self::Database<'db>) -> Self::ID;
fn intern(self, db: &Self::Database<'_>) -> Self::ID;
}
pub trait Lookup {
type Database<'db>: ?Sized;
type Data;
fn lookup<'db>(&self, db: &Self::Database<'db>) -> Self::Data;
fn lookup(&self, db: &Self::Database<'_>) -> Self::Data;
}
impl_intern_lookup!(
@ -425,7 +425,7 @@ impl MacroDefId {
pub fn ast_id(&self) -> Either<AstId<ast::Macro>, AstId<ast::Fn>> {
match self.kind {
MacroDefKind::ProcMacro(.., id) => return Either::Right(id),
MacroDefKind::ProcMacro(.., id) => Either::Right(id),
MacroDefKind::Declarative(id)
| MacroDefKind::BuiltIn(_, id)
| MacroDefKind::BuiltInAttr(_, id)
@ -657,10 +657,10 @@ impl ExpansionInfo {
}
/// Maps the passed in file range down into a macro expansion if it is the input to a macro call.
pub fn map_range_down<'a>(
&'a self,
pub fn map_range_down(
&self,
span: Span,
) -> Option<InMacroFile<impl Iterator<Item = SyntaxToken> + 'a>> {
) -> Option<InMacroFile<impl Iterator<Item = SyntaxToken> + '_>> {
let tokens = self
.exp_map
.ranges_with_span(span)

View file

@ -301,7 +301,7 @@ pub fn resolve_crate_root(db: &dyn ExpandDatabase, mut ctxt: SyntaxContextId) ->
result_mark = Some(mark);
}
result_mark.flatten().map(|call| db.lookup_intern_macro_call(call.into()).def.krate)
result_mark.flatten().map(|call| db.lookup_intern_macro_call(call).def.krate)
}
pub use crate::name as __name;

View file

@ -18,13 +18,13 @@ pub(crate) const fn dollar_crate(span: Span) -> tt::Ident<Span> {
#[macro_export]
macro_rules! __quote {
($span:ident) => {
Vec::<crate::tt::TokenTree>::new()
Vec::<$crate::tt::TokenTree>::new()
};
( @SUBTREE($span:ident) $delim:ident $($tt:tt)* ) => {
{
let children = $crate::__quote!($span $($tt)*);
crate::tt::Subtree {
$crate::tt::Subtree {
delimiter: crate::tt::Delimiter {
kind: crate::tt::DelimiterKind::$delim,
open: $span,
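The `__quote!` change above is clippy's `crate_in_macro_def` lint: a path written as `crate::...` inside a `macro_rules!` body resolves in the *calling* crate, so an exported macro should use the `$crate` metavariable instead. A self-contained sketch with a stand-in `tt` module:

```rust
// Sketch of `clippy::crate_in_macro_def`; this `tt` module is hypothetical.
pub mod tt {
    #[derive(Debug, Default)]
    pub struct Subtree {
        pub len: usize,
    }
}

#[macro_export]
macro_rules! subtree {
    () => {
        // Before: `crate::tt::Subtree::default()` -- that path would be looked up
        // in whichever crate invokes the macro. `$crate` always points back here.
        $crate::tt::Subtree::default()
    };
}

fn main() {
    let s = subtree!();
    println!("{s:?}");
}
```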

View file

@ -230,7 +230,7 @@ impl chalk_solve::RustIrDatabase<Interner> for ChalkContext<'_> {
well_known_trait: rust_ir::WellKnownTrait,
) -> Option<chalk_ir::TraitId<Interner>> {
let lang_attr = lang_item_from_well_known_trait(well_known_trait);
let trait_ = match self.db.lang_item(self.krate, lang_attr.into()) {
let trait_ = match self.db.lang_item(self.krate, lang_attr) {
Some(LangItemTarget::Trait(trait_)) => trait_,
_ => return None,
};

View file

@ -218,7 +218,7 @@ impl TyExt for Ty {
// invariant ensured by `TyLoweringContext::lower_dyn_trait()`.
// FIXME: dyn types may not have principal trait and we don't want to return auto trait
// here.
TyKind::Dyn(dyn_ty) => dyn_ty.bounds.skip_binders().interned().get(0).and_then(|b| {
TyKind::Dyn(dyn_ty) => dyn_ty.bounds.skip_binders().interned().first().and_then(|b| {
match b.skip_binders() {
WhereClause::Implemented(trait_ref) => Some(trait_ref),
_ => None,
@ -427,7 +427,7 @@ pub trait DynTyExt {
impl DynTyExt for DynTy {
fn principal(&self) -> Option<&TraitRef> {
self.bounds.skip_binders().interned().get(0).and_then(|b| match b.skip_binders() {
self.bounds.skip_binders().interned().first().and_then(|b| match b.skip_binders() {
crate::WhereClause::Implemented(trait_ref) => Some(trait_ref),
_ => None,
})

View file

@ -173,7 +173,7 @@ pub fn try_const_usize(db: &dyn HirDatabase, c: &Const) -> Option<u128> {
chalk_ir::ConstValue::InferenceVar(_) => None,
chalk_ir::ConstValue::Placeholder(_) => None,
chalk_ir::ConstValue::Concrete(c) => match &c.interned {
ConstScalar::Bytes(it, _) => Some(u128::from_le_bytes(pad16(&it, false))),
ConstScalar::Bytes(it, _) => Some(u128::from_le_bytes(pad16(it, false))),
ConstScalar::UnevaluatedConst(c, subst) => {
let ec = db.const_eval(*c, subst.clone(), None).ok()?;
try_const_usize(db, &ec)
@ -298,7 +298,7 @@ pub(crate) fn eval_to_const(
body[expr].walk_child_exprs(|idx| r |= has_closure(body, idx));
r
}
if has_closure(&ctx.body, expr) {
if has_closure(ctx.body, expr) {
// Type checking closures need an isolated body (See the above FIXME). Bail out early to prevent panic.
return unknown_const(infer[expr].clone());
}
@ -308,7 +308,7 @@ pub(crate) fn eval_to_const(
return c;
}
}
if let Ok(mir_body) = lower_to_mir(ctx.db, ctx.owner, &ctx.body, &infer, expr) {
if let Ok(mir_body) = lower_to_mir(ctx.db, ctx.owner, ctx.body, &infer, expr) {
if let Ok(result) = interpret_mir(db, Arc::new(mir_body), true, None).0 {
return result;
}

View file

@ -387,7 +387,7 @@ impl<'a> DeclValidator<'a> {
for (id, replacement) in pats_replacements {
if let Ok(source_ptr) = source_map.pat_syntax(id) {
if let Some(ptr) = source_ptr.value.clone().cast::<ast::IdentPat>() {
if let Some(ptr) = source_ptr.value.cast::<ast::IdentPat>() {
let root = source_ptr.file_syntax(self.db.upcast());
let ident_pat = ptr.to_node(&root);
let parent = match ident_pat.syntax().parent() {

View file

@ -141,8 +141,8 @@ impl ExprValidator {
);
}
}
_ => return,
};
_ => (),
}
}
fn validate_match(

View file

@ -409,7 +409,7 @@ impl<'p> Matrix<'p> {
/// Number of columns of this matrix. `None` is the matrix is empty.
pub(super) fn _column_count(&self) -> Option<usize> {
self.patterns.get(0).map(|r| r.len())
self.patterns.first().map(|r| r.len())
}
/// Pushes a new row to the matrix. If the row starts with an or-pattern, this recursively

View file

@ -427,7 +427,7 @@ impl HirDisplay for Const {
Ok(())
}
ConstValue::Concrete(c) => match &c.interned {
ConstScalar::Bytes(b, m) => render_const_scalar(f, &b, m, &data.ty),
ConstScalar::Bytes(b, m) => render_const_scalar(f, b, m, &data.ty),
ConstScalar::UnevaluatedConst(c, parameters) => {
write!(f, "{}", c.name(f.db.upcast()))?;
hir_fmt_generics(f, parameters, c.generic_def(f.db.upcast()))?;
@ -451,7 +451,7 @@ fn render_const_scalar(
TraitEnvironment::empty(*f.db.crate_graph().crates_in_topological_order().last().unwrap());
match ty.kind(Interner) {
TyKind::Scalar(s) => match s {
Scalar::Bool => write!(f, "{}", if b[0] == 0 { false } else { true }),
Scalar::Bool => write!(f, "{}", b[0] != 0),
Scalar::Char => {
let it = u128::from_le_bytes(pad16(b, false)) as u32;
let Ok(c) = char::try_from(it) else {
@ -485,7 +485,7 @@ fn render_const_scalar(
let Some(bytes) = memory_map.get(addr, size) else {
return f.write_str("<ref-data-not-available>");
};
let s = std::str::from_utf8(&bytes).unwrap_or("<utf8-error>");
let s = std::str::from_utf8(bytes).unwrap_or("<utf8-error>");
write!(f, "{s:?}")
}
TyKind::Slice(ty) => {
@ -507,7 +507,7 @@ fn render_const_scalar(
f.write_str(", ")?;
}
let offset = size_one * i;
render_const_scalar(f, &bytes[offset..offset + size_one], memory_map, &ty)?;
render_const_scalar(f, &bytes[offset..offset + size_one], memory_map, ty)?;
}
f.write_str("]")
}
@ -533,9 +533,7 @@ fn render_const_scalar(
write!(f, "&{}", data.name.display(f.db.upcast()))?;
Ok(())
}
_ => {
return f.write_str("<unsized-enum-or-union>");
}
_ => f.write_str("<unsized-enum-or-union>"),
},
_ => {
let addr = usize::from_le_bytes(match b.try_into() {
@ -579,7 +577,7 @@ fn render_const_scalar(
continue;
};
let size = layout.size.bytes_usize();
render_const_scalar(f, &b[offset..offset + size], memory_map, &ty)?;
render_const_scalar(f, &b[offset..offset + size], memory_map, ty)?;
}
f.write_str(")")
}
@ -620,7 +618,7 @@ fn render_const_scalar(
f,
&field_types,
f.db.trait_environment(adt.0.into()),
&var_layout,
var_layout,
subst,
b,
memory_map,
@ -651,7 +649,7 @@ fn render_const_scalar(
f.write_str(", ")?;
}
let offset = size_one * i;
render_const_scalar(f, &b[offset..offset + size_one], memory_map, &ty)?;
render_const_scalar(f, &b[offset..offset + size_one], memory_map, ty)?;
}
f.write_str("]")
}
@ -718,7 +716,7 @@ fn render_variant_after_name(
}
write!(f, ")")?;
}
return Ok(());
Ok(())
}
VariantData::Unit => Ok(()),
}
@ -864,7 +862,7 @@ impl HirDisplay for Ty {
write!(f, ",)")?;
} else {
write!(f, "(")?;
f.write_joined(&*substs.as_slice(Interner), ", ")?;
f.write_joined(substs.as_slice(Interner), ", ")?;
write!(f, ")")?;
}
}
@ -1210,8 +1208,7 @@ impl HirDisplay for Ty {
let subst = subst.as_slice(Interner);
let a: Option<SmallVec<[&Ty; 3]>> = subst
.get(subst.len() - 3..)
.map(|args| args.iter().map(|arg| arg.ty(Interner)).collect())
.flatten();
.and_then(|args| args.iter().map(|arg| arg.ty(Interner)).collect());
if let Some([resume_ty, yield_ty, ret_ty]) = a.as_deref() {
write!(f, "|")?;
@ -1422,7 +1419,7 @@ fn write_bounds_like_dyn_trait(
f.start_location_link(trait_.into());
write!(f, "{}", f.db.trait_data(trait_).name.display(f.db.upcast()))?;
f.end_location_link();
if let [_, params @ ..] = &*trait_ref.substitution.as_slice(Interner) {
if let [_, params @ ..] = trait_ref.substitution.as_slice(Interner) {
if is_fn_trait {
if let Some(args) =
params.first().and_then(|it| it.assert_ty_ref(Interner).as_tuple())
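Two patterns dominate this display hunk: `clippy::needless_bool` again (`if b[0] == 0 { false } else { true }` is just `b[0] != 0`) and `clippy::map_flatten` (`.map(..).flatten()` on an `Option` becomes `.and_then(..)`). A reduced sketch with invented data:

```rust
// Sketch only; the byte buffer and substitution list are invented stand-ins.
fn render_bool(bytes: &[u8]) -> bool {
    // Before: `if bytes[0] == 0 { false } else { true }`
    bytes[0] != 0
}

fn tys_of_last_three(subst: &[Option<u32>]) -> Option<Vec<u32>> {
    // Before: `.map(|args| args.iter().copied().collect()).flatten()` -- on an
    // `Option`, `map(..).flatten()` is exactly `and_then(..)`.
    subst
        .get(subst.len().checked_sub(3)?..)
        .and_then(|args| args.iter().copied().collect())
}

fn main() {
    println!("{}", render_bool(&[1]));
    println!("{:?}", tys_of_last_three(&[Some(1), Some(2), Some(3)]));
}
```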

View file

@ -88,7 +88,7 @@ pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<Infer
DefWithBodyId::StaticId(s) => ctx.collect_static(&db.static_data(s)),
DefWithBodyId::VariantId(v) => {
ctx.return_ty = TyBuilder::builtin(
match db.enum_data(v.lookup(db.upcast()).parent.into()).variant_body_type() {
match db.enum_data(v.lookup(db.upcast()).parent).variant_body_type() {
hir_def::layout::IntegerType::Pointer(signed) => match signed {
true => BuiltinType::Int(BuiltinInt::Isize),
false => BuiltinType::Uint(BuiltinUint::Usize),
@ -156,8 +156,9 @@ pub(crate) fn normalize(db: &dyn HirDatabase, trait_env: Arc<TraitEnvironment>,
/// Binding modes inferred for patterns.
/// <https://doc.rust-lang.org/reference/patterns.html#binding-modes>
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
#[derive(Copy, Clone, Debug, Eq, PartialEq, Default)]
pub enum BindingMode {
#[default]
Move,
Ref(Mutability),
}
@ -172,12 +173,6 @@ impl BindingMode {
}
}
impl Default for BindingMode {
fn default() -> Self {
BindingMode::Move
}
}
#[derive(Debug)]
pub(crate) struct InferOk<T> {
value: T,
@ -572,10 +567,10 @@ enum BreakableKind {
Border,
}
fn find_breakable<'c>(
ctxs: &'c mut [BreakableContext],
fn find_breakable(
ctxs: &mut [BreakableContext],
label: Option<LabelId>,
) -> Option<&'c mut BreakableContext> {
) -> Option<&mut BreakableContext> {
let mut ctxs = ctxs
.iter_mut()
.rev()
@ -586,10 +581,10 @@ fn find_breakable<'c>(
}
}
fn find_continuable<'c>(
ctxs: &'c mut [BreakableContext],
fn find_continuable(
ctxs: &mut [BreakableContext],
label: Option<LabelId>,
) -> Option<&'c mut BreakableContext> {
) -> Option<&mut BreakableContext> {
match label {
Some(_) => find_breakable(ctxs, label).filter(|it| matches!(it.kind, BreakableKind::Loop)),
None => find_breakable(ctxs, label),
@ -825,8 +820,8 @@ impl<'a> InferenceContext<'a> {
ImplTraitId::ReturnTypeImplTrait(_, idx) => idx,
_ => unreachable!(),
};
let bounds = (*rpits)
.map_ref(|rpits| rpits.impl_traits[idx].bounds.map_ref(|it| it.into_iter()));
let bounds =
(*rpits).map_ref(|rpits| rpits.impl_traits[idx].bounds.map_ref(|it| it.iter()));
let var = self.table.new_type_var();
let var_subst = Substitution::from1(Interner, var.clone());
for bound in bounds {
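The `BindingMode` change is `clippy::derivable_impls`: a hand-written `impl Default` that just picks a unit variant is replaced by `#[derive(Default)]` plus a `#[default]` attribute on that variant (stable since Rust 1.62). The explicit `'c` lifetimes on `find_breakable`/`find_continuable` fall to `clippy::needless_lifetimes`. Sketch of the derive part, with `Mutability` simplified away:

```rust
// Sketch of the `derivable_impls` rewrite.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Default)]
pub enum BindingMode {
    #[default]
    Move,
    Ref,
}

// Before, the same behaviour was spelled out by hand:
//
//     impl Default for BindingMode {
//         fn default() -> Self {
//             BindingMode::Move
//         }
//     }

fn main() {
    println!("{:?} (vs {:?})", BindingMode::default(), BindingMode::Ref);
}
```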

View file

@ -31,7 +31,6 @@ impl CastCheck {
// Note that this type of cast is actually split into a coercion to a
// pointer type and a cast:
// &[T; N] -> *[T; N] -> *T
return;
}
// FIXME: Check other kinds of non-coercion casts and report error if any?

View file

@ -815,8 +815,7 @@ impl InferenceContext<'_> {
.iter()
.cloned()
.chain((0..cnt).map(|_| ProjectionElem::Deref))
.collect::<Vec<_>>()
.into();
.collect::<Vec<_>>();
match &self.body[pat] {
Pat::Missing | Pat::Wild => (),
Pat::Tuple { args, ellipsis } => {
@ -858,7 +857,7 @@ impl InferenceContext<'_> {
};
let mut p = place.clone();
p.projections.push(ProjectionElem::Field(Either::Left(FieldId {
parent: variant.into(),
parent: variant,
local_id,
})));
self.consume_with_pat(p, arg);
@ -902,7 +901,7 @@ impl InferenceContext<'_> {
for (arg, (i, _)) in it {
let mut p = place.clone();
p.projections.push(ProjectionElem::Field(Either::Left(FieldId {
parent: variant.into(),
parent: variant,
local_id: i,
})));
self.consume_with_pat(p, *arg);

View file

@ -276,7 +276,7 @@ impl InferenceContext<'_> {
// Now go through the argument patterns
for (arg_pat, arg_ty) in args.iter().zip(&sig_tys) {
self.infer_top_pat(*arg_pat, &arg_ty);
self.infer_top_pat(*arg_pat, arg_ty);
}
// FIXME: lift these out into a struct
@ -1081,8 +1081,7 @@ impl InferenceContext<'_> {
let inner_exp = expected
.to_option(table)
.as_ref()
.map(|e| e.as_adt())
.flatten()
.and_then(|e| e.as_adt())
.filter(|(e_adt, _)| e_adt == &box_id)
.map(|(_, subts)| {
let g = subts.at(Interner, 0);
@ -1320,7 +1319,7 @@ impl InferenceContext<'_> {
.unwrap_or_else(|| this.table.new_type_var());
let ty = if let Some(expr) = initializer {
let ty = if contains_explicit_ref_binding(&this.body, *pat) {
let ty = if contains_explicit_ref_binding(this.body, *pat) {
this.infer_expr(*expr, &Expectation::has_type(decl_ty.clone()))
} else {
this.infer_expr_coerce(
@ -1716,7 +1715,7 @@ impl InferenceContext<'_> {
// that we have more information about the types of arguments when we
// type-check the functions. This isn't really the right way to do this.
for check_closures in [false, true] {
let mut skip_indices = skip_indices.into_iter().copied().fuse().peekable();
let mut skip_indices = skip_indices.iter().copied().fuse().peekable();
let param_iter = param_tys.iter().cloned().chain(repeat(self.err_ty()));
let expected_iter = expected_inputs
.iter()

View file

@ -93,7 +93,7 @@ impl InferenceContext<'_> {
ty.as_adt().map(|(_, s)| s.clone()).unwrap_or_else(|| Substitution::empty(Interner));
match def {
_ if subs.len() == 0 => {}
_ if subs.is_empty() => {}
Some(def) => {
let field_types = self.db.field_types(def);
let variant_data = def.variant_data(self.db.upcast());
@ -223,13 +223,13 @@ impl InferenceContext<'_> {
) -> Ty {
let expected = self.resolve_ty_shallow(expected);
let expectations = match expected.as_tuple() {
Some(parameters) => &*parameters.as_slice(Interner),
Some(parameters) => parameters.as_slice(Interner),
_ => &[],
};
let ((pre, post), n_uncovered_patterns) = match ellipsis {
Some(idx) => (subs.split_at(idx), expectations.len().saturating_sub(subs.len())),
None => ((&subs[..], &[][..]), 0),
None => ((subs, &[][..]), 0),
};
let mut expectations_iter = expectations
.iter()
@ -423,7 +423,7 @@ impl InferenceContext<'_> {
self.result.binding_modes.insert(pat, mode);
let inner_ty = match subpat {
Some(subpat) => self.infer_pat(subpat, &expected, default_bm),
Some(subpat) => self.infer_pat(subpat, expected, default_bm),
None => expected.clone(),
};
let inner_ty = self.insert_type_vars_shallow(inner_ty);
@ -436,7 +436,7 @@ impl InferenceContext<'_> {
};
self.write_pat_ty(pat, inner_ty.clone());
self.write_binding_ty(binding, bound_ty);
return inner_ty;
inner_ty
}
fn infer_slice_pat(
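Here the fixes are `clippy::len_zero` (`subs.len() == 0` becomes `subs.is_empty()`) and another `needless_return` on the tail `inner_ty`. Sketch with a placeholder substitution type:

```rust
// Sketch; `Sub` is a placeholder for the substitution list used above.
struct Sub(Vec<u32>);

fn infer_tuple(subs: &Sub) -> u32 {
    // Before: `if subs.0.len() == 0 { ... }`
    if subs.0.is_empty() {
        return 0;
    }
    let inner = subs.0.iter().sum();
    // Before: `return inner;` at the end of the function body.
    inner
}

fn main() {
    println!("{}", infer_tuple(&Sub(vec![1, 2, 3])));
}
```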

View file

@ -389,7 +389,7 @@ impl InferenceContext<'_> {
name: &Name,
id: ExprOrPatId,
) -> Option<(ValueNs, Substitution)> {
let ty = self.resolve_ty_shallow(&ty);
let ty = self.resolve_ty_shallow(ty);
let (enum_id, subst) = match ty.as_adt() {
Some((AdtId::EnumId(e), subst)) => (e, subst),
_ => return None,

View file

@ -250,9 +250,7 @@ impl<'a> InferenceTable<'a> {
// and registering an obligation. But it needs chalk support, so we handle the most basic
// case (a non associated const without generic parameters) manually.
if subst.len(Interner) == 0 {
if let Ok(eval) =
self.db.const_eval((*c_id).into(), subst.clone(), None)
{
if let Ok(eval) = self.db.const_eval(*c_id, subst.clone(), None) {
eval
} else {
unknown_const(c.data(Interner).ty.clone())
@ -490,9 +488,8 @@ impl<'a> InferenceTable<'a> {
pub(crate) fn try_obligation(&mut self, goal: Goal) -> Option<Solution> {
let in_env = InEnvironment::new(&self.trait_env.env, goal);
let canonicalized = self.canonicalize(in_env);
let solution =
self.db.trait_solve(self.trait_env.krate, self.trait_env.block, canonicalized.value);
solution
self.db.trait_solve(self.trait_env.krate, self.trait_env.block, canonicalized.value)
}
pub(crate) fn register_obligation(&mut self, goal: Goal) {

View file

@ -387,7 +387,7 @@ impl chalk_ir::interner::HasInterner for Interner {
macro_rules! has_interner {
($t:ty) => {
impl HasInterner for $t {
type Interner = crate::Interner;
type Interner = $crate::Interner;
}
};
}

View file

@ -202,7 +202,7 @@ pub fn layout_of_ty_query(
return Err(LayoutError::TargetLayoutNotAvailable);
};
let cx = LayoutCx { target: &target };
let dl = &*cx.current_data_layout();
let dl = cx.current_data_layout();
let ty = normalize(db, trait_env.clone(), ty);
let result = match ty.kind(Interner) {
TyKind::Adt(AdtId(def), subst) => {
@ -278,7 +278,7 @@ pub fn layout_of_ty_query(
cx.univariant(dl, &fields, &ReprOptions::default(), kind).ok_or(LayoutError::Unknown)?
}
TyKind::Array(element, count) => {
let count = try_const_usize(db, &count).ok_or(LayoutError::HasErrorConst)? as u64;
let count = try_const_usize(db, count).ok_or(LayoutError::HasErrorConst)? as u64;
let element = db.layout_of_ty(element.clone(), trait_env)?;
let size = element.size.checked_mul(count, dl).ok_or(LayoutError::SizeOverflow)?;

View file

@ -80,7 +80,7 @@ pub fn layout_of_adt_query(
matches!(def, AdtId::EnumId(..)),
is_unsafe_cell(db, def),
layout_scalar_valid_range(db, def),
|min, max| repr_discr(&dl, &repr, min, max).unwrap_or((Integer::I8, false)),
|min, max| repr_discr(dl, &repr, min, max).unwrap_or((Integer::I8, false)),
variants.iter_enumerated().filter_map(|(id, _)| {
let AdtId::EnumId(e) = def else { return None };
let d = db.const_eval_discriminant(db.enum_data(e).variants[id.0].0).ok()?;

View file

@ -12,7 +12,7 @@ pub fn target_data_layout_query(
) -> Option<Arc<TargetDataLayout>> {
let crate_graph = db.crate_graph();
let target_layout = crate_graph[krate].target_layout.as_ref().ok()?;
let res = TargetDataLayout::parse_from_llvm_datalayout_string(&target_layout);
let res = TargetDataLayout::parse_from_llvm_datalayout_string(target_layout);
if let Err(_e) = &res {
// FIXME: Print the error here once it implements debug/display
// also logging here is somewhat wrong, but unfortunately this is the earliest place we can

View file

@ -366,11 +366,11 @@ fn return_position_impl_trait() {
}
let waker = Arc::new(EmptyWaker).into();
let mut context = Context::from_waker(&waker);
let x = pinned.poll(&mut context);
x
pinned.poll(&mut context)
}
let x = unwrap_fut(f());
x
unwrap_fut(f())
}
size_and_align_expr! {
struct Foo<T>(T, T, (T, T));

View file

@ -36,7 +36,7 @@ fn ref_simple() {
let mut y: i32 = 5;
]
|x: i32| {
y = y + x;
y += x;
y
}
}
@ -66,7 +66,7 @@ fn ref_simple() {
let x: &mut X = &mut X(2, 6);
]
|| {
(*x).0 as i64 + x.1
x.0 as i64 + x.1
}
}
}
@ -188,9 +188,7 @@ fn match_pattern() {
struct X(i64, i32, (u8, i128));
let _y: X = X(2, 5, (7, 3));
move |x: i64| {
match _y {
_ => x,
}
x
}
}
size_and_align_expr! {
@ -264,8 +262,8 @@ fn regression_15623() {
let c = 5;
move || {
let 0 = a else { return b; };
let y = c;
y
c
}
}
}

View file

@ -228,7 +228,7 @@ impl MemoryMap {
let mut transform = |(addr, val): (&usize, &Box<[u8]>)| {
let addr = *addr;
let align = if addr == 0 { 64 } else { (addr - (addr & (addr - 1))).min(64) };
f(val, align).and_then(|it| Ok((addr, it)))
f(val, align).map(|it| (addr, it))
};
match self {
MemoryMap::Empty => Ok(Default::default()),
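`f(val, align).and_then(|it| Ok((addr, it)))` turning into `f(val, align).map(|it| (addr, it))` is `clippy::bind_instead_of_map`: an `and_then` whose closure always wraps its result in `Ok` (or `Some`) is just a `map`. Sketch on a `Result`, with an invented decoder closure:

```rust
// Sketch of `clippy::bind_instead_of_map`.
fn transform(addr: usize, val: &[u8]) -> Result<(usize, usize), String> {
    let decode = |bytes: &[u8]| -> Result<usize, String> {
        bytes.first().copied().map(usize::from).ok_or_else(|| "empty".to_owned())
    };
    // Before: `decode(val).and_then(|it| Ok((addr, it)))`
    decode(val).map(|it| (addr, it))
}

fn main() {
    println!("{:?}", transform(0x10, &[42]));
}
```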

View file

@ -1192,11 +1192,7 @@ impl<'a> TyLoweringContext<'a> {
return None;
}
if bounds.first().and_then(|b| b.trait_id()).is_none() {
// When there's no trait bound, that's an error. This happens when the trait refs
// are unresolved.
return None;
}
bounds.first().and_then(|b| b.trait_id())?;
// As multiple occurrences of the same auto traits *are* permitted, we deduplicate the
// bounds. We shouldn't have repeated elements besides auto traits at this point.
@ -1241,7 +1237,7 @@ impl<'a> TyLoweringContext<'a> {
});
crate::wrap_empty_binders(clause)
});
predicates.extend(sized_clause.into_iter());
predicates.extend(sized_clause);
predicates.shrink_to_fit();
}
predicates

View file

@ -86,7 +86,7 @@ impl TyFingerprint {
TyKind::Dyn(_) => ty.dyn_trait().map(TyFingerprint::Dyn)?,
TyKind::Ref(_, _, ty) => return TyFingerprint::for_trait_impl(ty),
TyKind::Tuple(_, subst) => {
let first_ty = subst.interned().get(0).map(|arg| arg.assert_ty_ref(Interner));
let first_ty = subst.interned().first().map(|arg| arg.assert_ty_ref(Interner));
match first_ty {
Some(ty) => return TyFingerprint::for_trait_impl(ty),
None => TyFingerprint::Unit,
@ -541,7 +541,7 @@ impl ReceiverAdjustments {
if let TyKind::Ref(m, l, inner) = ty.kind(Interner) {
if let TyKind::Array(inner, _) = inner.kind(Interner) {
break 'it TyKind::Ref(
m.clone(),
*m,
l.clone(),
TyKind::Slice(inner.clone()).intern(Interner),
)
@ -953,7 +953,7 @@ pub fn iterate_method_candidates_dyn(
let ty = table.instantiate_canonical(ty.clone());
let deref_chain = autoderef_method_receiver(&mut table, ty);
let result = deref_chain.into_iter().try_for_each(|(receiver_ty, adj)| {
deref_chain.into_iter().try_for_each(|(receiver_ty, adj)| {
iterate_method_candidates_with_autoref(
&receiver_ty,
adj,
@ -964,8 +964,7 @@ pub fn iterate_method_candidates_dyn(
name,
callback,
)
});
result
})
}
LookupMode::Path => {
// No autoderef for path lookups

View file

@ -159,7 +159,7 @@ impl<V, T> ProjectionElem<V, T> {
}
_ => {
never!("Overloaded deref on type {} is not a projection", base.display(db));
return TyKind::Error.intern(Interner);
TyKind::Error.intern(Interner)
}
},
ProjectionElem::Field(Either::Left(f)) => match &base.kind(Interner) {
@ -168,7 +168,7 @@ impl<V, T> ProjectionElem<V, T> {
}
ty => {
never!("Only adt has field, found {:?}", ty);
return TyKind::Error.intern(Interner);
TyKind::Error.intern(Interner)
}
},
ProjectionElem::Field(Either::Right(f)) => match &base.kind(Interner) {
@ -183,14 +183,14 @@ impl<V, T> ProjectionElem<V, T> {
}),
_ => {
never!("Only tuple has tuple field");
return TyKind::Error.intern(Interner);
TyKind::Error.intern(Interner)
}
},
ProjectionElem::ClosureField(f) => match &base.kind(Interner) {
TyKind::Closure(id, subst) => closure_field(*id, subst, *f),
_ => {
never!("Only closure has closure field");
return TyKind::Error.intern(Interner);
TyKind::Error.intern(Interner)
}
},
ProjectionElem::ConstantIndex { .. } | ProjectionElem::Index(_) => {
@ -198,7 +198,7 @@ impl<V, T> ProjectionElem<V, T> {
TyKind::Array(inner, _) | TyKind::Slice(inner) => inner.clone(),
_ => {
never!("Overloaded index is not a projection");
return TyKind::Error.intern(Interner);
TyKind::Error.intern(Interner)
}
}
}
@ -217,12 +217,12 @@ impl<V, T> ProjectionElem<V, T> {
TyKind::Slice(_) => base.clone(),
_ => {
never!("Subslice projection should only happen on slice and array");
return TyKind::Error.intern(Interner);
TyKind::Error.intern(Interner)
}
},
ProjectionElem::OpaqueCast(_) => {
never!("We don't emit these yet");
return TyKind::Error.intern(Interner);
TyKind::Error.intern(Interner)
}
}
}
@ -299,7 +299,7 @@ pub struct Place {
impl Place {
fn is_parent(&self, child: &Place, store: &ProjectionStore) -> bool {
self.local == child.local
&& child.projection.lookup(store).starts_with(&self.projection.lookup(store))
&& child.projection.lookup(store).starts_with(self.projection.lookup(store))
}
/// The place itself is not included

View file

@ -53,7 +53,7 @@ fn all_mir_bodies(
match db.mir_body_for_closure(c) {
Ok(body) => {
cb(body.clone());
body.closures.iter().map(|&it| for_closure(db, it, cb)).collect()
body.closures.iter().try_for_each(|&it| for_closure(db, it, cb))
}
Err(e) => Err(e),
}
@ -61,7 +61,7 @@ fn all_mir_bodies(
match db.mir_body(def) {
Ok(body) => {
cb(body.clone());
body.closures.iter().map(|&it| for_closure(db, it, &mut cb)).collect()
body.closures.iter().try_for_each(|&it| for_closure(db, it, &mut cb))
}
Err(e) => Err(e),
}
@ -257,7 +257,7 @@ fn ever_initialized_map(
for statement in &block.statements {
match &statement.kind {
StatementKind::Assign(p, _) => {
if p.projection.lookup(&body.projection_store).len() == 0 && p.local == l {
if p.projection.lookup(&body.projection_store).is_empty() && p.local == l {
is_ever_initialized = true;
}
}
@ -295,21 +295,15 @@ fn ever_initialized_map(
| TerminatorKind::Return
| TerminatorKind::Unreachable => (),
TerminatorKind::Call { target, cleanup, destination, .. } => {
if destination.projection.lookup(&body.projection_store).len() == 0
if destination.projection.lookup(&body.projection_store).is_empty()
&& destination.local == l
{
is_ever_initialized = true;
}
target
.into_iter()
.chain(cleanup.into_iter())
.for_each(|&it| process(it, is_ever_initialized));
target.iter().chain(cleanup).for_each(|&it| process(it, is_ever_initialized));
}
TerminatorKind::Drop { target, unwind, place: _ } => {
iter::once(target)
.into_iter()
.chain(unwind.into_iter())
.for_each(|&it| process(it, is_ever_initialized));
iter::once(target).chain(unwind).for_each(|&it| process(it, is_ever_initialized));
}
TerminatorKind::DropAndReplace { .. }
| TerminatorKind::Assert { .. }
@ -318,7 +312,6 @@ fn ever_initialized_map(
| TerminatorKind::FalseEdge { .. }
| TerminatorKind::FalseUnwind { .. } => {
never!("We don't emit these MIR terminators yet");
()
}
}
}
@ -452,7 +445,7 @@ fn mutability_of_locals(
for arg in args.iter() {
record_usage_for_operand(arg, &mut result);
}
if destination.projection.lookup(&body.projection_store).len() == 0 {
if destination.projection.lookup(&body.projection_store).is_empty() {
if ever_init_map.get(destination.local).copied().unwrap_or_default() {
push_mut_span(destination.local, MirSpan::Unknown, &mut result);
} else {
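The borrow-check walker switches from `.map(|&it| for_closure(db, it, cb)).collect()` to `.try_for_each(..)` (`clippy::map_collect_result_unit`: collecting a per-item `Result<(), E>` just to propagate the first error is what `try_for_each` does directly), alongside more `len() == 0` to `is_empty()` changes. Sketch with an invented closure-visiting structure:

```rust
// Sketch of the `try_for_each` rewrite; the body/closure model is made up.
#[derive(Debug)]
struct LayoutError;

fn visit_closure(id: u32, cb: &mut dyn FnMut(u32)) -> Result<(), LayoutError> {
    if id == 0 {
        return Err(LayoutError);
    }
    cb(id);
    Ok(())
}

fn visit_all(closures: &[u32], cb: &mut dyn FnMut(u32)) -> Result<(), LayoutError> {
    // Before: `closures.iter().map(|&it| visit_closure(it, cb)).collect()`
    closures.iter().try_for_each(|&it| visit_closure(it, cb))
}

fn main() {
    let mut seen = Vec::new();
    let result = visit_all(&[1, 2, 3], &mut |id| seen.push(id));
    println!("{result:?} {seen:?}");
}
```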

View file

@ -291,12 +291,11 @@ impl Address {
}
fn to_usize(&self) -> usize {
let as_num = match self {
match self {
Stack(it) => *it + STACK_OFFSET,
Heap(it) => *it + HEAP_OFFSET,
Invalid(it) => *it,
};
as_num
}
}
fn map(&self, f: impl FnOnce(usize) -> usize) -> Address {
@ -485,8 +484,7 @@ impl DropFlags {
fn remove_place(&mut self, p: &Place, store: &ProjectionStore) -> bool {
// FIXME: replace parents with parts
if let Some(parent) = p.iterate_over_parents(store).find(|it| self.need_drop.contains(&it))
{
if let Some(parent) = p.iterate_over_parents(store).find(|it| self.need_drop.contains(it)) {
self.need_drop.remove(&parent);
return true;
}
@ -551,7 +549,7 @@ pub fn interpret_mir(
memory_map.vtable.shrink_to_fit();
MemoryMap::Complex(Box::new(memory_map))
};
return Ok(intern_const_scalar(ConstScalar::Bytes(bytes, memory_map), ty));
Ok(intern_const_scalar(ConstScalar::Bytes(bytes, memory_map), ty))
})();
(it, MirOutput { stdout: evaluator.stdout, stderr: evaluator.stderr })
}
@ -562,12 +560,12 @@ const EXECUTION_LIMIT: usize = 100_000;
const EXECUTION_LIMIT: usize = 10_000_000;
impl Evaluator<'_> {
pub fn new<'a>(
db: &'a dyn HirDatabase,
pub fn new(
db: &dyn HirDatabase,
owner: DefWithBodyId,
assert_placeholder_ty_is_unused: bool,
trait_env: Option<Arc<TraitEnvironment>>,
) -> Evaluator<'a> {
) -> Evaluator<'_> {
let crate_id = owner.module(db.upcast()).krate();
Evaluator {
stack: vec![0],
@ -585,7 +583,7 @@ impl Evaluator<'_> {
assert_placeholder_ty_is_unused,
stack_depth_limit: 100,
execution_limit: EXECUTION_LIMIT,
memory_limit: 1000_000_000, // 2GB, 1GB for stack and 1GB for heap
memory_limit: 1_000_000_000, // 2GB, 1GB for stack and 1GB for heap
layout_cache: RefCell::new(Default::default()),
projected_ty_cache: RefCell::new(Default::default()),
not_special_fn_cache: RefCell::new(Default::default()),
@ -816,8 +814,8 @@ impl Evaluator<'_> {
})
}
fn interpret_mir<'slf>(
&'slf mut self,
fn interpret_mir(
&mut self,
body: Arc<MirBody>,
args: impl Iterator<Item = IntervalOrOwned>,
) -> Result<Interval> {
@ -837,7 +835,7 @@ impl Evaluator<'_> {
not_supported!("missing stack frame");
};
let e = (|| {
let mut locals = &mut my_stack_frame.locals;
let locals = &mut my_stack_frame.locals;
let body = locals.body.clone();
loop {
let current_block = &body.basic_blocks[current_block_idx];
@ -849,12 +847,10 @@ impl Evaluator<'_> {
for statement in &current_block.statements {
match &statement.kind {
StatementKind::Assign(l, r) => {
let addr = self.place_addr(l, &locals)?;
let result = self.eval_rvalue(r, &mut locals)?;
let addr = self.place_addr(l, locals)?;
let result = self.eval_rvalue(r, locals)?;
self.copy_from_interval_or_owned(addr, result)?;
locals
.drop_flags
.add_place(l.clone(), &locals.body.projection_store);
locals.drop_flags.add_place(*l, &locals.body.projection_store);
}
StatementKind::Deinit(_) => not_supported!("de-init statement"),
StatementKind::StorageLive(_)
@ -878,20 +874,20 @@ impl Evaluator<'_> {
cleanup: _,
from_hir_call: _,
} => {
let destination_interval = self.place_interval(destination, &locals)?;
let fn_ty = self.operand_ty(func, &locals)?;
let destination_interval = self.place_interval(destination, locals)?;
let fn_ty = self.operand_ty(func, locals)?;
let args = args
.iter()
.map(|it| self.operand_ty_and_eval(it, &mut locals))
.map(|it| self.operand_ty_and_eval(it, locals))
.collect::<Result<Vec<_>>>()?;
let stack_frame = match &fn_ty.kind(Interner) {
TyKind::Function(_) => {
let bytes = self.eval_operand(func, &mut locals)?;
let bytes = self.eval_operand(func, locals)?;
self.exec_fn_pointer(
bytes,
destination_interval,
&args,
&locals,
locals,
*target,
terminator.span,
)?
@ -901,7 +897,7 @@ impl Evaluator<'_> {
generic_args,
destination_interval,
&args,
&locals,
locals,
*target,
terminator.span,
)?,
@ -909,7 +905,7 @@ impl Evaluator<'_> {
};
locals
.drop_flags
.add_place(destination.clone(), &locals.body.projection_store);
.add_place(*destination, &locals.body.projection_store);
if let Some(stack_frame) = stack_frame {
self.code_stack.push(my_stack_frame);
current_block_idx = stack_frame.locals.body.start_block;
@ -924,7 +920,7 @@ impl Evaluator<'_> {
}
TerminatorKind::SwitchInt { discr, targets } => {
let val = u128::from_le_bytes(pad16(
self.eval_operand(discr, &mut locals)?.get(&self)?,
self.eval_operand(discr, locals)?.get(self)?,
false,
));
current_block_idx = targets.target_for_value(val);
@ -938,7 +934,7 @@ impl Evaluator<'_> {
));
}
TerminatorKind::Drop { place, target, unwind: _ } => {
self.drop_place(place, &mut locals, terminator.span)?;
self.drop_place(place, locals, terminator.span)?;
current_block_idx = *target;
}
_ => not_supported!("unknown terminator"),
@ -1081,7 +1077,7 @@ impl Evaluator<'_> {
}
}
Rvalue::UnaryOp(op, val) => {
let mut c = self.eval_operand(val, locals)?.get(&self)?;
let mut c = self.eval_operand(val, locals)?.get(self)?;
let mut ty = self.operand_ty(val, locals)?;
while let TyKind::Ref(_, _, z) = ty.kind(Interner) {
ty = z.clone();
@ -1124,8 +1120,8 @@ impl Evaluator<'_> {
Rvalue::CheckedBinaryOp(op, lhs, rhs) => 'binary_op: {
let lc = self.eval_operand(lhs, locals)?;
let rc = self.eval_operand(rhs, locals)?;
let mut lc = lc.get(&self)?;
let mut rc = rc.get(&self)?;
let mut lc = lc.get(self)?;
let mut rc = rc.get(self)?;
let mut ty = self.operand_ty(lhs, locals)?;
while let TyKind::Ref(_, _, z) = ty.kind(Interner) {
ty = z.clone();
@ -1277,12 +1273,12 @@ impl Evaluator<'_> {
}
Rvalue::Discriminant(p) => {
let ty = self.place_ty(p, locals)?;
let bytes = self.eval_place(p, locals)?.get(&self)?;
let bytes = self.eval_place(p, locals)?.get(self)?;
let result = self.compute_discriminant(ty, bytes)?;
Owned(result.to_le_bytes().to_vec())
}
Rvalue::Repeat(it, len) => {
let len = match try_const_usize(self.db, &len) {
let len = match try_const_usize(self.db, len) {
Some(it) => it as usize,
None => not_supported!("non evaluatable array len in repeat Rvalue"),
};
@ -1308,13 +1304,13 @@ impl Evaluator<'_> {
AggregateKind::Array(_) => {
let mut r = vec![];
for it in values {
let value = it.get(&self)?;
let value = it.get(self)?;
r.extend(value);
}
Owned(r)
}
AggregateKind::Tuple(ty) => {
let layout = self.layout(&ty)?;
let layout = self.layout(ty)?;
Owned(self.make_by_layout(
layout.size.bytes_usize(),
&layout,
@ -1329,7 +1325,7 @@ impl Evaluator<'_> {
.fields
.offset(u32::from(f.local_id.into_raw()) as usize)
.bytes_usize();
let op = values[0].get(&self)?;
let op = values[0].get(self)?;
let mut result = vec![0; layout.size.bytes_usize()];
result[offset..offset + op.len()].copy_from_slice(op);
Owned(result)
@ -1345,7 +1341,7 @@ impl Evaluator<'_> {
)?)
}
AggregateKind::Closure(ty) => {
let layout = self.layout(&ty)?;
let layout = self.layout(ty)?;
Owned(self.make_by_layout(
layout.size.bytes_usize(),
&layout,
@ -1397,7 +1393,7 @@ impl Evaluator<'_> {
},
_ => false,
};
let current = pad16(self.eval_operand(operand, locals)?.get(&self)?, is_signed);
let current = pad16(self.eval_operand(operand, locals)?.get(self)?, is_signed);
let dest_size =
self.size_of_sized(target_ty, locals, "destination of int to int cast")?;
Owned(current[0..dest_size].to_vec())
@ -1513,7 +1509,7 @@ impl Evaluator<'_> {
let mut r = Vec::with_capacity(16);
let addr = addr.get(self)?;
r.extend(addr.iter().copied());
r.extend(len.to_le_bytes().into_iter());
r.extend(len.to_le_bytes());
Owned(r)
}
t => {
@ -1525,7 +1521,7 @@ impl Evaluator<'_> {
let mut r = Vec::with_capacity(16);
let addr = addr.get(self)?;
r.extend(addr.iter().copied());
r.extend(vtable.to_le_bytes().into_iter());
r.extend(vtable.to_le_bytes());
Owned(r)
}
TyKind::Adt(id, target_subst) => match &current_ty.kind(Interner) {
@ -1539,7 +1535,7 @@ impl Evaluator<'_> {
AdtId::EnumId(_) => not_supported!("unsizing enums"),
};
let Some((last_field, _)) =
self.db.struct_data(id).variant_data.fields().iter().rev().next()
self.db.struct_data(id).variant_data.fields().iter().next_back()
else {
not_supported!("unsizing struct without field");
};
@ -1645,7 +1641,7 @@ impl Evaluator<'_> {
}
for (i, op) in values.enumerate() {
let offset = variant_layout.fields.offset(i).bytes_usize();
let op = op.get(&self)?;
let op = op.get(self)?;
match result.get_mut(offset..offset + op.len()) {
Some(it) => it.copy_from_slice(op),
None => return Err(MirEvalError::BrokenLayout(Box::new(variant_layout.clone()))),
@ -1686,7 +1682,7 @@ impl Evaluator<'_> {
}
result_owner = self
.db
.const_eval(const_id.into(), subst, Some(self.trait_env.clone()))
.const_eval(const_id, subst, Some(self.trait_env.clone()))
.map_err(|e| {
let name = const_id.name(self.db.upcast());
MirEvalError::ConstEvalError(name, Box::new(e))
@ -1769,9 +1765,8 @@ impl Evaluator<'_> {
)));
}
};
Ok(mem.get_mut(pos..pos + size).ok_or_else(|| {
MirEvalError::UndefinedBehavior("out of bound memory write".to_string())
})?)
mem.get_mut(pos..pos + size)
.ok_or_else(|| MirEvalError::UndefinedBehavior("out of bound memory write".to_string()))
}
fn write_memory(&mut self, addr: Address, r: &[u8]) -> Result<()> {
@ -1960,14 +1955,14 @@ impl Evaluator<'_> {
if let Some(ty) = check_inner {
for i in 0..count {
let offset = element_size * i;
rec(this, &b[offset..offset + element_size], &ty, locals, mm)?;
rec(this, &b[offset..offset + element_size], ty, locals, mm)?;
}
}
}
}
}
chalk_ir::TyKind::Array(inner, len) => {
let len = match try_const_usize(this.db, &len) {
let len = match try_const_usize(this.db, len) {
Some(it) => it as usize,
None => not_supported!("non evaluatable array len in patching addresses"),
};
@ -2028,7 +2023,7 @@ impl Evaluator<'_> {
Ok(())
}
let mut mm = ComplexMemoryMap::default();
rec(&self, bytes, ty, locals, &mut mm)?;
rec(self, bytes, ty, locals, &mut mm)?;
Ok(mm)
}
@ -2111,7 +2106,7 @@ impl Evaluator<'_> {
}
}
TyKind::Array(inner, len) => {
let len = match try_const_usize(self.db, &len) {
let len = match try_const_usize(self.db, len) {
Some(it) => it as usize,
None => not_supported!("non evaluatable array len in patching addresses"),
};
@ -2161,7 +2156,7 @@ impl Evaluator<'_> {
let next_ty = self.vtable_map.ty(id)?.clone();
match &next_ty.kind(Interner) {
TyKind::FnDef(def, generic_args) => {
self.exec_fn_def(*def, generic_args, destination, args, &locals, target_bb, span)
self.exec_fn_def(*def, generic_args, destination, args, locals, target_bb, span)
}
TyKind::Closure(id, subst) => {
self.exec_closure(*id, bytes.slice(0..0), subst, destination, args, locals, span)
@ -2195,7 +2190,7 @@ impl Evaluator<'_> {
closure_data.get(self)?.to_owned()
};
let arg_bytes = iter::once(Ok(closure_data))
.chain(args.iter().map(|it| Ok(it.get(&self)?.to_owned())))
.chain(args.iter().map(|it| Ok(it.get(self)?.to_owned())))
.collect::<Result<Vec<_>>>()?;
let interval = self
.interpret_mir(mir_body, arg_bytes.into_iter().map(IntervalOrOwned::Owned))
@ -2246,7 +2241,7 @@ impl Evaluator<'_> {
}
CallableDefId::StructId(id) => {
let (size, variant_layout, tag) =
self.layout_of_variant(id.into(), generic_args, &locals)?;
self.layout_of_variant(id.into(), generic_args, locals)?;
let result = self.make_by_layout(
size,
&variant_layout,
@ -2258,7 +2253,7 @@ impl Evaluator<'_> {
}
CallableDefId::EnumVariantId(id) => {
let (size, variant_layout, tag) =
self.layout_of_variant(id.into(), generic_args, &locals)?;
self.layout_of_variant(id.into(), generic_args, locals)?;
let result = self.make_by_layout(
size,
&variant_layout,
@ -2353,7 +2348,7 @@ impl Evaluator<'_> {
}
}),
);
return self.exec_fn_with_args(
self.exec_fn_with_args(
def,
&args_for_target,
generics_for_target,
@ -2361,7 +2356,7 @@ impl Evaluator<'_> {
destination,
target_bb,
span,
);
)
}
MirOrDynIndex::Mir(body) => self.exec_looked_up_function(
body,
@ -2413,7 +2408,7 @@ impl Evaluator<'_> {
target_bb: Option<BasicBlockId>,
span: MirSpan,
) -> Result<Option<StackFrame>> {
let func = args.get(0).ok_or(MirEvalError::TypeError("fn trait with no arg"))?;
let func = args.first().ok_or(MirEvalError::TypeError("fn trait with no arg"))?;
let mut func_ty = func.ty.clone();
let mut func_data = func.interval;
while let TyKind::Ref(_, _, z) = func_ty.kind(Interner) {
@ -2429,25 +2424,10 @@ impl Evaluator<'_> {
}
match &func_ty.kind(Interner) {
TyKind::FnDef(def, subst) => {
return self.exec_fn_def(
*def,
subst,
destination,
&args[1..],
locals,
target_bb,
span,
);
self.exec_fn_def(*def, subst, destination, &args[1..], locals, target_bb, span)
}
TyKind::Function(_) => {
return self.exec_fn_pointer(
func_data,
destination,
&args[1..],
locals,
target_bb,
span,
);
self.exec_fn_pointer(func_data, destination, &args[1..], locals, target_bb, span)
}
TyKind::Closure(closure, subst) => {
return self.exec_closure(
@ -2483,7 +2463,7 @@ impl Evaluator<'_> {
self.write_memory(addr, &result)?;
IntervalAndTy { interval: Interval { addr, size }, ty }
};
return self.exec_fn_with_args(
self.exec_fn_with_args(
def,
&[arg0.clone(), arg1],
generic_args,
@ -2491,7 +2471,7 @@ impl Evaluator<'_> {
destination,
target_bb,
span,
);
)
}
}
}
@ -2511,7 +2491,7 @@ impl Evaluator<'_> {
self.allocate_const_in_heap(locals, &konst)?
} else {
let ty = &self.db.infer(st.into())[self.db.body(st.into()).body_expr];
let Some((size, align)) = self.size_align_of(&ty, locals)? else {
let Some((size, align)) = self.size_align_of(ty, locals)? else {
not_supported!("unsized extern static");
};
let addr = self.heap_allocate(size, align)?;
@ -2669,7 +2649,7 @@ pub fn render_const_using_debug_impl(
db.upcast(),
&hir_def::path::Path::from_known_path_with_no_generic(ModPath::from_segments(
hir_expand::mod_path::PathKind::Abs,
[name![core], name![fmt], name![Debug]].into_iter(),
[name![core], name![fmt], name![Debug]],
)),
) else {
not_supported!("core::fmt::Debug not found");
@ -2701,7 +2681,7 @@ pub fn render_const_using_debug_impl(
db.upcast(),
&hir_def::path::Path::from_known_path_with_no_generic(ModPath::from_segments(
hir_expand::mod_path::PathKind::Abs,
[name![std], name![fmt], name![format]].into_iter(),
[name![std], name![fmt], name![format]],
)),
) else {
not_supported!("std::fmt::format not found");
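Besides many more `needless_borrow` and `needless_lifetimes` fixes, this file picks up clippy's digit-grouping lint (`inconsistent_digit_grouping`, `1000_000_000` to `1_000_000_000`) and the `iter().rev().next()` to `iter().next_back()` rewrite on double-ended iterators. Sketch with invented values:

```rust
// Sketch; the memory limit and field list are invented.
const MEMORY_LIMIT: usize = 1_000_000_000; // before: 1000_000_000

fn last_field(field_sizes: &[u32]) -> Option<u32> {
    // Before: `field_sizes.iter().rev().next()` -- a DoubleEndedIterator can
    // yield its last element directly.
    field_sizes.iter().next_back().copied()
}

fn main() {
    println!("limit = {}", MEMORY_LIMIT);
    println!("last field size = {:?}", last_field(&[4, 8, 16]));
}
```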

View file

@ -60,7 +60,7 @@ impl Evaluator<'_> {
args,
generic_args,
destination,
&locals,
locals,
span,
)?;
return Ok(true);
@ -82,7 +82,7 @@ impl Evaluator<'_> {
args,
generic_args,
destination,
&locals,
locals,
span,
)?;
return Ok(true);
@ -100,7 +100,7 @@ impl Evaluator<'_> {
args,
generic_args,
destination,
&locals,
locals,
span,
)?;
return Ok(true);
@ -125,7 +125,7 @@ impl Evaluator<'_> {
}
if let Some(it) = self.detect_lang_function(def) {
let arg_bytes =
args.iter().map(|it| Ok(it.get(&self)?.to_owned())).collect::<Result<Vec<_>>>()?;
args.iter().map(|it| Ok(it.get(self)?.to_owned())).collect::<Result<Vec<_>>>()?;
let result = self.exec_lang_item(it, generic_args, &arg_bytes, locals, span)?;
destination.write_from_bytes(self, &result)?;
return Ok(true);
@ -313,7 +313,7 @@ impl Evaluator<'_> {
&hir_def::path::Path::from_known_path_with_no_generic(
ModPath::from_segments(
hir_expand::mod_path::PathKind::Abs,
[name![std], name![fmt], name![format]].into_iter(),
[name![std], name![fmt], name![format]],
),
),
) else {
@ -347,7 +347,7 @@ impl Evaluator<'_> {
}
DropInPlace => {
let ty =
generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner)).ok_or(
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner)).ok_or(
MirEvalError::TypeError(
"generic argument of drop_in_place is not provided",
),
@ -445,7 +445,7 @@ impl Evaluator<'_> {
}
"pthread_key_create" => {
let key = self.thread_local_storage.create_key();
let Some(arg0) = args.get(0) else {
let Some(arg0) = args.first() else {
return Err(MirEvalError::TypeError("pthread_key_create arg0 is not provided"));
};
let arg0_addr = Address::from_bytes(arg0.get(self)?)?;
@ -466,7 +466,7 @@ impl Evaluator<'_> {
Ok(())
}
"pthread_getspecific" => {
let Some(arg0) = args.get(0) else {
let Some(arg0) = args.first() else {
return Err(MirEvalError::TypeError(
"pthread_getspecific arg0 is not provided",
));
@ -477,7 +477,7 @@ impl Evaluator<'_> {
Ok(())
}
"pthread_setspecific" => {
let Some(arg0) = args.get(0) else {
let Some(arg0) = args.first() else {
return Err(MirEvalError::TypeError(
"pthread_setspecific arg0 is not provided",
));
@ -728,7 +728,7 @@ impl Evaluator<'_> {
match name {
"size_of" => {
let Some(ty) =
generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
return Err(MirEvalError::TypeError("size_of generic arg is not provided"));
};
@ -737,7 +737,7 @@ impl Evaluator<'_> {
}
"min_align_of" | "pref_align_of" => {
let Some(ty) =
generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
return Err(MirEvalError::TypeError("align_of generic arg is not provided"));
};
@ -746,7 +746,7 @@ impl Evaluator<'_> {
}
"size_of_val" => {
let Some(ty) =
generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
return Err(MirEvalError::TypeError("size_of_val generic arg is not provided"));
};
@ -763,7 +763,7 @@ impl Evaluator<'_> {
}
"min_align_of_val" => {
let Some(ty) =
generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
return Err(MirEvalError::TypeError(
"min_align_of_val generic arg is not provided",
@ -782,7 +782,7 @@ impl Evaluator<'_> {
}
"type_name" => {
let Some(ty) =
generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
return Err(MirEvalError::TypeError("type_name generic arg is not provided"));
};
@ -806,7 +806,7 @@ impl Evaluator<'_> {
}
"needs_drop" => {
let Some(ty) =
generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
return Err(MirEvalError::TypeError("size_of generic arg is not provided"));
};
@ -859,7 +859,7 @@ impl Evaluator<'_> {
let rhs = i128::from_le_bytes(pad16(rhs.get(self)?, false));
let ans = lhs.wrapping_sub(rhs);
let Some(ty) =
generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
return Err(MirEvalError::TypeError(
"ptr_offset_from generic arg is not provided",
@ -971,7 +971,7 @@ impl Evaluator<'_> {
));
};
let Some(ty) =
generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
return Err(MirEvalError::TypeError(
"copy_nonoverlapping generic arg is not provided",
@ -992,7 +992,7 @@ impl Evaluator<'_> {
};
let ty = if name == "offset" {
let Some(ty0) =
generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
return Err(MirEvalError::TypeError("offset generic arg is not provided"));
};
@ -1022,7 +1022,7 @@ impl Evaluator<'_> {
}
} else {
let Some(ty) =
generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
return Err(MirEvalError::TypeError(
"arith_offset generic arg is not provided",
@ -1147,7 +1147,7 @@ impl Evaluator<'_> {
return Err(MirEvalError::TypeError("discriminant_value arg is not provided"));
};
let Some(ty) =
generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
return Err(MirEvalError::TypeError(
"discriminant_value generic arg is not provided",
@ -1207,7 +1207,7 @@ impl Evaluator<'_> {
};
let dst = Address::from_bytes(ptr.get(self)?)?;
let Some(ty) =
generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
return Err(MirEvalError::TypeError(
"write_via_copy generic arg is not provided",
@ -1224,7 +1224,7 @@ impl Evaluator<'_> {
let count = from_bytes!(usize, count.get(self)?);
let val = from_bytes!(u8, val.get(self)?);
let Some(ty) =
generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
return Err(MirEvalError::TypeError("write_bytes generic arg is not provided"));
};
@ -1265,7 +1265,7 @@ impl Evaluator<'_> {
};
let field_types = &self.db.field_types(id.into());
let last_field_ty =
field_types.iter().rev().next().unwrap().1.clone().substitute(Interner, subst);
field_types.iter().next_back().unwrap().1.clone().substitute(Interner, subst);
let sized_part_size =
layout.fields.offset(field_types.iter().count() - 1).bytes_usize();
let sized_part_align = layout.align.abi.bytes() as usize;
@ -1308,10 +1308,11 @@ impl Evaluator<'_> {
// The rest of atomic intrinsics have exactly one generic arg
let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner)) else {
let Some(ty) = generic_args.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
return Err(MirEvalError::TypeError("atomic intrinsic generic arg is not provided"));
};
let Some(arg0) = args.get(0) else {
let Some(arg0) = args.first() else {
return Err(MirEvalError::TypeError("atomic intrinsic arg0 is not provided"));
};
let arg0_addr = Address::from_bytes(arg0.get(self)?)?;

View file

@ -46,7 +46,7 @@ impl Evaluator<'_> {
match try_const_usize(self.db, len) {
Some(len) => {
let Some(ty) =
subst.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
subst.as_slice(Interner).first().and_then(|it| it.ty(Interner))
else {
return Err(MirEvalError::TypeError("simd type with no ty param"));
};
@ -106,7 +106,7 @@ impl Evaluator<'_> {
}
}
if is_signed {
if let Some((&l, &r)) = l.iter().zip(r).rev().next() {
if let Some((&l, &r)) = l.iter().zip(r).next_back() {
if l != r {
result = (l as i8).cmp(&(r as i8));
}

View file

@ -30,7 +30,7 @@ fn eval_main(db: &TestDB, file_id: FileId) -> Result<(String, String), MirEvalEr
Substitution::empty(Interner),
db.trait_environment(func_id.into()),
)
.map_err(|e| MirEvalError::MirLowerError(func_id.into(), e))?;
.map_err(|e| MirEvalError::MirLowerError(func_id, e))?;
let (result, output) = interpret_mir(db, body, false, None);
result?;
Ok((output.stdout().into_owned(), output.stderr().into_owned()))
@ -50,7 +50,7 @@ fn check_pass_and_stdio(ra_fixture: &str, expected_stdout: &str, expected_stderr
let line_index = |size: TextSize| {
let mut size = u32::from(size) as usize;
let mut lines = ra_fixture.lines().enumerate();
while let Some((i, l)) = lines.next() {
for (i, l) in lines {
if let Some(x) = size.checked_sub(l.len()) {
size = x;
} else {
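The test helper's loop is `clippy::while_let_on_iterator`: `while let Some((i, l)) = lines.next()` over an iterator the loop fully consumes is just a `for` loop. Sketch of the same line-index idea under that assumption:

```rust
// Sketch of the `while_let_on_iterator` rewrite in the line-index helper above.
fn line_of_offset(text: &str, mut offset: usize) -> usize {
    let lines = text.lines().enumerate();
    // Before:
    //   let mut lines = text.lines().enumerate();
    //   while let Some((i, l)) = lines.next() { ... }
    for (i, l) in lines {
        match offset.checked_sub(l.len() + 1) {
            Some(rest) => offset = rest,
            None => return i,
        }
    }
    0
}

fn main() {
    let text = "fn main() {\n    println!(\"hi\");\n}\n";
    println!("offset 15 is on line {}", line_of_offset(text, 15));
}
```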

View file

@ -259,7 +259,8 @@ impl<'ctx> MirLowerCtx<'ctx> {
owner,
closures: vec![],
};
let ctx = MirLowerCtx {
MirLowerCtx {
result: mir,
db,
infer,
@ -269,8 +270,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
labeled_loop_blocks: Default::default(),
discr_temp: None,
drop_scopes: vec![DropScope::default()],
};
ctx
}
}
fn temp(&mut self, ty: Ty, current: BasicBlockId, span: MirSpan) -> Result<LocalId> {
@ -345,8 +345,8 @@ impl<'ctx> MirLowerCtx<'ctx> {
current,
place,
Rvalue::Cast(
CastKind::Pointer(cast.clone()),
Operand::Copy(p).into(),
CastKind::Pointer(*cast),
Operand::Copy(p),
last.target.clone(),
),
expr_id.into(),
@ -511,8 +511,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
return Ok(None);
};
let start_of_then = self.new_basic_block();
let end_of_then =
self.lower_expr_to_place(*then_branch, place.clone(), start_of_then)?;
let end_of_then = self.lower_expr_to_place(*then_branch, place, start_of_then)?;
let start_of_else = self.new_basic_block();
let end_of_else = if let Some(else_branch) = else_branch {
self.lower_expr_to_place(*else_branch, place, start_of_else)?
@ -539,7 +538,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
self.pattern_match(current, None, cond_place, *pat)?;
self.write_bytes_to_place(
then_target,
place.clone(),
place,
Box::new([1]),
TyBuilder::bool(),
MirSpan::Unknown,
@ -560,25 +559,19 @@ impl<'ctx> MirLowerCtx<'ctx> {
}
Expr::Block { id: _, statements, tail, label } => {
if let Some(label) = label {
self.lower_loop(
current,
place.clone(),
Some(*label),
expr_id.into(),
|this, begin| {
if let Some(current) = this.lower_block_to_place(
statements,
begin,
*tail,
place,
expr_id.into(),
)? {
let end = this.current_loop_end()?;
this.set_goto(current, end, expr_id.into());
}
Ok(())
},
)
self.lower_loop(current, place, Some(*label), expr_id.into(), |this, begin| {
if let Some(current) = this.lower_block_to_place(
statements,
begin,
*tail,
place,
expr_id.into(),
)? {
let end = this.current_loop_end()?;
this.set_goto(current, end, expr_id.into());
}
Ok(())
})
} else {
self.lower_block_to_place(statements, current, *tail, place, expr_id.into())
}
@ -646,9 +639,9 @@ impl<'ctx> MirLowerCtx<'ctx> {
);
}
TyKind::Error => {
return Err(MirLowerError::MissingFunctionDefinition(self.owner, expr_id))
Err(MirLowerError::MissingFunctionDefinition(self.owner, expr_id))
}
_ => return Err(MirLowerError::TypeError("function call on bad type")),
_ => Err(MirLowerError::TypeError("function call on bad type")),
}
}
Expr::MethodCall { receiver, args, method_name, .. } => {
@ -678,7 +671,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
let mut end = None;
for MatchArm { pat, guard, expr } in arms.iter() {
let (then, mut otherwise) =
self.pattern_match(current, None, cond_place.clone(), *pat)?;
self.pattern_match(current, None, cond_place, *pat)?;
let then = if let &Some(guard) = guard {
let next = self.new_basic_block();
let o = otherwise.get_or_insert_with(|| self.new_basic_block());
@ -696,7 +689,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
} else {
then
};
if let Some(block) = self.lower_expr_to_place(*expr, place.clone(), then)? {
if let Some(block) = self.lower_expr_to_place(*expr, place, then)? {
let r = end.get_or_insert_with(|| self.new_basic_block());
self.set_goto(block, *r, expr_id.into());
}
@ -742,9 +735,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
.as_ref()
.ok_or(MirLowerError::BreakWithoutLoop)?,
};
let Some(c) =
self.lower_expr_to_place(expr, loop_data.place.clone(), current)?
else {
let Some(c) = self.lower_expr_to_place(expr, loop_data.place, current)? else {
return Ok(None);
};
current = c;
@ -906,7 +897,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
let ty = self.expr_ty_after_adjustments(*expr);
self.push_assignment(
current,
place.clone(),
place,
Rvalue::ShallowInitBoxWithAlloc(ty),
expr_id.into(),
);
@ -1006,11 +997,8 @@ impl<'ctx> MirLowerCtx<'ctx> {
else {
return Ok(None);
};
let r_value = Rvalue::CheckedBinaryOp(
op.into(),
Operand::Copy(lhs_place.clone()),
rhs_op,
);
let r_value =
Rvalue::CheckedBinaryOp(op.into(), Operand::Copy(lhs_place), rhs_op);
self.push_assignment(current, lhs_place, r_value, expr_id.into());
return Ok(Some(current));
} else {
@ -1029,7 +1017,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
let start_of_then = self.new_basic_block();
self.push_assignment(
start_of_then,
place.clone(),
place,
lhs_op.clone().into(),
expr_id.into(),
);
@ -1168,12 +1156,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
let tmp_ty =
capture.ty.clone().substitute(Interner, &placeholder_subst);
let tmp: Place = self.temp(tmp_ty, current, capture.span)?.into();
self.push_assignment(
current,
tmp.clone(),
Rvalue::Ref(bk.clone(), p),
capture.span,
);
self.push_assignment(current, tmp, Rvalue::Ref(*bk, p), capture.span);
operands.push(Operand::Move(tmp));
}
CaptureKind::ByValue => operands.push(Operand::Move(p)),
@ -1322,7 +1305,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
) {
let temp = self.temp(self.expr_ty_after_adjustments(rhs), current, rhs.into())?;
let temp = Place::from(temp);
self.push_assignment(current, temp.clone(), rhs_op.into(), span);
self.push_assignment(current, temp, rhs_op.into(), span);
return self.lower_destructing_assignment(current, lhs, temp, span);
}
let Some((lhs_place, current)) = self.lower_expr_as_place(current, lhs, false)? else {
@ -1333,11 +1316,10 @@ impl<'ctx> MirLowerCtx<'ctx> {
}
fn placeholder_subst(&mut self) -> Substitution {
let placeholder_subst = match self.owner.as_generic_def_id() {
match self.owner.as_generic_def_id() {
Some(it) => TyBuilder::placeholder_subst(self.db, it),
None => Substitution::empty(Interner),
};
placeholder_subst
}
}
fn push_field_projection(&mut self, place: &mut Place, expr_id: ExprId) -> Result<()> {
@ -1470,7 +1452,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
} else {
let name = const_id.name(self.db.upcast());
self.db
.const_eval(const_id.into(), subst, None)
.const_eval(const_id, subst, None)
.map_err(|e| MirLowerError::ConstEvalError(name.into(), Box::new(e)))?
};
Ok(Operand::Constant(c))
@ -1612,13 +1594,13 @@ impl<'ctx> MirLowerCtx<'ctx> {
fn discr_temp_place(&mut self, current: BasicBlockId) -> Place {
match &self.discr_temp {
Some(it) => it.clone(),
Some(it) => *it,
None => {
let tmp: Place = self
.temp(TyBuilder::discr_ty(), current, MirSpan::Unknown)
.expect("discr_ty is never unsized")
.into();
self.discr_temp = Some(tmp.clone());
self.discr_temp = Some(tmp);
tmp
}
}
@ -2041,19 +2023,16 @@ pub fn mir_body_for_closure_query(
ctx.result.walk_places(|p, store| {
if let Some(it) = upvar_map.get(&p.local) {
let r = it.iter().find(|it| {
if p.projection.lookup(&store).len() < it.0.place.projections.len() {
if p.projection.lookup(store).len() < it.0.place.projections.len() {
return false;
}
for (it, y) in p.projection.lookup(&store).iter().zip(it.0.place.projections.iter())
for (it, y) in p.projection.lookup(store).iter().zip(it.0.place.projections.iter())
{
match (it, y) {
(ProjectionElem::Deref, ProjectionElem::Deref) => (),
(ProjectionElem::Field(it), ProjectionElem::Field(y)) if it == y => (),
(ProjectionElem::ClosureField(it), ProjectionElem::ClosureField(y))
if it == y =>
{
()
}
if it == y => {}
_ => return false,
}
}
@ -2069,15 +2048,11 @@ pub fn mir_body_for_closure_query(
next_projs.push(ProjectionElem::Deref);
}
next_projs.extend(
prev_projs
.lookup(&store)
.iter()
.skip(it.0.place.projections.len())
.cloned(),
prev_projs.lookup(store).iter().skip(it.0.place.projections.len()).cloned(),
);
p.projection = store.intern(next_projs.into());
}
None => err = Some(p.clone()),
None => err = Some(*p),
}
}
});
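
Most of the churn in this file is clippy's `clone_on_copy` and related lints dropping `.clone()` (or replacing it with a dereference, as in `it.clone()` becoming `*it`) on values that are `Copy`, which suggests `Place`, `CastKind`, and similar MIR types are `Copy` in this codebase. A self-contained sketch of the lint with an invented `Copy` type:

#[derive(Clone, Copy, PartialEq, Debug)]
struct Place {
    local: u32,
}

fn store(dest: Place) -> Place {
    dest
}

fn main() {
    let p = Place { local: 7 };
    // Before: `store(p.clone())` and `Some(p.clone())`; for a `Copy` type a plain
    // copy is equivalent, which is exactly what clippy rewrites to.
    let q = store(p);
    let r: Option<Place> = Some(p);
    assert_eq!(r, Some(q));
}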

View file

@ -148,7 +148,7 @@ impl MirLowerCtx<'_> {
let temp: Place = self.temp(ref_ty, current, expr_id.into())?.into();
self.push_assignment(
current,
temp.clone(),
temp,
Operand::Static(s).into(),
expr_id.into(),
);
@ -304,7 +304,7 @@ impl MirLowerCtx<'_> {
let Some(current) = self.lower_call(
index_fn_op,
Box::new([Operand::Copy(place), index_operand]),
result.clone(),
result,
current,
false,
span,
@ -338,7 +338,7 @@ impl MirLowerCtx<'_> {
let ty_ref = TyKind::Ref(chalk_mut, static_lifetime(), source_ty.clone()).intern(Interner);
let target_ty_ref = TyKind::Ref(chalk_mut, static_lifetime(), target_ty).intern(Interner);
let ref_place: Place = self.temp(ty_ref, current, span)?.into();
self.push_assignment(current, ref_place.clone(), Rvalue::Ref(borrow_kind, place), span);
self.push_assignment(current, ref_place, Rvalue::Ref(borrow_kind, place), span);
let deref_trait = self
.resolve_lang_item(trait_lang_item)?
.as_trait()
@ -359,7 +359,7 @@ impl MirLowerCtx<'_> {
let Some(current) = self.lower_call(
deref_fn_op,
Box::new([Operand::Copy(ref_place)]),
result.clone(),
result,
current,
false,
span,

View file

@ -58,7 +58,7 @@ impl MirLowerCtx<'_> {
let (current, current_else) = self.pattern_match_inner(
current,
current_else,
cond_place.clone(),
cond_place,
pattern,
MatchingMode::Check,
)?;
@ -114,7 +114,7 @@ impl MirLowerCtx<'_> {
index: i as u32,
}))
}),
&(&mut cond_place),
&mut cond_place,
mode,
)?
}
@ -125,7 +125,7 @@ impl MirLowerCtx<'_> {
let (mut next, next_else) = self.pattern_match_inner(
current,
None,
(&mut cond_place).clone(),
cond_place,
*pat,
MatchingMode::Check,
)?;
@ -133,7 +133,7 @@ impl MirLowerCtx<'_> {
(next, _) = self.pattern_match_inner(
next,
None,
(&mut cond_place).clone(),
cond_place,
*pat,
MatchingMode::Bind,
)?;
@ -169,7 +169,7 @@ impl MirLowerCtx<'_> {
current,
pattern.into(),
current_else,
AdtPatternShape::Record { args: &*args },
AdtPatternShape::Record { args },
mode,
)?
}
@ -183,12 +183,8 @@ impl MirLowerCtx<'_> {
self.temp(TyBuilder::bool(), current, pattern.into())?.into();
self.push_assignment(
current,
discr.clone(),
Rvalue::CheckedBinaryOp(
binop,
lv,
Operand::Copy((&mut cond_place).clone()),
),
discr,
Rvalue::CheckedBinaryOp(binop, lv, Operand::Copy(cond_place)),
pattern.into(),
);
let discr = Operand::Copy(discr);
@ -222,8 +218,8 @@ impl MirLowerCtx<'_> {
self.temp(TyBuilder::usize(), current, pattern.into())?.into();
self.push_assignment(
current,
place_len.clone(),
Rvalue::Len((&mut cond_place).clone()),
place_len,
Rvalue::Len(cond_place),
pattern.into(),
);
let else_target =
@ -252,7 +248,7 @@ impl MirLowerCtx<'_> {
self.temp(TyBuilder::bool(), current, pattern.into())?.into();
self.push_assignment(
current,
discr.clone(),
discr,
Rvalue::CheckedBinaryOp(BinOp::Le, c, Operand::Copy(place_len)),
pattern.into(),
);
@ -270,7 +266,7 @@ impl MirLowerCtx<'_> {
}
}
for (i, &pat) in prefix.iter().enumerate() {
let next_place = (&mut cond_place).project(
let next_place = cond_place.project(
ProjectionElem::ConstantIndex { offset: i as u64, from_end: false },
&mut self.result.projection_store,
);
@ -280,7 +276,7 @@ impl MirLowerCtx<'_> {
if let Some(slice) = slice {
if mode == MatchingMode::Bind {
if let Pat::Bind { id, subpat: _ } = self.body[*slice] {
let next_place = (&mut cond_place).project(
let next_place = cond_place.project(
ProjectionElem::Subslice {
from: prefix.len() as u64,
to: suffix.len() as u64,
@ -299,7 +295,7 @@ impl MirLowerCtx<'_> {
}
}
for (i, &pat) in suffix.iter().enumerate() {
let next_place = (&mut cond_place).project(
let next_place = cond_place.project(
ProjectionElem::ConstantIndex { offset: i as u64, from_end: true },
&mut self.result.projection_store,
);
@ -348,7 +344,7 @@ impl MirLowerCtx<'_> {
self.lower_const(
c.into(),
current,
tmp.clone(),
tmp,
subst,
span,
self.infer[pattern].clone(),
@ -356,7 +352,7 @@ impl MirLowerCtx<'_> {
let tmp2: Place = self.temp(TyBuilder::bool(), current, pattern.into())?.into();
self.push_assignment(
current,
tmp2.clone(),
tmp2,
Rvalue::CheckedBinaryOp(
BinOp::Eq,
Operand::Copy(tmp),
@ -390,13 +386,8 @@ impl MirLowerCtx<'_> {
},
Pat::Bind { id, subpat } => {
if let Some(subpat) = subpat {
(current, current_else) = self.pattern_match_inner(
current,
current_else,
(&mut cond_place).clone(),
*subpat,
mode,
)?
(current, current_else) =
self.pattern_match_inner(current, current_else, cond_place, *subpat, mode)?
}
if mode == MatchingMode::Bind {
self.pattern_match_binding(
@ -475,7 +466,7 @@ impl MirLowerCtx<'_> {
let discr: Place = self.temp(TyBuilder::bool(), current, pattern.into())?.into();
self.push_assignment(
current,
discr.clone(),
discr,
Rvalue::CheckedBinaryOp(BinOp::Eq, c, Operand::Copy(cond_place)),
pattern.into(),
);
@ -506,12 +497,7 @@ impl MirLowerCtx<'_> {
if mode == MatchingMode::Check {
let e = self.const_eval_discriminant(v)? as u128;
let tmp = self.discr_temp_place(current);
self.push_assignment(
current,
tmp.clone(),
Rvalue::Discriminant(cond_place.clone()),
span,
);
self.push_assignment(current, tmp, Rvalue::Discriminant(cond_place), span);
let next = self.new_basic_block();
let else_target = current_else.get_or_insert_with(|| self.new_basic_block());
self.set_terminator(
@ -568,7 +554,7 @@ impl MirLowerCtx<'_> {
variant_data.field(&x.name).ok_or(MirLowerError::UnresolvedField)?;
Ok((
PlaceElem::Field(Either::Left(FieldId {
parent: v.into(),
parent: v,
local_id: field_id,
})),
x.pat,
@ -579,7 +565,7 @@ impl MirLowerCtx<'_> {
}
AdtPatternShape::Tuple { args, ellipsis } => {
let fields = variant_data.fields().iter().map(|(x, _)| {
PlaceElem::Field(Either::Left(FieldId { parent: v.into(), local_id: x }))
PlaceElem::Field(Either::Left(FieldId { parent: v, local_id: x }))
});
self.pattern_match_tuple_like(
current,

View file

@ -306,7 +306,7 @@ pub fn monomorphized_mir_body_recover(
_: &Substitution,
_: &Arc<crate::TraitEnvironment>,
) -> Result<Arc<MirBody>, MirLowerError> {
return Err(MirLowerError::Loop);
Err(MirLowerError::Loop)
}
pub fn monomorphized_mir_body_for_closure_query(
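
An explicit `return` on the last expression of a function is clippy's `needless_return`; the fix leaves the bare expression as the tail. A sketch with invented error and result types:

#[derive(Debug)]
enum LowerError {
    Loop,
}

// Before: `return Err(LowerError::Loop);`
// After: the tail expression already is the return value.
fn recover() -> Result<(), LowerError> {
    Err(LowerError::Loop)
}

fn main() {
    assert!(recover().is_err());
}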

View file

@ -344,7 +344,7 @@ impl<'a> MirPrettyCtx<'a> {
}
}
}
f(self, p.local, &p.projection.lookup(&self.body.projection_store));
f(self, p.local, p.projection.lookup(&self.body.projection_store));
}
fn operand(&mut self, r: &Operand) {
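
Several hunks drop a `&` in front of an expression that is already a reference of the right type (`lookup(&store)` becoming `lookup(store)`, and the extra `&` above); that is clippy's `needless_borrow`. A minimal sketch:

fn total_len(parts: &[String]) -> usize {
    parts.iter().map(|s| s.len()).sum()
}

fn main() {
    let parts = vec!["a".to_string(), "bc".to_string()];
    let slice: &[String] = &parts;
    // Before: `total_len(&slice)` — borrowing a value that is already a reference.
    // After: pass the reference through unchanged.
    assert_eq!(total_len(slice), 3);
}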

View file

@ -43,13 +43,13 @@ impl fmt::Debug for TestDB {
impl Upcast<dyn ExpandDatabase> for TestDB {
fn upcast(&self) -> &(dyn ExpandDatabase + 'static) {
&*self
self
}
}
impl Upcast<dyn DefDatabase> for TestDB {
fn upcast(&self) -> &(dyn DefDatabase + 'static) {
&*self
self
}
}

View file

@ -438,7 +438,7 @@ pub(crate) fn detect_variant_from_bytes<'a>(
let krate = trait_env.krate;
let (var_id, var_layout) = match &layout.variants {
hir_def::layout::Variants::Single { index } => {
(db.enum_data(e).variants[index.0].0, &*layout)
(db.enum_data(e).variants[index.0].0, layout)
}
hir_def::layout::Variants::Multiple { tag, tag_encoding, variants, .. } => {
let target_data_layout = db.target_data_layout(krate)?;

View file

@ -188,7 +188,7 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
build_pat(
ctx.db(),
module,
variant.clone(),
variant,
ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
)
@ -312,7 +312,7 @@ fn cursor_at_trivial_match_arm_list(
match_arm_list: &MatchArmList,
) -> Option<()> {
// match x { $0 }
if match_arm_list.arms().next() == None {
if match_arm_list.arms().next().is_none() {
cov_mark::hit!(add_missing_match_arms_empty_body);
return Some(());
}
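
Comparing an `Option` against `None` with `==` is rewritten to `is_none()` above (clippy's `partialeq_to_none` lint, if I have the name right); the method also drops the `PartialEq` requirement on the inner type. Sketch:

fn is_empty_match(arms: &[&str]) -> bool {
    // Before: `arms.iter().next() == None` — works, but reads worse and needs PartialEq.
    // After: ask the Option directly.
    arms.iter().next().is_none()
}

fn main() {
    assert!(is_empty_match(&[]));
    assert!(!is_empty_match(&["Some(_) => {}"]));
}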

View file

@ -85,9 +85,7 @@ pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
if let Some(let_stmt) = ctx.find_node_at_offset::<ast::LetStmt>() {
if let_stmt.colon_token().is_none() {
if let_stmt.pat().is_none() {
return None;
}
let_stmt.pat()?;
acc.add(
AssistId("add_type_ascription", AssistKind::RefactorRewrite),

View file

@ -422,9 +422,7 @@ fn find_record_pat_field_usage(name: &ast::NameLike) -> Option<ast::Pat> {
fn find_assoc_const_usage(name: &ast::NameLike) -> Option<(ast::Type, ast::Expr)> {
let const_ = name.syntax().parent().and_then(ast::Const::cast)?;
if const_.syntax().parent().and_then(ast::AssocItemList::cast).is_none() {
return None;
}
const_.syntax().parent().and_then(ast::AssocItemList::cast)?;
Some((const_.ty()?, const_.body()?))
}

View file

@ -96,7 +96,7 @@ fn can_add(node: &SyntaxNode) -> bool {
if p.kind() == ASSOC_ITEM_LIST {
p.parent()
.and_then(|it| ast::Impl::cast(it))
.and_then(ast::Impl::cast)
// inherent impls i.e 'non-trait impls' have a non-local
// effect, thus can have visibility even when nested.
// so filter them out
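
Closures that only forward their argument to another function are replaced by the function path itself (clippy's `redundant_closure`). A sketch with a made-up helper standing in for `ast::Impl::cast`:

fn main() {
    let raw = ["1", "x", "3"];
    // Before: `.filter_map(|it| parse(it))`
    // After: pass the function directly where the signatures line up.
    let parsed: Vec<i32> = raw.iter().copied().filter_map(parse).collect();
    assert_eq!(parsed, vec![1, 3]);
}

// Hypothetical helper, not from the diff.
fn parse(s: &str) -> Option<i32> {
    s.parse().ok()
}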

View file

@ -113,7 +113,7 @@ fn find_extracted_variable(ctx: &AssistContext<'_>, arm: &ast::MatchArm) -> Opti
}
_ => {
cov_mark::hit!(extracting_arm_is_not_an_identity_expr);
return None;
None
}
}
}

View file

@ -87,9 +87,7 @@ pub(crate) fn convert_to_guarded_return(acc: &mut Assists, ctx: &AssistContext<'
_ => return None,
};
if then_block.syntax().first_child_or_token().map(|t| t.kind() == T!['{']).is_none() {
return None;
}
then_block.syntax().first_child_or_token().map(|t| t.kind() == T!['{'])?;
then_block.syntax().last_child_or_token().filter(|t| t.kind() == T!['}'])?;

View file

@ -84,8 +84,8 @@ fn destructure_tuple_edit_impl(
data: &TupleData,
in_sub_pattern: bool,
) {
let assignment_edit = edit_tuple_assignment(ctx, edit, &data, in_sub_pattern);
let current_file_usages_edit = edit_tuple_usages(&data, edit, ctx, in_sub_pattern);
let assignment_edit = edit_tuple_assignment(ctx, edit, data, in_sub_pattern);
let current_file_usages_edit = edit_tuple_usages(data, edit, ctx, in_sub_pattern);
assignment_edit.apply();
if let Some(usages_edit) = current_file_usages_edit {
@ -258,7 +258,7 @@ fn edit_tuple_usage(
Some(index) => Some(edit_tuple_field_usage(ctx, builder, data, index)),
None if in_sub_pattern => {
cov_mark::hit!(destructure_tuple_call_with_subpattern);
return None;
None
}
None => Some(EditTupleUsage::NoIndex(usage.range)),
}
@ -375,7 +375,7 @@ impl RefData {
expr = make::expr_paren(expr);
}
return expr;
expr
}
}
fn handle_ref_field_usage(ctx: &AssistContext<'_>, field_expr: &FieldExpr) -> (ast::Expr, RefData) {

View file

@ -668,7 +668,7 @@ fn check_intersection_and_push(
// check for intersection between all current members
// and combine all such ranges into one.
let s: SmallVec<[_; 2]> = import_paths_to_be_removed
.into_iter()
.iter_mut()
.positions(|it| it.intersect(import_path).is_some())
.collect();
for pos in s.into_iter().rev() {

View file

@ -185,7 +185,7 @@ fn collect_used_generics<'gp>(
ast::GenericParam::TypeParam(_) => 1,
});
Some(generics).filter(|it| it.len() > 0)
Some(generics).filter(|it| !it.is_empty())
}
#[cfg(test)]

View file

@ -115,7 +115,7 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
let trailing_ws = if prev_ws.is_some_and(|it| it.text().starts_with('\n')) {
format!("\n{indent_to}")
} else {
format!(" ")
" ".to_string()
};
ted::insert_all_raw(

View file

@ -23,9 +23,7 @@ pub(crate) fn flip_trait_bound(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
let plus = ctx.find_token_syntax_at_offset(T![+])?;
// Make sure we're in a `TypeBoundList`
if ast::TypeBoundList::cast(plus.parent()?).is_none() {
return None;
}
ast::TypeBoundList::cast(plus.parent()?)?;
let (before, after) = (
non_trivia_sibling(plus.clone().into(), Direction::Prev)?,

View file

@ -107,10 +107,10 @@ fn get_text_for_generate_constant(
type_name: String,
) -> Option<String> {
let constant_token = not_exist_name_ref.pop()?;
let vis = if not_exist_name_ref.len() == 0 && !outer_exists { "" } else { "\npub " };
let vis = if not_exist_name_ref.is_empty() && !outer_exists { "" } else { "\npub " };
let mut text = format!("{vis}const {constant_token}: {type_name} = $0;");
while let Some(name_ref) = not_exist_name_ref.pop() {
let vis = if not_exist_name_ref.len() == 0 && !outer_exists { "" } else { "\npub " };
let vis = if not_exist_name_ref.is_empty() && !outer_exists { "" } else { "\npub " };
text = text.replace('\n', "\n ");
text = format!("{vis}mod {name_ref} {{{text}\n}}");
}
@ -136,8 +136,7 @@ fn target_data_for_generate_constant(
let siblings_has_newline = l_curly_token
.siblings_with_tokens(Direction::Next)
.find(|it| it.kind() == SyntaxKind::WHITESPACE && it.to_string().contains('\n'))
.is_some();
.any(|it| it.kind() == SyntaxKind::WHITESPACE && it.to_string().contains('\n'));
let post_string =
if siblings_has_newline { format!("{indent}") } else { format!("\n{indent}") };
Some((offset, indent + 1, Some(file_id), post_string))
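
Two more mechanical rewrites in this file: `x.len() == 0` becomes `x.is_empty()` (`len_zero`) and `.find(p).is_some()` becomes `.any(p)` (`search_is_some`). A sketch with invented data:

fn main() {
    let names: Vec<&str> = vec!["foo", "bar_baz"];

    // Before: `names.len() == 0`
    let no_names = names.is_empty();

    // Before: `names.iter().find(|n| n.contains('_')).is_some()`
    let has_snake_case = names.iter().any(|n| n.contains('_'));

    assert!(!no_names);
    assert!(has_snake_case);
}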

View file

@ -134,7 +134,7 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
// compute the `body`
let arg_list = method_source
.param_list()
.map(|list| convert_param_list_to_arg_list(list))
.map(convert_param_list_to_arg_list)
.unwrap_or_else(|| make::arg_list([]));
let tail_expr = make::expr_method_call(field, make::name_ref(&name), arg_list);

View file

@ -88,11 +88,11 @@ pub(crate) fn generate_delegate_trait(acc: &mut Assists, ctx: &AssistContext<'_>
let strukt = Struct::new(ctx.find_node_at_offset::<ast::Struct>()?)?;
let field: Field = match ctx.find_node_at_offset::<ast::RecordField>() {
Some(field) => Field::new(&ctx, Either::Left(field))?,
Some(field) => Field::new(ctx, Either::Left(field))?,
None => {
let field = ctx.find_node_at_offset::<ast::TupleField>()?;
let field_list = ctx.find_node_at_offset::<ast::TupleFieldList>()?;
Field::new(&ctx, either::Right((field, field_list)))?
Field::new(ctx, either::Right((field, field_list)))?
}
};
@ -276,7 +276,7 @@ fn generate_impl(
.filter(|item| matches!(item, AssocItem::MacroCall(_)).not())
.for_each(|item| {
let assoc =
process_assoc_item(item, qualified_path_type.clone(), &field_name);
process_assoc_item(item, qualified_path_type.clone(), field_name);
if let Some(assoc) = assoc {
delegate_assoc_items.add_item(assoc);
}
@ -288,7 +288,7 @@ fn generate_impl(
let target_scope = ctx.sema.scope(strukt.strukt.syntax())?;
let source_scope = ctx.sema.scope(bound_def.syntax())?;
let transform = PathTransform::generic_transformation(&target_scope, &source_scope);
transform.apply(&delegate.syntax());
transform.apply(delegate.syntax());
}
Delegee::Impls(trait_, old_impl) => {
let old_impl = ctx.sema.source(old_impl.to_owned())?.value;
@ -306,7 +306,7 @@ fn generate_impl(
let field_ty = rename_strukt_args(ctx, ast_strukt, field_ty, &args)?;
let where_clause = ast_strukt
.where_clause()
.and_then(|wc| Some(rename_strukt_args(ctx, ast_strukt, &wc, &args)?));
.and_then(|wc| rename_strukt_args(ctx, ast_strukt, &wc, &args));
(field_ty, where_clause)
}
None => (field_ty.clone_for_update(), None),
@ -323,7 +323,7 @@ fn generate_impl(
.trait_()?
.generic_arg_list()
.map(|l| l.generic_args().map(|arg| arg.to_string()))
.map_or_else(|| FxHashSet::default(), |it| it.collect());
.map_or_else(FxHashSet::default, |it| it.collect());
let trait_gen_params = remove_instantiated_params(
&old_impl.self_ty()?,
@ -345,13 +345,13 @@ fn generate_impl(
let mut trait_gen_args = old_impl.trait_()?.generic_arg_list();
if let Some(trait_args) = &mut trait_gen_args {
*trait_args = trait_args.clone_for_update();
transform_impl(ctx, ast_strukt, &old_impl, &transform_args, &trait_args.syntax())?;
transform_impl(ctx, ast_strukt, &old_impl, &transform_args, trait_args.syntax())?;
}
let type_gen_args = strukt_params.clone().map(|params| params.to_generic_args());
let path_type = make::ty(&trait_.name(db).to_smol_str()).clone_for_update();
transform_impl(ctx, ast_strukt, &old_impl, &transform_args, &path_type.syntax())?;
transform_impl(ctx, ast_strukt, &old_impl, &transform_args, path_type.syntax())?;
// 3) Generate delegate trait impl
delegate = make::impl_trait(
@ -383,7 +383,7 @@ fn generate_impl(
let item = item.clone_for_update();
transform_impl(ctx, ast_strukt, &old_impl, &transform_args, item.syntax())?;
let assoc = process_assoc_item(item, qualified_path_type.clone(), &field_name)?;
let assoc = process_assoc_item(item, qualified_path_type.clone(), field_name)?;
delegate_assoc_items.add_item(assoc);
}
@ -404,8 +404,8 @@ fn transform_impl(
args: &Option<GenericArgList>,
syntax: &syntax::SyntaxNode,
) -> Option<()> {
let source_scope = ctx.sema.scope(&old_impl.self_ty()?.syntax())?;
let target_scope = ctx.sema.scope(&strukt.syntax())?;
let source_scope = ctx.sema.scope(old_impl.self_ty()?.syntax())?;
let target_scope = ctx.sema.scope(strukt.syntax())?;
let hir_old_impl = ctx.sema.to_impl_def(old_impl)?;
let transform = args.as_ref().map_or_else(
@ -420,7 +420,7 @@ fn transform_impl(
},
);
transform.apply(&syntax);
transform.apply(syntax);
Some(())
}
@ -481,7 +481,7 @@ fn remove_useless_where_clauses(trait_ty: &ast::Type, self_ty: &ast::Type, wc: a
.skip(1)
.take_while(|node_or_tok| node_or_tok.kind() == SyntaxKind::WHITESPACE)
})
.for_each(|ws| ted::remove(ws));
.for_each(ted::remove);
ted::insert(
ted::Position::after(wc.syntax()),
@ -539,7 +539,7 @@ fn generate_args_for_impl(
)
})
.collect_vec();
args.is_empty().not().then(|| make::generic_arg_list(args.into_iter()))
args.is_empty().not().then(|| make::generic_arg_list(args))
}
fn rename_strukt_args<N>(
@ -558,7 +558,7 @@ where
let scope = ctx.sema.scope(item.syntax())?;
let transform = PathTransform::adt_transformation(&scope, &scope, hir_adt, args.clone());
transform.apply(&item.syntax());
transform.apply(item.syntax());
Some(item)
}

View file

@ -148,7 +148,7 @@ fn make_example_for_fn(ast_func: &ast::Fn, ctx: &AssistContext<'_>) -> Option<St
format_to!(example, "use {use_path};\n\n");
if let Some(self_name) = &self_name {
if let Some(mut_) = is_ref_mut_self(ast_func) {
let mut_ = if mut_ == true { "mut " } else { "" };
let mut_ = if mut_ { "mut " } else { "" };
format_to!(example, "let {mut_}{self_name} = ;\n");
}
}

View file

@ -124,7 +124,9 @@ fn add_variant_to_accumulator(
builder.edit_file(file_id);
let node = builder.make_mut(enum_node);
let variant = make_variant(ctx, name_ref, parent);
node.variant_list().map(|it| it.add_variant(variant.clone_for_update()));
if let Some(it) = node.variant_list() {
it.add_variant(variant.clone_for_update())
}
},
)
}
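
Calling `.map` purely for a side effect (the closure returns `()`) is turned into an `if let` above; that is clippy's `option_map_unit_fn`. Sketch:

fn main() {
    let mut variants: Option<Vec<String>> = Some(vec!["A".to_string()]);

    // Before: `variants.as_mut().map(|list| list.push("B".to_string()));`
    // After: `if let` makes the side effect explicit and drops the unused Option<()>.
    if let Some(list) = variants.as_mut() {
        list.push("B".to_string());
    }

    assert_eq!(variants.unwrap().len(), 2);
}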

View file

@ -90,7 +90,7 @@ fn existing_from_impl(
let enum_type = enum_.ty(sema.db);
let wrapped_type = variant.fields(sema.db).get(0)?.ty(sema.db);
let wrapped_type = variant.fields(sema.db).first()?.ty(sema.db);
if enum_type.impls_trait(sema.db, from_trait, &[wrapped_type]) {
Some(())

View file

@ -42,7 +42,7 @@ pub(crate) fn generate_setter(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opt
let (strukt, info_of_record_fields, mut fn_names) = extract_and_parse(ctx, AssistType::Set)?;
// No record fields to do work on :(
if info_of_record_fields.len() == 0 {
if info_of_record_fields.is_empty() {
return None;
}
@ -163,7 +163,7 @@ pub(crate) fn generate_getter_impl(
let (strukt, info_of_record_fields, fn_names) =
extract_and_parse(ctx, if mutable { AssistType::MutGet } else { AssistType::Get })?;
// No record fields to do work on :(
if info_of_record_fields.len() == 0 {
if info_of_record_fields.is_empty() {
return None;
}
@ -318,15 +318,13 @@ fn extract_and_parse_record_fields(
})
.collect::<Vec<RecordFieldInfo>>();
if info_of_record_fields_in_selection.len() == 0 {
if info_of_record_fields_in_selection.is_empty() {
return None;
}
Some((info_of_record_fields_in_selection, field_names))
}
ast::FieldList::TupleFieldList(_) => {
return None;
}
ast::FieldList::TupleFieldList(_) => None,
}
}
@ -409,7 +407,7 @@ fn build_source_change(
// getter and end of impl ( i.e. `}` ) with an
// extra line for no reason
if i < record_fields_count - 1 {
buf = buf + "\n";
buf += "\n";
}
}
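
`buf = buf + "\n"` is rewritten to the compound assignment `buf += "\n"` (clippy's `assign_op_pattern`); for `String` the two are equivalent, the second just says it more directly. Sketch:

fn main() {
    let mut buf = String::from("fn getter()");
    // Before: `buf = buf + "\n";`
    buf += "\n";
    assert_eq!(buf, "fn getter()\n");
}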

View file

@ -95,7 +95,7 @@ fn get_impl_method(
let scope = ctx.sema.scope(impl_.syntax())?;
let ty = impl_def.self_ty(db);
ty.iterate_method_candidates(db, &scope, None, Some(fn_name), |func| Some(func))
ty.iterate_method_candidates(db, &scope, None, Some(fn_name), Some)
}
#[cfg(test)]

View file

@ -85,10 +85,7 @@ pub(crate) fn generate_trait_from_impl(acc: &mut Assists, ctx: &AssistContext<'_
let assoc_items = impl_ast.assoc_item_list()?;
let first_element = assoc_items.assoc_items().next();
if first_element.is_none() {
// No reason for an assist.
return None;
}
first_element.as_ref()?;
let impl_name = impl_ast.self_ty()?;

View file

@ -60,7 +60,7 @@ pub(crate) fn inline_const_as_literal(acc: &mut Assists, ctx: &AssistContext<'_>
let id = AssistId("inline_const_as_literal", AssistKind::RefactorInline);
let label = format!("Inline const as literal");
let label = "Inline const as literal".to_string();
let target = variable.syntax().text_range();
return acc.add(id, label, target, |edit| {
@ -138,7 +138,7 @@ mod tests {
// -----------Not supported-----------
#[test]
fn inline_const_as_literal_const_fn_call_slice() {
TEST_PAIRS.into_iter().for_each(|(ty, val, _)| {
TEST_PAIRS.iter().for_each(|(ty, val, _)| {
check_assist_not_applicable(
inline_const_as_literal,
&format!(
@ -240,7 +240,7 @@ mod tests {
#[test]
fn inline_const_as_literal_const_expr() {
TEST_PAIRS.into_iter().for_each(|(ty, val, _)| {
TEST_PAIRS.iter().for_each(|(ty, val, _)| {
check_assist(
inline_const_as_literal,
&format!(
@ -261,7 +261,7 @@ mod tests {
#[test]
fn inline_const_as_literal_const_block_expr() {
TEST_PAIRS.into_iter().for_each(|(ty, val, _)| {
TEST_PAIRS.iter().for_each(|(ty, val, _)| {
check_assist(
inline_const_as_literal,
&format!(
@ -282,7 +282,7 @@ mod tests {
#[test]
fn inline_const_as_literal_const_block_eval_expr() {
TEST_PAIRS.into_iter().for_each(|(ty, val, _)| {
TEST_PAIRS.iter().for_each(|(ty, val, _)| {
check_assist(
inline_const_as_literal,
&format!(
@ -303,7 +303,7 @@ mod tests {
#[test]
fn inline_const_as_literal_const_block_eval_block_expr() {
TEST_PAIRS.into_iter().for_each(|(ty, val, _)| {
TEST_PAIRS.iter().for_each(|(ty, val, _)| {
check_assist(
inline_const_as_literal,
&format!(
@ -324,7 +324,7 @@ mod tests {
#[test]
fn inline_const_as_literal_const_fn_call_block_nested_builtin() {
TEST_PAIRS.into_iter().for_each(|(ty, val, _)| {
TEST_PAIRS.iter().for_each(|(ty, val, _)| {
check_assist(
inline_const_as_literal,
&format!(
@ -347,7 +347,7 @@ mod tests {
#[test]
fn inline_const_as_literal_const_fn_call_tuple() {
TEST_PAIRS.into_iter().for_each(|(ty, val, _)| {
TEST_PAIRS.iter().for_each(|(ty, val, _)| {
check_assist(
inline_const_as_literal,
&format!(
@ -370,7 +370,7 @@ mod tests {
#[test]
fn inline_const_as_literal_const_fn_call_builtin() {
TEST_PAIRS.into_iter().for_each(|(ty, val, _)| {
TEST_PAIRS.iter().for_each(|(ty, val, _)| {
check_assist(
inline_const_as_literal,
&format!(

View file

@ -41,7 +41,7 @@ pub(crate) fn inline_macro(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
acc.add(
AssistId("inline_macro", AssistKind::RefactorInline),
format!("Inline macro"),
"Inline macro".to_string(),
text_range,
|builder| builder.replace(text_range, expanded.to_string()),
)

View file

@ -18,7 +18,7 @@ use crate::{utils::suggest_name, AssistContext, AssistId, AssistKind, Assists};
// ```
pub(crate) fn introduce_named_generic(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let impl_trait_type = ctx.find_node_at_offset::<ast::ImplTraitType>()?;
let param = impl_trait_type.syntax().ancestors().find_map(|node| ast::Param::cast(node))?;
let param = impl_trait_type.syntax().ancestors().find_map(ast::Param::cast)?;
let fn_ = param.syntax().ancestors().find_map(ast::Fn::cast)?;
let type_bound_list = impl_trait_type.type_bound_list()?;

View file

@ -54,7 +54,7 @@ pub(crate) fn remove_unused_imports(acc: &mut Assists, ctx: &AssistContext<'_>)
.filter_map(|u| {
// Find any uses trees that are unused
let use_module = ctx.sema.scope(&u.syntax()).map(|s| s.module())?;
let use_module = ctx.sema.scope(u.syntax()).map(|s| s.module())?;
let scope = match search_scopes.entry(use_module) {
Entry::Occupied(o) => o.into_mut(),
Entry::Vacant(v) => v.insert(module_search_scope(ctx.db(), use_module)),
@ -113,10 +113,8 @@ pub(crate) fn remove_unused_imports(acc: &mut Assists, ctx: &AssistContext<'_>)
{
return Some(u);
}
} else {
if !used_once_in_scope(ctx, def, &scope) {
return Some(u);
}
} else if !used_once_in_scope(ctx, def, scope) {
return Some(u);
}
None
@ -208,7 +206,7 @@ fn module_search_scope(db: &RootDatabase, module: hir::Module) -> Vec<SearchScop
};
let mut new_ranges = Vec::new();
for old_range in ranges.iter_mut() {
let split = split_at_subrange(old_range.clone(), rng);
let split = split_at_subrange(*old_range, rng);
*old_range = split.0;
new_ranges.extend(split.1);
}

View file

@ -279,7 +279,7 @@ pub(crate) fn replace_match_with_if_let(acc: &mut Assists, ctx: &AssistContext<'
let then_block = make_block_expr(then_expr.reset_indent());
let else_expr = if is_empty_expr(&else_expr) { None } else { Some(else_expr) };
let if_let_expr = make::expr_if(
condition.into(),
condition,
then_block,
else_expr.map(make_block_expr).map(ast::ElseBranch::Block),
)

View file

@ -74,7 +74,7 @@ pub(crate) fn replace_is_method_with_if_let_method(
},
)
}
_ => return None,
_ => None,
}
}

View file

@ -133,7 +133,7 @@ pub(crate) fn replace_with_eager_method(acc: &mut Assists, ctx: &AssistContext<'
None,
None,
|func| {
let valid = func.name(ctx.sema.db).as_str() == Some(&*method_name_eager)
let valid = func.name(ctx.sema.db).as_str() == Some(method_name_eager)
&& func.num_params(ctx.sema.db) == n_params;
valid.then_some(func)
},

View file

@ -111,7 +111,7 @@ fn generic_arg_list(expr: &Expr) -> Option<GenericArgList> {
pe.path()?.segment()?.generic_arg_list()
} else {
cov_mark::hit!(not_applicable_if_non_path_function_call);
return None;
None
}
}
Expr::AwaitExpr(expr) => generic_arg_list(&expr.expr()?),

View file

@ -53,7 +53,7 @@ pub(crate) fn unwrap_block(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
let stmts: Vec<ast::Stmt> = list.statements().collect();
let initializer = ast::Expr::cast(last)?;
let let_stmt = make::let_stmt(pattern, ty, Some(initializer));
if stmts.len() > 0 {
if !stmts.is_empty() {
let block = make::block_expr(stmts, None);
format!("{}\n {}", update_expr_string(block.to_string()), let_stmt)
} else {

View file

@ -201,7 +201,7 @@ fn check_with_config(
.filter(|it| !it.source_file_edits.is_empty() || !it.file_system_edits.is_empty())
.expect("Assist did not contain any source changes");
let skip_header = source_change.source_file_edits.len() == 1
&& source_change.file_system_edits.len() == 0;
&& source_change.file_system_edits.is_empty();
let mut buf = String::new();
for (file_id, (edit, snippet_edit)) in source_change.source_file_edits {

View file

@ -103,7 +103,7 @@ impl Assist {
let doc = take_until(lines.by_ref(), "```").trim().to_string();
assert!(
(doc.chars().next().unwrap().is_ascii_uppercase() && doc.ends_with('.'))
|| assist.sections.len() > 0,
|| !assist.sections.is_empty(),
"\n\n{}: assist docs should be proper sentences, with capitalization and a full stop at the end.\n\n{}\n\n",
&assist.id,
doc,

View file

@ -117,7 +117,7 @@ pub fn filter_assoc_items(
return false;
}
return true;
true
})
// Note: This throws away items with no source.
.filter_map(|assoc_item| {
@ -165,7 +165,7 @@ pub fn add_trait_assoc_items_to_impl(
target_scope: hir::SemanticsScope<'_>,
) -> ast::AssocItem {
let new_indent_level = IndentLevel::from_node(impl_.syntax()) + 1;
let items = original_items.into_iter().map(|InFile { file_id, value: original_item }| {
let items = original_items.iter().map(|InFile { file_id, value: original_item }| {
let cloned_item = {
if file_id.is_macro() {
if let Some(formatted) =

View file

@ -648,7 +648,7 @@ fn gen_partial_ord(adt: &ast::Adt, func: &ast::Fn, trait_ref: Option<TraitRef>)
.into_iter()
.map(gen_partial_eq_match)
.collect::<Option<Vec<ast::Stmt>>>()?;
make::block_expr(stmts.into_iter(), tail).indent(ast::edit::IndentLevel(1))
make::block_expr(stmts, tail).indent(ast::edit::IndentLevel(1))
}
Some(ast::FieldList::TupleFieldList(field_list)) => {
@ -667,7 +667,7 @@ fn gen_partial_ord(adt: &ast::Adt, func: &ast::Fn, trait_ref: Option<TraitRef>)
.into_iter()
.map(gen_partial_eq_match)
.collect::<Option<Vec<ast::Stmt>>>()?;
make::block_expr(stmts.into_iter(), tail).indent(ast::edit::IndentLevel(1))
make::block_expr(stmts, tail).indent(ast::edit::IndentLevel(1))
}
// No fields in the body means there's nothing to compare.

View file

@ -44,9 +44,7 @@ pub(crate) fn complete_known_attribute_input(
None => None,
};
let (path, tt) = name_ref.zip(attribute.token_tree())?;
if tt.l_paren_token().is_none() {
return None;
}
tt.l_paren_token()?;
match path.text().as_str() {
"repr" => repr::complete_repr(acc, ctx, tt),

View file

@ -37,7 +37,7 @@ pub(crate) fn complete_cargo_env_vars(
guard_env_macro(expanded, &ctx.sema)?;
let range = expanded.text_range_between_quotes()?;
CARGO_DEFINED_VARS.into_iter().for_each(|&(var, detail)| {
CARGO_DEFINED_VARS.iter().for_each(|&(var, detail)| {
let mut item = CompletionItem::new(CompletionItemKind::Keyword, range, var);
item.detail(detail);
item.add_to(acc, ctx.db);

View file

@ -209,9 +209,7 @@ fn import_on_the_fly(
) -> Option<()> {
let _p = profile::span("import_on_the_fly").detail(|| potential_import_name.clone());
if ImportScope::find_insert_use_container(&position, &ctx.sema).is_none() {
return None;
}
ImportScope::find_insert_use_container(&position, &ctx.sema)?;
let ns_filter = |import: &LocatedImport| {
match (kind, import.original_item) {
@ -297,9 +295,7 @@ fn import_on_the_fly_pat_(
) -> Option<()> {
let _p = profile::span("import_on_the_fly_pat").detail(|| potential_import_name.clone());
if ImportScope::find_insert_use_container(&position, &ctx.sema).is_none() {
return None;
}
ImportScope::find_insert_use_container(&position, &ctx.sema)?;
let ns_filter = |import: &LocatedImport| match import.original_item {
ItemInNs::Macros(mac) => mac.is_fn_like(ctx.db),
@ -349,9 +345,7 @@ fn import_on_the_fly_method(
) -> Option<()> {
let _p = profile::span("import_on_the_fly_method").detail(|| potential_import_name.clone());
if ImportScope::find_insert_use_container(&position, &ctx.sema).is_none() {
return None;
}
ImportScope::find_insert_use_container(&position, &ctx.sema)?;
let user_input_lowercased = potential_import_name.to_lowercase();

View file

@ -108,7 +108,7 @@ fn fill_fn_params(
remove_duplicated(&mut file_params, param_list.params());
let self_completion_items = ["self", "&self", "mut self", "&mut self"];
if should_add_self_completions(ctx.token.text_range().start(), param_list, impl_) {
self_completion_items.into_iter().for_each(|self_item| add_new_item_to_acc(self_item));
self_completion_items.into_iter().for_each(&mut add_new_item_to_acc);
}
file_params.keys().for_each(|whole_param| add_new_item_to_acc(whole_param));

View file

@ -306,9 +306,7 @@ fn add_custom_postfix_completions(
postfix_snippet: impl Fn(&str, &str, &str) -> Builder,
receiver_text: &str,
) -> Option<()> {
if ImportScope::find_insert_use_container(&ctx.token.parent()?, &ctx.sema).is_none() {
return None;
}
ImportScope::find_insert_use_container(&ctx.token.parent()?, &ctx.sema)?;
ctx.config.postfix_snippets().filter(|(_, snip)| snip.scope == SnippetScope::Expr).for_each(
|(trigger, snippet)| {
let imports = match snippet.imports(ctx) {

View file

@ -129,9 +129,7 @@ fn add_custom_completions(
cap: SnippetCap,
scope: SnippetScope,
) -> Option<()> {
if ImportScope::find_insert_use_container(&ctx.token.parent()?, &ctx.sema).is_none() {
return None;
}
ImportScope::find_insert_use_container(&ctx.token.parent()?, &ctx.sema)?;
ctx.config.prefix_snippets().filter(|(_, snip)| snip.scope == scope).for_each(
|(trigger, snip)| {
let imports = match snip.imports(ctx) {

View file

@ -866,9 +866,7 @@ fn classify_name_ref(
TypeLocation::TypeAscription(TypeAscriptionTarget::Const(original.body()))
},
ast::RetType(it) => {
if it.thin_arrow_token().is_none() {
return None;
}
it.thin_arrow_token()?;
let parent = match ast::Fn::cast(parent.parent()?) {
Some(it) => it.param_list(),
None => ast::ClosureExpr::cast(parent.parent()?)?.param_list(),
@ -888,15 +886,11 @@ fn classify_name_ref(
}))
},
ast::Param(it) => {
if it.colon_token().is_none() {
return None;
}
it.colon_token()?;
TypeLocation::TypeAscription(TypeAscriptionTarget::FnParam(find_opt_node_in_file(original_file, it.pat())))
},
ast::LetStmt(it) => {
if it.colon_token().is_none() {
return None;
}
it.colon_token()?;
TypeLocation::TypeAscription(TypeAscriptionTarget::Let(find_opt_node_in_file(original_file, it.pat())))
},
ast::Impl(it) => {
@ -1312,7 +1306,7 @@ fn pattern_context_for(
.parent()
.and_then(ast::MatchExpr::cast)
.and_then(|match_expr| {
let expr_opt = find_opt_node_in_file(&original_file, match_expr.expr());
let expr_opt = find_opt_node_in_file(original_file, match_expr.expr());
expr_opt.and_then(|expr| {
sema.type_of_expr(&expr)?
@ -1321,24 +1315,20 @@ fn pattern_context_for(
.find_map(|ty| match ty.as_adt() {
Some(hir::Adt::Enum(e)) => Some(e),
_ => None,
}).and_then(|enum_| {
Some(enum_.variants(sema.db))
})
}).map(|enum_| enum_.variants(sema.db))
})
}).and_then(|variants| {
Some(variants.iter().filter_map(|variant| {
}).map(|variants| variants.iter().filter_map(|variant| {
let variant_name = variant.name(sema.db).display(sema.db).to_string();
let variant_already_present = match_arm_list.arms().any(|arm| {
arm.pat().and_then(|pat| {
let pat_already_present = pat.syntax().to_string().contains(&variant_name);
pat_already_present.then(|| pat_already_present)
pat_already_present.then_some(pat_already_present)
}).is_some()
});
(!variant_already_present).then_some(variant.clone())
(!variant_already_present).then_some(*variant)
}).collect::<Vec<Variant>>())
})
});
if let Some(missing_variants_) = missing_variants_opt {
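
In the hunk above, `flag.then(|| value)` becomes `flag.then_some(value)` where the value is already computed (clippy's `unnecessary_lazy_evaluations`, as far as I know), and `.clone()` on what appears to be a `Copy` `Variant` becomes a dereference. A sketch of the `then_some` half:

fn main() {
    let already_present = true;
    // Before: `already_present.then(|| already_present)` — the closure buys nothing
    // because the value exists either way.
    let marker: Option<bool> = already_present.then_some(already_present);
    assert_eq!(marker, Some(true));
}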

View file

@ -553,7 +553,7 @@ impl Builder {
self.detail = detail.map(Into::into);
if let Some(detail) = &self.detail {
if never!(detail.contains('\n'), "multiline detail:\n{}", detail) {
self.detail = Some(detail.splitn(2, '\n').next().unwrap().to_string());
self.detail = Some(detail.split('\n').next().unwrap().to_string());
}
}
self
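
When only the first piece of a split is consumed, the `2` limit in `splitn(2, ..)` buys nothing, so clippy (the `needless_splitn` lint, as far as I know) rewrites it to plain `split`. Sketch:

fn first_line(detail: &str) -> &str {
    // Before: `detail.splitn(2, '\n').next().unwrap()`
    // After: `split` is lazy, so taking only the first item costs the same.
    detail.split('\n').next().unwrap()
}

fn main() {
    assert_eq!(first_line("multiline\ndetail"), "multiline");
    assert_eq!(first_line("single"), "single");
}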

View file

@ -304,9 +304,7 @@ fn params(
func_kind: &FuncKind<'_>,
has_dot_receiver: bool,
) -> Option<(Option<hir::SelfParam>, Vec<hir::Param>)> {
if ctx.config.callable.is_none() {
return None;
}
ctx.config.callable.as_ref()?;
// Don't add parentheses if the expected type is a function reference with the same signature.
if let Some(expected) = ctx.expected_type.as_ref().filter(|e| e.is_fn()) {

View file

@ -66,19 +66,15 @@ pub fn callable_for_node(
}
ast::CallableExpr::MethodCall(call) => sema.resolve_method_call_as_callable(call),
}?;
let active_param = if let Some(arg_list) = calling_node.arg_list() {
Some(
arg_list
.syntax()
.children_with_tokens()
.filter_map(NodeOrToken::into_token)
.filter(|t| t.kind() == T![,])
.take_while(|t| t.text_range().start() <= token.text_range().start())
.count(),
)
} else {
None
};
let active_param = calling_node.arg_list().map(|arg_list| {
arg_list
.syntax()
.children_with_tokens()
.filter_map(NodeOrToken::into_token)
.filter(|t| t.kind() == T![,])
.take_while(|t| t.text_range().start() <= token.text_range().start())
.count()
});
Some((callable, active_param))
}
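
An `if let Some(x) = e { Some(f(x)) } else { None }` chain is collapsed into `e.map(|x| f(x))` above (clippy's `manual_map`). A sketch that counts separating commas before a cursor, with invented inputs standing in for the real syntax-tree types:

// Hypothetical stand-in for the hunk above.
fn active_param(arg_list: Option<&str>, cursor: usize) -> Option<usize> {
    // Before: `if let Some(args) = arg_list { Some(count) } else { None }`
    arg_list.map(|args| {
        args.char_indices()
            .filter(|&(_, c)| c == ',')
            .take_while(|&(i, _)| i <= cursor)
            .count()
    })
}

fn main() {
    assert_eq!(active_param(Some("1, 2, 3"), 3), Some(1));
    assert_eq!(active_param(None, 3), None);
}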

View file

@ -357,7 +357,7 @@ fn path_applicable_imports(
path_candidate.name.clone(),
AssocSearchMode::Include,
)
.filter_map(|item| import_for_item(sema.db, mod_path, &qualifier, item, scope_filter))
.filter_map(|item| import_for_item(sema.db, mod_path, qualifier, item, scope_filter))
.take(DEFAULT_QUERY_SEARCH_LIMIT.inner())
.collect(),
}

View file

@ -104,7 +104,7 @@ fn recursive_merge(lhs: &ast::UseTree, rhs: &ast::UseTree, merge: MergeBehavior)
.collect::<Option<_>>()?;
// Sorts the use trees similar to rustfmt's algorithm for ordering imports
// (see `use_tree_cmp` doc).
use_trees.sort_unstable_by(|a, b| use_tree_cmp(a, b));
use_trees.sort_unstable_by(use_tree_cmp);
for rhs_t in rhs.use_tree_list().into_iter().flat_map(|list| list.use_trees()) {
if !merge.is_tree_allowed(&rhs_t) {
return None;
@ -357,7 +357,7 @@ fn path_segment_cmp(a: &ast::PathSegment, b: &ast::PathSegment) -> Ordering {
if is_upper_snake_case(a_text) && !is_upper_snake_case(b_text) {
return Ordering::Greater;
}
a_text.cmp(&b_text)
a_text.cmp(b_text)
}
}
}

View file

@ -99,21 +99,21 @@ impl fmt::Debug for RootDatabase {
impl Upcast<dyn ExpandDatabase> for RootDatabase {
#[inline]
fn upcast(&self) -> &(dyn ExpandDatabase + 'static) {
&*self
self
}
}
impl Upcast<dyn DefDatabase> for RootDatabase {
#[inline]
fn upcast(&self) -> &(dyn DefDatabase + 'static) {
&*self
self
}
}
impl Upcast<dyn HirDatabase> for RootDatabase {
#[inline]
fn upcast(&self) -> &(dyn HirDatabase + 'static) {
&*self
self
}
}

View file

@ -381,7 +381,7 @@ impl Ctx<'_> {
true,
)
.ok()?;
let ast_ty = make::ty(&ty_str).clone_for_update();
let ast_ty = make::ty(ty_str).clone_for_update();
if let Some(adt) = ty.as_adt() {
if let ast::Type::PathType(path_ty) = &ast_ty {

View file

@ -104,7 +104,7 @@ impl Definition {
/// renamed and extern crate names will report its range, though a rename will introduce
/// an alias instead.
pub fn range_for_rename(self, sema: &Semantics<'_, RootDatabase>) -> Option<FileRange> {
let syn_ctx_is_root = |(range, ctx): (_, SyntaxContextId)| ctx.is_root().then(|| range);
let syn_ctx_is_root = |(range, ctx): (_, SyntaxContextId)| ctx.is_root().then_some(range);
let res = match self {
Definition::Macro(mac) => {
let src = mac.source(sema.db)?;

View file

@ -536,14 +536,12 @@ impl<'a> FindUsages<'a> {
// Search for occurrences of the items name
for offset in match_indices(&text, finder, search_range) {
tree.token_at_offset(offset).into_iter().for_each(|token| {
tree.token_at_offset(offset).for_each(|token| {
let Some(str_token) = ast::String::cast(token.clone()) else { return };
if let Some((range, nameres)) =
sema.check_for_format_args_template(token, offset)
{
if self.found_format_args_ref(file_id, range, str_token, nameres, sink) {
return;
}
if self.found_format_args_ref(file_id, range, str_token, nameres, sink) {}
}
});

View file

@ -329,7 +329,7 @@ impl Query {
for index in indices.iter() {
op = op.add(index.map.search(&automaton));
}
self.search_maps(&indices, op.union(), cb)
self.search_maps(indices, op.union(), cb)
}
SearchMode::Fuzzy => {
let automaton = fst::automaton::Subsequence::new(&self.lowercased);
@ -337,7 +337,7 @@ impl Query {
for index in indices.iter() {
op = op.add(index.map.search(&automaton));
}
self.search_maps(&indices, op.union(), cb)
self.search_maps(indices, op.union(), cb)
}
SearchMode::Prefix => {
let automaton = fst::automaton::Str::new(&self.lowercased).starts_with();
@ -345,7 +345,7 @@ impl Query {
for index in indices.iter() {
op = op.add(index.map.search(&automaton));
}
self.search_maps(&indices, op.union(), cb)
self.search_maps(indices, op.union(), cb)
}
}
}

View file

@ -17,7 +17,7 @@ pub(crate) fn break_outside_of_loop(
ctx,
DiagnosticCode::RustcHardError("E0268"),
message,
d.expr.clone().map(|it| it.into()),
d.expr.map(|it| it.into()),
)
}

View file

@ -13,7 +13,7 @@ pub(crate) fn expected_function(
ctx,
DiagnosticCode::RustcHardError("E0618"),
format!("expected function, found {}", d.found.display(ctx.sema.db)),
d.call.clone().map(|it| it.into()),
d.call.map(|it| it.into()),
)
.experimental()
}

View file

@ -31,7 +31,7 @@ pub(crate) fn inactive_code(
let res = Diagnostic::new(
DiagnosticCode::Ra("inactive-code", Severity::WeakWarning),
message,
ctx.sema.diagnostics_display_range(d.node.clone()),
ctx.sema.diagnostics_display_range(d.node),
)
.with_unused(true);
Some(res)

View file

@ -9,8 +9,8 @@ pub(crate) fn incoherent_impl(ctx: &DiagnosticsContext<'_>, d: &hir::IncoherentI
Diagnostic::new_with_syntax_node_ptr(
ctx,
DiagnosticCode::RustcHardError("E0210"),
format!("cannot define inherent `impl` for foreign type"),
InFile::new(d.file_id, d.impl_.clone().into()),
"cannot define inherent `impl` for foreign type".to_string(),
InFile::new(d.file_id, d.impl_.into()),
)
}

Some files were not shown because too many files have changed in this diff.