Auto merge of #16404 - Urhengulas:satisfy-clippy, r=Veykril

Work through temporarily allowed clippy lints, part 1

This is the first batch of clippy lints that are no longer merely allowed but actually fixed. Each commit removes one lint from the lint table and then fixes the resulting warnings.

Follow-up to #16401
This commit is contained in:
bors 2024-01-19 20:47:05 +00:00
commit 67cfbf231c
75 changed files with 375 additions and 525 deletions

View file

@ -159,59 +159,25 @@ suspicious = { level = "warn", priority = -1 }
result_unit_err = "allow" result_unit_err = "allow"
# We don't expose public APIs that matter like this # We don't expose public APIs that matter like this
len_without_is_empty = "allow" len_without_is_empty = "allow"
# We currently prefer explicit control flow return over `...?;` statements whose result is unused
question_mark = "allow"
# We have macros that rely on this currently # We have macros that rely on this currently
enum_variant_names = "allow" enum_variant_names = "allow"
# Builder pattern disagrees # Builder pattern disagrees
new_ret_no_self = "allow" new_ret_no_self = "allow"
## Following lints should be tackled at some point ## Following lints should be tackled at some point
bind_instead_of_map = "allow"
borrowed_box = "allow" borrowed_box = "allow"
borrow_deref_ref = "allow" borrow_deref_ref = "allow"
collapsible_if = "allow"
collapsible_match = "allow"
clone_on_copy = "allow"
derivable_impls = "allow" derivable_impls = "allow"
derived_hash_with_manual_eq = "allow" derived_hash_with_manual_eq = "allow"
double_parens = "allow"
explicit_auto_deref = "allow"
field_reassign_with_default = "allow" field_reassign_with_default = "allow"
forget_non_drop = "allow" forget_non_drop = "allow"
format_collect = "allow" format_collect = "allow"
for_kv_map = "allow"
filter_map_bool_then = "allow"
from_str_radix_10 = "allow"
get_first = "allow"
if_same_then_else = "allow"
large_enum_variant = "allow" large_enum_variant = "allow"
let_and_return = "allow"
manual_find = "allow"
manual_map = "allow"
map_clone = "allow"
match_like_matches_macro = "allow"
match_single_binding = "allow"
needless_borrow = "allow"
needless_doctest_main = "allow" needless_doctest_main = "allow"
needless_lifetimes = "allow"
needless_pass_by_value = "allow"
needless_return = "allow"
new_without_default = "allow" new_without_default = "allow"
nonminimal_bool = "allow"
non_canonical_clone_impl = "allow" non_canonical_clone_impl = "allow"
non_canonical_partial_ord_impl = "allow" non_canonical_partial_ord_impl = "allow"
non_minimal_cfg = "allow"
only_used_in_recursion = "allow"
op_ref = "allow"
option_map_unit_fn = "allow"
partialeq_to_none = "allow"
ptr_arg = "allow"
redundant_closure = "allow"
redundant_pattern_matching = "allow"
search_is_some = "allow"
self_named_constructors = "allow" self_named_constructors = "allow"
single_match = "allow"
skip_while_next = "allow" skip_while_next = "allow"
too_many_arguments = "allow" too_many_arguments = "allow"
toplevel_ref_arg = "allow" toplevel_ref_arg = "allow"

View file

@ -331,7 +331,7 @@ impl CrateData {
return false; return false;
} }
if let Some(_) = opts.next() { if opts.next().is_some() {
return false; return false;
} }
} }

View file

@ -258,12 +258,12 @@ impl Body {
} }
} }
Pat::Or(args) | Pat::Tuple { args, .. } | Pat::TupleStruct { args, .. } => { Pat::Or(args) | Pat::Tuple { args, .. } | Pat::TupleStruct { args, .. } => {
args.iter().copied().for_each(|p| f(p)); args.iter().copied().for_each(f);
} }
Pat::Ref { pat, .. } => f(*pat), Pat::Ref { pat, .. } => f(*pat),
Pat::Slice { prefix, slice, suffix } => { Pat::Slice { prefix, slice, suffix } => {
let total_iter = prefix.iter().chain(slice.iter()).chain(suffix.iter()); let total_iter = prefix.iter().chain(slice.iter()).chain(suffix.iter());
total_iter.copied().for_each(|p| f(p)); total_iter.copied().for_each(f);
} }
Pat::Record { args, .. } => { Pat::Record { args, .. } => {
args.iter().for_each(|RecordFieldPat { pat, .. }| f(*pat)); args.iter().for_each(|RecordFieldPat { pat, .. }| f(*pat));
@ -369,7 +369,7 @@ impl BodySourceMap {
} }
pub fn label_syntax(&self, label: LabelId) -> LabelSource { pub fn label_syntax(&self, label: LabelId) -> LabelSource {
self.label_map_back[label].clone() self.label_map_back[label]
} }
pub fn node_label(&self, node: InFile<&ast::Label>) -> Option<LabelId> { pub fn node_label(&self, node: InFile<&ast::Label>) -> Option<LabelId> {
@ -378,11 +378,11 @@ impl BodySourceMap {
} }
pub fn field_syntax(&self, expr: ExprId) -> FieldSource { pub fn field_syntax(&self, expr: ExprId) -> FieldSource {
self.field_map_back[&expr].clone() self.field_map_back[&expr]
} }
pub fn pat_field_syntax(&self, pat: PatId) -> PatFieldSource { pub fn pat_field_syntax(&self, pat: PatId) -> PatFieldSource {
self.pat_field_map_back[&pat].clone() self.pat_field_map_back[&pat]
} }
pub fn macro_expansion_expr(&self, node: InFile<&ast::MacroExpr>) -> Option<ExprId> { pub fn macro_expansion_expr(&self, node: InFile<&ast::MacroExpr>) -> Option<ExprId> {

View file

@ -776,11 +776,10 @@ impl ExprCollector<'_> {
None => self.collect_expr_opt(e.condition()), None => self.collect_expr_opt(e.condition()),
}; };
let break_expr = let break_expr = self.alloc_expr(Expr::Break { expr: None, label: None }, syntax_ptr);
self.alloc_expr(Expr::Break { expr: None, label: None }, syntax_ptr.clone());
let if_expr = self.alloc_expr( let if_expr = self.alloc_expr(
Expr::If { condition, then_branch: body, else_branch: Some(break_expr) }, Expr::If { condition, then_branch: body, else_branch: Some(break_expr) },
syntax_ptr.clone(), syntax_ptr,
); );
self.alloc_expr(Expr::Loop { body: if_expr, label }, syntax_ptr) self.alloc_expr(Expr::Loop { body: if_expr, label }, syntax_ptr)
} }
@ -811,19 +810,19 @@ impl ExprCollector<'_> {
return self.alloc_expr(Expr::Missing, syntax_ptr); return self.alloc_expr(Expr::Missing, syntax_ptr);
}; };
let head = self.collect_expr_opt(e.iterable()); let head = self.collect_expr_opt(e.iterable());
let into_iter_fn_expr = self.alloc_expr(Expr::Path(into_iter_fn), syntax_ptr.clone()); let into_iter_fn_expr = self.alloc_expr(Expr::Path(into_iter_fn), syntax_ptr);
let iterator = self.alloc_expr( let iterator = self.alloc_expr(
Expr::Call { Expr::Call {
callee: into_iter_fn_expr, callee: into_iter_fn_expr,
args: Box::new([head]), args: Box::new([head]),
is_assignee_expr: false, is_assignee_expr: false,
}, },
syntax_ptr.clone(), syntax_ptr,
); );
let none_arm = MatchArm { let none_arm = MatchArm {
pat: self.alloc_pat_desugared(Pat::Path(Box::new(option_none))), pat: self.alloc_pat_desugared(Pat::Path(Box::new(option_none))),
guard: None, guard: None,
expr: self.alloc_expr(Expr::Break { expr: None, label: None }, syntax_ptr.clone()), expr: self.alloc_expr(Expr::Break { expr: None, label: None }, syntax_ptr),
}; };
let some_pat = Pat::TupleStruct { let some_pat = Pat::TupleStruct {
path: Some(Box::new(option_some)), path: Some(Box::new(option_some)),
@ -839,27 +838,25 @@ impl ExprCollector<'_> {
}), }),
}; };
let iter_name = Name::generate_new_name(); let iter_name = Name::generate_new_name();
let iter_expr = let iter_expr = self.alloc_expr(Expr::Path(Path::from(iter_name.clone())), syntax_ptr);
self.alloc_expr(Expr::Path(Path::from(iter_name.clone())), syntax_ptr.clone());
let iter_expr_mut = self.alloc_expr( let iter_expr_mut = self.alloc_expr(
Expr::Ref { expr: iter_expr, rawness: Rawness::Ref, mutability: Mutability::Mut }, Expr::Ref { expr: iter_expr, rawness: Rawness::Ref, mutability: Mutability::Mut },
syntax_ptr.clone(), syntax_ptr,
); );
let iter_next_fn_expr = self.alloc_expr(Expr::Path(iter_next_fn), syntax_ptr.clone()); let iter_next_fn_expr = self.alloc_expr(Expr::Path(iter_next_fn), syntax_ptr);
let iter_next_expr = self.alloc_expr( let iter_next_expr = self.alloc_expr(
Expr::Call { Expr::Call {
callee: iter_next_fn_expr, callee: iter_next_fn_expr,
args: Box::new([iter_expr_mut]), args: Box::new([iter_expr_mut]),
is_assignee_expr: false, is_assignee_expr: false,
}, },
syntax_ptr.clone(), syntax_ptr,
); );
let loop_inner = self.alloc_expr( let loop_inner = self.alloc_expr(
Expr::Match { expr: iter_next_expr, arms: Box::new([none_arm, some_arm]) }, Expr::Match { expr: iter_next_expr, arms: Box::new([none_arm, some_arm]) },
syntax_ptr.clone(), syntax_ptr,
); );
let loop_outer = let loop_outer = self.alloc_expr(Expr::Loop { body: loop_inner, label }, syntax_ptr);
self.alloc_expr(Expr::Loop { body: loop_inner, label }, syntax_ptr.clone());
let iter_binding = self.alloc_binding(iter_name, BindingAnnotation::Mutable); let iter_binding = self.alloc_binding(iter_name, BindingAnnotation::Mutable);
let iter_pat = self.alloc_pat_desugared(Pat::Bind { id: iter_binding, subpat: None }); let iter_pat = self.alloc_pat_desugared(Pat::Bind { id: iter_binding, subpat: None });
self.add_definition_to_binding(iter_binding, iter_pat); self.add_definition_to_binding(iter_binding, iter_pat);
@ -868,7 +865,7 @@ impl ExprCollector<'_> {
expr: iterator, expr: iterator,
arms: Box::new([MatchArm { pat: iter_pat, guard: None, expr: loop_outer }]), arms: Box::new([MatchArm { pat: iter_pat, guard: None, expr: loop_outer }]),
}, },
syntax_ptr.clone(), syntax_ptr,
) )
} }
@ -896,10 +893,10 @@ impl ExprCollector<'_> {
return self.alloc_expr(Expr::Missing, syntax_ptr); return self.alloc_expr(Expr::Missing, syntax_ptr);
}; };
let operand = self.collect_expr_opt(e.expr()); let operand = self.collect_expr_opt(e.expr());
let try_branch = self.alloc_expr(Expr::Path(try_branch), syntax_ptr.clone()); let try_branch = self.alloc_expr(Expr::Path(try_branch), syntax_ptr);
let expr = self.alloc_expr( let expr = self.alloc_expr(
Expr::Call { callee: try_branch, args: Box::new([operand]), is_assignee_expr: false }, Expr::Call { callee: try_branch, args: Box::new([operand]), is_assignee_expr: false },
syntax_ptr.clone(), syntax_ptr,
); );
let continue_name = Name::generate_new_name(); let continue_name = Name::generate_new_name();
let continue_binding = let continue_binding =
@ -914,7 +911,7 @@ impl ExprCollector<'_> {
ellipsis: None, ellipsis: None,
}), }),
guard: None, guard: None,
expr: self.alloc_expr(Expr::Path(Path::from(continue_name)), syntax_ptr.clone()), expr: self.alloc_expr(Expr::Path(Path::from(continue_name)), syntax_ptr),
}; };
let break_name = Name::generate_new_name(); let break_name = Name::generate_new_name();
let break_binding = self.alloc_binding(break_name.clone(), BindingAnnotation::Unannotated); let break_binding = self.alloc_binding(break_name.clone(), BindingAnnotation::Unannotated);
@ -928,18 +925,18 @@ impl ExprCollector<'_> {
}), }),
guard: None, guard: None,
expr: { expr: {
let it = self.alloc_expr(Expr::Path(Path::from(break_name)), syntax_ptr.clone()); let it = self.alloc_expr(Expr::Path(Path::from(break_name)), syntax_ptr);
let callee = self.alloc_expr(Expr::Path(try_from_residual), syntax_ptr.clone()); let callee = self.alloc_expr(Expr::Path(try_from_residual), syntax_ptr);
let result = self.alloc_expr( let result = self.alloc_expr(
Expr::Call { callee, args: Box::new([it]), is_assignee_expr: false }, Expr::Call { callee, args: Box::new([it]), is_assignee_expr: false },
syntax_ptr.clone(), syntax_ptr,
); );
self.alloc_expr( self.alloc_expr(
match self.current_try_block_label { match self.current_try_block_label {
Some(label) => Expr::Break { expr: Some(result), label: Some(label) }, Some(label) => Expr::Break { expr: Some(result), label: Some(label) },
None => Expr::Return { expr: Some(result) }, None => Expr::Return { expr: Some(result) },
}, },
syntax_ptr.clone(), syntax_ptr,
) )
}, },
}; };
@ -1847,8 +1844,8 @@ impl ExprCollector<'_> {
flags as u128, flags as u128,
Some(BuiltinUint::U32), Some(BuiltinUint::U32),
))); )));
let precision = self.make_count(&precision, argmap); let precision = self.make_count(precision, argmap);
let width = self.make_count(&width, argmap); let width = self.make_count(width, argmap);
let format_placeholder_new = { let format_placeholder_new = {
let format_placeholder_new = let format_placeholder_new =
@ -1994,7 +1991,7 @@ impl ExprCollector<'_> {
fn alloc_expr(&mut self, expr: Expr, ptr: ExprPtr) -> ExprId { fn alloc_expr(&mut self, expr: Expr, ptr: ExprPtr) -> ExprId {
let src = self.expander.in_file(ptr); let src = self.expander.in_file(ptr);
let id = self.body.exprs.alloc(expr); let id = self.body.exprs.alloc(expr);
self.source_map.expr_map_back.insert(id, src.clone()); self.source_map.expr_map_back.insert(id, src);
self.source_map.expr_map.insert(src, id); self.source_map.expr_map.insert(src, id);
id id
} }
@ -2022,7 +2019,7 @@ impl ExprCollector<'_> {
fn alloc_pat(&mut self, pat: Pat, ptr: PatPtr) -> PatId { fn alloc_pat(&mut self, pat: Pat, ptr: PatPtr) -> PatId {
let src = self.expander.in_file(ptr); let src = self.expander.in_file(ptr);
let id = self.body.pats.alloc(pat); let id = self.body.pats.alloc(pat);
self.source_map.pat_map_back.insert(id, src.clone()); self.source_map.pat_map_back.insert(id, src);
self.source_map.pat_map.insert(src, id); self.source_map.pat_map.insert(src, id);
id id
} }
@ -2037,7 +2034,7 @@ impl ExprCollector<'_> {
fn alloc_label(&mut self, label: Label, ptr: LabelPtr) -> LabelId { fn alloc_label(&mut self, label: Label, ptr: LabelPtr) -> LabelId {
let src = self.expander.in_file(ptr); let src = self.expander.in_file(ptr);
let id = self.body.labels.alloc(label); let id = self.body.labels.alloc(label);
self.source_map.label_map_back.insert(id, src.clone()); self.source_map.label_map_back.insert(id, src);
self.source_map.label_map.insert(src, id); self.source_map.label_map.insert(src, id);
id id
} }

View file

@ -259,10 +259,8 @@ fn crate_supports_no_std(db: &dyn DefDatabase, crate_id: CrateId) -> bool {
None => continue, None => continue,
}; };
let segments = tt.split(|tt| match tt { let segments =
tt::TokenTree::Leaf(tt::Leaf::Punct(p)) if p.char == ',' => true, tt.split(|tt| matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(p)) if p.char == ','));
_ => false,
});
for output in segments.skip(1) { for output in segments.skip(1) {
match output { match output {
[tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.text == "no_std" => { [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.text == "no_std" => {

View file

@ -230,7 +230,7 @@ fn find_path_for_module(
} }
if let value @ Some(_) = if let value @ Some(_) =
find_in_prelude(ctx.db, &root_def_map, &def_map, ItemInNs::Types(module_id.into()), from) find_in_prelude(ctx.db, &root_def_map, def_map, ItemInNs::Types(module_id.into()), from)
{ {
return value.zip(Some(Stable)); return value.zip(Some(Stable));
} }

View file

@ -400,9 +400,8 @@ impl GenericParams {
params params
.type_or_consts .type_or_consts
.iter() .iter()
.filter_map(|(idx, param)| { .filter(|(idx, _)| enabled((*idx).into()))
enabled(idx.into()).then(|| param.clone()) .map(|(_, param)| param.clone())
})
.collect() .collect()
}), }),
lifetimes: all_lifetimes_enabled lifetimes: all_lifetimes_enabled
@ -411,9 +410,8 @@ impl GenericParams {
params params
.lifetimes .lifetimes
.iter() .iter()
.filter_map(|(idx, param)| { .filter(|(idx, _)| enabled((*idx).into()))
enabled(idx.into()).then(|| param.clone()) .map(|(_, param)| param.clone())
})
.collect() .collect()
}), }),
where_predicates: params.where_predicates.clone(), where_predicates: params.where_predicates.clone(),

View file

@ -410,11 +410,7 @@ impl ConstRef {
lower_ctx: &LowerCtx<'_>, lower_ctx: &LowerCtx<'_>,
param: &ast::ConstParam, param: &ast::ConstParam,
) -> Option<Self> { ) -> Option<Self> {
let default = param.default_val(); param.default_val().map(|default| Self::from_const_arg(lower_ctx, Some(default)))
match default {
Some(_) => Some(Self::from_const_arg(lower_ctx, default)),
None => None,
}
} }
pub fn display<'a>(&'a self, db: &'a dyn ExpandDatabase) -> impl fmt::Display + 'a { pub fn display<'a>(&'a self, db: &'a dyn ExpandDatabase) -> impl fmt::Display + 'a {

View file

@ -294,14 +294,14 @@ impl SearchMode {
pub fn check(self, query: &str, case_sensitive: bool, candidate: &str) -> bool { pub fn check(self, query: &str, case_sensitive: bool, candidate: &str) -> bool {
match self { match self {
SearchMode::Exact if case_sensitive => candidate == query, SearchMode::Exact if case_sensitive => candidate == query,
SearchMode::Exact => candidate.eq_ignore_ascii_case(&query), SearchMode::Exact => candidate.eq_ignore_ascii_case(query),
SearchMode::Prefix => { SearchMode::Prefix => {
query.len() <= candidate.len() && { query.len() <= candidate.len() && {
let prefix = &candidate[..query.len() as usize]; let prefix = &candidate[..query.len() as usize];
if case_sensitive { if case_sensitive {
prefix == query prefix == query
} else { } else {
prefix.eq_ignore_ascii_case(&query) prefix.eq_ignore_ascii_case(query)
} }
} }
} }
@ -382,11 +382,11 @@ impl Query {
} }
fn matches_assoc_mode(&self, is_trait_assoc_item: IsTraitAssocItem) -> bool { fn matches_assoc_mode(&self, is_trait_assoc_item: IsTraitAssocItem) -> bool {
match (is_trait_assoc_item, self.assoc_mode) { !matches!(
(is_trait_assoc_item, self.assoc_mode),
(IsTraitAssocItem::Yes, AssocSearchMode::Exclude) (IsTraitAssocItem::Yes, AssocSearchMode::Exclude)
| (IsTraitAssocItem::No, AssocSearchMode::AssocItemsOnly) => false, | (IsTraitAssocItem::No, AssocSearchMode::AssocItemsOnly)
_ => true, )
}
} }
} }

View file

@ -192,7 +192,7 @@ impl LangItems {
pub(crate) fn lang_attr(db: &dyn DefDatabase, item: AttrDefId) -> Option<LangItem> { pub(crate) fn lang_attr(db: &dyn DefDatabase, item: AttrDefId) -> Option<LangItem> {
let attrs = db.attrs(item); let attrs = db.attrs(item);
attrs.by_key("lang").string_value().and_then(|it| LangItem::from_str(&it)) attrs.by_key("lang").string_value().and_then(|it| LangItem::from_str(it))
} }
pub(crate) fn notable_traits_in_deps( pub(crate) fn notable_traits_in_deps(

View file

@ -733,9 +733,7 @@ pub struct InTypeConstLoc {
impl PartialEq for InTypeConstLoc { impl PartialEq for InTypeConstLoc {
fn eq(&self, other: &Self) -> bool { fn eq(&self, other: &Self) -> bool {
self.id == other.id self.id == other.id && self.owner == other.owner && *self.expected_ty == *other.expected_ty
&& self.owner == other.owner
&& &*self.expected_ty == &*other.expected_ty
} }
} }

View file

@ -1406,7 +1406,7 @@ impl DefCollector<'_> {
} }
if let errors @ [_, ..] = &*value { if let errors @ [_, ..] = &*value {
let loc: MacroCallLoc = self.db.lookup_intern_macro_call(macro_call_id); let loc: MacroCallLoc = self.db.lookup_intern_macro_call(macro_call_id);
let diag = DefDiagnostic::macro_expansion_parse_error(module_id, loc.kind, &errors); let diag = DefDiagnostic::macro_expansion_parse_error(module_id, loc.kind, errors);
self.def_map.diagnostics.push(diag); self.def_map.diagnostics.push(diag);
} }
@ -2287,7 +2287,7 @@ impl ModCollector<'_, '_> {
&MacroCall { ref path, ast_id, expand_to, call_site }: &MacroCall, &MacroCall { ref path, ast_id, expand_to, call_site }: &MacroCall,
container: ItemContainerId, container: ItemContainerId,
) { ) {
let ast_id = AstIdWithPath::new(self.file_id(), ast_id, ModPath::clone(&path)); let ast_id = AstIdWithPath::new(self.file_id(), ast_id, ModPath::clone(path));
let db = self.def_collector.db; let db = self.def_collector.db;
// FIXME: Immediately expanding in "Case 1" is insufficient since "Case 2" may also define // FIXME: Immediately expanding in "Case 1" is insufficient since "Case 2" may also define
@ -2371,9 +2371,9 @@ impl ModCollector<'_, '_> {
}; };
for (name, macs) in source.scope.legacy_macros() { for (name, macs) in source.scope.legacy_macros() {
macs.last().map(|&mac| { if let Some(&mac) = macs.last() {
target.scope.define_legacy_macro(name.clone(), mac); target.scope.define_legacy_macro(name.clone(), mac);
}); }
} }
} }

View file

@ -1348,8 +1348,8 @@ fn proc_attr(a: TokenStream, b: TokenStream) -> TokenStream { a }
let actual = def_map let actual = def_map
.macro_use_prelude .macro_use_prelude
.iter() .keys()
.map(|(name, _)| name.display(&db).to_string()) .map(|name| name.display(&db).to_string())
.sorted() .sorted()
.join("\n"); .join("\n");

View file

@ -154,7 +154,7 @@ impl Path {
pub fn mod_path(&self) -> Option<&ModPath> { pub fn mod_path(&self) -> Option<&ModPath> {
match self { match self {
Path::Normal { mod_path, .. } => Some(&mod_path), Path::Normal { mod_path, .. } => Some(mod_path),
Path::LangItem(..) => None, Path::LangItem(..) => None,
} }
} }
@ -219,13 +219,13 @@ impl<'a> PathSegments<'a> {
} }
pub fn skip(&self, len: usize) -> PathSegments<'a> { pub fn skip(&self, len: usize) -> PathSegments<'a> {
PathSegments { PathSegments {
segments: &self.segments.get(len..).unwrap_or(&[]), segments: self.segments.get(len..).unwrap_or(&[]),
generic_args: self.generic_args.and_then(|it| it.get(len..)), generic_args: self.generic_args.and_then(|it| it.get(len..)),
} }
} }
pub fn take(&self, len: usize) -> PathSegments<'a> { pub fn take(&self, len: usize) -> PathSegments<'a> {
PathSegments { PathSegments {
segments: &self.segments.get(..len).unwrap_or(&self.segments), segments: self.segments.get(..len).unwrap_or(self.segments),
generic_args: self.generic_args.map(|it| it.get(..len).unwrap_or(it)), generic_args: self.generic_args.map(|it| it.get(..len).unwrap_or(it)),
} }
} }

View file

@ -53,7 +53,7 @@ pub(super) fn lower_path(ctx: &LowerCtx<'_>, mut path: ast::Path) -> Option<Path
) )
}) })
.map(Interned::new); .map(Interned::new);
if let Some(_) = args { if args.is_some() {
generic_args.resize(segments.len(), None); generic_args.resize(segments.len(), None);
generic_args.push(args); generic_args.push(args);
} }

View file

@ -239,8 +239,7 @@ impl Resolver {
db: &dyn DefDatabase, db: &dyn DefDatabase,
visibility: &RawVisibility, visibility: &RawVisibility,
) -> Option<Visibility> { ) -> Option<Visibility> {
let within_impl = let within_impl = self.scopes().any(|scope| matches!(scope, Scope::ImplDefScope(_)));
self.scopes().find(|scope| matches!(scope, Scope::ImplDefScope(_))).is_some();
match visibility { match visibility {
RawVisibility::Module(_, _) => { RawVisibility::Module(_, _) => {
let (item_map, module) = self.item_scope(); let (item_map, module) = self.item_scope();
@ -509,7 +508,7 @@ impl Resolver {
.map(|id| ExternCrateDeclData::extern_crate_decl_data_query(db, id).name.clone()) .map(|id| ExternCrateDeclData::extern_crate_decl_data_query(db, id).name.clone())
} }
pub fn extern_crates_in_scope<'a>(&'a self) -> impl Iterator<Item = (Name, ModuleId)> + 'a { pub fn extern_crates_in_scope(&self) -> impl Iterator<Item = (Name, ModuleId)> + '_ {
self.module_scope self.module_scope
.def_map .def_map
.extern_prelude() .extern_prelude()

View file

@ -114,14 +114,10 @@ impl ExprValidator {
) { ) {
// Check that the number of arguments matches the number of parameters. // Check that the number of arguments matches the number of parameters.
// FIXME: Due to shortcomings in the current type system implementation, only emit this
// diagnostic if there are no type mismatches in the containing function.
if self.infer.expr_type_mismatches().next().is_some() { if self.infer.expr_type_mismatches().next().is_some() {
return; // FIXME: Due to shortcomings in the current type system implementation, only emit
} // this diagnostic if there are no type mismatches in the containing function.
} else if let Expr::MethodCall { receiver, .. } = expr {
match expr {
Expr::MethodCall { receiver, .. } => {
let (callee, _) = match self.infer.method_resolution(call_id) { let (callee, _) = match self.infer.method_resolution(call_id) {
Some(it) => it, Some(it) => it,
None => return, None => return,
@ -134,15 +130,11 @@ impl ExprValidator {
.check(call_id, receiver, &callee) .check(call_id, receiver, &callee)
.is_some() .is_some()
{ {
self.diagnostics.push( self.diagnostics.push(BodyValidationDiagnostic::ReplaceFilterMapNextWithFindMap {
BodyValidationDiagnostic::ReplaceFilterMapNextWithFindMap {
method_call_expr: call_id, method_call_expr: call_id,
}, });
);
} }
} }
_ => (),
}
} }
fn validate_match( fn validate_match(

View file

@ -1043,7 +1043,7 @@ impl HirDisplay for Ty {
f.start_location_link(t.into()); f.start_location_link(t.into());
} }
write!(f, "Future")?; write!(f, "Future")?;
if let Some(_) = future_trait { if future_trait.is_some() {
f.end_location_link(); f.end_location_link();
} }
write!(f, "<")?; write!(f, "<")?;
@ -1051,7 +1051,7 @@ impl HirDisplay for Ty {
f.start_location_link(t.into()); f.start_location_link(t.into());
} }
write!(f, "Output")?; write!(f, "Output")?;
if let Some(_) = output { if output.is_some() {
f.end_location_link(); f.end_location_link();
} }
write!(f, " = ")?; write!(f, " = ")?;
@ -1520,7 +1520,7 @@ fn write_bounds_like_dyn_trait(
} }
write!(f, "Sized")?; write!(f, "Sized")?;
} }
if let Some(_) = sized_trait { if sized_trait.is_some() {
f.end_location_link(); f.end_location_link();
} }
} }

View file

@ -142,14 +142,11 @@ impl HirPlace {
mut current_capture: CaptureKind, mut current_capture: CaptureKind,
len: usize, len: usize,
) -> CaptureKind { ) -> CaptureKind {
match current_capture { if let CaptureKind::ByRef(BorrowKind::Mut { .. }) = current_capture {
CaptureKind::ByRef(BorrowKind::Mut { .. }) => {
if self.projections[len..].iter().any(|it| *it == ProjectionElem::Deref) { if self.projections[len..].iter().any(|it| *it == ProjectionElem::Deref) {
current_capture = CaptureKind::ByRef(BorrowKind::Unique); current_capture = CaptureKind::ByRef(BorrowKind::Unique);
} }
} }
_ => (),
}
current_capture current_capture
} }
} }
@ -334,14 +331,12 @@ impl InferenceContext<'_> {
match &self.body[tgt_expr] { match &self.body[tgt_expr] {
Expr::Path(p) => { Expr::Path(p) => {
let resolver = resolver_for_expr(self.db.upcast(), self.owner, tgt_expr); let resolver = resolver_for_expr(self.db.upcast(), self.owner, tgt_expr);
if let Some(r) = resolver.resolve_path_in_value_ns(self.db.upcast(), p) { if let Some(ResolveValueResult::ValueNs(ValueNs::LocalBinding(b), _)) =
if let ResolveValueResult::ValueNs(v, _) = r { resolver.resolve_path_in_value_ns(self.db.upcast(), p)
if let ValueNs::LocalBinding(b) = v { {
return Some(HirPlace { local: b, projections: vec![] }); return Some(HirPlace { local: b, projections: vec![] });
} }
} }
}
}
Expr::Field { expr, name: _ } => { Expr::Field { expr, name: _ } => {
let mut place = self.place_of_expr(*expr)?; let mut place = self.place_of_expr(*expr)?;
let field = self.result.field_resolution(tgt_expr)?; let field = self.result.field_resolution(tgt_expr)?;
@ -1010,7 +1005,7 @@ impl InferenceContext<'_> {
let mut deferred_closures = mem::take(&mut self.deferred_closures); let mut deferred_closures = mem::take(&mut self.deferred_closures);
let mut dependents_count: FxHashMap<ClosureId, usize> = let mut dependents_count: FxHashMap<ClosureId, usize> =
deferred_closures.keys().map(|it| (*it, 0)).collect(); deferred_closures.keys().map(|it| (*it, 0)).collect();
for (_, deps) in &self.closure_dependencies { for deps in self.closure_dependencies.values() {
for dep in deps { for dep in deps {
*dependents_count.entry(*dep).or_default() += 1; *dependents_count.entry(*dep).or_default() += 1;
} }

View file

@ -439,7 +439,7 @@ impl InferenceContext<'_> {
ty ty
} }
&Expr::Continue { label } => { &Expr::Continue { label } => {
if let None = find_continuable(&mut self.breakables, label) { if find_continuable(&mut self.breakables, label).is_none() {
self.push_diagnostic(InferenceDiagnostic::BreakOutsideOfLoop { self.push_diagnostic(InferenceDiagnostic::BreakOutsideOfLoop {
expr: tgt_expr, expr: tgt_expr,
is_break: false, is_break: false,
@ -946,7 +946,7 @@ impl InferenceContext<'_> {
derefed_callee: &Ty, derefed_callee: &Ty,
adjustments: &mut Vec<Adjustment>, adjustments: &mut Vec<Adjustment>,
callee_ty: &Ty, callee_ty: &Ty,
params: &Vec<Ty>, params: &[Ty],
tgt_expr: ExprId, tgt_expr: ExprId,
) { ) {
match fn_x { match fn_x {

View file

@ -1,3 +1,6 @@
#![allow(clippy::match_single_binding)]
#![allow(clippy::no_effect)]
use crate::size_and_align_expr; use crate::size_and_align_expr;
#[test] #[test]

View file

@ -414,6 +414,7 @@ impl Hash for FnAbi {
} }
impl FnAbi { impl FnAbi {
#[allow(clippy::should_implement_trait)]
pub fn from_str(s: &str) -> FnAbi { pub fn from_str(s: &str) -> FnAbi {
match s { match s {
"aapcs-unwind" => FnAbi::AapcsUnwind, "aapcs-unwind" => FnAbi::AapcsUnwind,

View file

@ -1335,7 +1335,7 @@ fn named_associated_type_shorthand_candidates<R>(
), ),
_ => None, _ => None,
}); });
if let Some(_) = res { if res.is_some() {
return res; return res;
} }
// Handle `Self::Type` referring to own associated type in trait definitions // Handle `Self::Type` referring to own associated type in trait definitions

View file

@ -339,11 +339,8 @@ fn push_mut_span(local: LocalId, span: MirSpan, result: &mut ArenaMap<LocalId, M
} }
fn record_usage(local: LocalId, result: &mut ArenaMap<LocalId, MutabilityReason>) { fn record_usage(local: LocalId, result: &mut ArenaMap<LocalId, MutabilityReason>) {
match &mut result[local] { if let it @ MutabilityReason::Unused = &mut result[local] {
it @ MutabilityReason::Unused => {
*it = MutabilityReason::Not; *it = MutabilityReason::Not;
}
_ => (),
}; };
} }

View file

@ -272,6 +272,7 @@ const STACK_OFFSET: usize = 1 << 30;
const HEAP_OFFSET: usize = 1 << 29; const HEAP_OFFSET: usize = 1 << 29;
impl Address { impl Address {
#[allow(clippy::double_parens)]
fn from_bytes(it: &[u8]) -> Result<Self> { fn from_bytes(it: &[u8]) -> Result<Self> {
Ok(Address::from_usize(from_bytes!(usize, it))) Ok(Address::from_usize(from_bytes!(usize, it)))
} }
@ -1386,13 +1387,10 @@ impl Evaluator<'_> {
| CastKind::PointerExposeAddress | CastKind::PointerExposeAddress
| CastKind::PointerFromExposedAddress => { | CastKind::PointerFromExposedAddress => {
let current_ty = self.operand_ty(operand, locals)?; let current_ty = self.operand_ty(operand, locals)?;
let is_signed = match current_ty.kind(Interner) { let is_signed = matches!(
TyKind::Scalar(s) => match s { current_ty.kind(Interner),
chalk_ir::Scalar::Int(_) => true, TyKind::Scalar(chalk_ir::Scalar::Int(_))
_ => false, );
},
_ => false,
};
let current = pad16(self.eval_operand(operand, locals)?.get(self)?, is_signed); let current = pad16(self.eval_operand(operand, locals)?.get(self)?, is_signed);
let dest_size = let dest_size =
self.size_of_sized(target_ty, locals, "destination of int to int cast")?; self.size_of_sized(target_ty, locals, "destination of int to int cast")?;
@ -1664,6 +1662,7 @@ impl Evaluator<'_> {
}) })
} }
#[allow(clippy::double_parens)]
fn allocate_const_in_heap(&mut self, locals: &Locals, konst: &Const) -> Result<Interval> { fn allocate_const_in_heap(&mut self, locals: &Locals, konst: &Const) -> Result<Interval> {
let ty = &konst.data(Interner).ty; let ty = &konst.data(Interner).ty;
let chalk_ir::ConstValue::Concrete(c) = &konst.data(Interner).value else { let chalk_ir::ConstValue::Concrete(c) = &konst.data(Interner).value else {
@ -1842,11 +1841,11 @@ impl Evaluator<'_> {
} }
} }
let layout = self.layout(ty); let layout = self.layout(ty);
if self.assert_placeholder_ty_is_unused { if self.assert_placeholder_ty_is_unused
if matches!(layout, Err(MirEvalError::LayoutError(LayoutError::HasPlaceholder, _))) { && matches!(layout, Err(MirEvalError::LayoutError(LayoutError::HasPlaceholder, _)))
{
return Ok(Some((0, 1))); return Ok(Some((0, 1)));
} }
}
let layout = layout?; let layout = layout?;
Ok(layout Ok(layout
.is_sized() .is_sized()
@ -2218,7 +2217,7 @@ impl Evaluator<'_> {
let generic_args = generic_args.clone(); let generic_args = generic_args.clone();
match def { match def {
CallableDefId::FunctionId(def) => { CallableDefId::FunctionId(def) => {
if let Some(_) = self.detect_fn_trait(def) { if self.detect_fn_trait(def).is_some() {
return self.exec_fn_trait( return self.exec_fn_trait(
def, def,
args, args,

View file

@ -288,13 +288,10 @@ impl<'ctx> MirLowerCtx<'ctx> {
current: BasicBlockId, current: BasicBlockId,
) -> Result<Option<(Operand, BasicBlockId)>> { ) -> Result<Option<(Operand, BasicBlockId)>> {
if !self.has_adjustments(expr_id) { if !self.has_adjustments(expr_id) {
match &self.body.exprs[expr_id] { if let Expr::Literal(l) = &self.body.exprs[expr_id] {
Expr::Literal(l) => {
let ty = self.expr_ty_without_adjust(expr_id); let ty = self.expr_ty_without_adjust(expr_id);
return Ok(Some((self.lower_literal_to_operand(ty, l)?, current))); return Ok(Some((self.lower_literal_to_operand(ty, l)?, current)));
} }
_ => (),
}
} }
let Some((p, current)) = self.lower_expr_as_place(current, expr_id, true)? else { let Some((p, current)) = self.lower_expr_as_place(current, expr_id, true)? else {
return Ok(None); return Ok(None);
@ -948,11 +945,12 @@ impl<'ctx> MirLowerCtx<'ctx> {
// for binary operator, and use without adjust to simplify our conditions. // for binary operator, and use without adjust to simplify our conditions.
let lhs_ty = self.expr_ty_without_adjust(*lhs); let lhs_ty = self.expr_ty_without_adjust(*lhs);
let rhs_ty = self.expr_ty_without_adjust(*rhs); let rhs_ty = self.expr_ty_without_adjust(*rhs);
if matches!(op, BinaryOp::CmpOp(syntax::ast::CmpOp::Eq { .. })) { if matches!(op, BinaryOp::CmpOp(syntax::ast::CmpOp::Eq { .. }))
if lhs_ty.as_raw_ptr().is_some() && rhs_ty.as_raw_ptr().is_some() { && lhs_ty.as_raw_ptr().is_some()
&& rhs_ty.as_raw_ptr().is_some()
{
break 'b true; break 'b true;
} }
}
let builtin_inequal_impls = matches!( let builtin_inequal_impls = matches!(
op, op,
BinaryOp::ArithOp(ArithOp::Shl | ArithOp::Shr) BinaryOp::ArithOp(ArithOp::Shl | ArithOp::Shr)

View file

@ -160,8 +160,7 @@ impl MirLowerCtx<'_> {
_ => try_rvalue(self), _ => try_rvalue(self),
} }
} }
Expr::UnaryOp { expr, op } => match op { Expr::UnaryOp { expr, op: hir_def::hir::UnaryOp::Deref } => {
hir_def::hir::UnaryOp::Deref => {
let is_builtin = match self.expr_ty_without_adjust(*expr).kind(Interner) { let is_builtin = match self.expr_ty_without_adjust(*expr).kind(Interner) {
TyKind::Ref(..) | TyKind::Raw(..) => true, TyKind::Ref(..) | TyKind::Raw(..) => true,
TyKind::Adt(id, _) => { TyKind::Adt(id, _) => {
@ -174,8 +173,7 @@ impl MirLowerCtx<'_> {
_ => false, _ => false,
}; };
if !is_builtin { if !is_builtin {
let Some((p, current)) = self.lower_expr_as_place(current, *expr, true)? let Some((p, current)) = self.lower_expr_as_place(current, *expr, true)? else {
else {
return Ok(None); return Ok(None);
}; };
return self.lower_overloaded_deref( return self.lower_overloaded_deref(
@ -202,15 +200,13 @@ impl MirLowerCtx<'_> {
}, },
); );
} }
let Some((mut r, current)) = self.lower_expr_as_place(current, *expr, true)? let Some((mut r, current)) = self.lower_expr_as_place(current, *expr, true)? else {
else {
return Ok(None); return Ok(None);
}; };
r = r.project(ProjectionElem::Deref, &mut self.result.projection_store); r = r.project(ProjectionElem::Deref, &mut self.result.projection_store);
Ok(Some((r, current))) Ok(Some((r, current)))
} }
_ => try_rvalue(self), Expr::UnaryOp { .. } => try_rvalue(self),
},
Expr::Field { expr, .. } => { Expr::Field { expr, .. } => {
let Some((mut r, current)) = self.lower_expr_as_place(current, *expr, true)? else { let Some((mut r, current)) = self.lower_expr_as_place(current, *expr, true)? else {
return Ok(None); return Ok(None);

View file

@ -331,11 +331,9 @@ impl MirLowerCtx<'_> {
break 'b (c, x.1); break 'b (c, x.1);
} }
} }
if let ResolveValueResult::ValueNs(v, _) = pr { if let ResolveValueResult::ValueNs(ValueNs::ConstId(c), _) = pr {
if let ValueNs::ConstId(c) = v {
break 'b (c, Substitution::empty(Interner)); break 'b (c, Substitution::empty(Interner));
} }
}
not_supported!("path in pattern position that is not const or variant") not_supported!("path in pattern position that is not const or variant")
}; };
let tmp: Place = let tmp: Place =

View file

@ -1439,7 +1439,7 @@ impl Adt {
resolver resolver
.generic_params() .generic_params()
.and_then(|gp| { .and_then(|gp| {
(&gp.lifetimes) gp.lifetimes
.iter() .iter()
// there should only be a single lifetime // there should only be a single lifetime
// but `Arena` requires to use an iterator // but `Arena` requires to use an iterator
@ -1594,12 +1594,11 @@ impl DefWithBody {
for diag in source_map.diagnostics() { for diag in source_map.diagnostics() {
match diag { match diag {
BodyDiagnostic::InactiveCode { node, cfg, opts } => acc.push( BodyDiagnostic::InactiveCode { node, cfg, opts } => acc.push(
InactiveCode { node: node.clone(), cfg: cfg.clone(), opts: opts.clone() } InactiveCode { node: *node, cfg: cfg.clone(), opts: opts.clone() }.into(),
.into(),
), ),
BodyDiagnostic::MacroError { node, message } => acc.push( BodyDiagnostic::MacroError { node, message } => acc.push(
MacroError { MacroError {
node: node.clone().map(|it| it.into()), node: (*node).map(|it| it.into()),
precise_location: None, precise_location: None,
message: message.to_string(), message: message.to_string(),
} }
@ -1607,7 +1606,7 @@ impl DefWithBody {
), ),
BodyDiagnostic::UnresolvedProcMacro { node, krate } => acc.push( BodyDiagnostic::UnresolvedProcMacro { node, krate } => acc.push(
UnresolvedProcMacro { UnresolvedProcMacro {
node: node.clone().map(|it| it.into()), node: (*node).map(|it| it.into()),
precise_location: None, precise_location: None,
macro_name: None, macro_name: None,
kind: MacroKind::ProcMacro, kind: MacroKind::ProcMacro,
@ -1617,7 +1616,7 @@ impl DefWithBody {
), ),
BodyDiagnostic::UnresolvedMacroCall { node, path } => acc.push( BodyDiagnostic::UnresolvedMacroCall { node, path } => acc.push(
UnresolvedMacroCall { UnresolvedMacroCall {
macro_call: node.clone().map(|ast_ptr| ast_ptr.into()), macro_call: (*node).map(|ast_ptr| ast_ptr.into()),
precise_location: None, precise_location: None,
path: path.clone(), path: path.clone(),
is_bang: true, is_bang: true,
@ -1625,10 +1624,10 @@ impl DefWithBody {
.into(), .into(),
), ),
BodyDiagnostic::UnreachableLabel { node, name } => { BodyDiagnostic::UnreachableLabel { node, name } => {
acc.push(UnreachableLabel { node: node.clone(), name: name.clone() }.into()) acc.push(UnreachableLabel { node: *node, name: name.clone() }.into())
} }
BodyDiagnostic::UndeclaredLabel { node, name } => { BodyDiagnostic::UndeclaredLabel { node, name } => {
acc.push(UndeclaredLabel { node: node.clone(), name: name.clone() }.into()) acc.push(UndeclaredLabel { node: *node, name: name.clone() }.into())
} }
} }
} }
@ -1715,7 +1714,7 @@ impl DefWithBody {
field_with_same_name: field_with_same_name field_with_same_name: field_with_same_name
.clone() .clone()
.map(|ty| Type::new(db, DefWithBodyId::from(self), ty)), .map(|ty| Type::new(db, DefWithBodyId::from(self), ty)),
assoc_func_with_same_name: assoc_func_with_same_name.clone(), assoc_func_with_same_name: *assoc_func_with_same_name,
} }
.into(), .into(),
) )
@ -1931,8 +1930,7 @@ impl DefWithBody {
}, },
Either::Right(record_pat) => match source_map.pat_syntax(record_pat) { Either::Right(record_pat) => match source_map.pat_syntax(record_pat) {
Ok(source_ptr) => { Ok(source_ptr) => {
if let Some(ptr) = source_ptr.value.clone().cast::<ast::RecordPat>() if let Some(ptr) = source_ptr.value.cast::<ast::RecordPat>() {
{
let root = source_ptr.file_syntax(db.upcast()); let root = source_ptr.file_syntax(db.upcast());
let record_pat = ptr.to_node(&root); let record_pat = ptr.to_node(&root);
if record_pat.record_pat_field_list().is_some() { if record_pat.record_pat_field_list().is_some() {
@ -2083,9 +2081,7 @@ impl Function {
} }
pub fn method_params(self, db: &dyn HirDatabase) -> Option<Vec<Param>> { pub fn method_params(self, db: &dyn HirDatabase) -> Option<Vec<Param>> {
if self.self_param(db).is_none() { self.self_param(db)?;
return None;
}
Some(self.params_without_self(db)) Some(self.params_without_self(db))
} }
@ -2406,10 +2402,10 @@ impl Const {
} }
} }
if let Ok(s) = mir::render_const_using_debug_impl(db, self.id, &c) { if let Ok(s) = mir::render_const_using_debug_impl(db, self.id, &c) {
return Ok(s); Ok(s)
} else {
Ok(format!("{}", c.display(db)))
} }
let r = format!("{}", c.display(db));
return Ok(r);
} }
} }
@ -2497,14 +2493,7 @@ impl Trait {
db.generic_params(GenericDefId::from(self.id)) db.generic_params(GenericDefId::from(self.id))
.type_or_consts .type_or_consts
.iter() .iter()
.filter(|(_, ty)| match ty { .filter(|(_, ty)| !matches!(ty, TypeOrConstParamData::TypeParamData(ty) if ty.provenance != TypeParamProvenance::TypeParamList))
TypeOrConstParamData::TypeParamData(ty)
if ty.provenance != TypeParamProvenance::TypeParamList =>
{
false
}
_ => true,
})
.filter(|(_, ty)| !count_required_only || !ty.has_default()) .filter(|(_, ty)| !count_required_only || !ty.has_default())
.count() .count()
} }
@ -3163,7 +3152,7 @@ impl DeriveHelper {
.and_then(|it| it.get(self.idx as usize)) .and_then(|it| it.get(self.idx as usize))
.cloned(), .cloned(),
} }
.unwrap_or_else(|| Name::missing()) .unwrap_or_else(Name::missing)
} }
} }
@ -3874,10 +3863,7 @@ impl Type {
} }
pub fn is_int_or_uint(&self) -> bool { pub fn is_int_or_uint(&self) -> bool {
match self.ty.kind(Interner) { matches!(self.ty.kind(Interner), TyKind::Scalar(Scalar::Int(_) | Scalar::Uint(_)))
TyKind::Scalar(Scalar::Int(_) | Scalar::Uint(_)) => true,
_ => false,
}
} }
pub fn is_scalar(&self) -> bool { pub fn is_scalar(&self) -> bool {
@ -4285,10 +4271,8 @@ impl Type {
// arg can be either a `Ty` or `constant` // arg can be either a `Ty` or `constant`
if let Some(ty) = arg.ty(Interner) { if let Some(ty) = arg.ty(Interner) {
Some(format_smolstr!("{}", ty.display(db))) Some(format_smolstr!("{}", ty.display(db)))
} else if let Some(const_) = arg.constant(Interner) {
Some(format_smolstr!("{}", const_.display(db)))
} else { } else {
None arg.constant(Interner).map(|const_| format_smolstr!("{}", const_.display(db)))
} }
}) })
} }
@ -4300,7 +4284,7 @@ impl Type {
) -> impl Iterator<Item = SmolStr> + 'a { ) -> impl Iterator<Item = SmolStr> + 'a {
// iterate the lifetime // iterate the lifetime
self.as_adt() self.as_adt()
.and_then(|a| a.lifetime(db).and_then(|lt| Some((&lt.name).to_smol_str()))) .and_then(|a| a.lifetime(db).map(|lt| lt.name.to_smol_str()))
.into_iter() .into_iter()
// add the type and const parameters // add the type and const parameters
.chain(self.type_and_const_arguments(db)) .chain(self.type_and_const_arguments(db))
@ -4437,7 +4421,7 @@ impl Type {
traits_in_scope, traits_in_scope,
with_local_impls.and_then(|b| b.id.containing_block()).into(), with_local_impls.and_then(|b| b.id.containing_block()).into(),
name, name,
&mut |id| callback(id), callback,
); );
} }

View file

@ -659,10 +659,8 @@ impl<'db> SemanticsImpl<'db> {
// First expand into attribute invocations // First expand into attribute invocations
let containing_attribute_macro_call = self.with_ctx(|ctx| { let containing_attribute_macro_call = self.with_ctx(|ctx| {
token.parent_ancestors().filter_map(ast::Item::cast).find_map(|item| { token.parent_ancestors().filter_map(ast::Item::cast).find_map(|item| {
if item.attrs().next().is_none() {
// Don't force populate the dyn cache for items that don't have an attribute anyways // Don't force populate the dyn cache for items that don't have an attribute anyways
return None; item.attrs().next()?;
}
Some(( Some((
ctx.item_to_macro_call(InFile::new(file_id, item.clone()))?, ctx.item_to_macro_call(InFile::new(file_id, item.clone()))?,
item, item,
@ -1008,9 +1006,7 @@ impl<'db> SemanticsImpl<'db> {
// Update `source_ty` for the next adjustment // Update `source_ty` for the next adjustment
let source = mem::replace(&mut source_ty, target.clone()); let source = mem::replace(&mut source_ty, target.clone());
let adjustment = Adjustment { source, target, kind }; Adjustment { source, target, kind }
adjustment
}) })
.collect() .collect()
}) })
@ -1255,7 +1251,7 @@ impl<'db> SemanticsImpl<'db> {
assert!(root_node.parent().is_none()); assert!(root_node.parent().is_none());
let mut cache = self.cache.borrow_mut(); let mut cache = self.cache.borrow_mut();
let prev = cache.insert(root_node, file_id); let prev = cache.insert(root_node, file_id);
assert!(prev == None || prev == Some(file_id)) assert!(prev.is_none() || prev == Some(file_id))
} }
pub fn assert_contains_node(&self, node: &SyntaxNode) { pub fn assert_contains_node(&self, node: &SyntaxNode) {

View file

@ -142,7 +142,7 @@ impl SourceToDefCtx<'_, '_> {
Some(parent_declaration) => self.module_to_def(parent_declaration), Some(parent_declaration) => self.module_to_def(parent_declaration),
None => { None => {
let file_id = src.file_id.original_file(self.db.upcast()); let file_id = src.file_id.original_file(self.db.upcast());
self.file_to_def(file_id).get(0).copied() self.file_to_def(file_id).first().copied()
} }
}?; }?;
@ -155,7 +155,7 @@ impl SourceToDefCtx<'_, '_> {
pub(super) fn source_file_to_def(&self, src: InFile<ast::SourceFile>) -> Option<ModuleId> { pub(super) fn source_file_to_def(&self, src: InFile<ast::SourceFile>) -> Option<ModuleId> {
let _p = profile::span("source_file_to_def"); let _p = profile::span("source_file_to_def");
let file_id = src.file_id.original_file(self.db.upcast()); let file_id = src.file_id.original_file(self.db.upcast());
self.file_to_def(file_id).get(0).copied() self.file_to_def(file_id).first().copied()
} }
pub(super) fn trait_to_def(&mut self, src: InFile<ast::Trait>) -> Option<TraitId> { pub(super) fn trait_to_def(&mut self, src: InFile<ast::Trait>) -> Option<TraitId> {
@ -308,7 +308,7 @@ impl SourceToDefCtx<'_, '_> {
pub(super) fn type_param_to_def(&mut self, src: InFile<ast::TypeParam>) -> Option<TypeParamId> { pub(super) fn type_param_to_def(&mut self, src: InFile<ast::TypeParam>) -> Option<TypeParamId> {
let container: ChildContainer = self.find_generic_param_container(src.syntax())?.into(); let container: ChildContainer = self.find_generic_param_container(src.syntax())?.into();
let dyn_map = self.cache_for(container, src.file_id); let dyn_map = self.cache_for(container, src.file_id);
dyn_map[keys::TYPE_PARAM].get(&src.value).copied().map(|it| TypeParamId::from_unchecked(it)) dyn_map[keys::TYPE_PARAM].get(&src.value).copied().map(TypeParamId::from_unchecked)
} }
pub(super) fn lifetime_param_to_def( pub(super) fn lifetime_param_to_def(
@ -326,10 +326,7 @@ impl SourceToDefCtx<'_, '_> {
) -> Option<ConstParamId> { ) -> Option<ConstParamId> {
let container: ChildContainer = self.find_generic_param_container(src.syntax())?.into(); let container: ChildContainer = self.find_generic_param_container(src.syntax())?.into();
let dyn_map = self.cache_for(container, src.file_id); let dyn_map = self.cache_for(container, src.file_id);
dyn_map[keys::CONST_PARAM] dyn_map[keys::CONST_PARAM].get(&src.value).copied().map(ConstParamId::from_unchecked)
.get(&src.value)
.copied()
.map(|it| ConstParamId::from_unchecked(it))
} }
pub(super) fn generic_param_to_def( pub(super) fn generic_param_to_def(
@ -370,7 +367,7 @@ impl SourceToDefCtx<'_, '_> {
} }
} }
let def = self.file_to_def(src.file_id.original_file(self.db.upcast())).get(0).copied()?; let def = self.file_to_def(src.file_id.original_file(self.db.upcast())).first().copied()?;
Some(def.into()) Some(def.into())
} }

View file

@ -197,10 +197,8 @@ impl SourceAnalyzer {
) -> Option<(Type, Option<Type>)> { ) -> Option<(Type, Option<Type>)> {
let pat_id = self.pat_id(pat)?; let pat_id = self.pat_id(pat)?;
let infer = self.infer.as_ref()?; let infer = self.infer.as_ref()?;
let coerced = infer let coerced =
.pat_adjustments infer.pat_adjustments.get(&pat_id).and_then(|adjusts| adjusts.last().cloned());
.get(&pat_id)
.and_then(|adjusts| adjusts.last().map(|adjust| adjust.clone()));
let ty = infer[pat_id].clone(); let ty = infer[pat_id].clone();
let mk_ty = |ty| Type::new_with_resolver(db, &self.resolver, ty); let mk_ty = |ty| Type::new_with_resolver(db, &self.resolver, ty);
Some((mk_ty(ty), coerced.map(mk_ty))) Some((mk_ty(ty), coerced.map(mk_ty)))
@ -616,7 +614,7 @@ impl SourceAnalyzer {
} }
None None
})(); })();
if let Some(_) = resolved { if resolved.is_some() {
return resolved; return resolved;
} }
@ -661,7 +659,7 @@ impl SourceAnalyzer {
if let Some(name_ref) = path.as_single_name_ref() { if let Some(name_ref) = path.as_single_name_ref() {
let builtin = let builtin =
BuiltinAttr::by_name(db, self.resolver.krate().into(), &name_ref.text()); BuiltinAttr::by_name(db, self.resolver.krate().into(), &name_ref.text());
if let Some(_) = builtin { if builtin.is_some() {
return builtin.map(PathResolution::BuiltinAttr); return builtin.map(PathResolution::BuiltinAttr);
} }

View file

@ -198,7 +198,7 @@ pub(super) fn find_importable_node(
{ {
ImportAssets::for_method_call(&method_under_caret, &ctx.sema) ImportAssets::for_method_call(&method_under_caret, &ctx.sema)
.zip(Some(method_under_caret.syntax().clone().into())) .zip(Some(method_under_caret.syntax().clone().into()))
} else if let Some(_) = ctx.find_node_at_offset_with_descend::<ast::Param>() { } else if ctx.find_node_at_offset_with_descend::<ast::Param>().is_some() {
None None
} else if let Some(pat) = ctx } else if let Some(pat) = ctx
.find_node_at_offset_with_descend::<ast::IdentPat>() .find_node_at_offset_with_descend::<ast::IdentPat>()

View file

@ -38,7 +38,7 @@ pub(crate) fn convert_match_to_let_else(acc: &mut Assists, ctx: &AssistContext<'
let Some(ast::Expr::MatchExpr(initializer)) = let_stmt.initializer() else { return None }; let Some(ast::Expr::MatchExpr(initializer)) = let_stmt.initializer() else { return None };
let initializer_expr = initializer.expr()?; let initializer_expr = initializer.expr()?;
let Some((extracting_arm, diverging_arm)) = find_arms(ctx, &initializer) else { return None }; let (extracting_arm, diverging_arm) = find_arms(ctx, &initializer)?;
if extracting_arm.guard().is_some() { if extracting_arm.guard().is_some() {
cov_mark::hit!(extracting_arm_has_guard); cov_mark::hit!(extracting_arm_has_guard);
return None; return None;

View file

@ -689,28 +689,23 @@ fn does_source_exists_outside_sel_in_same_mod(
match def { match def {
Definition::Module(x) => { Definition::Module(x) => {
let source = x.definition_source(ctx.db()); let source = x.definition_source(ctx.db());
let have_same_parent; let have_same_parent = if let Some(ast_module) = &curr_parent_module {
if let Some(ast_module) = &curr_parent_module {
if let Some(hir_module) = x.parent(ctx.db()) { if let Some(hir_module) = x.parent(ctx.db()) {
have_same_parent = compare_hir_and_ast_module(ast_module, hir_module, ctx).is_some()
compare_hir_and_ast_module(ast_module, hir_module, ctx).is_some();
} else { } else {
let source_file_id = source.file_id.original_file(ctx.db()); let source_file_id = source.file_id.original_file(ctx.db());
have_same_parent = source_file_id == curr_file_id; source_file_id == curr_file_id
} }
} else { } else {
let source_file_id = source.file_id.original_file(ctx.db()); let source_file_id = source.file_id.original_file(ctx.db());
have_same_parent = source_file_id == curr_file_id; source_file_id == curr_file_id
} };
if have_same_parent { if have_same_parent {
match source.value { if let ModuleSource::Module(module_) = source.value {
ModuleSource::Module(module_) => {
source_exists_outside_sel_in_same_mod = source_exists_outside_sel_in_same_mod =
!selection_range.contains_range(module_.syntax().text_range()); !selection_range.contains_range(module_.syntax().text_range());
} }
_ => {}
}
} }
} }
Definition::Function(x) => { Definition::Function(x) => {

View file

@ -236,7 +236,7 @@ fn generate_impl(
ctx: &AssistContext<'_>, ctx: &AssistContext<'_>,
strukt: &Struct, strukt: &Struct,
field_ty: &ast::Type, field_ty: &ast::Type,
field_name: &String, field_name: &str,
delegee: &Delegee, delegee: &Delegee,
) -> Option<ast::Impl> { ) -> Option<ast::Impl> {
let delegate: ast::Impl; let delegate: ast::Impl;
@ -270,8 +270,7 @@ fn generate_impl(
make::path_from_text(&format!("<{} as {}>", field_ty, delegate.trait_()?)); make::path_from_text(&format!("<{} as {}>", field_ty, delegate.trait_()?));
let delegate_assoc_items = delegate.get_or_create_assoc_item_list(); let delegate_assoc_items = delegate.get_or_create_assoc_item_list();
match bound_def.assoc_item_list() { if let Some(ai) = bound_def.assoc_item_list() {
Some(ai) => {
ai.assoc_items() ai.assoc_items()
.filter(|item| matches!(item, AssocItem::MacroCall(_)).not()) .filter(|item| matches!(item, AssocItem::MacroCall(_)).not())
.for_each(|item| { .for_each(|item| {
@ -281,8 +280,6 @@ fn generate_impl(
delegate_assoc_items.add_item(assoc); delegate_assoc_items.add_item(assoc);
} }
}); });
}
None => {}
}; };
let target_scope = ctx.sema.scope(strukt.strukt.syntax())?; let target_scope = ctx.sema.scope(strukt.strukt.syntax())?;
@ -512,8 +509,7 @@ fn generate_args_for_impl(
// form the substitution list // form the substitution list
let mut arg_substs = FxHashMap::default(); let mut arg_substs = FxHashMap::default();
match field_ty { if let field_ty @ ast::Type::PathType(_) = field_ty {
field_ty @ ast::Type::PathType(_) => {
let field_args = field_ty.generic_arg_list().map(|gal| gal.generic_args()); let field_args = field_ty.generic_arg_list().map(|gal| gal.generic_args());
let self_ty_args = self_ty.generic_arg_list().map(|gal| gal.generic_args()); let self_ty_args = self_ty.generic_arg_list().map(|gal| gal.generic_args());
if let (Some(field_args), Some(self_ty_args)) = (field_args, self_ty_args) { if let (Some(field_args), Some(self_ty_args)) = (field_args, self_ty_args) {
@ -522,8 +518,6 @@ fn generate_args_for_impl(
}) })
} }
} }
_ => {}
}
let args = old_impl_args let args = old_impl_args
.map(|old_arg| { .map(|old_arg| {

View file

@ -377,11 +377,9 @@ fn build_source_change(
}; };
// Insert `$0` only for last getter we generate // Insert `$0` only for last getter we generate
if i == record_fields_count - 1 { if i == record_fields_count - 1 && ctx.config.snippet_cap.is_some() {
if ctx.config.snippet_cap.is_some() {
getter_buf = getter_buf.replacen("fn ", "fn $0", 1); getter_buf = getter_buf.replacen("fn ", "fn $0", 1);
} }
}
// For first element we do not merge with '\n', as // For first element we do not merge with '\n', as
// that can be inserted by impl_def check defined // that can be inserted by impl_def check defined

View file

@ -29,7 +29,7 @@ pub(crate) fn generate_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> Optio
let name = nominal.name()?; let name = nominal.name()?;
let target = nominal.syntax().text_range(); let target = nominal.syntax().text_range();
if let Some(_) = ctx.find_node_at_offset::<ast::RecordFieldList>() { if ctx.find_node_at_offset::<ast::RecordFieldList>().is_some() {
return None; return None;
} }
@ -77,7 +77,7 @@ pub(crate) fn generate_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>) ->
let name = nominal.name()?; let name = nominal.name()?;
let target = nominal.syntax().text_range(); let target = nominal.syntax().text_range();
if let Some(_) = ctx.find_node_at_offset::<ast::RecordFieldList>() { if ctx.find_node_at_offset::<ast::RecordFieldList>().is_some() {
return None; return None;
} }

View file

@ -181,8 +181,7 @@ fn remove_items_visibility(item: &ast::AssocItem) {
} }
fn strip_body(item: &ast::AssocItem) { fn strip_body(item: &ast::AssocItem) {
match item { if let ast::AssocItem::Fn(f) = item {
ast::AssocItem::Fn(f) => {
if let Some(body) = f.body() { if let Some(body) = f.body() {
// In constrast to function bodies, we want to see no ws before a semicolon. // In constrast to function bodies, we want to see no ws before a semicolon.
// So let's remove them if we see any. // So let's remove them if we see any.
@ -194,8 +193,6 @@ fn strip_body(item: &ast::AssocItem) {
ted::replace(body.syntax(), make::tokens::semicolon()); ted::replace(body.syntax(), make::tokens::semicolon());
} }
}
_ => (),
}; };
} }

View file

@ -425,8 +425,8 @@ fn inline(
if is_self { if is_self {
let mut this_pat = make::ident_pat(false, false, make::name("this")); let mut this_pat = make::ident_pat(false, false, make::name("this"));
let mut expr = expr.clone(); let mut expr = expr.clone();
match pat { if let Pat::IdentPat(pat) = pat {
Pat::IdentPat(pat) => match (pat.ref_token(), pat.mut_token()) { match (pat.ref_token(), pat.mut_token()) {
// self => let this = obj // self => let this = obj
(None, None) => {} (None, None) => {}
// mut self => let mut this = obj // mut self => let mut this = obj
@ -449,8 +449,7 @@ fn inline(
make::expr_ref(expr, true) make::expr_ref(expr, true)
}; };
} }
}, }
_ => {}
}; };
let_stmts let_stmts
.push(make::let_stmt(this_pat.into(), ty, Some(expr)).clone_for_update().into()) .push(make::let_stmt(this_pat.into(), ty, Some(expr)).clone_for_update().into())

View file

@ -100,7 +100,7 @@ fn validate_type_recursively(
} }
(_, Some(ty)) => match ty.as_builtin() { (_, Some(ty)) => match ty.as_builtin() {
// `const A: str` is not correct, but `const A: &builtin` is. // `const A: str` is not correct, but `const A: &builtin` is.
Some(builtin) if refed || (!refed && !builtin.is_str()) => Some(()), Some(builtin) if refed || !builtin.is_str() => Some(()),
_ => None, _ => None,
}, },
_ => None, _ => None,

View file

@ -69,7 +69,7 @@ pub(crate) fn replace_turbofish_with_explicit_type(
return None; return None;
} }
if let None = let_stmt.colon_token() { if let_stmt.colon_token().is_none() {
// If there's no colon in a let statement, then there is no explicit type. // If there's no colon in a let statement, then there is no explicit type.
// let x = fn::<...>(); // let x = fn::<...>();
let ident_range = let_stmt.pat()?.syntax().text_range(); let ident_range = let_stmt.pat()?.syntax().text_range();

View file

@ -37,16 +37,14 @@ pub(crate) fn unnecessary_async(acc: &mut Assists, ctx: &AssistContext<'_>) -> O
return None; return None;
} }
// Do nothing if the function isn't async. // Do nothing if the function isn't async.
if let None = function.async_token() { function.async_token()?;
return None;
}
// Do nothing if the function has an `await` expression in its body. // Do nothing if the function has an `await` expression in its body.
if function.body()?.syntax().descendants().find_map(ast::AwaitExpr::cast).is_some() { if function.body()?.syntax().descendants().find_map(ast::AwaitExpr::cast).is_some() {
return None; return None;
} }
// Do nothing if the method is a member of trait. // Do nothing if the method is a member of trait.
if let Some(impl_) = function.syntax().ancestors().nth(2).and_then(ast::Impl::cast) { if let Some(impl_) = function.syntax().ancestors().nth(2).and_then(ast::Impl::cast) {
if let Some(_) = impl_.trait_() { if impl_.trait_().is_some() {
return None; return None;
} }
} }

View file

@ -185,10 +185,10 @@ fn normalize(name: &str) -> Option<String> {
} }
fn is_valid_name(name: &str) -> bool { fn is_valid_name(name: &str) -> bool {
match ide_db::syntax_helpers::LexedStr::single_token(name) { matches!(
Some((syntax::SyntaxKind::IDENT, _error)) => true, ide_db::syntax_helpers::LexedStr::single_token(name),
_ => false, Some((syntax::SyntaxKind::IDENT, _error))
} )
} }
fn is_useless_method(method: &ast::MethodCallExpr) -> bool { fn is_useless_method(method: &ast::MethodCallExpr) -> bool {

View file

@ -11,23 +11,19 @@ pub(crate) fn complete_field_list_tuple_variant(
path_ctx: &PathCompletionCtx, path_ctx: &PathCompletionCtx,
) { ) {
if ctx.qualifier_ctx.vis_node.is_some() { if ctx.qualifier_ctx.vis_node.is_some() {
return; } else if let PathCompletionCtx {
}
match path_ctx {
PathCompletionCtx {
has_macro_bang: false, has_macro_bang: false,
qualified: Qualified::No, qualified: Qualified::No,
parent: None, parent: None,
has_type_args: false, has_type_args: false,
.. ..
} => { } = path_ctx
{
let mut add_keyword = |kw, snippet| acc.add_keyword_snippet(ctx, kw, snippet); let mut add_keyword = |kw, snippet| acc.add_keyword_snippet(ctx, kw, snippet);
add_keyword("pub(crate)", "pub(crate)"); add_keyword("pub(crate)", "pub(crate)");
add_keyword("pub(super)", "pub(super)"); add_keyword("pub(super)", "pub(super)");
add_keyword("pub", "pub"); add_keyword("pub", "pub");
} }
_ => (),
}
} }
pub(crate) fn complete_field_list_record_variant( pub(crate) fn complete_field_list_record_variant(

View file

@ -369,11 +369,10 @@ fn import_on_the_fly_method(
}; };
key(&a.import_path).cmp(&key(&b.import_path)) key(&a.import_path).cmp(&key(&b.import_path))
}) })
.for_each(|import| match import.original_item { .for_each(|import| {
ItemInNs::Values(hir::ModuleDef::Function(f)) => { if let ItemInNs::Values(hir::ModuleDef::Function(f)) = import.original_item {
acc.add_method_with_import(ctx, dot_access, f, import); acc.add_method_with_import(ctx, dot_access, f, import);
} }
_ => (),
}); });
Some(()) Some(())
} }

View file

@ -80,7 +80,7 @@ fn add_keywords(acc: &mut Completions, ctx: &CompletionContext<'_>, kind: Option
let in_trait_impl = matches!(kind, Some(ItemListKind::TraitImpl(_))); let in_trait_impl = matches!(kind, Some(ItemListKind::TraitImpl(_)));
let in_inherent_impl = matches!(kind, Some(ItemListKind::Impl)); let in_inherent_impl = matches!(kind, Some(ItemListKind::Impl));
let no_qualifiers = ctx.qualifier_ctx.vis_node.is_none(); let no_qualifiers = ctx.qualifier_ctx.vis_node.is_none();
let in_block = matches!(kind, None); let in_block = kind.is_none();
if !in_trait_impl { if !in_trait_impl {
if ctx.qualifier_ctx.unsafe_tok.is_some() { if ctx.qualifier_ctx.unsafe_tok.is_some() {

View file

@ -186,14 +186,13 @@ impl TypeLocation {
} }
pub(crate) fn complete_consts(&self) -> bool { pub(crate) fn complete_consts(&self) -> bool {
match self { matches!(
self,
TypeLocation::GenericArg { TypeLocation::GenericArg {
corresponding_param: Some(ast::GenericParam::ConstParam(_)), corresponding_param: Some(ast::GenericParam::ConstParam(_)),
.. ..
} => true, } | TypeLocation::AssocConstEq
TypeLocation::AssocConstEq => true, )
_ => false,
}
} }
pub(crate) fn complete_types(&self) -> bool { pub(crate) fn complete_types(&self) -> bool {

View file

@ -796,8 +796,7 @@ fn classify_name_ref(
ast::AssocTypeArg(arg) => { ast::AssocTypeArg(arg) => {
let trait_ = ast::PathSegment::cast(arg.syntax().parent()?.parent()?)?; let trait_ = ast::PathSegment::cast(arg.syntax().parent()?.parent()?)?;
match sema.resolve_path(&trait_.parent_path().top_path())? { match sema.resolve_path(&trait_.parent_path().top_path())? {
hir::PathResolution::Def(def) => match def { hir::PathResolution::Def(hir::ModuleDef::Trait(trait_)) => {
hir::ModuleDef::Trait(trait_) => {
let arg_name = arg.name_ref()?; let arg_name = arg.name_ref()?;
let arg_name = arg_name.text(); let arg_name = arg_name.text();
let trait_items = trait_.items_with_supertraits(sema.db); let trait_items = trait_.items_with_supertraits(sema.db);
@ -811,8 +810,6 @@ fn classify_name_ref(
sema.source(*assoc_ty)?.value.generic_param_list() sema.source(*assoc_ty)?.value.generic_param_list()
} }
_ => None, _ => None,
},
_ => None,
} }
}, },
_ => None, _ => None,

View file

@ -295,15 +295,12 @@ fn render_resolution_pat(
let _p = profile::span("render_resolution"); let _p = profile::span("render_resolution");
use hir::ModuleDef::*; use hir::ModuleDef::*;
match resolution { if let ScopeDef::ModuleDef(Macro(mac)) = resolution {
ScopeDef::ModuleDef(Macro(mac)) => {
let ctx = ctx.import_to_add(import_to_add); let ctx = ctx.import_to_add(import_to_add);
return render_macro_pat(ctx, pattern_ctx, local_name, mac); render_macro_pat(ctx, pattern_ctx, local_name, mac)
} } else {
_ => (),
}
render_resolution_simple_(ctx, &local_name, import_to_add, resolution) render_resolution_simple_(ctx, &local_name, import_to_add, resolution)
}
} }
fn render_resolution_path( fn render_resolution_path(

View file

@ -57,11 +57,11 @@ fn render(
) -> Option<Builder> { ) -> Option<Builder> {
let db = completion.db; let db = completion.db;
let mut kind = thing.kind(db); let mut kind = thing.kind(db);
let should_add_parens = match &path_ctx { let should_add_parens = !matches!(
PathCompletionCtx { has_call_parens: true, .. } => false, path_ctx,
PathCompletionCtx { kind: PathKind::Use | PathKind::Type { .. }, .. } => false, PathCompletionCtx { has_call_parens: true, .. }
_ => true, | PathCompletionCtx { kind: PathKind::Use | PathKind::Type { .. }, .. }
}; );
let fields = thing.fields(completion)?; let fields = thing.fields(completion)?;
let (qualified_name, short_qualified_name, qualified) = match path { let (qualified_name, short_qualified_name, qualified) = match path {

View file

@ -23,7 +23,7 @@ impl ActiveParameter {
let idx = active_parameter?; let idx = active_parameter?;
let mut params = signature.params(sema.db); let mut params = signature.params(sema.db);
if !(idx < params.len()) { if idx >= params.len() {
cov_mark::hit!(too_many_arguments); cov_mark::hit!(too_many_arguments);
return None; return None;
} }

View file

@ -383,10 +383,10 @@ impl Query {
} }
fn matches_assoc_mode(&self, is_trait_assoc_item: bool) -> bool { fn matches_assoc_mode(&self, is_trait_assoc_item: bool) -> bool {
match (is_trait_assoc_item, self.assoc_mode) { !matches!(
(true, AssocSearchMode::Exclude) | (false, AssocSearchMode::AssocItemsOnly) => false, (is_trait_assoc_item, self.assoc_mode),
_ => true, (true, AssocSearchMode::Exclude) | (false, AssocSearchMode::AssocItemsOnly)
} )
} }
} }

View file

@ -310,6 +310,7 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
Ok(()) Ok(())
} }
#[allow(clippy::only_used_in_recursion)]
fn check_constraint( fn check_constraint(
&self, &self,
constraint: &Constraint, constraint: &Constraint,
@ -764,12 +765,7 @@ impl Iterator for PatternIterator {
type Item = SyntaxElement; type Item = SyntaxElement;
fn next(&mut self) -> Option<SyntaxElement> { fn next(&mut self) -> Option<SyntaxElement> {
for element in &mut self.iter { self.iter.find(|element| !element.kind().is_trivia())
if !element.kind().is_trivia() {
return Some(element);
}
}
None
} }
} }

View file

@ -271,13 +271,13 @@ fn fold_range_for_where_clause(where_clause: ast::WhereClause) -> Option<TextRan
} }
fn fold_range_for_multiline_match_arm(match_arm: ast::MatchArm) -> Option<TextRange> { fn fold_range_for_multiline_match_arm(match_arm: ast::MatchArm) -> Option<TextRange> {
if let Some(_) = fold_kind(match_arm.expr()?.syntax().kind()) { if fold_kind(match_arm.expr()?.syntax().kind()).is_some() {
return None;
}
if match_arm.expr()?.syntax().text().contains_char('\n') {
return Some(match_arm.expr()?.syntax().text_range());
}
None None
} else if match_arm.expr()?.syntax().text().contains_char('\n') {
Some(match_arm.expr()?.syntax().text_range())
} else {
None
}
} }
#[cfg(test)] #[cfg(test)]

View file

@ -98,8 +98,7 @@ pub(super) fn hints(
}; };
{ {
let mut potential_lt_refs = potential_lt_refs.iter().filter(|&&(.., is_elided)| is_elided); let mut potential_lt_refs = potential_lt_refs.iter().filter(|&&(.., is_elided)| is_elided);
if let Some(_) = &self_param { if self_param.is_some() && potential_lt_refs.next().is_some() {
if let Some(_) = potential_lt_refs.next() {
allocated_lifetimes.push(if config.param_names_for_lifetime_elision_hints { allocated_lifetimes.push(if config.param_names_for_lifetime_elision_hints {
// self can't be used as a lifetime, so no need to check for collisions // self can't be used as a lifetime, so no need to check for collisions
"'self".into() "'self".into()
@ -107,7 +106,6 @@ pub(super) fn hints(
gen_idx_name() gen_idx_name()
}); });
} }
}
potential_lt_refs.for_each(|(name, ..)| { potential_lt_refs.for_each(|(name, ..)| {
let name = match name { let name = match name {
Some(it) if config.param_names_for_lifetime_elision_hints => { Some(it) if config.param_names_for_lifetime_elision_hints => {

View file

@ -47,7 +47,7 @@ pub(super) fn hints(
if let Some(name) = param { if let Some(name) = param {
if let hir::CallableKind::Function(f) = callable.kind() { if let hir::CallableKind::Function(f) = callable.kind() {
// assert the file is cached so we can map out of macros // assert the file is cached so we can map out of macros
if let Some(_) = sema.source(f) { if sema.source(f).is_some() {
linked_location = sema.original_range_opt(name.syntax()); linked_location = sema.original_range_opt(name.syntax());
} }
} }

View file

@ -95,11 +95,7 @@ pub struct MonikerIdentifier {
impl ToString for MonikerIdentifier { impl ToString for MonikerIdentifier {
fn to_string(&self) -> String { fn to_string(&self) -> String {
match self { format!("{}::{}", self.crate_name, self.description.iter().map(|x| &x.name).join("::"))
MonikerIdentifier { description, crate_name } => {
format!("{}::{}", crate_name, description.iter().map(|x| &x.name).join("::"))
}
}
} }
} }

View file

@ -282,8 +282,8 @@ fn traverse(
inside_attribute = false inside_attribute = false
} }
Enter(NodeOrToken::Node(node)) => match ast::Item::cast(node.clone()) { Enter(NodeOrToken::Node(node)) => {
Some(item) => { if let Some(item) = ast::Item::cast(node.clone()) {
match item { match item {
ast::Item::MacroRules(mac) => { ast::Item::MacroRules(mac) => {
macro_highlighter.init(); macro_highlighter.init();
@ -324,8 +324,7 @@ fn traverse(
} }
} }
} }
_ => (), }
},
Leave(NodeOrToken::Node(node)) if ast::Item::can_cast(node.kind()) => { Leave(NodeOrToken::Node(node)) if ast::Item::can_cast(node.kind()) => {
match ast::Item::cast(node.clone()) { match ast::Item::cast(node.clone()) {
Some(ast::Item::MacroRules(mac)) => { Some(ast::Item::MacroRules(mac)) => {

View file

@ -52,7 +52,6 @@ fn syntax_tree_for_string(token: &SyntaxToken, text_range: TextRange) -> Option<
} }
} }
#[allow(clippy::redundant_locals)]
fn syntax_tree_for_token(node: &SyntaxToken, text_range: TextRange) -> Option<String> { fn syntax_tree_for_token(node: &SyntaxToken, text_range: TextRange) -> Option<String> {
// Range of the full node // Range of the full node
let node_range = node.text_range(); let node_range = node.text_range();
@ -68,8 +67,6 @@ fn syntax_tree_for_token(node: &SyntaxToken, text_range: TextRange) -> Option<St
let node_len = node_range.len(); let node_len = node_range.len();
let start = start;
// We want to cap our length // We want to cap our length
let len = len.min(node_len); let len = len.min(node_len);

View file

@ -359,19 +359,16 @@ fn on_left_angle_typed(file: &SourceFile, offset: TextSize) -> Option<ExtendedTe
} }
} }
if ancestors_at_offset(file.syntax(), offset) if ancestors_at_offset(file.syntax(), offset).any(|n| {
.find(|n| {
ast::GenericParamList::can_cast(n.kind()) || ast::GenericArgList::can_cast(n.kind()) ast::GenericParamList::can_cast(n.kind()) || ast::GenericArgList::can_cast(n.kind())
}) }) {
.is_some() Some(ExtendedTextEdit {
{
return Some(ExtendedTextEdit {
edit: TextEdit::replace(range, "<$0>".to_string()), edit: TextEdit::replace(range, "<$0>".to_string()),
is_snippet: true, is_snippet: true,
}); })
} } else {
None None
}
} }
/// Adds a space after an arrow when `fn foo() { ... }` is turned into `fn foo() -> { ... }` /// Adds a space after an arrow when `fn foo() { ... }` is turned into `fn foo() -> { ... }`

View file

@ -358,7 +358,7 @@ fn expander_to_proc_macro(
proc_macro_api::ProcMacroKind::Attr => ProcMacroKind::Attr, proc_macro_api::ProcMacroKind::Attr => ProcMacroKind::Attr,
}; };
let expander: sync::Arc<dyn ProcMacroExpander> = let expander: sync::Arc<dyn ProcMacroExpander> =
if dummy_replace.iter().any(|replace| &**replace == name) { if dummy_replace.iter().any(|replace| **replace == name) {
match kind { match kind {
ProcMacroKind::Attr => sync::Arc::new(IdentityExpander), ProcMacroKind::Attr => sync::Arc::new(IdentityExpander),
_ => sync::Arc::new(EmptyExpander), _ => sync::Arc::new(EmptyExpander),

View file

@ -282,9 +282,9 @@ fn expand_subtree<S: Span>(
} }
let res = if ctx.new_meta_vars { let res = if ctx.new_meta_vars {
count(ctx, binding, 0, depth.unwrap_or(0)) count(binding, 0, depth.unwrap_or(0))
} else { } else {
count_old(ctx, binding, 0, *depth) count_old(binding, 0, *depth)
}; };
let c = match res { let c = match res {
@ -537,7 +537,6 @@ fn fix_up_and_push_path_tt<S: Span>(
/// Handles `${count(t, depth)}`. `our_depth` is the recursion depth and `count_depth` is the depth /// Handles `${count(t, depth)}`. `our_depth` is the recursion depth and `count_depth` is the depth
/// defined by the metavar expression. /// defined by the metavar expression.
fn count<S>( fn count<S>(
ctx: &ExpandCtx<'_, S>,
binding: &Binding<S>, binding: &Binding<S>,
depth_curr: usize, depth_curr: usize,
depth_max: usize, depth_max: usize,
@ -547,7 +546,7 @@ fn count<S>(
if depth_curr == depth_max { if depth_curr == depth_max {
Ok(bs.len()) Ok(bs.len())
} else { } else {
bs.iter().map(|b| count(ctx, b, depth_curr + 1, depth_max)).sum() bs.iter().map(|b| count(b, depth_curr + 1, depth_max)).sum()
} }
} }
Binding::Empty => Ok(0), Binding::Empty => Ok(0),
@ -556,16 +555,15 @@ fn count<S>(
} }
fn count_old<S>( fn count_old<S>(
ctx: &ExpandCtx<'_, S>,
binding: &Binding<S>, binding: &Binding<S>,
our_depth: usize, our_depth: usize,
count_depth: Option<usize>, count_depth: Option<usize>,
) -> Result<usize, CountError> { ) -> Result<usize, CountError> {
match binding { match binding {
Binding::Nested(bs) => match count_depth { Binding::Nested(bs) => match count_depth {
None => bs.iter().map(|b| count_old(ctx, b, our_depth + 1, None)).sum(), None => bs.iter().map(|b| count_old(b, our_depth + 1, None)).sum(),
Some(0) => Ok(bs.len()), Some(0) => Ok(bs.len()),
Some(d) => bs.iter().map(|b| count_old(ctx, b, our_depth + 1, Some(d - 1))).sum(), Some(d) => bs.iter().map(|b| count_old(b, our_depth + 1, Some(d - 1))).sum(),
}, },
Binding::Empty => Ok(0), Binding::Empty => Ok(0),
Binding::Fragment(_) | Binding::Missing(_) => { Binding::Fragment(_) | Binding::Missing(_) => {

View file

@ -22,7 +22,7 @@ fn sourcegen_parser_tests() {
} }
// ok is never actually read, but it needs to be specified to create a Test in existing_tests // ok is never actually read, but it needs to be specified to create a Test in existing_tests
let existing = existing_tests(&tests_dir, true); let existing = existing_tests(&tests_dir, true);
for t in existing.keys().filter(|&t| !tests.contains_key(t)) { if let Some(t) = existing.keys().find(|&t| !tests.contains_key(t)) {
panic!("Test is deleted: {t}"); panic!("Test is deleted: {t}");
} }

View file

@ -368,7 +368,7 @@ impl CargoWorkspace {
name, name,
root: AbsPathBuf::assert(src_path.into()), root: AbsPathBuf::assert(src_path.into()),
kind: TargetKind::new(&kind), kind: TargetKind::new(&kind),
is_proc_macro: &*kind == ["proc-macro"], is_proc_macro: *kind == ["proc-macro"],
required_features, required_features,
}); });
pkg_data.targets.push(tgt); pkg_data.targets.push(tgt);

View file

@ -36,7 +36,7 @@ impl ManifestPath {
} }
pub fn canonicalize(&self) -> ! { pub fn canonicalize(&self) -> ! {
(&**self).canonicalize() (**self).canonicalize()
} }
} }

View file

@ -17,7 +17,7 @@ use lsp_server::Connection;
use rust_analyzer::{cli::flags, config::Config, from_json}; use rust_analyzer::{cli::flags, config::Config, from_json};
use vfs::AbsPathBuf; use vfs::AbsPathBuf;
#[cfg(all(feature = "mimalloc"))] #[cfg(feature = "mimalloc")]
#[global_allocator] #[global_allocator]
static ALLOC: mimalloc::MiMalloc = mimalloc::MiMalloc; static ALLOC: mimalloc::MiMalloc = mimalloc::MiMalloc;

View file

@ -226,7 +226,6 @@ const SUPPORTED_DIAGNOSTICS: &[DiagnosticCode] = &[
]; ];
impl flags::RustcTests { impl flags::RustcTests {
#[allow(clippy::redundant_locals)]
pub fn run(self) -> Result<()> { pub fn run(self) -> Result<()> {
let mut tester = Tester::new()?; let mut tester = Tester::new()?;
let walk_dir = WalkDir::new(self.rustc_repo.join("tests/ui")); let walk_dir = WalkDir::new(self.rustc_repo.join("tests/ui"));
@ -246,8 +245,7 @@ impl flags::RustcTests {
let p = p.clone(); let p = p.clone();
move || { move || {
let _guard = stdx::panic_context::enter(p.display().to_string()); let _guard = stdx::panic_context::enter(p.display().to_string());
let tester = tester; { tester }.0.test(p);
tester.0.test(p);
} }
}) { }) {
std::panic::resume_unwind(e); std::panic::resume_unwind(e);

View file

@ -36,7 +36,7 @@ pub(crate) fn handle_work_done_progress_cancel(
) -> anyhow::Result<()> { ) -> anyhow::Result<()> {
if let lsp_types::NumberOrString::String(s) = &params.token { if let lsp_types::NumberOrString::String(s) = &params.token {
if let Some(id) = s.strip_prefix("rust-analyzer/flycheck/") { if let Some(id) = s.strip_prefix("rust-analyzer/flycheck/") {
if let Ok(id) = u32::from_str_radix(id, 10) { if let Ok(id) = id.parse::<u32>() {
if let Some(flycheck) = state.flycheck.get(id as usize) { if let Some(flycheck) = state.flycheck.get(id as usize) {
flycheck.cancel(); flycheck.cancel();
} }

View file

@ -1978,10 +1978,9 @@ fn run_rustfmt(
// approach: if the command name contains a path separator, join it with the workspace root. // approach: if the command name contains a path separator, join it with the workspace root.
// however, if the path is absolute, joining will result in the absolute path being preserved. // however, if the path is absolute, joining will result in the absolute path being preserved.
// as a fallback, rely on $PATH-based discovery. // as a fallback, rely on $PATH-based discovery.
let cmd_path = let cmd_path = if command.contains(std::path::MAIN_SEPARATOR)
if cfg!(windows) && command.contains([std::path::MAIN_SEPARATOR, '/']) { || (cfg!(windows) && command.contains('/'))
spec.workspace_root.join(cmd).into() {
} else if command.contains(std::path::MAIN_SEPARATOR) {
spec.workspace_root.join(cmd).into() spec.workspace_root.join(cmd).into()
} else { } else {
cmd cmd

View file

@ -310,8 +310,7 @@ fn completion_item(
set_score(&mut lsp_item, max_relevance, item.relevance); set_score(&mut lsp_item, max_relevance, item.relevance);
if config.completion().enable_imports_on_the_fly { if config.completion().enable_imports_on_the_fly && !item.import_to_add.is_empty() {
if !item.import_to_add.is_empty() {
let imports: Vec<_> = item let imports: Vec<_> = item
.import_to_add .import_to_add
.into_iter() .into_iter()
@ -327,7 +326,6 @@ fn completion_item(
lsp_item.data = Some(to_value(data).unwrap()); lsp_item.data = Some(to_value(data).unwrap());
} }
} }
}
if let Some((label, indel, relevance)) = ref_match { if let Some((label, indel, relevance)) = ref_match {
let mut lsp_item_with_ref = lsp_types::CompletionItem { label, ..lsp_item.clone() }; let mut lsp_item_with_ref = lsp_types::CompletionItem { label, ..lsp_item.clone() };

View file

@ -579,13 +579,13 @@ impl GlobalState {
let path = VfsPath::from(path); let path = VfsPath::from(path);
// if the file is in mem docs, it's managed by the client via notifications // if the file is in mem docs, it's managed by the client via notifications
// so only set it if its not in there // so only set it if its not in there
if !self.mem_docs.contains(&path) { if !self.mem_docs.contains(&path)
if is_changed || vfs.file_id(&path).is_none() { && (is_changed || vfs.file_id(&path).is_none())
{
vfs.set_file_contents(path, contents); vfs.set_file_contents(path, contents);
} }
} }
} }
}
vfs::loader::Message::Progress { n_total, n_done, dir, config_version } => { vfs::loader::Message::Progress { n_total, n_done, dir, config_version } => {
always!(config_version <= self.vfs_config_version); always!(config_version <= self.vfs_config_version);

View file

@ -109,7 +109,7 @@ impl GlobalState {
status.health = lsp_ext::Health::Warning; status.health = lsp_ext::Health::Warning;
message.push_str("Proc-macros have changed and need to be rebuilt.\n\n"); message.push_str("Proc-macros have changed and need to be rebuilt.\n\n");
} }
if let Err(_) = self.fetch_build_data_error() { if self.fetch_build_data_error().is_err() {
status.health = lsp_ext::Health::Warning; status.health = lsp_ext::Health::Warning;
message.push_str("Failed to run build scripts of some packages.\n\n"); message.push_str("Failed to run build scripts of some packages.\n\n");
} }
@ -173,7 +173,7 @@ impl GlobalState {
} }
} }
if let Err(_) = self.fetch_workspace_error() { if self.fetch_workspace_error().is_err() {
status.health = lsp_ext::Health::Error; status.health = lsp_ext::Health::Error;
message.push_str("Failed to load workspaces."); message.push_str("Failed to load workspaces.");
@ -364,8 +364,7 @@ impl GlobalState {
return; return;
}; };
if let Err(_) = self.fetch_workspace_error() { if self.fetch_workspace_error().is_err() && !self.workspaces.is_empty() {
if !self.workspaces.is_empty() {
if *force_reload_crate_graph { if *force_reload_crate_graph {
self.recreate_crate_graph(cause); self.recreate_crate_graph(cause);
} }
@ -373,7 +372,6 @@ impl GlobalState {
// if we don't have any workspace at all yet. // if we don't have any workspace at all yet.
return; return;
} }
}
let workspaces = let workspaces =
workspaces.iter().filter_map(|res| res.as_ref().ok().cloned()).collect::<Vec<_>>(); workspaces.iter().filter_map(|res| res.as_ref().ok().cloned()).collect::<Vec<_>>();
@ -454,8 +452,9 @@ impl GlobalState {
let files_config = self.config.files(); let files_config = self.config.files();
let project_folders = ProjectFolders::new(&self.workspaces, &files_config.exclude); let project_folders = ProjectFolders::new(&self.workspaces, &files_config.exclude);
if self.proc_macro_clients.is_empty() || !same_workspaces { if (self.proc_macro_clients.is_empty() || !same_workspaces)
if self.config.expand_proc_macros() { && self.config.expand_proc_macros()
{
tracing::info!("Spawning proc-macro servers"); tracing::info!("Spawning proc-macro servers");
self.proc_macro_clients = Arc::from_iter(self.workspaces.iter().map(|ws| { self.proc_macro_clients = Arc::from_iter(self.workspaces.iter().map(|ws| {
@ -474,7 +473,6 @@ impl GlobalState {
) )
}) })
})) }))
};
} }
let watch = match files_config.watcher { let watch = match files_config.watcher {
@ -569,10 +567,11 @@ impl GlobalState {
for ws in &self.fetch_build_data_queue.last_op_result().1 { for ws in &self.fetch_build_data_queue.last_op_result().1 {
match ws { match ws {
Ok(data) => match data.error() { Ok(data) => {
Some(stderr) => stdx::format_to!(buf, "{:#}\n", stderr), if let Some(stderr) = data.error() {
_ => (), stdx::format_to!(buf, "{:#}\n", stderr)
}, }
}
// io errors // io errors
Err(err) => stdx::format_to!(buf, "{:#}\n", err), Err(err) => stdx::format_to!(buf, "{:#}\n", err),
} }

View file

@ -379,7 +379,7 @@ impl TidyDocs {
) )
} }
for path in self.contains_fixme { if let Some(path) = self.contains_fixme.first() {
panic!("FIXME doc in a fully-documented crate: {}", path.display()) panic!("FIXME doc in a fully-documented crate: {}", path.display())
} }
} }

View file

@ -339,7 +339,7 @@ unsafe fn analyze_source_file_sse2(
} }
#[target_feature(enable = "neon")] #[target_feature(enable = "neon")]
#[cfg(any(target_arch = "aarch64"))] #[cfg(target_arch = "aarch64")]
#[inline] #[inline]
// See https://community.arm.com/arm-community-blogs/b/infrastructure-solutions-blog/posts/porting-x86-vector-bitmask-optimizations-to-arm-neon // See https://community.arm.com/arm-community-blogs/b/infrastructure-solutions-blog/posts/porting-x86-vector-bitmask-optimizations-to-arm-neon
// //
@ -354,7 +354,7 @@ unsafe fn move_mask(v: std::arch::aarch64::uint8x16_t) -> u64 {
} }
#[target_feature(enable = "neon")] #[target_feature(enable = "neon")]
#[cfg(any(target_arch = "aarch64"))] #[cfg(target_arch = "aarch64")]
unsafe fn analyze_source_file_neon( unsafe fn analyze_source_file_neon(
src: &str, src: &str,
lines: &mut Vec<TextSize>, lines: &mut Vec<TextSize>,