Mirror of https://github.com/rust-lang/rust-clippy, synced 2024-11-23 05:03:21 +00:00
[Clippy] Swap manual_retain to use diagnostic items instead of paths
parent: e3bb779e1e
commit: 978582be74
2 changed files with 17 additions and 26 deletions
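Note (not part of the commit): the diff below replaces Clippy's hard-coded definition paths with diagnostic-item lookups. A minimal sketch of the two styles, assuming a `LateContext` named `cx` and an already-resolved `DefId`; the helper names are invented for illustration:

// Sketch only: contrasts the old path-based check with the new
// diagnostic-item check applied throughout this commit.
use clippy_utils::match_def_path;
use rustc_hir::def_id::DefId;
use rustc_lint::LateContext;
use rustc_span::sym;

// Before: compare the definition's path against a hard-coded segment list
// (brittle if the standard library ever moves or renames the item).
fn is_iter_filter_by_path(cx: &LateContext<'_>, method_def_id: DefId) -> bool {
    match_def_path(cx, method_def_id, &["core", "iter", "traits", "iterator", "Iterator", "filter"])
}

// After: ask the compiler whether the definition carries the `iter_filter`
// diagnostic item, a name attached to `Iterator::filter` in the standard library.
fn is_iter_filter_by_diagnostic_item(cx: &LateContext<'_>, method_def_id: DefId) -> bool {
    cx.tcx.is_diagnostic_item(sym::iter_filter, method_def_id)
}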
clippy_lints/src/manual_retain.rs

@@ -3,22 +3,22 @@ use clippy_config::Conf;
 use clippy_utils::diagnostics::span_lint_and_sugg;
 use clippy_utils::source::snippet;
 use clippy_utils::ty::{get_type_diagnostic_name, is_type_lang_item};
-use clippy_utils::{match_def_path, paths, SpanlessEq};
+use clippy_utils::SpanlessEq;
 use rustc_errors::Applicability;
 use rustc_hir as hir;
 use rustc_hir::def_id::DefId;
 use rustc_hir::ExprKind::Assign;
 use rustc_lint::{LateContext, LateLintPass};
 use rustc_session::impl_lint_pass;
-use rustc_span::symbol::sym;
+use rustc_span::symbol::{sym, Symbol};
 use rustc_span::Span;

-const ACCEPTABLE_METHODS: [&[&str]; 5] = [
-    &paths::BINARYHEAP_ITER,
-    &paths::HASHSET_ITER,
-    &paths::BTREESET_ITER,
-    &paths::SLICE_INTO,
-    &paths::VEC_DEQUE_ITER,
+const ACCEPTABLE_METHODS: [Symbol; 5] = [
+    sym::binaryheap_iter,
+    sym::hashset_iter,
+    sym::btreeset_iter,
+    sym::slice_iter,
+    sym::vecdeque_iter,
 ];

 declare_clippy_lint! {
@@ -84,7 +84,7 @@ fn check_into_iter(
 ) {
     if let hir::ExprKind::MethodCall(_, into_iter_expr, [_], _) = &target_expr.kind
         && let Some(filter_def_id) = cx.typeck_results().type_dependent_def_id(target_expr.hir_id)
-        && match_def_path(cx, filter_def_id, &paths::CORE_ITER_FILTER)
+        && cx.tcx.is_diagnostic_item(sym::iter_filter, filter_def_id)
         && let hir::ExprKind::MethodCall(_, struct_expr, [], _) = &into_iter_expr.kind
         && let Some(into_iter_def_id) = cx.typeck_results().type_dependent_def_id(into_iter_expr.hir_id)
         && Some(into_iter_def_id) == cx.tcx.lang_items().into_iter_fn()
@@ -127,14 +127,14 @@ fn check_iter(
 ) {
     if let hir::ExprKind::MethodCall(_, filter_expr, [], _) = &target_expr.kind
         && let Some(copied_def_id) = cx.typeck_results().type_dependent_def_id(target_expr.hir_id)
-        && (match_def_path(cx, copied_def_id, &paths::CORE_ITER_COPIED)
-            || match_def_path(cx, copied_def_id, &paths::CORE_ITER_CLONED))
+        && (cx.tcx.is_diagnostic_item(sym::iter_copied, copied_def_id)
+            || cx.tcx.is_diagnostic_item(sym::iter_cloned, copied_def_id))
         && let hir::ExprKind::MethodCall(_, iter_expr, [_], _) = &filter_expr.kind
         && let Some(filter_def_id) = cx.typeck_results().type_dependent_def_id(filter_expr.hir_id)
-        && match_def_path(cx, filter_def_id, &paths::CORE_ITER_FILTER)
+        && cx.tcx.is_diagnostic_item(sym::iter_filter, filter_def_id)
         && let hir::ExprKind::MethodCall(_, struct_expr, [], _) = &iter_expr.kind
         && let Some(iter_expr_def_id) = cx.typeck_results().type_dependent_def_id(iter_expr.hir_id)
-        && match_acceptable_def_path(cx, iter_expr_def_id)
+        && match_acceptable_sym(cx, iter_expr_def_id)
         && match_acceptable_type(cx, left_expr, msrv)
         && SpanlessEq::new(cx).eq_expr(left_expr, struct_expr)
         && let hir::ExprKind::MethodCall(_, _, [closure_expr], _) = filter_expr.kind
@@ -189,10 +189,10 @@ fn check_to_owned(
         && cx.tcx.is_diagnostic_item(sym::to_owned_method, to_owned_def_id)
         && let hir::ExprKind::MethodCall(_, chars_expr, [_], _) = &filter_expr.kind
         && let Some(filter_def_id) = cx.typeck_results().type_dependent_def_id(filter_expr.hir_id)
-        && match_def_path(cx, filter_def_id, &paths::CORE_ITER_FILTER)
+        && cx.tcx.is_diagnostic_item(sym::iter_filter, filter_def_id)
         && let hir::ExprKind::MethodCall(_, str_expr, [], _) = &chars_expr.kind
         && let Some(chars_expr_def_id) = cx.typeck_results().type_dependent_def_id(chars_expr.hir_id)
-        && match_def_path(cx, chars_expr_def_id, &paths::STR_CHARS)
+        && cx.tcx.is_diagnostic_item(sym::str_chars, chars_expr_def_id)
         && let ty = cx.typeck_results().expr_ty(str_expr).peel_refs()
         && is_type_lang_item(cx, ty, hir::LangItem::String)
         && SpanlessEq::new(cx).eq_expr(left_expr, str_expr)
@@ -247,10 +247,10 @@ fn make_sugg(
     }
 }

-fn match_acceptable_def_path(cx: &LateContext<'_>, collect_def_id: DefId) -> bool {
+fn match_acceptable_sym(cx: &LateContext<'_>, collect_def_id: DefId) -> bool {
     ACCEPTABLE_METHODS
         .iter()
-        .any(|&method| match_def_path(cx, collect_def_id, method))
+        .any(|&method| cx.tcx.is_diagnostic_item(method, collect_def_id))
 }

 fn match_acceptable_type(cx: &LateContext<'_>, expr: &hir::Expr<'_>, msrv: &Msrv) -> bool {
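For reference, the path-matching helper in this file reads as follows once the hunks above are applied (assembled from this diff, not new code):

const ACCEPTABLE_METHODS: [Symbol; 5] = [
    sym::binaryheap_iter,
    sym::hashset_iter,
    sym::btreeset_iter,
    sym::slice_iter,
    sym::vecdeque_iter,
];

fn match_acceptable_sym(cx: &LateContext<'_>, collect_def_id: DefId) -> bool {
    ACCEPTABLE_METHODS
        .iter()
        .any(|&method| cx.tcx.is_diagnostic_item(method, collect_def_id))
}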
clippy_utils/src/paths.rs

@@ -12,13 +12,8 @@ pub const APPLICABILITY_VALUES: [[&str; 3]; 4] = [
     ["rustc_lint_defs", "Applicability", "MachineApplicable"],
 ];
 pub const DIAG: [&str; 2] = ["rustc_errors", "Diag"];
-pub const BINARYHEAP_ITER: [&str; 5] = ["alloc", "collections", "binary_heap", "BinaryHeap", "iter"];
 pub const BTREEMAP_CONTAINS_KEY: [&str; 6] = ["alloc", "collections", "btree", "map", "BTreeMap", "contains_key"];
 pub const BTREEMAP_INSERT: [&str; 6] = ["alloc", "collections", "btree", "map", "BTreeMap", "insert"];
-pub const BTREESET_ITER: [&str; 6] = ["alloc", "collections", "btree", "set", "BTreeSet", "iter"];
-pub const CORE_ITER_CLONED: [&str; 6] = ["core", "iter", "traits", "iterator", "Iterator", "cloned"];
-pub const CORE_ITER_COPIED: [&str; 6] = ["core", "iter", "traits", "iterator", "Iterator", "copied"];
-pub const CORE_ITER_FILTER: [&str; 6] = ["core", "iter", "traits", "iterator", "Iterator", "filter"];
 pub const CORE_RESULT_OK_METHOD: [&str; 4] = ["core", "result", "Result", "ok"];
 pub const CSTRING_AS_C_STR: [&str; 5] = ["alloc", "ffi", "c_str", "CString", "as_c_str"];
 pub const EARLY_CONTEXT: [&str; 2] = ["rustc_lint", "EarlyContext"];
@@ -39,7 +34,6 @@ pub const HASHMAP_VALUES: [&str; 5] = ["std", "collections", "hash", "map", "Val
 pub const HASHMAP_DRAIN: [&str; 5] = ["std", "collections", "hash", "map", "Drain"];
 pub const HASHMAP_VALUES_MUT: [&str; 5] = ["std", "collections", "hash", "map", "ValuesMut"];
 pub const HASHSET_ITER_TY: [&str; 5] = ["std", "collections", "hash", "set", "Iter"];
-pub const HASHSET_ITER: [&str; 6] = ["std", "collections", "hash", "set", "HashSet", "iter"];
 pub const HASHSET_DRAIN: [&str; 5] = ["std", "collections", "hash", "set", "Drain"];
 pub const IDENT: [&str; 3] = ["rustc_span", "symbol", "Ident"];
 pub const IDENT_AS_STR: [&str; 4] = ["rustc_span", "symbol", "Ident", "as_str"];
@@ -71,13 +65,11 @@ pub const REGEX_SET_NEW: [&str; 3] = ["regex", "RegexSet", "new"];
 pub const SERDE_DESERIALIZE: [&str; 3] = ["serde", "de", "Deserialize"];
 pub const SERDE_DE_VISITOR: [&str; 3] = ["serde", "de", "Visitor"];
 pub const SLICE_INTO_VEC: [&str; 4] = ["alloc", "slice", "<impl [T]>", "into_vec"];
-pub const SLICE_INTO: [&str; 4] = ["core", "slice", "<impl [T]>", "iter"];
 pub const STD_IO_SEEK_FROM_CURRENT: [&str; 4] = ["std", "io", "SeekFrom", "Current"];
 pub const STD_IO_SEEKFROM_START: [&str; 4] = ["std", "io", "SeekFrom", "Start"];
 pub const STRING_AS_MUT_STR: [&str; 4] = ["alloc", "string", "String", "as_mut_str"];
 pub const STRING_AS_STR: [&str; 4] = ["alloc", "string", "String", "as_str"];
 pub const STRING_NEW: [&str; 4] = ["alloc", "string", "String", "new"];
-pub const STR_CHARS: [&str; 4] = ["core", "str", "<impl str>", "chars"];
 pub const STR_ENDS_WITH: [&str; 4] = ["core", "str", "<impl str>", "ends_with"];
 pub const STR_LEN: [&str; 4] = ["core", "str", "<impl str>", "len"];
 pub const STR_STARTS_WITH: [&str; 4] = ["core", "str", "<impl str>", "starts_with"];
@@ -100,7 +92,6 @@ pub const TOKIO_IO_OPEN_OPTIONS: [&str; 4] = ["tokio", "fs", "open_options", "Op
 pub const TOKIO_IO_OPEN_OPTIONS_NEW: [&str; 5] = ["tokio", "fs", "open_options", "OpenOptions", "new"];
 pub const VEC_AS_MUT_SLICE: [&str; 4] = ["alloc", "vec", "Vec", "as_mut_slice"];
 pub const VEC_AS_SLICE: [&str; 4] = ["alloc", "vec", "Vec", "as_slice"];
-pub const VEC_DEQUE_ITER: [&str; 5] = ["alloc", "collections", "vec_deque", "VecDeque", "iter"];
 pub const VEC_FROM_ELEM: [&str; 3] = ["alloc", "vec", "from_elem"];
 pub const VEC_NEW: [&str; 4] = ["alloc", "vec", "Vec", "new"];
 pub const VEC_WITH_CAPACITY: [&str; 4] = ["alloc", "vec", "Vec", "with_capacity"];
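Background (not part of this diff): a diagnostic item is a name attached to a definition with the `rustc_diagnostic_item` attribute, normally inside the standard library, so that tools like Clippy can find the item without spelling out its full path. A rough, hypothetical sketch of such an annotation; `Widget` and `widget_iter` are invented, and the real annotations for the methods matched above live in `core`/`alloc`/`std`:

// Hypothetical illustration only: `rustc_diagnostic_item` is an internal
// attribute and is normally written in the standard library, e.g. wrapped as
// `#[cfg_attr(not(test), rustc_diagnostic_item = "binaryheap_iter")]`.
pub struct Widget;

impl Widget {
    // A lint can now recognise this method by the name "widget_iter"
    // instead of matching a path such as ["my_crate", "Widget", "iter"].
    #[rustc_diagnostic_item = "widget_iter"]
    pub fn iter(&self) {}
}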