use crate::utils::{match_def_path, span_lint_and_help};
use if_chain::if_chain;
use rustc::ty;
use rustc_hir::def_id::DefId;
use rustc_hir::{Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_lint_pass, declare_tool_lint};

declare_clippy_lint! {
    /// **What it does:** Checks for usage of invalid atomic
    /// ordering in atomic loads/stores and memory fences.
    ///
    /// **Why is this bad?** Using an invalid atomic ordering
    /// will cause a panic at run-time.
    ///
    /// **Known problems:** None.
    ///
    /// **Example:**
    /// ```rust,no_run
    /// # use std::sync::atomic::{self, AtomicBool, Ordering};
    ///
    /// let x = AtomicBool::new(true);
    ///
    /// let _ = x.load(Ordering::Release);
    /// let _ = x.load(Ordering::AcqRel);
    ///
    /// x.store(false, Ordering::Acquire);
    /// x.store(false, Ordering::AcqRel);
    ///
    /// atomic::fence(Ordering::Relaxed);
    /// atomic::compiler_fence(Ordering::Relaxed);
    /// ```
    pub INVALID_ATOMIC_ORDERING,
    correctness,
    "usage of invalid atomic ordering in atomic loads/stores and memory fences"
}
declare_lint_pass!(AtomicOrdering => [INVALID_ATOMIC_ORDERING]);
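
// Names of the `core::sync::atomic` types whose `load`/`store` calls are checked below.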
const ATOMIC_TYPES: [&str; 12] = [
    "AtomicBool",
    "AtomicI8",
    "AtomicI16",
    "AtomicI32",
    "AtomicI64",
    "AtomicIsize",
    "AtomicPtr",
    "AtomicU8",
    "AtomicU16",
    "AtomicU32",
    "AtomicU64",
    "AtomicUsize",
];
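
// Returns true if the expression's type is one of the atomic types listed in
// `ATOMIC_TYPES`. Matching is done on the `core::sync::atomic` definition path,
// so uses through the `std::sync::atomic` re-exports are covered as well.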
fn type_is_atomic(cx: &LateContext<'_, '_>, expr: &Expr<'_>) -> bool {
    if let ty::Adt(&ty::AdtDef { did, .. }, _) = cx.tables.expr_ty(expr).kind {
        ATOMIC_TYPES
            .iter()
            .any(|ty| match_def_path(cx, did, &["core", "sync", "atomic", ty]))
    } else {
        false
    }
}
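
// Returns true if `did` resolves to one of the named variants of
// `core::sync::atomic::Ordering`, e.g. `&["Release", "AcqRel"]`.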
fn match_ordering_def_path(cx: &LateContext<'_, '_>, did: DefId, orderings: &[&str]) -> bool {
    orderings
        .iter()
        .any(|ordering| match_def_path(cx, did, &["core", "sync", "atomic", "Ordering", ordering]))
}
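
// Lints `load`/`store` calls on atomic types whose ordering argument is invalid:
// per the std docs, `load` panics at run-time with `Release` or `AcqRel`
// (e.g. `x.load(Ordering::Release)`), and `store` panics with `Acquire` or `AcqRel`.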
fn check_atomic_load_store(cx: &LateContext<'_, '_>, expr: &Expr<'_>) {
    if_chain! {
        if let ExprKind::MethodCall(ref method_path, _, args) = &expr.kind;
        let method = method_path.ident.name.as_str();
        if type_is_atomic(cx, &args[0]);
        if method == "load" || method == "store";
        let ordering_arg = if method == "load" { &args[1] } else { &args[2] };
        if let ExprKind::Path(ref ordering_qpath) = ordering_arg.kind;
        if let Some(ordering_def_id) = cx.tables.qpath_res(ordering_qpath, ordering_arg.hir_id).opt_def_id();
        then {
            if method == "load" &&
                match_ordering_def_path(cx, ordering_def_id, &["Release", "AcqRel"]) {
                span_lint_and_help(
                    cx,
                    INVALID_ATOMIC_ORDERING,
                    ordering_arg.span,
                    "atomic loads cannot have `Release` and `AcqRel` ordering",
                    "consider using ordering modes `Acquire`, `SeqCst` or `Relaxed`"
                );
            } else if method == "store" &&
                match_ordering_def_path(cx, ordering_def_id, &["Acquire", "AcqRel"]) {
                span_lint_and_help(
                    cx,
                    INVALID_ATOMIC_ORDERING,
                    ordering_arg.span,
                    "atomic stores cannot have `Acquire` and `AcqRel` ordering",
                    "consider using ordering modes `Release`, `SeqCst` or `Relaxed`"
                );
            }
        }
    }
}
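
// Lints `atomic::fence` and `atomic::compiler_fence` calls that pass
// `Ordering::Relaxed`, which the std docs document as a run-time panic.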
fn check_memory_fence(cx: &LateContext<'_, '_>, expr: &Expr<'_>) {
    if_chain! {
        if let ExprKind::Call(ref func, ref args) = expr.kind;
        if let ExprKind::Path(ref func_qpath) = func.kind;
        if let Some(def_id) = cx.tables.qpath_res(func_qpath, func.hir_id).opt_def_id();
        if ["fence", "compiler_fence"]
            .iter()
            .any(|func| match_def_path(cx, def_id, &["core", "sync", "atomic", func]));
        if let ExprKind::Path(ref ordering_qpath) = &args[0].kind;
        if let Some(ordering_def_id) = cx.tables.qpath_res(ordering_qpath, args[0].hir_id).opt_def_id();
        if match_ordering_def_path(cx, ordering_def_id, &["Relaxed"]);
        then {
            span_lint_and_help(
                cx,
                INVALID_ATOMIC_ORDERING,
                args[0].span,
                "memory fences cannot have `Relaxed` ordering",
                "consider using ordering modes `Acquire`, `Release`, `AcqRel` or `SeqCst`"
            );
        }
    }
}
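
// Run both checks on every expression visited by this late lint pass.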
impl<'a, 'tcx> LateLintPass<'a, 'tcx> for AtomicOrdering {
    fn check_expr(&mut self, cx: &LateContext<'a, 'tcx>, expr: &'tcx Expr<'_>) {
        check_atomic_load_store(cx, expr);
        check_memory_fence(cx, expr);
    }
}