// clippy_lints/src/escape.rs

use clippy_utils::diagnostics::span_lint_hir;
use clippy_utils::ty::contains_ty;
use rustc_hir::intravisit;
use rustc_hir::{self, AssocItemKind, Body, FnDecl, HirId, HirIdSet, Impl, ItemKind, Node};
use rustc_infer::infer::TyCtxtInferExt;
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::mir::FakeReadCause;
use rustc_middle::ty::layout::LayoutOf;
use rustc_middle::ty::{self, TraitRef, Ty};
use rustc_session::{declare_tool_lint, impl_lint_pass};
use rustc_span::source_map::Span;
use rustc_span::symbol::kw;
use rustc_target::spec::abi::Abi;
use rustc_typeck::expr_use_visitor::{Delegate, ExprUseVisitor, PlaceBase, PlaceWithHirId};
#[derive(Copy, Clone)]
pub struct BoxedLocal {
    /// Size threshold (in bytes, from the `too-large-for-stack`
    /// configuration): boxed values larger than this are considered
    /// legitimately boxed and are not linted.
    pub too_large_for_stack: u64,
}
2015-12-04 10:12:53 +00:00
2018-03-28 13:24:26 +00:00
declare_clippy_lint! {
    /// ### What it does
    /// Checks for usage of `Box<T>` where an unboxed `T` would
    /// work fine.
    ///
    /// ### Why is this bad?
    /// This is an unnecessary allocation, and bad for
    /// performance. It is only necessary to allocate if you wish to move the box
    /// into something.
    ///
    /// ### Example
    /// ```rust
    /// # fn foo(bar: usize) {}
    /// let x = Box::new(1);
    /// foo(*x);
    /// println!("{}", *x);
    /// ```
    ///
    /// Use instead:
    /// ```rust
    /// # fn foo(bar: usize) {}
    /// let x = 1;
    /// foo(x);
    /// println!("{}", x);
    /// ```
    #[clippy::version = "pre 1.29.0"]
    pub BOXED_LOCAL,
    perf,
    "using `Box<T>` where unnecessary"
}
2015-12-04 10:12:53 +00:00
2018-07-23 11:01:12 +00:00
fn is_non_trait_box(ty: Ty<'_>) -> bool {
2017-02-03 10:52:13 +00:00
ty.is_box() && !ty.boxed_ty().is_trait()
2015-12-28 14:12:57 +00:00
}
struct EscapeDelegate<'a, 'tcx> {
cx: &'a LateContext<'tcx>,
2019-03-01 12:26:06 +00:00
set: HirIdSet,
2021-07-17 14:43:23 +00:00
trait_self_ty: Option<Ty<'tcx>>,
2016-07-10 13:23:50 +00:00
too_large_for_stack: u64,
2015-12-04 10:12:53 +00:00
}
2019-04-08 20:43:55 +00:00
impl_lint_pass!(BoxedLocal => [BOXED_LOCAL]);
impl<'tcx> LateLintPass<'tcx> for BoxedLocal {
fn check_fn(
&mut self,
cx: &LateContext<'tcx>,
fn_kind: intravisit::FnKind<'tcx>,
2019-12-30 04:02:10 +00:00
_: &'tcx FnDecl<'_>,
2019-12-22 14:42:41 +00:00
body: &'tcx Body<'_>,
_: Span,
2019-02-20 10:11:11 +00:00
hir_id: HirId,
) {
if let Some(header) = fn_kind.header() {
if header.abi != Abi::Rust {
return;
}
}
2019-02-20 10:11:11 +00:00
let parent_id = cx.tcx.hir().get_parent_item(hir_id);
let parent_node = cx.tcx.hir().find_by_def_id(parent_id);
let mut trait_self_ty = None;
if let Some(Node::Item(item)) = parent_node {
// If the method is an impl for a trait, don't warn.
if let ItemKind::Impl(Impl { of_trait: Some(_), .. }) = item.kind {
return;
}
// find `self` ty for this trait if relevant
if let ItemKind::Trait(_, _, _, _, items) = item.kind {
for trait_item in items {
if trait_item.id.hir_id() == hir_id {
// be sure we have `self` parameter in this function
if trait_item.kind == (AssocItemKind::Fn { has_self: true }) {
trait_self_ty = Some(
TraitRef::identity(cx.tcx, trait_item.id.def_id.to_def_id())
.self_ty()
.skip_binder(),
);
}
}
}
}
}
2015-12-04 10:12:53 +00:00
let mut v = EscapeDelegate {
cx,
2019-03-01 12:26:06 +00:00
set: HirIdSet::default(),
trait_self_ty,
2016-07-10 13:23:50 +00:00
too_large_for_stack: self.too_large_for_stack,
2015-12-04 10:12:53 +00:00
};
let fn_def_id = cx.tcx.hir().local_def_id(hir_id);
2019-11-29 10:12:19 +00:00
cx.tcx.infer_ctxt().enter(|infcx| {
2020-07-17 08:47:04 +00:00
ExprUseVisitor::new(&mut v, &infcx, fn_def_id, cx.param_env, cx.typeck_results()).consume_body(body);
2019-11-29 10:12:19 +00:00
});
for node in v.set {
span_lint_hir(
2017-08-09 07:30:56 +00:00
cx,
BOXED_LOCAL,
node,
cx.tcx.hir().span(node),
2017-08-09 07:30:56 +00:00
"local variable doesn't need to be boxed here",
);
2015-12-04 10:12:53 +00:00
}
}
}
// TODO: Replace with Map::is_argument(..) when it's fixed
/// Returns `true` when `id` is a binding pattern that sits directly inside
/// a function parameter (i.e. the binding *is* an argument).
fn is_argument(map: rustc_middle::hir::map::Map<'_>, id: HirId) -> bool {
    // Must be a binding pattern at all...
    if !matches!(map.find(id), Some(Node::Binding(_))) {
        return false;
    }
    // ...whose immediate parent is a `Param` node.
    matches!(map.find(map.get_parent_node(id)), Some(Node::Param(_)))
}
impl<'a, 'tcx> Delegate<'tcx> for EscapeDelegate<'a, 'tcx> {
fn consume(&mut self, cmt: &PlaceWithHirId<'tcx>, _: HirId) {
if cmt.place.projections.is_empty() {
if let PlaceBase::Local(lid) = cmt.place.base {
self.set.remove(&lid);
let map = &self.cx.tcx.hir();
if let Some(Node::Binding(_)) = map.find(cmt.hir_id) {
if self.set.contains(&lid) {
// let y = x where x is known
// remove x, insert y
self.set.insert(cmt.hir_id);
self.set.remove(&lid);
}
}
2015-12-04 10:12:53 +00:00
}
}
}
fn borrow(&mut self, cmt: &PlaceWithHirId<'tcx>, _: HirId, _: ty::BorrowKind) {
if cmt.place.projections.is_empty() {
if let PlaceBase::Local(lid) = cmt.place.base {
self.set.remove(&lid);
}
2015-12-04 10:12:53 +00:00
}
}
fn mutate(&mut self, cmt: &PlaceWithHirId<'tcx>, _: HirId) {
if cmt.place.projections.is_empty() {
let map = &self.cx.tcx.hir();
if is_argument(*map, cmt.hir_id) {
// Skip closure arguments
let parent_id = map.get_parent_node(cmt.hir_id);
if let Some(Node::Expr(..)) = map.find(map.get_parent_node(parent_id)) {
return;
}
2019-10-06 12:49:26 +00:00
// skip if there is a `self` parameter binding to a type
// that contains `Self` (i.e.: `self: Box<Self>`), see #4804
if let Some(trait_self_ty) = self.trait_self_ty {
2022-01-12 03:19:52 +00:00
if map.name(cmt.hir_id) == kw::SelfLower && contains_ty(cmt.place.ty(), trait_self_ty) {
return;
}
}
if is_non_trait_box(cmt.place.ty()) && !self.is_large_box(cmt.place.ty()) {
self.set.insert(cmt.hir_id);
}
2019-10-06 12:49:26 +00:00
}
}
}
2021-02-25 20:33:18 +00:00
fn fake_read(&mut self, _: &rustc_typeck::expr_use_visitor::PlaceWithHirId<'tcx>, _: FakeReadCause, _: HirId) {}
2015-12-04 10:12:53 +00:00
}
2016-07-10 13:23:50 +00:00
impl<'a, 'tcx> EscapeDelegate<'a, 'tcx> {
    /// Returns `true` if `ty` is a box whose pointee exceeds the configured
    /// `too_large_for_stack` threshold; such boxes are kept to avoid stack
    /// overflows and are not linted.
    fn is_large_box(&self, ty: Ty<'tcx>) -> bool {
        // Large types need to be boxed to avoid stack overflows.
        if ty.is_box() {
            // If layout computation fails, treat the size as 0 (not large).
            self.cx.layout_of(ty.boxed_ty()).map_or(0, |l| l.size.bytes()) > self.too_large_for_stack
        } else {
            false
        }
    }
}