use crate::utils::{match_def_path, paths, span_lint_and_note};
use rustc_hir::def_id::DefId;
use rustc_hir::{AsyncGeneratorKind, Body, BodyId, GeneratorKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty::GeneratorInteriorTypeCause;
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::Span;

declare_clippy_lint! {
    /// **What it does:** Checks for calls to `await` while holding a
    /// non-async-aware `MutexGuard`.
    ///
    /// **Why is this bad?** The Mutex types found in `std::sync` and `parking_lot`
    /// are not designed to operate in an async context across await points.
    ///
    /// There are two potential solutions. One is to use an async-aware Mutex
    /// type. Many asynchronous foundation crates provide such a Mutex type. The
    /// other solution is to ensure the mutex is unlocked before calling `await`,
    /// either by introducing a scope or an explicit call to `Drop::drop`, as
    /// shown in the examples below.
    ///
    /// **Known problems:** None.
    ///
    /// **Example:**
    ///
    /// ```rust,ignore
    /// use std::sync::Mutex;
    ///
    /// async fn foo(x: &Mutex<u32>) {
    ///     let mut guard = x.lock().unwrap();
    ///     *guard += 1;
    ///     bar.await;
    /// }
    /// ```
    ///
    /// Use instead:
    /// ```rust,ignore
    /// use std::sync::Mutex;
    ///
    /// async fn foo(x: &Mutex<u32>) {
    ///     {
    ///         let mut guard = x.lock().unwrap();
    ///         *guard += 1;
    ///     }
    ///     bar.await;
    /// }
    /// ```
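    ///
    /// Alternatively, drop the guard explicitly before the `await`. This is a
    /// minimal sketch of the `Drop::drop` approach mentioned above; as in the
    /// examples, `bar` stands in for some future in scope:
    /// ```rust,ignore
    /// use std::sync::Mutex;
    ///
    /// async fn foo(x: &Mutex<u32>) {
    ///     let mut guard = x.lock().unwrap();
    ///     *guard += 1;
    ///     drop(guard);
    ///     bar.await;
    /// }
    /// ```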
    pub AWAIT_HOLDING_LOCK,
    pedantic,
    "Inside an async function, holding a MutexGuard while calling await"
}

declare_clippy_lint! {
    /// **What it does:** Checks for calls to `await` while holding a
    /// `RefCell` `Ref` or `RefMut`.
    ///
    /// **Why is this bad?** `RefCell` refs only check for exclusive mutable access
    /// at runtime. Holding a `RefCell` ref across an `await` suspension point
    /// risks panics, because a conflicting borrow can be attempted while the
    /// first borrow is still outstanding.
    ///
    /// **Known problems:** None.
    ///
    /// **Example:**
    ///
    /// ```rust,ignore
    /// use std::cell::RefCell;
    ///
    /// async fn foo(x: &RefCell<u32>) {
    ///     let mut y = x.borrow_mut();
    ///     *y += 1;
    ///     bar.await;
    /// }
    /// ```
    ///
    /// Use instead:
    /// ```rust,ignore
    /// use std::cell::RefCell;
    ///
    /// async fn foo(x: &RefCell<u32>) {
    ///     {
    ///         let mut y = x.borrow_mut();
    ///         *y += 1;
    ///     }
    ///     bar.await;
    /// }
    /// ```
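    ///
    /// Or, equivalently, drop the borrow explicitly before the `await` (a sketch
    /// in the same spirit as the examples above; `bar` again stands in for some
    /// future in scope):
    /// ```rust,ignore
    /// use std::cell::RefCell;
    ///
    /// async fn foo(x: &RefCell<u32>) {
    ///     let mut y = x.borrow_mut();
    ///     *y += 1;
    ///     drop(y);
    ///     bar.await;
    /// }
    /// ```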
    pub AWAIT_HOLDING_REFCELL_REF,
    pedantic,
    "Inside an async function, holding a RefCell ref while calling await"
}

declare_lint_pass!(AwaitHolding => [AWAIT_HOLDING_LOCK, AWAIT_HOLDING_REFCELL_REF]);

impl LateLintPass<'_> for AwaitHolding {
    fn check_body(&mut self, cx: &LateContext<'_>, body: &'_ Body<'_>) {
        use AsyncGeneratorKind::{Block, Closure, Fn};
        // Only async bodies (async fns, blocks, and closures) have await points.
        if let Some(GeneratorKind::Async(Block | Closure | Fn)) = body.generator_kind {
            let body_id = BodyId {
                hir_id: body.value.hir_id,
            };
            let def_id = cx.tcx.hir().body_owner_def_id(body_id);
            let typeck_results = cx.tcx.typeck(def_id);
            // `generator_interior_types` records the types that are live across the
            // generator's suspension (await) points, along with their spans.
            check_interior_types(
                cx,
                &typeck_results.generator_interior_types.as_ref().skip_binder(),
                body.value.span,
            );
        }
    }
}

fn check_interior_types(cx: &LateContext<'_>, ty_causes: &[GeneratorInteriorTypeCause<'_>], span: Span) {
    for ty_cause in ty_causes {
        if let rustc_middle::ty::Adt(adt, _) = ty_cause.ty.kind() {
            if is_mutex_guard(cx, adt.did) {
                span_lint_and_note(
                    cx,
                    AWAIT_HOLDING_LOCK,
                    ty_cause.span,
                    "this MutexGuard is held across an 'await' point. Consider using an async-aware Mutex type or ensuring the MutexGuard is dropped before calling await.",
                    // Point the note at the scope the guard lives in, falling back to the whole body span.
                    ty_cause.scope_span.or(Some(span)),
                    "these are all the await points this lock is held through",
                );
            }
            if is_refcell_ref(cx, adt.did) {
                span_lint_and_note(
                    cx,
                    AWAIT_HOLDING_REFCELL_REF,
                    ty_cause.span,
                    "this RefCell Ref is held across an 'await' point. Consider ensuring the Ref is dropped before calling await.",
                    ty_cause.scope_span.or(Some(span)),
                    "these are all the await points this ref is held through",
                );
            }
        }
    }
}

fn is_mutex_guard(cx: &LateContext<'_>, def_id: DefId) -> bool {
    match_def_path(cx, def_id, &paths::MUTEX_GUARD)
        || match_def_path(cx, def_id, &paths::RWLOCK_READ_GUARD)
        || match_def_path(cx, def_id, &paths::RWLOCK_WRITE_GUARD)
        || match_def_path(cx, def_id, &paths::PARKING_LOT_MUTEX_GUARD)
        || match_def_path(cx, def_id, &paths::PARKING_LOT_RWLOCK_READ_GUARD)
        || match_def_path(cx, def_id, &paths::PARKING_LOT_RWLOCK_WRITE_GUARD)
}

fn is_refcell_ref(cx: &LateContext<'_>, def_id: DefId) -> bool {
    match_def_path(cx, def_id, &paths::REFCELL_REF) || match_def_path(cx, def_id, &paths::REFCELL_REFMUT)
}