use crate::consts::{constant, Constant};
use crate::utils::{is_expn_of, match_def_path, match_type, paths, span_help_and_lint, span_lint};
use if_chain::if_chain;
use regex_syntax;
use rustc::hir::*;
use rustc::lint::{LateContext, LateLintPass, LintArray, LintPass};
use rustc::{declare_tool_lint, lint_array};
use rustc_data_structures::fx::FxHashSet;
use std::convert::TryFrom;
use syntax::ast::{LitKind, StrStyle};
use syntax::source_map::{BytePos, Span};

declare_clippy_lint! {
    /// **What it does:** Checks [regex](https://crates.io/crates/regex) creation
    /// (with `Regex::new`, `RegexBuilder::new` or `RegexSet::new`) for correct
    /// regex syntax.
    ///
    /// **Why is this bad?** This will lead to a runtime panic.
    ///
    /// **Known problems:** None.
    ///
    /// **Example:**
    /// ```ignore
    /// Regex::new("(")
    /// ```
    pub INVALID_REGEX,
    correctness,
    "invalid regular expressions"
}

declare_clippy_lint! {
    /// **What it does:** Checks for trivial [regex](https://crates.io/crates/regex)
    /// creation (with `Regex::new`, `RegexBuilder::new` or `RegexSet::new`).
    ///
    /// **Why is this bad?** Matching the regex can likely be replaced by `==`,
    /// `str::starts_with`, `str::ends_with`, `str::contains` or another `str`
    /// method.
    ///
    /// **Known problems:** None.
    ///
    /// **Example:**
    /// ```ignore
    /// Regex::new("^foobar")
    /// ```
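    ///
    /// Matching against this regex is equivalent to `text.starts_with("foobar")`.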
    pub TRIVIAL_REGEX,
    style,
    "trivial regular expressions"
}

declare_clippy_lint! {
    /// **What it does:** Checks for usage of `regex!(_)` which (as of now) is
    /// usually slower than `Regex::new(_)` unless called in a loop (which is a bad
    /// idea anyway).
    ///
    /// **Why is this bad?** Performance, at least for now. The macro version is
    /// likely to catch up long-term, but for now the dynamic version is faster.
    ///
    /// **Known problems:** None.
    ///
    /// **Example:**
    /// ```ignore
    /// regex!("foo|bar")
    /// ```
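    ///
    /// Use `Regex::new("foo|bar")` instead.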
    pub REGEX_MACRO,
    style,
    "use of `regex!(_)` instead of `Regex::new(_)`"
}

#[derive(Clone, Default)]
pub struct Pass {
    spans: FxHashSet<Span>,
    last: Option<HirId>,
}

impl LintPass for Pass {
    fn get_lints(&self) -> LintArray {
        lint_array!(INVALID_REGEX, REGEX_MACRO, TRIVIAL_REGEX)
    }

    fn name(&self) -> &'static str {
        "Regex"
    }
}

impl<'a, 'tcx> LateLintPass<'a, 'tcx> for Pass {
    fn check_crate(&mut self, _: &LateContext<'a, 'tcx>, _: &'tcx Crate) {
        self.spans.clear();
    }
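    // `regex!(_)` expands to a block whose tail expression is a `Regex`; lint that
    // expansion once per span and remember the enclosing block so that nested
    // blocks of the same expansion are not reported again.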
    fn check_block(&mut self, cx: &LateContext<'a, 'tcx>, block: &'tcx Block) {
        if_chain! {
            if self.last.is_none();
            if let Some(ref expr) = block.expr;
            if match_type(cx, cx.tables.expr_ty(expr), &paths::REGEX);
            if let Some(span) = is_expn_of(expr.span, "regex");
            then {
                if !self.spans.contains(&span) {
                    span_lint(cx,
                              REGEX_MACRO,
                              span,
                              "`regex!(_)` found. \
                               Please use `Regex::new(_)`, which is faster for now.");
                    self.spans.insert(span);
                }
                self.last = Some(block.hir_id);
            }
        }
    }
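    // Once the block recorded above has been left, allow linting `regex!` again.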
    fn check_block_post(&mut self, _: &LateContext<'a, 'tcx>, block: &'tcx Block) {
        if self.last.map_or(false, |id| block.hir_id == id) {
            self.last = None;
        }
    }
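    // Calls to `Regex::new`, `RegexBuilder::new`, `RegexSet::new` and their `bytes`
    // counterparts are checked here; the boolean passed on tells the checks whether
    // the pattern is for a UTF-8 (`Regex`) or a byte (`bytes::Regex`) regex.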
    fn check_expr(&mut self, cx: &LateContext<'a, 'tcx>, expr: &'tcx Expr) {
        if_chain! {
            if let ExprKind::Call(ref fun, ref args) = expr.node;
            if let ExprKind::Path(ref qpath) = fun.node;
            if args.len() == 1;
            if let Some(def_id) = cx.tables.qpath_def(qpath, fun.hir_id).opt_def_id();
            then {
                if match_def_path(cx.tcx, def_id, &paths::REGEX_NEW) ||
                   match_def_path(cx.tcx, def_id, &paths::REGEX_BUILDER_NEW) {
                    check_regex(cx, &args[0], true);
                } else if match_def_path(cx.tcx, def_id, &paths::REGEX_BYTES_NEW) ||
                   match_def_path(cx.tcx, def_id, &paths::REGEX_BYTES_BUILDER_NEW) {
                    check_regex(cx, &args[0], false);
                } else if match_def_path(cx.tcx, def_id, &paths::REGEX_SET_NEW) {
                    check_set(cx, &args[0], true);
                } else if match_def_path(cx.tcx, def_id, &paths::REGEX_BYTES_SET_NEW) {
                    check_set(cx, &args[0], false);
                }
            }
        }
    }
}
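/// Maps a span reported by `regex_syntax` (byte offsets into the pattern) back to
/// the corresponding span inside the string literal; `offset` is the width of the
/// literal's opening delimiter.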
#[allow(clippy::cast_possible_truncation)] // truncation very unlikely here
fn str_span(base: Span, c: regex_syntax::ast::Span, offset: u16) -> Span {
    let offset = u32::from(offset);
    let end = base.lo() + BytePos(u32::try_from(c.end.offset).expect("offset too large") + offset);
    let start = base.lo() + BytePos(u32::try_from(c.start.offset).expect("offset too large") + offset);
    assert!(start <= end);
    Span::new(start, end, base.ctxt())
}
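/// Evaluates `e` and returns its value if it is a constant string (e.g. a `const`
/// pattern passed to `Regex::new`).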
fn const_str<'a, 'tcx>(cx: &LateContext<'a, 'tcx>, e: &'tcx Expr) -> Option<String> {
    constant(cx, cx.tables, e).and_then(|(c, _)| match c {
        Constant::Str(s) => Some(s),
        _ => None,
    })
}
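/// Returns a replacement suggestion if the parsed regex is trivial, i.e. it could
/// be expressed with a plain `str` comparison or method instead.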
fn is_trivial_regex(s: &regex_syntax::hir::Hir) -> Option<&'static str> {
    use regex_syntax::hir::Anchor::*;
    use regex_syntax::hir::HirKind::*;

    let is_literal = |e: &[regex_syntax::hir::Hir]| {
        e.iter().all(|e| match *e.kind() {
            Literal(_) => true,
            _ => false,
        })
    };

    match *s.kind() {
        Empty | Anchor(_) => Some("the regex is unlikely to be useful as it is"),
        Literal(_) => Some("consider using `str::contains`"),
        Alternation(ref exprs) => {
            if exprs.iter().all(|e| e.kind().is_empty()) {
                Some("the regex is unlikely to be useful as it is")
            } else {
                None
            }
        },
        Concat(ref exprs) => match (exprs[0].kind(), exprs[exprs.len() - 1].kind()) {
            (&Anchor(StartText), &Anchor(EndText)) if exprs[1..(exprs.len() - 1)].is_empty() => {
                Some("consider using `str::is_empty`")
            },
            (&Anchor(StartText), &Anchor(EndText)) if is_literal(&exprs[1..(exprs.len() - 1)]) => {
                Some("consider using `==` on `str`s")
            },
            (&Anchor(StartText), &Literal(_)) if is_literal(&exprs[1..]) => Some("consider using `str::starts_with`"),
            (&Literal(_), &Anchor(EndText)) if is_literal(&exprs[1..(exprs.len() - 1)]) => {
                Some("consider using `str::ends_with`")
            },
            _ if is_literal(exprs) => Some("consider using `str::contains`"),
            _ => None,
        },
        _ => None,
    }
}
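/// Checks every pattern in the slice literal passed to `RegexSet::new` (or its
/// `bytes` counterpart).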
fn check_set<'a, 'tcx>(cx: &LateContext<'a, 'tcx>, expr: &'tcx Expr, utf8: bool) {
    if_chain! {
        if let ExprKind::AddrOf(_, ref expr) = expr.node;
        if let ExprKind::Array(ref exprs) = expr.node;
        then {
            for expr in exprs {
                check_regex(cx, expr, utf8);
            }
        }
    }
}
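/// Parses the pattern argument of a regex constructor and emits `INVALID_REGEX`
/// for syntax errors or `TRIVIAL_REGEX` when a plain `str` method would do.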
fn check_regex<'a, 'tcx>(cx: &LateContext<'a, 'tcx>, expr: &'tcx Expr, utf8: bool) {
    let mut parser = regex_syntax::ParserBuilder::new()
        .unicode(utf8)
        .allow_invalid_utf8(!utf8)
        .build();

    if let ExprKind::Lit(ref lit) = expr.node {
        if let LitKind::Str(ref r, style) = lit.node {
            let r = &r.as_str();
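            // Offset past the literal's opening delimiter: one byte for `"`,
            // `2 + n` bytes for a raw string with `n` `#`s.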
            let offset = if let StrStyle::Raw(n) = style { 2 + n } else { 1 };
            match parser.parse(r) {
                Ok(r) => {
                    if let Some(repl) = is_trivial_regex(&r) {
                        span_help_and_lint(cx, TRIVIAL_REGEX, expr.span, "trivial regex", repl);
                    }
                },
                Err(regex_syntax::Error::Parse(e)) => {
                    span_lint(
                        cx,
                        INVALID_REGEX,
                        str_span(expr.span, *e.span(), offset),
                        &format!("regex syntax error: {}", e.kind()),
                    );
                },
                Err(regex_syntax::Error::Translate(e)) => {
                    span_lint(
                        cx,
                        INVALID_REGEX,
                        str_span(expr.span, *e.span(), offset),
                        &format!("regex syntax error: {}", e.kind()),
                    );
                },
                Err(e) => {
                    span_lint(cx, INVALID_REGEX, expr.span, &format!("regex syntax error: {}", e));
                },
            }
        }
    } else if let Some(r) = const_str(cx, expr) {
        match parser.parse(&r) {
            Ok(r) => {
                if let Some(repl) = is_trivial_regex(&r) {
                    span_help_and_lint(cx, TRIVIAL_REGEX, expr.span, "trivial regex", repl);
                }
            },
            Err(regex_syntax::Error::Parse(e)) => {
                span_lint(
                    cx,
                    INVALID_REGEX,
                    expr.span,
                    &format!("regex syntax error on position {}: {}", e.span().start.offset, e.kind()),
                );
            },
            Err(regex_syntax::Error::Translate(e)) => {
                span_lint(
                    cx,
                    INVALID_REGEX,
                    expr.span,
                    &format!("regex syntax error on position {}: {}", e.span().start.offset, e.kind()),
                );
            },
            Err(e) => {
                span_lint(cx, INVALID_REGEX, expr.span, &format!("regex syntax error: {}", e));
            },
        }
    }
}