3706: missing match arms diagnostic r=flodiebold a=JoshMcguigan

Following up on https://github.com/rust-analyzer/rust-analyzer/pull/3689#issuecomment-602718222, this PR creates a missing match arms diagnostic.

At the moment this is a very early draft, but I wanted to open it just to get some initial feedback.

Initial questions:

* Have I roughly created the correct boilerplate? 
* Inside the new `validate_match` function:
  * Am I correct in thinking I want to do validation by comparing the match arms against `match_expr`? And when analyzing `match_expr` I should be looking at it as a `hir_def::expr::Expr`?
* I mostly copied the chained if-let statements from the struct validation. Shouldn't there be an infallible way to get an AstPtr from the hir data structures?

Thanks for all the guidance.

Co-authored-by: Josh Mcguigan <joshmcg88@gmail.com>
This commit is contained in:
bors[bot] 2020-04-07 12:53:47 +00:00 committed by GitHub
commit 97b963b44b
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
12 changed files with 1552 additions and 11 deletions

1
Cargo.lock generated
View file

@ -995,6 +995,7 @@ dependencies = [
"ra_prof", "ra_prof",
"ra_syntax", "ra_syntax",
"rustc-hash", "rustc-hash",
"smallvec",
"stdx", "stdx",
"test_utils", "test_utils",
] ]

View file

@ -1,4 +1,4 @@
//! FIXME: write short doc here //! FIXME: write short doc here
pub use hir_def::diagnostics::UnresolvedModule; pub use hir_def::diagnostics::UnresolvedModule;
pub use hir_expand::diagnostics::{AstDiagnostic, Diagnostic, DiagnosticSink}; pub use hir_expand::diagnostics::{AstDiagnostic, Diagnostic, DiagnosticSink};
pub use hir_ty::diagnostics::{MissingFields, MissingOkInTailExpr, NoSuchField}; pub use hir_ty::diagnostics::{MissingFields, MissingMatchArms, MissingOkInTailExpr, NoSuchField};

View file

@ -9,6 +9,7 @@ doctest = false
[dependencies] [dependencies]
arrayvec = "0.5.1" arrayvec = "0.5.1"
smallvec = "1.2.0"
ena = "0.13.1" ena = "0.13.1"
log = "0.4.8" log = "0.4.8"
rustc-hash = "1.1.0" rustc-hash = "1.1.0"

File diff suppressed because it is too large Load diff

View file

@ -6,7 +6,7 @@ use hir_expand::{db::AstDatabase, name::Name, HirFileId, InFile};
use ra_syntax::{ast, AstNode, AstPtr, SyntaxNodePtr}; use ra_syntax::{ast, AstNode, AstPtr, SyntaxNodePtr};
use stdx::format_to; use stdx::format_to;
pub use hir_def::diagnostics::UnresolvedModule; pub use hir_def::{diagnostics::UnresolvedModule, expr::MatchArm};
pub use hir_expand::diagnostics::{AstDiagnostic, Diagnostic, DiagnosticSink}; pub use hir_expand::diagnostics::{AstDiagnostic, Diagnostic, DiagnosticSink};
#[derive(Debug)] #[derive(Debug)]
@ -62,6 +62,25 @@ impl AstDiagnostic for MissingFields {
} }
} }
#[derive(Debug)]
/// Diagnostic emitted when a `match` expression does not cover every
/// possible value of the type being matched on.
pub struct MissingMatchArms {
/// The file containing the offending `match` expression.
pub file: HirFileId,
/// Pointer to the scrutinee expression (the value being matched).
pub match_expr: AstPtr<ast::Expr>,
/// Pointer to the (incomplete) list of match arms.
pub arms: AstPtr<ast::MatchArmList>,
}
impl Diagnostic for MissingMatchArms {
    /// The diagnostic is anchored at the scrutinee expression of the `match`.
    fn source(&self) -> InFile<SyntaxNodePtr> {
        let node: SyntaxNodePtr = self.match_expr.into();
        InFile { file_id: self.file, value: node }
    }
    /// Human-readable, single-line description shown to the user.
    fn message(&self) -> String {
        "Missing match arm".to_string()
    }
    /// Allows downcasting in `DiagnosticSink::on::<MissingMatchArms, _>`.
    fn as_any(&self) -> &(dyn Any + Send + 'static) {
        self
    }
}
#[derive(Debug)] #[derive(Debug)]
pub struct MissingOkInTailExpr { pub struct MissingOkInTailExpr {
pub file: HirFileId, pub file: HirFileId,

View file

@ -14,9 +14,10 @@ use rustc_hash::FxHashSet;
use crate::{ use crate::{
db::HirDatabase, db::HirDatabase,
diagnostics::{MissingFields, MissingOkInTailExpr}, diagnostics::{MissingFields, MissingMatchArms, MissingOkInTailExpr},
utils::variant_data, utils::variant_data,
ApplicationTy, InferenceResult, Ty, TypeCtor, ApplicationTy, InferenceResult, Ty, TypeCtor,
_match::{is_useful, MatchCheckCtx, Matrix, PatStack, Usefulness},
}; };
pub use hir_def::{ pub use hir_def::{
@ -52,15 +53,99 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
for e in body.exprs.iter() { for e in body.exprs.iter() {
if let (id, Expr::RecordLit { path, fields, spread }) = e { if let (id, Expr::RecordLit { path, fields, spread }) = e {
self.validate_record_literal(id, path, fields, *spread, db); self.validate_record_literal(id, path, fields, *spread, db);
} else if let (id, Expr::Match { expr, arms }) = e {
self.validate_match(id, *expr, arms, db, self.infer.clone());
} }
} }
let body_expr = &body[body.body_expr]; let body_expr = &body[body.body_expr];
if let Expr::Block { statements: _, tail: Some(t) } = body_expr { if let Expr::Block { tail: Some(t), .. } = body_expr {
self.validate_results_in_tail_expr(body.body_expr, *t, db); self.validate_results_in_tail_expr(body.body_expr, *t, db);
} }
} }
/// Checks a `match` expression for exhaustiveness and pushes a
/// `MissingMatchArms` diagnostic when some values are not covered.
///
/// `id` is the hir id of the whole `match` expression, `match_expr` the id
/// of its scrutinee, and `arms` the arms as recorded in the body. The check
/// is conservative: whenever a type cannot be resolved or the usefulness
/// algorithm reports an unimplemented case, no diagnostic is emitted.
fn validate_match(
    &mut self,
    id: ExprId,
    match_expr: ExprId,
    arms: &[MatchArm],
    db: &dyn HirDatabase,
    infer: Arc<InferenceResult>,
) {
    let (body, source_map): (Arc<Body>, Arc<BodySourceMap>) =
        db.body_with_source_map(self.func.into());

    // Without the scrutinee's type we cannot reason about exhaustiveness at all.
    let match_expr_ty = match infer.type_of_expr.get(match_expr) {
        Some(ty) => ty,
        None => return,
    };

    let cx = MatchCheckCtx { body, infer: infer.clone(), db };
    let mut seen = Matrix::empty();

    for pat in arms.iter().map(|arm| arm.pat) {
        // Patterns whose type we cannot resolve are skipped. This could lead
        // to false positives in this diagnostic; it might be better to skip
        // the whole diagnostic if any arm cannot be resolved or has the
        // wrong type.
        let pat_ty = match infer.type_of_pat.get(pat) {
            Some(ty) => ty,
            None => continue,
        };

        // Only feed the matrix patterns whose type agrees with the scrutinee.
        // rustc auto-dereferences the match expression when needed, so a
        // pattern matching the pointee type is also accepted.
        // (If we had an InvalidMatchArmPattern diagnostic, a mismatch here is
        // where it would be raised.)
        // FIXME we should use the type checker for this.
        let type_matches = pat_ty == match_expr_ty
            || match_expr_ty
                .as_reference()
                .map(|(deref_ty, _)| deref_ty == pat_ty)
                .unwrap_or(false);

        if type_matches {
            // If we had a NotUsefulMatchArm diagnostic, each pattern's
            // usefulness could be checked right here before pushing.
            seen.push(&cx, PatStack::from_pattern(pat));
        }
    }

    // If a wildcard would still be useful after all arms, some values are
    // uncovered; if it is NOT useful, the match is exhaustive. Any
    // unimplemented-check error makes us stay silent rather than risk a
    // false positive.
    match is_useful(&cx, &seen, &PatStack::from_wild()) {
        Ok(Usefulness::Useful) => {}
        Ok(Usefulness::NotUseful) => return,
        _ => return,
    }

    // Map the hir expression back to its syntax node so the diagnostic can
    // carry precise pointers into the source file.
    let source_ptr = match source_map.expr_syntax(id) {
        Ok(ptr) => ptr,
        Err(_) => return,
    };
    let expr_ptr = match source_ptr.value.left() {
        Some(ptr) => ptr,
        None => return,
    };
    let root = source_ptr.file_syntax(db.upcast());
    if let ast::Expr::MatchExpr(match_ast) = expr_ptr.to_node(&root) {
        if let (Some(scrutinee), Some(arm_list)) =
            (match_ast.expr(), match_ast.match_arm_list())
        {
            self.sink.push(MissingMatchArms {
                file: source_ptr.file_id,
                match_expr: AstPtr::new(&scrutinee),
                arms: AstPtr::new(&arm_list),
            })
        }
    }
}
fn validate_record_literal( fn validate_record_literal(
&mut self, &mut self,
id: ExprId, id: ExprId,

View file

@ -21,9 +21,13 @@ impl<'a> InferenceContext<'a> {
subpats: &[PatId], subpats: &[PatId],
expected: &Ty, expected: &Ty,
default_bm: BindingMode, default_bm: BindingMode,
id: PatId,
) -> Ty { ) -> Ty {
let (ty, def) = self.resolve_variant(path); let (ty, def) = self.resolve_variant(path);
let var_data = def.map(|it| variant_data(self.db.upcast(), it)); let var_data = def.map(|it| variant_data(self.db.upcast(), it));
if let Some(variant) = def {
self.write_variant_resolution(id.into(), variant);
}
self.unify(&ty, expected); self.unify(&ty, expected);
let substs = ty.substs().unwrap_or_else(Substs::empty); let substs = ty.substs().unwrap_or_else(Substs::empty);
@ -152,7 +156,7 @@ impl<'a> InferenceContext<'a> {
Ty::apply_one(TypeCtor::Ref(*mutability), subty) Ty::apply_one(TypeCtor::Ref(*mutability), subty)
} }
Pat::TupleStruct { path: p, args: subpats } => { Pat::TupleStruct { path: p, args: subpats } => {
self.infer_tuple_struct_pat(p.as_ref(), subpats, expected, default_bm) self.infer_tuple_struct_pat(p.as_ref(), subpats, expected, default_bm, pat)
} }
Pat::Record { path: p, args: fields } => { Pat::Record { path: p, args: fields } => {
self.infer_record_pat(p.as_ref(), fields, expected, default_bm, pat) self.infer_record_pat(p.as_ref(), fields, expected, default_bm, pat)

View file

@ -67,8 +67,16 @@ impl<'a> InferenceContext<'a> {
ValueNs::FunctionId(it) => it.into(), ValueNs::FunctionId(it) => it.into(),
ValueNs::ConstId(it) => it.into(), ValueNs::ConstId(it) => it.into(),
ValueNs::StaticId(it) => it.into(), ValueNs::StaticId(it) => it.into(),
ValueNs::StructId(it) => it.into(), ValueNs::StructId(it) => {
ValueNs::EnumVariantId(it) => it.into(), self.write_variant_resolution(id, it.into());
it.into()
}
ValueNs::EnumVariantId(it) => {
self.write_variant_resolution(id, it.into());
it.into()
}
}; };
let ty = self.db.value_ty(typable); let ty = self.db.value_ty(typable);

View file

@ -43,6 +43,7 @@ mod tests;
#[cfg(test)] #[cfg(test)]
mod test_db; mod test_db;
mod marks; mod marks;
mod _match;
use std::ops::Deref; use std::ops::Deref;
use std::sync::Arc; use std::sync::Arc;

View file

@ -105,8 +105,9 @@ impl TestDB {
} }
// FIXME: don't duplicate this // FIXME: don't duplicate this
pub fn diagnostics(&self) -> String { pub fn diagnostics(&self) -> (String, u32) {
let mut buf = String::new(); let mut buf = String::new();
let mut count = 0;
let crate_graph = self.crate_graph(); let crate_graph = self.crate_graph();
for krate in crate_graph.iter() { for krate in crate_graph.iter() {
let crate_def_map = self.crate_def_map(krate); let crate_def_map = self.crate_def_map(krate);
@ -133,13 +134,14 @@ impl TestDB {
let infer = self.infer(f.into()); let infer = self.infer(f.into());
let mut sink = DiagnosticSink::new(|d| { let mut sink = DiagnosticSink::new(|d| {
format_to!(buf, "{:?}: {}\n", d.syntax_node(self).text(), d.message()); format_to!(buf, "{:?}: {}\n", d.syntax_node(self).text(), d.message());
count += 1;
}); });
infer.add_diagnostics(self, f, &mut sink); infer.add_diagnostics(self, f, &mut sink);
let mut validator = ExprValidator::new(f, infer, &mut sink); let mut validator = ExprValidator::new(f, infer, &mut sink);
validator.validate_body(self); validator.validate_body(self);
} }
} }
buf (buf, count)
} }
} }

View file

@ -309,7 +309,8 @@ fn no_such_field_diagnostics() {
} }
", ",
) )
.diagnostics(); .diagnostics()
.0;
assert_snapshot!(diagnostics, @r###" assert_snapshot!(diagnostics, @r###"
"baz: 62": no such field "baz: 62": no such field

View file

@ -101,6 +101,14 @@ pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic>
fix, fix,
}) })
}) })
.on::<hir::diagnostics::MissingMatchArms, _>(|d| {
res.borrow_mut().push(Diagnostic {
range: d.highlight_range(),
message: d.message(),
severity: Severity::Error,
fix: None,
})
})
.on::<hir::diagnostics::MissingOkInTailExpr, _>(|d| { .on::<hir::diagnostics::MissingOkInTailExpr, _>(|d| {
let node = d.ast(db); let node = d.ast(db);
let replacement = format!("Ok({})", node.syntax()); let replacement = format!("Ok({})", node.syntax());
@ -291,7 +299,7 @@ mod tests {
fn check_no_diagnostic(content: &str) { fn check_no_diagnostic(content: &str) {
let (analysis, file_id) = single_file(content); let (analysis, file_id) = single_file(content);
let diagnostics = analysis.diagnostics(file_id).unwrap(); let diagnostics = analysis.diagnostics(file_id).unwrap();
assert_eq!(diagnostics.len(), 0); assert_eq!(diagnostics.len(), 0, "expected no diagnostic, found one");
} }
#[test] #[test]