//! Various diagnostics for expressions that are collected together in one pass
//! through the body using inference results: mismatched arg counts, missing
//! fields, etc.
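//!
//! An illustrative trigger, mirroring the tests at the bottom of this file:
//!
//! ```text
//! fn zero() {}
//! fn f() { zero(1); } // MismatchedArgCount: Expected 0 arguments, found 1
//! ```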

use std::{cell::RefCell, sync::Arc};

use hir_def::{
    expr::Statement, path::path, resolver::HasResolver, AssocItemId, DefWithBodyId, HasModule,
};
use hir_expand::name;
use rustc_hash::FxHashSet;
use syntax::{ast, AstPtr};

use crate::{
    db::HirDatabase,
    diagnostics::{
        match_check::{
            self,
            usefulness::{compute_match_usefulness, expand_pattern, MatchCheckCtx, PatternArena},
        },
        MismatchedArgCount, MissingFields, MissingMatchArms, MissingOkOrSomeInTailExpr,
        MissingPatFields, RemoveThisSemicolon,
    },
    diagnostics_sink::DiagnosticSink,
    AdtId, InferenceResult, Interner, TyExt, TyKind,
};

pub(crate) use hir_def::{
    body::{Body, BodySourceMap},
    expr::{Expr, ExprId, MatchArm, Pat, PatId},
    LocalFieldId, VariantId,
};

use super::ReplaceFilterMapNextWithFindMap;

pub(super) struct ExprValidator<'a, 'b: 'a> {
    owner: DefWithBodyId,
    infer: Arc<InferenceResult>,
    sink: &'a mut DiagnosticSink<'b>,
}

impl<'a, 'b> ExprValidator<'a, 'b> {
    pub(super) fn new(
        owner: DefWithBodyId,
        infer: Arc<InferenceResult>,
        sink: &'a mut DiagnosticSink<'b>,
    ) -> ExprValidator<'a, 'b> {
        ExprValidator { owner, infer, sink }
    }
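
    // Usage sketch (an assumption about the caller, which lives outside this
    // file): the `hir` layer builds the validator once per body and drives it,
    // roughly `ExprValidator::new(owner, infer.clone(), sink).validate_body(db)`.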
    pub(super) fn validate_body(&mut self, db: &dyn HirDatabase) {
        self.check_for_filter_map_next(db);

        let body = db.body(self.owner);

        for (id, expr) in body.exprs.iter() {
            if let Some((variant_def, missed_fields, true)) =
                record_literal_missing_fields(db, &self.infer, id, expr)
            {
                self.create_record_literal_missing_fields_diagnostic(
                    id,
                    db,
                    variant_def,
                    missed_fields,
                );
            }

            match expr {
                Expr::Match { expr, arms } => {
                    self.validate_match(id, *expr, arms, db, self.infer.clone());
                }
                Expr::Call { .. } | Expr::MethodCall { .. } => {
                    self.validate_call(db, id, expr);
                }
                _ => {}
            }
        }
        for (id, pat) in body.pats.iter() {
            if let Some((variant_def, missed_fields, true)) =
                record_pattern_missing_fields(db, &self.infer, id, pat)
            {
                self.create_record_pattern_missing_fields_diagnostic(
                    id,
                    db,
                    variant_def,
                    missed_fields,
                );
            }
        }
        let body_expr = &body[body.body_expr];
        if let Expr::Block { statements, tail, .. } = body_expr {
            if let Some(t) = tail {
                self.validate_results_in_tail_expr(body.body_expr, *t, db);
            } else if let Some(Statement::Expr { expr: id, .. }) = statements.last() {
                self.validate_missing_tail_expr(body.body_expr, *id, db);
            }
        }
    }

    fn create_record_literal_missing_fields_diagnostic(
        &mut self,
        id: ExprId,
        db: &dyn HirDatabase,
        variant_def: VariantId,
        missed_fields: Vec<LocalFieldId>,
    ) {
        // XXX: only look at source_map if we do have missing fields
        let (_, source_map) = db.body_with_source_map(self.owner);

        if let Ok(source_ptr) = source_map.expr_syntax(id) {
            let root = source_ptr.file_syntax(db.upcast());
            if let ast::Expr::RecordExpr(record_expr) = &source_ptr.value.to_node(&root) {
                if let Some(_) = record_expr.record_expr_field_list() {
                    let variant_data = variant_def.variant_data(db.upcast());
                    let missed_fields = missed_fields
                        .into_iter()
                        .map(|idx| variant_data.fields()[idx].name.clone())
                        .collect();
                    self.sink.push(MissingFields {
                        file: source_ptr.file_id,
                        field_list_parent: AstPtr::new(&record_expr),
                        field_list_parent_path: record_expr.path().map(|path| AstPtr::new(&path)),
                        missed_fields,
                    })
                }
            }
        }
    }

    fn create_record_pattern_missing_fields_diagnostic(
        &mut self,
        id: PatId,
        db: &dyn HirDatabase,
        variant_def: VariantId,
        missed_fields: Vec<LocalFieldId>,
    ) {
        // XXX: only look at source_map if we do have missing fields
        let (_, source_map) = db.body_with_source_map(self.owner);

        if let Ok(source_ptr) = source_map.pat_syntax(id) {
            if let Some(expr) = source_ptr.value.as_ref().left() {
                let root = source_ptr.file_syntax(db.upcast());
                if let ast::Pat::RecordPat(record_pat) = expr.to_node(&root) {
                    if let Some(_) = record_pat.record_pat_field_list() {
                        let variant_data = variant_def.variant_data(db.upcast());
                        let missed_fields = missed_fields
                            .into_iter()
                            .map(|idx| variant_data.fields()[idx].name.clone())
                            .collect();
                        self.sink.push(MissingPatFields {
                            file: source_ptr.file_id,
                            field_list_parent: AstPtr::new(&record_pat),
                            field_list_parent_path: record_pat
                                .path()
                                .map(|path| AstPtr::new(&path)),
                            missed_fields,
                        })
                    }
                }
            }
        }
    }

    fn check_for_filter_map_next(&mut self, db: &dyn HirDatabase) {
        // Find the FunctionIds for Iterator::filter_map and Iterator::next
        let iterator_path = path![core::iter::Iterator];
        let resolver = self.owner.resolver(db.upcast());
        let iterator_trait_id = match resolver.resolve_known_trait(db.upcast(), &iterator_path) {
            Some(id) => id,
            None => return,
        };
        let iterator_trait_items = &db.trait_data(iterator_trait_id).items;
        let filter_map_function_id =
            match iterator_trait_items.iter().find(|item| item.0 == name![filter_map]) {
                Some((_, AssocItemId::FunctionId(id))) => id,
                _ => return,
            };
        let next_function_id = match iterator_trait_items.iter().find(|item| item.0 == name![next])
        {
            Some((_, AssocItemId::FunctionId(id))) => id,
            _ => return,
        };

        // Search function body for instances of .filter_map(..).next()
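        // Illustrative shape of the code this looks for (an assumption, not a
        // test fixture):
        //     (0..10).filter_map(|n| n.checked_mul(2)).next()
        // which the diagnostic suggests rewriting as
        //     (0..10).find_map(|n| n.checked_mul(2))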
        let body = db.body(self.owner);
        let mut prev = None;
        for (id, expr) in body.exprs.iter() {
            if let Expr::MethodCall { receiver, .. } = expr {
                let function_id = match self.infer.method_resolution(id) {
                    Some((id, _)) => id,
                    None => continue,
                };

                if function_id == *filter_map_function_id {
                    prev = Some(id);
                    continue;
                }

                if function_id == *next_function_id {
                    if let Some(filter_map_id) = prev {
                        if *receiver == filter_map_id {
                            let (_, source_map) = db.body_with_source_map(self.owner);
                            if let Ok(next_source_ptr) = source_map.expr_syntax(id) {
                                self.sink.push(ReplaceFilterMapNextWithFindMap {
                                    file: next_source_ptr.file_id,
                                    next_expr: next_source_ptr.value,
                                });
                            }
                        }
                    }
                }
            }
            prev = None;
        }
    }

    fn validate_call(&mut self, db: &dyn HirDatabase, call_id: ExprId, expr: &Expr) {
        // Check that the number of arguments matches the number of parameters.
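        //
        // Illustrative, mirroring the tests at the bottom of this file:
        //     fn one(arg: u8) {}
        //     fn f() { one(); } // Expected 1 argument, found 0
        // For method calls, the receiver is counted as the first argument below
        // and subtracted from both counts again before the diagnostic is built.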

        // FIXME: Due to shortcomings in the current type system implementation, only emit this
        // diagnostic if there are no type mismatches in the containing function.
        if self.infer.expr_type_mismatches().next().is_some() {
            return;
        }

        let is_method_call = matches!(expr, Expr::MethodCall { .. });
        let (sig, args) = match expr {
            Expr::Call { callee, args } => {
                let callee = &self.infer.type_of_expr[*callee];
                let sig = match callee.callable_sig(db) {
                    Some(sig) => sig,
                    None => return,
                };
                (sig, args.clone())
            }
            Expr::MethodCall { receiver, args, .. } => {
                let mut args = args.clone();
                args.insert(0, *receiver);

                let receiver = &self.infer.type_of_expr[*receiver];
                if receiver.strip_references().is_unknown() {
                    // if the receiver is of unknown type, it's very likely we
                    // don't know enough to correctly resolve the method call.
                    // This is kind of a band-aid for #6975.
                    return;
                }

                let (callee, subst) = match self.infer.method_resolution(call_id) {
                    Some(it) => it,
                    None => return,
                };
                let sig = db.callable_item_signature(callee.into()).substitute(&Interner, &subst);

                (sig, args)
            }
            _ => return,
        };

        if sig.is_varargs {
            return;
        }

        let params = sig.params();

        let mut param_count = params.len();
        let mut arg_count = args.len();

        if arg_count != param_count {
            let (_, source_map) = db.body_with_source_map(self.owner);
            if let Ok(source_ptr) = source_map.expr_syntax(call_id) {
                if is_method_call {
                    param_count -= 1;
                    arg_count -= 1;
                }
                self.sink.push(MismatchedArgCount {
                    file: source_ptr.file_id,
                    call_expr: source_ptr.value,
                    expected: param_count,
                    found: arg_count,
                });
            }
        }
    }

    fn validate_match(
        &mut self,
        id: ExprId,
        match_expr: ExprId,
        arms: &[MatchArm],
        db: &dyn HirDatabase,
        infer: Arc<InferenceResult>,
    ) {
        let (body, source_map): (Arc<Body>, Arc<BodySourceMap>) =
            db.body_with_source_map(self.owner);

        let match_expr_ty = if infer.type_of_expr[match_expr].is_unknown() {
            return;
        } else {
            &infer.type_of_expr[match_expr]
        };

        let pattern_arena = RefCell::new(PatternArena::new());

        let mut m_arms = Vec::new();
        let mut has_lowering_errors = false;
        for arm in arms {
            if let Some(pat_ty) = infer.type_of_pat.get(arm.pat) {
                // We only include patterns whose type matches the type
                // of the match expression. If we had an InvalidMatchArmPattern
                // diagnostic or similar we could raise that in an else
                // block here.
                //
                // When comparing the types, we also have to consider that rustc
                // will automatically de-reference the match expression type if
                // necessary.
                //
                // FIXME we should use the type checker for this.
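                //
                // Illustrative case for the reference check (an assumption, not a
                // fixture): in
                //     match &Some(1) { Some(_) => (), None => () }
                // the scrutinee has type `&Option<i32>` while the arm patterns are
                // typed as `Option<i32>`, so the `as_reference` branch below still
                // accepts the arm.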
                if (pat_ty == match_expr_ty
                    || match_expr_ty
                        .as_reference()
                        .map(|(match_expr_ty, ..)| match_expr_ty == pat_ty)
                        .unwrap_or(false))
                    && types_of_subpatterns_do_match(arm.pat, &body, &infer)
                {
                    // If we had a NotUsefulMatchArm diagnostic, we could
                    // check the usefulness of each pattern as we added it
                    // to the matrix here.
                    let m_arm = match_check::MatchArm {
                        pat: self.lower_pattern(
                            arm.pat,
                            &mut pattern_arena.borrow_mut(),
                            db,
                            &body,
                            &mut has_lowering_errors,
                        ),
                        has_guard: arm.guard.is_some(),
                    };
                    m_arms.push(m_arm);
                    if !has_lowering_errors {
                        continue;
                    }
                }
            }

            // If we can't resolve the type of a pattern, or the pattern type doesn't
            // fit the match expression, we skip this diagnostic. Skipping the entire
            // diagnostic rather than just not including this match arm is preferred
            // to avoid the chance of false positives.
            #[cfg(test)]
            match_check::tests::report_bail_out(db, self.owner, arm.pat, self.sink);
            return;
        }

        let cx = MatchCheckCtx {
            module: self.owner.module(db.upcast()),
            match_expr,
            infer: &infer,
            db,
            pattern_arena: &pattern_arena,
            eprint_panic_context: &|| {
                use syntax::AstNode;
                if let Ok(scrutinee_sptr) = source_map.expr_syntax(match_expr) {
                    let root = scrutinee_sptr.file_syntax(db.upcast());
                    if let Some(match_ast) = scrutinee_sptr.value.to_node(&root).syntax().parent() {
                        eprintln!(
                            "Match checking is about to panic on this expression:\n{}",
                            match_ast.to_string(),
                        );
                    }
                }
            },
        };
        let report = compute_match_usefulness(&cx, &m_arms);

        // FIXME Report unreachable arms
        // https://github.com/rust-lang/rust/blob/25c15cdbe/compiler/rustc_mir_build/src/thir/pattern/check_match.rs#L200-L201

        let witnesses = report.non_exhaustiveness_witnesses;
        // FIXME Report witnesses
        // eprintln!("compute_match_usefulness(..) -> {:?}", &witnesses);
        if !witnesses.is_empty() {
            if let Ok(source_ptr) = source_map.expr_syntax(id) {
                let root = source_ptr.file_syntax(db.upcast());
                if let ast::Expr::MatchExpr(match_expr) = &source_ptr.value.to_node(&root) {
                    if let (Some(match_expr), Some(arms)) =
                        (match_expr.expr(), match_expr.match_arm_list())
                    {
                        self.sink.push(MissingMatchArms {
                            file: source_ptr.file_id,
                            match_expr: AstPtr::new(&match_expr),
                            arms: AstPtr::new(&arms),
                        })
                    }
                }
            }
        }
    }

    fn lower_pattern(
        &self,
        pat: PatId,
        pattern_arena: &mut PatternArena,
        db: &dyn HirDatabase,
        body: &Body,
        have_errors: &mut bool,
    ) -> match_check::PatId {
        let mut patcx = match_check::PatCtxt::new(db, &self.infer, body);
        let pattern = patcx.lower_pattern(pat);
        let pattern = pattern_arena.alloc(expand_pattern(pattern));
        if !patcx.errors.is_empty() {
            *have_errors = true;
        }
        pattern
    }

    fn validate_results_in_tail_expr(&mut self, body_id: ExprId, id: ExprId, db: &dyn HirDatabase) {
        // the mismatch will be on the whole block currently
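        //
        // Illustrative (an assumption, not a test fixture): for
        //     fn f() -> Result<i32, ()> { 1 }
        // the tail has type `i32` while `Result<i32, ()>` is expected, and the
        // first type parameter equals the actual type, so we suggest wrapping the
        // tail in `Ok(...)` (or `Some(...)` for an `Option` return type).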
        let mismatch = match self.infer.type_mismatch_for_expr(body_id) {
            Some(m) => m,
            None => return,
        };

        let core_result_path = path![core::result::Result];
        let core_option_path = path![core::option::Option];

        let resolver = self.owner.resolver(db.upcast());
        let core_result_enum = match resolver.resolve_known_enum(db.upcast(), &core_result_path) {
            Some(it) => it,
            _ => return,
        };
        let core_option_enum = match resolver.resolve_known_enum(db.upcast(), &core_option_path) {
            Some(it) => it,
            _ => return,
        };

        let (params, required) = match mismatch.expected.kind(&Interner) {
            TyKind::Adt(AdtId(hir_def::AdtId::EnumId(enum_id)), ref parameters)
                if *enum_id == core_result_enum =>
            {
                (parameters, "Ok".to_string())
            }
            TyKind::Adt(AdtId(hir_def::AdtId::EnumId(enum_id)), ref parameters)
                if *enum_id == core_option_enum =>
            {
                (parameters, "Some".to_string())
            }
            _ => return,
        };

        if params.len(&Interner) > 0
            && params.at(&Interner, 0).ty(&Interner) == Some(&mismatch.actual)
        {
            let (_, source_map) = db.body_with_source_map(self.owner);

            if let Ok(source_ptr) = source_map.expr_syntax(id) {
                self.sink.push(MissingOkOrSomeInTailExpr {
                    file: source_ptr.file_id,
                    expr: source_ptr.value,
                    required,
                });
            }
        }
    }

    fn validate_missing_tail_expr(
        &mut self,
        body_id: ExprId,
        possible_tail_id: ExprId,
        db: &dyn HirDatabase,
    ) {
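        // Illustrative (an assumption, not a test fixture): for
        //     fn f() -> i32 { 1; }
        // the block evaluates to `()` while `i32` is expected, and the trailing
        // statement's expression already has the expected type, so the fix is to
        // remove the final semicolon.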
        let mismatch = match self.infer.type_mismatch_for_expr(body_id) {
            Some(m) => m,
            None => return,
        };

        let possible_tail_ty = match self.infer.type_of_expr.get(possible_tail_id) {
            Some(ty) => ty,
            None => return,
        };

        if !mismatch.actual.is_unit() || mismatch.expected != *possible_tail_ty {
            return;
        }

        let (_, source_map) = db.body_with_source_map(self.owner);

        if let Ok(source_ptr) = source_map.expr_syntax(possible_tail_id) {
            self.sink
                .push(RemoveThisSemicolon { file: source_ptr.file_id, expr: source_ptr.value });
        }
    }
}

pub fn record_literal_missing_fields(
    db: &dyn HirDatabase,
    infer: &InferenceResult,
    id: ExprId,
    expr: &Expr,
) -> Option<(VariantId, Vec<LocalFieldId>, /*exhaustive*/ bool)> {
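    // Illustrative (an assumption, not a test fixture): for
    //     struct S { x: u32, y: u32 }
    // the literal `S { x: 1 }` yields the field id of `y` with `exhaustive == true`,
    // while `S { x: 1, ..base }` yields the same field with `exhaustive == false`
    // because of the spread.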
    let (fields, exhaustive) = match expr {
        Expr::RecordLit { path: _, fields, spread } => (fields, spread.is_none()),
        _ => return None,
    };

    let variant_def = infer.variant_resolution_for_expr(id)?;
    if let VariantId::UnionId(_) = variant_def {
        return None;
    }

    let variant_data = variant_def.variant_data(db.upcast());

    let specified_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect();
    let missed_fields: Vec<LocalFieldId> = variant_data
        .fields()
        .iter()
        .filter_map(|(f, d)| if specified_fields.contains(&d.name) { None } else { Some(f) })
        .collect();
    if missed_fields.is_empty() {
        return None;
    }
    Some((variant_def, missed_fields, exhaustive))
}

pub fn record_pattern_missing_fields(
    db: &dyn HirDatabase,
    infer: &InferenceResult,
    id: PatId,
    pat: &Pat,
) -> Option<(VariantId, Vec<LocalFieldId>, /*exhaustive*/ bool)> {
    let (fields, exhaustive) = match pat {
        Pat::Record { path: _, args, ellipsis } => (args, !ellipsis),
        _ => return None,
    };

    let variant_def = infer.variant_resolution_for_pat(id)?;
    if let VariantId::UnionId(_) = variant_def {
        return None;
    }

    let variant_data = variant_def.variant_data(db.upcast());

    let specified_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect();
    let missed_fields: Vec<LocalFieldId> = variant_data
        .fields()
        .iter()
        .filter_map(|(f, d)| if specified_fields.contains(&d.name) { None } else { Some(f) })
        .collect();
    if missed_fields.is_empty() {
        return None;
    }
    Some((variant_def, missed_fields, exhaustive))
}

fn types_of_subpatterns_do_match(pat: PatId, body: &Body, infer: &InferenceResult) -> bool {
    fn walk(pat: PatId, body: &Body, infer: &InferenceResult, has_type_mismatches: &mut bool) {
        match infer.type_mismatch_for_pat(pat) {
            Some(_) => *has_type_mismatches = true,
            None => {
                body[pat].walk_child_pats(|subpat| walk(subpat, body, infer, has_type_mismatches))
            }
        }
    }

    let mut has_type_mismatches = false;
    walk(pat, body, infer, &mut has_type_mismatches);
    !has_type_mismatches
}

#[cfg(test)]
mod tests {
    use crate::diagnostics::tests::check_diagnostics;

    #[test]
    fn simple_free_fn_zero() {
        check_diagnostics(
            r#"
fn zero() {}
fn f() { zero(1); }
       //^^^^^^^ Expected 0 arguments, found 1
"#,
        );

        check_diagnostics(
            r#"
fn zero() {}
fn f() { zero(); }
"#,
        );
    }

    #[test]
    fn simple_free_fn_one() {
        check_diagnostics(
            r#"
fn one(arg: u8) {}
fn f() { one(); }
       //^^^^^ Expected 1 argument, found 0
"#,
        );

        check_diagnostics(
            r#"
fn one(arg: u8) {}
fn f() { one(1); }
"#,
        );
    }

    #[test]
    fn method_as_fn() {
        check_diagnostics(
            r#"
struct S;
impl S { fn method(&self) {} }

fn f() {
    S::method();
} //^^^^^^^^^^^ Expected 1 argument, found 0
"#,
        );

        check_diagnostics(
            r#"
struct S;
impl S { fn method(&self) {} }

fn f() {
    S::method(&S);
    S.method();
}
"#,
        );
    }

    #[test]
    fn method_with_arg() {
        check_diagnostics(
            r#"
struct S;
impl S { fn method(&self, arg: u8) {} }

fn f() {
    S.method();
} //^^^^^^^^^^ Expected 1 argument, found 0
"#,
        );

        check_diagnostics(
            r#"
struct S;
impl S { fn method(&self, arg: u8) {} }

fn f() {
    S::method(&S, 0);
    S.method(1);
}
"#,
        );
    }

    #[test]
    fn method_unknown_receiver() {
        // note: this is incorrect code, so there might be errors on this in the
        // future, but we shouldn't emit an argument count diagnostic here
        check_diagnostics(
            r#"
trait Foo { fn method(&self, arg: usize) {} }

fn f() {
    let x;
    x.method();
}
"#,
        );
    }

    #[test]
    fn tuple_struct() {
        check_diagnostics(
            r#"
struct Tup(u8, u16);
fn f() {
    Tup(0);
} //^^^^^^ Expected 2 arguments, found 1
"#,
        )
    }

    #[test]
    fn enum_variant() {
        check_diagnostics(
            r#"
enum En { Variant(u8, u16), }
fn f() {
    En::Variant(0);
} //^^^^^^^^^^^^^^ Expected 2 arguments, found 1
"#,
        )
    }

    #[test]
    fn enum_variant_type_macro() {
        check_diagnostics(
            r#"
macro_rules! Type {
    () => { u32 };
}
enum Foo {
    Bar(Type![])
}
impl Foo {
    fn new() {
        Foo::Bar(0);
        Foo::Bar(0, 1);
      //^^^^^^^^^^^^^^ Expected 1 argument, found 2
        Foo::Bar();
      //^^^^^^^^^^ Expected 1 argument, found 0
    }
}
"#,
        );
    }

    #[test]
    fn varargs() {
        check_diagnostics(
            r#"
extern "C" {
    fn fixed(fixed: u8);
    fn varargs(fixed: u8, ...);
    fn varargs2(...);
}

fn f() {
    unsafe {
        fixed(0);
        fixed(0, 1);
      //^^^^^^^^^^^ Expected 1 argument, found 2
        varargs(0);
        varargs(0, 1);
        varargs2();
        varargs2(0);
        varargs2(0, 1);
    }
}
"#,
        )
    }

    #[test]
    fn arg_count_lambda() {
        check_diagnostics(
            r#"
fn main() {
    let f = |()| ();
    f();
  //^^^ Expected 1 argument, found 0
    f(());
    f((), ());
  //^^^^^^^^^ Expected 1 argument, found 2
}
"#,
        )
    }

    #[test]
    fn cfgd_out_call_arguments() {
        check_diagnostics(
            r#"
struct C(#[cfg(FALSE)] ());
impl C {
    fn new() -> Self {
        Self(
            #[cfg(FALSE)]
            (),
        )
    }

    fn method(&self) {}
}

fn main() {
    C::new().method(#[cfg(FALSE)] 0);
}
"#,
        );
    }

    #[test]
    fn cfgd_out_fn_params() {
        check_diagnostics(
            r#"
fn foo(#[cfg(NEVER)] x: ()) {}

struct S;

impl S {
    fn method(#[cfg(NEVER)] self) {}
    fn method2(#[cfg(NEVER)] self, arg: u8) {}
    fn method3(self, #[cfg(NEVER)] arg: u8) {}
}

extern "C" {
    fn fixed(fixed: u8, #[cfg(NEVER)] ...);
    fn varargs(#[cfg(not(NEVER))] ...);
}

fn main() {
    foo();
    S::method();
    S::method2(0);
    S::method3(S);
    S.method3();
    unsafe {
        fixed(0);
        varargs(1, 2, 3);
    }
}
"#,
        )
    }
}