Mirror of https://github.com/rust-lang/rust-analyzer, synced 2025-01-16 07:03:57 +00:00.
Auto merge of #15584 - Veykril:diag-private-field-constructor, r=Veykril
Diagnose private fields in record constructors and record patterns: when a record literal or pattern names a field that exists but is not visible from the current module, `InferenceDiagnostic::NoSuchField` is now emitted with `private: true` and surfaced as a "field is private" error (E0451); fields that do not exist at all keep the existing "no such field" error (E0559).
Commit a3cfb457ca: 11 changed files with 257 additions and 89 deletions.
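For orientation, this is the kind of code the new diagnostics target, written in the annotation style of the tests added at the end of this diff. The module and field names are invented for the example, and the snippet is intentionally erroneous: the marked lines are exactly what rust-analyzer should now flag (E0451 for an existing-but-private field, E0559 for a field that does not exist).

```rust
mod m {
    pub struct Struct {
        field: u32, // no `pub`: not visible outside `m`
    }
}

fn f(s: m::Struct) {
    // Record pattern naming a private field:
    let m::Struct { field } = s;
                  //^^^^^ error: field is private

    // Record literal naming a private field and a nonexistent one:
    let _ = m::Struct { field: 0, qux: 1 };
                      //^^^^^^^^ error: field is private
                                //^^^ error: no such field
}
```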
@@ -65,6 +65,8 @@ pub type LabelSource = InFile<LabelPtr>;

 pub type FieldPtr = AstPtr<ast::RecordExprField>;
 pub type FieldSource = InFile<FieldPtr>;
+pub type PatFieldPtr = AstPtr<ast::RecordPatField>;
+pub type PatFieldSource = InFile<PatFieldPtr>;

 /// An item body together with the mapping from syntax nodes to HIR expression
 /// IDs. This is needed to go from e.g. a position in a file to the HIR
@@ -90,8 +92,8 @@ pub struct BodySourceMap {

     /// We don't create explicit nodes for record fields (`S { record_field: 92 }`).
     /// Instead, we use id of expression (`92`) to identify the field.
-    field_map: FxHashMap<FieldSource, ExprId>,
     field_map_back: FxHashMap<ExprId, FieldSource>,
+    pat_field_map_back: FxHashMap<PatId, PatFieldSource>,

     expansions: FxHashMap<InFile<AstPtr<ast::MacroCall>>, HirFileId>,
@@ -375,9 +377,8 @@ impl BodySourceMap {
         self.field_map_back[&expr].clone()
     }

-    pub fn node_field(&self, node: InFile<&ast::RecordExprField>) -> Option<ExprId> {
-        let src = node.map(AstPtr::new);
-        self.field_map.get(&src).cloned()
+    pub fn pat_field_syntax(&self, pat: PatId) -> PatFieldSource {
+        self.pat_field_map_back[&pat].clone()
     }

     pub fn macro_expansion_expr(&self, node: InFile<&ast::MacroExpr>) -> Option<ExprId> {
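The two hunks above follow the design spelled out in the `BodySourceMap` doc comment: record fields have no HIR node of their own, so their syntax is keyed off the id of the field's value expression, and, new in this change, a record-pattern field is keyed off the id of its sub-pattern via `pat_field_map_back`. Below is a toy, self-contained sketch of that back-mapping; it uses `std::collections::HashMap` instead of `FxHashMap` and strings instead of the real `InFile<AstPtr<...>>` sources, so every name in it is a stand-in.

```rust
use std::collections::HashMap;

// Stand-ins for the real ids and `InFile<AstPtr<...>>` sources.
type ExprId = u32;
type PatId = u32;

#[derive(Clone, Debug)]
struct FieldSource(String); // e.g. points at `record_field: 92` in `S { record_field: 92 }`

#[derive(Clone, Debug)]
struct PatFieldSource(String); // e.g. points at `record_field` in `S { record_field } => ...`

#[derive(Default)]
struct SourceMapSketch {
    field_map_back: HashMap<ExprId, FieldSource>,
    pat_field_map_back: HashMap<PatId, PatFieldSource>,
}

impl SourceMapSketch {
    // Shaped like `BodySourceMap::field_syntax` / `pat_field_syntax` in the hunk above.
    fn field_syntax(&self, expr: ExprId) -> FieldSource {
        self.field_map_back[&expr].clone()
    }
    fn pat_field_syntax(&self, pat: PatId) -> PatFieldSource {
        self.pat_field_map_back[&pat].clone()
    }
}

fn main() {
    let mut map = SourceMapSketch::default();
    map.field_map_back.insert(0, FieldSource("record_field: 92".into()));
    map.pat_field_map_back.insert(0, PatFieldSource("record_field".into()));
    println!("{:?} / {:?}", map.field_syntax(0), map.pat_field_syntax(0));
}
```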
@@ -446,7 +446,6 @@ impl ExprCollector<'_> {
                     None => self.missing_expr(),
                 };
                 let src = self.expander.to_source(AstPtr::new(&field));
-                self.source_map.field_map.insert(src.clone(), expr);
                 self.source_map.field_map_back.insert(expr, src);
                 Some(RecordLitField { name, expr })
             })
@@ -1330,23 +1329,21 @@ impl ExprCollector<'_> {
            ast::Pat::RecordPat(p) => {
                let path =
                    p.path().and_then(|path| self.expander.parse_path(self.db, path)).map(Box::new);
-                let args = p
-                    .record_pat_field_list()
-                    .expect("every struct should have a field list")
+                let record_pat_field_list =
+                    &p.record_pat_field_list().expect("every struct should have a field list");
+                let args = record_pat_field_list
                    .fields()
                    .filter_map(|f| {
                        let ast_pat = f.pat()?;
                        let pat = self.collect_pat(ast_pat, binding_list);
                        let name = f.field_name()?.as_name();
+                        let src = self.expander.to_source(AstPtr::new(&f));
+                        self.source_map.pat_field_map_back.insert(pat, src);
                        Some(RecordFieldPat { name, pat })
                    })
                    .collect();

-                let ellipsis = p
-                    .record_pat_field_list()
-                    .expect("every struct should have a field list")
-                    .rest_pat()
-                    .is_some();
+                let ellipsis = record_pat_field_list.rest_pat().is_some();

                Pat::Record { path, args, ellipsis }
            }
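Schematically, the lowering above turns a pattern like `S { foo, bar: b @ _, .. }` into `Pat::Record { path, args, ellipsis }`, and while doing so records which syntax field produced each sub-pattern id; those are the `pat_field_map_back` entries the diagnostic later needs to point at the right field. A hedged toy model of that step, with invented types standing in for the real HIR and source map:

```rust
use std::collections::HashMap;

type PatId = u32;

#[derive(Debug)]
struct RecordFieldPat { name: String, pat: PatId }

#[derive(Debug)]
enum Pat {
    Record { path: String, args: Vec<RecordFieldPat>, ellipsis: bool },
}

fn lower_record_pat(
    path: &str,
    fields: &[(&str, &str)], // (field name, source text of the sub-pattern)
    has_rest: bool,
    pat_field_map_back: &mut HashMap<PatId, String>,
) -> Pat {
    let args = fields
        .iter()
        .enumerate()
        .map(|(i, &(name, src))| {
            let pat = i as PatId; // pretend we allocated a fresh PatId for the sub-pattern
            pat_field_map_back.insert(pat, format!("{name}: {src}")); // remember its syntax
            RecordFieldPat { name: name.to_string(), pat }
        })
        .collect();
    Pat::Record { path: path.to_string(), args, ellipsis: has_rest }
}

fn main() {
    let mut back = HashMap::new();
    let pat = lower_record_pat("S", &[("foo", "foo"), ("bar", "b @ _")], true, &mut back);
    println!("{pat:?}\n{back:?}");
}
```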
@@ -447,6 +447,7 @@ impl VariantData {
        }
    }

+    // FIXME: Linear lookup
    pub fn field(&self, name: &Name) -> Option<LocalFieldId> {
        self.fields().iter().find_map(|(id, data)| if &data.name == name { Some(id) } else { None })
    }
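The `// FIXME: Linear lookup` comment added above notes that `VariantData::field` scans every field until the name matches. A stand-alone illustration of the same shape of lookup, with toy types in place of the real arena:

```rust
type LocalFieldId = usize;

struct FieldData { name: String }

struct VariantDataSketch { fields: Vec<FieldData> }

impl VariantDataSketch {
    // Linear scan, like the real `VariantData::field`.
    fn field(&self, name: &str) -> Option<LocalFieldId> {
        self.fields
            .iter()
            .enumerate()
            .find_map(|(id, data)| if data.name == name { Some(id) } else { None })
    }
}

fn main() {
    let v = VariantDataSketch {
        fields: vec![FieldData { name: "foo".into() }, FieldData { name: "bar".into() }],
    };
    assert_eq!(v.field("bar"), Some(1));
    assert_eq!(v.field("baz"), None);
}
```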
@@ -194,7 +194,8 @@ pub(crate) type InferResult<T> = Result<InferOk<T>, TypeError>;
 #[derive(Debug, PartialEq, Eq, Clone)]
 pub enum InferenceDiagnostic {
     NoSuchField {
-        expr: ExprId,
+        field: ExprOrPatId,
+        private: bool,
     },
     PrivateField {
         expr: ExprId,
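`NoSuchField` now identifies the offending field by an `ExprOrPatId` (a record-literal field by the id of its value expression, a record-pattern field by the id of its sub-pattern) and carries a `private` flag. A minimal sketch of how a consumer tells the two cases apart, with stand-in id types; the real matching is in the `hir` hunk further down:

```rust
// Stand-ins for the real ids; illustrative only.
type ExprId = u32;
type PatId = u32;

#[allow(dead_code)]
enum ExprOrPatId {
    ExprId(ExprId),
    PatId(PatId),
}

#[allow(dead_code)]
enum InferenceDiagnostic {
    NoSuchField { field: ExprOrPatId, private: bool },
}

fn describe(d: &InferenceDiagnostic) -> &'static str {
    match d {
        InferenceDiagnostic::NoSuchField { private: true, .. } => "field is private",
        InferenceDiagnostic::NoSuchField { private: false, .. } => "no such field",
    }
}

fn main() {
    let d = InferenceDiagnostic::NoSuchField { field: ExprOrPatId::ExprId(0), private: true };
    println!("{}", describe(&d));
}
```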
@@ -514,9 +514,6 @@ impl InferenceContext<'_> {
            }
            Expr::RecordLit { path, fields, spread, .. } => {
                let (ty, def_id) = self.resolve_variant(path.as_deref(), false);
-                if let Some(variant) = def_id {
-                    self.write_variant_resolution(tgt_expr.into(), variant);
-                }

                if let Some(t) = expected.only_has_type(&mut self.table) {
                    self.unify(&ty, &t);
@@ -526,26 +523,56 @@ impl InferenceContext<'_> {
                    .as_adt()
                    .map(|(_, s)| s.clone())
                    .unwrap_or_else(|| Substitution::empty(Interner));
-                let field_types = def_id.map(|it| self.db.field_types(it)).unwrap_or_default();
-                let variant_data = def_id.map(|it| it.variant_data(self.db.upcast()));
-                for field in fields.iter() {
-                    let field_def =
-                        variant_data.as_ref().and_then(|it| match it.field(&field.name) {
-                            Some(local_id) => Some(FieldId { parent: def_id.unwrap(), local_id }),
-                            None => {
-                                self.push_diagnostic(InferenceDiagnostic::NoSuchField {
-                                    expr: field.expr,
-                                });
-                                None
-                            }
-                        });
-                    let field_ty = field_def.map_or(self.err_ty(), |it| {
-                        field_types[it.local_id].clone().substitute(Interner, &substs)
-                    });
-                    // Field type might have some unknown types
-                    // FIXME: we may want to emit a single type variable for all instance of type fields?
-                    let field_ty = self.insert_type_vars(field_ty);
-                    self.infer_expr_coerce(field.expr, &Expectation::has_type(field_ty));
+                if let Some(variant) = def_id {
+                    self.write_variant_resolution(tgt_expr.into(), variant);
+                }
+                match def_id {
+                    _ if fields.is_empty() => {}
+                    Some(def) => {
+                        let field_types = self.db.field_types(def);
+                        let variant_data = def.variant_data(self.db.upcast());
+                        let visibilities = self.db.field_visibilities(def);
+                        for field in fields.iter() {
+                            let field_def = {
+                                match variant_data.field(&field.name) {
+                                    Some(local_id) => {
+                                        if !visibilities[local_id].is_visible_from(
+                                            self.db.upcast(),
+                                            self.resolver.module(),
+                                        ) {
+                                            self.push_diagnostic(
+                                                InferenceDiagnostic::NoSuchField {
+                                                    field: field.expr.into(),
+                                                    private: true,
+                                                },
+                                            );
+                                        }
+                                        Some(local_id)
+                                    }
+                                    None => {
+                                        self.push_diagnostic(InferenceDiagnostic::NoSuchField {
+                                            field: field.expr.into(),
+                                            private: false,
+                                        });
+                                        None
+                                    }
+                                }
+                            };
+                            let field_ty = field_def.map_or(self.err_ty(), |it| {
+                                field_types[it].clone().substitute(Interner, &substs)
+                            });
+
+                            // Field type might have some unknown types
+                            // FIXME: we may want to emit a single type variable for all instance of type fields?
+                            let field_ty = self.insert_type_vars(field_ty);
+                            self.infer_expr_coerce(field.expr, &Expectation::has_type(field_ty));
+                        }
+                    }
+                    None => {
+                        for field in fields.iter() {
+                            self.infer_expr_coerce(field.expr, &Expectation::None);
+                        }
+                    }
                }
                if let Some(expr) = spread {
                    self.infer_expr(*expr, &Expectation::has_type(ty.clone()));
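The core of the rewritten record-literal inference above is a per-field classification: look the written field up in the resolved variant; if it is absent, push `NoSuchField { private: false }`; if it exists but its visibility does not include the current module, push `NoSuchField { private: true }`; in every case inference of the field expression still proceeds with the best type available. A stand-alone distillation of that classification (a toy model with no salsa database or visibility resolver):

```rust
#[derive(Debug, PartialEq)]
enum FieldIssue {
    Ok,
    Private,     // exists, but not visible from the use site -> `private: true`
    NoSuchField, // does not exist at all -> `private: false`
}

struct DeclaredField { name: &'static str, is_visible_here: bool }

fn classify(declared: &[DeclaredField], written: &str) -> FieldIssue {
    match declared.iter().find(|f| f.name == written) {
        None => FieldIssue::NoSuchField,
        Some(f) if !f.is_visible_here => FieldIssue::Private,
        Some(_) => FieldIssue::Ok,
    }
}

fn main() {
    let declared = [
        DeclaredField { name: "field", is_visible_here: false }, // private field
        DeclaredField { name: "field2", is_visible_here: true },
    ];
    assert_eq!(classify(&declared, "field"), FieldIssue::Private);
    assert_eq!(classify(&declared, "field2"), FieldIssue::Ok);
    assert_eq!(classify(&declared, "qux"), FieldIssue::NoSuchField);
}
```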
@@ -1127,7 +1154,7 @@ impl InferenceContext<'_> {
            Expr::Underscore => rhs_ty.clone(),
            _ => {
                // `lhs` is a place expression, a unit struct, or an enum variant.
-                let lhs_ty = self.infer_expr(lhs, &Expectation::none());
+                let lhs_ty = self.infer_expr_inner(lhs, &Expectation::none());

                // This is the only branch where this function may coerce any type.
                // We are returning early to avoid the unifiability check below.
@@ -92,24 +92,51 @@ impl InferenceContext<'_> {
        let substs =
            ty.as_adt().map(|(_, s)| s.clone()).unwrap_or_else(|| Substitution::empty(Interner));

-        let field_tys = def.map(|it| self.db.field_types(it)).unwrap_or_default();
-        let (pre, post) = match ellipsis {
-            Some(idx) => subs.split_at(idx),
-            None => (subs, &[][..]),
-        };
-        let post_idx_offset = field_tys.iter().count().saturating_sub(post.len());
+        match def {
+            _ if subs.len() == 0 => {}
+            Some(def) => {
+                let field_types = self.db.field_types(def);
+                let variant_data = def.variant_data(self.db.upcast());
+                let visibilities = self.db.field_visibilities(def);

-        let pre_iter = pre.iter().enumerate();
-        let post_iter = (post_idx_offset..).zip(post.iter());
-        for (i, &subpat) in pre_iter.chain(post_iter) {
-            let expected_ty = var_data
-                .as_ref()
-                .and_then(|d| d.field(&Name::new_tuple_field(i)))
-                .map_or(self.err_ty(), |field| {
-                    field_tys[field].clone().substitute(Interner, &substs)
-                });
-            let expected_ty = self.normalize_associated_types_in(expected_ty);
-            T::infer(self, subpat, &expected_ty, default_bm);
+                let (pre, post) = match ellipsis {
+                    Some(idx) => subs.split_at(idx),
+                    None => (subs, &[][..]),
+                };
+                let post_idx_offset = field_types.iter().count().saturating_sub(post.len());
+
+                let pre_iter = pre.iter().enumerate();
+                let post_iter = (post_idx_offset..).zip(post.iter());
+
+                for (i, &subpat) in pre_iter.chain(post_iter) {
+                    let field_def = {
+                        match variant_data.field(&Name::new_tuple_field(i)) {
+                            Some(local_id) => {
+                                if !visibilities[local_id]
+                                    .is_visible_from(self.db.upcast(), self.resolver.module())
+                                {
+                                    // FIXME(DIAGNOSE): private tuple field
+                                }
+                                Some(local_id)
+                            }
+                            None => None,
+                        }
+                    };
+
+                    let expected_ty = field_def.map_or(self.err_ty(), |f| {
+                        field_types[f].clone().substitute(Interner, &substs)
+                    });
+                    let expected_ty = self.normalize_associated_types_in(expected_ty);
+
+                    T::infer(self, subpat, &expected_ty, default_bm);
+                }
+            }
+            None => {
+                let err_ty = self.err_ty();
+                for &inner in subs {
+                    T::infer(self, inner, &err_ty, default_bm);
+                }
+            }
        }

        ty
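The tuple-pattern arm above pairs sub-patterns with field indices around a `..` rest pattern: sub-patterns before the rest get indices from `0`, and sub-patterns after it get indices counted back from the total number of fields via `post_idx_offset`. A small runnable sketch of that index pairing, where `field_count` stands in for `field_types.iter().count()`:

```rust
// Pair sub-patterns of a tuple pattern like `S(a, b, .., z)` with field indices,
// mirroring the pre/post split and `post_idx_offset` computation above.
fn pair_with_field_indices<'a>(
    subs: &'a [&'a str],
    ellipsis: Option<usize>, // position of `..`, if any
    field_count: usize,
) -> Vec<(usize, &'a str)> {
    let (pre, post) = match ellipsis {
        Some(idx) => subs.split_at(idx),
        None => (subs, &[][..]),
    };
    let post_idx_offset = field_count.saturating_sub(post.len());

    let pre_iter = pre.iter().enumerate();
    let post_iter = (post_idx_offset..).zip(post.iter());
    pre_iter.chain(post_iter).map(|(i, &s)| (i, s)).collect()
}

fn main() {
    // struct S(u8, u8, u8, u8, u8); matched with S(a, b, .., z)
    let pairs = pair_with_field_indices(&["a", "b", "z"], Some(2), 5);
    assert_eq!(pairs, vec![(0, "a"), (1, "b"), (4, "z")]);
}
```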
@@ -122,7 +149,7 @@ impl InferenceContext<'_> {
        expected: &Ty,
        default_bm: T::BindingMode,
        id: T,
-        subs: impl Iterator<Item = (Name, T)>,
+        subs: impl Iterator<Item = (Name, T)> + ExactSizeIterator,
    ) -> Ty {
        let (ty, def) = self.resolve_variant(path, false);
        if let Some(variant) = def {
@@ -134,17 +161,51 @@ impl InferenceContext<'_> {
        let substs =
            ty.as_adt().map(|(_, s)| s.clone()).unwrap_or_else(|| Substitution::empty(Interner));

-        let field_tys = def.map(|it| self.db.field_types(it)).unwrap_or_default();
-        let var_data = def.map(|it| it.variant_data(self.db.upcast()));
+        match def {
+            _ if subs.len() == 0 => {}
+            Some(def) => {
+                let field_types = self.db.field_types(def);
+                let variant_data = def.variant_data(self.db.upcast());
+                let visibilities = self.db.field_visibilities(def);

-        for (name, inner) in subs {
-            let expected_ty = var_data
-                .as_ref()
-                .and_then(|it| it.field(&name))
-                .map_or(self.err_ty(), |f| field_tys[f].clone().substitute(Interner, &substs));
-            let expected_ty = self.normalize_associated_types_in(expected_ty);
+                for (name, inner) in subs {
+                    let field_def = {
+                        match variant_data.field(&name) {
+                            Some(local_id) => {
+                                if !visibilities[local_id]
+                                    .is_visible_from(self.db.upcast(), self.resolver.module())
+                                {
+                                    self.push_diagnostic(InferenceDiagnostic::NoSuchField {
+                                        field: inner.into(),
+                                        private: true,
+                                    });
+                                }
+                                Some(local_id)
+                            }
+                            None => {
+                                self.push_diagnostic(InferenceDiagnostic::NoSuchField {
+                                    field: inner.into(),
+                                    private: false,
+                                });
+                                None
+                            }
+                        }
+                    };

-            T::infer(self, inner, &expected_ty, default_bm);
+                    let expected_ty = field_def.map_or(self.err_ty(), |f| {
+                        field_types[f].clone().substitute(Interner, &substs)
+                    });
+                    let expected_ty = self.normalize_associated_types_in(expected_ty);
+
+                    T::infer(self, inner, &expected_ty, default_bm);
+                }
+            }
+            None => {
+                let err_ty = self.err_ty();
+                for (_, inner) in subs {
+                    T::infer(self, inner, &err_ty, default_bm);
+                }
+            }
        }

        ty
@@ -166,8 +166,8 @@ impl<V, T> ProjectionElem<V, T> {
                TyKind::Adt(_, subst) => {
                    db.field_types(f.parent)[f.local_id].clone().substitute(Interner, subst)
                }
-                _ => {
-                    never!("Only adt has field");
+                ty => {
+                    never!("Only adt has field, found {:?}", ty);
                    return TyKind::Error.intern(Interner);
                }
            },
@@ -173,7 +173,8 @@ pub struct MalformedDerive {

 #[derive(Debug)]
 pub struct NoSuchField {
-    pub field: InFile<AstPtr<ast::RecordExprField>>,
+    pub field: InFile<Either<AstPtr<ast::RecordExprField>, AstPtr<ast::RecordPatField>>>,
+    pub private: bool,
 }

 #[derive(Debug)]
@@ -1503,11 +1503,19 @@ impl DefWithBody {
        let infer = db.infer(self.into());
        let source_map = Lazy::new(|| db.body_with_source_map(self.into()).1);
        let expr_syntax = |expr| source_map.expr_syntax(expr).expect("unexpected synthetic");
+        let pat_syntax = |pat| source_map.pat_syntax(pat).expect("unexpected synthetic");
        for d in &infer.diagnostics {
            match d {
-                &hir_ty::InferenceDiagnostic::NoSuchField { expr } => {
-                    let field = source_map.field_syntax(expr);
-                    acc.push(NoSuchField { field }.into())
+                &hir_ty::InferenceDiagnostic::NoSuchField { field: expr, private } => {
+                    let expr_or_pat = match expr {
+                        ExprOrPatId::ExprId(expr) => {
+                            source_map.field_syntax(expr).map(Either::Left)
+                        }
+                        ExprOrPatId::PatId(pat) => {
+                            source_map.pat_field_syntax(pat).map(Either::Right)
+                        }
+                    };
+                    acc.push(NoSuchField { field: expr_or_pat, private }.into())
                }
                &hir_ty::InferenceDiagnostic::MismatchedArgCount { call_expr, expected, found } => {
                    acc.push(
@@ -1523,10 +1531,7 @@ impl DefWithBody {
                &hir_ty::InferenceDiagnostic::PrivateAssocItem { id, item } => {
                    let expr_or_pat = match id {
                        ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(Either::Left),
-                        ExprOrPatId::PatId(pat) => source_map
-                            .pat_syntax(pat)
-                            .expect("unexpected synthetic")
-                            .map(Either::Right),
+                        ExprOrPatId::PatId(pat) => pat_syntax(pat).map(Either::Right),
                    };
                    let item = item.into();
                    acc.push(PrivateAssocItem { expr_or_pat, item }.into())
@@ -849,6 +849,7 @@ fn main() {
 struct Foo { }
 fn main(f: Foo) {
     match f { Foo { bar } => () }
+                  // ^^^ error: no such field
 }
 "#,
     );
@@ -1,3 +1,4 @@
+use either::Either;
 use hir::{db::ExpandDatabase, HasSource, HirDisplay, Semantics};
 use ide_db::{base_db::FileId, source_change::SourceChange, RootDatabase};
 use syntax::{
@@ -12,22 +13,39 @@ use crate::{fix, Assist, Diagnostic, DiagnosticCode, DiagnosticsContext};
 //
 // This diagnostic is triggered if created structure does not have field provided in record.
 pub(crate) fn no_such_field(ctx: &DiagnosticsContext<'_>, d: &hir::NoSuchField) -> Diagnostic {
-    Diagnostic::new_with_syntax_node_ptr(
-        ctx,
-        DiagnosticCode::RustcHardError("E0559"),
-        "no such field",
-        d.field.clone().map(|it| it.into()),
-    )
-    .with_fixes(fixes(ctx, d))
+    let node = d.field.clone().map(|it| it.either(Into::into, Into::into));
+    if d.private {
+        // FIXME: quickfix to add required visibility
+        Diagnostic::new_with_syntax_node_ptr(
+            ctx,
+            DiagnosticCode::RustcHardError("E0451"),
+            "field is private",
+            node,
+        )
+    } else {
+        Diagnostic::new_with_syntax_node_ptr(
+            ctx,
+            DiagnosticCode::RustcHardError("E0559"),
+            "no such field",
+            node,
+        )
+        .with_fixes(fixes(ctx, d))
+    }
 }

 fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::NoSuchField) -> Option<Vec<Assist>> {
-    let root = ctx.sema.db.parse_or_expand(d.field.file_id);
-    missing_record_expr_field_fixes(
-        &ctx.sema,
-        d.field.file_id.original_file(ctx.sema.db),
-        &d.field.value.to_node(&root),
-    )
+    // FIXME: quickfix for pattern
+    match &d.field.value {
+        Either::Left(ptr) => {
+            let root = ctx.sema.db.parse_or_expand(d.field.file_id);
+            missing_record_expr_field_fixes(
+                &ctx.sema,
+                d.field.file_id.original_file(ctx.sema.db),
+                &ptr.to_node(&root),
+            )
+        }
+        _ => None,
+    }
 }

 fn missing_record_expr_field_fixes(
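The handler now picks the error code and message off `d.private` (E0451 "field is private" versus E0559 "no such field") and only offers the existing quickfix when the offending field came from a record expression (`Either::Left`); record patterns get no fix yet, per the FIXME. A dependency-free sketch of that gating, with a local two-variant enum standing in for `either::Either` and a string where the real AST pointer would be:

```rust
// Local stand-in for `either::Either`, so the sketch runs without the crate.
enum Either<L, R> {
    Left(L),
    Right(R),
}

#[allow(dead_code)]
#[derive(Debug)]
struct Assist(&'static str);

// Only the record-expression case gets a quickfix; patterns return None for now.
fn fixes(field: &Either<&str, &str>) -> Option<Vec<Assist>> {
    match field {
        Either::Left(_record_expr_field) => Some(vec![Assist("create field")]),
        Either::Right(_record_pat_field) => None,
    }
}

fn main() {
    println!("{:?}", fixes(&Either::Left("field: 0")));
    println!("{:?}", fixes(&Either::Right("field")));
}
```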
@@ -118,13 +136,34 @@ mod tests {
 r#"
 struct S { foo: i32, bar: () }
 impl S {
-    fn new() -> S {
+    fn new(
+        s@S {
+        //^ 💡 error: missing structure fields:
+        //| - bar
+            foo,
+            baz: baz2,
+          //^^^^^^^^^ error: no such field
+            qux
+          //^^^ error: no such field
+        }: S
+    ) -> S {
+        S {
+        //^ 💡 error: missing structure fields:
+        //| - bar
+            foo,
+            baz: baz2,
+          //^^^^^^^^^ error: no such field
+            qux
+          //^^^ error: no such field
+        } = s;
         S {
         //^ 💡 error: missing structure fields:
         //| - bar
             foo: 92,
             baz: 62,
           //^^^^^^^ 💡 error: no such field
             qux
           //^^^ error: no such field
         }
     }
 }
@@ -292,6 +331,40 @@ fn main() {
     0$0: 0
 }
 }
 "#,
         )
     }
+
+    #[test]
+    fn test_struct_field_private() {
+        check_diagnostics(
+            r#"
+mod m {
+    pub struct Struct {
+        field: u32,
+        field2: u32,
+    }
+}
+fn f(s@m::Struct {
+    field: f,
+  //^^^^^^^^ error: field is private
+    field2
+  //^^^^^^ error: field is private
+}: m::Struct) {
+    // assignee expression
+    m::Struct {
+        field: 0,
+      //^^^^^^^^ error: field is private
+        field2
+      //^^^^^^ error: field is private
+    } = s;
+    m::Struct {
+        field: 0,
+      //^^^^^^^^ error: field is private
+        field2
+      //^^^^^^ error: field is private
+    };
+}
+"#,
+        )
+    }