3882: Move computation of missing fields into hir r=matklad a=matklad

cc @SomeoneToIgnore, this is the refactoring that moves the computation of missing fields into hir.

It actually removes meaningful duplication between the diagnostics code and the completion code. Nonetheless, it's a net addition of code :(

Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>
bors[bot] 2020-04-07 16:48:15 +00:00 committed by GitHub
commit 0c927b4584
5 changed files with 198 additions and 168 deletions
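To make the new surface area concrete, here is a minimal sketch (not part of the diff) of how an IDE-side caller is expected to use the new `Semantics::record_literal_missing_fields` API instead of resolving the record's variant and filtering its fields by hand. The helper name and the `ra_ide_db::RootDatabase` database type are assumptions for illustration; only the `Semantics` methods themselves come from the change below.

```rust
use hir::Semantics;
use ra_ide_db::RootDatabase; // assumed caller-side database type implementing `HirDatabase`
use ra_syntax::ast;

// Hypothetical helper: names of the fields still missing from a record literal.
// Variant resolution and substitution of generic arguments now happen behind
// `Semantics::record_literal_missing_fields`, so the caller only formats names.
fn missing_literal_field_names(
    sema: &Semantics<'_, RootDatabase>,
    record_lit: &ast::RecordLit,
) -> Vec<String> {
    sema.record_literal_missing_fields(record_lit)
        .into_iter()
        .map(|(field, _ty)| field.name(sema.db).to_string())
        .collect()
}
```

The missing-fields diagnostic and record completion both go through the same hir entry points (`record_literal_missing_fields` / `record_pattern_missing_fields`), which is the deduplication the description refers to.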


@@ -1027,8 +1027,16 @@ impl Type {
ty: Ty,
) -> Option<Type> {
let krate = resolver.krate()?;
Some(Type::new_with_resolver_inner(db, krate, resolver, ty))
}
pub(crate) fn new_with_resolver_inner(
db: &dyn HirDatabase,
krate: CrateId,
resolver: &Resolver,
ty: Ty,
) -> Type {
let environment = TraitEnvironment::lower(db, &resolver);
Some(Type { krate, ty: InEnvironment { value: ty, environment } })
Type { krate, ty: InEnvironment { value: ty, environment } }
}
fn new(db: &dyn HirDatabase, krate: CrateId, lexical_env: impl HasResolver, ty: Ty) -> Type {
@@ -1152,27 +1160,6 @@ impl Type {
res
}
pub fn variant_fields(
&self,
db: &dyn HirDatabase,
def: VariantDef,
) -> Vec<(StructField, Type)> {
// FIXME: check that ty and def match
match &self.ty.value {
Ty::Apply(a_ty) => {
let field_types = db.field_types(def.into());
def.fields(db)
.into_iter()
.map(|it| {
let ty = field_types[it.id].clone().subst(&a_ty.parameters);
(it, self.derived(ty))
})
.collect()
}
_ => Vec::new(),
}
}
pub fn autoderef<'a>(&'a self, db: &'a dyn HirDatabase) -> impl Iterator<Item = Type> + 'a {
// There should be no inference vars in types passed here
// FIXME check that?


@@ -23,7 +23,7 @@ use crate::{
semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx},
source_analyzer::{resolve_hir_path, SourceAnalyzer},
AssocItem, Function, HirFileId, ImplDef, InFile, Local, MacroDef, Module, ModuleDef, Name,
Origin, Path, ScopeDef, StructField, Trait, Type, TypeParam, VariantDef,
Origin, Path, ScopeDef, StructField, Trait, Type, TypeParam,
};
#[derive(Debug, Clone, PartialEq, Eq)]
@@ -187,14 +187,6 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
self.analyze(field.syntax()).resolve_record_field(self.db, field)
}
pub fn resolve_record_literal(&self, record_lit: &ast::RecordLit) -> Option<VariantDef> {
self.analyze(record_lit.syntax()).resolve_record_literal(self.db, record_lit)
}
pub fn resolve_record_pattern(&self, record_pat: &ast::RecordPat) -> Option<VariantDef> {
self.analyze(record_pat.syntax()).resolve_record_pattern(record_pat)
}
pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<MacroDef> {
let sa = self.analyze(macro_call.syntax());
let macro_call = self.find_file(macro_call.syntax().clone()).with_value(macro_call);
@@ -212,6 +204,24 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
// FIXME: use this instead?
// pub fn resolve_name_ref(&self, name_ref: &ast::NameRef) -> Option<???>;
pub fn record_literal_missing_fields(
&self,
literal: &ast::RecordLit,
) -> Vec<(StructField, Type)> {
self.analyze(literal.syntax())
.record_literal_missing_fields(self.db, literal)
.unwrap_or_default()
}
pub fn record_pattern_missing_fields(
&self,
pattern: &ast::RecordPat,
) -> Vec<(StructField, Type)> {
self.analyze(pattern.syntax())
.record_pattern_missing_fields(self.db, pattern)
.unwrap_or_default()
}
pub fn to_def<T: ToDef>(&self, src: &T) -> Option<T::Def> {
let src = self.find_file(src.syntax().clone()).with_value(src).cloned();
T::to_def(self, src)


@@ -14,10 +14,13 @@ use hir_def::{
},
expr::{ExprId, Pat, PatId},
resolver::{resolver_for_scope, Resolver, TypeNs, ValueNs},
AsMacroCall, DefWithBodyId,
AsMacroCall, DefWithBodyId, LocalStructFieldId, StructFieldId, VariantId,
};
use hir_expand::{hygiene::Hygiene, name::AsName, HirFileId, InFile};
use hir_ty::InferenceResult;
use hir_ty::{
expr::{record_literal_missing_fields, record_pattern_missing_fields},
InferenceResult, Substs, Ty,
};
use ra_syntax::{
ast::{self, AstNode},
SyntaxNode, SyntaxNodePtr, TextUnit,
@@ -25,8 +28,10 @@ use ra_syntax::{
use crate::{
db::HirDatabase, semantics::PathResolution, Adt, Const, EnumVariant, Function, Local, MacroDef,
ModPath, ModuleDef, Path, PathKind, Static, Struct, Trait, Type, TypeAlias, TypeParam,
ModPath, ModuleDef, Path, PathKind, Static, Struct, StructField, Trait, Type, TypeAlias,
TypeParam,
};
use ra_db::CrateId;
/// `SourceAnalyzer` is a convenience wrapper which exposes HIR API in terms of
/// original source files. It should not be used inside the HIR itself.
@@ -164,23 +169,6 @@ impl SourceAnalyzer {
Some((struct_field.into(), local))
}
pub(crate) fn resolve_record_literal(
&self,
db: &dyn HirDatabase,
record_lit: &ast::RecordLit,
) -> Option<crate::VariantDef> {
let expr_id = self.expr_id(db, &record_lit.clone().into())?;
self.infer.as_ref()?.variant_resolution_for_expr(expr_id).map(|it| it.into())
}
pub(crate) fn resolve_record_pattern(
&self,
record_pat: &ast::RecordPat,
) -> Option<crate::VariantDef> {
let pat_id = self.pat_id(&record_pat.clone().into())?;
self.infer.as_ref()?.variant_resolution_for_pat(pat_id).map(|it| it.into())
}
pub(crate) fn resolve_macro_call(
&self,
db: &dyn HirDatabase,
@@ -231,6 +219,68 @@ impl SourceAnalyzer {
resolve_hir_path(db, &self.resolver, &hir_path)
}
pub(crate) fn record_literal_missing_fields(
&self,
db: &dyn HirDatabase,
literal: &ast::RecordLit,
) -> Option<Vec<(StructField, Type)>> {
let krate = self.resolver.krate()?;
let body = self.body.as_ref()?;
let infer = self.infer.as_ref()?;
let expr_id = self.expr_id(db, &literal.clone().into())?;
let substs = match &infer.type_of_expr[expr_id] {
Ty::Apply(a_ty) => &a_ty.parameters,
_ => return None,
};
let (variant, missing_fields, _exhaustive) =
record_literal_missing_fields(db, infer, expr_id, &body[expr_id])?;
let res = self.missing_fields(db, krate, substs, variant, missing_fields);
Some(res)
}
pub(crate) fn record_pattern_missing_fields(
&self,
db: &dyn HirDatabase,
pattern: &ast::RecordPat,
) -> Option<Vec<(StructField, Type)>> {
let krate = self.resolver.krate()?;
let body = self.body.as_ref()?;
let infer = self.infer.as_ref()?;
let pat_id = self.pat_id(&pattern.clone().into())?;
let substs = match &infer.type_of_pat[pat_id] {
Ty::Apply(a_ty) => &a_ty.parameters,
_ => return None,
};
let (variant, missing_fields) =
record_pattern_missing_fields(db, infer, pat_id, &body[pat_id])?;
let res = self.missing_fields(db, krate, substs, variant, missing_fields);
Some(res)
}
fn missing_fields(
&self,
db: &dyn HirDatabase,
krate: CrateId,
substs: &Substs,
variant: VariantId,
missing_fields: Vec<LocalStructFieldId>,
) -> Vec<(StructField, Type)> {
let field_types = db.field_types(variant);
missing_fields
.into_iter()
.map(|local_id| {
let field = StructFieldId { parent: variant, local_id };
let ty = field_types[local_id].clone().subst(substs);
(field.into(), Type::new_with_resolver_inner(db, krate, &self.resolver, ty))
})
.collect()
}
pub(crate) fn expand(
&self,
db: &dyn HirDatabase,


@@ -2,12 +2,8 @@
use std::sync::Arc;
use hir_def::{
path::{path, Path},
resolver::HasResolver,
AdtId, FunctionId,
};
use hir_expand::{diagnostics::DiagnosticSink, name::Name};
use hir_def::{path::path, resolver::HasResolver, AdtId, FunctionId};
use hir_expand::diagnostics::DiagnosticSink;
use ra_syntax::ast;
use ra_syntax::AstPtr;
use rustc_hash::FxHashSet;
@@ -29,7 +25,7 @@ pub use hir_def::{
ArithOp, Array, BinaryOp, BindingAnnotation, CmpOp, Expr, ExprId, Literal, LogicOp,
MatchArm, Ordering, Pat, PatId, RecordFieldPat, RecordLitField, Statement, UnaryOp,
},
VariantId,
LocalStructFieldId, VariantId,
};
pub struct ExprValidator<'a, 'b: 'a> {
@@ -50,14 +46,37 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
pub fn validate_body(&mut self, db: &dyn HirDatabase) {
let body = db.body(self.func.into());
for e in body.exprs.iter() {
if let (id, Expr::RecordLit { path, fields, spread }) = e {
self.validate_record_literal(id, path, fields, *spread, db);
} else if let (id, Expr::Match { expr, arms }) = e {
for (id, expr) in body.exprs.iter() {
if let Some((variant_def, missed_fields, true)) =
record_literal_missing_fields(db, &self.infer, id, expr)
{
// XXX: only look at source_map if we do have missing fields
let (_, source_map) = db.body_with_source_map(self.func.into());
if let Ok(source_ptr) = source_map.expr_syntax(id) {
if let Some(expr) = source_ptr.value.left() {
let root = source_ptr.file_syntax(db.upcast());
if let ast::Expr::RecordLit(record_lit) = expr.to_node(&root) {
if let Some(field_list) = record_lit.record_field_list() {
let variant_data = variant_data(db.upcast(), variant_def);
let missed_fields = missed_fields
.into_iter()
.map(|idx| variant_data.fields()[idx].name.clone())
.collect();
self.sink.push(MissingFields {
file: source_ptr.file_id,
field_list: AstPtr::new(&field_list),
missed_fields,
})
}
}
}
}
}
if let Expr::Match { expr, arms } = expr {
self.validate_match(id, *expr, arms, db, self.infer.clone());
}
}
let body_expr = &body[body.body_expr];
if let Expr::Block { tail: Some(t), .. } = body_expr {
self.validate_results_in_tail_expr(body.body_expr, *t, db);
@@ -146,61 +165,6 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
}
}
fn validate_record_literal(
&mut self,
id: ExprId,
_path: &Option<Path>,
fields: &[RecordLitField],
spread: Option<ExprId>,
db: &dyn HirDatabase,
) {
if spread.is_some() {
return;
};
let variant_def: VariantId = match self.infer.variant_resolution_for_expr(id) {
Some(VariantId::UnionId(_)) | None => return,
Some(it) => it,
};
if let VariantId::UnionId(_) = variant_def {
return;
}
let variant_data = variant_data(db.upcast(), variant_def);
let lit_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect();
let missed_fields: Vec<Name> = variant_data
.fields()
.iter()
.filter_map(|(_f, d)| {
let name = d.name.clone();
if lit_fields.contains(&name) {
None
} else {
Some(name)
}
})
.collect();
if missed_fields.is_empty() {
return;
}
let (_, source_map) = db.body_with_source_map(self.func.into());
if let Ok(source_ptr) = source_map.expr_syntax(id) {
if let Some(expr) = source_ptr.value.left() {
let root = source_ptr.file_syntax(db.upcast());
if let ast::Expr::RecordLit(record_lit) = expr.to_node(&root) {
if let Some(field_list) = record_lit.record_field_list() {
self.sink.push(MissingFields {
file: source_ptr.file_id,
field_list: AstPtr::new(&field_list),
missed_fields,
})
}
}
}
}
}
fn validate_results_in_tail_expr(&mut self, body_id: ExprId, id: ExprId, db: &dyn HirDatabase) {
// the mismatch will be on the whole block currently
let mismatch = match self.infer.type_mismatch_for_expr(body_id) {
@@ -233,3 +197,63 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
}
}
}
pub fn record_literal_missing_fields(
db: &dyn HirDatabase,
infer: &InferenceResult,
id: ExprId,
expr: &Expr,
) -> Option<(VariantId, Vec<LocalStructFieldId>, /*exhaustive*/ bool)> {
let (fields, exhaustive) = match expr {
Expr::RecordLit { path: _, fields, spread } => (fields, spread.is_none()),
_ => return None,
};
let variant_def = infer.variant_resolution_for_expr(id)?;
if let VariantId::UnionId(_) = variant_def {
return None;
}
let variant_data = variant_data(db.upcast(), variant_def);
let specified_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect();
let missed_fields: Vec<LocalStructFieldId> = variant_data
.fields()
.iter()
.filter_map(|(f, d)| if specified_fields.contains(&d.name) { None } else { Some(f) })
.collect();
if missed_fields.is_empty() {
return None;
}
Some((variant_def, missed_fields, exhaustive))
}
pub fn record_pattern_missing_fields(
db: &dyn HirDatabase,
infer: &InferenceResult,
id: PatId,
pat: &Pat,
) -> Option<(VariantId, Vec<LocalStructFieldId>)> {
let fields = match pat {
Pat::Record { path: _, args } => args,
_ => return None,
};
let variant_def = infer.variant_resolution_for_pat(id)?;
if let VariantId::UnionId(_) = variant_def {
return None;
}
let variant_data = variant_data(db.upcast(), variant_def);
let specified_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect();
let missed_fields: Vec<LocalStructFieldId> = variant_data
.fields()
.iter()
.filter_map(|(f, d)| if specified_fields.contains(&d.name) { None } else { Some(f) })
.collect();
if missed_fields.is_empty() {
return None;
}
Some((variant_def, missed_fields))
}


@@ -1,62 +1,21 @@
//! Complete fields in record literals and patterns.
use ra_syntax::{ast, ast::NameOwner, SmolStr};
use crate::completion::{CompletionContext, Completions};
pub(super) fn complete_record(acc: &mut Completions, ctx: &CompletionContext) -> Option<()> {
let (ty, variant, already_present_fields) =
match (ctx.record_lit_pat.as_ref(), ctx.record_lit_syntax.as_ref()) {
(None, None) => return None,
(Some(_), Some(_)) => unreachable!("A record cannot be both a literal and a pattern"),
(Some(record_pat), _) => (
ctx.sema.type_of_pat(&record_pat.clone().into())?,
ctx.sema.resolve_record_pattern(record_pat)?,
pattern_ascribed_fields(record_pat),
),
(_, Some(record_lit)) => (
ctx.sema.type_of_expr(&record_lit.clone().into())?,
ctx.sema.resolve_record_literal(record_lit)?,
literal_ascribed_fields(record_lit),
),
};
let missing_fields = match (ctx.record_lit_pat.as_ref(), ctx.record_lit_syntax.as_ref()) {
(None, None) => return None,
(Some(_), Some(_)) => unreachable!("A record cannot be both a literal and a pattern"),
(Some(record_pat), _) => ctx.sema.record_pattern_missing_fields(record_pat),
(_, Some(record_lit)) => ctx.sema.record_literal_missing_fields(record_lit),
};
for (field, field_ty) in ty.variant_fields(ctx.db, variant).into_iter().filter(|(field, _)| {
// FIXME: already_present_names better be `Vec<hir::Name>`
!already_present_fields.contains(&SmolStr::from(field.name(ctx.db).to_string()))
}) {
acc.add_field(ctx, field, &field_ty);
for (field, ty) in missing_fields {
acc.add_field(ctx, field, &ty)
}
Some(())
}
fn literal_ascribed_fields(record_lit: &ast::RecordLit) -> Vec<SmolStr> {
record_lit
.record_field_list()
.map(|field_list| field_list.fields())
.map(|fields| {
fields
.into_iter()
.filter_map(|field| field.name_ref())
.map(|name_ref| name_ref.text().clone())
.collect()
})
.unwrap_or_default()
}
fn pattern_ascribed_fields(record_pat: &ast::RecordPat) -> Vec<SmolStr> {
record_pat
.record_field_pat_list()
.map(|pat_list| {
pat_list
.record_field_pats()
.filter_map(|fild_pat| fild_pat.name())
.chain(pat_list.bind_pats().filter_map(|bind_pat| bind_pat.name()))
.map(|name| name.text().clone())
.collect()
})
.unwrap_or_default()
}
#[cfg(test)]
mod tests {
mod record_pat_tests {