This commit is contained in:
Aleksey Kladov 2019-11-27 17:46:02 +03:00
parent 3686530815
commit a87579500a
36 changed files with 1603 additions and 1516 deletions

1
Cargo.lock generated
View file

@ -1023,6 +1023,7 @@ dependencies = [
name = "ra_hir_ty"
version = "0.1.0"
dependencies = [
"arrayvec 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
"chalk-ir 0.1.0 (git+https://github.com/jackh726/chalk.git?rev=095cd38a4f16337913bba487f2055b9ca0179f30)",
"chalk-rust-ir 0.1.0 (git+https://github.com/jackh726/chalk.git?rev=095cd38a4f16337913bba487f2055b9ca0179f30)",
"chalk-solve 0.1.0 (git+https://github.com/jackh726/chalk.git?rev=095cd38a4f16337913bba487f2055b9ca0179f30)",

View file

@ -6,8 +6,10 @@ use std::sync::Arc;
use hir_def::{
adt::VariantData,
body::{Body, BodySourceMap},
builtin_type::BuiltinType,
docs::Documentation,
expr::{BindingAnnotation, Pat, PatId},
per_ns::PerNs,
resolver::HasResolver,
type_ref::{Mutability, TypeRef},
@ -20,12 +22,12 @@ use hir_expand::{
name::{self, AsName},
AstId, MacroDefId,
};
use hir_ty::expr::ExprValidator;
use ra_db::{CrateId, Edition, FileId, FilePosition};
use ra_syntax::{ast, AstNode, SyntaxNode};
use crate::{
db::{DefDatabase, HirDatabase},
expr::{BindingAnnotation, Body, BodySourceMap, ExprValidator, Pat, PatId},
ty::display::HirFormatter,
ty::{
self, InEnvironment, InferenceResult, TraitEnvironment, TraitRef, Ty, TyDefId, TypeCtor,
@ -353,8 +355,8 @@ impl Struct {
.map(|(id, _)| StructField { parent: self.into(), id })
}
pub fn ty(self, db: &impl HirDatabase) -> Ty {
db.ty(self.id.into())
pub fn ty(self, db: &impl HirDatabase) -> Type {
Type::from_def(db, self.id.module(db).krate, self.id)
}
pub fn constructor_ty(self, db: &impl HirDatabase) -> Ty {
@ -380,8 +382,8 @@ impl Union {
Module { id: self.id.module(db) }
}
pub fn ty(self, db: &impl HirDatabase) -> Ty {
db.ty(self.id.into())
pub fn ty(self, db: &impl HirDatabase) -> Type {
Type::from_def(db, self.id.module(db).krate, self.id)
}
pub fn fields(self, db: &impl HirDatabase) -> Vec<StructField> {
@ -441,8 +443,8 @@ impl Enum {
.map(|(id, _)| EnumVariant { parent: self, id })
}
pub fn ty(self, db: &impl HirDatabase) -> Ty {
db.ty(self.id.into())
pub fn ty(self, db: &impl HirDatabase) -> Type {
Type::from_def(db, self.id.module(db).krate, self.id)
}
}
@ -640,7 +642,7 @@ impl Function {
pub fn diagnostics(self, db: &impl HirDatabase, sink: &mut DiagnosticSink) {
let infer = self.infer(db);
infer.add_diagnostics(db, self.id, sink);
let mut validator = ExprValidator::new(self, infer, sink);
let mut validator = ExprValidator::new(self.id, infer, sink);
validator.validate_body(db);
}
}
@ -946,13 +948,12 @@ impl ImplBlock {
db.impl_data(self.id).target_type.clone()
}
pub fn target_ty(&self, db: &impl HirDatabase) -> Ty {
Ty::from_hir(db, &self.id.resolver(db), &self.target_type(db))
}
pub fn target_trait_ref(&self, db: &impl HirDatabase) -> Option<TraitRef> {
let target_ty = self.target_ty(db);
TraitRef::from_hir(db, &self.id.resolver(db), &self.target_trait(db)?, Some(target_ty))
pub fn target_ty(&self, db: &impl HirDatabase) -> Type {
let impl_data = db.impl_data(self.id);
let resolver = self.id.resolver(db);
let environment = TraitEnvironment::lower(db, &resolver);
let ty = Ty::from_hir(db, &resolver, &impl_data.target_type);
Type { krate: self.id.module(db).krate, ty: InEnvironment { value: ty, environment } }
}
pub fn items(&self, db: &impl DefDatabase) -> Vec<AssocItem> {
@ -1130,6 +1131,22 @@ impl Type {
Some(adt.into())
}
// FIXME: provide required accessors such that it becomes implementable from outside.
pub fn is_equal_for_find_impls(&self, other: &Type) -> bool {
match (&self.ty.value, &other.ty.value) {
(Ty::Apply(a_original_ty), Ty::Apply(ty::ApplicationTy { ctor, parameters })) => {
match ctor {
TypeCtor::Ref(..) => match parameters.as_single() {
Ty::Apply(a_ty) => a_original_ty.ctor == a_ty.ctor,
_ => false,
},
_ => a_original_ty.ctor == *ctor,
}
}
_ => false,
}
}
fn derived(&self, ty: Ty) -> Type {
Type {
krate: self.krate,

View file

@ -1,18 +1,5 @@
//! FIXME: write short doc here
use std::sync::Arc;
use hir_def::{DefWithBodyId, GenericDefId, ImplId, LocalStructFieldId, TraitId, VariantId};
use ra_arena::map::ArenaMap;
use ra_db::{salsa, CrateId};
use crate::ty::{
method_resolution::CrateImplBlocks,
traits::{AssocTyValue, Impl},
CallableDef, FnSig, GenericPredicate, InferenceResult, Substs, Ty, TyDefId, TypeCtor,
ValueTyDefId,
};
pub use hir_def::db::{
BodyQuery, BodyWithSourceMapQuery, ConstDataQuery, CrateDefMapQuery, CrateLangItemsQuery,
DefDatabase, DefDatabaseStorage, DocumentationQuery, EnumDataQuery, ExprScopesQuery,
@ -24,104 +11,12 @@ pub use hir_expand::db::{
AstDatabase, AstDatabaseStorage, AstIdMapQuery, MacroArgQuery, MacroDefQuery, MacroExpandQuery,
ParseMacroQuery,
};
#[salsa::query_group(HirDatabaseStorage)]
#[salsa::requires(salsa::Database)]
pub trait HirDatabase: DefDatabase {
#[salsa::invoke(crate::ty::infer_query)]
fn infer(&self, def: DefWithBodyId) -> Arc<InferenceResult>;
#[salsa::invoke(crate::ty::ty_query)]
fn ty(&self, def: TyDefId) -> Ty;
#[salsa::invoke(crate::ty::value_ty_query)]
fn value_ty(&self, def: ValueTyDefId) -> Ty;
#[salsa::invoke(crate::ty::field_types_query)]
fn field_types(&self, var: VariantId) -> Arc<ArenaMap<LocalStructFieldId, Ty>>;
#[salsa::invoke(crate::ty::callable_item_sig)]
fn callable_item_signature(&self, def: CallableDef) -> FnSig;
#[salsa::invoke(crate::ty::generic_predicates_for_param_query)]
fn generic_predicates_for_param(
&self,
def: GenericDefId,
param_idx: u32,
) -> Arc<[GenericPredicate]>;
#[salsa::invoke(crate::ty::generic_predicates_query)]
fn generic_predicates(&self, def: GenericDefId) -> Arc<[GenericPredicate]>;
#[salsa::invoke(crate::ty::generic_defaults_query)]
fn generic_defaults(&self, def: GenericDefId) -> Substs;
#[salsa::invoke(crate::ty::method_resolution::CrateImplBlocks::impls_in_crate_query)]
fn impls_in_crate(&self, krate: CrateId) -> Arc<CrateImplBlocks>;
#[salsa::invoke(crate::ty::traits::impls_for_trait_query)]
fn impls_for_trait(&self, krate: CrateId, trait_: TraitId) -> Arc<[ImplId]>;
/// This provides the Chalk trait solver instance. Because Chalk always
/// works from a specific crate, this query is keyed on the crate; and
/// because Chalk does its own internal caching, the solver is wrapped in a
/// Mutex and the query does an untracked read internally, to make sure the
/// cached state is thrown away when input facts change.
#[salsa::invoke(crate::ty::traits::trait_solver_query)]
fn trait_solver(&self, krate: CrateId) -> crate::ty::traits::TraitSolver;
// Interned IDs for Chalk integration
#[salsa::interned]
fn intern_type_ctor(&self, type_ctor: TypeCtor) -> crate::ty::TypeCtorId;
#[salsa::interned]
fn intern_chalk_impl(&self, impl_: Impl) -> crate::ty::traits::GlobalImplId;
#[salsa::interned]
fn intern_assoc_ty_value(
&self,
assoc_ty_value: AssocTyValue,
) -> crate::ty::traits::AssocTyValueId;
#[salsa::invoke(crate::ty::traits::chalk::associated_ty_data_query)]
fn associated_ty_data(
&self,
id: chalk_ir::TypeId,
) -> Arc<chalk_rust_ir::AssociatedTyDatum<chalk_ir::family::ChalkIr>>;
#[salsa::invoke(crate::ty::traits::chalk::trait_datum_query)]
fn trait_datum(
&self,
krate: CrateId,
trait_id: chalk_ir::TraitId,
) -> Arc<chalk_rust_ir::TraitDatum<chalk_ir::family::ChalkIr>>;
#[salsa::invoke(crate::ty::traits::chalk::struct_datum_query)]
fn struct_datum(
&self,
krate: CrateId,
struct_id: chalk_ir::StructId,
) -> Arc<chalk_rust_ir::StructDatum<chalk_ir::family::ChalkIr>>;
#[salsa::invoke(crate::ty::traits::chalk::impl_datum_query)]
fn impl_datum(
&self,
krate: CrateId,
impl_id: chalk_ir::ImplId,
) -> Arc<chalk_rust_ir::ImplDatum<chalk_ir::family::ChalkIr>>;
#[salsa::invoke(crate::ty::traits::chalk::associated_ty_value_query)]
fn associated_ty_value(
&self,
krate: CrateId,
id: chalk_rust_ir::AssociatedTyValueId,
) -> Arc<chalk_rust_ir::AssociatedTyValue<chalk_ir::family::ChalkIr>>;
#[salsa::invoke(crate::ty::traits::trait_solve_query)]
fn trait_solve(
&self,
krate: CrateId,
goal: crate::ty::Canonical<crate::ty::InEnvironment<crate::ty::Obligation>>,
) -> Option<crate::ty::traits::Solution>;
}
pub use hir_ty::db::{
AssociatedTyDataQuery, CallableItemSignatureQuery, FieldTypesQuery, GenericDefaultsQuery,
GenericPredicatesQuery, HirDatabase, HirDatabaseStorage, ImplDatumQuery, ImplsForTraitQuery,
ImplsInCrateQuery, InferQuery, StructDatumQuery, TraitDatumQuery, TraitSolveQuery, TyQuery,
ValueTyQuery,
};
#[test]
fn hir_database_is_object_safe() {

View file

@ -1,93 +1,4 @@
//! FIXME: write short doc here
use std::any::Any;
use hir_expand::HirFileId;
use ra_syntax::{ast, AstNode, AstPtr, SyntaxNodePtr};
use crate::{db::AstDatabase, Name, Source};
pub use hir_def::diagnostics::UnresolvedModule;
pub use hir_expand::diagnostics::{AstDiagnostic, Diagnostic, DiagnosticSink};
#[derive(Debug)]
pub struct NoSuchField {
pub file: HirFileId,
pub field: AstPtr<ast::RecordField>,
}
impl Diagnostic for NoSuchField {
fn message(&self) -> String {
"no such field".to_string()
}
fn source(&self) -> Source<SyntaxNodePtr> {
Source { file_id: self.file, value: self.field.into() }
}
fn as_any(&self) -> &(dyn Any + Send + 'static) {
self
}
}
#[derive(Debug)]
pub struct MissingFields {
pub file: HirFileId,
pub field_list: AstPtr<ast::RecordFieldList>,
pub missed_fields: Vec<Name>,
}
impl Diagnostic for MissingFields {
fn message(&self) -> String {
use std::fmt::Write;
let mut message = String::from("Missing structure fields:\n");
for field in &self.missed_fields {
write!(message, "- {}\n", field).unwrap();
}
message
}
fn source(&self) -> Source<SyntaxNodePtr> {
Source { file_id: self.file, value: self.field_list.into() }
}
fn as_any(&self) -> &(dyn Any + Send + 'static) {
self
}
}
impl AstDiagnostic for MissingFields {
type AST = ast::RecordFieldList;
fn ast(&self, db: &impl AstDatabase) -> Self::AST {
let root = db.parse_or_expand(self.source().file_id).unwrap();
let node = self.source().value.to_node(&root);
ast::RecordFieldList::cast(node).unwrap()
}
}
#[derive(Debug)]
pub struct MissingOkInTailExpr {
pub file: HirFileId,
pub expr: AstPtr<ast::Expr>,
}
impl Diagnostic for MissingOkInTailExpr {
fn message(&self) -> String {
"wrap return expression in Ok".to_string()
}
fn source(&self) -> Source<SyntaxNodePtr> {
Source { file_id: self.file, value: self.expr.into() }
}
fn as_any(&self) -> &(dyn Any + Send + 'static) {
self
}
}
impl AstDiagnostic for MissingOkInTailExpr {
type AST = ast::Expr;
fn ast(&self, db: &impl AstDatabase) -> Self::AST {
let root = db.parse_or_expand(self.file).unwrap();
let node = self.source().value.to_node(&root);
ast::Expr::cast(node).unwrap()
}
}
pub use hir_ty::diagnostics::{MissingFields, MissingOkInTailExpr, NoSuchField};

View file

@ -32,20 +32,13 @@ pub mod db;
pub mod source_binder;
mod ty;
mod expr;
pub mod diagnostics;
mod util;
mod from_id;
mod code_model;
pub mod from_source;
#[cfg(test)]
mod test_db;
#[cfg(test)]
mod marks;
pub use crate::{
code_model::{
src::HasSource, Adt, AssocItem, AttrDef, Const, Container, Crate, CrateDependency,
@ -53,7 +46,6 @@ pub use crate::{
HasAttrs, ImplBlock, Import, Local, MacroDef, Module, ModuleDef, ModuleSource, ScopeDef,
Static, Struct, StructField, Trait, Type, TypeAlias, Union, VariantDef,
},
expr::ExprScopes,
from_source::FromSource,
source_binder::{PathResolution, ScopeEntryWithSyntax, SourceAnalyzer},
ty::{
@ -64,6 +56,7 @@ pub use crate::{
};
pub use hir_def::{
body::scope::ExprScopes,
builtin_type::BuiltinType,
docs::Documentation,
path::{Path, PathKind},

View file

@ -8,6 +8,10 @@
use std::sync::Arc;
use hir_def::{
body::{
scope::{ExprScopes, ScopeId},
BodySourceMap,
},
expr::{ExprId, PatId},
path::known,
resolver::{self, resolver_for_scope, HasResolver, Resolver, TypeNs, ValueNs},
@ -25,7 +29,6 @@ use ra_syntax::{
use crate::{
db::HirDatabase,
expr::{BodySourceMap, ExprScopes, ScopeId},
ty::{
method_resolution::{self, implements_trait},
InEnvironment, TraitEnvironment, Ty,
@ -91,7 +94,7 @@ pub struct SourceAnalyzer {
body_owner: Option<DefWithBody>,
body_source_map: Option<Arc<BodySourceMap>>,
infer: Option<Arc<crate::ty::InferenceResult>>,
scopes: Option<Arc<crate::expr::ExprScopes>>,
scopes: Option<Arc<ExprScopes>>,
}
#[derive(Debug, Clone, PartialEq, Eq)]
@ -455,21 +458,6 @@ impl SourceAnalyzer {
macro_file_kind: to_macro_file_kind(macro_call.value),
})
}
#[cfg(test)]
pub(crate) fn body_source_map(&self) -> Arc<BodySourceMap> {
self.body_source_map.clone().unwrap()
}
#[cfg(test)]
pub(crate) fn inference_result(&self) -> Arc<crate::ty::InferenceResult> {
self.infer.clone().unwrap()
}
#[cfg(test)]
pub(crate) fn analyzed_declaration(&self) -> Option<DefWithBody> {
self.body_owner
}
}
fn scope_for(

File diff suppressed because it is too large Load diff

View file

@ -1,3 +0,0 @@
//! FIXME: write short doc here
pub use hir_ty::primitive::{FloatBitness, FloatTy, IntBitness, IntTy, Signedness, Uncertain};

View file

@ -1,12 +0,0 @@
//! Internal utility functions.
use std::sync::Arc;
/// Helper for mutating `Arc<[T]>` (i.e. `Arc::make_mut` for Arc slices).
/// The underlying values are cloned if there are other strong references.
pub(crate) fn make_mut_slice<T: Clone>(a: &mut Arc<[T]>) -> &mut [T] {
    // `Arc::get_mut` only succeeds while we hold the sole strong reference;
    // if the allocation is shared, copy the elements into a fresh,
    // uniquely-owned `Arc<[T]>` first (copy-on-write).
    let is_shared = Arc::get_mut(a).is_none();
    if is_shared {
        let fresh: Arc<[T]> = a.iter().cloned().collect();
        *a = fresh;
    }
    // Unique ownership is guaranteed by the branch above.
    Arc::get_mut(a).unwrap()
}

View file

@ -27,7 +27,7 @@ pub mod body;
pub mod resolver;
mod trace;
mod nameres;
pub mod nameres;
#[cfg(test)]
mod test_db;

View file

@ -8,6 +8,7 @@ authors = ["rust-analyzer developers"]
doctest = false
[dependencies]
arrayvec = "0.5.1"
log = "0.4.5"
rustc-hash = "1.0"
parking_lot = "0.10.0"

View file

@ -19,7 +19,7 @@ use super::{
const AUTODEREF_RECURSION_LIMIT: usize = 10;
pub(crate) fn autoderef<'a>(
pub fn autoderef<'a>(
db: &'a impl HirDatabase,
krate: Option<CrateId>,
ty: InEnvironment<Canonical<Ty>>,

116
crates/ra_hir_ty/src/db.rs Normal file
View file

@ -0,0 +1,116 @@
//! FIXME: write short doc here
use std::sync::Arc;
use hir_def::{
db::DefDatabase, DefWithBodyId, GenericDefId, ImplId, LocalStructFieldId, TraitId, VariantId,
};
use ra_arena::map::ArenaMap;
use ra_db::{salsa, CrateId};
use crate::{
method_resolution::CrateImplBlocks,
traits::{AssocTyValue, Impl},
CallableDef, FnSig, GenericPredicate, InferenceResult, Substs, Ty, TyDefId, TypeCtor,
ValueTyDefId,
};
/// The core type-system query group of `ra_hir_ty`, built on salsa.
///
/// Every `fn` below is a memoized query (or, with `#[salsa::interned]`, an
/// interning table); `#[salsa::invoke(...)]` names the free function that
/// actually computes the value.
#[salsa::query_group(HirDatabaseStorage)]
#[salsa::requires(salsa::Database)]
pub trait HirDatabase: DefDatabase {
/// Runs type inference over a single body (function, const, ...).
#[salsa::invoke(crate::infer_query)]
fn infer(&self, def: DefWithBodyId) -> Arc<InferenceResult>;
/// The lowered type of a type-level definition.
#[salsa::invoke(crate::lower::ty_query)]
fn ty(&self, def: TyDefId) -> Ty;
/// The lowered type of a value-level definition.
#[salsa::invoke(crate::lower::value_ty_query)]
fn value_ty(&self, def: ValueTyDefId) -> Ty;
/// Lowered types of all fields of the given variant (struct/enum/union).
#[salsa::invoke(crate::lower::field_types_query)]
fn field_types(&self, var: VariantId) -> Arc<ArenaMap<LocalStructFieldId, Ty>>;
/// Call signature (parameters and return type) of a callable item.
#[salsa::invoke(crate::callable_item_sig)]
fn callable_item_signature(&self, def: CallableDef) -> FnSig;
/// Where-clause predicates that constrain the type parameter at `param_idx`.
#[salsa::invoke(crate::lower::generic_predicates_for_param_query)]
fn generic_predicates_for_param(
&self,
def: GenericDefId,
param_idx: u32,
) -> Arc<[GenericPredicate]>;
/// All where-clause predicates of the given generic item.
#[salsa::invoke(crate::lower::generic_predicates_query)]
fn generic_predicates(&self, def: GenericDefId) -> Arc<[GenericPredicate]>;
/// Defaults of the item's type parameters, packaged as a substitution.
#[salsa::invoke(crate::lower::generic_defaults_query)]
fn generic_defaults(&self, def: GenericDefId) -> Substs;
/// Index of all impl blocks defined in `krate`.
#[salsa::invoke(crate::method_resolution::CrateImplBlocks::impls_in_crate_query)]
fn impls_in_crate(&self, krate: CrateId) -> Arc<CrateImplBlocks>;
/// Impls of `trait_` reachable from `krate`.
#[salsa::invoke(crate::traits::impls_for_trait_query)]
fn impls_for_trait(&self, krate: CrateId, trait_: TraitId) -> Arc<[ImplId]>;
/// This provides the Chalk trait solver instance. Because Chalk always
/// works from a specific crate, this query is keyed on the crate; and
/// because Chalk does its own internal caching, the solver is wrapped in a
/// Mutex and the query does an untracked read internally, to make sure the
/// cached state is thrown away when input facts change.
#[salsa::invoke(crate::traits::trait_solver_query)]
fn trait_solver(&self, krate: CrateId) -> crate::traits::TraitSolver;
// Interned IDs for Chalk integration
#[salsa::interned]
fn intern_type_ctor(&self, type_ctor: TypeCtor) -> crate::TypeCtorId;
#[salsa::interned]
fn intern_chalk_impl(&self, impl_: Impl) -> crate::traits::GlobalImplId;
#[salsa::interned]
fn intern_assoc_ty_value(&self, assoc_ty_value: AssocTyValue) -> crate::traits::AssocTyValueId;
/// Chalk-side datum describing an associated type declaration.
#[salsa::invoke(crate::traits::chalk::associated_ty_data_query)]
fn associated_ty_data(
&self,
id: chalk_ir::TypeId,
) -> Arc<chalk_rust_ir::AssociatedTyDatum<chalk_ir::family::ChalkIr>>;
/// Chalk-side datum for a trait, as seen from `krate`.
#[salsa::invoke(crate::traits::chalk::trait_datum_query)]
fn trait_datum(
&self,
krate: CrateId,
trait_id: chalk_ir::TraitId,
) -> Arc<chalk_rust_ir::TraitDatum<chalk_ir::family::ChalkIr>>;
/// Chalk-side datum for a struct, as seen from `krate`.
#[salsa::invoke(crate::traits::chalk::struct_datum_query)]
fn struct_datum(
&self,
krate: CrateId,
struct_id: chalk_ir::StructId,
) -> Arc<chalk_rust_ir::StructDatum<chalk_ir::family::ChalkIr>>;
/// Chalk-side datum for an impl, as seen from `krate`.
#[salsa::invoke(crate::traits::chalk::impl_datum_query)]
fn impl_datum(
&self,
krate: CrateId,
impl_id: chalk_ir::ImplId,
) -> Arc<chalk_rust_ir::ImplDatum<chalk_ir::family::ChalkIr>>;
/// Chalk-side value of an associated type inside an impl.
#[salsa::invoke(crate::traits::chalk::associated_ty_value_query)]
fn associated_ty_value(
&self,
krate: CrateId,
id: chalk_rust_ir::AssociatedTyValueId,
) -> Arc<chalk_rust_ir::AssociatedTyValue<chalk_ir::family::ChalkIr>>;
/// Solves a canonicalized trait goal within `krate`; `None` means no solution.
#[salsa::invoke(crate::traits::trait_solve_query)]
fn trait_solve(
&self,
krate: CrateId,
goal: crate::Canonical<crate::InEnvironment<crate::Obligation>>,
) -> Option<crate::traits::Solution>;
}
#[test]
fn hir_database_is_object_safe() {
    // Compile-time check only: this function body type-checks exactly when
    // `HirDatabase` can be used as a trait object (`dyn HirDatabase`).
    fn _probe(_db: &dyn HirDatabase) {}
}

View file

@ -0,0 +1,91 @@
//! FIXME: write short doc here
use std::any::Any;
use hir_expand::{db::AstDatabase, name::Name, HirFileId, Source};
use ra_syntax::{ast, AstNode, AstPtr, SyntaxNodePtr};
pub use hir_def::diagnostics::UnresolvedModule;
pub use hir_expand::diagnostics::{AstDiagnostic, Diagnostic, DiagnosticSink};
/// Diagnostic: a record literal mentions a field that the target
/// struct/variant does not declare.
#[derive(Debug)]
pub struct NoSuchField {
    pub file: HirFileId,
    pub field: AstPtr<ast::RecordField>,
}

impl Diagnostic for NoSuchField {
    fn message(&self) -> String {
        String::from("no such field")
    }

    fn source(&self) -> Source<SyntaxNodePtr> {
        // Point the diagnostic at the offending record field.
        let value = self.field.into();
        Source { file_id: self.file, value }
    }

    fn as_any(&self) -> &(dyn Any + Send + 'static) {
        self
    }
}
/// Diagnostic: a record literal does not initialize all declared fields.
#[derive(Debug)]
pub struct MissingFields {
    pub file: HirFileId,
    pub field_list: AstPtr<ast::RecordFieldList>,
    /// Names of the fields left uninitialized by the literal.
    pub missed_fields: Vec<Name>,
}

impl Diagnostic for MissingFields {
    fn message(&self) -> String {
        // One bullet line per missing field.
        let mut buf = "Missing structure fields:\n".to_string();
        for field in &self.missed_fields {
            buf.push_str(&format!("- {}\n", field));
        }
        buf
    }

    fn source(&self) -> Source<SyntaxNodePtr> {
        let value = self.field_list.into();
        Source { file_id: self.file, value }
    }

    fn as_any(&self) -> &(dyn Any + Send + 'static) {
        self
    }
}

impl AstDiagnostic for MissingFields {
    type AST = ast::RecordFieldList;

    fn ast(&self, db: &impl AstDatabase) -> Self::AST {
        // Re-parse (or re-expand) the file and resolve the stored pointer
        // back to a concrete syntax node.
        let source = self.source();
        let root = db.parse_or_expand(source.file_id).unwrap();
        ast::RecordFieldList::cast(source.value.to_node(&root)).unwrap()
    }
}
/// Diagnostic: the tail expression of a `Result`-returning body should be
/// wrapped in `Ok(...)`.
#[derive(Debug)]
pub struct MissingOkInTailExpr {
    pub file: HirFileId,
    pub expr: AstPtr<ast::Expr>,
}

impl Diagnostic for MissingOkInTailExpr {
    fn message(&self) -> String {
        String::from("wrap return expression in Ok")
    }

    fn source(&self) -> Source<SyntaxNodePtr> {
        let value = self.expr.into();
        Source { file_id: self.file, value }
    }

    fn as_any(&self) -> &(dyn Any + Send + 'static) {
        self
    }
}

impl AstDiagnostic for MissingOkInTailExpr {
    type AST = ast::Expr;

    fn ast(&self, db: &impl AstDatabase) -> Self::AST {
        // NOTE: uses `self.file` directly (matching the original), rather
        // than `self.source().file_id` as the other impls do.
        let root = db.parse_or_expand(self.file).unwrap();
        ast::Expr::cast(self.source().value.to_node(&root)).unwrap()
    }
}

View file

@ -2,8 +2,12 @@
use std::sync::Arc;
use hir_def::{path::known, resolver::HasResolver, AdtId};
use hir_expand::diagnostics::DiagnosticSink;
use hir_def::{
path::{known, Path},
resolver::HasResolver,
AdtId, FunctionId,
};
use hir_expand::{diagnostics::DiagnosticSink, name::Name};
use ra_syntax::ast;
use ra_syntax::AstPtr;
use rustc_hash::FxHashSet;
@ -11,8 +15,7 @@ use rustc_hash::FxHashSet;
use crate::{
db::HirDatabase,
diagnostics::{MissingFields, MissingOkInTailExpr},
ty::{ApplicationTy, InferenceResult, Ty, TypeCtor},
Function, Name, Path, Struct,
ApplicationTy, InferenceResult, Ty, TypeCtor,
};
pub use hir_def::{
@ -26,23 +29,23 @@ pub use hir_def::{
},
};
pub(crate) struct ExprValidator<'a, 'b: 'a> {
func: Function,
pub struct ExprValidator<'a, 'b: 'a> {
func: FunctionId,
infer: Arc<InferenceResult>,
sink: &'a mut DiagnosticSink<'b>,
}
impl<'a, 'b> ExprValidator<'a, 'b> {
pub(crate) fn new(
func: Function,
pub fn new(
func: FunctionId,
infer: Arc<InferenceResult>,
sink: &'a mut DiagnosticSink<'b>,
) -> ExprValidator<'a, 'b> {
ExprValidator { func, infer, sink }
}
pub(crate) fn validate_body(&mut self, db: &impl HirDatabase) {
let body = self.func.body(db);
pub fn validate_body(&mut self, db: &impl HirDatabase) {
let body = db.body(self.func.into());
for e in body.exprs.iter() {
if let (id, Expr::RecordLit { path, fields, spread }) = e {
@ -69,16 +72,18 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
}
let struct_def = match self.infer[id].as_adt() {
Some((AdtId::StructId(s), _)) => Struct::from(s),
Some((AdtId::StructId(s), _)) => s,
_ => return,
};
let struct_data = db.struct_data(struct_def);
let lit_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect();
let missed_fields: Vec<Name> = struct_def
.fields(db)
let missed_fields: Vec<Name> = struct_data
.variant_data
.fields()
.iter()
.filter_map(|f| {
let name = f.name(db);
.filter_map(|(_f, d)| {
let name = d.name.clone();
if lit_fields.contains(&name) {
None
} else {
@ -89,7 +94,7 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
if missed_fields.is_empty() {
return;
}
let source_map = self.func.body_source_map(db);
let (_, source_map) = db.body_with_source_map(self.func.into());
if let Some(source_ptr) = source_map.expr_syntax(id) {
if let Some(expr) = source_ptr.value.a() {
@ -121,7 +126,7 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
let std_result_path = known::std_result_result();
let resolver = self.func.id.resolver(db);
let resolver = self.func.resolver(db);
let std_result_enum = match resolver.resolve_known_enum(db, &std_result_path) {
Some(it) => it,
_ => return,
@ -134,7 +139,7 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
};
if params.len() == 2 && &params[0] == &mismatch.actual {
let source_map = self.func.body_source_map(db);
let (_, source_map) = db.body_with_source_map(self.func.into());
if let Some(source_ptr) = source_map.expr_syntax(id) {
if let Some(expr) = source_ptr.value.a() {

View file

@ -41,11 +41,11 @@ use super::{
ApplicationTy, InEnvironment, ProjectionTy, Substs, TraitEnvironment, TraitRef, Ty, TypeCtor,
TypeWalk, Uncertain,
};
use crate::{db::HirDatabase, ty::infer::diagnostics::InferenceDiagnostic};
use crate::{db::HirDatabase, infer::diagnostics::InferenceDiagnostic};
macro_rules! ty_app {
($ctor:pat, $param:pat) => {
crate::ty::Ty::Apply(crate::ty::ApplicationTy { ctor: $ctor, parameters: $param })
crate::Ty::Apply(crate::ApplicationTy { ctor: $ctor, parameters: $param })
};
($ctor:pat) => {
ty_app!($ctor, _)
@ -128,8 +128,8 @@ pub struct InferenceResult {
/// For each associated item record what it resolves to
assoc_resolutions: FxHashMap<ExprOrPatId, AssocItemId>,
diagnostics: Vec<InferenceDiagnostic>,
pub(super) type_of_expr: ArenaMap<ExprId, Ty>,
pub(super) type_of_pat: ArenaMap<PatId, Ty>,
pub type_of_expr: ArenaMap<ExprId, Ty>,
pub type_of_pat: ArenaMap<PatId, Ty>,
pub(super) type_mismatches: ArenaMap<ExprId, TypeMismatch>,
}
@ -158,7 +158,7 @@ impl InferenceResult {
pub fn type_mismatch_for_expr(&self, expr: ExprId) -> Option<&TypeMismatch> {
self.type_mismatches.get(expr)
}
pub(crate) fn add_diagnostics(
pub fn add_diagnostics(
&self,
db: &impl HirDatabase,
owner: FunctionId,

View file

@ -13,10 +13,7 @@ use hir_def::{
use rustc_hash::FxHashMap;
use test_utils::tested_by;
use crate::{
db::HirDatabase,
ty::{autoderef, Substs, TraitRef, Ty, TypeCtor, TypeWalk},
};
use crate::{autoderef, db::HirDatabase, Substs, TraitRef, Ty, TypeCtor, TypeWalk};
use super::{InEnvironment, InferTy, InferenceContext, TypeVarValue};

View file

@ -14,12 +14,9 @@ use hir_def::{
use hir_expand::name::{self, Name};
use crate::{
db::HirDatabase,
ty::{
autoderef, method_resolution, op, traits::InEnvironment, utils::variant_data, CallableDef,
InferTy, IntTy, Mutability, Obligation, ProjectionPredicate, ProjectionTy, Substs,
TraitRef, Ty, TypeCtor, TypeWalk, Uncertain,
},
autoderef, db::HirDatabase, method_resolution, op, traits::InEnvironment, utils::variant_data,
CallableDef, InferTy, IntTy, Mutability, Obligation, ProjectionPredicate, ProjectionTy, Substs,
TraitRef, Ty, TypeCtor, TypeWalk, Uncertain,
};
use super::{BindingMode, Expectation, InferenceContext, InferenceDiagnostic, TypeMismatch};

View file

@ -12,10 +12,7 @@ use hir_expand::name::Name;
use test_utils::tested_by;
use super::{BindingMode, InferenceContext};
use crate::{
db::HirDatabase,
ty::{utils::variant_data, Substs, Ty, TypeCtor, TypeWalk},
};
use crate::{db::HirDatabase, utils::variant_data, Substs, Ty, TypeCtor, TypeWalk};
impl<'a, D: HirDatabase> InferenceContext<'a, D> {
fn infer_tuple_struct_pat(

View file

@ -1,16 +1,13 @@
//! Path expression resolution.
use hir_def::{
path::{Path, PathSegment},
path::{Path, PathKind, PathSegment},
resolver::{HasResolver, ResolveValueResult, Resolver, TypeNs, ValueNs},
AssocItemId, ContainerId, Lookup,
};
use hir_expand::name::Name;
use crate::{
db::HirDatabase,
ty::{method_resolution, Substs, Ty, TypeWalk, ValueTyDefId},
};
use crate::{db::HirDatabase, method_resolution, Substs, Ty, TypeWalk, ValueTyDefId};
use super::{ExprOrPatId, InferenceContext, TraitRef};
@ -33,7 +30,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
path: &Path,
id: ExprOrPatId,
) -> Option<Ty> {
let (value, self_subst) = if let crate::PathKind::Type(type_ref) = &path.kind {
let (value, self_subst) = if let PathKind::Type(type_ref) = &path.kind {
if path.segments.is_empty() {
// This can't actually happen syntax-wise
return None;

View file

@ -2,12 +2,8 @@
use super::{InferenceContext, Obligation};
use crate::{
db::HirDatabase,
ty::{
Canonical, InEnvironment, InferTy, ProjectionPredicate, ProjectionTy, Substs, TraitRef, Ty,
TypeWalk,
},
util::make_mut_slice,
db::HirDatabase, utils::make_mut_slice, Canonical, InEnvironment, InferTy, ProjectionPredicate,
ProjectionTy, Substs, TraitRef, Ty, TypeWalk,
};
impl<'a, D: HirDatabase> InferenceContext<'a, D> {

File diff suppressed because it is too large Load diff

View file

@ -11,7 +11,7 @@ use std::sync::Arc;
use hir_def::{
builtin_type::BuiltinType,
generics::WherePredicate,
path::{GenericArg, Path, PathSegment},
path::{GenericArg, Path, PathKind, PathSegment},
resolver::{HasResolver, Resolver, TypeNs},
type_ref::{TypeBound, TypeRef},
AdtId, AstItemDef, ConstId, EnumId, EnumVariantId, FunctionId, GenericDefId, HasModule,
@ -26,15 +26,13 @@ use super::{
};
use crate::{
db::HirDatabase,
ty::{
primitive::{FloatTy, IntTy},
utils::{all_super_traits, associated_type_by_name_including_super_traits, variant_data},
},
util::make_mut_slice,
primitive::{FloatTy, IntTy},
utils::make_mut_slice,
utils::{all_super_traits, associated_type_by_name_including_super_traits, variant_data},
};
impl Ty {
pub(crate) fn from_hir(db: &impl HirDatabase, resolver: &Resolver, type_ref: &TypeRef) -> Self {
pub fn from_hir(db: &impl HirDatabase, resolver: &Resolver, type_ref: &TypeRef) -> Self {
match type_ref {
TypeRef::Never => Ty::simple(TypeCtor::Never),
TypeRef::Tuple(inner) => {
@ -103,7 +101,7 @@ impl Ty {
TypeRef::Path(path) => path,
_ => return None,
};
if let crate::PathKind::Type(_) = &path.kind {
if let PathKind::Type(_) = &path.kind {
return None;
}
if path.segments.len() > 1 {
@ -204,7 +202,7 @@ impl Ty {
pub(crate) fn from_hir_path(db: &impl HirDatabase, resolver: &Resolver, path: &Path) -> Ty {
// Resolve the path (in type namespace)
if let crate::PathKind::Type(type_ref) = &path.kind {
if let PathKind::Type(type_ref) = &path.kind {
let ty = Ty::from_hir(db, resolver, &type_ref);
let remaining_segments = &path.segments[..];
return Ty::from_type_relative_path(db, resolver, ty, remaining_segments);
@ -421,7 +419,7 @@ impl TraitRef {
substs_from_path_segment(db, resolver, segment, Some(resolved.into()), !has_self_param)
}
pub(crate) fn for_trait(db: &impl HirDatabase, trait_: TraitId) -> TraitRef {
pub fn for_trait(db: &impl HirDatabase, trait_: TraitId) -> TraitRef {
let substs = Substs::identity(&db.generic_params(trait_.into()));
TraitRef { trait_, substs }
}
@ -495,7 +493,7 @@ fn assoc_type_bindings_from_type_bound<'a>(
}
/// Build the signature of a callable item (function, struct or enum variant).
pub(crate) fn callable_item_sig(db: &impl HirDatabase, def: CallableDef) -> FnSig {
pub fn callable_item_sig(db: &impl HirDatabase, def: CallableDef) -> FnSig {
match def {
CallableDef::FunctionId(f) => fn_sig_for_fn(db, f),
CallableDef::StructId(s) => fn_sig_for_struct_constructor(db, s),
@ -544,7 +542,7 @@ pub(crate) fn generic_predicates_for_param_query(
}
impl TraitEnvironment {
pub(crate) fn lower(db: &impl HirDatabase, resolver: &Resolver) -> Arc<TraitEnvironment> {
pub fn lower(db: &impl HirDatabase, resolver: &Resolver) -> Arc<TraitEnvironment> {
let predicates = resolver
.where_predicates_in_scope()
.flat_map(|pred| GenericPredicate::from_where_predicate(db, &resolver, pred))

View file

@ -16,8 +16,9 @@ use rustc_hash::FxHashMap;
use crate::{
db::HirDatabase,
ty::primitive::{FloatBitness, Uncertain},
ty::{utils::all_super_traits, Ty, TypeCtor},
primitive::{FloatBitness, Uncertain},
utils::all_super_traits,
Ty, TypeCtor,
};
use super::{autoderef, Canonical, InEnvironment, TraitEnvironment, TraitRef};
@ -97,7 +98,7 @@ impl CrateImplBlocks {
}
impl Ty {
pub(crate) fn def_crates(
pub fn def_crates(
&self,
db: &impl HirDatabase,
cur_crate: CrateId,
@ -176,7 +177,7 @@ pub enum LookupMode {
// This would be nicer if it just returned an iterator, but that runs into
// lifetime problems, because we need to borrow temp `CrateImplBlocks`.
// FIXME add a context type here?
pub(crate) fn iterate_method_candidates<T>(
pub fn iterate_method_candidates<T>(
ty: &Canonical<Ty>,
db: &impl HirDatabase,
resolver: &Resolver,
@ -323,7 +324,7 @@ fn is_valid_candidate(
}
}
pub(crate) fn implements_trait(
pub fn implements_trait(
ty: &Canonical<Ty>,
db: &impl HirDatabase,
resolver: &Resolver,

View file

@ -2,7 +2,7 @@
use hir_def::expr::{BinaryOp, CmpOp};
use super::{InferTy, Ty, TypeCtor};
use crate::ty::ApplicationTy;
use crate::ApplicationTy;
pub(super) fn binary_op_return_ty(op: BinaryOp, rhs_ty: Ty) -> Ty {
match op {

View file

@ -2,20 +2,20 @@
use std::{panic, sync::Arc};
use hir_def::{db::DefDatabase, ModuleId};
use hir_def::{db::DefDatabase, AssocItemId, ModuleDefId, ModuleId};
use hir_expand::diagnostics::DiagnosticSink;
use parking_lot::Mutex;
use ra_db::{salsa, CrateId, FileId, FileLoader, FileLoaderDelegate, RelativePath, SourceDatabase};
use crate::{db, debug::HirDebugHelper};
use crate::{db::HirDatabase, expr::ExprValidator};
#[salsa::database(
ra_db::SourceDatabaseExtStorage,
ra_db::SourceDatabaseStorage,
db::InternDatabaseStorage,
db::AstDatabaseStorage,
db::DefDatabaseStorage,
db::HirDatabaseStorage
hir_expand::db::AstDatabaseStorage,
hir_def::db::InternDatabaseStorage,
hir_def::db::DefDatabaseStorage,
crate::db::HirDatabaseStorage
)]
#[derive(Debug, Default)]
pub struct TestDB {
@ -67,32 +67,53 @@ impl FileLoader for TestDB {
}
}
// FIXME: improve `WithFixture` to bring useful hir debugging back
impl HirDebugHelper for TestDB {
fn crate_name(&self, _krate: CrateId) -> Option<String> {
None
}
fn file_path(&self, _file_id: FileId) -> Option<String> {
None
}
}
impl TestDB {
/// Looks up the module whose defining file is `file_id`.
///
/// Scans every crate the file belongs to and returns the first module
/// whose `definition` file matches. Panics when no module owns the file,
/// which indicates a broken test fixture.
pub fn module_for_file(&self, file_id: FileId) -> ModuleId {
    self.relevant_crates(file_id)
        .iter()
        .find_map(|&krate| {
            let def_map = self.crate_def_map(krate);
            let (module_id, _) =
                def_map.modules.iter().find(|(_, data)| data.definition == Some(file_id))?;
            Some(ModuleId { krate, module_id })
        })
        .expect("Can't find module for file")
}
// FIXME: don't duplicate this
pub fn diagnostics(&self) -> String {
let mut buf = String::new();
let crate_graph = self.crate_graph();
for krate in crate_graph.iter().next() {
let crate_def_map = self.crate_def_map(krate);
let mut fns = Vec::new();
for (module_id, _) in crate_def_map.modules.iter() {
let module_id = ModuleId { krate, module_id };
let module = crate::Module::from(module_id);
module.diagnostics(
self,
&mut DiagnosticSink::new(|d| {
buf += &format!("{:?}: {}\n", d.syntax_node(self).text(), d.message());
}),
)
for decl in crate_def_map[module_id].scope.declarations() {
match decl {
ModuleDefId::FunctionId(f) => fns.push(f),
_ => (),
}
}
for &impl_id in crate_def_map[module_id].impls.iter() {
let impl_data = self.impl_data(impl_id);
for item in impl_data.items.iter() {
if let AssocItemId::FunctionId(f) = item {
fns.push(*f)
}
}
}
}
for f in fns {
let infer = self.infer(f.into());
let mut sink = DiagnosticSink::new(|d| {
buf += &format!("{:?}: {}\n", d.syntax_node(self).text(), d.message());
});
infer.add_diagnostics(self, f, &mut sink);
let mut validator = ExprValidator::new(f, infer, &mut sink);
validator.validate_body(self);
}
}
buf

View file

@ -4,20 +4,20 @@ mod coercion;
use std::fmt::Write;
use std::sync::Arc;
use hir_def::{
body::BodySourceMap, db::DefDatabase, nameres::CrateDefMap, AssocItemId, DefWithBodyId,
LocalModuleId, Lookup, ModuleDefId,
};
use hir_expand::Source;
use insta::assert_snapshot;
use ra_db::{fixture::WithFixture, salsa::Database, FilePosition, SourceDatabase};
use ra_syntax::{
algo,
ast::{self, AstNode},
SyntaxKind::*,
};
use rustc_hash::FxHashSet;
use test_utils::covers;
use crate::{
expr::BodySourceMap, test_db::TestDB, ty::display::HirDisplay, ty::InferenceResult, Source,
SourceAnalyzer,
};
use crate::{db::HirDatabase, display::HirDisplay, test_db::TestDB, InferenceResult};
// These tests compare the inference results for all expressions in a file
// against snapshots of the expected results using insta. Use cargo-insta to
@ -4674,10 +4674,20 @@ fn test<T, U>() where T: Trait<U::Item>, U: Trait<T::Item> {
fn type_at_pos(db: &TestDB, pos: FilePosition) -> String {
let file = db.parse(pos.file_id).ok().unwrap();
let expr = algo::find_node_at_offset::<ast::Expr>(file.syntax(), pos.offset).unwrap();
let analyzer =
SourceAnalyzer::new(db, Source::new(pos.file_id.into(), expr.syntax()), Some(pos.offset));
let ty = analyzer.type_of(db, &expr).unwrap();
ty.display(db).to_string()
let module = db.module_for_file(pos.file_id);
let crate_def_map = db.crate_def_map(module.krate);
for decl in crate_def_map[module.module_id].scope.declarations() {
if let ModuleDefId::FunctionId(func) = decl {
let (_body, source_map) = db.body_with_source_map(func.into());
if let Some(expr_id) = source_map.node_expr(Source::new(pos.file_id.into(), &expr)) {
let infer = db.infer(func.into());
let ty = &infer[expr_id];
return ty.display(db).to_string();
}
}
}
panic!("Can't find expression")
}
fn type_at(content: &str) -> String {
@ -4687,7 +4697,6 @@ fn type_at(content: &str) -> String {
fn infer(content: &str) -> String {
let (db, file_id) = TestDB::with_single_file(content);
let source_file = db.parse(file_id).ok().unwrap();
let mut acc = String::new();
@ -4740,20 +4749,69 @@ fn infer(content: &str) -> String {
}
};
let mut analyzed = FxHashSet::default();
for node in source_file.syntax().descendants() {
if node.kind() == FN_DEF || node.kind() == CONST_DEF || node.kind() == STATIC_DEF {
let analyzer = SourceAnalyzer::new(&db, Source::new(file_id.into(), &node), None);
if analyzed.insert(analyzer.analyzed_declaration()) {
infer_def(analyzer.inference_result(), analyzer.body_source_map());
}
let module = db.module_for_file(file_id);
let crate_def_map = db.crate_def_map(module.krate);
let mut defs: Vec<DefWithBodyId> = Vec::new();
visit_module(&db, &crate_def_map, module.module_id, &mut |it| defs.push(it));
defs.sort_by_key(|def| match def {
DefWithBodyId::FunctionId(it) => {
it.lookup(&db).ast_id.to_node(&db).syntax().text_range().start()
}
DefWithBodyId::ConstId(it) => {
it.lookup(&db).ast_id.to_node(&db).syntax().text_range().start()
}
DefWithBodyId::StaticId(it) => {
it.lookup(&db).ast_id.to_node(&db).syntax().text_range().start()
}
});
for def in defs {
let (_body, source_map) = db.body_with_source_map(def);
let infer = db.infer(def);
infer_def(infer, source_map);
}
acc.truncate(acc.trim_end().len());
acc
}
fn visit_module(
db: &TestDB,
crate_def_map: &CrateDefMap,
module_id: LocalModuleId,
cb: &mut dyn FnMut(DefWithBodyId),
) {
for decl in crate_def_map[module_id].scope.declarations() {
match decl {
ModuleDefId::FunctionId(it) => cb(it.into()),
ModuleDefId::ConstId(it) => cb(it.into()),
ModuleDefId::StaticId(it) => cb(it.into()),
ModuleDefId::TraitId(it) => {
let trait_data = db.trait_data(it);
for &(_, item) in trait_data.items.iter() {
match item {
AssocItemId::FunctionId(it) => cb(it.into()),
AssocItemId::ConstId(it) => cb(it.into()),
AssocItemId::TypeAliasId(_) => (),
}
}
}
ModuleDefId::ModuleId(it) => visit_module(db, crate_def_map, it.module_id, cb),
_ => (),
}
}
for &impl_id in crate_def_map[module_id].impls.iter() {
let impl_data = db.impl_data(impl_id);
for &item in impl_data.items.iter() {
match item {
AssocItemId::FunctionId(it) => cb(it.into()),
AssocItemId::ConstId(it) => cb(it.into()),
AssocItemId::TypeAliasId(_) => (),
}
}
}
}
fn ellipsize(mut text: String, max_len: usize) -> String {
if text.len() <= max_len {
return text;
@ -4783,10 +4841,12 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
",
);
{
let file = db.parse(pos.file_id).ok().unwrap();
let node = file.syntax().token_at_offset(pos.offset).right_biased().unwrap().parent();
let events = db.log_executed(|| {
SourceAnalyzer::new(&db, Source::new(pos.file_id.into(), &node), None);
let module = db.module_for_file(pos.file_id);
let crate_def_map = db.crate_def_map(module.krate);
visit_module(&db, &crate_def_map, module.module_id, &mut |def| {
db.infer(def);
});
});
assert!(format!("{:?}", events).contains("infer"))
}
@ -4803,10 +4863,12 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
db.query_mut(ra_db::FileTextQuery).set(pos.file_id, Arc::new(new_text));
{
let file = db.parse(pos.file_id).ok().unwrap();
let node = file.syntax().token_at_offset(pos.offset).right_biased().unwrap().parent();
let events = db.log_executed(|| {
SourceAnalyzer::new(&db, Source::new(pos.file_id.into(), &node), None);
let module = db.module_for_file(pos.file_id);
let crate_def_map = db.crate_def_map(module.krate);
visit_module(&db, &crate_def_map, module.module_id, &mut |def| {
db.infer(def);
});
});
assert!(!format!("{:?}", events).contains("infer"), "{:#?}", events)
}

View file

@ -11,8 +11,8 @@ use chalk_rust_ir::{AssociatedTyDatum, AssociatedTyValue, ImplDatum, StructDatum
use ra_db::CrateId;
use hir_def::{
lang_item::LangItemTarget, resolver::HasResolver, AssocItemId, AstItemDef, ContainerId,
GenericDefId, ImplId, Lookup, TraitId, TypeAliasId,
expr::Expr, lang_item::LangItemTarget, resolver::HasResolver, AssocItemId, AstItemDef,
ContainerId, GenericDefId, ImplId, Lookup, TraitId, TypeAliasId,
};
use hir_expand::name;
@ -21,8 +21,8 @@ use ra_db::salsa::{InternId, InternKey};
use super::{AssocTyValue, Canonical, ChalkContext, Impl, Obligation};
use crate::{
db::HirDatabase,
ty::display::HirDisplay,
ty::{ApplicationTy, GenericPredicate, ProjectionTy, Substs, TraitRef, Ty, TypeCtor, TypeWalk},
display::HirDisplay,
{ApplicationTy, GenericPredicate, ProjectionTy, Substs, TraitRef, Ty, TypeCtor, TypeWalk},
};
/// This represents a trait whose name we could not resolve.
@ -723,7 +723,7 @@ fn closure_fn_trait_impl_datum(
let _output = db.trait_data(fn_once_trait).associated_type_by_name(&name::OUTPUT_TYPE)?;
let num_args: u16 = match &db.body(data.def.into())[data.expr] {
crate::expr::Expr::Lambda { args, .. } => args.len() as u16,
Expr::Lambda { args, .. } => args.len() as u16,
_ => {
log::warn!("closure for closure type {:?} not found", data);
0
@ -823,7 +823,7 @@ fn closure_fn_trait_output_assoc_ty_value(
let impl_id = Impl::ClosureFnTraitImpl(data.clone()).to_chalk(db);
let num_args: u16 = match &db.body(data.def.into())[data.expr] {
crate::expr::Expr::Lambda { args, .. } => args.len() as u16,
Expr::Lambda { args, .. } => args.len() as u16,
_ => {
log::warn!("closure for closure type {:?} not found", data);
0
@ -869,38 +869,38 @@ fn id_to_chalk<T: InternKey>(salsa_id: T) -> chalk_ir::RawId {
chalk_ir::RawId { index: salsa_id.as_intern_id().as_u32() }
}
impl From<chalk_ir::StructId> for crate::ty::TypeCtorId {
impl From<chalk_ir::StructId> for crate::TypeCtorId {
fn from(struct_id: chalk_ir::StructId) -> Self {
id_from_chalk(struct_id.0)
}
}
impl From<crate::ty::TypeCtorId> for chalk_ir::StructId {
fn from(type_ctor_id: crate::ty::TypeCtorId) -> Self {
impl From<crate::TypeCtorId> for chalk_ir::StructId {
fn from(type_ctor_id: crate::TypeCtorId) -> Self {
chalk_ir::StructId(id_to_chalk(type_ctor_id))
}
}
impl From<chalk_ir::ImplId> for crate::ty::traits::GlobalImplId {
impl From<chalk_ir::ImplId> for crate::traits::GlobalImplId {
fn from(impl_id: chalk_ir::ImplId) -> Self {
id_from_chalk(impl_id.0)
}
}
impl From<crate::ty::traits::GlobalImplId> for chalk_ir::ImplId {
fn from(impl_id: crate::ty::traits::GlobalImplId) -> Self {
impl From<crate::traits::GlobalImplId> for chalk_ir::ImplId {
fn from(impl_id: crate::traits::GlobalImplId) -> Self {
chalk_ir::ImplId(id_to_chalk(impl_id))
}
}
impl From<chalk_rust_ir::AssociatedTyValueId> for crate::ty::traits::AssocTyValueId {
impl From<chalk_rust_ir::AssociatedTyValueId> for crate::traits::AssocTyValueId {
fn from(id: chalk_rust_ir::AssociatedTyValueId) -> Self {
id_from_chalk(id.0)
}
}
impl From<crate::ty::traits::AssocTyValueId> for chalk_rust_ir::AssociatedTyValueId {
fn from(assoc_ty_value_id: crate::ty::traits::AssocTyValueId) -> Self {
impl From<crate::traits::AssocTyValueId> for chalk_rust_ir::AssociatedTyValueId {
fn from(assoc_ty_value_id: crate::traits::AssocTyValueId) -> Self {
chalk_rust_ir::AssociatedTyValueId(id_to_chalk(assoc_ty_value_id))
}
}

View file

@ -73,3 +73,12 @@ pub(super) fn variant_data(db: &impl DefDatabase, var: VariantId) -> Arc<Variant
}
}
}
/// Helper for mutating `Arc<[T]>` (i.e. `Arc::make_mut` for Arc slices).
/// The underlying values are cloned if there are other strong references.
pub(crate) fn make_mut_slice<T: Clone>(a: &mut Arc<[T]>) -> &mut [T] {
    // `Arc::make_mut` is unavailable for the unsized `[T]`, so emulate
    // clone-on-write by hand: when the slice is shared, swap in a freshly
    // allocated copy that this `Arc` uniquely owns.
    if Arc::get_mut(a).is_none() {
        let owned: Arc<[T]> = a.iter().cloned().collect();
        *a = owned;
    }
    // Either we were unique all along or we just made a private copy above,
    // so this lookup cannot fail.
    Arc::get_mut(a).expect("slice is uniquely owned at this point")
}

View file

@ -1,6 +1,6 @@
//! FIXME: write short doc here
use hir::{ApplicationTy, FromSource, ImplBlock, Ty, TypeCtor};
use hir::{FromSource, ImplBlock};
use ra_db::SourceDatabase;
use ra_syntax::{algo::find_node_at_offset, ast, AstNode};
@ -61,7 +61,7 @@ fn impls_for_def(
Some(
impls
.into_iter()
.filter(|impl_block| is_equal_for_find_impls(&ty, &impl_block.target_ty(db)))
.filter(|impl_block| ty.is_equal_for_find_impls(&impl_block.target_ty(db)))
.map(|imp| imp.to_nav(db))
.collect(),
)
@ -82,19 +82,6 @@ fn impls_for_trait(
Some(impls.into_iter().map(|imp| imp.to_nav(db)).collect())
}
/// Loose type-constructor comparison used when collecting impls for "go to
/// implementation": does `impl_ty` target the same constructor as
/// `original_ty`?
///
/// Only application types (`Ty::Apply`) can match; any other type on either
/// side yields `false`. For reference constructors the single type parameter
/// is unwrapped first, so an impl on `&T` is also found when searching for
/// `T` itself.
fn is_equal_for_find_impls(original_ty: &Ty, impl_ty: &Ty) -> bool {
    match (original_ty, impl_ty) {
        (Ty::Apply(a_original_ty), Ty::Apply(ApplicationTy { ctor, parameters })) => match ctor {
            // `&T` / `&mut T`: compare against the referent's constructor
            // rather than the reference itself.
            TypeCtor::Ref(..) => match parameters.as_single() {
                Ty::Apply(a_ty) => a_original_ty.ctor == a_ty.ctor,
                // Referent is not an application type — cannot match.
                _ => false,
            },
            // Every other constructor: plain constructor equality.
            _ => a_original_ty.ctor == *ctor,
        },
        _ => false,
    }
}
#[cfg(test)]
mod tests {
use crate::mock_analysis::analysis_and_position;

View file

@ -83,6 +83,7 @@ fn no_docs_comments() {
"ra_syntax",
"ra_text_edit",
"ra_tt",
"ra_hir_ty",
];
let mut has_fixmes = whitelist.iter().map(|it| (*it, false)).collect::<HashMap<&str, bool>>();