This commit is contained in:
Aleksey Kladov 2019-11-27 12:02:54 +03:00
parent a306531e6a
commit 3a0929fca7
4 changed files with 24 additions and 11 deletions

View file

@@ -2,7 +2,7 @@
use std::sync::Arc;
use hir_def::{GenericDefId, LocalStructFieldId, TraitId, VariantId};
use hir_def::{GenericDefId, ImplId, LocalStructFieldId, TraitId, VariantId};
use ra_arena::map::ArenaMap;
use ra_db::{salsa, CrateId};
@@ -13,7 +13,7 @@ use crate::{
CallableDef, FnSig, GenericPredicate, InferenceResult, Substs, Ty, TyDefId, TypeCtor,
ValueTyDefId,
},
DefWithBody, ImplBlock,
DefWithBody,
};
pub use hir_def::db::{
@@ -63,7 +63,7 @@ pub trait HirDatabase: DefDatabase {
fn impls_in_crate(&self, krate: CrateId) -> Arc<CrateImplBlocks>;
#[salsa::invoke(crate::ty::traits::impls_for_trait_query)]
fn impls_for_trait(&self, krate: CrateId, trait_: TraitId) -> Arc<[ImplBlock]>;
fn impls_for_trait(&self, krate: CrateId, trait_: TraitId) -> Arc<[ImplId]>;
/// This provides the Chalk trait solver instance. Because Chalk always
/// works from a specific crate, this query is keyed on the crate; and

View file

@@ -4,13 +4,17 @@
//!
//! See: https://doc.rust-lang.org/nomicon/coercions.html
use hir_def::{lang_item::LangItemTarget, resolver::Resolver, AdtId};
use hir_def::{
lang_item::LangItemTarget,
resolver::{HasResolver, Resolver},
AdtId,
};
use rustc_hash::FxHashMap;
use test_utils::tested_by;
use crate::{
db::HirDatabase,
ty::{autoderef, Substs, Ty, TypeCtor, TypeWalk},
ty::{autoderef, Substs, TraitRef, Ty, TypeCtor, TypeWalk},
Mutability,
};
@@ -57,9 +61,18 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
impls
.iter()
.filter_map(|impl_block| {
.filter_map(|&impl_id| {
let impl_data = db.impl_data(impl_id);
let resolver = impl_id.resolver(db);
let target_ty = Ty::from_hir(db, &resolver, &impl_data.target_type);
// `CoerceUnsized` has one generic parameter for the target type.
let trait_ref = impl_block.target_trait_ref(db)?;
let trait_ref = TraitRef::from_hir(
db,
&resolver,
impl_data.target_trait.as_ref()?,
Some(target_ty),
)?;
let cur_from_ty = trait_ref.substs.0.get(0)?;
let cur_to_ty = trait_ref.substs.0.get(1)?;

View file

@@ -2,7 +2,7 @@
use std::sync::{Arc, Mutex};
use chalk_ir::{cast::Cast, family::ChalkIr};
use hir_def::{expr::ExprId, DefWithBodyId, TraitId, TypeAliasId};
use hir_def::{expr::ExprId, DefWithBodyId, ImplId, TraitId, TypeAliasId};
use log::debug;
use ra_db::{impl_intern_key, salsa, CrateId};
use ra_prof::profile;
@@ -79,7 +79,7 @@ pub(crate) fn impls_for_trait_query(
db: &impl HirDatabase,
krate: CrateId,
trait_: TraitId,
) -> Arc<[ImplBlock]> {
) -> Arc<[ImplId]> {
let mut impls = FxHashSet::default();
// We call the query recursively here. On the one hand, this means we can
// reuse results from queries for different crates; on the other hand, this
@@ -90,7 +90,7 @@ pub(crate) fn impls_for_trait_query(
impls.extend(db.impls_for_trait(dep.crate_id, trait_).iter());
}
let crate_impl_blocks = db.impls_in_crate(krate);
impls.extend(crate_impl_blocks.lookup_impl_blocks_for_trait(trait_).map(ImplBlock::from));
impls.extend(crate_impl_blocks.lookup_impl_blocks_for_trait(trait_));
impls.into_iter().collect()
}

View file

@@ -453,7 +453,7 @@ where
.impls_for_trait(self.krate, trait_.into())
.iter()
.copied()
.map(Impl::ImplBlock)
.map(|it| Impl::ImplBlock(it.into()))
.map(|impl_| impl_.to_chalk(self.db))
.collect();