Use dyn Trait for working with the database

It improves compile time in `--release` mode quite a bit, doesn't
really slow things down, and, conceptually, seems closer to what we
want the physical architecture to look like (we don't want to
monomorphise EVERYTHING in a single leaf crate).
Author: Aleksey Kladov
Date: 2020-03-13 16:05:46 +01:00
Parent: 648df02953
Commit: 9faea2364d
51 changed files with 813 additions and 794 deletions
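
The trade-off described in the message is between generic database parameters (`&impl HirDatabase`) and trait objects (`&dyn HirDatabase`). A minimal sketch of the two styles, using a made-up `Db` trait and `MockDb` type rather than the real salsa-based `HirDatabase`:

```rust
// Minimal sketch, not rust-analyzer's real API: `Db` stands in for a salsa
// query trait such as `HirDatabase`.
trait Db {
    fn item_name(&self, id: u32) -> String;
}

// Old style: generic over the concrete database type, so it is monomorphized
// again for each concrete type (and in each downstream crate that uses it).
fn name_generic(db: &impl Db, id: u32) -> String {
    db.item_name(id)
}

// New style: a single non-generic function that dispatches through a vtable.
fn name_dyn(db: &dyn Db, id: u32) -> String {
    db.item_name(id)
}

struct MockDb;

impl Db for MockDb {
    fn item_name(&self, id: u32) -> String {
        format!("item{}", id)
    }
}

fn main() {
    let db = MockDb;
    assert_eq!(name_generic(&db, 1), "item1");
    assert_eq!(name_dyn(&db, 1), "item1");
}
```

The generic form gets instantiated for every concrete database type at every use site, which is what the message means by monomorphising everything in a single leaf crate; the `dyn` form is compiled once in its defining crate and calls go through a vtable.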

@@ -32,6 +32,10 @@ macro_rules! impl_intern_key {
     };
 }
 
+pub trait Upcast<T: ?Sized> {
+    fn upcast(&self) -> &T;
+}
+
 pub trait CheckCanceled {
     /// Aborts current query if there are pending changes.
     ///
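
The new `Upcast` trait exists because, at the time, stable Rust had no implicit trait-object upcasting: a `&dyn HirDatabase` cannot be passed where a `&dyn DefDatabase` is expected, so call sites below convert explicitly with `db.upcast()`. A hedged, self-contained sketch of how the pieces can fit together, with toy `DefDatabase`/`HirDatabase` traits and a `MockDb` standing in for the real salsa database:

```rust
// Toy versions of the traits; not the real salsa-generated query groups.
pub trait Upcast<T: ?Sized> {
    fn upcast(&self) -> &T;
}

trait DefDatabase {
    fn def_name(&self, id: u32) -> String;
}

// The wider trait promises it can also be viewed as a `dyn DefDatabase`.
trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
    fn ty_of(&self, id: u32) -> String;
}

// A callee that only needs the narrower database view.
fn def_name_of(db: &dyn DefDatabase, id: u32) -> String {
    db.def_name(id)
}

// Callers holding only `&dyn HirDatabase` bridge the gap with `db.upcast()`,
// just like the `db.upcast()` calls in the hunks below.
fn describe(db: &dyn HirDatabase, id: u32) -> String {
    format!("{}: {}", def_name_of(db.upcast(), id), db.ty_of(id))
}

struct MockDb;

impl DefDatabase for MockDb {
    fn def_name(&self, id: u32) -> String {
        format!("def{}", id)
    }
}

impl HirDatabase for MockDb {
    fn ty_of(&self, id: u32) -> String {
        format!("Ty{}", id)
    }
}

impl Upcast<dyn DefDatabase> for MockDb {
    fn upcast(&self) -> &(dyn DefDatabase + 'static) {
        self
    }
}

fn main() {
    let db = MockDb;
    assert_eq!(describe(&db, 7), "def7: Ty7");
}
```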

@ -33,11 +33,7 @@ use ra_syntax::{
}; };
use rustc_hash::FxHashSet; use rustc_hash::FxHashSet;
use crate::{ use crate::{db::HirDatabase, has_source::HasSource, CallableDef, HirDisplay, InFile, Name};
db::{DefDatabase, HirDatabase},
has_source::HasSource,
CallableDef, HirDisplay, InFile, Name,
};
/// hir::Crate describes a single crate. It's the main interface with which /// hir::Crate describes a single crate. It's the main interface with which
/// a crate's dependencies interact. Mostly, it should be just a proxy for the /// a crate's dependencies interact. Mostly, it should be just a proxy for the
@ -54,7 +50,7 @@ pub struct CrateDependency {
} }
impl Crate { impl Crate {
pub fn dependencies(self, db: &impl DefDatabase) -> Vec<CrateDependency> { pub fn dependencies(self, db: &dyn HirDatabase) -> Vec<CrateDependency> {
db.crate_graph()[self.id] db.crate_graph()[self.id]
.dependencies .dependencies
.iter() .iter()
@ -67,7 +63,7 @@ impl Crate {
} }
// FIXME: add `transitive_reverse_dependencies`. // FIXME: add `transitive_reverse_dependencies`.
pub fn reverse_dependencies(self, db: &impl DefDatabase) -> Vec<Crate> { pub fn reverse_dependencies(self, db: &dyn HirDatabase) -> Vec<Crate> {
let crate_graph = db.crate_graph(); let crate_graph = db.crate_graph();
crate_graph crate_graph
.iter() .iter()
@ -78,20 +74,20 @@ impl Crate {
.collect() .collect()
} }
pub fn root_module(self, db: &impl DefDatabase) -> Option<Module> { pub fn root_module(self, db: &dyn HirDatabase) -> Option<Module> {
let module_id = db.crate_def_map(self.id).root; let module_id = db.crate_def_map(self.id).root;
Some(Module::new(self, module_id)) Some(Module::new(self, module_id))
} }
pub fn root_file(self, db: &impl DefDatabase) -> FileId { pub fn root_file(self, db: &dyn HirDatabase) -> FileId {
db.crate_graph()[self.id].root_file_id db.crate_graph()[self.id].root_file_id
} }
pub fn edition(self, db: &impl DefDatabase) -> Edition { pub fn edition(self, db: &dyn HirDatabase) -> Edition {
db.crate_graph()[self.id].edition db.crate_graph()[self.id].edition
} }
pub fn all(db: &impl DefDatabase) -> Vec<Crate> { pub fn all(db: &dyn HirDatabase) -> Vec<Crate> {
db.crate_graph().iter().map(|id| Crate { id }).collect() db.crate_graph().iter().map(|id| Crate { id }).collect()
} }
} }
@ -128,7 +124,7 @@ impl_froms!(
); );
impl ModuleDef { impl ModuleDef {
pub fn module(self, db: &impl HirDatabase) -> Option<Module> { pub fn module(self, db: &dyn HirDatabase) -> Option<Module> {
match self { match self {
ModuleDef::Module(it) => it.parent(db), ModuleDef::Module(it) => it.parent(db),
ModuleDef::Function(it) => Some(it.module(db)), ModuleDef::Function(it) => Some(it.module(db)),
@ -153,7 +149,7 @@ impl Module {
} }
/// Name of this module. /// Name of this module.
pub fn name(self, db: &impl DefDatabase) -> Option<Name> { pub fn name(self, db: &dyn HirDatabase) -> Option<Name> {
let def_map = db.crate_def_map(self.id.krate); let def_map = db.crate_def_map(self.id.krate);
let parent = def_map[self.id.local_id].parent?; let parent = def_map[self.id.local_id].parent?;
def_map[parent].children.iter().find_map(|(name, module_id)| { def_map[parent].children.iter().find_map(|(name, module_id)| {
@ -173,13 +169,13 @@ impl Module {
/// Topmost parent of this module. Every module has a `crate_root`, but some /// Topmost parent of this module. Every module has a `crate_root`, but some
/// might be missing `krate`. This can happen if a module's file is not included /// might be missing `krate`. This can happen if a module's file is not included
/// in the module tree of any target in `Cargo.toml`. /// in the module tree of any target in `Cargo.toml`.
pub fn crate_root(self, db: &impl DefDatabase) -> Module { pub fn crate_root(self, db: &dyn HirDatabase) -> Module {
let def_map = db.crate_def_map(self.id.krate); let def_map = db.crate_def_map(self.id.krate);
self.with_module_id(def_map.root) self.with_module_id(def_map.root)
} }
/// Iterates over all child modules. /// Iterates over all child modules.
pub fn children(self, db: &impl DefDatabase) -> impl Iterator<Item = Module> { pub fn children(self, db: &dyn HirDatabase) -> impl Iterator<Item = Module> {
let def_map = db.crate_def_map(self.id.krate); let def_map = db.crate_def_map(self.id.krate);
let children = def_map[self.id.local_id] let children = def_map[self.id.local_id]
.children .children
@ -190,13 +186,13 @@ impl Module {
} }
/// Finds a parent module. /// Finds a parent module.
pub fn parent(self, db: &impl DefDatabase) -> Option<Module> { pub fn parent(self, db: &dyn HirDatabase) -> Option<Module> {
let def_map = db.crate_def_map(self.id.krate); let def_map = db.crate_def_map(self.id.krate);
let parent_id = def_map[self.id.local_id].parent?; let parent_id = def_map[self.id.local_id].parent?;
Some(self.with_module_id(parent_id)) Some(self.with_module_id(parent_id))
} }
pub fn path_to_root(self, db: &impl HirDatabase) -> Vec<Module> { pub fn path_to_root(self, db: &dyn HirDatabase) -> Vec<Module> {
let mut res = vec![self]; let mut res = vec![self];
let mut curr = self; let mut curr = self;
while let Some(next) = curr.parent(db) { while let Some(next) = curr.parent(db) {
@ -209,7 +205,7 @@ impl Module {
/// Returns a `ModuleScope`: a set of items, visible in this module. /// Returns a `ModuleScope`: a set of items, visible in this module.
pub fn scope( pub fn scope(
self, self,
db: &impl HirDatabase, db: &dyn HirDatabase,
visible_from: Option<Module>, visible_from: Option<Module>,
) -> Vec<(Name, ScopeDef)> { ) -> Vec<(Name, ScopeDef)> {
db.crate_def_map(self.id.krate)[self.id.local_id] db.crate_def_map(self.id.krate)[self.id.local_id]
@ -217,7 +213,8 @@ impl Module {
.entries() .entries()
.filter_map(|(name, def)| { .filter_map(|(name, def)| {
if let Some(m) = visible_from { if let Some(m) = visible_from {
let filtered = def.filter_visibility(|vis| vis.is_visible_from(db, m.id)); let filtered =
def.filter_visibility(|vis| vis.is_visible_from(db.upcast(), m.id));
if filtered.is_none() && !def.is_none() { if filtered.is_none() && !def.is_none() {
None None
} else { } else {
@ -233,10 +230,10 @@ impl Module {
.collect() .collect()
} }
pub fn diagnostics(self, db: &impl HirDatabase, sink: &mut DiagnosticSink) { pub fn diagnostics(self, db: &dyn HirDatabase, sink: &mut DiagnosticSink) {
let _p = profile("Module::diagnostics"); let _p = profile("Module::diagnostics");
let crate_def_map = db.crate_def_map(self.id.krate); let crate_def_map = db.crate_def_map(self.id.krate);
crate_def_map.add_diagnostics(db, self.id.local_id, sink); crate_def_map.add_diagnostics(db.upcast(), self.id.local_id, sink);
for decl in self.declarations(db) { for decl in self.declarations(db) {
match decl { match decl {
crate::ModuleDef::Function(f) => f.diagnostics(db, sink), crate::ModuleDef::Function(f) => f.diagnostics(db, sink),
@ -259,12 +256,12 @@ impl Module {
} }
} }
pub fn declarations(self, db: &impl DefDatabase) -> Vec<ModuleDef> { pub fn declarations(self, db: &dyn HirDatabase) -> Vec<ModuleDef> {
let def_map = db.crate_def_map(self.id.krate); let def_map = db.crate_def_map(self.id.krate);
def_map[self.id.local_id].scope.declarations().map(ModuleDef::from).collect() def_map[self.id.local_id].scope.declarations().map(ModuleDef::from).collect()
} }
pub fn impl_defs(self, db: &impl DefDatabase) -> Vec<ImplDef> { pub fn impl_defs(self, db: &dyn HirDatabase) -> Vec<ImplDef> {
let def_map = db.crate_def_map(self.id.krate); let def_map = db.crate_def_map(self.id.krate);
def_map[self.id.local_id].scope.impls().map(ImplDef::from).collect() def_map[self.id.local_id].scope.impls().map(ImplDef::from).collect()
} }
@ -277,11 +274,11 @@ impl Module {
/// this module, if possible. /// this module, if possible.
pub fn find_use_path( pub fn find_use_path(
self, self,
db: &impl DefDatabase, db: &dyn HirDatabase,
item: ModuleDef, item: ModuleDef,
) -> Option<hir_def::path::ModPath> { ) -> Option<hir_def::path::ModPath> {
// FIXME expose namespace choice // FIXME expose namespace choice
hir_def::find_path::find_path(db, determine_item_namespace(item), self.into()) hir_def::find_path::find_path(db.upcast(), determine_item_namespace(item), self.into())
} }
} }
@ -307,7 +304,7 @@ pub enum FieldSource {
} }
impl StructField { impl StructField {
pub fn name(&self, db: &impl HirDatabase) -> Name { pub fn name(&self, db: &dyn HirDatabase) -> Name {
self.parent.variant_data(db).fields()[self.id].name.clone() self.parent.variant_data(db).fields()[self.id].name.clone()
} }
@ -315,7 +312,7 @@ impl StructField {
/// placeholder types for type parameters). This is good for showing /// placeholder types for type parameters). This is good for showing
/// signature help, but not so good to actually get the type of the field /// signature help, but not so good to actually get the type of the field
/// when you actually have a variable of the struct. /// when you actually have a variable of the struct.
pub fn signature_ty(&self, db: &impl HirDatabase) -> Type { pub fn signature_ty(&self, db: &dyn HirDatabase) -> Type {
let var_id = self.parent.into(); let var_id = self.parent.into();
let generic_def_id: GenericDefId = match self.parent { let generic_def_id: GenericDefId = match self.parent {
VariantDef::Struct(it) => it.id.into(), VariantDef::Struct(it) => it.id.into(),
@ -327,17 +324,17 @@ impl StructField {
Type::new(db, self.parent.module(db).id.krate, var_id, ty) Type::new(db, self.parent.module(db).id.krate, var_id, ty)
} }
pub fn parent_def(&self, _db: &impl HirDatabase) -> VariantDef { pub fn parent_def(&self, _db: &dyn HirDatabase) -> VariantDef {
self.parent self.parent
} }
} }
impl HasVisibility for StructField { impl HasVisibility for StructField {
fn visibility(&self, db: &impl HirDatabase) -> Visibility { fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
let variant_data = self.parent.variant_data(db); let variant_data = self.parent.variant_data(db);
let visibility = &variant_data.fields()[self.id].visibility; let visibility = &variant_data.fields()[self.id].visibility;
let parent_id: hir_def::VariantId = self.parent.into(); let parent_id: hir_def::VariantId = self.parent.into();
visibility.resolve(db, &parent_id.resolver(db)) visibility.resolve(db.upcast(), &parent_id.resolver(db.upcast()))
} }
} }
@ -347,19 +344,19 @@ pub struct Struct {
} }
impl Struct { impl Struct {
pub fn module(self, db: &impl DefDatabase) -> Module { pub fn module(self, db: &dyn HirDatabase) -> Module {
Module { id: self.id.lookup(db).container.module(db) } Module { id: self.id.lookup(db.upcast()).container.module(db.upcast()) }
} }
pub fn krate(self, db: &impl DefDatabase) -> Option<Crate> { pub fn krate(self, db: &dyn HirDatabase) -> Option<Crate> {
Some(self.module(db).krate()) Some(self.module(db).krate())
} }
pub fn name(self, db: &impl DefDatabase) -> Name { pub fn name(self, db: &dyn HirDatabase) -> Name {
db.struct_data(self.id).name.clone() db.struct_data(self.id).name.clone()
} }
pub fn fields(self, db: &impl HirDatabase) -> Vec<StructField> { pub fn fields(self, db: &dyn HirDatabase) -> Vec<StructField> {
db.struct_data(self.id) db.struct_data(self.id)
.variant_data .variant_data
.fields() .fields()
@ -368,11 +365,11 @@ impl Struct {
.collect() .collect()
} }
pub fn ty(self, db: &impl HirDatabase) -> Type { pub fn ty(self, db: &dyn HirDatabase) -> Type {
Type::from_def(db, self.id.lookup(db).container.module(db).krate, self.id) Type::from_def(db, self.id.lookup(db.upcast()).container.module(db.upcast()).krate, self.id)
} }
fn variant_data(self, db: &impl DefDatabase) -> Arc<VariantData> { fn variant_data(self, db: &dyn HirDatabase) -> Arc<VariantData> {
db.struct_data(self.id).variant_data.clone() db.struct_data(self.id).variant_data.clone()
} }
} }
@ -383,19 +380,19 @@ pub struct Union {
} }
impl Union { impl Union {
pub fn name(self, db: &impl DefDatabase) -> Name { pub fn name(self, db: &dyn HirDatabase) -> Name {
db.union_data(self.id).name.clone() db.union_data(self.id).name.clone()
} }
pub fn module(self, db: &impl DefDatabase) -> Module { pub fn module(self, db: &dyn HirDatabase) -> Module {
Module { id: self.id.lookup(db).container.module(db) } Module { id: self.id.lookup(db.upcast()).container.module(db.upcast()) }
} }
pub fn ty(self, db: &impl HirDatabase) -> Type { pub fn ty(self, db: &dyn HirDatabase) -> Type {
Type::from_def(db, self.id.lookup(db).container.module(db).krate, self.id) Type::from_def(db, self.id.lookup(db.upcast()).container.module(db.upcast()).krate, self.id)
} }
pub fn fields(self, db: &impl HirDatabase) -> Vec<StructField> { pub fn fields(self, db: &dyn HirDatabase) -> Vec<StructField> {
db.union_data(self.id) db.union_data(self.id)
.variant_data .variant_data
.fields() .fields()
@ -404,7 +401,7 @@ impl Union {
.collect() .collect()
} }
fn variant_data(self, db: &impl DefDatabase) -> Arc<VariantData> { fn variant_data(self, db: &dyn HirDatabase) -> Arc<VariantData> {
db.union_data(self.id).variant_data.clone() db.union_data(self.id).variant_data.clone()
} }
} }
@ -415,19 +412,19 @@ pub struct Enum {
} }
impl Enum { impl Enum {
pub fn module(self, db: &impl DefDatabase) -> Module { pub fn module(self, db: &dyn HirDatabase) -> Module {
Module { id: self.id.lookup(db).container.module(db) } Module { id: self.id.lookup(db.upcast()).container.module(db.upcast()) }
} }
pub fn krate(self, db: &impl DefDatabase) -> Option<Crate> { pub fn krate(self, db: &dyn HirDatabase) -> Option<Crate> {
Some(self.module(db).krate()) Some(self.module(db).krate())
} }
pub fn name(self, db: &impl DefDatabase) -> Name { pub fn name(self, db: &dyn HirDatabase) -> Name {
db.enum_data(self.id).name.clone() db.enum_data(self.id).name.clone()
} }
pub fn variants(self, db: &impl DefDatabase) -> Vec<EnumVariant> { pub fn variants(self, db: &dyn HirDatabase) -> Vec<EnumVariant> {
db.enum_data(self.id) db.enum_data(self.id)
.variants .variants
.iter() .iter()
@ -435,8 +432,8 @@ impl Enum {
.collect() .collect()
} }
pub fn ty(self, db: &impl HirDatabase) -> Type { pub fn ty(self, db: &dyn HirDatabase) -> Type {
Type::from_def(db, self.id.lookup(db).container.module(db).krate, self.id) Type::from_def(db, self.id.lookup(db.upcast()).container.module(db.upcast()).krate, self.id)
} }
} }
@ -447,18 +444,18 @@ pub struct EnumVariant {
} }
impl EnumVariant { impl EnumVariant {
pub fn module(self, db: &impl HirDatabase) -> Module { pub fn module(self, db: &dyn HirDatabase) -> Module {
self.parent.module(db) self.parent.module(db)
} }
pub fn parent_enum(self, _db: &impl DefDatabase) -> Enum { pub fn parent_enum(self, _db: &dyn HirDatabase) -> Enum {
self.parent self.parent
} }
pub fn name(self, db: &impl DefDatabase) -> Name { pub fn name(self, db: &dyn HirDatabase) -> Name {
db.enum_data(self.parent.id).variants[self.id].name.clone() db.enum_data(self.parent.id).variants[self.id].name.clone()
} }
pub fn fields(self, db: &impl HirDatabase) -> Vec<StructField> { pub fn fields(self, db: &dyn HirDatabase) -> Vec<StructField> {
self.variant_data(db) self.variant_data(db)
.fields() .fields()
.iter() .iter()
@ -466,11 +463,11 @@ impl EnumVariant {
.collect() .collect()
} }
pub fn kind(self, db: &impl HirDatabase) -> StructKind { pub fn kind(self, db: &dyn HirDatabase) -> StructKind {
self.variant_data(db).kind() self.variant_data(db).kind()
} }
pub(crate) fn variant_data(self, db: &impl DefDatabase) -> Arc<VariantData> { pub(crate) fn variant_data(self, db: &dyn HirDatabase) -> Arc<VariantData> {
db.enum_data(self.parent.id).variants[self.id].variant_data.clone() db.enum_data(self.parent.id).variants[self.id].variant_data.clone()
} }
} }
@ -485,7 +482,7 @@ pub enum Adt {
impl_froms!(Adt: Struct, Union, Enum); impl_froms!(Adt: Struct, Union, Enum);
impl Adt { impl Adt {
pub fn has_non_default_type_params(self, db: &impl HirDatabase) -> bool { pub fn has_non_default_type_params(self, db: &dyn HirDatabase) -> bool {
let subst = db.generic_defaults(self.into()); let subst = db.generic_defaults(self.into());
subst.iter().any(|ty| ty == &Ty::Unknown) subst.iter().any(|ty| ty == &Ty::Unknown)
} }
@ -493,12 +490,12 @@ impl Adt {
/// Turns this ADT into a type. Any type parameters of the ADT will be /// Turns this ADT into a type. Any type parameters of the ADT will be
/// turned into unknown types, which is good for e.g. finding the most /// turned into unknown types, which is good for e.g. finding the most
/// general set of completions, but will not look very nice when printed. /// general set of completions, but will not look very nice when printed.
pub fn ty(self, db: &impl HirDatabase) -> Type { pub fn ty(self, db: &dyn HirDatabase) -> Type {
let id = AdtId::from(self); let id = AdtId::from(self);
Type::from_def(db, id.module(db).krate, id) Type::from_def(db, id.module(db.upcast()).krate, id)
} }
pub fn module(self, db: &impl DefDatabase) -> Module { pub fn module(self, db: &dyn HirDatabase) -> Module {
match self { match self {
Adt::Struct(s) => s.module(db), Adt::Struct(s) => s.module(db),
Adt::Union(s) => s.module(db), Adt::Union(s) => s.module(db),
@ -506,11 +503,11 @@ impl Adt {
} }
} }
pub fn krate(self, db: &impl HirDatabase) -> Option<Crate> { pub fn krate(self, db: &dyn HirDatabase) -> Option<Crate> {
Some(self.module(db).krate()) Some(self.module(db).krate())
} }
pub fn name(&self, db: &impl HirDatabase) -> Name { pub fn name(&self, db: &dyn HirDatabase) -> Name {
match self { match self {
Adt::Struct(s) => s.name(db), Adt::Struct(s) => s.name(db),
Adt::Union(u) => u.name(db), Adt::Union(u) => u.name(db),
@ -528,7 +525,7 @@ pub enum VariantDef {
impl_froms!(VariantDef: Struct, Union, EnumVariant); impl_froms!(VariantDef: Struct, Union, EnumVariant);
impl VariantDef { impl VariantDef {
pub fn fields(self, db: &impl HirDatabase) -> Vec<StructField> { pub fn fields(self, db: &dyn HirDatabase) -> Vec<StructField> {
match self { match self {
VariantDef::Struct(it) => it.fields(db), VariantDef::Struct(it) => it.fields(db),
VariantDef::Union(it) => it.fields(db), VariantDef::Union(it) => it.fields(db),
@ -536,7 +533,7 @@ impl VariantDef {
} }
} }
pub fn module(self, db: &impl HirDatabase) -> Module { pub fn module(self, db: &dyn HirDatabase) -> Module {
match self { match self {
VariantDef::Struct(it) => it.module(db), VariantDef::Struct(it) => it.module(db),
VariantDef::Union(it) => it.module(db), VariantDef::Union(it) => it.module(db),
@ -544,7 +541,7 @@ impl VariantDef {
} }
} }
pub fn name(&self, db: &impl HirDatabase) -> Name { pub fn name(&self, db: &dyn HirDatabase) -> Name {
match self { match self {
VariantDef::Struct(s) => s.name(db), VariantDef::Struct(s) => s.name(db),
VariantDef::Union(u) => u.name(db), VariantDef::Union(u) => u.name(db),
@ -552,7 +549,7 @@ impl VariantDef {
} }
} }
pub(crate) fn variant_data(self, db: &impl DefDatabase) -> Arc<VariantData> { pub(crate) fn variant_data(self, db: &dyn HirDatabase) -> Arc<VariantData> {
match self { match self {
VariantDef::Struct(it) => it.variant_data(db), VariantDef::Struct(it) => it.variant_data(db),
VariantDef::Union(it) => it.variant_data(db), VariantDef::Union(it) => it.variant_data(db),
@ -572,7 +569,7 @@ pub enum DefWithBody {
impl_froms!(DefWithBody: Function, Const, Static); impl_froms!(DefWithBody: Function, Const, Static);
impl DefWithBody { impl DefWithBody {
pub fn module(self, db: &impl HirDatabase) -> Module { pub fn module(self, db: &dyn HirDatabase) -> Module {
match self { match self {
DefWithBody::Const(c) => c.module(db), DefWithBody::Const(c) => c.module(db),
DefWithBody::Function(f) => f.module(db), DefWithBody::Function(f) => f.module(db),
@ -580,7 +577,7 @@ impl DefWithBody {
} }
} }
pub fn name(self, db: &impl HirDatabase) -> Option<Name> { pub fn name(self, db: &dyn HirDatabase) -> Option<Name> {
match self { match self {
DefWithBody::Function(f) => Some(f.name(db)), DefWithBody::Function(f) => Some(f.name(db)),
DefWithBody::Static(s) => s.name(db), DefWithBody::Static(s) => s.name(db),
@ -595,27 +592,27 @@ pub struct Function {
} }
impl Function { impl Function {
pub fn module(self, db: &impl DefDatabase) -> Module { pub fn module(self, db: &dyn HirDatabase) -> Module {
self.id.lookup(db).module(db).into() self.id.lookup(db.upcast()).module(db.upcast()).into()
} }
pub fn krate(self, db: &impl DefDatabase) -> Option<Crate> { pub fn krate(self, db: &dyn HirDatabase) -> Option<Crate> {
Some(self.module(db).krate()) Some(self.module(db).krate())
} }
pub fn name(self, db: &impl HirDatabase) -> Name { pub fn name(self, db: &dyn HirDatabase) -> Name {
db.function_data(self.id).name.clone() db.function_data(self.id).name.clone()
} }
pub fn has_self_param(self, db: &impl HirDatabase) -> bool { pub fn has_self_param(self, db: &dyn HirDatabase) -> bool {
db.function_data(self.id).has_self_param db.function_data(self.id).has_self_param
} }
pub fn params(self, db: &impl HirDatabase) -> Vec<TypeRef> { pub fn params(self, db: &dyn HirDatabase) -> Vec<TypeRef> {
db.function_data(self.id).params.clone() db.function_data(self.id).params.clone()
} }
pub fn diagnostics(self, db: &impl HirDatabase, sink: &mut DiagnosticSink) { pub fn diagnostics(self, db: &dyn HirDatabase, sink: &mut DiagnosticSink) {
let _p = profile("Function::diagnostics"); let _p = profile("Function::diagnostics");
let infer = db.infer(self.id.into()); let infer = db.infer(self.id.into());
infer.add_diagnostics(db, self.id, sink); infer.add_diagnostics(db, self.id, sink);
@ -625,10 +622,10 @@ impl Function {
} }
impl HasVisibility for Function { impl HasVisibility for Function {
fn visibility(&self, db: &impl HirDatabase) -> Visibility { fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
let function_data = db.function_data(self.id); let function_data = db.function_data(self.id);
let visibility = &function_data.visibility; let visibility = &function_data.visibility;
visibility.resolve(db, &self.id.resolver(db)) visibility.resolve(db.upcast(), &self.id.resolver(db.upcast()))
} }
} }
@ -638,24 +635,24 @@ pub struct Const {
} }
impl Const { impl Const {
pub fn module(self, db: &impl DefDatabase) -> Module { pub fn module(self, db: &dyn HirDatabase) -> Module {
Module { id: self.id.lookup(db).module(db) } Module { id: self.id.lookup(db.upcast()).module(db.upcast()) }
} }
pub fn krate(self, db: &impl DefDatabase) -> Option<Crate> { pub fn krate(self, db: &dyn HirDatabase) -> Option<Crate> {
Some(self.module(db).krate()) Some(self.module(db).krate())
} }
pub fn name(self, db: &impl HirDatabase) -> Option<Name> { pub fn name(self, db: &dyn HirDatabase) -> Option<Name> {
db.const_data(self.id).name.clone() db.const_data(self.id).name.clone()
} }
} }
impl HasVisibility for Const { impl HasVisibility for Const {
fn visibility(&self, db: &impl HirDatabase) -> Visibility { fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
let function_data = db.const_data(self.id); let function_data = db.const_data(self.id);
let visibility = &function_data.visibility; let visibility = &function_data.visibility;
visibility.resolve(db, &self.id.resolver(db)) visibility.resolve(db.upcast(), &self.id.resolver(db.upcast()))
} }
} }
@ -665,15 +662,15 @@ pub struct Static {
} }
impl Static { impl Static {
pub fn module(self, db: &impl DefDatabase) -> Module { pub fn module(self, db: &dyn HirDatabase) -> Module {
Module { id: self.id.lookup(db).module(db) } Module { id: self.id.lookup(db.upcast()).module(db.upcast()) }
} }
pub fn krate(self, db: &impl DefDatabase) -> Option<Crate> { pub fn krate(self, db: &dyn HirDatabase) -> Option<Crate> {
Some(self.module(db).krate()) Some(self.module(db).krate())
} }
pub fn name(self, db: &impl HirDatabase) -> Option<Name> { pub fn name(self, db: &dyn HirDatabase) -> Option<Name> {
db.static_data(self.id).name.clone() db.static_data(self.id).name.clone()
} }
} }
@ -684,19 +681,19 @@ pub struct Trait {
} }
impl Trait { impl Trait {
pub fn module(self, db: &impl DefDatabase) -> Module { pub fn module(self, db: &dyn HirDatabase) -> Module {
Module { id: self.id.lookup(db).container.module(db) } Module { id: self.id.lookup(db.upcast()).container.module(db.upcast()) }
} }
pub fn name(self, db: &impl DefDatabase) -> Name { pub fn name(self, db: &dyn HirDatabase) -> Name {
db.trait_data(self.id).name.clone() db.trait_data(self.id).name.clone()
} }
pub fn items(self, db: &impl DefDatabase) -> Vec<AssocItem> { pub fn items(self, db: &dyn HirDatabase) -> Vec<AssocItem> {
db.trait_data(self.id).items.iter().map(|(_name, it)| (*it).into()).collect() db.trait_data(self.id).items.iter().map(|(_name, it)| (*it).into()).collect()
} }
pub fn is_auto(self, db: &impl DefDatabase) -> bool { pub fn is_auto(self, db: &dyn HirDatabase) -> bool {
db.trait_data(self.id).auto db.trait_data(self.id).auto
} }
} }
@ -707,37 +704,37 @@ pub struct TypeAlias {
} }
impl TypeAlias { impl TypeAlias {
pub fn has_non_default_type_params(self, db: &impl HirDatabase) -> bool { pub fn has_non_default_type_params(self, db: &dyn HirDatabase) -> bool {
let subst = db.generic_defaults(self.id.into()); let subst = db.generic_defaults(self.id.into());
subst.iter().any(|ty| ty == &Ty::Unknown) subst.iter().any(|ty| ty == &Ty::Unknown)
} }
pub fn module(self, db: &impl DefDatabase) -> Module { pub fn module(self, db: &dyn HirDatabase) -> Module {
Module { id: self.id.lookup(db).module(db) } Module { id: self.id.lookup(db.upcast()).module(db.upcast()) }
} }
pub fn krate(self, db: &impl DefDatabase) -> Option<Crate> { pub fn krate(self, db: &dyn HirDatabase) -> Option<Crate> {
Some(self.module(db).krate()) Some(self.module(db).krate())
} }
pub fn type_ref(self, db: &impl DefDatabase) -> Option<TypeRef> { pub fn type_ref(self, db: &dyn HirDatabase) -> Option<TypeRef> {
db.type_alias_data(self.id).type_ref.clone() db.type_alias_data(self.id).type_ref.clone()
} }
pub fn ty(self, db: &impl HirDatabase) -> Type { pub fn ty(self, db: &dyn HirDatabase) -> Type {
Type::from_def(db, self.id.lookup(db).module(db).krate, self.id) Type::from_def(db, self.id.lookup(db.upcast()).module(db.upcast()).krate, self.id)
} }
pub fn name(self, db: &impl DefDatabase) -> Name { pub fn name(self, db: &dyn HirDatabase) -> Name {
db.type_alias_data(self.id).name.clone() db.type_alias_data(self.id).name.clone()
} }
} }
impl HasVisibility for TypeAlias { impl HasVisibility for TypeAlias {
fn visibility(&self, db: &impl HirDatabase) -> Visibility { fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
let function_data = db.type_alias_data(self.id); let function_data = db.type_alias_data(self.id);
let visibility = &function_data.visibility; let visibility = &function_data.visibility;
visibility.resolve(db, &self.id.resolver(db)) visibility.resolve(db.upcast(), &self.id.resolver(db.upcast()))
} }
} }
@ -750,14 +747,14 @@ impl MacroDef {
/// FIXME: right now, this just returns the root module of the crate that /// FIXME: right now, this just returns the root module of the crate that
/// defines this macro. The reasons for this is that macros are expanded /// defines this macro. The reasons for this is that macros are expanded
/// early, in `ra_hir_expand`, where modules simply do not exist yet. /// early, in `ra_hir_expand`, where modules simply do not exist yet.
pub fn module(self, db: &impl HirDatabase) -> Option<Module> { pub fn module(self, db: &dyn HirDatabase) -> Option<Module> {
let krate = self.id.krate?; let krate = self.id.krate?;
let module_id = db.crate_def_map(krate).root; let module_id = db.crate_def_map(krate).root;
Some(Module::new(Crate { id: krate }, module_id)) Some(Module::new(Crate { id: krate }, module_id))
} }
/// XXX: this parses the file /// XXX: this parses the file
pub fn name(self, db: &impl HirDatabase) -> Option<Name> { pub fn name(self, db: &dyn HirDatabase) -> Option<Name> {
self.source(db).value.name().map(|it| it.as_name()) self.source(db).value.name().map(|it| it.as_name())
} }
} }
@ -775,50 +772,50 @@ pub enum AssocItemContainer {
ImplDef(ImplDef), ImplDef(ImplDef),
} }
pub trait AsAssocItem { pub trait AsAssocItem {
fn as_assoc_item(self, db: &impl DefDatabase) -> Option<AssocItem>; fn as_assoc_item(self, db: &dyn HirDatabase) -> Option<AssocItem>;
} }
impl AsAssocItem for Function { impl AsAssocItem for Function {
fn as_assoc_item(self, db: &impl DefDatabase) -> Option<AssocItem> { fn as_assoc_item(self, db: &dyn HirDatabase) -> Option<AssocItem> {
as_assoc_item(db, AssocItem::Function, self.id) as_assoc_item(db, AssocItem::Function, self.id)
} }
} }
impl AsAssocItem for Const { impl AsAssocItem for Const {
fn as_assoc_item(self, db: &impl DefDatabase) -> Option<AssocItem> { fn as_assoc_item(self, db: &dyn HirDatabase) -> Option<AssocItem> {
as_assoc_item(db, AssocItem::Const, self.id) as_assoc_item(db, AssocItem::Const, self.id)
} }
} }
impl AsAssocItem for TypeAlias { impl AsAssocItem for TypeAlias {
fn as_assoc_item(self, db: &impl DefDatabase) -> Option<AssocItem> { fn as_assoc_item(self, db: &dyn HirDatabase) -> Option<AssocItem> {
as_assoc_item(db, AssocItem::TypeAlias, self.id) as_assoc_item(db, AssocItem::TypeAlias, self.id)
} }
} }
fn as_assoc_item<ID, DEF, CTOR, AST>(db: &impl DefDatabase, ctor: CTOR, id: ID) -> Option<AssocItem> fn as_assoc_item<ID, DEF, CTOR, AST>(db: &dyn HirDatabase, ctor: CTOR, id: ID) -> Option<AssocItem>
where where
ID: Lookup<Data = AssocItemLoc<AST>>, ID: Lookup<Data = AssocItemLoc<AST>>,
DEF: From<ID>, DEF: From<ID>,
CTOR: FnOnce(DEF) -> AssocItem, CTOR: FnOnce(DEF) -> AssocItem,
AST: AstNode, AST: AstNode,
{ {
match id.lookup(db).container { match id.lookup(db.upcast()).container {
AssocContainerId::TraitId(_) | AssocContainerId::ImplId(_) => Some(ctor(DEF::from(id))), AssocContainerId::TraitId(_) | AssocContainerId::ImplId(_) => Some(ctor(DEF::from(id))),
AssocContainerId::ContainerId(_) => None, AssocContainerId::ContainerId(_) => None,
} }
} }
impl AssocItem { impl AssocItem {
pub fn module(self, db: &impl DefDatabase) -> Module { pub fn module(self, db: &dyn HirDatabase) -> Module {
match self { match self {
AssocItem::Function(f) => f.module(db), AssocItem::Function(f) => f.module(db),
AssocItem::Const(c) => c.module(db), AssocItem::Const(c) => c.module(db),
AssocItem::TypeAlias(t) => t.module(db), AssocItem::TypeAlias(t) => t.module(db),
} }
} }
pub fn container(self, db: &impl DefDatabase) -> AssocItemContainer { pub fn container(self, db: &dyn HirDatabase) -> AssocItemContainer {
let container = match self { let container = match self {
AssocItem::Function(it) => it.id.lookup(db).container, AssocItem::Function(it) => it.id.lookup(db.upcast()).container,
AssocItem::Const(it) => it.id.lookup(db).container, AssocItem::Const(it) => it.id.lookup(db.upcast()).container,
AssocItem::TypeAlias(it) => it.id.lookup(db).container, AssocItem::TypeAlias(it) => it.id.lookup(db.upcast()).container,
}; };
match container { match container {
AssocContainerId::TraitId(id) => AssocItemContainer::Trait(id.into()), AssocContainerId::TraitId(id) => AssocItemContainer::Trait(id.into()),
@ -829,7 +826,7 @@ impl AssocItem {
} }
impl HasVisibility for AssocItem { impl HasVisibility for AssocItem {
fn visibility(&self, db: &impl HirDatabase) -> Visibility { fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
match self { match self {
AssocItem::Function(f) => f.visibility(db), AssocItem::Function(f) => f.visibility(db),
AssocItem::Const(c) => c.visibility(db), AssocItem::Const(c) => c.visibility(db),
@ -862,7 +859,7 @@ impl_froms!(
); );
impl GenericDef { impl GenericDef {
pub fn params(self, db: &impl HirDatabase) -> Vec<TypeParam> { pub fn params(self, db: &dyn HirDatabase) -> Vec<TypeParam> {
let generics: Arc<hir_def::generics::GenericParams> = db.generic_params(self.into()); let generics: Arc<hir_def::generics::GenericParams> = db.generic_params(self.into());
generics generics
.types .types
@ -880,7 +877,7 @@ pub struct Local {
impl Local { impl Local {
// FIXME: why is this an option? It shouldn't be? // FIXME: why is this an option? It shouldn't be?
pub fn name(self, db: &impl HirDatabase) -> Option<Name> { pub fn name(self, db: &dyn HirDatabase) -> Option<Name> {
let body = db.body(self.parent.into()); let body = db.body(self.parent.into());
match &body[self.pat_id] { match &body[self.pat_id] {
Pat::Bind { name, .. } => Some(name.clone()), Pat::Bind { name, .. } => Some(name.clone()),
@ -888,11 +885,11 @@ impl Local {
} }
} }
pub fn is_self(self, db: &impl HirDatabase) -> bool { pub fn is_self(self, db: &dyn HirDatabase) -> bool {
self.name(db) == Some(name![self]) self.name(db) == Some(name![self])
} }
pub fn is_mut(self, db: &impl HirDatabase) -> bool { pub fn is_mut(self, db: &dyn HirDatabase) -> bool {
let body = db.body(self.parent.into()); let body = db.body(self.parent.into());
match &body[self.pat_id] { match &body[self.pat_id] {
Pat::Bind { mode, .. } => match mode { Pat::Bind { mode, .. } => match mode {
@ -903,28 +900,28 @@ impl Local {
} }
} }
pub fn parent(self, _db: &impl HirDatabase) -> DefWithBody { pub fn parent(self, _db: &dyn HirDatabase) -> DefWithBody {
self.parent.into() self.parent.into()
} }
pub fn module(self, db: &impl HirDatabase) -> Module { pub fn module(self, db: &dyn HirDatabase) -> Module {
self.parent(db).module(db) self.parent(db).module(db)
} }
pub fn ty(self, db: &impl HirDatabase) -> Type { pub fn ty(self, db: &dyn HirDatabase) -> Type {
let def = DefWithBodyId::from(self.parent); let def = DefWithBodyId::from(self.parent);
let infer = db.infer(def); let infer = db.infer(def);
let ty = infer[self.pat_id].clone(); let ty = infer[self.pat_id].clone();
let resolver = def.resolver(db); let resolver = def.resolver(db.upcast());
let krate = def.module(db).krate; let krate = def.module(db.upcast()).krate;
let environment = TraitEnvironment::lower(db, &resolver); let environment = TraitEnvironment::lower(db, &resolver);
Type { krate, ty: InEnvironment { value: ty, environment } } Type { krate, ty: InEnvironment { value: ty, environment } }
} }
pub fn source(self, db: &impl HirDatabase) -> InFile<Either<ast::BindPat, ast::SelfParam>> { pub fn source(self, db: &dyn HirDatabase) -> InFile<Either<ast::BindPat, ast::SelfParam>> {
let (_body, source_map) = db.body_with_source_map(self.parent.into()); let (_body, source_map) = db.body_with_source_map(self.parent.into());
let src = source_map.pat_syntax(self.pat_id).unwrap(); // Hmm... let src = source_map.pat_syntax(self.pat_id).unwrap(); // Hmm...
let root = src.file_syntax(db); let root = src.file_syntax(db.upcast());
src.map(|ast| { src.map(|ast| {
ast.map_left(|it| it.cast().unwrap().to_node(&root)).map_right(|it| it.to_node(&root)) ast.map_left(|it| it.cast().unwrap().to_node(&root)).map_right(|it| it.to_node(&root))
}) })
@ -937,13 +934,13 @@ pub struct TypeParam {
} }
impl TypeParam { impl TypeParam {
pub fn name(self, db: &impl HirDatabase) -> Name { pub fn name(self, db: &dyn HirDatabase) -> Name {
let params = db.generic_params(self.id.parent); let params = db.generic_params(self.id.parent);
params.types[self.id.local_id].name.clone().unwrap_or_else(Name::missing) params.types[self.id.local_id].name.clone().unwrap_or_else(Name::missing)
} }
pub fn module(self, db: &impl HirDatabase) -> Module { pub fn module(self, db: &dyn HirDatabase) -> Module {
self.id.parent.module(db).into() self.id.parent.module(db.upcast()).into()
} }
} }
@ -954,55 +951,55 @@ pub struct ImplDef {
} }
impl ImplDef { impl ImplDef {
pub fn all_in_crate(db: &impl HirDatabase, krate: Crate) -> Vec<ImplDef> { pub fn all_in_crate(db: &dyn HirDatabase, krate: Crate) -> Vec<ImplDef> {
let impls = db.impls_in_crate(krate.id); let impls = db.impls_in_crate(krate.id);
impls.all_impls().map(Self::from).collect() impls.all_impls().map(Self::from).collect()
} }
pub fn for_trait(db: &impl HirDatabase, krate: Crate, trait_: Trait) -> Vec<ImplDef> { pub fn for_trait(db: &dyn HirDatabase, krate: Crate, trait_: Trait) -> Vec<ImplDef> {
let impls = db.impls_in_crate(krate.id); let impls = db.impls_in_crate(krate.id);
impls.lookup_impl_defs_for_trait(trait_.id).map(Self::from).collect() impls.lookup_impl_defs_for_trait(trait_.id).map(Self::from).collect()
} }
pub fn target_trait(&self, db: &impl DefDatabase) -> Option<TypeRef> { pub fn target_trait(&self, db: &dyn HirDatabase) -> Option<TypeRef> {
db.impl_data(self.id).target_trait.clone() db.impl_data(self.id).target_trait.clone()
} }
pub fn target_type(&self, db: &impl DefDatabase) -> TypeRef { pub fn target_type(&self, db: &dyn HirDatabase) -> TypeRef {
db.impl_data(self.id).target_type.clone() db.impl_data(self.id).target_type.clone()
} }
pub fn target_ty(&self, db: &impl HirDatabase) -> Type { pub fn target_ty(&self, db: &dyn HirDatabase) -> Type {
let impl_data = db.impl_data(self.id); let impl_data = db.impl_data(self.id);
let resolver = self.id.resolver(db); let resolver = self.id.resolver(db.upcast());
let ctx = hir_ty::TyLoweringContext::new(db, &resolver); let ctx = hir_ty::TyLoweringContext::new(db, &resolver);
let environment = TraitEnvironment::lower(db, &resolver); let environment = TraitEnvironment::lower(db, &resolver);
let ty = Ty::from_hir(&ctx, &impl_data.target_type); let ty = Ty::from_hir(&ctx, &impl_data.target_type);
Type { Type {
krate: self.id.lookup(db).container.module(db).krate, krate: self.id.lookup(db.upcast()).container.module(db.upcast()).krate,
ty: InEnvironment { value: ty, environment }, ty: InEnvironment { value: ty, environment },
} }
} }
pub fn items(&self, db: &impl DefDatabase) -> Vec<AssocItem> { pub fn items(&self, db: &dyn HirDatabase) -> Vec<AssocItem> {
db.impl_data(self.id).items.iter().map(|it| (*it).into()).collect() db.impl_data(self.id).items.iter().map(|it| (*it).into()).collect()
} }
pub fn is_negative(&self, db: &impl DefDatabase) -> bool { pub fn is_negative(&self, db: &dyn HirDatabase) -> bool {
db.impl_data(self.id).is_negative db.impl_data(self.id).is_negative
} }
pub fn module(&self, db: &impl DefDatabase) -> Module { pub fn module(&self, db: &dyn HirDatabase) -> Module {
self.id.lookup(db).container.module(db).into() self.id.lookup(db.upcast()).container.module(db.upcast()).into()
} }
pub fn krate(&self, db: &impl DefDatabase) -> Crate { pub fn krate(&self, db: &dyn HirDatabase) -> Crate {
Crate { id: self.module(db).id.krate } Crate { id: self.module(db).id.krate }
} }
pub fn is_builtin_derive(&self, db: &impl DefDatabase) -> Option<InFile<ast::Attr>> { pub fn is_builtin_derive(&self, db: &dyn HirDatabase) -> Option<InFile<ast::Attr>> {
let src = self.source(db); let src = self.source(db);
let item = src.file_id.is_builtin_derive(db)?; let item = src.file_id.is_builtin_derive(db.upcast())?;
let hygenic = hir_expand::hygiene::Hygiene::new(db, item.file_id); let hygenic = hir_expand::hygiene::Hygiene::new(db.upcast(), item.file_id);
let attr = item let attr = item
.value .value
@ -1028,14 +1025,14 @@ pub struct Type {
} }
impl Type { impl Type {
fn new(db: &impl HirDatabase, krate: CrateId, lexical_env: impl HasResolver, ty: Ty) -> Type { fn new(db: &dyn HirDatabase, krate: CrateId, lexical_env: impl HasResolver, ty: Ty) -> Type {
let resolver = lexical_env.resolver(db); let resolver = lexical_env.resolver(db.upcast());
let environment = TraitEnvironment::lower(db, &resolver); let environment = TraitEnvironment::lower(db, &resolver);
Type { krate, ty: InEnvironment { value: ty, environment } } Type { krate, ty: InEnvironment { value: ty, environment } }
} }
fn from_def( fn from_def(
db: &impl HirDatabase, db: &dyn HirDatabase,
krate: CrateId, krate: CrateId,
def: impl HasResolver + Into<TyDefId> + Into<GenericDefId>, def: impl HasResolver + Into<TyDefId> + Into<GenericDefId>,
) -> Type { ) -> Type {
@ -1073,7 +1070,7 @@ impl Type {
/// Checks that particular type `ty` implements `std::future::Future`. /// Checks that particular type `ty` implements `std::future::Future`.
/// This function is used in `.await` syntax completion. /// This function is used in `.await` syntax completion.
pub fn impls_future(&self, db: &impl HirDatabase) -> bool { pub fn impls_future(&self, db: &dyn HirDatabase) -> bool {
let krate = self.krate; let krate = self.krate;
let std_future_trait = let std_future_trait =
@ -1110,7 +1107,7 @@ impl Type {
} }
} }
pub fn fields(&self, db: &impl HirDatabase) -> Vec<(StructField, Type)> { pub fn fields(&self, db: &dyn HirDatabase) -> Vec<(StructField, Type)> {
if let Ty::Apply(a_ty) = &self.ty.value { if let Ty::Apply(a_ty) = &self.ty.value {
if let TypeCtor::Adt(AdtId::StructId(s)) = a_ty.ctor { if let TypeCtor::Adt(AdtId::StructId(s)) = a_ty.ctor {
let var_def = s.into(); let var_def = s.into();
@ -1128,7 +1125,7 @@ impl Type {
Vec::new() Vec::new()
} }
pub fn tuple_fields(&self, _db: &impl HirDatabase) -> Vec<Type> { pub fn tuple_fields(&self, _db: &dyn HirDatabase) -> Vec<Type> {
let mut res = Vec::new(); let mut res = Vec::new();
if let Ty::Apply(a_ty) = &self.ty.value { if let Ty::Apply(a_ty) = &self.ty.value {
if let TypeCtor::Tuple { .. } = a_ty.ctor { if let TypeCtor::Tuple { .. } = a_ty.ctor {
@ -1143,7 +1140,7 @@ impl Type {
pub fn variant_fields( pub fn variant_fields(
&self, &self,
db: &impl HirDatabase, db: &dyn HirDatabase,
def: VariantDef, def: VariantDef,
) -> Vec<(StructField, Type)> { ) -> Vec<(StructField, Type)> {
// FIXME: check that ty and def match // FIXME: check that ty and def match
@ -1162,7 +1159,7 @@ impl Type {
} }
} }
pub fn autoderef<'a>(&'a self, db: &'a impl HirDatabase) -> impl Iterator<Item = Type> + 'a { pub fn autoderef<'a>(&'a self, db: &'a dyn HirDatabase) -> impl Iterator<Item = Type> + 'a {
// There should be no inference vars in types passed here // There should be no inference vars in types passed here
// FIXME check that? // FIXME check that?
let canonical = Canonical { value: self.ty.value.clone(), num_vars: 0 }; let canonical = Canonical { value: self.ty.value.clone(), num_vars: 0 };
@ -1177,7 +1174,7 @@ impl Type {
// lifetime problems, because we need to borrow temp `CrateImplDefs`. // lifetime problems, because we need to borrow temp `CrateImplDefs`.
pub fn iterate_impl_items<T>( pub fn iterate_impl_items<T>(
self, self,
db: &impl HirDatabase, db: &dyn HirDatabase,
krate: Crate, krate: Crate,
mut callback: impl FnMut(AssocItem) -> Option<T>, mut callback: impl FnMut(AssocItem) -> Option<T>,
) -> Option<T> { ) -> Option<T> {
@ -1197,7 +1194,7 @@ impl Type {
pub fn iterate_method_candidates<T>( pub fn iterate_method_candidates<T>(
&self, &self,
db: &impl HirDatabase, db: &dyn HirDatabase,
krate: Crate, krate: Crate,
traits_in_scope: &FxHashSet<TraitId>, traits_in_scope: &FxHashSet<TraitId>,
name: Option<&Name>, name: Option<&Name>,
@ -1228,7 +1225,7 @@ impl Type {
pub fn iterate_path_candidates<T>( pub fn iterate_path_candidates<T>(
&self, &self,
db: &impl HirDatabase, db: &dyn HirDatabase,
krate: Crate, krate: Crate,
traits_in_scope: &FxHashSet<TraitId>, traits_in_scope: &FxHashSet<TraitId>,
name: Option<&Name>, name: Option<&Name>,
@ -1283,7 +1280,7 @@ impl Type {
} }
impl HirDisplay for Type { impl HirDisplay for Type {
fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> std::fmt::Result { fn hir_fmt(&self, f: &mut HirFormatter) -> std::fmt::Result {
self.ty.value.hir_fmt(f) self.ty.value.hir_fmt(f)
} }
} }
@ -1360,30 +1357,30 @@ impl_froms!(
); );
pub trait HasAttrs { pub trait HasAttrs {
fn attrs(self, db: &impl DefDatabase) -> Attrs; fn attrs(self, db: &dyn HirDatabase) -> Attrs;
} }
impl<T: Into<AttrDef>> HasAttrs for T { impl<T: Into<AttrDef>> HasAttrs for T {
fn attrs(self, db: &impl DefDatabase) -> Attrs { fn attrs(self, db: &dyn HirDatabase) -> Attrs {
let def: AttrDef = self.into(); let def: AttrDef = self.into();
db.attrs(def.into()) db.attrs(def.into())
} }
} }
pub trait Docs { pub trait Docs {
fn docs(&self, db: &impl HirDatabase) -> Option<Documentation>; fn docs(&self, db: &dyn HirDatabase) -> Option<Documentation>;
} }
impl<T: Into<AttrDef> + Copy> Docs for T { impl<T: Into<AttrDef> + Copy> Docs for T {
fn docs(&self, db: &impl HirDatabase) -> Option<Documentation> { fn docs(&self, db: &dyn HirDatabase) -> Option<Documentation> {
let def: AttrDef = (*self).into(); let def: AttrDef = (*self).into();
db.documentation(def.into()) db.documentation(def.into())
} }
} }
pub trait HasVisibility { pub trait HasVisibility {
fn visibility(&self, db: &impl HirDatabase) -> Visibility; fn visibility(&self, db: &dyn HirDatabase) -> Visibility;
fn is_visible_from(&self, db: &impl HirDatabase, module: Module) -> bool { fn is_visible_from(&self, db: &dyn HirDatabase, module: Module) -> bool {
let vis = self.visibility(db); let vis = self.visibility(db);
vis.is_visible_from(db, module.id) vis.is_visible_from(db.upcast(), module.id)
} }
} }
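
Nearly every hunk in the file above follows one mechanical pattern: a `&impl DefDatabase` or `&impl HirDatabase` parameter becomes `&dyn HirDatabase`, and `db.upcast()` is inserted wherever a callee (such as `lookup`, `resolver`, or `child_source`) still wants the narrower `&dyn DefDatabase` view. A compressed before/after sketch with illustrative stand-in types (`Thing`, `lookup_module`, and `MockDb` are not the real names):

```rust
trait Upcast<T: ?Sized> {
    fn upcast(&self) -> &T;
}

trait DefDatabase {
    fn lookup_module(&self, id: u32) -> String;
}

trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {}

// Like `Lookup::lookup` or `Resolver` construction in the real code, this
// helper wants the narrower `&dyn DefDatabase`.
fn module_name(db: &dyn DefDatabase, id: u32) -> String {
    db.lookup_module(id)
}

struct Thing {
    id: u32,
}

impl Thing {
    // Before: generic, so each caller crate gets its own monomorphized copy.
    fn module_before(&self, db: &impl DefDatabase) -> String {
        module_name(db, self.id)
    }

    // After: one non-generic body; `&dyn HirDatabase` does not implicitly
    // coerce to `&dyn DefDatabase`, so `db.upcast()` provides that view.
    fn module_after(&self, db: &dyn HirDatabase) -> String {
        module_name(db.upcast(), self.id)
    }
}

struct MockDb;

impl DefDatabase for MockDb {
    fn lookup_module(&self, id: u32) -> String {
        format!("module{}", id)
    }
}

impl HirDatabase for MockDb {}

impl Upcast<dyn DefDatabase> for MockDb {
    fn upcast(&self) -> &(dyn DefDatabase + 'static) {
        self
    }
}

fn main() {
    let thing = Thing { id: 3 };
    let db = MockDb;
    assert_eq!(thing.module_before(&db), thing.module_after(&db));
}
```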

@ -9,7 +9,7 @@ use hir_def::{
use ra_syntax::ast; use ra_syntax::ast;
use crate::{ use crate::{
db::DefDatabase, Const, Enum, EnumVariant, FieldSource, Function, ImplDef, MacroDef, Module, db::HirDatabase, Const, Enum, EnumVariant, FieldSource, Function, ImplDef, MacroDef, Module,
Static, Struct, StructField, Trait, TypeAlias, TypeParam, Union, Static, Struct, StructField, Trait, TypeAlias, TypeParam, Union,
}; };
@ -17,31 +17,31 @@ pub use hir_expand::InFile;
pub trait HasSource { pub trait HasSource {
type Ast; type Ast;
fn source(self, db: &impl DefDatabase) -> InFile<Self::Ast>; fn source(self, db: &dyn HirDatabase) -> InFile<Self::Ast>;
} }
/// NB: Module is !HasSource, because it has two source nodes at the same time: /// NB: Module is !HasSource, because it has two source nodes at the same time:
/// definition and declaration. /// definition and declaration.
impl Module { impl Module {
/// Returns a node which defines this module. That is, a file or a `mod foo {}` with items. /// Returns a node which defines this module. That is, a file or a `mod foo {}` with items.
pub fn definition_source(self, db: &impl DefDatabase) -> InFile<ModuleSource> { pub fn definition_source(self, db: &dyn HirDatabase) -> InFile<ModuleSource> {
let def_map = db.crate_def_map(self.id.krate); let def_map = db.crate_def_map(self.id.krate);
def_map[self.id.local_id].definition_source(db) def_map[self.id.local_id].definition_source(db.upcast())
} }
/// Returns a node which declares this module, either a `mod foo;` or a `mod foo {}`. /// Returns a node which declares this module, either a `mod foo;` or a `mod foo {}`.
/// `None` for the crate root. /// `None` for the crate root.
pub fn declaration_source(self, db: &impl DefDatabase) -> Option<InFile<ast::Module>> { pub fn declaration_source(self, db: &dyn HirDatabase) -> Option<InFile<ast::Module>> {
let def_map = db.crate_def_map(self.id.krate); let def_map = db.crate_def_map(self.id.krate);
def_map[self.id.local_id].declaration_source(db) def_map[self.id.local_id].declaration_source(db.upcast())
} }
} }
impl HasSource for StructField { impl HasSource for StructField {
type Ast = FieldSource; type Ast = FieldSource;
fn source(self, db: &impl DefDatabase) -> InFile<FieldSource> { fn source(self, db: &dyn HirDatabase) -> InFile<FieldSource> {
let var = VariantId::from(self.parent); let var = VariantId::from(self.parent);
let src = var.child_source(db); let src = var.child_source(db.upcast());
src.map(|it| match it[self.id].clone() { src.map(|it| match it[self.id].clone() {
Either::Left(it) => FieldSource::Pos(it), Either::Left(it) => FieldSource::Pos(it),
Either::Right(it) => FieldSource::Named(it), Either::Right(it) => FieldSource::Named(it),
@ -50,78 +50,78 @@ impl HasSource for StructField {
} }
impl HasSource for Struct { impl HasSource for Struct {
type Ast = ast::StructDef; type Ast = ast::StructDef;
fn source(self, db: &impl DefDatabase) -> InFile<ast::StructDef> { fn source(self, db: &dyn HirDatabase) -> InFile<ast::StructDef> {
self.id.lookup(db).source(db) self.id.lookup(db.upcast()).source(db.upcast())
} }
} }
impl HasSource for Union { impl HasSource for Union {
type Ast = ast::UnionDef; type Ast = ast::UnionDef;
fn source(self, db: &impl DefDatabase) -> InFile<ast::UnionDef> { fn source(self, db: &dyn HirDatabase) -> InFile<ast::UnionDef> {
self.id.lookup(db).source(db) self.id.lookup(db.upcast()).source(db.upcast())
} }
} }
impl HasSource for Enum { impl HasSource for Enum {
type Ast = ast::EnumDef; type Ast = ast::EnumDef;
fn source(self, db: &impl DefDatabase) -> InFile<ast::EnumDef> { fn source(self, db: &dyn HirDatabase) -> InFile<ast::EnumDef> {
self.id.lookup(db).source(db) self.id.lookup(db.upcast()).source(db.upcast())
} }
} }
impl HasSource for EnumVariant { impl HasSource for EnumVariant {
type Ast = ast::EnumVariant; type Ast = ast::EnumVariant;
fn source(self, db: &impl DefDatabase) -> InFile<ast::EnumVariant> { fn source(self, db: &dyn HirDatabase) -> InFile<ast::EnumVariant> {
self.parent.id.child_source(db).map(|map| map[self.id].clone()) self.parent.id.child_source(db.upcast()).map(|map| map[self.id].clone())
} }
} }
impl HasSource for Function { impl HasSource for Function {
type Ast = ast::FnDef; type Ast = ast::FnDef;
fn source(self, db: &impl DefDatabase) -> InFile<ast::FnDef> { fn source(self, db: &dyn HirDatabase) -> InFile<ast::FnDef> {
self.id.lookup(db).source(db) self.id.lookup(db.upcast()).source(db.upcast())
} }
} }
impl HasSource for Const { impl HasSource for Const {
type Ast = ast::ConstDef; type Ast = ast::ConstDef;
fn source(self, db: &impl DefDatabase) -> InFile<ast::ConstDef> { fn source(self, db: &dyn HirDatabase) -> InFile<ast::ConstDef> {
self.id.lookup(db).source(db) self.id.lookup(db.upcast()).source(db.upcast())
} }
} }
impl HasSource for Static { impl HasSource for Static {
type Ast = ast::StaticDef; type Ast = ast::StaticDef;
fn source(self, db: &impl DefDatabase) -> InFile<ast::StaticDef> { fn source(self, db: &dyn HirDatabase) -> InFile<ast::StaticDef> {
self.id.lookup(db).source(db) self.id.lookup(db.upcast()).source(db.upcast())
} }
} }
impl HasSource for Trait { impl HasSource for Trait {
type Ast = ast::TraitDef; type Ast = ast::TraitDef;
fn source(self, db: &impl DefDatabase) -> InFile<ast::TraitDef> { fn source(self, db: &dyn HirDatabase) -> InFile<ast::TraitDef> {
self.id.lookup(db).source(db) self.id.lookup(db.upcast()).source(db.upcast())
} }
} }
impl HasSource for TypeAlias { impl HasSource for TypeAlias {
type Ast = ast::TypeAliasDef; type Ast = ast::TypeAliasDef;
fn source(self, db: &impl DefDatabase) -> InFile<ast::TypeAliasDef> { fn source(self, db: &dyn HirDatabase) -> InFile<ast::TypeAliasDef> {
self.id.lookup(db).source(db) self.id.lookup(db.upcast()).source(db.upcast())
} }
} }
impl HasSource for MacroDef { impl HasSource for MacroDef {
type Ast = ast::MacroCall; type Ast = ast::MacroCall;
fn source(self, db: &impl DefDatabase) -> InFile<ast::MacroCall> { fn source(self, db: &dyn HirDatabase) -> InFile<ast::MacroCall> {
InFile { InFile {
file_id: self.id.ast_id.expect("MacroDef without ast_id").file_id, file_id: self.id.ast_id.expect("MacroDef without ast_id").file_id,
value: self.id.ast_id.expect("MacroDef without ast_id").to_node(db), value: self.id.ast_id.expect("MacroDef without ast_id").to_node(db.upcast()),
} }
} }
} }
impl HasSource for ImplDef { impl HasSource for ImplDef {
type Ast = ast::ImplDef; type Ast = ast::ImplDef;
fn source(self, db: &impl DefDatabase) -> InFile<ast::ImplDef> { fn source(self, db: &dyn HirDatabase) -> InFile<ast::ImplDef> {
self.id.lookup(db).source(db) self.id.lookup(db.upcast()).source(db.upcast())
} }
} }
impl HasSource for TypeParam { impl HasSource for TypeParam {
type Ast = Either<ast::TraitDef, ast::TypeParam>; type Ast = Either<ast::TraitDef, ast::TypeParam>;
fn source(self, db: &impl DefDatabase) -> InFile<Self::Ast> { fn source(self, db: &dyn HirDatabase) -> InFile<Self::Ast> {
let child_source = self.id.parent.child_source(db); let child_source = self.id.parent.child_source(db.upcast());
child_source.map(|it| it[self.id.local_id].clone()) child_source.map(|it| it[self.id.local_id].clone())
} }
} }

@ -190,7 +190,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
T::to_def(self, src) T::to_def(self, src)
} }
fn with_ctx<F: FnOnce(&mut SourceToDefCtx<&DB>) -> T, T>(&self, f: F) -> T { fn with_ctx<F: FnOnce(&mut SourceToDefCtx) -> T, T>(&self, f: F) -> T {
let mut cache = self.s2d_cache.borrow_mut(); let mut cache = self.s2d_cache.borrow_mut();
let mut ctx = SourceToDefCtx { db: self.db, cache: &mut *cache }; let mut ctx = SourceToDefCtx { db: self.db, cache: &mut *cache };
f(&mut ctx) f(&mut ctx)
@ -369,35 +369,35 @@ impl<'a, DB: HirDatabase> SemanticsScope<'a, DB> {
} }
// FIXME: Change `HasSource` trait to work with `Semantics` and remove this? // FIXME: Change `HasSource` trait to work with `Semantics` and remove this?
pub fn original_range(db: &impl HirDatabase, node: InFile<&SyntaxNode>) -> FileRange { pub fn original_range(db: &dyn HirDatabase, node: InFile<&SyntaxNode>) -> FileRange {
if let Some(range) = original_range_opt(db, node) { if let Some(range) = original_range_opt(db, node) {
let original_file = range.file_id.original_file(db); let original_file = range.file_id.original_file(db.upcast());
if range.file_id == original_file.into() { if range.file_id == original_file.into() {
return FileRange { file_id: original_file, range: range.value }; return FileRange { file_id: original_file, range: range.value };
} }
log::error!("Fail to mapping up more for {:?}", range); log::error!("Fail to mapping up more for {:?}", range);
return FileRange { file_id: range.file_id.original_file(db), range: range.value }; return FileRange { file_id: range.file_id.original_file(db.upcast()), range: range.value };
} }
// Fall back to whole macro call // Fall back to whole macro call
if let Some(expansion) = node.file_id.expansion_info(db) { if let Some(expansion) = node.file_id.expansion_info(db.upcast()) {
if let Some(call_node) = expansion.call_node() { if let Some(call_node) = expansion.call_node() {
return FileRange { return FileRange {
file_id: call_node.file_id.original_file(db), file_id: call_node.file_id.original_file(db.upcast()),
range: call_node.value.text_range(), range: call_node.value.text_range(),
}; };
} }
} }
FileRange { file_id: node.file_id.original_file(db), range: node.value.text_range() } FileRange { file_id: node.file_id.original_file(db.upcast()), range: node.value.text_range() }
} }
fn original_range_opt( fn original_range_opt(
db: &impl HirDatabase, db: &dyn HirDatabase,
node: InFile<&SyntaxNode>, node: InFile<&SyntaxNode>,
) -> Option<InFile<TextRange>> { ) -> Option<InFile<TextRange>> {
let expansion = node.file_id.expansion_info(db)?; let expansion = node.file_id.expansion_info(db.upcast())?;
// the input node has only one token ? // the input node has only one token ?
let single = skip_trivia_token(node.value.first_token()?, Direction::Next)? let single = skip_trivia_token(node.value.first_token()?, Direction::Next)?
@ -419,7 +419,7 @@ fn original_range_opt(
} }
fn ascend_call_token( fn ascend_call_token(
db: &impl HirDatabase, db: &dyn HirDatabase,
expansion: &ExpansionInfo, expansion: &ExpansionInfo,
token: InFile<SyntaxToken>, token: InFile<SyntaxToken>,
) -> Option<InFile<SyntaxToken>> { ) -> Option<InFile<SyntaxToken>> {
@ -427,7 +427,7 @@ fn ascend_call_token(
if origin != Origin::Call { if origin != Origin::Call {
return None; return None;
} }
if let Some(info) = mapped.file_id.expansion_info(db) { if let Some(info) = mapped.file_id.expansion_info(db.upcast()) {
return ascend_call_token(db, &info, mapped); return ascend_call_token(db, &info, mapped);
} }
Some(mapped) Some(mapped)

@ -21,12 +21,12 @@ use crate::{db::HirDatabase, InFile, MacroDefId};
pub(super) type SourceToDefCache = FxHashMap<ChildContainer, DynMap>; pub(super) type SourceToDefCache = FxHashMap<ChildContainer, DynMap>;
pub(super) struct SourceToDefCtx<'a, DB> { pub(super) struct SourceToDefCtx<'a, 'b> {
pub(super) db: DB, pub(super) db: &'b dyn HirDatabase,
pub(super) cache: &'a mut SourceToDefCache, pub(super) cache: &'a mut SourceToDefCache,
} }
impl<DB: HirDatabase> SourceToDefCtx<'_, &'_ DB> { impl SourceToDefCtx<'_, '_> {
pub(super) fn file_to_def(&mut self, file: FileId) -> Option<ModuleId> { pub(super) fn file_to_def(&mut self, file: FileId) -> Option<ModuleId> {
let _p = profile("SourceBinder::to_module_def"); let _p = profile("SourceBinder::to_module_def");
let (krate, local_id) = self.db.relevant_crates(file).iter().find_map(|&crate_id| { let (krate, local_id) = self.db.relevant_crates(file).iter().find_map(|&crate_id| {
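
The same change applied at the struct level: `SourceToDefCtx` drops its `DB` type parameter and instead stores `&dyn HirDatabase` behind an explicit lifetime, so its impl block is compiled exactly once. A rough, self-contained sketch of that shape (the `Cache` type, `crates_for` method, and `MockDb` are illustrative stand-ins):

```rust
trait HirDatabase {
    fn relevant_crates(&self, file_id: u32) -> Vec<u32>;
}

struct Cache;

// Before (roughly): struct SourceToDefCtx<'a, DB> { db: DB, cache: &'a mut Cache }
// After: no type parameter, so the impl below is not monomorphized per DB type.
struct SourceToDefCtx<'a, 'b> {
    db: &'b dyn HirDatabase,
    cache: &'a mut Cache,
}

impl SourceToDefCtx<'_, '_> {
    fn crates_for(&self, file_id: u32) -> Vec<u32> {
        self.db.relevant_crates(file_id)
    }
}

struct MockDb;

impl HirDatabase for MockDb {
    fn relevant_crates(&self, _file_id: u32) -> Vec<u32> {
        vec![0]
    }
}

fn main() {
    let db = MockDb;
    let mut cache = Cache;
    let ctx = SourceToDefCtx { db: &db, cache: &mut cache };
    assert_eq!(ctx.crates_for(1), vec![0]);
}
```
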
@ -43,7 +43,7 @@ impl<DB: HirDatabase> SourceToDefCtx<'_, &'_ DB> {
.as_ref() .as_ref()
.map(|it| it.syntax()) .map(|it| it.syntax())
.cloned() .cloned()
.ancestors_with_macros(self.db) .ancestors_with_macros(self.db.upcast())
.skip(1) .skip(1)
.find_map(|it| { .find_map(|it| {
let m = ast::Module::cast(it.value.clone())?; let m = ast::Module::cast(it.value.clone())?;
@ -53,7 +53,7 @@ impl<DB: HirDatabase> SourceToDefCtx<'_, &'_ DB> {
let parent_module = match parent_declaration { let parent_module = match parent_declaration {
Some(parent_declaration) => self.module_to_def(parent_declaration), Some(parent_declaration) => self.module_to_def(parent_declaration),
None => { None => {
let file_id = src.file_id.original_file(self.db); let file_id = src.file_id.original_file(self.db.upcast());
self.file_to_def(file_id) self.file_to_def(file_id)
} }
}?; }?;
@ -147,7 +147,7 @@ impl<DB: HirDatabase> SourceToDefCtx<'_, &'_ DB> {
// FIXME: use DynMap as well? // FIXME: use DynMap as well?
pub(super) fn macro_call_to_def(&mut self, src: InFile<ast::MacroCall>) -> Option<MacroDefId> { pub(super) fn macro_call_to_def(&mut self, src: InFile<ast::MacroCall>) -> Option<MacroDefId> {
let kind = MacroDefKind::Declarative; let kind = MacroDefKind::Declarative;
let file_id = src.file_id.original_file(self.db); let file_id = src.file_id.original_file(self.db.upcast());
let krate = self.file_to_def(file_id)?.krate; let krate = self.file_to_def(file_id)?.krate;
let file_ast_id = self.db.ast_id_map(src.file_id).ast_id(&src.value); let file_ast_id = self.db.ast_id_map(src.file_id).ast_id(&src.value);
let ast_id = Some(AstId::new(src.file_id, file_ast_id)); let ast_id = Some(AstId::new(src.file_id, file_ast_id));
@ -155,7 +155,7 @@ impl<DB: HirDatabase> SourceToDefCtx<'_, &'_ DB> {
} }
pub(super) fn find_container(&mut self, src: InFile<&SyntaxNode>) -> Option<ChildContainer> { pub(super) fn find_container(&mut self, src: InFile<&SyntaxNode>) -> Option<ChildContainer> {
for container in src.cloned().ancestors_with_macros(self.db).skip(1) { for container in src.cloned().ancestors_with_macros(self.db.upcast()).skip(1) {
let res: ChildContainer = match_ast! { let res: ChildContainer = match_ast! {
match (container.value) { match (container.value) {
ast::Module(it) => { ast::Module(it) => {
@ -200,12 +200,12 @@ impl<DB: HirDatabase> SourceToDefCtx<'_, &'_ DB> {
return Some(res); return Some(res);
} }
let def = self.file_to_def(src.file_id.original_file(self.db))?; let def = self.file_to_def(src.file_id.original_file(self.db.upcast()))?;
Some(def.into()) Some(def.into())
} }
fn find_type_param_container(&mut self, src: InFile<&SyntaxNode>) -> Option<GenericDefId> { fn find_type_param_container(&mut self, src: InFile<&SyntaxNode>) -> Option<GenericDefId> {
for container in src.cloned().ancestors_with_macros(self.db).skip(1) { for container in src.cloned().ancestors_with_macros(self.db.upcast()).skip(1) {
let res: GenericDefId = match_ast! { let res: GenericDefId = match_ast! {
match (container.value) { match (container.value) {
ast::FnDef(it) => { self.fn_to_def(container.with_value(it))?.into() }, ast::FnDef(it) => { self.fn_to_def(container.with_value(it))?.into() },
@ -223,7 +223,7 @@ impl<DB: HirDatabase> SourceToDefCtx<'_, &'_ DB> {
} }
fn find_pat_container(&mut self, src: InFile<&SyntaxNode>) -> Option<DefWithBodyId> { fn find_pat_container(&mut self, src: InFile<&SyntaxNode>) -> Option<DefWithBodyId> {
for container in src.cloned().ancestors_with_macros(self.db).skip(1) { for container in src.cloned().ancestors_with_macros(self.db.upcast()).skip(1) {
let res: DefWithBodyId = match_ast! { let res: DefWithBodyId = match_ast! {
match (container.value) { match (container.value) {
ast::ConstDef(it) => { self.const_to_def(container.with_value(it))?.into() }, ast::ConstDef(it) => { self.const_to_def(container.with_value(it))?.into() },
@ -262,7 +262,8 @@ impl_froms! {
} }
impl ChildContainer { impl ChildContainer {
fn child_by_source(self, db: &impl HirDatabase) -> DynMap { fn child_by_source(self, db: &dyn HirDatabase) -> DynMap {
let db = db.upcast();
match self { match self {
ChildContainer::DefWithBodyId(it) => it.child_by_source(db), ChildContainer::DefWithBodyId(it) => it.child_by_source(db),
ChildContainer::ModuleId(it) => it.child_by_source(db), ChildContainer::ModuleId(it) => it.child_by_source(db),

View file

@ -42,7 +42,7 @@ pub(crate) struct SourceAnalyzer {
impl SourceAnalyzer { impl SourceAnalyzer {
pub(crate) fn new_for_body( pub(crate) fn new_for_body(
db: &impl HirDatabase, db: &dyn HirDatabase,
def: DefWithBodyId, def: DefWithBodyId,
node: InFile<&SyntaxNode>, node: InFile<&SyntaxNode>,
offset: Option<TextUnit>, offset: Option<TextUnit>,
@ -53,7 +53,7 @@ impl SourceAnalyzer {
None => scope_for(&scopes, &source_map, node), None => scope_for(&scopes, &source_map, node),
Some(offset) => scope_for_offset(&scopes, &source_map, node.with_value(offset)), Some(offset) => scope_for_offset(&scopes, &source_map, node.with_value(offset)),
}; };
let resolver = resolver_for_scope(db, def, scope); let resolver = resolver_for_scope(db.upcast(), def, scope);
SourceAnalyzer { SourceAnalyzer {
resolver, resolver,
body: Some(body), body: Some(body),
@ -90,7 +90,7 @@ impl SourceAnalyzer {
fn expand_expr( fn expand_expr(
&self, &self,
db: &impl HirDatabase, db: &dyn HirDatabase,
expr: InFile<ast::MacroCall>, expr: InFile<ast::MacroCall>,
) -> Option<InFile<ast::Expr>> { ) -> Option<InFile<ast::Expr>> {
let macro_file = self.body_source_map.as_ref()?.node_macro_file(expr.as_ref())?; let macro_file = self.body_source_map.as_ref()?.node_macro_file(expr.as_ref())?;
@ -103,11 +103,11 @@ impl SourceAnalyzer {
Some(res) Some(res)
} }
fn trait_env(&self, db: &impl HirDatabase) -> Arc<TraitEnvironment> { fn trait_env(&self, db: &dyn HirDatabase) -> Arc<TraitEnvironment> {
TraitEnvironment::lower(db, &self.resolver) TraitEnvironment::lower(db, &self.resolver)
} }
pub(crate) fn type_of(&self, db: &impl HirDatabase, expr: &ast::Expr) -> Option<Type> { pub(crate) fn type_of(&self, db: &dyn HirDatabase, expr: &ast::Expr) -> Option<Type> {
let expr_id = match expr { let expr_id = match expr {
ast::Expr::MacroCall(call) => { ast::Expr::MacroCall(call) => {
let expr = self.expand_expr(db, InFile::new(self.file_id, call.clone()))?; let expr = self.expand_expr(db, InFile::new(self.file_id, call.clone()))?;
@ -121,7 +121,7 @@ impl SourceAnalyzer {
Some(Type { krate: self.resolver.krate()?, ty: InEnvironment { value: ty, environment } }) Some(Type { krate: self.resolver.krate()?, ty: InEnvironment { value: ty, environment } })
} }
pub(crate) fn type_of_pat(&self, db: &impl HirDatabase, pat: &ast::Pat) -> Option<Type> { pub(crate) fn type_of_pat(&self, db: &dyn HirDatabase, pat: &ast::Pat) -> Option<Type> {
let pat_id = self.pat_id(pat)?; let pat_id = self.pat_id(pat)?;
let ty = self.infer.as_ref()?[pat_id].clone(); let ty = self.infer.as_ref()?[pat_id].clone();
let environment = self.trait_env(db); let environment = self.trait_env(db);
@ -140,7 +140,7 @@ impl SourceAnalyzer {
pub(crate) fn resolve_record_field( pub(crate) fn resolve_record_field(
&self, &self,
db: &impl HirDatabase, db: &dyn HirDatabase,
field: &ast::RecordField, field: &ast::RecordField,
) -> Option<(crate::StructField, Option<Local>)> { ) -> Option<(crate::StructField, Option<Local>)> {
let (expr_id, local) = match field.expr() { let (expr_id, local) = match field.expr() {
@ -150,7 +150,7 @@ impl SourceAnalyzer {
let expr_id = self.body_source_map.as_ref()?.field_init_shorthand_expr(src)?; let expr_id = self.body_source_map.as_ref()?.field_init_shorthand_expr(src)?;
let local_name = field.name_ref()?.as_name(); let local_name = field.name_ref()?.as_name();
let path = ModPath::from_segments(PathKind::Plain, once(local_name)); let path = ModPath::from_segments(PathKind::Plain, once(local_name));
let local = match self.resolver.resolve_path_in_value_ns_fully(db, &path) { let local = match self.resolver.resolve_path_in_value_ns_fully(db.upcast(), &path) {
Some(ValueNs::LocalBinding(pat_id)) => { Some(ValueNs::LocalBinding(pat_id)) => {
Some(Local { pat_id, parent: self.resolver.body_owner()? }) Some(Local { pat_id, parent: self.resolver.body_owner()? })
} }
@ -181,17 +181,17 @@ impl SourceAnalyzer {
pub(crate) fn resolve_macro_call( pub(crate) fn resolve_macro_call(
&self, &self,
db: &impl HirDatabase, db: &dyn HirDatabase,
macro_call: InFile<&ast::MacroCall>, macro_call: InFile<&ast::MacroCall>,
) -> Option<MacroDef> { ) -> Option<MacroDef> {
let hygiene = Hygiene::new(db, macro_call.file_id); let hygiene = Hygiene::new(db.upcast(), macro_call.file_id);
let path = macro_call.value.path().and_then(|ast| Path::from_src(ast, &hygiene))?; let path = macro_call.value.path().and_then(|ast| Path::from_src(ast, &hygiene))?;
self.resolver.resolve_path_as_macro(db, path.mod_path()).map(|it| it.into()) self.resolver.resolve_path_as_macro(db.upcast(), path.mod_path()).map(|it| it.into())
} }
pub(crate) fn resolve_bind_pat_to_const( pub(crate) fn resolve_bind_pat_to_const(
&self, &self,
db: &impl HirDatabase, db: &dyn HirDatabase,
pat: &ast::BindPat, pat: &ast::BindPat,
) -> Option<ModuleDef> { ) -> Option<ModuleDef> {
let pat_id = self.pat_id(&pat.clone().into())?; let pat_id = self.pat_id(&pat.clone().into())?;
@ -209,7 +209,7 @@ impl SourceAnalyzer {
pub(crate) fn resolve_path( pub(crate) fn resolve_path(
&self, &self,
db: &impl HirDatabase, db: &dyn HirDatabase,
path: &ast::Path, path: &ast::Path,
) -> Option<PathResolution> { ) -> Option<PathResolution> {
if let Some(path_expr) = path.syntax().parent().and_then(ast::PathExpr::cast) { if let Some(path_expr) = path.syntax().parent().and_then(ast::PathExpr::cast) {
@ -231,11 +231,12 @@ impl SourceAnalyzer {
pub(crate) fn expand( pub(crate) fn expand(
&self, &self,
db: &impl HirDatabase, db: &dyn HirDatabase,
macro_call: InFile<&ast::MacroCall>, macro_call: InFile<&ast::MacroCall>,
) -> Option<HirFileId> { ) -> Option<HirFileId> {
let macro_call_id = let macro_call_id = macro_call.as_call_id(db.upcast(), |path| {
macro_call.as_call_id(db, |path| self.resolver.resolve_path_as_macro(db, &path))?; self.resolver.resolve_path_as_macro(db.upcast(), &path)
})?;
Some(macro_call_id.as_file()) Some(macro_call_id.as_file())
} }
} }
@ -283,42 +284,46 @@ fn scope_for_offset(
} }
pub(crate) fn resolve_hir_path( pub(crate) fn resolve_hir_path(
db: &impl HirDatabase, db: &dyn HirDatabase,
resolver: &Resolver, resolver: &Resolver,
path: &crate::Path, path: &crate::Path,
) -> Option<PathResolution> { ) -> Option<PathResolution> {
let types = resolver.resolve_path_in_type_ns_fully(db, path.mod_path()).map(|ty| match ty { let types =
TypeNs::SelfType(it) => PathResolution::SelfType(it.into()), resolver.resolve_path_in_type_ns_fully(db.upcast(), path.mod_path()).map(|ty| match ty {
TypeNs::GenericParam(id) => PathResolution::TypeParam(TypeParam { id }), TypeNs::SelfType(it) => PathResolution::SelfType(it.into()),
TypeNs::AdtSelfType(it) | TypeNs::AdtId(it) => PathResolution::Def(Adt::from(it).into()), TypeNs::GenericParam(id) => PathResolution::TypeParam(TypeParam { id }),
TypeNs::EnumVariantId(it) => PathResolution::Def(EnumVariant::from(it).into()), TypeNs::AdtSelfType(it) | TypeNs::AdtId(it) => {
TypeNs::TypeAliasId(it) => PathResolution::Def(TypeAlias::from(it).into()), PathResolution::Def(Adt::from(it).into())
TypeNs::BuiltinType(it) => PathResolution::Def(it.into()),
TypeNs::TraitId(it) => PathResolution::Def(Trait::from(it).into()),
});
let body_owner = resolver.body_owner();
let values = resolver.resolve_path_in_value_ns_fully(db, path.mod_path()).and_then(|val| {
let res = match val {
ValueNs::LocalBinding(pat_id) => {
let var = Local { parent: body_owner?.into(), pat_id };
PathResolution::Local(var)
} }
ValueNs::FunctionId(it) => PathResolution::Def(Function::from(it).into()), TypeNs::EnumVariantId(it) => PathResolution::Def(EnumVariant::from(it).into()),
ValueNs::ConstId(it) => PathResolution::Def(Const::from(it).into()), TypeNs::TypeAliasId(it) => PathResolution::Def(TypeAlias::from(it).into()),
ValueNs::StaticId(it) => PathResolution::Def(Static::from(it).into()), TypeNs::BuiltinType(it) => PathResolution::Def(it.into()),
ValueNs::StructId(it) => PathResolution::Def(Struct::from(it).into()), TypeNs::TraitId(it) => PathResolution::Def(Trait::from(it).into()),
ValueNs::EnumVariantId(it) => PathResolution::Def(EnumVariant::from(it).into()), });
}; let body_owner = resolver.body_owner();
Some(res) let values =
}); resolver.resolve_path_in_value_ns_fully(db.upcast(), path.mod_path()).and_then(|val| {
let res = match val {
ValueNs::LocalBinding(pat_id) => {
let var = Local { parent: body_owner?.into(), pat_id };
PathResolution::Local(var)
}
ValueNs::FunctionId(it) => PathResolution::Def(Function::from(it).into()),
ValueNs::ConstId(it) => PathResolution::Def(Const::from(it).into()),
ValueNs::StaticId(it) => PathResolution::Def(Static::from(it).into()),
ValueNs::StructId(it) => PathResolution::Def(Struct::from(it).into()),
ValueNs::EnumVariantId(it) => PathResolution::Def(EnumVariant::from(it).into()),
};
Some(res)
});
let items = resolver let items = resolver
.resolve_module_path_in_items(db, path.mod_path()) .resolve_module_path_in_items(db.upcast(), path.mod_path())
.take_types() .take_types()
.map(|it| PathResolution::Def(it.into())); .map(|it| PathResolution::Def(it.into()));
types.or(values).or(items).or_else(|| { types.or(values).or(items).or_else(|| {
resolver resolver
.resolve_path_as_macro(db, path.mod_path()) .resolve_path_as_macro(db.upcast(), path.mod_path())
.map(|def| PathResolution::Macro(def.into())) .map(|def| PathResolution::Macro(def.into()))
}) })
} }

View file

@ -52,14 +52,14 @@ pub struct StructFieldData {
} }
impl StructData { impl StructData {
pub(crate) fn struct_data_query(db: &impl DefDatabase, id: StructId) -> Arc<StructData> { pub(crate) fn struct_data_query(db: &dyn DefDatabase, id: StructId) -> Arc<StructData> {
let src = id.lookup(db).source(db); let src = id.lookup(db).source(db);
let name = src.value.name().map_or_else(Name::missing, |n| n.as_name()); let name = src.value.name().map_or_else(Name::missing, |n| n.as_name());
let variant_data = VariantData::new(db, src.map(|s| s.kind())); let variant_data = VariantData::new(db, src.map(|s| s.kind()));
let variant_data = Arc::new(variant_data); let variant_data = Arc::new(variant_data);
Arc::new(StructData { name, variant_data }) Arc::new(StructData { name, variant_data })
} }
pub(crate) fn union_data_query(db: &impl DefDatabase, id: UnionId) -> Arc<StructData> { pub(crate) fn union_data_query(db: &dyn DefDatabase, id: UnionId) -> Arc<StructData> {
let src = id.lookup(db).source(db); let src = id.lookup(db).source(db);
let name = src.value.name().map_or_else(Name::missing, |n| n.as_name()); let name = src.value.name().map_or_else(Name::missing, |n| n.as_name());
let variant_data = VariantData::new( let variant_data = VariantData::new(
@ -76,7 +76,7 @@ impl StructData {
} }
impl EnumData { impl EnumData {
pub(crate) fn enum_data_query(db: &impl DefDatabase, e: EnumId) -> Arc<EnumData> { pub(crate) fn enum_data_query(db: &dyn DefDatabase, e: EnumId) -> Arc<EnumData> {
let _p = profile("enum_data_query"); let _p = profile("enum_data_query");
let src = e.lookup(db).source(db); let src = e.lookup(db).source(db);
let name = src.value.name().map_or_else(Name::missing, |n| n.as_name()); let name = src.value.name().map_or_else(Name::missing, |n| n.as_name());
@ -94,7 +94,7 @@ impl EnumData {
impl HasChildSource for EnumId { impl HasChildSource for EnumId {
type ChildId = LocalEnumVariantId; type ChildId = LocalEnumVariantId;
type Value = ast::EnumVariant; type Value = ast::EnumVariant;
fn child_source(&self, db: &impl DefDatabase) -> InFile<ArenaMap<Self::ChildId, Self::Value>> { fn child_source(&self, db: &dyn DefDatabase) -> InFile<ArenaMap<Self::ChildId, Self::Value>> {
let src = self.lookup(db).source(db); let src = self.lookup(db).source(db);
let mut trace = Trace::new_for_map(); let mut trace = Trace::new_for_map();
lower_enum(db, &mut trace, &src); lower_enum(db, &mut trace, &src);
@ -103,7 +103,7 @@ impl HasChildSource for EnumId {
} }
fn lower_enum( fn lower_enum(
db: &impl DefDatabase, db: &dyn DefDatabase,
trace: &mut Trace<LocalEnumVariantId, EnumVariantData, ast::EnumVariant>, trace: &mut Trace<LocalEnumVariantId, EnumVariantData, ast::EnumVariant>,
ast: &InFile<ast::EnumDef>, ast: &InFile<ast::EnumDef>,
) { ) {
@ -119,7 +119,7 @@ fn lower_enum(
} }
impl VariantData { impl VariantData {
fn new(db: &impl DefDatabase, flavor: InFile<ast::StructKind>) -> Self { fn new(db: &dyn DefDatabase, flavor: InFile<ast::StructKind>) -> Self {
let mut trace = Trace::new_for_arena(); let mut trace = Trace::new_for_arena();
match lower_struct(db, &mut trace, &flavor) { match lower_struct(db, &mut trace, &flavor) {
StructKind::Tuple => VariantData::Tuple(trace.into_arena()), StructKind::Tuple => VariantData::Tuple(trace.into_arena()),
@ -153,7 +153,7 @@ impl HasChildSource for VariantId {
type ChildId = LocalStructFieldId; type ChildId = LocalStructFieldId;
type Value = Either<ast::TupleFieldDef, ast::RecordFieldDef>; type Value = Either<ast::TupleFieldDef, ast::RecordFieldDef>;
fn child_source(&self, db: &impl DefDatabase) -> InFile<ArenaMap<Self::ChildId, Self::Value>> { fn child_source(&self, db: &dyn DefDatabase) -> InFile<ArenaMap<Self::ChildId, Self::Value>> {
let src = match self { let src = match self {
VariantId::EnumVariantId(it) => { VariantId::EnumVariantId(it) => {
// I don't really like the fact that we call into parent source // I don't really like the fact that we call into parent source
@ -182,7 +182,7 @@ pub enum StructKind {
} }
fn lower_struct( fn lower_struct(
db: &impl DefDatabase, db: &dyn DefDatabase,
trace: &mut Trace< trace: &mut Trace<
LocalStructFieldId, LocalStructFieldId,
StructFieldData, StructFieldData,

View file

@ -32,7 +32,7 @@ impl ops::Deref for Attrs {
} }
impl Attrs { impl Attrs {
pub(crate) fn attrs_query(db: &impl DefDatabase, def: AttrDefId) -> Attrs { pub(crate) fn attrs_query(db: &dyn DefDatabase, def: AttrDefId) -> Attrs {
match def { match def {
AttrDefId::ModuleId(module) => { AttrDefId::ModuleId(module) => {
let def_map = db.crate_def_map(module.krate); let def_map = db.crate_def_map(module.krate);
@ -71,8 +71,8 @@ impl Attrs {
} }
} }
fn from_attrs_owner(db: &impl DefDatabase, owner: InFile<&dyn AttrsOwner>) -> Attrs { fn from_attrs_owner(db: &dyn DefDatabase, owner: InFile<&dyn AttrsOwner>) -> Attrs {
let hygiene = Hygiene::new(db, owner.file_id); let hygiene = Hygiene::new(db.upcast(), owner.file_id);
Attrs::new(owner.value, &hygiene) Attrs::new(owner.value, &hygiene)
} }
@ -155,20 +155,18 @@ impl<'a> AttrQuery<'a> {
} }
} }
fn attrs_from_ast<D, N>(src: AstId<N>, db: &D) -> Attrs fn attrs_from_ast<N>(src: AstId<N>, db: &dyn DefDatabase) -> Attrs
where where
N: ast::AttrsOwner, N: ast::AttrsOwner,
D: DefDatabase,
{ {
let src = InFile::new(src.file_id, src.to_node(db)); let src = InFile::new(src.file_id, src.to_node(db.upcast()));
Attrs::from_attrs_owner(db, src.as_ref().map(|it| it as &dyn AttrsOwner)) Attrs::from_attrs_owner(db, src.as_ref().map(|it| it as &dyn AttrsOwner))
} }
fn attrs_from_loc<T, D>(node: T, db: &D) -> Attrs fn attrs_from_loc<T>(node: T, db: &dyn DefDatabase) -> Attrs
where where
T: HasSource, T: HasSource,
T::Value: ast::AttrsOwner, T::Value: ast::AttrsOwner,
D: DefDatabase,
{ {
let src = node.source(db); let src = node.source(db);
Attrs::from_attrs_owner(db, src.as_ref().map(|it| it as &dyn AttrsOwner)) Attrs::from_attrs_owner(db, src.as_ref().map(|it| it as &dyn AttrsOwner))

View file

@ -34,19 +34,19 @@ pub(crate) struct Expander {
impl Expander { impl Expander {
pub(crate) fn new( pub(crate) fn new(
db: &impl DefDatabase, db: &dyn DefDatabase,
current_file_id: HirFileId, current_file_id: HirFileId,
module: ModuleId, module: ModuleId,
) -> Expander { ) -> Expander {
let crate_def_map = db.crate_def_map(module.krate); let crate_def_map = db.crate_def_map(module.krate);
let hygiene = Hygiene::new(db, current_file_id); let hygiene = Hygiene::new(db.upcast(), current_file_id);
let ast_id_map = db.ast_id_map(current_file_id); let ast_id_map = db.ast_id_map(current_file_id);
Expander { crate_def_map, current_file_id, hygiene, ast_id_map, module } Expander { crate_def_map, current_file_id, hygiene, ast_id_map, module }
} }
pub(crate) fn enter_expand<T: ast::AstNode, DB: DefDatabase>( pub(crate) fn enter_expand<T: ast::AstNode>(
&mut self, &mut self,
db: &DB, db: &dyn DefDatabase,
local_scope: Option<&ItemScope>, local_scope: Option<&ItemScope>,
macro_call: ast::MacroCall, macro_call: ast::MacroCall,
) -> Option<(Mark, T)> { ) -> Option<(Mark, T)> {
@ -70,7 +70,7 @@ impl Expander {
ast_id_map: mem::take(&mut self.ast_id_map), ast_id_map: mem::take(&mut self.ast_id_map),
bomb: DropBomb::new("expansion mark dropped"), bomb: DropBomb::new("expansion mark dropped"),
}; };
self.hygiene = Hygiene::new(db, file_id); self.hygiene = Hygiene::new(db.upcast(), file_id);
self.current_file_id = file_id; self.current_file_id = file_id;
self.ast_id_map = db.ast_id_map(file_id); self.ast_id_map = db.ast_id_map(file_id);
@ -84,8 +84,8 @@ impl Expander {
None None
} }
pub(crate) fn exit(&mut self, db: &impl DefDatabase, mut mark: Mark) { pub(crate) fn exit(&mut self, db: &dyn DefDatabase, mut mark: Mark) {
self.hygiene = Hygiene::new(db, mark.file_id); self.hygiene = Hygiene::new(db.upcast(), mark.file_id);
self.current_file_id = mark.file_id; self.current_file_id = mark.file_id;
self.ast_id_map = mem::take(&mut mark.ast_id_map); self.ast_id_map = mem::take(&mut mark.ast_id_map);
mark.bomb.defuse(); mark.bomb.defuse();
@ -99,7 +99,7 @@ impl Expander {
Path::from_src(path, &self.hygiene) Path::from_src(path, &self.hygiene)
} }
fn resolve_path_as_macro(&self, db: &impl DefDatabase, path: &ModPath) -> Option<MacroDefId> { fn resolve_path_as_macro(&self, db: &dyn DefDatabase, path: &ModPath) -> Option<MacroDefId> {
self.crate_def_map self.crate_def_map
.resolve_path(db, self.module.local_id, path, BuiltinShadowMode::Other) .resolve_path(db, self.module.local_id, path, BuiltinShadowMode::Other)
.0 .0
@ -167,7 +167,7 @@ pub struct SyntheticSyntax;
impl Body { impl Body {
pub(crate) fn body_with_source_map_query( pub(crate) fn body_with_source_map_query(
db: &impl DefDatabase, db: &dyn DefDatabase,
def: DefWithBodyId, def: DefWithBodyId,
) -> (Arc<Body>, Arc<BodySourceMap>) { ) -> (Arc<Body>, Arc<BodySourceMap>) {
let _p = profile("body_with_source_map_query"); let _p = profile("body_with_source_map_query");
@ -196,12 +196,12 @@ impl Body {
(Arc::new(body), Arc::new(source_map)) (Arc::new(body), Arc::new(source_map))
} }
pub(crate) fn body_query(db: &impl DefDatabase, def: DefWithBodyId) -> Arc<Body> { pub(crate) fn body_query(db: &dyn DefDatabase, def: DefWithBodyId) -> Arc<Body> {
db.body_with_source_map(def).0 db.body_with_source_map(def).0
} }
fn new( fn new(
db: &impl DefDatabase, db: &dyn DefDatabase,
def: DefWithBodyId, def: DefWithBodyId,
expander: Expander, expander: Expander,
params: Option<ast::ParamList>, params: Option<ast::ParamList>,

View file

@ -36,7 +36,7 @@ use crate::{
}; };
pub(super) fn lower( pub(super) fn lower(
db: &impl DefDatabase, db: &dyn DefDatabase,
def: DefWithBodyId, def: DefWithBodyId,
expander: Expander, expander: Expander,
params: Option<ast::ParamList>, params: Option<ast::ParamList>,
@ -58,8 +58,8 @@ pub(super) fn lower(
.collect(params, body) .collect(params, body)
} }
struct ExprCollector<DB> { struct ExprCollector<'a> {
db: DB, db: &'a dyn DefDatabase,
def: DefWithBodyId, def: DefWithBodyId,
expander: Expander, expander: Expander,
@ -67,10 +67,7 @@ struct ExprCollector<DB> {
source_map: BodySourceMap, source_map: BodySourceMap,
} }
impl<'a, DB> ExprCollector<&'a DB> impl ExprCollector<'_> {
where
DB: DefDatabase,
{
fn collect( fn collect(
mut self, mut self,
param_list: Option<ast::ParamList>, param_list: Option<ast::ParamList>,

View file

@ -45,7 +45,7 @@ pub struct ScopeData {
} }
impl ExprScopes { impl ExprScopes {
pub(crate) fn expr_scopes_query(db: &impl DefDatabase, def: DefWithBodyId) -> Arc<ExprScopes> { pub(crate) fn expr_scopes_query(db: &dyn DefDatabase, def: DefWithBodyId) -> Arc<ExprScopes> {
let body = db.body(def); let body = db.body(def);
Arc::new(ExprScopes::new(&*body)) Arc::new(ExprScopes::new(&*body))
} }

View file

@ -17,11 +17,11 @@ use crate::{
}; };
pub trait ChildBySource { pub trait ChildBySource {
fn child_by_source(&self, db: &impl DefDatabase) -> DynMap; fn child_by_source(&self, db: &dyn DefDatabase) -> DynMap;
} }
impl ChildBySource for TraitId { impl ChildBySource for TraitId {
fn child_by_source(&self, db: &impl DefDatabase) -> DynMap { fn child_by_source(&self, db: &dyn DefDatabase) -> DynMap {
let mut res = DynMap::default(); let mut res = DynMap::default();
let data = db.trait_data(*self); let data = db.trait_data(*self);
@ -47,7 +47,7 @@ impl ChildBySource for TraitId {
} }
impl ChildBySource for ImplId { impl ChildBySource for ImplId {
fn child_by_source(&self, db: &impl DefDatabase) -> DynMap { fn child_by_source(&self, db: &dyn DefDatabase) -> DynMap {
let mut res = DynMap::default(); let mut res = DynMap::default();
let data = db.impl_data(*self); let data = db.impl_data(*self);
@ -73,7 +73,7 @@ impl ChildBySource for ImplId {
} }
impl ChildBySource for ModuleId { impl ChildBySource for ModuleId {
fn child_by_source(&self, db: &impl DefDatabase) -> DynMap { fn child_by_source(&self, db: &dyn DefDatabase) -> DynMap {
let crate_def_map = db.crate_def_map(self.krate); let crate_def_map = db.crate_def_map(self.krate);
let module_data = &crate_def_map[self.local_id]; let module_data = &crate_def_map[self.local_id];
module_data.scope.child_by_source(db) module_data.scope.child_by_source(db)
@ -81,13 +81,13 @@ impl ChildBySource for ModuleId {
} }
impl ChildBySource for ItemScope { impl ChildBySource for ItemScope {
fn child_by_source(&self, db: &impl DefDatabase) -> DynMap { fn child_by_source(&self, db: &dyn DefDatabase) -> DynMap {
let mut res = DynMap::default(); let mut res = DynMap::default();
self.declarations().for_each(|item| add_module_def(db, &mut res, item)); self.declarations().for_each(|item| add_module_def(db, &mut res, item));
self.impls().for_each(|imp| add_impl(db, &mut res, imp)); self.impls().for_each(|imp| add_impl(db, &mut res, imp));
return res; return res;
fn add_module_def(db: &impl DefDatabase, map: &mut DynMap, item: ModuleDefId) { fn add_module_def(db: &dyn DefDatabase, map: &mut DynMap, item: ModuleDefId) {
match item { match item {
ModuleDefId::FunctionId(func) => { ModuleDefId::FunctionId(func) => {
let src = func.lookup(db).source(db); let src = func.lookup(db).source(db);
@ -126,7 +126,7 @@ impl ChildBySource for ItemScope {
_ => (), _ => (),
} }
} }
fn add_impl(db: &impl DefDatabase, map: &mut DynMap, imp: ImplId) { fn add_impl(db: &dyn DefDatabase, map: &mut DynMap, imp: ImplId) {
let src = imp.lookup(db).source(db); let src = imp.lookup(db).source(db);
map[keys::IMPL].insert(src, imp) map[keys::IMPL].insert(src, imp)
} }
@ -134,7 +134,7 @@ impl ChildBySource for ItemScope {
} }
impl ChildBySource for VariantId { impl ChildBySource for VariantId {
fn child_by_source(&self, db: &impl DefDatabase) -> DynMap { fn child_by_source(&self, db: &dyn DefDatabase) -> DynMap {
let mut res = DynMap::default(); let mut res = DynMap::default();
let arena_map = self.child_source(db); let arena_map = self.child_source(db);
@ -155,7 +155,7 @@ impl ChildBySource for VariantId {
} }
impl ChildBySource for EnumId { impl ChildBySource for EnumId {
fn child_by_source(&self, db: &impl DefDatabase) -> DynMap { fn child_by_source(&self, db: &dyn DefDatabase) -> DynMap {
let mut res = DynMap::default(); let mut res = DynMap::default();
let arena_map = self.child_source(db); let arena_map = self.child_source(db);
@ -170,7 +170,7 @@ impl ChildBySource for EnumId {
} }
impl ChildBySource for DefWithBodyId { impl ChildBySource for DefWithBodyId {
fn child_by_source(&self, db: &impl DefDatabase) -> DynMap { fn child_by_source(&self, db: &dyn DefDatabase) -> DynMap {
let body = db.body(*self); let body = db.body(*self);
body.item_scope.child_by_source(db) body.item_scope.child_by_source(db)
} }

View file

@ -105,7 +105,7 @@ pub struct TypeAliasData {
impl TypeAliasData { impl TypeAliasData {
pub(crate) fn type_alias_data_query( pub(crate) fn type_alias_data_query(
db: &impl DefDatabase, db: &dyn DefDatabase,
typ: TypeAliasId, typ: TypeAliasId,
) -> Arc<TypeAliasData> { ) -> Arc<TypeAliasData> {
let loc = typ.lookup(db); let loc = typ.lookup(db);
@ -127,7 +127,7 @@ pub struct TraitData {
} }
impl TraitData { impl TraitData {
pub(crate) fn trait_data_query(db: &impl DefDatabase, tr: TraitId) -> Arc<TraitData> { pub(crate) fn trait_data_query(db: &dyn DefDatabase, tr: TraitId) -> Arc<TraitData> {
let src = tr.lookup(db).source(db); let src = tr.lookup(db).source(db);
let name = src.value.name().map_or_else(Name::missing, |n| n.as_name()); let name = src.value.name().map_or_else(Name::missing, |n| n.as_name());
let auto = src.value.is_auto(); let auto = src.value.is_auto();
@ -200,7 +200,7 @@ pub struct ImplData {
} }
impl ImplData { impl ImplData {
pub(crate) fn impl_data_query(db: &impl DefDatabase, id: ImplId) -> Arc<ImplData> { pub(crate) fn impl_data_query(db: &dyn DefDatabase, id: ImplId) -> Arc<ImplData> {
let _p = profile("impl_data_query"); let _p = profile("impl_data_query");
let impl_loc = id.lookup(db); let impl_loc = id.lookup(db);
let src = impl_loc.source(db); let src = impl_loc.source(db);
@ -235,20 +235,20 @@ pub struct ConstData {
} }
impl ConstData { impl ConstData {
pub(crate) fn const_data_query(db: &impl DefDatabase, konst: ConstId) -> Arc<ConstData> { pub(crate) fn const_data_query(db: &dyn DefDatabase, konst: ConstId) -> Arc<ConstData> {
let loc = konst.lookup(db); let loc = konst.lookup(db);
let node = loc.source(db); let node = loc.source(db);
let vis_default = RawVisibility::default_for_container(loc.container); let vis_default = RawVisibility::default_for_container(loc.container);
Arc::new(ConstData::new(db, vis_default, node)) Arc::new(ConstData::new(db, vis_default, node))
} }
pub(crate) fn static_data_query(db: &impl DefDatabase, konst: StaticId) -> Arc<ConstData> { pub(crate) fn static_data_query(db: &dyn DefDatabase, konst: StaticId) -> Arc<ConstData> {
let node = konst.lookup(db).source(db); let node = konst.lookup(db).source(db);
Arc::new(ConstData::new(db, RawVisibility::private(), node)) Arc::new(ConstData::new(db, RawVisibility::private(), node))
} }
fn new<N: NameOwner + TypeAscriptionOwner + VisibilityOwner>( fn new<N: NameOwner + TypeAscriptionOwner + VisibilityOwner>(
db: &impl DefDatabase, db: &dyn DefDatabase,
vis_default: RawVisibility, vis_default: RawVisibility,
node: InFile<N>, node: InFile<N>,
) -> ConstData { ) -> ConstData {
@ -261,7 +261,7 @@ impl ConstData {
} }
fn collect_impl_items_in_macros( fn collect_impl_items_in_macros(
db: &impl DefDatabase, db: &dyn DefDatabase,
module_id: ModuleId, module_id: ModuleId,
impl_def: &InFile<ast::ItemList>, impl_def: &InFile<ast::ItemList>,
id: ImplId, id: ImplId,
@ -280,7 +280,7 @@ fn collect_impl_items_in_macros(
} }
fn collect_impl_items_in_macro( fn collect_impl_items_in_macro(
db: &impl DefDatabase, db: &dyn DefDatabase,
expander: &mut Expander, expander: &mut Expander,
m: ast::MacroCall, m: ast::MacroCall,
id: ImplId, id: ImplId,
@ -312,7 +312,7 @@ fn collect_impl_items_in_macro(
} }
fn collect_impl_items( fn collect_impl_items(
db: &impl DefDatabase, db: &dyn DefDatabase,
impl_items: impl Iterator<Item = ImplItem>, impl_items: impl Iterator<Item = ImplItem>,
file_id: crate::HirFileId, file_id: crate::HirFileId,
id: ImplId, id: ImplId,

View file

@ -2,7 +2,7 @@
use std::sync::Arc; use std::sync::Arc;
use hir_expand::{db::AstDatabase, HirFileId}; use hir_expand::{db::AstDatabase, HirFileId};
use ra_db::{salsa, CrateId, SourceDatabase}; use ra_db::{salsa, CrateId, SourceDatabase, Upcast};
use ra_prof::profile; use ra_prof::profile;
use ra_syntax::SmolStr; use ra_syntax::SmolStr;
@ -43,7 +43,7 @@ pub trait InternDatabase: SourceDatabase {
} }
#[salsa::query_group(DefDatabaseStorage)] #[salsa::query_group(DefDatabaseStorage)]
pub trait DefDatabase: InternDatabase + AstDatabase { pub trait DefDatabase: InternDatabase + AstDatabase + Upcast<dyn AstDatabase> {
#[salsa::invoke(RawItems::raw_items_query)] #[salsa::invoke(RawItems::raw_items_query)]
fn raw_items(&self, file_id: HirFileId) -> Arc<RawItems>; fn raw_items(&self, file_id: HirFileId) -> Arc<RawItems>;
@ -109,6 +109,12 @@ pub trait DefDatabase: InternDatabase + AstDatabase {
fn documentation(&self, def: AttrDefId) -> Option<Documentation>; fn documentation(&self, def: AttrDefId) -> Option<Documentation>;
} }
// impl<T: DefDatabase> Upcast<dyn AstDatabase> for T {
// fn upcast(&self) -> &dyn AstDatabase {
// &*self
// }
// }
fn crate_def_map_wait(db: &impl DefDatabase, krate: CrateId) -> Arc<CrateDefMap> { fn crate_def_map_wait(db: &impl DefDatabase, krate: CrateId) -> Arc<CrateDefMap> {
let _p = profile("crate_def_map:wait"); let _p = profile("crate_def_map:wait");
db.crate_def_map_query(krate) db.crate_def_map_query(krate)
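The `Upcast<dyn AstDatabase>` supertrait added to `DefDatabase` here is what makes the `db.upcast()` calls elsewhere in this commit compile: a concrete database has to offer a cheap conversion to the narrower trait object, and the commented-out block above sketches the blanket version of that impl. Below is a minimal sketch of the arrangement, using stand-in traits and a hypothetical `RootDatabase` type (the real wiring lives on the concrete database type elsewhere in the workspace):

trait AstDatabase {
    fn parse_text(&self, file_id: u32) -> String;
}

trait Upcast<T: ?Sized> {
    fn upcast(&self) -> &T;
}

trait DefDatabase: AstDatabase + Upcast<dyn AstDatabase> {
    fn crate_name(&self) -> String;
}

struct RootDatabase;

impl AstDatabase for RootDatabase {
    fn parse_text(&self, _file_id: u32) -> String {
        "fn main() {}".to_string()
    }
}

// Per-type version of the conversion the commented-out blanket impl hints at.
impl Upcast<dyn AstDatabase> for RootDatabase {
    fn upcast(&self) -> &(dyn AstDatabase + 'static) {
        self
    }
}

impl DefDatabase for RootDatabase {
    fn crate_name(&self) -> String {
        "core".to_string()
    }
}

// Code that only needs syntax-level queries takes &dyn DefDatabase and
// narrows it at the boundary, mirroring the db.upcast() calls in the diff.
fn original_file_text(db: &dyn DefDatabase, file_id: u32) -> String {
    db.upcast().parse_text(file_id)
}

fn main() {
    let db = RootDatabase;
    println!("{}", original_file_text(&db, 0));
}

Call sites that only need syntax-level queries, such as `original_file(db.upcast())` in the hunks above, narrow the database at the boundary instead of requiring the full trait.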

View file

@ -34,7 +34,7 @@ impl Documentation {
} }
pub(crate) fn documentation_query( pub(crate) fn documentation_query(
db: &impl DefDatabase, db: &dyn DefDatabase,
def: AttrDefId, def: AttrDefId,
) -> Option<Documentation> { ) -> Option<Documentation> {
match def { match def {
@ -60,7 +60,7 @@ impl Documentation {
docs_from_ast(&src.value[it.local_id]) docs_from_ast(&src.value[it.local_id])
} }
AttrDefId::TraitId(it) => docs_from_ast(&it.lookup(db).source(db).value), AttrDefId::TraitId(it) => docs_from_ast(&it.lookup(db).source(db).value),
AttrDefId::MacroDefId(it) => docs_from_ast(&it.ast_id?.to_node(db)), AttrDefId::MacroDefId(it) => docs_from_ast(&it.ast_id?.to_node(db.upcast())),
AttrDefId::ConstId(it) => docs_from_ast(&it.lookup(db).source(db).value), AttrDefId::ConstId(it) => docs_from_ast(&it.lookup(db).source(db).value),
AttrDefId::StaticId(it) => docs_from_ast(&it.lookup(db).source(db).value), AttrDefId::StaticId(it) => docs_from_ast(&it.lookup(db).source(db).value),
AttrDefId::FunctionId(it) => docs_from_ast(&it.lookup(db).source(db).value), AttrDefId::FunctionId(it) => docs_from_ast(&it.lookup(db).source(db).value),

View file

@ -44,12 +44,12 @@ impl ModPath {
/// Find a path that can be used to refer to a certain item. This can depend on /// Find a path that can be used to refer to a certain item. This can depend on
/// *from where* you're referring to the item, hence the `from` parameter. /// *from where* you're referring to the item, hence the `from` parameter.
pub fn find_path(db: &impl DefDatabase, item: ItemInNs, from: ModuleId) -> Option<ModPath> { pub fn find_path(db: &dyn DefDatabase, item: ItemInNs, from: ModuleId) -> Option<ModPath> {
find_path_inner(db, item, from, MAX_PATH_LEN) find_path_inner(db, item, from, MAX_PATH_LEN)
} }
fn find_path_inner( fn find_path_inner(
db: &impl DefDatabase, db: &dyn DefDatabase,
item: ItemInNs, item: ItemInNs,
from: ModuleId, from: ModuleId,
max_len: usize, max_len: usize,
@ -165,7 +165,7 @@ fn select_best_path(old_path: ModPath, new_path: ModPath) -> ModPath {
} }
fn find_importable_locations( fn find_importable_locations(
db: &impl DefDatabase, db: &dyn DefDatabase,
item: ItemInNs, item: ItemInNs,
from: ModuleId, from: ModuleId,
) -> Vec<(ModuleId, Name)> { ) -> Vec<(ModuleId, Name)> {
@ -195,7 +195,7 @@ fn find_importable_locations(
/// Note that the crate doesn't need to be the one in which the item is defined; /// Note that the crate doesn't need to be the one in which the item is defined;
/// it might be re-exported in other crates. /// it might be re-exported in other crates.
fn importable_locations_in_crate( fn importable_locations_in_crate(
db: &impl DefDatabase, db: &dyn DefDatabase,
item: ItemInNs, item: ItemInNs,
krate: CrateId, krate: CrateId,
) -> Vec<(ModuleId, Name, Visibility)> { ) -> Vec<(ModuleId, Name, Visibility)> {

View file

@ -69,7 +69,7 @@ type SourceMap = ArenaMap<LocalTypeParamId, Either<ast::TraitDef, ast::TypeParam
impl GenericParams { impl GenericParams {
pub(crate) fn generic_params_query( pub(crate) fn generic_params_query(
db: &impl DefDatabase, db: &dyn DefDatabase,
def: GenericDefId, def: GenericDefId,
) -> Arc<GenericParams> { ) -> Arc<GenericParams> {
let _p = profile("generic_params_query"); let _p = profile("generic_params_query");
@ -77,7 +77,7 @@ impl GenericParams {
Arc::new(params) Arc::new(params)
} }
fn new(db: &impl DefDatabase, def: GenericDefId) -> (GenericParams, InFile<SourceMap>) { fn new(db: &dyn DefDatabase, def: GenericDefId) -> (GenericParams, InFile<SourceMap>) {
let mut generics = GenericParams { types: Arena::default(), where_predicates: Vec::new() }; let mut generics = GenericParams { types: Arena::default(), where_predicates: Vec::new() };
let mut sm = ArenaMap::default(); let mut sm = ArenaMap::default();
// FIXME: add `: Sized` bound for everything except for `Self` in traits // FIXME: add `: Sized` bound for everything except for `Self` in traits
@ -242,14 +242,14 @@ impl GenericParams {
impl HasChildSource for GenericDefId { impl HasChildSource for GenericDefId {
type ChildId = LocalTypeParamId; type ChildId = LocalTypeParamId;
type Value = Either<ast::TraitDef, ast::TypeParam>; type Value = Either<ast::TraitDef, ast::TypeParam>;
fn child_source(&self, db: &impl DefDatabase) -> InFile<SourceMap> { fn child_source(&self, db: &dyn DefDatabase) -> InFile<SourceMap> {
let (_, sm) = GenericParams::new(db, *self); let (_, sm) = GenericParams::new(db, *self);
sm sm
} }
} }
impl ChildBySource for GenericDefId { impl ChildBySource for GenericDefId {
fn child_by_source(&self, db: &impl DefDatabase) -> DynMap { fn child_by_source(&self, db: &dyn DefDatabase) -> DynMap {
let mut res = DynMap::default(); let mut res = DynMap::default();
let arena_map = self.child_source(db); let arena_map = self.child_source(db);
let arena_map = arena_map.as_ref(); let arena_map = arena_map.as_ref();

View file

@ -77,7 +77,7 @@ impl LangItems {
} }
/// Salsa query. This will look for lang items in a specific crate. /// Salsa query. This will look for lang items in a specific crate.
pub(crate) fn crate_lang_items_query(db: &impl DefDatabase, krate: CrateId) -> Arc<LangItems> { pub(crate) fn crate_lang_items_query(db: &dyn DefDatabase, krate: CrateId) -> Arc<LangItems> {
let mut lang_items = LangItems::default(); let mut lang_items = LangItems::default();
let crate_def_map = db.crate_def_map(krate); let crate_def_map = db.crate_def_map(krate);
@ -92,7 +92,7 @@ impl LangItems {
} }
pub(crate) fn module_lang_items_query( pub(crate) fn module_lang_items_query(
db: &impl DefDatabase, db: &dyn DefDatabase,
module: ModuleId, module: ModuleId,
) -> Option<Arc<LangItems>> { ) -> Option<Arc<LangItems>> {
let mut lang_items = LangItems::default(); let mut lang_items = LangItems::default();
@ -107,7 +107,7 @@ impl LangItems {
/// Salsa query. Look for a lang item, starting from the specified crate and recursively /// Salsa query. Look for a lang item, starting from the specified crate and recursively
/// traversing its dependencies. /// traversing its dependencies.
pub(crate) fn lang_item_query( pub(crate) fn lang_item_query(
db: &impl DefDatabase, db: &dyn DefDatabase,
start_crate: CrateId, start_crate: CrateId,
item: SmolStr, item: SmolStr,
) -> Option<LangItemTarget> { ) -> Option<LangItemTarget> {
@ -122,7 +122,7 @@ impl LangItems {
.find_map(|dep| db.lang_item(dep.crate_id, item.clone())) .find_map(|dep| db.lang_item(dep.crate_id, item.clone()))
} }
fn collect_lang_items(&mut self, db: &impl DefDatabase, module: ModuleId) { fn collect_lang_items(&mut self, db: &dyn DefDatabase, module: ModuleId) {
// Look for impl targets // Look for impl targets
let def_map = db.crate_def_map(module.krate); let def_map = db.crate_def_map(module.krate);
let module_data = &def_map[module.local_id]; let module_data = &def_map[module.local_id];
@ -152,7 +152,7 @@ impl LangItems {
fn collect_lang_item<T>( fn collect_lang_item<T>(
&mut self, &mut self,
db: &impl DefDatabase, db: &dyn DefDatabase,
item: T, item: T,
constructor: fn(T) -> LangItemTarget, constructor: fn(T) -> LangItemTarget,
) where ) where

View file

@ -47,8 +47,8 @@ mod marks;
use std::hash::Hash; use std::hash::Hash;
use hir_expand::{ use hir_expand::{
ast_id_map::FileAstId, db::AstDatabase, eager::expand_eager_macro, hygiene::Hygiene, AstId, ast_id_map::FileAstId, eager::expand_eager_macro, hygiene::Hygiene, AstId, HirFileId, InFile,
HirFileId, InFile, MacroCallId, MacroCallKind, MacroDefId, MacroDefKind, MacroCallId, MacroCallKind, MacroDefId, MacroDefKind,
}; };
use ra_arena::{impl_arena_id, RawId}; use ra_arena::{impl_arena_id, RawId};
use ra_db::{impl_intern_key, salsa, CrateId}; use ra_db::{impl_intern_key, salsa, CrateId};
@ -87,14 +87,14 @@ macro_rules! impl_intern {
impl Intern for $loc { impl Intern for $loc {
type ID = $id; type ID = $id;
fn intern(self, db: &impl db::DefDatabase) -> $id { fn intern(self, db: &dyn db::DefDatabase) -> $id {
db.$intern(self) db.$intern(self)
} }
} }
impl Lookup for $id { impl Lookup for $id {
type Data = $loc; type Data = $loc;
fn lookup(&self, db: &impl db::DefDatabase) -> $loc { fn lookup(&self, db: &dyn db::DefDatabase) -> $loc {
db.$lookup(*self) db.$lookup(*self)
} }
} }
@ -339,20 +339,20 @@ impl_froms!(VariantId: EnumVariantId, StructId, UnionId);
trait Intern { trait Intern {
type ID; type ID;
fn intern(self, db: &impl db::DefDatabase) -> Self::ID; fn intern(self, db: &dyn db::DefDatabase) -> Self::ID;
} }
pub trait Lookup { pub trait Lookup {
type Data; type Data;
fn lookup(&self, db: &impl db::DefDatabase) -> Self::Data; fn lookup(&self, db: &dyn db::DefDatabase) -> Self::Data;
} }
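`Intern` and `Lookup` likewise switch from `&impl db::DefDatabase` to `&dyn db::DefDatabase`. A small self-contained sketch of that pair over a toy interner (every type below is an illustrative stand-in for the salsa-backed originals):

use std::cell::RefCell;

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct FunctionId(u32);

#[derive(Clone, PartialEq, Eq, Debug)]
struct FunctionLoc {
    name: String,
}

trait DefDatabase {
    fn intern_function(&self, loc: FunctionLoc) -> FunctionId;
    fn lookup_intern_function(&self, id: FunctionId) -> FunctionLoc;
}

trait Intern {
    type ID;
    fn intern(self, db: &dyn DefDatabase) -> Self::ID;
}

trait Lookup {
    type Data;
    fn lookup(&self, db: &dyn DefDatabase) -> Self::Data;
}

impl Intern for FunctionLoc {
    type ID = FunctionId;
    fn intern(self, db: &dyn DefDatabase) -> FunctionId {
        db.intern_function(self)
    }
}

impl Lookup for FunctionId {
    type Data = FunctionLoc;
    fn lookup(&self, db: &dyn DefDatabase) -> FunctionLoc {
        db.lookup_intern_function(*self)
    }
}

// Toy interner standing in for the salsa interning queries.
#[derive(Default)]
struct MockDb {
    functions: RefCell<Vec<FunctionLoc>>,
}

impl DefDatabase for MockDb {
    fn intern_function(&self, loc: FunctionLoc) -> FunctionId {
        let mut fns = self.functions.borrow_mut();
        if let Some(i) = fns.iter().position(|l| *l == loc) {
            return FunctionId(i as u32);
        }
        fns.push(loc);
        FunctionId(fns.len() as u32 - 1)
    }

    fn lookup_intern_function(&self, id: FunctionId) -> FunctionLoc {
        self.functions.borrow()[id.0 as usize].clone()
    }
}

fn main() {
    let db = MockDb::default();
    let id = FunctionLoc { name: "main".into() }.intern(&db);
    assert_eq!(id.lookup(&db).name, "main");
}

Interning returns a stable id and `lookup` recovers the location, mirroring how the `impl_intern!` macro above generates both sides of the pair.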
pub trait HasModule { pub trait HasModule {
fn module(&self, db: &impl db::DefDatabase) -> ModuleId; fn module(&self, db: &dyn db::DefDatabase) -> ModuleId;
} }
impl HasModule for ContainerId { impl HasModule for ContainerId {
fn module(&self, db: &impl db::DefDatabase) -> ModuleId { fn module(&self, db: &dyn db::DefDatabase) -> ModuleId {
match *self { match *self {
ContainerId::ModuleId(it) => it, ContainerId::ModuleId(it) => it,
ContainerId::DefWithBodyId(it) => it.module(db), ContainerId::DefWithBodyId(it) => it.module(db),
@ -361,7 +361,7 @@ impl HasModule for ContainerId {
} }
impl HasModule for AssocContainerId { impl HasModule for AssocContainerId {
fn module(&self, db: &impl db::DefDatabase) -> ModuleId { fn module(&self, db: &dyn db::DefDatabase) -> ModuleId {
match *self { match *self {
AssocContainerId::ContainerId(it) => it.module(db), AssocContainerId::ContainerId(it) => it.module(db),
AssocContainerId::ImplId(it) => it.lookup(db).container.module(db), AssocContainerId::ImplId(it) => it.lookup(db).container.module(db),
@ -371,13 +371,13 @@ impl HasModule for AssocContainerId {
} }
impl<N: AstNode> HasModule for AssocItemLoc<N> { impl<N: AstNode> HasModule for AssocItemLoc<N> {
fn module(&self, db: &impl db::DefDatabase) -> ModuleId { fn module(&self, db: &dyn db::DefDatabase) -> ModuleId {
self.container.module(db) self.container.module(db)
} }
} }
impl HasModule for AdtId { impl HasModule for AdtId {
fn module(&self, db: &impl db::DefDatabase) -> ModuleId { fn module(&self, db: &dyn db::DefDatabase) -> ModuleId {
match self { match self {
AdtId::StructId(it) => it.lookup(db).container, AdtId::StructId(it) => it.lookup(db).container,
AdtId::UnionId(it) => it.lookup(db).container, AdtId::UnionId(it) => it.lookup(db).container,
@ -388,7 +388,7 @@ impl HasModule for AdtId {
} }
impl HasModule for DefWithBodyId { impl HasModule for DefWithBodyId {
fn module(&self, db: &impl db::DefDatabase) -> ModuleId { fn module(&self, db: &dyn db::DefDatabase) -> ModuleId {
match self { match self {
DefWithBodyId::FunctionId(it) => it.lookup(db).module(db), DefWithBodyId::FunctionId(it) => it.lookup(db).module(db),
DefWithBodyId::StaticId(it) => it.lookup(db).module(db), DefWithBodyId::StaticId(it) => it.lookup(db).module(db),
@ -398,7 +398,7 @@ impl HasModule for DefWithBodyId {
} }
impl HasModule for GenericDefId { impl HasModule for GenericDefId {
fn module(&self, db: &impl db::DefDatabase) -> ModuleId { fn module(&self, db: &dyn db::DefDatabase) -> ModuleId {
match self { match self {
GenericDefId::FunctionId(it) => it.lookup(db).module(db), GenericDefId::FunctionId(it) => it.lookup(db).module(db),
GenericDefId::AdtId(it) => it.module(db), GenericDefId::AdtId(it) => it.module(db),
@ -412,7 +412,7 @@ impl HasModule for GenericDefId {
} }
impl HasModule for StaticLoc { impl HasModule for StaticLoc {
fn module(&self, db: &impl db::DefDatabase) -> ModuleId { fn module(&self, db: &dyn db::DefDatabase) -> ModuleId {
self.container.module(db) self.container.module(db)
} }
} }
@ -421,7 +421,7 @@ impl HasModule for StaticLoc {
pub trait AsMacroCall { pub trait AsMacroCall {
fn as_call_id( fn as_call_id(
&self, &self,
db: &(impl db::DefDatabase + AstDatabase), db: &dyn db::DefDatabase,
resolver: impl Fn(path::ModPath) -> Option<MacroDefId>, resolver: impl Fn(path::ModPath) -> Option<MacroDefId>,
) -> Option<MacroCallId>; ) -> Option<MacroCallId>;
} }
@ -429,11 +429,11 @@ pub trait AsMacroCall {
impl AsMacroCall for InFile<&ast::MacroCall> { impl AsMacroCall for InFile<&ast::MacroCall> {
fn as_call_id( fn as_call_id(
&self, &self,
db: &(impl db::DefDatabase + AstDatabase), db: &dyn db::DefDatabase,
resolver: impl Fn(path::ModPath) -> Option<MacroDefId>, resolver: impl Fn(path::ModPath) -> Option<MacroDefId>,
) -> Option<MacroCallId> { ) -> Option<MacroCallId> {
let ast_id = AstId::new(self.file_id, db.ast_id_map(self.file_id).ast_id(self.value)); let ast_id = AstId::new(self.file_id, db.ast_id_map(self.file_id).ast_id(self.value));
let h = Hygiene::new(db, self.file_id); let h = Hygiene::new(db.upcast(), self.file_id);
let path = path::ModPath::from_src(self.value.path()?, &h)?; let path = path::ModPath::from_src(self.value.path()?, &h)?;
AstIdWithPath::new(ast_id.file_id, ast_id.value, path).as_call_id(db, resolver) AstIdWithPath::new(ast_id.file_id, ast_id.value, path).as_call_id(db, resolver)
@ -456,23 +456,23 @@ impl<T: ast::AstNode> AstIdWithPath<T> {
impl AsMacroCall for AstIdWithPath<ast::MacroCall> { impl AsMacroCall for AstIdWithPath<ast::MacroCall> {
fn as_call_id( fn as_call_id(
&self, &self,
db: &impl AstDatabase, db: &dyn db::DefDatabase,
resolver: impl Fn(path::ModPath) -> Option<MacroDefId>, resolver: impl Fn(path::ModPath) -> Option<MacroDefId>,
) -> Option<MacroCallId> { ) -> Option<MacroCallId> {
let def: MacroDefId = resolver(self.path.clone())?; let def: MacroDefId = resolver(self.path.clone())?;
if let MacroDefKind::BuiltInEager(_) = def.kind { if let MacroDefKind::BuiltInEager(_) = def.kind {
let macro_call = InFile::new(self.ast_id.file_id, self.ast_id.to_node(db)); let macro_call = InFile::new(self.ast_id.file_id, self.ast_id.to_node(db.upcast()));
let hygiene = Hygiene::new(db, self.ast_id.file_id); let hygiene = Hygiene::new(db.upcast(), self.ast_id.file_id);
Some( Some(
expand_eager_macro(db, macro_call, def, &|path: ast::Path| { expand_eager_macro(db.upcast(), macro_call, def, &|path: ast::Path| {
resolver(path::ModPath::from_src(path, &hygiene)?) resolver(path::ModPath::from_src(path, &hygiene)?)
})? })?
.into(), .into(),
) )
} else { } else {
Some(def.as_lazy_macro(db, MacroCallKind::FnLike(self.ast_id)).into()) Some(def.as_lazy_macro(db.upcast(), MacroCallKind::FnLike(self.ast_id)).into())
} }
} }
} }
@ -480,10 +480,10 @@ impl AsMacroCall for AstIdWithPath<ast::MacroCall> {
impl AsMacroCall for AstIdWithPath<ast::ModuleItem> { impl AsMacroCall for AstIdWithPath<ast::ModuleItem> {
fn as_call_id( fn as_call_id(
&self, &self,
db: &impl AstDatabase, db: &dyn db::DefDatabase,
resolver: impl Fn(path::ModPath) -> Option<MacroDefId>, resolver: impl Fn(path::ModPath) -> Option<MacroDefId>,
) -> Option<MacroCallId> { ) -> Option<MacroCallId> {
let def = resolver(self.path.clone())?; let def = resolver(self.path.clone())?;
Some(def.as_lazy_macro(db, MacroCallKind::Attr(self.ast_id)).into()) Some(def.as_lazy_macro(db.upcast(), MacroCallKind::Attr(self.ast_id)).into())
} }
} }

View file

@ -151,16 +151,17 @@ impl ModuleOrigin {
/// Returns a node which defines this module. /// Returns a node which defines this module.
/// That is, a file or a `mod foo {}` with items. /// That is, a file or a `mod foo {}` with items.
fn definition_source(&self, db: &impl DefDatabase) -> InFile<ModuleSource> { fn definition_source(&self, db: &dyn DefDatabase) -> InFile<ModuleSource> {
match self { match self {
ModuleOrigin::File { definition, .. } | ModuleOrigin::CrateRoot { definition } => { ModuleOrigin::File { definition, .. } | ModuleOrigin::CrateRoot { definition } => {
let file_id = *definition; let file_id = *definition;
let sf = db.parse(file_id).tree(); let sf = db.parse(file_id).tree();
InFile::new(file_id.into(), ModuleSource::SourceFile(sf)) InFile::new(file_id.into(), ModuleSource::SourceFile(sf))
} }
ModuleOrigin::Inline { definition } => { ModuleOrigin::Inline { definition } => InFile::new(
InFile::new(definition.file_id, ModuleSource::Module(definition.to_node(db))) definition.file_id,
} ModuleSource::Module(definition.to_node(db.upcast())),
),
} }
} }
} }
@ -176,7 +177,7 @@ pub struct ModuleData {
} }
impl CrateDefMap { impl CrateDefMap {
pub(crate) fn crate_def_map_query(db: &impl DefDatabase, krate: CrateId) -> Arc<CrateDefMap> { pub(crate) fn crate_def_map_query(db: &dyn DefDatabase, krate: CrateId) -> Arc<CrateDefMap> {
let _p = profile("crate_def_map_query").detail(|| { let _p = profile("crate_def_map_query").detail(|| {
db.crate_graph()[krate] db.crate_graph()[krate]
.display_name .display_name
@ -204,7 +205,7 @@ impl CrateDefMap {
pub fn add_diagnostics( pub fn add_diagnostics(
&self, &self,
db: &impl DefDatabase, db: &dyn DefDatabase,
module: LocalModuleId, module: LocalModuleId,
sink: &mut DiagnosticSink, sink: &mut DiagnosticSink,
) { ) {
@ -220,7 +221,7 @@ impl CrateDefMap {
pub(crate) fn resolve_path( pub(crate) fn resolve_path(
&self, &self,
db: &impl DefDatabase, db: &dyn DefDatabase,
original_module: LocalModuleId, original_module: LocalModuleId,
path: &ModPath, path: &ModPath,
shadow: BuiltinShadowMode, shadow: BuiltinShadowMode,
@ -273,15 +274,15 @@ impl CrateDefMap {
impl ModuleData { impl ModuleData {
/// Returns a node which defines this module. That is, a file or a `mod foo {}` with items. /// Returns a node which defines this module. That is, a file or a `mod foo {}` with items.
pub fn definition_source(&self, db: &impl DefDatabase) -> InFile<ModuleSource> { pub fn definition_source(&self, db: &dyn DefDatabase) -> InFile<ModuleSource> {
self.origin.definition_source(db) self.origin.definition_source(db)
} }
/// Returns a node which declares this module, either a `mod foo;` or a `mod foo {}`. /// Returns a node which declares this module, either a `mod foo;` or a `mod foo {}`.
/// `None` for the crate root or block. /// `None` for the crate root or block.
pub fn declaration_source(&self, db: &impl DefDatabase) -> Option<InFile<ast::Module>> { pub fn declaration_source(&self, db: &dyn DefDatabase) -> Option<InFile<ast::Module>> {
let decl = self.origin.declaration()?; let decl = self.origin.declaration()?;
let value = decl.to_node(db); let value = decl.to_node(db.upcast());
Some(InFile { file_id: decl.file_id, value }) Some(InFile { file_id: decl.file_id, value })
} }
} }
@ -311,7 +312,7 @@ mod diagnostics {
impl DefDiagnostic { impl DefDiagnostic {
pub(super) fn add_to( pub(super) fn add_to(
&self, &self,
db: &impl DefDatabase, db: &dyn DefDatabase,
target_module: LocalModuleId, target_module: LocalModuleId,
sink: &mut DiagnosticSink, sink: &mut DiagnosticSink,
) { ) {
@ -320,7 +321,7 @@ mod diagnostics {
if *module != target_module { if *module != target_module {
return; return;
} }
let decl = declaration.to_node(db); let decl = declaration.to_node(db.upcast());
sink.push(UnresolvedModule { sink.push(UnresolvedModule {
file: declaration.file_id, file: declaration.file_id,
decl: AstPtr::new(&decl), decl: AstPtr::new(&decl),

View file

@ -30,7 +30,7 @@ use crate::{
TraitLoc, TypeAliasLoc, UnionLoc, TraitLoc, TypeAliasLoc, UnionLoc,
}; };
pub(super) fn collect_defs(db: &impl DefDatabase, mut def_map: CrateDefMap) -> CrateDefMap { pub(super) fn collect_defs(db: &dyn DefDatabase, mut def_map: CrateDefMap) -> CrateDefMap {
let crate_graph = db.crate_graph(); let crate_graph = db.crate_graph();
// populate external prelude // populate external prelude
@ -112,8 +112,8 @@ struct DeriveDirective {
} }
/// Walks the tree of modules recursively /// Walks the tree of modules recursively
struct DefCollector<'a, DB> { struct DefCollector<'a> {
db: &'a DB, db: &'a dyn DefDatabase,
def_map: CrateDefMap, def_map: CrateDefMap,
glob_imports: FxHashMap<LocalModuleId, Vec<(LocalModuleId, Visibility)>>, glob_imports: FxHashMap<LocalModuleId, Vec<(LocalModuleId, Visibility)>>,
unresolved_imports: Vec<ImportDirective>, unresolved_imports: Vec<ImportDirective>,
@ -124,10 +124,7 @@ struct DefCollector<'a, DB> {
cfg_options: &'a CfgOptions, cfg_options: &'a CfgOptions,
} }
impl<DB> DefCollector<'_, DB> impl DefCollector<'_> {
where
DB: DefDatabase,
{
fn collect(&mut self) { fn collect(&mut self) {
let file_id = self.db.crate_graph()[self.def_map.krate].root_file_id; let file_id = self.db.crate_graph()[self.def_map.krate].root_file_id;
let raw_items = self.db.raw_items(file_id.into()); let raw_items = self.db.raw_items(file_id.into());
@ -605,8 +602,8 @@ where
} }
/// Walks a single module, populating defs, imports and macros /// Walks a single module, populating defs, imports and macros
struct ModCollector<'a, D> { struct ModCollector<'a, 'b> {
def_collector: D, def_collector: &'a mut DefCollector<'b>,
macro_depth: usize, macro_depth: usize,
module_id: LocalModuleId, module_id: LocalModuleId,
file_id: HirFileId, file_id: HirFileId,
@ -614,10 +611,7 @@ struct ModCollector<'a, D> {
mod_dir: ModDir, mod_dir: ModDir,
} }
impl<DB> ModCollector<'_, &'_ mut DefCollector<'_, DB>> impl ModCollector<'_, '_> {
where
DB: DefDatabase,
{
fn collect(&mut self, items: &[raw::RawItem]) { fn collect(&mut self, items: &[raw::RawItem]) {
// Note: don't assert that inserted value is fresh: it's simply not true // Note: don't assert that inserted value is fresh: it's simply not true
// for macros. // for macros.
@ -950,7 +944,7 @@ mod tests {
use super::*; use super::*;
fn do_collect_defs(db: &impl DefDatabase, def_map: CrateDefMap) -> CrateDefMap { fn do_collect_defs(db: &dyn DefDatabase, def_map: CrateDefMap) -> CrateDefMap {
let mut collector = DefCollector { let mut collector = DefCollector {
db, db,
def_map, def_map,

View file

@ -40,12 +40,12 @@ impl ModDir {
pub(super) fn resolve_declaration( pub(super) fn resolve_declaration(
&self, &self,
db: &impl DefDatabase, db: &dyn DefDatabase,
file_id: HirFileId, file_id: HirFileId,
name: &Name, name: &Name,
attr_path: Option<&SmolStr>, attr_path: Option<&SmolStr>,
) -> Result<(FileId, ModDir), RelativePathBuf> { ) -> Result<(FileId, ModDir), RelativePathBuf> {
let file_id = file_id.original_file(db); let file_id = file_id.original_file(db.upcast());
let mut candidate_files = Vec::new(); let mut candidate_files = Vec::new();
match attr_to_path(attr_path) { match attr_to_path(attr_path) {

View file

@ -70,7 +70,7 @@ impl CrateDefMap {
pub(crate) fn resolve_visibility( pub(crate) fn resolve_visibility(
&self, &self,
db: &impl DefDatabase, db: &dyn DefDatabase,
original_module: LocalModuleId, original_module: LocalModuleId,
visibility: &RawVisibility, visibility: &RawVisibility,
) -> Option<Visibility> { ) -> Option<Visibility> {
@ -98,7 +98,7 @@ impl CrateDefMap {
// the result. // the result.
pub(super) fn resolve_path_fp_with_macro( pub(super) fn resolve_path_fp_with_macro(
&self, &self,
db: &impl DefDatabase, db: &dyn DefDatabase,
mode: ResolveMode, mode: ResolveMode,
original_module: LocalModuleId, original_module: LocalModuleId,
path: &ModPath, path: &ModPath,
@ -262,7 +262,7 @@ impl CrateDefMap {
fn resolve_name_in_module( fn resolve_name_in_module(
&self, &self,
db: &impl DefDatabase, db: &dyn DefDatabase,
module: LocalModuleId, module: LocalModuleId,
name: &Name, name: &Name,
shadow: BuiltinShadowMode, shadow: BuiltinShadowMode,
@ -304,7 +304,7 @@ impl CrateDefMap {
from_crate_root.or(from_extern_prelude) from_crate_root.or(from_extern_prelude)
} }
fn resolve_in_prelude(&self, db: &impl DefDatabase, name: &Name) -> PerNs { fn resolve_in_prelude(&self, db: &dyn DefDatabase, name: &Name) -> PerNs {
if let Some(prelude) = self.prelude { if let Some(prelude) = self.prelude {
let keep; let keep;
let def_map = if prelude.krate == self.krate { let def_map = if prelude.krate == self.krate {

View file

@ -9,7 +9,6 @@ use std::{ops::Index, sync::Arc};
use hir_expand::{ use hir_expand::{
ast_id_map::AstIdMap, ast_id_map::AstIdMap,
db::AstDatabase,
hygiene::Hygiene, hygiene::Hygiene,
name::{AsName, Name}, name::{AsName, Name},
}; };
@ -45,16 +44,13 @@ pub struct RawItems {
} }
impl RawItems { impl RawItems {
pub(crate) fn raw_items_query( pub(crate) fn raw_items_query(db: &dyn DefDatabase, file_id: HirFileId) -> Arc<RawItems> {
db: &(impl DefDatabase + AstDatabase),
file_id: HirFileId,
) -> Arc<RawItems> {
let _p = profile("raw_items_query"); let _p = profile("raw_items_query");
let mut collector = RawItemsCollector { let mut collector = RawItemsCollector {
raw_items: RawItems::default(), raw_items: RawItems::default(),
source_ast_id_map: db.ast_id_map(file_id), source_ast_id_map: db.ast_id_map(file_id),
file_id, file_id,
hygiene: Hygiene::new(db, file_id), hygiene: Hygiene::new(db.upcast(), file_id),
}; };
if let Some(node) = db.parse_or_expand(file_id) { if let Some(node) = db.parse_or_expand(file_id) {
if let Some(source_file) = ast::SourceFile::cast(node.clone()) { if let Some(source_file) = ast::SourceFile::cast(node.clone()) {

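raw_items_query previously asked for `&(impl DefDatabase + AstDatabase)`. There is no trait object for that double bound (`dyn A + B` with two non-auto traits is not allowed), so it becomes a single `&dyn DefDatabase` and the AST-layer view is recovered with `db.upcast()`, as in `Hygiene::new(db.upcast(), file_id)`. Sketch of the before/after, with stand-in traits rather than the real query groups:

    pub trait Upcast<T: ?Sized> {
        fn upcast(&self) -> &T;
    }

    pub trait AstDatabase {
        fn parse(&self, file_id: u32) -> String;
    }

    pub trait DefDatabase: Upcast<dyn AstDatabase> {
        fn item_count(&self, file_id: u32) -> usize;
    }

    // Before: generic over the concrete database, so this body is compiled once
    // per database type that calls it.
    pub fn raw_items_generic(db: &(impl DefDatabase + AstDatabase), file_id: u32) -> usize {
        let _tree = db.parse(file_id);
        db.item_count(file_id)
    }

    // After: one `&dyn DefDatabase`, compiled exactly once; the AST layer is
    // reached through `upcast()`.
    pub fn raw_items_dyn(db: &dyn DefDatabase, file_id: u32) -> usize {
        let _tree = db.upcast().parse(file_id);
        db.item_count(file_id)
    }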

@ -96,7 +96,7 @@ pub enum ValueNs {
impl Resolver { impl Resolver {
/// Resolve known trait from std, like `std::futures::Future` /// Resolve known trait from std, like `std::futures::Future`
pub fn resolve_known_trait(&self, db: &impl DefDatabase, path: &ModPath) -> Option<TraitId> { pub fn resolve_known_trait(&self, db: &dyn DefDatabase, path: &ModPath) -> Option<TraitId> {
let res = self.resolve_module_path(db, path, BuiltinShadowMode::Other).take_types()?; let res = self.resolve_module_path(db, path, BuiltinShadowMode::Other).take_types()?;
match res { match res {
ModuleDefId::TraitId(it) => Some(it), ModuleDefId::TraitId(it) => Some(it),
@ -105,7 +105,7 @@ impl Resolver {
} }
/// Resolve known struct from std, like `std::boxed::Box` /// Resolve known struct from std, like `std::boxed::Box`
pub fn resolve_known_struct(&self, db: &impl DefDatabase, path: &ModPath) -> Option<StructId> { pub fn resolve_known_struct(&self, db: &dyn DefDatabase, path: &ModPath) -> Option<StructId> {
let res = self.resolve_module_path(db, path, BuiltinShadowMode::Other).take_types()?; let res = self.resolve_module_path(db, path, BuiltinShadowMode::Other).take_types()?;
match res { match res {
ModuleDefId::AdtId(AdtId::StructId(it)) => Some(it), ModuleDefId::AdtId(AdtId::StructId(it)) => Some(it),
@ -114,7 +114,7 @@ impl Resolver {
} }
/// Resolve known enum from std, like `std::result::Result` /// Resolve known enum from std, like `std::result::Result`
pub fn resolve_known_enum(&self, db: &impl DefDatabase, path: &ModPath) -> Option<EnumId> { pub fn resolve_known_enum(&self, db: &dyn DefDatabase, path: &ModPath) -> Option<EnumId> {
let res = self.resolve_module_path(db, path, BuiltinShadowMode::Other).take_types()?; let res = self.resolve_module_path(db, path, BuiltinShadowMode::Other).take_types()?;
match res { match res {
ModuleDefId::AdtId(AdtId::EnumId(it)) => Some(it), ModuleDefId::AdtId(AdtId::EnumId(it)) => Some(it),
@ -124,7 +124,7 @@ impl Resolver {
fn resolve_module_path( fn resolve_module_path(
&self, &self,
db: &impl DefDatabase, db: &dyn DefDatabase,
path: &ModPath, path: &ModPath,
shadow: BuiltinShadowMode, shadow: BuiltinShadowMode,
) -> PerNs { ) -> PerNs {
@ -139,13 +139,13 @@ impl Resolver {
module_res module_res
} }
pub fn resolve_module_path_in_items(&self, db: &impl DefDatabase, path: &ModPath) -> PerNs { pub fn resolve_module_path_in_items(&self, db: &dyn DefDatabase, path: &ModPath) -> PerNs {
self.resolve_module_path(db, path, BuiltinShadowMode::Module) self.resolve_module_path(db, path, BuiltinShadowMode::Module)
} }
pub fn resolve_path_in_type_ns( pub fn resolve_path_in_type_ns(
&self, &self,
db: &impl DefDatabase, db: &dyn DefDatabase,
path: &ModPath, path: &ModPath,
) -> Option<(TypeNs, Option<usize>)> { ) -> Option<(TypeNs, Option<usize>)> {
let first_name = path.segments.first()?; let first_name = path.segments.first()?;
@ -222,7 +222,7 @@ impl Resolver {
pub fn resolve_path_in_type_ns_fully( pub fn resolve_path_in_type_ns_fully(
&self, &self,
db: &impl DefDatabase, db: &dyn DefDatabase,
path: &ModPath, path: &ModPath,
) -> Option<TypeNs> { ) -> Option<TypeNs> {
let (res, unresolved) = self.resolve_path_in_type_ns(db, path)?; let (res, unresolved) = self.resolve_path_in_type_ns(db, path)?;
@ -234,7 +234,7 @@ impl Resolver {
pub fn resolve_visibility( pub fn resolve_visibility(
&self, &self,
db: &impl DefDatabase, db: &dyn DefDatabase,
visibility: &RawVisibility, visibility: &RawVisibility,
) -> Option<Visibility> { ) -> Option<Visibility> {
match visibility { match visibility {
@ -251,7 +251,7 @@ impl Resolver {
pub fn resolve_path_in_value_ns( pub fn resolve_path_in_value_ns(
&self, &self,
db: &impl DefDatabase, db: &dyn DefDatabase,
path: &ModPath, path: &ModPath,
) -> Option<ResolveValueResult> { ) -> Option<ResolveValueResult> {
let n_segments = path.segments.len(); let n_segments = path.segments.len();
@ -367,7 +367,7 @@ impl Resolver {
pub fn resolve_path_in_value_ns_fully( pub fn resolve_path_in_value_ns_fully(
&self, &self,
db: &impl DefDatabase, db: &dyn DefDatabase,
path: &ModPath, path: &ModPath,
) -> Option<ValueNs> { ) -> Option<ValueNs> {
match self.resolve_path_in_value_ns(db, path)? { match self.resolve_path_in_value_ns(db, path)? {
@ -378,7 +378,7 @@ impl Resolver {
pub fn resolve_path_as_macro( pub fn resolve_path_as_macro(
&self, &self,
db: &impl DefDatabase, db: &dyn DefDatabase,
path: &ModPath, path: &ModPath,
) -> Option<MacroDefId> { ) -> Option<MacroDefId> {
// Search item scope legacy macro first // Search item scope legacy macro first
@ -390,13 +390,13 @@ impl Resolver {
item_map.resolve_path(db, module, &path, BuiltinShadowMode::Other).0.take_macros() item_map.resolve_path(db, module, &path, BuiltinShadowMode::Other).0.take_macros()
} }
pub fn process_all_names(&self, db: &impl DefDatabase, f: &mut dyn FnMut(Name, ScopeDef)) { pub fn process_all_names(&self, db: &dyn DefDatabase, f: &mut dyn FnMut(Name, ScopeDef)) {
for scope in self.scopes.iter().rev() { for scope in self.scopes.iter().rev() {
scope.process_names(db, f); scope.process_names(db, f);
} }
} }
pub fn traits_in_scope(&self, db: &impl DefDatabase) -> FxHashSet<TraitId> { pub fn traits_in_scope(&self, db: &dyn DefDatabase) -> FxHashSet<TraitId> {
let mut traits = FxHashSet::default(); let mut traits = FxHashSet::default();
for scope in &self.scopes { for scope in &self.scopes {
if let Scope::ModuleScope(m) = scope { if let Scope::ModuleScope(m) = scope {
@ -474,7 +474,7 @@ pub enum ScopeDef {
} }
impl Scope { impl Scope {
fn process_names(&self, db: &impl DefDatabase, f: &mut dyn FnMut(Name, ScopeDef)) { fn process_names(&self, db: &dyn DefDatabase, f: &mut dyn FnMut(Name, ScopeDef)) {
match self { match self {
Scope::ModuleScope(m) => { Scope::ModuleScope(m) => {
// FIXME: should we provide `self` here? // FIXME: should we provide `self` here?
@ -534,13 +534,13 @@ impl Scope {
} }
// needs arbitrary_self_types to be a method... or maybe move to the def? // needs arbitrary_self_types to be a method... or maybe move to the def?
pub fn resolver_for_expr(db: &impl DefDatabase, owner: DefWithBodyId, expr_id: ExprId) -> Resolver { pub fn resolver_for_expr(db: &dyn DefDatabase, owner: DefWithBodyId, expr_id: ExprId) -> Resolver {
let scopes = db.expr_scopes(owner); let scopes = db.expr_scopes(owner);
resolver_for_scope(db, owner, scopes.scope_for(expr_id)) resolver_for_scope(db, owner, scopes.scope_for(expr_id))
} }
pub fn resolver_for_scope( pub fn resolver_for_scope(
db: &impl DefDatabase, db: &dyn DefDatabase,
owner: DefWithBodyId, owner: DefWithBodyId,
scope_id: Option<ScopeId>, scope_id: Option<ScopeId>,
) -> Resolver { ) -> Resolver {
@ -560,7 +560,7 @@ impl Resolver {
self self
} }
fn push_generic_params_scope(self, db: &impl DefDatabase, def: GenericDefId) -> Resolver { fn push_generic_params_scope(self, db: &dyn DefDatabase, def: GenericDefId) -> Resolver {
let params = db.generic_params(def); let params = db.generic_params(def);
self.push_scope(Scope::GenericParams { def, params }) self.push_scope(Scope::GenericParams { def, params })
} }
@ -593,24 +593,24 @@ impl Resolver {
pub trait HasResolver: Copy { pub trait HasResolver: Copy {
/// Builds a resolver for type references inside this def. /// Builds a resolver for type references inside this def.
fn resolver(self, db: &impl DefDatabase) -> Resolver; fn resolver(self, db: &dyn DefDatabase) -> Resolver;
} }
impl HasResolver for ModuleId { impl HasResolver for ModuleId {
fn resolver(self, db: &impl DefDatabase) -> Resolver { fn resolver(self, db: &dyn DefDatabase) -> Resolver {
let def_map = db.crate_def_map(self.krate); let def_map = db.crate_def_map(self.krate);
Resolver::default().push_module_scope(def_map, self.local_id) Resolver::default().push_module_scope(def_map, self.local_id)
} }
} }
impl HasResolver for TraitId { impl HasResolver for TraitId {
fn resolver(self, db: &impl DefDatabase) -> Resolver { fn resolver(self, db: &dyn DefDatabase) -> Resolver {
self.lookup(db).container.resolver(db).push_generic_params_scope(db, self.into()) self.lookup(db).container.resolver(db).push_generic_params_scope(db, self.into())
} }
} }
impl<T: Into<AdtId> + Copy> HasResolver for T { impl<T: Into<AdtId> + Copy> HasResolver for T {
fn resolver(self, db: &impl DefDatabase) -> Resolver { fn resolver(self, db: &dyn DefDatabase) -> Resolver {
let def = self.into(); let def = self.into();
def.module(db) def.module(db)
.resolver(db) .resolver(db)
@ -620,31 +620,31 @@ impl<T: Into<AdtId> + Copy> HasResolver for T {
} }
impl HasResolver for FunctionId { impl HasResolver for FunctionId {
fn resolver(self, db: &impl DefDatabase) -> Resolver { fn resolver(self, db: &dyn DefDatabase) -> Resolver {
self.lookup(db).container.resolver(db).push_generic_params_scope(db, self.into()) self.lookup(db).container.resolver(db).push_generic_params_scope(db, self.into())
} }
} }
impl HasResolver for ConstId { impl HasResolver for ConstId {
fn resolver(self, db: &impl DefDatabase) -> Resolver { fn resolver(self, db: &dyn DefDatabase) -> Resolver {
self.lookup(db).container.resolver(db) self.lookup(db).container.resolver(db)
} }
} }
impl HasResolver for StaticId { impl HasResolver for StaticId {
fn resolver(self, db: &impl DefDatabase) -> Resolver { fn resolver(self, db: &dyn DefDatabase) -> Resolver {
self.lookup(db).container.resolver(db) self.lookup(db).container.resolver(db)
} }
} }
impl HasResolver for TypeAliasId { impl HasResolver for TypeAliasId {
fn resolver(self, db: &impl DefDatabase) -> Resolver { fn resolver(self, db: &dyn DefDatabase) -> Resolver {
self.lookup(db).container.resolver(db).push_generic_params_scope(db, self.into()) self.lookup(db).container.resolver(db).push_generic_params_scope(db, self.into())
} }
} }
impl HasResolver for ImplId { impl HasResolver for ImplId {
fn resolver(self, db: &impl DefDatabase) -> Resolver { fn resolver(self, db: &dyn DefDatabase) -> Resolver {
self.lookup(db) self.lookup(db)
.container .container
.resolver(db) .resolver(db)
@ -654,7 +654,7 @@ impl HasResolver for ImplId {
} }
impl HasResolver for DefWithBodyId { impl HasResolver for DefWithBodyId {
fn resolver(self, db: &impl DefDatabase) -> Resolver { fn resolver(self, db: &dyn DefDatabase) -> Resolver {
match self { match self {
DefWithBodyId::ConstId(c) => c.resolver(db), DefWithBodyId::ConstId(c) => c.resolver(db),
DefWithBodyId::FunctionId(f) => f.resolver(db), DefWithBodyId::FunctionId(f) => f.resolver(db),
@ -664,7 +664,7 @@ impl HasResolver for DefWithBodyId {
} }
impl HasResolver for ContainerId { impl HasResolver for ContainerId {
fn resolver(self, db: &impl DefDatabase) -> Resolver { fn resolver(self, db: &dyn DefDatabase) -> Resolver {
match self { match self {
ContainerId::ModuleId(it) => it.resolver(db), ContainerId::ModuleId(it) => it.resolver(db),
ContainerId::DefWithBodyId(it) => it.module(db).resolver(db), ContainerId::DefWithBodyId(it) => it.module(db).resolver(db),
@ -673,7 +673,7 @@ impl HasResolver for ContainerId {
} }
impl HasResolver for AssocContainerId { impl HasResolver for AssocContainerId {
fn resolver(self, db: &impl DefDatabase) -> Resolver { fn resolver(self, db: &dyn DefDatabase) -> Resolver {
match self { match self {
AssocContainerId::ContainerId(it) => it.resolver(db), AssocContainerId::ContainerId(it) => it.resolver(db),
AssocContainerId::TraitId(it) => it.resolver(db), AssocContainerId::TraitId(it) => it.resolver(db),
@ -683,7 +683,7 @@ impl HasResolver for AssocContainerId {
} }
impl HasResolver for GenericDefId { impl HasResolver for GenericDefId {
fn resolver(self, db: &impl DefDatabase) -> Resolver { fn resolver(self, db: &dyn DefDatabase) -> Resolver {
match self { match self {
GenericDefId::FunctionId(inner) => inner.resolver(db), GenericDefId::FunctionId(inner) => inner.resolver(db),
GenericDefId::AdtId(adt) => adt.resolver(db), GenericDefId::AdtId(adt) => adt.resolver(db),
@ -697,7 +697,7 @@ impl HasResolver for GenericDefId {
} }
impl HasResolver for VariantId { impl HasResolver for VariantId {
fn resolver(self, db: &impl DefDatabase) -> Resolver { fn resolver(self, db: &dyn DefDatabase) -> Resolver {
match self { match self {
VariantId::EnumVariantId(it) => it.parent.resolver(db), VariantId::EnumVariantId(it) => it.parent.resolver(db),
VariantId::StructId(it) => it.resolver(db), VariantId::StructId(it) => it.resolver(db),

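HasResolver and its helpers now spell the database as `&dyn DefDatabase`, so every implementation shares one compiled `resolver` body and delegation (function to containing module, and so on) just passes the same trait object along. A small self-contained sketch; the id types, `Resolver` and `Database` are stand-ins:

    trait Database {
        fn container_of(&self, id: u32) -> u32;
    }

    #[derive(Default)]
    struct Resolver {
        scopes: Vec<u32>,
    }

    impl Resolver {
        fn push_scope(mut self, scope: u32) -> Resolver {
            self.scopes.push(scope);
            self
        }
    }

    trait HasResolver: Copy {
        fn resolver(self, db: &dyn Database) -> Resolver;
    }

    #[derive(Clone, Copy)]
    struct ModuleId(u32);

    impl HasResolver for ModuleId {
        fn resolver(self, _db: &dyn Database) -> Resolver {
            Resolver::default().push_scope(self.0)
        }
    }

    #[derive(Clone, Copy)]
    struct FunctionId(u32);

    impl HasResolver for FunctionId {
        fn resolver(self, db: &dyn Database) -> Resolver {
            // Delegate to the containing module, handing the same `&dyn` value along.
            ModuleId(db.container_of(self.0)).resolver(db).push_scope(self.0)
        }
    }

    struct Db;

    impl Database for Db {
        fn container_of(&self, id: u32) -> u32 {
            id / 10
        }
    }

    fn main() {
        let scopes = FunctionId(42).resolver(&Db).scopes;
        assert_eq!(scopes, vec![4, 42]);
    }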

@ -8,14 +8,14 @@ use crate::{db::DefDatabase, AssocItemLoc, ItemLoc};
pub trait HasSource { pub trait HasSource {
type Value; type Value;
fn source(&self, db: &impl DefDatabase) -> InFile<Self::Value>; fn source(&self, db: &dyn DefDatabase) -> InFile<Self::Value>;
} }
impl<N: AstNode> HasSource for AssocItemLoc<N> { impl<N: AstNode> HasSource for AssocItemLoc<N> {
type Value = N; type Value = N;
fn source(&self, db: &impl DefDatabase) -> InFile<N> { fn source(&self, db: &dyn DefDatabase) -> InFile<N> {
let node = self.ast_id.to_node(db); let node = self.ast_id.to_node(db.upcast());
InFile::new(self.ast_id.file_id, node) InFile::new(self.ast_id.file_id, node)
} }
} }
@ -23,8 +23,8 @@ impl<N: AstNode> HasSource for AssocItemLoc<N> {
impl<N: AstNode> HasSource for ItemLoc<N> { impl<N: AstNode> HasSource for ItemLoc<N> {
type Value = N; type Value = N;
fn source(&self, db: &impl DefDatabase) -> InFile<N> { fn source(&self, db: &dyn DefDatabase) -> InFile<N> {
let node = self.ast_id.to_node(db); let node = self.ast_id.to_node(db.upcast());
InFile::new(self.ast_id.file_id, node) InFile::new(self.ast_id.file_id, node)
} }
} }
@ -32,5 +32,5 @@ impl<N: AstNode> HasSource for ItemLoc<N> {
pub trait HasChildSource { pub trait HasChildSource {
type ChildId; type ChildId;
type Value; type Value;
fn child_source(&self, db: &impl DefDatabase) -> InFile<ArenaMap<Self::ChildId, Self::Value>>; fn child_source(&self, db: &dyn DefDatabase) -> InFile<ArenaMap<Self::ChildId, Self::Value>>;
} }


@ -5,8 +5,12 @@ use std::{
sync::{Arc, Mutex}, sync::{Arc, Mutex},
}; };
use hir_expand::db::AstDatabase;
use ra_db::{
salsa, CrateId, ExternSourceId, FileId, FileLoader, FileLoaderDelegate, RelativePath, Upcast,
};
use crate::db::DefDatabase; use crate::db::DefDatabase;
use ra_db::{salsa, CrateId, ExternSourceId, FileId, FileLoader, FileLoaderDelegate, RelativePath};
#[salsa::database( #[salsa::database(
ra_db::SourceDatabaseExtStorage, ra_db::SourceDatabaseExtStorage,
@ -21,6 +25,18 @@ pub struct TestDB {
events: Mutex<Option<Vec<salsa::Event<TestDB>>>>, events: Mutex<Option<Vec<salsa::Event<TestDB>>>>,
} }
impl Upcast<dyn AstDatabase> for TestDB {
fn upcast(&self) -> &(dyn AstDatabase + 'static) {
&*self
}
}
impl Upcast<dyn DefDatabase> for TestDB {
fn upcast(&self) -> &(dyn DefDatabase + 'static) {
&*self
}
}
impl salsa::Database for TestDB { impl salsa::Database for TestDB {
fn salsa_runtime(&self) -> &salsa::Runtime<Self> { fn salsa_runtime(&self) -> &salsa::Runtime<Self> {
&self.runtime &self.runtime

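The two new impls above are the concrete end of the pattern: the test database implements `Upcast` once per trait object the rest of the code wants to see, returning `&*self`. Below is a self-contained miniature of the same wiring without salsa; the query traits are stand-ins:

    pub trait Upcast<T: ?Sized> {
        fn upcast(&self) -> &T;
    }

    trait AstDatabase {
        fn file_text(&self, file: u32) -> String;
    }

    trait DefDatabase: AstDatabase {
        fn def_count(&self, file: u32) -> usize;
    }

    struct TestDB {
        files: Vec<String>,
    }

    impl AstDatabase for TestDB {
        fn file_text(&self, file: u32) -> String {
            self.files.get(file as usize).cloned().unwrap_or_default()
        }
    }

    impl DefDatabase for TestDB {
        fn def_count(&self, file: u32) -> usize {
            self.file_text(file).split_whitespace().count()
        }
    }

    // One impl per trait object callers will want, mirroring the TestDB hunk.
    impl Upcast<dyn AstDatabase> for TestDB {
        fn upcast(&self) -> &(dyn AstDatabase + 'static) {
            &*self
        }
    }

    impl Upcast<dyn DefDatabase> for TestDB {
        fn upcast(&self) -> &(dyn DefDatabase + 'static) {
            &*self
        }
    }

    fn main() {
        let db = TestDB { files: vec!["fn main() {}".to_string()] };
        // Fully qualified call disambiguates between the two Upcast impls.
        let def_db: &dyn DefDatabase = <TestDB as Upcast<dyn DefDatabase>>::upcast(&db);
        assert_eq!(def_db.def_count(0), 3);
    }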

@ -33,22 +33,22 @@ impl RawVisibility {
} }
pub(crate) fn from_ast_with_default( pub(crate) fn from_ast_with_default(
db: &impl DefDatabase, db: &dyn DefDatabase,
default: RawVisibility, default: RawVisibility,
node: InFile<Option<ast::Visibility>>, node: InFile<Option<ast::Visibility>>,
) -> RawVisibility { ) -> RawVisibility {
Self::from_ast_with_hygiene_and_default( Self::from_ast_with_hygiene_and_default(
node.value, node.value,
default, default,
&Hygiene::new(db, node.file_id), &Hygiene::new(db.upcast(), node.file_id),
) )
} }
pub(crate) fn from_ast( pub(crate) fn from_ast(
db: &impl DefDatabase, db: &dyn DefDatabase,
node: InFile<Option<ast::Visibility>>, node: InFile<Option<ast::Visibility>>,
) -> RawVisibility { ) -> RawVisibility {
Self::from_ast_with_hygiene(node.value, &Hygiene::new(db, node.file_id)) Self::from_ast_with_hygiene(node.value, &Hygiene::new(db.upcast(), node.file_id))
} }
pub(crate) fn from_ast_with_hygiene( pub(crate) fn from_ast_with_hygiene(
@ -90,7 +90,7 @@ impl RawVisibility {
pub fn resolve( pub fn resolve(
&self, &self,
db: &impl DefDatabase, db: &dyn DefDatabase,
resolver: &crate::resolver::Resolver, resolver: &crate::resolver::Resolver,
) -> Visibility { ) -> Visibility {
// we fall back to public visibility (i.e. fail open) if the path can't be resolved // we fall back to public visibility (i.e. fail open) if the path can't be resolved
@ -108,7 +108,7 @@ pub enum Visibility {
} }
impl Visibility { impl Visibility {
pub fn is_visible_from(self, db: &impl DefDatabase, from_module: ModuleId) -> bool { pub fn is_visible_from(self, db: &dyn DefDatabase, from_module: ModuleId) -> bool {
let to_module = match self { let to_module = match self {
Visibility::Module(m) => m, Visibility::Module(m) => m,
Visibility::Public => return true, Visibility::Public => return true,


@ -77,7 +77,7 @@ pub trait AstDatabase: SourceDatabase {
/// token. The `token_to_map` mapped down into the expansion, with the mapped /// token. The `token_to_map` mapped down into the expansion, with the mapped
/// token returned. /// token returned.
pub fn expand_hypothetical( pub fn expand_hypothetical(
db: &impl AstDatabase, db: &dyn AstDatabase,
actual_macro_call: MacroCallId, actual_macro_call: MacroCallId,
hypothetical_args: &ra_syntax::ast::TokenTree, hypothetical_args: &ra_syntax::ast::TokenTree,
token_to_map: ra_syntax::SyntaxToken, token_to_map: ra_syntax::SyntaxToken,


@ -30,7 +30,7 @@ use ra_syntax::{algo::replace_descendants, SyntaxElement, SyntaxNode};
use std::{collections::HashMap, sync::Arc}; use std::{collections::HashMap, sync::Arc};
pub fn expand_eager_macro( pub fn expand_eager_macro(
db: &impl AstDatabase, db: &dyn AstDatabase,
macro_call: InFile<ast::MacroCall>, macro_call: InFile<ast::MacroCall>,
def: MacroDefId, def: MacroDefId,
resolver: &dyn Fn(ast::Path) -> Option<MacroDefId>, resolver: &dyn Fn(ast::Path) -> Option<MacroDefId>,
@ -78,7 +78,7 @@ fn to_subtree(node: &SyntaxNode) -> Option<tt::Subtree> {
} }
fn lazy_expand( fn lazy_expand(
db: &impl AstDatabase, db: &dyn AstDatabase,
def: &MacroDefId, def: &MacroDefId,
macro_call: InFile<ast::MacroCall>, macro_call: InFile<ast::MacroCall>,
) -> Option<InFile<SyntaxNode>> { ) -> Option<InFile<SyntaxNode>> {
@ -91,7 +91,7 @@ fn lazy_expand(
} }
fn eager_macro_recur( fn eager_macro_recur(
db: &impl AstDatabase, db: &dyn AstDatabase,
curr: InFile<SyntaxNode>, curr: InFile<SyntaxNode>,
macro_resolver: &dyn Fn(ast::Path) -> Option<MacroDefId>, macro_resolver: &dyn Fn(ast::Path) -> Option<MacroDefId>,
) -> Option<SyntaxNode> { ) -> Option<SyntaxNode> {


@ -19,7 +19,7 @@ pub struct Hygiene {
} }
impl Hygiene { impl Hygiene {
pub fn new(db: &impl AstDatabase, file_id: HirFileId) -> Hygiene { pub fn new(db: &dyn AstDatabase, file_id: HirFileId) -> Hygiene {
let def_crate = match file_id.0 { let def_crate = match file_id.0 {
HirFileIdRepr::FileId(_) => None, HirFileIdRepr::FileId(_) => None,
HirFileIdRepr::MacroFile(macro_file) => match macro_file.macro_call_id { HirFileIdRepr::MacroFile(macro_file) => match macro_file.macro_call_id {


@ -366,7 +366,7 @@ impl<T> InFile<T> {
pub fn as_ref(&self) -> InFile<&T> { pub fn as_ref(&self) -> InFile<&T> {
self.with_value(&self.value) self.with_value(&self.value)
} }
pub fn file_syntax(&self, db: &impl db::AstDatabase) -> SyntaxNode { pub fn file_syntax(&self, db: &dyn db::AstDatabase) -> SyntaxNode {
db.parse_or_expand(self.file_id).expect("source created from invalid file") db.parse_or_expand(self.file_id).expect("source created from invalid file")
} }
} }
@ -387,7 +387,7 @@ impl<T> InFile<Option<T>> {
impl InFile<SyntaxNode> { impl InFile<SyntaxNode> {
pub fn ancestors_with_macros( pub fn ancestors_with_macros(
self, self,
db: &impl crate::db::AstDatabase, db: &dyn db::AstDatabase,
) -> impl Iterator<Item = InFile<SyntaxNode>> + '_ { ) -> impl Iterator<Item = InFile<SyntaxNode>> + '_ {
std::iter::successors(Some(self), move |node| match node.value.parent() { std::iter::successors(Some(self), move |node| match node.value.parent() {
Some(parent) => Some(node.with_value(parent)), Some(parent) => Some(node.with_value(parent)),
@ -402,7 +402,7 @@ impl InFile<SyntaxNode> {
impl InFile<SyntaxToken> { impl InFile<SyntaxToken> {
pub fn ancestors_with_macros( pub fn ancestors_with_macros(
self, self,
db: &impl crate::db::AstDatabase, db: &dyn db::AstDatabase,
) -> impl Iterator<Item = InFile<SyntaxNode>> + '_ { ) -> impl Iterator<Item = InFile<SyntaxNode>> + '_ {
self.map(|it| it.parent()).ancestors_with_macros(db) self.map(|it| it.parent()).ancestors_with_macros(db)
} }


@ -20,7 +20,7 @@ use crate::{
const AUTODEREF_RECURSION_LIMIT: usize = 10; const AUTODEREF_RECURSION_LIMIT: usize = 10;
pub fn autoderef<'a>( pub fn autoderef<'a>(
db: &'a impl HirDatabase, db: &'a dyn HirDatabase,
krate: Option<CrateId>, krate: Option<CrateId>,
ty: InEnvironment<Canonical<Ty>>, ty: InEnvironment<Canonical<Ty>>,
) -> impl Iterator<Item = Canonical<Ty>> + 'a { ) -> impl Iterator<Item = Canonical<Ty>> + 'a {
@ -32,7 +32,7 @@ pub fn autoderef<'a>(
} }
pub(crate) fn deref( pub(crate) fn deref(
db: &impl HirDatabase, db: &dyn HirDatabase,
krate: CrateId, krate: CrateId,
ty: InEnvironment<&Canonical<Ty>>, ty: InEnvironment<&Canonical<Ty>>,
) -> Option<Canonical<Ty>> { ) -> Option<Canonical<Ty>> {
@ -44,7 +44,7 @@ pub(crate) fn deref(
} }
fn deref_by_trait( fn deref_by_trait(
db: &impl HirDatabase, db: &dyn HirDatabase,
krate: CrateId, krate: CrateId,
ty: InEnvironment<&Canonical<Ty>>, ty: InEnvironment<&Canonical<Ty>>,
) -> Option<Canonical<Ty>> { ) -> Option<Canonical<Ty>> {
@ -54,7 +54,7 @@ fn deref_by_trait(
}; };
let target = db.trait_data(deref_trait).associated_type_by_name(&name![Target])?; let target = db.trait_data(deref_trait).associated_type_by_name(&name![Target])?;
let generic_params = generics(db, target.into()); let generic_params = generics(db.upcast(), target.into());
if generic_params.len() != 1 { if generic_params.len() != 1 {
// the Target type + Deref trait should only have one generic parameter, // the Target type + Deref trait should only have one generic parameter,
// namely Deref's Self type // namely Deref's Self type

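autoderef keeps its `impl Iterator<Item = Canonical<Ty>> + 'a` return type; only the parameter changes to `&'a dyn HirDatabase`, with the explicit `'a` tying the returned iterator to the borrow of the trait object. A runnable sketch with a stand-in database and a bounded deref chain (the `take(10)` mirrors AUTODEREF_RECURSION_LIMIT from the hunk above):

    trait Database {
        fn deref_once(&self, ty: u32) -> Option<u32>;
    }

    fn autoderef<'a>(db: &'a dyn Database, ty: u32) -> impl Iterator<Item = u32> + 'a {
        // The iterator captures `db`, so its lifetime is tied to the borrow of
        // the trait object, exactly as it was with the generic version.
        std::iter::successors(Some(ty), move |&ty| db.deref_once(ty)).take(10)
    }

    struct Db;

    impl Database for Db {
        fn deref_once(&self, ty: u32) -> Option<u32> {
            if ty > 0 { Some(ty - 1) } else { None }
        }
    }

    fn main() {
        let steps: Vec<u32> = autoderef(&Db, 3).collect();
        assert_eq!(steps, vec![3, 2, 1, 0]);
    }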

@ -7,7 +7,7 @@ use hir_def::{
VariantId, VariantId,
}; };
use ra_arena::map::ArenaMap; use ra_arena::map::ArenaMap;
use ra_db::{impl_intern_key, salsa, CrateId}; use ra_db::{impl_intern_key, salsa, CrateId, Upcast};
use ra_prof::profile; use ra_prof::profile;
use crate::{ use crate::{
@ -20,7 +20,7 @@ use hir_expand::name::Name;
#[salsa::query_group(HirDatabaseStorage)] #[salsa::query_group(HirDatabaseStorage)]
#[salsa::requires(salsa::Database)] #[salsa::requires(salsa::Database)]
pub trait HirDatabase: DefDatabase { pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
#[salsa::invoke(infer_wait)] #[salsa::invoke(infer_wait)]
fn infer(&self, def: DefWithBodyId) -> Arc<InferenceResult>; fn infer(&self, def: DefWithBodyId) -> Arc<InferenceResult>;

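The new supertrait `HirDatabase: DefDatabase + Upcast<dyn DefDatabase>` is what makes all the `db.upcast()` calls in the hir_ty changes possible: stable Rust at the time had no built-in trait-object upcasting, so the bound supplies the conversion from a `&dyn HirDatabase` down to the `&dyn DefDatabase` that lower-level helpers ask for. Compile-only sketch with stand-in traits and methods:

    pub trait Upcast<T: ?Sized> {
        fn upcast(&self) -> &T;
    }

    pub trait DefDatabase {
        fn generic_params(&self, def: u32) -> usize;
    }

    // The supertrait guarantees every `&dyn HirDatabase` can hand out a
    // `&dyn DefDatabase` on demand.
    pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
        fn ty_display(&self, def: u32) -> String;
    }

    // Lower layers keep asking for the narrower database object...
    pub fn generics(db: &dyn DefDatabase, def: u32) -> usize {
        db.generic_params(def)
    }

    // ...and higher-level code reaches them through the upcast.
    pub fn describe(db: &dyn HirDatabase, def: u32) -> String {
        format!("{} ({} params)", db.ty_display(def), generics(db.upcast(), def))
    }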

@ -9,8 +9,8 @@ use crate::{
use hir_def::{generics::TypeParamProvenance, AdtId, AssocContainerId, Lookup}; use hir_def::{generics::TypeParamProvenance, AdtId, AssocContainerId, Lookup};
use hir_expand::name::Name; use hir_expand::name::Name;
pub struct HirFormatter<'a, 'b, DB> { pub struct HirFormatter<'a, 'b> {
pub db: &'a DB, pub db: &'a dyn HirDatabase,
fmt: &'a mut fmt::Formatter<'b>, fmt: &'a mut fmt::Formatter<'b>,
buf: String, buf: String,
curr_size: usize, curr_size: usize,
@ -19,20 +19,20 @@ pub struct HirFormatter<'a, 'b, DB> {
} }
pub trait HirDisplay { pub trait HirDisplay {
fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result; fn hir_fmt(&self, f: &mut HirFormatter) -> fmt::Result;
fn display<'a, DB>(&'a self, db: &'a DB) -> HirDisplayWrapper<'a, DB, Self> fn display<'a>(&'a self, db: &'a dyn HirDatabase) -> HirDisplayWrapper<'a, Self>
where where
Self: Sized, Self: Sized,
{ {
HirDisplayWrapper(db, self, None, false) HirDisplayWrapper(db, self, None, false)
} }
fn display_truncated<'a, DB>( fn display_truncated<'a>(
&'a self, &'a self,
db: &'a DB, db: &'a dyn HirDatabase,
max_size: Option<usize>, max_size: Option<usize>,
) -> HirDisplayWrapper<'a, DB, Self> ) -> HirDisplayWrapper<'a, Self>
where where
Self: Sized, Self: Sized,
{ {
@ -40,10 +40,7 @@ pub trait HirDisplay {
} }
} }
impl<'a, 'b, DB> HirFormatter<'a, 'b, DB> impl<'a, 'b> HirFormatter<'a, 'b> {
where
DB: HirDatabase,
{
pub fn write_joined<T: HirDisplay>( pub fn write_joined<T: HirDisplay>(
&mut self, &mut self,
iter: impl IntoIterator<Item = T>, iter: impl IntoIterator<Item = T>,
@ -84,11 +81,10 @@ where
} }
} }
pub struct HirDisplayWrapper<'a, DB, T>(&'a DB, &'a T, Option<usize>, bool); pub struct HirDisplayWrapper<'a, T>(&'a dyn HirDatabase, &'a T, Option<usize>, bool);
impl<'a, DB, T> fmt::Display for HirDisplayWrapper<'a, DB, T> impl<'a, T> fmt::Display for HirDisplayWrapper<'a, T>
where where
DB: HirDatabase,
T: HirDisplay, T: HirDisplay,
{ {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
@ -106,13 +102,13 @@ where
const TYPE_HINT_TRUNCATION: &str = ""; const TYPE_HINT_TRUNCATION: &str = "";
impl HirDisplay for &Ty { impl HirDisplay for &Ty {
fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result { fn hir_fmt(&self, f: &mut HirFormatter) -> fmt::Result {
HirDisplay::hir_fmt(*self, f) HirDisplay::hir_fmt(*self, f)
} }
} }
impl HirDisplay for ApplicationTy { impl HirDisplay for ApplicationTy {
fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result { fn hir_fmt(&self, f: &mut HirFormatter) -> fmt::Result {
if f.should_truncate() { if f.should_truncate() {
return write!(f, "{}", TYPE_HINT_TRUNCATION); return write!(f, "{}", TYPE_HINT_TRUNCATION);
} }
@ -178,7 +174,7 @@ impl HirDisplay for ApplicationTy {
} }
} }
if self.parameters.len() > 0 { if self.parameters.len() > 0 {
let generics = generics(f.db, def.into()); let generics = generics(f.db.upcast(), def.into());
let (parent_params, self_param, type_params, _impl_trait_params) = let (parent_params, self_param, type_params, _impl_trait_params) =
generics.provenance_split(); generics.provenance_split();
let total_len = parent_params + self_param + type_params; let total_len = parent_params + self_param + type_params;
@ -238,7 +234,7 @@ impl HirDisplay for ApplicationTy {
} }
} }
TypeCtor::AssociatedType(type_alias) => { TypeCtor::AssociatedType(type_alias) => {
let trait_ = match type_alias.lookup(f.db).container { let trait_ = match type_alias.lookup(f.db.upcast()).container {
AssocContainerId::TraitId(it) => it, AssocContainerId::TraitId(it) => it,
_ => panic!("not an associated type"), _ => panic!("not an associated type"),
}; };
@ -272,7 +268,7 @@ impl HirDisplay for ApplicationTy {
} }
impl HirDisplay for ProjectionTy { impl HirDisplay for ProjectionTy {
fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result { fn hir_fmt(&self, f: &mut HirFormatter) -> fmt::Result {
if f.should_truncate() { if f.should_truncate() {
return write!(f, "{}", TYPE_HINT_TRUNCATION); return write!(f, "{}", TYPE_HINT_TRUNCATION);
} }
@ -290,7 +286,7 @@ impl HirDisplay for ProjectionTy {
} }
impl HirDisplay for Ty { impl HirDisplay for Ty {
fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result { fn hir_fmt(&self, f: &mut HirFormatter) -> fmt::Result {
if f.should_truncate() { if f.should_truncate() {
return write!(f, "{}", TYPE_HINT_TRUNCATION); return write!(f, "{}", TYPE_HINT_TRUNCATION);
} }
@ -299,7 +295,7 @@ impl HirDisplay for Ty {
Ty::Apply(a_ty) => a_ty.hir_fmt(f)?, Ty::Apply(a_ty) => a_ty.hir_fmt(f)?,
Ty::Projection(p_ty) => p_ty.hir_fmt(f)?, Ty::Projection(p_ty) => p_ty.hir_fmt(f)?,
Ty::Placeholder(id) => { Ty::Placeholder(id) => {
let generics = generics(f.db, id.parent); let generics = generics(f.db.upcast(), id.parent);
let param_data = &generics.params.types[id.local_id]; let param_data = &generics.params.types[id.local_id];
match param_data.provenance { match param_data.provenance {
TypeParamProvenance::TypeParamList | TypeParamProvenance::TraitSelf => { TypeParamProvenance::TypeParamList | TypeParamProvenance::TraitSelf => {
@ -334,7 +330,7 @@ impl HirDisplay for Ty {
fn write_bounds_like_dyn_trait( fn write_bounds_like_dyn_trait(
predicates: &[GenericPredicate], predicates: &[GenericPredicate],
f: &mut HirFormatter<impl HirDatabase>, f: &mut HirFormatter,
) -> fmt::Result { ) -> fmt::Result {
// Note: This code is written to produce nice results (i.e. // Note: This code is written to produce nice results (i.e.
// corresponding to surface Rust) for types that can occur in // corresponding to surface Rust) for types that can occur in
@ -398,7 +394,7 @@ fn write_bounds_like_dyn_trait(
} }
impl TraitRef { impl TraitRef {
fn hir_fmt_ext(&self, f: &mut HirFormatter<impl HirDatabase>, use_as: bool) -> fmt::Result { fn hir_fmt_ext(&self, f: &mut HirFormatter, use_as: bool) -> fmt::Result {
if f.should_truncate() { if f.should_truncate() {
return write!(f, "{}", TYPE_HINT_TRUNCATION); return write!(f, "{}", TYPE_HINT_TRUNCATION);
} }
@ -420,19 +416,19 @@ impl TraitRef {
} }
impl HirDisplay for TraitRef { impl HirDisplay for TraitRef {
fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result { fn hir_fmt(&self, f: &mut HirFormatter) -> fmt::Result {
self.hir_fmt_ext(f, false) self.hir_fmt_ext(f, false)
} }
} }
impl HirDisplay for &GenericPredicate { impl HirDisplay for &GenericPredicate {
fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result { fn hir_fmt(&self, f: &mut HirFormatter) -> fmt::Result {
HirDisplay::hir_fmt(*self, f) HirDisplay::hir_fmt(*self, f)
} }
} }
impl HirDisplay for GenericPredicate { impl HirDisplay for GenericPredicate {
fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result { fn hir_fmt(&self, f: &mut HirFormatter) -> fmt::Result {
if f.should_truncate() { if f.should_truncate() {
return write!(f, "{}", TYPE_HINT_TRUNCATION); return write!(f, "{}", TYPE_HINT_TRUNCATION);
} }
@ -456,7 +452,7 @@ impl HirDisplay for GenericPredicate {
} }
impl HirDisplay for Obligation { impl HirDisplay for Obligation {
fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result { fn hir_fmt(&self, f: &mut HirFormatter) -> fmt::Result {
match self { match self {
Obligation::Trait(tr) => write!(f, "Implements({})", tr.display(f.db)), Obligation::Trait(tr) => write!(f, "Implements({})", tr.display(f.db)),
Obligation::Projection(proj) => write!( Obligation::Projection(proj) => write!(

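HirFormatter and HirDisplayWrapper lose their `DB` parameter and store `&dyn HirDatabase`, which also lets `HirDisplay::hir_fmt` take a plain `&mut HirFormatter`. A runnable miniature of that wrapper pattern; `Database`, `Ty` and the formatting logic are stand-ins, not the real rust-analyzer types:

    use std::fmt;

    trait Database {
        fn type_name(&self, id: u32) -> String;
    }

    struct HirFormatter<'a, 'b> {
        db: &'a dyn Database,
        fmt: &'a mut fmt::Formatter<'b>,
    }

    trait HirDisplay {
        fn hir_fmt(&self, f: &mut HirFormatter) -> fmt::Result;

        fn display<'a>(&'a self, db: &'a dyn Database) -> HirDisplayWrapper<'a, Self>
        where
            Self: Sized,
        {
            HirDisplayWrapper(db, self)
        }
    }

    // No `DB` parameter left: the wrapper only carries the trait object.
    struct HirDisplayWrapper<'a, T>(&'a dyn Database, &'a T);

    impl<'a, T: HirDisplay> fmt::Display for HirDisplayWrapper<'a, T> {
        fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
            self.1.hir_fmt(&mut HirFormatter { db: self.0, fmt: f })
        }
    }

    struct Ty(u32);

    impl HirDisplay for Ty {
        fn hir_fmt(&self, f: &mut HirFormatter) -> fmt::Result {
            let name = f.db.type_name(self.0);
            write!(f.fmt, "{}", name)
        }
    }

    struct Db;

    impl Database for Db {
        fn type_name(&self, id: u32) -> String {
            format!("T{}", id)
        }
    }

    fn main() {
        println!("{}", Ty(1).display(&Db)); // prints: T1
    }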

@ -46,7 +46,7 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
ExprValidator { func, infer, sink } ExprValidator { func, infer, sink }
} }
pub fn validate_body(&mut self, db: &impl HirDatabase) { pub fn validate_body(&mut self, db: &dyn HirDatabase) {
let body = db.body(self.func.into()); let body = db.body(self.func.into());
for e in body.exprs.iter() { for e in body.exprs.iter() {
@ -67,7 +67,7 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
_path: &Option<Path>, _path: &Option<Path>,
fields: &[RecordLitField], fields: &[RecordLitField],
spread: Option<ExprId>, spread: Option<ExprId>,
db: &impl HirDatabase, db: &dyn HirDatabase,
) { ) {
if spread.is_some() { if spread.is_some() {
return; return;
@ -80,7 +80,7 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
return; return;
} }
let variant_data = variant_data(db, variant_def); let variant_data = variant_data(db.upcast(), variant_def);
let lit_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect(); let lit_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect();
let missed_fields: Vec<Name> = variant_data let missed_fields: Vec<Name> = variant_data
@ -102,7 +102,7 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
if let Ok(source_ptr) = source_map.expr_syntax(id) { if let Ok(source_ptr) = source_map.expr_syntax(id) {
if let Some(expr) = source_ptr.value.left() { if let Some(expr) = source_ptr.value.left() {
let root = source_ptr.file_syntax(db); let root = source_ptr.file_syntax(db.upcast());
if let ast::Expr::RecordLit(record_lit) = expr.to_node(&root) { if let ast::Expr::RecordLit(record_lit) = expr.to_node(&root) {
if let Some(field_list) = record_lit.record_field_list() { if let Some(field_list) = record_lit.record_field_list() {
self.sink.push(MissingFields { self.sink.push(MissingFields {
@ -116,12 +116,7 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
} }
} }
fn validate_results_in_tail_expr( fn validate_results_in_tail_expr(&mut self, body_id: ExprId, id: ExprId, db: &dyn HirDatabase) {
&mut self,
body_id: ExprId,
id: ExprId,
db: &impl HirDatabase,
) {
// the mismatch will be on the whole block currently // the mismatch will be on the whole block currently
let mismatch = match self.infer.type_mismatch_for_expr(body_id) { let mismatch = match self.infer.type_mismatch_for_expr(body_id) {
Some(m) => m, Some(m) => m,
@ -130,8 +125,8 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
let std_result_path = path![std::result::Result]; let std_result_path = path![std::result::Result];
let resolver = self.func.resolver(db); let resolver = self.func.resolver(db.upcast());
let std_result_enum = match resolver.resolve_known_enum(db, &std_result_path) { let std_result_enum = match resolver.resolve_known_enum(db.upcast(), &std_result_path) {
Some(it) => it, Some(it) => it,
_ => return, _ => return,
}; };


@ -63,9 +63,9 @@ mod pat;
mod coerce; mod coerce;
/// The entry point of type inference. /// The entry point of type inference.
pub(crate) fn infer_query(db: &impl HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> { pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> {
let _p = profile("infer_query"); let _p = profile("infer_query");
let resolver = def.resolver(db); let resolver = def.resolver(db.upcast());
let mut ctx = InferenceContext::new(db, def, resolver); let mut ctx = InferenceContext::new(db, def, resolver);
match def { match def {
@ -164,7 +164,7 @@ impl InferenceResult {
} }
pub fn add_diagnostics( pub fn add_diagnostics(
&self, &self,
db: &impl HirDatabase, db: &dyn HirDatabase,
owner: FunctionId, owner: FunctionId,
sink: &mut DiagnosticSink, sink: &mut DiagnosticSink,
) { ) {
@ -190,8 +190,8 @@ impl Index<PatId> for InferenceResult {
/// The inference context contains all information needed during type inference. /// The inference context contains all information needed during type inference.
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
struct InferenceContext<'a, D: HirDatabase> { struct InferenceContext<'a> {
db: &'a D, db: &'a dyn HirDatabase,
owner: DefWithBodyId, owner: DefWithBodyId,
body: Arc<Body>, body: Arc<Body>,
resolver: Resolver, resolver: Resolver,
@ -208,8 +208,8 @@ struct InferenceContext<'a, D: HirDatabase> {
return_ty: Ty, return_ty: Ty,
} }
impl<'a, D: HirDatabase> InferenceContext<'a, D> { impl<'a> InferenceContext<'a> {
fn new(db: &'a D, owner: DefWithBodyId, resolver: Resolver) -> Self { fn new(db: &'a dyn HirDatabase, owner: DefWithBodyId, resolver: Resolver) -> Self {
InferenceContext { InferenceContext {
result: InferenceResult::default(), result: InferenceResult::default(),
table: unify::InferenceTable::new(), table: unify::InferenceTable::new(),
@ -425,7 +425,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver); let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
// FIXME: this should resolve assoc items as well, see this example: // FIXME: this should resolve assoc items as well, see this example:
// https://play.rust-lang.org/?gist=087992e9e22495446c01c0d4e2d69521 // https://play.rust-lang.org/?gist=087992e9e22495446c01c0d4e2d69521
return match resolver.resolve_path_in_type_ns_fully(self.db, path.mod_path()) { return match resolver.resolve_path_in_type_ns_fully(self.db.upcast(), path.mod_path()) {
Some(TypeNs::AdtId(AdtId::StructId(strukt))) => { Some(TypeNs::AdtId(AdtId::StructId(strukt))) => {
let substs = Ty::substs_from_path(&ctx, path, strukt.into()); let substs = Ty::substs_from_path(&ctx, path, strukt.into());
let ty = self.db.ty(strukt.into()); let ty = self.db.ty(strukt.into());
@ -439,7 +439,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
(ty, Some(var.into())) (ty, Some(var.into()))
} }
Some(TypeNs::SelfType(impl_id)) => { Some(TypeNs::SelfType(impl_id)) => {
let generics = crate::utils::generics(self.db, impl_id.into()); let generics = crate::utils::generics(self.db.upcast(), impl_id.into());
let substs = Substs::type_params_for_generics(&generics); let substs = Substs::type_params_for_generics(&generics);
let ty = self.db.impl_self_ty(impl_id).subst(&substs); let ty = self.db.impl_self_ty(impl_id).subst(&substs);
let variant = ty_variant(&ty); let variant = ty_variant(&ty);
@ -500,13 +500,13 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
fn resolve_into_iter_item(&self) -> Option<TypeAliasId> { fn resolve_into_iter_item(&self) -> Option<TypeAliasId> {
let path = path![std::iter::IntoIterator]; let path = path![std::iter::IntoIterator];
let trait_ = self.resolver.resolve_known_trait(self.db, &path)?; let trait_ = self.resolver.resolve_known_trait(self.db.upcast(), &path)?;
self.db.trait_data(trait_).associated_type_by_name(&name![Item]) self.db.trait_data(trait_).associated_type_by_name(&name![Item])
} }
fn resolve_ops_try_ok(&self) -> Option<TypeAliasId> { fn resolve_ops_try_ok(&self) -> Option<TypeAliasId> {
let path = path![std::ops::Try]; let path = path![std::ops::Try];
let trait_ = self.resolver.resolve_known_trait(self.db, &path)?; let trait_ = self.resolver.resolve_known_trait(self.db.upcast(), &path)?;
self.db.trait_data(trait_).associated_type_by_name(&name![Ok]) self.db.trait_data(trait_).associated_type_by_name(&name![Ok])
} }
@ -532,37 +532,37 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
fn resolve_range_full(&self) -> Option<AdtId> { fn resolve_range_full(&self) -> Option<AdtId> {
let path = path![std::ops::RangeFull]; let path = path![std::ops::RangeFull];
let struct_ = self.resolver.resolve_known_struct(self.db, &path)?; let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?;
Some(struct_.into()) Some(struct_.into())
} }
fn resolve_range(&self) -> Option<AdtId> { fn resolve_range(&self) -> Option<AdtId> {
let path = path![std::ops::Range]; let path = path![std::ops::Range];
let struct_ = self.resolver.resolve_known_struct(self.db, &path)?; let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?;
Some(struct_.into()) Some(struct_.into())
} }
fn resolve_range_inclusive(&self) -> Option<AdtId> { fn resolve_range_inclusive(&self) -> Option<AdtId> {
let path = path![std::ops::RangeInclusive]; let path = path![std::ops::RangeInclusive];
let struct_ = self.resolver.resolve_known_struct(self.db, &path)?; let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?;
Some(struct_.into()) Some(struct_.into())
} }
fn resolve_range_from(&self) -> Option<AdtId> { fn resolve_range_from(&self) -> Option<AdtId> {
let path = path![std::ops::RangeFrom]; let path = path![std::ops::RangeFrom];
let struct_ = self.resolver.resolve_known_struct(self.db, &path)?; let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?;
Some(struct_.into()) Some(struct_.into())
} }
fn resolve_range_to(&self) -> Option<AdtId> { fn resolve_range_to(&self) -> Option<AdtId> {
let path = path![std::ops::RangeTo]; let path = path![std::ops::RangeTo];
let struct_ = self.resolver.resolve_known_struct(self.db, &path)?; let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?;
Some(struct_.into()) Some(struct_.into())
} }
fn resolve_range_to_inclusive(&self) -> Option<AdtId> { fn resolve_range_to_inclusive(&self) -> Option<AdtId> {
let path = path![std::ops::RangeToInclusive]; let path = path![std::ops::RangeToInclusive];
let struct_ = self.resolver.resolve_known_struct(self.db, &path)?; let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?;
Some(struct_.into()) Some(struct_.into())
} }
@ -676,13 +676,13 @@ mod diagnostics {
impl InferenceDiagnostic { impl InferenceDiagnostic {
pub(super) fn add_to( pub(super) fn add_to(
&self, &self,
db: &impl HirDatabase, db: &dyn HirDatabase,
owner: FunctionId, owner: FunctionId,
sink: &mut DiagnosticSink, sink: &mut DiagnosticSink,
) { ) {
match self { match self {
InferenceDiagnostic::NoSuchField { expr, field } => { InferenceDiagnostic::NoSuchField { expr, field } => {
let file = owner.lookup(db).source(db).file_id; let file = owner.lookup(db.upcast()).source(db.upcast()).file_id;
let (_, source_map) = db.body_with_source_map(owner.into()); let (_, source_map) = db.body_with_source_map(owner.into());
let field = source_map.field_syntax(*expr, *field); let field = source_map.field_syntax(*expr, *field);
sink.push(NoSuchField { file, field }) sink.push(NoSuchField { file, field })

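InferenceContext<'a> now stores `db: &'a dyn HirDatabase`, so the many `impl<'a, D: HirDatabase> InferenceContext<'a, D>` blocks collapse into a single non-generic `impl<'a> InferenceContext<'a>`. Minimal sketch of a context struct built around a borrowed trait object; all names and the "inference" itself are illustrative:

    trait Database {
        fn body(&self, def: u32) -> Vec<u32>;
    }

    struct InferenceContext<'a> {
        db: &'a dyn Database,
        owner: u32,
        result: Vec<u32>,
    }

    impl<'a> InferenceContext<'a> {
        fn new(db: &'a dyn Database, owner: u32) -> Self {
            InferenceContext { db, owner, result: Vec::new() }
        }

        // Every method that used to live under `impl<'a, D: HirDatabase>` now
        // sits in one non-generic impl block.
        fn infer(&mut self) {
            for expr in self.db.body(self.owner) {
                self.result.push(expr * 2);
            }
        }
    }

    struct Db;

    impl Database for Db {
        fn body(&self, _def: u32) -> Vec<u32> {
            vec![1, 2, 3]
        }
    }

    fn main() {
        let db = Db;
        let mut ctx = InferenceContext::new(&db, 0);
        ctx.infer();
        assert_eq!(ctx.result, vec![2, 4, 6]);
    }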

@ -7,13 +7,11 @@
use hir_def::{lang_item::LangItemTarget, type_ref::Mutability}; use hir_def::{lang_item::LangItemTarget, type_ref::Mutability};
use test_utils::tested_by; use test_utils::tested_by;
use crate::{ use crate::{autoderef, traits::Solution, Obligation, Substs, TraitRef, Ty, TypeCtor};
autoderef, db::HirDatabase, traits::Solution, Obligation, Substs, TraitRef, Ty, TypeCtor,
};
use super::{unify::TypeVarValue, InEnvironment, InferTy, InferenceContext}; use super::{unify::TypeVarValue, InEnvironment, InferTy, InferenceContext};
impl<'a, D: HirDatabase> InferenceContext<'a, D> { impl<'a> InferenceContext<'a> {
/// Unify two types, but may coerce the first one to the second one /// Unify two types, but may coerce the first one to the second one
/// using "implicit coercion rules" if needed. /// using "implicit coercion rules" if needed.
pub(super) fn coerce(&mut self, from_ty: &Ty, to_ty: &Ty) -> bool { pub(super) fn coerce(&mut self, from_ty: &Ty, to_ty: &Ty) -> bool {
@ -126,7 +124,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
_ => return None, _ => return None,
}; };
let generic_params = crate::utils::generics(self.db, coerce_unsized_trait.into()); let generic_params = crate::utils::generics(self.db.upcast(), coerce_unsized_trait.into());
if generic_params.len() != 2 { if generic_params.len() != 2 {
// The CoerceUnsized trait should have two generic params: Self and T. // The CoerceUnsized trait should have two generic params: Self and T.
return None; return None;


@ -14,9 +14,7 @@ use hir_expand::name::Name;
use ra_syntax::ast::RangeOp; use ra_syntax::ast::RangeOp;
use crate::{ use crate::{
autoderef, autoderef, method_resolution, op,
db::HirDatabase,
method_resolution, op,
traits::InEnvironment, traits::InEnvironment,
utils::{generics, variant_data, Generics}, utils::{generics, variant_data, Generics},
ApplicationTy, Binders, CallableDef, InferTy, IntTy, Mutability, Obligation, Substs, TraitRef, ApplicationTy, Binders, CallableDef, InferTy, IntTy, Mutability, Obligation, Substs, TraitRef,
@ -25,7 +23,7 @@ use crate::{
use super::{BindingMode, Expectation, InferenceContext, InferenceDiagnostic, TypeMismatch}; use super::{BindingMode, Expectation, InferenceContext, InferenceDiagnostic, TypeMismatch};
impl<'a, D: HirDatabase> InferenceContext<'a, D> { impl<'a> InferenceContext<'a> {
pub(super) fn infer_expr(&mut self, tgt_expr: ExprId, expected: &Expectation) -> Ty { pub(super) fn infer_expr(&mut self, tgt_expr: ExprId, expected: &Expectation) -> Ty {
let ty = self.infer_expr_inner(tgt_expr, expected); let ty = self.infer_expr_inner(tgt_expr, expected);
let could_unify = self.unify(&ty, &expected.ty); let could_unify = self.unify(&ty, &expected.ty);
@ -184,7 +182,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
} }
Expr::Path(p) => { Expr::Path(p) => {
// FIXME this could be more efficient... // FIXME this could be more efficient...
let resolver = resolver_for_expr(self.db, self.owner, tgt_expr); let resolver = resolver_for_expr(self.db.upcast(), self.owner, tgt_expr);
self.infer_path(&resolver, p, tgt_expr.into()).unwrap_or(Ty::Unknown) self.infer_path(&resolver, p, tgt_expr.into()).unwrap_or(Ty::Unknown)
} }
Expr::Continue => Ty::simple(TypeCtor::Never), Expr::Continue => Ty::simple(TypeCtor::Never),
@ -214,7 +212,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
let substs = ty.substs().unwrap_or_else(Substs::empty); let substs = ty.substs().unwrap_or_else(Substs::empty);
let field_types = def_id.map(|it| self.db.field_types(it)).unwrap_or_default(); let field_types = def_id.map(|it| self.db.field_types(it)).unwrap_or_default();
let variant_data = def_id.map(|it| variant_data(self.db, it)); let variant_data = def_id.map(|it| variant_data(self.db.upcast(), it));
for (field_idx, field) in fields.iter().enumerate() { for (field_idx, field) in fields.iter().enumerate() {
let field_def = let field_def =
variant_data.as_ref().and_then(|it| match it.field(&field.name) { variant_data.as_ref().and_then(|it| match it.field(&field.name) {
@ -579,7 +577,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
let receiver_ty = self.infer_expr(receiver, &Expectation::none()); let receiver_ty = self.infer_expr(receiver, &Expectation::none());
let canonicalized_receiver = self.canonicalizer().canonicalize_ty(receiver_ty.clone()); let canonicalized_receiver = self.canonicalizer().canonicalize_ty(receiver_ty.clone());
let traits_in_scope = self.resolver.traits_in_scope(self.db); let traits_in_scope = self.resolver.traits_in_scope(self.db.upcast());
let resolved = self.resolver.krate().and_then(|krate| { let resolved = self.resolver.krate().and_then(|krate| {
method_resolution::lookup_method( method_resolution::lookup_method(
@ -595,7 +593,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
Some((ty, func)) => { Some((ty, func)) => {
let ty = canonicalized_receiver.decanonicalize_ty(ty); let ty = canonicalized_receiver.decanonicalize_ty(ty);
self.write_method_resolution(tgt_expr, func); self.write_method_resolution(tgt_expr, func);
(ty, self.db.value_ty(func.into()), Some(generics(self.db, func.into()))) (ty, self.db.value_ty(func.into()), Some(generics(self.db.upcast(), func.into())))
} }
None => (receiver_ty, Binders::new(0, Ty::Unknown), None), None => (receiver_ty, Binders::new(0, Ty::Unknown), None),
}; };
@ -703,10 +701,13 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
// add obligation for trait implementation, if this is a trait method // add obligation for trait implementation, if this is a trait method
match def { match def {
CallableDef::FunctionId(f) => { CallableDef::FunctionId(f) => {
if let AssocContainerId::TraitId(trait_) = f.lookup(self.db).container { if let AssocContainerId::TraitId(trait_) =
f.lookup(self.db.upcast()).container
{
// construct a TraitDef // construct a TraitDef
let substs = let substs = a_ty
a_ty.parameters.prefix(generics(self.db, trait_.into()).len()); .parameters
.prefix(generics(self.db.upcast(), trait_.into()).len());
self.obligations.push(Obligation::Trait(TraitRef { trait_, substs })); self.obligations.push(Obligation::Trait(TraitRef { trait_, substs }));
} }
} }


@ -12,9 +12,9 @@ use hir_expand::name::Name;
use test_utils::tested_by; use test_utils::tested_by;
use super::{BindingMode, InferenceContext}; use super::{BindingMode, InferenceContext};
use crate::{db::HirDatabase, utils::variant_data, Substs, Ty, TypeCtor}; use crate::{utils::variant_data, Substs, Ty, TypeCtor};
impl<'a, D: HirDatabase> InferenceContext<'a, D> { impl<'a> InferenceContext<'a> {
fn infer_tuple_struct_pat( fn infer_tuple_struct_pat(
&mut self, &mut self,
path: Option<&Path>, path: Option<&Path>,
@ -23,7 +23,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
default_bm: BindingMode, default_bm: BindingMode,
) -> Ty { ) -> Ty {
let (ty, def) = self.resolve_variant(path); let (ty, def) = self.resolve_variant(path);
let var_data = def.map(|it| variant_data(self.db, it)); let var_data = def.map(|it| variant_data(self.db.upcast(), it));
self.unify(&ty, expected); self.unify(&ty, expected);
let substs = ty.substs().unwrap_or_else(Substs::empty); let substs = ty.substs().unwrap_or_else(Substs::empty);
@ -51,7 +51,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
id: PatId, id: PatId,
) -> Ty { ) -> Ty {
let (ty, def) = self.resolve_variant(path); let (ty, def) = self.resolve_variant(path);
let var_data = def.map(|it| variant_data(self.db, it)); let var_data = def.map(|it| variant_data(self.db.upcast(), it));
if let Some(variant) = def { if let Some(variant) = def {
self.write_variant_resolution(id.into(), variant); self.write_variant_resolution(id.into(), variant);
} }


@ -9,11 +9,11 @@ use hir_def::{
}; };
use hir_expand::name::Name; use hir_expand::name::Name;
use crate::{db::HirDatabase, method_resolution, Substs, Ty, ValueTyDefId}; use crate::{method_resolution, Substs, Ty, ValueTyDefId};
use super::{ExprOrPatId, InferenceContext, TraitRef}; use super::{ExprOrPatId, InferenceContext, TraitRef};
impl<'a, D: HirDatabase> InferenceContext<'a, D> { impl<'a> InferenceContext<'a> {
pub(super) fn infer_path( pub(super) fn infer_path(
&mut self, &mut self,
resolver: &Resolver, resolver: &Resolver,
@ -47,7 +47,8 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
id, id,
)? )?
} else { } else {
let value_or_partial = resolver.resolve_path_in_value_ns(self.db, path.mod_path())?; let value_or_partial =
resolver.resolve_path_in_value_ns(self.db.upcast(), path.mod_path())?;
match value_or_partial { match value_or_partial {
ResolveValueResult::ValueNs(it) => (it, None), ResolveValueResult::ValueNs(it) => (it, None),
@ -192,7 +193,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
let canonical_ty = self.canonicalizer().canonicalize_ty(ty.clone()); let canonical_ty = self.canonicalizer().canonicalize_ty(ty.clone());
let krate = self.resolver.krate()?; let krate = self.resolver.krate()?;
let traits_in_scope = self.resolver.traits_in_scope(self.db); let traits_in_scope = self.resolver.traits_in_scope(self.db.upcast());
method_resolution::iterate_method_candidates( method_resolution::iterate_method_candidates(
&canonical_ty.value, &canonical_ty.value,
@ -205,9 +206,11 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
move |_ty, item| { move |_ty, item| {
let (def, container) = match item { let (def, container) = match item {
AssocItemId::FunctionId(f) => { AssocItemId::FunctionId(f) => {
(ValueNs::FunctionId(f), f.lookup(self.db).container) (ValueNs::FunctionId(f), f.lookup(self.db.upcast()).container)
}
AssocItemId::ConstId(c) => {
(ValueNs::ConstId(c), c.lookup(self.db.upcast()).container)
} }
AssocItemId::ConstId(c) => (ValueNs::ConstId(c), c.lookup(self.db).container),
AssocItemId::TypeAliasId(_) => unreachable!(), AssocItemId::TypeAliasId(_) => unreachable!(),
}; };
let substs = match container { let substs = match container {


@ -7,10 +7,10 @@ use ena::unify::{InPlaceUnificationTable, NoError, UnifyKey, UnifyValue};
use test_utils::tested_by; use test_utils::tested_by;
use super::{InferenceContext, Obligation}; use super::{InferenceContext, Obligation};
use crate::{db::HirDatabase, Canonical, InEnvironment, InferTy, Substs, Ty, TypeCtor, TypeWalk}; use crate::{Canonical, InEnvironment, InferTy, Substs, Ty, TypeCtor, TypeWalk};
impl<'a, D: HirDatabase> InferenceContext<'a, D> { impl<'a> InferenceContext<'a> {
pub(super) fn canonicalizer<'b>(&'b mut self) -> Canonicalizer<'a, 'b, D> pub(super) fn canonicalizer<'b>(&'b mut self) -> Canonicalizer<'a, 'b>
where where
'a: 'b, 'a: 'b,
{ {
@ -18,11 +18,11 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
} }
} }
pub(super) struct Canonicalizer<'a, 'b, D: HirDatabase> pub(super) struct Canonicalizer<'a, 'b>
where where
'a: 'b, 'a: 'b,
{ {
ctx: &'b mut InferenceContext<'a, D>, ctx: &'b mut InferenceContext<'a>,
free_vars: Vec<InferTy>, free_vars: Vec<InferTy>,
/// A stack of type variables that is used to detect recursive types (which /// A stack of type variables that is used to detect recursive types (which
/// are an error, but we need to protect against them to avoid stack /// are an error, but we need to protect against them to avoid stack
@ -35,7 +35,7 @@ pub(super) struct Canonicalized<T> {
free_vars: Vec<InferTy>, free_vars: Vec<InferTy>,
} }
impl<'a, 'b, D: HirDatabase> Canonicalizer<'a, 'b, D> impl<'a, 'b> Canonicalizer<'a, 'b>
where where
'a: 'b, 'a: 'b,
{ {
@ -123,11 +123,7 @@ impl<T> Canonicalized<T> {
ty ty
} }
pub fn apply_solution( pub fn apply_solution(&self, ctx: &mut InferenceContext<'_>, solution: Canonical<Vec<Ty>>) {
&self,
ctx: &mut InferenceContext<'_, impl HirDatabase>,
solution: Canonical<Vec<Ty>>,
) {
// the solution may contain new variables, which we need to convert to new inference vars // the solution may contain new variables, which we need to convert to new inference vars
let new_vars = Substs((0..solution.num_vars).map(|_| ctx.table.new_type_var()).collect()); let new_vars = Substs((0..solution.num_vars).map(|_| ctx.table.new_type_var()).collect());
for (i, ty) in solution.value.into_iter().enumerate() { for (i, ty) in solution.value.into_iter().enumerate() {

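Canonicalizer keeps its two lifetimes but drops the `D` parameter: it simply borrows the already non-generic InferenceContext mutably, with `'a: 'b` relating the database borrow to the shorter context borrow. Sketch of that nesting with stand-in types and a toy canonicalization step:

    trait Database {
        fn lookup(&self, var: u32) -> Option<u32>;
    }

    struct InferenceContext<'a> {
        db: &'a dyn Database,
        free_vars: Vec<u32>,
    }

    struct Canonicalizer<'a, 'b>
    where
        'a: 'b,
    {
        ctx: &'b mut InferenceContext<'a>,
    }

    impl<'a> InferenceContext<'a> {
        fn canonicalizer<'b>(&'b mut self) -> Canonicalizer<'a, 'b>
        where
            'a: 'b,
        {
            Canonicalizer { ctx: self }
        }
    }

    impl<'a, 'b> Canonicalizer<'a, 'b>
    where
        'a: 'b,
    {
        fn canonicalize(&mut self, var: u32) -> u32 {
            match self.ctx.db.lookup(var) {
                Some(known) => known,
                None => {
                    // Unknown variables are recorded on the borrowed context.
                    self.ctx.free_vars.push(var);
                    (self.ctx.free_vars.len() - 1) as u32
                }
            }
        }
    }

    struct Db;

    impl Database for Db {
        fn lookup(&self, var: u32) -> Option<u32> {
            if var < 10 { Some(var) } else { None }
        }
    }

    fn main() {
        let db = Db;
        let mut ctx = InferenceContext { db: &db, free_vars: Vec::new() };
        let mut canonicalizer = ctx.canonicalizer();
        assert_eq!(canonicalizer.canonicalize(3), 3);
        assert_eq!(canonicalizer.canonicalize(42), 0);
    }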

@ -152,7 +152,7 @@ pub struct TypeCtorId(salsa::InternId);
impl_intern_key!(TypeCtorId); impl_intern_key!(TypeCtorId);
impl TypeCtor { impl TypeCtor {
pub fn num_ty_params(self, db: &impl HirDatabase) -> usize { pub fn num_ty_params(self, db: &dyn HirDatabase) -> usize {
match self { match self {
TypeCtor::Bool TypeCtor::Bool
| TypeCtor::Char | TypeCtor::Char
@ -167,15 +167,15 @@ impl TypeCtor {
| TypeCtor::Closure { .. } // 1 param representing the signature of the closure | TypeCtor::Closure { .. } // 1 param representing the signature of the closure
=> 1, => 1,
TypeCtor::Adt(adt) => { TypeCtor::Adt(adt) => {
let generic_params = generics(db, adt.into()); let generic_params = generics(db.upcast(), adt.into());
generic_params.len() generic_params.len()
} }
TypeCtor::FnDef(callable) => { TypeCtor::FnDef(callable) => {
let generic_params = generics(db, callable.into()); let generic_params = generics(db.upcast(), callable.into());
generic_params.len() generic_params.len()
} }
TypeCtor::AssociatedType(type_alias) => { TypeCtor::AssociatedType(type_alias) => {
let generic_params = generics(db, type_alias.into()); let generic_params = generics(db.upcast(), type_alias.into());
generic_params.len() generic_params.len()
} }
TypeCtor::FnPtr { num_args } => num_args as usize + 1, TypeCtor::FnPtr { num_args } => num_args as usize + 1,
@ -183,7 +183,7 @@ impl TypeCtor {
} }
} }
pub fn krate(self, db: &impl HirDatabase) -> Option<CrateId> { pub fn krate(self, db: &dyn HirDatabase) -> Option<CrateId> {
match self { match self {
TypeCtor::Bool TypeCtor::Bool
| TypeCtor::Char | TypeCtor::Char
@ -199,9 +199,11 @@ impl TypeCtor {
| TypeCtor::Tuple { .. } => None, | TypeCtor::Tuple { .. } => None,
// Closure's krate is irrelevant for coherence I would think? // Closure's krate is irrelevant for coherence I would think?
TypeCtor::Closure { .. } => None, TypeCtor::Closure { .. } => None,
TypeCtor::Adt(adt) => Some(adt.module(db).krate), TypeCtor::Adt(adt) => Some(adt.module(db.upcast()).krate),
TypeCtor::FnDef(callable) => Some(callable.krate(db)), TypeCtor::FnDef(callable) => Some(callable.krate(db)),
TypeCtor::AssociatedType(type_alias) => Some(type_alias.lookup(db).module(db).krate), TypeCtor::AssociatedType(type_alias) => {
Some(type_alias.lookup(db.upcast()).module(db.upcast()).krate)
}
} }
} }
@ -246,12 +248,12 @@ pub struct ProjectionTy {
} }
impl ProjectionTy { impl ProjectionTy {
pub fn trait_ref(&self, db: &impl HirDatabase) -> TraitRef { pub fn trait_ref(&self, db: &dyn HirDatabase) -> TraitRef {
TraitRef { trait_: self.trait_(db), substs: self.parameters.clone() } TraitRef { trait_: self.trait_(db), substs: self.parameters.clone() }
} }
fn trait_(&self, db: &impl HirDatabase) -> TraitId { fn trait_(&self, db: &dyn HirDatabase) -> TraitId {
match self.associated_ty.lookup(db).container { match self.associated_ty.lookup(db.upcast()).container {
AssocContainerId::TraitId(it) => it, AssocContainerId::TraitId(it) => it,
_ => panic!("projection ty without parent trait"), _ => panic!("projection ty without parent trait"),
} }
@ -372,8 +374,8 @@ impl Substs {
} }
/// Return Substs that replace each parameter by itself (i.e. `Ty::Param`). /// Return Substs that replace each parameter by itself (i.e. `Ty::Param`).
pub fn type_params(db: &impl HirDatabase, def: impl Into<GenericDefId>) -> Substs { pub fn type_params(db: &dyn HirDatabase, def: impl Into<GenericDefId>) -> Substs {
let params = generics(db, def.into()); let params = generics(db.upcast(), def.into());
Substs::type_params_for_generics(&params) Substs::type_params_for_generics(&params)
} }
@ -382,9 +384,9 @@ impl Substs {
Substs(generic_params.iter().enumerate().map(|(idx, _)| Ty::Bound(idx as u32)).collect()) Substs(generic_params.iter().enumerate().map(|(idx, _)| Ty::Bound(idx as u32)).collect())
} }
pub fn build_for_def(db: &impl HirDatabase, def: impl Into<GenericDefId>) -> SubstsBuilder { pub fn build_for_def(db: &dyn HirDatabase, def: impl Into<GenericDefId>) -> SubstsBuilder {
let def = def.into(); let def = def.into();
let params = generics(db, def); let params = generics(db.upcast(), def);
let param_count = params.len(); let param_count = params.len();
Substs::builder(param_count) Substs::builder(param_count)
} }
@ -393,7 +395,7 @@ impl Substs {
Substs::builder(generic_params.len()) Substs::builder(generic_params.len())
} }
pub fn build_for_type_ctor(db: &impl HirDatabase, type_ctor: TypeCtor) -> SubstsBuilder { pub fn build_for_type_ctor(db: &dyn HirDatabase, type_ctor: TypeCtor) -> SubstsBuilder {
Substs::builder(type_ctor.num_ty_params(db)) Substs::builder(type_ctor.num_ty_params(db))
} }
@ -538,7 +540,7 @@ impl GenericPredicate {
} }
} }
pub fn trait_ref(&self, db: &impl HirDatabase) -> Option<TraitRef> { pub fn trait_ref(&self, db: &dyn HirDatabase) -> Option<TraitRef> {
match self { match self {
GenericPredicate::Implemented(tr) => Some(tr.clone()), GenericPredicate::Implemented(tr) => Some(tr.clone()),
GenericPredicate::Projection(proj) => Some(proj.projection_ty.trait_ref(db)), GenericPredicate::Projection(proj) => Some(proj.projection_ty.trait_ref(db)),
@ -693,7 +695,7 @@ impl Ty {
} }
} }
fn callable_sig(&self, db: &impl HirDatabase) -> Option<FnSig> { fn callable_sig(&self, db: &dyn HirDatabase) -> Option<FnSig> {
match self { match self {
Ty::Apply(a_ty) => match a_ty.ctor { Ty::Apply(a_ty) => match a_ty.ctor {
TypeCtor::FnPtr { .. } => Some(FnSig::from_fn_ptr_substs(&a_ty.parameters)), TypeCtor::FnPtr { .. } => Some(FnSig::from_fn_ptr_substs(&a_ty.parameters)),
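Several hunks in this file replace `generics(db, ...)` with `generics(db.upcast(), ...)`: those helpers live at the `hir_def` layer and want a `&dyn DefDatabase`, and stable Rust does not implicitly coerce `&dyn HirDatabase` to `&dyn DefDatabase`, so an explicit upcast hook is threaded through. A hedged sketch of that plumbing with simplified stand-in traits; the exact `Upcast<dyn DefDatabase>` supertrait bound on `HirDatabase` is assumed here, it is not shown in this file.

// Simplified stand-ins, not the real rust-analyzer database traits.
trait Upcast<T: ?Sized> {
    fn upcast(&self) -> &T;
}

trait DefDb {
    fn item_name(&self, id: u32) -> String;
}

// Assumption: the HIR-level trait requires an upcast to its base trait object.
trait HirDb: DefDb + Upcast<dyn DefDb> {
    fn type_of(&self, id: u32) -> String;
}

// A helper that only needs the lower layer (compare `generics(db.upcast(), ..)` above).
fn generics(db: &dyn DefDb, id: u32) -> String {
    db.item_name(id)
}

// A query that holds the higher-level trait object but calls the helper.
fn num_ty_params(db: &dyn HirDb, id: u32) -> usize {
    generics(db.upcast(), id).len()
}

struct MockDb;

impl DefDb for MockDb {
    fn item_name(&self, _id: u32) -> String {
        "Foo".to_string()
    }
}

impl HirDb for MockDb {
    fn type_of(&self, _id: u32) -> String {
        "Foo".to_string()
    }
}

impl Upcast<dyn DefDb> for MockDb {
    fn upcast(&self) -> &(dyn DefDb + 'static) {
        self // unsized coercion from &MockDb to &dyn DefDb
    }
}

fn main() {
    let db = MockDb;
    assert_eq!(num_ty_params(&db, 0), 3);
}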
View file
@ -34,8 +34,8 @@ use crate::{
}; };
#[derive(Debug)] #[derive(Debug)]
pub struct TyLoweringContext<'a, DB: HirDatabase> { pub struct TyLoweringContext<'a> {
pub db: &'a DB, pub db: &'a dyn HirDatabase,
pub resolver: &'a Resolver, pub resolver: &'a Resolver,
/// Note: Conceptually, it's thinkable that we could be in a location where /// Note: Conceptually, it's thinkable that we could be in a location where
/// some type params should be represented as placeholders, and others /// some type params should be represented as placeholders, and others
@ -46,8 +46,8 @@ pub struct TyLoweringContext<'a, DB: HirDatabase> {
pub impl_trait_counter: std::cell::Cell<u16>, pub impl_trait_counter: std::cell::Cell<u16>,
} }
impl<'a, DB: HirDatabase> TyLoweringContext<'a, DB> { impl<'a> TyLoweringContext<'a> {
pub fn new(db: &'a DB, resolver: &'a Resolver) -> Self { pub fn new(db: &'a dyn HirDatabase, resolver: &'a Resolver) -> Self {
let impl_trait_counter = std::cell::Cell::new(0); let impl_trait_counter = std::cell::Cell::new(0);
let impl_trait_mode = ImplTraitLoweringMode::Disallowed; let impl_trait_mode = ImplTraitLoweringMode::Disallowed;
let type_param_mode = TypeParamLoweringMode::Placeholder; let type_param_mode = TypeParamLoweringMode::Placeholder;
@ -90,13 +90,10 @@ pub enum TypeParamLoweringMode {
} }
impl Ty { impl Ty {
pub fn from_hir(ctx: &TyLoweringContext<'_, impl HirDatabase>, type_ref: &TypeRef) -> Self { pub fn from_hir(ctx: &TyLoweringContext<'_>, type_ref: &TypeRef) -> Self {
Ty::from_hir_ext(ctx, type_ref).0 Ty::from_hir_ext(ctx, type_ref).0
} }
pub fn from_hir_ext( pub fn from_hir_ext(ctx: &TyLoweringContext<'_>, type_ref: &TypeRef) -> (Self, Option<TypeNs>) {
ctx: &TyLoweringContext<'_, impl HirDatabase>,
type_ref: &TypeRef,
) -> (Self, Option<TypeNs>) {
let mut res = None; let mut res = None;
let ty = match type_ref { let ty = match type_ref {
TypeRef::Never => Ty::simple(TypeCtor::Never), TypeRef::Never => Ty::simple(TypeCtor::Never),
@ -157,7 +154,7 @@ impl Ty {
let idx = ctx.impl_trait_counter.get(); let idx = ctx.impl_trait_counter.get();
ctx.impl_trait_counter.set(idx + 1); ctx.impl_trait_counter.set(idx + 1);
if let Some(def) = ctx.resolver.generic_def() { if let Some(def) = ctx.resolver.generic_def() {
let generics = generics(ctx.db, def); let generics = generics(ctx.db.upcast(), def);
let param = generics let param = generics
.iter() .iter()
.filter(|(_, data)| { .filter(|(_, data)| {
@ -175,7 +172,7 @@ impl Ty {
ctx.impl_trait_counter.set(idx + 1); ctx.impl_trait_counter.set(idx + 1);
let (parent_params, self_params, list_params, _impl_trait_params) = let (parent_params, self_params, list_params, _impl_trait_params) =
if let Some(def) = ctx.resolver.generic_def() { if let Some(def) = ctx.resolver.generic_def() {
let generics = generics(ctx.db, def); let generics = generics(ctx.db.upcast(), def);
generics.provenance_split() generics.provenance_split()
} else { } else {
(0, 0, 0, 0) (0, 0, 0, 0)
@ -201,10 +198,7 @@ impl Ty {
/// This is only for `generic_predicates_for_param`, where we can't just /// This is only for `generic_predicates_for_param`, where we can't just
/// lower the self types of the predicates since that could lead to cycles. /// lower the self types of the predicates since that could lead to cycles.
/// So we just check here if the `type_ref` resolves to a generic param, and which. /// So we just check here if the `type_ref` resolves to a generic param, and which.
fn from_hir_only_param( fn from_hir_only_param(ctx: &TyLoweringContext<'_>, type_ref: &TypeRef) -> Option<TypeParamId> {
ctx: &TyLoweringContext<'_, impl HirDatabase>,
type_ref: &TypeRef,
) -> Option<TypeParamId> {
let path = match type_ref { let path = match type_ref {
TypeRef::Path(path) => path, TypeRef::Path(path) => path,
_ => return None, _ => return None,
@ -215,10 +209,11 @@ impl Ty {
if path.segments().len() > 1 { if path.segments().len() > 1 {
return None; return None;
} }
let resolution = match ctx.resolver.resolve_path_in_type_ns(ctx.db, path.mod_path()) { let resolution =
Some((it, None)) => it, match ctx.resolver.resolve_path_in_type_ns(ctx.db.upcast(), path.mod_path()) {
_ => return None, Some((it, None)) => it,
}; _ => return None,
};
if let TypeNs::GenericParam(param_id) = resolution { if let TypeNs::GenericParam(param_id) = resolution {
Some(param_id) Some(param_id)
} else { } else {
@ -227,7 +222,7 @@ impl Ty {
} }
pub(crate) fn from_type_relative_path( pub(crate) fn from_type_relative_path(
ctx: &TyLoweringContext<'_, impl HirDatabase>, ctx: &TyLoweringContext<'_>,
ty: Ty, ty: Ty,
// We need the original resolution to lower `Self::AssocTy` correctly // We need the original resolution to lower `Self::AssocTy` correctly
res: Option<TypeNs>, res: Option<TypeNs>,
@ -246,7 +241,7 @@ impl Ty {
} }
pub(crate) fn from_partly_resolved_hir_path( pub(crate) fn from_partly_resolved_hir_path(
ctx: &TyLoweringContext<'_, impl HirDatabase>, ctx: &TyLoweringContext<'_>,
resolution: TypeNs, resolution: TypeNs,
resolved_segment: PathSegment<'_>, resolved_segment: PathSegment<'_>,
remaining_segments: PathSegments<'_>, remaining_segments: PathSegments<'_>,
@ -260,7 +255,7 @@ impl Ty {
let ty = if remaining_segments.len() == 1 { let ty = if remaining_segments.len() == 1 {
let segment = remaining_segments.first().unwrap(); let segment = remaining_segments.first().unwrap();
let associated_ty = associated_type_by_name_including_super_traits( let associated_ty = associated_type_by_name_including_super_traits(
ctx.db, ctx.db.upcast(),
trait_ref.trait_, trait_ref.trait_,
&segment.name, &segment.name,
); );
@ -286,8 +281,10 @@ impl Ty {
return (ty, None); return (ty, None);
} }
TypeNs::GenericParam(param_id) => { TypeNs::GenericParam(param_id) => {
let generics = let generics = generics(
generics(ctx.db, ctx.resolver.generic_def().expect("generics in scope")); ctx.db.upcast(),
ctx.resolver.generic_def().expect("generics in scope"),
);
match ctx.type_param_mode { match ctx.type_param_mode {
TypeParamLoweringMode::Placeholder => Ty::Placeholder(param_id), TypeParamLoweringMode::Placeholder => Ty::Placeholder(param_id),
TypeParamLoweringMode::Variable => { TypeParamLoweringMode::Variable => {
@ -297,7 +294,7 @@ impl Ty {
} }
} }
TypeNs::SelfType(impl_id) => { TypeNs::SelfType(impl_id) => {
let generics = generics(ctx.db, impl_id.into()); let generics = generics(ctx.db.upcast(), impl_id.into());
let substs = match ctx.type_param_mode { let substs = match ctx.type_param_mode {
TypeParamLoweringMode::Placeholder => { TypeParamLoweringMode::Placeholder => {
Substs::type_params_for_generics(&generics) Substs::type_params_for_generics(&generics)
@ -307,7 +304,7 @@ impl Ty {
ctx.db.impl_self_ty(impl_id).subst(&substs) ctx.db.impl_self_ty(impl_id).subst(&substs)
} }
TypeNs::AdtSelfType(adt) => { TypeNs::AdtSelfType(adt) => {
let generics = generics(ctx.db, adt.into()); let generics = generics(ctx.db.upcast(), adt.into());
let substs = match ctx.type_param_mode { let substs = match ctx.type_param_mode {
TypeParamLoweringMode::Placeholder => { TypeParamLoweringMode::Placeholder => {
Substs::type_params_for_generics(&generics) Substs::type_params_for_generics(&generics)
@ -327,17 +324,14 @@ impl Ty {
Ty::from_type_relative_path(ctx, ty, Some(resolution), remaining_segments) Ty::from_type_relative_path(ctx, ty, Some(resolution), remaining_segments)
} }
pub(crate) fn from_hir_path( pub(crate) fn from_hir_path(ctx: &TyLoweringContext<'_>, path: &Path) -> (Ty, Option<TypeNs>) {
ctx: &TyLoweringContext<'_, impl HirDatabase>,
path: &Path,
) -> (Ty, Option<TypeNs>) {
// Resolve the path (in type namespace) // Resolve the path (in type namespace)
if let Some(type_ref) = path.type_anchor() { if let Some(type_ref) = path.type_anchor() {
let (ty, res) = Ty::from_hir_ext(ctx, &type_ref); let (ty, res) = Ty::from_hir_ext(ctx, &type_ref);
return Ty::from_type_relative_path(ctx, ty, res, path.segments()); return Ty::from_type_relative_path(ctx, ty, res, path.segments());
} }
let (resolution, remaining_index) = let (resolution, remaining_index) =
match ctx.resolver.resolve_path_in_type_ns(ctx.db, path.mod_path()) { match ctx.resolver.resolve_path_in_type_ns(ctx.db.upcast(), path.mod_path()) {
Some(it) => it, Some(it) => it,
None => return (Ty::Unknown, None), None => return (Ty::Unknown, None),
}; };
@ -352,7 +346,7 @@ impl Ty {
} }
fn select_associated_type( fn select_associated_type(
ctx: &TyLoweringContext<'_, impl HirDatabase>, ctx: &TyLoweringContext<'_>,
self_ty: Ty, self_ty: Ty,
res: Option<TypeNs>, res: Option<TypeNs>,
segment: PathSegment<'_>, segment: PathSegment<'_>,
@ -374,7 +368,7 @@ impl Ty {
} }
_ => return Ty::Unknown, _ => return Ty::Unknown,
}; };
let traits = traits_from_env.into_iter().flat_map(|t| all_super_traits(ctx.db, t)); let traits = traits_from_env.into_iter().flat_map(|t| all_super_traits(ctx.db.upcast(), t));
for t in traits { for t in traits {
if let Some(associated_ty) = ctx.db.trait_data(t).associated_type_by_name(&segment.name) if let Some(associated_ty) = ctx.db.trait_data(t).associated_type_by_name(&segment.name)
{ {
@ -388,7 +382,7 @@ impl Ty {
} }
fn from_hir_path_inner( fn from_hir_path_inner(
ctx: &TyLoweringContext<'_, impl HirDatabase>, ctx: &TyLoweringContext<'_>,
segment: PathSegment<'_>, segment: PathSegment<'_>,
typable: TyDefId, typable: TyDefId,
) -> Ty { ) -> Ty {
@ -404,7 +398,7 @@ impl Ty {
/// Collect generic arguments from a path into a `Substs`. See also /// Collect generic arguments from a path into a `Substs`. See also
/// `create_substs_for_ast_path` and `def_to_ty` in rustc. /// `create_substs_for_ast_path` and `def_to_ty` in rustc.
pub(super) fn substs_from_path( pub(super) fn substs_from_path(
ctx: &TyLoweringContext<'_, impl HirDatabase>, ctx: &TyLoweringContext<'_>,
path: &Path, path: &Path,
// Note that we don't call `db.value_type(resolved)` here, // Note that we don't call `db.value_type(resolved)` here,
// `ValueTyDefId` is just a convenient way to pass generics and // `ValueTyDefId` is just a convenient way to pass generics and
@ -437,13 +431,13 @@ impl Ty {
} }
pub(super) fn substs_from_path_segment( pub(super) fn substs_from_path_segment(
ctx: &TyLoweringContext<'_, impl HirDatabase>, ctx: &TyLoweringContext<'_>,
segment: PathSegment<'_>, segment: PathSegment<'_>,
def_generic: Option<GenericDefId>, def_generic: Option<GenericDefId>,
_add_self_param: bool, _add_self_param: bool,
) -> Substs { ) -> Substs {
let mut substs = Vec::new(); let mut substs = Vec::new();
let def_generics = def_generic.map(|def| generics(ctx.db, def)); let def_generics = def_generic.map(|def| generics(ctx.db.upcast(), def));
let (parent_params, self_params, type_params, impl_trait_params) = let (parent_params, self_params, type_params, impl_trait_params) =
def_generics.map_or((0, 0, 0, 0), |g| g.provenance_split()); def_generics.map_or((0, 0, 0, 0), |g| g.provenance_split());
@ -489,20 +483,21 @@ pub(super) fn substs_from_path_segment(
impl TraitRef { impl TraitRef {
fn from_path( fn from_path(
ctx: &TyLoweringContext<'_, impl HirDatabase>, ctx: &TyLoweringContext<'_>,
path: &Path, path: &Path,
explicit_self_ty: Option<Ty>, explicit_self_ty: Option<Ty>,
) -> Option<Self> { ) -> Option<Self> {
let resolved = match ctx.resolver.resolve_path_in_type_ns_fully(ctx.db, path.mod_path())? { let resolved =
TypeNs::TraitId(tr) => tr, match ctx.resolver.resolve_path_in_type_ns_fully(ctx.db.upcast(), path.mod_path())? {
_ => return None, TypeNs::TraitId(tr) => tr,
}; _ => return None,
};
let segment = path.segments().last().expect("path should have at least one segment"); let segment = path.segments().last().expect("path should have at least one segment");
Some(TraitRef::from_resolved_path(ctx, resolved, segment, explicit_self_ty)) Some(TraitRef::from_resolved_path(ctx, resolved, segment, explicit_self_ty))
} }
pub(crate) fn from_resolved_path( pub(crate) fn from_resolved_path(
ctx: &TyLoweringContext<'_, impl HirDatabase>, ctx: &TyLoweringContext<'_>,
resolved: TraitId, resolved: TraitId,
segment: PathSegment<'_>, segment: PathSegment<'_>,
explicit_self_ty: Option<Ty>, explicit_self_ty: Option<Ty>,
@ -515,7 +510,7 @@ impl TraitRef {
} }
fn from_hir( fn from_hir(
ctx: &TyLoweringContext<'_, impl HirDatabase>, ctx: &TyLoweringContext<'_>,
type_ref: &TypeRef, type_ref: &TypeRef,
explicit_self_ty: Option<Ty>, explicit_self_ty: Option<Ty>,
) -> Option<Self> { ) -> Option<Self> {
@ -527,7 +522,7 @@ impl TraitRef {
} }
fn substs_from_path( fn substs_from_path(
ctx: &TyLoweringContext<'_, impl HirDatabase>, ctx: &TyLoweringContext<'_>,
segment: PathSegment<'_>, segment: PathSegment<'_>,
resolved: TraitId, resolved: TraitId,
) -> Substs { ) -> Substs {
@ -537,7 +532,7 @@ impl TraitRef {
} }
pub(crate) fn from_type_bound( pub(crate) fn from_type_bound(
ctx: &TyLoweringContext<'_, impl HirDatabase>, ctx: &TyLoweringContext<'_>,
bound: &TypeBound, bound: &TypeBound,
self_ty: Ty, self_ty: Ty,
) -> Option<TraitRef> { ) -> Option<TraitRef> {
@ -550,14 +545,14 @@ impl TraitRef {
impl GenericPredicate { impl GenericPredicate {
pub(crate) fn from_where_predicate<'a>( pub(crate) fn from_where_predicate<'a>(
ctx: &'a TyLoweringContext<'a, impl HirDatabase>, ctx: &'a TyLoweringContext<'a>,
where_predicate: &'a WherePredicate, where_predicate: &'a WherePredicate,
) -> impl Iterator<Item = GenericPredicate> + 'a { ) -> impl Iterator<Item = GenericPredicate> + 'a {
let self_ty = match &where_predicate.target { let self_ty = match &where_predicate.target {
WherePredicateTarget::TypeRef(type_ref) => Ty::from_hir(ctx, type_ref), WherePredicateTarget::TypeRef(type_ref) => Ty::from_hir(ctx, type_ref),
WherePredicateTarget::TypeParam(param_id) => { WherePredicateTarget::TypeParam(param_id) => {
let generic_def = ctx.resolver.generic_def().expect("generics in scope"); let generic_def = ctx.resolver.generic_def().expect("generics in scope");
let generics = generics(ctx.db, generic_def); let generics = generics(ctx.db.upcast(), generic_def);
let param_id = hir_def::TypeParamId { parent: generic_def, local_id: *param_id }; let param_id = hir_def::TypeParamId { parent: generic_def, local_id: *param_id };
match ctx.type_param_mode { match ctx.type_param_mode {
TypeParamLoweringMode::Placeholder => Ty::Placeholder(param_id), TypeParamLoweringMode::Placeholder => Ty::Placeholder(param_id),
@ -572,7 +567,7 @@ impl GenericPredicate {
} }
pub(crate) fn from_type_bound<'a>( pub(crate) fn from_type_bound<'a>(
ctx: &'a TyLoweringContext<'a, impl HirDatabase>, ctx: &'a TyLoweringContext<'a>,
bound: &'a TypeBound, bound: &'a TypeBound,
self_ty: Ty, self_ty: Ty,
) -> impl Iterator<Item = GenericPredicate> + 'a { ) -> impl Iterator<Item = GenericPredicate> + 'a {
@ -587,7 +582,7 @@ impl GenericPredicate {
} }
fn assoc_type_bindings_from_type_bound<'a>( fn assoc_type_bindings_from_type_bound<'a>(
ctx: &'a TyLoweringContext<'a, impl HirDatabase>, ctx: &'a TyLoweringContext<'a>,
bound: &'a TypeBound, bound: &'a TypeBound,
trait_ref: TraitRef, trait_ref: TraitRef,
) -> impl Iterator<Item = GenericPredicate> + 'a { ) -> impl Iterator<Item = GenericPredicate> + 'a {
@ -600,8 +595,11 @@ fn assoc_type_bindings_from_type_bound<'a>(
.flat_map(|segment| segment.args_and_bindings.into_iter()) .flat_map(|segment| segment.args_and_bindings.into_iter())
.flat_map(|args_and_bindings| args_and_bindings.bindings.iter()) .flat_map(|args_and_bindings| args_and_bindings.bindings.iter())
.map(move |(name, type_ref)| { .map(move |(name, type_ref)| {
let associated_ty = let associated_ty = associated_type_by_name_including_super_traits(
associated_type_by_name_including_super_traits(ctx.db, trait_ref.trait_, &name); ctx.db.upcast(),
trait_ref.trait_,
&name,
);
let associated_ty = match associated_ty { let associated_ty = match associated_ty {
None => return GenericPredicate::Error, None => return GenericPredicate::Error,
Some(t) => t, Some(t) => t,
@ -615,7 +613,7 @@ fn assoc_type_bindings_from_type_bound<'a>(
} }
/// Build the signature of a callable item (function, struct or enum variant). /// Build the signature of a callable item (function, struct or enum variant).
pub fn callable_item_sig(db: &impl HirDatabase, def: CallableDef) -> PolyFnSig { pub fn callable_item_sig(db: &dyn HirDatabase, def: CallableDef) -> PolyFnSig {
match def { match def {
CallableDef::FunctionId(f) => fn_sig_for_fn(db, f), CallableDef::FunctionId(f) => fn_sig_for_fn(db, f),
CallableDef::StructId(s) => fn_sig_for_struct_constructor(db, s), CallableDef::StructId(s) => fn_sig_for_struct_constructor(db, s),
@ -625,16 +623,16 @@ pub fn callable_item_sig(db: &impl HirDatabase, def: CallableDef) -> PolyFnSig {
/// Build the type of all specific fields of a struct or enum variant. /// Build the type of all specific fields of a struct or enum variant.
pub(crate) fn field_types_query( pub(crate) fn field_types_query(
db: &impl HirDatabase, db: &dyn HirDatabase,
variant_id: VariantId, variant_id: VariantId,
) -> Arc<ArenaMap<LocalStructFieldId, Binders<Ty>>> { ) -> Arc<ArenaMap<LocalStructFieldId, Binders<Ty>>> {
let var_data = variant_data(db, variant_id); let var_data = variant_data(db.upcast(), variant_id);
let (resolver, def): (_, GenericDefId) = match variant_id { let (resolver, def): (_, GenericDefId) = match variant_id {
VariantId::StructId(it) => (it.resolver(db), it.into()), VariantId::StructId(it) => (it.resolver(db.upcast()), it.into()),
VariantId::UnionId(it) => (it.resolver(db), it.into()), VariantId::UnionId(it) => (it.resolver(db.upcast()), it.into()),
VariantId::EnumVariantId(it) => (it.parent.resolver(db), it.parent.into()), VariantId::EnumVariantId(it) => (it.parent.resolver(db.upcast()), it.parent.into()),
}; };
let generics = generics(db, def); let generics = generics(db.upcast(), def);
let mut res = ArenaMap::default(); let mut res = ArenaMap::default();
let ctx = let ctx =
TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable); TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable);
@ -653,13 +651,13 @@ pub(crate) fn field_types_query(
/// following bounds are disallowed: `T: Foo<U::Item>, U: Foo<T::Item>`, but /// following bounds are disallowed: `T: Foo<U::Item>, U: Foo<T::Item>`, but
/// these are fine: `T: Foo<U::Item>, U: Foo<()>`. /// these are fine: `T: Foo<U::Item>, U: Foo<()>`.
pub(crate) fn generic_predicates_for_param_query( pub(crate) fn generic_predicates_for_param_query(
db: &impl HirDatabase, db: &dyn HirDatabase,
param_id: TypeParamId, param_id: TypeParamId,
) -> Arc<[Binders<GenericPredicate>]> { ) -> Arc<[Binders<GenericPredicate>]> {
let resolver = param_id.parent.resolver(db); let resolver = param_id.parent.resolver(db.upcast());
let ctx = let ctx =
TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable); TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable);
let generics = generics(db, param_id.parent); let generics = generics(db.upcast(), param_id.parent);
resolver resolver
.where_predicates_in_scope() .where_predicates_in_scope()
// we have to filter out all other predicates *first*, before attempting to lower them // we have to filter out all other predicates *first*, before attempting to lower them
@ -677,7 +675,7 @@ pub(crate) fn generic_predicates_for_param_query(
} }
pub(crate) fn generic_predicates_for_param_recover( pub(crate) fn generic_predicates_for_param_recover(
_db: &impl HirDatabase, _db: &dyn HirDatabase,
_cycle: &[String], _cycle: &[String],
_param_id: &TypeParamId, _param_id: &TypeParamId,
) -> Arc<[Binders<GenericPredicate>]> { ) -> Arc<[Binders<GenericPredicate>]> {
@ -685,7 +683,7 @@ pub(crate) fn generic_predicates_for_param_recover(
} }
impl TraitEnvironment { impl TraitEnvironment {
pub fn lower(db: &impl HirDatabase, resolver: &Resolver) -> Arc<TraitEnvironment> { pub fn lower(db: &dyn HirDatabase, resolver: &Resolver) -> Arc<TraitEnvironment> {
let ctx = TyLoweringContext::new(db, &resolver) let ctx = TyLoweringContext::new(db, &resolver)
.with_type_param_mode(TypeParamLoweringMode::Placeholder); .with_type_param_mode(TypeParamLoweringMode::Placeholder);
let mut predicates = resolver let mut predicates = resolver
@ -696,13 +694,13 @@ impl TraitEnvironment {
if let Some(def) = resolver.generic_def() { if let Some(def) = resolver.generic_def() {
let container: Option<AssocContainerId> = match def { let container: Option<AssocContainerId> = match def {
// FIXME: is there a function for this? // FIXME: is there a function for this?
GenericDefId::FunctionId(f) => Some(f.lookup(db).container), GenericDefId::FunctionId(f) => Some(f.lookup(db.upcast()).container),
GenericDefId::AdtId(_) => None, GenericDefId::AdtId(_) => None,
GenericDefId::TraitId(_) => None, GenericDefId::TraitId(_) => None,
GenericDefId::TypeAliasId(t) => Some(t.lookup(db).container), GenericDefId::TypeAliasId(t) => Some(t.lookup(db.upcast()).container),
GenericDefId::ImplId(_) => None, GenericDefId::ImplId(_) => None,
GenericDefId::EnumVariantId(_) => None, GenericDefId::EnumVariantId(_) => None,
GenericDefId::ConstId(c) => Some(c.lookup(db).container), GenericDefId::ConstId(c) => Some(c.lookup(db.upcast()).container),
}; };
if let Some(AssocContainerId::TraitId(trait_id)) = container { if let Some(AssocContainerId::TraitId(trait_id)) = container {
// add `Self: Trait<T1, T2, ...>` to the environment in trait // add `Self: Trait<T1, T2, ...>` to the environment in trait
@ -723,13 +721,13 @@ impl TraitEnvironment {
/// Resolve the where clause(s) of an item with generics. /// Resolve the where clause(s) of an item with generics.
pub(crate) fn generic_predicates_query( pub(crate) fn generic_predicates_query(
db: &impl HirDatabase, db: &dyn HirDatabase,
def: GenericDefId, def: GenericDefId,
) -> Arc<[Binders<GenericPredicate>]> { ) -> Arc<[Binders<GenericPredicate>]> {
let resolver = def.resolver(db); let resolver = def.resolver(db.upcast());
let ctx = let ctx =
TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable); TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable);
let generics = generics(db, def); let generics = generics(db.upcast(), def);
resolver resolver
.where_predicates_in_scope() .where_predicates_in_scope()
.flat_map(|pred| { .flat_map(|pred| {
@ -740,10 +738,10 @@ pub(crate) fn generic_predicates_query(
} }
/// Resolve the default type params from generics /// Resolve the default type params from generics
pub(crate) fn generic_defaults_query(db: &impl HirDatabase, def: GenericDefId) -> Substs { pub(crate) fn generic_defaults_query(db: &dyn HirDatabase, def: GenericDefId) -> Substs {
let resolver = def.resolver(db); let resolver = def.resolver(db.upcast());
let ctx = TyLoweringContext::new(db, &resolver); let ctx = TyLoweringContext::new(db, &resolver);
let generic_params = generics(db, def); let generic_params = generics(db.upcast(), def);
let defaults = generic_params let defaults = generic_params
.iter() .iter()
@ -753,33 +751,33 @@ pub(crate) fn generic_defaults_query(db: &impl HirDatabase, def: GenericDefId) -
Substs(defaults) Substs(defaults)
} }
fn fn_sig_for_fn(db: &impl HirDatabase, def: FunctionId) -> PolyFnSig { fn fn_sig_for_fn(db: &dyn HirDatabase, def: FunctionId) -> PolyFnSig {
let data = db.function_data(def); let data = db.function_data(def);
let resolver = def.resolver(db); let resolver = def.resolver(db.upcast());
let ctx_params = TyLoweringContext::new(db, &resolver) let ctx_params = TyLoweringContext::new(db, &resolver)
.with_impl_trait_mode(ImplTraitLoweringMode::Variable) .with_impl_trait_mode(ImplTraitLoweringMode::Variable)
.with_type_param_mode(TypeParamLoweringMode::Variable); .with_type_param_mode(TypeParamLoweringMode::Variable);
let params = data.params.iter().map(|tr| Ty::from_hir(&ctx_params, tr)).collect::<Vec<_>>(); let params = data.params.iter().map(|tr| Ty::from_hir(&ctx_params, tr)).collect::<Vec<_>>();
let ctx_ret = ctx_params.with_impl_trait_mode(ImplTraitLoweringMode::Opaque); let ctx_ret = ctx_params.with_impl_trait_mode(ImplTraitLoweringMode::Opaque);
let ret = Ty::from_hir(&ctx_ret, &data.ret_type); let ret = Ty::from_hir(&ctx_ret, &data.ret_type);
let generics = generics(db, def.into()); let generics = generics(db.upcast(), def.into());
let num_binders = generics.len(); let num_binders = generics.len();
Binders::new(num_binders, FnSig::from_params_and_return(params, ret)) Binders::new(num_binders, FnSig::from_params_and_return(params, ret))
} }
/// Build the declared type of a function. This should not need to look at the /// Build the declared type of a function. This should not need to look at the
/// function body. /// function body.
fn type_for_fn(db: &impl HirDatabase, def: FunctionId) -> Binders<Ty> { fn type_for_fn(db: &dyn HirDatabase, def: FunctionId) -> Binders<Ty> {
let generics = generics(db, def.into()); let generics = generics(db.upcast(), def.into());
let substs = Substs::bound_vars(&generics); let substs = Substs::bound_vars(&generics);
Binders::new(substs.len(), Ty::apply(TypeCtor::FnDef(def.into()), substs)) Binders::new(substs.len(), Ty::apply(TypeCtor::FnDef(def.into()), substs))
} }
/// Build the declared type of a const. /// Build the declared type of a const.
fn type_for_const(db: &impl HirDatabase, def: ConstId) -> Binders<Ty> { fn type_for_const(db: &dyn HirDatabase, def: ConstId) -> Binders<Ty> {
let data = db.const_data(def); let data = db.const_data(def);
let generics = generics(db, def.into()); let generics = generics(db.upcast(), def.into());
let resolver = def.resolver(db); let resolver = def.resolver(db.upcast());
let ctx = let ctx =
TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable); TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable);
@ -787,9 +785,9 @@ fn type_for_const(db: &impl HirDatabase, def: ConstId) -> Binders<Ty> {
} }
/// Build the declared type of a static. /// Build the declared type of a static.
fn type_for_static(db: &impl HirDatabase, def: StaticId) -> Binders<Ty> { fn type_for_static(db: &dyn HirDatabase, def: StaticId) -> Binders<Ty> {
let data = db.static_data(def); let data = db.static_data(def);
let resolver = def.resolver(db); let resolver = def.resolver(db.upcast());
let ctx = TyLoweringContext::new(db, &resolver); let ctx = TyLoweringContext::new(db, &resolver);
Binders::new(0, Ty::from_hir(&ctx, &data.type_ref)) Binders::new(0, Ty::from_hir(&ctx, &data.type_ref))
@ -806,10 +804,10 @@ fn type_for_builtin(def: BuiltinType) -> Ty {
}) })
} }
fn fn_sig_for_struct_constructor(db: &impl HirDatabase, def: StructId) -> PolyFnSig { fn fn_sig_for_struct_constructor(db: &dyn HirDatabase, def: StructId) -> PolyFnSig {
let struct_data = db.struct_data(def); let struct_data = db.struct_data(def);
let fields = struct_data.variant_data.fields(); let fields = struct_data.variant_data.fields();
let resolver = def.resolver(db); let resolver = def.resolver(db.upcast());
let ctx = let ctx =
TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable); TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable);
let params = let params =
@ -819,21 +817,21 @@ fn fn_sig_for_struct_constructor(db: &impl HirDatabase, def: StructId) -> PolyFn
} }
/// Build the type of a tuple struct constructor. /// Build the type of a tuple struct constructor.
fn type_for_struct_constructor(db: &impl HirDatabase, def: StructId) -> Binders<Ty> { fn type_for_struct_constructor(db: &dyn HirDatabase, def: StructId) -> Binders<Ty> {
let struct_data = db.struct_data(def); let struct_data = db.struct_data(def);
if let StructKind::Unit = struct_data.variant_data.kind() { if let StructKind::Unit = struct_data.variant_data.kind() {
return type_for_adt(db, def.into()); return type_for_adt(db, def.into());
} }
let generics = generics(db, def.into()); let generics = generics(db.upcast(), def.into());
let substs = Substs::bound_vars(&generics); let substs = Substs::bound_vars(&generics);
Binders::new(substs.len(), Ty::apply(TypeCtor::FnDef(def.into()), substs)) Binders::new(substs.len(), Ty::apply(TypeCtor::FnDef(def.into()), substs))
} }
fn fn_sig_for_enum_variant_constructor(db: &impl HirDatabase, def: EnumVariantId) -> PolyFnSig { fn fn_sig_for_enum_variant_constructor(db: &dyn HirDatabase, def: EnumVariantId) -> PolyFnSig {
let enum_data = db.enum_data(def.parent); let enum_data = db.enum_data(def.parent);
let var_data = &enum_data.variants[def.local_id]; let var_data = &enum_data.variants[def.local_id];
let fields = var_data.variant_data.fields(); let fields = var_data.variant_data.fields();
let resolver = def.parent.resolver(db); let resolver = def.parent.resolver(db.upcast());
let ctx = let ctx =
TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable); TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable);
let params = let params =
@ -843,26 +841,26 @@ fn fn_sig_for_enum_variant_constructor(db: &impl HirDatabase, def: EnumVariantId
} }
/// Build the type of a tuple enum variant constructor. /// Build the type of a tuple enum variant constructor.
fn type_for_enum_variant_constructor(db: &impl HirDatabase, def: EnumVariantId) -> Binders<Ty> { fn type_for_enum_variant_constructor(db: &dyn HirDatabase, def: EnumVariantId) -> Binders<Ty> {
let enum_data = db.enum_data(def.parent); let enum_data = db.enum_data(def.parent);
let var_data = &enum_data.variants[def.local_id].variant_data; let var_data = &enum_data.variants[def.local_id].variant_data;
if let StructKind::Unit = var_data.kind() { if let StructKind::Unit = var_data.kind() {
return type_for_adt(db, def.parent.into()); return type_for_adt(db, def.parent.into());
} }
let generics = generics(db, def.parent.into()); let generics = generics(db.upcast(), def.parent.into());
let substs = Substs::bound_vars(&generics); let substs = Substs::bound_vars(&generics);
Binders::new(substs.len(), Ty::apply(TypeCtor::FnDef(def.into()), substs)) Binders::new(substs.len(), Ty::apply(TypeCtor::FnDef(def.into()), substs))
} }
fn type_for_adt(db: &impl HirDatabase, adt: AdtId) -> Binders<Ty> { fn type_for_adt(db: &dyn HirDatabase, adt: AdtId) -> Binders<Ty> {
let generics = generics(db, adt.into()); let generics = generics(db.upcast(), adt.into());
let substs = Substs::bound_vars(&generics); let substs = Substs::bound_vars(&generics);
Binders::new(substs.len(), Ty::apply(TypeCtor::Adt(adt), substs)) Binders::new(substs.len(), Ty::apply(TypeCtor::Adt(adt), substs))
} }
fn type_for_type_alias(db: &impl HirDatabase, t: TypeAliasId) -> Binders<Ty> { fn type_for_type_alias(db: &dyn HirDatabase, t: TypeAliasId) -> Binders<Ty> {
let generics = generics(db, t.into()); let generics = generics(db.upcast(), t.into());
let resolver = t.resolver(db); let resolver = t.resolver(db.upcast());
let ctx = let ctx =
TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable); TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable);
let type_ref = &db.type_alias_data(t).type_ref; let type_ref = &db.type_alias_data(t).type_ref;
@ -880,7 +878,8 @@ pub enum CallableDef {
impl_froms!(CallableDef: FunctionId, StructId, EnumVariantId); impl_froms!(CallableDef: FunctionId, StructId, EnumVariantId);
impl CallableDef { impl CallableDef {
pub fn krate(self, db: &impl HirDatabase) -> CrateId { pub fn krate(self, db: &dyn HirDatabase) -> CrateId {
let db = db.upcast();
match self { match self {
CallableDef::FunctionId(f) => f.lookup(db).module(db), CallableDef::FunctionId(f) => f.lookup(db).module(db),
CallableDef::StructId(s) => s.lookup(db).container.module(db), CallableDef::StructId(s) => s.lookup(db).container.module(db),
@ -922,7 +921,7 @@ impl_froms!(ValueTyDefId: FunctionId, StructId, EnumVariantId, ConstId, StaticId
/// `struct Foo(usize)`, we have two types: The type of the struct itself, and /// `struct Foo(usize)`, we have two types: The type of the struct itself, and
/// the constructor function `(usize) -> Foo` which lives in the values /// the constructor function `(usize) -> Foo` which lives in the values
/// namespace. /// namespace.
pub(crate) fn ty_query(db: &impl HirDatabase, def: TyDefId) -> Binders<Ty> { pub(crate) fn ty_query(db: &dyn HirDatabase, def: TyDefId) -> Binders<Ty> {
match def { match def {
TyDefId::BuiltinType(it) => Binders::new(0, type_for_builtin(it)), TyDefId::BuiltinType(it) => Binders::new(0, type_for_builtin(it)),
TyDefId::AdtId(it) => type_for_adt(db, it), TyDefId::AdtId(it) => type_for_adt(db, it),
@ -930,16 +929,16 @@ pub(crate) fn ty_query(db: &impl HirDatabase, def: TyDefId) -> Binders<Ty> {
} }
} }
pub(crate) fn ty_recover(db: &impl HirDatabase, _cycle: &[String], def: &TyDefId) -> Binders<Ty> { pub(crate) fn ty_recover(db: &dyn HirDatabase, _cycle: &[String], def: &TyDefId) -> Binders<Ty> {
let num_binders = match *def { let num_binders = match *def {
TyDefId::BuiltinType(_) => 0, TyDefId::BuiltinType(_) => 0,
TyDefId::AdtId(it) => generics(db, it.into()).len(), TyDefId::AdtId(it) => generics(db.upcast(), it.into()).len(),
TyDefId::TypeAliasId(it) => generics(db, it.into()).len(), TyDefId::TypeAliasId(it) => generics(db.upcast(), it.into()).len(),
}; };
Binders::new(num_binders, Ty::Unknown) Binders::new(num_binders, Ty::Unknown)
} }
pub(crate) fn value_ty_query(db: &impl HirDatabase, def: ValueTyDefId) -> Binders<Ty> { pub(crate) fn value_ty_query(db: &dyn HirDatabase, def: ValueTyDefId) -> Binders<Ty> {
match def { match def {
ValueTyDefId::FunctionId(it) => type_for_fn(db, it), ValueTyDefId::FunctionId(it) => type_for_fn(db, it),
ValueTyDefId::StructId(it) => type_for_struct_constructor(db, it), ValueTyDefId::StructId(it) => type_for_struct_constructor(db, it),
@ -949,30 +948,27 @@ pub(crate) fn value_ty_query(db: &impl HirDatabase, def: ValueTyDefId) -> Binder
} }
} }
pub(crate) fn impl_self_ty_query(db: &impl HirDatabase, impl_id: ImplId) -> Binders<Ty> { pub(crate) fn impl_self_ty_query(db: &dyn HirDatabase, impl_id: ImplId) -> Binders<Ty> {
let impl_data = db.impl_data(impl_id); let impl_data = db.impl_data(impl_id);
let resolver = impl_id.resolver(db); let resolver = impl_id.resolver(db.upcast());
let generics = generics(db, impl_id.into()); let generics = generics(db.upcast(), impl_id.into());
let ctx = let ctx =
TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable); TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable);
Binders::new(generics.len(), Ty::from_hir(&ctx, &impl_data.target_type)) Binders::new(generics.len(), Ty::from_hir(&ctx, &impl_data.target_type))
} }
pub(crate) fn impl_self_ty_recover( pub(crate) fn impl_self_ty_recover(
db: &impl HirDatabase, db: &dyn HirDatabase,
_cycle: &[String], _cycle: &[String],
impl_id: &ImplId, impl_id: &ImplId,
) -> Binders<Ty> { ) -> Binders<Ty> {
let generics = generics(db, (*impl_id).into()); let generics = generics(db.upcast(), (*impl_id).into());
Binders::new(generics.len(), Ty::Unknown) Binders::new(generics.len(), Ty::Unknown)
} }
pub(crate) fn impl_trait_query( pub(crate) fn impl_trait_query(db: &dyn HirDatabase, impl_id: ImplId) -> Option<Binders<TraitRef>> {
db: &impl HirDatabase,
impl_id: ImplId,
) -> Option<Binders<TraitRef>> {
let impl_data = db.impl_data(impl_id); let impl_data = db.impl_data(impl_id);
let resolver = impl_id.resolver(db); let resolver = impl_id.resolver(db.upcast());
let ctx = let ctx =
TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable); TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable);
let self_ty = db.impl_self_ty(impl_id); let self_ty = db.impl_self_ty(impl_id);
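`CallableDef::krate` above takes a slightly different route: when an entire function body only needs the `hir_def` layer, it upcasts once at the top with `let db = db.upcast();` and lets the shadowed binding serve the rest of the body. A small sketch of that shadowing, again with invented stand-in traits:

// Invented stand-ins; illustrates the `let db = db.upcast();` shadowing used above.
trait Upcast<T: ?Sized> {
    fn upcast(&self) -> &T;
}

trait DefDb {
    fn module_of(&self, def: u32) -> u32;
    fn krate_of_module(&self, module: u32) -> u32;
}

trait HirDb: DefDb + Upcast<dyn DefDb> {}

fn krate(db: &dyn HirDb, def: u32) -> u32 {
    // Upcast once; every later call in this body uses the DefDb view directly.
    let db = db.upcast();
    let module = db.module_of(def);
    db.krate_of_module(module)
}

struct MockDb;

impl DefDb for MockDb {
    fn module_of(&self, def: u32) -> u32 {
        def / 10
    }
    fn krate_of_module(&self, _module: u32) -> u32 {
        1
    }
}

impl Upcast<dyn DefDb> for MockDb {
    fn upcast(&self) -> &(dyn DefDb + 'static) {
        self // unsized coercion from &MockDb to &dyn DefDb
    }
}

impl HirDb for MockDb {}

fn main() {
    assert_eq!(krate(&MockDb, 42), 1);
}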
View file
@ -48,10 +48,7 @@ pub struct CrateImplDefs {
} }
impl CrateImplDefs { impl CrateImplDefs {
pub(crate) fn impls_in_crate_query( pub(crate) fn impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<CrateImplDefs> {
db: &impl HirDatabase,
krate: CrateId,
) -> Arc<CrateImplDefs> {
let _p = profile("impls_in_crate_query"); let _p = profile("impls_in_crate_query");
let mut res = let mut res =
CrateImplDefs { impls: FxHashMap::default(), impls_by_trait: FxHashMap::default() }; CrateImplDefs { impls: FxHashMap::default(), impls_by_trait: FxHashMap::default() };
@ -92,7 +89,7 @@ impl CrateImplDefs {
impl Ty { impl Ty {
pub fn def_crates( pub fn def_crates(
&self, &self,
db: &impl HirDatabase, db: &dyn HirDatabase,
cur_crate: CrateId, cur_crate: CrateId,
) -> Option<ArrayVec<[CrateId; 2]>> { ) -> Option<ArrayVec<[CrateId; 2]>> {
// Types like slice can have inherent impls in several crates, (core and alloc). // Types like slice can have inherent impls in several crates, (core and alloc).
@ -110,7 +107,7 @@ impl Ty {
let lang_item_targets = match self { let lang_item_targets = match self {
Ty::Apply(a_ty) => match a_ty.ctor { Ty::Apply(a_ty) => match a_ty.ctor {
TypeCtor::Adt(def_id) => { TypeCtor::Adt(def_id) => {
return Some(std::iter::once(def_id.module(db).krate).collect()) return Some(std::iter::once(def_id.module(db.upcast()).krate).collect())
} }
TypeCtor::Bool => lang_item_crate!("bool"), TypeCtor::Bool => lang_item_crate!("bool"),
TypeCtor::Char => lang_item_crate!("char"), TypeCtor::Char => lang_item_crate!("char"),
@ -134,7 +131,7 @@ impl Ty {
LangItemTarget::ImplDefId(it) => Some(it), LangItemTarget::ImplDefId(it) => Some(it),
_ => None, _ => None,
}) })
.map(|it| it.lookup(db).container.module(db).krate) .map(|it| it.lookup(db.upcast()).container.module(db.upcast()).krate)
.collect(); .collect();
Some(res) Some(res)
} }
@ -143,7 +140,7 @@ impl Ty {
/// receiver type (but without autoref applied yet). /// receiver type (but without autoref applied yet).
pub(crate) fn lookup_method( pub(crate) fn lookup_method(
ty: &Canonical<Ty>, ty: &Canonical<Ty>,
db: &impl HirDatabase, db: &dyn HirDatabase,
env: Arc<TraitEnvironment>, env: Arc<TraitEnvironment>,
krate: CrateId, krate: CrateId,
traits_in_scope: &FxHashSet<TraitId>, traits_in_scope: &FxHashSet<TraitId>,
@ -181,7 +178,7 @@ pub enum LookupMode {
// FIXME add a context type here? // FIXME add a context type here?
pub fn iterate_method_candidates<T>( pub fn iterate_method_candidates<T>(
ty: &Canonical<Ty>, ty: &Canonical<Ty>,
db: &impl HirDatabase, db: &dyn HirDatabase,
env: Arc<TraitEnvironment>, env: Arc<TraitEnvironment>,
krate: CrateId, krate: CrateId,
traits_in_scope: &FxHashSet<TraitId>, traits_in_scope: &FxHashSet<TraitId>,
@ -247,7 +244,7 @@ pub fn iterate_method_candidates<T>(
fn iterate_method_candidates_with_autoref<T>( fn iterate_method_candidates_with_autoref<T>(
deref_chain: &[Canonical<Ty>], deref_chain: &[Canonical<Ty>],
db: &impl HirDatabase, db: &dyn HirDatabase,
env: Arc<TraitEnvironment>, env: Arc<TraitEnvironment>,
krate: CrateId, krate: CrateId,
traits_in_scope: &FxHashSet<TraitId>, traits_in_scope: &FxHashSet<TraitId>,
@ -304,7 +301,7 @@ fn iterate_method_candidates_with_autoref<T>(
fn iterate_method_candidates_by_receiver<T>( fn iterate_method_candidates_by_receiver<T>(
receiver_ty: &Canonical<Ty>, receiver_ty: &Canonical<Ty>,
rest_of_deref_chain: &[Canonical<Ty>], rest_of_deref_chain: &[Canonical<Ty>],
db: &impl HirDatabase, db: &dyn HirDatabase,
env: Arc<TraitEnvironment>, env: Arc<TraitEnvironment>,
krate: CrateId, krate: CrateId,
traits_in_scope: &FxHashSet<TraitId>, traits_in_scope: &FxHashSet<TraitId>,
@ -340,7 +337,7 @@ fn iterate_method_candidates_by_receiver<T>(
fn iterate_method_candidates_for_self_ty<T>( fn iterate_method_candidates_for_self_ty<T>(
self_ty: &Canonical<Ty>, self_ty: &Canonical<Ty>,
db: &impl HirDatabase, db: &dyn HirDatabase,
env: Arc<TraitEnvironment>, env: Arc<TraitEnvironment>,
krate: CrateId, krate: CrateId,
traits_in_scope: &FxHashSet<TraitId>, traits_in_scope: &FxHashSet<TraitId>,
@ -367,7 +364,7 @@ fn iterate_method_candidates_for_self_ty<T>(
fn iterate_trait_method_candidates<T>( fn iterate_trait_method_candidates<T>(
self_ty: &Canonical<Ty>, self_ty: &Canonical<Ty>,
db: &impl HirDatabase, db: &dyn HirDatabase,
env: Arc<TraitEnvironment>, env: Arc<TraitEnvironment>,
krate: CrateId, krate: CrateId,
traits_in_scope: &FxHashSet<TraitId>, traits_in_scope: &FxHashSet<TraitId>,
@ -381,7 +378,7 @@ fn iterate_trait_method_candidates<T>(
// if we have `T: Trait` in the param env, the trait doesn't need to be in scope // if we have `T: Trait` in the param env, the trait doesn't need to be in scope
env.trait_predicates_for_self_ty(&self_ty.value) env.trait_predicates_for_self_ty(&self_ty.value)
.map(|tr| tr.trait_) .map(|tr| tr.trait_)
.flat_map(|t| all_super_traits(db, t)) .flat_map(|t| all_super_traits(db.upcast(), t))
.collect() .collect()
} else { } else {
Vec::new() Vec::new()
@ -416,7 +413,7 @@ fn iterate_trait_method_candidates<T>(
fn iterate_inherent_methods<T>( fn iterate_inherent_methods<T>(
self_ty: &Canonical<Ty>, self_ty: &Canonical<Ty>,
db: &impl HirDatabase, db: &dyn HirDatabase,
name: Option<&Name>, name: Option<&Name>,
receiver_ty: Option<&Canonical<Ty>>, receiver_ty: Option<&Canonical<Ty>>,
krate: CrateId, krate: CrateId,
@ -449,7 +446,7 @@ fn iterate_inherent_methods<T>(
/// Returns the self type for the index trait call. /// Returns the self type for the index trait call.
pub fn resolve_indexing_op( pub fn resolve_indexing_op(
db: &impl HirDatabase, db: &dyn HirDatabase,
ty: &Canonical<Ty>, ty: &Canonical<Ty>,
env: Arc<TraitEnvironment>, env: Arc<TraitEnvironment>,
krate: CrateId, krate: CrateId,
@ -467,7 +464,7 @@ pub fn resolve_indexing_op(
} }
fn is_valid_candidate( fn is_valid_candidate(
db: &impl HirDatabase, db: &dyn HirDatabase,
name: Option<&Name>, name: Option<&Name>,
receiver_ty: Option<&Canonical<Ty>>, receiver_ty: Option<&Canonical<Ty>>,
item: AssocItemId, item: AssocItemId,
@ -504,7 +501,7 @@ fn is_valid_candidate(
} }
pub(crate) fn inherent_impl_substs( pub(crate) fn inherent_impl_substs(
db: &impl HirDatabase, db: &dyn HirDatabase,
impl_id: ImplId, impl_id: ImplId,
self_ty: &Canonical<Ty>, self_ty: &Canonical<Ty>,
) -> Option<Substs> { ) -> Option<Substs> {
@ -544,11 +541,11 @@ fn fallback_bound_vars(s: Substs, num_vars_to_keep: usize) -> Substs {
} }
fn transform_receiver_ty( fn transform_receiver_ty(
db: &impl HirDatabase, db: &dyn HirDatabase,
function_id: FunctionId, function_id: FunctionId,
self_ty: &Canonical<Ty>, self_ty: &Canonical<Ty>,
) -> Option<Ty> { ) -> Option<Ty> {
let substs = match function_id.lookup(db).container { let substs = match function_id.lookup(db.upcast()).container {
AssocContainerId::TraitId(_) => Substs::build_for_def(db, function_id) AssocContainerId::TraitId(_) => Substs::build_for_def(db, function_id)
.push(self_ty.value.clone()) .push(self_ty.value.clone())
.fill_with_unknown() .fill_with_unknown()
@ -562,7 +559,7 @@ fn transform_receiver_ty(
pub fn implements_trait( pub fn implements_trait(
ty: &Canonical<Ty>, ty: &Canonical<Ty>,
db: &impl HirDatabase, db: &dyn HirDatabase,
env: Arc<TraitEnvironment>, env: Arc<TraitEnvironment>,
krate: CrateId, krate: CrateId,
trait_: TraitId, trait_: TraitId,
@ -581,7 +578,7 @@ pub fn implements_trait(
/// This creates Substs for a trait with the given Self type and type variables /// This creates Substs for a trait with the given Self type and type variables
/// for all other parameters, to query Chalk with it. /// for all other parameters, to query Chalk with it.
fn generic_implements_goal( fn generic_implements_goal(
db: &impl HirDatabase, db: &dyn HirDatabase,
env: Arc<TraitEnvironment>, env: Arc<TraitEnvironment>,
trait_: TraitId, trait_: TraitId,
self_ty: Canonical<Ty>, self_ty: Canonical<Ty>,
@ -598,7 +595,7 @@ fn generic_implements_goal(
} }
fn autoderef_method_receiver( fn autoderef_method_receiver(
db: &impl HirDatabase, db: &dyn HirDatabase,
krate: CrateId, krate: CrateId,
ty: InEnvironment<Canonical<Ty>>, ty: InEnvironment<Canonical<Ty>>,
View file

@ -6,8 +6,10 @@ use std::{
}; };
use hir_def::{db::DefDatabase, AssocItemId, ModuleDefId, ModuleId}; use hir_def::{db::DefDatabase, AssocItemId, ModuleDefId, ModuleId};
use hir_expand::diagnostics::DiagnosticSink; use hir_expand::{db::AstDatabase, diagnostics::DiagnosticSink};
use ra_db::{salsa, CrateId, FileId, FileLoader, FileLoaderDelegate, RelativePath, SourceDatabase}; use ra_db::{
salsa, CrateId, FileId, FileLoader, FileLoaderDelegate, RelativePath, SourceDatabase, Upcast,
};
use crate::{db::HirDatabase, expr::ExprValidator}; use crate::{db::HirDatabase, expr::ExprValidator};
@ -25,6 +27,18 @@ pub struct TestDB {
runtime: salsa::Runtime<TestDB>, runtime: salsa::Runtime<TestDB>,
} }
impl Upcast<dyn AstDatabase> for TestDB {
fn upcast(&self) -> &(dyn AstDatabase + 'static) {
&*self
}
}
impl Upcast<dyn DefDatabase> for TestDB {
fn upcast(&self) -> &(dyn DefDatabase + 'static) {
&*self
}
}
impl salsa::Database for TestDB { impl salsa::Database for TestDB {
fn salsa_runtime(&self) -> &salsa::Runtime<TestDB> { fn salsa_runtime(&self) -> &salsa::Runtime<TestDB> {
&self.runtime &self.runtime
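The two `Upcast` impls above are all the concrete test database has to provide: one impl per base trait object the rest of the code wants to upcast to, each a single unsized coercion from `&TestDB` to the trait object (`&*self`). A stripped-down sketch of the same shape under assumed trait names (`AstDb` and `DefDb` stand in for the real database traits):

// Assumed stand-in traits; mirrors the shape of the TestDB impls above.
trait Upcast<T: ?Sized> {
    fn upcast(&self) -> &T;
}

trait AstDb {
    fn parse(&self, file: u32) -> String;
}

trait DefDb {
    fn def_map(&self, krate: u32) -> String;
}

struct TestDb;

impl AstDb for TestDb {
    fn parse(&self, _file: u32) -> String {
        "fn main() {}".to_string()
    }
}

impl DefDb for TestDb {
    fn def_map(&self, _krate: u32) -> String {
        "crate root".to_string()
    }
}

// One impl per base trait object; `&*self` reborrows and unsize-coerces &TestDb.
impl Upcast<dyn AstDb> for TestDb {
    fn upcast(&self) -> &(dyn AstDb + 'static) {
        &*self
    }
}

impl Upcast<dyn DefDb> for TestDb {
    fn upcast(&self) -> &(dyn DefDb + 'static) {
        &*self
    }
}

fn main() {
    let db = TestDb;
    let ast: &dyn AstDb = Upcast::<dyn AstDb>::upcast(&db);
    let defs: &dyn DefDb = Upcast::<dyn DefDb>::upcast(&db);
    assert_eq!(ast.parse(0), "fn main() {}");
    assert_eq!(defs.def_map(0), "crate root");
}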
View file
@ -24,8 +24,8 @@ const CHALK_SOLVER_MAX_SIZE: usize = 10;
const CHALK_SOLVER_FUEL: i32 = 100; const CHALK_SOLVER_FUEL: i32 = 100;
#[derive(Debug, Copy, Clone)] #[derive(Debug, Copy, Clone)]
struct ChalkContext<'a, DB> { struct ChalkContext<'a> {
db: &'a DB, db: &'a dyn HirDatabase,
krate: CrateId, krate: CrateId,
} }
@ -37,7 +37,7 @@ fn create_chalk_solver() -> chalk_solve::Solver<Interner> {
/// Collects impls for the given trait in the whole dependency tree of `krate`. /// Collects impls for the given trait in the whole dependency tree of `krate`.
pub(crate) fn impls_for_trait_query( pub(crate) fn impls_for_trait_query(
db: &impl HirDatabase, db: &dyn HirDatabase,
krate: CrateId, krate: CrateId,
trait_: TraitId, trait_: TraitId,
) -> Arc<[ImplId]> { ) -> Arc<[ImplId]> {
@ -136,7 +136,7 @@ impl TypeWalk for ProjectionPredicate {
/// Solve a trait goal using Chalk. /// Solve a trait goal using Chalk.
pub(crate) fn trait_solve_query( pub(crate) fn trait_solve_query(
db: &impl HirDatabase, db: &dyn HirDatabase,
krate: CrateId, krate: CrateId,
goal: Canonical<InEnvironment<Obligation>>, goal: Canonical<InEnvironment<Obligation>>,
) -> Option<Solution> { ) -> Option<Solution> {
@ -163,7 +163,7 @@ pub(crate) fn trait_solve_query(
} }
fn solve( fn solve(
db: &impl HirDatabase, db: &dyn HirDatabase,
krate: CrateId, krate: CrateId,
goal: &chalk_ir::UCanonical<chalk_ir::InEnvironment<chalk_ir::Goal<Interner>>>, goal: &chalk_ir::UCanonical<chalk_ir::InEnvironment<chalk_ir::Goal<Interner>>>,
) -> Option<chalk_solve::Solution<Interner>> { ) -> Option<chalk_solve::Solution<Interner>> {
@ -188,7 +188,7 @@ fn solve(
} }
fn solution_from_chalk( fn solution_from_chalk(
db: &impl HirDatabase, db: &dyn HirDatabase,
solution: chalk_solve::Solution<Interner>, solution: chalk_solve::Solution<Interner>,
) -> Solution { ) -> Solution {
let convert_subst = |subst: chalk_ir::Canonical<chalk_ir::Substitution<Interner>>| { let convert_subst = |subst: chalk_ir::Canonical<chalk_ir::Substitution<Interner>>| {
View file
@ -26,7 +26,7 @@ pub(super) struct BuiltinImplAssocTyValueData {
} }
pub(super) fn get_builtin_impls( pub(super) fn get_builtin_impls(
db: &impl HirDatabase, db: &dyn HirDatabase,
krate: CrateId, krate: CrateId,
ty: &Ty, ty: &Ty,
// The first argument for the trait, if present // The first argument for the trait, if present
@ -59,7 +59,7 @@ pub(super) fn get_builtin_impls(
} }
fn get_builtin_unsize_impls( fn get_builtin_unsize_impls(
db: &impl HirDatabase, db: &dyn HirDatabase,
krate: CrateId, krate: CrateId,
ty: &Ty, ty: &Ty,
// The first argument for the trait, if present // The first argument for the trait, if present
@ -79,7 +79,7 @@ fn get_builtin_unsize_impls(
// FIXME what about more complicated dyn tys with marker traits? // FIXME what about more complicated dyn tys with marker traits?
if let Some(trait_ref) = ty.dyn_trait_ref() { if let Some(trait_ref) = ty.dyn_trait_ref() {
if trait_ref.trait_ != target_trait.trait_ { if trait_ref.trait_ != target_trait.trait_ {
let super_traits = all_super_traits(db, trait_ref.trait_); let super_traits = all_super_traits(db.upcast(), trait_ref.trait_);
if super_traits.contains(&target_trait.trait_) { if super_traits.contains(&target_trait.trait_) {
callback(Impl::UnsizeToSuperTraitObject(UnsizeToSuperTraitObjectData { callback(Impl::UnsizeToSuperTraitObject(UnsizeToSuperTraitObjectData {
trait_: trait_ref.trait_, trait_: trait_ref.trait_,
@ -94,7 +94,7 @@ fn get_builtin_unsize_impls(
} }
} }
pub(super) fn impl_datum(db: &impl HirDatabase, krate: CrateId, impl_: Impl) -> BuiltinImplData { pub(super) fn impl_datum(db: &dyn HirDatabase, krate: CrateId, impl_: Impl) -> BuiltinImplData {
match impl_ { match impl_ {
Impl::ImplDef(_) => unreachable!(), Impl::ImplDef(_) => unreachable!(),
Impl::ClosureFnTraitImpl(data) => closure_fn_trait_impl_datum(db, krate, data), Impl::ClosureFnTraitImpl(data) => closure_fn_trait_impl_datum(db, krate, data),
@ -107,7 +107,7 @@ pub(super) fn impl_datum(db: &impl HirDatabase, krate: CrateId, impl_: Impl) ->
} }
pub(super) fn associated_ty_value( pub(super) fn associated_ty_value(
db: &impl HirDatabase, db: &dyn HirDatabase,
krate: CrateId, krate: CrateId,
data: AssocTyValue, data: AssocTyValue,
) -> BuiltinImplAssocTyValueData { ) -> BuiltinImplAssocTyValueData {
@ -122,7 +122,7 @@ pub(super) fn associated_ty_value(
// Closure Fn trait impls // Closure Fn trait impls
fn check_closure_fn_trait_impl_prerequisites( fn check_closure_fn_trait_impl_prerequisites(
db: &impl HirDatabase, db: &dyn HirDatabase,
krate: CrateId, krate: CrateId,
data: super::ClosureFnTraitImplData, data: super::ClosureFnTraitImplData,
) -> bool { ) -> bool {
@ -143,7 +143,7 @@ fn check_closure_fn_trait_impl_prerequisites(
} }
fn closure_fn_trait_impl_datum( fn closure_fn_trait_impl_datum(
db: &impl HirDatabase, db: &dyn HirDatabase,
krate: CrateId, krate: CrateId,
data: super::ClosureFnTraitImplData, data: super::ClosureFnTraitImplData,
) -> BuiltinImplData { ) -> BuiltinImplData {
@ -189,7 +189,7 @@ fn closure_fn_trait_impl_datum(
} }
fn closure_fn_trait_output_assoc_ty_value( fn closure_fn_trait_output_assoc_ty_value(
db: &impl HirDatabase, db: &dyn HirDatabase,
krate: CrateId, krate: CrateId,
data: super::ClosureFnTraitImplData, data: super::ClosureFnTraitImplData,
) -> BuiltinImplAssocTyValueData { ) -> BuiltinImplAssocTyValueData {
@ -223,17 +223,17 @@ fn closure_fn_trait_output_assoc_ty_value(
// Array unsizing // Array unsizing
fn check_unsize_impl_prerequisites(db: &impl HirDatabase, krate: CrateId) -> bool { fn check_unsize_impl_prerequisites(db: &dyn HirDatabase, krate: CrateId) -> bool {
// the Unsize trait needs to exist and have two type parameters (Self and T) // the Unsize trait needs to exist and have two type parameters (Self and T)
let unsize_trait = match get_unsize_trait(db, krate) { let unsize_trait = match get_unsize_trait(db, krate) {
Some(t) => t, Some(t) => t,
None => return false, None => return false,
}; };
let generic_params = generics(db, unsize_trait.into()); let generic_params = generics(db.upcast(), unsize_trait.into());
generic_params.len() == 2 generic_params.len() == 2
} }
fn array_unsize_impl_datum(db: &impl HirDatabase, krate: CrateId) -> BuiltinImplData { fn array_unsize_impl_datum(db: &dyn HirDatabase, krate: CrateId) -> BuiltinImplData {
// impl<T> Unsize<[T]> for [T; _] // impl<T> Unsize<[T]> for [T; _]
// (this can be a single impl because we don't distinguish array sizes currently) // (this can be a single impl because we don't distinguish array sizes currently)
@ -260,7 +260,7 @@ fn array_unsize_impl_datum(db: &impl HirDatabase, krate: CrateId) -> BuiltinImpl
// Trait object unsizing // Trait object unsizing
fn trait_object_unsize_impl_datum( fn trait_object_unsize_impl_datum(
db: &impl HirDatabase, db: &dyn HirDatabase,
krate: CrateId, krate: CrateId,
trait_: TraitId, trait_: TraitId,
) -> BuiltinImplData { ) -> BuiltinImplData {
@ -295,7 +295,7 @@ fn trait_object_unsize_impl_datum(
} }
fn super_trait_object_unsize_impl_datum( fn super_trait_object_unsize_impl_datum(
db: &impl HirDatabase, db: &dyn HirDatabase,
krate: CrateId, krate: CrateId,
data: UnsizeToSuperTraitObjectData, data: UnsizeToSuperTraitObjectData,
) -> BuiltinImplData { ) -> BuiltinImplData {
@ -313,7 +313,7 @@ fn super_trait_object_unsize_impl_datum(
let self_bounds = vec![GenericPredicate::Implemented(self_trait_ref.clone())]; let self_bounds = vec![GenericPredicate::Implemented(self_trait_ref.clone())];
// we need to go from our trait to the super trait, substituting type parameters // we need to go from our trait to the super trait, substituting type parameters
let path = crate::utils::find_super_trait_path(db, data.trait_, data.super_trait); let path = crate::utils::find_super_trait_path(db.upcast(), data.trait_, data.super_trait);
let mut current_trait_ref = self_trait_ref; let mut current_trait_ref = self_trait_ref;
for t in path.into_iter().skip(1) { for t in path.into_iter().skip(1) {
@ -344,11 +344,7 @@ fn super_trait_object_unsize_impl_datum(
BuiltinImplData { num_vars, trait_ref, where_clauses: Vec::new(), assoc_ty_values: Vec::new() } BuiltinImplData { num_vars, trait_ref, where_clauses: Vec::new(), assoc_ty_values: Vec::new() }
} }
fn get_fn_trait( fn get_fn_trait(db: &dyn HirDatabase, krate: CrateId, fn_trait: super::FnTrait) -> Option<TraitId> {
db: &impl HirDatabase,
krate: CrateId,
fn_trait: super::FnTrait,
) -> Option<TraitId> {
let target = db.lang_item(krate, fn_trait.lang_item_name().into())?; let target = db.lang_item(krate, fn_trait.lang_item_name().into())?;
match target { match target {
LangItemTarget::TraitId(t) => Some(t), LangItemTarget::TraitId(t) => Some(t),
@ -356,7 +352,7 @@ fn get_fn_trait(
} }
} }
fn get_unsize_trait(db: &impl HirDatabase, krate: CrateId) -> Option<TraitId> { fn get_unsize_trait(db: &dyn HirDatabase, krate: CrateId) -> Option<TraitId> {
let target = db.lang_item(krate, "unsize".into())?; let target = db.lang_item(krate, "unsize".into())?;
match target { match target {
LangItemTarget::TraitId(t) => Some(t), LangItemTarget::TraitId(t) => Some(t),
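Throughout this file the builtin-impl helpers now hold a `&dyn HirDatabase`, while lower-level helpers such as `generics` and `find_super_trait_path` (changed later in this diff) take a `&dyn DefDatabase`; rather than converting one trait object into the other directly, the call sites above route through `db.upcast()`. The snippet below is only a minimal sketch of that explicit-upcast shape, using hypothetical `BaseDb`/`ExtDb` traits instead of the real database traits.

trait BaseDb {
    fn base_query(&self) -> u32;
}

trait ExtDb: BaseDb {
    fn ext_query(&self) -> u32;
    // Hand out a view of the same database as the base trait object.
    fn upcast_base(&self) -> &dyn BaseDb;
}

fn needs_base(db: &dyn BaseDb) -> u32 {
    db.base_query()
}

fn needs_ext(db: &dyn ExtDb) -> u32 {
    // Go through the explicit upcast method, mirroring the `db.upcast()`
    // calls in the hunks above.
    needs_base(db.upcast_base()) + db.ext_query()
}

struct MockDb;

impl BaseDb for MockDb {
    fn base_query(&self) -> u32 {
        1
    }
}

impl ExtDb for MockDb {
    fn ext_query(&self) -> u32 {
        2
    }
    fn upcast_base(&self) -> &dyn BaseDb {
        self
    }
}

fn main() {
    assert_eq!(needs_ext(&MockDb), 3);
}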

View file

@@ -127,11 +127,11 @@ pub type AssociatedTyValue = chalk_rust_ir::AssociatedTyValue<Interner>;
pub(super) trait ToChalk { pub(super) trait ToChalk {
type Chalk; type Chalk;
fn to_chalk(self, db: &impl HirDatabase) -> Self::Chalk; fn to_chalk(self, db: &dyn HirDatabase) -> Self::Chalk;
fn from_chalk(db: &impl HirDatabase, chalk: Self::Chalk) -> Self; fn from_chalk(db: &dyn HirDatabase, chalk: Self::Chalk) -> Self;
} }
pub(super) fn from_chalk<T, ChalkT>(db: &impl HirDatabase, chalk: ChalkT) -> T pub(super) fn from_chalk<T, ChalkT>(db: &dyn HirDatabase, chalk: ChalkT) -> T
where where
T: ToChalk<Chalk = ChalkT>, T: ToChalk<Chalk = ChalkT>,
{ {
@@ -140,7 +140,7 @@ where
impl ToChalk for Ty { impl ToChalk for Ty {
type Chalk = chalk_ir::Ty<Interner>; type Chalk = chalk_ir::Ty<Interner>;
fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::Ty<Interner> { fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::Ty<Interner> {
match self { match self {
Ty::Apply(apply_ty) => { Ty::Apply(apply_ty) => {
let name = apply_ty.ctor.to_chalk(db); let name = apply_ty.ctor.to_chalk(db);
@@ -179,7 +179,7 @@ impl ToChalk for Ty {
} }
} }
} }
fn from_chalk(db: &impl HirDatabase, chalk: chalk_ir::Ty<Interner>) -> Self { fn from_chalk(db: &dyn HirDatabase, chalk: chalk_ir::Ty<Interner>) -> Self {
match chalk.data().clone() { match chalk.data().clone() {
chalk_ir::TyData::Apply(apply_ty) => match apply_ty.name { chalk_ir::TyData::Apply(apply_ty) => match apply_ty.name {
TypeName::Error => Ty::Unknown, TypeName::Error => Ty::Unknown,
@@ -217,11 +217,11 @@ impl ToChalk for Ty {
impl ToChalk for Substs { impl ToChalk for Substs {
type Chalk = chalk_ir::Substitution<Interner>; type Chalk = chalk_ir::Substitution<Interner>;
fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::Substitution<Interner> { fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::Substitution<Interner> {
chalk_ir::Substitution::from(self.iter().map(|ty| ty.clone().to_chalk(db))) chalk_ir::Substitution::from(self.iter().map(|ty| ty.clone().to_chalk(db)))
} }
fn from_chalk(db: &impl HirDatabase, parameters: chalk_ir::Substitution<Interner>) -> Substs { fn from_chalk(db: &dyn HirDatabase, parameters: chalk_ir::Substitution<Interner>) -> Substs {
let tys = parameters let tys = parameters
.into_iter() .into_iter()
.map(|p| match p.ty() { .map(|p| match p.ty() {
@@ -236,13 +236,13 @@ impl ToChalk for Substs {
impl ToChalk for TraitRef { impl ToChalk for TraitRef {
type Chalk = chalk_ir::TraitRef<Interner>; type Chalk = chalk_ir::TraitRef<Interner>;
fn to_chalk(self: TraitRef, db: &impl HirDatabase) -> chalk_ir::TraitRef<Interner> { fn to_chalk(self: TraitRef, db: &dyn HirDatabase) -> chalk_ir::TraitRef<Interner> {
let trait_id = self.trait_.to_chalk(db); let trait_id = self.trait_.to_chalk(db);
let substitution = self.substs.to_chalk(db); let substitution = self.substs.to_chalk(db);
chalk_ir::TraitRef { trait_id, substitution } chalk_ir::TraitRef { trait_id, substitution }
} }
fn from_chalk(db: &impl HirDatabase, trait_ref: chalk_ir::TraitRef<Interner>) -> Self { fn from_chalk(db: &dyn HirDatabase, trait_ref: chalk_ir::TraitRef<Interner>) -> Self {
let trait_ = from_chalk(db, trait_ref.trait_id); let trait_ = from_chalk(db, trait_ref.trait_id);
let substs = from_chalk(db, trait_ref.substitution); let substs = from_chalk(db, trait_ref.substitution);
TraitRef { trait_, substs } TraitRef { trait_, substs }
@@ -252,11 +252,11 @@ impl ToChalk for TraitRef {
impl ToChalk for hir_def::TraitId { impl ToChalk for hir_def::TraitId {
type Chalk = TraitId; type Chalk = TraitId;
fn to_chalk(self, _db: &impl HirDatabase) -> TraitId { fn to_chalk(self, _db: &dyn HirDatabase) -> TraitId {
chalk_ir::TraitId(self.as_intern_id()) chalk_ir::TraitId(self.as_intern_id())
} }
fn from_chalk(_db: &impl HirDatabase, trait_id: TraitId) -> hir_def::TraitId { fn from_chalk(_db: &dyn HirDatabase, trait_id: TraitId) -> hir_def::TraitId {
InternKey::from_intern_id(trait_id.0) InternKey::from_intern_id(trait_id.0)
} }
} }
@@ -264,7 +264,7 @@ impl ToChalk for hir_def::TraitId {
impl ToChalk for TypeCtor { impl ToChalk for TypeCtor {
type Chalk = TypeName<Interner>; type Chalk = TypeName<Interner>;
fn to_chalk(self, db: &impl HirDatabase) -> TypeName<Interner> { fn to_chalk(self, db: &dyn HirDatabase) -> TypeName<Interner> {
match self { match self {
TypeCtor::AssociatedType(type_alias) => { TypeCtor::AssociatedType(type_alias) => {
let type_id = type_alias.to_chalk(db); let type_id = type_alias.to_chalk(db);
@@ -278,7 +278,7 @@ impl ToChalk for TypeCtor {
} }
} }
fn from_chalk(db: &impl HirDatabase, type_name: TypeName<Interner>) -> TypeCtor { fn from_chalk(db: &dyn HirDatabase, type_name: TypeName<Interner>) -> TypeCtor {
match type_name { match type_name {
TypeName::Struct(struct_id) => db.lookup_intern_type_ctor(struct_id.into()), TypeName::Struct(struct_id) => db.lookup_intern_type_ctor(struct_id.into()),
TypeName::AssociatedType(type_id) => TypeCtor::AssociatedType(from_chalk(db, type_id)), TypeName::AssociatedType(type_id) => TypeCtor::AssociatedType(from_chalk(db, type_id)),
@@ -293,11 +293,11 @@ impl ToChalk for TypeCtor {
impl ToChalk for Impl { impl ToChalk for Impl {
type Chalk = ImplId; type Chalk = ImplId;
fn to_chalk(self, db: &impl HirDatabase) -> ImplId { fn to_chalk(self, db: &dyn HirDatabase) -> ImplId {
db.intern_chalk_impl(self).into() db.intern_chalk_impl(self).into()
} }
fn from_chalk(db: &impl HirDatabase, impl_id: ImplId) -> Impl { fn from_chalk(db: &dyn HirDatabase, impl_id: ImplId) -> Impl {
db.lookup_intern_chalk_impl(impl_id.into()) db.lookup_intern_chalk_impl(impl_id.into())
} }
} }
@@ -305,11 +305,11 @@ impl ToChalk for Impl {
impl ToChalk for TypeAliasId { impl ToChalk for TypeAliasId {
type Chalk = AssocTypeId; type Chalk = AssocTypeId;
fn to_chalk(self, _db: &impl HirDatabase) -> AssocTypeId { fn to_chalk(self, _db: &dyn HirDatabase) -> AssocTypeId {
chalk_ir::AssocTypeId(self.as_intern_id()) chalk_ir::AssocTypeId(self.as_intern_id())
} }
fn from_chalk(_db: &impl HirDatabase, type_alias_id: AssocTypeId) -> TypeAliasId { fn from_chalk(_db: &dyn HirDatabase, type_alias_id: AssocTypeId) -> TypeAliasId {
InternKey::from_intern_id(type_alias_id.0) InternKey::from_intern_id(type_alias_id.0)
} }
} }
@@ -317,11 +317,11 @@ impl ToChalk for TypeAliasId {
impl ToChalk for AssocTyValue { impl ToChalk for AssocTyValue {
type Chalk = AssociatedTyValueId; type Chalk = AssociatedTyValueId;
fn to_chalk(self, db: &impl HirDatabase) -> AssociatedTyValueId { fn to_chalk(self, db: &dyn HirDatabase) -> AssociatedTyValueId {
db.intern_assoc_ty_value(self).into() db.intern_assoc_ty_value(self).into()
} }
fn from_chalk(db: &impl HirDatabase, assoc_ty_value_id: AssociatedTyValueId) -> AssocTyValue { fn from_chalk(db: &dyn HirDatabase, assoc_ty_value_id: AssociatedTyValueId) -> AssocTyValue {
db.lookup_intern_assoc_ty_value(assoc_ty_value_id.into()) db.lookup_intern_assoc_ty_value(assoc_ty_value_id.into())
} }
} }
@@ -329,7 +329,7 @@ impl ToChalk for AssocTyValue {
impl ToChalk for GenericPredicate { impl ToChalk for GenericPredicate {
type Chalk = chalk_ir::QuantifiedWhereClause<Interner>; type Chalk = chalk_ir::QuantifiedWhereClause<Interner>;
fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::QuantifiedWhereClause<Interner> { fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::QuantifiedWhereClause<Interner> {
match self { match self {
GenericPredicate::Implemented(trait_ref) => { GenericPredicate::Implemented(trait_ref) => {
make_binders(chalk_ir::WhereClause::Implemented(trait_ref.to_chalk(db)), 0) make_binders(chalk_ir::WhereClause::Implemented(trait_ref.to_chalk(db)), 0)
@@ -346,7 +346,7 @@ impl ToChalk for GenericPredicate {
} }
fn from_chalk( fn from_chalk(
db: &impl HirDatabase, db: &dyn HirDatabase,
where_clause: chalk_ir::QuantifiedWhereClause<Interner>, where_clause: chalk_ir::QuantifiedWhereClause<Interner>,
) -> GenericPredicate { ) -> GenericPredicate {
match where_clause.value { match where_clause.value {
@@ -365,7 +365,7 @@ impl ToChalk for GenericPredicate {
impl ToChalk for ProjectionTy { impl ToChalk for ProjectionTy {
type Chalk = chalk_ir::AliasTy<Interner>; type Chalk = chalk_ir::AliasTy<Interner>;
fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::AliasTy<Interner> { fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::AliasTy<Interner> {
chalk_ir::AliasTy { chalk_ir::AliasTy {
associated_ty_id: self.associated_ty.to_chalk(db), associated_ty_id: self.associated_ty.to_chalk(db),
substitution: self.parameters.to_chalk(db), substitution: self.parameters.to_chalk(db),
@@ -373,7 +373,7 @@ impl ToChalk for ProjectionTy {
} }
fn from_chalk( fn from_chalk(
db: &impl HirDatabase, db: &dyn HirDatabase,
projection_ty: chalk_ir::AliasTy<Interner>, projection_ty: chalk_ir::AliasTy<Interner>,
) -> ProjectionTy { ) -> ProjectionTy {
ProjectionTy { ProjectionTy {
@@ -386,11 +386,11 @@ impl ToChalk for ProjectionTy {
impl ToChalk for super::ProjectionPredicate { impl ToChalk for super::ProjectionPredicate {
type Chalk = chalk_ir::Normalize<Interner>; type Chalk = chalk_ir::Normalize<Interner>;
fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::Normalize<Interner> { fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::Normalize<Interner> {
chalk_ir::Normalize { alias: self.projection_ty.to_chalk(db), ty: self.ty.to_chalk(db) } chalk_ir::Normalize { alias: self.projection_ty.to_chalk(db), ty: self.ty.to_chalk(db) }
} }
fn from_chalk(_db: &impl HirDatabase, _normalize: chalk_ir::Normalize<Interner>) -> Self { fn from_chalk(_db: &dyn HirDatabase, _normalize: chalk_ir::Normalize<Interner>) -> Self {
unimplemented!() unimplemented!()
} }
} }
@@ -398,14 +398,14 @@ impl ToChalk for super::ProjectionPredicate {
impl ToChalk for Obligation { impl ToChalk for Obligation {
type Chalk = chalk_ir::DomainGoal<Interner>; type Chalk = chalk_ir::DomainGoal<Interner>;
fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::DomainGoal<Interner> { fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::DomainGoal<Interner> {
match self { match self {
Obligation::Trait(tr) => tr.to_chalk(db).cast(), Obligation::Trait(tr) => tr.to_chalk(db).cast(),
Obligation::Projection(pr) => pr.to_chalk(db).cast(), Obligation::Projection(pr) => pr.to_chalk(db).cast(),
} }
} }
fn from_chalk(_db: &impl HirDatabase, _goal: chalk_ir::DomainGoal<Interner>) -> Self { fn from_chalk(_db: &dyn HirDatabase, _goal: chalk_ir::DomainGoal<Interner>) -> Self {
unimplemented!() unimplemented!()
} }
} }
@@ -416,13 +416,13 @@ where
{ {
type Chalk = chalk_ir::Canonical<T::Chalk>; type Chalk = chalk_ir::Canonical<T::Chalk>;
fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::Canonical<T::Chalk> { fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::Canonical<T::Chalk> {
let parameter = chalk_ir::ParameterKind::Ty(chalk_ir::UniverseIndex::ROOT); let parameter = chalk_ir::ParameterKind::Ty(chalk_ir::UniverseIndex::ROOT);
let value = self.value.to_chalk(db); let value = self.value.to_chalk(db);
chalk_ir::Canonical { value, binders: vec![parameter; self.num_vars] } chalk_ir::Canonical { value, binders: vec![parameter; self.num_vars] }
} }
fn from_chalk(db: &impl HirDatabase, canonical: chalk_ir::Canonical<T::Chalk>) -> Canonical<T> { fn from_chalk(db: &dyn HirDatabase, canonical: chalk_ir::Canonical<T::Chalk>) -> Canonical<T> {
Canonical { num_vars: canonical.binders.len(), value: from_chalk(db, canonical.value) } Canonical { num_vars: canonical.binders.len(), value: from_chalk(db, canonical.value) }
} }
} }
@@ -430,7 +430,7 @@ where
impl ToChalk for Arc<super::TraitEnvironment> { impl ToChalk for Arc<super::TraitEnvironment> {
type Chalk = chalk_ir::Environment<Interner>; type Chalk = chalk_ir::Environment<Interner>;
fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::Environment<Interner> { fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::Environment<Interner> {
let mut clauses = Vec::new(); let mut clauses = Vec::new();
for pred in &self.predicates { for pred in &self.predicates {
if pred.is_error() { if pred.is_error() {
@@ -445,7 +445,7 @@ impl ToChalk for Arc<super::TraitEnvironment> {
} }
fn from_chalk( fn from_chalk(
_db: &impl HirDatabase, _db: &dyn HirDatabase,
_env: chalk_ir::Environment<Interner>, _env: chalk_ir::Environment<Interner>,
) -> Arc<super::TraitEnvironment> { ) -> Arc<super::TraitEnvironment> {
unimplemented!() unimplemented!()
@@ -458,7 +458,7 @@ where
{ {
type Chalk = chalk_ir::InEnvironment<T::Chalk>; type Chalk = chalk_ir::InEnvironment<T::Chalk>;
fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::InEnvironment<T::Chalk> { fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::InEnvironment<T::Chalk> {
chalk_ir::InEnvironment { chalk_ir::InEnvironment {
environment: self.environment.to_chalk(db), environment: self.environment.to_chalk(db),
goal: self.value.to_chalk(db), goal: self.value.to_chalk(db),
@@ -466,7 +466,7 @@ where
} }
fn from_chalk( fn from_chalk(
db: &impl HirDatabase, db: &dyn HirDatabase,
in_env: chalk_ir::InEnvironment<T::Chalk>, in_env: chalk_ir::InEnvironment<T::Chalk>,
) -> super::InEnvironment<T> { ) -> super::InEnvironment<T> {
super::InEnvironment { super::InEnvironment {
@@ -479,7 +479,7 @@ where
impl ToChalk for builtin::BuiltinImplData { impl ToChalk for builtin::BuiltinImplData {
type Chalk = ImplDatum; type Chalk = ImplDatum;
fn to_chalk(self, db: &impl HirDatabase) -> ImplDatum { fn to_chalk(self, db: &dyn HirDatabase) -> ImplDatum {
let impl_type = chalk_rust_ir::ImplType::External; let impl_type = chalk_rust_ir::ImplType::External;
let where_clauses = self.where_clauses.into_iter().map(|w| w.to_chalk(db)).collect(); let where_clauses = self.where_clauses.into_iter().map(|w| w.to_chalk(db)).collect();
@@ -495,7 +495,7 @@ impl ToChalk for builtin::BuiltinImplData {
} }
} }
fn from_chalk(_db: &impl HirDatabase, _data: ImplDatum) -> Self { fn from_chalk(_db: &dyn HirDatabase, _data: ImplDatum) -> Self {
unimplemented!() unimplemented!()
} }
} }
@@ -503,7 +503,7 @@ impl ToChalk for builtin::BuiltinImplData {
impl ToChalk for builtin::BuiltinImplAssocTyValueData { impl ToChalk for builtin::BuiltinImplAssocTyValueData {
type Chalk = AssociatedTyValue; type Chalk = AssociatedTyValue;
fn to_chalk(self, db: &impl HirDatabase) -> AssociatedTyValue { fn to_chalk(self, db: &dyn HirDatabase) -> AssociatedTyValue {
let value_bound = chalk_rust_ir::AssociatedTyValueBound { ty: self.value.to_chalk(db) }; let value_bound = chalk_rust_ir::AssociatedTyValueBound { ty: self.value.to_chalk(db) };
chalk_rust_ir::AssociatedTyValue { chalk_rust_ir::AssociatedTyValue {
@@ -514,7 +514,7 @@ impl ToChalk for builtin::BuiltinImplAssocTyValueData {
} }
fn from_chalk( fn from_chalk(
_db: &impl HirDatabase, _db: &dyn HirDatabase,
_data: AssociatedTyValue, _data: AssociatedTyValue,
) -> builtin::BuiltinImplAssocTyValueData { ) -> builtin::BuiltinImplAssocTyValueData {
unimplemented!() unimplemented!()
@@ -529,7 +529,7 @@ fn make_binders<T>(value: T, num_vars: usize) -> chalk_ir::Binders<T> {
} }
fn convert_where_clauses( fn convert_where_clauses(
db: &impl HirDatabase, db: &dyn HirDatabase,
def: GenericDefId, def: GenericDefId,
substs: &Substs, substs: &Substs,
) -> Vec<chalk_ir::QuantifiedWhereClause<Interner>> { ) -> Vec<chalk_ir::QuantifiedWhereClause<Interner>> {
@@ -545,10 +545,7 @@ fn convert_where_clauses(
result result
} }
impl<'a, DB> chalk_solve::RustIrDatabase<Interner> for ChalkContext<'a, DB> impl<'a> chalk_solve::RustIrDatabase<Interner> for ChalkContext<'a> {
where
DB: HirDatabase,
{
fn associated_ty_data(&self, id: AssocTypeId) -> Arc<AssociatedTyDatum> { fn associated_ty_data(&self, id: AssocTypeId) -> Arc<AssociatedTyDatum> {
self.db.associated_ty_data(id) self.db.associated_ty_data(id)
} }
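With the database behind a trait object, `ChalkContext` no longer needs its `DB` type parameter. The struct's fields are not shown in this hunk, so what follows is only a sketch of the shape of that change, with hypothetical names (`Db`, `Context`), not the real `ChalkContext`.

trait Db {
    fn answer(&self) -> u32;
}

// Before: roughly `struct Context<'a, DB: Db> { db: &'a DB }`, generic over
// the concrete database. After: one non-generic struct over a trait object.
struct Context<'a> {
    db: &'a dyn Db,
}

impl<'a> Context<'a> {
    fn answer(&self) -> u32 {
        self.db.answer()
    }
}

struct MockDb;

impl Db for MockDb {
    fn answer(&self) -> u32 {
        42
    }
}

fn main() {
    let db = MockDb;
    let ctx = Context { db: &db };
    assert_eq!(ctx.answer(), 42);
}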
@@ -618,16 +615,16 @@ where
} }
pub(crate) fn associated_ty_data_query( pub(crate) fn associated_ty_data_query(
db: &impl HirDatabase, db: &dyn HirDatabase,
id: AssocTypeId, id: AssocTypeId,
) -> Arc<AssociatedTyDatum> { ) -> Arc<AssociatedTyDatum> {
debug!("associated_ty_data {:?}", id); debug!("associated_ty_data {:?}", id);
let type_alias: TypeAliasId = from_chalk(db, id); let type_alias: TypeAliasId = from_chalk(db, id);
let trait_ = match type_alias.lookup(db).container { let trait_ = match type_alias.lookup(db.upcast()).container {
AssocContainerId::TraitId(t) => t, AssocContainerId::TraitId(t) => t,
_ => panic!("associated type not in trait"), _ => panic!("associated type not in trait"),
}; };
let generic_params = generics(db, type_alias.into()); let generic_params = generics(db.upcast(), type_alias.into());
let bound_data = chalk_rust_ir::AssociatedTyDatumBound { let bound_data = chalk_rust_ir::AssociatedTyDatumBound {
// FIXME add bounds and where clauses // FIXME add bounds and where clauses
bounds: vec![], bounds: vec![],
@@ -643,7 +640,7 @@ pub(crate) fn associated_ty_data_query(
} }
pub(crate) fn trait_datum_query( pub(crate) fn trait_datum_query(
db: &impl HirDatabase, db: &dyn HirDatabase,
krate: CrateId, krate: CrateId,
trait_id: TraitId, trait_id: TraitId,
) -> Arc<TraitDatum> { ) -> Arc<TraitDatum> {
@@ -651,11 +648,11 @@ pub(crate) fn trait_datum_query(
let trait_: hir_def::TraitId = from_chalk(db, trait_id); let trait_: hir_def::TraitId = from_chalk(db, trait_id);
let trait_data = db.trait_data(trait_); let trait_data = db.trait_data(trait_);
debug!("trait {:?} = {:?}", trait_id, trait_data.name); debug!("trait {:?} = {:?}", trait_id, trait_data.name);
let generic_params = generics(db, trait_.into()); let generic_params = generics(db.upcast(), trait_.into());
let bound_vars = Substs::bound_vars(&generic_params); let bound_vars = Substs::bound_vars(&generic_params);
let flags = chalk_rust_ir::TraitFlags { let flags = chalk_rust_ir::TraitFlags {
auto: trait_data.auto, auto: trait_data.auto,
upstream: trait_.lookup(db).container.module(db).krate != krate, upstream: trait_.lookup(db.upcast()).container.module(db.upcast()).krate != krate,
non_enumerable: true, non_enumerable: true,
coinductive: false, // only relevant for Chalk testing coinductive: false, // only relevant for Chalk testing
// FIXME set these flags correctly // FIXME set these flags correctly
@@ -676,7 +673,7 @@ pub(crate) fn trait_datum_query(
} }
pub(crate) fn struct_datum_query( pub(crate) fn struct_datum_query(
db: &impl HirDatabase, db: &dyn HirDatabase,
krate: CrateId, krate: CrateId,
struct_id: StructId, struct_id: StructId,
) -> Arc<StructDatum> { ) -> Arc<StructDatum> {
@@ -688,7 +685,7 @@ pub(crate) fn struct_datum_query(
let where_clauses = type_ctor let where_clauses = type_ctor
.as_generic_def() .as_generic_def()
.map(|generic_def| { .map(|generic_def| {
let generic_params = generics(db, generic_def); let generic_params = generics(db.upcast(), generic_def);
let bound_vars = Substs::bound_vars(&generic_params); let bound_vars = Substs::bound_vars(&generic_params);
convert_where_clauses(db, generic_def, &bound_vars) convert_where_clauses(db, generic_def, &bound_vars)
}) })
@@ -708,7 +705,7 @@ pub(crate) fn struct_datum_query(
} }
pub(crate) fn impl_datum_query( pub(crate) fn impl_datum_query(
db: &impl HirDatabase, db: &dyn HirDatabase,
krate: CrateId, krate: CrateId,
impl_id: ImplId, impl_id: ImplId,
) -> Arc<ImplDatum> { ) -> Arc<ImplDatum> {
@@ -722,7 +719,7 @@ pub(crate) fn impl_datum_query(
} }
fn impl_def_datum( fn impl_def_datum(
db: &impl HirDatabase, db: &dyn HirDatabase,
krate: CrateId, krate: CrateId,
chalk_id: ImplId, chalk_id: ImplId,
impl_id: hir_def::ImplId, impl_id: hir_def::ImplId,
@@ -734,10 +731,10 @@ fn impl_def_datum(
.value; .value;
let impl_data = db.impl_data(impl_id); let impl_data = db.impl_data(impl_id);
let generic_params = generics(db, impl_id.into()); let generic_params = generics(db.upcast(), impl_id.into());
let bound_vars = Substs::bound_vars(&generic_params); let bound_vars = Substs::bound_vars(&generic_params);
let trait_ = trait_ref.trait_; let trait_ = trait_ref.trait_;
let impl_type = if impl_id.lookup(db).container.module(db).krate == krate { let impl_type = if impl_id.lookup(db.upcast()).container.module(db.upcast()).krate == krate {
chalk_rust_ir::ImplType::Local chalk_rust_ir::ImplType::Local
} else { } else {
chalk_rust_ir::ImplType::External chalk_rust_ir::ImplType::External
@@ -786,7 +783,7 @@ fn impl_def_datum(
} }
pub(crate) fn associated_ty_value_query( pub(crate) fn associated_ty_value_query(
db: &impl HirDatabase, db: &dyn HirDatabase,
krate: CrateId, krate: CrateId,
id: AssociatedTyValueId, id: AssociatedTyValueId,
) -> Arc<AssociatedTyValue> { ) -> Arc<AssociatedTyValue> {
@@ -800,12 +797,12 @@ pub(crate) fn associated_ty_value_query(
} }
fn type_alias_associated_ty_value( fn type_alias_associated_ty_value(
db: &impl HirDatabase, db: &dyn HirDatabase,
_krate: CrateId, _krate: CrateId,
type_alias: TypeAliasId, type_alias: TypeAliasId,
) -> Arc<AssociatedTyValue> { ) -> Arc<AssociatedTyValue> {
let type_alias_data = db.type_alias_data(type_alias); let type_alias_data = db.type_alias_data(type_alias);
let impl_id = match type_alias.lookup(db).container { let impl_id = match type_alias.lookup(db.upcast()).container {
AssocContainerId::ImplId(it) => it, AssocContainerId::ImplId(it) => it,
_ => panic!("assoc ty value should be in impl"), _ => panic!("assoc ty value should be in impl"),
}; };
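The rest of this file is the same mechanical change: every `ToChalk` method and query function takes `&dyn HirDatabase` instead of `&impl HirDatabase`, so each is compiled once against a vtable rather than being monomorphised for every concrete database type. Below is a minimal, self-contained sketch of the difference between the two signatures, using a hypothetical `Db` trait and mock database, not the real API.

trait Db {
    fn file_text(&self, file_id: u32) -> String;
}

// Generic version: a separate copy is generated for every concrete database
// type it is called with.
fn line_count_generic(db: &impl Db, file_id: u32) -> usize {
    db.file_text(file_id).lines().count()
}

// Trait-object version: a single copy, dispatched through the vtable.
fn line_count_dyn(db: &dyn Db, file_id: u32) -> usize {
    db.file_text(file_id).lines().count()
}

struct MockDb;

impl Db for MockDb {
    fn file_text(&self, _file_id: u32) -> String {
        "fn main() {}\n".to_string()
    }
}

fn main() {
    let db = MockDb;
    assert_eq!(line_count_generic(&db, 0), 1);
    assert_eq!(line_count_dyn(&db, 0), 1);
}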

View file

@@ -14,7 +14,7 @@ use hir_def::{
}; };
use hir_expand::name::{name, Name}; use hir_expand::name::{name, Name};
fn direct_super_traits(db: &impl DefDatabase, trait_: TraitId) -> Vec<TraitId> { fn direct_super_traits(db: &dyn DefDatabase, trait_: TraitId) -> Vec<TraitId> {
let resolver = trait_.resolver(db); let resolver = trait_.resolver(db);
// returning the iterator directly doesn't easily work because of // returning the iterator directly doesn't easily work because of
// lifetime problems, but since there usually shouldn't be more than a // lifetime problems, but since there usually shouldn't be more than a
@@ -43,7 +43,7 @@ fn direct_super_traits(db: &impl DefDatabase, trait_: TraitId) -> Vec<TraitId> {
/// Returns an iterator over the whole super trait hierarchy (including the /// Returns an iterator over the whole super trait hierarchy (including the
/// trait itself). /// trait itself).
pub(super) fn all_super_traits(db: &impl DefDatabase, trait_: TraitId) -> Vec<TraitId> { pub(super) fn all_super_traits(db: &dyn DefDatabase, trait_: TraitId) -> Vec<TraitId> {
// we need to take care a bit here to avoid infinite loops in case of cycles // we need to take care a bit here to avoid infinite loops in case of cycles
// (i.e. if we have `trait A: B; trait B: A;`) // (i.e. if we have `trait A: B; trait B: A;`)
let mut result = vec![trait_]; let mut result = vec![trait_];
@@ -65,7 +65,7 @@ pub(super) fn all_super_traits(db: &impl DefDatabase, trait_: TraitId) -> Vec<Tr
/// Finds a path from a trait to one of its super traits. Returns an empty /// Finds a path from a trait to one of its super traits. Returns an empty
/// vector if there is no path. /// vector if there is no path.
pub(super) fn find_super_trait_path( pub(super) fn find_super_trait_path(
db: &impl DefDatabase, db: &dyn DefDatabase,
trait_: TraitId, trait_: TraitId,
super_trait: TraitId, super_trait: TraitId,
) -> Vec<TraitId> { ) -> Vec<TraitId> {
@@ -73,7 +73,7 @@ pub(super) fn find_super_trait_path(
result.push(trait_); result.push(trait_);
return if go(db, super_trait, &mut result) { result } else { Vec::new() }; return if go(db, super_trait, &mut result) { result } else { Vec::new() };
fn go(db: &impl DefDatabase, super_trait: TraitId, path: &mut Vec<TraitId>) -> bool { fn go(db: &dyn DefDatabase, super_trait: TraitId, path: &mut Vec<TraitId>) -> bool {
let trait_ = *path.last().unwrap(); let trait_ = *path.last().unwrap();
if trait_ == super_trait { if trait_ == super_trait {
return true; return true;
@@ -95,7 +95,7 @@ pub(super) fn find_super_trait_path(
} }
pub(super) fn associated_type_by_name_including_super_traits( pub(super) fn associated_type_by_name_including_super_traits(
db: &impl DefDatabase, db: &dyn DefDatabase,
trait_: TraitId, trait_: TraitId,
name: &Name, name: &Name,
) -> Option<TypeAliasId> { ) -> Option<TypeAliasId> {
@@ -104,7 +104,7 @@ pub(super) fn associated_type_by_name_including_super_traits(
.find_map(|t| db.trait_data(t).associated_type_by_name(name)) .find_map(|t| db.trait_data(t).associated_type_by_name(name))
} }
pub(super) fn variant_data(db: &impl DefDatabase, var: VariantId) -> Arc<VariantData> { pub(super) fn variant_data(db: &dyn DefDatabase, var: VariantId) -> Arc<VariantData> {
match var { match var {
VariantId::StructId(it) => db.struct_data(it).variant_data.clone(), VariantId::StructId(it) => db.struct_data(it).variant_data.clone(),
VariantId::UnionId(it) => db.union_data(it).variant_data.clone(), VariantId::UnionId(it) => db.union_data(it).variant_data.clone(),
@@ -123,7 +123,7 @@ pub(crate) fn make_mut_slice<T: Clone>(a: &mut Arc<[T]>) -> &mut [T] {
Arc::get_mut(a).unwrap() Arc::get_mut(a).unwrap()
} }
pub(crate) fn generics(db: &impl DefDatabase, def: GenericDefId) -> Generics { pub(crate) fn generics(db: &dyn DefDatabase, def: GenericDefId) -> Generics {
let parent_generics = parent_generic_def(db, def).map(|def| Box::new(generics(db, def))); let parent_generics = parent_generic_def(db, def).map(|def| Box::new(generics(db, def)));
Generics { def, params: db.generic_params(def), parent_generics } Generics { def, params: db.generic_params(def), parent_generics }
} }
@@ -222,7 +222,7 @@ impl Generics {
} }
} }
fn parent_generic_def(db: &impl DefDatabase, def: GenericDefId) -> Option<GenericDefId> { fn parent_generic_def(db: &dyn DefDatabase, def: GenericDefId) -> Option<GenericDefId> {
let container = match def { let container = match def {
GenericDefId::FunctionId(it) => it.lookup(db).container, GenericDefId::FunctionId(it) => it.lookup(db).container,
GenericDefId::TypeAliasId(it) => it.lookup(db).container, GenericDefId::TypeAliasId(it) => it.lookup(db).container,
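`all_super_traits` above keeps its guard against super-trait cycles (`trait A: B; trait B: A;`). The snippet below is not the rust-analyzer implementation, just a self-contained sketch of that kind of cycle-safe worklist, run over a toy `u32` trait-id map instead of the real `TraitId` and trait-data queries.

use std::collections::HashMap;

fn all_super_traits(direct: &HashMap<u32, Vec<u32>>, trait_: u32) -> Vec<u32> {
    // The trait itself is part of the result; the `contains` check is what
    // keeps a cyclic hierarchy from looping forever.
    let mut result = vec![trait_];
    let mut i = 0;
    while i < result.len() {
        let current = result[i];
        for &super_trait in direct.get(&current).into_iter().flatten() {
            if !result.contains(&super_trait) {
                result.push(super_trait);
            }
        }
        i += 1;
    }
    result
}

fn main() {
    let mut direct = HashMap::new();
    direct.insert(1, vec![2]); // trait 1: 2
    direct.insert(2, vec![1]); // trait 2: 1 -- a cycle
    assert_eq!(all_super_traits(&direct, 1), vec![1, 2]);
}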

View file

@@ -14,10 +14,11 @@ mod wasm_shims;
use std::sync::Arc; use std::sync::Arc;
use hir::db::{AstDatabase, DefDatabase};
use ra_db::{ use ra_db::{
salsa::{self, Database, Durability}, salsa::{self, Database, Durability},
Canceled, CheckCanceled, CrateId, FileId, FileLoader, FileLoaderDelegate, RelativePath, Canceled, CheckCanceled, CrateId, FileId, FileLoader, FileLoaderDelegate, RelativePath,
SourceDatabase, SourceRootId, SourceDatabase, SourceRootId, Upcast,
}; };
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
@@ -41,6 +42,18 @@ pub struct RootDatabase {
pub last_gc_check: crate::wasm_shims::Instant, pub last_gc_check: crate::wasm_shims::Instant,
} }
impl Upcast<dyn AstDatabase> for RootDatabase {
fn upcast(&self) -> &(dyn AstDatabase + 'static) {
&*self
}
}
impl Upcast<dyn DefDatabase> for RootDatabase {
fn upcast(&self) -> &(dyn DefDatabase + 'static) {
&*self
}
}
impl FileLoader for RootDatabase { impl FileLoader for RootDatabase {
fn file_text(&self, file_id: FileId) -> Arc<String> { fn file_text(&self, file_id: FileId) -> Arc<String> {
FileLoaderDelegate(self).file_text(file_id) FileLoaderDelegate(self).file_text(file_id)
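The `Upcast` impls added to `RootDatabase` above are one-liners because `&*self` just re-borrows the concrete database and lets the reference unsize-coerce into the requested trait object. Here is a small sketch of that coercion with hypothetical names (`AstSource`, `Db`), not the real database traits.

trait AstSource {
    fn parse(&self) -> usize;
}

struct Db {
    text: String,
}

impl AstSource for Db {
    fn parse(&self) -> usize {
        self.text.len()
    }
}

impl Db {
    // Same shape as the `upcast` methods above: re-borrow the concrete type
    // and let `&Db` coerce to the trait object.
    fn as_ast_source(&self) -> &(dyn AstSource + 'static) {
        &*self
    }
}

fn main() {
    let db = Db { text: "fn main() {}".to_string() };
    let dyn_db: &dyn AstSource = db.as_ast_source();
    assert_eq!(dyn_db.parse(), 12);
}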