Merge branch 'master' of https://github.com/rust-analyzer/rust-analyzer into feature/themes

Seivan Heidari 2019-11-21 01:11:41 +01:00
commit 358a1bcd70
82 changed files with 1457 additions and 1073 deletions

View file

@@ -2,3 +2,4 @@
 xtask = "run --package xtask --bin xtask --"
 install-ra = "run --package xtask --bin xtask -- install" # for backwards compat
 tq = "test -- -q"
+qt = "tq"
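The added `qt` alias simply points at the existing `tq` alias, presumably as a letter-order-agnostic shorthand for the quiet test run (`cargo test -- -q`).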

View file

@@ -58,6 +58,7 @@ https://rust-lang.zulipchat.com/#narrow/stream/185405-t-compiler.2Frls-2.2E0
 * API docs: https://rust-analyzer.github.io/rust-analyzer/ra_ide_api/

 ## License

 Rust analyzer is primarily distributed under the terms of both the MIT

View file

@@ -174,7 +174,7 @@ fn resolve_target_trait_def(
         .path()?;

     match analyzer.resolve_path(db, &ast_path) {
-        Some(hir::PathResolution::Def(hir::ModuleDef::Trait(def))) => Some(def.source(db).ast),
+        Some(hir::PathResolution::Def(hir::ModuleDef::Trait(def))) => Some(def.source(db).value),
         _ => None,
     }
 }

View file

@@ -141,7 +141,7 @@ fn find_struct_impl(
     })?;

     let struct_ty = {
-        let src = hir::Source { file_id: ctx.frange.file_id.into(), ast: strukt.clone() };
+        let src = hir::Source { file_id: ctx.frange.file_id.into(), value: strukt.clone() };
         hir::Struct::from_source(db, src).unwrap().ty(db)
     };
@@ -152,7 +152,7 @@ fn find_struct_impl(
             return false;
         }

-        let src = hir::Source { file_id: ctx.frange.file_id.into(), ast: impl_blk.clone() };
+        let src = hir::Source { file_id: ctx.frange.file_id.into(), value: impl_blk.clone() };
         let blk = hir::ImplBlock::from_source(db, src).unwrap();
         let same_ty = blk.target_ty(db) == struct_ty;

View file

@@ -84,7 +84,7 @@ fn resolve_enum_def(
     let expr_ty = analyzer.type_of(db, &expr)?;

     analyzer.autoderef(db, expr_ty).find_map(|ty| match ty.as_adt() {
-        Some((Adt::Enum(e), _)) => Some(e.source(db).ast),
+        Some((Adt::Enum(e), _)) => Some(e.source(db).value),
         _ => None,
     })
 }

View file

@@ -66,8 +66,8 @@ fn build_match_expr(
 fn format_arm(block: &ast::BlockExpr) -> String {
     match extract_trivial_expression(block) {
-        None => block.syntax().text().to_string(),
-        Some(e) => format!("{},", e.syntax().text()),
+        Some(e) if !e.syntax().text().contains_char('\n') => format!("{},", e.syntax().text()),
+        _ => block.syntax().text().to_string(),
     }
 }
@@ -102,6 +102,34 @@ impl VariantData {
         )
     }

+    #[test]
+    fn test_replace_if_let_with_match_doesnt_unwrap_multiline_expressions() {
+        check_assist(
+            replace_if_let_with_match,
+            "
+fn foo() {
+    if <|>let VariantData::Struct(..) = a {
+        bar(
+            123
+        )
+    } else {
+        false
+    }
+} ",
+            "
+fn foo() {
+    <|>match a {
+        VariantData::Struct(..) => {
+            bar(
+                123
+            )
+        }
+        _ => false,
+    }
+} ",
+        )
+    }
+
     #[test]
     fn replace_if_let_with_match_target() {
         check_assist_target(

View file

@@ -98,7 +98,7 @@ pub fn run(
                 let src = f.source(db);
                 let original_file = src.file_id.original_file(db);
                 let path = db.file_relative_path(original_file);
-                let syntax_range = src.ast.syntax().text_range();
+                let syntax_range = src.value.syntax().text_range();
                 write!(msg, " ({:?} {})", path, syntax_range).unwrap();
             }
             bar.set_message(&msg);
@@ -135,7 +135,7 @@ pub fn run(
                     let path = db.file_relative_path(original_file);
                     let line_index = host.analysis().file_line_index(original_file).unwrap();
                     let text_range = src
-                        .ast
+                        .value
                         .either(|it| it.syntax().text_range(), |it| it.syntax().text_range());
                     let (start, end) = (
                         line_index.line_col(text_range.start()),

View file

@@ -38,9 +38,6 @@ fn prev_tokens(token: SyntaxToken) -> impl Iterator<Item = SyntaxToken> {
 pub fn extract_trivial_expression(expr: &ast::BlockExpr) -> Option<ast::Expr> {
     let block = expr.block()?;
     let expr = block.expr()?;
-    if expr.syntax().text().contains_char('\n') {
-        return None;
-    }
     let non_trivial_children = block.syntax().children().filter(|it| match it.kind() {
         WHITESPACE | T!['{'] | T!['}'] => false,
         _ => it != expr.syntax(),

View file

@@ -1,54 +0,0 @@
-//! This module contains the implementation details of the HIR for ADTs, i.e.
-//! structs and enums (and unions).
-
-use std::sync::Arc;
-
-use hir_def::adt::VariantData;
-
-use crate::{
-    db::{DefDatabase, HirDatabase},
-    EnumVariant, Module, Name, Struct, StructField,
-};
-
-impl Struct {
-    pub(crate) fn variant_data(self, db: &impl DefDatabase) -> Arc<VariantData> {
-        db.struct_data(self.id.into()).variant_data.clone()
-    }
-}
-
-#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
-pub enum VariantDef {
-    Struct(Struct),
-    EnumVariant(EnumVariant),
-}
-impl_froms!(VariantDef: Struct, EnumVariant);
-
-impl VariantDef {
-    pub fn fields(self, db: &impl HirDatabase) -> Vec<StructField> {
-        match self {
-            VariantDef::Struct(it) => it.fields(db),
-            VariantDef::EnumVariant(it) => it.fields(db),
-        }
-    }
-
-    pub fn field(self, db: &impl HirDatabase, name: &Name) -> Option<StructField> {
-        match self {
-            VariantDef::Struct(it) => it.field(db, name),
-            VariantDef::EnumVariant(it) => it.field(db, name),
-        }
-    }
-
-    pub fn module(self, db: &impl HirDatabase) -> Module {
-        match self {
-            VariantDef::Struct(it) => it.module(db),
-            VariantDef::EnumVariant(it) => it.module(db),
-        }
-    }
-
-    pub(crate) fn variant_data(self, db: &impl DefDatabase) -> Arc<VariantData> {
-        match self {
-            VariantDef::Struct(it) => it.variant_data(db),
-            VariantDef::EnumVariant(it) => it.variant_data(db),
-        }
-    }
-}

View file

@ -10,8 +10,10 @@ use hir_def::{
adt::VariantData, adt::VariantData,
body::scope::ExprScopes, body::scope::ExprScopes,
builtin_type::BuiltinType, builtin_type::BuiltinType,
traits::TraitData,
type_ref::{Mutability, TypeRef}, type_ref::{Mutability, TypeRef},
CrateModuleId, ImplId, LocalEnumVariantId, LocalStructFieldId, ModuleId, UnionId, AssocItemId, ContainerId, CrateModuleId, HasModule, ImplId, LocalEnumVariantId,
LocalStructFieldId, Lookup, ModuleId, UnionId,
}; };
use hir_expand::{ use hir_expand::{
diagnostics::DiagnosticSink, diagnostics::DiagnosticSink,
@ -21,7 +23,6 @@ use ra_db::{CrateId, Edition};
use ra_syntax::ast::{self, NameOwner, TypeAscriptionOwner}; use ra_syntax::ast::{self, NameOwner, TypeAscriptionOwner};
use crate::{ use crate::{
adt::VariantDef,
db::{AstDatabase, DefDatabase, HirDatabase}, db::{AstDatabase, DefDatabase, HirDatabase},
expr::{BindingAnnotation, Body, BodySourceMap, ExprValidator, Pat, PatId}, expr::{BindingAnnotation, Body, BodySourceMap, ExprValidator, Pat, PatId},
generics::{GenericDef, HasGenericParams}, generics::{GenericDef, HasGenericParams},
@ -29,8 +30,7 @@ use crate::{
AstItemDef, ConstId, EnumId, FunctionId, MacroDefId, StaticId, StructId, TraitId, AstItemDef, ConstId, EnumId, FunctionId, MacroDefId, StaticId, StructId, TraitId,
TypeAliasId, TypeAliasId,
}, },
resolve::{Resolver, Scope, TypeNs}, resolve::{HasResolver, TypeNs},
traits::TraitData,
ty::{InferenceResult, Namespace, TraitRef}, ty::{InferenceResult, Namespace, TraitRef},
Either, HasSource, ImportId, Name, ScopeDef, Source, Ty, Either, HasSource, ImportId, Name, ScopeDef, Source, Ty,
}; };
@ -139,7 +139,7 @@ impl Module {
) -> Either<ast::UseTree, ast::ExternCrateItem> { ) -> Either<ast::UseTree, ast::ExternCrateItem> {
let src = self.definition_source(db); let src = self.definition_source(db);
let (_, source_map) = db.raw_items_with_source_map(src.file_id); let (_, source_map) = db.raw_items_with_source_map(src.file_id);
source_map.get(&src.ast, import) source_map.get(&src.value, import)
} }
/// Returns the crate this module is part of. /// Returns the crate this module is part of.
@ -206,7 +206,7 @@ impl Module {
crate::ModuleDef::Function(f) => f.diagnostics(db, sink), crate::ModuleDef::Function(f) => f.diagnostics(db, sink),
crate::ModuleDef::Module(m) => { crate::ModuleDef::Module(m) => {
// Only add diagnostics from inline modules // Only add diagnostics from inline modules
if let ModuleSource::Module(_) = m.definition_source(db).ast { if let ModuleSource::Module(_) = m.definition_source(db).value {
m.diagnostics(db, sink) m.diagnostics(db, sink)
} }
} }
@ -223,22 +223,9 @@ impl Module {
} }
} }
pub(crate) fn resolver(self, db: &impl DefDatabase) -> Resolver {
let def_map = db.crate_def_map(self.id.krate);
Resolver::default().push_module_scope(def_map, self.id.module_id)
}
pub fn declarations(self, db: &impl DefDatabase) -> Vec<ModuleDef> { pub fn declarations(self, db: &impl DefDatabase) -> Vec<ModuleDef> {
let def_map = db.crate_def_map(self.id.krate); let def_map = db.crate_def_map(self.id.krate);
def_map[self.id.module_id] def_map[self.id.module_id].scope.declarations().map(ModuleDef::from).collect()
.scope
.entries()
.filter_map(|(_name, res)| if res.import.is_none() { Some(res.def) } else { None })
.flat_map(|per_ns| {
per_ns.take_types().into_iter().chain(per_ns.take_values().into_iter())
})
.map(ModuleDef::from)
.collect()
} }
pub fn impl_blocks(self, db: &impl DefDatabase) -> Vec<ImplBlock> { pub fn impl_blocks(self, db: &impl DefDatabase) -> Vec<ImplBlock> {
@ -323,15 +310,8 @@ impl Struct {
db.type_for_def(self.into(), Namespace::Values) db.type_for_def(self.into(), Namespace::Values)
} }
// FIXME move to a more general type fn variant_data(self, db: &impl DefDatabase) -> Arc<VariantData> {
/// Builds a resolver for type references inside this struct. db.struct_data(self.id.into()).variant_data.clone()
pub(crate) fn resolver(self, db: &impl HirDatabase) -> Resolver {
// take the outer scope...
let r = self.module(db).resolver(db);
// ...and add generic params, if present
let p = self.generic_params(db);
let r = if !p.params.is_empty() { r.push_generic_params_scope(p) } else { r };
r
} }
} }
@ -345,24 +325,13 @@ impl Union {
db.struct_data(self.id.into()).name.clone() db.struct_data(self.id.into()).name.clone()
} }
pub fn module(self, db: &impl HirDatabase) -> Module { pub fn module(self, db: &impl DefDatabase) -> Module {
Module { id: self.id.0.module(db) } Module { id: self.id.0.module(db) }
} }
pub fn ty(self, db: &impl HirDatabase) -> Ty { pub fn ty(self, db: &impl HirDatabase) -> Ty {
db.type_for_def(self.into(), Namespace::Types) db.type_for_def(self.into(), Namespace::Types)
} }
// FIXME move to a more general type
/// Builds a resolver for type references inside this union.
pub(crate) fn resolver(self, db: &impl HirDatabase) -> Resolver {
// take the outer scope...
let r = self.module(db).resolver(db);
// ...and add generic params, if present
let p = self.generic_params(db);
let r = if !p.params.is_empty() { r.push_generic_params_scope(p) } else { r };
r
}
} }
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@ -402,17 +371,6 @@ impl Enum {
pub fn ty(self, db: &impl HirDatabase) -> Ty { pub fn ty(self, db: &impl HirDatabase) -> Ty {
db.type_for_def(self.into(), Namespace::Types) db.type_for_def(self.into(), Namespace::Types)
} }
// FIXME: move to a more general type
/// Builds a resolver for type references inside this struct.
pub(crate) fn resolver(self, db: &impl HirDatabase) -> Resolver {
// take the outer scope...
let r = self.module(db).resolver(db);
// ...and add generic params, if present
let p = self.generic_params(db);
let r = if !p.params.is_empty() { r.push_generic_params_scope(p) } else { r };
r.push_scope(Scope::AdtScope(self.into()))
}
} }
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@ -474,22 +432,52 @@ impl Adt {
} }
} }
pub fn krate(self, db: &impl HirDatabase) -> Option<Crate> { pub fn module(self, db: &impl DefDatabase) -> Module {
Some(
match self { match self {
Adt::Struct(s) => s.module(db), Adt::Struct(s) => s.module(db),
Adt::Union(s) => s.module(db), Adt::Union(s) => s.module(db),
Adt::Enum(e) => e.module(db), Adt::Enum(e) => e.module(db),
} }
.krate(),
)
} }
pub(crate) fn resolver(self, db: &impl HirDatabase) -> Resolver { pub fn krate(self, db: &impl HirDatabase) -> Option<Crate> {
Some(self.module(db).krate())
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum VariantDef {
Struct(Struct),
EnumVariant(EnumVariant),
}
impl_froms!(VariantDef: Struct, EnumVariant);
impl VariantDef {
pub fn fields(self, db: &impl HirDatabase) -> Vec<StructField> {
match self { match self {
Adt::Struct(it) => it.resolver(db), VariantDef::Struct(it) => it.fields(db),
Adt::Union(it) => it.resolver(db), VariantDef::EnumVariant(it) => it.fields(db),
Adt::Enum(it) => it.resolver(db), }
}
pub fn field(self, db: &impl HirDatabase, name: &Name) -> Option<StructField> {
match self {
VariantDef::Struct(it) => it.field(db, name),
VariantDef::EnumVariant(it) => it.field(db, name),
}
}
pub fn module(self, db: &impl HirDatabase) -> Module {
match self {
VariantDef::Struct(it) => it.module(db),
VariantDef::EnumVariant(it) => it.module(db),
}
}
pub(crate) fn variant_data(self, db: &impl DefDatabase) -> Arc<VariantData> {
match self {
VariantDef::Struct(it) => it.variant_data(db),
VariantDef::EnumVariant(it) => it.variant_data(db),
} }
} }
} }
@ -505,15 +493,6 @@ pub enum DefWithBody {
impl_froms!(DefWithBody: Function, Const, Static); impl_froms!(DefWithBody: Function, Const, Static);
impl DefWithBody { impl DefWithBody {
/// Builds a resolver for code inside this item.
pub(crate) fn resolver(self, db: &impl HirDatabase) -> Resolver {
match self {
DefWithBody::Const(c) => c.resolver(db),
DefWithBody::Function(f) => f.resolver(db),
DefWithBody::Static(s) => s.resolver(db),
}
}
pub(crate) fn krate(self, db: &impl HirDatabase) -> Option<Crate> { pub(crate) fn krate(self, db: &impl HirDatabase) -> Option<Crate> {
match self { match self {
DefWithBody::Const(c) => c.krate(db), DefWithBody::Const(c) => c.krate(db),
@ -598,10 +577,10 @@ impl FnData {
func: Function, func: Function,
) -> Arc<FnData> { ) -> Arc<FnData> {
let src = func.source(db); let src = func.source(db);
let name = src.ast.name().map(|n| n.as_name()).unwrap_or_else(Name::missing); let name = src.value.name().map(|n| n.as_name()).unwrap_or_else(Name::missing);
let mut params = Vec::new(); let mut params = Vec::new();
let mut has_self_param = false; let mut has_self_param = false;
if let Some(param_list) = src.ast.param_list() { if let Some(param_list) = src.value.param_list() {
if let Some(self_param) = param_list.self_param() { if let Some(self_param) = param_list.self_param() {
let self_type = if let Some(type_ref) = self_param.ascribed_type() { let self_type = if let Some(type_ref) = self_param.ascribed_type() {
TypeRef::from_ast(type_ref) TypeRef::from_ast(type_ref)
@ -625,7 +604,7 @@ impl FnData {
params.push(type_ref); params.push(type_ref);
} }
} }
let ret_type = if let Some(type_ref) = src.ast.ret_type().and_then(|rt| rt.type_ref()) { let ret_type = if let Some(type_ref) = src.value.ret_type().and_then(|rt| rt.type_ref()) {
TypeRef::from_ast(type_ref) TypeRef::from_ast(type_ref)
} else { } else {
TypeRef::unit() TypeRef::unit()
@ -655,7 +634,7 @@ impl FnData {
impl Function { impl Function {
pub fn module(self, db: &impl DefDatabase) -> Module { pub fn module(self, db: &impl DefDatabase) -> Module {
Module { id: self.id.module(db) } self.id.lookup(db).module(db).into()
} }
pub fn krate(self, db: &impl DefDatabase) -> Option<Crate> { pub fn krate(self, db: &impl DefDatabase) -> Option<Crate> {
@ -688,35 +667,28 @@ impl Function {
/// The containing impl block, if this is a method. /// The containing impl block, if this is a method.
pub fn impl_block(self, db: &impl DefDatabase) -> Option<ImplBlock> { pub fn impl_block(self, db: &impl DefDatabase) -> Option<ImplBlock> {
ImplBlock::containing(db, self.into()) match self.container(db) {
Some(Container::ImplBlock(it)) => Some(it),
_ => None,
}
} }
/// The containing trait, if this is a trait method definition. /// The containing trait, if this is a trait method definition.
pub fn parent_trait(self, db: &impl DefDatabase) -> Option<Trait> { pub fn parent_trait(self, db: &impl DefDatabase) -> Option<Trait> {
db.trait_items_index(self.module(db)).get_parent_trait(self.into()) match self.container(db) {
Some(Container::Trait(it)) => Some(it),
_ => None,
}
} }
pub fn container(self, db: &impl DefDatabase) -> Option<Container> { pub fn container(self, db: &impl DefDatabase) -> Option<Container> {
if let Some(impl_block) = self.impl_block(db) { match self.id.lookup(db).container {
Some(impl_block.into()) ContainerId::TraitId(it) => Some(Container::Trait(it.into())),
} else if let Some(trait_) = self.parent_trait(db) { ContainerId::ImplId(it) => Some(Container::ImplBlock(it.into())),
Some(trait_.into()) ContainerId::ModuleId(_) => None,
} else {
None
} }
} }
// FIXME: move to a more general type for 'body-having' items
/// Builds a resolver for code inside this item.
pub(crate) fn resolver(self, db: &impl HirDatabase) -> Resolver {
// take the outer scope...
let r = self.container(db).map_or_else(|| self.module(db).resolver(db), |c| c.resolver(db));
// ...and add generic params, if present
let p = self.generic_params(db);
let r = if !p.params.is_empty() { r.push_generic_params_scope(p) } else { r };
r
}
pub fn diagnostics(self, db: &impl HirDatabase, sink: &mut DiagnosticSink) { pub fn diagnostics(self, db: &impl HirDatabase, sink: &mut DiagnosticSink) {
let infer = self.infer(db); let infer = self.infer(db);
infer.add_diagnostics(db, self, sink); infer.add_diagnostics(db, self, sink);
@ -732,7 +704,7 @@ pub struct Const {
impl Const { impl Const {
pub fn module(self, db: &impl DefDatabase) -> Module { pub fn module(self, db: &impl DefDatabase) -> Module {
Module { id: self.id.module(db) } Module { id: self.id.lookup(db).module(db) }
} }
pub fn krate(self, db: &impl DefDatabase) -> Option<Crate> { pub fn krate(self, db: &impl DefDatabase) -> Option<Crate> {
@ -751,35 +723,29 @@ impl Const {
db.infer(self.into()) db.infer(self.into())
} }
/// The containing impl block, if this is a method. /// The containing impl block, if this is a type alias.
pub fn impl_block(self, db: &impl DefDatabase) -> Option<ImplBlock> { pub fn impl_block(self, db: &impl DefDatabase) -> Option<ImplBlock> {
ImplBlock::containing(db, self.into()) match self.container(db) {
Some(Container::ImplBlock(it)) => Some(it),
_ => None,
}
} }
/// The containing trait, if this is a trait type alias definition.
pub fn parent_trait(self, db: &impl DefDatabase) -> Option<Trait> { pub fn parent_trait(self, db: &impl DefDatabase) -> Option<Trait> {
db.trait_items_index(self.module(db)).get_parent_trait(self.into()) match self.container(db) {
Some(Container::Trait(it)) => Some(it),
_ => None,
}
} }
pub fn container(self, db: &impl DefDatabase) -> Option<Container> { pub fn container(self, db: &impl DefDatabase) -> Option<Container> {
if let Some(impl_block) = self.impl_block(db) { match self.id.lookup(db).container {
Some(impl_block.into()) ContainerId::TraitId(it) => Some(Container::Trait(it.into())),
} else if let Some(trait_) = self.parent_trait(db) { ContainerId::ImplId(it) => Some(Container::ImplBlock(it.into())),
Some(trait_.into()) ContainerId::ModuleId(_) => None,
} else {
None
} }
} }
// FIXME: move to a more general type for 'body-having' items
/// Builds a resolver for code inside this item.
pub(crate) fn resolver(self, db: &impl HirDatabase) -> Resolver {
// take the outer scope...
let r = self
.impl_block(db)
.map(|ib| ib.resolver(db))
.unwrap_or_else(|| self.module(db).resolver(db));
r
}
} }
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
@ -801,7 +767,7 @@ impl ConstData {
db: &(impl DefDatabase + AstDatabase), db: &(impl DefDatabase + AstDatabase),
konst: Const, konst: Const,
) -> Arc<ConstData> { ) -> Arc<ConstData> {
let node = konst.source(db).ast; let node = konst.source(db).value;
const_data_for(&node) const_data_for(&node)
} }
@ -809,7 +775,7 @@ impl ConstData {
db: &(impl DefDatabase + AstDatabase), db: &(impl DefDatabase + AstDatabase),
konst: Static, konst: Static,
) -> Arc<ConstData> { ) -> Arc<ConstData> {
let node = konst.source(db).ast; let node = konst.source(db).value;
const_data_for(&node) const_data_for(&node)
} }
} }
@ -839,12 +805,6 @@ impl Static {
db.static_data(self) db.static_data(self)
} }
/// Builds a resolver for code inside this item.
pub(crate) fn resolver(self, db: &impl HirDatabase) -> Resolver {
// take the outer scope...
self.module(db).resolver(db)
}
pub fn infer(self, db: &impl HirDatabase) -> Arc<InferenceResult> { pub fn infer(self, db: &impl HirDatabase) -> Arc<InferenceResult> {
db.infer(self.into()) db.infer(self.into())
} }
@ -861,11 +821,11 @@ impl Trait {
} }
pub fn name(self, db: &impl DefDatabase) -> Option<Name> { pub fn name(self, db: &impl DefDatabase) -> Option<Name> {
self.trait_data(db).name().clone() self.trait_data(db).name.clone()
} }
pub fn items(self, db: &impl DefDatabase) -> Vec<AssocItem> { pub fn items(self, db: &impl DefDatabase) -> Vec<AssocItem> {
self.trait_data(db).items().to_vec() self.trait_data(db).items.iter().map(|it| (*it).into()).collect()
} }
fn direct_super_traits(self, db: &impl HirDatabase) -> Vec<Trait> { fn direct_super_traits(self, db: &impl HirDatabase) -> Vec<Trait> {
@ -912,10 +872,10 @@ impl Trait {
pub fn associated_type_by_name(self, db: &impl DefDatabase, name: &Name) -> Option<TypeAlias> { pub fn associated_type_by_name(self, db: &impl DefDatabase, name: &Name) -> Option<TypeAlias> {
let trait_data = self.trait_data(db); let trait_data = self.trait_data(db);
trait_data trait_data
.items() .items
.iter() .iter()
.filter_map(|item| match item { .filter_map(|item| match item {
AssocItem::TypeAlias(t) => Some(*t), AssocItemId::TypeAliasId(t) => Some(TypeAlias::from(*t)),
_ => None, _ => None,
}) })
.find(|t| &t.name(db) == name) .find(|t| &t.name(db) == name)
@ -930,7 +890,7 @@ impl Trait {
} }
pub(crate) fn trait_data(self, db: &impl DefDatabase) -> Arc<TraitData> { pub(crate) fn trait_data(self, db: &impl DefDatabase) -> Arc<TraitData> {
db.trait_data(self) db.trait_data(self.id)
} }
pub fn trait_ref(self, db: &impl HirDatabase) -> TraitRef { pub fn trait_ref(self, db: &impl HirDatabase) -> TraitRef {
@ -938,15 +898,7 @@ impl Trait {
} }
pub fn is_auto(self, db: &impl DefDatabase) -> bool { pub fn is_auto(self, db: &impl DefDatabase) -> bool {
self.trait_data(db).is_auto() self.trait_data(db).auto
}
pub(crate) fn resolver(self, db: &impl DefDatabase) -> Resolver {
let r = self.module(db).resolver(db);
// add generic params, if present
let p = self.generic_params(db);
let r = if !p.params.is_empty() { r.push_generic_params_scope(p) } else { r };
r
} }
} }
@ -957,30 +909,34 @@ pub struct TypeAlias {
impl TypeAlias { impl TypeAlias {
pub fn module(self, db: &impl DefDatabase) -> Module { pub fn module(self, db: &impl DefDatabase) -> Module {
Module { id: self.id.module(db) } Module { id: self.id.lookup(db).module(db) }
} }
pub fn krate(self, db: &impl DefDatabase) -> Option<Crate> { pub fn krate(self, db: &impl DefDatabase) -> Option<Crate> {
Some(self.module(db).krate()) Some(self.module(db).krate())
} }
/// The containing impl block, if this is a method. /// The containing impl block, if this is a type alias.
pub fn impl_block(self, db: &impl DefDatabase) -> Option<ImplBlock> { pub fn impl_block(self, db: &impl DefDatabase) -> Option<ImplBlock> {
ImplBlock::containing(db, self.into()) match self.container(db) {
Some(Container::ImplBlock(it)) => Some(it),
_ => None,
}
} }
/// The containing trait, if this is a trait method definition. /// The containing trait, if this is a trait type alias definition.
pub fn parent_trait(self, db: &impl DefDatabase) -> Option<Trait> { pub fn parent_trait(self, db: &impl DefDatabase) -> Option<Trait> {
db.trait_items_index(self.module(db)).get_parent_trait(self.into()) match self.container(db) {
Some(Container::Trait(it)) => Some(it),
_ => None,
}
} }
pub fn container(self, db: &impl DefDatabase) -> Option<Container> { pub fn container(self, db: &impl DefDatabase) -> Option<Container> {
if let Some(impl_block) = self.impl_block(db) { match self.id.lookup(db).container {
Some(impl_block.into()) ContainerId::TraitId(it) => Some(Container::Trait(it.into())),
} else if let Some(trait_) = self.parent_trait(db) { ContainerId::ImplId(it) => Some(Container::ImplBlock(it.into())),
Some(trait_.into()) ContainerId::ModuleId(_) => None,
} else {
None
} }
} }
@ -995,19 +951,6 @@ impl TypeAlias {
pub fn name(self, db: &impl DefDatabase) -> Name { pub fn name(self, db: &impl DefDatabase) -> Name {
db.type_alias_data(self).name.clone() db.type_alias_data(self).name.clone()
} }
/// Builds a resolver for the type references in this type alias.
pub(crate) fn resolver(self, db: &impl HirDatabase) -> Resolver {
// take the outer scope...
let r = self
.impl_block(db)
.map(|ib| ib.resolver(db))
.unwrap_or_else(|| self.module(db).resolver(db));
// ...and add generic params, if present
let p = self.generic_params(db);
let r = if !p.params.is_empty() { r.push_generic_params_scope(p) } else { r };
r
}
} }
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@ -1023,15 +966,6 @@ pub enum Container {
} }
impl_froms!(Container: Trait, ImplBlock); impl_froms!(Container: Trait, ImplBlock);
impl Container {
pub(crate) fn resolver(self, db: &impl DefDatabase) -> Resolver {
match self {
Container::Trait(trait_) => trait_.resolver(db),
Container::ImplBlock(impl_block) => impl_block.resolver(db),
}
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum AssocItem { pub enum AssocItem {
Function(Function), Function(Function),

View file

@@ -49,9 +49,9 @@ pub(crate) fn attributes_query(
         AttrDef::Module(it) => {
             let src = it.declaration_source(db)?;
             let hygiene = Hygiene::new(db, src.file_id);
-            Attr::from_attrs_owner(&src.ast, &hygiene)
+            Attr::from_attrs_owner(&src.value, &hygiene)
         }
-        AttrDef::StructField(it) => match it.source(db).ast {
+        AttrDef::StructField(it) => match it.source(db).value {
             FieldSource::Named(named) => {
                 let src = it.source(db);
                 let hygiene = Hygiene::new(db, src.file_id);
@@ -82,7 +82,7 @@ where
 {
     let src = node.source(db);
     let hygiene = Hygiene::new(db, src.file_id);
-    Attr::from_attrs_owner(&src.ast, &hygiene)
+    Attr::from_attrs_owner(&src.value, &hygiene)
 }

 impl<T: Into<AttrDef> + Copy> Attrs for T {

View file

@@ -70,23 +70,23 @@ pub(crate) fn documentation_query(
     def: DocDef,
 ) -> Option<Documentation> {
     match def {
-        DocDef::Module(it) => docs_from_ast(&it.declaration_source(db)?.ast),
-        DocDef::StructField(it) => match it.source(db).ast {
+        DocDef::Module(it) => docs_from_ast(&it.declaration_source(db)?.value),
+        DocDef::StructField(it) => match it.source(db).value {
             FieldSource::Named(named) => docs_from_ast(&named),
             FieldSource::Pos(..) => None,
         },
         DocDef::Adt(it) => match it {
-            Adt::Struct(it) => docs_from_ast(&it.source(db).ast),
-            Adt::Enum(it) => docs_from_ast(&it.source(db).ast),
-            Adt::Union(it) => docs_from_ast(&it.source(db).ast),
+            Adt::Struct(it) => docs_from_ast(&it.source(db).value),
+            Adt::Enum(it) => docs_from_ast(&it.source(db).value),
+            Adt::Union(it) => docs_from_ast(&it.source(db).value),
         },
-        DocDef::EnumVariant(it) => docs_from_ast(&it.source(db).ast),
-        DocDef::Static(it) => docs_from_ast(&it.source(db).ast),
-        DocDef::Const(it) => docs_from_ast(&it.source(db).ast),
-        DocDef::Function(it) => docs_from_ast(&it.source(db).ast),
-        DocDef::Trait(it) => docs_from_ast(&it.source(db).ast),
-        DocDef::TypeAlias(it) => docs_from_ast(&it.source(db).ast),
-        DocDef::MacroDef(it) => docs_from_ast(&it.source(db).ast),
+        DocDef::EnumVariant(it) => docs_from_ast(&it.source(db).value),
+        DocDef::Static(it) => docs_from_ast(&it.source(db).value),
+        DocDef::Const(it) => docs_from_ast(&it.source(db).value),
+        DocDef::Function(it) => docs_from_ast(&it.source(db).value),
+        DocDef::Trait(it) => docs_from_ast(&it.source(db).value),
+        DocDef::TypeAlias(it) => docs_from_ast(&it.source(db).value),
+        DocDef::MacroDef(it) => docs_from_ast(&it.source(db).value),
     }
 }

View file

@ -1,13 +1,13 @@
//! FIXME: write short doc here //! FIXME: write short doc here
use hir_def::{HasSource as _, Lookup};
use ra_syntax::ast::{self, AstNode}; use ra_syntax::ast::{self, AstNode};
use crate::{ use crate::{
adt::VariantDef,
db::{AstDatabase, DefDatabase, HirDatabase}, db::{AstDatabase, DefDatabase, HirDatabase},
ids::AstItemDef, ids::AstItemDef,
Const, Either, Enum, EnumVariant, FieldSource, Function, HasBody, HirFileId, MacroDef, Module, Const, Either, Enum, EnumVariant, FieldSource, Function, HasBody, HirFileId, MacroDef, Module,
ModuleSource, Static, Struct, StructField, Trait, TypeAlias, Union, ModuleSource, Static, Struct, StructField, Trait, TypeAlias, Union, VariantDef,
}; };
pub use hir_expand::Source; pub use hir_expand::Source;
@ -25,9 +25,9 @@ impl Module {
let def_map = db.crate_def_map(self.id.krate); let def_map = db.crate_def_map(self.id.krate);
let decl_id = def_map[self.id.module_id].declaration; let decl_id = def_map[self.id.module_id].declaration;
let file_id = def_map[self.id.module_id].definition; let file_id = def_map[self.id.module_id].definition;
let ast = ModuleSource::new(db, file_id, decl_id); let value = ModuleSource::new(db, file_id, decl_id);
let file_id = file_id.map(HirFileId::from).unwrap_or_else(|| decl_id.unwrap().file_id()); let file_id = file_id.map(HirFileId::from).unwrap_or_else(|| decl_id.unwrap().file_id());
Source { file_id, ast } Source { file_id, value }
} }
/// Returns a node which declares this module, either a `mod foo;` or a `mod foo {}`. /// Returns a node which declares this module, either a `mod foo;` or a `mod foo {}`.
@ -38,8 +38,8 @@ impl Module {
) -> Option<Source<ast::Module>> { ) -> Option<Source<ast::Module>> {
let def_map = db.crate_def_map(self.id.krate); let def_map = db.crate_def_map(self.id.krate);
let decl = def_map[self.id.module_id].declaration?; let decl = def_map[self.id.module_id].declaration?;
let ast = decl.to_node(db); let value = decl.to_node(db);
Some(Source { file_id: decl.file_id(), ast }) Some(Source { file_id: decl.file_id(), value })
} }
} }
@ -53,11 +53,11 @@ impl HasSource for StructField {
let (file_id, struct_kind) = match self.parent { let (file_id, struct_kind) = match self.parent {
VariantDef::Struct(s) => { VariantDef::Struct(s) => {
ss = s.source(db); ss = s.source(db);
(ss.file_id, ss.ast.kind()) (ss.file_id, ss.value.kind())
} }
VariantDef::EnumVariant(e) => { VariantDef::EnumVariant(e) => {
es = e.source(db); es = e.source(db);
(es.file_id, es.ast.kind()) (es.file_id, es.value.kind())
} }
}; };
@ -66,13 +66,13 @@ impl HasSource for StructField {
ast::StructKind::Named(fl) => fl.fields().map(|it| FieldSource::Named(it)).collect(), ast::StructKind::Named(fl) => fl.fields().map(|it| FieldSource::Named(it)).collect(),
ast::StructKind::Unit => Vec::new(), ast::StructKind::Unit => Vec::new(),
}; };
let ast = field_sources let value = field_sources
.into_iter() .into_iter()
.zip(fields.iter()) .zip(fields.iter())
.find(|(_syntax, (id, _))| *id == self.id) .find(|(_syntax, (id, _))| *id == self.id)
.unwrap() .unwrap()
.0; .0;
Source { file_id, ast } Source { file_id, value }
} }
} }
impl HasSource for Struct { impl HasSource for Struct {
@ -98,8 +98,8 @@ impl HasSource for EnumVariant {
fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::EnumVariant> { fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::EnumVariant> {
let enum_data = db.enum_data(self.parent.id); let enum_data = db.enum_data(self.parent.id);
let src = self.parent.id.source(db); let src = self.parent.id.source(db);
let ast = src let value = src
.ast .value
.variant_list() .variant_list()
.into_iter() .into_iter()
.flat_map(|it| it.variants()) .flat_map(|it| it.variants())
@ -107,19 +107,19 @@ impl HasSource for EnumVariant {
.find(|(_syntax, (id, _))| *id == self.id) .find(|(_syntax, (id, _))| *id == self.id)
.unwrap() .unwrap()
.0; .0;
Source { file_id: src.file_id, ast } Source { file_id: src.file_id, value }
} }
} }
impl HasSource for Function { impl HasSource for Function {
type Ast = ast::FnDef; type Ast = ast::FnDef;
fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::FnDef> { fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::FnDef> {
self.id.source(db) self.id.lookup(db).source(db)
} }
} }
impl HasSource for Const { impl HasSource for Const {
type Ast = ast::ConstDef; type Ast = ast::ConstDef;
fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::ConstDef> { fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::ConstDef> {
self.id.source(db) self.id.lookup(db).source(db)
} }
} }
impl HasSource for Static { impl HasSource for Static {
@ -137,13 +137,13 @@ impl HasSource for Trait {
impl HasSource for TypeAlias { impl HasSource for TypeAlias {
type Ast = ast::TypeAliasDef; type Ast = ast::TypeAliasDef;
fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::TypeAliasDef> { fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::TypeAliasDef> {
self.id.source(db) self.id.lookup(db).source(db)
} }
} }
impl HasSource for MacroDef { impl HasSource for MacroDef {
type Ast = ast::MacroCall; type Ast = ast::MacroCall;
fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::MacroCall> { fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::MacroCall> {
Source { file_id: self.id.ast_id.file_id(), ast: self.id.ast_id.to_node(db) } Source { file_id: self.id.ast_id.file_id(), value: self.id.ast_id.to_node(db) }
} }
} }
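Nearly every hunk in this commit is the same mechanical rename: the payload field of `hir_expand::Source` changes from `ast` to `value`. A minimal sketch of the wrapper as it is used in these files, assuming the simplified shape below (the real definition lives in the `hir_expand` crate; only `new`, `as_ref`, `with_value`, and `map` are taken from call sites visible in this diff, and the bodies are illustrative):

```rust
// Sketch only: a stand-in for hir_expand::HirFileId so the example compiles.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct HirFileId(u32);

/// A value paired with the file it originated from.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct Source<T> {
    pub file_id: HirFileId,
    pub value: T, // previously named `ast`
}

impl<T> Source<T> {
    pub fn new(file_id: HirFileId, value: T) -> Source<T> {
        Source { file_id, value }
    }

    /// Borrow the payload while keeping the file association.
    pub fn as_ref(&self) -> Source<&T> {
        Source { file_id: self.file_id, value: &self.value }
    }

    /// Swap in a different payload for the same file, e.g. a child node.
    pub fn with_value<U>(&self, value: U) -> Source<U> {
        Source { file_id: self.file_id, value }
    }

    /// Transform the payload, e.g. turn a node into a pointer to it.
    pub fn map<U>(self, f: impl FnOnce(T) -> U) -> Source<U> {
        Source { file_id: self.file_id, value: f(self.value) }
    }
}
```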

View file

@@ -8,10 +8,9 @@ use ra_syntax::SmolStr;
 use crate::{
     debug::HirDebugDatabase,
-    generics::{GenericDef, GenericParams},
+    generics::GenericDef,
     ids,
     lang_item::{LangItemTarget, LangItems},
-    traits::TraitData,
     ty::{
         method_resolution::CrateImplBlocks,
         traits::{AssocTyValue, Impl},
@@ -25,8 +24,9 @@ use crate::{
 pub use hir_def::db::{
     BodyQuery, BodyWithSourceMapQuery, CrateDefMapQuery, DefDatabase2, DefDatabase2Storage,
-    EnumDataQuery, ExprScopesQuery, ImplDataQuery, InternDatabase, InternDatabaseStorage,
-    RawItemsQuery, RawItemsWithSourceMapQuery, StructDataQuery,
+    EnumDataQuery, ExprScopesQuery, GenericParamsQuery, ImplDataQuery, InternDatabase,
+    InternDatabaseStorage, RawItemsQuery, RawItemsWithSourceMapQuery, StructDataQuery,
+    TraitDataQuery,
 };
 pub use hir_expand::db::{
     AstDatabase, AstDatabaseStorage, AstIdMapQuery, MacroArgQuery, MacroDefQuery, MacroExpandQuery,
@@ -37,15 +37,6 @@ pub use hir_expand::db::{
 #[salsa::query_group(DefDatabaseStorage)]
 #[salsa::requires(AstDatabase)]
 pub trait DefDatabase: HirDebugDatabase + DefDatabase2 {
-    #[salsa::invoke(crate::traits::TraitData::trait_data_query)]
-    fn trait_data(&self, t: Trait) -> Arc<TraitData>;
-
-    #[salsa::invoke(crate::traits::TraitItemsIndex::trait_items_index)]
-    fn trait_items_index(&self, module: Module) -> crate::traits::TraitItemsIndex;
-
-    #[salsa::invoke(crate::generics::GenericParams::generic_params_query)]
-    fn generic_params(&self, def: GenericDef) -> Arc<GenericParams>;
-
     #[salsa::invoke(FnData::fn_data_query)]
     fn fn_data(&self, func: Function) -> Arc<FnData>;

View file

@@ -21,7 +21,7 @@ impl Diagnostic for NoSuchField {
     }

     fn source(&self) -> Source<SyntaxNodePtr> {
-        Source { file_id: self.file, ast: self.field.into() }
+        Source { file_id: self.file, value: self.field.into() }
     }

     fn as_any(&self) -> &(dyn Any + Send + 'static) {
@@ -41,7 +41,7 @@ impl Diagnostic for MissingFields {
         "fill structure fields".to_string()
     }

     fn source(&self) -> Source<SyntaxNodePtr> {
-        Source { file_id: self.file, ast: self.field_list.into() }
+        Source { file_id: self.file, value: self.field_list.into() }
     }

     fn as_any(&self) -> &(dyn Any + Send + 'static) {
         self
@@ -53,7 +53,7 @@ impl AstDiagnostic for MissingFields {
     fn ast(&self, db: &impl AstDatabase) -> Self::AST {
         let root = db.parse_or_expand(self.source().file_id).unwrap();
-        let node = self.source().ast.to_node(&root);
+        let node = self.source().value.to_node(&root);
         ast::RecordFieldList::cast(node).unwrap()
     }
 }
@@ -69,7 +69,7 @@ impl Diagnostic for MissingOkInTailExpr {
         "wrap return expression in Ok".to_string()
     }

     fn source(&self) -> Source<SyntaxNodePtr> {
-        Source { file_id: self.file, ast: self.expr.into() }
+        Source { file_id: self.file, value: self.expr.into() }
     }

     fn as_any(&self) -> &(dyn Any + Send + 'static) {
         self
@@ -81,7 +81,7 @@ impl AstDiagnostic for MissingOkInTailExpr {
     fn ast(&self, db: &impl AstDatabase) -> Self::AST {
         let root = db.parse_or_expand(self.file).unwrap();
-        let node = self.source().ast.to_node(&root);
+        let node = self.source().value.to_node(&root);
         ast::Expr::cast(node).unwrap()
     }
 }
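Each diagnostic touched above follows the same pattern: it stores a `Source<SyntaxNodePtr>` and only materialises a typed AST node on demand. A condensed sketch of that round trip, using only calls visible in these hunks (`parse_or_expand`, `to_node`, `cast`); the `ExampleDiagnostic` type and the import paths are assumptions made for illustration:

```rust
use hir_expand::{db::AstDatabase, HirFileId, Source};
use ra_syntax::{ast, AstNode, AstPtr, SyntaxNodePtr};

// Hypothetical diagnostic used only to illustrate the pointer round trip.
struct ExampleDiagnostic {
    file: HirFileId,
    expr: AstPtr<ast::Expr>,
}

impl ExampleDiagnostic {
    // Cheap to store: a file id plus a position-based pointer, no AST kept alive.
    fn source(&self) -> Source<SyntaxNodePtr> {
        Source { file_id: self.file, value: self.expr.into() }
    }

    // The expensive part happens lazily: re-parse (or re-expand) the file,
    // resolve the pointer back into a node, and downcast it to the AST type.
    fn ast(&self, db: &impl AstDatabase) -> Option<ast::Expr> {
        let root = db.parse_or_expand(self.source().file_id)?;
        let node = self.source().value.to_node(&root);
        ast::Expr::cast(node)
    }
}
```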

View file

@@ -11,6 +11,7 @@ use rustc_hash::FxHashSet;
 use crate::{
     db::HirDatabase,
     diagnostics::{MissingFields, MissingOkInTailExpr},
+    resolve::HasResolver,
     ty::{ApplicationTy, InferenceResult, Ty, TypeCtor},
     Adt, DefWithBody, Function, HasBody, Name, Path, Resolver,
 };
@@ -116,7 +117,7 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
         let source_map = self.func.body_source_map(db);

         if let Some(source_ptr) = source_map.expr_syntax(id) {
-            if let Some(expr) = source_ptr.ast.a() {
+            if let Some(expr) = source_ptr.value.a() {
                 let root = source_ptr.file_syntax(db);
                 if let ast::Expr::RecordLit(record_lit) = expr.to_node(&root) {
                     if let Some(field_list) = record_lit.record_field_list() {
@@ -161,7 +162,7 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
         let source_map = self.func.body_source_map(db);
         if let Some(source_ptr) = source_map.expr_syntax(id) {
-            if let Some(expr) = source_ptr.ast.a() {
+            if let Some(expr) = source_ptr.value.a() {
                 self.sink.push(MissingOkInTailExpr { file: source_ptr.file_id, expr });
             }
         }

View file

@@ -3,9 +3,9 @@
 //! It's unclear if we need this long-term, but it's definitelly useful while we
 //! are splitting the hir.

-use hir_def::{AdtId, AssocItemId, DefWithBodyId, EnumVariantId, ModuleDefId};
+use hir_def::{AdtId, AssocItemId, DefWithBodyId, EnumVariantId, GenericDefId, ModuleDefId};

-use crate::{Adt, AssocItem, DefWithBody, EnumVariant, ModuleDef};
+use crate::{Adt, AssocItem, DefWithBody, EnumVariant, GenericDef, ModuleDef};

 macro_rules! from_id {
     ($(($id:path, $ty:path)),*) => {$(
@@ -41,6 +41,16 @@ impl From<AdtId> for Adt {
     }
 }

+impl From<Adt> for AdtId {
+    fn from(id: Adt) -> Self {
+        match id {
+            Adt::Struct(it) => AdtId::StructId(it.id),
+            Adt::Union(it) => AdtId::UnionId(it.id),
+            Adt::Enum(it) => AdtId::EnumId(it.id),
+        }
+    }
+}
+
 impl From<EnumVariantId> for EnumVariant {
     fn from(id: EnumVariantId) -> Self {
         EnumVariant { parent: id.parent.into(), id: id.local_id }
@@ -82,3 +92,33 @@ impl From<AssocItemId> for AssocItem {
         }
     }
 }
+
+impl From<GenericDef> for GenericDefId {
+    fn from(def: GenericDef) -> Self {
+        match def {
+            GenericDef::Function(it) => GenericDefId::FunctionId(it.id),
+            GenericDef::Adt(it) => GenericDefId::AdtId(it.into()),
+            GenericDef::Trait(it) => GenericDefId::TraitId(it.id),
+            GenericDef::TypeAlias(it) => GenericDefId::TypeAliasId(it.id),
+            GenericDef::ImplBlock(it) => GenericDefId::ImplId(it.id),
+            GenericDef::EnumVariant(it) => {
+                GenericDefId::EnumVariantId(EnumVariantId { parent: it.parent.id, local_id: it.id })
+            }
+            GenericDef::Const(it) => GenericDefId::ConstId(it.id),
+        }
+    }
+}
+
+impl From<GenericDefId> for GenericDef {
+    fn from(def: GenericDefId) -> Self {
+        match def {
+            GenericDefId::FunctionId(it) => GenericDef::Function(it.into()),
+            GenericDefId::AdtId(it) => GenericDef::Adt(it.into()),
+            GenericDefId::TraitId(it) => GenericDef::Trait(it.into()),
+            GenericDefId::TypeAliasId(it) => GenericDef::TypeAlias(it.into()),
+            GenericDefId::ImplId(it) => GenericDef::ImplBlock(it.into()),
+            GenericDefId::EnumVariantId(it) => GenericDef::EnumVariant(it.into()),
+            GenericDefId::ConstId(it) => GenericDef::Const(it.into()),
+        }
+    }
+}

View file

@ -4,15 +4,15 @@ use hir_def::{ModuleId, StructId, StructOrUnionId, UnionId};
use hir_expand::{name::AsName, AstId, MacroDefId, MacroDefKind}; use hir_expand::{name::AsName, AstId, MacroDefId, MacroDefKind};
use ra_syntax::{ use ra_syntax::{
ast::{self, AstNode, NameOwner}, ast::{self, AstNode, NameOwner},
match_ast, match_ast, AstPtr, SyntaxNode,
}; };
use crate::{ use crate::{
db::{AstDatabase, DefDatabase, HirDatabase}, db::{AstDatabase, DefDatabase, HirDatabase},
ids::{AstItemDef, LocationCtx}, ids::{AstItemDef, LocationCtx},
Const, DefWithBody, Enum, EnumVariant, FieldSource, Function, HasBody, HasSource, ImplBlock, AssocItem, Const, DefWithBody, Enum, EnumVariant, FieldSource, Function, HasBody, HasSource,
Local, MacroDef, Module, ModuleSource, Source, Static, Struct, StructField, Trait, TypeAlias, ImplBlock, Local, MacroDef, Module, ModuleDef, ModuleSource, Source, Static, Struct,
Union, VariantDef, StructField, Trait, TypeAlias, Union, VariantDef,
}; };
pub trait FromSource: Sized { pub trait FromSource: Sized {
@ -52,15 +52,54 @@ impl FromSource for Trait {
impl FromSource for Function { impl FromSource for Function {
type Ast = ast::FnDef; type Ast = ast::FnDef;
fn from_source(db: &(impl DefDatabase + AstDatabase), src: Source<Self::Ast>) -> Option<Self> { fn from_source(db: &(impl DefDatabase + AstDatabase), src: Source<Self::Ast>) -> Option<Self> {
let id = from_source(db, src)?; let items = match Container::find(db, src.as_ref().map(|it| it.syntax()))? {
Some(Function { id }) Container::Trait(it) => it.items(db),
Container::ImplBlock(it) => it.items(db),
Container::Module(m) => {
return m
.declarations(db)
.into_iter()
.filter_map(|it| match it {
ModuleDef::Function(it) => Some(it),
_ => None,
})
.find(|it| same_source(&it.source(db), &src))
}
};
items
.into_iter()
.filter_map(|it| match it {
AssocItem::Function(it) => Some(it),
_ => None,
})
.find(|it| same_source(&it.source(db), &src))
} }
} }
impl FromSource for Const { impl FromSource for Const {
type Ast = ast::ConstDef; type Ast = ast::ConstDef;
fn from_source(db: &(impl DefDatabase + AstDatabase), src: Source<Self::Ast>) -> Option<Self> { fn from_source(db: &(impl DefDatabase + AstDatabase), src: Source<Self::Ast>) -> Option<Self> {
let id = from_source(db, src)?; let items = match Container::find(db, src.as_ref().map(|it| it.syntax()))? {
Some(Const { id }) Container::Trait(it) => it.items(db),
Container::ImplBlock(it) => it.items(db),
Container::Module(m) => {
return m
.declarations(db)
.into_iter()
.filter_map(|it| match it {
ModuleDef::Const(it) => Some(it),
_ => None,
})
.find(|it| same_source(&it.source(db), &src))
}
};
items
.into_iter()
.filter_map(|it| match it {
AssocItem::Const(it) => Some(it),
_ => None,
})
.find(|it| same_source(&it.source(db), &src))
} }
} }
impl FromSource for Static { impl FromSource for Static {
@ -73,8 +112,27 @@ impl FromSource for Static {
impl FromSource for TypeAlias { impl FromSource for TypeAlias {
type Ast = ast::TypeAliasDef; type Ast = ast::TypeAliasDef;
fn from_source(db: &(impl DefDatabase + AstDatabase), src: Source<Self::Ast>) -> Option<Self> { fn from_source(db: &(impl DefDatabase + AstDatabase), src: Source<Self::Ast>) -> Option<Self> {
let id = from_source(db, src)?; let items = match Container::find(db, src.as_ref().map(|it| it.syntax()))? {
Some(TypeAlias { id }) Container::Trait(it) => it.items(db),
Container::ImplBlock(it) => it.items(db),
Container::Module(m) => {
return m
.declarations(db)
.into_iter()
.filter_map(|it| match it {
ModuleDef::TypeAlias(it) => Some(it),
_ => None,
})
.find(|it| same_source(&it.source(db), &src))
}
};
items
.into_iter()
.filter_map(|it| match it {
AssocItem::TypeAlias(it) => Some(it),
_ => None,
})
.find(|it| same_source(&it.source(db), &src))
} }
} }
@ -87,7 +145,7 @@ impl FromSource for MacroDef {
let module = Module::from_definition(db, Source::new(src.file_id, module_src))?; let module = Module::from_definition(db, Source::new(src.file_id, module_src))?;
let krate = module.krate().crate_id(); let krate = module.krate().crate_id();
let ast_id = AstId::new(src.file_id, db.ast_id_map(src.file_id).ast_id(&src.ast)); let ast_id = AstId::new(src.file_id, db.ast_id_map(src.file_id).ast_id(&src.value));
let id: MacroDefId = MacroDefId { krate, ast_id, kind }; let id: MacroDefId = MacroDefId { krate, ast_id, kind };
Some(MacroDef { id }) Some(MacroDef { id })
@ -105,26 +163,26 @@ impl FromSource for ImplBlock {
impl FromSource for EnumVariant { impl FromSource for EnumVariant {
type Ast = ast::EnumVariant; type Ast = ast::EnumVariant;
fn from_source(db: &(impl DefDatabase + AstDatabase), src: Source<Self::Ast>) -> Option<Self> { fn from_source(db: &(impl DefDatabase + AstDatabase), src: Source<Self::Ast>) -> Option<Self> {
let parent_enum = src.ast.parent_enum(); let parent_enum = src.value.parent_enum();
let src_enum = Source { file_id: src.file_id, ast: parent_enum }; let src_enum = Source { file_id: src.file_id, value: parent_enum };
let variants = Enum::from_source(db, src_enum)?.variants(db); let variants = Enum::from_source(db, src_enum)?.variants(db);
variants.into_iter().find(|v| v.source(db) == src) variants.into_iter().find(|v| same_source(&v.source(db), &src))
} }
} }
impl FromSource for StructField { impl FromSource for StructField {
type Ast = FieldSource; type Ast = FieldSource;
fn from_source(db: &(impl DefDatabase + AstDatabase), src: Source<Self::Ast>) -> Option<Self> { fn from_source(db: &(impl DefDatabase + AstDatabase), src: Source<Self::Ast>) -> Option<Self> {
let variant_def: VariantDef = match src.ast { let variant_def: VariantDef = match src.value {
FieldSource::Named(ref field) => { FieldSource::Named(ref field) => {
let ast = field.syntax().ancestors().find_map(ast::StructDef::cast)?; let value = field.syntax().ancestors().find_map(ast::StructDef::cast)?;
let src = Source { file_id: src.file_id, ast }; let src = Source { file_id: src.file_id, value };
let def = Struct::from_source(db, src)?; let def = Struct::from_source(db, src)?;
VariantDef::from(def) VariantDef::from(def)
} }
FieldSource::Pos(ref field) => { FieldSource::Pos(ref field) => {
let ast = field.syntax().ancestors().find_map(ast::EnumVariant::cast)?; let value = field.syntax().ancestors().find_map(ast::EnumVariant::cast)?;
let src = Source { file_id: src.file_id, ast }; let src = Source { file_id: src.file_id, value };
let def = EnumVariant::from_source(db, src)?; let def = EnumVariant::from_source(db, src)?;
VariantDef::from(def) VariantDef::from(def)
} }
@ -142,12 +200,12 @@ impl FromSource for StructField {
impl Local { impl Local {
pub fn from_source(db: &impl HirDatabase, src: Source<ast::BindPat>) -> Option<Self> { pub fn from_source(db: &impl HirDatabase, src: Source<ast::BindPat>) -> Option<Self> {
let file_id = src.file_id; let file_id = src.file_id;
let parent: DefWithBody = src.ast.syntax().ancestors().find_map(|it| { let parent: DefWithBody = src.value.syntax().ancestors().find_map(|it| {
let res = match_ast! { let res = match_ast! {
match it { match it {
ast::ConstDef(ast) => { Const::from_source(db, Source { ast, file_id})?.into() }, ast::ConstDef(value) => { Const::from_source(db, Source { value, file_id})?.into() },
ast::StaticDef(ast) => { Static::from_source(db, Source { ast, file_id})?.into() }, ast::StaticDef(value) => { Static::from_source(db, Source { value, file_id})?.into() },
ast::FnDef(ast) => { Function::from_source(db, Source { ast, file_id})?.into() }, ast::FnDef(value) => { Function::from_source(db, Source { value, file_id})?.into() },
_ => return None, _ => return None,
} }
}; };
@ -162,33 +220,33 @@ impl Local {
impl Module { impl Module {
pub fn from_declaration(db: &impl DefDatabase, src: Source<ast::Module>) -> Option<Self> { pub fn from_declaration(db: &impl DefDatabase, src: Source<ast::Module>) -> Option<Self> {
let parent_declaration = src.ast.syntax().ancestors().skip(1).find_map(ast::Module::cast); let parent_declaration = src.value.syntax().ancestors().skip(1).find_map(ast::Module::cast);
let parent_module = match parent_declaration { let parent_module = match parent_declaration {
Some(parent_declaration) => { Some(parent_declaration) => {
let src_parent = Source { file_id: src.file_id, ast: parent_declaration }; let src_parent = Source { file_id: src.file_id, value: parent_declaration };
Module::from_declaration(db, src_parent) Module::from_declaration(db, src_parent)
} }
_ => { _ => {
let src_parent = Source { let src_parent = Source {
file_id: src.file_id, file_id: src.file_id,
ast: ModuleSource::new(db, Some(src.file_id.original_file(db)), None), value: ModuleSource::new(db, Some(src.file_id.original_file(db)), None),
}; };
Module::from_definition(db, src_parent) Module::from_definition(db, src_parent)
} }
}?; }?;
let child_name = src.ast.name()?; let child_name = src.value.name()?;
parent_module.child(db, &child_name.as_name()) parent_module.child(db, &child_name.as_name())
} }
pub fn from_definition(db: &impl DefDatabase, src: Source<ModuleSource>) -> Option<Self> { pub fn from_definition(db: &impl DefDatabase, src: Source<ModuleSource>) -> Option<Self> {
match src.ast { match src.value {
ModuleSource::Module(ref module) => { ModuleSource::Module(ref module) => {
assert!(!module.has_semi()); assert!(!module.has_semi());
return Module::from_declaration( return Module::from_declaration(
db, db,
Source { file_id: src.file_id, ast: module.clone() }, Source { file_id: src.file_id, value: module.clone() },
); );
} }
ModuleSource::SourceFile(_) => (), ModuleSource::SourceFile(_) => (),
@ -214,5 +272,47 @@ where
let module_src = ModuleSource::from_child_node(db, src.as_ref().map(|it| it.syntax())); let module_src = ModuleSource::from_child_node(db, src.as_ref().map(|it| it.syntax()));
let module = Module::from_definition(db, Source::new(src.file_id, module_src))?; let module = Module::from_definition(db, Source::new(src.file_id, module_src))?;
let ctx = LocationCtx::new(db, module.id, src.file_id); let ctx = LocationCtx::new(db, module.id, src.file_id);
Some(DEF::from_ast(ctx, &src.ast)) Some(DEF::from_ast(ctx, &src.value))
}
enum Container {
Trait(Trait),
ImplBlock(ImplBlock),
Module(Module),
}
impl Container {
fn find(db: &impl DefDatabase, src: Source<&SyntaxNode>) -> Option<Container> {
// FIXME: this doesn't try to handle nested declarations
for container in src.value.ancestors() {
let res = match_ast! {
match container {
ast::TraitDef(it) => {
let c = Trait::from_source(db, src.with_value(it))?;
Container::Trait(c)
},
ast::ImplBlock(it) => {
let c = ImplBlock::from_source(db, src.with_value(it))?;
Container::ImplBlock(c)
},
_ => { continue },
}
};
return Some(res);
}
let module_source = ModuleSource::from_child_node(db, src);
let c = Module::from_definition(db, src.with_value(module_source))?;
Some(Container::Module(c))
}
}
/// XXX: AST Nodes and SyntaxNodes have identity equality semantics: nodes are
/// equal if they point to exactly the same object.
///
/// In general, we do not guarantee that we have exactly one instance of a
/// syntax tree for each file. We probably should add such guarantee, but, for
/// the time being, we will use identity-less AstPtr comparison.
fn same_source<N: AstNode>(s1: &Source<N>, s2: &Source<N>) -> bool {
s1.as_ref().map(AstPtr::new) == s2.as_ref().map(AstPtr::new)
} }
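The comment above is the one spot in this diff where the rationale is spelled out in prose. A small self-contained sketch of the distinction it describes, under the assumption that `ra_syntax`'s `SourceFile::parse` entry point and `ast::FnDef` behave as they do elsewhere in this commit (this example is not part of the commit itself):

```rust
use ra_syntax::{ast, AstNode, AstPtr, SourceFile};

fn main() {
    let text = "fn foo() {}";

    // Parsing the same text twice yields two distinct syntax trees.
    let tree_a = SourceFile::parse(text).tree();
    let tree_b = SourceFile::parse(text).tree();

    let fn_a = tree_a.syntax().descendants().find_map(ast::FnDef::cast).unwrap();
    let fn_b = tree_b.syntax().descendants().find_map(ast::FnDef::cast).unwrap();

    // Node equality is identity-based, so nodes from different tree
    // instances are never equal, even for identical source text.
    assert!(fn_a != fn_b);

    // An AstPtr records only the node's kind and text range, so pointers
    // taken from either tree compare equal -- which is what `same_source`
    // above relies on.
    assert!(AstPtr::new(&fn_a) == AstPtr::new(&fn_b));
}
```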

View file

@ -1,50 +1,12 @@
//! Many kinds of items or constructs can have generic parameters: functions, //! Temp module to wrap hir_def::generics
//! structs, impls, traits, etc. This module provides a common HIR for these
//! generic parameters. See also the `Generics` type and the `generics_of` query
//! in rustc.
use std::sync::Arc; use std::sync::Arc;
use hir_def::{
path::Path,
type_ref::{TypeBound, TypeRef},
};
use hir_expand::name::{self, AsName};
use ra_syntax::ast::{self, DefaultTypeParamOwner, NameOwner, TypeBoundsOwner, TypeParamsOwner};
use crate::{ use crate::{
db::{AstDatabase, DefDatabase, HirDatabase}, db::DefDatabase, Adt, Const, Container, Enum, EnumVariant, Function, ImplBlock, Struct, Trait,
Adt, Const, Container, Enum, EnumVariant, Function, HasSource, ImplBlock, Name, Struct, Trait,
TypeAlias, Union, TypeAlias, Union,
}; };
/// Data about a generic parameter (to a function, struct, impl, ...). pub use hir_def::generics::{GenericParam, GenericParams, WherePredicate};
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct GenericParam {
// FIXME: give generic params proper IDs
pub idx: u32,
pub name: Name,
pub default: Option<Path>,
}
/// Data about the generic parameters of a function, struct, impl, etc.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct GenericParams {
pub(crate) def: GenericDef,
pub(crate) parent_params: Option<Arc<GenericParams>>,
pub(crate) params: Vec<GenericParam>,
pub(crate) where_predicates: Vec<WherePredicate>,
}
/// A single predicate from a where clause, i.e. `where Type: Trait`. Combined
/// where clauses like `where T: Foo + Bar` are turned into multiple of these.
/// It might still result in multiple actual predicates though, because of
/// associated type bindings like `Iterator<Item = u32>`.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct WherePredicate {
pub(crate) type_ref: TypeRef,
pub(crate) bound: TypeBound,
}
#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)] #[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)]
pub enum GenericDef { pub enum GenericDef {
@ -69,150 +31,6 @@ impl_froms!(
Const Const
); );
impl GenericParams {
pub(crate) fn generic_params_query(
db: &(impl DefDatabase + AstDatabase),
def: GenericDef,
) -> Arc<GenericParams> {
let parent = match def {
GenericDef::Function(it) => it.container(db).map(GenericDef::from),
GenericDef::TypeAlias(it) => it.container(db).map(GenericDef::from),
GenericDef::Const(it) => it.container(db).map(GenericDef::from),
GenericDef::EnumVariant(it) => Some(it.parent_enum(db).into()),
GenericDef::Adt(_) | GenericDef::Trait(_) => None,
GenericDef::ImplBlock(_) => None,
};
let mut generics = GenericParams {
def,
params: Vec::new(),
parent_params: parent.map(|p| db.generic_params(p)),
where_predicates: Vec::new(),
};
let start = generics.parent_params.as_ref().map(|p| p.params.len()).unwrap_or(0) as u32;
// FIXME: add `: Sized` bound for everything except for `Self` in traits
match def {
GenericDef::Function(it) => generics.fill(&it.source(db).ast, start),
GenericDef::Adt(Adt::Struct(it)) => generics.fill(&it.source(db).ast, start),
GenericDef::Adt(Adt::Union(it)) => generics.fill(&it.source(db).ast, start),
GenericDef::Adt(Adt::Enum(it)) => generics.fill(&it.source(db).ast, start),
GenericDef::Trait(it) => {
// traits get the Self type as an implicit first type parameter
generics.params.push(GenericParam {
idx: start,
name: name::SELF_TYPE,
default: None,
});
generics.fill(&it.source(db).ast, start + 1);
// add super traits as bounds on Self
// i.e., trait Foo: Bar is equivalent to trait Foo where Self: Bar
let self_param = TypeRef::Path(name::SELF_TYPE.into());
generics.fill_bounds(&it.source(db).ast, self_param);
}
GenericDef::TypeAlias(it) => generics.fill(&it.source(db).ast, start),
// Note that we don't add `Self` here: in `impl`s, `Self` is not a
// type-parameter, but rather is a type-alias for impl's target
// type, so this is handled by the resolver.
GenericDef::ImplBlock(it) => generics.fill(&it.source(db).ast, start),
GenericDef::EnumVariant(_) | GenericDef::Const(_) => {}
}
Arc::new(generics)
}
fn fill(&mut self, node: &impl TypeParamsOwner, start: u32) {
if let Some(params) = node.type_param_list() {
self.fill_params(params, start)
}
if let Some(where_clause) = node.where_clause() {
self.fill_where_predicates(where_clause);
}
}
fn fill_bounds(&mut self, node: &impl ast::TypeBoundsOwner, type_ref: TypeRef) {
for bound in
node.type_bound_list().iter().flat_map(|type_bound_list| type_bound_list.bounds())
{
self.add_where_predicate_from_bound(bound, type_ref.clone());
}
}
fn fill_params(&mut self, params: ast::TypeParamList, start: u32) {
for (idx, type_param) in params.type_params().enumerate() {
let name = type_param.name().map_or_else(Name::missing, |it| it.as_name());
// FIXME: Use `Path::from_src`
let default = type_param.default_type().and_then(|t| t.path()).and_then(Path::from_ast);
let param = GenericParam { idx: idx as u32 + start, name: name.clone(), default };
self.params.push(param);
let type_ref = TypeRef::Path(name.into());
self.fill_bounds(&type_param, type_ref);
}
}
fn fill_where_predicates(&mut self, where_clause: ast::WhereClause) {
for pred in where_clause.predicates() {
let type_ref = match pred.type_ref() {
Some(type_ref) => type_ref,
None => continue,
};
let type_ref = TypeRef::from_ast(type_ref);
for bound in pred.type_bound_list().iter().flat_map(|l| l.bounds()) {
self.add_where_predicate_from_bound(bound, type_ref.clone());
}
}
}
fn add_where_predicate_from_bound(&mut self, bound: ast::TypeBound, type_ref: TypeRef) {
if bound.has_question_mark() {
// FIXME: remove this bound
return;
}
let bound = TypeBound::from_ast(bound);
self.where_predicates.push(WherePredicate { type_ref, bound });
}
pub(crate) fn find_by_name(&self, name: &Name) -> Option<&GenericParam> {
self.params.iter().find(|p| &p.name == name)
}
pub fn count_parent_params(&self) -> usize {
self.parent_params.as_ref().map(|p| p.count_params_including_parent()).unwrap_or(0)
}
pub fn count_params_including_parent(&self) -> usize {
let parent_count = self.count_parent_params();
parent_count + self.params.len()
}
fn for_each_param<'a>(&'a self, f: &mut impl FnMut(&'a GenericParam)) {
if let Some(parent) = &self.parent_params {
parent.for_each_param(f);
}
self.params.iter().for_each(f);
}
pub fn params_including_parent(&self) -> Vec<&GenericParam> {
let mut vec = Vec::with_capacity(self.count_params_including_parent());
self.for_each_param(&mut |p| vec.push(p));
vec
}
}
impl GenericDef {
pub(crate) fn resolver(&self, db: &impl HirDatabase) -> crate::Resolver {
match self {
GenericDef::Function(inner) => inner.resolver(db),
GenericDef::Adt(adt) => adt.resolver(db),
GenericDef::Trait(inner) => inner.resolver(db),
GenericDef::TypeAlias(inner) => inner.resolver(db),
GenericDef::ImplBlock(inner) => inner.resolver(db),
GenericDef::EnumVariant(inner) => inner.parent_enum(db).resolver(db),
GenericDef::Const(inner) => inner.resolver(db),
}
}
}
impl From<Container> for GenericDef { impl From<Container> for GenericDef {
fn from(c: Container) -> Self { fn from(c: Container) -> Self {
match c { match c {
@ -231,6 +49,6 @@ where
T: Into<GenericDef> + Copy, T: Into<GenericDef> + Copy,
{ {
fn generic_params(self, db: &impl DefDatabase) -> Arc<GenericParams> { fn generic_params(self, db: &impl DefDatabase) -> Arc<GenericParams> {
db.generic_params(self.into()) db.generic_params(self.into().into())
} }
} }

View file

@ -5,8 +5,7 @@ use ra_syntax::ast::{self};
use crate::{ use crate::{
db::{AstDatabase, DefDatabase, HirDatabase}, db::{AstDatabase, DefDatabase, HirDatabase},
generics::HasGenericParams, resolve::HasResolver,
resolve::Resolver,
ty::Ty, ty::Ty,
AssocItem, Crate, HasSource, ImplBlock, Module, Source, TraitRef, AssocItem, Crate, HasSource, ImplBlock, Module, Source, TraitRef,
}; };
@ -19,14 +18,6 @@ impl HasSource for ImplBlock {
} }
impl ImplBlock { impl ImplBlock {
pub(crate) fn containing(db: &impl DefDatabase, item: AssocItem) -> Option<ImplBlock> {
let module = item.module(db);
let crate_def_map = db.crate_def_map(module.id.krate);
crate_def_map[module.id.module_id].impls.iter().copied().map(ImplBlock::from).find(|it| {
db.impl_data(it.id).items().iter().copied().map(AssocItem::from).any(|it| it == item)
})
}
pub fn target_trait(&self, db: &impl DefDatabase) -> Option<TypeRef> { pub fn target_trait(&self, db: &impl DefDatabase) -> Option<TypeRef> {
db.impl_data(self.id).target_trait().cloned() db.impl_data(self.id).target_trait().cloned()
} }
@ -59,13 +50,4 @@ impl ImplBlock {
pub fn krate(&self, db: &impl DefDatabase) -> Crate { pub fn krate(&self, db: &impl DefDatabase) -> Crate {
Crate { crate_id: self.module(db).id.krate } Crate { crate_id: self.module(db).id.krate }
} }
pub(crate) fn resolver(&self, db: &impl DefDatabase) -> Resolver {
let r = self.module(db).resolver(db);
// add generic params, if present
let p = self.generic_params(db);
let r = if !p.params.is_empty() { r.push_generic_params_scope(p) } else { r };
let r = r.push_impl_block_scope(self.clone());
r
}
} }

View file

@ -97,7 +97,7 @@ impl LangItems {
// Look for impl targets // Look for impl targets
for impl_block in module.impl_blocks(db) { for impl_block in module.impl_blocks(db) {
let src = impl_block.source(db); let src = impl_block.source(db);
if let Some(lang_item_name) = lang_item_name(&src.ast) { if let Some(lang_item_name) = lang_item_name(&src.value) {
self.items self.items
.entry(lang_item_name) .entry(lang_item_name)
.or_insert_with(|| LangItemTarget::ImplBlock(impl_block)); .or_insert_with(|| LangItemTarget::ImplBlock(impl_block));
@ -144,7 +144,7 @@ impl LangItems {
T: Copy + HasSource<Ast = N>, T: Copy + HasSource<Ast = N>,
N: AttrsOwner, N: AttrsOwner,
{ {
let node = item.source(db).ast; let node = item.source(db).value;
if let Some(lang_item_name) = lang_item_name(&node) { if let Some(lang_item_name) = lang_item_name(&node) {
self.items.entry(lang_item_name).or_insert_with(|| constructor(item)); self.items.entry(lang_item_name).or_insert_with(|| constructor(item));
} }

View file

@ -32,8 +32,6 @@ pub mod db;
pub mod source_binder; pub mod source_binder;
mod ids; mod ids;
mod adt;
mod traits;
mod type_alias; mod type_alias;
mod ty; mod ty;
mod impl_block; mod impl_block;
@ -57,15 +55,14 @@ mod marks;
use crate::resolve::Resolver; use crate::resolve::Resolver;
pub use crate::{ pub use crate::{
adt::VariantDef,
code_model::ImplBlock,
code_model::{ code_model::{
attrs::{AttrDef, Attrs}, attrs::{AttrDef, Attrs},
docs::{DocDef, Docs, Documentation}, docs::{DocDef, Docs, Documentation},
src::{HasBodySource, HasSource}, src::{HasBodySource, HasSource},
Adt, AssocItem, Const, ConstData, Container, Crate, CrateDependency, DefWithBody, Enum, Adt, AssocItem, Const, ConstData, Container, Crate, CrateDependency, DefWithBody, Enum,
EnumVariant, FieldSource, FnData, Function, GenericParam, HasBody, Local, MacroDef, Module, EnumVariant, FieldSource, FnData, Function, GenericParam, HasBody, ImplBlock, Local,
ModuleDef, ModuleSource, Static, Struct, StructField, Trait, TypeAlias, Union, MacroDef, Module, ModuleDef, ModuleSource, Static, Struct, StructField, Trait, TypeAlias,
Union, VariantDef,
}, },
expr::ExprScopes, expr::ExprScopes,
from_source::FromSource, from_source::FromSource,

View file

@ -14,9 +14,9 @@ use crate::{
code_model::Crate, code_model::Crate,
db::{DefDatabase, HirDatabase}, db::{DefDatabase, HirDatabase},
expr::{ExprScopes, PatId, ScopeId}, expr::{ExprScopes, PatId, ScopeId},
generics::GenericParams, generics::{GenericParams, HasGenericParams},
Adt, Const, DefWithBody, Enum, EnumVariant, Function, ImplBlock, Local, MacroDef, ModuleDef, Adt, Const, Container, DefWithBody, Enum, EnumVariant, Function, GenericDef, ImplBlock, Local,
PerNs, Static, Struct, Trait, TypeAlias, MacroDef, Module, ModuleDef, PerNs, Static, Struct, Trait, TypeAlias,
}; };
#[derive(Debug, Clone, Default)] #[derive(Debug, Clone, Default)]
@ -43,7 +43,7 @@ pub(crate) enum Scope {
/// All the items and imported names of a module /// All the items and imported names of a module
ModuleScope(ModuleItemMap), ModuleScope(ModuleItemMap),
/// Brings the generic parameters of an item into scope /// Brings the generic parameters of an item into scope
GenericParams(Arc<GenericParams>), GenericParams { def: GenericDef, params: Arc<GenericParams> },
/// Brings `Self` in `impl` block into scope /// Brings `Self` in `impl` block into scope
ImplBlockScope(ImplBlock), ImplBlockScope(ImplBlock),
/// Brings `Self` in enum, struct and union definitions into scope /// Brings `Self` in enum, struct and union definitions into scope
@ -141,9 +141,9 @@ impl Resolver {
for scope in self.scopes.iter().rev() { for scope in self.scopes.iter().rev() {
match scope { match scope {
Scope::ExprScope(_) => continue, Scope::ExprScope(_) => continue,
Scope::GenericParams(_) | Scope::ImplBlockScope(_) if skip_to_mod => continue, Scope::GenericParams { .. } | Scope::ImplBlockScope(_) if skip_to_mod => continue,
Scope::GenericParams(params) => { Scope::GenericParams { params, .. } => {
if let Some(param) = params.find_by_name(first_name) { if let Some(param) = params.find_by_name(first_name) {
let idx = if path.segments.len() == 1 { None } else { Some(1) }; let idx = if path.segments.len() == 1 { None } else { Some(1) };
return Some((TypeNs::GenericParam(param.idx), idx)); return Some((TypeNs::GenericParam(param.idx), idx));
@ -212,7 +212,7 @@ impl Resolver {
match scope { match scope {
Scope::AdtScope(_) Scope::AdtScope(_)
| Scope::ExprScope(_) | Scope::ExprScope(_)
| Scope::GenericParams(_) | Scope::GenericParams { .. }
| Scope::ImplBlockScope(_) | Scope::ImplBlockScope(_)
if skip_to_mod => if skip_to_mod =>
{ {
@ -232,13 +232,13 @@ impl Resolver {
} }
Scope::ExprScope(_) => continue, Scope::ExprScope(_) => continue,
Scope::GenericParams(params) if n_segments > 1 => { Scope::GenericParams { params, .. } if n_segments > 1 => {
if let Some(param) = params.find_by_name(first_name) { if let Some(param) = params.find_by_name(first_name) {
let ty = TypeNs::GenericParam(param.idx); let ty = TypeNs::GenericParam(param.idx);
return Some(ResolveValueResult::Partial(ty, 1)); return Some(ResolveValueResult::Partial(ty, 1));
} }
} }
Scope::GenericParams(_) => continue, Scope::GenericParams { .. } => continue,
Scope::ImplBlockScope(impl_) if n_segments > 1 => { Scope::ImplBlockScope(impl_) if n_segments > 1 => {
if first_name == &name::SELF_TYPE { if first_name == &name::SELF_TYPE {
@ -361,7 +361,7 @@ impl Resolver {
self.scopes self.scopes
.iter() .iter()
.filter_map(|scope| match scope { .filter_map(|scope| match scope {
Scope::GenericParams(params) => Some(params), Scope::GenericParams { params, .. } => Some(params),
_ => None, _ => None,
}) })
.flat_map(|params| params.where_predicates.iter()) .flat_map(|params| params.where_predicates.iter())
@ -369,7 +369,7 @@ impl Resolver {
pub(crate) fn generic_def(&self) -> Option<crate::generics::GenericDef> { pub(crate) fn generic_def(&self) -> Option<crate::generics::GenericDef> {
self.scopes.iter().find_map(|scope| match scope { self.scopes.iter().find_map(|scope| match scope {
Scope::GenericParams(params) => Some(params.def), Scope::GenericParams { def, .. } => Some(*def),
_ => None, _ => None,
}) })
} }
@ -381,8 +381,17 @@ impl Resolver {
self self
} }
pub(crate) fn push_generic_params_scope(self, params: Arc<GenericParams>) -> Resolver { pub(crate) fn push_generic_params_scope(
self.push_scope(Scope::GenericParams(params)) self,
db: &impl DefDatabase,
def: GenericDef,
) -> Resolver {
let params = def.generic_params(db);
if params.params.is_empty() {
self
} else {
self.push_scope(Scope::GenericParams { def, params })
}
} }
pub(crate) fn push_impl_block_scope(self, impl_block: ImplBlock) -> Resolver { pub(crate) fn push_impl_block_scope(self, impl_block: ImplBlock) -> Resolver {
@ -457,8 +466,8 @@ impl Scope {
}); });
} }
} }
Scope::GenericParams(gp) => { Scope::GenericParams { params, .. } => {
for param in &gp.params { for param in params.params.iter() {
f(param.name.clone(), ScopeDef::GenericParam(param.idx)) f(param.name.clone(), ScopeDef::GenericParam(param.idx))
} }
} }
@ -477,3 +486,103 @@ impl Scope {
} }
} }
} }
pub(crate) trait HasResolver {
/// Builds a resolver for type references inside this def.
fn resolver(self, db: &impl DefDatabase) -> Resolver;
}
impl HasResolver for Module {
fn resolver(self, db: &impl DefDatabase) -> Resolver {
let def_map = db.crate_def_map(self.id.krate);
Resolver::default().push_module_scope(def_map, self.id.module_id)
}
}
impl HasResolver for Trait {
fn resolver(self, db: &impl DefDatabase) -> Resolver {
self.module(db).resolver(db).push_generic_params_scope(db, self.into())
}
}
impl<T: Into<Adt>> HasResolver for T {
fn resolver(self, db: &impl DefDatabase) -> Resolver {
let def = self.into();
def.module(db)
.resolver(db)
.push_generic_params_scope(db, def.into())
.push_scope(Scope::AdtScope(def))
}
}
impl HasResolver for Function {
fn resolver(self, db: &impl DefDatabase) -> Resolver {
self.container(db)
.map(|c| c.resolver(db))
.unwrap_or_else(|| self.module(db).resolver(db))
.push_generic_params_scope(db, self.into())
}
}
impl HasResolver for DefWithBody {
fn resolver(self, db: &impl DefDatabase) -> Resolver {
match self {
DefWithBody::Const(c) => c.resolver(db),
DefWithBody::Function(f) => f.resolver(db),
DefWithBody::Static(s) => s.resolver(db),
}
}
}
impl HasResolver for Const {
fn resolver(self, db: &impl DefDatabase) -> Resolver {
self.container(db).map(|c| c.resolver(db)).unwrap_or_else(|| self.module(db).resolver(db))
}
}
impl HasResolver for Static {
fn resolver(self, db: &impl DefDatabase) -> Resolver {
self.module(db).resolver(db)
}
}
impl HasResolver for TypeAlias {
fn resolver(self, db: &impl DefDatabase) -> Resolver {
self.container(db)
.map(|ib| ib.resolver(db))
.unwrap_or_else(|| self.module(db).resolver(db))
.push_generic_params_scope(db, self.into())
}
}
impl HasResolver for Container {
fn resolver(self, db: &impl DefDatabase) -> Resolver {
match self {
Container::Trait(trait_) => trait_.resolver(db),
Container::ImplBlock(impl_block) => impl_block.resolver(db),
}
}
}
impl HasResolver for GenericDef {
fn resolver(self, db: &impl DefDatabase) -> crate::Resolver {
match self {
GenericDef::Function(inner) => inner.resolver(db),
GenericDef::Adt(adt) => adt.resolver(db),
GenericDef::Trait(inner) => inner.resolver(db),
GenericDef::TypeAlias(inner) => inner.resolver(db),
GenericDef::ImplBlock(inner) => inner.resolver(db),
GenericDef::EnumVariant(inner) => inner.parent_enum(db).resolver(db),
GenericDef::Const(inner) => inner.resolver(db),
}
}
}
impl HasResolver for ImplBlock {
fn resolver(self, db: &impl DefDatabase) -> Resolver {
self.module(db)
.resolver(db)
.push_generic_params_scope(db, self.into())
.push_impl_block_scope(self)
}
}
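The HasResolver impls above build a resolver by layering scopes from the outside in: each def takes its container's resolver and pushes its own scopes on top of it, and lookups walk the scopes innermost-first (the resolver iterates them in reverse). As a rough illustration only, with a ScopeKind type and function invented for this sketch (they are not part of the crate), a method with its own type parameter inside a generic impl ends up with scopes in this order:

// Hypothetical, simplified model of the layering performed by Function::resolver
// when the function lives in an impl block; see the impls above for the real code.
#[derive(Debug)]
enum ScopeKind {
    Module,
    GenericParams(&'static str),
    ImplSelf,
}

fn scopes_for_method_in_generic_impl() -> Vec<ScopeKind> {
    vec![
        ScopeKind::Module,                // Module::resolver: the module's item scope
        ScopeKind::GenericParams("impl"), // ImplBlock::resolver: the impl's type params
        ScopeKind::ImplSelf,              // push_impl_block_scope: `Self`
        ScopeKind::GenericParams("fn"),   // Function::resolver: the method's type params
    ]
}

fn main() {
    println!("{:?}", scopes_for_method_in_generic_impl());
}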

View file

@ -23,7 +23,7 @@ use crate::{
db::HirDatabase, db::HirDatabase,
expr::{self, BodySourceMap, ExprScopes, ScopeId}, expr::{self, BodySourceMap, ExprScopes, ScopeId},
ids::LocationCtx, ids::LocationCtx,
resolve::{ScopeDef, TypeNs, ValueNs}, resolve::{HasResolver, ScopeDef, TypeNs, ValueNs},
ty::method_resolution::{self, implements_trait}, ty::method_resolution::{self, implements_trait},
AssocItem, Const, DefWithBody, Either, Enum, FromSource, Function, GenericParam, HasBody, AssocItem, Const, DefWithBody, Either, Enum, FromSource, Function, GenericParam, HasBody,
HirFileId, Local, MacroDef, Module, Name, Path, Resolver, Static, Struct, Ty, HirFileId, Local, MacroDef, Module, Name, Path, Resolver, Static, Struct, Ty,
@ -31,24 +31,24 @@ use crate::{
fn try_get_resolver_for_node(db: &impl HirDatabase, node: Source<&SyntaxNode>) -> Option<Resolver> { fn try_get_resolver_for_node(db: &impl HirDatabase, node: Source<&SyntaxNode>) -> Option<Resolver> {
match_ast! { match_ast! {
match (node.ast) { match (node.value) {
ast::Module(it) => { ast::Module(it) => {
let src = node.with_ast(it); let src = node.with_value(it);
Some(crate::Module::from_declaration(db, src)?.resolver(db)) Some(crate::Module::from_declaration(db, src)?.resolver(db))
}, },
ast::SourceFile(it) => { ast::SourceFile(it) => {
let src = node.with_ast(crate::ModuleSource::SourceFile(it)); let src = node.with_value(crate::ModuleSource::SourceFile(it));
Some(crate::Module::from_definition(db, src)?.resolver(db)) Some(crate::Module::from_definition(db, src)?.resolver(db))
}, },
ast::StructDef(it) => { ast::StructDef(it) => {
let src = node.with_ast(it); let src = node.with_value(it);
Some(Struct::from_source(db, src)?.resolver(db)) Some(Struct::from_source(db, src)?.resolver(db))
}, },
ast::EnumDef(it) => { ast::EnumDef(it) => {
let src = node.with_ast(it); let src = node.with_value(it);
Some(Enum::from_source(db, src)?.resolver(db)) Some(Enum::from_source(db, src)?.resolver(db))
}, },
_ => match node.ast.kind() { _ => match node.value.kind() {
FN_DEF | CONST_DEF | STATIC_DEF => { FN_DEF | CONST_DEF | STATIC_DEF => {
Some(def_with_body_from_child_node(db, node)?.resolver(db)) Some(def_with_body_from_child_node(db, node)?.resolver(db))
} }
@ -67,11 +67,11 @@ fn def_with_body_from_child_node(
let module = Module::from_definition(db, Source::new(child.file_id, module_source))?; let module = Module::from_definition(db, Source::new(child.file_id, module_source))?;
let ctx = LocationCtx::new(db, module.id, child.file_id); let ctx = LocationCtx::new(db, module.id, child.file_id);
child.ast.ancestors().find_map(|node| { child.value.ancestors().find_map(|node| {
match_ast! { match_ast! {
match node { match node {
ast::FnDef(def) => { Some(Function {id: ctx.to_def(&def) }.into()) }, ast::FnDef(def) => { return Function::from_source(db, child.with_value(def)).map(DefWithBody::from); },
ast::ConstDef(def) => { Some(Const { id: ctx.to_def(&def) }.into()) }, ast::ConstDef(def) => { return Const::from_source(db, child.with_value(def)).map(DefWithBody::from); },
ast::StaticDef(def) => { Some(Static { id: ctx.to_def(&def) }.into()) }, ast::StaticDef(def) => { Some(Static { id: ctx.to_def(&def) }.into()) },
_ => { None }, _ => { None },
} }
@ -157,7 +157,7 @@ impl SourceAnalyzer {
let scopes = def.expr_scopes(db); let scopes = def.expr_scopes(db);
let scope = match offset { let scope = match offset {
None => scope_for(&scopes, &source_map, node), None => scope_for(&scopes, &source_map, node),
Some(offset) => scope_for_offset(&scopes, &source_map, node.with_ast(offset)), Some(offset) => scope_for_offset(&scopes, &source_map, node.with_value(offset)),
}; };
let resolver = expr::resolver_for_scope(db, def, scope); let resolver = expr::resolver_for_scope(db, def, scope);
SourceAnalyzer { SourceAnalyzer {
@ -171,9 +171,9 @@ impl SourceAnalyzer {
} else { } else {
SourceAnalyzer { SourceAnalyzer {
resolver: node resolver: node
.ast .value
.ancestors() .ancestors()
.find_map(|it| try_get_resolver_for_node(db, node.with_ast(&it))) .find_map(|it| try_get_resolver_for_node(db, node.with_value(&it)))
.unwrap_or_default(), .unwrap_or_default(),
body_owner: None, body_owner: None,
body_source_map: None, body_source_map: None,
@ -185,12 +185,12 @@ impl SourceAnalyzer {
} }
fn expr_id(&self, expr: &ast::Expr) -> Option<ExprId> { fn expr_id(&self, expr: &ast::Expr) -> Option<ExprId> {
let src = Source { file_id: self.file_id, ast: expr }; let src = Source { file_id: self.file_id, value: expr };
self.body_source_map.as_ref()?.node_expr(src) self.body_source_map.as_ref()?.node_expr(src)
} }
fn pat_id(&self, pat: &ast::Pat) -> Option<PatId> { fn pat_id(&self, pat: &ast::Pat) -> Option<PatId> {
let src = Source { file_id: self.file_id, ast: pat }; let src = Source { file_id: self.file_id, value: pat };
self.body_source_map.as_ref()?.node_pat(src) self.body_source_map.as_ref()?.node_pat(src)
} }
@ -302,7 +302,7 @@ impl SourceAnalyzer {
let entry = scopes.resolve_name_in_scope(scope, &name)?; let entry = scopes.resolve_name_in_scope(scope, &name)?;
Some(ScopeEntryWithSyntax { Some(ScopeEntryWithSyntax {
name: entry.name().clone(), name: entry.name().clone(),
ptr: source_map.pat_syntax(entry.pat())?.ast, ptr: source_map.pat_syntax(entry.pat())?.value,
}) })
} }
@ -405,9 +405,16 @@ impl SourceAnalyzer {
implements_trait(&canonical_ty, db, &self.resolver, krate, std_future_trait) implements_trait(&canonical_ty, db, &self.resolver, krate, std_future_trait)
} }
pub fn expand(&self, db: &impl HirDatabase, macro_call: &ast::MacroCall) -> Option<Expansion> { pub fn expand(
let def = self.resolve_macro_call(db, macro_call)?.id; &self,
let ast_id = AstId::new(self.file_id, db.ast_id_map(self.file_id).ast_id(macro_call)); db: &impl HirDatabase,
macro_call: Source<&ast::MacroCall>,
) -> Option<Expansion> {
let def = self.resolve_macro_call(db, macro_call.value)?.id;
let ast_id = AstId::new(
macro_call.file_id,
db.ast_id_map(macro_call.file_id).ast_id(macro_call.value),
);
let macro_call_loc = MacroCallLoc { def, ast_id }; let macro_call_loc = MacroCallLoc { def, ast_id };
Some(Expansion { macro_call_id: db.intern_macro(macro_call_loc) }) Some(Expansion { macro_call_id: db.intern_macro(macro_call_loc) })
} }
@ -421,6 +428,11 @@ impl SourceAnalyzer {
pub(crate) fn inference_result(&self) -> Arc<crate::ty::InferenceResult> { pub(crate) fn inference_result(&self) -> Arc<crate::ty::InferenceResult> {
self.infer.clone().unwrap() self.infer.clone().unwrap()
} }
#[cfg(test)]
pub(crate) fn analyzed_declaration(&self) -> Option<DefWithBody> {
self.body_owner
}
} }
fn scope_for( fn scope_for(
@ -428,7 +440,7 @@ fn scope_for(
source_map: &BodySourceMap, source_map: &BodySourceMap,
node: Source<&SyntaxNode>, node: Source<&SyntaxNode>,
) -> Option<ScopeId> { ) -> Option<ScopeId> {
node.ast node.value
.ancestors() .ancestors()
.filter_map(ast::Expr::cast) .filter_map(ast::Expr::cast)
.filter_map(|it| source_map.node_expr(Source::new(node.file_id, &it))) .filter_map(|it| source_map.node_expr(Source::new(node.file_id, &it)))
@ -450,18 +462,18 @@ fn scope_for_offset(
return None; return None;
} }
let syntax_node_ptr = let syntax_node_ptr =
source.ast.either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr()); source.value.either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr());
Some((syntax_node_ptr, scope)) Some((syntax_node_ptr, scope))
}) })
// find containing scope // find containing scope
.min_by_key(|(ptr, _scope)| { .min_by_key(|(ptr, _scope)| {
( (
!(ptr.range().start() <= offset.ast && offset.ast <= ptr.range().end()), !(ptr.range().start() <= offset.value && offset.value <= ptr.range().end()),
ptr.range().len(), ptr.range().len(),
) )
}) })
.map(|(ptr, scope)| { .map(|(ptr, scope)| {
adjust(scopes, source_map, ptr, offset.file_id, offset.ast).unwrap_or(*scope) adjust(scopes, source_map, ptr, offset.file_id, offset.value).unwrap_or(*scope)
}) })
} }
@ -485,7 +497,7 @@ fn adjust(
return None; return None;
} }
let syntax_node_ptr = let syntax_node_ptr =
source.ast.either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr()); source.value.either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr());
Some((syntax_node_ptr, scope)) Some((syntax_node_ptr, scope))
}) })
.map(|(ptr, scope)| (ptr.range(), scope)) .map(|(ptr, scope)| (ptr.range(), scope))

View file

@ -1,82 +0,0 @@
//! HIR for trait definitions.
use std::sync::Arc;
use hir_expand::name::AsName;
use ra_syntax::ast::{self, NameOwner};
use rustc_hash::FxHashMap;
use crate::{
db::{AstDatabase, DefDatabase},
ids::LocationCtx,
AssocItem, Const, Function, HasSource, Module, Name, Trait, TypeAlias,
};
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct TraitData {
name: Option<Name>,
items: Vec<AssocItem>,
auto: bool,
}
impl TraitData {
pub(crate) fn trait_data_query(
db: &(impl DefDatabase + AstDatabase),
tr: Trait,
) -> Arc<TraitData> {
let src = tr.source(db);
let name = src.ast.name().map(|n| n.as_name());
let module = tr.module(db);
let ctx = LocationCtx::new(db, module.id, src.file_id);
let auto = src.ast.is_auto();
let items = if let Some(item_list) = src.ast.item_list() {
item_list
.impl_items()
.map(|item_node| match item_node {
ast::ImplItem::FnDef(it) => Function { id: ctx.to_def(&it) }.into(),
ast::ImplItem::ConstDef(it) => Const { id: ctx.to_def(&it) }.into(),
ast::ImplItem::TypeAliasDef(it) => TypeAlias { id: ctx.to_def(&it) }.into(),
})
.collect()
} else {
Vec::new()
};
Arc::new(TraitData { name, items, auto })
}
pub(crate) fn name(&self) -> &Option<Name> {
&self.name
}
pub(crate) fn items(&self) -> &[AssocItem] {
&self.items
}
pub(crate) fn is_auto(&self) -> bool {
self.auto
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct TraitItemsIndex {
traits_by_def: FxHashMap<AssocItem, Trait>,
}
impl TraitItemsIndex {
pub(crate) fn trait_items_index(db: &impl DefDatabase, module: Module) -> TraitItemsIndex {
let mut index = TraitItemsIndex { traits_by_def: FxHashMap::default() };
for decl in module.declarations(db) {
if let crate::ModuleDef::Trait(tr) = decl {
for item in tr.trait_data(db).items() {
index.traits_by_def.insert(*item, tr);
}
}
}
index
}
pub(crate) fn get_parent_trait(&self, item: AssocItem) -> Option<Trait> {
self.traits_by_def.get(&item).cloned()
}
}

View file

@ -3,8 +3,6 @@
mod autoderef; mod autoderef;
pub(crate) mod primitive; pub(crate) mod primitive;
#[cfg(test)]
mod tests;
pub(crate) mod traits; pub(crate) mod traits;
pub(crate) mod method_resolution; pub(crate) mod method_resolution;
mod op; mod op;
@ -12,6 +10,9 @@ mod lower;
mod infer; mod infer;
pub(crate) mod display; pub(crate) mod display;
#[cfg(test)]
mod tests;
use std::ops::Deref; use std::ops::Deref;
use std::sync::Arc; use std::sync::Arc;
use std::{fmt, iter, mem}; use std::{fmt, iter, mem};
@ -800,6 +801,10 @@ impl HirDisplay for &Ty {
impl HirDisplay for ApplicationTy { impl HirDisplay for ApplicationTy {
fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result { fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
if f.should_truncate() {
return write!(f, "…");
}
match self.ctor { match self.ctor {
TypeCtor::Bool => write!(f, "bool")?, TypeCtor::Bool => write!(f, "bool")?,
TypeCtor::Char => write!(f, "char")?, TypeCtor::Char => write!(f, "char")?,
@ -901,6 +906,10 @@ impl HirDisplay for ApplicationTy {
impl HirDisplay for ProjectionTy { impl HirDisplay for ProjectionTy {
fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result { fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
if f.should_truncate() {
return write!(f, "…");
}
let trait_name = self let trait_name = self
.associated_ty .associated_ty
.parent_trait(f.db) .parent_trait(f.db)
@ -919,6 +928,10 @@ impl HirDisplay for ProjectionTy {
impl HirDisplay for Ty { impl HirDisplay for Ty {
fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result { fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
if f.should_truncate() {
return write!(f, "…");
}
match self { match self {
Ty::Apply(a_ty) => a_ty.hir_fmt(f)?, Ty::Apply(a_ty) => a_ty.hir_fmt(f)?,
Ty::Projection(p_ty) => p_ty.hir_fmt(f)?, Ty::Projection(p_ty) => p_ty.hir_fmt(f)?,
@ -1001,6 +1014,10 @@ impl HirDisplay for Ty {
impl TraitRef { impl TraitRef {
fn hir_fmt_ext(&self, f: &mut HirFormatter<impl HirDatabase>, use_as: bool) -> fmt::Result { fn hir_fmt_ext(&self, f: &mut HirFormatter<impl HirDatabase>, use_as: bool) -> fmt::Result {
if f.should_truncate() {
return write!(f, "…");
}
self.substs[0].hir_fmt(f)?; self.substs[0].hir_fmt(f)?;
if use_as { if use_as {
write!(f, " as ")?; write!(f, " as ")?;
@ -1031,6 +1048,10 @@ impl HirDisplay for &GenericPredicate {
impl HirDisplay for GenericPredicate { impl HirDisplay for GenericPredicate {
fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result { fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
if f.should_truncate() {
return write!(f, "…");
}
match self { match self {
GenericPredicate::Implemented(trait_ref) => trait_ref.hir_fmt(f)?, GenericPredicate::Implemented(trait_ref) => trait_ref.hir_fmt(f)?,
GenericPredicate::Projection(projection_pred) => { GenericPredicate::Projection(projection_pred) => {

View file

@ -7,15 +7,30 @@ use crate::db::HirDatabase;
pub struct HirFormatter<'a, 'b, DB> { pub struct HirFormatter<'a, 'b, DB> {
pub db: &'a DB, pub db: &'a DB,
fmt: &'a mut fmt::Formatter<'b>, fmt: &'a mut fmt::Formatter<'b>,
buf: String,
curr_size: usize,
max_size: Option<usize>,
} }
pub trait HirDisplay { pub trait HirDisplay {
fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result; fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result;
fn display<'a, DB>(&'a self, db: &'a DB) -> HirDisplayWrapper<'a, DB, Self> fn display<'a, DB>(&'a self, db: &'a DB) -> HirDisplayWrapper<'a, DB, Self>
where where
Self: Sized, Self: Sized,
{ {
HirDisplayWrapper(db, self) HirDisplayWrapper(db, self, None)
}
fn display_truncated<'a, DB>(
&'a self,
db: &'a DB,
max_size: Option<usize>,
) -> HirDisplayWrapper<'a, DB, Self>
where
Self: Sized,
{
HirDisplayWrapper(db, self, max_size)
} }
} }
@ -41,11 +56,25 @@ where
/// This allows using the `write!` macro directly with a `HirFormatter`. /// This allows using the `write!` macro directly with a `HirFormatter`.
pub fn write_fmt(&mut self, args: fmt::Arguments) -> fmt::Result { pub fn write_fmt(&mut self, args: fmt::Arguments) -> fmt::Result {
fmt::write(self.fmt, args) // We write to a buffer first to track output size
self.buf.clear();
fmt::write(&mut self.buf, args)?;
self.curr_size += self.buf.len();
// Then we write to the internal formatter from the buffer
self.fmt.write_str(&self.buf)
}
pub fn should_truncate(&self) -> bool {
if let Some(max_size) = self.max_size {
self.curr_size >= max_size
} else {
false
}
} }
} }
pub struct HirDisplayWrapper<'a, DB, T>(&'a DB, &'a T); pub struct HirDisplayWrapper<'a, DB, T>(&'a DB, &'a T, Option<usize>);
impl<'a, DB, T> fmt::Display for HirDisplayWrapper<'a, DB, T> impl<'a, DB, T> fmt::Display for HirDisplayWrapper<'a, DB, T>
where where
@ -53,6 +82,12 @@ where
T: HirDisplay, T: HirDisplay,
{ {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.1.hir_fmt(&mut HirFormatter { db: self.0, fmt: f }) self.1.hir_fmt(&mut HirFormatter {
db: self.0,
fmt: f,
buf: String::with_capacity(20),
curr_size: 0,
max_size: self.2,
})
} }
} }
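The buffering in write_fmt above exists so the formatter always knows how many characters it has already produced and can stop once the budget passed to display_truncated is exhausted. Below is a self-contained sketch of the same idea with invented types (it does not use HirFormatter itself); it also shows why an inherent write_fmt method is enough for the `write!` macro to drive the size counter:

use std::fmt::{self, Write};

// Invented for illustration: tracks emitted length the way HirFormatter does.
struct TruncatingWriter<'a> {
    out: &'a mut String,
    curr_size: usize,
    max_size: Option<usize>,
}

impl TruncatingWriter<'_> {
    fn should_truncate(&self) -> bool {
        self.max_size.map_or(false, |max| self.curr_size >= max)
    }

    // `write!(w, ...)` resolves to this inherent method, so every write is
    // measured in a buffer before being forwarded to the real output.
    fn write_fmt(&mut self, args: fmt::Arguments) -> fmt::Result {
        let mut buf = String::new();
        fmt::write(&mut buf, args)?;
        self.curr_size += buf.len();
        self.out.write_str(&buf)
    }
}

fn main() -> fmt::Result {
    let mut rendered = String::new();
    let mut w = TruncatingWriter { out: &mut rendered, curr_size: 0, max_size: Some(8) };
    for part in ["Foo<", "Bar<Baz>", ", Quux>", ">"] {
        if w.should_truncate() {
            write!(w, "…")?; // give up once the budget is spent
            break;
        }
        write!(w, "{}", part)?;
    }
    assert_eq!(rendered, "Foo<Bar<Baz>…");
    Ok(())
}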

View file

@ -37,14 +37,13 @@ use super::{
TypeCtor, TypeWalk, Uncertain, TypeCtor, TypeWalk, Uncertain,
}; };
use crate::{ use crate::{
adt::VariantDef,
code_model::TypeAlias, code_model::TypeAlias,
db::HirDatabase, db::HirDatabase,
expr::{BindingAnnotation, Body, ExprId, PatId}, expr::{BindingAnnotation, Body, ExprId, PatId},
resolve::{Resolver, TypeNs}, resolve::{HasResolver, Resolver, TypeNs},
ty::infer::diagnostics::InferenceDiagnostic, ty::infer::diagnostics::InferenceDiagnostic,
Adt, AssocItem, ConstData, DefWithBody, FloatTy, FnData, Function, HasBody, IntTy, Path, Adt, AssocItem, ConstData, DefWithBody, FloatTy, FnData, Function, HasBody, IntTy, Path,
StructField, StructField, VariantDef,
}; };
macro_rules! ty_app { macro_rules! ty_app {

View file

@ -19,18 +19,17 @@ use super::{
TypeWalk, TypeWalk,
}; };
use crate::{ use crate::{
adt::VariantDef,
db::HirDatabase, db::HirDatabase,
generics::HasGenericParams, generics::HasGenericParams,
generics::{GenericDef, WherePredicate}, generics::{GenericDef, WherePredicate},
resolve::{Resolver, TypeNs}, resolve::{HasResolver, Resolver, TypeNs},
ty::{ ty::{
primitive::{FloatTy, IntTy, Uncertain}, primitive::{FloatTy, IntTy, Uncertain},
Adt, Adt,
}, },
util::make_mut_slice, util::make_mut_slice,
Const, Enum, EnumVariant, Function, ModuleDef, Path, Static, Struct, StructField, Trait, Const, Enum, EnumVariant, Function, ModuleDef, Path, Static, Struct, StructField, Trait,
TypeAlias, Union, TypeAlias, Union, VariantDef,
}; };
// FIXME: this is only really used in `type_for_def`, which contains a bunch of // FIXME: this is only really used in `type_for_def`, which contains a bunch of
@ -611,9 +610,7 @@ pub(crate) fn generic_defaults_query(db: &impl HirDatabase, def: GenericDef) ->
let defaults = generic_params let defaults = generic_params
.params_including_parent() .params_including_parent()
.into_iter() .into_iter()
.map(|p| { .map(|p| p.default.as_ref().map_or(Ty::Unknown, |t| Ty::from_hir(db, &resolver, t)))
p.default.as_ref().map_or(Ty::Unknown, |path| Ty::from_hir_path(db, &resolver, path))
})
.collect(); .collect();
Substs(defaults) Substs(defaults)

View file

@ -232,8 +232,8 @@ fn iterate_trait_method_candidates<T>(
// trait, but if we find out it doesn't, we'll skip the rest of the // trait, but if we find out it doesn't, we'll skip the rest of the
// iteration // iteration
let mut known_implemented = false; let mut known_implemented = false;
for &item in data.items() { for &item in data.items.iter() {
if !is_valid_candidate(db, name, mode, item) { if !is_valid_candidate(db, name, mode, item.into()) {
continue; continue;
} }
if !known_implemented { if !known_implemented {
@ -243,7 +243,7 @@ fn iterate_trait_method_candidates<T>(
} }
} }
known_implemented = true; known_implemented = true;
if let Some(result) = callback(&ty.value, item) { if let Some(result) = callback(&ty.value, item.into()) {
return Some(result); return Some(result);
} }
} }

View file

@ -11,6 +11,7 @@ use ra_syntax::{
ast::{self, AstNode}, ast::{self, AstNode},
SyntaxKind::*, SyntaxKind::*,
}; };
use rustc_hash::FxHashSet;
use test_utils::covers; use test_utils::covers;
use crate::{ use crate::{
@ -1979,6 +1980,30 @@ fn test() {
); );
} }
#[test]
fn infer_associated_method_generics_with_default_tuple_param() {
let t = type_at(
r#"
//- /main.rs
struct Gen<T=()> {
val: T
}
impl<T> Gen<T> {
pub fn make() -> Gen<T> {
loop { }
}
}
fn test() {
let a = Gen::make();
a.val<|>;
}
"#,
);
assert_eq!(t, "()");
}
#[test] #[test]
fn infer_associated_method_generics_without_args() { fn infer_associated_method_generics_without_args() {
assert_snapshot!( assert_snapshot!(
@ -2494,7 +2519,6 @@ fn test() {
[167; 179) 'GLOBAL_CONST': u32 [167; 179) 'GLOBAL_CONST': u32
[189; 191) 'id': u32 [189; 191) 'id': u32
[194; 210) 'Foo::A..._CONST': u32 [194; 210) 'Foo::A..._CONST': u32
[126; 128) '99': u32
"### "###
); );
} }
@ -4694,14 +4718,16 @@ fn infer(content: &str) -> String {
} }
// sort ranges for consistency // sort ranges for consistency
types.sort_by_key(|(src_ptr, _)| (src_ptr.ast.range().start(), src_ptr.ast.range().end())); types.sort_by_key(|(src_ptr, _)| {
(src_ptr.value.range().start(), src_ptr.value.range().end())
});
for (src_ptr, ty) in &types { for (src_ptr, ty) in &types {
let node = src_ptr.ast.to_node(&src_ptr.file_syntax(&db)); let node = src_ptr.value.to_node(&src_ptr.file_syntax(&db));
let (range, text) = if let Some(self_param) = ast::SelfParam::cast(node.clone()) { let (range, text) = if let Some(self_param) = ast::SelfParam::cast(node.clone()) {
(self_param.self_kw_token().text_range(), "self".to_string()) (self_param.self_kw_token().text_range(), "self".to_string())
} else { } else {
(src_ptr.ast.range(), node.text().to_string().replace("\n", " ")) (src_ptr.value.range(), node.text().to_string().replace("\n", " "))
}; };
let macro_prefix = if src_ptr.file_id != file_id.into() { "!" } else { "" }; let macro_prefix = if src_ptr.file_id != file_id.into() { "!" } else { "" };
write!( write!(
@ -4716,12 +4742,15 @@ fn infer(content: &str) -> String {
} }
}; };
let mut analyzed = FxHashSet::default();
for node in source_file.syntax().descendants() { for node in source_file.syntax().descendants() {
if node.kind() == FN_DEF || node.kind() == CONST_DEF || node.kind() == STATIC_DEF { if node.kind() == FN_DEF || node.kind() == CONST_DEF || node.kind() == STATIC_DEF {
let analyzer = SourceAnalyzer::new(&db, Source::new(file_id.into(), &node), None); let analyzer = SourceAnalyzer::new(&db, Source::new(file_id.into(), &node), None);
if analyzed.insert(analyzer.analyzed_declaration()) {
infer_def(analyzer.inference_result(), analyzer.body_source_map()); infer_def(analyzer.inference_result(), analyzer.body_source_map());
} }
} }
}
acc.truncate(acc.trim_end().len()); acc.truncate(acc.trim_end().len());
acc acc

View file

@ -23,7 +23,7 @@ impl TypeAliasData {
db: &(impl DefDatabase + AstDatabase), db: &(impl DefDatabase + AstDatabase),
typ: TypeAlias, typ: TypeAlias,
) -> Arc<TypeAliasData> { ) -> Arc<TypeAliasData> {
let node = typ.source(db).ast; let node = typ.source(db).value;
let name = node.name().map_or_else(Name::missing, |n| n.as_name()); let name = node.name().map_or_else(Name::missing, |n| n.as_name());
let type_ref = node.type_ref().map(TypeRef::from_ast); let type_ref = node.type_ref().map(TypeRef::from_ast);
Arc::new(TypeAliasData { name, type_ref }) Arc::new(TypeAliasData { name, type_ref })

View file

@ -54,8 +54,8 @@ impl StructData {
id: StructOrUnionId, id: StructOrUnionId,
) -> Arc<StructData> { ) -> Arc<StructData> {
let src = id.source(db); let src = id.source(db);
let name = src.ast.name().map(|n| n.as_name()); let name = src.value.name().map(|n| n.as_name());
let variant_data = VariantData::new(src.ast.kind()); let variant_data = VariantData::new(src.value.kind());
let variant_data = Arc::new(variant_data); let variant_data = Arc::new(variant_data);
Arc::new(StructData { name, variant_data }) Arc::new(StructData { name, variant_data })
} }
@ -64,9 +64,9 @@ impl StructData {
impl EnumData { impl EnumData {
pub(crate) fn enum_data_query(db: &impl DefDatabase2, e: EnumId) -> Arc<EnumData> { pub(crate) fn enum_data_query(db: &impl DefDatabase2, e: EnumId) -> Arc<EnumData> {
let src = e.source(db); let src = e.source(db);
let name = src.ast.name().map(|n| n.as_name()); let name = src.value.name().map(|n| n.as_name());
let variants = src let variants = src
.ast .value
.variant_list() .variant_list()
.into_iter() .into_iter()
.flat_map(|it| it.variants()) .flat_map(|it| it.variants())

View file

@ -17,7 +17,7 @@ use crate::{
expr::{Expr, ExprId, Pat, PatId}, expr::{Expr, ExprId, Pat, PatId},
nameres::CrateDefMap, nameres::CrateDefMap,
path::Path, path::Path,
AstItemDef, DefWithBodyId, ModuleId, AstItemDef, DefWithBodyId, HasModule, HasSource, Lookup, ModuleId,
}; };
pub struct Expander { pub struct Expander {
@ -73,8 +73,8 @@ impl Expander {
std::mem::forget(mark); std::mem::forget(mark);
} }
fn to_source<T>(&self, ast: T) -> Source<T> { fn to_source<T>(&self, value: T) -> Source<T> {
Source { file_id: self.current_file_id, ast } Source { file_id: self.current_file_id, value }
} }
fn parse_path(&mut self, path: ast::Path) -> Option<Path> { fn parse_path(&mut self, path: ast::Path) -> Option<Path> {
@ -149,17 +149,19 @@ impl Body {
let (file_id, module, body) = match def { let (file_id, module, body) = match def {
DefWithBodyId::FunctionId(f) => { DefWithBodyId::FunctionId(f) => {
let f = f.lookup(db);
let src = f.source(db); let src = f.source(db);
params = src.ast.param_list(); params = src.value.param_list();
(src.file_id, f.module(db), src.ast.body().map(ast::Expr::from)) (src.file_id, f.module(db), src.value.body().map(ast::Expr::from))
} }
DefWithBodyId::ConstId(c) => { DefWithBodyId::ConstId(c) => {
let c = c.lookup(db);
let src = c.source(db); let src = c.source(db);
(src.file_id, c.module(db), src.ast.body()) (src.file_id, c.module(db), src.value.body())
} }
DefWithBodyId::StaticId(s) => { DefWithBodyId::StaticId(s) => {
let src = s.source(db); let src = s.source(db);
(src.file_id, s.module(db), src.ast.body()) (src.file_id, s.module(db), src.value.body())
} }
}; };
let expander = Expander::new(db, file_id, module); let expander = Expander::new(db, file_id, module);

View file

@ -210,8 +210,9 @@ mod tests {
let scopes = db.expr_scopes(function.into()); let scopes = db.expr_scopes(function.into());
let (_body, source_map) = db.body_with_source_map(function.into()); let (_body, source_map) = db.body_with_source_map(function.into());
let expr_id = let expr_id = source_map
source_map.node_expr(Source { file_id: file_id.into(), ast: &marker.into() }).unwrap(); .node_expr(Source { file_id: file_id.into(), value: &marker.into() })
.unwrap();
let scope = scopes.scope_for(expr_id); let scope = scopes.scope_for(expr_id);
let actual = scopes let actual = scopes
@ -317,14 +318,14 @@ mod tests {
let expr_scope = { let expr_scope = {
let expr_ast = name_ref.syntax().ancestors().find_map(ast::Expr::cast).unwrap(); let expr_ast = name_ref.syntax().ancestors().find_map(ast::Expr::cast).unwrap();
let expr_id = let expr_id =
source_map.node_expr(Source { file_id: file_id.into(), ast: &expr_ast }).unwrap(); source_map.node_expr(Source { file_id: file_id.into(), value: &expr_ast }).unwrap();
scopes.scope_for(expr_id).unwrap() scopes.scope_for(expr_id).unwrap()
}; };
let resolved = scopes.resolve_name_in_scope(expr_scope, &name_ref.as_name()).unwrap(); let resolved = scopes.resolve_name_in_scope(expr_scope, &name_ref.as_name()).unwrap();
let pat_src = source_map.pat_syntax(resolved.pat()).unwrap(); let pat_src = source_map.pat_syntax(resolved.pat()).unwrap();
let local_name = pat_src.ast.either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr()); let local_name = pat_src.value.either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr());
assert_eq!(local_name.range(), expected_name.syntax().text_range()); assert_eq!(local_name.range(), expected_name.syntax().text_range());
} }

View file

@ -8,30 +8,32 @@ use ra_syntax::ast;
use crate::{ use crate::{
adt::{EnumData, StructData}, adt::{EnumData, StructData},
body::{scope::ExprScopes, Body, BodySourceMap}, body::{scope::ExprScopes, Body, BodySourceMap},
imp::ImplData, generics::GenericParams,
impls::ImplData,
nameres::{ nameres::{
raw::{ImportSourceMap, RawItems}, raw::{ImportSourceMap, RawItems},
CrateDefMap, CrateDefMap,
}, },
DefWithBodyId, EnumId, ImplId, ItemLoc, StructOrUnionId, traits::TraitData,
DefWithBodyId, EnumId, GenericDefId, ImplId, ItemLoc, StructOrUnionId, TraitId,
}; };
#[salsa::query_group(InternDatabaseStorage)] #[salsa::query_group(InternDatabaseStorage)]
pub trait InternDatabase: SourceDatabase { pub trait InternDatabase: SourceDatabase {
#[salsa::interned] #[salsa::interned]
fn intern_function(&self, loc: ItemLoc<ast::FnDef>) -> crate::FunctionId; fn intern_function(&self, loc: crate::FunctionLoc) -> crate::FunctionId;
#[salsa::interned] #[salsa::interned]
fn intern_struct_or_union(&self, loc: ItemLoc<ast::StructDef>) -> crate::StructOrUnionId; fn intern_struct_or_union(&self, loc: ItemLoc<ast::StructDef>) -> crate::StructOrUnionId;
#[salsa::interned] #[salsa::interned]
fn intern_enum(&self, loc: ItemLoc<ast::EnumDef>) -> crate::EnumId; fn intern_enum(&self, loc: ItemLoc<ast::EnumDef>) -> crate::EnumId;
#[salsa::interned] #[salsa::interned]
fn intern_const(&self, loc: ItemLoc<ast::ConstDef>) -> crate::ConstId; fn intern_const(&self, loc: crate::ConstLoc) -> crate::ConstId;
#[salsa::interned] #[salsa::interned]
fn intern_static(&self, loc: ItemLoc<ast::StaticDef>) -> crate::StaticId; fn intern_static(&self, loc: ItemLoc<ast::StaticDef>) -> crate::StaticId;
#[salsa::interned] #[salsa::interned]
fn intern_trait(&self, loc: ItemLoc<ast::TraitDef>) -> crate::TraitId; fn intern_trait(&self, loc: ItemLoc<ast::TraitDef>) -> crate::TraitId;
#[salsa::interned] #[salsa::interned]
fn intern_type_alias(&self, loc: ItemLoc<ast::TypeAliasDef>) -> crate::TypeAliasId; fn intern_type_alias(&self, loc: crate::TypeAliasLoc) -> crate::TypeAliasId;
#[salsa::interned] #[salsa::interned]
fn intern_impl(&self, loc: ItemLoc<ast::ImplBlock>) -> crate::ImplId; fn intern_impl(&self, loc: ItemLoc<ast::ImplBlock>) -> crate::ImplId;
} }
@ -59,6 +61,9 @@ pub trait DefDatabase2: InternDatabase + AstDatabase {
#[salsa::invoke(ImplData::impl_data_query)] #[salsa::invoke(ImplData::impl_data_query)]
fn impl_data(&self, e: ImplId) -> Arc<ImplData>; fn impl_data(&self, e: ImplId) -> Arc<ImplData>;
#[salsa::invoke(TraitData::trait_data_query)]
fn trait_data(&self, e: TraitId) -> Arc<TraitData>;
#[salsa::invoke(Body::body_with_source_map_query)] #[salsa::invoke(Body::body_with_source_map_query)]
fn body_with_source_map(&self, def: DefWithBodyId) -> (Arc<Body>, Arc<BodySourceMap>); fn body_with_source_map(&self, def: DefWithBodyId) -> (Arc<Body>, Arc<BodySourceMap>);
@ -67,4 +72,7 @@ pub trait DefDatabase2: InternDatabase + AstDatabase {
#[salsa::invoke(ExprScopes::expr_scopes_query)] #[salsa::invoke(ExprScopes::expr_scopes_query)]
fn expr_scopes(&self, def: DefWithBodyId) -> Arc<ExprScopes>; fn expr_scopes(&self, def: DefWithBodyId) -> Arc<ExprScopes>;
#[salsa::invoke(GenericParams::generic_params_query)]
fn generic_params(&self, def: GenericDefId) -> Arc<GenericParams>;
} }

View file

@ -20,7 +20,7 @@ impl Diagnostic for UnresolvedModule {
"unresolved module".to_string() "unresolved module".to_string()
} }
fn source(&self) -> Source<SyntaxNodePtr> { fn source(&self) -> Source<SyntaxNodePtr> {
Source { file_id: self.file, ast: self.decl.into() } Source { file_id: self.file, value: self.decl.into() }
} }
fn as_any(&self) -> &(dyn Any + Send + 'static) { fn as_any(&self) -> &(dyn Any + Send + 'static) {
self self

View file

@ -0,0 +1,185 @@
//! Many kinds of items or constructs can have generic parameters: functions,
//! structs, impls, traits, etc. This module provides a common HIR for these
//! generic parameters. See also the `Generics` type and the `generics_of` query
//! in rustc.
use std::sync::Arc;
use hir_expand::name::{self, AsName, Name};
use ra_syntax::ast::{self, NameOwner, TypeBoundsOwner, TypeParamsOwner};
use crate::{
db::DefDatabase2,
type_ref::{TypeBound, TypeRef},
AdtId, AstItemDef, ContainerId, GenericDefId, HasSource, Lookup,
};
/// Data about a generic parameter (to a function, struct, impl, ...).
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct GenericParam {
// FIXME: give generic params proper IDs
pub idx: u32,
pub name: Name,
pub default: Option<TypeRef>,
}
/// Data about the generic parameters of a function, struct, impl, etc.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct GenericParams {
pub parent_params: Option<Arc<GenericParams>>,
pub params: Vec<GenericParam>,
pub where_predicates: Vec<WherePredicate>,
}
/// A single predicate from a where clause, i.e. `where Type: Trait`. Combined
/// where clauses like `where T: Foo + Bar` are turned into multiple of these.
/// It might still result in multiple actual predicates though, because of
/// associated type bindings like `Iterator<Item = u32>`.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct WherePredicate {
pub type_ref: TypeRef,
pub bound: TypeBound,
}
impl GenericParams {
pub(crate) fn generic_params_query(
db: &impl DefDatabase2,
def: GenericDefId,
) -> Arc<GenericParams> {
let parent_generics = parent_generic_def(db, def).map(|it| db.generic_params(it));
Arc::new(GenericParams::new(db, def.into(), parent_generics))
}
fn new(
db: &impl DefDatabase2,
def: GenericDefId,
parent_params: Option<Arc<GenericParams>>,
) -> GenericParams {
let mut generics =
GenericParams { params: Vec::new(), parent_params, where_predicates: Vec::new() };
let start = generics.parent_params.as_ref().map(|p| p.params.len()).unwrap_or(0) as u32;
// FIXME: add `: Sized` bound for everything except for `Self` in traits
match def {
GenericDefId::FunctionId(it) => generics.fill(&it.lookup(db).source(db).value, start),
GenericDefId::AdtId(AdtId::StructId(it)) => {
generics.fill(&it.0.source(db).value, start)
}
GenericDefId::AdtId(AdtId::UnionId(it)) => generics.fill(&it.0.source(db).value, start),
GenericDefId::AdtId(AdtId::EnumId(it)) => generics.fill(&it.source(db).value, start),
GenericDefId::TraitId(it) => {
// traits get the Self type as an implicit first type parameter
generics.params.push(GenericParam {
idx: start,
name: name::SELF_TYPE,
default: None,
});
generics.fill(&it.source(db).value, start + 1);
// add super traits as bounds on Self
// i.e., trait Foo: Bar is equivalent to trait Foo where Self: Bar
let self_param = TypeRef::Path(name::SELF_TYPE.into());
generics.fill_bounds(&it.source(db).value, self_param);
}
GenericDefId::TypeAliasId(it) => generics.fill(&it.lookup(db).source(db).value, start),
// Note that we don't add `Self` here: in `impl`s, `Self` is not a
// type-parameter, but rather is a type-alias for impl's target
// type, so this is handled by the resolver.
GenericDefId::ImplId(it) => generics.fill(&it.source(db).value, start),
GenericDefId::EnumVariantId(_) | GenericDefId::ConstId(_) => {}
}
generics
}
fn fill(&mut self, node: &impl TypeParamsOwner, start: u32) {
if let Some(params) = node.type_param_list() {
self.fill_params(params, start)
}
if let Some(where_clause) = node.where_clause() {
self.fill_where_predicates(where_clause);
}
}
fn fill_bounds(&mut self, node: &impl ast::TypeBoundsOwner, type_ref: TypeRef) {
for bound in
node.type_bound_list().iter().flat_map(|type_bound_list| type_bound_list.bounds())
{
self.add_where_predicate_from_bound(bound, type_ref.clone());
}
}
fn fill_params(&mut self, params: ast::TypeParamList, start: u32) {
for (idx, type_param) in params.type_params().enumerate() {
let name = type_param.name().map_or_else(Name::missing, |it| it.as_name());
// FIXME: Use `Path::from_src`
let default = type_param.default_type().map(TypeRef::from_ast);
let param = GenericParam { idx: idx as u32 + start, name: name.clone(), default };
self.params.push(param);
let type_ref = TypeRef::Path(name.into());
self.fill_bounds(&type_param, type_ref);
}
}
fn fill_where_predicates(&mut self, where_clause: ast::WhereClause) {
for pred in where_clause.predicates() {
let type_ref = match pred.type_ref() {
Some(type_ref) => type_ref,
None => continue,
};
let type_ref = TypeRef::from_ast(type_ref);
for bound in pred.type_bound_list().iter().flat_map(|l| l.bounds()) {
self.add_where_predicate_from_bound(bound, type_ref.clone());
}
}
}
fn add_where_predicate_from_bound(&mut self, bound: ast::TypeBound, type_ref: TypeRef) {
if bound.has_question_mark() {
// FIXME: remove this bound
return;
}
let bound = TypeBound::from_ast(bound);
self.where_predicates.push(WherePredicate { type_ref, bound });
}
pub fn find_by_name(&self, name: &Name) -> Option<&GenericParam> {
self.params.iter().find(|p| &p.name == name)
}
pub fn count_parent_params(&self) -> usize {
self.parent_params.as_ref().map(|p| p.count_params_including_parent()).unwrap_or(0)
}
pub fn count_params_including_parent(&self) -> usize {
let parent_count = self.count_parent_params();
parent_count + self.params.len()
}
fn for_each_param<'a>(&'a self, f: &mut impl FnMut(&'a GenericParam)) {
if let Some(parent) = &self.parent_params {
parent.for_each_param(f);
}
self.params.iter().for_each(f);
}
pub fn params_including_parent(&self) -> Vec<&GenericParam> {
let mut vec = Vec::with_capacity(self.count_params_including_parent());
self.for_each_param(&mut |p| vec.push(p));
vec
}
}
fn parent_generic_def(db: &impl DefDatabase2, def: GenericDefId) -> Option<GenericDefId> {
let container = match def {
GenericDefId::FunctionId(it) => it.lookup(db).container,
GenericDefId::TypeAliasId(it) => it.lookup(db).container,
GenericDefId::ConstId(it) => it.lookup(db).container,
GenericDefId::EnumVariantId(it) => return Some(it.parent.into()),
GenericDefId::AdtId(_) | GenericDefId::TraitId(_) | GenericDefId::ImplId(_) => return None,
};
match container {
ContainerId::ImplId(it) => Some(it.into()),
ContainerId::TraitId(it) => Some(it.into()),
ContainerId::ModuleId(_) => None,
}
}
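To summarise the shape this module produces, consider a trait such as `trait Foo<T: Clone + Default>: Bar { .. }`. Per the code above, `Self` is pushed as an implicit first parameter, the trait's own parameters follow at `start + 1`, each bound in `T: Clone + Default` becomes its own WherePredicate, and the supertrait turns into a `Self: Bar` predicate via fill_bounds. The snippet below is a hypothetical illustration with simplified, string-based stand-in types (not the crate's real GenericParam and WherePredicate, which hold Name, TypeRef and TypeBound values):

// Stand-in types invented for this sketch only.
#[derive(Debug)]
struct Param {
    idx: u32,
    name: &'static str,
    default: Option<&'static str>,
}

#[derive(Debug)]
struct Predicate {
    type_ref: &'static str,
    bound: &'static str,
}

// Expected lowering for `trait Foo<T: Clone + Default>: Bar { .. }`.
fn lowered_shape_for_trait_foo() -> (Vec<Param>, Vec<Predicate>) {
    let params = vec![
        Param { idx: 0, name: "Self", default: None }, // implicit, pushed at `start`
        Param { idx: 1, name: "T", default: None },    // filled starting at `start + 1`
    ];
    let where_predicates = vec![
        Predicate { type_ref: "T", bound: "Clone" },   // split from `T: Clone + Default`
        Predicate { type_ref: "T", bound: "Default" },
        Predicate { type_ref: "Self", bound: "Bar" },  // supertrait via fill_bounds
    ];
    (params, where_predicates)
}

fn main() {
    let (params, preds) = lowered_shape_for_trait_foo();
    assert_eq!(params[0].name, "Self");
    assert_eq!(params[1].idx, 1);
    assert!(params.iter().all(|p| p.default.is_none()));
    assert_eq!(preds.iter().filter(|p| p.type_ref == "T").count(), 2);
    assert_eq!(preds[2].bound, "Bar");
    println!("{:?}\n{:?}", params, preds);
}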

View file

@ -5,11 +5,12 @@
use std::sync::Arc; use std::sync::Arc;
use hir_expand::AstId;
use ra_syntax::ast; use ra_syntax::ast;
use crate::{ use crate::{
db::DefDatabase2, type_ref::TypeRef, AssocItemId, AstItemDef, ConstId, FunctionId, ImplId, db::DefDatabase2, type_ref::TypeRef, AssocItemId, AstItemDef, ConstLoc, ContainerId,
LocationCtx, TypeAliasId, FunctionLoc, ImplId, Intern, TypeAliasLoc,
}; };
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
@ -25,23 +26,37 @@ impl ImplData {
let src = id.source(db); let src = id.source(db);
let items = db.ast_id_map(src.file_id); let items = db.ast_id_map(src.file_id);
let target_trait = src.ast.target_trait().map(TypeRef::from_ast); let target_trait = src.value.target_trait().map(TypeRef::from_ast);
let target_type = TypeRef::from_ast_opt(src.ast.target_type()); let target_type = TypeRef::from_ast_opt(src.value.target_type());
let negative = src.ast.is_negative(); let negative = src.value.is_negative();
let items = if let Some(item_list) = src.ast.item_list() { let items = if let Some(item_list) = src.value.item_list() {
let ctx = LocationCtx::new(db, id.module(db), src.file_id);
item_list item_list
.impl_items() .impl_items()
.map(|item_node| match item_node { .map(|item_node| match item_node {
ast::ImplItem::FnDef(it) => { ast::ImplItem::FnDef(it) => {
FunctionId::from_ast_id(ctx, items.ast_id(&it)).into() let def = FunctionLoc {
container: ContainerId::ImplId(id),
ast_id: AstId::new(src.file_id, items.ast_id(&it)),
}
.intern(db);
def.into()
} }
ast::ImplItem::ConstDef(it) => { ast::ImplItem::ConstDef(it) => {
ConstId::from_ast_id(ctx, items.ast_id(&it)).into() let def = ConstLoc {
container: ContainerId::ImplId(id),
ast_id: AstId::new(src.file_id, items.ast_id(&it)),
}
.intern(db);
def.into()
} }
ast::ImplItem::TypeAliasDef(it) => { ast::ImplItem::TypeAliasDef(it) => {
TypeAliasId::from_ast_id(ctx, items.ast_id(&it)).into() let def = TypeAliasLoc {
container: ContainerId::ImplId(id),
ast_id: AstId::new(src.file_id, items.ast_id(&it)),
}
.intern(db);
def.into()
} }
}) })
.collect() .collect()

View file

@ -13,10 +13,12 @@ pub mod path;
pub mod type_ref; pub mod type_ref;
pub mod builtin_type; pub mod builtin_type;
pub mod adt; pub mod adt;
pub mod imp; pub mod impls;
pub mod diagnostics; pub mod diagnostics;
pub mod expr; pub mod expr;
pub mod body; pub mod body;
pub mod generics;
pub mod traits;
#[cfg(test)] #[cfg(test)]
mod test_db; mod test_db;
@ -80,7 +82,7 @@ impl ModuleSource {
pub fn from_child_node(db: &impl db::DefDatabase2, child: Source<&SyntaxNode>) -> ModuleSource { pub fn from_child_node(db: &impl db::DefDatabase2, child: Source<&SyntaxNode>) -> ModuleSource {
if let Some(m) = if let Some(m) =
child.ast.ancestors().filter_map(ast::Module::cast).find(|it| !it.has_semi()) child.value.ancestors().filter_map(ast::Module::cast).find(|it| !it.has_semi())
{ {
ModuleSource::Module(m) ModuleSource::Module(m)
} else { } else {
@ -184,8 +186,8 @@ pub trait AstItemDef<N: AstNode>: salsa::InternKey + Clone {
} }
fn source(self, db: &(impl AstDatabase + InternDatabase)) -> Source<N> { fn source(self, db: &(impl AstDatabase + InternDatabase)) -> Source<N> {
let loc = self.lookup_intern(db); let loc = self.lookup_intern(db);
let ast = loc.ast_id.to_node(db); let value = loc.ast_id.to_node(db);
Source { file_id: loc.ast_id.file_id(), ast } Source { file_id: loc.ast_id.file_id(), value }
} }
fn module(self, db: &impl InternDatabase) -> ModuleId { fn module(self, db: &impl InternDatabase) -> ModuleId {
let loc = self.lookup_intern(db); let loc = self.lookup_intern(db);
@ -197,12 +199,23 @@ pub trait AstItemDef<N: AstNode>: salsa::InternKey + Clone {
pub struct FunctionId(salsa::InternId); pub struct FunctionId(salsa::InternId);
impl_intern_key!(FunctionId); impl_intern_key!(FunctionId);
impl AstItemDef<ast::FnDef> for FunctionId { #[derive(Debug, Clone, PartialEq, Eq, Hash)]
fn intern(db: &impl InternDatabase, loc: ItemLoc<ast::FnDef>) -> Self { pub struct FunctionLoc {
db.intern_function(loc) pub container: ContainerId,
pub ast_id: AstId<ast::FnDef>,
} }
fn lookup_intern(self, db: &impl InternDatabase) -> ItemLoc<ast::FnDef> {
db.lookup_intern_function(self) impl Intern for FunctionLoc {
type ID = FunctionId;
fn intern(self, db: &impl db::DefDatabase2) -> FunctionId {
db.intern_function(self)
}
}
impl Lookup for FunctionId {
type Data = FunctionLoc;
fn lookup(&self, db: &impl db::DefDatabase2) -> FunctionLoc {
db.lookup_intern_function(*self)
} }
} }
@ -276,12 +289,23 @@ impl_arena_id!(LocalStructFieldId);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct ConstId(salsa::InternId); pub struct ConstId(salsa::InternId);
impl_intern_key!(ConstId); impl_intern_key!(ConstId);
impl AstItemDef<ast::ConstDef> for ConstId { #[derive(Debug, Clone, PartialEq, Eq, Hash)]
fn intern(db: &impl InternDatabase, loc: ItemLoc<ast::ConstDef>) -> Self { pub struct ConstLoc {
db.intern_const(loc) pub container: ContainerId,
pub ast_id: AstId<ast::ConstDef>,
} }
fn lookup_intern(self, db: &impl InternDatabase) -> ItemLoc<ast::ConstDef> {
db.lookup_intern_const(self) impl Intern for ConstLoc {
type ID = ConstId;
fn intern(self, db: &impl db::DefDatabase2) -> ConstId {
db.intern_const(self)
}
}
impl Lookup for ConstId {
type Data = ConstLoc;
fn lookup(&self, db: &impl db::DefDatabase2) -> ConstLoc {
db.lookup_intern_const(*self)
} }
} }
@ -312,12 +336,24 @@ impl AstItemDef<ast::TraitDef> for TraitId {
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct TypeAliasId(salsa::InternId); pub struct TypeAliasId(salsa::InternId);
impl_intern_key!(TypeAliasId); impl_intern_key!(TypeAliasId);
impl AstItemDef<ast::TypeAliasDef> for TypeAliasId {
fn intern(db: &impl InternDatabase, loc: ItemLoc<ast::TypeAliasDef>) -> Self { #[derive(Debug, Clone, PartialEq, Eq, Hash)]
db.intern_type_alias(loc) pub struct TypeAliasLoc {
pub container: ContainerId,
pub ast_id: AstId<ast::TypeAliasDef>,
} }
fn lookup_intern(self, db: &impl InternDatabase) -> ItemLoc<ast::TypeAliasDef> {
db.lookup_intern_type_alias(self) impl Intern for TypeAliasLoc {
type ID = TypeAliasId;
fn intern(self, db: &impl db::DefDatabase2) -> TypeAliasId {
db.intern_type_alias(self)
}
}
impl Lookup for TypeAliasId {
type Data = TypeAliasLoc;
fn lookup(&self, db: &impl db::DefDatabase2) -> TypeAliasLoc {
db.lookup_intern_type_alias(*self)
} }
} }
@ -352,6 +388,13 @@ macro_rules! impl_froms {
} }
} }
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum ContainerId {
ModuleId(ModuleId),
ImplId(ImplId),
TraitId(TraitId),
}
/// A Data Type /// A Data Type
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum AdtId { pub enum AdtId {
@ -408,3 +451,102 @@ pub enum AssocItemId {
// require not implementing From, and instead having some checked way of // require not implementing From, and instead having some checked way of
// casting them, and somehow making the constructors private, which would be annoying. // casting them, and somehow making the constructors private, which would be annoying.
impl_froms!(AssocItemId: FunctionId, ConstId, TypeAliasId); impl_froms!(AssocItemId: FunctionId, ConstId, TypeAliasId);
#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)]
pub enum GenericDefId {
FunctionId(FunctionId),
AdtId(AdtId),
TraitId(TraitId),
TypeAliasId(TypeAliasId),
ImplId(ImplId),
// enum variants cannot have generics themselves, but their parent enums
// can, and this makes some code easier to write
EnumVariantId(EnumVariantId),
// consts can have type parameters from their parents (i.e. associated consts of traits)
ConstId(ConstId),
}
impl_froms!(
GenericDefId: FunctionId,
AdtId(StructId, EnumId, UnionId),
TraitId,
TypeAliasId,
ImplId,
EnumVariantId,
ConstId
);
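// For illustration, a hedged standalone sketch (plain Rust, not an item of this
// crate) of why consts need their parent's generics: an associated const may use
// the trait's type parameter in its own type.
trait Container<T> {
    // The type of `EMPTY` mentions `T`, which is declared on the trait.
    const EMPTY: Option<T>;
}
struct Boxes;
impl Container<u32> for Boxes {
    const EMPTY: Option<u32> = None;
}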
trait Intern {
type ID;
fn intern(self, db: &impl db::DefDatabase2) -> Self::ID;
}
pub trait Lookup {
type Data;
fn lookup(&self, db: &impl db::DefDatabase2) -> Self::Data;
}
pub trait HasModule {
fn module(&self, db: &impl db::DefDatabase2) -> ModuleId;
}
impl HasModule for FunctionLoc {
fn module(&self, db: &impl db::DefDatabase2) -> ModuleId {
match self.container {
ContainerId::ModuleId(it) => it,
ContainerId::ImplId(it) => it.module(db),
ContainerId::TraitId(it) => it.module(db),
}
}
}
impl HasModule for TypeAliasLoc {
fn module(&self, db: &impl db::DefDatabase2) -> ModuleId {
match self.container {
ContainerId::ModuleId(it) => it,
ContainerId::ImplId(it) => it.module(db),
ContainerId::TraitId(it) => it.module(db),
}
}
}
impl HasModule for ConstLoc {
fn module(&self, db: &impl db::DefDatabase2) -> ModuleId {
match self.container {
ContainerId::ModuleId(it) => it,
ContainerId::ImplId(it) => it.module(db),
ContainerId::TraitId(it) => it.module(db),
}
}
}
pub trait HasSource {
type Value;
fn source(&self, db: &impl db::DefDatabase2) -> Source<Self::Value>;
}
impl HasSource for FunctionLoc {
type Value = ast::FnDef;
fn source(&self, db: &impl db::DefDatabase2) -> Source<ast::FnDef> {
let node = self.ast_id.to_node(db);
Source::new(self.ast_id.file_id(), node)
}
}
impl HasSource for TypeAliasLoc {
type Value = ast::TypeAliasDef;
fn source(&self, db: &impl db::DefDatabase2) -> Source<ast::TypeAliasDef> {
let node = self.ast_id.to_node(db);
Source::new(self.ast_id.file_id(), node)
}
}
impl HasSource for ConstLoc {
type Value = ast::ConstDef;
fn source(&self, db: &impl db::DefDatabase2) -> Source<ast::ConstDef> {
let node = self.ast_id.to_node(db);
Source::new(self.ast_id.file_id(), node)
}
}
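// A hedged sketch of how these new pieces are meant to compose (assuming it lives
// in this crate, with `DefDatabase2`, `ModuleId`, `AstId`, and `ast` in scope):
fn function_id_roundtrip(
    db: &impl DefDatabase2,
    module: ModuleId,
    ast_id: AstId<ast::FnDef>,
) -> Source<ast::FnDef> {
    // Build a location, intern it into a salsa key, then look the location back up.
    let id: FunctionId =
        FunctionLoc { container: ContainerId::ModuleId(module), ast_id }.intern(db);
    let loc: FunctionLoc = id.lookup(db);
    // `HasModule` resolves the containing module through the `ContainerId`;
    // `HasSource` re-parses the defining file and returns the ast node.
    let _in_module: ModuleId = loc.module(db);
    loc.source(db)
}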

View file

@ -165,6 +165,14 @@ impl ModuleScope {
self.items.iter().chain(BUILTIN_SCOPE.iter()) self.items.iter().chain(BUILTIN_SCOPE.iter())
} }
pub fn declarations(&self) -> impl Iterator<Item = ModuleDefId> + '_ {
self.entries()
.filter_map(|(_name, res)| if res.import.is_none() { Some(res.def) } else { None })
.flat_map(|per_ns| {
per_ns.take_types().into_iter().chain(per_ns.take_values().into_iter())
})
}
/// Iterate over all module scoped macros /// Iterate over all module scoped macros
pub fn macros<'a>(&'a self) -> impl Iterator<Item = (&'a Name, MacroDefId)> + 'a { pub fn macros<'a>(&'a self) -> impl Iterator<Item = (&'a Name, MacroDefId)> + 'a {
self.items self.items

View file

@ -19,9 +19,9 @@ use crate::{
per_ns::PerNs, raw, CrateDefMap, ModuleData, Resolution, ResolveMode, per_ns::PerNs, raw, CrateDefMap, ModuleData, Resolution, ResolveMode,
}, },
path::{Path, PathKind}, path::{Path, PathKind},
AdtId, AstId, AstItemDef, ConstId, CrateModuleId, EnumId, EnumVariantId, FunctionId, ImplId, AdtId, AstId, AstItemDef, ConstLoc, ContainerId, CrateModuleId, EnumId, EnumVariantId,
LocationCtx, ModuleDefId, ModuleId, StaticId, StructId, StructOrUnionId, TraitId, TypeAliasId, FunctionLoc, ImplId, Intern, LocationCtx, ModuleDefId, ModuleId, StaticId, StructId,
UnionId, StructOrUnionId, TraitId, TypeAliasLoc, UnionId,
}; };
pub(super) fn collect_defs(db: &impl DefDatabase2, mut def_map: CrateDefMap) -> CrateDefMap { pub(super) fn collect_defs(db: &impl DefDatabase2, mut def_map: CrateDefMap) -> CrateDefMap {
@ -673,8 +673,13 @@ where
let name = def.name.clone(); let name = def.name.clone();
let def: PerNs = match def.kind { let def: PerNs = match def.kind {
raw::DefKind::Function(ast_id) => { raw::DefKind::Function(ast_id) => {
let f = FunctionId::from_ast_id(ctx, ast_id); let def = FunctionLoc {
PerNs::values(f.into()) container: ContainerId::ModuleId(module),
ast_id: AstId::new(self.file_id, ast_id),
}
.intern(self.def_collector.db);
PerNs::values(def.into())
} }
raw::DefKind::Struct(ast_id) => { raw::DefKind::Struct(ast_id) => {
let id = StructOrUnionId::from_ast_id(ctx, ast_id).into(); let id = StructOrUnionId::from_ast_id(ctx, ast_id).into();
@ -687,13 +692,27 @@ where
PerNs::both(u, u) PerNs::both(u, u)
} }
raw::DefKind::Enum(ast_id) => PerNs::types(EnumId::from_ast_id(ctx, ast_id).into()), raw::DefKind::Enum(ast_id) => PerNs::types(EnumId::from_ast_id(ctx, ast_id).into()),
raw::DefKind::Const(ast_id) => PerNs::values(ConstId::from_ast_id(ctx, ast_id).into()), raw::DefKind::Const(ast_id) => {
let def = ConstLoc {
container: ContainerId::ModuleId(module),
ast_id: AstId::new(self.file_id, ast_id),
}
.intern(self.def_collector.db);
PerNs::values(def.into())
}
raw::DefKind::Static(ast_id) => { raw::DefKind::Static(ast_id) => {
PerNs::values(StaticId::from_ast_id(ctx, ast_id).into()) PerNs::values(StaticId::from_ast_id(ctx, ast_id).into())
} }
raw::DefKind::Trait(ast_id) => PerNs::types(TraitId::from_ast_id(ctx, ast_id).into()), raw::DefKind::Trait(ast_id) => PerNs::types(TraitId::from_ast_id(ctx, ast_id).into()),
raw::DefKind::TypeAlias(ast_id) => { raw::DefKind::TypeAlias(ast_id) => {
PerNs::types(TypeAliasId::from_ast_id(ctx, ast_id).into()) let def = TypeAliasLoc {
container: ContainerId::ModuleId(module),
ast_id: AstId::new(self.file_id, ast_id),
}
.intern(self.def_collector.db);
PerNs::types(def.into())
} }
}; };
let resolution = Resolution { def, import: None }; let resolution = Resolution { def, import: None };

View file

@ -327,7 +327,7 @@ impl RawItemsCollector {
let mut buf = Vec::new(); let mut buf = Vec::new();
Path::expand_use_item( Path::expand_use_item(
Source { ast: use_item, file_id: self.file_id }, Source { value: use_item, file_id: self.file_id },
&self.hygiene, &self.hygiene,
|path, use_tree, is_glob, alias| { |path, use_tree, is_glob, alias| {
let import_data = ImportData { let import_data = ImportData {

View file

@ -71,7 +71,7 @@ impl Path {
hygiene: &Hygiene, hygiene: &Hygiene,
mut cb: impl FnMut(Path, &ast::UseTree, bool, Option<Name>), mut cb: impl FnMut(Path, &ast::UseTree, bool, Option<Name>),
) { ) {
if let Some(tree) = item_src.ast.use_tree() { if let Some(tree) = item_src.value.use_tree() {
expand_use_tree(None, tree, hygiene, &mut cb); expand_use_tree(None, tree, hygiene, &mut cb);
} }
} }

View file

@ -0,0 +1,59 @@
//! HIR for trait definitions.
use std::sync::Arc;
use hir_expand::{
name::{AsName, Name},
AstId,
};
use ra_syntax::ast::{self, NameOwner};
use crate::{
db::DefDatabase2, AssocItemId, AstItemDef, ConstLoc, ContainerId, FunctionLoc, Intern, TraitId,
TypeAliasLoc,
};
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct TraitData {
pub name: Option<Name>,
pub items: Vec<AssocItemId>,
pub auto: bool,
}
impl TraitData {
pub(crate) fn trait_data_query(db: &impl DefDatabase2, tr: TraitId) -> Arc<TraitData> {
let src = tr.source(db);
let name = src.value.name().map(|n| n.as_name());
let auto = src.value.is_auto();
let ast_id_map = db.ast_id_map(src.file_id);
let items = if let Some(item_list) = src.value.item_list() {
item_list
.impl_items()
.map(|item_node| match item_node {
ast::ImplItem::FnDef(it) => FunctionLoc {
container: ContainerId::TraitId(tr),
ast_id: AstId::new(src.file_id, ast_id_map.ast_id(&it)),
}
.intern(db)
.into(),
ast::ImplItem::ConstDef(it) => ConstLoc {
container: ContainerId::TraitId(tr),
ast_id: AstId::new(src.file_id, ast_id_map.ast_id(&it)),
}
.intern(db)
.into(),
ast::ImplItem::TypeAliasDef(it) => TypeAliasLoc {
container: ContainerId::TraitId(tr),
ast_id: AstId::new(src.file_id, ast_id_map.ast_id(&it)),
}
.intern(db)
.into(),
})
.collect()
} else {
Vec::new()
};
Arc::new(TraitData { name, items, auto })
}
}

View file

@ -24,7 +24,7 @@ pub trait Diagnostic: Any + Send + Sync + fmt::Debug + 'static {
fn message(&self) -> String; fn message(&self) -> String;
fn source(&self) -> Source<SyntaxNodePtr>; fn source(&self) -> Source<SyntaxNodePtr>;
fn highlight_range(&self) -> TextRange { fn highlight_range(&self) -> TextRange {
self.source().ast.range() self.source().value.range()
} }
fn as_any(&self) -> &(dyn Any + Send + 'static); fn as_any(&self) -> &(dyn Any + Send + 'static);
} }
@ -37,7 +37,7 @@ pub trait AstDiagnostic {
impl dyn Diagnostic { impl dyn Diagnostic {
pub fn syntax_node(&self, db: &impl AstDatabase) -> SyntaxNode { pub fn syntax_node(&self, db: &impl AstDatabase) -> SyntaxNode {
let node = db.parse_or_expand(self.source().file_id).unwrap(); let node = db.parse_or_expand(self.source().file_id).unwrap();
self.source().ast.to_node(&node) self.source().value.to_node(&node)
} }
pub fn downcast_ref<D: Diagnostic>(&self) -> Option<&D> { pub fn downcast_ref<D: Diagnostic>(&self) -> Option<&D> {

View file

@ -166,19 +166,19 @@ impl ExpansionInfo {
pub fn map_token_down(&self, token: Source<&SyntaxToken>) -> Option<Source<SyntaxToken>> { pub fn map_token_down(&self, token: Source<&SyntaxToken>) -> Option<Source<SyntaxToken>> {
assert_eq!(token.file_id, self.arg.file_id); assert_eq!(token.file_id, self.arg.file_id);
let range = let range =
token.ast.text_range().checked_sub(self.arg.ast.syntax().text_range().start())?; token.value.text_range().checked_sub(self.arg.value.syntax().text_range().start())?;
let token_id = self.macro_arg.1.token_by_range(range)?; let token_id = self.macro_arg.1.token_by_range(range)?;
let token_id = self.macro_def.0.map_id_down(token_id); let token_id = self.macro_def.0.map_id_down(token_id);
let range = self.exp_map.range_by_token(token_id)?; let range = self.exp_map.range_by_token(token_id)?;
let token = algo::find_covering_element(&self.expanded.ast, range).into_token()?; let token = algo::find_covering_element(&self.expanded.value, range).into_token()?;
Some(self.expanded.with_ast(token)) Some(self.expanded.with_value(token))
} }
pub fn map_token_up(&self, token: Source<&SyntaxToken>) -> Option<Source<SyntaxToken>> { pub fn map_token_up(&self, token: Source<&SyntaxToken>) -> Option<Source<SyntaxToken>> {
let token_id = self.exp_map.token_by_range(token.ast.text_range())?; let token_id = self.exp_map.token_by_range(token.value.text_range())?;
let (token_id, origin) = self.macro_def.0.map_id_up(token_id); let (token_id, origin) = self.macro_def.0.map_id_up(token_id);
let (token_map, tt) = match origin { let (token_map, tt) = match origin {
@ -188,11 +188,11 @@ impl ExpansionInfo {
let range = token_map.range_by_token(token_id)?; let range = token_map.range_by_token(token_id)?;
let token = algo::find_covering_element( let token = algo::find_covering_element(
tt.ast.syntax(), tt.value.syntax(),
range + tt.ast.syntax().text_range().start(), range + tt.value.syntax().text_range().start(),
) )
.into_token()?; .into_token()?;
Some(tt.with_ast(token)) Some(tt.with_value(token))
} }
} }
@ -240,30 +240,34 @@ impl<N: AstNode> AstId<N> {
} }
} }
/// FIXME: https://github.com/matklad/with ? /// `Source<T>` stores a value of `T` inside a particular file/syntax tree.
///
/// Typical usages are:
///
/// * `Source<SyntaxNode>` -- syntax node in a file
/// * `Source<ast::FnDef>` -- ast node in a file
/// * `Source<TextUnit>` -- offset in a file
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)] #[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
pub struct Source<T> { pub struct Source<T> {
pub file_id: HirFileId, pub file_id: HirFileId,
// FIXME: this stores all kind of things, not only `ast`. pub value: T,
// There should be a better name...
pub ast: T,
} }
impl<T> Source<T> { impl<T> Source<T> {
pub fn new(file_id: HirFileId, ast: T) -> Source<T> { pub fn new(file_id: HirFileId, value: T) -> Source<T> {
Source { file_id, ast } Source { file_id, value }
} }
// Similarly, naming here is stupid... // Similarly, naming here is stupid...
pub fn with_ast<U>(&self, ast: U) -> Source<U> { pub fn with_value<U>(&self, value: U) -> Source<U> {
Source::new(self.file_id, ast) Source::new(self.file_id, value)
} }
pub fn map<F: FnOnce(T) -> U, U>(self, f: F) -> Source<U> { pub fn map<F: FnOnce(T) -> U, U>(self, f: F) -> Source<U> {
Source::new(self.file_id, f(self.ast)) Source::new(self.file_id, f(self.value))
} }
pub fn as_ref(&self) -> Source<&T> { pub fn as_ref(&self) -> Source<&T> {
self.with_ast(&self.ast) self.with_value(&self.value)
} }
pub fn file_syntax(&self, db: &impl db::AstDatabase) -> SyntaxNode { pub fn file_syntax(&self, db: &impl db::AstDatabase) -> SyntaxNode {
db.parse_or_expand(self.file_id).expect("source created from invalid file") db.parse_or_expand(self.file_id).expect("source created from invalid file")
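// A minimal usage sketch (assuming `ast`, `AstNode`, and `NameOwner` are in scope):
// build a `Source`, then transform or swap the payload while keeping `file_id`.
fn fn_name_source(file_id: HirFileId, node: ast::FnDef) -> Source<Option<ast::Name>> {
    let src = Source::new(file_id, node);
    // `as_ref` borrows the payload; `map` replaces it without touching the file id.
    src.as_ref().map(|f| f.name())
}
fn name_syntax_source(name: &Source<ast::Name>) -> Source<SyntaxNode> {
    // `with_value` pairs a new payload with the same `file_id`.
    name.with_value(name.value.syntax().clone())
}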

View file

@ -309,7 +309,6 @@ impl RootDatabase {
hir::db::StructDataQuery hir::db::StructDataQuery
hir::db::EnumDataQuery hir::db::EnumDataQuery
hir::db::TraitDataQuery hir::db::TraitDataQuery
hir::db::TraitItemsIndexQuery
hir::db::RawItemsWithSourceMapQuery hir::db::RawItemsWithSourceMapQuery
hir::db::RawItemsQuery hir::db::RawItemsQuery
hir::db::CrateDefMapQuery hir::db::CrateDefMapQuery

View file

@ -304,6 +304,13 @@ mod tests {
), ),
@r###" @r###"
[ [
CompletionItem {
label: "Self",
source_range: [54; 54),
delete: [54; 54),
insert: "Self",
kind: TypeParam,
},
CompletionItem { CompletionItem {
label: "T", label: "T",
source_range: [54; 54), source_range: [54; 54),

View file

@ -54,7 +54,7 @@ impl<'a> CompletionContext<'a> {
let src = hir::ModuleSource::from_position(db, position); let src = hir::ModuleSource::from_position(db, position);
let module = hir::Module::from_definition( let module = hir::Module::from_definition(
db, db,
hir::Source { file_id: position.file_id.into(), ast: src }, hir::Source { file_id: position.file_id.into(), value: src },
); );
let token = let token =
original_parse.tree().syntax().token_at_offset(position.offset).left_biased()?; original_parse.tree().syntax().token_at_offset(position.offset).left_biased()?;

View file

@ -169,7 +169,7 @@ impl Completions {
None => return, None => return,
}; };
let ast_node = macro_.source(ctx.db).ast; let ast_node = macro_.source(ctx.db).value;
let detail = macro_label(&ast_node); let detail = macro_label(&ast_node);
let docs = macro_.docs(ctx.db); let docs = macro_.docs(ctx.db);
@ -201,7 +201,7 @@ impl Completions {
) { ) {
let data = func.data(ctx.db); let data = func.data(ctx.db);
let name = name.unwrap_or_else(|| data.name().to_string()); let name = name.unwrap_or_else(|| data.name().to_string());
let ast_node = func.source(ctx.db).ast; let ast_node = func.source(ctx.db).value;
let detail = function_label(&ast_node); let detail = function_label(&ast_node);
let mut builder = let mut builder =
@ -234,7 +234,7 @@ impl Completions {
} }
pub(crate) fn add_const(&mut self, ctx: &CompletionContext, constant: hir::Const) { pub(crate) fn add_const(&mut self, ctx: &CompletionContext, constant: hir::Const) {
let ast_node = constant.source(ctx.db).ast; let ast_node = constant.source(ctx.db).value;
let name = match ast_node.name() { let name = match ast_node.name() {
Some(name) => name, Some(name) => name,
_ => return, _ => return,
@ -250,7 +250,7 @@ impl Completions {
} }
pub(crate) fn add_type_alias(&mut self, ctx: &CompletionContext, type_alias: hir::TypeAlias) { pub(crate) fn add_type_alias(&mut self, ctx: &CompletionContext, type_alias: hir::TypeAlias) {
let type_def = type_alias.source(ctx.db).ast; let type_def = type_alias.source(ctx.db).value;
let name = match type_def.name() { let name = match type_def.name() {
Some(name) => name, Some(name) => name,
_ => return, _ => return,

View file

@ -96,7 +96,7 @@ pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic>
}); });
let source_file = db.parse(file_id).tree(); let source_file = db.parse(file_id).tree();
let src = let src =
hir::Source { file_id: file_id.into(), ast: hir::ModuleSource::SourceFile(source_file) }; hir::Source { file_id: file_id.into(), value: hir::ModuleSource::SourceFile(source_file) };
if let Some(m) = hir::Module::from_definition(db, src) { if let Some(m) = hir::Module::from_definition(db, src) {
m.diagnostics(db, &mut sink); m.diagnostics(db, &mut sink);
}; };

View file

@ -48,12 +48,12 @@ impl FunctionSignature {
pub(crate) fn from_hir(db: &db::RootDatabase, function: hir::Function) -> Self { pub(crate) fn from_hir(db: &db::RootDatabase, function: hir::Function) -> Self {
let doc = function.docs(db); let doc = function.docs(db);
let ast_node = function.source(db).ast; let ast_node = function.source(db).value;
FunctionSignature::from(&ast_node).with_doc_opt(doc) FunctionSignature::from(&ast_node).with_doc_opt(doc)
} }
pub(crate) fn from_struct(db: &db::RootDatabase, st: hir::Struct) -> Option<Self> { pub(crate) fn from_struct(db: &db::RootDatabase, st: hir::Struct) -> Option<Self> {
let node: ast::StructDef = st.source(db).ast; let node: ast::StructDef = st.source(db).value;
match node.kind() { match node.kind() {
ast::StructKind::Named(_) => return None, ast::StructKind::Named(_) => return None,
_ => (), _ => (),
@ -87,7 +87,7 @@ impl FunctionSignature {
db: &db::RootDatabase, db: &db::RootDatabase,
variant: hir::EnumVariant, variant: hir::EnumVariant,
) -> Option<Self> { ) -> Option<Self> {
let node: ast::EnumVariant = variant.source(db).ast; let node: ast::EnumVariant = variant.source(db).value;
match node.kind() { match node.kind() {
ast::StructKind::Named(_) | ast::StructKind::Unit => return None, ast::StructKind::Named(_) | ast::StructKind::Unit => return None,
_ => (), _ => (),
@ -126,7 +126,7 @@ impl FunctionSignature {
} }
pub(crate) fn from_macro(db: &db::RootDatabase, macro_def: hir::MacroDef) -> Option<Self> { pub(crate) fn from_macro(db: &db::RootDatabase, macro_def: hir::MacroDef) -> Option<Self> {
let node: ast::MacroCall = macro_def.source(db).ast; let node: ast::MacroCall = macro_def.source(db).value;
let params = vec![]; let params = vec![];

View file

@ -86,9 +86,9 @@ impl NavigationTarget {
name, name,
None, None,
frange.range, frange.range,
src.ast.syntax().kind(), src.value.syntax().kind(),
src.ast.doc_comment_text(), src.value.doc_comment_text(),
src.ast.short_label(), src.value.short_label(),
); );
} }
module.to_nav(db) module.to_nav(db)
@ -146,9 +146,9 @@ impl NavigationTarget {
description: Option<String>, description: Option<String>,
) -> NavigationTarget { ) -> NavigationTarget {
//FIXME: use `_` instead of empty string //FIXME: use `_` instead of empty string
let name = node.ast.name().map(|it| it.text().clone()).unwrap_or_default(); let name = node.value.name().map(|it| it.text().clone()).unwrap_or_default();
let focus_range = let focus_range =
node.ast.name().map(|it| original_range(db, node.with_ast(it.syntax())).range); node.value.name().map(|it| original_range(db, node.with_value(it.syntax())).range);
let frange = original_range(db, node.map(|it| it.syntax())); let frange = original_range(db, node.map(|it| it.syntax()));
NavigationTarget::from_syntax( NavigationTarget::from_syntax(
@ -156,7 +156,7 @@ impl NavigationTarget {
name, name,
focus_range, focus_range,
frange.range, frange.range,
node.ast.syntax().kind(), node.value.syntax().kind(),
docs, docs,
description, description,
) )
@ -220,8 +220,8 @@ where
NavigationTarget::from_named( NavigationTarget::from_named(
db, db,
src.as_ref().map(|it| it as &dyn ast::NameOwner), src.as_ref().map(|it| it as &dyn ast::NameOwner),
src.ast.doc_comment_text(), src.value.doc_comment_text(),
src.ast.short_label(), src.value.short_label(),
) )
} }
} }
@ -230,9 +230,9 @@ impl ToNav for hir::Module {
fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { fn to_nav(&self, db: &RootDatabase) -> NavigationTarget {
let src = self.definition_source(db); let src = self.definition_source(db);
let name = self.name(db).map(|it| it.to_string().into()).unwrap_or_default(); let name = self.name(db).map(|it| it.to_string().into()).unwrap_or_default();
match &src.ast { match &src.value {
ModuleSource::SourceFile(node) => { ModuleSource::SourceFile(node) => {
let frange = original_range(db, src.with_ast(node.syntax())); let frange = original_range(db, src.with_value(node.syntax()));
NavigationTarget::from_syntax( NavigationTarget::from_syntax(
frange.file_id, frange.file_id,
@ -245,7 +245,7 @@ impl ToNav for hir::Module {
) )
} }
ModuleSource::Module(node) => { ModuleSource::Module(node) => {
let frange = original_range(db, src.with_ast(node.syntax())); let frange = original_range(db, src.with_value(node.syntax()));
NavigationTarget::from_syntax( NavigationTarget::from_syntax(
frange.file_id, frange.file_id,
@ -271,7 +271,7 @@ impl ToNav for hir::ImplBlock {
"impl".into(), "impl".into(),
None, None,
frange.range, frange.range,
src.ast.syntax().kind(), src.value.syntax().kind(),
None, None,
None, None,
) )
@ -282,15 +282,15 @@ impl ToNav for hir::StructField {
fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { fn to_nav(&self, db: &RootDatabase) -> NavigationTarget {
let src = self.source(db); let src = self.source(db);
match &src.ast { match &src.value {
FieldSource::Named(it) => NavigationTarget::from_named( FieldSource::Named(it) => NavigationTarget::from_named(
db, db,
src.with_ast(it), src.with_value(it),
it.doc_comment_text(), it.doc_comment_text(),
it.short_label(), it.short_label(),
), ),
FieldSource::Pos(it) => { FieldSource::Pos(it) => {
let frange = original_range(db, src.with_ast(it.syntax())); let frange = original_range(db, src.with_value(it.syntax()));
NavigationTarget::from_syntax( NavigationTarget::from_syntax(
frange.file_id, frange.file_id,
"".into(), "".into(),
@ -308,11 +308,11 @@ impl ToNav for hir::StructField {
impl ToNav for hir::MacroDef { impl ToNav for hir::MacroDef {
fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { fn to_nav(&self, db: &RootDatabase) -> NavigationTarget {
let src = self.source(db); let src = self.source(db);
log::debug!("nav target {:#?}", src.ast.syntax()); log::debug!("nav target {:#?}", src.value.syntax());
NavigationTarget::from_named( NavigationTarget::from_named(
db, db,
src.as_ref().map(|it| it as &dyn ast::NameOwner), src.as_ref().map(|it| it as &dyn ast::NameOwner),
src.ast.doc_comment_text(), src.value.doc_comment_text(),
None, None,
) )
} }
@ -341,7 +341,7 @@ impl ToNav for hir::AssocItem {
impl ToNav for hir::Local { impl ToNav for hir::Local {
fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { fn to_nav(&self, db: &RootDatabase) -> NavigationTarget {
let src = self.source(db); let src = self.source(db);
let (full_range, focus_range) = match src.ast { let (full_range, focus_range) = match src.value {
Either::A(it) => { Either::A(it) => {
(it.syntax().text_range(), it.name().map(|it| it.syntax().text_range())) (it.syntax().text_range(), it.name().map(|it| it.syntax().text_range()))
} }

View file

@ -12,7 +12,7 @@ pub(crate) fn original_range(db: &RootDatabase, node: Source<&SyntaxNode>) -> Fi
None => { None => {
return FileRange { return FileRange {
file_id: node.file_id.original_file(db), file_id: node.file_id.original_file(db),
range: node.ast.text_range(), range: node.value.text_range(),
} }
} }
Some(it) => it, Some(it) => it,
@ -25,14 +25,18 @@ pub(crate) fn original_range(db: &RootDatabase, node: Source<&SyntaxNode>) -> Fi
// *Second*, we should handle recursive macro expansions // *Second*, we should handle recursive macro expansions
let token = node let token = node
.ast .value
.descendants_with_tokens() .descendants_with_tokens()
.filter_map(|it| it.into_token()) .filter_map(|it| it.into_token())
.find_map(|it| expansion.map_token_up(node.with_ast(&it))); .find_map(|it| expansion.map_token_up(node.with_value(&it)));
match token { match token {
Some(it) => FileRange { file_id: it.file_id.original_file(db), range: it.ast.text_range() }, Some(it) => {
None => FileRange { file_id: node.file_id.original_file(db), range: node.ast.text_range() }, FileRange { file_id: it.file_id.original_file(db), range: it.value.text_range() }
}
None => {
FileRange { file_id: node.file_id.original_file(db), range: node.value.text_range() }
}
} }
} }
@ -44,14 +48,14 @@ pub(crate) fn descend_into_macros(
let src = Source::new(file_id.into(), token); let src = Source::new(file_id.into(), token);
successors(Some(src), |token| { successors(Some(src), |token| {
let macro_call = token.ast.ancestors().find_map(ast::MacroCall::cast)?; let macro_call = token.value.ancestors().find_map(ast::MacroCall::cast)?;
let tt = macro_call.token_tree()?; let tt = macro_call.token_tree()?;
if !token.ast.text_range().is_subrange(&tt.syntax().text_range()) { if !token.value.text_range().is_subrange(&tt.syntax().text_range()) {
return None; return None;
} }
let source_analyzer = let source_analyzer =
hir::SourceAnalyzer::new(db, token.with_ast(token.ast.parent()).as_ref(), None); hir::SourceAnalyzer::new(db, token.with_value(token.value.parent()).as_ref(), None);
let exp = source_analyzer.expand(db, &macro_call)?; let exp = source_analyzer.expand(db, token.with_value(&macro_call))?;
exp.map_token_down(db, token.as_ref()) exp.map_token_down(db, token.as_ref())
}) })
.last() .last()

View file

@ -23,7 +23,7 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<
let mac = name_ref.syntax().ancestors().find_map(ast::MacroCall::cast)?; let mac = name_ref.syntax().ancestors().find_map(ast::MacroCall::cast)?;
let source = hir::Source::new(position.file_id.into(), mac.syntax()); let source = hir::Source::new(position.file_id.into(), mac.syntax());
let expanded = expand_macro_recur(db, source, &mac)?; let expanded = expand_macro_recur(db, source, source.with_value(&mac))?;
// FIXME: // FIXME:
// macro expansion may lose all white space information // macro expansion may lose all white space information
@ -35,10 +35,10 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<
fn expand_macro_recur( fn expand_macro_recur(
db: &RootDatabase, db: &RootDatabase,
source: hir::Source<&SyntaxNode>, source: hir::Source<&SyntaxNode>,
macro_call: &ast::MacroCall, macro_call: hir::Source<&ast::MacroCall>,
) -> Option<SyntaxNode> { ) -> Option<SyntaxNode> {
let analyzer = hir::SourceAnalyzer::new(db, source, None); let analyzer = hir::SourceAnalyzer::new(db, source, None);
let expansion = analyzer.expand(db, &macro_call)?; let expansion = analyzer.expand(db, macro_call)?;
let macro_file_id = expansion.file_id(); let macro_file_id = expansion.file_id();
let expanded: SyntaxNode = db.parse_or_expand(macro_file_id)?; let expanded: SyntaxNode = db.parse_or_expand(macro_file_id)?;
@ -46,8 +46,8 @@ fn expand_macro_recur(
let mut replaces = FxHashMap::default(); let mut replaces = FxHashMap::default();
for child in children.into_iter() { for child in children.into_iter() {
let source = hir::Source::new(macro_file_id, source.ast); let node = hir::Source::new(macro_file_id, &child);
let new_node = expand_macro_recur(db, source, &child)?; let new_node = expand_macro_recur(db, source, node)?;
replaces.insert(child.syntax().clone().into(), new_node.into()); replaces.insert(child.syntax().clone().into(), new_node.into());
} }

View file

@ -23,13 +23,13 @@ pub(crate) fn goto_definition(
let token = descend_into_macros(db, position.file_id, token); let token = descend_into_macros(db, position.file_id, token);
let res = match_ast! { let res = match_ast! {
match (token.ast.parent()) { match (token.value.parent()) {
ast::NameRef(name_ref) => { ast::NameRef(name_ref) => {
let navs = reference_definition(db, token.with_ast(&name_ref)).to_vec(); let navs = reference_definition(db, token.with_value(&name_ref)).to_vec();
RangeInfo::new(name_ref.syntax().text_range(), navs.to_vec()) RangeInfo::new(name_ref.syntax().text_range(), navs.to_vec())
}, },
ast::Name(name) => { ast::Name(name) => {
let navs = name_definition(db, token.with_ast(&name))?; let navs = name_definition(db, token.with_value(&name))?;
RangeInfo::new(name.syntax().text_range(), navs) RangeInfo::new(name.syntax().text_range(), navs)
}, },
@ -84,7 +84,7 @@ pub(crate) fn reference_definition(
}; };
// Fallback index based approach: // Fallback index based approach:
let navs = crate::symbol_index::index_resolve(db, name_ref.ast) let navs = crate::symbol_index::index_resolve(db, name_ref.value)
.into_iter() .into_iter()
.map(|s| s.to_nav(db)) .map(|s| s.to_nav(db))
.collect(); .collect();
@ -95,11 +95,11 @@ pub(crate) fn name_definition(
db: &RootDatabase, db: &RootDatabase,
name: Source<&ast::Name>, name: Source<&ast::Name>,
) -> Option<Vec<NavigationTarget>> { ) -> Option<Vec<NavigationTarget>> {
let parent = name.ast.syntax().parent()?; let parent = name.value.syntax().parent()?;
if let Some(module) = ast::Module::cast(parent.clone()) { if let Some(module) = ast::Module::cast(parent.clone()) {
if module.has_semi() { if module.has_semi() {
let src = name.with_ast(module); let src = name.with_value(module);
if let Some(child_module) = hir::Module::from_declaration(db, src) { if let Some(child_module) = hir::Module::from_declaration(db, src) {
let nav = child_module.to_nav(db); let nav = child_module.to_nav(db);
return Some(vec![nav]); return Some(vec![nav]);
@ -107,7 +107,7 @@ pub(crate) fn name_definition(
} }
} }
if let Some(nav) = named_target(db, name.with_ast(&parent)) { if let Some(nav) = named_target(db, name.with_value(&parent)) {
return Some(vec![nav]); return Some(vec![nav]);
} }
@ -116,11 +116,11 @@ pub(crate) fn name_definition(
fn named_target(db: &RootDatabase, node: Source<&SyntaxNode>) -> Option<NavigationTarget> { fn named_target(db: &RootDatabase, node: Source<&SyntaxNode>) -> Option<NavigationTarget> {
match_ast! { match_ast! {
match (node.ast) { match (node.value) {
ast::StructDef(it) => { ast::StructDef(it) => {
Some(NavigationTarget::from_named( Some(NavigationTarget::from_named(
db, db,
node.with_ast(&it), node.with_value(&it),
it.doc_comment_text(), it.doc_comment_text(),
it.short_label(), it.short_label(),
)) ))
@ -128,7 +128,7 @@ fn named_target(db: &RootDatabase, node: Source<&SyntaxNode>) -> Option<Navigati
ast::EnumDef(it) => { ast::EnumDef(it) => {
Some(NavigationTarget::from_named( Some(NavigationTarget::from_named(
db, db,
node.with_ast(&it), node.with_value(&it),
it.doc_comment_text(), it.doc_comment_text(),
it.short_label(), it.short_label(),
)) ))
@ -136,7 +136,7 @@ fn named_target(db: &RootDatabase, node: Source<&SyntaxNode>) -> Option<Navigati
ast::EnumVariant(it) => { ast::EnumVariant(it) => {
Some(NavigationTarget::from_named( Some(NavigationTarget::from_named(
db, db,
node.with_ast(&it), node.with_value(&it),
it.doc_comment_text(), it.doc_comment_text(),
it.short_label(), it.short_label(),
)) ))
@ -144,7 +144,7 @@ fn named_target(db: &RootDatabase, node: Source<&SyntaxNode>) -> Option<Navigati
ast::FnDef(it) => { ast::FnDef(it) => {
Some(NavigationTarget::from_named( Some(NavigationTarget::from_named(
db, db,
node.with_ast(&it), node.with_value(&it),
it.doc_comment_text(), it.doc_comment_text(),
it.short_label(), it.short_label(),
)) ))
@ -152,7 +152,7 @@ fn named_target(db: &RootDatabase, node: Source<&SyntaxNode>) -> Option<Navigati
ast::TypeAliasDef(it) => { ast::TypeAliasDef(it) => {
Some(NavigationTarget::from_named( Some(NavigationTarget::from_named(
db, db,
node.with_ast(&it), node.with_value(&it),
it.doc_comment_text(), it.doc_comment_text(),
it.short_label(), it.short_label(),
)) ))
@ -160,7 +160,7 @@ fn named_target(db: &RootDatabase, node: Source<&SyntaxNode>) -> Option<Navigati
ast::ConstDef(it) => { ast::ConstDef(it) => {
Some(NavigationTarget::from_named( Some(NavigationTarget::from_named(
db, db,
node.with_ast(&it), node.with_value(&it),
it.doc_comment_text(), it.doc_comment_text(),
it.short_label(), it.short_label(),
)) ))
@ -168,7 +168,7 @@ fn named_target(db: &RootDatabase, node: Source<&SyntaxNode>) -> Option<Navigati
ast::StaticDef(it) => { ast::StaticDef(it) => {
Some(NavigationTarget::from_named( Some(NavigationTarget::from_named(
db, db,
node.with_ast(&it), node.with_value(&it),
it.doc_comment_text(), it.doc_comment_text(),
it.short_label(), it.short_label(),
)) ))
@ -176,7 +176,7 @@ fn named_target(db: &RootDatabase, node: Source<&SyntaxNode>) -> Option<Navigati
ast::TraitDef(it) => { ast::TraitDef(it) => {
Some(NavigationTarget::from_named( Some(NavigationTarget::from_named(
db, db,
node.with_ast(&it), node.with_value(&it),
it.doc_comment_text(), it.doc_comment_text(),
it.short_label(), it.short_label(),
)) ))
@ -184,7 +184,7 @@ fn named_target(db: &RootDatabase, node: Source<&SyntaxNode>) -> Option<Navigati
ast::RecordFieldDef(it) => { ast::RecordFieldDef(it) => {
Some(NavigationTarget::from_named( Some(NavigationTarget::from_named(
db, db,
node.with_ast(&it), node.with_value(&it),
it.doc_comment_text(), it.doc_comment_text(),
it.short_label(), it.short_label(),
)) ))
@ -192,7 +192,7 @@ fn named_target(db: &RootDatabase, node: Source<&SyntaxNode>) -> Option<Navigati
ast::Module(it) => { ast::Module(it) => {
Some(NavigationTarget::from_named( Some(NavigationTarget::from_named(
db, db,
node.with_ast(&it), node.with_value(&it),
it.doc_comment_text(), it.doc_comment_text(),
it.short_label(), it.short_label(),
)) ))
@ -200,7 +200,7 @@ fn named_target(db: &RootDatabase, node: Source<&SyntaxNode>) -> Option<Navigati
ast::MacroCall(it) => { ast::MacroCall(it) => {
Some(NavigationTarget::from_named( Some(NavigationTarget::from_named(
db, db,
node.with_ast(&it), node.with_value(&it),
it.doc_comment_text(), it.doc_comment_text(),
None, None,
)) ))

View file

@ -16,13 +16,13 @@ pub(crate) fn goto_type_definition(
let token = file.token_at_offset(position.offset).filter(|it| !it.kind().is_trivia()).next()?; let token = file.token_at_offset(position.offset).filter(|it| !it.kind().is_trivia()).next()?;
let token = descend_into_macros(db, position.file_id, token); let token = descend_into_macros(db, position.file_id, token);
let node = token.ast.ancestors().find_map(|token| { let node = token.value.ancestors().find_map(|token| {
token token
.ancestors() .ancestors()
.find(|n| ast::Expr::cast(n.clone()).is_some() || ast::Pat::cast(n.clone()).is_some()) .find(|n| ast::Expr::cast(n.clone()).is_some() || ast::Pat::cast(n.clone()).is_some())
})?; })?;
let analyzer = hir::SourceAnalyzer::new(db, token.with_ast(&node), None); let analyzer = hir::SourceAnalyzer::new(db, token.with_value(&node), None);
let ty: hir::Ty = if let Some(ty) = let ty: hir::Ty = if let Some(ty) =
ast::Expr::cast(node.clone()).and_then(|e| analyzer.type_of(db, &e)) ast::Expr::cast(node.clone()).and_then(|e| analyzer.type_of(db, &e))

View file

@ -101,11 +101,11 @@ fn hover_text_from_name_kind(
return match name_kind { return match name_kind {
Macro(it) => { Macro(it) => {
let src = it.source(db); let src = it.source(db);
hover_text(src.ast.doc_comment_text(), Some(macro_label(&src.ast))) hover_text(src.value.doc_comment_text(), Some(macro_label(&src.value)))
} }
Field(it) => { Field(it) => {
let src = it.source(db); let src = it.source(db);
match src.ast { match src.value {
hir::FieldSource::Named(it) => hover_text(it.doc_comment_text(), it.short_label()), hir::FieldSource::Named(it) => hover_text(it.doc_comment_text(), it.short_label()),
_ => None, _ => None,
} }
@ -116,7 +116,7 @@ fn hover_text_from_name_kind(
hir::AssocItem::TypeAlias(it) => from_def_source(db, it), hir::AssocItem::TypeAlias(it) => from_def_source(db, it),
}, },
Def(it) => match it { Def(it) => match it {
hir::ModuleDef::Module(it) => match it.definition_source(db).ast { hir::ModuleDef::Module(it) => match it.definition_source(db).value {
hir::ModuleSource::Module(it) => { hir::ModuleSource::Module(it) => {
hover_text(it.doc_comment_text(), it.short_label()) hover_text(it.doc_comment_text(), it.short_label())
} }
@ -158,7 +158,7 @@ fn hover_text_from_name_kind(
A: ast::DocCommentsOwner + ast::NameOwner + ShortLabel, A: ast::DocCommentsOwner + ast::NameOwner + ShortLabel,
{ {
let src = def.source(db); let src = def.source(db);
hover_text(src.ast.doc_comment_text(), src.ast.short_label()) hover_text(src.value.doc_comment_text(), src.value.short_label())
} }
} }
@ -170,11 +170,11 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn
let mut res = HoverResult::new(); let mut res = HoverResult::new();
let mut range = match_ast! { let mut range = match_ast! {
match (token.ast.parent()) { match (token.value.parent()) {
ast::NameRef(name_ref) => { ast::NameRef(name_ref) => {
let mut no_fallback = false; let mut no_fallback = false;
if let Some(name_kind) = if let Some(name_kind) =
classify_name_ref(db, token.with_ast(&name_ref)).map(|d| d.kind) classify_name_ref(db, token.with_value(&name_ref)).map(|d| d.kind)
{ {
res.extend(hover_text_from_name_kind(db, name_kind, &mut no_fallback)) res.extend(hover_text_from_name_kind(db, name_kind, &mut no_fallback))
} }
@ -196,7 +196,7 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn
} }
}, },
ast::Name(name) => { ast::Name(name) => {
if let Some(name_kind) = classify_name(db, token.with_ast(&name)).map(|d| d.kind) { if let Some(name_kind) = classify_name(db, token.with_value(&name)).map(|d| d.kind) {
res.extend(hover_text_from_name_kind(db, name_kind, &mut true)); res.extend(hover_text_from_name_kind(db, name_kind, &mut true));
} }
@ -211,7 +211,7 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn
}; };
if range.is_none() { if range.is_none() {
let node = token.ast.ancestors().find(|n| { let node = token.value.ancestors().find(|n| {
ast::Expr::cast(n.clone()).is_some() || ast::Pat::cast(n.clone()).is_some() ast::Expr::cast(n.clone()).is_some() || ast::Pat::cast(n.clone()).is_some()
})?; })?;
let frange = FileRange { file_id: position.file_id, range: node.text_range() }; let frange = FileRange { file_id: position.file_id, range: node.text_range() };
@ -404,9 +404,7 @@ mod tests {
check_hover_result( check_hover_result(
r#" r#"
//- /main.rs //- /main.rs
fn main() {
const foo<|>: u32 = 0; const foo<|>: u32 = 0;
}
"#, "#,
&["const foo: u32"], &["const foo: u32"],
); );
@ -414,9 +412,7 @@ mod tests {
check_hover_result( check_hover_result(
r#" r#"
//- /main.rs //- /main.rs
fn main() {
static foo<|>: u32 = 0; static foo<|>: u32 = 0;
}
"#, "#,
&["static foo: u32"], &["static foo: u32"],
); );

View file

@ -16,7 +16,7 @@ pub(crate) fn goto_implementation(
let src = hir::ModuleSource::from_position(db, position); let src = hir::ModuleSource::from_position(db, position);
let module = hir::Module::from_definition( let module = hir::Module::from_definition(
db, db,
hir::Source { file_id: position.file_id.into(), ast: src }, hir::Source { file_id: position.file_id.into(), value: src },
)?; )?;
if let Some(nominal_def) = find_node_at_offset::<ast::NominalDef>(&syntax, position.offset) { if let Some(nominal_def) = find_node_at_offset::<ast::NominalDef>(&syntax, position.offset) {
@ -42,11 +42,11 @@ fn impls_for_def(
) -> Option<Vec<NavigationTarget>> { ) -> Option<Vec<NavigationTarget>> {
let ty = match node { let ty = match node {
ast::NominalDef::StructDef(def) => { ast::NominalDef::StructDef(def) => {
let src = hir::Source { file_id: position.file_id.into(), ast: def.clone() }; let src = hir::Source { file_id: position.file_id.into(), value: def.clone() };
hir::Struct::from_source(db, src)?.ty(db) hir::Struct::from_source(db, src)?.ty(db)
} }
ast::NominalDef::EnumDef(def) => { ast::NominalDef::EnumDef(def) => {
let src = hir::Source { file_id: position.file_id.into(), ast: def.clone() }; let src = hir::Source { file_id: position.file_id.into(), value: def.clone() };
hir::Enum::from_source(db, src)?.ty(db) hir::Enum::from_source(db, src)?.ty(db)
} }
}; };
@ -69,7 +69,7 @@ fn impls_for_trait(
node: &ast::TraitDef, node: &ast::TraitDef,
module: hir::Module, module: hir::Module,
) -> Option<Vec<NavigationTarget>> { ) -> Option<Vec<NavigationTarget>> {
let src = hir::Source { file_id: position.file_id.into(), ast: node.clone() }; let src = hir::Source { file_id: position.file_id.into(), value: node.clone() };
let tr = hir::Trait::from_source(db, src)?; let tr = hir::Trait::from_source(db, src)?;
let krate = module.krate(); let krate = module.krate();

View file

@ -19,10 +19,15 @@ pub struct InlayHint {
pub label: SmolStr, pub label: SmolStr,
} }
pub(crate) fn inlay_hints(db: &RootDatabase, file_id: FileId, file: &SourceFile) -> Vec<InlayHint> { pub(crate) fn inlay_hints(
db: &RootDatabase,
file_id: FileId,
file: &SourceFile,
max_inlay_hint_length: Option<usize>,
) -> Vec<InlayHint> {
file.syntax() file.syntax()
.descendants() .descendants()
.map(|node| get_inlay_hints(db, file_id, &node).unwrap_or_default()) .map(|node| get_inlay_hints(db, file_id, &node, max_inlay_hint_length).unwrap_or_default())
.flatten() .flatten()
.collect() .collect()
} }
@ -31,6 +36,7 @@ fn get_inlay_hints(
db: &RootDatabase, db: &RootDatabase,
file_id: FileId, file_id: FileId,
node: &SyntaxNode, node: &SyntaxNode,
max_inlay_hint_length: Option<usize>,
) -> Option<Vec<InlayHint>> { ) -> Option<Vec<InlayHint>> {
let analyzer = SourceAnalyzer::new(db, hir::Source::new(file_id.into(), node), None); let analyzer = SourceAnalyzer::new(db, hir::Source::new(file_id.into(), node), None);
match_ast! { match_ast! {
@ -40,7 +46,7 @@ fn get_inlay_hints(
return None; return None;
} }
let pat = it.pat()?; let pat = it.pat()?;
Some(get_pat_type_hints(db, &analyzer, pat, false)) Some(get_pat_type_hints(db, &analyzer, pat, false, max_inlay_hint_length))
}, },
ast::LambdaExpr(it) => { ast::LambdaExpr(it) => {
it.param_list().map(|param_list| { it.param_list().map(|param_list| {
@ -48,22 +54,22 @@ fn get_inlay_hints(
.params() .params()
.filter(|closure_param| closure_param.ascribed_type().is_none()) .filter(|closure_param| closure_param.ascribed_type().is_none())
.filter_map(|closure_param| closure_param.pat()) .filter_map(|closure_param| closure_param.pat())
.map(|root_pat| get_pat_type_hints(db, &analyzer, root_pat, false)) .map(|root_pat| get_pat_type_hints(db, &analyzer, root_pat, false, max_inlay_hint_length))
.flatten() .flatten()
.collect() .collect()
}) })
}, },
ast::ForExpr(it) => { ast::ForExpr(it) => {
let pat = it.pat()?; let pat = it.pat()?;
Some(get_pat_type_hints(db, &analyzer, pat, false)) Some(get_pat_type_hints(db, &analyzer, pat, false, max_inlay_hint_length))
}, },
ast::IfExpr(it) => { ast::IfExpr(it) => {
let pat = it.condition()?.pat()?; let pat = it.condition()?.pat()?;
Some(get_pat_type_hints(db, &analyzer, pat, true)) Some(get_pat_type_hints(db, &analyzer, pat, true, max_inlay_hint_length))
}, },
ast::WhileExpr(it) => { ast::WhileExpr(it) => {
let pat = it.condition()?.pat()?; let pat = it.condition()?.pat()?;
Some(get_pat_type_hints(db, &analyzer, pat, true)) Some(get_pat_type_hints(db, &analyzer, pat, true, max_inlay_hint_length))
}, },
ast::MatchArmList(it) => { ast::MatchArmList(it) => {
Some( Some(
@ -71,7 +77,7 @@ fn get_inlay_hints(
.arms() .arms()
.map(|match_arm| match_arm.pats()) .map(|match_arm| match_arm.pats())
.flatten() .flatten()
.map(|root_pat| get_pat_type_hints(db, &analyzer, root_pat, true)) .map(|root_pat| get_pat_type_hints(db, &analyzer, root_pat, true, max_inlay_hint_length))
.flatten() .flatten()
.collect(), .collect(),
) )
@ -86,6 +92,7 @@ fn get_pat_type_hints(
analyzer: &SourceAnalyzer, analyzer: &SourceAnalyzer,
root_pat: ast::Pat, root_pat: ast::Pat,
skip_root_pat_hint: bool, skip_root_pat_hint: bool,
max_inlay_hint_length: Option<usize>,
) -> Vec<InlayHint> { ) -> Vec<InlayHint> {
let original_pat = &root_pat.clone(); let original_pat = &root_pat.clone();
@ -99,7 +106,7 @@ fn get_pat_type_hints(
.map(|(range, pat_type)| InlayHint { .map(|(range, pat_type)| InlayHint {
range, range,
kind: InlayKind::TypeHint, kind: InlayKind::TypeHint,
label: pat_type.display(db).to_string().into(), label: pat_type.display_truncated(db, max_inlay_hint_length).to_string().into(),
}) })
.collect() .collect()
} }
@ -209,7 +216,7 @@ fn main() {
}"#, }"#,
); );
assert_debug_snapshot!(analysis.inlay_hints(file_id).unwrap(), @r###" assert_debug_snapshot!(analysis.inlay_hints(file_id, None).unwrap(), @r###"
[ [
InlayHint { InlayHint {
range: [193; 197), range: [193; 197),
@ -278,7 +285,7 @@ fn main() {
}"#, }"#,
); );
assert_debug_snapshot!(analysis.inlay_hints(file_id).unwrap(), @r###" assert_debug_snapshot!(analysis.inlay_hints(file_id, None).unwrap(), @r###"
[ [
InlayHint { InlayHint {
range: [21; 30), range: [21; 30),
@ -307,7 +314,7 @@ fn main() {
}"#, }"#,
); );
assert_debug_snapshot!(analysis.inlay_hints(file_id).unwrap(), @r###" assert_debug_snapshot!(analysis.inlay_hints(file_id, None).unwrap(), @r###"
[ [
InlayHint { InlayHint {
range: [21; 30), range: [21; 30),
@ -355,7 +362,7 @@ fn main() {
}"#, }"#,
); );
assert_debug_snapshot!(analysis.inlay_hints(file_id).unwrap(), @r###" assert_debug_snapshot!(analysis.inlay_hints(file_id, None).unwrap(), @r###"
[ [
InlayHint { InlayHint {
range: [166; 170), range: [166; 170),
@ -418,7 +425,7 @@ fn main() {
}"#, }"#,
); );
assert_debug_snapshot!(analysis.inlay_hints(file_id).unwrap(), @r###" assert_debug_snapshot!(analysis.inlay_hints(file_id, None).unwrap(), @r###"
[ [
InlayHint { InlayHint {
range: [166; 170), range: [166; 170),
@ -481,7 +488,7 @@ fn main() {
}"#, }"#,
); );
assert_debug_snapshot!(analysis.inlay_hints(file_id).unwrap(), @r###" assert_debug_snapshot!(analysis.inlay_hints(file_id, None).unwrap(), @r###"
[ [
InlayHint { InlayHint {
range: [311; 315), range: [311; 315),
@ -507,4 +514,41 @@ fn main() {
"### "###
); );
} }
#[test]
fn hint_truncation() {
let (analysis, file_id) = single_file(
r#"
struct Smol<T>(T);
struct VeryLongOuterName<T>(T);
fn main() {
let a = Smol(0u32);
let b = VeryLongOuterName(0usize);
let c = Smol(Smol(0u32))
}"#,
);
assert_debug_snapshot!(analysis.inlay_hints(file_id, Some(8)).unwrap(), @r###"
[
InlayHint {
range: [74; 75),
kind: TypeHint,
label: "Smol<u32>",
},
InlayHint {
range: [98; 99),
kind: TypeHint,
label: "VeryLongOuterName<…>",
},
InlayHint {
range: [137; 138),
kind: TypeHint,
label: "Smol<Smol<…>>",
},
]
"###
);
}
} }

View file

@ -243,6 +243,34 @@ fn foo(e: Result<U, V>) {
); );
} }
#[test]
fn join_lines_multiline_in_block() {
check_join_lines(
r"
fn foo() {
match ty {
<|> Some(ty) => {
match ty {
_ => false,
}
}
_ => true,
}
}
",
r"
fn foo() {
match ty {
<|> Some(ty) => match ty {
_ => false,
},
_ => true,
}
}
",
);
}
#[test] #[test]
fn join_lines_keeps_comma_for_block_in_match_arm() { fn join_lines_keeps_comma_for_block_in_match_arm() {
// We already have a comma // We already have a comma

View file

@ -344,8 +344,14 @@ impl Analysis {
} }
/// Returns a list of the places in the file where type hints can be displayed. /// Returns a list of the places in the file where type hints can be displayed.
pub fn inlay_hints(&self, file_id: FileId) -> Cancelable<Vec<InlayHint>> { pub fn inlay_hints(
self.with_db(|db| inlay_hints::inlay_hints(db, file_id, &db.parse(file_id).tree())) &self,
file_id: FileId,
max_inlay_hint_length: Option<usize>,
) -> Cancelable<Vec<InlayHint>> {
self.with_db(|db| {
inlay_hints::inlay_hints(db, file_id, &db.parse(file_id).tree(), max_inlay_hint_length)
})
} }
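// A hedged caller-side sketch, mirroring the test calls above: `None` keeps full
// type labels, while `Some(n)` truncates labels longer than `n` characters.
fn log_inlay_hints(analysis: &Analysis, file_id: FileId) {
    for hint in analysis.inlay_hints(file_id, Some(20)).unwrap() {
        // Each `InlayHint` carries a range, a kind, and the (possibly truncated) label.
        eprintln!("{:?} {:?} {}", hint.range, hint.kind, hint.label);
    }
}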
/// Returns the set of folding ranges. /// Returns the set of folding ranges.

View file

@ -10,7 +10,7 @@ pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec<Na
let src = hir::ModuleSource::from_position(db, position); let src = hir::ModuleSource::from_position(db, position);
let module = match hir::Module::from_definition( let module = match hir::Module::from_definition(
db, db,
hir::Source { file_id: position.file_id.into(), ast: src }, hir::Source { file_id: position.file_id.into(), value: src },
) { ) {
None => return Vec::new(), None => return Vec::new(),
Some(it) => it, Some(it) => it,
@ -23,7 +23,8 @@ pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec<Na
pub(crate) fn crate_for(db: &RootDatabase, file_id: FileId) -> Vec<CrateId> { pub(crate) fn crate_for(db: &RootDatabase, file_id: FileId) -> Vec<CrateId> {
let src = hir::ModuleSource::from_file_id(db, file_id); let src = hir::ModuleSource::from_file_id(db, file_id);
let module = let module =
match hir::Module::from_definition(db, hir::Source { file_id: file_id.into(), ast: src }) { match hir::Module::from_definition(db, hir::Source { file_id: file_id.into(), value: src })
{
Some(it) => it, Some(it) => it,
None => return Vec::new(), None => return Vec::new(),
}; };

View file

@ -13,12 +13,12 @@ use crate::db::RootDatabase;
pub(crate) fn classify_name(db: &RootDatabase, name: Source<&ast::Name>) -> Option<NameDefinition> { pub(crate) fn classify_name(db: &RootDatabase, name: Source<&ast::Name>) -> Option<NameDefinition> {
let _p = profile("classify_name"); let _p = profile("classify_name");
let parent = name.ast.syntax().parent()?; let parent = name.value.syntax().parent()?;
match_ast! { match_ast! {
match parent { match parent {
ast::BindPat(it) => { ast::BindPat(it) => {
let src = name.with_ast(it); let src = name.with_value(it);
let local = hir::Local::from_source(db, src)?; let local = hir::Local::from_source(db, src)?;
Some(NameDefinition { Some(NameDefinition {
visibility: None, visibility: None,
@ -28,7 +28,7 @@ pub(crate) fn classify_name(db: &RootDatabase, name: Source<&ast::Name>) -> Opti
}, },
ast::RecordFieldDef(it) => { ast::RecordFieldDef(it) => {
let ast = hir::FieldSource::Named(it); let ast = hir::FieldSource::Named(it);
let src = name.with_ast(ast); let src = name.with_value(ast);
let field = hir::StructField::from_source(db, src)?; let field = hir::StructField::from_source(db, src)?;
Some(from_struct_field(db, field)) Some(from_struct_field(db, field))
}, },
@ -36,42 +36,42 @@ pub(crate) fn classify_name(db: &RootDatabase, name: Source<&ast::Name>) -> Opti
let def = { let def = {
if !it.has_semi() { if !it.has_semi() {
let ast = hir::ModuleSource::Module(it); let ast = hir::ModuleSource::Module(it);
let src = name.with_ast(ast); let src = name.with_value(ast);
hir::Module::from_definition(db, src) hir::Module::from_definition(db, src)
} else { } else {
let src = name.with_ast(it); let src = name.with_value(it);
hir::Module::from_declaration(db, src) hir::Module::from_declaration(db, src)
} }
}?; }?;
Some(from_module_def(db, def.into(), None)) Some(from_module_def(db, def.into(), None))
}, },
ast::StructDef(it) => { ast::StructDef(it) => {
let src = name.with_ast(it); let src = name.with_value(it);
let def = hir::Struct::from_source(db, src)?; let def = hir::Struct::from_source(db, src)?;
Some(from_module_def(db, def.into(), None)) Some(from_module_def(db, def.into(), None))
}, },
ast::EnumDef(it) => { ast::EnumDef(it) => {
let src = name.with_ast(it); let src = name.with_value(it);
let def = hir::Enum::from_source(db, src)?; let def = hir::Enum::from_source(db, src)?;
Some(from_module_def(db, def.into(), None)) Some(from_module_def(db, def.into(), None))
}, },
ast::TraitDef(it) => { ast::TraitDef(it) => {
let src = name.with_ast(it); let src = name.with_value(it);
let def = hir::Trait::from_source(db, src)?; let def = hir::Trait::from_source(db, src)?;
Some(from_module_def(db, def.into(), None)) Some(from_module_def(db, def.into(), None))
}, },
ast::StaticDef(it) => { ast::StaticDef(it) => {
let src = name.with_ast(it); let src = name.with_value(it);
let def = hir::Static::from_source(db, src)?; let def = hir::Static::from_source(db, src)?;
Some(from_module_def(db, def.into(), None)) Some(from_module_def(db, def.into(), None))
}, },
ast::EnumVariant(it) => { ast::EnumVariant(it) => {
let src = name.with_ast(it); let src = name.with_value(it);
let def = hir::EnumVariant::from_source(db, src)?; let def = hir::EnumVariant::from_source(db, src)?;
Some(from_module_def(db, def.into(), None)) Some(from_module_def(db, def.into(), None))
}, },
ast::FnDef(it) => { ast::FnDef(it) => {
let src = name.with_ast(it); let src = name.with_value(it);
let def = hir::Function::from_source(db, src)?; let def = hir::Function::from_source(db, src)?;
if parent.parent().and_then(ast::ItemList::cast).is_some() { if parent.parent().and_then(ast::ItemList::cast).is_some() {
Some(from_assoc_item(db, def.into())) Some(from_assoc_item(db, def.into()))
@ -80,7 +80,7 @@ pub(crate) fn classify_name(db: &RootDatabase, name: Source<&ast::Name>) -> Opti
} }
}, },
ast::ConstDef(it) => { ast::ConstDef(it) => {
let src = name.with_ast(it); let src = name.with_value(it);
let def = hir::Const::from_source(db, src)?; let def = hir::Const::from_source(db, src)?;
if parent.parent().and_then(ast::ItemList::cast).is_some() { if parent.parent().and_then(ast::ItemList::cast).is_some() {
Some(from_assoc_item(db, def.into())) Some(from_assoc_item(db, def.into()))
@ -89,7 +89,7 @@ pub(crate) fn classify_name(db: &RootDatabase, name: Source<&ast::Name>) -> Opti
} }
}, },
ast::TypeAliasDef(it) => { ast::TypeAliasDef(it) => {
let src = name.with_ast(it); let src = name.with_value(it);
let def = hir::TypeAlias::from_source(db, src)?; let def = hir::TypeAlias::from_source(db, src)?;
if parent.parent().and_then(ast::ItemList::cast).is_some() { if parent.parent().and_then(ast::ItemList::cast).is_some() {
Some(from_assoc_item(db, def.into())) Some(from_assoc_item(db, def.into()))
@ -98,11 +98,11 @@ pub(crate) fn classify_name(db: &RootDatabase, name: Source<&ast::Name>) -> Opti
} }
}, },
ast::MacroCall(it) => { ast::MacroCall(it) => {
let src = name.with_ast(it); let src = name.with_value(it);
let def = hir::MacroDef::from_source(db, src.clone())?; let def = hir::MacroDef::from_source(db, src.clone())?;
let module_src = ModuleSource::from_child_node(db, src.as_ref().map(|it| it.syntax())); let module_src = ModuleSource::from_child_node(db, src.as_ref().map(|it| it.syntax()));
let module = Module::from_definition(db, src.with_ast(module_src))?; let module = Module::from_definition(db, src.with_value(module_src))?;
Some(NameDefinition { Some(NameDefinition {
visibility: None, visibility: None,
@ -121,7 +121,7 @@ pub(crate) fn classify_name_ref(
) -> Option<NameDefinition> { ) -> Option<NameDefinition> {
let _p = profile("classify_name_ref"); let _p = profile("classify_name_ref");
let parent = name_ref.ast.syntax().parent()?; let parent = name_ref.value.syntax().parent()?;
let analyzer = SourceAnalyzer::new(db, name_ref.map(|it| it.syntax()), None); let analyzer = SourceAnalyzer::new(db, name_ref.map(|it| it.syntax()), None);
if let Some(method_call) = ast::MethodCallExpr::cast(parent.clone()) { if let Some(method_call) = ast::MethodCallExpr::cast(parent.clone()) {
@ -142,16 +142,16 @@ pub(crate) fn classify_name_ref(
tested_by!(goto_definition_works_for_record_fields); tested_by!(goto_definition_works_for_record_fields);
if let Some(record_lit) = record_field.syntax().ancestors().find_map(ast::RecordLit::cast) { if let Some(record_lit) = record_field.syntax().ancestors().find_map(ast::RecordLit::cast) {
let variant_def = analyzer.resolve_record_literal(&record_lit)?; let variant_def = analyzer.resolve_record_literal(&record_lit)?;
let hir_path = Path::from_name_ref(name_ref.ast); let hir_path = Path::from_name_ref(name_ref.value);
let hir_name = hir_path.as_ident()?; let hir_name = hir_path.as_ident()?;
let field = variant_def.field(db, hir_name)?; let field = variant_def.field(db, hir_name)?;
return Some(from_struct_field(db, field)); return Some(from_struct_field(db, field));
} }
} }
let ast = ModuleSource::from_child_node(db, name_ref.with_ast(&parent)); let ast = ModuleSource::from_child_node(db, name_ref.with_value(&parent));
// FIXME: find correct container and visibility for each case // FIXME: find correct container and visibility for each case
let container = Module::from_definition(db, name_ref.with_ast(ast))?; let container = Module::from_definition(db, name_ref.with_value(ast))?;
let visibility = None; let visibility = None;
if let Some(macro_call) = parent.ancestors().find_map(ast::MacroCall::cast) { if let Some(macro_call) = parent.ancestors().find_map(ast::MacroCall::cast) {
@ -162,7 +162,7 @@ pub(crate) fn classify_name_ref(
} }
} }
let path = name_ref.ast.syntax().ancestors().find_map(ast::Path::cast)?; let path = name_ref.value.syntax().ancestors().find_map(ast::Path::cast)?;
let resolved = analyzer.resolve_path(db, &path)?; let resolved = analyzer.resolve_path(db, &path)?;
match resolved { match resolved {
PathResolution::Def(def) => Some(from_module_def(db, def, Some(container))), PathResolution::Def(def) => Some(from_module_def(db, def, Some(container))),
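The bulk of the churn in these hunks is mechanical: `hir::Source<T>` now exposes its payload as `value` instead of `ast`, and `with_ast` becomes `with_value`. A minimal sketch of the shape these call sites rely on, inferred from the diff itself (a `file_id` plus the wrapped value); `HirFileId` is reduced to a placeholder and the exact helper signatures are an assumption, not something this commit shows in full:

```rust
// Sketch only - mirrors the fields and helpers used at the call sites above,
// not the full rust-analyzer definition.
type HirFileId = u32; // placeholder for the real HirFileId

#[derive(Debug, Clone)]
pub struct Source<T> {
    pub file_id: HirFileId,
    pub value: T,
}

impl<T> Source<T> {
    // Keep the same file_id, swap the payload - this is what the renamed
    // `with_value` calls above depend on.
    pub fn with_value<U>(&self, value: U) -> Source<U> {
        Source { file_id: self.file_id, value }
    }

    pub fn map<U>(self, f: impl FnOnce(T) -> U) -> Source<U> {
        Source { file_id: self.file_id, value: f(self.value) }
    }

    pub fn as_ref(&self) -> Source<&T> {
        Source { file_id: self.file_id, value: &self.value }
    }
}
```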

@ -32,9 +32,9 @@ pub(crate) struct NameDefinition {
pub(super) fn from_assoc_item(db: &RootDatabase, item: AssocItem) -> NameDefinition { pub(super) fn from_assoc_item(db: &RootDatabase, item: AssocItem) -> NameDefinition {
let container = item.module(db); let container = item.module(db);
let visibility = match item { let visibility = match item {
AssocItem::Function(f) => f.source(db).ast.visibility(), AssocItem::Function(f) => f.source(db).value.visibility(),
AssocItem::Const(c) => c.source(db).ast.visibility(), AssocItem::Const(c) => c.source(db).value.visibility(),
AssocItem::TypeAlias(a) => a.source(db).ast.visibility(), AssocItem::TypeAlias(a) => a.source(db).value.visibility(),
}; };
let kind = NameKind::AssocItem(item); let kind = NameKind::AssocItem(item);
NameDefinition { kind, container, visibility } NameDefinition { kind, container, visibility }
@ -45,8 +45,8 @@ pub(super) fn from_struct_field(db: &RootDatabase, field: StructField) -> NameDe
let parent = field.parent_def(db); let parent = field.parent_def(db);
let container = parent.module(db); let container = parent.module(db);
let visibility = match parent { let visibility = match parent {
VariantDef::Struct(s) => s.source(db).ast.visibility(), VariantDef::Struct(s) => s.source(db).value.visibility(),
VariantDef::EnumVariant(e) => e.source(db).ast.parent_enum().visibility(), VariantDef::EnumVariant(e) => e.source(db).value.parent_enum().visibility(),
}; };
NameDefinition { kind, container, visibility } NameDefinition { kind, container, visibility }
} }
@ -60,22 +60,22 @@ pub(super) fn from_module_def(
let (container, visibility) = match def { let (container, visibility) = match def {
ModuleDef::Module(it) => { ModuleDef::Module(it) => {
let container = it.parent(db).or_else(|| Some(it)).unwrap(); let container = it.parent(db).or_else(|| Some(it)).unwrap();
let visibility = it.declaration_source(db).and_then(|s| s.ast.visibility()); let visibility = it.declaration_source(db).and_then(|s| s.value.visibility());
(container, visibility) (container, visibility)
} }
ModuleDef::EnumVariant(it) => { ModuleDef::EnumVariant(it) => {
let container = it.module(db); let container = it.module(db);
let visibility = it.source(db).ast.parent_enum().visibility(); let visibility = it.source(db).value.parent_enum().visibility();
(container, visibility) (container, visibility)
} }
ModuleDef::Function(it) => (it.module(db), it.source(db).ast.visibility()), ModuleDef::Function(it) => (it.module(db), it.source(db).value.visibility()),
ModuleDef::Const(it) => (it.module(db), it.source(db).ast.visibility()), ModuleDef::Const(it) => (it.module(db), it.source(db).value.visibility()),
ModuleDef::Static(it) => (it.module(db), it.source(db).ast.visibility()), ModuleDef::Static(it) => (it.module(db), it.source(db).value.visibility()),
ModuleDef::Trait(it) => (it.module(db), it.source(db).ast.visibility()), ModuleDef::Trait(it) => (it.module(db), it.source(db).value.visibility()),
ModuleDef::TypeAlias(it) => (it.module(db), it.source(db).ast.visibility()), ModuleDef::TypeAlias(it) => (it.module(db), it.source(db).value.visibility()),
ModuleDef::Adt(Adt::Struct(it)) => (it.module(db), it.source(db).ast.visibility()), ModuleDef::Adt(Adt::Struct(it)) => (it.module(db), it.source(db).value.visibility()),
ModuleDef::Adt(Adt::Union(it)) => (it.module(db), it.source(db).ast.visibility()), ModuleDef::Adt(Adt::Union(it)) => (it.module(db), it.source(db).value.visibility()),
ModuleDef::Adt(Adt::Enum(it)) => (it.module(db), it.source(db).ast.visibility()), ModuleDef::Adt(Adt::Enum(it)) => (it.module(db), it.source(db).value.visibility()),
ModuleDef::BuiltinType(..) => (module.unwrap(), None), ModuleDef::BuiltinType(..) => (module.unwrap(), None),
}; };
NameDefinition { kind, container, visibility } NameDefinition { kind, container, visibility }

@ -55,11 +55,11 @@ fn rename_mod(
) -> Option<SourceChange> { ) -> Option<SourceChange> {
let mut source_file_edits = Vec::new(); let mut source_file_edits = Vec::new();
let mut file_system_edits = Vec::new(); let mut file_system_edits = Vec::new();
let module_src = hir::Source { file_id: position.file_id.into(), ast: ast_module.clone() }; let module_src = hir::Source { file_id: position.file_id.into(), value: ast_module.clone() };
if let Some(module) = hir::Module::from_declaration(db, module_src) { if let Some(module) = hir::Module::from_declaration(db, module_src) {
let src = module.definition_source(db); let src = module.definition_source(db);
let file_id = src.file_id.original_file(db); let file_id = src.file_id.original_file(db);
match src.ast { match src.value {
ModuleSource::SourceFile(..) => { ModuleSource::SourceFile(..) => {
let mod_path: RelativePathBuf = db.file_relative_path(file_id); let mod_path: RelativePathBuf = db.file_relative_path(file_id);
// mod is defined in path/to/dir/mod.rs // mod is defined in path/to/dir/mod.rs
@ -121,140 +121,8 @@ mod tests {
use crate::{ use crate::{
mock_analysis::analysis_and_position, mock_analysis::single_file_with_position, FileId, mock_analysis::analysis_and_position, mock_analysis::single_file_with_position, FileId,
ReferenceSearchResult,
}; };
#[test]
fn test_find_all_refs_for_local() {
let code = r#"
fn main() {
let mut i = 1;
let j = 1;
i = i<|> + j;
{
i = 0;
}
i = 5;
}"#;
let refs = get_all_refs(code);
assert_eq!(refs.len(), 5);
}
#[test]
fn test_find_all_refs_for_param_inside() {
let code = r#"
fn foo(i : u32) -> u32 {
i<|>
}"#;
let refs = get_all_refs(code);
assert_eq!(refs.len(), 2);
}
#[test]
fn test_find_all_refs_for_fn_param() {
let code = r#"
fn foo(i<|> : u32) -> u32 {
i
}"#;
let refs = get_all_refs(code);
assert_eq!(refs.len(), 2);
}
#[test]
fn test_find_all_refs_field_name() {
let code = r#"
//- /lib.rs
struct Foo {
pub spam<|>: u32,
}
fn main(s: Foo) {
let f = s.spam;
}
"#;
let refs = get_all_refs(code);
assert_eq!(refs.len(), 2);
}
#[test]
fn test_find_all_refs_impl_item_name() {
let code = r#"
//- /lib.rs
struct Foo;
impl Foo {
fn f<|>(&self) { }
}
"#;
let refs = get_all_refs(code);
assert_eq!(refs.len(), 1);
}
#[test]
fn test_find_all_refs_enum_var_name() {
let code = r#"
//- /lib.rs
enum Foo {
A,
B<|>,
C,
}
"#;
let refs = get_all_refs(code);
assert_eq!(refs.len(), 1);
}
#[test]
fn test_find_all_refs_modules() {
let code = r#"
//- /lib.rs
pub mod foo;
pub mod bar;
fn f() {
let i = foo::Foo { n: 5 };
}
//- /foo.rs
use crate::bar;
pub struct Foo {
pub n: u32,
}
fn f() {
let i = bar::Bar { n: 5 };
}
//- /bar.rs
use crate::foo;
pub struct Bar {
pub n: u32,
}
fn f() {
let i = foo::Foo<|> { n: 5 };
}
"#;
let (analysis, pos) = analysis_and_position(code);
let refs = analysis.find_all_refs(pos, None).unwrap().unwrap();
assert_eq!(refs.len(), 3);
}
fn get_all_refs(text: &str) -> ReferenceSearchResult {
let (analysis, position) = single_file_with_position(text);
analysis.find_all_refs(position, None).unwrap().unwrap()
}
#[test] #[test]
fn test_rename_for_local() { fn test_rename_for_local() {
test_rename( test_rename(

@ -73,9 +73,9 @@ impl NameDefinition {
if let NameKind::Local(var) = self.kind { if let NameKind::Local(var) = self.kind {
let range = match var.parent(db) { let range = match var.parent(db) {
DefWithBody::Function(f) => f.source(db).ast.syntax().text_range(), DefWithBody::Function(f) => f.source(db).value.syntax().text_range(),
DefWithBody::Const(c) => c.source(db).ast.syntax().text_range(), DefWithBody::Const(c) => c.source(db).value.syntax().text_range(),
DefWithBody::Static(s) => s.source(db).ast.syntax().text_range(), DefWithBody::Static(s) => s.source(db).value.syntax().text_range(),
}; };
let mut res = FxHashMap::default(); let mut res = FxHashMap::default();
res.insert(file_id, Some(range)); res.insert(file_id, Some(range));
@ -91,7 +91,7 @@ impl NameDefinition {
let parent_src = parent_module.definition_source(db); let parent_src = parent_module.definition_source(db);
let file_id = parent_src.file_id.original_file(db); let file_id = parent_src.file_id.original_file(db);
match parent_src.ast { match parent_src.value {
ModuleSource::Module(m) => { ModuleSource::Module(m) => {
let range = Some(m.syntax().text_range()); let range = Some(m.syntax().text_range());
res.insert(file_id, range); res.insert(file_id, range);
@ -135,7 +135,7 @@ impl NameDefinition {
} }
let mut res = FxHashMap::default(); let mut res = FxHashMap::default();
let range = match module_src.ast { let range = match module_src.value {
ModuleSource::Module(m) => Some(m.syntax().text_range()), ModuleSource::Module(m) => Some(m.syntax().text_range()),
ModuleSource::SourceFile(_) => None, ModuleSource::SourceFile(_) => None,
}; };

@ -29,6 +29,8 @@ pub struct ServerConfig {
pub lru_capacity: Option<usize>, pub lru_capacity: Option<usize>,
pub max_inlay_hint_length: Option<usize>,
/// For internal usage to make integrated tests faster. /// For internal usage to make integrated tests faster.
#[serde(deserialize_with = "nullable_bool_true")] #[serde(deserialize_with = "nullable_bool_true")]
pub with_sysroot: bool, pub with_sysroot: bool,
@ -44,6 +46,7 @@ impl Default for ServerConfig {
exclude_globs: Vec::new(), exclude_globs: Vec::new(),
use_client_watching: false, use_client_watching: false,
lru_capacity: None, lru_capacity: None,
max_inlay_hint_length: None,
with_sysroot: true, with_sysroot: true,
feature_flags: FxHashMap::default(), feature_flags: FxHashMap::default(),
} }
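The new `max_inlay_hint_length` option moves hint truncation from the client to the server: the TypeScript `truncateHint` helper is deleted further down, and the limit is forwarded through `initializationOptions` into this config. The diff does not show how the server applies the limit, so the sketch below is only a guess at the implied behaviour, mirroring the character-based clipping the old client code did:

```rust
// Hypothetical helper, not taken from this diff: clip an inlay-hint label to
// `max_length` characters and append an ellipsis, like the removed TypeScript
// `truncateHint` used to do on the client.
fn truncate_hint_label(label: &str, max_length: Option<usize>) -> String {
    match max_length {
        Some(max) if label.chars().count() > max => {
            let mut clipped: String = label.chars().take(max).collect();
            clipped.push('…');
            clipped
        }
        _ => label.to_string(),
    }
}

fn main() {
    assert_eq!(truncate_hint_label("Vec<String>", Some(3)), "Vec…");
    assert_eq!(truncate_hint_label("i32", None), "i32");
}
```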

@ -123,6 +123,7 @@ pub fn main_loop(
.and_then(|it| it.folding_range.as_ref()) .and_then(|it| it.folding_range.as_ref())
.and_then(|it| it.line_folding_only) .and_then(|it| it.line_folding_only)
.unwrap_or(false), .unwrap_or(false),
max_inlay_hint_length: config.max_inlay_hint_length,
} }
}; };

@ -888,7 +888,7 @@ pub fn handle_inlay_hints(
let analysis = world.analysis(); let analysis = world.analysis();
let line_index = analysis.file_line_index(file_id)?; let line_index = analysis.file_line_index(file_id)?;
Ok(analysis Ok(analysis
.inlay_hints(file_id)? .inlay_hints(file_id, world.options.max_inlay_hint_length)?
.into_iter() .into_iter()
.map(|api_type| InlayHint { .map(|api_type| InlayHint {
label: api_type.label.to_string(), label: api_type.label.to_string(),

@ -28,6 +28,7 @@ pub struct Options {
pub publish_decorations: bool, pub publish_decorations: bool,
pub supports_location_link: bool, pub supports_location_link: bool,
pub line_folding_only: bool, pub line_folding_only: bool,
pub max_inlay_hint_length: Option<usize>,
} }
/// `WorldState` is the primary mutable state of the language server /// `WorldState` is the primary mutable state of the language server

@ -3625,8 +3625,11 @@ impl AstNode for TypeParam {
impl ast::NameOwner for TypeParam {} impl ast::NameOwner for TypeParam {}
impl ast::AttrsOwner for TypeParam {} impl ast::AttrsOwner for TypeParam {}
impl ast::TypeBoundsOwner for TypeParam {} impl ast::TypeBoundsOwner for TypeParam {}
impl ast::DefaultTypeParamOwner for TypeParam {} impl TypeParam {
impl TypeParam {} pub fn default_type(&self) -> Option<TypeRef> {
AstChildren::new(&self.syntax).next()
}
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct TypeParamList { pub struct TypeParamList {
pub(crate) syntax: SyntaxNode, pub(crate) syntax: SyntaxNode,
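The regenerated `default_type` accessor above replaces the hand-written `DefaultTypeParamOwner` trait that is deleted a few hunks below, and it now yields a `TypeRef` rather than a `PathType`. A rough usage sketch follows; the parsing entry points (`SourceFile::parse`, `descendants`, `ast::TypeParam::cast`) are assumed from the surrounding crate rather than shown in this diff:

```rust
use ra_syntax::{ast::{self, AstNode}, SourceFile};

fn main() {
    let parse = SourceFile::parse("struct S<T = u32>(T);");
    let file = parse.tree();

    // Find the first type parameter and read its default type, if any.
    let param = file.syntax().descendants().find_map(ast::TypeParam::cast).unwrap();
    let default = param.default_type().map(|ty| ty.syntax().text().to_string());
    assert_eq!(default.as_deref(), Some("u32"));
}
```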

@ -163,9 +163,3 @@ impl Iterator for CommentIter {
self.iter.by_ref().find_map(|el| el.into_token().and_then(ast::Comment::cast)) self.iter.by_ref().find_map(|el| el.into_token().and_then(ast::Comment::cast))
} }
} }
pub trait DefaultTypeParamOwner: AstNode {
fn default_type(&self) -> Option<ast::PathType> {
child_opt(self)
}
}

@ -587,7 +587,10 @@ Grammar(
("lifetime_params", "LifetimeParam" ), ("lifetime_params", "LifetimeParam" ),
] ]
), ),
"TypeParam": ( traits: ["NameOwner", "AttrsOwner", "TypeBoundsOwner", "DefaultTypeParamOwner"] ), "TypeParam": (
options: [("default_type", "TypeRef")],
traits: ["NameOwner", "AttrsOwner", "TypeBoundsOwner"],
),
"LifetimeParam": ( "LifetimeParam": (
traits: ["AttrsOwner"], traits: ["AttrsOwner"],
), ),

@ -43,7 +43,7 @@ impl SyntaxNodePtr {
} }
/// Like `SyntaxNodePtr`, but remembers the type of node /// Like `SyntaxNodePtr`, but remembers the type of node
#[derive(Debug, PartialEq, Eq, Hash)] #[derive(Debug, Hash)]
pub struct AstPtr<N: AstNode> { pub struct AstPtr<N: AstNode> {
raw: SyntaxNodePtr, raw: SyntaxNodePtr,
_ty: PhantomData<fn() -> N>, _ty: PhantomData<fn() -> N>,
@ -56,6 +56,14 @@ impl<N: AstNode> Clone for AstPtr<N> {
} }
} }
impl<N: AstNode> Eq for AstPtr<N> {}
impl<N: AstNode> PartialEq for AstPtr<N> {
fn eq(&self, other: &AstPtr<N>) -> bool {
self.raw == other.raw
}
}
impl<N: AstNode> AstPtr<N> { impl<N: AstNode> AstPtr<N> {
pub fn new(node: &N) -> AstPtr<N> { pub fn new(node: &N) -> AstPtr<N> {
AstPtr { raw: SyntaxNodePtr::new(node.syntax()), _ty: PhantomData } AstPtr { raw: SyntaxNodePtr::new(node.syntax()), _ty: PhantomData }
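The derive is dropped here because `#[derive(PartialEq, Eq)]` on a generic struct only emits the impls under an `N: PartialEq` bound, while `AstPtr<N>` equality should depend solely on the underlying `SyntaxNodePtr`. A self-contained sketch of the same pattern, using illustrative stand-in types rather than the real ones:

```rust
use std::marker::PhantomData;

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
struct RawPtr(u32); // stand-in for SyntaxNodePtr

// Stand-in for AstPtr<N>: deriving PartialEq/Eq here would require N: PartialEq,
// even though N is only a phantom marker, so equality is written by hand in
// terms of the raw pointer alone.
struct TypedPtr<N> {
    raw: RawPtr,
    _ty: PhantomData<fn() -> N>,
}

impl<N> PartialEq for TypedPtr<N> {
    fn eq(&self, other: &TypedPtr<N>) -> bool {
        self.raw == other.raw
    }
}
impl<N> Eq for TypedPtr<N> {}

struct NotComparable; // deliberately has no PartialEq impl

fn main() {
    let a = TypedPtr::<NotComparable> { raw: RawPtr(1), _ty: PhantomData };
    let b = TypedPtr::<NotComparable> { raw: RawPtr(1), _ty: PhantomData };
    assert!(a == b); // compiles even though NotComparable is not PartialEq
}
```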

@ -136,7 +136,7 @@ Installation:
[ra-emacs-lsp.el](https://github.com/rust-analyzer/rust-analyzer/blob/69ee5c9c5ef212f7911028c9ddf581559e6565c3/editors/emacs/ra-emacs-lsp.el) [ra-emacs-lsp.el](https://github.com/rust-analyzer/rust-analyzer/blob/69ee5c9c5ef212f7911028c9ddf581559e6565c3/editors/emacs/ra-emacs-lsp.el)
to load path and require it in `init.el` to load path and require it in `init.el`
* run `lsp` in a rust buffer * run `lsp` in a rust buffer
* (Optionally) bind commands like `rust-analyzer-join-lines` or `rust-analyzer-extend-selection` to keys, and enable `rust-analyzer-inlay-hints-mode` to get inline type hints * (Optionally) bind commands like `rust-analyzer-join-lines`, `rust-analyzer-extend-selection` and `rust-analyzer-expand-macro` to keys, and enable `rust-analyzer-inlay-hints-mode` to get inline type hints
## Vim and NeoVim ## Vim and NeoVim

View file

@ -87,7 +87,7 @@ export class HintsUpdater {
range: hint.range, range: hint.range,
renderOptions: { renderOptions: {
after: { after: {
contentText: `: ${this.truncateHint(hint.label)}` contentText: `: ${hint.label}`
} }
} }
})); }));
@ -98,18 +98,6 @@ export class HintsUpdater {
} }
} }
private truncateHint(label: string): string {
if (!Server.config.maxInlayHintLength) {
return label;
}
let newLabel = label.substring(0, Server.config.maxInlayHintLength);
if (label.length > Server.config.maxInlayHintLength) {
newLabel += '…';
}
return newLabel;
}
private async queryHints(documentUri: string): Promise<InlayHint[] | null> { private async queryHints(documentUri: string): Promise<InlayHint[] | null> {
const request: InlayHintsParams = { const request: InlayHintsParams = {
textDocument: { uri: documentUri } textDocument: { uri: documentUri }

@ -43,6 +43,7 @@ export class Server {
initializationOptions: { initializationOptions: {
publishDecorations: true, publishDecorations: true,
lruCapacity: Server.config.lruCapacity, lruCapacity: Server.config.lruCapacity,
maxInlayHintLength: Server.config.maxInlayHintLength,
excludeGlobs: Server.config.excludeGlobs, excludeGlobs: Server.config.excludeGlobs,
useClientWatching: Server.config.useClientWatching, useClientWatching: Server.config.useClientWatching,
featureFlags: Server.config.featureFlags featureFlags: Server.config.featureFlags

@ -16,6 +16,7 @@
;; - implements joinLines (you need to bind rust-analyzer-join-lines to a key) ;; - implements joinLines (you need to bind rust-analyzer-join-lines to a key)
;; - implements selectionRanges (either bind lsp-extend-selection to a key, or use expand-region) ;; - implements selectionRanges (either bind lsp-extend-selection to a key, or use expand-region)
;; - provides rust-analyzer-inlay-hints-mode for inline type hints ;; - provides rust-analyzer-inlay-hints-mode for inline type hints
;; - provides rust-analyzer-expand-macro to expand macros
;; What's missing: ;; What's missing:
;; - file system changes in apply-source-change ;; - file system changes in apply-source-change
@ -247,5 +248,32 @@
(remove-hook 'after-change-functions #'rust-analyzer--inlay-hints-change-handler t)))) (remove-hook 'after-change-functions #'rust-analyzer--inlay-hints-change-handler t))))
;; expand macros
(defun rust-analyzer-expand-macro ()
"Expands the macro call at point recursively."
(interactive)
(when (eq 'rust-mode major-mode)
(let* ((workspace (lsp-find-workspace 'rust-analyzer (buffer-file-name)))
(params (list :textDocument (lsp--text-document-identifier)
:position (lsp--cur-position))))
(when workspace
(let* ((response (with-lsp-workspace workspace
(lsp-send-request (lsp-make-request
"rust-analyzer/expandMacro"
params))))
(result (when response (ht-get response "expansion"))))
(if result
(let ((buf (get-buffer-create (concat "*rust-analyzer macro expansion " (with-lsp-workspace workspace (lsp-workspace-root)) "*"))))
(with-current-buffer buf
(let ((inhibit-read-only t))
(erase-buffer)
(insert result)
(setq buffer-read-only t)
(special-mode)))
(pop-to-buffer buf))
(message "No macro found at point, or it could not be expanded")))))))
(provide 'ra-emacs-lsp) (provide 'ra-emacs-lsp)
;;; ra-emacs-lsp.el ends here ;;; ra-emacs-lsp.el ends here

@ -5,6 +5,9 @@ version = "0.1.0"
authors = ["rust-analyzer developers"] authors = ["rust-analyzer developers"]
publish = false publish = false
[lib]
doctest = false
[dependencies] [dependencies]
walkdir = "2.1.3" walkdir = "2.1.3"
pico-args = "0.3.0" pico-args = "0.3.0"

@ -9,11 +9,10 @@
//! `.cargo/config`. //! `.cargo/config`.
mod help; mod help;
use std::{env, fmt::Write, path::PathBuf, str};
use anyhow::Context; use anyhow::Context;
use core::fmt::Write;
use core::str;
use pico_args::Arguments; use pico_args::Arguments;
use std::{env, path::PathBuf};
use xtask::{ use xtask::{
codegen::{self, Mode}, codegen::{self, Mode},
install_pre_commit_hook, reformat_staged_files, run, run_clippy, run_fuzzer, run_rustfmt, install_pre_commit_hook, reformat_staged_files, run, run_clippy, run_fuzzer, run_rustfmt,
@ -37,7 +36,7 @@ struct ServerOpt {
} }
fn main() -> Result<()> { fn main() -> Result<()> {
if std::env::args().next().map(|it| it.contains("pre-commit")) == Some(true) { if env::args().next().map(|it| it.contains("pre-commit")) == Some(true) {
return reformat_staged_files(); return reformat_staged_files();
} }
@ -174,7 +173,7 @@ fn fix_path_for_mac() -> Result<()> {
fn install_client(ClientOpt::VsCode: ClientOpt) -> Result<()> { fn install_client(ClientOpt::VsCode: ClientOpt) -> Result<()> {
let npm_version = Cmd { let npm_version = Cmd {
unix: r"npm --version", unix: r"npm --version",
windows: r"cmd.exe /c npm.cmd --version", windows: r"cmd.exe /c npm --version",
work_dir: "./editors/code", work_dir: "./editors/code",
} }
.run(); .run();
@ -183,10 +182,10 @@ fn install_client(ClientOpt::VsCode: ClientOpt) -> Result<()> {
eprintln!("\nERROR: `npm --version` failed, `npm` is required to build the VS Code plugin") eprintln!("\nERROR: `npm --version` failed, `npm` is required to build the VS Code plugin")
} }
Cmd { unix: r"npm ci", windows: r"cmd.exe /c npm.cmd ci", work_dir: "./editors/code" }.run()?; Cmd { unix: r"npm ci", windows: r"cmd.exe /c npm ci", work_dir: "./editors/code" }.run()?;
Cmd { Cmd {
unix: r"npm run package --scripts-prepend-node-path", unix: r"npm run package --scripts-prepend-node-path",
windows: r"cmd.exe /c npm.cmd run package", windows: r"cmd.exe /c npm run package",
work_dir: "./editors/code", work_dir: "./editors/code",
} }
.run()?; .run()?;