Mirror of https://github.com/rust-lang/rust-analyzer, synced 2025-01-13 05:38:46 +00:00
Merge branch 'master' of https://github.com/rust-analyzer/rust-analyzer into feature/themes
Commit 358a1bcd70
82 changed files with 1457 additions and 1073 deletions
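Most of the hunks below apply one mechanical change: the payload field of `hir::Source` is renamed from `ast` to `value`, and several item lookups move to `Lookup`-based ids in `hir_def`. As a reading aid, here is a minimal sketch of the `Source` wrapper in the shape the call sites below use it (`Source::new`, `as_ref`, `with_value`, `map`); the real definition lives in `hir_expand` and may differ in details, so treat this as an assumption-laden illustration, not the crate's actual code:

    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    pub struct Source<T> {
        pub file_id: HirFileId,
        pub value: T, // previously named `ast`
    }

    impl<T> Source<T> {
        pub fn new(file_id: HirFileId, value: T) -> Source<T> {
            Source { file_id, value }
        }
        /// Borrow the payload, keeping the file id.
        pub fn as_ref(&self) -> Source<&T> {
            Source { file_id: self.file_id, value: &self.value }
        }
        /// Swap in a new payload, keeping the file id (used as `src.with_value(it)` below).
        pub fn with_value<U>(&self, value: U) -> Source<U> {
            Source { file_id: self.file_id, value }
        }
        /// Transform the payload, keeping the file id.
        pub fn map<U, F: FnOnce(T) -> U>(self, f: F) -> Source<U> {
            Source { file_id: self.file_id, value: f(self.value) }
        }
    }
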
@@ -2,3 +2,4 @@
xtask = "run --package xtask --bin xtask --"
install-ra = "run --package xtask --bin xtask -- install" # for backwards compat
tq = "test -- -q"
qt = "tq"

@@ -58,6 +58,7 @@ https://rust-lang.zulipchat.com/#narrow/stream/185405-t-compiler.2Frls-2.2E0

* API docs: https://rust-analyzer.github.io/rust-analyzer/ra_ide_api/

## License

Rust analyzer is primarily distributed under the terms of both the MIT

@@ -174,7 +174,7 @@ fn resolve_target_trait_def(
.path()?;

match analyzer.resolve_path(db, &ast_path) {
Some(hir::PathResolution::Def(hir::ModuleDef::Trait(def))) => Some(def.source(db).ast),
Some(hir::PathResolution::Def(hir::ModuleDef::Trait(def))) => Some(def.source(db).value),
_ => None,
}
}

@@ -141,7 +141,7 @@ fn find_struct_impl(
})?;

let struct_ty = {
let src = hir::Source { file_id: ctx.frange.file_id.into(), ast: strukt.clone() };
let src = hir::Source { file_id: ctx.frange.file_id.into(), value: strukt.clone() };
hir::Struct::from_source(db, src).unwrap().ty(db)
};

@@ -152,7 +152,7 @@ fn find_struct_impl(
return false;
}

let src = hir::Source { file_id: ctx.frange.file_id.into(), ast: impl_blk.clone() };
let src = hir::Source { file_id: ctx.frange.file_id.into(), value: impl_blk.clone() };
let blk = hir::ImplBlock::from_source(db, src).unwrap();

let same_ty = blk.target_ty(db) == struct_ty;

@@ -84,7 +84,7 @@ fn resolve_enum_def(
let expr_ty = analyzer.type_of(db, &expr)?;

analyzer.autoderef(db, expr_ty).find_map(|ty| match ty.as_adt() {
Some((Adt::Enum(e), _)) => Some(e.source(db).ast),
Some((Adt::Enum(e), _)) => Some(e.source(db).value),
_ => None,
})
}

@@ -66,8 +66,8 @@ fn build_match_expr(

fn format_arm(block: &ast::BlockExpr) -> String {
match extract_trivial_expression(block) {
None => block.syntax().text().to_string(),
Some(e) => format!("{},", e.syntax().text()),
Some(e) if !e.syntax().text().contains_char('\n') => format!("{},", e.syntax().text()),
_ => block.syntax().text().to_string(),
}
}
@@ -102,6 +102,34 @@ impl VariantData {
)
}

#[test]
fn test_replace_if_let_with_match_doesnt_unwrap_multiline_expressions() {
check_assist(
replace_if_let_with_match,
"
fn foo() {
if <|>let VariantData::Struct(..) = a {
bar(
123
)
} else {
false
}
} ",
"
fn foo() {
<|>match a {
VariantData::Struct(..) => {
bar(
123
)
}
_ => false,
}
} ",
)
}

#[test]
fn replace_if_let_with_match_target() {
check_assist_target(

@@ -98,7 +98,7 @@ pub fn run(
let src = f.source(db);
let original_file = src.file_id.original_file(db);
let path = db.file_relative_path(original_file);
let syntax_range = src.ast.syntax().text_range();
let syntax_range = src.value.syntax().text_range();
write!(msg, " ({:?} {})", path, syntax_range).unwrap();
}
bar.set_message(&msg);
@@ -135,7 +135,7 @@ pub fn run(
let path = db.file_relative_path(original_file);
let line_index = host.analysis().file_line_index(original_file).unwrap();
let text_range = src
.ast
.value
.either(|it| it.syntax().text_range(), |it| it.syntax().text_range());
let (start, end) = (
line_index.line_col(text_range.start()),

@@ -38,9 +38,6 @@ fn prev_tokens(token: SyntaxToken) -> impl Iterator<Item = SyntaxToken> {
pub fn extract_trivial_expression(expr: &ast::BlockExpr) -> Option<ast::Expr> {
let block = expr.block()?;
let expr = block.expr()?;
if expr.syntax().text().contains_char('\n') {
return None;
}
let non_trivial_children = block.syntax().children().filter(|it| match it.kind() {
WHITESPACE | T!['{'] | T!['}'] => false,
_ => it != expr.syntax(),

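In this hunk the single-line check is dropped from `extract_trivial_expression` itself; as the `format_arm` hunk earlier in this diff shows, callers that still care now apply the newline test themselves. A hedged sketch of what such a caller could look like, assuming the surrounding ra_syntax context; the helper name `single_line_trivial_expression` is illustrative and not part of the codebase:

    fn single_line_trivial_expression(block: &ast::BlockExpr) -> Option<ast::Expr> {
        // Same contract the old function had: only return the trivial
        // expression when it fits on a single line.
        let expr = extract_trivial_expression(block)?;
        if expr.syntax().text().contains_char('\n') {
            return None;
        }
        Some(expr)
    }
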
@@ -1,54 +0,0 @@
//! This module contains the implementation details of the HIR for ADTs, i.e.
//! structs and enums (and unions).

use std::sync::Arc;

use hir_def::adt::VariantData;

use crate::{
db::{DefDatabase, HirDatabase},
EnumVariant, Module, Name, Struct, StructField,
};

impl Struct {
pub(crate) fn variant_data(self, db: &impl DefDatabase) -> Arc<VariantData> {
db.struct_data(self.id.into()).variant_data.clone()
}
}

#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum VariantDef {
Struct(Struct),
EnumVariant(EnumVariant),
}
impl_froms!(VariantDef: Struct, EnumVariant);

impl VariantDef {
pub fn fields(self, db: &impl HirDatabase) -> Vec<StructField> {
match self {
VariantDef::Struct(it) => it.fields(db),
VariantDef::EnumVariant(it) => it.fields(db),
}
}

pub fn field(self, db: &impl HirDatabase, name: &Name) -> Option<StructField> {
match self {
VariantDef::Struct(it) => it.field(db, name),
VariantDef::EnumVariant(it) => it.field(db, name),
}
}

pub fn module(self, db: &impl HirDatabase) -> Module {
match self {
VariantDef::Struct(it) => it.module(db),
VariantDef::EnumVariant(it) => it.module(db),
}
}

pub(crate) fn variant_data(self, db: &impl DefDatabase) -> Arc<VariantData> {
match self {
VariantDef::Struct(it) => it.variant_data(db),
VariantDef::EnumVariant(it) => it.variant_data(db),
}
}
}

@ -10,8 +10,10 @@ use hir_def::{
|
|||
adt::VariantData,
|
||||
body::scope::ExprScopes,
|
||||
builtin_type::BuiltinType,
|
||||
traits::TraitData,
|
||||
type_ref::{Mutability, TypeRef},
|
||||
CrateModuleId, ImplId, LocalEnumVariantId, LocalStructFieldId, ModuleId, UnionId,
|
||||
AssocItemId, ContainerId, CrateModuleId, HasModule, ImplId, LocalEnumVariantId,
|
||||
LocalStructFieldId, Lookup, ModuleId, UnionId,
|
||||
};
|
||||
use hir_expand::{
|
||||
diagnostics::DiagnosticSink,
|
||||
|
@ -21,7 +23,6 @@ use ra_db::{CrateId, Edition};
|
|||
use ra_syntax::ast::{self, NameOwner, TypeAscriptionOwner};
|
||||
|
||||
use crate::{
|
||||
adt::VariantDef,
|
||||
db::{AstDatabase, DefDatabase, HirDatabase},
|
||||
expr::{BindingAnnotation, Body, BodySourceMap, ExprValidator, Pat, PatId},
|
||||
generics::{GenericDef, HasGenericParams},
|
||||
|
@ -29,8 +30,7 @@ use crate::{
|
|||
AstItemDef, ConstId, EnumId, FunctionId, MacroDefId, StaticId, StructId, TraitId,
|
||||
TypeAliasId,
|
||||
},
|
||||
resolve::{Resolver, Scope, TypeNs},
|
||||
traits::TraitData,
|
||||
resolve::{HasResolver, TypeNs},
|
||||
ty::{InferenceResult, Namespace, TraitRef},
|
||||
Either, HasSource, ImportId, Name, ScopeDef, Source, Ty,
|
||||
};
|
||||
|
@ -139,7 +139,7 @@ impl Module {
|
|||
) -> Either<ast::UseTree, ast::ExternCrateItem> {
|
||||
let src = self.definition_source(db);
|
||||
let (_, source_map) = db.raw_items_with_source_map(src.file_id);
|
||||
source_map.get(&src.ast, import)
|
||||
source_map.get(&src.value, import)
|
||||
}
|
||||
|
||||
/// Returns the crate this module is part of.
|
||||
|
@ -206,7 +206,7 @@ impl Module {
|
|||
crate::ModuleDef::Function(f) => f.diagnostics(db, sink),
|
||||
crate::ModuleDef::Module(m) => {
|
||||
// Only add diagnostics from inline modules
|
||||
if let ModuleSource::Module(_) = m.definition_source(db).ast {
|
||||
if let ModuleSource::Module(_) = m.definition_source(db).value {
|
||||
m.diagnostics(db, sink)
|
||||
}
|
||||
}
|
||||
|
@ -223,22 +223,9 @@ impl Module {
|
|||
}
|
||||
}
|
||||
|
||||
pub(crate) fn resolver(self, db: &impl DefDatabase) -> Resolver {
|
||||
let def_map = db.crate_def_map(self.id.krate);
|
||||
Resolver::default().push_module_scope(def_map, self.id.module_id)
|
||||
}
|
||||
|
||||
pub fn declarations(self, db: &impl DefDatabase) -> Vec<ModuleDef> {
|
||||
let def_map = db.crate_def_map(self.id.krate);
|
||||
def_map[self.id.module_id]
|
||||
.scope
|
||||
.entries()
|
||||
.filter_map(|(_name, res)| if res.import.is_none() { Some(res.def) } else { None })
|
||||
.flat_map(|per_ns| {
|
||||
per_ns.take_types().into_iter().chain(per_ns.take_values().into_iter())
|
||||
})
|
||||
.map(ModuleDef::from)
|
||||
.collect()
|
||||
def_map[self.id.module_id].scope.declarations().map(ModuleDef::from).collect()
|
||||
}
|
||||
|
||||
pub fn impl_blocks(self, db: &impl DefDatabase) -> Vec<ImplBlock> {
|
||||
|
@ -323,15 +310,8 @@ impl Struct {
|
|||
db.type_for_def(self.into(), Namespace::Values)
|
||||
}
|
||||
|
||||
// FIXME move to a more general type
|
||||
/// Builds a resolver for type references inside this struct.
|
||||
pub(crate) fn resolver(self, db: &impl HirDatabase) -> Resolver {
|
||||
// take the outer scope...
|
||||
let r = self.module(db).resolver(db);
|
||||
// ...and add generic params, if present
|
||||
let p = self.generic_params(db);
|
||||
let r = if !p.params.is_empty() { r.push_generic_params_scope(p) } else { r };
|
||||
r
|
||||
fn variant_data(self, db: &impl DefDatabase) -> Arc<VariantData> {
|
||||
db.struct_data(self.id.into()).variant_data.clone()
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -345,24 +325,13 @@ impl Union {
|
|||
db.struct_data(self.id.into()).name.clone()
|
||||
}
|
||||
|
||||
pub fn module(self, db: &impl HirDatabase) -> Module {
|
||||
pub fn module(self, db: &impl DefDatabase) -> Module {
|
||||
Module { id: self.id.0.module(db) }
|
||||
}
|
||||
|
||||
pub fn ty(self, db: &impl HirDatabase) -> Ty {
|
||||
db.type_for_def(self.into(), Namespace::Types)
|
||||
}
|
||||
|
||||
// FIXME move to a more general type
|
||||
/// Builds a resolver for type references inside this union.
|
||||
pub(crate) fn resolver(self, db: &impl HirDatabase) -> Resolver {
|
||||
// take the outer scope...
|
||||
let r = self.module(db).resolver(db);
|
||||
// ...and add generic params, if present
|
||||
let p = self.generic_params(db);
|
||||
let r = if !p.params.is_empty() { r.push_generic_params_scope(p) } else { r };
|
||||
r
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
|
@ -402,17 +371,6 @@ impl Enum {
|
|||
pub fn ty(self, db: &impl HirDatabase) -> Ty {
|
||||
db.type_for_def(self.into(), Namespace::Types)
|
||||
}
|
||||
|
||||
// FIXME: move to a more general type
|
||||
/// Builds a resolver for type references inside this struct.
|
||||
pub(crate) fn resolver(self, db: &impl HirDatabase) -> Resolver {
|
||||
// take the outer scope...
|
||||
let r = self.module(db).resolver(db);
|
||||
// ...and add generic params, if present
|
||||
let p = self.generic_params(db);
|
||||
let r = if !p.params.is_empty() { r.push_generic_params_scope(p) } else { r };
|
||||
r.push_scope(Scope::AdtScope(self.into()))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
|
@ -474,22 +432,52 @@ impl Adt {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn krate(self, db: &impl HirDatabase) -> Option<Crate> {
|
||||
Some(
|
||||
match self {
|
||||
Adt::Struct(s) => s.module(db),
|
||||
Adt::Union(s) => s.module(db),
|
||||
Adt::Enum(e) => e.module(db),
|
||||
}
|
||||
.krate(),
|
||||
)
|
||||
pub fn module(self, db: &impl DefDatabase) -> Module {
|
||||
match self {
|
||||
Adt::Struct(s) => s.module(db),
|
||||
Adt::Union(s) => s.module(db),
|
||||
Adt::Enum(e) => e.module(db),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn resolver(self, db: &impl HirDatabase) -> Resolver {
|
||||
pub fn krate(self, db: &impl HirDatabase) -> Option<Crate> {
|
||||
Some(self.module(db).krate())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
|
||||
pub enum VariantDef {
|
||||
Struct(Struct),
|
||||
EnumVariant(EnumVariant),
|
||||
}
|
||||
impl_froms!(VariantDef: Struct, EnumVariant);
|
||||
|
||||
impl VariantDef {
|
||||
pub fn fields(self, db: &impl HirDatabase) -> Vec<StructField> {
|
||||
match self {
|
||||
Adt::Struct(it) => it.resolver(db),
|
||||
Adt::Union(it) => it.resolver(db),
|
||||
Adt::Enum(it) => it.resolver(db),
|
||||
VariantDef::Struct(it) => it.fields(db),
|
||||
VariantDef::EnumVariant(it) => it.fields(db),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn field(self, db: &impl HirDatabase, name: &Name) -> Option<StructField> {
|
||||
match self {
|
||||
VariantDef::Struct(it) => it.field(db, name),
|
||||
VariantDef::EnumVariant(it) => it.field(db, name),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn module(self, db: &impl HirDatabase) -> Module {
|
||||
match self {
|
||||
VariantDef::Struct(it) => it.module(db),
|
||||
VariantDef::EnumVariant(it) => it.module(db),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn variant_data(self, db: &impl DefDatabase) -> Arc<VariantData> {
|
||||
match self {
|
||||
VariantDef::Struct(it) => it.variant_data(db),
|
||||
VariantDef::EnumVariant(it) => it.variant_data(db),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -505,15 +493,6 @@ pub enum DefWithBody {
|
|||
impl_froms!(DefWithBody: Function, Const, Static);
|
||||
|
||||
impl DefWithBody {
|
||||
/// Builds a resolver for code inside this item.
|
||||
pub(crate) fn resolver(self, db: &impl HirDatabase) -> Resolver {
|
||||
match self {
|
||||
DefWithBody::Const(c) => c.resolver(db),
|
||||
DefWithBody::Function(f) => f.resolver(db),
|
||||
DefWithBody::Static(s) => s.resolver(db),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn krate(self, db: &impl HirDatabase) -> Option<Crate> {
|
||||
match self {
|
||||
DefWithBody::Const(c) => c.krate(db),
|
||||
|
@ -598,10 +577,10 @@ impl FnData {
|
|||
func: Function,
|
||||
) -> Arc<FnData> {
|
||||
let src = func.source(db);
|
||||
let name = src.ast.name().map(|n| n.as_name()).unwrap_or_else(Name::missing);
|
||||
let name = src.value.name().map(|n| n.as_name()).unwrap_or_else(Name::missing);
|
||||
let mut params = Vec::new();
|
||||
let mut has_self_param = false;
|
||||
if let Some(param_list) = src.ast.param_list() {
|
||||
if let Some(param_list) = src.value.param_list() {
|
||||
if let Some(self_param) = param_list.self_param() {
|
||||
let self_type = if let Some(type_ref) = self_param.ascribed_type() {
|
||||
TypeRef::from_ast(type_ref)
|
||||
|
@ -625,7 +604,7 @@ impl FnData {
|
|||
params.push(type_ref);
|
||||
}
|
||||
}
|
||||
let ret_type = if let Some(type_ref) = src.ast.ret_type().and_then(|rt| rt.type_ref()) {
|
||||
let ret_type = if let Some(type_ref) = src.value.ret_type().and_then(|rt| rt.type_ref()) {
|
||||
TypeRef::from_ast(type_ref)
|
||||
} else {
|
||||
TypeRef::unit()
|
||||
|
@ -655,7 +634,7 @@ impl FnData {
|
|||
|
||||
impl Function {
|
||||
pub fn module(self, db: &impl DefDatabase) -> Module {
|
||||
Module { id: self.id.module(db) }
|
||||
self.id.lookup(db).module(db).into()
|
||||
}
|
||||
|
||||
pub fn krate(self, db: &impl DefDatabase) -> Option<Crate> {
|
||||
|
@ -688,33 +667,26 @@ impl Function {
|
|||
|
||||
/// The containing impl block, if this is a method.
|
||||
pub fn impl_block(self, db: &impl DefDatabase) -> Option<ImplBlock> {
|
||||
ImplBlock::containing(db, self.into())
|
||||
match self.container(db) {
|
||||
Some(Container::ImplBlock(it)) => Some(it),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// The containing trait, if this is a trait method definition.
|
||||
pub fn parent_trait(self, db: &impl DefDatabase) -> Option<Trait> {
|
||||
db.trait_items_index(self.module(db)).get_parent_trait(self.into())
|
||||
}
|
||||
|
||||
pub fn container(self, db: &impl DefDatabase) -> Option<Container> {
|
||||
if let Some(impl_block) = self.impl_block(db) {
|
||||
Some(impl_block.into())
|
||||
} else if let Some(trait_) = self.parent_trait(db) {
|
||||
Some(trait_.into())
|
||||
} else {
|
||||
None
|
||||
match self.container(db) {
|
||||
Some(Container::Trait(it)) => Some(it),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
// FIXME: move to a more general type for 'body-having' items
|
||||
/// Builds a resolver for code inside this item.
|
||||
pub(crate) fn resolver(self, db: &impl HirDatabase) -> Resolver {
|
||||
// take the outer scope...
|
||||
let r = self.container(db).map_or_else(|| self.module(db).resolver(db), |c| c.resolver(db));
|
||||
// ...and add generic params, if present
|
||||
let p = self.generic_params(db);
|
||||
let r = if !p.params.is_empty() { r.push_generic_params_scope(p) } else { r };
|
||||
r
|
||||
pub fn container(self, db: &impl DefDatabase) -> Option<Container> {
|
||||
match self.id.lookup(db).container {
|
||||
ContainerId::TraitId(it) => Some(Container::Trait(it.into())),
|
||||
ContainerId::ImplId(it) => Some(Container::ImplBlock(it.into())),
|
||||
ContainerId::ModuleId(_) => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn diagnostics(self, db: &impl HirDatabase, sink: &mut DiagnosticSink) {
|
||||
|
@ -732,7 +704,7 @@ pub struct Const {
|
|||
|
||||
impl Const {
|
||||
pub fn module(self, db: &impl DefDatabase) -> Module {
|
||||
Module { id: self.id.module(db) }
|
||||
Module { id: self.id.lookup(db).module(db) }
|
||||
}
|
||||
|
||||
pub fn krate(self, db: &impl DefDatabase) -> Option<Crate> {
|
||||
|
@ -751,34 +723,28 @@ impl Const {
|
|||
db.infer(self.into())
|
||||
}
|
||||
|
||||
/// The containing impl block, if this is a method.
|
||||
/// The containing impl block, if this is a type alias.
|
||||
pub fn impl_block(self, db: &impl DefDatabase) -> Option<ImplBlock> {
|
||||
ImplBlock::containing(db, self.into())
|
||||
}
|
||||
|
||||
pub fn parent_trait(self, db: &impl DefDatabase) -> Option<Trait> {
|
||||
db.trait_items_index(self.module(db)).get_parent_trait(self.into())
|
||||
}
|
||||
|
||||
pub fn container(self, db: &impl DefDatabase) -> Option<Container> {
|
||||
if let Some(impl_block) = self.impl_block(db) {
|
||||
Some(impl_block.into())
|
||||
} else if let Some(trait_) = self.parent_trait(db) {
|
||||
Some(trait_.into())
|
||||
} else {
|
||||
None
|
||||
match self.container(db) {
|
||||
Some(Container::ImplBlock(it)) => Some(it),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
// FIXME: move to a more general type for 'body-having' items
|
||||
/// Builds a resolver for code inside this item.
|
||||
pub(crate) fn resolver(self, db: &impl HirDatabase) -> Resolver {
|
||||
// take the outer scope...
|
||||
let r = self
|
||||
.impl_block(db)
|
||||
.map(|ib| ib.resolver(db))
|
||||
.unwrap_or_else(|| self.module(db).resolver(db));
|
||||
r
|
||||
/// The containing trait, if this is a trait type alias definition.
|
||||
pub fn parent_trait(self, db: &impl DefDatabase) -> Option<Trait> {
|
||||
match self.container(db) {
|
||||
Some(Container::Trait(it)) => Some(it),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn container(self, db: &impl DefDatabase) -> Option<Container> {
|
||||
match self.id.lookup(db).container {
|
||||
ContainerId::TraitId(it) => Some(Container::Trait(it.into())),
|
||||
ContainerId::ImplId(it) => Some(Container::ImplBlock(it.into())),
|
||||
ContainerId::ModuleId(_) => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -801,7 +767,7 @@ impl ConstData {
|
|||
db: &(impl DefDatabase + AstDatabase),
|
||||
konst: Const,
|
||||
) -> Arc<ConstData> {
|
||||
let node = konst.source(db).ast;
|
||||
let node = konst.source(db).value;
|
||||
const_data_for(&node)
|
||||
}
|
||||
|
||||
|
@ -809,7 +775,7 @@ impl ConstData {
|
|||
db: &(impl DefDatabase + AstDatabase),
|
||||
konst: Static,
|
||||
) -> Arc<ConstData> {
|
||||
let node = konst.source(db).ast;
|
||||
let node = konst.source(db).value;
|
||||
const_data_for(&node)
|
||||
}
|
||||
}
|
||||
|
@ -839,12 +805,6 @@ impl Static {
|
|||
db.static_data(self)
|
||||
}
|
||||
|
||||
/// Builds a resolver for code inside this item.
|
||||
pub(crate) fn resolver(self, db: &impl HirDatabase) -> Resolver {
|
||||
// take the outer scope...
|
||||
self.module(db).resolver(db)
|
||||
}
|
||||
|
||||
pub fn infer(self, db: &impl HirDatabase) -> Arc<InferenceResult> {
|
||||
db.infer(self.into())
|
||||
}
|
||||
|
@ -861,11 +821,11 @@ impl Trait {
|
|||
}
|
||||
|
||||
pub fn name(self, db: &impl DefDatabase) -> Option<Name> {
|
||||
self.trait_data(db).name().clone()
|
||||
self.trait_data(db).name.clone()
|
||||
}
|
||||
|
||||
pub fn items(self, db: &impl DefDatabase) -> Vec<AssocItem> {
|
||||
self.trait_data(db).items().to_vec()
|
||||
self.trait_data(db).items.iter().map(|it| (*it).into()).collect()
|
||||
}
|
||||
|
||||
fn direct_super_traits(self, db: &impl HirDatabase) -> Vec<Trait> {
|
||||
|
@ -912,10 +872,10 @@ impl Trait {
|
|||
pub fn associated_type_by_name(self, db: &impl DefDatabase, name: &Name) -> Option<TypeAlias> {
|
||||
let trait_data = self.trait_data(db);
|
||||
trait_data
|
||||
.items()
|
||||
.items
|
||||
.iter()
|
||||
.filter_map(|item| match item {
|
||||
AssocItem::TypeAlias(t) => Some(*t),
|
||||
AssocItemId::TypeAliasId(t) => Some(TypeAlias::from(*t)),
|
||||
_ => None,
|
||||
})
|
||||
.find(|t| &t.name(db) == name)
|
||||
|
@ -930,7 +890,7 @@ impl Trait {
|
|||
}
|
||||
|
||||
pub(crate) fn trait_data(self, db: &impl DefDatabase) -> Arc<TraitData> {
|
||||
db.trait_data(self)
|
||||
db.trait_data(self.id)
|
||||
}
|
||||
|
||||
pub fn trait_ref(self, db: &impl HirDatabase) -> TraitRef {
|
||||
|
@ -938,15 +898,7 @@ impl Trait {
|
|||
}
|
||||
|
||||
pub fn is_auto(self, db: &impl DefDatabase) -> bool {
|
||||
self.trait_data(db).is_auto()
|
||||
}
|
||||
|
||||
pub(crate) fn resolver(self, db: &impl DefDatabase) -> Resolver {
|
||||
let r = self.module(db).resolver(db);
|
||||
// add generic params, if present
|
||||
let p = self.generic_params(db);
|
||||
let r = if !p.params.is_empty() { r.push_generic_params_scope(p) } else { r };
|
||||
r
|
||||
self.trait_data(db).auto
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -957,30 +909,34 @@ pub struct TypeAlias {
|
|||
|
||||
impl TypeAlias {
|
||||
pub fn module(self, db: &impl DefDatabase) -> Module {
|
||||
Module { id: self.id.module(db) }
|
||||
Module { id: self.id.lookup(db).module(db) }
|
||||
}
|
||||
|
||||
pub fn krate(self, db: &impl DefDatabase) -> Option<Crate> {
|
||||
Some(self.module(db).krate())
|
||||
}
|
||||
|
||||
/// The containing impl block, if this is a method.
|
||||
/// The containing impl block, if this is a type alias.
|
||||
pub fn impl_block(self, db: &impl DefDatabase) -> Option<ImplBlock> {
|
||||
ImplBlock::containing(db, self.into())
|
||||
match self.container(db) {
|
||||
Some(Container::ImplBlock(it)) => Some(it),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// The containing trait, if this is a trait method definition.
|
||||
/// The containing trait, if this is a trait type alias definition.
|
||||
pub fn parent_trait(self, db: &impl DefDatabase) -> Option<Trait> {
|
||||
db.trait_items_index(self.module(db)).get_parent_trait(self.into())
|
||||
match self.container(db) {
|
||||
Some(Container::Trait(it)) => Some(it),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn container(self, db: &impl DefDatabase) -> Option<Container> {
|
||||
if let Some(impl_block) = self.impl_block(db) {
|
||||
Some(impl_block.into())
|
||||
} else if let Some(trait_) = self.parent_trait(db) {
|
||||
Some(trait_.into())
|
||||
} else {
|
||||
None
|
||||
match self.id.lookup(db).container {
|
||||
ContainerId::TraitId(it) => Some(Container::Trait(it.into())),
|
||||
ContainerId::ImplId(it) => Some(Container::ImplBlock(it.into())),
|
||||
ContainerId::ModuleId(_) => None,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -995,19 +951,6 @@ impl TypeAlias {
|
|||
pub fn name(self, db: &impl DefDatabase) -> Name {
|
||||
db.type_alias_data(self).name.clone()
|
||||
}
|
||||
|
||||
/// Builds a resolver for the type references in this type alias.
|
||||
pub(crate) fn resolver(self, db: &impl HirDatabase) -> Resolver {
|
||||
// take the outer scope...
|
||||
let r = self
|
||||
.impl_block(db)
|
||||
.map(|ib| ib.resolver(db))
|
||||
.unwrap_or_else(|| self.module(db).resolver(db));
|
||||
// ...and add generic params, if present
|
||||
let p = self.generic_params(db);
|
||||
let r = if !p.params.is_empty() { r.push_generic_params_scope(p) } else { r };
|
||||
r
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
|
@ -1023,15 +966,6 @@ pub enum Container {
|
|||
}
|
||||
impl_froms!(Container: Trait, ImplBlock);
|
||||
|
||||
impl Container {
|
||||
pub(crate) fn resolver(self, db: &impl DefDatabase) -> Resolver {
|
||||
match self {
|
||||
Container::Trait(trait_) => trait_.resolver(db),
|
||||
Container::ImplBlock(impl_block) => impl_block.resolver(db),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
|
||||
pub enum AssocItem {
|
||||
Function(Function),
|
||||
|
|
|
@ -49,9 +49,9 @@ pub(crate) fn attributes_query(
|
|||
AttrDef::Module(it) => {
|
||||
let src = it.declaration_source(db)?;
|
||||
let hygiene = Hygiene::new(db, src.file_id);
|
||||
Attr::from_attrs_owner(&src.ast, &hygiene)
|
||||
Attr::from_attrs_owner(&src.value, &hygiene)
|
||||
}
|
||||
AttrDef::StructField(it) => match it.source(db).ast {
|
||||
AttrDef::StructField(it) => match it.source(db).value {
|
||||
FieldSource::Named(named) => {
|
||||
let src = it.source(db);
|
||||
let hygiene = Hygiene::new(db, src.file_id);
|
||||
|
@ -82,7 +82,7 @@ where
|
|||
{
|
||||
let src = node.source(db);
|
||||
let hygiene = Hygiene::new(db, src.file_id);
|
||||
Attr::from_attrs_owner(&src.ast, &hygiene)
|
||||
Attr::from_attrs_owner(&src.value, &hygiene)
|
||||
}
|
||||
|
||||
impl<T: Into<AttrDef> + Copy> Attrs for T {
|
||||
|
|
|
@ -70,23 +70,23 @@ pub(crate) fn documentation_query(
|
|||
def: DocDef,
|
||||
) -> Option<Documentation> {
|
||||
match def {
|
||||
DocDef::Module(it) => docs_from_ast(&it.declaration_source(db)?.ast),
|
||||
DocDef::StructField(it) => match it.source(db).ast {
|
||||
DocDef::Module(it) => docs_from_ast(&it.declaration_source(db)?.value),
|
||||
DocDef::StructField(it) => match it.source(db).value {
|
||||
FieldSource::Named(named) => docs_from_ast(&named),
|
||||
FieldSource::Pos(..) => None,
|
||||
},
|
||||
DocDef::Adt(it) => match it {
|
||||
Adt::Struct(it) => docs_from_ast(&it.source(db).ast),
|
||||
Adt::Enum(it) => docs_from_ast(&it.source(db).ast),
|
||||
Adt::Union(it) => docs_from_ast(&it.source(db).ast),
|
||||
Adt::Struct(it) => docs_from_ast(&it.source(db).value),
|
||||
Adt::Enum(it) => docs_from_ast(&it.source(db).value),
|
||||
Adt::Union(it) => docs_from_ast(&it.source(db).value),
|
||||
},
|
||||
DocDef::EnumVariant(it) => docs_from_ast(&it.source(db).ast),
|
||||
DocDef::Static(it) => docs_from_ast(&it.source(db).ast),
|
||||
DocDef::Const(it) => docs_from_ast(&it.source(db).ast),
|
||||
DocDef::Function(it) => docs_from_ast(&it.source(db).ast),
|
||||
DocDef::Trait(it) => docs_from_ast(&it.source(db).ast),
|
||||
DocDef::TypeAlias(it) => docs_from_ast(&it.source(db).ast),
|
||||
DocDef::MacroDef(it) => docs_from_ast(&it.source(db).ast),
|
||||
DocDef::EnumVariant(it) => docs_from_ast(&it.source(db).value),
|
||||
DocDef::Static(it) => docs_from_ast(&it.source(db).value),
|
||||
DocDef::Const(it) => docs_from_ast(&it.source(db).value),
|
||||
DocDef::Function(it) => docs_from_ast(&it.source(db).value),
|
||||
DocDef::Trait(it) => docs_from_ast(&it.source(db).value),
|
||||
DocDef::TypeAlias(it) => docs_from_ast(&it.source(db).value),
|
||||
DocDef::MacroDef(it) => docs_from_ast(&it.source(db).value),
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -1,13 +1,13 @@
|
|||
//! FIXME: write short doc here
|
||||
|
||||
use hir_def::{HasSource as _, Lookup};
|
||||
use ra_syntax::ast::{self, AstNode};
|
||||
|
||||
use crate::{
|
||||
adt::VariantDef,
|
||||
db::{AstDatabase, DefDatabase, HirDatabase},
|
||||
ids::AstItemDef,
|
||||
Const, Either, Enum, EnumVariant, FieldSource, Function, HasBody, HirFileId, MacroDef, Module,
|
||||
ModuleSource, Static, Struct, StructField, Trait, TypeAlias, Union,
|
||||
ModuleSource, Static, Struct, StructField, Trait, TypeAlias, Union, VariantDef,
|
||||
};
|
||||
|
||||
pub use hir_expand::Source;
|
||||
|
@ -25,9 +25,9 @@ impl Module {
|
|||
let def_map = db.crate_def_map(self.id.krate);
|
||||
let decl_id = def_map[self.id.module_id].declaration;
|
||||
let file_id = def_map[self.id.module_id].definition;
|
||||
let ast = ModuleSource::new(db, file_id, decl_id);
|
||||
let value = ModuleSource::new(db, file_id, decl_id);
|
||||
let file_id = file_id.map(HirFileId::from).unwrap_or_else(|| decl_id.unwrap().file_id());
|
||||
Source { file_id, ast }
|
||||
Source { file_id, value }
|
||||
}
|
||||
|
||||
/// Returns a node which declares this module, either a `mod foo;` or a `mod foo {}`.
|
||||
|
@ -38,8 +38,8 @@ impl Module {
|
|||
) -> Option<Source<ast::Module>> {
|
||||
let def_map = db.crate_def_map(self.id.krate);
|
||||
let decl = def_map[self.id.module_id].declaration?;
|
||||
let ast = decl.to_node(db);
|
||||
Some(Source { file_id: decl.file_id(), ast })
|
||||
let value = decl.to_node(db);
|
||||
Some(Source { file_id: decl.file_id(), value })
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -53,11 +53,11 @@ impl HasSource for StructField {
|
|||
let (file_id, struct_kind) = match self.parent {
|
||||
VariantDef::Struct(s) => {
|
||||
ss = s.source(db);
|
||||
(ss.file_id, ss.ast.kind())
|
||||
(ss.file_id, ss.value.kind())
|
||||
}
|
||||
VariantDef::EnumVariant(e) => {
|
||||
es = e.source(db);
|
||||
(es.file_id, es.ast.kind())
|
||||
(es.file_id, es.value.kind())
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -66,13 +66,13 @@ impl HasSource for StructField {
|
|||
ast::StructKind::Named(fl) => fl.fields().map(|it| FieldSource::Named(it)).collect(),
|
||||
ast::StructKind::Unit => Vec::new(),
|
||||
};
|
||||
let ast = field_sources
|
||||
let value = field_sources
|
||||
.into_iter()
|
||||
.zip(fields.iter())
|
||||
.find(|(_syntax, (id, _))| *id == self.id)
|
||||
.unwrap()
|
||||
.0;
|
||||
Source { file_id, ast }
|
||||
Source { file_id, value }
|
||||
}
|
||||
}
|
||||
impl HasSource for Struct {
|
||||
|
@ -98,8 +98,8 @@ impl HasSource for EnumVariant {
|
|||
fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::EnumVariant> {
|
||||
let enum_data = db.enum_data(self.parent.id);
|
||||
let src = self.parent.id.source(db);
|
||||
let ast = src
|
||||
.ast
|
||||
let value = src
|
||||
.value
|
||||
.variant_list()
|
||||
.into_iter()
|
||||
.flat_map(|it| it.variants())
|
||||
|
@ -107,19 +107,19 @@ impl HasSource for EnumVariant {
|
|||
.find(|(_syntax, (id, _))| *id == self.id)
|
||||
.unwrap()
|
||||
.0;
|
||||
Source { file_id: src.file_id, ast }
|
||||
Source { file_id: src.file_id, value }
|
||||
}
|
||||
}
|
||||
impl HasSource for Function {
|
||||
type Ast = ast::FnDef;
|
||||
fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::FnDef> {
|
||||
self.id.source(db)
|
||||
self.id.lookup(db).source(db)
|
||||
}
|
||||
}
|
||||
impl HasSource for Const {
|
||||
type Ast = ast::ConstDef;
|
||||
fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::ConstDef> {
|
||||
self.id.source(db)
|
||||
self.id.lookup(db).source(db)
|
||||
}
|
||||
}
|
||||
impl HasSource for Static {
|
||||
|
@ -137,13 +137,13 @@ impl HasSource for Trait {
|
|||
impl HasSource for TypeAlias {
|
||||
type Ast = ast::TypeAliasDef;
|
||||
fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::TypeAliasDef> {
|
||||
self.id.source(db)
|
||||
self.id.lookup(db).source(db)
|
||||
}
|
||||
}
|
||||
impl HasSource for MacroDef {
|
||||
type Ast = ast::MacroCall;
|
||||
fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::MacroCall> {
|
||||
Source { file_id: self.id.ast_id.file_id(), ast: self.id.ast_id.to_node(db) }
|
||||
Source { file_id: self.id.ast_id.file_id(), value: self.id.ast_id.to_node(db) }
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -8,10 +8,9 @@ use ra_syntax::SmolStr;
|
|||
|
||||
use crate::{
|
||||
debug::HirDebugDatabase,
|
||||
generics::{GenericDef, GenericParams},
|
||||
generics::GenericDef,
|
||||
ids,
|
||||
lang_item::{LangItemTarget, LangItems},
|
||||
traits::TraitData,
|
||||
ty::{
|
||||
method_resolution::CrateImplBlocks,
|
||||
traits::{AssocTyValue, Impl},
|
||||
|
@ -25,8 +24,9 @@ use crate::{
|
|||
|
||||
pub use hir_def::db::{
|
||||
BodyQuery, BodyWithSourceMapQuery, CrateDefMapQuery, DefDatabase2, DefDatabase2Storage,
|
||||
EnumDataQuery, ExprScopesQuery, ImplDataQuery, InternDatabase, InternDatabaseStorage,
|
||||
RawItemsQuery, RawItemsWithSourceMapQuery, StructDataQuery,
|
||||
EnumDataQuery, ExprScopesQuery, GenericParamsQuery, ImplDataQuery, InternDatabase,
|
||||
InternDatabaseStorage, RawItemsQuery, RawItemsWithSourceMapQuery, StructDataQuery,
|
||||
TraitDataQuery,
|
||||
};
|
||||
pub use hir_expand::db::{
|
||||
AstDatabase, AstDatabaseStorage, AstIdMapQuery, MacroArgQuery, MacroDefQuery, MacroExpandQuery,
|
||||
|
@ -37,15 +37,6 @@ pub use hir_expand::db::{
|
|||
#[salsa::query_group(DefDatabaseStorage)]
|
||||
#[salsa::requires(AstDatabase)]
|
||||
pub trait DefDatabase: HirDebugDatabase + DefDatabase2 {
|
||||
#[salsa::invoke(crate::traits::TraitData::trait_data_query)]
|
||||
fn trait_data(&self, t: Trait) -> Arc<TraitData>;
|
||||
|
||||
#[salsa::invoke(crate::traits::TraitItemsIndex::trait_items_index)]
|
||||
fn trait_items_index(&self, module: Module) -> crate::traits::TraitItemsIndex;
|
||||
|
||||
#[salsa::invoke(crate::generics::GenericParams::generic_params_query)]
|
||||
fn generic_params(&self, def: GenericDef) -> Arc<GenericParams>;
|
||||
|
||||
#[salsa::invoke(FnData::fn_data_query)]
|
||||
fn fn_data(&self, func: Function) -> Arc<FnData>;
|
||||
|
||||
|
|
|
@ -21,7 +21,7 @@ impl Diagnostic for NoSuchField {
|
|||
}
|
||||
|
||||
fn source(&self) -> Source<SyntaxNodePtr> {
|
||||
Source { file_id: self.file, ast: self.field.into() }
|
||||
Source { file_id: self.file, value: self.field.into() }
|
||||
}
|
||||
|
||||
fn as_any(&self) -> &(dyn Any + Send + 'static) {
|
||||
|
@ -41,7 +41,7 @@ impl Diagnostic for MissingFields {
|
|||
"fill structure fields".to_string()
|
||||
}
|
||||
fn source(&self) -> Source<SyntaxNodePtr> {
|
||||
Source { file_id: self.file, ast: self.field_list.into() }
|
||||
Source { file_id: self.file, value: self.field_list.into() }
|
||||
}
|
||||
fn as_any(&self) -> &(dyn Any + Send + 'static) {
|
||||
self
|
||||
|
@ -53,7 +53,7 @@ impl AstDiagnostic for MissingFields {
|
|||
|
||||
fn ast(&self, db: &impl AstDatabase) -> Self::AST {
|
||||
let root = db.parse_or_expand(self.source().file_id).unwrap();
|
||||
let node = self.source().ast.to_node(&root);
|
||||
let node = self.source().value.to_node(&root);
|
||||
ast::RecordFieldList::cast(node).unwrap()
|
||||
}
|
||||
}
|
||||
|
@ -69,7 +69,7 @@ impl Diagnostic for MissingOkInTailExpr {
|
|||
"wrap return expression in Ok".to_string()
|
||||
}
|
||||
fn source(&self) -> Source<SyntaxNodePtr> {
|
||||
Source { file_id: self.file, ast: self.expr.into() }
|
||||
Source { file_id: self.file, value: self.expr.into() }
|
||||
}
|
||||
fn as_any(&self) -> &(dyn Any + Send + 'static) {
|
||||
self
|
||||
|
@ -81,7 +81,7 @@ impl AstDiagnostic for MissingOkInTailExpr {
|
|||
|
||||
fn ast(&self, db: &impl AstDatabase) -> Self::AST {
|
||||
let root = db.parse_or_expand(self.file).unwrap();
|
||||
let node = self.source().ast.to_node(&root);
|
||||
let node = self.source().value.to_node(&root);
|
||||
ast::Expr::cast(node).unwrap()
|
||||
}
|
||||
}
|
||||
|
|
|
@ -11,6 +11,7 @@ use rustc_hash::FxHashSet;
|
|||
use crate::{
|
||||
db::HirDatabase,
|
||||
diagnostics::{MissingFields, MissingOkInTailExpr},
|
||||
resolve::HasResolver,
|
||||
ty::{ApplicationTy, InferenceResult, Ty, TypeCtor},
|
||||
Adt, DefWithBody, Function, HasBody, Name, Path, Resolver,
|
||||
};
|
||||
|
@ -116,7 +117,7 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
|
|||
let source_map = self.func.body_source_map(db);
|
||||
|
||||
if let Some(source_ptr) = source_map.expr_syntax(id) {
|
||||
if let Some(expr) = source_ptr.ast.a() {
|
||||
if let Some(expr) = source_ptr.value.a() {
|
||||
let root = source_ptr.file_syntax(db);
|
||||
if let ast::Expr::RecordLit(record_lit) = expr.to_node(&root) {
|
||||
if let Some(field_list) = record_lit.record_field_list() {
|
||||
|
@ -161,7 +162,7 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
|
|||
let source_map = self.func.body_source_map(db);
|
||||
|
||||
if let Some(source_ptr) = source_map.expr_syntax(id) {
|
||||
if let Some(expr) = source_ptr.ast.a() {
|
||||
if let Some(expr) = source_ptr.value.a() {
|
||||
self.sink.push(MissingOkInTailExpr { file: source_ptr.file_id, expr });
|
||||
}
|
||||
}
|
||||
|
|
|
@ -3,9 +3,9 @@
|
|||
//! It's unclear if we need this long-term, but it's definitelly useful while we
|
||||
//! are splitting the hir.
|
||||
|
||||
use hir_def::{AdtId, AssocItemId, DefWithBodyId, EnumVariantId, ModuleDefId};
|
||||
use hir_def::{AdtId, AssocItemId, DefWithBodyId, EnumVariantId, GenericDefId, ModuleDefId};
|
||||
|
||||
use crate::{Adt, AssocItem, DefWithBody, EnumVariant, ModuleDef};
|
||||
use crate::{Adt, AssocItem, DefWithBody, EnumVariant, GenericDef, ModuleDef};
|
||||
|
||||
macro_rules! from_id {
|
||||
($(($id:path, $ty:path)),*) => {$(
|
||||
|
@ -41,6 +41,16 @@ impl From<AdtId> for Adt {
|
|||
}
|
||||
}
|
||||
|
||||
impl From<Adt> for AdtId {
|
||||
fn from(id: Adt) -> Self {
|
||||
match id {
|
||||
Adt::Struct(it) => AdtId::StructId(it.id),
|
||||
Adt::Union(it) => AdtId::UnionId(it.id),
|
||||
Adt::Enum(it) => AdtId::EnumId(it.id),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<EnumVariantId> for EnumVariant {
|
||||
fn from(id: EnumVariantId) -> Self {
|
||||
EnumVariant { parent: id.parent.into(), id: id.local_id }
|
||||
|
@ -82,3 +92,33 @@ impl From<AssocItemId> for AssocItem {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<GenericDef> for GenericDefId {
|
||||
fn from(def: GenericDef) -> Self {
|
||||
match def {
|
||||
GenericDef::Function(it) => GenericDefId::FunctionId(it.id),
|
||||
GenericDef::Adt(it) => GenericDefId::AdtId(it.into()),
|
||||
GenericDef::Trait(it) => GenericDefId::TraitId(it.id),
|
||||
GenericDef::TypeAlias(it) => GenericDefId::TypeAliasId(it.id),
|
||||
GenericDef::ImplBlock(it) => GenericDefId::ImplId(it.id),
|
||||
GenericDef::EnumVariant(it) => {
|
||||
GenericDefId::EnumVariantId(EnumVariantId { parent: it.parent.id, local_id: it.id })
|
||||
}
|
||||
GenericDef::Const(it) => GenericDefId::ConstId(it.id),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<GenericDefId> for GenericDef {
|
||||
fn from(def: GenericDefId) -> Self {
|
||||
match def {
|
||||
GenericDefId::FunctionId(it) => GenericDef::Function(it.into()),
|
||||
GenericDefId::AdtId(it) => GenericDef::Adt(it.into()),
|
||||
GenericDefId::TraitId(it) => GenericDef::Trait(it.into()),
|
||||
GenericDefId::TypeAliasId(it) => GenericDef::TypeAlias(it.into()),
|
||||
GenericDefId::ImplId(it) => GenericDef::ImplBlock(it.into()),
|
||||
GenericDefId::EnumVariantId(it) => GenericDef::EnumVariant(it.into()),
|
||||
GenericDefId::ConstId(it) => GenericDef::Const(it.into()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -4,15 +4,15 @@ use hir_def::{ModuleId, StructId, StructOrUnionId, UnionId};
|
|||
use hir_expand::{name::AsName, AstId, MacroDefId, MacroDefKind};
|
||||
use ra_syntax::{
|
||||
ast::{self, AstNode, NameOwner},
|
||||
match_ast,
|
||||
match_ast, AstPtr, SyntaxNode,
|
||||
};
|
||||
|
||||
use crate::{
|
||||
db::{AstDatabase, DefDatabase, HirDatabase},
|
||||
ids::{AstItemDef, LocationCtx},
|
||||
Const, DefWithBody, Enum, EnumVariant, FieldSource, Function, HasBody, HasSource, ImplBlock,
|
||||
Local, MacroDef, Module, ModuleSource, Source, Static, Struct, StructField, Trait, TypeAlias,
|
||||
Union, VariantDef,
|
||||
AssocItem, Const, DefWithBody, Enum, EnumVariant, FieldSource, Function, HasBody, HasSource,
|
||||
ImplBlock, Local, MacroDef, Module, ModuleDef, ModuleSource, Source, Static, Struct,
|
||||
StructField, Trait, TypeAlias, Union, VariantDef,
|
||||
};
|
||||
|
||||
pub trait FromSource: Sized {
|
||||
|
@ -52,15 +52,54 @@ impl FromSource for Trait {
|
|||
impl FromSource for Function {
|
||||
type Ast = ast::FnDef;
|
||||
fn from_source(db: &(impl DefDatabase + AstDatabase), src: Source<Self::Ast>) -> Option<Self> {
|
||||
let id = from_source(db, src)?;
|
||||
Some(Function { id })
|
||||
let items = match Container::find(db, src.as_ref().map(|it| it.syntax()))? {
|
||||
Container::Trait(it) => it.items(db),
|
||||
Container::ImplBlock(it) => it.items(db),
|
||||
Container::Module(m) => {
|
||||
return m
|
||||
.declarations(db)
|
||||
.into_iter()
|
||||
.filter_map(|it| match it {
|
||||
ModuleDef::Function(it) => Some(it),
|
||||
_ => None,
|
||||
})
|
||||
.find(|it| same_source(&it.source(db), &src))
|
||||
}
|
||||
};
|
||||
items
|
||||
.into_iter()
|
||||
.filter_map(|it| match it {
|
||||
AssocItem::Function(it) => Some(it),
|
||||
_ => None,
|
||||
})
|
||||
.find(|it| same_source(&it.source(db), &src))
|
||||
}
|
||||
}
|
||||
|
||||
impl FromSource for Const {
|
||||
type Ast = ast::ConstDef;
|
||||
fn from_source(db: &(impl DefDatabase + AstDatabase), src: Source<Self::Ast>) -> Option<Self> {
|
||||
let id = from_source(db, src)?;
|
||||
Some(Const { id })
|
||||
let items = match Container::find(db, src.as_ref().map(|it| it.syntax()))? {
|
||||
Container::Trait(it) => it.items(db),
|
||||
Container::ImplBlock(it) => it.items(db),
|
||||
Container::Module(m) => {
|
||||
return m
|
||||
.declarations(db)
|
||||
.into_iter()
|
||||
.filter_map(|it| match it {
|
||||
ModuleDef::Const(it) => Some(it),
|
||||
_ => None,
|
||||
})
|
||||
.find(|it| same_source(&it.source(db), &src))
|
||||
}
|
||||
};
|
||||
items
|
||||
.into_iter()
|
||||
.filter_map(|it| match it {
|
||||
AssocItem::Const(it) => Some(it),
|
||||
_ => None,
|
||||
})
|
||||
.find(|it| same_source(&it.source(db), &src))
|
||||
}
|
||||
}
|
||||
impl FromSource for Static {
|
||||
|
@ -73,8 +112,27 @@ impl FromSource for Static {
|
|||
impl FromSource for TypeAlias {
|
||||
type Ast = ast::TypeAliasDef;
|
||||
fn from_source(db: &(impl DefDatabase + AstDatabase), src: Source<Self::Ast>) -> Option<Self> {
|
||||
let id = from_source(db, src)?;
|
||||
Some(TypeAlias { id })
|
||||
let items = match Container::find(db, src.as_ref().map(|it| it.syntax()))? {
|
||||
Container::Trait(it) => it.items(db),
|
||||
Container::ImplBlock(it) => it.items(db),
|
||||
Container::Module(m) => {
|
||||
return m
|
||||
.declarations(db)
|
||||
.into_iter()
|
||||
.filter_map(|it| match it {
|
||||
ModuleDef::TypeAlias(it) => Some(it),
|
||||
_ => None,
|
||||
})
|
||||
.find(|it| same_source(&it.source(db), &src))
|
||||
}
|
||||
};
|
||||
items
|
||||
.into_iter()
|
||||
.filter_map(|it| match it {
|
||||
AssocItem::TypeAlias(it) => Some(it),
|
||||
_ => None,
|
||||
})
|
||||
.find(|it| same_source(&it.source(db), &src))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -87,7 +145,7 @@ impl FromSource for MacroDef {
|
|||
let module = Module::from_definition(db, Source::new(src.file_id, module_src))?;
|
||||
let krate = module.krate().crate_id();
|
||||
|
||||
let ast_id = AstId::new(src.file_id, db.ast_id_map(src.file_id).ast_id(&src.ast));
|
||||
let ast_id = AstId::new(src.file_id, db.ast_id_map(src.file_id).ast_id(&src.value));
|
||||
|
||||
let id: MacroDefId = MacroDefId { krate, ast_id, kind };
|
||||
Some(MacroDef { id })
|
||||
|
@ -105,26 +163,26 @@ impl FromSource for ImplBlock {
|
|||
impl FromSource for EnumVariant {
|
||||
type Ast = ast::EnumVariant;
|
||||
fn from_source(db: &(impl DefDatabase + AstDatabase), src: Source<Self::Ast>) -> Option<Self> {
|
||||
let parent_enum = src.ast.parent_enum();
|
||||
let src_enum = Source { file_id: src.file_id, ast: parent_enum };
|
||||
let parent_enum = src.value.parent_enum();
|
||||
let src_enum = Source { file_id: src.file_id, value: parent_enum };
|
||||
let variants = Enum::from_source(db, src_enum)?.variants(db);
|
||||
variants.into_iter().find(|v| v.source(db) == src)
|
||||
variants.into_iter().find(|v| same_source(&v.source(db), &src))
|
||||
}
|
||||
}
|
||||
|
||||
impl FromSource for StructField {
|
||||
type Ast = FieldSource;
|
||||
fn from_source(db: &(impl DefDatabase + AstDatabase), src: Source<Self::Ast>) -> Option<Self> {
|
||||
let variant_def: VariantDef = match src.ast {
|
||||
let variant_def: VariantDef = match src.value {
|
||||
FieldSource::Named(ref field) => {
|
||||
let ast = field.syntax().ancestors().find_map(ast::StructDef::cast)?;
|
||||
let src = Source { file_id: src.file_id, ast };
|
||||
let value = field.syntax().ancestors().find_map(ast::StructDef::cast)?;
|
||||
let src = Source { file_id: src.file_id, value };
|
||||
let def = Struct::from_source(db, src)?;
|
||||
VariantDef::from(def)
|
||||
}
|
||||
FieldSource::Pos(ref field) => {
|
||||
let ast = field.syntax().ancestors().find_map(ast::EnumVariant::cast)?;
|
||||
let src = Source { file_id: src.file_id, ast };
|
||||
let value = field.syntax().ancestors().find_map(ast::EnumVariant::cast)?;
|
||||
let src = Source { file_id: src.file_id, value };
|
||||
let def = EnumVariant::from_source(db, src)?;
|
||||
VariantDef::from(def)
|
||||
}
|
||||
|
@ -142,12 +200,12 @@ impl FromSource for StructField {
|
|||
impl Local {
|
||||
pub fn from_source(db: &impl HirDatabase, src: Source<ast::BindPat>) -> Option<Self> {
|
||||
let file_id = src.file_id;
|
||||
let parent: DefWithBody = src.ast.syntax().ancestors().find_map(|it| {
|
||||
let parent: DefWithBody = src.value.syntax().ancestors().find_map(|it| {
|
||||
let res = match_ast! {
|
||||
match it {
|
||||
ast::ConstDef(ast) => { Const::from_source(db, Source { ast, file_id})?.into() },
|
||||
ast::StaticDef(ast) => { Static::from_source(db, Source { ast, file_id})?.into() },
|
||||
ast::FnDef(ast) => { Function::from_source(db, Source { ast, file_id})?.into() },
|
||||
ast::ConstDef(value) => { Const::from_source(db, Source { value, file_id})?.into() },
|
||||
ast::StaticDef(value) => { Static::from_source(db, Source { value, file_id})?.into() },
|
||||
ast::FnDef(value) => { Function::from_source(db, Source { value, file_id})?.into() },
|
||||
_ => return None,
|
||||
}
|
||||
};
|
||||
|
@ -162,33 +220,33 @@ impl Local {
|
|||
|
||||
impl Module {
|
||||
pub fn from_declaration(db: &impl DefDatabase, src: Source<ast::Module>) -> Option<Self> {
|
||||
let parent_declaration = src.ast.syntax().ancestors().skip(1).find_map(ast::Module::cast);
|
||||
let parent_declaration = src.value.syntax().ancestors().skip(1).find_map(ast::Module::cast);
|
||||
|
||||
let parent_module = match parent_declaration {
|
||||
Some(parent_declaration) => {
|
||||
let src_parent = Source { file_id: src.file_id, ast: parent_declaration };
|
||||
let src_parent = Source { file_id: src.file_id, value: parent_declaration };
|
||||
Module::from_declaration(db, src_parent)
|
||||
}
|
||||
_ => {
|
||||
let src_parent = Source {
|
||||
file_id: src.file_id,
|
||||
ast: ModuleSource::new(db, Some(src.file_id.original_file(db)), None),
|
||||
value: ModuleSource::new(db, Some(src.file_id.original_file(db)), None),
|
||||
};
|
||||
Module::from_definition(db, src_parent)
|
||||
}
|
||||
}?;
|
||||
|
||||
let child_name = src.ast.name()?;
|
||||
let child_name = src.value.name()?;
|
||||
parent_module.child(db, &child_name.as_name())
|
||||
}
|
||||
|
||||
pub fn from_definition(db: &impl DefDatabase, src: Source<ModuleSource>) -> Option<Self> {
|
||||
match src.ast {
|
||||
match src.value {
|
||||
ModuleSource::Module(ref module) => {
|
||||
assert!(!module.has_semi());
|
||||
return Module::from_declaration(
|
||||
db,
|
||||
Source { file_id: src.file_id, ast: module.clone() },
|
||||
Source { file_id: src.file_id, value: module.clone() },
|
||||
);
|
||||
}
|
||||
ModuleSource::SourceFile(_) => (),
|
||||
|
@ -214,5 +272,47 @@ where
|
|||
let module_src = ModuleSource::from_child_node(db, src.as_ref().map(|it| it.syntax()));
|
||||
let module = Module::from_definition(db, Source::new(src.file_id, module_src))?;
|
||||
let ctx = LocationCtx::new(db, module.id, src.file_id);
|
||||
Some(DEF::from_ast(ctx, &src.ast))
|
||||
Some(DEF::from_ast(ctx, &src.value))
|
||||
}
|
||||
|
||||
enum Container {
|
||||
Trait(Trait),
|
||||
ImplBlock(ImplBlock),
|
||||
Module(Module),
|
||||
}
|
||||
|
||||
impl Container {
|
||||
fn find(db: &impl DefDatabase, src: Source<&SyntaxNode>) -> Option<Container> {
|
||||
// FIXME: this doesn't try to handle nested declarations
|
||||
for container in src.value.ancestors() {
|
||||
let res = match_ast! {
|
||||
match container {
|
||||
ast::TraitDef(it) => {
|
||||
let c = Trait::from_source(db, src.with_value(it))?;
|
||||
Container::Trait(c)
|
||||
},
|
||||
ast::ImplBlock(it) => {
|
||||
let c = ImplBlock::from_source(db, src.with_value(it))?;
|
||||
Container::ImplBlock(c)
|
||||
},
|
||||
_ => { continue },
|
||||
}
|
||||
};
|
||||
return Some(res);
|
||||
}
|
||||
|
||||
let module_source = ModuleSource::from_child_node(db, src);
|
||||
let c = Module::from_definition(db, src.with_value(module_source))?;
|
||||
Some(Container::Module(c))
|
||||
}
|
||||
}
|
||||
|
||||
/// XXX: AST Nodes and SyntaxNodes have identity equality semantics: nodes are
|
||||
/// equal if they point to exactly the same object.
|
||||
///
|
||||
/// In general, we do not guarantee that we have exactly one instance of a
|
||||
/// syntax tree for each file. We probably should add such guarantee, but, for
|
||||
/// the time being, we will use identity-less AstPtr comparison.
|
||||
fn same_source<N: AstNode>(s1: &Source<N>, s2: &Source<N>) -> bool {
|
||||
s1.as_ref().map(AstPtr::new) == s2.as_ref().map(AstPtr::new)
|
||||
}
|
||||
|
|
|
@ -1,50 +1,12 @@
|
|||
//! Many kinds of items or constructs can have generic parameters: functions,
|
||||
//! structs, impls, traits, etc. This module provides a common HIR for these
|
||||
//! generic parameters. See also the `Generics` type and the `generics_of` query
|
||||
//! in rustc.
|
||||
|
||||
//! Temp module to wrap hir_def::generics
|
||||
use std::sync::Arc;
|
||||
|
||||
use hir_def::{
|
||||
path::Path,
|
||||
type_ref::{TypeBound, TypeRef},
|
||||
};
|
||||
use hir_expand::name::{self, AsName};
|
||||
use ra_syntax::ast::{self, DefaultTypeParamOwner, NameOwner, TypeBoundsOwner, TypeParamsOwner};
|
||||
|
||||
use crate::{
|
||||
db::{AstDatabase, DefDatabase, HirDatabase},
|
||||
Adt, Const, Container, Enum, EnumVariant, Function, HasSource, ImplBlock, Name, Struct, Trait,
|
||||
db::DefDatabase, Adt, Const, Container, Enum, EnumVariant, Function, ImplBlock, Struct, Trait,
|
||||
TypeAlias, Union,
|
||||
};
|
||||
|
||||
/// Data about a generic parameter (to a function, struct, impl, ...).
|
||||
#[derive(Clone, PartialEq, Eq, Debug)]
|
||||
pub struct GenericParam {
|
||||
// FIXME: give generic params proper IDs
|
||||
pub idx: u32,
|
||||
pub name: Name,
|
||||
pub default: Option<Path>,
|
||||
}
|
||||
|
||||
/// Data about the generic parameters of a function, struct, impl, etc.
|
||||
#[derive(Clone, PartialEq, Eq, Debug)]
|
||||
pub struct GenericParams {
|
||||
pub(crate) def: GenericDef,
|
||||
pub(crate) parent_params: Option<Arc<GenericParams>>,
|
||||
pub(crate) params: Vec<GenericParam>,
|
||||
pub(crate) where_predicates: Vec<WherePredicate>,
|
||||
}
|
||||
|
||||
/// A single predicate from a where clause, i.e. `where Type: Trait`. Combined
|
||||
/// where clauses like `where T: Foo + Bar` are turned into multiple of these.
|
||||
/// It might still result in multiple actual predicates though, because of
|
||||
/// associated type bindings like `Iterator<Item = u32>`.
|
||||
#[derive(Clone, PartialEq, Eq, Debug)]
|
||||
pub struct WherePredicate {
|
||||
pub(crate) type_ref: TypeRef,
|
||||
pub(crate) bound: TypeBound,
|
||||
}
|
||||
pub use hir_def::generics::{GenericParam, GenericParams, WherePredicate};
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)]
|
||||
pub enum GenericDef {
|
||||
|
@ -69,150 +31,6 @@ impl_froms!(
|
|||
Const
|
||||
);
|
||||
|
||||
impl GenericParams {
|
||||
pub(crate) fn generic_params_query(
|
||||
db: &(impl DefDatabase + AstDatabase),
|
||||
def: GenericDef,
|
||||
) -> Arc<GenericParams> {
|
||||
let parent = match def {
|
||||
GenericDef::Function(it) => it.container(db).map(GenericDef::from),
|
||||
GenericDef::TypeAlias(it) => it.container(db).map(GenericDef::from),
|
||||
GenericDef::Const(it) => it.container(db).map(GenericDef::from),
|
||||
GenericDef::EnumVariant(it) => Some(it.parent_enum(db).into()),
|
||||
GenericDef::Adt(_) | GenericDef::Trait(_) => None,
|
||||
GenericDef::ImplBlock(_) => None,
|
||||
};
|
||||
let mut generics = GenericParams {
def,
params: Vec::new(),
parent_params: parent.map(|p| db.generic_params(p)),
where_predicates: Vec::new(),
};
let start = generics.parent_params.as_ref().map(|p| p.params.len()).unwrap_or(0) as u32;
// FIXME: add `: Sized` bound for everything except for `Self` in traits
match def {
GenericDef::Function(it) => generics.fill(&it.source(db).ast, start),
GenericDef::Adt(Adt::Struct(it)) => generics.fill(&it.source(db).ast, start),
GenericDef::Adt(Adt::Union(it)) => generics.fill(&it.source(db).ast, start),
GenericDef::Adt(Adt::Enum(it)) => generics.fill(&it.source(db).ast, start),
GenericDef::Trait(it) => {
// traits get the Self type as an implicit first type parameter
generics.params.push(GenericParam {
idx: start,
name: name::SELF_TYPE,
default: None,
});
generics.fill(&it.source(db).ast, start + 1);
// add super traits as bounds on Self
// i.e., trait Foo: Bar is equivalent to trait Foo where Self: Bar
let self_param = TypeRef::Path(name::SELF_TYPE.into());
generics.fill_bounds(&it.source(db).ast, self_param);
}
GenericDef::TypeAlias(it) => generics.fill(&it.source(db).ast, start),
// Note that we don't add `Self` here: in `impl`s, `Self` is not a
// type-parameter, but rather is a type-alias for impl's target
// type, so this is handled by the resolver.
GenericDef::ImplBlock(it) => generics.fill(&it.source(db).ast, start),
GenericDef::EnumVariant(_) | GenericDef::Const(_) => {}
}
Arc::new(generics)
}
fn fill(&mut self, node: &impl TypeParamsOwner, start: u32) {
if let Some(params) = node.type_param_list() {
self.fill_params(params, start)
}
if let Some(where_clause) = node.where_clause() {
self.fill_where_predicates(where_clause);
}
}
fn fill_bounds(&mut self, node: &impl ast::TypeBoundsOwner, type_ref: TypeRef) {
for bound in
node.type_bound_list().iter().flat_map(|type_bound_list| type_bound_list.bounds())
{
self.add_where_predicate_from_bound(bound, type_ref.clone());
}
}
fn fill_params(&mut self, params: ast::TypeParamList, start: u32) {
for (idx, type_param) in params.type_params().enumerate() {
let name = type_param.name().map_or_else(Name::missing, |it| it.as_name());
// FIXME: Use `Path::from_src`
let default = type_param.default_type().and_then(|t| t.path()).and_then(Path::from_ast);
let param = GenericParam { idx: idx as u32 + start, name: name.clone(), default };
self.params.push(param);
let type_ref = TypeRef::Path(name.into());
self.fill_bounds(&type_param, type_ref);
}
}
fn fill_where_predicates(&mut self, where_clause: ast::WhereClause) {
for pred in where_clause.predicates() {
let type_ref = match pred.type_ref() {
Some(type_ref) => type_ref,
None => continue,
};
let type_ref = TypeRef::from_ast(type_ref);
for bound in pred.type_bound_list().iter().flat_map(|l| l.bounds()) {
self.add_where_predicate_from_bound(bound, type_ref.clone());
}
}
}
fn add_where_predicate_from_bound(&mut self, bound: ast::TypeBound, type_ref: TypeRef) {
if bound.has_question_mark() {
// FIXME: remove this bound
return;
}
let bound = TypeBound::from_ast(bound);
self.where_predicates.push(WherePredicate { type_ref, bound });
}
pub(crate) fn find_by_name(&self, name: &Name) -> Option<&GenericParam> {
self.params.iter().find(|p| &p.name == name)
}
pub fn count_parent_params(&self) -> usize {
self.parent_params.as_ref().map(|p| p.count_params_including_parent()).unwrap_or(0)
}
pub fn count_params_including_parent(&self) -> usize {
let parent_count = self.count_parent_params();
parent_count + self.params.len()
}
fn for_each_param<'a>(&'a self, f: &mut impl FnMut(&'a GenericParam)) {
if let Some(parent) = &self.parent_params {
parent.for_each_param(f);
}
self.params.iter().for_each(f);
}
pub fn params_including_parent(&self) -> Vec<&GenericParam> {
let mut vec = Vec::with_capacity(self.count_params_including_parent());
self.for_each_param(&mut |p| vec.push(p));
vec
}
}
impl GenericDef {
pub(crate) fn resolver(&self, db: &impl HirDatabase) -> crate::Resolver {
match self {
GenericDef::Function(inner) => inner.resolver(db),
GenericDef::Adt(adt) => adt.resolver(db),
GenericDef::Trait(inner) => inner.resolver(db),
GenericDef::TypeAlias(inner) => inner.resolver(db),
GenericDef::ImplBlock(inner) => inner.resolver(db),
GenericDef::EnumVariant(inner) => inner.parent_enum(db).resolver(db),
GenericDef::Const(inner) => inner.resolver(db),
}
}
}
impl From<Container> for GenericDef {
fn from(c: Container) -> Self {
match c {

@@ -231,6 +49,6 @@ where
T: Into<GenericDef> + Copy,
{
fn generic_params(self, db: &impl DefDatabase) -> Arc<GenericParams> {
db.generic_params(self.into())
db.generic_params(self.into().into())
}
}
@@ -5,8 +5,7 @@ use ra_syntax::ast::{self};
use crate::{
db::{AstDatabase, DefDatabase, HirDatabase},
generics::HasGenericParams,
resolve::Resolver,
resolve::HasResolver,
ty::Ty,
AssocItem, Crate, HasSource, ImplBlock, Module, Source, TraitRef,
};

@@ -19,14 +18,6 @@ impl HasSource for ImplBlock {
}
impl ImplBlock {
pub(crate) fn containing(db: &impl DefDatabase, item: AssocItem) -> Option<ImplBlock> {
let module = item.module(db);
let crate_def_map = db.crate_def_map(module.id.krate);
crate_def_map[module.id.module_id].impls.iter().copied().map(ImplBlock::from).find(|it| {
db.impl_data(it.id).items().iter().copied().map(AssocItem::from).any(|it| it == item)
})
}
pub fn target_trait(&self, db: &impl DefDatabase) -> Option<TypeRef> {
db.impl_data(self.id).target_trait().cloned()
}

@@ -59,13 +50,4 @@ impl ImplBlock {
pub fn krate(&self, db: &impl DefDatabase) -> Crate {
Crate { crate_id: self.module(db).id.krate }
}
pub(crate) fn resolver(&self, db: &impl DefDatabase) -> Resolver {
let r = self.module(db).resolver(db);
// add generic params, if present
let p = self.generic_params(db);
let r = if !p.params.is_empty() { r.push_generic_params_scope(p) } else { r };
let r = r.push_impl_block_scope(self.clone());
r
}
}
@@ -97,7 +97,7 @@ impl LangItems {
// Look for impl targets
for impl_block in module.impl_blocks(db) {
let src = impl_block.source(db);
if let Some(lang_item_name) = lang_item_name(&src.ast) {
if let Some(lang_item_name) = lang_item_name(&src.value) {
self.items
.entry(lang_item_name)
.or_insert_with(|| LangItemTarget::ImplBlock(impl_block));

@@ -144,7 +144,7 @@ impl LangItems {
T: Copy + HasSource<Ast = N>,
N: AttrsOwner,
{
let node = item.source(db).ast;
let node = item.source(db).value;
if let Some(lang_item_name) = lang_item_name(&node) {
self.items.entry(lang_item_name).or_insert_with(|| constructor(item));
}

@@ -32,8 +32,6 @@ pub mod db;
pub mod source_binder;
mod ids;
mod adt;
mod traits;
mod type_alias;
mod ty;
mod impl_block;

@@ -57,15 +55,14 @@ mod marks;
use crate::resolve::Resolver;
pub use crate::{
adt::VariantDef,
code_model::ImplBlock,
code_model::{
attrs::{AttrDef, Attrs},
docs::{DocDef, Docs, Documentation},
src::{HasBodySource, HasSource},
Adt, AssocItem, Const, ConstData, Container, Crate, CrateDependency, DefWithBody, Enum,
EnumVariant, FieldSource, FnData, Function, GenericParam, HasBody, Local, MacroDef, Module,
ModuleDef, ModuleSource, Static, Struct, StructField, Trait, TypeAlias, Union,
EnumVariant, FieldSource, FnData, Function, GenericParam, HasBody, ImplBlock, Local,
MacroDef, Module, ModuleDef, ModuleSource, Static, Struct, StructField, Trait, TypeAlias,
Union, VariantDef,
},
expr::ExprScopes,
from_source::FromSource,
@@ -14,9 +14,9 @@ use crate::{
code_model::Crate,
db::{DefDatabase, HirDatabase},
expr::{ExprScopes, PatId, ScopeId},
generics::GenericParams,
Adt, Const, DefWithBody, Enum, EnumVariant, Function, ImplBlock, Local, MacroDef, ModuleDef,
PerNs, Static, Struct, Trait, TypeAlias,
generics::{GenericParams, HasGenericParams},
Adt, Const, Container, DefWithBody, Enum, EnumVariant, Function, GenericDef, ImplBlock, Local,
MacroDef, Module, ModuleDef, PerNs, Static, Struct, Trait, TypeAlias,
};
#[derive(Debug, Clone, Default)]

@@ -43,7 +43,7 @@ pub(crate) enum Scope {
/// All the items and imported names of a module
ModuleScope(ModuleItemMap),
/// Brings the generic parameters of an item into scope
GenericParams(Arc<GenericParams>),
GenericParams { def: GenericDef, params: Arc<GenericParams> },
/// Brings `Self` in `impl` block into scope
ImplBlockScope(ImplBlock),
/// Brings `Self` in enum, struct and union definitions into scope

@@ -141,9 +141,9 @@ impl Resolver {
for scope in self.scopes.iter().rev() {
match scope {
Scope::ExprScope(_) => continue,
Scope::GenericParams(_) | Scope::ImplBlockScope(_) if skip_to_mod => continue,
Scope::GenericParams { .. } | Scope::ImplBlockScope(_) if skip_to_mod => continue,
Scope::GenericParams(params) => {
Scope::GenericParams { params, .. } => {
if let Some(param) = params.find_by_name(first_name) {
let idx = if path.segments.len() == 1 { None } else { Some(1) };
return Some((TypeNs::GenericParam(param.idx), idx));

@@ -212,7 +212,7 @@ impl Resolver {
match scope {
Scope::AdtScope(_)
| Scope::ExprScope(_)
| Scope::GenericParams(_)
| Scope::GenericParams { .. }
| Scope::ImplBlockScope(_)
if skip_to_mod =>
{

@@ -232,13 +232,13 @@ impl Resolver {
}
Scope::ExprScope(_) => continue,
Scope::GenericParams(params) if n_segments > 1 => {
Scope::GenericParams { params, .. } if n_segments > 1 => {
if let Some(param) = params.find_by_name(first_name) {
let ty = TypeNs::GenericParam(param.idx);
return Some(ResolveValueResult::Partial(ty, 1));
}
}
Scope::GenericParams(_) => continue,
Scope::GenericParams { .. } => continue,
Scope::ImplBlockScope(impl_) if n_segments > 1 => {
if first_name == &name::SELF_TYPE {

@@ -361,7 +361,7 @@ impl Resolver {
self.scopes
.iter()
.filter_map(|scope| match scope {
Scope::GenericParams(params) => Some(params),
Scope::GenericParams { params, .. } => Some(params),
_ => None,
})
.flat_map(|params| params.where_predicates.iter())

@@ -369,7 +369,7 @@ impl Resolver {
pub(crate) fn generic_def(&self) -> Option<crate::generics::GenericDef> {
self.scopes.iter().find_map(|scope| match scope {
Scope::GenericParams(params) => Some(params.def),
Scope::GenericParams { def, .. } => Some(*def),
_ => None,
})
}

@@ -381,8 +381,17 @@ impl Resolver {
self
}
pub(crate) fn push_generic_params_scope(self, params: Arc<GenericParams>) -> Resolver {
self.push_scope(Scope::GenericParams(params))
pub(crate) fn push_generic_params_scope(
self,
db: &impl DefDatabase,
def: GenericDef,
) -> Resolver {
let params = def.generic_params(db);
if params.params.is_empty() {
self
} else {
self.push_scope(Scope::GenericParams { def, params })
}
}
pub(crate) fn push_impl_block_scope(self, impl_block: ImplBlock) -> Resolver {

@@ -457,8 +466,8 @@ impl Scope {
});
}
}
Scope::GenericParams(gp) => {
for param in &gp.params {
Scope::GenericParams { params, .. } => {
for param in params.params.iter() {
f(param.name.clone(), ScopeDef::GenericParam(param.idx))
}
}

@@ -477,3 +486,103 @@ impl Scope {
}
}
}
pub(crate) trait HasResolver {
/// Builds a resolver for type references inside this def.
fn resolver(self, db: &impl DefDatabase) -> Resolver;
}
impl HasResolver for Module {
fn resolver(self, db: &impl DefDatabase) -> Resolver {
let def_map = db.crate_def_map(self.id.krate);
Resolver::default().push_module_scope(def_map, self.id.module_id)
}
}
impl HasResolver for Trait {
fn resolver(self, db: &impl DefDatabase) -> Resolver {
self.module(db).resolver(db).push_generic_params_scope(db, self.into())
}
}
impl<T: Into<Adt>> HasResolver for T {
fn resolver(self, db: &impl DefDatabase) -> Resolver {
let def = self.into();
def.module(db)
.resolver(db)
.push_generic_params_scope(db, def.into())
.push_scope(Scope::AdtScope(def))
}
}
impl HasResolver for Function {
fn resolver(self, db: &impl DefDatabase) -> Resolver {
self.container(db)
.map(|c| c.resolver(db))
.unwrap_or_else(|| self.module(db).resolver(db))
.push_generic_params_scope(db, self.into())
}
}
impl HasResolver for DefWithBody {
fn resolver(self, db: &impl DefDatabase) -> Resolver {
match self {
DefWithBody::Const(c) => c.resolver(db),
DefWithBody::Function(f) => f.resolver(db),
DefWithBody::Static(s) => s.resolver(db),
}
}
}
impl HasResolver for Const {
fn resolver(self, db: &impl DefDatabase) -> Resolver {
self.container(db).map(|c| c.resolver(db)).unwrap_or_else(|| self.module(db).resolver(db))
}
}
impl HasResolver for Static {
fn resolver(self, db: &impl DefDatabase) -> Resolver {
self.module(db).resolver(db)
}
}
impl HasResolver for TypeAlias {
fn resolver(self, db: &impl DefDatabase) -> Resolver {
self.container(db)
.map(|ib| ib.resolver(db))
.unwrap_or_else(|| self.module(db).resolver(db))
.push_generic_params_scope(db, self.into())
}
}
impl HasResolver for Container {
fn resolver(self, db: &impl DefDatabase) -> Resolver {
match self {
Container::Trait(trait_) => trait_.resolver(db),
Container::ImplBlock(impl_block) => impl_block.resolver(db),
}
}
}
impl HasResolver for GenericDef {
fn resolver(self, db: &impl DefDatabase) -> crate::Resolver {
match self {
GenericDef::Function(inner) => inner.resolver(db),
GenericDef::Adt(adt) => adt.resolver(db),
GenericDef::Trait(inner) => inner.resolver(db),
GenericDef::TypeAlias(inner) => inner.resolver(db),
GenericDef::ImplBlock(inner) => inner.resolver(db),
GenericDef::EnumVariant(inner) => inner.parent_enum(db).resolver(db),
GenericDef::Const(inner) => inner.resolver(db),
}
}
}
impl HasResolver for ImplBlock {
fn resolver(self, db: &impl DefDatabase) -> Resolver {
self.module(db)
.resolver(db)
.push_generic_params_scope(db, self.into())
.push_impl_block_scope(self)
}
}
@@ -23,7 +23,7 @@ use crate::{
db::HirDatabase,
expr::{self, BodySourceMap, ExprScopes, ScopeId},
ids::LocationCtx,
resolve::{ScopeDef, TypeNs, ValueNs},
resolve::{HasResolver, ScopeDef, TypeNs, ValueNs},
ty::method_resolution::{self, implements_trait},
AssocItem, Const, DefWithBody, Either, Enum, FromSource, Function, GenericParam, HasBody,
HirFileId, Local, MacroDef, Module, Name, Path, Resolver, Static, Struct, Ty,

@@ -31,24 +31,24 @@ use crate::{
fn try_get_resolver_for_node(db: &impl HirDatabase, node: Source<&SyntaxNode>) -> Option<Resolver> {
match_ast! {
match (node.ast) {
match (node.value) {
ast::Module(it) => {
let src = node.with_ast(it);
let src = node.with_value(it);
Some(crate::Module::from_declaration(db, src)?.resolver(db))
},
ast::SourceFile(it) => {
let src = node.with_ast(crate::ModuleSource::SourceFile(it));
let src = node.with_value(crate::ModuleSource::SourceFile(it));
Some(crate::Module::from_definition(db, src)?.resolver(db))
},
ast::StructDef(it) => {
let src = node.with_ast(it);
let src = node.with_value(it);
Some(Struct::from_source(db, src)?.resolver(db))
},
ast::EnumDef(it) => {
let src = node.with_ast(it);
let src = node.with_value(it);
Some(Enum::from_source(db, src)?.resolver(db))
},
_ => match node.ast.kind() {
_ => match node.value.kind() {
FN_DEF | CONST_DEF | STATIC_DEF => {
Some(def_with_body_from_child_node(db, node)?.resolver(db))
}

@@ -67,11 +67,11 @@ fn def_with_body_from_child_node(
let module = Module::from_definition(db, Source::new(child.file_id, module_source))?;
let ctx = LocationCtx::new(db, module.id, child.file_id);
child.ast.ancestors().find_map(|node| {
child.value.ancestors().find_map(|node| {
match_ast! {
match node {
ast::FnDef(def) => { Some(Function {id: ctx.to_def(&def) }.into()) },
ast::ConstDef(def) => { Some(Const { id: ctx.to_def(&def) }.into()) },
ast::FnDef(def) => { return Function::from_source(db, child.with_value(def)).map(DefWithBody::from); },
ast::ConstDef(def) => { return Const::from_source(db, child.with_value(def)).map(DefWithBody::from); },
ast::StaticDef(def) => { Some(Static { id: ctx.to_def(&def) }.into()) },
_ => { None },
}

@@ -157,7 +157,7 @@ impl SourceAnalyzer {
let scopes = def.expr_scopes(db);
let scope = match offset {
None => scope_for(&scopes, &source_map, node),
Some(offset) => scope_for_offset(&scopes, &source_map, node.with_ast(offset)),
Some(offset) => scope_for_offset(&scopes, &source_map, node.with_value(offset)),
};
let resolver = expr::resolver_for_scope(db, def, scope);
SourceAnalyzer {

@@ -171,9 +171,9 @@ impl SourceAnalyzer {
} else {
SourceAnalyzer {
resolver: node
.ast
.value
.ancestors()
.find_map(|it| try_get_resolver_for_node(db, node.with_ast(&it)))
.find_map(|it| try_get_resolver_for_node(db, node.with_value(&it)))
.unwrap_or_default(),
body_owner: None,
body_source_map: None,

@@ -185,12 +185,12 @@ impl SourceAnalyzer {
}
fn expr_id(&self, expr: &ast::Expr) -> Option<ExprId> {
let src = Source { file_id: self.file_id, ast: expr };
let src = Source { file_id: self.file_id, value: expr };
self.body_source_map.as_ref()?.node_expr(src)
}
fn pat_id(&self, pat: &ast::Pat) -> Option<PatId> {
let src = Source { file_id: self.file_id, ast: pat };
let src = Source { file_id: self.file_id, value: pat };
self.body_source_map.as_ref()?.node_pat(src)
}

@@ -302,7 +302,7 @@ impl SourceAnalyzer {
let entry = scopes.resolve_name_in_scope(scope, &name)?;
Some(ScopeEntryWithSyntax {
name: entry.name().clone(),
ptr: source_map.pat_syntax(entry.pat())?.ast,
ptr: source_map.pat_syntax(entry.pat())?.value,
})
}

@@ -405,9 +405,16 @@ impl SourceAnalyzer {
implements_trait(&canonical_ty, db, &self.resolver, krate, std_future_trait)
}
pub fn expand(&self, db: &impl HirDatabase, macro_call: &ast::MacroCall) -> Option<Expansion> {
let def = self.resolve_macro_call(db, macro_call)?.id;
let ast_id = AstId::new(self.file_id, db.ast_id_map(self.file_id).ast_id(macro_call));
pub fn expand(
&self,
db: &impl HirDatabase,
macro_call: Source<&ast::MacroCall>,
) -> Option<Expansion> {
let def = self.resolve_macro_call(db, macro_call.value)?.id;
let ast_id = AstId::new(
macro_call.file_id,
db.ast_id_map(macro_call.file_id).ast_id(macro_call.value),
);
let macro_call_loc = MacroCallLoc { def, ast_id };
Some(Expansion { macro_call_id: db.intern_macro(macro_call_loc) })
}

@@ -421,6 +428,11 @@ impl SourceAnalyzer {
pub(crate) fn inference_result(&self) -> Arc<crate::ty::InferenceResult> {
self.infer.clone().unwrap()
}
#[cfg(test)]
pub(crate) fn analyzed_declaration(&self) -> Option<DefWithBody> {
self.body_owner
}
}
fn scope_for(

@@ -428,7 +440,7 @@ fn scope_for(
source_map: &BodySourceMap,
node: Source<&SyntaxNode>,
) -> Option<ScopeId> {
node.ast
node.value
.ancestors()
.filter_map(ast::Expr::cast)
.filter_map(|it| source_map.node_expr(Source::new(node.file_id, &it)))

@@ -450,18 +462,18 @@ fn scope_for_offset(
return None;
}
let syntax_node_ptr =
source.ast.either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr());
source.value.either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr());
Some((syntax_node_ptr, scope))
})
// find containing scope
.min_by_key(|(ptr, _scope)| {
(
!(ptr.range().start() <= offset.ast && offset.ast <= ptr.range().end()),
!(ptr.range().start() <= offset.value && offset.value <= ptr.range().end()),
ptr.range().len(),
)
})
.map(|(ptr, scope)| {
adjust(scopes, source_map, ptr, offset.file_id, offset.ast).unwrap_or(*scope)
adjust(scopes, source_map, ptr, offset.file_id, offset.value).unwrap_or(*scope)
})
}

@@ -485,7 +497,7 @@ fn adjust(
return None;
}
let syntax_node_ptr =
source.ast.either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr());
source.value.either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr());
Some((syntax_node_ptr, scope))
})
.map(|(ptr, scope)| (ptr.range(), scope))
@@ -1,82 +0,0 @@
//! HIR for trait definitions.
use std::sync::Arc;
use hir_expand::name::AsName;
use ra_syntax::ast::{self, NameOwner};
use rustc_hash::FxHashMap;
use crate::{
db::{AstDatabase, DefDatabase},
ids::LocationCtx,
AssocItem, Const, Function, HasSource, Module, Name, Trait, TypeAlias,
};
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct TraitData {
name: Option<Name>,
items: Vec<AssocItem>,
auto: bool,
}
impl TraitData {
pub(crate) fn trait_data_query(
db: &(impl DefDatabase + AstDatabase),
tr: Trait,
) -> Arc<TraitData> {
let src = tr.source(db);
let name = src.ast.name().map(|n| n.as_name());
let module = tr.module(db);
let ctx = LocationCtx::new(db, module.id, src.file_id);
let auto = src.ast.is_auto();
let items = if let Some(item_list) = src.ast.item_list() {
item_list
.impl_items()
.map(|item_node| match item_node {
ast::ImplItem::FnDef(it) => Function { id: ctx.to_def(&it) }.into(),
ast::ImplItem::ConstDef(it) => Const { id: ctx.to_def(&it) }.into(),
ast::ImplItem::TypeAliasDef(it) => TypeAlias { id: ctx.to_def(&it) }.into(),
})
.collect()
} else {
Vec::new()
};
Arc::new(TraitData { name, items, auto })
}
pub(crate) fn name(&self) -> &Option<Name> {
&self.name
}
pub(crate) fn items(&self) -> &[AssocItem] {
&self.items
}
pub(crate) fn is_auto(&self) -> bool {
self.auto
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct TraitItemsIndex {
traits_by_def: FxHashMap<AssocItem, Trait>,
}
impl TraitItemsIndex {
pub(crate) fn trait_items_index(db: &impl DefDatabase, module: Module) -> TraitItemsIndex {
let mut index = TraitItemsIndex { traits_by_def: FxHashMap::default() };
for decl in module.declarations(db) {
if let crate::ModuleDef::Trait(tr) = decl {
for item in tr.trait_data(db).items() {
index.traits_by_def.insert(*item, tr);
}
}
}
index
}
pub(crate) fn get_parent_trait(&self, item: AssocItem) -> Option<Trait> {
self.traits_by_def.get(&item).cloned()
}
}
@@ -3,8 +3,6 @@
mod autoderef;
pub(crate) mod primitive;
#[cfg(test)]
mod tests;
pub(crate) mod traits;
pub(crate) mod method_resolution;
mod op;

@@ -12,6 +10,9 @@ mod lower;
mod infer;
pub(crate) mod display;
#[cfg(test)]
mod tests;
use std::ops::Deref;
use std::sync::Arc;
use std::{fmt, iter, mem};

@@ -800,6 +801,10 @@ impl HirDisplay for &Ty {
impl HirDisplay for ApplicationTy {
fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
if f.should_truncate() {
return write!(f, "…");
}
match self.ctor {
TypeCtor::Bool => write!(f, "bool")?,
TypeCtor::Char => write!(f, "char")?,

@@ -901,6 +906,10 @@ impl HirDisplay for ApplicationTy {
impl HirDisplay for ProjectionTy {
fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
if f.should_truncate() {
return write!(f, "…");
}
let trait_name = self
.associated_ty
.parent_trait(f.db)

@@ -919,6 +928,10 @@ impl HirDisplay for ProjectionTy {
impl HirDisplay for Ty {
fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
if f.should_truncate() {
return write!(f, "…");
}
match self {
Ty::Apply(a_ty) => a_ty.hir_fmt(f)?,
Ty::Projection(p_ty) => p_ty.hir_fmt(f)?,

@@ -1001,6 +1014,10 @@ impl HirDisplay for Ty {
impl TraitRef {
fn hir_fmt_ext(&self, f: &mut HirFormatter<impl HirDatabase>, use_as: bool) -> fmt::Result {
if f.should_truncate() {
return write!(f, "…");
}
self.substs[0].hir_fmt(f)?;
if use_as {
write!(f, " as ")?;

@@ -1031,6 +1048,10 @@ impl HirDisplay for &GenericPredicate {
impl HirDisplay for GenericPredicate {
fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
if f.should_truncate() {
return write!(f, "…");
}
match self {
GenericPredicate::Implemented(trait_ref) => trait_ref.hir_fmt(f)?,
GenericPredicate::Projection(projection_pred) => {
@@ -7,15 +7,30 @@ use crate::db::HirDatabase;
pub struct HirFormatter<'a, 'b, DB> {
pub db: &'a DB,
fmt: &'a mut fmt::Formatter<'b>,
buf: String,
curr_size: usize,
max_size: Option<usize>,
}
pub trait HirDisplay {
fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result;
fn display<'a, DB>(&'a self, db: &'a DB) -> HirDisplayWrapper<'a, DB, Self>
where
Self: Sized,
{
HirDisplayWrapper(db, self)
HirDisplayWrapper(db, self, None)
}
fn display_truncated<'a, DB>(
&'a self,
db: &'a DB,
max_size: Option<usize>,
) -> HirDisplayWrapper<'a, DB, Self>
where
Self: Sized,
{
HirDisplayWrapper(db, self, max_size)
}
}

@@ -41,11 +56,25 @@ where
/// This allows using the `write!` macro directly with a `HirFormatter`.
pub fn write_fmt(&mut self, args: fmt::Arguments) -> fmt::Result {
fmt::write(self.fmt, args)
// We write to a buffer first to track output size
self.buf.clear();
fmt::write(&mut self.buf, args)?;
self.curr_size += self.buf.len();
// Then we write to the internal formatter from the buffer
self.fmt.write_str(&self.buf)
}
pub fn should_truncate(&self) -> bool {
if let Some(max_size) = self.max_size {
self.curr_size >= max_size
} else {
false
}
}
}
pub struct HirDisplayWrapper<'a, DB, T>(&'a DB, &'a T);
pub struct HirDisplayWrapper<'a, DB, T>(&'a DB, &'a T, Option<usize>);
impl<'a, DB, T> fmt::Display for HirDisplayWrapper<'a, DB, T>
where

@@ -53,6 +82,12 @@ where
T: HirDisplay,
{
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.1.hir_fmt(&mut HirFormatter { db: self.0, fmt: f })
self.1.hir_fmt(&mut HirFormatter {
db: self.0,
fmt: f,
buf: String::with_capacity(20),
curr_size: 0,
max_size: self.2,
})
}
}
@@ -37,14 +37,13 @@ use super::{
TypeCtor, TypeWalk, Uncertain,
};
use crate::{
adt::VariantDef,
code_model::TypeAlias,
db::HirDatabase,
expr::{BindingAnnotation, Body, ExprId, PatId},
resolve::{Resolver, TypeNs},
resolve::{HasResolver, Resolver, TypeNs},
ty::infer::diagnostics::InferenceDiagnostic,
Adt, AssocItem, ConstData, DefWithBody, FloatTy, FnData, Function, HasBody, IntTy, Path,
StructField,
StructField, VariantDef,
};
macro_rules! ty_app {

@@ -19,18 +19,17 @@ use super::{
TypeWalk,
};
use crate::{
adt::VariantDef,
db::HirDatabase,
generics::HasGenericParams,
generics::{GenericDef, WherePredicate},
resolve::{Resolver, TypeNs},
resolve::{HasResolver, Resolver, TypeNs},
ty::{
primitive::{FloatTy, IntTy, Uncertain},
Adt,
},
util::make_mut_slice,
Const, Enum, EnumVariant, Function, ModuleDef, Path, Static, Struct, StructField, Trait,
TypeAlias, Union,
TypeAlias, Union, VariantDef,
};
// FIXME: this is only really used in `type_for_def`, which contains a bunch of

@@ -611,9 +610,7 @@ pub(crate) fn generic_defaults_query(db: &impl HirDatabase, def: GenericDef) ->
let defaults = generic_params
.params_including_parent()
.into_iter()
.map(|p| {
p.default.as_ref().map_or(Ty::Unknown, |path| Ty::from_hir_path(db, &resolver, path))
})
.map(|p| p.default.as_ref().map_or(Ty::Unknown, |t| Ty::from_hir(db, &resolver, t)))
.collect();
Substs(defaults)

@@ -232,8 +232,8 @@ fn iterate_trait_method_candidates<T>(
// trait, but if we find out it doesn't, we'll skip the rest of the
// iteration
let mut known_implemented = false;
for &item in data.items() {
if !is_valid_candidate(db, name, mode, item) {
for &item in data.items.iter() {
if !is_valid_candidate(db, name, mode, item.into()) {
continue;
}
if !known_implemented {

@@ -243,7 +243,7 @@ fn iterate_trait_method_candidates<T>(
}
}
known_implemented = true;
if let Some(result) = callback(&ty.value, item) {
if let Some(result) = callback(&ty.value, item.into()) {
return Some(result);
}
}
@@ -11,6 +11,7 @@ use ra_syntax::{
ast::{self, AstNode},
SyntaxKind::*,
};
use rustc_hash::FxHashSet;
use test_utils::covers;
use crate::{

@@ -1979,6 +1980,30 @@ fn test() {
);
}
#[test]
fn infer_associated_method_generics_with_default_tuple_param() {
let t = type_at(
r#"
//- /main.rs
struct Gen<T=()> {
val: T
}
impl<T> Gen<T> {
pub fn make() -> Gen<T> {
loop { }
}
}
fn test() {
let a = Gen::make();
a.val<|>;
}
"#,
);
assert_eq!(t, "()");
}
#[test]
fn infer_associated_method_generics_without_args() {
assert_snapshot!(

@@ -2494,7 +2519,6 @@ fn test() {
[167; 179) 'GLOBAL_CONST': u32
[189; 191) 'id': u32
[194; 210) 'Foo::A..._CONST': u32
[126; 128) '99': u32
"###
);
}

@@ -4694,14 +4718,16 @@ fn infer(content: &str) -> String {
}
// sort ranges for consistency
types.sort_by_key(|(src_ptr, _)| (src_ptr.ast.range().start(), src_ptr.ast.range().end()));
types.sort_by_key(|(src_ptr, _)| {
(src_ptr.value.range().start(), src_ptr.value.range().end())
});
for (src_ptr, ty) in &types {
let node = src_ptr.ast.to_node(&src_ptr.file_syntax(&db));
let node = src_ptr.value.to_node(&src_ptr.file_syntax(&db));
let (range, text) = if let Some(self_param) = ast::SelfParam::cast(node.clone()) {
(self_param.self_kw_token().text_range(), "self".to_string())
} else {
(src_ptr.ast.range(), node.text().to_string().replace("\n", " "))
(src_ptr.value.range(), node.text().to_string().replace("\n", " "))
};
let macro_prefix = if src_ptr.file_id != file_id.into() { "!" } else { "" };
write!(

@@ -4716,10 +4742,13 @@ fn infer(content: &str) -> String {
}
};
let mut analyzed = FxHashSet::default();
for node in source_file.syntax().descendants() {
if node.kind() == FN_DEF || node.kind() == CONST_DEF || node.kind() == STATIC_DEF {
let analyzer = SourceAnalyzer::new(&db, Source::new(file_id.into(), &node), None);
infer_def(analyzer.inference_result(), analyzer.body_source_map());
if analyzed.insert(analyzer.analyzed_declaration()) {
infer_def(analyzer.inference_result(), analyzer.body_source_map());
}
}
}
@@ -23,7 +23,7 @@ impl TypeAliasData {
db: &(impl DefDatabase + AstDatabase),
typ: TypeAlias,
) -> Arc<TypeAliasData> {
let node = typ.source(db).ast;
let node = typ.source(db).value;
let name = node.name().map_or_else(Name::missing, |n| n.as_name());
let type_ref = node.type_ref().map(TypeRef::from_ast);
Arc::new(TypeAliasData { name, type_ref })

@@ -54,8 +54,8 @@ impl StructData {
id: StructOrUnionId,
) -> Arc<StructData> {
let src = id.source(db);
let name = src.ast.name().map(|n| n.as_name());
let variant_data = VariantData::new(src.ast.kind());
let name = src.value.name().map(|n| n.as_name());
let variant_data = VariantData::new(src.value.kind());
let variant_data = Arc::new(variant_data);
Arc::new(StructData { name, variant_data })
}

@@ -64,9 +64,9 @@ impl StructData {
impl EnumData {
pub(crate) fn enum_data_query(db: &impl DefDatabase2, e: EnumId) -> Arc<EnumData> {
let src = e.source(db);
let name = src.ast.name().map(|n| n.as_name());
let name = src.value.name().map(|n| n.as_name());
let variants = src
.ast
.value
.variant_list()
.into_iter()
.flat_map(|it| it.variants())

@@ -17,7 +17,7 @@ use crate::{
expr::{Expr, ExprId, Pat, PatId},
nameres::CrateDefMap,
path::Path,
AstItemDef, DefWithBodyId, ModuleId,
AstItemDef, DefWithBodyId, HasModule, HasSource, Lookup, ModuleId,
};
pub struct Expander {

@@ -73,8 +73,8 @@ impl Expander {
std::mem::forget(mark);
}
fn to_source<T>(&self, ast: T) -> Source<T> {
Source { file_id: self.current_file_id, ast }
fn to_source<T>(&self, value: T) -> Source<T> {
Source { file_id: self.current_file_id, value }
}
fn parse_path(&mut self, path: ast::Path) -> Option<Path> {

@@ -149,17 +149,19 @@ impl Body {
let (file_id, module, body) = match def {
DefWithBodyId::FunctionId(f) => {
let f = f.lookup(db);
let src = f.source(db);
params = src.ast.param_list();
(src.file_id, f.module(db), src.ast.body().map(ast::Expr::from))
params = src.value.param_list();
(src.file_id, f.module(db), src.value.body().map(ast::Expr::from))
}
DefWithBodyId::ConstId(c) => {
let c = c.lookup(db);
let src = c.source(db);
(src.file_id, c.module(db), src.ast.body())
(src.file_id, c.module(db), src.value.body())
}
DefWithBodyId::StaticId(s) => {
let src = s.source(db);
(src.file_id, s.module(db), src.ast.body())
(src.file_id, s.module(db), src.value.body())
}
};
let expander = Expander::new(db, file_id, module);

@@ -210,8 +210,9 @@ mod tests {
let scopes = db.expr_scopes(function.into());
let (_body, source_map) = db.body_with_source_map(function.into());
let expr_id =
source_map.node_expr(Source { file_id: file_id.into(), ast: &marker.into() }).unwrap();
let expr_id = source_map
.node_expr(Source { file_id: file_id.into(), value: &marker.into() })
.unwrap();
let scope = scopes.scope_for(expr_id);
let actual = scopes

@@ -317,14 +318,14 @@ mod tests {
let expr_scope = {
let expr_ast = name_ref.syntax().ancestors().find_map(ast::Expr::cast).unwrap();
let expr_id =
source_map.node_expr(Source { file_id: file_id.into(), ast: &expr_ast }).unwrap();
source_map.node_expr(Source { file_id: file_id.into(), value: &expr_ast }).unwrap();
scopes.scope_for(expr_id).unwrap()
};
let resolved = scopes.resolve_name_in_scope(expr_scope, &name_ref.as_name()).unwrap();
let pat_src = source_map.pat_syntax(resolved.pat()).unwrap();
let local_name = pat_src.ast.either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr());
let local_name = pat_src.value.either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr());
assert_eq!(local_name.range(), expected_name.syntax().text_range());
}
@@ -8,30 +8,32 @@ use ra_syntax::ast;
use crate::{
adt::{EnumData, StructData},
body::{scope::ExprScopes, Body, BodySourceMap},
imp::ImplData,
generics::GenericParams,
impls::ImplData,
nameres::{
raw::{ImportSourceMap, RawItems},
CrateDefMap,
},
DefWithBodyId, EnumId, ImplId, ItemLoc, StructOrUnionId,
traits::TraitData,
DefWithBodyId, EnumId, GenericDefId, ImplId, ItemLoc, StructOrUnionId, TraitId,
};
#[salsa::query_group(InternDatabaseStorage)]
pub trait InternDatabase: SourceDatabase {
#[salsa::interned]
fn intern_function(&self, loc: ItemLoc<ast::FnDef>) -> crate::FunctionId;
fn intern_function(&self, loc: crate::FunctionLoc) -> crate::FunctionId;
#[salsa::interned]
fn intern_struct_or_union(&self, loc: ItemLoc<ast::StructDef>) -> crate::StructOrUnionId;
#[salsa::interned]
fn intern_enum(&self, loc: ItemLoc<ast::EnumDef>) -> crate::EnumId;
#[salsa::interned]
fn intern_const(&self, loc: ItemLoc<ast::ConstDef>) -> crate::ConstId;
fn intern_const(&self, loc: crate::ConstLoc) -> crate::ConstId;
#[salsa::interned]
fn intern_static(&self, loc: ItemLoc<ast::StaticDef>) -> crate::StaticId;
#[salsa::interned]
fn intern_trait(&self, loc: ItemLoc<ast::TraitDef>) -> crate::TraitId;
#[salsa::interned]
fn intern_type_alias(&self, loc: ItemLoc<ast::TypeAliasDef>) -> crate::TypeAliasId;
fn intern_type_alias(&self, loc: crate::TypeAliasLoc) -> crate::TypeAliasId;
#[salsa::interned]
fn intern_impl(&self, loc: ItemLoc<ast::ImplBlock>) -> crate::ImplId;
}

@@ -59,6 +61,9 @@ pub trait DefDatabase2: InternDatabase + AstDatabase {
#[salsa::invoke(ImplData::impl_data_query)]
fn impl_data(&self, e: ImplId) -> Arc<ImplData>;
#[salsa::invoke(TraitData::trait_data_query)]
fn trait_data(&self, e: TraitId) -> Arc<TraitData>;
#[salsa::invoke(Body::body_with_source_map_query)]
fn body_with_source_map(&self, def: DefWithBodyId) -> (Arc<Body>, Arc<BodySourceMap>);

@@ -67,4 +72,7 @@ pub trait DefDatabase2: InternDatabase + AstDatabase {
#[salsa::invoke(ExprScopes::expr_scopes_query)]
fn expr_scopes(&self, def: DefWithBodyId) -> Arc<ExprScopes>;
#[salsa::invoke(GenericParams::generic_params_query)]
fn generic_params(&self, def: GenericDefId) -> Arc<GenericParams>;
}

@@ -20,7 +20,7 @@ impl Diagnostic for UnresolvedModule {
"unresolved module".to_string()
}
fn source(&self) -> Source<SyntaxNodePtr> {
Source { file_id: self.file, ast: self.decl.into() }
Source { file_id: self.file, value: self.decl.into() }
}
fn as_any(&self) -> &(dyn Any + Send + 'static) {
self
185 crates/ra_hir_def/src/generics.rs Normal file

@@ -0,0 +1,185 @@
//! Many kinds of items or constructs can have generic parameters: functions,
//! structs, impls, traits, etc. This module provides a common HIR for these
//! generic parameters. See also the `Generics` type and the `generics_of` query
//! in rustc.
use std::sync::Arc;
use hir_expand::name::{self, AsName, Name};
use ra_syntax::ast::{self, NameOwner, TypeBoundsOwner, TypeParamsOwner};
use crate::{
db::DefDatabase2,
type_ref::{TypeBound, TypeRef},
AdtId, AstItemDef, ContainerId, GenericDefId, HasSource, Lookup,
};
/// Data about a generic parameter (to a function, struct, impl, ...).
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct GenericParam {
// FIXME: give generic params proper IDs
pub idx: u32,
pub name: Name,
pub default: Option<TypeRef>,
}
/// Data about the generic parameters of a function, struct, impl, etc.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct GenericParams {
pub parent_params: Option<Arc<GenericParams>>,
pub params: Vec<GenericParam>,
pub where_predicates: Vec<WherePredicate>,
}
/// A single predicate from a where clause, i.e. `where Type: Trait`. Combined
/// where clauses like `where T: Foo + Bar` are turned into multiple of these.
/// It might still result in multiple actual predicates though, because of
/// associated type bindings like `Iterator<Item = u32>`.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct WherePredicate {
pub type_ref: TypeRef,
pub bound: TypeBound,
}
impl GenericParams {
pub(crate) fn generic_params_query(
db: &impl DefDatabase2,
def: GenericDefId,
) -> Arc<GenericParams> {
let parent_generics = parent_generic_def(db, def).map(|it| db.generic_params(it));
Arc::new(GenericParams::new(db, def.into(), parent_generics))
}
fn new(
db: &impl DefDatabase2,
def: GenericDefId,
parent_params: Option<Arc<GenericParams>>,
) -> GenericParams {
let mut generics =
GenericParams { params: Vec::new(), parent_params, where_predicates: Vec::new() };
let start = generics.parent_params.as_ref().map(|p| p.params.len()).unwrap_or(0) as u32;
// FIXME: add `: Sized` bound for everything except for `Self` in traits
match def {
GenericDefId::FunctionId(it) => generics.fill(&it.lookup(db).source(db).value, start),
GenericDefId::AdtId(AdtId::StructId(it)) => {
generics.fill(&it.0.source(db).value, start)
}
GenericDefId::AdtId(AdtId::UnionId(it)) => generics.fill(&it.0.source(db).value, start),
GenericDefId::AdtId(AdtId::EnumId(it)) => generics.fill(&it.source(db).value, start),
GenericDefId::TraitId(it) => {
// traits get the Self type as an implicit first type parameter
generics.params.push(GenericParam {
idx: start,
name: name::SELF_TYPE,
default: None,
});
generics.fill(&it.source(db).value, start + 1);
// add super traits as bounds on Self
// i.e., trait Foo: Bar is equivalent to trait Foo where Self: Bar
let self_param = TypeRef::Path(name::SELF_TYPE.into());
generics.fill_bounds(&it.source(db).value, self_param);
}
GenericDefId::TypeAliasId(it) => generics.fill(&it.lookup(db).source(db).value, start),
// Note that we don't add `Self` here: in `impl`s, `Self` is not a
// type-parameter, but rather is a type-alias for impl's target
// type, so this is handled by the resolver.
GenericDefId::ImplId(it) => generics.fill(&it.source(db).value, start),
GenericDefId::EnumVariantId(_) | GenericDefId::ConstId(_) => {}
}
generics
}
fn fill(&mut self, node: &impl TypeParamsOwner, start: u32) {
if let Some(params) = node.type_param_list() {
self.fill_params(params, start)
}
if let Some(where_clause) = node.where_clause() {
self.fill_where_predicates(where_clause);
}
}
fn fill_bounds(&mut self, node: &impl ast::TypeBoundsOwner, type_ref: TypeRef) {
for bound in
node.type_bound_list().iter().flat_map(|type_bound_list| type_bound_list.bounds())
{
self.add_where_predicate_from_bound(bound, type_ref.clone());
}
}
fn fill_params(&mut self, params: ast::TypeParamList, start: u32) {
for (idx, type_param) in params.type_params().enumerate() {
let name = type_param.name().map_or_else(Name::missing, |it| it.as_name());
// FIXME: Use `Path::from_src`
let default = type_param.default_type().map(TypeRef::from_ast);
let param = GenericParam { idx: idx as u32 + start, name: name.clone(), default };
self.params.push(param);
let type_ref = TypeRef::Path(name.into());
self.fill_bounds(&type_param, type_ref);
}
}
fn fill_where_predicates(&mut self, where_clause: ast::WhereClause) {
for pred in where_clause.predicates() {
let type_ref = match pred.type_ref() {
Some(type_ref) => type_ref,
None => continue,
};
let type_ref = TypeRef::from_ast(type_ref);
for bound in pred.type_bound_list().iter().flat_map(|l| l.bounds()) {
self.add_where_predicate_from_bound(bound, type_ref.clone());
}
}
}
fn add_where_predicate_from_bound(&mut self, bound: ast::TypeBound, type_ref: TypeRef) {
if bound.has_question_mark() {
// FIXME: remove this bound
return;
}
let bound = TypeBound::from_ast(bound);
self.where_predicates.push(WherePredicate { type_ref, bound });
}
pub fn find_by_name(&self, name: &Name) -> Option<&GenericParam> {
self.params.iter().find(|p| &p.name == name)
}
pub fn count_parent_params(&self) -> usize {
self.parent_params.as_ref().map(|p| p.count_params_including_parent()).unwrap_or(0)
}
pub fn count_params_including_parent(&self) -> usize {
let parent_count = self.count_parent_params();
parent_count + self.params.len()
}
fn for_each_param<'a>(&'a self, f: &mut impl FnMut(&'a GenericParam)) {
if let Some(parent) = &self.parent_params {
parent.for_each_param(f);
}
self.params.iter().for_each(f);
}
pub fn params_including_parent(&self) -> Vec<&GenericParam> {
let mut vec = Vec::with_capacity(self.count_params_including_parent());
self.for_each_param(&mut |p| vec.push(p));
vec
}
}
fn parent_generic_def(db: &impl DefDatabase2, def: GenericDefId) -> Option<GenericDefId> {
let container = match def {
GenericDefId::FunctionId(it) => it.lookup(db).container,
GenericDefId::TypeAliasId(it) => it.lookup(db).container,
GenericDefId::ConstId(it) => it.lookup(db).container,
GenericDefId::EnumVariantId(it) => return Some(it.parent.into()),
GenericDefId::AdtId(_) | GenericDefId::TraitId(_) | GenericDefId::ImplId(_) => return None,
};
match container {
ContainerId::ImplId(it) => Some(it.into()),
ContainerId::TraitId(it) => Some(it.into()),
ContainerId::ModuleId(_) => None,
}
}
@@ -5,11 +5,12 @@
use std::sync::Arc;
use hir_expand::AstId;
use ra_syntax::ast;
use crate::{
db::DefDatabase2, type_ref::TypeRef, AssocItemId, AstItemDef, ConstId, FunctionId, ImplId,
LocationCtx, TypeAliasId,
db::DefDatabase2, type_ref::TypeRef, AssocItemId, AstItemDef, ConstLoc, ContainerId,
FunctionLoc, ImplId, Intern, TypeAliasLoc,
};
#[derive(Debug, Clone, PartialEq, Eq)]

@@ -25,23 +26,37 @@ impl ImplData {
let src = id.source(db);
let items = db.ast_id_map(src.file_id);
let target_trait = src.ast.target_trait().map(TypeRef::from_ast);
let target_type = TypeRef::from_ast_opt(src.ast.target_type());
let negative = src.ast.is_negative();
let target_trait = src.value.target_trait().map(TypeRef::from_ast);
let target_type = TypeRef::from_ast_opt(src.value.target_type());
let negative = src.value.is_negative();
let items = if let Some(item_list) = src.ast.item_list() {
let ctx = LocationCtx::new(db, id.module(db), src.file_id);
let items = if let Some(item_list) = src.value.item_list() {
item_list
.impl_items()
.map(|item_node| match item_node {
ast::ImplItem::FnDef(it) => {
FunctionId::from_ast_id(ctx, items.ast_id(&it)).into()
let def = FunctionLoc {
container: ContainerId::ImplId(id),
ast_id: AstId::new(src.file_id, items.ast_id(&it)),
}
.intern(db);
def.into()
}
ast::ImplItem::ConstDef(it) => {
ConstId::from_ast_id(ctx, items.ast_id(&it)).into()
let def = ConstLoc {
container: ContainerId::ImplId(id),
ast_id: AstId::new(src.file_id, items.ast_id(&it)),
}
.intern(db);
def.into()
}
ast::ImplItem::TypeAliasDef(it) => {
TypeAliasId::from_ast_id(ctx, items.ast_id(&it)).into()
let def = TypeAliasLoc {
container: ContainerId::ImplId(id),
ast_id: AstId::new(src.file_id, items.ast_id(&it)),
}
.intern(db);
def.into()
}
})
.collect()
@ -13,10 +13,12 @@ pub mod path;
|
|||
pub mod type_ref;
|
||||
pub mod builtin_type;
|
||||
pub mod adt;
|
||||
pub mod imp;
|
||||
pub mod impls;
|
||||
pub mod diagnostics;
|
||||
pub mod expr;
|
||||
pub mod body;
|
||||
pub mod generics;
|
||||
pub mod traits;
|
||||
|
||||
#[cfg(test)]
|
||||
mod test_db;
|
||||
|
@ -80,7 +82,7 @@ impl ModuleSource {
|
|||
|
||||
pub fn from_child_node(db: &impl db::DefDatabase2, child: Source<&SyntaxNode>) -> ModuleSource {
|
||||
if let Some(m) =
|
||||
child.ast.ancestors().filter_map(ast::Module::cast).find(|it| !it.has_semi())
|
||||
child.value.ancestors().filter_map(ast::Module::cast).find(|it| !it.has_semi())
|
||||
{
|
||||
ModuleSource::Module(m)
|
||||
} else {
|
||||
|
@ -184,8 +186,8 @@ pub trait AstItemDef<N: AstNode>: salsa::InternKey + Clone {
|
|||
}
|
||||
fn source(self, db: &(impl AstDatabase + InternDatabase)) -> Source<N> {
|
||||
let loc = self.lookup_intern(db);
|
||||
let ast = loc.ast_id.to_node(db);
|
||||
Source { file_id: loc.ast_id.file_id(), ast }
|
||||
let value = loc.ast_id.to_node(db);
|
||||
Source { file_id: loc.ast_id.file_id(), value }
|
||||
}
|
||||
fn module(self, db: &impl InternDatabase) -> ModuleId {
|
||||
let loc = self.lookup_intern(db);
|
||||
|
@ -197,12 +199,23 @@ pub trait AstItemDef<N: AstNode>: salsa::InternKey + Clone {
|
|||
pub struct FunctionId(salsa::InternId);
|
||||
impl_intern_key!(FunctionId);
|
||||
|
||||
impl AstItemDef<ast::FnDef> for FunctionId {
|
||||
fn intern(db: &impl InternDatabase, loc: ItemLoc<ast::FnDef>) -> Self {
|
||||
db.intern_function(loc)
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
pub struct FunctionLoc {
|
||||
pub container: ContainerId,
|
||||
pub ast_id: AstId<ast::FnDef>,
|
||||
}
|
||||
|
||||
impl Intern for FunctionLoc {
|
||||
type ID = FunctionId;
|
||||
fn intern(self, db: &impl db::DefDatabase2) -> FunctionId {
|
||||
db.intern_function(self)
|
||||
}
|
||||
fn lookup_intern(self, db: &impl InternDatabase) -> ItemLoc<ast::FnDef> {
|
||||
db.lookup_intern_function(self)
|
||||
}
|
||||
|
||||
impl Lookup for FunctionId {
|
||||
type Data = FunctionLoc;
|
||||
fn lookup(&self, db: &impl db::DefDatabase2) -> FunctionLoc {
|
||||
db.lookup_intern_function(*self)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -276,12 +289,23 @@ impl_arena_id!(LocalStructFieldId);
|
|||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
pub struct ConstId(salsa::InternId);
|
||||
impl_intern_key!(ConstId);
|
||||
impl AstItemDef<ast::ConstDef> for ConstId {
|
||||
fn intern(db: &impl InternDatabase, loc: ItemLoc<ast::ConstDef>) -> Self {
|
||||
db.intern_const(loc)
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
pub struct ConstLoc {
|
||||
pub container: ContainerId,
|
||||
pub ast_id: AstId<ast::ConstDef>,
|
||||
}
|
||||
|
||||
impl Intern for ConstLoc {
|
||||
type ID = ConstId;
|
||||
fn intern(self, db: &impl db::DefDatabase2) -> ConstId {
|
||||
db.intern_const(self)
|
||||
}
|
||||
fn lookup_intern(self, db: &impl InternDatabase) -> ItemLoc<ast::ConstDef> {
|
||||
db.lookup_intern_const(self)
|
||||
}
|
||||
|
||||
impl Lookup for ConstId {
|
||||
type Data = ConstLoc;
|
||||
fn lookup(&self, db: &impl db::DefDatabase2) -> ConstLoc {
|
||||
db.lookup_intern_const(*self)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -312,12 +336,24 @@ impl AstItemDef<ast::TraitDef> for TraitId {
|
|||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
pub struct TypeAliasId(salsa::InternId);
|
||||
impl_intern_key!(TypeAliasId);
|
||||
impl AstItemDef<ast::TypeAliasDef> for TypeAliasId {
|
||||
fn intern(db: &impl InternDatabase, loc: ItemLoc<ast::TypeAliasDef>) -> Self {
|
||||
db.intern_type_alias(loc)
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
pub struct TypeAliasLoc {
|
||||
pub container: ContainerId,
|
||||
pub ast_id: AstId<ast::TypeAliasDef>,
|
||||
}
|
||||
|
||||
impl Intern for TypeAliasLoc {
|
||||
type ID = TypeAliasId;
|
||||
fn intern(self, db: &impl db::DefDatabase2) -> TypeAliasId {
|
||||
db.intern_type_alias(self)
|
||||
}
|
||||
fn lookup_intern(self, db: &impl InternDatabase) -> ItemLoc<ast::TypeAliasDef> {
|
||||
db.lookup_intern_type_alias(self)
|
||||
}
|
||||
|
||||
impl Lookup for TypeAliasId {
|
||||
type Data = TypeAliasLoc;
|
||||
fn lookup(&self, db: &impl db::DefDatabase2) -> TypeAliasLoc {
|
||||
        db.lookup_intern_type_alias(*self)
    }
}

@ -352,6 +388,13 @@ macro_rules! impl_froms {
    }
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum ContainerId {
    ModuleId(ModuleId),
    ImplId(ImplId),
    TraitId(TraitId),
}

/// A Data Type
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum AdtId {

@ -408,3 +451,102 @@ pub enum AssocItemId {
// require not implementing From, and instead having some checked way of
// casting them, and somehow making the constructors private, which would be annoying.
impl_froms!(AssocItemId: FunctionId, ConstId, TypeAliasId);

#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)]
pub enum GenericDefId {
    FunctionId(FunctionId),
    AdtId(AdtId),
    TraitId(TraitId),
    TypeAliasId(TypeAliasId),
    ImplId(ImplId),
    // enum variants cannot have generics themselves, but their parent enums
    // can, and this makes some code easier to write
    EnumVariantId(EnumVariantId),
    // consts can have type parameters from their parents (i.e. associated consts of traits)
    ConstId(ConstId),
}
impl_froms!(
    GenericDefId: FunctionId,
    AdtId(StructId, EnumId, UnionId),
    TraitId,
    TypeAliasId,
    ImplId,
    EnumVariantId,
    ConstId
);

trait Intern {
    type ID;
    fn intern(self, db: &impl db::DefDatabase2) -> Self::ID;
}

pub trait Lookup {
    type Data;
    fn lookup(&self, db: &impl db::DefDatabase2) -> Self::Data;
}

pub trait HasModule {
    fn module(&self, db: &impl db::DefDatabase2) -> ModuleId;
}

impl HasModule for FunctionLoc {
    fn module(&self, db: &impl db::DefDatabase2) -> ModuleId {
        match self.container {
            ContainerId::ModuleId(it) => it,
            ContainerId::ImplId(it) => it.module(db),
            ContainerId::TraitId(it) => it.module(db),
        }
    }
}

impl HasModule for TypeAliasLoc {
    fn module(&self, db: &impl db::DefDatabase2) -> ModuleId {
        match self.container {
            ContainerId::ModuleId(it) => it,
            ContainerId::ImplId(it) => it.module(db),
            ContainerId::TraitId(it) => it.module(db),
        }
    }
}

impl HasModule for ConstLoc {
    fn module(&self, db: &impl db::DefDatabase2) -> ModuleId {
        match self.container {
            ContainerId::ModuleId(it) => it,
            ContainerId::ImplId(it) => it.module(db),
            ContainerId::TraitId(it) => it.module(db),
        }
    }
}

pub trait HasSource {
    type Value;
    fn source(&self, db: &impl db::DefDatabase2) -> Source<Self::Value>;
}

impl HasSource for FunctionLoc {
    type Value = ast::FnDef;

    fn source(&self, db: &impl db::DefDatabase2) -> Source<ast::FnDef> {
        let node = self.ast_id.to_node(db);
        Source::new(self.ast_id.file_id(), node)
    }
}

impl HasSource for TypeAliasLoc {
    type Value = ast::TypeAliasDef;

    fn source(&self, db: &impl db::DefDatabase2) -> Source<ast::TypeAliasDef> {
        let node = self.ast_id.to_node(db);
        Source::new(self.ast_id.file_id(), node)
    }
}

impl HasSource for ConstLoc {
    type Value = ast::ConstDef;

    fn source(&self, db: &impl db::DefDatabase2) -> Source<ast::ConstDef> {
        let node = self.ast_id.to_node(db);
        Source::new(self.ast_id.file_id(), node)
    }
}
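The `Intern`/`Lookup` pair above, together with `HasModule` and `HasSource`, is the usual interning round-trip: an item's location is exchanged for a small copyable ID, and the ID can later be resolved back to reach its container or its AST node. A minimal, self-contained sketch of that round-trip, using hypothetical toy types in place of `FunctionLoc`, `ContainerId`, and the salsa database (this illustrates the pattern only; it is not the crate's real API):

use std::collections::HashMap;

// Hypothetical stand-ins for the ContainerId / FunctionLoc shown in the hunk above.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct ModuleId(u32);

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
enum ContainerId {
    ModuleId(ModuleId),
}

#[derive(Clone, PartialEq, Eq, Hash, Debug)]
struct FunctionLoc {
    container: ContainerId,
    name: String, // stands in for the AstId used in the real code
}

// Small copyable handle produced by interning, like FunctionId.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct FunctionId(u32);

// Toy database; the real code uses salsa's interning queries instead.
#[derive(Default)]
struct Db {
    forward: HashMap<FunctionLoc, FunctionId>,
    back: Vec<FunctionLoc>,
}

trait Intern {
    type ID;
    fn intern(self, db: &mut Db) -> Self::ID;
}

trait Lookup {
    type Data;
    fn lookup(&self, db: &Db) -> Self::Data;
}

impl Intern for FunctionLoc {
    type ID = FunctionId;
    fn intern(self, db: &mut Db) -> FunctionId {
        if let Some(&id) = db.forward.get(&self) {
            return id;
        }
        let id = FunctionId(db.back.len() as u32);
        db.back.push(self.clone());
        db.forward.insert(self, id);
        id
    }
}

impl Lookup for FunctionId {
    type Data = FunctionLoc;
    fn lookup(&self, db: &Db) -> FunctionLoc {
        db.back[self.0 as usize].clone()
    }
}

fn main() {
    let mut db = Db::default();
    let loc = FunctionLoc { container: ContainerId::ModuleId(ModuleId(0)), name: "foo".into() };
    let id = loc.clone().intern(&mut db);
    // The ID round-trips back to its location, which knows its container.
    assert_eq!(id.lookup(&db), loc);
    println!("{:?} -> {:?}", id, id.lookup(&db).container);
}

The payoff is that the cheap `FunctionId` handle can be stored everywhere, while the location it stands for still knows which module, impl, or trait owns it.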
@ -165,6 +165,14 @@ impl ModuleScope {
        self.items.iter().chain(BUILTIN_SCOPE.iter())
    }

    pub fn declarations(&self) -> impl Iterator<Item = ModuleDefId> + '_ {
        self.entries()
            .filter_map(|(_name, res)| if res.import.is_none() { Some(res.def) } else { None })
            .flat_map(|per_ns| {
                per_ns.take_types().into_iter().chain(per_ns.take_values().into_iter())
            })
    }

    /// Iterate over all module scoped macros
    pub fn macros<'a>(&'a self) -> impl Iterator<Item = (&'a Name, MacroDefId)> + 'a {
        self.items
@ -19,9 +19,9 @@ use crate::{
        per_ns::PerNs, raw, CrateDefMap, ModuleData, Resolution, ResolveMode,
    },
    path::{Path, PathKind},
    AdtId, AstId, AstItemDef, ConstId, CrateModuleId, EnumId, EnumVariantId, FunctionId, ImplId,
    LocationCtx, ModuleDefId, ModuleId, StaticId, StructId, StructOrUnionId, TraitId, TypeAliasId,
    UnionId,
    AdtId, AstId, AstItemDef, ConstLoc, ContainerId, CrateModuleId, EnumId, EnumVariantId,
    FunctionLoc, ImplId, Intern, LocationCtx, ModuleDefId, ModuleId, StaticId, StructId,
    StructOrUnionId, TraitId, TypeAliasLoc, UnionId,
};

pub(super) fn collect_defs(db: &impl DefDatabase2, mut def_map: CrateDefMap) -> CrateDefMap {

@ -673,8 +673,13 @@ where
        let name = def.name.clone();
        let def: PerNs = match def.kind {
            raw::DefKind::Function(ast_id) => {
                let f = FunctionId::from_ast_id(ctx, ast_id);
                PerNs::values(f.into())
                let def = FunctionLoc {
                    container: ContainerId::ModuleId(module),
                    ast_id: AstId::new(self.file_id, ast_id),
                }
                .intern(self.def_collector.db);

                PerNs::values(def.into())
            }
            raw::DefKind::Struct(ast_id) => {
                let id = StructOrUnionId::from_ast_id(ctx, ast_id).into();

@ -687,13 +692,27 @@ where
                PerNs::both(u, u)
            }
            raw::DefKind::Enum(ast_id) => PerNs::types(EnumId::from_ast_id(ctx, ast_id).into()),
            raw::DefKind::Const(ast_id) => PerNs::values(ConstId::from_ast_id(ctx, ast_id).into()),
            raw::DefKind::Const(ast_id) => {
                let def = ConstLoc {
                    container: ContainerId::ModuleId(module),
                    ast_id: AstId::new(self.file_id, ast_id),
                }
                .intern(self.def_collector.db);

                PerNs::values(def.into())
            }
            raw::DefKind::Static(ast_id) => {
                PerNs::values(StaticId::from_ast_id(ctx, ast_id).into())
            }
            raw::DefKind::Trait(ast_id) => PerNs::types(TraitId::from_ast_id(ctx, ast_id).into()),
            raw::DefKind::TypeAlias(ast_id) => {
                PerNs::types(TypeAliasId::from_ast_id(ctx, ast_id).into())
                let def = TypeAliasLoc {
                    container: ContainerId::ModuleId(module),
                    ast_id: AstId::new(self.file_id, ast_id),
                }
                .intern(self.def_collector.db);

                PerNs::types(def.into())
            }
        };
        let resolution = Resolution { def, import: None };
@ -327,7 +327,7 @@ impl RawItemsCollector {

        let mut buf = Vec::new();
        Path::expand_use_item(
            Source { ast: use_item, file_id: self.file_id },
            Source { value: use_item, file_id: self.file_id },
            &self.hygiene,
            |path, use_tree, is_glob, alias| {
                let import_data = ImportData {

@ -71,7 +71,7 @@ impl Path {
        hygiene: &Hygiene,
        mut cb: impl FnMut(Path, &ast::UseTree, bool, Option<Name>),
    ) {
        if let Some(tree) = item_src.ast.use_tree() {
        if let Some(tree) = item_src.value.use_tree() {
            expand_use_tree(None, tree, hygiene, &mut cb);
        }
    }
crates/ra_hir_def/src/traits.rs (new file, 59 lines)

@ -0,0 +1,59 @@
//! HIR for trait definitions.

use std::sync::Arc;

use hir_expand::{
    name::{AsName, Name},
    AstId,
};

use ra_syntax::ast::{self, NameOwner};

use crate::{
    db::DefDatabase2, AssocItemId, AstItemDef, ConstLoc, ContainerId, FunctionLoc, Intern, TraitId,
    TypeAliasLoc,
};

#[derive(Debug, Clone, PartialEq, Eq)]
pub struct TraitData {
    pub name: Option<Name>,
    pub items: Vec<AssocItemId>,
    pub auto: bool,
}

impl TraitData {
    pub(crate) fn trait_data_query(db: &impl DefDatabase2, tr: TraitId) -> Arc<TraitData> {
        let src = tr.source(db);
        let name = src.value.name().map(|n| n.as_name());
        let auto = src.value.is_auto();
        let ast_id_map = db.ast_id_map(src.file_id);
        let items = if let Some(item_list) = src.value.item_list() {
            item_list
                .impl_items()
                .map(|item_node| match item_node {
                    ast::ImplItem::FnDef(it) => FunctionLoc {
                        container: ContainerId::TraitId(tr),
                        ast_id: AstId::new(src.file_id, ast_id_map.ast_id(&it)),
                    }
                    .intern(db)
                    .into(),
                    ast::ImplItem::ConstDef(it) => ConstLoc {
                        container: ContainerId::TraitId(tr),
                        ast_id: AstId::new(src.file_id, ast_id_map.ast_id(&it)),
                    }
                    .intern(db)
                    .into(),
                    ast::ImplItem::TypeAliasDef(it) => TypeAliasLoc {
                        container: ContainerId::TraitId(tr),
                        ast_id: AstId::new(src.file_id, ast_id_map.ast_id(&it)),
                    }
                    .intern(db)
                    .into(),
                })
                .collect()
        } else {
            Vec::new()
        };
        Arc::new(TraitData { name, items, auto })
    }
}
@ -24,7 +24,7 @@ pub trait Diagnostic: Any + Send + Sync + fmt::Debug + 'static {
    fn message(&self) -> String;
    fn source(&self) -> Source<SyntaxNodePtr>;
    fn highlight_range(&self) -> TextRange {
        self.source().ast.range()
        self.source().value.range()
    }
    fn as_any(&self) -> &(dyn Any + Send + 'static);
}

@ -37,7 +37,7 @@ pub trait AstDiagnostic {
impl dyn Diagnostic {
    pub fn syntax_node(&self, db: &impl AstDatabase) -> SyntaxNode {
        let node = db.parse_or_expand(self.source().file_id).unwrap();
        self.source().ast.to_node(&node)
        self.source().value.to_node(&node)
    }

    pub fn downcast_ref<D: Diagnostic>(&self) -> Option<&D> {

@ -166,19 +166,19 @@ impl ExpansionInfo {
    pub fn map_token_down(&self, token: Source<&SyntaxToken>) -> Option<Source<SyntaxToken>> {
        assert_eq!(token.file_id, self.arg.file_id);
        let range =
            token.ast.text_range().checked_sub(self.arg.ast.syntax().text_range().start())?;
            token.value.text_range().checked_sub(self.arg.value.syntax().text_range().start())?;
        let token_id = self.macro_arg.1.token_by_range(range)?;
        let token_id = self.macro_def.0.map_id_down(token_id);

        let range = self.exp_map.range_by_token(token_id)?;

        let token = algo::find_covering_element(&self.expanded.ast, range).into_token()?;
        let token = algo::find_covering_element(&self.expanded.value, range).into_token()?;

        Some(self.expanded.with_ast(token))
        Some(self.expanded.with_value(token))
    }

    pub fn map_token_up(&self, token: Source<&SyntaxToken>) -> Option<Source<SyntaxToken>> {
        let token_id = self.exp_map.token_by_range(token.ast.text_range())?;
        let token_id = self.exp_map.token_by_range(token.value.text_range())?;

        let (token_id, origin) = self.macro_def.0.map_id_up(token_id);
        let (token_map, tt) = match origin {

@ -188,11 +188,11 @@ impl ExpansionInfo {

        let range = token_map.range_by_token(token_id)?;
        let token = algo::find_covering_element(
            tt.ast.syntax(),
            range + tt.ast.syntax().text_range().start(),
            tt.value.syntax(),
            range + tt.value.syntax().text_range().start(),
        )
        .into_token()?;
        Some(tt.with_ast(token))
        Some(tt.with_value(token))
    }
}
@ -240,30 +240,34 @@ impl<N: AstNode> AstId<N> {
    }
}

/// FIXME: https://github.com/matklad/with ?
/// `Source<T>` stores a value of `T` inside a particular file/syntax tree.
///
/// Typical usages are:
///
/// * `Source<SyntaxNode>` -- syntax node in a file
/// * `Source<ast::FnDef>` -- ast node in a file
/// * `Source<TextUnit>` -- offset in a file
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
pub struct Source<T> {
    pub file_id: HirFileId,
    // FIXME: this stores all kind of things, not only `ast`.
    // There should be a better name...
    pub ast: T,
    pub value: T,
}

impl<T> Source<T> {
    pub fn new(file_id: HirFileId, ast: T) -> Source<T> {
        Source { file_id, ast }
    pub fn new(file_id: HirFileId, value: T) -> Source<T> {
        Source { file_id, value }
    }

    // Similarly, naming here is stupid...
    pub fn with_ast<U>(&self, ast: U) -> Source<U> {
        Source::new(self.file_id, ast)
    pub fn with_value<U>(&self, value: U) -> Source<U> {
        Source::new(self.file_id, value)
    }

    pub fn map<F: FnOnce(T) -> U, U>(self, f: F) -> Source<U> {
        Source::new(self.file_id, f(self.ast))
        Source::new(self.file_id, f(self.value))
    }
    pub fn as_ref(&self) -> Source<&T> {
        self.with_ast(&self.ast)
        self.with_value(&self.value)
    }
    pub fn file_syntax(&self, db: &impl db::AstDatabase) -> SyntaxNode {
        db.parse_or_expand(self.file_id).expect("source created from invalid file")
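Apart from the `ast` → `value` rename, the `Source<T>` helpers stay purely mechanical: `with_value` keeps the file id, `map` transforms the payload, and `as_ref` borrows it. A small self-contained sketch of how they compose, with a plain integer standing in for `HirFileId` (this mirrors the shape shown above for illustration; it does not reuse the real types):

// Minimal stand-alone model of the Source<T> helpers above.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
struct FileId(u32);

#[derive(Debug, PartialEq, Eq, Clone, Copy)]
struct Source<T> {
    file_id: FileId,
    value: T,
}

impl<T> Source<T> {
    fn new(file_id: FileId, value: T) -> Source<T> {
        Source { file_id, value }
    }
    fn with_value<U>(&self, value: U) -> Source<U> {
        Source::new(self.file_id, value)
    }
    fn map<U>(self, f: impl FnOnce(T) -> U) -> Source<U> {
        Source::new(self.file_id, f(self.value))
    }
    fn as_ref(&self) -> Source<&T> {
        self.with_value(&self.value)
    }
}

fn main() {
    let src = Source::new(FileId(1), "fn main() {}");
    let len = src.map(|text| text.len()); // Source<usize>, same file_id
    let borrowed = len.as_ref();          // Source<&usize>
    assert_eq!(*borrowed.value, 12);
    assert_eq!(borrowed.file_id, FileId(1));
}

Because the file id travels with every transformation, callers can rewrap the payload repeatedly without ever losing track of which file or macro expansion the value came from.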
@ -309,7 +309,6 @@ impl RootDatabase {
        hir::db::StructDataQuery
        hir::db::EnumDataQuery
        hir::db::TraitDataQuery
        hir::db::TraitItemsIndexQuery
        hir::db::RawItemsWithSourceMapQuery
        hir::db::RawItemsQuery
        hir::db::CrateDefMapQuery

@ -304,6 +304,13 @@ mod tests {
            ),
            @r###"
        [
            CompletionItem {
                label: "Self",
                source_range: [54; 54),
                delete: [54; 54),
                insert: "Self",
                kind: TypeParam,
            },
            CompletionItem {
                label: "T",
                source_range: [54; 54),
@ -54,7 +54,7 @@ impl<'a> CompletionContext<'a> {
        let src = hir::ModuleSource::from_position(db, position);
        let module = hir::Module::from_definition(
            db,
            hir::Source { file_id: position.file_id.into(), ast: src },
            hir::Source { file_id: position.file_id.into(), value: src },
        );
        let token =
            original_parse.tree().syntax().token_at_offset(position.offset).left_biased()?;

@ -169,7 +169,7 @@ impl Completions {
            None => return,
        };

        let ast_node = macro_.source(ctx.db).ast;
        let ast_node = macro_.source(ctx.db).value;
        let detail = macro_label(&ast_node);

        let docs = macro_.docs(ctx.db);

@ -201,7 +201,7 @@ impl Completions {
    ) {
        let data = func.data(ctx.db);
        let name = name.unwrap_or_else(|| data.name().to_string());
        let ast_node = func.source(ctx.db).ast;
        let ast_node = func.source(ctx.db).value;
        let detail = function_label(&ast_node);

        let mut builder =

@ -234,7 +234,7 @@ impl Completions {
    }

    pub(crate) fn add_const(&mut self, ctx: &CompletionContext, constant: hir::Const) {
        let ast_node = constant.source(ctx.db).ast;
        let ast_node = constant.source(ctx.db).value;
        let name = match ast_node.name() {
            Some(name) => name,
            _ => return,

@ -250,7 +250,7 @@ impl Completions {
    }

    pub(crate) fn add_type_alias(&mut self, ctx: &CompletionContext, type_alias: hir::TypeAlias) {
        let type_def = type_alias.source(ctx.db).ast;
        let type_def = type_alias.source(ctx.db).value;
        let name = match type_def.name() {
            Some(name) => name,
            _ => return,

@ -96,7 +96,7 @@ pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic>
    });
    let source_file = db.parse(file_id).tree();
    let src =
        hir::Source { file_id: file_id.into(), ast: hir::ModuleSource::SourceFile(source_file) };
        hir::Source { file_id: file_id.into(), value: hir::ModuleSource::SourceFile(source_file) };
    if let Some(m) = hir::Module::from_definition(db, src) {
        m.diagnostics(db, &mut sink);
    };

@ -48,12 +48,12 @@ impl FunctionSignature {

    pub(crate) fn from_hir(db: &db::RootDatabase, function: hir::Function) -> Self {
        let doc = function.docs(db);
        let ast_node = function.source(db).ast;
        let ast_node = function.source(db).value;
        FunctionSignature::from(&ast_node).with_doc_opt(doc)
    }

    pub(crate) fn from_struct(db: &db::RootDatabase, st: hir::Struct) -> Option<Self> {
        let node: ast::StructDef = st.source(db).ast;
        let node: ast::StructDef = st.source(db).value;
        match node.kind() {
            ast::StructKind::Named(_) => return None,
            _ => (),

@ -87,7 +87,7 @@ impl FunctionSignature {
        db: &db::RootDatabase,
        variant: hir::EnumVariant,
    ) -> Option<Self> {
        let node: ast::EnumVariant = variant.source(db).ast;
        let node: ast::EnumVariant = variant.source(db).value;
        match node.kind() {
            ast::StructKind::Named(_) | ast::StructKind::Unit => return None,
            _ => (),

@ -126,7 +126,7 @@ impl FunctionSignature {
    }

    pub(crate) fn from_macro(db: &db::RootDatabase, macro_def: hir::MacroDef) -> Option<Self> {
        let node: ast::MacroCall = macro_def.source(db).ast;
        let node: ast::MacroCall = macro_def.source(db).value;

        let params = vec![];
@ -86,9 +86,9 @@ impl NavigationTarget {
                name,
                None,
                frange.range,
                src.ast.syntax().kind(),
                src.ast.doc_comment_text(),
                src.ast.short_label(),
                src.value.syntax().kind(),
                src.value.doc_comment_text(),
                src.value.short_label(),
            );
        }
        module.to_nav(db)

@ -146,9 +146,9 @@ impl NavigationTarget {
        description: Option<String>,
    ) -> NavigationTarget {
        //FIXME: use `_` instead of empty string
        let name = node.ast.name().map(|it| it.text().clone()).unwrap_or_default();
        let name = node.value.name().map(|it| it.text().clone()).unwrap_or_default();
        let focus_range =
            node.ast.name().map(|it| original_range(db, node.with_ast(it.syntax())).range);
            node.value.name().map(|it| original_range(db, node.with_value(it.syntax())).range);
        let frange = original_range(db, node.map(|it| it.syntax()));

        NavigationTarget::from_syntax(

@ -156,7 +156,7 @@ impl NavigationTarget {
            name,
            focus_range,
            frange.range,
            node.ast.syntax().kind(),
            node.value.syntax().kind(),
            docs,
            description,
        )

@ -220,8 +220,8 @@ where
        NavigationTarget::from_named(
            db,
            src.as_ref().map(|it| it as &dyn ast::NameOwner),
            src.ast.doc_comment_text(),
            src.ast.short_label(),
            src.value.doc_comment_text(),
            src.value.short_label(),
        )
    }
}

@ -230,9 +230,9 @@ impl ToNav for hir::Module {
    fn to_nav(&self, db: &RootDatabase) -> NavigationTarget {
        let src = self.definition_source(db);
        let name = self.name(db).map(|it| it.to_string().into()).unwrap_or_default();
        match &src.ast {
        match &src.value {
            ModuleSource::SourceFile(node) => {
                let frange = original_range(db, src.with_ast(node.syntax()));
                let frange = original_range(db, src.with_value(node.syntax()));

                NavigationTarget::from_syntax(
                    frange.file_id,

@ -245,7 +245,7 @@ impl ToNav for hir::Module {
                )
            }
            ModuleSource::Module(node) => {
                let frange = original_range(db, src.with_ast(node.syntax()));
                let frange = original_range(db, src.with_value(node.syntax()));

                NavigationTarget::from_syntax(
                    frange.file_id,

@ -271,7 +271,7 @@ impl ToNav for hir::ImplBlock {
            "impl".into(),
            None,
            frange.range,
            src.ast.syntax().kind(),
            src.value.syntax().kind(),
            None,
            None,
        )

@ -282,15 +282,15 @@ impl ToNav for hir::StructField {
    fn to_nav(&self, db: &RootDatabase) -> NavigationTarget {
        let src = self.source(db);

        match &src.ast {
        match &src.value {
            FieldSource::Named(it) => NavigationTarget::from_named(
                db,
                src.with_ast(it),
                src.with_value(it),
                it.doc_comment_text(),
                it.short_label(),
            ),
            FieldSource::Pos(it) => {
                let frange = original_range(db, src.with_ast(it.syntax()));
                let frange = original_range(db, src.with_value(it.syntax()));
                NavigationTarget::from_syntax(
                    frange.file_id,
                    "".into(),

@ -308,11 +308,11 @@ impl ToNav for hir::StructField {
impl ToNav for hir::MacroDef {
    fn to_nav(&self, db: &RootDatabase) -> NavigationTarget {
        let src = self.source(db);
        log::debug!("nav target {:#?}", src.ast.syntax());
        log::debug!("nav target {:#?}", src.value.syntax());
        NavigationTarget::from_named(
            db,
            src.as_ref().map(|it| it as &dyn ast::NameOwner),
            src.ast.doc_comment_text(),
            src.value.doc_comment_text(),
            None,
        )
    }

@ -341,7 +341,7 @@ impl ToNav for hir::AssocItem {
impl ToNav for hir::Local {
    fn to_nav(&self, db: &RootDatabase) -> NavigationTarget {
        let src = self.source(db);
        let (full_range, focus_range) = match src.ast {
        let (full_range, focus_range) = match src.value {
            Either::A(it) => {
                (it.syntax().text_range(), it.name().map(|it| it.syntax().text_range()))
            }
@ -12,7 +12,7 @@ pub(crate) fn original_range(db: &RootDatabase, node: Source<&SyntaxNode>) -> Fi
        None => {
            return FileRange {
                file_id: node.file_id.original_file(db),
                range: node.ast.text_range(),
                range: node.value.text_range(),
            }
        }
        Some(it) => it,

@ -25,14 +25,18 @@ pub(crate) fn original_range(db: &RootDatabase, node: Source<&SyntaxNode>) -> Fi
    // *Second*, we should handle recurside macro expansions

    let token = node
        .ast
        .value
        .descendants_with_tokens()
        .filter_map(|it| it.into_token())
        .find_map(|it| expansion.map_token_up(node.with_ast(&it)));
        .find_map(|it| expansion.map_token_up(node.with_value(&it)));

    match token {
        Some(it) => FileRange { file_id: it.file_id.original_file(db), range: it.ast.text_range() },
        None => FileRange { file_id: node.file_id.original_file(db), range: node.ast.text_range() },
        Some(it) => {
            FileRange { file_id: it.file_id.original_file(db), range: it.value.text_range() }
        }
        None => {
            FileRange { file_id: node.file_id.original_file(db), range: node.value.text_range() }
        }
    }
}

@ -44,14 +48,14 @@ pub(crate) fn descend_into_macros(
    let src = Source::new(file_id.into(), token);

    successors(Some(src), |token| {
        let macro_call = token.ast.ancestors().find_map(ast::MacroCall::cast)?;
        let macro_call = token.value.ancestors().find_map(ast::MacroCall::cast)?;
        let tt = macro_call.token_tree()?;
        if !token.ast.text_range().is_subrange(&tt.syntax().text_range()) {
        if !token.value.text_range().is_subrange(&tt.syntax().text_range()) {
            return None;
        }
        let source_analyzer =
            hir::SourceAnalyzer::new(db, token.with_ast(token.ast.parent()).as_ref(), None);
        let exp = source_analyzer.expand(db, &macro_call)?;
            hir::SourceAnalyzer::new(db, token.with_value(token.value.parent()).as_ref(), None);
        let exp = source_analyzer.expand(db, token.with_value(&macro_call))?;
        exp.map_token_down(db, token.as_ref())
    })
    .last()
@ -23,7 +23,7 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<
    let mac = name_ref.syntax().ancestors().find_map(ast::MacroCall::cast)?;

    let source = hir::Source::new(position.file_id.into(), mac.syntax());
    let expanded = expand_macro_recur(db, source, &mac)?;
    let expanded = expand_macro_recur(db, source, source.with_value(&mac))?;

    // FIXME:
    // macro expansion may lose all white space information

@ -35,10 +35,10 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<
fn expand_macro_recur(
    db: &RootDatabase,
    source: hir::Source<&SyntaxNode>,
    macro_call: &ast::MacroCall,
    macro_call: hir::Source<&ast::MacroCall>,
) -> Option<SyntaxNode> {
    let analyzer = hir::SourceAnalyzer::new(db, source, None);
    let expansion = analyzer.expand(db, &macro_call)?;
    let expansion = analyzer.expand(db, macro_call)?;
    let macro_file_id = expansion.file_id();
    let expanded: SyntaxNode = db.parse_or_expand(macro_file_id)?;

@ -46,8 +46,8 @@ fn expand_macro_recur(
    let mut replaces = FxHashMap::default();

    for child in children.into_iter() {
        let source = hir::Source::new(macro_file_id, source.ast);
        let new_node = expand_macro_recur(db, source, &child)?;
        let node = hir::Source::new(macro_file_id, &child);
        let new_node = expand_macro_recur(db, source, node)?;

        replaces.insert(child.syntax().clone().into(), new_node.into());
    }

@ -139,7 +139,7 @@ mod tests {
            }
            macro_rules! baz {
                () => { foo!(); }
            }
            }
            f<|>oo!();
            "#,
        );

@ -156,7 +156,7 @@ fn b(){}
            r#"
            //- /lib.rs
            macro_rules! foo {
                () => {
                () => {
                    fn some_thing() -> u32 {
                        let a = 0;
                        a + 10

@ -172,7 +172,7 @@ fn b(){}
        fn some_thing() -> u32 {
            let a = 0;
            a+10
        }
        }
        "###);
    }
}
@ -23,13 +23,13 @@ pub(crate) fn goto_definition(
    let token = descend_into_macros(db, position.file_id, token);

    let res = match_ast! {
        match (token.ast.parent()) {
        match (token.value.parent()) {
            ast::NameRef(name_ref) => {
                let navs = reference_definition(db, token.with_ast(&name_ref)).to_vec();
                let navs = reference_definition(db, token.with_value(&name_ref)).to_vec();
                RangeInfo::new(name_ref.syntax().text_range(), navs.to_vec())
            },
            ast::Name(name) => {
                let navs = name_definition(db, token.with_ast(&name))?;
                let navs = name_definition(db, token.with_value(&name))?;
                RangeInfo::new(name.syntax().text_range(), navs)

            },

@ -84,7 +84,7 @@ pub(crate) fn reference_definition(
    };

    // Fallback index based approach:
    let navs = crate::symbol_index::index_resolve(db, name_ref.ast)
    let navs = crate::symbol_index::index_resolve(db, name_ref.value)
        .into_iter()
        .map(|s| s.to_nav(db))
        .collect();

@ -95,11 +95,11 @@ pub(crate) fn name_definition(
    db: &RootDatabase,
    name: Source<&ast::Name>,
) -> Option<Vec<NavigationTarget>> {
    let parent = name.ast.syntax().parent()?;
    let parent = name.value.syntax().parent()?;

    if let Some(module) = ast::Module::cast(parent.clone()) {
        if module.has_semi() {
            let src = name.with_ast(module);
            let src = name.with_value(module);
            if let Some(child_module) = hir::Module::from_declaration(db, src) {
                let nav = child_module.to_nav(db);
                return Some(vec![nav]);

@ -107,7 +107,7 @@ pub(crate) fn name_definition(
        }
    }

    if let Some(nav) = named_target(db, name.with_ast(&parent)) {
    if let Some(nav) = named_target(db, name.with_value(&parent)) {
        return Some(vec![nav]);
    }

@ -116,11 +116,11 @@ pub(crate) fn name_definition(

fn named_target(db: &RootDatabase, node: Source<&SyntaxNode>) -> Option<NavigationTarget> {
    match_ast! {
        match (node.ast) {
        match (node.value) {
            ast::StructDef(it) => {
                Some(NavigationTarget::from_named(
                    db,
                    node.with_ast(&it),
                    node.with_value(&it),
                    it.doc_comment_text(),
                    it.short_label(),
                ))

@ -128,7 +128,7 @@ fn named_target(db: &RootDatabase, node: Source<&SyntaxNode>) -> Option<Navigati
            ast::EnumDef(it) => {
                Some(NavigationTarget::from_named(
                    db,
                    node.with_ast(&it),
                    node.with_value(&it),
                    it.doc_comment_text(),
                    it.short_label(),
                ))

@ -136,7 +136,7 @@ fn named_target(db: &RootDatabase, node: Source<&SyntaxNode>) -> Option<Navigati
            ast::EnumVariant(it) => {
                Some(NavigationTarget::from_named(
                    db,
                    node.with_ast(&it),
                    node.with_value(&it),
                    it.doc_comment_text(),
                    it.short_label(),
                ))

@ -144,7 +144,7 @@ fn named_target(db: &RootDatabase, node: Source<&SyntaxNode>) -> Option<Navigati
            ast::FnDef(it) => {
                Some(NavigationTarget::from_named(
                    db,
                    node.with_ast(&it),
                    node.with_value(&it),
                    it.doc_comment_text(),
                    it.short_label(),
                ))

@ -152,7 +152,7 @@ fn named_target(db: &RootDatabase, node: Source<&SyntaxNode>) -> Option<Navigati
            ast::TypeAliasDef(it) => {
                Some(NavigationTarget::from_named(
                    db,
                    node.with_ast(&it),
                    node.with_value(&it),
                    it.doc_comment_text(),
                    it.short_label(),
                ))

@ -160,7 +160,7 @@ fn named_target(db: &RootDatabase, node: Source<&SyntaxNode>) -> Option<Navigati
            ast::ConstDef(it) => {
                Some(NavigationTarget::from_named(
                    db,
                    node.with_ast(&it),
                    node.with_value(&it),
                    it.doc_comment_text(),
                    it.short_label(),
                ))

@ -168,7 +168,7 @@ fn named_target(db: &RootDatabase, node: Source<&SyntaxNode>) -> Option<Navigati
            ast::StaticDef(it) => {
                Some(NavigationTarget::from_named(
                    db,
                    node.with_ast(&it),
                    node.with_value(&it),
                    it.doc_comment_text(),
                    it.short_label(),
                ))

@ -176,7 +176,7 @@ fn named_target(db: &RootDatabase, node: Source<&SyntaxNode>) -> Option<Navigati
            ast::TraitDef(it) => {
                Some(NavigationTarget::from_named(
                    db,
                    node.with_ast(&it),
                    node.with_value(&it),
                    it.doc_comment_text(),
                    it.short_label(),
                ))

@ -184,7 +184,7 @@ fn named_target(db: &RootDatabase, node: Source<&SyntaxNode>) -> Option<Navigati
            ast::RecordFieldDef(it) => {
                Some(NavigationTarget::from_named(
                    db,
                    node.with_ast(&it),
                    node.with_value(&it),
                    it.doc_comment_text(),
                    it.short_label(),
                ))

@ -192,7 +192,7 @@ fn named_target(db: &RootDatabase, node: Source<&SyntaxNode>) -> Option<Navigati
            ast::Module(it) => {
                Some(NavigationTarget::from_named(
                    db,
                    node.with_ast(&it),
                    node.with_value(&it),
                    it.doc_comment_text(),
                    it.short_label(),
                ))

@ -200,7 +200,7 @@ fn named_target(db: &RootDatabase, node: Source<&SyntaxNode>) -> Option<Navigati
            ast::MacroCall(it) => {
                Some(NavigationTarget::from_named(
                    db,
                    node.with_ast(&it),
                    node.with_value(&it),
                    it.doc_comment_text(),
                    None,
                ))
@ -16,13 +16,13 @@ pub(crate) fn goto_type_definition(
    let token = file.token_at_offset(position.offset).filter(|it| !it.kind().is_trivia()).next()?;
    let token = descend_into_macros(db, position.file_id, token);

    let node = token.ast.ancestors().find_map(|token| {
    let node = token.value.ancestors().find_map(|token| {
        token
            .ancestors()
            .find(|n| ast::Expr::cast(n.clone()).is_some() || ast::Pat::cast(n.clone()).is_some())
    })?;

    let analyzer = hir::SourceAnalyzer::new(db, token.with_ast(&node), None);
    let analyzer = hir::SourceAnalyzer::new(db, token.with_value(&node), None);

    let ty: hir::Ty = if let Some(ty) =
        ast::Expr::cast(node.clone()).and_then(|e| analyzer.type_of(db, &e))

@ -101,11 +101,11 @@ fn hover_text_from_name_kind(
    return match name_kind {
        Macro(it) => {
            let src = it.source(db);
            hover_text(src.ast.doc_comment_text(), Some(macro_label(&src.ast)))
            hover_text(src.value.doc_comment_text(), Some(macro_label(&src.value)))
        }
        Field(it) => {
            let src = it.source(db);
            match src.ast {
            match src.value {
                hir::FieldSource::Named(it) => hover_text(it.doc_comment_text(), it.short_label()),
                _ => None,
            }

@ -116,7 +116,7 @@ fn hover_text_from_name_kind(
            hir::AssocItem::TypeAlias(it) => from_def_source(db, it),
        },
        Def(it) => match it {
            hir::ModuleDef::Module(it) => match it.definition_source(db).ast {
            hir::ModuleDef::Module(it) => match it.definition_source(db).value {
                hir::ModuleSource::Module(it) => {
                    hover_text(it.doc_comment_text(), it.short_label())
                }

@ -158,7 +158,7 @@ fn hover_text_from_name_kind(
        A: ast::DocCommentsOwner + ast::NameOwner + ShortLabel,
    {
        let src = def.source(db);
        hover_text(src.ast.doc_comment_text(), src.ast.short_label())
        hover_text(src.value.doc_comment_text(), src.value.short_label())
    }
}

@ -170,11 +170,11 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn
    let mut res = HoverResult::new();

    let mut range = match_ast! {
        match (token.ast.parent()) {
        match (token.value.parent()) {
            ast::NameRef(name_ref) => {
                let mut no_fallback = false;
                if let Some(name_kind) =
                    classify_name_ref(db, token.with_ast(&name_ref)).map(|d| d.kind)
                    classify_name_ref(db, token.with_value(&name_ref)).map(|d| d.kind)
                {
                    res.extend(hover_text_from_name_kind(db, name_kind, &mut no_fallback))
                }

@ -196,7 +196,7 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn
                }
            },
            ast::Name(name) => {
                if let Some(name_kind) = classify_name(db, token.with_ast(&name)).map(|d| d.kind) {
                if let Some(name_kind) = classify_name(db, token.with_value(&name)).map(|d| d.kind) {
                    res.extend(hover_text_from_name_kind(db, name_kind, &mut true));
                }

@ -211,7 +211,7 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn
    };

    if range.is_none() {
        let node = token.ast.ancestors().find(|n| {
        let node = token.value.ancestors().find(|n| {
            ast::Expr::cast(n.clone()).is_some() || ast::Pat::cast(n.clone()).is_some()
        })?;
        let frange = FileRange { file_id: position.file_id, range: node.text_range() };

@ -404,9 +404,9 @@ mod tests {
        check_hover_result(
            r#"
            //- /main.rs
            fn main() {
                const foo<|>: u32 = 0;
            }
            const foo<|>: u32 = 0;
            "#,
            &["const foo: u32"],
        );

@ -414,9 +412,7 @@ mod tests {
        check_hover_result(
            r#"
            //- /main.rs
            fn main() {
                static foo<|>: u32 = 0;
            }
            static foo<|>: u32 = 0;
            "#,
            &["static foo: u32"],
        );
@ -16,7 +16,7 @@ pub(crate) fn goto_implementation(
    let src = hir::ModuleSource::from_position(db, position);
    let module = hir::Module::from_definition(
        db,
        hir::Source { file_id: position.file_id.into(), ast: src },
        hir::Source { file_id: position.file_id.into(), value: src },
    )?;

    if let Some(nominal_def) = find_node_at_offset::<ast::NominalDef>(&syntax, position.offset) {

@ -42,11 +42,11 @@ fn impls_for_def(
) -> Option<Vec<NavigationTarget>> {
    let ty = match node {
        ast::NominalDef::StructDef(def) => {
            let src = hir::Source { file_id: position.file_id.into(), ast: def.clone() };
            let src = hir::Source { file_id: position.file_id.into(), value: def.clone() };
            hir::Struct::from_source(db, src)?.ty(db)
        }
        ast::NominalDef::EnumDef(def) => {
            let src = hir::Source { file_id: position.file_id.into(), ast: def.clone() };
            let src = hir::Source { file_id: position.file_id.into(), value: def.clone() };
            hir::Enum::from_source(db, src)?.ty(db)
        }
    };

@ -69,7 +69,7 @@ fn impls_for_trait(
    node: &ast::TraitDef,
    module: hir::Module,
) -> Option<Vec<NavigationTarget>> {
    let src = hir::Source { file_id: position.file_id.into(), ast: node.clone() };
    let src = hir::Source { file_id: position.file_id.into(), value: node.clone() };
    let tr = hir::Trait::from_source(db, src)?;

    let krate = module.krate();
@ -19,10 +19,15 @@ pub struct InlayHint {
    pub label: SmolStr,
}

pub(crate) fn inlay_hints(db: &RootDatabase, file_id: FileId, file: &SourceFile) -> Vec<InlayHint> {
pub(crate) fn inlay_hints(
    db: &RootDatabase,
    file_id: FileId,
    file: &SourceFile,
    max_inlay_hint_length: Option<usize>,
) -> Vec<InlayHint> {
    file.syntax()
        .descendants()
        .map(|node| get_inlay_hints(db, file_id, &node).unwrap_or_default())
        .map(|node| get_inlay_hints(db, file_id, &node, max_inlay_hint_length).unwrap_or_default())
        .flatten()
        .collect()
}

@ -31,6 +36,7 @@ fn get_inlay_hints(
    db: &RootDatabase,
    file_id: FileId,
    node: &SyntaxNode,
    max_inlay_hint_length: Option<usize>,
) -> Option<Vec<InlayHint>> {
    let analyzer = SourceAnalyzer::new(db, hir::Source::new(file_id.into(), node), None);
    match_ast! {

@ -40,7 +46,7 @@ fn get_inlay_hints(
                return None;
            }
            let pat = it.pat()?;
            Some(get_pat_type_hints(db, &analyzer, pat, false))
            Some(get_pat_type_hints(db, &analyzer, pat, false, max_inlay_hint_length))
        },
        ast::LambdaExpr(it) => {
            it.param_list().map(|param_list| {

@ -48,22 +54,22 @@ fn get_inlay_hints(
                    .params()
                    .filter(|closure_param| closure_param.ascribed_type().is_none())
                    .filter_map(|closure_param| closure_param.pat())
                    .map(|root_pat| get_pat_type_hints(db, &analyzer, root_pat, false))
                    .map(|root_pat| get_pat_type_hints(db, &analyzer, root_pat, false, max_inlay_hint_length))
                    .flatten()
                    .collect()
            })
        },
        ast::ForExpr(it) => {
            let pat = it.pat()?;
            Some(get_pat_type_hints(db, &analyzer, pat, false))
            Some(get_pat_type_hints(db, &analyzer, pat, false, max_inlay_hint_length))
        },
        ast::IfExpr(it) => {
            let pat = it.condition()?.pat()?;
            Some(get_pat_type_hints(db, &analyzer, pat, true))
            Some(get_pat_type_hints(db, &analyzer, pat, true, max_inlay_hint_length))
        },
        ast::WhileExpr(it) => {
            let pat = it.condition()?.pat()?;
            Some(get_pat_type_hints(db, &analyzer, pat, true))
            Some(get_pat_type_hints(db, &analyzer, pat, true, max_inlay_hint_length))
        },
        ast::MatchArmList(it) => {
            Some(

@ -71,7 +77,7 @@ fn get_inlay_hints(
                    .arms()
                    .map(|match_arm| match_arm.pats())
                    .flatten()
                    .map(|root_pat| get_pat_type_hints(db, &analyzer, root_pat, true))
                    .map(|root_pat| get_pat_type_hints(db, &analyzer, root_pat, true, max_inlay_hint_length))
                    .flatten()
                    .collect(),
            )

@ -86,6 +92,7 @@ fn get_pat_type_hints(
    analyzer: &SourceAnalyzer,
    root_pat: ast::Pat,
    skip_root_pat_hint: bool,
    max_inlay_hint_length: Option<usize>,
) -> Vec<InlayHint> {
    let original_pat = &root_pat.clone();

@ -99,7 +106,7 @@ fn get_pat_type_hints(
        .map(|(range, pat_type)| InlayHint {
            range,
            kind: InlayKind::TypeHint,
            label: pat_type.display(db).to_string().into(),
            label: pat_type.display_truncated(db, max_inlay_hint_length).to_string().into(),
        })
        .collect()
}

@ -209,7 +216,7 @@ fn main() {
}"#,
        );

        assert_debug_snapshot!(analysis.inlay_hints(file_id).unwrap(), @r###"
        assert_debug_snapshot!(analysis.inlay_hints(file_id, None).unwrap(), @r###"
        [
            InlayHint {
                range: [193; 197),

@ -278,7 +285,7 @@ fn main() {
}"#,
        );

        assert_debug_snapshot!(analysis.inlay_hints(file_id).unwrap(), @r###"
        assert_debug_snapshot!(analysis.inlay_hints(file_id, None).unwrap(), @r###"
        [
            InlayHint {
                range: [21; 30),

@ -307,7 +314,7 @@ fn main() {
}"#,
        );

        assert_debug_snapshot!(analysis.inlay_hints(file_id).unwrap(), @r###"
        assert_debug_snapshot!(analysis.inlay_hints(file_id, None).unwrap(), @r###"
        [
            InlayHint {
                range: [21; 30),

@ -355,7 +362,7 @@ fn main() {
}"#,
        );

        assert_debug_snapshot!(analysis.inlay_hints(file_id).unwrap(), @r###"
        assert_debug_snapshot!(analysis.inlay_hints(file_id, None).unwrap(), @r###"
        [
            InlayHint {
                range: [166; 170),

@ -418,7 +425,7 @@ fn main() {
}"#,
        );

        assert_debug_snapshot!(analysis.inlay_hints(file_id).unwrap(), @r###"
        assert_debug_snapshot!(analysis.inlay_hints(file_id, None).unwrap(), @r###"
        [
            InlayHint {
                range: [166; 170),

@ -481,7 +488,7 @@ fn main() {
}"#,
        );

        assert_debug_snapshot!(analysis.inlay_hints(file_id).unwrap(), @r###"
        assert_debug_snapshot!(analysis.inlay_hints(file_id, None).unwrap(), @r###"
        [
            InlayHint {
                range: [311; 315),

@ -507,4 +514,41 @@ fn main() {
        "###
        );
    }

    #[test]
    fn hint_truncation() {
        let (analysis, file_id) = single_file(
            r#"
struct Smol<T>(T);

struct VeryLongOuterName<T>(T);

fn main() {
    let a = Smol(0u32);
    let b = VeryLongOuterName(0usize);
    let c = Smol(Smol(0u32))
}"#,
        );

        assert_debug_snapshot!(analysis.inlay_hints(file_id, Some(8)).unwrap(), @r###"
        [
            InlayHint {
                range: [74; 75),
                kind: TypeHint,
                label: "Smol<u32>",
            },
            InlayHint {
                range: [98; 99),
                kind: TypeHint,
                label: "VeryLongOuterName<…>",
            },
            InlayHint {
                range: [137; 138),
                kind: TypeHint,
                label: "Smol<Smol<…>>",
            },
        ]
        "###
        );
    }
}
@ -243,6 +243,34 @@ fn foo(e: Result<U, V>) {
        );
    }

    #[test]
    fn join_lines_multiline_in_block() {
        check_join_lines(
            r"
fn foo() {
    match ty {
        <|> Some(ty) => {
            match ty {
                _ => false,
            }
        }
        _ => true,
    }
}
",
            r"
fn foo() {
    match ty {
        <|> Some(ty) => match ty {
            _ => false,
        },
        _ => true,
    }
}
",
        );
    }

    #[test]
    fn join_lines_keeps_comma_for_block_in_match_arm() {
        // We already have a comma
@ -344,8 +344,14 @@ impl Analysis {
    }

    /// Returns a list of the places in the file where type hints can be displayed.
    pub fn inlay_hints(&self, file_id: FileId) -> Cancelable<Vec<InlayHint>> {
        self.with_db(|db| inlay_hints::inlay_hints(db, file_id, &db.parse(file_id).tree()))
    pub fn inlay_hints(
        &self,
        file_id: FileId,
        max_inlay_hint_length: Option<usize>,
    ) -> Cancelable<Vec<InlayHint>> {
        self.with_db(|db| {
            inlay_hints::inlay_hints(db, file_id, &db.parse(file_id).tree(), max_inlay_hint_length)
        })
    }

    /// Returns the set of folding ranges.
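Callers of `Analysis::inlay_hints` now pass the truncation limit explicitly; `None` keeps the previous behaviour. The real truncation happens in `Ty::display_truncated`, which elides generic arguments (see the `VeryLongOuterName<…>` snapshot above). As a rough, self-contained illustration of threading an `Option<usize>` limit down to a label renderer, here is a hypothetical `truncate_label`; it only shortens the string and does not reproduce the actual algorithm:

// Toy version of passing Option<usize> down to the hint-label renderer.
// `truncate_label` is a hypothetical stand-in for Ty::display_truncated.
fn truncate_label(label: &str, max_length: Option<usize>) -> String {
    match max_length {
        Some(max) if label.chars().count() > max => {
            let mut out: String = label.chars().take(max.saturating_sub(1)).collect();
            out.push('…');
            out
        }
        _ => label.to_string(),
    }
}

fn main() {
    assert_eq!(truncate_label("Smol<u32>", Some(16)), "Smol<u32>");
    assert_eq!(truncate_label("VeryLongOuterName<usize>", Some(8)), "VeryLon…");
}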
@ -10,7 +10,7 @@ pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec<Na
    let src = hir::ModuleSource::from_position(db, position);
    let module = match hir::Module::from_definition(
        db,
        hir::Source { file_id: position.file_id.into(), ast: src },
        hir::Source { file_id: position.file_id.into(), value: src },
    ) {
        None => return Vec::new(),
        Some(it) => it,

@ -23,7 +23,8 @@ pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec<Na
pub(crate) fn crate_for(db: &RootDatabase, file_id: FileId) -> Vec<CrateId> {
    let src = hir::ModuleSource::from_file_id(db, file_id);
    let module =
        match hir::Module::from_definition(db, hir::Source { file_id: file_id.into(), ast: src }) {
        match hir::Module::from_definition(db, hir::Source { file_id: file_id.into(), value: src })
        {
            Some(it) => it,
            None => return Vec::new(),
        };
@ -13,12 +13,12 @@ use crate::db::RootDatabase;

pub(crate) fn classify_name(db: &RootDatabase, name: Source<&ast::Name>) -> Option<NameDefinition> {
    let _p = profile("classify_name");
    let parent = name.ast.syntax().parent()?;
    let parent = name.value.syntax().parent()?;

    match_ast! {
        match parent {
            ast::BindPat(it) => {
                let src = name.with_ast(it);
                let src = name.with_value(it);
                let local = hir::Local::from_source(db, src)?;
                Some(NameDefinition {
                    visibility: None,

@ -28,7 +28,7 @@ pub(crate) fn classify_name(db: &RootDatabase, name: Source<&ast::Name>) -> Opti
            },
            ast::RecordFieldDef(it) => {
                let ast = hir::FieldSource::Named(it);
                let src = name.with_ast(ast);
                let src = name.with_value(ast);
                let field = hir::StructField::from_source(db, src)?;
                Some(from_struct_field(db, field))
            },

@ -36,42 +36,42 @@ pub(crate) fn classify_name(db: &RootDatabase, name: Source<&ast::Name>) -> Opti
                let def = {
                    if !it.has_semi() {
                        let ast = hir::ModuleSource::Module(it);
                        let src = name.with_ast(ast);
                        let src = name.with_value(ast);
                        hir::Module::from_definition(db, src)
                    } else {
                        let src = name.with_ast(it);
                        let src = name.with_value(it);
                        hir::Module::from_declaration(db, src)
                    }
                }?;
                Some(from_module_def(db, def.into(), None))
            },
            ast::StructDef(it) => {
                let src = name.with_ast(it);
                let src = name.with_value(it);
                let def = hir::Struct::from_source(db, src)?;
                Some(from_module_def(db, def.into(), None))
            },
            ast::EnumDef(it) => {
                let src = name.with_ast(it);
                let src = name.with_value(it);
                let def = hir::Enum::from_source(db, src)?;
                Some(from_module_def(db, def.into(), None))
            },
            ast::TraitDef(it) => {
                let src = name.with_ast(it);
                let src = name.with_value(it);
                let def = hir::Trait::from_source(db, src)?;
                Some(from_module_def(db, def.into(), None))
            },
            ast::StaticDef(it) => {
                let src = name.with_ast(it);
                let src = name.with_value(it);
                let def = hir::Static::from_source(db, src)?;
                Some(from_module_def(db, def.into(), None))
            },
            ast::EnumVariant(it) => {
                let src = name.with_ast(it);
                let src = name.with_value(it);
                let def = hir::EnumVariant::from_source(db, src)?;
                Some(from_module_def(db, def.into(), None))
            },
            ast::FnDef(it) => {
                let src = name.with_ast(it);
                let src = name.with_value(it);
                let def = hir::Function::from_source(db, src)?;
                if parent.parent().and_then(ast::ItemList::cast).is_some() {
                    Some(from_assoc_item(db, def.into()))

@ -80,7 +80,7 @@ pub(crate) fn classify_name(db: &RootDatabase, name: Source<&ast::Name>) -> Opti
                }
            },
            ast::ConstDef(it) => {
                let src = name.with_ast(it);
                let src = name.with_value(it);
                let def = hir::Const::from_source(db, src)?;
                if parent.parent().and_then(ast::ItemList::cast).is_some() {
                    Some(from_assoc_item(db, def.into()))

@ -89,7 +89,7 @@ pub(crate) fn classify_name(db: &RootDatabase, name: Source<&ast::Name>) -> Opti
                }
            },
            ast::TypeAliasDef(it) => {
                let src = name.with_ast(it);
                let src = name.with_value(it);
                let def = hir::TypeAlias::from_source(db, src)?;
                if parent.parent().and_then(ast::ItemList::cast).is_some() {
                    Some(from_assoc_item(db, def.into()))

@ -98,11 +98,11 @@ pub(crate) fn classify_name(db: &RootDatabase, name: Source<&ast::Name>) -> Opti
                }
            },
            ast::MacroCall(it) => {
                let src = name.with_ast(it);
                let src = name.with_value(it);
                let def = hir::MacroDef::from_source(db, src.clone())?;

                let module_src = ModuleSource::from_child_node(db, src.as_ref().map(|it| it.syntax()));
                let module = Module::from_definition(db, src.with_ast(module_src))?;
                let module = Module::from_definition(db, src.with_value(module_src))?;

                Some(NameDefinition {
                    visibility: None,

@ -121,7 +121,7 @@ pub(crate) fn classify_name_ref(
) -> Option<NameDefinition> {
    let _p = profile("classify_name_ref");

    let parent = name_ref.ast.syntax().parent()?;
    let parent = name_ref.value.syntax().parent()?;
    let analyzer = SourceAnalyzer::new(db, name_ref.map(|it| it.syntax()), None);

    if let Some(method_call) = ast::MethodCallExpr::cast(parent.clone()) {

@ -142,16 +142,16 @@ pub(crate) fn classify_name_ref(
        tested_by!(goto_definition_works_for_record_fields);
        if let Some(record_lit) = record_field.syntax().ancestors().find_map(ast::RecordLit::cast) {
            let variant_def = analyzer.resolve_record_literal(&record_lit)?;
            let hir_path = Path::from_name_ref(name_ref.ast);
            let hir_path = Path::from_name_ref(name_ref.value);
            let hir_name = hir_path.as_ident()?;
            let field = variant_def.field(db, hir_name)?;
            return Some(from_struct_field(db, field));
        }
    }

    let ast = ModuleSource::from_child_node(db, name_ref.with_ast(&parent));
    let ast = ModuleSource::from_child_node(db, name_ref.with_value(&parent));
    // FIXME: find correct container and visibility for each case
    let container = Module::from_definition(db, name_ref.with_ast(ast))?;
    let container = Module::from_definition(db, name_ref.with_value(ast))?;
    let visibility = None;

    if let Some(macro_call) = parent.ancestors().find_map(ast::MacroCall::cast) {

@ -162,7 +162,7 @@ pub(crate) fn classify_name_ref(
        }
    }

    let path = name_ref.ast.syntax().ancestors().find_map(ast::Path::cast)?;
    let path = name_ref.value.syntax().ancestors().find_map(ast::Path::cast)?;
    let resolved = analyzer.resolve_path(db, &path)?;
    match resolved {
        PathResolution::Def(def) => Some(from_module_def(db, def, Some(container))),
@ -32,9 +32,9 @@ pub(crate) struct NameDefinition {
pub(super) fn from_assoc_item(db: &RootDatabase, item: AssocItem) -> NameDefinition {
    let container = item.module(db);
    let visibility = match item {
        AssocItem::Function(f) => f.source(db).ast.visibility(),
        AssocItem::Const(c) => c.source(db).ast.visibility(),
        AssocItem::TypeAlias(a) => a.source(db).ast.visibility(),
        AssocItem::Function(f) => f.source(db).value.visibility(),
        AssocItem::Const(c) => c.source(db).value.visibility(),
        AssocItem::TypeAlias(a) => a.source(db).value.visibility(),
    };
    let kind = NameKind::AssocItem(item);
    NameDefinition { kind, container, visibility }

@ -45,8 +45,8 @@ pub(super) fn from_struct_field(db: &RootDatabase, field: StructField) -> NameDe
    let parent = field.parent_def(db);
    let container = parent.module(db);
    let visibility = match parent {
        VariantDef::Struct(s) => s.source(db).ast.visibility(),
        VariantDef::EnumVariant(e) => e.source(db).ast.parent_enum().visibility(),
        VariantDef::Struct(s) => s.source(db).value.visibility(),
        VariantDef::EnumVariant(e) => e.source(db).value.parent_enum().visibility(),
    };
    NameDefinition { kind, container, visibility }
}

@ -60,22 +60,22 @@ pub(super) fn from_module_def(
    let (container, visibility) = match def {
        ModuleDef::Module(it) => {
            let container = it.parent(db).or_else(|| Some(it)).unwrap();
            let visibility = it.declaration_source(db).and_then(|s| s.ast.visibility());
            let visibility = it.declaration_source(db).and_then(|s| s.value.visibility());
            (container, visibility)
        }
        ModuleDef::EnumVariant(it) => {
            let container = it.module(db);
            let visibility = it.source(db).ast.parent_enum().visibility();
            let visibility = it.source(db).value.parent_enum().visibility();
            (container, visibility)
        }
        ModuleDef::Function(it) => (it.module(db), it.source(db).ast.visibility()),
        ModuleDef::Const(it) => (it.module(db), it.source(db).ast.visibility()),
        ModuleDef::Static(it) => (it.module(db), it.source(db).ast.visibility()),
        ModuleDef::Trait(it) => (it.module(db), it.source(db).ast.visibility()),
        ModuleDef::TypeAlias(it) => (it.module(db), it.source(db).ast.visibility()),
        ModuleDef::Adt(Adt::Struct(it)) => (it.module(db), it.source(db).ast.visibility()),
        ModuleDef::Adt(Adt::Union(it)) => (it.module(db), it.source(db).ast.visibility()),
        ModuleDef::Adt(Adt::Enum(it)) => (it.module(db), it.source(db).ast.visibility()),
        ModuleDef::Function(it) => (it.module(db), it.source(db).value.visibility()),
        ModuleDef::Const(it) => (it.module(db), it.source(db).value.visibility()),
        ModuleDef::Static(it) => (it.module(db), it.source(db).value.visibility()),
        ModuleDef::Trait(it) => (it.module(db), it.source(db).value.visibility()),
        ModuleDef::TypeAlias(it) => (it.module(db), it.source(db).value.visibility()),
        ModuleDef::Adt(Adt::Struct(it)) => (it.module(db), it.source(db).value.visibility()),
        ModuleDef::Adt(Adt::Union(it)) => (it.module(db), it.source(db).value.visibility()),
        ModuleDef::Adt(Adt::Enum(it)) => (it.module(db), it.source(db).value.visibility()),
        ModuleDef::BuiltinType(..) => (module.unwrap(), None),
    };
    NameDefinition { kind, container, visibility }
@ -55,11 +55,11 @@ fn rename_mod(
) -> Option<SourceChange> {
    let mut source_file_edits = Vec::new();
    let mut file_system_edits = Vec::new();
    let module_src = hir::Source { file_id: position.file_id.into(), ast: ast_module.clone() };
    let module_src = hir::Source { file_id: position.file_id.into(), value: ast_module.clone() };
    if let Some(module) = hir::Module::from_declaration(db, module_src) {
        let src = module.definition_source(db);
        let file_id = src.file_id.original_file(db);
        match src.ast {
        match src.value {
            ModuleSource::SourceFile(..) => {
                let mod_path: RelativePathBuf = db.file_relative_path(file_id);
                // mod is defined in path/to/dir/mod.rs

@ -121,140 +121,8 @@ mod tests {

    use crate::{
        mock_analysis::analysis_and_position, mock_analysis::single_file_with_position, FileId,
        ReferenceSearchResult,
    };

    #[test]
    fn test_find_all_refs_for_local() {
        let code = r#"
            fn main() {
                let mut i = 1;
                let j = 1;
                i = i<|> + j;

                {
                    i = 0;
                }

                i = 5;
            }"#;

        let refs = get_all_refs(code);
        assert_eq!(refs.len(), 5);
    }

    #[test]
    fn test_find_all_refs_for_param_inside() {
        let code = r#"
            fn foo(i : u32) -> u32 {
                i<|>
            }"#;

        let refs = get_all_refs(code);
        assert_eq!(refs.len(), 2);
    }

    #[test]
    fn test_find_all_refs_for_fn_param() {
        let code = r#"
            fn foo(i<|> : u32) -> u32 {
                i
            }"#;

        let refs = get_all_refs(code);
        assert_eq!(refs.len(), 2);
    }

    #[test]
    fn test_find_all_refs_field_name() {
        let code = r#"
            //- /lib.rs
            struct Foo {
                pub spam<|>: u32,
            }

            fn main(s: Foo) {
                let f = s.spam;
            }
        "#;

        let refs = get_all_refs(code);
        assert_eq!(refs.len(), 2);
    }

    #[test]
    fn test_find_all_refs_impl_item_name() {
        let code = r#"
            //- /lib.rs
            struct Foo;
            impl Foo {
                fn f<|>(&self) { }
            }
        "#;

        let refs = get_all_refs(code);
        assert_eq!(refs.len(), 1);
    }

    #[test]
    fn test_find_all_refs_enum_var_name() {
        let code = r#"
            //- /lib.rs
            enum Foo {
                A,
                B<|>,
                C,
            }
        "#;

        let refs = get_all_refs(code);
        assert_eq!(refs.len(), 1);
    }

    #[test]
    fn test_find_all_refs_modules() {
        let code = r#"
            //- /lib.rs
            pub mod foo;
            pub mod bar;

            fn f() {
                let i = foo::Foo { n: 5 };
            }

            //- /foo.rs
            use crate::bar;

            pub struct Foo {
                pub n: u32,
            }

            fn f() {
                let i = bar::Bar { n: 5 };
            }

            //- /bar.rs
            use crate::foo;

            pub struct Bar {
                pub n: u32,
            }

            fn f() {
                let i = foo::Foo<|> { n: 5 };
            }
        "#;

        let (analysis, pos) = analysis_and_position(code);
        let refs = analysis.find_all_refs(pos, None).unwrap().unwrap();
        assert_eq!(refs.len(), 3);
    }

    fn get_all_refs(text: &str) -> ReferenceSearchResult {
        let (analysis, position) = single_file_with_position(text);
        analysis.find_all_refs(position, None).unwrap().unwrap()
    }

    #[test]
    fn test_rename_for_local() {
        test_rename(
@ -73,9 +73,9 @@ impl NameDefinition {

        if let NameKind::Local(var) = self.kind {
            let range = match var.parent(db) {
                DefWithBody::Function(f) => f.source(db).ast.syntax().text_range(),
                DefWithBody::Const(c) => c.source(db).ast.syntax().text_range(),
                DefWithBody::Static(s) => s.source(db).ast.syntax().text_range(),
                DefWithBody::Function(f) => f.source(db).value.syntax().text_range(),
                DefWithBody::Const(c) => c.source(db).value.syntax().text_range(),
                DefWithBody::Static(s) => s.source(db).value.syntax().text_range(),
            };
            let mut res = FxHashMap::default();
            res.insert(file_id, Some(range));

@ -91,7 +91,7 @@ impl NameDefinition {
            let parent_src = parent_module.definition_source(db);
            let file_id = parent_src.file_id.original_file(db);

            match parent_src.ast {
            match parent_src.value {
                ModuleSource::Module(m) => {
                    let range = Some(m.syntax().text_range());
                    res.insert(file_id, range);

@ -135,7 +135,7 @@ impl NameDefinition {
        }

        let mut res = FxHashMap::default();
        let range = match module_src.ast {
        let range = match module_src.value {
            ModuleSource::Module(m) => Some(m.syntax().text_range()),
            ModuleSource::SourceFile(_) => None,
        };
@@ -29,6 +29,8 @@ pub struct ServerConfig {
    pub lru_capacity: Option<usize>,

    pub max_inlay_hint_length: Option<usize>,

    /// For internal usage to make integrated tests faster.
    #[serde(deserialize_with = "nullable_bool_true")]
    pub with_sysroot: bool,

@@ -44,6 +46,7 @@ impl Default for ServerConfig {
            exclude_globs: Vec::new(),
            use_client_watching: false,
            lru_capacity: None,
            max_inlay_hint_length: None,
            with_sysroot: true,
            feature_flags: FxHashMap::default(),
        }

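Judging by the `initializationOptions` hunk later in this diff, the new field arrives from the client as `maxInlayHintLength`, so the config struct presumably relies on the usual camelCase serde renaming. A small sketch of that round trip using a cut-down stand-in for `ServerConfig` (the stand-in name and exact attributes are assumptions):

    use serde::Deserialize;

    // Cut-down stand-in for ServerConfig, just to show how the new option is
    // expected to be read out of the client's initializationOptions JSON.
    #[derive(Deserialize, Debug, Default)]
    #[serde(rename_all = "camelCase", default)]
    struct ConfigSketch {
        lru_capacity: Option<usize>,
        max_inlay_hint_length: Option<usize>,
    }

    fn main() {
        let cfg: ConfigSketch =
            serde_json::from_str(r#"{ "maxInlayHintLength": 20 }"#).unwrap();
        assert_eq!(cfg.max_inlay_hint_length, Some(20));
        assert_eq!(cfg.lru_capacity, None);
    }
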
@@ -123,6 +123,7 @@ pub fn main_loop(
                .and_then(|it| it.folding_range.as_ref())
                .and_then(|it| it.line_folding_only)
                .unwrap_or(false),
            max_inlay_hint_length: config.max_inlay_hint_length,
        }
    };

@@ -888,7 +888,7 @@ pub fn handle_inlay_hints(
    let analysis = world.analysis();
    let line_index = analysis.file_line_index(file_id)?;
    Ok(analysis
        .inlay_hints(file_id)?
        .inlay_hints(file_id, world.options.max_inlay_hint_length)?
        .into_iter()
        .map(|api_type| InlayHint {
            label: api_type.label.to_string(),

@@ -28,6 +28,7 @@ pub struct Options {
    pub publish_decorations: bool,
    pub supports_location_link: bool,
    pub line_folding_only: bool,
    pub max_inlay_hint_length: Option<usize>,
}

/// `WorldState` is the primary mutable state of the language server

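With the limit now passed into `inlay_hints`, the truncation that the TypeScript client used to do (see the removed `truncateHint` method further down) presumably happens on the server side, wherever the hint label is produced. That code is not part of this diff; a minimal sketch of what such a helper could look like (`truncate_hint_label` is an assumed name):

    // Hypothetical helper: clip an inlay-hint label to `max_length` characters,
    // appending an ellipsis when anything was cut off.
    fn truncate_hint_label(label: &str, max_length: Option<usize>) -> String {
        match max_length {
            Some(max) if label.chars().count() > max => {
                let mut clipped: String = label.chars().take(max).collect();
                clipped.push('…');
                clipped
            }
            _ => label.to_string(),
        }
    }

    fn main() {
        assert_eq!(truncate_hint_label("Vec<String>", None), "Vec<String>");
        assert_eq!(truncate_hint_label("Vec<String>", Some(3)), "Vec…");
    }
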
@@ -3625,8 +3625,11 @@ impl AstNode for TypeParam {
impl ast::NameOwner for TypeParam {}
impl ast::AttrsOwner for TypeParam {}
impl ast::TypeBoundsOwner for TypeParam {}
impl ast::DefaultTypeParamOwner for TypeParam {}
impl TypeParam {}
impl TypeParam {
    pub fn default_type(&self) -> Option<TypeRef> {
        AstChildren::new(&self.syntax).next()
    }
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct TypeParamList {
    pub(crate) syntax: SyntaxNode,

@@ -163,9 +163,3 @@ impl Iterator for CommentIter {
        self.iter.by_ref().find_map(|el| el.into_token().and_then(ast::Comment::cast))
    }
}

pub trait DefaultTypeParamOwner: AstNode {
    fn default_type(&self) -> Option<ast::PathType> {
        child_opt(self)
    }
}

@@ -587,7 +587,10 @@ Grammar(
                ("lifetime_params", "LifetimeParam" ),
            ]
        ),
        "TypeParam": ( traits: ["NameOwner", "AttrsOwner", "TypeBoundsOwner", "DefaultTypeParamOwner"] ),
        "TypeParam": (
            options: [("default_type", "TypeRef")],
            traits: ["NameOwner", "AttrsOwner", "TypeBoundsOwner"],
        ),
        "LifetimeParam": (
            traits: ["AttrsOwner"],
        ),

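Taken together, the three hunks above replace the hand-written `DefaultTypeParamOwner` trait with a `default_type` accessor (now returning `TypeRef`) generated directly from `grammar.ron`. A sketch of how a caller might use it, assuming the `ra_syntax` parsing entry points of this revision; the snippet itself is illustrative rather than taken from the diff:

    use ra_syntax::{ast, AstNode, SourceFile};

    fn main() {
        let parse = SourceFile::parse("struct S<T = u32>(T);");
        let file = parse.tree();
        // Grab the first type parameter and read its default through the new
        // generated accessor instead of the removed DefaultTypeParamOwner trait.
        let param = file.syntax().descendants().find_map(ast::TypeParam::cast).unwrap();
        let default = param.default_type().expect("the type parameter has a default");
        assert_eq!(default.syntax().text().to_string(), "u32");
    }
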
@@ -43,7 +43,7 @@ impl SyntaxNodePtr {
}

/// Like `SyntaxNodePtr`, but remembers the type of node
#[derive(Debug, PartialEq, Eq, Hash)]
#[derive(Debug, Hash)]
pub struct AstPtr<N: AstNode> {
    raw: SyntaxNodePtr,
    _ty: PhantomData<fn() -> N>,

@@ -56,6 +56,14 @@ impl<N: AstNode> Clone for AstPtr<N> {
    }
}

impl<N: AstNode> Eq for AstPtr<N> {}

impl<N: AstNode> PartialEq for AstPtr<N> {
    fn eq(&self, other: &AstPtr<N>) -> bool {
        self.raw == other.raw
    }
}

impl<N: AstNode> AstPtr<N> {
    pub fn new(node: &N) -> AstPtr<N> {
        AstPtr { raw: SyntaxNodePtr::new(node.syntax()), _ty: PhantomData }

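The point of the derive change above: `derive(PartialEq, Eq)` on a generic struct adds `N: PartialEq`/`N: Eq` bounds even though only `raw` is ever compared, so the manual impls keep `AstPtr<N>` comparable for any node type. The same pattern in isolation, with stand-in types rather than the real ones:

    use std::marker::PhantomData;

    // Stand-ins for SyntaxNodePtr / AstPtr, just to show the bound-free pattern.
    #[derive(Debug, PartialEq, Eq, Hash)]
    struct RawPtr(u32);

    struct Ptr<N> {
        raw: RawPtr,
        _ty: PhantomData<fn() -> N>,
    }

    // Comparing only `raw` means `Ptr<N>` is PartialEq/Eq even when `N` is not;
    // a derive would have required `N: PartialEq` (and `N: Eq`).
    impl<N> PartialEq for Ptr<N> {
        fn eq(&self, other: &Ptr<N>) -> bool {
            self.raw == other.raw
        }
    }
    impl<N> Eq for Ptr<N> {}

    fn main() {
        struct NotComparable;
        let a = Ptr::<NotComparable> { raw: RawPtr(1), _ty: PhantomData };
        let b = Ptr::<NotComparable> { raw: RawPtr(1), _ty: PhantomData };
        assert!(a == b);
    }
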
@@ -136,7 +136,7 @@ Installation:
  [ra-emacs-lsp.el](https://github.com/rust-analyzer/rust-analyzer/blob/69ee5c9c5ef212f7911028c9ddf581559e6565c3/editors/emacs/ra-emacs-lsp.el)
  to load path and require it in `init.el`
* run `lsp` in a rust buffer
* (Optionally) bind commands like `rust-analyzer-join-lines` or `rust-analyzer-extend-selection` to keys, and enable `rust-analyzer-inlay-hints-mode` to get inline type hints
* (Optionally) bind commands like `rust-analyzer-join-lines`, `rust-analyzer-extend-selection` and `rust-analyzer-expand-macro` to keys, and enable `rust-analyzer-inlay-hints-mode` to get inline type hints

## Vim and NeoVim

@@ -87,7 +87,7 @@ export class HintsUpdater {
                range: hint.range,
                renderOptions: {
                    after: {
                        contentText: `: ${this.truncateHint(hint.label)}`
                        contentText: `: ${hint.label}`
                    }
                }
            }));

@@ -98,18 +98,6 @@ export class HintsUpdater {
        }
    }

    private truncateHint(label: string): string {
        if (!Server.config.maxInlayHintLength) {
            return label;
        }

        let newLabel = label.substring(0, Server.config.maxInlayHintLength);
        if (label.length > Server.config.maxInlayHintLength) {
            newLabel += '…';
        }
        return newLabel;
    }

    private async queryHints(documentUri: string): Promise<InlayHint[] | null> {
        const request: InlayHintsParams = {
            textDocument: { uri: documentUri }

@@ -43,6 +43,7 @@ export class Server {
            initializationOptions: {
                publishDecorations: true,
                lruCapacity: Server.config.lruCapacity,
                maxInlayHintLength: Server.config.maxInlayHintLength,
                excludeGlobs: Server.config.excludeGlobs,
                useClientWatching: Server.config.useClientWatching,
                featureFlags: Server.config.featureFlags

@@ -16,6 +16,7 @@
;; - implements joinLines (you need to bind rust-analyzer-join-lines to a key)
;; - implements selectionRanges (either bind lsp-extend-selection to a key, or use expand-region)
;; - provides rust-analyzer-inlay-hints-mode for inline type hints
;; - provides rust-analyzer-expand-macro to expand macros

;; What's missing:
;; - file system changes in apply-source-change

@@ -247,5 +248,32 @@
      (remove-hook 'after-change-functions #'rust-analyzer--inlay-hints-change-handler t))))


;; expand macros
(defun rust-analyzer-expand-macro ()
  "Expands the macro call at point recursively."
  (interactive)
  (when (eq 'rust-mode major-mode)
    (let* ((workspace (lsp-find-workspace 'rust-analyzer (buffer-file-name)))
           (params (list :textDocument (lsp--text-document-identifier)
                         :position (lsp--cur-position))))
      (when workspace
        (let* ((response (with-lsp-workspace workspace
                           (lsp-send-request (lsp-make-request
                                              "rust-analyzer/expandMacro"
                                              params))))
               (result (when response (ht-get response "expansion"))))
          (if result
              (let ((buf (get-buffer-create (concat "*rust-analyzer macro expansion " (with-lsp-workspace workspace (lsp-workspace-root)) "*"))))
                (with-current-buffer buf
                  (let ((inhibit-read-only t))
                    (erase-buffer)
                    (insert result)
                    (setq buffer-read-only t)
                    (special-mode)))
                (pop-to-buffer buf))
            (message "No macro found at point, or it could not be expanded")))))))

(provide 'ra-emacs-lsp)
;;; ra-emacs-lsp.el ends here

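On the wire, `rust-analyzer-expand-macro` issues the custom `rust-analyzer/expandMacro` request and only reads the `expansion` string out of the reply. A minimal sketch of the response shape the command relies on; any other fields the server may include are not visible in this diff:

    use serde::Serialize;

    // Only the field the elisp command above actually reads.
    #[derive(Serialize)]
    struct ExpandMacroResponse {
        expansion: String,
    }

    fn main() {
        let reply = ExpandMacroResponse { expansion: "fn generated() {}".to_string() };
        // Prints: {"expansion":"fn generated() {}"}
        println!("{}", serde_json::to_string(&reply).unwrap());
    }
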
@@ -5,6 +5,9 @@ version = "0.1.0"
authors = ["rust-analyzer developers"]
publish = false

[lib]
doctest = false

[dependencies]
walkdir = "2.1.3"
pico-args = "0.3.0"

@@ -9,11 +9,10 @@
//! `.cargo/config`.
mod help;

use std::{env, fmt::Write, path::PathBuf, str};

use anyhow::Context;
use core::fmt::Write;
use core::str;
use pico_args::Arguments;
use std::{env, path::PathBuf};
use xtask::{
    codegen::{self, Mode},
    install_pre_commit_hook, reformat_staged_files, run, run_clippy, run_fuzzer, run_rustfmt,

@@ -37,7 +36,7 @@ struct ServerOpt {
}

fn main() -> Result<()> {
    if std::env::args().next().map(|it| it.contains("pre-commit")) == Some(true) {
    if env::args().next().map(|it| it.contains("pre-commit")) == Some(true) {
        return reformat_staged_files();
    }

@@ -174,7 +173,7 @@ fn fix_path_for_mac() -> Result<()> {
fn install_client(ClientOpt::VsCode: ClientOpt) -> Result<()> {
    let npm_version = Cmd {
        unix: r"npm --version",
        windows: r"cmd.exe /c npm.cmd --version",
        windows: r"cmd.exe /c npm --version",
        work_dir: "./editors/code",
    }
    .run();

@@ -183,10 +182,10 @@ fn install_client(ClientOpt::VsCode: ClientOpt) -> Result<()> {
        eprintln!("\nERROR: `npm --version` failed, `npm` is required to build the VS Code plugin")
    }

    Cmd { unix: r"npm ci", windows: r"cmd.exe /c npm.cmd ci", work_dir: "./editors/code" }.run()?;
    Cmd { unix: r"npm ci", windows: r"cmd.exe /c npm ci", work_dir: "./editors/code" }.run()?;
    Cmd {
        unix: r"npm run package --scripts-prepend-node-path",
        windows: r"cmd.exe /c npm.cmd run package",
        windows: r"cmd.exe /c npm run package",
        work_dir: "./editors/code",
    }
    .run()?;