332: Struct types r=matklad a=flodiebold

Infer types for struct fields, and add basic field completions. There's also some code for enums, but I focused on getting structs working.

There's still a long way to go before this becomes useful: there's no autoderef (or even reference types) and no inference for `self`, for example.
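Roughly, the scope after this change looks like the sketch below (the struct and field names mirror the new tests; the `&A` case is not a test, just an illustration of the missing autoderef, based on the new `complete_fields` and field-expression inference, which only handle `Ty::Adt` and `Ty::Tuple` receivers):

struct A { the_field: u32 }

fn works(a: A, t: (u32, i32)) {
    a.the_field; // receiver infers to `A`, so `a.` offers `the_field`, typed as `u32`
    t.0;         // tuple receivers get their numeric fields completed and typed by index
}

fn not_yet(a: &A) {
    a.the_field; // receiver is `&A`; without autoderef, no fields are offered yet
}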

Co-authored-by: Florian Diebold <flodiebold@gmail.com>
bors[bot] 2018-12-27 10:08:34 +00:00
commit 1d6dcef5c5
30 changed files with 1519 additions and 319 deletions


@ -1,6 +1,7 @@
mod completion_item; mod completion_item;
mod completion_context; mod completion_context;
mod complete_dot;
mod complete_fn_param; mod complete_fn_param;
mod complete_keyword; mod complete_keyword;
mod complete_snippet; mod complete_snippet;
@ -20,13 +21,13 @@ use crate::{
pub use crate::completion::completion_item::{CompletionItem, InsertText, CompletionItemKind}; pub use crate::completion::completion_item::{CompletionItem, InsertText, CompletionItemKind};
/// Main entry point for copmletion. We run comletion as a two-phase process. /// Main entry point for completion. We run completion as a two-phase process.
/// ///
/// First, we look at the position and collect a so-called `CompletionContext. /// First, we look at the position and collect a so-called `CompletionContext.
/// This is a somewhat messy process, because, during completion, syntax tree is /// This is a somewhat messy process, because, during completion, syntax tree is
/// incomplete and can look readlly weired. /// incomplete and can look really weird.
/// ///
/// Once the context is collected, we run a series of completion routines whihc /// Once the context is collected, we run a series of completion routines which
/// look at the context and produce completion items. /// look at the context and produce completion items.
pub(crate) fn completions( pub(crate) fn completions(
db: &db::RootDatabase, db: &db::RootDatabase,
@ -43,6 +44,7 @@ pub(crate) fn completions(
complete_snippet::complete_item_snippet(&mut acc, &ctx); complete_snippet::complete_item_snippet(&mut acc, &ctx);
complete_path::complete_path(&mut acc, &ctx)?; complete_path::complete_path(&mut acc, &ctx)?;
complete_scope::complete_scope(&mut acc, &ctx)?; complete_scope::complete_scope(&mut acc, &ctx)?;
complete_dot::complete_dot(&mut acc, &ctx)?;
Ok(Some(acc)) Ok(Some(acc))
} }


@ -0,0 +1,98 @@
use ra_syntax::ast::AstNode;
use hir::{Ty, Def};
use crate::Cancelable;
use crate::completion::{CompletionContext, Completions, CompletionKind, CompletionItem, CompletionItemKind};
/// Complete dot accesses, i.e. fields or methods (currently only fields).
pub(super) fn complete_dot(acc: &mut Completions, ctx: &CompletionContext) -> Cancelable<()> {
let module = if let Some(module) = &ctx.module {
module
} else {
return Ok(());
};
let function = if let Some(fn_def) = ctx.enclosing_fn {
hir::source_binder::function_from_module(ctx.db, module, fn_def)
} else {
return Ok(());
};
let receiver = if let Some(receiver) = ctx.dot_receiver {
receiver
} else {
return Ok(());
};
let infer_result = function.infer(ctx.db)?;
let receiver_ty = if let Some(ty) = infer_result.type_of_node(receiver.syntax()) {
ty
} else {
return Ok(());
};
if !ctx.is_method_call {
complete_fields(acc, ctx, receiver_ty)?;
}
Ok(())
}
fn complete_fields(acc: &mut Completions, ctx: &CompletionContext, receiver: Ty) -> Cancelable<()> {
// TODO: autoderef etc.
match receiver {
Ty::Adt { def_id, .. } => {
match def_id.resolve(ctx.db)? {
Def::Struct(s) => {
let variant_data = s.variant_data(ctx.db)?;
for field in variant_data.fields() {
CompletionItem::new(CompletionKind::Reference, field.name().to_string())
.kind(CompletionItemKind::Field)
.add_to(acc);
}
}
// TODO unions
_ => {}
}
}
Ty::Tuple(fields) => {
for (i, _ty) in fields.iter().enumerate() {
CompletionItem::new(CompletionKind::Reference, i.to_string())
.kind(CompletionItemKind::Field)
.add_to(acc);
}
}
_ => {}
};
Ok(())
}
#[cfg(test)]
mod tests {
use crate::completion::*;
fn check_ref_completion(code: &str, expected_completions: &str) {
check_completion(code, expected_completions, CompletionKind::Reference);
}
#[test]
fn test_struct_field_completion() {
check_ref_completion(
r"
struct A { the_field: u32 }
fn foo(a: A) {
a.<|>
}
",
r#"the_field"#,
);
}
#[test]
fn test_no_struct_field_completion_for_method_call() {
check_ref_completion(
r"
struct A { the_field: u32 }
fn foo(a: A) {
a.<|>()
}
",
r#""#,
);
}
}
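`complete_fields` also handles `Ty::Tuple` receivers by offering the indices as fields. A hypothetical test in the same style (not part of this change; it assumes the expected-output format matches the single-item case above) might look like:

#[test]
fn test_tuple_field_completion() {
    check_ref_completion(
        r"
        fn foo(t: (u32,)) {
            t.<|>
        }
        ",
        r#"0"#,
    );
}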


@ -8,7 +8,7 @@ pub(super) fn complete_path(acc: &mut Completions, ctx: &CompletionContext) -> C
(Some(path), Some(module)) => (path.clone(), module), (Some(path), Some(module)) => (path.clone(), module),
_ => return Ok(()), _ => return Ok(()),
}; };
let def_id = match module.resolve_path(ctx.db, path)? { let def_id = match module.resolve_path(ctx.db, &path)?.take_types() {
Some(it) => it, Some(it) => it,
None => return Ok(()), None => return Ok(()),
}; };


@ -1,12 +1,13 @@
use ra_editor::find_node_at_offset; use ra_editor::find_node_at_offset;
use ra_text_edit::AtomTextEdit; use ra_text_edit::AtomTextEdit;
use ra_syntax::{ use ra_syntax::{
algo::find_leaf_at_offset, algo::{find_leaf_at_offset, find_covering_node},
ast, ast,
AstNode, AstNode,
SyntaxNodeRef, SyntaxNodeRef,
SourceFileNode, SourceFileNode,
TextUnit, TextUnit,
TextRange,
SyntaxKind::*, SyntaxKind::*,
}; };
use hir::source_binder; use hir::source_binder;
@ -31,6 +32,10 @@ pub(super) struct CompletionContext<'a> {
pub(super) is_stmt: bool, pub(super) is_stmt: bool,
/// Something is typed at the "top" level, in module or impl/trait. /// Something is typed at the "top" level, in module or impl/trait.
pub(super) is_new_item: bool, pub(super) is_new_item: bool,
/// The receiver if this is a field or method access, i.e. writing something.<|>
pub(super) dot_receiver: Option<ast::Expr<'a>>,
/// If this is a method call in particular, i.e. the () are already there.
pub(super) is_method_call: bool,
} }
impl<'a> CompletionContext<'a> { impl<'a> CompletionContext<'a> {
@ -54,12 +59,14 @@ impl<'a> CompletionContext<'a> {
after_if: false, after_if: false,
is_stmt: false, is_stmt: false,
is_new_item: false, is_new_item: false,
dot_receiver: None,
is_method_call: false,
}; };
ctx.fill(original_file, position.offset); ctx.fill(original_file, position.offset);
Ok(Some(ctx)) Ok(Some(ctx))
} }
fn fill(&mut self, original_file: &SourceFileNode, offset: TextUnit) { fn fill(&mut self, original_file: &'a SourceFileNode, offset: TextUnit) {
// Insert a fake ident to get a valid parse tree. We will use this file // Insert a fake ident to get a valid parse tree. We will use this file
// to determine context, though the original_file will be used for // to determine context, though the original_file will be used for
// actual completion. // actual completion.
@ -76,7 +83,7 @@ impl<'a> CompletionContext<'a> {
self.is_param = true; self.is_param = true;
return; return;
} }
self.classify_name_ref(&file, name_ref); self.classify_name_ref(original_file, name_ref);
} }
// Otherwise, see if this is a declaration. We can use heuristics to // Otherwise, see if this is a declaration. We can use heuristics to
@ -88,7 +95,7 @@ impl<'a> CompletionContext<'a> {
} }
} }
} }
fn classify_name_ref(&mut self, file: &SourceFileNode, name_ref: ast::NameRef) { fn classify_name_ref(&mut self, original_file: &'a SourceFileNode, name_ref: ast::NameRef) {
let name_range = name_ref.syntax().range(); let name_range = name_ref.syntax().range();
let top_node = name_ref let top_node = name_ref
.syntax() .syntax()
@ -105,6 +112,12 @@ impl<'a> CompletionContext<'a> {
_ => (), _ => (),
} }
self.enclosing_fn = self
.leaf
.ancestors()
.take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE)
.find_map(ast::FnDef::cast);
let parent = match name_ref.syntax().parent() { let parent = match name_ref.syntax().parent() {
Some(it) => it, Some(it) => it,
None => return, None => return,
@ -120,11 +133,6 @@ impl<'a> CompletionContext<'a> {
} }
if path.qualifier().is_none() { if path.qualifier().is_none() {
self.is_trivial_path = true; self.is_trivial_path = true;
self.enclosing_fn = self
.leaf
.ancestors()
.take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE)
.find_map(ast::FnDef::cast);
self.is_stmt = match name_ref self.is_stmt = match name_ref
.syntax() .syntax()
@ -137,7 +145,9 @@ impl<'a> CompletionContext<'a> {
}; };
if let Some(off) = name_ref.syntax().range().start().checked_sub(2.into()) { if let Some(off) = name_ref.syntax().range().start().checked_sub(2.into()) {
if let Some(if_expr) = find_node_at_offset::<ast::IfExpr>(file.syntax(), off) { if let Some(if_expr) =
find_node_at_offset::<ast::IfExpr>(original_file.syntax(), off)
{
if if_expr.syntax().range().end() < name_ref.syntax().range().start() { if if_expr.syntax().range().end() < name_ref.syntax().range().start() {
self.after_if = true; self.after_if = true;
} }
@ -145,9 +155,33 @@ impl<'a> CompletionContext<'a> {
} }
} }
} }
if let Some(field_expr) = ast::FieldExpr::cast(parent) {
// The receiver comes before the point of insertion of the fake
// ident, so it should have the same range in the non-modified file
self.dot_receiver = field_expr
.expr()
.map(|e| e.syntax().range())
.and_then(|r| find_node_with_range(original_file.syntax(), r));
}
if let Some(method_call_expr) = ast::MethodCallExpr::cast(parent) {
// As above
self.dot_receiver = method_call_expr
.expr()
.map(|e| e.syntax().range())
.and_then(|r| find_node_with_range(original_file.syntax(), r));
self.is_method_call = true;
}
} }
} }
fn find_node_with_range<'a, N: AstNode<'a>>(
syntax: SyntaxNodeRef<'a>,
range: TextRange,
) -> Option<N> {
let node = find_covering_node(syntax, range);
node.ancestors().find_map(N::cast)
}
fn is_node<'a, N: AstNode<'a>>(node: SyntaxNodeRef<'a>) -> bool { fn is_node<'a, N: AstNode<'a>>(node: SyntaxNodeRef<'a>) -> bool {
match node.ancestors().filter_map(N::cast).next() { match node.ancestors().filter_map(N::cast).next() {
None => false, None => false,


@ -1,5 +1,7 @@
use crate::db; use crate::db;
use hir::PerNs;
/// `CompletionItem` describes a single completion variant in the editor pop-up. /// `CompletionItem` describes a single completion variant in the editor pop-up.
/// It is basically a POD with various properties. To construct a /// It is basically a POD with various properties. To construct a
/// `CompletionItem`, use `new` method and the `Builder` struct. /// `CompletionItem`, use `new` method and the `Builder` struct.
@ -25,7 +27,10 @@ pub enum CompletionItemKind {
Keyword, Keyword,
Module, Module,
Function, Function,
Struct,
Enum,
Binding, Binding,
Field,
} }
#[derive(Debug, PartialEq, Eq)] #[derive(Debug, PartialEq, Eq)]
@ -117,16 +122,27 @@ impl Builder {
db: &db::RootDatabase, db: &db::RootDatabase,
resolution: &hir::Resolution, resolution: &hir::Resolution,
) -> Builder { ) -> Builder {
if let Some(def_id) = resolution.def_id { let resolved = resolution.def_id.and_then(|d| d.resolve(db).ok());
if let Ok(def) = def_id.resolve(db) { let kind = match resolved {
let kind = match def { PerNs {
hir::Def::Module(..) => CompletionItemKind::Module, types: Some(hir::Def::Module(..)),
hir::Def::Function(..) => CompletionItemKind::Function, ..
_ => return self, } => CompletionItemKind::Module,
}; PerNs {
self.kind = Some(kind); types: Some(hir::Def::Struct(..)),
} ..
} } => CompletionItemKind::Struct,
PerNs {
types: Some(hir::Def::Enum(..)),
..
} => CompletionItemKind::Enum,
PerNs {
values: Some(hir::Def::Function(..)),
..
} => CompletionItemKind::Function,
_ => return self,
};
self.kind = Some(kind);
self self
} }
} }


@ -95,6 +95,9 @@ salsa::database_storage! {
fn submodules() for hir::db::SubmodulesQuery; fn submodules() for hir::db::SubmodulesQuery;
fn infer() for hir::db::InferQuery; fn infer() for hir::db::InferQuery;
fn type_for_def() for hir::db::TypeForDefQuery; fn type_for_def() for hir::db::TypeForDefQuery;
fn type_for_field() for hir::db::TypeForFieldQuery;
fn struct_data() for hir::db::StructDataQuery;
fn enum_data() for hir::db::EnumDataQuery;
} }
} }
} }

crates/ra_hir/src/adt.rs (new file, 194 lines)

@ -0,0 +1,194 @@
use std::sync::Arc;
use ra_syntax::{SmolStr, ast::{self, NameOwner, StructFlavor}};
use crate::{
DefId, Cancelable,
db::{HirDatabase},
type_ref::TypeRef,
};
pub struct Struct {
def_id: DefId,
}
impl Struct {
pub(crate) fn new(def_id: DefId) -> Self {
Struct { def_id }
}
pub fn def_id(&self) -> DefId {
self.def_id
}
pub fn variant_data(&self, db: &impl HirDatabase) -> Cancelable<Arc<VariantData>> {
Ok(db.struct_data(self.def_id)?.variant_data.clone())
}
pub fn struct_data(&self, db: &impl HirDatabase) -> Cancelable<Arc<StructData>> {
Ok(db.struct_data(self.def_id)?)
}
pub fn name(&self, db: &impl HirDatabase) -> Cancelable<Option<SmolStr>> {
Ok(db.struct_data(self.def_id)?.name.clone())
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct StructData {
name: Option<SmolStr>,
variant_data: Arc<VariantData>,
}
impl StructData {
pub(crate) fn new(struct_def: ast::StructDef) -> StructData {
let name = struct_def.name().map(|n| n.text());
let variant_data = VariantData::new(struct_def.flavor());
let variant_data = Arc::new(variant_data);
StructData { name, variant_data }
}
pub fn name(&self) -> Option<&SmolStr> {
self.name.as_ref()
}
pub fn variant_data(&self) -> &Arc<VariantData> {
&self.variant_data
}
}
pub struct Enum {
def_id: DefId,
}
impl Enum {
pub(crate) fn new(def_id: DefId) -> Self {
Enum { def_id }
}
pub fn def_id(&self) -> DefId {
self.def_id
}
pub fn name(&self, db: &impl HirDatabase) -> Cancelable<Option<SmolStr>> {
Ok(db.enum_data(self.def_id)?.name.clone())
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct EnumData {
name: Option<SmolStr>,
variants: Vec<(SmolStr, Arc<VariantData>)>,
}
impl EnumData {
pub(crate) fn new(enum_def: ast::EnumDef) -> Self {
let name = enum_def.name().map(|n| n.text());
let variants = if let Some(evl) = enum_def.variant_list() {
evl.variants()
.map(|v| {
(
v.name()
.map(|n| n.text())
.unwrap_or_else(|| SmolStr::new("[error]")),
Arc::new(VariantData::new(v.flavor())),
)
})
.collect()
} else {
Vec::new()
};
EnumData { name, variants }
}
}
/// A single field of an enum variant or struct
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct StructField {
name: SmolStr,
type_ref: TypeRef,
}
impl StructField {
pub fn name(&self) -> SmolStr {
self.name.clone()
}
pub fn type_ref(&self) -> &TypeRef {
&self.type_ref
}
}
/// Fields of an enum variant or struct
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum VariantData {
Struct(Vec<StructField>),
Tuple(Vec<StructField>),
Unit,
}
impl VariantData {
pub fn new(flavor: StructFlavor) -> Self {
match flavor {
StructFlavor::Tuple(fl) => {
let fields = fl
.fields()
.enumerate()
.map(|(i, fd)| StructField {
name: SmolStr::new(i.to_string()),
type_ref: TypeRef::from_ast_opt(fd.type_ref()),
})
.collect();
VariantData::Tuple(fields)
}
StructFlavor::Named(fl) => {
let fields = fl
.fields()
.map(|fd| StructField {
name: fd
.name()
.map(|n| n.text())
.unwrap_or_else(|| SmolStr::new("[error]")),
type_ref: TypeRef::from_ast_opt(fd.type_ref()),
})
.collect();
VariantData::Struct(fields)
}
StructFlavor::Unit => VariantData::Unit,
}
}
pub(crate) fn get_field_type_ref(&self, field_name: &str) -> Option<&TypeRef> {
self.fields()
.iter()
.find(|f| f.name == field_name)
.map(|f| &f.type_ref)
}
pub fn fields(&self) -> &[StructField] {
match *self {
VariantData::Struct(ref fields) | VariantData::Tuple(ref fields) => fields,
_ => &[],
}
}
pub fn is_struct(&self) -> bool {
if let VariantData::Struct(..) = *self {
true
} else {
false
}
}
pub fn is_tuple(&self) -> bool {
if let VariantData::Tuple(..) = *self {
true
} else {
false
}
}
pub fn is_unit(&self) -> bool {
if let VariantData::Unit = *self {
true
} else {
false
}
}
}
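For reference, the `StructFlavor` to `VariantData` mapping implemented above works out as follows (illustrative definitions with the resulting variant noted in comments; the field `TypeRef`s are omitted):

struct Named { x: u32 } // StructFlavor::Named -> VariantData::Struct, one field named "x"
struct Tup(u32, bool);  // StructFlavor::Tuple -> VariantData::Tuple, fields named "0" and "1"
struct Unit;            // StructFlavor::Unit  -> VariantData::Unit, fields() returns an empty slice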


@ -1,6 +1,7 @@
use std::sync::Arc; use std::sync::Arc;
use ra_syntax::{ use ra_syntax::{
SmolStr,
SyntaxNode, SyntaxNode,
ast::FnDefNode, ast::FnDefNode,
}; };
@ -15,6 +16,7 @@ use crate::{
module::{ModuleId, ModuleTree, ModuleSource, module::{ModuleId, ModuleTree, ModuleSource,
nameres::{ItemMap, InputModuleItems}}, nameres::{ItemMap, InputModuleItems}},
ty::{InferenceResult, Ty}, ty::{InferenceResult, Ty},
adt::{StructData, EnumData},
}; };
salsa::query_group! { salsa::query_group! {
@ -31,6 +33,16 @@ pub trait HirDatabase: SyntaxDatabase
use fn query_definitions::fn_syntax; use fn query_definitions::fn_syntax;
} }
fn struct_data(def_id: DefId) -> Cancelable<Arc<StructData>> {
type StructDataQuery;
use fn query_definitions::struct_data;
}
fn enum_data(def_id: DefId) -> Cancelable<Arc<EnumData>> {
type EnumDataQuery;
use fn query_definitions::enum_data;
}
fn infer(fn_id: FnId) -> Cancelable<Arc<InferenceResult>> { fn infer(fn_id: FnId) -> Cancelable<Arc<InferenceResult>> {
type InferQuery; type InferQuery;
use fn query_definitions::infer; use fn query_definitions::infer;
@ -41,6 +53,11 @@ pub trait HirDatabase: SyntaxDatabase
use fn query_definitions::type_for_def; use fn query_definitions::type_for_def;
} }
fn type_for_field(def_id: DefId, field: SmolStr) -> Cancelable<Ty> {
type TypeForFieldQuery;
use fn query_definitions::type_for_field;
}
fn file_items(file_id: FileId) -> Arc<SourceFileItems> { fn file_items(file_id: FileId) -> Arc<SourceFileItems> {
type SourceFileItemsQuery; type SourceFileItemsQuery;
use fn query_definitions::file_items; use fn query_definitions::file_items;


@ -46,8 +46,7 @@ impl Function {
} }
pub fn module(&self, db: &impl HirDatabase) -> Cancelable<Module> { pub fn module(&self, db: &impl HirDatabase) -> Cancelable<Module> {
let loc = self.fn_id.0.loc(db); self.fn_id.0.module(db)
Module::new(db, loc.source_root_id, loc.module_id)
} }
} }


@ -25,6 +25,8 @@ pub mod source_binder;
mod krate; mod krate;
mod module; mod module;
mod function; mod function;
mod adt;
mod type_ref;
mod ty; mod ty;
use std::ops::Index; use std::ops::Index;
@ -40,8 +42,10 @@ use crate::{
pub use self::{ pub use self::{
path::{Path, PathKind}, path::{Path, PathKind},
krate::Crate, krate::Crate,
module::{Module, ModuleId, Problem, nameres::ItemMap, ModuleScope, Resolution}, module::{Module, ModuleId, Problem, nameres::{ItemMap, PerNs, Namespace}, ModuleScope, Resolution},
function::{Function, FnScopes}, function::{Function, FnScopes},
adt::{Struct, Enum},
ty::Ty,
}; };
pub use self::function::FnSignatureInfo; pub use self::function::FnSignatureInfo;
@ -56,7 +60,11 @@ ra_db::impl_numeric_id!(DefId);
pub(crate) enum DefKind { pub(crate) enum DefKind {
Module, Module,
Function, Function,
Struct,
Enum,
Item, Item,
StructCtor,
} }
#[derive(Clone, Debug, PartialEq, Eq, Hash)] #[derive(Clone, Debug, PartialEq, Eq, Hash)]
@ -68,18 +76,18 @@ pub struct DefLoc {
} }
impl DefKind { impl DefKind {
pub(crate) fn for_syntax_kind(kind: SyntaxKind) -> Option<DefKind> { pub(crate) fn for_syntax_kind(kind: SyntaxKind) -> PerNs<DefKind> {
match kind { match kind {
SyntaxKind::FN_DEF => Some(DefKind::Function), SyntaxKind::FN_DEF => PerNs::values(DefKind::Function),
SyntaxKind::MODULE => Some(DefKind::Module), SyntaxKind::MODULE => PerNs::types(DefKind::Module),
SyntaxKind::STRUCT_DEF => PerNs::both(DefKind::Struct, DefKind::StructCtor),
SyntaxKind::ENUM_DEF => PerNs::types(DefKind::Enum),
// These define items, but don't have their own DefKinds yet: // These define items, but don't have their own DefKinds yet:
SyntaxKind::STRUCT_DEF => Some(DefKind::Item), SyntaxKind::TRAIT_DEF => PerNs::types(DefKind::Item),
SyntaxKind::ENUM_DEF => Some(DefKind::Item), SyntaxKind::TYPE_DEF => PerNs::types(DefKind::Item),
SyntaxKind::TRAIT_DEF => Some(DefKind::Item), SyntaxKind::CONST_DEF => PerNs::values(DefKind::Item),
SyntaxKind::TYPE_DEF => Some(DefKind::Item), SyntaxKind::STATIC_DEF => PerNs::values(DefKind::Item),
SyntaxKind::CONST_DEF => Some(DefKind::Item), _ => PerNs::none(),
SyntaxKind::STATIC_DEF => Some(DefKind::Item),
_ => None,
} }
} }
} }
@ -99,6 +107,8 @@ impl DefLoc {
pub enum Def { pub enum Def {
Module(Module), Module(Module),
Function(Function), Function(Function),
Struct(Struct),
Enum(Enum),
Item, Item,
} }
@ -114,10 +124,25 @@ impl DefId {
let function = Function::new(self); let function = Function::new(self);
Def::Function(function) Def::Function(function)
} }
DefKind::Struct => {
let struct_def = Struct::new(self);
Def::Struct(struct_def)
}
DefKind::Enum => {
let enum_def = Enum::new(self);
Def::Enum(enum_def)
}
DefKind::StructCtor => Def::Item,
DefKind::Item => Def::Item, DefKind::Item => Def::Item,
}; };
Ok(res) Ok(res)
} }
/// For a module, returns that module; for any other def, returns the containing module.
pub fn module(self, db: &impl HirDatabase) -> Cancelable<Module> {
let loc = self.loc(db);
Module::new(db, loc.source_root_id, loc.module_id)
}
} }
/// Identifier of item within a specific file. This is stable over reparses, so /// Identifier of item within a specific file. This is stable over reparses, so


@ -193,6 +193,9 @@ salsa::database_storage! {
fn submodules() for db::SubmodulesQuery; fn submodules() for db::SubmodulesQuery;
fn infer() for db::InferQuery; fn infer() for db::InferQuery;
fn type_for_def() for db::TypeForDefQuery; fn type_for_def() for db::TypeForDefQuery;
fn type_for_field() for db::TypeForFieldQuery;
fn struct_data() for db::StructDataQuery;
fn enum_data() for db::EnumDataQuery;
} }
} }
} }


@ -17,7 +17,7 @@ use crate::{
arena::{Arena, Id}, arena::{Arena, Id},
}; };
pub use self::nameres::{ModuleScope, Resolution}; pub use self::nameres::{ModuleScope, Resolution, Namespace, PerNs};
/// `Module` is API entry point to get all the information /// `Module` is API entry point to get all the information
/// about a particular module. /// about a particular module.
@ -115,16 +115,29 @@ impl Module {
Ok(res) Ok(res)
} }
pub fn resolve_path(&self, db: &impl HirDatabase, path: Path) -> Cancelable<Option<DefId>> { pub fn resolve_path(&self, db: &impl HirDatabase, path: &Path) -> Cancelable<PerNs<DefId>> {
let mut curr = match path.kind { let mut curr_per_ns = PerNs::types(
PathKind::Crate => self.crate_root(), match path.kind {
PathKind::Self_ | PathKind::Plain => self.clone(), PathKind::Crate => self.crate_root(),
PathKind::Super => ctry!(self.parent()), PathKind::Self_ | PathKind::Plain => self.clone(),
} PathKind::Super => {
.def_id(db); if let Some(p) = self.parent() {
p
} else {
return Ok(PerNs::none());
}
}
}
.def_id(db),
);
let segments = path.segments; let segments = &path.segments;
for name in segments.iter() { for name in segments.iter() {
let curr = if let Some(r) = curr_per_ns.as_ref().take(Namespace::Types) {
r
} else {
return Ok(PerNs::none());
};
let module = match curr.loc(db) { let module = match curr.loc(db) {
DefLoc { DefLoc {
kind: DefKind::Module, kind: DefKind::Module,
@ -132,12 +145,17 @@ impl Module {
module_id, module_id,
.. ..
} => Module::new(db, source_root_id, module_id)?, } => Module::new(db, source_root_id, module_id)?,
_ => return Ok(None), // TODO here would be the place to handle enum variants...
_ => return Ok(PerNs::none()),
}; };
let scope = module.scope(db)?; let scope = module.scope(db)?;
curr = ctry!(ctry!(scope.get(&name)).def_id); curr_per_ns = if let Some(r) = scope.get(&name) {
r.def_id
} else {
return Ok(PerNs::none());
};
} }
Ok(Some(curr)) Ok(curr_per_ns)
} }
pub fn problems(&self, db: &impl HirDatabase) -> Vec<(SyntaxNode, Problem)> { pub fn problems(&self, db: &impl HirDatabase) -> Vec<(SyntaxNode, Problem)> {
@ -145,7 +163,7 @@ impl Module {
} }
} }
/// Phisically, rust source is organized as a set of files, but logically it is /// Physically, rust source is organized as a set of files, but logically it is
/// organized as a tree of modules. Usually, a single file corresponds to a /// organized as a tree of modules. Usually, a single file corresponds to a
/// single module, but it is not nessary the case. /// single module, but it is not nessary the case.
/// ///


@ -118,22 +118,96 @@ enum ImportKind {
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
pub struct Resolution { pub struct Resolution {
/// None for unresolved /// None for unresolved
pub def_id: Option<DefId>, pub def_id: PerNs<DefId>,
/// ident by whitch this is imported into local scope. /// ident by whitch this is imported into local scope.
pub import: Option<NamedImport>, pub import: Option<NamedImport>,
} }
// #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
// enum Namespace { pub enum Namespace {
// Types, Types,
// Values, Values,
// } }
// #[derive(Debug)] #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
// struct PerNs<T> { pub struct PerNs<T> {
// types: Option<T>, pub types: Option<T>,
// values: Option<T>, pub values: Option<T>,
// } }
impl<T> PerNs<T> {
pub fn none() -> PerNs<T> {
PerNs {
types: None,
values: None,
}
}
pub fn values(t: T) -> PerNs<T> {
PerNs {
types: None,
values: Some(t),
}
}
pub fn types(t: T) -> PerNs<T> {
PerNs {
types: Some(t),
values: None,
}
}
pub fn both(types: T, values: T) -> PerNs<T> {
PerNs {
types: Some(types),
values: Some(values),
}
}
pub fn is_none(&self) -> bool {
self.types.is_none() && self.values.is_none()
}
pub fn take(self, namespace: Namespace) -> Option<T> {
match namespace {
Namespace::Types => self.types,
Namespace::Values => self.values,
}
}
pub fn take_types(self) -> Option<T> {
self.types
}
pub fn take_values(self) -> Option<T> {
self.values
}
pub fn get(&self, namespace: Namespace) -> Option<&T> {
self.as_ref().take(namespace)
}
pub fn as_ref(&self) -> PerNs<&T> {
PerNs {
types: self.types.as_ref(),
values: self.values.as_ref(),
}
}
pub fn and_then<U>(self, f: impl Fn(T) -> Option<U>) -> PerNs<U> {
PerNs {
types: self.types.and_then(&f),
values: self.values.and_then(&f),
}
}
pub fn map<U>(self, f: impl Fn(T) -> U) -> PerNs<U> {
PerNs {
types: self.types.map(&f),
values: self.values.map(&f),
}
}
}
impl InputModuleItems { impl InputModuleItems {
pub(crate) fn new<'a>( pub(crate) fn new<'a>(
@ -254,7 +328,7 @@ where
for dep in krate.dependencies(self.db) { for dep in krate.dependencies(self.db) {
if let Some(module) = dep.krate.root_module(self.db)? { if let Some(module) = dep.krate.root_module(self.db)? {
let def_id = module.def_id(self.db); let def_id = module.def_id(self.db);
self.add_module_item(&mut module_items, dep.name, def_id); self.add_module_item(&mut module_items, dep.name, PerNs::types(def_id));
} }
} }
}; };
@ -265,7 +339,7 @@ where
module_items.items.insert( module_items.items.insert(
name.clone(), name.clone(),
Resolution { Resolution {
def_id: None, def_id: PerNs::none(),
import: Some(import), import: Some(import),
}, },
); );
@ -277,18 +351,23 @@ where
if item.kind == MODULE { if item.kind == MODULE {
continue; continue;
} }
let def_loc = DefLoc { // depending on the item kind, the location can define something in
kind: DefKind::for_syntax_kind(item.kind).unwrap_or(DefKind::Item), // the values namespace, the types namespace, or both
source_root_id: self.source_root, let kind = DefKind::for_syntax_kind(item.kind);
module_id, let def_id = kind.map(|k| {
source_item_id: SourceItemId { let def_loc = DefLoc {
file_id, kind: k,
item_id: Some(item.id), source_root_id: self.source_root,
}, module_id,
}; source_item_id: SourceItemId {
let def_id = def_loc.id(self.db); file_id,
item_id: Some(item.id),
},
};
def_loc.id(self.db)
});
let resolution = Resolution { let resolution = Resolution {
def_id: Some(def_id), def_id,
import: None, import: None,
}; };
module_items.items.insert(item.name.clone(), resolution); module_items.items.insert(item.name.clone(), resolution);
@ -303,16 +382,16 @@ where
source_item_id: module_id.source(&self.module_tree).0, source_item_id: module_id.source(&self.module_tree).0,
}; };
let def_id = def_loc.id(self.db); let def_id = def_loc.id(self.db);
self.add_module_item(&mut module_items, name, def_id); self.add_module_item(&mut module_items, name, PerNs::types(def_id));
} }
self.result.per_module.insert(module_id, module_items); self.result.per_module.insert(module_id, module_items);
Ok(()) Ok(())
} }
fn add_module_item(&self, module_items: &mut ModuleScope, name: SmolStr, def_id: DefId) { fn add_module_item(&self, module_items: &mut ModuleScope, name: SmolStr, def_id: PerNs<DefId>) {
let resolution = Resolution { let resolution = Resolution {
def_id: Some(def_id), def_id,
import: None, import: None,
}; };
module_items.items.insert(name, resolution); module_items.items.insert(name, resolution);
@ -347,15 +426,17 @@ where
let is_last = i == import.path.segments.len() - 1; let is_last = i == import.path.segments.len() - 1;
let def_id = match self.result.per_module[&curr].items.get(name) { let def_id = match self.result.per_module[&curr].items.get(name) {
None => return Ok(()), Some(res) if !res.def_id.is_none() => res.def_id,
Some(res) => match res.def_id { _ => return Ok(()),
Some(it) => it,
None => return Ok(()),
},
}; };
if !is_last { if !is_last {
curr = match def_id.loc(self.db) { let type_def_id = if let Some(d) = def_id.take(Namespace::Types) {
d
} else {
return Ok(());
};
curr = match type_def_id.loc(self.db) {
DefLoc { DefLoc {
kind: DefKind::Module, kind: DefKind::Module,
module_id: target_module_id, module_id: target_module_id,
@ -370,10 +451,11 @@ where
segments: import.path.segments[i + 1..].iter().cloned().collect(), segments: import.path.segments[i + 1..].iter().cloned().collect(),
kind: PathKind::Crate, kind: PathKind::Crate,
}; };
if let Some(def_id) = module.resolve_path(self.db, path)? { let def_id = module.resolve_path(self.db, &path)?;
if !def_id.is_none() {
self.update(module_id, |items| { self.update(module_id, |items| {
let res = Resolution { let res = Resolution {
def_id: Some(def_id), def_id: def_id,
import: Some(ptr), import: Some(ptr),
}; };
items.items.insert(name.clone(), res); items.items.insert(name.clone(), res);
@ -387,7 +469,7 @@ where
} else { } else {
self.update(module_id, |items| { self.update(module_id, |items| {
let res = Resolution { let res = Resolution {
def_id: Some(def_id), def_id: def_id,
import: Some(ptr), import: Some(ptr),
}; };
items.items.insert(name.clone(), res); items.items.insert(name.clone(), res);
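A minimal illustration of the `PerNs` API introduced in this file, with strings standing in for `DefId`s: a struct definition now contributes a `DefKind::Struct` to the types namespace and a `DefKind::StructCtor` to the values namespace, and callers pick the side they need with `take_types`/`take_values` (this mirrors what `DefKind::for_syntax_kind(STRUCT_DEF)` sets up; it is not code from the change itself):

let per_ns = PerNs::both("Struct", "StructCtor");
assert_eq!(per_ns.take_types(), Some("Struct"));      // e.g. resolving `Foo` in a type position
assert_eq!(per_ns.take_values(), Some("StructCtor")); // e.g. resolving `Foo(..)` in an expression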


@ -40,7 +40,7 @@ fn item_map_smoke_test() {
); );
let name = SmolStr::from("Baz"); let name = SmolStr::from("Baz");
let resolution = &item_map.per_module[&module_id].items[&name]; let resolution = &item_map.per_module[&module_id].items[&name];
assert!(resolution.def_id.is_some()); assert!(resolution.def_id.take_types().is_some());
} }
#[test] #[test]
@ -59,7 +59,7 @@ fn test_self() {
); );
let name = SmolStr::from("Baz"); let name = SmolStr::from("Baz");
let resolution = &item_map.per_module[&module_id].items[&name]; let resolution = &item_map.per_module[&module_id].items[&name];
assert!(resolution.def_id.is_some()); assert!(resolution.def_id.take_types().is_some());
} }
#[test] #[test]
@ -92,7 +92,7 @@ fn item_map_across_crates() {
let name = SmolStr::from("Baz"); let name = SmolStr::from("Baz");
let resolution = &item_map.per_module[&module_id].items[&name]; let resolution = &item_map.per_module[&module_id].items[&name];
assert!(resolution.def_id.is_some()); assert!(resolution.def_id.take_types().is_some());
} }
#[test] #[test]


@ -1,12 +1,12 @@
use ra_syntax::{SmolStr, ast, AstNode, TextRange}; use ra_syntax::{SmolStr, ast, AstNode, TextRange};
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Path { pub struct Path {
pub kind: PathKind, pub kind: PathKind,
pub segments: Vec<SmolStr>, pub segments: Vec<SmolStr>,
} }
#[derive(Debug, Clone, Copy, PartialEq, Eq)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum PathKind { pub enum PathKind {
Plain, Plain,
Self_, Self_,


@ -19,7 +19,8 @@ use crate::{
imp::Submodule, imp::Submodule,
nameres::{InputModuleItems, ItemMap, Resolver}, nameres::{InputModuleItems, ItemMap, Resolver},
}, },
ty::{self, InferenceResult, Ty} ty::{self, InferenceResult, Ty},
adt::{StructData, EnumData},
}; };
/// Resolve `FnId` to the corresponding `SyntaxNode` /// Resolve `FnId` to the corresponding `SyntaxNode`
@ -45,6 +46,32 @@ pub(super) fn type_for_def(db: &impl HirDatabase, def_id: DefId) -> Cancelable<T
ty::type_for_def(db, def_id) ty::type_for_def(db, def_id)
} }
pub(super) fn type_for_field(
db: &impl HirDatabase,
def_id: DefId,
field: SmolStr,
) -> Cancelable<Ty> {
ty::type_for_field(db, def_id, field)
}
pub(super) fn struct_data(db: &impl HirDatabase, def_id: DefId) -> Cancelable<Arc<StructData>> {
let def_loc = def_id.loc(db);
assert!(def_loc.kind == DefKind::Struct);
let syntax = db.file_item(def_loc.source_item_id);
let struct_def =
ast::StructDef::cast(syntax.borrowed()).expect("struct def should point to StructDef node");
Ok(Arc::new(StructData::new(struct_def.borrowed())))
}
pub(super) fn enum_data(db: &impl HirDatabase, def_id: DefId) -> Cancelable<Arc<EnumData>> {
let def_loc = def_id.loc(db);
assert!(def_loc.kind == DefKind::Enum);
let syntax = db.file_item(def_loc.source_item_id);
let enum_def =
ast::EnumDef::cast(syntax.borrowed()).expect("enum def should point to EnumDef node");
Ok(Arc::new(EnumData::new(enum_def.borrowed())))
}
pub(super) fn file_items(db: &impl HirDatabase, file_id: FileId) -> Arc<SourceFileItems> { pub(super) fn file_items(db: &impl HirDatabase, file_id: FileId) -> Arc<SourceFileItems> {
let mut res = SourceFileItems::new(file_id); let mut res = SourceFileItems::new(file_id);
let source_file = db.source_file(file_id); let source_file = db.source_file(file_id);


@ -11,13 +11,18 @@ use rustc_hash::{FxHashMap};
use ra_db::{LocalSyntaxPtr, Cancelable}; use ra_db::{LocalSyntaxPtr, Cancelable};
use ra_syntax::{ use ra_syntax::{
SmolStr, SmolStr,
ast::{self, AstNode, LoopBodyOwner, ArgListOwner}, ast::{self, AstNode, LoopBodyOwner, ArgListOwner, PrefixOp},
SyntaxNodeRef SyntaxNodeRef
}; };
use crate::{Def, DefId, FnScopes, Module, Function, Path, db::HirDatabase}; use crate::{
Def, DefId, FnScopes, Module, Function, Struct, Enum, Path,
db::HirDatabase,
adt::VariantData,
type_ref::{TypeRef, Mutability},
};
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] #[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub enum Ty { pub enum Ty {
/// The primitive boolean type. Written as `bool`. /// The primitive boolean type. Written as `bool`.
Bool, Bool,
@ -35,8 +40,15 @@ pub enum Ty {
/// A primitive floating-point type. For example, `f64`. /// A primitive floating-point type. For example, `f64`.
Float(primitive::FloatTy), Float(primitive::FloatTy),
// Structures, enumerations and unions. /// Structures, enumerations and unions.
// Adt(AdtDef, Substs), Adt {
/// The DefId of the struct/enum.
def_id: DefId,
/// The name, for displaying.
name: SmolStr,
// later we'll need generic substitutions here
},
/// The pointee of a string slice. Written as `str`. /// The pointee of a string slice. Written as `str`.
Str, Str,
@ -45,12 +57,13 @@ pub enum Ty {
/// The pointee of an array slice. Written as `[T]`. /// The pointee of an array slice. Written as `[T]`.
Slice(TyRef), Slice(TyRef),
// A raw pointer. Written as `*mut T` or `*const T` /// A raw pointer. Written as `*mut T` or `*const T`
// RawPtr(TypeAndMut<'tcx>), RawPtr(TyRef, Mutability),
/// A reference; a pointer with an associated lifetime. Written as
/// `&'a mut T` or `&'a T`.
Ref(TyRef, Mutability),
// A reference; a pointer with an associated lifetime. Written as
// `&'a mut T` or `&'a T`.
// Ref(Ty<'tcx>, hir::Mutability),
/// A pointer to a function. Written as `fn() -> i32`. /// A pointer to a function. Written as `fn() -> i32`.
/// ///
/// For example the type of `bar` here: /// For example the type of `bar` here:
@ -107,58 +120,104 @@ pub enum Ty {
type TyRef = Arc<Ty>; type TyRef = Arc<Ty>;
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] #[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub struct FnSig { pub struct FnSig {
input: Vec<Ty>, input: Vec<Ty>,
output: Ty, output: Ty,
} }
impl Ty { impl Ty {
pub fn new(_db: &impl HirDatabase, node: ast::TypeRef) -> Cancelable<Self> { pub(crate) fn from_hir(
use ra_syntax::ast::TypeRef::*; db: &impl HirDatabase,
Ok(match node { module: &Module,
ParenType(_inner) => Ty::Unknown, // TODO type_ref: &TypeRef,
TupleType(_inner) => Ty::Unknown, // TODO ) -> Cancelable<Self> {
NeverType(..) => Ty::Never, Ok(match type_ref {
PathType(inner) => { TypeRef::Never => Ty::Never,
let path = if let Some(p) = inner.path() { TypeRef::Tuple(inner) => {
p let inner_tys = inner
} else { .iter()
return Ok(Ty::Unknown); .map(|tr| Ty::from_hir(db, module, tr))
}; .collect::<Cancelable<_>>()?;
if path.qualifier().is_none() { Ty::Tuple(inner_tys)
let name = path
.segment()
.and_then(|s| s.name_ref())
.map(|n| n.text())
.unwrap_or(SmolStr::new(""));
if let Some(int_ty) = primitive::IntTy::from_string(&name) {
Ty::Int(int_ty)
} else if let Some(uint_ty) = primitive::UintTy::from_string(&name) {
Ty::Uint(uint_ty)
} else if let Some(float_ty) = primitive::FloatTy::from_string(&name) {
Ty::Float(float_ty)
} else {
// TODO
Ty::Unknown
}
} else {
// TODO
Ty::Unknown
}
} }
PointerType(_inner) => Ty::Unknown, // TODO TypeRef::Path(path) => Ty::from_hir_path(db, module, path)?,
ArrayType(_inner) => Ty::Unknown, // TODO TypeRef::RawPtr(inner, mutability) => {
SliceType(_inner) => Ty::Unknown, // TODO let inner_ty = Ty::from_hir(db, module, inner)?;
ReferenceType(_inner) => Ty::Unknown, // TODO Ty::RawPtr(Arc::new(inner_ty), *mutability)
PlaceholderType(_inner) => Ty::Unknown, // TODO }
FnPointerType(_inner) => Ty::Unknown, // TODO TypeRef::Array(_inner) => Ty::Unknown, // TODO
ForType(_inner) => Ty::Unknown, // TODO TypeRef::Slice(inner) => {
ImplTraitType(_inner) => Ty::Unknown, // TODO let inner_ty = Ty::from_hir(db, module, inner)?;
DynTraitType(_inner) => Ty::Unknown, // TODO Ty::Slice(Arc::new(inner_ty))
}
TypeRef::Reference(inner, mutability) => {
let inner_ty = Ty::from_hir(db, module, inner)?;
Ty::Ref(Arc::new(inner_ty), *mutability)
}
TypeRef::Placeholder => Ty::Unknown, // TODO
TypeRef::Fn(params) => {
let mut inner_tys = params
.iter()
.map(|tr| Ty::from_hir(db, module, tr))
.collect::<Cancelable<Vec<_>>>()?;
let return_ty = inner_tys
.pop()
.expect("TypeRef::Fn should always have at least return type");
let sig = FnSig {
input: inner_tys,
output: return_ty,
};
Ty::FnPtr(Arc::new(sig))
}
TypeRef::Error => Ty::Unknown,
}) })
} }
pub(crate) fn from_hir_path(
db: &impl HirDatabase,
module: &Module,
path: &Path,
) -> Cancelable<Self> {
if path.is_ident() {
let name = &path.segments[0];
if let Some(int_ty) = primitive::IntTy::from_string(&name) {
return Ok(Ty::Int(int_ty));
} else if let Some(uint_ty) = primitive::UintTy::from_string(&name) {
return Ok(Ty::Uint(uint_ty));
} else if let Some(float_ty) = primitive::FloatTy::from_string(&name) {
return Ok(Ty::Float(float_ty));
}
}
// Resolve in module (in type namespace)
let resolved = if let Some(r) = module.resolve_path(db, path)?.take_types() {
r
} else {
return Ok(Ty::Unknown);
};
let ty = db.type_for_def(resolved)?;
Ok(ty)
}
// TODO: These should not be necessary long-term, since everything will work on HIR
pub(crate) fn from_ast_opt(
db: &impl HirDatabase,
module: &Module,
node: Option<ast::TypeRef>,
) -> Cancelable<Self> {
node.map(|n| Ty::from_ast(db, module, n))
.unwrap_or(Ok(Ty::Unknown))
}
pub(crate) fn from_ast(
db: &impl HirDatabase,
module: &Module,
node: ast::TypeRef,
) -> Cancelable<Self> {
Ty::from_hir(db, module, &TypeRef::from_ast(node))
}
pub fn unit() -> Self { pub fn unit() -> Self {
Ty::Tuple(Vec::new()) Ty::Tuple(Vec::new())
} }
@ -174,6 +233,8 @@ impl fmt::Display for Ty {
Ty::Float(t) => write!(f, "{}", t.ty_to_string()), Ty::Float(t) => write!(f, "{}", t.ty_to_string()),
Ty::Str => write!(f, "str"), Ty::Str => write!(f, "str"),
Ty::Slice(t) => write!(f, "[{}]", t), Ty::Slice(t) => write!(f, "[{}]", t),
Ty::RawPtr(t, m) => write!(f, "*{}{}", m.as_keyword_for_ptr(), t),
Ty::Ref(t, m) => write!(f, "&{}{}", m.as_keyword_for_ref(), t),
Ty::Never => write!(f, "!"), Ty::Never => write!(f, "!"),
Ty::Tuple(ts) => { Ty::Tuple(ts) => {
write!(f, "(")?; write!(f, "(")?;
@ -189,6 +250,7 @@ impl fmt::Display for Ty {
} }
write!(f, ") -> {}", sig.output) write!(f, ") -> {}", sig.output)
} }
Ty::Adt { name, .. } => write!(f, "{}", name),
Ty::Unknown => write!(f, "[unknown]"), Ty::Unknown => write!(f, "[unknown]"),
} }
} }
@ -196,34 +258,40 @@ impl fmt::Display for Ty {
pub fn type_for_fn(db: &impl HirDatabase, f: Function) -> Cancelable<Ty> { pub fn type_for_fn(db: &impl HirDatabase, f: Function) -> Cancelable<Ty> {
let syntax = f.syntax(db); let syntax = f.syntax(db);
let module = f.module(db)?;
let node = syntax.borrowed(); let node = syntax.borrowed();
// TODO we ignore type parameters for now // TODO we ignore type parameters for now
let input = node let input = node
.param_list() .param_list()
.map(|pl| { .map(|pl| {
pl.params() pl.params()
.map(|p| { .map(|p| Ty::from_ast_opt(db, &module, p.type_ref()))
p.type_ref()
.map(|t| Ty::new(db, t))
.unwrap_or(Ok(Ty::Unknown))
})
.collect() .collect()
}) })
.unwrap_or_else(|| Ok(Vec::new()))?; .unwrap_or_else(|| Ok(Vec::new()))?;
let output = node let output = Ty::from_ast_opt(db, &module, node.ret_type().and_then(|rt| rt.type_ref()))?;
.ret_type()
.and_then(|rt| rt.type_ref())
.map(|t| Ty::new(db, t))
.unwrap_or(Ok(Ty::Unknown))?;
let sig = FnSig { input, output }; let sig = FnSig { input, output };
Ok(Ty::FnPtr(Arc::new(sig))) Ok(Ty::FnPtr(Arc::new(sig)))
} }
// TODO this should probably be per namespace (i.e. types vs. values), since for pub fn type_for_struct(db: &impl HirDatabase, s: Struct) -> Cancelable<Ty> {
// a tuple struct `struct Foo(Bar)`, Foo has function type as a value, but Ok(Ty::Adt {
// defines the struct type Foo when used in the type namespace. rustc has a def_id: s.def_id(),
// separate DefId for the constructor, but with the current DefId approach, that name: s
// seems complicated. .name(db)?
.unwrap_or_else(|| SmolStr::new("[unnamed struct]")),
})
}
pub fn type_for_enum(db: &impl HirDatabase, s: Enum) -> Cancelable<Ty> {
Ok(Ty::Adt {
def_id: s.def_id(),
name: s
.name(db)?
.unwrap_or_else(|| SmolStr::new("[unnamed enum]")),
})
}
pub fn type_for_def(db: &impl HirDatabase, def_id: DefId) -> Cancelable<Ty> { pub fn type_for_def(db: &impl HirDatabase, def_id: DefId) -> Cancelable<Ty> {
let def = def_id.resolve(db)?; let def = def_id.resolve(db)?;
match def { match def {
@ -232,6 +300,8 @@ pub fn type_for_def(db: &impl HirDatabase, def_id: DefId) -> Cancelable<Ty> {
Ok(Ty::Unknown) Ok(Ty::Unknown)
} }
Def::Function(f) => type_for_fn(db, f), Def::Function(f) => type_for_fn(db, f),
Def::Struct(s) => type_for_struct(db, s),
Def::Enum(e) => type_for_enum(db, e),
Def::Item => { Def::Item => {
log::debug!("trying to get type for item of unknown type {:?}", def_id); log::debug!("trying to get type for item of unknown type {:?}", def_id);
Ok(Ty::Unknown) Ok(Ty::Unknown)
@ -239,6 +309,33 @@ pub fn type_for_def(db: &impl HirDatabase, def_id: DefId) -> Cancelable<Ty> {
} }
} }
pub(super) fn type_for_field(
db: &impl HirDatabase,
def_id: DefId,
field: SmolStr,
) -> Cancelable<Ty> {
let def = def_id.resolve(db)?;
let variant_data = match def {
Def::Struct(s) => {
let variant_data = s.variant_data(db)?;
variant_data
}
// TODO: unions
// TODO: enum variants
_ => panic!(
"trying to get type for field in non-struct/variant {:?}",
def_id
),
};
let module = def_id.module(db)?;
let type_ref = if let Some(tr) = variant_data.get_field_type_ref(&field) {
tr
} else {
return Ok(Ty::Unknown);
};
Ty::from_hir(db, &module, &type_ref)
}
#[derive(Clone, PartialEq, Eq, Debug)] #[derive(Clone, PartialEq, Eq, Debug)]
pub struct InferenceResult { pub struct InferenceResult {
type_of: FxHashMap<LocalSyntaxPtr, Ty>, type_of: FxHashMap<LocalSyntaxPtr, Ty>,
@ -305,32 +402,54 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
}; };
// resolve in module // resolve in module
let resolved = ctry!(self.module.resolve_path(self.db, path)?); let resolved = ctry!(self.module.resolve_path(self.db, &path)?.take_values());
let ty = self.db.type_for_def(resolved)?; let ty = self.db.type_for_def(resolved)?;
// TODO we will need to add type variables for type parameters etc. here // TODO we will need to add type variables for type parameters etc. here
Ok(Some(ty)) Ok(Some(ty))
} }
fn resolve_variant(
&self,
path: Option<ast::Path>,
) -> Cancelable<(Ty, Option<Arc<VariantData>>)> {
let path = if let Some(path) = path.and_then(Path::from_ast) {
path
} else {
return Ok((Ty::Unknown, None));
};
let def_id = if let Some(def_id) = self.module.resolve_path(self.db, &path)?.take_types() {
def_id
} else {
return Ok((Ty::Unknown, None));
};
Ok(match def_id.resolve(self.db)? {
Def::Struct(s) => {
let struct_data = self.db.struct_data(def_id)?;
let ty = type_for_struct(self.db, s)?;
(ty, Some(struct_data.variant_data().clone()))
}
_ => (Ty::Unknown, None),
})
}
fn infer_expr_opt(&mut self, expr: Option<ast::Expr>) -> Cancelable<Ty> {
if let Some(e) = expr {
self.infer_expr(e)
} else {
Ok(Ty::Unknown)
}
}
fn infer_expr(&mut self, expr: ast::Expr) -> Cancelable<Ty> { fn infer_expr(&mut self, expr: ast::Expr) -> Cancelable<Ty> {
let ty = match expr { let ty = match expr {
ast::Expr::IfExpr(e) => { ast::Expr::IfExpr(e) => {
if let Some(condition) = e.condition() { if let Some(condition) = e.condition() {
if let Some(e) = condition.expr() { // TODO if no pat, this should be bool
// TODO if no pat, this should be bool self.infer_expr_opt(condition.expr())?;
self.infer_expr(e)?;
}
// TODO write type for pat // TODO write type for pat
}; };
let if_ty = if let Some(block) = e.then_branch() { let if_ty = self.infer_block_opt(e.then_branch())?;
self.infer_block(block)? let else_ty = self.infer_block_opt(e.else_branch())?;
} else {
Ty::Unknown
};
let else_ty = if let Some(block) = e.else_branch() {
self.infer_block(block)?
} else {
Ty::Unknown
};
if let Some(ty) = self.unify(&if_ty, &else_ty) { if let Some(ty) = self.unify(&if_ty, &else_ty) {
ty ty
} else { } else {
@ -338,62 +457,37 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
Ty::Unknown Ty::Unknown
} }
} }
ast::Expr::BlockExpr(e) => { ast::Expr::BlockExpr(e) => self.infer_block_opt(e.block())?,
if let Some(block) = e.block() {
self.infer_block(block)?
} else {
Ty::Unknown
}
}
ast::Expr::LoopExpr(e) => { ast::Expr::LoopExpr(e) => {
if let Some(block) = e.loop_body() { self.infer_block_opt(e.loop_body())?;
self.infer_block(block)?;
};
// TODO never, or the type of the break param // TODO never, or the type of the break param
Ty::Unknown Ty::Unknown
} }
ast::Expr::WhileExpr(e) => { ast::Expr::WhileExpr(e) => {
if let Some(condition) = e.condition() { if let Some(condition) = e.condition() {
if let Some(e) = condition.expr() { // TODO if no pat, this should be bool
// TODO if no pat, this should be bool self.infer_expr_opt(condition.expr())?;
self.infer_expr(e)?;
}
// TODO write type for pat // TODO write type for pat
}; };
if let Some(block) = e.loop_body() { self.infer_block_opt(e.loop_body())?;
// TODO
self.infer_block(block)?;
};
// TODO always unit? // TODO always unit?
Ty::Unknown Ty::Unknown
} }
ast::Expr::ForExpr(e) => { ast::Expr::ForExpr(e) => {
if let Some(expr) = e.iterable() { let _iterable_ty = self.infer_expr_opt(e.iterable());
self.infer_expr(expr)?;
}
if let Some(_pat) = e.pat() { if let Some(_pat) = e.pat() {
// TODO write type for pat // TODO write type for pat
} }
if let Some(block) = e.loop_body() { self.infer_block_opt(e.loop_body())?;
self.infer_block(block)?;
}
// TODO always unit? // TODO always unit?
Ty::Unknown Ty::Unknown
} }
ast::Expr::LambdaExpr(e) => { ast::Expr::LambdaExpr(e) => {
let _body_ty = if let Some(body) = e.body() { let _body_ty = self.infer_expr_opt(e.body())?;
self.infer_expr(body)?
} else {
Ty::Unknown
};
Ty::Unknown Ty::Unknown
} }
ast::Expr::CallExpr(e) => { ast::Expr::CallExpr(e) => {
let callee_ty = if let Some(e) = e.expr() { let callee_ty = self.infer_expr_opt(e.expr())?;
self.infer_expr(e)?
} else {
Ty::Unknown
};
if let Some(arg_list) = e.arg_list() { if let Some(arg_list) = e.arg_list() {
for arg in arg_list.args() { for arg in arg_list.args() {
// TODO unify / expect argument type // TODO unify / expect argument type
@ -410,11 +504,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
} }
} }
ast::Expr::MethodCallExpr(e) => { ast::Expr::MethodCallExpr(e) => {
let _receiver_ty = if let Some(e) = e.expr() { let _receiver_ty = self.infer_expr_opt(e.expr())?;
self.infer_expr(e)?
} else {
Ty::Unknown
};
if let Some(arg_list) = e.arg_list() { if let Some(arg_list) = e.arg_list() {
for arg in arg_list.args() { for arg in arg_list.args() {
// TODO unify / expect argument type // TODO unify / expect argument type
@ -424,20 +514,12 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
Ty::Unknown Ty::Unknown
} }
ast::Expr::MatchExpr(e) => { ast::Expr::MatchExpr(e) => {
let _ty = if let Some(match_expr) = e.expr() { let _ty = self.infer_expr_opt(e.expr())?;
self.infer_expr(match_expr)?
} else {
Ty::Unknown
};
if let Some(match_arm_list) = e.match_arm_list() { if let Some(match_arm_list) = e.match_arm_list() {
for arm in match_arm_list.arms() { for arm in match_arm_list.arms() {
// TODO type the bindings in pat // TODO type the bindings in pat
// TODO type the guard // TODO type the guard
let _ty = if let Some(e) = arm.expr() { let _ty = self.infer_expr_opt(arm.expr())?;
self.infer_expr(e)?
} else {
Ty::Unknown
};
} }
// TODO unify all the match arm types // TODO unify all the match arm types
Ty::Unknown Ty::Unknown
@ -450,68 +532,78 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
ast::Expr::PathExpr(e) => self.infer_path_expr(e)?.unwrap_or(Ty::Unknown), ast::Expr::PathExpr(e) => self.infer_path_expr(e)?.unwrap_or(Ty::Unknown),
ast::Expr::ContinueExpr(_e) => Ty::Never, ast::Expr::ContinueExpr(_e) => Ty::Never,
ast::Expr::BreakExpr(_e) => Ty::Never, ast::Expr::BreakExpr(_e) => Ty::Never,
ast::Expr::ParenExpr(e) => { ast::Expr::ParenExpr(e) => self.infer_expr_opt(e.expr())?,
if let Some(e) = e.expr() {
self.infer_expr(e)?
} else {
Ty::Unknown
}
}
ast::Expr::Label(_e) => Ty::Unknown, ast::Expr::Label(_e) => Ty::Unknown,
ast::Expr::ReturnExpr(e) => { ast::Expr::ReturnExpr(e) => {
if let Some(e) = e.expr() { self.infer_expr_opt(e.expr())?;
// TODO unify with return type
self.infer_expr(e)?;
};
Ty::Never Ty::Never
} }
ast::Expr::MatchArmList(_) | ast::Expr::MatchArm(_) | ast::Expr::MatchGuard(_) => { ast::Expr::MatchArmList(_) | ast::Expr::MatchArm(_) | ast::Expr::MatchGuard(_) => {
// Can this even occur outside of a match expression? // Can this even occur outside of a match expression?
Ty::Unknown Ty::Unknown
} }
ast::Expr::StructLit(_e) => Ty::Unknown, ast::Expr::StructLit(e) => {
let (ty, _variant_data) = self.resolve_variant(e.path())?;
if let Some(nfl) = e.named_field_list() {
for field in nfl.fields() {
// TODO unify with / expect field type
self.infer_expr_opt(field.expr())?;
}
}
ty
}
ast::Expr::NamedFieldList(_) | ast::Expr::NamedField(_) => { ast::Expr::NamedFieldList(_) | ast::Expr::NamedField(_) => {
// Can this even occur outside of a struct literal? // Can this even occur outside of a struct literal?
Ty::Unknown Ty::Unknown
} }
ast::Expr::IndexExpr(_e) => Ty::Unknown, ast::Expr::IndexExpr(_e) => Ty::Unknown,
ast::Expr::FieldExpr(_e) => Ty::Unknown, ast::Expr::FieldExpr(e) => {
ast::Expr::TryExpr(e) => { let receiver_ty = self.infer_expr_opt(e.expr())?;
let _inner_ty = if let Some(e) = e.expr() { if let Some(nr) = e.name_ref() {
self.infer_expr(e)? let text = nr.text();
match receiver_ty {
Ty::Tuple(fields) => {
let i = text.parse::<usize>().ok();
i.and_then(|i| fields.get(i).cloned())
.unwrap_or(Ty::Unknown)
}
Ty::Adt { def_id, .. } => self.db.type_for_field(def_id, text)?,
_ => Ty::Unknown,
}
} else { } else {
Ty::Unknown Ty::Unknown
}; }
}
ast::Expr::TryExpr(e) => {
let _inner_ty = self.infer_expr_opt(e.expr())?;
Ty::Unknown Ty::Unknown
} }
ast::Expr::CastExpr(e) => { ast::Expr::CastExpr(e) => {
let _inner_ty = if let Some(e) = e.expr() { let _inner_ty = self.infer_expr_opt(e.expr())?;
self.infer_expr(e)? let cast_ty = Ty::from_ast_opt(self.db, &self.module, e.type_ref())?;
} else {
Ty::Unknown
};
let cast_ty = e
.type_ref()
.map(|t| Ty::new(self.db, t))
.unwrap_or(Ok(Ty::Unknown))?;
// TODO do the coercion... // TODO do the coercion...
cast_ty cast_ty
} }
ast::Expr::RefExpr(e) => { ast::Expr::RefExpr(e) => {
let _inner_ty = if let Some(e) = e.expr() { let inner_ty = self.infer_expr_opt(e.expr())?;
self.infer_expr(e)? let m = Mutability::from_mutable(e.is_mut());
} else { // TODO reference coercions etc.
Ty::Unknown Ty::Ref(Arc::new(inner_ty), m)
};
Ty::Unknown
} }
ast::Expr::PrefixExpr(e) => { ast::Expr::PrefixExpr(e) => {
let _inner_ty = if let Some(e) = e.expr() { let inner_ty = self.infer_expr_opt(e.expr())?;
self.infer_expr(e)? match e.op() {
} else { Some(PrefixOp::Deref) => {
Ty::Unknown match inner_ty {
}; // builtin deref:
Ty::Unknown Ty::Ref(ref_inner, _) => (*ref_inner).clone(),
Ty::RawPtr(ptr_inner, _) => (*ptr_inner).clone(),
// TODO Deref::deref
_ => Ty::Unknown,
}
}
_ => Ty::Unknown,
}
} }
ast::Expr::RangeExpr(_e) => Ty::Unknown, ast::Expr::RangeExpr(_e) => Ty::Unknown,
ast::Expr::BinExpr(_e) => Ty::Unknown, ast::Expr::BinExpr(_e) => Ty::Unknown,
@ -521,15 +613,19 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
Ok(ty) Ok(ty)
} }
fn infer_block_opt(&mut self, node: Option<ast::Block>) -> Cancelable<Ty> {
if let Some(b) = node {
self.infer_block(b)
} else {
Ok(Ty::Unknown)
}
}
fn infer_block(&mut self, node: ast::Block) -> Cancelable<Ty> { fn infer_block(&mut self, node: ast::Block) -> Cancelable<Ty> {
for stmt in node.statements() { for stmt in node.statements() {
match stmt { match stmt {
ast::Stmt::LetStmt(stmt) => { ast::Stmt::LetStmt(stmt) => {
let decl_ty = if let Some(type_ref) = stmt.type_ref() { let decl_ty = Ty::from_ast_opt(self.db, &self.module, stmt.type_ref())?;
Ty::new(self.db, type_ref)?
} else {
Ty::Unknown
};
let ty = if let Some(expr) = stmt.initializer() { let ty = if let Some(expr) = stmt.initializer() {
// TODO pass expectation // TODO pass expectation
let expr_ty = self.infer_expr(expr)?; let expr_ty = self.infer_expr(expr)?;
@ -544,9 +640,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
}; };
} }
ast::Stmt::ExprStmt(expr_stmt) => { ast::Stmt::ExprStmt(expr_stmt) => {
if let Some(expr) = expr_stmt.expr() { self.infer_expr_opt(expr_stmt.expr())?;
self.infer_expr(expr)?;
}
} }
} }
} }
@ -576,7 +670,7 @@ pub fn infer(db: &impl HirDatabase, function: Function) -> Cancelable<InferenceR
continue; continue;
}; };
if let Some(type_ref) = param.type_ref() { if let Some(type_ref) = param.type_ref() {
let ty = Ty::new(db, type_ref)?; let ty = Ty::from_ast(db, &ctx.module, type_ref)?;
ctx.type_of.insert(LocalSyntaxPtr::new(pat.syntax()), ty); ctx.type_of.insert(LocalSyntaxPtr::new(pat.syntax()), ty);
} else { } else {
// TODO self param // TODO self param


@ -68,6 +68,51 @@ fn test() {
); );
} }
#[test]
fn infer_struct() {
check_inference(
r#"
struct A {
b: B,
c: C,
}
struct B;
struct C(usize);
fn test() {
let c = C(1);
B;
let a: A = A { b: B, c: C(1) };
a.b;
a.c;
}
"#,
"0004_struct.txt",
);
}
#[test]
fn infer_refs_and_ptrs() {
check_inference(
r#"
fn test(a: &u32, b: &mut u32, c: *const u32, d: *mut u32) {
a;
*a;
&a;
&mut a;
b;
*b;
&b;
c;
*c;
d;
*d;
}
"#,
"0005_refs.txt",
);
}
fn infer(content: &str) -> String { fn infer(content: &str) -> String {
let (db, _, file_id) = MockDatabase::with_single_file(content); let (db, _, file_id) = MockDatabase::with_single_file(content);
let source_file = db.source_file(file_id); let source_file = db.source_file(file_id);


@ -1,4 +1,4 @@
[33; 34) 'd': [unknown] [33; 34) 'd': &[unknown]
[88; 94) '1isize': [unknown] [88; 94) '1isize': [unknown]
[48; 49) 'a': u32 [48; 49) 'a': u32
[55; 56) 'b': isize [55; 56) 'b': isize
@ -10,4 +10,4 @@
[17; 18) 'b': isize [17; 18) 'b': isize
[100; 106) '"test"': [unknown] [100; 106) '"test"': [unknown]
[42; 121) '{ ...f32; }': () [42; 121) '{ ...f32; }': ()
[69; 70) 'd': [unknown] [69; 70) 'd': &[unknown]


@ -0,0 +1,16 @@
[86; 90) 'C(1)': [unknown]
[121; 122) 'B': [unknown]
[86; 87) 'C': [unknown]
[129; 130) '1': [unknown]
[107; 108) 'a': A
[127; 128) 'C': [unknown]
[139; 142) 'a.b': B
[114; 133) 'A { b:...C(1) }': A
[148; 151) 'a.c': C
[148; 149) 'a': A
[139; 140) 'a': A
[72; 154) '{ ...a.c; }': ()
[96; 97) 'B': [unknown]
[88; 89) '1': [unknown]
[82; 83) 'c': [unknown]
[127; 131) 'C(1)': [unknown]


@ -0,0 +1,23 @@
[115; 117) '&b': &&mut u32
[88; 94) '&mut a': &mut &u32
[146; 147) 'd': *mut u32
[145; 147) '*d': u32
[65; 66) 'a': &u32
[46; 47) 'd': *mut u32
[59; 150) '{ ... *d; }': ()
[116; 117) 'b': &mut u32
[131; 132) 'c': *const u32
[130; 132) '*c': u32
[72; 74) '*a': u32
[107; 109) '*b': u32
[108; 109) 'b': &mut u32
[9; 10) 'a': &u32
[18; 19) 'b': &mut u32
[93; 94) 'a': &u32
[100; 101) 'b': &mut u32
[81; 82) 'a': &u32
[80; 82) '&a': &&u32
[73; 74) 'a': &u32
[123; 124) 'c': *const u32
[31; 32) 'c': *const u32
[138; 139) 'd': *mut u32

View file

@@ -0,0 +1,110 @@
//! HIR for references to types. Paths in these are not yet resolved. They can
//! be directly created from an ast::TypeRef, without further queries.
use ra_syntax::ast;
use crate::Path;
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
pub enum Mutability {
Shared,
Mut,
}
impl Mutability {
pub fn from_mutable(mutable: bool) -> Mutability {
if mutable {
Mutability::Mut
} else {
Mutability::Shared
}
}
pub fn as_keyword_for_ref(self) -> &'static str {
match self {
Mutability::Shared => "",
Mutability::Mut => "mut ",
}
}
pub fn as_keyword_for_ptr(self) -> &'static str {
match self {
Mutability::Shared => "const ",
Mutability::Mut => "mut ",
}
}
}
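
// Illustration only (not part of this commit): the keyword helpers above are
// meant to compose into source-like renderings of reference and pointer types,
// for example:
//
//     format!("&{}{}", Mutability::Mut.as_keyword_for_ref(), "u32")     // "&mut u32"
//     format!("*{}{}", Mutability::Shared.as_keyword_for_ptr(), "u32")  // "*const u32"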
/// Compare ty::Ty
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub enum TypeRef {
Never,
Placeholder,
Tuple(Vec<TypeRef>),
Path(Path),
RawPtr(Box<TypeRef>, Mutability),
Reference(Box<TypeRef>, Mutability),
Array(Box<TypeRef> /*, Expr*/),
Slice(Box<TypeRef>),
/// A fn pointer. Last element of the vector is the return type.
Fn(Vec<TypeRef>),
// For
// ImplTrait,
// DynTrait,
Error,
}
impl TypeRef {
/// Converts an `ast::TypeRef` to a `hir::TypeRef`.
pub(crate) fn from_ast(node: ast::TypeRef) -> Self {
use ra_syntax::ast::TypeRef::*;
match node {
ParenType(inner) => TypeRef::from_ast_opt(inner.type_ref()),
TupleType(inner) => TypeRef::Tuple(inner.fields().map(TypeRef::from_ast).collect()),
NeverType(..) => TypeRef::Never,
PathType(inner) => inner
.path()
.and_then(Path::from_ast)
.map(TypeRef::Path)
.unwrap_or(TypeRef::Error),
PointerType(inner) => {
let inner_ty = TypeRef::from_ast_opt(inner.type_ref());
let mutability = Mutability::from_mutable(inner.is_mut());
TypeRef::RawPtr(Box::new(inner_ty), mutability)
}
ArrayType(inner) => TypeRef::Array(Box::new(TypeRef::from_ast_opt(inner.type_ref()))),
SliceType(inner) => TypeRef::Slice(Box::new(TypeRef::from_ast_opt(inner.type_ref()))),
ReferenceType(inner) => {
let inner_ty = TypeRef::from_ast_opt(inner.type_ref());
let mutability = Mutability::from_mutable(inner.is_mut());
TypeRef::Reference(Box::new(inner_ty), mutability)
}
PlaceholderType(_inner) => TypeRef::Placeholder,
FnPointerType(inner) => {
let ret_ty = TypeRef::from_ast_opt(inner.ret_type().and_then(|rt| rt.type_ref()));
let mut params = if let Some(pl) = inner.param_list() {
pl.params()
.map(|p| p.type_ref())
.map(TypeRef::from_ast_opt)
.collect()
} else {
Vec::new()
};
params.push(ret_ty);
TypeRef::Fn(params)
}
            // `for` types are close enough to the inner type for our purposes for now...
ForType(inner) => TypeRef::from_ast_opt(inner.type_ref()),
ImplTraitType(_inner) => TypeRef::Error,
DynTraitType(_inner) => TypeRef::Error,
}
}
pub(crate) fn from_ast_opt(node: Option<ast::TypeRef>) -> Self {
if let Some(node) = node {
TypeRef::from_ast(node)
} else {
TypeRef::Error
}
}
}
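
(Illustration, not part of the commit: a rough sketch of how the lowering above is meant to be consumed. The `describe` helper is hypothetical; the variants it matches are the ones defined in this file, e.g. `&mut u32` lowers to `Reference(_, Mut)`, `*const u32` to `RawPtr(_, Shared)`, and `_` to `Placeholder`.)

    fn describe(type_ref: TypeRef) -> &'static str {
        match type_ref {
            TypeRef::Reference(_, Mutability::Mut) => "mutable reference",
            TypeRef::RawPtr(_, Mutability::Shared) => "const raw pointer",
            TypeRef::Placeholder => "inferred (placeholder) type",
            _ => "other",
        }
    }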

View file

@@ -19,7 +19,7 @@ pub fn server_capabilities() -> ServerCapabilities {
        hover_provider: Some(true),
        completion_provider: Some(CompletionOptions {
            resolve_provider: None,
-            trigger_characters: Some(vec![":".to_string()]),
+            trigger_characters: Some(vec![":".to_string(), ".".to_string()]),
        }),
        signature_help_provider: Some(SignatureHelpOptions {
            trigger_characters: Some(vec!["(".to_string(), ",".to_string(), ")".to_string()]),

View file

@@ -55,7 +55,10 @@ impl Conv for CompletionItemKind {
            CompletionItemKind::Snippet => Snippet,
            CompletionItemKind::Module => Module,
            CompletionItemKind::Function => Function,
            CompletionItemKind::Struct => Struct,
            CompletionItemKind::Enum => Enum,
            CompletionItemKind::Binding => Variable,
            CompletionItemKind::Field => Field,
        }
    }
}

View file

@@ -363,3 +363,73 @@ impl<'a, N: AstNode<'a>> Iterator for AstChildren<'a, N> {
        }
    }
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum StructFlavor<'a> {
Tuple(PosFieldList<'a>),
Named(NamedFieldDefList<'a>),
Unit,
}
impl<'a> StructFlavor<'a> {
fn from_node<N: AstNode<'a>>(node: N) -> StructFlavor<'a> {
if let Some(nfdl) = child_opt::<_, NamedFieldDefList>(node) {
StructFlavor::Named(nfdl)
} else if let Some(pfl) = child_opt::<_, PosFieldList>(node) {
StructFlavor::Tuple(pfl)
} else {
StructFlavor::Unit
}
}
}
impl<'a> StructDef<'a> {
pub fn flavor(self) -> StructFlavor<'a> {
StructFlavor::from_node(self)
}
}
impl<'a> EnumVariant<'a> {
pub fn flavor(self) -> StructFlavor<'a> {
StructFlavor::from_node(self)
}
}
impl<'a> PointerType<'a> {
pub fn is_mut(&self) -> bool {
self.syntax().children().any(|n| n.kind() == MUT_KW)
}
}
impl<'a> ReferenceType<'a> {
pub fn is_mut(&self) -> bool {
self.syntax().children().any(|n| n.kind() == MUT_KW)
}
}
impl<'a> RefExpr<'a> {
pub fn is_mut(&self) -> bool {
self.syntax().children().any(|n| n.kind() == MUT_KW)
}
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub enum PrefixOp {
/// The `*` operator for dereferencing
Deref,
/// The `!` operator for logical inversion
Not,
/// The `-` operator for negation
Neg,
}
impl<'a> PrefixExpr<'a> {
pub fn op(&self) -> Option<PrefixOp> {
match self.syntax().first_child()?.kind() {
STAR => Some(PrefixOp::Deref),
EXCL => Some(PrefixOp::Not),
MINUS => Some(PrefixOp::Neg),
_ => None,
}
}
}
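
(Illustration, not part of the commit: a sketch of how `StructFlavor` and the field-list accessors added in this file are meant to be used together; the `count_fields` helper is hypothetical.)

    fn count_fields(def: StructDef<'_>) -> usize {
        match def.flavor() {
            // Named structs expose a NamedFieldDefList, tuple structs a PosFieldList.
            StructFlavor::Named(fields) => fields.fields().count(),
            StructFlavor::Tuple(fields) => fields.fields().count(),
            StructFlavor::Unit => 0,
        }
    }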

View file

@@ -131,7 +131,15 @@ impl<R: TreeRoot<RaTypes>> ArrayTypeNode<R> {
}
-impl<'a> ArrayType<'a> {}
+impl<'a> ArrayType<'a> {
pub fn type_ref(self) -> Option<TypeRef<'a>> {
super::child_opt(self)
}
pub fn expr(self) -> Option<Expr<'a>> {
super::child_opt(self)
}
}
// Attr
#[derive(Debug, Clone, Copy,)]
@@ -806,7 +814,94 @@ impl<'a> ast::NameOwner<'a> for EnumDef<'a> {}
impl<'a> ast::TypeParamsOwner<'a> for EnumDef<'a> {}
impl<'a> ast::AttrsOwner<'a> for EnumDef<'a> {}
impl<'a> ast::DocCommentsOwner<'a> for EnumDef<'a> {}
-impl<'a> EnumDef<'a> {}
+impl<'a> EnumDef<'a> {
pub fn variant_list(self) -> Option<EnumVariantList<'a>> {
super::child_opt(self)
}
}
// EnumVariant
#[derive(Debug, Clone, Copy,)]
pub struct EnumVariantNode<R: TreeRoot<RaTypes> = OwnedRoot> {
pub(crate) syntax: SyntaxNode<R>,
}
pub type EnumVariant<'a> = EnumVariantNode<RefRoot<'a>>;
impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<EnumVariantNode<R1>> for EnumVariantNode<R2> {
fn eq(&self, other: &EnumVariantNode<R1>) -> bool { self.syntax == other.syntax }
}
impl<R: TreeRoot<RaTypes>> Eq for EnumVariantNode<R> {}
impl<R: TreeRoot<RaTypes>> Hash for EnumVariantNode<R> {
fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
}
impl<'a> AstNode<'a> for EnumVariant<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
ENUM_VARIANT => Some(EnumVariant { syntax }),
_ => None,
}
}
fn syntax(self) -> SyntaxNodeRef<'a> { self.syntax }
}
impl<R: TreeRoot<RaTypes>> EnumVariantNode<R> {
pub fn borrowed(&self) -> EnumVariant {
EnumVariantNode { syntax: self.syntax.borrowed() }
}
pub fn owned(&self) -> EnumVariantNode {
EnumVariantNode { syntax: self.syntax.owned() }
}
}
impl<'a> ast::NameOwner<'a> for EnumVariant<'a> {}
impl<'a> EnumVariant<'a> {
pub fn expr(self) -> Option<Expr<'a>> {
super::child_opt(self)
}
}
// EnumVariantList
#[derive(Debug, Clone, Copy,)]
pub struct EnumVariantListNode<R: TreeRoot<RaTypes> = OwnedRoot> {
pub(crate) syntax: SyntaxNode<R>,
}
pub type EnumVariantList<'a> = EnumVariantListNode<RefRoot<'a>>;
impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<EnumVariantListNode<R1>> for EnumVariantListNode<R2> {
fn eq(&self, other: &EnumVariantListNode<R1>) -> bool { self.syntax == other.syntax }
}
impl<R: TreeRoot<RaTypes>> Eq for EnumVariantListNode<R> {}
impl<R: TreeRoot<RaTypes>> Hash for EnumVariantListNode<R> {
fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
}
impl<'a> AstNode<'a> for EnumVariantList<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
ENUM_VARIANT_LIST => Some(EnumVariantList { syntax }),
_ => None,
}
}
fn syntax(self) -> SyntaxNodeRef<'a> { self.syntax }
}
impl<R: TreeRoot<RaTypes>> EnumVariantListNode<R> {
pub fn borrowed(&self) -> EnumVariantList {
EnumVariantListNode { syntax: self.syntax.borrowed() }
}
pub fn owned(&self) -> EnumVariantListNode {
EnumVariantListNode { syntax: self.syntax.owned() }
}
}
impl<'a> EnumVariantList<'a> {
pub fn variants(self) -> impl Iterator<Item = EnumVariant<'a>> + 'a {
super::children(self)
}
}
// Expr
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
@@ -1036,7 +1131,15 @@ impl<R: TreeRoot<RaTypes>> FieldExprNode<R> {
}
-impl<'a> FieldExpr<'a> {}
+impl<'a> FieldExpr<'a> {
pub fn expr(self) -> Option<Expr<'a>> {
super::child_opt(self)
}
pub fn name_ref(self) -> Option<NameRef<'a>> {
super::child_opt(self)
}
}
// FieldPatList
#[derive(Debug, Clone, Copy,)]
@@ -1163,7 +1266,15 @@ impl<R: TreeRoot<RaTypes>> FnPointerTypeNode<R> {
}
-impl<'a> FnPointerType<'a> {}
+impl<'a> FnPointerType<'a> {
pub fn param_list(self) -> Option<ParamList<'a>> {
super::child_opt(self)
}
pub fn ret_type(self) -> Option<RetType<'a>> {
super::child_opt(self)
}
}
// ForExpr
#[derive(Debug, Clone, Copy,)]
@@ -1246,7 +1357,11 @@ impl<R: TreeRoot<RaTypes>> ForTypeNode<R> {
}
-impl<'a> ForType<'a> {}
+impl<'a> ForType<'a> {
pub fn type_ref(self) -> Option<TypeRef<'a>> {
super::child_opt(self)
}
}
// IfExpr
#[derive(Debug, Clone, Copy,)]
@@ -1935,6 +2050,10 @@ impl<'a> MethodCallExpr<'a> {
    pub fn expr(self) -> Option<Expr<'a>> {
        super::child_opt(self)
    }
pub fn name_ref(self) -> Option<NameRef<'a>> {
super::child_opt(self)
}
}
// Module
@@ -2142,7 +2261,15 @@ impl<R: TreeRoot<RaTypes>> NamedFieldNode<R> {
}
-impl<'a> NamedField<'a> {}
+impl<'a> NamedField<'a> {
pub fn name_ref(self) -> Option<NameRef<'a>> {
super::child_opt(self)
}
pub fn expr(self) -> Option<Expr<'a>> {
super::child_opt(self)
}
}
// NamedFieldDef
#[derive(Debug, Clone, Copy,)]
@@ -2181,7 +2308,52 @@ impl<'a> ast::NameOwner<'a> for NamedFieldDef<'a> {}
impl<'a> ast::NameOwner<'a> for NamedFieldDef<'a> {}
impl<'a> ast::AttrsOwner<'a> for NamedFieldDef<'a> {}
-impl<'a> NamedFieldDef<'a> {}
+impl<'a> NamedFieldDef<'a> {
pub fn type_ref(self) -> Option<TypeRef<'a>> {
super::child_opt(self)
}
}
// NamedFieldDefList
#[derive(Debug, Clone, Copy,)]
pub struct NamedFieldDefListNode<R: TreeRoot<RaTypes> = OwnedRoot> {
pub(crate) syntax: SyntaxNode<R>,
}
pub type NamedFieldDefList<'a> = NamedFieldDefListNode<RefRoot<'a>>;
impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<NamedFieldDefListNode<R1>> for NamedFieldDefListNode<R2> {
fn eq(&self, other: &NamedFieldDefListNode<R1>) -> bool { self.syntax == other.syntax }
}
impl<R: TreeRoot<RaTypes>> Eq for NamedFieldDefListNode<R> {}
impl<R: TreeRoot<RaTypes>> Hash for NamedFieldDefListNode<R> {
fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
}
impl<'a> AstNode<'a> for NamedFieldDefList<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
NAMED_FIELD_DEF_LIST => Some(NamedFieldDefList { syntax }),
_ => None,
}
}
fn syntax(self) -> SyntaxNodeRef<'a> { self.syntax }
}
impl<R: TreeRoot<RaTypes>> NamedFieldDefListNode<R> {
pub fn borrowed(&self) -> NamedFieldDefList {
NamedFieldDefListNode { syntax: self.syntax.borrowed() }
}
pub fn owned(&self) -> NamedFieldDefListNode {
NamedFieldDefListNode { syntax: self.syntax.owned() }
}
}
impl<'a> NamedFieldDefList<'a> {
pub fn fields(self) -> impl Iterator<Item = NamedFieldDef<'a>> + 'a {
super::children(self)
}
}
// NamedFieldList
#[derive(Debug, Clone, Copy,)]
@@ -2218,7 +2390,11 @@ impl<R: TreeRoot<RaTypes>> NamedFieldListNode<R> {
}
-impl<'a> NamedFieldList<'a> {}
+impl<'a> NamedFieldList<'a> {
pub fn fields(self) -> impl Iterator<Item = NamedField<'a>> + 'a {
super::children(self)
}
}
// NeverType
#[derive(Debug, Clone, Copy,)]
@@ -2451,7 +2627,11 @@ impl<R: TreeRoot<RaTypes>> ParenTypeNode<R> {
}
-impl<'a> ParenType<'a> {}
+impl<'a> ParenType<'a> {
pub fn type_ref(self) -> Option<TypeRef<'a>> {
super::child_opt(self)
}
}
// Pat
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
@@ -2816,7 +2996,94 @@ impl<R: TreeRoot<RaTypes>> PointerTypeNode<R> {
}
-impl<'a> PointerType<'a> {}
+impl<'a> PointerType<'a> {
pub fn type_ref(self) -> Option<TypeRef<'a>> {
super::child_opt(self)
}
}
// PosField
#[derive(Debug, Clone, Copy,)]
pub struct PosFieldNode<R: TreeRoot<RaTypes> = OwnedRoot> {
pub(crate) syntax: SyntaxNode<R>,
}
pub type PosField<'a> = PosFieldNode<RefRoot<'a>>;
impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<PosFieldNode<R1>> for PosFieldNode<R2> {
fn eq(&self, other: &PosFieldNode<R1>) -> bool { self.syntax == other.syntax }
}
impl<R: TreeRoot<RaTypes>> Eq for PosFieldNode<R> {}
impl<R: TreeRoot<RaTypes>> Hash for PosFieldNode<R> {
fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
}
impl<'a> AstNode<'a> for PosField<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
POS_FIELD => Some(PosField { syntax }),
_ => None,
}
}
fn syntax(self) -> SyntaxNodeRef<'a> { self.syntax }
}
impl<R: TreeRoot<RaTypes>> PosFieldNode<R> {
pub fn borrowed(&self) -> PosField {
PosFieldNode { syntax: self.syntax.borrowed() }
}
pub fn owned(&self) -> PosFieldNode {
PosFieldNode { syntax: self.syntax.owned() }
}
}
impl<'a> ast::AttrsOwner<'a> for PosField<'a> {}
impl<'a> PosField<'a> {
pub fn type_ref(self) -> Option<TypeRef<'a>> {
super::child_opt(self)
}
}
// PosFieldList
#[derive(Debug, Clone, Copy,)]
pub struct PosFieldListNode<R: TreeRoot<RaTypes> = OwnedRoot> {
pub(crate) syntax: SyntaxNode<R>,
}
pub type PosFieldList<'a> = PosFieldListNode<RefRoot<'a>>;
impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<PosFieldListNode<R1>> for PosFieldListNode<R2> {
fn eq(&self, other: &PosFieldListNode<R1>) -> bool { self.syntax == other.syntax }
}
impl<R: TreeRoot<RaTypes>> Eq for PosFieldListNode<R> {}
impl<R: TreeRoot<RaTypes>> Hash for PosFieldListNode<R> {
fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
}
impl<'a> AstNode<'a> for PosFieldList<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
POS_FIELD_LIST => Some(PosFieldList { syntax }),
_ => None,
}
}
fn syntax(self) -> SyntaxNodeRef<'a> { self.syntax }
}
impl<R: TreeRoot<RaTypes>> PosFieldListNode<R> {
pub fn borrowed(&self) -> PosFieldList {
PosFieldListNode { syntax: self.syntax.borrowed() }
}
pub fn owned(&self) -> PosFieldListNode {
PosFieldListNode { syntax: self.syntax.owned() }
}
}
impl<'a> PosFieldList<'a> {
pub fn fields(self) -> impl Iterator<Item = PosField<'a>> + 'a {
super::children(self)
}
}
// PrefixExpr
#[derive(Debug, Clone, Copy,)]
@@ -3046,7 +3313,11 @@ impl<R: TreeRoot<RaTypes>> ReferenceTypeNode<R> {
}
-impl<'a> ReferenceType<'a> {}
+impl<'a> ReferenceType<'a> {
pub fn type_ref(self) -> Option<TypeRef<'a>> {
super::child_opt(self)
}
}
// RetType
#[derive(Debug, Clone, Copy,)]
@@ -3239,7 +3510,11 @@ impl<R: TreeRoot<RaTypes>> SliceTypeNode<R> {
}
-impl<'a> SliceType<'a> {}
+impl<'a> SliceType<'a> {
pub fn type_ref(self) -> Option<TypeRef<'a>> {
super::child_opt(self)
}
}
// SourceFile
#[derive(Debug, Clone, Copy,)]
@@ -3426,11 +3701,7 @@ impl<'a> ast::NameOwner<'a> for StructDef<'a> {}
impl<'a> ast::TypeParamsOwner<'a> for StructDef<'a> {}
impl<'a> ast::AttrsOwner<'a> for StructDef<'a> {}
impl<'a> ast::DocCommentsOwner<'a> for StructDef<'a> {}
-impl<'a> StructDef<'a> {
-    pub fn fields(self) -> impl Iterator<Item = NamedFieldDef<'a>> + 'a {
-        super::children(self)
-    }
-}
+impl<'a> StructDef<'a> {}
// StructLit
#[derive(Debug, Clone, Copy,)]
@@ -3467,7 +3738,15 @@ impl<R: TreeRoot<RaTypes>> StructLitNode<R> {
}
-impl<'a> StructLit<'a> {}
+impl<'a> StructLit<'a> {
pub fn path(self) -> Option<Path<'a>> {
super::child_opt(self)
}
pub fn named_field_list(self) -> Option<NamedFieldList<'a>> {
super::child_opt(self)
}
}
// StructPat
#[derive(Debug, Clone, Copy,)]
@@ -3770,7 +4049,11 @@ impl<R: TreeRoot<RaTypes>> TupleTypeNode<R> {
}
-impl<'a> TupleType<'a> {}
+impl<'a> TupleType<'a> {
pub fn fields(self) -> impl Iterator<Item = TypeRef<'a>> + 'a {
super::children(self)
}
}
// TypeDef
#[derive(Debug, Clone, Copy,)]

View file

@@ -261,18 +261,20 @@ Grammar(
            "TypeParamsOwner",
            "AttrsOwner",
            "DocCommentsOwner"
-        ],
-        collections: [
-            ["fields", "NamedFieldDef"]
        ]
    ),
-    "NamedFieldDef": ( traits: ["NameOwner", "AttrsOwner"] ),
+    "NamedFieldDefList": (collections: [["fields", "NamedFieldDef"]]),
+    "NamedFieldDef": ( traits: ["NameOwner", "AttrsOwner"], options: ["TypeRef"] ),
+    "PosFieldList": (collections: [["fields", "PosField"]]),
+    "PosField": ( traits: ["AttrsOwner"], options: ["TypeRef"]),
    "EnumDef": ( traits: [
        "NameOwner",
        "TypeParamsOwner",
        "AttrsOwner",
        "DocCommentsOwner"
-    ] ),
+    ], options: [["variant_list", "EnumVariantList"]] ),
+    "EnumVariantList": ( collections: [["variants", "EnumVariant"]] ),
+    "EnumVariant": ( traits: ["NameOwner"], options: ["Expr"] ),
    "TraitDef": ( traits: ["NameOwner", "AttrsOwner", "DocCommentsOwner"] ),
    "Module": (
        traits: ["NameOwner", "AttrsOwner", "DocCommentsOwner" ],
@@ -301,17 +303,17 @@
    ] ),
    "ImplItem": (),
-    "ParenType": (),
+    "ParenType": (options: ["TypeRef"]),
-    "TupleType": (),
+    "TupleType": ( collections: [["fields", "TypeRef"]] ),
    "NeverType": (),
    "PathType": (options: ["Path"]),
-    "PointerType": (),
+    "PointerType": (options: ["TypeRef"]),
-    "ArrayType": (),
+    "ArrayType": ( options: ["TypeRef", "Expr"] ),
-    "SliceType": (),
+    "SliceType": ( options: ["TypeRef"] ),
-    "ReferenceType": (),
+    "ReferenceType": (options: ["TypeRef"]),
    "PlaceholderType": (),
-    "FnPointerType": (),
+    "FnPointerType": (options: ["ParamList", "RetType"]),
-    "ForType": (),
+    "ForType": (options: ["TypeRef"]),
    "ImplTraitType": (),
    "DynTraitType": (),
@@ -392,19 +394,19 @@
        collections: [ [ "pats", "Pat" ] ]
    ),
    "MatchGuard": (),
-    "StructLit": (),
+    "StructLit": (options: ["Path", "NamedFieldList"]),
-    "NamedFieldList": (),
+    "NamedFieldList": (collections: [ ["fields", "NamedField"] ]),
-    "NamedField": (),
+    "NamedField": (options: ["NameRef", "Expr"]),
    "CallExpr": (
        traits: ["ArgListOwner"],
        options: [ "Expr" ],
    ),
    "MethodCallExpr": (
        traits: ["ArgListOwner"],
-        options: [ "Expr" ],
+        options: [ "Expr", "NameRef" ],
    ),
    "IndexExpr": (),
-    "FieldExpr": (),
+    "FieldExpr": (options: ["Expr", "NameRef"]),
    "TryExpr": (options: ["Expr"]),
    "CastExpr": (options: ["Expr", "TypeRef"]),
    "RefExpr": (options: ["Expr"]),

View file

@@ -283,14 +283,10 @@ fn postfix_expr(
            // }
            L_PAREN if allow_calls => call_expr(p, lhs),
            L_BRACK if allow_calls => index_expr(p, lhs),
-            DOT if p.nth(1) == IDENT => {
-                if p.nth(2) == L_PAREN || p.nth(2) == COLONCOLON {
-                    method_call_expr(p, lhs)
-                } else {
-                    field_expr(p, lhs)
-                }
+            DOT if p.nth(1) == IDENT && (p.nth(2) == L_PAREN || p.nth(2) == COLONCOLON) => {
+                method_call_expr(p, lhs)
            }
-            DOT if p.nth(1) == INT_NUMBER => field_expr(p, lhs),
+            DOT => field_expr(p, lhs),
            // test postfix_range
            // fn foo() { let x = 1..; }
            DOTDOT | DOTDOTEQ if !EXPR_FIRST.contains(p.nth(1)) => {
@@ -355,13 +351,15 @@ fn method_call_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker {
// x.0.bar;
// }
fn field_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker {
-    assert!(p.at(DOT) && (p.nth(1) == IDENT || p.nth(1) == INT_NUMBER));
+    assert!(p.at(DOT));
    let m = lhs.precede(p);
    p.bump();
    if p.at(IDENT) {
        name_ref(p)
-    } else {
+    } else if p.at(INT_NUMBER) {
        p.bump()
+    } else {
+        p.error("expected field name or number")
    }
    m.complete(p, FIELD_EXPR)
}

View file

@@ -0,0 +1,3 @@
fn foo(a: A) {
a.
}

View file

@@ -0,0 +1,35 @@
SOURCE_FILE@[0; 24)
FN_DEF@[0; 23)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
NAME@[3; 6)
IDENT@[3; 6) "foo"
PARAM_LIST@[6; 12)
L_PAREN@[6; 7)
PARAM@[7; 11)
BIND_PAT@[7; 8)
NAME@[7; 8)
IDENT@[7; 8) "a"
COLON@[8; 9)
WHITESPACE@[9; 10)
PATH_TYPE@[10; 11)
PATH@[10; 11)
PATH_SEGMENT@[10; 11)
NAME_REF@[10; 11)
IDENT@[10; 11) "A"
R_PAREN@[11; 12)
WHITESPACE@[12; 13)
BLOCK@[13; 23)
L_CURLY@[13; 14)
WHITESPACE@[14; 19)
FIELD_EXPR@[19; 21)
PATH_EXPR@[19; 20)
PATH@[19; 20)
PATH_SEGMENT@[19; 20)
NAME_REF@[19; 20)
IDENT@[19; 20) "a"
DOT@[20; 21)
err: `expected field name or number`
WHITESPACE@[21; 22)
R_CURLY@[22; 23)
WHITESPACE@[23; 24)