Merge branch 'master' of https://github.com/rust-analyzer/rust-analyzer into feature/themes

Seivan Heidari 2019-11-12 19:04:54 +01:00
commit 0525778a3a
24 changed files with 840 additions and 677 deletions


@@ -1,11 +1,12 @@
//! ra_cfg defines conditional compiling options, `cfg` attibute parser and evaluator
+mod cfg_expr;
use std::iter::IntoIterator;
use ra_syntax::SmolStr;
use rustc_hash::FxHashSet;
-mod cfg_expr;
pub use cfg_expr::{parse_cfg, CfgExpr};
/// Configuration options used for conditional compilition on items with `cfg` attributes.


@@ -1,5 +1,6 @@
//! FIXME: write short doc here
+use std::str::FromStr;
use std::sync::Arc;
use ra_cfg::CfgOptions;
@@ -164,7 +165,7 @@ fn parse_meta(meta: &str) -> ParsedMeta {
match key {
"crate" => krate = Some(value.to_string()),
"deps" => deps = value.split(',').map(|it| it.to_string()).collect(),
-"edition" => edition = Edition::from_string(&value),
+"edition" => edition = Edition::from_str(&value).unwrap(),
"cfg" => {
for key in value.split(',') {
match split1(key, '=') {


@@ -13,6 +13,7 @@ use ra_syntax::SmolStr;
use rustc_hash::FxHashSet;
use crate::{RelativePath, RelativePathBuf};
+use std::str::FromStr;
/// `FileId` is an integer which uniquely identifies a file. File paths are
/// messy and system-dependent, so most of the code should work directly with
@@ -97,12 +98,18 @@ pub enum Edition {
Edition2015,
}
-impl Edition {
-//FIXME: replace with FromStr with proper error handling
-pub fn from_string(s: &str) -> Edition {
+#[derive(Debug)]
+pub struct ParseEditionError {
+pub msg: String,
+}
+impl FromStr for Edition {
+type Err = ParseEditionError;
+fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
-"2015" => Edition::Edition2015,
-"2018" | _ => Edition::Edition2018,
+"2015" => Ok(Edition::Edition2015),
+"2018" => Ok(Edition::Edition2018),
+_ => Err(ParseEditionError { msg: format!("unknown edition: {}", s) }),
}
}
}
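
For illustration, here is a minimal, self-contained sketch of how callers migrate from the old `Edition::from_string` to the standard `FromStr` interface. The types are mirrored from the hunk above; the `main` driver is only a hypothetical usage example, not part of the commit.

    use std::str::FromStr;

    // Mirrors the new definitions from ra_db shown above.
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    pub enum Edition {
        Edition2018,
        Edition2015,
    }

    #[derive(Debug)]
    pub struct ParseEditionError {
        pub msg: String,
    }

    impl FromStr for Edition {
        type Err = ParseEditionError;
        fn from_str(s: &str) -> Result<Self, Self::Err> {
            match s {
                "2015" => Ok(Edition::Edition2015),
                "2018" => Ok(Edition::Edition2018),
                _ => Err(ParseEditionError { msg: format!("unknown edition: {}", s) }),
            }
        }
    }

    fn main() {
        // "2015" and "2018" parse; anything else is now a real error instead of
        // silently falling back to 2018 as `from_string` used to do.
        assert_eq!(Edition::from_str("2015").unwrap(), Edition::Edition2015);
        assert!(Edition::from_str("2021").is_err());
        // str::parse goes through the same impl.
        let e: Edition = "2018".parse().unwrap_or(Edition::Edition2018);
        assert_eq!(e, Edition::Edition2018);
    }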


@@ -1,5 +1,7 @@
//! This crate provides some utilities for indenting rust code.
+//!
+use std::iter::successors;
use itertools::Itertools;
use ra_syntax::{
ast::{self, AstNode, AstToken},
@@ -7,7 +9,6 @@ use ra_syntax::{
SyntaxKind::*,
SyntaxNode, SyntaxToken, T,
};
-use std::iter::successors;
pub fn reindent(text: &str, indent: &str) -> String {
let indent = format!("\n{}", indent);


@@ -550,7 +550,7 @@ where
}
fn body(self, db: &impl HirDatabase) -> Arc<Body> {
-db.body_hir(self.into())
+db.body(self.into())
}
fn body_source_map(self, db: &impl HirDatabase) -> Arc<BodySourceMap> {
@@ -564,7 +564,7 @@ impl HasBody for DefWithBody {
}
fn body(self, db: &impl HirDatabase) -> Arc<Body> {
-db.body_hir(self)
+db.body(self)
}
fn body_source_map(self, db: &impl HirDatabase) -> Arc<BodySourceMap> {
@@ -666,7 +666,7 @@ impl Function {
}
pub fn body(self, db: &impl HirDatabase) -> Arc<Body> {
-db.body_hir(self.into())
+db.body(self.into())
}
pub fn ty(self, db: &impl HirDatabase) -> Ty {
@@ -1079,7 +1079,7 @@ pub struct Local {
impl Local {
pub fn name(self, db: &impl HirDatabase) -> Option<Name> {
-let body = db.body_hir(self.parent);
+let body = db.body(self.parent);
match &body[self.pat_id] {
Pat::Bind { name, .. } => Some(name.clone()),
_ => None,
@@ -1091,7 +1091,7 @@ impl Local {
}
pub fn is_mut(self, db: &impl HirDatabase) -> bool {
-let body = db.body_hir(self.parent);
+let body = db.body(self.parent);
match &body[self.pat_id] {
Pat::Bind { mode, .. } => match mode {
BindingAnnotation::Mutable | BindingAnnotation::RefMut => true,


@@ -8,6 +8,7 @@ use ra_syntax::SmolStr;
use crate::{
debug::HirDebugDatabase,
+expr::{Body, BodySourceMap},
generics::{GenericDef, GenericParams},
ids,
impl_block::{ImplBlock, ImplSourceMap, ModuleImplBlocks},
@@ -113,13 +114,10 @@ pub trait HirDatabase: DefDatabase + AstDatabase {
fn generic_defaults(&self, def: GenericDef) -> Substs;
#[salsa::invoke(crate::expr::body_with_source_map_query)]
-fn body_with_source_map(
-&self,
-def: DefWithBody,
-) -> (Arc<crate::expr::Body>, Arc<crate::expr::BodySourceMap>);
+fn body_with_source_map(&self, def: DefWithBody) -> (Arc<Body>, Arc<BodySourceMap>);
-#[salsa::invoke(crate::expr::body_hir_query)]
+#[salsa::invoke(crate::expr::body_query)]
-fn body_hir(&self, def: DefWithBody) -> Arc<crate::expr::Body>;
+fn body(&self, def: DefWithBody) -> Arc<Body>;
#[salsa::invoke(crate::ty::method_resolution::CrateImplBlocks::impls_in_crate_query)]
fn impls_in_crate(&self, krate: Crate) -> Arc<CrateImplBlocks>;


@@ -1,549 +1,24 @@
//! FIXME: write short doc here
-pub(crate) mod lower;
pub(crate) mod scope;
pub(crate) mod validation;
-use std::{ops::Index, sync::Arc};
+use std::sync::Arc;
-use hir_def::{
-path::GenericArgs,
-type_ref::{Mutability, TypeRef},
-};
-use ra_arena::{impl_arena_id, map::ArenaMap, Arena, RawId};
use ra_syntax::{ast, AstPtr};
-use rustc_hash::FxHashMap;
-use crate::{
-db::HirDatabase,
-ty::primitive::{UncertainFloatTy, UncertainIntTy},
-DefWithBody, Either, HasSource, Name, Path, Resolver, Source,
-};
+use crate::{db::HirDatabase, DefWithBody, HasSource, Resolver};
pub use self::scope::ExprScopes;
+pub use hir_def::{
+body::{Body, BodySourceMap, ExprPtr, ExprSource, PatPtr, PatSource},
+expr::{
+ArithOp, Array, BinaryOp, BindingAnnotation, CmpOp, Expr, ExprId, Literal, LogicOp,
+MatchArm, Ordering, Pat, PatId, RecordFieldPat, RecordLitField, Statement, UnaryOp,
+},
+};
-#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
-pub struct ExprId(RawId);
-impl_arena_id!(ExprId);
-#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
-pub struct PatId(RawId);
-impl_arena_id!(PatId);
/// The body of an item (function, const etc.).
#[derive(Debug, Eq, PartialEq)]
pub struct Body {
/// The def of the item this body belongs to
owner: DefWithBody,
exprs: Arena<ExprId, Expr>,
pats: Arena<PatId, Pat>,
/// The patterns for the function's parameters. While the parameter types are
/// part of the function signature, the patterns are not (they don't change
/// the external type of the function).
///
/// If this `Body` is for the body of a constant, this will just be
/// empty.
params: Vec<PatId>,
/// The `ExprId` of the actual body expression.
body_expr: ExprId,
}
type ExprPtr = Either<AstPtr<ast::Expr>, AstPtr<ast::RecordField>>;
type ExprSource = Source<ExprPtr>;
type PatPtr = Either<AstPtr<ast::Pat>, AstPtr<ast::SelfParam>>;
type PatSource = Source<PatPtr>;
/// An item body together with the mapping from syntax nodes to HIR expression
/// IDs. This is needed to go from e.g. a position in a file to the HIR
/// expression containing it; but for type inference etc., we want to operate on
/// a structure that is agnostic to the actual positions of expressions in the
/// file, so that we don't recompute types whenever some whitespace is typed.
///
/// One complication here is that, due to macro expansion, a single `Body` might
/// be spread across several files. So, for each ExprId and PatId, we record
/// both the HirFileId and the position inside the file. However, we only store
/// AST -> ExprId mapping for non-macro files, as it is not clear how to handle
/// this properly for macros.
#[derive(Default, Debug, Eq, PartialEq)]
pub struct BodySourceMap {
expr_map: FxHashMap<ExprPtr, ExprId>,
expr_map_back: ArenaMap<ExprId, ExprSource>,
pat_map: FxHashMap<PatPtr, PatId>,
pat_map_back: ArenaMap<PatId, PatSource>,
field_map: FxHashMap<(ExprId, usize), AstPtr<ast::RecordField>>,
}
impl Body {
pub fn params(&self) -> &[PatId] {
&self.params
}
pub fn body_expr(&self) -> ExprId {
self.body_expr
}
pub fn owner(&self) -> DefWithBody {
self.owner
}
pub fn exprs(&self) -> impl Iterator<Item = (ExprId, &Expr)> {
self.exprs.iter()
}
pub fn pats(&self) -> impl Iterator<Item = (PatId, &Pat)> {
self.pats.iter()
}
}
// needs arbitrary_self_types to be a method... or maybe move to the def?
pub(crate) fn resolver_for_expr(
body: Arc<Body>,
db: &impl HirDatabase,
expr_id: ExprId,
) -> Resolver {
let scopes = db.expr_scopes(body.owner);
resolver_for_scope(body, db, scopes.scope_for(expr_id))
}
pub(crate) fn resolver_for_scope(
body: Arc<Body>,
db: &impl HirDatabase,
scope_id: Option<scope::ScopeId>,
) -> Resolver {
let mut r = body.owner.resolver(db);
let scopes = db.expr_scopes(body.owner);
let scope_chain = scopes.scope_chain(scope_id).collect::<Vec<_>>();
for scope in scope_chain.into_iter().rev() {
r = r.push_expr_scope(Arc::clone(&scopes), scope);
}
r
}
impl Index<ExprId> for Body {
type Output = Expr;
fn index(&self, expr: ExprId) -> &Expr {
&self.exprs[expr]
}
}
impl Index<PatId> for Body {
type Output = Pat;
fn index(&self, pat: PatId) -> &Pat {
&self.pats[pat]
}
}
impl BodySourceMap {
pub(crate) fn expr_syntax(&self, expr: ExprId) -> Option<ExprSource> {
self.expr_map_back.get(expr).copied()
}
pub(crate) fn node_expr(&self, node: &ast::Expr) -> Option<ExprId> {
self.expr_map.get(&Either::A(AstPtr::new(node))).cloned()
}
pub(crate) fn pat_syntax(&self, pat: PatId) -> Option<PatSource> {
self.pat_map_back.get(pat).copied()
}
pub(crate) fn node_pat(&self, node: &ast::Pat) -> Option<PatId> {
self.pat_map.get(&Either::A(AstPtr::new(node))).cloned()
}
pub(crate) fn field_syntax(&self, expr: ExprId, field: usize) -> AstPtr<ast::RecordField> {
self.field_map[&(expr, field)]
}
}
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum Literal {
String(String),
ByteString(Vec<u8>),
Char(char),
Bool(bool),
Int(u64, UncertainIntTy),
Float(u64, UncertainFloatTy), // FIXME: f64 is not Eq
}
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum Expr {
/// This is produced if syntax tree does not have a required expression piece.
Missing,
Path(Path),
If {
condition: ExprId,
then_branch: ExprId,
else_branch: Option<ExprId>,
},
Block {
statements: Vec<Statement>,
tail: Option<ExprId>,
},
Loop {
body: ExprId,
},
While {
condition: ExprId,
body: ExprId,
},
For {
iterable: ExprId,
pat: PatId,
body: ExprId,
},
Call {
callee: ExprId,
args: Vec<ExprId>,
},
MethodCall {
receiver: ExprId,
method_name: Name,
args: Vec<ExprId>,
generic_args: Option<GenericArgs>,
},
Match {
expr: ExprId,
arms: Vec<MatchArm>,
},
Continue,
Break {
expr: Option<ExprId>,
},
Return {
expr: Option<ExprId>,
},
RecordLit {
path: Option<Path>,
fields: Vec<RecordLitField>,
spread: Option<ExprId>,
},
Field {
expr: ExprId,
name: Name,
},
Await {
expr: ExprId,
},
Try {
expr: ExprId,
},
TryBlock {
body: ExprId,
},
Cast {
expr: ExprId,
type_ref: TypeRef,
},
Ref {
expr: ExprId,
mutability: Mutability,
},
Box {
expr: ExprId,
},
UnaryOp {
expr: ExprId,
op: UnaryOp,
},
BinaryOp {
lhs: ExprId,
rhs: ExprId,
op: Option<BinaryOp>,
},
Index {
base: ExprId,
index: ExprId,
},
Lambda {
args: Vec<PatId>,
arg_types: Vec<Option<TypeRef>>,
body: ExprId,
},
Tuple {
exprs: Vec<ExprId>,
},
Array(Array),
Literal(Literal),
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub enum BinaryOp {
LogicOp(LogicOp),
ArithOp(ArithOp),
CmpOp(CmpOp),
Assignment { op: Option<ArithOp> },
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub enum LogicOp {
And,
Or,
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub enum CmpOp {
Eq { negated: bool },
Ord { ordering: Ordering, strict: bool },
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub enum Ordering {
Less,
Greater,
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub enum ArithOp {
Add,
Mul,
Sub,
Div,
Rem,
Shl,
Shr,
BitXor,
BitOr,
BitAnd,
}
pub use ra_syntax::ast::PrefixOp as UnaryOp;
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum Array {
ElementList(Vec<ExprId>),
Repeat { initializer: ExprId, repeat: ExprId },
}
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct MatchArm {
pub pats: Vec<PatId>,
pub guard: Option<ExprId>,
pub expr: ExprId,
}
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct RecordLitField {
pub name: Name,
pub expr: ExprId,
}
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum Statement {
Let { pat: PatId, type_ref: Option<TypeRef>, initializer: Option<ExprId> },
Expr(ExprId),
}
impl Expr {
pub fn walk_child_exprs(&self, mut f: impl FnMut(ExprId)) {
match self {
Expr::Missing => {}
Expr::Path(_) => {}
Expr::If { condition, then_branch, else_branch } => {
f(*condition);
f(*then_branch);
if let Some(else_branch) = else_branch {
f(*else_branch);
}
}
Expr::Block { statements, tail } => {
for stmt in statements {
match stmt {
Statement::Let { initializer, .. } => {
if let Some(expr) = initializer {
f(*expr);
}
}
Statement::Expr(e) => f(*e),
}
}
if let Some(expr) = tail {
f(*expr);
}
}
Expr::TryBlock { body } => f(*body),
Expr::Loop { body } => f(*body),
Expr::While { condition, body } => {
f(*condition);
f(*body);
}
Expr::For { iterable, body, .. } => {
f(*iterable);
f(*body);
}
Expr::Call { callee, args } => {
f(*callee);
for arg in args {
f(*arg);
}
}
Expr::MethodCall { receiver, args, .. } => {
f(*receiver);
for arg in args {
f(*arg);
}
}
Expr::Match { expr, arms } => {
f(*expr);
for arm in arms {
f(arm.expr);
}
}
Expr::Continue => {}
Expr::Break { expr } | Expr::Return { expr } => {
if let Some(expr) = expr {
f(*expr);
}
}
Expr::RecordLit { fields, spread, .. } => {
for field in fields {
f(field.expr);
}
if let Some(expr) = spread {
f(*expr);
}
}
Expr::Lambda { body, .. } => {
f(*body);
}
Expr::BinaryOp { lhs, rhs, .. } => {
f(*lhs);
f(*rhs);
}
Expr::Index { base, index } => {
f(*base);
f(*index);
}
Expr::Field { expr, .. }
| Expr::Await { expr }
| Expr::Try { expr }
| Expr::Cast { expr, .. }
| Expr::Ref { expr, .. }
| Expr::UnaryOp { expr, .. }
| Expr::Box { expr } => {
f(*expr);
}
Expr::Tuple { exprs } => {
for expr in exprs {
f(*expr);
}
}
Expr::Array(a) => match a {
Array::ElementList(exprs) => {
for expr in exprs {
f(*expr);
}
}
Array::Repeat { initializer, repeat } => {
f(*initializer);
f(*repeat)
}
},
Expr::Literal(_) => {}
}
}
}
/// Explicit binding annotations given in the HIR for a binding. Note
/// that this is not the final binding *mode* that we infer after type
/// inference.
#[derive(Clone, PartialEq, Eq, Debug, Copy)]
pub enum BindingAnnotation {
/// No binding annotation given: this means that the final binding mode
/// will depend on whether we have skipped through a `&` reference
/// when matching. For example, the `x` in `Some(x)` will have binding
/// mode `None`; if you do `let Some(x) = &Some(22)`, it will
/// ultimately be inferred to be by-reference.
Unannotated,
/// Annotated with `mut x` -- could be either ref or not, similar to `None`.
Mutable,
/// Annotated as `ref`, like `ref x`
Ref,
/// Annotated as `ref mut x`.
RefMut,
}
impl BindingAnnotation {
fn new(is_mutable: bool, is_ref: bool) -> Self {
match (is_mutable, is_ref) {
(true, true) => BindingAnnotation::RefMut,
(false, true) => BindingAnnotation::Ref,
(true, false) => BindingAnnotation::Mutable,
(false, false) => BindingAnnotation::Unannotated,
}
}
}
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct RecordFieldPat {
pub(crate) name: Name,
pub(crate) pat: PatId,
}
/// Close relative to rustc's hir::PatKind
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum Pat {
Missing,
Wild,
Tuple(Vec<PatId>),
Record {
path: Option<Path>,
args: Vec<RecordFieldPat>,
// FIXME: 'ellipsis' option
},
Range {
start: ExprId,
end: ExprId,
},
Slice {
prefix: Vec<PatId>,
rest: Option<PatId>,
suffix: Vec<PatId>,
},
Path(Path),
Lit(ExprId),
Bind {
mode: BindingAnnotation,
name: Name,
subpat: Option<PatId>,
},
TupleStruct {
path: Option<Path>,
args: Vec<PatId>,
},
Ref {
pat: PatId,
mutability: Mutability,
},
}
impl Pat {
pub fn walk_child_pats(&self, mut f: impl FnMut(PatId)) {
match self {
Pat::Range { .. } | Pat::Lit(..) | Pat::Path(..) | Pat::Wild | Pat::Missing => {}
Pat::Bind { subpat, .. } => {
subpat.iter().copied().for_each(f);
}
Pat::Tuple(args) | Pat::TupleStruct { args, .. } => {
args.iter().copied().for_each(f);
}
Pat::Ref { pat, .. } => f(*pat),
Pat::Slice { prefix, rest, suffix } => {
let total_iter = prefix.iter().chain(rest.iter()).chain(suffix.iter());
total_iter.copied().for_each(f);
}
Pat::Record { args, .. } => {
args.iter().map(|f| f.pat).for_each(f);
}
}
}
}
// Queries
pub(crate) fn body_with_source_map_query(
db: &impl HirDatabase,
def: DefWithBody,
@@ -565,11 +40,35 @@ pub(crate) fn body_with_source_map_query(
(src.file_id, src.ast.body())
}
};
+let resolver = hir_def::body::MacroResolver::new(db, def.module(db).id);
-let (body, source_map) = lower::lower(db, def.resolver(db), file_id, def, params, body);
+let (body, source_map) = Body::new(db, resolver, file_id, params, body);
(Arc::new(body), Arc::new(source_map))
}
-pub(crate) fn body_hir_query(db: &impl HirDatabase, def: DefWithBody) -> Arc<Body> {
+pub(crate) fn body_query(db: &impl HirDatabase, def: DefWithBody) -> Arc<Body> {
db.body_with_source_map(def).0
}
+// needs arbitrary_self_types to be a method... or maybe move to the def?
+pub(crate) fn resolver_for_expr(
+db: &impl HirDatabase,
+owner: DefWithBody,
+expr_id: ExprId,
+) -> Resolver {
+let scopes = db.expr_scopes(owner);
+resolver_for_scope(db, owner, scopes.scope_for(expr_id))
+}
+pub(crate) fn resolver_for_scope(
+db: &impl HirDatabase,
+owner: DefWithBody,
+scope_id: Option<scope::ScopeId>,
+) -> Resolver {
+let mut r = owner.resolver(db);
+let scopes = db.expr_scopes(owner);
+let scope_chain = scopes.scope_chain(scope_id).collect::<Vec<_>>();
+for scope in scope_chain.into_iter().rev() {
+r = r.push_expr_scope(Arc::clone(&scopes), scope);
+}
+r
+}


@@ -46,7 +46,7 @@ pub(crate) struct ScopeData {
impl ExprScopes {
pub(crate) fn expr_scopes_query(db: &impl HirDatabase, def: DefWithBody) -> Arc<ExprScopes> {
-let body = db.body_hir(def);
+let body = db.body(def);
let res = ExprScopes::new(body);
Arc::new(res)
}


@@ -5,6 +5,5 @@ test_utils::marks!(
type_var_cycles_resolve_as_possible
type_var_resolves_to_int_var
match_ergonomics_ref
-infer_while_let
coerce_merge_fail_fallback
);


@@ -150,7 +150,7 @@ impl SourceAnalyzer {
None => scope_for(&scopes, &source_map, &node),
Some(offset) => scope_for_offset(&scopes, &source_map, file_id.into(), offset),
};
-let resolver = expr::resolver_for_scope(def.body(db), db, scope);
+let resolver = expr::resolver_for_scope(db, def, scope);
SourceAnalyzer {
resolver,
body_owner: Some(def),


@@ -43,7 +43,7 @@ use crate::{
expr::{BindingAnnotation, Body, ExprId, PatId},
resolve::{Resolver, TypeNs},
ty::infer::diagnostics::InferenceDiagnostic,
-Adt, AssocItem, ConstData, DefWithBody, FnData, Function, HasBody, Path, StructField,
+Adt, AssocItem, ConstData, DefWithBody, FnData, Function, Path, StructField,
};
macro_rules! ty_app {
@@ -64,9 +64,8 @@ mod coerce;
/// The entry point of type inference.
pub fn infer_query(db: &impl HirDatabase, def: DefWithBody) -> Arc<InferenceResult> {
let _p = profile("infer_query");
-let body = def.body(db);
let resolver = def.resolver(db);
-let mut ctx = InferenceContext::new(db, body, resolver);
+let mut ctx = InferenceContext::new(db, def, resolver);
match def {
DefWithBody::Const(ref c) => ctx.collect_const(&c.data(db)),
@@ -187,6 +186,7 @@ impl Index<PatId> for InferenceResult {
#[derive(Clone, Debug)]
struct InferenceContext<'a, D: HirDatabase> {
db: &'a D,
+owner: DefWithBody,
body: Arc<Body>,
resolver: Resolver,
var_unification_table: InPlaceUnificationTable<TypeVarId>,
@@ -204,7 +204,7 @@ struct InferenceContext<'a, D: HirDatabase> {
}
impl<'a, D: HirDatabase> InferenceContext<'a, D> {
-fn new(db: &'a D, body: Arc<Body>, resolver: Resolver) -> Self {
+fn new(db: &'a D, owner: DefWithBody, resolver: Resolver) -> Self {
InferenceContext {
result: InferenceResult::default(),
var_unification_table: InPlaceUnificationTable::new(),
@@ -213,7 +213,8 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
trait_env: lower::trait_env(db, &resolver),
coerce_unsized_map: Self::init_coerce_unsized_map(db, &resolver),
db,
-body,
+owner,
+body: db.body(owner),
resolver,
}
}


@@ -130,10 +130,8 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
TypeCtor::FnPtr { num_args: sig_tys.len() as u16 - 1 },
Substs(sig_tys.into()),
);
-let closure_ty = Ty::apply_one(
-TypeCtor::Closure { def: self.body.owner(), expr: tgt_expr },
-sig_ty,
-);
+let closure_ty =
+Ty::apply_one(TypeCtor::Closure { def: self.owner, expr: tgt_expr }, sig_ty);
// Eagerly try to relate the closure type with the expected
// type, otherwise we often won't have enough information to
@@ -184,7 +182,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
}
Expr::Path(p) => {
// FIXME this could be more efficient...
-let resolver = expr::resolver_for_expr(self.body.clone(), self.db, tgt_expr);
+let resolver = expr::resolver_for_expr(self.db, self.owner, tgt_expr);
self.infer_path(&resolver, p, tgt_expr.into()).unwrap_or(Ty::Unknown)
}
Expr::Continue => Ty::simple(TypeCtor::Never),
@@ -452,8 +450,8 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
Ty::apply_one(TypeCtor::Ref(Mutability::Shared), slice_type)
}
Literal::Char(..) => Ty::simple(TypeCtor::Char),
-Literal::Int(_v, ty) => Ty::simple(TypeCtor::Int(*ty)),
+Literal::Int(_v, ty) => Ty::simple(TypeCtor::Int((*ty).into())),
-Literal::Float(_v, ty) => Ty::simple(TypeCtor::Float(*ty)),
+Literal::Float(_v, ty) => Ty::simple(TypeCtor::Float((*ty).into())),
},
};
// use a new type variable if we got Ty::Unknown here


@@ -9,7 +9,7 @@ use std::iter;
use std::sync::Arc;
use hir_def::{
-builtin_type::BuiltinType,
+builtin_type::{BuiltinFloat, BuiltinInt, BuiltinType},
path::{GenericArg, PathSegment},
type_ref::{TypeBound, TypeRef},
};
@@ -25,7 +25,7 @@ use crate::{
generics::{GenericDef, WherePredicate},
resolve::{Resolver, TypeNs},
ty::{
-primitive::{FloatTy, IntTy},
+primitive::{FloatTy, IntTy, UncertainFloatTy, UncertainIntTy},
Adt,
},
util::make_mut_slice,
@@ -657,13 +657,41 @@ fn type_for_builtin(def: BuiltinType) -> Ty {
BuiltinType::Char => TypeCtor::Char,
BuiltinType::Bool => TypeCtor::Bool,
BuiltinType::Str => TypeCtor::Str,
-BuiltinType::Int { signedness, bitness } => {
-TypeCtor::Int(IntTy { signedness, bitness }.into())
-}
-BuiltinType::Float { bitness } => TypeCtor::Float(FloatTy { bitness }.into()),
+BuiltinType::Int(t) => TypeCtor::Int(IntTy::from(t).into()),
+BuiltinType::Float(t) => TypeCtor::Float(FloatTy::from(t).into()),
})
}
impl From<BuiltinInt> for IntTy {
fn from(t: BuiltinInt) -> Self {
IntTy { signedness: t.signedness, bitness: t.bitness }
}
}
impl From<BuiltinFloat> for FloatTy {
fn from(t: BuiltinFloat) -> Self {
FloatTy { bitness: t.bitness }
}
}
impl From<Option<BuiltinInt>> for UncertainIntTy {
fn from(t: Option<BuiltinInt>) -> Self {
match t {
None => UncertainIntTy::Unknown,
Some(t) => UncertainIntTy::Known(t.into()),
}
}
}
impl From<Option<BuiltinFloat>> for UncertainFloatTy {
fn from(t: Option<BuiltinFloat>) -> Self {
match t {
None => UncertainFloatTy::Unknown,
Some(t) => UncertainFloatTy::Known(t.into()),
}
}
}
fn fn_sig_for_struct_constructor(db: &impl HirDatabase, def: Struct) -> FnSig {
let struct_data = db.struct_data(def.id.into());
let fields = match struct_data.variant_data.fields() {
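
A self-contained sketch of why the `(*ty).into()` calls in infer/expr.rs earlier in this commit type-check: with the new `From` impls above, an `Option<BuiltinInt>` coming from a literal (`None` meaning "no suffix") converts into the ty-layer `UncertainIntTy`. The types below are trimmed, illustrative mirrors, not the real definitions.

    // Trimmed stand-ins for the hir_def and ty-layer types involved.
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct BuiltinInt { bits: u8 } // stands in for { signedness, bitness }

    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct IntTy { bits: u8 }

    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    enum UncertainIntTy {
        Unknown,
        Known(IntTy),
    }

    impl From<BuiltinInt> for IntTy {
        fn from(t: BuiltinInt) -> Self {
            IntTy { bits: t.bits }
        }
    }

    // Mirrors the new impl in ty/lower.rs: no suffix means "not known yet".
    impl From<Option<BuiltinInt>> for UncertainIntTy {
        fn from(t: Option<BuiltinInt>) -> Self {
            match t {
                None => UncertainIntTy::Unknown,
                Some(t) => UncertainIntTy::Known(t.into()),
            }
        }
    }

    fn main() {
        let suffixed: Option<BuiltinInt> = Some(BuiltinInt { bits: 8 });
        let unsuffixed: Option<BuiltinInt> = None;
        // This is the shape of `TypeCtor::Int((*ty).into())` in infer/expr.rs.
        assert_eq!(UncertainIntTy::from(suffixed), UncertainIntTy::Known(IntTy { bits: 8 }));
        assert_eq!(UncertainIntTy::from(unsuffixed), UncertainIntTy::Unknown);
    }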


@@ -129,24 +129,6 @@ impl IntTy {
(Signedness::Unsigned, IntBitness::X128) => "u128",
}
}
pub(crate) fn from_suffix(suffix: &str) -> Option<IntTy> {
match suffix {
"isize" => Some(IntTy::isize()),
"i8" => Some(IntTy::i8()),
"i16" => Some(IntTy::i16()),
"i32" => Some(IntTy::i32()),
"i64" => Some(IntTy::i64()),
"i128" => Some(IntTy::i128()),
"usize" => Some(IntTy::usize()),
"u8" => Some(IntTy::u8()),
"u16" => Some(IntTy::u16()),
"u32" => Some(IntTy::u32()),
"u64" => Some(IntTy::u64()),
"u128" => Some(IntTy::u128()),
_ => None,
}
}
}
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
@@ -181,12 +163,4 @@ impl FloatTy {
FloatBitness::X64 => "f64",
}
}
pub(crate) fn from_suffix(suffix: &str) -> Option<FloatTy> {
match suffix {
"f32" => Some(FloatTy::f32()),
"f64" => Some(FloatTy::f64()),
_ => None,
}
}
}


@@ -222,7 +222,6 @@ mod collections {
#[test]
fn infer_while_let() {
-covers!(infer_while_let);
let (db, pos) = TestDB::with_position(
r#"
//- /main.rs
@@ -4825,7 +4824,7 @@ fn main() {
@r###"
![0; 1) '6': i32
[64; 88) '{ ...!(); }': ()
[74; 75) 'x': i32
"###
);
}


@@ -714,7 +714,7 @@ fn closure_fn_trait_impl_datum(
let fn_once_trait = get_fn_trait(db, krate, super::FnTrait::FnOnce)?;
let trait_ = get_fn_trait(db, krate, data.fn_trait)?; // get corresponding fn trait
-let num_args: u16 = match &db.body_hir(data.def)[data.expr] {
+let num_args: u16 = match &db.body(data.def)[data.expr] {
crate::expr::Expr::Lambda { args, .. } => args.len() as u16,
_ => {
log::warn!("closure for closure type {:?} not found", data);


@@ -0,0 +1,144 @@
//! FIXME: write short doc here
mod lower;
use std::{ops::Index, sync::Arc};
use hir_expand::{either::Either, HirFileId, MacroDefId, Source};
use ra_arena::{map::ArenaMap, Arena};
use ra_syntax::{ast, AstPtr};
use rustc_hash::FxHashMap;
use crate::{
db::DefDatabase2,
expr::{Expr, ExprId, Pat, PatId},
nameres::CrateDefMap,
path::Path,
ModuleId,
};
pub struct MacroResolver {
crate_def_map: Arc<CrateDefMap>,
module: ModuleId,
}
impl MacroResolver {
pub fn new(db: &impl DefDatabase2, module: ModuleId) -> MacroResolver {
MacroResolver { crate_def_map: db.crate_def_map(module.krate), module }
}
pub(crate) fn resolve_path_as_macro(
&self,
db: &impl DefDatabase2,
path: &Path,
) -> Option<MacroDefId> {
self.crate_def_map.resolve_path(db, self.module.module_id, path).0.get_macros()
}
}
/// The body of an item (function, const etc.).
#[derive(Debug, Eq, PartialEq)]
pub struct Body {
exprs: Arena<ExprId, Expr>,
pats: Arena<PatId, Pat>,
/// The patterns for the function's parameters. While the parameter types are
/// part of the function signature, the patterns are not (they don't change
/// the external type of the function).
///
/// If this `Body` is for the body of a constant, this will just be
/// empty.
params: Vec<PatId>,
/// The `ExprId` of the actual body expression.
body_expr: ExprId,
}
pub type ExprPtr = Either<AstPtr<ast::Expr>, AstPtr<ast::RecordField>>;
pub type ExprSource = Source<ExprPtr>;
pub type PatPtr = Either<AstPtr<ast::Pat>, AstPtr<ast::SelfParam>>;
pub type PatSource = Source<PatPtr>;
/// An item body together with the mapping from syntax nodes to HIR expression
/// IDs. This is needed to go from e.g. a position in a file to the HIR
/// expression containing it; but for type inference etc., we want to operate on
/// a structure that is agnostic to the actual positions of expressions in the
/// file, so that we don't recompute types whenever some whitespace is typed.
///
/// One complication here is that, due to macro expansion, a single `Body` might
/// be spread across several files. So, for each ExprId and PatId, we record
/// both the HirFileId and the position inside the file. However, we only store
/// AST -> ExprId mapping for non-macro files, as it is not clear how to handle
/// this properly for macros.
#[derive(Default, Debug, Eq, PartialEq)]
pub struct BodySourceMap {
expr_map: FxHashMap<ExprPtr, ExprId>,
expr_map_back: ArenaMap<ExprId, ExprSource>,
pat_map: FxHashMap<PatPtr, PatId>,
pat_map_back: ArenaMap<PatId, PatSource>,
field_map: FxHashMap<(ExprId, usize), AstPtr<ast::RecordField>>,
}
impl Body {
pub fn new(
db: &impl DefDatabase2,
resolver: MacroResolver,
file_id: HirFileId,
params: Option<ast::ParamList>,
body: Option<ast::Expr>,
) -> (Body, BodySourceMap) {
lower::lower(db, resolver, file_id, params, body)
}
pub fn params(&self) -> &[PatId] {
&self.params
}
pub fn body_expr(&self) -> ExprId {
self.body_expr
}
pub fn exprs(&self) -> impl Iterator<Item = (ExprId, &Expr)> {
self.exprs.iter()
}
pub fn pats(&self) -> impl Iterator<Item = (PatId, &Pat)> {
self.pats.iter()
}
}
impl Index<ExprId> for Body {
type Output = Expr;
fn index(&self, expr: ExprId) -> &Expr {
&self.exprs[expr]
}
}
impl Index<PatId> for Body {
type Output = Pat;
fn index(&self, pat: PatId) -> &Pat {
&self.pats[pat]
}
}
impl BodySourceMap {
pub fn expr_syntax(&self, expr: ExprId) -> Option<ExprSource> {
self.expr_map_back.get(expr).copied()
}
pub fn node_expr(&self, node: &ast::Expr) -> Option<ExprId> {
self.expr_map.get(&Either::A(AstPtr::new(node))).cloned()
}
pub fn pat_syntax(&self, pat: PatId) -> Option<PatSource> {
self.pat_map_back.get(pat).copied()
}
pub fn node_pat(&self, node: &ast::Pat) -> Option<PatId> {
self.pat_map.get(&Either::A(AstPtr::new(node))).cloned()
}
pub fn field_syntax(&self, expr: ExprId, field: usize) -> AstPtr<ast::RecordField> {
self.field_map[&(expr, field)]
}
}
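
To make the role of `BodySourceMap` above concrete, here is a small, self-contained sketch of the two-way mapping between stable syntax pointers and arena ids that its doc comment describes. It uses plain `HashMap`s instead of `Arena`, `ArenaMap`, and real `AstPtr`s; every name here is an illustrative stand-in, not a rust-analyzer API.

    use std::collections::HashMap;

    // Stand-ins for AstPtr<ast::Expr> and ExprId.
    #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
    struct FakeExprPtr(u32); // position-like key derived from the syntax tree
    #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
    struct FakeExprId(u32); // index into the body's expression arena

    #[derive(Default)]
    struct FakeSourceMap {
        expr_map: HashMap<FakeExprPtr, FakeExprId>,      // syntax -> HIR
        expr_map_back: HashMap<FakeExprId, FakeExprPtr>, // HIR -> syntax
    }

    impl FakeSourceMap {
        fn alloc(&mut self, ptr: FakeExprPtr, next_id: u32) -> FakeExprId {
            let id = FakeExprId(next_id);
            self.expr_map.insert(ptr, id);
            self.expr_map_back.insert(id, ptr);
            id
        }
        // Analogous to BodySourceMap::node_expr: position in a file -> HIR expression.
        fn node_expr(&self, ptr: FakeExprPtr) -> Option<FakeExprId> {
            self.expr_map.get(&ptr).copied()
        }
        // Analogous to BodySourceMap::expr_syntax: HIR expression -> position in a file.
        fn expr_syntax(&self, id: FakeExprId) -> Option<FakeExprPtr> {
            self.expr_map_back.get(&id).copied()
        }
    }

    fn main() {
        let mut map = FakeSourceMap::default();
        let id = map.alloc(FakeExprPtr(42), 0);
        assert_eq!(map.node_expr(FakeExprPtr(42)), Some(id));
        assert_eq!(map.expr_syntax(id), Some(FakeExprPtr(42)));
        // Type inference only ever sees FakeExprId, so edits that move syntax
        // around (changing pointers, not ids) do not invalidate inference results.
    }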


@@ -1,9 +1,10 @@
//! FIXME: write short doc here
-use hir_def::{path::GenericArgs, type_ref::TypeRef};
use hir_expand::{
+either::Either,
hygiene::Hygiene,
name::{self, AsName, Name},
+AstId, HirFileId, MacroCallLoc, MacroFileKind, Source,
};
use ra_arena::Arena;
use ra_syntax::{
@@ -13,25 +14,24 @@ use ra_syntax::{
},
AstNode, AstPtr,
};
-use test_utils::tested_by;
use crate::{
-db::HirDatabase,
-ty::primitive::{FloatTy, IntTy, UncertainFloatTy, UncertainIntTy},
-AstId, DefWithBody, Either, HirFileId, MacroCallLoc, MacroFileKind, Mutability, Path, Resolver,
-Source,
-};
-use super::{
-ArithOp, Array, BinaryOp, BindingAnnotation, Body, BodySourceMap, CmpOp, Expr, ExprId, Literal,
-LogicOp, MatchArm, Ordering, Pat, PatId, PatPtr, RecordFieldPat, RecordLitField, Statement,
+body::{Body, BodySourceMap, MacroResolver, PatPtr},
+builtin_type::{BuiltinFloat, BuiltinInt},
+db::DefDatabase2,
+expr::{
+ArithOp, Array, BinaryOp, BindingAnnotation, CmpOp, Expr, ExprId, Literal, LogicOp,
+MatchArm, Ordering, Pat, PatId, RecordFieldPat, RecordLitField, Statement,
+},
+path::GenericArgs,
+path::Path,
+type_ref::{Mutability, TypeRef},
};
pub(super) fn lower(
-db: &impl HirDatabase,
+db: &impl DefDatabase2,
-resolver: Resolver,
+resolver: MacroResolver,
file_id: HirFileId,
-owner: DefWithBody,
params: Option<ast::ParamList>,
body: Option<ast::Expr>,
) -> (Body, BodySourceMap) {
@@ -42,11 +42,10 @@ pub(super) fn lower(
current_file_id: file_id,
source_map: BodySourceMap::default(),
body: Body {
-owner,
exprs: Arena::default(),
pats: Arena::default(),
params: Vec::new(),
-body_expr: ExprId((!0).into()),
+body_expr: ExprId::dummy(),
},
}
.collect(params, body)
@@ -54,11 +53,7 @@ pub(super) fn lower(
struct ExprCollector<DB> {
db: DB,
-resolver: Resolver,
+resolver: MacroResolver,
-// Expr collector expands macros along the way. original points to the file
-// we started with, current points to the current macro expansion. source
-// maps don't support macros yet, so we only record info into source map if
-// current == original (see #1196)
original_file_id: HirFileId,
current_file_id: HirFileId,
@@ -68,7 +63,7 @@ struct ExprCollector<DB> {
impl<'a, DB> ExprCollector<&'a DB>
where
-DB: HirDatabase,
+DB: DefDatabase2,
{
fn collect(
mut self,
@@ -209,7 +204,6 @@ where
None => self.collect_expr_opt(condition.expr()),
// if let -- desugar to match
Some(pat) => {
-tested_by!(infer_while_let);
let pat = self.collect_pat(pat);
let match_expr = self.collect_expr_opt(condition.expr());
let placeholder_pat = self.missing_pat();
@@ -423,28 +417,18 @@ where
ast::Expr::Literal(e) => {
let lit = match e.kind() {
LiteralKind::IntNumber { suffix } => {
-let known_name = suffix
-.and_then(|it| IntTy::from_suffix(&it).map(UncertainIntTy::Known));
-Literal::Int(
-Default::default(),
-known_name.unwrap_or(UncertainIntTy::Unknown),
-)
+let known_name = suffix.and_then(|it| BuiltinInt::from_suffix(&it));
+Literal::Int(Default::default(), known_name)
}
LiteralKind::FloatNumber { suffix } => {
-let known_name = suffix
-.and_then(|it| FloatTy::from_suffix(&it).map(UncertainFloatTy::Known));
-Literal::Float(
-Default::default(),
-known_name.unwrap_or(UncertainFloatTy::Unknown),
-)
+let known_name = suffix.and_then(|it| BuiltinFloat::from_suffix(&it));
+Literal::Float(Default::default(), known_name)
}
LiteralKind::ByteString => Literal::ByteString(Default::default()),
LiteralKind::String => Literal::String(Default::default()),
-LiteralKind::Byte => {
-Literal::Int(Default::default(), UncertainIntTy::Known(IntTy::u8()))
-}
+LiteralKind::Byte => Literal::Int(Default::default(), Some(BuiltinInt::U8)),
LiteralKind::Bool => Literal::Bool(Default::default()),
LiteralKind::Char => Literal::Char(Default::default()),
};
@@ -467,7 +451,7 @@ where
if let Some(path) = e.path().and_then(|path| self.parse_path(path)) {
if let Some(def) = self.resolver.resolve_path_as_macro(self.db, &path) {
-let call_id = self.db.intern_macro(MacroCallLoc { def: def.id, ast_id });
+let call_id = self.db.intern_macro(MacroCallLoc { def, ast_id });
let file_id = call_id.as_file(MacroFileKind::Expr);
if let Some(node) = self.db.parse_or_expand(file_id) {
if let Some(expr) = ast::Expr::cast(node) {


@@ -29,13 +29,24 @@ pub enum FloatBitness {
X64,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct BuiltinInt {
pub signedness: Signedness,
pub bitness: IntBitness,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct BuiltinFloat {
pub bitness: FloatBitness,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum BuiltinType {
Char,
Bool,
Str,
-Int { signedness: Signedness, bitness: IntBitness },
+Int(BuiltinInt),
-Float { bitness: FloatBitness },
+Float(BuiltinFloat),
}
impl BuiltinType {
@@ -45,22 +56,22 @@ impl BuiltinType {
(name::BOOL, BuiltinType::Bool),
(name::STR, BuiltinType::Str ),
-(name::ISIZE, BuiltinType::Int { signedness: Signedness::Signed, bitness: IntBitness::Xsize }),
-(name::I8, BuiltinType::Int { signedness: Signedness::Signed, bitness: IntBitness::X8 }),
-(name::I16, BuiltinType::Int { signedness: Signedness::Signed, bitness: IntBitness::X16 }),
-(name::I32, BuiltinType::Int { signedness: Signedness::Signed, bitness: IntBitness::X32 }),
-(name::I64, BuiltinType::Int { signedness: Signedness::Signed, bitness: IntBitness::X64 }),
-(name::I128, BuiltinType::Int { signedness: Signedness::Signed, bitness: IntBitness::X128 }),
-(name::USIZE, BuiltinType::Int { signedness: Signedness::Unsigned, bitness: IntBitness::Xsize }),
-(name::U8, BuiltinType::Int { signedness: Signedness::Unsigned, bitness: IntBitness::X8 }),
-(name::U16, BuiltinType::Int { signedness: Signedness::Unsigned, bitness: IntBitness::X16 }),
-(name::U32, BuiltinType::Int { signedness: Signedness::Unsigned, bitness: IntBitness::X32 }),
-(name::U64, BuiltinType::Int { signedness: Signedness::Unsigned, bitness: IntBitness::X64 }),
-(name::U128, BuiltinType::Int { signedness: Signedness::Unsigned, bitness: IntBitness::X128 }),
-(name::F32, BuiltinType::Float { bitness: FloatBitness::X32 }),
-(name::F64, BuiltinType::Float { bitness: FloatBitness::X64 }),
+(name::ISIZE, BuiltinType::Int(BuiltinInt::ISIZE)),
+(name::I8, BuiltinType::Int(BuiltinInt::I8)),
+(name::I16, BuiltinType::Int(BuiltinInt::I16)),
+(name::I32, BuiltinType::Int(BuiltinInt::I32)),
+(name::I64, BuiltinType::Int(BuiltinInt::I64)),
+(name::I128, BuiltinType::Int(BuiltinInt::I128)),
+(name::USIZE, BuiltinType::Int(BuiltinInt::USIZE)),
+(name::U8, BuiltinType::Int(BuiltinInt::U8)),
+(name::U16, BuiltinType::Int(BuiltinInt::U16)),
+(name::U32, BuiltinType::Int(BuiltinInt::U32)),
+(name::U64, BuiltinType::Int(BuiltinInt::U64)),
+(name::U128, BuiltinType::Int(BuiltinInt::U128)),
+(name::F32, BuiltinType::Float(BuiltinFloat::F32)),
+(name::F64, BuiltinType::Float(BuiltinFloat::F64)),
];
}
@@ -70,7 +81,7 @@ impl fmt::Display for BuiltinType {
BuiltinType::Char => "char",
BuiltinType::Bool => "bool",
BuiltinType::Str => "str",
-BuiltinType::Int { signedness, bitness } => match (signedness, bitness) {
+BuiltinType::Int(BuiltinInt { signedness, bitness }) => match (signedness, bitness) {
(Signedness::Signed, IntBitness::Xsize) => "isize",
(Signedness::Signed, IntBitness::X8) => "i8",
(Signedness::Signed, IntBitness::X16) => "i16",
@@ -85,7 +96,7 @@ impl fmt::Display for BuiltinType {
(Signedness::Unsigned, IntBitness::X64) => "u64",
(Signedness::Unsigned, IntBitness::X128) => "u128",
},
-BuiltinType::Float { bitness } => match bitness {
+BuiltinType::Float(BuiltinFloat { bitness }) => match bitness {
FloatBitness::X32 => "f32",
FloatBitness::X64 => "f64",
},
@@ -93,3 +104,57 @@ impl fmt::Display for BuiltinType {
f.write_str(type_name)
}
}
#[rustfmt::skip]
impl BuiltinInt {
pub const ISIZE: BuiltinInt = BuiltinInt { signedness: Signedness::Signed, bitness: IntBitness::Xsize };
pub const I8 : BuiltinInt = BuiltinInt { signedness: Signedness::Signed, bitness: IntBitness::X8 };
pub const I16 : BuiltinInt = BuiltinInt { signedness: Signedness::Signed, bitness: IntBitness::X16 };
pub const I32 : BuiltinInt = BuiltinInt { signedness: Signedness::Signed, bitness: IntBitness::X32 };
pub const I64 : BuiltinInt = BuiltinInt { signedness: Signedness::Signed, bitness: IntBitness::X64 };
pub const I128 : BuiltinInt = BuiltinInt { signedness: Signedness::Signed, bitness: IntBitness::X128 };
pub const USIZE: BuiltinInt = BuiltinInt { signedness: Signedness::Unsigned, bitness: IntBitness::Xsize };
pub const U8 : BuiltinInt = BuiltinInt { signedness: Signedness::Unsigned, bitness: IntBitness::X8 };
pub const U16 : BuiltinInt = BuiltinInt { signedness: Signedness::Unsigned, bitness: IntBitness::X16 };
pub const U32 : BuiltinInt = BuiltinInt { signedness: Signedness::Unsigned, bitness: IntBitness::X32 };
pub const U64 : BuiltinInt = BuiltinInt { signedness: Signedness::Unsigned, bitness: IntBitness::X64 };
pub const U128 : BuiltinInt = BuiltinInt { signedness: Signedness::Unsigned, bitness: IntBitness::X128 };
pub fn from_suffix(suffix: &str) -> Option<BuiltinInt> {
let res = match suffix {
"isize" => Self::ISIZE,
"i8" => Self::I8,
"i16" => Self::I16,
"i32" => Self::I32,
"i64" => Self::I64,
"i128" => Self::I128,
"usize" => Self::USIZE,
"u8" => Self::U8,
"u16" => Self::U16,
"u32" => Self::U32,
"u64" => Self::U64,
"u128" => Self::U128,
_ => return None,
};
Some(res)
}
}
#[rustfmt::skip]
impl BuiltinFloat {
pub const F32: BuiltinFloat = BuiltinFloat { bitness: FloatBitness::X32 };
pub const F64: BuiltinFloat = BuiltinFloat { bitness: FloatBitness::X64 };
pub fn from_suffix(suffix: &str) -> Option<BuiltinFloat> {
let res = match suffix {
"f32" => BuiltinFloat::F32,
"f64" => BuiltinFloat::F64,
_ => return None,
};
Some(res)
}
}
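
A minimal sketch of how the new `BuiltinInt::from_suffix` helper is meant to be used when lowering literals (compare the `LiteralKind::IntNumber` arm in body/lower.rs above). The types are trimmed mirrors of the ones in this file, the suffix table is abbreviated, and `classify_int_suffix` is a hypothetical helper name, not rust-analyzer API.

    // Trimmed mirror of the definitions in builtin_type.rs above.
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    enum Signedness { Signed, Unsigned }
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    enum IntBitness { Xsize, X8, X16, X32, X64, X128 }
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct BuiltinInt { signedness: Signedness, bitness: IntBitness }

    impl BuiltinInt {
        const U8: BuiltinInt = BuiltinInt { signedness: Signedness::Unsigned, bitness: IntBitness::X8 };
        const I32: BuiltinInt = BuiltinInt { signedness: Signedness::Signed, bitness: IntBitness::X32 };

        fn from_suffix(suffix: &str) -> Option<BuiltinInt> {
            match suffix {
                "u8" => Some(Self::U8),
                "i32" => Some(Self::I32),
                // ... the real table covers every integer suffix.
                _ => None,
            }
        }
    }

    // Hypothetical helper: given the suffix of an integer literal (if any),
    // return the "known" builtin type, or None for an unsuffixed literal.
    fn classify_int_suffix(suffix: Option<&str>) -> Option<BuiltinInt> {
        suffix.and_then(BuiltinInt::from_suffix)
    }

    fn main() {
        assert_eq!(classify_int_suffix(Some("u8")), Some(BuiltinInt::U8));
        assert_eq!(classify_int_suffix(Some("bogus")), None);
        // An unsuffixed literal stays unknown until type inference decides.
        assert_eq!(classify_int_suffix(None), None);
    }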


@@ -0,0 +1,421 @@
//! This module describes hir-level representation of expressions.
//!
//! This representaion is:
//!
//! 1. Identity-based. Each expression has an `id`, so we can distinguish
//! between different `1` in `1 + 1`.
//! 2. Independent of syntax. Though syntactic provenance information can be
//! attached separately via id-based side map.
//! 3. Unresolved. Paths are stored as sequences of names, and not as defs the
//! names refer to.
//! 4. Desugared. There's no `if let`.
//!
//! See also a neighboring `body` module.
use hir_expand::name::Name;
use ra_arena::{impl_arena_id, RawId};
use crate::{
builtin_type::{BuiltinFloat, BuiltinInt},
path::{GenericArgs, Path},
type_ref::{Mutability, TypeRef},
};
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct ExprId(RawId);
impl_arena_id!(ExprId);
impl ExprId {
pub fn dummy() -> ExprId {
ExprId((!0).into())
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct PatId(RawId);
impl_arena_id!(PatId);
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum Literal {
String(String),
ByteString(Vec<u8>),
Char(char),
Bool(bool),
Int(u64, Option<BuiltinInt>),
Float(u64, Option<BuiltinFloat>), // FIXME: f64 is not Eq
}
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum Expr {
/// This is produced if syntax tree does not have a required expression piece.
Missing,
Path(Path),
If {
condition: ExprId,
then_branch: ExprId,
else_branch: Option<ExprId>,
},
Block {
statements: Vec<Statement>,
tail: Option<ExprId>,
},
Loop {
body: ExprId,
},
While {
condition: ExprId,
body: ExprId,
},
For {
iterable: ExprId,
pat: PatId,
body: ExprId,
},
Call {
callee: ExprId,
args: Vec<ExprId>,
},
MethodCall {
receiver: ExprId,
method_name: Name,
args: Vec<ExprId>,
generic_args: Option<GenericArgs>,
},
Match {
expr: ExprId,
arms: Vec<MatchArm>,
},
Continue,
Break {
expr: Option<ExprId>,
},
Return {
expr: Option<ExprId>,
},
RecordLit {
path: Option<Path>,
fields: Vec<RecordLitField>,
spread: Option<ExprId>,
},
Field {
expr: ExprId,
name: Name,
},
Await {
expr: ExprId,
},
Try {
expr: ExprId,
},
TryBlock {
body: ExprId,
},
Cast {
expr: ExprId,
type_ref: TypeRef,
},
Ref {
expr: ExprId,
mutability: Mutability,
},
Box {
expr: ExprId,
},
UnaryOp {
expr: ExprId,
op: UnaryOp,
},
BinaryOp {
lhs: ExprId,
rhs: ExprId,
op: Option<BinaryOp>,
},
Index {
base: ExprId,
index: ExprId,
},
Lambda {
args: Vec<PatId>,
arg_types: Vec<Option<TypeRef>>,
body: ExprId,
},
Tuple {
exprs: Vec<ExprId>,
},
Array(Array),
Literal(Literal),
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub enum BinaryOp {
LogicOp(LogicOp),
ArithOp(ArithOp),
CmpOp(CmpOp),
Assignment { op: Option<ArithOp> },
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub enum LogicOp {
And,
Or,
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub enum CmpOp {
Eq { negated: bool },
Ord { ordering: Ordering, strict: bool },
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub enum Ordering {
Less,
Greater,
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub enum ArithOp {
Add,
Mul,
Sub,
Div,
Rem,
Shl,
Shr,
BitXor,
BitOr,
BitAnd,
}
pub use ra_syntax::ast::PrefixOp as UnaryOp;
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum Array {
ElementList(Vec<ExprId>),
Repeat { initializer: ExprId, repeat: ExprId },
}
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct MatchArm {
pub pats: Vec<PatId>,
pub guard: Option<ExprId>,
pub expr: ExprId,
}
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct RecordLitField {
pub name: Name,
pub expr: ExprId,
}
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum Statement {
Let { pat: PatId, type_ref: Option<TypeRef>, initializer: Option<ExprId> },
Expr(ExprId),
}
impl Expr {
pub fn walk_child_exprs(&self, mut f: impl FnMut(ExprId)) {
match self {
Expr::Missing => {}
Expr::Path(_) => {}
Expr::If { condition, then_branch, else_branch } => {
f(*condition);
f(*then_branch);
if let Some(else_branch) = else_branch {
f(*else_branch);
}
}
Expr::Block { statements, tail } => {
for stmt in statements {
match stmt {
Statement::Let { initializer, .. } => {
if let Some(expr) = initializer {
f(*expr);
}
}
Statement::Expr(e) => f(*e),
}
}
if let Some(expr) = tail {
f(*expr);
}
}
Expr::TryBlock { body } => f(*body),
Expr::Loop { body } => f(*body),
Expr::While { condition, body } => {
f(*condition);
f(*body);
}
Expr::For { iterable, body, .. } => {
f(*iterable);
f(*body);
}
Expr::Call { callee, args } => {
f(*callee);
for arg in args {
f(*arg);
}
}
Expr::MethodCall { receiver, args, .. } => {
f(*receiver);
for arg in args {
f(*arg);
}
}
Expr::Match { expr, arms } => {
f(*expr);
for arm in arms {
f(arm.expr);
}
}
Expr::Continue => {}
Expr::Break { expr } | Expr::Return { expr } => {
if let Some(expr) = expr {
f(*expr);
}
}
Expr::RecordLit { fields, spread, .. } => {
for field in fields {
f(field.expr);
}
if let Some(expr) = spread {
f(*expr);
}
}
Expr::Lambda { body, .. } => {
f(*body);
}
Expr::BinaryOp { lhs, rhs, .. } => {
f(*lhs);
f(*rhs);
}
Expr::Index { base, index } => {
f(*base);
f(*index);
}
Expr::Field { expr, .. }
| Expr::Await { expr }
| Expr::Try { expr }
| Expr::Cast { expr, .. }
| Expr::Ref { expr, .. }
| Expr::UnaryOp { expr, .. }
| Expr::Box { expr } => {
f(*expr);
}
Expr::Tuple { exprs } => {
for expr in exprs {
f(*expr);
}
}
Expr::Array(a) => match a {
Array::ElementList(exprs) => {
for expr in exprs {
f(*expr);
}
}
Array::Repeat { initializer, repeat } => {
f(*initializer);
f(*repeat)
}
},
Expr::Literal(_) => {}
}
}
}
/// Explicit binding annotations given in the HIR for a binding. Note
/// that this is not the final binding *mode* that we infer after type
/// inference.
#[derive(Clone, PartialEq, Eq, Debug, Copy)]
pub enum BindingAnnotation {
/// No binding annotation given: this means that the final binding mode
/// will depend on whether we have skipped through a `&` reference
/// when matching. For example, the `x` in `Some(x)` will have binding
/// mode `None`; if you do `let Some(x) = &Some(22)`, it will
/// ultimately be inferred to be by-reference.
Unannotated,
/// Annotated with `mut x` -- could be either ref or not, similar to `None`.
Mutable,
/// Annotated as `ref`, like `ref x`
Ref,
/// Annotated as `ref mut x`.
RefMut,
}
impl BindingAnnotation {
pub fn new(is_mutable: bool, is_ref: bool) -> Self {
match (is_mutable, is_ref) {
(true, true) => BindingAnnotation::RefMut,
(false, true) => BindingAnnotation::Ref,
(true, false) => BindingAnnotation::Mutable,
(false, false) => BindingAnnotation::Unannotated,
}
}
}
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct RecordFieldPat {
pub name: Name,
pub pat: PatId,
}
/// Close relative to rustc's hir::PatKind
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum Pat {
Missing,
Wild,
Tuple(Vec<PatId>),
Record {
path: Option<Path>,
args: Vec<RecordFieldPat>,
// FIXME: 'ellipsis' option
},
Range {
start: ExprId,
end: ExprId,
},
Slice {
prefix: Vec<PatId>,
rest: Option<PatId>,
suffix: Vec<PatId>,
},
Path(Path),
Lit(ExprId),
Bind {
mode: BindingAnnotation,
name: Name,
subpat: Option<PatId>,
},
TupleStruct {
path: Option<Path>,
args: Vec<PatId>,
},
Ref {
pat: PatId,
mutability: Mutability,
},
}
impl Pat {
pub fn walk_child_pats(&self, mut f: impl FnMut(PatId)) {
match self {
Pat::Range { .. } | Pat::Lit(..) | Pat::Path(..) | Pat::Wild | Pat::Missing => {}
Pat::Bind { subpat, .. } => {
subpat.iter().copied().for_each(f);
}
Pat::Tuple(args) | Pat::TupleStruct { args, .. } => {
args.iter().copied().for_each(f);
}
Pat::Ref { pat, .. } => f(*pat),
Pat::Slice { prefix, rest, suffix } => {
let total_iter = prefix.iter().chain(rest.iter()).chain(suffix.iter());
total_iter.copied().for_each(f);
}
Pat::Record { args, .. } => {
args.iter().map(|f| f.pat).for_each(f);
}
}
}
}
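
The module docs at the top of this file note that the representation is desugared ("There's no `if let`"). As a purely illustrative surface-Rust sketch (not HIR, and not the exact output of the lowering code), this is roughly the shape of rewrite that body/lower.rs performs for `while let`: a `loop` whose body is a `match` with a placeholder wildcard arm.

    fn main() {
        let mut stack = vec![1, 2, 3];

        // Surface syntax: `while let`.
        while let Some(top) = stack.pop() {
            println!("popped {}", top);
        }

        // Roughly what the lowered form corresponds to: a `loop` containing a
        // `match`, where the fallback arm (the "placeholder" pattern allocated
        // by the collector) breaks out of the loop.
        let mut stack = vec![1, 2, 3];
        loop {
            match stack.pop() {
                Some(top) => println!("popped {}", top),
                _ => break,
            }
        }
    }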


@@ -14,6 +14,8 @@ pub mod type_ref;
pub mod builtin_type;
pub mod adt;
pub mod diagnostics;
+pub mod expr;
+pub mod body;
#[cfg(test)]
mod test_db;


@@ -276,7 +276,7 @@ impl RootDatabase {
self.query(hir::db::ExprScopesQuery).sweep(sweep);
self.query(hir::db::InferQuery).sweep(sweep);
-self.query(hir::db::BodyHirQuery).sweep(sweep);
+self.query(hir::db::BodyQuery).sweep(sweep);
}
pub(crate) fn per_query_memory_usage(&mut self) -> Vec<(String, Bytes)> {
@@ -333,7 +333,7 @@ impl RootDatabase {
hir::db::GenericPredicatesQuery
hir::db::GenericDefaultsQuery
hir::db::BodyWithSourceMapQuery
-hir::db::BodyHirQuery
+hir::db::BodyQuery
hir::db::ImplsInCrateQuery
hir::db::ImplsForTraitQuery
hir::db::AssociatedTyDataQuery


@@ -1,6 +1,7 @@
//! FIXME: write short doc here
use std::path::{Path, PathBuf};
+use std::str::FromStr;
use cargo_metadata::{CargoOpt, MetadataCommand};
use ra_arena::{impl_arena_id, Arena, RawId};
@@ -140,18 +141,21 @@ impl CargoWorkspace {
let ws_members = &meta.workspace_members;
for meta_pkg in meta.packages {
-let is_member = ws_members.contains(&meta_pkg.id);
+let cargo_metadata::Package { id, edition, name, manifest_path, .. } = meta_pkg;
+let is_member = ws_members.contains(&id);
+let edition = Edition::from_str(&edition)
+.map_err(|e| (format!("metadata for package {} failed: {}", &name, e.msg)))?;
let pkg = packages.alloc(PackageData {
-name: meta_pkg.name,
+name,
-manifest: meta_pkg.manifest_path.clone(),
+manifest: manifest_path,
targets: Vec::new(),
is_member,
-edition: Edition::from_string(&meta_pkg.edition),
+edition,
dependencies: Vec::new(),
features: Vec::new(),
});
let pkg_data = &mut packages[pkg];
-pkg_by_id.insert(meta_pkg.id.clone(), pkg);
+pkg_by_id.insert(id, pkg);
for meta_tgt in meta_pkg.targets {
let tgt = targets.alloc(TargetData {
pkg,


@@ -1,10 +1,6 @@
-use std::fs;
-use std::io::prelude::*;
-use std::io::BufReader;
-use std::path::Path;
+use std::{collections::HashMap, fs, io::prelude::*, io::BufReader, path::Path};
use walkdir::{DirEntry, WalkDir};
use xtask::project_root;
fn is_exclude_dir(p: &Path) -> bool {
@@ -37,6 +33,7 @@ fn no_docs_comments() {
let crates = project_root().join("crates");
let iter = WalkDir::new(crates);
let mut missing_docs = Vec::new();
+let mut contains_fixme = Vec::new();
for f in iter.into_iter().filter_entry(|e| !is_hidden(e)) {
let f = f.unwrap();
if f.file_type().is_dir() {
@@ -54,7 +51,12 @@ fn no_docs_comments() {
let mut reader = BufReader::new(fs::File::open(f.path()).unwrap());
let mut line = String::new();
reader.read_line(&mut line).unwrap();
-if !line.starts_with("//!") {
+if line.starts_with("//!") {
+if line.contains("FIXME") {
+contains_fixme.push(f.path().to_path_buf())
+}
+} else {
missing_docs.push(f.path().display().to_string());
}
}
@@ -65,4 +67,39 @@ fn no_docs_comments() {
missing_docs.join("\n")
)
}
let whitelist = [
"ra_batch",
"ra_cli",
"ra_db",
"ra_hir",
"ra_hir_expand",
"ra_hir_def",
"ra_ide_api",
"ra_lsp_server",
"ra_mbe",
"ra_parser",
"ra_prof",
"ra_project_model",
"ra_syntax",
"ra_text_edit",
"ra_tt",
];
let mut has_fixmes = whitelist.iter().map(|it| (*it, false)).collect::<HashMap<&str, bool>>();
'outer: for path in contains_fixme {
for krate in whitelist.iter() {
if path.components().any(|it| it.as_os_str() == *krate) {
has_fixmes.insert(krate, true);
continue 'outer;
}
}
panic!("FIXME doc in a fully-documented crate: {}", path.display())
}
for (krate, has_fixme) in has_fixmes.iter() {
if !has_fixme {
panic!("crate {} is fully documented, remove it from the white list", krate)
}
}
}