MIR episode 5

hkalbasi 2023-05-12 18:17:15 +03:30
parent 9b3387454d
commit cbcafd3539
36 changed files with 1532 additions and 362 deletions

View file

@ -165,7 +165,7 @@ impl Body {
};
let expander = Expander::new(db, file_id, module);
let (mut body, source_map) =
Body::new(db, expander, params, body, module.krate, is_async_fn);
Body::new(db, def, expander, params, body, module.krate, is_async_fn);
body.shrink_to_fit();
(Arc::new(body), Arc::new(source_map))
@ -189,13 +189,14 @@ impl Body {
fn new(
db: &dyn DefDatabase,
owner: DefWithBodyId,
expander: Expander,
params: Option<(ast::ParamList, impl Iterator<Item = bool>)>,
body: Option<ast::Expr>,
krate: CrateId,
is_async_fn: bool,
) -> (Body, BodySourceMap) {
lower::lower(db, expander, params, body, krate, is_async_fn)
lower::lower(db, owner, expander, params, body, krate, is_async_fn)
}
fn shrink_to_fit(&mut self) {

View file

@ -40,11 +40,12 @@ use crate::{
nameres::{DefMap, MacroSubNs},
path::{GenericArgs, Path},
type_ref::{Mutability, Rawness, TypeRef},
AdtId, BlockId, BlockLoc, ModuleDefId, UnresolvedMacro,
AdtId, BlockId, BlockLoc, DefWithBodyId, ModuleDefId, UnresolvedMacro,
};
pub(super) fn lower(
db: &dyn DefDatabase,
owner: DefWithBodyId,
expander: Expander,
params: Option<(ast::ParamList, impl Iterator<Item = bool>)>,
body: Option<ast::Expr>,
@ -53,6 +54,7 @@ pub(super) fn lower(
) -> (Body, BodySourceMap) {
ExprCollector {
db,
owner,
krate,
def_map: expander.module.def_map(db),
source_map: BodySourceMap::default(),
@ -80,6 +82,7 @@ pub(super) fn lower(
struct ExprCollector<'a> {
db: &'a dyn DefDatabase,
expander: Expander,
owner: DefWithBodyId,
def_map: Arc<DefMap>,
ast_id_map: Arc<AstIdMap>,
krate: CrateId,
@ -269,16 +272,13 @@ impl ExprCollector<'_> {
}
Some(ast::BlockModifier::Const(_)) => {
self.with_label_rib(RibKind::Constant, |this| {
this.collect_as_a_binding_owner_bad(
|this| {
this.collect_block_(e, |id, statements, tail| Expr::Const {
id,
statements,
tail,
})
},
syntax_ptr,
)
let (result_expr_id, prev_binding_owner) =
this.initialize_binding_owner(syntax_ptr);
let inner_expr = this.collect_block(e);
let x = this.db.intern_anonymous_const((this.owner, inner_expr));
this.body.exprs[result_expr_id] = Expr::Const(x);
this.current_binding_owner = prev_binding_owner;
result_expr_id
})
}
None => self.collect_block(e),

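With this change a `const { ... }` block no longer keeps its statements inline in the HIR: lowering collects the inner block as an ordinary expression and the resulting `Expr::Const` only stores an id that interns the pair (owning body, inner expression). Below is a minimal standalone sketch of that interning idea; the names (`AnonConstInterner`, `OwnerId`, ...) are simplified stand-ins for the real salsa `intern_anonymous_const` / `lookup_intern_anonymous_const` queries, not the actual API.

use std::collections::HashMap;

// Simplified stand-ins for DefWithBodyId, ExprId and AnonymousConstId.
type OwnerId = u32;
type ExprId = u32;
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct AnonConstId(u32);

#[derive(Default)]
struct AnonConstInterner {
    ids: HashMap<(OwnerId, ExprId), AnonConstId>,
    data: Vec<(OwnerId, ExprId)>,
}

impl AnonConstInterner {
    // Interning is idempotent: the same (owner, expr) pair always yields the same id.
    fn intern(&mut self, owner: OwnerId, expr: ExprId) -> AnonConstId {
        if let Some(&id) = self.ids.get(&(owner, expr)) {
            return id;
        }
        let id = AnonConstId(self.data.len() as u32);
        self.data.push((owner, expr));
        self.ids.insert((owner, expr), id);
        id
    }

    // Reverse lookup, which is what inference and const evaluation use to get
    // back to the owner and the root expression of the anonymous const.
    fn lookup(&self, id: AnonConstId) -> (OwnerId, ExprId) {
        self.data[id.0 as usize]
    }
}

fn main() {
    let mut interner = AnonConstInterner::default();
    let id = interner.intern(7, 42);
    assert_eq!(interner.intern(7, 42), id);
    assert_eq!(interner.lookup(id), (7, 42));
}
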
View file

@ -436,8 +436,8 @@ impl<'a> Printer<'a> {
Expr::Async { id: _, statements, tail } => {
self.print_block(Some("async "), statements, tail);
}
Expr::Const { id: _, statements, tail } => {
self.print_block(Some("const "), statements, tail);
Expr::Const(id) => {
w!(self, "const {{ /* {id:?} */ }}");
}
}
}

View file

@ -218,9 +218,10 @@ fn compute_expr_scopes(expr: ExprId, body: &Body, scopes: &mut ExprScopes, scope
scopes.set_scope(expr, scope);
compute_block_scopes(statements, *tail, body, scopes, &mut scope);
}
Expr::Unsafe { id, statements, tail }
| Expr::Async { id, statements, tail }
| Expr::Const { id, statements, tail } => {
Expr::Const(_) => {
// FIXME: This is broken.
}
Expr::Unsafe { id, statements, tail } | Expr::Async { id, statements, tail } => {
let mut scope = scopes.new_block_scope(*scope, *id, None);
// Overwrite the old scope for the block expr, so that every block scope can be found
// via the block itself (important for blocks that only contain items, no expressions).

View file

@ -16,16 +16,17 @@ use crate::{
TraitAliasData, TraitData, TypeAliasData,
},
generics::GenericParams,
hir::ExprId,
import_map::ImportMap,
item_tree::{AttrOwner, ItemTree},
lang_item::{LangItem, LangItemTarget, LangItems},
nameres::{diagnostics::DefDiagnostic, DefMap},
visibility::{self, Visibility},
AttrDefId, BlockId, BlockLoc, ConstId, ConstLoc, DefWithBodyId, EnumId, EnumLoc, ExternBlockId,
ExternBlockLoc, FunctionId, FunctionLoc, GenericDefId, ImplId, ImplLoc, LocalEnumVariantId,
LocalFieldId, Macro2Id, Macro2Loc, MacroRulesId, MacroRulesLoc, ProcMacroId, ProcMacroLoc,
StaticId, StaticLoc, StructId, StructLoc, TraitAliasId, TraitAliasLoc, TraitId, TraitLoc,
TypeAliasId, TypeAliasLoc, UnionId, UnionLoc, VariantId,
AnonymousConstId, AttrDefId, BlockId, BlockLoc, ConstId, ConstLoc, DefWithBodyId, EnumId,
EnumLoc, ExternBlockId, ExternBlockLoc, FunctionId, FunctionLoc, GenericDefId, ImplId, ImplLoc,
LocalEnumVariantId, LocalFieldId, Macro2Id, Macro2Loc, MacroRulesId, MacroRulesLoc,
ProcMacroId, ProcMacroLoc, StaticId, StaticLoc, StructId, StructLoc, TraitAliasId,
TraitAliasLoc, TraitId, TraitLoc, TypeAliasId, TypeAliasLoc, UnionId, UnionLoc, VariantId,
};
#[salsa::query_group(InternDatabaseStorage)]
@ -60,6 +61,8 @@ pub trait InternDatabase: SourceDatabase {
fn intern_proc_macro(&self, loc: ProcMacroLoc) -> ProcMacroId;
#[salsa::interned]
fn intern_macro_rules(&self, loc: MacroRulesLoc) -> MacroRulesId;
#[salsa::interned]
fn intern_anonymous_const(&self, id: (DefWithBodyId, ExprId)) -> AnonymousConstId;
}
#[salsa::query_group(DefDatabaseStorage)]

View file

@ -26,7 +26,7 @@ use crate::{
builtin_type::{BuiltinFloat, BuiltinInt, BuiltinUint},
path::{GenericArgs, Path},
type_ref::{Mutability, Rawness, TypeRef},
BlockId,
AnonymousConstId, BlockId,
};
pub use syntax::ast::{ArithOp, BinaryOp, CmpOp, LogicOp, Ordering, RangeOp, UnaryOp};
@ -169,11 +169,7 @@ pub enum Expr {
statements: Box<[Statement]>,
tail: Option<ExprId>,
},
Const {
id: Option<BlockId>,
statements: Box<[Statement]>,
tail: Option<ExprId>,
},
Const(AnonymousConstId),
Unsafe {
id: Option<BlockId>,
statements: Box<[Statement]>,
@ -355,10 +351,10 @@ impl Expr {
Expr::Let { expr, .. } => {
f(*expr);
}
Expr::Const(_) => (),
Expr::Block { statements, tail, .. }
| Expr::Unsafe { statements, tail, .. }
| Expr::Async { statements, tail, .. }
| Expr::Const { statements, tail, .. } => {
| Expr::Async { statements, tail, .. } => {
for stmt in statements.iter() {
match stmt {
Statement::Let { initializer, else_branch, .. } => {

View file

@ -59,7 +59,11 @@ mod pretty;
use std::hash::{Hash, Hasher};
use base_db::{impl_intern_key, salsa, CrateId, ProcMacroKind};
use base_db::{
impl_intern_key,
salsa::{self, InternId},
CrateId, ProcMacroKind,
};
use hir_expand::{
ast_id_map::FileAstId,
attrs::{Attr, AttrId, AttrInput},
@ -472,6 +476,46 @@ impl_from!(
for ModuleDefId
);
// FIXME: make this a DefWithBodyId
#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
pub struct AnonymousConstId(InternId);
impl_intern_key!(AnonymousConstId);
/// A constant, which might appear as a const item, an anonymous const block in expressions
/// or patterns, or as a constant in types with const generics.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum GeneralConstId {
ConstId(ConstId),
AnonymousConstId(AnonymousConstId),
}
impl_from!(ConstId, AnonymousConstId for GeneralConstId);
impl GeneralConstId {
pub fn generic_def(self, db: &dyn db::DefDatabase) -> Option<GenericDefId> {
match self {
GeneralConstId::ConstId(x) => Some(x.into()),
GeneralConstId::AnonymousConstId(x) => {
let (parent, _) = db.lookup_intern_anonymous_const(x);
parent.as_generic_def_id()
}
}
}
pub fn name(self, db: &dyn db::DefDatabase) -> String {
match self {
GeneralConstId::ConstId(const_id) => db
.const_data(const_id)
.name
.as_ref()
.and_then(|x| x.as_str())
.unwrap_or("_")
.to_owned(),
GeneralConstId::AnonymousConstId(id) => format!("{{anonymous const {id:?}}}"),
}
}
}
/// The defs which have a body.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum DefWithBodyId {

View file

@ -12,6 +12,9 @@ use crate::{
};
pub(crate) fn print_path(path: &Path, buf: &mut dyn Write) -> fmt::Result {
if let Path::LangItem(x) = path {
return write!(buf, "$lang_item::{x:?}");
}
match path.type_anchor() {
Some(anchor) => {
write!(buf, "<")?;

View file

@ -376,6 +376,7 @@ pub mod known {
deref,
div_assign,
div,
drop,
fn_mut,
fn_once,
future_trait,

View file

@ -34,6 +34,7 @@ pub trait TyExt {
fn as_closure(&self) -> Option<ClosureId>;
fn as_fn_def(&self, db: &dyn HirDatabase) -> Option<FunctionId>;
fn as_reference(&self) -> Option<(&Ty, Lifetime, Mutability)>;
fn as_raw_ptr(&self) -> Option<(&Ty, Mutability)>;
fn as_reference_or_ptr(&self) -> Option<(&Ty, Rawness, Mutability)>;
fn as_generic_def(&self, db: &dyn HirDatabase) -> Option<GenericDefId>;
@ -146,6 +147,7 @@ impl TyExt for Ty {
Some(CallableDefId::StructId(_) | CallableDefId::EnumVariantId(_)) | None => None,
}
}
fn as_reference(&self) -> Option<(&Ty, Lifetime, Mutability)> {
match self.kind(Interner) {
TyKind::Ref(mutability, lifetime, ty) => Some((ty, lifetime.clone(), *mutability)),
@ -153,6 +155,13 @@ impl TyExt for Ty {
}
}
fn as_raw_ptr(&self) -> Option<(&Ty, Mutability)> {
match self.kind(Interner) {
TyKind::Raw(mutability, ty) => Some((ty, *mutability)),
_ => None,
}
}
fn as_reference_or_ptr(&self) -> Option<(&Ty, Rawness, Mutability)> {
match self.kind(Interner) {
TyKind::Ref(mutability, _, ty) => Some((ty, Rawness::Ref, *mutability)),

View file

@ -7,10 +7,11 @@ use hir_def::{
path::Path,
resolver::{Resolver, ValueNs},
type_ref::ConstRef,
DefWithBodyId, EnumVariantId,
EnumVariantId, GeneralConstId, StaticId,
};
use la_arena::{Idx, RawIdx};
use stdx::never;
use triomphe::Arc;
use crate::{
db::HirDatabase, infer::InferenceContext, layout::layout_of_ty, lower::ParamLoweringMode,
@ -158,13 +159,17 @@ pub fn usize_const(db: &dyn HirDatabase, value: Option<u128>, krate: CrateId) ->
)
}
pub fn try_const_usize(c: &Const) -> Option<u128> {
pub fn try_const_usize(db: &dyn HirDatabase, c: &Const) -> Option<u128> {
match &c.data(Interner).value {
chalk_ir::ConstValue::BoundVar(_) => None,
chalk_ir::ConstValue::InferenceVar(_) => None,
chalk_ir::ConstValue::Placeholder(_) => None,
chalk_ir::ConstValue::Concrete(c) => match &c.interned {
ConstScalar::Bytes(x, _) => Some(u128::from_le_bytes(pad16(&x, false))),
ConstScalar::UnevaluatedConst(c, subst) => {
let ec = db.const_eval(*c, subst.clone()).ok()?;
try_const_usize(db, &ec)
}
_ => None,
},
}
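
`try_const_usize` now needs the database because a constant may still be unevaluated at this point: in that case it asks `const_eval` for the value and retries on the result. A toy standalone model of that recursion follows; the string-keyed table stands in for the salsa database and all names are illustrative only.

use std::collections::HashMap;

// A constant is either concrete bytes or a reference to a not-yet-evaluated constant.
#[derive(Clone)]
enum ConstVal {
    Bytes(u128),
    Unevaluated(&'static str),
}

// The "database": evaluating a named constant just returns its stored value here.
fn const_eval(db: &HashMap<&'static str, ConstVal>, name: &str) -> Option<ConstVal> {
    db.get(name).cloned()
}

// Mirrors the shape of try_const_usize above: concrete values succeed, unevaluated
// constants are evaluated first and retried, anything else would be None.
fn try_const_usize(db: &HashMap<&'static str, ConstVal>, c: &ConstVal) -> Option<u128> {
    match c {
        ConstVal::Bytes(x) => Some(*x),
        ConstVal::Unevaluated(name) => {
            let evaluated = const_eval(db, name)?;
            try_const_usize(db, &evaluated)
        }
    }
}

fn main() {
    let mut db = HashMap::new();
    db.insert("LEN", ConstVal::Bytes(4));
    db.insert("ALIAS", ConstVal::Unevaluated("LEN"));
    assert_eq!(try_const_usize(&db, &ConstVal::Unevaluated("ALIAS")), Some(4));
}
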
@ -173,12 +178,20 @@ pub fn try_const_usize(c: &Const) -> Option<u128> {
pub(crate) fn const_eval_recover(
_: &dyn HirDatabase,
_: &[String],
_: &DefWithBodyId,
_: &GeneralConstId,
_: &Substitution,
) -> Result<Const, ConstEvalError> {
Err(ConstEvalError::MirLowerError(MirLowerError::Loop))
}
pub(crate) fn const_eval_static_recover(
_: &dyn HirDatabase,
_: &[String],
_: &StaticId,
) -> Result<Const, ConstEvalError> {
Err(ConstEvalError::MirLowerError(MirLowerError::Loop))
}
pub(crate) fn const_eval_discriminant_recover(
_: &dyn HirDatabase,
_: &[String],
@ -189,11 +202,28 @@ pub(crate) fn const_eval_discriminant_recover(
pub(crate) fn const_eval_query(
db: &dyn HirDatabase,
def: DefWithBodyId,
def: GeneralConstId,
subst: Substitution,
) -> Result<Const, ConstEvalError> {
let body = db.mir_body(def)?;
let c = interpret_mir(db, &body, subst, false)?;
let body = match def {
GeneralConstId::ConstId(c) => db.mir_body(c.into())?,
GeneralConstId::AnonymousConstId(c) => {
let (def, root) = db.lookup_intern_anonymous_const(c);
let body = db.body(def);
let infer = db.infer(def);
Arc::new(lower_to_mir(db, def, &body, &infer, root)?)
}
};
let c = interpret_mir(db, &body, subst, false).0?;
Ok(c)
}
pub(crate) fn const_eval_static_query(
db: &dyn HirDatabase,
def: StaticId,
) -> Result<Const, ConstEvalError> {
let body = db.mir_body(def.into())?;
let c = interpret_mir(db, &body, Substitution::empty(Interner), false).0?;
Ok(c)
}
@ -216,8 +246,8 @@ pub(crate) fn const_eval_discriminant_variant(
return Ok(value);
}
let mir_body = db.mir_body(def)?;
let c = interpret_mir(db, &mir_body, Substitution::empty(Interner), false)?;
let c = try_const_usize(&c).unwrap() as i128;
let c = interpret_mir(db, &mir_body, Substitution::empty(Interner), false).0?;
let c = try_const_usize(db, &c).unwrap() as i128;
Ok(c)
}
@ -241,7 +271,7 @@ pub(crate) fn eval_to_const(
}
let infer = ctx.clone().resolve_all();
if let Ok(mir_body) = lower_to_mir(ctx.db, ctx.owner, &ctx.body, &infer, expr) {
if let Ok(result) = interpret_mir(db, &mir_body, Substitution::empty(Interner), true) {
if let Ok(result) = interpret_mir(db, &mir_body, Substitution::empty(Interner), true).0 {
return result;
}
}

View file

@ -158,6 +158,22 @@ fn casts() {
);
}
#[test]
fn raw_pointer_equality() {
check_number(
r#"
//- minicore: copy, eq
const GOAL: bool = {
let a = 2;
let p1 = a as *const i32;
let p2 = a as *const i32;
p1 == p2
};
"#,
1,
);
}
#[test]
fn locals() {
check_number(
@ -975,6 +991,22 @@ fn pattern_matching_literal() {
);
}
#[test]
fn pattern_matching_slice() {
check_number(
r#"
//- minicore: slice, index, coerce_unsized, copy
const fn f(x: &[usize]) -> usize {
match x {
[a, b @ .., c, d] => *a + b.len() + *c + *d,
}
}
const GOAL: usize = f(&[10, 20, 3, 15, 1000, 60, 16]);
"#,
10 + 4 + 60 + 16,
);
}
#[test]
fn pattern_matching_ergonomics() {
check_number(
@ -1808,6 +1840,28 @@ fn array_and_index() {
const GOAL: usize = [1, 2, 3, 4, 5].len();"#,
5,
);
check_number(
r#"
//- minicore: coerce_unsized, index, slice
const GOAL: [u16; 5] = [1, 2, 3, 4, 5];"#,
1 + (2 << 16) + (3 << 32) + (4 << 48) + (5 << 64),
);
check_number(
r#"
//- minicore: coerce_unsized, index, slice
const GOAL: [u16; 5] = [12; 5];"#,
12 + (12 << 16) + (12 << 32) + (12 << 48) + (12 << 64),
);
check_number(
r#"
//- minicore: coerce_unsized, index, slice
const LEN: usize = 4;
const GOAL: u16 = {
let x = [7; LEN];
x[2]
}"#,
7,
);
}
#[test]
@ -1903,7 +1957,7 @@ fn enums() {
"#,
);
let r = eval_goal(&db, file_id).unwrap();
assert_eq!(try_const_usize(&r), Some(1));
assert_eq!(try_const_usize(&db, &r), Some(1));
}
#[test]
@ -1931,6 +1985,29 @@ fn const_transfer_memory() {
);
}
#[test]
fn anonymous_const_block() {
check_number(
r#"
extern "rust-intrinsic" {
pub fn size_of<T>() -> usize;
}
const fn f<T>() -> usize {
let r = const { size_of::<T>() };
r
}
const GOAL: usize = {
let x = const { 2 + const { 3 } };
let y = f::<i32>();
x + y
};
"#,
9,
);
}
#[test]
fn const_impl_assoc() {
check_number(
@ -1939,9 +2016,9 @@ fn const_impl_assoc() {
impl U5 {
const VAL: usize = 5;
}
const GOAL: usize = U5::VAL;
const GOAL: usize = U5::VAL + <U5>::VAL;
"#,
5,
10,
);
}
@ -1972,6 +2049,35 @@ fn const_generic_subst_assoc_const_impl() {
);
}
#[test]
fn associated_types() {
check_number(
r#"
trait Tr {
type Item;
fn get_item(&self) -> Self::Item;
}
struct X(i32);
struct Y(i32);
impl Tr for X {
type Item = Y;
fn get_item(&self) -> Self::Item {
Y(self.0 + 2)
}
}
fn my_get_item<T: Tr>(x: T) -> <T as Tr>::Item {
x.get_item()
}
const GOAL: i32 = my_get_item(X(3)).0;
"#,
5,
);
}
#[test]
fn const_trait_assoc() {
check_number(

View file

@ -289,3 +289,31 @@ fn copy() {
19,
);
}
#[test]
fn ctpop() {
check_number(
r#"
extern "rust-intrinsic" {
pub fn ctpop<T: Copy>(x: T) -> T;
}
const GOAL: i64 = ctpop(-29);
"#,
61,
);
}
#[test]
fn cttz() {
check_number(
r#"
extern "rust-intrinsic" {
pub fn cttz<T: Copy>(x: T) -> T;
}
const GOAL: i64 = cttz(-24);
"#,
3,
);
}

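The two tests above pin down the semantics the evaluator gives these intrinsics: `ctpop` is a population count and `cttz` counts trailing zero bits, both over the two's-complement bit pattern. The expected values can be checked with the standard `count_ones` / `trailing_zeros` methods:

fn main() {
    // ctpop(-29): -29i64 is !28, and 28 has three set bits, so 64 - 3 = 61 bits are set.
    assert_eq!((-29i64).count_ones(), 61);
    // cttz(-24): -24i64 ends in ...1101000 in binary, i.e. three trailing zeros.
    assert_eq!((-24i64).trailing_zeros(), 3);
}
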
View file

@ -6,8 +6,8 @@ use std::sync;
use base_db::{impl_intern_key, salsa, CrateId, Upcast};
use hir_def::{
db::DefDatabase, hir::ExprId, layout::TargetDataLayout, AdtId, BlockId, ConstParamId,
DefWithBodyId, EnumVariantId, FunctionId, GenericDefId, ImplId, LifetimeParamId, LocalFieldId,
TypeOrConstParamId, VariantId,
DefWithBodyId, EnumVariantId, FunctionId, GeneralConstId, GenericDefId, ImplId,
LifetimeParamId, LocalFieldId, StaticId, TypeOrConstParamId, VariantId,
};
use la_arena::ArenaMap;
use smallvec::SmallVec;
@ -60,7 +60,12 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
#[salsa::invoke(crate::consteval::const_eval_query)]
#[salsa::cycle(crate::consteval::const_eval_recover)]
fn const_eval(&self, def: DefWithBodyId, subst: Substitution) -> Result<Const, ConstEvalError>;
fn const_eval(&self, def: GeneralConstId, subst: Substitution)
-> Result<Const, ConstEvalError>;
#[salsa::invoke(crate::consteval::const_eval_static_query)]
#[salsa::cycle(crate::consteval::const_eval_static_recover)]
fn const_eval_static(&self, def: StaticId) -> Result<Const, ConstEvalError>;
#[salsa::invoke(crate::consteval::const_eval_discriminant_variant)]
#[salsa::cycle(crate::consteval::const_eval_discriminant_recover)]

View file

@ -420,13 +420,8 @@ impl HirDisplay for Const {
ConstValue::Concrete(c) => match &c.interned {
ConstScalar::Bytes(b, m) => render_const_scalar(f, &b, m, &data.ty),
ConstScalar::UnevaluatedConst(c, parameters) => {
let const_data = f.db.const_data(*c);
write!(
f,
"{}",
const_data.name.as_ref().and_then(|x| x.as_str()).unwrap_or("_")
)?;
hir_fmt_generics(f, parameters, Some((*c).into()))?;
write!(f, "{}", c.name(f.db.upcast()))?;
hir_fmt_generics(f, parameters, c.generic_def(f.db.upcast()))?;
Ok(())
}
ConstScalar::Unknown => f.write_char('_'),

View file

@ -123,9 +123,14 @@ impl HirPlace {
fn ty(&self, ctx: &mut InferenceContext<'_>) -> Ty {
let mut ty = ctx.table.resolve_completely(ctx.result[self.local].clone());
for p in &self.projections {
ty = p.projected_ty(ty, ctx.db, |_, _, _| {
unreachable!("Closure field only happens in MIR");
});
ty = p.projected_ty(
ty,
ctx.db,
|_, _, _| {
unreachable!("Closure field only happens in MIR");
},
ctx.owner.module(ctx.db.upcast()).krate(),
);
}
ty.clone()
}
@ -447,7 +452,6 @@ impl InferenceContext<'_> {
}
}
Expr::Async { statements, tail, .. }
| Expr::Const { statements, tail, .. }
| Expr::Unsafe { statements, tail, .. }
| Expr::Block { statements, tail, .. } => {
for s in statements.iter() {
@ -605,6 +609,7 @@ impl InferenceContext<'_> {
| Expr::Continue { .. }
| Expr::Path(_)
| Expr::Literal(_)
| Expr::Const(_)
| Expr::Underscore => (),
}
}

View file

@ -6,7 +6,7 @@ use std::{
};
use chalk_ir::{
cast::Cast, fold::Shift, DebruijnIndex, GenericArgData, Mutability, TyKind, TyVariableKind,
cast::Cast, fold::Shift, DebruijnIndex, GenericArgData, Mutability, TyVariableKind,
};
use hir_def::{
generics::TypeOrConstParamData,
@ -39,7 +39,7 @@ use crate::{
traits::FnTrait,
utils::{generics, Generics},
Adjust, Adjustment, AdtId, AutoBorrow, Binders, CallableDefId, FnPointer, FnSig, FnSubst,
Interner, Rawness, Scalar, Substitution, TraitRef, Ty, TyBuilder, TyExt,
Interner, Rawness, Scalar, Substitution, TraitRef, Ty, TyBuilder, TyExt, TyKind,
};
use super::{
@ -164,9 +164,10 @@ impl<'a> InferenceContext<'a> {
Expr::Unsafe { id, statements, tail } => {
self.infer_block(tgt_expr, *id, statements, *tail, None, expected)
}
Expr::Const { id, statements, tail } => {
Expr::Const(id) => {
self.with_breakable_ctx(BreakableKind::Border, None, None, |this| {
this.infer_block(tgt_expr, *id, statements, *tail, None, expected)
let (_, expr) = this.db.lookup_intern_anonymous_const(*id);
this.infer_expr(expr, expected)
})
.1
}

View file

@ -42,10 +42,13 @@ impl<'a> InferenceContext<'a> {
self.infer_mut_expr(else_branch, Mutability::Not);
}
}
Expr::Const(id) => {
let (_, expr) = self.db.lookup_intern_anonymous_const(*id);
self.infer_mut_expr(expr, Mutability::Not);
}
Expr::Let { pat, expr } => self.infer_mut_expr(*expr, self.pat_bound_mutability(*pat)),
Expr::Block { id: _, statements, tail, label: _ }
| Expr::Async { id: _, statements, tail }
| Expr::Const { id: _, statements, tail }
| Expr::Unsafe { id: _, statements, tail } => {
for st in statements.iter() {
match st {

View file

@ -379,7 +379,7 @@ impl<'a> InferenceContext<'a> {
if let &Some(slice_pat_id) = slice {
let rest_pat_ty = match expected.kind(Interner) {
TyKind::Array(_, length) => {
let len = try_const_usize(length);
let len = try_const_usize(self.db, length);
let len =
len.and_then(|len| len.checked_sub((prefix.len() + suffix.len()) as u128));
TyKind::Array(elem_ty.clone(), usize_const(self.db, len, self.resolver.krate()))

View file

@ -82,7 +82,7 @@ impl TypeVisitor<Interner> for UninhabitedFrom<'_> {
TyKind::Adt(adt, subst) => self.visit_adt(adt.0, subst),
TyKind::Never => BREAK_VISIBLY_UNINHABITED,
TyKind::Tuple(..) => ty.super_visit_with(self, outer_binder),
TyKind::Array(item_ty, len) => match try_const_usize(len) {
TyKind::Array(item_ty, len) => match try_const_usize(self.db, len) {
Some(0) | None => CONTINUE_OPAQUELY_INHABITED,
Some(1..) => item_ty.super_visit_with(self, outer_binder),
},

View file

@ -148,7 +148,7 @@ pub fn layout_of_ty(db: &dyn HirDatabase, ty: &Ty, krate: CrateId) -> Result<Lay
cx.univariant(dl, &fields, &ReprOptions::default(), kind).ok_or(LayoutError::Unknown)?
}
TyKind::Array(element, count) => {
let count = try_const_usize(&count).ok_or(LayoutError::UserError(
let count = try_const_usize(db, &count).ok_or(LayoutError::UserError(
"unevaluated or mistyped const generic parameter".to_string(),
))? as u64;
let element = layout_of_ty(db, element, krate)?;

View file

@ -230,7 +230,8 @@ fn associated_types() {
}
struct Foo<A: Tr>(<A as Tr>::Ty);
struct Goal(Foo<i32>);
struct Bar<A: Tr>(A::Ty);
struct Goal(Foo<i32>, Bar<i32>, <i32 as Tr>::Ty);
}
}

View file

@ -44,7 +44,7 @@ use chalk_ir::{
NoSolution, TyData,
};
use either::Either;
use hir_def::{hir::ExprId, type_ref::Rawness, ConstId, TypeOrConstParamId};
use hir_def::{hir::ExprId, type_ref::Rawness, GeneralConstId, TypeOrConstParamId};
use hir_expand::name;
use la_arena::{Arena, Idx};
use mir::{MirEvalError, VTableMap};
@ -180,7 +180,7 @@ pub enum ConstScalar {
Bytes(Vec<u8>, MemoryMap),
// FIXME: this is a hack to get around chalk not being able to represent unevaluatable
// constants
UnevaluatedConst(ConstId, Substitution),
UnevaluatedConst(GeneralConstId, Substitution),
/// Case of an unknown value that rustc might know but we don't
// FIXME: this is a hack to get around chalk not being able to represent unevaluatable
// constants

View file

@ -699,7 +699,7 @@ pub fn lookup_impl_method(
};
let name = &db.function_data(func).name;
lookup_impl_assoc_item_for_trait_ref(trait_ref, db, env, name)
let Some((impl_fn, impl_subst)) = lookup_impl_assoc_item_for_trait_ref(trait_ref, db, env, name)
.and_then(|assoc| {
if let (AssocItemId::FunctionId(id), subst) = assoc {
Some((id, subst))
@ -707,7 +707,16 @@ pub fn lookup_impl_method(
None
}
})
.unwrap_or((func, fn_subst))
else {
return (func, fn_subst);
};
(
impl_fn,
Substitution::from_iter(
Interner,
fn_subst.iter(Interner).take(fn_params).chain(impl_subst.iter(Interner)),
),
)
}
fn lookup_impl_assoc_item_for_trait_ref(

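The `lookup_impl_method` change above also returns the substitution of the impl that was selected, and the caller now splices the two substitutions together: the first `fn_params` arguments of the original substitution are kept and the remainder is replaced by the impl's arguments. A toy version of that splice over plain vectors (an illustration only, not the chalk `Substitution` API):

// Keep the leading `fn_params` arguments from the call-site substitution and
// append the arguments of the impl that was actually resolved.
fn splice_args(fn_subst: &[i32], fn_params: usize, impl_subst: &[i32]) -> Vec<i32> {
    fn_subst.iter().take(fn_params).chain(impl_subst.iter()).copied().collect()
}

fn main() {
    // Two leading arguments survive; the tail comes from the impl.
    assert_eq!(splice_args(&[1, 2, 9, 9], 2, &[7, 8]), vec![1, 2, 7, 8]);
}
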
View file

@ -3,10 +3,11 @@
use std::{fmt::Display, iter};
use crate::{
db::HirDatabase, display::HirDisplay, infer::PointerCast, lang_items::is_box, mapping::ToChalk,
CallableDefId, ClosureId, Const, ConstScalar, InferenceResult, Interner, MemoryMap,
Substitution, Ty, TyKind,
consteval::usize_const, db::HirDatabase, display::HirDisplay, infer::PointerCast,
lang_items::is_box, mapping::ToChalk, CallableDefId, ClosureId, Const, ConstScalar,
InferenceResult, Interner, MemoryMap, Substitution, Ty, TyKind,
};
use base_db::CrateId;
use chalk_ir::Mutability;
use hir_def::{
hir::{BindingId, Expr, ExprId, Ordering, PatId},
@ -114,8 +115,8 @@ pub enum ProjectionElem<V, T> {
// FIXME: get rid of this, and use FieldId for tuples and closures
TupleOrClosureField(usize),
Index(V),
ConstantIndex { offset: u64, min_length: u64, from_end: bool },
Subslice { from: u64, to: u64, from_end: bool },
ConstantIndex { offset: u64, from_end: bool },
Subslice { from: u64, to: u64 },
//Downcast(Option<Symbol>, VariantIdx),
OpaqueCast(T),
}
@ -126,6 +127,7 @@ impl<V, T> ProjectionElem<V, T> {
base: Ty,
db: &dyn HirDatabase,
closure_field: impl FnOnce(ClosureId, &Substitution, usize) -> Ty,
krate: CrateId,
) -> Ty {
match self {
ProjectionElem::Deref => match &base.data(Interner).kind {
@ -163,16 +165,34 @@ impl<V, T> ProjectionElem<V, T> {
return TyKind::Error.intern(Interner);
}
},
ProjectionElem::Index(_) => match &base.data(Interner).kind {
TyKind::Array(inner, _) | TyKind::Slice(inner) => inner.clone(),
ProjectionElem::ConstantIndex { .. } | ProjectionElem::Index(_) => {
match &base.data(Interner).kind {
TyKind::Array(inner, _) | TyKind::Slice(inner) => inner.clone(),
_ => {
never!("Overloaded index is not a projection");
return TyKind::Error.intern(Interner);
}
}
}
&ProjectionElem::Subslice { from, to } => match &base.data(Interner).kind {
TyKind::Array(inner, c) => {
let next_c = usize_const(
db,
match try_const_usize(db, c) {
None => None,
Some(x) => x.checked_sub(u128::from(from + to)),
},
krate,
);
TyKind::Array(inner.clone(), next_c).intern(Interner)
}
TyKind::Slice(_) => base.clone(),
_ => {
never!("Overloaded index is not a projection");
never!("Subslice projection should only happen on slice and array");
return TyKind::Error.intern(Interner);
}
},
ProjectionElem::ConstantIndex { .. }
| ProjectionElem::Subslice { .. }
| ProjectionElem::OpaqueCast(_) => {
ProjectionElem::OpaqueCast(_) => {
never!("We don't emit these yet");
return TyKind::Error.intern(Interner);
}
@ -182,12 +202,24 @@ impl<V, T> ProjectionElem<V, T> {
type PlaceElem = ProjectionElem<LocalId, Ty>;
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Place {
pub local: LocalId,
pub projection: Vec<PlaceElem>,
}
impl Place {
fn is_parent(&self, child: &Place) -> bool {
self.local == child.local && child.projection.starts_with(&self.projection)
}
fn iterate_over_parents(&self) -> impl Iterator<Item = Place> + '_ {
(0..self.projection.len())
.map(|x| &self.projection[0..x])
.map(|x| Place { local: self.local, projection: x.to_vec() })
}
}
impl From<LocalId> for Place {
fn from(local: LocalId) -> Self {
Self { local, projection: vec![] }
@ -941,7 +973,6 @@ pub struct MirBody {
pub locals: Arena<Local>,
pub start_block: BasicBlockId,
pub owner: DefWithBodyId,
pub arg_count: usize,
pub binding_locals: ArenaMap<BindingId, LocalId>,
pub param_locals: Vec<LocalId>,
/// This field stores the closures directly owned by this body. It is used
@ -1029,10 +1060,6 @@ impl MirBody {
}
}
fn const_as_usize(c: &Const) -> usize {
try_const_usize(c).unwrap() as usize
}
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum MirSpan {
ExprId(ExprId),

View file

@ -155,8 +155,10 @@ fn ever_initialized_map(body: &MirBody) -> ArenaMap<BasicBlockId, ArenaMap<Local
}
target.into_iter().chain(cleanup.into_iter()).copied().collect()
}
TerminatorKind::Drop { .. }
| TerminatorKind::DropAndReplace { .. }
TerminatorKind::Drop { target, unwind, place: _ } => {
Some(target).into_iter().chain(unwind.into_iter()).copied().collect()
}
TerminatorKind::DropAndReplace { .. }
| TerminatorKind::Assert { .. }
| TerminatorKind::Yield { .. }
| TerminatorKind::GeneratorDrop

View file

@ -5,25 +5,26 @@ use std::{borrow::Cow, collections::HashMap, fmt::Write, iter, ops::Range};
use base_db::{CrateId, FileId};
use chalk_ir::{
fold::{FallibleTypeFolder, TypeFoldable, TypeSuperFoldable},
DebruijnIndex, Mutability,
DebruijnIndex, Mutability, ProjectionTy,
};
use either::Either;
use hir_def::{
builtin_type::BuiltinType,
data::adt::{StructFlags, VariantData},
lang_item::{lang_attr, LangItem},
layout::{TagEncoding, Variants},
AdtId, DefWithBodyId, EnumVariantId, FunctionId, HasModule, ItemContainerId, Lookup, StaticId,
TypeOrConstParamId, VariantId,
AdtId, DefWithBodyId, EnumVariantId, FunctionId, GeneralConstId, HasModule, ItemContainerId,
Lookup, StaticId, TypeOrConstParamId, VariantId,
};
use hir_expand::{name::Name, InFile};
use intern::Interned;
use la_arena::ArenaMap;
use rustc_hash::FxHashMap;
use rustc_hash::{FxHashMap, FxHashSet};
use syntax::{SyntaxNodePtr, TextRange};
use triomphe::Arc;
use crate::{
consteval::{intern_const_scalar, ConstEvalError},
consteval::{intern_const_scalar, try_const_usize, ConstEvalError},
db::HirDatabase,
display::{ClosureStyle, HirDisplay},
from_placeholder_idx,
@ -31,7 +32,7 @@ use crate::{
layout::{layout_of_ty, Layout, LayoutError, RustcEnumVariantIdx},
mapping::from_chalk,
method_resolution::{is_dyn_method, lookup_impl_const, lookup_impl_method},
static_lifetime,
name, static_lifetime,
traits::FnTrait,
utils::{generics, ClosureSubst, Generics},
CallableDefId, ClosureId, Const, ConstScalar, FnDefId, GenericArgData, Interner, MemoryMap,
@ -39,11 +40,13 @@ use crate::{
};
use super::{
const_as_usize, return_slot, AggregateKind, BinOp, CastKind, LocalId, MirBody, MirLowerError,
MirSpan, Operand, Place, ProjectionElem, Rvalue, StatementKind, TerminatorKind, UnOp,
return_slot, AggregateKind, BinOp, CastKind, LocalId, MirBody, MirLowerError, MirSpan, Operand,
Place, ProjectionElem, Rvalue, StatementKind, TerminatorKind, UnOp,
};
mod shim;
#[cfg(test)]
mod tests;
macro_rules! from_bytes {
($ty:tt, $value:expr) => {
@ -87,6 +90,33 @@ impl VTableMap {
}
}
#[derive(Debug, Default, Clone, PartialEq, Eq)]
struct TlsData {
keys: Vec<u128>,
}
impl TlsData {
fn create_key(&mut self) -> usize {
self.keys.push(0);
self.keys.len() - 1
}
fn get_key(&mut self, key: usize) -> Result<u128> {
let r = self.keys.get(key).ok_or_else(|| {
MirEvalError::UndefinedBehavior(format!("Getting invalid tls key {key}"))
})?;
Ok(*r)
}
fn set_key(&mut self, key: usize, value: u128) -> Result<()> {
let r = self.keys.get_mut(key).ok_or_else(|| {
MirEvalError::UndefinedBehavior(format!("Setting invalid tls key {key}"))
})?;
*r = value;
Ok(())
}
}
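
`TlsData` is a deliberately small model of POSIX thread-local storage for this single-threaded interpreter: a key is just an index into a vector of `u128` slots, created as zero and then read or written by index. A standalone sketch of the same idea, with a plain `String` error standing in for `MirEvalError`:

// Minimal single-threaded model of pthread TLS keys.
#[derive(Default)]
struct Tls {
    keys: Vec<u128>,
}

impl Tls {
    fn create_key(&mut self) -> usize {
        self.keys.push(0); // a fresh key starts out as zero
        self.keys.len() - 1
    }
    fn get_key(&self, key: usize) -> Result<u128, String> {
        self.keys.get(key).copied().ok_or_else(|| format!("Getting invalid tls key {key}"))
    }
    fn set_key(&mut self, key: usize, value: u128) -> Result<(), String> {
        match self.keys.get_mut(key) {
            Some(slot) => {
                *slot = value;
                Ok(())
            }
            None => Err(format!("Setting invalid tls key {key}")),
        }
    }
}

fn main() {
    let mut tls = Tls::default();
    let key = tls.create_key(); // what the pthread_key_create shim does
    assert_eq!(tls.get_key(key), Ok(0));
    tls.set_key(key, 42).unwrap(); // pthread_setspecific
    assert_eq!(tls.get_key(key), Ok(42)); // pthread_getspecific
}
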
pub struct Evaluator<'a> {
db: &'a dyn HirDatabase,
trait_env: Arc<TraitEnvironment>,
@ -99,6 +129,9 @@ pub struct Evaluator<'a> {
/// store the type as an interned id in place of function and vtable pointers, and we recover back the type at the
/// time of use.
vtable_map: VTableMap,
thread_local_storage: TlsData,
stdout: Vec<u8>,
stderr: Vec<u8>,
crate_id: CrateId,
// FIXME: This is a workaround, see the comment on `interpret_mir`
assert_placeholder_ty_is_unused: bool,
@ -259,6 +292,7 @@ pub enum MirEvalError {
TargetDataLayoutNotAvailable,
InvalidVTableId(usize),
CoerceUnsizedError(Ty),
LangItemNotFound(LangItem),
}
impl MirEvalError {
@ -350,6 +384,7 @@ impl MirEvalError {
| MirEvalError::StackOverflow
| MirEvalError::TargetDataLayoutNotAvailable
| MirEvalError::CoerceUnsizedError(_)
| MirEvalError::LangItemNotFound(_)
| MirEvalError::InvalidVTableId(_) => writeln!(f, "{:?}", err)?,
}
Ok(())
@ -362,6 +397,7 @@ impl std::fmt::Debug for MirEvalError {
Self::ConstEvalError(arg0, arg1) => {
f.debug_tuple("ConstEvalError").field(arg0).field(arg1).finish()
}
Self::LangItemNotFound(arg0) => f.debug_tuple("LangItemNotFound").field(arg0).finish(),
Self::LayoutError(arg0, arg1) => {
f.debug_tuple("LayoutError").field(arg0).field(arg1).finish()
}
@ -405,11 +441,32 @@ impl std::fmt::Debug for MirEvalError {
type Result<T> = std::result::Result<T, MirEvalError>;
#[derive(Debug, Default)]
struct DropFlags {
need_drop: FxHashSet<Place>,
}
impl DropFlags {
fn add_place(&mut self, p: Place) {
if p.iterate_over_parents().any(|x| self.need_drop.contains(&x)) {
return;
}
self.need_drop.retain(|x| !p.is_parent(x));
self.need_drop.insert(p);
}
fn remove_place(&mut self, p: &Place) -> bool {
// FIXME: replace parents with parts
self.need_drop.remove(p)
}
}
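
`DropFlags` tracks which places still own a value and therefore need drop glue: moving out of a place clears its flag, and `add_place` keeps the set minimal by skipping a place whose parent is already tracked and by evicting tracked children of the newly added place. A standalone sketch with a simplified `Place` (a local index plus a projection path of field indices), assuming the same parent/child rules as above:

use std::collections::HashSet;

// Simplified place: a local plus a projection path (e.g. field indices).
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
struct Place {
    local: usize,
    projection: Vec<usize>,
}

impl Place {
    fn is_parent(&self, child: &Place) -> bool {
        self.local == child.local && child.projection.starts_with(&self.projection)
    }
    // All proper prefixes of this place, mirroring iterate_over_parents.
    fn parents(&self) -> impl Iterator<Item = Place> + '_ {
        (0..self.projection.len())
            .map(|n| Place { local: self.local, projection: self.projection[..n].to_vec() })
    }
}

#[derive(Default)]
struct DropFlags {
    need_drop: HashSet<Place>,
}

impl DropFlags {
    fn add_place(&mut self, p: Place) {
        // If a parent already needs drop, the child is covered by it.
        if p.parents().any(|parent| self.need_drop.contains(&parent)) {
            return;
        }
        // A newly initialized parent subsumes any tracked children.
        self.need_drop.retain(|tracked| !p.is_parent(tracked));
        self.need_drop.insert(p);
    }
    fn remove_place(&mut self, p: &Place) -> bool {
        self.need_drop.remove(p)
    }
}

fn main() {
    let whole = Place { local: 0, projection: vec![] };
    let field = Place { local: 0, projection: vec![1] };
    let mut flags = DropFlags::default();
    flags.add_place(field.clone());
    flags.add_place(whole.clone()); // assigning the whole local evicts the tracked field
    assert!(!flags.remove_place(&field));
    assert!(flags.remove_place(&whole));
}
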
#[derive(Debug)]
struct Locals<'a> {
ptr: &'a ArenaMap<LocalId, Address>,
ptr: &'a ArenaMap<LocalId, Interval>,
body: &'a MirBody,
subst: &'a Substitution,
drop_flags: DropFlags,
}
pub fn interpret_mir(
@ -422,18 +479,30 @@ pub fn interpret_mir(
// a zero size, hoping that they are all outside of our current body. Even without a fix for #7434, we can
// (and probably should) do better here, for example by excluding bindings outside of the target expression.
assert_placeholder_ty_is_unused: bool,
) -> Result<Const> {
) -> (Result<Const>, String, String) {
let ty = body.locals[return_slot()].ty.clone();
let mut evaluator = Evaluator::new(db, body, assert_placeholder_ty_is_unused);
let ty = evaluator.ty_filler(&ty, &subst, body.owner)?;
let bytes = evaluator.interpret_mir(&body, None.into_iter(), subst.clone())?;
let mut memory_map = evaluator.create_memory_map(
&bytes,
&ty,
&Locals { ptr: &ArenaMap::new(), body: &body, subst: &subst },
)?;
memory_map.vtable = evaluator.vtable_map;
return Ok(intern_const_scalar(ConstScalar::Bytes(bytes, memory_map), ty));
let x: Result<Const> = (|| {
let ty = evaluator.ty_filler(&ty, &subst, body.owner)?;
let bytes = evaluator.interpret_mir(&body, None.into_iter(), subst.clone())?;
let mut memory_map = evaluator.create_memory_map(
&bytes,
&ty,
&Locals {
ptr: &ArenaMap::new(),
body: &body,
subst: &subst,
drop_flags: DropFlags::default(),
},
)?;
memory_map.vtable = evaluator.vtable_map.clone();
return Ok(intern_const_scalar(ConstScalar::Bytes(bytes, memory_map), ty));
})();
(
x,
String::from_utf8_lossy(&evaluator.stdout).into_owned(),
String::from_utf8_lossy(&evaluator.stderr).into_owned(),
)
}
impl Evaluator<'_> {
@ -448,13 +517,16 @@ impl Evaluator<'_> {
stack: vec![0],
heap: vec![0],
vtable_map: VTableMap::default(),
thread_local_storage: TlsData::default(),
static_locations: HashMap::default(),
db,
trait_env,
crate_id,
stdout: vec![],
stderr: vec![],
assert_placeholder_ty_is_unused,
stack_depth_limit: 100,
execution_limit: 100_000,
execution_limit: 1000_000,
}
}
@ -485,29 +557,37 @@ impl Evaluator<'_> {
&'a self,
p: &Place,
locals: &'a Locals<'a>,
) -> Result<(Address, Ty, Option<Interval>)> {
let mut addr = locals.ptr[p.local];
) -> Result<(Address, Ty, Option<IntervalOrOwned>)> {
let mut addr = locals.ptr[p.local].addr;
let mut ty: Ty =
self.ty_filler(&locals.body.locals[p.local].ty, locals.subst, locals.body.owner)?;
let mut metadata = None; // locals are always sized
let mut metadata: Option<IntervalOrOwned> = None; // locals are always sized
for proj in &p.projection {
let prev_ty = ty.clone();
ty = proj.projected_ty(ty, self.db, |c, subst, f| {
let (def, _) = self.db.lookup_intern_closure(c.into());
let infer = self.db.infer(def);
let (captures, _) = infer.closure_info(&c);
let parent_subst = ClosureSubst(subst).parent_subst();
captures
.get(f)
.expect("broken closure field")
.ty
.clone()
.substitute(Interner, parent_subst)
});
ty = proj.projected_ty(
ty,
self.db,
|c, subst, f| {
let (def, _) = self.db.lookup_intern_closure(c.into());
let infer = self.db.infer(def);
let (captures, _) = infer.closure_info(&c);
let parent_subst = ClosureSubst(subst).parent_subst();
captures
.get(f)
.expect("broken closure field")
.ty
.clone()
.substitute(Interner, parent_subst)
},
self.crate_id,
);
match proj {
ProjectionElem::Deref => {
metadata = if self.size_align_of(&ty, locals)?.is_none() {
Some(Interval { addr: addr.offset(self.ptr_size()), size: self.ptr_size() })
Some(
Interval { addr: addr.offset(self.ptr_size()), size: self.ptr_size() }
.into(),
)
} else {
None
};
@ -515,13 +595,57 @@ impl Evaluator<'_> {
addr = Address::from_usize(x);
}
ProjectionElem::Index(op) => {
let offset =
from_bytes!(usize, self.read_memory(locals.ptr[*op], self.ptr_size())?);
let offset = from_bytes!(
usize,
self.read_memory(locals.ptr[*op].addr, self.ptr_size())?
);
metadata = None; // Result of index is always sized
let ty_size =
self.size_of_sized(&ty, locals, "array inner type should be sized")?;
addr = addr.offset(ty_size * offset);
}
&ProjectionElem::ConstantIndex { from_end, offset } => {
let offset = if from_end {
let len = match prev_ty.kind(Interner) {
TyKind::Array(_, c) => match try_const_usize(self.db, c) {
Some(x) => x as u64,
None => {
not_supported!("indexing array with unknown const from end")
}
},
TyKind::Slice(_) => match metadata {
Some(x) => from_bytes!(u64, x.get(self)?),
None => not_supported!("slice place without metadata"),
},
_ => not_supported!("bad type for const index"),
};
(len - offset - 1) as usize
} else {
offset as usize
};
metadata = None; // Result of index is always sized
let ty_size =
self.size_of_sized(&ty, locals, "array inner type should be sized")?;
addr = addr.offset(ty_size * offset);
}
&ProjectionElem::Subslice { from, to } => {
let inner_ty = match &ty.data(Interner).kind {
TyKind::Array(inner, _) | TyKind::Slice(inner) => inner.clone(),
_ => TyKind::Error.intern(Interner),
};
metadata = match metadata {
Some(x) => {
let prev_len = from_bytes!(u64, x.get(self)?);
Some(IntervalOrOwned::Owned(
(prev_len - from - to).to_le_bytes().to_vec(),
))
}
None => None,
};
let ty_size =
self.size_of_sized(&inner_ty, locals, "array inner type should be sized")?;
addr = addr.offset(ty_size * (from as usize));
}
&ProjectionElem::TupleOrClosureField(f) => {
let layout = self.layout(&prev_ty)?;
let offset = layout.fields.offset(f).bytes_usize();
@ -553,10 +677,6 @@ impl Evaluator<'_> {
// FIXME: support structs with unsized fields
metadata = None;
}
ProjectionElem::ConstantIndex { .. } => {
not_supported!("constant index")
}
ProjectionElem::Subslice { .. } => not_supported!("subslice"),
ProjectionElem::OpaqueCast(_) => not_supported!("opaque cast"),
}
}
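
Both new projection arms come down to arithmetic over the element size: `ConstantIndex { offset, from_end }` addresses element `offset` from the front, or element `len - offset - 1` when counting from the end, and `Subslice { from, to }` shifts the base address by `from` elements while the known length shrinks to `len - from - to`. A small standalone sketch of that arithmetic, with plain integers in place of `Address`/`Interval`:

// Byte offset of the element selected by a ConstantIndex projection.
fn constant_index_offset(elem_size: usize, len: usize, offset: usize, from_end: bool) -> usize {
    let index = if from_end { len - offset - 1 } else { offset };
    elem_size * index
}

// New base offset and remaining length produced by a Subslice { from, to } projection.
fn subslice(elem_size: usize, len: usize, from: usize, to: usize) -> (usize, usize) {
    (elem_size * from, len - from - to)
}

fn main() {
    // A place like [u32; 7]: seven elements of four bytes each.
    let (elem_size, len) = (4, 7);
    // Second element from the end, i.e. ConstantIndex { offset: 1, from_end: true }.
    assert_eq!(constant_index_offset(elem_size, len, 1, true), 4 * 5);
    // Subslice { from: 2, to: 1 } keeps elements 2..6: base moves by 8 bytes, length becomes 4.
    assert_eq!(subslice(elem_size, len, 2, 1), (8, 4));
}
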
@ -594,7 +714,11 @@ impl Evaluator<'_> {
})
}
fn operand_ty_and_eval(&mut self, o: &Operand, locals: &Locals<'_>) -> Result<IntervalAndTy> {
fn operand_ty_and_eval(
&mut self,
o: &Operand,
locals: &mut Locals<'_>,
) -> Result<IntervalAndTy> {
Ok(IntervalAndTy {
interval: self.eval_operand(o, locals)?,
ty: self.operand_ty(o, locals)?,
@ -613,7 +737,12 @@ impl Evaluator<'_> {
return Err(MirEvalError::StackOverflow);
}
let mut current_block_idx = body.start_block;
let mut locals = Locals { ptr: &ArenaMap::new(), body: &body, subst: &subst };
let mut locals = Locals {
ptr: &ArenaMap::new(),
body: &body,
subst: &subst,
drop_flags: DropFlags::default(),
};
let (locals_ptr, stack_size) = {
let mut stack_ptr = self.stack.len();
let addr = body
@ -624,7 +753,7 @@ impl Evaluator<'_> {
self.size_of_sized(&x.ty, &locals, "no unsized local in extending stack")?;
let my_ptr = stack_ptr;
stack_ptr += size;
Ok((id, Stack(my_ptr)))
Ok((id, Interval { addr: Stack(my_ptr), size }))
})
.collect::<Result<ArenaMap<LocalId, _>>>()?;
let stack_size = stack_ptr - self.stack.len();
@ -632,9 +761,10 @@ impl Evaluator<'_> {
};
locals.ptr = &locals_ptr;
self.stack.extend(iter::repeat(0).take(stack_size));
let mut remain_args = body.arg_count;
for ((_, addr), value) in locals_ptr.iter().skip(1).zip(args) {
self.write_memory(*addr, &value)?;
let mut remain_args = body.param_locals.len();
for ((l, interval), value) in locals_ptr.iter().skip(1).zip(args) {
locals.drop_flags.add_place(l.into());
interval.write_from_bytes(self, &value)?;
if remain_args == 0 {
return Err(MirEvalError::TypeError("more arguments provided"));
}
@ -654,8 +784,9 @@ impl Evaluator<'_> {
match &statement.kind {
StatementKind::Assign(l, r) => {
let addr = self.place_addr(l, &locals)?;
let result = self.eval_rvalue(r, &locals)?.to_vec(&self)?;
let result = self.eval_rvalue(r, &mut locals)?.to_vec(&self)?;
self.write_memory(addr, &result)?;
locals.drop_flags.add_place(l.clone());
}
StatementKind::Deinit(_) => not_supported!("de-init statement"),
StatementKind::StorageLive(_)
@ -678,18 +809,18 @@ impl Evaluator<'_> {
cleanup: _,
from_hir_call: _,
} => {
let destination = self.place_interval(destination, &locals)?;
let destination_interval = self.place_interval(destination, &locals)?;
let fn_ty = self.operand_ty(func, &locals)?;
let args = args
.iter()
.map(|x| self.operand_ty_and_eval(x, &locals))
.map(|x| self.operand_ty_and_eval(x, &mut locals))
.collect::<Result<Vec<_>>>()?;
match &fn_ty.data(Interner).kind {
TyKind::Function(_) => {
let bytes = self.eval_operand(func, &locals)?;
let bytes = self.eval_operand(func, &mut locals)?;
self.exec_fn_pointer(
bytes,
destination,
destination_interval,
&args,
&locals,
terminator.span,
@ -699,7 +830,7 @@ impl Evaluator<'_> {
self.exec_fn_def(
*def,
generic_args,
destination,
destination_interval,
&args,
&locals,
terminator.span,
@ -707,38 +838,33 @@ impl Evaluator<'_> {
}
x => not_supported!("unknown function type {x:?}"),
}
locals.drop_flags.add_place(destination.clone());
current_block_idx = target.expect("broken mir, function without target");
}
TerminatorKind::SwitchInt { discr, targets } => {
let val = u128::from_le_bytes(pad16(
self.eval_operand(discr, &locals)?.get(&self)?,
self.eval_operand(discr, &mut locals)?.get(&self)?,
false,
));
current_block_idx = targets.target_for_value(val);
}
TerminatorKind::Return => {
let ty = body.locals[return_slot()].ty.clone();
self.stack_depth_limit += 1;
return Ok(self
.read_memory(
locals.ptr[return_slot()],
self.size_of_sized(&ty, &locals, "return type")?,
)?
.to_owned());
return Ok(locals.ptr[return_slot()].get(self)?.to_vec());
}
TerminatorKind::Unreachable => {
return Err(MirEvalError::UndefinedBehavior("unreachable executed".to_owned()));
}
TerminatorKind::Drop { place, target, unwind: _ } => {
self.drop_place(place, &mut locals, terminator.span)?;
current_block_idx = *target;
}
_ => not_supported!("unknown terminator"),
}
}
}
fn eval_rvalue<'a>(
&'a mut self,
r: &'a Rvalue,
locals: &'a Locals<'a>,
) -> Result<IntervalOrOwned> {
fn eval_rvalue(&mut self, r: &Rvalue, locals: &mut Locals<'_>) -> Result<IntervalOrOwned> {
use IntervalOrOwned::*;
Ok(match r {
Rvalue::Use(x) => Borrowed(self.eval_operand(x, locals)?),
@ -976,7 +1102,15 @@ impl Evaluator<'_> {
}
}
}
Rvalue::Repeat(_, _) => not_supported!("evaluating repeat rvalue"),
Rvalue::Repeat(x, len) => {
let len = match try_const_usize(self.db, len) {
Some(x) => x as usize,
None => not_supported!("non evaluatable array len in repeat Rvalue"),
};
let val = self.eval_operand(x, locals)?.get(self)?;
let size = len * val.len();
Owned(val.iter().copied().cycle().take(size).collect())
}
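
Evaluating `Rvalue::Repeat` works at the byte level: the element is evaluated once and its byte image is cycled to fill `[elem; len]`. The whole construction is essentially this one function (a sketch over plain byte slices):

// Build the byte image of [elem; len] by repeating the element's bytes.
fn repeat_bytes(elem: &[u8], len: usize) -> Vec<u8> {
    elem.iter().copied().cycle().take(elem.len() * len).collect()
}

fn main() {
    // A u16 element with value 7 (little endian), repeated three times.
    assert_eq!(repeat_bytes(&[7, 0], 3), vec![7, 0, 7, 0, 7, 0]);
}
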
Rvalue::ShallowInitBox(_, _) => not_supported!("shallow init box"),
Rvalue::ShallowInitBoxWithAlloc(ty) => {
let Some((size, align)) = self.size_align_of(ty, locals)? else {
@ -1135,7 +1269,12 @@ impl Evaluator<'_> {
match self.coerce_unsized_look_through_fields(current_ty, for_ptr)? {
ty => match &ty.data(Interner).kind {
TyKind::Array(_, size) => {
let len = const_as_usize(size);
let len = match try_const_usize(self.db, size) {
None => not_supported!(
"unevaluatble len of array in coerce unsized"
),
Some(x) => x as usize,
};
let mut r = Vec::with_capacity(16);
let addr = addr.get(self)?;
r.extend(addr.iter().copied());
@ -1248,9 +1387,12 @@ impl Evaluator<'_> {
Ok(result)
}
fn eval_operand(&mut self, x: &Operand, locals: &Locals<'_>) -> Result<Interval> {
fn eval_operand(&mut self, x: &Operand, locals: &mut Locals<'_>) -> Result<Interval> {
Ok(match x {
Operand::Copy(p) | Operand::Move(p) => self.eval_place(p, locals)?,
Operand::Copy(p) | Operand::Move(p) => {
locals.drop_flags.remove_place(p);
self.eval_place(p, locals)?
}
Operand::Static(st) => {
let addr = self.eval_static(*st, locals)?;
Interval::new(addr, self.ptr_size())
@ -1311,19 +1453,21 @@ impl Evaluator<'_> {
Interval::new(addr, size)
}
ConstScalar::UnevaluatedConst(const_id, subst) => {
let subst = self.subst_filler(subst, locals);
let (const_id, subst) = lookup_impl_const(
self.db,
self.db.trait_environment_for_body(locals.body.owner),
*const_id,
subst,
);
let mut const_id = *const_id;
let mut subst = self.subst_filler(subst, locals);
if let GeneralConstId::ConstId(c) = const_id {
let (c, s) = lookup_impl_const(
self.db,
self.db.trait_environment_for_body(locals.body.owner),
c,
subst,
);
const_id = GeneralConstId::ConstId(c);
subst = s;
}
let c = self.db.const_eval(const_id.into(), subst).map_err(|e| {
let const_data = self.db.const_data(const_id);
MirEvalError::ConstEvalError(
const_data.name.as_ref().and_then(|x| x.as_str()).unwrap_or("_").to_owned(),
Box::new(e),
)
let name = const_id.name(self.db.upcast());
MirEvalError::ConstEvalError(name, Box::new(e))
})?;
if let chalk_ir::ConstValue::Concrete(c) = &c.data(Interner).value {
if let ConstScalar::Bytes(_, _) = &c.interned {
@ -1345,6 +1489,9 @@ impl Evaluator<'_> {
}
fn read_memory(&self, addr: Address, size: usize) -> Result<&[u8]> {
if size == 0 {
return Ok(&[]);
}
let (mem, pos) = match addr {
Stack(x) => (&self.stack, x),
Heap(x) => (&self.heap, x),
@ -1359,6 +1506,9 @@ impl Evaluator<'_> {
}
fn write_memory(&mut self, addr: Address, r: &[u8]) -> Result<()> {
if r.is_empty() {
return Ok(());
}
let (mem, pos) = match addr {
Stack(x) => (&mut self.stack, x),
Heap(x) => (&mut self.heap, x),
@ -1449,14 +1599,24 @@ impl Evaluator<'_> {
outer_binder: DebruijnIndex,
) -> std::result::Result<Ty, Self::Error> {
match ty.kind(Interner) {
TyKind::AssociatedType(id, subst) => {
// I don't know exactly if and why this is needed, but it looks like `normalize_ty` likes
// this kind of associated type.
Ok(TyKind::Alias(chalk_ir::AliasTy::Projection(ProjectionTy {
associated_ty_id: *id,
substitution: subst.clone().try_fold_with(self, outer_binder)?,
}))
.intern(Interner))
}
TyKind::OpaqueType(id, subst) => {
let impl_trait_id = self.db.lookup_intern_impl_trait_id((*id).into());
let subst = subst.clone().try_fold_with(self.as_dyn(), outer_binder)?;
match impl_trait_id {
crate::ImplTraitId::ReturnTypeImplTrait(func, idx) => {
let infer = self.db.infer(func.into());
let filler = &mut Filler {
db: self.db,
subst,
subst: &subst,
generics: Some(generics(self.db.upcast(), func.into())),
};
filler.try_fold_ty(infer.type_of_rpit[idx].clone(), outer_binder)
@ -1791,6 +1951,18 @@ impl Evaluator<'_> {
}
let (imp, generic_args) =
lookup_impl_method(self.db, self.trait_env.clone(), def, generic_args);
self.exec_looked_up_function(generic_args, locals, imp, arg_bytes, span, destination)
}
fn exec_looked_up_function(
&mut self,
generic_args: Substitution,
locals: &Locals<'_>,
imp: FunctionId,
arg_bytes: Vec<Vec<u8>>,
span: MirSpan,
destination: Interval,
) -> Result<()> {
let generic_args = self.subst_filler(&generic_args, &locals);
let def = imp.into();
let mir_body = self.db.mir_body(def).map_err(|e| MirEvalError::MirLowerError(imp, e))?;
@ -1853,13 +2025,12 @@ impl Evaluator<'_> {
};
let static_data = self.db.static_data(st);
let result = if !static_data.is_extern {
let konst =
self.db.const_eval(st.into(), Substitution::empty(Interner)).map_err(|e| {
MirEvalError::ConstEvalError(
static_data.name.as_str().unwrap_or("_").to_owned(),
Box::new(e),
)
})?;
let konst = self.db.const_eval_static(st).map_err(|e| {
MirEvalError::ConstEvalError(
static_data.name.as_str().unwrap_or("_").to_owned(),
Box::new(e),
)
})?;
let data = &konst.data(Interner);
if let chalk_ir::ConstValue::Concrete(c) = &data.value {
self.allocate_const_in_heap(&c, &data.ty, locals, &konst)?
@ -1891,6 +2062,115 @@ impl Evaluator<'_> {
}
}
}
fn drop_place(&mut self, place: &Place, locals: &mut Locals<'_>, span: MirSpan) -> Result<()> {
let (addr, ty, metadata) = self.place_addr_and_ty_and_metadata(place, locals)?;
if !locals.drop_flags.remove_place(place) {
return Ok(());
}
let metadata = match metadata {
Some(x) => x.get(self)?.to_vec(),
None => vec![],
};
self.run_drop_glue_deep(ty, locals, addr, &metadata, span)
}
fn run_drop_glue_deep(
&mut self,
ty: Ty,
locals: &Locals<'_>,
addr: Address,
_metadata: &[u8],
span: MirSpan,
) -> Result<()> {
let Some(drop_fn) = (|| {
let drop_trait = self.db.lang_item(self.crate_id, LangItem::Drop)?.as_trait()?;
self.db.trait_data(drop_trait).method_by_name(&name![drop])
})() else {
// in some tests we don't have drop trait in minicore, and
// we can ignore drop in them.
return Ok(());
};
let (impl_drop_candidate, subst) = lookup_impl_method(
self.db,
self.trait_env.clone(),
drop_fn,
Substitution::from1(Interner, ty.clone()),
);
if impl_drop_candidate != drop_fn {
self.exec_looked_up_function(
subst,
locals,
impl_drop_candidate,
vec![addr.to_bytes()],
span,
Interval { addr: Address::Invalid(0), size: 0 },
)?;
}
match ty.kind(Interner) {
TyKind::Adt(id, subst) => {
match id.0 {
AdtId::StructId(s) => {
let data = self.db.struct_data(s);
if data.flags.contains(StructFlags::IS_MANUALLY_DROP) {
return Ok(());
}
let layout = self.layout_adt(id.0, subst.clone())?;
match data.variant_data.as_ref() {
VariantData::Record(fields) | VariantData::Tuple(fields) => {
let field_types = self.db.field_types(s.into());
for (field, _) in fields.iter() {
let offset = layout
.fields
.offset(u32::from(field.into_raw()) as usize)
.bytes_usize();
let addr = addr.offset(offset);
let ty = field_types[field].clone().substitute(Interner, subst);
self.run_drop_glue_deep(ty, locals, addr, &[], span)?;
}
}
VariantData::Unit => (),
}
}
AdtId::UnionId(_) => (), // union fields don't need drop
AdtId::EnumId(_) => (),
}
}
TyKind::AssociatedType(_, _)
| TyKind::Scalar(_)
| TyKind::Tuple(_, _)
| TyKind::Array(_, _)
| TyKind::Slice(_)
| TyKind::Raw(_, _)
| TyKind::Ref(_, _, _)
| TyKind::OpaqueType(_, _)
| TyKind::FnDef(_, _)
| TyKind::Str
| TyKind::Never
| TyKind::Closure(_, _)
| TyKind::Generator(_, _)
| TyKind::GeneratorWitness(_, _)
| TyKind::Foreign(_)
| TyKind::Error
| TyKind::Placeholder(_)
| TyKind::Dyn(_)
| TyKind::Alias(_)
| TyKind::Function(_)
| TyKind::BoundVar(_)
| TyKind::InferenceVar(_, _) => (),
};
Ok(())
}
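
`run_drop_glue_deep` runs a type's own `Drop::drop` when a real impl is found (i.e. the resolved method differs from the trait's declaration) and then recurses into struct fields by their layout offsets, with `ManuallyDrop` cutting the recursion off. A toy model of that recursion over a simplified type description, logging which drops would run; everything here is a stand-in, not the real `TyKind`/layout machinery:

// Toy model of recursive drop glue.
enum ToyTy {
    Leaf { name: &'static str, has_drop: bool },
    Struct { name: &'static str, has_drop: bool, manually_drop: bool, fields: Vec<ToyTy> },
}

fn run_drop_glue(ty: &ToyTy, log: &mut Vec<String>) {
    match ty {
        ToyTy::Leaf { name, has_drop } => {
            if *has_drop {
                log.push(format!("drop {name}"));
            }
        }
        ToyTy::Struct { name, has_drop, manually_drop, fields } => {
            if *manually_drop {
                return; // ManuallyDrop suppresses the whole subtree
            }
            if *has_drop {
                log.push(format!("drop {name}")); // the struct's own Drop runs first ...
            }
            for field in fields {
                run_drop_glue(field, log); // ... then each field gets its own drop glue
            }
        }
    }
}

fn main() {
    let ty = ToyTy::Struct {
        name: "NestedX",
        has_drop: false,
        manually_drop: false,
        fields: vec![
            ToyTy::Leaf { name: "f1", has_drop: true },
            ToyTy::Leaf { name: "f2", has_drop: true },
        ],
    };
    let mut log = Vec::new();
    run_drop_glue(&ty, &mut log);
    assert_eq!(log, vec!["drop f1".to_string(), "drop f2".to_string()]);
}
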
fn write_to_stdout(&mut self, interval: Interval) -> Result<()> {
self.stdout.extend(interval.get(self)?.to_vec());
Ok(())
}
fn write_to_stderr(&mut self, interval: Interval) -> Result<()> {
self.stderr.extend(interval.get(self)?.to_vec());
Ok(())
}
}
pub fn pad16(x: &[u8], is_signed: bool) -> [u8; 16] {

View file

@ -51,6 +51,24 @@ impl Evaluator<'_> {
)?;
return Ok(true);
}
let is_extern_c = match def.lookup(self.db.upcast()).container {
hir_def::ItemContainerId::ExternBlockId(block) => {
let id = block.lookup(self.db.upcast()).id;
id.item_tree(self.db.upcast())[id.value].abi.as_deref() == Some("C")
}
_ => false,
};
if is_extern_c {
self.exec_extern_c(
function_data.name.as_text().unwrap_or_default().as_str(),
args,
generic_args,
destination,
&locals,
span,
)?;
return Ok(true);
}
let alloc_fn = function_data
.attrs
.iter()
@ -72,7 +90,7 @@ impl Evaluator<'_> {
if let Some(x) = self.detect_lang_function(def) {
let arg_bytes =
args.iter().map(|x| Ok(x.get(&self)?.to_owned())).collect::<Result<Vec<_>>>()?;
let result = self.exec_lang_item(x, &arg_bytes)?;
let result = self.exec_lang_item(x, generic_args, &arg_bytes, locals, span)?;
destination.write_from_bytes(self, &result)?;
return Ok(true);
}
@ -118,13 +136,20 @@ impl Evaluator<'_> {
use LangItem::*;
let candidate = lang_attr(self.db.upcast(), def)?;
// We want to execute these functions with special logic
if [PanicFmt, BeginPanic, SliceLen].contains(&candidate) {
if [PanicFmt, BeginPanic, SliceLen, DropInPlace].contains(&candidate) {
return Some(candidate);
}
None
}
fn exec_lang_item(&self, x: LangItem, args: &[Vec<u8>]) -> Result<Vec<u8>> {
fn exec_lang_item(
&mut self,
x: LangItem,
generic_args: &Substitution,
args: &[Vec<u8>],
locals: &Locals<'_>,
span: MirSpan,
) -> Result<Vec<u8>> {
use LangItem::*;
let mut args = args.iter();
match x {
@ -139,10 +164,114 @@ impl Evaluator<'_> {
let ptr_size = arg.len() / 2;
Ok(arg[ptr_size..].into())
}
DropInPlace => {
let ty =
generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)).ok_or(
MirEvalError::TypeError(
"generic argument of drop_in_place is not provided",
),
)?;
let arg = args
.next()
.ok_or(MirEvalError::TypeError("argument of drop_in_place is not provided"))?;
self.run_drop_glue_deep(
ty.clone(),
locals,
Address::from_bytes(&arg[0..self.ptr_size()])?,
&arg[self.ptr_size()..],
span,
)?;
Ok(vec![])
}
x => not_supported!("Executing lang item {x:?}"),
}
}
fn exec_extern_c(
&mut self,
as_str: &str,
args: &[IntervalAndTy],
_generic_args: &Substitution,
destination: Interval,
locals: &Locals<'_>,
_span: MirSpan,
) -> Result<()> {
match as_str {
"write" => {
let [fd, ptr, len] = args else {
return Err(MirEvalError::TypeError("libc::write args are not provided"));
};
let fd = u128::from_le_bytes(pad16(fd.get(self)?, false));
let interval = Interval {
addr: Address::from_bytes(ptr.get(self)?)?,
size: from_bytes!(usize, len.get(self)?),
};
match fd {
1 => {
self.write_to_stdout(interval)?;
}
2 => {
self.write_to_stderr(interval)?;
}
_ => not_supported!("write to arbitrary file descriptor"),
}
Ok(())
}
"pthread_key_create" => {
let key = self.thread_local_storage.create_key();
let Some(arg0) = args.get(0) else {
return Err(MirEvalError::TypeError("pthread_key_create arg0 is not provided"));
};
let arg0_addr = Address::from_bytes(arg0.get(self)?)?;
let key_ty = if let Some((ty, ..)) = arg0.ty.as_reference_or_ptr() {
ty
} else {
return Err(MirEvalError::TypeError(
"pthread_key_create arg0 is not a pointer",
));
};
let arg0_interval = Interval::new(
arg0_addr,
self.size_of_sized(key_ty, locals, "pthread_key_create key arg")?,
);
arg0_interval.write_from_bytes(self, &key.to_le_bytes()[0..arg0_interval.size])?;
// return 0 as success
destination.write_from_bytes(self, &0u64.to_le_bytes()[0..destination.size])?;
Ok(())
}
"pthread_getspecific" => {
let Some(arg0) = args.get(0) else {
return Err(MirEvalError::TypeError("pthread_getspecific arg0 is not provided"));
};
let key = from_bytes!(usize, &pad16(arg0.get(self)?, false)[0..8]);
let value = self.thread_local_storage.get_key(key)?;
destination.write_from_bytes(self, &value.to_le_bytes()[0..destination.size])?;
Ok(())
}
"pthread_setspecific" => {
let Some(arg0) = args.get(0) else {
return Err(MirEvalError::TypeError("pthread_setspecific arg0 is not provided"));
};
let key = from_bytes!(usize, &pad16(arg0.get(self)?, false)[0..8]);
let Some(arg1) = args.get(1) else {
return Err(MirEvalError::TypeError("pthread_setspecific arg1 is not provided"));
};
let value = from_bytes!(u128, pad16(arg1.get(self)?, false));
self.thread_local_storage.set_key(key, value)?;
// return 0 as success
destination.write_from_bytes(self, &0u64.to_le_bytes()[0..destination.size])?;
Ok(())
}
"pthread_key_delete" => {
// we ignore this currently
// return 0 as success
destination.write_from_bytes(self, &0u64.to_le_bytes()[0..destination.size])?;
Ok(())
}
_ => not_supported!("unknown external function {as_str}"),
}
}
fn exec_intrinsic(
&mut self,
as_str: &str,
@ -288,7 +417,7 @@ impl Evaluator<'_> {
let ans = lhs.get(self)? == rhs.get(self)?;
destination.write_from_bytes(self, &[u8::from(ans)])
}
"wrapping_add" => {
"wrapping_add" | "unchecked_add" => {
let [lhs, rhs] = args else {
return Err(MirEvalError::TypeError("wrapping_add args are not provided"));
};
@ -297,6 +426,39 @@ impl Evaluator<'_> {
let ans = lhs.wrapping_add(rhs);
destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size])
}
"wrapping_sub" | "unchecked_sub" | "ptr_offset_from_unsigned" | "ptr_offset_from" => {
let [lhs, rhs] = args else {
return Err(MirEvalError::TypeError("wrapping_sub args are not provided"));
};
let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
let ans = lhs.wrapping_sub(rhs);
destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size])
}
"unchecked_rem" => {
// FIXME: signed
let [lhs, rhs] = args else {
return Err(MirEvalError::TypeError("unchecked_rem args are not provided"));
};
let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
let ans = lhs.checked_rem(rhs).ok_or_else(|| {
MirEvalError::UndefinedBehavior("unchecked_rem with bad inputs".to_owned())
})?;
destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size])
}
"unchecked_div" | "exact_div" => {
// FIXME: signed
let [lhs, rhs] = args else {
return Err(MirEvalError::TypeError("unchecked_div args are not provided"));
};
let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
let ans = lhs.checked_div(rhs).ok_or_else(|| {
MirEvalError::UndefinedBehavior("unchecked_rem with bad inputs".to_owned())
})?;
destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size])
}
"add_with_overflow" => {
let [lhs, rhs] = args else {
return Err(MirEvalError::TypeError("const_eval_select args are not provided"));
@ -373,6 +535,22 @@ impl Evaluator<'_> {
};
destination.write_from_interval(self, arg.interval)
}
"ctpop" => {
let [arg] = args else {
return Err(MirEvalError::TypeError("likely arg is not provided"));
};
let result = u128::from_le_bytes(pad16(arg.get(self)?, false)).count_ones();
destination
.write_from_bytes(self, &(result as u128).to_le_bytes()[0..destination.size])
}
"cttz" | "cttz_nonzero" => {
let [arg] = args else {
return Err(MirEvalError::TypeError("likely arg is not provided"));
};
let result = u128::from_le_bytes(pad16(arg.get(self)?, false)).trailing_zeros();
destination
.write_from_bytes(self, &(result as u128).to_le_bytes()[0..destination.size])
}
"const_eval_select" => {
let [tuple, const_fn, _] = args else {
return Err(MirEvalError::TypeError("const_eval_select args are not provided"));

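The arithmetic intrinsic arms above all follow one pattern: zero-extend both operands to `u128` (that is what `pad16(.., false)` amounts to), do the operation once at that width, and write back only `destination.size` bytes of the little-endian result, which is what yields wrapping behaviour for the narrower type. A minimal sketch of that widen-operate-truncate step, shown for `wrapping_add`:

// Zero-extend little-endian bytes to u128.
fn widen(bytes: &[u8]) -> u128 {
    let mut buf = [0u8; 16];
    buf[..bytes.len()].copy_from_slice(bytes);
    u128::from_le_bytes(buf)
}

// Operate at u128 width, then truncate back to the destination's byte width.
fn wrapping_add_bytes(lhs: &[u8], rhs: &[u8], dest_size: usize) -> Vec<u8> {
    let ans = widen(lhs).wrapping_add(widen(rhs));
    ans.to_le_bytes()[..dest_size].to_vec()
}

fn main() {
    // 255u8 + 1u8: the u128 result is 256, but only the low byte is written back, giving 0.
    assert_eq!(wrapping_add_bytes(&[0xff], &[0x01], 1), vec![0x00]);
}
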
View file

@ -0,0 +1,299 @@
use base_db::{fixture::WithFixture, FileId};
use hir_def::db::DefDatabase;
use crate::{db::HirDatabase, test_db::TestDB, Interner, Substitution};
use super::{interpret_mir, MirEvalError};
fn eval_main(db: &TestDB, file_id: FileId) -> Result<(String, String), MirEvalError> {
let module_id = db.module_for_file(file_id);
let def_map = module_id.def_map(db);
let scope = &def_map[module_id.local_id].scope;
let func_id = scope
.declarations()
.find_map(|x| match x {
hir_def::ModuleDefId::FunctionId(x) => {
if db.function_data(x).name.to_string() == "main" {
Some(x)
} else {
None
}
}
_ => None,
})
.unwrap();
let body =
db.mir_body(func_id.into()).map_err(|e| MirEvalError::MirLowerError(func_id.into(), e))?;
let (result, stdout, stderr) = interpret_mir(db, &body, Substitution::empty(Interner), false);
result?;
Ok((stdout, stderr))
}
fn check_pass(ra_fixture: &str) {
check_pass_and_stdio(ra_fixture, "", "");
}
fn check_pass_and_stdio(ra_fixture: &str, expected_stdout: &str, expected_stderr: &str) {
let (db, file_id) = TestDB::with_single_file(ra_fixture);
let x = eval_main(&db, file_id);
match x {
Err(e) => {
let mut err = String::new();
let span_formatter = |file, range| format!("{:?} {:?}", file, range);
e.pretty_print(&mut err, &db, span_formatter).unwrap();
panic!("Error in interpreting: {err}");
}
Ok((stdout, stderr)) => {
assert_eq!(stdout, expected_stdout);
assert_eq!(stderr, expected_stderr);
}
}
}
#[test]
fn function_with_extern_c_abi() {
check_pass(
r#"
extern "C" fn foo(a: i32, b: i32) -> i32 {
a + b
}
fn main() {
let x = foo(2, 3);
}
"#,
);
}
#[test]
fn drop_basic() {
check_pass(
r#"
//- minicore: drop, add
struct X<'a>(&'a mut i32);
impl<'a> Drop for X<'a> {
fn drop(&mut self) {
*self.0 += 1;
}
}
struct NestedX<'a> { f1: X<'a>, f2: X<'a> }
fn should_not_reach() {
_ // FIXME: replace this function with panic when that works
}
fn my_drop2(x: X<'_>) {
return;
}
fn my_drop(x: X<'_>) {
drop(x);
}
fn main() {
let mut s = 10;
let mut x = X(&mut s);
my_drop(x);
x = X(&mut s);
my_drop2(x);
X(&mut s); // dropped immediately
let x = X(&mut s);
NestedX { f1: x, f2: X(&mut s) };
if s != 15 {
should_not_reach();
}
}
"#,
);
}
#[test]
fn drop_in_place() {
check_pass(
r#"
//- minicore: drop, add, coerce_unsized
use core::ptr::drop_in_place;
struct X<'a>(&'a mut i32);
impl<'a> Drop for X<'a> {
fn drop(&mut self) {
*self.0 += 1;
}
}
fn should_not_reach() {
_ // FIXME: replace this function with panic when that works
}
fn main() {
let mut s = 2;
let mut x = X(&mut s);
drop_in_place(&mut x);
drop(x);
if s != 4 {
should_not_reach();
}
let p: &mut [X] = &mut [X(&mut 2)];
drop_in_place(p);
}
"#,
);
}
#[test]
fn manually_drop() {
check_pass(
r#"
//- minicore: manually_drop
use core::mem::ManuallyDrop;
struct X;
impl Drop for X {
fn drop(&mut self) {
should_not_reach();
}
}
fn should_not_reach() {
_ // FIXME: replace this function with panic when that works
}
fn main() {
let x = ManuallyDrop::new(X);
}
"#,
);
}
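For readers unfamiliar with `ManuallyDrop`, a standalone sketch (hypothetical `Loud` type, not part of the fixture) of the behaviour this test relies on: wrapping a value in `ManuallyDrop` suppresses its drop glue, so `should_not_reach` is never hit.
use std::mem::ManuallyDrop;

struct Loud;
impl Drop for Loud {
    fn drop(&mut self) {
        println!("dropped");
    }
}

fn main() {
    let x = ManuallyDrop::new(Loud);
    drop(x); // prints nothing: the inner `Loud` destructor is not run
}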
#[test]
fn generic_impl_for_trait_with_generic_method() {
check_pass(
r#"
//- minicore: drop
struct S<T>(T);
trait Tr {
fn f<F>(&self, x: F);
}
impl<T> Tr for S<T> {
fn f<F>(&self, x: F) {
}
}
fn main() {
let s = S(1u8);
s.f(5i64);
}
"#,
);
}
#[test]
fn index_of_slice_should_preserve_len() {
check_pass(
r#"
//- minicore: index, slice, coerce_unsized
struct X;
impl core::ops::Index<X> for [i32] {
type Output = i32;
fn index(&self, _: X) -> &i32 {
if self.len() != 3 {
should_not_reach();
}
&self[0]
}
}
fn should_not_reach() {
_ // FIXME: replace this function with panic when that works
}
fn main() {
let x: &[i32] = &[1, 2, 3];
&x[X];
}
"#,
);
}
#[test]
fn unix_write_stdout() {
check_pass_and_stdio(
r#"
//- minicore: slice, index, coerce_unsized
type pthread_key_t = u32;
type c_void = u8;
type c_int = i32;
extern "C" {
pub fn write(fd: i32, buf: *const u8, count: usize) -> usize;
}
fn main() {
let stdout = b"stdout";
let stderr = b"stderr";
write(1, &stdout[0], 6);
write(2, &stderr[0], 6);
}
"#,
"stdout",
"stderr",
);
}
#[test]
fn closure_layout_in_rpit() {
check_pass(
r#"
//- minicore: fn
fn f<F: Fn()>(x: F) {
fn g(x: impl Fn()) -> impl FnOnce() {
move || {
x();
}
}
g(x)();
}
fn main() {
f(|| {});
}
"#,
);
}
#[test]
fn posix_tls() {
check_pass(
r#"
//- minicore: option
type pthread_key_t = u32;
type c_void = u8;
type c_int = i32;
extern "C" {
pub fn pthread_key_create(
key: *mut pthread_key_t,
dtor: Option<unsafe extern "C" fn(*mut c_void)>,
) -> c_int;
pub fn pthread_key_delete(key: pthread_key_t) -> c_int;
pub fn pthread_getspecific(key: pthread_key_t) -> *mut c_void;
pub fn pthread_setspecific(key: pthread_key_t, value: *const c_void) -> c_int;
}
fn main() {
let mut key = 2;
pthread_key_create(&mut key, None);
}
"#,
);
}

View file

@ -14,7 +14,8 @@ use hir_def::{
lang_item::{LangItem, LangItemTarget},
path::Path,
resolver::{resolver_for_expr, ResolveValueResult, ValueNs},
AdtId, DefWithBodyId, EnumVariantId, HasModule, ItemContainerId, LocalFieldId, TraitId,
AdtId, DefWithBodyId, EnumVariantId, GeneralConstId, HasModule, ItemContainerId, LocalFieldId,
TraitId,
};
use hir_expand::name::Name;
use la_arena::ArenaMap;
@ -30,7 +31,6 @@ use crate::{
inhabitedness::is_ty_uninhabited_from,
layout::{layout_of_ty, LayoutError},
mapping::ToChalk,
method_resolution::lookup_impl_const,
static_lifetime,
utils::{generics, ClosureSubst},
Adjust, Adjustment, AutoBorrow, CallableDefId, TyBuilder, TyExt,
@ -51,17 +51,22 @@ struct LoopBlocks {
place: Place,
}
#[derive(Debug, Clone, Default)]
struct DropScope {
/// locals, in order of definition (so we should run drop glues in reverse order)
locals: Vec<LocalId>,
}
struct MirLowerCtx<'a> {
result: MirBody,
owner: DefWithBodyId,
current_loop_blocks: Option<LoopBlocks>,
// FIXME: we should resolve labels in HIR lowering and always work with label id here, not
// with raw names.
labeled_loop_blocks: FxHashMap<LabelId, LoopBlocks>,
discr_temp: Option<Place>,
db: &'a dyn HirDatabase,
body: &'a Body,
infer: &'a InferenceResult,
drop_scopes: Vec<DropScope>,
}
#[derive(Debug, Clone, PartialEq, Eq)]
@ -181,7 +186,6 @@ impl<'ctx> MirLowerCtx<'ctx> {
binding_locals,
param_locals: vec![],
owner,
arg_count: body.params.len(),
closures: vec![],
};
let ctx = MirLowerCtx {
@ -193,15 +197,18 @@ impl<'ctx> MirLowerCtx<'ctx> {
current_loop_blocks: None,
labeled_loop_blocks: Default::default(),
discr_temp: None,
drop_scopes: vec![DropScope::default()],
};
ctx
}
fn temp(&mut self, ty: Ty) -> Result<LocalId> {
fn temp(&mut self, ty: Ty, current: BasicBlockId, span: MirSpan) -> Result<LocalId> {
if matches!(ty.kind(Interner), TyKind::Slice(_) | TyKind::Dyn(_)) {
implementation_error!("unsized temporaries");
}
Ok(self.result.locals.alloc(Local { ty }))
let l = self.result.locals.alloc(Local { ty });
self.push_storage_live_for_local(l, current, span)?;
Ok(l)
}
fn lower_expr_to_some_operand(
@ -234,7 +241,8 @@ impl<'ctx> MirLowerCtx<'ctx> {
match adjustments.split_last() {
Some((last, rest)) => match &last.kind {
Adjust::NeverToAny => {
let temp = self.temp(TyKind::Never.intern(Interner))?;
let temp =
self.temp(TyKind::Never.intern(Interner), current, MirSpan::Unknown)?;
self.lower_expr_to_place_with_adjust(expr_id, temp.into(), current, rest)
}
Adjust::Deref(_) => {
@ -303,45 +311,39 @@ impl<'ctx> MirLowerCtx<'ctx> {
Err(MirLowerError::IncompleteExpr)
},
Expr::Path(p) => {
let unresolved_name = || MirLowerError::unresolved_path(self.db, p);
let resolver = resolver_for_expr(self.db.upcast(), self.owner, expr_id);
let pr = resolver
.resolve_path_in_value_ns(self.db.upcast(), p)
.ok_or_else(unresolved_name)?;
let pr = match pr {
ResolveValueResult::ValueNs(v) => v,
ResolveValueResult::Partial(..) => {
if let Some((assoc, subst)) = self
.infer
.assoc_resolutions_for_expr(expr_id)
{
match assoc {
hir_def::AssocItemId::ConstId(c) => {
self.lower_const(c, current, place, subst, expr_id.into(), self.expr_ty_without_adjust(expr_id))?;
return Ok(Some(current))
},
hir_def::AssocItemId::FunctionId(_) => {
// FnDefs are zero sized, no action is needed.
return Ok(Some(current))
}
hir_def::AssocItemId::TypeAliasId(_) => {
// FIXME: If it is unreachable, use proper error instead of `not_supported`.
not_supported!("associated functions and types")
},
}
} else if let Some(variant) = self
.infer
.variant_resolution_for_expr(expr_id)
{
match variant {
VariantId::EnumVariantId(e) => ValueNs::EnumVariantId(e),
VariantId::StructId(s) => ValueNs::StructId(s),
VariantId::UnionId(_) => implementation_error!("Union variant as path"),
}
} else {
return Err(unresolved_name());
let pr = if let Some((assoc, subst)) = self
.infer
.assoc_resolutions_for_expr(expr_id)
{
match assoc {
hir_def::AssocItemId::ConstId(c) => {
self.lower_const(c.into(), current, place, subst, expr_id.into(), self.expr_ty_without_adjust(expr_id))?;
return Ok(Some(current))
},
hir_def::AssocItemId::FunctionId(_) => {
// FnDefs are zero sized, no action is needed.
return Ok(Some(current))
}
hir_def::AssocItemId::TypeAliasId(_) => {
// FIXME: If it is unreachable, use proper error instead of `not_supported`.
not_supported!("associated functions and types")
},
}
} else if let Some(variant) = self
.infer
.variant_resolution_for_expr(expr_id)
{
match variant {
VariantId::EnumVariantId(e) => ValueNs::EnumVariantId(e),
VariantId::StructId(s) => ValueNs::StructId(s),
VariantId::UnionId(_) => implementation_error!("Union variant as path"),
}
} else {
let unresolved_name = || MirLowerError::unresolved_path(self.db, p);
let resolver = resolver_for_expr(self.db.upcast(), self.owner, expr_id);
resolver
.resolve_path_in_value_ns_fully(self.db.upcast(), p)
.ok_or_else(unresolved_name)?
};
match pr {
ValueNs::LocalBinding(_) | ValueNs::StaticId(_) => {
@ -357,7 +359,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
Ok(Some(current))
}
ValueNs::ConstId(const_id) => {
self.lower_const(const_id, current, place, Substitution::empty(Interner), expr_id.into(), self.expr_ty_without_adjust(expr_id))?;
self.lower_const(const_id.into(), current, place, Substitution::empty(Interner), expr_id.into(), self.expr_ty_without_adjust(expr_id))?;
Ok(Some(current))
}
ValueNs::EnumVariantId(variant_id) => {
@ -470,9 +472,10 @@ impl<'ctx> MirLowerCtx<'ctx> {
Expr::Block { id: _, statements, tail, label } => {
if let Some(label) = label {
self.lower_loop(current, place.clone(), Some(*label), expr_id.into(), |this, begin| {
if let Some(block) = this.lower_block_to_place(statements, begin, *tail, place, expr_id.into())? {
if let Some(current) = this.lower_block_to_place(statements, begin, *tail, place, expr_id.into())? {
let end = this.current_loop_end()?;
this.set_goto(block, end, expr_id.into());
let current = this.pop_drop_scope(current);
this.set_goto(current, end, expr_id.into());
}
Ok(())
})
@ -481,8 +484,9 @@ impl<'ctx> MirLowerCtx<'ctx> {
}
}
Expr::Loop { body, label } => self.lower_loop(current, place, *label, expr_id.into(), |this, begin| {
if let Some((_, block)) = this.lower_expr_as_place(begin, *body, true)? {
this.set_goto(block, begin, expr_id.into());
if let Some((_, current)) = this.lower_expr_as_place(begin, *body, true)? {
let current = this.pop_drop_scope(current);
this.set_goto(current, begin, expr_id.into());
}
Ok(())
}),
@ -502,6 +506,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
expr_id.into(),
);
if let Some((_, block)) = this.lower_expr_as_place(after_cond, *body, true)? {
let block = this.pop_drop_scope(block);
this.set_goto(block, begin, expr_id.into());
}
Ok(())
@ -531,9 +536,9 @@ impl<'ctx> MirLowerCtx<'ctx> {
let ref_mut_iterator_ty = TyKind::Ref(Mutability::Mut, static_lifetime(), iterator_ty.clone()).intern(Interner);
let item_ty = &self.infer.type_of_pat[pat];
let option_item_ty = TyKind::Adt(chalk_ir::AdtId(option.into()), Substitution::from1(Interner, item_ty.clone())).intern(Interner);
let iterator_place: Place = self.temp(iterator_ty.clone())?.into();
let option_item_place: Place = self.temp(option_item_ty.clone())?.into();
let ref_mut_iterator_place: Place = self.temp(ref_mut_iterator_ty)?.into();
let iterator_place: Place = self.temp(iterator_ty.clone(), current, expr_id.into())?.into();
let option_item_place: Place = self.temp(option_item_ty.clone(), current, expr_id.into())?.into();
let ref_mut_iterator_place: Place = self.temp(ref_mut_iterator_ty, current, expr_id.into())?.into();
let Some(current) = self.lower_call_and_args(into_iter_fn_op, Some(iterable).into_iter(), iterator_place.clone(), current, false, expr_id.into())?
else {
return Ok(None);
@ -556,6 +561,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
AdtPatternShape::Tuple { args: &[pat], ellipsis: None },
)?;
if let Some((_, block)) = this.lower_expr_as_place(current, body, true)? {
let block = this.pop_drop_scope(block);
this.set_goto(block, begin, expr_id.into());
}
Ok(())
@ -686,6 +692,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
return Ok(None);
}
}
current = self.drop_until_scope(0, current);
self.set_terminator(current, TerminatorKind::Return, expr_id.into());
Ok(None)
}
@ -770,7 +777,11 @@ impl<'ctx> MirLowerCtx<'ctx> {
Expr::Await { .. } => not_supported!("await"),
Expr::Yeet { .. } => not_supported!("yeet"),
Expr::Async { .. } => not_supported!("async block"),
Expr::Const { .. } => not_supported!("anonymous const block"),
&Expr::Const(id) => {
let subst = self.placeholder_subst();
self.lower_const(id.into(), current, place, subst, expr_id.into(), self.expr_ty_without_adjust(expr_id))?;
Ok(Some(current))
},
Expr::Cast { expr, type_ref: _ } => {
let Some((x, current)) = self.lower_expr_to_some_operand(*expr, current)? else {
return Ok(None);
@ -830,11 +841,16 @@ impl<'ctx> MirLowerCtx<'ctx> {
},
Expr::BinaryOp { lhs, rhs, op } => {
let op = op.ok_or(MirLowerError::IncompleteExpr)?;
let is_builtin = {
let is_builtin = 'b: {
// Using the unadjusted type here is a hack: we assume we know every possible adjustment
// for binary operators, and rely on the unadjusted type to simplify our conditions.
let lhs_ty = self.expr_ty_without_adjust(*lhs);
let rhs_ty = self.expr_ty_without_adjust(*rhs);
if matches!(op, BinaryOp::CmpOp(syntax::ast::CmpOp::Eq { .. })) {
if lhs_ty.as_raw_ptr().is_some() && rhs_ty.as_raw_ptr().is_some() {
break 'b true;
}
}
let builtin_inequal_impls = matches!(
op,
BinaryOp::ArithOp(ArithOp::Shl | ArithOp::Shr) | BinaryOp::Assignment { op: Some(ArithOp::Shl | ArithOp::Shr) }
@ -973,8 +989,8 @@ impl<'ctx> MirLowerCtx<'ctx> {
ProjectionElem::Deref => ProjectionElem::Deref,
ProjectionElem::Field(x) => ProjectionElem::Field(x),
ProjectionElem::TupleOrClosureField(x) => ProjectionElem::TupleOrClosureField(x),
ProjectionElem::ConstantIndex { offset, min_length, from_end } => ProjectionElem::ConstantIndex { offset, min_length, from_end },
ProjectionElem::Subslice { from, to, from_end } => ProjectionElem::Subslice { from, to, from_end },
ProjectionElem::ConstantIndex { offset, from_end } => ProjectionElem::ConstantIndex { offset, from_end },
ProjectionElem::Subslice { from, to } => ProjectionElem::Subslice { from, to },
ProjectionElem::OpaqueCast(x) => ProjectionElem::OpaqueCast(x),
ProjectionElem::Index(x) => match x { },
}
@ -982,12 +998,9 @@ impl<'ctx> MirLowerCtx<'ctx> {
};
match &capture.kind {
CaptureKind::ByRef(bk) => {
let placeholder_subst = match self.owner.as_generic_def_id() {
Some(x) => TyBuilder::placeholder_subst(self.db, x),
None => Substitution::empty(Interner),
};
let placeholder_subst = self.placeholder_subst();
let tmp_ty = capture.ty.clone().substitute(Interner, &placeholder_subst);
let tmp: Place = self.temp(tmp_ty)?.into();
let tmp: Place = self.temp(tmp_ty, current, capture.span)?.into();
self.push_assignment(
current,
tmp.clone(),
@ -1085,6 +1098,14 @@ impl<'ctx> MirLowerCtx<'ctx> {
}
}
fn placeholder_subst(&mut self) -> Substitution {
    match self.owner.as_generic_def_id() {
        Some(x) => TyBuilder::placeholder_subst(self.db, x),
        None => Substitution::empty(Interner),
    }
}
fn push_field_projection(&self, place: &mut Place, expr_id: ExprId) -> Result<()> {
if let Expr::Field { expr, name } = &self.body[expr_id] {
if let TyKind::Tuple(..) = self.expr_ty_after_adjustments(*expr).kind(Interner) {
@ -1146,7 +1167,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
fn lower_const(
&mut self,
const_id: hir_def::ConstId,
const_id: GeneralConstId,
prev_block: BasicBlockId,
place: Place,
subst: Substitution,
@ -1157,20 +1178,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
// We can't evaluate a constant with a substitution now, as generics are not monomorphized in lowering.
intern_const_scalar(ConstScalar::UnevaluatedConst(const_id, subst), ty)
} else {
let (const_id, subst) = lookup_impl_const(
self.db,
self.db.trait_environment_for_body(self.owner),
const_id,
subst,
);
let name = self
.db
.const_data(const_id)
.name
.as_ref()
.and_then(|x| x.as_str())
.unwrap_or("_")
.to_owned();
let name = const_id.name(self.db.upcast());
self.db
.const_eval(const_id.into(), subst)
.map_err(|e| MirLowerError::ConstEvalError(name, Box::new(e)))?
@ -1313,12 +1321,14 @@ impl<'ctx> MirLowerCtx<'ctx> {
self.push_statement(block, StatementKind::Assign(place, rvalue).with_span(span));
}
fn discr_temp_place(&mut self) -> Place {
fn discr_temp_place(&mut self, current: BasicBlockId) -> Place {
match &self.discr_temp {
Some(x) => x.clone(),
None => {
let tmp: Place =
self.temp(TyBuilder::discr_ty()).expect("discr_ty is never unsized").into();
let tmp: Place = self
.temp(TyBuilder::discr_ty(), current, MirSpan::Unknown)
.expect("discr_ty is never unsized")
.into();
self.discr_temp = Some(tmp.clone());
tmp
}
@ -1349,6 +1359,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
None
};
self.set_goto(prev_block, begin, span);
self.push_drop_scope();
f(self, begin)?;
let my = mem::replace(&mut self.current_loop_blocks, prev).ok_or(
MirLowerError::ImplementationError("current_loop_blocks is corrupt".to_string()),
@ -1409,27 +1420,9 @@ impl<'ctx> MirLowerCtx<'ctx> {
is_ty_uninhabited_from(&self.infer[expr_id], self.owner.module(self.db.upcast()), self.db)
}
/// This function push `StorageLive` statement for the binding, and applies changes to add `StorageDead` in
/// the appropriated places.
/// This function pushes a `StorageLive` statement for the binding, and applies changes to add `StorageDead` and
/// `Drop` in the appropriate places.
fn push_storage_live(&mut self, b: BindingId, current: BasicBlockId) -> Result<()> {
// Current implementation is wrong. It adds no `StorageDead` at the end of scope, and before each break
// and continue. It just add a `StorageDead` before the `StorageLive`, which is not wrong, but unneeded in
// the proper implementation. Due this limitation, implementing a borrow checker on top of this mir will falsely
// allow this:
//
// ```
// let x;
// loop {
// let y = 2;
// x = &y;
// if some_condition {
// break; // we need to add a StorageDead(y) above this to kill the x borrow
// }
// }
// use(x)
// ```
// But I think this approach work for mutability analysis, as user can't write code which mutates a binding
// after StorageDead, except loops, which are handled by this hack.
let span = self.body.bindings[b]
.definitions
.first()
@ -1437,6 +1430,18 @@ impl<'ctx> MirLowerCtx<'ctx> {
.map(MirSpan::PatId)
.unwrap_or(MirSpan::Unknown);
let l = self.binding_local(b)?;
self.push_storage_live_for_local(l, current, span)
}
fn push_storage_live_for_local(
&mut self,
l: LocalId,
current: BasicBlockId,
span: MirSpan,
) -> Result<()> {
self.drop_scopes.last_mut().unwrap().locals.push(l);
// FIXME: this storage dead is not necessary, but since drop scope handling is broken, we need
// it to avoid false positives in mutability errors
self.push_statement(current, StatementKind::StorageDead(l).with_span(span));
self.push_statement(current, StatementKind::StorageLive(l).with_span(span));
Ok(())
@ -1500,10 +1505,11 @@ impl<'ctx> MirLowerCtx<'ctx> {
}
}
hir_def::hir::Statement::Expr { expr, has_semi: _ } => {
self.push_drop_scope();
let Some((_, c)) = self.lower_expr_as_place(current, *expr, true)? else {
return Ok(None);
};
current = c;
current = self.pop_drop_scope(c);
}
}
}
@ -1521,6 +1527,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
let base_param_count = self.result.param_locals.len();
self.result.param_locals.extend(params.clone().map(|(x, ty)| {
let local_id = self.result.locals.alloc(Local { ty });
self.drop_scopes.last_mut().unwrap().locals.push(local_id);
if let Pat::Bind { id, subpat: None } = self.body[x] {
if matches!(
self.body.bindings[id].mode,
@ -1590,6 +1597,44 @@ impl<'ctx> MirLowerCtx<'ctx> {
}
}
}
fn drop_until_scope(&mut self, scope_index: usize, mut current: BasicBlockId) -> BasicBlockId {
for scope in self.drop_scopes[scope_index..].to_vec().iter().rev() {
self.emit_drop_and_storage_dead_for_scope(scope, &mut current);
}
current
}
fn push_drop_scope(&mut self) {
self.drop_scopes.push(DropScope::default());
}
fn pop_drop_scope(&mut self, mut current: BasicBlockId) -> BasicBlockId {
let scope = self.drop_scopes.pop().unwrap();
self.emit_drop_and_storage_dead_for_scope(&scope, &mut current);
current
}
fn emit_drop_and_storage_dead_for_scope(
&mut self,
scope: &DropScope,
current: &mut Idx<BasicBlock>,
) {
for &l in scope.locals.iter().rev() {
if !self.result.locals[l].ty.clone().is_copy(self.db, self.owner) {
let prev = std::mem::replace(current, self.new_basic_block());
self.set_terminator(
prev,
TerminatorKind::Drop { place: l.into(), target: *current, unwind: None },
MirSpan::Unknown,
);
}
self.push_statement(
*current,
StatementKind::StorageDead(l).with_span(MirSpan::Unknown),
);
}
}
}
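As a rough standalone model (not part of the commit, names are illustrative) of the drop-scope bookkeeping above: scopes form a stack, each scope records locals in definition order, and popping a scope yields them in reverse so drops run last-declared-first.
struct DropScope {
    locals: Vec<u32>,
}

struct Scopes {
    stack: Vec<DropScope>,
}

impl Scopes {
    fn push_scope(&mut self) {
        self.stack.push(DropScope { locals: Vec::new() });
    }
    fn declare(&mut self, local: u32) {
        self.stack.last_mut().unwrap().locals.push(local);
    }
    fn pop_scope(&mut self) -> Vec<u32> {
        // Mirrors emit_drop_and_storage_dead_for_scope: iterate locals in reverse.
        let scope = self.stack.pop().unwrap();
        scope.locals.into_iter().rev().collect()
    }
}

fn main() {
    let mut scopes = Scopes { stack: Vec::new() };
    scopes.push_scope();
    scopes.declare(1);
    scopes.declare(2);
    assert_eq!(scopes.pop_scope(), vec![2, 1]);
}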
fn cast_kind(source_ty: &Ty, target_ty: &Ty) -> Result<CastKind> {
@ -1628,10 +1673,10 @@ pub fn mir_body_for_closure_query(
};
let (captures, _) = infer.closure_info(&closure);
let mut ctx = MirLowerCtx::new(db, owner, &body, &infer);
ctx.result.arg_count = args.len() + 1;
// 0 is return local
ctx.result.locals.alloc(Local { ty: infer[*root].clone() });
ctx.result.locals.alloc(Local { ty: infer[expr].clone() });
let closure_local = ctx.result.locals.alloc(Local { ty: infer[expr].clone() });
ctx.result.param_locals.push(closure_local);
let Some(sig) = ClosureSubst(substs).sig_ty().callable_sig(db) else {
implementation_error!("closure has no callable sig");
};
@ -1639,8 +1684,9 @@ pub fn mir_body_for_closure_query(
args.iter().zip(sig.params().iter()).map(|(x, y)| (*x, y.clone())),
|_| true,
)?;
if let Some(b) = ctx.lower_expr_to_place(*root, return_slot().into(), current)? {
ctx.set_terminator(b, TerminatorKind::Return, (*root).into());
if let Some(current) = ctx.lower_expr_to_place(*root, return_slot().into(), current)? {
let current = ctx.pop_drop_scope(current);
ctx.set_terminator(current, TerminatorKind::Return, (*root).into());
}
let mut upvar_map: FxHashMap<LocalId, Vec<(&CapturedItem, usize)>> = FxHashMap::default();
for (i, capture) in captures.iter().enumerate() {
@ -1761,8 +1807,9 @@ pub fn lower_to_mir(
}
ctx.lower_params_and_bindings([].into_iter(), binding_picker)?
};
if let Some(b) = ctx.lower_expr_to_place(root_expr, return_slot().into(), current)? {
ctx.set_terminator(b, TerminatorKind::Return, root_expr.into());
if let Some(current) = ctx.lower_expr_to_place(root_expr, return_slot().into(), current)? {
let current = ctx.pop_drop_scope(current);
ctx.set_terminator(current, TerminatorKind::Return, root_expr.into());
}
Ok(ctx.result)
}

View file

@ -17,7 +17,7 @@ impl MirLowerCtx<'_> {
prev_block: BasicBlockId,
) -> Result<Option<(Place, BasicBlockId)>> {
let ty = self.expr_ty_without_adjust(expr_id);
let place = self.temp(ty)?;
let place = self.temp(ty, prev_block, expr_id.into())?;
let Some(current) = self.lower_expr_to_place_without_adjust(expr_id, place.into(), prev_block)? else {
return Ok(None);
};
@ -34,7 +34,7 @@ impl MirLowerCtx<'_> {
.last()
.map(|x| x.target.clone())
.unwrap_or_else(|| self.expr_ty_without_adjust(expr_id));
let place = self.temp(ty)?;
let place = self.temp(ty, prev_block, expr_id.into())?;
let Some(current) = self.lower_expr_to_place_with_adjust(expr_id, place.into(), prev_block, adjustments)? else {
return Ok(None);
};
@ -128,12 +128,8 @@ impl MirLowerCtx<'_> {
match &self.body.exprs[expr_id] {
Expr::Path(p) => {
let resolver = resolver_for_expr(self.db.upcast(), self.owner, expr_id);
let Some(pr) = resolver.resolve_path_in_value_ns(self.db.upcast(), p) else {
return Err(MirLowerError::unresolved_path(self.db, p));
};
let pr = match pr {
ResolveValueResult::ValueNs(v) => v,
ResolveValueResult::Partial(..) => return try_rvalue(self),
let Some(pr) = resolver.resolve_path_in_value_ns_fully(self.db.upcast(), p) else {
return try_rvalue(self);
};
match pr {
ValueNs::LocalBinding(pat_id) => {
@ -143,7 +139,7 @@ impl MirLowerCtx<'_> {
let ty = self.expr_ty_without_adjust(expr_id);
let ref_ty =
TyKind::Ref(Mutability::Not, static_lifetime(), ty).intern(Interner);
let mut temp: Place = self.temp(ref_ty)?.into();
let mut temp: Place = self.temp(ref_ty, current, expr_id.into())?.into();
self.push_assignment(
current,
temp.clone(),
@ -252,7 +248,8 @@ impl MirLowerCtx<'_> {
else {
return Ok(None);
};
let l_index = self.temp(self.expr_ty_after_adjustments(*index))?;
let l_index =
self.temp(self.expr_ty_after_adjustments(*index), current, expr_id.into())?;
let Some(current) = self.lower_expr_to_place(*index, l_index.into(), current)? else {
return Ok(None);
};
@ -273,16 +270,12 @@ impl MirLowerCtx<'_> {
span: MirSpan,
index_fn: (FunctionId, Substitution),
) -> Result<Option<(Place, BasicBlockId)>> {
let (mutability, borrow_kind) = match base_ty.as_reference() {
Some((_, _, mutability)) => {
(mutability, BorrowKind::Mut { allow_two_phase_borrow: false })
}
None => (Mutability::Not, BorrowKind::Shared),
let mutability = match base_ty.as_reference() {
Some((_, _, mutability)) => mutability,
None => Mutability::Not,
};
let result_ref = TyKind::Ref(mutability, static_lifetime(), result_ty).intern(Interner);
let ref_place: Place = self.temp(base_ty)?.into();
self.push_assignment(current, ref_place.clone(), Rvalue::Ref(borrow_kind, place), span);
let mut result: Place = self.temp(result_ref)?.into();
let mut result: Place = self.temp(result_ref, current, span)?.into();
let index_fn_op = Operand::const_zst(
TyKind::FnDef(
self.db.intern_callable_def(CallableDefId::FunctionId(index_fn.0)).into(),
@ -290,7 +283,7 @@ impl MirLowerCtx<'_> {
)
.intern(Interner),
);
let Some(current) = self.lower_call(index_fn_op, vec![Operand::Copy(ref_place), index_operand], result.clone(), current, false, span)? else {
let Some(current) = self.lower_call(index_fn_op, vec![Operand::Copy(place), index_operand], result.clone(), current, false, span)? else {
return Ok(None);
};
result.projection.push(ProjectionElem::Deref);
@ -318,7 +311,7 @@ impl MirLowerCtx<'_> {
};
let ty_ref = TyKind::Ref(chalk_mut, static_lifetime(), source_ty.clone()).intern(Interner);
let target_ty_ref = TyKind::Ref(chalk_mut, static_lifetime(), target_ty).intern(Interner);
let ref_place: Place = self.temp(ty_ref)?.into();
let ref_place: Place = self.temp(ty_ref, current, span)?.into();
self.push_assignment(current, ref_place.clone(), Rvalue::Ref(borrow_kind, place), span);
let deref_trait = self
.resolve_lang_item(trait_lang_item)?
@ -336,7 +329,7 @@ impl MirLowerCtx<'_> {
)
.intern(Interner),
);
let mut result: Place = self.temp(target_ty_ref)?.into();
let mut result: Place = self.temp(target_ty_ref, current, span)?.into();
let Some(current) = self.lower_call(deref_fn_op, vec![Operand::Copy(ref_place)], result.clone(), current, false, span)? else {
return Ok(None);
};

View file

@ -107,7 +107,57 @@ impl MirLowerCtx<'_> {
)?
}
Pat::Range { .. } => not_supported!("range pattern"),
Pat::Slice { .. } => not_supported!("slice pattern"),
Pat::Slice { prefix, slice, suffix } => {
pattern_matching_dereference(&mut cond_ty, &mut binding_mode, &mut cond_place);
for (i, &pat) in prefix.iter().enumerate() {
let mut next_place = cond_place.clone();
next_place
.projection
.push(ProjectionElem::ConstantIndex { offset: i as u64, from_end: false });
let cond_ty = self.infer[pat].clone();
(current, current_else) = self.pattern_match(
current,
current_else,
next_place,
cond_ty,
pat,
binding_mode,
)?;
}
if let Some(slice) = slice {
if let Pat::Bind { id, subpat: _ } = self.body[*slice] {
let mut next_place = cond_place.clone();
next_place.projection.push(ProjectionElem::Subslice {
from: prefix.len() as u64,
to: suffix.len() as u64,
});
(current, current_else) = self.pattern_match_binding(
id,
&mut binding_mode,
next_place,
(*slice).into(),
current,
current_else,
)?;
}
}
for (i, &pat) in suffix.iter().enumerate() {
let mut next_place = cond_place.clone();
next_place
.projection
.push(ProjectionElem::ConstantIndex { offset: i as u64, from_end: true });
let cond_ty = self.infer[pat].clone();
(current, current_else) = self.pattern_match(
current,
current_else,
next_place,
cond_ty,
pat,
binding_mode,
)?;
}
(current, current_else)
}
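A plain-Rust reminder (not part of the diff) of the source patterns this new `Pat::Slice` arm lowers: prefix elements become `ConstantIndex { from_end: false }`, suffix elements `ConstantIndex { from_end: true }`, and the middle binding a `Subslice` whose `from`/`to` are the prefix and suffix lengths.
fn main() {
    let xs = [1, 2, 3, 4, 5];
    // `a` and `b` come from the prefix, `z` from the suffix, `rest` from the subslice.
    let [a, b, rest @ .., z] = xs;
    assert_eq!((a, b, z), (1, 2, 5));
    assert_eq!(rest, [3, 4]);
}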
Pat::Path(p) => match self.infer.variant_resolution_for_pat(pattern) {
Some(variant) => self.pattern_matching_variant(
cond_ty,
@ -128,17 +178,19 @@ impl MirLowerCtx<'_> {
match pr {
ResolveValueResult::ValueNs(v) => match v {
ValueNs::ConstId(c) => {
let tmp: Place = self.temp(cond_ty.clone())?.into();
let tmp: Place =
self.temp(cond_ty.clone(), current, pattern.into())?.into();
let span = pattern.into();
self.lower_const(
c,
c.into(),
current,
tmp.clone(),
Substitution::empty(Interner),
span,
cond_ty.clone(),
)?;
let tmp2: Place = self.temp(TyBuilder::bool())?.into();
let tmp2: Place =
self.temp(TyBuilder::bool(), current, pattern.into())?.into();
self.push_assignment(
current,
tmp2.clone(),
@ -180,8 +232,6 @@ impl MirLowerCtx<'_> {
_ => not_supported!("expression path literal"),
},
Pat::Bind { id, subpat } => {
let target_place = self.binding_local(*id)?;
let mode = self.body.bindings[*id].mode;
if let Some(subpat) = subpat {
(current, current_else) = self.pattern_match(
current,
@ -192,26 +242,14 @@ impl MirLowerCtx<'_> {
binding_mode,
)?
}
if matches!(mode, BindingAnnotation::Ref | BindingAnnotation::RefMut) {
binding_mode = mode;
}
self.push_storage_live(*id, current)?;
self.push_assignment(
current,
target_place.into(),
match binding_mode {
BindingAnnotation::Unannotated | BindingAnnotation::Mutable => {
Operand::Copy(cond_place).into()
}
BindingAnnotation::Ref => Rvalue::Ref(BorrowKind::Shared, cond_place),
BindingAnnotation::RefMut => Rvalue::Ref(
BorrowKind::Mut { allow_two_phase_borrow: false },
cond_place,
),
},
self.pattern_match_binding(
*id,
&mut binding_mode,
cond_place,
pattern.into(),
);
(current, current_else)
current,
current_else,
)?
}
Pat::TupleStruct { path: _, args, ellipsis } => {
let Some(variant) = self.infer.variant_resolution_for_pat(pattern) else {
@ -249,6 +287,38 @@ impl MirLowerCtx<'_> {
})
}
fn pattern_match_binding(
&mut self,
id: BindingId,
binding_mode: &mut BindingAnnotation,
cond_place: Place,
span: MirSpan,
current: BasicBlockId,
current_else: Option<BasicBlockId>,
) -> Result<(BasicBlockId, Option<BasicBlockId>)> {
let target_place = self.binding_local(id)?;
let mode = self.body.bindings[id].mode;
if matches!(mode, BindingAnnotation::Ref | BindingAnnotation::RefMut) {
*binding_mode = mode;
}
self.push_storage_live(id, current)?;
self.push_assignment(
current,
target_place.into(),
match *binding_mode {
BindingAnnotation::Unannotated | BindingAnnotation::Mutable => {
Operand::Copy(cond_place).into()
}
BindingAnnotation::Ref => Rvalue::Ref(BorrowKind::Shared, cond_place),
BindingAnnotation::RefMut => {
Rvalue::Ref(BorrowKind::Mut { allow_two_phase_borrow: false }, cond_place)
}
},
span,
);
Ok((current, current_else))
}
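For context (plain Rust, not from the commit), the binding annotations handled in `pattern_match_binding`: an unannotated binding copies or moves the matched place, while `ref` and `ref mut` bindings take a shared or mutable borrow, which is what the `Rvalue::Ref` arms lower to.
fn main() {
    let mut pair = (1, String::from("x"));
    {
        let (a, ref b) = pair; // `a` copies the i32, `b` borrows the String
        let _: i32 = a;
        let _: &String = b;
    }
    let (_, ref mut c) = pair; // `ref mut` takes a mutable borrow
    c.push('!');
    assert_eq!(pair.1, "x!");
}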
fn pattern_match_const(
&mut self,
current_else: Option<BasicBlockId>,
@ -259,7 +329,7 @@ impl MirLowerCtx<'_> {
) -> Result<(BasicBlockId, Option<BasicBlockId>)> {
let then_target = self.new_basic_block();
let else_target = current_else.unwrap_or_else(|| self.new_basic_block());
let discr: Place = self.temp(TyBuilder::bool())?.into();
let discr: Place = self.temp(TyBuilder::bool(), current, pattern.into())?.into();
self.push_assignment(
current,
discr.clone(),
@ -297,7 +367,7 @@ impl MirLowerCtx<'_> {
Ok(match variant {
VariantId::EnumVariantId(v) => {
let e = self.const_eval_discriminant(v)? as u128;
let tmp = self.discr_temp_place();
let tmp = self.discr_temp_place(current);
self.push_assignment(
current,
tmp.clone(),

View file

@ -1812,6 +1812,20 @@ fn main() {
//^ [(); 7]
}"#,
);
check_types(
r#"
trait Foo {
fn x(self);
}
impl Foo for u8 {
fn x(self) {
let t = [0; 4 + 2];
//^ [i32; 6]
}
}
"#,
);
}
#[test]
@ -2137,6 +2151,7 @@ async fn main() {
136..138 '()': ()
150..151 'w': i32
154..166 'const { 92 }': i32
154..166 'const { 92 }': i32
162..164 '92': i32
176..177 't': i32
180..190 ''a: { 92 }': i32

View file

@ -1857,18 +1857,34 @@ impl Function {
self,
db: &dyn HirDatabase,
span_formatter: impl Fn(FileId, TextRange) -> String,
) -> Result<(), String> {
let converter = |e: MirEvalError| {
let mut r = String::new();
_ = e.pretty_print(&mut r, db, &span_formatter);
r
) -> String {
let body = match db.mir_body(self.id.into()) {
Ok(body) => body,
Err(e) => {
let mut r = String::new();
_ = e.pretty_print(&mut r, db, &span_formatter);
return r;
}
};
let body = db
.mir_body(self.id.into())
.map_err(|e| MirEvalError::MirLowerError(self.id.into(), e))
.map_err(converter)?;
interpret_mir(db, &body, Substitution::empty(Interner), false).map_err(converter)?;
Ok(())
let (result, stdout, stderr) =
interpret_mir(db, &body, Substitution::empty(Interner), false);
let mut text = match result {
Ok(_) => "pass".to_string(),
Err(e) => {
let mut r = String::new();
_ = e.pretty_print(&mut r, db, &span_formatter);
r
}
};
if !stdout.is_empty() {
text += "\n--------- stdout ---------\n";
text += &stdout;
}
if !stderr.is_empty() {
text += "\n--------- stderr ---------\n";
text += &stderr;
}
text
}
}
@ -3665,9 +3681,9 @@ impl Type {
}
}
pub fn as_array(&self, _db: &dyn HirDatabase) -> Option<(Type, usize)> {
pub fn as_array(&self, db: &dyn HirDatabase) -> Option<(Type, usize)> {
if let TyKind::Array(ty, len) = &self.ty.kind(Interner) {
try_const_usize(len).map(|x| (self.derived(ty.clone()), x as usize))
try_const_usize(db, len).map(|x| (self.derived(ty.clone()), x as usize))
} else {
None
}

View file

@ -42,8 +42,5 @@ fn find_and_interpret(db: &RootDatabase, position: FilePosition) -> Option<Strin
let path = path.as_deref().unwrap_or("<unknown file>");
format!("file://{path}#{}:{}", line_col.line + 1, line_col.col)
};
match def.eval(db, span_formatter) {
Ok(_) => Some("pass".to_string()),
Err(e) => Some(e),
}
Some(def.eval(db, span_formatter))
}

View file

@ -324,6 +324,13 @@ pub mod mem {
}
}
}
pub mod ptr {
#[lang = "drop_in_place"]
pub unsafe fn drop_in_place<T: ?Sized>(to_drop: *mut T) {
unsafe { drop_in_place(to_drop) }
}
}
// endregion:drop
pub mod ops {
@ -1075,10 +1082,8 @@ pub mod iter {
// region:panic
mod panic {
pub macro panic_2021 {
($($t:tt)+) => (
/* Nothing yet */
),
pub macro panic_2021($($t:tt)+) {
/* Nothing yet */
}
}
// endregion:panic
@ -1158,8 +1163,8 @@ pub mod prelude {
ops::Drop, // :drop
ops::{Fn, FnMut, FnOnce}, // :fn
option::Option::{self, None, Some}, // :option
result::Result::{self, Err, Ok}, // :result
panic, // :panic
result::Result::{self, Err, Ok}, // :result
};
}