MIR episode 6

hkalbasi 2023-05-26 00:45:37 +03:30
parent 505fd09f9e
commit 51368793b4
35 changed files with 1474 additions and 556 deletions

View file

@ -31,8 +31,8 @@ use crate::{
expander::Expander,
hir::{
dummy_expr_id, Array, Binding, BindingAnnotation, BindingId, CaptureBy, ClosureKind, Expr,
ExprId, Label, LabelId, Literal, MatchArm, Movability, Pat, PatId, RecordFieldPat,
RecordLitField, Statement,
ExprId, Label, LabelId, Literal, LiteralOrConst, MatchArm, Movability, Pat, PatId,
RecordFieldPat, RecordLitField, Statement,
},
item_scope::BuiltinShadowMode,
lang_item::LangItem,
@ -295,13 +295,7 @@ impl ExprCollector<'_> {
self.alloc_expr(Expr::While { condition, body, label }, syntax_ptr)
}
ast::Expr::ForExpr(e) => {
let label = e.label().map(|label| self.collect_label(label));
let iterable = self.collect_expr_opt(e.iterable());
let pat = self.collect_pat_top(e.pat());
let body = self.collect_labelled_block_opt(label, e.loop_body());
self.alloc_expr(Expr::For { iterable, pat, body, label }, syntax_ptr)
}
ast::Expr::ForExpr(e) => self.collect_for_loop(syntax_ptr, e),
ast::Expr::CallExpr(e) => {
let is_rustc_box = {
let attrs = e.attrs();
@ -703,6 +697,91 @@ impl ExprCollector<'_> {
expr_id
}
/// Desugar `ast::ForExpr` from: `[opt_ident]: for <pat> in <head> <body>` into:
/// ```ignore (pseudo-rust)
/// match IntoIterator::into_iter(<head>) {
/// mut iter => {
/// [opt_ident]: loop {
/// match Iterator::next(&mut iter) {
/// None => break,
/// Some(<pat>) => <body>,
/// };
/// }
/// }
/// }
/// ```
fn collect_for_loop(&mut self, syntax_ptr: AstPtr<ast::Expr>, e: ast::ForExpr) -> ExprId {
let (into_iter_fn, iter_next_fn, option_some, option_none) = 'if_chain: {
if let Some(into_iter_fn) = LangItem::IntoIterIntoIter.path(self.db, self.krate) {
if let Some(iter_next_fn) = LangItem::IteratorNext.path(self.db, self.krate) {
if let Some(option_some) = LangItem::OptionSome.path(self.db, self.krate) {
if let Some(option_none) = LangItem::OptionNone.path(self.db, self.krate) {
break 'if_chain (into_iter_fn, iter_next_fn, option_some, option_none);
}
}
}
}
// Some of the needed lang items are missing, so we can't desugar
return self.alloc_expr(Expr::Missing, syntax_ptr);
};
let head = self.collect_expr_opt(e.iterable());
let into_iter_fn_expr = self.alloc_expr(Expr::Path(into_iter_fn), syntax_ptr.clone());
let iterator = self.alloc_expr(
Expr::Call {
callee: into_iter_fn_expr,
args: Box::new([head]),
is_assignee_expr: false,
},
syntax_ptr.clone(),
);
let none_arm = MatchArm {
pat: self.alloc_pat_desugared(Pat::Path(Box::new(option_none))),
guard: None,
expr: self.alloc_expr(Expr::Break { expr: None, label: None }, syntax_ptr.clone()),
};
let some_pat = Pat::TupleStruct {
path: Some(Box::new(option_some)),
args: Box::new([self.collect_pat_top(e.pat())]),
ellipsis: None,
};
let some_arm = MatchArm {
pat: self.alloc_pat_desugared(some_pat),
guard: None,
expr: self.collect_expr_opt(e.loop_body().map(|x| x.into())),
};
let iter_name = Name::generate_new_name();
let iter_binding = self.alloc_binding(iter_name.clone(), BindingAnnotation::Mutable);
let iter_expr = self.alloc_expr(Expr::Path(Path::from(iter_name)), syntax_ptr.clone());
let iter_expr_mut = self.alloc_expr(
Expr::Ref { expr: iter_expr, rawness: Rawness::Ref, mutability: Mutability::Mut },
syntax_ptr.clone(),
);
let iter_next_fn_expr = self.alloc_expr(Expr::Path(iter_next_fn), syntax_ptr.clone());
let iter_next_expr = self.alloc_expr(
Expr::Call {
callee: iter_next_fn_expr,
args: Box::new([iter_expr_mut]),
is_assignee_expr: false,
},
syntax_ptr.clone(),
);
let loop_inner = self.alloc_expr(
Expr::Match { expr: iter_next_expr, arms: Box::new([none_arm, some_arm]) },
syntax_ptr.clone(),
);
let label = e.label().map(|label| self.collect_label(label));
let loop_outer =
self.alloc_expr(Expr::Loop { body: loop_inner, label }, syntax_ptr.clone());
let iter_pat = self.alloc_pat_desugared(Pat::Bind { id: iter_binding, subpat: None });
self.alloc_expr(
Expr::Match {
expr: iterator,
arms: Box::new([MatchArm { pat: iter_pat, guard: None, expr: loop_outer }]),
},
syntax_ptr.clone(),
)
}
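For reference, a standalone sketch of the desugaring that `collect_for_loop` performs, written against the std `IntoIterator`/`Iterator` traits rather than rust-analyzer's internal `Expr` tree (plain Rust, not the author's code):

```rust
fn main() {
    let head = vec![1, 2, 3];
    let mut sum = 0;

    // Surface syntax `for x in head { sum += x; }` lowers to roughly:
    match IntoIterator::into_iter(head) {
        mut iter => loop {
            match Iterator::next(&mut iter) {
                None => break,
                Some(x) => sum += x,
            };
        },
    }

    assert_eq!(sum, 6);
}
```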
/// Desugar `ast::TryExpr` from: `<expr>?` into:
/// ```ignore (pseudo-rust)
/// match Try::branch(<expr>) {
@ -1159,22 +1238,12 @@ impl ExprCollector<'_> {
}
#[rustfmt::skip] // https://github.com/rust-lang/rustfmt/issues/5676
ast::Pat::LiteralPat(lit) => 'b: {
if let Some(ast_lit) = lit.literal() {
let mut hir_lit: Literal = ast_lit.kind().into();
if lit.minus_token().is_some() {
let Some(h) = hir_lit.negate() else {
break 'b Pat::Missing;
};
hir_lit = h;
}
let expr = Expr::Literal(hir_lit);
let expr_ptr = AstPtr::new(&ast::Expr::Literal(ast_lit));
let expr_id = self.alloc_expr(expr, expr_ptr);
Pat::Lit(expr_id)
} else {
Pat::Missing
}
},
let Some((hir_lit, ast_lit)) = pat_literal_to_hir(lit) else { break 'b Pat::Missing };
let expr = Expr::Literal(hir_lit);
let expr_ptr = AstPtr::new(&ast::Expr::Literal(ast_lit));
let expr_id = self.alloc_expr(expr, expr_ptr);
Pat::Lit(expr_id)
}
ast::Pat::RestPat(_) => {
// `RestPat` requires special handling and should not be mapped
// to a Pat. Here we are using `Pat::Missing` as a fallback for
@ -1215,8 +1284,30 @@ impl ExprCollector<'_> {
}
None => Pat::Missing,
},
// FIXME: implement
ast::Pat::RangePat(_) => Pat::Missing,
// FIXME: implement in a way that also builds source map and calculates assoc resolutions in type inference.
ast::Pat::RangePat(p) => {
let mut range_part_lower = |p: Option<ast::Pat>| {
p.and_then(|x| match &x {
ast::Pat::LiteralPat(x) => {
Some(Box::new(LiteralOrConst::Literal(pat_literal_to_hir(x)?.0)))
}
ast::Pat::IdentPat(p) => {
let name =
p.name().map(|nr| nr.as_name()).unwrap_or_else(Name::missing);
Some(Box::new(LiteralOrConst::Const(name.into())))
}
ast::Pat::PathPat(p) => p
.path()
.and_then(|path| self.expander.parse_path(self.db, path))
.map(LiteralOrConst::Const)
.map(Box::new),
_ => None,
})
};
let start = range_part_lower(p.start());
let end = range_part_lower(p.end());
Pat::Range { start, end }
}
};
let ptr = AstPtr::new(&pat);
self.alloc_pat(pattern, Either::Left(ptr))
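Both `LiteralOrConst` variants are needed because either end of a range pattern can be a literal or a path to a named constant. A self-contained illustration (the constants `LOW` and `limits::HIGH` are made up for this example):

```rust
const LOW: i32 = -1;
mod limits {
    pub const HIGH: i32 = 100;
}

fn classify(x: i32) -> &'static str {
    match x {
        LOW..=5 => "small",           // const path .. literal
        6..=limits::HIGH => "medium", // literal .. const path
        _ => "other",
    }
}

fn main() {
    assert_eq!(classify(0), "small");
    assert_eq!(classify(42), "medium");
    assert_eq!(classify(1000), "other");
}
```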
@ -1338,6 +1429,18 @@ impl ExprCollector<'_> {
// endregion: labels
}
fn pat_literal_to_hir(lit: &ast::LiteralPat) -> Option<(Literal, ast::Literal)> {
let ast_lit = lit.literal()?;
let mut hir_lit: Literal = ast_lit.kind().into();
if lit.minus_token().is_some() {
let Some(h) = hir_lit.negate() else {
return None;
};
hir_lit = h;
}
Some((hir_lit, ast_lit))
}
impl ExprCollector<'_> {
fn alloc_expr(&mut self, expr: Expr, ptr: ExprPtr) -> ExprId {
let src = self.expander.to_source(ptr);

View file

@ -7,7 +7,8 @@ use syntax::ast::HasName;
use crate::{
hir::{
Array, BindingAnnotation, BindingId, CaptureBy, ClosureKind, Literal, Movability, Statement,
Array, BindingAnnotation, BindingId, CaptureBy, ClosureKind, Literal, LiteralOrConst,
Movability, Statement,
},
pretty::{print_generic_args, print_path, print_type_ref},
type_ref::TypeRef,
@ -184,16 +185,6 @@ impl<'a> Printer<'a> {
self.print_expr(*condition);
self.print_expr(*body);
}
Expr::For { iterable, pat, body, label } => {
if let Some(lbl) = label {
w!(self, "{}: ", self.body[*lbl].name.display(self.db));
}
w!(self, "for ");
self.print_pat(*pat);
w!(self, " in ");
self.print_expr(*iterable);
self.print_expr(*body);
}
Expr::Call { callee, args, is_assignee_expr: _ } => {
self.print_expr(*callee);
w!(self, "(");
@ -534,9 +525,13 @@ impl<'a> Printer<'a> {
w!(self, "}}");
}
Pat::Range { start, end } => {
self.print_expr(*start);
w!(self, "...");
self.print_expr(*end);
if let Some(start) = start {
self.print_literal_or_const(start);
}
w!(self, "..=");
if let Some(end) = end {
self.print_literal_or_const(end);
}
}
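A minimal sketch of the printing rule above, with plain strings standing in for the real `LiteralOrConst` values: either endpoint may be absent, and `..=` is always emitted as the separator.

```rust
fn print_range(start: Option<&str>, end: Option<&str>) -> String {
    let mut out = String::new();
    if let Some(start) = start {
        out.push_str(start);
    }
    out.push_str("..=");
    if let Some(end) = end {
        out.push_str(end);
    }
    out
}

fn main() {
    assert_eq!(print_range(Some("LOW"), Some("HIGH")), "LOW..=HIGH");
    assert_eq!(print_range(None, Some("5")), "..=5");
}
```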
Pat::Slice { prefix, slice, suffix } => {
w!(self, "[");
@ -627,6 +622,13 @@ impl<'a> Printer<'a> {
}
}
fn print_literal_or_const(&mut self, literal_or_const: &LiteralOrConst) {
match literal_or_const {
LiteralOrConst::Literal(l) => self.print_literal(l),
LiteralOrConst::Const(c) => self.print_path(c),
}
}
fn print_literal(&mut self, literal: &Literal) {
match literal {
Literal::String(it) => w!(self, "{:?}", it),

View file

@ -228,12 +228,6 @@ fn compute_expr_scopes(expr: ExprId, body: &Body, scopes: &mut ExprScopes, scope
scopes.set_scope(expr, scope);
compute_block_scopes(statements, *tail, body, scopes, &mut scope);
}
Expr::For { iterable, pat, body: body_expr, label } => {
compute_expr_scopes(*iterable, body, scopes, scope);
let mut scope = scopes.new_labeled_scope(*scope, make_label(label));
scopes.add_pat_bindings(body, scope, *pat);
compute_expr_scopes(*body_expr, body, scopes, &mut scope);
}
Expr::While { condition, body: body_expr, label } => {
let mut scope = scopes.new_labeled_scope(*scope, make_label(label));
compute_expr_scopes(*condition, body, scopes, &mut scope);

View file

@ -96,6 +96,13 @@ pub enum Literal {
Float(FloatTypeWrapper, Option<BuiltinFloat>),
}
#[derive(Debug, Clone, Eq, PartialEq)]
/// Used in range patterns.
pub enum LiteralOrConst {
Literal(Literal),
Const(Path),
}
impl Literal {
pub fn negate(self) -> Option<Self> {
if let Literal::Int(i, k) = self {
@ -189,12 +196,6 @@ pub enum Expr {
body: ExprId,
label: Option<LabelId>,
},
For {
iterable: ExprId,
pat: PatId,
body: ExprId,
label: Option<LabelId>,
},
Call {
callee: ExprId,
args: Box<[ExprId]>,
@ -382,10 +383,6 @@ impl Expr {
f(*condition);
f(*body);
}
Expr::For { iterable, body, .. } => {
f(*iterable);
f(*body);
}
Expr::Call { callee, args, .. } => {
f(*callee);
args.iter().copied().for_each(f);
@ -526,7 +523,7 @@ pub enum Pat {
Tuple { args: Box<[PatId]>, ellipsis: Option<usize> },
Or(Box<[PatId]>),
Record { path: Option<Box<Path>>, args: Box<[RecordFieldPat]>, ellipsis: bool },
Range { start: ExprId, end: ExprId },
Range { start: Option<Box<LiteralOrConst>>, end: Option<Box<LiteralOrConst>> },
Slice { prefix: Box<[PatId]>, slice: Option<PatId>, suffix: Box<[PatId]> },
Path(Box<Path>),
Lit(ExprId),

View file

@ -14,9 +14,9 @@ use stdx::never;
use triomphe::Arc;
use crate::{
db::HirDatabase, infer::InferenceContext, layout::layout_of_ty, lower::ParamLoweringMode,
to_placeholder_idx, utils::Generics, Const, ConstData, ConstScalar, ConstValue, GenericArg,
Interner, MemoryMap, Substitution, Ty, TyBuilder,
db::HirDatabase, infer::InferenceContext, lower::ParamLoweringMode,
mir::monomorphize_mir_body_bad, to_placeholder_idx, utils::Generics, Const, ConstData,
ConstScalar, ConstValue, GenericArg, Interner, MemoryMap, Substitution, Ty, TyBuilder,
};
use super::mir::{interpret_mir, lower_to_mir, pad16, MirEvalError, MirLowerError};
@ -130,14 +130,15 @@ pub fn intern_const_scalar(value: ConstScalar, ty: Ty) -> Const {
/// Interns a constant scalar with the given type
pub fn intern_const_ref(db: &dyn HirDatabase, value: &ConstRef, ty: Ty, krate: CrateId) -> Const {
let layout = db.layout_of_ty(ty.clone(), krate);
let bytes = match value {
ConstRef::Int(i) => {
// FIXME: We should handle failure of layout better.
let size = layout_of_ty(db, &ty, krate).map(|x| x.size.bytes_usize()).unwrap_or(16);
let size = layout.map(|x| x.size.bytes_usize()).unwrap_or(16);
ConstScalar::Bytes(i.to_le_bytes()[0..size].to_vec(), MemoryMap::default())
}
ConstRef::UInt(i) => {
let size = layout_of_ty(db, &ty, krate).map(|x| x.size.bytes_usize()).unwrap_or(16);
let size = layout.map(|x| x.size.bytes_usize()).unwrap_or(16);
ConstScalar::Bytes(i.to_le_bytes()[0..size].to_vec(), MemoryMap::default())
}
ConstRef::Bool(b) => ConstScalar::Bytes(vec![*b as u8], MemoryMap::default()),
@ -206,15 +207,22 @@ pub(crate) fn const_eval_query(
subst: Substitution,
) -> Result<Const, ConstEvalError> {
let body = match def {
GeneralConstId::ConstId(c) => db.mir_body(c.into())?,
GeneralConstId::ConstId(c) => {
db.monomorphized_mir_body(c.into(), subst, db.trait_environment(c.into()))?
}
GeneralConstId::AnonymousConstId(c) => {
let (def, root) = db.lookup_intern_anonymous_const(c);
let body = db.body(def);
let infer = db.infer(def);
Arc::new(lower_to_mir(db, def, &body, &infer, root)?)
Arc::new(monomorphize_mir_body_bad(
db,
lower_to_mir(db, def, &body, &infer, root)?,
subst,
db.trait_environment_for_body(def),
)?)
}
};
let c = interpret_mir(db, &body, subst, false).0?;
let c = interpret_mir(db, &body, false).0?;
Ok(c)
}
@ -222,8 +230,12 @@ pub(crate) fn const_eval_static_query(
db: &dyn HirDatabase,
def: StaticId,
) -> Result<Const, ConstEvalError> {
let body = db.mir_body(def.into())?;
let c = interpret_mir(db, &body, Substitution::empty(Interner), false).0?;
let body = db.monomorphized_mir_body(
def.into(),
Substitution::empty(Interner),
db.trait_environment_for_body(def.into()),
)?;
let c = interpret_mir(db, &body, false).0?;
Ok(c)
}
@ -245,8 +257,12 @@ pub(crate) fn const_eval_discriminant_variant(
};
return Ok(value);
}
let mir_body = db.mir_body(def)?;
let c = interpret_mir(db, &mir_body, Substitution::empty(Interner), false).0?;
let mir_body = db.monomorphized_mir_body(
def,
Substitution::empty(Interner),
db.trait_environment_for_body(def),
)?;
let c = interpret_mir(db, &mir_body, false).0?;
let c = try_const_usize(db, &c).unwrap() as i128;
Ok(c)
}
@ -271,7 +287,7 @@ pub(crate) fn eval_to_const(
}
let infer = ctx.clone().resolve_all();
if let Ok(mir_body) = lower_to_mir(ctx.db, ctx.owner, &ctx.body, &infer, expr) {
if let Ok(result) = interpret_mir(db, &mir_body, Substitution::empty(Interner), true).0 {
if let Ok(result) = interpret_mir(db, &mir_body, true).0 {
return result;
}
}

View file

@ -156,11 +156,23 @@ fn casts() {
check_number(
r#"
//- minicore: coerce_unsized, index, slice
const GOAL: usize = {
let a = &[10, 20, 30, 40] as &[i32];
a.len()
};
"#,
4,
);
check_number(
r#"
//- minicore: coerce_unsized, index, slice
const GOAL: usize = {
let a = [10, 20, 3, 15];
let x: &[i32] = &a;
let y: *const [i32] = x;
let z = y as *const [u8]; // slice fat pointer cast doesn't touch metadata
let q = z as *const str;
let p = q as *const [u8];
let w = unsafe { &*z };
w.len()
};
@ -987,11 +999,15 @@ fn path_pattern_matching() {
const MY_SEASON: Season = Summer;
impl Season {
const FALL: Season = Fall;
}
const fn f(x: Season) -> i32 {
match x {
Spring => 1,
MY_SEASON => 2,
Fall => 3,
Season::FALL => 3,
Winter => 4,
}
}
@ -1031,6 +1047,27 @@ fn pattern_matching_literal() {
);
}
#[test]
fn pattern_matching_range() {
check_number(
r#"
pub const L: i32 = 6;
mod x {
pub const R: i32 = 100;
}
const fn f(x: i32) -> i32 {
match x {
-1..=5 => x * 10,
L..=x::R => x * 100,
_ => x,
}
}
const GOAL: i32 = f(-1) + f(2) + f(100) + f(-2) + f(1000);
"#,
11008,
);
}
#[test]
fn pattern_matching_slice() {
check_number(
@ -1045,6 +1082,22 @@ fn pattern_matching_slice() {
"#,
10 + 4 + 60 + 16,
);
check_number(
r#"
//- minicore: slice, index, coerce_unsized, copy
const fn f(x: &[usize]) -> usize {
match x {
[] => 0,
[a] => *a,
&[a, b] => a + b,
[a, b @ .., c, d] => *a + b.len() + *c + *d,
}
}
const GOAL: usize = f(&[]) + f(&[10]) + f(&[100, 100])
+ f(&[1000, 1000, 1000]) + f(&[10000, 57, 34, 46, 10000, 10000]);
"#,
33213,
);
}
#[test]
@ -2105,6 +2158,26 @@ fn const_generic_subst_fn() {
"#,
11,
);
check_number(
r#"
fn f<const N: usize>(x: [i32; N]) -> usize {
N
}
trait ArrayExt {
fn f(self) -> usize;
}
impl<T, const N: usize> ArrayExt for [T; N] {
fn g(self) -> usize {
f(self)
}
}
const GOAL: usize = f([1, 2, 5]);
"#,
3,
);
}
#[test]

View file

@ -67,6 +67,30 @@ fn wrapping_add() {
);
}
#[test]
fn saturating_add() {
check_number(
r#"
extern "rust-intrinsic" {
pub fn saturating_add<T>(a: T, b: T) -> T;
}
const GOAL: u8 = saturating_add(10, 250);
"#,
255,
);
check_number(
r#"
extern "rust-intrinsic" {
pub fn saturating_add<T>(a: T, b: T) -> T;
}
const GOAL: i8 = saturating_add(5, 8);
"#,
13,
);
}
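The clamping behaviour these tests encode, shown with the std `saturating_add` methods that the intrinsic mirrors:

```rust
fn main() {
    assert_eq!(10u8.saturating_add(250), 255); // clamped at u8::MAX
    assert_eq!(5i8.saturating_add(8), 13);     // no clamping needed
}
```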
#[test]
fn allocator() {
check_number(

View file

@ -41,6 +41,23 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
#[salsa::invoke(crate::mir::mir_body_for_closure_query)]
fn mir_body_for_closure(&self, def: ClosureId) -> Result<Arc<MirBody>, MirLowerError>;
#[salsa::invoke(crate::mir::monomorphized_mir_body_query)]
#[salsa::cycle(crate::mir::monomorphized_mir_body_recover)]
fn monomorphized_mir_body(
&self,
def: DefWithBodyId,
subst: Substitution,
env: Arc<crate::TraitEnvironment>,
) -> Result<Arc<MirBody>, MirLowerError>;
#[salsa::invoke(crate::mir::monomorphized_mir_body_for_closure_query)]
fn monomorphized_mir_body_for_closure(
&self,
def: ClosureId,
subst: Substitution,
env: Arc<crate::TraitEnvironment>,
) -> Result<Arc<MirBody>, MirLowerError>;
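A toy model of what "monomorphized" means for these queries, using hypothetical stand-in types rather than rust-analyzer's real `MirBody`/`Substitution`: generic parameters in the body's types are replaced by the concrete arguments, so later consumers such as the evaluator never see placeholders.

```rust
#[derive(Clone, Debug, PartialEq)]
enum Ty {
    I32,
    Param(usize), // placeholder for the N-th generic parameter
}

struct Body {
    local_tys: Vec<Ty>,
}

fn monomorphize(body: &Body, subst: &[Ty]) -> Body {
    Body {
        local_tys: body
            .local_tys
            .iter()
            .map(|ty| match ty {
                Ty::Param(i) => subst[*i].clone(),
                other => other.clone(),
            })
            .collect(),
    }
}

fn main() {
    let generic = Body { local_tys: vec![Ty::Param(0), Ty::I32] };
    let mono = monomorphize(&generic, &[Ty::I32]);
    assert_eq!(mono.local_tys, vec![Ty::I32, Ty::I32]);
}
```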
#[salsa::invoke(crate::mir::borrowck_query)]
fn borrowck(&self, def: DefWithBodyId) -> Result<Arc<[BorrowckResult]>, MirLowerError>;
@ -84,7 +101,11 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
def: AdtId,
subst: Substitution,
krate: CrateId,
) -> Result<Layout, LayoutError>;
) -> Result<Arc<Layout>, LayoutError>;
#[salsa::invoke(crate::layout::layout_of_ty_query)]
#[salsa::cycle(crate::layout::layout_of_ty_recover)]
fn layout_of_ty(&self, ty: Ty, krate: CrateId) -> Result<Arc<Layout>, LayoutError>;
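As a reminder of what the cached `Layout` answers, a plain-Rust check of size and alignment for a `repr(C)` struct; the query computes the same kind of data for the types rust-analyzer sees.

```rust
fn main() {
    #[repr(C)]
    struct Pair {
        a: u8,
        b: u32,
    }
    // 1 byte for `a`, 3 bytes of padding, 4 bytes for `b`.
    assert_eq!(std::mem::size_of::<Pair>(), 8);
    assert_eq!(std::mem::align_of::<Pair>(), 4);
}
```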
#[salsa::invoke(crate::layout::target_data_layout_query)]
fn target_data_layout(&self, krate: CrateId) -> Option<Arc<TargetDataLayout>>;

View file

@ -26,9 +26,7 @@ use stdx::never;
use crate::{
db::HirDatabase,
from_assoc_type_id, from_foreign_def_id, from_placeholder_idx,
layout::layout_of_ty,
lt_from_placeholder_idx,
from_assoc_type_id, from_foreign_def_id, from_placeholder_idx, lt_from_placeholder_idx,
mapping::from_chalk,
mir::pad16,
primitive, to_assoc_type_id,
@ -309,6 +307,8 @@ pub enum ClosureStyle {
RANotation,
/// `{closure#14825}`, useful for some diagnostics (like type mismatch) and internal usage.
ClosureWithId,
/// `{closure#14825}<i32, ()>`, useful for internal usage.
ClosureWithSubst,
/// `…`, which is the `TYPE_HINT_TRUNCATION`
Hide,
}
@ -507,7 +507,7 @@ fn render_const_scalar(
_ => f.write_str("<ref-not-supported>"),
},
chalk_ir::TyKind::Tuple(_, subst) => {
let Ok(layout) = layout_of_ty(f.db, ty, krate) else {
let Ok(layout) = f.db.layout_of_ty( ty.clone(), krate) else {
return f.write_str("<layout-error>");
};
f.write_str("(")?;
@ -520,7 +520,7 @@ fn render_const_scalar(
}
let ty = ty.assert_ty_ref(Interner); // Tuple only has type argument
let offset = layout.fields.offset(id).bytes_usize();
let Ok(layout) = layout_of_ty(f.db, &ty, krate) else {
let Ok(layout) = f.db.layout_of_ty(ty.clone(), krate) else {
f.write_str("<layout-error>")?;
continue;
};
@ -545,7 +545,7 @@ fn render_const_scalar(
.offset(u32::from(id.into_raw()) as usize)
.bytes_usize();
let ty = field_types[id].clone().substitute(Interner, subst);
let Ok(layout) = layout_of_ty(f.db, &ty, krate) else {
let Ok(layout) = f.db.layout_of_ty(ty.clone(), krate) else {
return f.write_str("<layout-error>");
};
let size = layout.size.bytes_usize();
@ -931,6 +931,10 @@ impl HirDisplay for Ty {
ClosureStyle::ClosureWithId => {
return write!(f, "{{closure#{:?}}}", id.0.as_u32())
}
ClosureStyle::ClosureWithSubst => {
write!(f, "{{closure#{:?}}}", id.0.as_u32())?;
return hir_fmt_generics(f, substs, None);
}
_ => (),
}
let sig = ClosureSubst(substs).sig_ty().callable_sig(db);

View file

@ -1153,22 +1153,6 @@ impl<'a> InferenceContext<'a> {
self.db.lang_item(krate, item)
}
fn resolve_into_iter_item(&self) -> Option<TypeAliasId> {
let ItemContainerId::TraitId(trait_) = self.resolve_lang_item(LangItem::IntoIterIntoIter)?
.as_function()?
.lookup(self.db.upcast()).container
else { return None };
self.db.trait_data(trait_).associated_type_by_name(&name![IntoIter])
}
fn resolve_iterator_item(&self) -> Option<TypeAliasId> {
let ItemContainerId::TraitId(trait_) = self.resolve_lang_item(LangItem::IteratorNext)?
.as_function()?
.lookup(self.db.upcast()).container
else { return None };
self.db.trait_data(trait_).associated_type_by_name(&name![Item])
}
fn resolve_output_on(&self, trait_: TraitId) -> Option<TypeAliasId> {
self.db.trait_data(trait_).associated_type_by_name(&name![Output])
}

View file

@ -5,7 +5,7 @@ use std::{cmp, collections::HashMap, convert::Infallible, mem};
use chalk_ir::{
cast::Cast,
fold::{FallibleTypeFolder, TypeFoldable},
AliasEq, AliasTy, BoundVar, DebruijnIndex, FnSubst, Mutability, TyKind, WhereClause,
AliasEq, AliasTy, BoundVar, ConstData, DebruijnIndex, FnSubst, Mutability, TyKind, WhereClause,
};
use hir_def::{
data::adt::VariantData,
@ -29,8 +29,8 @@ use crate::{
static_lifetime, to_chalk_trait_id,
traits::FnTrait,
utils::{self, generics, pattern_matching_dereference_count, Generics},
Adjust, Adjustment, Binders, ChalkTraitId, ClosureId, DynTy, FnPointer, FnSig, Interner,
Substitution, Ty, TyExt,
Adjust, Adjustment, Binders, ChalkTraitId, ClosureId, ConstValue, DynTy, FnPointer, FnSig,
Interner, Substitution, Ty, TyExt,
};
use super::{Expectation, InferenceContext};
@ -259,6 +259,23 @@ impl CapturedItemWithoutTy {
Interner
}
fn try_fold_free_placeholder_const(
&mut self,
ty: chalk_ir::Ty<Interner>,
idx: chalk_ir::PlaceholderIndex,
outer_binder: DebruijnIndex,
) -> Result<chalk_ir::Const<Interner>, Self::Error> {
let x = from_placeholder_idx(self.db, idx);
let Some(idx) = self.generics.param_idx(x) else {
return Err(());
};
Ok(ConstData {
ty,
value: ConstValue::BoundVar(BoundVar::new(outer_binder, idx)),
}
.intern(Interner))
}
fn try_fold_free_placeholder_ty(
&mut self,
idx: chalk_ir::PlaceholderIndex,
@ -490,8 +507,7 @@ impl InferenceContext<'_> {
self.consume_expr(*tail);
}
}
Expr::While { condition, body, label: _ }
| Expr::For { iterable: condition, pat: _, body, label: _ } => {
Expr::While { condition, body, label: _ } => {
self.consume_expr(*condition);
self.consume_expr(*body);
}

View file

@ -211,24 +211,6 @@ impl<'a> InferenceContext<'a> {
self.diverges = Diverges::Maybe;
TyBuilder::unit()
}
&Expr::For { iterable, body, pat, label } => {
let iterable_ty = self.infer_expr(iterable, &Expectation::none());
let into_iter_ty =
self.resolve_associated_type(iterable_ty, self.resolve_into_iter_item());
let pat_ty = self
.resolve_associated_type(into_iter_ty.clone(), self.resolve_iterator_item());
self.result.type_of_for_iterator.insert(tgt_expr, into_iter_ty);
self.infer_top_pat(pat, &pat_ty);
self.with_breakable_ctx(BreakableKind::Loop, None, label, |this| {
this.infer_expr(body, &Expectation::HasType(TyBuilder::unit()));
});
// the body may not run, so it diverging doesn't mean we diverge
self.diverges = Diverges::Maybe;
TyBuilder::unit()
}
Expr::Closure { body, args, ret_type, arg_types, closure_kind, capture_by: _ } => {
assert_eq!(args.len(), arg_types.len());

View file

@ -69,8 +69,7 @@ impl<'a> InferenceContext<'a> {
self.infer_mut_expr(*tail, Mutability::Not);
}
}
&Expr::For { iterable: c, pat: _, body, label: _ }
| &Expr::While { condition: c, body, label: _ } => {
&Expr::While { condition: c, body, label: _ } => {
self.infer_mut_expr(c, Mutability::Not);
self.infer_mut_expr(body, Mutability::Not);
}

View file

@ -255,9 +255,9 @@ impl<'a> InferenceContext<'a> {
self.infer_slice_pat(&expected, prefix, slice, suffix, default_bm)
}
Pat::Wild => expected.clone(),
Pat::Range { start, end } => {
let start_ty = self.infer_expr(*start, &Expectation::has_type(expected.clone()));
self.infer_expr(*end, &Expectation::has_type(start_ty))
Pat::Range { .. } => {
// FIXME: do some checks here.
expected.clone()
}
&Pat::Lit(expr) => {
// Don't emit type mismatches again, the expression lowering already did that.

View file

@ -77,14 +77,18 @@ impl<'a> LayoutCalculator for LayoutCx<'a> {
}
}
pub fn layout_of_ty(db: &dyn HirDatabase, ty: &Ty, krate: CrateId) -> Result<Layout, LayoutError> {
pub fn layout_of_ty_query(
db: &dyn HirDatabase,
ty: Ty,
krate: CrateId,
) -> Result<Arc<Layout>, LayoutError> {
let Some(target) = db.target_data_layout(krate) else { return Err(LayoutError::TargetLayoutNotAvailable) };
let cx = LayoutCx { krate, target: &target };
let dl = &*cx.current_data_layout();
let trait_env = Arc::new(TraitEnvironment::empty(krate));
let ty = normalize(db, trait_env, ty.clone());
let layout = match ty.kind(Interner) {
TyKind::Adt(AdtId(def), subst) => db.layout_of_adt(*def, subst.clone(), krate)?,
let result = match ty.kind(Interner) {
TyKind::Adt(AdtId(def), subst) => return db.layout_of_adt(*def, subst.clone(), krate),
TyKind::Scalar(s) => match s {
chalk_ir::Scalar::Bool => Layout::scalar(
dl,
@ -141,9 +145,9 @@ pub fn layout_of_ty(db: &dyn HirDatabase, ty: &Ty, krate: CrateId) -> Result<Lay
let fields = tys
.iter(Interner)
.map(|k| layout_of_ty(db, k.assert_ty_ref(Interner), krate))
.map(|k| db.layout_of_ty(k.assert_ty_ref(Interner).clone(), krate))
.collect::<Result<Vec<_>, _>>()?;
let fields = fields.iter().collect::<Vec<_>>();
let fields = fields.iter().map(|x| &**x).collect::<Vec<_>>();
let fields = fields.iter().collect::<Vec<_>>();
cx.univariant(dl, &fields, &ReprOptions::default(), kind).ok_or(LayoutError::Unknown)?
}
@ -151,7 +155,7 @@ pub fn layout_of_ty(db: &dyn HirDatabase, ty: &Ty, krate: CrateId) -> Result<Lay
let count = try_const_usize(db, &count).ok_or(LayoutError::UserError(
"unevaluated or mistyped const generic parameter".to_string(),
))? as u64;
let element = layout_of_ty(db, element, krate)?;
let element = db.layout_of_ty(element.clone(), krate)?;
let size = element.size.checked_mul(count, dl).ok_or(LayoutError::SizeOverflow)?;
let abi = if count != 0 && matches!(element.abi, Abi::Uninhabited) {
@ -172,7 +176,7 @@ pub fn layout_of_ty(db: &dyn HirDatabase, ty: &Ty, krate: CrateId) -> Result<Lay
}
}
TyKind::Slice(element) => {
let element = layout_of_ty(db, element, krate)?;
let element = db.layout_of_ty(element.clone(), krate)?;
Layout {
variants: Variants::Single { index: struct_variant_idx() },
fields: FieldsShape::Array { stride: element.size, count: 0 },
@ -206,7 +210,7 @@ pub fn layout_of_ty(db: &dyn HirDatabase, ty: &Ty, krate: CrateId) -> Result<Lay
}
_ => {
// pointee is sized
return Ok(Layout::scalar(dl, data_ptr));
return Ok(Arc::new(Layout::scalar(dl, data_ptr)));
}
};
@ -248,7 +252,7 @@ pub fn layout_of_ty(db: &dyn HirDatabase, ty: &Ty, krate: CrateId) -> Result<Lay
match impl_trait_id {
crate::ImplTraitId::ReturnTypeImplTrait(func, idx) => {
let infer = db.infer(func.into());
layout_of_ty(db, &infer.type_of_rpit[idx], krate)?
return db.layout_of_ty(infer.type_of_rpit[idx].clone(), krate);
}
crate::ImplTraitId::AsyncBlockTypeImplTrait(_, _) => {
return Err(LayoutError::NotImplemented)
@ -262,14 +266,13 @@ pub fn layout_of_ty(db: &dyn HirDatabase, ty: &Ty, krate: CrateId) -> Result<Lay
let fields = captures
.iter()
.map(|x| {
layout_of_ty(
db,
&x.ty.clone().substitute(Interner, ClosureSubst(subst).parent_subst()),
db.layout_of_ty(
x.ty.clone().substitute(Interner, ClosureSubst(subst).parent_subst()),
krate,
)
})
.collect::<Result<Vec<_>, _>>()?;
let fields = fields.iter().collect::<Vec<_>>();
let fields = fields.iter().map(|x| &**x).collect::<Vec<_>>();
let fields = fields.iter().collect::<Vec<_>>();
cx.univariant(dl, &fields, &ReprOptions::default(), StructKind::AlwaysSized)
.ok_or(LayoutError::Unknown)?
@ -284,7 +287,16 @@ pub fn layout_of_ty(db: &dyn HirDatabase, ty: &Ty, krate: CrateId) -> Result<Lay
| TyKind::BoundVar(_)
| TyKind::InferenceVar(_, _) => return Err(LayoutError::HasPlaceholder),
};
Ok(layout)
Ok(Arc::new(result))
}
pub fn layout_of_ty_recover(
_: &dyn HirDatabase,
_: &[String],
_: &Ty,
_: &CrateId,
) -> Result<Arc<Layout>, LayoutError> {
user_error!("infinite sized recursive type");
}
fn layout_of_unit(cx: &LayoutCx<'_>, dl: &TargetDataLayout) -> Result<Layout, LayoutError> {

View file

@ -10,6 +10,7 @@ use hir_def::{
};
use la_arena::RawIdx;
use smallvec::SmallVec;
use triomphe::Arc;
use crate::{
db::HirDatabase,
@ -18,7 +19,7 @@ use crate::{
Substitution,
};
use super::{layout_of_ty, LayoutCx};
use super::LayoutCx;
pub(crate) fn struct_variant_idx() -> RustcEnumVariantIdx {
RustcEnumVariantIdx(LocalEnumVariantId::from_raw(RawIdx::from(0)))
@ -29,14 +30,14 @@ pub fn layout_of_adt_query(
def: AdtId,
subst: Substitution,
krate: CrateId,
) -> Result<Layout, LayoutError> {
) -> Result<Arc<Layout>, LayoutError> {
let Some(target) = db.target_data_layout(krate) else { return Err(LayoutError::TargetLayoutNotAvailable) };
let cx = LayoutCx { krate, target: &target };
let dl = cx.current_data_layout();
let handle_variant = |def: VariantId, var: &VariantData| {
var.fields()
.iter()
.map(|(fd, _)| layout_of_ty(db, &field_ty(db, def, fd, &subst), cx.krate))
.map(|(fd, _)| db.layout_of_ty(field_ty(db, def, fd, &subst), cx.krate))
.collect::<Result<Vec<_>, _>>()
};
let (variants, repr) = match def {
@ -67,11 +68,13 @@ pub fn layout_of_adt_query(
(r, data.repr.unwrap_or_default())
}
};
let variants =
variants.iter().map(|x| x.iter().collect::<Vec<_>>()).collect::<SmallVec<[_; 1]>>();
let variants = variants
.iter()
.map(|x| x.iter().map(|x| &**x).collect::<Vec<_>>())
.collect::<SmallVec<[_; 1]>>();
let variants = variants.iter().map(|x| x.iter().collect()).collect();
if matches!(def, AdtId::UnionId(..)) {
cx.layout_of_union(&repr, &variants).ok_or(LayoutError::Unknown)
let result = if matches!(def, AdtId::UnionId(..)) {
cx.layout_of_union(&repr, &variants).ok_or(LayoutError::Unknown)?
} else {
cx.layout_of_struct_or_enum(
&repr,
@ -103,8 +106,9 @@ pub fn layout_of_adt_query(
.and_then(|x| x.last().map(|x| x.is_unsized()))
.unwrap_or(true),
)
.ok_or(LayoutError::SizeOverflow)
}
.ok_or(LayoutError::SizeOverflow)?
};
Ok(Arc::new(result))
}
fn layout_scalar_valid_range(db: &dyn HirDatabase, def: AdtId) -> (Bound<u128>, Bound<u128>) {
@ -129,7 +133,7 @@ pub fn layout_of_adt_recover(
_: &AdtId,
_: &Substitution,
_: &CrateId,
) -> Result<Layout, LayoutError> {
) -> Result<Arc<Layout>, LayoutError> {
user_error!("infinite sized recursive type");
}

View file

@ -3,6 +3,7 @@ use std::collections::HashMap;
use base_db::fixture::WithFixture;
use chalk_ir::{AdtId, TyKind};
use hir_def::db::DefDatabase;
use triomphe::Arc;
use crate::{
db::HirDatabase,
@ -11,15 +12,13 @@ use crate::{
Interner, Substitution,
};
use super::layout_of_ty;
mod closure;
fn current_machine_data_layout() -> String {
project_model::target_data_layout::get(None, None, &HashMap::default()).unwrap()
}
fn eval_goal(ra_fixture: &str, minicore: &str) -> Result<Layout, LayoutError> {
fn eval_goal(ra_fixture: &str, minicore: &str) -> Result<Arc<Layout>, LayoutError> {
let target_data_layout = current_machine_data_layout();
let ra_fixture = format!(
"{minicore}//- /main.rs crate:test target_data_layout:{target_data_layout}\n{ra_fixture}",
@ -47,11 +46,11 @@ fn eval_goal(ra_fixture: &str, minicore: &str) -> Result<Layout, LayoutError> {
})
.unwrap();
let goal_ty = TyKind::Adt(AdtId(adt_id), Substitution::empty(Interner)).intern(Interner);
layout_of_ty(&db, &goal_ty, module_id.krate())
db.layout_of_ty(goal_ty, module_id.krate())
}
/// A version of `eval_goal` for types that can not be expressed in ADTs, like closures and `impl Trait`
fn eval_expr(ra_fixture: &str, minicore: &str) -> Result<Layout, LayoutError> {
fn eval_expr(ra_fixture: &str, minicore: &str) -> Result<Arc<Layout>, LayoutError> {
let target_data_layout = current_machine_data_layout();
let ra_fixture = format!(
"{minicore}//- /main.rs crate:test target_data_layout:{target_data_layout}\nfn main(){{let goal = {{{ra_fixture}}};}}",
@ -75,7 +74,7 @@ fn eval_expr(ra_fixture: &str, minicore: &str) -> Result<Layout, LayoutError> {
let b = hir_body.bindings.iter().find(|x| x.1.name.to_smol_str() == "goal").unwrap().0;
let infer = db.infer(adt_id.into());
let goal_ty = infer.type_of_binding[b].clone();
layout_of_ty(&db, &goal_ty, module_id.krate())
db.layout_of_ty(goal_ty, module_id.krate())
}
#[track_caller]

View file

@ -782,7 +782,9 @@ fn find_matching_impl(
.into_iter()
.map(|b| b.cast(Interner));
let goal = crate::Goal::all(Interner, wcs);
table.try_obligation(goal).map(|_| (impl_data, table.resolve_completely(impl_substs)))
table.try_obligation(goal.clone())?;
table.register_obligation(goal);
Some((impl_data, table.resolve_completely(impl_substs)))
})
})
}

View file

@ -19,12 +19,17 @@ mod eval;
mod lower;
mod borrowck;
mod pretty;
mod monomorphization;
pub use borrowck::{borrowck_query, BorrowckResult, MutabilityReason};
pub use eval::{interpret_mir, pad16, Evaluator, MirEvalError, VTableMap};
pub use lower::{
lower_to_mir, mir_body_for_closure_query, mir_body_query, mir_body_recover, MirLowerError,
};
pub use monomorphization::{
monomorphize_mir_body_bad, monomorphized_mir_body_for_closure_query,
monomorphized_mir_body_query, monomorphized_mir_body_recover,
};
use smallvec::{smallvec, SmallVec};
use stdx::{impl_from, never};
@ -37,7 +42,7 @@ fn return_slot() -> LocalId {
LocalId::from_raw(RawIdx::from(0))
}
#[derive(Debug, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Local {
pub ty: Ty,
}
@ -780,7 +785,6 @@ pub enum CastKind {
FloatToInt,
FloatToFloat,
IntToFloat,
PtrToPtr,
FnPtrToPtr,
}
@ -952,7 +956,7 @@ pub struct Statement {
pub span: MirSpan,
}
#[derive(Debug, Default, PartialEq, Eq)]
#[derive(Debug, Default, Clone, PartialEq, Eq)]
pub struct BasicBlock {
/// List of statements in this block.
pub statements: Vec<Statement>,
@ -974,7 +978,7 @@ pub struct BasicBlock {
pub is_cleanup: bool,
}
#[derive(Debug, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct MirBody {
pub basic_blocks: Arena<BasicBlock>,
pub locals: Arena<Local>,

View file

@ -3,20 +3,17 @@
use std::{borrow::Cow, collections::HashMap, fmt::Write, iter, ops::Range};
use base_db::{CrateId, FileId};
use chalk_ir::{
fold::{FallibleTypeFolder, TypeFoldable, TypeSuperFoldable},
DebruijnIndex, Mutability, ProjectionTy,
};
use chalk_ir::Mutability;
use either::Either;
use hir_def::{
builtin_type::BuiltinType,
data::adt::{StructFlags, VariantData},
lang_item::{lang_attr, LangItem},
layout::{TagEncoding, Variants},
AdtId, DefWithBodyId, EnumVariantId, FunctionId, GeneralConstId, HasModule, ItemContainerId,
Lookup, StaticId, TypeOrConstParamId, VariantId,
AdtId, DefWithBodyId, EnumVariantId, FunctionId, HasModule, ItemContainerId, Lookup, StaticId,
VariantId,
};
use hir_expand::{name::Name, InFile};
use hir_expand::InFile;
use intern::Interned;
use la_arena::ArenaMap;
use rustc_hash::{FxHashMap, FxHashSet};
@ -27,14 +24,13 @@ use crate::{
consteval::{intern_const_scalar, try_const_usize, ConstEvalError},
db::HirDatabase,
display::{ClosureStyle, HirDisplay},
from_placeholder_idx,
infer::{normalize, PointerCast},
layout::{layout_of_ty, Layout, LayoutError, RustcEnumVariantIdx},
infer::PointerCast,
layout::{Layout, LayoutError, RustcEnumVariantIdx},
mapping::from_chalk,
method_resolution::{is_dyn_method, lookup_impl_const, lookup_impl_method},
method_resolution::{is_dyn_method, lookup_impl_method},
name, static_lifetime,
traits::FnTrait,
utils::{generics, ClosureSubst, Generics},
utils::ClosureSubst,
CallableDefId, ClosureId, Const, ConstScalar, FnDefId, GenericArgData, Interner, MemoryMap,
Substitution, TraitEnvironment, Ty, TyBuilder, TyExt, TyKind,
};
@ -279,7 +275,6 @@ pub enum MirEvalError {
/// Means that code had undefined behavior. We don't try to actively detect UB, but if it was detected
/// then use this type of error.
UndefinedBehavior(String),
GenericArgNotProvided(TypeOrConstParamId, Substitution),
Panic(String),
MirLowerError(FunctionId, MirLowerError),
MirLowerErrorForClosure(ClosureId, MirLowerError),
@ -348,20 +343,6 @@ impl MirEvalError {
ty.display(db).with_closure_style(ClosureStyle::ClosureWithId).to_string()
)?;
}
MirEvalError::GenericArgNotProvided(id, subst) => {
let parent = id.parent;
let param = &db.generic_params(parent).type_or_consts[id.local_id];
writeln!(
f,
"Generic arg not provided for {}",
param.name().unwrap_or(&Name::missing()).display(db.upcast())
)?;
writeln!(f, "Provided args: [")?;
for g in subst.iter(Interner) {
write!(f, " {},", g.display(db).to_string())?;
}
writeln!(f, "]")?;
}
MirEvalError::MirLowerError(func, err) => {
let function_name = db.function_data(*func);
writeln!(
@ -416,7 +397,6 @@ impl std::fmt::Debug for MirEvalError {
Self::TypeIsUnsized(ty, it) => write!(f, "{ty:?} is unsized. {it} should be sized."),
Self::ExecutionLimitExceeded => write!(f, "execution limit exceeded"),
Self::StackOverflow => write!(f, "stack overflow"),
Self::GenericArgNotProvided(..) => f.debug_tuple("GenericArgNotProvided").finish(),
Self::MirLowerError(arg0, arg1) => {
f.debug_tuple("MirLowerError").field(arg0).field(arg1).finish()
}
@ -471,14 +451,12 @@ impl DropFlags {
struct Locals<'a> {
ptr: &'a ArenaMap<LocalId, Interval>,
body: &'a MirBody,
subst: &'a Substitution,
drop_flags: DropFlags,
}
pub fn interpret_mir(
db: &dyn HirDatabase,
body: &MirBody,
subst: Substitution,
// FIXME: This is a workaround. Ideally, const generics should have a separate body (issue #7434), but now
// they share their body with their parent, so in MIR lowering we have locals of the parent body, which
// might have placeholders. With this argument, we (wrongly) assume that every placeholder type has
@ -489,17 +467,11 @@ pub fn interpret_mir(
let ty = body.locals[return_slot()].ty.clone();
let mut evaluator = Evaluator::new(db, body, assert_placeholder_ty_is_unused);
let x: Result<Const> = (|| {
let ty = evaluator.ty_filler(&ty, &subst, body.owner)?;
let bytes = evaluator.interpret_mir(&body, None.into_iter(), subst.clone())?;
let bytes = evaluator.interpret_mir(&body, None.into_iter())?;
let mut memory_map = evaluator.create_memory_map(
&bytes,
&ty,
&Locals {
ptr: &ArenaMap::new(),
body: &body,
subst: &subst,
drop_flags: DropFlags::default(),
},
&Locals { ptr: &ArenaMap::new(), body: &body, drop_flags: DropFlags::default() },
)?;
memory_map.vtable = evaluator.vtable_map.clone();
return Ok(intern_const_scalar(ConstScalar::Bytes(bytes, memory_map), ty));
@ -565,8 +537,7 @@ impl Evaluator<'_> {
locals: &'a Locals<'a>,
) -> Result<(Address, Ty, Option<IntervalOrOwned>)> {
let mut addr = locals.ptr[p.local].addr;
let mut ty: Ty =
self.ty_filler(&locals.body.locals[p.local].ty, locals.subst, locals.body.owner)?;
let mut ty: Ty = locals.body.locals[p.local].ty.clone();
let mut metadata: Option<IntervalOrOwned> = None; // locals are always sized
for proj in &*p.projection {
let prev_ty = ty.clone();
@ -689,17 +660,13 @@ impl Evaluator<'_> {
Ok((addr, ty, metadata))
}
fn layout(&self, ty: &Ty) -> Result<Layout> {
layout_of_ty(self.db, ty, self.crate_id)
fn layout(&self, ty: &Ty) -> Result<Arc<Layout>> {
self.db
.layout_of_ty(ty.clone(), self.crate_id)
.map_err(|e| MirEvalError::LayoutError(e, ty.clone()))
}
fn layout_filled(&self, ty: &Ty, locals: &Locals<'_>) -> Result<Layout> {
let ty = &self.ty_filler(ty, locals.subst, locals.body.owner)?;
self.layout(ty)
}
fn layout_adt(&self, adt: AdtId, subst: Substitution) -> Result<Layout> {
fn layout_adt(&self, adt: AdtId, subst: Substitution) -> Result<Arc<Layout>> {
self.db.layout_of_adt(adt, subst.clone(), self.crate_id).map_err(|e| {
MirEvalError::LayoutError(e, TyKind::Adt(chalk_ir::AdtId(adt), subst).intern(Interner))
})
@ -735,7 +702,6 @@ impl Evaluator<'_> {
&mut self,
body: &MirBody,
args: impl Iterator<Item = Vec<u8>>,
subst: Substitution,
) -> Result<Vec<u8>> {
if let Some(x) = self.stack_depth_limit.checked_sub(1) {
self.stack_depth_limit = x;
@ -743,12 +709,8 @@ impl Evaluator<'_> {
return Err(MirEvalError::StackOverflow);
}
let mut current_block_idx = body.start_block;
let mut locals = Locals {
ptr: &ArenaMap::new(),
body: &body,
subst: &subst,
drop_flags: DropFlags::default(),
};
let mut locals =
Locals { ptr: &ArenaMap::new(), body: &body, drop_flags: DropFlags::default() };
let (locals_ptr, stack_size) = {
let mut stack_ptr = self.stack.len();
let addr = body
@ -882,7 +844,17 @@ impl Evaluator<'_> {
}
Owned(r)
}
Rvalue::Len(_) => not_supported!("rvalue len"),
Rvalue::Len(p) => {
let (_, _, metadata) = self.place_addr_and_ty_and_metadata(p, locals)?;
match metadata {
Some(m) => m,
None => {
return Err(MirEvalError::TypeError(
"type without metadata is used for Rvalue::Len",
));
}
}
}
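Background for reading the length out of the place's metadata: for slices the length is stored next to the data pointer in the fat pointer itself, so no separate computation is needed.

```rust
fn main() {
    let a: [i32; 4] = [10, 20, 30, 40];
    let s: &[i32] = &a;
    // A slice reference is a fat pointer: data pointer plus length
    // (two pointer-sized words on current targets).
    assert_eq!(std::mem::size_of::<&[i32]>(), 2 * std::mem::size_of::<usize>());
    assert_eq!(s.len(), 4);
}
```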
Rvalue::UnaryOp(op, val) => {
let mut c = self.eval_operand(val, locals)?.get(&self)?;
let mut ty = self.operand_ty(val, locals)?;
@ -1080,7 +1052,7 @@ impl Evaluator<'_> {
}
return Ok(Owned(0u128.to_le_bytes().to_vec()));
};
match layout.variants {
match &layout.variants {
Variants::Single { index } => {
let r = self.const_eval_discriminant(EnumVariantId {
parent: enum_id,
@ -1102,14 +1074,14 @@ impl Evaluator<'_> {
TagEncoding::Niche { untagged_variant, niche_start, .. } => {
let tag = &bytes[offset..offset + size];
let candidate_tag = i128::from_le_bytes(pad16(tag, false))
.wrapping_sub(niche_start as i128)
.wrapping_sub(*niche_start as i128)
as usize;
let variant = variants
.iter_enumerated()
.map(|(x, _)| x)
.filter(|x| *x != untagged_variant)
.filter(|x| x != untagged_variant)
.nth(candidate_tag)
.unwrap_or(untagged_variant)
.unwrap_or(*untagged_variant)
.0;
let result = self.const_eval_discriminant(EnumVariantId {
parent: enum_id,
@ -1122,7 +1094,7 @@ impl Evaluator<'_> {
}
}
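The niche encoding this branch decodes, in user-visible terms: when one variant can reuse an otherwise-invalid bit pattern of another variant's payload, the enum needs no separate tag. The guaranteed example is `Option<&T>`, where `None` is stored as the null pointer:

```rust
fn main() {
    // Guaranteed by the null-pointer optimization: no extra tag byte.
    assert_eq!(
        std::mem::size_of::<Option<&u8>>(),
        std::mem::size_of::<&u8>()
    );
}
```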
Rvalue::Repeat(x, len) => {
let len = match try_const_usize(self.db, len) {
let len = match try_const_usize(self.db, &len) {
Some(x) => x as usize,
None => not_supported!("non evaluatable array len in repeat Rvalue"),
};
@ -1154,7 +1126,7 @@ impl Evaluator<'_> {
Owned(r)
}
AggregateKind::Tuple(ty) => {
let layout = self.layout_filled(&ty, locals)?;
let layout = self.layout(&ty)?;
Owned(self.make_by_layout(
layout.size.bytes_usize(),
&layout,
@ -1174,9 +1146,8 @@ impl Evaluator<'_> {
Owned(result)
}
AggregateKind::Adt(x, subst) => {
let subst = self.subst_filler(subst, locals);
let (size, variant_layout, tag) =
self.layout_of_variant(*x, subst, locals)?;
self.layout_of_variant(*x, subst.clone(), locals)?;
Owned(self.make_by_layout(
size,
&variant_layout,
@ -1185,7 +1156,7 @@ impl Evaluator<'_> {
)?)
}
AggregateKind::Closure(ty) => {
let layout = self.layout_filled(&ty, locals)?;
let layout = self.layout(&ty)?;
Owned(self.make_by_layout(
layout.size.bytes_usize(),
&layout,
@ -1220,7 +1191,10 @@ impl Evaluator<'_> {
// This is no-op
Borrowed(self.eval_operand(operand, locals)?)
}
x => not_supported!("pointer cast {x:?}"),
PointerCast::ArrayToPointer => {
// We should remove the metadata part if the current type is a slice
Borrowed(self.eval_operand(operand, locals)?.slice(0..self.ptr_size()))
}
},
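The cast the new `ArrayToPointer` arm corresponds to in surface Rust: a pointer to an array becomes a thin pointer to its first element, so any metadata that happens to be carried along is dropped.

```rust
fn main() {
    let a = [1u8, 2, 3];
    let p: *const [u8; 3] = &a; // pointer to the whole array
    let q = p as *const u8;     // array-to-pointer cast: first element
    unsafe { assert_eq!(*q, 1) };
}
```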
CastKind::DynStar => not_supported!("dyn star cast"),
CastKind::IntToInt
@ -1235,12 +1209,6 @@ impl Evaluator<'_> {
CastKind::FloatToInt => not_supported!("float to int cast"),
CastKind::FloatToFloat => not_supported!("float to float cast"),
CastKind::IntToFloat => not_supported!("float to int cast"),
CastKind::PtrToPtr => {
let current = pad16(self.eval_operand(operand, locals)?.get(&self)?, false);
let dest_size =
self.size_of_sized(target_ty, locals, "destination of ptr to ptr cast")?;
Owned(current[0..dest_size].to_vec())
}
CastKind::FnPtrToPtr => not_supported!("fn ptr to ptr cast"),
},
})
@ -1300,8 +1268,8 @@ impl Evaluator<'_> {
r.extend(len.to_le_bytes().into_iter());
Owned(r)
}
_ => {
not_supported!("slice unsizing from non arrays")
t => {
not_supported!("slice unsizing from non array type {t:?}")
}
},
}
@ -1327,7 +1295,7 @@ impl Evaluator<'_> {
x: VariantId,
subst: Substitution,
locals: &Locals<'_>,
) -> Result<(usize, Layout, Option<(usize, usize, i128)>)> {
) -> Result<(usize, Arc<Layout>, Option<(usize, usize, i128)>)> {
let adt = x.adt_id();
if let DefWithBodyId::VariantId(f) = locals.body.owner {
if let VariantId::EnumVariantId(x) = x {
@ -1340,7 +1308,7 @@ impl Evaluator<'_> {
}
}
let layout = self.layout_adt(adt, subst)?;
Ok(match layout.variants {
Ok(match &layout.variants {
Variants::Single { .. } => (layout.size.bytes_usize(), layout, None),
Variants::Multiple { variants, tag, tag_encoding, .. } => {
let cx = self
@ -1357,22 +1325,22 @@ impl Evaluator<'_> {
let have_tag = match tag_encoding {
TagEncoding::Direct => true,
TagEncoding::Niche { untagged_variant, niche_variants: _, niche_start } => {
if untagged_variant == rustc_enum_variant_idx {
if *untagged_variant == rustc_enum_variant_idx {
false
} else {
discriminant = (variants
.iter_enumerated()
.filter(|(x, _)| *x != untagged_variant)
.filter(|(x, _)| x != untagged_variant)
.position(|(x, _)| x == rustc_enum_variant_idx)
.unwrap() as i128)
.wrapping_add(niche_start as i128);
.wrapping_add(*niche_start as i128);
true
}
}
};
(
layout.size.bytes_usize(),
variant_layout,
Arc::new(variant_layout),
if have_tag {
Some((
layout.fields.offset(0).bytes_usize(),
@ -1419,15 +1387,7 @@ impl Evaluator<'_> {
Operand::Constant(konst) => {
let data = &konst.data(Interner);
match &data.value {
chalk_ir::ConstValue::BoundVar(b) => {
let c = locals
.subst
.as_slice(Interner)
.get(b.index)
.ok_or(MirEvalError::TypeError("missing generic arg"))?
.assert_const_ref(Interner);
self.eval_operand(&Operand::Constant(c.clone()), locals)?
}
chalk_ir::ConstValue::BoundVar(_) => not_supported!("bound var constant"),
chalk_ir::ConstValue::InferenceVar(_) => {
not_supported!("inference var constant")
}
@ -1471,29 +1431,8 @@ impl Evaluator<'_> {
self.patch_addresses(&patch_map, &memory_map.vtable, addr, ty, locals)?;
Interval::new(addr, size)
}
ConstScalar::UnevaluatedConst(const_id, subst) => {
let mut const_id = *const_id;
let mut subst = self.subst_filler(subst, locals);
if let GeneralConstId::ConstId(c) = const_id {
let (c, s) = lookup_impl_const(
self.db,
self.db.trait_environment_for_body(locals.body.owner),
c,
subst,
);
const_id = GeneralConstId::ConstId(c);
subst = s;
}
let c = self.db.const_eval(const_id.into(), subst).map_err(|e| {
let name = const_id.name(self.db.upcast());
MirEvalError::ConstEvalError(name, Box::new(e))
})?;
if let chalk_ir::ConstValue::Concrete(c) = &c.data(Interner).value {
if let ConstScalar::Bytes(_, _) = &c.interned {
return self.allocate_const_in_heap(&c, ty, locals, konst);
}
}
not_supported!("failing at evaluating unevaluated const");
ConstScalar::UnevaluatedConst(..) => {
not_supported!("unevaluated const present in monomorphized mir");
}
ConstScalar::Unknown => not_supported!("evaluating unknown const"),
})
@ -1555,7 +1494,7 @@ impl Evaluator<'_> {
}
}
}
let layout = self.layout_filled(ty, locals);
let layout = self.layout(ty);
if self.assert_placeholder_ty_is_unused {
if matches!(layout, Err(MirEvalError::LayoutError(LayoutError::HasPlaceholder, _))) {
return Ok(Some((0, 1)));
@ -1576,122 +1515,12 @@ impl Evaluator<'_> {
}
}
/// Uses `ty_filler` to fill an entire subst
fn subst_filler(&self, subst: &Substitution, locals: &Locals<'_>) -> Substitution {
Substitution::from_iter(
Interner,
subst.iter(Interner).map(|x| match x.data(Interner) {
chalk_ir::GenericArgData::Ty(ty) => {
let Ok(ty) = self.ty_filler(ty, locals.subst, locals.body.owner) else {
return x.clone();
};
chalk_ir::GenericArgData::Ty(ty).intern(Interner)
}
_ => x.clone(),
}),
)
}
/// This function substitutes placeholders of the body with the provided subst, effectively plays
/// the rule of monomorphization. In addition to placeholders, it substitutes opaque types (return
/// position impl traits) with their underlying type.
fn ty_filler(&self, ty: &Ty, subst: &Substitution, owner: DefWithBodyId) -> Result<Ty> {
struct Filler<'a> {
db: &'a dyn HirDatabase,
subst: &'a Substitution,
generics: Option<Generics>,
}
impl FallibleTypeFolder<Interner> for Filler<'_> {
type Error = MirEvalError;
fn as_dyn(&mut self) -> &mut dyn FallibleTypeFolder<Interner, Error = Self::Error> {
self
}
fn interner(&self) -> Interner {
Interner
}
fn try_fold_ty(
&mut self,
ty: Ty,
outer_binder: DebruijnIndex,
) -> std::result::Result<Ty, Self::Error> {
match ty.kind(Interner) {
TyKind::AssociatedType(id, subst) => {
// I don't know exactly if and why this is needed, but it looks like `normalize_ty` likes
// this kind of associated types.
Ok(TyKind::Alias(chalk_ir::AliasTy::Projection(ProjectionTy {
associated_ty_id: *id,
substitution: subst.clone().try_fold_with(self, outer_binder)?,
}))
.intern(Interner))
}
TyKind::OpaqueType(id, subst) => {
let impl_trait_id = self.db.lookup_intern_impl_trait_id((*id).into());
let subst = subst.clone().try_fold_with(self.as_dyn(), outer_binder)?;
match impl_trait_id {
crate::ImplTraitId::ReturnTypeImplTrait(func, idx) => {
let infer = self.db.infer(func.into());
let filler = &mut Filler {
db: self.db,
subst: &subst,
generics: Some(generics(self.db.upcast(), func.into())),
};
filler.try_fold_ty(infer.type_of_rpit[idx].clone(), outer_binder)
}
crate::ImplTraitId::AsyncBlockTypeImplTrait(_, _) => {
not_supported!("async block impl trait");
}
}
}
_ => ty.try_super_fold_with(self.as_dyn(), outer_binder),
}
}
fn try_fold_free_placeholder_ty(
&mut self,
idx: chalk_ir::PlaceholderIndex,
_outer_binder: DebruijnIndex,
) -> std::result::Result<Ty, Self::Error> {
let x = from_placeholder_idx(self.db, idx);
let Some(idx) = self.generics.as_ref().and_then(|g| g.param_idx(x)) else {
not_supported!("missing idx in generics");
};
Ok(self
.subst
.as_slice(Interner)
.get(idx)
.and_then(|x| x.ty(Interner))
.ok_or_else(|| MirEvalError::GenericArgNotProvided(x, self.subst.clone()))?
.clone())
}
}
let g_def = match owner {
DefWithBodyId::FunctionId(f) => Some(f.into()),
DefWithBodyId::StaticId(_) => None,
DefWithBodyId::ConstId(f) => Some(f.into()),
DefWithBodyId::VariantId(f) => Some(f.into()),
};
let generics = g_def.map(|g_def| generics(self.db.upcast(), g_def));
let filler = &mut Filler { db: self.db, subst, generics };
Ok(normalize(
self.db,
self.trait_env.clone(),
ty.clone().try_fold_with(filler, DebruijnIndex::INNERMOST)?,
))
}
fn heap_allocate(&mut self, size: usize, _align: usize) -> Address {
let pos = self.heap.len();
self.heap.extend(iter::repeat(0).take(size));
Address::Heap(pos)
}
pub fn interpret_mir_with_no_arg(&mut self, body: &MirBody) -> Result<Vec<u8>> {
self.interpret_mir(&body, vec![].into_iter(), Substitution::empty(Interner))
}
fn detect_fn_trait(&self, def: FunctionId) -> Option<FnTrait> {
use LangItem::*;
let ItemContainerId::TraitId(parent) = self.db.lookup_intern_function(def).container else {
@ -1849,21 +1678,24 @@ impl Evaluator<'_> {
) -> Result<()> {
let mir_body = self
.db
.mir_body_for_closure(closure)
.monomorphized_mir_body_for_closure(
closure,
generic_args.clone(),
self.trait_env.clone(),
)
.map_err(|x| MirEvalError::MirLowerErrorForClosure(closure, x))?;
let arg_bytes = iter::once(Ok(closure_data.get(self)?.to_owned()))
let closure_data = if mir_body.locals[mir_body.param_locals[0]].ty.as_reference().is_some()
{
closure_data.addr.to_bytes()
} else {
closure_data.get(self)?.to_owned()
};
let arg_bytes = iter::once(Ok(closure_data))
.chain(args.iter().map(|x| Ok(x.get(&self)?.to_owned())))
.collect::<Result<Vec<_>>>()?;
let bytes = self
.interpret_mir(&mir_body, arg_bytes.into_iter(), generic_args.clone())
.map_err(|e| {
MirEvalError::InFunction(
Either::Right(closure),
Box::new(e),
span,
locals.body.owner,
)
})?;
let bytes = self.interpret_mir(&mir_body, arg_bytes.into_iter()).map_err(|e| {
MirEvalError::InFunction(Either::Right(closure), Box::new(e), span, locals.body.owner)
})?;
destination.write_from_bytes(self, &bytes)
}
@ -1877,7 +1709,7 @@ impl Evaluator<'_> {
span: MirSpan,
) -> Result<()> {
let def: CallableDefId = from_chalk(self.db, def);
let generic_args = self.subst_filler(generic_args, &locals);
let generic_args = generic_args.clone();
match def {
CallableDefId::FunctionId(def) => {
if let Some(_) = self.detect_fn_trait(def) {
@ -1982,14 +1814,14 @@ impl Evaluator<'_> {
span: MirSpan,
destination: Interval,
) -> Result<()> {
let generic_args = self.subst_filler(&generic_args, &locals);
let def = imp.into();
let mir_body = self.db.mir_body(def).map_err(|e| MirEvalError::MirLowerError(imp, e))?;
let result = self
.interpret_mir(&mir_body, arg_bytes.iter().cloned(), generic_args)
.map_err(|e| {
MirEvalError::InFunction(Either::Left(imp), Box::new(e), span, locals.body.owner)
})?;
let mir_body = self
.db
.monomorphized_mir_body(def, generic_args, self.trait_env.clone())
.map_err(|e| MirEvalError::MirLowerError(imp, e))?;
let result = self.interpret_mir(&mir_body, arg_bytes.iter().cloned()).map_err(|e| {
MirEvalError::InFunction(Either::Left(imp), Box::new(e), span, locals.body.owner)
})?;
destination.write_from_bytes(self, &result)?;
Ok(())
}

View file

@ -1,6 +1,8 @@
//! Interpret intrinsics, lang items and well-known `extern "C"` functions whose
//! implementation is not available.
use std::cmp;
use super::*;
macro_rules! from_bytes {
@ -254,6 +256,7 @@ impl Evaluator<'_> {
}
_ => not_supported!("write to arbitrary file descriptor"),
}
destination.write_from_interval(self, len.interval)?;
Ok(())
}
"pthread_key_create" => {
@ -437,7 +440,7 @@ impl Evaluator<'_> {
let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else {
return Err(MirEvalError::TypeError("align_of generic arg is not provided"));
};
let align = self.layout_filled(ty, locals)?.align.abi.bytes();
let align = self.layout(ty)?.align.abi.bytes();
destination.write_from_bytes(self, &align.to_le_bytes()[0..destination.size])
}
"needs_drop" => {
@ -456,6 +459,22 @@ impl Evaluator<'_> {
let ans = lhs.get(self)? == rhs.get(self)?;
destination.write_from_bytes(self, &[u8::from(ans)])
}
"saturating_add" => {
let [lhs, rhs] = args else {
return Err(MirEvalError::TypeError("saturating_add args are not provided"));
};
let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
let ans = lhs.saturating_add(rhs);
let bits = destination.size * 8;
// FIXME: signed
let is_signed = false;
let mx: u128 = if is_signed { (1 << (bits - 1)) - 1 } else { (1 << bits) - 1 };
// FIXME: signed
let mn: u128 = 0;
let ans = cmp::min(mx, cmp::max(mn, ans));
destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size])
}
"wrapping_add" | "unchecked_add" => {
let [lhs, rhs] = args else {
return Err(MirEvalError::TypeError("wrapping_add args are not provided"));
@ -474,6 +493,15 @@ impl Evaluator<'_> {
let ans = lhs.wrapping_sub(rhs);
destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size])
}
"wrapping_mul" | "unchecked_mul" => {
let [lhs, rhs] = args else {
return Err(MirEvalError::TypeError("wrapping_mul args are not provided"));
};
let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
let ans = lhs.wrapping_mul(rhs);
destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size])
}
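The wrapping semantics these shims implement, shown with the std `wrapping_*` methods on `u8`:

```rust
fn main() {
    assert_eq!(250u8.wrapping_add(10), 4);  // 260 mod 256
    assert_eq!(3u8.wrapping_sub(10), 249);  // -7 mod 256
    assert_eq!(16u8.wrapping_mul(16), 0);   // 256 mod 256
}
```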
"unchecked_rem" => {
// FIXME: signed
let [lhs, rhs] = args else {
@ -498,7 +526,7 @@ impl Evaluator<'_> {
})?;
destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size])
}
"add_with_overflow" => {
"add_with_overflow" | "sub_with_overflow" | "mul_with_overflow" => {
let [lhs, rhs] = args else {
return Err(MirEvalError::TypeError("const_eval_select args are not provided"));
};
@ -511,8 +539,14 @@ impl Evaluator<'_> {
self.size_of_sized(&lhs.ty, locals, "operand of add_with_overflow")?;
let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
let ans = lhs.wrapping_add(rhs);
let is_overflow = false;
let (ans, u128overflow) = match as_str {
"add_with_overflow" => lhs.overflowing_add(rhs),
"sub_with_overflow" => lhs.overflowing_sub(rhs),
"mul_with_overflow" => lhs.overflowing_mul(rhs),
_ => unreachable!(),
};
let is_overflow = u128overflow
|| ans.to_le_bytes()[op_size..].iter().any(|&x| x != 0 && x != 255);
let is_overflow = vec![u8::from(is_overflow)];
let layout = self.layout(&result_ty)?;
let result = self.make_by_layout(

View file

@ -21,10 +21,15 @@ fn eval_main(db: &TestDB, file_id: FileId) -> Result<(String, String), MirEvalEr
}
_ => None,
})
.unwrap();
let body =
db.mir_body(func_id.into()).map_err(|e| MirEvalError::MirLowerError(func_id.into(), e))?;
let (result, stdout, stderr) = interpret_mir(db, &body, Substitution::empty(Interner), false);
.expect("no main function found");
let body = db
.monomorphized_mir_body(
func_id.into(),
Substitution::empty(Interner),
db.trait_environment(func_id.into()),
)
.map_err(|e| MirEvalError::MirLowerError(func_id.into(), e))?;
let (result, stdout, stderr) = interpret_mir(db, &body, false);
result?;
Ok((stdout, stderr))
}
@ -34,7 +39,8 @@ fn check_pass(ra_fixture: &str) {
}
fn check_pass_and_stdio(ra_fixture: &str, expected_stdout: &str, expected_stderr: &str) {
let (db, file_id) = TestDB::with_single_file(ra_fixture);
let (db, file_ids) = TestDB::with_many_files(ra_fixture);
let file_id = *file_ids.last().unwrap();
let x = eval_main(&db, file_id);
match x {
Err(e) => {
@ -270,6 +276,243 @@ fn main() {
);
}
#[test]
fn from_fn() {
check_pass(
r#"
//- minicore: fn, iterator
struct FromFn<F>(F);
impl<T, F: FnMut() -> Option<T>> Iterator for FromFn<F> {
type Item = T;
fn next(&mut self) -> Option<Self::Item> {
(self.0)()
}
}
fn main() {
let mut tokenize = {
FromFn(move || Some(2))
};
let s = tokenize.next();
}
"#,
);
}
#[test]
fn for_loop() {
check_pass(
r#"
//- minicore: iterator, add
fn should_not_reach() {
_ // FIXME: replace this function with panic when that works
}
struct X;
struct XIter(i32);
impl IntoIterator for X {
type Item = i32;
type IntoIter = XIter;
fn into_iter(self) -> Self::IntoIter {
XIter(0)
}
}
impl Iterator for XIter {
type Item = i32;
fn next(&mut self) -> Option<Self::Item> {
if self.0 == 5 {
None
} else {
self.0 += 1;
Some(self.0)
}
}
}
fn main() {
let mut s = 0;
for x in X {
s += x;
}
if s != 15 {
should_not_reach();
}
}
"#,
);
}
#[test]
fn field_with_associated_type() {
check_pass(
r#"
//- /b/mod.rs crate:b
pub trait Tr {
fn f(self);
}
pub trait Tr2 {
type Ty: Tr;
}
pub struct S<T: Tr2> {
pub t: T::Ty,
}
impl<T: Tr2> S<T> {
pub fn g(&self) {
let k = (self.t, self.t);
self.t.f();
}
}
//- /a/mod.rs crate:a deps:b
use b::{Tr, Tr2, S};
struct A(i32);
struct B(u8);
impl Tr for A {
fn f(&self) {
}
}
impl Tr2 for B {
type Ty = A;
}
#[test]
fn main() {
let s: S<B> = S { t: A(2) };
s.g();
}
"#,
);
}
#[test]
fn specialization_array_clone() {
check_pass(
r#"
//- minicore: copy, derive, slice, index, coerce_unsized
impl<T: Clone, const N: usize> Clone for [T; N] {
#[inline]
fn clone(&self) -> Self {
SpecArrayClone::clone(self)
}
}
trait SpecArrayClone: Clone {
fn clone<const N: usize>(array: &[Self; N]) -> [Self; N];
}
impl<T: Clone> SpecArrayClone for T {
#[inline]
default fn clone<const N: usize>(array: &[T; N]) -> [T; N] {
// FIXME: panic here when we actually implement specialization.
from_slice(array)
}
}
fn from_slice<T, const N: usize>(s: &[T]) -> [T; N] {
[s[0]; N]
}
impl<T: Copy> SpecArrayClone for T {
#[inline]
fn clone<const N: usize>(array: &[T; N]) -> [T; N] {
*array
}
}
#[derive(Clone, Copy)]
struct X(i32);
fn main() {
let ar = [X(1), X(2)];
ar.clone();
}
"#,
);
}
#[test]
fn short_circuit_operator() {
check_pass(
r#"
fn should_not_reach() -> bool {
_ // FIXME: replace this function with panic when that works
}
fn main() {
if false && should_not_reach() {
should_not_reach();
}
true || should_not_reach();
}
"#,
);
}
#[test]
fn closure_state() {
check_pass(
r#"
//- minicore: fn, add, copy
fn should_not_reach() {
_ // FIXME: replace this function with panic when that works
}
fn main() {
let mut x = 2;
let mut c = move || {
x += 1;
x
};
c();
c();
c();
if x != 2 {
should_not_reach();
}
if c() != 6 {
should_not_reach();
}
}
"#,
);
}
#[test]
fn closure_capture_array_const_generic() {
check_pass(
r#"
//- minicore: fn, add, copy
struct X(i32);
fn f<const N: usize>(mut x: [X; N]) { // -> impl FnOnce() {
let c = || {
x;
};
c();
}
fn main() {
let s = f([X(1)]);
//s();
}
"#,
);
}
#[test]
fn posix_tls() {
check_pass(

View file

@ -8,14 +8,14 @@ use hir_def::{
body::Body,
data::adt::{StructKind, VariantData},
hir::{
ArithOp, Array, BinaryOp, BindingAnnotation, BindingId, ExprId, LabelId, Literal, MatchArm,
Pat, PatId, RecordFieldPat, RecordLitField,
ArithOp, Array, BinaryOp, BindingAnnotation, BindingId, ExprId, LabelId, Literal,
LiteralOrConst, MatchArm, Pat, PatId, RecordFieldPat, RecordLitField,
},
lang_item::{LangItem, LangItemTarget},
path::Path,
resolver::{resolver_for_expr, ResolveValueResult, ValueNs},
resolver::{resolver_for_expr, HasResolver, ResolveValueResult, ValueNs},
AdtId, DefWithBodyId, EnumVariantId, GeneralConstId, HasModule, ItemContainerId, LocalFieldId,
TraitId,
TraitId, TypeOrConstParamId,
};
use hir_expand::name::Name;
use la_arena::ArenaMap;
@ -29,9 +29,10 @@ use crate::{
display::HirDisplay,
infer::{CaptureKind, CapturedItem, TypeMismatch},
inhabitedness::is_ty_uninhabited_from,
layout::{layout_of_ty, LayoutError},
layout::LayoutError,
mapping::ToChalk,
static_lifetime,
traits::FnTrait,
utils::{generics, ClosureSubst},
Adjust, Adjustment, AutoBorrow, CallableDefId, TyBuilder, TyExt,
};
@ -41,8 +42,6 @@ use super::*;
mod as_place;
mod pattern_matching;
use pattern_matching::AdtPatternShape;
#[derive(Debug, Clone)]
struct LoopBlocks {
begin: BasicBlockId,
@ -74,6 +73,7 @@ pub enum MirLowerError {
ConstEvalError(String, Box<ConstEvalError>),
LayoutError(LayoutError),
IncompleteExpr,
IncompletePattern,
/// Trying to lower a trait function, instead of an implementation
TraitFunctionDefinition(TraitId, Name),
UnresolvedName(String),
@ -96,6 +96,9 @@ pub enum MirLowerError {
UnresolvedLabel,
UnresolvedUpvar(Place),
UnaccessableLocal,
// monomorphization errors:
GenericArgNotProvided(TypeOrConstParamId, Substitution),
}
impl MirLowerError {
@ -129,9 +132,24 @@ impl MirLowerError {
e.actual.display(db),
)?;
}
MirLowerError::GenericArgNotProvided(id, subst) => {
let parent = id.parent;
let param = &db.generic_params(parent).type_or_consts[id.local_id];
writeln!(
f,
"Generic arg not provided for {}",
param.name().unwrap_or(&Name::missing()).display(db.upcast())
)?;
writeln!(f, "Provided args: [")?;
for g in subst.iter(Interner) {
write!(f, " {},", g.display(db).to_string())?;
}
writeln!(f, "]")?;
}
MirLowerError::LayoutError(_)
| MirLowerError::UnsizedTemporary(_)
| MirLowerError::IncompleteExpr
| MirLowerError::IncompletePattern
| MirLowerError::UnaccessableLocal
| MirLowerError::TraitFunctionDefinition(_, _)
| MirLowerError::UnresolvedName(_)
@ -528,61 +546,6 @@ impl<'ctx> MirLowerCtx<'ctx> {
Ok(())
})
}
&Expr::For { iterable, pat, body, label } => {
let into_iter_fn = self.resolve_lang_item(LangItem::IntoIterIntoIter)?
.as_function().ok_or(MirLowerError::LangItemNotFound(LangItem::IntoIterIntoIter))?;
let iter_next_fn = self.resolve_lang_item(LangItem::IteratorNext)?
.as_function().ok_or(MirLowerError::LangItemNotFound(LangItem::IteratorNext))?;
let option_some = self.resolve_lang_item(LangItem::OptionSome)?
.as_enum_variant().ok_or(MirLowerError::LangItemNotFound(LangItem::OptionSome))?;
let option = option_some.parent;
let into_iter_fn_op = Operand::const_zst(
TyKind::FnDef(
self.db.intern_callable_def(CallableDefId::FunctionId(into_iter_fn)).into(),
Substitution::from1(Interner, self.expr_ty_without_adjust(iterable))
).intern(Interner));
let iter_next_fn_op = Operand::const_zst(
TyKind::FnDef(
self.db.intern_callable_def(CallableDefId::FunctionId(iter_next_fn)).into(),
Substitution::from1(Interner, self.expr_ty_without_adjust(iterable))
).intern(Interner));
let &Some(iterator_ty) = &self.infer.type_of_for_iterator.get(&expr_id) else {
return Err(MirLowerError::TypeError("unknown for loop iterator type"));
};
let ref_mut_iterator_ty = TyKind::Ref(Mutability::Mut, static_lifetime(), iterator_ty.clone()).intern(Interner);
let item_ty = &self.infer.type_of_pat[pat];
let option_item_ty = TyKind::Adt(chalk_ir::AdtId(option.into()), Substitution::from1(Interner, item_ty.clone())).intern(Interner);
let iterator_place: Place = self.temp(iterator_ty.clone(), current, expr_id.into())?.into();
let option_item_place: Place = self.temp(option_item_ty.clone(), current, expr_id.into())?.into();
let ref_mut_iterator_place: Place = self.temp(ref_mut_iterator_ty, current, expr_id.into())?.into();
let Some(current) = self.lower_call_and_args(into_iter_fn_op, Some(iterable).into_iter(), iterator_place.clone(), current, false, expr_id.into())?
else {
return Ok(None);
};
self.push_assignment(current, ref_mut_iterator_place.clone(), Rvalue::Ref(BorrowKind::Mut { allow_two_phase_borrow: false }, iterator_place), expr_id.into());
self.lower_loop(current, place, label, expr_id.into(), |this, begin| {
let Some(current) = this.lower_call(iter_next_fn_op, Box::new([Operand::Copy(ref_mut_iterator_place)]), option_item_place.clone(), begin, false, expr_id.into())?
else {
return Ok(());
};
let end = this.current_loop_end()?;
let (current, _) = this.pattern_matching_variant(
option_item_ty.clone(),
BindingAnnotation::Unannotated,
option_item_place.into(),
option_some.into(),
current,
pat.into(),
Some(end),
AdtPatternShape::Tuple { args: &[pat], ellipsis: None },
)?;
if let Some((_, block)) = this.lower_expr_as_place(current, body, true)? {
let block = this.pop_drop_scope(block);
this.set_goto(block, begin, expr_id.into());
}
Ok(())
})
},
Expr::Call { callee, args, .. } => {
if let Some((func_id, generic_args)) =
self.infer.method_resolution(expr_id) {
@ -918,6 +881,27 @@ impl<'ctx> MirLowerCtx<'ctx> {
let Some((lhs_op, current)) = self.lower_expr_to_some_operand(*lhs, current)? else {
return Ok(None);
};
if let hir_def::hir::BinaryOp::LogicOp(op) = op {
let value_to_short = match op {
syntax::ast::LogicOp::And => 0,
syntax::ast::LogicOp::Or => 1,
};
let start_of_then = self.new_basic_block();
self.push_assignment(start_of_then, place.clone(), lhs_op.clone().into(), expr_id.into());
let end_of_then = Some(start_of_then);
let start_of_else = self.new_basic_block();
let end_of_else =
self.lower_expr_to_place(*rhs, place, start_of_else)?;
self.set_terminator(
current,
TerminatorKind::SwitchInt {
discr: lhs_op,
targets: SwitchTargets::static_if(value_to_short, start_of_then, start_of_else),
},
expr_id.into(),
);
return Ok(self.merge_blocks(end_of_then, end_of_else, expr_id.into()));
}
let Some((rhs_op, current)) = self.lower_expr_to_some_operand(*rhs, current)? else {
return Ok(None);
};
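Roughly, this branch makes `lhs && rhs` and `lhs || rhs` behave like the source-level sketch below (hypothetical helpers, not the generated MIR): the `SwitchInt` jumps to a block that just copies `lhs` into the destination when `lhs` already decides the result, and only otherwise lowers `rhs` into the same place.

    fn short_circuit_and(lhs: bool, rhs: impl FnOnce() -> bool) -> bool {
        // value_to_short = 0: when lhs is false the result is lhs itself and rhs never runs
        if !lhs { lhs } else { rhs() }
    }

    fn short_circuit_or(lhs: bool, rhs: impl FnOnce() -> bool) -> bool {
        // value_to_short = 1: when lhs is true the result is lhs itself and rhs never runs
        if lhs { lhs } else { rhs() }
    }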
@ -1135,8 +1119,39 @@ impl<'ctx> MirLowerCtx<'ctx> {
Ok(())
}
fn lower_literal_or_const_to_operand(
&mut self,
ty: Ty,
loc: &LiteralOrConst,
) -> Result<Operand> {
match loc {
LiteralOrConst::Literal(l) => self.lower_literal_to_operand(ty, l),
LiteralOrConst::Const(c) => {
let unresolved_name = || MirLowerError::unresolved_path(self.db, c);
let resolver = self.owner.resolver(self.db.upcast());
let pr = resolver
.resolve_path_in_value_ns(self.db.upcast(), c)
.ok_or_else(unresolved_name)?;
match pr {
ResolveValueResult::ValueNs(v) => {
if let ValueNs::ConstId(c) = v {
self.lower_const_to_operand(Substitution::empty(Interner), c.into(), ty)
} else {
not_supported!("bad path in range pattern");
}
}
ResolveValueResult::Partial(_, _) => {
not_supported!("associated constants in range pattern")
}
}
}
}
}
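    // A hypothetical example of what the const branch above enables: a free const used as
    // a range-pattern endpoint now lowers through `lower_const_to_operand`, while an
    // associated const endpoint still hits the `not_supported!` path.
    //
    //     const LIMIT: i32 = 10;
    //     fn in_range(x: i32) -> bool {
    //         matches!(x, 0..=LIMIT)
    //     }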
fn lower_literal_to_operand(&mut self, ty: Ty, l: &Literal) -> Result<Operand> {
let size = layout_of_ty(self.db, &ty, self.owner.module(self.db.upcast()).krate())?
let size = self
.db
.layout_of_ty(ty.clone(), self.owner.module(self.db.upcast()).krate())?
.size
.bytes_usize();
let bytes = match l {
@ -1196,6 +1211,17 @@ impl<'ctx> MirLowerCtx<'ctx> {
span: MirSpan,
ty: Ty,
) -> Result<()> {
let c = self.lower_const_to_operand(subst, const_id, ty)?;
self.push_assignment(prev_block, place, c.into(), span);
Ok(())
}
fn lower_const_to_operand(
&mut self,
subst: Substitution,
const_id: GeneralConstId,
ty: Ty,
) -> Result<Operand> {
let c = if subst.len(Interner) != 0 {
            // We can't evaluate a constant with a substitution at this point, as generics are not monomorphized during lowering.
intern_const_scalar(ConstScalar::UnevaluatedConst(const_id, subst), ty)
@ -1205,18 +1231,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
.const_eval(const_id.into(), subst)
.map_err(|e| MirLowerError::ConstEvalError(name, Box::new(e)))?
};
self.write_const_to_place(c, prev_block, place, span)
}
fn write_const_to_place(
&mut self,
c: Const,
prev_block: BasicBlockId,
place: Place,
span: MirSpan,
) -> Result<()> {
self.push_assignment(prev_block, place, Operand::Constant(c).into(), span);
Ok(())
Ok(Operand::Constant(c))
}
fn write_bytes_to_place(
@ -1673,8 +1688,23 @@ fn cast_kind(source_ty: &Ty, target_ty: &Ty) -> Result<CastKind> {
},
(TyKind::Scalar(_), TyKind::Raw(..)) => CastKind::PointerFromExposedAddress,
(TyKind::Raw(..), TyKind::Scalar(_)) => CastKind::PointerExposeAddress,
(TyKind::Raw(..) | TyKind::Ref(..), TyKind::Raw(..) | TyKind::Ref(..)) => {
CastKind::PtrToPtr
(TyKind::Raw(_, a) | TyKind::Ref(_, _, a), TyKind::Raw(_, b) | TyKind::Ref(_, _, b)) => {
CastKind::Pointer(if a == b {
PointerCast::MutToConstPointer
} else if matches!(a.kind(Interner), TyKind::Slice(_) | TyKind::Str)
&& matches!(b.kind(Interner), TyKind::Slice(_) | TyKind::Str)
{
                // slice to slice cast is a no-op (metadata is not touched), so we use this
PointerCast::MutToConstPointer
} else if matches!(b.kind(Interner), TyKind::Slice(_) | TyKind::Dyn(_)) {
PointerCast::Unsize
} else if matches!(a.kind(Interner), TyKind::Slice(s) if s == b) {
PointerCast::ArrayToPointer
} else {
                // cast between two sized pointers, like *const i32 to *const i8. There is no
                // specific variant for it in `PointerCast`, so we use `MutToConstPointer`
PointerCast::MutToConstPointer
})
}
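            // Illustrative mappings under the rule above (hypothetical casts, not from this
            // change):
            //   `&mut i32` as `*const i32`        -> PointerCast::MutToConstPointer (same pointee)
            //   `*const [u8; 4]` as `*const [u8]` -> PointerCast::Unsize (target pointee is a slice)
            //   `*const [i32]` as `*const i32`    -> PointerCast::ArrayToPointer (slice pointee whose
            //                                        element type equals the target pointee)
            //   `*const i32` as `*const i8`       -> PointerCast::MutToConstPointer (sized-to-sized
            //                                        fallback)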
// Enum to int casts
(TyKind::Scalar(_), TyKind::Adt(..)) | (TyKind::Adt(..), TyKind::Scalar(_)) => {
@ -1697,11 +1727,19 @@ pub fn mir_body_for_closure_query(
let TyKind::Closure(_, substs) = &infer[expr].kind(Interner) else {
implementation_error!("closure expression is not closure");
};
let (captures, _) = infer.closure_info(&closure);
let (captures, kind) = infer.closure_info(&closure);
let mut ctx = MirLowerCtx::new(db, owner, &body, &infer);
// 0 is return local
ctx.result.locals.alloc(Local { ty: infer[*root].clone() });
let closure_local = ctx.result.locals.alloc(Local { ty: infer[expr].clone() });
let closure_local = ctx.result.locals.alloc(Local {
ty: match kind {
FnTrait::FnOnce => infer[expr].clone(),
FnTrait::FnMut => TyKind::Ref(Mutability::Mut, static_lifetime(), infer[expr].clone())
.intern(Interner),
FnTrait::Fn => TyKind::Ref(Mutability::Not, static_lifetime(), infer[expr].clone())
.intern(Interner),
},
});
ctx.result.param_locals.push(closure_local);
let Some(sig) = ClosureSubst(substs).sig_ty().callable_sig(db) else {
implementation_error!("closure has not callable sig");
@ -1721,6 +1759,10 @@ pub fn mir_body_for_closure_query(
}
let mut err = None;
let closure_local = ctx.result.locals.iter().nth(1).unwrap().0;
let closure_projection = match kind {
FnTrait::FnOnce => vec![],
FnTrait::FnMut | FnTrait::Fn => vec![ProjectionElem::Deref],
};
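    // With the parameter types chosen above, an `FnMut`/`Fn` closure body receives a
    // reference to the closure value rather than the value itself, so every captured
    // place has to be reached through a dereference first: conceptually
    // `(*closure_local).capture_i` instead of `closure_local.capture_i`.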
ctx.result.walk_places(|p| {
if let Some(x) = upvar_map.get(&p.local) {
let r = x.iter().find(|x| {
@ -1743,7 +1785,8 @@ pub fn mir_body_for_closure_query(
match r {
Some(x) => {
p.local = closure_local;
let mut next_projs = vec![PlaceElem::TupleOrClosureField(x.1)];
let mut next_projs = closure_projection.clone();
next_projs.push(PlaceElem::TupleOrClosureField(x.1));
let prev_projs = mem::take(&mut p.projection);
if x.0.kind != CaptureKind::ByValue {
next_projs.push(ProjectionElem::Deref);

View file

@ -1,6 +1,6 @@
//! MIR lowering for patterns
use hir_def::resolver::HasResolver;
use hir_def::{hir::LiteralOrConst, resolver::HasResolver, AssocItemId};
use crate::utils::pattern_matching_dereference_count;
@ -38,7 +38,7 @@ impl MirLowerCtx<'_> {
mut binding_mode: BindingAnnotation,
) -> Result<(BasicBlockId, Option<BasicBlockId>)> {
Ok(match &self.body.pats[pattern] {
Pat::Missing => return Err(MirLowerError::IncompleteExpr),
Pat::Missing => return Err(MirLowerError::IncompletePattern),
Pat::Wild => (current, current_else),
Pat::Tuple { args, ellipsis } => {
pattern_matching_dereference(&mut cond_ty, &mut binding_mode, &mut cond_place);
@ -106,9 +106,92 @@ impl MirLowerCtx<'_> {
AdtPatternShape::Record { args: &*args },
)?
}
Pat::Range { .. } => not_supported!("range pattern"),
Pat::Range { start, end } => {
let mut add_check = |l: &LiteralOrConst, binop| -> Result<()> {
let lv = self.lower_literal_or_const_to_operand(cond_ty.clone(), l)?;
let else_target = *current_else.get_or_insert_with(|| self.new_basic_block());
let next = self.new_basic_block();
let discr: Place =
self.temp(TyBuilder::bool(), current, pattern.into())?.into();
self.push_assignment(
current,
discr.clone(),
Rvalue::CheckedBinaryOp(binop, lv, Operand::Copy(cond_place.clone())),
pattern.into(),
);
let discr = Operand::Copy(discr);
self.set_terminator(
current,
TerminatorKind::SwitchInt {
discr,
targets: SwitchTargets::static_if(1, next, else_target),
},
pattern.into(),
);
current = next;
Ok(())
};
if let Some(start) = start {
add_check(start, BinOp::Le)?;
}
if let Some(end) = end {
add_check(end, BinOp::Ge)?;
}
(current, current_else)
}
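            // Worked example (hypothetical pattern): for `3..=7` this emits two checked
            // comparisons, `3 <= cond_place` and then `7 >= cond_place`; each SwitchInt
            // falls through to `next` on 1 and jumps to the shared else block on 0, so the
            // arm body is only reached when both bounds hold.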
Pat::Slice { prefix, slice, suffix } => {
pattern_matching_dereference(&mut cond_ty, &mut binding_mode, &mut cond_place);
if let TyKind::Slice(_) = cond_ty.kind(Interner) {
let pattern_len = prefix.len() + suffix.len();
let place_len: Place =
self.temp(TyBuilder::usize(), current, pattern.into())?.into();
self.push_assignment(
current,
place_len.clone(),
Rvalue::Len(cond_place.clone()),
pattern.into(),
);
let else_target = *current_else.get_or_insert_with(|| self.new_basic_block());
let next = self.new_basic_block();
if slice.is_none() {
self.set_terminator(
current,
TerminatorKind::SwitchInt {
discr: Operand::Copy(place_len),
targets: SwitchTargets::static_if(
pattern_len as u128,
next,
else_target,
),
},
pattern.into(),
);
} else {
let c = Operand::from_concrete_const(
pattern_len.to_le_bytes().to_vec(),
MemoryMap::default(),
TyBuilder::usize(),
);
let discr: Place =
self.temp(TyBuilder::bool(), current, pattern.into())?.into();
self.push_assignment(
current,
discr.clone(),
Rvalue::CheckedBinaryOp(BinOp::Le, c, Operand::Copy(place_len)),
pattern.into(),
);
let discr = Operand::Copy(discr);
self.set_terminator(
current,
TerminatorKind::SwitchInt {
discr,
targets: SwitchTargets::static_if(1, next, else_target),
},
pattern.into(),
);
}
current = next;
}
for (i, &pat) in prefix.iter().enumerate() {
let next_place = cond_place.project(ProjectionElem::ConstantIndex {
offset: i as u64,
@ -174,53 +257,44 @@ impl MirLowerCtx<'_> {
let pr = resolver
.resolve_path_in_value_ns(self.db.upcast(), p)
.ok_or_else(unresolved_name)?;
match pr {
ResolveValueResult::ValueNs(v) => match v {
ValueNs::ConstId(c) => {
let tmp: Place =
self.temp(cond_ty.clone(), current, pattern.into())?.into();
let span = pattern.into();
self.lower_const(
c.into(),
current,
tmp.clone(),
Substitution::empty(Interner),
span,
cond_ty.clone(),
)?;
let tmp2: Place =
self.temp(TyBuilder::bool(), current, pattern.into())?.into();
self.push_assignment(
current,
tmp2.clone(),
Rvalue::CheckedBinaryOp(
BinOp::Eq,
Operand::Copy(tmp),
Operand::Copy(cond_place),
),
span,
);
let next = self.new_basic_block();
let else_target =
current_else.unwrap_or_else(|| self.new_basic_block());
self.set_terminator(
current,
TerminatorKind::SwitchInt {
discr: Operand::Copy(tmp2),
targets: SwitchTargets::static_if(1, next, else_target),
},
span,
);
(next, Some(else_target))
let (c, subst) = 'b: {
if let Some(x) = self.infer.assoc_resolutions_for_pat(pattern) {
if let AssocItemId::ConstId(c) = x.0 {
break 'b (c, x.1);
}
_ => not_supported!(
"path in pattern position that is not const or variant"
),
},
ResolveValueResult::Partial(_, _) => {
not_supported!("assoc const in patterns")
}
}
if let ResolveValueResult::ValueNs(v) = pr {
if let ValueNs::ConstId(c) = v {
break 'b (c, Substitution::empty(Interner));
}
}
not_supported!("path in pattern position that is not const or variant")
};
let tmp: Place = self.temp(cond_ty.clone(), current, pattern.into())?.into();
let span = pattern.into();
self.lower_const(c.into(), current, tmp.clone(), subst, span, cond_ty.clone())?;
let tmp2: Place = self.temp(TyBuilder::bool(), current, pattern.into())?.into();
self.push_assignment(
current,
tmp2.clone(),
Rvalue::CheckedBinaryOp(
BinOp::Eq,
Operand::Copy(tmp),
Operand::Copy(cond_place),
),
span,
);
let next = self.new_basic_block();
let else_target = current_else.unwrap_or_else(|| self.new_basic_block());
self.set_terminator(
current,
TerminatorKind::SwitchInt {
discr: Operand::Copy(tmp2),
targets: SwitchTargets::static_if(1, next, else_target),
},
span,
);
(next, Some(else_target))
}
},
Pat::Lit(l) => match &self.body.exprs[*l] {

View file

@ -0,0 +1,369 @@
//! Monomorphization of mir, which is used in mir interpreter and const eval.
//!
//! The job of monomorphization is:
//! * Monomorphization proper, i.e. replacing `Option<T>` with `Option<i32>` when the `T := i32`
//!   substitution is provided
//! * Normalizing types, for example replacing the RPIT of other functions called in this body
//!
//! So monomorphization should be run even when the substitution is empty.
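//!
//! A rough illustration (hypothetical function, not from this crate) of what filling a
//! `T := i32` substitution does to the types appearing in a body:
//! ```ignore (pseudo-rust)
//! fn first<T>(v: Vec<T>) -> Option<T> { /* locals typed Vec<T>, Option<T> */ }
//! // after `monomorphized_mir_body` with the substitution `[i32]`, the body's locals are
//! // typed `Vec<i32>` and `Option<i32>`, and associated types/RPITs are normalized away
//! ```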
use std::mem;
use chalk_ir::{
fold::{FallibleTypeFolder, TypeFoldable, TypeSuperFoldable},
ConstData, DebruijnIndex,
};
use hir_def::{DefWithBodyId, GeneralConstId};
use triomphe::Arc;
use crate::{
consteval::unknown_const,
db::HirDatabase,
from_placeholder_idx,
infer::normalize,
method_resolution::lookup_impl_const,
utils::{generics, Generics},
ClosureId, Const, Interner, ProjectionTy, Substitution, TraitEnvironment, Ty, TyKind,
};
use super::{MirBody, MirLowerError, Operand, Rvalue, StatementKind, TerminatorKind};
macro_rules! not_supported {
($x: expr) => {
return Err(MirLowerError::NotSupported(format!($x)))
};
}
struct Filler<'a> {
db: &'a dyn HirDatabase,
trait_env: Arc<TraitEnvironment>,
subst: &'a Substitution,
generics: Option<Generics>,
owner: DefWithBodyId,
}
impl FallibleTypeFolder<Interner> for Filler<'_> {
type Error = MirLowerError;
fn as_dyn(&mut self) -> &mut dyn FallibleTypeFolder<Interner, Error = Self::Error> {
self
}
fn interner(&self) -> Interner {
Interner
}
fn try_fold_ty(
&mut self,
ty: Ty,
outer_binder: DebruijnIndex,
) -> std::result::Result<Ty, Self::Error> {
match ty.kind(Interner) {
TyKind::AssociatedType(id, subst) => {
                // I don't know exactly whether or why this is needed, but it looks like
                // `normalize_ty` likes this kind of associated type.
Ok(TyKind::Alias(chalk_ir::AliasTy::Projection(ProjectionTy {
associated_ty_id: *id,
substitution: subst.clone().try_fold_with(self, outer_binder)?,
}))
.intern(Interner))
}
TyKind::OpaqueType(id, subst) => {
let impl_trait_id = self.db.lookup_intern_impl_trait_id((*id).into());
let subst = subst.clone().try_fold_with(self.as_dyn(), outer_binder)?;
match impl_trait_id {
crate::ImplTraitId::ReturnTypeImplTrait(func, idx) => {
let infer = self.db.infer(func.into());
let filler = &mut Filler {
db: self.db,
owner: self.owner,
trait_env: self.trait_env.clone(),
subst: &subst,
generics: Some(generics(self.db.upcast(), func.into())),
};
filler.try_fold_ty(infer.type_of_rpit[idx].clone(), outer_binder)
}
crate::ImplTraitId::AsyncBlockTypeImplTrait(_, _) => {
not_supported!("async block impl trait");
}
}
}
_ => ty.try_super_fold_with(self.as_dyn(), outer_binder),
}
}
fn try_fold_free_placeholder_const(
&mut self,
_ty: chalk_ir::Ty<Interner>,
idx: chalk_ir::PlaceholderIndex,
_outer_binder: DebruijnIndex,
) -> std::result::Result<chalk_ir::Const<Interner>, Self::Error> {
let x = from_placeholder_idx(self.db, idx);
let Some(idx) = self.generics.as_ref().and_then(|g| g.param_idx(x)) else {
not_supported!("missing idx in generics");
};
Ok(self
.subst
.as_slice(Interner)
.get(idx)
.and_then(|x| x.constant(Interner))
.ok_or_else(|| MirLowerError::GenericArgNotProvided(x, self.subst.clone()))?
.clone())
}
fn try_fold_free_placeholder_ty(
&mut self,
idx: chalk_ir::PlaceholderIndex,
_outer_binder: DebruijnIndex,
) -> std::result::Result<Ty, Self::Error> {
let x = from_placeholder_idx(self.db, idx);
let Some(idx) = self.generics.as_ref().and_then(|g| g.param_idx(x)) else {
not_supported!("missing idx in generics");
};
Ok(self
.subst
.as_slice(Interner)
.get(idx)
.and_then(|x| x.ty(Interner))
.ok_or_else(|| MirLowerError::GenericArgNotProvided(x, self.subst.clone()))?
.clone())
}
fn try_fold_const(
&mut self,
constant: chalk_ir::Const<Interner>,
outer_binder: DebruijnIndex,
) -> Result<chalk_ir::Const<Interner>, Self::Error> {
let next_ty = normalize(
self.db,
self.trait_env.clone(),
constant.data(Interner).ty.clone().try_fold_with(self, outer_binder)?,
);
ConstData { ty: next_ty, value: constant.data(Interner).value.clone() }
.intern(Interner)
.try_super_fold_with(self, outer_binder)
}
}
impl Filler<'_> {
fn fill_ty(&mut self, ty: &mut Ty) -> Result<(), MirLowerError> {
let tmp = mem::replace(ty, TyKind::Error.intern(Interner));
*ty = normalize(
self.db,
self.trait_env.clone(),
tmp.try_fold_with(self, DebruijnIndex::INNERMOST)?,
);
Ok(())
}
fn fill_const(&mut self, c: &mut Const) -> Result<(), MirLowerError> {
let tmp = mem::replace(c, unknown_const(c.data(Interner).ty.clone()));
*c = tmp.try_fold_with(self, DebruijnIndex::INNERMOST)?;
Ok(())
}
fn fill_subst(&mut self, ty: &mut Substitution) -> Result<(), MirLowerError> {
let tmp = mem::replace(ty, Substitution::empty(Interner));
*ty = tmp.try_fold_with(self, DebruijnIndex::INNERMOST)?;
Ok(())
}
fn fill_operand(&mut self, op: &mut Operand) -> Result<(), MirLowerError> {
match op {
Operand::Constant(c) => {
match &c.data(Interner).value {
chalk_ir::ConstValue::BoundVar(b) => {
let resolved = self
.subst
.as_slice(Interner)
.get(b.index)
.ok_or_else(|| {
MirLowerError::GenericArgNotProvided(
self.generics
.as_ref()
.and_then(|x| x.iter().nth(b.index))
.unwrap()
.0,
self.subst.clone(),
)
})?
.assert_const_ref(Interner);
*c = resolved.clone();
}
chalk_ir::ConstValue::InferenceVar(_)
| chalk_ir::ConstValue::Placeholder(_) => {}
chalk_ir::ConstValue::Concrete(cc) => match &cc.interned {
crate::ConstScalar::UnevaluatedConst(const_id, subst) => {
let mut const_id = *const_id;
let mut subst = subst.clone();
self.fill_subst(&mut subst)?;
if let GeneralConstId::ConstId(c) = const_id {
let (c, s) = lookup_impl_const(
self.db,
self.db.trait_environment_for_body(self.owner),
c,
subst,
);
const_id = GeneralConstId::ConstId(c);
subst = s;
}
let result =
self.db.const_eval(const_id.into(), subst).map_err(|e| {
let name = const_id.name(self.db.upcast());
MirLowerError::ConstEvalError(name, Box::new(e))
})?;
*c = result;
}
crate::ConstScalar::Bytes(_, _) | crate::ConstScalar::Unknown => (),
},
}
self.fill_const(c)?;
}
Operand::Copy(_) | Operand::Move(_) | Operand::Static(_) => (),
}
Ok(())
}
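    // The `UnevaluatedConst` branch above is the counterpart of the lowering code that
    // defers constants with substitutions: once the substitution is concrete here, the
    // const is routed through `lookup_impl_const` so trait-impl constants resolve to the
    // right impl, and only then evaluated with `const_eval`.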
fn fill_body(&mut self, body: &mut MirBody) -> Result<(), MirLowerError> {
for (_, l) in body.locals.iter_mut() {
self.fill_ty(&mut l.ty)?;
}
for (_, bb) in body.basic_blocks.iter_mut() {
for statement in &mut bb.statements {
match &mut statement.kind {
StatementKind::Assign(_, r) => match r {
Rvalue::Aggregate(ak, ops) => {
for op in &mut **ops {
self.fill_operand(op)?;
}
match ak {
super::AggregateKind::Array(ty)
| super::AggregateKind::Tuple(ty)
| super::AggregateKind::Closure(ty) => self.fill_ty(ty)?,
super::AggregateKind::Adt(_, subst) => self.fill_subst(subst)?,
super::AggregateKind::Union(_, _) => (),
}
}
Rvalue::ShallowInitBox(_, ty) | Rvalue::ShallowInitBoxWithAlloc(ty) => {
self.fill_ty(ty)?;
}
Rvalue::Use(op) => {
self.fill_operand(op)?;
}
Rvalue::Repeat(op, len) => {
self.fill_operand(op)?;
self.fill_const(len)?;
}
Rvalue::Ref(_, _)
| Rvalue::Len(_)
| Rvalue::Cast(_, _, _)
| Rvalue::CheckedBinaryOp(_, _, _)
| Rvalue::UnaryOp(_, _)
| Rvalue::Discriminant(_)
| Rvalue::CopyForDeref(_) => (),
},
StatementKind::Deinit(_)
| StatementKind::StorageLive(_)
| StatementKind::StorageDead(_)
| StatementKind::Nop => (),
}
}
if let Some(terminator) = &mut bb.terminator {
match &mut terminator.kind {
TerminatorKind::Call { func, args, .. } => {
self.fill_operand(func)?;
for op in &mut **args {
self.fill_operand(op)?;
}
}
TerminatorKind::SwitchInt { discr, .. } => {
self.fill_operand(discr)?;
}
TerminatorKind::Goto { .. }
| TerminatorKind::Resume
| TerminatorKind::Abort
| TerminatorKind::Return
| TerminatorKind::Unreachable
| TerminatorKind::Drop { .. }
| TerminatorKind::DropAndReplace { .. }
| TerminatorKind::Assert { .. }
| TerminatorKind::Yield { .. }
| TerminatorKind::GeneratorDrop
| TerminatorKind::FalseEdge { .. }
| TerminatorKind::FalseUnwind { .. } => (),
}
}
}
Ok(())
}
}
pub fn monomorphized_mir_body_query(
db: &dyn HirDatabase,
owner: DefWithBodyId,
subst: Substitution,
trait_env: Arc<crate::TraitEnvironment>,
) -> Result<Arc<MirBody>, MirLowerError> {
let g_def = match owner {
DefWithBodyId::FunctionId(f) => Some(f.into()),
DefWithBodyId::StaticId(_) => None,
DefWithBodyId::ConstId(f) => Some(f.into()),
DefWithBodyId::VariantId(f) => Some(f.into()),
};
let generics = g_def.map(|g_def| generics(db.upcast(), g_def));
let filler = &mut Filler { db, subst: &subst, trait_env, generics, owner };
let body = db.mir_body(owner)?;
let mut body = (*body).clone();
filler.fill_body(&mut body)?;
Ok(Arc::new(body))
}
pub fn monomorphized_mir_body_recover(
_: &dyn HirDatabase,
_: &[String],
_: &DefWithBodyId,
_: &Substitution,
_: &Arc<crate::TraitEnvironment>,
) -> Result<Arc<MirBody>, MirLowerError> {
return Err(MirLowerError::Loop);
}
pub fn monomorphized_mir_body_for_closure_query(
db: &dyn HirDatabase,
closure: ClosureId,
subst: Substitution,
trait_env: Arc<crate::TraitEnvironment>,
) -> Result<Arc<MirBody>, MirLowerError> {
let (owner, _) = db.lookup_intern_closure(closure.into());
let g_def = match owner {
DefWithBodyId::FunctionId(f) => Some(f.into()),
DefWithBodyId::StaticId(_) => None,
DefWithBodyId::ConstId(f) => Some(f.into()),
DefWithBodyId::VariantId(f) => Some(f.into()),
};
let generics = g_def.map(|g_def| generics(db.upcast(), g_def));
let filler = &mut Filler { db, subst: &subst, trait_env, generics, owner };
let body = db.mir_body_for_closure(closure)?;
let mut body = (*body).clone();
filler.fill_body(&mut body)?;
Ok(Arc::new(body))
}
// FIXME: remove this function. Monomorphization is a time-consuming job and should always be a query.
pub fn monomorphize_mir_body_bad(
db: &dyn HirDatabase,
mut body: MirBody,
subst: Substitution,
trait_env: Arc<crate::TraitEnvironment>,
) -> Result<MirBody, MirLowerError> {
let owner = body.owner;
let g_def = match owner {
DefWithBodyId::FunctionId(f) => Some(f.into()),
DefWithBodyId::StaticId(_) => None,
DefWithBodyId::ConstId(f) => Some(f.into()),
DefWithBodyId::VariantId(f) => Some(f.into()),
};
let generics = g_def.map(|g_def| generics(db.upcast(), g_def));
let filler = &mut Filler { db, subst: &subst, trait_env, generics, owner };
filler.fill_body(&mut body)?;
Ok(body)
}

View file

@ -437,6 +437,6 @@ impl<'a> MirPrettyCtx<'a> {
}
fn hir_display<T: HirDisplay>(&self, ty: &'a T) -> impl Display + 'a {
ty.display(self.db).with_closure_style(ClosureStyle::ClosureWithId)
ty.display(self.db).with_closure_style(ClosureStyle::ClosureWithSubst)
}
}

View file

@ -140,6 +140,7 @@ fn infer_path_qualified_macros_expanded() {
fn expr_macro_def_expanded_in_various_places() {
check_infer(
r#"
//- minicore: iterator
macro spam() {
1isize
}
@ -195,8 +196,17 @@ fn expr_macro_def_expanded_in_various_places() {
!0..6 '1isize': isize
39..442 '{ ...!(); }': ()
73..94 'spam!(...am!())': {unknown}
100..119 'for _ ...!() {}': fn into_iter<isize>(isize) -> <isize as IntoIterator>::IntoIter
100..119 'for _ ...!() {}': IntoIterator::IntoIter<isize>
100..119 'for _ ...!() {}': !
100..119 'for _ ...!() {}': IntoIterator::IntoIter<isize>
100..119 'for _ ...!() {}': &mut IntoIterator::IntoIter<isize>
100..119 'for _ ...!() {}': fn next<IntoIterator::IntoIter<isize>>(&mut IntoIterator::IntoIter<isize>) -> Option<<IntoIterator::IntoIter<isize> as Iterator>::Item>
100..119 'for _ ...!() {}': Option<Iterator::Item<IntoIterator::IntoIter<isize>>>
100..119 'for _ ...!() {}': ()
104..105 '_': {unknown}
100..119 'for _ ...!() {}': ()
100..119 'for _ ...!() {}': ()
104..105 '_': Iterator::Item<IntoIterator::IntoIter<isize>>
117..119 '{}': ()
124..134 '|| spam!()': impl Fn() -> isize
140..156 'while ...!() {}': ()
@ -221,6 +231,7 @@ fn expr_macro_def_expanded_in_various_places() {
fn expr_macro_rules_expanded_in_various_places() {
check_infer(
r#"
//- minicore: iterator
macro_rules! spam {
() => (1isize);
}
@ -276,8 +287,17 @@ fn expr_macro_rules_expanded_in_various_places() {
!0..6 '1isize': isize
53..456 '{ ...!(); }': ()
87..108 'spam!(...am!())': {unknown}
114..133 'for _ ...!() {}': fn into_iter<isize>(isize) -> <isize as IntoIterator>::IntoIter
114..133 'for _ ...!() {}': IntoIterator::IntoIter<isize>
114..133 'for _ ...!() {}': !
114..133 'for _ ...!() {}': IntoIterator::IntoIter<isize>
114..133 'for _ ...!() {}': &mut IntoIterator::IntoIter<isize>
114..133 'for _ ...!() {}': fn next<IntoIterator::IntoIter<isize>>(&mut IntoIterator::IntoIter<isize>) -> Option<<IntoIterator::IntoIter<isize> as Iterator>::Item>
114..133 'for _ ...!() {}': Option<Iterator::Item<IntoIterator::IntoIter<isize>>>
114..133 'for _ ...!() {}': ()
118..119 '_': {unknown}
114..133 'for _ ...!() {}': ()
114..133 'for _ ...!() {}': ()
118..119 '_': Iterator::Item<IntoIterator::IntoIter<isize>>
131..133 '{}': ()
138..148 '|| spam!()': impl Fn() -> isize
154..170 'while ...!() {}': ()

View file

@ -327,6 +327,7 @@ fn diverging_expression_2() {
fn diverging_expression_3_break() {
check_infer_with_mismatches(
r"
//- minicore: iterator
//- /main.rs
fn test1() {
// should give type mismatch
@ -360,6 +361,15 @@ fn diverging_expression_3_break() {
97..343 '{ ...; }; }': ()
140..141 'x': u32
149..175 '{ for ...; }; }': u32
151..172 'for a ...eak; }': fn into_iter<{unknown}>({unknown}) -> <{unknown} as IntoIterator>::IntoIter
151..172 'for a ...eak; }': {unknown}
151..172 'for a ...eak; }': !
151..172 'for a ...eak; }': {unknown}
151..172 'for a ...eak; }': &mut {unknown}
151..172 'for a ...eak; }': fn next<{unknown}>(&mut {unknown}) -> Option<<{unknown} as Iterator>::Item>
151..172 'for a ...eak; }': Option<{unknown}>
151..172 'for a ...eak; }': ()
151..172 'for a ...eak; }': ()
151..172 'for a ...eak; }': ()
155..156 'a': {unknown}
160..161 'b': {unknown}
@ -367,12 +377,30 @@ fn diverging_expression_3_break() {
164..169 'break': !
226..227 'x': u32
235..253 '{ for ... {}; }': u32
237..250 'for a in b {}': fn into_iter<{unknown}>({unknown}) -> <{unknown} as IntoIterator>::IntoIter
237..250 'for a in b {}': {unknown}
237..250 'for a in b {}': !
237..250 'for a in b {}': {unknown}
237..250 'for a in b {}': &mut {unknown}
237..250 'for a in b {}': fn next<{unknown}>(&mut {unknown}) -> Option<<{unknown} as Iterator>::Item>
237..250 'for a in b {}': Option<{unknown}>
237..250 'for a in b {}': ()
237..250 'for a in b {}': ()
237..250 'for a in b {}': ()
241..242 'a': {unknown}
246..247 'b': {unknown}
248..250 '{}': ()
304..305 'x': u32
313..340 '{ for ...; }; }': u32
315..337 'for a ...urn; }': fn into_iter<{unknown}>({unknown}) -> <{unknown} as IntoIterator>::IntoIter
315..337 'for a ...urn; }': {unknown}
315..337 'for a ...urn; }': !
315..337 'for a ...urn; }': {unknown}
315..337 'for a ...urn; }': &mut {unknown}
315..337 'for a ...urn; }': fn next<{unknown}>(&mut {unknown}) -> Option<<{unknown} as Iterator>::Item>
315..337 'for a ...urn; }': Option<{unknown}>
315..337 'for a ...urn; }': ()
315..337 'for a ...urn; }': ()
315..337 'for a ...urn; }': ()
319..320 'a': {unknown}
324..325 'b': {unknown}

View file

@ -6,6 +6,7 @@ use super::{check, check_infer, check_infer_with_mismatches, check_no_mismatches
fn infer_pattern() {
check_infer(
r#"
//- minicore: iterator
fn test(x: &i32) {
let y = x;
let &z = x;
@ -46,6 +47,15 @@ fn infer_pattern() {
82..94 '(1, "hello")': (i32, &str)
83..84 '1': i32
86..93 '"hello"': &str
101..151 'for (e... }': fn into_iter<{unknown}>({unknown}) -> <{unknown} as IntoIterator>::IntoIter
101..151 'for (e... }': {unknown}
101..151 'for (e... }': !
101..151 'for (e... }': {unknown}
101..151 'for (e... }': &mut {unknown}
101..151 'for (e... }': fn next<{unknown}>(&mut {unknown}) -> Option<<{unknown} as Iterator>::Item>
101..151 'for (e... }': Option<({unknown}, {unknown})>
101..151 'for (e... }': ()
101..151 'for (e... }': ()
101..151 'for (e... }': ()
105..111 '(e, f)': ({unknown}, {unknown})
106..107 'e': {unknown}

View file

@ -246,6 +246,7 @@ fn infer_std_crash_5() {
// taken from rustc
check_infer(
r#"
//- minicore: iterator
fn extra_compiler_flags() {
for content in doesnt_matter {
let name = if doesnt_matter {
@ -264,6 +265,15 @@ fn infer_std_crash_5() {
"#,
expect![[r#"
26..322 '{ ... } }': ()
32..320 'for co... }': fn into_iter<{unknown}>({unknown}) -> <{unknown} as IntoIterator>::IntoIter
32..320 'for co... }': {unknown}
32..320 'for co... }': !
32..320 'for co... }': {unknown}
32..320 'for co... }': &mut {unknown}
32..320 'for co... }': fn next<{unknown}>(&mut {unknown}) -> Option<<{unknown} as Iterator>::Item>
32..320 'for co... }': Option<{unknown}>
32..320 'for co... }': ()
32..320 'for co... }': ()
32..320 'for co... }': ()
36..43 'content': {unknown}
47..60 'doesnt_matter': {unknown}
@ -1215,6 +1225,7 @@ fn mamba(a: U32!(), p: u32) -> u32 {
fn for_loop_block_expr_iterable() {
check_infer(
r#"
//- minicore: iterator
fn test() {
for _ in { let x = 0; } {
let y = 0;
@ -1223,8 +1234,17 @@ fn test() {
"#,
expect![[r#"
10..68 '{ ... } }': ()
16..66 'for _ ... }': fn into_iter<()>(()) -> <() as IntoIterator>::IntoIter
16..66 'for _ ... }': IntoIterator::IntoIter<()>
16..66 'for _ ... }': !
16..66 'for _ ... }': IntoIterator::IntoIter<()>
16..66 'for _ ... }': &mut IntoIterator::IntoIter<()>
16..66 'for _ ... }': fn next<IntoIterator::IntoIter<()>>(&mut IntoIterator::IntoIter<()>) -> Option<<IntoIterator::IntoIter<()> as Iterator>::Item>
16..66 'for _ ... }': Option<Iterator::Item<IntoIterator::IntoIter<()>>>
16..66 'for _ ... }': ()
20..21 '_': {unknown}
16..66 'for _ ... }': ()
16..66 'for _ ... }': ()
20..21 '_': Iterator::Item<IntoIterator::IntoIter<()>>
25..39 '{ let x = 0; }': ()
31..32 'x': i32
35..36 '0': i32

View file

@ -62,7 +62,7 @@ use hir_ty::{
consteval::{try_const_usize, unknown_const_as_generic, ConstEvalError, ConstExt},
diagnostics::BodyValidationDiagnostic,
display::HexifiedConst,
layout::{layout_of_ty, Layout, LayoutError, RustcEnumVariantIdx, TagEncoding},
layout::{Layout, LayoutError, RustcEnumVariantIdx, TagEncoding},
method_resolution::{self, TyFingerprint},
mir::{self, interpret_mir},
primitive::UintTy,
@ -961,8 +961,8 @@ impl Field {
Type::new(db, var_id, ty)
}
pub fn layout(&self, db: &dyn HirDatabase) -> Result<Layout, LayoutError> {
layout_of_ty(db, &self.ty(db).ty, self.parent.module(db).krate().into())
pub fn layout(&self, db: &dyn HirDatabase) -> Result<Arc<Layout>, LayoutError> {
db.layout_of_ty(self.ty(db).ty.clone(), self.parent.module(db).krate().into())
}
pub fn parent_def(&self, _db: &dyn HirDatabase) -> VariantDef {
@ -1135,7 +1135,7 @@ impl Enum {
self.variants(db).iter().any(|v| !matches!(v.kind(db), StructKind::Unit))
}
pub fn layout(self, db: &dyn HirDatabase) -> Result<(Layout, usize), LayoutError> {
pub fn layout(self, db: &dyn HirDatabase) -> Result<(Arc<Layout>, usize), LayoutError> {
let layout = Adt::from(self).layout(db)?;
let tag_size =
if let layout::Variants::Multiple { tag, tag_encoding, .. } = &layout.variants {
@ -1219,11 +1219,11 @@ impl Variant {
let parent_enum = self.parent_enum(db);
let (parent_layout, tag_size) = parent_enum.layout(db)?;
Ok((
match parent_layout.variants {
match &parent_layout.variants {
layout::Variants::Multiple { variants, .. } => {
variants[RustcEnumVariantIdx(self.id)].clone()
}
_ => parent_layout,
_ => (*parent_layout).clone(),
},
tag_size,
))
@ -1255,7 +1255,7 @@ impl Adt {
})
}
pub fn layout(self, db: &dyn HirDatabase) -> Result<Layout, LayoutError> {
pub fn layout(self, db: &dyn HirDatabase) -> Result<Arc<Layout>, LayoutError> {
if db.generic_params(self.into()).iter().count() != 0 {
return Err(LayoutError::HasPlaceholder);
}
@ -1949,7 +1949,11 @@ impl Function {
db: &dyn HirDatabase,
span_formatter: impl Fn(FileId, TextRange) -> String,
) -> String {
let body = match db.mir_body(self.id.into()) {
let body = match db.monomorphized_mir_body(
self.id.into(),
Substitution::empty(Interner),
db.trait_environment(self.id.into()),
) {
Ok(body) => body,
Err(e) => {
let mut r = String::new();
@ -1957,8 +1961,7 @@ impl Function {
return r;
}
};
let (result, stdout, stderr) =
interpret_mir(db, &body, Substitution::empty(Interner), false);
let (result, stdout, stderr) = interpret_mir(db, &body, false);
let mut text = match result {
Ok(_) => "pass".to_string(),
Err(e) => {
@ -4240,8 +4243,8 @@ impl Type {
.collect()
}
pub fn layout(&self, db: &dyn HirDatabase) -> Result<Layout, LayoutError> {
layout_of_ty(db, &self.ty, self.env.krate)
pub fn layout(&self, db: &dyn HirDatabase) -> Result<Arc<Layout>, LayoutError> {
db.layout_of_ty(self.ty.clone(), self.env.krate)
}
}
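The layout accessors now go through the `layout_of_ty` query and return a shared `Arc<Layout>`, so call sites only change in how the result is owned. A minimal hypothetical call site (assuming `ty: Type` and `db: &dyn HirDatabase` are in scope):

    let layout = ty.layout(db)?;                   // Arc<Layout>, cached and shared by the query
    let size_in_bytes = layout.size.bytes_usize(); // field access derefs through the Arc as before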

View file

@ -4728,6 +4728,7 @@ const fn $0fun_name() {
check_assist(
extract_function,
r#"
//- minicore: iterator
fn foo() {
let mut x = 5;
for _ in 0..10 {
@ -4751,6 +4752,7 @@ fn $0fun_name(x: &mut i32) {
check_assist(
extract_function,
r#"
//- minicore: iterator
fn foo() {
for _ in 0..10 {
let mut x = 5;
@ -4774,6 +4776,7 @@ fn $0fun_name(mut x: i32) {
check_assist(
extract_function,
r#"
//- minicore: iterator
fn foo() {
loop {
let mut x = 5;

View file

@ -329,6 +329,7 @@ fn foo() {
fn complete_label_in_for_iterable() {
check(
r#"
//- minicore: iterator
fn foo() {
'outer: for _ in [{ 'inner: loop { break '$0 } }] {}
}

View file

@ -124,12 +124,14 @@ fn foo() {
#[test]
fn value_break_in_for_loop() {
// FIXME: the error is correct, but the message is terrible
check_diagnostics(
r#"
//- minicore: iterator
fn test() {
for _ in [()] {
break 3;
// ^^^^^^^ error: can't break with a value in this position
// ^ error: expected (), found i32
}
}
"#,

View file

@ -1137,5 +1137,5 @@ fn benchmark_syntax_highlighting_parser() {
.filter(|it| it.highlight.tag == HlTag::Symbol(SymbolKind::Function))
.count()
};
assert_eq!(hash, 1170);
assert_eq!(hash, 1169);
}