Auto merge of #15419 - HKalbasi:mir, r=HKalbasi

Add MIR lowering support for tuple destructuring assignment

And some other changes in MIR eval.
bors 2023-08-08 21:02:57 +00:00
commit fc2f90e0e5
7 changed files with 247 additions and 27 deletions
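
For context: "tuple destructuring assignment" refers to assignments whose left-hand side is a tuple of places, as in `(a, b) = (b, a)`. The snippet below is a plain-Rust sketch mirroring the new consteval tests; it is not part of the diff and assumes nothing beyond stable Rust.

struct Point { x: i32, y: i32 }

fn main() {
    // Swap two locals through a tuple assignment (no temporary needed).
    let (mut a, mut b) = (2, 5);
    (a, b) = (b, a);
    assert_eq!(a * 10 + b, 52);

    // Field places and `_` are also allowed on the left-hand side.
    let mut p = Point { x: 5, y: 6 };
    (p.x, _) = (p.y, p.x);
    assert_eq!(p.x * 10 + p.y, 66);
}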

View file

@@ -130,6 +130,7 @@ impl ChangeFixture {
let mut default_crate_root: Option<FileId> = None;
let mut default_target_data_layout: Option<String> = None;
let mut default_cfg = CfgOptions::default();
let mut default_env = Env::new_for_test_fixture();
let mut file_set = FileSet::default();
let mut current_source_root_kind = SourceRootKind::Local;
@@ -200,6 +201,7 @@ impl ChangeFixture {
assert!(default_crate_root.is_none());
default_crate_root = Some(file_id);
default_cfg = meta.cfg;
default_env.extend(meta.env.iter().map(|(x, y)| (x.to_owned(), y.to_owned())));
default_target_data_layout = meta.target_data_layout;
}
@@ -220,7 +222,7 @@ impl ChangeFixture {
None,
default_cfg,
Default::default(),
Env::new_for_test_fixture(),
default_env,
false,
CrateOrigin::Local { repo: None, name: None },
default_target_data_layout

View file

@@ -686,6 +686,12 @@ impl fmt::Display for Edition {
}
}
impl Extend<(String, String)> for Env {
fn extend<T: IntoIterator<Item = (String, String)>>(&mut self, iter: T) {
self.entries.extend(iter);
}
}
impl FromIterator<(String, String)> for Env {
fn from_iter<T: IntoIterator<Item = (String, String)>>(iter: T) -> Self {
Env { entries: FromIterator::from_iter(iter) }

View file

@@ -1203,6 +1203,27 @@ fn destructing_assignment() {
"#,
5,
);
check_number(
r#"
const GOAL: u8 = {
let (mut a, mut b) = (2, 5);
(a, b) = (b, a);
a * 10 + b
};
"#,
52,
);
check_number(
r#"
struct Point { x: i32, y: i32 }
const GOAL: i32 = {
let mut p = Point { x: 5, y: 6 };
(p.x, _) = (p.y, p.x);
p.x * 10 + p.y
};
"#,
66,
);
}
#[test]
@@ -1432,6 +1453,30 @@ fn from_trait() {
);
}
#[test]
fn closure_clone() {
check_number(
r#"
//- minicore: clone, fn
struct S(u8);
impl Clone for S {
fn clone(&self) -> S {
S(self.0 + 5)
}
}
const GOAL: u8 = {
let s = S(3);
let cl = move || s;
let cl = cl.clone();
cl().0
};
"#,
8,
);
}
#[test]
fn builtin_derive_macro() {
check_number(
@@ -2396,14 +2441,14 @@ fn const_loop() {
fn const_transfer_memory() {
check_number(
r#"
//- minicore: slice, index, coerce_unsized
//- minicore: slice, index, coerce_unsized, option
const A1: &i32 = &1;
const A2: &i32 = &10;
const A3: [&i32; 3] = [&1, &2, &100];
const A4: (i32, &i32) = (1, &1000);
const GOAL: i32 = *A1 + *A2 + *A3[2] + *A4.1;
const A4: (i32, &i32, Option<&i32>) = (1, &1000, Some(&10000));
const GOAL: i32 = *A1 + *A2 + *A3[2] + *A4.1 + *A4.2.unwrap_or(&5);
"#,
1111,
11111,
);
}

View file

@@ -2007,7 +2007,28 @@ impl Evaluator<'_> {
}
}
AdtId::UnionId(_) => (),
AdtId::EnumId(_) => (),
AdtId::EnumId(e) => {
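// Determine which variant the bytes at `addr` encode, then patch the pointers in each of that variant's fields.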
if let Some((variant, layout)) = detect_variant_from_bytes(
&layout,
self.db,
self.trait_env.clone(),
self.read_memory(addr, layout.size.bytes_usize())?,
e,
) {
let ev = EnumVariantId { parent: e, local_id: variant };
for (i, (_, ty)) in self.db.field_types(ev.into()).iter().enumerate() {
let offset = layout.fields.offset(i).bytes_usize();
let ty = ty.clone().substitute(Interner, subst);
self.patch_addresses(
patch_map,
old_vtable,
addr.offset(offset),
&ty,
locals,
)?;
}
}
}
},
TyKind::Tuple(_, subst) => {
for (id, ty) in subst.iter(Interner).enumerate() {

View file

@@ -136,7 +136,10 @@ impl Evaluator<'_> {
not_supported!("wrong generic arg kind for clone");
};
// Clone has special impls for tuples, function pointers, and closures
if matches!(self_ty.kind(Interner), TyKind::Function(_) | TyKind::Tuple(..)) {
if matches!(
self_ty.kind(Interner),
TyKind::Function(_) | TyKind::Tuple(..) | TyKind::Closure(..)
) {
self.exec_clone(def, args, self_ty.clone(), locals, destination, span)?;
return Ok(true);
}
@@ -167,32 +170,26 @@ impl Evaluator<'_> {
return destination
.write_from_interval(self, Interval { addr, size: destination.size });
}
TyKind::Closure(id, subst) => {
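// A closure is cloned by cloning each captured value into the corresponding field of the destination.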
let [arg] = args else {
not_supported!("wrong arg count for clone");
};
let addr = Address::from_bytes(arg.get(self)?)?;
let (closure_owner, _) = self.db.lookup_intern_closure((*id).into());
let infer = self.db.infer(closure_owner);
let (captures, _) = infer.closure_info(id);
let layout = self.layout(&self_ty)?;
let ty_iter = captures.iter().map(|c| c.ty(subst));
self.exec_clone_for_fields(ty_iter, layout, addr, def, locals, destination, span)?;
}
TyKind::Tuple(_, subst) => {
let [arg] = args else {
not_supported!("wrong arg count for clone");
};
let addr = Address::from_bytes(arg.get(self)?)?;
let layout = self.layout(&self_ty)?;
for (i, ty) in subst.iter(Interner).enumerate() {
let ty = ty.assert_ty_ref(Interner);
let size = self.layout(ty)?.size.bytes_usize();
let tmp = self.heap_allocate(self.ptr_size(), self.ptr_size())?;
let arg = IntervalAndTy {
interval: Interval { addr: tmp, size: self.ptr_size() },
ty: TyKind::Ref(Mutability::Not, static_lifetime(), ty.clone())
.intern(Interner),
};
let offset = layout.fields.offset(i).bytes_usize();
self.write_memory(tmp, &addr.offset(offset).to_bytes())?;
self.exec_clone(
def,
&[arg],
ty.clone(),
locals,
destination.slice(offset..offset + size),
span,
)?;
}
let ty_iter = subst.iter(Interner).map(|ga| ga.assert_ty_ref(Interner).clone());
self.exec_clone_for_fields(ty_iter, layout, addr, def, locals, destination, span)?;
}
_ => {
self.exec_fn_with_args(
@@ -209,6 +206,37 @@ impl Evaluator<'_> {
Ok(())
}
fn exec_clone_for_fields(
&mut self,
ty_iter: impl Iterator<Item = Ty>,
layout: Arc<Layout>,
addr: Address,
def: FunctionId,
locals: &Locals,
destination: Interval,
span: MirSpan,
) -> Result<()> {
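// For each field: stash `&field` in a temporary, invoke that field's own `clone`, and write the result into the matching slice of `destination`.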
for (i, ty) in ty_iter.enumerate() {
let size = self.layout(&ty)?.size.bytes_usize();
let tmp = self.heap_allocate(self.ptr_size(), self.ptr_size())?;
let arg = IntervalAndTy {
interval: Interval { addr: tmp, size: self.ptr_size() },
ty: TyKind::Ref(Mutability::Not, static_lifetime(), ty.clone()).intern(Interner),
};
let offset = layout.fields.offset(i).bytes_usize();
self.write_memory(tmp, &addr.offset(offset).to_bytes())?;
self.exec_clone(
def,
&[arg],
ty,
locals,
destination.slice(offset..offset + size),
span,
)?;
}
Ok(())
}
fn exec_alloc_fn(
&mut self,
alloc_fn: &str,
@@ -473,6 +501,38 @@ impl Evaluator<'_> {
self.write_memory_using_ref(destination.addr, destination.size)?.fill(0);
Ok(())
}
"getenv" => {
let [name] = args else {
return Err(MirEvalError::TypeError("getenv args are not provided"));
};
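// Read the NUL-terminated variable name out of interpreter memory.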
let mut name_buf = vec![];
let name = {
let mut index = Address::from_bytes(name.get(self)?)?;
loop {
let byte = self.read_memory(index, 1)?[0];
index = index.offset(1);
if byte == 0 {
break;
}
name_buf.push(byte);
}
String::from_utf8_lossy(&name_buf)
};
let value = self.db.crate_graph()[self.crate_id].env.get(&name);
match value {
None => {
// Write a null pointer to signal failure
self.write_memory_using_ref(destination.addr, destination.size)?.fill(0);
}
Some(mut value) => {
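// NUL-terminate the value, copy it into a fresh allocation, and return a pointer to it.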
value.push('\0');
let addr = self.heap_allocate(value.len(), 1)?;
self.write_memory(addr, value.as_bytes())?;
self.write_memory(destination.addr, &addr.to_bytes())?;
}
}
Ok(())
}
_ => not_supported!("unknown external function {as_str}"),
}
}

View file

@@ -729,6 +729,48 @@ fn main() {
)
}
#[test]
fn posix_getenv() {
check_pass(
r#"
//- /main.rs env:foo=bar
type c_char = u8;
extern "C" {
pub fn getenv(s: *const c_char) -> *mut c_char;
}
fn should_not_reach() {
_ // FIXME: replace this function with panic when that works
}
fn main() {
let result = getenv(b"foo\0" as *const _);
if *result != b'b' {
should_not_reach();
}
let result = (result as usize + 1) as *const c_char;
if *result != b'a' {
should_not_reach();
}
let result = (result as usize + 1) as *const c_char;
if *result != b'r' {
should_not_reach();
}
let result = (result as usize + 1) as *const c_char;
if *result != 0 {
should_not_reach();
}
let result = getenv(b"not found\0" as *const _);
if result as usize != 0 {
should_not_reach();
}
}
"#,
);
}
#[test]
fn posix_tls() {
check_pass(

View file

@@ -1244,6 +1244,41 @@ impl<'ctx> MirLowerCtx<'ctx> {
}
}
fn lower_destructing_assignment(
&mut self,
mut current: BasicBlockId,
lhs: ExprId,
rhs: Place,
span: MirSpan,
) -> Result<Option<BasicBlockId>> {
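// Walk the left-hand side: tuples recurse element-wise into projections of `rhs`, `_` consumes nothing, and any other expression is lowered to a place and assigned from `rhs` directly.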
match &self.body.exprs[lhs] {
Expr::Tuple { exprs, is_assignee_expr: _ } => {
for (i, expr) in exprs.iter().enumerate() {
let Some(c) = self.lower_destructing_assignment(
current,
*expr,
rhs.project(ProjectionElem::TupleOrClosureField(i)),
span,
)? else {
return Ok(None);
};
current = c;
}
Ok(Some(current))
}
Expr::Underscore => Ok(Some(current)),
_ => {
let Some((lhs_place, current)) =
self.lower_expr_as_place(current, lhs, false)?
else {
return Ok(None);
};
self.push_assignment(current, lhs_place, Operand::Copy(rhs).into(), span);
Ok(Some(current))
}
}
}
fn lower_assignment(
&mut self,
current: BasicBlockId,
@@ -1259,6 +1294,15 @@ impl<'ctx> MirLowerCtx<'ctx> {
if matches!(&self.body.exprs[lhs], Expr::Underscore) {
return Ok(Some(current));
}
if matches!(
&self.body.exprs[lhs],
Expr::Tuple { .. } | Expr::RecordLit { .. } | Expr::Call { .. }
) {
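// Materialize the RHS into a temporary first, so the destructuring can read its elements through field projections.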
let temp = self.temp(self.expr_ty_after_adjustments(rhs), current, rhs.into())?;
let temp = Place::from(temp);
self.push_assignment(current, temp.clone(), rhs_op.into(), span);
return self.lower_destructing_assignment(current, lhs, temp, span);
}
let Some((lhs_place, current)) =
self.lower_expr_as_place(current, lhs, false)?
else {