From 7c1be82cd9abe195205bee476c546a7645632af3 Mon Sep 17 00:00:00 2001
From: 6d7a
Date: Thu, 21 Mar 2024 22:57:21 +0100
Subject: [PATCH 1/2] fix: Prevent stack overflow in recursive const types
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

In the evaluation of const values of recursive types, certain
declarations could cause an endless call loop within the interpreter
(hir-ty’s create_memory_map), which would lead to a stack overflow.
This commit adds a check that prevents values that themselves contain
an address (such as TyKind::Ref) from being allocated at the address
they contain. The commit also adds a test for this edge case.
---
 crates/hir-ty/src/consteval/tests.rs | 27 +++++++++++++++++++++++++++
 crates/hir-ty/src/mir/eval.rs        |  9 ++++++++-
 2 files changed, 35 insertions(+), 1 deletion(-)

diff --git a/crates/hir-ty/src/consteval/tests.rs b/crates/hir-ty/src/consteval/tests.rs
index 98384c4749..9199663be1 100644
--- a/crates/hir-ty/src/consteval/tests.rs
+++ b/crates/hir-ty/src/consteval/tests.rs
@@ -2825,3 +2825,30 @@ fn unsized_local() {
         |e| matches!(e, ConstEvalError::MirLowerError(MirLowerError::UnsizedTemporary(_))),
     );
 }
+
+#[test]
+fn recursive_adt() {
+    check_answer(
+        r#"
+        //- minicore: coerce_unsized, index, slice
+        pub enum TagTree {
+            Leaf,
+            Choice(&'static [TagTree]),
+        }
+        const GOAL: TagTree = {
+            const TAG_TREE: TagTree = TagTree::Choice(&[
+                {
+                    const VARIANT_TAG_TREE: TagTree = TagTree::Choice(
+                        &[
+                            TagTree::Leaf,
+                        ],
+                    );
+                    VARIANT_TAG_TREE
+                },
+            ]);
+            TAG_TREE
+        };
+        "#,
+        |b, _| assert_eq!(b[0] % 8, 0),
+    );
+}
diff --git a/crates/hir-ty/src/mir/eval.rs b/crates/hir-ty/src/mir/eval.rs
index fd98141af6..90b8092b4b 100644
--- a/crates/hir-ty/src/mir/eval.rs
+++ b/crates/hir-ty/src/mir/eval.rs
@@ -1710,7 +1710,14 @@ impl Evaluator<'_> {
             }
             ConstScalar::Unknown => not_supported!("evaluating unknown const"),
         };
-        let patch_map = memory_map.transform_addresses(|b, align| {
+        let patch_map = memory_map.transform_addresses(|b, mut align| {
+            // Prevent recursive addresses in ADTs and slices
+            match ((&b[..b.len() / 2]).try_into(), HEAP_OFFSET.checked_add(align)) {
+                (Ok(arr), Some(new_addr)) if usize::from_le_bytes(arr) == new_addr => {
+                    align *= 2;
+                }
+                _ => (),
+            };
             let addr = self.heap_allocate(b.len(), align)?;
             self.write_memory(addr, b)?;
             Ok(addr.to_usize())

From 142ef764ee3af3cee8d6336ee153a9e3f13fec90 Mon Sep 17 00:00:00 2001
From: 6d7a
Date: Sun, 24 Mar 2024 10:19:25 +0100
Subject: [PATCH 2/2] fix: Check stack depth to prevent stack overflows in create_memory_map

---
 crates/hir-ty/src/consteval/tests.rs |  4 +-
 crates/hir-ty/src/mir/eval.rs        | 60 +++++++++++++++++++++-------
 2 files changed, 48 insertions(+), 16 deletions(-)

diff --git a/crates/hir-ty/src/consteval/tests.rs b/crates/hir-ty/src/consteval/tests.rs
index 9199663be1..d1ffd5046c 100644
--- a/crates/hir-ty/src/consteval/tests.rs
+++ b/crates/hir-ty/src/consteval/tests.rs
@@ -2828,7 +2828,7 @@ fn unsized_local() {
 
 #[test]
 fn recursive_adt() {
-    check_answer(
+    check_fail(
         r#"
         //- minicore: coerce_unsized, index, slice
         pub enum TagTree {
@@ -2849,6 +2849,6 @@ fn recursive_adt() {
             TAG_TREE
         };
         "#,
-        |b, _| assert_eq!(b[0] % 8, 0),
+        |e| matches!(e, ConstEvalError::MirEvalError(MirEvalError::StackOverflow)),
     );
 }
diff --git a/crates/hir-ty/src/mir/eval.rs b/crates/hir-ty/src/mir/eval.rs
index 90b8092b4b..035991b5e7 100644
--- a/crates/hir-ty/src/mir/eval.rs
+++ b/crates/hir-ty/src/mir/eval.rs
@@ -1710,14 +1710,7 @@ impl Evaluator<'_> {
             }
             ConstScalar::Unknown => not_supported!("evaluating unknown const"),
         };
-        let patch_map = memory_map.transform_addresses(|b, mut align| {
-            // Prevent recursive addresses in ADTs and slices
-            match ((&b[..b.len() / 2]).try_into(), HEAP_OFFSET.checked_add(align)) {
-                (Ok(arr), Some(new_addr)) if usize::from_le_bytes(arr) == new_addr => {
-                    align *= 2;
-                }
-                _ => (),
-            };
+        let patch_map = memory_map.transform_addresses(|b, align| {
             let addr = self.heap_allocate(b.len(), align)?;
             self.write_memory(addr, b)?;
             Ok(addr.to_usize())
@@ -1938,7 +1931,11 @@ impl Evaluator<'_> {
             ty: &Ty,
             locals: &Locals,
             mm: &mut ComplexMemoryMap,
+            stack_depth_limit: usize,
         ) -> Result<()> {
+            if stack_depth_limit.checked_sub(1).is_none() {
+                return Err(MirEvalError::StackOverflow);
+            }
             match ty.kind(Interner) {
                 TyKind::Ref(_, _, t) => {
                     let size = this.size_align_of(t, locals)?;
@@ -1977,7 +1974,14 @@ impl Evaluator<'_> {
                             if let Some(ty) = check_inner {
                                 for i in 0..count {
                                     let offset = element_size * i;
-                                    rec(this, &b[offset..offset + element_size], ty, locals, mm)?;
+                                    rec(
+                                        this,
+                                        &b[offset..offset + element_size],
+                                        ty,
+                                        locals,
+                                        mm,
+                                        stack_depth_limit - 1,
+                                    )?;
                                 }
                             }
                         }
@@ -1991,7 +1995,14 @@ impl Evaluator<'_> {
                     let size = this.size_of_sized(inner, locals, "inner of array")?;
                     for i in 0..len {
                         let offset = i * size;
-                        rec(this, &bytes[offset..offset + size], inner, locals, mm)?;
+                        rec(
+                            this,
+                            &bytes[offset..offset + size],
+                            inner,
+                            locals,
+                            mm,
+                            stack_depth_limit - 1,
+                        )?;
                     }
                 }
                 chalk_ir::TyKind::Tuple(_, subst) => {
@@ -2000,7 +2011,14 @@ impl Evaluator<'_> {
                         let ty = ty.assert_ty_ref(Interner); // Tuple only has type argument
                         let offset = layout.fields.offset(id).bytes_usize();
                         let size = this.layout(ty)?.size.bytes_usize();
-                        rec(this, &bytes[offset..offset + size], ty, locals, mm)?;
+                        rec(
+                            this,
+                            &bytes[offset..offset + size],
+                            ty,
+                            locals,
+                            mm,
+                            stack_depth_limit - 1,
+                        )?;
                     }
                 }
                 chalk_ir::TyKind::Adt(adt, subst) => match adt.0 {
@@ -2015,7 +2033,14 @@ impl Evaluator<'_> {
                                 .bytes_usize();
                             let ty = &field_types[f].clone().substitute(Interner, subst);
                             let size = this.layout(ty)?.size.bytes_usize();
-                            rec(this, &bytes[offset..offset + size], ty, locals, mm)?;
+                            rec(
+                                this,
+                                &bytes[offset..offset + size],
+                                ty,
+                                locals,
+                                mm,
+                                stack_depth_limit - 1,
+                            )?;
                         }
                     }
                     AdtId::EnumId(e) => {
@@ -2034,7 +2059,14 @@ impl Evaluator<'_> {
                                     l.fields.offset(u32::from(f.into_raw()) as usize).bytes_usize();
                                 let ty = &field_types[f].clone().substitute(Interner, subst);
                                 let size = this.layout(ty)?.size.bytes_usize();
-                                rec(this, &bytes[offset..offset + size], ty, locals, mm)?;
+                                rec(
+                                    this,
+                                    &bytes[offset..offset + size],
+                                    ty,
+                                    locals,
+                                    mm,
+                                    stack_depth_limit - 1,
+                                )?;
                             }
                         }
                     }
@@ -2045,7 +2077,7 @@ impl Evaluator<'_> {
             Ok(())
         }
         let mut mm = ComplexMemoryMap::default();
-        rec(self, bytes, ty, locals, &mut mm)?;
+        rec(self, bytes, ty, locals, &mut mm, self.stack_depth_limit - 1)?;
         Ok(mm)
     }
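
Note (not part of the patches): the fix in PATCH 2/2 threads an explicit depth
budget through rec and bails out with MirEvalError::StackOverflow once it is
exhausted. Below is a minimal, self-contained sketch of that pattern, using
hypothetical names (Node, WalkError, MAX_DEPTH) that do not appear in
rust-analyzer itself.

// Illustrative sketch (hypothetical names): each recursive call spends one
// unit of an explicit depth budget instead of relying on the OS call stack,
// so a deeply nested or self-referential value yields a recoverable error
// rather than a crash.
#[derive(Debug)]
enum Node {
    Leaf,
    Children(Vec<Node>),
}

#[derive(Debug, PartialEq)]
enum WalkError {
    StackOverflow,
}

const MAX_DEPTH: usize = 100;

fn walk(node: &Node, depth_budget: usize) -> Result<usize, WalkError> {
    // Budget exhausted: report an error instead of overflowing the stack.
    if depth_budget == 0 {
        return Err(WalkError::StackOverflow);
    }
    match node {
        Node::Leaf => Ok(1),
        Node::Children(children) => {
            let mut count = 1;
            for child in children {
                // Pass a smaller budget down, mirroring `stack_depth_limit - 1`.
                count += walk(child, depth_budget - 1)?;
            }
            Ok(count)
        }
    }
}

fn main() {
    let tree = Node::Children(vec![Node::Leaf, Node::Children(vec![Node::Leaf])]);
    // Four nodes in total, well within the budget.
    assert_eq!(walk(&tree, MAX_DEPTH), Ok(4));
    // A budget of 1 fails on the first nested level, the StackOverflow case.
    assert_eq!(walk(&tree, 1), Err(WalkError::StackOverflow));
}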