diff --git a/rust/src/dlmalloc.rs b/rust/src/dlmalloc.rs
index 550b2747..e60b2d1f 100644
--- a/rust/src/dlmalloc.rs
+++ b/rust/src/dlmalloc.rs
@@ -7,7 +7,7 @@ use cty::*;
 
 extern "C" {
     pub fn malloc(size: size_t) -> *mut c_void;
-    pub fn realloc(p: *mut c_void, size: size_t) -> *mut c_void;
+    pub fn realloc_in_place(p: *mut c_void, size: size_t) -> *mut c_void;
     pub fn free(p: *mut c_void);
     pub fn posix_memalign(p: *mut *mut c_void, alignment: size_t, size: size_t) -> c_int;
 }
@@ -51,7 +51,29 @@ unsafe impl GlobalAlloc for DLMalloc {
     }
 
     #[inline]
-    unsafe fn realloc(&self, ptr: *mut u8, _layout: Layout, new_size: usize) -> *mut u8 {
-        unsafe { realloc(ptr as *mut c_void, new_size) as *mut u8 }
+    unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
+        // Unfortunately, realloc doesn't make any alignment guarantees, so the memory
+        // has to be manually allocated as aligned memory if it cannot be resized
+        // in-place.
+        let mut new_ptr = unsafe { realloc_in_place(ptr as *mut c_void, new_size) as *mut u8 };
+
+        // return early if in-place resize succeeded
+        if !new_ptr.is_null() {
+            return new_ptr;
+        }
+
+        // allocate new aligned storage with correct layout
+        new_ptr =
+            unsafe { self.alloc(Layout::from_size_align_unchecked(new_size, layout.align())) };
+
+        // return early if allocation failed
+        if new_ptr.is_null() {
+            return ptr::null_mut();
+        }
+
+        // copy over the data and deallocate the old storage
+        unsafe { ptr::copy(ptr, new_ptr, layout.size().min(new_size)) };
+        unsafe { self.dealloc(ptr, layout) };
+        new_ptr
     }
 }
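
For context on how the new realloc path gets exercised, below is a minimal, hedged sketch (not part of the patch) of installing the allocator as the global allocator. It assumes DLMalloc is a zero-sized unit struct with a const value, as suggested by `unsafe impl GlobalAlloc for DLMalloc`; the import path and the std-based main are illustrative only. Repeated Vec growth then routes through GlobalAlloc::realloc, taking the in-place fast path when dlmalloc can extend the chunk and the alloc/copy/dealloc fallback otherwise.

// Usage sketch only: `mycrate::dlmalloc` is a hypothetical import path, and
// `DLMalloc` is assumed to be constructible as a const unit value.
use mycrate::dlmalloc::DLMalloc;

#[global_allocator]
static GLOBAL: DLMalloc = DLMalloc;

fn main() {
    // Repeated growth forces reallocations: each grow first tries
    // `realloc_in_place`, and falls back to alloc + copy + dealloc when the
    // chunk cannot be extended in place.
    let mut v: Vec<u64> = Vec::with_capacity(1);
    for i in 0..10_000u64 {
        v.push(i);
    }
    assert_eq!(v.len(), 10_000);
}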