use crate::{Error, Memory, MAX_WASM_PAGES, PAGE_SIZE};
pub use sp_core::MAX_POSSIBLE_ALLOCATION;
use sp_wasm_interface::{Pointer, WordSize};
use std::{
cmp::{max, min},
mem,
ops::{Index, IndexMut, Range},
};
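/// Every pointer handed out by the allocator is aligned to this many bytes.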
const ALIGNMENT: u32 = 8;
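/// Every allocated block is prefixed with a header of this many bytes; see `Header` for the encoding.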
const HEADER_SIZE: u32 = 8;
fn error(msg: &'static str) -> Error {
Error::Other(msg)
}
const LOG_TARGET: &str = "wasm-heap";
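/// The number of size classes ("orders"): block sizes start at `MIN_POSSIBLE_ALLOCATION`
/// and double with every order up to `MAX_POSSIBLE_ALLOCATION` (see `test_n_orders` below).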
const N_ORDERS: usize = 23;
const MIN_POSSIBLE_ALLOCATION: u32 = 8;
/// A size class: order `n` corresponds to a block of `MIN_POSSIBLE_ALLOCATION << n` bytes,
/// so a request of, say, 9..=16 bytes maps to order 1.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
struct Order(u32);
impl Order {
fn from_raw(order: u32) -> Result<Self, Error> {
if order < N_ORDERS as u32 {
Ok(Self(order))
} else {
Err(error("invalid order"))
}
}
fn from_size(size: u32) -> Result<Self, Error> {
let clamped_size = if size > MAX_POSSIBLE_ALLOCATION {
log::warn!(target: LOG_TARGET, "going to fail due to allocating {:?}", size);
return Err(Error::RequestedAllocationTooLarge)
} else if size < MIN_POSSIBLE_ALLOCATION {
MIN_POSSIBLE_ALLOCATION
} else {
size
};
let power_of_two_size = clamped_size.next_power_of_two();
let order = power_of_two_size.trailing_zeros() - MIN_POSSIBLE_ALLOCATION.trailing_zeros();
Ok(Self(order))
}
fn size(&self) -> u32 {
MIN_POSSIBLE_ALLOCATION << self.0
}
fn into_raw(self) -> u32 {
self.0
}
}
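/// The sentinel stored in a free header meaning "no next free block".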
const NIL_MARKER: u32 = u32::MAX;
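/// A link in a free list: either a pointer to the header of the next free block of the
/// same order, or `Nil` at the end of the list.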
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum Link {
Nil,
Ptr(u32),
}
impl Link {
fn from_raw(raw: u32) -> Self {
if raw != NIL_MARKER {
Self::Ptr(raw)
} else {
Self::Nil
}
}
fn into_raw(self) -> u32 {
match self {
Self::Nil => NIL_MARKER,
Self::Ptr(ptr) => ptr,
}
}
}
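/// The 8-byte prefix stored in front of every block, encoded as a little-endian `u64`.
///
/// Bit 32 is the "occupied" flag; the lower 32 bits hold either the block's `Order`
/// (occupied) or the `Link` to the next free block of the same order (free). For example,
/// an occupied order-1 block is written as `0x00000001_00000001`, while a free block with
/// no successor is written as `0x00000000_FFFFFFFF`.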
#[derive(Clone, Debug, PartialEq, Eq)]
enum Header {
Free(Link),
Occupied(Order),
}
impl Header {
fn read_from(memory: &impl Memory, header_ptr: u32) -> Result<Self, Error> {
let raw_header = memory.read_le_u64(header_ptr)?;
let occupied = raw_header & 0x00000001_00000000 != 0;
let header_data = raw_header as u32;
Ok(if occupied {
Self::Occupied(Order::from_raw(header_data)?)
} else {
Self::Free(Link::from_raw(header_data))
})
}
fn write_into(&self, memory: &mut impl Memory, header_ptr: u32) -> Result<(), Error> {
let (header_data, occupied_mask) = match *self {
Self::Occupied(order) => (order.into_raw(), 0x00000001_00000000),
Self::Free(link) => (link.into_raw(), 0x00000000_00000000),
};
let raw_header = header_data as u64 | occupied_mask;
memory.write_le_u64(header_ptr, raw_header)?;
Ok(())
}
fn into_occupied(self) -> Option<Order> {
match self {
Self::Occupied(order) => Some(order),
_ => None,
}
}
fn into_free(self) -> Option<Link> {
match self {
Self::Free(link) => Some(link),
_ => None,
}
}
}
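/// The heads of the per-order free lists, indexed by `Order`.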
struct FreeLists {
heads: [Link; N_ORDERS],
}
impl FreeLists {
fn new() -> Self {
Self { heads: [Link::Nil; N_ORDERS] }
}
fn replace(&mut self, order: Order, new: Link) -> Link {
let prev = self[order];
self[order] = new;
prev
}
}
impl Index<Order> for FreeLists {
type Output = Link;
fn index(&self, index: Order) -> &Link {
&self.heads[index.0 as usize]
}
}
impl IndexMut<Order> for FreeLists {
fn index_mut(&mut self, index: Order) -> &mut Link {
&mut self.heads[index.0 as usize]
}
}
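/// Runtime statistics collected by the allocator.
///
/// `bytes_allocated` is the number of bytes currently handed out, including the 8-byte
/// header of every block; `bytes_allocated_peak` is the highest value `bytes_allocated`
/// has reached; `bytes_allocated_sum` is the cumulative total of all allocations ever
/// made; `address_space_used` is how far the bumper has advanced past the original heap
/// base, i.e. the amount of address space touched so far.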
#[derive(Clone, Debug, Default)]
#[non_exhaustive]
pub struct AllocationStats {
pub bytes_allocated: u32,
pub bytes_allocated_peak: u32,
pub bytes_allocated_sum: u128,
pub address_space_used: u32,
}
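/// The number of Wasm pages needed to hold `size` bytes, rounded up, or `None` if the
/// result does not fit into a `u32`.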
fn pages_from_size(size: u64) -> Option<u32> {
u32::try_from((size + PAGE_SIZE as u64 - 1) / PAGE_SIZE as u64).ok()
}
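/// A "freeing-bump" allocator backed by a Wasm linear memory.
///
/// Requested sizes are rounded up to the next power of two (at least
/// `MIN_POSSIBLE_ALLOCATION` bytes) and prefixed with an 8-byte header. Fresh space is
/// carved out by advancing `bumper`; freed blocks are pushed onto a per-order free list
/// and handed back to later requests of the same order. Blocks are never split or merged,
/// and the bumper never moves backwards.
///
/// A rough usage sketch (`MemoryInstance` here stands in for any `Memory` implementation,
/// such as the mock used in the tests below; it is not part of this module's public API):
///
/// ```ignore
/// let mut mem = MemoryInstance::with_pages(1);
/// let mut heap = FreeingBumpHeapAllocator::new(0);
/// // Rounded up to a 32-byte block plus an 8-byte header.
/// let ptr = heap.allocate(&mut mem, 24).unwrap();
/// heap.deallocate(&mut mem, ptr).unwrap();
/// ```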
pub struct FreeingBumpHeapAllocator {
original_heap_base: u32,
bumper: u32,
free_lists: FreeLists,
poisoned: bool,
last_observed_memory_size: u64,
stats: AllocationStats,
}
impl Drop for FreeingBumpHeapAllocator {
fn drop(&mut self) {
log::debug!(target: LOG_TARGET, "allocator dropped: {:?}", self.stats)
}
}
impl FreeingBumpHeapAllocator {
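/// Creates a new allocator that starts handing out memory at `heap_base`, rounded up to
/// the next multiple of `ALIGNMENT`.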
pub fn new(heap_base: u32) -> Self {
let aligned_heap_base = (heap_base + ALIGNMENT - 1) / ALIGNMENT * ALIGNMENT;
FreeingBumpHeapAllocator {
original_heap_base: aligned_heap_base,
bumper: aligned_heap_base,
free_lists: FreeLists::new(),
poisoned: false,
last_observed_memory_size: 0,
stats: AllocationStats::default(),
}
}
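/// Allocates `size` bytes inside `mem` and returns a pointer to the start of the block,
/// just past its header.
///
/// The request is rounded up to the next power of two, at least `MIN_POSSIBLE_ALLOCATION`
/// and at most `MAX_POSSIBLE_ALLOCATION` bytes. A block from the matching free list is
/// reused if one is available; otherwise the bumper is advanced, growing the memory if
/// necessary. Returning an error poisons the allocator, after which every further call
/// fails.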
pub fn allocate(
&mut self,
mem: &mut impl Memory,
size: WordSize,
) -> Result<Pointer<u8>, Error> {
if self.poisoned {
return Err(error("the allocator has been poisoned"))
}
let bomb = PoisonBomb { poisoned: &mut self.poisoned };
Self::observe_memory_size(&mut self.last_observed_memory_size, mem)?;
let order = Order::from_size(size)?;
let header_ptr: u32 = match self.free_lists[order] {
Link::Ptr(header_ptr) => {
if (u64::from(header_ptr) + u64::from(order.size()) + u64::from(HEADER_SIZE)) >
mem.size()
{
return Err(error("Invalid header pointer detected"))
}
let next_free = Header::read_from(mem, header_ptr)?
.into_free()
.ok_or_else(|| error("free list points to a occupied header"))?;
self.free_lists[order] = next_free;
header_ptr
},
Link::Nil => {
Self::bump(&mut self.bumper, order.size() + HEADER_SIZE, mem)?
},
};
Header::Occupied(order).write_into(mem, header_ptr)?;
self.stats.bytes_allocated += order.size() + HEADER_SIZE;
self.stats.bytes_allocated_sum += u128::from(order.size() + HEADER_SIZE);
self.stats.bytes_allocated_peak =
max(self.stats.bytes_allocated_peak, self.stats.bytes_allocated);
self.stats.address_space_used = self.bumper - self.original_heap_base;
log::trace!(target: LOG_TARGET, "after allocation: {:?}", self.stats);
bomb.disarm();
Ok(Pointer::new(header_ptr + HEADER_SIZE))
}
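/// Deallocates the block pointed to by `ptr`, which must have been returned by
/// `allocate` on this allocator.
///
/// The block is pushed onto the free list of its order so that a later allocation of the
/// same order can reuse it. As with `allocate`, returning an error poisons the allocator.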
pub fn deallocate(&mut self, mem: &mut impl Memory, ptr: Pointer<u8>) -> Result<(), Error> {
if self.poisoned {
return Err(error("the allocator has been poisoned"))
}
let bomb = PoisonBomb { poisoned: &mut self.poisoned };
Self::observe_memory_size(&mut self.last_observed_memory_size, mem)?;
let header_ptr = u32::from(ptr)
.checked_sub(HEADER_SIZE)
.ok_or_else(|| error("Invalid pointer for deallocation"))?;
let order = Header::read_from(mem, header_ptr)?
.into_occupied()
.ok_or_else(|| error("the allocation points to an empty header"))?;
let prev_head = self.free_lists.replace(order, Link::Ptr(header_ptr));
Header::Free(prev_head).write_into(mem, header_ptr)?;
self.stats.bytes_allocated = self
.stats
.bytes_allocated
.checked_sub(order.size() + HEADER_SIZE)
.ok_or_else(|| error("underflow of the currently allocated bytes count"))?;
log::trace!("after deallocation: {:?}", self.stats);
bomb.disarm();
Ok(())
}
pub fn stats(&self) -> AllocationStats {
self.stats.clone()
}
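/// Bumps the pointer by `size` bytes, growing the memory if the current size is not
/// enough.
///
/// When growth is needed, the page count is at least doubled (capped at the maximum) and
/// grown at least far enough to satisfy the request; if even the maximum number of pages
/// is not enough, `Error::AllocatorOutOfSpace` is returned.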
fn bump(bumper: &mut u32, size: u32, memory: &mut impl Memory) -> Result<u32, Error> {
let required_size = u64::from(*bumper) + u64::from(size);
if required_size > memory.size() {
let required_pages =
pages_from_size(required_size).ok_or_else(|| Error::AllocatorOutOfSpace)?;
let current_pages = memory.pages();
let max_pages = memory.max_pages().unwrap_or(MAX_WASM_PAGES);
debug_assert!(
current_pages < required_pages,
"current pages {current_pages} < required pages {required_pages}"
);
if current_pages >= max_pages {
log::debug!(
target: LOG_TARGET,
"Wasm pages ({current_pages}) are already at the maximum.",
);
return Err(Error::AllocatorOutOfSpace)
} else if required_pages > max_pages {
log::debug!(
target: LOG_TARGET,
"Failed to grow memory from {current_pages} pages to at least {required_pages}\
pages due to the maximum limit of {max_pages} pages",
);
return Err(Error::AllocatorOutOfSpace)
}
let next_pages = min(current_pages * 2, max_pages);
let next_pages = max(next_pages, required_pages);
if memory.grow(next_pages - current_pages).is_err() {
log::error!(
target: LOG_TARGET,
"Failed to grow memory from {current_pages} pages to {next_pages} pages",
);
return Err(Error::AllocatorOutOfSpace)
}
debug_assert_eq!(memory.pages(), next_pages, "Number of pages should have increased!");
}
let res = *bumper;
*bumper += size;
Ok(res)
}
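/// Records the current memory size and fails with `Error::MemoryShrinked` if the memory
/// got smaller since the last observation, since the allocator's bookkeeping would then
/// point into memory that no longer exists.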
fn observe_memory_size(
last_observed_memory_size: &mut u64,
mem: &mut impl Memory,
) -> Result<(), Error> {
if mem.size() < *last_observed_memory_size {
return Err(Error::MemoryShrinked)
}
*last_observed_memory_size = mem.size();
Ok(())
}
}
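/// Convenience helpers over `Memory` for reading and writing the little-endian `u64`
/// block headers and for querying the memory size in bytes.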
trait MemoryExt: Memory {
fn read_le_u64(&self, ptr: u32) -> Result<u64, Error> {
self.with_access(|memory| {
let range =
heap_range(ptr, 8, memory.len()).ok_or_else(|| error("read out of heap bounds"))?;
let bytes = memory[range]
.try_into()
.expect("[u8] slice of length 8 must be convertible to [u8; 8]");
Ok(u64::from_le_bytes(bytes))
})
}
fn write_le_u64(&mut self, ptr: u32, val: u64) -> Result<(), Error> {
self.with_access_mut(|memory| {
let range = heap_range(ptr, 8, memory.len())
.ok_or_else(|| error("write out of heap bounds"))?;
let bytes = val.to_le_bytes();
memory[range].copy_from_slice(&bytes[..]);
Ok(())
})
}
fn size(&self) -> u64 {
debug_assert!(self.pages() <= MAX_WASM_PAGES);
self.pages() as u64 * PAGE_SIZE as u64
}
}
impl<T: Memory> MemoryExt for T {}
fn heap_range(offset: u32, length: u32, heap_len: usize) -> Option<Range<usize>> {
let start = offset as usize;
let end = offset.checked_add(length)? as usize;
if end <= heap_len {
Some(start..end)
} else {
None
}
}
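/// A drop guard that flips the `poisoned` flag unless it is explicitly disarmed.
///
/// `allocate` and `deallocate` arm one of these on entry and disarm it only on the
/// success path, so any early return with an error leaves the allocator poisoned.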
struct PoisonBomb<'a> {
poisoned: &'a mut bool,
}
impl<'a> PoisonBomb<'a> {
fn disarm(self) {
mem::forget(self)
}
}
impl<'a> Drop for PoisonBomb<'a> {
fn drop(&mut self) {
*self.poisoned = true;
}
}
#[cfg(test)]
mod tests {
use super::*;
fn to_pointer(address: u32) -> Pointer<u8> {
Pointer::new(address)
}
#[derive(Debug)]
struct MemoryInstance {
data: Vec<u8>,
max_wasm_pages: u32,
}
impl MemoryInstance {
fn with_pages(pages: u32) -> Self {
Self { data: vec![0; (pages * PAGE_SIZE) as usize], max_wasm_pages: MAX_WASM_PAGES }
}
fn set_max_wasm_pages(&mut self, max_pages: u32) {
self.max_wasm_pages = max_pages;
}
}
impl Memory for MemoryInstance {
fn with_access<R>(&self, run: impl FnOnce(&[u8]) -> R) -> R {
run(&self.data)
}
fn with_access_mut<R>(&mut self, run: impl FnOnce(&mut [u8]) -> R) -> R {
run(&mut self.data)
}
fn pages(&self) -> u32 {
pages_from_size(self.data.len() as u64).unwrap()
}
fn max_pages(&self) -> Option<u32> {
Some(self.max_wasm_pages)
}
fn grow(&mut self, pages: u32) -> Result<(), ()> {
if self.pages() + pages > self.max_wasm_pages {
Err(())
} else {
self.data.resize(((self.pages() + pages) * PAGE_SIZE) as usize, 0);
Ok(())
}
}
}
#[test]
fn test_pages_from_size() {
assert_eq!(pages_from_size(0).unwrap(), 0);
assert_eq!(pages_from_size(1).unwrap(), 1);
assert_eq!(pages_from_size(65536).unwrap(), 1);
assert_eq!(pages_from_size(65536 + 1).unwrap(), 2);
assert_eq!(pages_from_size(2 * 65536).unwrap(), 2);
assert_eq!(pages_from_size(2 * 65536 + 1).unwrap(), 3);
}
#[test]
fn should_allocate_properly() {
let mut mem = MemoryInstance::with_pages(1);
let mut heap = FreeingBumpHeapAllocator::new(0);
let ptr = heap.allocate(&mut mem, 1).unwrap();
assert_eq!(ptr, to_pointer(HEADER_SIZE));
}
#[test]
fn should_always_align_pointers_to_multiples_of_8() {
let mut mem = MemoryInstance::with_pages(1);
let mut heap = FreeingBumpHeapAllocator::new(13);
let ptr = heap.allocate(&mut mem, 1).unwrap();
assert_eq!(ptr, to_pointer(24));
}
#[test]
fn should_increment_pointers_properly() {
let mut mem = MemoryInstance::with_pages(1);
let mut heap = FreeingBumpHeapAllocator::new(0);
let ptr1 = heap.allocate(&mut mem, 1).unwrap();
let ptr2 = heap.allocate(&mut mem, 9).unwrap();
let ptr3 = heap.allocate(&mut mem, 1).unwrap();
assert_eq!(ptr1, to_pointer(HEADER_SIZE));
assert_eq!(ptr2, to_pointer(24));
assert_eq!(ptr3, to_pointer(24 + 16 + HEADER_SIZE));
}
#[test]
fn should_free_properly() {
let mut mem = MemoryInstance::with_pages(1);
let mut heap = FreeingBumpHeapAllocator::new(0);
let ptr1 = heap.allocate(&mut mem, 1).unwrap();
assert_eq!(ptr1, to_pointer(HEADER_SIZE));
let ptr2 = heap.allocate(&mut mem, 1).unwrap();
assert_eq!(ptr2, to_pointer(24));
heap.deallocate(&mut mem, ptr2).unwrap();
assert_eq!(heap.free_lists.heads[0], Link::Ptr(u32::from(ptr2) - HEADER_SIZE));
}
#[test]
fn should_deallocate_and_reallocate_properly() {
let mut mem = MemoryInstance::with_pages(1);
let padded_offset = 16;
let mut heap = FreeingBumpHeapAllocator::new(13);
let ptr1 = heap.allocate(&mut mem, 1).unwrap();
assert_eq!(ptr1, to_pointer(padded_offset + HEADER_SIZE));
let ptr2 = heap.allocate(&mut mem, 9).unwrap();
assert_eq!(ptr2, to_pointer(padded_offset + 16 + HEADER_SIZE));
heap.deallocate(&mut mem, ptr2).unwrap();
let ptr3 = heap.allocate(&mut mem, 9).unwrap();
assert_eq!(ptr3, to_pointer(padded_offset + 16 + HEADER_SIZE));
assert_eq!(heap.free_lists.heads, [Link::Nil; N_ORDERS]);
}
#[test]
fn should_build_linked_list_of_free_areas_properly() {
let mut mem = MemoryInstance::with_pages(1);
let mut heap = FreeingBumpHeapAllocator::new(0);
let ptr1 = heap.allocate(&mut mem, 8).unwrap();
let ptr2 = heap.allocate(&mut mem, 8).unwrap();
let ptr3 = heap.allocate(&mut mem, 8).unwrap();
heap.deallocate(&mut mem, ptr1).unwrap();
heap.deallocate(&mut mem, ptr2).unwrap();
heap.deallocate(&mut mem, ptr3).unwrap();
assert_eq!(heap.free_lists.heads[0], Link::Ptr(u32::from(ptr3) - HEADER_SIZE));
let ptr4 = heap.allocate(&mut mem, 8).unwrap();
assert_eq!(ptr4, ptr3);
assert_eq!(heap.free_lists.heads[0], Link::Ptr(u32::from(ptr2) - HEADER_SIZE));
}
#[test]
fn should_not_allocate_if_too_large() {
let mut mem = MemoryInstance::with_pages(1);
mem.set_max_wasm_pages(1);
let mut heap = FreeingBumpHeapAllocator::new(13);
let ptr = heap.allocate(&mut mem, PAGE_SIZE - 13);
assert_eq!(Error::AllocatorOutOfSpace, ptr.unwrap_err());
}
#[test]
fn should_not_allocate_if_full() {
let mut mem = MemoryInstance::with_pages(1);
mem.set_max_wasm_pages(1);
let mut heap = FreeingBumpHeapAllocator::new(0);
let ptr1 = heap.allocate(&mut mem, (PAGE_SIZE / 2) - HEADER_SIZE).unwrap();
assert_eq!(ptr1, to_pointer(HEADER_SIZE));
let ptr2 = heap.allocate(&mut mem, PAGE_SIZE / 2);
match ptr2.unwrap_err() {
Error::AllocatorOutOfSpace => {},
e => panic!("Expected allocator out of space error, got: {:?}", e),
}
}
#[test]
fn should_allocate_max_possible_allocation_size() {
let mut mem = MemoryInstance::with_pages(1);
let mut heap = FreeingBumpHeapAllocator::new(0);
let ptr = heap.allocate(&mut mem, MAX_POSSIBLE_ALLOCATION).unwrap();
assert_eq!(ptr, to_pointer(HEADER_SIZE));
}
#[test]
fn should_not_allocate_if_requested_size_too_large() {
let mut mem = MemoryInstance::with_pages(1);
let mut heap = FreeingBumpHeapAllocator::new(0);
let ptr = heap.allocate(&mut mem, MAX_POSSIBLE_ALLOCATION + 1);
assert_eq!(Error::RequestedAllocationTooLarge, ptr.unwrap_err());
}
#[test]
fn should_return_error_when_bumper_greater_than_heap_size() {
let mut mem = MemoryInstance::with_pages(1);
mem.set_max_wasm_pages(1);
let mut heap = FreeingBumpHeapAllocator::new(0);
let mut ptrs = Vec::new();
for _ in 0..(PAGE_SIZE as usize / 40) {
ptrs.push(heap.allocate(&mut mem, 32).expect("Allocate 32 byte"));
}
assert_eq!(heap.stats.bytes_allocated, PAGE_SIZE - 16);
assert_eq!(heap.bumper, PAGE_SIZE - 16);
ptrs.into_iter()
.for_each(|ptr| heap.deallocate(&mut mem, ptr).expect("Deallocate 32 byte"));
assert_eq!(heap.stats.bytes_allocated, 0);
assert_eq!(heap.stats.bytes_allocated_peak, PAGE_SIZE - 16);
assert_eq!(heap.bumper, PAGE_SIZE - 16);
heap.allocate(&mut mem, 8).expect("Allocate 8 byte");
assert_eq!(heap.bumper as u64, mem.size());
let ptr = heap.allocate(&mut mem, 8);
assert_eq!(Error::AllocatorOutOfSpace, ptr.unwrap_err());
}
#[test]
fn should_include_prefixes_in_total_heap_size() {
let mut mem = MemoryInstance::with_pages(1);
let mut heap = FreeingBumpHeapAllocator::new(1);
heap.allocate(&mut mem, 9).unwrap();
assert_eq!(heap.stats.bytes_allocated, HEADER_SIZE + 16);
}
#[test]
fn should_calculate_total_heap_size_to_zero() {
let mut mem = MemoryInstance::with_pages(1);
let mut heap = FreeingBumpHeapAllocator::new(13);
let ptr = heap.allocate(&mut mem, 42).unwrap();
assert_eq!(ptr, to_pointer(16 + HEADER_SIZE));
heap.deallocate(&mut mem, ptr).unwrap();
assert_eq!(heap.stats.bytes_allocated, 0);
}
#[test]
fn should_calculate_total_size_of_zero() {
let mut mem = MemoryInstance::with_pages(1);
let mut heap = FreeingBumpHeapAllocator::new(19);
for _ in 1..10 {
let ptr = heap.allocate(&mut mem, 42).unwrap();
heap.deallocate(&mut mem, ptr).unwrap();
}
assert_eq!(heap.stats.bytes_allocated, 0);
}
#[test]
fn should_read_and_write_u64_correctly() {
let mut mem = MemoryInstance::with_pages(1);
mem.write_le_u64(40, 4480113).unwrap();
let value = MemoryExt::read_le_u64(&mem, 40).unwrap();
assert_eq!(value, 4480113);
}
#[test]
fn should_get_item_size_from_order() {
let raw_order = 0;
let item_size = Order::from_raw(raw_order).unwrap().size();
assert_eq!(item_size, 8);
}
#[test]
fn should_get_max_item_size_from_index() {
let raw_order = 22;
let item_size = Order::from_raw(raw_order).unwrap().size();
assert_eq!(item_size as u32, MAX_POSSIBLE_ALLOCATION);
}
#[test]
fn deallocate_needs_to_maintain_linked_list() {
let mut mem = MemoryInstance::with_pages(1);
let mut heap = FreeingBumpHeapAllocator::new(0);
let ptrs = (0..4).map(|_| heap.allocate(&mut mem, 8).unwrap()).collect::<Vec<_>>();
ptrs.iter().rev().for_each(|ptr| heap.deallocate(&mut mem, *ptr).unwrap());
let new_ptrs = (0..4).map(|_| heap.allocate(&mut mem, 8).unwrap()).collect::<Vec<_>>();
assert_eq!(ptrs, new_ptrs);
}
#[test]
fn header_read_write() {
let roundtrip = |header: Header| {
let mut memory = MemoryInstance::with_pages(1);
header.write_into(&mut memory, 0).unwrap();
let read_header = Header::read_from(&memory, 0).unwrap();
assert_eq!(header, read_header);
};
roundtrip(Header::Occupied(Order(0)));
roundtrip(Header::Occupied(Order(1)));
roundtrip(Header::Free(Link::Nil));
roundtrip(Header::Free(Link::Ptr(0)));
roundtrip(Header::Free(Link::Ptr(4)));
}
#[test]
fn poison_oom() {
let mut mem = MemoryInstance::with_pages(1);
mem.set_max_wasm_pages(1);
let mut heap = FreeingBumpHeapAllocator::new(0);
let alloc_ptr = heap.allocate(&mut mem, PAGE_SIZE / 2).unwrap();
assert_eq!(Error::AllocatorOutOfSpace, heap.allocate(&mut mem, PAGE_SIZE).unwrap_err());
assert!(heap.poisoned);
assert!(heap.deallocate(&mut mem, alloc_ptr).is_err());
}
#[test]
fn test_n_orders() {
assert_eq!(
MIN_POSSIBLE_ALLOCATION * 2u32.pow(N_ORDERS as u32 - 1),
MAX_POSSIBLE_ALLOCATION
);
}
#[test]
fn accepts_growing_memory() {
let mut mem = MemoryInstance::with_pages(1);
let mut heap = FreeingBumpHeapAllocator::new(0);
heap.allocate(&mut mem, PAGE_SIZE / 2).unwrap();
heap.allocate(&mut mem, PAGE_SIZE / 2).unwrap();
mem.grow(1).unwrap();
heap.allocate(&mut mem, PAGE_SIZE / 2).unwrap();
}
#[test]
fn doesnt_accept_shrinking_memory() {
let mut mem = MemoryInstance::with_pages(2);
let mut heap = FreeingBumpHeapAllocator::new(0);
heap.allocate(&mut mem, PAGE_SIZE / 2).unwrap();
mem.data.truncate(PAGE_SIZE as usize);
match heap.allocate(&mut mem, PAGE_SIZE / 2).unwrap_err() {
Error::MemoryShrinked => (),
_ => panic!(),
}
}
#[test]
fn should_grow_memory_when_running_out_of_memory() {
let mut mem = MemoryInstance::with_pages(1);
let mut heap = FreeingBumpHeapAllocator::new(0);
assert_eq!(1, mem.pages());
heap.allocate(&mut mem, PAGE_SIZE * 2).unwrap();
assert_eq!(3, mem.pages());
}
#[test]
fn modifying_the_header_leads_to_an_error() {
let mut mem = MemoryInstance::with_pages(1);
let mut heap = FreeingBumpHeapAllocator::new(0);
let ptr = heap.allocate(&mut mem, 5).unwrap();
heap.deallocate(&mut mem, ptr).unwrap();
Header::Free(Link::Ptr(u32::MAX - 1))
.write_into(&mut mem, u32::from(ptr) - HEADER_SIZE)
.unwrap();
heap.allocate(&mut mem, 5).unwrap();
assert!(heap
.allocate(&mut mem, 5)
.unwrap_err()
.to_string()
.contains("Invalid header pointer"));
}
}