use crate::{Error, Memory, MAX_WASM_PAGES, PAGE_SIZE};

pub use sp_core::MAX_POSSIBLE_ALLOCATION;
use sp_wasm_interface::{Pointer, WordSize};
use std::{
	cmp::{max, min},
	mem,
	ops::{Index, IndexMut, Range},
};

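// This is a "freeing-bump" allocator: requested sizes are rounded up to a power of two
// between `MIN_POSSIBLE_ALLOCATION` and `MAX_POSSIBLE_ALLOCATION`, every block is prefixed
// by an 8-byte header, and freed blocks are pushed onto a per-order free list that is
// reused before the bump pointer is advanced. The bump pointer itself never moves backwards.

/// The heap base and every block handed out are aligned to this many bytes.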
const ALIGNMENT: u32 = 8;

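/// Every allocation is prefixed by a header of this size, stored immediately before the
/// pointer returned to the caller.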
const HEADER_SIZE: u32 = 8;

fn error(msg: &'static str) -> Error {
	Error::Other(msg)
}

const LOG_TARGET: &str = "wasm-heap";

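/// The number of distinct allocation orders; `MIN_POSSIBLE_ALLOCATION * 2^(N_ORDERS - 1)`
/// equals `MAX_POSSIBLE_ALLOCATION` (checked by the `test_n_orders` test).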
const N_ORDERS: usize = 23;
const MIN_POSSIBLE_ALLOCATION: u32 = 8;

#[derive(Copy, Clone, PartialEq, Eq, Debug)]
struct Order(u32);

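// An `Order` identifies a size class: a block of order `n` spans
// `MIN_POSSIBLE_ALLOCATION << n` bytes, so order 0 is 8 bytes and order
// `N_ORDERS - 1` is `MAX_POSSIBLE_ALLOCATION`.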
impl Order {
	fn from_raw(order: u32) -> Result<Self, Error> {
		if order < N_ORDERS as u32 {
			Ok(Self(order))
		} else {
			Err(error("invalid order"))
		}
	}

	fn from_size(size: u32) -> Result<Self, Error> {
		let clamped_size = if size > MAX_POSSIBLE_ALLOCATION {
			log::warn!(target: LOG_TARGET, "going to fail due to allocating {:?}", size);
			return Err(Error::RequestedAllocationTooLarge)
		} else if size < MIN_POSSIBLE_ALLOCATION {
			MIN_POSSIBLE_ALLOCATION
		} else {
			size
		};

		let power_of_two_size = clamped_size.next_power_of_two();

		// A block of order `n` is `MIN_POSSIBLE_ALLOCATION << n` bytes, so the order is the
		// difference of the trailing-zero counts of the two power-of-two sizes.
		let order = power_of_two_size.trailing_zeros() - MIN_POSSIBLE_ALLOCATION.trailing_zeros();

		Ok(Self(order))
	}

	fn size(&self) -> u32 {
		MIN_POSSIBLE_ALLOCATION << self.0
	}

	fn into_raw(self) -> u32 {
		self.0
	}
}

/// The marker value used to encode [`Link::Nil`] in a header.
const NIL_MARKER: u32 = u32::MAX;

/// A link in a free list: either the end of the list or a pointer to the next free block.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum Link {
	/// Nil, denotes the end of the list.
	Nil,
	/// A pointer to the header of the next free block.
	Ptr(u32),
}

impl Link {
	fn from_raw(raw: u32) -> Self {
		if raw != NIL_MARKER {
			Self::Ptr(raw)
		} else {
			Self::Nil
		}
	}

	fn into_raw(self) -> u32 {
		match self {
			Self::Nil => NIL_MARKER,
			Self::Ptr(ptr) => ptr,
		}
	}
}

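// A header occupies `HEADER_SIZE` bytes in memory and is encoded as a little-endian `u64`:
// bit 32 is the "occupied" flag, and the lower 32 bits hold either the order of an occupied
// block or the free-list link of a free block.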
#[derive(Clone, Debug, PartialEq, Eq)]
enum Header {
	/// A free block, holding the link to the next free block of the same order.
	Free(Link),
	/// An occupied block of the given order.
	Occupied(Order),
}

impl Header {
	fn read_from(memory: &impl Memory, header_ptr: u32) -> Result<Self, Error> {
		let raw_header = memory.read_le_u64(header_ptr)?;

		// Bit 32 is the "occupied" flag; the lower 32 bits carry the payload (order or link).
		let occupied = raw_header & 0x00000001_00000000 != 0;
		let header_data = raw_header as u32;

		Ok(if occupied {
			Self::Occupied(Order::from_raw(header_data)?)
		} else {
			Self::Free(Link::from_raw(header_data))
		})
	}

	fn write_into(&self, memory: &mut impl Memory, header_ptr: u32) -> Result<(), Error> {
		let (header_data, occupied_mask) = match *self {
			Self::Occupied(order) => (order.into_raw(), 0x00000001_00000000),
			Self::Free(link) => (link.into_raw(), 0x00000000_00000000),
		};
		let raw_header = header_data as u64 | occupied_mask;
		memory.write_le_u64(header_ptr, raw_header)?;
		Ok(())
	}

	fn into_occupied(self) -> Option<Order> {
		match self {
			Self::Occupied(order) => Some(order),
			_ => None,
		}
	}

	fn into_free(self) -> Option<Link> {
		match self {
			Self::Free(link) => Some(link),
			_ => None,
		}
	}
}

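// The heads of the free lists, one per order.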
struct FreeLists {
	heads: [Link; N_ORDERS],
}

impl FreeLists {
	fn new() -> Self {
		Self { heads: [Link::Nil; N_ORDERS] }
	}

	/// Replaces the head of the free list for `order` with `new`, returning the previous head.
	fn replace(&mut self, order: Order, new: Link) -> Link {
		let prev = self[order];
		self[order] = new;
		prev
	}
}

impl Index<Order> for FreeLists {
	type Output = Link;
	fn index(&self, index: Order) -> &Link {
		&self.heads[index.0 as usize]
	}
}

impl IndexMut<Order> for FreeLists {
	fn index_mut(&mut self, index: Order) -> &mut Link {
		&mut self.heads[index.0 as usize]
	}
}

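/// Allocation statistics maintained by the allocator. All byte counts include the
/// `HEADER_SIZE` bytes of each allocation's header.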
#[derive(Clone, Debug, Default)]
#[non_exhaustive]
pub struct AllocationStats {
	/// The number of bytes currently allocated.
	pub bytes_allocated: u32,

	/// The peak value of `bytes_allocated` observed so far.
	pub bytes_allocated_peak: u32,

	/// The sum of the sizes of every allocation ever made.
	pub bytes_allocated_sum: u128,

	/// The amount of address space used, measured from the original heap base to the
	/// current bump pointer.
	pub address_space_used: u32,
}

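/// Converts a size in bytes into the number of `PAGE_SIZE`-byte wasm pages needed to hold it,
/// rounding up. Returns `None` if the result does not fit into a `u32`.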
fn pages_from_size(size: u64) -> Option<u32> {
	u32::try_from(size.div_ceil(PAGE_SIZE as u64)).ok()
}

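/// A bump allocator over an externally provided [`Memory`] that reuses freed blocks through
/// per-order free lists.
///
/// A minimal usage sketch (assuming `mem` is some [`Memory`] implementation and `heap_base`
/// is the module's exported heap base):
///
/// ```ignore
/// let mut allocator = FreeingBumpHeapAllocator::new(heap_base);
/// let ptr = allocator.allocate(&mut mem, 64)?;
/// allocator.deallocate(&mut mem, ptr)?;
/// ```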
pub struct FreeingBumpHeapAllocator {
	original_heap_base: u32,
	bumper: u32,
	free_lists: FreeLists,
	/// Set when an operation fails part-way through; all further calls are refused.
	poisoned: bool,
	last_observed_memory_size: u64,
	stats: AllocationStats,
}

impl Drop for FreeingBumpHeapAllocator {
	fn drop(&mut self) {
		log::debug!(target: LOG_TARGET, "allocator dropped: {:?}", self.stats)
	}
}

impl FreeingBumpHeapAllocator {
	/// Creates a new allocator. The heap starts at `heap_base`, rounded up to the next
	/// multiple of `ALIGNMENT`.
	pub fn new(heap_base: u32) -> Self {
		let aligned_heap_base = heap_base.div_ceil(ALIGNMENT) * ALIGNMENT;

		FreeingBumpHeapAllocator {
			original_heap_base: aligned_heap_base,
			bumper: aligned_heap_base,
			free_lists: FreeLists::new(),
			poisoned: false,
			last_observed_memory_size: 0,
			stats: AllocationStats::default(),
		}
	}

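	/// Allocates `size` bytes and returns a pointer to the start of the allocation, just past
	/// the block's header.
	///
	/// The size is rounded up to the next power of two, at least `MIN_POSSIBLE_ALLOCATION`
	/// bytes. A block is taken from the matching free list if one is available; otherwise the
	/// bump pointer is advanced, growing the underlying memory if necessary.
	///
	/// Fails if the allocator is poisoned, the requested size exceeds
	/// `MAX_POSSIBLE_ALLOCATION`, or the memory cannot be grown far enough.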
	pub fn allocate(
		&mut self,
		mem: &mut impl Memory,
		size: WordSize,
	) -> Result<Pointer<u8>, Error> {
		if self.poisoned {
			return Err(error("the allocator has been poisoned"))
		}

		let bomb = PoisonBomb { poisoned: &mut self.poisoned };

		Self::observe_memory_size(&mut self.last_observed_memory_size, mem)?;
		let order = Order::from_size(size)?;

		let header_ptr: u32 = match self.free_lists[order] {
			Link::Ptr(header_ptr) => {
				// Reuse the block at the head of this order's free list, after checking that
				// the header and the block still fit into the current memory.
				if (u64::from(header_ptr) + u64::from(order.size()) + u64::from(HEADER_SIZE)) >
					mem.size()
				{
					return Err(error("Invalid header pointer detected"))
				}

				let next_free = Header::read_from(mem, header_ptr)?
					.into_free()
					.ok_or_else(|| error("free list points to an occupied header"))?;
				self.free_lists[order] = next_free;

				header_ptr
			},
			Link::Nil => {
				// No free block of this order: carve a new one off the bump pointer, growing
				// the memory if necessary.
				Self::bump(&mut self.bumper, order.size() + HEADER_SIZE, mem)?
			},
		};

		Header::Occupied(order).write_into(mem, header_ptr)?;

		self.stats.bytes_allocated += order.size() + HEADER_SIZE;
		self.stats.bytes_allocated_sum += u128::from(order.size() + HEADER_SIZE);
		self.stats.bytes_allocated_peak =
			max(self.stats.bytes_allocated_peak, self.stats.bytes_allocated);
		self.stats.address_space_used = self.bumper - self.original_heap_base;

		log::trace!(target: LOG_TARGET, "after allocation: {:?}", self.stats);

		bomb.disarm();
		Ok(Pointer::new(header_ptr + HEADER_SIZE))
	}

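	/// Deallocates the block previously returned by [`Self::allocate`] at `ptr`, pushing it
	/// onto the free list of its order so it can be reused.
	///
	/// Fails if the allocator is poisoned, the pointer does not sit just past a valid
	/// occupied header, or the memory shrank since the last observation.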
	pub fn deallocate(&mut self, mem: &mut impl Memory, ptr: Pointer<u8>) -> Result<(), Error> {
		if self.poisoned {
			return Err(error("the allocator has been poisoned"))
		}

		let bomb = PoisonBomb { poisoned: &mut self.poisoned };

		Self::observe_memory_size(&mut self.last_observed_memory_size, mem)?;

		// The header sits immediately before the pointer that was handed out by `allocate`.
		let header_ptr = u32::from(ptr)
			.checked_sub(HEADER_SIZE)
			.ok_or_else(|| error("Invalid pointer for deallocation"))?;

		let order = Header::read_from(mem, header_ptr)?
			.into_occupied()
			.ok_or_else(|| error("the allocation points to an empty header"))?;

		// Push the freed block onto the head of its order's free list.
		let prev_head = self.free_lists.replace(order, Link::Ptr(header_ptr));
		Header::Free(prev_head).write_into(mem, header_ptr)?;

		self.stats.bytes_allocated = self
			.stats
			.bytes_allocated
			.checked_sub(order.size() + HEADER_SIZE)
			.ok_or_else(|| error("underflow of the currently allocated bytes count"))?;

		log::trace!(target: LOG_TARGET, "after deallocation: {:?}", self.stats);

		bomb.disarm();
		Ok(())
	}

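	/// Returns a snapshot of the current allocation statistics.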
	pub fn stats(&self) -> AllocationStats {
		self.stats.clone()
	}

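	/// Advances the bump pointer by `size` bytes and returns its previous value.
	///
	/// If the current memory is too small, it is grown: the page count is at least doubled
	/// (capped at the maximum) and never increased by less than the request needs.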
	fn bump(bumper: &mut u32, size: u32, memory: &mut impl Memory) -> Result<u32, Error> {
		let required_size = u64::from(*bumper) + u64::from(size);

		if required_size > memory.size() {
			let required_pages =
				pages_from_size(required_size).ok_or_else(|| Error::AllocatorOutOfSpace)?;

			let current_pages = memory.pages();
			let max_pages = memory.max_pages().unwrap_or(MAX_WASM_PAGES);
			debug_assert!(
				current_pages < required_pages,
				"current pages {current_pages} < required pages {required_pages}"
			);

			if current_pages >= max_pages {
				log::debug!(
					target: LOG_TARGET,
					"Wasm pages ({current_pages}) are already at the maximum.",
				);

				return Err(Error::AllocatorOutOfSpace)
			} else if required_pages > max_pages {
				log::debug!(
					target: LOG_TARGET,
					"Failed to grow memory from {current_pages} pages to at least {required_pages} \
					pages due to the maximum limit of {max_pages} pages",
				);
				return Err(Error::AllocatorOutOfSpace)
			}

			// Grow by at least doubling the current number of pages (capped at the maximum),
			// but never by less than the request needs.
			let next_pages = min(current_pages * 2, max_pages);
			let next_pages = max(next_pages, required_pages);

			if memory.grow(next_pages - current_pages).is_err() {
				log::error!(
					target: LOG_TARGET,
					"Failed to grow memory from {current_pages} pages to {next_pages} pages",
				);

				return Err(Error::AllocatorOutOfSpace)
			}

			debug_assert_eq!(memory.pages(), next_pages, "Number of pages should have increased!");
		}

		let res = *bumper;
		*bumper += size;
		Ok(res)
	}

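	/// Records the current memory size, failing with [`Error::MemoryShrinked`] if the memory
	/// got smaller since the last observation: the allocator cannot cope with a shrinking
	/// memory.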
	fn observe_memory_size(
		last_observed_memory_size: &mut u64,
		mem: &mut impl Memory,
	) -> Result<(), Error> {
		if mem.size() < *last_observed_memory_size {
			return Err(Error::MemoryShrinked)
		}
		*last_observed_memory_size = mem.size();
		Ok(())
	}
}

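// Convenience helpers over `Memory` for bounds-checked little-endian `u64` reads and writes
// plus the total memory size in bytes.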
trait MemoryExt: Memory {
	fn read_le_u64(&self, ptr: u32) -> Result<u64, Error> {
		self.with_access(|memory| {
			let range =
				heap_range(ptr, 8, memory.len()).ok_or_else(|| error("read out of heap bounds"))?;
			let bytes = memory[range]
				.try_into()
				.expect("[u8] slice of length 8 must be convertible to [u8; 8]");
			Ok(u64::from_le_bytes(bytes))
		})
	}

	fn write_le_u64(&mut self, ptr: u32, val: u64) -> Result<(), Error> {
		self.with_access_mut(|memory| {
			let range = heap_range(ptr, 8, memory.len())
				.ok_or_else(|| error("write out of heap bounds"))?;
			let bytes = val.to_le_bytes();
			memory[range].copy_from_slice(&bytes[..]);
			Ok(())
		})
	}

	fn size(&self) -> u64 {
		debug_assert!(self.pages() <= MAX_WASM_PAGES);

		self.pages() as u64 * PAGE_SIZE as u64
	}
}

impl<T: Memory> MemoryExt for T {}

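/// Returns the byte range `offset..offset + length` if it lies fully within a heap of
/// `heap_len` bytes, or `None` otherwise.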
fn heap_range(offset: u32, length: u32, heap_len: usize) -> Option<Range<usize>> {
	let start = offset as usize;
	let end = offset.checked_add(length)? as usize;
	if end <= heap_len {
		Some(start..end)
	} else {
		None
	}
}

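// Guards against leaving the allocator in an inconsistent state: unless `disarm` is called,
// dropping the bomb (e.g. on an early `?` return) marks the allocator as poisoned, and every
// subsequent call will refuse to run.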
struct PoisonBomb<'a> {
	poisoned: &'a mut bool,
}

impl<'a> PoisonBomb<'a> {
	fn disarm(self) {
		mem::forget(self)
	}
}

impl<'a> Drop for PoisonBomb<'a> {
	fn drop(&mut self) {
		*self.poisoned = true;
	}
}

#[cfg(test)]
mod tests {
	use super::*;

	fn to_pointer(address: u32) -> Pointer<u8> {
		Pointer::new(address)
	}

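	// A minimal in-memory `Memory` implementation backed by a `Vec<u8>`, used only by these
	// tests.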
	#[derive(Debug)]
	struct MemoryInstance {
		data: Vec<u8>,
		max_wasm_pages: u32,
	}

	impl MemoryInstance {
		fn with_pages(pages: u32) -> Self {
			Self { data: vec![0; (pages * PAGE_SIZE) as usize], max_wasm_pages: MAX_WASM_PAGES }
		}

		fn set_max_wasm_pages(&mut self, max_pages: u32) {
			self.max_wasm_pages = max_pages;
		}
	}

	impl Memory for MemoryInstance {
		fn with_access<R>(&self, run: impl FnOnce(&[u8]) -> R) -> R {
			run(&self.data)
		}

		fn with_access_mut<R>(&mut self, run: impl FnOnce(&mut [u8]) -> R) -> R {
			run(&mut self.data)
		}

		fn pages(&self) -> u32 {
			pages_from_size(self.data.len() as u64).unwrap()
		}

		fn max_pages(&self) -> Option<u32> {
			Some(self.max_wasm_pages)
		}

		fn grow(&mut self, pages: u32) -> Result<(), ()> {
			if self.pages() + pages > self.max_wasm_pages {
				Err(())
			} else {
				self.data.resize(((self.pages() + pages) * PAGE_SIZE) as usize, 0);
				Ok(())
			}
		}
	}

	#[test]
	fn test_pages_from_size() {
		assert_eq!(pages_from_size(0).unwrap(), 0);
		assert_eq!(pages_from_size(1).unwrap(), 1);
		assert_eq!(pages_from_size(65536).unwrap(), 1);
		assert_eq!(pages_from_size(65536 + 1).unwrap(), 2);
		assert_eq!(pages_from_size(2 * 65536).unwrap(), 2);
		assert_eq!(pages_from_size(2 * 65536 + 1).unwrap(), 3);
	}

	#[test]
	fn should_allocate_properly() {
		let mut mem = MemoryInstance::with_pages(1);
		let mut heap = FreeingBumpHeapAllocator::new(0);

		let ptr = heap.allocate(&mut mem, 1).unwrap();

		// The returned pointer must skip the 8-byte header of the first allocation.
		assert_eq!(ptr, to_pointer(HEADER_SIZE));
	}

	#[test]
	fn should_always_align_pointers_to_multiples_of_8() {
		let mut mem = MemoryInstance::with_pages(1);
		let mut heap = FreeingBumpHeapAllocator::new(13);

		let ptr = heap.allocate(&mut mem, 1).unwrap();

		// The heap base of 13 is rounded up to 16, so the pointer lands at 16 + HEADER_SIZE.
		assert_eq!(ptr, to_pointer(24));
	}

	#[test]
	fn should_increment_pointers_properly() {
		let mut mem = MemoryInstance::with_pages(1);
		let mut heap = FreeingBumpHeapAllocator::new(0);

		let ptr1 = heap.allocate(&mut mem, 1).unwrap();
		let ptr2 = heap.allocate(&mut mem, 9).unwrap();
		let ptr3 = heap.allocate(&mut mem, 1).unwrap();

		// First allocation: an 8-byte block directly after its header.
		assert_eq!(ptr1, to_pointer(HEADER_SIZE));

		// Second allocation: 9 bytes round up to a 16-byte block, placed after the first one.
		assert_eq!(ptr2, to_pointer(24));

		// Third allocation follows the 16-byte block plus its header.
		assert_eq!(ptr3, to_pointer(24 + 16 + HEADER_SIZE));
	}

	#[test]
	fn should_free_properly() {
		let mut mem = MemoryInstance::with_pages(1);
		let mut heap = FreeingBumpHeapAllocator::new(0);
		let ptr1 = heap.allocate(&mut mem, 1).unwrap();
		assert_eq!(ptr1, to_pointer(HEADER_SIZE));

		let ptr2 = heap.allocate(&mut mem, 1).unwrap();
		assert_eq!(ptr2, to_pointer(24));

		heap.deallocate(&mut mem, ptr2).unwrap();

		// The freed block (minus its header) must now head the order-0 free list.
		assert_eq!(heap.free_lists.heads[0], Link::Ptr(u32::from(ptr2) - HEADER_SIZE));
	}

	#[test]
	fn should_deallocate_and_reallocate_properly() {
		let mut mem = MemoryInstance::with_pages(1);
		let padded_offset = 16;
		let mut heap = FreeingBumpHeapAllocator::new(13);

		let ptr1 = heap.allocate(&mut mem, 1).unwrap();
		assert_eq!(ptr1, to_pointer(padded_offset + HEADER_SIZE));

		let ptr2 = heap.allocate(&mut mem, 9).unwrap();
		assert_eq!(ptr2, to_pointer(padded_offset + 16 + HEADER_SIZE));

		// Freeing and re-allocating a block of the same order must reuse the freed block.
		heap.deallocate(&mut mem, ptr2).unwrap();
		let ptr3 = heap.allocate(&mut mem, 9).unwrap();

		assert_eq!(ptr3, to_pointer(padded_offset + 16 + HEADER_SIZE));
		assert_eq!(heap.free_lists.heads, [Link::Nil; N_ORDERS]);
	}

	#[test]
	fn should_build_linked_list_of_free_areas_properly() {
		let mut mem = MemoryInstance::with_pages(1);
		let mut heap = FreeingBumpHeapAllocator::new(0);

		let ptr1 = heap.allocate(&mut mem, 8).unwrap();
		let ptr2 = heap.allocate(&mut mem, 8).unwrap();
		let ptr3 = heap.allocate(&mut mem, 8).unwrap();

		heap.deallocate(&mut mem, ptr1).unwrap();
		heap.deallocate(&mut mem, ptr2).unwrap();
		heap.deallocate(&mut mem, ptr3).unwrap();

		// The most recently freed block heads the list.
		assert_eq!(heap.free_lists.heads[0], Link::Ptr(u32::from(ptr3) - HEADER_SIZE));

		let ptr4 = heap.allocate(&mut mem, 8).unwrap();
		assert_eq!(ptr4, ptr3);

		assert_eq!(heap.free_lists.heads[0], Link::Ptr(u32::from(ptr2) - HEADER_SIZE));
	}

	#[test]
	fn should_not_allocate_if_too_large() {
		let mut mem = MemoryInstance::with_pages(1);
		mem.set_max_wasm_pages(1);
		let mut heap = FreeingBumpHeapAllocator::new(13);

		let ptr = heap.allocate(&mut mem, PAGE_SIZE - 13);

		assert_eq!(Error::AllocatorOutOfSpace, ptr.unwrap_err());
	}

	#[test]
	fn should_not_allocate_if_full() {
		let mut mem = MemoryInstance::with_pages(1);
		mem.set_max_wasm_pages(1);
		let mut heap = FreeingBumpHeapAllocator::new(0);
		let ptr1 = heap.allocate(&mut mem, (PAGE_SIZE / 2) - HEADER_SIZE).unwrap();
		assert_eq!(ptr1, to_pointer(HEADER_SIZE));

		let ptr2 = heap.allocate(&mut mem, PAGE_SIZE / 2);

		match ptr2.unwrap_err() {
			Error::AllocatorOutOfSpace => {},
			e => panic!("Expected allocator out of space error, got: {:?}", e),
		}
	}

	#[test]
	fn should_allocate_max_possible_allocation_size() {
		let mut mem = MemoryInstance::with_pages(1);
		let mut heap = FreeingBumpHeapAllocator::new(0);

		let ptr = heap.allocate(&mut mem, MAX_POSSIBLE_ALLOCATION).unwrap();

		assert_eq!(ptr, to_pointer(HEADER_SIZE));
	}

	#[test]
	fn should_not_allocate_if_requested_size_too_large() {
		let mut mem = MemoryInstance::with_pages(1);
		let mut heap = FreeingBumpHeapAllocator::new(0);

		let ptr = heap.allocate(&mut mem, MAX_POSSIBLE_ALLOCATION + 1);

		assert_eq!(Error::RequestedAllocationTooLarge, ptr.unwrap_err());
	}

	#[test]
	fn should_return_error_when_bumper_greater_than_heap_size() {
		let mut mem = MemoryInstance::with_pages(1);
		mem.set_max_wasm_pages(1);
		let mut heap = FreeingBumpHeapAllocator::new(0);

		// Each allocation takes 32 bytes plus an 8-byte header, i.e. 40 bytes, so this fills
		// all but the last 16 bytes of the single page.
		let mut ptrs = Vec::new();
		for _ in 0..(PAGE_SIZE as usize / 40) {
			ptrs.push(heap.allocate(&mut mem, 32).expect("Allocate 32 byte"));
		}

		assert_eq!(heap.stats.bytes_allocated, PAGE_SIZE - 16);
		assert_eq!(heap.bumper, PAGE_SIZE - 16);

		ptrs.into_iter()
			.for_each(|ptr| heap.deallocate(&mut mem, ptr).expect("Deallocate 32 byte"));

		assert_eq!(heap.stats.bytes_allocated, 0);
		assert_eq!(heap.stats.bytes_allocated_peak, PAGE_SIZE - 16);
		assert_eq!(heap.bumper, PAGE_SIZE - 16);

		// No free block of order 0 exists, so this bumps the pointer right to the end of the page.
		heap.allocate(&mut mem, 8).expect("Allocate 8 byte");

		assert_eq!(heap.bumper as u64, mem.size());
		// A further order-0 allocation would need a second page, which the memory cannot grow to.
		let ptr = heap.allocate(&mut mem, 8);

		assert_eq!(Error::AllocatorOutOfSpace, ptr.unwrap_err());
	}

	#[test]
	fn should_include_prefixes_in_total_heap_size() {
		let mut mem = MemoryInstance::with_pages(1);
		let mut heap = FreeingBumpHeapAllocator::new(1);

		// A 9-byte request is rounded up to a 16-byte block, plus the 8-byte header.
		heap.allocate(&mut mem, 9).unwrap();

		assert_eq!(heap.stats.bytes_allocated, HEADER_SIZE + 16);
	}

	#[test]
	fn should_calculate_total_heap_size_to_zero() {
		let mut mem = MemoryInstance::with_pages(1);
		let mut heap = FreeingBumpHeapAllocator::new(13);

		let ptr = heap.allocate(&mut mem, 42).unwrap();
		assert_eq!(ptr, to_pointer(16 + HEADER_SIZE));
		heap.deallocate(&mut mem, ptr).unwrap();

		assert_eq!(heap.stats.bytes_allocated, 0);
	}

	#[test]
	fn should_calculate_total_size_of_zero() {
		let mut mem = MemoryInstance::with_pages(1);
		let mut heap = FreeingBumpHeapAllocator::new(19);

		for _ in 1..10 {
			let ptr = heap.allocate(&mut mem, 42).unwrap();
			heap.deallocate(&mut mem, ptr).unwrap();
		}

		assert_eq!(heap.stats.bytes_allocated, 0);
	}

	#[test]
	fn should_read_and_write_u64_correctly() {
		let mut mem = MemoryInstance::with_pages(1);

		mem.write_le_u64(40, 4480113).unwrap();

		let value = MemoryExt::read_le_u64(&mut mem, 40).unwrap();
		assert_eq!(value, 4480113);
	}

	#[test]
	fn should_get_item_size_from_order() {
		let raw_order = 0;

		let item_size = Order::from_raw(raw_order).unwrap().size();

		assert_eq!(item_size, 8);
	}

	#[test]
	fn should_get_max_item_size_from_index() {
		let raw_order = 22;

		let item_size = Order::from_raw(raw_order).unwrap().size();

		assert_eq!(item_size as u32, MAX_POSSIBLE_ALLOCATION);
	}

	#[test]
	fn deallocate_needs_to_maintain_linked_list() {
		let mut mem = MemoryInstance::with_pages(1);
		let mut heap = FreeingBumpHeapAllocator::new(0);

		// Allocate some blocks and free them in reverse order.
		let ptrs = (0..4).map(|_| heap.allocate(&mut mem, 8).unwrap()).collect::<Vec<_>>();
		ptrs.iter().rev().for_each(|ptr| heap.deallocate(&mut mem, *ptr).unwrap());

		// Allocating again must hand out the same pointers in the same order.
		let new_ptrs = (0..4).map(|_| heap.allocate(&mut mem, 8).unwrap()).collect::<Vec<_>>();
		assert_eq!(ptrs, new_ptrs);
	}

	#[test]
	fn header_read_write() {
		let roundtrip = |header: Header| {
			let mut memory = MemoryInstance::with_pages(1);
			header.write_into(&mut memory, 0).unwrap();

			let read_header = Header::read_from(&memory, 0).unwrap();
			assert_eq!(header, read_header);
		};

		roundtrip(Header::Occupied(Order(0)));
		roundtrip(Header::Occupied(Order(1)));
		roundtrip(Header::Free(Link::Nil));
		roundtrip(Header::Free(Link::Ptr(0)));
		roundtrip(Header::Free(Link::Ptr(4)));
	}

	#[test]
	fn poison_oom() {
		let mut mem = MemoryInstance::with_pages(1);
		mem.set_max_wasm_pages(1);

		let mut heap = FreeingBumpHeapAllocator::new(0);

		let alloc_ptr = heap.allocate(&mut mem, PAGE_SIZE / 2).unwrap();
		assert_eq!(Error::AllocatorOutOfSpace, heap.allocate(&mut mem, PAGE_SIZE).unwrap_err());

		// The failed allocation poisons the allocator, so even otherwise valid calls are refused.
		assert!(heap.poisoned);
		assert!(heap.deallocate(&mut mem, alloc_ptr).is_err());
	}

	#[test]
	fn test_n_orders() {
		// `N_ORDERS` must be consistent with the minimum and maximum possible allocation sizes.
		assert_eq!(
			MIN_POSSIBLE_ALLOCATION * 2u32.pow(N_ORDERS as u32 - 1),
			MAX_POSSIBLE_ALLOCATION
		);
	}

	#[test]
	fn accepts_growing_memory() {
		let mut mem = MemoryInstance::with_pages(1);
		let mut heap = FreeingBumpHeapAllocator::new(0);

		heap.allocate(&mut mem, PAGE_SIZE / 2).unwrap();
		heap.allocate(&mut mem, PAGE_SIZE / 2).unwrap();

		mem.grow(1).unwrap();

		heap.allocate(&mut mem, PAGE_SIZE / 2).unwrap();
	}

	#[test]
	fn doesnt_accept_shrinking_memory() {
		let mut mem = MemoryInstance::with_pages(2);
		let mut heap = FreeingBumpHeapAllocator::new(0);

		heap.allocate(&mut mem, PAGE_SIZE / 2).unwrap();

		mem.data.truncate(PAGE_SIZE as usize);

		match heap.allocate(&mut mem, PAGE_SIZE / 2).unwrap_err() {
			Error::MemoryShrinked => (),
			_ => panic!(),
		}
	}

	#[test]
	fn should_grow_memory_when_running_out_of_memory() {
		let mut mem = MemoryInstance::with_pages(1);
		let mut heap = FreeingBumpHeapAllocator::new(0);

		assert_eq!(1, mem.pages());

		// Two pages of payload plus the header need three pages in total.
		heap.allocate(&mut mem, PAGE_SIZE * 2).unwrap();

		assert_eq!(3, mem.pages());
	}

	#[test]
	fn modifying_the_header_leads_to_an_error() {
		let mut mem = MemoryInstance::with_pages(1);
		let mut heap = FreeingBumpHeapAllocator::new(0);

		let ptr = heap.allocate(&mut mem, 5).unwrap();

		heap.deallocate(&mut mem, ptr).unwrap();

		// Corrupt the freed block's free-list link so that it points far outside the memory.
		Header::Free(Link::Ptr(u32::MAX - 1))
			.write_into(&mut mem, u32::from(ptr) - HEADER_SIZE)
			.unwrap();

		// The first allocation reuses the freed block; the second must detect the bogus link.
		heap.allocate(&mut mem, 5).unwrap();
		assert!(heap
			.allocate(&mut mem, 5)
			.unwrap_err()
			.to_string()
			.contains("Invalid header pointer"));
	}
}