1use alloc::vec::Vec;
2
#[allow(dead_code)]
#[derive(Copy, Clone, Default)]
/// Parameters describing how a guest program's memory should be set up.
/// Only the `*_size` fields feed into the memory map (see `memory_map`);
/// `ro_data`/`rw_data` hold the initial contents of those regions.
pub struct GuestInit<'a> {
    // Page size used to align/round the regions below.
    pub page_size: u32,
    // Initial contents of the read-only data region.
    pub ro_data: &'a [u8],
    // Initial contents of the read-write data region.
    pub rw_data: &'a [u8],
    // Size of the read-only region; may exceed `ro_data.len()` — presumably the
    // remainder is zero-filled (TODO confirm against the loader).
    pub ro_data_size: u32,
    // Size of the read-write region; same relationship to `rw_data` as above.
    pub rw_data_size: u32,
    // Size of the guest stack.
    pub stack_size: u32,
    // Size of the auxiliary data region.
    pub aux_data_size: u32,
}
14
impl<'a> GuestInit<'a> {
    /// Builds the guest's memory map from the configured region sizes.
    ///
    /// Note that only the `*_size` fields and `page_size` are consulted here;
    /// the `ro_data`/`rw_data` byte slices do not influence the layout.
    ///
    /// Returns an error (a static message from the builder) if the sizes
    /// cannot be combined into a valid layout.
    pub fn memory_map(&self) -> Result<polkavm_common::abi::MemoryMap, &'static str> {
        polkavm_common::abi::MemoryMapBuilder::new(self.page_size)
            .ro_data_size(self.ro_data_size)
            .rw_data_size(self.rw_data_size)
            .stack_size(self.stack_size)
            .aux_data_size(self.aux_data_size)
            .build()
    }
}
25
/// A dense map from small `u32` keys to `Copy` values, backed by a `Vec`
/// indexed directly by key. With `LAZY_ALLOCATION == true` the backing
/// storage is only allocated on the first `insert`.
pub(crate) struct FlatMap<T, const LAZY_ALLOCATION: bool> {
    // Dense storage indexed by key; `None` marks an absent entry.
    inner: Vec<Option<T>>,
    // Capacity requested at construction; used by `allocate_capacity`
    // when the storage is (re)filled lazily.
    desired_capacity: u32,
}

impl<T, const LAZY_ALLOCATION: bool> FlatMap<T, LAZY_ALLOCATION>
where
    T: Copy,
{
    /// Creates a map able to hold keys in `0..capacity`.
    ///
    /// When `LAZY_ALLOCATION` is `true` no memory is allocated until the
    /// first `insert`; `get` works immediately and returns `None`.
    #[inline]
    pub fn new(capacity: u32) -> Self {
        let mut inner = Vec::new();

        if !LAZY_ALLOCATION {
            inner.reserve_exact(capacity as usize);
            inner.resize_with(capacity as usize, || None);
        }

        Self {
            inner,
            desired_capacity: capacity,
        }
    }

    /// Creates a fresh, empty map of the given `capacity`, reusing the
    /// allocation of `memory`.
    ///
    /// The storage is populated eagerly here even under `LAZY_ALLOCATION`,
    /// since the backing buffer typically already exists.
    #[inline]
    #[allow(dead_code)]
    pub fn new_reusing_memory(mut memory: Self, capacity: u32) -> Self {
        memory.inner.clear();
        memory.inner.resize_with(capacity as usize, || None);
        memory.desired_capacity = capacity;
        memory
    }

    /// Returns the value stored under `key`, or `None` if the entry is
    /// absent or `key` is out of range.
    #[inline]
    pub fn get(&self, key: u32) -> Option<T> {
        self.inner.get(key as usize).and_then(|value| *value)
    }

    /// Returns the length of the backing storage. With `LAZY_ALLOCATION`
    /// this is 0 until the first `insert`.
    #[inline]
    #[allow(dead_code)]
    pub fn len(&self) -> u32 {
        self.inner.len() as u32
    }

    /// Fills the backing storage up to `desired_capacity`. Kept out-of-line
    /// and `#[cold]` so the fast path of `insert` stays small.
    #[cold]
    fn allocate_capacity(&mut self) {
        debug_assert!(self.inner.is_empty(), "FlatMap storage should be empty here");
        self.inner.reserve_exact(self.desired_capacity as usize);
        self.inner.resize_with(self.desired_capacity as usize, || None);
    }

    /// Inserts `value` under `key`.
    ///
    /// Panics if `key` is not below the capacity given at construction.
    #[inline]
    pub fn insert(&mut self, key: u32, value: T) {
        // BUGFIX: previously this checked `self.inner.capacity() == 0`, which
        // missed the state left behind by `clear()` (allocation kept, elements
        // gone) — any insert after `clear()` would then panic indexing an
        // empty vec. Checking emptiness covers both the never-allocated and
        // the cleared state.
        if self.inner.is_empty() {
            self.allocate_capacity();
        }
        self.inner[key as usize] = Some(value);
    }

    /// Removes every entry while keeping the allocation for reuse.
    #[inline]
    #[allow(dead_code)]
    pub fn clear(&mut self) {
        self.inner.clear();
    }

    /// Removes every entry and releases the backing allocation; the next
    /// `insert` will reallocate up to the original capacity.
    #[inline]
    pub fn reset(&mut self) {
        self.inner.clear();
        self.inner.shrink_to_fit();
    }
}
97
#[derive(Clone, PartialEq, Eq, Debug)]
#[non_exhaustive]
/// Details of a guest memory fault reported via `InterruptKind::Segfault`.
/// Marked `#[non_exhaustive]` so new fields can be added without breaking
/// downstream matches/constructors.
pub struct Segfault {
    // Address of the faulting page (presumably page-aligned — TODO confirm
    // at the site that constructs this).
    pub page_address: u32,

    // Size of the faulting page.
    pub page_size: u32,

    // Whether the accessed page was write-protected.
    pub is_write_protected: bool,
}
110
#[derive(Clone, PartialEq, Eq, Debug)]
/// Reason why guest execution was interrupted and control returned to the host.
/// NOTE(review): the exact semantics of each variant are defined by the
/// interpreter/backend, which is not visible in this file — the notes below
/// are inferred from the names and should be confirmed there.
pub enum InterruptKind {
    // The program finished executing.
    Finished,

    // The program hit a trap.
    Trap,

    // The program executed an `ecalli` (hostcall) instruction; the payload is
    // presumably the hostcall number — confirm against the dispatcher.
    Ecalli(u32),

    // The program faulted on a memory access; see `Segfault` for details.
    Segfault(Segfault),

    // The program ran out of gas.
    NotEnoughGas,

    // Single-step completed (used when step tracing is enabled — confirm).
    Step,
}