use crate::api::{BackendAccess, ExecuteArgs, HostcallHandler, MemoryAccessError, Module};
use crate::error::Error;
use crate::utils::GuestInit;
use crate::utils::RegImm;
use core::mem::MaybeUninit;
use polkavm_common::abi::{VM_ADDR_RETURN_TO_HOST, VM_CODE_ADDRESS_ALIGNMENT};
use polkavm_common::error::Trap;
use polkavm_common::operation::*;
use polkavm_common::program::{Instruction, InstructionVisitor, Reg};
use polkavm_common::utils::{align_to_next_page_usize, byte_slice_init, Access, AsUninitSliceMut, Gas};
use polkavm_common::{
    VM_RPC_FLAG_CLEAR_PROGRAM_AFTER_EXECUTION, VM_RPC_FLAG_RESET_MEMORY_AFTER_EXECUTION, VM_RPC_FLAG_RESET_MEMORY_BEFORE_EXECUTION,
};

type ExecutionError<E = core::convert::Infallible> = polkavm_common::error::ExecutionError<E>;

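/// A module specialized for the interpreter: the decoded instruction stream,
/// the initial read-only and read-write data images, and the precomputed gas
/// cost of every basic block.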
pub(crate) struct InterpretedModule {
    pub(crate) instructions: Vec<Instruction>,
    ro_data: Vec<u8>,
    rw_data: Vec<u8>,
    pub(crate) gas_cost_for_basic_block: Vec<u32>,
}

impl InterpretedModule {
    pub fn new(init: GuestInit, gas_cost_for_basic_block: Vec<u32>, instructions: Vec<Instruction>) -> Result<Self, Error> {
        let memory_map = init.memory_map().map_err(Error::from_static_str)?;
        let mut ro_data: Vec<_> = init.ro_data.into();
        ro_data.resize(memory_map.ro_data_size() as usize, 0);

        Ok(InterpretedModule {
            instructions,
            ro_data,
            rw_data: init.rw_data.into(),
            gas_cost_for_basic_block,
        })
    }
}

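// Tracing hooks: `OnSetReg` fires after every register write with `(reg, value)`,
// and `OnStore` fires after every memory store with `(address, bytes_written)`.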
pub(crate) type OnSetReg<'a> = &'a mut dyn FnMut(Reg, u32) -> Result<(), Trap>;
pub(crate) type OnStore<'a> = &'a mut dyn for<'r> FnMut(u32, &'r [u8]) -> Result<(), Trap>;

#[derive(Default)]
pub(crate) struct InterpreterContext<'a> {
    on_hostcall: Option<HostcallHandler<'a>>,
    on_set_reg: Option<OnSetReg<'a>>,
    on_store: Option<OnStore<'a>>,
}

impl<'a> InterpreterContext<'a> {
    pub fn set_on_hostcall(&mut self, on_hostcall: HostcallHandler<'a>) {
        self.on_hostcall = Some(on_hostcall);
    }

    pub fn set_on_set_reg(&mut self, on_set_reg: OnSetReg<'a>) {
        self.on_set_reg = Some(on_set_reg);
    }

    pub fn set_on_store(&mut self, on_store: OnStore<'a>) {
        self.on_store = Some(on_store);
    }
}

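/// The complete execution state of an interpreted instance: the loaded module,
/// linear memory (`rw_data` plus `stack`), the register file, and the current
/// position as an instruction index (`nth_instruction`) paired with a basic
/// block index (`nth_basic_block`).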
pub(crate) struct InterpretedInstance {
    module: Option<Module>,
    rw_data: Vec<u8>,
    stack: Vec<u8>,
    regs: [u32; Reg::ALL.len()],
    nth_instruction: u32,
    nth_basic_block: u32,
    return_to_host: bool,
    cycle_counter: u64,
    gas_remaining: Option<i64>,
    in_new_execution: bool,
    is_memory_dirty: bool,
    heap_size: u32,
}

impl InterpretedInstance {
    pub fn new() -> Self {
        Self {
            module: None,
            rw_data: Vec::new(),
            stack: Vec::new(),
            regs: [0; Reg::ALL.len()],
            nth_instruction: VM_ADDR_RETURN_TO_HOST,
            nth_basic_block: 0,
            return_to_host: true,
            cycle_counter: 0,
            gas_remaining: None,
            in_new_execution: false,
            is_memory_dirty: false,
            heap_size: 0,
        }
    }

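    /// Creates an interpreter and loads `module` into it by running an empty
    /// `ExecuteArgs` pass; since no entry point is set, nothing is executed.
    ///
    /// A minimal usage sketch (these types are crate-private, and `module` and
    /// `export_index` are hypothetical, so this is illustrative only):
    ///
    /// ```ignore
    /// let mut instance = InterpretedInstance::new_from_module(&module)?;
    /// let mut args = ExecuteArgs::new();
    /// args.entry_point = Some(export_index);
    /// instance.execute(args)?;
    /// ```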
    pub fn new_from_module(module: &Module) -> Result<Self, Error> {
        let mut instance = InterpretedInstance::new();
        let mut args = ExecuteArgs::new();
        args.module = Some(module);
        instance.execute(args).map_err(Error::from_execution_error)?;

        Ok(instance)
    }

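    /// Runs one complete execution: applies `args` (module swap, registers, gas,
    /// entry point), interprets until the guest returns to the host or traps, and
    /// finally applies the post-execution flags (memory reset / program clear).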
    pub fn execute(&mut self, mut args: ExecuteArgs) -> Result<(), ExecutionError<Error>> {
        self.prepare_for_execution(&args);

        let mut ctx = InterpreterContext::default();
        if let Some(hostcall_handler) = args.hostcall_handler.take() {
            ctx.set_on_hostcall(hostcall_handler);
        }

        let result = if args.entry_point.is_some() { self.run(ctx) } else { Ok(()) };

        self.finish_execution(args.flags);
        result
    }

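    /// The main dispatch loop: fetches the instruction at `nth_instruction` and
    /// dispatches it through `InstructionVisitor` until `return_to_host` is set.
    /// Running off the end of the instruction stream is a trap.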
    pub fn run(&mut self, ctx: InterpreterContext) -> Result<(), ExecutionError<Error>> {
        fn translate_error(error: Result<(), ExecutionError>) -> Result<(), ExecutionError<Error>> {
            error.map_err(|error| match error {
                ExecutionError::Trap(trap) => ExecutionError::Trap(trap),
                ExecutionError::OutOfGas => ExecutionError::OutOfGas,
                ExecutionError::Error(_) => unreachable!(),
            })
        }

        self.is_memory_dirty = true;

        if self.in_new_execution {
            self.in_new_execution = false;
            translate_error(self.on_start_new_basic_block())?;
        }

        let Some(module) = self.module.as_ref() else {
            return Err(ExecutionError::Error(Error::from_static_str("no module loaded")));
        };

        let module = module.clone();
        let mut visitor = Visitor { inner: self, ctx };
        loop {
            visitor.inner.cycle_counter += 1;
            let Some(instruction) = module.instructions().get(visitor.inner.nth_instruction as usize).copied() else {
                return Err(ExecutionError::Trap(Default::default()));
            };

            translate_error(instruction.visit(&mut visitor))?;
            if visitor.inner.return_to_host {
                break;
            }
        }

        Ok(())
    }

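    /// Executes exactly one instruction. A stepping sketch with a register-write
    /// tracing hook (illustrative; these types are crate-private):
    ///
    /// ```ignore
    /// let mut on_set_reg = |reg, value| {
    ///     log::trace!("{reg} = 0x{value:x}");
    ///     Ok(())
    /// };
    ///
    /// let mut ctx = InterpreterContext::default();
    /// ctx.set_on_set_reg(&mut on_set_reg);
    /// instance.step_once(ctx)?;
    /// ```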
    pub fn step_once(&mut self, ctx: InterpreterContext) -> Result<(), ExecutionError> {
        if self.in_new_execution {
            self.in_new_execution = false;
            self.on_start_new_basic_block()?;
        }

        self.cycle_counter += 1;
        let module = self.module.as_ref().expect("no module loaded");
        let Some(instruction) = module.instructions().get(self.nth_instruction as usize).copied() else {
            return Err(ExecutionError::Trap(Default::default()));
        };

        let mut visitor = Visitor { inner: self, ctx };
        instruction.visit(&mut visitor)
    }

    fn reset_instance(&mut self) {
        self.rw_data.clear();
        self.stack.clear();

        *self = Self {
            rw_data: core::mem::take(&mut self.rw_data),
            stack: core::mem::take(&mut self.stack),
            ..Self::new()
        };
    }

    pub fn reset_memory(&mut self) {
        if self.is_memory_dirty {
            self.force_reset_memory();
        }
    }

    fn force_reset_memory(&mut self) {
        self.rw_data.clear();
        self.stack.clear();
        self.heap_size = 0;
        self.is_memory_dirty = false;

        if let Some(module) = self.module.as_ref() {
            let interpreted_module = module.interpreted_module().unwrap();
            self.rw_data.extend_from_slice(&interpreted_module.rw_data);
            self.rw_data.resize(module.memory_map().rw_data_size() as usize, 0);
            self.stack.resize(module.memory_map().stack_size() as usize, 0);
        }
    }

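    /// Grows the heap by `size` bytes and returns the new heap top, or `None`
    /// if the heap would exceed `max_heap_size`. The backing `rw_data` buffer
    /// is only enlarged (to the next page boundary) once the heap top crosses
    /// past the region already allocated.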
    pub fn sbrk(&mut self, size: u32) -> Option<u32> {
        let module = self.module.as_ref()?;
        let new_heap_size = self.heap_size.checked_add(size)?;
        let memory_map = module.memory_map();
        if new_heap_size > memory_map.max_heap_size() {
            return None;
        }

        log::trace!("sbrk: +{} (heap size: {} -> {})", size, self.heap_size, new_heap_size);

        self.heap_size = new_heap_size;
        let heap_top = memory_map.heap_base() + new_heap_size;
        if heap_top as usize > memory_map.rw_data_address() as usize + self.rw_data.len() {
            let new_size = align_to_next_page_usize(memory_map.page_size() as usize, heap_top as usize).unwrap()
                - memory_map.rw_data_address() as usize;
            log::trace!("sbrk: growing memory: {} -> {}", self.rw_data.len(), new_size);
            self.rw_data.resize(new_size, 0);
        }

        Some(heap_top)
    }

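    /// Applies `args` ahead of a run: optionally swaps in a new module (which
    /// resets the whole instance), seeds registers and gas, resolves the entry
    /// point into an instruction/basic-block pair, and honors the
    /// reset-memory-before-execution flag.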
    pub fn prepare_for_execution(&mut self, args: &ExecuteArgs) {
        if let Some(module) = args.module {
            if module.interpreted_module().is_none() {
                panic!("internal error: an interpreter cannot be created from the given module");
            }

            self.reset_instance();
            self.module = Some(module.clone());
            if module.gas_metering().is_some() {
                self.gas_remaining = Some(0);
            }

            self.force_reset_memory();
        }

        if let Some(regs) = args.regs {
            self.regs.copy_from_slice(regs);
        }

        if self.module.as_ref().and_then(|module| module.gas_metering()).is_some() {
            if let Some(gas) = args.gas {
                self.gas_remaining = Some(gas.get() as i64);
            }
        } else {
            self.gas_remaining = None;
        }

        if let Some(entry_point) = args.entry_point {
            let module = self
                .module
                .as_ref()
                .expect("internal error: tried to call into an instance without a loaded module");

            let nth_basic_block = module
                .get_export(entry_point)
                .expect("internal error: invalid export index")
                .jump_target();

            let nth_instruction = module
                .instruction_by_basic_block(nth_basic_block)
                .expect("internal error: invalid export address");

            self.nth_instruction = nth_instruction;
            self.nth_basic_block = nth_basic_block;
        }

        if args.flags & VM_RPC_FLAG_RESET_MEMORY_BEFORE_EXECUTION != 0 {
            self.reset_memory();
        }

        if args.sbrk > 0 {
            self.sbrk(args.sbrk).expect("internal error: sbrk failed");
        }

        self.return_to_host = false;
        self.in_new_execution = true;
    }

    pub fn finish_execution(&mut self, flags: u32) {
        if flags & VM_RPC_FLAG_CLEAR_PROGRAM_AFTER_EXECUTION != 0 {
            self.reset_instance();
        } else if flags & VM_RPC_FLAG_RESET_MEMORY_AFTER_EXECUTION != 0 {
            self.reset_memory();
        }
    }

    pub fn access(&mut self) -> InterpretedAccess {
        InterpretedAccess { instance: self }
    }

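    /// Resolves a guest address to a slice of the backing region. Regions are
    /// checked from the highest base address downwards: stack, then read-write
    /// data, then read-only data; anything below the read-only base is unmapped.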
    fn get_memory_slice(&self, address: u32, length: u32) -> Option<&[u8]> {
        let module = self.module.as_ref()?;
        let memory_map = module.memory_map();
        let (start, memory_slice) = if address >= memory_map.stack_address_low() {
            (memory_map.stack_address_low(), &self.stack)
        } else if address >= memory_map.rw_data_address() {
            (memory_map.rw_data_address(), &self.rw_data)
        } else if address >= memory_map.ro_data_address() {
            let module = module.interpreted_module().unwrap();
            (memory_map.ro_data_address(), &module.ro_data)
        } else {
            return None;
        };

        let offset = address - start;
        memory_slice.get(offset as usize..offset as usize + length as usize)
    }

    fn get_memory_slice_mut(&mut self, address: u32, length: u32) -> Option<&mut [u8]> {
        let memory_map = self.module.as_ref()?.memory_map();
        let (start, memory_slice) = if address >= memory_map.stack_address_low() {
            (memory_map.stack_address_low(), &mut self.stack)
        } else if address >= memory_map.rw_data_address() {
            (memory_map.rw_data_address(), &mut self.rw_data)
        } else {
            return None;
        };

        let offset = (address - start) as usize;
        memory_slice.get_mut(offset..offset + length as usize)
    }

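    /// Gas is metered per basic block: on entry to a block its entire
    /// precomputed cost is deducted up front, and execution aborts with
    /// `OutOfGas` once the balance turns negative.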
    fn on_start_new_basic_block(&mut self) -> Result<(), ExecutionError> {
        if let Some(ref mut gas_remaining) = self.gas_remaining {
            let module = self.module.as_ref().unwrap().interpreted_module().unwrap();
            let gas_cost = i64::from(module.gas_cost_for_basic_block[self.nth_basic_block as usize]);

            log::trace!(
                "Consume gas at @{:x}: {} ({} -> {})",
                self.nth_basic_block,
                gas_cost,
                *gas_remaining,
                *gas_remaining - gas_cost
            );

            *gas_remaining -= gas_cost;
            if *gas_remaining < 0 {
                return Err(ExecutionError::OutOfGas);
            }
        }

        Ok(())
    }

    fn check_gas(&mut self) -> Result<(), ExecutionError> {
        if let Some(ref mut gas_remaining) = self.gas_remaining {
            if *gas_remaining < 0 {
                return Err(ExecutionError::OutOfGas);
            }
        }

        Ok(())
    }
}

pub struct InterpretedAccess<'a> {
    instance: &'a mut InterpretedInstance,
}

impl<'a> Access<'a> for InterpretedAccess<'a> {
    type Error = MemoryAccessError<&'static str>;

    fn get_reg(&self, reg: Reg) -> u32 {
        self.instance.regs[reg as usize]
    }

    fn set_reg(&mut self, reg: Reg, value: u32) {
        self.instance.regs[reg as usize] = value;
    }

    fn read_memory_into_slice<'slice, T>(&self, address: u32, buffer: &'slice mut T) -> Result<&'slice mut [u8], Self::Error>
    where
        T: ?Sized + AsUninitSliceMut,
    {
        let buffer: &mut [MaybeUninit<u8>] = buffer.as_uninit_slice_mut();
        let Some(slice) = self.instance.get_memory_slice(address, buffer.len() as u32) else {
            return Err(MemoryAccessError {
                address,
                length: buffer.len() as u64,
                error: "out of range read",
            });
        };

        Ok(byte_slice_init(buffer, slice))
    }

    fn write_memory(&mut self, address: u32, data: &[u8]) -> Result<(), Self::Error> {
        self.instance.is_memory_dirty = true;

        let Some(slice) = self.instance.get_memory_slice_mut(address, data.len() as u32) else {
            return Err(MemoryAccessError {
                address,
                length: data.len() as u64,
                error: "out of range write",
            });
        };

        slice.copy_from_slice(data);
        Ok(())
    }

    fn sbrk(&mut self, size: u32) -> Option<u32> {
        self.instance.sbrk(size)
    }

    fn heap_size(&self) -> u32 {
        self.instance.heap_size
    }

    fn program_counter(&self) -> Option<u32> {
        Some(self.instance.nth_instruction)
    }

    fn native_program_counter(&self) -> Option<u64> {
        None
    }

    fn gas_remaining(&self) -> Option<Gas> {
        let gas = self.instance.gas_remaining?;
        Some(Gas::new(gas as u64).unwrap_or(Gas::MIN))
    }

    fn consume_gas(&mut self, gas: u64) {
        if let Some(ref mut gas_remaining) = self.instance.gas_remaining {
            *gas_remaining = gas_remaining.checked_sub_unsigned(gas).unwrap_or(-1);
        }
    }
}

struct Visitor<'a, 'b> {
    inner: &'a mut InterpretedInstance,
    ctx: InterpreterContext<'b>,
}

impl<'a, 'b> Visitor<'a, 'b> {
    #[inline(always)]
    fn get(&self, regimm: impl Into<RegImm>) -> u32 {
        match regimm.into() {
            RegImm::Reg(reg) => self.inner.regs[reg as usize],
            RegImm::Imm(value) => value,
        }
    }

    #[inline(always)]
    fn set(&mut self, dst: Reg, value: u32) -> Result<(), ExecutionError> {
        self.inner.regs[dst as usize] = value;
        log::trace!("{dst} = 0x{value:x}");

        if let Some(on_set_reg) = self.ctx.on_set_reg.as_mut() {
            (on_set_reg)(dst, value).map_err(ExecutionError::Trap)
        } else {
            Ok(())
        }
    }

    #[inline(always)]
    fn set3(
        &mut self,
        dst: Reg,
        s1: impl Into<RegImm>,
        s2: impl Into<RegImm>,
        callback: impl Fn(u32, u32) -> u32,
    ) -> Result<(), ExecutionError> {
        let s1 = self.get(s1);
        let s2 = self.get(s2);
        self.set(dst, callback(s1, s2))?;
        self.inner.nth_instruction += 1;
        Ok(())
    }

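    /// Every branch terminates a basic block: a taken branch jumps to the
    /// target block, a fall-through advances into the next block, and in both
    /// cases the gas cost of the newly entered block is charged.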
    fn branch(
        &mut self,
        s1: impl Into<RegImm>,
        s2: impl Into<RegImm>,
        target: u32,
        callback: impl Fn(u32, u32) -> bool,
    ) -> Result<(), ExecutionError> {
        let s1 = self.get(s1);
        let s2 = self.get(s2);
        if callback(s1, s2) {
            self.inner.nth_instruction = self
                .inner
                .module
                .as_ref()
                .unwrap()
                .instruction_by_basic_block(target)
                .expect("internal error: couldn't fetch the instruction index for a branch");
            self.inner.nth_basic_block = target;
        } else {
            self.inner.nth_instruction += 1;
            self.inner.nth_basic_block += 1;
        }

        self.inner.on_start_new_basic_block()
    }

    fn load<T: LoadTy>(&mut self, dst: Reg, base: Option<Reg>, offset: u32) -> Result<(), ExecutionError> {
        assert!(core::mem::size_of::<T>() >= 1);

        let address = base.map_or(0, |base| self.inner.regs[base as usize]).wrapping_add(offset);
        let length = core::mem::size_of::<T>() as u32;
        let Some(slice) = self.inner.get_memory_slice(address, length) else {
            log::debug!(
                "Load of {length} bytes from 0x{address:x} failed! (pc = #{pc}, cycle = {cycle})",
                pc = self.inner.nth_instruction,
                cycle = self.inner.cycle_counter
            );

            self.inner
                .module
                .as_ref()
                .unwrap()
                .debug_print_location(log::Level::Debug, self.inner.nth_instruction);
            return Err(ExecutionError::Trap(Default::default()));
        };

        log::trace!("{dst} = {kind} [0x{address:x}]", kind = core::any::type_name::<T>());

        let value = T::from_slice(slice);
        self.set(dst, value)?;
        self.inner.nth_instruction += 1;
        Ok(())
    }

    fn store<T: StoreTy>(&mut self, src: impl Into<RegImm>, base: Option<Reg>, offset: u32) -> Result<(), ExecutionError> {
        assert!(core::mem::size_of::<T>() >= 1);
        self.inner.is_memory_dirty = true;

        let address = base.map_or(0, |base| self.inner.regs[base as usize]).wrapping_add(offset);
        let value = match src.into() {
            RegImm::Reg(src) => {
                let value = self.inner.regs[src as usize];
                log::trace!("{kind} [0x{address:x}] = {src} = 0x{value:x}", kind = core::any::type_name::<T>());
                value
            }
            RegImm::Imm(value) => {
                log::trace!("{kind} [0x{address:x}] = 0x{value:x}", kind = core::any::type_name::<T>());
                value
            }
        };

        let length = core::mem::size_of::<T>() as u32;
        let Some(slice) = self.inner.get_memory_slice_mut(address, length) else {
            log::debug!(
                "Store of {length} bytes to 0x{address:x} failed! (pc = #{pc}, cycle = {cycle})",
                pc = self.inner.nth_instruction,
                cycle = self.inner.cycle_counter
            );
            self.inner
                .module
                .as_ref()
                .unwrap()
                .debug_print_location(log::Level::Debug, self.inner.nth_instruction);
            return Err(ExecutionError::Trap(Default::default()));
        };

        let value = T::into_bytes(value);
        slice.copy_from_slice(value.as_ref());

        if let Some(on_store) = self.ctx.on_store.as_mut() {
            (on_store)(address, value.as_ref()).map_err(ExecutionError::Trap)?;
        }

        self.inner.nth_instruction += 1;
        Ok(())
    }

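    // Return addresses are encoded as a jump table index scaled by
    // `VM_CODE_ADDRESS_ALIGNMENT`; the return target is the basic block
    // immediately following the current one.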
    fn get_return_address(&self) -> u32 {
        self.inner
            .module
            .as_ref()
            .unwrap()
            .jump_table_index_by_basic_block(self.inner.nth_basic_block + 1)
            .expect("internal error: couldn't fetch the jump table index for the return basic block")
            * VM_CODE_ADDRESS_ALIGNMENT
    }

    fn set_return_address(&mut self, ra: Reg, return_address: u32) -> Result<(), ExecutionError> {
        log::trace!(
            "Setting a call's return address: {ra} = @dyn {:x} (@{:x})",
            return_address / VM_CODE_ADDRESS_ALIGNMENT,
            self.inner.nth_basic_block + 1
        );

        self.set(ra, return_address)
    }

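    /// Indirect jumps go through the jump table: the computed target must be
    /// either the `VM_ADDR_RETURN_TO_HOST` sentinel or a non-zero multiple of
    /// `VM_CODE_ADDRESS_ALIGNMENT` that maps to a valid jump table entry;
    /// anything else traps.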
    fn dynamic_jump(&mut self, call: Option<(Reg, u32)>, base: Reg, offset: u32) -> Result<(), ExecutionError> {
        let target = self.inner.regs[base as usize].wrapping_add(offset);
        if let Some((ra, return_address)) = call {
            self.set(ra, return_address)?;
        }

        if target == VM_ADDR_RETURN_TO_HOST {
            self.inner.return_to_host = true;
            return Ok(());
        }

        if target == 0 {
            return Err(ExecutionError::Trap(Default::default()));
        }

        if target % VM_CODE_ADDRESS_ALIGNMENT != 0 {
            log::error!("Found a dynamic jump with a misaligned target: target = {target}");
            return Err(ExecutionError::Trap(Default::default()));
        }

        let Some(nth_basic_block) = self
            .inner
            .module
            .as_ref()
            .unwrap()
            .basic_block_by_jump_table_index(target / VM_CODE_ADDRESS_ALIGNMENT)
        else {
            return Err(ExecutionError::Trap(Default::default()));
        };

        let nth_instruction = self
            .inner
            .module
            .as_ref()
            .unwrap()
            .instruction_by_basic_block(nth_basic_block)
            .expect("internal error: couldn't fetch the instruction index for a dynamic jump");

        log::trace!("Dynamic jump to: #{nth_instruction}: @{nth_basic_block:x}");
        self.inner.nth_basic_block = nth_basic_block;
        self.inner.nth_instruction = nth_instruction;
        self.inner.on_start_new_basic_block()
    }
}

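// Registers are 32-bit, so every load widens to `u32`: unsigned loads
// zero-extend and signed loads sign-extend, always reading little-endian.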
trait LoadTy {
    fn from_slice(xs: &[u8]) -> u32;
}

impl LoadTy for u8 {
    fn from_slice(xs: &[u8]) -> u32 {
        u32::from(xs[0])
    }
}

impl LoadTy for i8 {
    fn from_slice(xs: &[u8]) -> u32 {
        i32::from(xs[0] as i8) as u32
    }
}

impl LoadTy for u16 {
    fn from_slice(xs: &[u8]) -> u32 {
        u32::from(u16::from_le_bytes([xs[0], xs[1]]))
    }
}

impl LoadTy for i16 {
    fn from_slice(xs: &[u8]) -> u32 {
        i32::from(i16::from_le_bytes([xs[0], xs[1]])) as u32
    }
}

impl LoadTy for u32 {
    fn from_slice(xs: &[u8]) -> u32 {
        u32::from_le_bytes([xs[0], xs[1], xs[2], xs[3]])
    }
}

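// Stores truncate the 32-bit register value to the target width and emit the
// bytes in little-endian order.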
trait StoreTy: Sized {
    type Array: AsRef<[u8]>;
    fn into_bytes(value: u32) -> Self::Array;
}

impl StoreTy for u8 {
    type Array = [u8; 1];
    fn into_bytes(value: u32) -> Self::Array {
        (value as u8).to_le_bytes()
    }
}

impl StoreTy for u16 {
    type Array = [u8; 2];
    fn into_bytes(value: u32) -> Self::Array {
        (value as u16).to_le_bytes()
    }
}

impl StoreTy for u32 {
    type Array = [u8; 4];
    fn into_bytes(value: u32) -> Self::Array {
        value.to_le_bytes()
    }
}

impl<'a, 'b> InstructionVisitor for Visitor<'a, 'b> {
    type ReturnTy = Result<(), ExecutionError>;

    fn trap(&mut self) -> Self::ReturnTy {
        log::debug!(
            "Trap at instruction {} in block @{:x}",
            self.inner.nth_instruction,
            self.inner.nth_basic_block
        );
        Err(ExecutionError::Trap(Default::default()))
    }

    fn fallthrough(&mut self) -> Self::ReturnTy {
        self.inner.nth_instruction += 1;
        self.inner.nth_basic_block += 1;
        self.inner.on_start_new_basic_block()
    }

    fn sbrk(&mut self, dst: Reg, size: Reg) -> Self::ReturnTy {
        let size = self.get(size);
        let result = self.inner.sbrk(size).unwrap_or(0);
        self.set(dst, result)?;
        self.inner.nth_instruction += 1;
        Ok(())
    }

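    // Hostcalls dispatch to the registered handler, which receives an `Access`
    // into this instance and may itself consume gas; hence the gas balance is
    // re-checked after the handler returns.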
    fn ecalli(&mut self, imm: u32) -> Self::ReturnTy {
        if let Some(on_hostcall) = self.ctx.on_hostcall.as_mut() {
            let access = BackendAccess::Interpreted(self.inner.access());
            (on_hostcall)(imm, access).map_err(ExecutionError::Trap)?;
            self.inner.nth_instruction += 1;
            self.inner.check_gas()?;
            Ok(())
        } else {
            log::debug!("Hostcall called without any hostcall handler set!");
            Err(ExecutionError::Trap(Default::default()))
        }
    }

    fn set_less_than_unsigned(&mut self, d: Reg, s1: Reg, s2: Reg) -> Self::ReturnTy {
        self.set3(d, s1, s2, |s1, s2| u32::from(s1 < s2))
    }

    fn set_less_than_signed(&mut self, d: Reg, s1: Reg, s2: Reg) -> Self::ReturnTy {
        self.set3(d, s1, s2, |s1, s2| u32::from((s1 as i32) < (s2 as i32)))
    }

    fn shift_logical_right(&mut self, d: Reg, s1: Reg, s2: Reg) -> Self::ReturnTy {
        self.set3(d, s1, s2, u32::wrapping_shr)
    }

    fn shift_arithmetic_right(&mut self, d: Reg, s1: Reg, s2: Reg) -> Self::ReturnTy {
        self.set3(d, s1, s2, |s1, s2| ((s1 as i32).wrapping_shr(s2)) as u32)
    }

    fn shift_logical_left(&mut self, d: Reg, s1: Reg, s2: Reg) -> Self::ReturnTy {
        self.set3(d, s1, s2, u32::wrapping_shl)
    }

    fn xor(&mut self, d: Reg, s1: Reg, s2: Reg) -> Self::ReturnTy {
        self.set3(d, s1, s2, |s1, s2| s1 ^ s2)
    }

    fn and(&mut self, d: Reg, s1: Reg, s2: Reg) -> Self::ReturnTy {
        self.set3(d, s1, s2, |s1, s2| s1 & s2)
    }

    fn or(&mut self, d: Reg, s1: Reg, s2: Reg) -> Self::ReturnTy {
        self.set3(d, s1, s2, |s1, s2| s1 | s2)
    }

    fn add(&mut self, d: Reg, s1: Reg, s2: Reg) -> Self::ReturnTy {
        self.set3(d, s1, s2, u32::wrapping_add)
    }

    fn sub(&mut self, d: Reg, s1: Reg, s2: Reg) -> Self::ReturnTy {
        self.set3(d, s1, s2, u32::wrapping_sub)
    }

    fn negate_and_add_imm(&mut self, d: Reg, s1: Reg, s2: u32) -> Self::ReturnTy {
        self.set3(d, s1, s2, |s1, s2| s2.wrapping_sub(s1))
    }

    fn mul(&mut self, d: Reg, s1: Reg, s2: Reg) -> Self::ReturnTy {
        self.set3(d, s1, s2, u32::wrapping_mul)
    }

    fn mul_imm(&mut self, d: Reg, s1: Reg, s2: u32) -> Self::ReturnTy {
        self.set3(d, s1, s2, u32::wrapping_mul)
    }

    fn mul_upper_signed_signed(&mut self, d: Reg, s1: Reg, s2: Reg) -> Self::ReturnTy {
        self.set3(d, s1, s2, |s1, s2| mulh(s1 as i32, s2 as i32) as u32)
    }

    fn mul_upper_signed_signed_imm(&mut self, d: Reg, s1: Reg, s2: u32) -> Self::ReturnTy {
        self.set3(d, s1, s2, |s1, s2| mulh(s1 as i32, s2 as i32) as u32)
    }

    fn mul_upper_unsigned_unsigned(&mut self, d: Reg, s1: Reg, s2: Reg) -> Self::ReturnTy {
        self.set3(d, s1, s2, mulhu)
    }

    fn mul_upper_unsigned_unsigned_imm(&mut self, d: Reg, s1: Reg, s2: u32) -> Self::ReturnTy {
        self.set3(d, s1, s2, mulhu)
    }

    fn mul_upper_signed_unsigned(&mut self, d: Reg, s1: Reg, s2: Reg) -> Self::ReturnTy {
        self.set3(d, s1, s2, |s1, s2| mulhsu(s1 as i32, s2) as u32)
    }

    fn div_unsigned(&mut self, d: Reg, s1: Reg, s2: Reg) -> Self::ReturnTy {
        self.set3(d, s1, s2, divu)
    }

    fn div_signed(&mut self, d: Reg, s1: Reg, s2: Reg) -> Self::ReturnTy {
        self.set3(d, s1, s2, |s1, s2| div(s1 as i32, s2 as i32) as u32)
    }

    fn rem_unsigned(&mut self, d: Reg, s1: Reg, s2: Reg) -> Self::ReturnTy {
        self.set3(d, s1, s2, remu)
    }

    fn rem_signed(&mut self, d: Reg, s1: Reg, s2: Reg) -> Self::ReturnTy {
        self.set3(d, s1, s2, |s1, s2| rem(s1 as i32, s2 as i32) as u32)
    }

    fn set_less_than_unsigned_imm(&mut self, d: Reg, s1: Reg, s2: u32) -> Self::ReturnTy {
        self.set3(d, s1, s2, |s1, s2| u32::from(s1 < s2))
    }

    fn set_greater_than_unsigned_imm(&mut self, d: Reg, s1: Reg, s2: u32) -> Self::ReturnTy {
        self.set3(d, s1, s2, |s1, s2| u32::from(s1 > s2))
    }

    fn set_less_than_signed_imm(&mut self, d: Reg, s1: Reg, s2: u32) -> Self::ReturnTy {
        self.set3(d, s1, s2, |s1, s2| u32::from((s1 as i32) < (s2 as i32)))
    }

    fn set_greater_than_signed_imm(&mut self, d: Reg, s1: Reg, s2: u32) -> Self::ReturnTy {
        self.set3(d, s1, s2, |s1, s2| u32::from((s1 as i32) > (s2 as i32)))
    }

    fn shift_logical_right_imm(&mut self, d: Reg, s1: Reg, s2: u32) -> Self::ReturnTy {
        self.set3(d, s1, s2, u32::wrapping_shr)
    }

    fn shift_logical_right_imm_alt(&mut self, d: Reg, s2: Reg, s1: u32) -> Self::ReturnTy {
        self.set3(d, s1, s2, u32::wrapping_shr)
    }

    fn shift_arithmetic_right_imm(&mut self, d: Reg, s1: Reg, s2: u32) -> Self::ReturnTy {
        self.set3(d, s1, s2, |s1, s2| ((s1 as i32) >> s2) as u32)
    }

    fn shift_arithmetic_right_imm_alt(&mut self, d: Reg, s2: Reg, s1: u32) -> Self::ReturnTy {
        self.set3(d, s1, s2, |s1, s2| ((s1 as i32) >> s2) as u32)
    }

    fn shift_logical_left_imm(&mut self, d: Reg, s1: Reg, s2: u32) -> Self::ReturnTy {
        self.set3(d, s1, s2, u32::wrapping_shl)
    }

    fn shift_logical_left_imm_alt(&mut self, d: Reg, s2: Reg, s1: u32) -> Self::ReturnTy {
        self.set3(d, s1, s2, u32::wrapping_shl)
    }

    fn or_imm(&mut self, d: Reg, s1: Reg, s2: u32) -> Self::ReturnTy {
        self.set3(d, s1, s2, |s1, s2| s1 | s2)
    }

    fn and_imm(&mut self, d: Reg, s1: Reg, s2: u32) -> Self::ReturnTy {
        self.set3(d, s1, s2, |s1, s2| s1 & s2)
    }

    fn xor_imm(&mut self, d: Reg, s1: Reg, s2: u32) -> Self::ReturnTy {
        self.set3(d, s1, s2, |s1, s2| s1 ^ s2)
    }

    fn load_imm(&mut self, dst: Reg, imm: u32) -> Self::ReturnTy {
        self.set(dst, imm)?;
        self.inner.nth_instruction += 1;
        Ok(())
    }

    fn move_reg(&mut self, d: Reg, s: Reg) -> Self::ReturnTy {
        let imm = self.get(s);
        self.set(d, imm)?;
        self.inner.nth_instruction += 1;
        Ok(())
    }

    fn cmov_if_zero(&mut self, d: Reg, s: Reg, c: Reg) -> Self::ReturnTy {
        if self.get(c) == 0 {
            let value = self.get(s);
            self.set(d, value)?;
        }

        self.inner.nth_instruction += 1;
        Ok(())
    }

    fn cmov_if_zero_imm(&mut self, d: Reg, c: Reg, s: u32) -> Self::ReturnTy {
        if self.get(c) == 0 {
            self.set(d, s)?;
        }

        self.inner.nth_instruction += 1;
        Ok(())
    }

    fn cmov_if_not_zero(&mut self, d: Reg, s: Reg, c: Reg) -> Self::ReturnTy {
        if self.get(c) != 0 {
            let value = self.get(s);
            self.set(d, value)?;
        }

        self.inner.nth_instruction += 1;
        Ok(())
    }

    fn cmov_if_not_zero_imm(&mut self, d: Reg, c: Reg, s: u32) -> Self::ReturnTy {
        if self.get(c) != 0 {
            self.set(d, s)?;
        }

        self.inner.nth_instruction += 1;
        Ok(())
    }

    fn add_imm(&mut self, d: Reg, s1: Reg, s2: u32) -> Self::ReturnTy {
        self.set3(d, s1, s2, u32::wrapping_add)
    }

    fn store_imm_u8(&mut self, value: u32, offset: u32) -> Self::ReturnTy {
        self.store::<u8>(value, None, offset)
    }

    fn store_imm_u16(&mut self, value: u32, offset: u32) -> Self::ReturnTy {
        self.store::<u16>(value, None, offset)
    }

    fn store_imm_u32(&mut self, value: u32, offset: u32) -> Self::ReturnTy {
        self.store::<u32>(value, None, offset)
    }

    fn store_imm_indirect_u8(&mut self, base: Reg, offset: u32, value: u32) -> Self::ReturnTy {
        self.store::<u8>(value, Some(base), offset)
    }

    fn store_imm_indirect_u16(&mut self, base: Reg, offset: u32, value: u32) -> Self::ReturnTy {
        self.store::<u16>(value, Some(base), offset)
    }

    fn store_imm_indirect_u32(&mut self, base: Reg, offset: u32, value: u32) -> Self::ReturnTy {
        self.store::<u32>(value, Some(base), offset)
    }

    fn store_indirect_u8(&mut self, src: Reg, base: Reg, offset: u32) -> Self::ReturnTy {
        self.store::<u8>(src, Some(base), offset)
    }

    fn store_indirect_u16(&mut self, src: Reg, base: Reg, offset: u32) -> Self::ReturnTy {
        self.store::<u16>(src, Some(base), offset)
    }

    fn store_indirect_u32(&mut self, src: Reg, base: Reg, offset: u32) -> Self::ReturnTy {
        self.store::<u32>(src, Some(base), offset)
    }

    fn store_u8(&mut self, src: Reg, offset: u32) -> Self::ReturnTy {
        self.store::<u8>(src, None, offset)
    }

    fn store_u16(&mut self, src: Reg, offset: u32) -> Self::ReturnTy {
        self.store::<u16>(src, None, offset)
    }

    fn store_u32(&mut self, src: Reg, offset: u32) -> Self::ReturnTy {
        self.store::<u32>(src, None, offset)
    }

    fn load_u8(&mut self, dst: Reg, offset: u32) -> Self::ReturnTy {
        self.load::<u8>(dst, None, offset)
    }

    fn load_i8(&mut self, dst: Reg, offset: u32) -> Self::ReturnTy {
        self.load::<i8>(dst, None, offset)
    }

    fn load_u16(&mut self, dst: Reg, offset: u32) -> Self::ReturnTy {
        self.load::<u16>(dst, None, offset)
    }

    fn load_i16(&mut self, dst: Reg, offset: u32) -> Self::ReturnTy {
        self.load::<i16>(dst, None, offset)
    }

    fn load_u32(&mut self, dst: Reg, offset: u32) -> Self::ReturnTy {
        self.load::<u32>(dst, None, offset)
    }

    fn load_indirect_u8(&mut self, dst: Reg, base: Reg, offset: u32) -> Self::ReturnTy {
        self.load::<u8>(dst, Some(base), offset)
    }

    fn load_indirect_i8(&mut self, dst: Reg, base: Reg, offset: u32) -> Self::ReturnTy {
        self.load::<i8>(dst, Some(base), offset)
    }

    fn load_indirect_u16(&mut self, dst: Reg, base: Reg, offset: u32) -> Self::ReturnTy {
        self.load::<u16>(dst, Some(base), offset)
    }

    fn load_indirect_i16(&mut self, dst: Reg, base: Reg, offset: u32) -> Self::ReturnTy {
        self.load::<i16>(dst, Some(base), offset)
    }

    fn load_indirect_u32(&mut self, dst: Reg, base: Reg, offset: u32) -> Self::ReturnTy {
        self.load::<u32>(dst, Some(base), offset)
    }

    fn branch_less_unsigned(&mut self, s1: Reg, s2: Reg, i: u32) -> Self::ReturnTy {
        self.branch(s1, s2, i, |s1, s2| s1 < s2)
    }

    fn branch_less_unsigned_imm(&mut self, s1: Reg, s2: u32, i: u32) -> Self::ReturnTy {
        self.branch(s1, s2, i, |s1, s2| s1 < s2)
    }

    fn branch_less_signed(&mut self, s1: Reg, s2: Reg, i: u32) -> Self::ReturnTy {
        self.branch(s1, s2, i, |s1, s2| (s1 as i32) < (s2 as i32))
    }

    fn branch_less_signed_imm(&mut self, s1: Reg, s2: u32, i: u32) -> Self::ReturnTy {
        self.branch(s1, s2, i, |s1, s2| (s1 as i32) < (s2 as i32))
    }

    fn branch_eq(&mut self, s1: Reg, s2: Reg, i: u32) -> Self::ReturnTy {
        self.branch(s1, s2, i, |s1, s2| s1 == s2)
    }

    fn branch_eq_imm(&mut self, s1: Reg, s2: u32, i: u32) -> Self::ReturnTy {
        self.branch(s1, s2, i, |s1, s2| s1 == s2)
    }

    fn branch_not_eq(&mut self, s1: Reg, s2: Reg, i: u32) -> Self::ReturnTy {
        self.branch(s1, s2, i, |s1, s2| s1 != s2)
    }

    fn branch_not_eq_imm(&mut self, s1: Reg, s2: u32, i: u32) -> Self::ReturnTy {
        self.branch(s1, s2, i, |s1, s2| s1 != s2)
    }

    fn branch_greater_or_equal_unsigned(&mut self, s1: Reg, s2: Reg, i: u32) -> Self::ReturnTy {
        self.branch(s1, s2, i, |s1, s2| s1 >= s2)
    }

    fn branch_greater_or_equal_unsigned_imm(&mut self, s1: Reg, s2: u32, i: u32) -> Self::ReturnTy {
        self.branch(s1, s2, i, |s1, s2| s1 >= s2)
    }

    fn branch_greater_or_equal_signed(&mut self, s1: Reg, s2: Reg, i: u32) -> Self::ReturnTy {
        self.branch(s1, s2, i, |s1, s2| (s1 as i32) >= (s2 as i32))
    }

    fn branch_greater_or_equal_signed_imm(&mut self, s1: Reg, s2: u32, i: u32) -> Self::ReturnTy {
        self.branch(s1, s2, i, |s1, s2| (s1 as i32) >= (s2 as i32))
    }

    fn branch_less_or_equal_unsigned_imm(&mut self, s1: Reg, s2: u32, i: u32) -> Self::ReturnTy {
        self.branch(s1, s2, i, |s1, s2| s1 <= s2)
    }

    fn branch_less_or_equal_signed_imm(&mut self, s1: Reg, s2: u32, i: u32) -> Self::ReturnTy {
        self.branch(s1, s2, i, |s1, s2| (s1 as i32) <= (s2 as i32))
    }

    fn branch_greater_unsigned_imm(&mut self, s1: Reg, s2: u32, i: u32) -> Self::ReturnTy {
        self.branch(s1, s2, i, |s1, s2| s1 > s2)
    }

    fn branch_greater_signed_imm(&mut self, s1: Reg, s2: u32, i: u32) -> Self::ReturnTy {
        self.branch(s1, s2, i, |s1, s2| (s1 as i32) > (s2 as i32))
    }

    fn jump(&mut self, target: u32) -> Self::ReturnTy {
        let nth_instruction = self
            .inner
            .module
            .as_ref()
            .unwrap()
            .instruction_by_basic_block(target)
            .expect("internal error: couldn't fetch the instruction index for a jump");

        log::trace!("Static jump to: #{nth_instruction}: @{target:x}");
        self.inner.nth_basic_block = target;
        self.inner.nth_instruction = nth_instruction;
        self.inner.on_start_new_basic_block()
    }

    fn jump_indirect(&mut self, base: Reg, offset: u32) -> Self::ReturnTy {
        self.dynamic_jump(None, base, offset)
    }

    fn call(&mut self, ra: Reg, target: u32) -> Self::ReturnTy {
        let return_address = self.get_return_address();
        self.set_return_address(ra, return_address)?;
        self.jump(target)
    }

    fn call_indirect(&mut self, ra: Reg, base: Reg, offset: u32) -> Self::ReturnTy {
        let return_address = self.get_return_address();
        self.dynamic_jump(Some((ra, return_address)), base, offset)
    }
}