1use std::borrow::Cow;
2use std::collections::HashMap;
3use std::sync::{Arc, Mutex};
4
5use core::marker::PhantomData;
6
7use polkavm_common::abi::{MemoryMap, VM_MAXIMUM_EXPORT_COUNT, VM_MAXIMUM_IMPORT_COUNT, VM_MAXIMUM_INSTRUCTION_COUNT};
8use polkavm_common::abi::{VM_ADDR_RETURN_TO_HOST, VM_ADDR_USER_STACK_HIGH};
9use polkavm_common::error::Trap;
10use polkavm_common::program::{FrameKind, Instruction, InstructionVisitor, Reg};
11use polkavm_common::program::{ProgramBlob, ProgramExport, ProgramImport, ProgramSymbol};
12use polkavm_common::utils::{Access, AsUninitSliceMut, Gas};
13
14use crate::caller::{Caller, CallerRaw};
15use crate::config::{BackendKind, Config, GasMeteringKind, ModuleConfig, SandboxKind};
16use crate::error::{bail, bail_static, Error, ExecutionError};
17use crate::interpreter::{InterpretedAccess, InterpretedInstance, InterpretedModule};
18use crate::tracer::Tracer;
19use crate::utils::GuestInit;
20
21if_compiler_is_supported! {
22 use crate::sandbox::{Sandbox, SandboxInstance};
23 use crate::sandbox::generic::Sandbox as SandboxGeneric;
24 use crate::compiler::CompiledModule;
25
26 #[cfg(target_os = "linux")]
27 use crate::sandbox::linux::Sandbox as SandboxLinux;
28}
29
30pub type RegValue = u32;
31
32if_compiler_is_supported! {
33 {
34 impl EngineState {
35 pub(crate) fn sandbox_cache(&self) -> Option<&SandboxCache> {
36 self.sandbox_cache.as_ref()
37 }
38 }
39
40 use crate::sandbox::SandboxCache;
41 } else {
42 struct SandboxCache;
43 }
44}
45
46pub(crate) struct EngineState {
47 #[allow(dead_code)]
48 sandbox_cache: Option<SandboxCache>,
49}
50
51pub struct Engine {
52 selected_backend: BackendKind,
53 #[allow(dead_code)]
54 selected_sandbox: Option<SandboxKind>,
55 interpreter_enabled: bool,
56 debug_trace_execution: bool,
57 state: Arc<EngineState>,
58}
59
60impl Engine {
61 pub fn new(config: &Config) -> Result<Self, Error> {
62 if let Some(backend) = config.backend {
63 if !backend.is_supported() {
64 bail!("the '{backend}' backend is not supported on this platform")
65 }
66 }
67
68 if !config.allow_insecure && config.trace_execution {
69 bail!("cannot enable trace execution: `set_allow_insecure`/`POLKAVM_ALLOW_INSECURE` is not enabled");
70 }
71
72 let debug_trace_execution = config.trace_execution;
73 let default_backend = if BackendKind::Compiler.is_supported() && SandboxKind::Linux.is_supported() {
74 BackendKind::Compiler
75 } else {
76 BackendKind::Interpreter
77 };
78
79 let selected_backend = config.backend.unwrap_or(default_backend);
80 log::debug!("Selected backend: '{selected_backend}'");
81
82 let (selected_sandbox, sandbox_cache) = if_compiler_is_supported! {
83 {
84 if selected_backend == BackendKind::Compiler {
85 let default_sandbox = if SandboxKind::Linux.is_supported() {
86 SandboxKind::Linux
87 } else {
88 SandboxKind::Generic
89 };
90
91 let selected_sandbox = config.sandbox.unwrap_or(default_sandbox);
92 log::debug!("Selected sandbox: '{selected_sandbox}'");
93
94 if !selected_sandbox.is_supported() {
95 bail!("the '{selected_sandbox}' backend is not supported on this platform")
96 }
97
98 if selected_sandbox == SandboxKind::Generic && !config.allow_insecure {
99 bail!("cannot use the '{selected_sandbox}' sandbox: this sandbox is not secure yet, and `set_allow_insecure`/`POLKAVM_ALLOW_INSECURE` is not enabled");
100 }
101
102 let sandbox_cache = SandboxCache::new(selected_sandbox, config.worker_count, debug_trace_execution)?;
103 (Some(selected_sandbox), Some(sandbox_cache))
104 } else {
105 Default::default()
106 }
107 } else {
108 Default::default()
109 }
110 };
111
112 Ok(Engine {
113 selected_backend,
114 selected_sandbox,
115 interpreter_enabled: debug_trace_execution || selected_backend == BackendKind::Interpreter,
116 debug_trace_execution,
117 state: Arc::new(EngineState { sandbox_cache }),
118 })
119 }
120}
121
122if_compiler_is_supported! {
123 {
124 pub(crate) enum CompiledModuleKind {
125 #[cfg(target_os = "linux")]
126 Linux(CompiledModule<SandboxLinux>),
127 Generic(CompiledModule<SandboxGeneric>),
128 Unavailable,
129 }
130 } else {
131 pub(crate) enum CompiledModuleKind {
132 Unavailable,
133 }
134 }
135}
136
137impl CompiledModuleKind {
138 pub fn is_some(&self) -> bool {
139 !matches!(self, CompiledModuleKind::Unavailable)
140 }
141}
142
143struct ModulePrivate {
144 debug_trace_execution: bool,
145 exports: Vec<ProgramExport<'static>>,
146 imports: Vec<ProgramImport<'static>>,
147 export_index_by_symbol: HashMap<Vec<u8>, usize>,
148
149 instruction_by_basic_block: Vec<u32>,
150 jump_table_index_by_basic_block: Vec<u32>,
151 basic_block_by_jump_table_index: Vec<u32>,
152
153 blob: ProgramBlob<'static>,
154 compiled_module: CompiledModuleKind,
155 interpreted_module: Option<InterpretedModule>,
156 memory_map: MemoryMap,
157 gas_metering: Option<GasMeteringKind>,
158}
159
160#[derive(Clone)]
162pub struct Module(Arc<ModulePrivate>);
163
164pub(crate) trait BackendModule: Sized {
165 type BackendVisitor<'a>;
166 type Aux;
167
168 #[allow(clippy::too_many_arguments)]
169 fn create_visitor<'a>(
170 config: &'a ModuleConfig,
171 exports: &'a [ProgramExport],
172 basic_block_by_jump_table_index: &'a [u32],
173 jump_table_index_by_basic_block: &'a [u32],
174 init: GuestInit<'a>,
175 instruction_count: usize,
176 basic_block_count: usize,
177 debug_trace_execution: bool,
178 ) -> Result<(Self::BackendVisitor<'a>, Self::Aux), Error>;
179
180 fn finish_compilation<'a>(wrapper: VisitorWrapper<'a, Self::BackendVisitor<'a>>, aux: Self::Aux) -> Result<(Common<'a>, Self), Error>;
181}
182
183pub(crate) trait BackendVisitor: InstructionVisitor<ReturnTy = ()> {
184 fn before_instruction(&mut self);
185 fn after_instruction(&mut self);
186}
187
188polkavm_common::program::implement_instruction_visitor!(impl<'a> VisitorWrapper<'a, Vec<Instruction>>, push);
189
190impl<'a> BackendVisitor for VisitorWrapper<'a, Vec<Instruction>> {
191 fn before_instruction(&mut self) {}
192 fn after_instruction(&mut self) {}
193}
194
195pub(crate) struct Common<'a> {
196 pub(crate) code: &'a [u8],
197 pub(crate) config: &'a ModuleConfig,
198 pub(crate) imports: &'a Vec<ProgramImport<'a>>,
199 pub(crate) jump_table_index_by_basic_block: &'a Vec<u32>,
200 pub(crate) instruction_by_basic_block: Vec<u32>,
201 pub(crate) gas_cost_for_basic_block: Vec<u32>,
202 pub(crate) maximum_seen_jump_target: u32,
203 pub(crate) nth_instruction: usize,
204 pub(crate) instruction_count: usize,
205 pub(crate) basic_block_count: usize,
206 pub(crate) block_in_progress: bool,
207 pub(crate) current_instruction_offset: usize,
208}
209
210impl<'a> Common<'a> {
211 pub(crate) fn is_last_instruction(&self) -> bool {
212 self.nth_instruction + 1 == self.instruction_count
213 }
214}
215
216pub(crate) struct VisitorWrapper<'a, T> {
217 pub(crate) common: Common<'a>,
218 pub(crate) visitor: T,
219}
220
221impl<'a, T> core::ops::Deref for VisitorWrapper<'a, T> {
222 type Target = T;
223
224 #[inline]
225 fn deref(&self) -> &Self::Target {
226 &self.visitor
227 }
228}
229
230impl<'a, T> core::ops::DerefMut for VisitorWrapper<'a, T> {
231 #[inline]
232 fn deref_mut(&mut self) -> &mut Self::Target {
233 &mut self.visitor
234 }
235}
236
237#[repr(transparent)]
238pub(crate) struct CommonVisitor<'a, T>(VisitorWrapper<'a, T>);
239
240impl<'a, T> core::ops::Deref for CommonVisitor<'a, T> {
241 type Target = Common<'a>;
242
243 #[inline]
244 fn deref(&self) -> &Self::Target {
245 &self.0.common
246 }
247}
248
249impl<'a, T> core::ops::DerefMut for CommonVisitor<'a, T> {
250 #[inline]
251 fn deref_mut(&mut self) -> &mut Self::Target {
252 &mut self.0.common
253 }
254}
255
256impl<'a, T> CommonVisitor<'a, T>
257where
258 VisitorWrapper<'a, T>: BackendVisitor,
259{
260 fn nth_basic_block(&self) -> usize {
261 self.instruction_by_basic_block.len() - 1
262 }
263
264 fn start_new_basic_block(&mut self) -> Result<(), Error> {
265 if !self.is_last_instruction() {
266 let nth = (self.nth_instruction + 1) as u32;
267 self.instruction_by_basic_block.push(nth);
268 }
269
270 if self.instruction_by_basic_block.len() > self.basic_block_count {
271 bail_static!("program contains an invalid basic block count");
272 }
273
274 self.block_in_progress = false;
275 Ok(())
276 }
277
278 fn branch(&mut self, jump_target: u32, cb: impl FnOnce(&mut VisitorWrapper<'a, T>)) -> Result<(), Error> {
279 self.maximum_seen_jump_target = core::cmp::max(self.maximum_seen_jump_target, jump_target);
280
281 self.start_new_basic_block()?;
282 self.0.before_instruction();
283 cb(&mut self.0);
284 Ok(())
285 }
286}
287
288impl<'a, T> polkavm_common::program::ParsingVisitor<Error> for CommonVisitor<'a, T>
289where
290 VisitorWrapper<'a, T>: BackendVisitor,
291{
292 #[cfg_attr(not(debug_assertions), inline)]
293 fn on_pre_visit(&mut self, offset: usize, _opcode: u8) -> Self::ReturnTy {
294 if self.config.gas_metering.is_some() {
295 let nth_basic_block = self.nth_basic_block();
297 self.gas_cost_for_basic_block[nth_basic_block] += 1;
298 }
299
300 self.current_instruction_offset = offset;
301 self.block_in_progress = true;
302 Ok(())
303 }
304
305 #[cfg_attr(not(debug_assertions), inline)]
306 fn on_post_visit(&mut self) -> Self::ReturnTy {
307 self.0.after_instruction();
308 self.nth_instruction += 1;
309 Ok(())
310 }
311}
312
313impl<'a, T> polkavm_common::program::InstructionVisitor for CommonVisitor<'a, T>
314where
315 VisitorWrapper<'a, T>: BackendVisitor,
316{
317 type ReturnTy = Result<(), Error>;
318
319 #[inline(always)]
320 fn trap(&mut self) -> Self::ReturnTy {
321 self.start_new_basic_block()?;
322 self.0.before_instruction();
323 self.0.trap();
324 Ok(())
325 }
326
327 #[inline(always)]
328 fn fallthrough(&mut self) -> Self::ReturnTy {
329 self.start_new_basic_block()?;
330 self.0.before_instruction();
331 self.0.fallthrough();
332 Ok(())
333 }
334
335 #[inline(always)]
336 fn sbrk(&mut self, d: Reg, s: Reg) -> Self::ReturnTy {
337 self.0.before_instruction();
338 self.0.sbrk(d, s);
339 Ok(())
340 }
341
342 #[inline(always)]
343 fn ecalli(&mut self, imm: u32) -> Self::ReturnTy {
344 if self.imports.get(imm as usize).is_none() {
345 #[cold]
346 fn error_unrecognized_ecall(imm: u32) -> Error {
347 Error::from(format!("found an unrecognized ecall number: {imm}"))
348 }
349
350 return Err(error_unrecognized_ecall(imm));
351 }
352
353 self.0.before_instruction();
354 self.0.ecalli(imm);
355 Ok(())
356 }
357
358 #[inline(always)]
359 fn set_less_than_unsigned(&mut self, d: Reg, s1: Reg, s2: Reg) -> Self::ReturnTy {
360 self.0.before_instruction();
361 self.0.set_less_than_unsigned(d, s1, s2);
362 Ok(())
363 }
364
365 #[inline(always)]
366 fn set_less_than_signed(&mut self, d: Reg, s1: Reg, s2: Reg) -> Self::ReturnTy {
367 self.0.before_instruction();
368 self.0.set_less_than_signed(d, s1, s2);
369 Ok(())
370 }
371
372 #[inline(always)]
373 fn shift_logical_right(&mut self, d: Reg, s1: Reg, s2: Reg) -> Self::ReturnTy {
374 self.0.before_instruction();
375 self.0.shift_logical_right(d, s1, s2);
376 Ok(())
377 }
378
379 #[inline(always)]
380 fn shift_arithmetic_right(&mut self, d: Reg, s1: Reg, s2: Reg) -> Self::ReturnTy {
381 self.0.before_instruction();
382 self.0.shift_arithmetic_right(d, s1, s2);
383 Ok(())
384 }
385
386 #[inline(always)]
387 fn shift_logical_left(&mut self, d: Reg, s1: Reg, s2: Reg) -> Self::ReturnTy {
388 self.0.before_instruction();
389 self.0.shift_logical_left(d, s1, s2);
390 Ok(())
391 }
392
393 #[inline(always)]
394 fn xor(&mut self, d: Reg, s1: Reg, s2: Reg) -> Self::ReturnTy {
395 self.0.before_instruction();
396 self.0.xor(d, s1, s2);
397 Ok(())
398 }
399
400 #[inline(always)]
401 fn and(&mut self, d: Reg, s1: Reg, s2: Reg) -> Self::ReturnTy {
402 self.0.before_instruction();
403 self.0.and(d, s1, s2);
404 Ok(())
405 }
406
407 #[inline(always)]
408 fn or(&mut self, d: Reg, s1: Reg, s2: Reg) -> Self::ReturnTy {
409 self.0.before_instruction();
410 self.0.or(d, s1, s2);
411 Ok(())
412 }
413
414 #[inline(always)]
415 fn add(&mut self, d: Reg, s1: Reg, s2: Reg) -> Self::ReturnTy {
416 self.0.before_instruction();
417 self.0.add(d, s1, s2);
418 Ok(())
419 }
420
421 #[inline(always)]
422 fn sub(&mut self, d: Reg, s1: Reg, s2: Reg) -> Self::ReturnTy {
423 self.0.before_instruction();
424 self.0.sub(d, s1, s2);
425 Ok(())
426 }
427
428 #[inline(always)]
429 fn mul(&mut self, d: Reg, s1: Reg, s2: Reg) -> Self::ReturnTy {
430 self.0.before_instruction();
431 self.0.mul(d, s1, s2);
432 Ok(())
433 }
434
435 #[inline(always)]
436 fn mul_upper_signed_signed(&mut self, d: Reg, s1: Reg, s2: Reg) -> Self::ReturnTy {
437 self.0.before_instruction();
438 self.0.mul_upper_signed_signed(d, s1, s2);
439 Ok(())
440 }
441
442 #[inline(always)]
443 fn mul_upper_unsigned_unsigned(&mut self, d: Reg, s1: Reg, s2: Reg) -> Self::ReturnTy {
444 self.0.before_instruction();
445 self.0.mul_upper_unsigned_unsigned(d, s1, s2);
446 Ok(())
447 }
448
449 #[inline(always)]
450 fn mul_upper_signed_unsigned(&mut self, d: Reg, s1: Reg, s2: Reg) -> Self::ReturnTy {
451 self.0.before_instruction();
452 self.0.mul_upper_signed_unsigned(d, s1, s2);
453 Ok(())
454 }
455
456 #[inline(always)]
457 fn div_unsigned(&mut self, d: Reg, s1: Reg, s2: Reg) -> Self::ReturnTy {
458 self.0.before_instruction();
459 self.0.div_unsigned(d, s1, s2);
460 Ok(())
461 }
462
463 #[inline(always)]
464 fn div_signed(&mut self, d: Reg, s1: Reg, s2: Reg) -> Self::ReturnTy {
465 self.0.before_instruction();
466 self.0.div_signed(d, s1, s2);
467 Ok(())
468 }
469
470 #[inline(always)]
471 fn rem_unsigned(&mut self, d: Reg, s1: Reg, s2: Reg) -> Self::ReturnTy {
472 self.0.before_instruction();
473 self.0.rem_unsigned(d, s1, s2);
474 Ok(())
475 }
476
477 #[inline(always)]
478 fn rem_signed(&mut self, d: Reg, s1: Reg, s2: Reg) -> Self::ReturnTy {
479 self.0.before_instruction();
480 self.0.rem_signed(d, s1, s2);
481 Ok(())
482 }
483
484 #[inline(always)]
485 fn mul_imm(&mut self, d: Reg, s1: Reg, s2: u32) -> Self::ReturnTy {
486 self.0.before_instruction();
487 self.0.mul_imm(d, s1, s2);
488 Ok(())
489 }
490
491 #[inline(always)]
492 fn mul_upper_signed_signed_imm(&mut self, d: Reg, s1: Reg, s2: u32) -> Self::ReturnTy {
493 self.0.before_instruction();
494 self.0.mul_upper_signed_signed_imm(d, s1, s2);
495 Ok(())
496 }
497
498 #[inline(always)]
499 fn mul_upper_unsigned_unsigned_imm(&mut self, d: Reg, s1: Reg, s2: u32) -> Self::ReturnTy {
500 self.0.before_instruction();
501 self.0.mul_upper_unsigned_unsigned_imm(d, s1, s2);
502 Ok(())
503 }
504
505 #[inline(always)]
506 fn set_less_than_unsigned_imm(&mut self, d: Reg, s1: Reg, s2: u32) -> Self::ReturnTy {
507 self.0.before_instruction();
508 self.0.set_less_than_unsigned_imm(d, s1, s2);
509 Ok(())
510 }
511
512 #[inline(always)]
513 fn set_less_than_signed_imm(&mut self, d: Reg, s1: Reg, s2: u32) -> Self::ReturnTy {
514 self.0.before_instruction();
515 self.0.set_less_than_signed_imm(d, s1, s2);
516 Ok(())
517 }
518
519 #[inline(always)]
520 fn set_greater_than_unsigned_imm(&mut self, d: Reg, s1: Reg, s2: u32) -> Self::ReturnTy {
521 self.0.before_instruction();
522 self.0.set_greater_than_unsigned_imm(d, s1, s2);
523 Ok(())
524 }
525
526 #[inline(always)]
527 fn set_greater_than_signed_imm(&mut self, d: Reg, s1: Reg, s2: u32) -> Self::ReturnTy {
528 self.0.before_instruction();
529 self.0.set_greater_than_signed_imm(d, s1, s2);
530 Ok(())
531 }
532
533 #[inline(always)]
534 fn shift_logical_right_imm(&mut self, d: Reg, s1: Reg, s2: u32) -> Self::ReturnTy {
535 self.0.before_instruction();
536 self.0.shift_logical_right_imm(d, s1, s2);
537 Ok(())
538 }
539
540 #[inline(always)]
541 fn shift_arithmetic_right_imm(&mut self, d: Reg, s1: Reg, s2: u32) -> Self::ReturnTy {
542 self.0.before_instruction();
543 self.0.shift_arithmetic_right_imm(d, s1, s2);
544 Ok(())
545 }
546
547 #[inline(always)]
548 fn shift_logical_left_imm(&mut self, d: Reg, s1: Reg, s2: u32) -> Self::ReturnTy {
549 self.0.before_instruction();
550 self.0.shift_logical_left_imm(d, s1, s2);
551 Ok(())
552 }
553
554 #[inline(always)]
555 fn shift_logical_right_imm_alt(&mut self, d: Reg, s2: Reg, s1: u32) -> Self::ReturnTy {
556 self.0.before_instruction();
557 self.0.shift_logical_right_imm_alt(d, s2, s1);
558 Ok(())
559 }
560
561 #[inline(always)]
562 fn shift_arithmetic_right_imm_alt(&mut self, d: Reg, s2: Reg, s1: u32) -> Self::ReturnTy {
563 self.0.before_instruction();
564 self.0.shift_arithmetic_right_imm_alt(d, s2, s1);
565 Ok(())
566 }
567
568 #[inline(always)]
569 fn shift_logical_left_imm_alt(&mut self, d: Reg, s2: Reg, s1: u32) -> Self::ReturnTy {
570 self.0.before_instruction();
571 self.0.shift_logical_left_imm_alt(d, s2, s1);
572 Ok(())
573 }
574
575 #[inline(always)]
576 fn or_imm(&mut self, d: Reg, s: Reg, imm: u32) -> Self::ReturnTy {
577 self.0.before_instruction();
578 self.0.or_imm(d, s, imm);
579 Ok(())
580 }
581
582 #[inline(always)]
583 fn and_imm(&mut self, d: Reg, s: Reg, imm: u32) -> Self::ReturnTy {
584 self.0.before_instruction();
585 self.0.and_imm(d, s, imm);
586 Ok(())
587 }
588
589 #[inline(always)]
590 fn xor_imm(&mut self, d: Reg, s: Reg, imm: u32) -> Self::ReturnTy {
591 self.0.before_instruction();
592 self.0.xor_imm(d, s, imm);
593 Ok(())
594 }
595
596 #[inline(always)]
597 fn move_reg(&mut self, d: Reg, s: Reg) -> Self::ReturnTy {
598 self.0.before_instruction();
599 self.0.move_reg(d, s);
600 Ok(())
601 }
602
603 #[inline(always)]
604 fn cmov_if_zero(&mut self, d: Reg, s: Reg, c: Reg) -> Self::ReturnTy {
605 self.0.before_instruction();
606 self.0.cmov_if_zero(d, s, c);
607 Ok(())
608 }
609
610 #[inline(always)]
611 fn cmov_if_not_zero(&mut self, d: Reg, s: Reg, c: Reg) -> Self::ReturnTy {
612 self.0.before_instruction();
613 self.0.cmov_if_not_zero(d, s, c);
614 Ok(())
615 }
616
617 #[inline(always)]
618 fn cmov_if_zero_imm(&mut self, d: Reg, c: Reg, s: u32) -> Self::ReturnTy {
619 self.0.before_instruction();
620 self.0.cmov_if_zero_imm(d, c, s);
621 Ok(())
622 }
623
624 #[inline(always)]
625 fn cmov_if_not_zero_imm(&mut self, d: Reg, c: Reg, s: u32) -> Self::ReturnTy {
626 self.0.before_instruction();
627 self.0.cmov_if_not_zero_imm(d, c, s);
628 Ok(())
629 }
630
631 #[inline(always)]
632 fn add_imm(&mut self, d: Reg, s: Reg, imm: u32) -> Self::ReturnTy {
633 self.0.before_instruction();
634 self.0.add_imm(d, s, imm);
635 Ok(())
636 }
637
638 #[inline(always)]
639 fn negate_and_add_imm(&mut self, d: Reg, s1: Reg, s2: u32) -> Self::ReturnTy {
640 self.0.before_instruction();
641 self.0.negate_and_add_imm(d, s1, s2);
642 Ok(())
643 }
644
645 #[inline(always)]
646 fn store_imm_indirect_u8(&mut self, base: Reg, offset: u32, value: u32) -> Self::ReturnTy {
647 self.0.before_instruction();
648 self.0.store_imm_indirect_u8(base, offset, value);
649 Ok(())
650 }
651
652 #[inline(always)]
653 fn store_imm_indirect_u16(&mut self, base: Reg, offset: u32, value: u32) -> Self::ReturnTy {
654 self.0.before_instruction();
655 self.0.store_imm_indirect_u16(base, offset, value);
656 Ok(())
657 }
658
659 #[inline(always)]
660 fn store_imm_indirect_u32(&mut self, base: Reg, offset: u32, value: u32) -> Self::ReturnTy {
661 self.0.before_instruction();
662 self.0.store_imm_indirect_u32(base, offset, value);
663 Ok(())
664 }
665
666 #[inline(always)]
667 fn store_indirect_u8(&mut self, src: Reg, base: Reg, offset: u32) -> Self::ReturnTy {
668 self.0.before_instruction();
669 self.0.store_indirect_u8(src, base, offset);
670 Ok(())
671 }
672
673 #[inline(always)]
674 fn store_indirect_u16(&mut self, src: Reg, base: Reg, offset: u32) -> Self::ReturnTy {
675 self.0.before_instruction();
676 self.0.store_indirect_u16(src, base, offset);
677 Ok(())
678 }
679
680 #[inline(always)]
681 fn store_indirect_u32(&mut self, src: Reg, base: Reg, offset: u32) -> Self::ReturnTy {
682 self.0.before_instruction();
683 self.0.store_indirect_u32(src, base, offset);
684 Ok(())
685 }
686
687 #[inline(always)]
688 fn store_imm_u8(&mut self, value: u32, offset: u32) -> Self::ReturnTy {
689 self.0.before_instruction();
690 self.0.store_imm_u8(value, offset);
691 Ok(())
692 }
693
694 #[inline(always)]
695 fn store_imm_u16(&mut self, value: u32, offset: u32) -> Self::ReturnTy {
696 self.0.before_instruction();
697 self.0.store_imm_u16(value, offset);
698 Ok(())
699 }
700
701 #[inline(always)]
702 fn store_imm_u32(&mut self, value: u32, offset: u32) -> Self::ReturnTy {
703 self.0.before_instruction();
704 self.0.store_imm_u32(value, offset);
705 Ok(())
706 }
707
708 #[inline(always)]
709 fn store_u8(&mut self, src: Reg, offset: u32) -> Self::ReturnTy {
710 self.0.before_instruction();
711 self.0.store_u8(src, offset);
712 Ok(())
713 }
714
715 #[inline(always)]
716 fn store_u16(&mut self, src: Reg, offset: u32) -> Self::ReturnTy {
717 self.0.before_instruction();
718 self.0.store_u16(src, offset);
719 Ok(())
720 }
721
722 #[inline(always)]
723 fn store_u32(&mut self, src: Reg, offset: u32) -> Self::ReturnTy {
724 self.0.before_instruction();
725 self.0.store_u32(src, offset);
726 Ok(())
727 }
728
729 #[inline(always)]
730 fn load_indirect_u8(&mut self, dst: Reg, base: Reg, offset: u32) -> Self::ReturnTy {
731 self.0.before_instruction();
732 self.0.load_indirect_u8(dst, base, offset);
733 Ok(())
734 }
735
736 #[inline(always)]
737 fn load_indirect_i8(&mut self, dst: Reg, base: Reg, offset: u32) -> Self::ReturnTy {
738 self.0.before_instruction();
739 self.0.load_indirect_i8(dst, base, offset);
740 Ok(())
741 }
742
743 #[inline(always)]
744 fn load_indirect_u16(&mut self, dst: Reg, base: Reg, offset: u32) -> Self::ReturnTy {
745 self.0.before_instruction();
746 self.0.load_indirect_u16(dst, base, offset);
747 Ok(())
748 }
749
750 #[inline(always)]
751 fn load_indirect_i16(&mut self, dst: Reg, base: Reg, offset: u32) -> Self::ReturnTy {
752 self.0.before_instruction();
753 self.0.load_indirect_i16(dst, base, offset);
754 Ok(())
755 }
756
757 #[inline(always)]
758 fn load_indirect_u32(&mut self, dst: Reg, base: Reg, offset: u32) -> Self::ReturnTy {
759 self.0.before_instruction();
760 self.0.load_indirect_u32(dst, base, offset);
761 Ok(())
762 }
763
764 #[inline(always)]
765 fn load_u8(&mut self, dst: Reg, offset: u32) -> Self::ReturnTy {
766 self.0.before_instruction();
767 self.0.load_u8(dst, offset);
768 Ok(())
769 }
770
771 #[inline(always)]
772 fn load_i8(&mut self, dst: Reg, offset: u32) -> Self::ReturnTy {
773 self.0.before_instruction();
774 self.0.load_i8(dst, offset);
775 Ok(())
776 }
777
778 #[inline(always)]
779 fn load_u16(&mut self, dst: Reg, offset: u32) -> Self::ReturnTy {
780 self.0.before_instruction();
781 self.0.load_u16(dst, offset);
782 Ok(())
783 }
784
785 #[inline(always)]
786 fn load_i16(&mut self, dst: Reg, offset: u32) -> Self::ReturnTy {
787 self.0.before_instruction();
788 self.0.load_i16(dst, offset);
789 Ok(())
790 }
791
792 #[inline(always)]
793 fn load_u32(&mut self, dst: Reg, offset: u32) -> Self::ReturnTy {
794 self.0.before_instruction();
795 self.0.load_u32(dst, offset);
796 Ok(())
797 }
798
799 #[inline(always)]
800 fn branch_less_unsigned(&mut self, s1: Reg, s2: Reg, imm: u32) -> Self::ReturnTy {
801 self.branch(imm, move |backend| backend.branch_less_unsigned(s1, s2, imm))
802 }
803
804 #[inline(always)]
805 fn branch_less_signed(&mut self, s1: Reg, s2: Reg, imm: u32) -> Self::ReturnTy {
806 self.branch(imm, move |backend| backend.branch_less_signed(s1, s2, imm))
807 }
808
809 #[inline(always)]
810 fn branch_greater_or_equal_unsigned(&mut self, s1: Reg, s2: Reg, imm: u32) -> Self::ReturnTy {
811 self.branch(imm, move |backend| backend.branch_greater_or_equal_unsigned(s1, s2, imm))
812 }
813
814 #[inline(always)]
815 fn branch_greater_or_equal_signed(&mut self, s1: Reg, s2: Reg, imm: u32) -> Self::ReturnTy {
816 self.branch(imm, move |backend| backend.branch_greater_or_equal_signed(s1, s2, imm))
817 }
818
819 #[inline(always)]
820 fn branch_eq(&mut self, s1: Reg, s2: Reg, imm: u32) -> Self::ReturnTy {
821 self.branch(imm, move |backend| backend.branch_eq(s1, s2, imm))
822 }
823
824 #[inline(always)]
825 fn branch_not_eq(&mut self, s1: Reg, s2: Reg, imm: u32) -> Self::ReturnTy {
826 self.branch(imm, move |backend| backend.branch_not_eq(s1, s2, imm))
827 }
828
829 #[inline(always)]
830 fn branch_eq_imm(&mut self, s1: Reg, s2: u32, imm: u32) -> Self::ReturnTy {
831 self.branch(imm, move |backend| backend.branch_eq_imm(s1, s2, imm))
832 }
833
834 #[inline(always)]
835 fn branch_not_eq_imm(&mut self, s1: Reg, s2: u32, imm: u32) -> Self::ReturnTy {
836 self.branch(imm, move |backend| backend.branch_not_eq_imm(s1, s2, imm))
837 }
838
839 #[inline(always)]
840 fn branch_less_unsigned_imm(&mut self, s1: Reg, s2: u32, imm: u32) -> Self::ReturnTy {
841 self.branch(imm, move |backend| backend.branch_less_unsigned_imm(s1, s2, imm))
842 }
843
844 #[inline(always)]
845 fn branch_less_signed_imm(&mut self, s1: Reg, s2: u32, imm: u32) -> Self::ReturnTy {
846 self.branch(imm, move |backend| backend.branch_less_signed_imm(s1, s2, imm))
847 }
848
849 #[inline(always)]
850 fn branch_greater_or_equal_unsigned_imm(&mut self, s1: Reg, s2: u32, imm: u32) -> Self::ReturnTy {
851 self.branch(imm, move |backend| backend.branch_greater_or_equal_unsigned_imm(s1, s2, imm))
852 }
853
854 #[inline(always)]
855 fn branch_greater_or_equal_signed_imm(&mut self, s1: Reg, s2: u32, imm: u32) -> Self::ReturnTy {
856 self.branch(imm, move |backend| backend.branch_greater_or_equal_signed_imm(s1, s2, imm))
857 }
858
859 #[inline(always)]
860 fn branch_less_or_equal_unsigned_imm(&mut self, s1: Reg, s2: u32, imm: u32) -> Self::ReturnTy {
861 self.branch(imm, move |backend| backend.branch_less_or_equal_unsigned_imm(s1, s2, imm))
862 }
863
864 #[inline(always)]
865 fn branch_less_or_equal_signed_imm(&mut self, s1: Reg, s2: u32, imm: u32) -> Self::ReturnTy {
866 self.branch(imm, move |backend| backend.branch_less_or_equal_signed_imm(s1, s2, imm))
867 }
868
869 #[inline(always)]
870 fn branch_greater_unsigned_imm(&mut self, s1: Reg, s2: u32, imm: u32) -> Self::ReturnTy {
871 self.branch(imm, move |backend| backend.branch_greater_unsigned_imm(s1, s2, imm))
872 }
873
874 #[inline(always)]
875 fn branch_greater_signed_imm(&mut self, s1: Reg, s2: u32, imm: u32) -> Self::ReturnTy {
876 self.branch(imm, move |backend| backend.branch_greater_signed_imm(s1, s2, imm))
877 }
878
879 #[inline(always)]
880 fn load_imm(&mut self, dst: Reg, value: u32) -> Self::ReturnTy {
881 self.0.before_instruction();
882 self.0.load_imm(dst, value);
883 Ok(())
884 }
885
886 #[inline(always)]
887 fn call(&mut self, ra: Reg, target: u32) -> Self::ReturnTy {
888 let return_basic_block = self.instruction_by_basic_block.len() as u32;
889 if self
890 .jump_table_index_by_basic_block
891 .get(return_basic_block as usize)
892 .copied()
893 .unwrap_or(0)
894 == 0
895 {
896 bail_static!("found a call instruction where the next basic block is not part of the jump table");
897 }
898
899 self.maximum_seen_jump_target = core::cmp::max(self.maximum_seen_jump_target, target);
900
901 self.start_new_basic_block()?;
902 self.0.before_instruction();
903 self.0.call(ra, target);
904 Ok(())
905 }
906
907 #[inline(always)]
908 fn call_indirect(&mut self, ra: Reg, base: Reg, offset: u32) -> Self::ReturnTy {
909 let return_basic_block = self.instruction_by_basic_block.len() as u32;
910 if self
911 .jump_table_index_by_basic_block
912 .get(return_basic_block as usize)
913 .copied()
914 .unwrap_or(0)
915 == 0
916 {
917 bail_static!("found a call instruction where the next basic block is not part of the jump table");
918 }
919
920 self.start_new_basic_block()?;
921 self.0.before_instruction();
922 self.0.call_indirect(ra, base, offset);
923 Ok(())
924 }
925
926 #[inline(always)]
927 fn jump(&mut self, target: u32) -> Self::ReturnTy {
928 self.maximum_seen_jump_target = core::cmp::max(self.maximum_seen_jump_target, target);
929 self.start_new_basic_block()?;
930 self.0.before_instruction();
931 self.0.jump(target);
932 Ok(())
933 }
934
935 #[inline(always)]
936 fn jump_indirect(&mut self, base: Reg, offset: u32) -> Self::ReturnTy {
937 self.start_new_basic_block()?;
938 self.0.before_instruction();
939 self.0.jump_indirect(base, offset);
940 Ok(())
941 }
942}
943
944impl Module {
945 pub(crate) fn is_debug_trace_execution_enabled(&self) -> bool {
946 self.0.debug_trace_execution
947 }
948
949 pub(crate) fn instructions(&self) -> &[Instruction] {
950 &self.interpreted_module().unwrap().instructions
951 }
952
953 pub(crate) fn compiled_module(&self) -> &CompiledModuleKind {
954 &self.0.compiled_module
955 }
956
957 pub(crate) fn interpreted_module(&self) -> Option<&InterpretedModule> {
958 self.0.interpreted_module.as_ref()
959 }
960
961 pub(crate) fn blob(&self) -> &ProgramBlob<'static> {
962 &self.0.blob
963 }
964
965 pub(crate) fn get_export(&self, export_index: usize) -> Option<&ProgramExport> {
966 self.0.exports.get(export_index)
967 }
968
969 pub(crate) fn instruction_by_basic_block(&self, nth_basic_block: u32) -> Option<u32> {
970 self.0.instruction_by_basic_block.get(nth_basic_block as usize).copied()
971 }
972
973 pub(crate) fn jump_table_index_by_basic_block(&self, nth_basic_block: u32) -> Option<u32> {
974 let index = self
975 .0
976 .jump_table_index_by_basic_block
977 .get(nth_basic_block as usize)
978 .copied()
979 .unwrap_or(0);
980 if index == 0 {
981 None
982 } else {
983 Some(index)
984 }
985 }
986
987 pub(crate) fn basic_block_by_jump_table_index(&self, jump_table_index: u32) -> Option<u32> {
988 self.0.basic_block_by_jump_table_index.get(jump_table_index as usize).copied()
989 }
990
991 pub(crate) fn gas_metering(&self) -> Option<GasMeteringKind> {
992 self.0.gas_metering
993 }
994
995 pub fn new(engine: &Engine, config: &ModuleConfig, bytes: impl AsRef<[u8]>) -> Result<Self, Error> {
997 let blob = match ProgramBlob::parse(bytes.as_ref()) {
998 Ok(blob) => blob,
999 Err(error) => {
1000 bail!("failed to parse blob: {}", error);
1001 }
1002 };
1003
1004 Self::from_blob(engine, config, &blob)
1005 }
1006
1007 pub fn from_blob(engine: &Engine, config: &ModuleConfig, blob: &ProgramBlob) -> Result<Self, Error> {
1009 log::debug!("Preparing a module from a blob of length {}...", blob.as_bytes().len());
1010
1011 MemoryMap::new(config.page_size, blob.ro_data_size(), blob.rw_data_size(), blob.stack_size()).map_err(Error::from_static_str)?;
1013
1014 let imports = {
1015 log::trace!("Parsing imports...");
1016 let mut imports = Vec::new();
1017 for import in blob.imports() {
1018 let import = import.map_err(Error::from_display)?;
1019 log::trace!(" Import #{}: {}", imports.len(), import.symbol());
1020
1021 if imports.len() > VM_MAXIMUM_IMPORT_COUNT as usize {
1022 bail!(
1023 "too many imports; the program contains more than {} imports",
1024 VM_MAXIMUM_IMPORT_COUNT
1025 );
1026 }
1027
1028 imports.push(import);
1029 }
1030 imports
1031 };
1032
1033 let (initial_maximum_seen_jump_target, basic_block_by_jump_table_index, jump_table_index_by_basic_block) = {
1034 log::trace!("Parsing jump table...");
1035 let mut basic_block_by_jump_table_index = Vec::with_capacity(blob.jump_table_upper_bound() + 1);
1036 let mut jump_table_index_by_basic_block = vec![0; blob.basic_block_count() as usize];
1037
1038 basic_block_by_jump_table_index.push(u32::MAX);
1040
1041 let mut maximum_seen_jump_target = 0;
1042 for nth_basic_block in blob.jump_table() {
1043 let nth_basic_block = nth_basic_block.map_err(Error::from_display)?;
1044
1045 if let Some(slot) = jump_table_index_by_basic_block.get_mut(nth_basic_block as usize) {
1046 *slot = basic_block_by_jump_table_index.len() as u32;
1047 } else {
1048 bail_static!("program contains an invalid basic block count");
1049 }
1050
1051 maximum_seen_jump_target = core::cmp::max(maximum_seen_jump_target, nth_basic_block);
1052 basic_block_by_jump_table_index.push(nth_basic_block);
1053 }
1054
1055 basic_block_by_jump_table_index.shrink_to_fit();
1056
1057 (
1058 maximum_seen_jump_target,
1059 basic_block_by_jump_table_index,
1060 jump_table_index_by_basic_block,
1061 )
1062 };
1063
1064 let (maximum_export_jump_target, exports) = {
1065 log::trace!("Parsing exports...");
1066 let mut maximum_export_jump_target = 0;
1067 let mut exports = Vec::with_capacity(1);
1068 for export in blob.exports() {
1069 let export = export.map_err(Error::from_display)?;
1070 maximum_export_jump_target = core::cmp::max(maximum_export_jump_target, export.jump_target());
1071
1072 log::trace!(" Export at @{}: {}", export.jump_target(), export.symbol());
1073 exports.push(export);
1074 if exports.len() > VM_MAXIMUM_EXPORT_COUNT as usize {
1075 bail!(
1076 "too many exports; the program contains more than {} exports",
1077 VM_MAXIMUM_EXPORT_COUNT
1078 );
1079 }
1080 }
1081 (maximum_export_jump_target, exports)
1082 };
1083
1084 let init = GuestInit {
1085 page_size: config.page_size,
1086 ro_data: blob.ro_data(),
1087 rw_data: blob.rw_data(),
1088 ro_data_size: blob.ro_data_size(),
1089 rw_data_size: blob.rw_data_size(),
1090 stack_size: blob.stack_size(),
1091 };
1092
1093 macro_rules! new_common {
1094 () => {{
1095 let mut common = Common {
1096 code: blob.code(),
1097 config,
1098 imports: &imports,
1099 jump_table_index_by_basic_block: &jump_table_index_by_basic_block,
1100 instruction_by_basic_block: Vec::new(),
1101 gas_cost_for_basic_block: Vec::new(),
1102 maximum_seen_jump_target: initial_maximum_seen_jump_target,
1103 nth_instruction: 0,
1104 instruction_count: blob.instruction_count() as usize,
1105 basic_block_count: blob.basic_block_count() as usize,
1106 block_in_progress: false,
1107 current_instruction_offset: 0,
1108 };
1109
1110 common.instruction_by_basic_block.reserve(common.basic_block_count + 1);
1111 common.instruction_by_basic_block.push(0);
1112 if config.gas_metering.is_some() {
1113 common.gas_cost_for_basic_block.resize(common.basic_block_count, 0);
1114 }
1115
1116 common
1117 }};
1118 }
1119
1120 #[allow(unused_macros)]
1121 macro_rules! compile_module {
1122 ($sandbox_kind:ident, $module_kind:ident, $run:ident) => {{
1123 let (visitor, aux) = CompiledModule::<$sandbox_kind>::create_visitor(
1124 config,
1125 &exports,
1126 &basic_block_by_jump_table_index,
1127 &jump_table_index_by_basic_block,
1128 init,
1129 blob.instruction_count() as usize,
1130 blob.basic_block_count() as usize,
1131 engine.debug_trace_execution,
1132 )?;
1133
1134 let common = new_common!();
1135 let visitor = CommonVisitor(VisitorWrapper { common, visitor });
1136 let (visitor, result) = $run(blob, visitor);
1137 result?;
1138
1139 let (common, module) = CompiledModule::<$sandbox_kind>::finish_compilation(visitor.0, aux)?;
1140 Some((common, CompiledModuleKind::$module_kind(module)))
1141 }};
1142 }
1143
1144 let compiled: Option<(Common, CompiledModuleKind)> = if_compiler_is_supported! {
1145 {
1146 if engine.selected_backend == BackendKind::Compiler {
1147 if let Some(selected_sandbox) = engine.selected_sandbox {
1148 type VisitorTy<'a> = CommonVisitor<'a, crate::compiler::Compiler<'a>>;
1149 let run = polkavm_common::program::prepare_visitor!(COMPILER_VISITOR, VisitorTy<'a>);
1150
1151 match selected_sandbox {
1152 SandboxKind::Linux => {
1153 #[cfg(target_os = "linux")]
1154 {
1155 compile_module!(SandboxLinux, Linux, run)
1156 }
1157
1158 #[cfg(not(target_os = "linux"))]
1159 {
1160 log::debug!("Selected sandbox unavailable!");
1161 None
1162 }
1163 },
1164 SandboxKind::Generic => {
1165 compile_module!(SandboxGeneric, Generic, run)
1166 },
1167 }
1168 } else {
1169 None
1170 }
1171 } else {
1172 None
1173 }
1174 } else {{
1175 None
1176 }}
1177 };
1178
1179 let interpreted: Option<(Common, InterpretedModule)> = if engine.interpreter_enabled {
1180 let common = new_common!();
1181 type VisitorTy<'a> = CommonVisitor<'a, Vec<Instruction>>;
1182 let instructions = Vec::with_capacity(blob.instruction_count() as usize);
1183 let visitor: VisitorTy = CommonVisitor(VisitorWrapper {
1184 common,
1185 visitor: instructions,
1186 });
1187
1188 let run = polkavm_common::program::prepare_visitor!(INTERPRETER_VISITOR, VisitorTy<'a>);
1189 let (visitor, result) = run(blob, visitor);
1190 result?;
1191
1192 let CommonVisitor(VisitorWrapper {
1193 mut common,
1194 visitor: instructions,
1195 }) = visitor;
1196
1197 let module = InterpretedModule::new(init, core::mem::take(&mut common.gas_cost_for_basic_block), instructions)?;
1198 Some((common, module))
1199 } else {
1200 None
1201 };
1202
1203 let mut common = None;
1204 let compiled_module = if let Some((compiled_common, compiled_module)) = compiled {
1205 common = Some(compiled_common);
1206 compiled_module
1207 } else {
1208 CompiledModuleKind::Unavailable
1209 };
1210
1211 let interpreted_module = if let Some((interpreted_common, interpreted_module)) = interpreted {
1212 if common.is_none() {
1213 common = Some(interpreted_common);
1214 }
1215 Some(interpreted_module)
1216 } else {
1217 None
1218 };
1219
1220 let common = common.unwrap();
1221 if common.nth_instruction == 0 {
1222 bail!("the module contains no code");
1223 }
1224
1225 if common.block_in_progress {
1226 bail!("code doesn't end with a control flow instruction");
1227 }
1228
1229 if common.nth_instruction > VM_MAXIMUM_INSTRUCTION_COUNT as usize {
1230 bail!(
1231 "too many instructions; the program contains more than {} instructions",
1232 VM_MAXIMUM_INSTRUCTION_COUNT
1233 );
1234 }
1235
1236 if common.nth_instruction != common.instruction_count {
1237 bail!(
1238 "program contains an invalid instruction count (expected {}, found {})",
1239 common.instruction_count,
1240 common.nth_instruction
1241 );
1242 }
1243
1244 if common.instruction_by_basic_block.len() != common.basic_block_count {
1245 bail!(
1246 "program contains an invalid basic block count (expected {}, found {})",
1247 common.basic_block_count,
1248 common.instruction_by_basic_block.len()
1249 );
1250 }
1251
1252 debug_assert!(!common.instruction_by_basic_block.is_empty());
1253 let maximum_valid_jump_target = (common.instruction_by_basic_block.len() - 1) as u32;
1254 if common.maximum_seen_jump_target > maximum_valid_jump_target {
1255 bail!(
1256 "out of range jump found; found a jump to @{:x}, while the very last valid jump target is @{maximum_valid_jump_target:x}",
1257 common.maximum_seen_jump_target
1258 );
1259 }
1260
1261 if maximum_export_jump_target > maximum_valid_jump_target {
1262 let export = exports
1263 .iter()
1264 .find(|export| export.jump_target() == maximum_export_jump_target)
1265 .unwrap();
1266 bail!(
1267 "out of range export found; export {} points to @{:x}, while the very last valid jump target is @{maximum_valid_jump_target:x}",
1268 export.symbol(),
1269 export.jump_target(),
1270 );
1271 }
1272
1273 let instruction_by_basic_block = {
1274 let mut vec = common.instruction_by_basic_block;
1275 vec.shrink_to_fit();
1276 vec
1277 };
1278
1279 log::trace!("Processing finished!");
1280
1281 assert!(compiled_module.is_some() || interpreted_module.is_some());
1282 if compiled_module.is_some() {
1283 log::debug!("Backend used: 'compiled'");
1284 } else {
1285 log::debug!("Backend used: 'interpreted'");
1286 }
1287
1288 let export_index_by_symbol = exports
1289 .iter()
1290 .enumerate()
1291 .map(|(index, export)| (export.symbol().to_vec(), index))
1292 .collect();
1293
1294 let exports = exports.into_iter().map(|export| export.into_owned()).collect();
1295 let imports = imports.into_iter().map(|import| import.into_owned()).collect();
1296
1297 let memory_map = init.memory_map().map_err(Error::from_static_str)?;
1298 log::debug!(
1299 " Memory map: RO data: 0x{:08x}..0x{:08x} ({}/{} bytes)",
1300 memory_map.ro_data_range().start,
1301 memory_map.ro_data_range().end,
1302 blob.ro_data_size(),
1303 memory_map.ro_data_range().len(),
1304 );
1305 log::debug!(
1306 " Memory map: RW data: 0x{:08x}..0x{:08x} ({}/{} bytes)",
1307 memory_map.rw_data_range().start,
1308 memory_map.rw_data_range().end,
1309 blob.rw_data_size(),
1310 memory_map.rw_data_range().len(),
1311 );
1312 log::debug!(
1313 " Memory map: Stack: 0x{:08x}..0x{:08x} ({}/{} bytes)",
1314 memory_map.stack_range().start,
1315 memory_map.stack_range().end,
1316 blob.stack_size(),
1317 memory_map.stack_range().len(),
1318 );
1319
1320 Ok(Module(Arc::new(ModulePrivate {
1321 debug_trace_execution: engine.debug_trace_execution,
1322 exports,
1323 imports,
1324 export_index_by_symbol,
1325
1326 instruction_by_basic_block,
1327 jump_table_index_by_basic_block,
1328 basic_block_by_jump_table_index,
1329
1330 blob: blob.clone().into_owned(),
1332 compiled_module,
1333 interpreted_module,
1334 memory_map,
1335 gas_metering: config.gas_metering,
1336 })))
1337 }
1338
1339 pub fn memory_map(&self) -> &MemoryMap {
1341 &self.0.memory_map
1342 }
1343
1344 pub fn lookup_export(&self, symbol: impl AsRef<[u8]>) -> Option<ExportIndex> {
1346 let symbol = symbol.as_ref();
1347 let export_index = *self.0.export_index_by_symbol.get(symbol)?;
1348 Some(ExportIndex(export_index))
1349 }
1350
1351 pub fn machine_code(&self) -> Option<Cow<[u8]>> {
1356 if_compiler_is_supported! {
1357 {
1358 match self.0.compiled_module {
1359 #[cfg(target_os = "linux")]
1360 CompiledModuleKind::Linux(ref module) => Some(module.machine_code()),
1361 CompiledModuleKind::Generic(ref module) => Some(module.machine_code()),
1362 CompiledModuleKind::Unavailable => None,
1363 }
1364 } else {
1365 None
1366 }
1367 }
1368 }
1369
1370 pub fn machine_code_origin(&self) -> Option<u64> {
1375 if_compiler_is_supported! {
1376 {
1377 match self.0.compiled_module {
1378 #[cfg(target_os = "linux")]
1379 CompiledModuleKind::Linux(..) => Some(polkavm_common::zygote::VM_ADDR_NATIVE_CODE),
1380 CompiledModuleKind::Generic(..) => None,
1381 CompiledModuleKind::Unavailable => None,
1382 }
1383 } else {
1384 None
1385 }
1386 }
1387 }
1388
1389 pub fn nth_instruction_to_code_offset_map(&self) -> Option<&[u32]> {
1401 if_compiler_is_supported! {
1402 {
1403 match self.0.compiled_module {
1404 #[cfg(target_os = "linux")]
1405 CompiledModuleKind::Linux(ref module) => Some(module.nth_instruction_to_code_offset_map()),
1406 CompiledModuleKind::Generic(ref module) => Some(module.nth_instruction_to_code_offset_map()),
1407 CompiledModuleKind::Unavailable => None,
1408 }
1409 } else {
1410 None
1411 }
1412 }
1413 }
1414
1415 pub fn nth_basic_block_to_gas_cost_map(&self) -> Option<&[u32]> {
1420 self.0
1421 .interpreted_module
1422 .as_ref()
1423 .map(|module| module.gas_cost_for_basic_block.as_slice())
1424 }
1425
1426 pub(crate) fn debug_print_location(&self, log_level: log::Level, pc: u32) {
1427 log::log!(log_level, " At #{pc}:");
1428
1429 let blob = self.blob();
1430 let Ok(Some(mut line_program)) = blob.get_debug_line_program_at(pc) else {
1431 log::log!(log_level, " (no location available)");
1432 return;
1433 };
1434
1435 for _ in 0..128 {
1436 let Ok(Some(region_info)) = line_program.run() else { break };
1438
1439 if !region_info.instruction_range().contains(&pc) {
1440 continue;
1441 }
1442
1443 for frame in region_info.frames() {
1444 let kind = match frame.kind() {
1445 FrameKind::Enter => 'f',
1446 FrameKind::Call => 'c',
1447 FrameKind::Line => 'l',
1448 };
1449
1450 if let Ok(full_name) = frame.full_name() {
1451 if let Ok(Some(location)) = frame.location() {
1452 log::log!(log_level, " ({kind}) '{full_name}' [{location}]");
1453 } else {
1454 log::log!(log_level, " ({kind}) '{full_name}'");
1455 }
1456 }
1457 }
1458 }
1459 }
1460}
1461
1462trait CallFn<T>: Send + Sync {
1463 fn call(&self, user_data: &mut T, access: BackendAccess, raw: &mut CallerRaw) -> Result<(), Trap>;
1464}
1465
1466#[repr(transparent)]
1467pub struct CallFnArc<T>(Arc<dyn CallFn<T>>);
1468
1469impl<T> Clone for CallFnArc<T> {
1470 fn clone(&self) -> Self {
1471 Self(Arc::clone(&self.0))
1472 }
1473}
1474
1475pub trait IntoCallFn<T, Params, Result>: Send + Sync + 'static {
1476 #[doc(hidden)]
1477 const _REGS_REQUIRED: usize;
1478
1479 #[doc(hidden)]
1480 fn _into_extern_fn(self) -> CallFnArc<T>;
1481}
1482
1483pub trait AbiTy: Sized + Send + 'static {
1485 #[doc(hidden)]
1486 const _REGS_REQUIRED: usize;
1487
1488 #[doc(hidden)]
1489 fn _get(get_reg: impl FnMut() -> RegValue) -> Self;
1490
1491 #[doc(hidden)]
1492 fn _set(self, set_reg: impl FnMut(RegValue));
1493}
1494
1495impl AbiTy for u32 {
1496 const _REGS_REQUIRED: usize = 1;
1497
1498 fn _get(mut get_reg: impl FnMut() -> RegValue) -> Self {
1499 get_reg()
1500 }
1501
1502 fn _set(self, mut set_reg: impl FnMut(RegValue)) {
1503 set_reg(self)
1504 }
1505}
1506
1507impl AbiTy for i32 {
1508 const _REGS_REQUIRED: usize = <u32 as AbiTy>::_REGS_REQUIRED;
1509
1510 fn _get(get_reg: impl FnMut() -> RegValue) -> Self {
1511 <u32 as AbiTy>::_get(get_reg) as i32
1512 }
1513
1514 fn _set(self, set_reg: impl FnMut(RegValue)) {
1515 (self as u32)._set(set_reg)
1516 }
1517}
1518
1519impl AbiTy for u64 {
1520 const _REGS_REQUIRED: usize = 2;
1521
1522 fn _get(mut get_reg: impl FnMut() -> RegValue) -> Self {
1523 let value_lo = get_reg();
1524 let value_hi = get_reg();
1525 u64::from(value_lo) | (u64::from(value_hi) << 32)
1526 }
1527
1528 fn _set(self, mut set_reg: impl FnMut(RegValue)) {
1529 set_reg(self as u32);
1530 set_reg((self >> 32) as u32);
1531 }
1532}
1533
1534impl AbiTy for i64 {
1535 const _REGS_REQUIRED: usize = <u64 as AbiTy>::_REGS_REQUIRED;
1536
1537 fn _get(get_reg: impl FnMut() -> RegValue) -> Self {
1538 <u64 as AbiTy>::_get(get_reg) as i64
1539 }
1540
1541 fn _set(self, set_reg: impl FnMut(RegValue)) {
1542 (self as u64)._set(set_reg)
1543 }
1544}
1545
1546pub trait ReturnTy: Sized + Send + 'static {
1550 #[doc(hidden)]
1551 const _REGS_REQUIRED: usize;
1552
1553 #[doc(hidden)]
1554 fn _handle_return(self, set_reg: impl FnMut(RegValue)) -> Result<(), Trap>;
1555}
1556
1557impl<T> ReturnTy for T
1558where
1559 T: AbiTy,
1560{
1561 const _REGS_REQUIRED: usize = <T as AbiTy>::_REGS_REQUIRED;
1562
1563 fn _handle_return(self, set_reg: impl FnMut(RegValue)) -> Result<(), Trap> {
1564 self._set(set_reg);
1565 Ok(())
1566 }
1567}
1568
1569impl ReturnTy for () {
1570 const _REGS_REQUIRED: usize = 0;
1571
1572 fn _handle_return(self, _set_reg: impl FnMut(RegValue)) -> Result<(), Trap> {
1573 Ok(())
1574 }
1575}
1576
1577impl ReturnTy for Result<(), Trap> {
1578 const _REGS_REQUIRED: usize = 0;
1579
1580 fn _handle_return(self, _set_reg: impl FnMut(RegValue)) -> Result<(), Trap> {
1581 self
1582 }
1583}
1584
1585impl<T> ReturnTy for Result<T, Trap>
1586where
1587 T: AbiTy,
1588{
1589 const _REGS_REQUIRED: usize = <T as AbiTy>::_REGS_REQUIRED;
1590
1591 fn _handle_return(self, set_reg: impl FnMut(RegValue)) -> Result<(), Trap> {
1592 self?._set(set_reg);
1593 Ok(())
1594 }
1595}
1596
1597pub trait FuncArgs: Send {
1598 #[doc(hidden)]
1599 const _REGS_REQUIRED: usize;
1600
1601 #[doc(hidden)]
1602 fn _set(self, set_reg: impl FnMut(RegValue));
1603}
1604
1605pub trait FuncResult: Send {
1606 #[doc(hidden)]
1607 const _REGS_REQUIRED: usize;
1608
1609 #[doc(hidden)]
1610 fn _get(get_reg: impl FnMut() -> RegValue) -> Self;
1611}
1612
1613impl FuncResult for () {
1614 const _REGS_REQUIRED: usize = 0;
1615
1616 fn _get(_: impl FnMut() -> RegValue) -> Self {}
1617}
1618
1619impl<T> FuncResult for T
1620where
1621 T: AbiTy,
1622{
1623 const _REGS_REQUIRED: usize = 1;
1624
1625 fn _get(get_reg: impl FnMut() -> RegValue) -> Self {
1626 <T as AbiTy>::_get(get_reg)
1627 }
1628}
1629
1630macro_rules! impl_into_extern_fn {
1631 (@check_reg_count $regs_required:expr) => {
1632 if $regs_required > Reg::ARG_REGS.len() {
1633 log::error!("External call failed: too many registers required for arguments!");
1635 return Err(Trap::default());
1636 }
1637 };
1638
1639 (@call $caller:expr, $callback:expr, ) => {{
1640 catch_hostcall_panic(|| ($callback)($caller))
1641 }};
1642
1643 (@get_reg $caller:expr) => {{
1644 let mut reg_index = 0;
1645 let caller = &mut $caller;
1646 move || -> RegValue {
1647 let value = caller.get_reg(Reg::ARG_REGS[reg_index]);
1648 reg_index += 1;
1649 value
1650 }
1651 }};
1652
1653 (@call $caller:expr, $callback:expr, $a0:ident) => {{
1654 impl_into_extern_fn!(@check_reg_count $a0::_REGS_REQUIRED);
1655
1656 let cb = impl_into_extern_fn!(@get_reg $caller);
1657 let a0 = $a0::_get(cb);
1658 catch_hostcall_panic(|| ($callback)($caller, a0))
1659 }};
1660
1661 (@call $caller:expr, $callback:expr, $a0:ident, $a1:ident) => {{
1662 impl_into_extern_fn!(@check_reg_count $a0::_REGS_REQUIRED + $a1::_REGS_REQUIRED);
1663
1664 let mut cb = impl_into_extern_fn!(@get_reg $caller);
1665 let a0 = $a0::_get(&mut cb);
1666 let a1 = $a1::_get(cb);
1667 catch_hostcall_panic(|| ($callback)($caller, a0, a1))
1668 }};
1669
1670 (@call $caller:expr, $callback:expr, $a0:ident, $a1:ident, $a2:ident) => {{
1671 impl_into_extern_fn!(@check_reg_count $a0::_REGS_REQUIRED + $a1::_REGS_REQUIRED + $a2::_REGS_REQUIRED);
1672
1673 let mut cb = impl_into_extern_fn!(@get_reg $caller);
1674 let a0 = $a0::_get(&mut cb);
1675 let a1 = $a1::_get(&mut cb);
1676 let a2 = $a2::_get(cb);
1677 catch_hostcall_panic(|| ($callback)($caller, a0, a1, a2))
1678 }};
1679
1680 (@call $caller:expr, $callback:expr, $a0:ident, $a1:ident, $a2:ident, $a3:ident) => {{
1681 impl_into_extern_fn!(@check_reg_count $a0::_REGS_REQUIRED + $a1::_REGS_REQUIRED + $a2::_REGS_REQUIRED + $a3::_REGS_REQUIRED);
1682
1683 let mut cb = impl_into_extern_fn!(@get_reg $caller);
1684 let a0 = $a0::_get(&mut cb);
1685 let a1 = $a1::_get(&mut cb);
1686 let a2 = $a2::_get(&mut cb);
1687 let a3 = $a3::_get(cb);
1688 catch_hostcall_panic(|| ($callback)($caller, a0, a1, a2, a3))
1689 }};
1690
1691 (@call $caller:expr, $callback:expr, $a0:ident, $a1:ident, $a2:ident, $a3:ident, $a4:ident) => {{
1692 impl_into_extern_fn!(@check_reg_count $a0::_REGS_REQUIRED + $a1::_REGS_REQUIRED + $a2::_REGS_REQUIRED + $a3::_REGS_REQUIRED + $a4::_REGS_REQUIRED);
1693
1694 let mut cb = impl_into_extern_fn!(@get_reg $caller);
1695 let a0 = $a0::_get(&mut cb);
1696 let a1 = $a1::_get(&mut cb);
1697 let a2 = $a2::_get(&mut cb);
1698 let a3 = $a3::_get(&mut cb);
1699 let a4 = $a4::_get(cb);
1700 catch_hostcall_panic(|| ($callback)($caller, a0, a1, a2, a3, a4))
1701 }};
1702
1703 (@call $caller:expr, $callback:expr, $a0:ident, $a1:ident, $a2:ident, $a3:ident, $a4:ident, $a5:ident) => {{
1704 impl_into_extern_fn!(@check_reg_count $a0::_REGS_REQUIRED + $a1::_REGS_REQUIRED + $a2::_REGS_REQUIRED + $a3::_REGS_REQUIRED + $a4::_REGS_REQUIRED + $a5::_REGS_REQUIRED);
1705
1706 let mut cb = impl_into_extern_fn!(@get_reg $caller);
1707 let a0 = $a0::_get(&mut cb);
1708 let a1 = $a1::_get(&mut cb);
1709 let a2 = $a2::_get(&mut cb);
1710 let a3 = $a3::_get(&mut cb);
1711 let a4 = $a4::_get(&mut cb);
1712 let a5 = $a5::_get(cb);
1713 catch_hostcall_panic(|| ($callback)($caller, a0, a1, a2, a3, a4, a5))
1714 }};
1715
1716 ($arg_count:tt $($args:ident)*) => {
1717 impl<T, F, $($args,)* R> CallFn<T> for (F, UnsafePhantomData<(R, $($args),*)>)
1718 where
1719 F: Fn(Caller<'_, T>, $($args),*) -> R + Send + Sync + 'static,
1720 $($args: AbiTy,)*
1721 R: ReturnTy,
1722 {
1723 fn call(&self, user_data: &mut T, mut access: BackendAccess, raw: &mut CallerRaw) -> Result<(), Trap> {
1724 #[allow(unused_mut)]
1725 let result = Caller::wrap(user_data, &mut access, raw, move |mut caller| {
1726 impl_into_extern_fn!(@call caller, self.0, $($args),*)
1727 })?;
1728
1729 let set_reg = {
1730 let mut reg_index = 0;
1731 move |value: RegValue| {
1732 let reg = Reg::ARG_REGS[reg_index];
1733 access.set_reg(reg, value);
1734
1735 if let Some(ref mut tracer) = raw.tracer() {
1736 tracer.on_set_reg_in_hostcall(reg, value);
1737 }
1738
1739 reg_index += 1;
1740 }
1741 };
1742 result._handle_return(set_reg)
1743 }
1744 }
1745
1746 impl<T, F, $($args,)* R> IntoCallFn<T, ($($args,)*), R> for F
1747 where
1748 F: Fn($($args),*) -> R + Send + Sync + 'static,
1749 $($args: AbiTy,)*
1750 R: ReturnTy,
1751 {
1752 const _REGS_REQUIRED: usize = 0 $(+ $args::_REGS_REQUIRED)*;
1753
1754 fn _into_extern_fn(self) -> CallFnArc<T> {
1755 #[allow(non_snake_case)]
1756 let callback = move |_caller: Caller<T>, $($args: $args),*| -> R {
1757 self($($args),*)
1758 };
1759 CallFnArc(Arc::new((callback, UnsafePhantomData(PhantomData::<(R, $($args),*)>))))
1760 }
1761 }
1762
1763 impl<T, F, $($args,)* R> IntoCallFn<T, (Caller<'_, T>, $($args,)*), R> for F
1764 where
1765 F: Fn(Caller<'_, T>, $($args),*) -> R + Send + Sync + 'static,
1766 $($args: AbiTy,)*
1767 R: ReturnTy,
1768 {
1769 const _REGS_REQUIRED: usize = 0 $(+ $args::_REGS_REQUIRED)*;
1770
1771 fn _into_extern_fn(self) -> CallFnArc<T> {
1772 CallFnArc(Arc::new((self, UnsafePhantomData(PhantomData::<(R, $($args),*)>))))
1773 }
1774 }
1775
1776 impl<$($args: Send + AbiTy,)*> FuncArgs for ($($args,)*) {
1777 const _REGS_REQUIRED: usize = 0 $(+ $args::_REGS_REQUIRED)*;
1778
1779 #[allow(unused_mut)]
1780 #[allow(unused_variables)]
1781 #[allow(non_snake_case)]
1782 fn _set(self, mut set_reg: impl FnMut(RegValue)) {
1783 let ($($args,)*) = self;
1784 $($args._set(&mut set_reg);)*
1785 }
1786 }
1787 };
1788}
1789
1790impl_into_extern_fn!(0);
1791impl_into_extern_fn!(1 A0);
1792impl_into_extern_fn!(2 A0 A1);
1793impl_into_extern_fn!(3 A0 A1 A2);
1794impl_into_extern_fn!(4 A0 A1 A2 A3);
1795impl_into_extern_fn!(5 A0 A1 A2 A3 A4);
1796impl_into_extern_fn!(6 A0 A1 A2 A3 A4 A5);
1797
1798#[repr(transparent)]
1799struct UnsafePhantomData<T>(PhantomData<T>);
1800
1801unsafe impl<T> Send for UnsafePhantomData<T> {}
1803
1804unsafe impl<T> Sync for UnsafePhantomData<T> {}
1806
1807struct DynamicFn<T, F> {
1808 callback: F,
1809 _phantom: UnsafePhantomData<T>,
1810}
1811
1812fn catch_hostcall_panic<R>(callback: impl FnOnce() -> R) -> Result<R, Trap> {
1813 std::panic::catch_unwind(core::panic::AssertUnwindSafe(callback)).map_err(|panic| {
1814 if let Some(message) = panic.downcast_ref::<&str>() {
1815 log::error!("Hostcall panicked: {message}");
1816 } else if let Some(message) = panic.downcast_ref::<String>() {
1817 log::error!("Hostcall panicked: {message}");
1818 } else {
1819 log::error!("Hostcall panicked");
1820 }
1821
1822 Trap::default()
1823 })
1824}
1825
1826impl<T, F> CallFn<T> for DynamicFn<T, F>
1827where
1828 F: Fn(Caller<'_, T>) -> Result<(), Trap> + Send + Sync + 'static,
1829 T: 'static,
1830{
1831 fn call(&self, user_data: &mut T, mut access: BackendAccess, raw: &mut CallerRaw) -> Result<(), Trap> {
1832 Caller::wrap(user_data, &mut access, raw, move |caller| {
1833 catch_hostcall_panic(|| (self.callback)(caller))
1834 })??;
1835
1836 Ok(())
1837 }
1838}
1839
1840type FallbackHandlerArc<T> = Arc<dyn Fn(Caller<'_, T>, &[u8]) -> Result<(), Trap> + Send + Sync + 'static>;
1841
1842pub struct Linker<T> {
1843 engine_state: Arc<EngineState>,
1844 host_functions: HashMap<Vec<u8>, CallFnArc<T>>,
1845 #[allow(clippy::type_complexity)]
1846 fallback_handler: Option<FallbackHandlerArc<T>>,
1847 phantom: PhantomData<T>,
1848}
1849
1850impl<T> Linker<T> {
1851 pub fn new(engine: &Engine) -> Self {
1852 Self {
1853 engine_state: Arc::clone(&engine.state),
1854 host_functions: Default::default(),
1855 fallback_handler: None,
1856 phantom: PhantomData,
1857 }
1858 }
1859
1860 pub fn func_fallback(&mut self, func: impl Fn(Caller<'_, T>, &[u8]) -> Result<(), Trap> + Send + Sync + 'static) {
1862 self.fallback_handler = Some(Arc::new(func));
1863 }
1864
1865 pub fn func_new(
1867 &mut self,
1868 symbol: impl AsRef<[u8]>,
1869 func: impl Fn(Caller<'_, T>) -> Result<(), Trap> + Send + Sync + 'static,
1870 ) -> Result<&mut Self, Error>
1871 where
1872 T: 'static,
1873 {
1874 let symbol = symbol.as_ref();
1875 if self.host_functions.contains_key(symbol) {
1876 bail!(
1877 "cannot register host function: host function was already registered: {}",
1878 ProgramSymbol::from(symbol)
1879 );
1880 }
1881
1882 self.host_functions.insert(
1883 symbol.to_owned(),
1884 CallFnArc(Arc::new(DynamicFn {
1885 callback: func,
1886 _phantom: UnsafePhantomData(PhantomData),
1887 })),
1888 );
1889
1890 Ok(self)
1891 }
1892
1893 pub fn func_wrap<Params, Args>(
1895 &mut self,
1896 symbol: impl AsRef<[u8]>,
1897 func: impl IntoCallFn<T, Params, Args>,
1898 ) -> Result<&mut Self, Error> {
1899 let symbol = symbol.as_ref();
1900 if self.host_functions.contains_key(symbol) {
1901 bail!(
1902 "cannot register host function: host function was already registered: {}",
1903 ProgramSymbol::from(symbol)
1904 );
1905 }
1906
1907 self.host_functions.insert(symbol.to_owned(), func._into_extern_fn());
1908 Ok(self)
1909 }
1910
    pub fn instantiate_pre(&self, module: &Module) -> Result<InstancePre<T>, Error> {
        let mut host_functions: Vec<Option<CallFnArc<T>>> = Vec::with_capacity(module.0.imports.len());
        for import in &module.0.imports {
            let symbol_bytes: &[u8] = import.symbol();
            let Some(host_fn) = self.host_functions.get(symbol_bytes) else {
                if self.fallback_handler.is_some() {
                    host_functions.push(None);
                    continue;
                } else {
                    bail!("failed to instantiate module: missing host function: {}", import.symbol());
                }
            };

            host_functions.push(Some(host_fn.clone()));
        }

        assert_eq!(host_functions.len(), module.0.imports.len());
        Ok(InstancePre(Arc::new(InstancePrePrivate {
            engine_state: Arc::clone(&self.engine_state),
            module: module.clone(),
            host_functions,
            fallback_handler: self.fallback_handler.clone(),
            _private: PhantomData,
        })))
    }
}

struct InstancePrePrivate<T> {
    #[allow(dead_code)]
    engine_state: Arc<EngineState>,
    module: Module,
    host_functions: Vec<Option<CallFnArc<T>>>,
    fallback_handler: Option<FallbackHandlerArc<T>>,
    _private: PhantomData<T>,
}

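/// A pre-instantiated module with its imports already resolved.
///
/// Cloning is cheap, and the same `InstancePre` can be instantiated multiple times.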
pub struct InstancePre<T>(Arc<InstancePrePrivate<T>>);

impl<T> Clone for InstancePre<T> {
    fn clone(&self) -> Self {
        InstancePre(Arc::clone(&self.0))
    }
}

impl<T> InstancePre<T> {
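    /// Instantiates the module, spawning a sandbox when a compiled module is
    /// available and falling back to the interpreter otherwise.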
    pub fn instantiate(&self) -> Result<Instance<T>, Error> {
        let compiled_module = &self.0.module.0.compiled_module;
        let backend = if_compiler_is_supported! {
            {
                match compiled_module {
                    #[cfg(target_os = "linux")]
                    CompiledModuleKind::Linux(..) => {
                        let compiled_instance = SandboxInstance::<SandboxLinux>::spawn_and_load_module(Arc::clone(&self.0.engine_state), &self.0.module)?;
                        Some(InstanceBackend::CompiledLinux(compiled_instance))
                    },
                    CompiledModuleKind::Generic(..) => {
                        let compiled_instance = SandboxInstance::<SandboxGeneric>::spawn_and_load_module(Arc::clone(&self.0.engine_state), &self.0.module)?;
                        Some(InstanceBackend::CompiledGeneric(compiled_instance))
                    },
                    CompiledModuleKind::Unavailable => None
                }
            } else {
                match compiled_module {
                    CompiledModuleKind::Unavailable => None
                }
            }
        };

        let backend = match backend {
            Some(backend) => backend,
            None => InstanceBackend::Interpreted(InterpretedInstance::new_from_module(&self.0.module)?),
        };

        let tracer = if self.0.module.0.debug_trace_execution {
            Some(Tracer::new(&self.0.module))
        } else {
            None
        };

        Ok(Instance(Arc::new(InstancePrivate {
            instance_pre: self.clone(),
            mutable: Mutex::new(InstancePrivateMut {
                backend,
                raw: CallerRaw::new(tracer),
            }),
        })))
    }
}

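// Dispatches execution between the available backends: the Linux and generic
// compiled sandboxes (when the compiler is supported) and the interpreter.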
if_compiler_is_supported! {
    {
        enum InstanceBackend {
            #[cfg(target_os = "linux")]
            CompiledLinux(SandboxInstance<SandboxLinux>),
            CompiledGeneric(SandboxInstance<SandboxGeneric>),
            Interpreted(InterpretedInstance),
        }
    } else {
        enum InstanceBackend {
            Interpreted(InterpretedInstance),
        }
    }
}

impl InstanceBackend {
    fn execute(&mut self, args: ExecuteArgs) -> Result<(), ExecutionError> {
        if_compiler_is_supported! {
            {
                match self {
                    #[cfg(target_os = "linux")]
                    InstanceBackend::CompiledLinux(ref mut backend) => backend.execute(args),
                    InstanceBackend::CompiledGeneric(ref mut backend) => backend.execute(args),
                    InstanceBackend::Interpreted(ref mut backend) => backend.execute(args),
                }
            } else {
                match self {
                    InstanceBackend::Interpreted(ref mut backend) => backend.execute(args),
                }
            }
        }
    }

    fn access(&mut self) -> BackendAccess {
        if_compiler_is_supported! {
            {
                match self {
                    #[cfg(target_os = "linux")]
                    InstanceBackend::CompiledLinux(ref mut backend) => BackendAccess::CompiledLinux(backend.access()),
                    InstanceBackend::CompiledGeneric(ref mut backend) => BackendAccess::CompiledGeneric(backend.access()),
                    InstanceBackend::Interpreted(ref mut backend) => BackendAccess::Interpreted(backend.access()),
                }
            } else {
                match self {
                    InstanceBackend::Interpreted(ref mut backend) => BackendAccess::Interpreted(backend.access()),
                }
            }
        }
    }

    fn pid(&self) -> Option<u32> {
        if_compiler_is_supported! {
            {
                match self {
                    #[cfg(target_os = "linux")]
                    InstanceBackend::CompiledLinux(ref backend) => backend.sandbox().pid(),
                    InstanceBackend::CompiledGeneric(ref backend) => backend.sandbox().pid(),
                    InstanceBackend::Interpreted(..) => None,
                }
            } else {
                match self {
                    InstanceBackend::Interpreted(..) => None,
                }
            }
        }
    }
}

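/// An error describing an out-of-range guest memory access of `length` bytes
/// starting at `address`.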
#[derive(Debug)]
pub struct MemoryAccessError<T> {
    pub address: u32,
    pub length: u64,
    pub error: T,
}

impl<T> core::fmt::Display for MemoryAccessError<T>
where
    T: core::fmt::Display,
{
    fn fmt(&self, fmt: &mut core::fmt::Formatter) -> core::fmt::Result {
        write!(
            fmt,
            "out of range memory access in 0x{:x}-0x{:x} ({} bytes): {}",
            self.address,
            u64::from(self.address) + self.length,
            self.length,
            self.error
        )
    }
}

fn map_access_error<T>(error: MemoryAccessError<T>) -> Trap
where
    T: core::fmt::Display,
{
    log::warn!("{error}");
    Trap::default()
}

if_compiler_is_supported! {
    {
        pub(crate) enum BackendAccess<'a> {
            #[cfg(target_os = "linux")]
            CompiledLinux(<SandboxLinux as Sandbox>::Access<'a>),
            CompiledGeneric(<SandboxGeneric as Sandbox>::Access<'a>),
            Interpreted(InterpretedAccess<'a>),
        }
    } else {
        pub(crate) enum BackendAccess<'a> {
            Interpreted(InterpretedAccess<'a>),
        }
    }
}

if_compiler_is_supported! {
    {
        macro_rules! access_backend {
            ($itself:ident, |$access:ident| $e:expr) => {
                match $itself {
                    #[cfg(target_os = "linux")]
                    BackendAccess::CompiledLinux($access) => $e,
                    BackendAccess::CompiledGeneric($access) => $e,
                    BackendAccess::Interpreted($access) => $e,
                }
            }
        }
    } else {
        macro_rules! access_backend {
            ($itself:ident, |$access:ident| $e:expr) => {
                match $itself {
                    BackendAccess::Interpreted($access) => $e,
                }
            }
        }
    }
}

impl<'a> Access<'a> for BackendAccess<'a> {
    type Error = Trap;

    fn get_reg(&self, reg: Reg) -> RegValue {
        access_backend!(self, |access| access.get_reg(reg))
    }

    fn set_reg(&mut self, reg: Reg, value: RegValue) {
        access_backend!(self, |access| access.set_reg(reg, value))
    }

    fn read_memory_into_slice<'slice, B>(&self, address: u32, buffer: &'slice mut B) -> Result<&'slice mut [u8], Self::Error>
    where
        B: ?Sized + AsUninitSliceMut,
    {
        access_backend!(self, |access| Ok(access
            .read_memory_into_slice(address, buffer)
            .map_err(map_access_error)?))
    }

    fn write_memory(&mut self, address: u32, data: &[u8]) -> Result<(), Self::Error> {
        access_backend!(self, |access| Ok(access.write_memory(address, data).map_err(map_access_error)?))
    }

    fn sbrk(&mut self, size: u32) -> Option<u32> {
        access_backend!(self, |access| access.sbrk(size))
    }

    fn heap_size(&self) -> u32 {
        access_backend!(self, |access| access.heap_size())
    }

    fn program_counter(&self) -> Option<u32> {
        access_backend!(self, |access| access.program_counter())
    }

    fn native_program_counter(&self) -> Option<u64> {
        access_backend!(self, |access| access.native_program_counter())
    }

    fn gas_remaining(&self) -> Option<Gas> {
        access_backend!(self, |access| access.gas_remaining())
    }

    fn consume_gas(&mut self, gas: u64) {
        access_backend!(self, |access| access.consume_gas(gas))
    }
}

struct InstancePrivateMut {
    backend: InstanceBackend,
    raw: CallerRaw,
}

impl InstancePrivateMut {
    fn tracer(&mut self) -> Option<&mut Tracer> {
        self.raw.tracer()
    }
}

struct InstancePrivate<T> {
    instance_pre: InstancePre<T>,
    mutable: Mutex<InstancePrivateMut>,
}

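/// An instantiated module.
///
/// Cloning is cheap: all clones share the same underlying state, which is
/// protected by a mutex.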
pub struct Instance<T>(Arc<InstancePrivate<T>>);

impl<T> Clone for Instance<T> {
    fn clone(&self) -> Self {
        Instance(Arc::clone(&self.0))
    }
}

impl<T> Instance<T> {
    pub fn module(&self) -> &Module {
        &self.0.instance_pre.0.module
    }

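    /// Applies the given state updates and calls into the exported function
    /// selected by `call_args`.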
    pub fn call(&self, state_args: StateArgs, call_args: CallArgs<T>) -> Result<(), ExecutionError> {
        self.execute(state_args, Some(call_args))
    }

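    /// A convenience wrapper around [`Instance::call`]: looks up the export named
    /// `symbol`, passes `args` through the guest's argument registers and reads the
    /// result back from the guest's registers.
    ///
    /// A minimal sketch (not compiled as a doctest); it assumes the module exports an
    /// `add` function and that `(u32, u32)` and `u32` implement [`FuncArgs`] and
    /// [`FuncResult`] respectively:
    ///
    /// ```ignore
    /// let mut state = ();
    /// let sum: u32 = instance.call_typed(&mut state, "add", (1u32, 2u32))?;
    /// ```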
    pub fn call_typed<FnArgs, FnResult>(
        &self,
        user_data: &mut T,
        symbol: impl AsRef<[u8]>,
        args: FnArgs,
    ) -> Result<FnResult, ExecutionError>
    where
        FnArgs: FuncArgs,
        FnResult: FuncResult,
    {
        let symbol = symbol.as_ref();
        let Some(export_index) = self.module().lookup_export(symbol) else {
            return Err(ExecutionError::Error(
                format!(
                    "failed to call function {}: the module contains no such export",
                    ProgramSymbol::new(symbol.into())
                )
                .into(),
            ));
        };

        let mut call_args = CallArgs::new(user_data, export_index);
        call_args.args_typed::<FnArgs>(args);

        self.call(Default::default(), call_args)?;
        Ok(self.get_result_typed::<FnResult>())
    }

    pub fn update_state(&self, state_args: StateArgs) -> Result<(), ExecutionError> {
        self.execute(state_args, None)
    }

    pub fn reset_memory(&self) -> Result<(), Error> {
        let mut args = StateArgs::new();
        args.reset_memory(true);
        self.update_state(args).map_err(Error::from_execution_error)
    }

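    /// Grows the instance's heap by `size` bytes.
    ///
    /// Returns `Ok(None)` if the heap cannot be grown past the module's maximum
    /// heap size, and otherwise the new top of the heap.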
    pub fn sbrk(&self, size: u32) -> Result<Option<u32>, Error> {
        let mut mutable = match self.0.mutable.lock() {
            Ok(mutable) => mutable,
            Err(poison) => poison.into_inner(),
        };

        let Some(new_size) = mutable.backend.access().heap_size().checked_add(size) else {
            return Ok(None);
        };

        if new_size > self.module().memory_map().max_heap_size() {
            return Ok(None);
        };

        let mut args = StateArgs::new();
        args.sbrk(size);
        self.execute_impl(&mut mutable, args, None).map_err(Error::from_execution_error)?;

        debug_assert_eq!(mutable.backend.access().heap_size(), new_size);
        Ok(Some(self.module().memory_map().heap_base() + new_size))
    }

    fn execute(&self, state_args: StateArgs, call_args: Option<CallArgs<T>>) -> Result<(), ExecutionError> {
        let mutable = &self.0.mutable;
        let mut mutable = match mutable.lock() {
            Ok(mutable) => mutable,
            Err(poison) => poison.into_inner(),
        };

        self.execute_impl(&mut mutable, state_args, call_args)
    }

    fn execute_impl(
        &self,
        mutable: &mut InstancePrivateMut,
        state_args: StateArgs,
        mut call_args: Option<CallArgs<T>>,
    ) -> Result<(), ExecutionError> {
        use polkavm_common::{VM_RPC_FLAG_RESET_MEMORY_AFTER_EXECUTION, VM_RPC_FLAG_RESET_MEMORY_BEFORE_EXECUTION};

        let instance_pre = &self.0.instance_pre;
        let module = &instance_pre.0.module;

        if state_args.sbrk > 0 {
            let current_size = if state_args.reset_memory {
                0
            } else {
                mutable.backend.access().heap_size()
            };

            let new_size = current_size.checked_add(state_args.sbrk);
            if !new_size.map_or(false, |new_size| new_size <= module.memory_map().max_heap_size()) {
                return Err(ExecutionError::Error(Error::from_static_str(
                    "execution failed: cannot grow the heap over the maximum",
                )));
            }
        }

        let mut args = ExecuteArgs::new();
        if state_args.reset_memory {
            args.flags |= VM_RPC_FLAG_RESET_MEMORY_BEFORE_EXECUTION;
        }

        args.gas = state_args.gas;
        args.sbrk = state_args.sbrk;

        let (result, export) = if let Some(call_args) = call_args.as_mut() {
            let Some(export) = module.0.exports.get(call_args.export_index) else {
                return Err(ExecutionError::Error(
                    format!(
                        "failed to call export #{}: out of range index; the module doesn't contain this many exports",
                        call_args.export_index
                    )
                    .into(),
                ));
            };

            args.entry_point = Some(call_args.export_index);
            args.regs = Some(&call_args.initial_regs);
            if call_args.reset_memory_after_call {
                args.flags |= VM_RPC_FLAG_RESET_MEMORY_AFTER_EXECUTION;
            }

            log::trace!(
                "Calling into {}... (gas limit = {:?})",
                export.symbol(),
                module.0.gas_metering.and(args.gas)
            );

            if let Some(ref mut tracer) = mutable.tracer() {
                tracer.on_before_execute(&args);
            }

            let result = {
                let mut on_hostcall = on_hostcall(
                    call_args.user_data,
                    &instance_pre.0.host_functions,
                    &instance_pre.0.module.0.imports,
                    instance_pre.0.fallback_handler.as_ref(),
                    &mut mutable.raw,
                );

                args.hostcall_handler = Some(&mut on_hostcall);
                mutable.backend.execute(args)
            };

            (result, Some(export))
        } else {
            log::trace!("Updating state...");

            if let Some(ref mut tracer) = mutable.tracer() {
                tracer.on_before_execute(&args);
            }

            let result = mutable.backend.execute(args);
            (result, None)
        };

        if let Some(ref mut tracer) = mutable.tracer() {
            tracer.on_after_execute();
        }

        match result {
            Ok(()) => {
                log::trace!(
                    "...execution finished: success, leftover gas = {:?}",
                    mutable.backend.access().gas_remaining()
                );
            }
            Err(ExecutionError::Error(error)) => {
                log::trace!("...execution finished: error: {error}");

                if let Some(export) = export {
                    return Err(ExecutionError::Error(
                        format!("failed to call function {}: {}", export.symbol(), error).into(),
                    ));
                } else {
                    return Err(ExecutionError::Error(format!("execution failed: {error}").into()));
                }
            }
            Err(ExecutionError::Trap(trap)) => {
                log::trace!("...execution finished: trapped");
                return Err(ExecutionError::Trap(trap));
            }
            Err(ExecutionError::OutOfGas) => {
                log::trace!("...execution finished: ran out of gas");
                return Err(ExecutionError::OutOfGas);
            }
        }

        Ok(())
    }

    pub fn read_memory_into_slice<'slice, B>(&self, address: u32, buffer: &'slice mut B) -> Result<&'slice mut [u8], Trap>
    where
        B: ?Sized + AsUninitSliceMut,
    {
        let mut mutable = match self.0.mutable.lock() {
            Ok(mutable) => mutable,
            Err(poison) => poison.into_inner(),
        };

        mutable.backend.access().read_memory_into_slice(address, buffer)
    }

    pub fn read_memory_into_vec(&self, address: u32, length: u32) -> Result<Vec<u8>, Trap> {
        let mut mutable = match self.0.mutable.lock() {
            Ok(mutable) => mutable,
            Err(poison) => poison.into_inner(),
        };

        mutable.backend.access().read_memory_into_vec(address, length)
    }

    pub fn write_memory(&self, address: u32, data: &[u8]) -> Result<(), Trap> {
        let mut mutable = match self.0.mutable.lock() {
            Ok(mutable) => mutable,
            Err(poison) => poison.into_inner(),
        };

        let result = mutable.backend.access().write_memory(address, data);
        if let Some(ref mut tracer) = mutable.tracer() {
            tracer.on_memory_write_in_hostcall(address, data, result.is_ok())?;
        }

        result
    }

    pub fn heap_size(&self) -> u32 {
        let mut mutable = match self.0.mutable.lock() {
            Ok(mutable) => mutable,
            Err(poison) => poison.into_inner(),
        };

        mutable.backend.access().heap_size()
    }

    pub fn get_reg(&self, reg: Reg) -> RegValue {
        let mut mutable = match self.0.mutable.lock() {
            Ok(mutable) => mutable,
            Err(poison) => poison.into_inner(),
        };

        mutable.backend.access().get_reg(reg)
    }

    pub fn get_result_typed<FnResult>(&self) -> FnResult
    where
        FnResult: FuncResult,
    {
        let mut mutable = match self.0.mutable.lock() {
            Ok(mutable) => mutable,
            Err(poison) => poison.into_inner(),
        };

        let mut output_count = 0;
        FnResult::_get(|| {
            let access = mutable.backend.access();
            let value = access.get_reg(Reg::ARG_REGS[output_count]);
            output_count += 1;
            value
        })
    }

    pub fn gas_remaining(&self) -> Option<Gas> {
        let mut mutable = match self.0.mutable.lock() {
            Ok(mutable) => mutable,
            Err(poison) => poison.into_inner(),
        };

        mutable.backend.access().gas_remaining()
    }

    pub fn pid(&self) -> Option<u32> {
        let mutable = match self.0.mutable.lock() {
            Ok(mutable) => mutable,
            Err(poison) => poison.into_inner(),
        };

        mutable.backend.pid()
    }
}

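/// An opaque index identifying one of a module's exports.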
#[derive(Copy, Clone, Debug)]
pub struct ExportIndex(usize);

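/// The arguments for a single call into the guest: which export to call, the
/// initial register state, and the host's user data.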
pub struct CallArgs<'a, T> {
    pub(crate) initial_regs: [RegValue; Reg::ALL.len()],
    pub(crate) user_data: &'a mut T,
    pub(crate) export_index: usize,
    pub(crate) reset_memory_after_call: bool,
}

impl<'a, T> CallArgs<'a, T> {
    pub fn new(user_data: &'a mut T, export_index: ExportIndex) -> Self {
        let mut initial_regs = [0; Reg::ALL.len()];
        initial_regs[Reg::SP as usize] = VM_ADDR_USER_STACK_HIGH;
        initial_regs[Reg::RA as usize] = VM_ADDR_RETURN_TO_HOST;

        Self {
            initial_regs,
            user_data,
            export_index: export_index.0,
            reset_memory_after_call: false,
        }
    }

    pub fn reset_memory_after_call(&mut self, value: bool) -> &mut Self {
        self.reset_memory_after_call = value;
        self
    }

    pub fn reg(&mut self, reg: Reg, value: RegValue) -> &mut Self {
        self.initial_regs[reg as usize] = value;
        self
    }

    pub fn args_untyped(&mut self, args: &[RegValue]) -> &mut Self {
        self.initial_regs[Reg::A0 as usize..Reg::A0 as usize + args.len()].copy_from_slice(args);
        self
    }

    pub fn args_typed<FnArgs>(&mut self, args: FnArgs) -> &mut Self
    where
        FnArgs: FuncArgs,
    {
        let mut input_count = 0;
        args._set(|value| {
            assert!(input_count <= Reg::MAXIMUM_INPUT_REGS);
            self.initial_regs[Reg::A0 as usize + input_count] = value;
            input_count += 1;
        });

        self
    }
}

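/// State updates to apply to an instance, either on their own through
/// [`Instance::update_state`] or as part of a call: resetting memory, setting
/// the gas limit, and growing the heap.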
pub struct StateArgs {
    pub(crate) reset_memory: bool,
    pub(crate) gas: Option<Gas>,
    pub(crate) sbrk: u32,
}

impl Default for StateArgs {
    fn default() -> Self {
        Self::new()
    }
}

impl StateArgs {
    pub fn new() -> Self {
        Self {
            reset_memory: false,
            gas: None,
            sbrk: 0,
        }
    }

    pub fn reset_memory(&mut self, value: bool) -> &mut Self {
        self.reset_memory = value;
        self
    }

    pub fn set_gas(&mut self, gas: Gas) -> &mut Self {
        self.gas = Some(gas);
        self
    }

    pub fn sbrk(&mut self, bytes: u32) -> &mut Self {
        self.sbrk = bytes;
        self
    }
}

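/// A handler invoked for every hostcall raised by the guest, given the hostcall
/// number and access to the guest's state.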
pub(crate) type HostcallHandler<'a> = &'a mut dyn for<'r> FnMut(u32, BackendAccess<'r>) -> Result<(), Trap>;

pub(crate) struct ExecuteArgs<'a> {
    pub(crate) entry_point: Option<usize>,
    pub(crate) regs: Option<&'a [RegValue; Reg::ALL.len()]>,
    pub(crate) gas: Option<Gas>,
    pub(crate) sbrk: u32,
    pub(crate) flags: u32,
    pub(crate) hostcall_handler: Option<HostcallHandler<'a>>,
    pub(crate) module: Option<&'a Module>,
    pub(crate) is_async: bool,
}

impl<'a> ExecuteArgs<'a> {
    pub(crate) fn new() -> Self {
        ExecuteArgs {
            entry_point: None,
            regs: None,
            gas: None,
            sbrk: 0,
            flags: 0,
            hostcall_handler: None,
            module: None,
            is_async: false,
        }
    }
}

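// Builds the hostcall handler used during execution: special (high-bit) hostcalls
// are routed to the tracer, regular hostcalls to the registered host functions,
// and unresolved imports to the fallback handler (if any).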
fn on_hostcall<'a, T>(
    user_data: &'a mut T,
    host_functions: &'a [Option<CallFnArc<T>>],
    imports: &'a [ProgramImport<'a>],
    fallback_handler: Option<&'a FallbackHandlerArc<T>>,
    raw: &'a mut CallerRaw,
) -> impl for<'r> FnMut(u32, BackendAccess<'r>) -> Result<(), Trap> + 'a {
    move |hostcall: u32, mut access: BackendAccess| -> Result<(), Trap> {
        if hostcall & (1 << 31) != 0 {
            if hostcall == polkavm_common::HOSTCALL_TRACE {
                if let Some(tracer) = raw.tracer() {
                    return tracer.on_trace(&mut access);
                }

                log::error!("trace hostcall called but no tracer is set");
                return Err(Trap::default());
            }

            log::error!("unknown special hostcall triggered: {}", hostcall);
            return Err(Trap::default());
        }

        let Some(host_fn) = host_functions.get(hostcall as usize).and_then(|func| func.as_ref()) else {
            if let Some(fallback_handler) = fallback_handler {
                let import = &imports[hostcall as usize];
                return Caller::wrap(user_data, &mut access, raw, move |caller| fallback_handler(caller, import.symbol()));
            }

            log::error!("hostcall to a function which doesn't exist: {}", hostcall);
            return Err(Trap::default());
        };

        if let Err(trap) = host_fn.0.call(user_data, access, raw) {
            log::debug!("hostcall failed: {}", trap);
            return Err(trap);
        }

        Ok(())
    }
}