1use alloc::boxed::Box;
2use alloc::format;
3use alloc::sync::Arc;
4use alloc::vec::Vec;
5
6use polkavm_common::abi::{MemoryMap, MemoryMapBuilder, VM_ADDR_RETURN_TO_HOST};
7use polkavm_common::cast::cast;
8use polkavm_common::program::{FrameKind, Imports, InstructionSetKind, Instructions, JumpTable, ProgramBlob, Reg};
9use polkavm_common::utils::{ArcBytes, AsUninitSliceMut, B32, B64};
10
11use crate::config::{BackendKind, Config, GasMeteringKind, ModuleConfig, SandboxKind};
12use crate::error::{bail, bail_static, Error};
13use crate::gas::{CostModel, CostModelKind, GasVisitor};
14use crate::interpreter::InterpretedInstance;
15use crate::utils::{GuestInit, InterruptKind};
16use crate::{Gas, ProgramCounter};
17
18#[cfg(feature = "module-cache")]
19use crate::module_cache::{ModuleCache, ModuleKey};
20
/// The protection level of a region of guest memory.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub enum MemoryProtection {
    /// The region is read-only.
    Read,
    /// The region is both readable and writable.
    ReadWrite,
}
26
// State shared by everything spawned from a single `Engine`.
//
// Two layouts exist: when the recompiler is supported on this target the state
// additionally carries the sandbox global state, the worker-process cache and
// the compiler cache; otherwise only the interpreter-related fields remain.
if_compiler_is_supported! {
    {
        use crate::sandbox::{Sandbox, SandboxInstance};
        use crate::compiler::{CompiledModule, CompilerCache};

        #[cfg(target_os = "linux")]
        use crate::sandbox::linux::Sandbox as SandboxLinux;
        #[cfg(feature = "generic-sandbox")]
        use crate::sandbox::generic::Sandbox as SandboxGeneric;

        pub(crate) struct EngineState {
            // Whether security sandboxing was left enabled in the config.
            pub(crate) sandboxing_enabled: bool,
            // `Some` only when the compiler backend was selected.
            pub(crate) sandbox_global: Option<crate::sandbox::GlobalStateKind>,
            // Cache of idle sandbox worker processes; `Some` only for the compiler backend.
            pub(crate) sandbox_cache: Option<crate::sandbox::WorkerCacheKind>,
            compiler_cache: CompilerCache,
            imperfect_logger_filtering_workaround: bool,
            #[cfg(feature = "module-cache")]
            module_cache: ModuleCache,
        }
    } else {
        pub(crate) struct EngineState {
            imperfect_logger_filtering_workaround: bool,
            #[cfg(feature = "module-cache")]
            module_cache: ModuleCache,
        }
    }
}
54
/// Helper trait used to uniformly turn both fallible and infallible values
/// into a `Result`, attaching a context message on error.
trait IntoResult<T> {
    fn into_result(self, message: &str) -> Result<T, Error>;
}

if_compiler_is_supported! {
    // Fallible case: sandbox errors get wrapped and annotated with `message`.
    #[cfg(target_os = "linux")]
    impl<T> IntoResult<T> for Result<T, polkavm_linux_raw::Error> {
        fn into_result(self, message: &str) -> Result<T, Error> {
            self.map_err(|error| Error::from(error).context(message))
        }
    }

    #[cfg(feature = "generic-sandbox")]
    use crate::sandbox::generic;

    #[cfg(feature = "generic-sandbox")]
    impl<T> IntoResult<T> for Result<T, generic::Error> {
        fn into_result(self, message: &str) -> Result<T, Error> {
            self.map_err(|error| Error::from(error).context(message))
        }
    }
}

// Infallible case: plain values are wrapped in `Ok` and the message is unused.
impl<T> IntoResult<T> for T {
    fn into_result(self, _message: &str) -> Result<T, Error> {
        Ok(self)
    }
}
83
/// The type of a guest register value.
pub type RegValue = u64;

/// Arguments for configuring code cache size limits.
#[allow(clippy::exhaustive_structs)]
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub struct SetCacheSizeLimitArgs {
    /// Maximum size of a single block (NOTE(review): unit not evident from this file — confirm at call sites).
    pub max_block_size: u32,
    /// Maximum total cache size, in bytes.
    pub max_cache_size_bytes: usize,
}

/// A PolkaVM engine; used to create [`Module`]s and instances.
pub struct Engine {
    // Which backend (interpreter or recompiler) was picked at construction time.
    selected_backend: BackendKind,
    #[allow(dead_code)]
    selected_sandbox: Option<SandboxKind>,
    // True when the interpreter may be needed: either it is the selected
    // backend, or crosschecking runs it alongside the compiler.
    interpreter_enabled: bool,
    crosscheck: bool,
    state: Arc<EngineState>,
    allow_dynamic_paging: bool,
    allow_experimental: bool,
    // Cost model used when a module's config doesn't specify one.
    default_cost_model: CostModelKind,
}
104
105impl Engine {
106 pub fn new(config: &Config) -> Result<Self, Error> {
107 if_compiler_is_supported! {
108 crate::sandbox::init_native_page_size();
109 }
110
111 if let Some(backend) = config.backend {
112 if !backend.is_supported() {
113 bail!("the '{backend}' backend is not supported on this platform")
114 }
115 }
116
117 if !config.allow_experimental && config.crosscheck {
118 bail!("cannot enable execution cross-checking: `set_allow_experimental`/`POLKAVM_ALLOW_EXPERIMENTAL` is not enabled");
119 }
120
121 if !config.sandboxing_enabled {
122 if !config.allow_experimental {
123 bail!("cannot disable security sandboxing: `set_allow_experimental`/`POLKAVM_ALLOW_EXPERIMENTAL` is not enabled");
124 } else {
125 log::warn!("SECURITY SANDBOXING IS DISABLED; THIS IS UNSUPPORTED; YOU HAVE BEEN WARNED");
126 }
127 }
128
129 if config.default_cost_model.is_some() && !config.allow_experimental {
130 bail!("cannot override the default gas cost model: `set_allow_experimental`/`POLKAVM_ALLOW_EXPERIMENTAL` is not enabled");
131 }
132
133 let crosscheck = config.crosscheck;
134 let default_backend = if BackendKind::Compiler.is_supported() && SandboxKind::Linux.is_supported() {
135 BackendKind::Compiler
136 } else {
137 BackendKind::Interpreter
138 };
139
140 let selected_backend = config.backend.unwrap_or(default_backend);
141 log::debug!("Selected backend: '{selected_backend}'");
142
143 #[cfg(feature = "module-cache")]
144 let module_cache = {
145 log::debug!("Enabling module cache... (LRU cache size = {})", config.lru_cache_size);
146 ModuleCache::new(config.cache_enabled, config.lru_cache_size)
147 };
148
149 #[cfg(not(feature = "module-cache"))]
150 if config.cache_enabled {
151 log::warn!("`cache_enabled` is true, but we were not compiled with the `module-cache` feature; caching will be disabled!");
152 }
153
154 let (selected_sandbox, state) = if_compiler_is_supported! {
155 {
156 if selected_backend == BackendKind::Compiler {
157 let default_sandbox = if SandboxKind::Linux.is_supported() {
158 SandboxKind::Linux
159 } else {
160 SandboxKind::Generic
161 };
162
163 let selected_sandbox = config.sandbox.unwrap_or(default_sandbox);
164 log::debug!("Selected sandbox: '{selected_sandbox}'");
165
166 if !selected_sandbox.is_supported() {
167 bail!("the '{selected_sandbox}' backend is not supported on this platform")
168 }
169
170 if selected_sandbox == SandboxKind::Generic && !config.allow_experimental {
171 bail!("cannot use the '{selected_sandbox}' sandbox: this sandbox is not production ready and may be insecure; you can enabled `set_allow_experimental`/`POLKAVM_ALLOW_EXPERIMENTAL` to be able to use it anyway");
172 }
173
174 let sandbox_global = crate::sandbox::GlobalStateKind::new(selected_sandbox, config)?;
175 let sandbox_cache = crate::sandbox::WorkerCacheKind::new(selected_sandbox, config);
176 for _ in 0..config.worker_count {
177 sandbox_cache.spawn(&sandbox_global)?;
178 }
179
180 let state = Arc::new(EngineState {
181 sandboxing_enabled: config.sandboxing_enabled,
182 sandbox_global: Some(sandbox_global),
183 sandbox_cache: Some(sandbox_cache),
184 compiler_cache: Default::default(),
185
186 imperfect_logger_filtering_workaround: config.imperfect_logger_filtering_workaround,
187 #[cfg(feature = "module-cache")]
188 module_cache,
189 });
190
191 (Some(selected_sandbox), state)
192 } else {
193 (None, Arc::new(EngineState {
194 sandboxing_enabled: config.sandboxing_enabled,
195 sandbox_global: None,
196 sandbox_cache: None,
197 compiler_cache: Default::default(),
198
199 imperfect_logger_filtering_workaround: config.imperfect_logger_filtering_workaround,
200 #[cfg(feature = "module-cache")]
201 module_cache
202 }))
203 }
204 } else {
205 (None, Arc::new(EngineState {
206 imperfect_logger_filtering_workaround: config.imperfect_logger_filtering_workaround,
207 #[cfg(feature = "module-cache")]
208 module_cache
209 }))
210 }
211 };
212
213 Ok(Engine {
214 selected_backend,
215 selected_sandbox,
216 interpreter_enabled: crosscheck || selected_backend == BackendKind::Interpreter,
217 crosscheck,
218 state,
219 allow_dynamic_paging: config.allow_dynamic_paging(),
220 allow_experimental: config.allow_experimental,
221 default_cost_model: config
222 .default_cost_model
223 .clone()
224 .unwrap_or(CostModelKind::Simple(CostModel::naive_ref())),
225 })
226 }
227
228 pub fn backend(&self) -> BackendKind {
230 self.selected_backend
231 }
232
233 pub fn idle_worker_pids(&self) -> Vec<u32> {
235 if_compiler_is_supported! {
236 {
237 self.state.sandbox_cache.as_ref().map(|cache| cache.idle_worker_pids()).unwrap_or_default()
238 } else {
239 Vec::new()
240 }
241 }
242 }
243}
244
// The per-sandbox compiled artifact for a module, or `Unavailable` when the
// module will run on the interpreter (or compilation wasn't possible).
if_compiler_is_supported! {
    {
        pub(crate) enum CompiledModuleKind {
            #[cfg(target_os = "linux")]
            Linux(CompiledModule<SandboxLinux>),
            #[cfg(feature = "generic-sandbox")]
            Generic(CompiledModule<SandboxGeneric>),
            Unavailable,
        }
    } else {
        pub(crate) enum CompiledModuleKind {
            Unavailable,
        }
    }
}
260
261impl CompiledModuleKind {
262 pub fn is_some(&self) -> bool {
263 !matches!(self, CompiledModuleKind::Unavailable)
264 }
265}
266
// The actual data backing a `Module`, shared behind an `Arc`.
pub(crate) struct ModulePrivate {
    #[allow(dead_code)]
    engine_state: Option<Arc<EngineState>>,
    crosscheck: bool,

    blob: ProgramBlob,
    compiled_module: CompiledModuleKind,
    memory_map: MemoryMap,
    gas_metering: Option<GasMeteringKind>,
    is_strict: bool,
    step_tracing: bool,
    dynamic_paging: bool,
    // `page_size - 1`; page sizes are always powers of two.
    page_size_mask: u32,
    // `log2(page_size)`.
    page_shift: u32,
    cost_model: CostModelKind,
    #[cfg(feature = "module-cache")]
    pub(crate) module_key: Option<ModuleKey>,

    is_per_instruction_metering: bool,
}

/// A compiled PolkaVM program module.
///
/// The inner `Option` is `Some` for the module's entire usable lifetime; it
/// is only taken out inside `Drop`.
#[derive(Clone)]
pub struct Module(pub(crate) Option<Arc<ModulePrivate>>);

impl Drop for Module {
    fn drop(&mut self) {
        // With the module cache enabled, hand our strong reference back to
        // the cache so it can decide whether to keep or retire the entry.
        #[cfg(feature = "module-cache")]
        if let Some(state) = self.0.take() {
            if let Some(ref engine_state) = state.engine_state {
                let engine_state = Arc::clone(engine_state);
                engine_state.module_cache.on_drop(state);
            }
        }
    }
}
303
304impl Module {
    /// Returns the inner module state.
    fn state(&self) -> &ModulePrivate {
        if let Some(ref private) = self.0 {
            private
        } else {
            // SAFETY: `self.0` is only ever `None` after `Drop` has taken it,
            // at which point the module can no longer be used, so this branch
            // is presumed unreachable. NOTE(review): hitting it would be UB —
            // a safe `unreachable!()` would trade a panic for that risk.
            unsafe { core::hint::unreachable_unchecked() }
        }
    }
313
314 pub(crate) fn is_per_instruction_metering(&self) -> bool {
315 self.state().is_per_instruction_metering
316 }
317
318 pub(crate) fn is_strict(&self) -> bool {
319 self.state().is_strict
320 }
321
322 pub(crate) fn is_step_tracing(&self) -> bool {
323 self.state().step_tracing
324 }
325
326 pub(crate) fn is_dynamic_paging(&self) -> bool {
327 self.state().dynamic_paging
328 }
329
    if_compiler_is_supported! {
        // Returns the compiled artifact (if any) backing this module.
        pub(crate) fn compiled_module(&self) -> &CompiledModuleKind {
            &self.state().compiled_module
        }
    }
335
336 pub(crate) fn blob(&self) -> &ProgramBlob {
337 &self.state().blob
338 }
339
340 pub(crate) fn code_len(&self) -> u32 {
341 cast(self.state().blob.code().len()).assert_always_fits_in_u32()
342 }
343
344 pub(crate) fn instructions_bounded_at(&self, offset: ProgramCounter) -> Instructions<InstructionSetKind> {
345 self.state().blob.instructions_bounded_at(offset)
346 }
347
348 pub(crate) fn is_jump_target_valid(&self, offset: ProgramCounter) -> bool {
349 self.state().blob.is_jump_target_valid(self.state().blob.isa(), offset)
350 }
351
352 pub(crate) fn find_start_of_basic_block(&self, offset: ProgramCounter) -> Option<ProgramCounter> {
353 polkavm_common::program::find_start_of_basic_block(
354 self.state().blob.isa(),
355 self.state().blob.code(),
356 self.state().blob.bitmask(),
357 offset.0,
358 )
359 .map(ProgramCounter)
360 }
361
362 pub(crate) fn jump_table(&self) -> JumpTable {
363 self.state().blob.jump_table()
364 }
365
366 pub fn get_debug_string(&self, offset: u32) -> Result<&str, polkavm_common::program::ProgramParseError> {
367 self.state().blob.get_debug_string(offset)
368 }
369
370 pub(crate) fn gas_metering(&self) -> Option<GasMeteringKind> {
371 self.state().gas_metering
372 }
373
374 pub(crate) fn is_multiple_of_page_size(&self, value: u32) -> bool {
375 (value & self.state().page_size_mask) == 0
376 }
377
378 pub(crate) fn round_to_page_size_down(&self, value: u32) -> u32 {
379 value & !self.state().page_size_mask
380 }
381
382 pub(crate) fn round_to_page_size_up(&self, value: u32) -> u32 {
383 self.round_to_page_size_down(value) + (u32::from((value & self.state().page_size_mask) != 0) << self.state().page_shift)
384 }
385
    /// Returns the gas charged for a trap, or 0 when gas metering is disabled.
    pub(crate) fn get_trap_gas_cost(&self) -> u32 {
        if self.gas_metering().is_some() {
            match self.cost_model() {
                // Simple model: derive the cost via the gas visitor.
                CostModelKind::Simple(cost_model) => crate::gas::trap_cost(GasVisitor::new(cost_model.clone())),
                // Full model: ask the instruction simulator.
                CostModelKind::Full(cost_model) => polkavm_common::simulator::trap_cost(self.blob().isa(), *cost_model),
            }
        } else {
            0
        }
    }
396
397 pub fn cost_model(&self) -> &CostModelKind {
399 &self.state().cost_model
400 }
401
    if_compiler_is_supported! {
        // Converts a guest address into its page number (address / page_size).
        pub(crate) fn address_to_page(&self, address: u32) -> u32 {
            address >> self.state().page_shift
        }
    }
407
408 pub fn new(engine: &Engine, config: &ModuleConfig, bytes: ArcBytes) -> Result<Self, Error> {
410 let blob = match ProgramBlob::parse(bytes) {
411 Ok(blob) => blob,
412 Err(error) => {
413 bail!("failed to parse blob: {}", error);
414 }
415 };
416
417 Self::from_blob(engine, config, blob)
418 }
419
420 pub fn from_blob(engine: &Engine, config: &ModuleConfig, blob: ProgramBlob) -> Result<Self, Error> {
422 if config.dynamic_paging() && !engine.allow_dynamic_paging {
423 bail!("dynamic paging was not enabled; use `Config::set_allow_dynamic_paging` to enable it");
424 }
425
426 if config.custom_codegen.is_some() && !engine.allow_experimental {
427 bail!("cannot use custom codegen: `set_allow_experimental`/`POLKAVM_ALLOW_EXPERIMENTAL` is not enabled");
428 }
429
430 if config.is_per_instruction_metering && engine.selected_backend == BackendKind::Compiler {
431 bail!("per instruction metering is not supported with the recompiler");
432 }
433
434 log::trace!(
435 "Creating new module from a {}-bit program blob",
436 if blob.is_64_bit() { 64 } else { 32 }
437 );
438
439 let cost_model = config.cost_model.clone().unwrap_or_else(|| engine.default_cost_model.clone());
440 if config.is_per_instruction_metering && !cost_model.is_naive() {
441 bail!("per instruction metering is not supported with a non-naive gas cost model");
442 }
443
444 #[cfg(all(target_arch = "x86_64", feature = "std"))]
446 if matches!(cost_model, CostModelKind::Full(..)) && !std::is_x86_feature_detected!("avx2") {
447 bail!("on AMD64 the full gas cost model is only supported on CPUs with AVX2 support");
448 }
449
450 #[cfg(feature = "module-cache")]
451 let module_key = {
452 let (module_key, module) = engine.state.module_cache.get(config, &blob, &cost_model);
453 if let Some(module) = module {
454 return Ok(module);
455 }
456 module_key
457 };
458
459 MemoryMapBuilder::new(config.page_size)
461 .ro_data_size(blob.ro_data_size())
462 .rw_data_size(blob.rw_data_size())
463 .stack_size(blob.stack_size())
464 .aux_data_size(config.aux_data_size())
465 .build()
466 .map_err(Error::from_static_str)?;
467
468 if config.is_strict || cfg!(debug_assertions) {
469 log::trace!("Checking imports...");
470 for (nth_import, import) in blob.imports().into_iter().enumerate() {
471 if let Some(ref import) = import {
472 log::trace!(" Import #{}: {}", nth_import, import);
473 } else {
474 log::trace!(" Import #{}: INVALID", nth_import);
475 if config.is_strict {
476 bail_static!("found an invalid import");
477 }
478 }
479 }
480
481 log::trace!("Checking jump table...");
482 for (nth_entry, code_offset) in blob.jump_table().iter().enumerate() {
483 if cast(code_offset.0).to_usize() >= blob.code().len() {
484 log::trace!(
485 " Invalid jump table entry #{nth_entry}: {code_offset} (should be less than {})",
486 blob.code().len()
487 );
488 if config.is_strict {
489 bail_static!("out of range jump table entry found");
490 }
491 }
492 }
493 };
494
495 if_compiler_is_supported! {
496 let exports = {
497 log::trace!("Parsing exports...");
498 let mut exports = Vec::with_capacity(1);
499 for export in blob.exports() {
500 log::trace!(" Export at {}: {}", export.program_counter(), export.symbol());
501 if config.is_strict && cast(export.program_counter().0).to_usize() >= blob.code().len() {
502 bail!(
503 "out of range export found; export {} points to code offset {}, while the code blob is only {} bytes",
504 export.symbol(),
505 export.program_counter(),
506 blob.code().len(),
507 );
508 }
509
510 exports.push(export);
511 }
512 exports
513 };
514 }
515
516 let init = GuestInit {
517 page_size: config.page_size,
518 ro_data: blob.ro_data(),
519 rw_data: blob.rw_data(),
520 ro_data_size: blob.ro_data_size(),
521 rw_data_size: blob.rw_data_size(),
522 stack_size: blob.stack_size(),
523 aux_data_size: config.aux_data_size(),
524 };
525
526 #[allow(unused_macros)]
527 macro_rules! compile_module {
528 ($sandbox_kind:ident, $bitness_kind:ident, $build_static_dispatch_table:ident, $visitor_name:ident, $module_kind:ident) => {
529 match cost_model {
530 CostModelKind::Simple(ref cost_model) => {
531 compile_module!(
532 $sandbox_kind,
533 $bitness_kind,
534 $build_static_dispatch_table,
535 $visitor_name,
536 $module_kind,
537 GasVisitor,
538 GasVisitor,
539 GasVisitor::new(cost_model.clone())
540 )
541 }
542 CostModelKind::Full(cost_model) => {
543 use polkavm_common::simulator::Simulator;
544 let gas_visitor = Simulator::<$bitness_kind, ()>::new(blob.code(), blob.isa(), cost_model, ());
545 compile_module!(
546 $sandbox_kind,
547 $bitness_kind,
548 $build_static_dispatch_table,
549 $visitor_name,
550 $module_kind,
551 Simulator::<'a, $bitness_kind, ()>,
552 Simulator::<$bitness_kind, ()>,
553 gas_visitor
554 )
555 }
556 }
557 };
558
559 ($sandbox_kind:ident, $bitness_kind:ident, $build_static_dispatch_table:ident, $visitor_name:ident, $module_kind:ident, $gas_kind:ty, $gas_kind_no_lifetime:ty, $gas_visitor:expr) => {{
560 type VisitorTy<'a> = crate::compiler::CompilerVisitor<'a, $sandbox_kind, $bitness_kind, $gas_kind>;
561 let (mut visitor, aux) = crate::compiler::CompilerVisitor::<$sandbox_kind, $bitness_kind, $gas_kind_no_lifetime>::new(
562 &engine.state.compiler_cache,
563 config,
564 blob.isa(),
565 blob.jump_table(),
566 blob.code(),
567 blob.bitmask(),
568 &exports,
569 config.step_tracing || engine.crosscheck,
570 cast(blob.code().len()).assert_always_fits_in_u32(),
571 init,
572 $gas_visitor,
573 )?;
574
575 blob.visit(
576 polkavm_common::program::$build_static_dispatch_table!($visitor_name, VisitorTy<'a>),
577 &mut visitor,
578 );
579
580 let global = $sandbox_kind::downcast_global_state(engine.state.sandbox_global.as_ref().unwrap());
581 let module = visitor.finish_compilation(global, &engine.state.compiler_cache, aux)?;
582 Some(CompiledModuleKind::$module_kind(module))
583 }};
584 }
585
586 let compiled_module: Option<CompiledModuleKind> = if_compiler_is_supported! {
587 {
588 if engine.selected_backend == BackendKind::Compiler {
589 if let Some(selected_sandbox) = engine.selected_sandbox {
590 match selected_sandbox {
591 SandboxKind::Linux => {
592 #[cfg(target_os = "linux")]
593 match blob.isa() {
594 InstructionSetKind::ReviveV1 => compile_module!(SandboxLinux, B64, build_static_dispatch_table_revive_v1, COMPILER_VISITOR_LINUX, Linux),
595 InstructionSetKind::JamV1 => compile_module!(SandboxLinux, B64, build_static_dispatch_table_jam_v1, COMPILER_VISITOR_LINUX, Linux),
596 InstructionSetKind::Latest32 => compile_module!(SandboxLinux, B32, build_static_dispatch_table_latest32, COMPILER_VISITOR_LINUX, Linux),
597 InstructionSetKind::Latest64 => compile_module!(SandboxLinux, B64, build_static_dispatch_table_latest64, COMPILER_VISITOR_LINUX, Linux),
598 }
599
600 #[cfg(not(target_os = "linux"))]
601 {
602 log::debug!("Selecetd sandbox unavailable: 'linux'");
603 None
604 }
605 },
606 SandboxKind::Generic => {
607 #[cfg(feature = "generic-sandbox")]
608 match blob.isa() {
609 InstructionSetKind::ReviveV1 => compile_module!(SandboxGeneric, B64, build_static_dispatch_table_revive_v1, COMPILER_VISITOR_GENERIC, Generic),
610 InstructionSetKind::JamV1 => compile_module!(SandboxGeneric, B64, build_static_dispatch_table_jam_v1, COMPILER_VISITOR_GENERIC, Generic),
611 InstructionSetKind::Latest32 => compile_module!(SandboxGeneric, B32, build_static_dispatch_table_latest32, COMPILER_VISITOR_GENERIC, Generic),
612 InstructionSetKind::Latest64 => compile_module!(SandboxGeneric, B64, build_static_dispatch_table_latest64, COMPILER_VISITOR_GENERIC, Generic),
613 }
614
615 #[cfg(not(feature = "generic-sandbox"))]
616 {
617 log::debug!("Selected sandbox unavailable: 'generic'");
618 None
619 }
620 },
621 }
622 } else {
623 None
624 }
625 } else {
626 None
627 }
628 } else {{
629 None
630 }}
631 };
632
633 let compiled_module = compiled_module.unwrap_or(CompiledModuleKind::Unavailable);
634 log::trace!("Processing finished!");
635
636 assert!(compiled_module.is_some() || engine.interpreter_enabled);
637 if compiled_module.is_some() {
638 log::debug!("Backend used: 'compiled'");
639 } else {
640 log::debug!("Backend used: 'interpreted'");
641 }
642
643 let memory_map = init.memory_map().map_err(Error::from_static_str)?;
644 log::debug!(
645 " Memory map: RO data: 0x{:08x}..0x{:08x} ({}/{} bytes, non-zero until 0x{:08x})",
646 memory_map.ro_data_range().start,
647 memory_map.ro_data_range().end,
648 blob.ro_data().len(),
649 memory_map.ro_data_range().len(),
650 cast(memory_map.ro_data_range().start).to_usize() + blob.ro_data().len(),
651 );
652 log::debug!(
653 " Memory map: RW data: 0x{:08x}..0x{:08x} ({}/{} bytes, non-zero until 0x{:08x})",
654 memory_map.rw_data_range().start,
655 memory_map.rw_data_range().end,
656 blob.rw_data().len(),
657 memory_map.rw_data_range().len(),
658 cast(memory_map.rw_data_range().start).to_usize() + blob.rw_data().len(),
659 );
660 log::debug!(
661 " Memory map: Stack: 0x{:08x}..0x{:08x} ({}/{} bytes)",
662 memory_map.stack_range().start,
663 memory_map.stack_range().end,
664 blob.stack_size(),
665 memory_map.stack_range().len(),
666 );
667 log::debug!(
668 " Memory map: Aux: 0x{:08x}..0x{:08x} ({}/{} bytes requested)",
669 memory_map.aux_data_range().start,
670 memory_map.aux_data_range().end,
671 config.aux_data_size(),
672 memory_map.aux_data_range().len(),
673 );
674
675 let page_shift = memory_map.page_size().ilog2();
676 let page_size_mask = (1 << page_shift) - 1;
677
678 let module = Arc::new(ModulePrivate {
679 engine_state: Some(Arc::clone(&engine.state)),
680
681 blob,
682 compiled_module,
683 memory_map,
684 gas_metering: config.gas_metering,
685 is_strict: config.is_strict,
686 step_tracing: config.step_tracing,
687 dynamic_paging: config.dynamic_paging,
688 crosscheck: engine.crosscheck,
689 page_size_mask,
690 page_shift,
691 cost_model,
692 is_per_instruction_metering: config.is_per_instruction_metering,
693
694 #[cfg(feature = "module-cache")]
695 module_key,
696 });
697
698 #[cfg(feature = "module-cache")]
699 if let Some(module_key) = module_key {
700 return Ok(engine.state.module_cache.insert(module_key, module));
701 }
702
703 Ok(Module(Some(module)))
704 }
705
706 pub fn is_64_bit(&self) -> bool {
708 self.state().blob.is_64_bit()
709 }
710
711 #[cfg_attr(not(feature = "module-cache"), allow(unused_variables))]
713 pub fn from_cache(engine: &Engine, config: &ModuleConfig, blob: &ProgramBlob) -> Option<Self> {
714 #[cfg(feature = "module-cache")]
715 {
716 let cost_model = config.cost_model.clone().unwrap_or_else(|| engine.default_cost_model.clone());
717 let (_, module) = engine.state.module_cache.get(config, blob, &cost_model);
718 module
719 }
720
721 #[cfg(not(feature = "module-cache"))]
722 None
723 }
724
725 pub fn instantiate(&self) -> Result<RawInstance, Error> {
727 let compiled_module = &self.state().compiled_module;
728 let Some(engine_state) = self.state().engine_state.as_ref() else {
729 return Err(Error::from_static_str("failed to instantiate module: empty module"));
730 };
731
732 let backend = if_compiler_is_supported! {
733 {{
734 match compiled_module {
735 #[cfg(target_os = "linux")]
736 CompiledModuleKind::Linux(..) => {
737 let compiled_instance = SandboxInstance::<SandboxLinux>::spawn_and_load_module(Arc::clone(engine_state), self)?;
738 Some(InstanceBackend::CompiledLinux(compiled_instance))
739 },
740 #[cfg(feature = "generic-sandbox")]
741 CompiledModuleKind::Generic(..) => {
742 let compiled_instance = SandboxInstance::<SandboxGeneric>::spawn_and_load_module(Arc::clone(engine_state), self)?;
743 Some(InstanceBackend::CompiledGeneric(compiled_instance))
744 },
745 CompiledModuleKind::Unavailable => None
746 }
747 }} else {
748 match compiled_module {
749 CompiledModuleKind::Unavailable => None
750 }
751 }
752 };
753
754 let backend = match backend {
755 Some(backend) => backend,
756 None => InstanceBackend::Interpreted(InterpretedInstance::new_from_module(
757 self.clone(),
758 false,
759 engine_state.imperfect_logger_filtering_workaround,
760 )),
761 };
762
763 let crosscheck_instance = if self.state().crosscheck && !matches!(backend, InstanceBackend::Interpreted(..)) {
764 Some(Box::new(InterpretedInstance::new_from_module(self.clone(), true, false)))
765 } else {
766 None
767 };
768
769 Ok(RawInstance {
770 module: self.clone(),
771 backend,
772 crosscheck_instance,
773 host_side_aux_write_protect: false,
774 })
775 }
776
777 pub fn memory_map(&self) -> &MemoryMap {
779 &self.state().memory_map
780 }
781
782 pub fn default_sp(&self) -> RegValue {
784 u64::from(self.memory_map().stack_address_high())
785 }
786
787 pub fn exports(&self) -> impl Iterator<Item = crate::program::ProgramExport<&[u8]>> + Clone {
789 self.state().blob.exports()
790 }
791
792 pub fn imports(&self) -> Imports {
794 self.state().blob.imports()
795 }
796
    /// Returns the compiled native machine code, if this module was compiled.
    pub fn machine_code(&self) -> Option<&[u8]> {
        if_compiler_is_supported! {
            {
                match self.state().compiled_module {
                    #[cfg(target_os = "linux")]
                    CompiledModuleKind::Linux(ref module) => Some(module.machine_code()),
                    #[cfg(feature = "generic-sandbox")]
                    CompiledModuleKind::Generic(ref module) => Some(module.machine_code()),
                    CompiledModuleKind::Unavailable => None,
                }
            } else {
                None
            }
        }
    }
816
    /// Returns the address at which the machine code is loaded in the guest,
    /// if known (currently only for the Linux sandbox).
    pub fn machine_code_origin(&self) -> Option<u64> {
        if_compiler_is_supported! {
            {
                match self.state().compiled_module {
                    #[cfg(target_os = "linux")]
                    CompiledModuleKind::Linux(..) => Some(polkavm_common::zygote::VM_ADDR_NATIVE_CODE),
                    #[cfg(feature = "generic-sandbox")]
                    CompiledModuleKind::Generic(..) => None,
                    CompiledModuleKind::Unavailable => None,
                }
            } else {
                None
            }
        }
    }
836
    /// Returns the mapping of program counters to native machine code
    /// offsets, if this module was compiled.
    pub fn program_counter_to_machine_code_offset(&self) -> Option<&[(ProgramCounter, u32)]> {
        if_compiler_is_supported! {
            {
                match self.state().compiled_module {
                    #[cfg(target_os = "linux")]
                    CompiledModuleKind::Linux(ref module) => Some(module.program_counter_to_machine_code_offset()),
                    #[cfg(feature = "generic-sandbox")]
                    CompiledModuleKind::Generic(ref module) => Some(module.program_counter_to_machine_code_offset()),
                    CompiledModuleKind::Unavailable => None,
                }
            } else {
                None
            }
        }
    }
865
    /// Calculates the gas cost of the basic block starting at `code_offset`.
    pub fn calculate_gas_cost_for(&self, code_offset: ProgramCounter) -> Option<Gas> {
        // Reject offsets which are inside the code but aren't valid jump
        // targets. NOTE(review): offsets at/past the end of the code are not
        // rejected here and fall through to the bounded iterator — presumably
        // deliberate; confirm against callers.
        if !self.is_jump_target_valid(code_offset) && code_offset.0 < self.code_len() {
            return None;
        }

        let gas = match self.state().cost_model {
            CostModelKind::Simple(ref cost_model) => {
                let gas_visitor = GasVisitor::new(cost_model.clone());
                let instructions = self.instructions_bounded_at(code_offset);
                crate::gas::calculate_for_block(gas_visitor, instructions)
            }
            CostModelKind::Full(cost_model) => {
                use polkavm_common::simulator::Simulator;
                let instructions = self.instructions_bounded_at(code_offset);
                // The simulator is monomorphized over the bitness, so pick
                // the matching instantiation.
                if self.is_64_bit() {
                    let gas_visitor = Simulator::<B64, ()>::new(self.blob().code(), self.blob().isa(), cost_model, ());
                    crate::gas::calculate_for_block(gas_visitor, instructions)
                } else {
                    let gas_visitor = Simulator::<B32, ()>::new(self.blob().code(), self.blob().isa(), cost_model, ());
                    crate::gas::calculate_for_block(gas_visitor, instructions)
                }
            }
        };

        Some(i64::from(gas.0))
    }
896
897 #[cold]
898 fn display_instruction_at(&self, program_counter: ProgramCounter) -> impl core::fmt::Display {
899 let state = self.state();
900 Self::display_instruction_at_impl(
901 state.blob.isa(),
902 state.blob.code(),
903 state.blob.bitmask(),
904 state.blob.is_64_bit(),
905 program_counter,
906 )
907 }
908
909 #[cold]
910 pub(crate) fn display_instruction_at_impl(
911 instruction_set: InstructionSetKind,
912 code: &[u8],
913 bitmask: &[u8],
914 is_64_bit: bool,
915 program_counter: ProgramCounter,
916 ) -> impl core::fmt::Display {
917 struct MaybeInstruction(Option<polkavm_common::program::ParsedInstruction>, bool);
918 impl core::fmt::Display for MaybeInstruction {
919 fn fmt(&self, fmt: &mut core::fmt::Formatter) -> core::fmt::Result {
920 if let Some(instruction) = self.0 {
921 let mut format = polkavm_common::program::InstructionFormat::default();
922 format.is_64_bit = self.1;
923 instruction.display(&format).fmt(fmt)?;
924 Ok(())
925 } else {
926 write!(fmt, "<NONE>")
927 }
928 }
929 }
930
931 MaybeInstruction(
932 Instructions::new_bounded(instruction_set, code, bitmask, program_counter.0).next(),
933 is_64_bit,
934 )
935 }
936
    /// Logs the instruction and (when debug info is present) the source-level
    /// location corresponding to `pc`, at the given log level.
    pub(crate) fn debug_print_location(&self, log_level: log::Level, pc: ProgramCounter) {
        log::log!(log_level, "  At #{pc}: {}", self.display_instruction_at(pc));

        let Ok(Some(mut line_program)) = self.state().blob.get_debug_line_program_at(pc) else {
            return;
        };

        log::log!(log_level, "  Source location:");
        // Bounded scan of the line program; 128 iterations is an arbitrary
        // cap to avoid spending unbounded time on malformed debug info.
        for _ in 0..128 {
            let Ok(Some(region_info)) = line_program.run() else { break };

            if !region_info.instruction_range().contains(&pc) {
                continue;
            }

            for frame in region_info.frames() {
                // Single-letter tag for the frame kind: function entry,
                // call site, or line record.
                let kind = match frame.kind() {
                    FrameKind::Enter => 'f',
                    FrameKind::Call => 'c',
                    FrameKind::Line => 'l',
                };

                if let Ok(full_name) = frame.full_name() {
                    if let Ok(Some(location)) = frame.location() {
                        log::log!(log_level, "    ({kind}) '{full_name}' [{location}]");
                    } else {
                        log::log!(log_level, "    ({kind}) '{full_name}'");
                    }
                }
            }
        }
    }
970}
971
// The concrete execution backend behind a `RawInstance`: one of the sandboxed
// compiled backends (when available on this target) or the interpreter.
if_compiler_is_supported! {
    {
        enum InstanceBackend {
            #[cfg(target_os = "linux")]
            CompiledLinux(SandboxInstance<SandboxLinux>),
            #[cfg(feature = "generic-sandbox")]
            CompiledGeneric(SandboxInstance<SandboxGeneric>),
            Interpreted(InterpretedInstance),
        }
    } else {
        enum InstanceBackend {
            Interpreted(InterpretedInstance),
        }
    }
}
987
/// An error which can happen when accessing the guest's memory.
#[derive(Debug)]
pub enum MemoryAccessError {
    /// The access touched memory outside of the accessible address range.
    OutOfRangeAccess { address: u32, length: u64 },
    /// Any other underlying error.
    Error(Error),
}
994
// `Display` and `Debug` are implemented below/above, so the default
// `core::error::Error` methods suffice.
impl core::error::Error for MemoryAccessError {}
996
997impl core::fmt::Display for MemoryAccessError {
998 fn fmt(&self, fmt: &mut core::fmt::Formatter) -> core::fmt::Result {
999 match self {
1000 MemoryAccessError::OutOfRangeAccess { address, length } => {
1001 write!(
1002 fmt,
1003 "out of range memory access in 0x{:x}-0x{:x} ({} bytes)",
1004 address,
1005 u64::from(*address) + length,
1006 length
1007 )
1008 }
1009 MemoryAccessError::Error(error) => {
1010 write!(fmt, "memory access failed: {error}")
1011 }
1012 }
1013 }
1014}
1015
1016impl From<MemoryAccessError> for alloc::string::String {
1017 fn from(error: MemoryAccessError) -> alloc::string::String {
1018 alloc::string::ToString::to_string(&error)
1019 }
1020}
1021
// Dispatches `$e` against the active backend: `$backend` is bound to the
// sandbox for the compiled backends, or to the interpreter directly. The
// `|mut …|` arm is the same but with mutable access.
if_compiler_is_supported! {
    {
        macro_rules! access_backend {
            ($itself:expr, |$backend:ident| $e:expr) => {
                match $itself {
                    #[cfg(target_os = "linux")]
                    InstanceBackend::CompiledLinux(ref $backend) => {
                        let $backend = $backend.sandbox();
                        $e
                    },
                    #[cfg(feature = "generic-sandbox")]
                    InstanceBackend::CompiledGeneric(ref $backend) => {
                        let $backend = $backend.sandbox();
                        $e
                    },
                    InstanceBackend::Interpreted(ref $backend) => $e,
                }
            };

            ($itself:expr, |mut $backend:ident| $e:expr) => {
                match $itself {
                    #[cfg(target_os = "linux")]
                    InstanceBackend::CompiledLinux(ref mut $backend) => {
                        let $backend = $backend.sandbox_mut();
                        $e
                    },
                    #[cfg(feature = "generic-sandbox")]
                    InstanceBackend::CompiledGeneric(ref mut $backend) => {
                        let $backend = $backend.sandbox_mut();
                        $e
                    },
                    InstanceBackend::Interpreted(ref mut $backend) => $e,
                }
            };
        }
    } else {
        macro_rules! access_backend {
            ($itself:expr, |$backend:ident| $e:expr) => {
                match $itself {
                    InstanceBackend::Interpreted(ref $backend) => $e,
                }
            };

            ($itself:expr, |mut $backend:ident| $e:expr) => {
                match $itself {
                    InstanceBackend::Interpreted(ref mut $backend) => $e,
                }
            };
        }
    }
}
1073
/// A low-level VM instance: a module paired with backend-specific execution state.
pub struct RawInstance {
    // The module this instance was instantiated from.
    module: Module,
    // The concrete execution engine (compiled sandbox or interpreter).
    backend: InstanceBackend,
    // Optional shadow interpreter; when present, operations are mirrored onto
    // it and the results compared against the main backend to catch bugs.
    crosscheck_instance: Option<Box<InterpretedInstance>>,
    // When `true`, host-side writes into the aux data region are rejected
    // (see `get_write_upper_limit`).
    host_side_aux_write_protect: bool,
}
1080
1081impl RawInstance {
    /// Returns the module from which this instance was created.
    pub fn module(&self) -> &Module {
        &self.module
    }
1086
    /// Returns whether the instance's module is 64-bit.
    pub fn is_64_bit(&self) -> bool {
        self.module.is_64_bit()
    }
1091
    /// Cold diagnostic path invoked when the backend reports a trap while
    /// debug logging is enabled: decodes the trapping instruction and logs
    /// the memory range the guest tried to access, plus hints about nearby
    /// memory regions.
    #[cold]
    fn on_trap(&self) {
        use crate::program::Instruction;

        if let Some(program_counter) = self.program_counter() {
            self.module.debug_print_location(log::Level::Debug, program_counter);
            if let Some(instruction) = self.module.instructions_bounded_at(program_counter).next() {
                // For each memory-accessing instruction extract the optional
                // base register, the immediate offset and the access width in
                // bytes; bail out for any non-memory instruction.
                let (base, offset, length) = match instruction.kind {
                    Instruction::load_indirect_u8(_, base, offset)
                    | Instruction::load_indirect_i8(_, base, offset)
                    | Instruction::store_indirect_u8(_, base, offset)
                    | Instruction::store_imm_indirect_u8(base, offset, _) => (Some(base), offset, 1),
                    Instruction::load_indirect_u16(_, base, offset)
                    | Instruction::load_indirect_i16(_, base, offset)
                    | Instruction::store_indirect_u16(_, base, offset)
                    | Instruction::store_imm_indirect_u16(base, offset, _) => (Some(base), offset, 2),
                    Instruction::load_indirect_u32(_, base, offset)
                    | Instruction::load_indirect_i32(_, base, offset)
                    | Instruction::store_indirect_u32(_, base, offset)
                    | Instruction::store_imm_indirect_u32(base, offset, _) => (Some(base), offset, 4),
                    Instruction::load_indirect_u64(_, base, offset)
                    | Instruction::store_indirect_u64(_, base, offset)
                    | Instruction::store_imm_indirect_u64(base, offset, _) => (Some(base), offset, 8),
                    Instruction::load_u8(_, offset)
                    | Instruction::load_i8(_, offset)
                    | Instruction::store_u8(_, offset)
                    | Instruction::store_imm_u8(offset, _) => (None, offset, 1),
                    Instruction::load_u16(_, offset)
                    | Instruction::load_i16(_, offset)
                    | Instruction::store_u16(_, offset)
                    | Instruction::store_imm_u16(offset, _) => (None, offset, 2),
                    Instruction::load_u32(_, offset)
                    | Instruction::load_i32(_, offset)
                    | Instruction::store_u32(_, offset)
                    | Instruction::store_imm_u32(offset, _) => (None, offset, 4),
                    Instruction::load_u64(_, offset) | Instruction::store_u64(_, offset) | Instruction::store_imm_u64(offset, _) => {
                        (None, offset, 8)
                    }
                    _ => return,
                };

                // Compute the effective address: offset plus the base
                // register's current value (if any), truncated to 32 bits.
                let mut offset = u64::from(offset);
                if let Some(base) = base {
                    offset = offset.wrapping_add(self.reg(base.get()));
                }

                offset &= 0xffffffff;
                let offset_end = offset.wrapping_add(length) & 0xffffffff;

                log::debug!("Trapped when trying to access address: 0x{offset:08x}-0x{offset_end:08x}");
                if !self.module.is_dynamic_paging() {
                    // With a static memory layout we can point the user at the
                    // region they most likely tried (and failed) to access.
                    let aux_address = u64::from(self.module.memory_map().aux_data_address());
                    let aux_size = u64::from(self.module.memory_map().aux_data_size());
                    let stack_address_hi = u64::from(self.module.memory_map().stack_address_high());
                    let stack_address_lo = u64::from(self.module.memory_map().stack_address_low());
                    if offset >= aux_address {
                        if aux_size > 0 {
                            let aux_address_end = aux_address + aux_size;
                            log::debug!(" Auxiliary data range: 0x{aux_address:08x}..0x{aux_address_end:08x}");
                        }
                    } else if offset < stack_address_hi && offset >= stack_address_lo.wrapping_sub(32 * 1024 * 1024) {
                        // Accesses up to 32 MiB below the stack look like a
                        // stack overflow, so suggest growing the stack.
                        log::debug!(" Current stack range: 0x{stack_address_lo:08x}-0x{stack_address_hi:08x}");
                        log::debug!(" Hint: try increasing your stack size with: 'polkavm_derive::min_stack_size'");
                    }
                }
            }
        }
    }
1160
    /// Resumes execution of the instance until the next interruption.
    ///
    /// Requires the next program counter to have been set. Returns
    /// `InterruptKind::NotEnoughGas` immediately if the gas counter is
    /// already negative, and again after any interruption that left it
    /// negative.
    pub fn run(&mut self) -> Result<InterruptKind, Error> {
        if self.next_program_counter().is_none() {
            return Err(Error::from_static_str("failed to run: next program counter is not set"));
        }

        if self.gas() < 0 {
            return Ok(InterruptKind::NotEnoughGas);
        }

        loop {
            let interruption = access_backend!(self.backend, |mut backend| backend
                .run()
                .map_err(|error| format!("execution failed: {error}")))?;
            log::trace!("Interrupted: {:?}", interruption);

            // Emit extra trap diagnostics, but only when debug logging is on.
            if matches!(interruption, InterruptKind::Trap) && log::log_enabled!(log::Level::Debug) {
                self.on_trap();
            }

            if let Some(ref mut crosscheck) = self.crosscheck_instance {
                // Mirror the run on the shadow interpreter and verify both
                // backends agree on the interruption kind, registers, gas and
                // both program counters; any divergence is a VM bug.
                let is_step = matches!(interruption, InterruptKind::Step);
                let expected_interruption = crosscheck.run().expect("crosscheck failed");
                if interruption != expected_interruption {
                    panic!("run: crosscheck mismatch, interpreter = {expected_interruption:?}, backend = {interruption:?}");
                }

                // With async gas metering exact register/gas state may lag,
                // so skip the strict comparison in that mode.
                if self.module.gas_metering() != Some(GasMeteringKind::Async) {
                    for reg in Reg::ALL {
                        let value = access_backend!(self.backend, |backend| backend.reg(reg));
                        let expected_value = crosscheck.reg(reg);
                        if value != expected_value {
                            panic!("run: crosscheck mismatch for {reg}, interpreter = 0x{expected_value:x}, backend = 0x{value:x}");
                        }
                    }
                }

                let crosscheck_gas = crosscheck.gas();
                let crosscheck_program_counter = crosscheck.program_counter();
                let crosscheck_next_program_counter = crosscheck.next_program_counter();
                if self.module.gas_metering() != Some(GasMeteringKind::Async) {
                    let gas = self.gas();
                    if gas != crosscheck_gas {
                        panic!("run: crosscheck mismatch for gas, interpreter = {crosscheck_gas}, backend = {gas}");
                    }
                }

                if self.program_counter() != crosscheck_program_counter {
                    panic!(
                        "run: crosscheck mismatch for program counter, interpreter = {crosscheck_program_counter:?}, backend = {:?}",
                        self.program_counter()
                    );
                }

                if self.next_program_counter() != crosscheck_next_program_counter {
                    panic!(
                        "run: crosscheck mismatch for next program counter, interpreter = {crosscheck_next_program_counter:?}, backend = {:?}",
                        self.next_program_counter()
                    );
                }

                // A `Step` interruption that the caller didn't ask for (step
                // tracing disabled on the module) is internal to the
                // crosscheck machinery, so swallow it and keep running.
                if is_step && !self.module().state().step_tracing {
                    continue;
                }
            }

            if self.gas() < 0 {
                return Ok(InterruptKind::NotEnoughGas);
            }

            break Ok(interruption);
        }
    }
1234
    /// Returns the current value of the given register.
    pub fn reg(&self, reg: Reg) -> RegValue {
        access_backend!(self.backend, |backend| backend.reg(reg))
    }
1239
    /// Sets the given register to `value`, mirroring the write onto the
    /// crosscheck interpreter when one is attached.
    pub fn set_reg(&mut self, reg: Reg, value: RegValue) {
        if let Some(ref mut crosscheck) = self.crosscheck_instance {
            crosscheck.set_reg(reg, value);
        }

        access_backend!(self.backend, |mut backend| backend.set_reg(reg, value))
    }
1248
    /// Returns the current remaining gas; a negative value means the
    /// instance has run out of gas.
    pub fn gas(&self) -> Gas {
        access_backend!(self.backend, |backend| backend.gas())
    }
1256
    /// Sets the remaining gas, mirroring the change onto the crosscheck
    /// interpreter when one is attached.
    pub fn set_gas(&mut self, gas: Gas) {
        if let Some(ref mut crosscheck) = self.crosscheck_instance {
            crosscheck.set_gas(gas);
        }

        access_backend!(self.backend, |mut backend| backend.set_gas(gas))
    }
1265
    /// Returns the current program counter, if known.
    pub fn program_counter(&self) -> Option<ProgramCounter> {
        access_backend!(self.backend, |backend| backend.program_counter())
    }
1270
    /// Returns the program counter from which execution will resume, if set.
    pub fn next_program_counter(&self) -> Option<ProgramCounter> {
        access_backend!(self.backend, |backend| backend.next_program_counter())
    }
1277
    /// Sets the program counter from which the next `run` will resume,
    /// mirroring the change onto the crosscheck interpreter when attached.
    pub fn set_next_program_counter(&mut self, pc: ProgramCounter) {
        if let Some(ref mut crosscheck) = self.crosscheck_instance {
            crosscheck.set_next_program_counter(pc);
        }

        access_backend!(self.backend, |mut backend| backend.set_next_program_counter(pc))
    }
1286
1287 pub fn clear_regs(&mut self) {
1289 for reg in Reg::ALL {
1290 self.set_reg(reg, 0);
1291 }
1292 }
1293
    /// Changes how much of the auxiliary data region is accessible.
    ///
    /// The size is rounded up to the module's page size. Fails on modules
    /// with dynamic paging, or when `size` exceeds the aux region's maximum.
    pub fn set_accessible_aux_size(&mut self, size: u32) -> Result<(), Error> {
        if self.module.is_dynamic_paging() {
            return Err("setting accessible aux size is only possible on modules without dynamic paging".into());
        }

        if size > self.module.memory_map().aux_data_size() {
            return Err(format!(
                "cannot set accessible aux size: the maximum is {}, while tried to set {}",
                self.module.memory_map().aux_data_size(),
                size
            )
            .into());
        }

        // Round up before mirroring so backend and crosscheck agree exactly.
        let size = self.module.round_to_page_size_up(size);
        if let Some(ref mut crosscheck) = self.crosscheck_instance {
            crosscheck.set_accessible_aux_size(size);
        }

        access_backend!(self.backend, |mut backend| backend
            .set_accessible_aux_size(size)
            .into_result("failed to set accessible aux size"))?;

        debug_assert_eq!(access_backend!(self.backend, |backend| backend.accessible_aux_size()), size);
        Ok(())
    }
1321
    /// Enables or disables host-side write protection of the aux data region.
    ///
    /// This only affects the host-side write APIs (the flag is consulted by
    /// `get_write_upper_limit`); it is not supported with dynamic paging.
    pub fn set_host_side_aux_write_protect(&mut self, is_write_protected: bool) -> Result<(), Error> {
        if self.module.is_dynamic_paging() {
            return Err("write-protecting the aux data region is only possible on modules without dynamic paging".into());
        }

        self.host_side_aux_write_protect = is_write_protected;
        Ok(())
    }
1336
    /// Resets the instance's memory to its initial state, mirroring the
    /// reset onto the crosscheck interpreter when one is attached.
    pub fn reset_memory(&mut self) -> Result<(), Error> {
        if let Some(ref mut crosscheck) = self.crosscheck_instance {
            crosscheck.reset_memory();
        }

        access_backend!(self.backend, |mut backend| backend
            .reset_memory()
            .into_result("failed to reset the instance's memory"))
    }
1347
    /// Returns whether `size` bytes starting at `address` are accessible
    /// with at least the given protection.
    ///
    /// Zero-sized accesses are always accessible; the lowest 64 KiB of the
    /// address space never are.
    pub fn is_memory_accessible(&self, address: u32, size: u32, minimum_protection: MemoryProtection) -> bool {
        if size == 0 {
            return true;
        }

        // The first 64 KiB are permanently inaccessible.
        if address < 0x10000 {
            return false;
        }

        // Reads may span the whole 32-bit address space; writes are capped
        // lower when the aux region is host-side write protected.
        let upper_limit = match minimum_protection {
            MemoryProtection::Read => 0x100000000,
            MemoryProtection::ReadWrite => self.get_write_upper_limit(),
        };

        if u64::from(address) + cast(size).to_u64() > upper_limit {
            return false;
        }

        // Checks containment using 64-bit arithmetic so `address + size`
        // cannot overflow.
        #[inline]
        fn is_within(range: core::ops::Range<u32>, address: u32, size: u32) -> bool {
            let address_end = u64::from(address) + cast(size).to_u64();
            address >= range.start && address_end <= u64::from(range.end)
        }

        if !self.module.is_dynamic_paging() {
            // Static layout: test the access against each known region.
            let map = self.module.memory_map();
            if is_within(map.stack_range(), address, size) {
                return true;
            }

            // The read-write data region extends to the current heap top,
            // rounded up to the page size.
            let heap_size = self.heap_size();
            let heap_top = map.heap_base() + heap_size;
            let heap_top = self.module.round_to_page_size_up(heap_top);
            if is_within(map.rw_data_address()..heap_top, address, size) {
                return true;
            }

            // Only the currently accessible prefix of the aux region counts.
            let aux_size = access_backend!(self.backend, |backend| backend.accessible_aux_size());
            if is_within(map.aux_data_address()..map.aux_data_address() + aux_size, address, size) {
                return true;
            }

            // The read-only data region satisfies read-only requests only.
            if matches!(minimum_protection, MemoryProtection::Read) && is_within(map.ro_data_range(), address, size) {
                return true;
            }

            false
        } else {
            // Dynamic paging: the backend tracks page accessibility itself.
            access_backend!(self.backend, |backend| backend.is_memory_accessible(
                address,
                size,
                minimum_protection
            ))
        }
    }
1406
    /// Reads guest memory at `address` into `buffer`, returning the
    /// initialized slice on success.
    ///
    /// Fails with `OutOfRangeAccess` when the read starts below 64 KiB or
    /// would run past the end of the 32-bit address space.
    pub fn read_memory_into<'slice, B>(&self, address: u32, buffer: &'slice mut B) -> Result<&'slice mut [u8], MemoryAccessError>
    where
        B: ?Sized + AsUninitSliceMut,
    {
        let slice = buffer.as_uninit_slice_mut();
        if slice.is_empty() {
            // SAFETY: an empty slice contains no bytes, so there is nothing
            // left uninitialized.
            unsafe {
                return Ok(polkavm_common::utils::slice_assume_init_mut(slice));
            }
        }

        if address < 0x10000 {
            return Err(MemoryAccessError::OutOfRangeAccess {
                address,
                length: cast(slice.len()).to_u64(),
            });
        }

        if u64::from(address) + cast(slice.len()).to_u64() > 0x100000000 {
            return Err(MemoryAccessError::OutOfRangeAccess {
                address,
                length: cast(slice.len()).to_u64(),
            });
        }

        let length = slice.len();
        let result = access_backend!(self.backend, |backend| backend.read_memory_into(address, slice));
        if let Some(ref crosscheck) = self.crosscheck_instance {
            // Repeat the read on the shadow interpreter; both backends must
            // return identical data, or both must fail. The buffer is filled
            // with a 0xfa pattern so untouched bytes are detectable.
            let mut expected_data: Vec<core::mem::MaybeUninit<u8>> = alloc::vec![core::mem::MaybeUninit::new(0xfa); length];
            let expected_result = crosscheck.read_memory_into(address, &mut expected_data);
            let expected_success = expected_result.is_ok();
            let success = result.is_ok();
            let results_match = match (&result, &expected_result) {
                (Ok(result), Ok(expected_result)) => result == expected_result,
                (Err(_), Err(_)) => true,
                _ => false,
            };
            if !results_match {
                let address_end = u64::from(address) + cast(length).to_u64();
                if cfg!(debug_assertions) {
                    if let (Ok(result), Ok(expected_result)) = (result, expected_result) {
                        log::trace!("read_memory result (interpreter): {expected_result:?}");
                        log::trace!("read_memory result (backend): {result:?}");
                    }
                }
                panic!("read_memory: crosscheck mismatch, range = 0x{address:x}..0x{address_end:x}, interpreter = {expected_success}, backend = {success}");
            }
        }

        // In debug builds also verify the outcome is consistent with
        // `is_memory_accessible`.
        if cfg!(debug_assertions) {
            let is_inaccessible = !self.is_memory_accessible(address, cast(length).assert_always_fits_in_u32(), MemoryProtection::Read);
            if is_inaccessible != matches!(result, Err(MemoryAccessError::OutOfRangeAccess { .. })) {
                panic!(
                    "'read_memory_into' doesn't match with 'is_memory_accessible' for 0x{:x}-0x{:x} (read_memory_into = {:?}, is_memory_accessible = {})",
                    address,
                    cast(address).to_usize() + length,
                    result.map(|_| ()),
                    !is_inaccessible,
                );
            }
        }

        result
    }
1475
1476 fn get_write_upper_limit(&self) -> u64 {
1477 if self.host_side_aux_write_protect {
1478 debug_assert!(!self.module.is_dynamic_paging());
1479 u64::from(self.module.memory_map().stack_address_high())
1480 } else {
1481 0x100000000
1482 }
1483 }
1484
    /// Writes `data` into the guest's memory at `address`.
    ///
    /// Fails with `OutOfRangeAccess` when the write starts below 64 KiB or
    /// would exceed the host-side write limit (see `get_write_upper_limit`).
    pub fn write_memory(&mut self, address: u32, data: &[u8]) -> Result<(), MemoryAccessError> {
        if data.is_empty() {
            return Ok(());
        }

        if address < 0x10000 {
            return Err(MemoryAccessError::OutOfRangeAccess {
                address,
                length: cast(data.len()).to_u64(),
            });
        }

        if u64::from(address) + cast(data.len()).to_u64() > self.get_write_upper_limit() {
            return Err(MemoryAccessError::OutOfRangeAccess {
                address,
                length: cast(data.len()).to_u64(),
            });
        }

        let result = access_backend!(self.backend, |mut backend| backend.write_memory(address, data));
        if let Some(ref mut crosscheck) = self.crosscheck_instance {
            // Mirror the write on the shadow interpreter; both backends must
            // agree on whether it succeeded.
            let expected_result = crosscheck.write_memory(address, data);
            let expected_success = expected_result.is_ok();
            let success = result.is_ok();
            if success != expected_success {
                let address_end = u64::from(address) + cast(data.len()).to_u64();
                panic!("write_memory: crosscheck mismatch, range = 0x{address:x}..0x{address_end:x}, interpreter = {expected_success}, backend = {success}");
            }
        }

        // In debug builds also verify the outcome is consistent with
        // `is_memory_accessible`.
        if cfg!(debug_assertions) {
            let is_inaccessible =
                !self.is_memory_accessible(address, cast(data.len()).assert_always_fits_in_u32(), MemoryProtection::ReadWrite);
            if is_inaccessible != matches!(result, Err(MemoryAccessError::OutOfRangeAccess { .. })) {
                panic!(
                    "'write_memory' doesn't match with 'is_memory_accessible' for 0x{:x}-0x{:x} (write_memory = {:?}, is_memory_accessible = {})",
                    address,
                    cast(address).to_usize() + data.len(),
                    result,
                    !is_inaccessible,
                );
            }
        }

        result
    }
1534
    /// Reads `length` bytes of guest memory at `address` into a new `Vec`.
    pub fn read_memory(&self, address: u32, length: u32) -> Result<Vec<u8>, MemoryAccessError> {
        let mut buffer = Vec::new();
        buffer.reserve_exact(cast(length).to_usize());

        let pointer = buffer.as_ptr();
        // Read directly into the vector's spare capacity to avoid
        // zero-initializing it first.
        let slice = self.read_memory_into(address, buffer.spare_capacity_mut())?;

        // Make sure the slice returned is actually our buffer, fully filled.
        assert_eq!(slice.as_ptr(), pointer);
        assert_eq!(slice.len(), cast(length).to_usize());

        #[allow(unsafe_code)]
        // SAFETY: `read_memory_into` initialized exactly `length` bytes of
        // the spare capacity, as asserted above.
        unsafe {
            buffer.set_len(cast(length).to_usize());
        }

        Ok(buffer)
    }
1558
1559 pub fn read_u64(&self, address: u32) -> Result<u64, MemoryAccessError> {
1563 let mut buffer = [0; 8];
1564 self.read_memory_into(address, &mut buffer)?;
1565
1566 Ok(u64::from_le_bytes(buffer))
1567 }
1568
1569 pub fn write_u64(&mut self, address: u32, value: u64) -> Result<(), MemoryAccessError> {
1573 self.write_memory(address, &value.to_le_bytes())
1574 }
1575
1576 pub fn read_u32(&self, address: u32) -> Result<u32, MemoryAccessError> {
1580 let mut buffer = [0; 4];
1581 self.read_memory_into(address, &mut buffer)?;
1582
1583 Ok(u32::from_le_bytes(buffer))
1584 }
1585
1586 pub fn write_u32(&mut self, address: u32, value: u32) -> Result<(), MemoryAccessError> {
1590 self.write_memory(address, &value.to_le_bytes())
1591 }
1592
1593 pub fn read_u16(&self, address: u32) -> Result<u16, MemoryAccessError> {
1597 let mut buffer = [0; 2];
1598 self.read_memory_into(address, &mut buffer)?;
1599
1600 Ok(u16::from_le_bytes(buffer))
1601 }
1602
1603 pub fn write_u16(&mut self, address: u32, value: u16) -> Result<(), MemoryAccessError> {
1607 self.write_memory(address, &value.to_le_bytes())
1608 }
1609
1610 pub fn read_u8(&self, address: u32) -> Result<u8, MemoryAccessError> {
1614 let mut buffer = [0; 1];
1615 self.read_memory_into(address, &mut buffer)?;
1616
1617 Ok(buffer[0])
1618 }
1619
1620 pub fn write_u8(&mut self, address: u32, value: u8) -> Result<(), MemoryAccessError> {
1624 self.write_memory(address, &[value])
1625 }
1626
    /// Zeroes `length` bytes at `address` and sets the pages' protection.
    ///
    /// Only available on modules with dynamic paging; `address` must be a
    /// multiple of the page size (`length` is rounded up internally).
    pub fn zero_memory_with_memory_protection(
        &mut self,
        address: u32,
        length: u32,
        memory_protection: MemoryProtection,
    ) -> Result<(), MemoryAccessError> {
        if !self.module.is_dynamic_paging() {
            return Err(MemoryAccessError::Error(
                "'zero_memory_with_memory_protection' is only possible on modules with dynamic paging".into(),
            ));
        }

        if length == 0 {
            return Ok(());
        }

        if !self.module.is_multiple_of_page_size(address) {
            return Err(MemoryAccessError::Error("address not a multiple of page size".into()));
        }

        self.zero_memory_impl(address, length, Some(memory_protection))
    }
1655
    /// Zeroes `length` bytes of the guest's memory at `address`, leaving
    /// memory protection unchanged.
    pub fn zero_memory(&mut self, address: u32, length: u32) -> Result<(), MemoryAccessError> {
        self.zero_memory_impl(address, length, None)
    }
1665
    /// Shared implementation of `zero_memory` and
    /// `zero_memory_with_memory_protection`.
    ///
    /// Validates the range, optionally rounds `length` up to the page size
    /// (when a protection change was requested), performs the zeroing and
    /// crosschecks the result against the shadow interpreter.
    fn zero_memory_impl(
        &mut self,
        address: u32,
        length: u32,
        memory_protection: Option<MemoryProtection>,
    ) -> Result<(), MemoryAccessError> {
        if length == 0 {
            return Ok(());
        }

        // The lowest 64 KiB are never writable.
        if address < 0x10000 {
            return Err(MemoryAccessError::OutOfRangeAccess {
                address,
                length: u64::from(length),
            });
        }

        if u64::from(address) + u64::from(length) > self.get_write_upper_limit() {
            return Err(MemoryAccessError::OutOfRangeAccess {
                address,
                length: u64::from(length),
            });
        }

        // Protection changes operate on whole pages, so round up in that case.
        let length = if memory_protection.is_none() {
            length
        } else {
            self.module().round_to_page_size_up(length)
        };

        let result = access_backend!(self.backend, |mut backend| backend.zero_memory(address, length, memory_protection));
        if let Some(ref mut crosscheck) = self.crosscheck_instance {
            // Mirror onto the shadow interpreter; both must agree on success.
            let expected_result = crosscheck.zero_memory(address, length, memory_protection);
            let expected_success = expected_result.is_ok();
            let success = result.is_ok();
            if success != expected_success {
                let address_end = u64::from(address) + u64::from(length);
                panic!("zero_memory: crosscheck mismatch, range = 0x{address:x}..0x{address_end:x}, interpreter = {expected_success}, backend = {success}");
            }
        }

        // In debug builds (and only without a protection change) verify the
        // outcome is consistent with `is_memory_accessible`.
        if cfg!(debug_assertions) && memory_protection.is_none() {
            let is_inaccessible = !self.is_memory_accessible(address, length, MemoryProtection::ReadWrite);
            if is_inaccessible != matches!(result, Err(MemoryAccessError::OutOfRangeAccess { .. })) {
                panic!(
                    "'zero_memory' doesn't match with 'is_memory_accessible' for 0x{:x}-0x{:x} (zero_memory = {:?}, is_memory_accessible = {})",
                    address,
                    cast(address).to_usize() + cast(length).to_usize(),
                    result,
                    !is_inaccessible,
                );
            }
        }

        result
    }
1722
    /// Makes `length` bytes at `address` read-only (dynamic paging only).
    pub fn protect_memory(&mut self, address: u32, length: u32) -> Result<(), MemoryAccessError> {
        self.change_memory_protection(address, length, MemoryProtection::Read)
    }
1729
    /// Makes `length` bytes at `address` readable and writable (dynamic
    /// paging only).
    pub fn unprotect_memory(&mut self, address: u32, length: u32) -> Result<(), MemoryAccessError> {
        self.change_memory_protection(address, length, MemoryProtection::ReadWrite)
    }
1736
    /// Shared implementation of `protect_memory`/`unprotect_memory`:
    /// validates the range and forwards the protection change to the backend.
    fn change_memory_protection(&mut self, address: u32, length: u32, protection: MemoryProtection) -> Result<(), MemoryAccessError> {
        // Protection changes require per-page bookkeeping, which only the
        // dynamic paging mode provides.
        if !self.module.is_dynamic_paging() {
            return Err(MemoryAccessError::Error(
                "protecting/unprotecting memory is only possible on modules with dynamic paging".into(),
            ));
        }

        if length == 0 {
            return Ok(());
        }

        // The lowest 64 KiB are never accessible.
        if address < 0x10000 {
            return Err(MemoryAccessError::OutOfRangeAccess {
                address,
                length: u64::from(length),
            });
        }

        // The range must fit within the 32-bit address space.
        if u64::from(address) + u64::from(length) > 0x100000000 {
            return Err(MemoryAccessError::OutOfRangeAccess {
                address,
                length: u64::from(length),
            });
        }

        access_backend!(self.backend, |mut backend| backend
            .change_memory_protection(address, length, protection))
    }
1765
    /// Frees the pages covering `length` bytes at `address`.
    ///
    /// `address` must be a multiple of the page size. The operation is
    /// mirrored onto the crosscheck interpreter after the backend succeeds.
    pub fn free_pages(&mut self, address: u32, length: u32) -> Result<(), Error> {
        if length == 0 {
            return Ok(());
        }

        if !self.module.is_multiple_of_page_size(address) {
            return Err("address not a multiple of page size".into());
        }

        access_backend!(self.backend, |mut backend| backend
            .free_pages(address, length)
            .into_result("free pages failed"))?;
        if let Some(ref mut crosscheck) = self.crosscheck_instance {
            crosscheck.free_pages(address, length);
        }

        Ok(())
    }
1788
    /// Returns the current size of the guest's heap in bytes.
    pub fn heap_size(&self) -> u32 {
        access_backend!(self.backend, |backend| backend.heap_size())
    }
1793
    /// Grows the guest's heap by `size` bytes.
    ///
    /// Returns `None` when the heap cannot be grown; the crosscheck
    /// interpreter (when attached) must agree on success or failure.
    pub fn sbrk(&mut self, size: u32) -> Result<Option<u32>, Error> {
        let result = access_backend!(self.backend, |mut backend| backend.sbrk(size).into_result("sbrk failed"))?;
        if let Some(ref mut crosscheck) = self.crosscheck_instance {
            let expected_result = crosscheck.sbrk(size);
            let expected_success = expected_result.is_some();
            let success = result.is_some();
            if success != expected_success {
                panic!("sbrk: crosscheck mismatch, size = {size}, interpreter = {expected_success}, backend = {success}");
            }
        }

        Ok(result)
    }
1807
    /// Prepares the instance for a call to the entry point at `pc` with the
    /// given raw argument values.
    ///
    /// Clears all registers, sets the stack pointer to the module's default,
    /// points the return address at the magic return-to-host address, sets
    /// the next program counter and loads `args` into the argument registers.
    ///
    /// # Panics
    ///
    /// Panics if `args` contains more values than there are argument registers.
    pub fn prepare_call_untyped(&mut self, pc: ProgramCounter, args: &[RegValue]) {
        assert!(args.len() <= Reg::ARG_REGS.len(), "too many arguments");

        self.clear_regs();
        self.set_reg(Reg::SP, self.module.default_sp());
        self.set_reg(Reg::RA, u64::from(VM_ADDR_RETURN_TO_HOST));
        self.set_next_program_counter(pc);

        for (reg, &value) in Reg::ARG_REGS.into_iter().zip(args) {
            self.set_reg(reg, value);
        }
    }
1829
1830 pub fn prepare_call_typed<FnArgs>(&mut self, pc: ProgramCounter, args: FnArgs)
1836 where
1837 FnArgs: crate::linker::FuncArgs,
1838 {
1839 let mut regs = [0; Reg::ARG_REGS.len()];
1840 let mut input_count = 0;
1841 args._set(self.module().blob().is_64_bit(), |value| {
1842 assert!(input_count <= Reg::ARG_REGS.len(), "too many arguments");
1843 regs[input_count] = value;
1844 input_count += 1;
1845 });
1846
1847 self.prepare_call_untyped(pc, ®s);
1848 }
1849
    /// Extracts a typed return value from the argument registers, consuming
    /// one register per output value in order.
    pub fn get_result_typed<FnResult>(&self) -> FnResult
    where
        FnResult: crate::linker::FuncResult,
    {
        let mut output_count = 0;
        FnResult::_get(self.module().blob().is_64_bit(), || {
            let value = access_backend!(self.backend, |backend| backend.reg(Reg::ARG_REGS[output_count]));
            output_count += 1;
            value
        })
    }
1864
    /// Returns the PID of the sandbox process backing this instance, if any.
    pub fn pid(&self) -> Option<u32> {
        access_backend!(self.backend, |backend| backend.pid())
    }
1872
    /// Returns the native (host) program counter from which execution will
    /// resume, if the backend exposes one.
    pub fn next_native_program_counter(&self) -> Option<usize> {
        access_backend!(self.backend, |backend| backend.next_native_program_counter())
    }
1880
    /// Resets the interpreter's internal cache; a no-op on compiled backends.
    pub fn reset_interpreter_cache(&mut self) {
        // With no compiler support the interpreter is the only variant,
        // making this `if let` irrefutable.
        #[allow(irrefutable_let_patterns)]
        if let InstanceBackend::Interpreted(ref mut backend) = self.backend {
            backend.reset_interpreter_cache();
        }
    }
1888
    /// Configures a size limit for the interpreter's cache (`None` removes
    /// the limit's arguments); a no-op on compiled backends.
    pub fn set_interpreter_cache_size_limit(&mut self, cache_info: Option<SetCacheSizeLimitArgs>) -> Result<(), Error> {
        // With no compiler support the interpreter is the only variant,
        // making this `if let` irrefutable.
        #[allow(irrefutable_let_patterns)]
        if let InstanceBackend::Interpreted(ref mut backend) = self.backend {
            backend.set_interpreter_cache_size_limit(cache_info)?
        }
        Ok(())
    }
1896 }
1897}