use crate::export::Export;
use crate::externref::VMExternRefActivationsTable;
use crate::memory::{Memory, RuntimeMemoryCreator};
use crate::table::{Table, TableElement, TableElementType};
use crate::vmcontext::{
    VMBuiltinFunctionsArray, VMCallerCheckedFuncRef, VMContext, VMFunctionImport,
    VMGlobalDefinition, VMGlobalImport, VMMemoryDefinition, VMMemoryImport, VMOpaqueContext,
    VMRuntimeLimits, VMTableDefinition, VMTableImport, VMCONTEXT_MAGIC,
};
use crate::{
    ExportFunction, ExportGlobal, ExportMemory, ExportTable, Imports, ModuleRuntimeInfo, Store,
    VMFunctionBody, VMSharedSignatureIndex, WasmFault,
};
use anyhow::Error;
use anyhow::Result;
use memoffset::offset_of;
use std::alloc::{self, Layout};
use std::any::Any;
use std::convert::TryFrom;
use std::hash::Hash;
use std::ops::Range;
use std::ptr::NonNull;
use std::sync::atomic::AtomicU64;
use std::sync::Arc;
use std::{mem, ptr};
use wasmtime_environ::{
    packed_option::ReservedValue, DataIndex, DefinedGlobalIndex, DefinedMemoryIndex,
    DefinedTableIndex, ElemIndex, EntityIndex, EntityRef, EntitySet, FuncIndex, GlobalIndex,
    GlobalInit, HostPtr, MemoryIndex, Module, PrimaryMap, SignatureIndex, TableIndex,
    TableInitialization, Trap, VMOffsets, WasmType,
};

mod allocator;

pub use allocator::*;

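/// A handle to a runtime WebAssembly instance.
///
/// The `vmctx` field must be the last field of this structure: the
/// instance's `VMContext` data is allocated inline, immediately after the
/// `Instance` itself (see `alloc_layout` and `vmctx_plus_offset`), which is
/// why this type is `#[repr(C)]`.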
#[repr(C)] // field ordering is significant: `vmctx` must come last
pub(crate) struct Instance {
    /// The runtime info (module, offsets, compiled functions, ...) this
    /// instance was created from.
    runtime_info: Arc<dyn ModuleRuntimeInfo>,

    /// WebAssembly linear memories defined within this instance.
    memories: PrimaryMap<DefinedMemoryIndex, Memory>,

    /// WebAssembly tables defined within this instance.
    tables: PrimaryMap<DefinedTableIndex, Table>,

    /// Passive element segments that have been dropped via `elem.drop`.
    dropped_elements: EntitySet<ElemIndex>,

    /// Passive data segments that have been dropped via `data.drop`.
    dropped_data: EntitySet<DataIndex>,

    /// Arbitrary host state associated with this instance.
    host_state: Box<dyn Any + Send + Sync>,

    /// Index of this instance within its allocator.
    index: usize,

    /// The `VMContext` for this instance; the context's data is allocated
    /// immediately after this field, so it must remain last.
    vmctx: VMContext,
}

#[allow(clippy::cast_ptr_alignment)]
impl Instance {
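    /// Helper for allocating and initializing an `Instance` from a set of
    /// already-created memories and tables.
    ///
    /// # Safety
    ///
    /// The caller must provide an allocation request whose imports and
    /// runtime info are valid for the module being instantiated; the
    /// returned handle refers to the new allocation.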
    unsafe fn new(
        req: InstanceAllocationRequest,
        index: usize,
        memories: PrimaryMap<DefinedMemoryIndex, Memory>,
        tables: PrimaryMap<DefinedTableIndex, Table>,
    ) -> InstanceHandle {
        let layout = Self::alloc_layout(req.runtime_info.offsets());
        let ptr = alloc::alloc(layout);
        if ptr.is_null() {
            alloc::handle_alloc_error(layout);
        }
        let ptr = ptr.cast::<Instance>();

        let module = req.runtime_info.module();
        let dropped_elements = EntitySet::with_capacity(module.passive_elements.len());
        let dropped_data = EntitySet::with_capacity(module.passive_data_map.len());

        ptr::write(
            ptr,
            Instance {
                runtime_info: req.runtime_info.clone(),
                index,
                memories,
                tables,
                dropped_elements,
                dropped_data,
                host_state: req.host_state,
                vmctx: VMContext {
                    _marker: std::marker::PhantomPinned,
                },
            },
        );

        (*ptr).initialize_vmctx(module, req.runtime_info.offsets(), req.store, req.imports);
        InstanceHandle { instance: ptr }
    }

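    /// Helper for computing a pointer into the `VMContext` data that trails
    /// this `Instance` allocation: `offset` is a byte offset produced by
    /// `VMOffsets`, applied relative to `self.vmctx`.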
    unsafe fn vmctx_plus_offset<T>(&self, offset: u32) -> *const T {
        (std::ptr::addr_of!(self.vmctx).cast::<u8>())
            .add(usize::try_from(offset).unwrap())
            .cast()
    }

    unsafe fn vmctx_plus_offset_mut<T>(&mut self, offset: u32) -> *mut T {
        (std::ptr::addr_of_mut!(self.vmctx).cast::<u8>())
            .add(usize::try_from(offset).unwrap())
            .cast()
    }

    pub(crate) fn module(&self) -> &Arc<Module> {
        self.runtime_info.module()
    }

    fn offsets(&self) -> &VMOffsets<HostPtr> {
        self.runtime_info.offsets()
    }

    fn imported_function(&self, index: FuncIndex) -> &VMFunctionImport {
        unsafe { &*self.vmctx_plus_offset(self.offsets().vmctx_vmfunction_import(index)) }
    }

    fn imported_table(&self, index: TableIndex) -> &VMTableImport {
        unsafe { &*self.vmctx_plus_offset(self.offsets().vmctx_vmtable_import(index)) }
    }

    fn imported_memory(&self, index: MemoryIndex) -> &VMMemoryImport {
        unsafe { &*self.vmctx_plus_offset(self.offsets().vmctx_vmmemory_import(index)) }
    }

    fn imported_global(&self, index: GlobalIndex) -> &VMGlobalImport {
        unsafe { &*self.vmctx_plus_offset(self.offsets().vmctx_vmglobal_import(index)) }
    }

    #[allow(dead_code)]
    fn table(&mut self, index: DefinedTableIndex) -> VMTableDefinition {
        unsafe { *self.table_ptr(index) }
    }

    fn set_table(&mut self, index: DefinedTableIndex, table: VMTableDefinition) {
        unsafe {
            *self.table_ptr(index) = table;
        }
    }

    fn table_ptr(&mut self, index: DefinedTableIndex) -> *mut VMTableDefinition {
        unsafe { self.vmctx_plus_offset_mut(self.offsets().vmctx_vmtable_definition(index)) }
    }

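    /// Get the `VMMemoryDefinition` for memory `index`, whether it is
    /// defined within this instance or imported from another.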
    pub(crate) fn get_memory(&self, index: MemoryIndex) -> VMMemoryDefinition {
        if let Some(defined_index) = self.module().defined_memory_index(index) {
            self.memory(defined_index)
        } else {
            let import = self.imported_memory(index);
            unsafe { VMMemoryDefinition::load(import.from) }
        }
    }

    pub(crate) fn get_runtime_memory(&mut self, index: MemoryIndex) -> &mut Memory {
        if let Some(defined_index) = self.module().defined_memory_index(index) {
            unsafe { &mut *self.get_defined_memory(defined_index) }
        } else {
            let import = self.imported_memory(index);
            let ctx = unsafe { &mut *import.vmctx };
            unsafe { &mut *ctx.instance_mut().get_defined_memory(import.index) }
        }
    }

    fn memory(&self, index: DefinedMemoryIndex) -> VMMemoryDefinition {
        unsafe { VMMemoryDefinition::load(self.memory_ptr(index)) }
    }

    fn set_memory(&self, index: DefinedMemoryIndex, mem: VMMemoryDefinition) {
        unsafe {
            *self.memory_ptr(index) = mem;
        }
    }

    fn memory_ptr(&self, index: DefinedMemoryIndex) -> *mut VMMemoryDefinition {
        unsafe { *self.vmctx_plus_offset(self.offsets().vmctx_vmmemory_pointer(index)) }
    }

    fn global(&mut self, index: DefinedGlobalIndex) -> &VMGlobalDefinition {
        unsafe { &*self.global_ptr(index) }
    }

    fn global_ptr(&mut self, index: DefinedGlobalIndex) -> *mut VMGlobalDefinition {
        unsafe { self.vmctx_plus_offset_mut(self.offsets().vmctx_vmglobal_definition(index)) }
    }

    pub(crate) fn defined_or_imported_global_ptr(
        &mut self,
        index: GlobalIndex,
    ) -> *mut VMGlobalDefinition {
        if let Some(index) = self.module().defined_global_index(index) {
            self.global_ptr(index)
        } else {
            self.imported_global(index).from
        }
    }

    pub fn runtime_limits(&mut self) -> *mut *const VMRuntimeLimits {
        unsafe { self.vmctx_plus_offset_mut(self.offsets().vmctx_runtime_limits()) }
    }

    pub fn epoch_ptr(&mut self) -> *mut *const AtomicU64 {
        unsafe { self.vmctx_plus_offset_mut(self.offsets().vmctx_epoch_ptr()) }
    }

    pub fn externref_activations_table(&mut self) -> *mut *mut VMExternRefActivationsTable {
        unsafe { self.vmctx_plus_offset_mut(self.offsets().vmctx_externref_activations_table()) }
    }

    #[inline]
    pub fn store(&self) -> *mut dyn Store {
        let ptr =
            unsafe { *self.vmctx_plus_offset::<*mut dyn Store>(self.offsets().vmctx_store()) };
        assert!(!ptr.is_null());
        ptr
    }

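    /// Configure (or clear) the `Store` pointer stored in the `VMContext`,
    /// along with the store-derived pointers that compiled code reads:
    /// runtime limits, the epoch counter, and the externref activations
    /// table.
    ///
    /// # Safety
    ///
    /// The store pointer, if `Some`, must outlive this instance's use of it.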
    pub unsafe fn set_store(&mut self, store: Option<*mut dyn Store>) {
        if let Some(store) = store {
            *self.vmctx_plus_offset_mut(self.offsets().vmctx_store()) = store;
            *self.runtime_limits() = (*store).vmruntime_limits();
            *self.epoch_ptr() = (*store).epoch_ptr();
            *self.externref_activations_table() = (*store).externref_activations_table().0;
        } else {
            // There is no null `*mut dyn Store`, so the slot is cleared by
            // zeroing both halves of the fat pointer.
            assert_eq!(
                mem::size_of::<*mut dyn Store>(),
                mem::size_of::<[*mut (); 2]>()
            );
            *self.vmctx_plus_offset_mut::<[*mut (); 2]>(self.offsets().vmctx_store()) =
                [ptr::null_mut(), ptr::null_mut()];

            *self.runtime_limits() = ptr::null_mut();
            *self.epoch_ptr() = ptr::null_mut();
            *self.externref_activations_table() = ptr::null_mut();
        }
    }

    pub(crate) unsafe fn set_callee(&mut self, callee: Option<NonNull<VMFunctionBody>>) {
        *self.vmctx_plus_offset_mut(self.offsets().vmctx_callee()) =
            callee.map_or(ptr::null_mut(), |c| c.as_ptr());
    }

    #[inline]
    pub fn vmctx(&self) -> &VMContext {
        &self.vmctx
    }

    #[inline]
    pub fn vmctx_ptr(&self) -> *mut VMContext {
        self.vmctx() as *const VMContext as *mut VMContext
    }

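    /// Look up an exported function by index, returning its funcref pointer
    /// wrapped as an `ExportFunction`.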
    fn get_exported_func(&mut self, index: FuncIndex) -> ExportFunction {
        let anyfunc = self.get_caller_checked_anyfunc(index).unwrap();
        let anyfunc = NonNull::new(anyfunc).unwrap();
        ExportFunction { anyfunc }
    }

    fn get_exported_table(&mut self, index: TableIndex) -> ExportTable {
        let (definition, vmctx) = if let Some(def_index) = self.module().defined_table_index(index)
        {
            (self.table_ptr(def_index), self.vmctx_ptr())
        } else {
            let import = self.imported_table(index);
            (import.from, import.vmctx)
        };
        ExportTable {
            definition,
            vmctx,
            table: self.module().table_plans[index].clone(),
        }
    }

    fn get_exported_memory(&mut self, index: MemoryIndex) -> ExportMemory {
        let (definition, vmctx, def_index) =
            if let Some(def_index) = self.module().defined_memory_index(index) {
                (self.memory_ptr(def_index), self.vmctx_ptr(), def_index)
            } else {
                let import = self.imported_memory(index);
                (import.from, import.vmctx, import.index)
            };
        ExportMemory {
            definition,
            vmctx,
            memory: self.module().memory_plans[index].clone(),
            index: def_index,
        }
    }

    fn get_exported_global(&mut self, index: GlobalIndex) -> ExportGlobal {
        ExportGlobal {
            definition: if let Some(def_index) = self.module().defined_global_index(index) {
                self.global_ptr(def_index)
            } else {
                self.imported_global(index).from
            },
            global: self.module().globals[index],
        }
    }

    pub fn exports(&self) -> indexmap::map::Iter<String, EntityIndex> {
        self.module().exports.iter()
    }

    #[inline]
    pub fn host_state(&self) -> &dyn Any {
        &*self.host_state
    }

    #[inline]
    pub(crate) fn vmctx_offset() -> isize {
        offset_of!(Self, vmctx) as isize
    }

    unsafe fn table_index(&mut self, table: &VMTableDefinition) -> DefinedTableIndex {
        let index = DefinedTableIndex::new(
            usize::try_from(
                (table as *const VMTableDefinition)
                    .offset_from(self.table_ptr(DefinedTableIndex::new(0))),
            )
            .unwrap(),
        );
        assert!(index.index() < self.tables.len());
        index
    }

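    /// Grow memory `index` by `delta` pages, on behalf of either this
    /// instance or, for imported memories, the instance that owns the
    /// memory.
    ///
    /// Returns the memory's previous size on success, `Ok(None)` if the
    /// growth was refused (for example by the store's resource limiter), or
    /// an error if growth failed.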
    pub(crate) fn memory_grow(
        &mut self,
        index: MemoryIndex,
        delta: u64,
    ) -> Result<Option<usize>, Error> {
        let (idx, instance) = if let Some(idx) = self.module().defined_memory_index(index) {
            (idx, self)
        } else {
            let import = self.imported_memory(index);
            unsafe {
                let foreign_instance = (*import.vmctx).instance_mut();
                (import.index, foreign_instance)
            }
        };
        let store = unsafe { &mut *instance.store() };
        let memory = &mut instance.memories[idx];

        let result = unsafe { memory.grow(delta, Some(store)) };

        // Shared memories own their `VMMemoryDefinition`, so only non-shared
        // memories need the definition cached in the vmctx refreshed after
        // growing.
        if memory.as_shared_memory().is_none() {
            let vmmemory = memory.vmmemory();
            instance.set_memory(idx, vmmemory);
        }

        result
    }

    pub(crate) fn table_element_type(&mut self, table_index: TableIndex) -> TableElementType {
        unsafe { (*self.get_table(table_index)).element_type() }
    }

    pub(crate) fn table_grow(
        &mut self,
        table_index: TableIndex,
        delta: u32,
        init_value: TableElement,
    ) -> Result<Option<u32>, Error> {
        let (defined_table_index, instance) =
            self.get_defined_table_index_and_instance(table_index);
        instance.defined_table_grow(defined_table_index, delta, init_value)
    }

    fn defined_table_grow(
        &mut self,
        table_index: DefinedTableIndex,
        delta: u32,
        init_value: TableElement,
    ) -> Result<Option<u32>, Error> {
        let store = unsafe { &mut *self.store() };
        let table = self
            .tables
            .get_mut(table_index)
            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));

        let result = unsafe { table.grow(delta, init_value, store) };

        // Keep the `VMTableDefinition` cached in the vmctx in sync with the
        // table's new base pointer and length.
        let element = self.tables[table_index].vmtable();
        self.set_table(table_index, element);

        result
    }

    fn alloc_layout(offsets: &VMOffsets<HostPtr>) -> Layout {
        let size = mem::size_of::<Self>()
            .checked_add(usize::try_from(offsets.size_of_vmctx()).unwrap())
            .unwrap();
        let align = mem::align_of::<Self>();
        Layout::from_size_align(size, align).unwrap()
    }

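    /// Construct a `VMCallerCheckedFuncRef` for function `index` into the
    /// preallocated `into` slot, resolving the function pointer and vmctx
    /// either from this instance's own compiled code or from the import.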
    fn construct_anyfunc(
        &mut self,
        index: FuncIndex,
        sig: SignatureIndex,
        into: *mut VMCallerCheckedFuncRef,
    ) {
        let type_index = unsafe {
            let base: *const VMSharedSignatureIndex =
                *self.vmctx_plus_offset_mut(self.offsets().vmctx_signature_ids_array());
            *base.add(sig.index())
        };

        let (func_ptr, vmctx) = if let Some(def_index) = self.module().defined_func_index(index) {
            (
                self.runtime_info.function(def_index),
                VMOpaqueContext::from_vmcontext(self.vmctx_ptr()),
            )
        } else {
            let import = self.imported_function(index);
            (import.body.as_ptr(), import.vmctx)
        };

        unsafe {
            *into = VMCallerCheckedFuncRef {
                vmctx,
                type_index,
                func_ptr: NonNull::new(func_ptr).expect("Non-null function pointer"),
            };
        }
    }

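    /// Get a `VMCallerCheckedFuncRef` for the given function index, or
    /// `None` for the reserved sentinel index.
    ///
    /// The funcref slot lives in the vmctx and is (re)written on each call;
    /// the write is idempotent, so no "already initialized" tracking is
    /// needed.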
    pub(crate) fn get_caller_checked_anyfunc(
        &mut self,
        index: FuncIndex,
    ) -> Option<*mut VMCallerCheckedFuncRef> {
        if index == FuncIndex::reserved_value() {
            return None;
        }

        unsafe {
            let func = &self.module().functions[index];
            let sig = func.signature;
            let anyfunc: *mut VMCallerCheckedFuncRef = self
                .vmctx_plus_offset_mut::<VMCallerCheckedFuncRef>(
                    self.offsets().vmctx_anyfunc(func.anyfunc),
                );
            self.construct_anyfunc(index, sig, anyfunc);

            Some(anyfunc)
        }
    }

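    /// The `table.init` operation: copy `len` elements of passive element
    /// segment `elem_index`, starting at `src`, into table `table_index`
    /// starting at `dst`. A dropped segment behaves as an empty one.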
    pub(crate) fn table_init(
        &mut self,
        table_index: TableIndex,
        elem_index: ElemIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let module = self.module().clone();

        let elements = match module.passive_elements_map.get(&elem_index) {
            Some(index) if !self.dropped_elements.contains(elem_index) => {
                module.passive_elements[*index].as_ref()
            }
            _ => &[],
        };
        self.table_init_segment(table_index, elements, dst, src, len)
    }

    pub(crate) fn table_init_segment(
        &mut self,
        table_index: TableIndex,
        elements: &[FuncIndex],
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let table = unsafe { &mut *self.get_table(table_index) };

        let elements = match elements
            .get(usize::try_from(src).unwrap()..)
            .and_then(|s| s.get(..usize::try_from(len).unwrap()))
        {
            Some(elements) => elements,
            None => return Err(Trap::TableOutOfBounds),
        };

        match table.element_type() {
            TableElementType::Func => {
                table.init_funcs(
                    dst,
                    elements.iter().map(|idx| {
                        self.get_caller_checked_anyfunc(*idx)
                            .unwrap_or(std::ptr::null_mut())
                    }),
                )?;
            }

            TableElementType::Extern => {
                debug_assert!(elements.iter().all(|e| *e == FuncIndex::reserved_value()));
                table.fill(dst, TableElement::ExternRef(None), len)?;
            }
        }
        Ok(())
    }

    pub(crate) fn elem_drop(&mut self, elem_index: ElemIndex) {
        self.dropped_elements.insert(elem_index);
    }

    pub(crate) fn get_defined_memory(&mut self, index: DefinedMemoryIndex) -> *mut Memory {
        ptr::addr_of_mut!(self.memories[index])
    }

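    /// The `memory.copy` operation: copy `len` bytes between (possibly the
    /// same) linear memories, trapping if either range is out of bounds.
    /// `ptr::copy` is used so that overlapping ranges are handled correctly.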
    pub(crate) fn memory_copy(
        &mut self,
        dst_index: MemoryIndex,
        dst: u64,
        src_index: MemoryIndex,
        src: u64,
        len: u64,
    ) -> Result<(), Trap> {
        let src_mem = self.get_memory(src_index);
        let dst_mem = self.get_memory(dst_index);

        let src = self.validate_inbounds(src_mem.current_length(), src, len)?;
        let dst = self.validate_inbounds(dst_mem.current_length(), dst, len)?;

        unsafe {
            let dst = dst_mem.base.add(dst);
            let src = src_mem.base.add(src);
            ptr::copy(src, dst, len as usize);
        }

        Ok(())
    }

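    /// Bounds-check an access of `len` bytes starting at `ptr` against a
    /// memory (or data segment) of size `max`, returning `ptr` as a `usize`
    /// if the access is in bounds.
    ///
    /// For example, with `max = 10`, `ptr = 4`, and `len = 6` the access
    /// ends exactly at byte 10 and is allowed, while `len = 7` traps. The
    /// `checked_add` also rejects `ptr + len` overflowing `u64`.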
    fn validate_inbounds(&self, max: usize, ptr: u64, len: u64) -> Result<usize, Trap> {
        let oob = || Trap::MemoryOutOfBounds;
        let end = ptr
            .checked_add(len)
            .and_then(|i| usize::try_from(i).ok())
            .ok_or_else(oob)?;
        if end > max {
            Err(oob())
        } else {
            Ok(ptr as usize)
        }
    }

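    /// The `memory.fill` operation: set `len` bytes starting at `dst` to
    /// `val`, trapping if the range is out of bounds.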
    pub(crate) fn memory_fill(
        &mut self,
        memory_index: MemoryIndex,
        dst: u64,
        val: u8,
        len: u64,
    ) -> Result<(), Trap> {
        let memory = self.get_memory(memory_index);
        let dst = self.validate_inbounds(memory.current_length(), dst, len)?;

        unsafe {
            let dst = memory.base.add(dst);
            ptr::write_bytes(dst, val, len as usize);
        }

        Ok(())
    }

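    /// The `memory.init` operation: copy `len` bytes of passive data
    /// segment `data_index`, starting at `src`, into memory `memory_index`
    /// at `dst`. A dropped segment behaves as an empty one.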
    pub(crate) fn memory_init(
        &mut self,
        memory_index: MemoryIndex,
        data_index: DataIndex,
        dst: u64,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let range = match self.module().passive_data_map.get(&data_index).cloned() {
            Some(range) if !self.dropped_data.contains(data_index) => range,
            _ => 0..0,
        };
        self.memory_init_segment(memory_index, range, dst, src, len)
    }

    pub(crate) fn wasm_data(&self, range: Range<u32>) -> &[u8] {
        &self.runtime_info.wasm_data()[range.start as usize..range.end as usize]
    }

    pub(crate) fn memory_init_segment(
        &mut self,
        memory_index: MemoryIndex,
        range: Range<u32>,
        dst: u64,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let memory = self.get_memory(memory_index);
        let data = self.wasm_data(range);
        let dst = self.validate_inbounds(memory.current_length(), dst, len.into())?;
        let src = self.validate_inbounds(data.len(), src.into(), len.into())?;
        let len = len as usize;

        unsafe {
            let src_start = data.as_ptr().add(src);
            let dst_start = memory.base.add(dst);
            ptr::copy_nonoverlapping(src_start, dst_start, len);
        }

        Ok(())
    }

    pub(crate) fn data_drop(&mut self, data_index: DataIndex) {
        self.dropped_data.insert(data_index);
    }

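    /// Get a table by index, lazily initializing any as-yet-uninitialized
    /// funcref elements within `range` before returning.
    ///
    /// Funcref tables are initialized lazily: elements start out as a
    /// distinguished "uninit" value and are filled in from the module's
    /// table-initialization image the first time they are needed.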
    pub(crate) fn get_table_with_lazy_init(
        &mut self,
        table_index: TableIndex,
        range: impl Iterator<Item = u32>,
    ) -> *mut Table {
        let (idx, instance) = self.get_defined_table_index_and_instance(table_index);
        let elt_ty = instance.tables[idx].element_type();

        if elt_ty == TableElementType::Func {
            for i in range {
                let value = match instance.tables[idx].get(i) {
                    Some(value) => value,
                    // Out-of-bounds accesses trap elsewhere; there is
                    // nothing to initialize past the end of the table.
                    None => break,
                };
                if value.is_uninit() {
                    // Only modules with a `FuncTable` initialization image
                    // carry lazy-init information for their tables.
                    let table_init = match &instance.module().table_initialization {
                        TableInitialization::FuncTable { tables, .. } => tables.get(table_index),
                        _ => break,
                    };

                    // An element with no entry in the initialization image
                    // is a null funcref.
                    let func_index =
                        table_init.and_then(|indices| indices.get(i as usize).cloned());
                    let anyfunc = func_index
                        .and_then(|func_index| instance.get_caller_checked_anyfunc(func_index))
                        .unwrap_or(std::ptr::null_mut());

                    let value = TableElement::FuncRef(anyfunc);

                    instance.tables[idx]
                        .set(i, value)
                        .expect("Table type should match and index should be in-bounds");
                }
            }
        }

        ptr::addr_of_mut!(instance.tables[idx])
    }

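    /// Get a raw pointer to the `Table` for `table_index`, following the
    /// import chain to the owning instance if necessary.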
    pub(crate) fn get_table(&mut self, table_index: TableIndex) -> *mut Table {
        let (idx, instance) = self.get_defined_table_index_and_instance(table_index);
        ptr::addr_of_mut!(instance.tables[idx])
    }

    pub(crate) fn get_defined_table(&mut self, index: DefinedTableIndex) -> *mut Table {
        ptr::addr_of_mut!(self.tables[index])
    }

    pub(crate) fn get_defined_table_index_and_instance(
        &mut self,
        index: TableIndex,
    ) -> (DefinedTableIndex, &mut Instance) {
        if let Some(defined_table_index) = self.module().defined_table_index(index) {
            (defined_table_index, self)
        } else {
            let import = self.imported_table(index);
            unsafe {
                let foreign_instance = (*import.vmctx).instance_mut();
                let foreign_table_def = &*import.from;
                let foreign_table_index = foreign_instance.table_index(foreign_table_def);
                (foreign_table_index, foreign_instance)
            }
        }
    }

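    /// Initialize the `VMContext` data that trails this instance: the magic
    /// value, store-derived pointers, signature ids, builtin function
    /// array, imports, and table/memory/global definitions. Must be called
    /// exactly once, before the vmctx is handed to compiled code.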
    unsafe fn initialize_vmctx(
        &mut self,
        module: &Module,
        offsets: &VMOffsets<HostPtr>,
        store: StorePtr,
        imports: Imports,
    ) {
        assert!(std::ptr::eq(module, self.module().as_ref()));

        *self.vmctx_plus_offset_mut(offsets.vmctx_magic()) = VMCONTEXT_MAGIC;
        self.set_callee(None);
        self.set_store(store.as_raw());

        let signatures = self.runtime_info.signature_ids();
        *self.vmctx_plus_offset_mut(offsets.vmctx_signature_ids_array()) = signatures.as_ptr();

        *self.vmctx_plus_offset_mut(offsets.vmctx_builtin_functions()) =
            &VMBuiltinFunctionsArray::INIT;

        debug_assert_eq!(imports.functions.len(), module.num_imported_funcs);
        ptr::copy_nonoverlapping(
            imports.functions.as_ptr(),
            self.vmctx_plus_offset_mut(offsets.vmctx_imported_functions_begin()),
            imports.functions.len(),
        );
        debug_assert_eq!(imports.tables.len(), module.num_imported_tables);
        ptr::copy_nonoverlapping(
            imports.tables.as_ptr(),
            self.vmctx_plus_offset_mut(offsets.vmctx_imported_tables_begin()),
            imports.tables.len(),
        );
        debug_assert_eq!(imports.memories.len(), module.num_imported_memories);
        ptr::copy_nonoverlapping(
            imports.memories.as_ptr(),
            self.vmctx_plus_offset_mut(offsets.vmctx_imported_memories_begin()),
            imports.memories.len(),
        );
        debug_assert_eq!(imports.globals.len(), module.num_imported_globals);
        ptr::copy_nonoverlapping(
            imports.globals.as_ptr(),
            self.vmctx_plus_offset_mut(offsets.vmctx_imported_globals_begin()),
            imports.globals.len(),
        );

        // Initialize the defined tables' cached definitions.
        let mut ptr = self.vmctx_plus_offset_mut(offsets.vmctx_tables_begin());
        for i in 0..module.table_plans.len() - module.num_imported_tables {
            ptr::write(ptr, self.tables[DefinedTableIndex::new(i)].vmtable());
            ptr = ptr.add(1);
        }

        // Initialize the defined memories. Non-shared memories have their
        // `VMMemoryDefinition` stored in the vmctx's owned-memories area and
        // pointed to from the memories array; shared memories instead point
        // at the definition owned by the memory itself.
        let mut ptr = self.vmctx_plus_offset_mut(offsets.vmctx_memories_begin());
        let mut owned_ptr = self.vmctx_plus_offset_mut(offsets.vmctx_owned_memories_begin());
        for i in 0..module.memory_plans.len() - module.num_imported_memories {
            let defined_memory_index = DefinedMemoryIndex::new(i);
            let memory_index = module.memory_index(defined_memory_index);
            if module.memory_plans[memory_index].memory.shared {
                let def_ptr = self.memories[defined_memory_index]
                    .as_shared_memory()
                    .unwrap()
                    .vmmemory_ptr();
                ptr::write(ptr, def_ptr.cast_mut());
            } else {
                ptr::write(owned_ptr, self.memories[defined_memory_index].vmmemory());
                ptr::write(ptr, owned_ptr);
                owned_ptr = owned_ptr.add(1);
            }
            ptr = ptr.add(1);
        }

        self.initialize_vmctx_globals(module);
    }

    unsafe fn initialize_vmctx_globals(&mut self, module: &Module) {
        let num_imports = module.num_imported_globals;
        for (index, global) in module.globals.iter().skip(num_imports) {
            let def_index = module.defined_global_index(index).unwrap();
            let to = self.global_ptr(def_index);

            // Start from a zeroed definition, then apply the initializer.
            ptr::write(to, VMGlobalDefinition::new());

            match global.initializer {
                GlobalInit::I32Const(x) => *(*to).as_i32_mut() = x,
                GlobalInit::I64Const(x) => *(*to).as_i64_mut() = x,
                GlobalInit::F32Const(x) => *(*to).as_f32_bits_mut() = x,
                GlobalInit::F64Const(x) => *(*to).as_f64_bits_mut() = x,
                GlobalInit::V128Const(x) => *(*to).as_u128_mut() = x,
                GlobalInit::GetGlobal(x) => {
                    let from = if let Some(def_x) = module.defined_global_index(x) {
                        self.global(def_x)
                    } else {
                        &*self.imported_global(x).from
                    };
                    match global.wasm_ty {
                        // Externrefs are reference-counted, so they need a
                        // `clone` rather than a bitwise copy.
                        WasmType::ExternRef => {
                            *(*to).as_externref_mut() = from.as_externref().clone()
                        }
                        _ => ptr::copy_nonoverlapping(from, to, 1),
                    }
                }
                GlobalInit::RefFunc(f) => {
                    *(*to).as_anyfunc_mut() = self.get_caller_checked_anyfunc(f).unwrap()
                        as *const VMCallerCheckedFuncRef;
                }
                GlobalInit::RefNullConst => match global.wasm_ty {
                    // The zeroed definition above is already a null reference.
                    WasmType::FuncRef => {}
                    WasmType::ExternRef => {}
                    ty => panic!("unsupported reference type for global: {:?}", ty),
                },
                GlobalInit::Import => panic!("locally-defined global initialized as import"),
            }
        }
    }

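    /// Attempt to map a faulting native address to an offset within one of
    /// this instance's linear memories, describing the fault if it lies in
    /// wasm-accessible memory.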
    fn wasm_fault(&self, addr: usize) -> Option<WasmFault> {
        let mut fault = None;
        for (_, memory) in self.memories.iter() {
            let accessible = memory.wasm_accessible();
            if accessible.start <= addr && addr < accessible.end {
                // A native address should belong to at most one memory.
                assert!(fault.is_none());
                fault = Some(WasmFault {
                    memory_size: memory.byte_size(),
                    wasm_address: u64::try_from(addr - accessible.start).unwrap(),
                });
            }
        }
        fault
    }
}

impl Drop for Instance {
    fn drop(&mut self) {
        // Externref globals hold a reference count that must be released;
        // no other global kind needs a destructor.
        let module = self.module().clone();
        for (idx, global) in module.globals.iter() {
            let idx = match module.defined_global_index(idx) {
                Some(idx) => idx,
                None => continue,
            };
            match global.wasm_ty {
                WasmType::ExternRef => {}
                _ => continue,
            }
            unsafe {
                drop((*self.global_ptr(idx)).as_externref_mut().take());
            }
        }
    }
}

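/// A handle holding a raw pointer to an `Instance` of a WebAssembly module.
///
/// The handle does not deallocate the instance on drop; the instance
/// allocator that created it is responsible for that, which is also why
/// `clone` is `unsafe`.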
#[derive(Hash, PartialEq, Eq)]
pub struct InstanceHandle {
    instance: *mut Instance,
}

// Safety: `Instance` itself is `Send + Sync` (asserted below), so sending
// or sharing a raw pointer to one across threads is sound.
unsafe impl Send for InstanceHandle {}
unsafe impl Sync for InstanceHandle {}

fn _assert_send_sync() {
    fn _assert<T: Send + Sync>() {}
    _assert::<Instance>();
}

impl InstanceHandle {
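    /// Create an `InstanceHandle` from a raw `VMContext` pointer.
    ///
    /// # Safety
    ///
    /// `vmctx` must point to a valid, live `VMContext` allocated by this
    /// crate.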
    #[inline]
    pub unsafe fn from_vmctx(vmctx: *mut VMContext) -> Self {
        let instance = (&mut *vmctx).instance();
        Self {
            instance: instance as *const Instance as *mut Instance,
        }
    }

    pub fn vmctx(&self) -> &VMContext {
        self.instance().vmctx()
    }

    #[inline]
    pub fn vmctx_ptr(&self) -> *mut VMContext {
        self.instance().vmctx_ptr()
    }

    pub fn module(&self) -> &Arc<Module> {
        self.instance().module()
    }

    pub fn get_exported_func(&mut self, export: FuncIndex) -> ExportFunction {
        self.instance_mut().get_exported_func(export)
    }

    pub fn get_exported_global(&mut self, export: GlobalIndex) -> ExportGlobal {
        self.instance_mut().get_exported_global(export)
    }

    pub fn get_exported_memory(&mut self, export: MemoryIndex) -> ExportMemory {
        self.instance_mut().get_exported_memory(export)
    }

    pub fn get_exported_table(&mut self, export: TableIndex) -> ExportTable {
        self.instance_mut().get_exported_table(export)
    }

    pub fn get_export_by_index(&mut self, export: EntityIndex) -> Export {
        match export {
            EntityIndex::Function(i) => Export::Function(self.get_exported_func(i)),
            EntityIndex::Global(i) => Export::Global(self.get_exported_global(i)),
            EntityIndex::Table(i) => Export::Table(self.get_exported_table(i)),
            EntityIndex::Memory(i) => Export::Memory(self.get_exported_memory(i)),
        }
    }

    pub fn exports(&self) -> indexmap::map::Iter<String, EntityIndex> {
        self.instance().exports()
    }

    pub fn host_state(&self) -> &dyn Any {
        self.instance().host_state()
    }

    pub fn get_defined_memory(&mut self, index: DefinedMemoryIndex) -> *mut Memory {
        self.instance_mut().get_defined_memory(index)
    }

    pub unsafe fn table_index(&mut self, table: &VMTableDefinition) -> DefinedTableIndex {
        self.instance_mut().table_index(table)
    }

    pub fn get_defined_table(&mut self, index: DefinedTableIndex) -> *mut Table {
        self.instance_mut().get_defined_table(index)
    }

    pub fn get_defined_table_with_lazy_init(
        &mut self,
        index: DefinedTableIndex,
        range: impl Iterator<Item = u32>,
    ) -> *mut Table {
        let index = self.instance().module().table_index(index);
        self.instance_mut().get_table_with_lazy_init(index, range)
    }

    #[inline]
    pub(crate) fn instance(&self) -> &Instance {
        unsafe { &*(self.instance as *const Instance) }
    }

    pub(crate) fn instance_mut(&mut self) -> &mut Instance {
        unsafe { &mut *self.instance }
    }

    #[inline]
    pub fn store(&self) -> *mut dyn Store {
        self.instance().store()
    }

    pub unsafe fn set_store(&mut self, store: *mut dyn Store) {
        self.instance_mut().set_store(Some(store));
    }

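    /// Duplicate this handle.
    ///
    /// # Safety
    ///
    /// The underlying instance is not reference-counted, so the caller must
    /// ensure no clone is used after the instance is deallocated.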
    #[inline]
    pub unsafe fn clone(&self) -> InstanceHandle {
        InstanceHandle {
            instance: self.instance,
        }
    }

    pub fn initialize(&mut self, module: &Module, is_bulk_memory: bool) -> Result<()> {
        allocator::initialize_instance(self.instance_mut(), module, is_bulk_memory)
    }

    pub fn wasm_fault(&self, addr: usize) -> Option<WasmFault> {
        self.instance().wasm_fault(addr)
    }
}