//! Runtime library calls.
//!
//! These are functions in the runtime itself which compiled Wasm code calls
//! out to for operations it cannot perform inline, such as `memory.grow`,
//! table manipulation, and `externref` bookkeeping.

use crate::externref::VMExternRef;
use crate::table::{Table, TableElementType};
use crate::vmcontext::{VMCallerCheckedFuncRef, VMContext};
use crate::TrapReason;
use anyhow::Result;
use std::mem;
use std::ptr::{self, NonNull};
use std::time::{Duration, Instant};
use wasmtime_environ::{
    DataIndex, ElemIndex, FuncIndex, GlobalIndex, MemoryIndex, TableIndex, Trap,
};

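/// Trampolines which delegate to the libcall implementations in the parent
/// module, catching Rust panics and converting `Result` errors into raised
/// traps before control returns to Wasm.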
pub mod trampolines {
    use crate::{TrapReason, VMContext};

    macro_rules! libcall {
        (
            $(
                $( #[$attr:meta] )*
                $name:ident( vmctx: vmctx $(, $pname:ident: $param:ident )* ) $( -> $result:ident )?;
            )*
        ) => {paste::paste! {
            $(
                // Declare the libcall symbol so its address can be taken from
                // Rust; the symbol itself is defined by the
                // `wasm_to_libcall_trampoline!` invocation below.
                extern "C" {
                    #[allow(missing_docs)]
                    #[allow(improper_ctypes)]
                    pub fn $name(
                        vmctx: *mut VMContext,
                        $( $pname: libcall!(@ty $param), )*
                    ) $(-> libcall!(@ty $result))?;
                }

                wasm_to_libcall_trampoline!($name ; [<impl_ $name>]);

                // The function the trampoline transfers control to. It wraps
                // the corresponding implementation in the parent module,
                // catching any Rust panic and converting the return value into
                // its raw ABI representation (raising a trap on error). The
                // s390x trampoline refers to this symbol by name, so it must
                // not be mangled there.
                #[cfg_attr(target_arch = "s390x", no_mangle)]
                unsafe extern "C" fn [<impl_ $name>](
                    vmctx: *mut VMContext,
                    $( $pname: libcall!(@ty $param), )*
                ) $( -> libcall!(@ty $result))? {
                    let result = std::panic::catch_unwind(|| {
                        super::$name(vmctx, $($pname),*)
                    });
                    match result {
                        Ok(ret) => LibcallResult::convert(ret),
                        Err(panic) => crate::traphandlers::resume_panic(panic),
                    }
                }

                // Force a reference to each `impl_*` function to be emitted so
                // it is not stripped by the linker; it is otherwise only
                // referenced from the trampoline's assembly.
                #[allow(non_upper_case_globals)]
                #[used]
                static [<impl_ $name _ref>]: unsafe extern "C" fn(
                    *mut VMContext,
                    $( $pname: libcall!(@ty $param), )*
                ) $( -> libcall!(@ty $result))? = [<impl_ $name>];

            )*
        }};

        // Mapping from the Wasm-level types used in the builtin function
        // declarations to their raw ABI types.
        (@ty i32) => (u32);
        (@ty i64) => (u64);
        (@ty reference) => (*mut u8);
        (@ty pointer) => (*mut u8);
        (@ty vmctx) => (*mut VMContext);
    }

    wasmtime_environ::foreach_builtin_function!(libcall);

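    /// Helper trait for converting a libcall's return value into its raw ABI
    /// representation, raising a trap if the value is an `Err`.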
    trait LibcallResult {
        type Abi;
        unsafe fn convert(self) -> Self::Abi;
    }

    impl LibcallResult for () {
        type Abi = ();
        unsafe fn convert(self) {}
    }

    impl<T, E> LibcallResult for Result<T, E>
    where
        E: Into<TrapReason>,
    {
        type Abi = T;
        unsafe fn convert(self) -> T {
            match self {
                Ok(t) => t,
                Err(e) => crate::traphandlers::raise_trap(e.into()),
            }
        }
    }

    impl LibcallResult for *mut u8 {
        type Abi = *mut u8;
        unsafe fn convert(self) -> *mut u8 {
            self
        }
    }
}

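// Implementation of `memory.grow`: returns the previous size of the memory in
// Wasm pages, or `usize::max_value()` (cast to a pointer-sized value) if the
// memory could not be grown.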
unsafe fn memory32_grow(
    vmctx: *mut VMContext,
    delta: u64,
    memory_index: u32,
) -> Result<*mut u8, TrapReason> {
    let instance = (*vmctx).instance_mut();
    let memory_index = MemoryIndex::from_u32(memory_index);
    let result =
        match instance
            .memory_grow(memory_index, delta)
            .map_err(|error| TrapReason::User {
                error,
                needs_backtrace: true,
            })? {
            Some(size_in_bytes) => size_in_bytes / (wasmtime_environ::WASM_PAGE_SIZE as usize),
            None => usize::max_value(),
        };
    Ok(result as *mut _)
}

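// Implementation of `table.grow`: returns the previous size of the table, or
// `-1` if the table could not be grown.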
unsafe fn table_grow(
    vmctx: *mut VMContext,
    table_index: u32,
    delta: u32,
    init_value: *mut u8,
) -> Result<u32> {
    let instance = (*vmctx).instance_mut();
    let table_index = TableIndex::from_u32(table_index);
    // Interpret the raw `init_value` pointer according to the table's element
    // type: either a `VMCallerCheckedFuncRef` or a possibly-null `VMExternRef`.
    let element = match instance.table_element_type(table_index) {
        TableElementType::Func => (init_value as *mut VMCallerCheckedFuncRef).into(),
        TableElementType::Extern => {
            let init_value = if init_value.is_null() {
                None
            } else {
                Some(VMExternRef::clone_from_raw(init_value))
            };
            init_value.into()
        }
    };
    Ok(match instance.table_grow(table_index, delta, element)? {
        Some(r) => r,
        None => -1_i32 as u32,
    })
}

// The same implementation serves both flavors of the libcall.
use table_grow as table_grow_funcref;
use table_grow as table_grow_externref;

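// Implementation of `table.fill` for both `funcref` and `externref` tables.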
unsafe fn table_fill(
    vmctx: *mut VMContext,
    table_index: u32,
    dst: u32,
    val: *mut u8,
    len: u32,
) -> Result<(), Trap> {
    let instance = (*vmctx).instance_mut();
    let table_index = TableIndex::from_u32(table_index);
    let table = &mut *instance.get_table(table_index);
    match table.element_type() {
        TableElementType::Func => {
            let val = val as *mut VMCallerCheckedFuncRef;
            table.fill(dst, val.into(), len)
        }
        TableElementType::Extern => {
            let val = if val.is_null() {
                None
            } else {
                Some(VMExternRef::clone_from_raw(val))
            };
            table.fill(dst, val.into(), len)
        }
    }
}

use table_fill as table_fill_funcref;
use table_fill as table_fill_externref;

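// Implementation of `table.copy`.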
unsafe fn table_copy(
    vmctx: *mut VMContext,
    dst_table_index: u32,
    src_table_index: u32,
    dst: u32,
    src: u32,
    len: u32,
) -> Result<(), Trap> {
    let dst_table_index = TableIndex::from_u32(dst_table_index);
    let src_table_index = TableIndex::from_u32(src_table_index);
    let instance = (*vmctx).instance_mut();
    let dst_table = instance.get_table(dst_table_index);
    // Lazily initialize the source range so its elements are valid before the
    // copy takes place.
    let src_range = src..(src.checked_add(len).unwrap_or(u32::MAX));
    let src_table = instance.get_table_with_lazy_init(src_table_index, src_range);
    Table::copy(dst_table, src_table, dst, src, len)
}

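// Implementation of `table.init`.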
unsafe fn table_init(
    vmctx: *mut VMContext,
    table_index: u32,
    elem_index: u32,
    dst: u32,
    src: u32,
    len: u32,
) -> Result<(), Trap> {
    let table_index = TableIndex::from_u32(table_index);
    let elem_index = ElemIndex::from_u32(elem_index);
    let instance = (*vmctx).instance_mut();
    instance.table_init(table_index, elem_index, dst, src, len)
}

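// Implementation of `elem.drop`.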
unsafe fn elem_drop(vmctx: *mut VMContext, elem_index: u32) {
    let elem_index = ElemIndex::from_u32(elem_index);
    let instance = (*vmctx).instance_mut();
    instance.elem_drop(elem_index);
}

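// Implementation of `memory.copy`.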
unsafe fn memory_copy(
    vmctx: *mut VMContext,
    dst_index: u32,
    dst: u64,
    src_index: u32,
    src: u64,
    len: u64,
) -> Result<(), Trap> {
    let src_index = MemoryIndex::from_u32(src_index);
    let dst_index = MemoryIndex::from_u32(dst_index);
    let instance = (*vmctx).instance_mut();
    instance.memory_copy(dst_index, dst, src_index, src, len)
}

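// Implementation of `memory.fill`.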
unsafe fn memory_fill(
    vmctx: *mut VMContext,
    memory_index: u32,
    dst: u64,
    val: u32,
    len: u64,
) -> Result<(), Trap> {
    let memory_index = MemoryIndex::from_u32(memory_index);
    let instance = (*vmctx).instance_mut();
    instance.memory_fill(memory_index, dst, val as u8, len)
}

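// Implementation of `memory.init`.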
unsafe fn memory_init(
    vmctx: *mut VMContext,
    memory_index: u32,
    data_index: u32,
    dst: u64,
    src: u32,
    len: u32,
) -> Result<(), Trap> {
    let memory_index = MemoryIndex::from_u32(memory_index);
    let data_index = DataIndex::from_u32(data_index);
    let instance = (*vmctx).instance_mut();
    instance.memory_init(memory_index, data_index, dst, src, len)
}

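// Implementation of `ref.func`: returns a pointer to the function's
// `VMCallerCheckedFuncRef`.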
unsafe fn ref_func(vmctx: *mut VMContext, func_index: u32) -> *mut u8 {
    let instance = (*vmctx).instance_mut();
    let anyfunc = instance
        .get_caller_checked_anyfunc(FuncIndex::from_u32(func_index))
        .expect("ref_func: caller_checked_anyfunc should always be available for given func index");
    anyfunc as *mut _
}

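// Implementation of `data.drop`.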
unsafe fn data_drop(vmctx: *mut VMContext, data_index: u32) {
    let data_index = DataIndex::from_u32(data_index);
    let instance = (*vmctx).instance_mut();
    instance.data_drop(data_index)
}

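// Returns a table element from a `funcref` table, lazily initializing the
// element first if necessary.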
unsafe fn table_get_lazy_init_funcref(
    vmctx: *mut VMContext,
    table_index: u32,
    index: u32,
) -> *mut u8 {
    let instance = (*vmctx).instance_mut();
    let table_index = TableIndex::from_u32(table_index);
    let table = instance.get_table_with_lazy_init(table_index, std::iter::once(index));
    let elem = (*table)
        .get(index)
        .expect("table access already bounds-checked");

    elem.into_ref_asserting_initialized() as *mut _
}

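// Drops and deallocates the `VMExternData` behind a raw `externref` pointer.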
unsafe fn drop_externref(_vmctx: *mut VMContext, externref: *mut u8) {
    let externref = externref as *mut crate::externref::VMExternData;
    let externref = NonNull::new(externref).unwrap();
    crate::externref::VMExternData::drop_and_dealloc(externref);
}

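// Inserts an `externref` into the store's activations table, possibly
// triggering a GC in the process.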
unsafe fn activations_table_insert_with_gc(vmctx: *mut VMContext, externref: *mut u8) {
    let externref = VMExternRef::clone_from_raw(externref);
    let instance = (*vmctx).instance();
    let (activations_table, module_info_lookup) = (*instance.store()).externref_activations_table();

    // Make sure this `externref` has an entry in the activations table before
    // any GC that `insert_with_gc` might trigger, so it is kept alive across
    // that collection.
    activations_table.insert_without_gc(externref.clone());

    activations_table.insert_with_gc(externref, module_info_lookup);
}

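// Implementation of `global.get` for `externref` globals: clones the current
// value, registers it in the activations table, and returns the raw pointer.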
unsafe fn externref_global_get(vmctx: *mut VMContext, index: u32) -> *mut u8 {
    let index = GlobalIndex::from_u32(index);
    let instance = (*vmctx).instance_mut();
    let global = instance.defined_or_imported_global_ptr(index);
    match (*global).as_externref().clone() {
        None => ptr::null_mut(),
        Some(externref) => {
            let raw = externref.as_raw();
            let (activations_table, module_info_lookup) =
                (*instance.store()).externref_activations_table();
            activations_table.insert_with_gc(externref, module_info_lookup);
            raw
        }
    }
}

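// Implementation of `global.set` for `externref` globals: swaps in the new
// value and drops the old one.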
unsafe fn externref_global_set(vmctx: *mut VMContext, index: u32, externref: *mut u8) {
    let externref = if externref.is_null() {
        None
    } else {
        Some(VMExternRef::clone_from_raw(externref))
    };

    let index = GlobalIndex::from_u32(index);
    let instance = (*vmctx).instance_mut();
    let global = instance.defined_or_imported_global_ptr(index);

    let old = mem::replace((*global).as_externref_mut(), externref);
    drop(old);
}

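// Implementation of `memory.atomic.notify`.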
unsafe fn memory_atomic_notify(
    vmctx: *mut VMContext,
    memory_index: u32,
    addr_index: u64,
    count: u32,
) -> Result<u32, Trap> {
    let memory = MemoryIndex::from_u32(memory_index);
    let instance = (*vmctx).instance_mut();
    instance
        .get_runtime_memory(memory)
        .atomic_notify(addr_index, count)
}

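// Implementation of `memory.atomic.wait32`; a `timeout` with the sign bit set
// means to wait without a timeout.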
unsafe fn memory_atomic_wait32(
    vmctx: *mut VMContext,
    memory_index: u32,
    addr_index: u64,
    expected: u32,
    timeout: u64,
) -> Result<u32, Trap> {
    let timeout = (timeout as i64 >= 0).then(|| Instant::now() + Duration::from_nanos(timeout));
    let memory = MemoryIndex::from_u32(memory_index);
    let instance = (*vmctx).instance_mut();
    Ok(instance
        .get_runtime_memory(memory)
        .atomic_wait32(addr_index, expected, timeout)? as u32)
}

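// Implementation of `memory.atomic.wait64`, with the same timeout convention
// as `memory_atomic_wait32` above.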
unsafe fn memory_atomic_wait64(
    vmctx: *mut VMContext,
    memory_index: u32,
    addr_index: u64,
    expected: u64,
    timeout: u64,
) -> Result<u32, Trap> {
    let timeout = (timeout as i64 >= 0).then(|| Instant::now() + Duration::from_nanos(timeout));
    let memory = MemoryIndex::from_u32(memory_index);
    let instance = (*vmctx).instance_mut();
    Ok(instance
        .get_runtime_memory(memory)
        .atomic_wait64(addr_index, expected, timeout)? as u32)
}

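// Hook invoked when the instance runs out of fuel; defers to the store's
// `out_of_gas` callback.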
unsafe fn out_of_gas(vmctx: *mut VMContext) -> Result<()> {
    (*(*vmctx).instance().store()).out_of_gas()
}

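// Hook invoked when an epoch deadline is reached; defers to the store's
// `new_epoch` callback.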
unsafe fn new_epoch(vmctx: *mut VMContext) -> Result<u64> {
    (*(*vmctx).instance().store()).new_epoch()
}

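/// Helper functions called directly from compiled code (via relocations) to
/// implement floating-point operations which may not have a corresponding
/// machine instruction on the target.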
#[allow(missing_docs)]
pub mod relocs {
    pub extern "C" fn floorf32(f: f32) -> f32 {
        f.floor()
    }

    pub extern "C" fn floorf64(f: f64) -> f64 {
        f.floor()
    }

    pub extern "C" fn ceilf32(f: f32) -> f32 {
        f.ceil()
    }

    pub extern "C" fn ceilf64(f: f64) -> f64 {
        f.ceil()
    }

    pub extern "C" fn truncf32(f: f32) -> f32 {
        f.trunc()
    }

    pub extern "C" fn truncf64(f: f64) -> f64 {
        f.trunc()
    }

    const TOINT_32: f32 = 1.0 / f32::EPSILON;
    const TOINT_64: f64 = 1.0 / f64::EPSILON;

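    // `nearest` (round-to-nearest, ties-to-even) implemented with bit tricks:
    // values whose exponent is large enough are already integral (NaNs are
    // additionally quieted), and everything else is rounded by adding and then
    // subtracting 1 / EPSILON (2^23 for f32, 2^52 for f64), reapplying the
    // original sign.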
    pub extern "C" fn nearestf32(x: f32) -> f32 {
        let i = x.to_bits();
        let e = i >> 23 & 0xff;
        if e >= 0x7f_u32 + 23 {
            if e == 0xff {
                if i & 0x7fffff != 0 {
                    return f32::from_bits(i | (1 << 22));
                }
            }
            x
        } else {
            (x.abs() + TOINT_32 - TOINT_32).copysign(x)
        }
    }

    pub extern "C" fn nearestf64(x: f64) -> f64 {
        let i = x.to_bits();
        let e = i >> 52 & 0x7ff;
        if e >= 0x3ff_u64 + 52 {
            if e == 0x7ff {
                if i & 0xfffffffffffff != 0 {
                    return f64::from_bits(i | (1 << 51));
                }
            }
            x
        } else {
            (x.abs() + TOINT_64 - TOINT_64).copysign(x)
        }
    }

    pub extern "C" fn fmaf32(a: f32, b: f32, c: f32) -> f32 {
        a.mul_add(b, c)
    }

    pub extern "C" fn fmaf64(a: f64, b: f64, c: f64) -> f64 {
        a.mul_add(b, c)
    }
}