use crate::limits::{
    MAX_WASM_BR_TABLE_SIZE, MAX_WASM_CATCHES, MAX_WASM_HANDLERS, MAX_WASM_SELECT_RESULT_SIZE,
};
use crate::prelude::*;
use crate::{BinaryReader, BinaryReaderError, FromReader, RefType, Result, ValType};
use core::fmt;

#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum BlockType {
    /// The block takes no parameters and produces no results.
    Empty,
    /// The block produces a single result of the given value type.
    Type(ValType),
    /// The block's parameters and results are given by the function type at this
    /// type index.
    FuncType(u32),
}

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum FrameKind {
    Block,
    If,
    Else,
    Loop,
    TryTable,
    LegacyTry,
    LegacyCatch,
    LegacyCatchAll,
}

#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub struct MemArg {
    /// Alignment of the memory access, stored as the log2 of the byte alignment.
    pub align: u8,
    /// Maximum allowed value of `align` for this instruction (also a log2 value).
    pub max_align: u8,
    /// Constant byte offset added to the dynamic address operand.
    pub offset: u64,
    /// Index of the memory this access refers to.
    pub memory: u32,
}

#[derive(Clone)]
pub struct BrTable<'a> {
    pub(crate) reader: crate::BinaryReader<'a>,
    pub(crate) cnt: u32,
    pub(crate) default: u32,
}

impl PartialEq<Self> for BrTable<'_> {
    fn eq(&self, other: &Self) -> bool {
        self.cnt == other.cnt
            && self.default == other.default
            && self.reader.remaining_buffer() == other.reader.remaining_buffer()
    }
}

impl Eq for BrTable<'_> {}

impl<'a> BrTable<'a> {
    pub fn len(&self) -> u32 {
        self.cnt
    }

    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    pub fn default(&self) -> u32 {
        self.default
    }

    pub fn targets(&self) -> BrTableTargets<'_> {
        BrTableTargets {
            reader: self.reader.clone(),
            remaining: self.cnt,
        }
    }
}
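
// Illustrative sketch (not part of the original file): gather every label a
// `br_table` can branch to. `targets()` yields only the explicit targets; the
// fallback label has to be taken from `default()` separately.
#[cfg(test)]
#[allow(dead_code)]
fn collect_br_table_labels(table: &BrTable<'_>) -> Result<Vec<u32>> {
    let mut labels = Vec::with_capacity(table.len() as usize + 1);
    for target in table.targets() {
        labels.push(target?);
    }
    labels.push(table.default());
    Ok(labels)
}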

pub struct BrTableTargets<'a> {
    reader: crate::BinaryReader<'a>,
    remaining: u32,
}

impl<'a> Iterator for BrTableTargets<'a> {
    type Item = Result<u32>;

    fn size_hint(&self) -> (usize, Option<usize>) {
        let remaining = usize::try_from(self.remaining).unwrap_or_else(|error| {
            panic!("could not convert remaining `u32` into `usize`: {error}")
        });
        (remaining, Some(remaining))
    }

    fn next(&mut self) -> Option<Self::Item> {
        if self.remaining == 0 {
            if !self.reader.eof() {
                return Some(Err(BinaryReaderError::new(
                    "trailing data in br_table",
                    self.reader.original_position(),
                )));
            }
            return None;
        }
        self.remaining -= 1;
        Some(self.reader.read_var_u32())
    }
}

impl fmt::Debug for BrTable<'_> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut f = f.debug_struct("BrTable");
        f.field("count", &self.cnt);
        f.field("default", &self.default);
        match self.targets().collect::<Result<Vec<_>>>() {
            Ok(targets) => {
                f.field("targets", &targets);
            }
            Err(_) => {
                f.field("reader", &self.reader);
            }
        }
        f.finish()
    }
}

#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub struct Ieee32(pub(crate) u32);

impl Ieee32 {
    pub fn bits(self) -> u32 {
        self.0
    }
}

impl From<f32> for Ieee32 {
    fn from(value: f32) -> Self {
        Ieee32 {
            0: u32::from_le_bytes(value.to_le_bytes()),
        }
    }
}

impl From<Ieee32> for f32 {
    fn from(bits: Ieee32) -> f32 {
        f32::from_bits(bits.bits())
    }
}

#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub struct Ieee64(pub(crate) u64);

impl Ieee64 {
    pub fn bits(self) -> u64 {
        self.0
    }
}

impl From<f64> for Ieee64 {
    fn from(value: f64) -> Self {
        Ieee64 {
            0: u64::from_le_bytes(value.to_le_bytes()),
        }
    }
}

impl From<Ieee64> for f64 {
    fn from(bits: Ieee64) -> f64 {
        f64::from_bits(bits.bits())
    }
}
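
// Illustrative sketch (not part of the original file): `Ieee32`/`Ieee64` store the
// raw IEEE 754 bit pattern, so converting to the native float type and back is
// lossless, including NaN payloads and the sign of zero.
#[cfg(test)]
#[allow(dead_code)]
fn ieee_bits_roundtrip_example() {
    let nan32 = Ieee32(0x7fc0_1234); // quiet NaN with a nonzero payload
    assert_eq!(f32::from(nan32).to_bits(), nan32.bits());

    let neg_zero64 = Ieee64(0x8000_0000_0000_0000);
    assert_eq!(f64::from(neg_zero64).to_bits(), neg_zero64.bits());
}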

#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub struct V128(pub(crate) [u8; 16]);

impl V128 {
    pub fn bytes(&self) -> &[u8; 16] {
        &self.0
    }

    pub fn i128(&self) -> i128 {
        i128::from_le_bytes(self.0)
    }
}

impl From<V128> for i128 {
    fn from(bits: V128) -> i128 {
        bits.i128()
    }
}

impl From<V128> for u128 {
    fn from(bits: V128) -> u128 {
        u128::from_le_bytes(bits.0)
    }
}
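
// Illustrative sketch (not part of the original file): `V128` keeps the vector's
// 16 bytes in little-endian order, which is why the integer conversions above go
// through `from_le_bytes`.
#[cfg(test)]
#[allow(dead_code)]
fn v128_little_endian_example() {
    let v = V128(1u128.to_le_bytes());
    assert_eq!(v.bytes()[0], 1);
    assert_eq!(v.i128(), 1);
    assert_eq!(u128::from(v), 1);
}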

#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub enum Ordering {
    AcqRel,
    SeqCst,
}

macro_rules! define_operator {
    ($(@$proposal:ident $op:ident $({ $($payload:tt)* })? => $visit:ident ($($ann:tt)*))*) => {
        #[derive(Debug, Clone, Eq, PartialEq)]
        #[allow(missing_docs)]
        #[non_exhaustive]
        pub enum Operator<'a> {
            $(
                $op $({ $($payload)* })?,
            )*
        }
    }
}
crate::for_each_operator!(define_operator);

#[derive(Clone)]
pub struct OperatorsReader<'a> {
    reader: BinaryReader<'a>,
    blocks: Vec<FrameKind>,
}

impl<'a> OperatorsReader<'a> {
    pub fn new(reader: BinaryReader<'a>) -> OperatorsReader<'a> {
        OperatorsReader {
            reader,
            blocks: vec![FrameKind::Block],
        }
    }

    pub fn eof(&self) -> bool {
        self.reader.eof()
    }

    pub fn original_position(&self) -> usize {
        self.reader.original_position()
    }

    pub fn finish(&self) -> Result<()> {
        self.ensure_stack_empty()?;
        if !self.eof() {
            bail!(
                self.original_position(),
                "unexpected data at the end of operators"
            );
        }
        Ok(())
    }

    fn ensure_stack_empty(&self) -> Result<()> {
        if !self.blocks.is_empty() {
            bail!(
                self.original_position(),
                "control frames remain at end of function body or expression"
            );
        }
        Ok(())
    }

    pub fn read(&mut self) -> Result<Operator<'a>> {
        self.visit_operator(&mut OperatorFactory::new())
    }

    pub fn into_iter_with_offsets(self) -> OperatorsIteratorWithOffsets<'a> {
        OperatorsIteratorWithOffsets {
            reader: self,
            err: false,
        }
    }

    pub fn read_with_offset(&mut self) -> Result<(Operator<'a>, usize)> {
        let pos = self.reader.original_position();
        Ok((self.read()?, pos))
    }

    fn enter(&mut self, k: FrameKind) {
        self.blocks.push(k)
    }

    fn expect_block(&mut self, k: FrameKind, found: &str) -> Result<()> {
        match self.blocks.last() {
            None => bail!(
                self.original_position(),
                "empty stack found where {:?} expected",
                k
            ),
            Some(x) if *x == k => Ok(()),
            Some(_) => bail!(
                self.original_position(),
                "`{}` found outside `{:?}` block",
                found,
                k
            ),
        }
    }

    fn end(&mut self) -> Result<()> {
        assert!(!self.blocks.is_empty());
        self.blocks.pop();
        Ok(())
    }

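    /// Reads the next operator from the underlying binary reader and dispatches it
    /// to the matching `visit_*` method on `visitor`, returning the visitor's
    /// output. This is the primitive that [`OperatorsReader::read`] builds on:
    /// `read` simply supplies a visitor that constructs an [`Operator`] value.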
    pub fn visit_operator<T>(&mut self, visitor: &mut T) -> Result<<T as VisitOperator<'a>>::Output>
    where
        T: VisitOperator<'a>,
    {
        if self.blocks.is_empty() {
            bail!(
                self.original_position(),
                "operators remaining after end of function body or expression"
            );
        }
        let pos = self.reader.original_position();
        let code = self.reader.read_u8()?;
        Ok(match code {
            0x00 => visitor.visit_unreachable(),
            0x01 => visitor.visit_nop(),
            0x02 => {
                self.enter(FrameKind::Block);
                visitor.visit_block(self.reader.read_block_type()?)
            }
            0x03 => {
                self.enter(FrameKind::Loop);
                visitor.visit_loop(self.reader.read_block_type()?)
            }
            0x04 => {
                self.enter(FrameKind::If);
                visitor.visit_if(self.reader.read_block_type()?)
            }
            0x05 => {
                self.expect_block(FrameKind::If, "else")?;
                visitor.visit_else()
            }
            0x06 => {
                if !self.reader.legacy_exceptions() {
                    bail!(
                        pos,
                        "legacy_exceptions feature required for try instruction"
                    );
                }
                self.enter(FrameKind::LegacyTry);
                visitor.visit_try(self.reader.read_block_type()?)
            }
            0x07 => {
                if !self.reader.legacy_exceptions() {
                    bail!(
                        pos,
                        "legacy_exceptions feature required for catch instruction"
                    );
                }
                self.expect_block(FrameKind::LegacyTry, "catch")?;
                visitor.visit_catch(self.reader.read_var_u32()?)
            }
            0x08 => visitor.visit_throw(self.reader.read_var_u32()?),
            0x09 => visitor.visit_rethrow(self.reader.read_var_u32()?),
            0x0a => visitor.visit_throw_ref(),
            0x0b => {
                self.end()?;
                visitor.visit_end()
            }
            0x0c => visitor.visit_br(self.reader.read_var_u32()?),
            0x0d => visitor.visit_br_if(self.reader.read_var_u32()?),
            0x0e => visitor.visit_br_table(self.read_br_table()?),
            0x0f => visitor.visit_return(),
            0x10 => visitor.visit_call(self.reader.read_var_u32()?),
            0x11 => {
                let index = self.reader.read_var_u32()?;
                let table = self.read_call_indirect_table_immediate()?;
                visitor.visit_call_indirect(index, table)
            }
            0x12 => visitor.visit_return_call(self.reader.read_var_u32()?),
            0x13 => visitor.visit_return_call_indirect(
                self.reader.read_var_u32()?,
                self.reader.read_var_u32()?,
            ),
            0x14 => visitor.visit_call_ref(self.reader.read()?),
            0x15 => visitor.visit_return_call_ref(self.reader.read()?),
            0x18 => {
                self.expect_block(FrameKind::LegacyTry, "delegate")?;
                self.blocks.pop();
                visitor.visit_delegate(self.reader.read_var_u32()?)
            }
            0x19 => {
                if !self.reader.legacy_exceptions() {
                    bail!(
                        pos,
                        "legacy_exceptions feature required for catch_all instruction"
                    );
                }
                self.expect_block(FrameKind::LegacyTry, "catch_all")?;
                visitor.visit_catch_all()
            }
            0x1a => visitor.visit_drop(),
            0x1b => visitor.visit_select(),
            0x1c => {
                let result_count = self
                    .reader
                    .read_size(MAX_WASM_SELECT_RESULT_SIZE, "select types")?;
                if result_count == 1 {
                    visitor.visit_typed_select(self.reader.read()?)
                } else {
                    let mut results = Vec::new();
                    results.reserve_exact(result_count);
                    for _ in 0..result_count {
                        results.push(self.reader.read()?);
                    }
                    visitor.visit_typed_select_multi(results)
                }
            }
            0x1f => {
                self.enter(FrameKind::TryTable);
                visitor.visit_try_table(self.reader.read()?)
            }

            0x20 => visitor.visit_local_get(self.reader.read_var_u32()?),
            0x21 => visitor.visit_local_set(self.reader.read_var_u32()?),
            0x22 => visitor.visit_local_tee(self.reader.read_var_u32()?),
            0x23 => visitor.visit_global_get(self.reader.read_var_u32()?),
            0x24 => visitor.visit_global_set(self.reader.read_var_u32()?),
            0x25 => visitor.visit_table_get(self.reader.read_var_u32()?),
            0x26 => visitor.visit_table_set(self.reader.read_var_u32()?),

            0x28 => visitor.visit_i32_load(self.read_memarg(2)?),
            0x29 => visitor.visit_i64_load(self.read_memarg(3)?),
            0x2a => visitor.visit_f32_load(self.read_memarg(2)?),
            0x2b => visitor.visit_f64_load(self.read_memarg(3)?),
            0x2c => visitor.visit_i32_load8_s(self.read_memarg(0)?),
            0x2d => visitor.visit_i32_load8_u(self.read_memarg(0)?),
            0x2e => visitor.visit_i32_load16_s(self.read_memarg(1)?),
            0x2f => visitor.visit_i32_load16_u(self.read_memarg(1)?),
            0x30 => visitor.visit_i64_load8_s(self.read_memarg(0)?),
            0x31 => visitor.visit_i64_load8_u(self.read_memarg(0)?),
            0x32 => visitor.visit_i64_load16_s(self.read_memarg(1)?),
            0x33 => visitor.visit_i64_load16_u(self.read_memarg(1)?),
            0x34 => visitor.visit_i64_load32_s(self.read_memarg(2)?),
            0x35 => visitor.visit_i64_load32_u(self.read_memarg(2)?),
            0x36 => visitor.visit_i32_store(self.read_memarg(2)?),
            0x37 => visitor.visit_i64_store(self.read_memarg(3)?),
            0x38 => visitor.visit_f32_store(self.read_memarg(2)?),
            0x39 => visitor.visit_f64_store(self.read_memarg(3)?),
            0x3a => visitor.visit_i32_store8(self.read_memarg(0)?),
            0x3b => visitor.visit_i32_store16(self.read_memarg(1)?),
            0x3c => visitor.visit_i64_store8(self.read_memarg(0)?),
            0x3d => visitor.visit_i64_store16(self.read_memarg(1)?),
            0x3e => visitor.visit_i64_store32(self.read_memarg(2)?),
            0x3f => {
                let mem = self.read_memory_index_or_zero_if_not_multi_memory()?;
                visitor.visit_memory_size(mem)
            }
            0x40 => {
                let mem = self.read_memory_index_or_zero_if_not_multi_memory()?;
                visitor.visit_memory_grow(mem)
            }

            0x41 => visitor.visit_i32_const(self.reader.read_var_i32()?),
            0x42 => visitor.visit_i64_const(self.reader.read_var_i64()?),
            0x43 => visitor.visit_f32_const(self.reader.read_f32()?),
            0x44 => visitor.visit_f64_const(self.reader.read_f64()?),

            0x45 => visitor.visit_i32_eqz(),
            0x46 => visitor.visit_i32_eq(),
            0x47 => visitor.visit_i32_ne(),
            0x48 => visitor.visit_i32_lt_s(),
            0x49 => visitor.visit_i32_lt_u(),
            0x4a => visitor.visit_i32_gt_s(),
            0x4b => visitor.visit_i32_gt_u(),
            0x4c => visitor.visit_i32_le_s(),
            0x4d => visitor.visit_i32_le_u(),
            0x4e => visitor.visit_i32_ge_s(),
            0x4f => visitor.visit_i32_ge_u(),
            0x50 => visitor.visit_i64_eqz(),
            0x51 => visitor.visit_i64_eq(),
            0x52 => visitor.visit_i64_ne(),
            0x53 => visitor.visit_i64_lt_s(),
            0x54 => visitor.visit_i64_lt_u(),
            0x55 => visitor.visit_i64_gt_s(),
            0x56 => visitor.visit_i64_gt_u(),
            0x57 => visitor.visit_i64_le_s(),
            0x58 => visitor.visit_i64_le_u(),
            0x59 => visitor.visit_i64_ge_s(),
            0x5a => visitor.visit_i64_ge_u(),
            0x5b => visitor.visit_f32_eq(),
            0x5c => visitor.visit_f32_ne(),
            0x5d => visitor.visit_f32_lt(),
            0x5e => visitor.visit_f32_gt(),
            0x5f => visitor.visit_f32_le(),
            0x60 => visitor.visit_f32_ge(),
            0x61 => visitor.visit_f64_eq(),
            0x62 => visitor.visit_f64_ne(),
            0x63 => visitor.visit_f64_lt(),
            0x64 => visitor.visit_f64_gt(),
            0x65 => visitor.visit_f64_le(),
            0x66 => visitor.visit_f64_ge(),
            0x67 => visitor.visit_i32_clz(),
            0x68 => visitor.visit_i32_ctz(),
            0x69 => visitor.visit_i32_popcnt(),
            0x6a => visitor.visit_i32_add(),
            0x6b => visitor.visit_i32_sub(),
            0x6c => visitor.visit_i32_mul(),
            0x6d => visitor.visit_i32_div_s(),
            0x6e => visitor.visit_i32_div_u(),
            0x6f => visitor.visit_i32_rem_s(),
            0x70 => visitor.visit_i32_rem_u(),
            0x71 => visitor.visit_i32_and(),
            0x72 => visitor.visit_i32_or(),
            0x73 => visitor.visit_i32_xor(),
            0x74 => visitor.visit_i32_shl(),
            0x75 => visitor.visit_i32_shr_s(),
            0x76 => visitor.visit_i32_shr_u(),
            0x77 => visitor.visit_i32_rotl(),
            0x78 => visitor.visit_i32_rotr(),
            0x79 => visitor.visit_i64_clz(),
            0x7a => visitor.visit_i64_ctz(),
            0x7b => visitor.visit_i64_popcnt(),
            0x7c => visitor.visit_i64_add(),
            0x7d => visitor.visit_i64_sub(),
            0x7e => visitor.visit_i64_mul(),
            0x7f => visitor.visit_i64_div_s(),
            0x80 => visitor.visit_i64_div_u(),
            0x81 => visitor.visit_i64_rem_s(),
            0x82 => visitor.visit_i64_rem_u(),
            0x83 => visitor.visit_i64_and(),
            0x84 => visitor.visit_i64_or(),
            0x85 => visitor.visit_i64_xor(),
            0x86 => visitor.visit_i64_shl(),
            0x87 => visitor.visit_i64_shr_s(),
            0x88 => visitor.visit_i64_shr_u(),
            0x89 => visitor.visit_i64_rotl(),
            0x8a => visitor.visit_i64_rotr(),
            0x8b => visitor.visit_f32_abs(),
            0x8c => visitor.visit_f32_neg(),
            0x8d => visitor.visit_f32_ceil(),
            0x8e => visitor.visit_f32_floor(),
            0x8f => visitor.visit_f32_trunc(),
            0x90 => visitor.visit_f32_nearest(),
            0x91 => visitor.visit_f32_sqrt(),
            0x92 => visitor.visit_f32_add(),
            0x93 => visitor.visit_f32_sub(),
            0x94 => visitor.visit_f32_mul(),
            0x95 => visitor.visit_f32_div(),
            0x96 => visitor.visit_f32_min(),
            0x97 => visitor.visit_f32_max(),
            0x98 => visitor.visit_f32_copysign(),
            0x99 => visitor.visit_f64_abs(),
            0x9a => visitor.visit_f64_neg(),
            0x9b => visitor.visit_f64_ceil(),
            0x9c => visitor.visit_f64_floor(),
            0x9d => visitor.visit_f64_trunc(),
            0x9e => visitor.visit_f64_nearest(),
            0x9f => visitor.visit_f64_sqrt(),
            0xa0 => visitor.visit_f64_add(),
            0xa1 => visitor.visit_f64_sub(),
            0xa2 => visitor.visit_f64_mul(),
            0xa3 => visitor.visit_f64_div(),
            0xa4 => visitor.visit_f64_min(),
            0xa5 => visitor.visit_f64_max(),
            0xa6 => visitor.visit_f64_copysign(),
            0xa7 => visitor.visit_i32_wrap_i64(),
            0xa8 => visitor.visit_i32_trunc_f32_s(),
            0xa9 => visitor.visit_i32_trunc_f32_u(),
            0xaa => visitor.visit_i32_trunc_f64_s(),
            0xab => visitor.visit_i32_trunc_f64_u(),
            0xac => visitor.visit_i64_extend_i32_s(),
            0xad => visitor.visit_i64_extend_i32_u(),
            0xae => visitor.visit_i64_trunc_f32_s(),
            0xaf => visitor.visit_i64_trunc_f32_u(),
            0xb0 => visitor.visit_i64_trunc_f64_s(),
            0xb1 => visitor.visit_i64_trunc_f64_u(),
            0xb2 => visitor.visit_f32_convert_i32_s(),
            0xb3 => visitor.visit_f32_convert_i32_u(),
            0xb4 => visitor.visit_f32_convert_i64_s(),
            0xb5 => visitor.visit_f32_convert_i64_u(),
            0xb6 => visitor.visit_f32_demote_f64(),
            0xb7 => visitor.visit_f64_convert_i32_s(),
            0xb8 => visitor.visit_f64_convert_i32_u(),
            0xb9 => visitor.visit_f64_convert_i64_s(),
            0xba => visitor.visit_f64_convert_i64_u(),
            0xbb => visitor.visit_f64_promote_f32(),
            0xbc => visitor.visit_i32_reinterpret_f32(),
            0xbd => visitor.visit_i64_reinterpret_f64(),
            0xbe => visitor.visit_f32_reinterpret_i32(),
            0xbf => visitor.visit_f64_reinterpret_i64(),

            0xc0 => visitor.visit_i32_extend8_s(),
            0xc1 => visitor.visit_i32_extend16_s(),
            0xc2 => visitor.visit_i64_extend8_s(),
            0xc3 => visitor.visit_i64_extend16_s(),
            0xc4 => visitor.visit_i64_extend32_s(),

            0xd0 => visitor.visit_ref_null(self.reader.read()?),
            0xd1 => visitor.visit_ref_is_null(),
            0xd2 => visitor.visit_ref_func(self.reader.read_var_u32()?),
            0xd3 => visitor.visit_ref_eq(),
            0xd4 => visitor.visit_ref_as_non_null(),
            0xd5 => visitor.visit_br_on_null(self.reader.read_var_u32()?),
            0xd6 => visitor.visit_br_on_non_null(self.reader.read_var_u32()?),

            0xe0 => visitor.visit_cont_new(self.reader.read_var_u32()?),
            0xe1 => {
                visitor.visit_cont_bind(self.reader.read_var_u32()?, self.reader.read_var_u32()?)
            }
            0xe2 => visitor.visit_suspend(self.reader.read_var_u32()?),
            0xe3 => visitor.visit_resume(self.reader.read_var_u32()?, self.reader.read()?),
            0xe4 => visitor.visit_resume_throw(
                self.reader.read_var_u32()?,
                self.reader.read_var_u32()?,
                self.reader.read()?,
            ),
            0xe5 => visitor.visit_switch(self.reader.read_var_u32()?, self.reader.read_var_u32()?),

            0xfb => self.visit_0xfb_operator(pos, visitor)?,
            0xfc => self.visit_0xfc_operator(pos, visitor)?,
            0xfd => {
                #[cfg(feature = "simd")]
                if let Some(mut visitor) = visitor.simd_visitor() {
                    return self.visit_0xfd_operator(pos, &mut visitor);
                }
                bail!(pos, "unexpected SIMD opcode: 0x{code:x}")
            }
            0xfe => self.visit_0xfe_operator(pos, visitor)?,

            _ => bail!(pos, "illegal opcode: 0x{code:x}"),
        })
    }

    fn visit_0xfb_operator<T>(
        &mut self,
        pos: usize,
        visitor: &mut T,
    ) -> Result<<T as VisitOperator<'a>>::Output>
    where
        T: VisitOperator<'a>,
    {
        let code = self.reader.read_var_u32()?;
        Ok(match code {
            0x0 => {
                let type_index = self.reader.read_var_u32()?;
                visitor.visit_struct_new(type_index)
            }
            0x01 => {
                let type_index = self.reader.read_var_u32()?;
                visitor.visit_struct_new_default(type_index)
            }
            0x02 => {
                let type_index = self.reader.read_var_u32()?;
                let field_index = self.reader.read_var_u32()?;
                visitor.visit_struct_get(type_index, field_index)
            }
            0x03 => {
                let type_index = self.reader.read_var_u32()?;
                let field_index = self.reader.read_var_u32()?;
                visitor.visit_struct_get_s(type_index, field_index)
            }
            0x04 => {
                let type_index = self.reader.read_var_u32()?;
                let field_index = self.reader.read_var_u32()?;
                visitor.visit_struct_get_u(type_index, field_index)
            }
            0x05 => {
                let type_index = self.reader.read_var_u32()?;
                let field_index = self.reader.read_var_u32()?;
                visitor.visit_struct_set(type_index, field_index)
            }
            0x06 => {
                let type_index = self.reader.read_var_u32()?;
                visitor.visit_array_new(type_index)
            }
            0x07 => {
                let type_index = self.reader.read_var_u32()?;
                visitor.visit_array_new_default(type_index)
            }
            0x08 => {
                let type_index = self.reader.read_var_u32()?;
                let n = self.reader.read_var_u32()?;
                visitor.visit_array_new_fixed(type_index, n)
            }
            0x09 => {
                let type_index = self.reader.read_var_u32()?;
                let data_index = self.reader.read_var_u32()?;
                visitor.visit_array_new_data(type_index, data_index)
            }
            0x0a => {
                let type_index = self.reader.read_var_u32()?;
                let elem_index = self.reader.read_var_u32()?;
                visitor.visit_array_new_elem(type_index, elem_index)
            }
            0x0b => {
                let type_index = self.reader.read_var_u32()?;
                visitor.visit_array_get(type_index)
            }
            0x0c => {
                let type_index = self.reader.read_var_u32()?;
                visitor.visit_array_get_s(type_index)
            }
            0x0d => {
                let type_index = self.reader.read_var_u32()?;
                visitor.visit_array_get_u(type_index)
            }
            0x0e => {
                let type_index = self.reader.read_var_u32()?;
                visitor.visit_array_set(type_index)
            }
            0x0f => visitor.visit_array_len(),
            0x10 => {
                let type_index = self.reader.read_var_u32()?;
                visitor.visit_array_fill(type_index)
            }
            0x11 => {
                let type_index_dst = self.reader.read_var_u32()?;
                let type_index_src = self.reader.read_var_u32()?;
                visitor.visit_array_copy(type_index_dst, type_index_src)
            }
            0x12 => {
                let type_index = self.reader.read_var_u32()?;
                let data_index = self.reader.read_var_u32()?;
                visitor.visit_array_init_data(type_index, data_index)
            }
            0x13 => {
                let type_index = self.reader.read_var_u32()?;
                let elem_index = self.reader.read_var_u32()?;
                visitor.visit_array_init_elem(type_index, elem_index)
            }
            0x14 => visitor.visit_ref_test_non_null(self.reader.read()?),
            0x15 => visitor.visit_ref_test_nullable(self.reader.read()?),
            0x16 => visitor.visit_ref_cast_non_null(self.reader.read()?),
            0x17 => visitor.visit_ref_cast_nullable(self.reader.read()?),
            0x18 => {
                let pos = self.original_position();
                let cast_flags = self.reader.read_u8()?;
                let relative_depth = self.reader.read_var_u32()?;
                let (from_type_nullable, to_type_nullable) = match cast_flags {
                    0b00 => (false, false),
                    0b01 => (true, false),
                    0b10 => (false, true),
                    0b11 => (true, true),
                    _ => bail!(pos, "invalid cast flags: {cast_flags:08b}"),
                };
                let from_heap_type = self.reader.read()?;
                let from_ref_type =
                    RefType::new(from_type_nullable, from_heap_type).ok_or_else(|| {
                        format_err!(pos, "implementation error: type index too large")
                    })?;
                let to_heap_type = self.reader.read()?;
                let to_ref_type =
                    RefType::new(to_type_nullable, to_heap_type).ok_or_else(|| {
                        format_err!(pos, "implementation error: type index too large")
                    })?;
                visitor.visit_br_on_cast(relative_depth, from_ref_type, to_ref_type)
            }
            0x19 => {
                let pos = self.original_position();
                let cast_flags = self.reader.read_u8()?;
                let relative_depth = self.reader.read_var_u32()?;
                let (from_type_nullable, to_type_nullable) = match cast_flags {
                    0 => (false, false),
                    1 => (true, false),
                    2 => (false, true),
                    3 => (true, true),
                    _ => bail!(pos, "invalid cast flags: {cast_flags:08b}"),
                };
                let from_heap_type = self.reader.read()?;
                let from_ref_type =
                    RefType::new(from_type_nullable, from_heap_type).ok_or_else(|| {
                        format_err!(pos, "implementation error: type index too large")
                    })?;
                let to_heap_type = self.reader.read()?;
                let to_ref_type =
                    RefType::new(to_type_nullable, to_heap_type).ok_or_else(|| {
                        format_err!(pos, "implementation error: type index too large")
                    })?;
                visitor.visit_br_on_cast_fail(relative_depth, from_ref_type, to_ref_type)
            }

            0x1a => visitor.visit_any_convert_extern(),
            0x1b => visitor.visit_extern_convert_any(),

            0x1c => visitor.visit_ref_i31(),
            0x1d => visitor.visit_i31_get_s(),
            0x1e => visitor.visit_i31_get_u(),

            _ => bail!(pos, "unknown 0xfb subopcode: 0x{code:x}"),
        })
    }

    fn visit_0xfc_operator<T>(
        &mut self,
        pos: usize,
        visitor: &mut T,
    ) -> Result<<T as VisitOperator<'a>>::Output>
    where
        T: VisitOperator<'a>,
    {
        let code = self.reader.read_var_u32()?;
        Ok(match code {
            0x00 => visitor.visit_i32_trunc_sat_f32_s(),
            0x01 => visitor.visit_i32_trunc_sat_f32_u(),
            0x02 => visitor.visit_i32_trunc_sat_f64_s(),
            0x03 => visitor.visit_i32_trunc_sat_f64_u(),
            0x04 => visitor.visit_i64_trunc_sat_f32_s(),
            0x05 => visitor.visit_i64_trunc_sat_f32_u(),
            0x06 => visitor.visit_i64_trunc_sat_f64_s(),
            0x07 => visitor.visit_i64_trunc_sat_f64_u(),

            0x08 => {
                let segment = self.reader.read_var_u32()?;
                let mem = self.reader.read_var_u32()?;
                visitor.visit_memory_init(segment, mem)
            }
            0x09 => {
                let segment = self.reader.read_var_u32()?;
                visitor.visit_data_drop(segment)
            }
            0x0a => {
                let dst = self.reader.read_var_u32()?;
                let src = self.reader.read_var_u32()?;
                visitor.visit_memory_copy(dst, src)
            }
            0x0b => {
                let mem = self.reader.read_var_u32()?;
                visitor.visit_memory_fill(mem)
            }
            0x0c => {
                let segment = self.reader.read_var_u32()?;
                let table = self.reader.read_var_u32()?;
                visitor.visit_table_init(segment, table)
            }
            0x0d => {
                let segment = self.reader.read_var_u32()?;
                visitor.visit_elem_drop(segment)
            }
            0x0e => {
                let dst_table = self.reader.read_var_u32()?;
                let src_table = self.reader.read_var_u32()?;
                visitor.visit_table_copy(dst_table, src_table)
            }

            0x0f => {
                let table = self.reader.read_var_u32()?;
                visitor.visit_table_grow(table)
            }
            0x10 => {
                let table = self.reader.read_var_u32()?;
                visitor.visit_table_size(table)
            }

            0x11 => {
                let table = self.reader.read_var_u32()?;
                visitor.visit_table_fill(table)
            }

            0x12 => {
                let mem = self.reader.read_var_u32()?;
                visitor.visit_memory_discard(mem)
            }

            0x13 => visitor.visit_i64_add128(),
            0x14 => visitor.visit_i64_sub128(),
            0x15 => visitor.visit_i64_mul_wide_s(),
            0x16 => visitor.visit_i64_mul_wide_u(),

            _ => bail!(pos, "unknown 0xfc subopcode: 0x{code:x}"),
        })
    }

    #[cfg(feature = "simd")]
    pub(super) fn visit_0xfd_operator<T>(
        &mut self,
        pos: usize,
        visitor: &mut T,
    ) -> Result<<T as VisitOperator<'a>>::Output>
    where
        T: VisitSimdOperator<'a>,
    {
        let code = self.reader.read_var_u32()?;
        Ok(match code {
            0x00 => visitor.visit_v128_load(self.read_memarg(4)?),
            0x01 => visitor.visit_v128_load8x8_s(self.read_memarg(3)?),
            0x02 => visitor.visit_v128_load8x8_u(self.read_memarg(3)?),
            0x03 => visitor.visit_v128_load16x4_s(self.read_memarg(3)?),
            0x04 => visitor.visit_v128_load16x4_u(self.read_memarg(3)?),
            0x05 => visitor.visit_v128_load32x2_s(self.read_memarg(3)?),
            0x06 => visitor.visit_v128_load32x2_u(self.read_memarg(3)?),
            0x07 => visitor.visit_v128_load8_splat(self.read_memarg(0)?),
            0x08 => visitor.visit_v128_load16_splat(self.read_memarg(1)?),
            0x09 => visitor.visit_v128_load32_splat(self.read_memarg(2)?),
            0x0a => visitor.visit_v128_load64_splat(self.read_memarg(3)?),

            0x0b => visitor.visit_v128_store(self.read_memarg(4)?),
            0x0c => visitor.visit_v128_const(self.read_v128()?),
            0x0d => {
                let mut lanes: [u8; 16] = [0; 16];
                for lane in &mut lanes {
                    *lane = self.read_lane_index()?
                }
                visitor.visit_i8x16_shuffle(lanes)
            }

            0x0e => visitor.visit_i8x16_swizzle(),
            0x0f => visitor.visit_i8x16_splat(),
            0x10 => visitor.visit_i16x8_splat(),
            0x11 => visitor.visit_i32x4_splat(),
            0x12 => visitor.visit_i64x2_splat(),
            0x13 => visitor.visit_f32x4_splat(),
            0x14 => visitor.visit_f64x2_splat(),

            0x15 => visitor.visit_i8x16_extract_lane_s(self.read_lane_index()?),
            0x16 => visitor.visit_i8x16_extract_lane_u(self.read_lane_index()?),
            0x17 => visitor.visit_i8x16_replace_lane(self.read_lane_index()?),
            0x18 => visitor.visit_i16x8_extract_lane_s(self.read_lane_index()?),
            0x19 => visitor.visit_i16x8_extract_lane_u(self.read_lane_index()?),
            0x1a => visitor.visit_i16x8_replace_lane(self.read_lane_index()?),
            0x1b => visitor.visit_i32x4_extract_lane(self.read_lane_index()?),

            0x1c => visitor.visit_i32x4_replace_lane(self.read_lane_index()?),
            0x1d => visitor.visit_i64x2_extract_lane(self.read_lane_index()?),
            0x1e => visitor.visit_i64x2_replace_lane(self.read_lane_index()?),
            0x1f => visitor.visit_f32x4_extract_lane(self.read_lane_index()?),
            0x20 => visitor.visit_f32x4_replace_lane(self.read_lane_index()?),
            0x21 => visitor.visit_f64x2_extract_lane(self.read_lane_index()?),
            0x22 => visitor.visit_f64x2_replace_lane(self.read_lane_index()?),

            0x23 => visitor.visit_i8x16_eq(),
            0x24 => visitor.visit_i8x16_ne(),
            0x25 => visitor.visit_i8x16_lt_s(),
            0x26 => visitor.visit_i8x16_lt_u(),
            0x27 => visitor.visit_i8x16_gt_s(),
            0x28 => visitor.visit_i8x16_gt_u(),
            0x29 => visitor.visit_i8x16_le_s(),
            0x2a => visitor.visit_i8x16_le_u(),
            0x2b => visitor.visit_i8x16_ge_s(),
            0x2c => visitor.visit_i8x16_ge_u(),
            0x2d => visitor.visit_i16x8_eq(),
            0x2e => visitor.visit_i16x8_ne(),
            0x2f => visitor.visit_i16x8_lt_s(),
            0x30 => visitor.visit_i16x8_lt_u(),
            0x31 => visitor.visit_i16x8_gt_s(),
            0x32 => visitor.visit_i16x8_gt_u(),
            0x33 => visitor.visit_i16x8_le_s(),
            0x34 => visitor.visit_i16x8_le_u(),
            0x35 => visitor.visit_i16x8_ge_s(),
            0x36 => visitor.visit_i16x8_ge_u(),
            0x37 => visitor.visit_i32x4_eq(),
            0x38 => visitor.visit_i32x4_ne(),
            0x39 => visitor.visit_i32x4_lt_s(),
            0x3a => visitor.visit_i32x4_lt_u(),
            0x3b => visitor.visit_i32x4_gt_s(),
            0x3c => visitor.visit_i32x4_gt_u(),
            0x3d => visitor.visit_i32x4_le_s(),
            0x3e => visitor.visit_i32x4_le_u(),
            0x3f => visitor.visit_i32x4_ge_s(),
            0x40 => visitor.visit_i32x4_ge_u(),
            0x41 => visitor.visit_f32x4_eq(),
            0x42 => visitor.visit_f32x4_ne(),
            0x43 => visitor.visit_f32x4_lt(),
            0x44 => visitor.visit_f32x4_gt(),
            0x45 => visitor.visit_f32x4_le(),
            0x46 => visitor.visit_f32x4_ge(),
            0x47 => visitor.visit_f64x2_eq(),
            0x48 => visitor.visit_f64x2_ne(),
            0x49 => visitor.visit_f64x2_lt(),
            0x4a => visitor.visit_f64x2_gt(),
            0x4b => visitor.visit_f64x2_le(),
            0x4c => visitor.visit_f64x2_ge(),
            0x4d => visitor.visit_v128_not(),
            0x4e => visitor.visit_v128_and(),
            0x4f => visitor.visit_v128_andnot(),
            0x50 => visitor.visit_v128_or(),
            0x51 => visitor.visit_v128_xor(),
            0x52 => visitor.visit_v128_bitselect(),
            0x53 => visitor.visit_v128_any_true(),

            0x54 => {
                let memarg = self.read_memarg(0)?;
                let lane = self.read_lane_index()?;
                visitor.visit_v128_load8_lane(memarg, lane)
            }
            0x55 => {
                let memarg = self.read_memarg(1)?;
                let lane = self.read_lane_index()?;
                visitor.visit_v128_load16_lane(memarg, lane)
            }
            0x56 => {
                let memarg = self.read_memarg(2)?;
                let lane = self.read_lane_index()?;
                visitor.visit_v128_load32_lane(memarg, lane)
            }
            0x57 => {
                let memarg = self.read_memarg(3)?;
                let lane = self.read_lane_index()?;
                visitor.visit_v128_load64_lane(memarg, lane)
            }
            0x58 => {
                let memarg = self.read_memarg(0)?;
                let lane = self.read_lane_index()?;
                visitor.visit_v128_store8_lane(memarg, lane)
            }
            0x59 => {
                let memarg = self.read_memarg(1)?;
                let lane = self.read_lane_index()?;
                visitor.visit_v128_store16_lane(memarg, lane)
            }
            0x5a => {
                let memarg = self.read_memarg(2)?;
                let lane = self.read_lane_index()?;
                visitor.visit_v128_store32_lane(memarg, lane)
            }
            0x5b => {
                let memarg = self.read_memarg(3)?;
                let lane = self.read_lane_index()?;
                visitor.visit_v128_store64_lane(memarg, lane)
            }

            0x5c => visitor.visit_v128_load32_zero(self.read_memarg(2)?),
            0x5d => visitor.visit_v128_load64_zero(self.read_memarg(3)?),
            0x5e => visitor.visit_f32x4_demote_f64x2_zero(),
            0x5f => visitor.visit_f64x2_promote_low_f32x4(),
            0x60 => visitor.visit_i8x16_abs(),
            0x61 => visitor.visit_i8x16_neg(),
            0x62 => visitor.visit_i8x16_popcnt(),
            0x63 => visitor.visit_i8x16_all_true(),
            0x64 => visitor.visit_i8x16_bitmask(),
            0x65 => visitor.visit_i8x16_narrow_i16x8_s(),
            0x66 => visitor.visit_i8x16_narrow_i16x8_u(),
            0x67 => visitor.visit_f32x4_ceil(),
            0x68 => visitor.visit_f32x4_floor(),
            0x69 => visitor.visit_f32x4_trunc(),
            0x6a => visitor.visit_f32x4_nearest(),
            0x6b => visitor.visit_i8x16_shl(),
            0x6c => visitor.visit_i8x16_shr_s(),
            0x6d => visitor.visit_i8x16_shr_u(),
            0x6e => visitor.visit_i8x16_add(),
            0x6f => visitor.visit_i8x16_add_sat_s(),
            0x70 => visitor.visit_i8x16_add_sat_u(),
            0x71 => visitor.visit_i8x16_sub(),
            0x72 => visitor.visit_i8x16_sub_sat_s(),
            0x73 => visitor.visit_i8x16_sub_sat_u(),
            0x74 => visitor.visit_f64x2_ceil(),
            0x75 => visitor.visit_f64x2_floor(),
            0x76 => visitor.visit_i8x16_min_s(),
            0x77 => visitor.visit_i8x16_min_u(),
            0x78 => visitor.visit_i8x16_max_s(),
            0x79 => visitor.visit_i8x16_max_u(),
            0x7a => visitor.visit_f64x2_trunc(),
            0x7b => visitor.visit_i8x16_avgr_u(),
            0x7c => visitor.visit_i16x8_extadd_pairwise_i8x16_s(),
            0x7d => visitor.visit_i16x8_extadd_pairwise_i8x16_u(),
            0x7e => visitor.visit_i32x4_extadd_pairwise_i16x8_s(),
            0x7f => visitor.visit_i32x4_extadd_pairwise_i16x8_u(),
            0x80 => visitor.visit_i16x8_abs(),
            0x81 => visitor.visit_i16x8_neg(),
            0x82 => visitor.visit_i16x8_q15mulr_sat_s(),
            0x83 => visitor.visit_i16x8_all_true(),
            0x84 => visitor.visit_i16x8_bitmask(),
            0x85 => visitor.visit_i16x8_narrow_i32x4_s(),
            0x86 => visitor.visit_i16x8_narrow_i32x4_u(),
            0x87 => visitor.visit_i16x8_extend_low_i8x16_s(),
            0x88 => visitor.visit_i16x8_extend_high_i8x16_s(),
            0x89 => visitor.visit_i16x8_extend_low_i8x16_u(),
            0x8a => visitor.visit_i16x8_extend_high_i8x16_u(),
            0x8b => visitor.visit_i16x8_shl(),
            0x8c => visitor.visit_i16x8_shr_s(),
            0x8d => visitor.visit_i16x8_shr_u(),
            0x8e => visitor.visit_i16x8_add(),
            0x8f => visitor.visit_i16x8_add_sat_s(),
            0x90 => visitor.visit_i16x8_add_sat_u(),
            0x91 => visitor.visit_i16x8_sub(),
            0x92 => visitor.visit_i16x8_sub_sat_s(),
            0x93 => visitor.visit_i16x8_sub_sat_u(),
            0x94 => visitor.visit_f64x2_nearest(),
            0x95 => visitor.visit_i16x8_mul(),
            0x96 => visitor.visit_i16x8_min_s(),
            0x97 => visitor.visit_i16x8_min_u(),
            0x98 => visitor.visit_i16x8_max_s(),
            0x99 => visitor.visit_i16x8_max_u(),
            0x9b => visitor.visit_i16x8_avgr_u(),
            0x9c => visitor.visit_i16x8_extmul_low_i8x16_s(),
            0x9d => visitor.visit_i16x8_extmul_high_i8x16_s(),
            0x9e => visitor.visit_i16x8_extmul_low_i8x16_u(),
            0x9f => visitor.visit_i16x8_extmul_high_i8x16_u(),
            0xa0 => visitor.visit_i32x4_abs(),
            0xa1 => visitor.visit_i32x4_neg(),
            0xa3 => visitor.visit_i32x4_all_true(),
            0xa4 => visitor.visit_i32x4_bitmask(),
            0xa7 => visitor.visit_i32x4_extend_low_i16x8_s(),
            0xa8 => visitor.visit_i32x4_extend_high_i16x8_s(),
            0xa9 => visitor.visit_i32x4_extend_low_i16x8_u(),
            0xaa => visitor.visit_i32x4_extend_high_i16x8_u(),
            0xab => visitor.visit_i32x4_shl(),
            0xac => visitor.visit_i32x4_shr_s(),
            0xad => visitor.visit_i32x4_shr_u(),
            0xae => visitor.visit_i32x4_add(),
            0xb1 => visitor.visit_i32x4_sub(),
            0xb5 => visitor.visit_i32x4_mul(),
            0xb6 => visitor.visit_i32x4_min_s(),
            0xb7 => visitor.visit_i32x4_min_u(),
            0xb8 => visitor.visit_i32x4_max_s(),
            0xb9 => visitor.visit_i32x4_max_u(),
            0xba => visitor.visit_i32x4_dot_i16x8_s(),
            0xbc => visitor.visit_i32x4_extmul_low_i16x8_s(),
            0xbd => visitor.visit_i32x4_extmul_high_i16x8_s(),
            0xbe => visitor.visit_i32x4_extmul_low_i16x8_u(),
            0xbf => visitor.visit_i32x4_extmul_high_i16x8_u(),
            0xc0 => visitor.visit_i64x2_abs(),
            0xc1 => visitor.visit_i64x2_neg(),
            0xc3 => visitor.visit_i64x2_all_true(),
            0xc4 => visitor.visit_i64x2_bitmask(),
            0xc7 => visitor.visit_i64x2_extend_low_i32x4_s(),
            0xc8 => visitor.visit_i64x2_extend_high_i32x4_s(),
            0xc9 => visitor.visit_i64x2_extend_low_i32x4_u(),
            0xca => visitor.visit_i64x2_extend_high_i32x4_u(),
            0xcb => visitor.visit_i64x2_shl(),
            0xcc => visitor.visit_i64x2_shr_s(),
            0xcd => visitor.visit_i64x2_shr_u(),
            0xce => visitor.visit_i64x2_add(),
            0xd1 => visitor.visit_i64x2_sub(),
            0xd5 => visitor.visit_i64x2_mul(),
            0xd6 => visitor.visit_i64x2_eq(),
            0xd7 => visitor.visit_i64x2_ne(),
            0xd8 => visitor.visit_i64x2_lt_s(),
            0xd9 => visitor.visit_i64x2_gt_s(),
            0xda => visitor.visit_i64x2_le_s(),
            0xdb => visitor.visit_i64x2_ge_s(),
            0xdc => visitor.visit_i64x2_extmul_low_i32x4_s(),
            0xdd => visitor.visit_i64x2_extmul_high_i32x4_s(),
            0xde => visitor.visit_i64x2_extmul_low_i32x4_u(),
            0xdf => visitor.visit_i64x2_extmul_high_i32x4_u(),
            0xe0 => visitor.visit_f32x4_abs(),
            0xe1 => visitor.visit_f32x4_neg(),
            0xe3 => visitor.visit_f32x4_sqrt(),
            0xe4 => visitor.visit_f32x4_add(),
            0xe5 => visitor.visit_f32x4_sub(),
            0xe6 => visitor.visit_f32x4_mul(),
            0xe7 => visitor.visit_f32x4_div(),
            0xe8 => visitor.visit_f32x4_min(),
            0xe9 => visitor.visit_f32x4_max(),
            0xea => visitor.visit_f32x4_pmin(),
            0xeb => visitor.visit_f32x4_pmax(),
            0xec => visitor.visit_f64x2_abs(),
            0xed => visitor.visit_f64x2_neg(),
            0xef => visitor.visit_f64x2_sqrt(),
            0xf0 => visitor.visit_f64x2_add(),
            0xf1 => visitor.visit_f64x2_sub(),
            0xf2 => visitor.visit_f64x2_mul(),
            0xf3 => visitor.visit_f64x2_div(),
            0xf4 => visitor.visit_f64x2_min(),
            0xf5 => visitor.visit_f64x2_max(),
            0xf6 => visitor.visit_f64x2_pmin(),
            0xf7 => visitor.visit_f64x2_pmax(),
            0xf8 => visitor.visit_i32x4_trunc_sat_f32x4_s(),
            0xf9 => visitor.visit_i32x4_trunc_sat_f32x4_u(),
            0xfa => visitor.visit_f32x4_convert_i32x4_s(),
            0xfb => visitor.visit_f32x4_convert_i32x4_u(),
            0xfc => visitor.visit_i32x4_trunc_sat_f64x2_s_zero(),
            0xfd => visitor.visit_i32x4_trunc_sat_f64x2_u_zero(),
            0xfe => visitor.visit_f64x2_convert_low_i32x4_s(),
            0xff => visitor.visit_f64x2_convert_low_i32x4_u(),
            0x100 => visitor.visit_i8x16_relaxed_swizzle(),
            0x101 => visitor.visit_i32x4_relaxed_trunc_f32x4_s(),
            0x102 => visitor.visit_i32x4_relaxed_trunc_f32x4_u(),
            0x103 => visitor.visit_i32x4_relaxed_trunc_f64x2_s_zero(),
            0x104 => visitor.visit_i32x4_relaxed_trunc_f64x2_u_zero(),
            0x105 => visitor.visit_f32x4_relaxed_madd(),
            0x106 => visitor.visit_f32x4_relaxed_nmadd(),
            0x107 => visitor.visit_f64x2_relaxed_madd(),
            0x108 => visitor.visit_f64x2_relaxed_nmadd(),
            0x109 => visitor.visit_i8x16_relaxed_laneselect(),
            0x10a => visitor.visit_i16x8_relaxed_laneselect(),
            0x10b => visitor.visit_i32x4_relaxed_laneselect(),
            0x10c => visitor.visit_i64x2_relaxed_laneselect(),
            0x10d => visitor.visit_f32x4_relaxed_min(),
            0x10e => visitor.visit_f32x4_relaxed_max(),
            0x10f => visitor.visit_f64x2_relaxed_min(),
            0x110 => visitor.visit_f64x2_relaxed_max(),
            0x111 => visitor.visit_i16x8_relaxed_q15mulr_s(),
            0x112 => visitor.visit_i16x8_relaxed_dot_i8x16_i7x16_s(),
            0x113 => visitor.visit_i32x4_relaxed_dot_i8x16_i7x16_add_s(),

            _ => bail!(pos, "unknown 0xfd subopcode: 0x{code:x}"),
        })
    }

    fn visit_0xfe_operator<T>(
        &mut self,
        pos: usize,
        visitor: &mut T,
    ) -> Result<<T as VisitOperator<'a>>::Output>
    where
        T: VisitOperator<'a>,
    {
        let code = self.reader.read_var_u32()?;
        Ok(match code {
            0x00 => visitor.visit_memory_atomic_notify(self.read_memarg(2)?),
            0x01 => visitor.visit_memory_atomic_wait32(self.read_memarg(2)?),
            0x02 => visitor.visit_memory_atomic_wait64(self.read_memarg(3)?),
            0x03 => {
                if self.reader.read_u8()? != 0 {
                    bail!(pos, "nonzero byte after `atomic.fence`");
                }
                visitor.visit_atomic_fence()
            }
            0x10 => visitor.visit_i32_atomic_load(self.read_memarg(2)?),
            0x11 => visitor.visit_i64_atomic_load(self.read_memarg(3)?),
            0x12 => visitor.visit_i32_atomic_load8_u(self.read_memarg(0)?),
            0x13 => visitor.visit_i32_atomic_load16_u(self.read_memarg(1)?),
            0x14 => visitor.visit_i64_atomic_load8_u(self.read_memarg(0)?),
            0x15 => visitor.visit_i64_atomic_load16_u(self.read_memarg(1)?),
            0x16 => visitor.visit_i64_atomic_load32_u(self.read_memarg(2)?),
            0x17 => visitor.visit_i32_atomic_store(self.read_memarg(2)?),
            0x18 => visitor.visit_i64_atomic_store(self.read_memarg(3)?),
            0x19 => visitor.visit_i32_atomic_store8(self.read_memarg(0)?),
            0x1a => visitor.visit_i32_atomic_store16(self.read_memarg(1)?),
            0x1b => visitor.visit_i64_atomic_store8(self.read_memarg(0)?),
            0x1c => visitor.visit_i64_atomic_store16(self.read_memarg(1)?),
            0x1d => visitor.visit_i64_atomic_store32(self.read_memarg(2)?),
            0x1e => visitor.visit_i32_atomic_rmw_add(self.read_memarg(2)?),
            0x1f => visitor.visit_i64_atomic_rmw_add(self.read_memarg(3)?),
            0x20 => visitor.visit_i32_atomic_rmw8_add_u(self.read_memarg(0)?),
            0x21 => visitor.visit_i32_atomic_rmw16_add_u(self.read_memarg(1)?),
            0x22 => visitor.visit_i64_atomic_rmw8_add_u(self.read_memarg(0)?),
            0x23 => visitor.visit_i64_atomic_rmw16_add_u(self.read_memarg(1)?),
            0x24 => visitor.visit_i64_atomic_rmw32_add_u(self.read_memarg(2)?),
            0x25 => visitor.visit_i32_atomic_rmw_sub(self.read_memarg(2)?),
            0x26 => visitor.visit_i64_atomic_rmw_sub(self.read_memarg(3)?),
            0x27 => visitor.visit_i32_atomic_rmw8_sub_u(self.read_memarg(0)?),
            0x28 => visitor.visit_i32_atomic_rmw16_sub_u(self.read_memarg(1)?),
            0x29 => visitor.visit_i64_atomic_rmw8_sub_u(self.read_memarg(0)?),
            0x2a => visitor.visit_i64_atomic_rmw16_sub_u(self.read_memarg(1)?),
            0x2b => visitor.visit_i64_atomic_rmw32_sub_u(self.read_memarg(2)?),
            0x2c => visitor.visit_i32_atomic_rmw_and(self.read_memarg(2)?),
            0x2d => visitor.visit_i64_atomic_rmw_and(self.read_memarg(3)?),
            0x2e => visitor.visit_i32_atomic_rmw8_and_u(self.read_memarg(0)?),
            0x2f => visitor.visit_i32_atomic_rmw16_and_u(self.read_memarg(1)?),
            0x30 => visitor.visit_i64_atomic_rmw8_and_u(self.read_memarg(0)?),
            0x31 => visitor.visit_i64_atomic_rmw16_and_u(self.read_memarg(1)?),
            0x32 => visitor.visit_i64_atomic_rmw32_and_u(self.read_memarg(2)?),
            0x33 => visitor.visit_i32_atomic_rmw_or(self.read_memarg(2)?),
            0x34 => visitor.visit_i64_atomic_rmw_or(self.read_memarg(3)?),
            0x35 => visitor.visit_i32_atomic_rmw8_or_u(self.read_memarg(0)?),
            0x36 => visitor.visit_i32_atomic_rmw16_or_u(self.read_memarg(1)?),
            0x37 => visitor.visit_i64_atomic_rmw8_or_u(self.read_memarg(0)?),
            0x38 => visitor.visit_i64_atomic_rmw16_or_u(self.read_memarg(1)?),
            0x39 => visitor.visit_i64_atomic_rmw32_or_u(self.read_memarg(2)?),
            0x3a => visitor.visit_i32_atomic_rmw_xor(self.read_memarg(2)?),
            0x3b => visitor.visit_i64_atomic_rmw_xor(self.read_memarg(3)?),
            0x3c => visitor.visit_i32_atomic_rmw8_xor_u(self.read_memarg(0)?),
            0x3d => visitor.visit_i32_atomic_rmw16_xor_u(self.read_memarg(1)?),
            0x3e => visitor.visit_i64_atomic_rmw8_xor_u(self.read_memarg(0)?),
            0x3f => visitor.visit_i64_atomic_rmw16_xor_u(self.read_memarg(1)?),
            0x40 => visitor.visit_i64_atomic_rmw32_xor_u(self.read_memarg(2)?),
            0x41 => visitor.visit_i32_atomic_rmw_xchg(self.read_memarg(2)?),
            0x42 => visitor.visit_i64_atomic_rmw_xchg(self.read_memarg(3)?),
            0x43 => visitor.visit_i32_atomic_rmw8_xchg_u(self.read_memarg(0)?),
            0x44 => visitor.visit_i32_atomic_rmw16_xchg_u(self.read_memarg(1)?),
            0x45 => visitor.visit_i64_atomic_rmw8_xchg_u(self.read_memarg(0)?),
            0x46 => visitor.visit_i64_atomic_rmw16_xchg_u(self.read_memarg(1)?),
            0x47 => visitor.visit_i64_atomic_rmw32_xchg_u(self.read_memarg(2)?),
            0x48 => visitor.visit_i32_atomic_rmw_cmpxchg(self.read_memarg(2)?),
            0x49 => visitor.visit_i64_atomic_rmw_cmpxchg(self.read_memarg(3)?),
            0x4a => visitor.visit_i32_atomic_rmw8_cmpxchg_u(self.read_memarg(0)?),
            0x4b => visitor.visit_i32_atomic_rmw16_cmpxchg_u(self.read_memarg(1)?),
            0x4c => visitor.visit_i64_atomic_rmw8_cmpxchg_u(self.read_memarg(0)?),
            0x4d => visitor.visit_i64_atomic_rmw16_cmpxchg_u(self.read_memarg(1)?),
            0x4e => visitor.visit_i64_atomic_rmw32_cmpxchg_u(self.read_memarg(2)?),

            0x4f => {
                visitor.visit_global_atomic_get(self.read_ordering()?, self.reader.read_var_u32()?)
            }
            0x50 => {
                visitor.visit_global_atomic_set(self.read_ordering()?, self.reader.read_var_u32()?)
            }
            0x51 => visitor
                .visit_global_atomic_rmw_add(self.read_ordering()?, self.reader.read_var_u32()?),
            0x52 => visitor
                .visit_global_atomic_rmw_sub(self.read_ordering()?, self.reader.read_var_u32()?),
            0x53 => visitor
                .visit_global_atomic_rmw_and(self.read_ordering()?, self.reader.read_var_u32()?),
            0x54 => visitor
                .visit_global_atomic_rmw_or(self.read_ordering()?, self.reader.read_var_u32()?),
            0x55 => visitor
                .visit_global_atomic_rmw_xor(self.read_ordering()?, self.reader.read_var_u32()?),
            0x56 => visitor
                .visit_global_atomic_rmw_xchg(self.read_ordering()?, self.reader.read_var_u32()?),
            0x57 => visitor.visit_global_atomic_rmw_cmpxchg(
                self.read_ordering()?,
                self.reader.read_var_u32()?,
            ),
            0x58 => {
                visitor.visit_table_atomic_get(self.read_ordering()?, self.reader.read_var_u32()?)
            }
            0x59 => {
                visitor.visit_table_atomic_set(self.read_ordering()?, self.reader.read_var_u32()?)
            }
            0x5a => visitor
                .visit_table_atomic_rmw_xchg(self.read_ordering()?, self.reader.read_var_u32()?),
            0x5b => visitor
                .visit_table_atomic_rmw_cmpxchg(self.read_ordering()?, self.reader.read_var_u32()?),
            0x5c => visitor.visit_struct_atomic_get(
                self.read_ordering()?,
                self.reader.read_var_u32()?,
                self.reader.read_var_u32()?,
            ),
            0x5d => visitor.visit_struct_atomic_get_s(
                self.read_ordering()?,
                self.reader.read_var_u32()?,
                self.reader.read_var_u32()?,
            ),
            0x5e => visitor.visit_struct_atomic_get_u(
                self.read_ordering()?,
                self.reader.read_var_u32()?,
                self.reader.read_var_u32()?,
            ),
            0x5f => visitor.visit_struct_atomic_set(
                self.read_ordering()?,
                self.reader.read_var_u32()?,
                self.reader.read_var_u32()?,
            ),
            0x60 => visitor.visit_struct_atomic_rmw_add(
                self.read_ordering()?,
                self.reader.read_var_u32()?,
                self.reader.read_var_u32()?,
            ),
            0x61 => visitor.visit_struct_atomic_rmw_sub(
                self.read_ordering()?,
                self.reader.read_var_u32()?,
                self.reader.read_var_u32()?,
            ),
            0x62 => visitor.visit_struct_atomic_rmw_and(
                self.read_ordering()?,
                self.reader.read_var_u32()?,
                self.reader.read_var_u32()?,
            ),
            0x63 => visitor.visit_struct_atomic_rmw_or(
                self.read_ordering()?,
                self.reader.read_var_u32()?,
                self.reader.read_var_u32()?,
            ),
            0x64 => visitor.visit_struct_atomic_rmw_xor(
                self.read_ordering()?,
                self.reader.read_var_u32()?,
                self.reader.read_var_u32()?,
            ),
            0x65 => visitor.visit_struct_atomic_rmw_xchg(
                self.read_ordering()?,
                self.reader.read_var_u32()?,
                self.reader.read_var_u32()?,
            ),
            0x66 => visitor.visit_struct_atomic_rmw_cmpxchg(
                self.read_ordering()?,
                self.reader.read_var_u32()?,
                self.reader.read_var_u32()?,
            ),
            0x67 => {
                visitor.visit_array_atomic_get(self.read_ordering()?, self.reader.read_var_u32()?)
            }
            0x68 => {
                visitor.visit_array_atomic_get_s(self.read_ordering()?, self.reader.read_var_u32()?)
            }
            0x69 => {
                visitor.visit_array_atomic_get_u(self.read_ordering()?, self.reader.read_var_u32()?)
            }
            0x6a => {
                visitor.visit_array_atomic_set(self.read_ordering()?, self.reader.read_var_u32()?)
            }
            0x6b => visitor
                .visit_array_atomic_rmw_add(self.read_ordering()?, self.reader.read_var_u32()?),
            0x6c => visitor
                .visit_array_atomic_rmw_sub(self.read_ordering()?, self.reader.read_var_u32()?),
            0x6d => visitor
                .visit_array_atomic_rmw_and(self.read_ordering()?, self.reader.read_var_u32()?),
            0x6e => visitor
                .visit_array_atomic_rmw_or(self.read_ordering()?, self.reader.read_var_u32()?),
            0x6f => visitor
                .visit_array_atomic_rmw_xor(self.read_ordering()?, self.reader.read_var_u32()?),
            0x70 => visitor
                .visit_array_atomic_rmw_xchg(self.read_ordering()?, self.reader.read_var_u32()?),
            0x71 => visitor
                .visit_array_atomic_rmw_cmpxchg(self.read_ordering()?, self.reader.read_var_u32()?),
            0x72 => visitor.visit_ref_i31_shared(),

            _ => bail!(pos, "unknown 0xfe subopcode: 0x{code:x}"),
        })
    }

    pub(crate) fn skip_const_expr(&mut self) -> Result<()> {
        loop {
            if let Operator::End = self.read()? {
                self.ensure_stack_empty()?;
                return Ok(());
            }
        }
    }

    pub fn get_binary_reader(&self) -> BinaryReader<'a> {
        self.reader.clone()
    }

    pub fn is_end_then_eof(&self) -> bool {
        self.reader.is_end_then_eof()
    }

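    /// Reads the `memarg` immediate of a load or store style opcode.
    ///
    /// The leading LEB128 "flags" value carries the alignment as a power of two in
    /// its low bits; when the multi-memory proposal is enabled, bit 6 signals that
    /// an explicit memory index follows. `max_align` is the per-opcode limit (also
    /// a log2 value) recorded alongside the parsed alignment for later validation.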
    fn read_memarg(&mut self, max_align: u8) -> Result<MemArg> {
        let flags_pos = self.original_position();
        let mut flags = self.reader.read_var_u32()?;

        let memory = if self.reader.multi_memory() && flags & (1 << 6) != 0 {
            flags ^= 1 << 6;
            self.reader.read_var_u32()?
        } else {
            0
        };
        let max_flag_bits = if self.reader.multi_memory() { 6 } else { 5 };
        if flags >= (1 << max_flag_bits) {
            return Err(BinaryReaderError::new(
                "malformed memop alignment: alignment too large",
                flags_pos,
            ));
        }
        let align = flags as u8;
        let offset = if self.reader.memory64() {
            self.reader.read_var_u64()?
        } else {
            u64::from(self.reader.read_var_u32()?)
        };
        Ok(MemArg {
            align,
            max_align,
            offset,
            memory,
        })
    }

    fn read_ordering(&mut self) -> Result<Ordering> {
        let byte = self.reader.read_var_u32()?;
        match byte {
            0 => Ok(Ordering::SeqCst),
            1 => Ok(Ordering::AcqRel),
            x => Err(BinaryReaderError::new(
                &format!("invalid atomic consistency ordering {x}"),
                self.original_position() - 1,
            )),
        }
    }

    fn read_br_table(&mut self) -> Result<BrTable<'a>> {
        let cnt = self.reader.read_size(MAX_WASM_BR_TABLE_SIZE, "br_table")?;
        let reader = self.reader.skip(|reader| {
            for _ in 0..cnt {
                reader.read_var_u32()?;
            }
            Ok(())
        })?;
        let default = self.reader.read_var_u32()?;
        Ok(BrTable {
            reader,
            cnt: cnt as u32,
            default,
        })
    }

    #[cfg(feature = "simd")]
    fn read_lane_index(&mut self) -> Result<u8> {
        self.reader.read_u8()
    }

    #[cfg(feature = "simd")]
    fn read_v128(&mut self) -> Result<V128> {
        let mut bytes = [0; 16];
        bytes.clone_from_slice(self.reader.read_bytes(16)?);
        Ok(V128(bytes))
    }

    fn read_memory_index_or_zero_if_not_multi_memory(&mut self) -> Result<u32> {
        if self.reader.multi_memory() {
            self.reader.read_var_u32()
        } else {
            match self.reader.read_u8()? {
                0 => Ok(0),
                _ => bail!(self.original_position() - 1, "zero byte expected"),
            }
        }
    }

    fn read_call_indirect_table_immediate(&mut self) -> Result<u32> {
        if self.reader.call_indirect_overlong() {
            return self.reader.read_var_u32();
        }

        match self.reader.read_u8()? {
            0 => Ok(0),
            _ => bail!(self.original_position() - 1, "zero byte expected"),
        }
    }
}

impl<'a> IntoIterator for OperatorsReader<'a> {
    type Item = Result<Operator<'a>>;
    type IntoIter = OperatorsIterator<'a>;

    fn into_iter(self) -> Self::IntoIter {
        OperatorsIterator {
            reader: self,
            err: false,
        }
    }
}

pub struct OperatorsIterator<'a> {
    reader: OperatorsReader<'a>,
    err: bool,
}

impl<'a> Iterator for OperatorsIterator<'a> {
    type Item = Result<Operator<'a>>;

    fn next(&mut self) -> Option<Self::Item> {
        if self.err || self.reader.eof() {
            return None;
        }
        let result = self.reader.read();
        self.err = result.is_err();
        Some(result)
    }
}

pub struct OperatorsIteratorWithOffsets<'a> {
    reader: OperatorsReader<'a>,
    err: bool,
}

impl<'a> Iterator for OperatorsIteratorWithOffsets<'a> {
    type Item = Result<(Operator<'a>, usize)>;

    fn next(&mut self) -> Option<Self::Item> {
        if self.err || self.reader.eof() {
            return None;
        }
        let result = self.reader.read_with_offset();
        self.err = result.is_err();
        Some(result)
    }
}
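
// Illustrative sketch (not part of the original file): walk a function body and
// record the offset at which every operator's opcode starts. The iterator stops at
// the end of the underlying reader, and any malformed instruction surfaces as an
// `Err` item.
#[cfg(test)]
#[allow(dead_code)]
fn dump_operator_offsets(reader: OperatorsReader<'_>) -> Result<Vec<usize>> {
    let mut offsets = Vec::new();
    for item in reader.into_iter_with_offsets() {
        let (_op, offset) = item?;
        offsets.push(offset);
    }
    Ok(offsets)
}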

macro_rules! define_visit_operator {
    ($(@$proposal:ident $op:ident $({ $($arg:ident: $argty:ty),* })? => $visit:ident ($($ann:tt)*))*) => {
        $(
            fn $visit(&mut self $($(,$arg: $argty)*)?) -> Self::Output;
        )*
    }
}

#[allow(missing_docs)]
pub trait VisitOperator<'a> {
    type Output: 'a;

    fn visit_operator(&mut self, op: &Operator<'a>) -> Self::Output {
        macro_rules! visit_operator {
            ($(@$proposal:ident $op:ident $({ $($arg:ident: $argty:ty),* })? => $visit:ident ($($ann:tt)*))*) => {{
                match op {
                    $( Operator::$op $({ $($arg),* })? => self.$visit($($($arg.clone()),*)?), )*
                    #[cfg(feature = "simd")]
                    other => visit_simd_operator(self, other),
                }
            }};
        }
        crate::for_each_visit_operator!(visit_operator)
    }

    #[cfg(feature = "simd")]
    fn simd_visitor(&mut self) -> Option<&mut dyn VisitSimdOperator<'a, Output = Self::Output>> {
        None
    }

    crate::for_each_visit_operator!(define_visit_operator);
}
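
// Illustrative sketch (not part of the original file): implementing `VisitOperator`
// by hand means writing hundreds of `visit_*` methods, so implementations normally
// expand `for_each_visit_operator!` with a small helper macro, exactly as
// `OperatorFactory` does at the bottom of this file. The counter below simply
// increments once per operator and discards all immediates.
#[cfg(test)]
mod visit_operator_example {
    #![allow(dead_code)]

    use super::*;

    struct OpCounter {
        count: u64,
    }

    macro_rules! define_count_operator {
        ($(@$proposal:ident $op:ident $({ $($arg:ident: $argty:ty),* })? => $visit:ident ($($ann:tt)*))*) => {
            $(
                fn $visit(&mut self $($(,$arg: $argty)*)?) -> Self::Output {
                    $($(let _ = $arg;)*)?
                    self.count += 1;
                }
            )*
        }
    }

    impl<'a> VisitOperator<'a> for OpCounter {
        type Output = ();

        crate::for_each_visit_operator!(define_count_operator);
    }
}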

#[cfg(feature = "simd")]
fn visit_simd_operator<'a, V>(visitor: &mut V, op: &Operator<'a>) -> V::Output
where
    V: VisitOperator<'a> + ?Sized,
{
    let Some(simd_visitor) = visitor.simd_visitor() else {
        panic!("missing SIMD visitor to visit operator: {op:?}")
    };
    macro_rules! visit_simd_operator {
        ($(@$proposal:ident $op:ident $({ $($arg:ident: $argty:ty),* })? => $visit:ident ($($ann:tt)*))*) => {{
            match op {
                $( Operator::$op $({ $($arg),* })? => simd_visitor.$visit($($($arg.clone()),*)?), )*
                unexpected => unreachable!("unexpected non-SIMD operator: {unexpected:?}"),
            }
        }};
    }
    crate::for_each_visit_simd_operator!(visit_simd_operator)
}

#[cfg(feature = "simd")]
#[allow(missing_docs)]
pub trait VisitSimdOperator<'a>: VisitOperator<'a> {
    crate::for_each_visit_simd_operator!(define_visit_operator);
}

macro_rules! define_visit_operator_delegate {
    ($(@$proposal:ident $op:ident $({ $($arg:ident: $argty:ty),* })? => $visit:ident ($($ann:tt)*))*) => {
        $(
            fn $visit(&mut self $($(,$arg: $argty)*)?) -> Self::Output {
                V::$visit(&mut *self, $($($arg),*)?)
            }
        )*
    }
}

impl<'a, 'b, V: VisitOperator<'a> + ?Sized> VisitOperator<'a> for &'b mut V {
    type Output = V::Output;
    fn visit_operator(&mut self, op: &Operator<'a>) -> Self::Output {
        V::visit_operator(*self, op)
    }
    #[cfg(feature = "simd")]
    fn simd_visitor(&mut self) -> Option<&mut dyn VisitSimdOperator<'a, Output = V::Output>> {
        V::simd_visitor(*self)
    }
    crate::for_each_visit_operator!(define_visit_operator_delegate);
}

#[cfg(feature = "simd")]
impl<'a, 'b, V: VisitSimdOperator<'a> + ?Sized> VisitSimdOperator<'a> for &'b mut V {
    crate::for_each_visit_simd_operator!(define_visit_operator_delegate);
}

impl<'a, V: VisitOperator<'a> + ?Sized> VisitOperator<'a> for Box<V> {
    type Output = V::Output;
    fn visit_operator(&mut self, op: &Operator<'a>) -> Self::Output {
        V::visit_operator(&mut *self, op)
    }
    #[cfg(feature = "simd")]
    fn simd_visitor(&mut self) -> Option<&mut dyn VisitSimdOperator<'a, Output = V::Output>> {
        V::simd_visitor(&mut *self)
    }
    crate::for_each_visit_operator!(define_visit_operator_delegate);
}

#[cfg(feature = "simd")]
impl<'a, V: VisitSimdOperator<'a> + ?Sized> VisitSimdOperator<'a> for Box<V> {
    crate::for_each_visit_simd_operator!(define_visit_operator_delegate);
}

#[derive(Clone, Debug, Eq, PartialEq)]
pub struct TryTable {
    pub ty: BlockType,
    pub catches: Vec<Catch>,
}

#[derive(Copy, Clone, Debug, Eq, PartialEq)]
#[allow(missing_docs)]
pub enum Catch {
    One { tag: u32, label: u32 },
    OneRef { tag: u32, label: u32 },
    All { label: u32 },
    AllRef { label: u32 },
}

impl<'a> FromReader<'a> for TryTable {
    fn from_reader(reader: &mut BinaryReader<'a>) -> Result<Self> {
        let ty = reader.read_block_type()?;
        let catches = reader
            .read_iter(MAX_WASM_CATCHES, "catches")?
            .collect::<Result<_>>()?;
        Ok(TryTable { ty, catches })
    }
}

impl<'a> FromReader<'a> for Catch {
    fn from_reader(reader: &mut BinaryReader<'a>) -> Result<Self> {
        Ok(match reader.read_u8()? {
            0x00 => Catch::One {
                tag: reader.read_var_u32()?,
                label: reader.read_var_u32()?,
            },
            0x01 => Catch::OneRef {
                tag: reader.read_var_u32()?,
                label: reader.read_var_u32()?,
            },
            0x02 => Catch::All {
                label: reader.read_var_u32()?,
            },
            0x03 => Catch::AllRef {
                label: reader.read_var_u32()?,
            },

            x => return reader.invalid_leading_byte(x, "catch"),
        })
    }
}

#[derive(Clone, Debug, Eq, PartialEq)]
pub struct ResumeTable {
    pub handlers: Vec<Handle>,
}

#[derive(Copy, Clone, Debug, Eq, PartialEq)]
#[allow(missing_docs)]
pub enum Handle {
    OnLabel { tag: u32, label: u32 },
    OnSwitch { tag: u32 },
}

impl ResumeTable {
    pub fn len(&self) -> usize {
        self.handlers.len()
    }
}

impl<'a> FromReader<'a> for ResumeTable {
    fn from_reader(reader: &mut BinaryReader<'a>) -> Result<Self> {
        let handlers = reader
            .read_iter(MAX_WASM_HANDLERS, "resume table")?
            .collect::<Result<_>>()?;
        let table = ResumeTable { handlers };
        Ok(table)
    }
}

impl<'a> FromReader<'a> for Handle {
    fn from_reader(reader: &mut BinaryReader<'a>) -> Result<Self> {
        Ok(match reader.read_u8()? {
            0x00 => Handle::OnLabel {
                tag: reader.read_var_u32()?,
                label: reader.read_var_u32()?,
            },
            0x01 => Handle::OnSwitch {
                tag: reader.read_var_u32()?,
            },
            x => return reader.invalid_leading_byte(x, "on clause"),
        })
    }
}

struct OperatorFactory<'a> {
    marker: core::marker::PhantomData<fn() -> &'a ()>,
}

impl<'a> OperatorFactory<'a> {
    fn new() -> Self {
        Self {
            marker: core::marker::PhantomData,
        }
    }
}

macro_rules! define_visit_operator {
    ($(@$proposal:ident $op:ident $({ $($arg:ident: $argty:ty),* })? => $visit:ident ($($ann:tt)*))*) => {
        $(
            fn $visit(&mut self $($(,$arg: $argty)*)?) -> Operator<'a> {
                Operator::$op $({ $($arg),* })?
            }
        )*
    }
}

impl<'a> VisitOperator<'a> for OperatorFactory<'a> {
    type Output = Operator<'a>;

    #[cfg(feature = "simd")]
    fn simd_visitor(&mut self) -> Option<&mut dyn VisitSimdOperator<'a, Output = Self::Output>> {
        Some(self)
    }

    crate::for_each_visit_operator!(define_visit_operator);
}

#[cfg(feature = "simd")]
impl<'a> VisitSimdOperator<'a> for OperatorFactory<'a> {
    crate::for_each_visit_simd_operator!(define_visit_operator);
}