wasmparser/validator/operators.rs

/* Copyright 2019 Mozilla Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
15
// The basic validation algorithm here is copied from the "Validation
// Algorithm" section of the WebAssembly specification -
// https://webassembly.github.io/spec/core/appendix/algorithm.html.
//
// That algorithm is followed pretty closely here, namely `push_operand`,
// `pop_operand`, `push_ctrl`, and `pop_ctrl`. If anything here is a bit
// confusing it's recommended to read over that section to see how it maps to
// the various methods here.
25#[cfg(feature = "simd")]
26use crate::VisitSimdOperator;
27use crate::{
28    AbstractHeapType, BinaryReaderError, BlockType, BrTable, Catch, ContType, FieldType, FrameKind,
29    FuncType, GlobalType, Handle, HeapType, Ieee32, Ieee64, MemArg, ModuleArity, RefType, Result,
30    ResumeTable, StorageType, StructType, SubType, TableType, TryTable, UnpackedIndex, ValType,
31    VisitOperator, WasmFeatures, WasmModuleResources, limits::MAX_WASM_FUNCTION_LOCALS,
32};
33use crate::{CompositeInnerType, Ordering, prelude::*};
34use core::ops::{Deref, DerefMut};
35use core::{cmp, iter, mem};
36
37#[cfg(feature = "simd")]
38mod simd;
39
/// Per-function state used while validating the sequence of operators in a
/// single function body or constant expression.
pub(crate) struct OperatorValidator {
    /// The types of the function's locals (parameters plus declared locals).
    pub(super) locals: Locals,
    /// Tracks which non-defaultable locals have been initialized so far.
    local_inits: LocalInits,

    // This is a list of flags for wasm features which are used to gate various
    // instructions.
    pub(crate) features: WasmFeatures,

    // Temporary storage used during `match_stack_operands`
    popped_types_tmp: Vec<MaybeType>,

    /// The `control` list is the list of blocks that we're currently in.
    control: Vec<Frame>,
    /// The `operands` is the current type stack.
    operands: Vec<MaybeType>,

    /// Whether validation is happening in a shared context.
    shared: bool,

    /// A trace of all operand push/pop operations performed while validating an
    /// opcode. This is then compared to the arity that we report to double
    /// check that arity report's correctness. `true` is "push" and `false` is
    /// "pop".
    #[cfg(debug_assertions)]
    pub(crate) pop_push_log: Vec<bool>,
}
66
/// Captures the initialization of non-defaultable locals.
///
/// Tracks, per local, whether it has been initialized, and records every
/// change so it can be undone when a control frame pops.
struct LocalInits {
    /// Records if a local is already initialized.
    local_inits: Vec<bool>,
    /// When `local_inits` is modified, the relevant `index` is recorded
    /// here to be undone when control pops.
    inits: Vec<u32>,
    /// The index of the first non-defaultable local.
    ///
    /// # Note
    ///
    /// This is an optimization so that we only have to perform expensive
    /// look-ups for locals that have a local index equal to or higher than this.
    first_non_default_local: u32,
}
82
83impl Default for LocalInits {
84    fn default() -> Self {
85        Self {
86            local_inits: Vec::default(),
87            inits: Vec::default(),
88            first_non_default_local: u32::MAX,
89        }
90    }
91}
92
93impl LocalInits {
94    /// Defines new function local parameters.
95    pub fn define_params(&mut self, count: usize) {
96        let Some(new_len) = self.local_inits.len().checked_add(count) else {
97            panic!("tried to define too many function locals as parameters: {count}");
98        };
99        self.local_inits.resize(new_len, true);
100    }
101
102    /// Defines `count` function locals of type `ty`.
103    pub fn define_locals(&mut self, count: u32, ty: ValType) {
104        let Ok(count) = usize::try_from(count) else {
105            panic!("tried to define too many function locals: {count}");
106        };
107        let len = self.local_inits.len();
108        let Some(new_len) = len.checked_add(count) else {
109            panic!("tried to define too many function locals: {count}");
110        };
111        let is_defaultable = ty.is_defaultable();
112        if !is_defaultable && self.first_non_default_local == u32::MAX {
113            self.first_non_default_local = len as u32;
114        }
115        self.local_inits.resize(new_len, is_defaultable);
116    }
117
118    /// Returns `true` if the local at `local_index` has already been initialized.
119    #[inline]
120    pub fn is_uninit(&self, local_index: u32) -> bool {
121        if local_index < self.first_non_default_local {
122            return false;
123        }
124        !self.local_inits[local_index as usize]
125    }
126
127    /// Marks the local at `local_index` as initialized.
128    #[inline]
129    pub fn set_init(&mut self, local_index: u32) {
130        if self.is_uninit(local_index) {
131            self.local_inits[local_index as usize] = true;
132            self.inits.push(local_index);
133        }
134    }
135
136    /// Registers a new control frame and returns its `height`.
137    pub fn push_ctrl(&mut self) -> usize {
138        self.inits.len()
139    }
140
141    /// Pops a control frame via its `height`.
142    ///
143    /// This uninitializes all locals that have been initialized within it.
144    pub fn pop_ctrl(&mut self, height: usize) {
145        for local_index in self.inits.split_off(height) {
146            self.local_inits[local_index as usize] = false;
147        }
148    }
149
150    /// Clears the [`LocalInits`].
151    ///
152    /// After this operation `self` will be empty and ready for reuse.
153    pub fn clear(&mut self) {
154        self.local_inits.clear();
155        self.inits.clear();
156        self.first_non_default_local = u32::MAX;
157    }
158
159    /// Returns `true` if `self` is empty.
160    pub fn is_empty(&self) -> bool {
161        self.local_inits.is_empty()
162    }
163}
164
// No science was performed in the creation of this number, feel free to change
// it if you so like.
//
// Locals with an index below this threshold are stored directly in
// `Locals::first`; higher indices fall back to the compressed
// `Locals::uncached` list.
const MAX_LOCALS_TO_TRACK: u32 = 50;
168
/// The set of locals (parameters plus declared locals) of a function, stored
/// in a form optimized for index lookup.
pub(super) struct Locals {
    // Total number of locals in the function.
    num_locals: u32,

    // The first MAX_LOCALS_TO_TRACK locals in a function. This is used to
    // optimize the theoretically common case where most functions don't have
    // many locals and don't need a full binary search in the entire local space
    // below.
    first: Vec<ValType>,

    // This is a "compressed" list of locals for this function. The list of
    // locals are represented as a list of tuples. The second element is the
    // type of the local, and the first element is monotonically increasing as
    // you visit elements of this list. The first element is the maximum index
    // of the local, after the previous index, of the type specified.
    //
    // This allows us to do a binary search on the list for a local's index for
    // `local.{get,set,tee}`. We do a binary search for the index desired, and
    // it either lies in a "hole" where the maximum index is specified later,
    // or it's at the end of the list meaning it's out of bounds.
    uncached: Vec<(u32, ValType)>,
}
191
/// A Wasm control flow block on the control flow stack during Wasm validation.
//
// # Dev. Note
//
// This structure corresponds to `ctrl_frame` as specified at in the validation
// appendix of the wasm spec
#[derive(Debug, Copy, Clone)]
pub struct Frame {
    /// Indicator for what kind of instruction pushed this frame.
    pub kind: FrameKind,
    /// The type signature of this frame, represented as a singular return type
    /// or a type index pointing into the module's types.
    pub block_type: BlockType,
    /// The index, below which, this frame cannot modify the operand stack.
    pub height: usize,
    /// Whether this frame is unreachable so far.
    pub unreachable: bool,
    /// The number of initializations in the stack at the time of its creation.
    // NOTE(review): presumably the value of `LocalInits::push_ctrl` taken when
    // this frame was pushed — confirm at the frame-push call sites.
    pub init_height: usize,
}
212
/// Short-lived pairing of an [`OperatorValidator`] with the module `resources`
/// it validates against and the current byte `offset` used for error messages.
struct OperatorValidatorTemp<'validator, 'resources, T> {
    /// Byte offset of the instruction being validated, for error reporting.
    offset: usize,
    /// The underlying validator state being driven.
    inner: &'validator mut OperatorValidator,
    /// Module-level type/limit information consulted during validation.
    resources: &'resources T,
}
218
/// Heap allocations that can be reused across validations of multiple
/// functions, obtained from `OperatorValidator::into_allocations` and handed
/// back to `new_func`/`new_const_expr` to avoid re-allocating.
#[derive(Default)]
pub struct OperatorValidatorAllocations {
    // Scratch buffer for `match_stack_operands`.
    popped_types_tmp: Vec<MaybeType>,
    // Control-frame stack storage.
    control: Vec<Frame>,
    // Operand (type) stack storage.
    operands: Vec<MaybeType>,
    // Non-defaultable-local initialization tracking.
    local_inits: LocalInits,
    // Storage backing `Locals::first`.
    locals_first: Vec<ValType>,
    // Storage backing `Locals::uncached`.
    locals_uncached: Vec<(u32, ValType)>,
}
228
/// Type storage within the validator.
///
/// When managing the operand stack in unreachable code, the validator may not
/// fully know an operand's type. This unknown state is known as the `bottom`
/// type in the WebAssembly specification. Validating further instructions may
/// give us more information; either partial (`PartialRef`) or fully known.
#[derive(Debug, Copy, Clone)]
enum MaybeType<T = ValType> {
    /// The operand has no available type information due to unreachable code.
    ///
    /// This state represents "unknown" and corresponds to the `bottom` type in
    /// the WebAssembly specification. There are no constraints on what this
    /// type may be and it can match any other type during validation.
    Bottom,
    /// The operand is known to be a reference and we may know its abstract
    /// type.
    ///
    /// This state is not fully `Known`, however, because its type can be
    /// interpreted as either:
    /// - `shared` or not-`shared`
    /// - nullable or not nullable
    ///
    /// No further refinements are required for WebAssembly instructions today
    /// but this may grow in the future.
    UnknownRef(Option<AbstractHeapType>),
    /// The operand is known to have type `T`.
    Known(T),
}
257
// The validator is pretty performance-sensitive and `MaybeType` is the main
// unit of storage, so assert that it doesn't exceed 4 bytes which is the
// current expected size.
#[test]
fn assert_maybe_type_small() {
    // `assert_eq!` prints both values on failure, unlike `assert!(a == b)`,
    // making a size regression immediately diagnosable.
    assert_eq!(core::mem::size_of::<MaybeType>(), 4);
}
265
266impl core::fmt::Display for MaybeType {
267    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
268        match self {
269            MaybeType::Bottom => write!(f, "bot"),
270            MaybeType::UnknownRef(ty) => {
271                write!(f, "(ref shared? ")?;
272                match ty {
273                    Some(ty) => write!(f, "{}bot", ty.as_str(true))?,
274                    None => write!(f, "bot")?,
275                }
276                write!(f, ")")
277            }
278            MaybeType::Known(ty) => core::fmt::Display::fmt(ty, f),
279        }
280    }
281}
282
283impl From<ValType> for MaybeType {
284    fn from(ty: ValType) -> MaybeType {
285        MaybeType::Known(ty)
286    }
287}
288
289impl From<RefType> for MaybeType {
290    fn from(ty: RefType) -> MaybeType {
291        let ty: ValType = ty.into();
292        ty.into()
293    }
294}
295impl From<MaybeType<RefType>> for MaybeType<ValType> {
296    fn from(ty: MaybeType<RefType>) -> MaybeType<ValType> {
297        match ty {
298            MaybeType::Bottom => MaybeType::Bottom,
299            MaybeType::UnknownRef(ty) => MaybeType::UnknownRef(ty),
300            MaybeType::Known(t) => MaybeType::Known(t.into()),
301        }
302    }
303}
304
305impl MaybeType<RefType> {
306    fn as_non_null(&self) -> MaybeType<RefType> {
307        match self {
308            MaybeType::Bottom => MaybeType::Bottom,
309            MaybeType::UnknownRef(ty) => MaybeType::UnknownRef(*ty),
310            MaybeType::Known(ty) => MaybeType::Known(ty.as_non_null()),
311        }
312    }
313
314    fn is_maybe_shared(&self, resources: &impl WasmModuleResources) -> Option<bool> {
315        match self {
316            MaybeType::Bottom => None,
317            MaybeType::UnknownRef(_) => None,
318            MaybeType::Known(ty) => Some(resources.is_shared(*ty)),
319        }
320    }
321}
322
323impl OperatorValidator {
324    fn new(features: &WasmFeatures, allocs: OperatorValidatorAllocations) -> Self {
325        let OperatorValidatorAllocations {
326            popped_types_tmp,
327            control,
328            operands,
329            local_inits,
330            locals_first,
331            locals_uncached,
332        } = allocs;
333        debug_assert!(popped_types_tmp.is_empty());
334        debug_assert!(control.is_empty());
335        debug_assert!(operands.is_empty());
336        debug_assert!(local_inits.is_empty());
337        debug_assert!(local_inits.is_empty());
338        debug_assert!(locals_first.is_empty());
339        debug_assert!(locals_uncached.is_empty());
340        OperatorValidator {
341            locals: Locals {
342                num_locals: 0,
343                first: locals_first,
344                uncached: locals_uncached,
345            },
346            local_inits,
347            features: *features,
348            popped_types_tmp,
349            operands,
350            control,
351            shared: false,
352            #[cfg(debug_assertions)]
353            pop_push_log: vec![],
354        }
355    }
356
357    /// Creates a new operator validator which will be used to validate a
358    /// function whose type is the `ty` index specified.
359    ///
360    /// The `resources` are used to learn about the function type underlying
361    /// `ty`.
362    pub fn new_func<T>(
363        ty: u32,
364        offset: usize,
365        features: &WasmFeatures,
366        resources: &T,
367        allocs: OperatorValidatorAllocations,
368    ) -> Result<Self>
369    where
370        T: WasmModuleResources,
371    {
372        let mut ret = OperatorValidator::new(features, allocs);
373        ret.control.push(Frame {
374            kind: FrameKind::Block,
375            block_type: BlockType::FuncType(ty),
376            height: 0,
377            unreachable: false,
378            init_height: 0,
379        });
380
381        // Retrieve the function's type via index (`ty`); the `offset` is
382        // necessary due to `sub_type_at`'s error messaging.
383        let sub_ty = OperatorValidatorTemp {
384            offset,
385            inner: &mut ret,
386            resources,
387        }
388        .sub_type_at(ty)?;
389
390        // Set up the function's locals.
391        if let CompositeInnerType::Func(func_ty) = &sub_ty.composite_type.inner {
392            for ty in func_ty.params() {
393                ret.locals.define(1, *ty);
394            }
395            ret.local_inits.define_params(func_ty.params().len());
396        } else {
397            bail!(offset, "expected func type at index {ty}, found {sub_ty}")
398        }
399
400        // If we're in a shared function, ensure we do not access unshared
401        // objects.
402        if sub_ty.composite_type.shared {
403            ret.shared = true;
404        }
405        Ok(ret)
406    }
407
408    /// Creates a new operator validator which will be used to validate an
409    /// `init_expr` constant expression which should result in the `ty`
410    /// specified.
411    pub fn new_const_expr(
412        features: &WasmFeatures,
413        ty: ValType,
414        allocs: OperatorValidatorAllocations,
415    ) -> Self {
416        let mut ret = OperatorValidator::new(features, allocs);
417        ret.control.push(Frame {
418            kind: FrameKind::Block,
419            block_type: BlockType::Type(ty),
420            height: 0,
421            unreachable: false,
422            init_height: 0,
423        });
424        ret
425    }
426
427    pub fn define_locals(
428        &mut self,
429        offset: usize,
430        count: u32,
431        mut ty: ValType,
432        resources: &impl WasmModuleResources,
433    ) -> Result<()> {
434        resources.check_value_type(&mut ty, &self.features, offset)?;
435        if count == 0 {
436            return Ok(());
437        }
438        if !self.locals.define(count, ty) {
439            return Err(BinaryReaderError::new(
440                "too many locals: locals exceed maximum",
441                offset,
442            ));
443        }
444        self.local_inits.define_locals(count, ty);
445        Ok(())
446    }
447
448    /// Returns the current operands stack height.
449    pub fn operand_stack_height(&self) -> usize {
450        self.operands.len()
451    }
452
453    /// Returns the optional value type of the value operand at the given
454    /// `depth` from the top of the operand stack.
455    ///
456    /// - Returns `None` if the `depth` is out of bounds.
457    /// - Returns `Some(None)` if there is a value with unknown type
458    /// at the given `depth`.
459    ///
460    /// # Note
461    ///
462    /// A `depth` of 0 will refer to the last operand on the stack.
463    pub fn peek_operand_at(&self, depth: usize) -> Option<Option<ValType>> {
464        Some(match self.operands.iter().rev().nth(depth)? {
465            MaybeType::Known(t) => Some(*t),
466            MaybeType::Bottom | MaybeType::UnknownRef(..) => None,
467        })
468    }
469
470    /// Returns the number of frames on the control flow stack.
471    pub fn control_stack_height(&self) -> usize {
472        self.control.len()
473    }
474
475    /// Validates a relative jump to the `depth` specified.
476    ///
477    /// Returns the type signature of the block that we're jumping to as well
478    /// as the kind of block if the jump is valid. Otherwise returns an error.
479    pub(crate) fn jump(&self, depth: u32) -> Option<(BlockType, FrameKind)> {
480        assert!(!self.control.is_empty());
481        let i = (self.control.len() - 1).checked_sub(depth as usize)?;
482        let frame = &self.control[i];
483        Some((frame.block_type, frame.kind))
484    }
485
486    pub fn get_frame(&self, depth: usize) -> Option<&Frame> {
487        self.control.iter().rev().nth(depth)
488    }
489
490    /// Create a temporary [`OperatorValidatorTemp`] for validation.
491    pub fn with_resources<'a, 'validator, 'resources, T>(
492        &'validator mut self,
493        resources: &'resources T,
494        offset: usize,
495    ) -> impl VisitOperator<'a, Output = Result<()>> + ModuleArity + 'validator
496    where
497        T: WasmModuleResources,
498        'resources: 'validator,
499    {
500        WasmProposalValidator(OperatorValidatorTemp {
501            offset,
502            inner: self,
503            resources,
504        })
505    }
506
507    /// Same as `with_resources` above but guarantees it's able to visit simd
508    /// operators as well.
509    #[cfg(feature = "simd")]
510    pub fn with_resources_simd<'a, 'validator, 'resources, T>(
511        &'validator mut self,
512        resources: &'resources T,
513        offset: usize,
514    ) -> impl VisitSimdOperator<'a, Output = Result<()>> + ModuleArity + 'validator
515    where
516        T: WasmModuleResources,
517        'resources: 'validator,
518    {
519        WasmProposalValidator(OperatorValidatorTemp {
520            offset,
521            inner: self,
522            resources,
523        })
524    }
525
526    pub fn into_allocations(mut self) -> OperatorValidatorAllocations {
527        fn clear<T>(mut tmp: Vec<T>) -> Vec<T> {
528            tmp.clear();
529            tmp
530        }
531        OperatorValidatorAllocations {
532            popped_types_tmp: clear(self.popped_types_tmp),
533            control: clear(self.control),
534            operands: clear(self.operands),
535            local_inits: {
536                self.local_inits.clear();
537                self.local_inits
538            },
539            locals_first: clear(self.locals.first),
540            locals_uncached: clear(self.locals.uncached),
541        }
542    }
543
544    fn record_pop(&mut self) {
545        #[cfg(debug_assertions)]
546        {
547            self.pop_push_log.push(false);
548        }
549    }
550
551    fn record_push(&mut self) {
552        #[cfg(debug_assertions)]
553        {
554            self.pop_push_log.push(true);
555        }
556    }
557}
558
559impl<R> Deref for OperatorValidatorTemp<'_, '_, R> {
560    type Target = OperatorValidator;
561    fn deref(&self) -> &OperatorValidator {
562        self.inner
563    }
564}
565
566impl<R> DerefMut for OperatorValidatorTemp<'_, '_, R> {
567    fn deref_mut(&mut self) -> &mut OperatorValidator {
568        self.inner
569    }
570}
571
572impl<'resources, R> OperatorValidatorTemp<'_, 'resources, R>
573where
574    R: WasmModuleResources,
575{
576    /// Pushes a type onto the operand stack.
577    ///
578    /// This is used by instructions to represent a value that is pushed to the
579    /// operand stack. This can fail, but only if `Type` is feature gated.
580    /// Otherwise the push operation always succeeds.
581    fn push_operand<T>(&mut self, ty: T) -> Result<()>
582    where
583        T: Into<MaybeType>,
584    {
585        let maybe_ty = ty.into();
586
587        if cfg!(debug_assertions) {
588            match maybe_ty {
589                MaybeType::Known(ValType::Ref(r)) => match r.heap_type() {
590                    HeapType::Concrete(index) => {
591                        debug_assert!(
592                            matches!(index, UnpackedIndex::Id(_)),
593                            "only ref types referencing `CoreTypeId`s can \
594                             be pushed to the operand stack"
595                        );
596                    }
597                    _ => {}
598                },
599                _ => {}
600            }
601        }
602
603        self.operands.push(maybe_ty);
604        self.record_push();
605        Ok(())
606    }
607
608    fn push_concrete_ref(&mut self, nullable: bool, type_index: u32) -> Result<()> {
609        let mut heap_ty = HeapType::Concrete(UnpackedIndex::Module(type_index));
610
611        // Canonicalize the module index into an id.
612        self.resources.check_heap_type(&mut heap_ty, self.offset)?;
613        debug_assert!(matches!(heap_ty, HeapType::Concrete(UnpackedIndex::Id(_))));
614
615        let ref_ty = RefType::new(nullable, heap_ty).ok_or_else(|| {
616            format_err!(self.offset, "implementation limit: type index too large")
617        })?;
618
619        self.push_operand(ref_ty)
620    }
621
622    fn pop_concrete_ref(&mut self, nullable: bool, type_index: u32) -> Result<MaybeType> {
623        let mut heap_ty = HeapType::Concrete(UnpackedIndex::Module(type_index));
624
625        // Canonicalize the module index into an id.
626        self.resources.check_heap_type(&mut heap_ty, self.offset)?;
627        debug_assert!(matches!(heap_ty, HeapType::Concrete(UnpackedIndex::Id(_))));
628
629        let ref_ty = RefType::new(nullable, heap_ty).ok_or_else(|| {
630            format_err!(self.offset, "implementation limit: type index too large")
631        })?;
632
633        self.pop_operand(Some(ref_ty.into()))
634    }
635
636    /// Pop the given label types, checking that they are indeed present on the
637    /// stack, and then push them back on again.
638    fn pop_push_label_types(
639        &mut self,
640        label_types: impl PreciseIterator<Item = ValType>,
641    ) -> Result<()> {
642        for ty in label_types.clone().rev() {
643            self.pop_operand(Some(ty))?;
644        }
645        for ty in label_types {
646            self.push_operand(ty)?;
647        }
648        Ok(())
649    }
650
    /// Attempts to pop a type from the operand stack.
    ///
    /// This function is used to remove types from the operand stack. The
    /// `expected` argument can be used to indicate that a type is required, or
    /// simply that something is needed to be popped.
    ///
    /// If `expected` is `Some(T)` then this will be guaranteed to return
    /// `T`, and it will only return success if the current block is
    /// unreachable or if `T` was found at the top of the operand stack.
    ///
    /// If `expected` is `None` then it indicates that something must be on the
    /// operand stack, but it doesn't matter what's on the operand stack. This
    /// is useful for polymorphic instructions like `select`.
    ///
    /// If `Some(T)` is returned then `T` was popped from the operand stack and
    /// matches `expected`. If `None` is returned then it means that `None` was
    /// expected and a type was successfully popped, but its exact type is
    /// indeterminate because the current block is unreachable.
    fn pop_operand(&mut self, expected: Option<ValType>) -> Result<MaybeType> {
        // This method is one of the hottest methods in the validator so to
        // improve codegen this method contains a fast-path success case where
        // if the top operand on the stack is as expected it's returned
        // immediately. This is the most common case where the stack will indeed
        // have the expected type and all we need to do is pop it off.
        //
        // Note that this still has to be careful to be correct, though. For
        // efficiency an operand is unconditionally popped and on success it is
        // matched against the state of the world to see if we could actually
        // pop it. If we shouldn't have popped it then it's passed to the slow
        // path to get pushed back onto the stack.
        let popped = match self.operands.pop() {
            Some(MaybeType::Known(actual_ty)) => {
                if Some(actual_ty) == expected {
                    if let Some(control) = self.control.last() {
                        // Fast path: the type matched exactly and the pop does
                        // not cross the current frame's base height.
                        if self.operands.len() >= control.height {
                            self.record_pop();
                            return Ok(MaybeType::Known(actual_ty));
                        }
                    }
                }
                Some(MaybeType::Known(actual_ty))
            }
            other => other,
        };

        self._pop_operand(expected, popped)
    }
698
    // This is the "real" implementation of `pop_operand` which is 100%
    // spec-compliant with little attention paid to efficiency since this is the
    // slow-path from the actual `pop_operand` function above.
    #[cold]
    fn _pop_operand(
        &mut self,
        expected: Option<ValType>,
        popped: Option<MaybeType>,
    ) -> Result<MaybeType> {
        // Restore the operand speculatively popped by the fast path (if any)
        // so the logic below can reason about the whole stack.
        self.operands.extend(popped);
        let control = self.control.last().unwrap();
        let actual = if self.operands.len() == control.height && control.unreachable {
            // In unreachable code an empty frame yields the `bottom` type.
            MaybeType::Bottom
        } else {
            if self.operands.len() == control.height {
                // Reachable code may not pop past the frame's base height.
                let desc = match expected {
                    Some(ty) => ty_to_str(ty),
                    None => "a type".into(),
                };
                bail!(
                    self.offset,
                    "type mismatch: expected {desc} but nothing on stack"
                )
            } else {
                self.operands.pop().unwrap()
            }
        };
        if let Some(expected) = expected {
            match (actual, expected) {
                // The bottom type matches all expectations
                (MaybeType::Bottom, _) => {}

                // The "heap bottom" type only matches other references types,
                // but not any integer types. Note that if the heap bottom is
                // known to have a specific abstract heap type then a subtype
                // check is performed against the expected type.
                (MaybeType::UnknownRef(actual_ty), ValType::Ref(expected)) => {
                    if let Some(actual) = actual_ty {
                        let expected_shared = self.resources.is_shared(expected);
                        let actual = RefType::new(
                            false,
                            HeapType::Abstract {
                                shared: expected_shared,
                                ty: actual,
                            },
                        )
                        .unwrap();
                        if !self.resources.is_subtype(actual.into(), expected.into()) {
                            bail!(
                                self.offset,
                                "type mismatch: expected {}, found {}",
                                ty_to_str(expected.into()),
                                ty_to_str(actual.into())
                            );
                        }
                    }
                }

                // Use the `is_subtype` predicate to test if a found type matches
                // the expectation.
                (MaybeType::Known(actual), expected) => {
                    if !self.resources.is_subtype(actual, expected) {
                        bail!(
                            self.offset,
                            "type mismatch: expected {}, found {}",
                            ty_to_str(expected),
                            ty_to_str(actual)
                        );
                    }
                }

                // A "heap bottom" type cannot match any numeric types.
                (
                    MaybeType::UnknownRef(..),
                    ValType::I32 | ValType::I64 | ValType::F32 | ValType::F64 | ValType::V128,
                ) => {
                    bail!(
                        self.offset,
                        "type mismatch: expected {}, found heap type",
                        ty_to_str(expected)
                    )
                }
            }
        }
        self.record_pop();
        Ok(actual)
    }
786
787    /// Match expected vs. actual operand.
788    fn match_operand(
789        &mut self,
790        actual: ValType,
791        expected: ValType,
792    ) -> Result<(), BinaryReaderError> {
793        self.push_operand(actual)?;
794        self.pop_operand(Some(expected))?;
795        Ok(())
796    }
797
798    /// Match a type sequence to the top of the stack.
799    fn match_stack_operands(
800        &mut self,
801        expected_tys: impl PreciseIterator<Item = ValType> + 'resources,
802    ) -> Result<()> {
803        let mut popped_types_tmp = mem::take(&mut self.popped_types_tmp);
804        debug_assert!(popped_types_tmp.is_empty());
805        popped_types_tmp.reserve(expected_tys.len());
806
807        for expected_ty in expected_tys.rev() {
808            let actual_ty = self.pop_operand(Some(expected_ty))?;
809            popped_types_tmp.push(actual_ty);
810        }
811        for ty in popped_types_tmp.drain(..).rev() {
812            self.push_operand(ty)?;
813        }
814
815        debug_assert!(self.popped_types_tmp.is_empty());
816        self.popped_types_tmp = popped_types_tmp;
817        Ok(())
818    }
819
820    /// Pop a reference type from the operand stack.
821    fn pop_ref(&mut self, expected: Option<RefType>) -> Result<MaybeType<RefType>> {
822        match self.pop_operand(expected.map(|t| t.into()))? {
823            MaybeType::Bottom => Ok(MaybeType::UnknownRef(None)),
824            MaybeType::UnknownRef(ty) => Ok(MaybeType::UnknownRef(ty)),
825            MaybeType::Known(ValType::Ref(rt)) => Ok(MaybeType::Known(rt)),
826            MaybeType::Known(ty) => bail!(
827                self.offset,
828                "type mismatch: expected ref but found {}",
829                ty_to_str(ty)
830            ),
831        }
832    }
833
    /// Pop a reference type from the operand stack, checking if it is a subtype
    /// of a nullable type of `expected` or the shared version of `expected`.
    ///
    /// This function returns the popped reference type and its `shared`-ness,
    /// saving extra lookups for concrete types.
    fn pop_maybe_shared_ref(&mut self, expected: AbstractHeapType) -> Result<MaybeType<RefType>> {
        let actual = match self.pop_ref(None)? {
            // Nothing concrete to check against; propagate as-is.
            MaybeType::Bottom => return Ok(MaybeType::Bottom),
            MaybeType::UnknownRef(None) => return Ok(MaybeType::UnknownRef(None)),
            // An abstract-but-unknown-shared ref is checked directly against
            // the expected abstract heap type.
            MaybeType::UnknownRef(Some(actual)) => {
                if !actual.is_subtype_of(expected) {
                    bail!(
                        self.offset,
                        "type mismatch: expected subtype of {}, found {}",
                        expected.as_str(false),
                        actual.as_str(false),
                    )
                }
                return Ok(MaybeType::UnknownRef(Some(actual)));
            }
            MaybeType::Known(ty) => ty,
        };
        // Change our expectation based on whether we're dealing with an actual
        // shared or unshared type.
        let is_actual_shared = self.resources.is_shared(actual);
        let expected = RefType::new(
            true,
            HeapType::Abstract {
                shared: is_actual_shared,
                ty: expected,
            },
        )
        .unwrap();

        // Check (again) that the actual type is a subtype of the expected type.
        // Note that `_pop_operand` already does this kind of thing but we leave
        // that for a future refactoring (TODO).
        if !self.resources.is_subtype(actual.into(), expected.into()) {
            bail!(
                self.offset,
                "type mismatch: expected subtype of {expected}, found {actual}",
            )
        }
        Ok(MaybeType::Known(actual))
    }
879
880    /// Fetches the type for the local at `idx`, returning an error if it's out
881    /// of bounds.
882    fn local(&self, idx: u32) -> Result<ValType> {
883        match self.locals.get(idx) {
884            Some(ty) => Ok(ty),
885            None => bail!(
886                self.offset,
887                "unknown local {}: local index out of bounds",
888                idx
889            ),
890        }
891    }
892
893    /// Flags the current control frame as unreachable, additionally truncating
894    /// the currently active operand stack.
895    fn unreachable(&mut self) -> Result<()> {
896        let control = self.control.last_mut().unwrap();
897        control.unreachable = true;
898        let new_height = control.height;
899        self.operands.truncate(new_height);
900        Ok(())
901    }
902
903    /// Pushes a new frame onto the control stack.
904    ///
905    /// This operation is used when entering a new block such as an if, loop,
906    /// or block itself. The `kind` of block is specified which indicates how
907    /// breaks interact with this block's type. Additionally the type signature
908    /// of the block is specified by `ty`.
909    fn push_ctrl(&mut self, kind: FrameKind, ty: BlockType) -> Result<()> {
910        // Push a new frame which has a snapshot of the height of the current
911        // operand stack.
912        let height = self.operands.len();
913        let init_height = self.local_inits.push_ctrl();
914        self.control.push(Frame {
915            kind,
916            block_type: ty,
917            height,
918            unreachable: false,
919            init_height,
920        });
921        // All of the parameters are now also available in this control frame,
922        // so we push them here in order.
923        for ty in self.params(ty)? {
924            self.push_operand(ty)?;
925        }
926        Ok(())
927    }
928
    /// Pops a frame from the control stack.
    ///
    /// This function is used when exiting a block and leaves a block scope.
    /// Internally this will validate that blocks have the correct result type.
    fn pop_ctrl(&mut self) -> Result<Frame> {
        // Read the expected type and expected height of the operand stack at
        // the end of the frame.
        let frame = self.control.last().unwrap();
        let ty = frame.block_type;
        let height = frame.height;
        let init_height = frame.init_height;

        // reset_locals in the spec
        self.local_inits.pop_ctrl(init_height);

        // Pop all the result types, in reverse order, from the operand stack.
        // These types will, possibly, be transferred to the next frame.
        for ty in self.results(ty)?.rev() {
            self.pop_operand(Some(ty))?;
        }

        // Make sure that the operand stack has returned to its original
        // height...
        if self.operands.len() != height {
            bail!(
                self.offset,
                "type mismatch: values remaining on stack at end of block"
            );
        }

        // And then we can remove it!
        Ok(self.control.pop().unwrap())
    }
962
963    /// Validates a relative jump to the `depth` specified.
964    ///
965    /// Returns the type signature of the block that we're jumping to as well
966    /// as the kind of block if the jump is valid. Otherwise returns an error.
967    fn jump(&self, depth: u32) -> Result<(BlockType, FrameKind)> {
968        match self.inner.jump(depth) {
969            Some(tup) => Ok(tup),
970            None => bail!(self.offset, "unknown label: branch depth too large"),
971        }
972    }
973
974    /// Validates that `memory_index` is valid in this module, and returns the
975    /// type of address used to index the memory specified.
976    fn check_memory_index(&self, memory_index: u32) -> Result<ValType> {
977        match self.resources.memory_at(memory_index) {
978            Some(mem) => Ok(mem.index_type()),
979            None => bail!(self.offset, "unknown memory {}", memory_index),
980        }
981    }
982
    /// Validates a `memarg` for alignment and such (also the memory it
    /// references), and returns the type of index used to address the memory.
    fn check_memarg(&self, memarg: MemArg) -> Result<ValType> {
        let index_ty = self.check_memory_index(memarg.memory)?;
        // The alignment may never exceed the natural alignment of the access.
        if memarg.align > memarg.max_align {
            bail!(
                self.offset,
                "malformed memop alignment: alignment must not be larger than natural"
            );
        }
        // For 32-bit-indexed memories the static offset must fit in a u32.
        if index_ty == ValType::I32 && memarg.offset > u64::from(u32::MAX) {
            bail!(self.offset, "offset out of range: must be <= 2**32");
        }
        Ok(index_ty)
    }
998
999    fn check_floats_enabled(&self) -> Result<()> {
1000        if !self.features.floats() {
1001            bail!(self.offset, "floating-point instruction disallowed");
1002        }
1003        Ok(())
1004    }
1005
1006    fn check_shared_memarg(&self, memarg: MemArg) -> Result<ValType> {
1007        if memarg.align != memarg.max_align {
1008            bail!(
1009                self.offset,
1010                "atomic instructions must always specify maximum alignment"
1011            );
1012        }
1013        self.check_memory_index(memarg.memory)
1014    }
1015
    /// Validates a block type, primarily with various in-flight proposals.
    fn check_block_type(&self, ty: &mut BlockType) -> Result<()> {
        match ty {
            BlockType::Empty => Ok(()),
            // Single-result blocks only need the value type itself validated.
            BlockType::Type(t) => self
                .resources
                .check_value_type(t, &self.features, self.offset),
            // Function-typed blocks (multiple params/results) require the
            // multi-value proposal.
            BlockType::FuncType(idx) => {
                if !self.features.multi_value() {
                    // NOTE(review): the wording below reads inverted ("... is
                    // not enabled"); it presumably matches expected error
                    // output elsewhere -- confirm before rewording.
                    bail!(
                        self.offset,
                        "blocks, loops, and ifs may only produce a resulttype \
                         when multi-value is not enabled",
                    );
                }
                // Ensure the index actually refers to a function type.
                self.func_type_at(*idx)?;
                Ok(())
            }
        }
    }
1036
1037    /// Returns the corresponding function type for the `func` item located at
1038    /// `function_index`.
1039    fn type_of_function(&self, function_index: u32) -> Result<&'resources FuncType> {
1040        if let Some(type_index) = self.resources.type_index_of_function(function_index) {
1041            self.func_type_at(type_index)
1042        } else {
1043            bail!(
1044                self.offset,
1045                "unknown function {function_index}: function index out of bounds",
1046            )
1047        }
1048    }
1049
1050    /// Checks a call-style instruction which will be invoking the function `ty`
1051    /// specified.
1052    ///
1053    /// This will pop parameters from the operand stack for the function's
1054    /// parameters and then push the results of the function on the stack.
1055    fn check_call_ty(&mut self, ty: &FuncType) -> Result<()> {
1056        for &ty in ty.params().iter().rev() {
1057            debug_assert_type_indices_are_ids(ty);
1058            self.pop_operand(Some(ty))?;
1059        }
1060        for &ty in ty.results() {
1061            debug_assert_type_indices_are_ids(ty);
1062            self.push_operand(ty)?;
1063        }
1064        Ok(())
1065    }
1066
1067    /// Similar to `check_call_ty` except used for tail-call instructions.
1068    fn check_return_call_ty(&mut self, ty: &FuncType) -> Result<()> {
1069        self.check_func_type_same_results(ty)?;
1070        for &ty in ty.params().iter().rev() {
1071            debug_assert_type_indices_are_ids(ty);
1072            self.pop_operand(Some(ty))?;
1073        }
1074
1075        // Match the results with this function's.
1076        for &ty in ty.results() {
1077            debug_assert_type_indices_are_ids(ty);
1078            self.push_operand(ty)?;
1079        }
1080        self.check_return()?;
1081
1082        Ok(())
1083    }
1084
    /// Checks the immediate `type_index` of a `call_ref`-style instruction
    /// (also `return_call_ref`).
    ///
    /// This will validate that the value on the stack is a `(ref type_index)`
    /// or a subtype. This will then return the corresponding function type used
    /// for this call (to be used with `check_call_ty` or
    /// `check_return_call_ty`).
    fn check_call_ref_ty(&mut self, type_index: u32) -> Result<&'resources FuncType> {
        // Canonicalize the module-relative index into a validated heap type.
        let unpacked_index = UnpackedIndex::Module(type_index);
        let mut hty = HeapType::Concrete(unpacked_index);
        self.resources.check_heap_type(&mut hty, self.offset)?;
        // The operand is expected to be a nullable reference to this type.
        let expected = RefType::new(true, hty).expect("hty should be previously validated");
        self.pop_ref(Some(expected))?;
        self.func_type_at(type_index)
    }
1100
1101    /// Validates the immediate operands of a `call_indirect` or
1102    /// `return_call_indirect` instruction.
1103    ///
1104    /// This will validate that `table_index` is valid and a funcref table. It
1105    /// will additionally pop the index argument which is used to index into the
1106    /// table.
1107    ///
1108    /// The return value of this function is the function type behind
1109    /// `type_index` which must then be passed to `check_{call,return_call}_ty`.
1110    fn check_call_indirect_ty(
1111        &mut self,
1112        type_index: u32,
1113        table_index: u32,
1114    ) -> Result<&'resources FuncType> {
1115        let tab = self.table_type_at(table_index)?;
1116        if !self
1117            .resources
1118            .is_subtype(ValType::Ref(tab.element_type), ValType::FUNCREF)
1119        {
1120            bail!(
1121                self.offset,
1122                "type mismatch: indirect calls must go through a table with type <= funcref",
1123            );
1124        }
1125        self.pop_operand(Some(tab.index_type()))?;
1126        self.func_type_at(type_index)
1127    }
1128
1129    /// Validates a `return` instruction, popping types from the operand
1130    /// stack that the function needs.
1131    fn check_return(&mut self) -> Result<()> {
1132        assert!(!self.control.is_empty());
1133        for ty in self.results(self.control[0].block_type)?.rev() {
1134            self.pop_operand(Some(ty))?;
1135        }
1136        self.unreachable()?;
1137        Ok(())
1138    }
1139
    /// Check that the given type has the same result types as the current
    /// function's results.
    fn check_func_type_same_results(&self, callee_ty: &FuncType) -> Result<()> {
        assert!(!self.control.is_empty());
        // The caller's results are the block type of the outermost frame.
        let caller_rets = self.results(self.control[0].block_type)?;
        // Lengths must match and each callee result must be a subtype of the
        // corresponding caller result.
        if callee_ty.results().len() != caller_rets.len()
            || !caller_rets
                .zip(callee_ty.results())
                .all(|(caller_ty, callee_ty)| self.resources.is_subtype(*callee_ty, caller_ty))
        {
            // Re-materialize both result lists purely for the error message.
            let caller_rets = self
                .results(self.control[0].block_type)?
                .map(|ty| format!("{ty}"))
                .collect::<Vec<_>>()
                .join(" ");
            let callee_rets = callee_ty
                .results()
                .iter()
                .map(|ty| format!("{ty}"))
                .collect::<Vec<_>>()
                .join(" ");
            bail!(
                self.offset,
                "type mismatch: current function requires result type \
                 [{caller_rets}] but callee returns [{callee_rets}]"
            );
        }
        Ok(())
    }
1169
1170    /// Checks the validity of a common comparison operator.
1171    fn check_cmp_op(&mut self, ty: ValType) -> Result<()> {
1172        self.pop_operand(Some(ty))?;
1173        self.pop_operand(Some(ty))?;
1174        self.push_operand(ValType::I32)?;
1175        Ok(())
1176    }
1177
1178    /// Checks the validity of a common float comparison operator.
1179    fn check_fcmp_op(&mut self, ty: ValType) -> Result<()> {
1180        debug_assert!(matches!(ty, ValType::F32 | ValType::F64));
1181        self.check_floats_enabled()?;
1182        self.check_cmp_op(ty)
1183    }
1184
1185    /// Checks the validity of a common unary operator.
1186    fn check_unary_op(&mut self, ty: ValType) -> Result<()> {
1187        self.pop_operand(Some(ty))?;
1188        self.push_operand(ty)?;
1189        Ok(())
1190    }
1191
1192    /// Checks the validity of a common unary float operator.
1193    fn check_funary_op(&mut self, ty: ValType) -> Result<()> {
1194        debug_assert!(matches!(ty, ValType::F32 | ValType::F64));
1195        self.check_floats_enabled()?;
1196        self.check_unary_op(ty)
1197    }
1198
1199    /// Checks the validity of a common conversion operator.
1200    fn check_conversion_op(&mut self, into: ValType, from: ValType) -> Result<()> {
1201        self.pop_operand(Some(from))?;
1202        self.push_operand(into)?;
1203        Ok(())
1204    }
1205
1206    /// Checks the validity of a common float conversion operator.
1207    fn check_fconversion_op(&mut self, into: ValType, from: ValType) -> Result<()> {
1208        debug_assert!(matches!(into, ValType::F32 | ValType::F64));
1209        self.check_floats_enabled()?;
1210        self.check_conversion_op(into, from)
1211    }
1212
1213    /// Checks the validity of a common binary operator.
1214    fn check_binary_op(&mut self, ty: ValType) -> Result<()> {
1215        self.pop_operand(Some(ty))?;
1216        self.pop_operand(Some(ty))?;
1217        self.push_operand(ty)?;
1218        Ok(())
1219    }
1220
1221    /// Checks the validity of a common binary float operator.
1222    fn check_fbinary_op(&mut self, ty: ValType) -> Result<()> {
1223        debug_assert!(matches!(ty, ValType::F32 | ValType::F64));
1224        self.check_floats_enabled()?;
1225        self.check_binary_op(ty)
1226    }
1227
1228    /// Checks the validity of an atomic load operator.
1229    fn check_atomic_load(&mut self, memarg: MemArg, load_ty: ValType) -> Result<()> {
1230        let ty = self.check_shared_memarg(memarg)?;
1231        self.pop_operand(Some(ty))?;
1232        self.push_operand(load_ty)?;
1233        Ok(())
1234    }
1235
1236    /// Checks the validity of an atomic store operator.
1237    fn check_atomic_store(&mut self, memarg: MemArg, store_ty: ValType) -> Result<()> {
1238        let ty = self.check_shared_memarg(memarg)?;
1239        self.pop_operand(Some(store_ty))?;
1240        self.pop_operand(Some(ty))?;
1241        Ok(())
1242    }
1243
1244    /// Checks the validity of atomic binary operator on memory.
1245    fn check_atomic_binary_memory_op(&mut self, memarg: MemArg, op_ty: ValType) -> Result<()> {
1246        let ty = self.check_shared_memarg(memarg)?;
1247        self.pop_operand(Some(op_ty))?;
1248        self.pop_operand(Some(ty))?;
1249        self.push_operand(op_ty)?;
1250        Ok(())
1251    }
1252
1253    /// Checks the validity of an atomic compare exchange operator on memories.
1254    fn check_atomic_binary_memory_cmpxchg(&mut self, memarg: MemArg, op_ty: ValType) -> Result<()> {
1255        let ty = self.check_shared_memarg(memarg)?;
1256        self.pop_operand(Some(op_ty))?;
1257        self.pop_operand(Some(op_ty))?;
1258        self.pop_operand(Some(ty))?;
1259        self.push_operand(op_ty)?;
1260        Ok(())
1261    }
1262
    /// Common helper for `ref.test` and `ref.cast` downcasting/checking
    /// instructions. Returns the given `heap_type` as a `RefType`.
    fn check_downcast(&mut self, nullable: bool, mut heap_type: HeapType) -> Result<RefType> {
        self.resources
            .check_heap_type(&mut heap_type, self.offset)?;

        // The target type of the downcast...
        let sub_ty = RefType::new(nullable, heap_type).ok_or_else(|| {
            BinaryReaderError::new("implementation limit: type index too large", self.offset)
        })?;
        // ...and the top type of its hierarchy, which the operand being
        // downcast must already be a subtype of.
        let sup_ty = RefType::new(true, self.resources.top_type(&heap_type))
            .expect("can't panic with non-concrete heap types");

        self.pop_ref(Some(sup_ty))?;
        Ok(sub_ty)
    }
1278
1279    /// Common helper for both nullable and non-nullable variants of `ref.test`
1280    /// instructions.
1281    fn check_ref_test(&mut self, nullable: bool, heap_type: HeapType) -> Result<()> {
1282        self.check_downcast(nullable, heap_type)?;
1283        self.push_operand(ValType::I32)
1284    }
1285
1286    /// Common helper for both nullable and non-nullable variants of `ref.cast`
1287    /// instructions.
1288    fn check_ref_cast(&mut self, nullable: bool, heap_type: HeapType) -> Result<()> {
1289        let sub_ty = self.check_downcast(nullable, heap_type)?;
1290        self.push_operand(sub_ty)
1291    }
1292
1293    /// Common helper for checking the types of globals accessed with atomic RMW
1294    /// instructions, which only allow `i32` and `i64`.
1295    fn check_atomic_global_rmw_ty(&self, global_index: u32) -> Result<ValType> {
1296        let ty = self.global_type_at(global_index)?.content_type;
1297        if !(ty == ValType::I32 || ty == ValType::I64) {
1298            bail!(
1299                self.offset,
1300                "invalid type: `global.atomic.rmw.*` only allows `i32` and `i64`"
1301            );
1302        }
1303        Ok(ty)
1304    }
1305
    /// Common helper for checking the types of structs accessed with atomic RMW
    /// instructions, which only allow `i32` and `i64` types.
    fn check_struct_atomic_rmw(
        &mut self,
        op: &'static str,
        struct_type_index: u32,
        field_index: u32,
    ) -> Result<()> {
        // The field must be mutable and hold exactly `i32` or `i64`; packed
        // storage types are rejected by the catch-all arm below.
        let field = self.mutable_struct_field_at(struct_type_index, field_index)?;
        let field_ty = match field.element_type {
            StorageType::Val(ValType::I32) => ValType::I32,
            StorageType::Val(ValType::I64) => ValType::I64,
            _ => bail!(
                self.offset,
                "invalid type: `struct.atomic.rmw.{}` only allows `i32` and `i64`",
                op
            ),
        };
        // Pop the operand value then the struct reference; push the result.
        self.pop_operand(Some(field_ty))?;
        self.pop_concrete_ref(true, struct_type_index)?;
        self.push_operand(field_ty)?;
        Ok(())
    }

    /// Common helper for checking the types of arrays accessed with atomic RMW
    /// instructions, which only allow `i32` and `i64`.
    fn check_array_atomic_rmw(&mut self, op: &'static str, type_index: u32) -> Result<()> {
        // The array's elements must be mutable and hold exactly `i32` or
        // `i64`; packed storage types are rejected by the catch-all arm below.
        let field = self.mutable_array_type_at(type_index)?;
        let elem_ty = match field.element_type {
            StorageType::Val(ValType::I32) => ValType::I32,
            StorageType::Val(ValType::I64) => ValType::I64,
            _ => bail!(
                self.offset,
                "invalid type: `array.atomic.rmw.{}` only allows `i32` and `i64`",
                op
            ),
        };
        // Pop the operand value, the `i32` element index, then the array
        // reference; push the result.
        self.pop_operand(Some(elem_ty))?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_concrete_ref(true, type_index)?;
        self.push_operand(elem_ty)?;
        Ok(())
    }
1349
1350    fn element_type_at(&self, elem_index: u32) -> Result<RefType> {
1351        match self.resources.element_type_at(elem_index) {
1352            Some(ty) => Ok(ty),
1353            None => bail!(
1354                self.offset,
1355                "unknown elem segment {}: segment index out of bounds",
1356                elem_index
1357            ),
1358        }
1359    }
1360
1361    fn sub_type_at(&self, at: u32) -> Result<&'resources SubType> {
1362        self.resources
1363            .sub_type_at(at)
1364            .ok_or_else(|| format_err!(self.offset, "unknown type: type index out of bounds"))
1365    }
1366
1367    fn struct_type_at(&self, at: u32) -> Result<&'resources StructType> {
1368        let sub_ty = self.sub_type_at(at)?;
1369        if let CompositeInnerType::Struct(struct_ty) = &sub_ty.composite_type.inner {
1370            if self.inner.shared && !sub_ty.composite_type.shared {
1371                bail!(
1372                    self.offset,
1373                    "shared functions cannot access unshared structs",
1374                );
1375            }
1376            Ok(struct_ty)
1377        } else {
1378            bail!(
1379                self.offset,
1380                "expected struct type at index {at}, found {sub_ty}"
1381            )
1382        }
1383    }
1384
1385    fn struct_field_at(&self, struct_type_index: u32, field_index: u32) -> Result<FieldType> {
1386        let field_index = usize::try_from(field_index).map_err(|_| {
1387            BinaryReaderError::new("unknown field: field index out of bounds", self.offset)
1388        })?;
1389        self.struct_type_at(struct_type_index)?
1390            .fields
1391            .get(field_index)
1392            .copied()
1393            .ok_or_else(|| {
1394                BinaryReaderError::new("unknown field: field index out of bounds", self.offset)
1395            })
1396    }
1397
1398    fn mutable_struct_field_at(
1399        &self,
1400        struct_type_index: u32,
1401        field_index: u32,
1402    ) -> Result<FieldType> {
1403        let field = self.struct_field_at(struct_type_index, field_index)?;
1404        if !field.mutable {
1405            bail!(
1406                self.offset,
1407                "invalid struct modification: struct field is immutable"
1408            )
1409        }
1410        Ok(field)
1411    }
1412
1413    fn array_type_at(&self, at: u32) -> Result<FieldType> {
1414        let sub_ty = self.sub_type_at(at)?;
1415        if let CompositeInnerType::Array(array_ty) = &sub_ty.composite_type.inner {
1416            if self.inner.shared && !sub_ty.composite_type.shared {
1417                bail!(
1418                    self.offset,
1419                    "shared functions cannot access unshared arrays",
1420                );
1421            }
1422            Ok(array_ty.0)
1423        } else {
1424            bail!(
1425                self.offset,
1426                "expected array type at index {at}, found {sub_ty}"
1427            )
1428        }
1429    }
1430
1431    fn mutable_array_type_at(&self, at: u32) -> Result<FieldType> {
1432        let field = self.array_type_at(at)?;
1433        if !field.mutable {
1434            bail!(
1435                self.offset,
1436                "invalid array modification: array is immutable"
1437            )
1438        }
1439        Ok(field)
1440    }
1441
1442    fn func_type_at(&self, at: u32) -> Result<&'resources FuncType> {
1443        let sub_ty = self.sub_type_at(at)?;
1444        if let CompositeInnerType::Func(func_ty) = &sub_ty.composite_type.inner {
1445            if self.inner.shared && !sub_ty.composite_type.shared {
1446                bail!(
1447                    self.offset,
1448                    "shared functions cannot access unshared functions",
1449                );
1450            }
1451            Ok(func_ty)
1452        } else {
1453            bail!(
1454                self.offset,
1455                "expected func type at index {at}, found {sub_ty}"
1456            )
1457        }
1458    }
1459
1460    fn cont_type_at(&self, at: u32) -> Result<&ContType> {
1461        let sub_ty = self.sub_type_at(at)?;
1462        if let CompositeInnerType::Cont(cont_ty) = &sub_ty.composite_type.inner {
1463            if self.inner.shared && !sub_ty.composite_type.shared {
1464                bail!(
1465                    self.offset,
1466                    "shared continuations cannot access unshared continuations",
1467                );
1468            }
1469            Ok(cont_ty)
1470        } else {
1471            bail!(self.offset, "non-continuation type {at}",)
1472        }
1473    }
1474
1475    fn func_type_of_cont_type(&self, cont_ty: &ContType) -> &'resources FuncType {
1476        let func_id = cont_ty.0.as_core_type_id().expect("valid core type id");
1477        self.resources.sub_type_at_id(func_id).unwrap_func()
1478    }
1479
1480    fn tag_at(&self, at: u32) -> Result<&'resources FuncType> {
1481        self.resources
1482            .tag_at(at)
1483            .ok_or_else(|| format_err!(self.offset, "unknown tag {}: tag index out of bounds", at))
1484    }
1485
1486    // Similar to `tag_at`, but checks that the result type is
1487    // empty. This is necessary when enabling the stack switching
1488    // feature as it allows non-empty result types on tags.
1489    fn exception_tag_at(&self, at: u32) -> Result<&'resources FuncType> {
1490        let func_ty = self.tag_at(at)?;
1491        if func_ty.results().len() != 0 {
1492            bail!(
1493                self.offset,
1494                "invalid exception type: non-empty tag result type"
1495            );
1496        }
1497        Ok(func_ty)
1498    }
1499
1500    fn global_type_at(&self, at: u32) -> Result<GlobalType> {
1501        if let Some(ty) = self.resources.global_at(at) {
1502            if self.inner.shared && !ty.shared {
1503                bail!(
1504                    self.offset,
1505                    "shared functions cannot access unshared globals",
1506                );
1507            }
1508            Ok(ty)
1509        } else {
1510            bail!(self.offset, "unknown global: global index out of bounds");
1511        }
1512    }
1513
1514    /// Validates that the `table` is valid and returns the type it points to.
1515    fn table_type_at(&self, table: u32) -> Result<TableType> {
1516        match self.resources.table_at(table) {
1517            Some(ty) => {
1518                if self.inner.shared && !ty.shared {
1519                    bail!(
1520                        self.offset,
1521                        "shared functions cannot access unshared tables",
1522                    );
1523                }
1524                Ok(ty)
1525            }
1526            None => bail!(
1527                self.offset,
1528                "unknown table {table}: table index out of bounds"
1529            ),
1530        }
1531    }
1532
1533    fn params(&self, ty: BlockType) -> Result<impl PreciseIterator<Item = ValType> + 'resources> {
1534        Ok(match ty {
1535            BlockType::Empty | BlockType::Type(_) => Either::B(None.into_iter()),
1536            BlockType::FuncType(t) => Either::A(self.func_type_at(t)?.params().iter().copied()),
1537        })
1538    }
1539
1540    fn results(&self, ty: BlockType) -> Result<impl PreciseIterator<Item = ValType> + 'resources> {
1541        Ok(match ty {
1542            BlockType::Empty => Either::B(None.into_iter()),
1543            BlockType::Type(t) => Either::B(Some(t).into_iter()),
1544            BlockType::FuncType(t) => Either::A(self.func_type_at(t)?.results().iter().copied()),
1545        })
1546    }
1547
1548    fn label_types(
1549        &self,
1550        ty: BlockType,
1551        kind: FrameKind,
1552    ) -> Result<impl PreciseIterator<Item = ValType> + 'resources> {
1553        Ok(match kind {
1554            FrameKind::Loop => Either::A(self.params(ty)?),
1555            _ => Either::B(self.results(ty)?),
1556        })
1557    }
1558
1559    fn check_data_segment(&self, data_index: u32) -> Result<()> {
1560        match self.resources.data_count() {
1561            None => bail!(self.offset, "data count section required"),
1562            Some(count) if data_index < count => Ok(()),
1563            Some(_) => bail!(self.offset, "unknown data segment {data_index}"),
1564        }
1565    }
1566
    /// Validates the handler table attached to a `resume`-style
    /// instruction against the continuation type at `type_index`.
    ///
    /// Returns the function type `ts1 -> ts2` underlying that
    /// continuation type; the caller uses it to type the instruction's
    /// remaining operands and results.
    fn check_resume_table(
        &mut self,
        table: ResumeTable,
        type_index: u32, // The type index annotation on the `resume` instruction, which `table` appears on.
    ) -> Result<&'resources FuncType> {
        let cont_ty = self.cont_type_at(type_index)?;
        // ts1 -> ts2
        let old_func_ty = self.func_type_of_cont_type(cont_ty);
        for handle in table.handlers {
            match handle {
                Handle::OnLabel { tag, label } => {
                    // ts1' -> ts2'
                    let tag_ty = self.tag_at(tag)?;
                    // ts1'' (ref (cont $ft))
                    let block = self.jump(label)?;
                    // Pop the continuation reference.
                    // The label's *last* result must be a concrete (indexed)
                    // continuation reference; anything else is a type error
                    // handled by the two arms below.
                    match self.label_types(block.0, block.1)?.last() {
                        Some(ValType::Ref(rt)) if rt.is_concrete_type_ref() => {
                            let sub_ty = self.resources.sub_type_at_id(
                                rt.type_index()
                                    .unwrap()
                                    .as_core_type_id()
                                    .expect("canonicalized index"),
                            );
                            let new_cont = if let CompositeInnerType::Cont(cont) =
                                &sub_ty.composite_type.inner
                            {
                                cont
                            } else {
                                bail!(self.offset, "non-continuation type");
                            };
                            let new_func_ty = self.func_type_of_cont_type(&new_cont);
                            // Check that (ts2' -> ts2) <: $ft
                            if new_func_ty.params().len() != tag_ty.results().len()
                                || !self.is_subtype_many(new_func_ty.params(), tag_ty.results())
                                || old_func_ty.results().len() != new_func_ty.results().len()
                                || !self
                                    .is_subtype_many(old_func_ty.results(), new_func_ty.results())
                            {
                                bail!(self.offset, "type mismatch in continuation type")
                            }
                            // The label carries the tag's parameters plus the
                            // trailing continuation reference checked above.
                            let expected_nargs = tag_ty.params().len() + 1;
                            let actual_nargs = self.label_types(block.0, block.1)?.len();
                            if actual_nargs != expected_nargs {
                                bail!(
                                    self.offset,
                                    "type mismatch: expected {expected_nargs} label result(s), but label is annotated with {actual_nargs} results"
                                )
                            }

                            let labeltys =
                                self.label_types(block.0, block.1)?.take(expected_nargs - 1);

                            // Check that ts1'' <: ts1'.
                            // NOTE(review): the binding names appear swapped —
                            // `tagty` is drawn from the label's types and
                            // `lblty` from the tag's params — so the check
                            // compares tag params against label types; confirm
                            // the intended subtype direction against the
                            // stack-switching spec.
                            for (tagty, &lblty) in labeltys.zip(tag_ty.params()) {
                                if !self.resources.is_subtype(lblty, tagty) {
                                    bail!(
                                        self.offset,
                                        "type mismatch between tag type and label type"
                                    )
                                }
                            }
                        }
                        Some(ty) => {
                            bail!(self.offset, "type mismatch: {}", ty_to_str(ty))
                        }
                        _ => bail!(
                            self.offset,
                            "type mismatch: instruction requires continuation reference type but label has none"
                        ),
                    }
                }
                Handle::OnSwitch { tag } => {
                    // `switch` handlers only name a tag, which must take no
                    // parameters.
                    let tag_ty = self.tag_at(tag)?;
                    if tag_ty.params().len() != 0 {
                        bail!(self.offset, "type mismatch: non-empty tag parameter type")
                    }
                }
            }
        }
        Ok(old_func_ty)
    }
1649
1650    /// Applies `is_subtype` pointwise two equally sized collections
1651    /// (i.e. equally sized after skipped elements).
1652    fn is_subtype_many(&mut self, ts1: &[ValType], ts2: &[ValType]) -> bool {
1653        debug_assert!(ts1.len() == ts2.len());
1654        ts1.iter()
1655            .zip(ts2.iter())
1656            .all(|(ty1, ty2)| self.resources.is_subtype(*ty1, *ty2))
1657    }
1658
1659    fn check_binop128(&mut self) -> Result<()> {
1660        self.pop_operand(Some(ValType::I64))?;
1661        self.pop_operand(Some(ValType::I64))?;
1662        self.pop_operand(Some(ValType::I64))?;
1663        self.pop_operand(Some(ValType::I64))?;
1664        self.push_operand(ValType::I64)?;
1665        self.push_operand(ValType::I64)?;
1666        Ok(())
1667    }
1668
1669    fn check_i64_mul_wide(&mut self) -> Result<()> {
1670        self.pop_operand(Some(ValType::I64))?;
1671        self.pop_operand(Some(ValType::I64))?;
1672        self.push_operand(ValType::I64)?;
1673        self.push_operand(ValType::I64)?;
1674        Ok(())
1675    }
1676
1677    fn check_enabled(&self, flag: bool, desc: &str) -> Result<()> {
1678        if flag {
1679            return Ok(());
1680        }
1681        bail!(self.offset, "{desc} support is not enabled");
1682    }
1683}
1684
1685pub fn ty_to_str(ty: ValType) -> &'static str {
1686    match ty {
1687        ValType::I32 => "i32",
1688        ValType::I64 => "i64",
1689        ValType::F32 => "f32",
1690        ValType::F64 => "f64",
1691        ValType::V128 => "v128",
1692        ValType::Ref(r) => r.wat(),
1693    }
1694}
1695
/// A wrapper "visitor" around the real operator validator internally which
/// exists to check that the required wasm feature is enabled to proceed with
/// validation.
///
/// This validator is macro-generated to ensure that the proposal listed in this
/// crate's macro matches the one that's validated here. Each instruction's
/// visit method validates the specified proposal is enabled and then delegates
/// to `OperatorValidatorTemp` to perform the actual opcode validation.
struct WasmProposalValidator<'validator, 'resources, T>(
    // The inner validator that performs the per-opcode type checking once
    // the feature gate has passed.
    OperatorValidatorTemp<'validator, 'resources, T>,
);
1707
#[cfg_attr(not(feature = "simd"), allow(unused_macro_rules))]
macro_rules! validate_proposal {
    // Entry arm: for each instruction, generate a `fn $visit` that first
    // checks the instruction's feature gate and then forwards to the inner
    // validator (`self.0`).
    ($( @$proposal:ident $op:ident $({ $($arg:ident: $argty:ty),* })? => $visit:ident ($($ann:tt)*))*) => {
        $(
            fn $visit(&mut self $($(,$arg: $argty)*)?) -> Result<()> {
                validate_proposal!(validate self $proposal / $op);
                self.0.$visit($( $($arg),* )?)
            }
        )*
    };

    // MVP instructions are always available — no gate to check.
    (validate self mvp / $op:ident) => {};

    // These opcodes are handled specially below as they were introduced in the
    // bulk memory proposal but are gated by the `bulk_memory_opt`
    // "sub-proposal".
    (validate self $proposal:ident / MemoryFill) => {};
    (validate self $proposal:ident / MemoryCopy) => {};

    // Everything else: consult the proposal's flag on `WasmFeatures`, erroring
    // with the human-readable name from the `desc` arms below.
    (validate $self:ident $proposal:ident / $op:ident) => {
        $self.0.check_enabled($self.0.features.$proposal(), validate_proposal!(desc $proposal))?
    };

    // Human-readable proposal names used in "support is not enabled" errors.
    (desc simd) => ("SIMD");
    (desc relaxed_simd) => ("relaxed SIMD");
    (desc threads) => ("threads");
    (desc shared_everything_threads) => ("shared-everything-threads");
    (desc saturating_float_to_int) => ("saturating float to int conversions");
    (desc reference_types) => ("reference types");
    (desc bulk_memory) => ("bulk memory");
    (desc sign_extension) => ("sign extension operations");
    (desc exceptions) => ("exceptions");
    (desc tail_call) => ("tail calls");
    (desc function_references) => ("function references");
    (desc memory_control) => ("memory control");
    (desc gc) => ("gc");
    (desc legacy_exceptions) => ("legacy exceptions");
    (desc stack_switching) => ("stack switching");
    (desc wide_arithmetic) => ("wide arithmetic");
}
1748
// Wire every operator visit through the feature-gating macro above: each
// generated method checks the relevant proposal flag and then delegates to
// the inner `OperatorValidatorTemp`.
impl<'a, T> VisitOperator<'a> for WasmProposalValidator<'_, '_, T>
where
    T: WasmModuleResources,
{
    type Output = Result<()>;

    #[cfg(feature = "simd")]
    fn simd_visitor(&mut self) -> Option<&mut dyn VisitSimdOperator<'a, Output = Self::Output>> {
        // SIMD opcodes are dispatched through `self` as well (see the
        // `VisitSimdOperator` impl below).
        Some(self)
    }

    crate::for_each_visit_operator!(validate_proposal);
}
1762
// Same feature-gated forwarding for the SIMD operator family, compiled only
// when the `simd` crate feature is on.
#[cfg(feature = "simd")]
impl<'a, T> VisitSimdOperator<'a> for WasmProposalValidator<'_, '_, T>
where
    T: WasmModuleResources,
{
    crate::for_each_visit_simd_operator!(validate_proposal);
}
1770
1771#[track_caller]
1772#[inline]
1773fn debug_assert_type_indices_are_ids(ty: ValType) {
1774    if cfg!(debug_assertions) {
1775        if let ValType::Ref(r) = ty {
1776            if let HeapType::Concrete(idx) = r.heap_type() {
1777                debug_assert!(
1778                    matches!(idx, UnpackedIndex::Id(_)),
1779                    "type reference should be a `CoreTypeId`, found {idx:?}"
1780                );
1781            }
1782        }
1783    }
1784}
1785
1786impl<'a, T> VisitOperator<'a> for OperatorValidatorTemp<'_, '_, T>
1787where
1788    T: WasmModuleResources,
1789{
1790    type Output = Result<()>;
1791
    #[cfg(feature = "simd")]
    fn simd_visitor(&mut self) -> Option<&mut dyn VisitSimdOperator<'a, Output = Self::Output>> {
        // This validator handles SIMD opcodes itself.
        Some(self)
    }

    // `nop` leaves the stack untouched.
    fn visit_nop(&mut self) -> Self::Output {
        Ok(())
    }
    // `unreachable` makes the rest of the current frame polymorphic.
    fn visit_unreachable(&mut self) -> Self::Output {
        self.unreachable()?;
        Ok(())
    }
1804    fn visit_block(&mut self, mut ty: BlockType) -> Self::Output {
1805        self.check_block_type(&mut ty)?;
1806        for ty in self.params(ty)?.rev() {
1807            self.pop_operand(Some(ty))?;
1808        }
1809        self.push_ctrl(FrameKind::Block, ty)?;
1810        Ok(())
1811    }
1812    fn visit_loop(&mut self, mut ty: BlockType) -> Self::Output {
1813        self.check_block_type(&mut ty)?;
1814        for ty in self.params(ty)?.rev() {
1815            self.pop_operand(Some(ty))?;
1816        }
1817        self.push_ctrl(FrameKind::Loop, ty)?;
1818        Ok(())
1819    }
1820    fn visit_if(&mut self, mut ty: BlockType) -> Self::Output {
1821        self.check_block_type(&mut ty)?;
1822        self.pop_operand(Some(ValType::I32))?;
1823        for ty in self.params(ty)?.rev() {
1824            self.pop_operand(Some(ty))?;
1825        }
1826        self.push_ctrl(FrameKind::If, ty)?;
1827        Ok(())
1828    }
1829    fn visit_else(&mut self) -> Self::Output {
1830        let frame = self.pop_ctrl()?;
1831        if frame.kind != FrameKind::If {
1832            bail!(self.offset, "else found outside of an `if` block");
1833        }
1834        self.push_ctrl(FrameKind::Else, frame.block_type)?;
1835        Ok(())
1836    }
1837    fn visit_try_table(&mut self, mut ty: TryTable) -> Self::Output {
1838        self.check_block_type(&mut ty.ty)?;
1839        for ty in self.params(ty.ty)?.rev() {
1840            self.pop_operand(Some(ty))?;
1841        }
1842        let exn_type = ValType::from(RefType::EXN);
1843        for catch in ty.catches {
1844            match catch {
1845                Catch::One { tag, label } => {
1846                    let tag = self.exception_tag_at(tag)?;
1847                    let (ty, kind) = self.jump(label)?;
1848                    let params = tag.params();
1849                    let types = self.label_types(ty, kind)?;
1850                    if params.len() != types.len() {
1851                        bail!(
1852                            self.offset,
1853                            "type mismatch: catch label must have same number of types as tag"
1854                        );
1855                    }
1856                    for (expected, actual) in types.zip(params) {
1857                        self.match_operand(*actual, expected)?;
1858                    }
1859                }
1860                Catch::OneRef { tag, label } => {
1861                    let tag = self.exception_tag_at(tag)?;
1862                    let (ty, kind) = self.jump(label)?;
1863                    let tag_params = tag.params().iter().copied();
1864                    let label_types = self.label_types(ty, kind)?;
1865                    if tag_params.len() + 1 != label_types.len() {
1866                        bail!(
1867                            self.offset,
1868                            "type mismatch: catch_ref label must have one \
1869                             more type than tag types",
1870                        );
1871                    }
1872                    for (expected_label_type, actual_tag_param) in
1873                        label_types.zip(tag_params.chain([exn_type]))
1874                    {
1875                        self.match_operand(actual_tag_param, expected_label_type)?;
1876                    }
1877                }
1878
1879                Catch::All { label } => {
1880                    let (ty, kind) = self.jump(label)?;
1881                    if self.label_types(ty, kind)?.len() != 0 {
1882                        bail!(
1883                            self.offset,
1884                            "type mismatch: catch_all label must have no result types"
1885                        );
1886                    }
1887                }
1888
1889                Catch::AllRef { label } => {
1890                    let (ty, kind) = self.jump(label)?;
1891                    let mut types = self.label_types(ty, kind)?;
1892                    let ty = match (types.next(), types.next()) {
1893                        (Some(ty), None) => ty,
1894                        _ => {
1895                            bail!(
1896                                self.offset,
1897                                "type mismatch: catch_all_ref label must have \
1898                                 exactly one result type"
1899                            );
1900                        }
1901                    };
1902                    if !self.resources.is_subtype(exn_type, ty) {
1903                        bail!(
1904                            self.offset,
1905                            "type mismatch: catch_all_ref label must a \
1906                             subtype of (ref exn)"
1907                        );
1908                    }
1909                }
1910            }
1911        }
1912        self.push_ctrl(FrameKind::TryTable, ty.ty)?;
1913        Ok(())
1914    }
1915    fn visit_throw(&mut self, index: u32) -> Self::Output {
1916        // Check values associated with the exception.
1917        let ty = self.exception_tag_at(index)?;
1918        for ty in ty.clone().params().iter().rev() {
1919            self.pop_operand(Some(*ty))?;
1920        }
1921        // this should be validated when the tag was defined in the module
1922        debug_assert!(ty.results().is_empty());
1923        self.unreachable()?;
1924        Ok(())
1925    }
    // `throw_ref` consumes an `exnref` operand and, like `throw`, leaves the
    // remainder of the frame unreachable.
    fn visit_throw_ref(&mut self) -> Self::Output {
        self.pop_operand(Some(ValType::EXNREF))?;
        self.unreachable()?;
        Ok(())
    }
    // Closes the current control frame and pushes its results onto the
    // enclosing frame's operand stack.
    fn visit_end(&mut self) -> Self::Output {
        let mut frame = self.pop_ctrl()?;

        // Note that this `if` isn't included in the appendix right
        // now, but it's used to allow for `if` statements that are
        // missing an `else` block which have the same parameter/return
        // types on the block (since that's valid).
        if frame.kind == FrameKind::If {
            self.push_ctrl(FrameKind::Else, frame.block_type)?;
            frame = self.pop_ctrl()?;
        }
        // The closed frame's results become operands of the outer frame.
        for ty in self.results(frame.block_type)? {
            self.push_operand(ty)?;
        }
        if self.control.is_empty() {
            // Final `end` of the function: sanity-check that a real,
            // nonzero offset was recorded.
            assert_ne!(self.offset, 0);
        }
        Ok(())
    }
    // `br`: unconditionally branches to a label — pops the label's types and
    // marks the rest of the frame unreachable.
    fn visit_br(&mut self, relative_depth: u32) -> Self::Output {
        let (ty, kind) = self.jump(relative_depth)?;
        for ty in self.label_types(ty, kind)?.rev() {
            self.pop_operand(Some(ty))?;
        }
        self.unreachable()?;
        Ok(())
    }
    // `br_if`: pops the i32 condition, then pops and re-pushes the label's
    // types so the fall-through path keeps them on the stack.
    fn visit_br_if(&mut self, relative_depth: u32) -> Self::Output {
        self.pop_operand(Some(ValType::I32))?;
        let (ty, kind) = self.jump(relative_depth)?;
        let label_types = self.label_types(ty, kind)?;
        self.pop_push_label_types(label_types)?;
        Ok(())
    }
    // `br_table`: a multi-way branch — all targets must agree in arity with
    // the default label and be compatible with the current stack.
    fn visit_br_table(&mut self, table: BrTable) -> Self::Output {
        // The i32 index selecting which target to branch to.
        self.pop_operand(Some(ValType::I32))?;
        let default = self.jump(table.default())?;
        let default_types = self.label_types(default.0, default.1)?;
        for element in table.targets() {
            let relative_depth = element?;
            let block = self.jump(relative_depth)?;
            let label_tys = self.label_types(block.0, block.1)?;
            // Every target label must have the same number of types as the
            // default label...
            if label_tys.len() != default_types.len() {
                bail!(
                    self.offset,
                    "type mismatch: br_table target labels have different number of types"
                );
            }
            // ...and be satisfiable by the operands currently on the stack.
            self.match_stack_operands(label_tys)?;
        }
        for ty in default_types.rev() {
            self.pop_operand(Some(ty))?;
        }
        self.unreachable()?;
        Ok(())
    }
    fn visit_return(&mut self) -> Self::Output {
        self.check_return()?;
        Ok(())
    }
    // The `call`/`return_call` family all reduce to resolving the callee's
    // function type and checking it as either a plain call or a tail call.
    fn visit_call(&mut self, function_index: u32) -> Self::Output {
        let ty = self.type_of_function(function_index)?;
        self.check_call_ty(ty)?;
        Ok(())
    }
    fn visit_return_call(&mut self, function_index: u32) -> Self::Output {
        let ty = self.type_of_function(function_index)?;
        self.check_return_call_ty(ty)?;
        Ok(())
    }
    // `call_ref`/`return_call_ref` additionally pop a typed function
    // reference (handled inside `check_call_ref_ty`).
    fn visit_call_ref(&mut self, type_index: u32) -> Self::Output {
        let ty = self.check_call_ref_ty(type_index)?;
        self.check_call_ty(ty)?;
        Ok(())
    }
    fn visit_return_call_ref(&mut self, type_index: u32) -> Self::Output {
        let ty = self.check_call_ref_ty(type_index)?;
        self.check_return_call_ty(ty)?;
        Ok(())
    }
    // Indirect calls go through a table; the table/type pair is validated by
    // `check_call_indirect_ty`.
    fn visit_call_indirect(&mut self, type_index: u32, table_index: u32) -> Self::Output {
        let ty = self.check_call_indirect_ty(type_index, table_index)?;
        self.check_call_ty(ty)?;
        Ok(())
    }
    fn visit_return_call_indirect(&mut self, type_index: u32, table_index: u32) -> Self::Output {
        let ty = self.check_call_indirect_ty(type_index, table_index)?;
        self.check_return_call_ty(ty)?;
        Ok(())
    }
    // `drop` discards one operand of any type.
    fn visit_drop(&mut self) -> Self::Output {
        self.pop_operand(None)?;
        Ok(())
    }
    // Untyped `select`: condition plus two operands of the same numeric/v128
    // type; reference types require the typed `select (result t)` form.
    fn visit_select(&mut self) -> Self::Output {
        self.pop_operand(Some(ValType::I32))?;
        let ty1 = self.pop_operand(None)?;
        let ty2 = self.pop_operand(None)?;

        let ty = match (ty1, ty2) {
            // All heap-related types aren't allowed with the `select`
            // instruction
            (MaybeType::UnknownRef(..), _)
            | (_, MaybeType::UnknownRef(..))
            | (MaybeType::Known(ValType::Ref(_)), _)
            | (_, MaybeType::Known(ValType::Ref(_))) => {
                bail!(
                    self.offset,
                    "type mismatch: select only takes integral types"
                )
            }

            // If one operand is the "bottom" type then whatever the other
            // operand is is the result of the `select`
            (MaybeType::Bottom, t) | (t, MaybeType::Bottom) => t,

            // Otherwise these are two integral types and they must match for
            // `select` to typecheck.
            (t @ MaybeType::Known(t1), MaybeType::Known(t2)) => {
                if t1 != t2 {
                    bail!(
                        self.offset,
                        "type mismatch: select operands have different types"
                    );
                }
                t
            }
        };
        // The (possibly bottom) unified type is the instruction's result.
        self.push_operand(ty)?;
        Ok(())
    }
2062    fn visit_typed_select(&mut self, mut ty: ValType) -> Self::Output {
2063        self.resources
2064            .check_value_type(&mut ty, &self.features, self.offset)?;
2065        self.pop_operand(Some(ValType::I32))?;
2066        self.pop_operand(Some(ty))?;
2067        self.pop_operand(Some(ty))?;
2068        self.push_operand(ty)?;
2069        Ok(())
2070    }
    // Multi-result `select` annotations are invalid; the arity-1 case is
    // presumably routed to `visit_typed_select` before reaching here (the
    // debug_assert documents that expectation — confirm against the reader).
    fn visit_typed_select_multi(&mut self, tys: Vec<ValType>) -> Self::Output {
        debug_assert!(tys.len() != 1);
        bail!(self.offset, "invalid result arity");
    }
    fn visit_local_get(&mut self, local_index: u32) -> Self::Output {
        let ty = self.local(local_index)?;
        debug_assert_type_indices_are_ids(ty);
        // Reject reads of locals that have not yet been initialized.
        if self.local_inits.is_uninit(local_index) {
            bail!(self.offset, "uninitialized local: {}", local_index);
        }
        self.push_operand(ty)?;
        Ok(())
    }
    fn visit_local_set(&mut self, local_index: u32) -> Self::Output {
        let ty = self.local(local_index)?;
        self.pop_operand(Some(ty))?;
        // Writing a local marks it initialized for subsequent reads.
        self.local_inits.set_init(local_index);
        Ok(())
    }
    fn visit_local_tee(&mut self, local_index: u32) -> Self::Output {
        let expected_ty = self.local(local_index)?;
        self.pop_operand(Some(expected_ty))?;
        self.local_inits.set_init(local_index);
        // `tee` leaves the value on the stack, typed as the local's type.
        self.push_operand(expected_ty)?;
        Ok(())
    }
    fn visit_global_get(&mut self, global_index: u32) -> Self::Output {
        let ty = self.global_type_at(global_index)?.content_type;
        debug_assert_type_indices_are_ids(ty);
        self.push_operand(ty)?;
        Ok(())
    }
    // Atomic variant of `global.get`, restricted to i32/i64/anyref-compatible
    // content types.
    fn visit_global_atomic_get(&mut self, _ordering: Ordering, global_index: u32) -> Self::Output {
        self.visit_global_get(global_index)?;
        // No validation of `ordering` is needed because `global.atomic.get` can
        // be used on both shared and unshared globals. But we do need to limit
        // which types can be used with this instruction.
        let ty = self.global_type_at(global_index)?.content_type;
        let supertype = RefType::ANYREF.into();
        if !(ty == ValType::I32 || ty == ValType::I64 || self.resources.is_subtype(ty, supertype)) {
            bail!(
                self.offset,
                "invalid type: `global.atomic.get` only allows `i32`, `i64` and subtypes of `anyref`"
            );
        }
        Ok(())
    }
    fn visit_global_set(&mut self, global_index: u32) -> Self::Output {
        let ty = self.global_type_at(global_index)?;
        // Only mutable globals may be written.
        if !ty.mutable {
            bail!(
                self.offset,
                "global is immutable: cannot modify it with `global.set`"
            );
        }
        self.pop_operand(Some(ty.content_type))?;
        Ok(())
    }
    // Atomic variant of `global.set`, restricted to i32/i64/anyref-compatible
    // content types.
    fn visit_global_atomic_set(&mut self, _ordering: Ordering, global_index: u32) -> Self::Output {
        self.visit_global_set(global_index)?;
        // No validation of `ordering` is needed because `global.atomic.set` can
        // be used on both shared and unshared globals.
        let ty = self.global_type_at(global_index)?.content_type;
        let supertype = RefType::ANYREF.into();
        if !(ty == ValType::I32 || ty == ValType::I64 || self.resources.is_subtype(ty, supertype)) {
            bail!(
                self.offset,
                "invalid type: `global.atomic.set` only allows `i32`, `i64` and subtypes of `anyref`"
            );
        }
        Ok(())
    }
    // The arithmetic/bitwise atomic global RMW instructions all share the
    // same shape: `check_atomic_global_rmw_ty` validates the global's content
    // type and `check_unary_op` types the value-in/value-out exchange.
    fn visit_global_atomic_rmw_add(
        &mut self,
        _ordering: crate::Ordering,
        global_index: u32,
    ) -> Self::Output {
        let ty = self.check_atomic_global_rmw_ty(global_index)?;
        self.check_unary_op(ty)
    }
    fn visit_global_atomic_rmw_sub(
        &mut self,
        _ordering: crate::Ordering,
        global_index: u32,
    ) -> Self::Output {
        let ty = self.check_atomic_global_rmw_ty(global_index)?;
        self.check_unary_op(ty)
    }
    fn visit_global_atomic_rmw_and(
        &mut self,
        _ordering: crate::Ordering,
        global_index: u32,
    ) -> Self::Output {
        let ty = self.check_atomic_global_rmw_ty(global_index)?;
        self.check_unary_op(ty)
    }
    fn visit_global_atomic_rmw_or(
        &mut self,
        _ordering: crate::Ordering,
        global_index: u32,
    ) -> Self::Output {
        let ty = self.check_atomic_global_rmw_ty(global_index)?;
        self.check_unary_op(ty)
    }
    fn visit_global_atomic_rmw_xor(
        &mut self,
        _ordering: crate::Ordering,
        global_index: u32,
    ) -> Self::Output {
        let ty = self.check_atomic_global_rmw_ty(global_index)?;
        self.check_unary_op(ty)
    }
    // `xchg` allows i32/i64/anyref-compatible globals; `cmpxchg` is narrower
    // (subtypes of `eqref`) since it must compare the old value.
    fn visit_global_atomic_rmw_xchg(
        &mut self,
        _ordering: crate::Ordering,
        global_index: u32,
    ) -> Self::Output {
        let ty = self.global_type_at(global_index)?.content_type;
        if !(ty == ValType::I32
            || ty == ValType::I64
            || self.resources.is_subtype(ty, RefType::ANYREF.into()))
        {
            bail!(
                self.offset,
                "invalid type: `global.atomic.rmw.xchg` only allows `i32`, `i64` and subtypes of `anyref`"
            );
        }
        self.check_unary_op(ty)
    }
    fn visit_global_atomic_rmw_cmpxchg(
        &mut self,
        _ordering: crate::Ordering,
        global_index: u32,
    ) -> Self::Output {
        let ty = self.global_type_at(global_index)?.content_type;
        if !(ty == ValType::I32
            || ty == ValType::I64
            || self.resources.is_subtype(ty, RefType::EQREF.into()))
        {
            bail!(
                self.offset,
                "invalid type: `global.atomic.rmw.cmpxchg` only allows `i32`, `i64` and subtypes of `eqref`"
            );
        }
        // cmpxchg takes two operands (expected + replacement).
        self.check_binary_op(ty)
    }
2217
    // Plain loads: `check_memarg` validates alignment/memory index and yields
    // the memory's index type (i32 or i64), which is popped as the address;
    // the loaded value's type is pushed. Float loads additionally require
    // float support to be enabled.
    fn visit_i32_load(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ty))?;
        self.push_operand(ValType::I32)?;
        Ok(())
    }
    fn visit_i64_load(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ty))?;
        self.push_operand(ValType::I64)?;
        Ok(())
    }
    fn visit_f32_load(&mut self, memarg: MemArg) -> Self::Output {
        self.check_floats_enabled()?;
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ty))?;
        self.push_operand(ValType::F32)?;
        Ok(())
    }
    fn visit_f64_load(&mut self, memarg: MemArg) -> Self::Output {
        self.check_floats_enabled()?;
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ty))?;
        self.push_operand(ValType::F64)?;
        Ok(())
    }
    // Narrow (sub-width) loads. The unsigned variants delegate to the signed
    // ones since the validation-level types are identical; only runtime
    // extension differs.
    fn visit_i32_load8_s(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ty))?;
        self.push_operand(ValType::I32)?;
        Ok(())
    }
    fn visit_i32_load8_u(&mut self, memarg: MemArg) -> Self::Output {
        self.visit_i32_load8_s(memarg)
    }
    fn visit_i32_load16_s(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ty))?;
        self.push_operand(ValType::I32)?;
        Ok(())
    }
    fn visit_i32_load16_u(&mut self, memarg: MemArg) -> Self::Output {
        self.visit_i32_load16_s(memarg)
    }
    fn visit_i64_load8_s(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ty))?;
        self.push_operand(ValType::I64)?;
        Ok(())
    }
    fn visit_i64_load8_u(&mut self, memarg: MemArg) -> Self::Output {
        self.visit_i64_load8_s(memarg)
    }
    fn visit_i64_load16_s(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ty))?;
        self.push_operand(ValType::I64)?;
        Ok(())
    }
    fn visit_i64_load16_u(&mut self, memarg: MemArg) -> Self::Output {
        self.visit_i64_load16_s(memarg)
    }
    fn visit_i64_load32_s(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ty))?;
        self.push_operand(ValType::I64)?;
        Ok(())
    }
    fn visit_i64_load32_u(&mut self, memarg: MemArg) -> Self::Output {
        self.visit_i64_load32_s(memarg)
    }
    // Stores: pop the value being stored first (it is on top of the stack),
    // then the address, typed as the memory's index type from `check_memarg`.
    fn visit_i32_store(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(ty))?;
        Ok(())
    }
    fn visit_i64_store(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ValType::I64))?;
        self.pop_operand(Some(ty))?;
        Ok(())
    }
    fn visit_f32_store(&mut self, memarg: MemArg) -> Self::Output {
        self.check_floats_enabled()?;
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ValType::F32))?;
        self.pop_operand(Some(ty))?;
        Ok(())
    }
    fn visit_f64_store(&mut self, memarg: MemArg) -> Self::Output {
        self.check_floats_enabled()?;
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ValType::F64))?;
        self.pop_operand(Some(ty))?;
        Ok(())
    }
    // Narrow stores validate identically to full-width ones; only the
    // runtime truncation differs.
    fn visit_i32_store8(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(ty))?;
        Ok(())
    }
    fn visit_i32_store16(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(ty))?;
        Ok(())
    }
    fn visit_i64_store8(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ValType::I64))?;
        self.pop_operand(Some(ty))?;
        Ok(())
    }
    fn visit_i64_store16(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ValType::I64))?;
        self.pop_operand(Some(ty))?;
        Ok(())
    }
    fn visit_i64_store32(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ValType::I64))?;
        self.pop_operand(Some(ty))?;
        Ok(())
    }
    // `memory.size`/`memory.grow` operate in units of the memory's index
    // type (i32, or i64 for memory64).
    fn visit_memory_size(&mut self, mem: u32) -> Self::Output {
        let index_ty = self.check_memory_index(mem)?;
        self.push_operand(index_ty)?;
        Ok(())
    }
    fn visit_memory_grow(&mut self, mem: u32) -> Self::Output {
        let index_ty = self.check_memory_index(mem)?;
        // Pops the delta, pushes the previous size (or -1 at runtime).
        self.pop_operand(Some(index_ty))?;
        self.push_operand(index_ty)?;
        Ok(())
    }
    // Constants simply push their type; the value itself is irrelevant to
    // validation. Float constants are gated on float support.
    fn visit_i32_const(&mut self, _value: i32) -> Self::Output {
        self.push_operand(ValType::I32)?;
        Ok(())
    }
    fn visit_i64_const(&mut self, _value: i64) -> Self::Output {
        self.push_operand(ValType::I64)?;
        Ok(())
    }
    fn visit_f32_const(&mut self, _value: Ieee32) -> Self::Output {
        self.check_floats_enabled()?;
        self.push_operand(ValType::F32)?;
        Ok(())
    }
    fn visit_f64_const(&mut self, _value: Ieee64) -> Self::Output {
        self.check_floats_enabled()?;
        self.push_operand(ValType::F64)?;
        Ok(())
    }
2374    fn visit_i32_eqz(&mut self) -> Self::Output {
2375        self.pop_operand(Some(ValType::I32))?;
2376        self.push_operand(ValType::I32)?;
2377        Ok(())
2378    }
    // i32 comparisons: `check_cmp_op` validates [i32 i32] -> [i32].
    fn visit_i32_eq(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i32_ne(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i32_lt_s(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i32_lt_u(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i32_gt_s(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i32_gt_u(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i32_le_s(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i32_le_u(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i32_ge_s(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i32_ge_u(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
2409    fn visit_i64_eqz(&mut self) -> Self::Output {
2410        self.pop_operand(Some(ValType::I64))?;
2411        self.push_operand(ValType::I32)?;
2412        Ok(())
2413    }
    // i64 comparisons: [i64 i64] -> [i32].
    fn visit_i64_eq(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_i64_ne(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_i64_lt_s(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_i64_lt_u(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_i64_gt_s(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_i64_gt_u(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_i64_le_s(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_i64_le_u(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_i64_ge_s(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_i64_ge_u(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    // Float comparisons: [fNN fNN] -> [i32]. `check_fcmp_op` is the
    // float analogue of `check_cmp_op` (see its definition for any
    // additional float feature gating).
    fn visit_f32_eq(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F32)
    }
    fn visit_f32_ne(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F32)
    }
    fn visit_f32_lt(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F32)
    }
    fn visit_f32_gt(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F32)
    }
    fn visit_f32_le(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F32)
    }
    fn visit_f32_ge(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F32)
    }
    fn visit_f64_eq(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F64)
    }
    fn visit_f64_ne(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F64)
    }
    fn visit_f64_lt(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F64)
    }
    fn visit_f64_gt(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F64)
    }
    fn visit_f64_le(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F64)
    }
    fn visit_f64_ge(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F64)
    }
    // Integer unary operators ([t] -> [t] via `check_unary_op`) and
    // binary operators ([t t] -> [t] via `check_binary_op`), per the
    // core spec's numeric instruction typing.
    fn visit_i32_clz(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I32)
    }
    fn visit_i32_ctz(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I32)
    }
    fn visit_i32_popcnt(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I32)
    }
    fn visit_i32_add(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_sub(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_mul(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_div_s(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_div_u(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_rem_s(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_rem_u(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_and(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_or(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_xor(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_shl(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_shr_s(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_shr_u(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_rotl(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_rotr(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i64_clz(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I64)
    }
    fn visit_i64_ctz(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I64)
    }
    fn visit_i64_popcnt(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I64)
    }
    fn visit_i64_add(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_sub(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_mul(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_div_s(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_div_u(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_rem_s(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_rem_u(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_and(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_or(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_xor(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_shl(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_shr_s(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_shr_u(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_rotl(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_rotr(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    // Float unary operators ([f] -> [f] via `check_funary_op`) and
    // binary operators ([f f] -> [f] via `check_fbinary_op`); the `f`
    // helpers mirror the integer ones for float operand types.
    fn visit_f32_abs(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F32)
    }
    fn visit_f32_neg(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F32)
    }
    fn visit_f32_ceil(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F32)
    }
    fn visit_f32_floor(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F32)
    }
    fn visit_f32_trunc(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F32)
    }
    fn visit_f32_nearest(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F32)
    }
    fn visit_f32_sqrt(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F32)
    }
    fn visit_f32_add(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F32)
    }
    fn visit_f32_sub(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F32)
    }
    fn visit_f32_mul(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F32)
    }
    fn visit_f32_div(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F32)
    }
    fn visit_f32_min(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F32)
    }
    fn visit_f32_max(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F32)
    }
    fn visit_f32_copysign(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F32)
    }
    fn visit_f64_abs(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F64)
    }
    fn visit_f64_neg(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F64)
    }
    fn visit_f64_ceil(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F64)
    }
    fn visit_f64_floor(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F64)
    }
    fn visit_f64_trunc(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F64)
    }
    fn visit_f64_nearest(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F64)
    }
    fn visit_f64_sqrt(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F64)
    }
    fn visit_f64_add(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F64)
    }
    fn visit_f64_sub(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F64)
    }
    fn visit_f64_mul(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F64)
    }
    fn visit_f64_div(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F64)
    }
    fn visit_f64_min(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F64)
    }
    fn visit_f64_max(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F64)
    }
    fn visit_f64_copysign(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F64)
    }
    // Conversions: `check_conversion_op(result, input)` pops `input` and
    // pushes `result`; `check_fconversion_op` is the variant for float
    // results. Argument order is (result type, operand type), matching
    // the instruction name read right-to-left (e.g. `i32.wrap_i64` is
    // [i64] -> [i32]).
    fn visit_i32_wrap_i64(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I32, ValType::I64)
    }
    fn visit_i32_trunc_f32_s(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I32, ValType::F32)
    }
    fn visit_i32_trunc_f32_u(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I32, ValType::F32)
    }
    fn visit_i32_trunc_f64_s(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I32, ValType::F64)
    }
    fn visit_i32_trunc_f64_u(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I32, ValType::F64)
    }
    fn visit_i64_extend_i32_s(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::I32)
    }
    fn visit_i64_extend_i32_u(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::I32)
    }
    fn visit_i64_trunc_f32_s(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::F32)
    }
    fn visit_i64_trunc_f32_u(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::F32)
    }
    fn visit_i64_trunc_f64_s(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::F64)
    }
    fn visit_i64_trunc_f64_u(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::F64)
    }
    fn visit_f32_convert_i32_s(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F32, ValType::I32)
    }
    fn visit_f32_convert_i32_u(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F32, ValType::I32)
    }
    fn visit_f32_convert_i64_s(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F32, ValType::I64)
    }
    fn visit_f32_convert_i64_u(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F32, ValType::I64)
    }
    fn visit_f32_demote_f64(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F32, ValType::F64)
    }
    fn visit_f64_convert_i32_s(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F64, ValType::I32)
    }
    fn visit_f64_convert_i32_u(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F64, ValType::I32)
    }
    fn visit_f64_convert_i64_s(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F64, ValType::I64)
    }
    fn visit_f64_convert_i64_u(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F64, ValType::I64)
    }
    fn visit_f64_promote_f32(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F64, ValType::F32)
    }
    // Reinterpretations: bit-preserving conversions between same-width
    // integer and float types.
    fn visit_i32_reinterpret_f32(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I32, ValType::F32)
    }
    fn visit_i64_reinterpret_f64(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::F64)
    }
    fn visit_f32_reinterpret_i32(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F32, ValType::I32)
    }
    fn visit_f64_reinterpret_i64(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F64, ValType::I64)
    }
    // Saturating truncations (nontrapping-float-to-int proposal): same
    // typing as the trapping `trunc` variants.
    fn visit_i32_trunc_sat_f32_s(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I32, ValType::F32)
    }
    fn visit_i32_trunc_sat_f32_u(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I32, ValType::F32)
    }
    fn visit_i32_trunc_sat_f64_s(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I32, ValType::F64)
    }
    fn visit_i32_trunc_sat_f64_u(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I32, ValType::F64)
    }
    fn visit_i64_trunc_sat_f32_s(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::F32)
    }
    fn visit_i64_trunc_sat_f32_u(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::F32)
    }
    fn visit_i64_trunc_sat_f64_s(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::F64)
    }
    fn visit_i64_trunc_sat_f64_u(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::F64)
    }
    // Sign-extension operators: in-place [t] -> [t] operations, so they
    // validate as unary ops.
    fn visit_i32_extend8_s(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I32)
    }
    fn visit_i32_extend16_s(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I32)
    }
    fn visit_i64_extend8_s(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I64)
    }
    fn visit_i64_extend16_s(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I64)
    }
    fn visit_i64_extend32_s(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I64)
    }
    // Atomic loads ([addr] -> [t] via `check_atomic_load`) and stores
    // ([addr t] -> [] via `check_atomic_store`), from the threads
    // proposal; the helpers presumably also enforce the proposal's
    // natural-alignment requirement -- see their definitions.
    fn visit_i32_atomic_load(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_load(memarg, ValType::I32)
    }
    fn visit_i32_atomic_load16_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_load(memarg, ValType::I32)
    }
    fn visit_i32_atomic_load8_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_load(memarg, ValType::I32)
    }
    fn visit_i64_atomic_load(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_load(memarg, ValType::I64)
    }
    fn visit_i64_atomic_load32_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_load(memarg, ValType::I64)
    }
    fn visit_i64_atomic_load16_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_load(memarg, ValType::I64)
    }
    fn visit_i64_atomic_load8_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_load(memarg, ValType::I64)
    }
    fn visit_i32_atomic_store(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_store(memarg, ValType::I32)
    }
    fn visit_i32_atomic_store16(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_store(memarg, ValType::I32)
    }
    fn visit_i32_atomic_store8(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_store(memarg, ValType::I32)
    }
    fn visit_i64_atomic_store(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_store(memarg, ValType::I64)
    }
    fn visit_i64_atomic_store32(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_store(memarg, ValType::I64)
    }
    fn visit_i64_atomic_store16(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_store(memarg, ValType::I64)
    }
    fn visit_i64_atomic_store8(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_store(memarg, ValType::I64)
    }
    // Atomic read-modify-write operators. Per the threads proposal,
    // `rmw` / `rmwN` / `xchg` type as [addr t] -> [t]
    // (`check_atomic_binary_memory_op`) and `cmpxchg` as
    // [addr expected:t replacement:t] -> [t]
    // (`check_atomic_binary_memory_cmpxchg`). `memory.atomic.notify` at
    // the end reuses the binary helper because its typing
    // ([addr count:i32] -> [i32]) coincides.
    fn visit_i32_atomic_rmw_add(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw_sub(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw_and(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw_or(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw_xor(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw16_add_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw16_sub_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw16_and_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw16_or_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw16_xor_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw8_add_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw8_sub_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw8_and_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw8_or_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw8_xor_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i64_atomic_rmw_add(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw_sub(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw_and(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw_or(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw_xor(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw32_add_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw32_sub_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw32_and_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw32_or_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw32_xor_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw16_add_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw16_sub_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw16_and_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw16_or_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw16_xor_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw8_add_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw8_sub_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw8_and_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw8_or_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw8_xor_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i32_atomic_rmw_xchg(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw16_xchg_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw8_xchg_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw_cmpxchg(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_cmpxchg(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw16_cmpxchg_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_cmpxchg(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw8_cmpxchg_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_cmpxchg(memarg, ValType::I32)
    }
    fn visit_i64_atomic_rmw_xchg(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw32_xchg_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw16_xchg_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw8_xchg_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw_cmpxchg(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_cmpxchg(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw32_cmpxchg_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_cmpxchg(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw16_cmpxchg_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_cmpxchg(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw8_cmpxchg_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_cmpxchg(memarg, ValType::I64)
    }
    fn visit_memory_atomic_notify(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_memory_atomic_wait32(&mut self, memarg: MemArg) -> Self::Output {
        // `memory.atomic.wait32`: [addr expected:i32 timeout:i64] -> [i32].
        // Operands are popped in reverse push order.
        let ty = self.check_shared_memarg(memarg)?;
        // Relative timeout in nanoseconds.
        self.pop_operand(Some(ValType::I64))?;
        // Expected value at the address.
        self.pop_operand(Some(ValType::I32))?;
        // Address, typed by the memory's index type.
        self.pop_operand(Some(ty))?;
        // Wake-up result code.
        self.push_operand(ValType::I32)?;
        Ok(())
    }
    fn visit_memory_atomic_wait64(&mut self, memarg: MemArg) -> Self::Output {
        // `memory.atomic.wait64`: same as `wait32` but the expected
        // value is an i64: [addr expected:i64 timeout:i64] -> [i32].
        let ty = self.check_shared_memarg(memarg)?;
        // Relative timeout in nanoseconds.
        self.pop_operand(Some(ValType::I64))?;
        // Expected value at the address.
        self.pop_operand(Some(ValType::I64))?;
        // Address, typed by the memory's index type.
        self.pop_operand(Some(ty))?;
        // Wake-up result code.
        self.push_operand(ValType::I32)?;
        Ok(())
    }
    fn visit_atomic_fence(&mut self) -> Self::Output {
        // `atomic.fence` has no operands or results -- it only orders
        // memory accesses at runtime -- so validation is a no-op.
        Ok(())
    }
    fn visit_ref_null(&mut self, mut heap_type: HeapType) -> Self::Output {
        // First feature-gate the nullable reference type, if it is
        // representable at all (`RefType::new` can fail, e.g. for a type
        // index too large -- cf. the limit error in `visit_ref_func`).
        if let Some(ty) = RefType::new(true, heap_type) {
            self.features
                .check_ref_type(ty)
                .map_err(|e| BinaryReaderError::new(e, self.offset))?;
        }
        // Validates the heap type and may rewrite it in place (note the
        // `&mut`) -- see `check_heap_type` for the exact canonicalization.
        self.resources
            .check_heap_type(&mut heap_type, self.offset)?;
        // After `check_heap_type` succeeds the type is known to be
        // representable, hence the `expect`.
        let ty = ValType::Ref(
            RefType::new(true, heap_type).expect("existing heap types should be within our limits"),
        );
        self.push_operand(ty)?;
        Ok(())
    }
3011
3012    fn visit_ref_as_non_null(&mut self) -> Self::Output {
3013        let ty = self.pop_ref(None)?.as_non_null();
3014        self.push_operand(ty)?;
3015        Ok(())
3016    }
    fn visit_br_on_null(&mut self, relative_depth: u32) -> Self::Output {
        // `br_on_null` pops a reference; if it is null it branches to
        // `relative_depth`, otherwise it falls through with the operand
        // known to be non-null.
        let ref_ty = self.pop_ref(None)?.as_non_null();
        let (ft, kind) = self.jump(relative_depth)?;
        let label_types = self.label_types(ft, kind)?;
        // The branch target's label types must be satisfied by the
        // current stack (without the reference).
        self.pop_push_label_types(label_types)?;
        // On fall-through the reference is back on the stack, non-null.
        self.push_operand(ref_ty)?;
        Ok(())
    }
    fn visit_br_on_non_null(&mut self, relative_depth: u32) -> Self::Output {
        // `br_on_non_null` branches when the popped reference is
        // non-null, passing it (as non-null) to the target; on
        // fall-through the reference is consumed and NOT pushed back.
        let (ft, kind) = self.jump(relative_depth)?;

        // The branch target's final label type must be a reference type;
        // that's what the operand is checked against. `next_back` also
        // removes it from the iterator so the remaining label types can
        // be matched below.
        let mut label_types = self.label_types(ft, kind)?;
        let expected = match label_types.next_back() {
            None => bail!(
                self.offset,
                "type mismatch: br_on_non_null target has no label types",
            ),
            Some(ValType::Ref(ty)) => ty,
            Some(_) => bail!(
                self.offset,
                "type mismatch: br_on_non_null target does not end with heap type",
            ),
        };
        // The operand may be nullable even though the target receives a
        // non-null reference.
        self.pop_ref(Some(expected.nullable()))?;

        // Check the rest of the target's label types against the stack.
        self.pop_push_label_types(label_types)?;
        Ok(())
    }
3045    fn visit_ref_is_null(&mut self) -> Self::Output {
3046        self.pop_ref(None)?;
3047        self.push_operand(ValType::I32)?;
3048        Ok(())
3049    }
3050    fn visit_ref_func(&mut self, function_index: u32) -> Self::Output {
3051        let type_id = match self.resources.type_id_of_function(function_index) {
3052            Some(id) => id,
3053            None => bail!(
3054                self.offset,
3055                "unknown function {}: function index out of bounds",
3056                function_index,
3057            ),
3058        };
3059        if !self.resources.is_function_referenced(function_index) {
3060            bail!(self.offset, "undeclared function reference");
3061        }
3062
3063        let index = UnpackedIndex::Id(type_id);
3064        let ty = ValType::Ref(
3065            RefType::new(false, HeapType::Concrete(index)).ok_or_else(|| {
3066                BinaryReaderError::new("implementation limit: type index too large", self.offset)
3067            })?,
3068        );
3069        self.push_operand(ty)?;
3070        Ok(())
3071    }
3072    fn visit_ref_eq(&mut self) -> Self::Output {
3073        let a = self.pop_maybe_shared_ref(AbstractHeapType::Eq)?;
3074        let b = self.pop_maybe_shared_ref(AbstractHeapType::Eq)?;
3075        let a_is_shared = a.is_maybe_shared(&self.resources);
3076        let b_is_shared = b.is_maybe_shared(&self.resources);
3077        match (a_is_shared, b_is_shared) {
3078            // One or both of the types are from unreachable code; assume
3079            // the shared-ness matches.
3080            (None, Some(_)) | (Some(_), None) | (None, None) => {}
3081
3082            (Some(is_a_shared), Some(is_b_shared)) => {
3083                if is_a_shared != is_b_shared {
3084                    bail!(
3085                        self.offset,
3086                        "type mismatch: expected `ref.eq` types to match `shared`-ness"
3087                    );
3088                }
3089            }
3090        }
3091        self.push_operand(ValType::I32)
3092    }
    fn visit_memory_init(&mut self, segment: u32, mem: u32) -> Self::Output {
        // `memory.init`: [dst:idx src:i32 len:i32] -> []. The destination
        // offset uses the memory's index type; the data-segment offset
        // and length are always i32. Operands pop in reverse push order.
        let ty = self.check_memory_index(mem)?;
        self.check_data_segment(segment)?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(ty))?;
        Ok(())
    }
    fn visit_data_drop(&mut self, segment: u32) -> Self::Output {
        // `data.drop` has no operands; only the data-segment index needs
        // validating.
        self.check_data_segment(segment)?;
        Ok(())
    }
    // `memory.copy`: copies bytes from memory `src` to memory `dst` (they
    // may be the same memory). Gated on the bulk-memory feature.
    fn visit_memory_copy(&mut self, dst: u32, src: u32) -> Self::Output {
        self.check_enabled(self.features.bulk_memory_opt(), "bulk memory")?;
        // Each memory contributes its own index type (i32 or i64).
        let dst_ty = self.check_memory_index(dst)?;
        let src_ty = self.check_memory_index(src)?;

        // The length operand here is the smaller of src/dst, which is
        // i32 if one is i32
        self.pop_operand(Some(match src_ty {
            ValType::I32 => ValType::I32,
            _ => dst_ty,
        }))?;

        // ... and the offset into each memory is required to be
        // whatever the indexing type is for that memory
        self.pop_operand(Some(src_ty))?;
        self.pop_operand(Some(dst_ty))?;
        Ok(())
    }
    // `memory.fill`: fills a region of memory `mem` with a byte value.
    // Pops (top-down): length (index type), fill value (i32), destination
    // address (index type). Gated on the bulk-memory feature.
    fn visit_memory_fill(&mut self, mem: u32) -> Self::Output {
        self.check_enabled(self.features.bulk_memory_opt(), "bulk memory")?;
        let ty = self.check_memory_index(mem)?;
        self.pop_operand(Some(ty))?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(ty))?;
        Ok(())
    }
    // `memory.discard`: pops two operands of the memory's index type
    // (length and start address, top-down) describing the region to discard.
    fn visit_memory_discard(&mut self, mem: u32) -> Self::Output {
        let ty = self.check_memory_index(mem)?;
        self.pop_operand(Some(ty))?;
        self.pop_operand(Some(ty))?;
        Ok(())
    }
    // `table.init`: copies entries from an element segment into a table.
    // The segment's element type must be a subtype of the table's element
    // type. Pops (top-down): length (i32), source offset within the segment
    // (i32), and destination index (the table's index type).
    fn visit_table_init(&mut self, segment: u32, table: u32) -> Self::Output {
        let table = self.table_type_at(table)?;
        let segment_ty = self.element_type_at(segment)?;
        if !self
            .resources
            .is_subtype(ValType::Ref(segment_ty), ValType::Ref(table.element_type))
        {
            bail!(self.offset, "type mismatch");
        }
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(table.index_type()))?;
        Ok(())
    }
3151    fn visit_elem_drop(&mut self, segment: u32) -> Self::Output {
3152        self.element_type_at(segment)?;
3153        Ok(())
3154    }
    // `table.copy`: copies entries from `src_table` to `dst_table`. The
    // source element type must be a subtype of the destination's.
    fn visit_table_copy(&mut self, dst_table: u32, src_table: u32) -> Self::Output {
        let src = self.table_type_at(src_table)?;
        let dst = self.table_type_at(dst_table)?;
        if !self.resources.is_subtype(
            ValType::Ref(src.element_type),
            ValType::Ref(dst.element_type),
        ) {
            bail!(self.offset, "type mismatch");
        }

        // The length operand here is the smaller of src/dst, which is
        // i32 if one is i32
        self.pop_operand(Some(match src.index_type() {
            ValType::I32 => ValType::I32,
            _ => dst.index_type(),
        }))?;

        // ... and the offset into each table is required to be
        // whatever the indexing type is for that table
        self.pop_operand(Some(src.index_type()))?;
        self.pop_operand(Some(dst.index_type()))?;
        Ok(())
    }
    // `table.get`: pops an index (the table's index type) and pushes the
    // table's element type.
    fn visit_table_get(&mut self, table: u32) -> Self::Output {
        let table = self.table_type_at(table)?;
        // Sanity-check (debug builds only) that any concrete type indices in
        // the element type have been canonicalized to ids.
        debug_assert_type_indices_are_ids(table.element_type.into());
        self.pop_operand(Some(table.index_type()))?;
        self.push_operand(table.element_type)?;
        Ok(())
    }
    // `table.atomic.get`: identical stack behavior to `table.get`, but the
    // element type is restricted to subtypes of shared `anyref`.
    fn visit_table_atomic_get(&mut self, _ordering: Ordering, table: u32) -> Self::Output {
        self.visit_table_get(table)?;
        // No validation of `ordering` is needed because `table.atomic.get` can
        // be used on both shared and unshared tables. But we do need to limit
        // which types can be used with this instruction.
        let ty = self.table_type_at(table)?.element_type;
        let supertype = RefType::ANYREF.shared().unwrap();
        if !self.resources.is_subtype(ty.into(), supertype.into()) {
            bail!(
                self.offset,
                "invalid type: `table.atomic.get` only allows subtypes of `anyref`"
            );
        }
        Ok(())
    }
    // `table.set`: pops (top-down) the value to store (the table's element
    // type) and the index (the table's index type).
    fn visit_table_set(&mut self, table: u32) -> Self::Output {
        let table = self.table_type_at(table)?;
        debug_assert_type_indices_are_ids(table.element_type.into());
        self.pop_operand(Some(table.element_type.into()))?;
        self.pop_operand(Some(table.index_type()))?;
        Ok(())
    }
    // `table.atomic.set`: identical stack behavior to `table.set`, but the
    // element type is restricted to subtypes of shared `anyref`.
    fn visit_table_atomic_set(&mut self, _ordering: Ordering, table: u32) -> Self::Output {
        self.visit_table_set(table)?;
        // No validation of `ordering` is needed because `table.atomic.set` can
        // be used on both shared and unshared tables. But we do need to limit
        // which types can be used with this instruction.
        let ty = self.table_type_at(table)?.element_type;
        let supertype = RefType::ANYREF.shared().unwrap();
        if !self.resources.is_subtype(ty.into(), supertype.into()) {
            bail!(
                self.offset,
                "invalid type: `table.atomic.set` only allows subtypes of `anyref`"
            );
        }
        Ok(())
    }
    // `table.grow`: pops (top-down) the grow delta (index type) and the
    // initial fill value (element type); pushes the previous size (index
    // type).
    fn visit_table_grow(&mut self, table: u32) -> Self::Output {
        let table = self.table_type_at(table)?;
        debug_assert_type_indices_are_ids(table.element_type.into());
        self.pop_operand(Some(table.index_type()))?;
        self.pop_operand(Some(table.element_type.into()))?;
        self.push_operand(table.index_type())?;
        Ok(())
    }
3230    fn visit_table_size(&mut self, table: u32) -> Self::Output {
3231        let table = self.table_type_at(table)?;
3232        self.push_operand(table.index_type())?;
3233        Ok(())
3234    }
    // `table.fill`: pops (top-down) the length (index type), the fill value
    // (element type), and the start index (index type).
    fn visit_table_fill(&mut self, table: u32) -> Self::Output {
        let table = self.table_type_at(table)?;
        debug_assert_type_indices_are_ids(table.element_type.into());
        self.pop_operand(Some(table.index_type()))?;
        self.pop_operand(Some(table.element_type.into()))?;
        self.pop_operand(Some(table.index_type()))?;
        Ok(())
    }
    // `table.atomic.rmw.xchg`: atomically swaps a table entry. Element type
    // must be a subtype of shared `anyref`. Pops (top-down) the new value
    // and the index; pushes the old value.
    fn visit_table_atomic_rmw_xchg(&mut self, _ordering: Ordering, table: u32) -> Self::Output {
        let table = self.table_type_at(table)?;
        let elem_ty = table.element_type.into();
        debug_assert_type_indices_are_ids(elem_ty);
        let supertype = RefType::ANYREF.shared().unwrap();
        if !self.resources.is_subtype(elem_ty, supertype.into()) {
            bail!(
                self.offset,
                "invalid type: `table.atomic.rmw.xchg` only allows subtypes of `anyref`"
            );
        }
        self.pop_operand(Some(elem_ty))?;
        self.pop_operand(Some(table.index_type()))?;
        self.push_operand(elem_ty)?;
        Ok(())
    }
    // `table.atomic.rmw.cmpxchg`: atomic compare-and-swap of a table entry.
    // Requires a subtype of shared `eqref` (comparison needs `eq`-ness).
    // Pops (top-down) replacement, expected value, and index; pushes the
    // old value.
    fn visit_table_atomic_rmw_cmpxchg(&mut self, _ordering: Ordering, table: u32) -> Self::Output {
        let table = self.table_type_at(table)?;
        let elem_ty = table.element_type.into();
        debug_assert_type_indices_are_ids(elem_ty);
        let supertype = RefType::EQREF.shared().unwrap();
        if !self.resources.is_subtype(elem_ty, supertype.into()) {
            bail!(
                self.offset,
                "invalid type: `table.atomic.rmw.cmpxchg` only allows subtypes of `eqref`"
            );
        }
        self.pop_operand(Some(elem_ty))?;
        self.pop_operand(Some(elem_ty))?;
        self.pop_operand(Some(table.index_type()))?;
        self.push_operand(elem_ty)?;
        Ok(())
    }
    // `struct.new`: pops one operand per field (in reverse declaration
    // order, matching the operand stack) and pushes a non-null reference to
    // the concrete struct type.
    fn visit_struct_new(&mut self, struct_type_index: u32) -> Self::Output {
        let struct_ty = self.struct_type_at(struct_type_index)?;
        for ty in struct_ty.fields.iter().rev() {
            // Packed fields (i8/i16) unpack to i32 on the operand stack.
            self.pop_operand(Some(ty.element_type.unpack()))?;
        }
        self.push_concrete_ref(false, struct_type_index)?;
        Ok(())
    }
    // `struct.new_default`: pushes a reference to a struct with all fields
    // default-initialized; every field type must therefore be defaultable.
    // No operands are popped.
    fn visit_struct_new_default(&mut self, type_index: u32) -> Self::Output {
        let ty = self.struct_type_at(type_index)?;
        for field in ty.fields.iter() {
            let val_ty = field.element_type.unpack();
            if !val_ty.is_defaultable() {
                bail!(
                    self.offset,
                    "invalid `struct.new_default`: {val_ty} field is not defaultable"
                );
            }
        }
        self.push_concrete_ref(false, type_index)?;
        Ok(())
    }
    // `struct.get`: pops a (nullable) reference to the struct type and
    // pushes the field's value type. Packed (i8/i16) fields must instead use
    // `struct.get_s`/`struct.get_u`.
    fn visit_struct_get(&mut self, struct_type_index: u32, field_index: u32) -> Self::Output {
        let field_ty = self.struct_field_at(struct_type_index, field_index)?;
        if field_ty.element_type.is_packed() {
            bail!(
                self.offset,
                "can only use struct `get` with non-packed storage types"
            )
        }
        self.pop_concrete_ref(true, struct_type_index)?;
        self.push_operand(field_ty.element_type.unpack())
    }
    // `struct.atomic.get`: identical stack behavior to `struct.get`, but the
    // field type is restricted to `i32`, `i64`, or subtypes of shared
    // `anyref`.
    fn visit_struct_atomic_get(
        &mut self,
        _ordering: Ordering,
        struct_type_index: u32,
        field_index: u32,
    ) -> Self::Output {
        self.visit_struct_get(struct_type_index, field_index)?;
        // The `atomic` version has some additional type restrictions.
        let ty = self
            .struct_field_at(struct_type_index, field_index)?
            .element_type;
        let is_valid_type = match ty {
            StorageType::Val(ValType::I32) | StorageType::Val(ValType::I64) => true,
            StorageType::Val(v) => self
                .resources
                .is_subtype(v, RefType::ANYREF.shared().unwrap().into()),
            // Packed storage was already rejected by `visit_struct_get`.
            _ => false,
        };
        if !is_valid_type {
            bail!(
                self.offset,
                "invalid type: `struct.atomic.get` only allows `i32`, `i64` and subtypes of `anyref`"
            );
        }
        Ok(())
    }
    // `struct.get_s`: sign-extending read of a packed (i8/i16) field. Pops a
    // reference to the struct type and pushes the unpacked type (i32).
    fn visit_struct_get_s(&mut self, struct_type_index: u32, field_index: u32) -> Self::Output {
        let field_ty = self.struct_field_at(struct_type_index, field_index)?;
        if !field_ty.element_type.is_packed() {
            bail!(
                self.offset,
                "cannot use struct.get_s with non-packed storage types"
            )
        }
        self.pop_concrete_ref(true, struct_type_index)?;
        self.push_operand(field_ty.element_type.unpack())
    }
    // `struct.atomic.get_s`: same validation as `struct.get_s` (packed
    // fields only); no extra restrictions beyond the non-atomic form.
    fn visit_struct_atomic_get_s(
        &mut self,
        _ordering: Ordering,
        struct_type_index: u32,
        field_index: u32,
    ) -> Self::Output {
        self.visit_struct_get_s(struct_type_index, field_index)?;
        // This instruction has the same type restrictions as the non-`atomic` version.
        debug_assert!(matches!(
            self.struct_field_at(struct_type_index, field_index)?
                .element_type,
            StorageType::I8 | StorageType::I16
        ));
        Ok(())
    }
    // `struct.get_u`: zero-extending read of a packed (i8/i16) field. Pops a
    // reference to the struct type and pushes the unpacked type (i32).
    fn visit_struct_get_u(&mut self, struct_type_index: u32, field_index: u32) -> Self::Output {
        let field_ty = self.struct_field_at(struct_type_index, field_index)?;
        if !field_ty.element_type.is_packed() {
            bail!(
                self.offset,
                "cannot use struct.get_u with non-packed storage types"
            )
        }
        self.pop_concrete_ref(true, struct_type_index)?;
        self.push_operand(field_ty.element_type.unpack())
    }
3372    fn visit_struct_atomic_get_u(
3373        &mut self,
3374        _ordering: Ordering,
3375        struct_type_index: u32,
3376        field_index: u32,
3377    ) -> Self::Output {
3378        self.visit_struct_get_s(struct_type_index, field_index)?;
3379        // This instruction has the same type restrictions as the non-`atomic` version.
3380        debug_assert!(matches!(
3381            self.struct_field_at(struct_type_index, field_index)?
3382                .element_type,
3383            StorageType::I8 | StorageType::I16
3384        ));
3385        Ok(())
3386    }
    // `struct.set`: pops (top-down) the new value (the field's unpacked
    // type) and a reference to the struct type. The field must be mutable.
    fn visit_struct_set(&mut self, struct_type_index: u32, field_index: u32) -> Self::Output {
        let field_ty = self.mutable_struct_field_at(struct_type_index, field_index)?;
        self.pop_operand(Some(field_ty.element_type.unpack()))?;
        self.pop_concrete_ref(true, struct_type_index)?;
        Ok(())
    }
    // `struct.atomic.set`: identical stack behavior to `struct.set`, but the
    // field type is restricted to `i8`, `i16`, `i32`, `i64`, or subtypes of
    // shared `anyref`.
    fn visit_struct_atomic_set(
        &mut self,
        _ordering: Ordering,
        struct_type_index: u32,
        field_index: u32,
    ) -> Self::Output {
        self.visit_struct_set(struct_type_index, field_index)?;
        // The `atomic` version has some additional type restrictions.
        let ty = self
            .struct_field_at(struct_type_index, field_index)?
            .element_type;
        // Note: unlike the `get`/`rmw` variants, packed fields are allowed
        // here, so this match is exhaustive without a catch-all arm.
        let is_valid_type = match ty {
            StorageType::I8 | StorageType::I16 => true,
            StorageType::Val(ValType::I32) | StorageType::Val(ValType::I64) => true,
            StorageType::Val(v) => self
                .resources
                .is_subtype(v, RefType::ANYREF.shared().unwrap().into()),
        };
        if !is_valid_type {
            bail!(
                self.offset,
                "invalid type: `struct.atomic.set` only allows `i8`, `i16`, `i32`, `i64` and subtypes of `anyref`"
            );
        }
        Ok(())
    }
    // The binop-style `struct.atomic.rmw.*` instructions (add/sub/and/or/
    // xor) all validate identically; `check_struct_atomic_rmw` is shared and
    // only takes the operation name for use in error messages.
    fn visit_struct_atomic_rmw_add(
        &mut self,
        _ordering: Ordering,
        struct_type_index: u32,
        field_index: u32,
    ) -> Self::Output {
        self.check_struct_atomic_rmw("add", struct_type_index, field_index)
    }
    fn visit_struct_atomic_rmw_sub(
        &mut self,
        _ordering: Ordering,
        struct_type_index: u32,
        field_index: u32,
    ) -> Self::Output {
        self.check_struct_atomic_rmw("sub", struct_type_index, field_index)
    }
    fn visit_struct_atomic_rmw_and(
        &mut self,
        _ordering: Ordering,
        struct_type_index: u32,
        field_index: u32,
    ) -> Self::Output {
        self.check_struct_atomic_rmw("and", struct_type_index, field_index)
    }
    fn visit_struct_atomic_rmw_or(
        &mut self,
        _ordering: Ordering,
        struct_type_index: u32,
        field_index: u32,
    ) -> Self::Output {
        self.check_struct_atomic_rmw("or", struct_type_index, field_index)
    }
    fn visit_struct_atomic_rmw_xor(
        &mut self,
        _ordering: Ordering,
        struct_type_index: u32,
        field_index: u32,
    ) -> Self::Output {
        self.check_struct_atomic_rmw("xor", struct_type_index, field_index)
    }
    // `struct.atomic.rmw.xchg`: atomically swaps a mutable field. Unlike the
    // binop rmw forms, references are allowed (subtypes of shared `anyref`),
    // alongside `i32`/`i64`. Pops (top-down) the new value and a struct
    // reference; pushes the old value.
    fn visit_struct_atomic_rmw_xchg(
        &mut self,
        _ordering: Ordering,
        struct_type_index: u32,
        field_index: u32,
    ) -> Self::Output {
        let field = self.mutable_struct_field_at(struct_type_index, field_index)?;
        let is_valid_type = match field.element_type {
            StorageType::Val(ValType::I32) | StorageType::Val(ValType::I64) => true,
            StorageType::Val(v) => self
                .resources
                .is_subtype(v, RefType::ANYREF.shared().unwrap().into()),
            // Packed (i8/i16) fields are not allowed for xchg.
            _ => false,
        };
        if !is_valid_type {
            bail!(
                self.offset,
                "invalid type: `struct.atomic.rmw.xchg` only allows `i32`, `i64` and subtypes of `anyref`"
            );
        }
        let field_ty = field.element_type.unpack();
        self.pop_operand(Some(field_ty))?;
        self.pop_concrete_ref(true, struct_type_index)?;
        self.push_operand(field_ty)?;
        Ok(())
    }
    // `struct.atomic.rmw.cmpxchg`: atomic compare-and-swap of a mutable
    // field. References must be subtypes of shared `eqref` (comparison
    // requires `eq`-ness), alongside `i32`/`i64`. Pops (top-down)
    // replacement, expected value, and a struct reference; pushes the old
    // value.
    fn visit_struct_atomic_rmw_cmpxchg(
        &mut self,
        _ordering: Ordering,
        struct_type_index: u32,
        field_index: u32,
    ) -> Self::Output {
        let field = self.mutable_struct_field_at(struct_type_index, field_index)?;
        let is_valid_type = match field.element_type {
            StorageType::Val(ValType::I32) | StorageType::Val(ValType::I64) => true,
            StorageType::Val(v) => self
                .resources
                .is_subtype(v, RefType::EQREF.shared().unwrap().into()),
            // Packed (i8/i16) fields are not allowed for cmpxchg.
            _ => false,
        };
        if !is_valid_type {
            bail!(
                self.offset,
                "invalid type: `struct.atomic.rmw.cmpxchg` only allows `i32`, `i64` and subtypes of `eqref`"
            );
        }
        let field_ty = field.element_type.unpack();
        self.pop_operand(Some(field_ty))?;
        self.pop_operand(Some(field_ty))?;
        self.pop_concrete_ref(true, struct_type_index)?;
        self.push_operand(field_ty)?;
        Ok(())
    }
    // `array.new`: pops (top-down) the length (i32) and the initial element
    // value (the array's unpacked element type); pushes a non-null reference
    // to the concrete array type.
    fn visit_array_new(&mut self, type_index: u32) -> Self::Output {
        let array_ty = self.array_type_at(type_index)?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(array_ty.element_type.unpack()))?;
        self.push_concrete_ref(false, type_index)
    }
    // `array.new_default`: like `array.new` but elements are
    // default-initialized, so the element type must be defaultable. Pops
    // only the length (i32).
    fn visit_array_new_default(&mut self, type_index: u32) -> Self::Output {
        let ty = self.array_type_at(type_index)?;
        let val_ty = ty.element_type.unpack();
        if !val_ty.is_defaultable() {
            bail!(
                self.offset,
                "invalid `array.new_default`: {val_ty} field is not defaultable"
            );
        }
        self.pop_operand(Some(ValType::I32))?;
        self.push_concrete_ref(false, type_index)
    }
3530    fn visit_array_new_fixed(&mut self, type_index: u32, n: u32) -> Self::Output {
3531        let array_ty = self.array_type_at(type_index)?;
3532        let elem_ty = array_ty.element_type.unpack();
3533        for _ in 0..n {
3534            self.pop_operand(Some(elem_ty))?;
3535        }
3536        self.push_concrete_ref(false, type_index)
3537    }
    // `array.new_data`: creates an array from bytes in a data segment, so
    // the element type must be numeric or vector (not a reference). Pops
    // (top-down) the length (i32) and the offset into the segment (i32).
    fn visit_array_new_data(&mut self, type_index: u32, data_index: u32) -> Self::Output {
        let array_ty = self.array_type_at(type_index)?;
        let elem_ty = array_ty.element_type.unpack();
        match elem_ty {
            ValType::I32 | ValType::I64 | ValType::F32 | ValType::F64 | ValType::V128 => {}
            ValType::Ref(_) => bail!(
                self.offset,
                "type mismatch: array.new_data can only create arrays with numeric and vector elements"
            ),
        }
        self.check_data_segment(data_index)?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(ValType::I32))?;
        self.push_concrete_ref(false, type_index)
    }
    // `array.new_elem`: creates an array from an element segment, so the
    // array's element type must be a reference type and the segment's type
    // must be a subtype of it. Pops (top-down) the length (i32) and the
    // offset into the segment (i32).
    fn visit_array_new_elem(&mut self, type_index: u32, elem_index: u32) -> Self::Output {
        let array_ty = self.array_type_at(type_index)?;
        let array_ref_ty = match array_ty.element_type.unpack() {
            ValType::Ref(rt) => rt,
            ValType::I32 | ValType::I64 | ValType::F32 | ValType::F64 | ValType::V128 => bail!(
                self.offset,
                "type mismatch: array.new_elem can only create arrays with reference elements"
            ),
        };
        let elem_ref_ty = self.element_type_at(elem_index)?;
        if !self
            .resources
            .is_subtype(elem_ref_ty.into(), array_ref_ty.into())
        {
            bail!(
                self.offset,
                "invalid array.new_elem instruction: element segment {elem_index} type mismatch: \
                 expected {array_ref_ty}, found {elem_ref_ty}"
            )
        }
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(ValType::I32))?;
        self.push_concrete_ref(false, type_index)
    }
    // `array.get`: pops (top-down) the index (i32) and a reference to the
    // array type; pushes the element's value type. Packed (i8/i16) elements
    // must use `array.get_s`/`array.get_u` instead.
    fn visit_array_get(&mut self, type_index: u32) -> Self::Output {
        let array_ty = self.array_type_at(type_index)?;
        let elem_ty = array_ty.element_type;
        if elem_ty.is_packed() {
            bail!(
                self.offset,
                "cannot use array.get with packed storage types"
            )
        }
        self.pop_operand(Some(ValType::I32))?;
        self.pop_concrete_ref(true, type_index)?;
        self.push_operand(elem_ty.unpack())
    }
    // `array.atomic.get`: identical stack behavior to `array.get`, but the
    // element type is restricted to `i32`, `i64`, or subtypes of shared
    // `anyref`.
    fn visit_array_atomic_get(&mut self, _ordering: Ordering, type_index: u32) -> Self::Output {
        self.visit_array_get(type_index)?;
        // The `atomic` version has some additional type restrictions.
        let elem_ty = self.array_type_at(type_index)?.element_type;
        let is_valid_type = match elem_ty {
            StorageType::Val(ValType::I32) | StorageType::Val(ValType::I64) => true,
            StorageType::Val(v) => self
                .resources
                .is_subtype(v, RefType::ANYREF.shared().unwrap().into()),
            // Packed storage was already rejected by `visit_array_get`.
            _ => false,
        };
        if !is_valid_type {
            bail!(
                self.offset,
                "invalid type: `array.atomic.get` only allows `i32`, `i64` and subtypes of `anyref`"
            );
        }
        Ok(())
    }
    // `array.get_s`: sign-extending read of a packed (i8/i16) element. Pops
    // (top-down) the index (i32) and an array reference; pushes the unpacked
    // type (i32).
    fn visit_array_get_s(&mut self, type_index: u32) -> Self::Output {
        let array_ty = self.array_type_at(type_index)?;
        let elem_ty = array_ty.element_type;
        if !elem_ty.is_packed() {
            bail!(
                self.offset,
                "cannot use array.get_s with non-packed storage types"
            )
        }
        self.pop_operand(Some(ValType::I32))?;
        self.pop_concrete_ref(true, type_index)?;
        self.push_operand(elem_ty.unpack())
    }
    // `array.atomic.get_s`: same validation as `array.get_s` (packed
    // elements only); no extra restrictions beyond the non-atomic form.
    fn visit_array_atomic_get_s(&mut self, _ordering: Ordering, type_index: u32) -> Self::Output {
        self.visit_array_get_s(type_index)?;
        // This instruction has the same type restrictions as the non-`atomic` version.
        debug_assert!(matches!(
            self.array_type_at(type_index)?.element_type,
            StorageType::I8 | StorageType::I16
        ));
        Ok(())
    }
    // `array.get_u`: zero-extending read of a packed (i8/i16) element. Pops
    // (top-down) the index (i32) and an array reference; pushes the unpacked
    // type (i32).
    fn visit_array_get_u(&mut self, type_index: u32) -> Self::Output {
        let array_ty = self.array_type_at(type_index)?;
        let elem_ty = array_ty.element_type;
        if !elem_ty.is_packed() {
            bail!(
                self.offset,
                "cannot use array.get_u with non-packed storage types"
            )
        }
        self.pop_operand(Some(ValType::I32))?;
        self.pop_concrete_ref(true, type_index)?;
        self.push_operand(elem_ty.unpack())
    }
    // `array.atomic.get_u`: same validation as `array.get_u` (packed
    // elements only); no extra restrictions beyond the non-atomic form.
    fn visit_array_atomic_get_u(&mut self, _ordering: Ordering, type_index: u32) -> Self::Output {
        self.visit_array_get_u(type_index)?;
        // This instruction has the same type restrictions as the non-`atomic` version.
        debug_assert!(matches!(
            self.array_type_at(type_index)?.element_type,
            StorageType::I8 | StorageType::I16
        ));
        Ok(())
    }
    // `array.set`: pops (top-down) the new value (unpacked element type),
    // the index (i32), and a reference to the array type. The array must
    // have a mutable element type.
    fn visit_array_set(&mut self, type_index: u32) -> Self::Output {
        let array_ty = self.mutable_array_type_at(type_index)?;
        self.pop_operand(Some(array_ty.element_type.unpack()))?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_concrete_ref(true, type_index)?;
        Ok(())
    }
    // `array.atomic.set`: identical stack behavior to `array.set`, but the
    // element type is restricted to `i8`, `i16`, `i32`, `i64`, or subtypes
    // of shared `anyref`.
    fn visit_array_atomic_set(&mut self, _ordering: Ordering, type_index: u32) -> Self::Output {
        self.visit_array_set(type_index)?;
        // The `atomic` version has some additional type restrictions.
        let elem_ty = self.array_type_at(type_index)?.element_type;
        // Packed elements are allowed here (unlike the `get`/`rmw` forms),
        // so this match is exhaustive without a catch-all arm.
        let is_valid_type = match elem_ty {
            StorageType::I8 | StorageType::I16 => true,
            StorageType::Val(ValType::I32) | StorageType::Val(ValType::I64) => true,
            StorageType::Val(v) => self
                .resources
                .is_subtype(v, RefType::ANYREF.shared().unwrap().into()),
        };
        if !is_valid_type {
            bail!(
                self.offset,
                "invalid type: `array.atomic.set` only allows `i8`, `i16`, `i32`, `i64` and subtypes of `anyref`"
            );
        }
        Ok(())
    }
3679    fn visit_array_len(&mut self) -> Self::Output {
3680        self.pop_maybe_shared_ref(AbstractHeapType::Array)?;
3681        self.push_operand(ValType::I32)
3682    }
    // `array.fill`: pops (top-down) the length (i32), the fill value
    // (unpacked element type), the start index (i32), and a reference to the
    // (mutable-element) array type.
    fn visit_array_fill(&mut self, array_type_index: u32) -> Self::Output {
        let array_ty = self.mutable_array_type_at(array_type_index)?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(array_ty.element_type.unpack()))?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_concrete_ref(true, array_type_index)?;
        Ok(())
    }
    // `array.copy`: copies a range of elements between two arrays. The
    // destination must have a mutable element type, and element storage
    // types must match exactly for packed types or be in a subtype relation
    // for value types. Pops (top-down): length (i32), source index (i32),
    // source array ref, destination index (i32), destination array ref.
    fn visit_array_copy(&mut self, type_index_dst: u32, type_index_src: u32) -> Self::Output {
        let array_ty_dst = self.mutable_array_type_at(type_index_dst)?;
        let array_ty_src = self.array_type_at(type_index_src)?;
        match (array_ty_dst.element_type, array_ty_src.element_type) {
            // Packed storage must match exactly; value types use subtyping.
            (StorageType::I8, StorageType::I8) => {}
            (StorageType::I8, ty) => bail!(
                self.offset,
                "array types do not match: expected i8, found {ty}"
            ),
            (StorageType::I16, StorageType::I16) => {}
            (StorageType::I16, ty) => bail!(
                self.offset,
                "array types do not match: expected i16, found {ty}"
            ),
            (StorageType::Val(dst), StorageType::Val(src)) => {
                if !self.resources.is_subtype(src, dst) {
                    bail!(
                        self.offset,
                        "array types do not match: expected {dst}, found {src}"
                    )
                }
            }
            (StorageType::Val(dst), src) => {
                bail!(
                    self.offset,
                    "array types do not match: expected {dst}, found {src}"
                )
            }
        }
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_concrete_ref(true, type_index_src)?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_concrete_ref(true, type_index_dst)?;
        Ok(())
    }
    // `array.init_data`: initializes a region of an array from a data
    // segment, so the element type must be numeric or vector. Pops
    // (top-down): length (i32), offset into the segment (i32), destination
    // index (i32), and a reference to the (mutable-element) array type.
    fn visit_array_init_data(
        &mut self,
        array_type_index: u32,
        array_data_index: u32,
    ) -> Self::Output {
        let array_ty = self.mutable_array_type_at(array_type_index)?;
        let val_ty = array_ty.element_type.unpack();
        match val_ty {
            ValType::I32 | ValType::I64 | ValType::F32 | ValType::F64 | ValType::V128 => {}
            ValType::Ref(_) => bail!(
                self.offset,
                "invalid array.init_data: array type is not numeric or vector"
            ),
        }
        self.check_data_segment(array_data_index)?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_concrete_ref(true, array_type_index)?;
        Ok(())
    }
    // `array.init_elem`: initializes a region of an array from an element
    // segment; the array's element type must be a reference type and the
    // segment's type a subtype of it. Pops (top-down): length (i32), offset
    // into the segment (i32), destination index (i32), and a reference to
    // the (mutable-element) array type.
    fn visit_array_init_elem(&mut self, type_index: u32, elem_index: u32) -> Self::Output {
        let array_ty = self.mutable_array_type_at(type_index)?;
        let array_ref_ty = match array_ty.element_type.unpack() {
            ValType::Ref(rt) => rt,
            ValType::I32 | ValType::I64 | ValType::F32 | ValType::F64 | ValType::V128 => bail!(
                self.offset,
                "type mismatch: array.init_elem can only create arrays with reference elements"
            ),
        };
        let elem_ref_ty = self.element_type_at(elem_index)?;
        if !self
            .resources
            .is_subtype(elem_ref_ty.into(), array_ref_ty.into())
        {
            bail!(
                self.offset,
                "invalid array.init_elem instruction: element segment {elem_index} type mismatch: \
                 expected {array_ref_ty}, found {elem_ref_ty}"
            )
        }
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_concrete_ref(true, type_index)?;
        Ok(())
    }
    // The binop-style `array.atomic.rmw.*` instructions (add/sub/and/or/xor)
    // all validate identically; `check_array_atomic_rmw` is shared and only
    // takes the operation name for use in error messages.
    fn visit_array_atomic_rmw_add(&mut self, _ordering: Ordering, type_index: u32) -> Self::Output {
        self.check_array_atomic_rmw("add", type_index)
    }
    fn visit_array_atomic_rmw_sub(&mut self, _ordering: Ordering, type_index: u32) -> Self::Output {
        self.check_array_atomic_rmw("sub", type_index)
    }
    fn visit_array_atomic_rmw_and(&mut self, _ordering: Ordering, type_index: u32) -> Self::Output {
        self.check_array_atomic_rmw("and", type_index)
    }
    fn visit_array_atomic_rmw_or(&mut self, _ordering: Ordering, type_index: u32) -> Self::Output {
        self.check_array_atomic_rmw("or", type_index)
    }
    fn visit_array_atomic_rmw_xor(&mut self, _ordering: Ordering, type_index: u32) -> Self::Output {
        self.check_array_atomic_rmw("xor", type_index)
    }
    // `array.atomic.rmw.xchg`: atomically swaps an array element. Unlike
    // the binop rmw forms, references are allowed (subtypes of shared
    // `anyref`), alongside `i32`/`i64`. Pops (top-down) the new value, the
    // index (i32), and an array reference; pushes the old value.
    fn visit_array_atomic_rmw_xchg(
        &mut self,
        _ordering: Ordering,
        type_index: u32,
    ) -> Self::Output {
        let field = self.mutable_array_type_at(type_index)?;
        let is_valid_type = match field.element_type {
            StorageType::Val(ValType::I32) | StorageType::Val(ValType::I64) => true,
            StorageType::Val(v) => self
                .resources
                .is_subtype(v, RefType::ANYREF.shared().unwrap().into()),
            // Packed (i8/i16) elements are not allowed for xchg.
            _ => false,
        };
        if !is_valid_type {
            bail!(
                self.offset,
                "invalid type: `array.atomic.rmw.xchg` only allows `i32`, `i64` and subtypes of `anyref`"
            );
        }
        let elem_ty = field.element_type.unpack();
        self.pop_operand(Some(elem_ty))?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_concrete_ref(true, type_index)?;
        self.push_operand(elem_ty)?;
        Ok(())
    }
    // `array.atomic.rmw.cmpxchg`: atomic compare-and-swap of an array
    // element. References must be subtypes of shared `eqref` (comparison
    // requires `eq`-ness), alongside `i32`/`i64`. Pops (top-down)
    // replacement, expected value, index (i32), and an array reference;
    // pushes the old value.
    fn visit_array_atomic_rmw_cmpxchg(
        &mut self,
        _ordering: Ordering,
        type_index: u32,
    ) -> Self::Output {
        let field = self.mutable_array_type_at(type_index)?;
        let is_valid_type = match field.element_type {
            StorageType::Val(ValType::I32) | StorageType::Val(ValType::I64) => true,
            StorageType::Val(v) => self
                .resources
                .is_subtype(v, RefType::EQREF.shared().unwrap().into()),
            // Packed (i8/i16) elements are not allowed for cmpxchg.
            _ => false,
        };
        if !is_valid_type {
            bail!(
                self.offset,
                "invalid type: `array.atomic.rmw.cmpxchg` only allows `i32`, `i64` and subtypes of `eqref`"
            );
        }
        let elem_ty = field.element_type.unpack();
        self.pop_operand(Some(elem_ty))?;
        self.pop_operand(Some(elem_ty))?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_concrete_ref(true, type_index)?;
        self.push_operand(elem_ty)?;
        Ok(())
    }
3842    fn visit_any_convert_extern(&mut self) -> Self::Output {
3843        let any_ref = match self.pop_maybe_shared_ref(AbstractHeapType::Extern)? {
3844            MaybeType::Bottom | MaybeType::UnknownRef(_) => {
3845                MaybeType::UnknownRef(Some(AbstractHeapType::Any))
3846            }
3847            MaybeType::Known(ty) => {
3848                let shared = self.resources.is_shared(ty);
3849                let heap_type = HeapType::Abstract {
3850                    shared,
3851                    ty: AbstractHeapType::Any,
3852                };
3853                let any_ref = RefType::new(ty.is_nullable(), heap_type).unwrap();
3854                MaybeType::Known(any_ref)
3855            }
3856        };
3857        self.push_operand(any_ref)
3858    }
3859    fn visit_extern_convert_any(&mut self) -> Self::Output {
3860        let extern_ref = match self.pop_maybe_shared_ref(AbstractHeapType::Any)? {
3861            MaybeType::Bottom | MaybeType::UnknownRef(_) => {
3862                MaybeType::UnknownRef(Some(AbstractHeapType::Extern))
3863            }
3864            MaybeType::Known(ty) => {
3865                let shared = self.resources.is_shared(ty);
3866                let heap_type = HeapType::Abstract {
3867                    shared,
3868                    ty: AbstractHeapType::Extern,
3869                };
3870                let extern_ref = RefType::new(ty.is_nullable(), heap_type).unwrap();
3871                MaybeType::Known(extern_ref)
3872            }
3873        };
3874        self.push_operand(extern_ref)
3875    }
    // `ref.test` and `ref.cast` each come in non-null and nullable flavors;
    // the shared checkers receive the target heap type plus a flag saying
    // whether a null reference passes the test/cast.
    fn visit_ref_test_non_null(&mut self, heap_type: HeapType) -> Self::Output {
        self.check_ref_test(false, heap_type)
    }
    fn visit_ref_test_nullable(&mut self, heap_type: HeapType) -> Self::Output {
        self.check_ref_test(true, heap_type)
    }
    fn visit_ref_cast_non_null(&mut self, heap_type: HeapType) -> Self::Output {
        self.check_ref_cast(false, heap_type)
    }
    fn visit_ref_cast_nullable(&mut self, heap_type: HeapType) -> Self::Output {
        self.check_ref_cast(true, heap_type)
    }
    fn visit_br_on_cast(
        &mut self,
        relative_depth: u32,
        mut from_ref_type: RefType,
        mut to_ref_type: RefType,
    ) -> Self::Output {
        // Validate (and possibly normalize in place) both annotated
        // reference types before any subtyping checks.
        self.resources
            .check_ref_type(&mut from_ref_type, self.offset)?;
        self.resources
            .check_ref_type(&mut to_ref_type, self.offset)?;

        // The cast target must be a subtype of the cast source.
        if !self
            .resources
            .is_subtype(to_ref_type.into(), from_ref_type.into())
        {
            bail!(
                self.offset,
                "type mismatch: expected {from_ref_type}, found {to_ref_type}"
            );
        }

        let (block_ty, frame_kind) = self.jump(relative_depth)?;
        let mut label_types = self.label_types(block_ty, frame_kind)?;

        // On a successful cast the branch is taken with the cast value, so
        // the label's last result type must accommodate `to_ref_type`.
        match label_types.next_back() {
            Some(label_ty) if self.resources.is_subtype(to_ref_type.into(), label_ty) => {
                self.pop_operand(Some(from_ref_type.into()))?;
            }
            Some(label_ty) => bail!(
                self.offset,
                "type mismatch: casting to type {to_ref_type}, but it does not match \
                 label result type {label_ty}"
            ),
            None => bail!(
                self.offset,
                "type mismatch: br_on_cast to label with empty types, must have a reference type"
            ),
        };

        // The remaining label types must be present both when the branch is
        // taken and when it falls through.
        self.pop_push_label_types(label_types)?;
        // On fall-through the operand is known *not* to match `to_ref_type`,
        // so the type difference stays on the stack.
        let diff_ty = RefType::difference(from_ref_type, to_ref_type);
        self.push_operand(diff_ty)?;
        Ok(())
    }
    fn visit_br_on_cast_fail(
        &mut self,
        relative_depth: u32,
        mut from_ref_type: RefType,
        mut to_ref_type: RefType,
    ) -> Self::Output {
        // Validate (and possibly normalize in place) both annotated
        // reference types before any subtyping checks.
        self.resources
            .check_ref_type(&mut from_ref_type, self.offset)?;
        self.resources
            .check_ref_type(&mut to_ref_type, self.offset)?;

        // The cast target must be a subtype of the cast source.
        if !self
            .resources
            .is_subtype(to_ref_type.into(), from_ref_type.into())
        {
            bail!(
                self.offset,
                "type mismatch: expected {from_ref_type}, found {to_ref_type}"
            );
        }

        let (block_ty, frame_kind) = self.jump(relative_depth)?;
        let mut label_tys = self.label_types(block_ty, frame_kind)?;

        // Unlike `br_on_cast`, the branch is taken when the cast *fails*, so
        // the label's last result type must accommodate the type difference
        // (operand known not to be `to_ref_type`).
        let diff_ty = RefType::difference(from_ref_type, to_ref_type);
        match label_tys.next_back() {
            Some(label_ty) if self.resources.is_subtype(diff_ty.into(), label_ty) => {
                self.pop_operand(Some(from_ref_type.into()))?;
            }
            Some(label_ty) => bail!(
                self.offset,
                "type mismatch: expected label result type {label_ty}, found {diff_ty}"
            ),
            None => bail!(
                self.offset,
                "type mismatch: expected a reference type, found nothing"
            ),
        }

        // The remaining label types must be present both when the branch is
        // taken and when it falls through.
        self.pop_push_label_types(label_tys)?;
        // On fall-through the cast succeeded, so `to_ref_type` stays on the
        // stack.
        self.push_operand(to_ref_type)?;
        Ok(())
    }
    fn visit_ref_i31(&mut self) -> Self::Output {
        // `ref.i31`: converts an `i32` into an (unshared) `i31ref`.
        self.pop_operand(Some(ValType::I32))?;
        self.push_operand(ValType::Ref(RefType::I31))
    }
    fn visit_ref_i31_shared(&mut self) -> Self::Output {
        // `ref.i31_shared`: like `ref.i31` but produces a shared `i31ref`
        // (shared-everything-threads proposal).
        self.pop_operand(Some(ValType::I32))?;
        self.push_operand(ValType::Ref(
            RefType::I31.shared().expect("i31 is abstract"),
        ))
    }
    // `i31.get_s`/`i31.get_u`: pop a (possibly shared) `i31ref` and push the
    // payload as an `i32` (sign- vs. zero-extended per the mnemonic).
    fn visit_i31_get_s(&mut self) -> Self::Output {
        self.pop_maybe_shared_ref(AbstractHeapType::I31)?;
        self.push_operand(ValType::I32)
    }
    fn visit_i31_get_u(&mut self) -> Self::Output {
        self.pop_maybe_shared_ref(AbstractHeapType::I31)?;
        self.push_operand(ValType::I32)
    }
    fn visit_try(&mut self, mut ty: BlockType) -> Self::Output {
        // Legacy (pre-exnref) exception handling: `try` opens a new control
        // frame, consuming the block type's parameters like a plain `block`.
        self.check_block_type(&mut ty)?;
        for ty in self.params(ty)?.rev() {
            self.pop_operand(Some(ty))?;
        }
        self.push_ctrl(FrameKind::LegacyTry, ty)?;
        Ok(())
    }
    fn visit_catch(&mut self, index: u32) -> Self::Output {
        // Legacy exception handling: `catch` closes the preceding `try` (or
        // sibling `catch`) frame and opens a `LegacyCatch` frame with the
        // same block type.
        let frame = self.pop_ctrl()?;
        if frame.kind != FrameKind::LegacyTry && frame.kind != FrameKind::LegacyCatch {
            bail!(self.offset, "catch found outside of an `try` block");
        }
        // Start a new frame for the handler body at the current operand
        // height.
        let height = self.operands.len();
        let init_height = self.local_inits.push_ctrl();
        self.control.push(Frame {
            kind: FrameKind::LegacyCatch,
            block_type: frame.block_type,
            height,
            unreachable: false,
            init_height,
        });
        // Push exception argument types: the handler body receives the
        // caught tag's parameters as operands.
        let ty = self.exception_tag_at(index)?;
        for ty in ty.params() {
            self.push_operand(*ty)?;
        }
        Ok(())
    }
    fn visit_rethrow(&mut self, relative_depth: u32) -> Self::Output {
        // This is not a jump, but we need to check that the `rethrow`
        // targets an actual `catch` to get the exception.
        let (_, kind) = self.jump(relative_depth)?;
        if kind != FrameKind::LegacyCatch && kind != FrameKind::LegacyCatchAll {
            bail!(
                self.offset,
                "invalid rethrow label: target was not a `catch` block"
            );
        }
        // Control never falls through a `rethrow`, so the remainder of this
        // block is unreachable.
        self.unreachable()?;
        Ok(())
    }
    fn visit_delegate(&mut self, relative_depth: u32) -> Self::Output {
        // Legacy exception handling: `delegate` terminates a `try` block,
        // forwarding any exception to an enclosing handler.
        let frame = self.pop_ctrl()?;
        if frame.kind != FrameKind::LegacyTry {
            bail!(self.offset, "delegate found outside of an `try` block");
        }
        // This operation is not a jump, but we need to check the
        // depth for validity
        let _ = self.jump(relative_depth)?;
        // `delegate` also ends the block, so its results become available on
        // the parent frame's stack.
        for ty in self.results(frame.block_type)? {
            self.push_operand(ty)?;
        }
        Ok(())
    }
    fn visit_catch_all(&mut self) -> Self::Output {
        // Legacy exception handling: `catch_all` must follow a `try` or
        // `catch` frame and may appear at most once per `try` block.
        let frame = self.pop_ctrl()?;
        if frame.kind == FrameKind::LegacyCatchAll {
            bail!(self.offset, "only one catch_all allowed per `try` block");
        } else if frame.kind != FrameKind::LegacyTry && frame.kind != FrameKind::LegacyCatch {
            bail!(self.offset, "catch_all found outside of a `try` block");
        }
        // Open a fresh frame for the handler body; unlike `catch`, no
        // exception payload types are pushed.
        let height = self.operands.len();
        let init_height = self.local_inits.push_ctrl();
        self.control.push(Frame {
            kind: FrameKind::LegacyCatchAll,
            block_type: frame.block_type,
            height,
            unreachable: false,
            init_height,
        });
        Ok(())
    }
    fn visit_cont_new(&mut self, type_index: u32) -> Self::Output {
        // `cont.new`: consumes a nullable reference to the continuation's
        // underlying type and produces a non-null reference to the
        // continuation type itself.
        let cont_ty = self.cont_type_at(type_index)?;
        let rt = RefType::concrete(true, cont_ty.0);
        self.pop_ref(Some(rt))?;
        self.push_concrete_ref(false, type_index)?;
        Ok(())
    }
    fn visit_cont_bind(&mut self, argument_index: u32, result_index: u32) -> Self::Output {
        // `cont.bind` partially applies a continuation: it consumes a
        // continuation of type `$argument_index` together with a prefix of
        // that continuation's expected arguments, and produces a continuation
        // of type `$result_index` expecting only the remaining arguments.
        //
        // [ts1 ts1'] -> [ts2]
        let arg_cont = self.cont_type_at(argument_index)?;
        let arg_func = self.func_type_of_cont_type(arg_cont);
        // [ts1''] -> [ts2']
        let res_cont = self.cont_type_at(result_index)?;
        let res_func = self.func_type_of_cont_type(res_cont);

        // Verify that the argument's domain is at least as large as the
        // result's domain.
        if arg_func.params().len() < res_func.params().len() {
            bail!(self.offset, "type mismatch in continuation arguments");
        }

        // Number of arguments bound now (the prefix popped from the stack).
        let argcnt = arg_func.params().len() - res_func.params().len();

        // Check that [ts1'] -> [ts2] <: [ts1''] -> [ts2']
        // (contravariant in parameters, covariant in results).
        if !self.is_subtype_many(res_func.params(), &arg_func.params()[argcnt..])
            || arg_func.results().len() != res_func.results().len()
            || !self.is_subtype_many(arg_func.results(), res_func.results())
        {
            bail!(self.offset, "type mismatch in continuation types");
        }

        // Check that the continuation is available on the stack.
        self.pop_concrete_ref(true, argument_index)?;

        // Check that the argument prefix is available on the stack.
        for &ty in arg_func.params().iter().take(argcnt).rev() {
            self.pop_operand(Some(ty))?;
        }

        // Construct the result type.
        self.push_concrete_ref(false, result_index)?;

        Ok(())
    }
4111    fn visit_suspend(&mut self, tag_index: u32) -> Self::Output {
4112        let ft = &self.tag_at(tag_index)?;
4113        for &ty in ft.params().iter().rev() {
4114            self.pop_operand(Some(ty))?;
4115        }
4116        for &ty in ft.results() {
4117            self.push_operand(ty)?;
4118        }
4119        Ok(())
4120    }
4121    fn visit_resume(&mut self, type_index: u32, table: ResumeTable) -> Self::Output {
4122        // [ts1] -> [ts2]
4123        let ft = self.check_resume_table(table, type_index)?;
4124        self.pop_concrete_ref(true, type_index)?;
4125        // Check that ts1 are available on the stack.
4126        for &ty in ft.params().iter().rev() {
4127            self.pop_operand(Some(ty))?;
4128        }
4129
4130        // Make ts2 available on the stack.
4131        for &ty in ft.results() {
4132            self.push_operand(ty)?;
4133        }
4134        Ok(())
4135    }
    fn visit_resume_throw(
        &mut self,
        type_index: u32,
        tag_index: u32,
        table: ResumeTable,
    ) -> Self::Output {
        // `resume_throw` resumes a continuation by throwing an exception
        // into it, so it consumes the exception *tag's* parameters rather
        // than the continuation's.
        //
        // [ts1] -> [ts2]
        let ft = self.check_resume_table(table, type_index)?;
        // [ts1'] -> []
        let tag_ty = self.exception_tag_at(tag_index)?;
        if tag_ty.results().len() != 0 {
            bail!(self.offset, "type mismatch: non-empty tag result type")
        }
        self.pop_concrete_ref(true, type_index)?;
        // Check that ts1' are available on the stack.
        for &ty in tag_ty.params().iter().rev() {
            self.pop_operand(Some(ty))?;
        }

        // Make ts2 available on the stack.
        for &ty in ft.results() {
            self.push_operand(ty)?;
        }
        Ok(())
    }
    fn visit_switch(&mut self, type_index: u32, tag_index: u32) -> Self::Output {
        // `switch` performs a symmetric transfer to another continuation:
        // the target continuation's final parameter is itself a continuation
        // reference (the suspended current computation).
        //
        // [t1* (ref null $ct2)] -> [te1*]
        let cont_ty = self.cont_type_at(type_index)?;
        let func_ty = self.func_type_of_cont_type(cont_ty);
        // [] -> [t*]
        let tag_ty = self.tag_at(tag_index)?;
        if tag_ty.params().len() != 0 {
            bail!(self.offset, "type mismatch: non-empty tag parameter type")
        }
        // Extract the other continuation reference
        match func_ty.params().last() {
            Some(ValType::Ref(rt)) if rt.is_concrete_type_ref() => {
                // Resolve the last parameter's type index; it must be a
                // canonicalized reference to a continuation type.
                let other_cont_id = rt
                    .type_index()
                    .unwrap()
                    .unpack()
                    .as_core_type_id()
                    .expect("expected canonicalized index");
                let sub_ty = self.resources.sub_type_at_id(other_cont_id);
                let other_cont_ty =
                    if let CompositeInnerType::Cont(cont) = &sub_ty.composite_type.inner {
                        cont
                    } else {
                        bail!(self.offset, "non-continuation type");
                    };
                let other_func_ty = self.func_type_of_cont_type(&other_cont_ty);
                // Both continuations' results must agree with the tag's
                // results for the transfer to be well-typed.
                if func_ty.results().len() != tag_ty.results().len()
                    || !self.is_subtype_many(func_ty.results(), tag_ty.results())
                    || other_func_ty.results().len() != tag_ty.results().len()
                    || !self.is_subtype_many(tag_ty.results(), other_func_ty.results())
                {
                    bail!(self.offset, "type mismatch in continuation types")
                }

                // Pop the continuation reference.
                self.pop_concrete_ref(true, type_index)?;

                // Check that the arguments t1* are available on the
                // stack (skipping the trailing continuation parameter).
                for &ty in func_ty.params().iter().rev().skip(1) {
                    self.pop_operand(Some(ty))?;
                }

                // Make the results t2* available on the stack.
                for &ty in other_func_ty.params() {
                    self.push_operand(ty)?;
                }
            }
            Some(ty) => bail!(
                self.offset,
                "type mismatch: expected a continuation reference, found {}",
                ty_to_str(*ty)
            ),
            None => bail!(
                self.offset,
                "type mismatch: instruction requires a continuation reference"
            ),
        }
        Ok(())
    }
    // Wide-arithmetic proposal: the 128-bit add/sub pair and the 64x64-bit
    // widening multiply pair each share a single validation helper.
    fn visit_i64_add128(&mut self) -> Result<()> {
        self.check_binop128()
    }
    fn visit_i64_sub128(&mut self) -> Result<()> {
        self.check_binop128()
    }
    fn visit_i64_mul_wide_s(&mut self) -> Result<()> {
        self.check_i64_mul_wide()
    }
    fn visit_i64_mul_wide_u(&mut self) -> Result<()> {
        self.check_i64_mul_wide()
    }
4233}
4234
/// An iterator that is statically one of two underlying iterator types.
///
/// The `Iterator`/`DoubleEndedIterator`/`ExactSizeIterator` impls below
/// simply forward to whichever variant is present, allowing a function to
/// return one of two differently-typed iterators without boxing.
#[derive(Clone, Debug)]
enum Either<A, B> {
    A(A),
    B(B),
}
4240
4241impl<A, B> Iterator for Either<A, B>
4242where
4243    A: Iterator,
4244    B: Iterator<Item = A::Item>,
4245{
4246    type Item = A::Item;
4247    fn next(&mut self) -> Option<A::Item> {
4248        match self {
4249            Either::A(a) => a.next(),
4250            Either::B(b) => b.next(),
4251        }
4252    }
4253}
4254
4255impl<A, B> DoubleEndedIterator for Either<A, B>
4256where
4257    A: DoubleEndedIterator,
4258    B: DoubleEndedIterator<Item = A::Item>,
4259{
4260    fn next_back(&mut self) -> Option<A::Item> {
4261        match self {
4262            Either::A(a) => a.next_back(),
4263            Either::B(b) => b.next_back(),
4264        }
4265    }
4266}
4267
4268impl<A, B> ExactSizeIterator for Either<A, B>
4269where
4270    A: ExactSizeIterator,
4271    B: ExactSizeIterator<Item = A::Item>,
4272{
4273    fn len(&self) -> usize {
4274        match self {
4275            Either::A(a) => a.len(),
4276            Either::B(b) => b.len(),
4277        }
4278    }
4279}
4280
/// Trait alias for iterators that know their exact length, can iterate from
/// both ends, and are cheaply cloneable and debuggable; blanket-implemented
/// for every iterator satisfying those bounds.
trait PreciseIterator: ExactSizeIterator + DoubleEndedIterator + Clone + core::fmt::Debug {}
impl<T: ExactSizeIterator + DoubleEndedIterator + Clone + core::fmt::Debug> PreciseIterator for T {}
4283
impl Locals {
    /// Defines another group of `count` local variables of type `ty`.
    ///
    /// Returns `true` if the definition was successful. Local variable
    /// definition is unsuccessful in case the amount of total variables
    /// after definition exceeds the allowed maximum number.
    ///
    /// Representation: the first `MAX_LOCALS_TO_TRACK` locals are stored
    /// directly in `first` for O(1) lookup; any locals beyond that are
    /// recorded compressed in `uncached` as `(max_index, type)` pairs — one
    /// entry per `define` call that spills past the cache.
    fn define(&mut self, count: u32, ty: ValType) -> bool {
        if count == 0 {
            return true;
        }
        // How many slots remain in the directly-indexed cache.
        let vacant_first = MAX_LOCALS_TO_TRACK.saturating_sub(self.num_locals);
        // Reject definitions that would overflow `u32` or exceed the
        // wasm-defined per-function limit on locals.
        match self.num_locals.checked_add(count) {
            Some(num_locals) if num_locals > MAX_WASM_FUNCTION_LOCALS => return false,
            None => return false,
            Some(num_locals) => self.num_locals = num_locals,
        };
        let push_to_first = cmp::min(vacant_first, count);
        self.first
            .extend(iter::repeat(ty).take(push_to_first as usize));
        // Locals that didn't fit in the cache get a single compressed entry
        // keyed by the *largest* index they cover.
        let num_uncached = count - push_to_first;
        if num_uncached > 0 {
            let max_uncached_idx = self.num_locals - 1;
            self.uncached.push((max_uncached_idx, ty));
        }
        true
    }

    /// Returns the number of defined local variables.
    pub(super) fn len_locals(&self) -> u32 {
        self.num_locals
    }

    /// Returns the type of the local variable at the given index if any.
    ///
    /// Fast path: direct index into the `first` cache; slow path: binary
    /// search of the compressed `uncached` list.
    #[inline]
    pub(super) fn get(&self, idx: u32) -> Option<ValType> {
        match self.first.get(idx as usize) {
            Some(ty) => Some(*ty),
            None => self.get_bsearch(idx),
        }
    }

    /// Looks up `idx` in the compressed `(max_index, type)` list.
    fn get_bsearch(&self, idx: u32) -> Option<ValType> {
        match self.uncached.binary_search_by_key(&idx, |(idx, _)| *idx) {
            // If this index would be inserted at the end of the list, then the
            // index is out of bounds and we return an error.
            Err(i) if i == self.uncached.len() => None,

            // If `Ok` is returned we found the index exactly, or if `Err` is
            // returned the position is the one which is the least index
            // greater that `idx`, which is still the type of `idx` according
            // to our "compressed" representation. In both cases we access the
            // list at index `i`.
            Ok(i) | Err(i) => Some(self.uncached[i].1),
        }
    }
}
4340
4341impl<R> ModuleArity for WasmProposalValidator<'_, '_, R>
4342where
4343    R: WasmModuleResources,
4344{
4345    fn tag_type_arity(&self, at: u32) -> Option<(u32, u32)> {
4346        self.0
4347            .resources
4348            .tag_at(at)
4349            .map(|x| (x.params().len() as u32, x.results().len() as u32))
4350    }
4351
4352    fn type_index_of_function(&self, function_idx: u32) -> Option<u32> {
4353        self.0.resources.type_index_of_function(function_idx)
4354    }
4355
4356    fn sub_type_at(&self, type_idx: u32) -> Option<&SubType> {
4357        Some(self.0.sub_type_at(type_idx).ok()?)
4358    }
4359
4360    fn func_type_of_cont_type(&self, c: &ContType) -> Option<&FuncType> {
4361        Some(self.0.func_type_of_cont_type(c))
4362    }
4363
4364    fn sub_type_of_ref_type(&self, rt: &RefType) -> Option<&SubType> {
4365        let id = rt.type_index()?.as_core_type_id()?;
4366        Some(self.0.resources.sub_type_at_id(id))
4367    }
4368
4369    fn control_stack_height(&self) -> u32 {
4370        self.0.control.len() as u32
4371    }
4372
4373    fn label_block(&self, depth: u32) -> Option<(BlockType, FrameKind)> {
4374        self.0.jump(depth).ok()
4375    }
4376}