wasmparser/validator/operators.rs

1/* Copyright 2019 Mozilla Foundation
2 *
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 *     http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16// The basic validation algorithm here is copied from the "Validation
17// Algorithm" section of the WebAssembly specification -
18// https://webassembly.github.io/spec/core/appendix/algorithm.html.
19//
20// That algorithm is followed pretty closely here, namely `push_operand`,
21// `pop_operand`, `push_ctrl`, and `pop_ctrl`. If anything here is a bit
22// confusing it's recommended to read over that section to see how it maps to
23// the various methods here.
24
25use crate::{
26    limits::MAX_WASM_FUNCTION_LOCALS, AbstractHeapType, BinaryReaderError, BlockType, BrTable,
27    Catch, FieldType, FuncType, GlobalType, HeapType, Ieee32, Ieee64, MemArg, RefType, Result,
28    StorageType, StructType, SubType, TableType, TryTable, UnpackedIndex, ValType, VisitOperator,
29    WasmFeatures, WasmModuleResources, V128,
30};
31use crate::{prelude::*, CompositeInnerType, Ordering};
32use core::ops::{Deref, DerefMut};
33
/// Core state used to validate a single function body (or constant
/// expression) one operator at a time.
///
/// Tracks the operand type stack, the control-frame stack, and the
/// initialization state of locals, following the "Validation Algorithm"
/// appendix of the WebAssembly specification.
pub(crate) struct OperatorValidator {
    pub(super) locals: Locals,
    pub(super) local_inits: Vec<bool>,

    // This is a list of flags for wasm features which are used to gate various
    // instructions.
    pub(crate) features: WasmFeatures,

    // Temporary storage used during `pop_push_label_types` and various
    // branching instructions.
    popped_types_tmp: Vec<MaybeType>,

    /// The `control` list is the list of blocks that we're currently in.
    control: Vec<Frame>,
    /// The `operands` is the current type stack.
    operands: Vec<MaybeType>,
    /// When local_inits is modified, the relevant index is recorded here to be
    /// undone when control pops
    inits: Vec<u32>,

    /// Offset of the `end` instruction which emptied the `control` stack, which
    /// must be the end of the function.
    end_which_emptied_control: Option<usize>,

    /// Whether validation is happening in a shared context.
    shared: bool,
}
61
// No science was performed in the creation of this number, feel free to change
// it if you so like.
//
// This caps the size of `Locals::first`, the fast-path array for local type
// lookups; locals at or beyond this index fall back to a binary search.
const MAX_LOCALS_TO_TRACK: usize = 50;
65
/// Storage for the types of a function's locals, optimized for fast lookup
/// by index in `local.{get,set,tee}` validation.
pub(super) struct Locals {
    // Total number of locals in the function.
    num_locals: u32,

    // The first MAX_LOCALS_TO_TRACK locals in a function. This is used to
    // optimize the theoretically common case where most functions don't have
    // many locals and don't need a full binary search in the entire local space
    // below.
    first: Vec<ValType>,

    // This is a "compressed" list of locals for this function. The list of
    // locals are represented as a list of tuples. The second element is the
    // type of the local, and the first element is monotonically increasing as
    // you visit elements of this list. The first element is the maximum index
    // of the local, after the previous index, of the type specified.
    //
    // This allows us to do a binary search on the list for a local's index for
    // `local.{get,set,tee}`. We do a binary search for the index desired, and
    // it either lies in a "hole" where the maximum index is specified later,
    // or it's at the end of the list meaning it's out of bounds.
    all: Vec<(u32, ValType)>,
}
88
/// A Wasm control flow block on the control flow stack during Wasm validation.
//
// # Dev. Note
//
// This structure corresponds to `ctrl_frame` as specified in the validation
// appendix of the wasm spec.
#[derive(Debug, Copy, Clone)]
pub struct Frame {
    /// Indicator for what kind of instruction pushed this frame.
    pub kind: FrameKind,
    /// The type signature of this frame, represented as a singular return type
    /// or a type index pointing into the module's types.
    pub block_type: BlockType,
    /// The index, below which, this frame cannot modify the operand stack.
    pub height: usize,
    /// Whether this frame is unreachable so far.
    pub unreachable: bool,
    /// The number of initializations in the stack at the time of its creation
    /// (i.e. the length of `OperatorValidator::inits` when this frame was
    /// pushed, restored when it is popped).
    pub init_height: usize,
}
109
/// The kind of a control flow [`Frame`].
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum FrameKind {
    /// A Wasm `block` control block.
    Block,
    /// A Wasm `if` control block.
    If,
    /// A Wasm `else` control block.
    Else,
    /// A Wasm `loop` control block.
    Loop,
    /// A Wasm `try_table` control block.
    ///
    /// # Note
    ///
    /// This belongs to the Wasm exception handling proposal.
    TryTable,
    /// A Wasm legacy `try` control block.
    ///
    /// # Note
    ///
    /// See: `WasmFeatures::legacy_exceptions` Note in `crates/wasmparser/src/features.rs`
    LegacyTry,
    /// A Wasm legacy `catch` control block.
    ///
    /// # Note
    ///
    /// See: `WasmFeatures::legacy_exceptions` Note in `crates/wasmparser/src/features.rs`
    LegacyCatch,
    /// A Wasm legacy `catch_all` control block.
    ///
    /// # Note
    ///
    /// See: `WasmFeatures::legacy_exceptions` Note in `crates/wasmparser/src/features.rs`
    LegacyCatchAll,
}
146
/// A short-lived view pairing an [`OperatorValidator`] with the module's
/// resources, used while validating a single operator.
struct OperatorValidatorTemp<'validator, 'resources, T> {
    // Byte offset of the operator being validated, used for error reporting.
    offset: usize,
    // The underlying validator state; `Deref`/`DerefMut` forward to it.
    inner: &'validator mut OperatorValidator,
    // Module-level type information (function types, globals, etc.).
    resources: &'resources T,
}
152
/// Reusable heap allocations extracted from a finished
/// [`OperatorValidator`] (see `into_allocations`) so that validating the
/// next function can reuse their capacity instead of reallocating.
#[derive(Default)]
pub struct OperatorValidatorAllocations {
    popped_types_tmp: Vec<MaybeType>,
    control: Vec<Frame>,
    operands: Vec<MaybeType>,
    local_inits: Vec<bool>,
    inits: Vec<u32>,
    locals_first: Vec<ValType>,
    locals_all: Vec<(u32, ValType)>,
}
163
/// Type storage within the validator.
///
/// When managing the operand stack in unreachable code, the validator may not
/// fully know an operand's type. This unknown state is known as the `bottom`
/// type in the WebAssembly specification. Validating further instructions may
/// give us more information; either partial (`UnknownRef`) or fully known.
#[derive(Debug, Copy, Clone)]
enum MaybeType<T = ValType> {
    /// The operand has no available type information due to unreachable code.
    ///
    /// This state represents "unknown" and corresponds to the `bottom` type in
    /// the WebAssembly specification. There are no constraints on what this
    /// type may be and it can match any other type during validation.
    Bottom,
    /// The operand is known to be a reference and we may know its abstract
    /// type.
    ///
    /// This state is not fully `Known`, however, because its type can be
    /// interpreted as either:
    /// - `shared` or not-`shared`
    /// -  nullable or not nullable
    ///
    /// No further refinements are required for WebAssembly instructions today
    /// but this may grow in the future.
    UnknownRef(Option<AbstractHeapType>),
    /// The operand is known to have type `T`.
    Known(T),
}
192
// The validator is pretty performance-sensitive and `MaybeType` is the main
// unit of storage, so assert that it doesn't exceed 4 bytes which is the
// current expected size. This is a compile-time check: the build fails if
// the layout ever grows.
const _: () = {
    assert!(core::mem::size_of::<MaybeType>() == 4);
};
199
200impl core::fmt::Display for MaybeType {
201    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
202        match self {
203            MaybeType::Bottom => write!(f, "bot"),
204            MaybeType::UnknownRef(ty) => {
205                write!(f, "(ref shared? ")?;
206                match ty {
207                    Some(ty) => write!(f, "{}bot", ty.as_str(true))?,
208                    None => write!(f, "bot")?,
209                }
210                write!(f, ")")
211            }
212            MaybeType::Known(ty) => core::fmt::Display::fmt(ty, f),
213        }
214    }
215}
216
217impl From<ValType> for MaybeType {
218    fn from(ty: ValType) -> MaybeType {
219        MaybeType::Known(ty)
220    }
221}
222
223impl From<RefType> for MaybeType {
224    fn from(ty: RefType) -> MaybeType {
225        let ty: ValType = ty.into();
226        ty.into()
227    }
228}
229impl From<MaybeType<RefType>> for MaybeType<ValType> {
230    fn from(ty: MaybeType<RefType>) -> MaybeType<ValType> {
231        match ty {
232            MaybeType::Bottom => MaybeType::Bottom,
233            MaybeType::UnknownRef(ty) => MaybeType::UnknownRef(ty),
234            MaybeType::Known(t) => MaybeType::Known(t.into()),
235        }
236    }
237}
238
239impl MaybeType<RefType> {
240    fn as_non_null(&self) -> MaybeType<RefType> {
241        match self {
242            MaybeType::Bottom => MaybeType::Bottom,
243            MaybeType::UnknownRef(ty) => MaybeType::UnknownRef(*ty),
244            MaybeType::Known(ty) => MaybeType::Known(ty.as_non_null()),
245        }
246    }
247
248    fn is_maybe_shared(&self, resources: &impl WasmModuleResources) -> Option<bool> {
249        match self {
250            MaybeType::Bottom => None,
251            MaybeType::UnknownRef(_) => None,
252            MaybeType::Known(ty) => Some(resources.is_shared(*ty)),
253        }
254    }
255}
256
257impl OperatorValidator {
258    fn new(features: &WasmFeatures, allocs: OperatorValidatorAllocations) -> Self {
259        let OperatorValidatorAllocations {
260            popped_types_tmp,
261            control,
262            operands,
263            local_inits,
264            inits,
265            locals_first,
266            locals_all,
267        } = allocs;
268        debug_assert!(popped_types_tmp.is_empty());
269        debug_assert!(control.is_empty());
270        debug_assert!(operands.is_empty());
271        debug_assert!(local_inits.is_empty());
272        debug_assert!(inits.is_empty());
273        debug_assert!(locals_first.is_empty());
274        debug_assert!(locals_all.is_empty());
275        OperatorValidator {
276            locals: Locals {
277                num_locals: 0,
278                first: locals_first,
279                all: locals_all,
280            },
281            local_inits,
282            inits,
283            features: *features,
284            popped_types_tmp,
285            operands,
286            control,
287            end_which_emptied_control: None,
288            shared: false,
289        }
290    }
291
    /// Creates a new operator validator which will be used to validate a
    /// function whose type is the `ty` index specified.
    ///
    /// The `resources` are used to learn about the function type underlying
    /// `ty`.
    pub fn new_func<T>(
        ty: u32,
        offset: usize,
        features: &WasmFeatures,
        resources: &T,
        allocs: OperatorValidatorAllocations,
    ) -> Result<Self>
    where
        T: WasmModuleResources,
    {
        let mut ret = OperatorValidator::new(features, allocs);
        // The function body is validated as if it were the outermost block,
        // typed by the function's own signature.
        ret.control.push(Frame {
            kind: FrameKind::Block,
            block_type: BlockType::FuncType(ty),
            height: 0,
            unreachable: false,
            init_height: 0,
        });

        // Retrieve the function's type via index (`ty`); the `offset` is
        // necessary due to `sub_type_at`'s error messaging.
        let sub_ty = OperatorValidatorTemp {
            offset,
            inner: &mut ret,
            resources,
        }
        .sub_type_at(ty)?;

        // Set up the function's locals: parameters come first and are always
        // considered initialized.
        if let CompositeInnerType::Func(func_ty) = &sub_ty.composite_type.inner {
            for ty in func_ty.params() {
                ret.locals.define(1, *ty);
                ret.local_inits.push(true);
            }
        } else {
            bail!(offset, "expected func type at index {ty}, found {sub_ty}")
        }

        // If we're in a shared function, ensure we do not access unshared
        // objects.
        if sub_ty.composite_type.shared {
            ret.shared = true;
        }
        Ok(ret)
    }
342
343    /// Creates a new operator validator which will be used to validate an
344    /// `init_expr` constant expression which should result in the `ty`
345    /// specified.
346    pub fn new_const_expr(
347        features: &WasmFeatures,
348        ty: ValType,
349        allocs: OperatorValidatorAllocations,
350    ) -> Self {
351        let mut ret = OperatorValidator::new(features, allocs);
352        ret.control.push(Frame {
353            kind: FrameKind::Block,
354            block_type: BlockType::Type(ty),
355            height: 0,
356            unreachable: false,
357            init_height: 0,
358        });
359        ret
360    }
361
362    pub fn define_locals(
363        &mut self,
364        offset: usize,
365        count: u32,
366        mut ty: ValType,
367        resources: &impl WasmModuleResources,
368    ) -> Result<()> {
369        resources.check_value_type(&mut ty, &self.features, offset)?;
370        if count == 0 {
371            return Ok(());
372        }
373        if !self.locals.define(count, ty) {
374            return Err(BinaryReaderError::new(
375                "too many locals: locals exceed maximum",
376                offset,
377            ));
378        }
379        self.local_inits
380            .resize(self.local_inits.len() + count as usize, ty.is_defaultable());
381        Ok(())
382    }
383
    /// Returns the current operands stack height.
    ///
    /// This counts all operands pushed so far, including those belonging to
    /// enclosing control frames.
    pub fn operand_stack_height(&self) -> usize {
        self.operands.len()
    }
388
389    /// Returns the optional value type of the value operand at the given
390    /// `depth` from the top of the operand stack.
391    ///
392    /// - Returns `None` if the `depth` is out of bounds.
393    /// - Returns `Some(None)` if there is a value with unknown type
394    /// at the given `depth`.
395    ///
396    /// # Note
397    ///
398    /// A `depth` of 0 will refer to the last operand on the stack.
399    pub fn peek_operand_at(&self, depth: usize) -> Option<Option<ValType>> {
400        Some(match self.operands.iter().rev().nth(depth)? {
401            MaybeType::Known(t) => Some(*t),
402            MaybeType::Bottom | MaybeType::UnknownRef(..) => None,
403        })
404    }
405
    /// Returns the number of frames on the control flow stack.
    ///
    /// This includes the implicit outermost frame pushed for the function
    /// body (or constant expression) itself.
    pub fn control_stack_height(&self) -> usize {
        self.control.len()
    }
410
    /// Returns the control frame at the given `depth` from the top of the
    /// control stack, or `None` if `depth` is out of bounds.
    ///
    /// A `depth` of 0 refers to the innermost (most recently pushed) frame.
    pub fn get_frame(&self, depth: usize) -> Option<&Frame> {
        self.control.iter().rev().nth(depth)
    }
414
    /// Create a temporary [`OperatorValidatorTemp`] for validation.
    ///
    /// The returned visitor borrows both this validator and `resources` and
    /// is used to validate the single operator found at `offset`.
    pub fn with_resources<'a, 'validator, 'resources, T>(
        &'validator mut self,
        resources: &'resources T,
        offset: usize,
    ) -> impl VisitOperator<'a, Output = Result<()>> + 'validator
    where
        T: WasmModuleResources,
        'resources: 'validator,
    {
        // Wrap in `WasmProposalValidator` (defined elsewhere in this file) so
        // visited operators are additionally checked against enabled features.
        WasmProposalValidator(OperatorValidatorTemp {
            offset,
            inner: self,
            resources,
        })
    }
431
432    pub fn finish(&mut self, offset: usize) -> Result<()> {
433        if self.control.last().is_some() {
434            bail!(
435                offset,
436                "control frames remain at end of function: END opcode expected"
437            );
438        }
439
440        // The `end` opcode is one byte which means that the `offset` here
441        // should point just beyond the `end` opcode which emptied the control
442        // stack. If not that means more instructions were present after the
443        // control stack was emptied.
444        if offset != self.end_which_emptied_control.unwrap() + 1 {
445            return Err(self.err_beyond_end(offset));
446        }
447        Ok(())
448    }
449
    /// Creates the error reported when operators are encountered after the
    /// `end` opcode that already emptied the control stack.
    fn err_beyond_end(&self, offset: usize) -> BinaryReaderError {
        format_err!(offset, "operators remaining after end of function")
    }
453
454    pub fn into_allocations(self) -> OperatorValidatorAllocations {
455        fn clear<T>(mut tmp: Vec<T>) -> Vec<T> {
456            tmp.clear();
457            tmp
458        }
459        OperatorValidatorAllocations {
460            popped_types_tmp: clear(self.popped_types_tmp),
461            control: clear(self.control),
462            operands: clear(self.operands),
463            local_inits: clear(self.local_inits),
464            inits: clear(self.inits),
465            locals_first: clear(self.locals.first),
466            locals_all: clear(self.locals.all),
467        }
468    }
469}
470
471impl<R> Deref for OperatorValidatorTemp<'_, '_, R> {
472    type Target = OperatorValidator;
473    fn deref(&self) -> &OperatorValidator {
474        self.inner
475    }
476}
477
478impl<R> DerefMut for OperatorValidatorTemp<'_, '_, R> {
479    fn deref_mut(&mut self) -> &mut OperatorValidator {
480        self.inner
481    }
482}
483
484impl<'resources, R> OperatorValidatorTemp<'_, 'resources, R>
485where
486    R: WasmModuleResources,
487{
488    /// Pushes a type onto the operand stack.
489    ///
490    /// This is used by instructions to represent a value that is pushed to the
491    /// operand stack. This can fail, but only if `Type` is feature gated.
492    /// Otherwise the push operation always succeeds.
493    fn push_operand<T>(&mut self, ty: T) -> Result<()>
494    where
495        T: Into<MaybeType>,
496    {
497        let maybe_ty = ty.into();
498
499        if cfg!(debug_assertions) {
500            match maybe_ty {
501                MaybeType::Known(ValType::Ref(r)) => match r.heap_type() {
502                    HeapType::Concrete(index) => {
503                        debug_assert!(
504                            matches!(index, UnpackedIndex::Id(_)),
505                            "only ref types referencing `CoreTypeId`s can \
506                             be pushed to the operand stack"
507                        );
508                    }
509                    _ => {}
510                },
511                _ => {}
512            }
513        }
514
515        self.operands.push(maybe_ty);
516        Ok(())
517    }
518
519    fn push_concrete_ref(&mut self, nullable: bool, type_index: u32) -> Result<()> {
520        let mut heap_ty = HeapType::Concrete(UnpackedIndex::Module(type_index));
521
522        // Canonicalize the module index into an id.
523        self.resources.check_heap_type(&mut heap_ty, self.offset)?;
524        debug_assert!(matches!(heap_ty, HeapType::Concrete(UnpackedIndex::Id(_))));
525
526        let ref_ty = RefType::new(nullable, heap_ty).ok_or_else(|| {
527            format_err!(self.offset, "implementation limit: type index too large")
528        })?;
529
530        self.push_operand(ref_ty)
531    }
532
533    fn pop_concrete_ref(&mut self, nullable: bool, type_index: u32) -> Result<MaybeType> {
534        let mut heap_ty = HeapType::Concrete(UnpackedIndex::Module(type_index));
535
536        // Canonicalize the module index into an id.
537        self.resources.check_heap_type(&mut heap_ty, self.offset)?;
538        debug_assert!(matches!(heap_ty, HeapType::Concrete(UnpackedIndex::Id(_))));
539
540        let ref_ty = RefType::new(nullable, heap_ty).ok_or_else(|| {
541            format_err!(self.offset, "implementation limit: type index too large")
542        })?;
543
544        self.pop_operand(Some(ref_ty.into()))
545    }
546
547    /// Pop the given label types, checking that they are indeed present on the
548    /// stack, and then push them back on again.
549    fn pop_push_label_types(
550        &mut self,
551        label_types: impl PreciseIterator<Item = ValType>,
552    ) -> Result<()> {
553        for ty in label_types.clone().rev() {
554            self.pop_operand(Some(ty))?;
555        }
556        for ty in label_types {
557            self.push_operand(ty)?;
558        }
559        Ok(())
560    }
561
562    /// Attempts to pop a type from the operand stack.
563    ///
564    /// This function is used to remove types from the operand stack. The
565    /// `expected` argument can be used to indicate that a type is required, or
566    /// simply that something is needed to be popped.
567    ///
568    /// If `expected` is `Some(T)` then this will be guaranteed to return
569    /// `T`, and it will only return success if the current block is
570    /// unreachable or if `T` was found at the top of the operand stack.
571    ///
572    /// If `expected` is `None` then it indicates that something must be on the
573    /// operand stack, but it doesn't matter what's on the operand stack. This
574    /// is useful for polymorphic instructions like `select`.
575    ///
576    /// If `Some(T)` is returned then `T` was popped from the operand stack and
577    /// matches `expected`. If `None` is returned then it means that `None` was
578    /// expected and a type was successfully popped, but its exact type is
579    /// indeterminate because the current block is unreachable.
580    fn pop_operand(&mut self, expected: Option<ValType>) -> Result<MaybeType> {
581        // This method is one of the hottest methods in the validator so to
582        // improve codegen this method contains a fast-path success case where
583        // if the top operand on the stack is as expected it's returned
584        // immediately. This is the most common case where the stack will indeed
585        // have the expected type and all we need to do is pop it off.
586        //
587        // Note that this still has to be careful to be correct, though. For
588        // efficiency an operand is unconditionally popped and on success it is
589        // matched against the state of the world to see if we could actually
590        // pop it. If we shouldn't have popped it then it's passed to the slow
591        // path to get pushed back onto the stack.
592        let popped = match self.operands.pop() {
593            Some(MaybeType::Known(actual_ty)) => {
594                if Some(actual_ty) == expected {
595                    if let Some(control) = self.control.last() {
596                        if self.operands.len() >= control.height {
597                            return Ok(MaybeType::Known(actual_ty));
598                        }
599                    }
600                }
601                Some(MaybeType::Known(actual_ty))
602            }
603            other => other,
604        };
605
606        self._pop_operand(expected, popped)
607    }
608
    // This is the "real" implementation of `pop_operand` which is 100%
    // spec-compliant with little attention paid to efficiency since this is the
    // slow-path from the actual `pop_operand` function above.
    #[cold]
    fn _pop_operand(
        &mut self,
        expected: Option<ValType>,
        popped: Option<MaybeType>,
    ) -> Result<MaybeType> {
        // Undo the speculative pop performed by the fast path in
        // `pop_operand` before running the full algorithm.
        self.operands.extend(popped);
        let control = match self.control.last() {
            Some(c) => c,
            None => return Err(self.err_beyond_end(self.offset)),
        };
        let actual = if self.operands.len() == control.height && control.unreachable {
            MaybeType::Bottom
        } else {
            if self.operands.len() == control.height {
                let desc = match expected {
                    Some(ty) => ty_to_str(ty),
                    None => "a type".into(),
                };
                bail!(
                    self.offset,
                    "type mismatch: expected {desc} but nothing on stack"
                )
            } else {
                self.operands.pop().unwrap()
            }
        };
        if let Some(expected) = expected {
            match (actual, expected) {
                // The bottom type matches all expectations
                (MaybeType::Bottom, _) => {}

                // The "heap bottom" type only matches other references types,
                // but not any integer types. Note that if the heap bottom is
                // known to have a specific abstract heap type then a subtype
                // check is performed against the expected type.
                (MaybeType::UnknownRef(actual_ty), ValType::Ref(expected)) => {
                    if let Some(actual) = actual_ty {
                        let expected_shared = self.resources.is_shared(expected);
                        let actual = RefType::new(
                            false,
                            HeapType::Abstract {
                                shared: expected_shared,
                                ty: actual,
                            },
                        )
                        .unwrap();
                        if !self.resources.is_subtype(actual.into(), expected.into()) {
                            bail!(
                                self.offset,
                                "type mismatch: expected {}, found {}",
                                ty_to_str(expected.into()),
                                ty_to_str(actual.into())
                            );
                        }
                    }
                }

                // Use the `is_subtype` predicate to test if a found type matches
                // the expectation.
                (MaybeType::Known(actual), expected) => {
                    if !self.resources.is_subtype(actual, expected) {
                        bail!(
                            self.offset,
                            "type mismatch: expected {}, found {}",
                            ty_to_str(expected),
                            ty_to_str(actual)
                        );
                    }
                }

                // A "heap bottom" type cannot match any numeric types.
                (
                    MaybeType::UnknownRef(..),
                    ValType::I32 | ValType::I64 | ValType::F32 | ValType::F64 | ValType::V128,
                ) => {
                    bail!(
                        self.offset,
                        "type mismatch: expected {}, found heap type",
                        ty_to_str(expected)
                    )
                }
            }
        }
        Ok(actual)
    }
698
699    /// Pop a reference type from the operand stack.
700    fn pop_ref(&mut self, expected: Option<RefType>) -> Result<MaybeType<RefType>> {
701        match self.pop_operand(expected.map(|t| t.into()))? {
702            MaybeType::Bottom => Ok(MaybeType::UnknownRef(None)),
703            MaybeType::UnknownRef(ty) => Ok(MaybeType::UnknownRef(ty)),
704            MaybeType::Known(ValType::Ref(rt)) => Ok(MaybeType::Known(rt)),
705            MaybeType::Known(ty) => bail!(
706                self.offset,
707                "type mismatch: expected ref but found {}",
708                ty_to_str(ty)
709            ),
710        }
711    }
712
    /// Pop a reference type from the operand stack, checking if it is a subtype
    /// of a nullable type of `expected` or the shared version of `expected`.
    ///
    /// This function returns the popped reference type and its `shared`-ness,
    /// saving extra lookups for concrete types.
    fn pop_maybe_shared_ref(&mut self, expected: AbstractHeapType) -> Result<MaybeType<RefType>> {
        let actual = match self.pop_ref(None)? {
            // Unknown operands trivially satisfy any expectation.
            MaybeType::Bottom => return Ok(MaybeType::Bottom),
            MaybeType::UnknownRef(None) => return Ok(MaybeType::UnknownRef(None)),
            // Partially-known refs can be checked against the abstract heap
            // type directly without consulting module resources.
            MaybeType::UnknownRef(Some(actual)) => {
                if !actual.is_subtype_of(expected) {
                    bail!(
                        self.offset,
                        "type mismatch: expected subtype of {}, found {}",
                        expected.as_str(false),
                        actual.as_str(false),
                    )
                }
                return Ok(MaybeType::UnknownRef(Some(actual)));
            }
            MaybeType::Known(ty) => ty,
        };
        // Change our expectation based on whether we're dealing with an actual
        // shared or unshared type.
        let is_actual_shared = self.resources.is_shared(actual);
        let expected = RefType::new(
            true,
            HeapType::Abstract {
                shared: is_actual_shared,
                ty: expected,
            },
        )
        .unwrap();

        // Check (again) that the actual type is a subtype of the expected type.
        // Note that `_pop_operand` already does this kind of thing but we leave
        // that for a future refactoring (TODO).
        if !self.resources.is_subtype(actual.into(), expected.into()) {
            bail!(
                self.offset,
                "type mismatch: expected subtype of {expected}, found {actual}",
            )
        }
        Ok(MaybeType::Known(actual))
    }
758
759    /// Fetches the type for the local at `idx`, returning an error if it's out
760    /// of bounds.
761    fn local(&self, idx: u32) -> Result<ValType> {
762        match self.locals.get(idx) {
763            Some(ty) => Ok(ty),
764            None => bail!(
765                self.offset,
766                "unknown local {}: local index out of bounds",
767                idx
768            ),
769        }
770    }
771
772    /// Flags the current control frame as unreachable, additionally truncating
773    /// the currently active operand stack.
774    fn unreachable(&mut self) -> Result<()> {
775        let control = match self.control.last_mut() {
776            Some(frame) => frame,
777            None => return Err(self.err_beyond_end(self.offset)),
778        };
779        control.unreachable = true;
780        let new_height = control.height;
781        self.operands.truncate(new_height);
782        Ok(())
783    }
784
785    /// Pushes a new frame onto the control stack.
786    ///
787    /// This operation is used when entering a new block such as an if, loop,
788    /// or block itself. The `kind` of block is specified which indicates how
789    /// breaks interact with this block's type. Additionally the type signature
790    /// of the block is specified by `ty`.
791    fn push_ctrl(&mut self, kind: FrameKind, ty: BlockType) -> Result<()> {
792        // Push a new frame which has a snapshot of the height of the current
793        // operand stack.
794        let height = self.operands.len();
795        let init_height = self.inits.len();
796        self.control.push(Frame {
797            kind,
798            block_type: ty,
799            height,
800            unreachable: false,
801            init_height,
802        });
803        // All of the parameters are now also available in this control frame,
804        // so we push them here in order.
805        for ty in self.params(ty)? {
806            self.push_operand(ty)?;
807        }
808        Ok(())
809    }
810
    /// Pops a frame from the control stack.
    ///
    /// This function is used when exiting a block and leaves a block scope.
    /// Internally this will validate that blocks have the correct result type
    /// and that the operand stack has returned to the frame's base height.
    fn pop_ctrl(&mut self) -> Result<Frame> {
        // Read the expected type and expected height of the operand stack at
        // the end of the frame.
        let frame = match self.control.last() {
            Some(f) => f,
            None => return Err(self.err_beyond_end(self.offset)),
        };
        let ty = frame.block_type;
        let height = frame.height;
        let init_height = frame.init_height;

        // `reset_locals` in the spec: any local whose initialization was
        // recorded inside this frame reverts to uninitialized on exit.
        for init in self.inits.split_off(init_height) {
            self.local_inits[init as usize] = false;
        }

        // Pop all the result types, in reverse order, from the operand stack.
        // These types will, possibly, be transferred to the next frame.
        for ty in self.results(ty)?.rev() {
            self.pop_operand(Some(ty))?;
        }

        // Make sure that the operand stack has returned to its original
        // height...
        if self.operands.len() != height {
            bail!(
                self.offset,
                "type mismatch: values remaining on stack at end of block"
            );
        }

        // And then we can remove it!
        Ok(self.control.pop().unwrap())
    }
849
850    /// Validates a relative jump to the `depth` specified.
851    ///
852    /// Returns the type signature of the block that we're jumping to as well
853    /// as the kind of block if the jump is valid. Otherwise returns an error.
854    fn jump(&self, depth: u32) -> Result<(BlockType, FrameKind)> {
855        if self.control.is_empty() {
856            return Err(self.err_beyond_end(self.offset));
857        }
858        match (self.control.len() - 1).checked_sub(depth as usize) {
859            Some(i) => {
860                let frame = &self.control[i];
861                Ok((frame.block_type, frame.kind))
862            }
863            None => bail!(self.offset, "unknown label: branch depth too large"),
864        }
865    }
866
867    /// Validates that `memory_index` is valid in this module, and returns the
868    /// type of address used to index the memory specified.
869    fn check_memory_index(&self, memory_index: u32) -> Result<ValType> {
870        match self.resources.memory_at(memory_index) {
871            Some(mem) => Ok(mem.index_type()),
872            None => bail!(self.offset, "unknown memory {}", memory_index),
873        }
874    }
875
876    /// Validates a `memarg for alignment and such (also the memory it
877    /// references), and returns the type of index used to address the memory.
878    fn check_memarg(&self, memarg: MemArg) -> Result<ValType> {
879        let index_ty = self.check_memory_index(memarg.memory)?;
880        if memarg.align > memarg.max_align {
881            bail!(
882                self.offset,
883                "malformed memop alignment: alignment must not be larger than natural"
884            );
885        }
886        if index_ty == ValType::I32 && memarg.offset > u64::from(u32::MAX) {
887            bail!(self.offset, "offset out of range: must be <= 2**32");
888        }
889        Ok(index_ty)
890    }
891
892    fn check_floats_enabled(&self) -> Result<()> {
893        if !self.features.floats() {
894            bail!(self.offset, "floating-point instruction disallowed");
895        }
896        Ok(())
897    }
898
899    fn check_shared_memarg(&self, memarg: MemArg) -> Result<ValType> {
900        if memarg.align != memarg.max_align {
901            bail!(
902                self.offset,
903                "atomic instructions must always specify maximum alignment"
904            );
905        }
906        self.check_memory_index(memarg.memory)
907    }
908
909    fn check_simd_lane_index(&self, index: u8, max: u8) -> Result<()> {
910        if index >= max {
911            bail!(self.offset, "SIMD index out of bounds");
912        }
913        Ok(())
914    }
915
916    /// Validates a block type, primarily with various in-flight proposals.
917    fn check_block_type(&self, ty: &mut BlockType) -> Result<()> {
918        match ty {
919            BlockType::Empty => Ok(()),
920            BlockType::Type(t) => self
921                .resources
922                .check_value_type(t, &self.features, self.offset),
923            BlockType::FuncType(idx) => {
924                if !self.features.multi_value() {
925                    bail!(
926                        self.offset,
927                        "blocks, loops, and ifs may only produce a resulttype \
928                         when multi-value is not enabled",
929                    );
930                }
931                self.func_type_at(*idx)?;
932                Ok(())
933            }
934        }
935    }
936
937    /// Returns the corresponding function type for the `func` item located at
938    /// `function_index`.
939    fn type_of_function(&self, function_index: u32) -> Result<&'resources FuncType> {
940        if let Some(type_index) = self.resources.type_index_of_function(function_index) {
941            self.func_type_at(type_index)
942        } else {
943            bail!(
944                self.offset,
945                "unknown function {function_index}: function index out of bounds",
946            )
947        }
948    }
949
950    /// Checks a call-style instruction which will be invoking the function `ty`
951    /// specified.
952    ///
953    /// This will pop parameters from the operand stack for the function's
954    /// parameters and then push the results of the function on the stack.
955    fn check_call_ty(&mut self, ty: &FuncType) -> Result<()> {
956        for &ty in ty.params().iter().rev() {
957            debug_assert_type_indices_are_ids(ty);
958            self.pop_operand(Some(ty))?;
959        }
960        for &ty in ty.results() {
961            debug_assert_type_indices_are_ids(ty);
962            self.push_operand(ty)?;
963        }
964        Ok(())
965    }
966
    /// Similar to `check_call_ty` except used for tail-call instructions.
    ///
    /// Additionally requires the callee's results to match the current
    /// function's results, and then performs the implicit `return` so the
    /// rest of the frame becomes unreachable.
    fn check_return_call_ty(&mut self, ty: &FuncType) -> Result<()> {
        self.check_func_type_same_results(ty)?;
        self.check_call_ty(ty)?;
        self.check_return()
    }
973
974    /// Checks the immediate `type_index` of a `call_ref`-style instruction
975    /// (also `return_call_ref`).
976    ///
977    /// This will validate that the value on the stack is a `(ref type_index)`
978    /// or a subtype. This will then return the corresponding function type used
979    /// for this call (to be used with `check_call_ty` or
980    /// `check_return_call_ty`).
981    fn check_call_ref_ty(&mut self, type_index: u32) -> Result<&'resources FuncType> {
982        let unpacked_index = UnpackedIndex::Module(type_index);
983        let mut hty = HeapType::Concrete(unpacked_index);
984        self.resources.check_heap_type(&mut hty, self.offset)?;
985        let expected = RefType::new(true, hty).expect("hty should be previously validated");
986        self.pop_ref(Some(expected))?;
987        self.func_type_at(type_index)
988    }
989
990    /// Validates the immediate operands of a `call_indirect` or
991    /// `return_call_indirect` instruction.
992    ///
993    /// This will validate that `table_index` is valid and a funcref table. It
994    /// will additionally pop the index argument which is used to index into the
995    /// table.
996    ///
997    /// The return value of this function is the function type behind
998    /// `type_index` which must then be passed to `check_{call,return_call}_ty`.
999    fn check_call_indirect_ty(
1000        &mut self,
1001        type_index: u32,
1002        table_index: u32,
1003    ) -> Result<&'resources FuncType> {
1004        let tab = self.table_type_at(table_index)?;
1005        if !self
1006            .resources
1007            .is_subtype(ValType::Ref(tab.element_type), ValType::FUNCREF)
1008        {
1009            bail!(
1010                self.offset,
1011                "indirect calls must go through a table with type <= funcref",
1012            );
1013        }
1014        self.pop_operand(Some(tab.index_type()))?;
1015        self.func_type_at(type_index)
1016    }
1017
1018    /// Validates a `return` instruction, popping types from the operand
1019    /// stack that the function needs.
1020    fn check_return(&mut self) -> Result<()> {
1021        if self.control.is_empty() {
1022            return Err(self.err_beyond_end(self.offset));
1023        }
1024        for ty in self.results(self.control[0].block_type)?.rev() {
1025            self.pop_operand(Some(ty))?;
1026        }
1027        self.unreachable()?;
1028        Ok(())
1029    }
1030
    /// Check that the given type has the same result types as the current
    /// function's results.
    ///
    /// Used by tail-call validation: each callee result must be a subtype of
    /// the caller's corresponding result, and the counts must match.
    fn check_func_type_same_results(&self, callee_ty: &FuncType) -> Result<()> {
        if self.control.is_empty() {
            return Err(self.err_beyond_end(self.offset));
        }
        // `control[0]` is the function's outermost frame, so its block type
        // carries the caller's result signature.
        let caller_rets = self.results(self.control[0].block_type)?;
        if callee_ty.results().len() != caller_rets.len()
            || !caller_rets
                .zip(callee_ty.results())
                .all(|(caller_ty, callee_ty)| self.resources.is_subtype(*callee_ty, caller_ty))
        {
            // Mismatch: re-fetch the iterator (it was consumed above) and
            // render both signatures for the error message.
            let caller_rets = self
                .results(self.control[0].block_type)?
                .map(|ty| format!("{ty}"))
                .collect::<Vec<_>>()
                .join(" ");
            let callee_rets = callee_ty
                .results()
                .iter()
                .map(|ty| format!("{ty}"))
                .collect::<Vec<_>>()
                .join(" ");
            bail!(
                self.offset,
                "type mismatch: current function requires result type \
                 [{caller_rets}] but callee returns [{callee_rets}]"
            );
        }
        Ok(())
    }
1062
1063    /// Checks the validity of a common comparison operator.
1064    fn check_cmp_op(&mut self, ty: ValType) -> Result<()> {
1065        self.pop_operand(Some(ty))?;
1066        self.pop_operand(Some(ty))?;
1067        self.push_operand(ValType::I32)?;
1068        Ok(())
1069    }
1070
1071    /// Checks the validity of a common float comparison operator.
1072    fn check_fcmp_op(&mut self, ty: ValType) -> Result<()> {
1073        debug_assert!(matches!(ty, ValType::F32 | ValType::F64));
1074        self.check_floats_enabled()?;
1075        self.check_cmp_op(ty)
1076    }
1077
1078    /// Checks the validity of a common unary operator.
1079    fn check_unary_op(&mut self, ty: ValType) -> Result<()> {
1080        self.pop_operand(Some(ty))?;
1081        self.push_operand(ty)?;
1082        Ok(())
1083    }
1084
1085    /// Checks the validity of a common unary float operator.
1086    fn check_funary_op(&mut self, ty: ValType) -> Result<()> {
1087        debug_assert!(matches!(ty, ValType::F32 | ValType::F64));
1088        self.check_floats_enabled()?;
1089        self.check_unary_op(ty)
1090    }
1091
1092    /// Checks the validity of a common conversion operator.
1093    fn check_conversion_op(&mut self, into: ValType, from: ValType) -> Result<()> {
1094        self.pop_operand(Some(from))?;
1095        self.push_operand(into)?;
1096        Ok(())
1097    }
1098
1099    /// Checks the validity of a common float conversion operator.
1100    fn check_fconversion_op(&mut self, into: ValType, from: ValType) -> Result<()> {
1101        debug_assert!(matches!(into, ValType::F32 | ValType::F64));
1102        self.check_floats_enabled()?;
1103        self.check_conversion_op(into, from)
1104    }
1105
1106    /// Checks the validity of a common binary operator.
1107    fn check_binary_op(&mut self, ty: ValType) -> Result<()> {
1108        self.pop_operand(Some(ty))?;
1109        self.pop_operand(Some(ty))?;
1110        self.push_operand(ty)?;
1111        Ok(())
1112    }
1113
1114    /// Checks the validity of a common binary float operator.
1115    fn check_fbinary_op(&mut self, ty: ValType) -> Result<()> {
1116        debug_assert!(matches!(ty, ValType::F32 | ValType::F64));
1117        self.check_floats_enabled()?;
1118        self.check_binary_op(ty)
1119    }
1120
1121    /// Checks the validity of an atomic load operator.
1122    fn check_atomic_load(&mut self, memarg: MemArg, load_ty: ValType) -> Result<()> {
1123        let ty = self.check_shared_memarg(memarg)?;
1124        self.pop_operand(Some(ty))?;
1125        self.push_operand(load_ty)?;
1126        Ok(())
1127    }
1128
1129    /// Checks the validity of an atomic store operator.
1130    fn check_atomic_store(&mut self, memarg: MemArg, store_ty: ValType) -> Result<()> {
1131        let ty = self.check_shared_memarg(memarg)?;
1132        self.pop_operand(Some(store_ty))?;
1133        self.pop_operand(Some(ty))?;
1134        Ok(())
1135    }
1136
1137    /// Checks the validity of atomic binary operator on memory.
1138    fn check_atomic_binary_memory_op(&mut self, memarg: MemArg, op_ty: ValType) -> Result<()> {
1139        let ty = self.check_shared_memarg(memarg)?;
1140        self.pop_operand(Some(op_ty))?;
1141        self.pop_operand(Some(ty))?;
1142        self.push_operand(op_ty)?;
1143        Ok(())
1144    }
1145
1146    /// Checks the validity of an atomic compare exchange operator on memories.
1147    fn check_atomic_binary_memory_cmpxchg(&mut self, memarg: MemArg, op_ty: ValType) -> Result<()> {
1148        let ty = self.check_shared_memarg(memarg)?;
1149        self.pop_operand(Some(op_ty))?;
1150        self.pop_operand(Some(op_ty))?;
1151        self.pop_operand(Some(ty))?;
1152        self.push_operand(op_ty)?;
1153        Ok(())
1154    }
1155
1156    /// Checks a [`V128`] splat operator.
1157    fn check_v128_splat(&mut self, src_ty: ValType) -> Result<()> {
1158        self.pop_operand(Some(src_ty))?;
1159        self.push_operand(ValType::V128)?;
1160        Ok(())
1161    }
1162
1163    /// Checks a [`V128`] binary operator.
1164    fn check_v128_binary_op(&mut self) -> Result<()> {
1165        self.pop_operand(Some(ValType::V128))?;
1166        self.pop_operand(Some(ValType::V128))?;
1167        self.push_operand(ValType::V128)?;
1168        Ok(())
1169    }
1170
    /// Checks a [`V128`] binary float operator; identical to
    /// `check_v128_binary_op` but requires float support to be enabled.
    fn check_v128_fbinary_op(&mut self) -> Result<()> {
        self.check_floats_enabled()?;
        self.check_v128_binary_op()
    }
1176
1177    /// Checks a [`V128`] unary operator.
1178    fn check_v128_unary_op(&mut self) -> Result<()> {
1179        self.pop_operand(Some(ValType::V128))?;
1180        self.push_operand(ValType::V128)?;
1181        Ok(())
1182    }
1183
    /// Checks a [`V128`] unary float operator; identical to
    /// `check_v128_unary_op` but requires float support to be enabled.
    fn check_v128_funary_op(&mut self) -> Result<()> {
        self.check_floats_enabled()?;
        self.check_v128_unary_op()
    }
1189
1190    /// Checks a [`V128`] relaxed ternary operator.
1191    fn check_v128_ternary_op(&mut self) -> Result<()> {
1192        self.pop_operand(Some(ValType::V128))?;
1193        self.pop_operand(Some(ValType::V128))?;
1194        self.pop_operand(Some(ValType::V128))?;
1195        self.push_operand(ValType::V128)?;
1196        Ok(())
1197    }
1198
1199    /// Checks a [`V128`] test operator.
1200    fn check_v128_bitmask_op(&mut self) -> Result<()> {
1201        self.pop_operand(Some(ValType::V128))?;
1202        self.push_operand(ValType::I32)?;
1203        Ok(())
1204    }
1205
1206    /// Checks a [`V128`] shift operator.
1207    fn check_v128_shift_op(&mut self) -> Result<()> {
1208        self.pop_operand(Some(ValType::I32))?;
1209        self.pop_operand(Some(ValType::V128))?;
1210        self.push_operand(ValType::V128)?;
1211        Ok(())
1212    }
1213
1214    /// Checks a [`V128`] common load operator.
1215    fn check_v128_load_op(&mut self, memarg: MemArg) -> Result<()> {
1216        let idx = self.check_memarg(memarg)?;
1217        self.pop_operand(Some(idx))?;
1218        self.push_operand(ValType::V128)?;
1219        Ok(())
1220    }
1221
    /// Common helper for `ref.test` and `ref.cast` downcasting/checking
    /// instructions. Returns the given `heap_type` as a `RefType`.
    fn check_downcast(&mut self, nullable: bool, mut heap_type: HeapType) -> Result<RefType> {
        // Canonicalize/validate the heap type in place.
        self.resources
            .check_heap_type(&mut heap_type, self.offset)?;

        // The target of the downcast: `(ref null? heap_type)`.
        let sub_ty = RefType::new(nullable, heap_type).ok_or_else(|| {
            BinaryReaderError::new("implementation limit: type index too large", self.offset)
        })?;
        // The operand must come from the same hierarchy, i.e. be a subtype of
        // the (nullable) top type of `heap_type`'s hierarchy.
        let sup_ty = RefType::new(true, self.resources.top_type(&heap_type))
            .expect("can't panic with non-concrete heap types");

        self.pop_ref(Some(sup_ty))?;
        Ok(sub_ty)
    }
1237
1238    /// Common helper for both nullable and non-nullable variants of `ref.test`
1239    /// instructions.
1240    fn check_ref_test(&mut self, nullable: bool, heap_type: HeapType) -> Result<()> {
1241        self.check_downcast(nullable, heap_type)?;
1242        self.push_operand(ValType::I32)
1243    }
1244
1245    /// Common helper for both nullable and non-nullable variants of `ref.cast`
1246    /// instructions.
1247    fn check_ref_cast(&mut self, nullable: bool, heap_type: HeapType) -> Result<()> {
1248        let sub_ty = self.check_downcast(nullable, heap_type)?;
1249        self.push_operand(sub_ty)
1250    }
1251
1252    /// Common helper for checking the types of globals accessed with atomic RMW
1253    /// instructions, which only allow `i32` and `i64`.
1254    fn check_atomic_global_rmw_ty(&self, global_index: u32) -> Result<ValType> {
1255        let ty = self.global_type_at(global_index)?.content_type;
1256        if !(ty == ValType::I32 || ty == ValType::I64) {
1257            bail!(
1258                self.offset,
1259                "invalid type: `global.atomic.rmw.*` only allows `i32` and `i64`"
1260            );
1261        }
1262        Ok(ty)
1263    }
1264
1265    /// Common helper for checking the types of structs accessed with atomic RMW
1266    /// instructions, which only allow `i32` and `i64` types.
1267    fn check_struct_atomic_rmw(
1268        &mut self,
1269        op: &'static str,
1270        struct_type_index: u32,
1271        field_index: u32,
1272    ) -> Result<()> {
1273        let field = self.mutable_struct_field_at(struct_type_index, field_index)?;
1274        let field_ty = match field.element_type {
1275            StorageType::Val(ValType::I32) => ValType::I32,
1276            StorageType::Val(ValType::I64) => ValType::I64,
1277            _ => bail!(
1278                self.offset,
1279                "invalid type: `struct.atomic.rmw.{}` only allows `i32` and `i64`",
1280                op
1281            ),
1282        };
1283        self.pop_operand(Some(field_ty))?;
1284        self.pop_concrete_ref(true, struct_type_index)?;
1285        self.push_operand(field_ty)?;
1286        Ok(())
1287    }
1288
1289    /// Common helper for checking the types of arrays accessed with atomic RMW
1290    /// instructions, which only allow `i32` and `i64`.
1291    fn check_array_atomic_rmw(&mut self, op: &'static str, type_index: u32) -> Result<()> {
1292        let field = self.mutable_array_type_at(type_index)?;
1293        let elem_ty = match field.element_type {
1294            StorageType::Val(ValType::I32) => ValType::I32,
1295            StorageType::Val(ValType::I64) => ValType::I64,
1296            _ => bail!(
1297                self.offset,
1298                "invalid type: `array.atomic.rmw.{}` only allows `i32` and `i64`",
1299                op
1300            ),
1301        };
1302        self.pop_operand(Some(elem_ty))?;
1303        self.pop_operand(Some(ValType::I32))?;
1304        self.pop_concrete_ref(true, type_index)?;
1305        self.push_operand(elem_ty)?;
1306        Ok(())
1307    }
1308
1309    fn element_type_at(&self, elem_index: u32) -> Result<RefType> {
1310        match self.resources.element_type_at(elem_index) {
1311            Some(ty) => Ok(ty),
1312            None => bail!(
1313                self.offset,
1314                "unknown elem segment {}: segment index out of bounds",
1315                elem_index
1316            ),
1317        }
1318    }
1319
1320    fn sub_type_at(&self, at: u32) -> Result<&'resources SubType> {
1321        self.resources
1322            .sub_type_at(at)
1323            .ok_or_else(|| format_err!(self.offset, "unknown type: type index out of bounds"))
1324    }
1325
1326    fn struct_type_at(&self, at: u32) -> Result<&'resources StructType> {
1327        let sub_ty = self.sub_type_at(at)?;
1328        if let CompositeInnerType::Struct(struct_ty) = &sub_ty.composite_type.inner {
1329            if self.inner.shared && !sub_ty.composite_type.shared {
1330                bail!(
1331                    self.offset,
1332                    "shared functions cannot access unshared structs",
1333                );
1334            }
1335            Ok(struct_ty)
1336        } else {
1337            bail!(
1338                self.offset,
1339                "expected struct type at index {at}, found {sub_ty}"
1340            )
1341        }
1342    }
1343
1344    fn struct_field_at(&self, struct_type_index: u32, field_index: u32) -> Result<FieldType> {
1345        let field_index = usize::try_from(field_index).map_err(|_| {
1346            BinaryReaderError::new("unknown field: field index out of bounds", self.offset)
1347        })?;
1348        self.struct_type_at(struct_type_index)?
1349            .fields
1350            .get(field_index)
1351            .copied()
1352            .ok_or_else(|| {
1353                BinaryReaderError::new("unknown field: field index out of bounds", self.offset)
1354            })
1355    }
1356
1357    fn mutable_struct_field_at(
1358        &self,
1359        struct_type_index: u32,
1360        field_index: u32,
1361    ) -> Result<FieldType> {
1362        let field = self.struct_field_at(struct_type_index, field_index)?;
1363        if !field.mutable {
1364            bail!(
1365                self.offset,
1366                "invalid struct modification: struct field is immutable"
1367            )
1368        }
1369        Ok(field)
1370    }
1371
1372    fn array_type_at(&self, at: u32) -> Result<FieldType> {
1373        let sub_ty = self.sub_type_at(at)?;
1374        if let CompositeInnerType::Array(array_ty) = &sub_ty.composite_type.inner {
1375            if self.inner.shared && !sub_ty.composite_type.shared {
1376                bail!(
1377                    self.offset,
1378                    "shared functions cannot access unshared arrays",
1379                );
1380            }
1381            Ok(array_ty.0)
1382        } else {
1383            bail!(
1384                self.offset,
1385                "expected array type at index {at}, found {sub_ty}"
1386            )
1387        }
1388    }
1389
1390    fn mutable_array_type_at(&self, at: u32) -> Result<FieldType> {
1391        let field = self.array_type_at(at)?;
1392        if !field.mutable {
1393            bail!(
1394                self.offset,
1395                "invalid array modification: array is immutable"
1396            )
1397        }
1398        Ok(field)
1399    }
1400
1401    fn func_type_at(&self, at: u32) -> Result<&'resources FuncType> {
1402        let sub_ty = self.sub_type_at(at)?;
1403        if let CompositeInnerType::Func(func_ty) = &sub_ty.composite_type.inner {
1404            if self.inner.shared && !sub_ty.composite_type.shared {
1405                bail!(
1406                    self.offset,
1407                    "shared functions cannot access unshared functions",
1408                );
1409            }
1410            Ok(func_ty)
1411        } else {
1412            bail!(
1413                self.offset,
1414                "expected func type at index {at}, found {sub_ty}"
1415            )
1416        }
1417    }
1418
1419    fn tag_at(&self, at: u32) -> Result<&'resources FuncType> {
1420        self.resources
1421            .tag_at(at)
1422            .ok_or_else(|| format_err!(self.offset, "unknown tag {}: tag index out of bounds", at))
1423    }
1424
1425    fn global_type_at(&self, at: u32) -> Result<GlobalType> {
1426        if let Some(ty) = self.resources.global_at(at) {
1427            if self.inner.shared && !ty.shared {
1428                bail!(
1429                    self.offset,
1430                    "shared functions cannot access unshared globals",
1431                );
1432            }
1433            Ok(ty)
1434        } else {
1435            bail!(self.offset, "unknown global: global index out of bounds");
1436        }
1437    }
1438
1439    /// Validates that the `table` is valid and returns the type it points to.
1440    fn table_type_at(&self, table: u32) -> Result<TableType> {
1441        match self.resources.table_at(table) {
1442            Some(ty) => {
1443                if self.inner.shared && !ty.shared {
1444                    bail!(
1445                        self.offset,
1446                        "shared functions cannot access unshared tables",
1447                    );
1448                }
1449                Ok(ty)
1450            }
1451            None => bail!(
1452                self.offset,
1453                "unknown table {table}: table index out of bounds"
1454            ),
1455        }
1456    }
1457
1458    fn params(&self, ty: BlockType) -> Result<impl PreciseIterator<Item = ValType> + 'resources> {
1459        Ok(match ty {
1460            BlockType::Empty | BlockType::Type(_) => Either::B(None.into_iter()),
1461            BlockType::FuncType(t) => Either::A(self.func_type_at(t)?.params().iter().copied()),
1462        })
1463    }
1464
1465    fn results(&self, ty: BlockType) -> Result<impl PreciseIterator<Item = ValType> + 'resources> {
1466        Ok(match ty {
1467            BlockType::Empty => Either::B(None.into_iter()),
1468            BlockType::Type(t) => Either::B(Some(t).into_iter()),
1469            BlockType::FuncType(t) => Either::A(self.func_type_at(t)?.results().iter().copied()),
1470        })
1471    }
1472
1473    fn label_types(
1474        &self,
1475        ty: BlockType,
1476        kind: FrameKind,
1477    ) -> Result<impl PreciseIterator<Item = ValType> + 'resources> {
1478        Ok(match kind {
1479            FrameKind::Loop => Either::A(self.params(ty)?),
1480            _ => Either::B(self.results(ty)?),
1481        })
1482    }
1483
1484    fn check_data_segment(&self, data_index: u32) -> Result<()> {
1485        match self.resources.data_count() {
1486            None => bail!(self.offset, "data count section required"),
1487            Some(count) if data_index < count => Ok(()),
1488            Some(_) => bail!(self.offset, "unknown data segment {data_index}"),
1489        }
1490    }
1491}
1492
1493pub fn ty_to_str(ty: ValType) -> &'static str {
1494    match ty {
1495        ValType::I32 => "i32",
1496        ValType::I64 => "i64",
1497        ValType::F32 => "f32",
1498        ValType::F64 => "f64",
1499        ValType::V128 => "v128",
1500        ValType::Ref(r) => r.wat(),
1501    }
1502}
1503
/// A wrapper "visitor" around the real operator validator internally which
/// exists to check that the required wasm feature is enabled to proceed with
/// validation.
///
/// This validator is macro-generated to ensure that the proposal listed in this
/// crate's macro matches the one that's validated here. Each instruction's
/// visit method validates the specified proposal is enabled and then delegates
/// to `OperatorValidatorTemp` to perform the actual opcode validation.
//
// Newtype over the real validator; all state lives in field `0`.
struct WasmProposalValidator<'validator, 'resources, T>(
    OperatorValidatorTemp<'validator, 'resources, T>,
);
1515
1516impl<T> WasmProposalValidator<'_, '_, T> {
1517    fn check_enabled(&self, flag: bool, desc: &str) -> Result<()> {
1518        if flag {
1519            return Ok(());
1520        }
1521        bail!(self.0.offset, "{desc} support is not enabled");
1522    }
1523}
1524
/// Helper macro passed to `for_each_operator!` to generate one `visit_*`
/// method per wasm operator for `WasmProposalValidator`.
///
/// Each generated method first verifies that the feature flag gating the
/// operator's proposal is enabled (MVP operators are unconditionally
/// allowed) and then delegates to the inner validator's method of the same
/// name.
macro_rules! validate_proposal {
    ($( @$proposal:ident $op:ident $({ $($arg:ident: $argty:ty),* })? => $visit:ident)*) => {
        $(
            fn $visit(&mut self $($(,$arg: $argty)*)?) -> Result<()> {
                validate_proposal!(validate self $proposal);
                self.0.$visit($( $($arg),* )?)
            }
        )*
    };

    // MVP instructions need no feature check.
    (validate self mvp) => {};
    // All other proposals consult the matching `WasmFeatures` accessor.
    (validate $self:ident $proposal:ident) => {
        $self.check_enabled($self.0.features.$proposal(), validate_proposal!(desc $proposal))?
    };

    // Human-readable proposal names used in the "not enabled" error message.
    (desc simd) => ("SIMD");
    (desc relaxed_simd) => ("relaxed SIMD");
    (desc threads) => ("threads");
    (desc shared_everything_threads) => ("shared-everything-threads");
    (desc saturating_float_to_int) => ("saturating float to int conversions");
    (desc reference_types) => ("reference types");
    (desc bulk_memory) => ("bulk memory");
    (desc sign_extension) => ("sign extension operations");
    (desc exceptions) => ("exceptions");
    (desc tail_call) => ("tail calls");
    (desc function_references) => ("function references");
    (desc memory_control) => ("memory control");
    (desc gc) => ("gc");
    (desc legacy_exceptions) => ("legacy exceptions");
}
1555
impl<'a, T> VisitOperator<'a> for WasmProposalValidator<'_, '_, T>
where
    T: WasmModuleResources,
{
    type Output = Result<()>;

    // Expands one feature-gated `visit_*` method per wasm operator.
    for_each_operator!(validate_proposal);
}
1564
1565#[track_caller]
1566#[inline]
1567fn debug_assert_type_indices_are_ids(ty: ValType) {
1568    if cfg!(debug_assertions) {
1569        if let ValType::Ref(r) = ty {
1570            if let HeapType::Concrete(idx) = r.heap_type() {
1571                debug_assert!(
1572                    matches!(idx, UnpackedIndex::Id(_)),
1573                    "type reference should be a `CoreTypeId`, found {idx:?}"
1574                );
1575            }
1576        }
1577    }
1578}
1579
1580impl<'a, T> VisitOperator<'a> for OperatorValidatorTemp<'_, '_, T>
1581where
1582    T: WasmModuleResources,
1583{
1584    type Output = Result<()>;
1585
    fn visit_nop(&mut self) -> Self::Output {
        // `nop` has no operands and no results; nothing to validate.
        Ok(())
    }
1589    fn visit_unreachable(&mut self) -> Self::Output {
1590        self.unreachable()?;
1591        Ok(())
1592    }
1593    fn visit_block(&mut self, mut ty: BlockType) -> Self::Output {
1594        self.check_block_type(&mut ty)?;
1595        for ty in self.params(ty)?.rev() {
1596            self.pop_operand(Some(ty))?;
1597        }
1598        self.push_ctrl(FrameKind::Block, ty)?;
1599        Ok(())
1600    }
1601    fn visit_loop(&mut self, mut ty: BlockType) -> Self::Output {
1602        self.check_block_type(&mut ty)?;
1603        for ty in self.params(ty)?.rev() {
1604            self.pop_operand(Some(ty))?;
1605        }
1606        self.push_ctrl(FrameKind::Loop, ty)?;
1607        Ok(())
1608    }
1609    fn visit_if(&mut self, mut ty: BlockType) -> Self::Output {
1610        self.check_block_type(&mut ty)?;
1611        self.pop_operand(Some(ValType::I32))?;
1612        for ty in self.params(ty)?.rev() {
1613            self.pop_operand(Some(ty))?;
1614        }
1615        self.push_ctrl(FrameKind::If, ty)?;
1616        Ok(())
1617    }
1618    fn visit_else(&mut self) -> Self::Output {
1619        let frame = self.pop_ctrl()?;
1620        if frame.kind != FrameKind::If {
1621            bail!(self.offset, "else found outside of an `if` block");
1622        }
1623        self.push_ctrl(FrameKind::Else, frame.block_type)?;
1624        Ok(())
1625    }
1626    fn visit_try_table(&mut self, mut ty: TryTable) -> Self::Output {
1627        self.check_block_type(&mut ty.ty)?;
1628        for ty in self.params(ty.ty)?.rev() {
1629            self.pop_operand(Some(ty))?;
1630        }
1631        let exn_type = ValType::from(RefType::EXN);
1632        for catch in ty.catches {
1633            match catch {
1634                Catch::One { tag, label } => {
1635                    let tag = self.tag_at(tag)?;
1636                    let (ty, kind) = self.jump(label)?;
1637                    let params = tag.params();
1638                    let types = self.label_types(ty, kind)?;
1639                    if params.len() != types.len() {
1640                        bail!(
1641                            self.offset,
1642                            "type mismatch: catch label must have same number of types as tag"
1643                        );
1644                    }
1645                    for (expected, actual) in types.zip(params) {
1646                        self.push_operand(*actual)?;
1647                        self.pop_operand(Some(expected))?;
1648                    }
1649                }
1650                Catch::OneRef { tag, label } => {
1651                    let tag = self.tag_at(tag)?;
1652                    let (ty, kind) = self.jump(label)?;
1653                    let tag_params = tag.params().iter().copied();
1654                    let label_types = self.label_types(ty, kind)?;
1655                    if tag_params.len() + 1 != label_types.len() {
1656                        bail!(
1657                            self.offset,
1658                            "type mismatch: catch_ref label must have one \
1659                             more type than tag types",
1660                        );
1661                    }
1662                    for (expected_label_tyep, actual_tag_param) in
1663                        label_types.zip(tag_params.chain([exn_type]))
1664                    {
1665                        self.push_operand(actual_tag_param)?;
1666                        self.pop_operand(Some(expected_label_tyep))?;
1667                    }
1668                }
1669
1670                Catch::All { label } => {
1671                    let (ty, kind) = self.jump(label)?;
1672                    if self.label_types(ty, kind)?.len() != 0 {
1673                        bail!(
1674                            self.offset,
1675                            "type mismatch: catch_all label must have no result types"
1676                        );
1677                    }
1678                }
1679
1680                Catch::AllRef { label } => {
1681                    let (ty, kind) = self.jump(label)?;
1682                    let mut types = self.label_types(ty, kind)?;
1683                    let ty = match (types.next(), types.next()) {
1684                        (Some(ty), None) => ty,
1685                        _ => {
1686                            bail!(
1687                                self.offset,
1688                                "type mismatch: catch_all_ref label must have \
1689                                 exactly one result type"
1690                            );
1691                        }
1692                    };
1693                    if !self.resources.is_subtype(exn_type, ty) {
1694                        bail!(
1695                            self.offset,
1696                            "type mismatch: catch_all_ref label must a \
1697                             subtype of (ref exn)"
1698                        );
1699                    }
1700                }
1701            }
1702        }
1703        self.push_ctrl(FrameKind::TryTable, ty.ty)?;
1704        Ok(())
1705    }
    /// Validates `throw`: pops the tag's parameter types and leaves the
    /// stack polymorphic (a throw never falls through).
    fn visit_throw(&mut self, index: u32) -> Self::Output {
        // Check values associated with the exception.
        let ty = self.tag_at(index)?;
        // Pop the tag's parameters in reverse, i.e. stack order. The clone
        // releases the borrow of `self` so `pop_operand` can be called below.
        for ty in ty.clone().params().iter().rev() {
            self.pop_operand(Some(*ty))?;
        }
        // this should be validated when the tag was defined in the module
        debug_assert!(ty.results().is_empty());
        self.unreachable()?;
        Ok(())
    }
    /// Validates `throw_ref`: pops an `exnref` and leaves the stack
    /// polymorphic.
    fn visit_throw_ref(&mut self) -> Self::Output {
        self.pop_operand(Some(ValType::EXNREF))?;
        self.unreachable()?;
        Ok(())
    }
    /// Validates `end`: pops the current control frame and pushes the
    /// frame's result types back onto the operand stack.
    fn visit_end(&mut self) -> Self::Output {
        let mut frame = self.pop_ctrl()?;

        // Note that this `if` isn't included in the appendix right
        // now, but it's used to allow for `if` statements that are
        // missing an `else` block which have the same parameter/return
        // types on the block (since that's valid).
        if frame.kind == FrameKind::If {
            self.push_ctrl(FrameKind::Else, frame.block_type)?;
            frame = self.pop_ctrl()?;
        }
        for ty in self.results(frame.block_type)? {
            self.push_operand(ty)?;
        }

        // Remember the offset of the `end` that emptied the control stack,
        // i.e. the end of the function body.
        if self.control.is_empty() && self.end_which_emptied_control.is_none() {
            assert_ne!(self.offset, 0);
            self.end_which_emptied_control = Some(self.offset);
        }
        Ok(())
    }
    /// Validates `br`: pops the target label's types in stack order; the
    /// stack then becomes polymorphic (an unconditional branch never falls
    /// through).
    fn visit_br(&mut self, relative_depth: u32) -> Self::Output {
        let (ty, kind) = self.jump(relative_depth)?;
        for ty in self.label_types(ty, kind)?.rev() {
            self.pop_operand(Some(ty))?;
        }
        self.unreachable()?;
        Ok(())
    }
    /// Validates `br_if`: pops the `i32` condition, then checks the label
    /// types against the stack, leaving them in place for the fallthrough
    /// path.
    fn visit_br_if(&mut self, relative_depth: u32) -> Self::Output {
        self.pop_operand(Some(ValType::I32))?;
        let (ty, kind) = self.jump(relative_depth)?;
        let label_types = self.label_types(ty, kind)?;
        self.pop_push_label_types(label_types)?;
        Ok(())
    }
    /// Validates `br_table`: pops the `i32` selector, checks every target
    /// label against the default label's arity, then pops the default
    /// label's types; the stack becomes polymorphic afterwards.
    fn visit_br_table(&mut self, table: BrTable) -> Self::Output {
        self.pop_operand(Some(ValType::I32))?;
        let default = self.jump(table.default())?;
        let default_types = self.label_types(default.0, default.1)?;
        for element in table.targets() {
            let relative_depth = element?;
            let block = self.jump(relative_depth)?;
            let label_tys = self.label_types(block.0, block.1)?;
            if label_tys.len() != default_types.len() {
                bail!(
                    self.offset,
                    "type mismatch: br_table target labels have different number of types"
                );
            }

            // Typecheck this target's label types by popping them, stashing
            // the actual operand types in `popped_types_tmp`, then pushing
            // them back so the next target can be checked against the same
            // operands.
            debug_assert!(self.popped_types_tmp.is_empty());
            self.popped_types_tmp.reserve(label_tys.len());
            for expected_ty in label_tys.rev() {
                let actual_ty = self.pop_operand(Some(expected_ty))?;
                self.popped_types_tmp.push(actual_ty);
            }
            // Restore in original (bottom-up) order via the inner validator's
            // operand stack directly.
            for ty in self.inner.popped_types_tmp.drain(..).rev() {
                self.inner.operands.push(ty.into());
            }
        }
        for ty in default_types.rev() {
            self.pop_operand(Some(ty))?;
        }
        self.unreachable()?;
        Ok(())
    }
    /// Validates `return` by delegating to the shared return check.
    fn visit_return(&mut self) -> Self::Output {
        self.check_return()?;
        Ok(())
    }
    // The `call` family below resolves the callee's function type and then
    // delegates to `check_call_ty`, or `check_return_call_ty` for the tail
    //-call variants.
    fn visit_call(&mut self, function_index: u32) -> Self::Output {
        let ty = self.type_of_function(function_index)?;
        self.check_call_ty(ty)?;
        Ok(())
    }
    fn visit_return_call(&mut self, function_index: u32) -> Self::Output {
        let ty = self.type_of_function(function_index)?;
        self.check_return_call_ty(ty)?;
        Ok(())
    }
    fn visit_call_ref(&mut self, type_index: u32) -> Self::Output {
        let ty = self.check_call_ref_ty(type_index)?;
        self.check_call_ty(ty)?;
        Ok(())
    }
    fn visit_return_call_ref(&mut self, type_index: u32) -> Self::Output {
        let ty = self.check_call_ref_ty(type_index)?;
        self.check_return_call_ty(ty)?;
        Ok(())
    }
    fn visit_call_indirect(&mut self, type_index: u32, table_index: u32) -> Self::Output {
        let ty = self.check_call_indirect_ty(type_index, table_index)?;
        self.check_call_ty(ty)?;
        Ok(())
    }
    fn visit_return_call_indirect(&mut self, type_index: u32, table_index: u32) -> Self::Output {
        let ty = self.check_call_indirect_ty(type_index, table_index)?;
        self.check_return_call_ty(ty)?;
        Ok(())
    }
    /// Validates `drop`: pops one operand of any type.
    fn visit_drop(&mut self) -> Self::Output {
        self.pop_operand(None)?;
        Ok(())
    }
    /// Validates untyped `select`: pops the `i32` condition and two operands
    /// which must agree on a single non-reference type; that type is pushed
    /// as the result. Reference types require the typed `select` form.
    fn visit_select(&mut self) -> Self::Output {
        self.pop_operand(Some(ValType::I32))?;
        let ty1 = self.pop_operand(None)?;
        let ty2 = self.pop_operand(None)?;

        let ty = match (ty1, ty2) {
            // All heap-related types aren't allowed with the `select`
            // instruction
            (MaybeType::UnknownRef(..), _)
            | (_, MaybeType::UnknownRef(..))
            | (MaybeType::Known(ValType::Ref(_)), _)
            | (_, MaybeType::Known(ValType::Ref(_))) => {
                bail!(
                    self.offset,
                    "type mismatch: select only takes integral types"
                )
            }

            // If one operand is the "bottom" type then whatever the other
            // operand is is the result of the `select`
            (MaybeType::Bottom, t) | (t, MaybeType::Bottom) => t,

            // Otherwise these are two integral types and they must match for
            // `select` to typecheck.
            (t @ MaybeType::Known(t1), MaybeType::Known(t2)) => {
                if t1 != t2 {
                    bail!(
                        self.offset,
                        "type mismatch: select operands have different types"
                    );
                }
                t
            }
        };
        self.push_operand(ty)?;
        Ok(())
    }
1864    fn visit_typed_select(&mut self, mut ty: ValType) -> Self::Output {
1865        self.resources
1866            .check_value_type(&mut ty, &self.features, self.offset)?;
1867        self.pop_operand(Some(ValType::I32))?;
1868        self.pop_operand(Some(ty))?;
1869        self.pop_operand(Some(ty))?;
1870        self.push_operand(ty)?;
1871        Ok(())
1872    }
    /// Validates `local.get`: bails if the local has not been initialized
    /// yet (per `local_inits`), otherwise pushes the local's type.
    fn visit_local_get(&mut self, local_index: u32) -> Self::Output {
        let ty = self.local(local_index)?;
        debug_assert_type_indices_are_ids(ty);
        if !self.local_inits[local_index as usize] {
            bail!(self.offset, "uninitialized local: {}", local_index);
        }
        self.push_operand(ty)?;
        Ok(())
    }
    /// Validates `local.set`: pops a value of the local's type and marks
    /// the local as initialized, recording it in `inits` for later reset.
    fn visit_local_set(&mut self, local_index: u32) -> Self::Output {
        let ty = self.local(local_index)?;
        self.pop_operand(Some(ty))?;
        if !self.local_inits[local_index as usize] {
            self.local_inits[local_index as usize] = true;
            self.inits.push(local_index);
        }
        Ok(())
    }
    /// Validates `local.tee`: like `local.set`, but the value is pushed
    /// back onto the stack.
    fn visit_local_tee(&mut self, local_index: u32) -> Self::Output {
        let expected_ty = self.local(local_index)?;
        self.pop_operand(Some(expected_ty))?;
        if !self.local_inits[local_index as usize] {
            self.local_inits[local_index as usize] = true;
            self.inits.push(local_index);
        }

        self.push_operand(expected_ty)?;
        Ok(())
    }
    /// Validates `global.get`: pushes the global's content type.
    fn visit_global_get(&mut self, global_index: u32) -> Self::Output {
        let ty = self.global_type_at(global_index)?.content_type;
        debug_assert_type_indices_are_ids(ty);
        self.push_operand(ty)?;
        Ok(())
    }
    /// Validates `global.atomic.get`: same as `global.get`, plus a
    /// restriction on which content types are allowed.
    fn visit_global_atomic_get(&mut self, _ordering: Ordering, global_index: u32) -> Self::Output {
        self.visit_global_get(global_index)?;
        // No validation of `ordering` is needed because `global.atomic.get` can
        // be used on both shared and unshared globals. But we do need to limit
        // which types can be used with this instruction.
        let ty = self.global_type_at(global_index)?.content_type;
        let supertype = RefType::ANYREF.into();
        if !(ty == ValType::I32 || ty == ValType::I64 || self.resources.is_subtype(ty, supertype)) {
            bail!(self.offset, "invalid type: `global.atomic.get` only allows `i32`, `i64` and subtypes of `anyref`");
        }
        Ok(())
    }
    /// Validates `global.set`: the global must be mutable; pops a value of
    /// the global's content type.
    fn visit_global_set(&mut self, global_index: u32) -> Self::Output {
        let ty = self.global_type_at(global_index)?;
        if !ty.mutable {
            bail!(
                self.offset,
                "global is immutable: cannot modify it with `global.set`"
            );
        }
        self.pop_operand(Some(ty.content_type))?;
        Ok(())
    }
    /// Validates `global.atomic.set`: same as `global.set`, plus a
    /// restriction on which content types are allowed.
    fn visit_global_atomic_set(&mut self, _ordering: Ordering, global_index: u32) -> Self::Output {
        self.visit_global_set(global_index)?;
        // No validation of `ordering` is needed because `global.atomic.set` can
        // be used on both shared and unshared globals.
        let ty = self.global_type_at(global_index)?.content_type;
        let supertype = RefType::ANYREF.into();
        if !(ty == ValType::I32 || ty == ValType::I64 || self.resources.is_subtype(ty, supertype)) {
            bail!(self.offset, "invalid type: `global.atomic.set` only allows `i32`, `i64` and subtypes of `anyref`");
        }
        Ok(())
    }
    // The `global.atomic.rmw.*` arithmetic/bitwise operators all validate
    // the global's type via `check_atomic_global_rmw_ty` and then check the
    // instruction as a unary operator on that type. `xchg` and `cmpxchg`
    // inline their own (wider / narrower) type restrictions below.
    fn visit_global_atomic_rmw_add(
        &mut self,
        _ordering: crate::Ordering,
        global_index: u32,
    ) -> Self::Output {
        let ty = self.check_atomic_global_rmw_ty(global_index)?;
        self.check_unary_op(ty)
    }
    fn visit_global_atomic_rmw_sub(
        &mut self,
        _ordering: crate::Ordering,
        global_index: u32,
    ) -> Self::Output {
        let ty = self.check_atomic_global_rmw_ty(global_index)?;
        self.check_unary_op(ty)
    }
    fn visit_global_atomic_rmw_and(
        &mut self,
        _ordering: crate::Ordering,
        global_index: u32,
    ) -> Self::Output {
        let ty = self.check_atomic_global_rmw_ty(global_index)?;
        self.check_unary_op(ty)
    }
    fn visit_global_atomic_rmw_or(
        &mut self,
        _ordering: crate::Ordering,
        global_index: u32,
    ) -> Self::Output {
        let ty = self.check_atomic_global_rmw_ty(global_index)?;
        self.check_unary_op(ty)
    }
    fn visit_global_atomic_rmw_xor(
        &mut self,
        _ordering: crate::Ordering,
        global_index: u32,
    ) -> Self::Output {
        let ty = self.check_atomic_global_rmw_ty(global_index)?;
        self.check_unary_op(ty)
    }
    /// `xchg` additionally permits subtypes of `anyref`.
    fn visit_global_atomic_rmw_xchg(
        &mut self,
        _ordering: crate::Ordering,
        global_index: u32,
    ) -> Self::Output {
        let ty = self.global_type_at(global_index)?.content_type;
        if !(ty == ValType::I32
            || ty == ValType::I64
            || self.resources.is_subtype(ty, RefType::ANYREF.into()))
        {
            bail!(self.offset, "invalid type: `global.atomic.rmw.xchg` only allows `i32`, `i64` and subtypes of `anyref`");
        }
        self.check_unary_op(ty)
    }
    /// `cmpxchg` requires comparability, so references are limited to
    /// subtypes of `eqref`; validated as a binary operator (two operands).
    fn visit_global_atomic_rmw_cmpxchg(
        &mut self,
        _ordering: crate::Ordering,
        global_index: u32,
    ) -> Self::Output {
        let ty = self.global_type_at(global_index)?.content_type;
        if !(ty == ValType::I32
            || ty == ValType::I64
            || self.resources.is_subtype(ty, RefType::EQREF.into()))
        {
            bail!(self.offset, "invalid type: `global.atomic.rmw.cmpxchg` only allows `i32`, `i64` and subtypes of `eqref`");
        }
        self.check_binary_op(ty)
    }
2010
    // Loads: `check_memarg` validates the memory argument and yields the
    // address operand's type, which is popped; the loaded value's type is
    // then pushed. The `_u` variants delegate to their `_s` counterparts
    // since validation is identical regardless of sign extension.
    fn visit_i32_load(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ty))?;
        self.push_operand(ValType::I32)?;
        Ok(())
    }
    fn visit_i64_load(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ty))?;
        self.push_operand(ValType::I64)?;
        Ok(())
    }
    fn visit_f32_load(&mut self, memarg: MemArg) -> Self::Output {
        self.check_floats_enabled()?;
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ty))?;
        self.push_operand(ValType::F32)?;
        Ok(())
    }
    fn visit_f64_load(&mut self, memarg: MemArg) -> Self::Output {
        self.check_floats_enabled()?;
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ty))?;
        self.push_operand(ValType::F64)?;
        Ok(())
    }
    fn visit_i32_load8_s(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ty))?;
        self.push_operand(ValType::I32)?;
        Ok(())
    }
    fn visit_i32_load8_u(&mut self, memarg: MemArg) -> Self::Output {
        self.visit_i32_load8_s(memarg)
    }
    fn visit_i32_load16_s(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ty))?;
        self.push_operand(ValType::I32)?;
        Ok(())
    }
    fn visit_i32_load16_u(&mut self, memarg: MemArg) -> Self::Output {
        self.visit_i32_load16_s(memarg)
    }
    fn visit_i64_load8_s(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ty))?;
        self.push_operand(ValType::I64)?;
        Ok(())
    }
    fn visit_i64_load8_u(&mut self, memarg: MemArg) -> Self::Output {
        self.visit_i64_load8_s(memarg)
    }
    fn visit_i64_load16_s(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ty))?;
        self.push_operand(ValType::I64)?;
        Ok(())
    }
    fn visit_i64_load16_u(&mut self, memarg: MemArg) -> Self::Output {
        self.visit_i64_load16_s(memarg)
    }
    fn visit_i64_load32_s(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ty))?;
        self.push_operand(ValType::I64)?;
        Ok(())
    }
    fn visit_i64_load32_u(&mut self, memarg: MemArg) -> Self::Output {
        self.visit_i64_load32_s(memarg)
    }
    // Stores: the value to store is on top of the stack (popped first),
    // followed by the address operand.
    fn visit_i32_store(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(ty))?;
        Ok(())
    }
    fn visit_i64_store(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ValType::I64))?;
        self.pop_operand(Some(ty))?;
        Ok(())
    }
    fn visit_f32_store(&mut self, memarg: MemArg) -> Self::Output {
        self.check_floats_enabled()?;
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ValType::F32))?;
        self.pop_operand(Some(ty))?;
        Ok(())
    }
    fn visit_f64_store(&mut self, memarg: MemArg) -> Self::Output {
        self.check_floats_enabled()?;
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ValType::F64))?;
        self.pop_operand(Some(ty))?;
        Ok(())
    }
    fn visit_i32_store8(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(ty))?;
        Ok(())
    }
    fn visit_i32_store16(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(ty))?;
        Ok(())
    }
    fn visit_i64_store8(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ValType::I64))?;
        self.pop_operand(Some(ty))?;
        Ok(())
    }
    fn visit_i64_store16(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ValType::I64))?;
        self.pop_operand(Some(ty))?;
        Ok(())
    }
    fn visit_i64_store32(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ValType::I64))?;
        self.pop_operand(Some(ty))?;
        Ok(())
    }
    /// `memory.size` pushes the memory's index type (i32, or i64 for
    /// 64-bit memories, per `check_memory_index`).
    fn visit_memory_size(&mut self, mem: u32) -> Self::Output {
        let index_ty = self.check_memory_index(mem)?;
        self.push_operand(index_ty)?;
        Ok(())
    }
    /// `memory.grow` pops the delta and pushes the previous size, both in
    /// the memory's index type.
    fn visit_memory_grow(&mut self, mem: u32) -> Self::Output {
        let index_ty = self.check_memory_index(mem)?;
        self.pop_operand(Some(index_ty))?;
        self.push_operand(index_ty)?;
        Ok(())
    }
    // Constants simply push their type; float constants additionally require
    // floats to be enabled.
    fn visit_i32_const(&mut self, _value: i32) -> Self::Output {
        self.push_operand(ValType::I32)?;
        Ok(())
    }
    fn visit_i64_const(&mut self, _value: i64) -> Self::Output {
        self.push_operand(ValType::I64)?;
        Ok(())
    }
    fn visit_f32_const(&mut self, _value: Ieee32) -> Self::Output {
        self.check_floats_enabled()?;
        self.push_operand(ValType::F32)?;
        Ok(())
    }
    fn visit_f64_const(&mut self, _value: Ieee64) -> Self::Output {
        self.check_floats_enabled()?;
        self.push_operand(ValType::F64)?;
        Ok(())
    }
    /// `i32.eqz` pops an `i32` and pushes the `i32` boolean result.
    fn visit_i32_eqz(&mut self) -> Self::Output {
        self.pop_operand(Some(ValType::I32))?;
        self.push_operand(ValType::I32)?;
        Ok(())
    }
    // Integer comparisons validate via `check_cmp_op` with the operand type;
    // float comparisons via `check_fcmp_op`.
    fn visit_i32_eq(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i32_ne(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i32_lt_s(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i32_lt_u(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i32_gt_s(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i32_gt_u(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i32_le_s(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i32_le_u(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i32_ge_s(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i32_ge_u(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    /// `i64.eqz` pops an `i64` and pushes the `i32` boolean result.
    fn visit_i64_eqz(&mut self) -> Self::Output {
        self.pop_operand(Some(ValType::I64))?;
        self.push_operand(ValType::I32)?;
        Ok(())
    }
    fn visit_i64_eq(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_i64_ne(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_i64_lt_s(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_i64_lt_u(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_i64_gt_s(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_i64_gt_u(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_i64_le_s(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_i64_le_u(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_i64_ge_s(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_i64_ge_u(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_f32_eq(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F32)
    }
    fn visit_f32_ne(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F32)
    }
    fn visit_f32_lt(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F32)
    }
    fn visit_f32_gt(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F32)
    }
    fn visit_f32_le(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F32)
    }
    fn visit_f32_ge(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F32)
    }
    fn visit_f64_eq(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F64)
    }
    fn visit_f64_ne(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F64)
    }
    fn visit_f64_lt(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F64)
    }
    fn visit_f64_gt(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F64)
    }
    fn visit_f64_le(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F64)
    }
    fn visit_f64_ge(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F64)
    }
2273    fn visit_i32_clz(&mut self) -> Self::Output {
2274        self.check_unary_op(ValType::I32)
2275    }
2276    fn visit_i32_ctz(&mut self) -> Self::Output {
2277        self.check_unary_op(ValType::I32)
2278    }
2279    fn visit_i32_popcnt(&mut self) -> Self::Output {
2280        self.check_unary_op(ValType::I32)
2281    }
2282    fn visit_i32_add(&mut self) -> Self::Output {
2283        self.check_binary_op(ValType::I32)
2284    }
2285    fn visit_i32_sub(&mut self) -> Self::Output {
2286        self.check_binary_op(ValType::I32)
2287    }
2288    fn visit_i32_mul(&mut self) -> Self::Output {
2289        self.check_binary_op(ValType::I32)
2290    }
2291    fn visit_i32_div_s(&mut self) -> Self::Output {
2292        self.check_binary_op(ValType::I32)
2293    }
2294    fn visit_i32_div_u(&mut self) -> Self::Output {
2295        self.check_binary_op(ValType::I32)
2296    }
2297    fn visit_i32_rem_s(&mut self) -> Self::Output {
2298        self.check_binary_op(ValType::I32)
2299    }
2300    fn visit_i32_rem_u(&mut self) -> Self::Output {
2301        self.check_binary_op(ValType::I32)
2302    }
2303    fn visit_i32_and(&mut self) -> Self::Output {
2304        self.check_binary_op(ValType::I32)
2305    }
2306    fn visit_i32_or(&mut self) -> Self::Output {
2307        self.check_binary_op(ValType::I32)
2308    }
2309    fn visit_i32_xor(&mut self) -> Self::Output {
2310        self.check_binary_op(ValType::I32)
2311    }
2312    fn visit_i32_shl(&mut self) -> Self::Output {
2313        self.check_binary_op(ValType::I32)
2314    }
2315    fn visit_i32_shr_s(&mut self) -> Self::Output {
2316        self.check_binary_op(ValType::I32)
2317    }
2318    fn visit_i32_shr_u(&mut self) -> Self::Output {
2319        self.check_binary_op(ValType::I32)
2320    }
2321    fn visit_i32_rotl(&mut self) -> Self::Output {
2322        self.check_binary_op(ValType::I32)
2323    }
2324    fn visit_i32_rotr(&mut self) -> Self::Output {
2325        self.check_binary_op(ValType::I32)
2326    }
2327    fn visit_i64_clz(&mut self) -> Self::Output {
2328        self.check_unary_op(ValType::I64)
2329    }
2330    fn visit_i64_ctz(&mut self) -> Self::Output {
2331        self.check_unary_op(ValType::I64)
2332    }
2333    fn visit_i64_popcnt(&mut self) -> Self::Output {
2334        self.check_unary_op(ValType::I64)
2335    }
2336    fn visit_i64_add(&mut self) -> Self::Output {
2337        self.check_binary_op(ValType::I64)
2338    }
2339    fn visit_i64_sub(&mut self) -> Self::Output {
2340        self.check_binary_op(ValType::I64)
2341    }
2342    fn visit_i64_mul(&mut self) -> Self::Output {
2343        self.check_binary_op(ValType::I64)
2344    }
2345    fn visit_i64_div_s(&mut self) -> Self::Output {
2346        self.check_binary_op(ValType::I64)
2347    }
2348    fn visit_i64_div_u(&mut self) -> Self::Output {
2349        self.check_binary_op(ValType::I64)
2350    }
2351    fn visit_i64_rem_s(&mut self) -> Self::Output {
2352        self.check_binary_op(ValType::I64)
2353    }
2354    fn visit_i64_rem_u(&mut self) -> Self::Output {
2355        self.check_binary_op(ValType::I64)
2356    }
2357    fn visit_i64_and(&mut self) -> Self::Output {
2358        self.check_binary_op(ValType::I64)
2359    }
2360    fn visit_i64_or(&mut self) -> Self::Output {
2361        self.check_binary_op(ValType::I64)
2362    }
2363    fn visit_i64_xor(&mut self) -> Self::Output {
2364        self.check_binary_op(ValType::I64)
2365    }
2366    fn visit_i64_shl(&mut self) -> Self::Output {
2367        self.check_binary_op(ValType::I64)
2368    }
2369    fn visit_i64_shr_s(&mut self) -> Self::Output {
2370        self.check_binary_op(ValType::I64)
2371    }
2372    fn visit_i64_shr_u(&mut self) -> Self::Output {
2373        self.check_binary_op(ValType::I64)
2374    }
2375    fn visit_i64_rotl(&mut self) -> Self::Output {
2376        self.check_binary_op(ValType::I64)
2377    }
2378    fn visit_i64_rotr(&mut self) -> Self::Output {
2379        self.check_binary_op(ValType::I64)
2380    }
2381    fn visit_f32_abs(&mut self) -> Self::Output {
2382        self.check_funary_op(ValType::F32)
2383    }
2384    fn visit_f32_neg(&mut self) -> Self::Output {
2385        self.check_funary_op(ValType::F32)
2386    }
2387    fn visit_f32_ceil(&mut self) -> Self::Output {
2388        self.check_funary_op(ValType::F32)
2389    }
2390    fn visit_f32_floor(&mut self) -> Self::Output {
2391        self.check_funary_op(ValType::F32)
2392    }
2393    fn visit_f32_trunc(&mut self) -> Self::Output {
2394        self.check_funary_op(ValType::F32)
2395    }
2396    fn visit_f32_nearest(&mut self) -> Self::Output {
2397        self.check_funary_op(ValType::F32)
2398    }
2399    fn visit_f32_sqrt(&mut self) -> Self::Output {
2400        self.check_funary_op(ValType::F32)
2401    }
    // `f32` binary instructions: all delegate to `check_fbinary_op(ValType::F32)`,
    // the float-specific binary helper (spec type [f32 f32] -> [f32]).
    fn visit_f32_add(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F32)
    }
    fn visit_f32_sub(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F32)
    }
    fn visit_f32_mul(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F32)
    }
    fn visit_f32_div(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F32)
    }
    fn visit_f32_min(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F32)
    }
    fn visit_f32_max(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F32)
    }
    fn visit_f32_copysign(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F32)
    }
    // `f64` unary instructions: same shape as the `f32` group above but with
    // `ValType::F64` (spec type [f64] -> [f64]).
    fn visit_f64_abs(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F64)
    }
    fn visit_f64_neg(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F64)
    }
    fn visit_f64_ceil(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F64)
    }
    fn visit_f64_floor(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F64)
    }
    fn visit_f64_trunc(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F64)
    }
    fn visit_f64_nearest(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F64)
    }
    fn visit_f64_sqrt(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F64)
    }
    // `f64` binary instructions: same shape as the `f32` group above but with
    // `ValType::F64` (spec type [f64 f64] -> [f64]).
    fn visit_f64_add(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F64)
    }
    fn visit_f64_sub(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F64)
    }
    fn visit_f64_mul(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F64)
    }
    fn visit_f64_div(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F64)
    }
    fn visit_f64_min(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F64)
    }
    fn visit_f64_max(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F64)
    }
    fn visit_f64_copysign(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F64)
    }
    // Numeric conversions and reinterpretations. The helpers take
    // `(result_type, operand_type)` in that order; `check_fconversion_op` is
    // used whenever the *result* is a float (presumably adding a
    // floats-enabled gate on top of `check_conversion_op` — confirm against
    // the helper definitions earlier in this file).
    fn visit_i32_wrap_i64(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I32, ValType::I64)
    }
    fn visit_i32_trunc_f32_s(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I32, ValType::F32)
    }
    fn visit_i32_trunc_f32_u(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I32, ValType::F32)
    }
    fn visit_i32_trunc_f64_s(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I32, ValType::F64)
    }
    fn visit_i32_trunc_f64_u(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I32, ValType::F64)
    }
    fn visit_i64_extend_i32_s(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::I32)
    }
    fn visit_i64_extend_i32_u(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::I32)
    }
    fn visit_i64_trunc_f32_s(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::F32)
    }
    fn visit_i64_trunc_f32_u(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::F32)
    }
    fn visit_i64_trunc_f64_s(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::F64)
    }
    fn visit_i64_trunc_f64_u(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::F64)
    }
    fn visit_f32_convert_i32_s(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F32, ValType::I32)
    }
    fn visit_f32_convert_i32_u(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F32, ValType::I32)
    }
    fn visit_f32_convert_i64_s(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F32, ValType::I64)
    }
    fn visit_f32_convert_i64_u(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F32, ValType::I64)
    }
    fn visit_f32_demote_f64(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F32, ValType::F64)
    }
    fn visit_f64_convert_i32_s(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F64, ValType::I32)
    }
    fn visit_f64_convert_i32_u(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F64, ValType::I32)
    }
    fn visit_f64_convert_i64_s(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F64, ValType::I64)
    }
    fn visit_f64_convert_i64_u(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F64, ValType::I64)
    }
    fn visit_f64_promote_f32(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F64, ValType::F32)
    }
    // Reinterpretations follow the same pattern: integer results go through
    // `check_conversion_op`, float results through `check_fconversion_op`.
    fn visit_i32_reinterpret_f32(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I32, ValType::F32)
    }
    fn visit_i64_reinterpret_f64(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::F64)
    }
    fn visit_f32_reinterpret_i32(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F32, ValType::I32)
    }
    fn visit_f64_reinterpret_i64(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F64, ValType::I64)
    }
    // Saturating float-to-int truncations (nontrapping-float-to-int
    // proposal). Validation-wise identical to the trapping `trunc` forms:
    // `(int_result, float_operand)` via `check_conversion_op`.
    fn visit_i32_trunc_sat_f32_s(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I32, ValType::F32)
    }
    fn visit_i32_trunc_sat_f32_u(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I32, ValType::F32)
    }
    fn visit_i32_trunc_sat_f64_s(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I32, ValType::F64)
    }
    fn visit_i32_trunc_sat_f64_u(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I32, ValType::F64)
    }
    fn visit_i64_trunc_sat_f32_s(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::F32)
    }
    fn visit_i64_trunc_sat_f32_u(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::F32)
    }
    fn visit_i64_trunc_sat_f64_s(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::F64)
    }
    fn visit_i64_trunc_sat_f64_u(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::F64)
    }
    // Sign-extension operators: unary ops on the full-width type (the
    // narrow source width only affects runtime semantics, not validation).
    fn visit_i32_extend8_s(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I32)
    }
    fn visit_i32_extend16_s(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I32)
    }
    fn visit_i64_extend8_s(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I64)
    }
    fn visit_i64_extend16_s(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I64)
    }
    fn visit_i64_extend32_s(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I64)
    }
    // Atomic loads (threads proposal): all delegate to `check_atomic_load`
    // with the full-width result type; the narrow `8/16/32_u` variants still
    // push a full `i32`/`i64`.
    fn visit_i32_atomic_load(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_load(memarg, ValType::I32)
    }
    fn visit_i32_atomic_load16_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_load(memarg, ValType::I32)
    }
    fn visit_i32_atomic_load8_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_load(memarg, ValType::I32)
    }
    fn visit_i64_atomic_load(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_load(memarg, ValType::I64)
    }
    fn visit_i64_atomic_load32_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_load(memarg, ValType::I64)
    }
    fn visit_i64_atomic_load16_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_load(memarg, ValType::I64)
    }
    fn visit_i64_atomic_load8_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_load(memarg, ValType::I64)
    }
    // Atomic stores: mirror of the atomic loads above, delegating to
    // `check_atomic_store` with the full-width operand type.
    fn visit_i32_atomic_store(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_store(memarg, ValType::I32)
    }
    fn visit_i32_atomic_store16(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_store(memarg, ValType::I32)
    }
    fn visit_i32_atomic_store8(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_store(memarg, ValType::I32)
    }
    fn visit_i64_atomic_store(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_store(memarg, ValType::I64)
    }
    fn visit_i64_atomic_store32(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_store(memarg, ValType::I64)
    }
    fn visit_i64_atomic_store16(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_store(memarg, ValType::I64)
    }
    fn visit_i64_atomic_store8(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_store(memarg, ValType::I64)
    }
    // Atomic read-modify-write operators (add/sub/and/or/xor at every
    // access width). All delegate to `check_atomic_binary_memory_op` with
    // the instruction's full-width value type.
    fn visit_i32_atomic_rmw_add(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw_sub(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw_and(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw_or(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw_xor(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw16_add_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw16_sub_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw16_and_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw16_or_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw16_xor_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw8_add_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw8_sub_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw8_and_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw8_or_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw8_xor_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i64_atomic_rmw_add(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw_sub(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw_and(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw_or(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw_xor(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw32_add_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw32_sub_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw32_and_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw32_or_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw32_xor_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw16_add_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw16_sub_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw16_and_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw16_or_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw16_xor_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw8_add_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw8_sub_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw8_and_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw8_or_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw8_xor_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    // Atomic exchange operators validate like the other RMW ops; the
    // compare-and-exchange forms use the dedicated
    // `check_atomic_binary_memory_cmpxchg` helper (cmpxchg carries an extra
    // "expected" operand — see the helper's definition).
    fn visit_i32_atomic_rmw_xchg(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw16_xchg_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw8_xchg_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw_cmpxchg(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_cmpxchg(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw16_cmpxchg_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_cmpxchg(memarg, ValType::I32)
    }
    fn visit_i32_atomic_rmw8_cmpxchg_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_cmpxchg(memarg, ValType::I32)
    }
    fn visit_i64_atomic_rmw_xchg(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw32_xchg_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw16_xchg_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw8_xchg_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw_cmpxchg(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_cmpxchg(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw32_cmpxchg_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_cmpxchg(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw16_cmpxchg_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_cmpxchg(memarg, ValType::I64)
    }
    fn visit_i64_atomic_rmw8_cmpxchg_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_cmpxchg(memarg, ValType::I64)
    }
    // `memory.atomic.notify` validates exactly like a binary atomic memory
    // op with an `i32` value (count in, number-of-woken-waiters out).
    fn visit_memory_atomic_notify(&mut self, memarg: MemArg) -> Self::Output {
        self.check_atomic_binary_memory_op(memarg, ValType::I32)
    }
    // `memory.atomic.wait32`: operands are popped in reverse stack order —
    // timeout (i64), then expected value (i32), then the address typed by
    // the memory's index type; pushes the i32 wait result.
    fn visit_memory_atomic_wait32(&mut self, memarg: MemArg) -> Self::Output {
        // `check_shared_memarg` yields the memory's index (address) type.
        let ty = self.check_shared_memarg(memarg)?;
        self.pop_operand(Some(ValType::I64))?; // timeout
        self.pop_operand(Some(ValType::I32))?; // expected value
        self.pop_operand(Some(ty))?; // address
        self.push_operand(ValType::I32)?;
        Ok(())
    }
    // `memory.atomic.wait64`: same shape as `wait32` but the expected value
    // is an i64.
    fn visit_memory_atomic_wait64(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_shared_memarg(memarg)?;
        self.pop_operand(Some(ValType::I64))?; // timeout
        self.pop_operand(Some(ValType::I64))?; // expected value
        self.pop_operand(Some(ty))?; // address
        self.push_operand(ValType::I32)?;
        Ok(())
    }
    // `atomic.fence` has no operands and no results; there is nothing to
    // validate beyond decoding the instruction itself.
    fn visit_atomic_fence(&mut self) -> Self::Output {
        Ok(())
    }
    // `ref.null ht`: pushes a nullable reference of heap type `ht`.
    fn visit_ref_null(&mut self, mut heap_type: HeapType) -> Self::Output {
        // Feature-gate the nullable reference type when it is representable;
        // if `RefType::new` returns `None` here the gate is skipped and the
        // post-canonicalization construction below is relied upon instead.
        if let Some(ty) = RefType::new(true, heap_type) {
            self.features
                .check_ref_type(ty)
                .map_err(|e| BinaryReaderError::new(e, self.offset))?;
        }
        // Validate and canonicalize the heap type against the module's type
        // section (may rewrite `heap_type` in place, hence the `&mut`).
        self.resources
            .check_heap_type(&mut heap_type, self.offset)?;
        let ty = ValType::Ref(
            RefType::new(true, heap_type).expect("existing heap types should be within our limits"),
        );
        self.push_operand(ty)?;
        Ok(())
    }
2804
    // `ref.as_non_null`: pops any reference and pushes it back with its
    // non-nullable counterpart type (the null check happens at runtime).
    fn visit_ref_as_non_null(&mut self) -> Self::Output {
        let ty = self.pop_ref(None)?.as_non_null();
        self.push_operand(ty)?;
        Ok(())
    }
    // `br_on_null l`: pops a reference; if it is null, branches to `l`,
    // otherwise execution continues with the non-null reference on the stack.
    fn visit_br_on_null(&mut self, relative_depth: u32) -> Self::Output {
        // The non-null version of the popped ref is what remains on the
        // stack on the fallthrough (non-branching) path.
        let ref_ty = self.pop_ref(None)?.as_non_null();
        let (ft, kind) = self.jump(relative_depth)?;
        let label_types = self.label_types(ft, kind)?;
        // The branch target's label types must be present beneath the ref;
        // pop them for checking and push them back.
        self.pop_push_label_types(label_types)?;
        self.push_operand(ref_ty)?;
        Ok(())
    }
    // `br_on_non_null l`: pops a reference; if non-null, branches to `l`
    // carrying that (non-null) reference as the label's final result.
    fn visit_br_on_non_null(&mut self, relative_depth: u32) -> Self::Output {
        let (ft, kind) = self.jump(relative_depth)?;

        // The target label's *last* result must be a reference type; it is
        // what the branch sends along. `next_back()` also removes it from
        // the iterator so the remaining label types are checked below.
        let mut label_types = self.label_types(ft, kind)?;
        let expected = match label_types.next_back() {
            None => bail!(
                self.offset,
                "type mismatch: br_on_non_null target has no label types",
            ),
            Some(ValType::Ref(ty)) => ty,
            Some(_) => bail!(
                self.offset,
                "type mismatch: br_on_non_null target does not end with heap type",
            ),
        };
        // The operand may be nullable even though the branch delivers a
        // non-null ref (null takes the fallthrough path).
        self.pop_ref(Some(expected.nullable()))?;

        // Check the rest of the label types (sans the trailing ref).
        self.pop_push_label_types(label_types)?;
        Ok(())
    }
    // `ref.is_null`: pops any reference and pushes an `i32` boolean.
    fn visit_ref_is_null(&mut self) -> Self::Output {
        self.pop_ref(None)?;
        self.push_operand(ValType::I32)?;
        Ok(())
    }
    // `ref.func f`: pushes a non-nullable reference to function `f`'s
    // concrete type. The function must exist and must have been declared
    // referenceable (e.g. via an elem segment or export).
    fn visit_ref_func(&mut self, function_index: u32) -> Self::Output {
        let type_id = match self.resources.type_id_of_function(function_index) {
            Some(id) => id,
            None => bail!(
                self.offset,
                "unknown function {}: function index out of bounds",
                function_index,
            ),
        };
        if !self.resources.is_function_referenced(function_index) {
            bail!(self.offset, "undeclared function reference");
        }

        // Result is a non-nullable `(ref $t)` where `$t` is the function's
        // canonicalized type id.
        let index = UnpackedIndex::Id(type_id);
        let ty = ValType::Ref(
            RefType::new(false, HeapType::Concrete(index)).ok_or_else(|| {
                BinaryReaderError::new("implementation limit: type index too large", self.offset)
            })?,
        );
        self.push_operand(ty)?;
        Ok(())
    }
    // `ref.eq` (GC proposal): pops two `eq`-typed references, requires their
    // `shared`-ness to agree (shared-everything-threads), and pushes an
    // `i32` comparison result.
    fn visit_ref_eq(&mut self) -> Self::Output {
        let a = self.pop_maybe_shared_ref(AbstractHeapType::Eq)?;
        let b = self.pop_maybe_shared_ref(AbstractHeapType::Eq)?;
        // `is_maybe_shared` returns `None` for bottom/unreachable types.
        let a_is_shared = a.is_maybe_shared(&self.resources);
        let b_is_shared = b.is_maybe_shared(&self.resources);
        match (a_is_shared, b_is_shared) {
            // One or both of the types are from unreachable code; assume
            // the shared-ness matches.
            (None, Some(_)) | (Some(_), None) | (None, None) => {}

            (Some(is_a_shared), Some(is_b_shared)) => {
                if is_a_shared != is_b_shared {
                    bail!(
                        self.offset,
                        "type mismatch: expected `ref.eq` types to match `shared`-ness"
                    );
                }
            }
        }
        self.push_operand(ValType::I32)
    }
2886    fn visit_v128_load(&mut self, memarg: MemArg) -> Self::Output {
2887        let ty = self.check_memarg(memarg)?;
2888        self.pop_operand(Some(ty))?;
2889        self.push_operand(ValType::V128)?;
2890        Ok(())
2891    }
2892    fn visit_v128_store(&mut self, memarg: MemArg) -> Self::Output {
2893        let ty = self.check_memarg(memarg)?;
2894        self.pop_operand(Some(ValType::V128))?;
2895        self.pop_operand(Some(ty))?;
2896        Ok(())
2897    }
2898    fn visit_v128_const(&mut self, _value: V128) -> Self::Output {
2899        self.push_operand(ValType::V128)?;
2900        Ok(())
2901    }
    // Splat operators: `check_v128_splat` pops the scalar source type and
    // pushes a `v128`. The float variants first require floats to be
    // enabled (they are gated off in deterministic/no-float configurations).
    fn visit_i8x16_splat(&mut self) -> Self::Output {
        self.check_v128_splat(ValType::I32)
    }
    fn visit_i16x8_splat(&mut self) -> Self::Output {
        self.check_v128_splat(ValType::I32)
    }
    fn visit_i32x4_splat(&mut self) -> Self::Output {
        self.check_v128_splat(ValType::I32)
    }
    fn visit_i64x2_splat(&mut self) -> Self::Output {
        self.check_v128_splat(ValType::I64)
    }
    fn visit_f32x4_splat(&mut self) -> Self::Output {
        self.check_floats_enabled()?;
        self.check_v128_splat(ValType::F32)
    }
    fn visit_f64x2_splat(&mut self) -> Self::Output {
        self.check_floats_enabled()?;
        self.check_v128_splat(ValType::F64)
    }
    // Lane extract/replace operators. Extract: check the lane index against
    // the lane count, pop a `v128`, push the scalar lane type. Replace:
    // check the lane index, pop the scalar then the `v128` (pop order
    // matters), push the updated `v128`. Float variants additionally
    // require floats to be enabled. The signed/unsigned extract variants
    // validate identically (`_u` delegates to `_s`).
    fn visit_i8x16_extract_lane_s(&mut self, lane: u8) -> Self::Output {
        self.check_simd_lane_index(lane, 16)?;
        self.pop_operand(Some(ValType::V128))?;
        self.push_operand(ValType::I32)?;
        Ok(())
    }
    fn visit_i8x16_extract_lane_u(&mut self, lane: u8) -> Self::Output {
        self.visit_i8x16_extract_lane_s(lane)
    }
    fn visit_i16x8_extract_lane_s(&mut self, lane: u8) -> Self::Output {
        self.check_simd_lane_index(lane, 8)?;
        self.pop_operand(Some(ValType::V128))?;
        self.push_operand(ValType::I32)?;
        Ok(())
    }
    fn visit_i16x8_extract_lane_u(&mut self, lane: u8) -> Self::Output {
        self.visit_i16x8_extract_lane_s(lane)
    }
    fn visit_i32x4_extract_lane(&mut self, lane: u8) -> Self::Output {
        self.check_simd_lane_index(lane, 4)?;
        self.pop_operand(Some(ValType::V128))?;
        self.push_operand(ValType::I32)?;
        Ok(())
    }
    fn visit_i8x16_replace_lane(&mut self, lane: u8) -> Self::Output {
        self.check_simd_lane_index(lane, 16)?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(ValType::V128))?;
        self.push_operand(ValType::V128)?;
        Ok(())
    }
    fn visit_i16x8_replace_lane(&mut self, lane: u8) -> Self::Output {
        self.check_simd_lane_index(lane, 8)?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(ValType::V128))?;
        self.push_operand(ValType::V128)?;
        Ok(())
    }
    fn visit_i32x4_replace_lane(&mut self, lane: u8) -> Self::Output {
        self.check_simd_lane_index(lane, 4)?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(ValType::V128))?;
        self.push_operand(ValType::V128)?;
        Ok(())
    }
    fn visit_i64x2_extract_lane(&mut self, lane: u8) -> Self::Output {
        self.check_simd_lane_index(lane, 2)?;
        self.pop_operand(Some(ValType::V128))?;
        self.push_operand(ValType::I64)?;
        Ok(())
    }
    fn visit_i64x2_replace_lane(&mut self, lane: u8) -> Self::Output {
        self.check_simd_lane_index(lane, 2)?;
        self.pop_operand(Some(ValType::I64))?;
        self.pop_operand(Some(ValType::V128))?;
        self.push_operand(ValType::V128)?;
        Ok(())
    }
    fn visit_f32x4_extract_lane(&mut self, lane: u8) -> Self::Output {
        self.check_floats_enabled()?;
        self.check_simd_lane_index(lane, 4)?;
        self.pop_operand(Some(ValType::V128))?;
        self.push_operand(ValType::F32)?;
        Ok(())
    }
    fn visit_f32x4_replace_lane(&mut self, lane: u8) -> Self::Output {
        self.check_floats_enabled()?;
        self.check_simd_lane_index(lane, 4)?;
        self.pop_operand(Some(ValType::F32))?;
        self.pop_operand(Some(ValType::V128))?;
        self.push_operand(ValType::V128)?;
        Ok(())
    }
    fn visit_f64x2_extract_lane(&mut self, lane: u8) -> Self::Output {
        self.check_floats_enabled()?;
        self.check_simd_lane_index(lane, 2)?;
        self.pop_operand(Some(ValType::V128))?;
        self.push_operand(ValType::F64)?;
        Ok(())
    }
    fn visit_f64x2_replace_lane(&mut self, lane: u8) -> Self::Output {
        self.check_floats_enabled()?;
        self.check_simd_lane_index(lane, 2)?;
        self.pop_operand(Some(ValType::F64))?;
        self.pop_operand(Some(ValType::V128))?;
        self.push_operand(ValType::V128)?;
        Ok(())
    }
    // Floating-point SIMD comparisons and arithmetic: all are
    // [v128 v128] -> [v128] and delegate to `check_v128_fbinary_op`, the
    // float-gated variant of the v128 binary-op check.
    fn visit_f32x4_eq(&mut self) -> Self::Output {
        self.check_v128_fbinary_op()
    }
    fn visit_f32x4_ne(&mut self) -> Self::Output {
        self.check_v128_fbinary_op()
    }
    fn visit_f32x4_lt(&mut self) -> Self::Output {
        self.check_v128_fbinary_op()
    }
    fn visit_f32x4_gt(&mut self) -> Self::Output {
        self.check_v128_fbinary_op()
    }
    fn visit_f32x4_le(&mut self) -> Self::Output {
        self.check_v128_fbinary_op()
    }
    fn visit_f32x4_ge(&mut self) -> Self::Output {
        self.check_v128_fbinary_op()
    }
    fn visit_f64x2_eq(&mut self) -> Self::Output {
        self.check_v128_fbinary_op()
    }
    fn visit_f64x2_ne(&mut self) -> Self::Output {
        self.check_v128_fbinary_op()
    }
    fn visit_f64x2_lt(&mut self) -> Self::Output {
        self.check_v128_fbinary_op()
    }
    fn visit_f64x2_gt(&mut self) -> Self::Output {
        self.check_v128_fbinary_op()
    }
    fn visit_f64x2_le(&mut self) -> Self::Output {
        self.check_v128_fbinary_op()
    }
    fn visit_f64x2_ge(&mut self) -> Self::Output {
        self.check_v128_fbinary_op()
    }
    fn visit_f32x4_add(&mut self) -> Self::Output {
        self.check_v128_fbinary_op()
    }
    fn visit_f32x4_sub(&mut self) -> Self::Output {
        self.check_v128_fbinary_op()
    }
    fn visit_f32x4_mul(&mut self) -> Self::Output {
        self.check_v128_fbinary_op()
    }
    fn visit_f32x4_div(&mut self) -> Self::Output {
        self.check_v128_fbinary_op()
    }
    fn visit_f32x4_min(&mut self) -> Self::Output {
        self.check_v128_fbinary_op()
    }
    fn visit_f32x4_max(&mut self) -> Self::Output {
        self.check_v128_fbinary_op()
    }
    fn visit_f32x4_pmin(&mut self) -> Self::Output {
        self.check_v128_fbinary_op()
    }
    fn visit_f32x4_pmax(&mut self) -> Self::Output {
        self.check_v128_fbinary_op()
    }
    fn visit_f64x2_add(&mut self) -> Self::Output {
        self.check_v128_fbinary_op()
    }
    fn visit_f64x2_sub(&mut self) -> Self::Output {
        self.check_v128_fbinary_op()
    }
    fn visit_f64x2_mul(&mut self) -> Self::Output {
        self.check_v128_fbinary_op()
    }
    fn visit_f64x2_div(&mut self) -> Self::Output {
        self.check_v128_fbinary_op()
    }
    fn visit_f64x2_min(&mut self) -> Self::Output {
        self.check_v128_fbinary_op()
    }
    fn visit_f64x2_max(&mut self) -> Self::Output {
        self.check_v128_fbinary_op()
    }
    fn visit_f64x2_pmin(&mut self) -> Self::Output {
        self.check_v128_fbinary_op()
    }
    fn visit_f64x2_pmax(&mut self) -> Self::Output {
        self.check_v128_fbinary_op()
    }
    // Integer SIMD comparisons: all are [v128 v128] -> [v128] (the result
    // is a lane-wise mask, still a `v128`) and delegate to
    // `check_v128_binary_op`.
    fn visit_i8x16_eq(&mut self) -> Self::Output {
        self.check_v128_binary_op()
    }
    fn visit_i8x16_ne(&mut self) -> Self::Output {
        self.check_v128_binary_op()
    }
    fn visit_i8x16_lt_s(&mut self) -> Self::Output {
        self.check_v128_binary_op()
    }
    fn visit_i8x16_lt_u(&mut self) -> Self::Output {
        self.check_v128_binary_op()
    }
    fn visit_i8x16_gt_s(&mut self) -> Self::Output {
        self.check_v128_binary_op()
    }
    fn visit_i8x16_gt_u(&mut self) -> Self::Output {
        self.check_v128_binary_op()
    }
    fn visit_i8x16_le_s(&mut self) -> Self::Output {
        self.check_v128_binary_op()
    }
    fn visit_i8x16_le_u(&mut self) -> Self::Output {
        self.check_v128_binary_op()
    }
    fn visit_i8x16_ge_s(&mut self) -> Self::Output {
        self.check_v128_binary_op()
    }
    fn visit_i8x16_ge_u(&mut self) -> Self::Output {
        self.check_v128_binary_op()
    }
    fn visit_i16x8_eq(&mut self) -> Self::Output {
        self.check_v128_binary_op()
    }
    fn visit_i16x8_ne(&mut self) -> Self::Output {
        self.check_v128_binary_op()
    }
    fn visit_i16x8_lt_s(&mut self) -> Self::Output {
        self.check_v128_binary_op()
    }
    fn visit_i16x8_lt_u(&mut self) -> Self::Output {
        self.check_v128_binary_op()
    }
    fn visit_i16x8_gt_s(&mut self) -> Self::Output {
        self.check_v128_binary_op()
    }
    fn visit_i16x8_gt_u(&mut self) -> Self::Output {
        self.check_v128_binary_op()
    }
    fn visit_i16x8_le_s(&mut self) -> Self::Output {
        self.check_v128_binary_op()
    }
    fn visit_i16x8_le_u(&mut self) -> Self::Output {
        self.check_v128_binary_op()
    }
    fn visit_i16x8_ge_s(&mut self) -> Self::Output {
        self.check_v128_binary_op()
    }
    fn visit_i16x8_ge_u(&mut self) -> Self::Output {
        self.check_v128_binary_op()
    }
    fn visit_i32x4_eq(&mut self) -> Self::Output {
        self.check_v128_binary_op()
    }
    fn visit_i32x4_ne(&mut self) -> Self::Output {
        self.check_v128_binary_op()
    }
    fn visit_i32x4_lt_s(&mut self) -> Self::Output {
        self.check_v128_binary_op()
    }
    fn visit_i32x4_lt_u(&mut self) -> Self::Output {
        self.check_v128_binary_op()
    }
    fn visit_i32x4_gt_s(&mut self) -> Self::Output {
        self.check_v128_binary_op()
    }
    fn visit_i32x4_gt_u(&mut self) -> Self::Output {
        self.check_v128_binary_op()
    }
    fn visit_i32x4_le_s(&mut self) -> Self::Output {
        self.check_v128_binary_op()
    }
    fn visit_i32x4_le_u(&mut self) -> Self::Output {
        self.check_v128_binary_op()
    }
    fn visit_i32x4_ge_s(&mut self) -> Self::Output {
        self.check_v128_binary_op()
    }
    fn visit_i32x4_ge_u(&mut self) -> Self::Output {
        self.check_v128_binary_op()
    }
    fn visit_i64x2_eq(&mut self) -> Self::Output {
        self.check_v128_binary_op()
    }
3187    fn visit_i64x2_ne(&mut self) -> Self::Output {
3188        self.check_v128_binary_op()
3189    }
3190    fn visit_i64x2_lt_s(&mut self) -> Self::Output {
3191        self.check_v128_binary_op()
3192    }
3193    fn visit_i64x2_gt_s(&mut self) -> Self::Output {
3194        self.check_v128_binary_op()
3195    }
3196    fn visit_i64x2_le_s(&mut self) -> Self::Output {
3197        self.check_v128_binary_op()
3198    }
3199    fn visit_i64x2_ge_s(&mut self) -> Self::Output {
3200        self.check_v128_binary_op()
3201    }
3202    fn visit_v128_and(&mut self) -> Self::Output {
3203        self.check_v128_binary_op()
3204    }
3205    fn visit_v128_andnot(&mut self) -> Self::Output {
3206        self.check_v128_binary_op()
3207    }
3208    fn visit_v128_or(&mut self) -> Self::Output {
3209        self.check_v128_binary_op()
3210    }
3211    fn visit_v128_xor(&mut self) -> Self::Output {
3212        self.check_v128_binary_op()
3213    }
3214    fn visit_i8x16_add(&mut self) -> Self::Output {
3215        self.check_v128_binary_op()
3216    }
3217    fn visit_i8x16_add_sat_s(&mut self) -> Self::Output {
3218        self.check_v128_binary_op()
3219    }
3220    fn visit_i8x16_add_sat_u(&mut self) -> Self::Output {
3221        self.check_v128_binary_op()
3222    }
3223    fn visit_i8x16_sub(&mut self) -> Self::Output {
3224        self.check_v128_binary_op()
3225    }
3226    fn visit_i8x16_sub_sat_s(&mut self) -> Self::Output {
3227        self.check_v128_binary_op()
3228    }
3229    fn visit_i8x16_sub_sat_u(&mut self) -> Self::Output {
3230        self.check_v128_binary_op()
3231    }
3232    fn visit_i8x16_min_s(&mut self) -> Self::Output {
3233        self.check_v128_binary_op()
3234    }
3235    fn visit_i8x16_min_u(&mut self) -> Self::Output {
3236        self.check_v128_binary_op()
3237    }
3238    fn visit_i8x16_max_s(&mut self) -> Self::Output {
3239        self.check_v128_binary_op()
3240    }
3241    fn visit_i8x16_max_u(&mut self) -> Self::Output {
3242        self.check_v128_binary_op()
3243    }
3244    fn visit_i16x8_add(&mut self) -> Self::Output {
3245        self.check_v128_binary_op()
3246    }
3247    fn visit_i16x8_add_sat_s(&mut self) -> Self::Output {
3248        self.check_v128_binary_op()
3249    }
3250    fn visit_i16x8_add_sat_u(&mut self) -> Self::Output {
3251        self.check_v128_binary_op()
3252    }
3253    fn visit_i16x8_sub(&mut self) -> Self::Output {
3254        self.check_v128_binary_op()
3255    }
3256    fn visit_i16x8_sub_sat_s(&mut self) -> Self::Output {
3257        self.check_v128_binary_op()
3258    }
3259    fn visit_i16x8_sub_sat_u(&mut self) -> Self::Output {
3260        self.check_v128_binary_op()
3261    }
3262    fn visit_i16x8_mul(&mut self) -> Self::Output {
3263        self.check_v128_binary_op()
3264    }
3265    fn visit_i16x8_min_s(&mut self) -> Self::Output {
3266        self.check_v128_binary_op()
3267    }
3268    fn visit_i16x8_min_u(&mut self) -> Self::Output {
3269        self.check_v128_binary_op()
3270    }
3271    fn visit_i16x8_max_s(&mut self) -> Self::Output {
3272        self.check_v128_binary_op()
3273    }
3274    fn visit_i16x8_max_u(&mut self) -> Self::Output {
3275        self.check_v128_binary_op()
3276    }
3277    fn visit_i32x4_add(&mut self) -> Self::Output {
3278        self.check_v128_binary_op()
3279    }
3280    fn visit_i32x4_sub(&mut self) -> Self::Output {
3281        self.check_v128_binary_op()
3282    }
3283    fn visit_i32x4_mul(&mut self) -> Self::Output {
3284        self.check_v128_binary_op()
3285    }
3286    fn visit_i32x4_min_s(&mut self) -> Self::Output {
3287        self.check_v128_binary_op()
3288    }
3289    fn visit_i32x4_min_u(&mut self) -> Self::Output {
3290        self.check_v128_binary_op()
3291    }
3292    fn visit_i32x4_max_s(&mut self) -> Self::Output {
3293        self.check_v128_binary_op()
3294    }
3295    fn visit_i32x4_max_u(&mut self) -> Self::Output {
3296        self.check_v128_binary_op()
3297    }
3298    fn visit_i32x4_dot_i16x8_s(&mut self) -> Self::Output {
3299        self.check_v128_binary_op()
3300    }
3301    fn visit_i64x2_add(&mut self) -> Self::Output {
3302        self.check_v128_binary_op()
3303    }
3304    fn visit_i64x2_sub(&mut self) -> Self::Output {
3305        self.check_v128_binary_op()
3306    }
3307    fn visit_i64x2_mul(&mut self) -> Self::Output {
3308        self.check_v128_binary_op()
3309    }
3310    fn visit_i8x16_avgr_u(&mut self) -> Self::Output {
3311        self.check_v128_binary_op()
3312    }
3313    fn visit_i16x8_avgr_u(&mut self) -> Self::Output {
3314        self.check_v128_binary_op()
3315    }
3316    fn visit_i8x16_narrow_i16x8_s(&mut self) -> Self::Output {
3317        self.check_v128_binary_op()
3318    }
3319    fn visit_i8x16_narrow_i16x8_u(&mut self) -> Self::Output {
3320        self.check_v128_binary_op()
3321    }
3322    fn visit_i16x8_narrow_i32x4_s(&mut self) -> Self::Output {
3323        self.check_v128_binary_op()
3324    }
3325    fn visit_i16x8_narrow_i32x4_u(&mut self) -> Self::Output {
3326        self.check_v128_binary_op()
3327    }
3328    fn visit_i16x8_extmul_low_i8x16_s(&mut self) -> Self::Output {
3329        self.check_v128_binary_op()
3330    }
3331    fn visit_i16x8_extmul_high_i8x16_s(&mut self) -> Self::Output {
3332        self.check_v128_binary_op()
3333    }
3334    fn visit_i16x8_extmul_low_i8x16_u(&mut self) -> Self::Output {
3335        self.check_v128_binary_op()
3336    }
3337    fn visit_i16x8_extmul_high_i8x16_u(&mut self) -> Self::Output {
3338        self.check_v128_binary_op()
3339    }
3340    fn visit_i32x4_extmul_low_i16x8_s(&mut self) -> Self::Output {
3341        self.check_v128_binary_op()
3342    }
3343    fn visit_i32x4_extmul_high_i16x8_s(&mut self) -> Self::Output {
3344        self.check_v128_binary_op()
3345    }
3346    fn visit_i32x4_extmul_low_i16x8_u(&mut self) -> Self::Output {
3347        self.check_v128_binary_op()
3348    }
3349    fn visit_i32x4_extmul_high_i16x8_u(&mut self) -> Self::Output {
3350        self.check_v128_binary_op()
3351    }
3352    fn visit_i64x2_extmul_low_i32x4_s(&mut self) -> Self::Output {
3353        self.check_v128_binary_op()
3354    }
3355    fn visit_i64x2_extmul_high_i32x4_s(&mut self) -> Self::Output {
3356        self.check_v128_binary_op()
3357    }
3358    fn visit_i64x2_extmul_low_i32x4_u(&mut self) -> Self::Output {
3359        self.check_v128_binary_op()
3360    }
3361    fn visit_i64x2_extmul_high_i32x4_u(&mut self) -> Self::Output {
3362        self.check_v128_binary_op()
3363    }
3364    fn visit_i16x8_q15mulr_sat_s(&mut self) -> Self::Output {
3365        self.check_v128_binary_op()
3366    }
    // v128 floating-point unary and int<->float conversion operators.
    // Each delegates to the shared floating-point v128 unary check
    // (`check_v128_funary_op`).
    fn visit_f32x4_ceil(&mut self) -> Self::Output {
        self.check_v128_funary_op()
    }
    fn visit_f32x4_floor(&mut self) -> Self::Output {
        self.check_v128_funary_op()
    }
    fn visit_f32x4_trunc(&mut self) -> Self::Output {
        self.check_v128_funary_op()
    }
    fn visit_f32x4_nearest(&mut self) -> Self::Output {
        self.check_v128_funary_op()
    }
    fn visit_f64x2_ceil(&mut self) -> Self::Output {
        self.check_v128_funary_op()
    }
    fn visit_f64x2_floor(&mut self) -> Self::Output {
        self.check_v128_funary_op()
    }
    fn visit_f64x2_trunc(&mut self) -> Self::Output {
        self.check_v128_funary_op()
    }
    fn visit_f64x2_nearest(&mut self) -> Self::Output {
        self.check_v128_funary_op()
    }
    fn visit_f32x4_abs(&mut self) -> Self::Output {
        self.check_v128_funary_op()
    }
    fn visit_f32x4_neg(&mut self) -> Self::Output {
        self.check_v128_funary_op()
    }
    fn visit_f32x4_sqrt(&mut self) -> Self::Output {
        self.check_v128_funary_op()
    }
    fn visit_f64x2_abs(&mut self) -> Self::Output {
        self.check_v128_funary_op()
    }
    fn visit_f64x2_neg(&mut self) -> Self::Output {
        self.check_v128_funary_op()
    }
    fn visit_f64x2_sqrt(&mut self) -> Self::Output {
        self.check_v128_funary_op()
    }
    // Conversions between lane shapes / between ints and floats; these
    // also go through the floating-point unary check.
    fn visit_f32x4_demote_f64x2_zero(&mut self) -> Self::Output {
        self.check_v128_funary_op()
    }
    fn visit_f64x2_promote_low_f32x4(&mut self) -> Self::Output {
        self.check_v128_funary_op()
    }
    fn visit_f64x2_convert_low_i32x4_s(&mut self) -> Self::Output {
        self.check_v128_funary_op()
    }
    fn visit_f64x2_convert_low_i32x4_u(&mut self) -> Self::Output {
        self.check_v128_funary_op()
    }
    fn visit_i32x4_trunc_sat_f32x4_s(&mut self) -> Self::Output {
        self.check_v128_funary_op()
    }
    fn visit_i32x4_trunc_sat_f32x4_u(&mut self) -> Self::Output {
        self.check_v128_funary_op()
    }
    fn visit_i32x4_trunc_sat_f64x2_s_zero(&mut self) -> Self::Output {
        self.check_v128_funary_op()
    }
    fn visit_i32x4_trunc_sat_f64x2_u_zero(&mut self) -> Self::Output {
        self.check_v128_funary_op()
    }
    fn visit_f32x4_convert_i32x4_s(&mut self) -> Self::Output {
        self.check_v128_funary_op()
    }
    fn visit_f32x4_convert_i32x4_u(&mut self) -> Self::Output {
        self.check_v128_funary_op()
    }
    // v128 integer unary operators (bitwise not, abs/neg, popcnt,
    // lane-extending and pairwise-extending adds). Each delegates to the
    // shared v128 unary check (`check_v128_unary_op`).
    fn visit_v128_not(&mut self) -> Self::Output {
        self.check_v128_unary_op()
    }
    fn visit_i8x16_abs(&mut self) -> Self::Output {
        self.check_v128_unary_op()
    }
    fn visit_i8x16_neg(&mut self) -> Self::Output {
        self.check_v128_unary_op()
    }
    fn visit_i8x16_popcnt(&mut self) -> Self::Output {
        self.check_v128_unary_op()
    }
    fn visit_i16x8_abs(&mut self) -> Self::Output {
        self.check_v128_unary_op()
    }
    fn visit_i16x8_neg(&mut self) -> Self::Output {
        self.check_v128_unary_op()
    }
    fn visit_i32x4_abs(&mut self) -> Self::Output {
        self.check_v128_unary_op()
    }
    fn visit_i32x4_neg(&mut self) -> Self::Output {
        self.check_v128_unary_op()
    }
    fn visit_i64x2_abs(&mut self) -> Self::Output {
        self.check_v128_unary_op()
    }
    fn visit_i64x2_neg(&mut self) -> Self::Output {
        self.check_v128_unary_op()
    }
    fn visit_i16x8_extend_low_i8x16_s(&mut self) -> Self::Output {
        self.check_v128_unary_op()
    }
    fn visit_i16x8_extend_high_i8x16_s(&mut self) -> Self::Output {
        self.check_v128_unary_op()
    }
    fn visit_i16x8_extend_low_i8x16_u(&mut self) -> Self::Output {
        self.check_v128_unary_op()
    }
    fn visit_i16x8_extend_high_i8x16_u(&mut self) -> Self::Output {
        self.check_v128_unary_op()
    }
    fn visit_i32x4_extend_low_i16x8_s(&mut self) -> Self::Output {
        self.check_v128_unary_op()
    }
    fn visit_i32x4_extend_high_i16x8_s(&mut self) -> Self::Output {
        self.check_v128_unary_op()
    }
    fn visit_i32x4_extend_low_i16x8_u(&mut self) -> Self::Output {
        self.check_v128_unary_op()
    }
    fn visit_i32x4_extend_high_i16x8_u(&mut self) -> Self::Output {
        self.check_v128_unary_op()
    }
    fn visit_i64x2_extend_low_i32x4_s(&mut self) -> Self::Output {
        self.check_v128_unary_op()
    }
    fn visit_i64x2_extend_high_i32x4_s(&mut self) -> Self::Output {
        self.check_v128_unary_op()
    }
    fn visit_i64x2_extend_low_i32x4_u(&mut self) -> Self::Output {
        self.check_v128_unary_op()
    }
    fn visit_i64x2_extend_high_i32x4_u(&mut self) -> Self::Output {
        self.check_v128_unary_op()
    }
    fn visit_i16x8_extadd_pairwise_i8x16_s(&mut self) -> Self::Output {
        self.check_v128_unary_op()
    }
    fn visit_i16x8_extadd_pairwise_i8x16_u(&mut self) -> Self::Output {
        self.check_v128_unary_op()
    }
    fn visit_i32x4_extadd_pairwise_i16x8_s(&mut self) -> Self::Output {
        self.check_v128_unary_op()
    }
    fn visit_i32x4_extadd_pairwise_i16x8_u(&mut self) -> Self::Output {
        self.check_v128_unary_op()
    }
3517    fn visit_v128_bitselect(&mut self) -> Self::Output {
3518        self.pop_operand(Some(ValType::V128))?;
3519        self.pop_operand(Some(ValType::V128))?;
3520        self.pop_operand(Some(ValType::V128))?;
3521        self.push_operand(ValType::V128)?;
3522        Ok(())
3523    }
3524    fn visit_i8x16_relaxed_swizzle(&mut self) -> Self::Output {
3525        self.pop_operand(Some(ValType::V128))?;
3526        self.pop_operand(Some(ValType::V128))?;
3527        self.push_operand(ValType::V128)?;
3528        Ok(())
3529    }
    // Relaxed-SIMD operators, delegating to the shared unary / binary /
    // ternary v128 checks according to each instruction's arity.
    fn visit_i32x4_relaxed_trunc_f32x4_s(&mut self) -> Self::Output {
        self.check_v128_unary_op()
    }
    fn visit_i32x4_relaxed_trunc_f32x4_u(&mut self) -> Self::Output {
        self.check_v128_unary_op()
    }
    fn visit_i32x4_relaxed_trunc_f64x2_s_zero(&mut self) -> Self::Output {
        self.check_v128_unary_op()
    }
    fn visit_i32x4_relaxed_trunc_f64x2_u_zero(&mut self) -> Self::Output {
        self.check_v128_unary_op()
    }
    // Fused multiply-add and lane-select take three v128 operands.
    fn visit_f32x4_relaxed_madd(&mut self) -> Self::Output {
        self.check_v128_ternary_op()
    }
    fn visit_f32x4_relaxed_nmadd(&mut self) -> Self::Output {
        self.check_v128_ternary_op()
    }
    fn visit_f64x2_relaxed_madd(&mut self) -> Self::Output {
        self.check_v128_ternary_op()
    }
    fn visit_f64x2_relaxed_nmadd(&mut self) -> Self::Output {
        self.check_v128_ternary_op()
    }
    fn visit_i8x16_relaxed_laneselect(&mut self) -> Self::Output {
        self.check_v128_ternary_op()
    }
    fn visit_i16x8_relaxed_laneselect(&mut self) -> Self::Output {
        self.check_v128_ternary_op()
    }
    fn visit_i32x4_relaxed_laneselect(&mut self) -> Self::Output {
        self.check_v128_ternary_op()
    }
    fn visit_i64x2_relaxed_laneselect(&mut self) -> Self::Output {
        self.check_v128_ternary_op()
    }
    fn visit_f32x4_relaxed_min(&mut self) -> Self::Output {
        self.check_v128_binary_op()
    }
    fn visit_f32x4_relaxed_max(&mut self) -> Self::Output {
        self.check_v128_binary_op()
    }
    fn visit_f64x2_relaxed_min(&mut self) -> Self::Output {
        self.check_v128_binary_op()
    }
    fn visit_f64x2_relaxed_max(&mut self) -> Self::Output {
        self.check_v128_binary_op()
    }
    fn visit_i16x8_relaxed_q15mulr_s(&mut self) -> Self::Output {
        self.check_v128_binary_op()
    }
    fn visit_i16x8_relaxed_dot_i8x16_i7x16_s(&mut self) -> Self::Output {
        self.check_v128_binary_op()
    }
    // The dot-product-and-accumulate form takes the accumulator as a
    // third operand, hence the ternary check.
    fn visit_i32x4_relaxed_dot_i8x16_i7x16_add_s(&mut self) -> Self::Output {
        self.check_v128_ternary_op()
    }
    // Boolean-reduction operators ([v128] -> [i32]): any_true, all_true,
    // and bitmask. Each delegates to the shared `check_v128_bitmask_op`.
    fn visit_v128_any_true(&mut self) -> Self::Output {
        self.check_v128_bitmask_op()
    }
    fn visit_i8x16_all_true(&mut self) -> Self::Output {
        self.check_v128_bitmask_op()
    }
    fn visit_i8x16_bitmask(&mut self) -> Self::Output {
        self.check_v128_bitmask_op()
    }
    fn visit_i16x8_all_true(&mut self) -> Self::Output {
        self.check_v128_bitmask_op()
    }
    fn visit_i16x8_bitmask(&mut self) -> Self::Output {
        self.check_v128_bitmask_op()
    }
    fn visit_i32x4_all_true(&mut self) -> Self::Output {
        self.check_v128_bitmask_op()
    }
    fn visit_i32x4_bitmask(&mut self) -> Self::Output {
        self.check_v128_bitmask_op()
    }
    fn visit_i64x2_all_true(&mut self) -> Self::Output {
        self.check_v128_bitmask_op()
    }
    fn visit_i64x2_bitmask(&mut self) -> Self::Output {
        self.check_v128_bitmask_op()
    }
    // Lane-wise shift operators ([v128 i32] -> [v128]); each delegates to
    // the shared `check_v128_shift_op`.
    fn visit_i8x16_shl(&mut self) -> Self::Output {
        self.check_v128_shift_op()
    }
    fn visit_i8x16_shr_s(&mut self) -> Self::Output {
        self.check_v128_shift_op()
    }
    fn visit_i8x16_shr_u(&mut self) -> Self::Output {
        self.check_v128_shift_op()
    }
    fn visit_i16x8_shl(&mut self) -> Self::Output {
        self.check_v128_shift_op()
    }
    fn visit_i16x8_shr_s(&mut self) -> Self::Output {
        self.check_v128_shift_op()
    }
    fn visit_i16x8_shr_u(&mut self) -> Self::Output {
        self.check_v128_shift_op()
    }
    fn visit_i32x4_shl(&mut self) -> Self::Output {
        self.check_v128_shift_op()
    }
    fn visit_i32x4_shr_s(&mut self) -> Self::Output {
        self.check_v128_shift_op()
    }
    fn visit_i32x4_shr_u(&mut self) -> Self::Output {
        self.check_v128_shift_op()
    }
    fn visit_i64x2_shl(&mut self) -> Self::Output {
        self.check_v128_shift_op()
    }
    fn visit_i64x2_shr_s(&mut self) -> Self::Output {
        self.check_v128_shift_op()
    }
    fn visit_i64x2_shr_u(&mut self) -> Self::Output {
        self.check_v128_shift_op()
    }
3650    fn visit_i8x16_swizzle(&mut self) -> Self::Output {
3651        self.pop_operand(Some(ValType::V128))?;
3652        self.pop_operand(Some(ValType::V128))?;
3653        self.push_operand(ValType::V128)?;
3654        Ok(())
3655    }
3656    fn visit_i8x16_shuffle(&mut self, lanes: [u8; 16]) -> Self::Output {
3657        self.pop_operand(Some(ValType::V128))?;
3658        self.pop_operand(Some(ValType::V128))?;
3659        for i in lanes {
3660            self.check_simd_lane_index(i, 32)?;
3661        }
3662        self.push_operand(ValType::V128)?;
3663        Ok(())
3664    }
3665    fn visit_v128_load8_splat(&mut self, memarg: MemArg) -> Self::Output {
3666        let ty = self.check_memarg(memarg)?;
3667        self.pop_operand(Some(ty))?;
3668        self.push_operand(ValType::V128)?;
3669        Ok(())
3670    }
3671    fn visit_v128_load16_splat(&mut self, memarg: MemArg) -> Self::Output {
3672        let ty = self.check_memarg(memarg)?;
3673        self.pop_operand(Some(ty))?;
3674        self.push_operand(ValType::V128)?;
3675        Ok(())
3676    }
3677    fn visit_v128_load32_splat(&mut self, memarg: MemArg) -> Self::Output {
3678        let ty = self.check_memarg(memarg)?;
3679        self.pop_operand(Some(ty))?;
3680        self.push_operand(ValType::V128)?;
3681        Ok(())
3682    }
    // `v128.load32_zero` validates exactly like `v128.load32_splat`:
    // pop a memory index operand, push a v128.
    fn visit_v128_load32_zero(&mut self, memarg: MemArg) -> Self::Output {
        self.visit_v128_load32_splat(memarg)
    }
    // 64-bit-wide v128 loads (splat/zero/extending); all share the same
    // stack shape via `check_v128_load_op`.
    fn visit_v128_load64_splat(&mut self, memarg: MemArg) -> Self::Output {
        self.check_v128_load_op(memarg)
    }
    fn visit_v128_load64_zero(&mut self, memarg: MemArg) -> Self::Output {
        self.check_v128_load_op(memarg)
    }
    fn visit_v128_load8x8_s(&mut self, memarg: MemArg) -> Self::Output {
        self.check_v128_load_op(memarg)
    }
    fn visit_v128_load8x8_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_v128_load_op(memarg)
    }
    fn visit_v128_load16x4_s(&mut self, memarg: MemArg) -> Self::Output {
        self.check_v128_load_op(memarg)
    }
    fn visit_v128_load16x4_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_v128_load_op(memarg)
    }
    fn visit_v128_load32x2_s(&mut self, memarg: MemArg) -> Self::Output {
        self.check_v128_load_op(memarg)
    }
    fn visit_v128_load32x2_u(&mut self, memarg: MemArg) -> Self::Output {
        self.check_v128_load_op(memarg)
    }
    // Lane loads: [addr v128] -> [v128]. The memarg is validated first
    // (its result `idx` is the memory's index type), then the immediate
    // lane index is checked against the lane count for that width
    // (16 x 8-bit, 8 x 16-bit, 4 x 32-bit, 2 x 64-bit). Operands are
    // popped in reverse order: the vector first, then the address.
    fn visit_v128_load8_lane(&mut self, memarg: MemArg, lane: u8) -> Self::Output {
        let idx = self.check_memarg(memarg)?;
        self.check_simd_lane_index(lane, 16)?;
        self.pop_operand(Some(ValType::V128))?;
        self.pop_operand(Some(idx))?;
        self.push_operand(ValType::V128)?;
        Ok(())
    }
    fn visit_v128_load16_lane(&mut self, memarg: MemArg, lane: u8) -> Self::Output {
        let idx = self.check_memarg(memarg)?;
        self.check_simd_lane_index(lane, 8)?;
        self.pop_operand(Some(ValType::V128))?;
        self.pop_operand(Some(idx))?;
        self.push_operand(ValType::V128)?;
        Ok(())
    }
    fn visit_v128_load32_lane(&mut self, memarg: MemArg, lane: u8) -> Self::Output {
        let idx = self.check_memarg(memarg)?;
        self.check_simd_lane_index(lane, 4)?;
        self.pop_operand(Some(ValType::V128))?;
        self.pop_operand(Some(idx))?;
        self.push_operand(ValType::V128)?;
        Ok(())
    }
    fn visit_v128_load64_lane(&mut self, memarg: MemArg, lane: u8) -> Self::Output {
        let idx = self.check_memarg(memarg)?;
        self.check_simd_lane_index(lane, 2)?;
        self.pop_operand(Some(ValType::V128))?;
        self.pop_operand(Some(idx))?;
        self.push_operand(ValType::V128)?;
        Ok(())
    }
    // Lane stores: [addr v128] -> []. Same shape as the lane loads above
    // except no result is pushed.
    fn visit_v128_store8_lane(&mut self, memarg: MemArg, lane: u8) -> Self::Output {
        let idx = self.check_memarg(memarg)?;
        self.check_simd_lane_index(lane, 16)?;
        self.pop_operand(Some(ValType::V128))?;
        self.pop_operand(Some(idx))?;
        Ok(())
    }
    fn visit_v128_store16_lane(&mut self, memarg: MemArg, lane: u8) -> Self::Output {
        let idx = self.check_memarg(memarg)?;
        self.check_simd_lane_index(lane, 8)?;
        self.pop_operand(Some(ValType::V128))?;
        self.pop_operand(Some(idx))?;
        Ok(())
    }
    fn visit_v128_store32_lane(&mut self, memarg: MemArg, lane: u8) -> Self::Output {
        let idx = self.check_memarg(memarg)?;
        self.check_simd_lane_index(lane, 4)?;
        self.pop_operand(Some(ValType::V128))?;
        self.pop_operand(Some(idx))?;
        Ok(())
    }
    fn visit_v128_store64_lane(&mut self, memarg: MemArg, lane: u8) -> Self::Output {
        let idx = self.check_memarg(memarg)?;
        self.check_simd_lane_index(lane, 2)?;
        self.pop_operand(Some(ValType::V128))?;
        self.pop_operand(Some(idx))?;
        Ok(())
    }
    // `memory.init` copies from a data segment into linear memory.
    // Operands are [dst, src-offset, len], popped in reverse: len and
    // src-offset are always i32, while dst uses the destination memory's
    // index type (`ty`).
    fn visit_memory_init(&mut self, segment: u32, mem: u32) -> Self::Output {
        let ty = self.check_memory_index(mem)?;
        self.check_data_segment(segment)?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(ty))?;
        Ok(())
    }
3778    fn visit_data_drop(&mut self, segment: u32) -> Self::Output {
3779        self.check_data_segment(segment)?;
3780        Ok(())
3781    }
    // `memory.copy` between two (possibly distinct) memories whose index
    // types may differ. Operands are [dst, src-offset, len], popped in
    // reverse order below.
    fn visit_memory_copy(&mut self, dst: u32, src: u32) -> Self::Output {
        let dst_ty = self.check_memory_index(dst)?;
        let src_ty = self.check_memory_index(src)?;

        // The length operand here is the smaller of src/dst, which is
        // i32 if one is i32 (an i32-indexed memory bounds the copy size).
        self.pop_operand(Some(match src_ty {
            ValType::I32 => ValType::I32,
            _ => dst_ty,
        }))?;

        // ... and the offset into each memory is required to be
        // whatever the indexing type is for that memory
        self.pop_operand(Some(src_ty))?;
        self.pop_operand(Some(dst_ty))?;
        Ok(())
    }
    // `memory.fill`: operands [dst, value, len], popped in reverse; dst
    // and len use the memory's index type, the fill value is an i32.
    fn visit_memory_fill(&mut self, mem: u32) -> Self::Output {
        let ty = self.check_memory_index(mem)?;
        self.pop_operand(Some(ty))?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(ty))?;
        Ok(())
    }
    // `memory.discard`: pops two operands (address and length), both
    // typed by the memory's index type.
    fn visit_memory_discard(&mut self, mem: u32) -> Self::Output {
        let ty = self.check_memory_index(mem)?;
        self.pop_operand(Some(ty))?;
        self.pop_operand(Some(ty))?;
        Ok(())
    }
    // `table.init` copies out of an element segment into a table. The
    // segment's element type must be a subtype of the table's element
    // type.
    fn visit_table_init(&mut self, segment: u32, table: u32) -> Self::Output {
        let table = self.table_type_at(table)?;
        let segment_ty = self.element_type_at(segment)?;
        if !self
            .resources
            .is_subtype(ValType::Ref(segment_ty), ValType::Ref(table.element_type))
        {
            bail!(self.offset, "type mismatch");
        }
        // Operands [dst, src-offset, len], popped in reverse: len and
        // src-offset are i32, dst uses the table's index type.
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(table.index_type()))?;
        Ok(())
    }
3826    fn visit_elem_drop(&mut self, segment: u32) -> Self::Output {
3827        self.element_type_at(segment)?;
3828        Ok(())
3829    }
    // `table.copy` between two (possibly distinct) tables: the source
    // element type must be a subtype of the destination element type,
    // and the tables' index types may differ.
    fn visit_table_copy(&mut self, dst_table: u32, src_table: u32) -> Self::Output {
        let src = self.table_type_at(src_table)?;
        let dst = self.table_type_at(dst_table)?;
        if !self.resources.is_subtype(
            ValType::Ref(src.element_type),
            ValType::Ref(dst.element_type),
        ) {
            bail!(self.offset, "type mismatch");
        }

        // The length operand here is the smaller of src/dst, which is
        // i32 if one is i32 (an i32-indexed table bounds the copy size)
        self.pop_operand(Some(match src.index_type() {
            ValType::I32 => ValType::I32,
            _ => dst.index_type(),
        }))?;

        // ... and the offset into each table is required to be
        // whatever the indexing type is for that table
        self.pop_operand(Some(src.index_type()))?;
        self.pop_operand(Some(dst.index_type()))?;
        Ok(())
    }
3853    fn visit_table_get(&mut self, table: u32) -> Self::Output {
3854        let table = self.table_type_at(table)?;
3855        debug_assert_type_indices_are_ids(table.element_type.into());
3856        self.pop_operand(Some(table.index_type()))?;
3857        self.push_operand(table.element_type)?;
3858        Ok(())
3859    }
    // `table.atomic.get` validates like a plain `table.get` plus an extra
    // restriction on the table's element type.
    fn visit_table_atomic_get(&mut self, _ordering: Ordering, table: u32) -> Self::Output {
        self.visit_table_get(table)?;
        // No validation of `ordering` is needed because `table.atomic.get` can
        // be used on both shared and unshared tables. But we do need to limit
        // which types can be used with this instruction.
        let ty = self.table_type_at(table)?.element_type;
        let supertype = RefType::ANYREF.shared().unwrap();
        if !self.resources.is_subtype(ty.into(), supertype.into()) {
            bail!(
                self.offset,
                "invalid type: `table.atomic.get` only allows subtypes of `anyref`"
            );
        }
        Ok(())
    }
3875    fn visit_table_set(&mut self, table: u32) -> Self::Output {
3876        let table = self.table_type_at(table)?;
3877        debug_assert_type_indices_are_ids(table.element_type.into());
3878        self.pop_operand(Some(table.element_type.into()))?;
3879        self.pop_operand(Some(table.index_type()))?;
3880        Ok(())
3881    }
    // `table.atomic.set` validates like a plain `table.set` plus an extra
    // restriction on the table's element type.
    fn visit_table_atomic_set(&mut self, _ordering: Ordering, table: u32) -> Self::Output {
        self.visit_table_set(table)?;
        // No validation of `ordering` is needed because `table.atomic.set` can
        // be used on both shared and unshared tables. But we do need to limit
        // which types can be used with this instruction.
        let ty = self.table_type_at(table)?.element_type;
        let supertype = RefType::ANYREF.shared().unwrap();
        if !self.resources.is_subtype(ty.into(), supertype.into()) {
            bail!(
                self.offset,
                "invalid type: `table.atomic.set` only allows subtypes of `anyref`"
            );
        }
        Ok(())
    }
3897    fn visit_table_grow(&mut self, table: u32) -> Self::Output {
3898        let table = self.table_type_at(table)?;
3899        debug_assert_type_indices_are_ids(table.element_type.into());
3900        self.pop_operand(Some(table.index_type()))?;
3901        self.pop_operand(Some(table.element_type.into()))?;
3902        self.push_operand(table.index_type())?;
3903        Ok(())
3904    }
3905    fn visit_table_size(&mut self, table: u32) -> Self::Output {
3906        let table = self.table_type_at(table)?;
3907        self.push_operand(table.index_type())?;
3908        Ok(())
3909    }
    // `table.fill`: operands [dst, value, len], popped in reverse; dst
    // and len use the table's index type, the value must match the
    // table's element type.
    fn visit_table_fill(&mut self, table: u32) -> Self::Output {
        let table = self.table_type_at(table)?;
        debug_assert_type_indices_are_ids(table.element_type.into());
        self.pop_operand(Some(table.index_type()))?;
        self.pop_operand(Some(table.element_type.into()))?;
        self.pop_operand(Some(table.index_type()))?;
        Ok(())
    }
    // `table.atomic.rmw.xchg`: [index, new-value] -> [old-value]. The
    // element type is restricted to subtypes of (shared) `anyref`; the
    // `ordering` immediate needs no validation (usable on both shared and
    // unshared tables).
    fn visit_table_atomic_rmw_xchg(&mut self, _ordering: Ordering, table: u32) -> Self::Output {
        let table = self.table_type_at(table)?;
        let elem_ty = table.element_type.into();
        debug_assert_type_indices_are_ids(elem_ty);
        let supertype = RefType::ANYREF.shared().unwrap();
        if !self.resources.is_subtype(elem_ty, supertype.into()) {
            bail!(
                self.offset,
                "invalid type: `table.atomic.rmw.xchg` only allows subtypes of `anyref`"
            );
        }
        self.pop_operand(Some(elem_ty))?;
        self.pop_operand(Some(table.index_type()))?;
        self.push_operand(elem_ty)?;
        Ok(())
    }
    fn visit_table_atomic_rmw_cmpxchg(&mut self, _ordering: Ordering, table: u32) -> Self::Output {
        // `table.atomic.rmw.cmpxchg`: pops two element-typed operands
        // (compare/replacement) plus the index, and pushes the old value.
        let table = self.table_type_at(table)?;
        let elem_ty = table.element_type.into();
        debug_assert_type_indices_are_ids(elem_ty);
        // Unlike `xchg`, the bound here is shared `eqref` — the stored value
        // must support equality comparison for the compare step.
        let supertype = RefType::EQREF.shared().unwrap();
        if !self.resources.is_subtype(elem_ty, supertype.into()) {
            bail!(
                self.offset,
                "invalid type: `table.atomic.rmw.cmpxchg` only allows subtypes of `eqref`"
            );
        }
        self.pop_operand(Some(elem_ty))?;
        self.pop_operand(Some(elem_ty))?;
        self.pop_operand(Some(table.index_type()))?;
        self.push_operand(elem_ty)?;
        Ok(())
    }
3951    fn visit_struct_new(&mut self, struct_type_index: u32) -> Self::Output {
3952        let struct_ty = self.struct_type_at(struct_type_index)?;
3953        for ty in struct_ty.fields.iter().rev() {
3954            self.pop_operand(Some(ty.element_type.unpack()))?;
3955        }
3956        self.push_concrete_ref(false, struct_type_index)?;
3957        Ok(())
3958    }
3959    fn visit_struct_new_default(&mut self, type_index: u32) -> Self::Output {
3960        let ty = self.struct_type_at(type_index)?;
3961        for field in ty.fields.iter() {
3962            let val_ty = field.element_type.unpack();
3963            if !val_ty.is_defaultable() {
3964                bail!(
3965                    self.offset,
3966                    "invalid `struct.new_default`: {val_ty} field is not defaultable"
3967                );
3968            }
3969        }
3970        self.push_concrete_ref(false, type_index)?;
3971        Ok(())
3972    }
3973    fn visit_struct_get(&mut self, struct_type_index: u32, field_index: u32) -> Self::Output {
3974        let field_ty = self.struct_field_at(struct_type_index, field_index)?;
3975        if field_ty.element_type.is_packed() {
3976            bail!(
3977                self.offset,
3978                "can only use struct `get` with non-packed storage types"
3979            )
3980        }
3981        self.pop_concrete_ref(true, struct_type_index)?;
3982        self.push_operand(field_ty.element_type.unpack())
3983    }
    fn visit_struct_atomic_get(
        &mut self,
        _ordering: Ordering,
        struct_type_index: u32,
        field_index: u32,
    ) -> Self::Output {
        // Validates exactly like the plain `struct.get` first (pops the
        // struct ref, pushes the unpacked field type) ...
        self.visit_struct_get(struct_type_index, field_index)?;
        // The `atomic` version has some additional type restrictions.
        let ty = self
            .struct_field_at(struct_type_index, field_index)?
            .element_type;
        let is_valid_type = match ty {
            StorageType::Val(ValType::I32) | StorageType::Val(ValType::I64) => true,
            // Other value types are allowed only if they are subtypes of
            // shared `anyref`.
            StorageType::Val(v) => self
                .resources
                .is_subtype(v, RefType::ANYREF.shared().unwrap().into()),
            // Packed fields (i8/i16) fall through here, though the
            // `struct.get` above has already rejected them.
            _ => false,
        };
        if !is_valid_type {
            bail!(
                self.offset,
                "invalid type: `struct.atomic.get` only allows `i32`, `i64` and subtypes of `anyref`"
            );
        }
        Ok(())
    }
4010    fn visit_struct_get_s(&mut self, struct_type_index: u32, field_index: u32) -> Self::Output {
4011        let field_ty = self.struct_field_at(struct_type_index, field_index)?;
4012        if !field_ty.element_type.is_packed() {
4013            bail!(
4014                self.offset,
4015                "cannot use struct.get_s with non-packed storage types"
4016            )
4017        }
4018        self.pop_concrete_ref(true, struct_type_index)?;
4019        self.push_operand(field_ty.element_type.unpack())
4020    }
    fn visit_struct_atomic_get_s(
        &mut self,
        _ordering: Ordering,
        struct_type_index: u32,
        field_index: u32,
    ) -> Self::Output {
        // Atomic sign-extending packed-field load; typing is identical to
        // the plain `struct.get_s`, which already requires a packed field.
        self.visit_struct_get_s(struct_type_index, field_index)?;
        // This instruction has the same type restrictions as the non-`atomic` version.
        // NOTE: the field lookup (and its `?`) only runs in debug builds
        // because it lives inside `debug_assert!`.
        debug_assert!(matches!(
            self.struct_field_at(struct_type_index, field_index)?
                .element_type,
            StorageType::I8 | StorageType::I16
        ));
        Ok(())
    }
4036    fn visit_struct_get_u(&mut self, struct_type_index: u32, field_index: u32) -> Self::Output {
4037        let field_ty = self.struct_field_at(struct_type_index, field_index)?;
4038        if !field_ty.element_type.is_packed() {
4039            bail!(
4040                self.offset,
4041                "cannot use struct.get_u with non-packed storage types"
4042            )
4043        }
4044        self.pop_concrete_ref(true, struct_type_index)?;
4045        self.push_operand(field_ty.element_type.unpack())
4046    }
4047    fn visit_struct_atomic_get_u(
4048        &mut self,
4049        _ordering: Ordering,
4050        struct_type_index: u32,
4051        field_index: u32,
4052    ) -> Self::Output {
4053        self.visit_struct_get_s(struct_type_index, field_index)?;
4054        // This instruction has the same type restrictions as the non-`atomic` version.
4055        debug_assert!(matches!(
4056            self.struct_field_at(struct_type_index, field_index)?
4057                .element_type,
4058            StorageType::I8 | StorageType::I16
4059        ));
4060        Ok(())
4061    }
4062    fn visit_struct_set(&mut self, struct_type_index: u32, field_index: u32) -> Self::Output {
4063        let field_ty = self.mutable_struct_field_at(struct_type_index, field_index)?;
4064        self.pop_operand(Some(field_ty.element_type.unpack()))?;
4065        self.pop_concrete_ref(true, struct_type_index)?;
4066        Ok(())
4067    }
    fn visit_struct_atomic_set(
        &mut self,
        _ordering: Ordering,
        struct_type_index: u32,
        field_index: u32,
    ) -> Self::Output {
        // Validates like the plain `struct.set` (mutability + operand types).
        self.visit_struct_set(struct_type_index, field_index)?;
        // The `atomic` version has some additional type restrictions.
        let ty = self
            .struct_field_at(struct_type_index, field_index)?
            .element_type;
        // This match is deliberately exhaustive (no `_` arm): unlike the
        // atomic loads, packed i8/i16 fields ARE allowed for atomic stores.
        let is_valid_type = match ty {
            StorageType::I8 | StorageType::I16 => true,
            StorageType::Val(ValType::I32) | StorageType::Val(ValType::I64) => true,
            StorageType::Val(v) => self
                .resources
                .is_subtype(v, RefType::ANYREF.shared().unwrap().into()),
        };
        if !is_valid_type {
            bail!(
                self.offset,
                "invalid type: `struct.atomic.set` only allows `i8`, `i16`, `i32`, `i64` and subtypes of `anyref`"
            );
        }
        Ok(())
    }
    fn visit_struct_atomic_rmw_add(
        &mut self,
        _ordering: Ordering,
        struct_type_index: u32,
        field_index: u32,
    ) -> Self::Output {
        // Shared validation for struct atomic RMW ops lives in
        // `check_struct_atomic_rmw`; only the op name differs per visitor.
        self.check_struct_atomic_rmw("add", struct_type_index, field_index)
    }
    fn visit_struct_atomic_rmw_sub(
        &mut self,
        _ordering: Ordering,
        struct_type_index: u32,
        field_index: u32,
    ) -> Self::Output {
        // Delegates to the shared struct RMW checker with op name "sub".
        self.check_struct_atomic_rmw("sub", struct_type_index, field_index)
    }
    fn visit_struct_atomic_rmw_and(
        &mut self,
        _ordering: Ordering,
        struct_type_index: u32,
        field_index: u32,
    ) -> Self::Output {
        // Delegates to the shared struct RMW checker with op name "and".
        self.check_struct_atomic_rmw("and", struct_type_index, field_index)
    }
    fn visit_struct_atomic_rmw_or(
        &mut self,
        _ordering: Ordering,
        struct_type_index: u32,
        field_index: u32,
    ) -> Self::Output {
        // Delegates to the shared struct RMW checker with op name "or".
        self.check_struct_atomic_rmw("or", struct_type_index, field_index)
    }
    fn visit_struct_atomic_rmw_xor(
        &mut self,
        _ordering: Ordering,
        struct_type_index: u32,
        field_index: u32,
    ) -> Self::Output {
        // Delegates to the shared struct RMW checker with op name "xor".
        self.check_struct_atomic_rmw("xor", struct_type_index, field_index)
    }
    fn visit_struct_atomic_rmw_xchg(
        &mut self,
        _ordering: Ordering,
        struct_type_index: u32,
        field_index: u32,
    ) -> Self::Output {
        // Atomic exchange on a mutable struct field: pops the new value and
        // the struct ref, pushes the old value. Unlike the arithmetic RMW
        // ops, exchange also accepts reference-typed fields.
        let field = self.mutable_struct_field_at(struct_type_index, field_index)?;
        let is_valid_type = match field.element_type {
            StorageType::Val(ValType::I32) | StorageType::Val(ValType::I64) => true,
            // References must be subtypes of shared `anyref`.
            StorageType::Val(v) => self
                .resources
                .is_subtype(v, RefType::ANYREF.shared().unwrap().into()),
            // Packed (i8/i16) fields are rejected.
            _ => false,
        };
        if !is_valid_type {
            bail!(
                self.offset,
                "invalid type: `struct.atomic.rmw.xchg` only allows `i32`, `i64` and subtypes of `anyref`"
            );
        }
        let field_ty = field.element_type.unpack();
        self.pop_operand(Some(field_ty))?;
        self.pop_concrete_ref(true, struct_type_index)?;
        self.push_operand(field_ty)?;
        Ok(())
    }
    fn visit_struct_atomic_rmw_cmpxchg(
        &mut self,
        _ordering: Ordering,
        struct_type_index: u32,
        field_index: u32,
    ) -> Self::Output {
        // Atomic compare-exchange on a mutable struct field: pops two
        // field-typed operands plus the struct ref, pushes the old value.
        let field = self.mutable_struct_field_at(struct_type_index, field_index)?;
        let is_valid_type = match field.element_type {
            StorageType::Val(ValType::I32) | StorageType::Val(ValType::I64) => true,
            // The bound is shared `eqref` (not `anyref`) since the compare
            // step needs equality on the stored value.
            StorageType::Val(v) => self
                .resources
                .is_subtype(v, RefType::EQREF.shared().unwrap().into()),
            // Packed (i8/i16) fields are rejected.
            _ => false,
        };
        if !is_valid_type {
            bail!(
                self.offset,
                "invalid type: `struct.atomic.rmw.cmpxchg` only allows `i32`, `i64` and subtypes of `eqref`"
            );
        }
        let field_ty = field.element_type.unpack();
        self.pop_operand(Some(field_ty))?;
        self.pop_operand(Some(field_ty))?;
        self.pop_concrete_ref(true, struct_type_index)?;
        self.push_operand(field_ty)?;
        Ok(())
    }
4187    fn visit_array_new(&mut self, type_index: u32) -> Self::Output {
4188        let array_ty = self.array_type_at(type_index)?;
4189        self.pop_operand(Some(ValType::I32))?;
4190        self.pop_operand(Some(array_ty.element_type.unpack()))?;
4191        self.push_concrete_ref(false, type_index)
4192    }
4193    fn visit_array_new_default(&mut self, type_index: u32) -> Self::Output {
4194        let ty = self.array_type_at(type_index)?;
4195        let val_ty = ty.element_type.unpack();
4196        if !val_ty.is_defaultable() {
4197            bail!(
4198                self.offset,
4199                "invalid `array.new_default`: {val_ty} field is not defaultable"
4200            );
4201        }
4202        self.pop_operand(Some(ValType::I32))?;
4203        self.push_concrete_ref(false, type_index)
4204    }
4205    fn visit_array_new_fixed(&mut self, type_index: u32, n: u32) -> Self::Output {
4206        let array_ty = self.array_type_at(type_index)?;
4207        let elem_ty = array_ty.element_type.unpack();
4208        for _ in 0..n {
4209            self.pop_operand(Some(elem_ty))?;
4210        }
4211        self.push_concrete_ref(false, type_index)
4212    }
4213    fn visit_array_new_data(&mut self, type_index: u32, data_index: u32) -> Self::Output {
4214        let array_ty = self.array_type_at(type_index)?;
4215        let elem_ty = array_ty.element_type.unpack();
4216        match elem_ty {
4217            ValType::I32 | ValType::I64 | ValType::F32 | ValType::F64 | ValType::V128 => {}
4218            ValType::Ref(_) => bail!(
4219                self.offset,
4220                "type mismatch: array.new_data can only create arrays with numeric and vector elements"
4221            ),
4222        }
4223        self.check_data_segment(data_index)?;
4224        self.pop_operand(Some(ValType::I32))?;
4225        self.pop_operand(Some(ValType::I32))?;
4226        self.push_concrete_ref(false, type_index)
4227    }
    fn visit_array_new_elem(&mut self, type_index: u32, elem_index: u32) -> Self::Output {
        // Creates an array from an element segment: the array's element type
        // must be a reference type, and the segment's element type must be a
        // subtype of it. Pops two i32s (offset and length per the
        // instruction's operands) and pushes the array ref.
        let array_ty = self.array_type_at(type_index)?;
        let array_ref_ty = match array_ty.element_type.unpack() {
            ValType::Ref(rt) => rt,
            ValType::I32 | ValType::I64 | ValType::F32 | ValType::F64 | ValType::V128 => bail!(
                self.offset,
                "type mismatch: array.new_elem can only create arrays with reference elements"
            ),
        };
        let elem_ref_ty = self.element_type_at(elem_index)?;
        if !self
            .resources
            .is_subtype(elem_ref_ty.into(), array_ref_ty.into())
        {
            bail!(
                self.offset,
                "invalid array.new_elem instruction: element segment {elem_index} type mismatch: \
                 expected {array_ref_ty}, found {elem_ref_ty}"
            )
        }
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(ValType::I32))?;
        self.push_concrete_ref(false, type_index)
    }
4252    fn visit_array_get(&mut self, type_index: u32) -> Self::Output {
4253        let array_ty = self.array_type_at(type_index)?;
4254        let elem_ty = array_ty.element_type;
4255        if elem_ty.is_packed() {
4256            bail!(
4257                self.offset,
4258                "cannot use array.get with packed storage types"
4259            )
4260        }
4261        self.pop_operand(Some(ValType::I32))?;
4262        self.pop_concrete_ref(true, type_index)?;
4263        self.push_operand(elem_ty.unpack())
4264    }
    fn visit_array_atomic_get(&mut self, _ordering: Ordering, type_index: u32) -> Self::Output {
        // Validates exactly like the plain `array.get` first.
        self.visit_array_get(type_index)?;
        // The `atomic` version has some additional type restrictions.
        let elem_ty = self.array_type_at(type_index)?.element_type;
        let is_valid_type = match elem_ty {
            StorageType::Val(ValType::I32) | StorageType::Val(ValType::I64) => true,
            // Other value types must be subtypes of shared `anyref`.
            StorageType::Val(v) => self
                .resources
                .is_subtype(v, RefType::ANYREF.shared().unwrap().into()),
            // Packed (i8/i16) elements — already rejected by `array.get`.
            _ => false,
        };
        if !is_valid_type {
            bail!(
                self.offset,
                "invalid type: `array.atomic.get` only allows `i32`, `i64` and subtypes of `anyref`"
            );
        }
        Ok(())
    }
4284    fn visit_array_get_s(&mut self, type_index: u32) -> Self::Output {
4285        let array_ty = self.array_type_at(type_index)?;
4286        let elem_ty = array_ty.element_type;
4287        if !elem_ty.is_packed() {
4288            bail!(
4289                self.offset,
4290                "cannot use array.get_s with non-packed storage types"
4291            )
4292        }
4293        self.pop_operand(Some(ValType::I32))?;
4294        self.pop_concrete_ref(true, type_index)?;
4295        self.push_operand(elem_ty.unpack())
4296    }
    fn visit_array_atomic_get_s(&mut self, _ordering: Ordering, type_index: u32) -> Self::Output {
        // Atomic sign-extending packed-element load; typing is identical to
        // the plain `array.get_s`, which already requires a packed element.
        self.visit_array_get_s(type_index)?;
        // This instruction has the same type restrictions as the non-`atomic` version.
        // NOTE: the lookup (and its `?`) only runs in debug builds.
        debug_assert!(matches!(
            self.array_type_at(type_index)?.element_type,
            StorageType::I8 | StorageType::I16
        ));
        Ok(())
    }
4306    fn visit_array_get_u(&mut self, type_index: u32) -> Self::Output {
4307        let array_ty = self.array_type_at(type_index)?;
4308        let elem_ty = array_ty.element_type;
4309        if !elem_ty.is_packed() {
4310            bail!(
4311                self.offset,
4312                "cannot use array.get_u with non-packed storage types"
4313            )
4314        }
4315        self.pop_operand(Some(ValType::I32))?;
4316        self.pop_concrete_ref(true, type_index)?;
4317        self.push_operand(elem_ty.unpack())
4318    }
    fn visit_array_atomic_get_u(&mut self, _ordering: Ordering, type_index: u32) -> Self::Output {
        // Atomic zero-extending packed-element load; typing is identical to
        // the plain `array.get_u`.
        self.visit_array_get_u(type_index)?;
        // This instruction has the same type restrictions as the non-`atomic` version.
        // NOTE: the lookup (and its `?`) only runs in debug builds.
        debug_assert!(matches!(
            self.array_type_at(type_index)?.element_type,
            StorageType::I8 | StorageType::I16
        ));
        Ok(())
    }
4328    fn visit_array_set(&mut self, type_index: u32) -> Self::Output {
4329        let array_ty = self.mutable_array_type_at(type_index)?;
4330        self.pop_operand(Some(array_ty.element_type.unpack()))?;
4331        self.pop_operand(Some(ValType::I32))?;
4332        self.pop_concrete_ref(true, type_index)?;
4333        Ok(())
4334    }
    fn visit_array_atomic_set(&mut self, _ordering: Ordering, type_index: u32) -> Self::Output {
        // Validates like the plain `array.set` (mutability + operand types).
        self.visit_array_set(type_index)?;
        // The `atomic` version has some additional type restrictions.
        let elem_ty = self.array_type_at(type_index)?.element_type;
        // Deliberately exhaustive (no `_` arm): packed i8/i16 elements ARE
        // allowed for atomic stores, unlike the atomic loads.
        let is_valid_type = match elem_ty {
            StorageType::I8 | StorageType::I16 => true,
            StorageType::Val(ValType::I32) | StorageType::Val(ValType::I64) => true,
            StorageType::Val(v) => self
                .resources
                .is_subtype(v, RefType::ANYREF.shared().unwrap().into()),
        };
        if !is_valid_type {
            bail!(
                self.offset,
                "invalid type: `array.atomic.set` only allows `i8`, `i16`, `i32`, `i64` and subtypes of `anyref`"
            );
        }
        Ok(())
    }
    fn visit_array_len(&mut self) -> Self::Output {
        // Accepts any (possibly shared) array reference and yields its
        // length as an i32.
        self.pop_maybe_shared_ref(AbstractHeapType::Array)?;
        self.push_operand(ValType::I32)
    }
4358    fn visit_array_fill(&mut self, array_type_index: u32) -> Self::Output {
4359        let array_ty = self.mutable_array_type_at(array_type_index)?;
4360        self.pop_operand(Some(ValType::I32))?;
4361        self.pop_operand(Some(array_ty.element_type.unpack()))?;
4362        self.pop_operand(Some(ValType::I32))?;
4363        self.pop_concrete_ref(true, array_type_index)?;
4364        Ok(())
4365    }
    fn visit_array_copy(&mut self, type_index_dst: u32, type_index_src: u32) -> Self::Output {
        // `array.copy`: destination element type must be mutable, and the
        // source element type must "fit" the destination — identical packed
        // types, or source a subtype of destination for value types.
        let array_ty_dst = self.mutable_array_type_at(type_index_dst)?;
        let array_ty_src = self.array_type_at(type_index_src)?;
        match (array_ty_dst.element_type, array_ty_src.element_type) {
            (StorageType::I8, StorageType::I8) => {}
            (StorageType::I8, ty) => bail!(
                self.offset,
                "array types do not match: expected i8, found {ty}"
            ),
            (StorageType::I16, StorageType::I16) => {}
            (StorageType::I16, ty) => bail!(
                self.offset,
                "array types do not match: expected i16, found {ty}"
            ),
            (StorageType::Val(dst), StorageType::Val(src)) => {
                if !self.resources.is_subtype(src, dst) {
                    bail!(
                        self.offset,
                        "array types do not match: expected {dst}, found {src}"
                    )
                }
            }
            // Value-typed destination with a packed source is never valid.
            (StorageType::Val(dst), src) => {
                bail!(
                    self.offset,
                    "array types do not match: expected {dst}, found {src}"
                )
            }
        }
        // Stack (top first): count, src index, src ref, dst index, dst ref.
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_concrete_ref(true, type_index_src)?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_concrete_ref(true, type_index_dst)?;
        Ok(())
    }
    fn visit_array_init_data(
        &mut self,
        array_type_index: u32,
        array_data_index: u32,
    ) -> Self::Output {
        // Initializes part of a mutable array from a data segment; like
        // `array.new_data`, only numeric/vector element types are allowed.
        let array_ty = self.mutable_array_type_at(array_type_index)?;
        let val_ty = array_ty.element_type.unpack();
        match val_ty {
            ValType::I32 | ValType::I64 | ValType::F32 | ValType::F64 | ValType::V128 => {}
            ValType::Ref(_) => bail!(
                self.offset,
                "invalid array.init_data: array type is not numeric or vector"
            ),
        }
        self.check_data_segment(array_data_index)?;
        // Stack (top first): three i32 operands, then the array reference.
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_concrete_ref(true, array_type_index)?;
        Ok(())
    }
    fn visit_array_init_elem(&mut self, type_index: u32, elem_index: u32) -> Self::Output {
        // Initializes part of a mutable array from an element segment; the
        // array's element type must be a reference type and the segment's
        // type must be a subtype of it.
        let array_ty = self.mutable_array_type_at(type_index)?;
        let array_ref_ty = match array_ty.element_type.unpack() {
            ValType::Ref(rt) => rt,
            ValType::I32 | ValType::I64 | ValType::F32 | ValType::F64 | ValType::V128 => bail!(
                self.offset,
                "type mismatch: array.init_elem can only create arrays with reference elements"
            ),
        };
        let elem_ref_ty = self.element_type_at(elem_index)?;
        if !self
            .resources
            .is_subtype(elem_ref_ty.into(), array_ref_ty.into())
        {
            bail!(
                self.offset,
                "invalid array.init_elem instruction: element segment {elem_index} type mismatch: \
                 expected {array_ref_ty}, found {elem_ref_ty}"
            )
        }
        // Stack (top first): three i32 operands, then the array reference.
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_concrete_ref(true, type_index)?;
        Ok(())
    }
    fn visit_array_atomic_rmw_add(&mut self, _ordering: Ordering, type_index: u32) -> Self::Output {
        // Shared validation for array atomic RMW ops lives in
        // `check_array_atomic_rmw`; only the op name differs per visitor.
        self.check_array_atomic_rmw("add", type_index)
    }
    fn visit_array_atomic_rmw_sub(&mut self, _ordering: Ordering, type_index: u32) -> Self::Output {
        // Delegates to the shared array RMW checker with op name "sub".
        self.check_array_atomic_rmw("sub", type_index)
    }
    fn visit_array_atomic_rmw_and(&mut self, _ordering: Ordering, type_index: u32) -> Self::Output {
        // Delegates to the shared array RMW checker with op name "and".
        self.check_array_atomic_rmw("and", type_index)
    }
    fn visit_array_atomic_rmw_or(&mut self, _ordering: Ordering, type_index: u32) -> Self::Output {
        // Delegates to the shared array RMW checker with op name "or".
        self.check_array_atomic_rmw("or", type_index)
    }
    fn visit_array_atomic_rmw_xor(&mut self, _ordering: Ordering, type_index: u32) -> Self::Output {
        // Delegates to the shared array RMW checker with op name "xor".
        self.check_array_atomic_rmw("xor", type_index)
    }
    fn visit_array_atomic_rmw_xchg(
        &mut self,
        _ordering: Ordering,
        type_index: u32,
    ) -> Self::Output {
        // Atomic exchange on a mutable array element: pops the new value,
        // the index (i32), and the array ref; pushes the old value. Unlike
        // the arithmetic RMW ops, exchange also accepts reference elements.
        let field = self.mutable_array_type_at(type_index)?;
        let is_valid_type = match field.element_type {
            StorageType::Val(ValType::I32) | StorageType::Val(ValType::I64) => true,
            // References must be subtypes of shared `anyref`.
            StorageType::Val(v) => self
                .resources
                .is_subtype(v, RefType::ANYREF.shared().unwrap().into()),
            // Packed (i8/i16) elements are rejected.
            _ => false,
        };
        if !is_valid_type {
            bail!(
                self.offset,
                "invalid type: `array.atomic.rmw.xchg` only allows `i32`, `i64` and subtypes of `anyref`"
            );
        }
        let elem_ty = field.element_type.unpack();
        self.pop_operand(Some(elem_ty))?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_concrete_ref(true, type_index)?;
        self.push_operand(elem_ty)?;
        Ok(())
    }
    fn visit_array_atomic_rmw_cmpxchg(
        &mut self,
        _ordering: Ordering,
        type_index: u32,
    ) -> Self::Output {
        // Atomic compare-exchange on a mutable array element: pops two
        // element-typed operands, the index (i32), and the array ref;
        // pushes the old value.
        let field = self.mutable_array_type_at(type_index)?;
        let is_valid_type = match field.element_type {
            StorageType::Val(ValType::I32) | StorageType::Val(ValType::I64) => true,
            // The bound is shared `eqref` — the compare step needs equality.
            StorageType::Val(v) => self
                .resources
                .is_subtype(v, RefType::EQREF.shared().unwrap().into()),
            // Packed (i8/i16) elements are rejected.
            _ => false,
        };
        if !is_valid_type {
            bail!(
                self.offset,
                "invalid type: `array.atomic.rmw.cmpxchg` only allows `i32`, `i64` and subtypes of `eqref`"
            );
        }
        let elem_ty = field.element_type.unpack();
        self.pop_operand(Some(elem_ty))?;
        self.pop_operand(Some(elem_ty))?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_concrete_ref(true, type_index)?;
        self.push_operand(elem_ty)?;
        Ok(())
    }
    fn visit_any_convert_extern(&mut self) -> Self::Output {
        // `any.convert_extern`: pops an externref and pushes the
        // corresponding anyref, preserving nullability and sharedness.
        let any_ref = match self.pop_maybe_shared_ref(AbstractHeapType::Extern)? {
            // In unreachable code (or with an unknown ref) the exact type is
            // not known; push an unknown ref in the `any` hierarchy.
            MaybeType::Bottom | MaybeType::UnknownRef(_) => {
                MaybeType::UnknownRef(Some(AbstractHeapType::Any))
            }
            MaybeType::Known(ty) => {
                let shared = self.resources.is_shared(ty);
                let heap_type = HeapType::Abstract {
                    shared,
                    ty: AbstractHeapType::Any,
                };
                // Carrying over nullability/sharedness keeps this infallible.
                let any_ref = RefType::new(ty.is_nullable(), heap_type).unwrap();
                MaybeType::Known(any_ref)
            }
        };
        self.push_operand(any_ref)
    }
    fn visit_extern_convert_any(&mut self) -> Self::Output {
        // `extern.convert_any`: the mirror image of `any.convert_extern` —
        // pops an anyref and pushes the corresponding externref, preserving
        // nullability and sharedness.
        let extern_ref = match self.pop_maybe_shared_ref(AbstractHeapType::Any)? {
            // Unknown input type (e.g. unreachable code): push an unknown
            // ref in the `extern` hierarchy.
            MaybeType::Bottom | MaybeType::UnknownRef(_) => {
                MaybeType::UnknownRef(Some(AbstractHeapType::Extern))
            }
            MaybeType::Known(ty) => {
                let shared = self.resources.is_shared(ty);
                let heap_type = HeapType::Abstract {
                    shared,
                    ty: AbstractHeapType::Extern,
                };
                let extern_ref = RefType::new(ty.is_nullable(), heap_type).unwrap();
                MaybeType::Known(extern_ref)
            }
        };
        self.push_operand(extern_ref)
    }
    fn visit_ref_test_non_null(&mut self, heap_type: HeapType) -> Self::Output {
        // `ref.test (ref ht)` — shared logic in `check_ref_test`, with the
        // target type non-nullable.
        self.check_ref_test(false, heap_type)
    }
    fn visit_ref_test_nullable(&mut self, heap_type: HeapType) -> Self::Output {
        // `ref.test (ref null ht)` — target type nullable.
        self.check_ref_test(true, heap_type)
    }
    fn visit_ref_cast_non_null(&mut self, heap_type: HeapType) -> Self::Output {
        // `ref.cast (ref ht)` — shared logic in `check_ref_cast`, with the
        // target type non-nullable.
        self.check_ref_cast(false, heap_type)
    }
    fn visit_ref_cast_nullable(&mut self, heap_type: HeapType) -> Self::Output {
        // `ref.cast (ref null ht)` — target type nullable.
        self.check_ref_cast(true, heap_type)
    }
    fn visit_br_on_cast(
        &mut self,
        relative_depth: u32,
        mut from_ref_type: RefType,
        mut to_ref_type: RefType,
    ) -> Self::Output {
        // `br_on_cast`: branches to `relative_depth` with the value cast to
        // `to_ref_type` on success; on failure, falls through with the value
        // typed as `from \ to` (the difference type).
        // Canonicalize the immediate types (resolves type indices to ids).
        self.resources
            .check_ref_type(&mut from_ref_type, self.offset)?;
        self.resources
            .check_ref_type(&mut to_ref_type, self.offset)?;

        // The cast target must be a subtype of the source type.
        if !self
            .resources
            .is_subtype(to_ref_type.into(), from_ref_type.into())
        {
            bail!(
                self.offset,
                "type mismatch: expected {from_ref_type}, found {to_ref_type}"
            );
        }

        let (block_ty, frame_kind) = self.jump(relative_depth)?;
        let mut label_types = self.label_types(block_ty, frame_kind)?;

        // The label's last result must accept the cast-to type, since that's
        // what flows to the target on a successful cast.
        match label_types.next_back() {
            Some(label_ty) if self.resources.is_subtype(to_ref_type.into(), label_ty) => {
                self.pop_operand(Some(from_ref_type.into()))?;
            }
            Some(label_ty) => bail!(
                self.offset,
                "type mismatch: casting to type {to_ref_type}, but it does not match \
                 label result type {label_ty}"
            ),
            None => bail!(
                self.offset,
                "type mismatch: br_on_cast to label with empty types, must have a reference type"
            ),
        };

        // Re-validate the remaining label types against the stack, then push
        // the fall-through (difference) type.
        self.pop_push_label_types(label_types)?;
        let diff_ty = RefType::difference(from_ref_type, to_ref_type);
        self.push_operand(diff_ty)?;
        Ok(())
    }
    fn visit_br_on_cast_fail(
        &mut self,
        relative_depth: u32,
        mut from_ref_type: RefType,
        mut to_ref_type: RefType,
    ) -> Self::Output {
        // `br_on_cast_fail`: the inverse of `br_on_cast` — branches with the
        // difference type `from \ to` when the cast FAILS, and falls through
        // with `to_ref_type` on success.
        // Canonicalize the immediate types (resolves type indices to ids).
        self.resources
            .check_ref_type(&mut from_ref_type, self.offset)?;
        self.resources
            .check_ref_type(&mut to_ref_type, self.offset)?;

        // The cast target must be a subtype of the source type.
        if !self
            .resources
            .is_subtype(to_ref_type.into(), from_ref_type.into())
        {
            bail!(
                self.offset,
                "type mismatch: expected {from_ref_type}, found {to_ref_type}"
            );
        }

        let (block_ty, frame_kind) = self.jump(relative_depth)?;
        let mut label_tys = self.label_types(block_ty, frame_kind)?;

        // On a failed cast the branch carries the difference type, so the
        // label's last result must accept it.
        let diff_ty = RefType::difference(from_ref_type, to_ref_type);
        match label_tys.next_back() {
            Some(label_ty) if self.resources.is_subtype(diff_ty.into(), label_ty) => {
                self.pop_operand(Some(from_ref_type.into()))?;
            }
            Some(label_ty) => bail!(
                self.offset,
                "type mismatch: expected label result type {label_ty}, found {diff_ty}"
            ),
            None => bail!(
                self.offset,
                "type mismatch: expected a reference type, found nothing"
            ),
        }

        // Re-validate the remaining label types, then push the fall-through
        // (successfully cast) type.
        self.pop_push_label_types(label_tys)?;
        self.push_operand(to_ref_type)?;
        Ok(())
    }
4650    fn visit_ref_i31(&mut self) -> Self::Output {
4651        self.pop_operand(Some(ValType::I32))?;
4652        self.push_operand(ValType::Ref(RefType::I31))
4653    }
4654    fn visit_ref_i31_shared(&mut self) -> Self::Output {
4655        self.pop_operand(Some(ValType::I32))?;
4656        self.push_operand(ValType::Ref(
4657            RefType::I31.shared().expect("i31 is abstract"),
4658        ))
4659    }
    fn visit_i31_get_s(&mut self) -> Self::Output {
        // Sign-extending extraction: pops a (possibly shared) i31 ref,
        // pushes an i32.
        self.pop_maybe_shared_ref(AbstractHeapType::I31)?;
        self.push_operand(ValType::I32)
    }
4664    fn visit_i31_get_u(&mut self) -> Self::Output {
4665        self.pop_maybe_shared_ref(AbstractHeapType::I31)?;
4666        self.push_operand(ValType::I32)
4667    }
4668    fn visit_try(&mut self, mut ty: BlockType) -> Self::Output {
4669        self.check_block_type(&mut ty)?;
4670        for ty in self.params(ty)?.rev() {
4671            self.pop_operand(Some(ty))?;
4672        }
4673        self.push_ctrl(FrameKind::LegacyTry, ty)?;
4674        Ok(())
4675    }
4676    fn visit_catch(&mut self, index: u32) -> Self::Output {
4677        let frame = self.pop_ctrl()?;
4678        if frame.kind != FrameKind::LegacyTry && frame.kind != FrameKind::LegacyCatch {
4679            bail!(self.offset, "catch found outside of an `try` block");
4680        }
4681        // Start a new frame and push `exnref` value.
4682        let height = self.operands.len();
4683        let init_height = self.inits.len();
4684        self.control.push(Frame {
4685            kind: FrameKind::LegacyCatch,
4686            block_type: frame.block_type,
4687            height,
4688            unreachable: false,
4689            init_height,
4690        });
4691        // Push exception argument types.
4692        let ty = self.tag_at(index)?;
4693        for ty in ty.params() {
4694            self.push_operand(*ty)?;
4695        }
4696        Ok(())
4697    }
4698    fn visit_rethrow(&mut self, relative_depth: u32) -> Self::Output {
4699        // This is not a jump, but we need to check that the `rethrow`
4700        // targets an actual `catch` to get the exception.
4701        let (_, kind) = self.jump(relative_depth)?;
4702        if kind != FrameKind::LegacyCatch && kind != FrameKind::LegacyCatchAll {
4703            bail!(
4704                self.offset,
4705                "invalid rethrow label: target was not a `catch` block"
4706            );
4707        }
4708        self.unreachable()?;
4709        Ok(())
4710    }
4711    fn visit_delegate(&mut self, relative_depth: u32) -> Self::Output {
4712        let frame = self.pop_ctrl()?;
4713        if frame.kind != FrameKind::LegacyTry {
4714            bail!(self.offset, "delegate found outside of an `try` block");
4715        }
4716        // This operation is not a jump, but we need to check the
4717        // depth for validity
4718        let _ = self.jump(relative_depth)?;
4719        for ty in self.results(frame.block_type)? {
4720            self.push_operand(ty)?;
4721        }
4722        Ok(())
4723    }
4724    fn visit_catch_all(&mut self) -> Self::Output {
4725        let frame = self.pop_ctrl()?;
4726        if frame.kind == FrameKind::LegacyCatchAll {
4727            bail!(self.offset, "only one catch_all allowed per `try` block");
4728        } else if frame.kind != FrameKind::LegacyTry && frame.kind != FrameKind::LegacyCatch {
4729            bail!(self.offset, "catch_all found outside of a `try` block");
4730        }
4731        let height = self.operands.len();
4732        let init_height = self.inits.len();
4733        self.control.push(Frame {
4734            kind: FrameKind::LegacyCatchAll,
4735            block_type: frame.block_type,
4736            height,
4737            unreachable: false,
4738            init_height,
4739        });
4740        Ok(())
4741    }
4742}
4743
/// A minimal "either" iterator combinator: holds one of two iterator types
/// and lets the trait impls below delegate to whichever variant is present.
/// Used to return one of two concrete iterator types from a single function.
#[derive(Clone, Debug)]
enum Either<A, B> {
    A(A),
    B(B),
}
4749
4750impl<A, B> Iterator for Either<A, B>
4751where
4752    A: Iterator,
4753    B: Iterator<Item = A::Item>,
4754{
4755    type Item = A::Item;
4756    fn next(&mut self) -> Option<A::Item> {
4757        match self {
4758            Either::A(a) => a.next(),
4759            Either::B(b) => b.next(),
4760        }
4761    }
4762}
4763
4764impl<A, B> DoubleEndedIterator for Either<A, B>
4765where
4766    A: DoubleEndedIterator,
4767    B: DoubleEndedIterator<Item = A::Item>,
4768{
4769    fn next_back(&mut self) -> Option<A::Item> {
4770        match self {
4771            Either::A(a) => a.next_back(),
4772            Either::B(b) => b.next_back(),
4773        }
4774    }
4775}
4776
4777impl<A, B> ExactSizeIterator for Either<A, B>
4778where
4779    A: ExactSizeIterator,
4780    B: ExactSizeIterator<Item = A::Item>,
4781{
4782    fn len(&self) -> usize {
4783        match self {
4784            Either::A(a) => a.len(),
4785            Either::B(b) => b.len(),
4786        }
4787    }
4788}
4789
/// Alias-style trait for iterators that are exactly sized, double-ended,
/// cloneable, and debug-printable; the blanket impl below makes every such
/// iterator a `PreciseIterator` automatically.
trait PreciseIterator: ExactSizeIterator + DoubleEndedIterator + Clone + core::fmt::Debug {}
impl<T: ExactSizeIterator + DoubleEndedIterator + Clone + core::fmt::Debug> PreciseIterator for T {}
4792
4793impl Locals {
4794    /// Defines another group of `count` local variables of type `ty`.
4795    ///
4796    /// Returns `true` if the definition was successful. Local variable
4797    /// definition is unsuccessful in case the amount of total variables
4798    /// after definition exceeds the allowed maximum number.
4799    fn define(&mut self, count: u32, ty: ValType) -> bool {
4800        match self.num_locals.checked_add(count) {
4801            Some(n) => self.num_locals = n,
4802            None => return false,
4803        }
4804        if self.num_locals > (MAX_WASM_FUNCTION_LOCALS as u32) {
4805            return false;
4806        }
4807        for _ in 0..count {
4808            if self.first.len() >= MAX_LOCALS_TO_TRACK {
4809                break;
4810            }
4811            self.first.push(ty);
4812        }
4813        self.all.push((self.num_locals - 1, ty));
4814        true
4815    }
4816
4817    /// Returns the number of defined local variables.
4818    pub(super) fn len_locals(&self) -> u32 {
4819        self.num_locals
4820    }
4821
4822    /// Returns the type of the local variable at the given index if any.
4823    #[inline]
4824    pub(super) fn get(&self, idx: u32) -> Option<ValType> {
4825        match self.first.get(idx as usize) {
4826            Some(ty) => Some(*ty),
4827            None => self.get_bsearch(idx),
4828        }
4829    }
4830
4831    fn get_bsearch(&self, idx: u32) -> Option<ValType> {
4832        match self.all.binary_search_by_key(&idx, |(idx, _)| *idx) {
4833            // If this index would be inserted at the end of the list, then the
4834            // index is out of bounds and we return an error.
4835            Err(i) if i == self.all.len() => None,
4836
4837            // If `Ok` is returned we found the index exactly, or if `Err` is
4838            // returned the position is the one which is the least index
4839            // greater that `idx`, which is still the type of `idx` according
4840            // to our "compressed" representation. In both cases we access the
4841            // list at index `i`.
4842            Ok(i) | Err(i) => Some(self.all[i].1),
4843        }
4844    }
4845}