1use alloc::vec::Vec;
10use core::fmt::{self, Display, Formatter};
11use core::ops::{Deref, DerefMut};
12use core::str::FromStr;
13
14#[cfg(feature = "enable-serde")]
15use serde::{Deserialize, Serialize};
16
17use crate::bitset::BitSet;
18use crate::entity;
19use crate::ir::{
20 self,
21 condcodes::{FloatCC, IntCC},
22 trapcode::TrapCode,
23 types, Block, FuncRef, MemFlags, SigRef, StackSlot, Type, Value,
24};
25
/// A list of values attached to an instruction, stored out-of-line in a
/// [`ValueListPool`].
pub type ValueList = entity::EntityList<Value>;

/// Backing memory pool for all [`ValueList`]s in a function.
pub type ValueListPool = entity::ListPool<Value>;
33
/// A block being called in a branch, together with the arguments passed to
/// its block parameters.
///
/// The target block and its arguments share one `EntityList`: the first
/// element encodes the block itself (see `value_to_block`/`block_to_value`
/// in the impl), and the remaining elements are the argument values.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "enable-serde", derive(Serialize, Deserialize))]
pub struct BlockCall {
    // Slot 0 is the encoded target block; slots 1.. are the arguments.
    values: entity::EntityList<Value>,
}
56
57impl BlockCall {
58 fn value_to_block(val: Value) -> Block {
61 Block::from_u32(val.as_u32())
62 }
63
64 fn block_to_value(block: Block) -> Value {
67 Value::from_u32(block.as_u32())
68 }
69
70 pub fn new(block: Block, args: &[Value], pool: &mut ValueListPool) -> Self {
72 let mut values = ValueList::default();
73 values.push(Self::block_to_value(block), pool);
74 values.extend(args.iter().copied(), pool);
75 Self { values }
76 }
77
78 pub fn block(&self, pool: &ValueListPool) -> Block {
80 let val = self.values.first(pool).unwrap();
81 Self::value_to_block(val)
82 }
83
84 pub fn set_block(&mut self, block: Block, pool: &mut ValueListPool) {
86 *self.values.get_mut(0, pool).unwrap() = Self::block_to_value(block);
87 }
88
89 pub fn append_argument(&mut self, arg: Value, pool: &mut ValueListPool) {
91 self.values.push(arg, pool);
92 }
93
94 pub fn args_slice<'a>(&self, pool: &'a ValueListPool) -> &'a [Value] {
96 &self.values.as_slice(pool)[1..]
97 }
98
99 pub fn args_slice_mut<'a>(&'a mut self, pool: &'a mut ValueListPool) -> &'a mut [Value] {
101 &mut self.values.as_mut_slice(pool)[1..]
102 }
103
104 pub fn remove(&mut self, ix: usize, pool: &mut ValueListPool) {
106 self.values.remove(1 + ix, pool)
107 }
108
109 pub fn clear(&mut self, pool: &mut ValueListPool) {
111 self.values.truncate(1, pool)
112 }
113
114 pub fn extend<I>(&mut self, elements: I, pool: &mut ValueListPool)
116 where
117 I: IntoIterator<Item = Value>,
118 {
119 self.values.extend(elements, pool)
120 }
121
122 pub fn display<'a>(&self, pool: &'a ValueListPool) -> DisplayBlockCall<'a> {
124 DisplayBlockCall { block: *self, pool }
125 }
126
127 pub fn deep_clone(&self, pool: &mut ValueListPool) -> Self {
131 Self {
132 values: self.values.deep_clone(pool),
133 }
134 }
135}
136
/// Wrapper for the context needed to display a [`BlockCall`] value.
pub struct DisplayBlockCall<'a> {
    // The block call being displayed.
    block: BlockCall,
    // Pool holding the call's value list.
    pool: &'a ValueListPool,
}
142
143impl<'a> Display for DisplayBlockCall<'a> {
144 fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
145 write!(f, "{}", self.block.block(&self.pool))?;
146 let args = self.block.args_slice(&self.pool);
147 if !args.is_empty() {
148 write!(f, "(")?;
149 for (ix, arg) in args.iter().enumerate() {
150 if ix > 0 {
151 write!(f, ", ")?;
152 }
153 write!(f, "{}", arg)?;
154 }
155 write!(f, ")")?;
156 }
157 Ok(())
158 }
159}
160
161include!(concat!(env!("OUT_DIR"), "/opcodes.rs"));
177
178impl Display for Opcode {
179 fn fmt(&self, f: &mut Formatter) -> fmt::Result {
180 write!(f, "{}", opcode_name(*self))
181 }
182}
183
184impl Opcode {
185 pub fn format(self) -> InstructionFormat {
187 OPCODE_FORMAT[self as usize - 1]
188 }
189
190 pub fn constraints(self) -> OpcodeConstraints {
193 OPCODE_CONSTRAINTS[self as usize - 1]
194 }
195
196 pub fn is_resumable_trap(&self) -> bool {
198 match self {
199 Opcode::ResumableTrap | Opcode::ResumableTrapnz => true,
200 _ => false,
201 }
202 }
203}
204
impl FromStr for Opcode {
    type Err = &'static str;

    /// Parse an `Opcode` from its printed (snake_case) name, using the
    /// generated constant hash table.
    fn from_str(s: &str) -> Result<Self, &'static str> {
        use crate::constant_hash::{probe, simple_hash, Table};

        // Adapt the generated `OPCODE_HASH_TABLE` slice to the probing
        // interface expected by `probe`.
        impl<'a> Table<&'a str> for [Option<Opcode>] {
            fn len(&self) -> usize {
                // Resolves to the inherent slice `len`, not a recursive
                // call to this trait method.
                self.len()
            }

            fn key(&self, idx: usize) -> Option<&'a str> {
                self[idx].map(opcode_name)
            }
        }

        match probe::<&str, [Option<Self>]>(&OPCODE_HASH_TABLE, s, simple_hash(s)) {
            Err(_) => Err("Unknown opcode"),
            // A successful probe always lands on an occupied slot.
            Ok(i) => Ok(OPCODE_HASH_TABLE[i].unwrap()),
        }
    }
}
234
/// A growable list of `Value` operands, used to build up the variable
/// argument portion of an instruction before committing it to a pool.
#[derive(Clone, Debug)]
pub struct VariableArgs(Vec<Value>);
239
240impl VariableArgs {
241 pub fn new() -> Self {
243 Self(Vec::new())
244 }
245
246 pub fn push(&mut self, v: Value) {
248 self.0.push(v)
249 }
250
251 pub fn is_empty(&self) -> bool {
253 self.0.is_empty()
254 }
255
256 pub fn into_value_list(self, fixed: &[Value], pool: &mut ValueListPool) -> ValueList {
258 let mut vlist = ValueList::default();
259 vlist.extend(fixed.iter().cloned(), pool);
260 vlist.extend(self.0, pool);
261 vlist
262 }
263}
264
265impl Deref for VariableArgs {
267 type Target = [Value];
268
269 fn deref(&self) -> &[Value] {
270 &self.0
271 }
272}
273
274impl DerefMut for VariableArgs {
275 fn deref_mut(&mut self) -> &mut [Value] {
276 &mut self.0
277 }
278}
279
280impl Display for VariableArgs {
281 fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
282 for (i, val) in self.0.iter().enumerate() {
283 if i == 0 {
284 write!(fmt, "{}", val)?;
285 } else {
286 write!(fmt, ", {}", val)?;
287 }
288 }
289 Ok(())
290 }
291}
292
293impl Default for VariableArgs {
294 fn default() -> Self {
295 Self::new()
296 }
297}
298
299impl InstructionData {
304 pub fn branch_destination<'a>(&'a self, jump_tables: &'a ir::JumpTables) -> &[BlockCall] {
308 match self {
309 Self::Jump {
310 ref destination, ..
311 } => std::slice::from_ref(destination),
312 Self::Brif { blocks, .. } => blocks.as_slice(),
313 Self::BranchTable { table, .. } => jump_tables.get(*table).unwrap().all_branches(),
314 _ => {
315 debug_assert!(!self.opcode().is_branch());
316 &[]
317 }
318 }
319 }
320
321 pub fn branch_destination_mut<'a>(
325 &'a mut self,
326 jump_tables: &'a mut ir::JumpTables,
327 ) -> &mut [BlockCall] {
328 match self {
329 Self::Jump {
330 ref mut destination,
331 ..
332 } => std::slice::from_mut(destination),
333 Self::Brif { blocks, .. } => blocks.as_mut_slice(),
334 Self::BranchTable { table, .. } => {
335 jump_tables.get_mut(*table).unwrap().all_branches_mut()
336 }
337 _ => {
338 debug_assert!(!self.opcode().is_branch());
339 &mut []
340 }
341 }
342 }
343
344 pub fn trap_code(&self) -> Option<TrapCode> {
347 match *self {
348 Self::CondTrap { code, .. } | Self::Trap { code, .. } => Some(code),
349 _ => None,
350 }
351 }
352
353 pub fn cond_code(&self) -> Option<IntCC> {
356 match self {
357 &InstructionData::IntCompare { cond, .. }
358 | &InstructionData::IntCompareImm { cond, .. } => Some(cond),
359 _ => None,
360 }
361 }
362
363 pub fn fp_cond_code(&self) -> Option<FloatCC> {
366 match self {
367 &InstructionData::FloatCompare { cond, .. } => Some(cond),
368 _ => None,
369 }
370 }
371
372 pub fn trap_code_mut(&mut self) -> Option<&mut TrapCode> {
375 match self {
376 Self::CondTrap { code, .. } | Self::Trap { code, .. } => Some(code),
377 _ => None,
378 }
379 }
380
381 pub fn atomic_rmw_op(&self) -> Option<ir::AtomicRmwOp> {
383 match self {
384 &InstructionData::AtomicRmw { op, .. } => Some(op),
385 _ => None,
386 }
387 }
388
389 pub fn load_store_offset(&self) -> Option<i32> {
391 match self {
392 &InstructionData::Load { offset, .. }
393 | &InstructionData::StackLoad { offset, .. }
394 | &InstructionData::Store { offset, .. }
395 | &InstructionData::StackStore { offset, .. } => Some(offset.into()),
396 _ => None,
397 }
398 }
399
400 pub fn memflags(&self) -> Option<MemFlags> {
402 match self {
403 &InstructionData::Load { flags, .. }
404 | &InstructionData::LoadNoOffset { flags, .. }
405 | &InstructionData::Store { flags, .. }
406 | &InstructionData::StoreNoOffset { flags, .. }
407 | &InstructionData::AtomicCas { flags, .. }
408 | &InstructionData::AtomicRmw { flags, .. } => Some(flags),
409 _ => None,
410 }
411 }
412
413 pub fn stack_slot(&self) -> Option<StackSlot> {
415 match self {
416 &InstructionData::StackStore { stack_slot, .. }
417 | &InstructionData::StackLoad { stack_slot, .. } => Some(stack_slot),
418 _ => None,
419 }
420 }
421
422 pub fn analyze_call<'a>(&'a self, pool: &'a ValueListPool) -> CallInfo<'a> {
426 match *self {
427 Self::Call {
428 func_ref, ref args, ..
429 } => CallInfo::Direct(func_ref, args.as_slice(pool)),
430 Self::CallIndirect {
431 sig_ref, ref args, ..
432 } => CallInfo::Indirect(sig_ref, &args.as_slice(pool)[1..]),
433 _ => {
434 debug_assert!(!self.opcode().is_call());
435 CallInfo::NotACall
436 }
437 }
438 }
439
440 #[inline]
441 pub(crate) fn sign_extend_immediates(&mut self, ctrl_typevar: Type) {
442 if ctrl_typevar.is_invalid() {
443 return;
444 }
445
446 let bit_width = ctrl_typevar.bits();
447
448 match self {
449 Self::BinaryImm64 {
450 opcode,
451 arg: _,
452 imm,
453 } => {
454 if *opcode == Opcode::SdivImm || *opcode == Opcode::SremImm {
455 imm.sign_extend_from_width(bit_width);
456 }
457 }
458 Self::IntCompareImm {
459 opcode,
460 arg: _,
461 cond,
462 imm,
463 } => {
464 debug_assert_eq!(*opcode, Opcode::IcmpImm);
465 if cond.unsigned() != *cond {
466 imm.sign_extend_from_width(bit_width);
467 }
468 }
469 _ => {}
470 }
471 }
472}
473
/// Information about call instructions, as returned by
/// `InstructionData::analyze_call`.
pub enum CallInfo<'a> {
    /// This is not a call instruction.
    NotACall,

    /// This is a direct call to the given function, with the given
    /// argument values.
    Direct(FuncRef, &'a [Value]),

    /// This is an indirect call with the given signature and argument
    /// values (the callee pointer itself is not included).
    Indirect(SigRef, &'a [Value]),
}
486
/// Value type constraints for a given opcode, decoded by the accessor
/// methods on the impl.
#[derive(Clone, Copy)]
pub struct OpcodeConstraints {
    // Packed flags, decoded by the accessors:
    // - bits 0..=2: number of fixed results,
    // - bit 3: `use_typevar_operand`,
    // - bit 4: `requires_typevar_operand`,
    // - bits 5..=7: number of fixed value arguments.
    flags: u8,

    // Index into `TYPE_SETS` for the controlling typeset; an out-of-range
    // value means the opcode is not polymorphic (see `typeset_offset()`).
    typeset_offset: u8,

    // Base index into `OPERAND_CONSTRAINTS`: result constraints first,
    // then the fixed value-argument constraints.
    constraint_offset: u16,
}
522
523impl OpcodeConstraints {
524 pub fn use_typevar_operand(self) -> bool {
528 (self.flags & 0x8) != 0
529 }
530
531 pub fn requires_typevar_operand(self) -> bool {
538 (self.flags & 0x10) != 0
539 }
540
541 pub fn num_fixed_results(self) -> usize {
544 (self.flags & 0x7) as usize
545 }
546
547 pub fn num_fixed_value_arguments(self) -> usize {
555 ((self.flags >> 5) & 0x7) as usize
556 }
557
558 fn typeset_offset(self) -> Option<usize> {
561 let offset = usize::from(self.typeset_offset);
562 if offset < TYPE_SETS.len() {
563 Some(offset)
564 } else {
565 None
566 }
567 }
568
569 fn constraint_offset(self) -> usize {
571 self.constraint_offset as usize
572 }
573
574 pub fn result_type(self, n: usize, ctrl_type: Type) -> Type {
577 debug_assert!(n < self.num_fixed_results(), "Invalid result index");
578 match OPERAND_CONSTRAINTS[self.constraint_offset() + n].resolve(ctrl_type) {
579 ResolvedConstraint::Bound(t) => t,
580 ResolvedConstraint::Free(ts) => panic!("Result constraints can't be free: {:?}", ts),
581 }
582 }
583
584 pub fn value_argument_constraint(self, n: usize, ctrl_type: Type) -> ResolvedConstraint {
590 debug_assert!(
591 n < self.num_fixed_value_arguments(),
592 "Invalid value argument index"
593 );
594 let offset = self.constraint_offset() + self.num_fixed_results();
595 OPERAND_CONSTRAINTS[offset + n].resolve(ctrl_type)
596 }
597
598 pub fn ctrl_typeset(self) -> Option<ValueTypeSet> {
601 self.typeset_offset().map(|offset| TYPE_SETS[offset])
602 }
603
604 pub fn is_polymorphic(self) -> bool {
606 self.ctrl_typeset().is_some()
607 }
608}
609
// Shorthand bitset aliases used by the constraint tables below.
type BitSet8 = BitSet<u8>;
type BitSet16 = BitSet<u16>;
612
/// A value type set describes the permitted set of types for a type
/// variable. All members are encoded as log2 values (bit N set means the
/// quantity 2^N is allowed) — see `is_base_type`/`contains`.
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
pub struct ValueTypeSet {
    /// Allowed lane counts, as log2 of the lane count.
    pub lanes: BitSet16,
    /// Allowed integer widths, as log2 of the lane bit width.
    pub ints: BitSet8,
    /// Allowed float widths, as log2 of the lane bit width.
    pub floats: BitSet8,
    /// Allowed reference widths, as log2 of the lane bit width.
    pub refs: BitSet8,
    /// Allowed minimum lane counts for dynamic vectors, as log2 values.
    pub dynamic_lanes: BitSet16,
}
627
628impl ValueTypeSet {
629 fn is_base_type(self, scalar: Type) -> bool {
633 let l2b = scalar.log2_lane_bits();
634 if scalar.is_int() {
635 self.ints.contains(l2b)
636 } else if scalar.is_float() {
637 self.floats.contains(l2b)
638 } else if scalar.is_ref() {
639 self.refs.contains(l2b)
640 } else {
641 false
642 }
643 }
644
645 pub fn contains(self, typ: Type) -> bool {
647 if typ.is_dynamic_vector() {
648 let l2l = typ.log2_min_lane_count();
649 self.dynamic_lanes.contains(l2l) && self.is_base_type(typ.lane_type())
650 } else {
651 let l2l = typ.log2_lane_count();
652 self.lanes.contains(l2l) && self.is_base_type(typ.lane_type())
653 }
654 }
655
656 pub fn example(self) -> Type {
660 let t = if self.ints.max().unwrap_or(0) > 5 {
661 types::I32
662 } else if self.floats.max().unwrap_or(0) > 5 {
663 types::F32
664 } else {
665 types::I8
666 };
667 t.by(1 << self.lanes.min().unwrap()).unwrap()
668 }
669}
670
/// Operand constraints: how an operand's type relates to the controlling
/// type variable. Resolved by `OperandConstraint::resolve`.
enum OperandConstraint {
    /// This operand has a concrete value type.
    Concrete(Type),

    /// This operand can vary freely within a type set, identified by its
    /// index into `TYPE_SETS`.
    Free(u8),

    /// This operand has the same type as the controlling type variable.
    Same,

    /// This operand is the lane type of the controlling type (via
    /// `Type::lane_of`).
    LaneOf,

    /// This operand is the truthy counterpart of the controlling type (via
    /// `Type::as_truthy`).
    AsTruthy,

    /// This operand is half the width of the controlling type.
    HalfWidth,

    /// This operand is double the width of the controlling type.
    DoubleWidth,

    /// This operand's type is the controlling type with its lanes split
    /// (via `Type::split_lanes`).
    SplitLanes,

    /// This operand's type is the controlling type with its lanes merged
    /// (via `Type::merge_lanes`).
    MergeLanes,

    /// This operand is the fixed-vector counterpart of a dynamic vector
    /// controlling type (via `Type::dynamic_to_vector`).
    DynamicToVector,

    /// This operand is a scalar narrower than the controlling type.
    Narrower,

    /// This operand is a scalar wider than the controlling type.
    Wider,
}
710
impl OperandConstraint {
    /// Resolve this constraint to a bound type or a free typeset, given
    /// the controlling type variable `ctrl_type`.
    pub fn resolve(&self, ctrl_type: Type) -> ResolvedConstraint {
        use self::OperandConstraint::*;
        use self::ResolvedConstraint::Bound;
        match *self {
            Concrete(t) => Bound(t),
            Free(vts) => ResolvedConstraint::Free(TYPE_SETS[vts as usize]),
            Same => Bound(ctrl_type),
            LaneOf => Bound(ctrl_type.lane_of()),
            AsTruthy => Bound(ctrl_type.as_truthy()),
            HalfWidth => Bound(ctrl_type.half_width().expect("invalid type for half_width")),
            DoubleWidth => Bound(
                ctrl_type
                    .double_width()
                    .expect("invalid type for double_width"),
            ),
            SplitLanes => {
                // For dynamic vectors, convert to a fixed vector, split the
                // lanes, then convert back to a dynamic type.
                if ctrl_type.is_dynamic_vector() {
                    Bound(
                        ctrl_type
                            .dynamic_to_vector()
                            .expect("invalid type for dynamic_to_vector")
                            .split_lanes()
                            .expect("invalid type for split_lanes")
                            .vector_to_dynamic()
                            .expect("invalid dynamic type"),
                    )
                } else {
                    Bound(
                        ctrl_type
                            .split_lanes()
                            .expect("invalid type for split_lanes"),
                    )
                }
            }
            MergeLanes => {
                // Mirror of SplitLanes: round-trip through the fixed-vector
                // form for dynamic vectors.
                if ctrl_type.is_dynamic_vector() {
                    Bound(
                        ctrl_type
                            .dynamic_to_vector()
                            .expect("invalid type for dynamic_to_vector")
                            .merge_lanes()
                            .expect("invalid type for merge_lanes")
                            .vector_to_dynamic()
                            .expect("invalid dynamic type"),
                    )
                } else {
                    Bound(
                        ctrl_type
                            .merge_lanes()
                            .expect("invalid type for merge_lanes"),
                    )
                }
            }
            DynamicToVector => Bound(
                ctrl_type
                    .dynamic_to_vector()
                    .expect("invalid type for dynamic_to_vector"),
            ),
            Narrower => {
                let ctrl_type_bits = ctrl_type.log2_lane_bits();
                let mut tys = ValueTypeSet::default();

                // Scalars only: allow exactly 2^0 = 1 lane.
                tys.lanes = BitSet::from_range(0, 1);

                if ctrl_type.is_int() {
                    // Integer widths from log2 = 3 (presumably the 8-bit
                    // minimum — confirm against the type encoding) up to,
                    // but excluding, the controlling width.
                    tys.ints = BitSet8::from_range(3, ctrl_type_bits as u8);
                } else if ctrl_type.is_float() {
                    // Float widths from log2 = 5 (32-bit) up to, but
                    // excluding, the controlling width.
                    tys.floats = BitSet8::from_range(5, ctrl_type_bits as u8);
                } else {
                    panic!("The Narrower constraint only operates on floats or ints");
                }
                ResolvedConstraint::Free(tys)
            }
            Wider => {
                let ctrl_type_bits = ctrl_type.log2_lane_bits();
                let mut tys = ValueTypeSet::default();

                // Scalars only: allow exactly 2^0 = 1 lane.
                tys.lanes = BitSet::from_range(0, 1);

                if ctrl_type.is_int() {
                    let lower_bound = ctrl_type_bits as u8 + 1;
                    // Guard against an empty/out-of-range bitset when the
                    // controlling type is already the widest integer; in
                    // that case the set of wider ints stays empty.
                    if lower_bound < BitSet8::bits() as u8 {
                        tys.ints = BitSet8::from_range(lower_bound, 8);
                    }
                } else if ctrl_type.is_float() {
                    // Float widths strictly above the controlling width,
                    // bounded by log2 = 7 (i.e. up to 64-bit floats).
                    tys.floats = BitSet8::from_range(ctrl_type_bits as u8 + 1, 7);
                } else {
                    panic!("The Wider constraint only operates on floats or ints");
                }

                ResolvedConstraint::Free(tys)
            }
        }
    }
}
826
/// The type constraint on a value argument once the controlling type
/// variable is known.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum ResolvedConstraint {
    /// The operand is bound to a known type.
    Bound(Type),
    /// The operand type can vary freely within the given set.
    Free(ValueTypeSet),
}
835
#[cfg(test)]
mod tests {
    use super::*;
    use alloc::string::ToString;
    // Hoisted once here; previously the tests mixed `std::mem` and
    // function-local `use core::mem;` — use `core` consistently.
    use core::mem;

    #[test]
    fn inst_data_is_copy() {
        fn is_copy<T: Copy>() {}
        is_copy::<InstructionData>();
    }

    #[test]
    fn inst_data_size() {
        // The size of `InstructionData` is performance-sensitive; keep it
        // at 16 bytes.
        assert_eq!(mem::size_of::<InstructionData>(), 16);
    }

    #[test]
    fn opcodes() {
        let x = Opcode::Iadd;
        let mut y = Opcode::Isub;

        assert!(x != y);
        y = Opcode::Iadd;
        assert_eq!(x, y);
        assert_eq!(x.format(), InstructionFormat::Binary);

        // Debug uses the variant name; Display uses the snake_case name.
        assert_eq!(format!("{:?}", Opcode::IaddImm), "IaddImm");
        assert_eq!(Opcode::IaddImm.to_string(), "iadd_imm");

        // `FromStr` round-trips printed names and rejects junk.
        assert_eq!("iadd".parse::<Opcode>(), Ok(Opcode::Iadd));
        assert_eq!("iadd_imm".parse::<Opcode>(), Ok(Opcode::IaddImm));
        assert_eq!("iadd\0".parse::<Opcode>(), Err("Unknown opcode"));
        assert_eq!("".parse::<Opcode>(), Err("Unknown opcode"));
        assert_eq!("\0".parse::<Opcode>(), Err("Unknown opcode"));

        // `Opcode` should have a niche so `Option<Opcode>` costs nothing.
        assert_eq!(mem::size_of::<Opcode>(), mem::size_of::<Option<Opcode>>());
    }

    #[test]
    fn instruction_data() {
        // Instruction formats are designed so `InstructionData` stays at
        // 16 bytes.
        assert_eq!(mem::size_of::<InstructionData>(), 16);
    }

    #[test]
    fn constraints() {
        let a = Opcode::Iadd.constraints();
        assert!(a.use_typevar_operand());
        assert!(!a.requires_typevar_operand());
        assert_eq!(a.num_fixed_results(), 1);
        assert_eq!(a.num_fixed_value_arguments(), 2);
        assert_eq!(a.result_type(0, types::I32), types::I32);
        assert_eq!(a.result_type(0, types::I8), types::I8);
        assert_eq!(
            a.value_argument_constraint(0, types::I32),
            ResolvedConstraint::Bound(types::I32)
        );
        assert_eq!(
            a.value_argument_constraint(1, types::I32),
            ResolvedConstraint::Bound(types::I32)
        );

        let b = Opcode::Bitcast.constraints();
        assert!(!b.use_typevar_operand());
        assert!(!b.requires_typevar_operand());
        assert_eq!(b.num_fixed_results(), 1);
        assert_eq!(b.num_fixed_value_arguments(), 1);
        assert_eq!(b.result_type(0, types::I32), types::I32);
        assert_eq!(b.result_type(0, types::I8), types::I8);
        match b.value_argument_constraint(0, types::I32) {
            ResolvedConstraint::Free(vts) => assert!(vts.contains(types::F32)),
            _ => panic!("Unexpected constraint from value_argument_constraint"),
        }

        let c = Opcode::Call.constraints();
        assert_eq!(c.num_fixed_results(), 0);
        assert_eq!(c.num_fixed_value_arguments(), 0);

        let i = Opcode::CallIndirect.constraints();
        assert_eq!(i.num_fixed_results(), 0);
        assert_eq!(i.num_fixed_value_arguments(), 1);

        let cmp = Opcode::Icmp.constraints();
        assert!(cmp.use_typevar_operand());
        assert!(cmp.requires_typevar_operand());
        assert_eq!(cmp.num_fixed_results(), 1);
        assert_eq!(cmp.num_fixed_value_arguments(), 2);
    }

    #[test]
    fn value_set() {
        use crate::ir::types::*;

        let vts = ValueTypeSet {
            lanes: BitSet16::from_range(0, 8),
            ints: BitSet8::from_range(4, 7),
            floats: BitSet8::from_range(0, 0),
            refs: BitSet8::from_range(5, 7),
            dynamic_lanes: BitSet16::from_range(0, 4),
        };
        assert!(!vts.contains(I8));
        assert!(vts.contains(I32));
        assert!(vts.contains(I64));
        assert!(vts.contains(I32X4));
        assert!(vts.contains(I32X4XN));
        assert!(!vts.contains(F32));
        assert!(vts.contains(R32));
        assert!(vts.contains(R64));
        assert_eq!(vts.example().to_string(), "i32");

        let vts = ValueTypeSet {
            lanes: BitSet16::from_range(0, 8),
            ints: BitSet8::from_range(0, 0),
            floats: BitSet8::from_range(5, 7),
            refs: BitSet8::from_range(0, 0),
            dynamic_lanes: BitSet16::from_range(0, 8),
        };
        assert_eq!(vts.example().to_string(), "f32");

        let vts = ValueTypeSet {
            lanes: BitSet16::from_range(1, 8),
            ints: BitSet8::from_range(0, 0),
            floats: BitSet8::from_range(5, 7),
            refs: BitSet8::from_range(0, 0),
            dynamic_lanes: BitSet16::from_range(0, 8),
        };
        assert_eq!(vts.example().to_string(), "f32x2");

        let vts = ValueTypeSet {
            lanes: BitSet16::from_range(2, 8),
            ints: BitSet8::from_range(3, 7),
            floats: BitSet8::from_range(0, 0),
            refs: BitSet8::from_range(0, 0),
            dynamic_lanes: BitSet16::from_range(0, 8),
        };
        assert_eq!(vts.example().to_string(), "i32x4");

        let vts = ValueTypeSet {
            lanes: BitSet16::from_range(0, 9),
            ints: BitSet8::from_range(3, 7),
            floats: BitSet8::from_range(0, 0),
            refs: BitSet8::from_range(0, 0),
            dynamic_lanes: BitSet16::from_range(0, 8),
        };
        assert!(vts.contains(I32));
        assert!(vts.contains(I32X4));
        assert!(!vts.contains(R32));
        assert!(!vts.contains(R64));
    }
}
999}