//! Instruction formats and opcodes.
//!
//! The `instructions` module contains definitions for instruction formats, opcodes, and the
//! in-memory representation of IR instructions.

use crate::constant_hash::Table;
use alloc::vec::Vec;
use core::fmt::{self, Display, Formatter};
use core::ops::{Deref, DerefMut};
use core::str::FromStr;

#[cfg(feature = "enable-serde")]
use serde_derive::{Deserialize, Serialize};

use crate::bitset::ScalarBitSet;
use crate::entity;
use crate::ir::{
    self,
    condcodes::{FloatCC, IntCC},
    trapcode::TrapCode,
    types, Block, FuncRef, MemFlags, SigRef, StackSlot, Type, Value,
};

/// Some instructions use an external list of argument values because there is not enough
/// space in the 16-byte `InstructionData` struct. These value lists are stored in a memory
/// pool in `dfg.value_lists`.
pub type ValueList = entity::EntityList<Value>;

/// Memory pool for holding value lists. See `ValueList`.
pub type ValueListPool = entity::ListPool<Value>;

/// A branch target: a `Block` together with the arguments passed to its parameters.
///
/// Internally, the block and the arguments are stored in a single `EntityList`, with the
/// block encoded as a `Value` in the first slot. The accessors below maintain this
/// invariant.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "enable-serde", derive(Serialize, Deserialize))]
pub struct BlockCall {
    values: entity::EntityList<Value>,
}

impl BlockCall {
    // NOTE: the encoding of a block as a value is an internal detail. Neither conversion is
    // exposed outside this module because the operation is not generally safe.
    fn value_to_block(val: Value) -> Block {
        Block::from_u32(val.as_u32())
    }

    fn block_to_value(block: Block) -> Value {
        Value::from_u32(block.as_u32())
    }

    /// Construct a `BlockCall` with the given block and arguments.
    pub fn new(block: Block, args: &[Value], pool: &mut ValueListPool) -> Self {
        let mut values = ValueList::default();
        values.push(Self::block_to_value(block), pool);
        values.extend(args.iter().copied(), pool);
        Self { values }
    }

    /// Return the block for this `BlockCall`.
    pub fn block(&self, pool: &ValueListPool) -> Block {
        let val = self.values.first(pool).unwrap();
        Self::value_to_block(val)
    }

    /// Replace the block for this `BlockCall`.
    pub fn set_block(&mut self, block: Block, pool: &mut ValueListPool) {
        *self.values.get_mut(0, pool).unwrap() = Self::block_to_value(block);
    }

    /// Append an argument to the block args.
    pub fn append_argument(&mut self, arg: Value, pool: &mut ValueListPool) {
        self.values.push(arg, pool);
    }

    /// Return a slice of the arguments of this block call.
    pub fn args_slice<'a>(&self, pool: &'a ValueListPool) -> &'a [Value] {
        &self.values.as_slice(pool)[1..]
    }

    /// Return a mutable slice of the arguments of this block call.
    pub fn args_slice_mut<'a>(&'a mut self, pool: &'a mut ValueListPool) -> &'a mut [Value] {
        &mut self.values.as_mut_slice(pool)[1..]
    }

    /// Remove the argument at index `ix` from the args of this block call.
    pub fn remove(&mut self, ix: usize, pool: &mut ValueListPool) {
        self.values.remove(1 + ix, pool)
    }

    /// Clear out the arguments list, keeping only the block.
    pub fn clear(&mut self, pool: &mut ValueListPool) {
        self.values.truncate(1, pool)
    }

    /// Append the given values to the arguments of this block call.
    pub fn extend<I>(&mut self, elements: I, pool: &mut ValueListPool)
    where
        I: IntoIterator<Item = Value>,
    {
        self.values.extend(elements, pool)
    }

    /// Return a value that can display this block call.
    pub fn display<'a>(&self, pool: &'a ValueListPool) -> DisplayBlockCall<'a> {
        DisplayBlockCall { block: *self, pool }
    }

    /// Deep-clone this `BlockCall` in the same pool. The internal `EntityList` is
    /// copy-on-write, so this allocates a fresh list that can be modified independently
    /// of the original.
    pub fn deep_clone(&self, pool: &mut ValueListPool) -> Self {
        Self {
            values: self.values.deep_clone(pool),
        }
    }
}

/// Wrapper for the context needed to display a [BlockCall] value.
pub struct DisplayBlockCall<'a> {
    block: BlockCall,
    pool: &'a ValueListPool,
}

impl<'a> Display for DisplayBlockCall<'a> {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.block.block(&self.pool))?;
        let args = self.block.args_slice(&self.pool);
        if !args.is_empty() {
            write!(f, "(")?;
            for (ix, arg) in args.iter().enumerate() {
                if ix > 0 {
                    write!(f, ", ")?;
                }
                write!(f, "{arg}")?;
            }
            write!(f, ")")?;
        }
        Ok(())
    }
}

// Code generated at build time. The generated `opcodes.rs` defines the `Opcode` enum, the
// `InstructionData` representation, and the `OPCODE_*`, `TYPE_SETS`, and
// `OPERAND_CONSTRAINTS` lookup tables used below.
include!(concat!(env!("OUT_DIR"), "/opcodes.rs"));

impl Display for Opcode {
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        write!(f, "{}", opcode_name(*self))
    }
}

impl Opcode {
    /// Get the instruction format for this opcode.
    pub fn format(self) -> InstructionFormat {
        OPCODE_FORMAT[self as usize - 1]
    }

    /// Get the constraint descriptor for this opcode.
    pub fn constraints(self) -> OpcodeConstraints {
        OPCODE_CONSTRAINTS[self as usize - 1]
    }

    /// Is this instruction a GC safepoint?
    ///
    /// All calls that are not also returns (i.e. non-tail calls) are safepoints.
    #[inline]
    pub fn is_safepoint(self) -> bool {
        self.is_call() && !self.is_return()
    }
}

impl FromStr for Opcode {
    type Err = &'static str;

    /// Parse an Opcode name from a string.
    fn from_str(s: &str) -> Result<Self, &'static str> {
        use crate::constant_hash::{probe, simple_hash};

        match probe::<&str, [Option<Self>]>(&OPCODE_HASH_TABLE, s, simple_hash(s)) {
            Err(_) => Err("Unknown opcode"),
            // We unwrap here because `probe()` should have ensured that the entry
            // at this index is not `None`.
            Ok(i) => Ok(OPCODE_HASH_TABLE[i].unwrap()),
        }
    }
}

impl<'a> Table<&'a str> for [Option<Opcode>] {
    fn len(&self) -> usize {
        self.len()
    }

    fn key(&self, idx: usize) -> Option<&'a str> {
        self[idx].map(opcode_name)
    }
}

/// A variable list of `Value` operands used for function call arguments and passing
/// arguments to basic blocks.
#[derive(Clone, Debug)]
pub struct VariableArgs(Vec<Value>);

impl VariableArgs {
    /// Create an empty argument list.
    pub fn new() -> Self {
        Self(Vec::new())
    }

    /// Add an argument to the end.
    pub fn push(&mut self, v: Value) {
        self.0.push(v)
    }

    /// Check if the list is empty.
    pub fn is_empty(&self) -> bool {
        self.0.is_empty()
    }

    /// Convert this to a value list in `pool` with `fixed` prepended.
    pub fn into_value_list(self, fixed: &[Value], pool: &mut ValueListPool) -> ValueList {
        let mut vlist = ValueList::default();
        vlist.extend(fixed.iter().cloned(), pool);
        vlist.extend(self.0, pool);
        vlist
    }
}

// Coerce `VariableArgs` into a `&[Value]` slice.
impl Deref for VariableArgs {
    type Target = [Value];

    fn deref(&self) -> &[Value] {
        &self.0
    }
}

impl DerefMut for VariableArgs {
    fn deref_mut(&mut self) -> &mut [Value] {
        &mut self.0
    }
}

impl Display for VariableArgs {
    fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
        for (i, val) in self.0.iter().enumerate() {
            if i == 0 {
                write!(fmt, "{val}")?;
            } else {
                write!(fmt, ", {val}")?;
            }
        }
        Ok(())
    }
}

impl Default for VariableArgs {
    fn default() -> Self {
        Self::new()
    }
}

impl InstructionData {
    /// Get the destinations of this instruction, if it's a branch.
    ///
    /// `jump` has a single destination, `brif` has two, and `br_table` yields all of its
    /// jump-table entries (including the default). Non-branch instructions return the
    /// empty slice.
    pub fn branch_destination<'a>(&'a self, jump_tables: &'a ir::JumpTables) -> &'a [BlockCall] {
        match self {
            Self::Jump {
                ref destination, ..
            } => std::slice::from_ref(destination),
            Self::Brif { blocks, .. } => blocks.as_slice(),
            Self::BranchTable { table, .. } => jump_tables.get(*table).unwrap().all_branches(),
            _ => {
                debug_assert!(!self.opcode().is_branch());
                &[]
            }
        }
    }

    /// Get a mutable slice of the destinations of this instruction, if it's a branch.
    pub fn branch_destination_mut<'a>(
        &'a mut self,
        jump_tables: &'a mut ir::JumpTables,
    ) -> &'a mut [BlockCall] {
        match self {
            Self::Jump {
                ref mut destination,
                ..
            } => std::slice::from_mut(destination),
            Self::Brif { blocks, .. } => blocks.as_mut_slice(),
            Self::BranchTable { table, .. } => {
                jump_tables.get_mut(*table).unwrap().all_branches_mut()
            }
            _ => {
                debug_assert!(!self.opcode().is_branch());
                &mut []
            }
        }
    }

    /// Replace the values used in this instruction according to the given function,
    /// covering both the instruction's own arguments and the arguments of any branch
    /// destinations.
    pub fn map_values(
        &mut self,
        pool: &mut ValueListPool,
        jump_tables: &mut ir::JumpTables,
        mut f: impl FnMut(Value) -> Value,
    ) {
        for arg in self.arguments_mut(pool) {
            *arg = f(*arg);
        }

        for block in self.branch_destination_mut(jump_tables) {
            for arg in block.args_slice_mut(pool) {
                *arg = f(*arg);
            }
        }
    }
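
    // A minimal sketch of how `map_values` might be used to renumber every value an
    // instruction mentions. The `inst_data`, `pool`, `jump_tables`, and `remap` bindings
    // below are hypothetical, not part of this module:
    //
    //     inst_data.map_values(&mut pool, &mut jump_tables, |v| remap[v]);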

    /// If this is a trapping instruction, get its trap code. Otherwise, return `None`.
    pub fn trap_code(&self) -> Option<TrapCode> {
        match *self {
            Self::CondTrap { code, .. } | Self::Trap { code, .. } => Some(code),
            _ => None,
        }
    }

    /// If this is an integer comparison instruction, get its condition code.
    /// Otherwise, return `None`.
    pub fn cond_code(&self) -> Option<IntCC> {
        match self {
            &InstructionData::IntCompare { cond, .. }
            | &InstructionData::IntCompareImm { cond, .. } => Some(cond),
            _ => None,
        }
    }

    /// If this is a floating-point comparison instruction, get its condition code.
    /// Otherwise, return `None`.
    pub fn fp_cond_code(&self) -> Option<FloatCC> {
        match self {
            &InstructionData::FloatCompare { cond, .. } => Some(cond),
            _ => None,
        }
    }

    /// If this is a trapping instruction, get a mutable reference to its trap code.
    /// Otherwise, return `None`.
    pub fn trap_code_mut(&mut self) -> Option<&mut TrapCode> {
        match self {
            Self::CondTrap { code, .. } | Self::Trap { code, .. } => Some(code),
            _ => None,
        }
    }

    /// If this is an atomic read/modify/write instruction, return its subopcode.
    pub fn atomic_rmw_op(&self) -> Option<ir::AtomicRmwOp> {
        match self {
            &InstructionData::AtomicRmw { op, .. } => Some(op),
            _ => None,
        }
    }

    /// If this is a load/store instruction, return its immediate offset.
    pub fn load_store_offset(&self) -> Option<i32> {
        match self {
            &InstructionData::Load { offset, .. }
            | &InstructionData::StackLoad { offset, .. }
            | &InstructionData::Store { offset, .. }
            | &InstructionData::StackStore { offset, .. } => Some(offset.into()),
            _ => None,
        }
    }

    /// If this is a load/store instruction, return its memory flags.
    pub fn memflags(&self) -> Option<MemFlags> {
        match self {
            &InstructionData::Load { flags, .. }
            | &InstructionData::LoadNoOffset { flags, .. }
            | &InstructionData::Store { flags, .. }
            | &InstructionData::StoreNoOffset { flags, .. }
            | &InstructionData::AtomicCas { flags, .. }
            | &InstructionData::AtomicRmw { flags, .. } => Some(flags),
            _ => None,
        }
    }

    /// If this instruction references a stack slot, return it.
    pub fn stack_slot(&self) -> Option<StackSlot> {
        match self {
            &InstructionData::StackStore { stack_slot, .. }
            | &InstructionData::StackLoad { stack_slot, .. } => Some(stack_slot),
            _ => None,
        }
    }

    /// Return information about a call instruction.
    ///
    /// Any instruction that can call another function reveals its call signature here.
    pub fn analyze_call<'a>(&'a self, pool: &'a ValueListPool) -> CallInfo<'a> {
        match *self {
            Self::Call {
                func_ref, ref args, ..
            } => CallInfo::Direct(func_ref, args.as_slice(pool)),
            Self::CallIndirect {
                sig_ref, ref args, ..
            } => {
                // The first entry of an indirect call's value list is the callee
                // pointer; only the remaining entries are arguments.
                CallInfo::Indirect(sig_ref, &args.as_slice(pool)[1..])
            }
            Self::Ternary {
                opcode: Opcode::StackSwitch,
                ..
            } => {
                // `stack_switch` transfers control like a call, but it has no callee
                // or signature to report, so it is not a call for this analysis.
                CallInfo::NotACall
            }
            _ => {
                debug_assert!(!self.opcode().is_call());
                CallInfo::NotACall
            }
        }
    }

    /// Mask the immediate operand of this instruction to the width of the controlling
    /// type, for the formats and opcodes where the out-of-range bits are significant.
    #[inline]
    pub(crate) fn mask_immediates(&mut self, ctrl_typevar: Type) {
        if ctrl_typevar.is_invalid() {
            return;
        }

        let bit_width = ctrl_typevar.bits();

        match self {
            Self::UnaryImm { opcode: _, imm } => {
                *imm = imm.mask_to_width(bit_width);
            }
            Self::BinaryImm64 {
                opcode,
                arg: _,
                imm,
            } => {
                // Of the `BinaryImm64` opcodes, only the signed division and
                // remainder immediates are masked here.
                if *opcode == Opcode::SdivImm || *opcode == Opcode::SremImm {
                    *imm = imm.mask_to_width(bit_width);
                }
            }
            Self::IntCompareImm {
                opcode,
                arg: _,
                cond,
                imm,
            } => {
                debug_assert_eq!(*opcode, Opcode::IcmpImm);
                // Unsigned comparisons are unaffected by the extra bits; mask only
                // for signed condition codes.
                if cond.unsigned() != *cond {
                    *imm = imm.mask_to_width(bit_width);
                }
            }
            _ => {}
        }
    }
}

/// Information about call instructions.
pub enum CallInfo<'a> {
    /// This is not a call instruction.
    NotACall,

    /// This is a direct call to an external function declared in the preamble. See
    /// `DataFlowGraph.ext_funcs`.
    Direct(FuncRef, &'a [Value]),

    /// This is an indirect call with the specified signature. See
    /// `DataFlowGraph.signatures`.
    Indirect(SigRef, &'a [Value]),
}
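
// A minimal sketch of how `analyze_call` and `CallInfo` are typically consumed. The
// `inst_data` and `dfg` bindings here are hypothetical, not part of this module:
//
//     match inst_data.analyze_call(&dfg.value_lists) {
//         CallInfo::Direct(func_ref, args) => { /* known callee and full argument list */ }
//         CallInfo::Indirect(sig_ref, args) => { /* known signature; callee is a value */ }
//         CallInfo::NotACall => { /* nothing to do */ }
//     }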

/// Value type constraints for a given opcode.
///
/// The `InstructionFormat` determines the constraints on most operands, but `Value`
/// operands and results are not determined by the format. Every `Opcode` has an associated
/// `OpcodeConstraints` object that provides the missing details.
#[derive(Clone, Copy)]
pub struct OpcodeConstraints {
    /// Flags for this opcode encoded as a bit field:
    ///
    /// Bits 0-2:
    ///     Number of fixed result values. This does not include `variable_args` results
    ///     as are produced by call instructions.
    ///
    /// Bit 3:
    ///     This opcode is polymorphic and the controlling type variable can be inferred
    ///     from the designated input operand.
    ///
    /// Bit 4:
    ///     This opcode is polymorphic and the controlling type variable does *not* appear
    ///     as the first result type.
    ///
    /// Bits 5-7:
    ///     Number of fixed value arguments.
    flags: u8,

    /// Offset into `TYPE_SETS` of the permanent typeset associated with the opcode, or an
    /// out-of-range sentinel value when the opcode is not polymorphic.
    typeset_offset: u8,

    /// Offset into `OPERAND_CONSTRAINTS` where the descriptors for this opcode begin:
    /// first the fixed results, then the fixed value arguments.
    constraint_offset: u16,
}

impl OpcodeConstraints {
    /// Can the controlling type variable for this opcode be inferred from the designated
    /// value input operand?
    /// This also implies that this opcode is polymorphic.
    pub fn use_typevar_operand(self) -> bool {
        (self.flags & 0x8) != 0
    }

    /// Is it necessary to look at the designated value input operand in order to determine
    /// the controlling type variable, or is it good enough to use the first return type?
    pub fn requires_typevar_operand(self) -> bool {
        (self.flags & 0x10) != 0
    }

    /// Get the number of *fixed* result values produced by this opcode.
    /// This does not include `variable_args` results as are produced by call instructions.
    pub fn num_fixed_results(self) -> usize {
        (self.flags & 0x7) as usize
    }

    /// Get the number of *fixed* input values required by this opcode.
    ///
    /// This does not include `variable_args` arguments on call and branch instructions.
    pub fn num_fixed_value_arguments(self) -> usize {
        ((self.flags >> 5) & 0x7) as usize
    }

    /// Get the offset into `TYPE_SETS` for the controlling type variable.
    /// Returns `None` if the instruction is not polymorphic.
    fn typeset_offset(self) -> Option<usize> {
        let offset = usize::from(self.typeset_offset);
        if offset < TYPE_SETS.len() {
            Some(offset)
        } else {
            None
        }
    }

    /// Get the offset into `OPERAND_CONSTRAINTS` where the descriptors for this opcode begin.
    fn constraint_offset(self) -> usize {
        self.constraint_offset as usize
    }

    /// Get the value type of result number `n`, having resolved the controlling type
    /// variable to `ctrl_type`.
    pub fn result_type(self, n: usize, ctrl_type: Type) -> Type {
        debug_assert!(n < self.num_fixed_results(), "Invalid result index");
        match OPERAND_CONSTRAINTS[self.constraint_offset() + n].resolve(ctrl_type) {
            ResolvedConstraint::Bound(t) => t,
            ResolvedConstraint::Free(ts) => panic!("Result constraints can't be free: {ts:?}"),
        }
    }

    /// Get the value type of input value number `n`, having resolved the controlling type
    /// variable to `ctrl_type`.
    ///
    /// Unlike results, some input values may vary freely within a specific `ValueTypeSet`;
    /// this is represented with the `ResolvedConstraint::Free` variant.
    pub fn value_argument_constraint(self, n: usize, ctrl_type: Type) -> ResolvedConstraint {
        debug_assert!(
            n < self.num_fixed_value_arguments(),
            "Invalid value argument index"
        );
        let offset = self.constraint_offset() + self.num_fixed_results();
        OPERAND_CONSTRAINTS[offset + n].resolve(ctrl_type)
    }

    /// Get the typeset of allowed types for the controlling type variable in a
    /// polymorphic instruction.
    pub fn ctrl_typeset(self) -> Option<ValueTypeSet> {
        self.typeset_offset().map(|offset| TYPE_SETS[offset])
    }

    /// Is this instruction polymorphic?
    pub fn is_polymorphic(self) -> bool {
        self.ctrl_typeset().is_some()
    }
}

type BitSet8 = ScalarBitSet<u8>;
type BitSet16 = ScalarBitSet<u16>;

/// A value type set describes the permitted set of types for a type variable.
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
pub struct ValueTypeSet {
    /// Allowed lane counts, expressed as log2 values.
    pub lanes: BitSet16,
    /// Allowed integer lane widths, expressed as log2 numbers of bits.
    pub ints: BitSet8,
    /// Allowed floating-point lane widths, expressed as log2 numbers of bits.
    pub floats: BitSet8,
    /// Allowed minimum lane counts for dynamic vector types, expressed as log2 values.
    pub dynamic_lanes: BitSet16,
}

impl ValueTypeSet {
    /// Is `scalar` part of the base type set?
    ///
    /// Note that the base type set does not have to be included in the type set proper.
    fn is_base_type(self, scalar: Type) -> bool {
        let l2b = u8::try_from(scalar.log2_lane_bits()).unwrap();
        if scalar.is_int() {
            self.ints.contains(l2b)
        } else if scalar.is_float() {
            self.floats.contains(l2b)
        } else {
            false
        }
    }

    /// Does `typ` belong to this set?
    pub fn contains(self, typ: Type) -> bool {
        if typ.is_dynamic_vector() {
            let l2l = u8::try_from(typ.log2_min_lane_count()).unwrap();
            self.dynamic_lanes.contains(l2l) && self.is_base_type(typ.lane_type())
        } else {
            let l2l = u8::try_from(typ.log2_lane_count()).unwrap();
            self.lanes.contains(l2l) && self.is_base_type(typ.lane_type())
        }
    }

    /// Get an example member of this type set.
    ///
    /// This is used for error messages to avoid suggesting invalid types.
    pub fn example(self) -> Type {
        let t = if self.ints.max().unwrap_or(0) > 5 {
            types::I32
        } else if self.floats.max().unwrap_or(0) > 5 {
            types::F32
        } else {
            types::I8
        };
        t.by(1 << self.lanes.min().unwrap()).unwrap()
    }
}

/// Operand constraints. This describes the value type constraints on a single `Value`
/// operand.
enum OperandConstraint {
    /// This operand has a concrete value type.
    Concrete(Type),

    /// This operand can vary freely within the given type set.
    /// The type set is identified by its index into the `TYPE_SETS` constant table.
    Free(u8),

    /// This operand is the same type as the controlling type variable.
    Same,

    /// This operand is `ctrl_type.lane_of()`.
    LaneOf,

    /// This operand is `ctrl_type.as_truthy()`.
    AsTruthy,

    /// This operand is `ctrl_type.half_width()`.
    HalfWidth,

    /// This operand is `ctrl_type.double_width()`.
    DoubleWidth,

    /// This operand is `ctrl_type.split_lanes()`.
    SplitLanes,

    /// This operand is `ctrl_type.merge_lanes()`.
    MergeLanes,

    /// This operand is `ctrl_type.dynamic_to_vector()`.
    DynamicToVector,

    /// This operand is narrower than the controlling type.
    Narrower,

    /// This operand is wider than the controlling type.
    Wider,
}

impl OperandConstraint {
    /// Resolve this operand constraint into a concrete value type, given the value of the
    /// controlling type variable.
    pub fn resolve(&self, ctrl_type: Type) -> ResolvedConstraint {
        use self::OperandConstraint::*;
        use self::ResolvedConstraint::Bound;
        match *self {
            Concrete(t) => Bound(t),
            Free(vts) => ResolvedConstraint::Free(TYPE_SETS[vts as usize]),
            Same => Bound(ctrl_type),
            LaneOf => Bound(ctrl_type.lane_of()),
            AsTruthy => Bound(ctrl_type.as_truthy()),
            HalfWidth => Bound(ctrl_type.half_width().expect("invalid type for half_width")),
            DoubleWidth => Bound(
                ctrl_type
                    .double_width()
                    .expect("invalid type for double_width"),
            ),
            SplitLanes => {
                if ctrl_type.is_dynamic_vector() {
                    Bound(
                        ctrl_type
                            .dynamic_to_vector()
                            .expect("invalid type for dynamic_to_vector")
                            .split_lanes()
                            .expect("invalid type for split_lanes")
                            .vector_to_dynamic()
                            .expect("invalid dynamic type"),
                    )
                } else {
                    Bound(
                        ctrl_type
                            .split_lanes()
                            .expect("invalid type for split_lanes"),
                    )
                }
            }
            MergeLanes => {
                if ctrl_type.is_dynamic_vector() {
                    Bound(
                        ctrl_type
                            .dynamic_to_vector()
                            .expect("invalid type for dynamic_to_vector")
                            .merge_lanes()
                            .expect("invalid type for merge_lanes")
                            .vector_to_dynamic()
                            .expect("invalid dynamic type"),
                    )
                } else {
                    Bound(
                        ctrl_type
                            .merge_lanes()
                            .expect("invalid type for merge_lanes"),
                    )
                }
            }
            DynamicToVector => Bound(
                ctrl_type
                    .dynamic_to_vector()
                    .expect("invalid type for dynamic_to_vector"),
            ),
            Narrower => {
                let ctrl_type_bits = ctrl_type.log2_lane_bits();
                let mut tys = ValueTypeSet::default();

                // We're testing scalar values, only.
                tys.lanes = ScalarBitSet::from_range(0, 1);

                if ctrl_type.is_int() {
                    // The upper bound of `from_range` is exclusive, so this is the
                    // interval [I8, ctrl_type) of log2 bit widths.
                    tys.ints = BitSet8::from_range(3, ctrl_type_bits as u8);
                } else if ctrl_type.is_float() {
                    // Likewise, the interval [F16, ctrl_type).
                    tys.floats = BitSet8::from_range(4, ctrl_type_bits as u8);
                } else {
                    panic!("The Narrower constraint only operates on floats or ints");
                }
                ResolvedConstraint::Free(tys)
            }
            Wider => {
                let ctrl_type_bits = ctrl_type.log2_lane_bits();
                let mut tys = ValueTypeSet::default();

                // We're testing scalar values, only.
                tys.lanes = ScalarBitSet::from_range(0, 1);

                if ctrl_type.is_int() {
                    let lower_bound = ctrl_type_bits as u8 + 1;
                    // If the control type is already the widest representable int,
                    // there is no wider type and the set stays empty. `from_range`
                    // cannot express a lower bound past the bit set's capacity.
                    if lower_bound < BitSet8::capacity() {
                        tys.ints = BitSet8::from_range(lower_bound, 8);
                    }
                } else if ctrl_type.is_float() {
                    // Same guard as for ints, applied to the float widths.
                    let lower_bound = ctrl_type_bits as u8 + 1;
                    if lower_bound < BitSet8::capacity() {
                        tys.floats = BitSet8::from_range(lower_bound, 8);
                    }
                } else {
                    panic!("The Wider constraint only operates on floats or ints");
                }

                ResolvedConstraint::Free(tys)
            }
        }
    }
}

/// The type constraint on a value argument, once the controlling type variable is known.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum ResolvedConstraint {
    /// The operand is bound to a known type.
    Bound(Type),
    /// The operand type can vary freely within the given set.
    Free(ValueTypeSet),
}

#[cfg(test)]
mod tests {
    use super::*;
    use alloc::string::ToString;

    #[test]
    fn inst_data_is_copy() {
        fn is_copy<T: Copy>() {}
        is_copy::<InstructionData>();
    }

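    // A small additional check of the `BlockCall` API sketched above. It assumes the
    // `from_u32` entity constructors are visible here, as they are used elsewhere in
    // this module.
    #[test]
    fn block_call_roundtrip() {
        let mut pool = ValueListPool::new();
        let block = Block::from_u32(0);
        let (v1, v2) = (Value::from_u32(1), Value::from_u32(2));

        let mut call = BlockCall::new(block, &[v1, v2], &mut pool);
        assert_eq!(call.block(&pool), block);
        assert_eq!(call.args_slice(&pool), &[v1, v2]);
        assert_eq!(call.display(&pool).to_string(), "block0(v1, v2)");

        // Removing an argument shifts the rest down; the block is unaffected.
        call.remove(0, &mut pool);
        assert_eq!(call.args_slice(&pool), &[v2]);
        assert_eq!(call.block(&pool), block);
    }
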
    #[test]
    fn inst_data_size() {
        // The size of `InstructionData` is performance-sensitive, so make sure it
        // doesn't grow beyond 16 bytes.
        assert_eq!(std::mem::size_of::<InstructionData>(), 16);
    }
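
    // An extra sketch exercising `VariableArgs` and its `Display` impl, under the same
    // assumption as `block_call_roundtrip` about `Value::from_u32`.
    #[test]
    fn variable_args_display() {
        let mut args = VariableArgs::new();
        assert!(args.is_empty());
        args.push(Value::from_u32(12));
        args.push(Value::from_u32(7));
        // `Deref` lets the list be treated as a slice.
        assert_eq!(args.len(), 2);
        assert_eq!(args.to_string(), "v12, v7");
    }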

    #[test]
    fn opcodes() {
        use core::mem;

        let x = Opcode::Iadd;
        let mut y = Opcode::Isub;

        assert!(x != y);
        y = Opcode::Iadd;
        assert_eq!(x, y);
        assert_eq!(x.format(), InstructionFormat::Binary);

        assert_eq!(format!("{:?}", Opcode::IaddImm), "IaddImm");
        assert_eq!(Opcode::IaddImm.to_string(), "iadd_imm");

        // Check the `FromStr` hash-table matcher.
        assert_eq!("iadd".parse::<Opcode>(), Ok(Opcode::Iadd));
        assert_eq!("iadd_imm".parse::<Opcode>(), Ok(Opcode::IaddImm));
        assert_eq!("iadd\0".parse::<Opcode>(), Err("Unknown opcode"));
        assert_eq!("".parse::<Opcode>(), Err("Unknown opcode"));
        assert_eq!("\0".parse::<Opcode>(), Err("Unknown opcode"));

        // `Opcode` has an unused encoding that `Option` can use as its niche, so
        // wrapping an opcode in `Option` should cost nothing.
        assert_eq!(mem::size_of::<Opcode>(), mem::size_of::<Option<Opcode>>());
    }

    #[test]
    fn instruction_data() {
        use core::mem;
        // The size of the `InstructionData` enum is important for compiler performance.
        // It should not exceed 16 bytes.
        assert_eq!(mem::size_of::<InstructionData>(), 16);
    }

    #[test]
    fn constraints() {
        let a = Opcode::Iadd.constraints();
        assert!(a.use_typevar_operand());
        assert!(!a.requires_typevar_operand());
        assert_eq!(a.num_fixed_results(), 1);
        assert_eq!(a.num_fixed_value_arguments(), 2);
        assert_eq!(a.result_type(0, types::I32), types::I32);
        assert_eq!(a.result_type(0, types::I8), types::I8);
        assert_eq!(
            a.value_argument_constraint(0, types::I32),
            ResolvedConstraint::Bound(types::I32)
        );
        assert_eq!(
            a.value_argument_constraint(1, types::I32),
            ResolvedConstraint::Bound(types::I32)
        );

        let b = Opcode::Bitcast.constraints();
        assert!(!b.use_typevar_operand());
        assert!(!b.requires_typevar_operand());
        assert_eq!(b.num_fixed_results(), 1);
        assert_eq!(b.num_fixed_value_arguments(), 1);
        assert_eq!(b.result_type(0, types::I32), types::I32);
        assert_eq!(b.result_type(0, types::I8), types::I8);
        match b.value_argument_constraint(0, types::I32) {
            ResolvedConstraint::Free(vts) => assert!(vts.contains(types::F32)),
            _ => panic!("Unexpected constraint from value_argument_constraint"),
        }

        let c = Opcode::Call.constraints();
        assert_eq!(c.num_fixed_results(), 0);
        assert_eq!(c.num_fixed_value_arguments(), 0);

        let i = Opcode::CallIndirect.constraints();
        assert_eq!(i.num_fixed_results(), 0);
        assert_eq!(i.num_fixed_value_arguments(), 1);

        let cmp = Opcode::Icmp.constraints();
        assert!(cmp.use_typevar_operand());
        assert!(cmp.requires_typevar_operand());
        assert_eq!(cmp.num_fixed_results(), 1);
        assert_eq!(cmp.num_fixed_value_arguments(), 2);
        assert_eq!(cmp.result_type(0, types::I64), types::I8);
    }
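
    // A direct sketch of `OperandConstraint::resolve` on a few representative
    // constraints; the expected types follow from the `Type` conversions that the
    // enum variants document above.
    #[test]
    fn operand_constraint_resolve() {
        use super::OperandConstraint::*;

        // `Same` binds to the controlling type itself.
        assert_eq!(
            Same.resolve(types::I32),
            ResolvedConstraint::Bound(types::I32)
        );
        // `DoubleWidth` doubles the lane width.
        assert_eq!(
            DoubleWidth.resolve(types::I32),
            ResolvedConstraint::Bound(types::I64)
        );
        // `HalfWidth` halves the lane width, preserving the lane count.
        assert_eq!(
            HalfWidth.resolve(types::I16X8),
            ResolvedConstraint::Bound(types::I8X8)
        );
    }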

    #[test]
    fn value_set() {
        use crate::ir::types::*;

        let vts = ValueTypeSet {
            lanes: BitSet16::from_range(0, 8),
            ints: BitSet8::from_range(4, 7),
            floats: BitSet8::from_range(0, 0),
            dynamic_lanes: BitSet16::from_range(0, 4),
        };
        assert!(!vts.contains(I8));
        assert!(vts.contains(I32));
        assert!(vts.contains(I64));
        assert!(vts.contains(I32X4));
        assert!(vts.contains(I32X4XN));
        assert!(!vts.contains(F16));
        assert!(!vts.contains(F32));
        assert!(!vts.contains(F128));
        assert_eq!(vts.example().to_string(), "i32");

        let vts = ValueTypeSet {
            lanes: BitSet16::from_range(0, 8),
            ints: BitSet8::from_range(0, 0),
            floats: BitSet8::from_range(5, 7),
            dynamic_lanes: BitSet16::from_range(0, 8),
        };
        assert_eq!(vts.example().to_string(), "f32");

        let vts = ValueTypeSet {
            lanes: BitSet16::from_range(1, 8),
            ints: BitSet8::from_range(0, 0),
            floats: BitSet8::from_range(5, 7),
            dynamic_lanes: BitSet16::from_range(0, 8),
        };
        assert_eq!(vts.example().to_string(), "f32x2");

        let vts = ValueTypeSet {
            lanes: BitSet16::from_range(2, 8),
            ints: BitSet8::from_range(3, 7),
            floats: BitSet8::from_range(0, 0),
            dynamic_lanes: BitSet16::from_range(0, 8),
        };
        assert_eq!(vts.example().to_string(), "i32x4");

        let vts = ValueTypeSet {
            // Allow all lane counts up to 256 (log2 range [0, 9)).
            lanes: BitSet16::from_range(0, 9),
            ints: BitSet8::from_range(3, 7),
            floats: BitSet8::from_range(0, 0),
            dynamic_lanes: BitSet16::from_range(0, 8),
        };
        assert!(vts.contains(I32));
        assert!(vts.contains(I32X4));
    }
}