mod canonical;
pub(crate) use canonical::InternRecGroup;

use self::arc::MaybeOwned;
use super::{
    check_max, combine_type_sizes,
    operators::{ty_to_str, OperatorValidator, OperatorValidatorAllocations},
    types::{CoreTypeId, EntityType, RecGroupId, TypeAlloc, TypeList},
};
use crate::{
    limits::*, BinaryReaderError, ConstExpr, Data, DataKind, Element, ElementKind, ExternalKind,
    FuncType, Global, GlobalType, HeapType, MemoryType, RecGroup, RefType, Result, SubType, Table,
    TableInit, TableType, TagType, TypeRef, UnpackedIndex, ValType, VisitOperator, WasmFeatures,
    WasmModuleResources,
};
use crate::{prelude::*, CompositeInnerType};
use alloc::sync::Arc;
use core::mem;
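
/// Where the validator currently is within a core module's sections; each new
/// section must appear strictly later in this ordering than the previous one.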
#[derive(Copy, Clone, Default, PartialOrd, Ord, PartialEq, Eq, Debug)]
pub enum Order {
    #[default]
    Initial,
    Type,
    Import,
    Function,
    Table,
    Memory,
    Tag,
    Global,
    Export,
    Start,
    Element,
    DataCount,
    Code,
    Data,
}
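
/// Validation state accumulated for a single core module as its sections are
/// parsed.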
#[derive(Default)]
pub(crate) struct ModuleState {
    /// Internal state for the module being validated. This starts out solely
    /// owned so it can be mutated while sections are parsed, and is later
    /// shared via `Arc` (see `arc::MaybeOwned`).
    pub module: arc::MaybeOwned<Module>,

    /// Where we currently are, order-wise, in the binary.
    order: Order,

    /// The number of data segments seen so far in the data section.
    pub data_segment_count: u32,

    /// The number of code-section bodies still expected; nonzero at the end of
    /// the module is an error.
    pub expected_code_bodies: Option<u32>,

    /// Scratch allocations reused between constant-expression validations.
    const_expr_allocs: OperatorValidatorAllocations,

    /// The function index of the next code-section entry to be validated;
    /// starts at the number of imported functions.
    code_section_index: Option<usize>,
}
impl ModuleState {
    pub fn update_order(&mut self, order: Order, offset: usize) -> Result<()> {
        if self.order >= order {
            return Err(BinaryReaderError::new("section out of order", offset));
        }

        self.order = order;

        Ok(())
    }

    pub fn validate_end(&self, offset: usize) -> Result<()> {
        if let Some(data_count) = self.module.data_count {
            if data_count != self.data_segment_count {
                return Err(BinaryReaderError::new(
                    "data count and data section have inconsistent lengths",
                    offset,
                ));
            }
        }
        if let Some(n) = self.expected_code_bodies {
            if n > 0 {
                return Err(BinaryReaderError::new(
                    "function and code section have inconsistent lengths",
                    offset,
                ));
            }
        }

        Ok(())
    }

    pub fn next_code_index_and_type(&mut self, offset: usize) -> Result<(u32, u32)> {
        let index = self
            .code_section_index
            .get_or_insert(self.module.num_imported_functions as usize);

        if *index >= self.module.functions.len() {
            return Err(BinaryReaderError::new(
                "code section entry exceeds number of functions",
                offset,
            ));
        }

        let ty = self.module.functions[*index];
        *index += 1;

        Ok(((*index - 1) as u32, ty))
    }

    pub fn add_global(
        &mut self,
        mut global: Global,
        features: &WasmFeatures,
        types: &TypeList,
        offset: usize,
    ) -> Result<()> {
        self.module
            .check_global_type(&mut global.ty, features, types, offset)?;
        self.check_const_expr(&global.init_expr, global.ty.content_type, features, types)?;
        self.module.assert_mut().globals.push(global.ty);
        Ok(())
    }

    pub fn add_table(
        &mut self,
        mut table: Table<'_>,
        features: &WasmFeatures,
        types: &TypeList,
        offset: usize,
    ) -> Result<()> {
        self.module
            .check_table_type(&mut table.ty, features, types, offset)?;

        match &table.init {
            TableInit::RefNull => {
                if !table.ty.element_type.is_nullable() {
                    bail!(offset, "type mismatch: non-defaultable element type");
                }
            }
            TableInit::Expr(expr) => {
                if !features.function_references() {
                    bail!(
                        offset,
                        "tables with expression initializers require \
                         the function-references proposal"
                    );
                }
                self.check_const_expr(expr, table.ty.element_type.into(), features, types)?;
            }
        }
        self.module.assert_mut().tables.push(table.ty);
        Ok(())
    }
    pub fn add_data_segment(
        &mut self,
        data: Data,
        features: &WasmFeatures,
        types: &TypeList,
        offset: usize,
    ) -> Result<()> {
        match data.kind {
            DataKind::Passive => Ok(()),
            DataKind::Active {
                memory_index,
                offset_expr,
            } => {
                let ty = self.module.memory_at(memory_index, offset)?.index_type();
                self.check_const_expr(&offset_expr, ty, features, types)
            }
        }
    }

    pub fn add_element_segment(
        &mut self,
        mut e: Element,
        features: &WasmFeatures,
        types: &TypeList,
        offset: usize,
    ) -> Result<()> {
        let element_ty = match &mut e.items {
            crate::ElementItems::Functions(_) => RefType::FUNC,
            crate::ElementItems::Expressions(ty, _) => {
                self.module.check_ref_type(ty, features, offset)?;
                *ty
            }
        };

        match e.kind {
            ElementKind::Active {
                table_index,
                offset_expr,
            } => {
                let table = self.module.table_at(table_index.unwrap_or(0), offset)?;
                if !types.reftype_is_subtype(element_ty, table.element_type) {
                    return Err(BinaryReaderError::new(
                        format!(
                            "type mismatch: invalid element type `{}` for table type `{}`",
                            ty_to_str(element_ty.into()),
                            ty_to_str(table.element_type.into()),
                        ),
                        offset,
                    ));
                }

                self.check_const_expr(&offset_expr, table.index_type(), features, types)?;
            }
            ElementKind::Passive | ElementKind::Declared => {
                if !features.bulk_memory() {
                    return Err(BinaryReaderError::new(
                        "bulk memory must be enabled",
                        offset,
                    ));
                }
            }
        }

        let validate_count = |count: u32| -> Result<(), BinaryReaderError> {
            if count > MAX_WASM_TABLE_ENTRIES as u32 {
                Err(BinaryReaderError::new(
                    "number of elements is out of bounds",
                    offset,
                ))
            } else {
                Ok(())
            }
        };
        match e.items {
            crate::ElementItems::Functions(reader) => {
                let count = reader.count();
                validate_count(count)?;
                for f in reader.into_iter_with_offsets() {
                    let (offset, f) = f?;
                    self.module.get_func_type(f, types, offset)?;
                    self.module.assert_mut().function_references.insert(f);
                }
            }
            crate::ElementItems::Expressions(ty, reader) => {
                validate_count(reader.count())?;
                for expr in reader {
                    self.check_const_expr(&expr?, ValType::Ref(ty), features, types)?;
                }
            }
        }
        self.module.assert_mut().element_types.push(element_ty);
        Ok(())
    }
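
    /// Validates that `expr` is a constant expression producing a value of
    /// type `expected_ty`, reusing the operator-validator allocations cached
    /// on `self`.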
    fn check_const_expr(
        &mut self,
        expr: &ConstExpr<'_>,
        expected_ty: ValType,
        features: &WasmFeatures,
        types: &TypeList,
    ) -> Result<()> {
        let mut validator = VisitConstOperator {
            offset: 0,
            order: self.order,
            uninserted_funcref: false,
            ops: OperatorValidator::new_const_expr(
                features,
                expected_ty,
                mem::take(&mut self.const_expr_allocs),
            ),
            resources: OperatorValidatorResources {
                types,
                module: &mut self.module,
            },
            features,
        };

        let mut ops = expr.get_operators_reader();
        while !ops.eof() {
            validator.offset = ops.original_position();
            ops.visit_operator(&mut validator)??;
        }
        validator.ops.finish(ops.original_position())?;

        assert!(!validator.uninserted_funcref);

        self.const_expr_allocs = validator.ops.into_allocations();

        return Ok(());

        struct VisitConstOperator<'a> {
            offset: usize,
            uninserted_funcref: bool,
            ops: OperatorValidator,
            resources: OperatorValidatorResources<'a>,
            order: Order,
            features: &'a WasmFeatures,
        }

        impl VisitConstOperator<'_> {
            fn validator(&mut self) -> impl VisitOperator<'_, Output = Result<()>> {
                self.ops.with_resources(&self.resources, self.offset)
            }

            fn validate_extended_const(&mut self, op: &str) -> Result<()> {
                if self.ops.features.extended_const() {
                    Ok(())
                } else {
                    Err(BinaryReaderError::new(
                        format!(
                            "constant expression required: non-constant operator: {}",
                            op
                        ),
                        self.offset,
                    ))
                }
            }

            fn validate_gc(&mut self, op: &str) -> Result<()> {
                if self.features.gc() {
                    Ok(())
                } else {
                    Err(BinaryReaderError::new(
                        format!(
                            "constant expression required: non-constant operator: {}",
                            op
                        ),
                        self.offset,
                    ))
                }
            }

            fn validate_shared_everything_threads(&mut self, op: &str) -> Result<()> {
                if self.features.shared_everything_threads() {
                    Ok(())
                } else {
                    Err(BinaryReaderError::new(
                        format!(
                            "constant expression required: non-constant operator: {}",
                            op
                        ),
                        self.offset,
                    ))
                }
            }

            fn validate_global(&mut self, index: u32) -> Result<()> {
                let module = &self.resources.module;
                let global = module.global_at(index, self.offset)?;

                if index >= module.num_imported_globals && !self.features.gc() {
                    return Err(BinaryReaderError::new(
                        "constant expression required: global.get of locally defined global",
                        self.offset,
                    ));
                }
                if global.mutable {
                    return Err(BinaryReaderError::new(
                        "constant expression required: global.get of mutable global",
                        self.offset,
                    ));
                }
                Ok(())
            }

            fn insert_ref_func(&mut self, index: u32) {
                // By the time the data section is being validated the module
                // can no longer be mutated, so record that a `ref.func` went
                // uninserted instead; the assert in `check_const_expr` above
                // double-checks that this never sticks for a valid expression.
                if self.order == Order::Data {
                    self.uninserted_funcref = true;
                } else {
                    self.resources
                        .module
                        .assert_mut()
                        .function_references
                        .insert(index);
                }
            }
        }
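
        // The macro below implements `VisitOperator` for `VisitConstOperator`:
        // operators that are valid in constant expressions delegate to the
        // underlying operator validator (gated on the relevant proposal where
        // needed), and every other operator is rejected as non-constant.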
        macro_rules! define_visit_operator {
            ($(@$proposal:ident $op:ident $({ $($arg:ident: $argty:ty),* })? => $visit:ident)*) => {
                $(
                    #[allow(unused_variables)]
                    fn $visit(&mut self $($(,$arg: $argty)*)?) -> Self::Output {
                        define_visit_operator!(@visit self $visit $($($arg)*)?)
                    }
                )*
            };

            (@visit $self:ident visit_i32_const $val:ident) => {{
                $self.validator().visit_i32_const($val)
            }};
            (@visit $self:ident visit_i64_const $val:ident) => {{
                $self.validator().visit_i64_const($val)
            }};
            (@visit $self:ident visit_f32_const $val:ident) => {{
                $self.validator().visit_f32_const($val)
            }};
            (@visit $self:ident visit_f64_const $val:ident) => {{
                $self.validator().visit_f64_const($val)
            }};
            (@visit $self:ident visit_v128_const $val:ident) => {{
                $self.validator().visit_v128_const($val)
            }};
            (@visit $self:ident visit_ref_null $val:ident) => {{
                $self.validator().visit_ref_null($val)
            }};
            (@visit $self:ident visit_end) => {{
                $self.validator().visit_end()
            }};

            (@visit $self:ident visit_i32_add) => {{
                $self.validate_extended_const("i32.add")?;
                $self.validator().visit_i32_add()
            }};
            (@visit $self:ident visit_i32_sub) => {{
                $self.validate_extended_const("i32.sub")?;
                $self.validator().visit_i32_sub()
            }};
            (@visit $self:ident visit_i32_mul) => {{
                $self.validate_extended_const("i32.mul")?;
                $self.validator().visit_i32_mul()
            }};
            (@visit $self:ident visit_i64_add) => {{
                $self.validate_extended_const("i64.add")?;
                $self.validator().visit_i64_add()
            }};
            (@visit $self:ident visit_i64_sub) => {{
                $self.validate_extended_const("i64.sub")?;
                $self.validator().visit_i64_sub()
            }};
            (@visit $self:ident visit_i64_mul) => {{
                $self.validate_extended_const("i64.mul")?;
                $self.validator().visit_i64_mul()
            }};

            (@visit $self:ident visit_struct_new $type_index:ident) => {{
                $self.validate_gc("struct.new")?;
                $self.validator().visit_struct_new($type_index)
            }};
            (@visit $self:ident visit_struct_new_default $type_index:ident) => {{
                $self.validate_gc("struct.new_default")?;
                $self.validator().visit_struct_new_default($type_index)
            }};
            (@visit $self:ident visit_array_new $type_index:ident) => {{
                $self.validate_gc("array.new")?;
                $self.validator().visit_array_new($type_index)
            }};
            (@visit $self:ident visit_array_new_default $type_index:ident) => {{
                $self.validate_gc("array.new_default")?;
                $self.validator().visit_array_new_default($type_index)
            }};
            (@visit $self:ident visit_array_new_fixed $type_index:ident $n:ident) => {{
                $self.validate_gc("array.new_fixed")?;
                $self.validator().visit_array_new_fixed($type_index, $n)
            }};
            (@visit $self:ident visit_ref_i31) => {{
                $self.validate_gc("ref.i31")?;
                $self.validator().visit_ref_i31()
            }};
            (@visit $self:ident visit_ref_i31_shared) => {{
                $self.validate_shared_everything_threads("ref.i31_shared")?;
                $self.validator().visit_ref_i31_shared()
            }};

            (@visit $self:ident visit_global_get $idx:ident) => {{
                $self.validate_global($idx)?;
                $self.validator().visit_global_get($idx)
            }};
            (@visit $self:ident visit_ref_func $idx:ident) => {{
                $self.insert_ref_func($idx);
                $self.validator().visit_ref_func($idx)
            }};

            (@visit $self:ident $op:ident $($args:tt)*) => {{
                Err(BinaryReaderError::new(
                    format!("constant expression required: non-constant operator: {}", stringify!($op)),
                    $self.offset,
                ))
            }}
        }

        impl<'a> VisitOperator<'a> for VisitConstOperator<'a> {
            type Output = Result<()>;

            for_each_operator!(define_visit_operator);
        }
    }
}
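
/// The validator's view of a core module's index spaces: everything the module
/// has declared, imported, or exported so far.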
#[derive(Debug)]
pub(crate) struct Module {
    pub snapshot: Option<Arc<TypeList>>,
    pub types: Vec<CoreTypeId>,
    pub tables: Vec<TableType>,
    pub memories: Vec<MemoryType>,
    pub globals: Vec<GlobalType>,
    pub element_types: Vec<RefType>,
    pub data_count: Option<u32>,
    /// The type index (into `types`) of each function, imported functions
    /// first.
    pub functions: Vec<u32>,
    pub tags: Vec<CoreTypeId>,
    /// Functions that may validly appear in `ref.func`: those used in element
    /// segments, exports, or constant expressions.
    pub function_references: Set<u32>,
    pub imports: IndexMap<(String, String), Vec<EntityType>>,
    pub exports: IndexMap<String, EntityType>,
    pub type_size: u32,
    num_imported_globals: u32,
    num_imported_functions: u32,
}
impl Module {
    pub fn add_types(
        &mut self,
        rec_group: RecGroup,
        features: &WasmFeatures,
        types: &mut TypeAlloc,
        offset: usize,
        check_limit: bool,
    ) -> Result<()> {
        if check_limit {
            check_max(
                self.types.len(),
                rec_group.types().len() as u32,
                MAX_WASM_TYPES,
                "types",
                offset,
            )?;
        }
        self.canonicalize_and_intern_rec_group(features, types, rec_group, offset)
    }

    pub fn add_import(
        &mut self,
        mut import: crate::Import,
        features: &WasmFeatures,
        types: &TypeList,
        offset: usize,
    ) -> Result<()> {
        let entity = self.check_type_ref(&mut import.ty, features, types, offset)?;

        let (len, max, desc) = match import.ty {
            TypeRef::Func(type_index) => {
                self.functions.push(type_index);
                self.num_imported_functions += 1;
                (self.functions.len(), MAX_WASM_FUNCTIONS, "functions")
            }
            TypeRef::Table(ty) => {
                self.tables.push(ty);
                (self.tables.len(), self.max_tables(features), "tables")
            }
            TypeRef::Memory(ty) => {
                self.memories.push(ty);
                (self.memories.len(), self.max_memories(features), "memories")
            }
            TypeRef::Tag(ty) => {
                self.tags.push(self.types[ty.func_type_idx as usize]);
                (self.tags.len(), MAX_WASM_TAGS, "tags")
            }
            TypeRef::Global(ty) => {
                if !features.mutable_global() && ty.mutable {
                    return Err(BinaryReaderError::new(
                        "mutable global support is not enabled",
                        offset,
                    ));
                }
                self.globals.push(ty);
                self.num_imported_globals += 1;
                (self.globals.len(), MAX_WASM_GLOBALS, "globals")
            }
        };

        check_max(len, 0, max, desc, offset)?;

        self.type_size = combine_type_sizes(self.type_size, entity.info(types).size(), offset)?;

        self.imports
            .entry((import.module.to_string(), import.name.to_string()))
            .or_default()
            .push(entity);

        Ok(())
    }

    pub fn add_export(
        &mut self,
        name: &str,
        ty: EntityType,
        features: &WasmFeatures,
        offset: usize,
        check_limit: bool,
        types: &TypeList,
    ) -> Result<()> {
        if !features.mutable_global() {
            if let EntityType::Global(global_type) = ty {
                if global_type.mutable {
                    return Err(BinaryReaderError::new(
                        "mutable global support is not enabled",
                        offset,
                    ));
                }
            }
        }

        if check_limit {
            check_max(self.exports.len(), 1, MAX_WASM_EXPORTS, "exports", offset)?;
        }

        self.type_size = combine_type_sizes(self.type_size, ty.info(types).size(), offset)?;

        match self.exports.insert(name.to_string(), ty) {
            Some(_) => Err(format_err!(
                offset,
                "duplicate export name `{name}` already defined"
            )),
            None => Ok(()),
        }
    }
    pub fn add_function(&mut self, type_index: u32, types: &TypeList, offset: usize) -> Result<()> {
        self.func_type_at(type_index, types, offset)?;
        self.functions.push(type_index);
        Ok(())
    }

    pub fn add_memory(
        &mut self,
        ty: MemoryType,
        features: &WasmFeatures,
        offset: usize,
    ) -> Result<()> {
        self.check_memory_type(&ty, features, offset)?;
        self.memories.push(ty);
        Ok(())
    }

    pub fn add_tag(
        &mut self,
        ty: TagType,
        features: &WasmFeatures,
        types: &TypeList,
        offset: usize,
    ) -> Result<()> {
        self.check_tag_type(&ty, features, types, offset)?;
        self.tags.push(self.types[ty.func_type_idx as usize]);
        Ok(())
    }

    fn sub_type_at<'a>(&self, types: &'a TypeList, idx: u32, offset: usize) -> Result<&'a SubType> {
        let id = self.type_id_at(idx, offset)?;
        Ok(&types[id])
    }

    fn func_type_at<'a>(
        &self,
        type_index: u32,
        types: &'a TypeList,
        offset: usize,
    ) -> Result<&'a FuncType> {
        match &self
            .sub_type_at(types, type_index, offset)?
            .composite_type
            .inner
        {
            CompositeInnerType::Func(f) => Ok(f),
            _ => bail!(offset, "type index {type_index} is not a function type"),
        }
    }
    pub fn check_type_ref(
        &self,
        type_ref: &mut TypeRef,
        features: &WasmFeatures,
        types: &TypeList,
        offset: usize,
    ) -> Result<EntityType> {
        Ok(match type_ref {
            TypeRef::Func(type_index) => {
                self.func_type_at(*type_index, types, offset)?;
                EntityType::Func(self.types[*type_index as usize])
            }
            TypeRef::Table(t) => {
                self.check_table_type(t, features, types, offset)?;
                EntityType::Table(*t)
            }
            TypeRef::Memory(t) => {
                self.check_memory_type(t, features, offset)?;
                EntityType::Memory(*t)
            }
            TypeRef::Tag(t) => {
                self.check_tag_type(t, features, types, offset)?;
                EntityType::Tag(self.types[t.func_type_idx as usize])
            }
            TypeRef::Global(t) => {
                self.check_global_type(t, features, types, offset)?;
                EntityType::Global(*t)
            }
        })
    }

    fn check_table_type(
        &self,
        ty: &mut TableType,
        features: &WasmFeatures,
        types: &TypeList,
        offset: usize,
    ) -> Result<()> {
        if ty.element_type != RefType::FUNCREF {
            self.check_ref_type(&mut ty.element_type, features, offset)?
        }

        if ty.table64 && !features.memory64() {
            return Err(BinaryReaderError::new(
                "memory64 must be enabled for 64-bit tables",
                offset,
            ));
        }

        self.check_limits(ty.initial, ty.maximum, offset)?;
        if ty.initial > MAX_WASM_TABLE_ENTRIES as u64 {
            return Err(BinaryReaderError::new(
                "minimum table size is out of bounds",
                offset,
            ));
        }

        if ty.shared {
            if !features.shared_everything_threads() {
                return Err(BinaryReaderError::new(
                    "shared tables require the shared-everything-threads proposal",
                    offset,
                ));
            }

            if !types.reftype_is_shared(ty.element_type) {
                return Err(BinaryReaderError::new(
                    "shared tables must have a shared element type",
                    offset,
                ));
            }
        }

        Ok(())
    }
    fn check_memory_type(
        &self,
        ty: &MemoryType,
        features: &WasmFeatures,
        offset: usize,
    ) -> Result<()> {
        self.check_limits(ty.initial, ty.maximum, offset)?;
        let (page_size, page_size_log2) = if let Some(page_size_log2) = ty.page_size_log2 {
            if !features.custom_page_sizes() {
                return Err(BinaryReaderError::new(
                    "the custom page sizes proposal must be enabled to \
                     customize a memory's page size",
                    offset,
                ));
            }
            if page_size_log2 != 0 && page_size_log2 != 16 {
                return Err(BinaryReaderError::new("invalid custom page size", offset));
            }
            let page_size = 1_u64 << page_size_log2;
            debug_assert!(page_size.is_power_of_two());
            debug_assert!(page_size == DEFAULT_WASM_PAGE_SIZE || page_size == 1);
            (page_size, page_size_log2)
        } else {
            let page_size_log2 = 16;
            debug_assert_eq!(DEFAULT_WASM_PAGE_SIZE, 1 << page_size_log2);
            (DEFAULT_WASM_PAGE_SIZE, page_size_log2)
        };
        let (true_maximum, err) = if ty.memory64 {
            if !features.memory64() {
                return Err(BinaryReaderError::new(
                    "memory64 must be enabled for 64-bit memories",
                    offset,
                ));
            }
            (
                max_wasm_memory64_pages(page_size),
                format!(
                    "memory size must be at most 2**{} pages",
                    64 - page_size_log2
                ),
            )
        } else {
            let max = max_wasm_memory32_pages(page_size);
            (
                max,
                format!("memory size must be at most {max} pages (4GiB)"),
            )
        };
        if ty.initial > true_maximum {
            return Err(BinaryReaderError::new(err, offset));
        }
        if let Some(maximum) = ty.maximum {
            if maximum > true_maximum {
                return Err(BinaryReaderError::new(err, offset));
            }
        }
        if ty.shared {
            if !features.threads() {
                return Err(BinaryReaderError::new(
                    "threads must be enabled for shared memories",
                    offset,
                ));
            }
            if ty.maximum.is_none() {
                return Err(BinaryReaderError::new(
                    "shared memory must have maximum size",
                    offset,
                ));
            }
        }
        Ok(())
    }

    pub(crate) fn imports_for_module_type(
        &self,
        offset: usize,
    ) -> Result<IndexMap<(String, String), EntityType>> {
        self.imports
            .iter()
            .map(|((module, name), types)| {
                if types.len() != 1 {
                    bail!(
                        offset,
                        "module has a duplicate import name `{module}:{name}` \
                         that is not allowed in components",
                    );
                }
                Ok(((module.clone(), name.clone()), types[0]))
            })
            .collect::<Result<_>>()
    }
    fn check_value_type(
        &self,
        ty: &mut ValType,
        features: &WasmFeatures,
        offset: usize,
    ) -> Result<()> {
        match ty {
            ValType::Ref(rt) => self.check_ref_type(rt, features, offset),
            _ => features
                .check_value_type(*ty)
                .map_err(|e| BinaryReaderError::new(e, offset)),
        }
    }

    fn check_ref_type(
        &self,
        ty: &mut RefType,
        features: &WasmFeatures,
        offset: usize,
    ) -> Result<()> {
        features
            .check_ref_type(*ty)
            .map_err(|e| BinaryReaderError::new(e, offset))?;
        let mut hty = ty.heap_type();
        self.check_heap_type(&mut hty, offset)?;
        *ty = RefType::new(ty.is_nullable(), hty).unwrap();
        Ok(())
    }

    fn check_heap_type(&self, ty: &mut HeapType, offset: usize) -> Result<()> {
        // Only concrete (indexed) heap types need checking here; the module
        // index is resolved to a canonical type id.
        let type_index = match ty {
            HeapType::Abstract { .. } => return Ok(()),
            HeapType::Concrete(type_index) => type_index,
        };
        match type_index {
            UnpackedIndex::Module(idx) => {
                let id = self.type_id_at(*idx, offset)?;
                *type_index = UnpackedIndex::Id(id);
                Ok(())
            }
            UnpackedIndex::RecGroup(_) | UnpackedIndex::Id(_) => unreachable!(),
        }
    }

    fn check_tag_type(
        &self,
        ty: &TagType,
        features: &WasmFeatures,
        types: &TypeList,
        offset: usize,
    ) -> Result<()> {
        if !features.exceptions() {
            return Err(BinaryReaderError::new(
                "exceptions proposal not enabled",
                offset,
            ));
        }
        let ty = self.func_type_at(ty.func_type_idx, types, offset)?;
        if !ty.results().is_empty() {
            return Err(BinaryReaderError::new(
                "invalid exception type: non-empty tag result type",
                offset,
            ));
        }
        Ok(())
    }
    fn check_global_type(
        &self,
        ty: &mut GlobalType,
        features: &WasmFeatures,
        types: &TypeList,
        offset: usize,
    ) -> Result<()> {
        self.check_value_type(&mut ty.content_type, features, offset)?;
        if ty.shared {
            if !features.shared_everything_threads() {
                return Err(BinaryReaderError::new(
                    "shared globals require the shared-everything-threads proposal",
                    offset,
                ));
            }
            if !types.valtype_is_shared(ty.content_type) {
                return Err(BinaryReaderError::new(
                    "shared globals must have a shared value type",
                    offset,
                ));
            }
        }
        Ok(())
    }

    fn check_limits<T>(&self, initial: T, maximum: Option<T>, offset: usize) -> Result<()>
    where
        T: Into<u64>,
    {
        if let Some(max) = maximum {
            if initial.into() > max.into() {
                return Err(BinaryReaderError::new(
                    "size minimum must not be greater than maximum",
                    offset,
                ));
            }
        }
        Ok(())
    }

    pub fn max_tables(&self, features: &WasmFeatures) -> usize {
        if features.reference_types() {
            MAX_WASM_TABLES
        } else {
            1
        }
    }

    pub fn max_memories(&self, features: &WasmFeatures) -> usize {
        if features.multi_memory() {
            MAX_WASM_MEMORIES
        } else {
            1
        }
    }
    pub fn export_to_entity_type(
        &mut self,
        export: &crate::Export,
        offset: usize,
    ) -> Result<EntityType> {
        let check = |ty: &str, index: u32, total: usize| {
            if index as usize >= total {
                Err(format_err!(
                    offset,
                    "unknown {ty} {index}: exported {ty} index out of bounds",
                ))
            } else {
                Ok(())
            }
        };

        Ok(match export.kind {
            ExternalKind::Func => {
                check("function", export.index, self.functions.len())?;
                self.function_references.insert(export.index);
                EntityType::Func(self.types[self.functions[export.index as usize] as usize])
            }
            ExternalKind::Table => {
                check("table", export.index, self.tables.len())?;
                EntityType::Table(self.tables[export.index as usize])
            }
            ExternalKind::Memory => {
                check("memory", export.index, self.memories.len())?;
                EntityType::Memory(self.memories[export.index as usize])
            }
            ExternalKind::Global => {
                check("global", export.index, self.globals.len())?;
                EntityType::Global(self.globals[export.index as usize])
            }
            ExternalKind::Tag => {
                check("tag", export.index, self.tags.len())?;
                EntityType::Tag(self.tags[export.index as usize])
            }
        })
    }

    pub fn get_func_type<'a>(
        &self,
        func_idx: u32,
        types: &'a TypeList,
        offset: usize,
    ) -> Result<&'a FuncType> {
        match self.functions.get(func_idx as usize) {
            Some(idx) => self.func_type_at(*idx, types, offset),
            None => Err(format_err!(
                offset,
                "unknown function {func_idx}: func index out of bounds",
            )),
        }
    }

    fn global_at(&self, idx: u32, offset: usize) -> Result<&GlobalType> {
        match self.globals.get(idx as usize) {
            Some(t) => Ok(t),
            None => Err(format_err!(
                offset,
                "unknown global {idx}: global index out of bounds"
            )),
        }
    }

    fn table_at(&self, idx: u32, offset: usize) -> Result<&TableType> {
        match self.tables.get(idx as usize) {
            Some(t) => Ok(t),
            None => Err(format_err!(
                offset,
                "unknown table {idx}: table index out of bounds"
            )),
        }
    }

    fn memory_at(&self, idx: u32, offset: usize) -> Result<&MemoryType> {
        match self.memories.get(idx as usize) {
            Some(t) => Ok(t),
            None => Err(format_err!(
                offset,
                "unknown memory {idx}: memory index out of bounds"
            )),
        }
    }
}
impl InternRecGroup for Module {
    fn add_type_id(&mut self, id: CoreTypeId) {
        self.types.push(id);
    }

    fn type_id_at(&self, idx: u32, offset: usize) -> Result<CoreTypeId> {
        self.types
            .get(idx as usize)
            .copied()
            .ok_or_else(|| format_err!(offset, "unknown type {idx}: type index out of bounds"))
    }

    fn types_len(&self) -> u32 {
        u32::try_from(self.types.len()).unwrap()
    }
}

impl Default for Module {
    fn default() -> Self {
        Self {
            snapshot: Default::default(),
            types: Default::default(),
            tables: Default::default(),
            memories: Default::default(),
            globals: Default::default(),
            element_types: Default::default(),
            data_count: Default::default(),
            functions: Default::default(),
            tags: Default::default(),
            function_references: Default::default(),
            imports: Default::default(),
            exports: Default::default(),
            type_size: 1,
            num_imported_globals: Default::default(),
            num_imported_functions: Default::default(),
        }
    }
}
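
/// `WasmModuleResources` implementation used while the module is still being
/// built and may require mutation, e.g. to record `ref.func` uses found in
/// constant expressions.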
struct OperatorValidatorResources<'a> {
    module: &'a mut MaybeOwned<Module>,
    types: &'a TypeList,
}

impl WasmModuleResources for OperatorValidatorResources<'_> {
    fn table_at(&self, at: u32) -> Option<TableType> {
        self.module.tables.get(at as usize).cloned()
    }

    fn memory_at(&self, at: u32) -> Option<MemoryType> {
        self.module.memories.get(at as usize).cloned()
    }

    fn tag_at(&self, at: u32) -> Option<&FuncType> {
        let type_id = *self.module.tags.get(at as usize)?;
        Some(self.types[type_id].unwrap_func())
    }

    fn global_at(&self, at: u32) -> Option<GlobalType> {
        self.module.globals.get(at as usize).cloned()
    }

    fn sub_type_at(&self, at: u32) -> Option<&SubType> {
        let id = *self.module.types.get(at as usize)?;
        Some(&self.types[id])
    }

    fn type_id_of_function(&self, at: u32) -> Option<CoreTypeId> {
        let type_index = self.module.functions.get(at as usize)?;
        self.module.types.get(*type_index as usize).copied()
    }

    fn type_index_of_function(&self, at: u32) -> Option<u32> {
        self.module.functions.get(at as usize).copied()
    }

    fn check_heap_type(&self, t: &mut HeapType, offset: usize) -> Result<()> {
        self.module.check_heap_type(t, offset)
    }

    fn top_type(&self, heap_type: &HeapType) -> HeapType {
        self.types.top_type(heap_type)
    }

    fn element_type_at(&self, at: u32) -> Option<RefType> {
        self.module.element_types.get(at as usize).cloned()
    }

    fn is_subtype(&self, a: ValType, b: ValType) -> bool {
        self.types.valtype_is_subtype(a, b)
    }

    fn is_shared(&self, ty: RefType) -> bool {
        self.types.reftype_is_shared(ty)
    }

    fn element_count(&self) -> u32 {
        self.module.element_types.len() as u32
    }

    fn data_count(&self) -> Option<u32> {
        self.module.data_count
    }

    fn is_function_referenced(&self, idx: u32) -> bool {
        self.module.function_references.contains(&idx)
    }
}
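
/// The `WasmModuleResources` implementation exposed for validating function
/// bodies; it wraps the finished module in an `Arc` so that it can be shared.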
#[derive(Debug)]
pub struct ValidatorResources(pub(crate) Arc<Module>);

impl WasmModuleResources for ValidatorResources {
    fn table_at(&self, at: u32) -> Option<TableType> {
        self.0.tables.get(at as usize).cloned()
    }

    fn memory_at(&self, at: u32) -> Option<MemoryType> {
        self.0.memories.get(at as usize).cloned()
    }

    fn tag_at(&self, at: u32) -> Option<&FuncType> {
        let id = *self.0.tags.get(at as usize)?;
        let types = self.0.snapshot.as_ref().unwrap();
        match &types[id].composite_type.inner {
            CompositeInnerType::Func(f) => Some(f),
            _ => None,
        }
    }

    fn global_at(&self, at: u32) -> Option<GlobalType> {
        self.0.globals.get(at as usize).cloned()
    }

    fn sub_type_at(&self, at: u32) -> Option<&SubType> {
        let id = *self.0.types.get(at as usize)?;
        let types = self.0.snapshot.as_ref().unwrap();
        Some(&types[id])
    }

    fn type_id_of_function(&self, at: u32) -> Option<CoreTypeId> {
        let type_index = *self.0.functions.get(at as usize)?;
        self.0.types.get(type_index as usize).copied()
    }

    fn type_index_of_function(&self, at: u32) -> Option<u32> {
        self.0.functions.get(at as usize).copied()
    }

    fn check_heap_type(&self, t: &mut HeapType, offset: usize) -> Result<()> {
        self.0.check_heap_type(t, offset)
    }

    fn top_type(&self, heap_type: &HeapType) -> HeapType {
        self.0.snapshot.as_ref().unwrap().top_type(heap_type)
    }

    fn element_type_at(&self, at: u32) -> Option<RefType> {
        self.0.element_types.get(at as usize).cloned()
    }

    fn is_subtype(&self, a: ValType, b: ValType) -> bool {
        self.0.snapshot.as_ref().unwrap().valtype_is_subtype(a, b)
    }

    fn is_shared(&self, ty: RefType) -> bool {
        self.0.snapshot.as_ref().unwrap().reftype_is_shared(ty)
    }

    fn element_count(&self) -> u32 {
        self.0.element_types.len() as u32
    }

    fn data_count(&self) -> Option<u32> {
        self.0.data_count
    }

    fn is_function_referenced(&self, idx: u32) -> bool {
        self.0.function_references.contains(&idx)
    }
}
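
// Compile-time check that `ValidatorResources` is `Send`, so it can be handed
// to other threads (e.g. for parallel function-body validation).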
const _: () = {
    fn assert_send<T: Send>() {}

    fn assert() {
        assert_send::<ValidatorResources>();
    }
};
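
/// A small helper for a value that starts out owned (and mutable) and can
/// later be converted into a shared `Arc` handle.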
mod arc {
    use alloc::sync::Arc;
    use core::ops::Deref;

    enum Inner<T> {
        Owned(T),
        Shared(Arc<T>),

        /// Only present transiently while swapping from `Owned` to `Shared`.
        Empty,
    }

    pub struct MaybeOwned<T> {
        inner: Inner<T>,
    }

    impl<T> MaybeOwned<T> {
        #[inline]
        fn as_mut(&mut self) -> Option<&mut T> {
            match &mut self.inner {
                Inner::Owned(x) => Some(x),
                Inner::Shared(_) => None,
                Inner::Empty => Self::unreachable(),
            }
        }

        #[inline]
        pub fn assert_mut(&mut self) -> &mut T {
            self.as_mut().unwrap()
        }

        pub fn arc(&mut self) -> &Arc<T> {
            self.make_shared();
            match &self.inner {
                Inner::Shared(x) => x,
                _ => Self::unreachable(),
            }
        }

        #[inline]
        fn make_shared(&mut self) {
            if let Inner::Shared(_) = self.inner {
                return;
            }

            let inner = core::mem::replace(&mut self.inner, Inner::Empty);
            let x = match inner {
                Inner::Owned(x) => x,
                _ => Self::unreachable(),
            };
            let x = Arc::new(x);
            self.inner = Inner::Shared(x);
        }

        #[cold]
        #[inline(never)]
        fn unreachable() -> ! {
            unreachable!()
        }
    }

    impl<T: Default> Default for MaybeOwned<T> {
        fn default() -> MaybeOwned<T> {
            MaybeOwned {
                inner: Inner::Owned(T::default()),
            }
        }
    }

    impl<T> Deref for MaybeOwned<T> {
        type Target = T;

        fn deref(&self) -> &T {
            match &self.inner {
                Inner::Owned(x) => x,
                Inner::Shared(x) => x,
                Inner::Empty => Self::unreachable(),
            }
        }
    }
}