1use crate::{
30 DefinedGlobalIndex, DefinedMemoryIndex, DefinedTableIndex, FuncIndex, FuncRefIndex,
31 GlobalIndex, MemoryIndex, Module, TableIndex,
32};
33use cranelift_entity::packed_option::ReservedValue;
34use wasmtime_types::OwnedMemoryIndex;
35
/// Convert a `usize` count taken from a `Module` into the `u32`
/// representation used throughout `VMOffsets`.
///
/// The previous version carried two `#[cfg(target_pointer_width = ...)]`
/// duplicates that differed only in panic message; on 32-bit targets the
/// conversion is infallible, so a single checked definition covers both.
///
/// # Panics
///
/// Panics if the value does not fit in 32 bits, which can only happen on
/// targets whose pointers are wider than 32 bits.
fn cast_to_u32(sz: usize) -> u32 {
    u32::try_from(sz).expect("overflow in cast from usize to u32")
}
44
/// Round `offset` up to the nearest multiple of `width`.
///
/// `width` must be non-zero. Values already on a `width` boundary are
/// returned unchanged.
#[inline]
fn align(offset: u32, width: u32) -> u32 {
    // Bump past the boundary, then truncate back down with integer division.
    let bumped = offset + (width - 1);
    (bumped / width) * width
}
50
/// Precomputed byte offsets of the fields within a module's `VMContext`
/// (and related runtime structures), so that JIT-compiled code and the
/// runtime can address them directly.
#[derive(Debug, Clone, Copy)]
pub struct VMOffsets<P> {
    /// The pointer size of the compilation target, abstracted behind
    /// `PtrSize` so offsets can be computed cross-target.
    pub ptr: P,
    /// The number of imported functions in the module.
    pub num_imported_functions: u32,
    /// The number of imported tables in the module.
    pub num_imported_tables: u32,
    /// The number of imported memories in the module.
    pub num_imported_memories: u32,
    /// The number of imported globals in the module.
    pub num_imported_globals: u32,
    /// The number of defined (non-imported) tables in the module.
    pub num_defined_tables: u32,
    /// The number of defined (non-imported) memories in the module.
    pub num_defined_memories: u32,
    /// The number of memories owned by this instance (defined and
    /// non-shared; see `VMOffsets::new`).
    pub num_owned_memories: u32,
    /// The number of defined (non-imported) globals in the module.
    pub num_defined_globals: u32,
    /// The number of escaped functions, i.e. functions needing a
    /// `VMFuncRef` slot in the `VMContext`.
    pub num_escaped_funcs: u32,

    // Precalculated start offsets of the variable-length regions that
    // follow the static `VMContext` header, plus the total `size` of the
    // whole `VMContext`. Filled in by `From<VMOffsetsFields>`.
    imported_functions: u32,
    imported_tables: u32,
    imported_memories: u32,
    imported_globals: u32,
    defined_tables: u32,
    defined_memories: u32,
    owned_memories: u32,
    defined_globals: u32,
    defined_func_refs: u32,
    size: u32,
}
89
/// Abstraction over the pointer width of the compilation target, used to
/// compute offsets within pointer-containing runtime structures without
/// assuming host and target pointer widths match.
pub trait PtrSize {
    /// The size of a pointer in bytes on the target.
    fn size(&self) -> u8;

    /// The byte offset of the `runtime_limits` field in `VMContext`: the
    /// 4-byte magic header rounded up to pointer alignment.
    fn vmcontext_runtime_limits(&self) -> u8 {
        u8::try_from(align(
            u32::try_from(core::mem::size_of::<u32>()).unwrap(),
            u32::from(self.size()),
        ))
        .unwrap()
    }

    /// The byte offset of the `builtin_functions` field in `VMContext`:
    /// one pointer past `runtime_limits`.
    fn vmcontext_builtin_functions(&self) -> u8 {
        self.vmcontext_runtime_limits() + self.size()
    }

    /// The byte offset of the `array_call` field in a `VMFuncRef`.
    #[inline]
    fn vm_func_ref_array_call(&self) -> u8 {
        0 * self.size()
    }

    /// The byte offset of the `wasm_call` field in a `VMFuncRef`.
    #[inline]
    fn vm_func_ref_wasm_call(&self) -> u8 {
        1 * self.size()
    }

    /// The byte offset of the `type_index` field in a `VMFuncRef`.
    #[inline]
    fn vm_func_ref_type_index(&self) -> u8 {
        2 * self.size()
    }

    /// The byte offset of the `vmctx` field in a `VMFuncRef`.
    #[inline]
    fn vm_func_ref_vmctx(&self) -> u8 {
        3 * self.size()
    }

    /// The total size of a `VMFuncRef`: four pointer-sized fields.
    #[inline]
    fn size_of_vm_func_ref(&self) -> u8 {
        4 * self.size()
    }

    /// The byte size of a `VMGlobalDefinition`: a fixed 16 bytes
    /// regardless of pointer width.
    // NOTE(review): presumably 16 so the largest global value type (v128)
    // fits — confirm against the `VMGlobalDefinition` definition.
    #[inline]
    fn size_of_vmglobal_definition(&self) -> u8 {
        16
    }

    // --- Offsets within `VMRuntimeLimits` ---

    /// The byte offset of the `stack_limit` field (first field).
    #[inline]
    fn vmruntime_limits_stack_limit(&self) -> u8 {
        0
    }

    /// The byte offset of the `fuel_consumed` field: one pointer past
    /// `stack_limit`.
    #[inline]
    fn vmruntime_limits_fuel_consumed(&self) -> u8 {
        self.size()
    }

    /// The byte offset of the `epoch_deadline` field: 8 bytes (a 64-bit
    /// `fuel_consumed`) further on.
    #[inline]
    fn vmruntime_limits_epoch_deadline(&self) -> u8 {
        self.vmruntime_limits_fuel_consumed() + 8
    }

    /// The byte offset of the `last_wasm_exit_fp` field: 8 bytes (a
    /// 64-bit `epoch_deadline`) further on.
    fn vmruntime_limits_last_wasm_exit_fp(&self) -> u8 {
        self.vmruntime_limits_epoch_deadline() + 8
    }

    /// The byte offset of the `last_wasm_exit_pc` field.
    fn vmruntime_limits_last_wasm_exit_pc(&self) -> u8 {
        self.vmruntime_limits_last_wasm_exit_fp() + self.size()
    }

    /// The byte offset of the `last_wasm_entry_sp` field.
    fn vmruntime_limits_last_wasm_entry_sp(&self) -> u8 {
        self.vmruntime_limits_last_wasm_exit_pc() + self.size()
    }

    // --- Offsets within `VMMemoryDefinition` ---

    /// The byte offset of the `base` pointer field.
    #[inline]
    fn vmmemory_definition_base(&self) -> u8 {
        0 * self.size()
    }

    /// The byte offset of the `current_length` field.
    #[inline]
    fn vmmemory_definition_current_length(&self) -> u8 {
        1 * self.size()
    }

    /// The total size of a `VMMemoryDefinition`: two pointer-sized fields.
    #[inline]
    fn size_of_vmmemory_definition(&self) -> u8 {
        2 * self.size()
    }

    /// The byte size of a pointer to a `VMMemoryDefinition`.
    #[inline]
    fn size_of_vmmemory_pointer(&self) -> u8 {
        self.size()
    }

    /// The byte offset of the `func_ref` field in a
    /// `VMArrayCallHostFuncContext`: its 4-byte magic rounded up to
    /// pointer alignment.
    fn vmarray_call_host_func_context_func_ref(&self) -> u8 {
        u8::try_from(align(
            u32::try_from(core::mem::size_of::<u32>()).unwrap(),
            u32::from(self.size()),
        ))
        .unwrap()
    }

    // --- Offsets of the statically-sized header fields of `VMContext` ---
    // Each field below is laid out one pointer after the previous one
    // (except `vmctx_type_ids_array`, see its comment).

    /// The byte offset of the magic value; kept at offset 0 so the magic
    /// can be validated before anything else is read.
    #[inline]
    fn vmctx_magic(&self) -> u8 {
        0
    }

    /// The byte offset of the runtime-limits pointer.
    #[inline]
    fn vmctx_runtime_limits(&self) -> u8 {
        self.vmctx_magic() + self.size()
    }

    /// The byte offset of the builtin-functions pointer.
    #[inline]
    fn vmctx_builtin_functions(&self) -> u8 {
        self.vmctx_runtime_limits() + self.size()
    }

    /// The byte offset of the callee pointer.
    #[inline]
    fn vmctx_callee(&self) -> u8 {
        self.vmctx_builtin_functions() + self.size()
    }

    /// The byte offset of the epoch pointer.
    #[inline]
    fn vmctx_epoch_ptr(&self) -> u8 {
        self.vmctx_callee() + self.size()
    }

    /// The byte offset of the GC heap base.
    #[inline]
    fn vmctx_gc_heap_base(&self) -> u8 {
        self.vmctx_epoch_ptr() + self.size()
    }

    /// The byte offset of the GC heap bound.
    #[inline]
    fn vmctx_gc_heap_bound(&self) -> u8 {
        self.vmctx_gc_heap_base() + self.size()
    }

    /// The byte offset of the GC heap data pointer.
    #[inline]
    fn vmctx_gc_heap_data(&self) -> u8 {
        self.vmctx_gc_heap_bound() + self.size()
    }

    /// The byte offset of the store pointer.
    #[inline]
    fn vmctx_store(&self) -> u8 {
        self.vmctx_gc_heap_data() + self.size()
    }

    /// The byte offset of the type-ids array pointer. The preceding
    /// `store` field occupies two pointers (a Rust fat pointer), hence
    /// the `2 *` stride here.
    #[inline]
    fn vmctx_type_ids_array(&self) -> u8 {
        self.vmctx_store() + 2 * self.size()
    }

    /// The byte offset where the per-module variable-length data (import
    /// and definition arrays, func refs, ...) begins.
    #[inline]
    fn vmctx_dynamic_data_start(&self) -> u8 {
        self.vmctx_type_ids_array() + self.size()
    }
}
293
/// A `PtrSize` implementation using the pointer width of the host
/// platform (i.e. target == host).
#[derive(Clone, Copy)]
pub struct HostPtr;
297
298impl PtrSize for HostPtr {
299 #[inline]
300 fn size(&self) -> u8 {
301 core::mem::size_of::<usize>() as u8
302 }
303}
304
/// A `PtrSize` implementation where the `u8` value itself is the pointer
/// width in bytes (e.g. `4u8` to compute offsets for a 32-bit target).
impl PtrSize for u8 {
    #[inline]
    fn size(&self) -> u8 {
        *self
    }
}
311
/// The input counts used to construct a `VMOffsets` via its `From`
/// implementation; mirrors the public fields of `VMOffsets`.
#[derive(Debug, Clone, Copy)]
pub struct VMOffsetsFields<P> {
    /// The pointer size of the compilation target.
    pub ptr: P,
    /// The number of imported functions in the module.
    pub num_imported_functions: u32,
    /// The number of imported tables in the module.
    pub num_imported_tables: u32,
    /// The number of imported memories in the module.
    pub num_imported_memories: u32,
    /// The number of imported globals in the module.
    pub num_imported_globals: u32,
    /// The number of defined tables in the module.
    pub num_defined_tables: u32,
    /// The number of defined memories in the module.
    pub num_defined_memories: u32,
    /// The number of memories owned by the instance (defined, non-shared).
    pub num_owned_memories: u32,
    /// The number of defined globals in the module.
    pub num_defined_globals: u32,
    /// The number of escaped functions needing a `VMFuncRef` slot.
    pub num_escaped_funcs: u32,
}
337
impl<P: PtrSize> VMOffsets<P> {
    /// Return a new `VMOffsets` for the given target pointer size and
    /// `module`.
    pub fn new(ptr: P, module: &Module) -> Self {
        // Owned memories are the defined (non-imported) memories that are
        // not shared; shared memories live outside the `VMContext` and are
        // only pointed to.
        let num_owned_memories = module
            .memory_plans
            .iter()
            .skip(module.num_imported_memories)
            .filter(|p| !p.1.memory.shared)
            .count()
            .try_into()
            .unwrap();
        VMOffsets::from(VMOffsetsFields {
            ptr,
            num_imported_functions: cast_to_u32(module.num_imported_funcs),
            num_imported_tables: cast_to_u32(module.num_imported_tables),
            num_imported_memories: cast_to_u32(module.num_imported_memories),
            num_imported_globals: cast_to_u32(module.num_imported_globals),
            num_defined_tables: cast_to_u32(module.table_plans.len() - module.num_imported_tables),
            num_defined_memories: cast_to_u32(
                module.memory_plans.len() - module.num_imported_memories,
            ),
            num_owned_memories,
            num_defined_globals: cast_to_u32(module.globals.len() - module.num_imported_globals),
            num_escaped_funcs: cast_to_u32(module.num_escaped_funcs),
        })
    }

    /// The pointer size, in bytes, of the compilation target.
    #[inline]
    pub fn pointer_size(&self) -> u8 {
        self.ptr.size()
    }

    /// Iterate over the `(description, byte size)` of each region of the
    /// `VMContext`, in layout order, ending with the static header
    /// ("static vmctx data"). Intended for diagnostics.
    pub fn region_sizes(&self) -> impl Iterator<Item = (&str, u32)> {
        // The macro is given the offset fields in *reverse* layout order:
        // each region's size is the distance from its start offset to the
        // start of the region after it (seeded with the total `size`).
        macro_rules! calculate_sizes {
            ($($name:ident: $desc:tt,)*) => {{
                // Exhaustive destructuring (no `..`) forces a compile
                // error here whenever a field is added to `VMOffsets`
                // without updating this list.
                let VMOffsets {
                    ptr: _,
                    num_imported_functions: _,
                    num_imported_tables: _,
                    num_imported_memories: _,
                    num_imported_globals: _,
                    num_defined_tables: _,
                    num_defined_globals: _,
                    num_defined_memories: _,
                    num_owned_memories: _,
                    num_escaped_funcs: _,

                    size,

                    $($name,)*
                } = *self;

                // Walk backwards from the total size, turning each start
                // offset into a region length.
                let mut last = size;
                $(
                    assert!($name <= last);
                    let tmp = $name;
                    let $name = last - $name;
                    last = tmp;
                )*
                // The static header before the first dynamic region is
                // never empty.
                assert_ne!(last, 0);
                IntoIterator::into_iter([
                    $(($desc, $name),)*
                    ("static vmctx data", last),
                ])
            }};
        }

        calculate_sizes! {
            defined_func_refs: "module functions",
            defined_globals: "defined globals",
            owned_memories: "owned memories",
            defined_memories: "defined memories",
            defined_tables: "defined tables",
            imported_globals: "imported globals",
            imported_memories: "imported memories",
            imported_tables: "imported tables",
            imported_functions: "imported functions",
        }
    }
}
431
impl<P: PtrSize> From<VMOffsetsFields<P>> for VMOffsets<P> {
    /// Lay out the variable-length tail of the `VMContext` from the given
    /// field counts, recording each region's start offset and the total
    /// size. All arithmetic is checked: a module too large for a 32-bit
    /// `VMContext` panics instead of wrapping.
    fn from(fields: VMOffsetsFields<P>) -> VMOffsets<P> {
        // Start with all offsets zeroed; they are filled in below.
        let mut ret = Self {
            ptr: fields.ptr,
            num_imported_functions: fields.num_imported_functions,
            num_imported_tables: fields.num_imported_tables,
            num_imported_memories: fields.num_imported_memories,
            num_imported_globals: fields.num_imported_globals,
            num_defined_tables: fields.num_defined_tables,
            num_defined_memories: fields.num_defined_memories,
            num_owned_memories: fields.num_owned_memories,
            num_defined_globals: fields.num_defined_globals,
            num_escaped_funcs: fields.num_escaped_funcs,
            imported_functions: 0,
            imported_tables: 0,
            imported_memories: 0,
            imported_globals: 0,
            defined_tables: 0,
            defined_memories: 0,
            owned_memories: 0,
            defined_globals: 0,
            defined_func_refs: 0,
            size: 0,
        };

        /// Checked addition; panics on `u32` overflow.
        #[inline]
        fn cadd(count: u32, size: u32) -> u32 {
            count.checked_add(size).unwrap()
        }

        /// Checked `count * element size`; panics on `u32` overflow.
        #[inline]
        fn cmul(count: u32, size: u8) -> u32 {
            count.checked_mul(u32::from(size)).unwrap()
        }

        // The dynamic regions begin immediately after the static header.
        let mut next_field_offset = u32::from(ret.ptr.vmctx_dynamic_data_start());

        // `size(field) = expr` records the running offset into
        // `ret.field` then advances it by `expr` bytes; `align(n)` rounds
        // the running offset up to an `n`-byte boundary.
        macro_rules! fields {
            (size($field:ident) = $size:expr, $($rest:tt)*) => {
                ret.$field = next_field_offset;
                next_field_offset = cadd(next_field_offset, u32::from($size));
                fields!($($rest)*);
            };
            (align($align:expr), $($rest:tt)*) => {
                next_field_offset = align(next_field_offset, $align);
                fields!($($rest)*);
            };
            () => {};
        }

        fields! {
            size(imported_functions)
                = cmul(ret.num_imported_functions, ret.size_of_vmfunction_import()),
            size(imported_tables)
                = cmul(ret.num_imported_tables, ret.size_of_vmtable_import()),
            size(imported_memories)
                = cmul(ret.num_imported_memories, ret.size_of_vmmemory_import()),
            size(imported_globals)
                = cmul(ret.num_imported_globals, ret.size_of_vmglobal_import()),
            size(defined_tables)
                = cmul(ret.num_defined_tables, ret.size_of_vmtable_definition()),
            size(defined_memories)
                = cmul(ret.num_defined_memories, ret.ptr.size_of_vmmemory_pointer()),
            size(owned_memories)
                = cmul(ret.num_owned_memories, ret.ptr.size_of_vmmemory_definition()),
            // Align so each 16-byte `VMGlobalDefinition` stays naturally
            // aligned within the context.
            align(16),
            size(defined_globals)
                = cmul(ret.num_defined_globals, ret.ptr.size_of_vmglobal_definition()),
            size(defined_func_refs) = cmul(
                ret.num_escaped_funcs,
                ret.ptr.size_of_vm_func_ref(),
            ),
        }

        ret.size = next_field_offset;

        return ret;
    }
}
515
516impl<P: PtrSize> VMOffsets<P> {
517 #[inline]
519 pub fn vmfunction_import_wasm_call(&self) -> u8 {
520 0 * self.pointer_size()
521 }
522
523 #[inline]
525 pub fn vmfunction_import_array_call(&self) -> u8 {
526 1 * self.pointer_size()
527 }
528
529 #[inline]
531 pub fn vmfunction_import_vmctx(&self) -> u8 {
532 2 * self.pointer_size()
533 }
534
535 #[inline]
537 pub fn size_of_vmfunction_import(&self) -> u8 {
538 3 * self.pointer_size()
539 }
540}
541
542impl<P: PtrSize> VMOffsets<P> {
544 pub fn size_of_vmfunction_body_ptr(&self) -> u8 {
546 1 * self.pointer_size()
547 }
548}
549
550impl<P: PtrSize> VMOffsets<P> {
552 #[inline]
554 pub fn vmtable_import_from(&self) -> u8 {
555 0 * self.pointer_size()
556 }
557
558 #[inline]
560 pub fn vmtable_import_vmctx(&self) -> u8 {
561 1 * self.pointer_size()
562 }
563
564 #[inline]
566 pub fn size_of_vmtable_import(&self) -> u8 {
567 2 * self.pointer_size()
568 }
569}
570
571impl<P: PtrSize> VMOffsets<P> {
573 #[inline]
575 pub fn vmtable_definition_base(&self) -> u8 {
576 0 * self.pointer_size()
577 }
578
579 pub fn vmtable_definition_current_elements(&self) -> u8 {
581 1 * self.pointer_size()
582 }
583
584 #[inline]
586 pub fn size_of_vmtable_definition_current_elements(&self) -> u8 {
587 4
588 }
589
590 #[inline]
592 pub fn size_of_vmtable_definition(&self) -> u8 {
593 2 * self.pointer_size()
594 }
595}
596
597impl<P: PtrSize> VMOffsets<P> {
599 #[inline]
601 pub fn vmmemory_import_from(&self) -> u8 {
602 0 * self.pointer_size()
603 }
604
605 #[inline]
607 pub fn vmmemory_import_vmctx(&self) -> u8 {
608 1 * self.pointer_size()
609 }
610
611 #[inline]
613 pub fn size_of_vmmemory_import(&self) -> u8 {
614 3 * self.pointer_size()
615 }
616}
617
618impl<P: PtrSize> VMOffsets<P> {
620 #[inline]
622 pub fn vmglobal_import_from(&self) -> u8 {
623 0 * self.pointer_size()
624 }
625
626 #[inline]
628 pub fn size_of_vmglobal_import(&self) -> u8 {
629 1 * self.pointer_size()
630 }
631}
632
impl<P: PtrSize> VMOffsets<P> {
    /// The byte size of a `VMSharedTypeIndex`: a 32-bit index, independent
    /// of pointer width.
    #[inline]
    pub fn size_of_vmshared_type_index(&self) -> u8 {
        4
    }
}
641
impl<P: PtrSize> VMOffsets<P> {
    /// The offset within the `VMContext` of the `VMFunctionImport` array.
    #[inline]
    pub fn vmctx_imported_functions_begin(&self) -> u32 {
        self.imported_functions
    }

    /// The offset within the `VMContext` of the `VMTableImport` array.
    #[inline]
    pub fn vmctx_imported_tables_begin(&self) -> u32 {
        self.imported_tables
    }

    /// The offset within the `VMContext` of the `VMMemoryImport` array.
    #[inline]
    pub fn vmctx_imported_memories_begin(&self) -> u32 {
        self.imported_memories
    }

    /// The offset within the `VMContext` of the `VMGlobalImport` array.
    #[inline]
    pub fn vmctx_imported_globals_begin(&self) -> u32 {
        self.imported_globals
    }

    /// The offset within the `VMContext` of the `VMTableDefinition` array.
    #[inline]
    pub fn vmctx_tables_begin(&self) -> u32 {
        self.defined_tables
    }

    /// The offset within the `VMContext` of the array of pointers to
    /// memory definitions for defined memories.
    #[inline]
    pub fn vmctx_memories_begin(&self) -> u32 {
        self.defined_memories
    }

    /// The offset within the `VMContext` of the `VMMemoryDefinition`
    /// array for owned memories.
    #[inline]
    pub fn vmctx_owned_memories_begin(&self) -> u32 {
        self.owned_memories
    }

    /// The offset within the `VMContext` of the `VMGlobalDefinition` array.
    #[inline]
    pub fn vmctx_globals_begin(&self) -> u32 {
        self.defined_globals
    }

    /// The offset within the `VMContext` of the `VMFuncRef` array for
    /// escaped functions.
    #[inline]
    pub fn vmctx_func_refs_begin(&self) -> u32 {
        self.defined_func_refs
    }

    /// The total size, in bytes, of the `VMContext` for this module.
    #[inline]
    pub fn size_of_vmctx(&self) -> u32 {
        self.size
    }

    /// The offset of the `VMFunctionImport` for function `index`.
    /// Panics if `index` is not an imported function.
    #[inline]
    pub fn vmctx_vmfunction_import(&self, index: FuncIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_functions);
        self.vmctx_imported_functions_begin()
            + index.as_u32() * u32::from(self.size_of_vmfunction_import())
    }

    /// The offset of the `VMTableImport` for table `index`.
    /// Panics if `index` is not an imported table.
    #[inline]
    pub fn vmctx_vmtable_import(&self, index: TableIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_tables);
        self.vmctx_imported_tables_begin()
            + index.as_u32() * u32::from(self.size_of_vmtable_import())
    }

    /// The offset of the `VMMemoryImport` for memory `index`.
    /// Panics if `index` is not an imported memory.
    #[inline]
    pub fn vmctx_vmmemory_import(&self, index: MemoryIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_memories);
        self.vmctx_imported_memories_begin()
            + index.as_u32() * u32::from(self.size_of_vmmemory_import())
    }

    /// The offset of the `VMGlobalImport` for global `index`.
    /// Panics if `index` is not an imported global.
    #[inline]
    pub fn vmctx_vmglobal_import(&self, index: GlobalIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_globals);
        self.vmctx_imported_globals_begin()
            + index.as_u32() * u32::from(self.size_of_vmglobal_import())
    }

    /// The offset of the `VMTableDefinition` for defined table `index`.
    /// Panics if `index` is out of range.
    #[inline]
    pub fn vmctx_vmtable_definition(&self, index: DefinedTableIndex) -> u32 {
        assert!(index.as_u32() < self.num_defined_tables);
        self.vmctx_tables_begin() + index.as_u32() * u32::from(self.size_of_vmtable_definition())
    }

    /// The offset of the memory-definition pointer for defined memory
    /// `index`. Panics if `index` is out of range.
    #[inline]
    pub fn vmctx_vmmemory_pointer(&self, index: DefinedMemoryIndex) -> u32 {
        assert!(index.as_u32() < self.num_defined_memories);
        self.vmctx_memories_begin()
            + index.as_u32() * u32::from(self.ptr.size_of_vmmemory_pointer())
    }

    /// The offset of the `VMMemoryDefinition` for owned memory `index`.
    /// Panics if `index` is out of range.
    #[inline]
    pub fn vmctx_vmmemory_definition(&self, index: OwnedMemoryIndex) -> u32 {
        assert!(index.as_u32() < self.num_owned_memories);
        self.vmctx_owned_memories_begin()
            + index.as_u32() * u32::from(self.ptr.size_of_vmmemory_definition())
    }

    /// The offset of the `VMGlobalDefinition` for defined global `index`.
    /// Panics if `index` is out of range.
    #[inline]
    pub fn vmctx_vmglobal_definition(&self, index: DefinedGlobalIndex) -> u32 {
        assert!(index.as_u32() < self.num_defined_globals);
        self.vmctx_globals_begin()
            + index.as_u32() * u32::from(self.ptr.size_of_vmglobal_definition())
    }

    /// The offset of the `VMFuncRef` for escaped function `index`.
    /// Panics if `index` is the reserved sentinel or out of range.
    #[inline]
    pub fn vmctx_func_ref(&self, index: FuncRefIndex) -> u32 {
        assert!(!index.is_reserved_value());
        assert!(index.as_u32() < self.num_escaped_funcs);
        self.vmctx_func_refs_begin() + index.as_u32() * u32::from(self.ptr.size_of_vm_func_ref())
    }

    /// The offset of the `wasm_call` field in the `VMFunctionImport` for
    /// function `index`.
    #[inline]
    pub fn vmctx_vmfunction_import_wasm_call(&self, index: FuncIndex) -> u32 {
        self.vmctx_vmfunction_import(index) + u32::from(self.vmfunction_import_wasm_call())
    }

    /// The offset of the `array_call` field in the `VMFunctionImport` for
    /// function `index`.
    #[inline]
    pub fn vmctx_vmfunction_import_array_call(&self, index: FuncIndex) -> u32 {
        self.vmctx_vmfunction_import(index) + u32::from(self.vmfunction_import_array_call())
    }

    /// The offset of the `vmctx` field in the `VMFunctionImport` for
    /// function `index`.
    #[inline]
    pub fn vmctx_vmfunction_import_vmctx(&self, index: FuncIndex) -> u32 {
        self.vmctx_vmfunction_import(index) + u32::from(self.vmfunction_import_vmctx())
    }

    /// The offset of the `from` field in the `VMTableImport` for table
    /// `index`.
    #[inline]
    pub fn vmctx_vmtable_import_from(&self, index: TableIndex) -> u32 {
        self.vmctx_vmtable_import(index) + u32::from(self.vmtable_import_from())
    }

    /// The offset of the `base` field in the `VMTableDefinition` for
    /// defined table `index`.
    #[inline]
    pub fn vmctx_vmtable_definition_base(&self, index: DefinedTableIndex) -> u32 {
        self.vmctx_vmtable_definition(index) + u32::from(self.vmtable_definition_base())
    }

    /// The offset of the `current_elements` field in the
    /// `VMTableDefinition` for defined table `index`.
    #[inline]
    pub fn vmctx_vmtable_definition_current_elements(&self, index: DefinedTableIndex) -> u32 {
        self.vmctx_vmtable_definition(index) + u32::from(self.vmtable_definition_current_elements())
    }

    /// The offset of the `from` field in the `VMMemoryImport` for memory
    /// `index`.
    #[inline]
    pub fn vmctx_vmmemory_import_from(&self, index: MemoryIndex) -> u32 {
        self.vmctx_vmmemory_import(index) + u32::from(self.vmmemory_import_from())
    }

    /// The offset of the `vmctx` field in the `VMMemoryImport` for memory
    /// `index`.
    #[inline]
    pub fn vmctx_vmmemory_import_vmctx(&self, index: MemoryIndex) -> u32 {
        self.vmctx_vmmemory_import(index) + u32::from(self.vmmemory_import_vmctx())
    }

    /// The offset of the `base` field in the `VMMemoryDefinition` for
    /// owned memory `index`.
    #[inline]
    pub fn vmctx_vmmemory_definition_base(&self, index: OwnedMemoryIndex) -> u32 {
        self.vmctx_vmmemory_definition(index) + u32::from(self.ptr.vmmemory_definition_base())
    }

    /// The offset of the `current_length` field in the
    /// `VMMemoryDefinition` for owned memory `index`.
    #[inline]
    pub fn vmctx_vmmemory_definition_current_length(&self, index: OwnedMemoryIndex) -> u32 {
        self.vmctx_vmmemory_definition(index)
            + u32::from(self.ptr.vmmemory_definition_current_length())
    }

    /// The offset of the `from` field in the `VMGlobalImport` for global
    /// `index`.
    #[inline]
    pub fn vmctx_vmglobal_import_from(&self, index: GlobalIndex) -> u32 {
        self.vmctx_vmglobal_import(index) + u32::from(self.vmglobal_import_from())
    }
}
843
impl<P: PtrSize> VMOffsets<P> {
    /// The byte offset of the reference count within a `VMDrcHeader`.
    // NOTE(review): the constant 8 implies an 8-byte field precedes the
    // ref count — confirm against the `VMDrcHeader` definition site.
    #[inline]
    pub fn vm_drc_header_ref_count(&self) -> u32 {
        8
    }
}
854
855impl<P: PtrSize> VMOffsets<P> {
859 #[inline]
861 pub fn vm_gc_ref_activation_table_next(&self) -> u32 {
862 0
863 }
864
865 #[inline]
867 pub fn vm_gc_ref_activation_table_end(&self) -> u32 {
868 self.pointer_size().into()
869 }
870}
871
/// Magic value stored at the start of a core-Wasm `VMContext`: the
/// little-endian bytes of the ASCII string "core". Presumably checked at
/// runtime against `PtrSize::vmctx_magic` (offset 0) to validate raw
/// context pointers — confirm at the use sites.
pub const VMCONTEXT_MAGIC: u32 = u32::from_le_bytes(*b"core");

/// Magic value stored at the start of a `VMArrayCallHostFuncContext`: the
/// little-endian bytes of "ACHF", distinguishing host-function contexts
/// from core `VMContext`s.
pub const VM_ARRAY_CALL_HOST_FUNC_MAGIC: u32 = u32::from_le_bytes(*b"ACHF");
882
#[cfg(test)]
mod tests {
    use crate::vmoffsets::align;

    /// `align` rounds up to the next multiple of `width` and leaves
    /// already-aligned values unchanged.
    #[test]
    fn alignment() {
        fn is_aligned(x: u32) -> bool {
            x % 16 == 0
        }
        assert!(is_aligned(align(0, 16)));
        assert!(is_aligned(align(32, 16)));
        assert!(is_aligned(align(33, 16)));
        assert!(is_aligned(align(31, 16)));
        // Pin exact values too: the multiple-of-16 property alone would
        // accept a broken `align` that always returned 0.
        assert_eq!(align(0, 16), 0);
        assert_eq!(align(16, 16), 16);
        assert_eq!(align(17, 16), 32);
        assert_eq!(align(31, 16), 32);
        assert_eq!(align(33, 16), 48);
    }
}