//! This file declares `VMContext` and several related structs which contain
//! fields that compiled wasm code accesses directly.

use crate::global::VMGlobal;
use crate::instance::Instance;
use crate::memory::VMMemory;
use crate::store::InternalStoreHandle;
use crate::trap::{Trap, TrapCode};
use crate::VMFunctionBody;
use crate::VMTable;
use crate::{VMBuiltinFunctionIndex, VMFunction};
use std::convert::TryFrom;
use std::ptr::{self, NonNull};
use std::sync::atomic::{AtomicPtr, Ordering};
use std::u32;
use wasmer_types::RawValue;

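/// Union representing the first parameter passed when calling a function.
///
/// It may either be a pointer to the [`VMContext`] if it's a Wasm function,
/// or a pointer to arbitrary data controlled by the host if it's a host
/// function.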
#[derive(Copy, Clone, Eq)]
#[repr(C)]
pub union VMFunctionContext {
    /// Wasm functions take a pointer to [`VMContext`].
    pub vmctx: *mut VMContext,
    /// Host functions can have custom environments.
    pub host_env: *mut std::ffi::c_void,
}

impl VMFunctionContext {
    /// Check whether the stored pointer is null.
    pub fn is_null(&self) -> bool {
        unsafe { self.host_env.is_null() }
    }
}
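
// An illustrative test (a sketch added here, not one of the original layout
// checks): both union variants share the same storage, so `is_null` can
// inspect either pointer. The module name is ours, not part of the VM API.
#[cfg(test)]
mod test_vmfunction_context_example {
    use super::VMFunctionContext;

    #[test]
    fn null_and_non_null_contexts() {
        let ctx = VMFunctionContext {
            host_env: std::ptr::null_mut(),
        };
        assert!(ctx.is_null());

        let mut data = 0u32;
        let ctx = VMFunctionContext {
            host_env: &mut data as *mut u32 as *mut std::ffi::c_void,
        };
        assert!(!ctx.is_null());
    }
}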

impl std::fmt::Debug for VMFunctionContext {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        f.debug_struct("VMFunctionContext")
            .field("vmctx_or_hostenv", unsafe { &self.host_env })
            .finish()
    }
}

impl std::cmp::PartialEq for VMFunctionContext {
    fn eq(&self, rhs: &Self) -> bool {
        // Both variants are raw pointers of the same size, so comparing the
        // addresses is well-defined whichever variant is active.
        unsafe { self.host_env as usize == rhs.host_env as usize }
    }
}

impl std::hash::Hash for VMFunctionContext {
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        unsafe {
            self.vmctx.hash(state);
        }
    }
}

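/// An imported function.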
#[derive(Debug, Copy, Clone)]
#[repr(C)]
pub struct VMFunctionImport {
    /// A pointer to the imported function body.
    pub body: *const VMFunctionBody,

    /// A pointer to the `VMContext` that owns the function or host env data.
    pub environment: VMFunctionContext,

    /// A handle to the `VMFunction` in the context that owns the function.
    pub handle: InternalStoreHandle<VMFunction>,
}

#[cfg(test)]
mod test_vmfunction_import {
    use super::VMFunctionImport;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmer_types::ModuleInfo;
    use wasmer_types::VMOffsets;

    #[test]
    fn check_vmfunction_import_offsets() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMFunctionImport>(),
            usize::from(offsets.size_of_vmfunction_import())
        );
        assert_eq!(
            offset_of!(VMFunctionImport, body),
            usize::from(offsets.vmfunction_import_body())
        );
        assert_eq!(
            offset_of!(VMFunctionImport, environment),
            usize::from(offsets.vmfunction_import_vmctx())
        );
    }
}

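/// The context that dynamic functions receive when called, instead of a
/// plain `vmctx`. A dynamic function is one whose signature is not known
/// until runtime, so it is invoked through the `address` field together
/// with the context stored in `ctx`.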
#[repr(C)]
pub struct VMDynamicFunctionContext<T> {
    /// The address of the inner dynamic function.
    pub address: *const VMFunctionBody,

    /// The context of the inner dynamic function.
    pub ctx: T,
}

// The `ctx` itself must be `Send`; `address` can be passed between threads
// because all usage is `unsafe` and synchronized by the caller.
unsafe impl<T: Sized + Send + Sync> Send for VMDynamicFunctionContext<T> {}
// The `ctx` itself must be `Sync`; `address` can be shared between threads
// because all usage is `unsafe` and synchronized by the caller.
unsafe impl<T: Sized + Send + Sync> Sync for VMDynamicFunctionContext<T> {}

impl<T: Sized + Clone + Send + Sync> Clone for VMDynamicFunctionContext<T> {
    fn clone(&self) -> Self {
        Self {
            address: self.address,
            ctx: self.ctx.clone(),
        }
    }
}

#[cfg(test)]
mod test_vmdynamicfunction_import_context {
    use super::VMDynamicFunctionContext;
    use crate::VMOffsets;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmer_types::ModuleInfo;

    #[test]
    fn check_vmdynamicfunction_import_context_offsets() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMDynamicFunctionContext<usize>>(),
            usize::from(offsets.size_of_vmdynamicfunction_import_context())
        );
        assert_eq!(
            offset_of!(VMDynamicFunctionContext<usize>, address),
            usize::from(offsets.vmdynamicfunction_import_context_address())
        );
        assert_eq!(
            offset_of!(VMDynamicFunctionContext<usize>, ctx),
            usize::from(offsets.vmdynamicfunction_import_context_ctx())
        );
    }
}

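/// A function kind is a calling convention into and out of wasm code.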
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
#[repr(C)]
pub enum VMFunctionKind {
    /// A static function has the native signature:
    /// `extern "C" (vmctx, arg1, arg2...) -> (result1, result2, ...)`.
    ///
    /// This is the default for functions that are defined:
    /// 1. in the host, natively, or
    /// 2. in the WebAssembly file itself.
    Static,

    /// A dynamic function has the native signature:
    /// `extern "C" (ctx, &[Value]) -> Vec<Value>`.
    ///
    /// This is the default for functions that are defined in the host
    /// dynamically.
    Dynamic,
}

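/// The fields compiled code needs to access to utilize a WebAssembly table
/// imported from another instance.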
#[derive(Clone)]
#[repr(C)]
pub struct VMTableImport {
    /// A pointer to the imported table description.
    pub definition: NonNull<VMTableDefinition>,

    /// A handle to the `VMTable` in the context that owns it.
    pub handle: InternalStoreHandle<VMTable>,
}

#[cfg(test)]
mod test_vmtable_import {
    use super::VMTableImport;
    use crate::VMOffsets;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmer_types::ModuleInfo;

    #[test]
    fn check_vmtable_import_offsets() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMTableImport>(),
            usize::from(offsets.size_of_vmtable_import())
        );
        assert_eq!(
            offset_of!(VMTableImport, definition),
            usize::from(offsets.vmtable_import_definition())
        );
    }
}

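/// The fields compiled code needs to access to utilize a WebAssembly linear
/// memory imported from another instance.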
#[derive(Clone)]
#[repr(C)]
pub struct VMMemoryImport {
    /// A pointer to the imported memory description.
    pub definition: NonNull<VMMemoryDefinition>,

    /// A handle to the `VMMemory` in the context that owns it.
    pub handle: InternalStoreHandle<VMMemory>,
}

#[cfg(test)]
mod test_vmmemory_import {
    use super::VMMemoryImport;
    use crate::VMOffsets;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmer_types::ModuleInfo;

    #[test]
    fn check_vmmemory_import_offsets() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMMemoryImport>(),
            usize::from(offsets.size_of_vmmemory_import())
        );
        assert_eq!(
            offset_of!(VMMemoryImport, definition),
            usize::from(offsets.vmmemory_import_definition())
        );
        assert_eq!(
            offset_of!(VMMemoryImport, handle),
            usize::from(offsets.vmmemory_import_handle())
        );
    }
}

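/// The fields compiled code needs to access to utilize a WebAssembly global
/// variable imported from another instance.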
#[derive(Clone)]
#[repr(C)]
pub struct VMGlobalImport {
    /// A pointer to the imported global variable description.
    pub definition: NonNull<VMGlobalDefinition>,

    /// A handle to the `VMGlobal` in the context that owns it.
    pub handle: InternalStoreHandle<VMGlobal>,
}

/// # Safety
/// This data is safe to share between threads because it is plain data that
/// is the user's responsibility to synchronize.
unsafe impl Send for VMGlobalImport {}
/// # Safety
/// This data is safe to share between threads because it is plain data that
/// is the user's responsibility to synchronize, and because it is `Clone`,
/// passing it by reference or by value makes no difference for correctness
/// in a multi-threaded context.
unsafe impl Sync for VMGlobalImport {}

#[cfg(test)]
mod test_vmglobal_import {
    use super::VMGlobalImport;
    use crate::VMOffsets;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmer_types::ModuleInfo;

    #[test]
    fn check_vmglobal_import_offsets() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMGlobalImport>(),
            usize::from(offsets.size_of_vmglobal_import())
        );
        assert_eq!(
            offset_of!(VMGlobalImport, definition),
            usize::from(offsets.vmglobal_import_definition())
        );
    }
}

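/// Perform an unsynchronized, non-atomic `memory.copy` for the memory.
///
/// # Errors
///
/// Returns a `Trap` error when the source or destination ranges are out of
/// bounds.
///
/// # Safety
///
/// The memory is not copied atomically or with any synchronization: it is
/// the caller's responsibility to synchronize concurrent accesses.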
pub(crate) unsafe fn memory_copy(
    mem: &VMMemoryDefinition,
    dst: u32,
    src: u32,
    len: u32,
) -> Result<(), Trap> {
    // Bounds check: both the source and destination ranges must lie within
    // the current length of the memory.
    if src
        .checked_add(len)
        .map_or(true, |n| usize::try_from(n).unwrap() > mem.current_length)
        || dst
            .checked_add(len)
            .map_or(true, |m| usize::try_from(m).unwrap() > mem.current_length)
    {
        return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
    }

    let dst = usize::try_from(dst).unwrap();
    let src = usize::try_from(src).unwrap();

    // Bounds and casts are checked above, so the pointer arithmetic stays
    // within the memory. `ptr::copy` handles overlapping ranges.
    let dst = mem.base.add(dst);
    let src = mem.base.add(src);
    ptr::copy(src, dst, len as usize);

    Ok(())
}
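
// An illustrative test (a sketch added here, not one of the original layout
// checks): a plain byte buffer stands in for a linear memory; `buf` and `def`
// are hypothetical fixtures, not part of the VM API.
#[cfg(test)]
mod test_memory_copy_example {
    use super::{memory_copy, VMMemoryDefinition};

    #[test]
    fn copies_in_bounds_and_rejects_out_of_bounds() {
        let mut buf = vec![0u8; 64];
        buf[..4].copy_from_slice(&[1, 2, 3, 4]);
        let def = VMMemoryDefinition {
            base: buf.as_mut_ptr(),
            current_length: buf.len(),
        };
        unsafe {
            // Copy 4 bytes from offset 0 to offset 8.
            assert!(memory_copy(&def, 8, 0, 4).is_ok());
            // src + len exceeds current_length, so this must trap.
            assert!(memory_copy(&def, 0, 60, 8).is_err());
        }
        assert_eq!(&buf[8..12], &[1, 2, 3, 4]);
    }
}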
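
/// Perform an unsynchronized, non-atomic `memory.fill` for the memory.
///
/// # Errors
///
/// Returns a `Trap` error when the destination range is out of bounds.
///
/// # Safety
///
/// The memory is not filled atomically or with any synchronization: it is
/// the caller's responsibility to synchronize concurrent accesses.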
pub(crate) unsafe fn memory_fill(
    mem: &VMMemoryDefinition,
    dst: u32,
    val: u32,
    len: u32,
) -> Result<(), Trap> {
    // Bounds check: the destination range must lie within the current length
    // of the memory.
    if dst
        .checked_add(len)
        .map_or(true, |m| usize::try_from(m).unwrap() > mem.current_length)
    {
        return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
    }

    let dst = isize::try_from(dst).unwrap();
    // Only the low byte of `val` is written, as `memory.fill` specifies.
    let val = val as u8;

    // Bounds and casts are checked above, so the offset stays within the
    // memory.
    let dst = mem.base.offset(dst);
    ptr::write_bytes(dst, val, len as usize);

    Ok(())
}
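
// An illustrative test (a sketch added here): `memory_fill` writes only the
// low byte of `val`, and traps when `dst + len` exceeds the current length.
// `buf` and `def` are hypothetical fixtures.
#[cfg(test)]
mod test_memory_fill_example {
    use super::{memory_fill, VMMemoryDefinition};

    #[test]
    fn fills_in_bounds_and_rejects_out_of_bounds() {
        let mut buf = vec![0u8; 16];
        let def = VMMemoryDefinition {
            base: buf.as_mut_ptr(),
            current_length: buf.len(),
        };
        unsafe {
            // Only the low byte (0x02) of `val` is written.
            assert!(memory_fill(&def, 4, 0x0102, 8).is_ok());
            // 12 + 8 exceeds current_length, so this must trap.
            assert!(memory_fill(&def, 12, 0, 8).is_err());
        }
        assert_eq!(&buf[4..12], &[0x02; 8]);
        assert_eq!(buf[3], 0);
        assert_eq!(buf[12], 0);
    }
}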
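
/// Perform the `memory32.atomic.check32` operation for the memory: load the
/// 32-bit value at `dst` and compare it with `val`. Returns 0 if the values
/// are equal and 1 otherwise.
///
/// # Errors
///
/// Returns a `Trap` error when the address is out of bounds or not 32-bit
/// aligned.
///
/// # Safety
///
/// The caller must ensure `mem` describes a live memory.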
pub(crate) unsafe fn memory32_atomic_check32(
    mem: &VMMemoryDefinition,
    dst: u32,
    val: u32,
) -> Result<u32, Trap> {
    if usize::try_from(dst).unwrap() > mem.current_length {
        return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
    }

    let dst = isize::try_from(dst).unwrap();
    // A 32-bit atomic access requires 4-byte alignment.
    if dst & 0b11 != 0 {
        return Err(Trap::lib(TrapCode::UnalignedAtomic));
    }

    // Bounds and casts are checked above, so the offset stays within the
    // memory.
    let dst = mem.base.offset(dst) as *mut u32;
    let atomic_dst = AtomicPtr::new(dst);
    let read_val = *atomic_dst.load(Ordering::Acquire);
    let ret = if read_val == val { 0 } else { 1 };
    Ok(ret)
}
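
// An illustrative test (a sketch added here): a `Vec<u32>` provides 4-byte
// aligned backing storage; `words` and `def` are hypothetical fixtures.
#[cfg(test)]
mod test_memory32_atomic_check32_example {
    use super::{memory32_atomic_check32, VMMemoryDefinition};

    #[test]
    fn compares_value_at_aligned_offset() {
        let mut words = vec![0u32; 4];
        words[1] = 42;
        let def = VMMemoryDefinition {
            base: words.as_mut_ptr() as *mut u8,
            current_length: words.len() * 4,
        };
        unsafe {
            // Byte offset 4 holds 42: equal yields 0, different yields 1.
            assert_eq!(memory32_atomic_check32(&def, 4, 42).unwrap(), 0);
            assert_eq!(memory32_atomic_check32(&def, 4, 7).unwrap(), 1);
            // Offset 2 is not 4-byte aligned, so this must trap.
            assert!(memory32_atomic_check32(&def, 2, 0).is_err());
        }
    }
}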
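
/// Perform the `memory32.atomic.check64` operation for the memory: load the
/// 64-bit value at `dst` and compare it with `val`. Returns 0 if the values
/// are equal and 1 otherwise.
///
/// # Errors
///
/// Returns a `Trap` error when the address is out of bounds or not 64-bit
/// aligned.
///
/// # Safety
///
/// The caller must ensure `mem` describes a live memory.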
pub(crate) unsafe fn memory32_atomic_check64(
    mem: &VMMemoryDefinition,
    dst: u32,
    val: u64,
) -> Result<u32, Trap> {
    if usize::try_from(dst).unwrap() > mem.current_length {
        return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
    }

    let dst = isize::try_from(dst).unwrap();
    // A 64-bit atomic access requires 8-byte alignment.
    if dst & 0b111 != 0 {
        return Err(Trap::lib(TrapCode::UnalignedAtomic));
    }

    // Bounds and casts are checked above, so the offset stays within the
    // memory.
    let dst = mem.base.offset(dst) as *mut u64;
    let atomic_dst = AtomicPtr::new(dst);
    let read_val = *atomic_dst.load(Ordering::Acquire);
    let ret = if read_val == val { 0 } else { 1 };
    Ok(ret)
}

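/// The fields compiled code needs to access to utilize a WebAssembly table
/// defined within the instance.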
#[derive(Debug, Clone, Copy)]
#[repr(C)]
pub struct VMTableDefinition {
    /// Pointer to the table data.
    pub base: *mut u8,

    /// The current number of elements in the table.
    pub current_elements: u32,
}

#[cfg(test)]
mod test_vmtable_definition {
    use super::VMTableDefinition;
    use crate::VMOffsets;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmer_types::ModuleInfo;

    #[test]
    fn check_vmtable_definition_offsets() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMTableDefinition>(),
            usize::from(offsets.size_of_vmtable_definition())
        );
        assert_eq!(
            offset_of!(VMTableDefinition, base),
            usize::from(offsets.vmtable_definition_base())
        );
        assert_eq!(
            offset_of!(VMTableDefinition, current_elements),
            usize::from(offsets.vmtable_definition_current_elements())
        );
    }
}

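/// The storage for a WebAssembly global defined within the instance.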
#[derive(Debug, Clone)]
#[repr(C, align(16))]
pub struct VMGlobalDefinition {
    /// The raw value of the global.
    pub val: RawValue,
}

#[cfg(test)]
mod test_vmglobal_definition {
    use super::VMGlobalDefinition;
    use crate::{VMFuncRef, VMOffsets};
    use more_asserts::assert_ge;
    use std::mem::{align_of, size_of};
    use wasmer_types::ModuleInfo;

    #[test]
    fn check_vmglobal_definition_alignment() {
        assert_ge!(align_of::<VMGlobalDefinition>(), align_of::<i32>());
        assert_ge!(align_of::<VMGlobalDefinition>(), align_of::<i64>());
        assert_ge!(align_of::<VMGlobalDefinition>(), align_of::<f32>());
        assert_ge!(align_of::<VMGlobalDefinition>(), align_of::<f64>());
        assert_ge!(align_of::<VMGlobalDefinition>(), align_of::<VMFuncRef>());
        assert_ge!(align_of::<VMGlobalDefinition>(), align_of::<[u8; 16]>());
    }

    #[test]
    fn check_vmglobal_definition_offsets() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<*const VMGlobalDefinition>(),
            usize::from(offsets.size_of_vmglobal_local())
        );
    }

    #[test]
    fn check_vmglobal_begins_aligned() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(offsets.vmctx_globals_begin() % 16, 0);
    }
}

impl VMGlobalDefinition {
    /// Construct a `VMGlobalDefinition` with a zeroed value.
    pub fn new() -> Self {
        Self {
            val: Default::default(),
        }
    }
}

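/// An index into the shared signature registry, usable for checking
/// signatures at indirect calls.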
#[repr(C)]
#[cfg_attr(feature = "artifact-size", derive(loupe::MemoryUsage))]
#[derive(Debug, Eq, PartialEq, Clone, Copy, Hash)]
pub struct VMSharedSignatureIndex(u32);

#[cfg(test)]
mod test_vmshared_signature_index {
    use super::VMSharedSignatureIndex;
    use std::mem::size_of;
    use wasmer_types::{ModuleInfo, TargetSharedSignatureIndex, VMOffsets};

    #[test]
    fn check_vmshared_signature_index() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMSharedSignatureIndex>(),
            usize::from(offsets.size_of_vmshared_signature_index())
        );
    }

    #[test]
    fn check_target_shared_signature_index() {
        assert_eq!(
            size_of::<VMSharedSignatureIndex>(),
            size_of::<TargetSharedSignatureIndex>()
        );
    }
}

impl VMSharedSignatureIndex {
    /// Create a new `VMSharedSignatureIndex`.
    pub fn new(value: u32) -> Self {
        Self(value)
    }
}

impl Default for VMSharedSignatureIndex {
    fn default() -> Self {
        // `u32::MAX` serves as a sentinel for "no signature registered".
        Self::new(u32::MAX)
    }
}

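/// The VM caller-checked "anyfunc" record, for caller-side signature
/// checking. It's "any" because it isn't statically typed: the signature is
/// only checked at call time against `type_index`.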
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)]
#[repr(C)]
pub struct VMCallerCheckedAnyfunc {
    /// Pointer to the function body.
    pub func_ptr: *const VMFunctionBody,
    /// The function's signature id.
    pub type_index: VMSharedSignatureIndex,
    /// The function's `VMContext` or host env.
    pub vmctx: VMFunctionContext,
    /// Address of the trampoline used to invoke this function with a
    /// dynamic argument list.
    pub call_trampoline: VMTrampoline,
}

#[cfg(test)]
mod test_vmcaller_checked_anyfunc {
    use super::VMCallerCheckedAnyfunc;
    use crate::VMOffsets;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmer_types::ModuleInfo;

    #[test]
    fn check_vmcaller_checked_anyfunc_offsets() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMCallerCheckedAnyfunc>(),
            usize::from(offsets.size_of_vmcaller_checked_anyfunc())
        );
        assert_eq!(
            offset_of!(VMCallerCheckedAnyfunc, func_ptr),
            usize::from(offsets.vmcaller_checked_anyfunc_func_ptr())
        );
        assert_eq!(
            offset_of!(VMCallerCheckedAnyfunc, type_index),
            usize::from(offsets.vmcaller_checked_anyfunc_type_index())
        );
        assert_eq!(
            offset_of!(VMCallerCheckedAnyfunc, vmctx),
            usize::from(offsets.vmcaller_checked_anyfunc_vmctx())
        );
    }
}

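/// An array that stores addresses of builtin functions. Compiled code is
/// translated to call these builtins indirectly through the array, so the
/// code itself never needs to be patched with their addresses.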
#[repr(C)]
pub struct VMBuiltinFunctionsArray {
    ptrs: [usize; Self::len()],
}

impl VMBuiltinFunctionsArray {
    /// The number of builtin functions in the array.
    pub const fn len() -> usize {
        VMBuiltinFunctionIndex::builtin_functions_total_number() as usize
    }

    /// Construct the array, pointing each slot at the corresponding libcall.
    pub fn initialized() -> Self {
        use crate::libcalls::*;

        let mut ptrs = [0; Self::len()];

        ptrs[VMBuiltinFunctionIndex::get_memory32_grow_index().index() as usize] =
            wasmer_vm_memory32_grow as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_memory32_grow_index().index() as usize] =
            wasmer_vm_imported_memory32_grow as usize;

        ptrs[VMBuiltinFunctionIndex::get_memory32_size_index().index() as usize] =
            wasmer_vm_memory32_size as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_memory32_size_index().index() as usize] =
            wasmer_vm_imported_memory32_size as usize;

        ptrs[VMBuiltinFunctionIndex::get_table_copy_index().index() as usize] =
            wasmer_vm_table_copy as usize;

        ptrs[VMBuiltinFunctionIndex::get_table_init_index().index() as usize] =
            wasmer_vm_table_init as usize;
        ptrs[VMBuiltinFunctionIndex::get_elem_drop_index().index() as usize] =
            wasmer_vm_elem_drop as usize;

        ptrs[VMBuiltinFunctionIndex::get_memory_copy_index().index() as usize] =
            wasmer_vm_memory32_copy as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_memory_copy_index().index() as usize] =
            wasmer_vm_imported_memory32_copy as usize;
        ptrs[VMBuiltinFunctionIndex::get_memory_fill_index().index() as usize] =
            wasmer_vm_memory32_fill as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_memory_fill_index().index() as usize] =
            wasmer_vm_imported_memory32_fill as usize;
        ptrs[VMBuiltinFunctionIndex::get_memory_init_index().index() as usize] =
            wasmer_vm_memory32_init as usize;
        ptrs[VMBuiltinFunctionIndex::get_data_drop_index().index() as usize] =
            wasmer_vm_data_drop as usize;
        ptrs[VMBuiltinFunctionIndex::get_raise_trap_index().index() as usize] =
            wasmer_vm_raise_trap as usize;
        ptrs[VMBuiltinFunctionIndex::get_table_size_index().index() as usize] =
            wasmer_vm_table_size as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_table_size_index().index() as usize] =
            wasmer_vm_imported_table_size as usize;
        ptrs[VMBuiltinFunctionIndex::get_table_grow_index().index() as usize] =
            wasmer_vm_table_grow as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_table_grow_index().index() as usize] =
            wasmer_vm_imported_table_grow as usize;
        ptrs[VMBuiltinFunctionIndex::get_table_get_index().index() as usize] =
            wasmer_vm_table_get as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_table_get_index().index() as usize] =
            wasmer_vm_imported_table_get as usize;
        ptrs[VMBuiltinFunctionIndex::get_table_set_index().index() as usize] =
            wasmer_vm_table_set as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_table_set_index().index() as usize] =
            wasmer_vm_imported_table_set as usize;
        ptrs[VMBuiltinFunctionIndex::get_func_ref_index().index() as usize] =
            wasmer_vm_func_ref as usize;
        ptrs[VMBuiltinFunctionIndex::get_table_fill_index().index() as usize] =
            wasmer_vm_table_fill as usize;

        ptrs[VMBuiltinFunctionIndex::get_memory_atomic_wait32_index().index() as usize] =
            wasmer_vm_memory32_atomic_wait32 as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_memory_atomic_wait32_index().index() as usize] =
            wasmer_vm_imported_memory32_atomic_wait32 as usize;
        ptrs[VMBuiltinFunctionIndex::get_memory_atomic_wait64_index().index() as usize] =
            wasmer_vm_memory32_atomic_wait64 as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_memory_atomic_wait64_index().index() as usize] =
            wasmer_vm_imported_memory32_atomic_wait64 as usize;
        ptrs[VMBuiltinFunctionIndex::get_memory_atomic_notify_index().index() as usize] =
            wasmer_vm_memory32_atomic_notify as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_memory_atomic_notify_index().index() as usize] =
            wasmer_vm_imported_memory32_atomic_notify as usize;

        // Every builtin slot must have been assigned a non-null address.
        debug_assert!(ptrs.iter().cloned().all(|p| p != 0));

        Self { ptrs }
    }
}
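
// An illustrative test (a sketch added here, mirroring the debug assertion
// in `initialized`): every builtin slot must hold a non-null address.
#[cfg(test)]
mod test_vmbuiltin_functions_array_example {
    use super::VMBuiltinFunctionsArray;

    #[test]
    fn all_builtin_pointers_are_populated() {
        let array = VMBuiltinFunctionsArray::initialized();
        assert!(array.ptrs.iter().all(|&p| p != 0));
    }
}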
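
/// The VM "context", which is pointed to by the `vmctx` arg in the compiler.
/// This has information about globals, memories, tables, and other runtime
/// state associated with the current instance.
///
/// The struct here is empty, as the size of the actual data is dynamic and
/// cannot be described in Rust's type system; sufficient memory is allocated
/// at runtime instead.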
#[derive(Debug)]
#[repr(C, align(16))]
pub struct VMContext {}

impl VMContext {
    /// Return a reference to the `Instance` this context belongs to.
    ///
    /// # Safety
    /// This is unsafe because it doesn't work on just any `VMContext`: the
    /// `VMContext` must have been allocated as part of an `Instance`.
    #[allow(clippy::cast_ptr_alignment)]
    #[inline]
    pub(crate) unsafe fn instance(&self) -> &Instance {
        &*((self as *const Self as *mut u8).offset(-Instance::vmctx_offset()) as *const Instance)
    }

    /// Return a mutable reference to the `Instance` this context belongs to.
    ///
    /// # Safety
    /// Same requirement as [`VMContext::instance`]: the `VMContext` must
    /// have been allocated as part of an `Instance`.
    #[inline]
    pub(crate) unsafe fn instance_mut(&mut self) -> &mut Instance {
        &mut *((self as *const Self as *mut u8).offset(-Instance::vmctx_offset()) as *mut Instance)
    }
}
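
/// The type of a trampoline used to call a function with a unified
/// signature.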
pub type VMTrampoline = unsafe extern "C" fn(
    *mut VMContext,        // callee vmctx
    *const VMFunctionBody, // function we're actually calling
    *mut RawValue,         // space for arguments and return values
);
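
/// The fields compiled code needs to access to utilize a WebAssembly linear
/// memory defined within the instance, namely the start address and the
/// current size in bytes.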
#[derive(Debug, Copy, Clone)]
#[repr(C)]
pub struct VMMemoryDefinition {
    /// The start address, which is always valid even if the memory grows.
    pub base: *mut u8,

    /// The current logical size of this linear memory in bytes.
    pub current_length: usize,
}

/// # Safety
/// This data is safe to share between threads because it is plain data that
/// is the user's responsibility to synchronize.
unsafe impl Send for VMMemoryDefinition {}
/// # Safety
/// This data is safe to share between threads because it is plain data that
/// is the user's responsibility to synchronize, and because it is `Copy`,
/// passing it by reference or by value makes no difference for correctness
/// in a multi-threaded context.
unsafe impl Sync for VMMemoryDefinition {}

#[cfg(test)]
mod test_vmmemory_definition {
    use super::VMMemoryDefinition;
    use crate::VMOffsets;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmer_types::ModuleInfo;

    #[test]
    fn check_vmmemory_definition_offsets() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMMemoryDefinition>(),
            usize::from(offsets.size_of_vmmemory_definition())
        );
        assert_eq!(
            offset_of!(VMMemoryDefinition, base),
            usize::from(offsets.vmmemory_definition_base())
        );
        assert_eq!(
            offset_of!(VMMemoryDefinition, current_length),
            usize::from(offsets.vmmemory_definition_current_length())
        );
    }
}