1use crate::access::WasmRefAccess;
2use crate::mem_access::{MemoryAccessError, WasmRef, WasmSlice};
3use crate::{AsStoreRef, FromToNativeWasmType, MemoryView, NativeWasmTypeInto};
4use std::convert::TryFrom;
5use std::{fmt, marker::PhantomData, mem};
6pub use wasmer_types::Memory32;
7pub use wasmer_types::Memory64;
8pub use wasmer_types::MemorySize;
9use wasmer_types::ValueType;
10
/// A [`WasmPtr`] addressing a 64-bit (`memory64`) linear memory.
pub type WasmPtr64<T> = WasmPtr<T, Memory64>;
/// A zero-cost wrapper around an offset into a Wasm linear memory,
/// typed by the pointee `T` and the memory-size model `M` (32- or 64-bit).
///
/// Only the guest-side offset is stored; dereferencing always goes through an
/// explicit `MemoryView`, so a `WasmPtr` can never point into host memory.
#[repr(transparent)]
pub struct WasmPtr<T, M: MemorySize = Memory32> {
    // Byte offset into the guest's linear memory.
    offset: M::Offset,
    // Records the pointee type without owning a `T`.
    _phantom: PhantomData<T>,
}
65
66impl<T, M: MemorySize> WasmPtr<T, M> {
67 #[inline]
69 pub fn new(offset: M::Offset) -> Self {
70 Self {
71 offset,
72 _phantom: PhantomData,
73 }
74 }
75
76 #[inline]
78 pub fn offset(&self) -> M::Offset {
79 self.offset
80 }
81
82 #[inline]
84 pub fn cast<U>(self) -> WasmPtr<U, M> {
85 WasmPtr {
86 offset: self.offset,
87 _phantom: PhantomData,
88 }
89 }
90
91 #[inline]
93 pub fn null() -> Self {
94 Self::new(M::ZERO)
95 }
96
97 #[inline]
99 pub fn is_null(&self) -> bool {
100 self.offset.into() == 0
101 }
102
103 #[inline]
108 pub fn add_offset(self, offset: M::Offset) -> Result<Self, MemoryAccessError> {
109 let base = self.offset.into();
110 let index = offset.into();
111 let offset = index
112 .checked_mul(mem::size_of::<T>() as u64)
113 .ok_or(MemoryAccessError::Overflow)?;
114 let address = base
115 .checked_add(offset)
116 .ok_or(MemoryAccessError::Overflow)?;
117 let address = M::Offset::try_from(address).map_err(|_| MemoryAccessError::Overflow)?;
118 Ok(Self::new(address))
119 }
120
121 #[inline]
126 pub fn sub_offset(self, offset: M::Offset) -> Result<Self, MemoryAccessError> {
127 let base = self.offset.into();
128 let index = offset.into();
129 let offset = index
130 .checked_mul(mem::size_of::<T>() as u64)
131 .ok_or(MemoryAccessError::Overflow)?;
132 let address = base
133 .checked_sub(offset)
134 .ok_or(MemoryAccessError::Overflow)?;
135 let address = M::Offset::try_from(address).map_err(|_| MemoryAccessError::Overflow)?;
136 Ok(Self::new(address))
137 }
138}
139
140impl<T: ValueType, M: MemorySize> WasmPtr<T, M> {
141 #[inline]
144 pub fn deref<'a>(&self, view: &'a MemoryView) -> WasmRef<'a, T> {
145 WasmRef::new(view, self.offset.into())
146 }
147
148 #[inline]
150 pub fn read(&self, view: &MemoryView) -> Result<T, MemoryAccessError> {
151 self.deref(view).read()
152 }
153
154 #[inline]
156 pub fn write(&self, view: &MemoryView, val: T) -> Result<(), MemoryAccessError> {
157 self.deref(view).write(val)
158 }
159
160 #[inline]
166 pub fn slice<'a>(
167 &self,
168 view: &'a MemoryView,
169 len: M::Offset,
170 ) -> Result<WasmSlice<'a, T>, MemoryAccessError> {
171 WasmSlice::new(view, self.offset.into(), len.into())
172 }
173
174 #[inline]
179 pub fn read_until(
180 &self,
181 view: &MemoryView,
182 mut end: impl FnMut(&T) -> bool,
183 ) -> Result<Vec<T>, MemoryAccessError> {
184 let mut vec = Vec::new();
185 for i in 0u64.. {
186 let i = M::Offset::try_from(i).map_err(|_| MemoryAccessError::Overflow)?;
187 let val = self.add_offset(i)?.deref(view).read()?;
188 if end(&val) {
189 break;
190 }
191 vec.push(val);
192 }
193 Ok(vec)
194 }
195
196 #[inline]
198 pub fn access<'a>(
199 &self,
200 view: &'a MemoryView,
201 ) -> Result<WasmRefAccess<'a, T>, MemoryAccessError> {
202 self.deref(view).access()
203 }
204}
205
206impl<M: MemorySize> WasmPtr<u8, M> {
207 #[inline]
212 pub fn read_utf8_string(
213 &self,
214 view: &MemoryView,
215 len: M::Offset,
216 ) -> Result<String, MemoryAccessError> {
217 let vec = self.slice(view, len)?.read_to_vec()?;
218 Ok(String::from_utf8(vec)?)
219 }
220
221 #[inline]
226 pub fn read_utf8_string_with_nul(
227 &self,
228 view: &MemoryView,
229 ) -> Result<String, MemoryAccessError> {
230 let vec = self.read_until(view, |&byte| byte == 0)?;
231 Ok(String::from_utf8(vec)?)
232 }
233}
234
235unsafe impl<T: ValueType, M: MemorySize> FromToNativeWasmType for WasmPtr<T, M>
236where
237 <M as wasmer_types::MemorySize>::Native: NativeWasmTypeInto,
238{
239 type Native = M::Native;
240
241 fn to_native(self) -> Self::Native {
242 M::offset_to_native(self.offset)
243 }
244 fn from_native(n: Self::Native) -> Self {
245 Self {
246 offset: M::native_to_offset(n),
247 _phantom: PhantomData,
248 }
249 }
250 #[inline]
251 fn is_from_store(&self, _store: &impl AsStoreRef) -> bool {
252 true }
254}
255
// NOTE(review): `WasmPtr` is `repr(transparent)` over `M::Offset`, so it
// appears to carry no padding bytes — hence the empty zeroing body. Confirm
// against the `ValueType` safety contract.
unsafe impl<T: ValueType, M: MemorySize> ValueType for WasmPtr<T, M> {
    fn zero_padding_bytes(&self, _bytes: &mut [mem::MaybeUninit<u8>]) {}
}
259
260impl<T: ValueType, M: MemorySize> Clone for WasmPtr<T, M> {
261 fn clone(&self) -> Self {
262 *self
263 }
264}
265
266impl<T: ValueType, M: MemorySize> Copy for WasmPtr<T, M> {}
267
268impl<T: ValueType, M: MemorySize> PartialEq for WasmPtr<T, M> {
269 fn eq(&self, other: &Self) -> bool {
270 self.offset.into() == other.offset.into()
271 }
272}
273
274impl<T: ValueType, M: MemorySize> Eq for WasmPtr<T, M> {}
275
276impl<T: ValueType, M: MemorySize> fmt::Debug for WasmPtr<T, M> {
277 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
278 write!(f, "{}(@{})", std::any::type_name::<T>(), self.offset.into())
279 }
280}