wasmtime/runtime/
code_memory.rs1use crate::prelude::*;
4use crate::runtime::vm::{libcalls, MmapVec, UnwindRegistration};
5use core::mem::ManuallyDrop;
6use core::ops::Range;
7use object::endian::NativeEndian;
8use object::read::{elf::ElfFile64, Object, ObjectSection};
9use object::ObjectSymbol;
10use wasmtime_environ::{lookup_trap_code, obj, Trap};
11use wasmtime_jit_icache_coherence as icache_coherence;
12
/// Management of executable memory within a `MmapVec`.
///
/// This type takes ownership of a region of memory containing a compiled
/// ELF image and manages the permissions of the contained JIT code: it is
/// created read/write, then `publish` patches relocations and flips the
/// text section to executable.
pub struct CodeMemory {
    // NOTE(drop order): both fields are `ManuallyDrop` because
    // `unwind_registration` refers to memory owned by `mmap`, so the
    // registration must be torn down before the mapping (see `Drop` impl).
    mmap: ManuallyDrop<MmapVec>,
    unwind_registration: ManuallyDrop<Option<UnwindRegistration>>,
    // Whether `publish` has already run; publishing is a one-shot operation.
    published: bool,
    // Parsed from the `ELF_WASM_BTI` section in `new`: whether branch
    // protection is requested when the text is made executable.
    enable_branch_protection: bool,

    // Pending libcall relocations collected from the `.text` section:
    // (text-relative offset, libcall) pairs patched during `publish`.
    relocations: Vec<(usize, obj::LibCall)>,

    // Ranges within `self.mmap` where each known ELF section lives; an
    // absent section is represented by the empty range `0..0`.
    text: Range<usize>,
    unwind: Range<usize>,
    trap_data: Range<usize>,
    wasm_data: Range<usize>,
    address_map_data: Range<usize>,
    func_name_data: Range<usize>,
    info_data: Range<usize>,
    dwarf: Range<usize>,
}
37
impl Drop for CodeMemory {
    fn drop(&mut self) {
        // SAFETY: each `ManuallyDrop` field is dropped exactly once, here.
        // The order is load-bearing: the unwind registration references
        // memory owned by `mmap`, so it must be deregistered before the
        // mapping itself is released.
        unsafe {
            ManuallyDrop::drop(&mut self.unwind_registration);
            ManuallyDrop::drop(&mut self.mmap);
        }
    }
}
47
48fn _assert() {
49 fn _assert_send_sync<T: Send + Sync>() {}
50 _assert_send_sync::<CodeMemory>();
51}
52
impl CodeMemory {
    /// Creates a new `CodeMemory` by taking ownership of the provided
    /// `MmapVec`.
    ///
    /// The `mmap` must be an ELF compilation artifact produced by Wasmtime.
    /// This parses the image, validates section alignment, locates each
    /// known section's range within the mapping, and collects libcall
    /// relocations from `.text` to be applied later by `publish`.
    ///
    /// # Errors
    /// Fails if the ELF image cannot be parsed, a section is misaligned
    /// within the mapping, the branch-protection section is malformed or
    /// missing, or section data/names cannot be read.
    pub fn new(mmap: MmapVec) -> Result<Self> {
        let obj = ElfFile64::<NativeEndian>::parse(&mmap[..])
            .err2anyhow()
            .with_context(|| "failed to parse internal compilation artifact")?;

        let mut relocations = Vec::new();
        let mut text = 0..0;
        let mut unwind = 0..0;
        let mut enable_branch_protection = None;
        let mut trap_data = 0..0;
        let mut wasm_data = 0..0;
        let mut address_map_data = 0..0;
        let mut func_name_data = 0..0;
        let mut info_data = 0..0;
        let mut dwarf = 0..0;
        for section in obj.sections() {
            let data = section.data().err2anyhow()?;
            let name = section.name().err2anyhow()?;
            let range = subslice_range(data, &mmap);

            // Validate that each non-empty section honors its declared
            // alignment relative to the start of the mapping; a violation
            // indicates a corrupt or foreign artifact.
            if section.align() != 0 && data.len() != 0 {
                if (data.as_ptr() as u64 - mmap.as_ptr() as u64) % section.align() != 0 {
                    bail!(
                        "section `{}` isn't aligned to {:#x}",
                        section.name().unwrap_or("ERROR"),
                        section.align()
                    );
                }
            }

            match name {
                // One-byte flag section: nonzero enables branch protection.
                obj::ELF_WASM_BTI => match data.len() {
                    1 => enable_branch_protection = Some(data[0] != 0),
                    _ => bail!("invalid `{name}` section"),
                },
                ".text" => {
                    text = range;

                    // Record libcall relocations to patch in `publish`,
                    // while the memory is still writable.
                    for (offset, reloc) in section.relocations() {
                        // Only pointer-sized, zero-addend, absolute
                        // relocations against known libcall symbols are
                        // expected from Wasmtime's own compiler; anything
                        // else is an internal bug, hence the asserts/panics.
                        assert_eq!(reloc.kind(), object::RelocationKind::Absolute);
                        assert_eq!(reloc.encoding(), object::RelocationEncoding::Generic);
                        assert_eq!(usize::from(reloc.size()), core::mem::size_of::<usize>() * 8);
                        assert_eq!(reloc.addend(), 0);
                        let sym = match reloc.target() {
                            object::RelocationTarget::Symbol(id) => id,
                            other => panic!("unknown relocation target {other:?}"),
                        };
                        let sym = obj.symbol_by_index(sym).unwrap().name().unwrap();
                        let libcall = obj::LibCall::from_str(sym)
                            .unwrap_or_else(|| panic!("unknown symbol relocation: {sym}"));

                        let offset = usize::try_from(offset).unwrap();
                        relocations.push((offset, libcall));
                    }
                }
                UnwindRegistration::SECTION_NAME => unwind = range,
                obj::ELF_WASM_DATA => wasm_data = range,
                obj::ELF_WASMTIME_ADDRMAP => address_map_data = range,
                obj::ELF_WASMTIME_TRAPS => trap_data = range,
                obj::ELF_NAME_DATA => func_name_data = range,
                obj::ELF_WASMTIME_INFO => info_data = range,
                obj::ELF_WASMTIME_DWARF => dwarf = range,

                _ => log::debug!("ignoring section {name}"),
            }
        }
        Ok(Self {
            mmap: ManuallyDrop::new(mmap),
            unwind_registration: ManuallyDrop::new(None),
            published: false,
            enable_branch_protection: enable_branch_protection
                .ok_or_else(|| anyhow!("missing `{}` section", obj::ELF_WASM_BTI))?,
            text,
            unwind,
            trap_data,
            address_map_data,
            func_name_data,
            dwarf,
            info_data,
            wasm_data,
            relocations,
        })
    }

    /// Returns a reference to the underlying `MmapVec` for this memory.
    #[inline]
    pub fn mmap(&self) -> &MmapVec {
        &self.mmap
    }

    /// Returns the contents of the `.text` section of the ELF image.
    #[inline]
    pub fn text(&self) -> &[u8] {
        &self.mmap[self.text.clone()]
    }

    /// Returns the contents of the DWARF debug-info section.
    #[inline]
    pub fn dwarf(&self) -> &[u8] {
        &self.mmap[self.dwarf.clone()]
    }

    /// Returns the data in the function-name data section.
    #[inline]
    pub fn func_name_data(&self) -> &[u8] {
        &self.mmap[self.func_name_data.clone()]
    }

    /// Returns the concatenated wasm data stored in the image.
    #[inline]
    pub fn wasm_data(&self) -> &[u8] {
        &self.mmap[self.wasm_data.clone()]
    }

    /// Returns the encoded address-map section used to translate text
    /// offsets back to wasm offsets.
    #[inline]
    pub fn address_map_data(&self) -> &[u8] {
        &self.mmap[self.address_map_data.clone()]
    }

    /// Returns the contents of the Wasmtime info section.
    #[inline]
    pub fn wasmtime_info(&self) -> &[u8] {
        &self.mmap[self.info_data.clone()]
    }

    /// Returns the encoded trap-data section (consumed by
    /// `lookup_trap_code`).
    #[inline]
    pub fn trap_data(&self) -> &[u8] {
        &self.mmap[self.trap_data.clone()]
    }

    /// Publishes the internal ELF image, making its text section executable.
    ///
    /// May only be called once; panics if called a second time.
    ///
    /// The sequence of operations is order-critical: libcall relocations
    /// are patched while the memory is still writable, the whole image is
    /// then made read-only, the instruction cache is cleared for the text
    /// range, the text range is made executable, a pipeline flush publishes
    /// the code to other threads, and finally unwind info is registered.
    pub fn publish(&mut self) -> Result<()> {
        assert!(!self.published);
        self.published = true;

        // Nothing else to do if there's no code to execute.
        if self.text().is_empty() {
            return Ok(());
        }

        // SAFETY: relocation patching happens before any permission change
        // (so writes are valid), and cache maintenance happens before the
        // code can be executed.
        unsafe {
            // First, if necessary, patch absolute addresses of libcalls.
            self.apply_relocations()?;

            // Switch the image from read/write to read-only to prevent
            // accidental modification from here on.
            self.mmap.make_readonly(0..self.mmap.len())?;

            let text = self.text();

            // Flush the freshly written code out of the data cache and
            // invalidate the instruction cache for that range.
            icache_coherence::clear_cache(text.as_ptr().cast(), text.len())
                .expect("Failed cache clear");

            self.mmap
                .make_executable(self.text.clone(), self.enable_branch_protection)
                .context("unable to make memory executable")?;

            // Ensure other threads/cores observe the newly executable code.
            icache_coherence::pipeline_flush_mt().expect("Failed pipeline flush");

            // With the memory finalized, register unwind info so panics and
            // backtraces can unwind through this JIT code.
            self.register_unwind_info()?;
        }

        Ok(())
    }

    /// Writes the absolute address of each required libcall into the text
    /// section at the offsets recorded by `new`.
    ///
    /// # Safety
    /// Must be called while the mapping is still writable, i.e. before
    /// `make_readonly`/`make_executable` in `publish`.
    unsafe fn apply_relocations(&mut self) -> Result<()> {
        if self.relocations.is_empty() {
            return Ok(());
        }

        for (offset, libcall) in self.relocations.iter() {
            // Relocation offsets are relative to the start of `.text`.
            let offset = self.text.start + offset;
            let libcall = match libcall {
                obj::LibCall::FloorF32 => libcalls::relocs::floorf32 as usize,
                obj::LibCall::FloorF64 => libcalls::relocs::floorf64 as usize,
                obj::LibCall::NearestF32 => libcalls::relocs::nearestf32 as usize,
                obj::LibCall::NearestF64 => libcalls::relocs::nearestf64 as usize,
                obj::LibCall::CeilF32 => libcalls::relocs::ceilf32 as usize,
                obj::LibCall::CeilF64 => libcalls::relocs::ceilf64 as usize,
                obj::LibCall::TruncF32 => libcalls::relocs::truncf32 as usize,
                obj::LibCall::TruncF64 => libcalls::relocs::truncf64 as usize,
                obj::LibCall::FmaF32 => libcalls::relocs::fmaf32 as usize,
                obj::LibCall::FmaF64 => libcalls::relocs::fmaf64 as usize,
                #[cfg(target_arch = "x86_64")]
                obj::LibCall::X86Pshufb => libcalls::relocs::x86_pshufb as usize,
                #[cfg(not(target_arch = "x86_64"))]
                obj::LibCall::X86Pshufb => unreachable!(),
            };
            // Unaligned write since relocation sites need not be
            // pointer-aligned within the text section.
            self.mmap
                .as_mut_ptr()
                .add(offset)
                .cast::<usize>()
                .write_unaligned(libcall);
        }
        Ok(())
    }

    /// Registers the unwind section, if present, with the platform-specific
    /// `UnwindRegistration` mechanism.
    ///
    /// # Safety
    /// The text and unwind sections must be in their final location with
    /// final contents (called from `publish` after all patching).
    unsafe fn register_unwind_info(&mut self) -> Result<()> {
        if self.unwind.len() == 0 {
            return Ok(());
        }
        let text = self.text();
        let unwind_info = &self.mmap[self.unwind.clone()];
        let registration =
            UnwindRegistration::new(text.as_ptr(), unwind_info.as_ptr(), unwind_info.len())
                .context("failed to create unwind info registration")?;
        *self.unwind_registration = Some(registration);
        Ok(())
    }

    /// Looks up the trap, if any, recorded for `text_offset` bytes into
    /// this image's text section, using the trap-data section.
    pub fn lookup_trap_code(&self, text_offset: usize) -> Option<Trap> {
        lookup_trap_code(self.trap_data(), text_offset)
    }
}
327
/// Returns the range of `inner` expressed as byte offsets into `outer`.
///
/// An empty `inner` yields `0..0` regardless of where it points. Otherwise
/// `inner` must lie entirely within `outer`; this is asserted before the
/// offsets are computed.
fn subslice_range(inner: &[u8], outer: &[u8]) -> Range<usize> {
    if inner.is_empty() {
        return 0..0;
    }

    let inner_addr = inner.as_ptr() as usize;
    let outer_addr = outer.as_ptr() as usize;

    // Containment: `inner` starts at or after `outer` and ends at or
    // before it. (End comparison is equivalent to comparing the addresses
    // of the final elements of each slice.)
    assert!(outer_addr <= inner_addr);
    assert!(inner_addr + inner.len() <= outer_addr + outer.len());

    let start = inner_addr - outer_addr;
    start..start + inner.len()
}