1use crate::misc::{FixupKind, InstBuf, Instruction, Label};
2use alloc::vec::Vec;
3
/// A pending relocation: a placeholder inside an already-emitted instruction
/// that gets patched with the resolved label offset in `Assembler::finalize`.
#[derive(Copy, Clone)]
struct Fixup {
    /// Label whose final offset will be written into the instruction.
    target_label: Label,
    /// Byte offset of the start of the instruction within `Assembler::code`.
    instruction_offset: usize,
    /// Total encoded length of the instruction, in bytes.
    instruction_length: u8,
    /// Where inside the instruction the patchable field lives and how wide it is.
    kind: FixupKind,
}
11
/// An incremental machine-code assembler with label and fixup support.
pub struct Assembler {
    /// Base address the emitted code is assembled for; used to compute
    /// absolute addresses (see `current_address`) and in trace logging.
    origin: u64,
    /// The emitted machine-code bytes.
    code: Vec<u8>,
    /// Offset from the origin for each label, indexed by `Label::raw`;
    /// `isize::MAX` is the sentinel for "declared but not yet defined".
    labels: Vec<isize>,
    /// Relocations still to be applied by `finalize`.
    fixups: Vec<Fixup>,
    /// How many more instructions can be pushed via `push_unchecked` without
    /// growing `code`; replenished by `reserve`/`push`.
    guaranteed_capacity: usize,
}
19
20#[allow(clippy::derivable_impls)]
21impl Default for Assembler {
22 fn default() -> Self {
23 Self::new()
24 }
25}
26
/// A borrowed view over the finalized machine code.
///
/// Dereferences to the code bytes; when dropped it clears the underlying
/// assembler so it can be reused from a clean state.
#[repr(transparent)]
pub struct AssembledCode<'a>(&'a mut Assembler);
29
30impl<'a> core::ops::Deref for AssembledCode<'a> {
31 type Target = [u8];
32
33 #[inline]
34 fn deref(&self) -> &Self::Target {
35 &self.0.code
36 }
37}
38
39impl<'a> From<AssembledCode<'a>> for Vec<u8> {
40 fn from(code: AssembledCode<'a>) -> Vec<u8> {
41 core::mem::take(&mut code.0.code)
42 }
43}
44
impl<'a> Drop for AssembledCode<'a> {
    fn drop(&mut self) {
        // Once the borrowed view goes away, reset the assembler so it can be
        // reused for the next round of code generation.
        self.0.clear();
    }
}
50
/// A type-level non-zero counter used to track, at compile time, how many
/// reserved instruction slots remain (see [`Assembler::reserve`]).
///
/// # Safety
///
/// Implementors must guarantee that `VALUE` is non-zero;
/// `ReservedAssembler::push` relies on this to justify its call to the
/// capacity-unchecked `push_unchecked`.
pub unsafe trait NonZero {
    /// The counter's numeric value; must never be zero.
    const VALUE: usize;
    /// The type-level predecessor (this counter minus one).
    type Next;
}
58
/// Type-level zero: no reserved instruction slots remain.
pub struct U0;
60
/// Defines a unit struct for each type-level number and implements
/// [`NonZero`] for it, linking each value to its predecessor type.
macro_rules! impl_non_zero {
    ($(($name:ident = $value:expr, $next:ident))*) => {
        $(
            pub struct $name;

            // Compile-time guard: the `unsafe impl` below is only sound for
            // non-zero values.
            const _: () = {
                assert!($value != 0);
            };

            // SAFETY: the constant assertion above guarantees `$value != 0`,
            // which is exactly the contract `NonZero` requires.
            unsafe impl NonZero for $name {
                const VALUE: usize = $value;
                type Next = $next;
            }
        )*
    }
}
78
// Type-level numbers 1..=6. Each number's `Next` is its predecessor, so every
// `push` on a `ReservedAssembler` decrements the reserved-slot counter by one.
impl_non_zero! {
    (U1 = 1, U0)
    (U2 = 2, U1)
    (U3 = 3, U2)
    (U4 = 4, U3)
    (U5 = 5, U4)
    (U6 = 6, U5)
}
87
/// An assembler with `R` (a type-level number) instruction slots pre-reserved,
/// allowing that many pushes without per-instruction capacity checks.
#[repr(transparent)]
pub struct ReservedAssembler<'a, R>(&'a mut Assembler, core::marker::PhantomData<R>);
90
impl<'a> ReservedAssembler<'a, U0> {
    /// Compile-time check: this method only exists when the type-level
    /// counter has reached zero, so calling it proves that exactly as many
    /// slots were consumed as were reserved. Does nothing at runtime.
    #[allow(clippy::unused_self)]
    #[cfg_attr(not(debug_assertions), inline(always))]
    pub fn assert_reserved_exactly_as_needed(self) {}
}
96
impl<'a, R> ReservedAssembler<'a, R> {
    /// Pushes an instruction into one of the reserved slots, returning an
    /// assembler whose type-level counter is decremented (`R::Next`).
    #[cfg_attr(not(debug_assertions), inline(always))]
    pub fn push<T>(self, instruction: Instruction<T>) -> ReservedAssembler<'a, R::Next>
    where
        R: NonZero,
        T: core::fmt::Display,
    {
        // SAFETY: `R: NonZero` proves at least one reserved slot remains, so
        // the capacity-unchecked push cannot overrun the buffer.
        unsafe {
            self.0.push_unchecked(instruction);
        }

        ReservedAssembler(self.0, core::marker::PhantomData)
    }

    /// Like [`Self::push`], but only emits the instruction when `condition`
    /// holds. The type-level slot is consumed either way.
    #[cfg_attr(not(debug_assertions), inline(always))]
    pub fn push_if<T>(self, condition: bool, instruction: Instruction<T>) -> ReservedAssembler<'a, R::Next>
    where
        R: NonZero,
        T: core::fmt::Display,
    {
        if condition {
            // SAFETY: `R: NonZero` proves at least one reserved slot remains.
            unsafe {
                self.0.push_unchecked(instruction);
            }
        }

        ReservedAssembler(self.0, core::marker::PhantomData)
    }

    /// Consumes one reserved slot without emitting anything.
    #[cfg_attr(not(debug_assertions), inline(always))]
    pub fn push_none(self) -> ReservedAssembler<'a, R::Next>
    where
        R: NonZero,
    {
        ReservedAssembler(self.0, core::marker::PhantomData)
    }

    /// See [`Assembler::get_label_origin_offset`].
    #[cfg_attr(not(debug_assertions), inline(always))]
    pub fn get_label_origin_offset(&self, label: Label) -> Option<isize> {
        self.0.get_label_origin_offset(label)
    }

    /// Number of code bytes emitted so far.
    #[cfg_attr(not(debug_assertions), inline(always))]
    pub fn len(&self) -> usize {
        self.0.len()
    }

    /// Returns `true` if no code has been emitted yet.
    #[cfg_attr(not(debug_assertions), inline(always))]
    pub fn is_empty(&self) -> bool {
        self.0.is_empty()
    }
}
151
impl Assembler {
    /// Creates an empty assembler with an origin of 0.
    pub const fn new() -> Self {
        Assembler {
            origin: 0,
            code: Vec::new(),
            labels: Vec::new(),
            fixups: Vec::new(),
            guaranteed_capacity: 0,
        }
    }

    /// Returns the base address the code is assembled for.
    pub fn origin(&self) -> u64 {
        self.origin
    }

    /// Sets the base address used when computing absolute addresses.
    pub fn set_origin(&mut self, origin: u64) {
        self.origin = origin;
    }

    /// Absolute address of the next byte to be emitted
    /// (origin plus the number of bytes emitted so far).
    pub fn current_address(&self) -> u64 {
        self.origin + self.code.len() as u64
    }

    /// Allocates a new label without pinning it to a position yet.
    /// `isize::MAX` is the internal "undefined" sentinel.
    pub fn forward_declare_label(&mut self) -> Label {
        let label = self.labels.len() as u32;
        self.labels.push(isize::MAX);
        Label::from_raw(label)
    }

    /// Allocates a new label and immediately pins it to the current position.
    pub fn create_label(&mut self) -> Label {
        let label = self.labels.len() as u32;
        #[cfg(debug_assertions)]
        log::trace!("{:08x}: {}:", self.origin + self.code.len() as u64, Label::from_raw(label));

        self.labels.push(self.code.len() as isize);
        Label::from_raw(label)
    }

    /// Pins a forward-declared label to the current position.
    ///
    /// # Panics
    ///
    /// Panics if the label was already defined.
    pub fn define_label(&mut self, label: Label) -> &mut Self {
        #[cfg(debug_assertions)]
        log::trace!("{:08x}: {}:", self.origin + self.code.len() as u64, label);

        assert_eq!(
            self.labels[label.raw() as usize],
            isize::MAX,
            "tried to redefine an already defined label"
        );
        self.labels[label.raw() as usize] = self.code.len() as isize;
        self
    }

    /// Defines `label` at the current position and then emits `instruction`.
    pub fn push_with_label<T>(&mut self, label: Label, instruction: Instruction<T>) -> &mut Self
    where
        T: core::fmt::Display,
    {
        self.define_label(label);
        self.push(instruction)
    }

    /// Returns the label's offset from the origin, or `None` if the label
    /// has only been forward-declared and not yet defined.
    #[inline]
    pub fn get_label_origin_offset(&self, label: Label) -> Option<isize> {
        let offset = self.labels[label.raw() as usize];
        if offset == isize::MAX {
            None
        } else {
            Some(offset)
        }
    }

    /// Like [`Self::get_label_origin_offset`], but panics on an undefined label.
    pub fn get_label_origin_offset_or_panic(&self, label: Label) -> isize {
        self.get_label_origin_offset(label)
            .expect("tried to fetch a label offset for a label that was not defined")
    }

    /// Overrides a label's offset directly, regardless of its current state.
    pub fn set_label_origin_offset(&mut self, label: Label, offset: isize) {
        self.labels[label.raw() as usize] = offset;
    }

    /// Records a pending relocation for an instruction that was just emitted;
    /// it will be resolved and patched in `finalize`.
    #[inline(always)]
    fn add_fixup(&mut self, instruction_offset: usize, instruction_length: usize, target_label: Label, kind: FixupKind) {
        debug_assert!((target_label.raw() as usize) < self.labels.len());
        // The patchable field must lie entirely within the instruction.
        debug_assert!(
            (kind.offset() as usize) < instruction_length,
            "instruction is {} bytes long and yet its target fixup starts at {}",
            instruction_length,
            kind.offset()
        );
        debug_assert!((kind.length() as usize) < instruction_length);
        debug_assert!((kind.offset() as usize + kind.length() as usize) <= instruction_length);
        self.fixups.push(Fixup {
            target_label,
            instruction_offset,
            instruction_length: instruction_length as u8,
            kind,
        });
    }

    /// Reserves capacity for `T::VALUE` instructions up front and returns a
    /// type-state wrapper through which exactly that many pushes can be made
    /// without further per-instruction capacity checks.
    #[inline(always)]
    pub fn reserve<T>(&mut self) -> ReservedAssembler<T>
    where
        T: NonZero,
    {
        InstBuf::reserve(&mut self.code, T::VALUE);

        self.guaranteed_capacity = T::VALUE;
        ReservedAssembler(self, core::marker::PhantomData)
    }

    /// Emits a single instruction, reserving buffer capacity on demand.
    #[cfg_attr(not(debug_assertions), inline(always))]
    pub fn push<T>(&mut self, instruction: Instruction<T>) -> &mut Self
    where
        T: core::fmt::Display,
    {
        if self.guaranteed_capacity == 0 {
            InstBuf::reserve_const::<1>(&mut self.code);
            self.guaranteed_capacity = 1;
        }

        // SAFETY: the branch above guarantees capacity for one instruction.
        unsafe { self.push_unchecked(instruction) }
    }

    /// Emits an instruction without checking buffer capacity, and records its
    /// fixup (if any).
    ///
    /// # Safety
    ///
    /// The caller must ensure capacity for the instruction was reserved
    /// beforehand (i.e. `guaranteed_capacity > 0`, established via `reserve`
    /// or `push`).
    #[cfg_attr(not(debug_assertions), inline(always))]
    unsafe fn push_unchecked<T>(&mut self, instruction: Instruction<T>) -> &mut Self
    where
        T: core::fmt::Display,
    {
        #[cfg(debug_assertions)]
        log::trace!("{:08x}: {}", self.origin + self.code.len() as u64, instruction);

        debug_assert!(self.guaranteed_capacity > 0);
        let instruction_offset = self.code.len();

        // SAFETY: the caller guarantees enough spare capacity was reserved.
        unsafe {
            instruction.bytes.encode_into_vec_unsafe(&mut self.code);
        }
        self.guaranteed_capacity -= 1;

        if let Some((label, fixup)) = instruction.fixup {
            self.add_fixup(instruction_offset, instruction.bytes.len(), label, fixup);
        }

        self
    }

    /// Appends raw bytes verbatim; no fixups are recorded for them.
    pub fn push_raw(&mut self, bytes: &[u8]) -> &mut Self {
        #[cfg(debug_assertions)]
        log::trace!("{:08x}: {:x?}", self.origin + self.code.len() as u64, bytes);
        self.code.extend_from_slice(bytes);
        self
    }

    /// Resolves every recorded fixup by patching relative displacements (and
    /// up to three opcode bytes) into the emitted code, then returns a
    /// borrowed view of the finished machine code.
    ///
    /// # Panics
    ///
    /// Panics if a resolved displacement does not fit into the fixup's field
    /// width ("out of range jump"), or on an unsupported fixup length.
    pub fn finalize(&mut self) -> AssembledCode {
        for fixup in self.fixups.drain(..) {
            // Displacements are relative to the end of the instruction.
            let origin = fixup.instruction_offset + fixup.instruction_length as usize;
            let target_absolute = self.labels[fixup.target_label.raw() as usize];
            if target_absolute == isize::MAX {
                // Declared-but-undefined label: leave the placeholder bytes
                // untouched and keep going.
                log::trace!("Undefined label found: {}", fixup.target_label);
                continue;
            }

            // Strip the topmost byte of `kind.0`; the remaining low bytes are
            // treated below as up to three little-endian opcode bytes.
            // NOTE(review): the exact bit layout lives in `FixupKind` — the
            // offset/length accessors presumably read the stripped byte(s).
            let opcode = (fixup.kind.0 << 8) >> 8;
            let fixup_offset = fixup.kind.offset();
            let fixup_length = fixup.kind.length();

            // Rewrite the opcode bytes (if any) that precede the displacement
            // field inside the instruction.
            if fixup_offset >= 1 {
                self.code[fixup.instruction_offset] = opcode as u8;
                if fixup_offset >= 2 {
                    self.code[fixup.instruction_offset + 1] = (opcode >> 8) as u8;
                    if fixup_offset >= 3 {
                        self.code[fixup.instruction_offset + 2] = (opcode >> 16) as u8;
                    }
                }
            }

            let offset = target_absolute - origin as isize;
            let p = fixup.instruction_offset + fixup_offset as usize;
            if fixup_length == 1 {
                // 8-bit displacement; must fit in an i8.
                if offset > i8::MAX as isize || offset < i8::MIN as isize {
                    panic!("out of range jump");
                }
                self.code[p] = offset as i8 as u8;
            } else if fixup_length == 4 {
                // 32-bit little-endian displacement; must fit in an i32.
                if offset > i32::MAX as isize || offset < i32::MIN as isize {
                    panic!("out of range jump");
                }
                self.code[p..p + 4].copy_from_slice(&(offset as i32).to_le_bytes());
            } else {
                // Only 1- and 4-byte fixup fields are ever generated.
                unreachable!()
            }
        }

        AssembledCode(self)
    }

    /// Returns `true` if no code has been emitted yet.
    pub fn is_empty(&self) -> bool {
        self.code.is_empty()
    }

    /// Number of code bytes emitted so far.
    pub fn len(&self) -> usize {
        self.code.len()
    }

    /// Mutable access to the raw emitted bytes.
    pub fn code_mut(&mut self) -> &mut [u8] {
        &mut self.code
    }

    /// Truncates the emitted code to `length` bytes.
    pub fn truncate(&mut self, length: usize) {
        self.code.truncate(length);
    }

    /// Spare (allocated but unused) capacity of the code buffer, in bytes.
    pub fn spare_capacity(&self) -> usize {
        self.code.capacity() - self.code.len()
    }

    /// Resizes the code buffer, padding with `fill_with` when growing.
    pub fn resize(&mut self, size: usize, fill_with: u8) {
        self.code.resize(size, fill_with)
    }

    /// Reserves capacity for at least `length` additional code bytes.
    pub fn reserve_code(&mut self, length: usize) {
        self.code.reserve(length);
    }

    /// Reserves capacity for at least `length` additional labels.
    pub fn reserve_labels(&mut self, length: usize) {
        self.labels.reserve(length);
    }

    /// Reserves capacity for at least `length` additional fixups.
    pub fn reserve_fixups(&mut self, length: usize) {
        self.fixups.reserve(length);
    }

    /// Resets the assembler to its initial state, keeping allocations.
    pub fn clear(&mut self) {
        self.origin = 0;
        self.code.clear();
        self.labels.clear();
        self.fixups.clear();
    }
}
391}