1use super::liveranges::SpillWeight;
16use crate::cfg::CFGInfo;
17use crate::index::ContainerComparator;
18use crate::indexset::IndexSet;
19use crate::{
20 define_index, Allocation, Block, Edit, Function, Inst, MachineEnv, Operand, PReg, ProgPoint,
21 RegClass, VReg,
22};
23use fxhash::FxHashSet;
24use smallvec::SmallVec;
25use std::cmp::Ordering;
26use std::collections::{BTreeMap, HashMap, HashSet};
27use std::fmt::Debug;
28
/// A half-open range of program points: `from` is inclusive, `to` is
/// exclusive (see `contains_point`/`overlaps` in the impl below).
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct CodeRange {
    /// Start of the range (inclusive).
    pub from: ProgPoint,
    /// End of the range (exclusive).
    pub to: ProgPoint,
}
35
36impl CodeRange {
37 #[inline(always)]
38 pub fn is_empty(&self) -> bool {
39 self.from == self.to
40 }
41 #[inline(always)]
42 pub fn contains(&self, other: &Self) -> bool {
43 other.from >= self.from && other.to <= self.to
44 }
45 #[inline(always)]
46 pub fn contains_point(&self, other: ProgPoint) -> bool {
47 other >= self.from && other < self.to
48 }
49 #[inline(always)]
50 pub fn overlaps(&self, other: &Self) -> bool {
51 other.to > self.from && other.from < self.to
52 }
53 #[inline(always)]
54 pub fn len(&self) -> usize {
55 self.to.inst().index() - self.from.inst().index()
56 }
57 #[inline(always)]
59 pub fn singleton(pos: ProgPoint) -> CodeRange {
60 CodeRange {
61 from: pos,
62 to: pos.next(),
63 }
64 }
65}
66
impl std::cmp::PartialOrd for CodeRange {
    /// Delegates to `Ord::cmp`; see the `Ord` impl below for the
    /// overlap-as-equal comparison semantics.
    #[inline(always)]
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
73impl std::cmp::Ord for CodeRange {
74 #[inline(always)]
75 fn cmp(&self, other: &Self) -> Ordering {
76 if self.to <= other.from {
77 Ordering::Less
78 } else if self.from >= other.to {
79 Ordering::Greater
80 } else {
81 Ordering::Equal
82 }
83 }
84}
85
// Typed index newtypes for the allocator's side tables (generated by
// `define_index!`; exposes `index()`/`raw_u32()` — see uses below).
define_index!(LiveBundleIndex);
define_index!(LiveRangeIndex);
define_index!(SpillSetIndex);
define_index!(UseIndex);
define_index!(VRegIndex);
define_index!(PRegIndex);
define_index!(SpillSlotIndex);

// Bundle lists are usually tiny; keep up to 4 entries inline.
pub type LiveBundleVec = SmallVec<[LiveBundleIndex; 4]>;
96
/// An entry in a bundle's (or vreg's) list of live ranges: the range's
/// extent plus the index of the full `LiveRange` record.
#[derive(Clone, Copy, Debug)]
pub struct LiveRangeListEntry {
    /// Extent of the range, duplicated here so list operations avoid
    /// an extra indirection through `ranges[index]`.
    pub range: CodeRange,
    /// Index of the corresponding `LiveRange` in `Env::ranges`.
    pub index: LiveRangeIndex,
}

pub type LiveRangeList = SmallVec<[LiveRangeListEntry; 4]>;
pub type UseList = SmallVec<[Use; 4]>;
105
/// A single live range: a span of code over which a vreg's value is
/// live, together with its uses.
#[derive(Clone, Debug)]
pub struct LiveRange {
    pub range: CodeRange,

    /// Owning vreg.
    pub vreg: VRegIndex,
    /// Bundle this range currently belongs to.
    pub bundle: LiveBundleIndex,
    /// Packed word: top 3 bits (mask 0xe000_0000) hold `LiveRangeFlag`
    /// bits; low 29 bits hold the spill-weight payload. See the
    /// accessors in `impl LiveRange`.
    pub uses_spill_weight_and_flags: u32,

    pub uses: UseList,

    /// Forwarding pointer set when this range is merged into another.
    pub merged_into: LiveRangeIndex,
}
118
/// Per-live-range flag bits; each variant's value is shifted left by
/// 29 to occupy the top bits of `uses_spill_weight_and_flags`.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u32)]
pub enum LiveRangeFlag {
    /// The range starts at a definition (not a livein/blockparam).
    StartsAtDef = 1,
}
124
125impl LiveRange {
126 #[inline(always)]
127 pub fn set_flag(&mut self, flag: LiveRangeFlag) {
128 self.uses_spill_weight_and_flags |= (flag as u32) << 29;
129 }
130 #[inline(always)]
131 pub fn clear_flag(&mut self, flag: LiveRangeFlag) {
132 self.uses_spill_weight_and_flags &= !((flag as u32) << 29);
133 }
134 #[inline(always)]
135 pub fn assign_flag(&mut self, flag: LiveRangeFlag, val: bool) {
136 let bit = if val { (flag as u32) << 29 } else { 0 };
137 self.uses_spill_weight_and_flags &= 0xe000_0000;
138 self.uses_spill_weight_and_flags |= bit;
139 }
140 #[inline(always)]
141 pub fn has_flag(&self, flag: LiveRangeFlag) -> bool {
142 self.uses_spill_weight_and_flags & ((flag as u32) << 29) != 0
143 }
144 #[inline(always)]
145 pub fn flag_word(&self) -> u32 {
146 self.uses_spill_weight_and_flags & 0xe000_0000
147 }
148 #[inline(always)]
149 pub fn merge_flags(&mut self, flag_word: u32) {
150 self.uses_spill_weight_and_flags |= flag_word;
151 }
152 #[inline(always)]
153 pub fn uses_spill_weight(&self) -> SpillWeight {
154 let bits = (self.uses_spill_weight_and_flags & 0x1fff_ffff) << 2;
155 SpillWeight::from_f32(f32::from_bits(bits))
156 }
157 #[inline(always)]
158 pub fn set_uses_spill_weight(&mut self, weight: SpillWeight) {
159 let weight_bits = (weight.to_f32().to_bits() >> 2) & 0x1fff_ffff;
160 self.uses_spill_weight_and_flags =
161 (self.uses_spill_weight_and_flags & 0xe000_0000) | weight_bits;
162 }
163}
164
/// A single use (or def) of a vreg at a program point.
#[derive(Clone, Copy, Debug)]
pub struct Use {
    /// The operand (with policy/constraint info).
    pub operand: Operand,
    /// Where the use occurs.
    pub pos: ProgPoint,
    /// Operand slot index in the instruction, or `SLOT_NONE`.
    pub slot: u8,
    /// Spill-weight contribution of this use; starts at 0 and is
    /// computed later (see `Use::new`).
    pub weight: u16,
}
172
173impl Use {
174 #[inline(always)]
175 pub fn new(operand: Operand, pos: ProgPoint, slot: u8) -> Self {
176 Self {
177 operand,
178 pos,
179 slot,
180 weight: 0,
182 }
183 }
184}
185
/// Sentinel for "no operand slot" in `Use::slot`.
pub const SLOT_NONE: u8 = u8::MAX;
187
188#[derive(Clone, Debug)]
189pub struct LiveBundle {
190 pub ranges: LiveRangeList,
191 pub spillset: SpillSetIndex,
192 pub allocation: Allocation,
193 pub prio: u32, pub spill_weight_and_props: u32,
195}
196
/// Largest value representable in the 28-bit cached-weight field.
pub const BUNDLE_MAX_SPILL_WEIGHT: u32 = (1 << 28) - 1;
/// Minimal bundles containing a fixed-reg use get the absolute max.
pub const MINIMAL_FIXED_BUNDLE_SPILL_WEIGHT: u32 = BUNDLE_MAX_SPILL_WEIGHT;
/// Other minimal bundles rank just below.
pub const MINIMAL_BUNDLE_SPILL_WEIGHT: u32 = BUNDLE_MAX_SPILL_WEIGHT - 1;
/// Ordinary (non-minimal) bundles are capped below both sentinels.
pub const BUNDLE_MAX_NORMAL_SPILL_WEIGHT: u32 = BUNDLE_MAX_SPILL_WEIGHT - 2;
201
202impl LiveBundle {
203 #[inline(always)]
204 pub fn set_cached_spill_weight_and_props(
205 &mut self,
206 spill_weight: u32,
207 minimal: bool,
208 fixed: bool,
209 fixed_def: bool,
210 stack: bool,
211 ) {
212 debug_assert!(spill_weight <= BUNDLE_MAX_SPILL_WEIGHT);
213 self.spill_weight_and_props = spill_weight
214 | (if minimal { 1 << 31 } else { 0 })
215 | (if fixed { 1 << 30 } else { 0 })
216 | (if fixed_def { 1 << 29 } else { 0 })
217 | (if stack { 1 << 28 } else { 0 });
218 }
219
220 #[inline(always)]
221 pub fn cached_minimal(&self) -> bool {
222 self.spill_weight_and_props & (1 << 31) != 0
223 }
224
225 #[inline(always)]
226 pub fn cached_fixed(&self) -> bool {
227 self.spill_weight_and_props & (1 << 30) != 0
228 }
229
230 #[inline(always)]
231 pub fn cached_fixed_def(&self) -> bool {
232 self.spill_weight_and_props & (1 << 29) != 0
233 }
234
235 #[inline(always)]
236 pub fn cached_stack(&self) -> bool {
237 self.spill_weight_and_props & (1 << 28) != 0
238 }
239
240 #[inline(always)]
241 pub fn set_cached_fixed(&mut self) {
242 self.spill_weight_and_props |= 1 << 30;
243 }
244
245 #[inline(always)]
246 pub fn set_cached_fixed_def(&mut self) {
247 self.spill_weight_and_props |= 1 << 29;
248 }
249
250 #[inline(always)]
251 pub fn set_cached_stack(&mut self) {
252 self.spill_weight_and_props |= 1 << 28;
253 }
254
255 #[inline(always)]
256 pub fn cached_spill_weight(&self) -> u32 {
257 self.spill_weight_and_props & ((1 << 28) - 1)
258 }
259}
260
/// Unpacked bundle properties returned to callers that want them as
/// plain bools rather than packed bits.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct BundleProperties {
    pub minimal: bool,
    pub fixed: bool,
}
266
/// Number of `T`s that fit in two machine words — the inline capacity
/// at which a `SmallVec<[T; N]>`'s inline storage costs no extra
/// space over the out-of-line pointer/capacity representation.
const fn no_bloat_capacity<T>() -> usize {
    let inline_budget = 2 * std::mem::size_of::<usize>();
    inline_budget / std::mem::size_of::<T>()
}
283
/// A set of bundles that share one spill slot if they spill.
#[derive(Clone, Debug)]
pub struct SpillSet {
    /// Vregs whose bundles belong to this set; inline capacity sized
    /// so the SmallVec is no larger than a Vec.
    pub vregs: SmallVec<[VRegIndex; no_bloat_capacity::<VRegIndex>()]>,
    /// Assigned spill slot, once allocated.
    pub slot: SpillSlotIndex,
    /// Preferred physical register hint.
    pub reg_hint: PReg,
    pub class: RegClass,
    /// Bundle holding the spilled portions, if any.
    pub spill_bundle: LiveBundleIndex,
    /// Whether a slot is actually required.
    pub required: bool,
    pub size: u8,
    /// Split count, bounded by `MAX_SPLITS_PER_SPILLSET`.
    pub splits: u8,
}
295
/// Cap on splits per spillset, to bound worst-case splitting work.
pub(crate) const MAX_SPLITS_PER_SPILLSET: u8 = 10;
297
/// Per-virtual-register allocator state.
#[derive(Clone, Debug)]
pub struct VRegData {
    pub ranges: LiveRangeList,
    /// Block for which this vreg is a blockparam (if any).
    pub blockparam: Block,
    /// Whether this vreg holds a reference (for stackmap/safepoint
    /// tracking).
    pub is_ref: bool,
    /// Register class; `None` until first observed via
    /// `Env::observe_vreg_class` (also serves as the "used" marker).
    pub class: Option<RegClass>,
}
306
/// Per-physical-register allocator state.
#[derive(Clone, Debug)]
pub struct PRegData {
    /// Ranges already committed to this preg.
    pub allocations: LiveRangeSet,
    /// Whether this "register" is actually a stack location.
    pub is_stack: bool,
}
312
/// A deferred fixup move for the case where one vreg has multiple
/// fixed-register constraints at the same program point.
#[derive(Clone, Debug)]
pub struct MultiFixedRegFixup {
    pub pos: ProgPoint,
    /// Operand slot the value is already in.
    pub from_slot: u8,
    /// Operand slot that needs a copy.
    pub to_slot: u8,
    pub level: FixedRegFixupLevel,
    pub to_preg: PRegIndex,
    pub vreg: VRegIndex,
}
322
/// Ordering level for fixed-reg fixup moves (Initial moves are
/// inserted before Secondary ones; see `InsertMovePrio`).
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum FixedRegFixupLevel {
    Initial,
    Secondary,
}
331
/// A blockparam value flowing OUT of a predecessor block; sorted by
/// `key()` (from_vreg first) for efficient scanning.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(C)]
pub struct BlockparamOut {
    pub to_vreg: VRegIndex,
    pub to_block: Block,
    pub from_block: Block,
    pub from_vreg: VRegIndex,
}
347impl BlockparamOut {
348 #[inline(always)]
349 pub fn key(&self) -> u128 {
350 u128_key(
351 self.from_vreg.raw_u32(),
352 self.from_block.raw_u32(),
353 self.to_block.raw_u32(),
354 self.to_vreg.raw_u32(),
355 )
356 }
357}
358
/// A blockparam value flowing INTO a successor block; sorted by
/// `key()` (to_vreg first) for efficient scanning.
#[derive(Clone, Debug)]
#[repr(C)]
pub struct BlockparamIn {
    pub from_block: Block,
    pub to_block: Block,
    pub to_vreg: VRegIndex,
}
370impl BlockparamIn {
371 #[inline(always)]
372 pub fn key(&self) -> u128 {
373 u128_key(
374 self.to_vreg.raw_u32(),
375 self.to_block.raw_u32(),
376 self.from_block.raw_u32(),
377 0,
378 )
379 }
380}
381
/// Top-level allocator state: the input function/machine description
/// plus every side table built during allocation.
#[derive(Clone, Debug)]
pub struct Env<'a, F: Function> {
    pub func: &'a F,
    pub env: &'a MachineEnv,
    pub cfginfo: CFGInfo,
    /// Per-block live-in / live-out vreg sets.
    pub liveins: Vec<IndexSet>,
    pub liveouts: Vec<IndexSet>,
    pub blockparam_outs: Vec<BlockparamOut>,
    pub blockparam_ins: Vec<BlockparamIn>,

    // Core allocation entities, indexed by the typed indices above.
    pub ranges: Vec<LiveRange>,
    pub bundles: Vec<LiveBundle>,
    pub spillsets: Vec<SpillSet>,
    pub vregs: Vec<VRegData>,
    pub pregs: Vec<PRegData>,
    /// Bundles awaiting allocation, highest priority first.
    pub allocation_queue: PrioQueue,
    pub safepoints: Vec<Inst>,
    /// Safepoints per vreg index (key is the raw vreg number).
    pub safepoints_per_vreg: HashMap<usize, HashSet<Inst>>,

    pub spilled_bundles: Vec<LiveBundleIndex>,
    pub spillslots: Vec<SpillSlotData>,
    /// Free-slot lists, indexed by slot size.
    pub slots_by_size: Vec<SpillSlotList>,

    // Arrays indexed by register class (presumably Int/Float — the
    // two-element arrays suggest two classes; confirm against RegClass).
    pub extra_spillslots_by_class: [SmallVec<[Allocation; 2]>; 2],
    pub preferred_victim_by_class: [PReg; 2],

    // Program-move bookkeeping for move elision/merging.
    pub prog_move_srcs: Vec<((VRegIndex, Inst), Allocation)>,
    pub prog_move_dsts: Vec<((VRegIndex, Inst), Allocation)>,
    pub prog_move_merges: Vec<(LiveRangeIndex, LiveRangeIndex)>,

    pub multi_fixed_reg_fixups: Vec<MultiFixedRegFixup>,

    pub inserted_moves: Vec<InsertedMove>,

    /// Final edit list, keyed by `PosWithPrio`.
    pub edits: Vec<(PosWithPrio, Edit)>,
    pub allocs: Vec<Allocation>,
    /// Offset into `allocs` per instruction.
    pub inst_alloc_offsets: Vec<u32>,
    pub num_spillslots: u32,
    pub safepoint_slots: Vec<(ProgPoint, Allocation)>,
    /// (vreg label, start, end, alloc) tuples for debug value info.
    pub debug_locations: Vec<(u32, ProgPoint, ProgPoint, Allocation)>,

    pub allocated_bundle_count: usize,

    pub stats: Stats,

    // Debug-annotation support (populated only when enabled).
    pub debug_annotations: std::collections::HashMap<ProgPoint, Vec<String>>,
    pub annotations_enabled: bool,

    /// Scratch set reused across conflict probes.
    pub conflict_set: FxHashSet<LiveBundleIndex>,
}
456
457impl<'a, F: Function> Env<'a, F> {
458 #[inline]
460 pub fn vreg(&self, index: VRegIndex) -> VReg {
461 let class = self.vregs[index.index()]
462 .class
463 .expect("trying to get a VReg before observing its class");
464 VReg::new(index.index(), class)
465 }
466
467 pub fn observe_vreg_class(&mut self, vreg: VReg) {
470 let old_class = self.vregs[vreg.vreg()].class.replace(vreg.class());
471 debug_assert!(old_class == None || old_class == Some(vreg.class()));
474 }
475
476 pub fn is_vreg_used(&self, index: VRegIndex) -> bool {
478 self.vregs[index.index()].class.is_some()
479 }
480}
481
/// State for one allocated spill slot.
#[derive(Clone, Debug)]
pub struct SpillSlotData {
    /// Ranges currently occupying this slot.
    pub ranges: LiveRangeSet,
    /// Size of the slot in machine slots.
    pub slots: u32,
    /// The slot expressed as an `Allocation` for edit emission.
    pub alloc: Allocation,
}
488
/// A list of spill slots of one size, probed round-robin starting at
/// `probe_start` (see `next_index`).
#[derive(Clone, Debug)]
pub struct SpillSlotList {
    pub slots: SmallVec<[SpillSlotIndex; 32]>,
    /// Rotating start position for the next allocation probe.
    pub probe_start: usize,
}
494
495impl SpillSlotList {
496 pub(crate) fn next_index(&self, index: usize) -> usize {
499 debug_assert!(index < self.slots.len());
500 if index == self.slots.len() - 1 {
501 0
502 } else {
503 index + 1
504 }
505 }
506}
507
/// Max-heap of bundles awaiting allocation, ordered by priority.
#[derive(Clone, Debug)]
pub struct PrioQueue {
    pub heap: std::collections::BinaryHeap<PrioQueueEntry>,
}
512
/// Heap entry; `prio` is the first field so the derived `Ord`
/// compares by priority first (then bundle, then hint).
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub struct PrioQueueEntry {
    pub prio: u32,
    pub bundle: LiveBundleIndex,
    pub reg_hint: PReg,
}
519
/// Set of non-overlapping ranges, keyed by `LiveRangeKey` whose
/// overlap-as-equal comparison makes BTree lookups find conflicts.
#[derive(Clone, Debug)]
pub struct LiveRangeSet {
    pub btree: BTreeMap<LiveRangeKey, LiveRangeIndex>,
}
524
/// BTree key for a code range: raw `ProgPoint` indices, with custom
/// `Eq`/`Ord` impls below that treat overlapping keys as equal.
#[derive(Clone, Copy, Debug)]
pub struct LiveRangeKey {
    /// Inclusive start, as `ProgPoint::to_index()`.
    pub from: u32,
    /// Exclusive end, as `ProgPoint::to_index()`.
    pub to: u32,
}
530
531impl LiveRangeKey {
532 #[inline(always)]
533 pub fn from_range(range: &CodeRange) -> Self {
534 Self {
535 from: range.from.to_index(),
536 to: range.to.to_index(),
537 }
538 }
539
540 #[inline(always)]
541 pub fn to_range(&self) -> CodeRange {
542 CodeRange {
543 from: ProgPoint::from_index(self.from),
544 to: ProgPoint::from_index(self.to),
545 }
546 }
547}
548
549impl std::cmp::PartialEq for LiveRangeKey {
550 #[inline(always)]
551 fn eq(&self, other: &Self) -> bool {
552 self.to > other.from && self.from < other.to
553 }
554}
555impl std::cmp::Eq for LiveRangeKey {}
556impl std::cmp::PartialOrd for LiveRangeKey {
557 #[inline(always)]
558 fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
559 Some(self.cmp(other))
560 }
561}
562impl std::cmp::Ord for LiveRangeKey {
563 #[inline(always)]
564 fn cmp(&self, other: &Self) -> std::cmp::Ordering {
565 if self.to <= other.from {
566 std::cmp::Ordering::Less
567 } else if self.from >= other.to {
568 std::cmp::Ordering::Greater
569 } else {
570 std::cmp::Ordering::Equal
571 }
572 }
573}
574
575pub struct PrioQueueComparator<'a> {
576 pub prios: &'a [usize],
577}
578impl<'a> ContainerComparator for PrioQueueComparator<'a> {
579 type Ix = LiveBundleIndex;
580 fn compare(&self, a: Self::Ix, b: Self::Ix) -> std::cmp::Ordering {
581 self.prios[a.index()].cmp(&self.prios[b.index()])
582 }
583}
584
585impl PrioQueue {
586 pub fn new() -> Self {
587 PrioQueue {
588 heap: std::collections::BinaryHeap::new(),
589 }
590 }
591
592 #[inline(always)]
593 pub fn insert(&mut self, bundle: LiveBundleIndex, prio: usize, reg_hint: PReg) {
594 self.heap.push(PrioQueueEntry {
595 prio: prio as u32,
596 bundle,
597 reg_hint,
598 });
599 }
600
601 #[inline(always)]
602 pub fn is_empty(self) -> bool {
603 self.heap.is_empty()
604 }
605
606 #[inline(always)]
607 pub fn pop(&mut self) -> Option<(LiveBundleIndex, PReg)> {
608 self.heap.pop().map(|entry| (entry.bundle, entry.reg_hint))
609 }
610}
611
612impl LiveRangeSet {
613 pub(crate) fn new() -> Self {
614 Self {
615 btree: BTreeMap::new(),
616 }
617 }
618}
619
/// A move scheduled for insertion during edit emission.
#[derive(Clone, Debug)]
pub struct InsertedMove {
    /// Where to insert, plus the priority that orders moves at the
    /// same point.
    pub pos_prio: PosWithPrio,
    pub from_alloc: Allocation,
    pub to_alloc: Allocation,
    /// Destination vreg (for metadata/debug purposes).
    pub to_vreg: VReg,
}
627
/// Priority classes for moves inserted at the same program point.
/// The derived `Ord` follows declaration order, so earlier variants
/// sort (and are emitted) before later ones.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub enum InsertMovePrio {
    InEdgeMoves,
    BlockParam,
    Regular,
    PostRegular,
    MultiFixedRegInitial,
    MultiFixedRegSecondary,
    ReusedInput,
    OutEdgeMoves,
}
639
/// A program point paired with an insertion priority; `key()` packs
/// both into one u64 for sorting (position-major, priority-minor).
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(C)]
pub struct PosWithPrio {
    pub prio: u32,
    pub pos: ProgPoint,
}
648
649impl PosWithPrio {
650 #[inline]
651 pub fn key(self) -> u64 {
652 u64_key(self.pos.to_index(), self.prio)
653 }
654}
655
/// Counters collected during allocation for debugging and tuning;
/// all start at zero via `Default` and are purely informational.
#[derive(Clone, Copy, Debug, Default)]
#[cfg_attr(feature = "enable-serde", derive(serde::Serialize, serde::Deserialize))]
pub struct Stats {
    // Liveness computation.
    pub livein_blocks: usize,
    pub livein_iterations: usize,
    pub initial_liverange_count: usize,
    pub merged_bundle_count: usize,
    // Program-move elision/merging.
    pub prog_moves: usize,
    pub prog_moves_dead_src: usize,
    pub prog_move_merge_attempt: usize,
    pub prog_move_merge_success: usize,
    // Main allocation loop probes.
    pub process_bundle_count: usize,
    pub process_bundle_reg_probes_fixed: usize,
    pub process_bundle_reg_success_fixed: usize,
    pub process_bundle_bounding_range_probe_start_any: usize,
    pub process_bundle_bounding_range_probes_any: usize,
    pub process_bundle_bounding_range_success_any: usize,
    pub process_bundle_reg_probe_start_any: usize,
    pub process_bundle_reg_probes_any: usize,
    pub process_bundle_reg_success_any: usize,
    pub evict_bundle_event: usize,
    pub evict_bundle_count: usize,
    // Splitting.
    pub splits: usize,
    pub splits_clobbers: usize,
    pub splits_hot: usize,
    pub splits_conflicts: usize,
    pub splits_defs: usize,
    pub splits_all: usize,
    pub final_liverange_count: usize,
    pub final_bundle_count: usize,
    // Spilling.
    pub spill_bundle_count: usize,
    pub spill_bundle_reg_probes: usize,
    pub spill_bundle_reg_success: usize,
    // Move/edit emission.
    pub blockparam_ins_count: usize,
    pub blockparam_outs_count: usize,
    pub halfmoves_count: usize,
    pub edits_count: usize,
}
694
/// Pack two u32s into a u64, `b` in the high half and `a` in the low
/// half (note the argument order: high word first).
#[inline(always)]
pub fn u64_key(b: u32, a: u32) -> u64 {
    ((b as u64) << 32) | (a as u64)
}
/// Pack four u32s into a u128, most-significant word first: `d`
/// occupies bits 96..128, `c` bits 64..96, `b` bits 32..64, `a` bits
/// 0..32.
#[inline(always)]
pub fn u128_key(d: u32, c: u32, b: u32, a: u32) -> u128 {
    let words = [d, c, b, a];
    let mut key: u128 = 0;
    for w in words {
        key = (key << 32) | w as u128;
    }
    key
}