regalloc2/ion/liveranges.rs

/*
 * This file was initially derived from the files
 * `js/src/jit/BacktrackingAllocator.h` and
 * `js/src/jit/BacktrackingAllocator.cpp` in Mozilla Firefox, and was
 * originally licensed under the Mozilla Public License 2.0. We
 * subsequently relicensed it to Apache-2.0 WITH LLVM-exception (see
 * https://github.com/bytecodealliance/regalloc2/issues/7).
 *
 * Since the initial port, the design has been substantially evolved
 * and optimized.
 */

//! Live-range computation.

use super::{
    CodeRange, Env, LiveBundle, LiveBundleIndex, LiveRange, LiveRangeFlag, LiveRangeIndex,
    LiveRangeKey, LiveRangeListEntry, LiveRangeSet, PRegData, PRegIndex, RegClass, SpillSetIndex,
    Use, VRegData, VRegIndex, SLOT_NONE,
};
use crate::indexset::IndexSet;
use crate::ion::data_structures::{
    BlockparamIn, BlockparamOut, FixedRegFixupLevel, MultiFixedRegFixup,
};
use crate::{
    Allocation, Block, Function, Inst, InstPosition, Operand, OperandConstraint, OperandKind,
    OperandPos, PReg, ProgPoint, RegAllocError, VReg,
};
use fxhash::{FxHashMap, FxHashSet};
use slice_group_by::GroupByMut;
use smallvec::{smallvec, SmallVec};
use std::collections::{HashSet, VecDeque};

/// A spill weight computed for a certain Use.
#[derive(Clone, Copy, Debug)]
pub struct SpillWeight(f32);

#[inline(always)]
pub fn spill_weight_from_constraint(
    constraint: OperandConstraint,
    loop_depth: usize,
    is_def: bool,
) -> SpillWeight {
    // A base bonus of 1000, multiplied by 4 for each level of loop
    // nesting (capped at a depth of 10): 4000 inside one loop, 16000
    // inside two, etc. Folding avoids explicit exponentiation.
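    // For example: a register-constrained def two loops deep scores
    // 16000 (hot) + 2000 (def) + 2000 (Reg) = 20000.0.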
    let loop_depth = std::cmp::min(10, loop_depth);
    let hot_bonus: f32 = (0..loop_depth).fold(1000.0, |a, _| a * 4.0);
    let def_bonus: f32 = if is_def { 2000.0 } else { 0.0 };
    let constraint_bonus: f32 = match constraint {
        OperandConstraint::Any => 1000.0,
        OperandConstraint::Reg | OperandConstraint::FixedReg(_) => 2000.0,
        _ => 0.0,
    };
    SpillWeight(hot_bonus + def_bonus + constraint_bonus)
}

impl SpillWeight {
    /// Convert a floating-point weight to a u16 that can be compactly
    /// stored in a `Use`. We take the 16 bits just below the sign bit
    /// (the 8 exponent bits and the top 8 mantissa bits; spill weights
    /// are never negative, so the sign bit is always zero). This is
    /// essentially the bfloat16 format
    /// (https://en.wikipedia.org/wiki/Bfloat16_floating-point_format),
    /// trading the sign bit for one extra mantissa bit.
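    ///
    /// For example (a worked bit pattern): `2000.0f32` has bits
    /// `0x44FA_0000`; `>> 15` truncated to `u16` gives `0x89F4`, and
    /// `from_bits(0x89F4)` shifts back to `0x44FA_0000`, i.e. exactly
    /// `2000.0`, since the discarded low mantissa bits were zero.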
    pub fn to_bits(self) -> u16 {
        (self.0.to_bits() >> 15) as u16
    }

    /// Convert a value that was returned from
    /// `SpillWeight::to_bits()` back into a `SpillWeight`. Note that
    /// some precision may be lost when round-tripping from a spill
    /// weight to packed bits and back.
    pub fn from_bits(bits: u16) -> SpillWeight {
        let x = f32::from_bits((bits as u32) << 15);
        SpillWeight(x)
    }

    /// Get a zero spill weight.
    pub fn zero() -> SpillWeight {
        SpillWeight(0.0)
    }

    /// Convert to a raw floating-point value.
    pub fn to_f32(self) -> f32 {
        self.0
    }

    /// Create a `SpillWeight` from a raw floating-point value.
    pub fn from_f32(x: f32) -> SpillWeight {
        SpillWeight(x)
    }

    pub fn to_int(self) -> u32 {
        self.0 as u32
    }
}

impl std::ops::Add<SpillWeight> for SpillWeight {
    type Output = SpillWeight;
    fn add(self, other: SpillWeight) -> Self {
        SpillWeight(self.0 + other.0)
    }
}

impl<'a, F: Function> Env<'a, F> {
    pub fn create_pregs_and_vregs(&mut self) {
        // Create PRegs from the env.
        self.pregs.resize(
            PReg::NUM_INDEX,
            PRegData {
                allocations: LiveRangeSet::new(),
                is_stack: false,
            },
        );
        for &preg in &self.env.fixed_stack_slots {
            self.pregs[preg.index()].is_stack = true;
        }
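        // Record a per-class "preferred victim": the last
        // non-preferred register if one exists, otherwise the last
        // preferred register (falling back to `PReg::invalid()` if
        // the class has no registers at all).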
        for class in 0..self.preferred_victim_by_class.len() {
            self.preferred_victim_by_class[class] = self.env.non_preferred_regs_by_class[class]
                .last()
                .or(self.env.preferred_regs_by_class[class].last())
                .cloned()
                .unwrap_or(PReg::invalid());
        }
        // Create VRegs from the vreg count.
        for idx in 0..self.func.num_vregs() {
            // We'll fill in the real details when we see the def.
            let reg = VReg::new(idx, RegClass::Int);
            self.add_vreg(
                reg,
                VRegData {
                    ranges: smallvec![],
                    blockparam: Block::invalid(),
                    is_ref: false,
                    // We'll learn the RegClass as we scan the code.
                    class: None,
                },
            );
        }
        for v in self.func.reftype_vregs() {
            self.vregs[v.vreg()].is_ref = true;
        }
        // Create allocations too.
        for inst in 0..self.func.num_insts() {
            let start = self.allocs.len() as u32;
            self.inst_alloc_offsets.push(start);
            for _ in 0..self.func.inst_operands(Inst::new(inst)).len() {
                self.allocs.push(Allocation::none());
            }
        }
    }

    pub fn add_vreg(&mut self, reg: VReg, data: VRegData) -> VRegIndex {
        let idx = self.vregs.len();
        debug_assert_eq!(reg.vreg(), idx);
        self.vregs.push(data);
        VRegIndex::new(idx)
    }

    pub fn create_bundle(&mut self) -> LiveBundleIndex {
        let bundle = self.bundles.len();
        self.bundles.push(LiveBundle {
            allocation: Allocation::none(),
            ranges: smallvec![],
            spillset: SpillSetIndex::invalid(),
            prio: 0,
            spill_weight_and_props: 0,
        });
        LiveBundleIndex::new(bundle)
    }

    pub fn create_liverange(&mut self, range: CodeRange) -> LiveRangeIndex {
        let idx = self.ranges.len();

        self.ranges.push(LiveRange {
            range,
            vreg: VRegIndex::invalid(),
            bundle: LiveBundleIndex::invalid(),
            uses_spill_weight_and_flags: 0,

            uses: smallvec![],

            merged_into: LiveRangeIndex::invalid(),
        });

        LiveRangeIndex::new(idx)
    }

    /// Mark `range` as live for the given `vreg`.
    ///
    /// Returns the liverange that contains the given range.
    pub fn add_liverange_to_vreg(
        &mut self,
        vreg: VRegIndex,
        mut range: CodeRange,
    ) -> LiveRangeIndex {
        trace!("add_liverange_to_vreg: vreg {:?} range {:?}", vreg, range);

        // Invariant: as we are building liveness information, we
        // *always* process instructions bottom-to-top, and as a
        // consequence, new liveranges are always created before any
        // existing liveranges for a given vreg. We assert this here,
        // then use it to avoid an O(n) merge step (which would lead
        // to O(n^2) liveness construction cost overall).
        //
        // We store liveranges in reverse order in the `.ranges`
        // array, then reverse them at the end of
        // `compute_liveness()`.
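        //
        // Concretely: scanning backward, a range covering inst 9 is
        // created before one covering inst 4, so `.ranges` temporarily
        // holds [range@9, range@4] until the final reversal.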

        if !self.vregs[vreg.index()].ranges.is_empty() {
            let last_range_index = self.vregs[vreg.index()].ranges.last().unwrap().index;
            let last_range = self.ranges[last_range_index.index()].range;
            if self.func.allow_multiple_vreg_defs() {
                if last_range.contains(&range) {
                    // Special case (may occur when there are multiple
                    // defs of pinned physical regs): if this new range
                    // is contained in the existing range, return the
                    // existing range.
                    return last_range_index;
                }
                // If this range's end falls in the middle of the last
                // range, truncate it to be contiguous so we can merge
                // below.
                if range.to >= last_range.from && range.to <= last_range.to {
                    range.to = last_range.from;
                }
            }
            debug_assert!(
                range.to <= last_range.from,
                "range {:?}, last_range {:?}",
                range,
                last_range
            );
        }

        if self.vregs[vreg.index()].ranges.is_empty()
            || range.to
                < self.ranges[self.vregs[vreg.index()]
                    .ranges
                    .last()
                    .unwrap()
                    .index
                    .index()]
                .range
                .from
        {
            // Is not contiguous with previously-added (immediately
            // following) range; create a new range.
            let lr = self.create_liverange(range);
            self.ranges[lr.index()].vreg = vreg;
            self.vregs[vreg.index()]
                .ranges
                .push(LiveRangeListEntry { range, index: lr });
            lr
        } else {
            // Is contiguous with previously-added range; just extend
            // its range and return it.
            let lr = self.vregs[vreg.index()].ranges.last().unwrap().index;
            debug_assert!(range.to == self.ranges[lr.index()].range.from);
            self.ranges[lr.index()].range.from = range.from;
            lr
        }
    }

    pub fn insert_use_into_liverange(&mut self, into: LiveRangeIndex, mut u: Use) {
        let operand = u.operand;
        let constraint = operand.constraint();
        let block = self.cfginfo.insn_block[u.pos.inst().index()];
        let loop_depth = self.cfginfo.approx_loop_depth[block.index()] as usize;
        let weight = spill_weight_from_constraint(
            constraint,
            loop_depth,
            operand.kind() != OperandKind::Use,
        );
        u.weight = weight.to_bits();

        trace!(
            "insert use {:?} into lr {:?} with weight {:?}",
            u,
            into,
            weight,
        );

        // N.B.: we do *not* update `requirement` on the range,
        // because requirements will be computed during the
        // multi-fixed-reg fixup pass later (after all uses are
        // inserted).

        self.ranges[into.index()].uses.push(u);

        // Update stats.
        let range_weight = self.ranges[into.index()].uses_spill_weight() + weight;
        self.ranges[into.index()].set_uses_spill_weight(range_weight);
        trace!(
            "  -> now range has weight {:?}",
            self.ranges[into.index()].uses_spill_weight(),
        );
    }

    pub fn find_vreg_liverange_for_pos(
        &self,
        vreg: VRegIndex,
        pos: ProgPoint,
    ) -> Option<LiveRangeIndex> {
        for entry in &self.vregs[vreg.index()].ranges {
            if entry.range.contains_point(pos) {
                return Some(entry.index);
            }
        }
        None
    }

    pub fn add_liverange_to_preg(&mut self, range: CodeRange, reg: PReg) {
        trace!("adding liverange to preg: {:?} to {}", range, reg);
        let preg_idx = PRegIndex::new(reg.index());
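        // The btree value here is a sentinel (invalid) index: for a
        // direct preg reservation we only need the key's interval to
        // mark the register as occupied over `range`.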
        self.pregs[preg_idx.index()]
            .allocations
            .btree
            .insert(LiveRangeKey::from_range(&range), LiveRangeIndex::invalid());
    }

    pub fn is_live_in(&mut self, block: Block, vreg: VRegIndex) -> bool {
        self.liveins[block.index()].get(vreg.index())
    }

    pub fn compute_liveness(&mut self) -> Result<(), RegAllocError> {
        // Create initial LiveIn and LiveOut bitsets.
        for _ in 0..self.func.num_blocks() {
            self.liveins.push(IndexSet::new());
            self.liveouts.push(IndexSet::new());
        }

        // Run a worklist algorithm to precisely compute liveins and
        // liveouts.
        let mut workqueue = VecDeque::new();
        let mut workqueue_set = FxHashSet::default();
        // Initialize workqueue with postorder traversal.
        for &block in &self.cfginfo.postorder[..] {
            workqueue.push_back(block);
            workqueue_set.insert(block);
        }

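        // Standard backward liveness dataflow: start from each block's
        // current liveout, apply every instruction's transfer function
        // in reverse (uses set bits, defs clear them), then union the
        // result into each predecessor's liveout, re-enqueueing any
        // predecessor whose set changed, until a fixpoint is reached.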
        while !workqueue.is_empty() {
            let block = workqueue.pop_front().unwrap();
            workqueue_set.remove(&block);
            let insns = self.func.block_insns(block);

            trace!("computing liveins for block{}", block.index());

            self.stats.livein_iterations += 1;

            let mut live = self.liveouts[block.index()].clone();
            trace!(" -> initial liveout set: {:?}", live);

            // Include outgoing blockparams in the initial live set.
            if self.func.is_branch(insns.last()) {
                for i in 0..self.func.block_succs(block).len() {
                    for &param in self.func.branch_blockparams(block, insns.last(), i) {
                        live.set(param.vreg(), true);
                        self.observe_vreg_class(param);
                    }
                }
            }

            for inst in insns.rev().iter() {
                if let Some((src, dst)) = self.func.is_move(inst) {
                    live.set(dst.vreg().vreg(), false);
                    live.set(src.vreg().vreg(), true);
                    self.observe_vreg_class(src.vreg());
                    self.observe_vreg_class(dst.vreg());
                }

                for pos in &[OperandPos::Late, OperandPos::Early] {
                    for op in self.func.inst_operands(inst) {
                        if op.as_fixed_nonallocatable().is_some() {
                            continue;
                        }
                        if op.pos() == *pos {
                            let was_live = live.get(op.vreg().vreg());
                            trace!("op {:?} was_live = {}", op, was_live);
                            match op.kind() {
                                OperandKind::Use => {
                                    live.set(op.vreg().vreg(), true);
                                }
                                OperandKind::Def => {
                                    live.set(op.vreg().vreg(), false);
                                }
                            }
                            self.observe_vreg_class(op.vreg());
                        }
                    }
                }
            }
            for &blockparam in self.func.block_params(block) {
                live.set(blockparam.vreg(), false);
                self.observe_vreg_class(blockparam);
            }

            for &pred in self.func.block_preds(block) {
                if self.liveouts[pred.index()].union_with(&live) {
                    if !workqueue_set.contains(&pred) {
                        workqueue_set.insert(pred);
                        workqueue.push_back(pred);
                    }
                }
            }

            trace!("computed liveins at block{}: {:?}", block.index(), live);
            self.liveins[block.index()] = live;
        }

        // Check that there are no liveins to the entry block.
        if !self.liveins[self.func.entry_block().index()].is_empty() {
            trace!(
                "non-empty liveins to entry block: {:?}",
                self.liveins[self.func.entry_block().index()]
            );
            return Err(RegAllocError::EntryLivein);
        }

        Ok(())
    }

    pub fn build_liveranges(&mut self) {
        for &vreg in self.func.reftype_vregs() {
            self.safepoints_per_vreg.insert(vreg.vreg(), HashSet::new());
        }

        // Create Uses and Defs referring to VRegs, and place the Uses
        // in LiveRanges.
        //
        // We already computed precise liveouts and liveins for every
        // block above, so we don't need to run an iterative algorithm
        // here; instead, every block's computation is purely local,
        // from end to start.

        // Track current LiveRange for each vreg.
        //
        // Invariant: a stale range may be present here; ranges are
        // only valid if `live.get(vreg)` is true.
        let mut vreg_ranges: Vec<LiveRangeIndex> =
            vec![LiveRangeIndex::invalid(); self.func.num_vregs()];

        for i in (0..self.func.num_blocks()).rev() {
            let block = Block::new(i);
            let insns = self.func.block_insns(block);

            self.stats.livein_blocks += 1;

            // Init our local live-in set.
            let mut live = self.liveouts[block.index()].clone();

            // If the last instruction is a branch (rather than
            // return), create blockparam_out entries.
            if self.func.is_branch(insns.last()) {
                for (i, &succ) in self.func.block_succs(block).iter().enumerate() {
                    let blockparams_in = self.func.block_params(succ);
                    let blockparams_out = self.func.branch_blockparams(block, insns.last(), i);
                    for (&blockparam_in, &blockparam_out) in
                        blockparams_in.iter().zip(blockparams_out)
                    {
                        let blockparam_out = VRegIndex::new(blockparam_out.vreg());
                        let blockparam_in = VRegIndex::new(blockparam_in.vreg());
                        self.blockparam_outs.push(BlockparamOut {
                            to_vreg: blockparam_in,
                            to_block: succ,
                            from_block: block,
                            from_vreg: blockparam_out,
                        });

                        // Include outgoing blockparams in the initial live set.
                        live.set(blockparam_out.index(), true);
                    }
                }
            }

            // Initially, registers are assumed live for the whole block.
            for vreg in live.iter() {
                let range = CodeRange {
                    from: self.cfginfo.block_entry[block.index()],
                    to: self.cfginfo.block_exit[block.index()].next(),
                };
                trace!(
                    "vreg {:?} live at end of block --> create range {:?}",
                    VRegIndex::new(vreg),
                    range
                );
                let lr = self.add_liverange_to_vreg(VRegIndex::new(vreg), range);
                vreg_ranges[vreg] = lr;
            }

            // Create vreg data for blockparams.
            for &param in self.func.block_params(block) {
                self.vregs[param.vreg()].blockparam = block;
            }

            // For each instruction, in reverse order, process
            // operands and clobbers.
            for inst in insns.rev().iter() {
                // Mark clobbers with CodeRanges on PRegs.
                for clobber in self.func.inst_clobbers(inst) {
                    // Clobber range is at After point only: an
                    // instruction can still take an input in a reg
                    // that it later clobbers. (In other words, the
                    // clobber is like a normal def that never gets
                    // used.)
                    let range = CodeRange {
                        from: ProgPoint::after(inst),
                        to: ProgPoint::before(inst.next()),
                    };
                    self.add_liverange_to_preg(range, clobber);
                }

                // Does the instruction have any input-reusing
                // outputs? This is important below to establish
                // proper interference wrt other inputs. We note the
                // *vreg* that is reused, not the index.
                let mut reused_input = None;
                for op in self.func.inst_operands(inst) {
                    if let OperandConstraint::Reuse(i) = op.constraint() {
                        debug_assert!(self.func.inst_operands(inst)[i]
                            .as_fixed_nonallocatable()
                            .is_none());
                        reused_input = Some(self.func.inst_operands(inst)[i].vreg());
                        break;
                    }
                }
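                // For example (operands are illustrative): in an
                // instruction with operands [def v3 reuse(1), use v1,
                // use v2], `reused_input` becomes v1; below, v2's
                // Early use is treated as occurring at the After
                // point so that it interferes with the def that
                // overwrites v1's register.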

                // If this is a move, handle specially.
                if let Some((src, dst)) = self.func.is_move(inst) {
                    // We can completely skip the move if it is
                    // trivial (vreg to same vreg).
                    if src.vreg() != dst.vreg() {
                        trace!(" -> move inst{}: src {} -> dst {}", inst.index(), src, dst);

                        debug_assert_eq!(src.class(), dst.class());
                        debug_assert_eq!(src.kind(), OperandKind::Use);
                        debug_assert_eq!(src.pos(), OperandPos::Early);
                        debug_assert_eq!(dst.kind(), OperandKind::Def);
                        debug_assert_eq!(dst.pos(), OperandPos::Late);

                        // Redefine src and dst operands to have
                        // positions of After and Before respectively
                        // (see note below), and to have Any
                        // constraints if they were originally Reg.
                        let src_constraint = match src.constraint() {
                            OperandConstraint::Reg => OperandConstraint::Any,
                            x => x,
                        };
                        let dst_constraint = match dst.constraint() {
                            OperandConstraint::Reg => OperandConstraint::Any,
                            x => x,
                        };
                        let src = Operand::new(
                            src.vreg(),
                            src_constraint,
                            OperandKind::Use,
                            OperandPos::Late,
                        );
                        let dst = Operand::new(
                            dst.vreg(),
                            dst_constraint,
                            OperandKind::Def,
                            OperandPos::Early,
                        );

                        if self.annotations_enabled {
                            self.annotate(
                                ProgPoint::after(inst),
                                format!(
                                    " prog-move v{} ({:?}) -> v{} ({:?})",
                                    src.vreg().vreg(),
                                    src_constraint,
                                    dst.vreg().vreg(),
                                    dst_constraint,
                                ),
                            );
                        }

                        // N.B.: in order to integrate with the move
                        // resolution that joins LRs in general, we
                        // conceptually treat the move as happening
                        // between the move inst's After and the next
                        // inst's Before. Thus the src LR goes up to
                        // (exclusive) next-inst-pre, and the dst LR
                        // starts at next-inst-pre. We have to take
                        // care in our move insertion to handle this
                        // like other inter-inst moves, i.e., at
                        // `Regular` priority, so it properly happens
                        // in parallel with other inter-LR moves.
                        //
                        // Why the progpoint between move and next
                        // inst, and not the progpoint between prev
                        // inst and move? Because a move can be the
                        // first inst in a block, but cannot be the
                        // last; so the following progpoint is always
                        // within the same block, while the previous
                        // one may be an inter-block point (and the
                        // After of the prev inst in a different
                        // block).

                        // Handle the def w.r.t. liveranges: trim the
                        // start of the range and mark it dead at this
                        // point in our backward scan.
                        let pos = ProgPoint::before(inst.next());
                        let mut dst_lr = vreg_ranges[dst.vreg().vreg()];
                        if !live.get(dst.vreg().vreg()) {
                            let from = pos;
                            let to = pos.next();
                            dst_lr = self.add_liverange_to_vreg(
                                VRegIndex::new(dst.vreg().vreg()),
                                CodeRange { from, to },
                            );
                            trace!(" -> invalid LR for def; created {:?}", dst_lr);
                        }
                        trace!(" -> has existing LR {:?}", dst_lr);
                        // Trim the LR to start here.
                        if self.ranges[dst_lr.index()].range.from
                            == self.cfginfo.block_entry[block.index()]
                        {
                            trace!(" -> started at block start; trimming to {:?}", pos);
                            self.ranges[dst_lr.index()].range.from = pos;
                        }
                        self.ranges[dst_lr.index()].set_flag(LiveRangeFlag::StartsAtDef);
                        live.set(dst.vreg().vreg(), false);
                        vreg_ranges[dst.vreg().vreg()] = LiveRangeIndex::invalid();

                        // Handle the use w.r.t. liveranges: make it live
                        // and create an initial LR back to the start of
                        // the block.
                        let pos = ProgPoint::after(inst);
                        let src_lr = if !live.get(src.vreg().vreg()) {
                            let range = CodeRange {
                                from: self.cfginfo.block_entry[block.index()],
                                to: pos.next(),
                            };
                            let src_lr = self
                                .add_liverange_to_vreg(VRegIndex::new(src.vreg().vreg()), range);
                            vreg_ranges[src.vreg().vreg()] = src_lr;
                            src_lr
                        } else {
                            vreg_ranges[src.vreg().vreg()]
                        };

                        trace!(" -> src LR {:?}", src_lr);

                        // Add to live-set.
                        let src_is_dead_after_move = !live.get(src.vreg().vreg());
                        live.set(src.vreg().vreg(), true);

                        // Add to program-moves lists.
                        self.prog_move_srcs.push((
                            (VRegIndex::new(src.vreg().vreg()), inst),
                            Allocation::none(),
                        ));
                        self.prog_move_dsts.push((
                            (VRegIndex::new(dst.vreg().vreg()), inst.next()),
                            Allocation::none(),
                        ));
                        self.stats.prog_moves += 1;
                        if src_is_dead_after_move {
                            self.stats.prog_moves_dead_src += 1;
                            self.prog_move_merges.push((src_lr, dst_lr));
                        }
                    }

                    continue;
                }

                // Preprocess defs and uses. Specifically, if there
                // are any fixed-reg-constrained defs at Late position
                // and fixed-reg-constrained uses at Early position
                // with the same preg, we need to (i) add a fixup move
                // for the use, (ii) rewrite the use to have an Any
                // constraint, and (iii) move the def to Early position
                // to reserve the register for the whole instruction.
                let mut operand_rewrites: FxHashMap<usize, Operand> = FxHashMap::default();
                let mut late_def_fixed: SmallVec<[PReg; 8]> = smallvec![];
                for &operand in self.func.inst_operands(inst) {
                    if let OperandConstraint::FixedReg(preg) = operand.constraint() {
                        match operand.pos() {
                            OperandPos::Late => {
                                // See note in fuzzing/func.rs: we
                                // can't allow this, because there
                                // would be no way to move a value
                                // into place for a late use *after*
                                // the early point (i.e. in the middle
                                // of the instruction).
                                assert!(
                                    operand.kind() == OperandKind::Def,
                                    "Invalid operand: fixed constraint on Use/Mod at Late point"
                                );

                                late_def_fixed.push(preg);
                            }
                            _ => {}
                        }
                    }
                }
                for (i, &operand) in self.func.inst_operands(inst).iter().enumerate() {
                    if operand.as_fixed_nonallocatable().is_some() {
                        continue;
                    }
                    if let OperandConstraint::FixedReg(preg) = operand.constraint() {
                        match operand.pos() {
                            OperandPos::Early if live.get(operand.vreg().vreg()) => {
                                assert!(
                                    operand.kind() == OperandKind::Use,
                                    "Invalid operand: fixed constraint on Def/Mod at Early position"
                                );

                                // If we have a constraint at the
                                // Early point for a fixed preg, and
                                // this preg is also constrained with
                                // a *separate* def at Late or is
                                // clobbered, and *if* the vreg is
                                // live downward, we have to use the
                                // multi-fixed-reg mechanism: record a
                                // fixup and rewrite the use here
                                // without the constraint. See #53.
                                //
                                // We adjust the def liverange and Use
                                // to an "early" position to reserve
                                // the register; it still must not be
                                // used by some other vreg at the
                                // use-site.
                                //
                                // Note that we handle multiple
                                // conflicting constraints for the
                                // same vreg in a separate pass (see
                                // `fixup_multi_fixed_vregs` below).
                                if late_def_fixed.contains(&preg)
                                    || self.func.inst_clobbers(inst).contains(preg)
                                {
                                    log::trace!(
                                        concat!(
                                            "-> operand {:?} is fixed to preg {:?}, ",
                                            "is downward live, and there is also a ",
                                            "def or clobber at this preg"
                                        ),
                                        operand,
                                        preg
                                    );
                                    let pos = ProgPoint::before(inst);
                                    self.multi_fixed_reg_fixups.push(MultiFixedRegFixup {
                                        pos,
                                        from_slot: i as u8,
                                        to_slot: i as u8,
                                        to_preg: PRegIndex::new(preg.index()),
                                        vreg: VRegIndex::new(operand.vreg().vreg()),
                                        level: FixedRegFixupLevel::Initial,
                                    });

                                    // We need to insert a reservation
                                    // at the before-point to reserve
                                    // the reg for the use too.
                                    let range = CodeRange::singleton(pos);
                                    self.add_liverange_to_preg(range, preg);

                                    // Remove the fixed-preg
                                    // constraint from the Use.
                                    operand_rewrites.insert(
                                        i,
                                        Operand::new(
                                            operand.vreg(),
                                            OperandConstraint::Any,
                                            operand.kind(),
                                            operand.pos(),
                                        ),
                                    );
                                }
                            }
                            _ => {}
                        }
                    }
                }

                // Process defs and uses.
                for &cur_pos in &[InstPosition::After, InstPosition::Before] {
                    for i in 0..self.func.inst_operands(inst).len() {
                        // don't borrow `self`
                        let operand = operand_rewrites
                            .get(&i)
                            .cloned()
                            .unwrap_or(self.func.inst_operands(inst)[i]);
                        let pos = match (operand.kind(), operand.pos()) {
                            (OperandKind::Def, OperandPos::Early) => ProgPoint::before(inst),
                            (OperandKind::Def, OperandPos::Late) => ProgPoint::after(inst),
                            (OperandKind::Use, OperandPos::Late) => ProgPoint::after(inst),
                            // If there are any reused inputs in this
                            // instruction, and this is *not* the
                            // reused vreg, force `pos` to
                            // `After`. This ensures that we correctly
                            // account for the interference between
                            // the other inputs and the
                            // input-that-is-reused/output.
                            (OperandKind::Use, OperandPos::Early)
                                if reused_input.is_some()
                                    && reused_input.unwrap() != operand.vreg() =>
                            {
                                ProgPoint::after(inst)
                            }
                            (OperandKind::Use, OperandPos::Early) => ProgPoint::before(inst),
                        };

                        if pos.pos() != cur_pos {
                            continue;
                        }

                        trace!(
                            "processing inst{} operand at {:?}: {:?}",
                            inst.index(),
                            pos,
                            operand
                        );

                        // If this is a "fixed non-allocatable
                        // register" operand, set the alloc
                        // immediately and then ignore the operand
                        // hereafter.
                        if let Some(preg) = operand.as_fixed_nonallocatable() {
                            self.set_alloc(inst, i, Allocation::reg(preg));
                            continue;
                        }

                        match operand.kind() {
                            OperandKind::Def => {
                                trace!("Def of {} at {:?}", operand.vreg(), pos);

                                // Get or create the LiveRange.
                                let mut lr = vreg_ranges[operand.vreg().vreg()];
                                trace!(" -> has existing LR {:?}", lr);
                                // If there was no liverange (dead def), create a trivial one.
                                if !live.get(operand.vreg().vreg()) {
                                    let from = pos;
                                    // We want to span until the
                                    // Before point of the next inst.
                                    // This ensures that early defs
                                    // used for temps on an
                                    // instruction are reserved across
                                    // the whole instruction.
                                    let to = ProgPoint::before(pos.inst().next());
                                    lr = self.add_liverange_to_vreg(
                                        VRegIndex::new(operand.vreg().vreg()),
                                        CodeRange { from, to },
                                    );
                                    trace!(" -> invalid; created {:?}", lr);
                                    vreg_ranges[operand.vreg().vreg()] = lr;
                                    live.set(operand.vreg().vreg(), true);
                                }
                                // Create the use in the LiveRange.
                                self.insert_use_into_liverange(lr, Use::new(operand, pos, i as u8));
                                // If def (not mod), this reg is now dead,
                                // scanning backward; make it so.
                                if operand.kind() == OperandKind::Def {
                                    // Trim the range for this vreg to start
                                    // at `pos` if it previously ended at the
                                    // start of this block (i.e. was not
                                    // merged into some larger LiveRange due
                                    // to out-of-order blocks).
                                    if self.ranges[lr.index()].range.from
                                        == self.cfginfo.block_entry[block.index()]
                                    {
                                        trace!(" -> started at block start; trimming to {:?}", pos);
                                        self.ranges[lr.index()].range.from = pos;
                                    }

                                    self.ranges[lr.index()].set_flag(LiveRangeFlag::StartsAtDef);

                                    // Remove from live-set.
                                    live.set(operand.vreg().vreg(), false);
                                    vreg_ranges[operand.vreg().vreg()] = LiveRangeIndex::invalid();
                                }
                            }
                            OperandKind::Use => {
                                // Create/extend the LiveRange if it
                                // doesn't already exist, and add the use
                                // to the range.
                                let mut lr = vreg_ranges[operand.vreg().vreg()];
                                if !live.get(operand.vreg().vreg()) {
                                    let range = CodeRange {
                                        from: self.cfginfo.block_entry[block.index()],
                                        to: pos.next(),
                                    };
                                    lr = self.add_liverange_to_vreg(
                                        VRegIndex::new(operand.vreg().vreg()),
                                        range,
                                    );
                                    vreg_ranges[operand.vreg().vreg()] = lr;
                                }
                                debug_assert!(lr.is_valid());

                                trace!("Use of {:?} at {:?} -> {:?}", operand, pos, lr,);

                                self.insert_use_into_liverange(lr, Use::new(operand, pos, i as u8));

                                // Add to live-set.
                                live.set(operand.vreg().vreg(), true);
                            }
                        }
                    }
                }

                if self.func.requires_refs_on_stack(inst) {
                    trace!("inst{} is safepoint", inst.index());
                    self.safepoints.push(inst);
                    for vreg in live.iter() {
                        if let Some(safepoints) = self.safepoints_per_vreg.get_mut(&vreg) {
                            trace!("vreg v{} live at safepoint inst{}", vreg, inst.index());
                            safepoints.insert(inst);
                        }
                    }
                }
            }

            // Block parameters define vregs at the very beginning of
            // the block. Remove their live vregs from the live set
            // here.
            for vreg in self.func.block_params(block) {
                if live.get(vreg.vreg()) {
                    live.set(vreg.vreg(), false);
                } else {
                    // Create trivial liverange if blockparam is dead.
                    let start = self.cfginfo.block_entry[block.index()];
                    self.add_liverange_to_vreg(
                        VRegIndex::new(vreg.vreg()),
                        CodeRange {
                            from: start,
                            to: start.next(),
                        },
                    );
                }
                // Add `blockparam_ins` entries.
                let vreg_idx = VRegIndex::new(vreg.vreg());
                for &pred in self.func.block_preds(block) {
                    self.blockparam_ins.push(BlockparamIn {
                        to_vreg: vreg_idx,
                        to_block: block,
                        from_block: pred,
                    });
                }
            }
        }

        self.safepoints.sort_unstable();

        // Make ranges in each vreg and uses in each range appear in
        // sorted order. We built them in reverse order above, so this
        // is a simple reversal, *not* a full sort.
        //
        // The ordering invariant is always maintained for uses and
        // always for ranges in bundles (which are initialized later),
        // but not always for ranges in vregs; those are sorted only
        // when needed, here and then again at the end of allocation
        // when resolving moves.

        for vreg in &mut self.vregs {
            vreg.ranges.reverse();
            let mut last = None;
            for entry in &mut vreg.ranges {
                // Ranges may have been truncated above at defs. We
                // need to update with the final range here.
                entry.range = self.ranges[entry.index.index()].range;
                // Assert in-order and non-overlapping.
                debug_assert!(last.is_none() || last.unwrap() <= entry.range.from);
                last = Some(entry.range.to);
            }
        }

        for range in 0..self.ranges.len() {
            self.ranges[range].uses.reverse();
            debug_assert!(self.ranges[range]
                .uses
                .windows(2)
                .all(|win| win[0].pos <= win[1].pos));
        }

        // Insert safepoint virtual stack uses, if needed.
        for &vreg in self.func.reftype_vregs() {
            let vreg = VRegIndex::new(vreg.vreg());
            let mut inserted = false;
            let mut safepoint_idx = 0;
            for range_idx in 0..self.vregs[vreg.index()].ranges.len() {
                let LiveRangeListEntry { range, index } =
                    self.vregs[vreg.index()].ranges[range_idx];
                while safepoint_idx < self.safepoints.len()
                    && ProgPoint::before(self.safepoints[safepoint_idx]) < range.from
                {
                    safepoint_idx += 1;
                }
                while safepoint_idx < self.safepoints.len()
                    && range.contains_point(ProgPoint::before(self.safepoints[safepoint_idx]))
                {
                    // Create a virtual use.
                    let pos = ProgPoint::before(self.safepoints[safepoint_idx]);
                    let operand = Operand::new(
                        self.vreg(vreg),
                        OperandConstraint::Stack,
                        OperandKind::Use,
                        OperandPos::Early,
                    );

                    trace!(
                        "Safepoint-induced stack use of {:?} at {:?} -> {:?}",
                        operand,
                        pos,
                        index,
                    );

                    self.insert_use_into_liverange(index, Use::new(operand, pos, SLOT_NONE));
                    safepoint_idx += 1;

                    inserted = true;
                }

                if inserted {
                    self.ranges[index.index()]
                        .uses
                        .sort_unstable_by_key(|u| u.pos);
                }

                if safepoint_idx >= self.safepoints.len() {
                    break;
                }
            }
        }

        self.blockparam_ins.sort_unstable_by_key(|x| x.key());
        self.blockparam_outs.sort_unstable_by_key(|x| x.key());
        self.prog_move_srcs.sort_unstable_by_key(|(pos, _)| *pos);
        self.prog_move_dsts.sort_unstable_by_key(|(pos, _)| *pos);

        trace!("prog_move_srcs = {:?}", self.prog_move_srcs);
        trace!("prog_move_dsts = {:?}", self.prog_move_dsts);

        self.stats.initial_liverange_count = self.ranges.len();
        self.stats.blockparam_ins_count = self.blockparam_ins.len();
        self.stats.blockparam_outs_count = self.blockparam_outs.len();
    }

    pub fn fixup_multi_fixed_vregs(&mut self) {
        // Do a fixed-reg cleanup pass: if there are any LiveRanges with
        // multiple uses (or defs) at the same ProgPoint and there is
        // more than one FixedReg constraint at that ProgPoint, we
        // need to record all but one of them in a special fixup list
        // and handle them later; otherwise, bundle-splitting to
        // create minimal bundles becomes much more complex (we would
        // have to split the multiple uses at the same progpoint into
        // different bundles, which breaks invariants related to
        // disjoint ranges and bundles).
        let mut extra_clobbers: SmallVec<[(PReg, ProgPoint); 8]> = smallvec![];
        for vreg in 0..self.vregs.len() {
            for range_idx in 0..self.vregs[vreg].ranges.len() {
                let entry = self.vregs[vreg].ranges[range_idx];
                let range = entry.index;
                trace!(
                    "multi-fixed-reg cleanup: vreg {:?} range {:?}",
                    VRegIndex::new(vreg),
                    range,
                );

                // Find groups of uses that occur at the same program point.
                for uses in self.ranges[range.index()]
                    .uses
                    .linear_group_by_key_mut(|u| u.pos)
                {
                    if uses.len() < 2 {
                        continue;
                    }

                    // Search for conflicting constraints in the uses.
                    let mut requires_reg = false;
                    let mut num_fixed_reg = 0;
                    let mut num_fixed_stack = 0;
                    let mut first_reg_slot = None;
                    let mut first_stack_slot = None;
                    for u in uses.iter() {
                        match u.operand.constraint() {
                            OperandConstraint::Any => {
                                first_reg_slot.get_or_insert(u.slot);
                                first_stack_slot.get_or_insert(u.slot);
                            }
                            OperandConstraint::Reg | OperandConstraint::Reuse(_) => {
                                first_reg_slot.get_or_insert(u.slot);
                                requires_reg = true;
                            }
                            OperandConstraint::FixedReg(preg) => {
                                if self.pregs[preg.index()].is_stack {
                                    num_fixed_stack += 1;
                                    first_stack_slot.get_or_insert(u.slot);
                                } else {
                                    requires_reg = true;
                                    num_fixed_reg += 1;
                                    first_reg_slot.get_or_insert(u.slot);
                                }
                            }
                            // Maybe this could be supported in the future...
                            OperandConstraint::Stack => panic!(
                                "multiple uses of vreg with a Stack constraint are not supported"
                            ),
                        }
                    }

                    // Fast path if there are no conflicts.
                    if num_fixed_reg + num_fixed_stack <= 1
                        && !(requires_reg && num_fixed_stack != 0)
                    {
                        continue;
                    }

                    // We pick one constraint (in order: FixedReg, Reg, FixedStack)
                    // and then rewrite any incompatible constraints to Any.
                    // This allows register allocation to succeed and we will
                    // later insert moves to satisfy the rewritten constraints.
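                    // For example (illustrative): if the uses at one
                    // point are {FixedReg(p0), FixedReg(p1), Reg},
                    // the first fixed use (p0) keeps its constraint,
                    // the p1 use is rewritten to Any with a Secondary
                    // fixup recorded (and p1 noted as an extra
                    // clobber), and the plain Reg use is left alone.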
                    let source_slot = if requires_reg {
                        first_reg_slot.unwrap()
                    } else {
                        first_stack_slot.unwrap()
                    };
                    let mut first_preg = None;
                    for u in uses.iter_mut() {
                        if let OperandConstraint::FixedReg(preg) = u.operand.constraint() {
                            let vreg_idx = VRegIndex::new(u.operand.vreg().vreg());
                            let preg_idx = PRegIndex::new(preg.index());
                            trace!(
                                "at pos {:?}, vreg {:?} has fixed constraint to preg {:?}",
                                u.pos,
                                vreg_idx,
                                preg_idx
                            );

                            // FixedStack is incompatible if there are any
                            // Reg/FixedReg constraints. FixedReg is
                            // incompatible if there already is a different
                            // FixedReg constraint. If either condition is true,
                            // we edit the constraint below; otherwise, we can
                            // skip this edit.
                            if !(requires_reg && self.pregs[preg.index()].is_stack)
                                && *first_preg.get_or_insert(preg) == preg
                            {
                                continue;
                            }

                            trace!(" -> duplicate; switching to constraint Any");
                            self.multi_fixed_reg_fixups.push(MultiFixedRegFixup {
                                pos: u.pos,
                                from_slot: source_slot,
                                to_slot: u.slot,
                                to_preg: preg_idx,
                                vreg: vreg_idx,
                                level: FixedRegFixupLevel::Secondary,
                            });
                            u.operand = Operand::new(
                                u.operand.vreg(),
                                OperandConstraint::Any,
                                u.operand.kind(),
                                u.operand.pos(),
                            );
                            trace!(" -> extra clobber {} at inst{}", preg, u.pos.inst().index());
                            extra_clobbers.push((preg, u.pos));
                        }
                    }
                }

                for &(clobber, pos) in &extra_clobbers {
                    let range = CodeRange {
                        from: pos,
                        to: pos.next(),
                    };
                    self.add_liverange_to_preg(range, clobber);
                }

                extra_clobbers.clear();
            }
        }
    }
}