// cranelift_codegen/simple_gvn.rs

//! A simple GVN pass.
//!
//! Removes an instruction when an identical instruction (same
//! `InstructionData` and controlling type) dominates it, replacing the
//! redundant instruction's results with aliases to the dominating one's.

use crate::cursor::{Cursor, FuncCursor};
use crate::dominator_tree::DominatorTree;
use crate::ir::{Function, Inst, InstructionData, Opcode, Type};
use crate::scoped_hash_map::ScopedHashMap;
use crate::timing;
use alloc::vec::Vec;
use core::cell::{Ref, RefCell};
use core::hash::{Hash, Hasher};
12/// Test whether the given opcode is unsafe to even consider for GVN.
13fn trivially_unsafe_for_gvn(opcode: Opcode) -> bool {
14    opcode.is_call()
15        || opcode.is_branch()
16        || opcode.is_terminator()
17        || opcode.is_return()
18        || opcode.can_store()
19        || (opcode.can_trap() && !opcode.side_effects_idempotent())
20        || (opcode.other_side_effects() && !opcode.side_effects_idempotent())
21}
22
23/// Test that, if the specified instruction is a load, it doesn't have the `readonly` memflag.
24fn is_load_and_not_readonly(inst_data: &InstructionData) -> bool {
25    match *inst_data {
26        InstructionData::Load { flags, .. } => !flags.readonly(),
27        _ => inst_data.opcode().can_load(),
28    }
29}
30
/// Wrapper around `InstructionData` which implements `Eq` and `Hash`
#[derive(Clone)]
struct HashKey<'a, 'f: 'a> {
    // The instruction data being keyed on.
    inst: InstructionData,
    // Controlling type variable, hashed/compared alongside `inst` so that
    // otherwise-identical instructions of different types don't collide.
    ty: Type,
    // Shared cursor over the function; `Hash`/`PartialEq` borrow it to
    // reach the value-list pool required to compare `InstructionData`.
    pos: &'a RefCell<FuncCursor<'f>>,
}
38impl<'a, 'f: 'a> Hash for HashKey<'a, 'f> {
39    fn hash<H: Hasher>(&self, state: &mut H) {
40        let pool = &self.pos.borrow().func.dfg.value_lists;
41        self.inst.hash(state, pool, |value| value);
42        self.ty.hash(state);
43    }
44}
45impl<'a, 'f: 'a> PartialEq for HashKey<'a, 'f> {
46    fn eq(&self, other: &Self) -> bool {
47        let pool = &self.pos.borrow().func.dfg.value_lists;
48        self.inst.eq(&other.inst, pool, |value| value) && self.ty == other.ty
49    }
50}
// `PartialEq` above is a total equivalence relation, so `Eq` is just a marker.
impl<'a, 'f: 'a> Eq for HashKey<'a, 'f> {}
52
/// Perform simple GVN on `func`.
///
/// Visits the blocks in reverse post-order over `domtree` while maintaining a
/// scoped hash map of the instructions currently visible (i.e. dominating the
/// program point). An instruction whose key is already present is redundant:
/// its results are rewritten as aliases of the dominating instruction's
/// results and it is removed.
pub fn do_simple_gvn(func: &mut Function, domtree: &mut DominatorTree) {
    let _tt = timing::gvn();
    debug_assert!(domtree.is_valid());

    // Visit blocks in a reverse post-order.
    //
    // The RefCell here is a bit ugly since the HashKeys in the ScopedHashMap
    // need a reference to the function.
    let pos = RefCell::new(FuncCursor::new(func));

    // Instructions visible at the current point, scoped so entries inserted
    // inside a dominated region are discarded when we leave it.
    let mut visible_values: ScopedHashMap<HashKey, Inst> = ScopedHashMap::new();
    // Each open scope is represented by an instruction inside it; a block is
    // still within the scope iff that representative dominates the block.
    let mut scope_stack: Vec<Inst> = Vec::new();

    for &block in domtree.cfg_postorder().iter().rev() {
        {
            // Pop any scopes that we just exited.
            let layout = &pos.borrow().func.layout;
            loop {
                if let Some(current) = scope_stack.last() {
                    if domtree.dominates(*current, block, layout) {
                        break;
                    }
                } else {
                    break;
                }
                scope_stack.pop();
                visible_values.decrement_depth();
            }

            // Push a scope for the current block, represented by its first
            // instruction.
            scope_stack.push(layout.first_inst(block).unwrap());
            visible_values.increment_depth();
        }

        pos.borrow_mut().goto_top(block);
        // The borrow is confined to the condition block so the loop body can
        // re-borrow `pos` as needed.
        while let Some(inst) = {
            let mut pos = pos.borrow_mut();
            pos.next_inst()
        } {
            // Resolve aliases, particularly aliases we created earlier.
            pos.borrow_mut().func.dfg.resolve_aliases_in_arguments(inst);

            // Hold a shared borrow of the function for the read-only work
            // below; it is dropped before any mutation.
            let func = Ref::map(pos.borrow(), |pos| &pos.func);

            let opcode = func.dfg.insts[inst].opcode();

            // A non-terminator branch opens a nested scope for the rest of
            // the block, represented by the instruction following the branch.
            if opcode.is_branch() && !opcode.is_terminator() {
                scope_stack.push(func.layout.next_inst(inst).unwrap());
                visible_values.increment_depth();
            }

            if trivially_unsafe_for_gvn(opcode) {
                continue;
            }

            // These are split up to separate concerns.
            if is_load_and_not_readonly(&func.dfg.insts[inst]) {
                continue;
            }

            let ctrl_typevar = func.dfg.ctrl_typevar(inst);
            let key = HashKey {
                inst: func.dfg.insts[inst],
                ty: ctrl_typevar,
                pos: &pos,
            };
            use crate::scoped_hash_map::Entry::*;
            match visible_values.entry(key) {
                Occupied(entry) => {
                    #[allow(clippy::debug_assert_with_mut_call)]
                    {
                        // Clippy incorrectly believes `&func.layout` should not be used here:
                        // https://github.com/rust-lang/rust-clippy/issues/4737
                        debug_assert!(domtree.dominates(*entry.get(), inst, &func.layout));
                    }

                    // If the redundant instruction is representing the current
                    // scope, pick a new representative.
                    let old = scope_stack.last_mut().unwrap();
                    if *old == inst {
                        *old = func.layout.next_inst(inst).unwrap();
                    }
                    // Replace the redundant instruction and remove it.
                    // The shared borrow must end before mutably borrowing.
                    drop(func);
                    let mut pos = pos.borrow_mut();
                    pos.func.dfg.replace_with_aliases(inst, *entry.get());
                    pos.remove_inst_and_step_back();
                }
                Vacant(entry) => {
                    entry.insert(inst);
                }
            }
        }
    }
}