// cranelift_codegen/machinst/isle.rs

1use crate::ir::{BlockCall, Value, ValueList};
2use alloc::boxed::Box;
3use alloc::vec::Vec;
4use smallvec::SmallVec;
5use std::cell::Cell;
6
7pub use super::MachLabel;
8use super::RetPair;
9pub use crate::ir::{
10    condcodes, condcodes::CondCode, dynamic_to_fixed, ArgumentExtension, ArgumentPurpose, Constant,
11    DynamicStackSlot, ExternalName, FuncRef, GlobalValue, Immediate, SigRef, StackSlot,
12};
13pub use crate::isa::unwind::UnwindInst;
14pub use crate::isa::TargetIsa;
15pub use crate::machinst::{
16    ABIArg, ABIArgSlot, InputSourceInst, Lower, LowerBackend, RealReg, Reg, RelocDistance, Sig,
17    VCodeInst, Writable,
18};
19pub use crate::settings::{OptLevel, TlsModel};
20
/// The unit type, for terms that produce no value.
pub type Unit = ();
/// A `ValueList` plus a starting offset into it.
pub type ValueSlice = (ValueList, usize);
pub type ValueArray2 = [Value; 2];
pub type ValueArray3 = [Value; 3];
pub type BlockArray2 = [BlockCall; 2];
/// A machine register that may be written.
pub type WritableReg = Writable<Reg>;
pub type VecRetPair = Vec<RetPair>;
/// A per-byte mask for vector lanes (16 bytes for a 128-bit vector).
pub type VecMask = Vec<u8>;
/// One or more machine registers holding a single SSA value.
pub type ValueRegs = crate::machinst::ValueRegs<Reg>;
pub type WritableValueRegs = crate::machinst::ValueRegs<WritableReg>;
/// The result registers of a lowered instruction; inline storage for the
/// common zero-to-two-result case.
pub type InstOutput = SmallVec<[ValueRegs; 2]>;
/// Interior-mutable builder used to accumulate an `InstOutput`.
pub type InstOutputBuilder = Cell<InstOutput>;
pub type BoxExternalName = Box<ExternalName>;
/// A pair of indices describing a range.
pub type Range = (usize, usize);
35
/// A decomposed view of a `Range`: either empty, or the first index plus
/// the remaining range.
pub enum RangeView {
    Empty,
    NonEmpty { index: usize, rest: Range },
}
40
/// Helper macro to define methods in `prelude.isle` within `impl Context for
/// ...` for each backend. These methods are shared amongst all backends.
#[macro_export]
#[doc(hidden)]
macro_rules! isle_lower_prelude_methods {
    () => {
        isle_common_prelude_methods!();

        /// Look up the IR type of an SSA value in the function's DFG.
        #[inline]
        fn value_type(&mut self, val: Value) -> Type {
            self.lower_ctx.dfg().value_type(val)
        }

        /// Wrap a single register in a `ValueRegs`.
        #[inline]
        fn value_reg(&mut self, reg: Reg) -> ValueRegs {
            ValueRegs::one(reg)
        }

        /// Build a two-register `ValueRegs`.
        #[inline]
        fn value_regs(&mut self, r1: Reg, r2: Reg) -> ValueRegs {
            ValueRegs::two(r1, r2)
        }

        /// The invalid-sentinel `ValueRegs`.
        #[inline]
        fn value_regs_invalid(&mut self) -> ValueRegs {
            ValueRegs::invalid()
        }

        /// An empty instruction output (zero results).
        #[inline]
        fn output_none(&mut self) -> InstOutput {
            smallvec::smallvec![]
        }

        /// An instruction output with a single result.
        #[inline]
        fn output(&mut self, regs: ValueRegs) -> InstOutput {
            smallvec::smallvec![regs]
        }

        /// An instruction output with two results.
        #[inline]
        fn output_pair(&mut self, r1: ValueRegs, r2: ValueRegs) -> InstOutput {
            smallvec::smallvec![r1, r2]
        }

        /// Create an empty `InstOutput` builder.
        #[inline]
        fn output_builder_new(&mut self) -> InstOutputBuilder {
            std::cell::Cell::new(InstOutput::new())
        }

        /// Append `regs` to the builder. The `take`/`set` dance is required
        /// because the builder is held behind a shared reference via `Cell`.
        #[inline]
        fn output_builder_push(&mut self, builder: &InstOutputBuilder, regs: ValueRegs) -> Unit {
            let mut vec = builder.take();
            vec.push(regs);
            builder.set(vec);
        }

        /// Consume the builder, returning the accumulated `InstOutput`.
        #[inline]
        fn output_builder_finish(&mut self, builder: &InstOutputBuilder) -> InstOutput {
            builder.take()
        }

        /// Allocate a fresh temporary register of type `ty`. Panics if `ty`
        /// requires more than one register.
        #[inline]
        fn temp_writable_reg(&mut self, ty: Type) -> WritableReg {
            let value_regs = self.lower_ctx.alloc_tmp(ty);
            value_regs.only_reg().unwrap()
        }

        /// Whether `reg` is a real register value (not the invalid sentinel).
        #[inline]
        fn is_valid_reg(&mut self, reg: Reg) -> bool {
            use crate::machinst::valueregs::InvalidSentinel;
            !reg.is_invalid_sentinel()
        }

        /// The invalid sentinel register value.
        #[inline]
        fn invalid_reg(&mut self) -> Reg {
            use crate::machinst::valueregs::InvalidSentinel;
            Reg::invalid_sentinel()
        }

        /// Record one additional lowered use of `val`.
        #[inline]
        fn mark_value_used(&mut self, val: Value) {
            self.lower_ctx.increment_lowered_uses(val);
        }

        /// Put `val` into a single register. Panics if it needs more than one.
        #[inline]
        fn put_in_reg(&mut self, val: Value) -> Reg {
            self.put_in_regs(val).only_reg().unwrap()
        }

        /// Put `val` into one or more registers, rematerializing constants at
        /// each use when egraph-based optimization is not in effect.
        #[inline]
        fn put_in_regs(&mut self, val: Value) -> ValueRegs {
            // If the value is a constant, then (re)materialize it at each
            // use. This lowers register pressure. (Only do this if we are
            // not using egraph-based compilation; the egraph framework
            // more efficiently rematerializes constants where needed.)
            if !(self.backend.flags().use_egraphs()
                && self.backend.flags().opt_level() != OptLevel::None)
            {
                let inputs = self.lower_ctx.get_value_as_source_or_const(val);
                if inputs.constant.is_some() {
                    // Only rematerialize when `val` is the defining
                    // instruction's first (index-0) result.
                    let insn = match inputs.inst {
                        InputSourceInst::UniqueUse(insn, 0) => Some(insn),
                        InputSourceInst::Use(insn, 0) => Some(insn),
                        _ => None,
                    };
                    if let Some(insn) = insn {
                        if let Some(regs) = self.backend.lower(self.lower_ctx, insn) {
                            assert!(regs.len() == 1);
                            return regs[0];
                        }
                    }
                }
            }

            self.lower_ctx.put_value_in_regs(val)
        }

        /// Ensure `reg` is a virtual register usable by later lowering.
        #[inline]
        fn ensure_in_vreg(&mut self, reg: Reg, ty: Type) -> Reg {
            self.lower_ctx.ensure_in_vreg(reg, ty)
        }

        /// The `i`th register of `regs`. Panics if out of range.
        #[inline]
        fn value_regs_get(&mut self, regs: ValueRegs, i: usize) -> Reg {
            regs.regs()[i]
        }

        /// The number of registers in `regs`.
        #[inline]
        fn value_regs_len(&mut self, regs: ValueRegs) -> usize {
            regs.regs().len()
        }

        /// View an entire `ValueList` as a slice starting at offset 0.
        #[inline]
        fn value_list_slice(&mut self, list: ValueList) -> ValueSlice {
            (list, 0)
        }

        /// Match only an empty value slice (offset at or past the end).
        #[inline]
        fn value_slice_empty(&mut self, slice: ValueSlice) -> Option<()> {
            let (list, off) = slice;
            if off >= list.len(&self.lower_ctx.dfg().value_lists) {
                Some(())
            } else {
                None
            }
        }

        /// Split a non-empty value slice into its head value and tail slice.
        #[inline]
        fn value_slice_unwrap(&mut self, slice: ValueSlice) -> Option<(Value, ValueSlice)> {
            let (list, off) = slice;
            if let Some(val) = list.get(off, &self.lower_ctx.dfg().value_lists) {
                Some((val, (list, off + 1)))
            } else {
                None
            }
        }

        /// The number of values remaining in the slice.
        #[inline]
        fn value_slice_len(&mut self, slice: ValueSlice) -> usize {
            let (list, off) = slice;
            list.len(&self.lower_ctx.dfg().value_lists) - off
        }

        /// The `idx`th value of the slice. Panics if out of range.
        #[inline]
        fn value_slice_get(&mut self, slice: ValueSlice, idx: usize) -> Value {
            let (list, off) = slice;
            list.get(off + idx, &self.lower_ctx.dfg().value_lists)
                .unwrap()
        }

        /// Convert a writable register to its underlying read-only `Reg`.
        #[inline]
        fn writable_reg_to_reg(&mut self, r: WritableReg) -> Reg {
            r.to_reg()
        }

        /// All results of `inst`, as a value slice.
        #[inline]
        fn inst_results(&mut self, inst: Inst) -> ValueSlice {
            (self.lower_ctx.dfg().inst_results_list(inst), 0)
        }

        /// The first result of `inst`, if it has any results.
        #[inline]
        fn first_result(&mut self, inst: Inst) -> Option<Value> {
            self.lower_ctx.dfg().inst_results(inst).first().copied()
        }

        /// The `InstructionData` payload for `inst`.
        #[inline]
        fn inst_data(&mut self, inst: Inst) -> InstructionData {
            self.lower_ctx.dfg().insts[inst]
        }

        /// The instruction defining `val`, if `val` is an instruction result
        /// (as opposed to, e.g., a block parameter).
        #[inline]
        fn def_inst(&mut self, val: Value) -> Option<Inst> {
            self.lower_ctx.dfg().value_def(val).inst()
        }

        /// Return `value` back if it is a known-zero constant — an all-zero
        /// `vconst`, a zero immediate, or a `splat` of such — else `None`.
        fn zero_value(&mut self, value: Value) -> Option<Value> {
            let insn = self.def_inst(value);
            if insn.is_some() {
                let insn = insn.unwrap();
                let inst_data = self.lower_ctx.data(insn);
                match inst_data {
                    // A splat is zero iff the splatted value is zero; recurse.
                    InstructionData::Unary {
                        opcode: Opcode::Splat,
                        arg,
                    } => {
                        let arg = arg.clone();
                        return self.zero_value(arg);
                    }
                    InstructionData::UnaryConst {
                        opcode: Opcode::Vconst,
                        constant_handle,
                    } => {
                        // A vconst is zero iff every byte of its constant
                        // pool data is zero.
                        let constant_data =
                            self.lower_ctx.get_constant_data(*constant_handle).clone();
                        if constant_data.into_vec().iter().any(|&x| x != 0) {
                            return None;
                        } else {
                            return Some(value);
                        }
                    }
                    InstructionData::UnaryImm { imm, .. } => {
                        if imm.bits() == 0 {
                            return Some(value);
                        } else {
                            return None;
                        }
                    }
                    // Float cases compare the raw bit pattern, so `-0.0`
                    // (sign bit set) does not match.
                    InstructionData::UnaryIeee32 { imm, .. } => {
                        if imm.bits() == 0 {
                            return Some(value);
                        } else {
                            return None;
                        }
                    }
                    InstructionData::UnaryIeee64 { imm, .. } => {
                        if imm.bits() == 0 {
                            return Some(value);
                        } else {
                            return None;
                        }
                    }
                    _ => None,
                }
            } else {
                None
            }
        }

        /// The configured TLS model (the `Type` argument is unused).
        #[inline]
        fn tls_model(&mut self, _: Type) -> TlsModel {
            self.backend.flags().tls_model()
        }

        /// Match only when the TLS model is ELF general-dynamic.
        #[inline]
        fn tls_model_is_elf_gd(&mut self) -> Option<()> {
            if self.backend.flags().tls_model() == TlsModel::ElfGd {
                Some(())
            } else {
                None
            }
        }

        /// Match only when the TLS model is Mach-O.
        #[inline]
        fn tls_model_is_macho(&mut self) -> Option<()> {
            if self.backend.flags().tls_model() == TlsModel::Macho {
                Some(())
            } else {
                None
            }
        }

        /// Match only when the TLS model is COFF.
        #[inline]
        fn tls_model_is_coff(&mut self) -> Option<()> {
            if self.backend.flags().tls_model() == TlsModel::Coff {
                Some(())
            } else {
                None
            }
        }

        /// Match only when the `preserve_frame_pointers` flag is set.
        #[inline]
        fn preserve_frame_pointers(&mut self) -> Option<()> {
            if self.backend.flags().preserve_frame_pointers() {
                Some(())
            } else {
                None
            }
        }

        /// The signature, name, and relocation distance of an external
        /// function reference.
        #[inline]
        fn func_ref_data(&mut self, func_ref: FuncRef) -> (SigRef, ExternalName, RelocDistance) {
            let funcdata = &self.lower_ctx.dfg().ext_funcs[func_ref];
            (
                funcdata.signature,
                funcdata.name.clone(),
                funcdata.reloc_distance(),
            )
        }

        /// Box an `ExternalName`.
        #[inline]
        fn box_external_name(&mut self, extname: ExternalName) -> BoxExternalName {
            Box::new(extname)
        }

        /// The name, relocation distance, and offset of a symbolic global
        /// value, if `global_value` refers to one.
        #[inline]
        fn symbol_value_data(
            &mut self,
            global_value: GlobalValue,
        ) -> Option<(ExternalName, RelocDistance, i64)> {
            let (name, reloc, offset) = self.lower_ctx.symbol_value_data(global_value)?;
            Some((name.clone(), reloc, offset))
        }

        /// Match only relocations known to be near.
        #[inline]
        fn reloc_distance_near(&mut self, dist: RelocDistance) -> Option<()> {
            if dist == RelocDistance::Near {
                Some(())
            } else {
                None
            }
        }

        /// Read an immediate as a little-endian `u128`; `None` unless it is
        /// exactly 16 bytes.
        #[inline]
        fn u128_from_immediate(&mut self, imm: Immediate) -> Option<u128> {
            let bytes = self.lower_ctx.get_immediate_data(imm).as_slice();
            Some(u128::from_le_bytes(bytes.try_into().ok()?))
        }

        /// A 16-byte immediate as a per-byte mask; `None` otherwise.
        #[inline]
        fn vec_mask_from_immediate(&mut self, imm: Immediate) -> Option<VecMask> {
            let data = self.lower_ctx.get_immediate_data(imm);
            if data.len() == 16 {
                Some(Vec::from(data.as_slice()))
            } else {
                None
            }
        }

        /// Read a constant as a little-endian `u64`; `None` unless it is
        /// exactly 8 bytes.
        #[inline]
        fn u64_from_constant(&mut self, constant: Constant) -> Option<u64> {
            let bytes = self.lower_ctx.get_constant_data(constant).as_slice();
            Some(u64::from_le_bytes(bytes.try_into().ok()?))
        }

        /// Read a constant as a little-endian `u128`; `None` unless it is
        /// exactly 16 bytes.
        #[inline]
        fn u128_from_constant(&mut self, constant: Constant) -> Option<u128> {
            let bytes = self.lower_ctx.get_constant_data(constant).as_slice();
            Some(u128::from_le_bytes(bytes.try_into().ok()?))
        }

        /// Install a `u64` (little-endian) into the VCode constant pool.
        #[inline]
        fn emit_u64_le_const(&mut self, value: u64) -> VCodeConstant {
            let data = VCodeConstantData::U64(value.to_le_bytes());
            self.lower_ctx.use_constant(data)
        }

        /// Install a `u128` (little-endian) into the VCode constant pool.
        #[inline]
        fn emit_u128_le_const(&mut self, value: u128) -> VCodeConstant {
            let data = VCodeConstantData::Generated(value.to_le_bytes().as_slice().into());
            self.lower_ctx.use_constant(data)
        }

        /// Reference an IR constant-pool constant from the VCode constant
        /// pool.
        #[inline]
        fn const_to_vconst(&mut self, constant: Constant) -> VCodeConstant {
            self.lower_ctx.use_constant(VCodeConstantData::Pool(
                constant,
                self.lower_ctx.get_constant_data(constant).clone(),
            ))
        }

        /// The single register of `regs`, if it has exactly one.
        fn only_writable_reg(&mut self, regs: WritableValueRegs) -> Option<WritableReg> {
            regs.only_reg()
        }

        /// The `idx`th writable register. Panics if out of range.
        fn writable_regs_get(&mut self, regs: WritableValueRegs, idx: usize) -> WritableReg {
            regs.regs()[idx]
        }

        /// The number of ABI arguments of signature `abi`.
        fn abi_num_args(&mut self, abi: Sig) -> usize {
            self.lower_ctx.sigs().num_args(abi)
        }

        /// The `idx`th ABI argument of signature `abi`.
        fn abi_get_arg(&mut self, abi: Sig, idx: usize) -> ABIArg {
            self.lower_ctx.sigs().get_arg(abi, idx)
        }

        /// The number of ABI returns of signature `abi`.
        fn abi_num_rets(&mut self, abi: Sig) -> usize {
            self.lower_ctx.sigs().num_rets(abi)
        }

        /// The `idx`th ABI return of signature `abi`.
        fn abi_get_ret(&mut self, abi: Sig, idx: usize) -> ABIArg {
            self.lower_ctx.sigs().get_ret(abi, idx)
        }

        /// The return-area pointer argument, if the signature has one.
        fn abi_ret_arg(&mut self, abi: Sig) -> Option<ABIArg> {
            self.lower_ctx.sigs().get_ret_arg(abi)
        }

        /// Match only signatures with no return-area pointer argument.
        fn abi_no_ret_arg(&mut self, abi: Sig) -> Option<()> {
            if let Some(_) = self.lower_ctx.sigs().get_ret_arg(abi) {
                None
            } else {
                Some(())
            }
        }

        /// Bytes of stack space used for sized stack arguments.
        fn abi_sized_stack_arg_space(&mut self, abi: Sig) -> i64 {
            self.lower_ctx.sigs()[abi].sized_stack_arg_space()
        }

        /// Bytes of stack space used for sized stack return values.
        fn abi_sized_stack_ret_space(&mut self, abi: Sig) -> i64 {
            self.lower_ctx.sigs()[abi].sized_stack_ret_space()
        }

        /// The argument's single slot, if it occupies exactly one.
        fn abi_arg_only_slot(&mut self, arg: &ABIArg) -> Option<ABIArgSlot> {
            match arg {
                &ABIArg::Slots { ref slots, .. } => {
                    if slots.len() == 1 {
                        Some(slots[0])
                    } else {
                        None
                    }
                }
                _ => None,
            }
        }

        /// The pointer slot, offset, and size of a struct argument passed by
        /// pointer; `None` for other argument kinds or in-memory structs.
        fn abi_arg_struct_pointer(&mut self, arg: &ABIArg) -> Option<(ABIArgSlot, i64, u64)> {
            match arg {
                &ABIArg::StructArg {
                    pointer,
                    offset,
                    size,
                    ..
                } => {
                    if let Some(pointer) = pointer {
                        Some((pointer, offset, size))
                    } else {
                        None
                    }
                }
                _ => None,
            }
        }

        /// The pointer slot, offset, and type of an implicit-pointer
        /// argument; `None` for other argument kinds.
        fn abi_arg_implicit_pointer(&mut self, arg: &ABIArg) -> Option<(ABIArgSlot, i64, Type)> {
            match arg {
                &ABIArg::ImplicitPtrArg {
                    pointer,
                    offset,
                    ty,
                    ..
                } => Some((pointer, offset, ty)),
                _ => None,
            }
        }

        /// An instruction computing the address of a sized stack slot (plus
        /// `offset`) into `dst`. Panics if `offset` is negative.
        fn abi_stackslot_addr(
            &mut self,
            dst: WritableReg,
            stack_slot: StackSlot,
            offset: Offset32,
        ) -> MInst {
            let offset = u32::try_from(i32::from(offset)).unwrap();
            self.lower_ctx
                .abi()
                .sized_stackslot_addr(stack_slot, offset, dst)
        }

        /// An instruction computing the address of a dynamic stack slot into
        /// `dst`. Asserts that the slot has a valid offset.
        fn abi_dynamic_stackslot_addr(
            &mut self,
            dst: WritableReg,
            stack_slot: DynamicStackSlot,
        ) -> MInst {
            assert!(self
                .lower_ctx
                .abi()
                .dynamic_stackslot_offsets()
                .is_valid(stack_slot));
            self.lower_ctx.abi().dynamic_stackslot_addr(stack_slot, dst)
        }

        /// Convert a real (physical) register into a `Reg`.
        fn real_reg_to_reg(&mut self, reg: RealReg) -> Reg {
            Reg::from(reg)
        }

        /// Convert a real (physical) register into a writable `Reg`.
        fn real_reg_to_writable_reg(&mut self, reg: RealReg) -> WritableReg {
            Writable::from_reg(Reg::from(reg))
        }

        /// The instruction defining `val`, if this use is its unique use (so
        /// the instruction is a candidate for sinking to this use site).
        fn is_sinkable_inst(&mut self, val: Value) -> Option<Inst> {
            let input = self.lower_ctx.get_value_as_source_or_const(val);

            if let InputSourceInst::UniqueUse(inst, _) = input.inst {
                Some(inst)
            } else {
                None
            }
        }

        /// Mark `inst` as sunk into its use site.
        #[inline]
        fn sink_inst(&mut self, inst: Inst) {
            self.lower_ctx.sink_inst(inst);
        }

        /// Look through a `uextend` defining `value`, returning its input;
        /// otherwise return `value` itself.
        #[inline]
        fn maybe_uextend(&mut self, value: Value) -> Option<Value> {
            if let Some(def_inst) = self.def_inst(value) {
                if let InstructionData::Unary {
                    opcode: Opcode::Uextend,
                    arg,
                } = self.lower_ctx.data(def_inst)
                {
                    return Some(*arg);
                }
            }

            Some(value)
        }

        /// Convert a physical register into a `Reg`.
        #[inline]
        fn preg_to_reg(&mut self, preg: PReg) -> Reg {
            preg.into()
        }

        /// A register-to-register move of type `ty` from `src` to `dst`.
        #[inline]
        fn gen_move(&mut self, ty: Type, dst: WritableReg, src: Reg) -> MInst {
            MInst::gen_move(dst, src, ty)
        }

        /// Generate the return instruction.
        fn gen_return(&mut self, (list, off): ValueSlice) {
            let rets = (off..list.len(&self.lower_ctx.dfg().value_lists))
                .map(|ix| {
                    let val = list.get(ix, &self.lower_ctx.dfg().value_lists).unwrap();
                    self.put_in_regs(val)
                })
                .collect();
            self.lower_ctx.gen_return(rets);
        }

        /// Same as `shuffle32_from_imm`, but for 64-bit lane shuffles.
        fn shuffle64_from_imm(&mut self, imm: Immediate) -> Option<(u8, u8)> {
            use crate::machinst::isle::shuffle_imm_as_le_lane_idx;

            let bytes = self.lower_ctx.get_immediate_data(imm).as_slice();
            Some((
                shuffle_imm_as_le_lane_idx(8, &bytes[0..8])?,
                shuffle_imm_as_le_lane_idx(8, &bytes[8..16])?,
            ))
        }

        /// Attempts to interpret the shuffle immediate `imm` as a shuffle of
        /// 32-bit lanes, returning four integers, each of which is less than 8,
        /// which represents a permutation of 32-bit lanes as specified by
        /// `imm`.
        ///
        /// For example the shuffle immediate
        ///
        /// `0 1 2 3 8 9 10 11 16 17 18 19 24 25 26 27`
        ///
        /// would return `Some((0, 2, 4, 6))`.
        fn shuffle32_from_imm(&mut self, imm: Immediate) -> Option<(u8, u8, u8, u8)> {
            use crate::machinst::isle::shuffle_imm_as_le_lane_idx;

            let bytes = self.lower_ctx.get_immediate_data(imm).as_slice();
            Some((
                shuffle_imm_as_le_lane_idx(4, &bytes[0..4])?,
                shuffle_imm_as_le_lane_idx(4, &bytes[4..8])?,
                shuffle_imm_as_le_lane_idx(4, &bytes[8..12])?,
                shuffle_imm_as_le_lane_idx(4, &bytes[12..16])?,
            ))
        }

        /// Same as `shuffle32_from_imm`, but for 16-bit lane shuffles.
        fn shuffle16_from_imm(
            &mut self,
            imm: Immediate,
        ) -> Option<(u8, u8, u8, u8, u8, u8, u8, u8)> {
            use crate::machinst::isle::shuffle_imm_as_le_lane_idx;
            let bytes = self.lower_ctx.get_immediate_data(imm).as_slice();
            Some((
                shuffle_imm_as_le_lane_idx(2, &bytes[0..2])?,
                shuffle_imm_as_le_lane_idx(2, &bytes[2..4])?,
                shuffle_imm_as_le_lane_idx(2, &bytes[4..6])?,
                shuffle_imm_as_le_lane_idx(2, &bytes[6..8])?,
                shuffle_imm_as_le_lane_idx(2, &bytes[8..10])?,
                shuffle_imm_as_le_lane_idx(2, &bytes[10..12])?,
                shuffle_imm_as_le_lane_idx(2, &bytes[12..14])?,
                shuffle_imm_as_le_lane_idx(2, &bytes[14..16])?,
            ))
        }

        /// The divisor as a `u64`, masked to `ty`'s width; returns `None`
        /// for 0 and all-ones (-1 modulo the type width).
        fn safe_divisor_from_imm64(&mut self, ty: Type, val: Imm64) -> Option<u64> {
            // Mask for the type's width: all-ones in the low `bytes * 8` bits.
            let minus_one = if ty.bytes() == 8 {
                -1
            } else {
                (1 << (ty.bytes() * 8)) - 1
            };
            let bits = val.bits() & minus_one;
            if bits == 0 || bits == minus_one {
                None
            } else {
                Some(bits as u64)
            }
        }
    };
}
648
/// Interprets `bytes`, a `size`-byte window of a shuffle immediate, as a
/// selector for a single `size`-byte lane.
///
/// This helper backs the `shuffleNN_from_imm` methods above, viewing a
/// byte-granular shuffle as a shuffle of wider lanes. `bytes` must be
/// exactly `size` long; it names a lane only when it covers a whole
/// `size`-aligned run of byte indices in ascending (little-endian) order.
///
/// Returns `Some(n)` with the index of the selected `size`-byte lane, or
/// `None` if `bytes` does not describe an aligned, contiguous lane.
pub fn shuffle_imm_as_le_lane_idx(size: u8, bytes: &[u8]) -> Option<u8> {
    assert_eq!(usize::from(size), bytes.len());

    let first = bytes[0];

    // A whole lane must begin on a `size`-byte boundary.
    if first % size != 0 {
        return None;
    }

    // Every byte index must be exactly one more than its predecessor, so
    // the selector reads the full lane contiguously in little-endian order.
    let contiguous = bytes.windows(2).all(|pair| pair[0] + 1 == pair[1]);
    if !contiguous {
        return None;
    }

    // The byte index of the lane's start, rescaled to a lane index.
    Some(first / size)
}
684
/// Helpers specifically for machines that use ABICaller.
#[macro_export]
#[doc(hidden)]
macro_rules! isle_prelude_caller_methods {
    ($abispec:ty, $abicaller:ty) => {
        /// Lower a direct call to `extname` with signature `sig_ref`,
        /// passing the values in `args`. Asserts that the number of
        /// supplied arguments matches the signature's parameter count.
        fn gen_call(
            &mut self,
            sig_ref: SigRef,
            extname: ExternalName,
            dist: RelocDistance,
            args @ (inputs, off): ValueSlice,
        ) -> InstOutput {
            let caller_conv = self.lower_ctx.abi().call_conv(self.lower_ctx.sigs());
            let sig = &self.lower_ctx.dfg().signatures[sig_ref];
            let num_rets = sig.returns.len();
            let abi = self.lower_ctx.sigs().abi_sig_for_sig_ref(sig_ref);
            let caller = <$abicaller>::from_func(
                self.lower_ctx.sigs(),
                sig_ref,
                &extname,
                dist,
                caller_conv,
                self.backend.flags().clone(),
            )
            .unwrap();

            assert_eq!(
                inputs.len(&self.lower_ctx.dfg().value_lists) - off,
                sig.params.len()
            );

            self.gen_call_common(abi, num_rets, caller, args)
        }

        /// Lower an indirect call through the pointer in `val` with
        /// signature `sig_ref`, passing the values in `args`. Asserts that
        /// the number of supplied arguments matches the signature's
        /// parameter count.
        fn gen_call_indirect(
            &mut self,
            sig_ref: SigRef,
            val: Value,
            args @ (inputs, off): ValueSlice,
        ) -> InstOutput {
            let caller_conv = self.lower_ctx.abi().call_conv(self.lower_ctx.sigs());
            // Materialize the callee pointer before constructing the caller.
            let ptr = self.put_in_reg(val);
            let sig = &self.lower_ctx.dfg().signatures[sig_ref];
            let num_rets = sig.returns.len();
            let abi = self.lower_ctx.sigs().abi_sig_for_sig_ref(sig_ref);
            let caller = <$abicaller>::from_ptr(
                self.lower_ctx.sigs(),
                sig_ref,
                ptr,
                Opcode::CallIndirect,
                caller_conv,
                self.backend.flags().clone(),
            )
            .unwrap();

            assert_eq!(
                inputs.len(&self.lower_ctx.dfg().value_lists) - off,
                sig.params.len()
            );

            self.gen_call_common(abi, num_rets, caller, args)
        }
    };
}
749
/// Helpers for the above ISLE prelude implementations. Meant to go
/// inside the `impl` for the context type, not the trait impl.
#[macro_export]
#[doc(hidden)]
macro_rules! isle_prelude_method_helpers {
    ($abicaller:ty) => {
        /// Shared tail of direct and indirect call lowering: emit stack
        /// adjustments, copy arguments into place, constrain return values,
        /// and emit the call itself. Returns the call's result registers.
        fn gen_call_common(
            &mut self,
            abi: Sig,
            num_rets: usize,
            mut caller: $abicaller,
            (inputs, off): ValueSlice,
        ) -> InstOutput {
            caller.emit_stack_pre_adjust(self.lower_ctx);

            let num_args = self.lower_ctx.sigs().num_args(abi);

            assert_eq!(
                inputs.len(&self.lower_ctx.dfg().value_lists) - off,
                num_args
            );
            // First put every argument value into registers, then do the
            // buffer copies and arg moves in separate passes.
            let mut arg_regs = vec![];
            for i in 0..num_args {
                let input = inputs
                    .get(off + i, &self.lower_ctx.dfg().value_lists)
                    .unwrap();
                arg_regs.push(self.put_in_regs(input));
            }
            for (i, arg_regs) in arg_regs.iter().enumerate() {
                caller.emit_copy_regs_to_buffer(self.lower_ctx, i, *arg_regs);
            }
            for (i, arg_regs) in arg_regs.iter().enumerate() {
                for inst in caller.gen_arg(self.lower_ctx, i, *arg_regs) {
                    self.lower_ctx.emit(inst);
                }
            }

            // Handle retvals prior to emitting call, so the
            // constraints are on the call instruction; but buffer the
            // instructions till after the call.
            let mut outputs = InstOutput::new();
            let mut retval_insts: crate::machinst::abi::SmallInstVec<_> = smallvec::smallvec![];
            // We take the *last* `num_rets` returns of the sig:
            // this skips a StructReturn, if any, that is present.
            let sigdata_num_rets = self.lower_ctx.sigs().num_rets(abi);
            debug_assert!(num_rets <= sigdata_num_rets);
            for i in (sigdata_num_rets - num_rets)..sigdata_num_rets {
                // Borrow `sigdata` again so we don't hold a `self`
                // borrow across the `&mut self` arg to
                // `abi_arg_slot_regs()` below.
                let ret = self.lower_ctx.sigs().get_ret(abi, i);
                let retval_regs = self.abi_arg_slot_regs(&ret).unwrap();
                retval_insts.extend(
                    caller
                        .gen_retval(self.lower_ctx, i, retval_regs.clone())
                        .into_iter(),
                );
                outputs.push(valueregs::non_writable_value_regs(retval_regs));
            }

            caller.emit_call(self.lower_ctx);

            // Now emit the buffered retval moves, after the call.
            for inst in retval_insts {
                self.lower_ctx.emit(inst);
            }

            caller.emit_stack_post_adjust(self.lower_ctx);

            outputs
        }

        /// Allocate fresh temporaries matching a one- or two-slot ABI
        /// argument; `None` for non-`Slots` argument kinds. Panics on any
        /// other slot count.
        fn abi_arg_slot_regs(&mut self, arg: &ABIArg) -> Option<WritableValueRegs> {
            match arg {
                &ABIArg::Slots { ref slots, .. } => match slots.len() {
                    1 => {
                        let a = self.temp_writable_reg(slots[0].get_type());
                        Some(WritableValueRegs::one(a))
                    }
                    2 => {
                        let a = self.temp_writable_reg(slots[0].get_type());
                        let b = self.temp_writable_reg(slots[1].get_type());
                        Some(WritableValueRegs::two(a, b))
                    }
                    _ => panic!("Expected to see one or two slots only from {:?}", arg),
                },
                _ => None,
            }
        }
    };
}
840
/// This structure is used to implement the ISLE-generated `Context` trait and
/// internally has a temporary reference to a machinst `LowerCtx`.
pub(crate) struct IsleContext<'a, 'b, I, B>
where
    I: VCodeInst,
    B: LowerBackend,
{
    /// Lowering context for the function currently being compiled.
    pub lower_ctx: &'a mut Lower<'b, I>,
    /// The machine backend performing instruction selection.
    pub backend: &'a B,
}