polkavm/
gas.rs

use alloc::sync::Arc;
use alloc::vec::Vec;
use polkavm_common::program::{InstructionSet, InstructionVisitor, Instructions, Opcode, ParsingVisitor, RawReg};
use polkavm_common::simulator::CacheModel;
use polkavm_common::utils::GasVisitorT;

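/// Specifies which cost model to use: a simple per-opcode [`CostModel`] or a full [`CacheModel`].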
#[derive(Clone)]
pub enum CostModelKind {
    Simple(CostModelRef),
    Full(CacheModel),
}

impl From<CostModelRef> for CostModelKind {
    fn from(cost_model: CostModelRef) -> CostModelKind {
        CostModelKind::Simple(cost_model)
    }
}

impl From<Arc<CostModel>> for CostModelKind {
    fn from(cost_model: Arc<CostModel>) -> CostModelKind {
        CostModelKind::Simple(cost_model.into())
    }
}

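/// A cheaply cloneable reference to a [`CostModel`], pointing at either a `'static` value or one owned by an `Arc`.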
#[derive(Clone)]
pub struct CostModelRef {
    pointer: *const CostModel,
    _lifetime: Option<Arc<CostModel>>,
}

// SAFETY: The pointer inside of the struct points to either a value with a static lifetime, or the `Arc` that we keep in the struct.
unsafe impl Send for CostModelRef {}

// SAFETY: The pointer inside of the struct points to either a value with a static lifetime, or the `Arc` that we keep in the struct.
unsafe impl Sync for CostModelRef {}

impl CostModelRef {
    pub const fn from_static(cost_model: &'static CostModel) -> Self {
        CostModelRef {
            pointer: cost_model as *const CostModel,
            _lifetime: None,
        }
    }
}

impl From<&'static CostModel> for CostModelRef {
    fn from(value: &'static CostModel) -> Self {
        Self::from_static(value)
    }
}

impl From<Arc<CostModel>> for CostModelRef {
    fn from(value: Arc<CostModel>) -> Self {
        CostModelRef {
            pointer: Arc::as_ptr(&value),
            _lifetime: Some(value),
        }
    }
}

impl core::ops::Deref for CostModelRef {
    type Target = CostModel;

    fn deref(&self) -> &Self::Target {
        // SAFETY: The pointer points to either a value with a static lifetime, or the `Arc` that we keep in the struct.
        unsafe { &*self.pointer }
    }
}

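/// The cost of a single instruction, in gas.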
pub type Cost = u32;

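// Generates the `CostModel` struct with one `Cost` field per opcode, along with its
// serialization, deserialization and per-opcode cost lookup.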
macro_rules! define_cost_model_struct {
    (@count) => {
        0
    };

    (@count $f0:ident $f1:ident $f2:ident $f3:ident $f4:ident $f5:ident $f6:ident $f7:ident $($rest:ident)*) => {
        8 + define_cost_model_struct!(@count $($rest)*)
    };

    (@count $f0:ident $($rest:ident)*) => {
        1 + define_cost_model_struct!(@count $($rest)*)
    };

    (
        version: $version:expr,
        $($field:ident,)+
    ) => {
        const COST_MODEL_FIELDS: usize = define_cost_model_struct!(@count $($field)+) + 1;

        #[allow(clippy::exhaustive_structs)]
        #[derive(Hash)]
        pub struct CostModel {
            $(
                pub $field: Cost,
            )+

            pub invalid: u32,
        }

        impl CostModel {
            /// A naive gas cost model where every instruction costs one gas.
            pub const fn naive() -> Self {
                CostModel {
                    $(
                        $field: 1,
                    )+

                    invalid: 1,
                }
            }

            /// Serializes the cost model into a byte blob.
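            ///
            /// The blob is a sequence of little-endian `u32`s: the format version first, then the cost of every field, with `invalid` last.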
            pub fn serialize(&self) -> Vec<u8> {
                let mut output = Vec::with_capacity((COST_MODEL_FIELDS + 1) * 4);
                let version: u32 = $version;
                output.extend_from_slice(&version.to_le_bytes());

                $(
                    output.extend_from_slice(&self.$field.to_le_bytes());
                )+
                output.extend_from_slice(&self.invalid.to_le_bytes());
                output
            }

            /// Deserializes the cost model from a byte blob.
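            ///
            /// Returns `None` if the blob has an unexpected length or a mismatched version.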
            pub fn deserialize(blob: &[u8]) -> Option<CostModel> {
                if (blob.len() % 4) != 0 || blob.len() / 4 != (COST_MODEL_FIELDS + 1) {
                    return None;
                }

                if u32::from_le_bytes([blob[0], blob[1], blob[2], blob[3]]) != $version {
                    return None;
                }

                let mut model = CostModel::naive();
                let mut position = 4;
                $(
                    model.$field = u32::from_le_bytes([blob[position], blob[position + 1], blob[position + 2], blob[position + 3]]);
                    position += 4;
                )+

                model.invalid = u32::from_le_bytes([blob[position], blob[position + 1], blob[position + 2], blob[position + 3]]);
                position += 4;

                assert_eq!(position, (COST_MODEL_FIELDS + 1) * 4);
                Some(model)
            }

            /// Gets the cost of a given opcode.
            pub fn cost_for_opcode(&self, opcode: Opcode) -> u32 {
                match opcode {
                    $(
                        Opcode::$field => self.$field,
                    )+

                    Opcode::_NonExhaustive(()) => {
                        #[cfg(debug_assertions)]
                        unreachable!();

                        #[cfg(not(debug_assertions))]
                        0
                    },
                }
            }
        }
    }
}

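// Instantiates `CostModel` with one cost field per opcode; the `version` below is embedded in the serialized blob.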
define_cost_model_struct! {
    version: 2,

    add_32,
    add_64,
    add_imm_32,
    add_imm_64,
    and,
    and_imm,
    and_inverted,
    branch_eq,
    branch_eq_imm,
    branch_greater_or_equal_signed,
    branch_greater_or_equal_signed_imm,
    branch_greater_or_equal_unsigned,
    branch_greater_or_equal_unsigned_imm,
    branch_greater_signed_imm,
    branch_greater_unsigned_imm,
    branch_less_or_equal_signed_imm,
    branch_less_or_equal_unsigned_imm,
    branch_less_signed,
    branch_less_signed_imm,
    branch_less_unsigned,
    branch_less_unsigned_imm,
    branch_not_eq,
    branch_not_eq_imm,
    cmov_if_not_zero,
    cmov_if_not_zero_imm,
    cmov_if_zero,
    cmov_if_zero_imm,
    count_leading_zero_bits_32,
    count_leading_zero_bits_64,
    count_set_bits_32,
    count_set_bits_64,
    count_trailing_zero_bits_32,
    count_trailing_zero_bits_64,
    div_signed_32,
    div_signed_64,
    div_unsigned_32,
    div_unsigned_64,
    ecalli,
    fallthrough,
    jump,
    jump_indirect,
    load_i16,
    load_i32,
    load_i8,
    load_imm,
    load_imm64,
    load_imm_and_jump,
    load_imm_and_jump_indirect,
    load_indirect_i16,
    load_indirect_i32,
    load_indirect_i8,
    load_indirect_u16,
    load_indirect_u32,
    load_indirect_u64,
    load_indirect_u8,
    load_u16,
    load_u32,
    load_u64,
    load_u8,
    maximum,
    maximum_unsigned,
    memset,
    minimum,
    minimum_unsigned,
    move_reg,
    mul_32,
    mul_64,
    mul_imm_32,
    mul_imm_64,
    mul_upper_signed_signed,
    mul_upper_signed_unsigned,
    mul_upper_unsigned_unsigned,
    negate_and_add_imm_32,
    negate_and_add_imm_64,
    or,
    or_imm,
    or_inverted,
    rem_signed_32,
    rem_signed_64,
    rem_unsigned_32,
    rem_unsigned_64,
    reverse_byte,
    rotate_left_32,
    rotate_left_64,
    rotate_right_32,
    rotate_right_64,
    rotate_right_imm_32,
    rotate_right_imm_64,
    rotate_right_imm_alt_32,
    rotate_right_imm_alt_64,
    sbrk,
    set_greater_than_signed_imm,
    set_greater_than_unsigned_imm,
    set_less_than_signed,
    set_less_than_signed_imm,
    set_less_than_unsigned,
    set_less_than_unsigned_imm,
    shift_arithmetic_right_32,
    shift_arithmetic_right_64,
    shift_arithmetic_right_imm_32,
    shift_arithmetic_right_imm_64,
    shift_arithmetic_right_imm_alt_32,
    shift_arithmetic_right_imm_alt_64,
    shift_logical_left_32,
    shift_logical_left_64,
    shift_logical_left_imm_32,
    shift_logical_left_imm_64,
    shift_logical_left_imm_alt_32,
    shift_logical_left_imm_alt_64,
    shift_logical_right_32,
    shift_logical_right_64,
    shift_logical_right_imm_32,
    shift_logical_right_imm_64,
    shift_logical_right_imm_alt_32,
    shift_logical_right_imm_alt_64,
    sign_extend_16,
    sign_extend_8,
    store_imm_indirect_u16,
    store_imm_indirect_u32,
    store_imm_indirect_u64,
    store_imm_indirect_u8,
    store_imm_u16,
    store_imm_u32,
    store_imm_u64,
    store_imm_u8,
    store_indirect_u16,
    store_indirect_u32,
    store_indirect_u64,
    store_indirect_u8,
    store_u16,
    store_u32,
    store_u64,
    store_u8,
    sub_32,
    sub_64,
    trap,
    unlikely,
    xnor,
    xor,
    xor_imm,
    zero_extend_16,
}

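/// The statically allocated cost model backing [`CostModel::naive_ref`].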
static NAIVE_COST_MODEL: CostModel = CostModel::naive();

impl CostModel {
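    /// Returns a reference to a statically allocated naive cost model where every instruction costs one gas.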
    pub fn naive_ref() -> CostModelRef {
        CostModelRef::from_static(&NAIVE_COST_MODEL)
    }
}

impl CostModelKind {
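    /// Returns whether this is the built-in naive cost model, checked by pointer identity.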
    pub(crate) fn is_naive(&self) -> bool {
        match self {
            CostModelKind::Simple(ref cost_model) => cost_model.pointer == core::ptr::addr_of!(NAIVE_COST_MODEL),
            CostModelKind::Full(..) => false,
        }
    }
}

// TODO: Come up with a better cost model.
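/// An [`InstructionVisitor`] which sums the cost of each visited instruction and reports a total once a basic block ends.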
pub struct GasVisitor {
    cost_model: CostModelRef,
    cost: u32,
    last_block_cost: Option<u32>,
}

impl GasVisitor {
    pub fn new(cost_model: CostModelRef) -> Self {
        Self {
            cost_model,
            cost: 0,
            last_block_cost: None,
        }
    }

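    // Called by every instruction which terminates a basic block: stashes the accumulated
    // cost so that `take_block_cost` can return it, and resets the counter for the next block.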
    #[inline]
    fn start_new_basic_block(&mut self) {
        self.last_block_cost = Some(self.cost);
        self.cost = 0;
    }
}

impl GasVisitorT for GasVisitor {
    #[inline]
    fn take_block_cost(&mut self) -> Option<u32> {
        self.last_block_cost.take()
    }

    fn is_at_start_of_basic_block(&self) -> bool {
        self.cost == 0
    }
}

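// Implements `ParsingVisitor` for `GasVisitor` by forwarding to the `InstructionVisitor` implementation below.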
polkavm_common::impl_parsing_visitor_for_instruction_visitor!(GasVisitor);

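// Each instruction adds its configured cost; instructions which terminate a basic block also flush the accumulated total.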
impl InstructionVisitor for GasVisitor {
    type ReturnTy = ();

    #[cold]
    fn invalid(&mut self) -> Self::ReturnTy {
        self.cost += self.cost_model.invalid;
        self.start_new_basic_block();
    }

    #[inline(always)]
    fn trap(&mut self) -> Self::ReturnTy {
        self.cost += self.cost_model.trap;
        self.start_new_basic_block();
    }

    #[inline(always)]
    fn fallthrough(&mut self) -> Self::ReturnTy {
        self.cost += self.cost_model.fallthrough;
        self.start_new_basic_block();
    }

    #[inline(always)]
    fn unlikely(&mut self) -> Self::ReturnTy {
        self.cost += self.cost_model.unlikely;
    }

    #[inline(always)]
    fn sbrk(&mut self, _d: RawReg, _s: RawReg) -> Self::ReturnTy {
        self.cost += self.cost_model.sbrk;
    }

    #[inline(always)]
    fn memset(&mut self) -> Self::ReturnTy {
        self.cost += self.cost_model.memset;
    }

    #[inline(always)]
    fn ecalli(&mut self, _imm: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.ecalli;
    }

    #[inline(always)]
    fn set_less_than_unsigned(&mut self, _d: RawReg, _s1: RawReg, _s2: RawReg) -> Self::ReturnTy {
        self.cost += self.cost_model.set_less_than_unsigned;
    }

    #[inline(always)]
    fn set_less_than_signed(&mut self, _d: RawReg, _s1: RawReg, _s2: RawReg) -> Self::ReturnTy {
        self.cost += self.cost_model.set_less_than_signed;
    }

    #[inline(always)]
    fn shift_logical_right_32(&mut self, _d: RawReg, _s1: RawReg, _s2: RawReg) -> Self::ReturnTy {
        self.cost += self.cost_model.shift_logical_right_32;
    }

    #[inline(always)]
    fn shift_arithmetic_right_32(&mut self, _d: RawReg, _s1: RawReg, _s2: RawReg) -> Self::ReturnTy {
        self.cost += self.cost_model.shift_arithmetic_right_32;
    }

    #[inline(always)]
    fn shift_logical_left_32(&mut self, _d: RawReg, _s1: RawReg, _s2: RawReg) -> Self::ReturnTy {
        self.cost += self.cost_model.shift_logical_left_32;
    }

    #[inline(always)]
    fn shift_logical_right_64(&mut self, _d: RawReg, _s1: RawReg, _s2: RawReg) -> Self::ReturnTy {
        self.cost += self.cost_model.shift_logical_right_64;
    }

    #[inline(always)]
    fn shift_arithmetic_right_64(&mut self, _d: RawReg, _s1: RawReg, _s2: RawReg) -> Self::ReturnTy {
        self.cost += self.cost_model.shift_arithmetic_right_64;
    }

    #[inline(always)]
    fn shift_logical_left_64(&mut self, _d: RawReg, _s1: RawReg, _s2: RawReg) -> Self::ReturnTy {
        self.cost += self.cost_model.shift_logical_left_64;
    }

    #[inline(always)]
    fn xor(&mut self, _d: RawReg, _s1: RawReg, _s2: RawReg) -> Self::ReturnTy {
        self.cost += self.cost_model.xor;
    }

    #[inline(always)]
    fn and(&mut self, _d: RawReg, _s1: RawReg, _s2: RawReg) -> Self::ReturnTy {
        self.cost += self.cost_model.and;
    }

    #[inline(always)]
    fn or(&mut self, _d: RawReg, _s1: RawReg, _s2: RawReg) -> Self::ReturnTy {
        self.cost += self.cost_model.or;
    }
    #[inline(always)]
    fn add_32(&mut self, _d: RawReg, _s1: RawReg, _s2: RawReg) -> Self::ReturnTy {
        self.cost += self.cost_model.add_32;
    }

    #[inline(always)]
    fn add_64(&mut self, _d: RawReg, _s1: RawReg, _s2: RawReg) -> Self::ReturnTy {
        self.cost += self.cost_model.add_64;
    }

    #[inline(always)]
    fn sub_32(&mut self, _d: RawReg, _s1: RawReg, _s2: RawReg) -> Self::ReturnTy {
        self.cost += self.cost_model.sub_32;
    }

    #[inline(always)]
    fn sub_64(&mut self, _d: RawReg, _s1: RawReg, _s2: RawReg) -> Self::ReturnTy {
        self.cost += self.cost_model.sub_64;
    }

    #[inline(always)]
    fn mul_32(&mut self, _d: RawReg, _s1: RawReg, _s2: RawReg) -> Self::ReturnTy {
        self.cost += self.cost_model.mul_32;
    }

    #[inline(always)]
    fn mul_64(&mut self, _d: RawReg, _s1: RawReg, _s2: RawReg) -> Self::ReturnTy {
        self.cost += self.cost_model.mul_64;
    }

    #[inline(always)]
    fn mul_upper_signed_signed(&mut self, _d: RawReg, _s1: RawReg, _s2: RawReg) -> Self::ReturnTy {
        self.cost += self.cost_model.mul_upper_signed_signed;
    }

    #[inline(always)]
    fn mul_upper_unsigned_unsigned(&mut self, _d: RawReg, _s1: RawReg, _s2: RawReg) -> Self::ReturnTy {
        self.cost += self.cost_model.mul_upper_unsigned_unsigned;
    }

    #[inline(always)]
    fn mul_upper_signed_unsigned(&mut self, _d: RawReg, _s1: RawReg, _s2: RawReg) -> Self::ReturnTy {
        self.cost += self.cost_model.mul_upper_signed_unsigned;
    }

    #[inline(always)]
    fn div_unsigned_32(&mut self, _d: RawReg, _s1: RawReg, _s2: RawReg) -> Self::ReturnTy {
        self.cost += self.cost_model.div_unsigned_32;
    }

    #[inline(always)]
    fn div_signed_32(&mut self, _d: RawReg, _s1: RawReg, _s2: RawReg) -> Self::ReturnTy {
        self.cost += self.cost_model.div_signed_32;
    }

    #[inline(always)]
    fn rem_unsigned_32(&mut self, _d: RawReg, _s1: RawReg, _s2: RawReg) -> Self::ReturnTy {
        self.cost += self.cost_model.rem_unsigned_32;
    }

    #[inline(always)]
    fn rem_signed_32(&mut self, _d: RawReg, _s1: RawReg, _s2: RawReg) -> Self::ReturnTy {
        self.cost += self.cost_model.rem_signed_32;
    }

    #[inline(always)]
    fn div_unsigned_64(&mut self, _d: RawReg, _s1: RawReg, _s2: RawReg) -> Self::ReturnTy {
        self.cost += self.cost_model.div_unsigned_64;
    }

    #[inline(always)]
    fn div_signed_64(&mut self, _d: RawReg, _s1: RawReg, _s2: RawReg) -> Self::ReturnTy {
        self.cost += self.cost_model.div_signed_64;
    }

    #[inline(always)]
    fn rem_unsigned_64(&mut self, _d: RawReg, _s1: RawReg, _s2: RawReg) -> Self::ReturnTy {
        self.cost += self.cost_model.rem_unsigned_64;
    }

    #[inline(always)]
    fn rem_signed_64(&mut self, _d: RawReg, _s1: RawReg, _s2: RawReg) -> Self::ReturnTy {
        self.cost += self.cost_model.rem_signed_64;
    }

    #[inline(always)]
    fn and_inverted(&mut self, _d: RawReg, _s1: RawReg, _s2: RawReg) -> Self::ReturnTy {
        self.cost += self.cost_model.and_inverted;
    }

    #[inline(always)]
    fn or_inverted(&mut self, _d: RawReg, _s1: RawReg, _s2: RawReg) -> Self::ReturnTy {
        self.cost += self.cost_model.or_inverted;
    }

    #[inline(always)]
    fn xnor(&mut self, _d: RawReg, _s1: RawReg, _s2: RawReg) -> Self::ReturnTy {
        self.cost += self.cost_model.xnor;
    }

    #[inline(always)]
    fn maximum(&mut self, _d: RawReg, _s1: RawReg, _s2: RawReg) -> Self::ReturnTy {
        self.cost += self.cost_model.maximum;
    }

    #[inline(always)]
    fn maximum_unsigned(&mut self, _d: RawReg, _s1: RawReg, _s2: RawReg) -> Self::ReturnTy {
        self.cost += self.cost_model.maximum_unsigned;
    }

    #[inline(always)]
    fn minimum(&mut self, _d: RawReg, _s1: RawReg, _s2: RawReg) -> Self::ReturnTy {
        self.cost += self.cost_model.minimum;
    }

    #[inline(always)]
    fn minimum_unsigned(&mut self, _d: RawReg, _s1: RawReg, _s2: RawReg) -> Self::ReturnTy {
        self.cost += self.cost_model.minimum_unsigned;
    }

    #[inline(always)]
    fn rotate_left_32(&mut self, _d: RawReg, _s1: RawReg, _s2: RawReg) -> Self::ReturnTy {
        self.cost += self.cost_model.rotate_left_32;
    }

    #[inline(always)]
    fn rotate_left_64(&mut self, _d: RawReg, _s1: RawReg, _s2: RawReg) -> Self::ReturnTy {
        self.cost += self.cost_model.rotate_left_64;
    }

    #[inline(always)]
    fn rotate_right_32(&mut self, _d: RawReg, _s1: RawReg, _s2: RawReg) -> Self::ReturnTy {
        self.cost += self.cost_model.rotate_right_32;
    }

    #[inline(always)]
    fn rotate_right_64(&mut self, _d: RawReg, _s1: RawReg, _s2: RawReg) -> Self::ReturnTy {
        self.cost += self.cost_model.rotate_right_64;
    }

    #[inline(always)]
    fn mul_imm_32(&mut self, _d: RawReg, _s1: RawReg, _s2: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.mul_imm_32;
    }

    #[inline(always)]
    fn mul_imm_64(&mut self, _d: RawReg, _s1: RawReg, _s2: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.mul_imm_64;
    }

    #[inline(always)]
    fn set_less_than_unsigned_imm(&mut self, _d: RawReg, _s1: RawReg, _s2: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.set_less_than_unsigned_imm;
    }

    #[inline(always)]
    fn set_less_than_signed_imm(&mut self, _d: RawReg, _s1: RawReg, _s2: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.set_less_than_signed_imm;
    }

    #[inline(always)]
    fn set_greater_than_unsigned_imm(&mut self, _d: RawReg, _s1: RawReg, _s2: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.set_greater_than_unsigned_imm;
    }

    #[inline(always)]
    fn set_greater_than_signed_imm(&mut self, _d: RawReg, _s1: RawReg, _s2: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.set_greater_than_signed_imm;
    }

    #[inline(always)]
    fn shift_logical_right_imm_32(&mut self, _d: RawReg, _s1: RawReg, _s2: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.shift_logical_right_imm_32;
    }

    #[inline(always)]
    fn shift_arithmetic_right_imm_32(&mut self, _d: RawReg, _s1: RawReg, _s2: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.shift_arithmetic_right_imm_32;
    }

    #[inline(always)]
    fn shift_logical_left_imm_32(&mut self, _d: RawReg, _s1: RawReg, _s2: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.shift_logical_left_imm_32;
    }

    #[inline(always)]
    fn shift_logical_right_imm_alt_32(&mut self, _d: RawReg, _s2: RawReg, _s1: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.shift_logical_right_imm_alt_32;
    }

    #[inline(always)]
    fn shift_arithmetic_right_imm_alt_32(&mut self, _d: RawReg, _s2: RawReg, _s1: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.shift_arithmetic_right_imm_alt_32;
    }

    #[inline(always)]
    fn shift_logical_left_imm_alt_32(&mut self, _d: RawReg, _s2: RawReg, _s1: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.shift_logical_left_imm_alt_32;
    }

    #[inline(always)]
    fn shift_logical_right_imm_64(&mut self, _d: RawReg, _s1: RawReg, _s2: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.shift_logical_right_imm_64;
    }

    #[inline(always)]
    fn shift_arithmetic_right_imm_64(&mut self, _d: RawReg, _s1: RawReg, _s2: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.shift_arithmetic_right_imm_64;
    }

    #[inline(always)]
    fn shift_logical_left_imm_64(&mut self, _d: RawReg, _s1: RawReg, _s2: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.shift_logical_left_imm_64;
    }

    #[inline(always)]
    fn shift_logical_right_imm_alt_64(&mut self, _d: RawReg, _s2: RawReg, _s1: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.shift_logical_right_imm_alt_64;
    }

    #[inline(always)]
    fn shift_arithmetic_right_imm_alt_64(&mut self, _d: RawReg, _s2: RawReg, _s1: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.shift_arithmetic_right_imm_alt_64;
    }

    #[inline(always)]
    fn shift_logical_left_imm_alt_64(&mut self, _d: RawReg, _s2: RawReg, _s1: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.shift_logical_left_imm_alt_64;
    }

    #[inline(always)]
    fn or_imm(&mut self, _d: RawReg, _s: RawReg, _imm: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.or_imm;
    }

    #[inline(always)]
    fn and_imm(&mut self, _d: RawReg, _s: RawReg, _imm: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.and_imm;
    }

    #[inline(always)]
    fn xor_imm(&mut self, _d: RawReg, _s: RawReg, _imm: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.xor_imm;
    }

    #[inline(always)]
    fn move_reg(&mut self, _d: RawReg, _s: RawReg) -> Self::ReturnTy {
        self.cost += self.cost_model.move_reg;
    }

    #[inline(always)]
    fn count_leading_zero_bits_32(&mut self, _d: RawReg, _s: RawReg) -> Self::ReturnTy {
        self.cost += self.cost_model.count_leading_zero_bits_32;
    }

    #[inline(always)]
    fn count_leading_zero_bits_64(&mut self, _d: RawReg, _s: RawReg) -> Self::ReturnTy {
        self.cost += self.cost_model.count_leading_zero_bits_64;
    }

    #[inline(always)]
    fn count_trailing_zero_bits_32(&mut self, _d: RawReg, _s: RawReg) -> Self::ReturnTy {
        self.cost += self.cost_model.count_trailing_zero_bits_32;
    }

    #[inline(always)]
    fn count_trailing_zero_bits_64(&mut self, _d: RawReg, _s: RawReg) -> Self::ReturnTy {
        self.cost += self.cost_model.count_trailing_zero_bits_64;
    }

    #[inline(always)]
    fn count_set_bits_32(&mut self, _d: RawReg, _s: RawReg) -> Self::ReturnTy {
        self.cost += self.cost_model.count_set_bits_32;
    }

    #[inline(always)]
    fn count_set_bits_64(&mut self, _d: RawReg, _s: RawReg) -> Self::ReturnTy {
        self.cost += self.cost_model.count_set_bits_64;
    }

    #[inline(always)]
    fn sign_extend_8(&mut self, _d: RawReg, _s: RawReg) -> Self::ReturnTy {
        self.cost += self.cost_model.sign_extend_8;
    }

    #[inline(always)]
    fn sign_extend_16(&mut self, _d: RawReg, _s: RawReg) -> Self::ReturnTy {
        self.cost += self.cost_model.sign_extend_16;
    }

    #[inline(always)]
    fn zero_extend_16(&mut self, _d: RawReg, _s: RawReg) -> Self::ReturnTy {
        self.cost += self.cost_model.zero_extend_16;
    }

    #[inline(always)]
    fn reverse_byte(&mut self, _d: RawReg, _s: RawReg) -> Self::ReturnTy {
        self.cost += self.cost_model.reverse_byte;
    }

    #[inline(always)]
    fn cmov_if_zero(&mut self, _d: RawReg, _s: RawReg, _c: RawReg) -> Self::ReturnTy {
        self.cost += self.cost_model.cmov_if_zero;
    }

    #[inline(always)]
    fn cmov_if_not_zero(&mut self, _d: RawReg, _s: RawReg, _c: RawReg) -> Self::ReturnTy {
        self.cost += self.cost_model.cmov_if_not_zero;
    }

    #[inline(always)]
    fn cmov_if_zero_imm(&mut self, _d: RawReg, _c: RawReg, _s: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.cmov_if_zero_imm;
    }

    #[inline(always)]
    fn cmov_if_not_zero_imm(&mut self, _d: RawReg, _c: RawReg, _s: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.cmov_if_not_zero_imm;
    }

    #[inline(always)]
    fn rotate_right_imm_32(&mut self, _d: RawReg, _s: RawReg, _c: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.rotate_right_imm_32;
    }

    #[inline(always)]
    fn rotate_right_imm_alt_32(&mut self, _d: RawReg, _s: RawReg, _c: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.rotate_right_imm_alt_32;
    }

    #[inline(always)]
    fn rotate_right_imm_64(&mut self, _d: RawReg, _s: RawReg, _c: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.rotate_right_imm_64;
    }

    #[inline(always)]
    fn rotate_right_imm_alt_64(&mut self, _d: RawReg, _s: RawReg, _c: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.rotate_right_imm_alt_64;
    }

    #[inline(always)]
    fn add_imm_32(&mut self, _d: RawReg, _s: RawReg, _imm: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.add_imm_32;
    }

    #[inline(always)]
    fn add_imm_64(&mut self, _d: RawReg, _s: RawReg, _imm: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.add_imm_64;
    }

    #[inline(always)]
    fn negate_and_add_imm_32(&mut self, _d: RawReg, _s1: RawReg, _s2: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.negate_and_add_imm_32;
    }

    #[inline(always)]
    fn negate_and_add_imm_64(&mut self, _d: RawReg, _s1: RawReg, _s2: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.negate_and_add_imm_64;
    }

    #[inline(always)]
    fn store_imm_indirect_u8(&mut self, _base: RawReg, _offset: u32, _value: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.store_imm_indirect_u8;
    }

    #[inline(always)]
    fn store_imm_indirect_u16(&mut self, _base: RawReg, _offset: u32, _value: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.store_imm_indirect_u16;
    }

    #[inline(always)]
    fn store_imm_indirect_u32(&mut self, _base: RawReg, _offset: u32, _value: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.store_imm_indirect_u32;
    }

    #[inline(always)]
    fn store_imm_indirect_u64(&mut self, _base: RawReg, _offset: u32, _value: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.store_imm_indirect_u64;
    }

    #[inline(always)]
    fn store_indirect_u8(&mut self, _src: RawReg, _base: RawReg, _offset: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.store_indirect_u8;
    }

    #[inline(always)]
    fn store_indirect_u16(&mut self, _src: RawReg, _base: RawReg, _offset: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.store_indirect_u16;
    }

    #[inline(always)]
    fn store_indirect_u32(&mut self, _src: RawReg, _base: RawReg, _offset: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.store_indirect_u32;
    }

    #[inline(always)]
    fn store_indirect_u64(&mut self, _src: RawReg, _base: RawReg, _offset: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.store_indirect_u64;
    }

    #[inline(always)]
    fn store_imm_u8(&mut self, _offset: u32, _value: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.store_imm_u8;
    }

    #[inline(always)]
    fn store_imm_u16(&mut self, _offset: u32, _value: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.store_imm_u16;
    }

    #[inline(always)]
    fn store_imm_u32(&mut self, _offset: u32, _value: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.store_imm_u32;
    }

    #[inline(always)]
    fn store_imm_u64(&mut self, _offset: u32, _value: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.store_imm_u64;
    }

    #[inline(always)]
    fn store_u8(&mut self, _src: RawReg, _offset: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.store_u8;
    }

    #[inline(always)]
    fn store_u16(&mut self, _src: RawReg, _offset: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.store_u16;
    }

    #[inline(always)]
    fn store_u32(&mut self, _src: RawReg, _offset: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.store_u32;
    }

    #[inline(always)]
    fn store_u64(&mut self, _src: RawReg, _offset: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.store_u64;
    }

    #[inline(always)]
    fn load_indirect_u8(&mut self, _dst: RawReg, _base: RawReg, _offset: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.load_indirect_u8;
    }

    #[inline(always)]
    fn load_indirect_i8(&mut self, _dst: RawReg, _base: RawReg, _offset: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.load_indirect_i8;
    }

    #[inline(always)]
    fn load_indirect_u16(&mut self, _dst: RawReg, _base: RawReg, _offset: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.load_indirect_u16;
    }

    #[inline(always)]
    fn load_indirect_i16(&mut self, _dst: RawReg, _base: RawReg, _offset: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.load_indirect_i16;
    }

    #[inline(always)]
    fn load_indirect_u32(&mut self, _dst: RawReg, _base: RawReg, _offset: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.load_indirect_u32;
    }

    #[inline(always)]
    fn load_indirect_i32(&mut self, _dst: RawReg, _base: RawReg, _offset: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.load_indirect_i32;
    }

    #[inline(always)]
    fn load_indirect_u64(&mut self, _dst: RawReg, _base: RawReg, _offset: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.load_indirect_u64;
    }

    #[inline(always)]
    fn load_u8(&mut self, _dst: RawReg, _offset: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.load_u8;
    }

    #[inline(always)]
    fn load_i8(&mut self, _dst: RawReg, _offset: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.load_i8;
    }

    #[inline(always)]
    fn load_u16(&mut self, _dst: RawReg, _offset: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.load_u16;
    }

    #[inline(always)]
    fn load_i16(&mut self, _dst: RawReg, _offset: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.load_i16;
    }

    #[inline(always)]
    fn load_u32(&mut self, _dst: RawReg, _offset: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.load_u32;
    }

    #[inline(always)]
    fn load_i32(&mut self, _dst: RawReg, _offset: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.load_i32;
    }

    #[inline(always)]
    fn load_u64(&mut self, _dst: RawReg, _offset: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.load_u64;
    }

    #[inline(always)]
    fn branch_less_unsigned(&mut self, _s1: RawReg, _s2: RawReg, _imm: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.branch_less_unsigned;
        self.start_new_basic_block();
    }

    #[inline(always)]
    fn branch_less_signed(&mut self, _s1: RawReg, _s2: RawReg, _imm: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.branch_less_signed;
        self.start_new_basic_block();
    }

    #[inline(always)]
    fn branch_greater_or_equal_unsigned(&mut self, _s1: RawReg, _s2: RawReg, _imm: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.branch_greater_or_equal_unsigned;
        self.start_new_basic_block();
    }

    #[inline(always)]
    fn branch_greater_or_equal_signed(&mut self, _s1: RawReg, _s2: RawReg, _imm: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.branch_greater_or_equal_signed;
        self.start_new_basic_block();
    }

    #[inline(always)]
    fn branch_eq(&mut self, _s1: RawReg, _s2: RawReg, _imm: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.branch_eq;
        self.start_new_basic_block();
    }

    #[inline(always)]
    fn branch_not_eq(&mut self, _s1: RawReg, _s2: RawReg, _imm: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.branch_not_eq;
        self.start_new_basic_block();
    }

    #[inline(always)]
    fn branch_eq_imm(&mut self, _s1: RawReg, _s2: u32, _imm: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.branch_eq_imm;
        self.start_new_basic_block();
    }

    #[inline(always)]
    fn branch_not_eq_imm(&mut self, _s1: RawReg, _s2: u32, _imm: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.branch_not_eq_imm;
        self.start_new_basic_block();
    }

    #[inline(always)]
    fn branch_less_unsigned_imm(&mut self, _s1: RawReg, _s2: u32, _imm: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.branch_less_unsigned_imm;
        self.start_new_basic_block();
    }

    #[inline(always)]
    fn branch_less_signed_imm(&mut self, _s1: RawReg, _s2: u32, _imm: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.branch_less_signed_imm;
        self.start_new_basic_block();
    }

    #[inline(always)]
    fn branch_greater_or_equal_unsigned_imm(&mut self, _s1: RawReg, _s2: u32, _imm: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.branch_greater_or_equal_unsigned_imm;
        self.start_new_basic_block();
    }

    #[inline(always)]
    fn branch_greater_or_equal_signed_imm(&mut self, _s1: RawReg, _s2: u32, _imm: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.branch_greater_or_equal_signed_imm;
        self.start_new_basic_block();
    }

    #[inline(always)]
    fn branch_less_or_equal_unsigned_imm(&mut self, _s1: RawReg, _s2: u32, _imm: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.branch_less_or_equal_unsigned_imm;
        self.start_new_basic_block();
    }

    #[inline(always)]
    fn branch_less_or_equal_signed_imm(&mut self, _s1: RawReg, _s2: u32, _imm: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.branch_less_or_equal_signed_imm;
        self.start_new_basic_block();
    }

    #[inline(always)]
    fn branch_greater_unsigned_imm(&mut self, _s1: RawReg, _s2: u32, _imm: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.branch_greater_unsigned_imm;
        self.start_new_basic_block();
    }

    #[inline(always)]
    fn branch_greater_signed_imm(&mut self, _s1: RawReg, _s2: u32, _imm: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.branch_greater_signed_imm;
        self.start_new_basic_block();
    }

    #[inline(always)]
    fn load_imm(&mut self, _dst: RawReg, _value: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.load_imm;
    }

    #[inline(always)]
    fn load_imm64(&mut self, _dst: RawReg, _value: u64) -> Self::ReturnTy {
        self.cost += self.cost_model.load_imm64;
    }

    #[inline(always)]
    fn load_imm_and_jump(&mut self, _ra: RawReg, _value: u32, _target: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.load_imm_and_jump;
        self.start_new_basic_block();
    }

    #[inline(always)]
    fn load_imm_and_jump_indirect(&mut self, _ra: RawReg, _base: RawReg, _value: u32, _offset: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.load_imm_and_jump_indirect;
        self.start_new_basic_block();
    }

    #[inline(always)]
    fn jump(&mut self, _target: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.jump;
        self.start_new_basic_block();
    }

    #[inline(always)]
    fn jump_indirect(&mut self, _base: RawReg, _offset: u32) -> Self::ReturnTy {
        self.cost += self.cost_model.jump_indirect;
        self.start_new_basic_block();
    }
}

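/// Calculates the gas cost of the basic block at the start of `instructions`.
///
/// Returns the cost of the block, plus a flag indicating whether the visitor was still at the
/// start of a basic block when the instruction stream ran out, i.e. the block starts out of bounds.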
pub fn calculate_for_block<G, I>(mut visitor: G, mut instructions: Instructions<I>) -> (u32, bool)
where
    G: GasVisitorT,
    I: InstructionSet,
{
    debug_assert!(visitor.is_at_start_of_basic_block());
    while instructions.visit_parsing(&mut visitor).is_some() {
        if let Some(cost) = visitor.take_block_cost() {
            return (cost, false);
        }
    }

    if let Some(cost) = visitor.take_block_cost() {
        (cost, false)
    } else {
        let started_out_of_bounds = visitor.is_at_start_of_basic_block();

        // We've ended out of bounds, so assume there's an implicit trap there.
        visitor.trap(0, 0); // TODO: Currently it doesn't matter, but pass correct offsets.
        (visitor.take_block_cost().unwrap(), started_out_of_bounds)
    }
}

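/// Returns the cost of a block consisting solely of an implicit trap.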
pub fn trap_cost<G>(mut gas_visitor: G) -> u32
where
    G: GasVisitorT,
{
    debug_assert!(gas_visitor.is_at_start_of_basic_block());
    gas_visitor.trap(0, 0); // TODO: Currently it doesn't matter, but pass correct offsets.
    gas_visitor.take_block_cost().unwrap()
}