// frame_benchmarking_cli/machine/mod.rs

//! Contains the [`MachineCmd`] and its helpers to benchmark the hardware of
//! the machine that the node runs on.

pub mod hardware;
use std::{boxed::Box, fs, path::Path};

use clap::Parser;
use comfy_table::{Row, Table};
use log::{error, info, warn};

use sc_cli::{CliConfiguration, Result, SharedParams};
use sc_service::Configuration;
use sc_sysinfo::{
    benchmark_cpu, benchmark_cpu_parallelism, benchmark_disk_random_writes,
    benchmark_disk_sequential_writes, benchmark_memory, benchmark_sr25519_verify, ExecutionLimit,
    Metric, Requirement, Requirements, Throughput,
};

use crate::shared::check_build_profile;
pub use hardware::SUBSTRATE_REFERENCE_HARDWARE;
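
/// Command to benchmark the hardware of the machine.
///
/// Runs all benchmarks from the given [`Requirements`] and prints the scores
/// and pass/fail outcome as a table. Substrate-based nodes typically expose
/// this as the `benchmark machine` subcommand.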
#[derive(Debug, Parser)]
pub struct MachineCmd {
    #[allow(missing_docs)]
    #[clap(flatten)]
    pub shared_params: SharedParams,
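
    /// Do not return an error if any benchmark misses its requirement;
    /// log a warning instead.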
    #[arg(long)]
    pub allow_fail: bool,
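
    /// Fault tolerance in percent for passing a requirement.
    ///
    /// With the default of 10.0, a score of at least 90% of the minimum
    /// still counts as a pass.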
    #[arg(long, default_value_t = 10.0, value_name = "PERCENT")]
    pub tolerance: f64,
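
    /// Time limit for the Sr25519 signature verification benchmark.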
    #[arg(long, default_value_t = 5.0, value_name = "SECONDS")]
    pub verify_duration: f32,
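
    /// Time limit for the BLAKE2-256 hashing benchmarks.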
    #[arg(long, default_value_t = 5.0, value_name = "SECONDS")]
    pub hash_duration: f32,
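
    /// Time limit for the memory copy benchmark.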
    #[arg(long, default_value_t = 5.0, value_name = "SECONDS")]
    pub memory_duration: f32,
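
    /// Time limit for each of the disk benchmarks (sequential and random writes).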
    #[arg(long, default_value_t = 5.0, value_name = "SECONDS")]
    pub disk_duration: f32,
}
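
/// The result of a single hardware benchmark.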
struct BenchResult {
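    /// Whether the score met its requirement, within the configured tolerance.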
    passed: bool,
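
    /// The absolute score that was achieved.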
    score: Throughput,
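
    /// The score relative to the minimal requirement.
    ///
    /// Can be above or below 1.0; values off by a factor of at least 100
    /// are rejected as implausible.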
    rel_score: f64,
}
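
/// An error that can occur while benchmarking the machine.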
#[derive(Debug, thiserror::Error)]
#[allow(missing_docs)]
pub enum Error {
    #[error("One of the benchmarks had a score that was lower than its requirement")]
    UnmetRequirement,

    #[error("The build profile is unfit for benchmarking: {0}")]
    BadBuildProfile(String),

    #[error("Benchmark results are off by at least a factor of 100")]
    BadResults,
}
impl MachineCmd {
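    /// Execute all benchmarks and print a summary of the results.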
    pub fn run(&self, cfg: &Configuration, requirements: Requirements) -> Result<()> {
        self.validate_args()?;
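        // Ensure the database directory exists, since the disk benchmarks write into it.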
        let dir = cfg.database.path().ok_or("No DB directory provided")?;
        fs::create_dir_all(dir)?;

        info!("Running machine benchmarks...");
        let mut results = Vec::new();
        for requirement in &requirements.0 {
            let result = self.run_benchmark(requirement, &dir)?;
            results.push(result);
        }
        self.print_summary(requirements, results)
    }
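
    /// Benchmark a single metric and judge the resulting score against its requirement.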
    fn run_benchmark(&self, requirement: &Requirement, dir: &Path) -> Result<BenchResult> {
        let score = self.measure(&requirement.metric, dir)?;
        let rel_score = score.as_bytes() / requirement.minimum.as_bytes();
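
        // Sanity check: treat a result that is off by a factor of at least 100 as an error.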
        if rel_score >= 100.0 || rel_score <= 0.01 {
            self.check_failed(Error::BadResults)?;
        }
        let passed = rel_score >= (1.0 - (self.tolerance / 100.0));
        Ok(BenchResult { passed, score, rel_score })
    }
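
    /// Measure a metric by dispatching to the matching `sc_sysinfo` benchmark function.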
    fn measure(&self, metric: &Metric, dir: &Path) -> Result<Throughput> {
        let verify_limit = ExecutionLimit::from_secs_f32(self.verify_duration);
        let disk_limit = ExecutionLimit::from_secs_f32(self.disk_duration);
        let hash_limit = ExecutionLimit::from_secs_f32(self.hash_duration);
        let memory_limit = ExecutionLimit::from_secs_f32(self.memory_duration);

        let score = match metric {
            Metric::Blake2256 => benchmark_cpu(hash_limit),
            Metric::Blake2256Parallel { num_cores } =>
                benchmark_cpu_parallelism(hash_limit, *num_cores),
            Metric::Sr25519Verify => benchmark_sr25519_verify(verify_limit),
            Metric::MemCopy => benchmark_memory(memory_limit),
            Metric::DiskSeqWrite => benchmark_disk_sequential_writes(disk_limit, dir)?,
            Metric::DiskRndWrite => benchmark_disk_random_writes(disk_limit, dir)?,
        };
        Ok(score)
    }
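
    /// Print a summary table of all results and check them against the requirements.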
    fn print_summary(&self, requirements: Requirements, results: Vec<BenchResult>) -> Result<()> {
        let mut table = Table::new();
        table.set_header(["Category", "Function", "Score", "Minimum", "Result"]);
        let (mut passed, mut failed) = (0, 0);
        for (requirement, result) in requirements.0.iter().zip(results.iter()) {
            if result.passed {
                passed += 1
            } else {
                failed += 1
            }

            table.add_row(result.to_row(requirement));
        }
        info!(
            "\n{}\nFrom {} benchmarks in total, {} passed and {} failed ({:.0?}% fault tolerance).",
            table,
            passed + failed,
            passed,
            failed,
            self.tolerance
        );
        if failed != 0 {
            info!("The hardware fails to meet the requirements");
            self.check_failed(Error::UnmetRequirement)?;
        } else {
            info!("The hardware meets the requirements");
        }
        if let Err(err) = check_build_profile() {
            self.check_failed(Error::BadBuildProfile(err))?;
        }
        Ok(())
    }
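
    /// Return the error, or merely log it when `--allow-fail` is set.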
    fn check_failed(&self, e: Error) -> Result<()> {
        if !self.allow_fail {
            error!("Failing since --allow-fail is not set");
            Err(sc_cli::Error::Application(Box::new(e)))
        } else {
            warn!("Ignoring error since --allow-fail is set: {:?}", e);
            Ok(())
        }
    }
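
    /// Validate the CLI arguments; `--tolerance` must be a percentage in `0..=100`.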
    fn validate_args(&self) -> Result<()> {
        if self.tolerance > 100.0 || self.tolerance < 0.0 {
            return Err("The --tolerance argument is out of range".into())
        }
        Ok(())
    }
}
impl BenchResult {
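    /// Format the result as a row of the summary table.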
    fn to_row(&self, req: &Requirement) -> Row {
        let passed = if self.passed { "✅ Pass" } else { "❌ Fail" };
        vec![
            req.metric.category().into(),
            req.metric.name().into(),
            format!("{}", self.score),
            format!("{}", req.minimum),
            format!("{} ({: >5.1?} %)", passed, self.rel_score * 100.0),
        ]
        .into()
    }
}
impl CliConfiguration for MachineCmd {
    fn shared_params(&self) -> &SharedParams {
        &self.shared_params
    }
}