use std::{
collections::{HashMap, HashSet},
fs,
path::PathBuf,
};
use inflector::Inflector;
use itertools::Itertools;
use serde::Serialize;
use crate::{
pallet::{
command::{PovEstimationMode, PovModesMap},
types::{ComponentRange, ComponentRangeMap},
},
shared::UnderscoreHelper,
PalletCmd,
};
use frame_benchmarking::{
Analysis, AnalysisChoice, BenchmarkBatchSplitResults, BenchmarkResult, BenchmarkSelector,
};
use frame_support::traits::StorageInfo;
use sp_core::hexdisplay::HexDisplay;
use sp_runtime::traits::Zero;
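// Version of the benchmarking CLI, taken from the crate metadata at compile time.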
const VERSION: &str = env!("CARGO_PKG_VERSION");
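// Built-in Handlebars template for the generated weight files.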
const TEMPLATE: &str = include_str!("./template.hbs");
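// All data that gets passed to the Handlebars template when rendering a weight file.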
#[derive(Serialize, Default, Debug, Clone)]
struct TemplateData {
args: Vec<String>,
date: String,
hostname: String,
cpuname: String,
version: String,
pallet: String,
instance: String,
header: String,
cmd: CmdData,
benchmarks: Vec<BenchmarkData>,
}
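// The rendered data of a single benchmark.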
#[derive(Serialize, Default, Debug, Clone, PartialEq)]
struct BenchmarkData {
name: String,
components: Vec<Component>,
#[serde(serialize_with = "string_serialize")]
base_weight: u128,
#[serde(serialize_with = "string_serialize")]
base_reads: u128,
#[serde(serialize_with = "string_serialize")]
base_writes: u128,
#[serde(serialize_with = "string_serialize")]
base_calculated_proof_size: u128,
#[serde(serialize_with = "string_serialize")]
base_recorded_proof_size: u128,
component_weight: Vec<ComponentSlope>,
component_reads: Vec<ComponentSlope>,
component_writes: Vec<ComponentSlope>,
component_calculated_proof_size: Vec<ComponentSlope>,
component_recorded_proof_size: Vec<ComponentSlope>,
component_ranges: Vec<ComponentRange>,
comments: Vec<String>,
#[serde(serialize_with = "string_serialize")]
min_execution_time: u128,
}
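// The CLI arguments that produced these results, echoed into the generated file.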
#[derive(Serialize, Default, Debug, Clone)]
struct CmdData {
steps: u32,
repeat: u32,
lowest_range_values: Vec<u32>,
highest_range_values: Vec<u32>,
wasm_execution: String,
chain: String,
db_cache: u32,
analysis_choice: String,
worst_case_map_values: u32,
additional_trie_layers: u8,
}
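// A benchmark component (ranged input parameter) and whether it actually influences the results.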
#[derive(Serialize, Debug, Clone, Eq, PartialEq)]
struct Component {
name: String,
is_used: bool,
}
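// The slope that a component contributes to a metric, together with its fitting error.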
#[derive(Serialize, Debug, Clone, Eq, PartialEq)]
struct ComponentSlope {
name: String,
#[serde(serialize_with = "string_serialize")]
slope: u128,
#[serde(serialize_with = "string_serialize")]
error: u128,
}
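// Small helper to build an `std::io::Error` from a string.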
fn io_error(s: &str) -> std::io::Error {
use std::io::{Error, ErrorKind};
Error::new(ErrorKind::Other, s)
}
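// Organizes the benchmark results into a map keyed by `(pallet, instance)`.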
fn map_results(
batches: &[BenchmarkBatchSplitResults],
storage_info: &[StorageInfo],
component_ranges: &ComponentRangeMap,
pov_modes: PovModesMap,
default_pov_mode: PovEstimationMode,
analysis_choice: &AnalysisChoice,
pov_analysis_choice: &AnalysisChoice,
worst_case_map_values: u32,
additional_trie_layers: u8,
) -> Result<HashMap<(String, String), Vec<BenchmarkData>>, std::io::Error> {
if batches.is_empty() {
return Err(io_error("empty batches"))
}
let mut all_benchmarks = HashMap::<_, Vec<BenchmarkData>>::new();
for batch in batches {
if batch.time_results.is_empty() {
continue
}
let pallet_name = String::from_utf8(batch.pallet.clone()).unwrap();
let instance_name = String::from_utf8(batch.instance.clone()).unwrap();
let benchmark_data = get_benchmark_data(
batch,
storage_info,
component_ranges,
pov_modes.clone(),
default_pov_mode,
analysis_choice,
pov_analysis_choice,
worst_case_map_values,
additional_trie_layers,
);
let pallet_benchmarks = all_benchmarks.entry((pallet_name, instance_name)).or_default();
pallet_benchmarks.push(benchmark_data);
}
Ok(all_benchmarks)
}
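// Returns an iterator over the analysis errors, padded with zeros if none were recorded.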
fn extract_errors(errors: &Option<Vec<u128>>) -> impl Iterator<Item = u128> + '_ {
errors
.as_ref()
.map(|e| e.as_slice())
.unwrap_or(&[])
.iter()
.copied()
.chain(std::iter::repeat(0))
}
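// Analyzes one benchmark batch and produces the data needed to render it.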
fn get_benchmark_data(
batch: &BenchmarkBatchSplitResults,
storage_info: &[StorageInfo],
component_ranges: &ComponentRangeMap,
pov_modes: PovModesMap,
default_pov_mode: PovEstimationMode,
analysis_choice: &AnalysisChoice,
pov_analysis_choice: &AnalysisChoice,
worst_case_map_values: u32,
additional_trie_layers: u8,
) -> BenchmarkData {
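	// Map the analysis function to the choice made via the CLI.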
let analysis_function = match analysis_choice {
AnalysisChoice::MinSquares => Analysis::min_squares_iqr,
AnalysisChoice::MedianSlopes => Analysis::median_slopes,
AnalysisChoice::Max => Analysis::max,
};
let pov_analysis_function = match pov_analysis_choice {
AnalysisChoice::MinSquares => Analysis::min_squares_iqr,
AnalysisChoice::MedianSlopes => Analysis::median_slopes,
AnalysisChoice::Max => Analysis::max,
};
let pallet = String::from_utf8(batch.pallet.clone()).unwrap();
let benchmark = String::from_utf8(batch.benchmark.clone()).unwrap();
let extrinsic_time = analysis_function(&batch.time_results, BenchmarkSelector::ExtrinsicTime)
.expect("analysis function should return an extrinsic time for valid inputs");
let reads = analysis_function(&batch.db_results, BenchmarkSelector::Reads)
.expect("analysis function should return the number of reads for valid inputs");
let writes = analysis_function(&batch.db_results, BenchmarkSelector::Writes)
.expect("analysis function should return the number of writes for valid inputs");
let recorded_proof_size =
pov_analysis_function(&batch.db_results, BenchmarkSelector::ProofSize)
.expect("analysis function should return proof sizes for valid inputs");
let mut used_components = Vec::new();
let mut used_extrinsic_time = Vec::new();
let mut used_reads = Vec::new();
let mut used_writes = Vec::new();
let mut used_calculated_proof_size = Vec::<ComponentSlope>::new();
let mut used_recorded_proof_size = Vec::<ComponentSlope>::new();
extrinsic_time
.slopes
.into_iter()
.zip(extrinsic_time.names.iter())
.zip(extract_errors(&extrinsic_time.errors))
.for_each(|((slope, name), error)| {
if !slope.is_zero() {
if !used_components.contains(&name) {
used_components.push(name);
}
used_extrinsic_time.push(ComponentSlope { name: name.clone(), slope, error });
}
});
reads
.slopes
.into_iter()
.zip(reads.names.iter())
.zip(extract_errors(&reads.errors))
.for_each(|((slope, name), error)| {
if !slope.is_zero() {
if !used_components.contains(&name) {
used_components.push(name);
}
used_reads.push(ComponentSlope { name: name.clone(), slope, error });
}
});
writes
.slopes
.into_iter()
.zip(writes.names.iter())
.zip(extract_errors(&writes.errors))
.for_each(|((slope, name), error)| {
if !slope.is_zero() {
if !used_components.contains(&name) {
used_components.push(name);
}
used_writes.push(ComponentSlope { name: name.clone(), slope, error });
}
});
recorded_proof_size
.slopes
.into_iter()
.zip(recorded_proof_size.names.iter())
.zip(extract_errors(&recorded_proof_size.errors))
.for_each(|((slope, name), error)| {
if !slope.is_zero() {
used_recorded_proof_size.push(ComponentSlope { name: name.clone(), slope, error });
}
});
used_recorded_proof_size.sort_by(|a, b| a.name.cmp(&b.name));
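	// Group the storage results per prefix and pull in the PoV modes configured for this benchmark.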
let mut storage_per_prefix = HashMap::<Vec<u8>, Vec<BenchmarkResult>>::new();
let pov_mode = pov_modes.get(&(pallet.clone(), benchmark.clone())).cloned().unwrap_or_default();
let comments = process_storage_results(
&mut storage_per_prefix,
&batch.db_results,
storage_info,
&pov_mode,
default_pov_mode,
worst_case_map_values,
additional_trie_layers,
);
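	// Run the proof-size analysis for every storage prefix separately.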
let proof_size_per_components = storage_per_prefix
.iter()
.map(|(prefix, results)| {
let proof_size = analysis_function(results, BenchmarkSelector::ProofSize)
.expect("analysis function should return proof sizes for valid inputs");
let slope = proof_size
.slopes
.into_iter()
.zip(proof_size.names.iter())
.zip(extract_errors(&proof_size.errors))
.map(|((slope, name), error)| ComponentSlope { name: name.clone(), slope, error })
.collect::<Vec<_>>();
(prefix.clone(), slope, proof_size.base)
})
.collect::<Vec<_>>();
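	// Keep the maximum base and the maximum slope per component across all prefixes.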
let mut base_calculated_proof_size = 0;
for (_, slope, base) in proof_size_per_components.iter() {
base_calculated_proof_size = base_calculated_proof_size.max(*base);
for component in slope.iter() {
let mut found = false;
for used_component in used_calculated_proof_size.iter_mut() {
if used_component.name == component.name {
used_component.slope = used_component.slope.max(component.slope);
found = true;
break
}
}
if !found && !component.slope.is_zero() {
if !used_components.contains(&&component.name) {
used_components.push(&component.name);
}
used_calculated_proof_size.push(ComponentSlope {
name: component.name.clone(),
slope: component.slope,
error: component.error,
});
}
}
}
used_calculated_proof_size.sort_by(|a, b| a.name.cmp(&b.name));
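	// List all components and flag the ones that are actually used by the benchmark.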
let components = batch.time_results[0]
.components
.iter()
.map(|(name, _)| -> Component {
let name_string = name.to_string();
let is_used = used_components.contains(&&name_string);
Component { name: name_string, is_used }
})
.collect::<Vec<_>>();
let component_ranges = component_ranges
	.get(&(pallet.clone(), benchmark.clone()))
	.cloned()
	.unwrap_or_default();
BenchmarkData {
name: benchmark,
components,
base_weight: extrinsic_time.base,
base_reads: reads.base,
base_writes: writes.base,
base_calculated_proof_size,
base_recorded_proof_size: recorded_proof_size.base,
component_weight: used_extrinsic_time,
component_reads: used_reads,
component_writes: used_writes,
component_calculated_proof_size: used_calculated_proof_size,
component_recorded_proof_size: used_recorded_proof_size,
component_ranges,
comments,
min_execution_time: extrinsic_time.minimum,
}
}
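// Renders one weight file per `(pallet, instance)` pair and writes them to `path`.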
pub(crate) fn write_results(
batches: &[BenchmarkBatchSplitResults],
storage_info: &[StorageInfo],
component_ranges: &HashMap<(String, String), Vec<ComponentRange>>,
pov_modes: PovModesMap,
default_pov_mode: PovEstimationMode,
path: &PathBuf,
cmd: &PalletCmd,
) -> Result<(), sc_cli::Error> {
let template: String = match &cmd.template {
Some(template_file) => fs::read_to_string(template_file)?,
None => TEMPLATE.to_string(),
};
let header_text = match &cmd.header {
	Some(header_file) => fs::read_to_string(header_file)?,
	None => String::new(),
};
let date = chrono::Utc::now().format("%Y-%m-%d").to_string();
let args = std::env::args().collect::<Vec<String>>();
let analysis_choice: AnalysisChoice =
cmd.output_analysis.clone().try_into().map_err(io_error)?;
let pov_analysis_choice: AnalysisChoice =
cmd.output_pov_analysis.clone().try_into().map_err(io_error)?;
if cmd.additional_trie_layers > 4 {
println!(
"WARNING: `additional_trie_layers` is unexpectedly large. It assumes {} storage items.",
16f64.powi(cmd.additional_trie_layers as i32)
)
}
let cmd_data = CmdData {
steps: cmd.steps,
repeat: cmd.repeat,
lowest_range_values: cmd.lowest_range_values.clone(),
highest_range_values: cmd.highest_range_values.clone(),
wasm_execution: cmd.wasm_method.to_string(),
chain: format!("{:?}", cmd.shared_params.chain),
db_cache: cmd.database_cache_size,
analysis_choice: format!("{:?}", analysis_choice),
worst_case_map_values: cmd.worst_case_map_values,
additional_trie_layers: cmd.additional_trie_layers,
};
let mut handlebars = handlebars::Handlebars::new();
handlebars.register_helper("underscore", Box::new(UnderscoreHelper));
handlebars.register_helper("join", Box::new(JoinHelper));
handlebars.register_escape_fn(|s| -> String { s.to_string() });
let all_results = map_results(
batches,
storage_info,
component_ranges,
pov_modes,
default_pov_mode,
&analysis_choice,
&pov_analysis_choice,
cmd.worst_case_map_values,
cmd.additional_trie_layers,
)?;
let mut created_files = Vec::new();
for ((pallet, instance), results) in all_results.iter() {
let mut file_path = path.clone();
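		// When the output path is a directory, derive a per-pallet file name and
		// suffix the instance when the same pallet is deployed more than once.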
if file_path.is_dir() {
let mut file_name = pallet.clone();
if all_results.keys().any(|(p, i)| p == pallet && i != instance) {
file_name = format!("{}_{}", file_name, instance.to_snake_case());
}
file_path.push(file_name.replace("::", "_"));
file_path.set_extension("rs");
}
let hbs_data = TemplateData {
args: args.clone(),
date: date.clone(),
hostname: cmd.hostinfo_params.hostname(),
cpuname: cmd.hostinfo_params.cpuname(),
version: VERSION.to_string(),
pallet: pallet.to_string(),
instance: instance.to_string(),
header: header_text.clone(),
cmd: cmd_data.clone(),
benchmarks: results.clone(),
};
let mut output_file = fs::File::create(&file_path).map_err(|e| {
format!("Could not write weight file to: {:?}. Error: {:?}", &file_path, e)
})?;
handlebars
.render_template_to_write(&template, &hbs_data, &mut output_file)
.map_err(|e| io_error(&e.to_string()))?;
println!("Created file: {:?}", &file_path);
created_files.push(file_path);
}
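	// The same path occurring twice means one set of results silently overwrote another.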
let overwritten_files = created_files.iter().duplicates().collect::<Vec<_>>();
if !overwritten_files.is_empty() {
let msg = format!(
"Multiple results were written to the same file. This can happen when \
there are multiple instances of a pallet deployed and `--output` forces the output of all \
instances into the same file. Use `--unsafe-overwrite-results` to ignore this error. The \
affected files are: {:?}",
overwritten_files
);
if cmd.unsafe_overwrite_results {
println!("{msg}");
} else {
return Err(msg.into())
}
}
Ok(())
}
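// Organizes the storage access results per prefix and generates the
// `Storage:`/`Proof:` comments for a benchmark.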
pub(crate) fn process_storage_results(
storage_per_prefix: &mut HashMap<Vec<u8>, Vec<BenchmarkResult>>,
results: &[BenchmarkResult],
storage_info: &[StorageInfo],
pov_modes: &HashMap<(String, String), PovEstimationMode>,
default_pov_mode: PovEstimationMode,
worst_case_map_values: u32,
additional_trie_layers: u8,
) -> Vec<String> {
let mut comments = Vec::new();
let mut storage_info_map = storage_info
.iter()
.map(|info| (info.prefix.clone(), info))
.collect::<HashMap<_, _>>();
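	// Synthetic storage-info entries for the special `Skipped Metadata` and
	// `Benchmark Override` prefixes emitted by the benchmarking framework.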
let skip_storage_info = StorageInfo {
pallet_name: b"Skipped".to_vec(),
storage_name: b"Metadata".to_vec(),
prefix: b"Skipped Metadata".to_vec(),
max_values: None,
max_size: None,
};
storage_info_map.insert(skip_storage_info.prefix.clone(), &skip_storage_info);
let benchmark_override = StorageInfo {
pallet_name: b"Benchmark".to_vec(),
storage_name: b"Override".to_vec(),
prefix: b"Benchmark Override".to_vec(),
max_values: None,
max_size: None,
};
storage_info_map.insert(benchmark_override.prefix.clone(), &benchmark_override);
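	// Prefixes and keys that were already handled; comments are only emitted on first sight.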
let mut identified_prefix = HashSet::<Vec<u8>>::new();
let mut identified_key = HashSet::<Vec<u8>>::new();
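	// Iterate in reverse so the results with the largest component values come first.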
for result in results.iter().rev() {
for (key, reads, writes, whitelisted) in &result.keys {
if *whitelisted {
continue
}
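			// The first 32 bytes of a key are the pallet and storage prefix: two 16-byte twox128 hashes.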
let prefix_length = key.len().min(32);
let prefix = key[0..prefix_length].to_vec();
let is_key_identified = identified_key.contains(key);
let is_prefix_identified = identified_prefix.contains(&prefix);
let mut prefix_result = result.clone();
let key_info = storage_info_map.get(&prefix);
let pallet_name = match key_info {
Some(k) => String::from_utf8(k.pallet_name.clone()).expect("encoded from string"),
None => "".to_string(),
};
let storage_name = match key_info {
Some(k) => String::from_utf8(k.storage_name.clone()).expect("encoded from string"),
None => "".to_string(),
};
let max_size = key_info.and_then(|k| k.max_size);
let override_pov_mode = match key_info {
Some(_) => {
pov_modes.get(&(pallet_name.clone(), storage_name.clone())).or(
pov_modes.get(&(pallet_name.clone(), "ALL".to_string())).or(
pov_modes.get(&("ALL".to_string(), "ALL".to_string())),
),
)
},
None => None,
};
let is_all_ignored = pov_modes.get(&("ALL".to_string(), "ALL".to_string())) ==
Some(&PovEstimationMode::Ignored);
if is_all_ignored && override_pov_mode != Some(&PovEstimationMode::Ignored) {
panic!("The syntax currently does not allow to exclude single keys from a top-level `Ignored` pov-mode.");
}
let pov_overhead = single_read_pov_overhead(
key_info.and_then(|i| i.max_values),
worst_case_map_values,
);
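			// Pick the PoV estimation mode; an explicit override takes precedence over the default.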
let used_pov_mode = match (override_pov_mode, max_size, default_pov_mode) {
(None, _, PovEstimationMode::Ignored) => {
prefix_result.proof_size = 0;
PovEstimationMode::Ignored
},
(Some(PovEstimationMode::Ignored), _, _) => {
	// When *all* keys are ignored the proof size is zeroed out entirely;
	// otherwise the recorded proof size of this key is simply left untouched.
	if is_all_ignored {
		prefix_result.proof_size = 0;
	}
	PovEstimationMode::Ignored
},
(Some(PovEstimationMode::Measured), _, _) |
(None, _, PovEstimationMode::Measured) |
(None, None, PovEstimationMode::MaxEncodedLen) => {
prefix_result.proof_size += pov_overhead * *reads;
PovEstimationMode::Measured
},
(Some(PovEstimationMode::MaxEncodedLen), Some(max_size), _) |
(None, Some(max_size), PovEstimationMode::MaxEncodedLen) => {
prefix_result.proof_size = (pov_overhead + max_size) * *reads;
PovEstimationMode::MaxEncodedLen
},
(Some(PovEstimationMode::MaxEncodedLen), None, _) => {
panic!("Key does not have MEL bound but MEL PoV estimation mode was specified {:?}", &key);
},
};
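			// Charge the configured additional trie layers on top of every prefix that was read.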
if *reads > 0 && !is_all_ignored {
prefix_result.proof_size += 15 * 33 * additional_trie_layers as u32;
}
storage_per_prefix.entry(prefix.clone()).or_default().push(prefix_result);
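			// Record the key and prefix as seen; fully known keys need no further comments.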
match (is_key_identified, is_prefix_identified) {
(true, true) => continue,
(false, true) => {
identified_key.insert(key.clone());
},
(false, false) => {
identified_key.insert(key.clone());
identified_prefix.insert(prefix.clone());
},
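				// A key cannot have been seen without its prefix having been seen as well.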
(true, false) => unreachable!(),
}
if !is_prefix_identified {
match key_info {
Some(_) => {
let comment = format!(
"Storage: `{}::{}` (r:{} w:{})",
pallet_name, storage_name, reads, writes,
);
comments.push(comment)
},
None => {
let comment = format!(
"Storage: UNKNOWN KEY `0x{}` (r:{} w:{})",
HexDisplay::from(key),
reads,
writes,
);
comments.push(comment)
},
}
}
if !is_key_identified {
match key_info {
Some(key_info) => {
match worst_case_pov(
key_info.max_values,
key_info.max_size,
!is_prefix_identified,
worst_case_map_values,
) {
Some(new_pov) => {
let comment = format!(
"Proof: `{pallet_name}::{storage_name}` (`max_values`: {:?}, `max_size`: {:?}, added: {}, mode: `{:?}`)",
key_info.max_values,
key_info.max_size,
new_pov,
used_pov_mode,
);
comments.push(comment)
},
None => {
let comment = format!(
"Proof: `{}::{}` (`max_values`: {:?}, `max_size`: {:?}, mode: `{:?}`)",
pallet_name, storage_name, key_info.max_values, key_info.max_size,
used_pov_mode,
);
comments.push(comment);
},
}
},
None => {
let comment = format!(
"Proof: UNKNOWN KEY `0x{}` (r:{} w:{})",
HexDisplay::from(key),
reads,
writes,
);
comments.push(comment)
},
}
}
}
}
comments
}
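// The trie overhead of a single storage read: the trie depth for the expected number of
// values, times the per-layer worst case of `15 * 33` bytes (15 sibling hashes of 33 bytes each).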
fn single_read_pov_overhead(max_values: Option<u32>, worst_case_map_values: u32) -> u32 {
let max_values = max_values.unwrap_or(worst_case_map_values);
let depth: u32 = easy_log_16(max_values);
depth * 15 * 33
}
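// Worst-case PoV size of one storage access, or `None` if the value has no `max_size` (MEL)
// bound. The trie-depth overhead is only charged when the prefix is accessed the first time.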
fn worst_case_pov(
max_values: Option<u32>,
max_size: Option<u32>,
is_new_prefix: bool,
worst_case_map_values: u32,
) -> Option<u32> {
if let Some(max_size) = max_size {
let trie_size: u32 = if is_new_prefix {
single_read_pov_overhead(max_values, worst_case_map_values)
} else {
0
};
Some(trie_size + max_size)
} else {
None
}
}
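// The number of 16-ary trie layers needed to hold `i` values: 0 for 0, 1 for up to 16,
// 2 for up to 256 and so on, saturating at 8 (enough for any `u32`).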
fn easy_log_16(i: u32) -> u32 {
match i {
	0 => 0,
i if i <= 16 => 1,
i if i <= 256 => 2,
i if i <= 4_096 => 3,
i if i <= 65_536 => 4,
i if i <= 1_048_576 => 5,
i if i <= 16_777_216 => 6,
i if i <= 268_435_456 => 7,
_ => 8,
}
}
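// Handlebars helper to join an array of values with spaces, e.g. for rendering the CLI args.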
#[derive(Clone, Copy)]
struct JoinHelper;
impl handlebars::HelperDef for JoinHelper {
fn call<'reg: 'rc, 'rc>(
&self,
h: &handlebars::Helper,
_: &handlebars::Handlebars,
_: &handlebars::Context,
_rc: &mut handlebars::RenderContext,
out: &mut dyn handlebars::Output,
) -> handlebars::HelperResult {
use handlebars::JsonRender;
let param = h.param(0).unwrap();
let value = param.value();
let joined = if value.is_array() {
value
.as_array()
.unwrap()
.iter()
.map(|v| v.render())
.collect::<Vec<String>>()
.join(" ")
} else {
value.render()
};
out.write(&joined)?;
Ok(())
}
}
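// `u128` does not fit into a JSON number, so it is serialized as a string instead.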
fn string_serialize<S>(x: &u128, s: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
s.serialize_str(&x.to_string())
}
#[cfg(test)]
mod test {
use super::*;
use frame_benchmarking::{BenchmarkBatchSplitResults, BenchmarkParameter, BenchmarkResult};
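	// Builds a synthetic benchmark batch whose time and db results grow linearly in `param`.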
fn test_data(
pallet: &[u8],
benchmark: &[u8],
param: BenchmarkParameter,
base: u32,
slope: u32,
) -> BenchmarkBatchSplitResults {
let mut results = Vec::new();
for i in 0..5 {
results.push(BenchmarkResult {
components: vec![(param, i)],
extrinsic_time: (base + slope * i).into(),
storage_root_time: (base + slope * i).into(),
reads: (base + slope * i).into(),
repeat_reads: 0,
writes: (base + slope * i).into(),
repeat_writes: 0,
proof_size: (i + 1) * 1024,
keys: vec![(b"bounded".to_vec(), (base + slope * i), (base + slope * i), false)],
})
}
BenchmarkBatchSplitResults {
pallet: [pallet.to_vec(), b"_pallet".to_vec()].concat(),
instance: b"instance".to_vec(),
benchmark: [benchmark.to_vec(), b"_benchmark".to_vec()].concat(),
time_results: results.clone(),
db_results: results,
}
}
fn test_storage_info() -> Vec<StorageInfo> {
vec![StorageInfo {
pallet_name: b"bounded".to_vec(),
storage_name: b"bounded".to_vec(),
prefix: b"bounded".to_vec(),
max_values: Some(1 << 20),
max_size: Some(32),
}]
}
fn test_pov_mode() -> PovModesMap {
let mut map = PovModesMap::new();
map.entry(("scheduler".into(), "first_benchmark".into()))
.or_default()
.insert(("scheduler".into(), "mel".into()), PovEstimationMode::MaxEncodedLen);
map.entry(("scheduler".into(), "first_benchmark".into()))
.or_default()
.insert(("scheduler".into(), "measured".into()), PovEstimationMode::Measured);
map
}
fn check_data(benchmark: &BenchmarkData, component: &str, base: u128, slope: u128) {
assert_eq!(
benchmark.components,
vec![Component { name: component.to_string(), is_used: true },],
);
assert_eq!(benchmark.base_weight, base * 1_000);
assert_eq!(
benchmark.component_weight,
vec![ComponentSlope { name: component.to_string(), slope: slope * 1_000, error: 0 }]
);
assert_eq!(benchmark.base_reads, base);
assert_eq!(
benchmark.component_reads,
vec![ComponentSlope { name: component.to_string(), slope, error: 0 }]
);
assert_eq!(benchmark.base_writes, base);
assert_eq!(
benchmark.component_writes,
vec![ComponentSlope { name: component.to_string(), slope, error: 0 }]
);
assert_eq!(benchmark.base_recorded_proof_size, 1024);
assert_eq!(
benchmark.component_recorded_proof_size,
vec![ComponentSlope { name: component.to_string(), slope: 1024, error: 0 }]
);
}
#[test]
fn pov_mode_mel_constant_works() {
let mut results = Vec::new();
for i in 0..5 {
results.push(BenchmarkResult {
components: vec![(BenchmarkParameter::s, i)],
extrinsic_time: 0,
storage_root_time: 0,
reads: 1,
repeat_reads: 777,
writes: 888,
repeat_writes: 999,
proof_size: i * 1024,
keys: vec![(b"mel".to_vec(), 1, 1, false)],
})
}
let data = BenchmarkBatchSplitResults {
pallet: b"scheduler".to_vec(),
instance: b"instance".to_vec(),
benchmark: b"first_benchmark".to_vec(),
time_results: results.clone(),
db_results: results,
};
let storage_info = vec![StorageInfo {
pallet_name: b"scheduler".to_vec(),
storage_name: b"mel".to_vec(),
prefix: b"mel".to_vec(),
max_values: None,
max_size: Some(1 << 22),
}];
let mapped_results = map_results(
&[data],
&storage_info,
&Default::default(),
test_pov_mode(),
PovEstimationMode::MaxEncodedLen,
&AnalysisChoice::default(),
&AnalysisChoice::MedianSlopes,
1_000_000,
0,
)
.unwrap();
let result =
mapped_results.get(&("scheduler".to_string(), "instance".to_string())).unwrap()[0]
.clone();
let base = result.base_calculated_proof_size;
assert!(result.component_calculated_proof_size.is_empty(), "There is no slope");
assert_eq!(base, (1 << 22) + 15 * 33 * 5);
}
#[test]
fn pov_mode_mel_linear_works() {
let mut results = Vec::new();
for i in 0..5 {
results.push(BenchmarkResult {
components: vec![(BenchmarkParameter::s, i)],
extrinsic_time: 0,
storage_root_time: 0,
reads: 123,
repeat_reads: 777,
writes: 888,
repeat_writes: 999,
proof_size: i * 1024,
keys: vec![("mel".as_bytes().to_vec(), i, 1, false)],
})
}
let data = BenchmarkBatchSplitResults {
pallet: b"scheduler".to_vec(),
instance: b"instance".to_vec(),
benchmark: b"first_benchmark".to_vec(),
time_results: results.clone(),
db_results: results,
};
let storage_info = vec![StorageInfo {
pallet_name: b"scheduler".to_vec(),
storage_name: b"mel".to_vec(),
prefix: b"mel".to_vec(),
max_values: None,
max_size: Some(1 << 22),
}];
let mapped_results = map_results(
&[data],
&storage_info,
&Default::default(),
test_pov_mode(),
PovEstimationMode::MaxEncodedLen,
&AnalysisChoice::default(),
&AnalysisChoice::MedianSlopes,
1_000_000,
0,
)
.unwrap();
let result =
mapped_results.get(&("scheduler".to_string(), "instance".to_string())).unwrap()[0]
.clone();
let base = result.base_calculated_proof_size;
assert_eq!(result.component_calculated_proof_size.len(), 1, "There is a slope");
let slope = result.component_calculated_proof_size[0].clone().slope;
assert_eq!(base, 0);
assert_eq!(slope, (1 << 22) + 15 * 33 * 5);
}
#[test]
fn pov_mode_measured_const_works() {
let mut results = Vec::new();
for i in 0..5 {
results.push(BenchmarkResult {
components: vec![(BenchmarkParameter::s, i)],
extrinsic_time: 0,
storage_root_time: 0,
reads: 123,
repeat_reads: 777,
writes: 888,
repeat_writes: 999,
proof_size: 1024,
keys: vec![("measured".as_bytes().to_vec(), 1, 1, false)],
})
}
let data = BenchmarkBatchSplitResults {
pallet: b"scheduler".to_vec(),
instance: b"instance".to_vec(),
benchmark: b"first_benchmark".to_vec(),
time_results: results.clone(),
db_results: results,
};
let storage_info = vec![StorageInfo {
pallet_name: b"scheduler".to_vec(),
storage_name: b"measured".to_vec(),
prefix: b"measured".to_vec(),
max_values: None,
max_size: Some(1 << 22),
}];
let mapped_results = map_results(
&[data],
&storage_info,
&Default::default(),
test_pov_mode(),
PovEstimationMode::MaxEncodedLen,
&AnalysisChoice::default(),
&AnalysisChoice::MedianSlopes,
1_000_000,
0,
)
.unwrap();
let result =
mapped_results.get(&("scheduler".to_string(), "instance".to_string())).unwrap()[0]
.clone();
let base = result.base_calculated_proof_size;
assert!(result.component_calculated_proof_size.is_empty(), "There is no slope");
assert_eq!(base, 1024 + 15 * 33 * 5);
}
#[test]
fn pov_mode_measured_linear_works() {
let mut results = Vec::new();
for i in 0..5 {
results.push(BenchmarkResult {
components: vec![(BenchmarkParameter::s, i)],
extrinsic_time: 0,
storage_root_time: 0,
reads: 123,
repeat_reads: 777,
writes: 888,
repeat_writes: 999,
proof_size: i * 1024,
keys: vec![("measured".as_bytes().to_vec(), i, 1, false)],
})
}
let data = BenchmarkBatchSplitResults {
pallet: b"scheduler".to_vec(),
instance: b"instance".to_vec(),
benchmark: b"first_benchmark".to_vec(),
time_results: results.clone(),
db_results: results,
};
let storage_info = vec![StorageInfo {
pallet_name: b"scheduler".to_vec(),
storage_name: b"measured".to_vec(),
prefix: b"measured".to_vec(),
max_values: None,
max_size: Some(1 << 22),
}];
let mapped_results = map_results(
&[data],
&storage_info,
&Default::default(),
test_pov_mode(),
PovEstimationMode::MaxEncodedLen,
&AnalysisChoice::default(),
&AnalysisChoice::MedianSlopes,
1_000_000,
0,
)
.unwrap();
let result =
mapped_results.get(&("scheduler".to_string(), "instance".to_string())).unwrap()[0]
.clone();
let base = result.base_calculated_proof_size;
assert_eq!(result.component_calculated_proof_size.len(), 1, "There is a slope");
let slope = result.component_calculated_proof_size[0].clone().slope;
assert_eq!(base, 0);
assert_eq!(slope, 1024 + 15 * 33 * 5);
}
#[test]
fn pov_mode_ignored_linear_works() {
let mut results = Vec::new();
for i in 0..5 {
results.push(BenchmarkResult {
components: vec![(BenchmarkParameter::s, i)],
extrinsic_time: 0,
storage_root_time: 0,
reads: 123,
repeat_reads: 777,
writes: 888,
repeat_writes: 999,
proof_size: i * 1024,
keys: vec![("ignored".as_bytes().to_vec(), i, 1, false)],
})
}
let data = BenchmarkBatchSplitResults {
pallet: b"scheduler".to_vec(),
instance: b"instance".to_vec(),
benchmark: b"first_benchmark".to_vec(),
time_results: results.clone(),
db_results: results,
};
let storage_info = vec![StorageInfo {
pallet_name: b"scheduler".to_vec(),
storage_name: b"ignored".to_vec(),
prefix: b"ignored".to_vec(),
max_values: None,
max_size: Some(1 << 22),
}];
let mapped_results = map_results(
&[data],
&storage_info,
&Default::default(),
test_pov_mode(),
PovEstimationMode::Ignored,
&AnalysisChoice::default(),
&AnalysisChoice::MedianSlopes,
1_000_000,
0,
)
.unwrap();
let result =
mapped_results.get(&("scheduler".to_string(), "instance".to_string())).unwrap()[0]
.clone();
let base = result.base_calculated_proof_size;
assert!(result.component_calculated_proof_size.is_empty(), "There is no slope");
assert_eq!(base, 0);
}
#[test]
fn map_results_works() {
let mapped_results = map_results(
&[
test_data(b"first", b"first", BenchmarkParameter::a, 10, 3),
test_data(b"first", b"second", BenchmarkParameter::b, 9, 2),
test_data(b"second", b"first", BenchmarkParameter::c, 3, 4),
test_data(b"bounded", b"bounded", BenchmarkParameter::d, 4, 6),
],
&test_storage_info(),
&Default::default(),
Default::default(),
PovEstimationMode::MaxEncodedLen,
&AnalysisChoice::default(),
&AnalysisChoice::MedianSlopes,
1_000_000,
0,
)
.unwrap();
let first_benchmark = &mapped_results
.get(&("first_pallet".to_string(), "instance".to_string()))
.unwrap()[0];
assert_eq!(first_benchmark.name, "first_benchmark");
check_data(first_benchmark, "a", 10, 3);
let second_benchmark = &mapped_results
.get(&("first_pallet".to_string(), "instance".to_string()))
.unwrap()[1];
assert_eq!(second_benchmark.name, "second_benchmark");
check_data(second_benchmark, "b", 9, 2);
let second_pallet_benchmark = &mapped_results
.get(&("second_pallet".to_string(), "instance".to_string()))
.unwrap()[0];
assert_eq!(second_pallet_benchmark.name, "first_benchmark");
check_data(second_pallet_benchmark, "c", 3, 4);
let bounded_pallet_benchmark = &mapped_results
.get(&("bounded_pallet".to_string(), "instance".to_string()))
.unwrap()[0];
assert_eq!(bounded_pallet_benchmark.name, "bounded_benchmark");
check_data(bounded_pallet_benchmark, "d", 4, 6);
assert_eq!(bounded_pallet_benchmark.base_calculated_proof_size, 10028);
assert_eq!(
bounded_pallet_benchmark.component_calculated_proof_size,
vec![ComponentSlope { name: "d".into(), slope: 15042, error: 0 }]
);
}
#[test]
fn additional_trie_layers_work() {
let mapped_results = map_results(
&[test_data(b"first", b"first", BenchmarkParameter::a, 10, 3)],
&test_storage_info(),
&Default::default(),
Default::default(),
PovEstimationMode::MaxEncodedLen,
&AnalysisChoice::default(),
&AnalysisChoice::MedianSlopes,
1_000_000,
2,
)
.unwrap();
let with_layer = &mapped_results
.get(&("first_pallet".to_string(), "instance".to_string()))
.unwrap()[0];
let mapped_results = map_results(
&[test_data(b"first", b"first", BenchmarkParameter::a, 10, 3)],
&test_storage_info(),
&Default::default(),
Default::default(),
PovEstimationMode::MaxEncodedLen,
&AnalysisChoice::default(),
&AnalysisChoice::MedianSlopes,
1_000_000,
0,
)
.unwrap();
let without_layer = &mapped_results
.get(&("first_pallet".to_string(), "instance".to_string()))
.unwrap()[0];
assert_eq!(
without_layer.base_calculated_proof_size + 2 * 15 * 33,
with_layer.base_calculated_proof_size
);
assert_eq!(
without_layer.component_calculated_proof_size,
with_layer.component_calculated_proof_size
);
}
#[test]
fn template_works() {
let all_results = map_results(
&[
test_data(b"first", b"first", BenchmarkParameter::a, 10, 3),
test_data(b"first", b"second", BenchmarkParameter::b, 9, 2),
test_data(b"second", b"first", BenchmarkParameter::c, 3, 4),
],
&test_storage_info(),
&Default::default(),
Default::default(),
PovEstimationMode::MaxEncodedLen,
&AnalysisChoice::default(),
&AnalysisChoice::MedianSlopes,
1_000_000,
0,
)
.unwrap();
let mut handlebars = handlebars::Handlebars::new();
handlebars.register_helper("underscore", Box::new(UnderscoreHelper));
handlebars.register_helper("join", Box::new(JoinHelper));
handlebars.register_escape_fn(|s| -> String { s.to_string() });
for ((_pallet, _instance), results) in all_results.iter() {
let hbs_data = TemplateData { benchmarks: results.clone(), ..Default::default() };
let output = handlebars.render_template(TEMPLATE, &hbs_data);
assert!(output.is_ok());
println!("{:?}", output);
}
}
#[test]
fn easy_log_16_works() {
assert_eq!(easy_log_16(0), 0);
assert_eq!(easy_log_16(1), 1);
assert_eq!(easy_log_16(16), 1);
assert_eq!(easy_log_16(17), 2);
assert_eq!(easy_log_16(16u32.pow(2)), 2);
assert_eq!(easy_log_16(16u32.pow(2) + 1), 3);
assert_eq!(easy_log_16(16u32.pow(3)), 3);
assert_eq!(easy_log_16(16u32.pow(3) + 1), 4);
assert_eq!(easy_log_16(16u32.pow(4)), 4);
assert_eq!(easy_log_16(16u32.pow(4) + 1), 5);
assert_eq!(easy_log_16(16u32.pow(5)), 5);
assert_eq!(easy_log_16(16u32.pow(5) + 1), 6);
assert_eq!(easy_log_16(16u32.pow(6)), 6);
assert_eq!(easy_log_16(16u32.pow(6) + 1), 7);
assert_eq!(easy_log_16(16u32.pow(7)), 7);
assert_eq!(easy_log_16(16u32.pow(7) + 1), 8);
assert_eq!(easy_log_16(u32::MAX), 8);
}
}