//! A pallet that deliberately consumes ("wastes") the remaining `ref_time` and `proof_size`
//! weight of each block in its `on_idle` hook. It is intended for stress- and load-testing only
//! and is not meant for production use on value-bearing chains.
//!
//! The fraction of leftover block weight to consume is controlled by the `Compute` and `Storage`
//! storage values, settable via the `set_compute` and `set_storage` calls and bounded by
//! `RESOURCE_HARD_LIMIT`. Proof size is wasted by reading entries of the `TrashData` map;
//! computation is wasted by repeatedly hashing a fixed buffer.
#![deny(missing_docs)]
#![cfg_attr(not(feature = "std"), no_std)]
#[cfg(feature = "runtime-benchmarks")]
mod benchmarking;
#[cfg(test)]
mod mock;
#[cfg(test)]
mod tests;
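/// Weights for this pallet's dispatchables and internal routines.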
pub mod weights;
use blake2::{Blake2b512, Digest};
use frame_support::{pallet_prelude::*, weights::WeightMeter, DefaultNoBound};
use frame_system::pallet_prelude::*;
use sp_io::hashing::twox_256;
use sp_runtime::{traits::Zero, FixedPointNumber, FixedU64};
use sp_std::{vec, vec::Vec};
pub use pallet::*;
pub use weights::WeightInfo;
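/// The size of each value in the `TrashData` storage map, in bytes.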
pub const VALUE_SIZE: usize = 1024;
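/// The maximum number of entries the `TrashData` map can hold.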
pub const MAX_TRASH_DATA_ENTRIES: u32 = 65_000;
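/// Hard upper bound for the `Compute` and `Storage` limits.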
pub const RESOURCE_HARD_LIMIT: FixedU64 = FixedU64::from_u32(10);
#[frame_support::pallet]
pub mod pallet {
	use super::*;
	#[pallet::config]
	pub trait Config: frame_system::Config {
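		/// The overarching runtime event type.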
		type RuntimeEvent: From<Event> + IsType<<Self as frame_system::Config>::RuntimeEvent>;
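		/// The origin allowed to initialize the pallet and set its compute and storage limits.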
		type AdminOrigin: EnsureOrigin<Self::RuntimeOrigin>;
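		/// Weight information for this pallet's extrinsics and internal routines.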
		type WeightInfo: WeightInfo;
	}
	#[pallet::pallet]
	pub struct Pallet<T>(_);
	#[pallet::event]
	#[pallet::generate_deposit(pub(super) fn deposit_event)]
	pub enum Event {
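		/// The pallet has been (re)initialized.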
		PalletInitialized {
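			/// Whether the pallet was re-initialized (a witness count was supplied).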
			reinit: bool,
		},
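		/// The computation limit has been updated.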
		ComputationLimitSet {
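			/// The new fraction of remaining `ref_time` to consume during `on_idle`.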
			compute: FixedU64,
		},
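		/// The storage limit has been updated.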
		StorageLimitSet {
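			/// The new fraction of remaining `proof_size` to consume during `on_idle`.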
			storage: FixedU64,
		},
	}
	#[pallet::error]
	pub enum Error<T> {
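		/// The pallet was already initialized.
		///
		/// Pass the current number of `TrashData` entries as `witness_count` to bypass this error.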
		AlreadyInitialized,
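		/// The requested limit exceeds `RESOURCE_HARD_LIMIT`.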
		InsaneLimit,
	}
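	/// The fraction of the remaining `ref_time` weight to consume during `on_idle`.
	///
	/// `1.0` maps to 100%; must not exceed `RESOURCE_HARD_LIMIT`.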
	#[pallet::storage]
	pub(crate) type Compute<T: Config> = StorageValue<_, FixedU64, ValueQuery>;
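	/// The fraction of the remaining `proof_size` weight to consume during `on_idle`.
	///
	/// `1.0` maps to 100%; must not exceed `RESOURCE_HARD_LIMIT`.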
	#[pallet::storage]
	pub(crate) type Storage<T: Config> = StorageValue<_, FixedU64, ValueQuery>;
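	/// Filler map whose entries carry no meaning and are only read back to waste proof size.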
	#[pallet::storage]
	pub(super) type TrashData<T: Config> = StorageMap<
		Hasher = Twox64Concat,
		Key = u32,
		Value = [u8; VALUE_SIZE],
		QueryKind = OptionQuery,
		MaxValues = ConstU32<MAX_TRASH_DATA_ENTRIES>,
	>;
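	/// The current number of entries in `TrashData`.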
	#[pallet::storage]
	pub(crate) type TrashDataCount<T: Config> = StorageValue<_, u32, ValueQuery>;
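	/// Genesis state: the initial limits and the number of `TrashData` entries to create.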
	#[pallet::genesis_config]
	#[derive(DefaultNoBound)]
	pub struct GenesisConfig<T: Config> {
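		/// The initial `Compute` limit.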
		pub compute: FixedU64,
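		/// The initial `Storage` limit.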
		pub storage: FixedU64,
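		/// The number of `TrashData` entries to create at genesis.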
		pub trash_data_count: u32,
		#[serde(skip)]
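		/// Marker for the runtime configuration; carries no data.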
		pub _config: sp_std::marker::PhantomData<T>,
	}
	#[pallet::genesis_build]
	impl<T: Config> BuildGenesisConfig for GenesisConfig<T> {
		fn build(&self) {
			assert!(
				self.trash_data_count <= MAX_TRASH_DATA_ENTRIES,
				"number of TrashData entries cannot be bigger than {:?}",
				MAX_TRASH_DATA_ENTRIES
			);
			(0..self.trash_data_count)
				.for_each(|i| TrashData::<T>::insert(i, Pallet::<T>::gen_value(i)));
			TrashDataCount::<T>::set(self.trash_data_count);
			assert!(self.compute <= RESOURCE_HARD_LIMIT, "Compute limit is insane");
			<Compute<T>>::put(self.compute);
			assert!(self.storage <= RESOURCE_HARD_LIMIT, "Storage limit is insane");
			<Storage<T>>::put(self.storage);
		}
	}
	#[pallet::hooks]
	impl<T: Config> Hooks<BlockNumberFor<T>> for Pallet<T> {
		fn integrity_test() {
			assert!(
				!T::WeightInfo::waste_ref_time_iter(1).ref_time().is_zero(),
				"Weight zero; would get stuck in an infinite loop"
			);
			assert!(
				!T::WeightInfo::waste_proof_size_some(1).proof_size().is_zero(),
				"Weight zero; would get stuck in an infinite loop"
			);
		}
		fn on_idle(_: BlockNumberFor<T>, remaining_weight: Weight) -> Weight {
			let mut meter = WeightMeter::from_limit(remaining_weight);
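			// Account for the base cost of this hook; bail out if even that does not fit.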
			if meter.try_consume(T::WeightInfo::empty_on_idle()).is_err() {
				return T::WeightInfo::empty_on_idle()
			}
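			// Scale the leftover block weight by the configured `Storage` and `Compute` fractions.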
			let proof_size_limit =
				Storage::<T>::get().saturating_mul_int(meter.remaining().proof_size());
			let computation_weight_limit =
				Compute::<T>::get().saturating_mul_int(meter.remaining().ref_time());
			let mut meter = WeightMeter::from_limit(Weight::from_parts(
				computation_weight_limit,
				proof_size_limit,
			));
			Self::waste_at_most_proof_size(&mut meter);
			Self::waste_at_most_ref_time(&mut meter);
			meter.consumed()
		}
	}
	#[pallet::call(weight = T::WeightInfo)]
	impl<T: Config> Pallet<T> {
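		/// Grow or shrink the `TrashData` map to `new_count` entries.
		///
		/// `witness_count` must match the current number of entries (defaulting to `0` when
		/// `None`), otherwise the call fails with `AlreadyInitialized`.
		///
		/// Only callable by Root or `AdminOrigin`.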
		#[pallet::call_index(0)]
		#[pallet::weight(
			T::WeightInfo::initialize_pallet_grow(witness_count.unwrap_or_default())
				.max(T::WeightInfo::initialize_pallet_shrink(witness_count.unwrap_or_default()))
		)]
		pub fn initialize_pallet(
			origin: OriginFor<T>,
			new_count: u32,
			witness_count: Option<u32>,
		) -> DispatchResult {
			T::AdminOrigin::ensure_origin_or_root(origin)?;
			let current_count = TrashDataCount::<T>::get();
			ensure!(
				current_count == witness_count.unwrap_or_default(),
				Error::<T>::AlreadyInitialized
			);
			if new_count > current_count {
				(current_count..new_count)
					.for_each(|i| TrashData::<T>::insert(i, Self::gen_value(i)));
			} else {
				(new_count..current_count).for_each(TrashData::<T>::remove);
			}
			Self::deposit_event(Event::PalletInitialized { reinit: witness_count.is_some() });
			TrashDataCount::<T>::set(new_count);
			Ok(())
		}
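		/// Set how much of the remaining `ref_time` weight should be consumed by `on_idle`.
		///
		/// Only callable by Root or `AdminOrigin`.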
		#[pallet::call_index(1)]
		pub fn set_compute(origin: OriginFor<T>, compute: FixedU64) -> DispatchResult {
			T::AdminOrigin::ensure_origin_or_root(origin)?;
			ensure!(compute <= RESOURCE_HARD_LIMIT, Error::<T>::InsaneLimit);
			Compute::<T>::set(compute);
			Self::deposit_event(Event::ComputationLimitSet { compute });
			Ok(())
		}
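		/// Set how much of the remaining `proof_size` weight should be consumed by `on_idle`.
		///
		/// `1.0` means all remaining `proof_size` will be consumed. Only callable by Root or
		/// `AdminOrigin`.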
		#[pallet::call_index(2)]
		pub fn set_storage(origin: OriginFor<T>, storage: FixedU64) -> DispatchResult {
			T::AdminOrigin::ensure_origin_or_root(origin)?;
			ensure!(storage <= RESOURCE_HARD_LIMIT, Error::<T>::InsaneLimit);
			Storage::<T>::set(storage);
			Self::deposit_event(Event::StorageLimitSet { storage });
			Ok(())
		}
	}
	impl<T: Config> Pallet<T> {
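		/// Waste up to the remaining proof size of `meter` by reading `TrashData` entries.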
		pub(crate) fn waste_at_most_proof_size(meter: &mut WeightMeter) {
			let Ok(n) = Self::calculate_proof_size_iters(&meter) else { return };
			meter.consume(T::WeightInfo::waste_proof_size_some(n));
			(0..n).for_each(|i| {
				TrashData::<T>::get(i);
			});
		}
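		/// Calculate how many `TrashData` reads fit into the remaining weight of `meter`.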
		fn calculate_proof_size_iters(meter: &WeightMeter) -> Result<u32, ()> {
			let base = T::WeightInfo::waste_proof_size_some(0);
			let slope = T::WeightInfo::waste_proof_size_some(1).saturating_sub(base);
			let remaining = meter.remaining().saturating_sub(base);
			let iter_by_proof_size =
				remaining.proof_size().checked_div(slope.proof_size()).ok_or(())?;
			let iter_by_ref_time = remaining.ref_time().checked_div(slope.ref_time()).ok_or(())?;
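			// Use the proof-size-derived iteration count only if the ref-time budget also covers it.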
			if iter_by_proof_size > 0 && iter_by_proof_size <= iter_by_ref_time {
				Ok(iter_by_proof_size as u32)
			} else {
				Err(())
			}
		}
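		/// Waste up to the remaining ref time of `meter` by repeatedly hashing a buffer.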
		pub(crate) fn waste_at_most_ref_time(meter: &mut WeightMeter) {
			let Ok(n) = Self::calculate_ref_time_iters(&meter) else { return };
			meter.consume(T::WeightInfo::waste_ref_time_iter(n));
			let clobber = Self::waste_ref_time_iter(vec![0u8; 64], n);
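			// The impossible write below keeps the compiler from proving the hash result unused
			// and optimizing the loop away; `Blake2b512` always yields 64 bytes, so it never runs.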
			debug_assert!(clobber.len() == 64);
			if clobber.len() == 65 {
				TrashData::<T>::insert(0, [clobber[0] as u8; VALUE_SIZE]);
			}
		}
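		/// Feed `clobber` into a `Blake2b512` hasher `i` times and return the final digest.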
		pub(crate) fn waste_ref_time_iter(clobber: Vec<u8>, i: u32) -> Vec<u8> {
			let mut hasher = Blake2b512::new();
			(0..i).for_each(|_| {
				hasher.update(clobber.as_slice());
			});
			hasher.finalize().to_vec()
		}
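		/// Calculate how many hashing iterations fit into the remaining ref time of `meter`.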
		fn calculate_ref_time_iters(meter: &WeightMeter) -> Result<u32, ()> {
			let base = T::WeightInfo::waste_ref_time_iter(0);
			let slope = T::WeightInfo::waste_ref_time_iter(1).saturating_sub(base);
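			// Sanity check: the hashing benchmark is expected to consume no proof size.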
			if !slope.proof_size().is_zero() || !base.proof_size().is_zero() {
				return Err(())
			}
			match meter
				.remaining()
				.ref_time()
				.saturating_sub(base.ref_time())
				.checked_div(slope.ref_time())
			{
				Some(0) | None => Err(()),
				Some(i) => Ok(i as u32),
			}
		}
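		/// Generate a deterministic 1 KiB value from `seed` by concatenating `twox_256` hashes.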
		pub(crate) fn gen_value(seed: u32) -> [u8; VALUE_SIZE] {
			let mut ret = [0u8; VALUE_SIZE];
			for i in 0u32..(VALUE_SIZE as u32 / 32) {
				let hash = (seed, i).using_encoded(twox_256);
				ret[i as usize * 32..(i + 1) as usize * 32].copy_from_slice(&hash);
			}
			ret
		}
	}
}