use crate::{Get, TryCollect};
use alloc::collections::BTreeMap;
use codec::{Compact, Decode, Encode, MaxEncodedLen};
use core::{borrow::Borrow, marker::PhantomData, ops::Deref};

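/// A bounded map based on a B-Tree.
///
/// Unlike a standard `BTreeMap`, there is an enforced upper limit to the number of items in the
/// map. All internal operations ensure this bound is respected.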
#[derive(Encode, scale_info::TypeInfo)]
#[scale_info(skip_type_params(S))]
pub struct BoundedBTreeMap<K, V, S>(BTreeMap<K, V>, PhantomData<S>);

impl<K, V, S> Decode for BoundedBTreeMap<K, V, S>
where
    K: Decode + Ord,
    V: Decode,
    S: Get<u32>,
{
    fn decode<I: codec::Input>(input: &mut I) -> Result<Self, codec::Error> {
        let len: u32 = <Compact<u32>>::decode(input)?.into();
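        // Fail early, before decoding any elements, if the claimed length already exceeds the
        // bound.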
        if len > S::get() {
            return Err("BoundedBTreeMap exceeds its limit".into())
        }
        input.descend_ref()?;
        let inner = Result::from_iter((0..len).map(|_| Decode::decode(input)))?;
        input.ascend_ref();
        Ok(Self(inner, PhantomData))
    }

    fn skip<I: codec::Input>(input: &mut I) -> Result<(), codec::Error> {
        BTreeMap::<K, V>::skip(input)
    }
}

impl<K, V, S> BoundedBTreeMap<K, V, S>
where
    S: Get<u32>,
{
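    /// Get the bound of the type in `usize`.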
    pub fn bound() -> usize {
        S::get() as usize
    }
}

impl<K, V, S> BoundedBTreeMap<K, V, S>
where
    K: Ord,
    S: Get<u32>,
{
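    /// Create `Self` from `t` without any checks.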
    fn unchecked_from(t: BTreeMap<K, V>) -> Self {
        Self(t, Default::default())
    }

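    /// Retains only the elements specified by the predicate.
    ///
    /// In other words, remove all pairs `(k, v)` for which `f(&k, &mut v)` returns `false`.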
    pub fn retain<F: FnMut(&K, &mut V) -> bool>(&mut self, f: F) {
        self.0.retain(f)
    }

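    /// Create a new `BoundedBTreeMap`.
    ///
    /// Does not allocate.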
    pub fn new() -> Self {
        BoundedBTreeMap(BTreeMap::new(), PhantomData)
    }

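    /// Consume self, and return the inner `BTreeMap`.
    ///
    /// This is useful when a mutating API of the inner type is desired, and closure-based
    /// mutation such as provided by [`Self::try_mutate`] is inconvenient.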
    pub fn into_inner(self) -> BTreeMap<K, V> {
        debug_assert!(self.0.len() <= Self::bound());
        self.0
    }

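    /// Consumes self and mutates self via the given `mutate` function.
    ///
    /// If the outcome of mutation is within bounds, `Some(Self)` is returned. Else, `None` is
    /// returned.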
    pub fn try_mutate(mut self, mut mutate: impl FnMut(&mut BTreeMap<K, V>)) -> Option<Self> {
        mutate(&mut self.0);
        (self.0.len() <= Self::bound()).then(move || self)
    }

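    /// Clears the map, removing all elements.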
    pub fn clear(&mut self) {
        self.0.clear()
    }

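    /// Return a mutable reference to the value corresponding to the key.
    ///
    /// The key may be any borrowed form of the map's key type, but the ordering on the borrowed
    /// form *must* match the ordering on the key type.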
    pub fn get_mut<Q>(&mut self, key: &Q) -> Option<&mut V>
    where
        K: Borrow<Q>,
        Q: Ord + ?Sized,
    {
        self.0.get_mut(key)
    }

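    /// Exactly the same semantics as [`BTreeMap::insert`], but returns an `Err` (and is a no-op)
    /// if the new length of the map exceeds `S`.
    ///
    /// In the `Err` case, returns the key and value so they can be reused without cloning.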
    pub fn try_insert(&mut self, key: K, value: V) -> Result<Option<V>, (K, V)> {
        if self.len() < Self::bound() || self.0.contains_key(&key) {
            Ok(self.0.insert(key, value))
        } else {
            Err((key, value))
        }
    }

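    /// Remove a key from the map, returning the value at the key if the key was previously in the
    /// map.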
    pub fn remove<Q>(&mut self, key: &Q) -> Option<V>
    where
        K: Borrow<Q>,
        Q: Ord + ?Sized,
    {
        self.0.remove(key)
    }

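    /// Remove a key from the map, returning the stored key and value if the key was previously in
    /// the map.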
    pub fn remove_entry<Q>(&mut self, key: &Q) -> Option<(K, V)>
    where
        K: Borrow<Q>,
        Q: Ord + ?Sized,
    {
        self.0.remove_entry(key)
    }

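    /// Gets a mutable iterator over the entries of the map, sorted by key.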
    pub fn iter_mut(&mut self) -> alloc::collections::btree_map::IterMut<K, V> {
        self.0.iter_mut()
    }

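    /// Consume the map, applying `f` to each of its values and returning a new map with the same
    /// keys (and therefore the same length and bound).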
    pub fn map<T, F>(self, mut f: F) -> BoundedBTreeMap<K, T, S>
    where
        F: FnMut((&K, V)) -> T,
    {
        BoundedBTreeMap::<K, T, S>::unchecked_from(
            self.0
                .into_iter()
                .map(|(k, v)| {
                    let t = f((&k, v));
                    (k, t)
                })
                .collect(),
        )
    }

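    /// Consume the map, applying the fallible `f` to each of its values. If `f` returns an error
    /// for any value, mapping short-circuits and the error is returned; otherwise a new map is
    /// returned in `Ok`.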
    pub fn try_map<T, E, F>(self, mut f: F) -> Result<BoundedBTreeMap<K, T, S>, E>
    where
        F: FnMut((&K, V)) -> Result<T, E>,
    {
        Ok(BoundedBTreeMap::<K, T, S>::unchecked_from(
            self.0
                .into_iter()
                .map(|(k, v)| (f((&k, v)).map(|t| (k, t))))
                .collect::<Result<BTreeMap<_, _>, _>>()?,
        ))
    }
}

impl<K, V, S> Default for BoundedBTreeMap<K, V, S>
where
    K: Ord,
    S: Get<u32>,
{
    fn default() -> Self {
        Self::new()
    }
}

impl<K, V, S> Clone for BoundedBTreeMap<K, V, S>
where
    BTreeMap<K, V>: Clone,
{
    fn clone(&self) -> Self {
        BoundedBTreeMap(self.0.clone(), PhantomData)
    }
}

impl<K, V, S> core::fmt::Debug for BoundedBTreeMap<K, V, S>
where
    BTreeMap<K, V>: core::fmt::Debug,
    S: Get<u32>,
{
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_tuple("BoundedBTreeMap").field(&self.0).field(&Self::bound()).finish()
    }
}

#[cfg(feature = "std")]
impl<K: std::hash::Hash, V: std::hash::Hash, S> std::hash::Hash for BoundedBTreeMap<K, V, S> {
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        self.0.hash(state);
    }
}

impl<K, V, S1, S2> PartialEq<BoundedBTreeMap<K, V, S1>> for BoundedBTreeMap<K, V, S2>
where
    BTreeMap<K, V>: PartialEq,
    S1: Get<u32>,
    S2: Get<u32>,
{
    fn eq(&self, other: &BoundedBTreeMap<K, V, S1>) -> bool {
        S1::get() == S2::get() && self.0 == other.0
    }
}

impl<K, V, S> Eq for BoundedBTreeMap<K, V, S>
where
    BTreeMap<K, V>: Eq,
    S: Get<u32>,
{
}

impl<K, V, S> PartialEq<BTreeMap<K, V>> for BoundedBTreeMap<K, V, S>
where
    BTreeMap<K, V>: PartialEq,
{
    fn eq(&self, other: &BTreeMap<K, V>) -> bool {
        self.0 == *other
    }
}

impl<K, V, S> PartialOrd for BoundedBTreeMap<K, V, S>
where
    BTreeMap<K, V>: PartialOrd,
    S: Get<u32>,
{
    fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
        self.0.partial_cmp(&other.0)
    }
}

impl<K, V, S> Ord for BoundedBTreeMap<K, V, S>
where
    BTreeMap<K, V>: Ord,
    S: Get<u32>,
{
    fn cmp(&self, other: &Self) -> core::cmp::Ordering {
        self.0.cmp(&other.0)
    }
}

impl<K, V, S> IntoIterator for BoundedBTreeMap<K, V, S> {
    type Item = (K, V);
    type IntoIter = alloc::collections::btree_map::IntoIter<K, V>;

    fn into_iter(self) -> Self::IntoIter {
        self.0.into_iter()
    }
}

impl<'a, K, V, S> IntoIterator for &'a BoundedBTreeMap<K, V, S> {
    type Item = (&'a K, &'a V);
    type IntoIter = alloc::collections::btree_map::Iter<'a, K, V>;

    fn into_iter(self) -> Self::IntoIter {
        self.0.iter()
    }
}

impl<'a, K, V, S> IntoIterator for &'a mut BoundedBTreeMap<K, V, S> {
    type Item = (&'a K, &'a mut V);
    type IntoIter = alloc::collections::btree_map::IterMut<'a, K, V>;

    fn into_iter(self) -> Self::IntoIter {
        self.0.iter_mut()
    }
}

impl<K, V, S> MaxEncodedLen for BoundedBTreeMap<K, V, S>
where
    K: MaxEncodedLen,
    V: MaxEncodedLen,
    S: Get<u32>,
{
    fn max_encoded_len() -> usize {
        Self::bound()
            .saturating_mul(K::max_encoded_len().saturating_add(V::max_encoded_len()))
            .saturating_add(codec::Compact(S::get()).encoded_size())
    }
}

impl<K, V, S> Deref for BoundedBTreeMap<K, V, S>
where
    K: Ord,
{
    type Target = BTreeMap<K, V>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl<K, V, S> AsRef<BTreeMap<K, V>> for BoundedBTreeMap<K, V, S>
where
    K: Ord,
{
    fn as_ref(&self) -> &BTreeMap<K, V> {
        &self.0
    }
}

impl<K, V, S> From<BoundedBTreeMap<K, V, S>> for BTreeMap<K, V>
where
    K: Ord,
{
    fn from(map: BoundedBTreeMap<K, V, S>) -> Self {
        map.0
    }
}

impl<K, V, S> TryFrom<BTreeMap<K, V>> for BoundedBTreeMap<K, V, S>
where
    K: Ord,
    S: Get<u32>,
{
    type Error = ();

    fn try_from(value: BTreeMap<K, V>) -> Result<Self, Self::Error> {
        (value.len() <= Self::bound())
            .then(move || BoundedBTreeMap(value, PhantomData))
            .ok_or(())
    }
}

impl<K, V, S> codec::DecodeLength for BoundedBTreeMap<K, V, S> {
    fn len(self_encoded: &[u8]) -> Result<usize, codec::Error> {
        <BTreeMap<K, V> as codec::DecodeLength>::len(self_encoded)
    }
}

impl<K, V, S> codec::EncodeLike<BTreeMap<K, V>> for BoundedBTreeMap<K, V, S> where BTreeMap<K, V>: Encode {}

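/// Collect an exact-size iterator of key/value pairs into a `BoundedBTreeMap`, failing if the
/// iterator's reported length exceeds the bound.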
impl<I, K, V, Bound> TryCollect<BoundedBTreeMap<K, V, Bound>> for I
where
    K: Ord,
    I: ExactSizeIterator + Iterator<Item = (K, V)>,
    Bound: Get<u32>,
{
    type Error = &'static str;

    fn try_collect(self) -> Result<BoundedBTreeMap<K, V, Bound>, Self::Error> {
        if self.len() > Bound::get() as usize {
            Err("iterator length too big")
        } else {
            Ok(BoundedBTreeMap::<K, V, Bound>::unchecked_from(self.collect::<BTreeMap<K, V>>()))
        }
    }
}

#[cfg(test)]
mod test {
    use super::*;
    use crate::ConstU32;
    use alloc::{vec, vec::Vec};
    use codec::CompactLen;

    fn map_from_keys<K>(keys: &[K]) -> BTreeMap<K, ()>
    where
        K: Ord + Copy,
    {
        keys.iter().copied().zip(core::iter::repeat(())).collect()
    }

    fn boundedmap_from_keys<K, S>(keys: &[K]) -> BoundedBTreeMap<K, (), S>
    where
        K: Ord + Copy,
        S: Get<u32>,
    {
        map_from_keys(keys).try_into().unwrap()
    }

    #[test]
    fn encoding_same_as_unbounded_map() {
        let b = boundedmap_from_keys::<u32, ConstU32<7>>(&[1, 2, 3, 4, 5, 6]);
        let m = map_from_keys(&[1, 2, 3, 4, 5, 6]);

        assert_eq!(b.encode(), m.encode());
    }

    #[test]
    fn try_insert_works() {
        let mut bounded = boundedmap_from_keys::<u32, ConstU32<4>>(&[1, 2, 3]);
        bounded.try_insert(0, ()).unwrap();
        assert_eq!(*bounded, map_from_keys(&[1, 0, 2, 3]));

        assert!(bounded.try_insert(9, ()).is_err());
        assert_eq!(*bounded, map_from_keys(&[1, 0, 2, 3]));
    }

    #[test]
    fn deref_coercion_works() {
        let bounded = boundedmap_from_keys::<u32, ConstU32<7>>(&[1, 2, 3]);
        assert_eq!(bounded.len(), 3);
        assert!(bounded.iter().next().is_some());
        assert!(!bounded.is_empty());
    }

    #[test]
    fn try_mutate_works() {
        let bounded = boundedmap_from_keys::<u32, ConstU32<7>>(&[1, 2, 3, 4, 5, 6]);
        let bounded = bounded
            .try_mutate(|v| {
                v.insert(7, ());
            })
            .unwrap();
        assert_eq!(bounded.len(), 7);
        assert!(bounded
            .try_mutate(|v| {
                v.insert(8, ());
            })
            .is_none());
    }

    #[test]
    fn btree_map_eq_works() {
        let bounded = boundedmap_from_keys::<u32, ConstU32<7>>(&[1, 2, 3, 4, 5, 6]);
        assert_eq!(bounded, map_from_keys(&[1, 2, 3, 4, 5, 6]));
    }

    #[test]
    fn too_big_fail_to_decode() {
        let v: Vec<(u32, u32)> = vec![(1, 1), (2, 2), (3, 3), (4, 4), (5, 5)];
        assert_eq!(
            BoundedBTreeMap::<u32, u32, ConstU32<4>>::decode(&mut &v.encode()[..]),
            Err("BoundedBTreeMap exceeds its limit".into()),
        );
    }

    #[test]
    fn dont_consume_more_data_than_bounded_len() {
        let m = map_from_keys(&[1, 2, 3, 4, 5, 6]);
        let data = m.encode();
        let data_input = &mut &data[..];

        BoundedBTreeMap::<u32, u32, ConstU32<4>>::decode(data_input).unwrap_err();
        // Only the compact-encoded length prefix (the number of elements) should have been read.
        assert_eq!(data_input.len(), data.len() - Compact::<u32>::compact_len(&(m.len() as u32)));
    }

    #[test]
    fn unequal_eq_impl_insert_works() {
        #[derive(Debug)]
        struct Unequal(u32, bool);

        impl PartialEq for Unequal {
            fn eq(&self, other: &Self) -> bool {
                self.0 == other.0
            }
        }
        impl Eq for Unequal {}

        impl Ord for Unequal {
            fn cmp(&self, other: &Self) -> core::cmp::Ordering {
                self.0.cmp(&other.0)
            }
        }

        impl PartialOrd for Unequal {
            fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
                Some(self.cmp(other))
            }
        }

        let mut map = BoundedBTreeMap::<Unequal, u32, ConstU32<4>>::new();

        for i in 0..4 {
            map.try_insert(Unequal(i, false), i).unwrap();
        }

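        // The map is now at its bound of 4, so inserting a fifth distinct key must fail.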
        map.try_insert(Unequal(5, false), 5).unwrap_err();

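        // `Unequal(0, true)` compares equal to the existing `Unequal(0, false)` key, so the
        // insert replaces the value without growing the map; the original key is kept.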
        map.try_insert(Unequal(0, true), 6).unwrap();
        assert_eq!(map.len(), 4);
        let (zero_key, zero_value) = map.get_key_value(&Unequal(0, true)).unwrap();
        assert_eq!(zero_key.0, 0);
        assert_eq!(zero_key.1, false);
        assert_eq!(*zero_value, 6);
    }

    #[test]
    fn eq_works() {
        let b1 = boundedmap_from_keys::<u32, ConstU32<7>>(&[1, 2]);
        let b2 = boundedmap_from_keys::<u32, ConstU32<7>>(&[1, 2]);
        assert_eq!(b1, b2);

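        // Maps with different bound types compare equal as long as the bound values match.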
        crate::parameter_types! {
            B1: u32 = 7;
            B2: u32 = 7;
        }
        let b1 = boundedmap_from_keys::<u32, B1>(&[1, 2]);
        let b2 = boundedmap_from_keys::<u32, B2>(&[1, 2]);
        assert_eq!(b1, b2);
    }

    #[test]
    fn can_be_collected() {
        let b1 = boundedmap_from_keys::<u32, ConstU32<5>>(&[1, 2, 3, 4]);
        let b2: BoundedBTreeMap<u32, (), ConstU32<5>> = b1.iter().map(|(k, v)| (k + 1, *v)).try_collect().unwrap();
        assert_eq!(b2.into_iter().map(|(k, _)| k).collect::<Vec<_>>(), vec![2, 3, 4, 5]);

        let b2: BoundedBTreeMap<u32, (), ConstU32<4>> = b1.iter().map(|(k, v)| (k + 1, *v)).try_collect().unwrap();
        assert_eq!(b2.into_iter().map(|(k, _)| k).collect::<Vec<_>>(), vec![2, 3, 4, 5]);

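        // The length check uses `ExactSizeIterator::len`, so adapters such as `rev`, `skip` and
        // `take` are taken into account.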
        let b2: BoundedBTreeMap<u32, (), ConstU32<5>> =
            b1.iter().map(|(k, v)| (k + 1, *v)).rev().skip(2).try_collect().unwrap();
        assert_eq!(b2.into_iter().map(|(k, _)| k).collect::<Vec<_>>(), vec![2, 3]);

        let b2: BoundedBTreeMap<u32, (), ConstU32<5>> =
            b1.iter().map(|(k, v)| (k + 1, *v)).take(2).try_collect().unwrap();
        assert_eq!(b2.into_iter().map(|(k, _)| k).collect::<Vec<_>>(), vec![2, 3]);

        let b2: Result<BoundedBTreeMap<u32, (), ConstU32<3>>, _> = b1.iter().map(|(k, v)| (k + 1, *v)).try_collect();
        assert!(b2.is_err());

        let b2: Result<BoundedBTreeMap<u32, (), ConstU32<1>>, _> =
            b1.iter().map(|(k, v)| (k + 1, *v)).skip(2).try_collect();
        assert!(b2.is_err());
    }

    #[test]
    fn test_iter_mut() {
        let mut b1: BoundedBTreeMap<u8, u8, ConstU32<7>> =
            [1, 2, 3, 4].into_iter().map(|k| (k, k)).try_collect().unwrap();

        let b2: BoundedBTreeMap<u8, u8, ConstU32<7>> =
            [1, 2, 3, 4].into_iter().map(|k| (k, k * 2)).try_collect().unwrap();

        b1.iter_mut().for_each(|(_, v)| *v *= 2);

        assert_eq!(b1, b2);
    }

    #[test]
    fn map_retains_size() {
        let b1 = boundedmap_from_keys::<u32, ConstU32<7>>(&[1, 2]);
        let b2 = b1.clone();

        assert_eq!(b1.len(), b2.map(|(_, _)| 5_u32).len());
    }

    #[test]
    fn map_maps_properly() {
        let b1: BoundedBTreeMap<u32, u32, ConstU32<7>> =
            [1, 2, 3, 4].into_iter().map(|k| (k, k * 2)).try_collect().unwrap();
        let b2: BoundedBTreeMap<u32, u32, ConstU32<7>> =
            [1, 2, 3, 4].into_iter().map(|k| (k, k)).try_collect().unwrap();

        assert_eq!(b1, b2.map(|(_, v)| v * 2));
    }

    #[test]
    fn try_map_retains_size() {
        let b1 = boundedmap_from_keys::<u32, ConstU32<7>>(&[1, 2]);
        let b2 = b1.clone();

        assert_eq!(b1.len(), b2.try_map::<_, (), _>(|(_, _)| Ok(5_u32)).unwrap().len());
    }

    #[test]
    fn try_map_maps_properly() {
        let b1: BoundedBTreeMap<u32, u32, ConstU32<7>> =
            [1, 2, 3, 4].into_iter().map(|k| (k, k * 2)).try_collect().unwrap();
        let b2: BoundedBTreeMap<u32, u32, ConstU32<7>> =
            [1, 2, 3, 4].into_iter().map(|k| (k, k)).try_collect().unwrap();

        assert_eq!(b1, b2.try_map::<_, (), _>(|(_, v)| Ok(v * 2)).unwrap());
    }

    #[test]
    fn try_map_short_circuit() {
        let b1: BoundedBTreeMap<u8, u8, ConstU32<7>> = [1, 2, 3, 4].into_iter().map(|k| (k, k)).try_collect().unwrap();

        assert_eq!(Err("overflow"), b1.try_map(|(_, v)| v.checked_mul(100).ok_or("overflow")));
    }

    #[test]
    fn try_map_ok() {
        let b1: BoundedBTreeMap<u8, u8, ConstU32<7>> = [1, 2, 3, 4].into_iter().map(|k| (k, k)).try_collect().unwrap();
        let b2: BoundedBTreeMap<u8, u16, ConstU32<7>> =
            [1, 2, 3, 4].into_iter().map(|k| (k, (k as u16) * 100)).try_collect().unwrap();

        assert_eq!(Ok(b2), b1.try_map(|(_, v)| (v as u16).checked_mul(100_u16).ok_or("overflow")));
    }

    #[test]
    #[cfg(feature = "std")]
    fn container_can_derive_hash() {
        #[derive(Hash)]
        struct Foo {
            bar: u8,
            map: BoundedBTreeMap<String, usize, ConstU32<16>>,
        }
    }
}