use super::WeakBoundedVec;
use crate::{Get, TryCollect};
use alloc::vec::Vec;
use codec::{decode_vec_with_len, Compact, Decode, Encode, EncodeLike, MaxEncodedLen};
use core::{
    marker::PhantomData,
    ops::{Deref, Index, IndexMut, RangeBounds},
    slice::SliceIndex,
};
#[cfg(feature = "serde")]
use serde::{
    de::{Error, SeqAccess, Visitor},
    Deserialize, Deserializer, Serialize,
};

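/// A vector whose length is bounded by `S: Get<u32>`.
///
/// All operations that could grow the vector either respect the bound or are fallible: the
/// `try_*` methods hand back the rejected element(s) instead of exceeding the bound, while the
/// `force_*` methods drop an existing element to make room.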
#[cfg_attr(feature = "serde", derive(Serialize), serde(transparent))]
#[derive(Encode, scale_info::TypeInfo)]
#[scale_info(skip_type_params(S))]
#[cfg_attr(feature = "json-schema", derive(schemars::JsonSchema))]
pub struct BoundedVec<T, S>(
    pub(super) Vec<T>,
    #[cfg_attr(feature = "serde", serde(skip_serializing))] PhantomData<S>,
);

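/// Create `Self` from an unbounded value of type `T`, truncating any elements that do not fit
/// within the bound.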
pub trait TruncateFrom<T> {
    fn truncate_from(unbound: T) -> Self;
}

#[cfg(feature = "serde")]
impl<'de, T, S: Get<u32>> Deserialize<'de> for BoundedVec<T, S>
where
    T: Deserialize<'de>,
{
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        struct VecVisitor<T, S: Get<u32>>(PhantomData<(T, S)>);

        impl<'de, T, S: Get<u32>> Visitor<'de> for VecVisitor<T, S>
        where
            T: Deserialize<'de>,
        {
            type Value = Vec<T>;

            fn expecting(&self, formatter: &mut alloc::fmt::Formatter) -> alloc::fmt::Result {
                formatter.write_str("a sequence")
            }

            fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
            where
                A: SeqAccess<'de>,
            {
                let size = seq.size_hint().unwrap_or(0);
                let max = match usize::try_from(S::get()) {
                    Ok(n) => n,
                    Err(_) => return Err(A::Error::custom("can't convert to usize")),
                };
                if size > max {
                    Err(A::Error::custom("out of bounds"))
                } else {
                    let mut values = Vec::with_capacity(size);

                    while let Some(value) = seq.next_element()? {
                        if values.len() >= max {
                            return Err(A::Error::custom("out of bounds"))
                        }
                        values.push(value);
                    }

                    Ok(values)
                }
            }
        }

        let visitor: VecVisitor<T, S> = VecVisitor(PhantomData);
        deserializer
            .deserialize_seq(visitor)
            .map(|v| BoundedVec::<T, S>::try_from(v).map_err(|_| Error::custom("out of bounds")))?
    }
}

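/// A bounded slice: the borrowed counterpart of [`BoundedVec`], whose length is guaranteed not
/// to exceed the bound `S`.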
#[derive(Encode, scale_info::TypeInfo)]
pub struct BoundedSlice<'a, T, S>(pub(super) &'a [T], PhantomData<S>);

impl<'a, T: Encode + Decode, S: Get<u32>> EncodeLike<BoundedVec<T, S>> for BoundedSlice<'a, T, S> {}
impl<'a, T: Encode + Decode, S: Get<u32>> EncodeLike<WeakBoundedVec<T, S>> for BoundedSlice<'a, T, S> {}
impl<'a, T: Encode + Decode, S: Get<u32>> EncodeLike<Vec<T>> for BoundedSlice<'a, T, S> {}

impl<'a, T, BoundSelf, BoundRhs> PartialEq<BoundedSlice<'a, T, BoundRhs>> for BoundedSlice<'a, T, BoundSelf>
where
    T: PartialEq,
    BoundSelf: Get<u32>,
    BoundRhs: Get<u32>,
{
    fn eq(&self, other: &BoundedSlice<'a, T, BoundRhs>) -> bool {
        self.0 == other.0
    }
}

impl<'a, T, BoundSelf, BoundRhs> PartialEq<BoundedVec<T, BoundRhs>> for BoundedSlice<'a, T, BoundSelf>
where
    T: PartialEq,
    BoundSelf: Get<u32>,
    BoundRhs: Get<u32>,
{
    fn eq(&self, other: &BoundedVec<T, BoundRhs>) -> bool {
        self.0 == other.0
    }
}

impl<'a, T, BoundSelf, BoundRhs> PartialEq<WeakBoundedVec<T, BoundRhs>> for BoundedSlice<'a, T, BoundSelf>
where
    T: PartialEq,
    BoundSelf: Get<u32>,
    BoundRhs: Get<u32>,
{
    fn eq(&self, other: &WeakBoundedVec<T, BoundRhs>) -> bool {
        self.0 == other.0
    }
}

impl<'a, T, S: Get<u32>> Eq for BoundedSlice<'a, T, S> where T: Eq {}

impl<'a, T, BoundSelf, BoundRhs> PartialOrd<BoundedSlice<'a, T, BoundRhs>> for BoundedSlice<'a, T, BoundSelf>
where
    T: PartialOrd,
    BoundSelf: Get<u32>,
    BoundRhs: Get<u32>,
{
    fn partial_cmp(&self, other: &BoundedSlice<'a, T, BoundRhs>) -> Option<core::cmp::Ordering> {
        self.0.partial_cmp(other.0)
    }
}

impl<'a, T, BoundSelf, BoundRhs> PartialOrd<BoundedVec<T, BoundRhs>> for BoundedSlice<'a, T, BoundSelf>
where
    T: PartialOrd,
    BoundSelf: Get<u32>,
    BoundRhs: Get<u32>,
{
    fn partial_cmp(&self, other: &BoundedVec<T, BoundRhs>) -> Option<core::cmp::Ordering> {
        self.0.partial_cmp(&*other.0)
    }
}

impl<'a, T, BoundSelf, BoundRhs> PartialOrd<WeakBoundedVec<T, BoundRhs>> for BoundedSlice<'a, T, BoundSelf>
where
    T: PartialOrd,
    BoundSelf: Get<u32>,
    BoundRhs: Get<u32>,
{
    fn partial_cmp(&self, other: &WeakBoundedVec<T, BoundRhs>) -> Option<core::cmp::Ordering> {
        self.0.partial_cmp(&*other.0)
    }
}

impl<'a, T: Ord, Bound: Get<u32>> Ord for BoundedSlice<'a, T, Bound> {
    fn cmp(&self, other: &Self) -> core::cmp::Ordering {
        self.0.cmp(&other.0)
    }
}

impl<'a, T, S: Get<u32>> TryFrom<&'a [T]> for BoundedSlice<'a, T, S> {
    type Error = &'a [T];
    fn try_from(t: &'a [T]) -> Result<Self, Self::Error> {
        if t.len() <= S::get() as usize {
            Ok(BoundedSlice(t, PhantomData))
        } else {
            Err(t)
        }
    }
}

impl<'a, T, S> From<BoundedSlice<'a, T, S>> for &'a [T] {
    fn from(t: BoundedSlice<'a, T, S>) -> Self {
        t.0
    }
}

impl<'a, T, S: Get<u32>> TruncateFrom<&'a [T]> for BoundedSlice<'a, T, S> {
    fn truncate_from(unbound: &'a [T]) -> Self {
        BoundedSlice::<T, S>::truncate_from(unbound)
    }
}

impl<'a, T, S> Clone for BoundedSlice<'a, T, S> {
    fn clone(&self) -> Self {
        BoundedSlice(self.0, PhantomData)
    }
}

impl<'a, T, S> core::fmt::Debug for BoundedSlice<'a, T, S>
where
    &'a [T]: core::fmt::Debug,
    S: Get<u32>,
{
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_tuple("BoundedSlice").field(&self.0).field(&S::get()).finish()
    }
}

impl<'a, T, S> Copy for BoundedSlice<'a, T, S> {}

impl<'a, T, S> Deref for BoundedSlice<'a, T, S> {
    type Target = [T];

    fn deref(&self) -> &Self::Target {
        self.0
    }
}

#[cfg(feature = "std")]
impl<'a, T: std::hash::Hash, S> std::hash::Hash for BoundedSlice<'a, T, S> {
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        self.0.hash(state);
    }
}

impl<'a, T, S> core::iter::IntoIterator for BoundedSlice<'a, T, S> {
    type Item = &'a T;
    type IntoIter = core::slice::Iter<'a, T>;
    fn into_iter(self) -> Self::IntoIter {
        self.0.iter()
    }
}

impl<'a, T, S: Get<u32>> BoundedSlice<'a, T, S> {
    pub fn truncate_from(s: &'a [T]) -> Self {
        Self(&s[0..(s.len().min(S::get() as usize))], PhantomData)
    }
}

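// Decoding reads the compact length prefix first and bails out if it exceeds the bound, so no
// more than `S::get()` elements are ever decoded from the input.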
impl<T: Decode, S: Get<u32>> Decode for BoundedVec<T, S> {
    fn decode<I: codec::Input>(input: &mut I) -> Result<Self, codec::Error> {
        let len: u32 = <Compact<u32>>::decode(input)?.into();
        if len > S::get() {
            return Err("BoundedVec exceeds its limit".into())
        }
        let inner = decode_vec_with_len(input, len as usize)?;
        Ok(Self(inner, PhantomData))
    }

    fn skip<I: codec::Input>(input: &mut I) -> Result<(), codec::Error> {
        Vec::<T>::skip(input)
    }
}

impl<T: Encode + Decode, S: Get<u32>> EncodeLike<Vec<T>> for BoundedVec<T, S> {}

impl<T, S> BoundedVec<T, S> {
    pub fn new() -> Self {
        Self(Vec::new(), Default::default())
    }

    fn unchecked_from(t: Vec<T>) -> Self {
        Self(t, Default::default())
    }

    pub fn clear(&mut self) {
        self.0.clear()
    }

    pub fn into_inner(self) -> Vec<T> {
        self.0
    }

    pub fn sort_by<F>(&mut self, compare: F)
    where
        F: FnMut(&T, &T) -> core::cmp::Ordering,
    {
        self.0.sort_by(compare)
    }

    pub fn sort_by_key<K, F>(&mut self, f: F)
    where
        F: FnMut(&T) -> K,
        K: core::cmp::Ord,
    {
        self.0.sort_by_key(f)
    }

    pub fn sort(&mut self)
    where
        T: core::cmp::Ord,
    {
        self.0.sort()
    }

    pub fn remove(&mut self, index: usize) -> T {
        self.0.remove(index)
    }

    pub fn swap_remove(&mut self, index: usize) -> T {
        self.0.swap_remove(index)
    }

    pub fn retain<F: FnMut(&T) -> bool>(&mut self, f: F) {
        self.0.retain(f)
    }

    pub fn get_mut<I: SliceIndex<[T]>>(&mut self, index: I) -> Option<&mut <I as SliceIndex<[T]>>::Output> {
        self.0.get_mut(index)
    }

    pub fn truncate(&mut self, s: usize) {
        self.0.truncate(s);
    }

    pub fn pop(&mut self) -> Option<T> {
        self.0.pop()
    }

    pub fn iter_mut(&mut self) -> core::slice::IterMut<'_, T> {
        self.0.iter_mut()
    }

    pub fn last_mut(&mut self) -> Option<&mut T> {
        self.0.last_mut()
    }

    pub fn drain<R>(&mut self, range: R) -> alloc::vec::Drain<'_, T>
    where
        R: RangeBounds<usize>,
    {
        self.0.drain(range)
    }
}

impl<T, S: Get<u32>> From<BoundedVec<T, S>> for Vec<T> {
    fn from(x: BoundedVec<T, S>) -> Vec<T> {
        x.0
    }
}

impl<T, S: Get<u32>> BoundedVec<T, S> {
    pub fn with_bounded_capacity(capacity: usize) -> Self {
        let capacity = capacity.min(Self::bound());
        Self(Vec::with_capacity(capacity), Default::default())
    }

    pub fn with_max_capacity() -> Self {
        Self::with_bounded_capacity(Self::bound())
    }

    pub fn truncate_from(mut v: Vec<T>) -> Self {
        v.truncate(Self::bound());
        Self::unchecked_from(v)
    }

    pub fn bound() -> usize {
        S::get() as usize
    }

    pub fn is_full(&self) -> bool {
        self.len() >= Self::bound()
    }

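    /// Insert `element` at `index`, dropping the element at the front when the vector is
    /// already full ("keep right"): in that case the new element ends up at `index - 1` and
    /// the dropped front element is returned as `Ok(Some(_))`.
    ///
    /// Returns `Ok(None)` when there was still room, and `Err(element)` when `index` is out of
    /// bounds or the vector is full and `index == 0` (the only element that could be dropped
    /// would be the one being inserted). Never panics.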
    pub fn force_insert_keep_right(&mut self, index: usize, mut element: T) -> Result<Option<T>, T> {
        if Self::bound() < index || self.len() < index {
            Err(element)
        } else if self.len() < Self::bound() {
            self.0.insert(index, element);
            Ok(None)
        } else {
            if index == 0 {
                return Err(element)
            }
            core::mem::swap(&mut self[0], &mut element);
            self[0..index].rotate_left(1);
            Ok(Some(element))
        }
    }

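    /// Insert `element` at `index`, dropping the element at the back when the vector is
    /// already full ("keep left"): the dropped last element is returned as `Ok(Some(_))`.
    ///
    /// Returns `Ok(None)` when there was still room, and `Err(element)` when `index` is out of
    /// bounds, the bound is zero, or `index` equals the bound (the inserted element would be
    /// the one dropped). Never panics.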
    pub fn force_insert_keep_left(&mut self, index: usize, element: T) -> Result<Option<T>, T> {
        if Self::bound() < index || self.len() < index || Self::bound() == 0 {
            return Err(element)
        }
        if Self::bound() == index && self.len() <= Self::bound() {
            return Err(element)
        }
        let maybe_removed = if self.is_full() {
            self.0.truncate(Self::bound());
            self.0.pop()
        } else {
            None
        };

        self.0.insert(index, element);
        Ok(maybe_removed)
    }

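    /// Move the element at `index` so that it ends up just before the element that was
    /// originally at `insert_position` (or at the very end when `insert_position` equals the
    /// length), shifting everything in between by one place.
    ///
    /// Returns `true` if the vector changed, and `false` for no-ops (moving an element next to
    /// itself) and for out-of-range arguments. Never panics.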
    pub fn slide(&mut self, index: usize, insert_position: usize) -> bool {
        if self.len() <= index || self.len() < insert_position || index == usize::MAX {
            return false
        }
        if index == insert_position || index + 1 == insert_position {
            return false
        }
        if insert_position < index && index < self.len() {
            self[insert_position..index + 1].rotate_right(1);
            return true
        } else if insert_position > 0 && index + 1 < insert_position {
            self[index..insert_position].rotate_left(1);
            return true
        }

        debug_assert!(false, "all noop conditions should have been covered above");
        false
    }

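    /// Push `element` to the back of the vector, dropping the current last element first if
    /// the vector is already full. With a bound of zero this is a no-op.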
    pub fn force_push(&mut self, element: T) {
        if Self::bound() > 0 {
            self.0.truncate(Self::bound() as usize - 1);
            self.0.push(element);
        }
    }

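    /// Same semantics as `Vec::resize`, except that the requested `size` is capped at the
    /// bound, so the vector can never grow beyond it.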
    pub fn bounded_resize(&mut self, size: usize, value: T)
    where
        T: Clone,
    {
        let size = size.min(Self::bound());
        self.0.resize(size, value);
    }

    pub fn try_extend(&mut self, with: impl IntoIterator<Item = T> + ExactSizeIterator) -> Result<(), ()> {
        if with.len().saturating_add(self.len()) <= Self::bound() {
            self.0.extend(with);
            Ok(())
        } else {
            Err(())
        }
    }

    pub fn try_append(&mut self, other: &mut Vec<T>) -> Result<(), ()> {
        if other.len().saturating_add(self.len()) <= Self::bound() {
            self.0.append(other);
            Ok(())
        } else {
            Err(())
        }
    }

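    /// Consume `self` and apply `mutate` to the inner `Vec`.
    ///
    /// Returns `Some(Self)` if the mutated vector still respects the bound, otherwise `None`
    /// (in which case the mutated data is dropped).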
    pub fn try_mutate(mut self, mut mutate: impl FnMut(&mut Vec<T>)) -> Option<Self> {
        mutate(&mut self.0);
        (self.0.len() <= Self::bound()).then(move || self)
    }

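    /// Insert `element` at `index`, returning `Err(element)` if the vector is already at its
    /// bound.
    ///
    /// # Panics
    ///
    /// Panics if `index > len`, just like [`Vec::insert`].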
    pub fn try_insert(&mut self, index: usize, element: T) -> Result<(), T> {
        if self.len() < Self::bound() {
            self.0.insert(index, element);
            Ok(())
        } else {
            Err(element)
        }
    }

    pub fn try_push(&mut self, element: T) -> Result<(), T> {
        if self.len() < Self::bound() {
            self.0.push(element);
            Ok(())
        } else {
            Err(element)
        }
    }

    pub fn try_rotate_left(&mut self, mid: usize) -> Result<(), ()> {
        if mid > self.len() {
            return Err(())
        }

        self.0.rotate_left(mid);
        Ok(())
    }

    pub fn try_rotate_right(&mut self, mid: usize) -> Result<(), ()> {
        if mid > self.len() {
            return Err(())
        }

        self.0.rotate_right(mid);
        Ok(())
    }
}

impl<T, S> BoundedVec<T, S> {
    pub fn as_bounded_slice(&self) -> BoundedSlice<T, S> {
        BoundedSlice(&self.0[..], PhantomData)
    }
}

impl<T, S> Default for BoundedVec<T, S> {
    fn default() -> Self {
        Self::unchecked_from(Vec::default())
    }
}

impl<T, S> core::fmt::Debug for BoundedVec<T, S>
where
    Vec<T>: core::fmt::Debug,
    S: Get<u32>,
{
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_tuple("BoundedVec").field(&self.0).field(&Self::bound()).finish()
    }
}

impl<T, S> Clone for BoundedVec<T, S>
where
    T: Clone,
{
    fn clone(&self) -> Self {
        Self::unchecked_from(self.0.clone())
    }
}

impl<T, S: Get<u32>> TryFrom<Vec<T>> for BoundedVec<T, S> {
    type Error = Vec<T>;
    fn try_from(t: Vec<T>) -> Result<Self, Self::Error> {
        if t.len() <= Self::bound() {
            Ok(Self::unchecked_from(t))
        } else {
            Err(t)
        }
    }
}

impl<T, S: Get<u32>> TruncateFrom<Vec<T>> for BoundedVec<T, S> {
    fn truncate_from(unbound: Vec<T>) -> Self {
        BoundedVec::<T, S>::truncate_from(unbound)
    }
}

#[cfg(feature = "std")]
impl<T: std::hash::Hash, S> std::hash::Hash for BoundedVec<T, S> {
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        self.0.hash(state);
    }
}

impl<T, S> AsRef<Vec<T>> for BoundedVec<T, S> {
    fn as_ref(&self) -> &Vec<T> {
        &self.0
    }
}

impl<T, S> AsRef<[T]> for BoundedVec<T, S> {
    fn as_ref(&self) -> &[T] {
        &self.0
    }
}

impl<T, S> AsMut<[T]> for BoundedVec<T, S> {
    fn as_mut(&mut self) -> &mut [T] {
        &mut self.0
    }
}

impl<T, S> Deref for BoundedVec<T, S> {
    type Target = Vec<T>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl<T, S, I> Index<I> for BoundedVec<T, S>
where
    I: SliceIndex<[T]>,
{
    type Output = I::Output;

    #[inline]
    fn index(&self, index: I) -> &Self::Output {
        self.0.index(index)
    }
}

impl<T, S, I> IndexMut<I> for BoundedVec<T, S>
where
    I: SliceIndex<[T]>,
{
    #[inline]
    fn index_mut(&mut self, index: I) -> &mut Self::Output {
        self.0.index_mut(index)
    }
}

impl<T, S> core::iter::IntoIterator for BoundedVec<T, S> {
    type Item = T;
    type IntoIter = alloc::vec::IntoIter<T>;
    fn into_iter(self) -> Self::IntoIter {
        self.0.into_iter()
    }
}

impl<'a, T, S> core::iter::IntoIterator for &'a BoundedVec<T, S> {
    type Item = &'a T;
    type IntoIter = core::slice::Iter<'a, T>;
    fn into_iter(self) -> Self::IntoIter {
        self.0.iter()
    }
}

impl<'a, T, S> core::iter::IntoIterator for &'a mut BoundedVec<T, S> {
    type Item = &'a mut T;
    type IntoIter = core::slice::IterMut<'a, T>;
    fn into_iter(self) -> Self::IntoIter {
        self.0.iter_mut()
    }
}

impl<T, S> codec::DecodeLength for BoundedVec<T, S> {
    fn len(self_encoded: &[u8]) -> Result<usize, codec::Error> {
        <Vec<T> as codec::DecodeLength>::len(self_encoded)
    }
}

impl<T, BoundSelf, BoundRhs> PartialEq<BoundedVec<T, BoundRhs>> for BoundedVec<T, BoundSelf>
where
    T: PartialEq,
    BoundSelf: Get<u32>,
    BoundRhs: Get<u32>,
{
    fn eq(&self, rhs: &BoundedVec<T, BoundRhs>) -> bool {
        self.0 == rhs.0
    }
}

impl<T, BoundSelf, BoundRhs> PartialEq<WeakBoundedVec<T, BoundRhs>> for BoundedVec<T, BoundSelf>
where
    T: PartialEq,
    BoundSelf: Get<u32>,
    BoundRhs: Get<u32>,
{
    fn eq(&self, rhs: &WeakBoundedVec<T, BoundRhs>) -> bool {
        self.0 == rhs.0
    }
}

impl<'a, T, BoundSelf, BoundRhs> PartialEq<BoundedSlice<'a, T, BoundRhs>> for BoundedVec<T, BoundSelf>
where
    T: PartialEq,
    BoundSelf: Get<u32>,
    BoundRhs: Get<u32>,
{
    fn eq(&self, rhs: &BoundedSlice<'a, T, BoundRhs>) -> bool {
        self.0 == rhs.0
    }
}

impl<'a, T: PartialEq, S: Get<u32>> PartialEq<&'a [T]> for BoundedSlice<'a, T, S> {
    fn eq(&self, other: &&'a [T]) -> bool {
        &self.0 == other
    }
}

impl<T: PartialEq, S: Get<u32>> PartialEq<Vec<T>> for BoundedVec<T, S> {
    fn eq(&self, other: &Vec<T>) -> bool {
        &self.0 == other
    }
}

impl<T, S: Get<u32>> Eq for BoundedVec<T, S> where T: Eq {}

impl<T, BoundSelf, BoundRhs> PartialOrd<BoundedVec<T, BoundRhs>> for BoundedVec<T, BoundSelf>
where
    T: PartialOrd,
    BoundSelf: Get<u32>,
    BoundRhs: Get<u32>,
{
    fn partial_cmp(&self, other: &BoundedVec<T, BoundRhs>) -> Option<core::cmp::Ordering> {
        self.0.partial_cmp(&other.0)
    }
}

impl<T, BoundSelf, BoundRhs> PartialOrd<WeakBoundedVec<T, BoundRhs>> for BoundedVec<T, BoundSelf>
where
    T: PartialOrd,
    BoundSelf: Get<u32>,
    BoundRhs: Get<u32>,
{
    fn partial_cmp(&self, other: &WeakBoundedVec<T, BoundRhs>) -> Option<core::cmp::Ordering> {
        self.0.partial_cmp(&other.0)
    }
}

impl<'a, T, BoundSelf, BoundRhs> PartialOrd<BoundedSlice<'a, T, BoundRhs>> for BoundedVec<T, BoundSelf>
where
    T: PartialOrd,
    BoundSelf: Get<u32>,
    BoundRhs: Get<u32>,
{
    fn partial_cmp(&self, other: &BoundedSlice<'a, T, BoundRhs>) -> Option<core::cmp::Ordering> {
        (&*self.0).partial_cmp(other.0)
    }
}

impl<T: Ord, Bound: Get<u32>> Ord for BoundedVec<T, Bound> {
    fn cmp(&self, other: &Self) -> core::cmp::Ordering {
        self.0.cmp(&other.0)
    }
}

impl<T, S> MaxEncodedLen for BoundedVec<T, S>
where
    T: MaxEncodedLen,
    S: Get<u32>,
    BoundedVec<T, S>: Encode,
{
    fn max_encoded_len() -> usize {
        codec::Compact(S::get())
            .encoded_size()
            .saturating_add(Self::bound().saturating_mul(T::max_encoded_len()))
    }
}

impl<I, T, Bound> TryCollect<BoundedVec<T, Bound>> for I
where
    I: ExactSizeIterator + Iterator<Item = T>,
    Bound: Get<u32>,
{
    type Error = &'static str;

    fn try_collect(self) -> Result<BoundedVec<T, Bound>, Self::Error> {
        if self.len() > Bound::get() as usize {
            Err("iterator length too big")
        } else {
            Ok(BoundedVec::<T, Bound>::unchecked_from(self.collect::<Vec<T>>()))
        }
    }
}

#[cfg(all(test, feature = "std"))]
mod test {
    use super::*;
    use crate::{bounded_vec, ConstU32, ConstU8};
    use codec::CompactLen;

    #[test]
    fn encoding_same_as_unbounded_vec() {
        let b: BoundedVec<u32, ConstU32<6>> = bounded_vec![0, 1, 2, 3, 4, 5];
        let v: Vec<u32> = vec![0, 1, 2, 3, 4, 5];

        assert_eq!(b.encode(), v.encode());
    }

    #[test]
    fn slice_truncate_from_works() {
        let bounded = BoundedSlice::<u32, ConstU32<4>>::truncate_from(&[1, 2, 3, 4, 5]);
        assert_eq!(bounded.deref(), &[1, 2, 3, 4]);
        let bounded = BoundedSlice::<u32, ConstU32<4>>::truncate_from(&[1, 2, 3, 4]);
        assert_eq!(bounded.deref(), &[1, 2, 3, 4]);
        let bounded = BoundedSlice::<u32, ConstU32<4>>::truncate_from(&[1, 2, 3]);
        assert_eq!(bounded.deref(), &[1, 2, 3]);
    }

    #[test]
    fn slide_works() {
        let mut b: BoundedVec<u32, ConstU32<6>> = bounded_vec![0, 1, 2, 3, 4, 5];
        assert!(b.slide(1, 5));
        assert_eq!(*b, vec![0, 2, 3, 4, 1, 5]);
        assert!(b.slide(4, 0));
        assert_eq!(*b, vec![1, 0, 2, 3, 4, 5]);
        assert!(b.slide(0, 2));
        assert_eq!(*b, vec![0, 1, 2, 3, 4, 5]);
        assert!(b.slide(1, 6));
        assert_eq!(*b, vec![0, 2, 3, 4, 5, 1]);
        assert!(b.slide(0, 6));
        assert_eq!(*b, vec![2, 3, 4, 5, 1, 0]);
        assert!(b.slide(5, 0));
        assert_eq!(*b, vec![0, 2, 3, 4, 5, 1]);
        assert!(!b.slide(6, 0));
        assert!(!b.slide(7, 0));
        assert_eq!(*b, vec![0, 2, 3, 4, 5, 1]);

        let mut c: BoundedVec<u32, ConstU32<6>> = bounded_vec![0, 1, 2];
        assert!(!c.slide(1, 5));
        assert_eq!(*c, vec![0, 1, 2]);
        assert!(!c.slide(4, 0));
        assert_eq!(*c, vec![0, 1, 2]);
        assert!(!c.slide(3, 0));
        assert_eq!(*c, vec![0, 1, 2]);
        assert!(c.slide(2, 0));
        assert_eq!(*c, vec![2, 0, 1]);
    }

    #[test]
    fn slide_noops_work() {
        let mut b: BoundedVec<u32, ConstU32<6>> = bounded_vec![0, 1, 2, 3, 4, 5];
        assert!(!b.slide(3, 3));
        assert_eq!(*b, vec![0, 1, 2, 3, 4, 5]);
        assert!(!b.slide(3, 4));
        assert_eq!(*b, vec![0, 1, 2, 3, 4, 5]);
    }

    #[test]
    fn force_insert_keep_left_works() {
        let mut b: BoundedVec<u32, ConstU32<4>> = bounded_vec![];
        assert_eq!(b.force_insert_keep_left(1, 10), Err(10));
        assert!(b.is_empty());

        assert_eq!(b.force_insert_keep_left(0, 30), Ok(None));
        assert_eq!(b.force_insert_keep_left(0, 10), Ok(None));
        assert_eq!(b.force_insert_keep_left(1, 20), Ok(None));
        assert_eq!(b.force_insert_keep_left(3, 40), Ok(None));
        assert_eq!(*b, vec![10, 20, 30, 40]);
        assert_eq!(b.force_insert_keep_left(4, 41), Err(41));
        assert_eq!(*b, vec![10, 20, 30, 40]);
        assert_eq!(b.force_insert_keep_left(3, 31), Ok(Some(40)));
        assert_eq!(*b, vec![10, 20, 30, 31]);
        assert_eq!(b.force_insert_keep_left(1, 11), Ok(Some(31)));
        assert_eq!(*b, vec![10, 11, 20, 30]);
        assert_eq!(b.force_insert_keep_left(0, 1), Ok(Some(30)));
        assert_eq!(*b, vec![1, 10, 11, 20]);

        let mut z: BoundedVec<u32, ConstU32<0>> = bounded_vec![];
        assert!(z.is_empty());
        assert_eq!(z.force_insert_keep_left(0, 10), Err(10));
        assert!(z.is_empty());
    }

    #[test]
    fn force_insert_keep_right_works() {
        let mut b: BoundedVec<u32, ConstU32<4>> = bounded_vec![];
        assert_eq!(b.force_insert_keep_right(1, 10), Err(10));
        assert!(b.is_empty());

        assert_eq!(b.force_insert_keep_right(0, 30), Ok(None));
        assert_eq!(b.force_insert_keep_right(0, 10), Ok(None));
        assert_eq!(b.force_insert_keep_right(1, 20), Ok(None));
        assert_eq!(b.force_insert_keep_right(3, 40), Ok(None));
        assert_eq!(*b, vec![10, 20, 30, 40]);

        assert_eq!(b.force_insert_keep_right(0, 0), Err(0));
        assert_eq!(*b, vec![10, 20, 30, 40]);
        assert_eq!(b.force_insert_keep_right(1, 11), Ok(Some(10)));
        assert_eq!(*b, vec![11, 20, 30, 40]);
        assert_eq!(b.force_insert_keep_right(3, 31), Ok(Some(11)));
        assert_eq!(*b, vec![20, 30, 31, 40]);
        assert_eq!(b.force_insert_keep_right(4, 41), Ok(Some(20)));
        assert_eq!(*b, vec![30, 31, 40, 41]);

        assert_eq!(b.force_insert_keep_right(5, 69), Err(69));
        assert_eq!(*b, vec![30, 31, 40, 41]);

        let mut z: BoundedVec<u32, ConstU32<0>> = bounded_vec![];
        assert!(z.is_empty());
        assert_eq!(z.force_insert_keep_right(0, 10), Err(10));
        assert!(z.is_empty());
    }

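    // A minimal illustrative example of `force_push`: when the vector is full, the element at
    // the back is dropped to make room, and with a zero bound the call is a no-op.
    #[test]
    fn force_push_works() {
        let mut b: BoundedVec<u32, ConstU32<3>> = bounded_vec![1, 2];
        b.force_push(3);
        assert_eq!(*b, vec![1, 2, 3]);
        b.force_push(4);
        assert_eq!(*b, vec![1, 2, 4]);

        let mut z: BoundedVec<u32, ConstU32<0>> = bounded_vec![];
        z.force_push(1);
        assert!(z.is_empty());
    }
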
    #[test]
    fn bound_returns_correct_value() {
        assert_eq!(BoundedVec::<u32, ConstU32<7>>::bound(), 7);
    }

    #[test]
    fn try_insert_works() {
        let mut bounded: BoundedVec<u32, ConstU32<4>> = bounded_vec![1, 2, 3];
        bounded.try_insert(1, 0).unwrap();
        assert_eq!(*bounded, vec![1, 0, 2, 3]);

        assert!(bounded.try_insert(0, 9).is_err());
        assert_eq!(*bounded, vec![1, 0, 2, 3]);
    }

    #[test]
    fn constructor_macro_works() {
        let bv: BoundedVec<(u32, u32), ConstU32<3>> = bounded_vec![(1, 2), (1, 2), (1, 2)];
        assert_eq!(bv, vec![(1, 2), (1, 2), (1, 2)]);

        let bv: BoundedVec<(u32, u32), ConstU32<3>> = bounded_vec![(1, 2); 3];
        assert_eq!(bv, vec![(1, 2), (1, 2), (1, 2)]);
    }

    #[test]
    #[should_panic(expected = "insertion index (is 9) should be <= len (is 3)")]
    fn try_insert_panics_if_oob() {
        let mut bounded: BoundedVec<u32, ConstU32<4>> = bounded_vec![1, 2, 3];
        bounded.try_insert(9, 0).unwrap();
    }

    #[test]
    fn try_push_works() {
        let mut bounded: BoundedVec<u32, ConstU32<4>> = bounded_vec![1, 2, 3];
        bounded.try_push(0).unwrap();
        assert_eq!(*bounded, vec![1, 2, 3, 0]);

        assert!(bounded.try_push(9).is_err());
    }

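    // Illustrative example of `bounded_resize` and `is_full`: the requested size is capped at
    // the bound, so resizing can never overflow it.
    #[test]
    fn bounded_resize_and_is_full_work() {
        let mut b: BoundedVec<u32, ConstU32<3>> = bounded_vec![1];
        assert!(!b.is_full());

        b.bounded_resize(5, 0);
        assert_eq!(*b, vec![1, 0, 0]);
        assert!(b.is_full());

        b.bounded_resize(1, 0);
        assert_eq!(*b, vec![1]);
    }
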
    #[test]
    fn deref_vec_coercion_works() {
        let bounded: BoundedVec<u32, ConstU32<7>> = bounded_vec![1, 2, 3];
        assert_eq!(bounded.len(), 3);
        assert!(bounded.iter().next().is_some());
        assert!(!bounded.is_empty());
    }

    #[test]
    fn deref_slice_coercion_works() {
        let bounded = BoundedSlice::<u32, ConstU32<7>>::try_from(&[1, 2, 3][..]).unwrap();
        assert_eq!(bounded.len(), 3);
        assert!(bounded.iter().next().is_some());
        assert!(!bounded.is_empty());
    }

    #[test]
    fn try_mutate_works() {
        let bounded: BoundedVec<u32, ConstU32<7>> = bounded_vec![1, 2, 3, 4, 5, 6];
        let bounded = bounded.try_mutate(|v| v.push(7)).unwrap();
        assert_eq!(bounded.len(), 7);
        assert!(bounded.try_mutate(|v| v.push(8)).is_none());
    }

    #[test]
    fn slice_indexing_works() {
        let bounded: BoundedVec<u32, ConstU32<7>> = bounded_vec![1, 2, 3, 4, 5, 6];
        assert_eq!(&bounded[0..=2], &[1, 2, 3]);
    }

    #[test]
    fn vec_eq_works() {
        let bounded: BoundedVec<u32, ConstU32<7>> = bounded_vec![1, 2, 3, 4, 5, 6];
        assert_eq!(bounded, vec![1, 2, 3, 4, 5, 6]);
    }

    #[test]
    fn too_big_vec_fail_to_decode() {
        let v: Vec<u32> = vec![1, 2, 3, 4, 5];
        assert_eq!(
            BoundedVec::<u32, ConstU32<4>>::decode(&mut &v.encode()[..]),
            Err("BoundedVec exceeds its limit".into()),
        );
    }

    #[test]
    fn dont_consume_more_data_than_bounded_len() {
        let v: Vec<u32> = vec![1, 2, 3, 4, 5];
        let data = v.encode();
        let data_input = &mut &data[..];

        BoundedVec::<u32, ConstU32<4>>::decode(data_input).unwrap_err();
        assert_eq!(data_input.len(), data.len() - Compact::<u32>::compact_len(&(data.len() as u32)));
    }

    #[test]
    fn eq_works() {
        let b1: BoundedVec<u32, ConstU32<7>> = bounded_vec![1, 2, 3];
        let b2: BoundedVec<u32, ConstU32<7>> = bounded_vec![1, 2, 3];
        assert_eq!(b1, b2);

        crate::parameter_types! {
            B1: u32 = 7;
            B2: u32 = 7;
        }
        let b1: BoundedVec<u32, B1> = bounded_vec![1, 2, 3];
        let b2: BoundedVec<u32, B2> = bounded_vec![1, 2, 3];
        assert_eq!(b1, b2);
    }

    #[test]
    fn ord_works() {
        use std::cmp::Ordering;
        let b1: BoundedVec<u32, ConstU32<7>> = bounded_vec![1, 2, 3];
        let b2: BoundedVec<u32, ConstU32<7>> = bounded_vec![1, 3, 2];

        assert_eq!(b1.cmp(&b2), Ordering::Less);
        assert_eq!(b1.cmp(&b2), b1.into_inner().cmp(&b2.into_inner()));
    }

    #[test]
    fn try_extend_works() {
        let mut b: BoundedVec<u32, ConstU32<5>> = bounded_vec![1, 2, 3];

        assert!(b.try_extend(vec![4].into_iter()).is_ok());
        assert_eq!(*b, vec![1, 2, 3, 4]);

        assert!(b.try_extend(vec![5].into_iter()).is_ok());
        assert_eq!(*b, vec![1, 2, 3, 4, 5]);

        assert!(b.try_extend(vec![6].into_iter()).is_err());
        assert_eq!(*b, vec![1, 2, 3, 4, 5]);

        let mut b: BoundedVec<u32, ConstU32<5>> = bounded_vec![1, 2, 3];
        assert!(b.try_extend(vec![4, 5].into_iter()).is_ok());
        assert_eq!(*b, vec![1, 2, 3, 4, 5]);

        let mut b: BoundedVec<u32, ConstU32<5>> = bounded_vec![1, 2, 3];
        assert!(b.try_extend(vec![4, 5, 6].into_iter()).is_err());
        assert_eq!(*b, vec![1, 2, 3]);
    }

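    // Illustrative example of `try_append`: on success the appended vector is drained into
    // `self`; on failure both vectors are left untouched.
    #[test]
    fn try_append_works() {
        let mut b: BoundedVec<u32, ConstU32<5>> = bounded_vec![1, 2, 3];
        let mut rest = vec![4, 5];
        assert!(b.try_append(&mut rest).is_ok());
        assert_eq!(*b, vec![1, 2, 3, 4, 5]);
        assert!(rest.is_empty());

        let mut more = vec![6];
        assert!(b.try_append(&mut more).is_err());
        assert_eq!(*b, vec![1, 2, 3, 4, 5]);
        assert_eq!(more, vec![6]);
    }
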
    #[test]
    fn test_serializer() {
        let c: BoundedVec<u32, ConstU32<6>> = bounded_vec![0, 1, 2];
        assert_eq!(serde_json::json!(&c).to_string(), r#"[0,1,2]"#);
    }

    #[test]
    fn test_deserializer() {
        let c: BoundedVec<u32, ConstU32<6>> = serde_json::from_str(r#"[0,1,2]"#).unwrap();

        assert_eq!(c.len(), 3);
        assert_eq!(c[0], 0);
        assert_eq!(c[1], 1);
        assert_eq!(c[2], 2);
    }

    #[test]
    fn test_deserializer_bound() {
        let c: BoundedVec<u32, ConstU32<3>> = serde_json::from_str(r#"[0,1,2]"#).unwrap();

        assert_eq!(c.len(), 3);
        assert_eq!(c[0], 0);
        assert_eq!(c[1], 1);
        assert_eq!(c[2], 2);
    }

    #[test]
    fn test_deserializer_failed() {
        let c: Result<BoundedVec<u32, ConstU32<4>>, serde_json::error::Error> = serde_json::from_str(r#"[0,1,2,3,4]"#);

        match c {
            Err(msg) => assert_eq!(msg.to_string(), "out of bounds at line 1 column 11"),
            _ => unreachable!("deserializer must raise error"),
        }
    }

    #[test]
    fn bounded_vec_try_from_works() {
        assert!(BoundedVec::<u32, ConstU32<2>>::try_from(vec![0]).is_ok());
        assert!(BoundedVec::<u32, ConstU32<2>>::try_from(vec![0, 1]).is_ok());
        assert!(BoundedVec::<u32, ConstU32<2>>::try_from(vec![0, 1, 2]).is_err());
    }

    #[test]
    fn bounded_slice_try_from_works() {
        assert!(BoundedSlice::<u32, ConstU32<2>>::try_from(&[0][..]).is_ok());
        assert!(BoundedSlice::<u32, ConstU32<2>>::try_from(&[0, 1][..]).is_ok());
        assert!(BoundedSlice::<u32, ConstU32<2>>::try_from(&[0, 1, 2][..]).is_err());
    }

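    // Illustrative example of `as_bounded_slice`: a `BoundedVec` can be viewed as a
    // `BoundedSlice` with the same bound without copying.
    #[test]
    fn as_bounded_slice_works() {
        let b: BoundedVec<u32, ConstU32<3>> = bounded_vec![1, 2, 3];
        let s: BoundedSlice<u32, ConstU32<3>> = b.as_bounded_slice();
        assert_eq!(s, &[1, 2, 3][..]);
        assert_eq!(b, s);
    }
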
    #[test]
    fn can_be_collected() {
        let b1: BoundedVec<u32, ConstU32<5>> = bounded_vec![1, 2, 3, 4];
        let b2: BoundedVec<u32, ConstU32<5>> = b1.iter().map(|x| x + 1).try_collect().unwrap();
        assert_eq!(b2, vec![2, 3, 4, 5]);

        let b2: BoundedVec<u32, ConstU32<4>> = b1.iter().map(|x| x + 1).try_collect().unwrap();
        assert_eq!(b2, vec![2, 3, 4, 5]);

        let b2: BoundedVec<u32, ConstU32<4>> = b1.iter().map(|x| x + 1).rev().try_collect().unwrap();
        assert_eq!(b2, vec![5, 4, 3, 2]);

        let b2: BoundedVec<u32, ConstU32<4>> = b1.iter().map(|x| x + 1).rev().skip(2).try_collect().unwrap();
        assert_eq!(b2, vec![3, 2]);
        let b2: BoundedVec<u32, ConstU32<2>> = b1.iter().map(|x| x + 1).rev().skip(2).try_collect().unwrap();
        assert_eq!(b2, vec![3, 2]);

        let b2: BoundedVec<u32, ConstU32<4>> = b1.iter().map(|x| x + 1).rev().take(2).try_collect().unwrap();
        assert_eq!(b2, vec![5, 4]);
        let b2: BoundedVec<u32, ConstU32<2>> = b1.iter().map(|x| x + 1).rev().take(2).try_collect().unwrap();
        assert_eq!(b2, vec![5, 4]);

        let b2: Result<BoundedVec<u32, ConstU32<3>>, _> = b1.iter().map(|x| x + 1).try_collect();
        assert!(b2.is_err());

        let b2: Result<BoundedVec<u32, ConstU32<1>>, _> = b1.iter().map(|x| x + 1).rev().take(2).try_collect();
        assert!(b2.is_err());
    }

    #[test]
    fn bounded_vec_debug_works() {
        let bound = BoundedVec::<u32, ConstU32<5>>::truncate_from(vec![1, 2, 3]);
        assert_eq!(format!("{:?}", bound), "BoundedVec([1, 2, 3], 5)");
    }

    #[test]
    fn bounded_slice_debug_works() {
        let bound = BoundedSlice::<u32, ConstU32<5>>::truncate_from(&[1, 2, 3]);
        assert_eq!(format!("{:?}", bound), "BoundedSlice([1, 2, 3], 5)");
    }

    #[test]
    fn bounded_vec_sort_by_key_works() {
        let mut v: BoundedVec<i32, ConstU32<5>> = bounded_vec![-5, 4, 1, -3, 2];
        v.sort_by_key(|k| k.abs());
        assert_eq!(v, vec![1, 2, -3, 4, -5]);
    }

    #[test]
    fn bounded_vec_truncate_from_works() {
        let unbound = vec![1, 2, 3, 4, 5];
        let bound = BoundedVec::<u32, ConstU32<3>>::truncate_from(unbound.clone());
        assert_eq!(bound, vec![1, 2, 3]);
    }

    #[test]
    fn bounded_slice_truncate_from_works() {
        let unbound = [1, 2, 3, 4, 5];
        let bound = BoundedSlice::<u32, ConstU32<3>>::truncate_from(&unbound);
        assert_eq!(bound, &[1, 2, 3][..]);
    }

    #[test]
    fn bounded_slice_partialeq_slice_works() {
        let unbound = [1, 2, 3];
        let bound = BoundedSlice::<u32, ConstU32<3>>::truncate_from(&unbound);

        assert_eq!(bound, &unbound[..]);
        assert!(bound == &unbound[..]);
    }

    #[test]
    fn bounded_vec_try_rotate_left_works() {
        let o = BoundedVec::<u32, ConstU32<3>>::truncate_from(vec![1, 2, 3]);
        let mut bound = o.clone();

        bound.try_rotate_left(0).unwrap();
        assert_eq!(bound, o);
        bound.try_rotate_left(3).unwrap();
        assert_eq!(bound, o);

        bound.try_rotate_left(4).unwrap_err();
        assert_eq!(bound, o);

        bound.try_rotate_left(1).unwrap();
        assert_eq!(bound, vec![2, 3, 1]);
        bound.try_rotate_left(2).unwrap();
        assert_eq!(bound, o);
    }

    #[test]
    fn bounded_vec_try_rotate_right_works() {
        let o = BoundedVec::<u32, ConstU32<3>>::truncate_from(vec![1, 2, 3]);
        let mut bound = o.clone();

        bound.try_rotate_right(0).unwrap();
        assert_eq!(bound, o);
        bound.try_rotate_right(3).unwrap();
        assert_eq!(bound, o);

        bound.try_rotate_right(4).unwrap_err();
        assert_eq!(bound, o);

        bound.try_rotate_right(1).unwrap();
        assert_eq!(bound, vec![3, 1, 2]);
        bound.try_rotate_right(2).unwrap();
        assert_eq!(bound, o);
    }

    #[test]
    #[cfg(feature = "std")]
    fn container_can_derive_hash() {
        #[derive(Hash)]
        struct Foo<'a> {
            bar: u8,
            slice: BoundedSlice<'a, usize, ConstU8<8>>,
            map: BoundedVec<String, ConstU32<16>>,
        }
    }
}