1use core::{
4 borrow,
5 cmp::Ordering,
6 fmt, hash,
7 iter::FusedIterator,
8 marker::PhantomData,
9 mem::{self, ManuallyDrop, MaybeUninit},
10 ops::{self, Range, RangeBounds},
11 ptr::{self, NonNull},
12 slice,
13};
14
15#[cfg(feature = "zeroize")]
16use zeroize::Zeroize;
17
18use crate::{
19 len_type::{check_capacity_fits, LenType},
20 CapacityError,
21};
22
23mod drain;
24
25mod storage {
26 use core::mem::MaybeUninit;
27
28 use crate::{
29 binary_heap::{BinaryHeapInner, BinaryHeapView},
30 deque::{DequeInner, DequeView},
31 len_type::LenType,
32 };
33
34 use super::{VecInner, VecView};
35
36 #[allow(private_bounds)]
63 pub trait VecStorage<T>: VecSealedStorage<T> {}
64
65 pub trait VecSealedStorage<T> {
66 fn borrow(&self) -> &[MaybeUninit<T>];
69 fn borrow_mut(&mut self) -> &mut [MaybeUninit<T>];
70
71 fn as_vec_view<LenT: LenType>(this: &VecInner<T, LenT, Self>) -> &VecView<T, LenT>
72 where
73 Self: VecStorage<T>;
74 fn as_vec_view_mut<LenT: LenType>(
75 this: &mut VecInner<T, LenT, Self>,
76 ) -> &mut VecView<T, LenT>
77 where
78 Self: VecStorage<T>;
79
80 fn as_binary_heap_view<K>(this: &BinaryHeapInner<T, K, Self>) -> &BinaryHeapView<T, K>
81 where
82 Self: VecStorage<T>;
83 fn as_binary_heap_view_mut<K>(
84 this: &mut BinaryHeapInner<T, K, Self>,
85 ) -> &mut BinaryHeapView<T, K>
86 where
87 Self: VecStorage<T>;
88
89 fn as_deque_view(this: &DequeInner<T, Self>) -> &DequeView<T>
90 where
91 Self: VecStorage<T>;
92 fn as_deque_view_mut(this: &mut DequeInner<T, Self>) -> &mut DequeView<T>
93 where
94 Self: VecStorage<T>;
95 }
96
97 #[cfg(feature = "zeroize")]
98 use zeroize::Zeroize;
99
100 #[cfg_attr(feature = "zeroize", derive(Zeroize))]
102 pub struct VecStorageInner<T: ?Sized> {
103 pub(crate) buffer: T,
104 }
105
106 pub type OwnedVecStorage<T, const N: usize> = VecStorageInner<[MaybeUninit<T>; N]>;
109 pub type ViewVecStorage<T> = VecStorageInner<[MaybeUninit<T>]>;
111
112 impl<T, const N: usize> VecSealedStorage<T> for OwnedVecStorage<T, N> {
113 fn borrow(&self) -> &[MaybeUninit<T>] {
114 &self.buffer
115 }
116 fn borrow_mut(&mut self) -> &mut [MaybeUninit<T>] {
117 &mut self.buffer
118 }
119
120 fn as_vec_view<LenT: LenType>(this: &VecInner<T, LenT, Self>) -> &VecView<T, LenT>
121 where
122 Self: VecStorage<T>,
123 {
124 this
125 }
126 fn as_vec_view_mut<LenT: LenType>(
127 this: &mut VecInner<T, LenT, Self>,
128 ) -> &mut VecView<T, LenT>
129 where
130 Self: VecStorage<T>,
131 {
132 this
133 }
134
135 fn as_binary_heap_view<K>(this: &BinaryHeapInner<T, K, Self>) -> &BinaryHeapView<T, K>
136 where
137 Self: VecStorage<T>,
138 {
139 this
140 }
141 fn as_binary_heap_view_mut<K>(
142 this: &mut BinaryHeapInner<T, K, Self>,
143 ) -> &mut BinaryHeapView<T, K>
144 where
145 Self: VecStorage<T>,
146 {
147 this
148 }
149 fn as_deque_view(this: &DequeInner<T, Self>) -> &DequeView<T>
150 where
151 Self: VecStorage<T>,
152 {
153 this
154 }
155 fn as_deque_view_mut(this: &mut DequeInner<T, Self>) -> &mut DequeView<T>
156 where
157 Self: VecStorage<T>,
158 {
159 this
160 }
161 }
162 impl<T, const N: usize> VecStorage<T> for OwnedVecStorage<T, N> {}
163
164 impl<T> VecSealedStorage<T> for ViewVecStorage<T> {
165 fn borrow(&self) -> &[MaybeUninit<T>] {
166 &self.buffer
167 }
168 fn borrow_mut(&mut self) -> &mut [MaybeUninit<T>] {
169 &mut self.buffer
170 }
171
172 fn as_vec_view<LenT: LenType>(this: &VecInner<T, LenT, Self>) -> &VecView<T, LenT>
173 where
174 Self: VecStorage<T>,
175 {
176 this
177 }
178 fn as_vec_view_mut<LenT: LenType>(
179 this: &mut VecInner<T, LenT, Self>,
180 ) -> &mut VecView<T, LenT>
181 where
182 Self: VecStorage<T>,
183 {
184 this
185 }
186
187 fn as_binary_heap_view<K>(this: &BinaryHeapInner<T, K, Self>) -> &BinaryHeapView<T, K>
188 where
189 Self: VecStorage<T>,
190 {
191 this
192 }
193 fn as_binary_heap_view_mut<K>(
194 this: &mut BinaryHeapInner<T, K, Self>,
195 ) -> &mut BinaryHeapView<T, K>
196 where
197 Self: VecStorage<T>,
198 {
199 this
200 }
201 fn as_deque_view(this: &DequeInner<T, Self>) -> &DequeView<T>
202 where
203 Self: VecStorage<T>,
204 {
205 this
206 }
207 fn as_deque_view_mut(this: &mut DequeInner<T, Self>) -> &mut DequeView<T>
208 where
209 Self: VecStorage<T>,
210 {
211 this
212 }
213 }
214 impl<T> VecStorage<T> for ViewVecStorage<T> {}
215}
216pub use storage::{OwnedVecStorage, VecStorage, ViewVecStorage};
217
218pub(crate) use storage::VecStorageInner;
219
220pub use drain::Drain;
221
222#[cfg_attr(feature = "zeroize", derive(Zeroize), zeroize(bound = "S: Zeroize"))]
227pub struct VecInner<T, LenT: LenType, S: VecStorage<T> + ?Sized> {
228 phantom: PhantomData<T>,
229 len: LenT,
230 buffer: S,
231}
232
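/// A fixed-capacity vector backed by an inline array of `N` elements.
///
/// Illustrative usage sketch (assuming this alias is exported at the crate
/// root as `heapless::Vec`):
///
/// ```
/// use heapless::Vec;
///
/// // A vector that can hold at most 8 `u8`s, with no heap allocation.
/// let mut v: Vec<u8, 8> = Vec::new();
/// v.push(1).unwrap();
/// v.extend_from_slice(&[2, 3]).unwrap();
/// assert_eq!(v.as_slice(), &[1, 2, 3]);
/// assert_eq!(v.pop(), Some(3));
/// ```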
233pub type Vec<T, const N: usize, LenT = usize> = VecInner<T, LenT, OwnedVecStorage<T, N>>;
278
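/// A capacity-erased view of a [`Vec`], analogous to `[T]` for arrays.
///
/// Illustrative sketch (assuming the aliases are exported as
/// `heapless::{Vec, VecView}`): functions can take `&VecView<T>` to accept
/// vectors of any capacity.
///
/// ```
/// use heapless::{Vec, VecView};
///
/// fn sum(view: &VecView<u32>) -> u32 {
///     view.iter().sum()
/// }
///
/// let mut v: Vec<u32, 4> = Vec::new();
/// v.extend_from_slice(&[1, 2, 3]).unwrap();
/// assert_eq!(sum(v.as_view()), 6);
/// ```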
279pub type VecView<T, LenT = usize> = VecInner<T, LenT, ViewVecStorage<T>>;
302
303impl<T, LenT: LenType, const N: usize> Vec<T, N, LenT> {
304 const ELEM: MaybeUninit<T> = MaybeUninit::uninit();
    const INIT: [MaybeUninit<T>; N] = [Self::ELEM; N];

    pub const fn new() -> Self {
321 const { check_capacity_fits::<LenT, N>() }
322
323 Self {
324 phantom: PhantomData,
325 len: LenT::ZERO,
326 buffer: VecStorageInner { buffer: Self::INIT },
327 }
328 }
329
330 pub fn from_slice(other: &[T]) -> Result<Self, CapacityError>
342 where
343 T: Clone,
344 {
345 let mut v = Self::new();
346 v.extend_from_slice(other)?;
347 Ok(v)
348 }
349
350 pub fn from_array<const M: usize>(src: [T; M]) -> Self {
359 const {
360 assert!(N >= M);
361 }
362
363 let src = ManuallyDrop::new(src);
366
367 if N == M {
368 Self {
369 phantom: PhantomData,
370 len: LenT::from_usize(N),
371 buffer: unsafe { mem::transmute_copy(&src) },
374 }
375 } else {
376 let mut v = Self::new();
377
378 for (src_elem, dst_elem) in src.iter().zip(v.buffer.buffer.iter_mut()) {
379 dst_elem.write(unsafe { ptr::read(src_elem) });
382 }
383
384 unsafe { v.set_len(M) };
385 v
386 }
387 }
388
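    /// Attempts to convert the vector into a plain array of length `M`.
    ///
    /// Succeeds only when the current length equals `M`; otherwise the vector
    /// is returned unchanged. Illustrative sketch (assuming the
    /// `heapless::Vec` export path):
    ///
    /// ```
    /// use heapless::Vec;
    ///
    /// let v: Vec<u8, 4> = Vec::from_slice(&[1, 2, 3]).unwrap();
    /// assert_eq!(v.into_array::<3>(), Ok([1, 2, 3]));
    ///
    /// let v: Vec<u8, 4> = Vec::from_slice(&[1, 2]).unwrap();
    /// // Length 2 does not match the requested array length 4.
    /// assert!(v.into_array::<4>().is_err());
    /// ```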
389 pub fn into_array<const M: usize>(self) -> Result<[T; M], Self> {
401 if self.len() == M {
402 let array = unsafe { (core::ptr::from_ref(&self.buffer).cast::<[T; M]>()).read() };
404
405 core::mem::forget(self);
408
409 Ok(array)
410 } else {
411 Err(self)
412 }
413 }
414
415 pub(crate) fn clone(&self) -> Self
417 where
418 T: Clone,
419 {
420 let mut new = Self::new();
421 for elem in self {
423 unsafe {
424 new.push_unchecked(elem.clone());
425 }
426 }
427 new
428 }
429
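    /// Converts the vector to use a different integer type for its length
    /// field, keeping the elements and capacity unchanged.
    ///
    /// Illustrative sketch, assuming `u8` implements [`LenType`] like the
    /// other unsigned integer types:
    ///
    /// ```
    /// use heapless::Vec;
    ///
    /// let v: Vec<u8, 16> = Vec::from_slice(&[1, 2, 3]).unwrap();
    /// // Store the length in a `u8` instead of the default `usize`.
    /// let v: Vec<u8, 16, u8> = v.cast_len_type();
    /// assert_eq!(v.len(), 3);
    /// ```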
430 pub fn cast_len_type<NewLenT: LenType>(self) -> Vec<T, N, NewLenT> {
437 const { check_capacity_fits::<NewLenT, N>() }
438 let this = ManuallyDrop::new(self);
439
440 Vec {
443 len: NewLenT::from_usize(this.len()),
444 buffer: unsafe { ptr::read(&this.buffer) },
445 phantom: PhantomData,
446 }
447 }
448}
449
450impl<T, LenT: LenType, S: VecStorage<T> + ?Sized> VecInner<T, LenT, S> {
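    /// Removes the given range from the vector in bulk, returning the removed
    /// elements as a draining iterator.
    ///
    /// Illustrative sketch (assuming the `heapless::Vec` export path and that
    /// [`Drain`] iterates over the removed elements):
    ///
    /// ```
    /// use heapless::Vec;
    ///
    /// let mut v: Vec<u8, 8> = Vec::from_slice(&[1, 2, 3]).unwrap();
    /// let drained: Vec<u8, 8> = v.drain(1..).collect();
    /// assert_eq!(drained.as_slice(), &[2, 3]);
    /// assert_eq!(v.as_slice(), &[1]);
    /// ```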
451 pub fn drain<R>(&mut self, range: R) -> Drain<'_, T, LenT>
484 where
485 R: RangeBounds<usize>,
486 {
487 let len = self.len();
498 let Range { start, end } = crate::slice::range(range, ..len);
499
500 unsafe {
501 self.set_len(start);
503 let vec = NonNull::from(self.as_mut_view());
504 let range_slice = slice::from_raw_parts(vec.as_ref().as_ptr().add(start), end - start);
505 Drain {
506 tail_start: LenT::from_usize(end),
507 tail_len: LenT::from_usize(len - end),
508 iter: range_slice.iter(),
509 vec,
510 }
511 }
512 }
513
514 #[inline]
532 pub fn as_view(&self) -> &VecView<T, LenT> {
533 S::as_vec_view(self)
534 }
535
536 #[inline]
553 pub fn as_mut_view(&mut self) -> &mut VecView<T, LenT> {
554 S::as_vec_view_mut(self)
555 }
556
557 pub fn as_ptr(&self) -> *const T {
559 self.buffer.borrow().as_ptr().cast::<T>()
560 }
561
562 pub fn as_mut_ptr(&mut self) -> *mut T {
564 self.buffer.borrow_mut().as_mut_ptr().cast::<T>()
565 }
566
567 pub fn as_slice(&self) -> &[T] {
579 unsafe {
582 slice::from_raw_parts(
583 self.buffer.borrow().as_ptr().cast::<T>(),
584 self.len.into_usize(),
585 )
586 }
587 }
588
589 pub fn as_mut_slice(&mut self) -> &mut [T] {
603 unsafe {
606 slice::from_raw_parts_mut(
607 self.buffer.borrow_mut().as_mut_ptr().cast::<T>(),
608 self.len.into_usize(),
609 )
610 }
611 }
612
613 pub fn capacity(&self) -> usize {
615 self.buffer.borrow().len()
616 }
617
618 pub fn clear(&mut self) {
620 self.truncate(0);
621 }
622
623 pub fn extend<I>(&mut self, iter: I)
629 where
630 I: IntoIterator<Item = T>,
631 {
632 for elem in iter {
633 self.push(elem).ok().unwrap();
634 }
635 }
636
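    /// Clones and appends all elements of a slice, failing without modifying
    /// the vector if the slice does not fit in the remaining capacity.
    ///
    /// Illustrative sketch (assuming the `heapless::Vec` export path):
    ///
    /// ```
    /// use heapless::Vec;
    ///
    /// let mut v: Vec<u8, 4> = Vec::new();
    /// v.extend_from_slice(&[1, 2, 3]).unwrap();
    /// assert_eq!(v.as_slice(), &[1, 2, 3]);
    /// // Only one slot is left, so appending two more elements fails.
    /// assert!(v.extend_from_slice(&[4, 5]).is_err());
    /// ```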
637 pub fn extend_from_slice(&mut self, other: &[T]) -> Result<(), CapacityError>
653 where
654 T: Clone,
655 {
656 pub fn extend_from_slice_inner<T, LenT: LenType>(
657 len: &mut LenT,
658 buf: &mut [MaybeUninit<T>],
659 other: &[T],
660 ) -> Result<(), CapacityError>
661 where
662 T: Clone,
663 {
664 if len.into_usize() + other.len() > buf.len() {
665 Err(CapacityError)
667 } else {
668 for elem in other {
669 unsafe {
670 *buf.get_unchecked_mut(len.into_usize()) = MaybeUninit::new(elem.clone());
671 }
672 *len += LenT::one();
673 }
674 Ok(())
675 }
676 }
677
678 extend_from_slice_inner(&mut self.len, self.buffer.borrow_mut(), other)
679 }
680
681 pub fn pop(&mut self) -> Option<T> {
683 if self.len == LenT::ZERO {
684 None
685 } else {
686 Some(unsafe { self.pop_unchecked() })
687 }
688 }
689
690 pub fn push(&mut self, item: T) -> Result<(), T> {
694 if self.len() < self.capacity() {
695 unsafe { self.push_unchecked(item) }
696 Ok(())
697 } else {
698 Err(item)
699 }
700 }
701
702 pub unsafe fn pop_unchecked(&mut self) -> T {
708 debug_assert!(!self.is_empty());
709
710 self.len -= LenT::one();
711 self.buffer
712 .borrow_mut()
713 .get_unchecked_mut(self.len.into_usize())
714 .as_ptr()
715 .read()
716 }
717
718 pub unsafe fn push_unchecked(&mut self, item: T) {
724 debug_assert!(!self.is_full());
727
728 *self
729 .buffer
730 .borrow_mut()
731 .get_unchecked_mut(self.len.into_usize()) = MaybeUninit::new(item);
732
733 self.len += LenT::one();
734 }
735
736 pub fn truncate(&mut self, len: usize) {
738 unsafe {
746 if len > self.len() {
750 return;
751 }
752 let remaining_len = self.len() - len;
753 let s = ptr::slice_from_raw_parts_mut(self.as_mut_ptr().add(len), remaining_len);
754 self.len = LenT::from_usize(len);
755 ptr::drop_in_place(s);
756 }
757 }
758
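    /// Resizes the vector in place to `new_len`, cloning `value` into any
    /// newly added slots and truncating if `new_len` is smaller.
    ///
    /// Illustrative sketch (assuming the `heapless::Vec` export path):
    ///
    /// ```
    /// use heapless::Vec;
    ///
    /// let mut v: Vec<u8, 4> = Vec::new();
    /// v.resize(3, 7).unwrap();
    /// assert_eq!(v.as_slice(), &[7, 7, 7]);
    /// v.resize(1, 0).unwrap();
    /// assert_eq!(v.as_slice(), &[7]);
    /// // Growing beyond the fixed capacity is an error.
    /// assert!(v.resize(5, 0).is_err());
    /// ```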
759 pub fn resize(&mut self, new_len: usize, value: T) -> Result<(), CapacityError>
767 where
768 T: Clone,
769 {
770 if new_len > self.capacity() {
771 return Err(CapacityError);
772 }
773
774 if new_len > self.len() {
775 while self.len() < new_len {
776 self.push(value.clone()).ok();
777 }
778 } else {
779 self.truncate(new_len);
780 }
781
782 Ok(())
783 }
784
785 pub fn resize_default(&mut self, new_len: usize) -> Result<(), CapacityError>
793 where
794 T: Clone + Default,
795 {
796 self.resize(new_len, T::default())
797 }
798
799 pub unsafe fn set_len(&mut self, new_len: usize) {
889 debug_assert!(new_len <= self.capacity());
890
891 self.len = LenT::from_usize(new_len);
892 }
893
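    /// Removes the element at `index` in O(1) by swapping the last element
    /// into its place, without preserving the order of the remaining
    /// elements.
    ///
    /// Illustrative sketch (assuming the `heapless::Vec` export path):
    ///
    /// ```
    /// use heapless::Vec;
    ///
    /// let mut v: Vec<u8, 4> = Vec::from_slice(&[1, 2, 3, 4]).unwrap();
    /// assert_eq!(v.swap_remove(0), 1);
    /// // The last element was moved into the vacated slot.
    /// assert_eq!(v.as_slice(), &[4, 2, 3]);
    /// ```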
894 pub fn swap_remove(&mut self, index: usize) -> T {
922 assert!(index < self.len());
923 unsafe { self.swap_remove_unchecked(index) }
924 }
925
926 pub unsafe fn swap_remove_unchecked(&mut self, index: usize) -> T {
954 let length = self.len();
955 debug_assert!(index < length);
956 let value = ptr::read(self.as_ptr().add(index));
957 let base_ptr = self.as_mut_ptr();
958 ptr::copy(base_ptr.add(length - 1), base_ptr.add(index), 1);
959 self.len -= LenT::one();
960 value
961 }
962
963 pub fn is_full(&self) -> bool {
965 self.len() == self.capacity()
966 }
967
968 pub fn is_empty(&self) -> bool {
970 self.len == LenT::ZERO
971 }
972
973 pub fn starts_with(&self, needle: &[T]) -> bool
988 where
989 T: PartialEq,
990 {
991 let n = needle.len();
992 self.len() >= n && needle == &self[..n]
993 }
994
995 pub fn ends_with(&self, needle: &[T]) -> bool
1010 where
1011 T: PartialEq,
1012 {
1013 let (v, n) = (self.len(), needle.len());
1014 v >= n && needle == &self[v - n..]
1015 }
1016
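    /// Inserts `element` at `index`, shifting everything after it to the
    /// right. Returns the element back if the vector is already full, and
    /// panics if `index > len`.
    ///
    /// Illustrative sketch (assuming the `heapless::Vec` export path):
    ///
    /// ```
    /// use heapless::Vec;
    ///
    /// let mut v: Vec<u8, 4> = Vec::from_slice(&[1, 3]).unwrap();
    /// v.insert(1, 2).unwrap();
    /// assert_eq!(v.as_slice(), &[1, 2, 3]);
    /// assert_eq!(v.remove(0), 1);
    /// assert_eq!(v.as_slice(), &[2, 3]);
    /// ```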
1017 pub fn insert(&mut self, index: usize, element: T) -> Result<(), T> {
1038 let len = self.len();
1039 if index > len {
1040 panic!("insertion index (is {index}) should be <= len (is {len})");
1041 }
1042
1043 if self.is_full() {
1045 return Err(element);
1046 }
1047
1048 unsafe {
1049 {
1052 let p = self.as_mut_ptr().add(index);
1053 ptr::copy(p, p.offset(1), len - index);
1056 ptr::write(p, element);
1059 }
1060 self.set_len(len + 1);
1061 }
1062
1063 Ok(())
1064 }
1065
1066 pub fn remove(&mut self, index: usize) -> T {
1092 let len = self.len();
1093 if index >= len {
1094 panic!("removal index (is {index}) should be < len (is {len})");
1095 }
1096 unsafe {
1097 let ret;
1099 {
1100 let ptr = self.as_mut_ptr().add(index);
1102 ret = ptr::read(ptr);
1105
1106 ptr::copy(ptr.offset(1), ptr, len - index - 1);
1108 }
1109 self.set_len(len - 1);
1110 ret
1111 }
1112 }
1113
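    /// Retains only the elements for which the predicate returns `true`,
    /// removing the rest in place while preserving order.
    ///
    /// Illustrative sketch (assuming the `heapless::Vec` export path):
    ///
    /// ```
    /// use heapless::Vec;
    ///
    /// let mut v: Vec<u8, 8> = Vec::from_slice(&[1, 2, 3, 4, 5, 6]).unwrap();
    /// v.retain(|&x| x % 2 == 0);
    /// assert_eq!(v.as_slice(), &[2, 4, 6]);
    /// ```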
1114 pub fn retain<F>(&mut self, mut f: F)
1143 where
1144 F: FnMut(&T) -> bool,
1145 {
1146 self.retain_mut(|elem| f(elem));
1147 }
1148
1149 pub fn retain_mut<F>(&mut self, mut f: F)
1172 where
1173 F: FnMut(&mut T) -> bool,
1174 {
1175 let original_len = self.len;
1176 unsafe { self.set_len(0) };
1179
1180 struct BackshiftOnDrop<'a, T, LenT: LenType, S: VecStorage<T> + ?Sized> {
1192 v: &'a mut VecInner<T, LenT, S>,
1193 processed_len: LenT,
1194 deleted_cnt: LenT,
1195 original_len: LenT,
1196 }
1197
1198 impl<T, LenT: LenType, S: VecStorage<T> + ?Sized> Drop for BackshiftOnDrop<'_, T, LenT, S> {
1199 fn drop(&mut self) {
1200 if self.deleted_cnt > LenT::ZERO {
1201 unsafe {
1203 ptr::copy(
1204 self.v.as_ptr().add(self.processed_len.into_usize()),
1205 self.v
1206 .as_mut_ptr()
1207 .add((self.processed_len - self.deleted_cnt).into_usize()),
1208 (self.original_len - self.processed_len).into_usize(),
1209 );
1210 }
1211 }
1212 unsafe {
1214 self.v
1215 .set_len((self.original_len - self.deleted_cnt).into_usize());
1216 }
1217 }
1218 }
1219
1220 let mut g = BackshiftOnDrop {
1221 v: self,
1222 processed_len: LenT::ZERO,
1223 deleted_cnt: LenT::ZERO,
1224 original_len,
1225 };
1226
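        // Note (editorial): the filtering below runs in two passes over the
        // same helper. The first call (`DELETED = false`) scans until the
        // first element is dropped, since nothing needs to be shifted while
        // no element has been deleted yet. The second call (`DELETED = true`)
        // continues from there, moving each retained element back into the
        // hole left by the deleted ones. The `BackshiftOnDrop` guard keeps
        // the vector in a consistent state even if the predicate panics.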
1227 fn process_loop<F, T, LenT: LenType, S: VecStorage<T> + ?Sized, const DELETED: bool>(
1228 original_len: LenT,
1229 f: &mut F,
1230 g: &mut BackshiftOnDrop<'_, T, LenT, S>,
1231 ) where
1232 F: FnMut(&mut T) -> bool,
1233 {
1234 while g.processed_len != original_len {
1235 let p = g.v.as_mut_ptr();
1236 let cur = unsafe { &mut *p.add(g.processed_len.into_usize()) };
1238 if !f(cur) {
1239 g.processed_len += LenT::one();
1241 g.deleted_cnt += LenT::one();
1242 unsafe { ptr::drop_in_place(cur) };
1244 if DELETED {
1246 continue;
1247 } else {
1248 break;
1249 }
1250 }
1251 if DELETED {
1252 unsafe {
1256 let hole_slot = p.add((g.processed_len - g.deleted_cnt).into_usize());
1257 ptr::copy_nonoverlapping(cur, hole_slot, 1);
1258 }
1259 }
1260 g.processed_len += LenT::one();
1261 }
1262 }
1263
1264 process_loop::<F, T, LenT, S, false>(original_len, &mut f, &mut g);
1266
1267 process_loop::<F, T, LenT, S, true>(original_len, &mut f, &mut g);
1269
1270 drop(g);
1272 }
1273
1274 #[inline]
1301 pub fn spare_capacity_mut(&mut self) -> &mut [MaybeUninit<T>] {
1302 &mut self.buffer.borrow_mut()[self.len.into_usize()..]
1303 }
1304}
1305
1306impl<T, LenT: LenType, const N: usize> Default for Vec<T, N, LenT> {
1309 fn default() -> Self {
1310 Self::new()
1311 }
1312}
1313
1314impl<T, LenT: LenType, S: VecStorage<T> + ?Sized> fmt::Debug for VecInner<T, LenT, S>
1315where
1316 T: fmt::Debug,
1317{
1318 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1319 <[T] as fmt::Debug>::fmt(self, f)
1320 }
1321}
1322
1323impl<LenT: LenType, S: VecStorage<u8> + ?Sized> fmt::Write for VecInner<u8, LenT, S> {
1324 fn write_str(&mut self, s: &str) -> fmt::Result {
1325 match self.extend_from_slice(s.as_bytes()) {
1326 Ok(()) => Ok(()),
1327 Err(_) => Err(fmt::Error),
1328 }
1329 }
1330}
1331
1332impl<T, LenT: LenType, const N: usize, const M: usize> From<[T; M]> for Vec<T, N, LenT> {
1333 fn from(array: [T; M]) -> Self {
1334 Self::from_array(array)
1335 }
1336}
1337
1338impl<T, LenT: LenType, S: VecStorage<T> + ?Sized> Drop for VecInner<T, LenT, S> {
1339 fn drop(&mut self) {
1340 let mut_slice = self.as_mut_slice();
1341 unsafe { ptr::drop_in_place(mut_slice) }
1344 }
1345}
1346
1347#[cfg(feature = "alloc")]
1348impl<T, LenT: LenType, const N: usize> TryFrom<alloc::vec::Vec<T>> for Vec<T, N, LenT> {
1350 type Error = CapacityError;
1351
1352 fn try_from(alloc_vec: alloc::vec::Vec<T>) -> Result<Self, Self::Error> {
1358 let mut vec = Self::new();
1359
1360 for e in alloc_vec {
1361 vec.push(e).map_err(|_| CapacityError {})?;
1363 }
1364
1365 Ok(vec)
1366 }
1367}
1368
1369#[cfg(feature = "alloc")]
1370impl<T, LenT: LenType, const N: usize> TryFrom<Vec<T, N, LenT>> for alloc::vec::Vec<T> {
1372 type Error = alloc::collections::TryReserveError;
1373
1374 fn try_from(vec: Vec<T, N, LenT>) -> Result<Self, Self::Error> {
1380 let mut alloc_vec = Self::new();
1381
1382 alloc_vec.try_reserve_exact(vec.len())?;
1385
1386 alloc_vec.extend(vec);
1389
1390 Ok(alloc_vec)
1391 }
1392}
1393
1394impl<'a, T: Clone, LenT: LenType, const N: usize> TryFrom<&'a [T]> for Vec<T, N, LenT> {
1395 type Error = CapacityError;
1396
1397 fn try_from(slice: &'a [T]) -> Result<Self, Self::Error> {
1398 Self::from_slice(slice)
1399 }
1400}
1401
1402impl<T, LenT: LenType, S: VecStorage<T> + ?Sized> Extend<T> for VecInner<T, LenT, S> {
1403 fn extend<I>(&mut self, iter: I)
1404 where
1405 I: IntoIterator<Item = T>,
1406 {
1407 self.extend(iter);
1408 }
1409}
1410
1411impl<'a, T, LenT: LenType, S: VecStorage<T> + ?Sized> Extend<&'a T> for VecInner<T, LenT, S>
1412where
1413 T: 'a + Copy,
1414{
1415 fn extend<I>(&mut self, iter: I)
1416 where
1417 I: IntoIterator<Item = &'a T>,
1418 {
1419 self.extend(iter.into_iter().cloned());
1420 }
1421}
1422
1423impl<T, LenT: LenType, S: VecStorage<T> + ?Sized> hash::Hash for VecInner<T, LenT, S>
1424where
1425 T: core::hash::Hash,
1426{
1427 fn hash<H: hash::Hasher>(&self, state: &mut H) {
1428 <[T] as hash::Hash>::hash(self, state);
1429 }
1430}
1431
1432impl<'a, T, LenT: LenType, S: VecStorage<T> + ?Sized> IntoIterator for &'a VecInner<T, LenT, S> {
1433 type Item = &'a T;
1434 type IntoIter = slice::Iter<'a, T>;
1435
1436 fn into_iter(self) -> Self::IntoIter {
1437 self.iter()
1438 }
1439}
1440
1441impl<'a, T, LenT: LenType, S: VecStorage<T> + ?Sized> IntoIterator
1442 for &'a mut VecInner<T, LenT, S>
1443{
1444 type Item = &'a mut T;
1445 type IntoIter = slice::IterMut<'a, T>;
1446
1447 fn into_iter(self) -> Self::IntoIter {
1448 self.iter_mut()
1449 }
1450}
1451
1452impl<T, LenT: LenType, const N: usize> FromIterator<T> for Vec<T, N, LenT> {
1453 fn from_iter<I>(iter: I) -> Self
1454 where
1455 I: IntoIterator<Item = T>,
1456 {
1457 let mut vec = Self::new();
1458 for i in iter {
1459 vec.push(i).ok().expect("Vec::from_iter overflow");
1460 }
1461 vec
1462 }
1463}
1464
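/// An iterator that moves elements out of a [`Vec`], produced by its
/// [`IntoIterator`] implementation.
///
/// Illustrative sketch (assuming the `heapless::Vec` export path):
///
/// ```
/// use heapless::Vec;
///
/// let v: Vec<u8, 4> = Vec::from_slice(&[1, 2, 3]).unwrap();
/// let mut iter = v.into_iter();
/// assert_eq!(iter.next(), Some(1));
/// assert_eq!(iter.next_back(), Some(3));
/// assert_eq!(iter.next(), Some(2));
/// assert_eq!(iter.next(), None);
/// ```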
1465pub struct IntoIter<T, const N: usize, LenT: LenType> {
1469 vec: Vec<T, N, LenT>,
1470 next: LenT,
1471}
1472
1473impl<T, LenT: LenType, const N: usize> Iterator for IntoIter<T, N, LenT> {
1474 type Item = T;
1475 fn next(&mut self) -> Option<Self::Item> {
1476 if self.next < self.vec.len {
1477 let item = unsafe {
1478 self.vec
1479 .buffer
1480 .buffer
1481 .get_unchecked_mut(self.next.into_usize())
1482 .as_ptr()
1483 .read()
1484 };
1485 self.next += LenT::one();
1486 Some(item)
1487 } else {
1488 None
1489 }
1490 }
1491
1492 fn size_hint(&self) -> (usize, Option<usize>) {
1493 let len = self.len();
1494 (len, Some(len))
1495 }
1496}
1497
1498impl<T, LenT: LenType, const N: usize> DoubleEndedIterator for IntoIter<T, N, LenT> {
1499 fn next_back(&mut self) -> Option<Self::Item> {
1500 if self.next < self.vec.len {
1501 let item = unsafe { self.vec.pop_unchecked() };
1503 Some(item)
1504 } else {
1505 None
1506 }
1507 }
1508}
1509
1510impl<T, LenT: LenType, const N: usize> FusedIterator for IntoIter<T, N, LenT> {}
1511
1512impl<T, LenT: LenType, const N: usize> ExactSizeIterator for IntoIter<T, N, LenT> {
1513 fn len(&self) -> usize {
1514 (self.vec.len - self.next).into_usize()
1515 }
1516}
1517
1518impl<T, LenT: LenType, const N: usize> Clone for IntoIter<T, N, LenT>
1519where
1520 T: Clone,
1521{
1522 fn clone(&self) -> Self {
1523 let mut vec = Vec::new();
1524
1525 if self.next < self.vec.len {
1526 let s = unsafe {
1527 slice::from_raw_parts(
1528 self.vec
1529 .buffer
1530 .buffer
1531 .as_ptr()
1532 .cast::<T>()
1533 .add(self.next.into_usize()),
1534 (self.vec.len - self.next).into_usize(),
1535 )
1536 };
1537 vec.extend_from_slice(s).ok();
1538 }
1539
1540 Self {
1541 vec,
1542 next: LenT::ZERO,
1543 }
1544 }
1545}
1546
1547impl<T, LenT: LenType, const N: usize> core::fmt::Debug for IntoIter<T, N, LenT>
1548where
1549 T: core::fmt::Debug,
1550{
1551 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
1552 let s = if self.next < self.vec.len {
1553 unsafe {
1554 slice::from_raw_parts(
1555 self.vec
1556 .buffer
1557 .buffer
1558 .as_ptr()
1559 .cast::<T>()
1560 .add(self.next.into_usize()),
1561 (self.vec.len - self.next).into_usize(),
1562 )
1563 }
1564 } else {
1565 &[]
1566 };
1567
1568 write!(f, "{s:?}")
1569 }
1570}
1571
1572impl<T, LenT: LenType, const N: usize> Drop for IntoIter<T, N, LenT> {
1573 fn drop(&mut self) {
1574 unsafe {
1575 ptr::drop_in_place(&mut self.vec.as_mut_slice()[self.next.into_usize()..]);
1577 self.vec.len = LenT::ZERO;
1579 }
1580 }
1581}
1582
1583impl<T, LenT: LenType, const N: usize> IntoIterator for Vec<T, N, LenT> {
1584 type Item = T;
1585 type IntoIter = IntoIter<T, N, LenT>;
1586
1587 fn into_iter(self) -> Self::IntoIter {
1588 IntoIter {
1589 vec: self,
1590 next: LenT::ZERO,
1591 }
1592 }
1593}
1594
1595impl<A, B, LenTA, LenTB, SA, SB> PartialEq<VecInner<B, LenTB, SB>> for VecInner<A, LenTA, SA>
1596where
1597 A: PartialEq<B>,
1598 LenTA: LenType,
1599 LenTB: LenType,
1600 SA: VecStorage<A> + ?Sized,
1601 SB: VecStorage<B> + ?Sized,
1602{
1603 fn eq(&self, other: &VecInner<B, LenTB, SB>) -> bool {
1604 self.as_slice().eq(other.as_slice())
1605 }
1606}
1607
1608impl<A, B, LenTB, const M: usize, SB> PartialEq<VecInner<B, LenTB, SB>> for [A; M]
1609where
1610 A: PartialEq<B>,
1611 LenTB: LenType,
1612 SB: VecStorage<B>,
1613{
1614 fn eq(&self, other: &VecInner<B, LenTB, SB>) -> bool {
1615 self.eq(other.as_slice())
1616 }
1617}
1618
1619impl<A, B, LenTB, SB, const M: usize> PartialEq<VecInner<B, LenTB, SB>> for &[A; M]
1620where
1621 A: PartialEq<B>,
1622 LenTB: LenType,
1623 SB: VecStorage<B>,
1624{
1625 fn eq(&self, other: &VecInner<B, LenTB, SB>) -> bool {
1626 (*self).eq(other)
1627 }
1628}
1629
1630impl<A, B, LenTB, SB> PartialEq<VecInner<B, LenTB, SB>> for [A]
1631where
1632 A: PartialEq<B>,
1633 LenTB: LenType,
1634 SB: VecStorage<B>,
1635{
1636 fn eq(&self, other: &VecInner<B, LenTB, SB>) -> bool {
1637 self.eq(other.as_slice())
1638 }
1639}
1640
1641impl<A, B, LenTB, SB> PartialEq<VecInner<B, LenTB, SB>> for &[A]
1642where
1643 A: PartialEq<B>,
1644 LenTB: LenType,
1645 SB: VecStorage<B>,
1646{
1647 fn eq(&self, other: &VecInner<B, LenTB, SB>) -> bool {
1648 (*self).eq(other)
1649 }
1650}
1651
1652impl<A, B, LenTB: LenType, SB: VecStorage<B>> PartialEq<VecInner<B, LenTB, SB>> for &mut [A]
1653where
1654 A: PartialEq<B>,
1655{
1656 fn eq(&self, other: &VecInner<B, LenTB, SB>) -> bool {
1657 (**self).eq(other)
1658 }
1659}
1660
1661impl<A, B, LenTA: LenType, SA, const N: usize> PartialEq<[B; N]> for VecInner<A, LenTA, SA>
1662where
1663 A: PartialEq<B>,
1664 SA: VecStorage<A> + ?Sized,
1665{
1666 #[inline]
1667 fn eq(&self, other: &[B; N]) -> bool {
1668 self.as_slice().eq(other.as_slice())
1669 }
1670}
1671
1672impl<A, B, LenTA, SA, const N: usize> PartialEq<&[B; N]> for VecInner<A, LenTA, SA>
1673where
1674 A: PartialEq<B>,
1675 LenTA: LenType,
1676 SA: VecStorage<A> + ?Sized,
1677{
1678 #[inline]
1679 fn eq(&self, other: &&[B; N]) -> bool {
1680 self.as_slice().eq(other.as_slice())
1681 }
1682}
1683
1684impl<A, B, LenTA, SA> PartialEq<[B]> for VecInner<A, LenTA, SA>
1685where
1686 A: PartialEq<B>,
1687 LenTA: LenType,
1688 SA: VecStorage<A> + ?Sized,
1689{
1690 #[inline]
1691 fn eq(&self, other: &[B]) -> bool {
1692 self.as_slice().eq(other)
1693 }
1694}
1695
1696impl<A, B, LenTA, SA> PartialEq<&[B]> for VecInner<A, LenTA, SA>
1697where
1698 A: PartialEq<B>,
1699 LenTA: LenType,
1700 SA: VecStorage<A> + ?Sized,
1701{
1702 #[inline]
1703 fn eq(&self, other: &&[B]) -> bool {
1704 self.as_slice().eq(*other)
1705 }
1706}
1707
1708impl<A, B, LenTA, SA> PartialEq<&mut [B]> for VecInner<A, LenTA, SA>
1709where
1710 A: PartialEq<B>,
1711 LenTA: LenType,
1712 SA: VecStorage<A> + ?Sized,
1713{
1714 #[inline]
1715 fn eq(&self, other: &&mut [B]) -> bool {
1716 self.as_slice().eq(*other)
1717 }
1718}
1719
1720impl<T, LenT: LenType, S: VecStorage<T> + ?Sized> Eq for VecInner<T, LenT, S> where T: Eq {}
1722
1723impl<T, LenTA: LenType, LenTB: LenType, SA: VecStorage<T> + ?Sized, SB: VecStorage<T> + ?Sized>
1724 PartialOrd<VecInner<T, LenTA, SA>> for VecInner<T, LenTB, SB>
1725where
1726 T: PartialOrd,
1727{
1728 fn partial_cmp(&self, other: &VecInner<T, LenTA, SA>) -> Option<Ordering> {
1729 self.as_slice().partial_cmp(other.as_slice())
1730 }
1731}
1732
1733impl<T, LenT: LenType, S: VecStorage<T> + ?Sized> Ord for VecInner<T, LenT, S>
1734where
1735 T: Ord,
1736{
1737 #[inline]
1738 fn cmp(&self, other: &Self) -> Ordering {
1739 self.as_slice().cmp(other.as_slice())
1740 }
1741}
1742
1743impl<T, LenT: LenType, S: VecStorage<T> + ?Sized> ops::Deref for VecInner<T, LenT, S> {
1744 type Target = [T];
1745
1746 fn deref(&self) -> &Self::Target {
1747 self.as_slice()
1748 }
1749}
1750
1751impl<T, LenT: LenType, S: VecStorage<T> + ?Sized> ops::DerefMut for VecInner<T, LenT, S> {
1752 fn deref_mut(&mut self) -> &mut Self::Target {
1753 self.as_mut_slice()
1754 }
1755}
1756
1757impl<T, LenT: LenType, S: VecStorage<T> + ?Sized> borrow::Borrow<[T]> for VecInner<T, LenT, S> {
1758 fn borrow(&self) -> &[T] {
1759 self.as_slice()
1760 }
1761}
1762impl<T, LenT: LenType, S: VecStorage<T> + ?Sized> borrow::BorrowMut<[T]> for VecInner<T, LenT, S> {
1763 fn borrow_mut(&mut self) -> &mut [T] {
1764 self.as_mut_slice()
1765 }
1766}
1767
1768impl<T, LenT: LenType, S: VecStorage<T> + ?Sized> AsRef<Self> for VecInner<T, LenT, S> {
1769 #[inline]
1770 fn as_ref(&self) -> &Self {
1771 self
1772 }
1773}
1774
1775impl<T, LenT: LenType, S: VecStorage<T> + ?Sized> AsMut<Self> for VecInner<T, LenT, S> {
1776 #[inline]
1777 fn as_mut(&mut self) -> &mut Self {
1778 self
1779 }
1780}
1781
1782impl<T, LenT: LenType, S: VecStorage<T> + ?Sized> AsRef<[T]> for VecInner<T, LenT, S> {
1783 #[inline]
1784 fn as_ref(&self) -> &[T] {
1785 self
1786 }
1787}
1788
1789impl<T, LenT: LenType, S: VecStorage<T> + ?Sized> AsMut<[T]> for VecInner<T, LenT, S> {
1790 #[inline]
1791 fn as_mut(&mut self) -> &mut [T] {
1792 self
1793 }
1794}
1795
1796impl<T, const N: usize, LenT: LenType> Clone for Vec<T, N, LenT>
1797where
1798 T: Clone,
1799{
1800 fn clone(&self) -> Self {
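        // This is not infinite recursion: the call resolves to the inherent
        // `pub(crate) fn clone` defined on `Vec` above, because inherent
        // methods take precedence over trait methods during method
        // resolution.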
1801 self.clone()
1802 }
1803}
1804
1805#[cfg(test)]
1806mod tests {
1807 use core::fmt::Write;
1808
1809 use static_assertions::assert_not_impl_any;
1810
1811 use super::{Vec, VecView};
1812
1813 assert_not_impl_any!(Vec<*const (), 4>: Send);
1815
1816 #[test]
1817 fn static_new() {
1818 static mut _V: Vec<i32, 4> = Vec::new();
1819 }
1820
1821 #[test]
1822 fn stack_new() {
1823 let mut _v: Vec<i32, 4> = Vec::new();
1824 }
1825
1826 #[test]
1827 fn is_full_empty() {
1828 let mut v: Vec<i32, 4> = Vec::new();
1829
1830 assert!(v.is_empty());
1831 assert!(!v.is_full());
1832
1833 v.push(1).unwrap();
1834 assert!(!v.is_empty());
1835 assert!(!v.is_full());
1836
1837 v.push(1).unwrap();
1838 assert!(!v.is_empty());
1839 assert!(!v.is_full());
1840
1841 v.push(1).unwrap();
1842 assert!(!v.is_empty());
1843 assert!(!v.is_full());
1844
1845 v.push(1).unwrap();
1846 assert!(!v.is_empty());
1847 assert!(v.is_full());
1848 }
1849
1850 #[test]
1851 fn drop() {
1852 droppable!();
1853
1854 {
1855 let mut v: Vec<Droppable, 2> = Vec::new();
1856 v.push(Droppable::new()).ok().unwrap();
1857 v.push(Droppable::new()).ok().unwrap();
1858 v.pop().unwrap();
1859 }
1860
1861 assert_eq!(Droppable::count(), 0);
1862
1863 {
1864 let mut v: Vec<Droppable, 2> = Vec::new();
1865 v.push(Droppable::new()).ok().unwrap();
1866 v.push(Droppable::new()).ok().unwrap();
1867 }
1868
1869 assert_eq!(Droppable::count(), 0);
1870 }
1871
1872 #[test]
1873 fn drop_vecview() {
1874 droppable!();
1875
1876 {
1877 let v: Vec<Droppable, 2> = Vec::new();
1878 let v: Box<Vec<Droppable, 2>> = Box::new(v);
1879 let mut v: Box<VecView<Droppable>> = v;
1880 v.push(Droppable::new()).ok().unwrap();
1881 v.push(Droppable::new()).ok().unwrap();
1882 assert_eq!(Droppable::count(), 2);
1883 v.pop().unwrap();
1884 assert_eq!(Droppable::count(), 1);
1885 }
1886
1887 assert_eq!(Droppable::count(), 0);
1888
1889 {
1890 let v: Vec<Droppable, 2> = Vec::new();
1891 let v: Box<Vec<Droppable, 2>> = Box::new(v);
1892 let mut v: Box<VecView<Droppable>> = v;
1893 v.push(Droppable::new()).ok().unwrap();
1894 v.push(Droppable::new()).ok().unwrap();
1895 assert_eq!(Droppable::count(), 2);
1896 }
1897
1898 assert_eq!(Droppable::count(), 0);
1899 }
1900
1901 #[test]
1902 fn eq() {
1903 let mut xs: Vec<i32, 4> = Vec::new();
1904 let mut ys: Vec<i32, 8> = Vec::new();
1905
1906 assert_eq!(xs, ys);
1907
1908 xs.push(1).unwrap();
1909 ys.push(1).unwrap();
1910
1911 assert_eq!(xs, ys);
1912 }
1913
1914 #[test]
1915 fn cmp() {
1916 let mut xs: Vec<i32, 4> = Vec::new();
1917 let mut ys: Vec<i32, 4> = Vec::new();
1918
1919 assert_eq!(xs, ys);
1920
1921 xs.push(1).unwrap();
1922 ys.push(2).unwrap();
1923
1924 assert!(xs < ys);
1925 }
1926
1927 #[test]
1928 fn cmp_heterogenous_size() {
1929 let mut xs: Vec<i32, 4> = Vec::new();
1930 let mut ys: Vec<i32, 8> = Vec::new();
1931
1932 assert_eq!(xs, ys);
1933
1934 xs.push(1).unwrap();
1935 ys.push(2).unwrap();
1936
1937 assert!(xs < ys);
1938 }
1939
1940 #[test]
1941 fn cmp_with_arrays_and_slices() {
1942 let mut xs: Vec<i32, 12> = Vec::new();
1943 xs.push(1).unwrap();
1944
1945 let array = [1];
1946
1947 assert_eq!(xs, array);
1948 assert_eq!(array, xs);
1949
1950 assert_eq!(xs, array.as_slice());
1951 assert_eq!(array.as_slice(), xs);
1952
1953 assert_eq!(xs, &array);
1954 assert_eq!(&array, xs);
1955
1956 let longer_array = [1; 20];
1957
1958 assert_ne!(xs, longer_array);
1959 assert_ne!(longer_array, xs);
1960 }
1961
1962 #[test]
1963 fn full() {
1964 let mut v: Vec<i32, 4> = Vec::new();
1965
1966 v.push(0).unwrap();
1967 v.push(1).unwrap();
1968 v.push(2).unwrap();
1969 v.push(3).unwrap();
1970
1971 assert!(v.push(4).is_err());
1972 }
1973
1974 #[test]
1975 fn iter() {
1976 let mut v: Vec<i32, 4> = Vec::new();
1977
1978 v.push(0).unwrap();
1979 v.push(1).unwrap();
1980 v.push(2).unwrap();
1981 v.push(3).unwrap();
1982
1983 let mut items = v.iter();
1984
1985 assert_eq!(items.next(), Some(&0));
1986 assert_eq!(items.next(), Some(&1));
1987 assert_eq!(items.next(), Some(&2));
1988 assert_eq!(items.next(), Some(&3));
1989 assert_eq!(items.next(), None);
1990 }
1991
1992 #[test]
1993 fn iter_mut() {
1994 let mut v: Vec<i32, 4> = Vec::new();
1995
1996 v.push(0).unwrap();
1997 v.push(1).unwrap();
1998 v.push(2).unwrap();
1999 v.push(3).unwrap();
2000
2001 let mut items = v.iter_mut();
2002
2003 assert_eq!(items.next(), Some(&mut 0));
2004 assert_eq!(items.next(), Some(&mut 1));
2005 assert_eq!(items.next(), Some(&mut 2));
2006 assert_eq!(items.next(), Some(&mut 3));
2007 assert_eq!(items.next(), None);
2008 }
2009
2010 #[test]
2011 fn collect_from_iter() {
2012 let slice = &[1, 2, 3];
2013 let vec: Vec<i32, 4> = slice.iter().cloned().collect();
2014 assert_eq!(&vec, slice);
2015 }
2016
2017 #[test]
2018 #[should_panic]
2019 fn collect_from_iter_overfull() {
2020 let slice = &[1, 2, 3];
2021 let _vec = slice.iter().cloned().collect::<Vec<_, 2>>();
2022 }
2023
2024 #[test]
2025 fn iter_move() {
2026 let mut v: Vec<i32, 4> = Vec::new();
2027 v.push(0).unwrap();
2028 v.push(1).unwrap();
2029 v.push(2).unwrap();
2030 v.push(3).unwrap();
2031
2032 let mut items = v.into_iter();
2033
2034 assert_eq!(items.next(), Some(0));
2035 assert_eq!(items.next(), Some(1));
2036 assert_eq!(items.next(), Some(2));
2037 assert_eq!(items.next(), Some(3));
2038 assert_eq!(items.next(), None);
2039 }
2040
2041 #[test]
2042 fn iter_move_drop() {
2043 droppable!();
2044
2045 {
2046 let mut vec: Vec<Droppable, 2> = Vec::new();
2047 vec.push(Droppable::new()).ok().unwrap();
2048 vec.push(Droppable::new()).ok().unwrap();
2049 let mut items = vec.into_iter();
2050 let _ = items.next();
2052 let _ = items.next();
2053 }
2054
2055 assert_eq!(Droppable::count(), 0);
2056
2057 {
2058 let mut vec: Vec<Droppable, 2> = Vec::new();
2059 vec.push(Droppable::new()).ok().unwrap();
2060 vec.push(Droppable::new()).ok().unwrap();
2061 let _items = vec.into_iter();
2062 }
2064
2065 assert_eq!(Droppable::count(), 0);
2066
2067 {
2068 let mut vec: Vec<Droppable, 2> = Vec::new();
2069 vec.push(Droppable::new()).ok().unwrap();
2070 vec.push(Droppable::new()).ok().unwrap();
2071 let mut items = vec.into_iter();
            let _ = items.next();
        }
2074
2075 assert_eq!(Droppable::count(), 0);
2076 }
2077
2078 #[test]
2079 fn iter_move_next_back() {
2080 let mut vec: Vec<&str, 3> = Vec::new();
2081 vec.push("a").unwrap();
2082 vec.push("b").unwrap();
2083 vec.push("c").unwrap();
2084 let mut items = vec.into_iter();
        let _ = items.next();
        assert_eq!(items.next_back(), Some("c"));
2087 assert_eq!(items.next_back(), Some("b"));
2088 assert_eq!(items.next_back(), None);
2089 }
2090
2091 #[test]
2092 fn iter_move_len() {
2093 let mut vec: Vec<&str, 2> = Vec::new();
2094 vec.push("a").unwrap();
2095 vec.push("b").unwrap();
2096 let mut items = vec.into_iter();
2097 assert_eq!(items.len(), 2);
        let _ = items.next();
        assert_eq!(items.len(), 1);
        let _ = items.next_back();
        assert_eq!(items.len(), 0);
2102 }
2103
2104 #[test]
2105 fn push_and_pop() {
2106 let mut v: Vec<i32, 4> = Vec::new();
2107 assert_eq!(v.len(), 0);
2108
2109 assert_eq!(v.pop(), None);
2110 assert_eq!(v.len(), 0);
2111
2112 v.push(0).unwrap();
2113 assert_eq!(v.len(), 1);
2114
2115 assert_eq!(v.pop(), Some(0));
2116 assert_eq!(v.len(), 0);
2117
2118 assert_eq!(v.pop(), None);
2119 assert_eq!(v.len(), 0);
2120 }
2121
2122 #[test]
2123 fn resize_size_limit() {
2124 let mut v: Vec<u8, 4> = Vec::new();
2125
2126 v.resize(0, 0).unwrap();
2127 v.resize(4, 0).unwrap();
2128 v.resize(5, 0).expect_err("full");
2129 }
2130
2131 #[test]
2132 fn resize_length_cases() {
2133 let mut v: Vec<u8, 4> = Vec::new();
2134
2135 assert_eq!(v.len(), 0);
2136
2137 v.resize(1, 0).unwrap();
2139 assert_eq!(v.len(), 1);
2140
2141 v.resize(3, 0).unwrap();
2143 assert_eq!(v.len(), 3);
2144
2145 v.resize(3, 0).unwrap();
2147 assert_eq!(v.len(), 3);
2148
2149 v.resize(2, 0).unwrap();
2151 assert_eq!(v.len(), 2);
2152
2153 v.resize(0, 0).unwrap();
2155 assert_eq!(v.len(), 0);
2156 }
2157
2158 #[test]
2159 fn resize_contents() {
2160 let mut v: Vec<u8, 4> = Vec::new();
2161
2162 v.resize(1, 17).unwrap();
2164 assert_eq!(v[0], 17);
2165
2166 v.resize(2, 18).unwrap();
2168 assert_eq!(v[0], 17);
2169 assert_eq!(v[1], 18);
2170
2171 v.resize(2, 0).unwrap();
2173 assert_eq!(v[0], 17);
2174 assert_eq!(v[1], 18);
2175
2176 v.resize(1, 0).unwrap();
2178 assert_eq!(v[0], 17);
2179 }
2180
2181 #[test]
2182 fn resize_default() {
2183 let mut v: Vec<u8, 4> = Vec::new();
2184
2185 v.resize_default(1).unwrap();
2188 assert_eq!(v[0], 0);
2189 }
2190
2191 #[test]
2192 fn write() {
2193 let mut v: Vec<u8, 4> = Vec::new();
2194 write!(v, "{:x}", 1234).unwrap();
2195 assert_eq!(&v[..], b"4d2");
2196 }
2197
2198 #[test]
2199 fn extend_from_slice() {
2200 let mut v: Vec<u8, 4> = Vec::new();
2201 assert_eq!(v.len(), 0);
2202 v.extend_from_slice(&[1, 2]).unwrap();
2203 assert_eq!(v.len(), 2);
2204 assert_eq!(v.as_slice(), &[1, 2]);
2205 v.extend_from_slice(&[3]).unwrap();
2206 assert_eq!(v.len(), 3);
2207 assert_eq!(v.as_slice(), &[1, 2, 3]);
2208 assert!(v.extend_from_slice(&[4, 5]).is_err());
2209 assert_eq!(v.len(), 3);
2210 assert_eq!(v.as_slice(), &[1, 2, 3]);
2211 }
2212
2213 #[test]
2214 fn from_slice() {
2215 let v: Vec<u8, 4> = Vec::from_slice(&[1, 2, 3]).unwrap();
2217 assert_eq!(v.len(), 3);
2218 assert_eq!(v.as_slice(), &[1, 2, 3]);
2219
2220 assert!(Vec::<u8, 2>::from_slice(&[1, 2, 3]).is_err());
2222 }
2223
2224 #[test]
2225 fn from_array() {
2226 let v: Vec<u8, 3> = Vec::from_array([1, 2, 3]);
2228 assert_eq!(v, Vec::<u8, 3>::from([1, 2, 3]));
2229 assert_eq!(v.len(), 3);
2230 assert_eq!(v.as_slice(), &[1, 2, 3]);
2231
2232 let v: Vec<u8, 4> = Vec::from_array([1, 2, 3]);
2234 assert_eq!(v, Vec::<u8, 4>::from([1, 2, 3]));
2235 assert_eq!(v.len(), 3);
2236 assert_eq!(v.as_slice(), &[1, 2, 3]);
2237 }
2238
2239 #[test]
2240 fn from_array_no_drop() {
2241 struct Drops(Option<u8>);
2242
2243 impl Drop for Drops {
2244 fn drop(&mut self) {
2245 self.0 = None;
2246 }
2247 }
2248
2249 let v: Vec<Drops, 3> = Vec::from([Drops(Some(1)), Drops(Some(2)), Drops(Some(3))]);
2250
2251 assert_eq!(v[0].0, Some(1));
2252 assert_eq!(v[1].0, Some(2));
2253 assert_eq!(v[2].0, Some(3));
2254 }
2255
2256 #[test]
2257 fn starts_with() {
2258 let v: Vec<_, 8> = Vec::from_slice(b"ab").unwrap();
2259 assert!(v.starts_with(&[]));
2260 assert!(v.starts_with(b""));
2261 assert!(v.starts_with(b"a"));
2262 assert!(v.starts_with(b"ab"));
2263 assert!(!v.starts_with(b"abc"));
2264 assert!(!v.starts_with(b"ba"));
2265 assert!(!v.starts_with(b"b"));
2266 }
2267
2268 #[test]
2269 fn ends_with() {
2270 let v: Vec<_, 8> = Vec::from_slice(b"ab").unwrap();
2271 assert!(v.ends_with(&[]));
2272 assert!(v.ends_with(b""));
2273 assert!(v.ends_with(b"b"));
2274 assert!(v.ends_with(b"ab"));
2275 assert!(!v.ends_with(b"abc"));
2276 assert!(!v.ends_with(b"ba"));
2277 assert!(!v.ends_with(b"a"));
2278 }
2279
2280 #[test]
2281 fn spare_capacity_mut() {
2282 let mut v: Vec<_, 4> = Vec::new();
2283 let uninit = v.spare_capacity_mut();
2284 assert_eq!(uninit.len(), 4);
2285 uninit[0].write(1);
2286 uninit[1].write(2);
2287 uninit[2].write(3);
2288 unsafe { v.set_len(3) };
2289 assert_eq!(v.as_slice(), &[1, 2, 3]);
2290
2291 let uninit = v.spare_capacity_mut();
2292 assert_eq!(uninit.len(), 1);
2293 uninit[0].write(4);
2294 unsafe { v.set_len(4) };
2295 assert_eq!(v.as_slice(), &[1, 2, 3, 4]);
2296
2297 assert!(v.spare_capacity_mut().is_empty());
2298 }
2299
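    // Editorial sketch: exercises `retain` and `drain`, which the tests above
    // do not cover. Assumes `Drain` iterates over the drained elements,
    // mirroring the standard library's `Vec::drain`.
    #[test]
    fn retain_and_drain() {
        let mut v: Vec<i32, 8> = Vec::from_slice(&[1, 2, 3, 4, 5, 6]).unwrap();

        v.retain(|&x| x % 2 == 0);
        assert_eq!(v.as_slice(), &[2, 4, 6]);

        let drained: Vec<i32, 8> = v.drain(1..).collect();
        assert_eq!(drained.as_slice(), &[4, 6]);
        assert_eq!(v.as_slice(), &[2]);
    }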
2300 #[test]
2301 #[cfg(feature = "alloc")]
2302 fn heapless_to_alloc() {
2303 let mut hv: Vec<u8, 4> = Vec::new();
2304 hv.push(0).unwrap();
2305 hv.push(1).unwrap();
2306
2307 let av: alloc::vec::Vec<u8> = hv.clone().try_into().unwrap();
2308 assert_eq!(av.as_slice(), hv.as_slice());
2309 }
2310
2311 #[test]
2312 #[cfg(feature = "alloc")]
2313 fn alloc_to_heapless() {
2314 let mut av: alloc::vec::Vec<u8> = alloc::vec::Vec::new();
2315 av.push(0);
2316 av.push(1);
2317
2318 let hv: Vec<u8, 2> = av.clone().try_into().unwrap();
2319 assert_eq!(hv.as_slice(), av.as_slice());
2320
2321 let _: crate::CapacityError =
2322 <alloc::vec::Vec<u8> as TryInto<Vec<u8, 1>>>::try_into(av.clone()).unwrap_err();
2323 }
2324
2325 #[test]
2326 #[cfg(feature = "zeroize")]
2327 fn test_vec_zeroize() {
2328 use zeroize::Zeroize;
2329
2330 let mut v: Vec<u8, 8> = Vec::new();
2331 for i in 0..8 {
2332 v.push(i).unwrap();
2333 }
2334
2335 for i in 0..8 {
2336 assert_eq!(v[i], i as u8);
2337 }
2338
2339 v.truncate(4);
2340 assert_eq!(v.len(), 4);
2341
2342 for i in 0..4 {
2343 assert_eq!(v[i], i as u8);
2344 }
2345
2346 v.zeroize();
2347
2348 assert_eq!(v.len(), 0);
2349
2350 unsafe {
2351 v.set_len(8);
2352 }
2353
2354 for i in 0..8 {
2355 assert_eq!(v[i], 0);
2356 }
2357 }
2358
2359 #[test]
2360 #[cfg(feature = "zeroize")]
2361 fn test_vecview_zeroize() {
2362 use zeroize::Zeroize;
2363
2364 let mut v: Vec<u8, 8> = Vec::new();
2365 for i in 0..8 {
2366 v.push(i).unwrap();
2367 }
2368
2369 let view = v.as_mut_view();
2370
2371 for i in 0..8 {
2372 assert_eq!(view[i], i as u8);
2373 }
2374
2375 view.zeroize();
2376
2377 assert_eq!(view.len(), 0);
2378
2379 unsafe {
2380 view.set_len(8);
2381 }
2382
2383 for i in 0..8 {
2384 assert_eq!(view[i], 0);
2385 }
2386 }
2387
2388 fn _test_variance<'a: 'b, 'b>(x: Vec<&'a (), 42>) -> Vec<&'b (), 42> {
2389 x
2390 }
2391 fn _test_variance_view<'a: 'b, 'b, 'c>(x: &'c VecView<&'a ()>) -> &'c VecView<&'b ()> {
2392 x
2393 }
2394}