heapless/vec/mod.rs
1//! A fixed capacity [`Vec`](https://doc.rust-lang.org/std/vec/struct.Vec.html).
2
3use core::borrow;
4use core::marker::PhantomData;
5use core::{
6 cmp::Ordering,
7 fmt, hash,
8 mem::{self, ManuallyDrop, MaybeUninit},
9 ops::{self, Range, RangeBounds},
10 ptr::{self, NonNull},
11 slice,
12};
13
14use crate::len_type::{check_capacity_fits, LenType};
15use crate::CapacityError;
16
17mod drain;
18
19mod storage {
20 use core::mem::MaybeUninit;
21
22 use crate::{
23 binary_heap::{BinaryHeapInner, BinaryHeapView},
24 deque::{DequeInner, DequeView},
25 len_type::LenType,
26 };
27
28 use super::{VecInner, VecView};
29
30 /// Trait defining how data for a container is stored.
31 ///
32 /// There are two implementations available:
33 ///
34 /// - [`OwnedVecStorage`]: stores the data in an array `[T; N]` whose size is known at compile time.
35 /// - [`ViewVecStorage`]: stores the data in an unsized `[T]`.
36 ///
37 /// This allows [`Vec`] to be generic over either sized or unsized storage. The [`vec`](super)
38 /// module contains a [`VecInner`] struct that's generic on [`VecStorage`],
39 /// and two type aliases for convenience:
40 ///
41 /// - [`Vec<T, N>`](crate::vec::Vec) = `VecInner<T, LenT, OwnedVecStorage<T, N>>`
42 /// - [`VecView<T>`](crate::vec::VecView) = `VecInner<T, LenT, ViewVecStorage<T>>`
43 ///
44 /// `Vec` can be unsized into `VecView`, either by unsizing coercions such as `&mut Vec -> &mut VecView` or
45 /// `Box<Vec> -> Box<VecView>`, or explicitly with [`.as_view()`](crate::vec::Vec::as_view) or [`.as_mut_view()`](crate::vec::Vec::as_mut_view).
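    ///
    /// For example, code that needs to work with any capacity can accept a `&VecView` and rely on
    /// that coercion (a small illustrative sketch):
    ///
    /// ```
    /// use heapless::{Vec, VecView};
    ///
    /// fn sum(view: &VecView<u32>) -> u32 {
    ///     view.iter().sum()
    /// }
    ///
    /// let vec: Vec<u32, 8> = Vec::from_slice(&[1, 2, 3]).unwrap();
    /// // `&Vec<u32, 8>` coerces to `&VecView<u32>` at the call site.
    /// assert_eq!(sum(&vec), 6);
    /// ```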
46 ///
47 /// This trait is sealed, so you cannot implement it for your own types. You can only use
48 /// the implementations provided by this crate.
49 ///
50 /// [`VecInner`]: super::VecInner
51 /// [`Vec`]: super::Vec
52 /// [`VecView`]: super::VecView
53 #[allow(private_bounds)]
54 pub trait VecStorage<T>: VecSealedStorage<T> {}
55
56 pub trait VecSealedStorage<T> {
57 // Part of the sealed trait so that no trait is publicly implemented by `OwnedVecStorage` besides `VecStorage`.
58 fn borrow(&self) -> &[MaybeUninit<T>];
59 fn borrow_mut(&mut self) -> &mut [MaybeUninit<T>];
60
61 fn as_vec_view<LenT: LenType>(this: &VecInner<T, LenT, Self>) -> &VecView<T, LenT>
62 where
63 Self: VecStorage<T>;
64 fn as_vec_view_mut<LenT: LenType>(
65 this: &mut VecInner<T, LenT, Self>,
66 ) -> &mut VecView<T, LenT>
67 where
68 Self: VecStorage<T>;
69
70 fn as_binary_heap_view<K>(this: &BinaryHeapInner<T, K, Self>) -> &BinaryHeapView<T, K>
71 where
72 Self: VecStorage<T>;
73 fn as_binary_heap_view_mut<K>(
74 this: &mut BinaryHeapInner<T, K, Self>,
75 ) -> &mut BinaryHeapView<T, K>
76 where
77 Self: VecStorage<T>;
78
79 fn as_deque_view(this: &DequeInner<T, Self>) -> &DequeView<T>
80 where
81 Self: VecStorage<T>;
82 fn as_deque_view_mut(this: &mut DequeInner<T, Self>) -> &mut DequeView<T>
83 where
84 Self: VecStorage<T>;
85 }
86
87 // One sealed layer of indirection to hide the internal details (the `MaybeUninit` buffer).
88 pub struct VecStorageInner<T: ?Sized> {
89 pub(crate) buffer: T,
90 }
91
92 /// Implementation of [`VecStorage`] that stores the data in an array `[T; N]` whose size is known at compile time.
93 pub type OwnedVecStorage<T, const N: usize> = VecStorageInner<[MaybeUninit<T>; N]>;
94 /// Implementation of [`VecStorage`] that stores the data in an unsized `[T]`.
95 pub type ViewVecStorage<T> = VecStorageInner<[MaybeUninit<T>]>;
96
97 impl<T, const N: usize> VecSealedStorage<T> for OwnedVecStorage<T, N> {
98 fn borrow(&self) -> &[MaybeUninit<T>] {
99 &self.buffer
100 }
101 fn borrow_mut(&mut self) -> &mut [MaybeUninit<T>] {
102 &mut self.buffer
103 }
104
105 fn as_vec_view<LenT: LenType>(this: &VecInner<T, LenT, Self>) -> &VecView<T, LenT>
106 where
107 Self: VecStorage<T>,
108 {
109 this
110 }
111 fn as_vec_view_mut<LenT: LenType>(
112 this: &mut VecInner<T, LenT, Self>,
113 ) -> &mut VecView<T, LenT>
114 where
115 Self: VecStorage<T>,
116 {
117 this
118 }
119
120 fn as_binary_heap_view<K>(this: &BinaryHeapInner<T, K, Self>) -> &BinaryHeapView<T, K>
121 where
122 Self: VecStorage<T>,
123 {
124 this
125 }
126 fn as_binary_heap_view_mut<K>(
127 this: &mut BinaryHeapInner<T, K, Self>,
128 ) -> &mut BinaryHeapView<T, K>
129 where
130 Self: VecStorage<T>,
131 {
132 this
133 }
134 fn as_deque_view(this: &DequeInner<T, Self>) -> &DequeView<T>
135 where
136 Self: VecStorage<T>,
137 {
138 this
139 }
140 fn as_deque_view_mut(this: &mut DequeInner<T, Self>) -> &mut DequeView<T>
141 where
142 Self: VecStorage<T>,
143 {
144 this
145 }
146 }
147 impl<T, const N: usize> VecStorage<T> for OwnedVecStorage<T, N> {}
148
149 impl<T> VecSealedStorage<T> for ViewVecStorage<T> {
150 fn borrow(&self) -> &[MaybeUninit<T>] {
151 &self.buffer
152 }
153 fn borrow_mut(&mut self) -> &mut [MaybeUninit<T>] {
154 &mut self.buffer
155 }
156
157 fn as_vec_view<LenT: LenType>(this: &VecInner<T, LenT, Self>) -> &VecView<T, LenT>
158 where
159 Self: VecStorage<T>,
160 {
161 this
162 }
163 fn as_vec_view_mut<LenT: LenType>(
164 this: &mut VecInner<T, LenT, Self>,
165 ) -> &mut VecView<T, LenT>
166 where
167 Self: VecStorage<T>,
168 {
169 this
170 }
171
172 fn as_binary_heap_view<K>(this: &BinaryHeapInner<T, K, Self>) -> &BinaryHeapView<T, K>
173 where
174 Self: VecStorage<T>,
175 {
176 this
177 }
178 fn as_binary_heap_view_mut<K>(
179 this: &mut BinaryHeapInner<T, K, Self>,
180 ) -> &mut BinaryHeapView<T, K>
181 where
182 Self: VecStorage<T>,
183 {
184 this
185 }
186 fn as_deque_view(this: &DequeInner<T, Self>) -> &DequeView<T>
187 where
188 Self: VecStorage<T>,
189 {
190 this
191 }
192 fn as_deque_view_mut(this: &mut DequeInner<T, Self>) -> &mut DequeView<T>
193 where
194 Self: VecStorage<T>,
195 {
196 this
197 }
198 }
199 impl<T> VecStorage<T> for ViewVecStorage<T> {}
200}
201pub use storage::{OwnedVecStorage, VecStorage, ViewVecStorage};
202
203pub(crate) use storage::VecStorageInner;
204
205pub use drain::Drain;
206
207/// Base struct for [`Vec`] and [`VecView`], generic over the [`VecStorage`].
208///
209/// In most cases you should use [`Vec`] or [`VecView`] directly. Only use this
210/// struct if you want to write code that's generic over both.
211pub struct VecInner<T, LenT: LenType, S: VecStorage<T> + ?Sized> {
212 phantom: PhantomData<T>,
213 len: LenT,
214 buffer: S,
215}
216
217/// A fixed capacity [`Vec`](https://doc.rust-lang.org/std/vec/struct.Vec.html).
218///
219/// # Examples
220///
221/// ```
222/// use heapless::Vec;
223///
224/// // A vector with a fixed capacity of 8 elements allocated on the stack
225/// let mut vec = Vec::<_, 8>::new();
226/// vec.push(1).unwrap();
227/// vec.push(2).unwrap();
228///
229/// assert_eq!(vec.len(), 2);
230/// assert_eq!(vec[0], 1);
231///
232/// assert_eq!(vec.pop(), Some(2));
233/// assert_eq!(vec.len(), 1);
234///
235/// vec[0] = 7;
236/// assert_eq!(vec[0], 7);
237///
238/// vec.extend([1, 2, 3].iter().cloned());
239///
240/// for x in &vec {
241/// println!("{}", x);
242/// }
243/// assert_eq!(*vec, [7, 1, 2, 3]);
244/// ```
245///
246/// In some cases, the const-generic might be cumbersome. `Vec` can coerce into a [`VecView`] to remove the need for the const-generic:
247///
248/// ```rust
249/// use heapless::{Vec, VecView};
250///
251/// let vec: Vec<u8, 10> = Vec::from_slice(&[1, 2, 3, 4]).unwrap();
252/// let view: &VecView<_, _> = &vec;
253/// ```
254///
255/// For uncommon capacity values, or in generic scenarios, you may have to provide the `LenT` generic yourself.
256///
257/// This should be the smallest unsigned integer type that your capacity fits in, or `usize` if you don't want to consider this.
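///
/// For example, a capacity of 200 fits in a `u8`, so a single byte can hold the length (illustrative sketch):
///
/// ```
/// use heapless::Vec;
///
/// // `LenT = u8` is enough to index a capacity of 200.
/// let mut vec: Vec<u32, 200, u8> = Vec::new();
/// vec.push(42).unwrap();
/// assert_eq!(vec.len(), 1);
/// ```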
258pub type Vec<T, const N: usize, LenT = usize> = VecInner<T, LenT, OwnedVecStorage<T, N>>;
259
260/// A [`Vec`] with dynamic capacity.
261///
262/// [`Vec`] coerces to `VecView`. `VecView` is `!Sized`, meaning it can only ever be used by reference.
263///
264/// Unlike [`Vec`], `VecView` does not have an `N` const-generic parameter.
265/// This has the ergonomic advantage of making it possible to use functions without needing to know at
266/// compile-time the size of the buffers used, for example for use in `dyn` traits.
267///
268/// `VecView<T>` is to `Vec<T, N>` what `[T]` is to `[T; N]`.
269///
270/// ```rust
271/// use heapless::{Vec, VecView};
272///
273/// let mut vec: Vec<u8, 10> = Vec::from_slice(&[1, 2, 3, 4]).unwrap();
274/// let view: &VecView<_, _> = &vec;
275/// assert_eq!(view, &[1, 2, 3, 4]);
276///
277/// let mut_view: &mut VecView<_, _> = &mut vec;
278/// mut_view.push(5).unwrap();
279/// assert_eq!(vec, [1, 2, 3, 4, 5]);
280/// ```
281pub type VecView<T, LenT = usize> = VecInner<T, LenT, ViewVecStorage<T>>;
282
283impl<T, LenT: LenType, const N: usize> Vec<T, N, LenT> {
284 const ELEM: MaybeUninit<T> = MaybeUninit::uninit();
285 const INIT: [MaybeUninit<T>; N] = [Self::ELEM; N]; // important for optimization of `new`
286
287 /// Constructs a new, empty vector with a fixed capacity of `N`
288 ///
289 /// # Examples
290 ///
291 /// ```
292 /// use heapless::Vec;
293 ///
294 /// // allocate the vector on the stack
295 /// let mut x: Vec<u8, 16> = Vec::new();
296 ///
297 /// // allocate the vector in a static variable
298 /// static mut X: Vec<u8, 16> = Vec::new();
299 /// ```
300 pub const fn new() -> Self {
301 const { check_capacity_fits::<LenT, N>() }
302
303 Self {
304 phantom: PhantomData,
305 len: LenT::ZERO,
306 buffer: VecStorageInner { buffer: Self::INIT },
307 }
308 }
309
310 /// Constructs a new vector with a fixed capacity of `N` and fills it
311 /// with the provided slice.
312 ///
313 /// This is equivalent to the following code:
314 ///
315 /// ```
316 /// use heapless::Vec;
317 ///
318 /// let mut v: Vec<u8, 16> = Vec::new();
319 /// v.extend_from_slice(&[1, 2, 3]).unwrap();
320 /// ```
321 pub fn from_slice(other: &[T]) -> Result<Self, CapacityError>
322 where
323 T: Clone,
324 {
325 let mut v = Self::new();
326 v.extend_from_slice(other)?;
327 Ok(v)
328 }
329
330 /// Constructs a new vector with a fixed capacity of `N`, initializing
331 /// it with the provided array.
332 ///
333 /// The length of the provided array, `M`, may be equal to _or_ less than
334 /// the capacity of the vector, `N`.
335 ///
336 /// If the length of the provided array is greater than the capacity of the
337 /// vector, a compile-time error will be produced.
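    ///
    /// A small usage sketch:
    ///
    /// ```
    /// use heapless::Vec;
    ///
    /// // A 3-element array moves into a vector with capacity 8.
    /// let v: Vec<u8, 8> = Vec::from_array([1, 2, 3]);
    /// assert_eq!(*v, [1, 2, 3]);
    /// ```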
338 pub fn from_array<const M: usize>(src: [T; M]) -> Self {
339 const {
340 assert!(N >= M);
341 }
342
343 // We've got to copy `src`, but we're functionally moving it. Don't run
344 // any Drop code for T.
345 let src = ManuallyDrop::new(src);
346
347 if N == M {
348 Self {
349 phantom: PhantomData,
350 len: LenT::from_usize(N),
351 // NOTE(unsafe) ManuallyDrop<[T; M]> and [MaybeUninit<T>; N]
352 // have the same layout when N == M.
353 buffer: unsafe { mem::transmute_copy(&src) },
354 }
355 } else {
356 let mut v = Self::new();
357
358 for (src_elem, dst_elem) in src.iter().zip(v.buffer.buffer.iter_mut()) {
359 // NOTE(unsafe) src element is not going to drop as src itself
360 // is wrapped in a ManuallyDrop.
361 dst_elem.write(unsafe { ptr::read(src_elem) });
362 }
363
364 unsafe { v.set_len(M) };
365 v
366 }
367 }
368
369 /// Returns the contents of the vector as an array of length `M` if the length
370 /// of the vector is exactly `M`, otherwise returns `Err(self)`.
371 ///
372 /// # Examples
373 ///
374 /// ```
375 /// use heapless::Vec;
376 /// let buffer: Vec<u8, 42> = Vec::from_slice(&[1, 2, 3, 5, 8]).unwrap();
377 /// let array: [u8; 5] = buffer.into_array().unwrap();
378 /// assert_eq!(array, [1, 2, 3, 5, 8]);
379 /// ```
380 pub fn into_array<const M: usize>(self) -> Result<[T; M], Self> {
381 if self.len() == M {
382 // This is how the unstable `MaybeUninit::array_assume_init` method does it
383 let array = unsafe { (core::ptr::from_ref(&self.buffer).cast::<[T; M]>()).read() };
384
385 // We don't want `self`'s destructor to be called because that would drop all the
386 // items in the array
387 core::mem::forget(self);
388
389 Ok(array)
390 } else {
391 Err(self)
392 }
393 }
394
395 /// Clones a vec into a new vec
396 pub(crate) fn clone(&self) -> Self
397 where
398 T: Clone,
399 {
400 let mut new = Self::new();
401 // avoid `extend_from_slice` as that introduces a runtime check/panicking branch
402 for elem in self {
403 unsafe {
404 new.push_unchecked(elem.clone());
405 }
406 }
407 new
408 }
409
410 /// Casts the `LenT` type to a new type, preserving everything else about the vector.
411 ///
412 /// This can be useful if you need to pass a `Vec<T, N, u8>` into a `Vec<T, N, usize>` for example.
413 ///
414 /// This will check at compile time if the `N` value will fit into `NewLenT`, and error if not.
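    ///
    /// A minimal sketch of such a cast:
    ///
    /// ```
    /// use heapless::Vec;
    ///
    /// let small: Vec<u8, 16, u8> = Vec::from_slice(&[1, 2, 3]).unwrap();
    /// // Widen the length type; contents and capacity are unchanged.
    /// let wide: Vec<u8, 16, usize> = small.cast_len_type();
    /// assert_eq!(wide.len(), 3);
    /// ```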
415 pub fn cast_len_type<NewLenT: LenType>(self) -> Vec<T, N, NewLenT> {
416 const { check_capacity_fits::<NewLenT, N>() }
417 let this = ManuallyDrop::new(self);
418
419 // SAFETY: The pointer argument is derived from a reference, meeting the documented safety invariants.
420 // This also prevents double drops by wrapping `self` in `ManuallyDrop`.
421 Vec {
422 len: NewLenT::from_usize(this.len()),
423 buffer: unsafe { ptr::read(&this.buffer) },
424 phantom: PhantomData,
425 }
426 }
427}
428
429impl<T, LenT: LenType, S: VecStorage<T> + ?Sized> VecInner<T, LenT, S> {
430 /// Removes the specified range from the vector in bulk, returning all
431 /// removed elements as an iterator. If the iterator is dropped before
432 /// being fully consumed, it drops the remaining removed elements.
433 ///
434 /// The returned iterator keeps a mutable borrow on the vector to optimize
435 /// its implementation.
436 ///
437 /// # Panics
438 ///
439 /// Panics if the starting point is greater than the end point or if
440 /// the end point is greater than the length of the vector.
441 ///
442 /// # Leaking
443 ///
444 /// If the returned iterator goes out of scope without being dropped (due to
445 /// [`mem::forget`], for example), the vector may have lost and leaked
446 /// elements arbitrarily, including elements outside the range.
447 ///
448 /// # Examples
449 ///
450 /// ```
451 /// use heapless::Vec;
452 ///
453 /// let mut v = Vec::<_, 8>::from_array([1, 2, 3]);
454 /// let u: Vec<_, 8> = v.drain(1..).collect();
455 /// assert_eq!(v, &[1]);
456 /// assert_eq!(u, &[2, 3]);
457 ///
458 /// // A full range clears the vector, like `clear()` does.
459 /// v.drain(..);
460 /// assert_eq!(v, &[]);
461 /// ```
462 pub fn drain<R>(&mut self, range: R) -> Drain<'_, T, LenT>
463 where
464 R: RangeBounds<usize>,
465 {
466 // Memory Safety
467 //
468 // When the `Drain` is first created, it shortens the length of
469 // the source vector to make sure no uninitialized or moved-from elements
470 // are accessible at all if the `Drain`'s destructor never gets to run.
471 //
472 // `Drain` will `ptr::read` out the values to remove.
473 // When finished, the remaining tail of the vec is copied back to cover
474 // the hole, and the vector length is restored to the new length.
475 //
476 let len = self.len();
477 let Range { start, end } = crate::slice::range(range, ..len);
478
479 unsafe {
480 // Set `self`'s length to `start` to be safe in case `Drain` is leaked.
481 self.set_len(start);
482 let vec = NonNull::from(self.as_mut_view());
483 let range_slice = slice::from_raw_parts(vec.as_ref().as_ptr().add(start), end - start);
484 Drain {
485 tail_start: LenT::from_usize(end),
486 tail_len: LenT::from_usize(len - end),
487 iter: range_slice.iter(),
488 vec,
489 }
490 }
491 }
492
493 /// Get a reference to the `Vec`, erasing the `N` const-generic.
494 ///
495 ///
496 /// ```rust
497 /// # use heapless::{Vec, VecView};
498 /// let vec: Vec<u8, 10> = Vec::from_slice(&[1, 2, 3, 4]).unwrap();
499 /// let view: &VecView<u8, _> = vec.as_view();
500 /// ```
501 ///
502 /// It is often preferable to do the same through type coercion, since `Vec<T, N>` implements `Unsize<VecView<T>>`:
503 ///
504 /// ```rust
505 /// # use heapless::{Vec, VecView};
506 /// let vec: Vec<u8, 10> = Vec::from_slice(&[1, 2, 3, 4]).unwrap();
507 /// let view: &VecView<u8, _> = &vec;
508 /// ```
509 #[inline]
510 pub fn as_view(&self) -> &VecView<T, LenT> {
511 S::as_vec_view(self)
512 }
513
514 /// Get a mutable reference to the `Vec`, erasing the `N` const-generic.
515 ///
516 /// ```rust
517 /// # use heapless::{Vec, VecView};
518 /// let mut vec: Vec<u8, 10, u8> = Vec::from_slice(&[1, 2, 3, 4]).unwrap();
519 /// let view: &mut VecView<u8, _> = vec.as_mut_view();
520 /// ```
521 ///
522 /// It is often preferable to do the same through type coercion, since `Vec<T, N>` implements `Unsize<VecView<T>>`:
523 ///
524 /// ```rust
525 /// # use heapless::{Vec, VecView};
526 /// let mut vec: Vec<u8, 10, u8> = Vec::from_slice(&[1, 2, 3, 4]).unwrap();
527 /// let view: &mut VecView<u8, _> = &mut vec;
528 /// ```
529 #[inline]
530 pub fn as_mut_view(&mut self) -> &mut VecView<T, LenT> {
531 S::as_vec_view_mut(self)
532 }
533
534 /// Returns a raw pointer to the vector’s buffer.
535 pub fn as_ptr(&self) -> *const T {
536 self.buffer.borrow().as_ptr().cast::<T>()
537 }
538
539 /// Returns a raw pointer to the vector’s buffer through which the contents may be mutated.
540 pub fn as_mut_ptr(&mut self) -> *mut T {
541 self.buffer.borrow_mut().as_mut_ptr().cast::<T>()
542 }
543
544 /// Extracts a slice containing the entire vector.
545 ///
546 /// Equivalent to `&s[..]`.
547 ///
548 /// # Examples
549 ///
550 /// ```
551 /// use heapless::Vec;
552 /// let buffer: Vec<u8, 5> = Vec::from_slice(&[1, 2, 3, 5, 8]).unwrap();
553 /// assert_eq!(buffer.as_slice(), &[1, 2, 3, 5, 8]);
554 /// ```
555 pub fn as_slice(&self) -> &[T] {
556 // NOTE(unsafe) avoid bound checks in the slicing operation
557 // &buffer[..self.len]
558 unsafe {
559 slice::from_raw_parts(
560 self.buffer.borrow().as_ptr().cast::<T>(),
561 self.len.into_usize(),
562 )
563 }
564 }
565
566 /// Extracts a mutable slice containing the entire vector.
567 ///
568 /// Equivalent to `&mut s[..]`.
569 ///
570 /// # Examples
571 ///
572 /// ```
573 /// use heapless::Vec;
574 /// let mut buffer: Vec<u8, 5> = Vec::from_slice(&[1, 2, 3, 5, 8]).unwrap();
575 /// let buffer_slice = buffer.as_mut_slice();
576 /// buffer_slice[0] = 9;
577 /// assert_eq!(buffer.as_slice(), &[9, 2, 3, 5, 8]);
578 /// ```
579 pub fn as_mut_slice(&mut self) -> &mut [T] {
580 // NOTE(unsafe) avoid bound checks in the slicing operation
581 // &mut buffer[..self.len]
582 unsafe {
583 slice::from_raw_parts_mut(
584 self.buffer.borrow_mut().as_mut_ptr().cast::<T>(),
585 self.len.into_usize(),
586 )
587 }
588 }
589
590 /// Returns the maximum number of elements the vector can hold.
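    ///
    /// For example:
    ///
    /// ```
    /// use heapless::Vec;
    ///
    /// let vec: Vec<u8, 4> = Vec::new();
    /// assert_eq!(vec.capacity(), 4);
    /// ```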
591 pub fn capacity(&self) -> usize {
592 self.buffer.borrow().len()
593 }
594
595 /// Clears the vector, removing all values.
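    ///
    /// For example:
    ///
    /// ```
    /// use heapless::Vec;
    ///
    /// let mut vec: Vec<u8, 4> = Vec::from_slice(&[1, 2, 3]).unwrap();
    /// vec.clear();
    /// assert!(vec.is_empty());
    /// ```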
596 pub fn clear(&mut self) {
597 self.truncate(0);
598 }
599
600 /// Extends the vec from an iterator.
601 ///
602 /// # Panics
603 ///
604 /// Panics if the vec cannot hold all elements of the iterator.
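    ///
    /// # Examples
    ///
    /// A short usage sketch:
    ///
    /// ```
    /// use heapless::Vec;
    ///
    /// let mut vec: Vec<u8, 8> = Vec::from_slice(&[1, 2]).unwrap();
    /// vec.extend([3, 4].iter().cloned());
    /// assert_eq!(vec, [1, 2, 3, 4]);
    /// ```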
605 pub fn extend<I>(&mut self, iter: I)
606 where
607 I: IntoIterator<Item = T>,
608 {
609 for elem in iter {
610 self.push(elem).ok().unwrap();
611 }
612 }
613
614 /// Clones and appends all elements in a slice to the `Vec`.
615 ///
616 /// Iterates over the slice `other`, clones each element, and then appends
617 /// it to this `Vec`. The `other` vector is traversed in-order.
618 ///
619 /// # Examples
620 ///
621 /// ```
622 /// use heapless::Vec;
623 ///
624 /// let mut vec = Vec::<u8, 8>::new();
625 /// vec.push(1).unwrap();
626 /// vec.extend_from_slice(&[2, 3, 4]).unwrap();
627 /// assert_eq!(*vec, [1, 2, 3, 4]);
628 /// ```
629 pub fn extend_from_slice(&mut self, other: &[T]) -> Result<(), CapacityError>
630 where
631 T: Clone,
632 {
633 pub fn extend_from_slice_inner<T, LenT: LenType>(
634 len: &mut LenT,
635 buf: &mut [MaybeUninit<T>],
636 other: &[T],
637 ) -> Result<(), CapacityError>
638 where
639 T: Clone,
640 {
641 if len.into_usize() + other.len() > buf.len() {
642 // won't fit in the `Vec`; don't modify anything and return an error
643 Err(CapacityError)
644 } else {
645 for elem in other {
646 unsafe {
647 *buf.get_unchecked_mut(len.into_usize()) = MaybeUninit::new(elem.clone());
648 }
649 *len += LenT::one();
650 }
651 Ok(())
652 }
653 }
654
655 extend_from_slice_inner(&mut self.len, self.buffer.borrow_mut(), other)
656 }
657
658 /// Removes the last element from a vector and returns it, or `None` if it is empty.
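    ///
    /// For example:
    ///
    /// ```
    /// use heapless::Vec;
    ///
    /// let mut vec: Vec<u8, 4> = Vec::from_slice(&[1, 2]).unwrap();
    /// assert_eq!(vec.pop(), Some(2));
    /// assert_eq!(vec.pop(), Some(1));
    /// assert_eq!(vec.pop(), None);
    /// ```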
659 pub fn pop(&mut self) -> Option<T> {
660 if self.len == LenT::ZERO {
661 None
662 } else {
663 Some(unsafe { self.pop_unchecked() })
664 }
665 }
666
667 /// Appends an `item` to the back of the collection
668 ///
669 /// Returns back the `item` if the vector is full.
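    ///
    /// A short usage sketch:
    ///
    /// ```
    /// use heapless::Vec;
    ///
    /// let mut vec: Vec<u8, 2> = Vec::new();
    /// vec.push(1).unwrap();
    /// vec.push(2).unwrap();
    /// // The vector is full, so the rejected item is handed back.
    /// assert_eq!(vec.push(3), Err(3));
    /// ```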
670 pub fn push(&mut self, item: T) -> Result<(), T> {
671 if self.len() < self.capacity() {
672 unsafe { self.push_unchecked(item) }
673 Ok(())
674 } else {
675 Err(item)
676 }
677 }
678
679 /// Removes the last element from a vector and returns it
680 ///
681 /// # Safety
682 ///
683 /// This assumes the vec has at least one element.
684 pub unsafe fn pop_unchecked(&mut self) -> T {
685 debug_assert!(!self.is_empty());
686
687 self.len -= LenT::one();
688 self.buffer
689 .borrow_mut()
690 .get_unchecked_mut(self.len.into_usize())
691 .as_ptr()
692 .read()
693 }
694
695 /// Appends an `item` to the back of the collection
696 ///
697 /// # Safety
698 ///
699 /// This assumes the vec is not full.
700 pub unsafe fn push_unchecked(&mut self, item: T) {
701 // NOTE(ptr::write) the memory slot that we are about to write to is uninitialized. We
702 // use `ptr::write` to avoid running `T`'s destructor on the uninitialized memory
703 debug_assert!(!self.is_full());
704
705 *self
706 .buffer
707 .borrow_mut()
708 .get_unchecked_mut(self.len.into_usize()) = MaybeUninit::new(item);
709
710 self.len += LenT::one();
711 }
712
713 /// Shortens the vector, keeping the first `len` elements and dropping the rest.
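    ///
    /// For example:
    ///
    /// ```
    /// use heapless::Vec;
    ///
    /// let mut vec: Vec<u8, 8> = Vec::from_slice(&[1, 2, 3, 4]).unwrap();
    /// vec.truncate(2);
    /// assert_eq!(vec, [1, 2]);
    /// ```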
714 pub fn truncate(&mut self, len: usize) {
715 // This is safe because:
716 //
717 // * the slice passed to `drop_in_place` is valid; the `len > self.len`
718 // case avoids creating an invalid slice, and
719 // * the `len` of the vector is shrunk before calling `drop_in_place`,
720 // such that no value will be dropped twice in case `drop_in_place`
721 // were to panic once (if it panics twice, the program aborts).
722 unsafe {
723 // Note: It's intentional that this is `>` and not `>=`.
724 // Changing it to `>=` has negative performance
725 // implications in some cases. See rust-lang/rust#78884 for more.
726 if len > self.len() {
727 return;
728 }
729 let remaining_len = self.len() - len;
730 let s = ptr::slice_from_raw_parts_mut(self.as_mut_ptr().add(len), remaining_len);
731 self.len = LenT::from_usize(len);
732 ptr::drop_in_place(s);
733 }
734 }
735
736 /// Resizes the `Vec` in-place so that `len` is equal to `new_len`.
737 ///
738 /// If `new_len` is greater than `len`, the `Vec` is extended by the
739 /// difference, with each additional slot filled with `value`. If
740 /// `new_len` is less than `len`, the `Vec` is simply truncated.
741 ///
742 /// See also [`resize_default`](Self::resize_default).
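    ///
    /// A short usage sketch:
    ///
    /// ```
    /// use heapless::Vec;
    ///
    /// let mut vec: Vec<u8, 8> = Vec::from_slice(&[1, 2, 3]).unwrap();
    /// vec.resize(5, 0).unwrap();
    /// assert_eq!(vec, [1, 2, 3, 0, 0]);
    /// vec.resize(2, 0).unwrap();
    /// assert_eq!(vec, [1, 2]);
    /// ```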
743 pub fn resize(&mut self, new_len: usize, value: T) -> Result<(), CapacityError>
744 where
745 T: Clone,
746 {
747 if new_len > self.capacity() {
748 return Err(CapacityError);
749 }
750
751 if new_len > self.len() {
752 while self.len() < new_len {
753 self.push(value.clone()).ok();
754 }
755 } else {
756 self.truncate(new_len);
757 }
758
759 Ok(())
760 }
761
762 /// Resizes the `Vec` in-place so that `len` is equal to `new_len`.
763 ///
764 /// If `new_len` is greater than `len`, the `Vec` is extended by the
765 /// difference, with each additional slot filled with `Default::default()`.
766 /// If `new_len` is less than `len`, the `Vec` is simply truncated.
767 ///
768 /// See also [`resize`](Self::resize).
769 pub fn resize_default(&mut self, new_len: usize) -> Result<(), CapacityError>
770 where
771 T: Clone + Default,
772 {
773 self.resize(new_len, T::default())
774 }
775
776 /// Forces the length of the vector to `new_len`.
777 ///
778 /// This is a low-level operation that maintains none of the normal
779 /// invariants of the type. Normally changing the length of a vector
780 /// is done using one of the safe operations instead, such as
781 /// [`truncate`], [`resize`], [`extend`], or [`clear`].
782 ///
783 /// [`truncate`]: Self::truncate
784 /// [`resize`]: Self::resize
785 /// [`extend`]: core::iter::Extend
786 /// [`clear`]: Self::clear
787 ///
788 /// # Safety
789 ///
790 /// - `new_len` must be less than or equal to [`capacity()`].
791 /// - The elements at `old_len..new_len` must be initialized.
792 ///
793 /// [`capacity()`]: Self::capacity
794 ///
795 /// # Examples
796 ///
797 /// This method can be useful for situations in which the vector
798 /// is serving as a buffer for other code, particularly over FFI:
799 ///
800 /// ```no_run
801 /// # #![allow(dead_code)]
802 /// use heapless::Vec;
803 ///
804 /// # // This is just a minimal skeleton for the doc example;
805 /// # // don't use this as a starting point for a real library.
806 /// # pub struct StreamWrapper { strm: *mut core::ffi::c_void }
807 /// # const Z_OK: i32 = 0;
808 /// # extern "C" {
809 /// # fn deflateGetDictionary(
810 /// # strm: *mut core::ffi::c_void,
811 /// # dictionary: *mut u8,
812 /// # dictLength: *mut usize,
813 /// # ) -> i32;
814 /// # }
815 /// # impl StreamWrapper {
816 /// pub fn get_dictionary(&self) -> Option<Vec<u8, 32768>> {
817 /// // Per the FFI method's docs, "32768 bytes is always enough".
818 /// let mut dict = Vec::new();
819 /// let mut dict_length = 0;
820 /// // SAFETY: When `deflateGetDictionary` returns `Z_OK`, it holds that:
821 /// // 1. `dict_length` elements were initialized.
822 /// // 2. `dict_length` <= the capacity (32_768)
823 /// // which makes `set_len` safe to call.
824 /// unsafe {
825 /// // Make the FFI call...
826 /// let r = deflateGetDictionary(self.strm, dict.as_mut_ptr(), &mut dict_length);
827 /// if r == Z_OK {
828 /// // ...and update the length to what was initialized.
829 /// dict.set_len(dict_length);
830 /// Some(dict)
831 /// } else {
832 /// None
833 /// }
834 /// }
835 /// }
836 /// # }
837 /// ```
838 ///
839 /// While the following example is sound, there is a memory leak since
840 /// the inner vectors were not freed prior to the `set_len` call:
841 ///
842 /// ```
843 /// use core::iter::FromIterator;
844 /// use heapless::Vec;
845 ///
846 /// let mut vec = Vec::<Vec<u8, 3>, 3>::from_iter(
847 /// [
848 /// Vec::from_iter([1, 0, 0].iter().cloned()),
849 /// Vec::from_iter([0, 1, 0].iter().cloned()),
850 /// Vec::from_iter([0, 0, 1].iter().cloned()),
851 /// ]
852 /// .iter()
853 /// .cloned(),
854 /// );
855 /// // SAFETY:
856 /// // 1. `old_len..0` is empty so no elements need to be initialized.
857 /// // 2. `0 <= capacity` always holds whatever `capacity` is.
858 /// unsafe {
859 /// vec.set_len(0);
860 /// }
861 /// ```
862 ///
863 /// Normally, here, one would use [`clear`] instead to correctly drop
864 /// the contents and thus not leak memory.
865 pub unsafe fn set_len(&mut self, new_len: usize) {
866 debug_assert!(new_len <= self.capacity());
867
868 self.len = LenT::from_usize(new_len);
869 }
870
871 /// Removes an element from the vector and returns it.
872 ///
873 /// The removed element is replaced by the last element of the vector.
874 ///
875 /// This does not preserve ordering, but is *O*(1).
876 ///
877 /// # Panics
878 ///
879 /// Panics if `index` is out of bounds.
880 ///
881 /// # Examples
882 ///
883 /// ```
884 /// use heapless::Vec;
885 ///
886 /// let mut v: Vec<_, 8> = Vec::new();
887 /// v.push("foo").unwrap();
888 /// v.push("bar").unwrap();
889 /// v.push("baz").unwrap();
890 /// v.push("qux").unwrap();
891 ///
892 /// assert_eq!(v.swap_remove(1), "bar");
893 /// assert_eq!(&*v, ["foo", "qux", "baz"]);
894 ///
895 /// assert_eq!(v.swap_remove(0), "foo");
896 /// assert_eq!(&*v, ["baz", "qux"]);
897 /// ```
898 pub fn swap_remove(&mut self, index: usize) -> T {
899 assert!(index < self.len());
900 unsafe { self.swap_remove_unchecked(index) }
901 }
902
903 /// Removes an element from the vector and returns it.
904 ///
905 /// The removed element is replaced by the last element of the vector.
906 ///
907 /// This does not preserve ordering, but is *O*(1).
908 ///
909 /// # Safety
910 ///
911 /// Assumes `index` is within bounds.
912 ///
913 /// # Examples
914 ///
915 /// ```
916 /// use heapless::Vec;
917 ///
918 /// let mut v: Vec<_, 8> = Vec::new();
919 /// v.push("foo").unwrap();
920 /// v.push("bar").unwrap();
921 /// v.push("baz").unwrap();
922 /// v.push("qux").unwrap();
923 ///
924 /// assert_eq!(unsafe { v.swap_remove_unchecked(1) }, "bar");
925 /// assert_eq!(&*v, ["foo", "qux", "baz"]);
926 ///
927 /// assert_eq!(unsafe { v.swap_remove_unchecked(0) }, "foo");
928 /// assert_eq!(&*v, ["baz", "qux"]);
929 /// ```
930 pub unsafe fn swap_remove_unchecked(&mut self, index: usize) -> T {
931 let length = self.len();
932 debug_assert!(index < length);
933 let value = ptr::read(self.as_ptr().add(index));
934 let base_ptr = self.as_mut_ptr();
935 ptr::copy(base_ptr.add(length - 1), base_ptr.add(index), 1);
936 self.len -= LenT::one();
937 value
938 }
939
940 /// Returns true if the vec is full
941 pub fn is_full(&self) -> bool {
942 self.len() == self.capacity()
943 }
944
945 /// Returns true if the vec is empty
946 pub fn is_empty(&self) -> bool {
947 self.len == LenT::ZERO
948 }
949
950 /// Returns `true` if `needle` is a prefix of the Vec.
951 ///
952 /// Always returns `true` if `needle` is an empty slice.
953 ///
954 /// # Examples
955 ///
956 /// ```
957 /// use heapless::Vec;
958 ///
959 /// let v: Vec<_, 8> = Vec::from_slice(b"abc").unwrap();
960 /// assert_eq!(v.starts_with(b""), true);
961 /// assert_eq!(v.starts_with(b"ab"), true);
962 /// assert_eq!(v.starts_with(b"bc"), false);
963 /// ```
964 pub fn starts_with(&self, needle: &[T]) -> bool
965 where
966 T: PartialEq,
967 {
968 let n = needle.len();
969 self.len() >= n && needle == &self[..n]
970 }
971
972 /// Returns `true` if `needle` is a suffix of the Vec.
973 ///
974 /// Always returns `true` if `needle` is an empty slice.
975 ///
976 /// # Examples
977 ///
978 /// ```
979 /// use heapless::Vec;
980 ///
981 /// let v: Vec<_, 8> = Vec::from_slice(b"abc").unwrap();
982 /// assert_eq!(v.ends_with(b""), true);
983 /// assert_eq!(v.ends_with(b"ab"), false);
984 /// assert_eq!(v.ends_with(b"bc"), true);
985 /// ```
986 pub fn ends_with(&self, needle: &[T]) -> bool
987 where
988 T: PartialEq,
989 {
990 let (v, n) = (self.len(), needle.len());
991 v >= n && needle == &self[v - n..]
992 }
993
994 /// Inserts an element at position `index` within the vector, shifting all
995 /// elements after it to the right.
996 ///
997 /// Returns back the `element` if the vector is full.
998 ///
999 /// # Panics
1000 ///
1001 /// Panics if `index > len`.
1002 ///
1003 /// # Examples
1004 ///
1005 /// ```
1006 /// use heapless::Vec;
1007 ///
1008 /// let mut vec: Vec<_, 8> = Vec::from_slice(&[1, 2, 3]).unwrap();
1009 /// vec.insert(1, 4);
1010 /// assert_eq!(vec, [1, 4, 2, 3]);
1011 /// vec.insert(4, 5);
1012 /// assert_eq!(vec, [1, 4, 2, 3, 5]);
1013 /// ```
1014 pub fn insert(&mut self, index: usize, element: T) -> Result<(), T> {
1015 let len = self.len();
1016 if index > len {
1017 panic!("insertion index (is {index}) should be <= len (is {len})");
1018 }
1019
1020 // check there's space for the new element
1021 if self.is_full() {
1022 return Err(element);
1023 }
1024
1025 unsafe {
1026 // infallible
1027 // The spot to put the new value
1028 {
1029 let p = self.as_mut_ptr().add(index);
1030 // Shift everything over to make space. (Duplicating the
1031 // `index`th element into two consecutive places.)
1032 ptr::copy(p, p.offset(1), len - index);
1033 // Write it in, overwriting the first copy of the `index`th
1034 // element.
1035 ptr::write(p, element);
1036 }
1037 self.set_len(len + 1);
1038 }
1039
1040 Ok(())
1041 }
1042
1043 /// Removes and returns the element at position `index` within the vector,
1044 /// shifting all elements after it to the left.
1045 ///
1046 /// Note: Because this shifts over the remaining elements, it has a
1047 /// worst-case performance of *O*(n). If you don't need the order of
1048 /// elements to be preserved, use [`swap_remove`] instead. If you'd like to
1049 /// remove elements from the beginning of the `Vec`, consider using
1050 /// [`Deque::pop_front`] instead.
1051 ///
1052 /// [`swap_remove`]: Vec::swap_remove
1053 /// [`Deque::pop_front`]: crate::Deque::pop_front
1054 ///
1055 /// # Panics
1056 ///
1057 /// Panics if `index` is out of bounds.
1058 ///
1059 /// # Examples
1060 ///
1061 /// ```
1062 /// use heapless::Vec;
1063 ///
1064 /// let mut v: Vec<_, 8> = Vec::from_slice(&[1, 2, 3]).unwrap();
1065 /// assert_eq!(v.remove(1), 2);
1066 /// assert_eq!(v, [1, 3]);
1067 /// ```
1068 pub fn remove(&mut self, index: usize) -> T {
1069 let len = self.len();
1070 if index >= len {
1071 panic!("removal index (is {index}) should be < len (is {len})");
1072 }
1073 unsafe {
1074 // infallible
1075 let ret;
1076 {
1077 // the place we are taking from.
1078 let ptr = self.as_mut_ptr().add(index);
1079 // copy it out, unsafely having a copy of the value on
1080 // the stack and in the vector at the same time.
1081 ret = ptr::read(ptr);
1082
1083 // Shift everything down to fill in that spot.
1084 ptr::copy(ptr.offset(1), ptr, len - index - 1);
1085 }
1086 self.set_len(len - 1);
1087 ret
1088 }
1089 }
1090
1091 /// Retains only the elements specified by the predicate.
1092 ///
1093 /// In other words, remove all elements `e` for which `f(&e)` returns `false`.
1094 /// This method operates in place, visiting each element exactly once in the
1095 /// original order, and preserves the order of the retained elements.
1096 ///
1097 /// # Examples
1098 ///
1099 /// ```
1100 /// use heapless::Vec;
1101 ///
1102 /// let mut vec: Vec<_, 8> = Vec::from_slice(&[1, 2, 3, 4]).unwrap();
1103 /// vec.retain(|&x| x % 2 == 0);
1104 /// assert_eq!(vec, [2, 4]);
1105 /// ```
1106 ///
1107 /// Because the elements are visited exactly once in the original order,
1108 /// external state may be used to decide which elements to keep.
1109 ///
1110 /// ```
1111 /// use heapless::Vec;
1112 ///
1113 /// let mut vec: Vec<_, 8> = Vec::from_slice(&[1, 2, 3, 4, 5]).unwrap();
1114 /// let keep = [false, true, true, false, true];
1115 /// let mut iter = keep.iter();
1116 /// vec.retain(|_| *iter.next().unwrap());
1117 /// assert_eq!(vec, [2, 3, 5]);
1118 /// ```
1119 pub fn retain<F>(&mut self, mut f: F)
1120 where
1121 F: FnMut(&T) -> bool,
1122 {
1123 self.retain_mut(|elem| f(elem));
1124 }
1125
1126 /// Retains only the elements specified by the predicate, passing a mutable reference to it.
1127 ///
1128 /// In other words, remove all elements `e` such that `f(&mut e)` returns `false`.
1129 /// This method operates in place, visiting each element exactly once in the
1130 /// original order, and preserves the order of the retained elements.
1131 ///
1132 /// # Examples
1133 ///
1134 /// ```
1135 /// use heapless::Vec;
1136 ///
1137 /// let mut vec: Vec<_, 8> = Vec::from_slice(&[1, 2, 3, 4]).unwrap();
1138 /// vec.retain_mut(|x| {
1139 /// if *x <= 3 {
1140 /// *x += 1;
1141 /// true
1142 /// } else {
1143 /// false
1144 /// }
1145 /// });
1146 /// assert_eq!(vec, [2, 3, 4]);
1147 /// ```
1148 pub fn retain_mut<F>(&mut self, mut f: F)
1149 where
1150 F: FnMut(&mut T) -> bool,
1151 {
1152 let original_len = self.len;
1153 // Avoid double drop if the drop guard is not executed,
1154 // since we may make some holes during the process.
1155 unsafe { self.set_len(0) };
1156
1157 // Vec: [Kept, Kept, Hole, Hole, Hole, Hole, Unchecked, Unchecked]
1158 //      |<-         processed len         ->| ^- next to check
1159 //                   |<-    deleted cnt   ->|
1160 //      |<-                    original_len                    ->|
1161 // Kept: Elements which predicate returns true on.
1162 // Hole: Moved or dropped element slot.
1163 // Unchecked: Unchecked valid elements.
1164 //
1165 // This drop guard will be invoked if the predicate or an element's `drop` panics.
1166 // It shifts unchecked elements to cover holes and calls `set_len` with the correct length.
1167 // In cases where the predicate and `drop` never panic, it will be optimized out.
1168 struct BackshiftOnDrop<'a, T, LenT: LenType, S: VecStorage<T> + ?Sized> {
1169 v: &'a mut VecInner<T, LenT, S>,
1170 processed_len: LenT,
1171 deleted_cnt: LenT,
1172 original_len: LenT,
1173 }
1174
1175 impl<T, LenT: LenType, S: VecStorage<T> + ?Sized> Drop for BackshiftOnDrop<'_, T, LenT, S> {
1176 fn drop(&mut self) {
1177 if self.deleted_cnt > LenT::ZERO {
1178 // SAFETY: Trailing unchecked items must be valid since we never touch them.
1179 unsafe {
1180 ptr::copy(
1181 self.v.as_ptr().add(self.processed_len.into_usize()),
1182 self.v
1183 .as_mut_ptr()
1184 .add((self.processed_len - self.deleted_cnt).into_usize()),
1185 (self.original_len - self.processed_len).into_usize(),
1186 );
1187 }
1188 }
1189 // SAFETY: After filling holes, all items are in contiguous memory.
1190 unsafe {
1191 self.v
1192 .set_len((self.original_len - self.deleted_cnt).into_usize());
1193 }
1194 }
1195 }
1196
1197 let mut g = BackshiftOnDrop {
1198 v: self,
1199 processed_len: LenT::ZERO,
1200 deleted_cnt: LenT::ZERO,
1201 original_len,
1202 };
1203
1204 fn process_loop<F, T, LenT: LenType, S: VecStorage<T> + ?Sized, const DELETED: bool>(
1205 original_len: LenT,
1206 f: &mut F,
1207 g: &mut BackshiftOnDrop<'_, T, LenT, S>,
1208 ) where
1209 F: FnMut(&mut T) -> bool,
1210 {
1211 while g.processed_len != original_len {
1212 let p = g.v.as_mut_ptr();
1213 // SAFETY: Unchecked element must be valid.
1214 let cur = unsafe { &mut *p.add(g.processed_len.into_usize()) };
1215 if !f(cur) {
1216 // Advance early to avoid double drop if `drop_in_place` panicked.
1217 g.processed_len += LenT::one();
1218 g.deleted_cnt += LenT::one();
1219 // SAFETY: We never touch this element again after dropped.
1220 unsafe { ptr::drop_in_place(cur) };
1221 // We already advanced the counter.
1222 if DELETED {
1223 continue;
1224 } else {
1225 break;
1226 }
1227 }
1228 if DELETED {
1229 // SAFETY: `deleted_cnt` > 0, so the hole slot must not overlap with current element.
1230 // We use copy for move, and never touch this element again.
1231 unsafe {
1232 let hole_slot = p.add((g.processed_len - g.deleted_cnt).into_usize());
1233 ptr::copy_nonoverlapping(cur, hole_slot, 1);
1234 }
1235 }
1236 g.processed_len += LenT::one();
1237 }
1238 }
1239
1240 // Stage 1: Nothing was deleted.
1241 process_loop::<F, T, LenT, S, false>(original_len, &mut f, &mut g);
1242
1243 // Stage 2: Some elements were deleted.
1244 process_loop::<F, T, LenT, S, true>(original_len, &mut f, &mut g);
1245
1246 // All items are processed. This can be optimized to `set_len` by LLVM.
1247 drop(g);
1248 }
1249
1250 /// Returns the remaining spare capacity of the vector as a slice of `MaybeUninit<T>`.
1251 ///
1252 /// The returned slice can be used to fill the vector with data before marking the data as
1253 /// initialized using the `set_len` method.
1254 ///
1255 /// # Examples
1256 ///
1257 /// ```
1258 /// use heapless::Vec;
1259 ///
1260 /// // Allocate vector big enough for 10 elements.
1261 /// let mut v: Vec<_, 10> = Vec::new();
1262 ///
1263 /// // Fill in the first 3 elements.
1264 /// let uninit = v.spare_capacity_mut();
1265 /// uninit[0].write(0);
1266 /// uninit[1].write(1);
1267 /// uninit[2].write(2);
1268 ///
1269 /// // Mark the first 3 elements of the vector as being initialized.
1270 /// unsafe {
1271 /// v.set_len(3);
1272 /// }
1273 ///
1274 /// assert_eq!(&v, &[0, 1, 2]);
1275 /// ```
1276 #[inline]
1277 pub fn spare_capacity_mut(&mut self) -> &mut [MaybeUninit<T>] {
1278 &mut self.buffer.borrow_mut()[self.len.into_usize()..]
1279 }
1280}
1281
1282// Trait implementations
1283
1284impl<T, LenT: LenType, const N: usize> Default for Vec<T, N, LenT> {
1285 fn default() -> Self {
1286 Self::new()
1287 }
1288}
1289
1290impl<T, LenT: LenType, S: VecStorage<T> + ?Sized> fmt::Debug for VecInner<T, LenT, S>
1291where
1292 T: fmt::Debug,
1293{
1294 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1295 <[T] as fmt::Debug>::fmt(self, f)
1296 }
1297}
1298
1299impl<LenT: LenType, S: VecStorage<u8> + ?Sized> fmt::Write for VecInner<u8, LenT, S> {
1300 fn write_str(&mut self, s: &str) -> fmt::Result {
1301 match self.extend_from_slice(s.as_bytes()) {
1302 Ok(()) => Ok(()),
1303 Err(_) => Err(fmt::Error),
1304 }
1305 }
1306}
1307
1308impl<T, LenT: LenType, const N: usize, const M: usize> From<[T; M]> for Vec<T, N, LenT> {
1309 fn from(array: [T; M]) -> Self {
1310 Self::from_array(array)
1311 }
1312}
1313
1314impl<T, LenT: LenType, S: VecStorage<T> + ?Sized> Drop for VecInner<T, LenT, S> {
1315 fn drop(&mut self) {
1316 let mut_slice = self.as_mut_slice();
1317 // We drop each element used in the vector by turning it into a `&mut [T]`.
1318 // SAFETY: the buffer contains initialized data for the range 0..self.len
1319 unsafe { ptr::drop_in_place(mut_slice) }
1320 }
1321}
1322
1323#[cfg(feature = "alloc")]
1324/// Converts the given `alloc::vec::Vec<T>` into a `Vec<T, N>`.
1325impl<T, LenT: LenType, const N: usize> TryFrom<alloc::vec::Vec<T>> for Vec<T, N, LenT> {
1326 type Error = CapacityError;
1327
1328 /// Converts the given `alloc::vec::Vec<T>` into a `Vec<T, N>`.
1329 ///
1330 /// # Errors
1331 ///
1332 /// Returns `Err` if the length of the `alloc::vec::Vec<T>` is greater than `N`.
1333 fn try_from(alloc_vec: alloc::vec::Vec<T>) -> Result<Self, Self::Error> {
1334 let mut vec = Self::new();
1335
1336 for e in alloc_vec {
1337 // Push each element individually to allow handling capacity errors.
1338 vec.push(e).map_err(|_| CapacityError {})?;
1339 }
1340
1341 Ok(vec)
1342 }
1343}
1344
1345#[cfg(feature = "alloc")]
1346/// Converts the given `Vec<T, N>` into an `alloc::vec::Vec<T>`.
1347impl<T, LenT: LenType, const N: usize> TryFrom<Vec<T, N, LenT>> for alloc::vec::Vec<T> {
1348 type Error = alloc::collections::TryReserveError;
1349
1350 /// Converts the given `Vec<T, N>` into an `alloc::vec::Vec<T>`.
1351 ///
1352 /// # Errors
1353 ///
1354 /// Returns `Err` if the `alloc::vec::Vec` fails to allocate memory.
1355 fn try_from(vec: Vec<T, N, LenT>) -> Result<Self, Self::Error> {
1356 let mut alloc_vec = Self::new();
1357
1358 // Allocate enough space for the elements, return an error if the
1359 // allocation fails.
1360 alloc_vec.try_reserve_exact(vec.len())?;
1361
1362 // Transfer the elements; since we reserved enough space above, this
1363 // should not fail due to OOM.
1364 alloc_vec.extend(vec);
1365
1366 Ok(alloc_vec)
1367 }
1368}
1369
1370impl<'a, T: Clone, LenT: LenType, const N: usize> TryFrom<&'a [T]> for Vec<T, N, LenT> {
1371 type Error = CapacityError;
1372
1373 fn try_from(slice: &'a [T]) -> Result<Self, Self::Error> {
1374 Self::from_slice(slice)
1375 }
1376}
1377
1378impl<T, LenT: LenType, S: VecStorage<T> + ?Sized> Extend<T> for VecInner<T, LenT, S> {
1379 fn extend<I>(&mut self, iter: I)
1380 where
1381 I: IntoIterator<Item = T>,
1382 {
1383 self.extend(iter);
1384 }
1385}
1386
1387impl<'a, T, LenT: LenType, S: VecStorage<T> + ?Sized> Extend<&'a T> for VecInner<T, LenT, S>
1388where
1389 T: 'a + Copy,
1390{
1391 fn extend<I>(&mut self, iter: I)
1392 where
1393 I: IntoIterator<Item = &'a T>,
1394 {
1395 self.extend(iter.into_iter().cloned());
1396 }
1397}
1398
1399impl<T, LenT: LenType, S: VecStorage<T> + ?Sized> hash::Hash for VecInner<T, LenT, S>
1400where
1401 T: core::hash::Hash,
1402{
1403 fn hash<H: hash::Hasher>(&self, state: &mut H) {
1404 <[T] as hash::Hash>::hash(self, state);
1405 }
1406}
1407
1408impl<'a, T, LenT: LenType, S: VecStorage<T> + ?Sized> IntoIterator for &'a VecInner<T, LenT, S> {
1409 type Item = &'a T;
1410 type IntoIter = slice::Iter<'a, T>;
1411
1412 fn into_iter(self) -> Self::IntoIter {
1413 self.iter()
1414 }
1415}
1416
1417impl<'a, T, LenT: LenType, S: VecStorage<T> + ?Sized> IntoIterator
1418 for &'a mut VecInner<T, LenT, S>
1419{
1420 type Item = &'a mut T;
1421 type IntoIter = slice::IterMut<'a, T>;
1422
1423 fn into_iter(self) -> Self::IntoIter {
1424 self.iter_mut()
1425 }
1426}
1427
1428impl<T, LenT: LenType, const N: usize> FromIterator<T> for Vec<T, N, LenT> {
1429 fn from_iter<I>(iter: I) -> Self
1430 where
1431 I: IntoIterator<Item = T>,
1432 {
1433 let mut vec = Self::new();
1434 for i in iter {
1435 vec.push(i).ok().expect("Vec::from_iter overflow");
1436 }
1437 vec
1438 }
1439}
1440
1441/// An iterator that moves out of a [`Vec`].
1442///
1443/// This struct is created by calling the `into_iter` method on [`Vec`].
1444pub struct IntoIter<T, const N: usize, LenT: LenType> {
1445 vec: Vec<T, N, LenT>,
1446 next: LenT,
1447}
1448
1449impl<T, LenT: LenType, const N: usize> Iterator for IntoIter<T, N, LenT> {
1450 type Item = T;
1451 fn next(&mut self) -> Option<Self::Item> {
1452 if self.next < self.vec.len {
1453 let item = unsafe {
1454 self.vec
1455 .buffer
1456 .buffer
1457 .get_unchecked_mut(self.next.into_usize())
1458 .as_ptr()
1459 .read()
1460 };
1461 self.next += LenT::one();
1462 Some(item)
1463 } else {
1464 None
1465 }
1466 }
1467}
1468
1469impl<T, LenT: LenType, const N: usize> Clone for IntoIter<T, N, LenT>
1470where
1471 T: Clone,
1472{
1473 fn clone(&self) -> Self {
1474 let mut vec = Vec::new();
1475
1476 if self.next < self.vec.len {
1477 let s = unsafe {
1478 slice::from_raw_parts(
1479 self.vec
1480 .buffer
1481 .buffer
1482 .as_ptr()
1483 .cast::<T>()
1484 .add(self.next.into_usize()),
1485 (self.vec.len - self.next).into_usize(),
1486 )
1487 };
1488 vec.extend_from_slice(s).ok();
1489 }
1490
1491 Self {
1492 vec,
1493 next: LenT::ZERO,
1494 }
1495 }
1496}
1497
1498impl<T, LenT: LenType, const N: usize> core::fmt::Debug for IntoIter<T, N, LenT>
1499where
1500 T: core::fmt::Debug,
1501{
1502 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
1503 let s = if self.next < self.vec.len {
1504 unsafe {
1505 slice::from_raw_parts(
1506 self.vec
1507 .buffer
1508 .buffer
1509 .as_ptr()
1510 .cast::<T>()
1511 .add(self.next.into_usize()),
1512 (self.vec.len - self.next).into_usize(),
1513 )
1514 }
1515 } else {
1516 &[]
1517 };
1518
1519 write!(f, "{s:?}")
1520 }
1521}
1522
1523impl<T, LenT: LenType, const N: usize> Drop for IntoIter<T, N, LenT> {
1524 fn drop(&mut self) {
1525 unsafe {
1526 // Drop all the elements that have not been moved out of vec
1527 ptr::drop_in_place(&mut self.vec.as_mut_slice()[self.next.into_usize()..]);
1528 // Prevent dropping of other elements
1529 self.vec.len = LenT::ZERO;
1530 }
1531 }
1532}
1533
1534impl<T, LenT: LenType, const N: usize> IntoIterator for Vec<T, N, LenT> {
1535 type Item = T;
1536 type IntoIter = IntoIter<T, N, LenT>;
1537
1538 fn into_iter(self) -> Self::IntoIter {
1539 IntoIter {
1540 vec: self,
1541 next: LenT::ZERO,
1542 }
1543 }
1544}
1545
1546impl<A, B, LenTA, LenTB, SA, SB> PartialEq<VecInner<B, LenTB, SB>> for VecInner<A, LenTA, SA>
1547where
1548 A: PartialEq<B>,
1549 LenTA: LenType,
1550 LenTB: LenType,
1551 SA: VecStorage<A> + ?Sized,
1552 SB: VecStorage<B> + ?Sized,
1553{
1554 fn eq(&self, other: &VecInner<B, LenTB, SB>) -> bool {
1555 self.as_slice().eq(other.as_slice())
1556 }
1557}
1558
1559impl<A, B, LenTB, const M: usize, SB> PartialEq<VecInner<B, LenTB, SB>> for [A; M]
1560where
1561 A: PartialEq<B>,
1562 LenTB: LenType,
1563 SB: VecStorage<B>,
1564{
1565 fn eq(&self, other: &VecInner<B, LenTB, SB>) -> bool {
1566 self.eq(other.as_slice())
1567 }
1568}
1569
1570impl<A, B, LenTB, SB, const M: usize> PartialEq<VecInner<B, LenTB, SB>> for &[A; M]
1571where
1572 A: PartialEq<B>,
1573 LenTB: LenType,
1574 SB: VecStorage<B>,
1575{
1576 fn eq(&self, other: &VecInner<B, LenTB, SB>) -> bool {
1577 (*self).eq(other)
1578 }
1579}
1580
1581impl<A, B, LenTB, SB> PartialEq<VecInner<B, LenTB, SB>> for [A]
1582where
1583 A: PartialEq<B>,
1584 LenTB: LenType,
1585 SB: VecStorage<B>,
1586{
1587 fn eq(&self, other: &VecInner<B, LenTB, SB>) -> bool {
1588 self.eq(other.as_slice())
1589 }
1590}
1591
1592impl<A, B, LenTB, SB> PartialEq<VecInner<B, LenTB, SB>> for &[A]
1593where
1594 A: PartialEq<B>,
1595 LenTB: LenType,
1596 SB: VecStorage<B>,
1597{
1598 fn eq(&self, other: &VecInner<B, LenTB, SB>) -> bool {
1599 (*self).eq(other)
1600 }
1601}
1602
1603impl<A, B, LenTB: LenType, SB: VecStorage<B>> PartialEq<VecInner<B, LenTB, SB>> for &mut [A]
1604where
1605 A: PartialEq<B>,
1606{
1607 fn eq(&self, other: &VecInner<B, LenTB, SB>) -> bool {
1608 (**self).eq(other)
1609 }
1610}
1611
1612impl<A, B, LenTA: LenType, SA, const N: usize> PartialEq<[B; N]> for VecInner<A, LenTA, SA>
1613where
1614 A: PartialEq<B>,
1615 SA: VecStorage<A> + ?Sized,
1616{
1617 #[inline]
1618 fn eq(&self, other: &[B; N]) -> bool {
1619 self.as_slice().eq(other.as_slice())
1620 }
1621}
1622
1623impl<A, B, LenTA, SA, const N: usize> PartialEq<&[B; N]> for VecInner<A, LenTA, SA>
1624where
1625 A: PartialEq<B>,
1626 LenTA: LenType,
1627 SA: VecStorage<A> + ?Sized,
1628{
1629 #[inline]
1630 fn eq(&self, other: &&[B; N]) -> bool {
1631 self.as_slice().eq(other.as_slice())
1632 }
1633}
1634
1635impl<A, B, LenTA, SA> PartialEq<[B]> for VecInner<A, LenTA, SA>
1636where
1637 A: PartialEq<B>,
1638 LenTA: LenType,
1639 SA: VecStorage<A> + ?Sized,
1640{
1641 #[inline]
1642 fn eq(&self, other: &[B]) -> bool {
1643 self.as_slice().eq(other)
1644 }
1645}
1646
1647impl<A, B, LenTA, SA> PartialEq<&[B]> for VecInner<A, LenTA, SA>
1648where
1649 A: PartialEq<B>,
1650 LenTA: LenType,
1651 SA: VecStorage<A> + ?Sized,
1652{
1653 #[inline]
1654 fn eq(&self, other: &&[B]) -> bool {
1655 self.as_slice().eq(*other)
1656 }
1657}
1658
1659impl<A, B, LenTA, SA> PartialEq<&mut [B]> for VecInner<A, LenTA, SA>
1660where
1661 A: PartialEq<B>,
1662 LenTA: LenType,
1663 SA: VecStorage<A> + ?Sized,
1664{
1665 #[inline]
1666 fn eq(&self, other: &&mut [B]) -> bool {
1667 self.as_slice().eq(*other)
1668 }
1669}
1670
1671// Implements Eq if underlying data is Eq
1672impl<T, LenT: LenType, S: VecStorage<T> + ?Sized> Eq for VecInner<T, LenT, S> where T: Eq {}
1673
1674impl<T, LenTA: LenType, LenTB: LenType, SA: VecStorage<T> + ?Sized, SB: VecStorage<T> + ?Sized>
1675 PartialOrd<VecInner<T, LenTA, SA>> for VecInner<T, LenTB, SB>
1676where
1677 T: PartialOrd,
1678{
1679 fn partial_cmp(&self, other: &VecInner<T, LenTA, SA>) -> Option<Ordering> {
1680 self.as_slice().partial_cmp(other.as_slice())
1681 }
1682}
1683
1684impl<T, LenT: LenType, S: VecStorage<T> + ?Sized> Ord for VecInner<T, LenT, S>
1685where
1686 T: Ord,
1687{
1688 #[inline]
1689 fn cmp(&self, other: &Self) -> Ordering {
1690 self.as_slice().cmp(other.as_slice())
1691 }
1692}
1693
1694impl<T, LenT: LenType, S: VecStorage<T> + ?Sized> ops::Deref for VecInner<T, LenT, S> {
1695 type Target = [T];
1696
1697 fn deref(&self) -> &Self::Target {
1698 self.as_slice()
1699 }
1700}
1701
1702impl<T, LenT: LenType, S: VecStorage<T> + ?Sized> ops::DerefMut for VecInner<T, LenT, S> {
1703 fn deref_mut(&mut self) -> &mut Self::Target {
1704 self.as_mut_slice()
1705 }
1706}
1707
1708impl<T, LenT: LenType, S: VecStorage<T> + ?Sized> borrow::Borrow<[T]> for VecInner<T, LenT, S> {
1709 fn borrow(&self) -> &[T] {
1710 self.as_slice()
1711 }
1712}
1713impl<T, LenT: LenType, S: VecStorage<T> + ?Sized> borrow::BorrowMut<[T]> for VecInner<T, LenT, S> {
1714 fn borrow_mut(&mut self) -> &mut [T] {
1715 self.as_mut_slice()
1716 }
1717}
1718
1719impl<T, LenT: LenType, S: VecStorage<T> + ?Sized> AsRef<Self> for VecInner<T, LenT, S> {
1720 #[inline]
1721 fn as_ref(&self) -> &Self {
1722 self
1723 }
1724}
1725
1726impl<T, LenT: LenType, S: VecStorage<T> + ?Sized> AsMut<Self> for VecInner<T, LenT, S> {
1727 #[inline]
1728 fn as_mut(&mut self) -> &mut Self {
1729 self
1730 }
1731}
1732
1733impl<T, LenT: LenType, S: VecStorage<T> + ?Sized> AsRef<[T]> for VecInner<T, LenT, S> {
1734 #[inline]
1735 fn as_ref(&self) -> &[T] {
1736 self
1737 }
1738}
1739
1740impl<T, LenT: LenType, S: VecStorage<T> + ?Sized> AsMut<[T]> for VecInner<T, LenT, S> {
1741 #[inline]
1742 fn as_mut(&mut self) -> &mut [T] {
1743 self
1744 }
1745}
1746
1747impl<T, const N: usize, LenT: LenType> Clone for Vec<T, N, LenT>
1748where
1749 T: Clone,
1750{
1751 fn clone(&self) -> Self {
1752 self.clone()
1753 }
1754}
1755
1756#[cfg(test)]
1757mod tests {
1758 use core::fmt::Write;
1759
1760 use static_assertions::assert_not_impl_any;
1761
1762 use super::{Vec, VecView};
1763
1764 // Ensure a `Vec` containing `!Send` values stays `!Send` itself.
1765 assert_not_impl_any!(Vec<*const (), 4>: Send);
1766
1767 #[test]
1768 fn static_new() {
1769 static mut _V: Vec<i32, 4> = Vec::new();
1770 }
1771
1772 #[test]
1773 fn stack_new() {
1774 let mut _v: Vec<i32, 4> = Vec::new();
1775 }
1776
1777 #[test]
1778 fn is_full_empty() {
1779 let mut v: Vec<i32, 4> = Vec::new();
1780
1781 assert!(v.is_empty());
1782 assert!(!v.is_full());
1783
1784 v.push(1).unwrap();
1785 assert!(!v.is_empty());
1786 assert!(!v.is_full());
1787
1788 v.push(1).unwrap();
1789 assert!(!v.is_empty());
1790 assert!(!v.is_full());
1791
1792 v.push(1).unwrap();
1793 assert!(!v.is_empty());
1794 assert!(!v.is_full());
1795
1796 v.push(1).unwrap();
1797 assert!(!v.is_empty());
1798 assert!(v.is_full());
1799 }
1800
1801 #[test]
1802 fn drop() {
1803 droppable!();
1804
1805 {
1806 let mut v: Vec<Droppable, 2> = Vec::new();
1807 v.push(Droppable::new()).ok().unwrap();
1808 v.push(Droppable::new()).ok().unwrap();
1809 v.pop().unwrap();
1810 }
1811
1812 assert_eq!(Droppable::count(), 0);
1813
1814 {
1815 let mut v: Vec<Droppable, 2> = Vec::new();
1816 v.push(Droppable::new()).ok().unwrap();
1817 v.push(Droppable::new()).ok().unwrap();
1818 }
1819
1820 assert_eq!(Droppable::count(), 0);
1821 }
1822
1823 #[test]
1824 fn drop_vecview() {
1825 droppable!();
1826
1827 {
1828 let v: Vec<Droppable, 2> = Vec::new();
1829 let v: Box<Vec<Droppable, 2>> = Box::new(v);
1830 let mut v: Box<VecView<Droppable>> = v;
1831 v.push(Droppable::new()).ok().unwrap();
1832 v.push(Droppable::new()).ok().unwrap();
1833 assert_eq!(Droppable::count(), 2);
1834 v.pop().unwrap();
1835 assert_eq!(Droppable::count(), 1);
1836 }
1837
1838 assert_eq!(Droppable::count(), 0);
1839
1840 {
1841 let v: Vec<Droppable, 2> = Vec::new();
1842 let v: Box<Vec<Droppable, 2>> = Box::new(v);
1843 let mut v: Box<VecView<Droppable>> = v;
1844 v.push(Droppable::new()).ok().unwrap();
1845 v.push(Droppable::new()).ok().unwrap();
1846 assert_eq!(Droppable::count(), 2);
1847 }
1848
1849 assert_eq!(Droppable::count(), 0);
1850 }
1851
1852 #[test]
1853 fn eq() {
1854 let mut xs: Vec<i32, 4> = Vec::new();
1855 let mut ys: Vec<i32, 8> = Vec::new();
1856
1857 assert_eq!(xs, ys);
1858
1859 xs.push(1).unwrap();
1860 ys.push(1).unwrap();
1861
1862 assert_eq!(xs, ys);
1863 }
1864
1865 #[test]
1866 fn cmp() {
1867 let mut xs: Vec<i32, 4> = Vec::new();
1868 let mut ys: Vec<i32, 4> = Vec::new();
1869
1870 assert_eq!(xs, ys);
1871
1872 xs.push(1).unwrap();
1873 ys.push(2).unwrap();
1874
1875 assert!(xs < ys);
1876 }
1877
1878 #[test]
1879 fn cmp_heterogenous_size() {
1880 let mut xs: Vec<i32, 4> = Vec::new();
1881 let mut ys: Vec<i32, 8> = Vec::new();
1882
1883 assert_eq!(xs, ys);
1884
1885 xs.push(1).unwrap();
1886 ys.push(2).unwrap();
1887
1888 assert!(xs < ys);
1889 }
1890
    #[test]
    fn cmp_with_arrays_and_slices() {
        let mut xs: Vec<i32, 12> = Vec::new();
        xs.push(1).unwrap();

        let array = [1];

        assert_eq!(xs, array);
        assert_eq!(array, xs);

        assert_eq!(xs, array.as_slice());
        assert_eq!(array.as_slice(), xs);

        assert_eq!(xs, &array);
        assert_eq!(&array, xs);

        let longer_array = [1; 20];

        assert_ne!(xs, longer_array);
        assert_ne!(longer_array, xs);
    }

    #[test]
    fn full() {
        let mut v: Vec<i32, 4> = Vec::new();

        v.push(0).unwrap();
        v.push(1).unwrap();
        v.push(2).unwrap();
        v.push(3).unwrap();

        assert!(v.push(4).is_err());
    }

    #[test]
    fn iter() {
        let mut v: Vec<i32, 4> = Vec::new();

        v.push(0).unwrap();
        v.push(1).unwrap();
        v.push(2).unwrap();
        v.push(3).unwrap();

        let mut items = v.iter();

        assert_eq!(items.next(), Some(&0));
        assert_eq!(items.next(), Some(&1));
        assert_eq!(items.next(), Some(&2));
        assert_eq!(items.next(), Some(&3));
        assert_eq!(items.next(), None);
    }

    #[test]
    fn iter_mut() {
        let mut v: Vec<i32, 4> = Vec::new();

        v.push(0).unwrap();
        v.push(1).unwrap();
        v.push(2).unwrap();
        v.push(3).unwrap();

        let mut items = v.iter_mut();

        assert_eq!(items.next(), Some(&mut 0));
        assert_eq!(items.next(), Some(&mut 1));
        assert_eq!(items.next(), Some(&mut 2));
        assert_eq!(items.next(), Some(&mut 3));
        assert_eq!(items.next(), None);
    }

    #[test]
    fn collect_from_iter() {
        let slice = &[1, 2, 3];
        let vec: Vec<i32, 4> = slice.iter().cloned().collect();
        assert_eq!(&vec, slice);
    }

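    // `FromIterator` has no way to report overflow as an error, so collecting more
    // items than the capacity can hold panics instead.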
    #[test]
    #[should_panic]
    fn collect_from_iter_overfull() {
        let slice = &[1, 2, 3];
        let _vec = slice.iter().cloned().collect::<Vec<_, 2>>();
    }

    #[test]
    fn iter_move() {
        let mut v: Vec<i32, 4> = Vec::new();
        v.push(0).unwrap();
        v.push(1).unwrap();
        v.push(2).unwrap();
        v.push(3).unwrap();

        let mut items = v.into_iter();

        assert_eq!(items.next(), Some(0));
        assert_eq!(items.next(), Some(1));
        assert_eq!(items.next(), Some(2));
        assert_eq!(items.next(), Some(3));
        assert_eq!(items.next(), None);
    }

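    // Dropping the `IntoIter` must drop whatever it has not yielded yet, whether that
    // is none, some, or all of the elements.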
    #[test]
    fn iter_move_drop() {
        droppable!();

        {
            let mut vec: Vec<Droppable, 2> = Vec::new();
            vec.push(Droppable::new()).ok().unwrap();
            vec.push(Droppable::new()).ok().unwrap();
            let mut items = vec.into_iter();
            // Move all
            let _ = items.next();
            let _ = items.next();
        }

        assert_eq!(Droppable::count(), 0);

        {
            let mut vec: Vec<Droppable, 2> = Vec::new();
            vec.push(Droppable::new()).ok().unwrap();
            vec.push(Droppable::new()).ok().unwrap();
            let _items = vec.into_iter();
            // Move none
        }

        assert_eq!(Droppable::count(), 0);

        {
            let mut vec: Vec<Droppable, 2> = Vec::new();
            vec.push(Droppable::new()).ok().unwrap();
            vec.push(Droppable::new()).ok().unwrap();
            let mut items = vec.into_iter();
            let _ = items.next(); // Move partly
        }

        assert_eq!(Droppable::count(), 0);
    }

    #[test]
    fn push_and_pop() {
        let mut v: Vec<i32, 4> = Vec::new();
        assert_eq!(v.len(), 0);

        assert_eq!(v.pop(), None);
        assert_eq!(v.len(), 0);

        v.push(0).unwrap();
        assert_eq!(v.len(), 1);

        assert_eq!(v.pop(), Some(0));
        assert_eq!(v.len(), 0);

        assert_eq!(v.pop(), None);
        assert_eq!(v.len(), 0);
    }

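    // A small extra sketch (not part of the original suite): `clear` should return the
    // vector to the empty state and leave it reusable. Only the existing `clear`,
    // `is_empty` and `as_slice` APIs are assumed.
    #[test]
    fn clear_after_push() {
        let mut v: Vec<i32, 4> = Vec::new();
        v.push(0).unwrap();
        v.push(1).unwrap();
        v.clear();
        assert!(v.is_empty());
        v.push(2).unwrap();
        assert_eq!(v.as_slice(), &[2]);
    }
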
    #[test]
    fn resize_size_limit() {
        let mut v: Vec<u8, 4> = Vec::new();

        v.resize(0, 0).unwrap();
        v.resize(4, 0).unwrap();
        v.resize(5, 0).expect_err("full");
    }

    #[test]
    fn resize_length_cases() {
        let mut v: Vec<u8, 4> = Vec::new();

        assert_eq!(v.len(), 0);

        // Grow by 1
        v.resize(1, 0).unwrap();
        assert_eq!(v.len(), 1);

        // Grow by 2
        v.resize(3, 0).unwrap();
        assert_eq!(v.len(), 3);

        // Resize to current size
        v.resize(3, 0).unwrap();
        assert_eq!(v.len(), 3);

        // Shrink by 1
        v.resize(2, 0).unwrap();
        assert_eq!(v.len(), 2);

        // Shrink by 2
        v.resize(0, 0).unwrap();
        assert_eq!(v.len(), 0);
    }

    #[test]
    fn resize_contents() {
        let mut v: Vec<u8, 4> = Vec::new();

        // New entries take supplied value when growing
        v.resize(1, 17).unwrap();
        assert_eq!(v[0], 17);

        // Old values aren't changed when growing
        v.resize(2, 18).unwrap();
        assert_eq!(v[0], 17);
        assert_eq!(v[1], 18);

        // Old values aren't changed when length unchanged
        v.resize(2, 0).unwrap();
        assert_eq!(v[0], 17);
        assert_eq!(v[1], 18);

        // Old values aren't changed when shrinking
        v.resize(1, 0).unwrap();
        assert_eq!(v[0], 17);
    }

    #[test]
    fn resize_default() {
        let mut v: Vec<u8, 4> = Vec::new();

        // resize_default is implemented using resize, so just check the
        // correct value is being written.
        v.resize_default(1).unwrap();
        assert_eq!(v[0], 0);
    }

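    // Another extra sketch (not part of the original suite): `truncate` removes elements
    // from the tail only. Only the existing `extend_from_slice`/`truncate` APIs are assumed.
    #[test]
    fn truncate_tail() {
        let mut v: Vec<u8, 4> = Vec::new();
        v.extend_from_slice(&[1, 2, 3]).unwrap();
        v.truncate(1);
        assert_eq!(v.as_slice(), &[1]);
    }
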
    #[test]
    fn write() {
        let mut v: Vec<u8, 4> = Vec::new();
        // 1234 decimal is 0x4d2, so `{:x}` formatting produces the bytes b"4d2".
        write!(v, "{:x}", 1234).unwrap();
        assert_eq!(&v[..], b"4d2");
    }

    #[test]
    fn extend_from_slice() {
        let mut v: Vec<u8, 4> = Vec::new();
        assert_eq!(v.len(), 0);
        v.extend_from_slice(&[1, 2]).unwrap();
        assert_eq!(v.len(), 2);
        assert_eq!(v.as_slice(), &[1, 2]);
        v.extend_from_slice(&[3]).unwrap();
        assert_eq!(v.len(), 3);
        assert_eq!(v.as_slice(), &[1, 2, 3]);
        assert!(v.extend_from_slice(&[4, 5]).is_err());
        assert_eq!(v.len(), 3);
        assert_eq!(v.as_slice(), &[1, 2, 3]);
    }

    #[test]
    fn from_slice() {
        // Successful construction
        let v: Vec<u8, 4> = Vec::from_slice(&[1, 2, 3]).unwrap();
        assert_eq!(v.len(), 3);
        assert_eq!(v.as_slice(), &[1, 2, 3]);

        // Slice too large
        assert!(Vec::<u8, 2>::from_slice(&[1, 2, 3]).is_err());
    }

    #[test]
    fn from_array() {
        // Successful construction, N == M
        let v: Vec<u8, 3> = Vec::from_array([1, 2, 3]);
        assert_eq!(v, Vec::<u8, 3>::from([1, 2, 3]));
        assert_eq!(v.len(), 3);
        assert_eq!(v.as_slice(), &[1, 2, 3]);

        // Successful construction, N > M
        let v: Vec<u8, 4> = Vec::from_array([1, 2, 3]);
        assert_eq!(v, Vec::<u8, 4>::from([1, 2, 3]));
        assert_eq!(v.len(), 3);
        assert_eq!(v.as_slice(), &[1, 2, 3]);
    }

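    // `From<[T; M]>` must move the array's elements into the vector without running
    // their destructors; a dropped `Drops` would have had its field reset to `None`.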
    #[test]
    fn from_array_no_drop() {
        struct Drops(Option<u8>);

        impl Drop for Drops {
            fn drop(&mut self) {
                self.0 = None;
            }
        }

        let v: Vec<Drops, 3> = Vec::from([Drops(Some(1)), Drops(Some(2)), Drops(Some(3))]);

        assert_eq!(v[0].0, Some(1));
        assert_eq!(v[1].0, Some(2));
        assert_eq!(v[2].0, Some(3));
    }

    #[test]
    fn starts_with() {
        let v: Vec<_, 8> = Vec::from_slice(b"ab").unwrap();
        assert!(v.starts_with(&[]));
        assert!(v.starts_with(b""));
        assert!(v.starts_with(b"a"));
        assert!(v.starts_with(b"ab"));
        assert!(!v.starts_with(b"abc"));
        assert!(!v.starts_with(b"ba"));
        assert!(!v.starts_with(b"b"));
    }

    #[test]
    fn ends_with() {
        let v: Vec<_, 8> = Vec::from_slice(b"ab").unwrap();
        assert!(v.ends_with(&[]));
        assert!(v.ends_with(b""));
        assert!(v.ends_with(b"b"));
        assert!(v.ends_with(b"ab"));
        assert!(!v.ends_with(b"abc"));
        assert!(!v.ends_with(b"ba"));
        assert!(!v.ends_with(b"a"));
    }

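    // `spare_capacity_mut` exposes the uninitialized tail of the buffer; after writing
    // into it, the (unsafe) `set_len` call is what makes those elements observable.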
    #[test]
    fn spare_capacity_mut() {
        let mut v: Vec<_, 4> = Vec::new();
        let uninit = v.spare_capacity_mut();
        assert_eq!(uninit.len(), 4);
        uninit[0].write(1);
        uninit[1].write(2);
        uninit[2].write(3);
        unsafe { v.set_len(3) };
        assert_eq!(v.as_slice(), &[1, 2, 3]);

        let uninit = v.spare_capacity_mut();
        assert_eq!(uninit.len(), 1);
        uninit[0].write(4);
        unsafe { v.set_len(4) };
        assert_eq!(v.as_slice(), &[1, 2, 3, 4]);

        assert!(v.spare_capacity_mut().is_empty());
    }

    #[test]
    #[cfg(feature = "alloc")]
    fn heapless_to_alloc() {
        let mut hv: Vec<u8, 4> = Vec::new();
        hv.push(0).unwrap();
        hv.push(1).unwrap();

        let av: alloc::vec::Vec<u8> = hv.clone().try_into().unwrap();
        assert_eq!(av.as_slice(), hv.as_slice());
    }

    #[test]
    #[cfg(feature = "alloc")]
    fn alloc_to_heapless() {
        let mut av: alloc::vec::Vec<u8> = alloc::vec::Vec::new();
        av.push(0);
        av.push(1);

        let hv: Vec<u8, 2> = av.clone().try_into().unwrap();
        assert_eq!(hv.as_slice(), av.as_slice());

        let _: crate::CapacityError =
            <alloc::vec::Vec<u8> as TryInto<Vec<u8, 1>>>::try_into(av.clone()).unwrap_err();
    }

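    // Compile-time checks that `Vec` and `VecView` are covariant in the element
    // lifetime; these functions only need to type-check and are never called.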
    fn _test_variance<'a: 'b, 'b>(x: Vec<&'a (), 42>) -> Vec<&'b (), 42> {
        x
    }
    fn _test_variance_view<'a: 'b, 'b, 'c>(x: &'c VecView<&'a ()>) -> &'c VecView<&'b ()> {
        x
    }
}