allocator_api2/vec/into_iter.rs

use core::fmt;
use core::iter::FusedIterator;
use core::marker::PhantomData;
use core::mem::{self, size_of, ManuallyDrop};

use core::ptr::{self, NonNull};
use core::slice::{self};

use crate::addr;

use super::{Allocator, Global, RawVec};

#[cfg(not(no_global_oom_handling))]
use super::Vec;
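
/// A by-value iterator over the contents of a `Vec<T, A>`, produced by
/// `Vec::into_iter`. It owns the original allocation and, when dropped,
/// drops any elements that were not yielded and then frees the buffer.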
pub struct IntoIter<T, A: Allocator = Global> {
    // Start of the original allocation; needed to free the buffer on drop
    // even after `ptr` has advanced.
    pub(super) buf: NonNull<T>,
    pub(super) phantom: PhantomData<T>,
    // Capacity of the original allocation, in elements.
    pub(super) cap: usize,
    // Wrapped in `ManuallyDrop` so `Drop` can move the allocator out and
    // hand it to `RawVec` for deallocation.
    pub(super) alloc: ManuallyDrop<A>,
    // Cursors: `ptr == end` means the iterator is empty.
    pub(super) ptr: *const T,
    pub(super) end: *const T,
}

impl<T: fmt::Debug, A: Allocator> fmt::Debug for IntoIter<T, A> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("IntoIter").field(&self.as_slice()).finish()
    }
}

impl<T, A: Allocator> IntoIter<T, A> {
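    /// Returns the remaining, not-yet-yielded items as a slice.
    ///
    /// This is sound because `ptr..end` always covers exactly the elements
    /// that have not been handed out by `next`/`next_back`.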
    pub fn as_slice(&self) -> &[T] {
        unsafe { slice::from_raw_parts(self.ptr, self.len()) }
    }

    pub fn as_mut_slice(&mut self) -> &mut [T] {
        unsafe { &mut *self.as_raw_mut_slice() }
    }

    #[inline(always)]
    pub fn allocator(&self) -> &A {
        &self.alloc
    }

    fn as_raw_mut_slice(&mut self) -> *mut [T] {
        ptr::slice_from_raw_parts_mut(self.ptr as *mut T, self.len())
    }
}

impl<T, A: Allocator> AsRef<[T]> for IntoIter<T, A> {
    fn as_ref(&self) -> &[T] {
        self.as_slice()
    }
}

unsafe impl<T: Send, A: Allocator + Send> Send for IntoIter<T, A> {}

unsafe impl<T: Sync, A: Allocator + Sync> Sync for IntoIter<T, A> {}
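
// Iteration moves the `ptr` and `end` cursors toward each other; `ptr == end`
// means the iterator is exhausted. For zero-sized types the cursors are moved
// one byte at a time (the elements occupy no space), so the remaining length
// is the address difference, and yielded values are materialized with
// `mem::zeroed()`.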
impl<T, A: Allocator> Iterator for IntoIter<T, A> {
    type Item = T;

    #[inline(always)]
    fn next(&mut self) -> Option<T> {
        if self.ptr == self.end {
            None
        } else if size_of::<T>() == 0 {
            self.ptr = self.ptr.cast::<u8>().wrapping_add(1).cast();

            Some(unsafe { mem::zeroed() })
        } else {
            let old = self.ptr;
            self.ptr = unsafe { self.ptr.add(1) };

            Some(unsafe { ptr::read(old) })
        }
    }

    #[inline(always)]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let exact = if size_of::<T>() == 0 {
            addr(self.end).wrapping_sub(addr(self.ptr))
        } else {
            unsafe { self.end.offset_from(self.ptr) as usize }
        };
        (exact, Some(exact))
    }

    #[inline(always)]
    fn count(self) -> usize {
        self.len()
    }
}

impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
    #[inline(always)]
    fn next_back(&mut self) -> Option<T> {
        if self.end == self.ptr {
            None
        } else if size_of::<T>() == 0 {
            // Walking backward must shrink the range, so `end` moves down.
            self.end = self.end.cast::<u8>().wrapping_sub(1).cast();

            Some(unsafe { mem::zeroed() })
        } else {
            self.end = unsafe { self.end.sub(1) };

            Some(unsafe { ptr::read(self.end) })
        }
    }
}

impl<T, A: Allocator> ExactSizeIterator for IntoIter<T, A> {}

impl<T, A: Allocator> FusedIterator for IntoIter<T, A> {}

#[cfg(not(no_global_oom_handling))]
impl<T: Clone, A: Allocator + Clone> Clone for IntoIter<T, A> {
    fn clone(&self) -> Self {
        // Clone only the elements that have not been yielded yet into a new
        // `Vec` allocated with a clone of the allocator, then iterate that.
        let mut vec = Vec::<T, A>::with_capacity_in(self.len(), (*self.alloc).clone());
        vec.extend(self.as_slice().iter().cloned());
        vec.into_iter()
    }
}

impl<T, A: Allocator> Drop for IntoIter<T, A> {
    fn drop(&mut self) {
        struct DropGuard<'a, T, A: Allocator>(&'a mut IntoIter<T, A>);

        impl<T, A: Allocator> Drop for DropGuard<'_, T, A> {
            fn drop(&mut self) {
                unsafe {
                    // Move the allocator out of its `ManuallyDrop` wrapper and
                    // rebuild the `RawVec`; dropping it deallocates the buffer.
                    let alloc = ManuallyDrop::take(&mut self.0.alloc);
                    let _ = RawVec::from_raw_parts_in(self.0.buf.as_ptr(), self.0.cap, alloc);
                }
            }
        }

        let guard = DropGuard(self);
        unsafe {
            // Drop the remaining elements; the guard then frees the
            // allocation, even if one of these destructors panics.
            ptr::drop_in_place(guard.0.as_raw_mut_slice());
        }
    }
}
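
// A minimal sketch of how this iterator behaves, assuming the crate's `Vec`
// mirrors the std `Vec` API (`new`, `push`, `into_iter`); the path
// `super::super::Vec` assumes this file lives at `vec/into_iter.rs`.
#[cfg(test)]
mod tests {
    use super::super::Vec;

    #[test]
    fn yields_from_both_ends_and_frees_the_rest() {
        let mut v: Vec<i32> = Vec::new();
        for i in 0..5 {
            v.push(i);
        }

        let mut it = v.into_iter();
        assert_eq!(it.size_hint(), (5, Some(5)));
        assert_eq!(it.next(), Some(0)); // front cursor advances
        assert_eq!(it.next_back(), Some(4)); // back cursor retreats
        assert_eq!(it.as_slice(), &[1, 2, 3]); // elements not yet yielded
        // Dropping `it` drops the remaining elements and returns the buffer
        // to the allocator via `Drop for IntoIter`.
    }
}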