zerocopy/wrappers.rs
// Copyright 2023 The Fuchsia Authors
//
// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0
// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT
// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option.
// This file may not be copied, modified, or distributed except according to
// those terms.

use core::{fmt, hash::Hash};

use super::*;

/// A type with no alignment requirement.
///
/// An `Unalign` wraps a `T`, removing any alignment requirement. `Unalign<T>`
/// has the same size and bit validity as `T`, but not necessarily the same
/// alignment [or ABI]. This is useful if a type with an alignment requirement
/// needs to be read from a chunk of memory which provides no alignment
/// guarantees.
///
/// Since `Unalign` has no alignment requirement, the inner `T` may not be
/// properly aligned in memory. There are five ways to access the inner `T`:
/// - by value, using [`get`] or [`into_inner`]
/// - by reference inside of a callback, using [`update`]
/// - fallibly by reference, using [`try_deref`] or [`try_deref_mut`]; these can
///   fail if the `Unalign` does not satisfy `T`'s alignment requirement at
///   runtime
/// - unsafely by reference, using [`deref_unchecked`] or
///   [`deref_mut_unchecked`]; it is the caller's responsibility to ensure that
///   the `Unalign` satisfies `T`'s alignment requirement
/// - (where `T: Unaligned`) infallibly by reference, using [`Deref::deref`] or
///   [`DerefMut::deref_mut`]
///
/// [or ABI]: https://github.com/google/zerocopy/issues/164
/// [`get`]: Unalign::get
/// [`into_inner`]: Unalign::into_inner
/// [`update`]: Unalign::update
/// [`try_deref`]: Unalign::try_deref
/// [`try_deref_mut`]: Unalign::try_deref_mut
/// [`deref_unchecked`]: Unalign::deref_unchecked
/// [`deref_mut_unchecked`]: Unalign::deref_mut_unchecked
///
/// # Example
///
/// In this example, we need `EthernetFrame` to have no alignment requirement -
/// and thus implement [`Unaligned`]. `EtherType` is `#[repr(u16)]` and so
/// cannot implement `Unaligned`. We use `Unalign` to relax `EtherType`'s
/// alignment requirement so that `EthernetFrame` has no alignment requirement
/// and can implement `Unaligned`.
///
/// ```rust
/// use zerocopy::*;
/// # use zerocopy_derive::*;
/// # #[derive(FromBytes, KnownLayout, Immutable, Unaligned)] #[repr(C)] struct Mac([u8; 6]);
///
/// # #[derive(PartialEq, Copy, Clone, Debug)]
/// #[derive(TryFromBytes, KnownLayout, Immutable)]
/// #[repr(u16)]
/// enum EtherType {
///     Ipv4 = 0x0800u16.to_be(),
///     Arp = 0x0806u16.to_be(),
///     Ipv6 = 0x86DDu16.to_be(),
/// # /*
///     ...
/// # */
/// }
///
/// #[derive(TryFromBytes, KnownLayout, Immutable, Unaligned)]
/// #[repr(C)]
/// struct EthernetFrame {
///     src: Mac,
///     dst: Mac,
///     ethertype: Unalign<EtherType>,
///     payload: [u8],
/// }
///
/// let bytes = &[
/// # 0, 1, 2, 3, 4, 5,
/// # 6, 7, 8, 9, 10, 11,
/// # /*
///     ...
/// # */
///     0x86, 0xDD, // EtherType
///     0xDE, 0xAD, 0xBE, 0xEF // Payload
/// ][..];
///
/// // PANICS: Guaranteed not to panic because `bytes` is of the right
/// // length, has the right contents, and `EthernetFrame` has no
/// // alignment requirement.
/// let packet = EthernetFrame::try_ref_from_bytes(&bytes).unwrap();
///
/// assert_eq!(packet.ethertype.get(), EtherType::Ipv6);
/// assert_eq!(packet.payload, [0xDE, 0xAD, 0xBE, 0xEF]);
/// ```
///
/// # Safety
///
/// `Unalign<T>` is guaranteed to have the same size and bit validity as `T`,
/// and to have [`UnsafeCell`]s covering the same byte ranges as `T`.
/// `Unalign<T>` is guaranteed to have alignment 1.
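///
/// The size and alignment guarantees can be spot-checked directly; a small
/// sanity check, not something callers need to repeat:
///
/// ```
/// use zerocopy::Unalign;
///
/// assert_eq!(core::mem::size_of::<Unalign<u64>>(), core::mem::size_of::<u64>());
/// assert_eq!(core::mem::align_of::<Unalign<u64>>(), 1);
/// ```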
// NOTE: This type is sound to use with types that need to be dropped. The
// reason is that the compiler-generated drop code automatically moves all
// values to aligned memory slots before dropping them in-place. This is not
// well-documented, but it's hinted at in places like [1] and [2]. However, this
// also means that `T` must be `Sized`; unless something changes, we can never
// support unsized `T`. [3]
//
// [1] https://github.com/rust-lang/rust/issues/54148#issuecomment-420529646
// [2] https://github.com/google/zerocopy/pull/126#discussion_r1018512323
// [3] https://github.com/google/zerocopy/issues/209
#[allow(missing_debug_implementations)]
#[derive(Default, Copy)]
#[cfg_attr(any(feature = "derive", test), derive(Immutable, FromBytes, IntoBytes, Unaligned))]
#[repr(C, packed)]
pub struct Unalign<T>(T);

// We do not use `derive(KnownLayout)` on `Unalign`, because the derive is not
// smart enough to realize that `Unalign<T>` is always sized and thus emits a
// `KnownLayout` impl bounded on `T: KnownLayout`. This is overly restrictive.
impl_known_layout!(T => Unalign<T>);

safety_comment! {
    /// SAFETY:
    /// - `Unalign<T>` promises to have alignment 1, and so we don't require
    ///   that `T: Unaligned`.
    /// - `Unalign<T>` has the same bit validity as `T`, and so it is
    ///   `FromZeros`, `FromBytes`, or `IntoBytes` exactly when `T` is as well.
    /// - `Immutable`: `Unalign<T>` has the same fields as `T`, so it contains
    ///   `UnsafeCell`s exactly when `T` does.
    /// - `TryFromBytes`: `Unalign<T>` has the same bit validity as `T`, so
    ///   `T::is_bit_valid` is a sound implementation of `is_bit_valid`.
    impl_or_verify!(T => Unaligned for Unalign<T>);
    impl_or_verify!(T: Immutable => Immutable for Unalign<T>);
    impl_or_verify!(
        T: TryFromBytes => TryFromBytes for Unalign<T>;
        |c| T::is_bit_valid(c.transmute())
    );
    impl_or_verify!(T: FromZeros => FromZeros for Unalign<T>);
    impl_or_verify!(T: FromBytes => FromBytes for Unalign<T>);
    impl_or_verify!(T: IntoBytes => IntoBytes for Unalign<T>);
}

// Note that `Unalign: Clone` only if `T: Copy`. Since the inner `T` may not be
// aligned, there's no way to safely call `T::clone`, and so a `T: Clone` bound
// is not sufficient to implement `Clone` for `Unalign`.
impl<T: Copy> Clone for Unalign<T> {
    #[inline(always)]
    fn clone(&self) -> Unalign<T> {
        *self
    }
}

impl<T> Unalign<T> {
    /// Constructs a new `Unalign`.
    #[inline(always)]
    pub const fn new(val: T) -> Unalign<T> {
        Unalign(val)
    }

    /// Consumes `self`, returning the inner `T`.
    #[inline(always)]
    pub const fn into_inner(self) -> T {
        // SAFETY: Since `Unalign` is `#[repr(C, packed)]`, it has the same size
        // and bit validity as `T`.
        //
        // We do this instead of just destructuring in order to prevent
        // `Unalign`'s `Drop::drop` from being run, since dropping is not
        // supported in `const fn`s.
        //
        // TODO(https://github.com/rust-lang/rust/issues/73255): Destructure
        // instead of using unsafe.
        unsafe { crate::util::transmute_unchecked(self) }
    }

    /// Attempts to return a reference to the wrapped `T`, failing if `self` is
    /// not properly aligned.
    ///
    /// If `self` does not satisfy `align_of::<T>()`, then `try_deref` returns
    /// `Err`.
    ///
    /// If `T: Unaligned`, then `Unalign<T>` implements [`Deref`], and callers
    /// may prefer [`Deref::deref`], which is infallible.
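    ///
    /// # Example
    ///
    /// A small sketch of the fallible path; whether the check succeeds depends
    /// on where this particular `Unalign` happens to live in memory:
    ///
    /// ```
    /// use zerocopy::Unalign;
    ///
    /// let u = Unalign::new(0u32);
    /// match u.try_deref() {
    ///     // `u` happens to satisfy `u32`'s alignment requirement.
    ///     Ok(r) => assert_eq!(*r, 0),
    ///     // `u` is misaligned; fall back to a by-value read.
    ///     Err(_) => assert_eq!(u.get(), 0),
    /// }
    /// ```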
    #[inline(always)]
    pub fn try_deref(&self) -> Result<&T, AlignmentError<&Self, T>> {
        let inner = Ptr::from_ref(self).transmute();
        match inner.bikeshed_try_into_aligned() {
            Ok(aligned) => Ok(aligned.as_ref()),
            Err(err) => Err(err.map_src(|src| src.into_unalign().as_ref())),
        }
    }

    /// Attempts to return a mutable reference to the wrapped `T`, failing if
    /// `self` is not properly aligned.
    ///
    /// If `self` does not satisfy `align_of::<T>()`, then `try_deref_mut`
    /// returns `Err`.
    ///
    /// If `T: Unaligned`, then `Unalign<T>` implements [`DerefMut`], and
    /// callers may prefer [`DerefMut::deref_mut`], which is infallible.
    #[inline(always)]
    pub fn try_deref_mut(&mut self) -> Result<&mut T, AlignmentError<&mut Self, T>> {
        let inner = Ptr::from_mut(self).transmute::<_, _, (_, (_, _))>();
        match inner.bikeshed_try_into_aligned() {
            Ok(aligned) => Ok(aligned.as_mut()),
            Err(err) => Err(err.map_src(|src| src.into_unalign().as_mut())),
        }
    }

    /// Returns a reference to the wrapped `T` without checking alignment.
    ///
    /// If `T: Unaligned`, then `Unalign<T>` implements [`Deref`], and callers
    /// may prefer [`Deref::deref`], which is safe.
    ///
    /// # Safety
    ///
    /// The caller must guarantee that `self` satisfies `align_of::<T>()`.
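    ///
    /// # Example
    ///
    /// A sketch of discharging that obligation with a runtime check before
    /// calling the unchecked accessor (in practice, prefer [`try_deref`]):
    ///
    /// ```
    /// use zerocopy::Unalign;
    ///
    /// let u = Unalign::new(7u32);
    /// if (u.get_ptr() as usize) % core::mem::align_of::<u32>() == 0 {
    ///     // SAFETY: We just confirmed that `u` satisfies `u32`'s alignment.
    ///     assert_eq!(unsafe { *u.deref_unchecked() }, 7);
    /// }
    /// ```
    ///
    /// [`try_deref`]: Unalign::try_deref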
    #[inline(always)]
    pub const unsafe fn deref_unchecked(&self) -> &T {
        // SAFETY: `Unalign<T>` is `#[repr(C, packed)]` with a single field of
        // type `T`, so there is a valid `T` at the same memory location as
        // `self`. It has no alignment guarantee, but the caller has promised
        // that `self` is properly aligned, so we know that it is sound to
        // create a reference to `T` at this memory location.
        //
        // We use `mem::transmute` instead of `&*self.get_ptr()` because
        // dereferencing pointers is not stable in `const` on our current MSRV
        // (1.56 as of this writing).
        unsafe { mem::transmute(self) }
    }

    /// Returns a mutable reference to the wrapped `T` without checking
    /// alignment.
    ///
    /// If `T: Unaligned`, then `Unalign<T>` implements [`DerefMut`], and
    /// callers may prefer [`DerefMut::deref_mut`], which is safe.
    ///
    /// # Safety
    ///
    /// The caller must guarantee that `self` satisfies `align_of::<T>()`.
    #[inline(always)]
    pub unsafe fn deref_mut_unchecked(&mut self) -> &mut T {
        // SAFETY: `self.get_mut_ptr()` returns a raw pointer to a valid `T` at
        // the same memory location as `self`. It has no alignment guarantee,
        // but the caller has promised that `self` is properly aligned, so we
        // know that the pointer itself is aligned, and thus that it is sound to
        // create a reference to a `T` at this memory location.
        unsafe { &mut *self.get_mut_ptr() }
    }

    /// Gets an unaligned raw pointer to the inner `T`.
    ///
    /// # Safety
    ///
    /// The returned raw pointer is not necessarily aligned to
    /// `align_of::<T>()`. Most functions which operate on raw pointers require
    /// those pointers to be aligned, so calling those functions with the result
    /// of `get_ptr` will result in undefined behavior if alignment is not
    /// guaranteed using some out-of-band mechanism. In general, the only
    /// functions which are safe to call with this pointer are those which are
    /// explicitly documented as being sound to use with an unaligned pointer,
    /// such as [`read_unaligned`].
    ///
    /// Even if the caller is permitted to mutate `self` (e.g. they have
    /// ownership or a mutable borrow), it is not guaranteed to be sound to
    /// write through the returned pointer. If writing is required, prefer
    /// [`get_mut_ptr`] instead.
    ///
    /// [`read_unaligned`]: core::ptr::read_unaligned
    /// [`get_mut_ptr`]: Unalign::get_mut_ptr
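    ///
    /// # Example
    ///
    /// A short sketch of the kind of access that is always sound through this
    /// pointer, using [`read_unaligned`]:
    ///
    /// ```
    /// use zerocopy::Unalign;
    ///
    /// let u = Unalign::new(0x1234_5678u32);
    /// // SAFETY: `get_ptr` points to a valid, initialized `u32`, and
    /// // `read_unaligned` imposes no alignment requirement on its argument.
    /// let val = unsafe { core::ptr::read_unaligned(u.get_ptr()) };
    /// assert_eq!(val, 0x1234_5678);
    /// ```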
    #[inline(always)]
    pub const fn get_ptr(&self) -> *const T {
        ptr::addr_of!(self.0)
    }

    /// Gets an unaligned mutable raw pointer to the inner `T`.
    ///
    /// # Safety
    ///
    /// The returned raw pointer is not necessarily aligned to
    /// `align_of::<T>()`. Most functions which operate on raw pointers require
    /// those pointers to be aligned, so calling those functions with the result
    /// of `get_mut_ptr` will result in undefined behavior if alignment is not
    /// guaranteed using some out-of-band mechanism. In general, the only
    /// functions which are safe to call with this pointer are those which are
    /// explicitly documented as being sound to use with an unaligned pointer,
    /// such as [`read_unaligned`].
    ///
    /// [`read_unaligned`]: core::ptr::read_unaligned
    // TODO(https://github.com/rust-lang/rust/issues/57349): Make this `const`.
    #[inline(always)]
    pub fn get_mut_ptr(&mut self) -> *mut T {
        ptr::addr_of_mut!(self.0)
    }

    /// Sets the inner `T`, dropping the previous value.
    // TODO(https://github.com/rust-lang/rust/issues/57349): Make this `const`.
    #[inline(always)]
    pub fn set(&mut self, t: T) {
        *self = Unalign::new(t);
    }

    /// Updates the inner `T` by calling a function on it.
    ///
    /// If [`T: Unaligned`], then `Unalign<T>` implements [`DerefMut`], and that
    /// impl should be preferred over this method when performing updates, as it
    /// will usually be faster and more ergonomic.
    ///
    /// For large types, this method may be expensive, as it requires copying
    /// `2 * size_of::<T>()` bytes. \[1\]
    ///
    /// \[1\] Since the inner `T` may not be aligned, it would not be sound to
    /// invoke `f` on it directly. Instead, `update` moves it into a
    /// properly-aligned location in the local stack frame, calls `f` on it, and
    /// then moves it back to its original location in `self`.
    ///
    /// [`T: Unaligned`]: Unaligned
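    ///
    /// # Example
    ///
    /// A minimal sketch of an in-place update through the callback:
    ///
    /// ```
    /// use zerocopy::Unalign;
    ///
    /// let mut u = Unalign::new(100u64);
    /// let new = u.update(|v| {
    ///     *v += 23;
    ///     *v
    /// });
    /// assert_eq!(new, 123);
    /// assert_eq!(u.get(), 123);
    /// ```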
    #[inline]
    pub fn update<O, F: FnOnce(&mut T) -> O>(&mut self, f: F) -> O {
        if mem::align_of::<T>() == 1 {
            // While we advise callers to use `DerefMut` when `T: Unaligned`,
            // not all callers will be able to guarantee `T: Unaligned` in all
            // cases. In particular, callers who are themselves providing an API
            // which is generic over `T` may sometimes be called by *their*
            // callers with `T` such that `align_of::<T>() == 1`, but cannot
            // guarantee this in the general case. Thus, this optimization may
            // sometimes be helpful.

            // SAFETY: Since `T`'s alignment is 1, `self` satisfies its
            // alignment by definition.
            let t = unsafe { self.deref_mut_unchecked() };
            return f(t);
        }

        // On drop, this moves `copy` out of itself and uses `ptr::write` to
        // overwrite `slf`.
        struct WriteBackOnDrop<T> {
            copy: ManuallyDrop<T>,
            slf: *mut Unalign<T>,
        }

        impl<T> Drop for WriteBackOnDrop<T> {
            fn drop(&mut self) {
                // SAFETY: We never use `copy` again as required by
                // `ManuallyDrop::take`.
                let copy = unsafe { ManuallyDrop::take(&mut self.copy) };
                // SAFETY: `slf` is the raw pointer value of `self`. We know it
                // is valid for writes and properly aligned because `self` is a
                // mutable reference, which guarantees both of these properties.
                unsafe { ptr::write(self.slf, Unalign::new(copy)) };
            }
        }

        // SAFETY: We know that `self` is valid for reads, properly aligned, and
        // points to an initialized `Unalign<T>` because it is a mutable
        // reference, which guarantees all of these properties.
        //
        // Since `T: !Copy`, it would be unsound in the general case to allow
        // both the original `Unalign<T>` and the copy to be used by safe code.
        // We guarantee that the copy is used to overwrite the original in the
        // `Drop::drop` impl of `WriteBackOnDrop`. So long as this `drop` is
        // called before any other safe code executes, soundness is upheld.
        // While this method can terminate in two ways (by returning normally or
        // by unwinding due to a panic in `f`), in both cases, `write_back` is
        // dropped - and its `drop` called - before any other safe code can
        // execute.
        let copy = unsafe { ptr::read(self) }.into_inner();
        let mut write_back = WriteBackOnDrop { copy: ManuallyDrop::new(copy), slf: self };

        let ret = f(&mut write_back.copy);

        drop(write_back);
        ret
    }
}

impl<T: Copy> Unalign<T> {
    /// Gets a copy of the inner `T`.
    // TODO(https://github.com/rust-lang/rust/issues/57349): Make this `const`.
    #[inline(always)]
    pub fn get(&self) -> T {
        let Unalign(val) = *self;
        val
    }
}

impl<T: Unaligned> Deref for Unalign<T> {
    type Target = T;

    #[inline(always)]
    fn deref(&self) -> &T {
        Ptr::from_ref(self).transmute().bikeshed_recall_aligned().as_ref()
    }
}

impl<T: Unaligned> DerefMut for Unalign<T> {
    #[inline(always)]
    fn deref_mut(&mut self) -> &mut T {
        Ptr::from_mut(self).transmute::<_, _, (_, (_, _))>().bikeshed_recall_aligned().as_mut()
    }
}

impl<T: Unaligned + PartialOrd> PartialOrd<Unalign<T>> for Unalign<T> {
    #[inline(always)]
    fn partial_cmp(&self, other: &Unalign<T>) -> Option<Ordering> {
        PartialOrd::partial_cmp(self.deref(), other.deref())
    }
}

impl<T: Unaligned + Ord> Ord for Unalign<T> {
    #[inline(always)]
    fn cmp(&self, other: &Unalign<T>) -> Ordering {
        Ord::cmp(self.deref(), other.deref())
    }
}

impl<T: Unaligned + PartialEq> PartialEq<Unalign<T>> for Unalign<T> {
    #[inline(always)]
    fn eq(&self, other: &Unalign<T>) -> bool {
        PartialEq::eq(self.deref(), other.deref())
    }
}

impl<T: Unaligned + Eq> Eq for Unalign<T> {}

impl<T: Unaligned + Hash> Hash for Unalign<T> {
    #[inline(always)]
    fn hash<H>(&self, state: &mut H)
    where
        H: Hasher,
    {
        self.deref().hash(state);
    }
}

impl<T: Unaligned + Debug> Debug for Unalign<T> {
    #[inline(always)]
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        Debug::fmt(self.deref(), f)
    }
}

impl<T: Unaligned + Display> Display for Unalign<T> {
    #[inline(always)]
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        Display::fmt(self.deref(), f)
    }
}

/// A wrapper type to construct uninitialized instances of `T`.
///
/// `MaybeUninit` is identical to the [standard library
/// `MaybeUninit`][core-maybe-uninit] type except that it supports unsized
/// types.
///
/// # Layout
///
/// The same layout guarantees and caveats apply to `MaybeUninit<T>` as apply to
/// the [standard library `MaybeUninit`][core-maybe-uninit] with one exception:
/// for `T: !Sized`, there is no single value for `T`'s size. Instead, for such
/// types, the following are guaranteed:
/// - Every [valid size][valid-size] for `T` is a valid size for
///   `MaybeUninit<T>` and vice versa
/// - Given `t: *const T` and `m: *const MaybeUninit<T>` with identical fat
///   pointer metadata, `t` and `m` address the same number of bytes (and
///   likewise for `*mut`)
///
/// [core-maybe-uninit]: core::mem::MaybeUninit
/// [valid-size]: crate::KnownLayout#what-is-a-valid-size
#[repr(transparent)]
#[doc(hidden)]
pub struct MaybeUninit<T: ?Sized + KnownLayout>(
    // SAFETY: `MaybeUninit<T>` has the same size as `T`, because (by invariant
    // on `T::MaybeUninit`) `T::MaybeUninit` has `T::LAYOUT` identical to `T`,
    // and because (invariant on `T::LAYOUT`) we can trust that `LAYOUT`
    // accurately reflects the layout of `T`. By invariant on `T::MaybeUninit`,
    // it admits uninitialized bytes in all positions. Because `MaybeUninit` is
    // marked `repr(transparent)`, these properties additionally hold true for
    // `Self`.
    T::MaybeUninit,
);

#[doc(hidden)]
impl<T: ?Sized + KnownLayout> MaybeUninit<T> {
    /// Constructs a `MaybeUninit<T>` initialized with the given value.
    #[inline(always)]
    pub fn new(val: T) -> Self
    where
        T: Sized,
        Self: Sized,
    {
        // SAFETY: It is valid to transmute `val` to `MaybeUninit<T>` because it
        // is both valid to transmute `val` to `T::MaybeUninit`, and it is valid
        // to transmute from `T::MaybeUninit` to `MaybeUninit<T>`.
        //
        // First, it is valid to transmute `val` to `T::MaybeUninit` because, by
        // invariant on `T::MaybeUninit`:
        // - For `T: Sized`, `T` and `T::MaybeUninit` have the same size.
        // - All byte sequences of the correct size are valid values of
        //   `T::MaybeUninit`.
        //
        // Second, it is additionally valid to transmute from `T::MaybeUninit`
        // to `MaybeUninit<T>`, because `MaybeUninit<T>` is a
        // `repr(transparent)` wrapper around `T::MaybeUninit`.
        //
        // These two transmutes are collapsed into one so we don't need to add a
        // `T::MaybeUninit: Sized` bound to this function's `where` clause.
        unsafe { crate::util::transmute_unchecked(val) }
    }

    /// Constructs an uninitialized `MaybeUninit<T>`.
    #[must_use]
    #[inline(always)]
    pub fn uninit() -> Self
    where
        T: Sized,
        Self: Sized,
    {
        let uninit = CoreMaybeUninit::<T>::uninit();
        // SAFETY: It is valid to transmute from `CoreMaybeUninit<T>` to
        // `MaybeUninit<T>` since they both admit uninitialized bytes in all
        // positions, and they have the same size (i.e., that of `T`).
        //
        // `MaybeUninit<T>` has the same size as `T`, because (by invariant on
        // `T::MaybeUninit`) `T::MaybeUninit` has `T::LAYOUT` identical to `T`,
        // and because (invariant on `T::LAYOUT`) we can trust that `LAYOUT`
        // accurately reflects the layout of `T`.
        //
        // `CoreMaybeUninit<T>` has the same size as `T` [1] and admits
        // uninitialized bytes in all positions.
        //
        // [1] Per https://doc.rust-lang.org/1.81.0/std/mem/union.MaybeUninit.html#layout-1:
        //
        //   `MaybeUninit<T>` is guaranteed to have the same size, alignment,
        //   and ABI as `T`
        unsafe { crate::util::transmute_unchecked(uninit) }
    }

    /// Creates a `Box<MaybeUninit<T>>`.
    ///
    /// This function is useful for allocating large, uninit values on the heap
    /// without ever creating a temporary instance of `Self` on the stack.
    ///
    /// # Errors
    ///
    /// Returns an error on allocation failure. Allocation failure is guaranteed
    /// never to cause a panic or an abort.
    #[cfg(feature = "alloc")]
    #[inline]
    pub fn new_boxed_uninit(meta: T::PointerMetadata) -> Result<Box<Self>, AllocError> {
        // SAFETY: `alloc::alloc::alloc` is a valid argument of `new_box`. The
        // referent of the pointer returned by `alloc` (and, consequently, the
        // `Box` derived from it) is a valid instance of `Self`, because `Self`
        // is `MaybeUninit` and thus admits arbitrary (un)initialized bytes.
        unsafe { crate::util::new_box(meta, alloc::alloc::alloc) }
    }

    /// Extracts the value from the `MaybeUninit<T>` container.
    ///
    /// # Safety
    ///
    /// The caller must ensure that `self` is in a bit-valid state. Depending
    /// on subsequent use, it may also need to be in a library-valid state.
    #[inline(always)]
    pub unsafe fn assume_init(self) -> T
    where
        T: Sized,
        Self: Sized,
    {
        // SAFETY: The caller guarantees that `self` is in a bit-valid state.
        unsafe { crate::util::transmute_unchecked(self) }
    }
}

impl<T: ?Sized + KnownLayout> fmt::Debug for MaybeUninit<T> {
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.pad(core::any::type_name::<Self>())
    }
}

#[cfg(test)]
mod tests {
    use core::panic::AssertUnwindSafe;

    use super::*;
    use crate::util::testutil::*;

    #[test]
    fn test_unalign() {
        // Test methods that don't depend on alignment.
        let mut u = Unalign::new(AU64(123));
        assert_eq!(u.get(), AU64(123));
        assert_eq!(u.into_inner(), AU64(123));
        assert_eq!(u.get_ptr(), <*const _>::cast::<AU64>(&u));
        assert_eq!(u.get_mut_ptr(), <*mut _>::cast::<AU64>(&mut u));
        u.set(AU64(321));
        assert_eq!(u.get(), AU64(321));

        // Test methods that depend on alignment (when alignment is satisfied).
        let mut u: Align<_, AU64> = Align::new(Unalign::new(AU64(123)));
        assert_eq!(u.t.try_deref().unwrap(), &AU64(123));
        assert_eq!(u.t.try_deref_mut().unwrap(), &mut AU64(123));
        // SAFETY: The `Align<_, AU64>` guarantees proper alignment.
        assert_eq!(unsafe { u.t.deref_unchecked() }, &AU64(123));
        // SAFETY: The `Align<_, AU64>` guarantees proper alignment.
        assert_eq!(unsafe { u.t.deref_mut_unchecked() }, &mut AU64(123));
        *u.t.try_deref_mut().unwrap() = AU64(321);
        assert_eq!(u.t.get(), AU64(321));

        // Test methods that depend on alignment (when alignment is not
        // satisfied).
        let mut u: ForceUnalign<_, AU64> = ForceUnalign::new(Unalign::new(AU64(123)));
        assert!(matches!(u.t.try_deref(), Err(AlignmentError { .. })));
        assert!(matches!(u.t.try_deref_mut(), Err(AlignmentError { .. })));

        // Test methods that depend on `T: Unaligned`.
        let mut u = Unalign::new(123u8);
        assert_eq!(u.try_deref(), Ok(&123));
        assert_eq!(u.try_deref_mut(), Ok(&mut 123));
        assert_eq!(u.deref(), &123);
        assert_eq!(u.deref_mut(), &mut 123);
        *u = 21;
        assert_eq!(u.get(), 21);

        // Test that some `Unalign` functions and methods are `const`.
        const _UNALIGN: Unalign<u64> = Unalign::new(0);
        const _UNALIGN_PTR: *const u64 = _UNALIGN.get_ptr();
        const _U64: u64 = _UNALIGN.into_inner();
        // Make sure all code is considered "used".
        //
        // TODO(https://github.com/rust-lang/rust/issues/104084): Remove this
        // attribute.
        #[allow(dead_code)]
        const _: () = {
            let x: Align<_, AU64> = Align::new(Unalign::new(AU64(123)));
            // Make sure that `deref_unchecked` is `const`.
            //
            // SAFETY: The `Align<_, AU64>` guarantees proper alignment.
            let au64 = unsafe { x.t.deref_unchecked() };
            match au64 {
                AU64(123) => {}
                _ => const_unreachable!(),
            }
        };
    }

    #[test]
    fn test_unalign_update() {
        let mut u = Unalign::new(AU64(123));
        u.update(|a| a.0 += 1);
        assert_eq!(u.get(), AU64(124));

        // Test that, even if the callback panics, the original is still
        // correctly overwritten. Use a `Box` so that Miri is more likely to
        // catch any unsoundness (which would likely result in two `Box`es for
        // the same heap object, which is the sort of thing that Miri would
        // probably catch).
        let mut u = Unalign::new(Box::new(AU64(123)));
        let res = std::panic::catch_unwind(AssertUnwindSafe(|| {
            u.update(|a| {
                a.0 += 1;
                panic!();
            })
        }));
        assert!(res.is_err());
        assert_eq!(u.into_inner(), Box::new(AU64(124)));

        // Test the align_of::<T>() == 1 optimization.
        let mut u = Unalign::new([0u8, 1]);
        u.update(|a| a[0] += 1);
        assert_eq!(u.get(), [1u8, 1]);
    }

    #[test]
    fn test_unalign_copy_clone() {
        // Test that `Copy` and `Clone` do not cause soundness issues. This test
        // is mainly meant to exercise UB that would be caught by Miri.

        // `u.t` is definitely not validly-aligned for `AU64`'s alignment of 8.
        let u = ForceUnalign::<_, AU64>::new(Unalign::new(AU64(123)));
        #[allow(clippy::clone_on_copy)]
        let v = u.t.clone();
        let w = u.t;
        assert_eq!(u.t.get(), v.get());
        assert_eq!(u.t.get(), w.get());
        assert_eq!(v.get(), w.get());
    }

    #[test]
    fn test_unalign_trait_impls() {
        let zero = Unalign::new(0u8);
        let one = Unalign::new(1u8);

        assert!(zero < one);
        assert_eq!(PartialOrd::partial_cmp(&zero, &one), Some(Ordering::Less));
        assert_eq!(Ord::cmp(&zero, &one), Ordering::Less);

        assert_ne!(zero, one);
        assert_eq!(zero, zero);
        assert!(!PartialEq::eq(&zero, &one));
        assert!(PartialEq::eq(&zero, &zero));

        fn hash<T: Hash>(t: &T) -> u64 {
            let mut h = std::collections::hash_map::DefaultHasher::new();
            t.hash(&mut h);
            h.finish()
        }

        assert_eq!(hash(&zero), hash(&0u8));
        assert_eq!(hash(&one), hash(&1u8));

        assert_eq!(format!("{:?}", zero), format!("{:?}", 0u8));
        assert_eq!(format!("{:?}", one), format!("{:?}", 1u8));
        assert_eq!(format!("{}", zero), format!("{}", 0u8));
        assert_eq!(format!("{}", one), format!("{}", 1u8));
    }

    #[test]
    #[allow(clippy::as_conversions)]
    fn test_maybe_uninit() {
        // int
        {
            let input = 42;
            let uninit = MaybeUninit::new(input);
            // SAFETY: `uninit` is in an initialized state
            let output = unsafe { uninit.assume_init() };
            assert_eq!(input, output);
        }

        // thin ref
        {
            let input = 42;
            let uninit = MaybeUninit::new(&input);
            // SAFETY: `uninit` is in an initialized state
            let output = unsafe { uninit.assume_init() };
            assert_eq!(&input as *const _, output as *const _);
            assert_eq!(input, *output);
        }

        // wide ref
        {
            let input = [1, 2, 3, 4];
            let uninit = MaybeUninit::new(&input[..]);
            // SAFETY: `uninit` is in an initialized state
            let output = unsafe { uninit.assume_init() };
            assert_eq!(&input[..] as *const _, output as *const _);
            assert_eq!(input, *output);
        }
    }
}