zerocopy/ref.rs

// Copyright 2024 The Fuchsia Authors
//
// Licensed under the 2-Clause BSD License <LICENSE-BSD or
// https://opensource.org/license/bsd-2-clause>, Apache License, Version 2.0
// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT
// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option.
// This file may not be copied, modified, or distributed except according to
// those terms.
use super::*;
use crate::pointer::{
    BecauseInvariantsEq, BecauseMutationCompatible, MutationCompatible, TransmuteFromPtr,
};

mod def {
    use core::marker::PhantomData;

    use crate::{
        ByteSlice, ByteSliceMut, CloneableByteSlice, CopyableByteSlice, IntoByteSlice,
        IntoByteSliceMut,
    };

    /// A typed reference derived from a byte slice.
    ///
    /// A `Ref<B, T>` is a reference to a `T` which is stored in a byte slice, `B`.
    /// Unlike a native reference (`&T` or `&mut T`), `Ref<B, T>` has the same
    /// mutability as the byte slice it was constructed from (`B`).
    ///
    /// # Examples
    ///
    /// `Ref` can be used to treat a sequence of bytes as a structured type, and
    /// to read and write the fields of that type as if the byte slice reference
    /// were simply a reference to that type.
    ///
    /// ```rust
    /// use zerocopy::*;
    /// # use zerocopy_derive::*;
    ///
    /// #[derive(FromBytes, IntoBytes, KnownLayout, Immutable, Unaligned)]
    /// #[repr(C)]
    /// struct UdpHeader {
    ///     src_port: [u8; 2],
    ///     dst_port: [u8; 2],
    ///     length: [u8; 2],
    ///     checksum: [u8; 2],
    /// }
    ///
    /// #[derive(FromBytes, IntoBytes, KnownLayout, Immutable, Unaligned)]
    /// #[repr(C, packed)]
    /// struct UdpPacket {
    ///     header: UdpHeader,
    ///     body: [u8],
    /// }
    ///
    /// impl UdpPacket {
    ///     pub fn parse<B: ByteSlice>(bytes: B) -> Option<Ref<B, UdpPacket>> {
    ///         Ref::from_bytes(bytes).ok()
    ///     }
    /// }
    /// ```
    pub struct Ref<B, T: ?Sized>(
        // INVARIANTS: The referent (via `.deref`, `.deref_mut`, `.into`) byte
        // slice is aligned to `T`'s alignment and its size corresponds to a
        // valid size for `T`.
        B,
        PhantomData<T>,
    );

    impl<B, T: ?Sized> Ref<B, T> {
        /// Constructs a new `Ref`.
        ///
        /// # Safety
        ///
        /// `bytes` dereferences (via [`deref`], [`deref_mut`], and [`into`]) to
        /// a byte slice which is aligned to `T`'s alignment and whose size is a
        /// valid size for `T`.
        ///
        /// [`deref`]: core::ops::Deref::deref
        /// [`deref_mut`]: core::ops::DerefMut::deref_mut
        /// [`into`]: core::convert::Into::into
        pub(crate) unsafe fn new_unchecked(bytes: B) -> Ref<B, T> {
            // INVARIANTS: The caller has promised that `bytes`'s referent is
            // validly-aligned and has a valid size.
            Ref(bytes, PhantomData)
        }
    }

    impl<B: ByteSlice, T: ?Sized> Ref<B, T> {
        /// Access the byte slice as a [`ByteSlice`].
        ///
        /// # Safety
        ///
        /// The caller promises not to call methods on the returned
        /// [`ByteSlice`] other than `ByteSlice` methods (for example, via
        /// `Any::downcast_ref`).
        ///
        /// `as_byte_slice` promises to return a `ByteSlice` whose referent is
        /// validly-aligned for `T` and has a valid size for `T`.
        pub(crate) unsafe fn as_byte_slice(&self) -> &impl ByteSlice {
            // INVARIANTS: The caller promises not to call methods other than
            // those on `ByteSlice`. Since `B: ByteSlice`, dereference stability
            // guarantees that calling `ByteSlice` methods will not change the
            // address or length of `self.0`'s referent.
            //
            // SAFETY: By invariant on `self.0`, the alignment and size
            // post-conditions are upheld.
            &self.0
        }
    }

    impl<B: ByteSliceMut, T: ?Sized> Ref<B, T> {
        /// Access the byte slice as a [`ByteSliceMut`].
        ///
        /// # Safety
        ///
        /// The caller promises not to call methods on the returned
        /// [`ByteSliceMut`] other than `ByteSliceMut` methods (for example, via
        /// `Any::downcast_mut`).
        ///
        /// `as_byte_slice_mut` promises to return a `ByteSliceMut` whose
        /// referent is validly-aligned for `T` and has a valid size for `T`.
        pub(crate) unsafe fn as_byte_slice_mut(&mut self) -> &mut impl ByteSliceMut {
            // INVARIANTS: The caller promises not to call methods other than
            // those on `ByteSliceMut`. Since `B: ByteSlice`, dereference
            // stability guarantees that calling `ByteSlice` methods will not
            // change the address or length of `self.0`'s referent.
            //
            // SAFETY: By invariant on `self.0`, the alignment and size
            // post-conditions are upheld.
            &mut self.0
        }
    }

    impl<'a, B: IntoByteSlice<'a>, T: ?Sized> Ref<B, T> {
        /// Access the byte slice as an [`IntoByteSlice`].
        ///
        /// # Safety
        ///
        /// The caller promises not to call methods on the returned
        /// [`IntoByteSlice`] other than `IntoByteSlice` methods (for example,
        /// via `Any::downcast_ref`).
        ///
        /// `into_byte_slice` promises to return an [`IntoByteSlice`] whose
        /// referent is validly-aligned for `T` and has a valid size for `T`.
        pub(crate) unsafe fn into_byte_slice(self) -> impl IntoByteSlice<'a> {
            // INVARIANTS: The caller promises not to call methods other than
            // those on `IntoByteSlice`. Since `B: ByteSlice`, dereference
            // stability guarantees that calling `ByteSlice` methods will not
            // change the address or length of `self.0`'s referent.
            //
            // SAFETY: By invariant on `self.0`, the alignment and size
            // post-conditions are upheld.
            self.0
        }
    }

    impl<'a, B: IntoByteSliceMut<'a>, T: ?Sized> Ref<B, T> {
        /// Access the byte slice as an [`IntoByteSliceMut`].
        ///
        /// # Safety
        ///
        /// The caller promises not to call methods on the returned
        /// [`IntoByteSliceMut`] other than `IntoByteSliceMut` methods (for
        /// example, via `Any::downcast_mut`).
        ///
        /// `into_byte_slice_mut` promises to return an [`IntoByteSliceMut`]
        /// whose referent is validly-aligned for `T` and has a valid size for
        /// `T`.
        pub(crate) unsafe fn into_byte_slice_mut(self) -> impl IntoByteSliceMut<'a> {
            // INVARIANTS: The caller promises not to call methods other than
            // those on `IntoByteSliceMut`. Since `B: ByteSlice`, dereference
            // stability guarantees that calling `ByteSlice` methods will not
            // change the address or length of `self.0`'s referent.
            //
            // SAFETY: By invariant on `self.0`, the alignment and size
            // post-conditions are upheld.
            self.0
        }
    }

    impl<B: CloneableByteSlice + Clone, T: ?Sized> Clone for Ref<B, T> {
        #[inline]
        fn clone(&self) -> Ref<B, T> {
            // INVARIANTS: Since `B: CloneableByteSlice`, `self.0.clone()` has
            // the same address and length as `self.0`. Since `self.0` upholds
            // the field invariants, so does `self.0.clone()`.
            Ref(self.0.clone(), PhantomData)
        }
    }

    // INVARIANTS: Since `B: CopyableByteSlice`, the copied `Ref`'s `.0` has the
    // same address and length as the original `Ref`'s `.0`. Since the original
    // upholds the field invariants, so does the copy.
    impl<B: CopyableByteSlice + Copy, T: ?Sized> Copy for Ref<B, T> {}
}

#[allow(unreachable_pub)] // This is a false positive on our MSRV toolchain.
pub use def::Ref;

use crate::pointer::{
    invariant::{Aligned, BecauseExclusive, Initialized, Unaligned, Valid},
    BecauseRead, PtrInner,
};

impl<B, T> Ref<B, T>
where
    B: ByteSlice,
{
    #[must_use = "has no side effects"]
    pub(crate) fn sized_from(bytes: B) -> Result<Ref<B, T>, CastError<B, T>> {
        if bytes.len() != mem::size_of::<T>() {
            return Err(SizeError::new(bytes).into());
        }
        if let Err(err) = util::validate_aligned_to::<_, T>(bytes.deref()) {
            return Err(err.with_src(bytes).into());
        }

        // SAFETY: We just validated size and alignment.
        Ok(unsafe { Ref::new_unchecked(bytes) })
    }
}

impl<B, T> Ref<B, T>
where
    B: SplitByteSlice,
{
    #[must_use = "has no side effects"]
    pub(crate) fn sized_from_prefix(bytes: B) -> Result<(Ref<B, T>, B), CastError<B, T>> {
        if bytes.len() < mem::size_of::<T>() {
            return Err(SizeError::new(bytes).into());
        }
        if let Err(err) = util::validate_aligned_to::<_, T>(bytes.deref()) {
            return Err(err.with_src(bytes).into());
        }
        let (bytes, suffix) =
            bytes.split_at(mem::size_of::<T>()).map_err(|b| SizeError::new(b).into())?;
        // SAFETY: We just validated alignment and that `bytes` is at least as
        // large as `T`. `bytes.split_at(mem::size_of::<T>())?` ensures that the
        // new `bytes` is exactly the size of `T`. By safety postcondition on
        // `SplitByteSlice::split_at` we can rely on `split_at` to produce the
        // correct `bytes` and `suffix`.
        let r = unsafe { Ref::new_unchecked(bytes) };
        Ok((r, suffix))
    }

    #[must_use = "has no side effects"]
    pub(crate) fn sized_from_suffix(bytes: B) -> Result<(B, Ref<B, T>), CastError<B, T>> {
        let bytes_len = bytes.len();
        let split_at = if let Some(split_at) = bytes_len.checked_sub(mem::size_of::<T>()) {
            split_at
        } else {
            return Err(SizeError::new(bytes).into());
        };
        let (prefix, bytes) = bytes.split_at(split_at).map_err(|b| SizeError::new(b).into())?;
        if let Err(err) = util::validate_aligned_to::<_, T>(bytes.deref()) {
            return Err(err.with_src(bytes).into());
        }
        // SAFETY: Since `split_at` is defined as `bytes_len - size_of::<T>()`,
        // the `bytes` which results from `let (prefix, bytes) =
        // bytes.split_at(split_at)?` has length `size_of::<T>()`. After
        // constructing `bytes`, we validate that it has the proper alignment.
        // By safety postcondition on `SplitByteSlice::split_at` we can rely on
        // `split_at` to produce the correct `prefix` and `bytes`.
        let r = unsafe { Ref::new_unchecked(bytes) };
        Ok((prefix, r))
    }
}

impl<B, T> Ref<B, T>
where
    B: ByteSlice,
    T: KnownLayout + Immutable + ?Sized,
{
    /// Constructs a `Ref` from a byte slice.
    ///
    /// If the length of `source` is not a [valid size of `T`][valid-size], or
    /// if `source` is not appropriately aligned for `T`, this returns `Err`. If
    /// [`T: Unaligned`][t-unaligned], you can [infallibly discard the alignment
    /// error][size-error-from].
    ///
    /// `T` may be a sized type, a slice, or a [slice DST][slice-dst].
    ///
    /// [valid-size]: crate::KnownLayout#what-is-a-valid-size
    /// [t-unaligned]: crate::Unaligned
    /// [size-error-from]: error/struct.SizeError.html#method.from-1
    /// [slice-dst]: KnownLayout#dynamically-sized-types
    ///
    /// # Compile-Time Assertions
    ///
    /// This method cannot yet be used on unsized types whose dynamically-sized
    /// component is zero-sized. Attempting to use this method on such types
    /// results in a compile-time assertion error; e.g.:
    ///
    /// ```compile_fail,E0080
    /// use zerocopy::*;
    /// # use zerocopy_derive::*;
    ///
    /// #[derive(Immutable, KnownLayout)]
    /// #[repr(C)]
    /// struct ZSTy {
    ///     leading_sized: u16,
    ///     trailing_dst: [()],
    /// }
    ///
    /// let _ = Ref::<_, ZSTy>::from_bytes(&b"UU"[..]); // ⚠ Compile Error!
    /// ```
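    ///
    /// # Examples
    ///
    /// An illustrative sketch of typical usage; the `[u8; 4]` target type and
    /// the literal bytes are chosen purely for demonstration:
    ///
    /// ```
    /// use zerocopy::Ref;
    ///
    /// let bytes: &[u8] = &[0, 1, 2, 3];
    /// let r = Ref::<_, [u8; 4]>::from_bytes(bytes).unwrap();
    /// assert_eq!(*r, [0, 1, 2, 3]);
    ///
    /// // A wrong-sized source is rejected rather than silently truncated.
    /// assert!(Ref::<_, [u8; 4]>::from_bytes(&bytes[..3]).is_err());
    /// ```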
    #[must_use = "has no side effects"]
    #[inline]
    pub fn from_bytes(source: B) -> Result<Ref<B, T>, CastError<B, T>> {
        static_assert_dst_is_not_zst!(T);
        if let Err(e) =
            Ptr::from_ref(source.deref()).try_cast_into_no_leftover::<T, BecauseImmutable>(None)
        {
            return Err(e.with_src(()).with_src(source));
        }
        // SAFETY: `try_cast_into_no_leftover` validates size and alignment.
        Ok(unsafe { Ref::new_unchecked(source) })
    }
}

impl<B, T> Ref<B, T>
where
    B: SplitByteSlice,
    T: KnownLayout + Immutable + ?Sized,
{
    /// Constructs a `Ref` from the prefix of a byte slice.
    ///
    /// This method computes the [largest possible size of `T`][valid-size] that
    /// can fit in the leading bytes of `source`, then attempts to return both a
    /// `Ref` to those bytes, and a reference to the remaining bytes. If there
    /// are insufficient bytes, or if `source` is not appropriately aligned,
    /// this returns `Err`. If [`T: Unaligned`][t-unaligned], you can
    /// [infallibly discard the alignment error][size-error-from].
    ///
    /// `T` may be a sized type, a slice, or a [slice DST][slice-dst].
    ///
    /// [valid-size]: crate::KnownLayout#what-is-a-valid-size
    /// [t-unaligned]: crate::Unaligned
    /// [size-error-from]: error/struct.SizeError.html#method.from-1
    /// [slice-dst]: KnownLayout#dynamically-sized-types
    ///
    /// # Compile-Time Assertions
    ///
    /// This method cannot yet be used on unsized types whose dynamically-sized
    /// component is zero-sized. Attempting to use this method on such types
    /// results in a compile-time assertion error; e.g.:
    ///
    /// ```compile_fail,E0080
    /// use zerocopy::*;
    /// # use zerocopy_derive::*;
    ///
    /// #[derive(Immutable, KnownLayout)]
    /// #[repr(C)]
    /// struct ZSTy {
    ///     leading_sized: u16,
    ///     trailing_dst: [()],
    /// }
    ///
    /// let _ = Ref::<_, ZSTy>::from_prefix(&b"UU"[..]); // ⚠ Compile Error!
    /// ```
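    ///
    /// # Examples
    ///
    /// An illustrative sketch; the `[u8; 4]` target type is chosen purely for
    /// demonstration:
    ///
    /// ```
    /// use zerocopy::Ref;
    ///
    /// let bytes: &[u8] = &[0, 1, 2, 3, 4, 5];
    /// let (r, rest) = Ref::<_, [u8; 4]>::from_prefix(bytes).unwrap();
    /// assert_eq!(*r, [0, 1, 2, 3]);
    /// assert_eq!(rest, &[4, 5][..]);
    /// ```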
    #[must_use = "has no side effects"]
    #[inline]
    pub fn from_prefix(source: B) -> Result<(Ref<B, T>, B), CastError<B, T>> {
        static_assert_dst_is_not_zst!(T);
        let remainder = match Ptr::from_ref(source.deref())
            .try_cast_into::<T, BecauseImmutable>(CastType::Prefix, None)
        {
            Ok((_, remainder)) => remainder,
            Err(e) => {
                return Err(e.with_src(()).with_src(source));
            }
        };

        // SAFETY: `remainder` is constructed as a subset of `source`, and so it
        // cannot have a larger size than `source`. Both of their `len` methods
        // measure bytes (`source` deref's to `[u8]`, and `remainder` is a
        // `Ptr<[u8]>`), so `source.len() >= remainder.len()`. Thus, this cannot
        // underflow.
        #[allow(unstable_name_collisions)]
        let split_at = unsafe { source.len().unchecked_sub(remainder.len()) };
        let (bytes, suffix) = source.split_at(split_at).map_err(|b| SizeError::new(b).into())?;
        // SAFETY: `try_cast_into` validates size and alignment, and returns a
        // `split_at` that indicates how many bytes of `source` correspond to a
        // valid `T`. By safety postcondition on `SplitByteSlice::split_at` we
        // can rely on `split_at` to produce the correct `source` and `suffix`.
        let r = unsafe { Ref::new_unchecked(bytes) };
        Ok((r, suffix))
    }

    /// Constructs a `Ref` from the suffix of a byte slice.
    ///
    /// This method computes the [largest possible size of `T`][valid-size] that
    /// can fit in the trailing bytes of `source`, then attempts to return both
    /// a `Ref` to those bytes, and a reference to the preceding bytes. If there
    /// are insufficient bytes, or if that suffix of `source` is not
    /// appropriately aligned, this returns `Err`. If [`T:
    /// Unaligned`][t-unaligned], you can [infallibly discard the alignment
    /// error][size-error-from].
    ///
    /// `T` may be a sized type, a slice, or a [slice DST][slice-dst].
    ///
    /// [valid-size]: crate::KnownLayout#what-is-a-valid-size
    /// [t-unaligned]: crate::Unaligned
    /// [size-error-from]: error/struct.SizeError.html#method.from-1
    /// [slice-dst]: KnownLayout#dynamically-sized-types
    ///
    /// # Compile-Time Assertions
    ///
    /// This method cannot yet be used on unsized types whose dynamically-sized
    /// component is zero-sized. Attempting to use this method on such types
    /// results in a compile-time assertion error; e.g.:
    ///
    /// ```compile_fail,E0080
    /// use zerocopy::*;
    /// # use zerocopy_derive::*;
    ///
    /// #[derive(Immutable, KnownLayout)]
    /// #[repr(C)]
    /// struct ZSTy {
    ///     leading_sized: u16,
    ///     trailing_dst: [()],
    /// }
    ///
    /// let _ = Ref::<_, ZSTy>::from_suffix(&b"UU"[..]); // ⚠ Compile Error!
    /// ```
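    ///
    /// # Examples
    ///
    /// An illustrative sketch; the `[u8; 4]` target type is chosen purely for
    /// demonstration:
    ///
    /// ```
    /// use zerocopy::Ref;
    ///
    /// let bytes: &[u8] = &[0, 1, 2, 3, 4, 5];
    /// let (rest, r) = Ref::<_, [u8; 4]>::from_suffix(bytes).unwrap();
    /// assert_eq!(rest, &[0, 1][..]);
    /// assert_eq!(*r, [2, 3, 4, 5]);
    /// ```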
    #[must_use = "has no side effects"]
    #[inline]
    pub fn from_suffix(source: B) -> Result<(B, Ref<B, T>), CastError<B, T>> {
        static_assert_dst_is_not_zst!(T);
        let remainder = match Ptr::from_ref(source.deref())
            .try_cast_into::<T, BecauseImmutable>(CastType::Suffix, None)
        {
            Ok((_, remainder)) => remainder,
            Err(e) => {
                let e = e.with_src(());
                return Err(e.with_src(source));
            }
        };

        let split_at = remainder.len();
        let (prefix, bytes) = source.split_at(split_at).map_err(|b| SizeError::new(b).into())?;
        // SAFETY: `try_cast_into` validates size and alignment, and returns a
        // `split_at` that indicates how many bytes of `source` correspond to a
        // valid `T`. By safety postcondition on `SplitByteSlice::split_at` we
        // can rely on `split_at` to produce the correct `prefix` and `bytes`.
        let r = unsafe { Ref::new_unchecked(bytes) };
        Ok((prefix, r))
    }
}

impl<B, T> Ref<B, T>
where
    B: ByteSlice,
    T: KnownLayout<PointerMetadata = usize> + Immutable + ?Sized,
{
    /// Constructs a `Ref` from the given bytes with DST length equal to `count`
    /// without copying.
    ///
    /// This method attempts to return a `Ref` to `source` interpreted as a `T`
    /// with `count` trailing elements. If the length of `source` is not equal
    /// to the size of `T` with `count` elements, or if `source` is not
    /// appropriately aligned, this returns `Err`. If [`T:
    /// Unaligned`][t-unaligned], you can [infallibly discard the alignment
    /// error][size-error-from].
    ///
    /// [t-unaligned]: crate::Unaligned
    /// [size-error-from]: error/struct.SizeError.html#method.from-1
    ///
    /// # Compile-Time Assertions
    ///
    /// This method cannot yet be used on unsized types whose dynamically-sized
    /// component is zero-sized. Attempting to use this method on such types
    /// results in a compile-time assertion error; e.g.:
    ///
    /// ```compile_fail,E0080
    /// use zerocopy::*;
    /// # use zerocopy_derive::*;
    ///
    /// #[derive(Immutable, KnownLayout)]
    /// #[repr(C)]
    /// struct ZSTy {
    ///     leading_sized: u16,
    ///     trailing_dst: [()],
    /// }
    ///
    /// let _ = Ref::<_, ZSTy>::from_bytes_with_elems(&b"UU"[..], 42); // ⚠ Compile Error!
    /// ```
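    ///
    /// # Examples
    ///
    /// An illustrative sketch using the plain slice DST `[u8]`; the element
    /// count and bytes are chosen purely for demonstration:
    ///
    /// ```
    /// use zerocopy::Ref;
    ///
    /// let bytes: &[u8] = &[0, 1, 2, 3];
    /// let r = Ref::<_, [u8]>::from_bytes_with_elems(bytes, 4).unwrap();
    /// assert_eq!(&*r, bytes);
    ///
    /// // The source length must match the requested element count exactly.
    /// assert!(Ref::<_, [u8]>::from_bytes_with_elems(bytes, 3).is_err());
    /// ```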
    #[inline]
    pub fn from_bytes_with_elems(source: B, count: usize) -> Result<Ref<B, T>, CastError<B, T>> {
        static_assert_dst_is_not_zst!(T);
        let expected_len = match T::size_for_metadata(count) {
            Some(len) => len,
            None => return Err(SizeError::new(source).into()),
        };
        if source.len() != expected_len {
            return Err(SizeError::new(source).into());
        }
        Self::from_bytes(source)
    }
}

impl<B, T> Ref<B, T>
where
    B: SplitByteSlice,
    T: KnownLayout<PointerMetadata = usize> + Immutable + ?Sized,
{
    /// Constructs a `Ref` from the prefix of the given bytes with DST
    /// length equal to `count` without copying.
    ///
    /// This method attempts to return a `Ref` to the prefix of `source`
    /// interpreted as a `T` with `count` trailing elements, and a reference to
    /// the remaining bytes. If there are insufficient bytes, or if `source` is
    /// not appropriately aligned, this returns `Err`. If [`T:
    /// Unaligned`][t-unaligned], you can [infallibly discard the alignment
    /// error][size-error-from].
    ///
    /// [t-unaligned]: crate::Unaligned
    /// [size-error-from]: error/struct.SizeError.html#method.from-1
    ///
    /// # Compile-Time Assertions
    ///
    /// This method cannot yet be used on unsized types whose dynamically-sized
    /// component is zero-sized. Attempting to use this method on such types
    /// results in a compile-time assertion error; e.g.:
    ///
    /// ```compile_fail,E0080
    /// use zerocopy::*;
    /// # use zerocopy_derive::*;
    ///
    /// #[derive(Immutable, KnownLayout)]
    /// #[repr(C)]
    /// struct ZSTy {
    ///     leading_sized: u16,
    ///     trailing_dst: [()],
    /// }
    ///
    /// let _ = Ref::<_, ZSTy>::from_prefix_with_elems(&b"UU"[..], 42); // ⚠ Compile Error!
    /// ```
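    ///
    /// # Examples
    ///
    /// An illustrative sketch using the plain slice DST `[u8]`; the element
    /// count is chosen purely for demonstration:
    ///
    /// ```
    /// use zerocopy::Ref;
    ///
    /// let bytes: &[u8] = &[0, 1, 2, 3, 4];
    /// let (r, rest) = Ref::<_, [u8]>::from_prefix_with_elems(bytes, 3).unwrap();
    /// assert_eq!(&*r, &[0, 1, 2][..]);
    /// assert_eq!(rest, &[3, 4][..]);
    /// ```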
    #[inline]
    pub fn from_prefix_with_elems(
        source: B,
        count: usize,
    ) -> Result<(Ref<B, T>, B), CastError<B, T>> {
        static_assert_dst_is_not_zst!(T);
        let expected_len = match T::size_for_metadata(count) {
            Some(len) => len,
            None => return Err(SizeError::new(source).into()),
        };
        let (prefix, bytes) = source.split_at(expected_len).map_err(SizeError::new)?;
        Self::from_bytes(prefix).map(move |l| (l, bytes))
    }

    /// Constructs a `Ref` from the suffix of the given bytes with DST length
    /// equal to `count` without copying.
    ///
    /// This method attempts to return a `Ref` to the suffix of `source`
    /// interpreted as a `T` with `count` trailing elements, and a reference to
    /// the preceding bytes. If there are insufficient bytes, or if that suffix
    /// of `source` is not appropriately aligned, this returns `Err`. If [`T:
    /// Unaligned`][t-unaligned], you can [infallibly discard the alignment
    /// error][size-error-from].
    ///
    /// [t-unaligned]: crate::Unaligned
    /// [size-error-from]: error/struct.SizeError.html#method.from-1
    ///
    /// # Compile-Time Assertions
    ///
    /// This method cannot yet be used on unsized types whose dynamically-sized
    /// component is zero-sized. Attempting to use this method on such types
    /// results in a compile-time assertion error; e.g.:
    ///
    /// ```compile_fail,E0080
    /// use zerocopy::*;
    /// # use zerocopy_derive::*;
    ///
    /// #[derive(Immutable, KnownLayout)]
    /// #[repr(C)]
    /// struct ZSTy {
    ///     leading_sized: u16,
    ///     trailing_dst: [()],
    /// }
    ///
    /// let _ = Ref::<_, ZSTy>::from_suffix_with_elems(&b"UU"[..], 42); // ⚠ Compile Error!
    /// ```
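    ///
    /// # Examples
    ///
    /// An illustrative sketch using the plain slice DST `[u8]`; the element
    /// count is chosen purely for demonstration:
    ///
    /// ```
    /// use zerocopy::Ref;
    ///
    /// let bytes: &[u8] = &[0, 1, 2, 3, 4];
    /// let (rest, r) = Ref::<_, [u8]>::from_suffix_with_elems(bytes, 3).unwrap();
    /// assert_eq!(rest, &[0, 1][..]);
    /// assert_eq!(&*r, &[2, 3, 4][..]);
    /// ```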
    #[inline]
    pub fn from_suffix_with_elems(
        source: B,
        count: usize,
    ) -> Result<(B, Ref<B, T>), CastError<B, T>> {
        static_assert_dst_is_not_zst!(T);
        let expected_len = match T::size_for_metadata(count) {
            Some(len) => len,
            None => return Err(SizeError::new(source).into()),
        };
        let split_at = if let Some(split_at) = source.len().checked_sub(expected_len) {
            split_at
        } else {
            return Err(SizeError::new(source).into());
        };
        // SAFETY: The preceding `source.len().checked_sub(expected_len)`
        // guarantees that `split_at` is in-bounds.
        let (bytes, suffix) = unsafe { source.split_at_unchecked(split_at) };
        Self::from_bytes(suffix).map(move |l| (bytes, l))
    }
}

impl<'a, B, T> Ref<B, T>
where
    B: 'a + IntoByteSlice<'a>,
    T: FromBytes + KnownLayout + Immutable + ?Sized,
{
    /// Converts this `Ref` into a reference.
    ///
    /// `into_ref` consumes the `Ref`, and returns a reference to `T`.
    ///
    /// Note: this is an associated function, which means that you have to call
    /// it as `Ref::into_ref(r)` instead of `r.into_ref()`. This is so that
    /// there is no conflict with a method on the inner type.
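    ///
    /// # Examples
    ///
    /// An illustrative sketch; the `[u8; 4]` target type is chosen purely for
    /// demonstration:
    ///
    /// ```
    /// use zerocopy::Ref;
    ///
    /// let bytes: &[u8] = &[0, 1, 2, 3];
    /// let r = Ref::<_, [u8; 4]>::from_bytes(bytes).unwrap();
    /// // The returned reference borrows from the underlying byte slice, so it
    /// // may outlive `r` itself.
    /// let array: &[u8; 4] = Ref::into_ref(r);
    /// assert_eq!(array, &[0, 1, 2, 3]);
    /// ```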
    #[must_use = "has no side effects"]
    #[inline(always)]
    pub fn into_ref(r: Self) -> &'a T {
        // Presumably unreachable, since we've guarded each constructor of `Ref`.
        static_assert_dst_is_not_zst!(T);

        // SAFETY: We don't call any methods on `b` other than those provided by
        // `IntoByteSlice`.
        let b = unsafe { r.into_byte_slice() };
        let b = b.into_byte_slice();

        if let crate::layout::SizeInfo::Sized { .. } = T::LAYOUT.size_info {
            let ptr = Ptr::from_ref(b);
            // SAFETY: We just checked that `T: Sized`. By invariant on `r`,
            // `b`'s size is equal to `size_of::<T>()`.
            let ptr = unsafe { cast_for_sized::<T, _, _, _>(ptr) };

            // SAFETY: None of the preceding transformations modifies the
            // address of the pointer, and by invariant on `r`, we know that it
            // is validly-aligned.
            let ptr = unsafe { ptr.assume_alignment::<Aligned>() };
            return ptr.as_ref();
        }

        // PANICS: By post-condition on `into_byte_slice`, `b`'s size and
        // alignment are valid for `T`. By post-condition, `b.into_byte_slice()`
        // produces a byte slice with identical address and length to that
        // produced by `b.deref()`.
        let ptr = Ptr::from_ref(b.into_byte_slice())
            .try_cast_into_no_leftover::<T, BecauseImmutable>(None)
            .expect("zerocopy internal error: into_ref should be infallible");
        let ptr = ptr.recall_validity();
        ptr.as_ref()
    }
}

impl<'a, B, T> Ref<B, T>
where
    B: 'a + IntoByteSliceMut<'a>,
    T: FromBytes + IntoBytes + KnownLayout + ?Sized,
{
    /// Converts this `Ref` into a mutable reference.
    ///
    /// `into_mut` consumes the `Ref`, and returns a mutable reference to `T`.
    ///
    /// Note: this is an associated function, which means that you have to call
    /// it as `Ref::into_mut(r)` instead of `r.into_mut()`. This is so that
    /// there is no conflict with a method on the inner type.
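    ///
    /// # Examples
    ///
    /// An illustrative sketch; the `[u8; 4]` target type is chosen purely for
    /// demonstration:
    ///
    /// ```
    /// use zerocopy::Ref;
    ///
    /// let mut bytes = [0u8, 1, 2, 3];
    /// let r = Ref::<&mut [u8], [u8; 4]>::from_bytes(&mut bytes[..]).unwrap();
    /// let array: &mut [u8; 4] = Ref::into_mut(r);
    /// array[0] = 255;
    /// assert_eq!(bytes, [255, 1, 2, 3]);
    /// ```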
    #[must_use = "has no side effects"]
    #[inline(always)]
    pub fn into_mut(r: Self) -> &'a mut T {
        // Presumably unreachable, since we've guarded each constructor of `Ref`.
        static_assert_dst_is_not_zst!(T);

        // SAFETY: We don't call any methods on `b` other than those provided by
        // `IntoByteSliceMut`.
        let b = unsafe { r.into_byte_slice_mut() };
        let b = b.into_byte_slice_mut();

        if let crate::layout::SizeInfo::Sized { .. } = T::LAYOUT.size_info {
            let ptr = Ptr::from_mut(b);
            // SAFETY: We just checked that `T: Sized`. By invariant on `r`,
            // `b`'s size is equal to `size_of::<T>()`.
            let ptr = unsafe {
                cast_for_sized::<
                    T,
                    _,
                    (BecauseRead, (BecauseExclusive, BecauseExclusive)),
                    (BecauseMutationCompatible, BecauseInvariantsEq),
                >(ptr)
            };

            // SAFETY: None of the preceding transformations modifies the
            // address of the pointer, and by invariant on `r`, we know that it
            // is validly-aligned.
            let ptr = unsafe { ptr.assume_alignment::<Aligned>() };
            return ptr.as_mut();
        }

        // PANICS: By post-condition on `into_byte_slice_mut`, `b`'s size and
        // alignment are valid for `T`. By post-condition,
        // `b.into_byte_slice_mut()` produces a byte slice with identical
        // address and length to that produced by `b.deref_mut()`.
        let ptr = Ptr::from_mut(b.into_byte_slice_mut())
            .try_cast_into_no_leftover::<T, BecauseExclusive>(None)
            .expect("zerocopy internal error: into_mut should be infallible");
        let ptr = ptr.recall_validity::<_, (_, (_, _))>();
        ptr.as_mut()
    }
}

impl<B, T> Ref<B, T>
where
    B: ByteSlice,
    T: ?Sized,
{
    /// Gets the underlying bytes.
    ///
    /// Note: this is an associated function, which means that you have to call
    /// it as `Ref::bytes(r)` instead of `r.bytes()`. This is so that there is
    /// no conflict with a method on the inner type.
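    ///
    /// # Examples
    ///
    /// An illustrative sketch; the `[u8; 4]` target type is chosen purely for
    /// demonstration:
    ///
    /// ```
    /// use zerocopy::Ref;
    ///
    /// let bytes: &[u8] = &[0, 1, 2, 3];
    /// let r = Ref::<_, [u8; 4]>::from_bytes(bytes).unwrap();
    /// assert_eq!(Ref::bytes(&r), bytes);
    /// ```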
    #[inline]
    pub fn bytes(r: &Self) -> &[u8] {
        // SAFETY: We don't call any methods on `b` other than those provided by
        // `ByteSlice`.
        unsafe { r.as_byte_slice().deref() }
    }
}

impl<B, T> Ref<B, T>
where
    B: ByteSliceMut,
    T: ?Sized,
{
    /// Gets the underlying bytes mutably.
    ///
    /// Note: this is an associated function, which means that you have to call
    /// it as `Ref::bytes_mut(r)` instead of `r.bytes_mut()`. This is so that
    /// there is no conflict with a method on the inner type.
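    ///
    /// # Examples
    ///
    /// An illustrative sketch; the `[u8; 2]` target type is chosen purely for
    /// demonstration:
    ///
    /// ```
    /// use zerocopy::Ref;
    ///
    /// let mut bytes = [0u8, 0];
    /// let mut r = Ref::<_, [u8; 2]>::from_bytes(&mut bytes[..]).unwrap();
    /// Ref::bytes_mut(&mut r).copy_from_slice(&[7, 7]);
    /// assert_eq!(*r, [7, 7]);
    /// ```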
    #[inline]
    pub fn bytes_mut(r: &mut Self) -> &mut [u8] {
        // SAFETY: We don't call any methods on `b` other than those provided by
        // `ByteSliceMut`.
        unsafe { r.as_byte_slice_mut().deref_mut() }
    }
}

impl<B, T> Ref<B, T>
where
    B: ByteSlice,
    T: FromBytes,
{
    /// Reads a copy of `T`.
    ///
    /// Note: this is an associated function, which means that you have to call
    /// it as `Ref::read(r)` instead of `r.read()`. This is so that there is no
    /// conflict with a method on the inner type.
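    ///
    /// # Examples
    ///
    /// An illustrative sketch; the `u8` target type is chosen purely for
    /// demonstration:
    ///
    /// ```
    /// use zerocopy::Ref;
    ///
    /// let bytes: &[u8] = &[42];
    /// let r = Ref::<_, u8>::from_bytes(bytes).unwrap();
    /// assert_eq!(Ref::read(&r), 42);
    /// ```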
    #[must_use = "has no side effects"]
    #[inline]
    pub fn read(r: &Self) -> T {
        // SAFETY: We don't call any methods on `b` other than those provided by
        // `ByteSlice`.
        let b = unsafe { r.as_byte_slice() };

        // SAFETY: By postcondition on `as_byte_slice`, we know that `b` is a
        // valid size and alignment for `T`. By safety invariant on `ByteSlice`,
        // we know that this is preserved via `.deref()`. Because `T:
        // FromBytes`, it is sound to interpret these bytes as a `T`.
        unsafe { ptr::read(b.deref().as_ptr().cast::<T>()) }
    }
}

impl<B, T> Ref<B, T>
where
    B: ByteSliceMut,
    T: IntoBytes,
{
    /// Writes the bytes of `t` and then forgets `t`.
    ///
    /// Note: this is an associated function, which means that you have to call
    /// it as `Ref::write(r, t)` instead of `r.write(t)`. This is so that there
    /// is no conflict with a method on the inner type.
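    ///
    /// # Examples
    ///
    /// An illustrative sketch; the `u8` target type is chosen purely for
    /// demonstration:
    ///
    /// ```
    /// use zerocopy::Ref;
    ///
    /// let mut bytes = [0u8];
    /// let mut r = Ref::<_, u8>::from_bytes(&mut bytes[..]).unwrap();
    /// Ref::write(&mut r, 42);
    /// assert_eq!(bytes, [42]);
    /// ```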
    #[inline]
    pub fn write(r: &mut Self, t: T) {
        // SAFETY: We don't call any methods on `b` other than those provided by
        // `ByteSliceMut`.
        let b = unsafe { r.as_byte_slice_mut() };

        // SAFETY: By postcondition on `as_byte_slice_mut`, we know that `b` is
        // a valid size and alignment for `T`. By safety invariant on
        // `ByteSlice`, we know that this is preserved via `.deref()`. Writing
        // `t` to the buffer will allow all of the bytes of `t` to be accessed
        // as a `[u8]`, but because `T: IntoBytes`, we know that this is sound.
        unsafe { ptr::write(b.deref_mut().as_mut_ptr().cast::<T>(), t) }
    }
}

impl<B, T> Deref for Ref<B, T>
where
    B: ByteSlice,
    T: FromBytes + KnownLayout + Immutable + ?Sized,
{
    type Target = T;
    #[inline]
    fn deref(&self) -> &T {
        // Presumably unreachable, since we've guarded each constructor of `Ref`.
        static_assert_dst_is_not_zst!(T);

        // SAFETY: We don't call any methods on `b` other than those provided by
        // `ByteSlice`.
        let b = unsafe { self.as_byte_slice() };
        let b = b.deref();

        if let crate::layout::SizeInfo::Sized { .. } = T::LAYOUT.size_info {
            let ptr = Ptr::from_ref(b);
            // SAFETY: We just checked that `T: Sized`. By invariant on `r`,
            // `b`'s size is equal to `size_of::<T>()`.
            let ptr = unsafe { cast_for_sized::<T, _, _, _>(ptr) };

            // SAFETY: None of the preceding transformations modifies the
            // address of the pointer, and by invariant on `r`, we know that it
            // is validly-aligned.
            let ptr = unsafe { ptr.assume_alignment::<Aligned>() };
            return ptr.as_ref();
        }

        // PANICS: By postcondition on `as_byte_slice`, `b`'s size and alignment
        // are valid for `T`, and by invariant on `ByteSlice`, these are
        // preserved through `.deref()`, so this `unwrap` will not panic.
        let ptr = Ptr::from_ref(b)
            .try_cast_into_no_leftover::<T, BecauseImmutable>(None)
            .expect("zerocopy internal error: Deref::deref should be infallible");
        let ptr = ptr.recall_validity();
        ptr.as_ref()
    }
}

impl<B, T> DerefMut for Ref<B, T>
where
    B: ByteSliceMut,
    // FIXME(#251): We can't remove `Immutable` here because it's required by
    // the impl of `Deref`, which is a super-trait of `DerefMut`. Maybe we can
    // add a separate inherent method for this?
    T: FromBytes + IntoBytes + KnownLayout + Immutable + ?Sized,
{
    #[inline]
    fn deref_mut(&mut self) -> &mut T {
        // Presumably unreachable, since we've guarded each constructor of `Ref`.
        static_assert_dst_is_not_zst!(T);

        // SAFETY: We don't call any methods on `b` other than those provided by
        // `ByteSliceMut`.
        let b = unsafe { self.as_byte_slice_mut() };
        let b = b.deref_mut();

        if let crate::layout::SizeInfo::Sized { .. } = T::LAYOUT.size_info {
            let ptr = Ptr::from_mut(b);
            // SAFETY: We just checked that `T: Sized`. By invariant on `r`,
            // `b`'s size is equal to `size_of::<T>()`.
            let ptr = unsafe {
                cast_for_sized::<
                    T,
                    _,
                    (BecauseRead, (BecauseExclusive, BecauseExclusive)),
                    (BecauseMutationCompatible, BecauseInvariantsEq),
                >(ptr)
            };

            // SAFETY: None of the preceding transformations modifies the
            // address of the pointer, and by invariant on `r`, we know that it
            // is validly-aligned.
            let ptr = unsafe { ptr.assume_alignment::<Aligned>() };
            return ptr.as_mut();
        }

        // PANICS: By postcondition on `as_byte_slice_mut`, `b`'s size and
        // alignment are valid for `T`, and by invariant on `ByteSlice`, these
        // are preserved through `.deref_mut()`, so this `unwrap` will not
        // panic.
        let ptr = Ptr::from_mut(b)
            .try_cast_into_no_leftover::<T, BecauseExclusive>(None)
            .expect("zerocopy internal error: DerefMut::deref_mut should be infallible");
        let ptr = ptr.recall_validity::<_, (_, (_, (BecauseExclusive, BecauseExclusive)))>();
        ptr.as_mut()
    }
}

impl<T, B> Display for Ref<B, T>
where
    B: ByteSlice,
    T: FromBytes + Display + KnownLayout + Immutable + ?Sized,
{
    #[inline]
    fn fmt(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
        let inner: &T = self;
        inner.fmt(fmt)
    }
}

impl<T, B> Debug for Ref<B, T>
where
    B: ByteSlice,
    T: FromBytes + Debug + KnownLayout + Immutable + ?Sized,
{
    #[inline]
    fn fmt(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
        let inner: &T = self;
        fmt.debug_tuple("Ref").field(&inner).finish()
    }
}

impl<T, B> Eq for Ref<B, T>
where
    B: ByteSlice,
    T: FromBytes + Eq + KnownLayout + Immutable + ?Sized,
{
}

impl<T, B> PartialEq for Ref<B, T>
where
    B: ByteSlice,
    T: FromBytes + PartialEq + KnownLayout + Immutable + ?Sized,
{
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        self.deref().eq(other.deref())
    }
}

impl<T, B> Ord for Ref<B, T>
where
    B: ByteSlice,
    T: FromBytes + Ord + KnownLayout + Immutable + ?Sized,
{
    #[inline]
    fn cmp(&self, other: &Self) -> Ordering {
        let inner: &T = self;
        let other_inner: &T = other;
        inner.cmp(other_inner)
    }
}

impl<T, B> PartialOrd for Ref<B, T>
where
    B: ByteSlice,
    T: FromBytes + PartialOrd + KnownLayout + Immutable + ?Sized,
{
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        let inner: &T = self;
        let other_inner: &T = other;
        inner.partial_cmp(other_inner)
    }
}

/// # Safety
///
/// `T: Sized` and `ptr`'s referent must have size `size_of::<T>()`.
#[inline(always)]
unsafe fn cast_for_sized<'a, T, A, R, S>(
    ptr: Ptr<'a, [u8], (A, Aligned, Valid)>,
) -> Ptr<'a, T, (A, Unaligned, Valid)>
where
    T: FromBytes + KnownLayout + ?Sized,
    A: crate::invariant::Aliasing,
    [u8]: MutationCompatible<T, A, Initialized, Initialized, R>,
    T: TransmuteFromPtr<T, A, Initialized, Valid, S>,
{
    use crate::pointer::cast::{Cast, Project};

    enum CastForSized {}

    // SAFETY: `CastForSized` is only used below with the input `ptr`, which the
    // caller promises has size `size_of::<T>()`. Thus, the referent produced in
    // this cast has the same size as `ptr`'s referent. All operations preserve
    // provenance.
    unsafe impl<T: ?Sized + KnownLayout> Project<[u8], T> for CastForSized {
        #[inline(always)]
        fn project(src: PtrInner<'_, [u8]>) -> *mut T {
            T::raw_from_ptr_len(
                src.as_non_null().cast(),
                <T::PointerMetadata as crate::PointerMetadata>::from_elem_count(0),
            )
            .as_ptr()
        }
    }

    // SAFETY: The `Project::project` impl preserves referent address.
    unsafe impl<T: ?Sized + KnownLayout> Cast<[u8], T> for CastForSized {}

    ptr.recall_validity::<Initialized, (_, (_, _))>()
        .cast::<_, CastForSized, _>()
        .recall_validity::<Valid, _>()
}

#[cfg(test)]
#[allow(clippy::assertions_on_result_states)]
mod tests {
    use core::convert::TryInto as _;

    use super::*;
    use crate::util::testutil::*;

    #[test]
    fn test_mut_slice_into_ref() {
        // Prior to #1260/#1299, calling `into_ref` on a `&mut [u8]`-backed
        // `Ref` was not supported.
        let mut buf = [0u8];
        let r = Ref::<&mut [u8], u8>::from_bytes(&mut buf).unwrap();
        assert_eq!(Ref::into_ref(r), &0);
    }

    #[test]
    fn test_address() {
        // Test that the `Deref` and `DerefMut` implementations return a
        // reference which points to the right region of memory.

        let buf = [0];
        let r = Ref::<_, u8>::from_bytes(&buf[..]).unwrap();
        let buf_ptr = buf.as_ptr();
        let deref_ptr: *const u8 = r.deref();
        assert_eq!(buf_ptr, deref_ptr);

        let buf = [0];
        let r = Ref::<_, [u8]>::from_bytes(&buf[..]).unwrap();
        let buf_ptr = buf.as_ptr();
        let deref_ptr = r.deref().as_ptr();
        assert_eq!(buf_ptr, deref_ptr);
    }

    // Verify that values written to a `Ref` are properly shared between the
    // typed and untyped representations, that reads via `deref` and `read`
    // behave the same, and that writes via `deref_mut` and `write` behave the
    // same.
    fn test_new_helper(mut r: Ref<&mut [u8], AU64>) {
        // assert that the value starts at 0
        assert_eq!(*r, AU64(0));
        assert_eq!(Ref::read(&r), AU64(0));

        // Assert that values written to the typed value are reflected in the
        // byte slice.
        const VAL1: AU64 = AU64(0xFF00FF00FF00FF00);
        *r = VAL1;
        assert_eq!(Ref::bytes(&r), &VAL1.to_bytes());
        *r = AU64(0);
        Ref::write(&mut r, VAL1);
        assert_eq!(Ref::bytes(&r), &VAL1.to_bytes());

        // Assert that values written to the byte slice are reflected in the
        // typed value.
        const VAL2: AU64 = AU64(!VAL1.0); // different from `VAL1`
        Ref::bytes_mut(&mut r).copy_from_slice(&VAL2.to_bytes()[..]);
        assert_eq!(*r, VAL2);
        assert_eq!(Ref::read(&r), VAL2);
    }

    // Verify that values written to a `Ref` are properly shared between the
    // typed and untyped representations; pass a value with `typed_len` `AU64`s
    // backed by an array of `typed_len * 8` bytes.
    fn test_new_helper_slice(mut r: Ref<&mut [u8], [AU64]>, typed_len: usize) {
        // Assert that the value starts out zeroed.
        assert_eq!(&*r, vec![AU64(0); typed_len].as_slice());

        // Check the backing storage is the exact same slice.
        let untyped_len = typed_len * 8;
        assert_eq!(Ref::bytes(&r).len(), untyped_len);
        assert_eq!(Ref::bytes(&r).as_ptr(), r.as_ptr().cast::<u8>());

        // Assert that values written to the typed value are reflected in the
        // byte slice.
        const VAL1: AU64 = AU64(0xFF00FF00FF00FF00);
        for typed in &mut *r {
            *typed = VAL1;
        }
        assert_eq!(Ref::bytes(&r), VAL1.0.to_ne_bytes().repeat(typed_len).as_slice());

        // Assert that values written to the byte slice are reflected in the
        // typed value.
        const VAL2: AU64 = AU64(!VAL1.0); // different from VAL1
        Ref::bytes_mut(&mut r).copy_from_slice(&VAL2.0.to_ne_bytes().repeat(typed_len));
        assert!(r.iter().copied().all(|x| x == VAL2));
    }

    #[test]
    fn test_new_aligned_sized() {
        // Test that a properly-aligned, properly-sized buffer works for new,
        // new_from_prefix, and new_from_suffix, and that new_from_prefix and
        // new_from_suffix return empty slices. Test that a properly-aligned
        // buffer whose length is a multiple of the element size works for
        // new_slice.

        // A buffer with an alignment of 8.
        let mut buf = Align::<[u8; 8], AU64>::default();
        // `buf.t` should be aligned to 8, so this should always succeed.
        test_new_helper(Ref::<_, AU64>::from_bytes(&mut buf.t[..]).unwrap());
        {
            // In a block so that `r` and `suffix` don't live too long.
            buf.set_default();
            let (r, suffix) = Ref::<_, AU64>::from_prefix(&mut buf.t[..]).unwrap();
            assert!(suffix.is_empty());
            test_new_helper(r);
        }
        {
            buf.set_default();
            let (prefix, r) = Ref::<_, AU64>::from_suffix(&mut buf.t[..]).unwrap();
            assert!(prefix.is_empty());
            test_new_helper(r);
        }

        // A buffer with alignment 8 and length 24. We choose this length very
        // intentionally: if we instead used length 16, then the prefix and
        // suffix lengths would be identical. In the past, we used length 16,
        // which resulted in this test failing to discover the bug uncovered in
        // #506.
        let mut buf = Align::<[u8; 24], AU64>::default();
        // `buf.t` should be aligned to 8 and have a length which is a multiple
        // of `size_of::<AU64>()`, so this should always succeed.
        test_new_helper_slice(Ref::<_, [AU64]>::from_bytes(&mut buf.t[..]).unwrap(), 3);
        buf.set_default();
        let r = Ref::<_, [AU64]>::from_bytes_with_elems(&mut buf.t[..], 3).unwrap();
        test_new_helper_slice(r, 3);

        let ascending: [u8; 24] = (0..24).collect::<Vec<_>>().try_into().unwrap();
        // 16 ascending bytes followed by 8 zeros.
        let mut ascending_prefix = ascending;
        ascending_prefix[16..].copy_from_slice(&[0, 0, 0, 0, 0, 0, 0, 0]);
        // 8 zeros followed by 16 ascending bytes.
        let mut ascending_suffix = ascending;
        ascending_suffix[..8].copy_from_slice(&[0, 0, 0, 0, 0, 0, 0, 0]);
        {
            buf.t = ascending_suffix;
            let (r, suffix) = Ref::<_, [AU64]>::from_prefix_with_elems(&mut buf.t[..], 1).unwrap();
            assert_eq!(suffix, &ascending[8..]);
            test_new_helper_slice(r, 1);
        }
        {
            buf.t = ascending_prefix;
            let (prefix, r) = Ref::<_, [AU64]>::from_suffix_with_elems(&mut buf.t[..], 1).unwrap();
            assert_eq!(prefix, &ascending[..16]);
            test_new_helper_slice(r, 1);
        }
    }

    #[test]
    fn test_new_oversized() {
        // Test that a properly-aligned, overly-sized buffer works for
        // `new_from_prefix` and `new_from_suffix`, and that they return the
        // remainder and prefix of the slice respectively.

        let mut buf = Align::<[u8; 16], AU64>::default();
        {
            // In a block so that `r` and `suffix` don't live too long. `buf.t`
            // should be aligned to 8, so this should always succeed.
            let (r, suffix) = Ref::<_, AU64>::from_prefix(&mut buf.t[..]).unwrap();
            assert_eq!(suffix.len(), 8);
            test_new_helper(r);
        }
        {
            buf.set_default();
            // `buf.t` should be aligned to 8, so this should always succeed.
            let (prefix, r) = Ref::<_, AU64>::from_suffix(&mut buf.t[..]).unwrap();
            assert_eq!(prefix.len(), 8);
            test_new_helper(r);
        }
    }

    #[test]
    #[allow(clippy::cognitive_complexity)]
    fn test_new_error() {
        // Fail because the buffer is too large.

        // A buffer with an alignment of 8.
        let buf = Align::<[u8; 16], AU64>::default();
        // `buf.t` should be aligned to 8, so only the length check should fail.
        assert!(Ref::<_, AU64>::from_bytes(&buf.t[..]).is_err());

        // Fail because the buffer is too small.

        // A buffer with an alignment of 8.
        let buf = Align::<[u8; 4], AU64>::default();
        // `buf.t` should be aligned to 8, so only the length check should fail.
        assert!(Ref::<_, AU64>::from_bytes(&buf.t[..]).is_err());
        assert!(Ref::<_, AU64>::from_prefix(&buf.t[..]).is_err());
        assert!(Ref::<_, AU64>::from_suffix(&buf.t[..]).is_err());

        // Fail because the length is not a multiple of the element size.

        let buf = Align::<[u8; 12], AU64>::default();
        // `buf.t` has length 12, but element size is 8.
        assert!(Ref::<_, [AU64]>::from_bytes(&buf.t[..]).is_err());

        // Fail because the buffer is too short.
        let buf = Align::<[u8; 12], AU64>::default();
        // `buf.t` has length 12, but the element size is 8 (and we're expecting
        // two of them). For each function, we test with a length that would
        // cause the size to overflow `usize`, and with a normal length that
        // will fail thanks to the buffer being too short; these are different
        // error paths, and while the error types are the same, the distinction
        // shows up in code coverage metrics.
        let n = (usize::MAX / mem::size_of::<AU64>()) + 1;
        assert!(Ref::<_, [AU64]>::from_bytes_with_elems(&buf.t[..], n).is_err());
        assert!(Ref::<_, [AU64]>::from_bytes_with_elems(&buf.t[..], 2).is_err());
        assert!(Ref::<_, [AU64]>::from_prefix_with_elems(&buf.t[..], n).is_err());
        assert!(Ref::<_, [AU64]>::from_prefix_with_elems(&buf.t[..], 2).is_err());
        assert!(Ref::<_, [AU64]>::from_suffix_with_elems(&buf.t[..], n).is_err());
        assert!(Ref::<_, [AU64]>::from_suffix_with_elems(&buf.t[..], 2).is_err());

        // Fail because the alignment is insufficient.

        // A buffer with an alignment of 8. An odd buffer size is chosen so that
        // the last byte of the buffer has odd alignment.
        let buf = Align::<[u8; 13], AU64>::default();
        // Slicing from 1, we get a buffer with size 12 (so the length check
        // should succeed) but an alignment of only 1, which is insufficient.
        assert!(Ref::<_, AU64>::from_bytes(&buf.t[1..]).is_err());
        assert!(Ref::<_, AU64>::from_prefix(&buf.t[1..]).is_err());
        assert!(Ref::<_, [AU64]>::from_bytes(&buf.t[1..]).is_err());
        assert!(Ref::<_, [AU64]>::from_bytes_with_elems(&buf.t[1..], 1).is_err());
        assert!(Ref::<_, [AU64]>::from_prefix_with_elems(&buf.t[1..], 1).is_err());
        assert!(Ref::<_, [AU64]>::from_suffix_with_elems(&buf.t[1..], 1).is_err());
        // Slicing is unnecessary here because `new_from_suffix` uses the suffix
        // of the slice, which has odd alignment.
        assert!(Ref::<_, AU64>::from_suffix(&buf.t[..]).is_err());

        // Fail due to arithmetic overflow.

        let buf = Align::<[u8; 16], AU64>::default();
        let unreasonable_len = usize::MAX / mem::size_of::<AU64>() + 1;
        assert!(Ref::<_, [AU64]>::from_prefix_with_elems(&buf.t[..], unreasonable_len).is_err());
        assert!(Ref::<_, [AU64]>::from_suffix_with_elems(&buf.t[..], unreasonable_len).is_err());
    }

    #[test]
    #[allow(unstable_name_collisions)]
    #[allow(clippy::as_conversions)]
    fn test_into_ref_mut() {
        #[allow(unused)]
        use crate::util::AsAddress as _;

        let mut buf = Align::<[u8; 8], u64>::default();
        let r = Ref::<_, u64>::from_bytes(&buf.t[..]).unwrap();
        let rf = Ref::into_ref(r);
        assert_eq!(rf, &0u64);
        let buf_addr = (&buf.t as *const [u8; 8]).addr();
        assert_eq!((rf as *const u64).addr(), buf_addr);

        let r = Ref::<_, u64>::from_bytes(&mut buf.t[..]).unwrap();
        let rf = Ref::into_mut(r);
        assert_eq!(rf, &mut 0u64);
        assert_eq!((rf as *mut u64).addr(), buf_addr);

        *rf = u64::MAX;
        assert_eq!(buf.t, [0xFF; 8]);
    }

    #[test]
    fn test_display_debug() {
        let buf = Align::<[u8; 8], u64>::default();
        let r = Ref::<_, u64>::from_bytes(&buf.t[..]).unwrap();
        assert_eq!(format!("{}", r), "0");
        assert_eq!(format!("{:?}", r), "Ref(0)");

        let buf = Align::<[u8; 8], u64>::default();
        let r = Ref::<_, [u64]>::from_bytes(&buf.t[..]).unwrap();
        assert_eq!(format!("{:?}", r), "Ref([0])");
    }

    #[test]
    fn test_eq() {
        let buf1 = 0_u64;
        let r1 = Ref::<_, u64>::from_bytes(buf1.as_bytes()).unwrap();
        let buf2 = 0_u64;
        let r2 = Ref::<_, u64>::from_bytes(buf2.as_bytes()).unwrap();
        assert_eq!(r1, r2);
    }

    #[test]
    fn test_ne() {
        let buf1 = 0_u64;
        let r1 = Ref::<_, u64>::from_bytes(buf1.as_bytes()).unwrap();
        let buf2 = 1_u64;
        let r2 = Ref::<_, u64>::from_bytes(buf2.as_bytes()).unwrap();
        assert_ne!(r1, r2);
    }

    #[test]
    fn test_ord() {
        let buf1 = 0_u64;
        let r1 = Ref::<_, u64>::from_bytes(buf1.as_bytes()).unwrap();
        let buf2 = 1_u64;
        let r2 = Ref::<_, u64>::from_bytes(buf2.as_bytes()).unwrap();
        assert!(r1 < r2);
        assert_eq!(PartialOrd::partial_cmp(&r1, &r2), Some(Ordering::Less));
        assert_eq!(Ord::cmp(&r1, &r2), Ordering::Less);
    }
}

#[cfg(all(test, __ZEROCOPY_INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS))]
mod benches {
    use test::{self, Bencher};

    use super::*;
    use crate::util::testutil::*;

    #[bench]
    fn bench_from_bytes_sized(b: &mut Bencher) {
        let buf = Align::<[u8; 8], AU64>::default();
        // `buf.t` should be aligned to 8, so this should always succeed.
        let bytes = &buf.t[..];
        b.iter(|| test::black_box(Ref::<_, AU64>::from_bytes(test::black_box(bytes)).unwrap()));
    }

    #[bench]
    fn bench_into_ref_sized(b: &mut Bencher) {
        let buf = Align::<[u8; 8], AU64>::default();
        let bytes = &buf.t[..];
        let r = Ref::<_, AU64>::from_bytes(bytes).unwrap();
        b.iter(|| test::black_box(Ref::into_ref(test::black_box(r))));
    }

    #[bench]
    fn bench_into_mut_sized(b: &mut Bencher) {
        let mut buf = Align::<[u8; 8], AU64>::default();
        let buf = &mut buf.t[..];
        let _ = Ref::<_, AU64>::from_bytes(&mut *buf).unwrap();
        b.iter(move || {
            // SAFETY: The preceding `from_bytes` succeeded, and so we know that
            // `buf` is validly-aligned and has the correct length.
            let r = unsafe { Ref::<&mut [u8], AU64>::new_unchecked(&mut *buf) };
            test::black_box(Ref::into_mut(test::black_box(r)));
        });
    }

    #[bench]
    fn bench_deref_sized(b: &mut Bencher) {
        let buf = Align::<[u8; 8], AU64>::default();
        let bytes = &buf.t[..];
        let r = Ref::<_, AU64>::from_bytes(bytes).unwrap();
        b.iter(|| {
            let temp = test::black_box(r);
            test::black_box(temp.deref());
        });
    }

    #[bench]
    fn bench_deref_mut_sized(b: &mut Bencher) {
        let mut buf = Align::<[u8; 8], AU64>::default();
        let buf = &mut buf.t[..];
        let _ = Ref::<_, AU64>::from_bytes(&mut *buf).unwrap();
        b.iter(|| {
            // SAFETY: The preceding `from_bytes` succeeded, and so we know that
            // `buf` is validly-aligned and has the correct length.
            let r = unsafe { Ref::<&mut [u8], AU64>::new_unchecked(&mut *buf) };
            let mut temp = test::black_box(r);
            test::black_box(temp.deref_mut());
        });
    }
}