zerocopy/pointer/inner.rs

// Copyright 2024 The Fuchsia Authors
//
// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0
// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT
// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option.
// This file may not be copied, modified, or distributed except according to
// those terms.

use core::{marker::PhantomData, mem, ops::Range, ptr::NonNull};

pub use _def::PtrInner;

#[allow(unused_imports)]
use crate::util::polyfills::NumExt as _;
use crate::{
    layout::{CastType, MetadataCastError},
    util::AsAddress,
    AlignmentError, CastError, KnownLayout, MetadataOf, SizeError, SplitAt,
};

mod _def {
    use super::*;
    /// The inner pointer stored inside a [`Ptr`][crate::Ptr].
    ///
    /// `PtrInner<'a, T>` is [covariant] in `'a` and invariant in `T`.
    ///
    /// [covariant]: https://doc.rust-lang.org/reference/subtyping.html
    #[allow(missing_debug_implementations)]
    pub struct PtrInner<'a, T>
    where
        T: ?Sized,
    {
        /// # Invariants
        ///
        /// 0. If `ptr`'s referent is not zero sized, then `ptr` has valid
        ///    provenance for its referent, which is entirely contained in some
        ///    Rust allocation, `A`.
        /// 1. If `ptr`'s referent is not zero sized, `A` is guaranteed to live
        ///    for at least `'a`.
        ///
        /// # Postconditions
        ///
        /// By virtue of these invariants, code may assume the following, which
        /// are logical implications of the invariants:
        /// - `ptr`'s referent is not larger than `isize::MAX` bytes \[1\]
        /// - `ptr`'s referent does not wrap around the address space \[1\]
        ///
        /// \[1\] Per <https://doc.rust-lang.org/1.85.0/std/ptr/index.html#allocated-object>:
        ///
        ///   For any allocated object with `base` address, `size`, and a set of
        ///   `addresses`, the following are guaranteed:
        ///   ...
        ///   - `size <= isize::MAX`
        ///
        ///   As a consequence of these guarantees, given any address `a` within
        ///   the set of addresses of an allocated object:
        ///   ...
        ///   - It is guaranteed that, given `o = a - base` (i.e., the offset of
        ///     `a` within the allocated object), `base + o` will not wrap
        ///     around the address space (in other words, will not overflow
        ///     `usize`)
        ptr: NonNull<T>,
        // SAFETY: `&'a UnsafeCell<T>` is covariant in `'a` and invariant in `T`
        // [1]. We use this construction rather than the equivalent `&mut T`,
        // because our MSRV of 1.65 prohibits `&mut` types in const contexts.
        //
        // [1] https://doc.rust-lang.org/1.81.0/reference/subtyping.html#variance
        _marker: PhantomData<&'a core::cell::UnsafeCell<T>>,
    }

    impl<'a, T: 'a + ?Sized> Copy for PtrInner<'a, T> {}
    impl<'a, T: 'a + ?Sized> Clone for PtrInner<'a, T> {
        #[inline(always)]
        fn clone(&self) -> PtrInner<'a, T> {
            // SAFETY: None of the invariants on `ptr` are affected by having
            // multiple copies of a `PtrInner`.
            *self
        }
    }

    impl<'a, T: 'a + ?Sized> PtrInner<'a, T> {
        /// Constructs a `PtrInner` from a [`NonNull`].
        ///
        /// # Safety
        ///
        /// The caller promises that:
        ///
        /// 0. If `ptr`'s referent is not zero sized, then `ptr` has valid
        ///    provenance for its referent, which is entirely contained in some
        ///    Rust allocation, `A`.
        /// 1. If `ptr`'s referent is not zero sized, `A` is guaranteed to live
        ///    for at least `'a`.
        #[inline(always)]
        #[must_use]
        pub const unsafe fn new(ptr: NonNull<T>) -> PtrInner<'a, T> {
            // SAFETY: The caller has promised to satisfy all safety invariants
            // of `PtrInner`.
            Self { ptr, _marker: PhantomData }
        }

        /// Converts this `PtrInner<T>` to a [`NonNull<T>`].
        ///
        /// Note that this method does not consume `self`. The caller should
        /// watch out for `unsafe` code which uses the returned `NonNull` in a
        /// way that violates the safety invariants of `self`.
        #[inline(always)]
        #[must_use]
        pub const fn as_non_null(&self) -> NonNull<T> {
            self.ptr
        }
    }
}

impl<'a, T: ?Sized> PtrInner<'a, T> {
    /// Constructs a `PtrInner` from a reference.
    #[inline]
    pub(crate) fn from_ref(ptr: &'a T) -> Self {
        let ptr = NonNull::from(ptr);
        // SAFETY:
        // 0. If `ptr`'s referent is not zero sized, then `ptr`, by invariant on
        //    `&'a T` [1], has valid provenance for its referent, which is
        //    entirely contained in some Rust allocation, `A`.
        // 1. If `ptr`'s referent is not zero sized, then `A`, by invariant on
        //    `&'a T`, is guaranteed to live for at least `'a`.
        //
        // [1] Per https://doc.rust-lang.org/1.85.0/std/primitive.reference.html#safety:
        //
        //   For all types, `T: ?Sized`, and for all `t: &T` or `t: &mut T`,
        //   when such values cross an API boundary, the following invariants
        //   must generally be upheld:
        //   ...
        //   - if `size_of_val(t) > 0`, then `t` is dereferenceable for
        //     `size_of_val(t)` many bytes
        //
        //   If `t` points at address `a`, being “dereferenceable” for N bytes
        //   means that the memory range `[a, a + N)` is all contained within a
        //   single allocated object.
        unsafe { Self::new(ptr) }
    }

    /// Constructs a `PtrInner` from a mutable reference.
    #[inline]
    pub(crate) fn from_mut(ptr: &'a mut T) -> Self {
        let ptr = NonNull::from(ptr);
        // SAFETY:
        // 0. If `ptr`'s referent is not zero sized, then `ptr`, by invariant on
        //    `&'a mut T` [1], has valid provenance for its referent, which is
        //    entirely contained in some Rust allocation, `A`.
        // 1. If `ptr`'s referent is not zero sized, then `A`, by invariant on
        //    `&'a mut T`, is guaranteed to live for at least `'a`.
        //
        // [1] Per https://doc.rust-lang.org/1.85.0/std/primitive.reference.html#safety:
        //
        //   For all types, `T: ?Sized`, and for all `t: &T` or `t: &mut T`,
        //   when such values cross an API boundary, the following invariants
        //   must generally be upheld:
        //   ...
        //   - if `size_of_val(t) > 0`, then `t` is dereferenceable for
        //     `size_of_val(t)` many bytes
        //
        //   If `t` points at address `a`, being “dereferenceable” for N bytes
        //   means that the memory range `[a, a + N)` is all contained within a
        //   single allocated object.
        unsafe { Self::new(ptr) }
    }

    /// Casts `self` to a `PtrInner<U>` with the same address.
    ///
    /// The cast is checked at compile time: `U` must be no larger than `T`.
    #[must_use]
    #[inline(always)]
    pub fn cast_sized<U>(self) -> PtrInner<'a, U>
    where
        T: Sized,
    {
        static_assert!(T, U => mem::size_of::<T>() >= mem::size_of::<U>());
        // SAFETY: By the preceding assert, `U` is no larger than `T`, which is
        // the size of `self`'s referent.
        unsafe { self.cast() }
    }

    /// Casts `self` to a `PtrInner<U>` with the same address, without checking
    /// that `U` fits in `self`'s referent.
    ///
    /// # Safety
    ///
    /// `U` must not be larger than the size of `self`'s referent.
    #[must_use]
    #[inline(always)]
    pub unsafe fn cast<U>(self) -> PtrInner<'a, U> {
        let ptr = self.as_non_null().cast::<U>();

        // SAFETY: The caller promises that `U` is no larger than `self`'s
        // referent. Thus, `ptr` addresses a subset of the bytes addressed by
        // `self`.
        //
        // 0. By invariant on `self`, if `self`'s referent is not zero sized,
        //    then `self` has valid provenance for its referent, which is
        //    entirely contained in some Rust allocation, `A`. Thus, the same
        //    holds of `ptr`.
        // 1. By invariant on `self`, if `self`'s referent is not zero sized,
        //    then `A` is guaranteed to live for at least `'a`.
        unsafe { PtrInner::new(ptr) }
    }
}

#[allow(clippy::needless_lifetimes)]
impl<'a, T> PtrInner<'a, T>
where
    T: ?Sized + KnownLayout,
{
    /// Extracts the metadata of this pointer.
    pub(crate) fn meta(self) -> MetadataOf<T> {
        let meta = T::pointer_to_metadata(self.as_non_null().as_ptr());
        // SAFETY: By invariant on `PtrInner`, `self.as_non_null()` addresses no
        // more than `isize::MAX` bytes.
        unsafe { MetadataOf::new_unchecked(meta) }
    }

    /// Produces a `PtrInner` with the same address and provenance as `self`,
    /// but with the given `meta`.
    ///
    /// # Safety
    ///
    /// The caller promises that if `self`'s referent is not zero sized, then
    /// a pointer constructed from its address with the given `meta` metadata
    /// will address a subset of the allocation pointed to by `self`.
    #[inline]
    pub(crate) unsafe fn with_meta(self, meta: T::PointerMetadata) -> Self
    where
        T: KnownLayout,
    {
        let raw = T::raw_from_ptr_len(self.as_non_null().cast(), meta);

        // SAFETY:
        //
        // Lemma 0: `raw` either addresses zero bytes, or addresses a subset of
        //          the allocation pointed to by `self` and has the same
        //          provenance as `self`. Proof: `raw` is constructed using
        //          provenance-preserving operations, and the caller has
        //          promised that, if `self`'s referent is not zero-sized, the
        //          resulting pointer addresses a subset of the allocation
        //          pointed to by `self`.
        //
        // 0. Per Lemma 0 and by invariant on `self`, if `raw`'s referent is not
        //    zero sized, then `raw` is derived from some valid Rust allocation,
        //    `A`.
        // 1. Per Lemma 0 and by invariant on `self`, if `raw`'s referent is not
        //    zero sized, then `raw` has valid provenance for `A`.
        // 2. Per Lemma 0 and by invariant on `self`, if `raw`'s referent is not
        //    zero sized, then `raw` addresses a byte range which is entirely
        //    contained in `A`.
        // 3. Per Lemma 0 and by invariant on `self`, `raw` addresses a byte
        //    range whose length fits in an `isize`.
        // 4. Per Lemma 0 and by invariant on `self`, `raw` addresses a byte
        //    range which does not wrap around the address space.
        // 5. Per Lemma 0 and by invariant on `self`, if `raw`'s referent is not
        //    zero sized, then `A` is guaranteed to live for at least `'a`.
        unsafe { PtrInner::new(raw) }
    }

    /// Views the referent of `self` as a slice of bytes with the same address
    /// and length.
    pub(crate) fn as_bytes(self) -> PtrInner<'a, [u8]> {
        let ptr = self.as_non_null();
        let bytes = match T::size_of_val_raw(ptr) {
            Some(bytes) => bytes,
            // SAFETY: `KnownLayout::size_of_val_raw` promises to always
            // return `Some` so long as the resulting size fits in a
            // `usize`. By invariant on `PtrInner`, `self` refers to a range
            // of bytes whose size fits in an `isize`, which implies that it
            // also fits in a `usize`.
            None => unsafe { core::hint::unreachable_unchecked() },
        };

        let ptr = core::ptr::slice_from_raw_parts_mut(ptr.cast::<u8>().as_ptr(), bytes);

        // SAFETY: `ptr` has the same address as `self.as_non_null()`, which is
        // non-null by construction.
        let ptr = unsafe { NonNull::new_unchecked(ptr) };

        // SAFETY: `ptr` points to `bytes` `u8`s starting at the same address as
        // `self`'s referent. Since `bytes` is the length of `self`'s referent,
        // `ptr` addresses the same byte range as `self`. Thus, by invariant on
        // `self` (as a `PtrInner`):
        //
        // 0. If `ptr`'s referent is not zero sized, then `ptr` has valid
        //    provenance for its referent, which is entirely contained in some
        //    Rust allocation, `A`.
        // 1. If `ptr`'s referent is not zero sized, `A` is guaranteed to live
        //    for at least `'a`.
        unsafe { PtrInner::new(ptr) }
    }
}

#[allow(clippy::needless_lifetimes)]
impl<'a, T> PtrInner<'a, T>
where
    T: ?Sized + KnownLayout<PointerMetadata = usize>,
{
    /// Splits `T` in two.
    ///
    /// # Safety
    ///
    /// The caller promises that:
    ///  - `l_len.get() <= self.meta()`.
    ///
    /// ## (Non-)Overlap
    ///
    /// Given `let (left, right) = ptr.split_at_unchecked(l_len)`, it is
    /// guaranteed that `left` and `right` are contiguous and non-overlapping
    /// if `l_len.padding_needed_for() == 0`. This is true for all `[T]`.
    ///
    /// If `l_len.padding_needed_for() != 0`, then the left pointer will overlap
    /// the right pointer to satisfy `T`'s padding requirements.
    pub(crate) unsafe fn split_at_unchecked(
        self,
        l_len: crate::util::MetadataOf<T>,
    ) -> (Self, PtrInner<'a, [T::Elem]>)
    where
        T: SplitAt,
    {
        let l_len = l_len.get();

        // SAFETY: The caller promises that `l_len.get() <= self.meta()`.
        // Trivially, `0 <= l_len`.
        let left = unsafe { self.with_meta(l_len) };

        let right = self.trailing_slice();
        // SAFETY: The caller promises that `l_len <= self.meta()`. Trivially,
        // `self.meta() <= self.meta()`.
        let right = unsafe { right.slice_unchecked(l_len..self.meta().get()) };

        // SAFETY: If `l_len.padding_needed_for() == 0`, then `left` and `right`
        // are non-overlapping. Proof: `left` is constructed from `self` with
        // `l_len` as its (exclusive) upper bound. If
        // `l_len.padding_needed_for() == 0`, then `left` requires no trailing
        // padding following its final element. Since `right` is constructed
        // from `self`'s trailing slice with `l_len` as its (inclusive) lower
        // bound, no byte is referred to by both pointers.
        //
        // Conversely, if `l_len.padding_needed_for() == N`, where `N > 0`, then
        // `left` requires `N` bytes of trailing padding following its final
        // element. Since `right` is constructed from the trailing slice of
        // `self` with `l_len` as its (inclusive) lower bound, the first `N`
        // bytes of `right` are aliased by `left`.
        (left, right)
    }

    /// Produces the trailing slice of `self`.
    pub(crate) fn trailing_slice(self) -> PtrInner<'a, [T::Elem]>
    where
        T: SplitAt,
    {
        let offset = crate::trailing_slice_layout::<T>().offset;

        let bytes = self.as_non_null().cast::<u8>().as_ptr();

        // SAFETY:
        // - By invariant on `T: KnownLayout`, `T::LAYOUT` describes `T`'s
        //   layout. `offset` is the offset of the trailing slice within `T`,
        //   which is by definition in-bounds or one byte past the end of any
        //   `T`, regardless of metadata. By invariant on `PtrInner`, `self`
        //   (and thus `bytes`) points to a byte range of size `<= isize::MAX`,
        //   and so `offset <= isize::MAX`. Since `size_of::<u8>() == 1`,
        //   `offset * size_of::<u8>() <= isize::MAX`.
        // - If `offset > 0`, then by invariant on `PtrInner`, `self` (and thus
        //   `bytes`) points to a byte range entirely contained within a single
        //   allocated object. As explained above, adding `offset` results in a
        //   pointer in-bounds of or one byte past the end of that allocated
        //   object.
        let bytes = unsafe { bytes.add(offset) };

        // SAFETY: By the preceding safety argument, `bytes` is within or one
        // byte past the end of the same allocated object as `self`, which
        // ensures that it is non-null.
        let bytes = unsafe { NonNull::new_unchecked(bytes) };

        let ptr = KnownLayout::raw_from_ptr_len(bytes, self.meta().get());

        // SAFETY:
        // 0. If `ptr`'s referent is not zero sized, then `ptr` is derived from
        //    some valid Rust allocation, `A`, because `ptr` is derived from
        //    the same allocated object as `self`.
        // 1. If `ptr`'s referent is not zero sized, then `ptr` has valid
        //    provenance for `A`, because `ptr` is derived from the same
        //    allocated object as `self` via provenance-preserving operations.
        // 2. If `ptr`'s referent is not zero sized, then `ptr` addresses a byte
        //    range which is entirely contained in `A`, by previous safety proof
        //    on `bytes`.
        // 3. `ptr` addresses a byte range whose length fits in an `isize`, by
        //    consequence of #2.
        // 4. `ptr` addresses a byte range which does not wrap around the
        //    address space, by consequence of #2.
        // 5. If `ptr`'s referent is not zero sized, then `A` is guaranteed to
        //    live for at least `'a`, because `ptr` is derived from `self`.
        unsafe { PtrInner::new(ptr) }
    }
}

#[allow(clippy::needless_lifetimes)]
impl<'a, T> PtrInner<'a, [T]> {
    /// Creates a pointer which addresses the given `range` of `self`.
    ///
    /// # Safety
    ///
    /// The caller promises that `range` is a valid range (`start <= end`) and
    /// that `end <= self.meta()`.
    pub(crate) unsafe fn slice_unchecked(self, range: Range<usize>) -> Self {
        let base = self.as_non_null().cast::<T>().as_ptr();

        // SAFETY: The caller promises that `start <= end <= self.meta()`. By
        // invariant, if `self`'s referent is not zero-sized, then `self` refers
        // to a byte range which is contained within a single allocation, which
        // is no more than `isize::MAX` bytes long, and which does not wrap
        // around the address space. Thus, this pointer arithmetic remains
        // in-bounds of the same allocation, and does not wrap around the
        // address space. The offset (in bytes) does not overflow `isize`.
        //
        // If `self`'s referent is zero-sized, then these conditions are
        // trivially satisfied.
        let base = unsafe { base.add(range.start) };

        // SAFETY: The caller promises that `start <= end`, and so this will not
        // underflow.
        #[allow(unstable_name_collisions)]
        let len = unsafe { range.end.unchecked_sub(range.start) };

        let ptr = core::ptr::slice_from_raw_parts_mut(base, len);

        // SAFETY: By invariant, `self`'s referent is either a ZST or lives
        // entirely in an allocation. `ptr` points inside of or one byte past
        // the end of that referent. Thus, in either case, `ptr` is non-null.
        let ptr = unsafe { NonNull::new_unchecked(ptr) };

        // SAFETY:
        //
        // Lemma 0: `ptr` addresses a subset of the bytes addressed by `self`,
        //          and has the same provenance. Proof: The caller guarantees
        //          that `start <= end <= self.meta()`. Thus, `base` is
        //          in-bounds of `self`, and `base + (end - start)` is also
        //          in-bounds of `self`. Finally, `ptr` is constructed using
        //          provenance-preserving operations.
        //
        // 0. Per Lemma 0 and by invariant on `self`, if `ptr`'s referent is not
        //    zero sized, then `ptr` has valid provenance for its referent,
        //    which is entirely contained in some Rust allocation, `A`.
        // 1. Per Lemma 0 and by invariant on `self`, if `ptr`'s referent is not
        //    zero sized, then `A` is guaranteed to live for at least `'a`.
        unsafe { PtrInner::new(ptr) }
    }

    /// Iterates over the elements of `self`, projecting each one as a
    /// `PtrInner<'a, T>`.
    pub(crate) fn iter(&self) -> impl Iterator<Item = PtrInner<'a, T>> {
        // FIXME(#429): Once `NonNull::cast` documents that it preserves
        // provenance, cite those docs.
        let base = self.as_non_null().cast::<T>().as_ptr();
        (0..self.meta().get()).map(move |i| {
            // FIXME(https://github.com/rust-lang/rust/issues/74265): Use
            // `NonNull::get_unchecked_mut`.

            // SAFETY: If the following conditions are not satisfied,
            // `pointer::add` may induce Undefined Behavior [1]:
            //
            // > - The computed offset, `count * size_of::<T>()` bytes, must not
            // >   overflow `isize`.
            // > - If the computed offset is non-zero, then `self` must be
            // >   derived from a pointer to some allocated object, and the
            // >   entire memory range between `self` and the result must be in
            // >   bounds of that allocated object. In particular, this range
            // >   must not “wrap around” the edge of the address space.
            //
            // [1] https://doc.rust-lang.org/std/primitive.pointer.html#method.add
            //
            // We satisfy both of these conditions here:
            // - By invariant on `PtrInner`, `self` addresses a byte range whose
            //   length fits in an `isize`. Since `elem` is contained in `self`,
            //   the computed offset of `elem` must fit within `isize`.
            // - If the computed offset is non-zero, then this means that the
            //   referent is not zero-sized. In this case, `base` points to an
            //   allocated object (by invariant on `self`). Thus:
            //   - By contract, `self.meta()` accurately reflects the number of
            //     elements in the slice. `i` is in bounds of `self.meta()` by
            //     construction, and so the result of this addition cannot
            //     overflow past the end of the allocation referred to by
            //     `self`.
            //   - By invariant on `PtrInner`, `self` addresses a byte range
            //     which does not wrap around the address space. Since `elem` is
            //     contained in `self`, the computed offset of `elem` must not
            //     wrap around the address space.
            //
            // FIXME(#429): Once `pointer::add` documents that it preserves
            // provenance, cite those docs.
            let elem = unsafe { base.add(i) };

            // SAFETY: `elem` is not null: `base` is constructed from a
            // `NonNull` pointer, and the addition that produces `elem` does not
            // overflow or wrap around, so `elem >= base > 0`.
            //
            // FIXME(#429): Once `NonNull::new_unchecked` documents that it
            // preserves provenance, cite those docs.
            let elem = unsafe { NonNull::new_unchecked(elem) };

            // SAFETY: The safety invariants of `PtrInner::new` (see definition)
            // are satisfied:
            // 0. If `elem`'s referent is not zero sized, then `elem` has valid
            //    provenance for its referent, because it is derived from `self`
            //    using a series of provenance-preserving operations, and
            //    because `self` has valid provenance for its referent. By the
            //    same argument, `elem`'s referent is entirely contained within
            //    the same allocated object as `self`'s referent.
            // 1. If `elem`'s referent is not zero sized, then the allocation of
            //    `elem` is guaranteed to live for at least `'a`, because `elem`
            //    is entirely contained in `self`, which lives for at least `'a`
            //    by invariant on `PtrInner`.
            unsafe { PtrInner::new(elem) }
        })
    }
}

impl<'a, T, const N: usize> PtrInner<'a, [T; N]> {
    /// Casts this pointer-to-array into a slice.
    ///
    /// # Safety
    ///
    /// Callers may assume that the returned `PtrInner` references the same
    /// address and length as `self`.
    #[allow(clippy::wrong_self_convention)]
    pub(crate) fn as_slice(self) -> PtrInner<'a, [T]> {
        let start = self.as_non_null().cast::<T>().as_ptr();
        let slice = core::ptr::slice_from_raw_parts_mut(start, N);
        // SAFETY: `slice` is not null, because it is derived from `start`
        // which is non-null.
        let slice = unsafe { NonNull::new_unchecked(slice) };
        // SAFETY: Lemma: In the following safety arguments, note that `slice`
        // is derived from `self` in two steps: first, by casting `self` (a
        // pointer to `[T; N]`) to `start` (a pointer to `T`), then by
        // constructing a pointer to a slice starting at `start` of length `N`.
        // As a result, `slice` references exactly the same allocation as
        // `self`, if any.
        //
        // 0. By the above lemma, if `slice`'s referent is not zero sized, then
        //    `slice` has the same referent as `self`. By invariant on `self`,
        //    this referent is entirely contained within some allocation, `A`.
        //    Because `slice` was constructed using provenance-preserving
        //    operations, it has provenance for its entire referent.
        // 1. By the above lemma, if `slice`'s referent is not zero sized, then
        //    `A` is guaranteed to live for at least `'a`, because it is derived
        //    from the same allocation as `self`, which, by invariant on
        //    `PtrInner`, lives for at least `'a`.
        unsafe { PtrInner::new(slice) }
    }
}
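
// NOTE: The following is an illustrative sketch, not an additional API: it
// shows how `as_slice` (above) and `iter` (defined on `PtrInner<'a, [T]>`)
// might be combined. `as_slice` moves an array's compile-time length into the
// pointer metadata, and `iter` then projects each element in turn:
//
//     let arr = [1u8, 2, 3];
//     let elems = PtrInner::from_ref(&arr).as_slice();
//     assert_eq!(elems.meta().get(), 3);
//     for elem in elems.iter() {
//         // Each `elem` is a `PtrInner<'_, u8>` addressing one element of `arr`.
//         let _ = elem.as_non_null();
//     }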

impl<'a> PtrInner<'a, [u8]> {
    /// Attempts to cast `self` to a `U` using the given cast type.
    ///
    /// If `U` is a slice DST and pointer metadata (`meta`) is provided, then
    /// the cast will only succeed if it would produce an object with the given
    /// metadata.
    ///
    /// Returns `Err` if the resulting `U` would be invalidly-aligned, if no
    /// `U` can fit in `self`, or if the provided pointer metadata describes an
    /// invalid instance of `U`. On success, returns a pointer to the
    /// largest-possible `U` which fits in `self`.
    ///
    /// # Safety
    ///
    /// The caller may assume that this implementation is correct, and may rely
    /// on that assumption for the soundness of their code. In particular, the
    /// caller may assume that, if `try_cast_into` returns `Ok((ptr,
    /// remainder))`, then `ptr` and `remainder` refer to non-overlapping byte
    /// ranges within `self`, and that `ptr` and `remainder` entirely cover
    /// `self`. Finally:
    /// - If this is a prefix cast, `ptr` has the same address as `self`.
    /// - If this is a suffix cast, `remainder` has the same address as `self`.
    #[inline]
    pub(crate) fn try_cast_into<U>(
        self,
        cast_type: CastType,
        meta: Option<U::PointerMetadata>,
    ) -> Result<(PtrInner<'a, U>, PtrInner<'a, [u8]>), CastError<Self, U>>
    where
        U: 'a + ?Sized + KnownLayout,
    {
        // PANICS: By invariant, the byte range addressed by
        // `self.as_non_null()` does not wrap around the address space. This
        // implies that the sum of the address (represented as a `usize`) and
        // length does not overflow `usize`, as required by
        // `validate_cast_and_convert_metadata`. Thus, this call to
        // `validate_cast_and_convert_metadata` will only panic if `U` is a DST
        // whose trailing slice element is zero-sized.
        let maybe_metadata = MetadataOf::<U>::validate_cast_and_convert_metadata(
            AsAddress::addr(self.as_non_null().as_ptr()),
            self.meta(),
            cast_type,
            meta,
        );

        let (elems, split_at) = match maybe_metadata {
            Ok((elems, split_at)) => (elems, split_at),
            Err(MetadataCastError::Alignment) => {
                // SAFETY: Since `validate_cast_and_convert_metadata` returned
                // an alignment error, `U` must have an alignment requirement
                // greater than one.
                let err = unsafe { AlignmentError::<_, U>::new_unchecked(self) };
                return Err(CastError::Alignment(err));
            }
            Err(MetadataCastError::Size) => return Err(CastError::Size(SizeError::new(self))),
        };

        // SAFETY: `validate_cast_and_convert_metadata` promises to return
        // `split_at <= self.meta()`.
        //
        // Lemma 0: `l_slice` and `r_slice` are non-overlapping. Proof: By
        // contract on `PtrInner::split_at_unchecked`, the produced `PtrInner`s
        // are non-overlapping when `l_len.padding_needed_for() == 0`, which
        // holds for all slice types, including `[u8]`.
        let (l_slice, r_slice) = unsafe { self.split_at_unchecked(split_at) };

        let (target, remainder) = match cast_type {
            CastType::Prefix => (l_slice, r_slice),
            CastType::Suffix => (r_slice, l_slice),
        };

        let base = target.as_non_null().cast::<u8>();

        let ptr = U::raw_from_ptr_len(base, elems.get());

        // SAFETY:
        // 0. By invariant, if `target`'s referent is not zero sized, then
        //    `target` has provenance valid for some Rust allocation, `A`.
        //    Because `ptr` is derived from `target` via provenance-preserving
        //    operations, `ptr` will also have provenance valid for its entire
        //    referent.
        // 1. `validate_cast_and_convert_metadata` promises that the object
        //    described by `elems` and `split_at` lives at a byte range which is
        //    a subset of the input byte range. Thus, by invariant, if
        //    `target`'s referent is not zero sized, then `target` refers to an
        //    allocation which is guaranteed to live for at least `'a`, and thus
        //    so does `ptr`.
        Ok((unsafe { PtrInner::new(ptr) }, remainder))
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::*;

    #[test]
    fn test_meta() {
        let arr = [1; 16];
        let dst = <[u8]>::ref_from_bytes(&arr[..]).unwrap();
        let ptr = PtrInner::from_ref(dst);
        assert_eq!(ptr.meta().get(), 16);

        // SAFETY: 8 is less than 16
        let ptr = unsafe { ptr.with_meta(8) };

        assert_eq!(ptr.meta().get(), 8);
    }
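
    // An illustrative sketch of `as_bytes` and `cast_sized`; it assumes, as
    // elsewhere in this crate, that `u64` and `u32` implement `KnownLayout`.
    #[test]
    fn test_cast_sized_and_as_bytes() {
        let x: u64 = u64::from_ne_bytes([1; 8]);
        let ptr = PtrInner::from_ref(&x);

        // `as_bytes` views the referent as a byte slice of the same length.
        assert_eq!(ptr.as_bytes().meta().get(), 8);

        // `cast_sized` shrinks the referent type; `u32` is no larger than
        // `u64`, so the static assertion in `cast_sized` is satisfied. The
        // resulting pointer has the same address as `ptr`.
        let narrowed = ptr.cast_sized::<u32>();
        assert_eq!(
            narrowed.as_non_null().cast::<u8>().as_ptr(),
            ptr.as_non_null().cast::<u8>().as_ptr()
        );

        // SAFETY: `narrowed` addresses the first four (initialized) bytes of
        // `x`.
        let val = unsafe { core::ptr::read_unaligned(narrowed.as_non_null().as_ptr()) };
        assert_eq!(val, u32::from_ne_bytes([1; 4]));
    }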

    #[test]
    fn test_split_at() {
        fn test_split_at<const OFFSET: usize, const BUFFER_SIZE: usize>() {
            #[derive(FromBytes, KnownLayout, SplitAt, Immutable)]
            #[repr(C)]
            struct SliceDst<const OFFSET: usize> {
                prefix: [u8; OFFSET],
                trailing: [u8],
            }

            let n: usize = BUFFER_SIZE - OFFSET;
            let arr = [1; BUFFER_SIZE];
            let dst = SliceDst::<OFFSET>::ref_from_bytes(&arr[..]).unwrap();
            let ptr = PtrInner::from_ref(dst);
            for i in 0..=n {
                assert_eq!(ptr.meta().get(), n);
                // SAFETY: `i` is in bounds by construction.
                let i = unsafe { MetadataOf::new_unchecked(i) };
                // SAFETY: `i` is in bounds by construction.
                let (l, r) = unsafe { ptr.split_at_unchecked(i) };
                // SAFETY: Points to a valid value by construction.
                #[allow(clippy::undocumented_unsafe_blocks, clippy::as_conversions)]
                // Clippy false positive
                let l_sum: usize = l
                    .trailing_slice()
                    .iter()
                    .map(|ptr| unsafe { core::ptr::read_unaligned(ptr.as_non_null().as_ptr()) }
                        as usize)
                    .sum();
                // SAFETY: Points to a valid value by construction.
                #[allow(clippy::undocumented_unsafe_blocks, clippy::as_conversions)]
                // Clippy false positive
                let r_sum: usize = r
                    .iter()
                    .map(|ptr| unsafe { core::ptr::read_unaligned(ptr.as_non_null().as_ptr()) }
                        as usize)
                    .sum();
                assert_eq!(l_sum, i.get());
                assert_eq!(r_sum, n - i.get());
                assert_eq!(l_sum + r_sum, n);
            }
        }

        test_split_at::<0, 16>();
        test_split_at::<1, 17>();
        test_split_at::<2, 18>();
    }
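
    // An illustrative sketch of `slice_unchecked`: it narrows a
    // `PtrInner<[u8]>` to a sub-range without touching the referent. The
    // bounds below are chosen to satisfy the method's safety preconditions.
    #[test]
    fn test_slice_unchecked() {
        let arr = [0u8, 1, 2, 3, 4, 5, 6, 7];
        let dst = <[u8]>::ref_from_bytes(&arr[..]).unwrap();
        let ptr = PtrInner::from_ref(dst);
        assert_eq!(ptr.meta().get(), 8);

        // SAFETY: `2 <= 6` and `6 <= ptr.meta()`.
        let sub = unsafe { ptr.slice_unchecked(2..6) };
        assert_eq!(sub.meta().get(), 4);

        // SAFETY: `sub` and `ptr` are both derived from, and in-bounds of, the
        // same referent, `arr`.
        let offset = unsafe {
            sub.as_non_null()
                .cast::<u8>()
                .as_ptr()
                .offset_from(ptr.as_non_null().cast::<u8>().as_ptr())
        };
        assert_eq!(offset, 2);

        let sum: usize = sub
            .iter()
            .map(|elem| {
                // SAFETY: Each element pointer produced by `iter` addresses an
                // initialized `u8` within `arr`.
                usize::from(unsafe { core::ptr::read_unaligned(elem.as_non_null().as_ptr()) })
            })
            .sum();
        assert_eq!(sum, 2 + 3 + 4 + 5);
    }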

    #[test]
    fn test_trailing_slice() {
        fn test_trailing_slice<const OFFSET: usize, const BUFFER_SIZE: usize>() {
            #[derive(FromBytes, KnownLayout, SplitAt, Immutable)]
            #[repr(C)]
            struct SliceDst<const OFFSET: usize> {
                prefix: [u8; OFFSET],
                trailing: [u8],
            }

            let n: usize = BUFFER_SIZE - OFFSET;
            let arr = [1; BUFFER_SIZE];
            let dst = SliceDst::<OFFSET>::ref_from_bytes(&arr[..]).unwrap();
            let ptr = PtrInner::from_ref(dst);

            assert_eq!(ptr.meta().get(), n);
            let trailing = ptr.trailing_slice();
            assert_eq!(trailing.meta().get(), n);

            assert_eq!(
                // SAFETY: We assume this to be sound for the sake of this test,
                // which will fail here, under Miri, if the safety precondition
                // of `offset_from` is not satisfied.
                unsafe {
                    #[allow(clippy::as_conversions)]
                    let offset = (trailing.as_non_null().as_ptr() as *mut u8)
                        .offset_from(ptr.as_non_null().as_ptr() as *mut _);
                    offset
                },
                isize::try_from(OFFSET).unwrap(),
            );

            // SAFETY: Points to a valid value by construction.
            #[allow(clippy::undocumented_unsafe_blocks, clippy::as_conversions)]
            // Clippy false positive
            let trailing: usize =
                trailing
                    .iter()
                    .map(|ptr| unsafe { core::ptr::read_unaligned(ptr.as_non_null().as_ptr()) }
                        as usize)
                    .sum();

            assert_eq!(trailing, n);
        }

        test_trailing_slice::<0, 16>();
        test_trailing_slice::<1, 17>();
        test_trailing_slice::<2, 18>();
    }
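
    // A sketch of `try_cast_into` on a byte slice: a suffix cast into `[u8]`
    // with explicit metadata. It relies on the documented guarantees that the
    // target and the remainder tile `self` and that, for a suffix cast, the
    // remainder shares `self`'s address.
    #[test]
    fn test_try_cast_into() {
        let arr = [1u8; 16];
        let dst = <[u8]>::ref_from_bytes(&arr[..]).unwrap();
        let ptr = PtrInner::from_ref(dst);

        let (target, remainder) = match ptr.try_cast_into::<[u8]>(CastType::Suffix, Some(4)) {
            Ok(parts) => parts,
            Err(_) => panic!("suffix cast into `[u8]` with metadata 4 should succeed"),
        };
        assert_eq!(target.meta().get(), 4);
        assert_eq!(remainder.meta().get(), 12);
        assert_eq!(
            remainder.as_non_null().cast::<u8>().as_ptr(),
            ptr.as_non_null().cast::<u8>().as_ptr()
        );
    }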
}