zerocopy/pointer/inner.rs
// Copyright 2024 The Fuchsia Authors
//
// Licensed under a BSD-style license <LICENSE-BSD>, Apache License, Version 2.0
// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT
// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option.
// This file may not be copied, modified, or distributed except according to
// those terms.

use core::{marker::PhantomData, ops::Range, ptr::NonNull};

#[allow(unused_imports)]
use crate::util::polyfills::NumExt as _;
use crate::{
    layout::{CastType, MetadataCastError},
    util::AsAddress,
    AlignmentError, CastError, KnownLayout, MetadataOf, SizeError, SplitAt,
};

pub(crate) use _def::PtrInner;

mod _def {
    use super::*;
    /// The inner pointer stored inside a [`Ptr`][crate::Ptr].
    ///
    /// `PtrInner<'a, T>` is [covariant] in `'a` and invariant in `T`.
    ///
    /// [covariant]: https://doc.rust-lang.org/reference/subtyping.html
    pub(crate) struct PtrInner<'a, T>
    where
        T: ?Sized,
    {
        /// # Invariants
        ///
        /// 0. If `ptr`'s referent is not zero sized, then `ptr` has valid
        ///    provenance for its referent, which is entirely contained in some
        ///    Rust allocation, `A`.
        /// 1. If `ptr`'s referent is not zero sized, `A` is guaranteed to live
        ///    for at least `'a`.
        ///
        /// # Postconditions
        ///
        /// By virtue of these invariants, code may assume the following, which
        /// are logical implications of the invariants:
        /// - `ptr`'s referent is not larger than `isize::MAX` bytes \[1\]
        /// - `ptr`'s referent does not wrap around the address space \[1\]
        ///
        /// \[1\] Per <https://doc.rust-lang.org/1.85.0/std/ptr/index.html#allocated-object>:
        ///
        ///   For any allocated object with `base` address, `size`, and a set of
        ///   `addresses`, the following are guaranteed:
        ///   ...
        ///   - `size <= isize::MAX`
        ///
        ///   As a consequence of these guarantees, given any address `a` within
        ///   the set of addresses of an allocated object:
        ///   ...
        ///   - It is guaranteed that, given `o = a - base` (i.e., the offset of
        ///     `a` within the allocated object), `base + o` will not wrap around
        ///     the address space (in other words, will not overflow `usize`)
        ptr: NonNull<T>,
        // SAFETY: `&'a UnsafeCell<T>` is covariant in `'a` and invariant in `T`
        // [1]. We use this construction rather than the equivalent `&mut T`,
        // because our MSRV of 1.65 prohibits `&mut` types in const contexts.
        //
        // [1] https://doc.rust-lang.org/1.81.0/reference/subtyping.html#variance
        _marker: PhantomData<&'a core::cell::UnsafeCell<T>>,
    }
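
    // Editorial note (illustrative sketch, not part of the original source):
    // the variance claims above can be spot-checked with a coercion that only
    // compiles because `PtrInner<'a, T>` is covariant in `'a`:
    //
    //     fn shorten<'a>(p: PtrInner<'static, u8>) -> PtrInner<'a, u8> {
    //         p // OK: `'static: 'a`, and `PtrInner` is covariant in `'a`.
    //     }
    //
    // The analogous conversion that changes `T` (e.g. `PtrInner<'a, u8>` to
    // `PtrInner<'a, i8>`) is rejected, reflecting invariance in `T`.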

    impl<'a, T: 'a + ?Sized> Copy for PtrInner<'a, T> {}
    impl<'a, T: 'a + ?Sized> Clone for PtrInner<'a, T> {
        fn clone(&self) -> PtrInner<'a, T> {
            // SAFETY: None of the invariants on `ptr` are affected by having
            // multiple copies of a `PtrInner`.
            *self
        }
    }

    impl<'a, T: 'a + ?Sized> PtrInner<'a, T> {
        /// Constructs a `PtrInner` from a [`NonNull`].
        ///
        /// # Safety
        ///
        /// The caller promises that:
        ///
        /// 0. If `ptr`'s referent is not zero sized, then `ptr` has valid
        ///    provenance for its referent, which is entirely contained in some
        ///    Rust allocation, `A`.
        /// 1. If `ptr`'s referent is not zero sized, `A` is guaranteed to live
        ///    for at least `'a`.
        pub(crate) const unsafe fn new(ptr: NonNull<T>) -> PtrInner<'a, T> {
            // SAFETY: The caller has promised to satisfy all safety invariants
            // of `PtrInner`.
            Self { ptr, _marker: PhantomData }
        }

        /// Converts this `PtrInner<T>` to a [`NonNull<T>`].
        ///
        /// Note that this method does not consume `self`. The caller should
        /// watch out for `unsafe` code which uses the returned `NonNull` in a
        /// way that violates the safety invariants of `self`.
        pub(crate) const fn as_non_null(&self) -> NonNull<T> {
            self.ptr
        }
    }
}

impl<'a, T: ?Sized> PtrInner<'a, T> {
    /// Constructs a `PtrInner` from a reference.
    #[inline]
    pub(crate) fn from_ref(ptr: &'a T) -> Self {
        let ptr = NonNull::from(ptr);
        // SAFETY:
        // 0. If `ptr`'s referent is not zero sized, then `ptr`, by invariant on
        //    `&'a T` [1], has valid provenance for its referent, which is
        //    entirely contained in some Rust allocation, `A`.
        // 1. If `ptr`'s referent is not zero sized, then `A`, by invariant on
        //    `&'a T`, is guaranteed to live for at least `'a`.
        //
        // [1] Per https://doc.rust-lang.org/1.85.0/std/primitive.reference.html#safety:
        //
        //   For all types, `T: ?Sized`, and for all `t: &T` or `t: &mut T`,
        //   when such values cross an API boundary, the following invariants
        //   must generally be upheld:
        //   ...
        //   - if `size_of_val(t) > 0`, then `t` is dereferenceable for
        //     `size_of_val(t)` many bytes
        //
        //   If `t` points at address `a`, being “dereferenceable” for N bytes
        //   means that the memory range `[a, a + N)` is all contained within a
        //   single allocated object.
        unsafe { Self::new(ptr) }
    }

    /// Constructs a `PtrInner` from a mutable reference.
    #[inline]
    pub(crate) fn from_mut(ptr: &'a mut T) -> Self {
        let ptr = NonNull::from(ptr);
        // SAFETY:
        // 0. If `ptr`'s referent is not zero sized, then `ptr`, by invariant on
        //    `&'a mut T` [1], has valid provenance for its referent, which is
        //    entirely contained in some Rust allocation, `A`.
        // 1. If `ptr`'s referent is not zero sized, then `A`, by invariant on
        //    `&'a mut T`, is guaranteed to live for at least `'a`.
        //
        // [1] Per https://doc.rust-lang.org/1.85.0/std/primitive.reference.html#safety:
        //
        //   For all types, `T: ?Sized`, and for all `t: &T` or `t: &mut T`,
        //   when such values cross an API boundary, the following invariants
        //   must generally be upheld:
        //   ...
        //   - if `size_of_val(t) > 0`, then `t` is dereferenceable for
        //     `size_of_val(t)` many bytes
        //
        //   If `t` points at address `a`, being “dereferenceable” for N bytes
        //   means that the memory range `[a, a + N)` is all contained within a
        //   single allocated object.
        unsafe { Self::new(ptr) }
    }
}
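
// Editorial note (illustrative sketch, not part of the original source): a
// minimal round trip through the safe constructors. `from_ref` captures a
// shared reference, and `as_non_null` recovers a raw pointer which remains
// valid for reads for the reference's lifetime:
//
//     let x: u64 = 42;
//     let ptr = PtrInner::from_ref(&x);
//     // SAFETY: `ptr` was derived from a live `&u64`, so it is valid for
//     // reads of `u64` for the duration of that borrow.
//     assert_eq!(unsafe { ptr.as_non_null().as_ptr().read() }, 42);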

#[allow(clippy::needless_lifetimes)]
impl<'a, T> PtrInner<'a, T>
where
    T: ?Sized + KnownLayout,
{
    /// Extracts the metadata of this `ptr`.
    pub(crate) fn meta(self) -> MetadataOf<T> {
        let meta = T::pointer_to_metadata(self.as_non_null().as_ptr());
        // SAFETY: By invariant on `PtrInner`, `self.as_non_null()` addresses no
        // more than `isize::MAX` bytes.
        unsafe { MetadataOf::new_unchecked(meta) }
    }

    /// Produces a `PtrInner` with the same address and provenance as `self` but
    /// the given `meta`.
    ///
    /// # Safety
    ///
    /// The caller promises that if `self`'s referent is not zero sized, then
    /// a pointer constructed from its address with the given `meta` metadata
    /// will address a subset of the allocation pointed to by `self`.
    #[inline]
    pub(crate) unsafe fn with_meta(self, meta: T::PointerMetadata) -> Self
    where
        T: KnownLayout,
    {
        let raw = T::raw_from_ptr_len(self.as_non_null().cast(), meta);

        // SAFETY:
        //
        // Lemma 0: `raw` either addresses zero bytes, or addresses a subset of
        //          the allocation pointed to by `self` and has the same
        //          provenance as `self`. Proof: `raw` is constructed using
        //          provenance-preserving operations, and the caller has
        //          promised that, if `self`'s referent is not zero-sized, the
        //          resulting pointer addresses a subset of the allocation
        //          pointed to by `self`.
        //
        // 0. Per Lemma 0 and by invariant on `self`, if `ptr`'s referent is not
        //    zero sized, then `ptr` is derived from some valid Rust allocation,
        //    `A`.
        // 1. Per Lemma 0 and by invariant on `self`, if `ptr`'s referent is not
        //    zero sized, then `ptr` has valid provenance for `A`.
        // 2. Per Lemma 0 and by invariant on `self`, if `ptr`'s referent is not
        //    zero sized, then `ptr` addresses a byte range which is entirely
        //    contained in `A`.
        // 3. Per Lemma 0 and by invariant on `self`, `ptr` addresses a byte
        //    range whose length fits in an `isize`.
        // 4. Per Lemma 0 and by invariant on `self`, `ptr` addresses a byte
        //    range which does not wrap around the address space.
        // 5. Per Lemma 0 and by invariant on `self`, if `ptr`'s referent is not
        //    zero sized, then `A` is guaranteed to live for at least `'a`.
        unsafe { PtrInner::new(raw) }
    }
}
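
// Editorial note (illustrative sketch, not part of the original source): the
// safety condition on `with_meta` is satisfied when the new metadata describes
// a referent no larger than the existing one, e.g. shrinking a slice pointer:
//
//     let arr = [0u8; 16];
//     let ptr = PtrInner::from_ref(&arr[..]);
//     // SAFETY: 8 <= 16, so the resulting pointer addresses a subset of the
//     // allocation backing `arr`.
//     let shrunk = unsafe { ptr.with_meta(8) };
//     assert_eq!(shrunk.meta().get(), 8);
//
// By contrast, `ptr.with_meta(32)` would not satisfy the caller's promise and
// would be unsound.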

#[allow(clippy::needless_lifetimes)]
impl<'a, T> PtrInner<'a, T>
where
    T: ?Sized + KnownLayout<PointerMetadata = usize>,
{
    /// Splits `T` in two.
    ///
    /// # Safety
    ///
    /// The caller promises that:
    /// - `l_len.get() <= self.meta()`.
    ///
    /// ## (Non-)Overlap
    ///
    /// Given `let (left, right) = ptr.split_at(l_len)`, it is guaranteed that
    /// `left` and `right` are contiguous and non-overlapping if
    /// `l_len.padding_needed_for() == 0`. This is true for all `[T]`.
    ///
    /// If `l_len.padding_needed_for() != 0`, then the left pointer will overlap
    /// the right pointer to satisfy `T`'s padding requirements.
    pub(crate) unsafe fn split_at_unchecked(
        self,
        l_len: crate::util::MetadataOf<T>,
    ) -> (Self, PtrInner<'a, [T::Elem]>)
    where
        T: SplitAt,
    {
        let l_len = l_len.get();

        // SAFETY: The caller promises that `l_len.get() <= self.meta()`.
        // Trivially, `0 <= l_len`.
        let left = unsafe { self.with_meta(l_len) };

        let right = self.trailing_slice();
        // SAFETY: The caller promises that `l_len <= self.meta()`. Since
        // `right` is `self`'s trailing slice, `right.meta() = self.meta()`,
        // and trivially `self.meta() <= self.meta()`. Thus, the range
        // `l_len..self.meta().get()` is valid and in bounds of `right`.
        let right = unsafe { right.slice_unchecked(l_len..self.meta().get()) };

        // SAFETY: If `l_len.padding_needed_for() == 0`, then `left` and
        // `right` are non-overlapping. Proof: `left` is constructed from
        // `self` with `l_len` as its (exclusive) upper bound. If
        // `l_len.padding_needed_for() == 0`, then `left` requires no trailing
        // padding following its final element. Since `right` is constructed
        // from `self`'s trailing slice with `l_len` as its (inclusive) lower
        // bound, no byte is referred to by both pointers.
        //
        // Conversely, if `l_len.padding_needed_for() == N`, where `N > 0`,
        // then `left` requires `N` bytes of trailing padding following its
        // final element. Since `right` is constructed from the trailing slice
        // of `self` with `l_len` as its (inclusive) lower bound, the first `N`
        // bytes of `right` are aliased by `left`.
        (left, right)
    }
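
    // Editorial note (illustrative sketch, not part of the original source):
    // the overlap case above can be made concrete with a hypothetical type
    //
    //     #[repr(C)]
    //     struct Dst {
    //         prefix: u16,
    //         trailing: [u8],
    //     }
    //
    // whose trailing slice begins at offset 2 and whose alignment is 2.
    // Splitting at `l_len = 1` produces a `left: PtrInner<Dst>` whose referent
    // is 3 bytes of data rounded up to a size of 4, and a `right:
    // PtrInner<[u8]>` beginning at byte offset 3; the final (padding) byte of
    // `left` is the first byte of `right`, matching
    // `l_len.padding_needed_for() == 1`. For `T = [u8]` itself,
    // `padding_needed_for` is always 0 and no overlap occurs.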

    /// Produces the trailing slice of `self`.
    pub(crate) fn trailing_slice(self) -> PtrInner<'a, [T::Elem]>
    where
        T: SplitAt,
    {
        let offset = crate::trailing_slice_layout::<T>().offset;

        let bytes = self.as_non_null().cast::<u8>().as_ptr();

        // SAFETY:
        // - By invariant on `T: KnownLayout`, `T::LAYOUT` describes `T`'s
        //   layout. `offset` is the offset of the trailing slice within `T`,
        //   which is by definition in-bounds or one byte past the end of any
        //   `T`, regardless of metadata. By invariant on `PtrInner`, `self`
        //   (and thus `bytes`) points to a byte range of size `<= isize::MAX`,
        //   and so `offset <= isize::MAX`. Since `size_of::<u8>() == 1`,
        //   `offset * size_of::<u8>() <= isize::MAX`.
        // - If `offset > 0`, then by invariant on `PtrInner`, `self` (and thus
        //   `bytes`) points to a byte range entirely contained within the same
        //   allocated object as `self`. As explained above, this offset results
        //   in a pointer to or one byte past the end of this allocated object.
        let bytes = unsafe { bytes.add(offset) };

        // SAFETY: By the preceding safety argument, `bytes` is within or one
        // byte past the end of the same allocated object as `self`, which
        // ensures that it is non-null.
        let bytes = unsafe { NonNull::new_unchecked(bytes) };

        let ptr = KnownLayout::raw_from_ptr_len(bytes, self.meta().get());

        // SAFETY:
        // 0. If `ptr`'s referent is not zero sized, then `ptr` is derived from
        //    some valid Rust allocation, `A`, because `ptr` is derived from
        //    the same allocated object as `self`.
        // 1. If `ptr`'s referent is not zero sized, then `ptr` has valid
        //    provenance for `A` because `raw` is derived from the same
        //    allocated object as `self` via provenance-preserving operations.
        // 2. If `ptr`'s referent is not zero sized, then `ptr` addresses a byte
        //    range which is entirely contained in `A`, by previous safety proof
        //    on `bytes`.
        // 3. `ptr` addresses a byte range whose length fits in an `isize`, by
        //    consequence of #2.
        // 4. `ptr` addresses a byte range which does not wrap around the
        //    address space, by consequence of #2.
        // 5. If `ptr`'s referent is not zero sized, then `A` is guaranteed to
        //    live for at least `'a`, because `ptr` is derived from `self`.
        unsafe { PtrInner::new(ptr) }
    }
}
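
// Editorial note (illustrative sketch, not part of the original source):
// `trailing_slice` preserves the metadata of `self`; only `split_at_unchecked`
// narrows it. Reusing the hypothetical `Dst` from the note above, and assuming
// it additionally derives `FromBytes`, `KnownLayout`, `SplitAt`, and
// `Immutable` (as the test types at the bottom of this file do):
//
//     // `bytes: &[u8]` is a 6-byte, 2-byte-aligned buffer: a 2-byte prefix
//     // followed by 4 trailing elements.
//     let dst = Dst::ref_from_bytes(bytes).unwrap();
//     let ptr = PtrInner::from_ref(dst);
//     assert_eq!(ptr.meta().get(), 4);
//     assert_eq!(ptr.trailing_slice().meta().get(), 4);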

#[allow(clippy::needless_lifetimes)]
impl<'a, T> PtrInner<'a, [T]> {
    /// Creates a pointer which addresses the given `range` of `self`.
    ///
    /// # Safety
    ///
    /// `range` is a valid range (`start <= end`) and `end <= self.meta()`.
    pub(crate) unsafe fn slice_unchecked(self, range: Range<usize>) -> Self {
        let base = self.as_non_null().cast::<T>().as_ptr();

        // SAFETY: The caller promises that `start <= end <= self.meta()`. By
        // invariant, if `self`'s referent is not zero-sized, then `self` refers
        // to a byte range which is contained within a single allocation, which
        // is no more than `isize::MAX` bytes long, and which does not wrap
        // around the address space. Thus, this pointer arithmetic remains
        // in-bounds of the same allocation, and does not wrap around the
        // address space. The offset (in bytes) does not overflow `isize`.
        //
        // If `self`'s referent is zero-sized, then these conditions are
        // trivially satisfied.
        let base = unsafe { base.add(range.start) };

        // SAFETY: The caller promises that `start <= end`, and so this will not
        // underflow.
        #[allow(unstable_name_collisions)]
        let len = unsafe { range.end.unchecked_sub(range.start) };

        let ptr = core::ptr::slice_from_raw_parts_mut(base, len);

        // SAFETY: By invariant, `self`'s referent is either a ZST or lives
        // entirely in an allocation. `ptr` points inside of or one byte past
        // the end of that referent. Thus, in either case, `ptr` is non-null.
        let ptr = unsafe { NonNull::new_unchecked(ptr) };

        // SAFETY:
        //
        // Lemma 0: `ptr` addresses a subset of the bytes addressed by `self`,
        //          and has the same provenance. Proof: The caller guarantees
        //          that `start <= end <= self.meta()`. Thus, `base` is
        //          in-bounds of `self`, and `base + (end - start)` is also
        //          in-bounds of `self`. Finally, `ptr` is constructed using
        //          provenance-preserving operations.
        //
        // 0. Per Lemma 0 and by invariant on `self`, if `ptr`'s referent is not
        //    zero sized, then `ptr` has valid provenance for its referent,
        //    which is entirely contained in some Rust allocation, `A`.
        // 1. Per Lemma 0 and by invariant on `self`, if `ptr`'s referent is not
        //    zero sized, then `A` is guaranteed to live for at least `'a`.
        unsafe { PtrInner::new(ptr) }
    }
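
    // Editorial note (illustrative sketch, not part of the original source):
    // with `ptr: PtrInner<'_, [u8]>` of length 16,
    //
    //     // SAFETY: 4 <= 12 <= 16 == ptr.meta().get().
    //     let sub = unsafe { ptr.slice_unchecked(4..12) };
    //     assert_eq!(sub.meta().get(), 8);
    //
    // is sound, whereas `ptr.slice_unchecked(4..20)` would violate the
    // `end <= self.meta()` precondition.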

    /// Iteratively projects the elements `PtrInner<T>` from `PtrInner<[T]>`.
    pub(crate) fn iter(&self) -> impl Iterator<Item = PtrInner<'a, T>> {
        // FIXME(#429): Once `NonNull::cast` documents that it preserves
        // provenance, cite those docs.
        let base = self.as_non_null().cast::<T>().as_ptr();
        (0..self.meta().get()).map(move |i| {
            // FIXME(https://github.com/rust-lang/rust/issues/74265): Use
            // `NonNull::get_unchecked_mut`.

            // SAFETY: If the following conditions are not satisfied,
            // `pointer::add` may induce Undefined Behavior [1]:
            //
            // > - The computed offset, `count * size_of::<T>()` bytes, must not
            // >   overflow `isize`.
            // > - If the computed offset is non-zero, then `self` must be
            // >   derived from a pointer to some allocated object, and the
            // >   entire memory range between `self` and the result must be in
            // >   bounds of that allocated object. In particular, this range
            // >   must not “wrap around” the edge of the address space.
            //
            // [1] https://doc.rust-lang.org/std/primitive.pointer.html#method.add
            //
            // We satisfy both of these conditions here:
            // - By invariant on `Ptr`, `self` addresses a byte range whose
            //   length fits in an `isize`. Since `elem` is contained in `self`,
            //   the computed offset of `elem` must fit within `isize`.
            // - If the computed offset is non-zero, then this means that the
            //   referent is not zero-sized. In this case, `base` points to an
            //   allocated object (by invariant on `self`). Thus:
            //   - By contract, `self.meta()` accurately reflects the number of
            //     elements in the slice. `i` is in bounds of `self.meta()` by
            //     construction, and so the result of this addition cannot
            //     overflow past the end of the allocation referred to by
            //     `self`.
            //   - By invariant on `Ptr`, `self` addresses a byte range which
            //     does not wrap around the address space. Since `elem` is
            //     contained in `self`, the computed offset of `elem` does not
            //     wrap around the address space.
            //
            // FIXME(#429): Once `pointer::add` documents that it preserves
            // provenance, cite those docs.
            let elem = unsafe { base.add(i) };

            // SAFETY: `elem` must not be null. `base` is constructed from a
            // `NonNull` pointer, and the addition that produces `elem` must not
            // overflow or wrap around, so `elem >= base > 0`.
            //
            // FIXME(#429): Once `NonNull::new_unchecked` documents that it
            // preserves provenance, cite those docs.
            let elem = unsafe { NonNull::new_unchecked(elem) };

            // SAFETY: The safety invariants of `PtrInner::new` (see definition)
            // are satisfied:
            // 0. If `elem`'s referent is not zero sized, then `elem` has valid
            //    provenance for its referent, because it is derived from `self`
            //    using a series of provenance-preserving operations, and
            //    because `self` has valid provenance for its referent. By the
            //    same argument, `elem`'s referent is entirely contained within
            //    the same allocated object as `self`'s referent.
            // 1. If `elem`'s referent is not zero sized, then the allocation of
            //    `elem` is guaranteed to live for at least `'a`, because `elem`
            //    is entirely contained in `self`, which lives for at least `'a`
            //    by invariant on `Ptr`.
            unsafe { PtrInner::new(elem) }
        })
    }
}
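
// Editorial note (illustrative sketch, not part of the original source):
// `iter` projects an element pointer for each index in `0..self.meta().get()`,
// so the elements of a `[u8]` referent can be read back one at a time:
//
//     let arr = [1u8, 2, 3];
//     let ptr = PtrInner::from_ref(&arr[..]);
//     // SAFETY: Each element pointer is derived from `&arr`, is in bounds,
//     // and refers to an initialized `u8`.
//     let sum: u8 = ptr.iter().map(|e| unsafe { *e.as_non_null().as_ptr() }).sum();
//     assert_eq!(sum, 6);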

impl<'a, T, const N: usize> PtrInner<'a, [T; N]> {
    /// Casts this pointer-to-array into a slice.
    ///
    /// # Safety
    ///
    /// Callers may assume that the returned `PtrInner` references the same
    /// address and length as `self`.
    #[allow(clippy::wrong_self_convention)]
    pub(crate) fn as_slice(self) -> PtrInner<'a, [T]> {
        let start = self.as_non_null().cast::<T>().as_ptr();
        let slice = core::ptr::slice_from_raw_parts_mut(start, N);
        // SAFETY: `slice` is not null, because it is derived from `start`
        // which is non-null.
        let slice = unsafe { NonNull::new_unchecked(slice) };
        // SAFETY: Lemma: In the following safety arguments, note that `slice`
        // is derived from `self` in two steps: first, by casting `self: [T; N]`
        // to `start: T`, then by constructing a pointer to a slice starting at
        // `start` of length `N`. As a result, `slice` references exactly the
        // same allocation as `self`, if any.
        //
        // 0. By the above lemma, if `slice`'s referent is not zero sized, then
        //    `slice` has the same referent as `self`. By invariant on `self`,
        //    this referent is entirely contained within some allocation, `A`.
        //    Because `slice` was constructed using provenance-preserving
        //    operations, it has provenance for its entire referent.
        // 1. By the above lemma, if `slice`'s referent is not zero sized, then
        //    `A` is guaranteed to live for at least `'a`, because it is derived
        //    from the same allocation as `self`, which, by invariant on `Ptr`,
        //    lives for at least `'a`.
        unsafe { PtrInner::new(slice) }
    }
}
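
// Editorial note (illustrative sketch, not part of the original source):
// `as_slice` changes only the pointer's type, never its address or length, so
// the resulting metadata always equals `N`:
//
//     let arr = [0u32; 4];
//     let ptr = PtrInner::from_ref(&arr); // PtrInner<'_, [u32; 4]>
//     let slice = ptr.as_slice();         // PtrInner<'_, [u32]>
//     assert_eq!(slice.meta().get(), 4);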

impl<'a> PtrInner<'a, [u8]> {
    /// Attempts to cast `self` to a `U` using the given cast type.
    ///
    /// If `U` is a slice DST and pointer metadata (`meta`) is provided, then
    /// the cast will only succeed if it would produce an object with the given
    /// metadata.
    ///
    /// Returns an error if the resulting `U` would be invalidly-aligned, if no
    /// `U` can fit in `self`, or if the provided pointer metadata describes an
    /// invalid instance of `U`. On success, returns a pointer to the
    /// largest-possible `U` which fits in `self`.
    ///
    /// # Safety
    ///
    /// The caller may assume that this implementation is correct, and may rely
    /// on that assumption for the soundness of their code. In particular, the
    /// caller may assume that, if `try_cast_into` returns `Ok((ptr,
    /// remainder))`, then `ptr` and `remainder` refer to non-overlapping byte
    /// ranges within `self`, and that `ptr` and `remainder` entirely cover
    /// `self`. Finally:
    /// - If this is a prefix cast, `ptr` has the same address as `self`.
    /// - If this is a suffix cast, `remainder` has the same address as `self`.
    #[inline]
    pub(crate) fn try_cast_into<U>(
        self,
        cast_type: CastType,
        meta: Option<U::PointerMetadata>,
    ) -> Result<(PtrInner<'a, U>, PtrInner<'a, [u8]>), CastError<Self, U>>
    where
        U: 'a + ?Sized + KnownLayout,
    {
        // PANICS: By invariant, the byte range addressed by
        // `self.as_non_null()` does not wrap around the address space. This
        // implies that the sum of the address (represented as a `usize`) and
        // length do not overflow `usize`, as required by
        // `validate_cast_and_convert_metadata`. Thus, this call to
        // `validate_cast_and_convert_metadata` will only panic if `U` is a DST
        // whose trailing slice element is zero-sized.
        let maybe_metadata = MetadataOf::<U>::validate_cast_and_convert_metadata(
            AsAddress::addr(self.as_non_null().as_ptr()),
            self.meta(),
            cast_type,
            meta,
        );

        let (elems, split_at) = match maybe_metadata {
            Ok((elems, split_at)) => (elems, split_at),
            Err(MetadataCastError::Alignment) => {
                // SAFETY: Since `validate_cast_and_convert_metadata` returned
                // an alignment error, `U` must have an alignment requirement
                // greater than one.
                let err = unsafe { AlignmentError::<_, U>::new_unchecked(self) };
                return Err(CastError::Alignment(err));
            }
            Err(MetadataCastError::Size) => return Err(CastError::Size(SizeError::new(self))),
        };

        // SAFETY: `validate_cast_and_convert_metadata` promises to return
        // `split_at <= self.meta()`.
        //
        // Lemma 0: `l_slice` and `r_slice` are non-overlapping. Proof: By
        // contract on `PtrInner::split_at_unchecked`, the produced `PtrInner`s
        // are non-overlapping whenever `l_len.padding_needed_for() == 0`, which
        // holds for every slice type; here, `self` is a `[u8]`.
        let (l_slice, r_slice) = unsafe { self.split_at_unchecked(split_at) };

        let (target, remainder) = match cast_type {
            CastType::Prefix => (l_slice, r_slice),
            CastType::Suffix => (r_slice, l_slice),
        };

        let base = target.as_non_null().cast::<u8>();

        let ptr = U::raw_from_ptr_len(base, elems.get());

        // SAFETY:
        // 0. By invariant, if `target`'s referent is not zero sized, then
        //    `target` has provenance valid for some Rust allocation, `A`.
        //    Because `ptr` is derived from `target` via provenance-preserving
        //    operations, `ptr` will also have provenance valid for its entire
        //    referent.
        // 1. `validate_cast_and_convert_metadata` promises that the object
        //    described by `elems` and `split_at` lives at a byte range which is
        //    a subset of the input byte range. Thus, by invariant, if
        //    `target`'s referent is not zero sized, then `target` refers to an
        //    allocation which is guaranteed to live for at least `'a`, and thus
        //    so does `ptr`.
        Ok((unsafe { PtrInner::new(ptr) }, remainder))
    }
}
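
// Editorial note (illustrative sketch, not part of the original source): a
// prefix cast carves the largest leading `U` out of a byte slice and returns
// the rest as the remainder. Assuming `ptr: PtrInner<'_, [u8]>` refers to 16
// bytes aligned to 4, and taking `U = [u32]`:
//
//     match ptr.try_cast_into::<[u32]>(CastType::Prefix, None) {
//         Ok((target, remainder)) => {
//             // All 16 bytes are consumed: four `u32`s, empty remainder.
//             assert_eq!(target.meta().get(), 4);
//             assert_eq!(remainder.meta().get(), 0);
//         }
//         Err(_) => unreachable!("the buffer is aligned and large enough"),
//     }
//
// Passing `meta: Some(3)` instead demands exactly three `u32`s, succeeding on
// the same buffer with a 4-byte remainder.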

#[cfg(test)]
mod tests {
    use super::*;
    use crate::*;

    #[test]
    fn test_meta() {
        let arr = [1; 16];
        let dst = <[u8]>::ref_from_bytes(&arr[..]).unwrap();
        let ptr = PtrInner::from_ref(dst);
        assert_eq!(ptr.meta().get(), 16);

        // SAFETY: 8 is less than 16
        let ptr = unsafe { ptr.with_meta(8) };

        assert_eq!(ptr.meta().get(), 8);
    }

    #[test]
    fn test_split_at() {
        fn test_split_at<const OFFSET: usize, const BUFFER_SIZE: usize>() {
            #[derive(FromBytes, KnownLayout, SplitAt, Immutable)]
            #[repr(C)]
            struct SliceDst<const OFFSET: usize> {
                prefix: [u8; OFFSET],
                trailing: [u8],
            }

            let n: usize = BUFFER_SIZE - OFFSET;
            let arr = [1; BUFFER_SIZE];
            let dst = SliceDst::<OFFSET>::ref_from_bytes(&arr[..]).unwrap();
            let ptr = PtrInner::from_ref(dst);
            for i in 0..=n {
                assert_eq!(ptr.meta().get(), n);
                // SAFETY: `i` is in bounds by construction.
                let i = unsafe { MetadataOf::new_unchecked(i) };
                // SAFETY: `i` is in bounds by construction.
                let (l, r) = unsafe { ptr.split_at_unchecked(i) };
                // SAFETY: Points to a valid value by construction.
                #[allow(clippy::undocumented_unsafe_blocks, clippy::as_conversions)]
                // Clippy false positive
                let l_sum: usize = l
                    .trailing_slice()
                    .iter()
                    .map(|ptr| unsafe { core::ptr::read_unaligned(ptr.as_non_null().as_ptr()) }
                        as usize)
                    .sum();
                // SAFETY: Points to a valid value by construction.
                #[allow(clippy::undocumented_unsafe_blocks, clippy::as_conversions)]
                // Clippy false positive
                let r_sum: usize = r
                    .iter()
                    .map(|ptr| unsafe { core::ptr::read_unaligned(ptr.as_non_null().as_ptr()) }
                        as usize)
                    .sum();
                assert_eq!(l_sum, i.get());
                assert_eq!(r_sum, n - i.get());
                assert_eq!(l_sum + r_sum, n);
            }
        }

        test_split_at::<0, 16>();
        test_split_at::<1, 17>();
        test_split_at::<2, 18>();
    }

    #[test]
    fn test_trailing_slice() {
        fn test_trailing_slice<const OFFSET: usize, const BUFFER_SIZE: usize>() {
            #[derive(FromBytes, KnownLayout, SplitAt, Immutable)]
            #[repr(C)]
            struct SliceDst<const OFFSET: usize> {
                prefix: [u8; OFFSET],
                trailing: [u8],
            }

            let n: usize = BUFFER_SIZE - OFFSET;
            let arr = [1; BUFFER_SIZE];
            let dst = SliceDst::<OFFSET>::ref_from_bytes(&arr[..]).unwrap();
            let ptr = PtrInner::from_ref(dst);

            assert_eq!(ptr.meta().get(), n);
            let trailing = ptr.trailing_slice();
            assert_eq!(trailing.meta().get(), n);

            assert_eq!(
                // SAFETY: We assume this to be sound for the sake of this test,
                // which will fail here, under Miri, if the safety precondition
                // of `offset_from` is not satisfied.
                unsafe {
                    #[allow(clippy::as_conversions)]
                    let offset = (trailing.as_non_null().as_ptr() as *mut u8)
                        .offset_from(ptr.as_non_null().as_ptr() as *mut _);
                    offset
                },
                isize::try_from(OFFSET).unwrap(),
            );

            // SAFETY: Points to a valid value by construction.
            #[allow(clippy::undocumented_unsafe_blocks, clippy::as_conversions)]
            // Clippy false positive
            let trailing: usize =
                trailing
                    .iter()
                    .map(|ptr| unsafe { core::ptr::read_unaligned(ptr.as_non_null().as_ptr()) }
                        as usize)
                    .sum();

            assert_eq!(trailing, n);
        }

        test_trailing_slice::<0, 16>();
        test_trailing_slice::<1, 17>();
        test_trailing_slice::<2, 18>();
    }
}
677}