zerocopy/split_at.rs

// Copyright 2025 The Fuchsia Authors
//
// Licensed under the 2-Clause BSD License <LICENSE-BSD or
// https://opensource.org/license/bsd-2-clause>, Apache License, Version 2.0
// <LICENSE-APACHE or https://www.apache.org/licenses/LICENSE-2.0>, or the MIT
// license <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your option.
// This file may not be copied, modified, or distributed except according to
// those terms.

use super::*;
use crate::pointer::invariant::{Aligned, Exclusive, Invariants, Shared, Valid};

/// Types that can be split in two.
///
/// This trait generalizes Rust's existing support for splitting slices to
/// support slices and slice-based dynamically-sized types ("slice DSTs").
///
/// # Implementation
///
/// **Do not implement this trait yourself!** Instead, use
/// [`#[derive(SplitAt)]`][derive]; e.g.:
///
/// ```
/// # use zerocopy_derive::{SplitAt, KnownLayout};
/// #[derive(SplitAt, KnownLayout)]
/// #[repr(C)]
/// struct MyStruct<T: ?Sized> {
/// # /*
///     ...,
/// # */
///     // `SplitAt` types must have at least one field.
///     field: T,
/// }
/// ```
///
/// This derive performs a sophisticated, compile-time safety analysis to
/// determine whether a type is `SplitAt`.
///
/// # Safety
///
/// This trait does not convey any safety guarantees to code outside this crate.
///
/// You must not rely on the `#[doc(hidden)]` internals of `SplitAt`. Future
/// releases of zerocopy may make backwards-breaking changes to these items,
/// including changes that only affect soundness, which may cause code which
/// uses those items to silently become unsound.
///
#[cfg_attr(feature = "derive", doc = "[derive]: zerocopy_derive::SplitAt")]
#[cfg_attr(
    not(feature = "derive"),
    doc = concat!("[derive]: https://docs.rs/zerocopy/", env!("CARGO_PKG_VERSION"), "/zerocopy/derive.SplitAt.html"),
)]
#[cfg_attr(
    zerocopy_diagnostic_on_unimplemented_1_78_0,
    diagnostic::on_unimplemented(note = "Consider adding `#[derive(SplitAt)]` to `{Self}`")
)]
// # Safety
//
// The trailing slice is well-aligned for its element type. `Self` is `[T]`, or
// a `repr(C)` or `repr(transparent)` slice DST.
pub unsafe trait SplitAt: KnownLayout<PointerMetadata = usize> {
    /// The element type of the trailing slice.
    type Elem;

    #[doc(hidden)]
    fn only_derive_is_allowed_to_implement_this_trait()
    where
        Self: Sized;

    /// Unsafely splits `self` in two.
    ///
    /// # Safety
    ///
    /// The caller promises that `l_len` is not greater than the length of
    /// `self`'s trailing slice.
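    ///
    /// # Examples
    ///
    /// A minimal sketch (not taken from the original documentation) in which
    /// the precondition holds because `l_len` is derived from the slice's own
    /// length. The call is written through the trait so that it resolves to
    /// `SplitAt::split_at_unchecked` rather than the slice's inherent method
    /// of the same name.
    ///
    /// ```
    /// use zerocopy::SplitAt;
    ///
    /// let bytes: &[u8] = &[1, 2, 3, 4, 5][..];
    /// let l_len = bytes.len() / 2;
    ///
    /// // SAFETY: `l_len` is computed from `bytes.len()` and so cannot exceed
    /// // the length of the trailing slice (here, the whole slice).
    /// let split = unsafe { SplitAt::split_at_unchecked(bytes, l_len) };
    ///
    /// // `[u8]` is `Immutable`, so both halves may be referenced concurrently.
    /// let (left, right) = split.via_immutable();
    /// assert_eq!(left, [1, 2]);
    /// assert_eq!(right, [3, 4, 5]);
    /// ```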
    #[inline]
    #[must_use]
    unsafe fn split_at_unchecked(&self, l_len: usize) -> Split<&Self> {
        // SAFETY: By precondition on the caller, `l_len <= self.len()`.
        unsafe { Split::<&Self>::new(self, l_len) }
    }

    /// Attempts to split `self` in two.
    ///
    /// Returns `None` if `l_len` is greater than the length of `self`'s
    /// trailing slice.
    ///
    /// # Examples
    ///
    /// ```
    /// use zerocopy::{SplitAt, FromBytes};
    /// # use zerocopy_derive::*;
    ///
    /// #[derive(SplitAt, FromBytes, KnownLayout, Immutable)]
    /// #[repr(C)]
    /// struct Packet {
    ///     length: u8,
    ///     body: [u8],
    /// }
    ///
    /// // These bytes encode a `Packet`.
    /// let bytes = &[4, 1, 2, 3, 4, 5, 6, 7, 8, 9][..];
    ///
    /// let packet = Packet::ref_from_bytes(bytes).unwrap();
    ///
    /// assert_eq!(packet.length, 4);
    /// assert_eq!(packet.body, [1, 2, 3, 4, 5, 6, 7, 8, 9]);
    ///
    /// // Attempt to split `packet` at `length`.
    /// let split = packet.split_at(packet.length as usize).unwrap();
    ///
    /// // Use the `Immutable` bound on `Packet` to prove that it's okay to
    /// // return concurrent references to `packet` and `rest`.
    /// let (packet, rest) = split.via_immutable();
    ///
    /// assert_eq!(packet.length, 4);
    /// assert_eq!(packet.body, [1, 2, 3, 4]);
    /// assert_eq!(rest, [5, 6, 7, 8, 9]);
    /// ```
    #[inline]
    #[must_use = "has no side effects"]
    fn split_at(&self, l_len: usize) -> Option<Split<&Self>> {
        MetadataOf::new_in_bounds(self, l_len).map(
            #[inline(always)]
            |l_len| {
                // SAFETY: We have ensured that `l_len <= self.len()` (by
                // post-condition on `MetadataOf::new_in_bounds`)
                unsafe { Split::new(self, l_len.get()) }
            },
        )
    }

    /// Unsafely splits `self` in two.
    ///
    /// # Safety
    ///
    /// The caller promises that `l_len` is not greater than the length of
    /// `self`'s trailing slice.
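    ///
    /// # Examples
    ///
    /// A minimal sketch (not taken from the original documentation). The call
    /// is written through the trait so that it resolves to
    /// `SplitAt::split_at_mut_unchecked` rather than the slice's inherent
    /// method of the same name.
    ///
    /// ```
    /// use zerocopy::SplitAt;
    ///
    /// let bytes: &mut [u8] = &mut [1, 2, 3, 4, 5][..];
    ///
    /// // SAFETY: `2` is not greater than the length of `bytes` (5).
    /// let split = unsafe { SplitAt::split_at_mut_unchecked(bytes, 2) };
    ///
    /// // `[u8]: IntoBytes`, so the two halves are guaranteed not to overlap.
    /// let (left, right) = split.via_into_bytes();
    /// left.fill(0);
    /// assert_eq!(left, [0, 0]);
    /// assert_eq!(right, [3, 4, 5]);
    /// ```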
    #[inline]
    #[must_use]
    unsafe fn split_at_mut_unchecked(&mut self, l_len: usize) -> Split<&mut Self> {
        // SAFETY: By precondition on the caller, `l_len <= self.len()`.
        unsafe { Split::<&mut Self>::new(self, l_len) }
    }

    /// Attempts to split `self` in two.
    ///
    /// Returns `None` if `l_len` is greater than the length of `self`'s
    /// trailing slice. If the given `l_len` would result in [the trailing
    /// padding](KnownLayout#slice-dst-layout) of the left portion overlapping
    /// the right portion, the split itself still succeeds; that overlap is
    /// instead checked when the parts are extracted (e.g., by
    /// `Split::via_runtime_check`).
    ///
    /// # Examples
    ///
    /// ```
    /// use zerocopy::{SplitAt, FromBytes};
    /// # use zerocopy_derive::*;
    ///
    /// #[derive(SplitAt, FromBytes, KnownLayout, IntoBytes)]
    /// #[repr(C)]
    /// struct Packet<B: ?Sized> {
    ///     length: u8,
    ///     body: B,
    /// }
    ///
    /// // These bytes encode a `Packet`.
    /// let mut bytes = &mut [4, 1, 2, 3, 4, 5, 6, 7, 8, 9][..];
    ///
    /// let packet = Packet::<[u8]>::mut_from_bytes(bytes).unwrap();
    ///
    /// assert_eq!(packet.length, 4);
    /// assert_eq!(packet.body, [1, 2, 3, 4, 5, 6, 7, 8, 9]);
    ///
    /// {
    ///     // Attempt to split `packet` at `length`.
    ///     let split = packet.split_at_mut(packet.length as usize).unwrap();
    ///
    ///     // Use the `IntoBytes` bound on `Packet` to prove that it's okay to
    ///     // return concurrent references to `packet` and `rest`.
    ///     let (packet, rest) = split.via_into_bytes();
    ///
    ///     assert_eq!(packet.length, 4);
    ///     assert_eq!(packet.body, [1, 2, 3, 4]);
    ///     assert_eq!(rest, [5, 6, 7, 8, 9]);
    ///
    ///     rest.fill(0);
    /// }
    ///
    /// assert_eq!(packet.length, 4);
    /// assert_eq!(packet.body, [1, 2, 3, 4, 0, 0, 0, 0, 0]);
    /// ```
    #[inline]
    fn split_at_mut(&mut self, l_len: usize) -> Option<Split<&mut Self>> {
        MetadataOf::new_in_bounds(self, l_len).map(
            #[inline(always)]
            |l_len| {
                // SAFETY: We have ensured that `l_len <= self.len()` (by
                // post-condition on `MetadataOf::new_in_bounds`)
                unsafe { Split::new(self, l_len.get()) }
            },
        )
    }
}

// SAFETY: `[T]`'s trailing slice is `[T]`, which is trivially aligned.
unsafe impl<T> SplitAt for [T] {
    type Elem = T;

    #[inline]
    #[allow(dead_code)]
    fn only_derive_is_allowed_to_implement_this_trait()
    where
        Self: Sized,
    {
    }
}

/// A `T` that has been split into two possibly-overlapping parts.
///
/// For some dynamically sized types, the padding that appears after the
/// trailing slice field [is a dynamic function of the trailing slice
/// length](KnownLayout#slice-dst-layout). If `T` is split at a length that
/// requires trailing padding, the trailing padding of the left part of the
/// split `T` will overlap the right part. If `T` is a mutable reference or
/// permits interior mutation, you must ensure that the left and right parts do
/// not overlap. You can do this at zero cost using [`Self::via_immutable`],
/// [`Self::via_into_bytes`], or [`Self::via_unaligned`], or with a dynamic
/// check by using [`Self::via_runtime_check`].
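///
/// For example (a sketch that is not taken from the original documentation,
/// using a hypothetical `Record` type): in a 4-byte-aligned slice DST,
/// splitting the trailing slice at a length that leaves the left part's size
/// short of a multiple of its alignment produces trailing padding that
/// overlaps the right part, and the dynamic check refuses such a split.
///
/// ```
/// use zerocopy::{FromBytes, IntoBytes, SplitAt};
/// # use zerocopy_derive::*;
///
/// #[derive(SplitAt, FromBytes, KnownLayout, Immutable)]
/// #[repr(C, align(4))]
/// struct Record {
///     header: u32,
///     body: [u8],
/// }
///
/// // A `u32` array guarantees that the source bytes are 4-byte aligned.
/// let bytes = [0u32; 3];
/// let record = Record::ref_from_bytes(bytes.as_bytes()).unwrap();
///
/// // Splitting `body` at 4 leaves the left part with no trailing padding, so
/// // the dynamic check succeeds...
/// assert!(record.split_at(4).unwrap().via_runtime_check().is_ok());
///
/// // ...while splitting at 1 would leave three trailing padding bytes that
/// // overlap the right part, so the dynamic check refuses the split.
/// assert!(record.split_at(1).unwrap().via_runtime_check().is_err());
/// ```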
#[derive(Debug)]
pub struct Split<T> {
    /// A pointer to the source slice DST.
    source: T,
    /// The length of the future left half of `source`.
    ///
    /// # Safety
    ///
    /// If `source` is a pointer to a slice DST, `l_len` is no greater than
    /// `source`'s length.
    l_len: usize,
}

impl<T> Split<T> {
    /// Produces a `Split` of `source` with `l_len`.
    ///
    /// # Safety
    ///
    /// `l_len` is no greater than `source`'s length.
    #[inline(always)]
    unsafe fn new(source: T, l_len: usize) -> Self {
        Self { source, l_len }
    }
}

impl<'a, T> Split<&'a T>
where
    T: ?Sized + SplitAt,
{
    #[inline(always)]
    fn into_ptr(self) -> Split<Ptr<'a, T, (Shared, Aligned, Valid)>> {
        let source = Ptr::from_ref(self.source);
        // SAFETY: `Ptr::from_ref(self.source)` points to exactly `self.source`
        // and thus maintains the invariants of `self` with respect to `l_len`.
        unsafe { Split::new(source, self.l_len) }
    }

    /// Produces the split parts of `self`, using [`Immutable`] to ensure that
    /// it is sound to have concurrent references to both parts.
    ///
    /// # Examples
    ///
    /// ```
    /// use zerocopy::{SplitAt, FromBytes};
    /// # use zerocopy_derive::*;
    ///
    /// #[derive(SplitAt, FromBytes, KnownLayout, Immutable)]
    /// #[repr(C)]
    /// struct Packet {
    ///     length: u8,
    ///     body: [u8],
    /// }
    ///
    /// // These bytes encode a `Packet`.
    /// let bytes = &[4, 1, 2, 3, 4, 5, 6, 7, 8, 9][..];
    ///
    /// let packet = Packet::ref_from_bytes(bytes).unwrap();
    ///
    /// assert_eq!(packet.length, 4);
    /// assert_eq!(packet.body, [1, 2, 3, 4, 5, 6, 7, 8, 9]);
    ///
    /// // Attempt to split `packet` at `length`.
    /// let split = packet.split_at(packet.length as usize).unwrap();
    ///
    /// // Use the `Immutable` bound on `Packet` to prove that it's okay to
    /// // return concurrent references to `packet` and `rest`.
    /// let (packet, rest) = split.via_immutable();
    ///
    /// assert_eq!(packet.length, 4);
    /// assert_eq!(packet.body, [1, 2, 3, 4]);
    /// assert_eq!(rest, [5, 6, 7, 8, 9]);
    /// ```
    #[must_use = "has no side effects"]
    #[inline(always)]
    pub fn via_immutable(self) -> (&'a T, &'a [T::Elem])
    where
        T: Immutable,
    {
        let (l, r) = self.into_ptr().via_immutable();
        (l.as_ref(), r.as_ref())
    }

    /// Produces the split parts of `self`, using [`IntoBytes`] to ensure that
    /// it is sound to have concurrent references to both parts.
    ///
    /// # Examples
    ///
    /// ```
    /// use zerocopy::{SplitAt, FromBytes};
    /// # use zerocopy_derive::*;
    ///
    /// #[derive(SplitAt, FromBytes, KnownLayout, Immutable, IntoBytes)]
    /// #[repr(C)]
    /// struct Packet<B: ?Sized> {
    ///     length: u8,
    ///     body: B,
    /// }
    ///
    /// // These bytes encode a `Packet`.
    /// let bytes = &[4, 1, 2, 3, 4, 5, 6, 7, 8, 9][..];
    ///
    /// let packet = Packet::<[u8]>::ref_from_bytes(bytes).unwrap();
    ///
    /// assert_eq!(packet.length, 4);
    /// assert_eq!(packet.body, [1, 2, 3, 4, 5, 6, 7, 8, 9]);
    ///
    /// // Attempt to split `packet` at `length`.
    /// let split = packet.split_at(packet.length as usize).unwrap();
    ///
    /// // Use the `IntoBytes` bound on `Packet` to prove that it's okay to
    /// // return concurrent references to `packet` and `rest`.
    /// let (packet, rest) = split.via_into_bytes();
    ///
    /// assert_eq!(packet.length, 4);
    /// assert_eq!(packet.body, [1, 2, 3, 4]);
    /// assert_eq!(rest, [5, 6, 7, 8, 9]);
    /// ```
    #[must_use = "has no side effects"]
    #[inline(always)]
    pub fn via_into_bytes(self) -> (&'a T, &'a [T::Elem])
    where
        T: IntoBytes,
    {
        let (l, r) = self.into_ptr().via_into_bytes();
        (l.as_ref(), r.as_ref())
    }

    /// Produces the split parts of `self`, using [`Unaligned`] to ensure that
    /// it is sound to have concurrent references to both parts.
    ///
    /// # Examples
    ///
    /// ```
    /// use zerocopy::{SplitAt, FromBytes};
    /// # use zerocopy_derive::*;
    ///
    /// #[derive(SplitAt, FromBytes, KnownLayout, Immutable, Unaligned)]
    /// #[repr(C)]
    /// struct Packet {
    ///     length: u8,
    ///     body: [u8],
    /// }
    ///
    /// // These bytes encode a `Packet`.
    /// let bytes = &[4, 1, 2, 3, 4, 5, 6, 7, 8, 9][..];
    ///
    /// let packet = Packet::ref_from_bytes(bytes).unwrap();
    ///
    /// assert_eq!(packet.length, 4);
    /// assert_eq!(packet.body, [1, 2, 3, 4, 5, 6, 7, 8, 9]);
    ///
    /// // Attempt to split `packet` at `length`.
    /// let split = packet.split_at(packet.length as usize).unwrap();
    ///
    /// // Use the `Unaligned` bound on `Packet` to prove that it's okay to
    /// // return concurrent references to `packet` and `rest`.
    /// let (packet, rest) = split.via_unaligned();
    ///
    /// assert_eq!(packet.length, 4);
    /// assert_eq!(packet.body, [1, 2, 3, 4]);
    /// assert_eq!(rest, [5, 6, 7, 8, 9]);
    /// ```
    #[must_use = "has no side effects"]
    #[inline(always)]
    pub fn via_unaligned(self) -> (&'a T, &'a [T::Elem])
    where
        T: Unaligned,
    {
        let (l, r) = self.into_ptr().via_unaligned();
        (l.as_ref(), r.as_ref())
    }

    /// Produces the split parts of `self`, using a dynamic check to ensure that
    /// it is sound to have concurrent references to both parts. You should
    /// prefer using [`Self::via_immutable`], [`Self::via_into_bytes`], or
    /// [`Self::via_unaligned`], which have no runtime cost.
    ///
    /// Note that this check is overly conservative if `T` is [`Immutable`]; for
    /// some types, this check will reject some splits which
    /// [`Self::via_immutable`] will accept.
    ///
    /// # Examples
    ///
    /// ```
    /// use zerocopy::{SplitAt, FromBytes, IntoBytes, network_endian::U16};
    /// # use zerocopy_derive::*;
    ///
    /// #[derive(SplitAt, FromBytes, KnownLayout, Immutable, Debug)]
    /// #[repr(C, align(2))]
    /// struct Packet {
    ///     length: U16,
    ///     body: [u8],
    /// }
    ///
    /// // These bytes encode a `Packet`.
    /// let bytes = [
    ///     4u16.to_be(),
    ///     1u16.to_be(),
    ///     2u16.to_be(),
    ///     3u16.to_be(),
    ///     4u16.to_be()
    /// ];
    ///
    /// let packet = Packet::ref_from_bytes(bytes.as_bytes()).unwrap();
    ///
    /// assert_eq!(packet.length, 4);
    /// assert_eq!(packet.body, [0, 1, 0, 2, 0, 3, 0, 4]);
    ///
    /// // Attempt to split `packet` at `length`.
    /// let split = packet.split_at(packet.length.into()).unwrap();
    ///
    /// // Use a dynamic check to prove that it's okay to return concurrent
    /// // references to `packet` and `rest`.
    /// let (packet, rest) = split.via_runtime_check().unwrap();
    ///
    /// assert_eq!(packet.length, 4);
    /// assert_eq!(packet.body, [0, 1, 0, 2]);
    /// assert_eq!(rest, [0, 3, 0, 4]);
    ///
    /// // Attempt to split `packet` at `length - 1`.
    /// let idx = packet.length.get() - 1;
    /// let split = packet.split_at(idx as usize).unwrap();
    ///
    /// // Attempt (and fail) to use a dynamic check to prove that it's okay
    /// // to return concurrent references to `packet` and `rest`. Note that
    /// // this is a case of `via_runtime_check` being overly conservative.
    /// // Although the left and right parts indeed overlap, the `Immutable`
    /// // bound ensures that concurrently referencing these overlapping
    /// // parts is sound.
    /// assert!(split.via_runtime_check().is_err());
    /// ```
    #[must_use = "has no side effects"]
    #[inline(always)]
    pub fn via_runtime_check(self) -> Result<(&'a T, &'a [T::Elem]), Self> {
        match self.into_ptr().via_runtime_check() {
            Ok((l, r)) => Ok((l.as_ref(), r.as_ref())),
            Err(s) => Err(s.into_ref()),
        }
    }

    /// Unsafely produces the split parts of `self`.
    ///
    /// # Safety
    ///
    /// If `T` permits interior mutation, the trailing padding bytes of the left
    /// portion must not overlap the right portion. For some dynamically sized
    /// types, the padding that appears after the trailing slice field [is a
    /// dynamic function of the trailing slice
    /// length](KnownLayout#slice-dst-layout). Thus, for some types, this
    /// condition is dependent on the length of the left portion.
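    ///
    /// # Examples
    ///
    /// A minimal sketch (not taken from the original documentation), using a
    /// plain byte slice. The call to `split_at` is written through the trait
    /// so that it resolves to `SplitAt::split_at` rather than the slice's
    /// inherent method of the same name.
    ///
    /// ```
    /// use zerocopy::SplitAt;
    ///
    /// let bytes: &[u8] = &[1, 2, 3, 4, 5][..];
    /// let split = SplitAt::split_at(bytes, 2).unwrap();
    ///
    /// // SAFETY: `[u8]` does not permit interior mutation, so the
    /// // non-overlap condition is vacuously satisfied.
    /// let (left, right) = unsafe { split.via_unchecked() };
    /// assert_eq!(left, [1, 2]);
    /// assert_eq!(right, [3, 4, 5]);
    /// ```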
    #[must_use = "has no side effects"]
    #[inline(always)]
    pub unsafe fn via_unchecked(self) -> (&'a T, &'a [T::Elem]) {
        // SAFETY: The aliasing of `self.into_ptr()` is not `Exclusive`, but the
        // caller has promised that if `T` permits interior mutation then the
        // left and right portions of `self` split at `l_len` do not overlap.
        let (l, r) = unsafe { self.into_ptr().via_unchecked() };
        (l.as_ref(), r.as_ref())
    }
}

impl<'a, T> Split<&'a mut T>
where
    T: ?Sized + SplitAt,
{
    #[inline(always)]
    fn into_ptr(self) -> Split<Ptr<'a, T, (Exclusive, Aligned, Valid)>> {
        let source = Ptr::from_mut(self.source);
        // SAFETY: `Ptr::from_mut(self.source)` points to exactly `self.source`,
        // and thus maintains the invariants of `self` with respect to `l_len`.
        unsafe { Split::new(source, self.l_len) }
    }

    /// Produces the split parts of `self`, using [`IntoBytes`] to ensure that
    /// it is sound to have concurrent references to both parts.
    ///
    /// # Examples
    ///
    /// ```
    /// use zerocopy::{SplitAt, FromBytes};
    /// # use zerocopy_derive::*;
    ///
    /// #[derive(SplitAt, FromBytes, KnownLayout, IntoBytes)]
    /// #[repr(C)]
    /// struct Packet<B: ?Sized> {
    ///     length: u8,
    ///     body: B,
    /// }
    ///
    /// // These bytes encode a `Packet`.
    /// let mut bytes = &mut [4, 1, 2, 3, 4, 5, 6, 7, 8, 9][..];
    ///
    /// let packet = Packet::<[u8]>::mut_from_bytes(bytes).unwrap();
    ///
    /// assert_eq!(packet.length, 4);
    /// assert_eq!(packet.body, [1, 2, 3, 4, 5, 6, 7, 8, 9]);
    ///
    /// {
    ///     // Attempt to split `packet` at `length`.
    ///     let split = packet.split_at_mut(packet.length as usize).unwrap();
    ///
    ///     // Use the `IntoBytes` bound on `Packet` to prove that it's okay to
    ///     // return concurrent references to `packet` and `rest`.
    ///     let (packet, rest) = split.via_into_bytes();
    ///
    ///     assert_eq!(packet.length, 4);
    ///     assert_eq!(packet.body, [1, 2, 3, 4]);
    ///     assert_eq!(rest, [5, 6, 7, 8, 9]);
    ///
    ///     rest.fill(0);
    /// }
    ///
    /// assert_eq!(packet.length, 4);
    /// assert_eq!(packet.body, [1, 2, 3, 4, 0, 0, 0, 0, 0]);
    /// ```
    #[must_use = "has no side effects"]
    #[inline(always)]
    pub fn via_into_bytes(self) -> (&'a mut T, &'a mut [T::Elem])
    where
        T: IntoBytes,
    {
        let (l, r) = self.into_ptr().via_into_bytes();
        (l.as_mut(), r.as_mut())
    }

    /// Produces the split parts of `self`, using [`Unaligned`] to ensure that
    /// it is sound to have concurrent references to both parts.
    ///
    /// # Examples
    ///
    /// ```
    /// use zerocopy::{SplitAt, FromBytes};
    /// # use zerocopy_derive::*;
    ///
    /// #[derive(SplitAt, FromBytes, KnownLayout, IntoBytes, Unaligned)]
    /// #[repr(C)]
    /// struct Packet<B: ?Sized> {
    ///     length: u8,
    ///     body: B,
    /// }
    ///
    /// // These bytes encode a `Packet`.
    /// let mut bytes = &mut [4, 1, 2, 3, 4, 5, 6, 7, 8, 9][..];
    ///
    /// let packet = Packet::<[u8]>::mut_from_bytes(bytes).unwrap();
    ///
    /// assert_eq!(packet.length, 4);
    /// assert_eq!(packet.body, [1, 2, 3, 4, 5, 6, 7, 8, 9]);
    ///
    /// {
    ///     // Attempt to split `packet` at `length`.
    ///     let split = packet.split_at_mut(packet.length as usize).unwrap();
    ///
    ///     // Use the `Unaligned` bound on `Packet` to prove that it's okay to
    ///     // return concurrent references to `packet` and `rest`.
    ///     let (packet, rest) = split.via_unaligned();
    ///
    ///     assert_eq!(packet.length, 4);
    ///     assert_eq!(packet.body, [1, 2, 3, 4]);
    ///     assert_eq!(rest, [5, 6, 7, 8, 9]);
    ///
    ///     rest.fill(0);
    /// }
    ///
    /// assert_eq!(packet.length, 4);
    /// assert_eq!(packet.body, [1, 2, 3, 4, 0, 0, 0, 0, 0]);
    /// ```
    #[must_use = "has no side effects"]
    #[inline(always)]
    pub fn via_unaligned(self) -> (&'a mut T, &'a mut [T::Elem])
    where
        T: Unaligned,
    {
        let (l, r) = self.into_ptr().via_unaligned();
        (l.as_mut(), r.as_mut())
    }

    /// Produces the split parts of `self`, using a dynamic check to ensure that
    /// it is sound to have concurrent references to both parts. You should
    /// prefer using [`Self::via_into_bytes`] or [`Self::via_unaligned`], which
    /// have no runtime cost.
    ///
    /// # Examples
    ///
    /// ```
    /// use zerocopy::{SplitAt, FromBytes};
    /// # use zerocopy_derive::*;
    ///
    /// #[derive(SplitAt, FromBytes, KnownLayout, IntoBytes, Debug)]
    /// #[repr(C)]
    /// struct Packet<B: ?Sized> {
    ///     length: u8,
    ///     body: B,
    /// }
    ///
    /// // These bytes encode a `Packet`.
    /// let mut bytes = &mut [4, 1, 2, 3, 4, 5, 6, 7, 8, 9][..];
    ///
    /// let packet = Packet::<[u8]>::mut_from_bytes(bytes).unwrap();
    ///
    /// assert_eq!(packet.length, 4);
    /// assert_eq!(packet.body, [1, 2, 3, 4, 5, 6, 7, 8, 9]);
    ///
    /// {
    ///     // Attempt to split `packet` at `length`.
    ///     let split = packet.split_at_mut(packet.length as usize).unwrap();
    ///
    ///     // Use a dynamic check to prove that it's okay to return concurrent
    ///     // references to `packet` and `rest`.
    ///     let (packet, rest) = split.via_runtime_check().unwrap();
    ///
    ///     assert_eq!(packet.length, 4);
    ///     assert_eq!(packet.body, [1, 2, 3, 4]);
    ///     assert_eq!(rest, [5, 6, 7, 8, 9]);
    ///
    ///     rest.fill(0);
    /// }
    ///
    /// assert_eq!(packet.length, 4);
    /// assert_eq!(packet.body, [1, 2, 3, 4, 0, 0, 0, 0, 0]);
    /// ```
    #[must_use = "has no side effects"]
    #[inline(always)]
    pub fn via_runtime_check(self) -> Result<(&'a mut T, &'a mut [T::Elem]), Self> {
        match self.into_ptr().via_runtime_check() {
            Ok((l, r)) => Ok((l.as_mut(), r.as_mut())),
            Err(s) => Err(s.into_mut()),
        }
    }

    /// Unsafely produces the split parts of `self`.
    ///
    /// # Safety
    ///
    /// The trailing padding bytes of the left portion must not overlap the
    /// right portion. For some dynamically sized types, the padding that
    /// appears after the trailing slice field [is a dynamic function of the
    /// trailing slice length](KnownLayout#slice-dst-layout). Thus, for some
    /// types, this condition is dependent on the length of the left portion.
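    ///
    /// # Examples
    ///
    /// A minimal sketch (not taken from the original documentation), using a
    /// plain byte slice. The call to `split_at_mut` is written through the
    /// trait so that it resolves to `SplitAt::split_at_mut` rather than the
    /// slice's inherent method of the same name.
    ///
    /// ```
    /// use zerocopy::SplitAt;
    ///
    /// let bytes: &mut [u8] = &mut [1, 2, 3, 4, 5][..];
    /// let split = SplitAt::split_at_mut(bytes, 2).unwrap();
    ///
    /// // SAFETY: `[u8]` never requires trailing padding, so the left and
    /// // right portions cannot overlap.
    /// let (left, right) = unsafe { split.via_unchecked() };
    /// left.fill(0);
    /// assert_eq!(left, [0, 0]);
    /// assert_eq!(right, [3, 4, 5]);
    /// ```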
    #[must_use = "has no side effects"]
    #[inline(always)]
    pub unsafe fn via_unchecked(self) -> (&'a mut T, &'a mut [T::Elem]) {
        // SAFETY: The aliasing of `self.into_ptr()` is `Exclusive`, and the
        // caller has promised that the left and right portions of `self` split
        // at `l_len` do not overlap.
        let (l, r) = unsafe { self.into_ptr().via_unchecked() };
        (l.as_mut(), r.as_mut())
    }
}

impl<'a, T, I> Split<Ptr<'a, T, I>>
where
    T: ?Sized + SplitAt,
    I: Invariants<Alignment = Aligned, Validity = Valid>,
{
    fn into_ref(self) -> Split<&'a T>
    where
        I: Invariants<Aliasing = Shared>,
    {
        // SAFETY: `self.source.as_ref()` points to exactly the same referent as
        // `self.source` and thus maintains the invariants of `self` with
        // respect to `l_len`.
        unsafe { Split::new(self.source.as_ref(), self.l_len) }
    }

    fn into_mut(self) -> Split<&'a mut T>
    where
        I: Invariants<Aliasing = Exclusive>,
    {
        // SAFETY: `self.source.as_mut()` points to exactly the same referent as
        // `self.source` and thus maintains the invariants of `self` with
        // respect to `l_len`.
        unsafe { Split::new(self.source.unify_invariants().as_mut(), self.l_len) }
    }

    /// Produces the length of `self`'s left part.
    #[inline(always)]
    fn l_len(&self) -> MetadataOf<T> {
        // SAFETY: By invariant on `Split`, `self.l_len` is not greater than the
        // length of `self.source`.
        unsafe { MetadataOf::<T>::new_unchecked(self.l_len) }
    }

    /// Produces the split parts of `self`, using [`Immutable`] to ensure that
    /// it is sound to have concurrent references to both parts.
    #[inline(always)]
    fn via_immutable(self) -> (Ptr<'a, T, I>, Ptr<'a, [T::Elem], I>)
    where
        T: Immutable,
        I: Invariants<Aliasing = Shared>,
    {
        // SAFETY: `Aliasing = Shared` and `T: Immutable`.
        unsafe { self.via_unchecked() }
    }

    /// Produces the split parts of `self`, using [`IntoBytes`] to ensure that
    /// it is sound to have concurrent references to both parts.
    #[inline(always)]
    fn via_into_bytes(self) -> (Ptr<'a, T, I>, Ptr<'a, [T::Elem], I>)
    where
        T: IntoBytes,
    {
        // SAFETY: By `T: IntoBytes`, `T` has no padding for any length.
        // Consequently, `T` can be split into non-overlapping parts at any
        // index.
        unsafe { self.via_unchecked() }
    }

    /// Produces the split parts of `self`, using [`Unaligned`] to ensure that
    /// it is sound to have concurrent references to both parts.
    #[inline(always)]
    fn via_unaligned(self) -> (Ptr<'a, T, I>, Ptr<'a, [T::Elem], I>)
    where
        T: Unaligned,
    {
        // SAFETY: By `T: SplitAt + Unaligned`, `T` is either a slice or a
        // `repr(C)` or `repr(transparent)` slice DST that is well-aligned at
        // any address and length. If `T` is a slice DST with alignment 1,
        // `repr(C)` or `repr(transparent)` ensures that no padding is placed
        // after the final element of the trailing slice. Consequently, `T` can
        // be split into strictly non-overlapping parts at any index.
        unsafe { self.via_unchecked() }
    }

    /// Produces the split parts of `self`, using a dynamic check to ensure that
    /// it is sound to have concurrent references to both parts. You should
    /// prefer using [`Self::via_immutable`], [`Self::via_into_bytes`], or
    /// [`Self::via_unaligned`], which have no runtime cost.
    #[inline(always)]
    fn via_runtime_check(self) -> Result<(Ptr<'a, T, I>, Ptr<'a, [T::Elem], I>), Self> {
        let l_len = self.l_len();
        // FIXME(#1290): Once we require `KnownLayout` on all fields, add an
        // `IS_IMMUTABLE` associated const, and add `T::IS_IMMUTABLE ||` to the
        // below check.
        if l_len.padding_needed_for() == 0 {
            // SAFETY: By `T: SplitAt`, `T` is either `[T]`, or a `repr(C)` or
            // `repr(transparent)` slice DST, for which the trailing padding
            // needed to accommodate `l_len` trailing elements is
            // `l_len.padding_needed_for()`. If no trailing padding is required,
            // the left and right parts are strictly non-overlapping.
            Ok(unsafe { self.via_unchecked() })
        } else {
            Err(self)
        }
    }

    /// Unsafely produces the split parts of `self`.
    ///
    /// # Safety
    ///
    /// The caller promises that if `I::Aliasing` is [`Exclusive`] or `T`
    /// permits interior mutation, then `l_len.padding_needed_for() == 0`.
    #[inline(always)]
    unsafe fn via_unchecked(self) -> (Ptr<'a, T, I>, Ptr<'a, [T::Elem], I>) {
        let l_len = self.l_len();
        let inner = self.source.as_inner();

        // SAFETY: By invariant on `Self::l_len`, `l_len` is not greater than
        // the length of `inner`'s trailing slice.
        let (left, right) = unsafe { inner.split_at_unchecked(l_len) };

        // Lemma 0: `left` and `right` conform to the aliasing invariant
        // `I::Aliasing`. Proof: If `I::Aliasing` is `Exclusive` or `T` permits
        // interior mutation, the caller promises that `l_len.padding_needed_for()
        // == 0`. Consequently, by post-condition on `PtrInner::split_at_unchecked`,
        // there is no trailing padding after `left`'s final element that would
        // overlap into `right`. If `I::Aliasing` is `Shared` and `T` forbids
        // interior mutation, then overlap between their referents is permissible.

        // SAFETY:
        // 0. `left` conforms to the aliasing invariant of `I::Aliasing`, by Lemma 0.
        // 1. `left` conforms to the alignment invariant of `I::Alignment`, because
        //    the referents of `left` and `self.source` have the same address and
        //    type (and, thus, alignment requirement).
        // 2. `left` conforms to the validity invariant of `I::Validity`, because
        //    neither the type nor the bytes of `left`'s referent have been changed.
        let left = unsafe { Ptr::from_inner(left) };

        // SAFETY:
        // 0. `right` conforms to the aliasing invariant of `I::Aliasing`, by Lemma
        //    0.
        // 1. `right` conforms to the alignment invariant of `I::Alignment`, because,
        //    if `I::Alignment = Aligned`, then by invariant on `T: SplitAt`, the
        //    trailing slice of `self.source` (from which `right` is derived) is
        //    also well-aligned.
        // 2. `right` conforms to the validity invariant of `I::Validity`,
        //    because `right: [T::Elem]` is derived from the trailing slice of
        //    `self.source`, which, by contract on `SplitAt::Elem`, has type
        //    `[T::Elem]`. The `left` part cannot be used to invalidate `right`,
        //    because the caller promises that if `I::Aliasing` is `Exclusive`
        //    or `T` permits interior mutation, then `l_len.padding_needed_for()
        //    == 0` and thus the parts will be non-overlapping.
        let right = unsafe { Ptr::from_inner(right) };

        (left, right)
    }
}

#[cfg(test)]
mod tests {
    #[cfg(feature = "derive")]
    #[test]
    fn test_split_at() {
        use crate::{FromBytes, Immutable, IntoBytes, KnownLayout, SplitAt};

        #[derive(FromBytes, KnownLayout, SplitAt, IntoBytes, Immutable, Debug)]
        #[repr(C)]
        struct SliceDst<const OFFSET: usize> {
            prefix: [u8; OFFSET],
            trailing: [u8],
        }

        #[allow(clippy::as_conversions)]
        fn test_split_at<const OFFSET: usize, const BUFFER_SIZE: usize>() {
            // Test `split_at`
            let n: usize = BUFFER_SIZE - OFFSET;
            let arr = [1; BUFFER_SIZE];
            let dst = SliceDst::<OFFSET>::ref_from_bytes(&arr[..]).unwrap();
            for i in 0..=n {
                let (l, r) = dst.split_at(i).unwrap().via_runtime_check().unwrap();
                let l_sum: u8 = l.trailing.iter().sum();
                let r_sum: u8 = r.iter().sum();
                assert_eq!(l_sum, i as u8);
                assert_eq!(r_sum, (n - i) as u8);
                assert_eq!(l_sum + r_sum, n as u8);
            }

            // Test `split_at_mut`
            let n: usize = BUFFER_SIZE - OFFSET;
            let mut arr = [1; BUFFER_SIZE];
            let dst = SliceDst::<OFFSET>::mut_from_bytes(&mut arr[..]).unwrap();
            for i in 0..=n {
                let (l, r) = dst.split_at_mut(i).unwrap().via_runtime_check().unwrap();
                let l_sum: u8 = l.trailing.iter().sum();
                let r_sum: u8 = r.iter().sum();
                assert_eq!(l_sum, i as u8);
                assert_eq!(r_sum, (n - i) as u8);
                assert_eq!(l_sum + r_sum, n as u8);
            }
        }

        test_split_at::<0, 16>();
        test_split_at::<1, 17>();
        test_split_at::<2, 18>();
    }

    #[cfg(feature = "derive")]
    #[test]
    #[allow(clippy::as_conversions)]
    fn test_split_at_overlapping() {
        use crate::{FromBytes, Immutable, IntoBytes, KnownLayout, SplitAt};

        #[derive(FromBytes, KnownLayout, SplitAt, Immutable)]
        #[repr(C, align(2))]
        struct SliceDst {
            prefix: u8,
            trailing: [u8],
        }

        const N: usize = 16;

        let arr = [1u16; N];
        let dst = SliceDst::ref_from_bytes(arr.as_bytes()).unwrap();

        for i in 0..N {
            let split = dst.split_at(i).unwrap().via_runtime_check();
            if i % 2 == 1 {
                assert!(split.is_ok());
            } else {
                assert!(split.is_err());
            }
        }
    }
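
    // A small supplementary check (a sketch, not part of the original test
    // suite): `[u8]` is `Unaligned` and never requires trailing padding, so
    // the zero-cost `via_unaligned` path should accept every split index.
    #[test]
    fn test_split_at_unaligned_slice() {
        use crate::SplitAt;

        let bytes: &[u8] = &[0, 1, 2, 3, 4][..];
        for i in 0..=bytes.len() {
            let (l, r) = SplitAt::split_at(bytes, i).unwrap().via_unaligned();
            assert_eq!(l, &bytes[..i]);
            assert_eq!(r, &bytes[i..]);
        }
    }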
}