From 2d2c41fae6fb34aa8e8750d571dfe7b68bc1f0cd Mon Sep 17 00:00:00 2001 From: Jack Wrenn Date: Tue, 23 Apr 2024 19:40:51 +0000 Subject: [PATCH] wip --- src/deprecated.rs | 12 +- src/error.rs | 395 +++++++++++++ src/lib.rs | 553 +++++++++++------- src/pointer/ptr.rs | 104 +++- src/util.rs | 7 + zerocopy-derive/tests/enum_try_from_bytes.rs | 68 ++- .../tests/struct_try_from_bytes.rs | 10 +- 7 files changed, 880 insertions(+), 269 deletions(-) create mode 100644 src/error.rs diff --git a/src/deprecated.rs b/src/deprecated.rs index 860270d0834..0966a431392 100644 --- a/src/deprecated.rs +++ b/src/deprecated.rs @@ -21,7 +21,7 @@ where #[doc(hidden)] #[inline] pub fn new_slice(bytes: B) -> Option> { - Self::new(bytes) + Self::new(bytes).ok() } } @@ -34,7 +34,7 @@ where #[doc(hidden)] #[inline(always)] pub fn new_slice_unaligned(bytes: B) -> Option> { - Ref::new_unaligned(bytes) + Ref::new_unaligned(bytes).ok() } } @@ -74,7 +74,7 @@ where #[doc(hidden)] #[inline] pub fn new_slice_from_prefix(bytes: B, count: usize) -> Option<(Ref, B)> { - Ref::with_trailing_elements_from_prefix(bytes, count) + Ref::with_trailing_elements_from_prefix(bytes, count).ok() } #[deprecated(since = "0.8.0", note = "replaced by `Ref::with_trailing_elements_from_suffix`")] @@ -82,7 +82,7 @@ where #[doc(hidden)] #[inline] pub fn new_slice_from_suffix(bytes: B, count: usize) -> Option<(B, Ref)> { - Ref::with_trailing_elements_from_suffix(bytes, count) + Ref::with_trailing_elements_from_suffix(bytes, count).ok() } } @@ -99,7 +99,7 @@ where #[must_use = "has no side effects"] #[inline(always)] pub fn new_slice_unaligned_from_prefix(bytes: B, count: usize) -> Option<(Ref, B)> { - Ref::with_trailing_elements_unaligned_from_prefix(bytes, count) + Ref::with_trailing_elements_unaligned_from_prefix(bytes, count).ok() } #[deprecated( @@ -110,6 +110,6 @@ where #[must_use = "has no side effects"] #[inline(always)] pub fn new_slice_unaligned_from_suffix(bytes: B, count: usize) -> Option<(B, Ref)> { - 
Ref::with_trailing_elements_unaligned_from_suffix(bytes, count) + Ref::with_trailing_elements_unaligned_from_suffix(bytes, count).ok() } } diff --git a/src/error.rs b/src/error.rs new file mode 100644 index 00000000000..fb13ef3f6d4 --- /dev/null +++ b/src/error.rs @@ -0,0 +1,395 @@ +//! Types related to error reporting. +//! +//! ## Single failure mode errors +//! +//! Generally speaking, zerocopy's conversions may fail for one of up to three reasons: +//! - [`AlignmentError`]: the conversion source was improperly aligned +//! - [`SizeError`]: the conversion source was of incorrect size +//! - [`ValidityError`]: the conversion source contained invalid data +//! +//! Methods that only have one failure mode, like [`Ref::new_unaligned`], return +//! that mode's corresponding error type directly. +//! +//! ## Compound errors +//! +//! Conversion methods that have either two or three possible failure modes +//! return one of these error types: +//! - [`CastError`]: the error type of ref-conversions. +//! - [`TryCastError`]: the error type of fallible ref-conversions. +//! - [`TryReadError`]: the error type of fallible read-conversions. +//! +//! These three types are aliases for the enum [`ConvertError`], whose +//! infallible branches (if any) are parameterized with [`Infallible`]. +//! +//! ## Accessing the conversion source +//! +//! All error types provide an `into_src` method that converts the error into +//! the source value underlying the failed conversion. + +use core::{convert::Infallible, fmt, marker::PhantomData, ops::Deref}; + +use crate::util::unreachable_infallible; + +#[cfg(doc)] +use crate::{FromBytes, Ref, TryFromBytes}; + +/// Zerocopy's generic error type. 
+/// +/// Generally speaking, zerocopy's conversions may fail for one of up to three reasons: +/// - [`AlignmentError`]: the conversion source was improperly aligned +/// - [`SizeError`]: the conversion source was of incorrect size +/// - [`ValidityError`]: the conversion source contained invalid data +/// +/// However, not all conversions produce all errors. For instance, +/// [`FromBytes::ref_from`] may fail due to alignment or size issues, but not +/// validity issues. This generic error type captures these (im)possibilities +/// via parameterization: `A` is parameterized with [`AlignmentError`], `S` is +/// parameterized with [`SizeError`], and `V` is parameterized with +/// [`Infallible`]. +/// +/// Zerocopy never uses this type directly in its API. Rather, we provide three +/// pre-parameterized aliases: +/// - [`CastError`]: the error type of ref-conversions. +/// - [`TryCastError`]: the error type of fallible ref-conversions. +/// - [`TryReadError`]: the error type of fallible read-conversions. +#[derive(PartialEq, Eq)] +pub enum ConvertError { + /// The conversion source was improperly aligned. + Alignment(A), + /// The conversion source was of incorrect size. + Size(S), + /// The conversion source contained invalid data. + Validity(V), +} + +impl fmt::Debug for ConvertError { + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::Alignment(e) => f.debug_tuple("Alignment").field(e).finish(), + Self::Size(e) => f.debug_tuple("Size").field(e).finish(), + Self::Validity(e) => f.debug_tuple("Validity").field(e).finish(), + } + } +} + +/// Produces a human-readable error message. +impl fmt::Display for ConvertError { + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::Alignment(e) => e.fmt(f), + Self::Size(e) => e.fmt(f), + Self::Validity(e) => e.fmt(f), + } + } +} + +/// The error emitted if the conversion source is improperly aligned. 
+#[derive(PartialEq, Eq)] +pub struct AlignmentError { + /// The source value involved in the conversion. + src: Src, + /// The inner destination type inolved in the conversion. + dst: PhantomData, +} + +impl AlignmentError { + pub(crate) fn new(src: Src) -> Self { + Self { src, dst: PhantomData } + } + + /// Produces the source underlying the failed conversion. + #[inline] + pub fn into_src(self) -> Src { + self.src + } + + pub(crate) fn with_src(self, new_src: NewSrc) -> AlignmentError { + AlignmentError { src: new_src, dst: PhantomData } + } + + pub(crate) fn map_src(self, f: impl Fn(Src) -> NewSrc) -> AlignmentError { + AlignmentError { src: f(self.src), dst: PhantomData } + } + + pub(crate) fn with_dst(self) -> AlignmentError { + AlignmentError { src: self.src, dst: PhantomData } + } + + pub(crate) fn into(self) -> ConvertError { + ConvertError::Alignment(self) + } +} + +impl fmt::Debug for AlignmentError { + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("AlignmentError").finish() + } +} + +/// Produces a human-readable error message. +// The bounds on this impl are intentionally conservative, and can be relaxed +// either once a `?Sized` alignment accessor is stabilized, or by storing the +// alignment as a runtime value. +impl fmt::Display for AlignmentError +where + Src: Deref, +{ + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str("the conversion failed because the address of the source (")?; + f.write_fmt(format_args!("{:p}", self.src.deref()))?; + f.write_str(") is not a multiple of the alignment (")?; + core::mem::align_of::().fmt(f)?; + f.write_str(") of the destination type: ")?; + f.write_str(core::any::type_name::())?; + Ok(()) + } +} + +impl From> + for ConvertError, S, V> +{ + #[inline] + fn from(err: AlignmentError) -> Self { + Self::Alignment(err) + } +} + +/// The error emitted if the conversion source is of incorrect size. 
+#[derive(PartialEq, Eq)] +pub struct SizeError { + /// The source value involved in the conversion. + src: Src, + /// The inner destination type inolved in the conversion. + dst: PhantomData, +} + +impl SizeError { + pub(crate) fn new(src: Src) -> Self { + Self { src, dst: PhantomData } + } + + /// Produces the source underlying the failed conversion. + #[inline] + pub fn into_src(self) -> Src { + self.src + } + + /// Sets the source value associated with the conversion error. + pub(crate) fn with_src(self, new_src: NewSrc) -> SizeError { + SizeError { src: new_src, dst: PhantomData } + } + + /// Maps the source value associated with the conversion error. + pub(crate) fn map_src(self, f: impl Fn(Src) -> NewSrc) -> SizeError { + SizeError { src: f(self.src), dst: PhantomData } + } + + /// Sets the destination type associated with the conversion error. + pub(crate) fn with_dst(self) -> SizeError { + SizeError { src: self.src, dst: PhantomData } + } + + /// Converts the error into a general [`ConvertError`]. + pub(crate) fn into(self) -> ConvertError { + ConvertError::Size(self) + } +} + +impl fmt::Debug for SizeError { + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("SizeError").finish() + } +} + +/// Produces a human-readable error message. +impl fmt::Display for SizeError +where + Src: Deref, +{ + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str("the conversion failed because the source was incorrectly sized to complete the conversion into the destination type: ")?; + f.write_str(core::any::type_name::())?; + Ok(()) + } +} + +impl From> for ConvertError, V> { + #[inline] + fn from(err: SizeError) -> Self { + Self::Size(err) + } +} + +/// The error emitted if the conversion source contains invalid data. +#[derive(PartialEq, Eq)] +pub struct ValidityError { + /// The source value involved in the conversion. + src: Src, + /// The inner destination type inolved in the conversion. 
+ dst: PhantomData, +} + +impl ValidityError { + pub(crate) fn new(src: Src) -> Self { + Self { src, dst: PhantomData } + } + + /// Produces the source underlying the failed conversion. + #[inline] + pub fn into_src(self) -> Src { + self.src + } + + /// Sets the source value associated with the conversion error. + pub(crate) fn with_src(self, new_src: NewSrc) -> ValidityError { + ValidityError { src: new_src, dst: PhantomData } + } + + /// Maps the source value associated with the conversion error. + pub(crate) fn map_src(self, f: impl Fn(Src) -> NewSrc) -> ValidityError { + ValidityError { src: f(self.src), dst: PhantomData } + } + + /// Converts the error into a general [`ConvertError`]. + pub(crate) fn into(self) -> ConvertError { + ConvertError::Validity(self) + } +} + +impl fmt::Debug for ValidityError { + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("ValidityError").finish() + } +} + +/// Produces a human-readable error message. +impl fmt::Display for ValidityError +where + Src: Deref, +{ + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str("the conversion failed because the bytes the source is not a valid value of the destination type: ")?; + f.write_str(core::any::type_name::())?; + Ok(()) + } +} + +impl From> for ConvertError> { + #[inline] + fn from(err: ValidityError) -> Self { + Self::Validity(err) + } +} + +/// The error type of ref-conversions. +/// +/// Ref-conversions, like [`FromBytes::ref_from`] may emit +/// [alignment](AlignmentError) and [size](SizeError) errors. +pub type CastError = + ConvertError, SizeError, Infallible>; + +impl CastError { + /// Produces the source underlying the failed conversion. + #[inline] + pub fn into_src(self) -> Src { + match self { + Self::Alignment(e) => e.src, + Self::Size(e) => e.src, + Self::Validity(i) => unreachable_infallible(i), + } + } + + /// Sets the source value associated with the conversion error. 
+ pub(crate) fn with_src(self, new_src: NewSrc) -> CastError { + match self { + Self::Alignment(e) => CastError::Alignment(e.with_src(new_src)), + Self::Size(e) => CastError::Size(e.with_src(new_src)), + Self::Validity(i) => unreachable_infallible(i), + } + } + + /// Maps the source value associated with the conversion error. + pub(crate) fn map_src(self, f: impl Fn(Src) -> NewSrc) -> CastError { + match self { + Self::Alignment(e) => CastError::Alignment(e.map_src(f)), + Self::Size(e) => CastError::Size(e.map_src(f)), + Self::Validity(i) => unreachable_infallible(i), + } + } + + /// Sets the destination type associated with the conversion error. + pub(crate) fn with_dst(self) -> CastError { + match self { + Self::Alignment(e) => CastError::Alignment(e.with_dst()), + Self::Size(e) => CastError::Size(e.with_dst()), + Self::Validity(i) => unreachable_infallible(i), + } + } + + /// Converts the error into a general [`ConvertError`]. + pub(crate) fn into(self) -> TryCastError { + match self { + Self::Alignment(e) => TryCastError::Alignment(e), + Self::Size(e) => TryCastError::Size(e), + Self::Validity(i) => unreachable_infallible(i), + } + } +} + +/// The error type of fallible ref-conversions. +/// +/// Fallible ref-conversions, like [`TryFromBytes::try_ref_from`] may emit +/// [alignment](AlignmentError), [size](SizeError), and +/// [validity](ValidityError) errors. +pub type TryCastError = + ConvertError, SizeError, ValidityError>; + +impl TryCastError { + /// Produces the source underlying the failed conversion. 
+ #[inline] + pub fn into_src(self) -> Src { + match self { + Self::Alignment(e) => e.src, + Self::Size(e) => e.src, + Self::Validity(e) => e.src, + } + } +} + +impl From> for TryCastError { + #[inline] + fn from(value: CastError) -> Self { + match value { + CastError::Alignment(e) => Self::Alignment(e), + CastError::Size(e) => Self::Size(e), + CastError::Validity(i) => unreachable_infallible(i), + } + } +} + +/// The error type of fallible read-conversions. +/// +/// Fallible read-conversions, like [`TryFromBytes::try_read_from`] may emit +/// [size](SizeError) and [validity](ValidityError) errors, but not alignment errors. +pub type TryReadError = + ConvertError, ValidityError>; + +impl TryReadError { + /// Produces the source underlying the failed conversion. + #[inline] + pub fn into_src(self) -> Src { + match self { + Self::Alignment(i) => unreachable_infallible(i), + Self::Size(e) => e.src, + Self::Validity(e) => e.src, + } + } +} diff --git a/src/lib.rs b/src/lib.rs index 52df74c3187..c2ce063eb5f 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -280,6 +280,11 @@ mod macros; pub mod byteorder; mod deprecated; +// Thid module is `pub` so that zerocopy's error types and error handling +// documentation is grouped together in a cohesive module. In practice, we +// expect most users to use the re-export of `error`'s items to avoid identifier +// stuttering. 
+pub mod error; #[doc(hidden)] pub mod macro_util; #[doc(hidden)] @@ -289,6 +294,7 @@ mod util; mod wrappers; pub use crate::byteorder::*; +pub use crate::error::*; pub use crate::wrappers::*; use core::{ @@ -317,6 +323,7 @@ use crate::pointer::invariant; extern crate alloc; #[cfg(any(feature = "alloc", test))] use alloc::{boxed::Box, vec::Vec}; +use util::unreachable_infallible; #[cfg(any(feature = "alloc", test, kani))] use core::alloc::Layout; @@ -456,6 +463,12 @@ pub enum CastType { Suffix, } +#[cfg_attr(test, derive(Debug))] +enum MetadataCastError { + Alignment, + Size, +} + impl DstLayout { /// The minimum possible alignment of a type. const MIN_ALIGN: NonZeroUsize = match NonZeroUsize::new(1) { @@ -820,7 +833,7 @@ impl DstLayout { addr: usize, bytes_len: usize, cast_type: CastType, - ) -> Option<(usize, usize)> { + ) -> Result<(usize, usize), MetadataCastError> { // `debug_assert!`, but with `#[allow(clippy::arithmetic_side_effects)]`. macro_rules! __const_debug_assert { ($e:expr $(, $msg:expr)?) => { @@ -878,14 +891,14 @@ impl DstLayout { // by 0 because `align` is non-zero. #[allow(clippy::arithmetic_side_effects)] if (addr + offset) % self.align.get() != 0 { - return None; + return Err(MetadataCastError::Alignment); } } let (elems, self_bytes) = match size_info { SizeInfo::Sized { size } => { if size > bytes_len { - return None; + return Err(MetadataCastError::Size); } (0, size) } @@ -904,7 +917,7 @@ impl DstLayout { let max_slice_and_padding_bytes = match max_total_bytes.checked_sub(offset) { Some(max) => max, // `bytes_len` too small even for 0 trailing slice elements. - None => return None, + None => return Err(MetadataCastError::Size), }; // Calculate the number of elements that fit in @@ -959,7 +972,7 @@ impl DstLayout { CastType::Suffix => bytes_len - self_bytes, }; - Some((elems, split_at)) + Ok((elems, split_at)) } } @@ -1611,11 +1624,13 @@ pub unsafe trait TryFromBytes { /// these cases are handled. 
#[must_use = "has no side effects"] #[inline] - fn try_ref_from(bytes: &[u8]) -> Option<&Self> + fn try_ref_from(bytes: &[u8]) -> Result<&Self, TryCastError<&[u8], Self>> where Self: KnownLayout + Immutable, { - let candidate = Ptr::from_ref(bytes).try_cast_into_no_leftover::()?; + let candidate = Ptr::from_ref(bytes) + .try_cast_into_no_leftover::() + .map_err(|e| e.map_src(|src| src.as_ref()).into())?; // This call may panic. If that happens, it doesn't cause any soundness // issues, as we have not generated any invalid state which we need to @@ -1625,7 +1640,7 @@ pub unsafe trait TryFromBytes { // calling `try_into_valid` (and thus `is_bit_valid`) with a shared // pointer when `Self: !Immutable`. Since `Self: Immutable`, this panic // condition will not happen. - let candidate = candidate.try_into_valid(); + let candidate = candidate.try_into_valid().map_err(|e| e.with_src(bytes).into()); candidate.map(MaybeAligned::as_ref) } @@ -1642,23 +1657,39 @@ pub unsafe trait TryFromBytes { /// these cases are handled. #[must_use = "has no side effects"] #[inline] - fn try_mut_from(bytes: &mut [u8]) -> Option<&mut Self> + fn try_mut_from(bytes: &mut [u8]) -> Result<&mut Self, TryCastError<&mut [u8], Self>> where Self: KnownLayout + Immutable, // TODO(#251): Remove the `Immutable` bound. { - let candidate = Ptr::from_mut(bytes).try_cast_into_no_leftover::()?; - - // This call may panic. If that happens, it doesn't cause any soundness - // issues, as we have not generated any invalid state which we need to - // fix before returning. - // - // Note that one panic or post-monomorphization error condition is - // calling `try_into_valid` (and thus `is_bit_valid`) with a shared - // pointer when `Self: !Immutable`. Since `Self: Immutable`, this panic - // condition will not happen. 
- let candidate = candidate.try_into_valid(); - - candidate.map(Ptr::as_mut) + let size = bytes.len(); + match Ptr::from_mut(bytes).try_cast_into_no_leftover::() { + Ok(candidate) => { + // This call may panic. If that happens, it doesn't cause any soundness + // issues, as we have not generated any invalid state which we need to + // fix before returning. + // + // Note that one panic or post-monomorphization error condition is + // calling `try_into_valid` (and thus `is_bit_valid`) with a shared + // pointer when `Self: !Immutable`. Since `Self: Immutable`, this panic + // condition will not happen. + match candidate.try_into_valid() { + Ok(candidate) => Ok(candidate.as_mut()), + Err(e) => Err(e + .map_src(|src| { + // SAFETY: The provided `size`, below, is computed from + // `bytes.len()`. By contract on `try_into_valid` we can + // count on `src` being equal to `candidate`. Since + // `candidate` was derived from the entire extent of + // `bytes`, the size of `bytes` is exactly equal to the + // size of `src`. + let bytes = unsafe { src.as_bytes_with_size(size) }; + bytes.as_mut() + }) + .into()), + } + } + Err(e) => Err(e.map_src(Ptr::as_mut).into()), + } } /// Attempts to read a `Self` from a byte slice. @@ -1672,7 +1703,7 @@ pub unsafe trait TryFromBytes { /// these cases are handled. #[must_use = "has no side effects"] #[inline] - fn try_read_from(bytes: &[u8]) -> Option + fn try_read_from(bytes: &[u8]) -> Result> where Self: Sized, { @@ -1681,7 +1712,12 @@ pub unsafe trait TryFromBytes { // mut` and `Ptr::from_mut` here. See the doc comment on `is_bit_valid` // and the implementation of `TryFromBytes` for `UnsafeCell` for more // details. 
- let mut candidate = MaybeUninit::::read_from(bytes)?; + let mut candidate = match MaybeUninit::::read_from(bytes) { + Ok(candidate) => candidate, + Err(e) => { + return Err(TryReadError::Size(e.with_dst())); + } + }; let c_ptr = Ptr::from_mut(&mut candidate); let c_ptr = c_ptr.transparent_wrapper_into_inner(); // SAFETY: `c_ptr` has no uninitialized sub-ranges because it derived @@ -1697,11 +1733,11 @@ pub unsafe trait TryFromBytes { // pointer when `Self: !Immutable`. Since `Self: Immutable`, this panic // condition will not happen. if !Self::is_bit_valid(c_ptr.forget_aligned()) { - return None; + return Err(ValidityError::new(bytes).into()); } // SAFETY: We just validated that `candidate` contains a valid `Self`. - Some(unsafe { candidate.assume_init() }) + Ok(unsafe { candidate.assume_init() }) } } @@ -2257,7 +2293,7 @@ pub unsafe trait FromBytes: FromZeros { /// ``` #[must_use = "has no side effects"] #[inline] - fn ref_from(bytes: &[u8]) -> Option<&Self> + fn ref_from(bytes: &[u8]) -> Result<&Self, CastError<&[u8], Self>> where Self: KnownLayout + Immutable, { @@ -2307,7 +2343,7 @@ pub unsafe trait FromBytes: FromZeros { /// ``` #[must_use = "has no side effects"] #[inline] - fn ref_from_prefix(bytes: &[u8]) -> Option<(&Self, &[u8])> + fn ref_from_prefix(bytes: &[u8]) -> Result<(&Self, &[u8]), CastError<&[u8], Self>> where Self: KnownLayout + Immutable, { @@ -2344,7 +2380,7 @@ pub unsafe trait FromBytes: FromZeros { /// ``` #[must_use = "has no side effects"] #[inline] - fn ref_from_suffix(bytes: &[u8]) -> Option<(&[u8], &Self)> + fn ref_from_suffix(bytes: &[u8]) -> Result<(&[u8], &Self), CastError<&[u8], Self>> where Self: Immutable + KnownLayout, { @@ -2388,7 +2424,7 @@ pub unsafe trait FromBytes: FromZeros { /// ``` #[must_use = "has no side effects"] #[inline] - fn mut_from(bytes: &mut [u8]) -> Option<&mut Self> + fn mut_from(bytes: &mut [u8]) -> Result<&mut Self, CastError<&mut [u8], Self>> where Self: IntoBytes + KnownLayout + Immutable, { @@ -2436,7 
+2472,9 @@ pub unsafe trait FromBytes: FromZeros { /// ``` #[must_use = "has no side effects"] #[inline] - fn mut_from_prefix(bytes: &mut [u8]) -> Option<(&mut Self, &mut [u8])> + fn mut_from_prefix( + bytes: &mut [u8], + ) -> Result<(&mut Self, &mut [u8]), CastError<&mut [u8], Self>> where Self: IntoBytes + KnownLayout + Immutable, { @@ -2479,7 +2517,9 @@ pub unsafe trait FromBytes: FromZeros { /// ``` #[must_use = "has no side effects"] #[inline] - fn mut_from_suffix(bytes: &mut [u8]) -> Option<(&mut [u8], &mut Self)> + fn mut_from_suffix( + bytes: &mut [u8], + ) -> Result<(&mut [u8], &mut Self), CastError<&mut [u8], Self>> where Self: IntoBytes + KnownLayout + Immutable, { @@ -2531,7 +2571,10 @@ pub unsafe trait FromBytes: FromZeros { /// ``` #[must_use = "has no side effects"] #[inline] - fn from_prefix_with_trailing_elements(bytes: &[u8], count: usize) -> Option<(&Self, &[u8])> + fn from_prefix_with_trailing_elements( + bytes: &[u8], + count: usize, + ) -> Result<(&Self, &[u8]), CastError<&[u8], Self>> where Self: KnownLayout + Immutable, { @@ -2550,7 +2593,7 @@ pub unsafe trait FromBytes: FromZeros { where Self: Sized + Immutable, { - <[Self]>::from_prefix_with_trailing_elements(bytes, count) + <[Self]>::from_prefix_with_trailing_elements(bytes, count).ok() } /// Interprets the suffix of the given `bytes` as a `&[Self]` with length @@ -2597,7 +2640,10 @@ pub unsafe trait FromBytes: FromZeros { /// ``` #[must_use = "has no side effects"] #[inline] - fn from_suffix_with_trailing_elements(bytes: &[u8], count: usize) -> Option<(&[u8], &Self)> + fn from_suffix_with_trailing_elements( + bytes: &[u8], + count: usize, + ) -> Result<(&[u8], &Self), CastError<&[u8], Self>> where Self: KnownLayout + Immutable, { @@ -2616,7 +2662,7 @@ pub unsafe trait FromBytes: FromZeros { where Self: Sized + Immutable, { - <[Self]>::from_suffix_with_trailing_elements(bytes, count) + <[Self]>::from_suffix_with_trailing_elements(bytes, count).ok() } #[deprecated(since = "0.8.0", note = 
"`FromBytes::mut_from` now supports slices")] @@ -2627,7 +2673,7 @@ pub unsafe trait FromBytes: FromZeros { where Self: Sized + IntoBytes + Immutable, { - <[Self]>::mut_from(bytes) + <[Self]>::mut_from(bytes).ok() } /// Interprets the prefix of the given `bytes` as a `&mut [Self]` with @@ -2682,7 +2728,7 @@ pub unsafe trait FromBytes: FromZeros { fn mut_from_prefix_with_trailing_elements( bytes: &mut [u8], count: usize, - ) -> Option<(&mut Self, &mut [u8])> + ) -> Result<(&mut Self, &mut [u8]), CastError<&mut [u8], Self>> where Self: IntoBytes + KnownLayout + Immutable, { @@ -2701,7 +2747,7 @@ pub unsafe trait FromBytes: FromZeros { where Self: Sized + Immutable, { - <[Self]>::from_prefix_with_trailing_elements(bytes, count) + <[Self]>::from_prefix_with_trailing_elements(bytes, count).ok() } /// Interprets the suffix of the given `bytes` as a `&mut [Self]` with length @@ -2756,7 +2802,7 @@ pub unsafe trait FromBytes: FromZeros { fn mut_from_suffix_with_trailing_elements( bytes: &mut [u8], count: usize, - ) -> Option<(&mut [u8], &mut Self)> + ) -> Result<(&mut [u8], &mut Self), CastError<&mut [u8], Self>> where Self: IntoBytes + KnownLayout + Immutable, { @@ -2774,7 +2820,7 @@ pub unsafe trait FromBytes: FromZeros { where Self: Sized + IntoBytes + Immutable, { - <[Self]>::mut_from_suffix_with_trailing_elements(bytes, count) + <[Self]>::mut_from_suffix_with_trailing_elements(bytes, count).ok() } /// Reads a copy of `Self` from `bytes`. 
@@ -2808,11 +2854,16 @@ pub unsafe trait FromBytes: FromZeros { /// ``` #[must_use = "has no side effects"] #[inline] - fn read_from(bytes: &[u8]) -> Option + fn read_from(bytes: &[u8]) -> Result> where Self: Sized, { - Ref::<_, Unalign>::new_sized(bytes).map(|r| r.read().into_inner()) + match Ref::<_, Unalign>::new_sized(bytes) { + Ok(r) => Ok(r.read().into_inner()), + Err(CastError::Size(e)) => Err(e.with_dst()), + Err(CastError::Alignment(_)) => unreachable!(), + Err(CastError::Validity(i)) => unreachable_infallible(i), + } } /// Reads a copy of `Self` from the prefix of `bytes`. @@ -2848,11 +2899,16 @@ pub unsafe trait FromBytes: FromZeros { /// ``` #[must_use = "has no side effects"] #[inline] - fn read_from_prefix(bytes: &[u8]) -> Option + fn read_from_prefix(bytes: &[u8]) -> Result> where Self: Sized, { - Ref::<_, Unalign>::new_sized_from_prefix(bytes).map(|(r, _)| r.read().into_inner()) + match Ref::<_, Unalign>::new_sized_from_prefix(bytes) { + Ok((r, _)) => Ok(r.read().into_inner()), + Err(CastError::Size(e)) => Err(e.with_dst()), + Err(CastError::Alignment(_)) => unreachable!(), + Err(CastError::Validity(i)) => unreachable_infallible(i), + } } /// Reads a copy of `Self` from the suffix of `bytes`. 
@@ -2882,11 +2938,16 @@ pub unsafe trait FromBytes: FromZeros { /// ``` #[must_use = "has no side effects"] #[inline] - fn read_from_suffix(bytes: &[u8]) -> Option + fn read_from_suffix(bytes: &[u8]) -> Result> where Self: Sized, { - Ref::<_, Unalign>::new_sized_from_suffix(bytes).map(|(_, r)| r.read().into_inner()) + match Ref::<_, Unalign>::new_sized_from_suffix(bytes) { + Ok((_, r)) => Ok(r.read().into_inner()), + Err(CastError::Size(e)) => Err(CastError::Size(e.with_dst())), + Err(CastError::Alignment(_)) => unreachable!(), + Err(CastError::Validity(i)) => unreachable_infallible(i), + } } #[deprecated(since = "0.8.0", note = "`FromBytes::ref_from` now supports slices")] @@ -2897,7 +2958,7 @@ pub unsafe trait FromBytes: FromZeros { where Self: Sized + Immutable, { - <[Self]>::ref_from(bytes) + <[Self]>::ref_from(bytes).ok() } } @@ -3268,16 +3329,15 @@ pub unsafe trait IntoBytes { /// ``` #[must_use = "callers should check the return value to see if the operation succeeded"] #[inline] - fn write_to(&self, bytes: &mut [u8]) -> Option<()> + fn write_to(&self, bytes: &mut [u8]) -> Result<(), SizeError<&Self, &mut [u8]>> where Self: Immutable, { if bytes.len() != mem::size_of_val(self) { - return None; + return Err(SizeError::new(self)); } - bytes.copy_from_slice(self.as_bytes()); - Some(()) + Ok(()) } /// Writes a copy of `self` to the prefix of `bytes`. 
@@ -3329,13 +3389,18 @@ pub unsafe trait IntoBytes { /// ``` #[must_use = "callers should check the return value to see if the operation succeeded"] #[inline] - fn write_to_prefix(&self, bytes: &mut [u8]) -> Option<()> + fn write_to_prefix(&self, bytes: &mut [u8]) -> Result<(), SizeError<&Self, &mut [u8]>> where Self: Immutable, { let size = mem::size_of_val(self); - bytes.get_mut(..size)?.copy_from_slice(self.as_bytes()); - Some(()) + match bytes.get_mut(..size) { + Some(bytes) => { + bytes.copy_from_slice(self.as_bytes()); + Ok(()) + } + None => Err(SizeError::new(self)), + } } /// Writes a copy of `self` to the suffix of `bytes`. @@ -3394,16 +3459,26 @@ pub unsafe trait IntoBytes { /// ``` #[must_use = "callers should check the return value to see if the operation succeeded"] #[inline] - fn write_to_suffix(&self, bytes: &mut [u8]) -> Option<()> + fn write_to_suffix(&self, bytes: &mut [u8]) -> Result<(), SizeError<&Self, &mut [u8]>> where Self: Immutable, { - let start = bytes.len().checked_sub(mem::size_of_val(self))?; - // get_mut() should never return None here. We use ? rather than - // .unwrap() because in the event the branch is not optimized away, - // returning None is generally lighter-weight than panicking. - bytes.get_mut(start..)?.copy_from_slice(self.as_bytes()); - Some(()) + let start = if let Some(start) = bytes.len().checked_sub(mem::size_of_val(self)) { + start + } else { + return Err(SizeError::new(self)); + }; + let bytes = if let Some(bytes) = bytes.get_mut(start..) { + bytes + } else { + // get_mut() should never return None here. We return a `SizeError` + // rather than .unwrap() because in the event the branch is not + // optimized away, returning a value is generally lighter-weight + // than panicking. + return Err(SizeError::new(self)); + }; + bytes.copy_from_slice(self.as_bytes()); + Ok(()) } #[deprecated(since = "0.8.0", note = "`IntoBytes::as_bytes_mut` was renamed to `as_mut_bytes`")] @@ -4924,7 +4999,7 @@ macro_rules! 
include_value { /// } /// /// impl UdpPacket { -/// pub fn set_src_port(&mut self, src_port: [u8; 2]) { +/// pub fn with_src_port(&mut self, src_port: [u8; 2]) { /// self.header.src_port = src_port; /// } /// } @@ -4958,10 +5033,12 @@ where T: KnownLayout + Immutable + ?Sized, { #[must_use = "has no side effects"] - fn bikeshed_new_known_layout(bytes: B) -> Option> { - let _ = Ptr::from_ref(bytes.deref()).try_cast_into_no_leftover::()?; + fn bikeshed_new_known_layout(bytes: B) -> Result, CastError> { + if let Err(e) = Ptr::from_ref(bytes.deref()).try_cast_into_no_leftover::() { + return Err(e.with_src(()).with_src(bytes).with_dst()); + }; // INVARIANTS: `try_cast_into_no_leftover` validates size and alignment. - Some(Ref(bytes, PhantomData)) + Ok(Ref(bytes, PhantomData)) } } @@ -4971,27 +5048,39 @@ where T: KnownLayout + Immutable + ?Sized, { #[must_use = "has no side effects"] - fn bikeshed_new_from_prefix_known_layout(bytes: B) -> Option<(Ref, B)> { - let (_, split_at) = Ptr::from_ref(bytes.deref()).try_cast_into::(CastType::Prefix)?; - let (bytes, suffix) = try_split_at(bytes, split_at)?; + fn bikeshed_new_from_prefix_known_layout(bytes: B) -> Result<(Ref, B), CastError> { + let split_at = match Ptr::from_ref(bytes.deref()).try_cast_into::(CastType::Prefix) { + Ok((_, split_at)) => split_at, + Err(e) => { + return Err(e.with_src(()).with_src(bytes)); + } + }; + let (bytes, suffix) = + try_split_at(bytes, split_at).map_err(|b| SizeError::new(b).into())?; // INVARIANTS: `try_cast_into` validates size and alignment, and returns // a `split_at` that indicates how many bytes of `bytes` correspond to a // valid `T`. By safety postcondition on `SplitByteSlice::try_split_at` // we can rely `try_split_at` to produce the correct `bytes` and // `suffix`. 
- Some((Ref(bytes, PhantomData), suffix)) + Ok((Ref(bytes, PhantomData), suffix)) } #[must_use = "has no side effects"] - fn bikeshed_new_from_suffix_known_layout(bytes: B) -> Option<(B, Ref)> { - let (_, split_at) = Ptr::from_ref(bytes.deref()).try_cast_into::(CastType::Suffix)?; - let (prefix, bytes) = try_split_at(bytes, split_at)?; + fn bikeshed_new_from_suffix_known_layout(bytes: B) -> Result<(B, Ref), CastError> { + let split_at = match Ptr::from_ref(bytes.deref()).try_cast_into::(CastType::Suffix) { + Ok((_, split_at)) => split_at, + Err(e) => { + return Err(e.with_src(()).with_src(bytes)); + } + }; + let (prefix, bytes) = + try_split_at(bytes, split_at).map_err(|b| SizeError::new(b).into())?; // INVARIANTS: `try_cast_into` validates size and alignment, and returns // a `split_at` that indicates how many bytes of `bytes` correspond to a // valid `T`. By safety postcondition on `SplitByteSlice::try_split_at` // we can rely on `try_split_at` to produce the correct `prefix` and // `bytes`. - Some((prefix, Ref(bytes, PhantomData))) + Ok((prefix, Ref(bytes, PhantomData))) } } @@ -5000,12 +5089,15 @@ where B: ByteSlice, { #[must_use = "has no side effects"] - fn new_sized(bytes: B) -> Option> { - if bytes.len() != mem::size_of::() || !util::aligned_to::<_, T>(bytes.deref()) { - return None; + fn new_sized(bytes: B) -> Result, CastError> { + if bytes.len() != mem::size_of::() { + return Err(SizeError::new(bytes).into()); + } + if !util::aligned_to::<_, T>(bytes.deref()) { + return Err(AlignmentError::new(bytes).into()); } // INVARIANTS: We just validated size and alignment. 
- Some(Ref(bytes, PhantomData)) + Ok(Ref(bytes, PhantomData)) } } @@ -5014,26 +5106,35 @@ where B: SplitByteSlice, { #[must_use = "has no side effects"] - fn new_sized_from_prefix(bytes: B) -> Option<(Ref, B)> { - if bytes.len() < mem::size_of::() || !util::aligned_to::<_, T>(bytes.deref()) { - return None; + fn new_sized_from_prefix(bytes: B) -> Result<(Ref, B), CastError> { + if bytes.len() < mem::size_of::() { + return Err(SizeError::new(bytes).into()); } - let (bytes, suffix) = try_split_at(bytes, mem::size_of::())?; + if !util::aligned_to::<_, T>(bytes.deref()) { + return Err(AlignmentError::new(bytes).into()); + } + let (bytes, suffix) = + try_split_at(bytes, mem::size_of::()).map_err(|b| SizeError::new(b).into())?; // INVARIANTS: We just validated alignment and that `bytes` is at least // as large as `T`. `try_split_at(bytes, mem::size_of::())?` ensures // that the new `bytes` is exactly the size of `T`. By safety // postcondition on `SplitByteSlice::try_split_at` we can rely on // `try_split_at` to produce the correct `bytes` and `suffix`. 
- Some((Ref(bytes, PhantomData), suffix)) + Ok((Ref(bytes, PhantomData), suffix)) } #[must_use = "has no side effects"] - fn new_sized_from_suffix(bytes: B) -> Option<(B, Ref)> { + fn new_sized_from_suffix(bytes: B) -> Result<(B, Ref), CastError> { let bytes_len = bytes.len(); - let split_at = bytes_len.checked_sub(mem::size_of::())?; - let (prefix, bytes) = try_split_at(bytes, split_at)?; + let split_at = if let Some(split_at) = bytes_len.checked_sub(mem::size_of::()) { + split_at + } else { + return Err(SizeError::new(bytes).into()); + }; + let (prefix, bytes) = + try_split_at(bytes, split_at).map_err(|b| SizeError::new(b).into())?; if !util::aligned_to::<_, T>(bytes.deref()) { - return None; + return Err(AlignmentError::new(bytes).into()); } // INVARIANTS: Since `split_at` is defined as `bytes_len - // size_of::()`, the `bytes` which results from `let (prefix, bytes) @@ -5041,7 +5142,7 @@ where // constructing `bytes`, we validate that it has the proper alignment. // By safety postcondition on `SplitByteSlice::try_split_at` we can rely // on `try_split_at` to produce the correct `prefix` and `bytes`. - Some((prefix, Ref(bytes, PhantomData))) + Ok((prefix, Ref(bytes, PhantomData))) } } @@ -5057,10 +5158,12 @@ where /// these checks fail, it returns `None`. #[must_use = "has no side effects"] #[inline] - pub fn new(bytes: B) -> Option> { - let _ = Ptr::from_ref(bytes.deref()).try_cast_into_no_leftover::()?; + pub fn new(bytes: B) -> Result, CastError> { + if let Err(e) = Ptr::from_ref(bytes.deref()).try_cast_into_no_leftover::() { + return Err(e.with_src(()).with_src(bytes)); + } // INVARIANTS: `try_cast_into_no_leftover` validates size and alignment. - Some(Ref(bytes, PhantomData)) + Ok(Ref(bytes, PhantomData)) } } @@ -5078,15 +5181,22 @@ where /// checks fail, it returns `None`. 
#[must_use = "has no side effects"] #[inline] - pub fn new_from_prefix(bytes: B) -> Option<(Ref, B)> { - let (_, split_at) = Ptr::from_ref(bytes.deref()).try_cast_into::(CastType::Prefix)?; - let (bytes, suffix) = try_split_at(bytes, split_at)?; + pub fn new_from_prefix(bytes: B) -> Result<(Ref, B), CastError> { + let split_at = match Ptr::from_ref(bytes.deref()).try_cast_into::(CastType::Prefix) { + Ok((_, split_at)) => split_at, + Err(e) => { + return Err(e.with_src(()).with_src(bytes)); + } + }; + + let (bytes, suffix) = + try_split_at(bytes, split_at).map_err(|b| SizeError::new(b).into())?; // INVARIANTS: `try_cast_into` validates size and alignment, and returns // a `split_at` that indicates how many bytes of `bytes` correspond to a // valid `T`. By safety postcondition on `SplitByteSlice::try_split_at` // we can rely on `try_split_at` to produce the correct `bytes` and // `suffix`. - Some((Ref(bytes, PhantomData), suffix)) + Ok((Ref(bytes, PhantomData), suffix)) } /// Constructs a new `Ref` from the suffix of a byte slice. @@ -5099,15 +5209,23 @@ where /// `None`. #[must_use = "has no side effects"] #[inline] - pub fn new_from_suffix(bytes: B) -> Option<(B, Ref)> { - let (_, split_at) = Ptr::from_ref(bytes.deref()).try_cast_into::(CastType::Suffix)?; - let (prefix, bytes) = try_split_at(bytes, split_at)?; + pub fn new_from_suffix(bytes: B) -> Result<(B, Ref), CastError> { + let split_at = match Ptr::from_ref(bytes.deref()).try_cast_into::(CastType::Suffix) { + Ok((_, split_at)) => split_at, + Err(e) => { + let e = e.with_src(()); + return Err(e.with_src(bytes)); + } + }; + + let (prefix, bytes) = + try_split_at(bytes, split_at).map_err(|b| SizeError::new(b).into())?; // INVARIANTS: `try_cast_into` validates size and alignment, and returns // a `try_split_at` that indicates how many bytes of `bytes` correspond // to a valid `T`. 
By safety postcondition on // `SplitByteSlice::try_split_at` we can rely on `try_split_at` to // produce the correct `prefix` and `bytes`. - Some((prefix, Ref(bytes, PhantomData))) + Ok((prefix, Ref(bytes, PhantomData))) } } @@ -5120,13 +5238,16 @@ where // update references to this name in `#[deprecated]` attributes elsewhere. #[doc(hidden)] #[inline] - pub fn with_trailing_elements_from_prefix(bytes: B, count: usize) -> Option<(Ref, B)> { + pub fn with_trailing_elements_from_prefix( + bytes: B, + count: usize, + ) -> Result<(Ref, B), CastError> { let expected_len = match count.size_for_metadata(T::LAYOUT) { Some(len) => len, - None => return None, + None => return Err(SizeError::new(bytes).into()), }; if bytes.len() < expected_len { - return None; + return Err(SizeError::new(bytes).into()); } let (prefix, bytes) = bytes.split_at(expected_len); Self::new(prefix).map(move |l| (l, bytes)) @@ -5142,12 +5263,19 @@ where // update references to this name in `#[deprecated]` attributes elsewhere. #[doc(hidden)] #[inline] - pub fn with_trailing_elements_from_suffix(bytes: B, count: usize) -> Option<(B, Ref)> { + pub fn with_trailing_elements_from_suffix( + bytes: B, + count: usize, + ) -> Result<(B, Ref), CastError> { let expected_len = match count.size_for_metadata(T::LAYOUT) { Some(len) => len, - None => return None, + None => return Err(SizeError::new(bytes).into()), + }; + let split_at = if let Some(split_at) = bytes.len().checked_sub(expected_len) { + split_at + } else { + return Err(SizeError::new(bytes).into()); }; - let split_at = bytes.len().checked_sub(expected_len)?; let (bytes, suffix) = bytes.split_at(split_at); Self::new(suffix).map(move |l| (bytes, l)) } @@ -5164,8 +5292,13 @@ where /// constructs a new `Ref`. If the check fails, it returns `None`. 
#[must_use = "has no side effects"] #[inline(always)] - pub fn new_unaligned(bytes: B) -> Option> { - Ref::new(bytes) + pub fn new_unaligned(bytes: B) -> Result, SizeError> { + match Ref::new(bytes) { + Ok(dst) => Ok(dst), + Err(CastError::Size(e)) => Err(e), + Err(CastError::Alignment(_)) => unreachable!(), + Err(CastError::Validity(i)) => unreachable_infallible(i), + } } } @@ -5183,8 +5316,12 @@ where /// caller. If the length check fails, it returns `None`. #[must_use = "has no side effects"] #[inline(always)] - pub fn new_unaligned_from_prefix(bytes: B) -> Option<(Ref, B)> { - Ref::new_from_prefix(bytes) + pub fn new_unaligned_from_prefix(bytes: B) -> Result<(Ref, B), SizeError> { + Ref::new_from_prefix(bytes).map_err(|e| match e { + CastError::Size(e) => e, + CastError::Alignment(_) => unreachable!(), + CastError::Validity(i) => unreachable_infallible(i), + }) } /// Constructs a new `Ref` from the suffix of a byte slice for a type with @@ -5196,8 +5333,12 @@ where /// caller. If the length check fails, it returns `None`. 
#[must_use = "has no side effects"] #[inline(always)] - pub fn new_unaligned_from_suffix(bytes: B) -> Option<(B, Ref)> { - Ref::new_from_suffix(bytes) + pub fn new_unaligned_from_suffix(bytes: B) -> Result<(B, Ref), SizeError> { + Ref::new_from_suffix(bytes).map_err(|e| match e { + CastError::Size(e) => e, + CastError::Alignment(_) => unreachable!(), + CastError::Validity(i) => unreachable_infallible(i), + }) } } @@ -5213,7 +5354,7 @@ where pub fn with_trailing_elements_unaligned_from_prefix( bytes: B, count: usize, - ) -> Option<(Ref, B)> { + ) -> Result<(Ref, B), CastError> { Self::with_trailing_elements_from_prefix(bytes, count) } } @@ -5230,7 +5371,7 @@ where pub fn with_trailing_elements_unaligned_from_suffix( bytes: B, count: usize, - ) -> Option<(B, Ref)> { + ) -> Result<(B, Ref), CastError> { Self::with_trailing_elements_from_suffix(bytes, count) } } @@ -5496,7 +5637,7 @@ pub unsafe trait SplitByteSlice: ByteSlice { #[must_use] #[inline] fn split_at(self, mid: usize) -> (Self, Self) { - if let Some(splits) = try_split_at(self, mid) { + if let Ok(splits) = try_split_at(self, mid) { splits } else { panic!("mid > len") @@ -5516,16 +5657,15 @@ pub unsafe trait SplitByteSlice: ByteSlice { /// Attempts to split the slice at the midpoint. /// -/// `x.try_split_at(mid)` returns `Some((x[..mid], x[mid..]))` if `mid <= -/// x.deref().len()` and otherwise returns `None`. +/// `x.try_split_at(mid)` returns `Ok((x[..mid], x[mid..]))` if `mid <= +/// x.deref().len()` and otherwise returns `Err(x)`. /// /// # Safety /// /// Unsafe code may rely on this function correctly implementing the above /// functionality. -#[must_use] #[inline] -fn try_split_at(slice: S, mid: usize) -> Option<(S, S)> +fn try_split_at(slice: S, mid: usize) -> Result<(S, S), S> where S: SplitByteSlice, { @@ -5536,9 +5676,9 @@ where // dereference to a byte slice of the same address and length. 
Thus, we // can be sure that the above precondition remains satisfied through the // call to `split_at_unchecked`. - unsafe { Some(slice.split_at_unchecked(mid)) } + unsafe { Ok(slice.split_at_unchecked(mid)) } } else { - None + Err(slice) } } @@ -6273,16 +6413,22 @@ mod tests { const ODDS: [usize; 8] = [1, 3, 5, 7, 9, 11, 13, 15]; // base_size is too big for the memory region. - test!(layout(((1..8) | ((1..8), (1..8))), _).validate(_, [0], _), Ok(None)); - test!(layout(((2..8) | ((2..8), (2..8))), _).validate(_, [1], _), Ok(None)); + test!( + layout(((1..8) | ((1..8), (1..8))), _).validate(_, [0], _), + Ok(Err(MetadataCastError::Size)) + ); + test!( + layout(((2..8) | ((2..8), (2..8))), _).validate(_, [1], _), + Ok(Err(MetadataCastError::Size)) + ); // addr is unaligned for prefix cast - test!(layout(_, [2]).validate(ODDS, _, Prefix), Ok(None)); - test!(layout(_, [2]).validate(ODDS, _, Prefix), Ok(None)); + test!(layout(_, [2]).validate(ODDS, _, Prefix), Ok(Err(MetadataCastError::Alignment))); + test!(layout(_, [2]).validate(ODDS, _, Prefix), Ok(Err(MetadataCastError::Alignment))); // addr is aligned, but end of buffer is unaligned for suffix cast - test!(layout(_, [2]).validate(EVENS, ODDS, Suffix), Ok(None)); - test!(layout(_, [2]).validate(EVENS, ODDS, Suffix), Ok(None)); + test!(layout(_, [2]).validate(EVENS, ODDS, Suffix), Ok(Err(MetadataCastError::Alignment))); + test!(layout(_, [2]).validate(EVENS, ODDS, Suffix), Ok(Err(MetadataCastError::Alignment))); // Unfortunately, these constants cannot easily be used in the // implementation of `validate_cast_and_convert_metadata`, since @@ -6320,7 +6466,7 @@ mod tests { fn validate_behavior( (layout, addr, bytes_len, cast_type): (DstLayout, usize, usize, CastType), ) { - if let Some((elems, split_at)) = + if let Ok((elems, split_at)) = layout.validate_cast_and_convert_metadata(addr, bytes_len, cast_type) { let (size_info, align) = (layout.size_info, layout.align); @@ -7229,54 +7375,57 @@ mod tests { // Test 
`FromBytes::{read_from, read_from_prefix, read_from_suffix}`. - assert_eq!(u64::read_from(&VAL_BYTES[..]), Some(VAL)); + assert_eq!(u64::read_from(&VAL_BYTES[..]), Ok(VAL)); // The first 8 bytes are from `VAL_BYTES` and the second 8 bytes are all // zeros. let bytes_with_prefix: [u8; 16] = transmute!([VAL_BYTES, [0; 8]]); - assert_eq!(u64::read_from_prefix(&bytes_with_prefix[..]), Some(VAL)); - assert_eq!(u64::read_from_suffix(&bytes_with_prefix[..]), Some(0)); + assert_eq!(u64::read_from_prefix(&bytes_with_prefix[..]), Ok(VAL)); + assert_eq!(u64::read_from_suffix(&bytes_with_prefix[..]), Ok(0)); // The first 8 bytes are all zeros and the second 8 bytes are from // `VAL_BYTES` let bytes_with_suffix: [u8; 16] = transmute!([[0; 8], VAL_BYTES]); - assert_eq!(u64::read_from_prefix(&bytes_with_suffix[..]), Some(0)); - assert_eq!(u64::read_from_suffix(&bytes_with_suffix[..]), Some(VAL)); + assert_eq!(u64::read_from_prefix(&bytes_with_suffix[..]), Ok(0)); + assert_eq!(u64::read_from_suffix(&bytes_with_suffix[..]), Ok(VAL)); // Test `IntoBytes::{write_to, write_to_prefix, write_to_suffix}`. 
let mut bytes = [0u8; 8]; - assert_eq!(VAL.write_to(&mut bytes[..]), Some(())); + assert_eq!(VAL.write_to(&mut bytes[..]), Ok(())); assert_eq!(bytes, VAL_BYTES); let mut bytes = [0u8; 16]; - assert_eq!(VAL.write_to_prefix(&mut bytes[..]), Some(())); + assert_eq!(VAL.write_to_prefix(&mut bytes[..]), Ok(())); let want: [u8; 16] = transmute!([VAL_BYTES, [0; 8]]); assert_eq!(bytes, want); let mut bytes = [0u8; 16]; - assert_eq!(VAL.write_to_suffix(&mut bytes[..]), Some(())); + assert_eq!(VAL.write_to_suffix(&mut bytes[..]), Ok(())); let want: [u8; 16] = transmute!([[0; 8], VAL_BYTES]); assert_eq!(bytes, want); } #[test] fn test_try_from_bytes_try_read_from() { - assert_eq!(::try_read_from(&[0]), Some(false)); - assert_eq!(::try_read_from(&[1]), Some(true)); + assert_eq!(::try_read_from(&[0]), Ok(false)); + assert_eq!(::try_read_from(&[1]), Ok(true)); // If we don't pass enough bytes, it fails. - assert_eq!(::try_read_from(&[]), None); + assert!(matches!(::try_read_from(&[]), Err(TryReadError::Size(_)))); // If we pass too many bytes, it fails. - assert_eq!(::try_read_from(&[0, 0]), None); + assert!(matches!(::try_read_from(&[0, 0]), Err(TryReadError::Size(_)))); // If we pass an invalid value, it fails. - assert_eq!(::try_read_from(&[2]), None); + assert!(matches!( + ::try_read_from(&[2]), + Err(TryReadError::Validity(_)) + )); // Reading from a misaligned buffer should still succeed. Since `AU64`'s // alignment is 8, and since we read from two adjacent addresses one // byte apart, it is guaranteed that at least one of them (though // possibly both) will be misaligned. let bytes: [u8; 9] = [0, 0, 0, 0, 0, 0, 0, 0, 0]; - assert_eq!(::try_read_from(&bytes[..8]), Some(AU64(0))); - assert_eq!(::try_read_from(&bytes[1..9]), Some(AU64(0))); + assert_eq!(::try_read_from(&bytes[..8]), Ok(AU64(0))); + assert_eq!(::try_read_from(&bytes[1..9]), Ok(AU64(0))); } #[test] @@ -7722,36 +7871,36 @@ mod tests { // Fail because the buffer is too large. 
let mut buf = Align::<[u8; 16], AU64>::default(); // `buf.t` should be aligned to 8, so only the length check should fail. - assert!(AU64::ref_from(&buf.t[..]).is_none()); - assert!(AU64::mut_from(&mut buf.t[..]).is_none()); - assert!(<[u8; 8]>::ref_from(&buf.t[..]).is_none()); - assert!(<[u8; 8]>::mut_from(&mut buf.t[..]).is_none()); + assert!(AU64::ref_from(&buf.t[..]).is_err()); + assert!(AU64::mut_from(&mut buf.t[..]).is_err()); + assert!(<[u8; 8]>::ref_from(&buf.t[..]).is_err()); + assert!(<[u8; 8]>::mut_from(&mut buf.t[..]).is_err()); // Fail because the buffer is too small. let mut buf = Align::<[u8; 4], AU64>::default(); - assert!(AU64::ref_from(&buf.t[..]).is_none()); - assert!(AU64::mut_from(&mut buf.t[..]).is_none()); - assert!(<[u8; 8]>::ref_from(&buf.t[..]).is_none()); - assert!(<[u8; 8]>::mut_from(&mut buf.t[..]).is_none()); - assert!(AU64::ref_from_prefix(&buf.t[..]).is_none()); - assert!(AU64::mut_from_prefix(&mut buf.t[..]).is_none()); - assert!(AU64::ref_from_suffix(&buf.t[..]).is_none()); - assert!(AU64::mut_from_suffix(&mut buf.t[..]).is_none()); - assert!(<[u8; 8]>::ref_from_prefix(&buf.t[..]).is_none()); - assert!(<[u8; 8]>::mut_from_prefix(&mut buf.t[..]).is_none()); - assert!(<[u8; 8]>::ref_from_suffix(&buf.t[..]).is_none()); - assert!(<[u8; 8]>::mut_from_suffix(&mut buf.t[..]).is_none()); + assert!(AU64::ref_from(&buf.t[..]).is_err()); + assert!(AU64::mut_from(&mut buf.t[..]).is_err()); + assert!(<[u8; 8]>::ref_from(&buf.t[..]).is_err()); + assert!(<[u8; 8]>::mut_from(&mut buf.t[..]).is_err()); + assert!(AU64::ref_from_prefix(&buf.t[..]).is_err()); + assert!(AU64::mut_from_prefix(&mut buf.t[..]).is_err()); + assert!(AU64::ref_from_suffix(&buf.t[..]).is_err()); + assert!(AU64::mut_from_suffix(&mut buf.t[..]).is_err()); + assert!(<[u8; 8]>::ref_from_prefix(&buf.t[..]).is_err()); + assert!(<[u8; 8]>::mut_from_prefix(&mut buf.t[..]).is_err()); + assert!(<[u8; 8]>::ref_from_suffix(&buf.t[..]).is_err()); + assert!(<[u8; 8]>::mut_from_suffix(&mut 
buf.t[..]).is_err()); // Fail because the alignment is insufficient. let mut buf = Align::<[u8; 13], AU64>::default(); - assert!(AU64::ref_from(&buf.t[1..]).is_none()); - assert!(AU64::mut_from(&mut buf.t[1..]).is_none()); - assert!(AU64::ref_from(&buf.t[1..]).is_none()); - assert!(AU64::mut_from(&mut buf.t[1..]).is_none()); - assert!(AU64::ref_from_prefix(&buf.t[1..]).is_none()); - assert!(AU64::mut_from_prefix(&mut buf.t[1..]).is_none()); - assert!(AU64::ref_from_suffix(&buf.t[..]).is_none()); - assert!(AU64::mut_from_suffix(&mut buf.t[..]).is_none()); + assert!(AU64::ref_from(&buf.t[1..]).is_err()); + assert!(AU64::mut_from(&mut buf.t[1..]).is_err()); + assert!(AU64::ref_from(&buf.t[1..]).is_err()); + assert!(AU64::mut_from(&mut buf.t[1..]).is_err()); + assert!(AU64::ref_from_prefix(&buf.t[1..]).is_err()); + assert!(AU64::mut_from_prefix(&mut buf.t[1..]).is_err()); + assert!(AU64::ref_from_suffix(&buf.t[..]).is_err()); + assert!(AU64::mut_from_suffix(&mut buf.t[..]).is_err()); } #[test] @@ -7762,38 +7911,38 @@ mod tests { // A buffer with an alignment of 8. let buf = Align::<[u8; 16], AU64>::default(); // `buf.t` should be aligned to 8, so only the length check should fail. - assert!(Ref::<_, AU64>::new(&buf.t[..]).is_none()); - assert!(Ref::<_, [u8; 8]>::new_unaligned(&buf.t[..]).is_none()); + assert!(Ref::<_, AU64>::new(&buf.t[..]).is_err()); + assert!(Ref::<_, [u8; 8]>::new_unaligned(&buf.t[..]).is_err()); // Fail because the buffer is too small. // A buffer with an alignment of 8. let buf = Align::<[u8; 4], AU64>::default(); // `buf.t` should be aligned to 8, so only the length check should fail. 
- assert!(Ref::<_, AU64>::new(&buf.t[..]).is_none()); - assert!(Ref::<_, [u8; 8]>::new_unaligned(&buf.t[..]).is_none()); - assert!(Ref::<_, AU64>::new_from_prefix(&buf.t[..]).is_none()); - assert!(Ref::<_, AU64>::new_from_suffix(&buf.t[..]).is_none()); - assert!(Ref::<_, [u8; 8]>::new_unaligned_from_prefix(&buf.t[..]).is_none()); - assert!(Ref::<_, [u8; 8]>::new_unaligned_from_suffix(&buf.t[..]).is_none()); + assert!(Ref::<_, AU64>::new(&buf.t[..]).is_err()); + assert!(Ref::<_, [u8; 8]>::new_unaligned(&buf.t[..]).is_err()); + assert!(Ref::<_, AU64>::new_from_prefix(&buf.t[..]).is_err()); + assert!(Ref::<_, AU64>::new_from_suffix(&buf.t[..]).is_err()); + assert!(Ref::<_, [u8; 8]>::new_unaligned_from_prefix(&buf.t[..]).is_err()); + assert!(Ref::<_, [u8; 8]>::new_unaligned_from_suffix(&buf.t[..]).is_err()); // Fail because the length is not a multiple of the element size. let buf = Align::<[u8; 12], AU64>::default(); // `buf.t` has length 12, but element size is 8. - assert!(Ref::<_, [AU64]>::new(&buf.t[..]).is_none()); - assert!(Ref::<_, [[u8; 8]]>::new_unaligned(&buf.t[..]).is_none()); + assert!(Ref::<_, [AU64]>::new(&buf.t[..]).is_err()); + assert!(Ref::<_, [[u8; 8]]>::new_unaligned(&buf.t[..]).is_err()); // Fail because the buffer is too short. let buf = Align::<[u8; 12], AU64>::default(); // `buf.t` has length 12, but the element size is 8 (and we're expecting // two of them). 
- assert!(Ref::<_, [AU64]>::with_trailing_elements_from_prefix(&buf.t[..], 2).is_none()); - assert!(Ref::<_, [AU64]>::with_trailing_elements_from_suffix(&buf.t[..], 2).is_none()); + assert!(Ref::<_, [AU64]>::with_trailing_elements_from_prefix(&buf.t[..], 2).is_err()); + assert!(Ref::<_, [AU64]>::with_trailing_elements_from_suffix(&buf.t[..], 2).is_err()); assert!(Ref::<_, [[u8; 8]]>::with_trailing_elements_unaligned_from_prefix(&buf.t[..], 2) - .is_none()); + .is_err()); assert!(Ref::<_, [[u8; 8]]>::with_trailing_elements_unaligned_from_suffix(&buf.t[..], 2) - .is_none()); + .is_err()); // Fail because the alignment is insufficient. @@ -7802,33 +7951,33 @@ mod tests { let buf = Align::<[u8; 13], AU64>::default(); // Slicing from 1, we get a buffer with size 12 (so the length check // should succeed) but an alignment of only 1, which is insufficient. - assert!(Ref::<_, AU64>::new(&buf.t[1..]).is_none()); - assert!(Ref::<_, AU64>::new_from_prefix(&buf.t[1..]).is_none()); - assert!(Ref::<_, [AU64]>::new(&buf.t[1..]).is_none()); - assert!(Ref::<_, [AU64]>::with_trailing_elements_from_prefix(&buf.t[1..], 1).is_none()); - assert!(Ref::<_, [AU64]>::with_trailing_elements_from_suffix(&buf.t[1..], 1).is_none()); + assert!(Ref::<_, AU64>::new(&buf.t[1..]).is_err()); + assert!(Ref::<_, AU64>::new_from_prefix(&buf.t[1..]).is_err()); + assert!(Ref::<_, [AU64]>::new(&buf.t[1..]).is_err()); + assert!(Ref::<_, [AU64]>::with_trailing_elements_from_prefix(&buf.t[1..], 1).is_err()); + assert!(Ref::<_, [AU64]>::with_trailing_elements_from_suffix(&buf.t[1..], 1).is_err()); // Slicing is unnecessary here because `new_from_suffix` uses the suffix // of the slice, which has odd alignment. - assert!(Ref::<_, AU64>::new_from_suffix(&buf.t[..]).is_none()); + assert!(Ref::<_, AU64>::new_from_suffix(&buf.t[..]).is_err()); // Fail due to arithmetic overflow. 
let buf = Align::<[u8; 16], AU64>::default(); let unreasonable_len = usize::MAX / mem::size_of::() + 1; assert!(Ref::<_, [AU64]>::with_trailing_elements_from_prefix(&buf.t[..], unreasonable_len) - .is_none()); + .is_err()); assert!(Ref::<_, [AU64]>::with_trailing_elements_from_suffix(&buf.t[..], unreasonable_len) - .is_none()); + .is_err()); assert!(Ref::<_, [[u8; 8]]>::with_trailing_elements_unaligned_from_prefix( &buf.t[..], unreasonable_len ) - .is_none()); + .is_err()); assert!(Ref::<_, [[u8; 8]]>::with_trailing_elements_unaligned_from_suffix( &buf.t[..], unreasonable_len ) - .is_none()); + .is_err()); } // Tests for ensuring that, if a ZST is passed into a slice-like function, @@ -7882,43 +8031,43 @@ mod tests { t.as_mut_bytes()[0] ^= 0xFF; // `write_to` rejects slices that are too small or too large. - assert_eq!(t.write_to(&mut vec![0; N - 1][..]), None); - assert_eq!(t.write_to(&mut vec![0; N + 1][..]), None); + assert!(t.write_to(&mut vec![0; N - 1][..]).is_err()); + assert!(t.write_to(&mut vec![0; N + 1][..]).is_err()); // `write_to` works as expected. let mut bytes = [0; N]; - assert_eq!(t.write_to(&mut bytes[..]), Some(())); + assert_eq!(t.write_to(&mut bytes[..]), Ok(())); assert_eq!(bytes, t.as_bytes()); // `write_to_prefix` rejects slices that are too small. - assert_eq!(t.write_to_prefix(&mut vec![0; N - 1][..]), None); + assert!(t.write_to_prefix(&mut vec![0; N - 1][..]).is_err()); // `write_to_prefix` works with exact-sized slices. let mut bytes = [0; N]; - assert_eq!(t.write_to_prefix(&mut bytes[..]), Some(())); + assert_eq!(t.write_to_prefix(&mut bytes[..]), Ok(())); assert_eq!(bytes, t.as_bytes()); // `write_to_prefix` works with too-large slices, and any bytes past // the prefix aren't modified. 
let mut too_many_bytes = vec![0; N + 1]; too_many_bytes[N] = 123; - assert_eq!(t.write_to_prefix(&mut too_many_bytes[..]), Some(())); + assert_eq!(t.write_to_prefix(&mut too_many_bytes[..]), Ok(())); assert_eq!(&too_many_bytes[..N], t.as_bytes()); assert_eq!(too_many_bytes[N], 123); // `write_to_suffix` rejects slices that are too small. - assert_eq!(t.write_to_suffix(&mut vec![0; N - 1][..]), None); + assert!(t.write_to_suffix(&mut vec![0; N - 1][..]).is_err()); // `write_to_suffix` works with exact-sized slices. let mut bytes = [0; N]; - assert_eq!(t.write_to_suffix(&mut bytes[..]), Some(())); + assert_eq!(t.write_to_suffix(&mut bytes[..]), Ok(())); assert_eq!(bytes, t.as_bytes()); // `write_to_suffix` works with too-large slices, and any bytes // before the suffix aren't modified. let mut too_many_bytes = vec![0; N + 1]; too_many_bytes[0] = 123; - assert_eq!(t.write_to_suffix(&mut too_many_bytes[..]), Some(())); + assert_eq!(t.write_to_suffix(&mut too_many_bytes[..]), Ok(())); assert_eq!(&too_many_bytes[1..], t.as_bytes()); assert_eq!(too_many_bytes[0], 123); } @@ -8256,7 +8405,7 @@ mod tests { &self, bytes: &'bytes [u8], ) -> Option> { - Some(T::try_ref_from(bytes)) + Some(T::try_ref_from(bytes).ok()) } #[allow(clippy::needless_lifetimes)] @@ -8264,7 +8413,7 @@ mod tests { &self, bytes: &'bytes mut [u8], ) -> Option> { - Some(T::try_mut_from(bytes)) + Some(T::try_mut_from(bytes).ok()) } } @@ -8274,7 +8423,7 @@ mod tests { impl TestTryReadFrom for AutorefWrapper { fn test_try_read_from(&self, bytes: &[u8]) -> Option> { - Some(T::try_read_from(bytes)) + Some(T::try_read_from(bytes).ok()) } } diff --git a/src/pointer/ptr.rs b/src/pointer/ptr.rs index ba25963ba30..1d3dbe8bda5 100644 --- a/src/pointer/ptr.rs +++ b/src/pointer/ptr.rs @@ -653,7 +653,7 @@ mod _conversions { /// State transitions between invariants. 
mod _transitions { use super::*; - use crate::TryFromBytes; + use crate::{TryFromBytes, ValidityError}; impl<'a, T, I> Ptr<'a, T, I> where @@ -874,10 +874,15 @@ mod _transitions { /// /// This method will panic if /// [`T::is_bit_valid`][TryFromBytes::is_bit_valid] panics. + /// + /// # Safety + /// + /// Unsafe code may rely on this method's returned `ValidityError` + /// containing `self`. #[inline] pub(crate) fn try_into_valid( mut self, - ) -> Option> + ) -> Result, ValidityError> where T: TryFromBytes, I::Aliasing: invariant::at_least::Shared, @@ -889,9 +894,9 @@ mod _transitions { if T::is_bit_valid(self.reborrow().forget_exclusive().forget_aligned()) { // SAFETY: If `T::is_bit_valid`, code may assume that `self` // contains a bit-valid instance of `Self`. - Some(unsafe { self.assume_valid() }) + Ok(unsafe { self.assume_valid() }) } else { - None + Err(ValidityError::new(self)) } } @@ -922,7 +927,7 @@ mod _transitions { /// Casts of the referent type. mod _casts { use super::*; - use crate::PointerMetadata; + use crate::{AlignmentError, CastError, MetadataCastError, PointerMetadata, SizeError}; impl<'a, T, I> Ptr<'a, T, I> where @@ -1022,18 +1027,37 @@ mod _casts { #[allow(clippy::wrong_self_convention)] pub(crate) fn as_bytes( self, + ) -> Ptr<'a, [u8], (I::Aliasing, invariant::Aligned, invariant::Valid)> { + // SAFETY: The provided size is exactly equal to that of the + // referenced `T`. + unsafe { self.as_bytes_with_size(core::mem::size_of::()) } + } + } + + impl<'a, T, I> Ptr<'a, T, I> + where + T: 'a + ?Sized, + I: Invariants, + T: Immutable, + { + /// Casts this pointer-to-initialized into a pointer-to-bytes. + #[allow(clippy::wrong_self_convention)] + pub(crate) unsafe fn as_bytes_with_size( + self, + size: usize, ) -> Ptr<'a, [u8], (I::Aliasing, invariant::Aligned, invariant::Valid)> { // SAFETY: We ensure that: // - `cast(p)` is implemented as an invocation to // `slice_from_raw_parts_mut`. 
- // - The size of the object referenced by the resulting pointer is - exactly equal to the size of the object referenced by `self`. + // - By contract on the caller, the size of the object referenced by + the resulting pointer is exactly equal to the size of the + object referenced by `self`. // - `T` and `[u8]` trivially contain `UnsafeCell`s at identical // ranges [u8]`, because both are `Immutable`. let ptr: Ptr<'a, [u8], _> = unsafe { self.cast_unsized(|p: *mut T| { #[allow(clippy::as_conversions)] - core::ptr::slice_from_raw_parts_mut(p.cast::(), core::mem::size_of::()) + core::ptr::slice_from_raw_parts_mut(p.cast::(), size) }) }; @@ -1112,7 +1136,7 @@ mod _casts { /// alignment of `[u8]` is 1. impl<'a, I> Ptr<'a, [u8], I> where - I: Invariants, + I: Invariants, { /// Attempts to cast `self` to a `U` using the given cast type. /// @@ -1135,21 +1159,31 @@ /// /// Panics if `U` is a DST whose trailing slice element is zero-sized. pub(crate) fn try_cast_into( - &self, + self, cast_type: CastType, - ) -> Option<(Ptr<'a, U, (I::Aliasing, invariant::Aligned, invariant::Initialized)>, usize)> - { + ) -> Result< + (Ptr<'a, U, (I::Aliasing, invariant::Aligned, invariant::Initialized)>, usize), + CastError, + > { // PANICS: By invariant, the byte range addressed by `self.ptr` does // not wrap around the address space. This implies that the sum of // the address (represented as a `usize`) and length do not overflow // `usize`, as required by `validate_cast_and_convert_metadata`. // Thus, this call to `validate_cast_and_convert_metadata` will only // panic if `U` is a DST whose trailing slice element is zero-sized. 
- let (elems, split_at) = U::LAYOUT.validate_cast_and_convert_metadata( + let maybe_metadata = U::LAYOUT.validate_cast_and_convert_metadata( AsAddress::addr(self.as_non_null().as_ptr()), self.len(), cast_type, - )?; + ); + + let (elems, split_at) = match maybe_metadata { + Ok((elems, split_at)) => (elems, split_at), + Err(MetadataCastError::Alignment) => { + return Err(CastError::Alignment(AlignmentError::new(self))) + } + Err(MetadataCastError::Size) => return Err(CastError::Size(SizeError::new(self))), + }; let offset = match cast_type { CastType::Prefix => 0, @@ -1219,7 +1253,7 @@ // invariant on Ptr<'a, T, I>, preserved through the cast by the // bound `U: Immutable`. // 10. See 9. - Some((unsafe { Ptr::new(ptr) }, split_at)) + Ok((unsafe { Ptr::new(ptr) }, split_at)) } /// Attempts to cast `self` into a `U`, failing if all of the bytes of @@ -1235,14 +1269,42 @@ #[allow(unused)] #[inline(always)] pub(crate) fn try_cast_into_no_leftover( - &self, - ) -> Option> { + self, + ) -> Result< + Ptr<'a, U, (I::Aliasing, invariant::Aligned, invariant::Initialized)>, + CastError, + > { + let len = self.len(); // TODO(#67): Remove this allow. See NonNulSlicelExt for more // details. #[allow(unstable_name_collisions)] match self.try_cast_into(CastType::Prefix) { - Some((slf, split_at)) if split_at == self.len() => Some(slf), - Some(_) | None => None, + Ok((slf, split_at)) => { + if split_at == len { + Ok(slf) + } else { + // Undo the cast so we can return the original bytes. + // + // SAFETY: By contract on `try_cast_into`, `slf` is a + // from-bytes cast of `self`. The length of `self` + // (`len`) and `slf` are equal, thus `len` satisfies the + // precondition of `Ptr::as_bytes_with_size` that the + // provided length is equal to that of the referenced + // memory. + let slf = unsafe { slf.as_bytes_with_size(len) }; + // Restore the initial invariants of `self`. 
+ // + // SAFETY: The referent type of `slf` is now equal to + // that of `self`, but the invariants nominally differ. + // Since `slf` and `self` refer to the same memory and + // no actions have been taken that would violate the + // original invariants on `self`, it is sound to apply + // the invariants of `self` onto `slf`. + let slf = unsafe { Self::from_ptr(slf) }; + Err(CastError::Size(SizeError::<_, U>::new(slf))) + } + } + Err(err) => Err(err), } } } @@ -1562,7 +1624,7 @@ mod tests { } for cast_type in [CastType::Prefix, CastType::Suffix] { - if let Some((slf, split_at)) = + if let Ok((slf, split_at)) = Ptr::from_ref(bytes).try_cast_into::(cast_type) { // SAFETY: All bytes in `bytes` have been @@ -1575,7 +1637,7 @@ mod tests { } } - if let Some(slf) = Ptr::from_ref(bytes).try_cast_into_no_leftover::() { + if let Ok(slf) = Ptr::from_ref(bytes).try_cast_into_no_leftover::() { // SAFETY: All bytes in `bytes` have been // initialized. let len = unsafe { validate_and_get_len(slf) }; diff --git a/src/util.rs b/src/util.rs index 099a9e596a2..a31041205a9 100644 --- a/src/util.rs +++ b/src/util.rs @@ -589,6 +589,13 @@ pub(crate) const fn min(a: NonZeroUsize, b: NonZeroUsize) -> NonZeroUsize { } } +/// A safe alternative to [`core::hint::unreachable_unchecked`]. +pub(crate) const fn unreachable_infallible(_: core::convert::Infallible) -> T { + // SAFETY: Since the argument type `Infallible` is uninhabited, this + // function cannot be executed with a value of it. + unsafe { core::hint::unreachable_unchecked() } +} + /// Since we support multiple versions of Rust, there are often features which /// have been stabilized in the most recent stable release which do not yet /// exist (stably) on our MSRV. 
This module provides polyfills for those diff --git a/zerocopy-derive/tests/enum_try_from_bytes.rs b/zerocopy-derive/tests/enum_try_from_bytes.rs index 2d03d305d71..2ad228ba3bc 100644 --- a/zerocopy-derive/tests/enum_try_from_bytes.rs +++ b/zerocopy-derive/tests/enum_try_from_bytes.rs @@ -22,10 +22,10 @@ util_assert_impl_all!(Foo: imp::TryFromBytes); #[test] fn test_foo() { - imp::assert_eq!(::try_read_from(&[0]), imp::Some(Foo::A)); - imp::assert_eq!(::try_read_from(&[]), imp::None); - imp::assert_eq!(::try_read_from(&[1]), imp::None); - imp::assert_eq!(::try_read_from(&[0, 0]), imp::None); + imp::assert_eq!(::try_read_from(&[0]), imp::Ok(Foo::A)); + imp::assert!(::try_read_from(&[]).is_err()); + imp::assert!(::try_read_from(&[1]).is_err()); + imp::assert!(::try_read_from(&[0, 0]).is_err()); } #[derive(Eq, PartialEq, Debug, imp::KnownLayout, imp::Immutable, imp::TryFromBytes)] @@ -38,11 +38,11 @@ util_assert_impl_all!(Bar: imp::TryFromBytes); #[test] fn test_bar() { - imp::assert_eq!(::try_read_from(&[0, 0]), imp::Some(Bar::A)); - imp::assert_eq!(::try_read_from(&[]), imp::None); - imp::assert_eq!(::try_read_from(&[0]), imp::None); - imp::assert_eq!(::try_read_from(&[0, 1]), imp::None); - imp::assert_eq!(::try_read_from(&[0, 0, 0]), imp::None); + imp::assert_eq!(::try_read_from(&[0, 0]), imp::Ok(Bar::A)); + imp::assert!(::try_read_from(&[]).is_err()); + imp::assert!(::try_read_from(&[0]).is_err()); + imp::assert!(::try_read_from(&[0, 1]).is_err()); + imp::assert!(::try_read_from(&[0, 0, 0]).is_err()); } #[derive(Eq, PartialEq, Debug, imp::KnownLayout, imp::Immutable, imp::TryFromBytes)] @@ -58,17 +58,17 @@ util_assert_impl_all!(Baz: imp::TryFromBytes); fn test_baz() { imp::assert_eq!( ::try_read_from(imp::IntoBytes::as_bytes(&1u32)), - imp::Some(Baz::A) + imp::Ok(Baz::A) ); imp::assert_eq!( ::try_read_from(imp::IntoBytes::as_bytes(&0u32)), - imp::Some(Baz::B) + imp::Ok(Baz::B) ); - imp::assert_eq!(::try_read_from(&[]), imp::None); - 
imp::assert_eq!(::try_read_from(&[0]), imp::None); - imp::assert_eq!(::try_read_from(&[0, 0]), imp::None); - imp::assert_eq!(::try_read_from(&[0, 0, 0]), imp::None); - imp::assert_eq!(::try_read_from(&[0, 0, 0, 0, 0]), imp::None); + imp::assert!(::try_read_from(&[]).is_err()); + imp::assert!(::try_read_from(&[0]).is_err()); + imp::assert!(::try_read_from(&[0, 0]).is_err()); + imp::assert!(::try_read_from(&[0, 0, 0]).is_err()); + imp::assert!(::try_read_from(&[0, 0, 0, 0, 0]).is_err()); } // Test hygiene - make sure that `i8` being shadowed doesn't cause problems for @@ -92,23 +92,23 @@ util_assert_impl_all!(Blah: imp::TryFromBytes); fn test_blah() { imp::assert_eq!( ::try_read_from(imp::IntoBytes::as_bytes(&1i8)), - imp::Some(Blah::A) + imp::Ok(Blah::A) ); imp::assert_eq!( ::try_read_from(imp::IntoBytes::as_bytes(&0i8)), - imp::Some(Blah::B) + imp::Ok(Blah::B) ); imp::assert_eq!( ::try_read_from(imp::IntoBytes::as_bytes(&3i8)), - imp::Some(Blah::C) + imp::Ok(Blah::C) ); imp::assert_eq!( ::try_read_from(imp::IntoBytes::as_bytes(&6i8)), - imp::Some(Blah::D) + imp::Ok(Blah::D) ); - imp::assert_eq!(::try_read_from(&[]), imp::None); - imp::assert_eq!(::try_read_from(&[4]), imp::None); - imp::assert_eq!(::try_read_from(&[0, 0]), imp::None); + imp::assert!(::try_read_from(&[]).is_err()); + imp::assert!(::try_read_from(&[4]).is_err()); + imp::assert!(::try_read_from(&[0, 0]).is_err()); } #[derive( @@ -127,21 +127,20 @@ fn test_fieldless_but_not_unit_only() { let disc: [u8; SIZE] = ::zerocopy::transmute!(FieldlessButNotUnitOnly::A); imp::assert_eq!( ::try_read_from(&disc[..]), - imp::Some(FieldlessButNotUnitOnly::A) + imp::Ok(FieldlessButNotUnitOnly::A) ); let disc: [u8; SIZE] = ::zerocopy::transmute!(FieldlessButNotUnitOnly::B()); imp::assert_eq!( ::try_read_from(&disc[..]), - imp::Some(FieldlessButNotUnitOnly::B()) + imp::Ok(FieldlessButNotUnitOnly::B()) ); let disc: [u8; SIZE] = ::zerocopy::transmute!(FieldlessButNotUnitOnly::C {}); imp::assert_eq!( 
::try_read_from(&disc[..]), - imp::Some(FieldlessButNotUnitOnly::C {}) + imp::Ok(FieldlessButNotUnitOnly::C {}) ); - imp::assert_eq!( - ::try_read_from(&[0xFF; SIZE][..]), - imp::None + imp::assert!( + ::try_read_from(&[0xFF; SIZE][..]).is_err() ); } @@ -161,20 +160,19 @@ fn test_weird_discriminants() { let disc: [u8; SIZE] = ::zerocopy::transmute!(WeirdDiscriminants::A); imp::assert_eq!( ::try_read_from(&disc[..]), - imp::Some(WeirdDiscriminants::A) + imp::Ok(WeirdDiscriminants::A) ); let disc: [u8; SIZE] = ::zerocopy::transmute!(WeirdDiscriminants::B); imp::assert_eq!( ::try_read_from(&disc[..]), - imp::Some(WeirdDiscriminants::B) + imp::Ok(WeirdDiscriminants::B) ); let disc: [u8; SIZE] = ::zerocopy::transmute!(WeirdDiscriminants::C); imp::assert_eq!( ::try_read_from(&disc[..]), - imp::Some(WeirdDiscriminants::C) + imp::Ok(WeirdDiscriminants::C) ); - imp::assert_eq!( - ::try_read_from(&[0xFF; SIZE][..]), - imp::None + imp::assert!( + ::try_read_from(&[0xFF; SIZE][..]).is_err() ); } diff --git a/zerocopy-derive/tests/struct_try_from_bytes.rs b/zerocopy-derive/tests/struct_try_from_bytes.rs index e36f7df2080..cd97e6f2afd 100644 --- a/zerocopy-derive/tests/struct_try_from_bytes.rs +++ b/zerocopy-derive/tests/struct_try_from_bytes.rs @@ -167,7 +167,7 @@ struct CPacked { fn c_packed() { let candidate = &[42u8, 0xFF, 0xFF, 0xFF, 0xFF]; let converted = ::try_ref_from(candidate); - imp::assert_eq!(converted, imp::Some(&CPacked { a: 42, b: u32::MAX })); + imp::assert_eq!(converted, imp::Ok(&CPacked { a: 42, b: u32::MAX })); } #[derive(imp::TryFromBytes, imp::KnownLayout, imp::Immutable)] @@ -188,7 +188,7 @@ struct CPackedUnsized { fn c_packed_unsized() { let candidate = &[42u8, 0xFF, 0xFF, 0xFF, 0xFF]; let converted = ::try_ref_from(candidate); - imp::assert!(converted.is_some()); + imp::assert!(converted.is_ok()); } #[derive(imp::TryFromBytes)] @@ -209,13 +209,13 @@ struct PackedUnsized { fn packed_unsized() { let candidate = &[42u8, 0xFF, 0xFF, 0xFF, 0xFF]; let 
converted = ::try_ref_from(candidate); - imp::assert!(converted.is_some()); + imp::assert!(converted.is_err()); let candidate = &[42u8, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF]; let converted = ::try_ref_from(candidate); - imp::assert!(converted.is_none()); + imp::assert!(converted.is_err()); let candidate = &[42u8, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF]; let converted = ::try_ref_from(candidate); - imp::assert!(converted.is_some()); + imp::assert!(converted.is_ok()); }