diff --git a/src/byteorder.rs b/src/byteorder.rs index 635ed7726a..2ea8aaad3e 100644 --- a/src/byteorder.rs +++ b/src/byteorder.rs @@ -54,7 +54,7 @@ //! //! impl UdpPacket { //! fn parse(bytes: B) -> Option> { -//! let (header, body) = Ref::new_from_prefix(bytes)?; +//! let (header, body) = Ref::new_from_prefix(bytes).ok()?; //! Some(UdpPacket { header, body }) //! } //! diff --git a/src/deprecated.rs b/src/deprecated.rs index 860270d083..0966a43139 100644 --- a/src/deprecated.rs +++ b/src/deprecated.rs @@ -21,7 +21,7 @@ where #[doc(hidden)] #[inline] pub fn new_slice(bytes: B) -> Option> { - Self::new(bytes) + Self::new(bytes).ok() } } @@ -34,7 +34,7 @@ where #[doc(hidden)] #[inline(always)] pub fn new_slice_unaligned(bytes: B) -> Option> { - Ref::new_unaligned(bytes) + Ref::new_unaligned(bytes).ok() } } @@ -74,7 +74,7 @@ where #[doc(hidden)] #[inline] pub fn new_slice_from_prefix(bytes: B, count: usize) -> Option<(Ref, B)> { - Ref::with_trailing_elements_from_prefix(bytes, count) + Ref::with_trailing_elements_from_prefix(bytes, count).ok() } #[deprecated(since = "0.8.0", note = "replaced by `Ref::with_trailing_elements_from_suffix`")] @@ -82,7 +82,7 @@ where #[doc(hidden)] #[inline] pub fn new_slice_from_suffix(bytes: B, count: usize) -> Option<(B, Ref)> { - Ref::with_trailing_elements_from_suffix(bytes, count) + Ref::with_trailing_elements_from_suffix(bytes, count).ok() } } @@ -99,7 +99,7 @@ where #[must_use = "has no side effects"] #[inline(always)] pub fn new_slice_unaligned_from_prefix(bytes: B, count: usize) -> Option<(Ref, B)> { - Ref::with_trailing_elements_unaligned_from_prefix(bytes, count) + Ref::with_trailing_elements_unaligned_from_prefix(bytes, count).ok() } #[deprecated( @@ -110,6 +110,6 @@ where #[must_use = "has no side effects"] #[inline(always)] pub fn new_slice_unaligned_from_suffix(bytes: B, count: usize) -> Option<(B, Ref)> { - Ref::with_trailing_elements_unaligned_from_suffix(bytes, count) + 
Ref::with_trailing_elements_unaligned_from_suffix(bytes, count).ok() } } diff --git a/src/error.rs b/src/error.rs new file mode 100644 index 0000000000..9f71565508 --- /dev/null +++ b/src/error.rs @@ -0,0 +1,463 @@ +//! Types related to error reporting. +//! +//! ## Single failure mode errors +//! +//! Generally speaking, zerocopy's conversions may fail for one of up to three reasons: +//! - [`AlignmentError`]: the conversion source was improperly aligned +//! - [`SizeError`]: the conversion source was of incorrect size +//! - [`ValidityError`]: the conversion source contained invalid data +//! +//! Methods that only have one failure mode, like [`Ref::new_unaligned`], return +//! that mode's corresponding error type directly. +//! +//! ## Compound errors +//! +//! Conversion methods that have either two or three possible failure modes +//! return one of these error types: +//! - [`CastError`]: the error type of reference conversions +//! - [`TryCastError`]: the error type of fallible reference conversions +//! - [`TryReadError`]: the error type of fallible read conversions +//! +//! ## Accessing the conversion source +//! +//! All error types provide an `into_src` method that converts the error into +//! the source value underlying the failed conversion. + +use core::{convert::Infallible, fmt, marker::PhantomData, ops::Deref}; + +use crate::TryFromBytes; +#[cfg(doc)] +use crate::{FromBytes, Ref}; + +// This is private to remove `ConvertError` from our SemVer obligations for the +// time being. +// TODO(#1139): Remove this wrapping `private` module. +mod private { + #[cfg(doc)] + use super::*; + + /// Zerocopy's generic error type. 
+ /// + /// Generally speaking, zerocopy's conversions may fail for one of up to three reasons: + /// - [`AlignmentError`]: the conversion source was improperly aligned + /// - [`SizeError`]: the conversion source was of incorrect size + /// - [`ValidityError`]: the conversion source contained invalid data + /// + /// However, not all conversions produce all errors. For instance, + /// [`FromBytes::ref_from`] may fail due to alignment or size issues, but not + /// validity issues. This generic error type captures these (im)possibilities + /// via parameterization: `A` is parameterized with [`AlignmentError`], `S` is + /// parameterized with [`SizeError`], and `V` is parameterized with + /// [`Infallible`]. + /// + /// Zerocopy never uses this type directly in its API. Rather, we provide three + /// pre-parameterized aliases: + /// - [`CastError`]: the error type of reference conversions + /// - [`TryCastError`]: the error type of fallible reference conversions + /// - [`TryReadError`]: the error type of fallible read conversions + #[derive(PartialEq, Eq)] + pub enum ConvertError { + /// The conversion source was improperly aligned. + #[doc(hidden)] + Alignment(A), + /// The conversion source was of incorrect size. + #[doc(hidden)] + Size(S), + /// The conversion source contained invalid data. + #[doc(hidden)] + Validity(V), + } +} + +use private::ConvertError; + +impl fmt::Debug for ConvertError { + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::Alignment(e) => f.debug_tuple("Alignment").field(e).finish(), + Self::Size(e) => f.debug_tuple("Size").field(e).finish(), + Self::Validity(e) => f.debug_tuple("Validity").field(e).finish(), + } + } +} + +/// Produces a human-readable error message. 
+impl fmt::Display for ConvertError { + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::Alignment(e) => e.fmt(f), + Self::Size(e) => e.fmt(f), + Self::Validity(e) => e.fmt(f), + } + } +} + +/// The error emitted if the conversion source is improperly aligned. +#[derive(PartialEq, Eq)] +pub struct AlignmentError { + /// The source value involved in the conversion. + src: Src, + /// The inner destination type involved in the conversion. + dst: PhantomData, +} + +impl AlignmentError { + pub(crate) fn new(src: Src) -> Self { + Self { src, dst: PhantomData } + } + + /// Produces the source underlying the failed conversion. + #[inline] + pub fn into_src(self) -> Src { + self.src + } + + pub(crate) fn with_src(self, new_src: NewSrc) -> AlignmentError { + AlignmentError { src: new_src, dst: PhantomData } + } + + pub(crate) fn map_src(self, f: impl Fn(Src) -> NewSrc) -> AlignmentError { + AlignmentError { src: f(self.src), dst: PhantomData } + } + + pub(crate) fn into(self) -> ConvertError { + ConvertError::Alignment(self) + } +} + +impl fmt::Debug for AlignmentError { + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("AlignmentError").finish() + } +} + +/// Produces a human-readable error message. +// The bounds on this impl are intentionally conservative, and can be relaxed +// either once a `?Sized` alignment accessor is stabilized, or by storing the +// alignment as a runtime value. 
+impl fmt::Display for AlignmentError +where + Src: Deref, +{ + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + #[cfg_attr(__INTERNAL_USE_ONLY_NIGHTLY_FEATURES_IN_TESTS, allow(lossy_provenance_casts))] + #[allow(clippy::as_conversions)] + let addr = self.src.deref() as *const _ as *const () as usize; + let addr_align = 2usize.pow(addr.trailing_zeros()); + f.write_str("the conversion failed because the address of the source (a multiple of ")?; + addr_align.fmt(f)?; + f.write_str(") is not a multiple of the alignment (")?; + core::mem::align_of::().fmt(f)?; + f.write_str(") of the destination type: ")?; + f.write_str(core::any::type_name::())?; + Ok(()) + } +} + +impl From> + for ConvertError, S, V> +{ + #[inline] + fn from(err: AlignmentError) -> Self { + Self::Alignment(err) + } +} + +/// The error emitted if the conversion source is of incorrect size. +#[derive(PartialEq, Eq)] +pub struct SizeError { + /// The source value involved in the conversion. + src: Src, + /// The inner destination type involved in the conversion. + dst: PhantomData, +} + +impl SizeError { + pub(crate) fn new(src: Src) -> Self { + Self { src, dst: PhantomData } + } + + /// Produces the source underlying the failed conversion. + #[inline] + pub fn into_src(self) -> Src { + self.src + } + + /// Sets the source value associated with the conversion error. + pub(crate) fn with_src(self, new_src: NewSrc) -> SizeError { + SizeError { src: new_src, dst: PhantomData } + } + + /// Maps the source value associated with the conversion error. + pub(crate) fn map_src(self, f: impl Fn(Src) -> NewSrc) -> SizeError { + SizeError { src: f(self.src), dst: PhantomData } + } + + /// Sets the destination type associated with the conversion error. + pub(crate) fn with_dst(self) -> SizeError { + SizeError { src: self.src, dst: PhantomData } + } + + /// Converts the error into a general [`ConvertError`]. 
+ pub(crate) fn into(self) -> ConvertError { + ConvertError::Size(self) + } +} + +impl fmt::Debug for SizeError { + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("SizeError").finish() + } +} + +/// Produces a human-readable error message. +impl fmt::Display for SizeError +where + Src: Deref, +{ + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str("the conversion failed because the source was incorrectly sized to complete the conversion into the destination type: ")?; + f.write_str(core::any::type_name::())?; + Ok(()) + } +} + +impl From> for ConvertError, V> { + #[inline] + fn from(err: SizeError) -> Self { + Self::Size(err) + } +} + +/// The error emitted if the conversion source contains invalid data. +#[derive(PartialEq, Eq)] +pub struct ValidityError { + /// The source value involved in the conversion. + src: Src, + /// The inner destination type involved in the conversion. + dst: PhantomData, +} + +impl ValidityError { + pub(crate) fn new(src: Src) -> Self { + Self { src, dst: PhantomData } + } + + /// Produces the source underlying the failed conversion. + #[inline] + pub fn into_src(self) -> Src { + self.src + } + + /// Sets the source value associated with the conversion error. + pub(crate) fn with_src(self, new_src: NewSrc) -> ValidityError { + ValidityError { src: new_src, dst: PhantomData } + } + + /// Maps the source value associated with the conversion error. + pub(crate) fn map_src(self, f: impl Fn(Src) -> NewSrc) -> ValidityError { + ValidityError { src: f(self.src), dst: PhantomData } + } + + /// Converts the error into a general [`ConvertError`]. + pub(crate) fn into(self) -> ConvertError { + ConvertError::Validity(self) + } +} + +impl fmt::Debug for ValidityError { + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("ValidityError").finish() + } +} + +/// Produces a human-readable error message. 
+impl fmt::Display for ValidityError +where + Src: Deref, +{ + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str("the conversion failed because the source bytes are not a valid value of the destination type: ")?; + f.write_str(core::any::type_name::())?; + Ok(()) + } +} + +impl From> + for ConvertError> +{ + #[inline] + fn from(err: ValidityError) -> Self { + Self::Validity(err) + } +} + +/// The error type of reference conversions. +/// +/// Reference conversions, like [`FromBytes::ref_from`] may emit +/// [alignment](AlignmentError) and [size](SizeError) errors. +// Bounds on generic parameters are not enforced in type aliases, but they do +// appear in rustdoc. +#[allow(type_alias_bounds)] +pub type CastError = + ConvertError, SizeError, Infallible>; + +impl CastError { + /// Produces the source underlying the failed conversion. + #[inline] + pub fn into_src(self) -> Src { + match self { + Self::Alignment(e) => e.src, + Self::Size(e) => e.src, + Self::Validity(i) => match i {}, + } + } + + /// Sets the source value associated with the conversion error. + pub(crate) fn with_src(self, new_src: NewSrc) -> CastError { + match self { + Self::Alignment(e) => CastError::Alignment(e.with_src(new_src)), + Self::Size(e) => CastError::Size(e.with_src(new_src)), + Self::Validity(i) => match i {}, + } + } + + /// Maps the source value associated with the conversion error. + pub(crate) fn map_src(self, f: impl Fn(Src) -> NewSrc) -> CastError { + match self { + Self::Alignment(e) => CastError::Alignment(e.map_src(f)), + Self::Size(e) => CastError::Size(e.map_src(f)), + Self::Validity(i) => match i {}, + } + } + + /// Converts the error into a general [`ConvertError`]. 
+ pub(crate) fn into(self) -> TryCastError + where + Dst: TryFromBytes, + { + match self { + Self::Alignment(e) => TryCastError::Alignment(e), + Self::Size(e) => TryCastError::Size(e), + Self::Validity(i) => match i {}, + } + } +} + +/// The error type of fallible reference conversions. +/// +/// Fallible reference conversions, like [`TryFromBytes::try_ref_from`] may emit +/// [alignment](AlignmentError), [size](SizeError), and +/// [validity](ValidityError) errors. +// Bounds on generic parameters are not enforced in type aliases, but they do +// appear in rustdoc. +#[allow(type_alias_bounds)] +pub type TryCastError = + ConvertError, SizeError, ValidityError>; + +// TODO(#1139): Remove the `TryFromBytes` here and in other downstream locations +// (all the way to `ValidityError`) if we determine it's not necessary for rich +// validity errors. +impl TryCastError { + /// Produces the source underlying the failed conversion. + #[inline] + pub fn into_src(self) -> Src { + match self { + Self::Alignment(e) => e.src, + Self::Size(e) => e.src, + Self::Validity(e) => e.src, + } + } +} + +impl From> for TryCastError { + #[inline] + fn from(value: CastError) -> Self { + match value { + CastError::Alignment(e) => Self::Alignment(e), + CastError::Size(e) => Self::Size(e), + CastError::Validity(i) => match i {}, + } + } +} + +/// The error type of fallible read-conversions. +/// +/// Fallible read-conversions, like [`TryFromBytes::try_read_from`] may emit +/// [size](SizeError) and [validity](ValidityError) errors, but not alignment errors. +// Bounds on generic parameters are not enforced in type aliases, but they do +// appear in rustdoc. +#[allow(type_alias_bounds)] +pub type TryReadError = + ConvertError, ValidityError>; + +impl TryReadError { + /// Produces the source underlying the failed conversion. 
+ #[inline] + pub fn into_src(self) -> Src { + match self { + Self::Alignment(i) => match i {}, + Self::Size(e) => e.src, + Self::Validity(e) => e.src, + } + } +} + +#[cfg(test)] +mod test { + use super::*; + + #[test] + fn alignment_display() { + #[repr(C, align(128))] + struct Aligned { + bytes: [u8; 128], + } + + let aligned = Aligned { bytes: [0; 128] }; + + assert_eq!( + AlignmentError::<_, elain::Align::<8>>::new(&aligned.bytes[1..]).to_string(), + "the conversion failed because the address of the source (a multiple of 1) is not a multiple of the alignment (8) of the destination type: elain::Align<8>" + ); + + assert_eq!( + AlignmentError::<_, elain::Align::<8>>::new(&aligned.bytes[2..]).to_string(), + "the conversion failed because the address of the source (a multiple of 2) is not a multiple of the alignment (8) of the destination type: elain::Align<8>" + ); + + assert_eq!( + AlignmentError::<_, elain::Align::<8>>::new(&aligned.bytes[3..]).to_string(), + "the conversion failed because the address of the source (a multiple of 1) is not a multiple of the alignment (8) of the destination type: elain::Align<8>" + ); + + assert_eq!( + AlignmentError::<_, elain::Align::<8>>::new(&aligned.bytes[4..]).to_string(), + "the conversion failed because the address of the source (a multiple of 4) is not a multiple of the alignment (8) of the destination type: elain::Align<8>" + ); + } + + #[test] + fn size_display() { + assert_eq!( + SizeError::<_, [u8]>::new(&[0u8; 1][..]).to_string(), + "the conversion failed because the source was incorrectly sized to complete the conversion into the destination type: [u8]" + ); + } + + #[test] + fn validity_display() { + assert_eq!( + ValidityError::<_, bool>::new(&[2u8; 1][..]).to_string(), + "the conversion failed because the source bytes are not a valid value of the destination type: bool" + ); + } +} diff --git a/src/layout.rs b/src/layout.rs index f55f439aec..f0c460b898 100644 --- a/src/layout.rs +++ b/src/layout.rs @@ -84,6 
+84,12 @@ pub enum CastType { Suffix, } +#[cfg_attr(test, derive(Debug))] +pub(crate) enum MetadataCastError { + Alignment, + Size, +} + impl DstLayout { /// The minimum possible alignment of a type. const MIN_ALIGN: NonZeroUsize = match NonZeroUsize::new(1) { @@ -448,7 +454,7 @@ impl DstLayout { addr: usize, bytes_len: usize, cast_type: CastType, - ) -> Option<(usize, usize)> { + ) -> Result<(usize, usize), MetadataCastError> { // `debug_assert!`, but with `#[allow(clippy::arithmetic_side_effects)]`. macro_rules! __const_debug_assert { ($e:expr $(, $msg:expr)?) => { @@ -506,14 +512,14 @@ impl DstLayout { // by 0 because `align` is non-zero. #[allow(clippy::arithmetic_side_effects)] if (addr + offset) % self.align.get() != 0 { - return None; + return Err(MetadataCastError::Alignment); } } let (elems, self_bytes) = match size_info { SizeInfo::Sized { size } => { if size > bytes_len { - return None; + return Err(MetadataCastError::Size); } (0, size) } @@ -532,7 +538,7 @@ impl DstLayout { let max_slice_and_padding_bytes = match max_total_bytes.checked_sub(offset) { Some(max) => max, // `bytes_len` too small even for 0 trailing slice elements. - None => return None, + None => return Err(MetadataCastError::Size), }; // Calculate the number of elements that fit in @@ -587,6 +593,6 @@ impl DstLayout { CastType::Suffix => bytes_len - self_bytes, }; - Some((elems, split_at)) + Ok((elems, split_at)) } } diff --git a/src/lib.rs b/src/lib.rs index 891f571076..a2f78b5ac4 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -281,6 +281,11 @@ mod macros; pub mod byteorder; mod deprecated; +// This module is `pub` so that zerocopy's error types and error handling +// documentation is grouped together in a cohesive module. In practice, we +// expect most users to use the re-export of `error`'s items to avoid identifier +// stuttering. 
+pub mod error; #[doc(hidden)] pub mod layout; #[doc(hidden)] @@ -292,6 +297,7 @@ mod util; mod wrappers; pub use crate::byteorder::*; +pub use crate::error::*; pub use crate::wrappers::*; use core::{ @@ -1128,12 +1134,14 @@ pub unsafe trait TryFromBytes { /// ``` #[must_use = "has no side effects"] #[inline] - fn try_ref_from(bytes: &[u8]) -> Option<&Self> + fn try_ref_from(bytes: &[u8]) -> Result<&Self, TryCastError<&[u8], Self>> where Self: KnownLayout + Immutable, { util::assert_dst_is_not_zst::(); - let candidate = Ptr::from_ref(bytes).try_cast_into_no_leftover::()?; + let candidate = Ptr::from_ref(bytes) + .try_cast_into_no_leftover::() + .map_err(|e| e.map_src(|src| src.as_ref()).into())?; // This call may panic. If that happens, it doesn't cause any soundness // issues, as we have not generated any invalid state which we need to @@ -1143,7 +1151,7 @@ pub unsafe trait TryFromBytes { // calling `try_into_valid` (and thus `is_bit_valid`) with a shared // pointer when `Self: !Immutable`. Since `Self: Immutable`, this panic // condition will not happen. - let candidate = candidate.try_into_valid(); + let candidate = candidate.try_into_valid().map_err(|e| e.with_src(bytes).into()); candidate.map(MaybeAligned::as_ref) } @@ -1181,24 +1189,28 @@ pub unsafe trait TryFromBytes { /// ``` #[must_use = "has no side effects"] #[inline] - fn try_mut_from(bytes: &mut [u8]) -> Option<&mut Self> + fn try_mut_from(bytes: &mut [u8]) -> Result<&mut Self, TryCastError<&mut [u8], Self>> where Self: KnownLayout + Immutable, // TODO(#251): Remove the `Immutable` bound. { util::assert_dst_is_not_zst::(); - let candidate = Ptr::from_mut(bytes).try_cast_into_no_leftover::()?; - - // This call may panic. If that happens, it doesn't cause any soundness - // issues, as we have not generated any invalid state which we need to - // fix before returning. 
- // - // Note that one panic or post-monomorphization error condition is - // calling `try_into_valid` (and thus `is_bit_valid`) with a shared - // pointer when `Self: !Immutable`. Since `Self: Immutable`, this panic - // condition will not happen. - let candidate = candidate.try_into_valid(); - - candidate.map(Ptr::as_mut) + match Ptr::from_mut(bytes).try_cast_into_no_leftover::() { + Ok(candidate) => { + // This call may panic. If that happens, it doesn't cause any soundness + // issues, as we have not generated any invalid state which we need to + // fix before returning. + // + // Note that one panic or post-monomorphization error condition is + // calling `try_into_valid` (and thus `is_bit_valid`) with a shared + // pointer when `Self: !Immutable`. Since `Self: Immutable`, this panic + // condition will not happen. + match candidate.try_into_valid() { + Ok(candidate) => Ok(candidate.as_mut()), + Err(e) => Err(e.map_src(|src| src.as_bytes().as_mut()).into()), + } + } + Err(e) => Err(e.map_src(Ptr::as_mut).into()), + } } /// Attempts to read a `Self` from a byte slice. @@ -1212,7 +1224,7 @@ pub unsafe trait TryFromBytes { /// these cases are handled. #[must_use = "has no side effects"] #[inline] - fn try_read_from(bytes: &[u8]) -> Option + fn try_read_from(bytes: &[u8]) -> Result> where Self: Sized, { @@ -1221,7 +1233,12 @@ pub unsafe trait TryFromBytes { // mut` and `Ptr::from_mut` here. See the doc comment on `is_bit_valid` // and the implementation of `TryFromBytes` for `UnsafeCell` for more // details. 
- let mut candidate = MaybeUninit::::read_from(bytes)?; + let mut candidate = match MaybeUninit::::read_from(bytes) { + Ok(candidate) => candidate, + Err(e) => { + return Err(TryReadError::Size(e.with_dst())); + } + }; let c_ptr = Ptr::from_mut(&mut candidate); let c_ptr = c_ptr.transparent_wrapper_into_inner(); // SAFETY: `c_ptr` has no uninitialized sub-ranges because it derived @@ -1237,11 +1254,11 @@ pub unsafe trait TryFromBytes { // pointer when `Self: !Immutable`. Since `Self: Immutable`, this panic // condition will not happen. if !Self::is_bit_valid(c_ptr.forget_aligned()) { - return None; + return Err(ValidityError::new(bytes).into()); } // SAFETY: We just validated that `candidate` contains a valid `Self`. - Some(unsafe { candidate.assume_init() }) + Ok(unsafe { candidate.assume_init() }) } } @@ -1760,7 +1777,7 @@ pub unsafe trait FromBytes: FromZeros { /// /// If `bytes.len()` does not correspond to a valid length for `Self`, or if /// `bytes` is not aligned to `Self`'s alignment requirement, this returns - /// `None`. + /// `Err`. /// /// # Compile-Time Assertions /// @@ -1817,13 +1834,15 @@ pub unsafe trait FromBytes: FromZeros { /// ``` #[must_use = "has no side effects"] #[inline] - fn ref_from(bytes: &[u8]) -> Option<&Self> + fn ref_from(bytes: &[u8]) -> Result<&Self, CastError<&[u8], Self>> where Self: KnownLayout + Immutable, { util::assert_dst_is_not_zst::(); - let ptr = Ptr::from_ref(bytes).try_cast_into_no_leftover()?; - Some(ptr.bikeshed_recall_valid().as_ref()) + match Ptr::from_ref(bytes).try_cast_into_no_leftover() { + Ok(ptr) => Ok(ptr.bikeshed_recall_valid().as_ref()), + Err(err) => Err(err.map_src(|src| src.as_ref())), + } } /// Interprets the prefix of the given `bytes` as a `&Self` without copying. @@ -1831,7 +1850,7 @@ pub unsafe trait FromBytes: FromZeros { /// This method returns both a reference to the first `size_of::()` /// bytes of `bytes` interpreted as `Self`, and a reference to the remaining /// bytes. 
If `bytes.len() < size_of::()` or `bytes` is not aligned to - /// `align_of::()`, this returns `None`. + /// `align_of::()`, this returns `Err`. /// /// # Compile-Time Assertions /// @@ -1889,7 +1908,7 @@ pub unsafe trait FromBytes: FromZeros { /// ``` #[must_use = "has no side effects"] #[inline] - fn ref_from_prefix(bytes: &[u8]) -> Option<(&Self, &[u8])> + fn ref_from_prefix(bytes: &[u8]) -> Result<(&Self, &[u8]), CastError<&[u8], Self>> where Self: KnownLayout + Immutable, { @@ -1902,7 +1921,7 @@ pub unsafe trait FromBytes: FromZeros { /// This method returns both a reference to the last `size_of::()` /// bytes of `bytes` interpreted as `Self`, and a reference to the preceding /// bytes. If `bytes.len() < size_of::()` or the suffix of `bytes` is - /// not aligned to `align_of::()`, this returns `None`. + /// not aligned to `align_of::()`, this returns `Err`. /// /// # Compile-Time Assertions /// @@ -1946,7 +1965,7 @@ pub unsafe trait FromBytes: FromZeros { /// ``` #[must_use = "has no side effects"] #[inline] - fn ref_from_suffix(bytes: &[u8]) -> Option<(&[u8], &Self)> + fn ref_from_suffix(bytes: &[u8]) -> Result<(&[u8], &Self), CastError<&[u8], Self>> where Self: Immutable + KnownLayout, { @@ -1957,7 +1976,7 @@ pub unsafe trait FromBytes: FromZeros { /// Interprets the given `bytes` as a `&mut Self` without copying. /// /// If `bytes.len() != size_of::()` or `bytes` is not aligned to - /// `align_of::()`, this returns `None`. + /// `align_of::()`, this returns `Err`. 
/// /// # Compile-Time Assertions /// @@ -2011,13 +2030,15 @@ pub unsafe trait FromBytes: FromZeros { /// ``` #[must_use = "has no side effects"] #[inline] - fn mut_from(bytes: &mut [u8]) -> Option<&mut Self> + fn mut_from(bytes: &mut [u8]) -> Result<&mut Self, CastError<&mut [u8], Self>> where Self: IntoBytes + KnownLayout + Immutable, { util::assert_dst_is_not_zst::(); - let ptr = Ptr::from_mut(bytes).try_cast_into_no_leftover()?; - Some(ptr.bikeshed_recall_valid().as_mut()) + match Ptr::from_mut(bytes).try_cast_into_no_leftover() { + Ok(ptr) => Ok(ptr.bikeshed_recall_valid().as_mut()), + Err(err) => Err(err.map_src(|src| src.as_mut())), + } } /// Interprets the prefix of the given `bytes` as a `&mut Self` without @@ -2026,7 +2047,7 @@ pub unsafe trait FromBytes: FromZeros { /// This method returns both a reference to the first `size_of::()` /// bytes of `bytes` interpreted as `Self`, and a reference to the remaining /// bytes. If `bytes.len() < size_of::()` or `bytes` is not aligned to - /// `align_of::()`, this returns `None`. + /// `align_of::()`, this returns `Err`. /// /// # Compile-Time Assertions /// @@ -2082,7 +2103,9 @@ pub unsafe trait FromBytes: FromZeros { /// ``` #[must_use = "has no side effects"] #[inline] - fn mut_from_prefix(bytes: &mut [u8]) -> Option<(&mut Self, &mut [u8])> + fn mut_from_prefix( + bytes: &mut [u8], + ) -> Result<(&mut Self, &mut [u8]), CastError<&mut [u8], Self>> where Self: IntoBytes + KnownLayout + Immutable, { @@ -2096,7 +2119,7 @@ pub unsafe trait FromBytes: FromZeros { /// This method returns both a reference to the last `size_of::()` /// bytes of `bytes` interpreted as `Self`, and a reference to the preceding /// bytes. If `bytes.len() < size_of::()` or the suffix of `bytes` is - /// not aligned to `align_of::()`, this returns `None`. + /// not aligned to `align_of::()`, this returns `Err`. 
/// /// # Compile-Time Assertions /// @@ -2146,7 +2169,9 @@ pub unsafe trait FromBytes: FromZeros { /// ``` #[must_use = "has no side effects"] #[inline] - fn mut_from_suffix(bytes: &mut [u8]) -> Option<(&mut [u8], &mut Self)> + fn mut_from_suffix( + bytes: &mut [u8], + ) -> Result<(&mut [u8], &mut Self), CastError<&mut [u8], Self>> where Self: IntoBytes + KnownLayout + Immutable, { @@ -2162,7 +2187,7 @@ pub unsafe trait FromBytes: FromZeros { /// `size_of::() * count` bytes from `bytes` to construct a `&[Self]`, /// and returns the remaining bytes to the caller. It also ensures that /// `sizeof::() * count` does not overflow a `usize`. If any of the - /// length, alignment, or overflow checks fail, it returns `None`. + /// length, alignment, or overflow checks fail, it returns `Err`. /// /// # Compile-Time Assertions /// @@ -2214,7 +2239,10 @@ pub unsafe trait FromBytes: FromZeros { /// ``` #[must_use = "has no side effects"] #[inline] - fn from_prefix_with_trailing_elements(bytes: &[u8], count: usize) -> Option<(&Self, &[u8])> + fn from_prefix_with_trailing_elements( + bytes: &[u8], + count: usize, + ) -> Result<(&Self, &[u8]), CastError<&[u8], Self>> where Self: KnownLayout + Immutable, { @@ -2234,7 +2262,7 @@ pub unsafe trait FromBytes: FromZeros { where Self: Sized + Immutable, { - <[Self]>::from_prefix_with_trailing_elements(bytes, count) + <[Self]>::from_prefix_with_trailing_elements(bytes, count).ok() } /// Interprets the suffix of the given `bytes` as a `&[Self]` with length @@ -2245,7 +2273,7 @@ pub unsafe trait FromBytes: FromZeros { /// `size_of::() * count` bytes from `bytes` to construct a `&[Self]`, /// and returns the preceding bytes to the caller. It also ensures that /// `sizeof::() * count` does not overflow a `usize`. If any of the - /// length, alignment, or overflow checks fail, it returns `None`. + /// length, alignment, or overflow checks fail, it returns `Err`. 
/// /// # Compile-Time Assertions /// @@ -2297,7 +2325,10 @@ pub unsafe trait FromBytes: FromZeros { /// ``` #[must_use = "has no side effects"] #[inline] - fn from_suffix_with_trailing_elements(bytes: &[u8], count: usize) -> Option<(&[u8], &Self)> + fn from_suffix_with_trailing_elements( + bytes: &[u8], + count: usize, + ) -> Result<(&[u8], &Self), CastError<&[u8], Self>> where Self: KnownLayout + Immutable, { @@ -2317,7 +2348,7 @@ pub unsafe trait FromBytes: FromZeros { where Self: Sized + Immutable, { - <[Self]>::from_suffix_with_trailing_elements(bytes, count) + <[Self]>::from_suffix_with_trailing_elements(bytes, count).ok() } #[deprecated(since = "0.8.0", note = "`FromBytes::mut_from` now supports slices")] @@ -2328,7 +2359,7 @@ pub unsafe trait FromBytes: FromZeros { where Self: Sized + IntoBytes + Immutable, { - <[Self]>::mut_from(bytes) + <[Self]>::mut_from(bytes).ok() } /// Interprets the prefix of the given `bytes` as a `&mut [Self]` with @@ -2339,7 +2370,7 @@ pub unsafe trait FromBytes: FromZeros { /// `size_of::() * count` bytes from `bytes` to construct a `&[Self]`, /// and returns the remaining bytes to the caller. It also ensures that /// `sizeof::() * count` does not overflow a `usize`. If any of the - /// length, alignment, or overflow checks fail, it returns `None`. + /// length, alignment, or overflow checks fail, it returns `Err`. 
/// /// # Compile-Time Assertions /// @@ -2400,7 +2431,7 @@ pub unsafe trait FromBytes: FromZeros { fn mut_from_prefix_with_trailing_elements( bytes: &mut [u8], count: usize, - ) -> Option<(&mut Self, &mut [u8])> + ) -> Result<(&mut Self, &mut [u8]), CastError<&mut [u8], Self>> where Self: IntoBytes + KnownLayout + Immutable, { @@ -2420,7 +2451,7 @@ pub unsafe trait FromBytes: FromZeros { where Self: Sized + Immutable, { - <[Self]>::from_prefix_with_trailing_elements(bytes, count) + <[Self]>::from_prefix_with_trailing_elements(bytes, count).ok() } /// Interprets the suffix of the given `bytes` as a `&mut [Self]` with length @@ -2431,7 +2462,7 @@ pub unsafe trait FromBytes: FromZeros { /// `size_of::() * count` bytes from `bytes` to construct a `&[Self]`, /// and returns the preceding bytes to the caller. It also ensures that /// `sizeof::() * count` does not overflow a `usize`. If any of the - /// length, alignment, or overflow checks fail, it returns `None`. + /// length, alignment, or overflow checks fail, it returns `Err`. /// /// /// # Compile-Time Assertions @@ -2493,7 +2524,7 @@ pub unsafe trait FromBytes: FromZeros { fn mut_from_suffix_with_trailing_elements( bytes: &mut [u8], count: usize, - ) -> Option<(&mut [u8], &mut Self)> + ) -> Result<(&mut [u8], &mut Self), CastError<&mut [u8], Self>> where Self: IntoBytes + KnownLayout + Immutable, { @@ -2512,12 +2543,12 @@ pub unsafe trait FromBytes: FromZeros { where Self: Sized + IntoBytes + Immutable, { - <[Self]>::mut_from_suffix_with_trailing_elements(bytes, count) + <[Self]>::mut_from_suffix_with_trailing_elements(bytes, count).ok() } /// Reads a copy of `Self` from `bytes`. /// - /// If `bytes.len() != size_of::()`, `read_from` returns `None`. + /// If `bytes.len() != size_of::()`, `read_from` returns `Err`. 
/// /// # Examples /// @@ -2546,18 +2577,23 @@ pub unsafe trait FromBytes: FromZeros { /// ``` #[must_use = "has no side effects"] #[inline] - fn read_from(bytes: &[u8]) -> Option + fn read_from(bytes: &[u8]) -> Result> where Self: Sized, { - Ref::<_, Unalign>::new_sized(bytes).map(|r| r.read().into_inner()) + match Ref::<_, Unalign>::new_sized(bytes) { + Ok(r) => Ok(r.read().into_inner()), + Err(CastError::Size(e)) => Err(e.with_dst()), + Err(CastError::Alignment(_)) => unreachable!(), + Err(CastError::Validity(i)) => match i {}, + } } /// Reads a copy of `Self` from the prefix of `bytes`. /// /// `read_from_prefix` reads a `Self` from the first `size_of::()` /// bytes of `bytes`. If `bytes.len() < size_of::()`, it returns - /// `None`. + /// `Err`. /// /// # Examples /// @@ -2586,18 +2622,23 @@ pub unsafe trait FromBytes: FromZeros { /// ``` #[must_use = "has no side effects"] #[inline] - fn read_from_prefix(bytes: &[u8]) -> Option + fn read_from_prefix(bytes: &[u8]) -> Result> where Self: Sized, { - Ref::<_, Unalign>::new_sized_from_prefix(bytes).map(|(r, _)| r.read().into_inner()) + match Ref::<_, Unalign>::new_sized_from_prefix(bytes) { + Ok((r, _)) => Ok(r.read().into_inner()), + Err(CastError::Size(e)) => Err(e.with_dst()), + Err(CastError::Alignment(_)) => unreachable!(), + Err(CastError::Validity(i)) => match i {}, + } } /// Reads a copy of `Self` from the suffix of `bytes`. /// /// `read_from_suffix` reads a `Self` from the last `size_of::()` /// bytes of `bytes`. If `bytes.len() < size_of::()`, it returns - /// `None`. + /// `Err`. 
/// /// # Examples /// @@ -2620,11 +2661,16 @@ pub unsafe trait FromBytes: FromZeros { /// ``` #[must_use = "has no side effects"] #[inline] - fn read_from_suffix(bytes: &[u8]) -> Option + fn read_from_suffix(bytes: &[u8]) -> Result> where Self: Sized, { - Ref::<_, Unalign>::new_sized_from_suffix(bytes).map(|(_, r)| r.read().into_inner()) + match Ref::<_, Unalign>::new_sized_from_suffix(bytes) { + Ok((_, r)) => Ok(r.read().into_inner()), + Err(CastError::Size(e)) => Err(CastError::Size(e.with_dst())), + Err(CastError::Alignment(_)) => unreachable!(), + Err(CastError::Validity(i)) => match i {}, + } } #[deprecated(since = "0.8.0", note = "`FromBytes::ref_from` now supports slices")] @@ -2635,7 +2681,7 @@ pub unsafe trait FromBytes: FromZeros { where Self: Sized + Immutable, { - <[Self]>::ref_from(bytes) + <[Self]>::ref_from(bytes).ok() } } @@ -2960,7 +3006,7 @@ pub unsafe trait IntoBytes { /// Writes a copy of `self` to `bytes`. /// - /// If `bytes.len() != size_of_val(self)`, `write_to` returns `None`. + /// If `bytes.len() != size_of_val(self)`, `write_to` returns `Err`. 
/// /// # Examples /// @@ -2992,7 +3038,7 @@ pub unsafe trait IntoBytes { /// ``` /// /// If too many or too few target bytes are provided, `write_to` returns - /// `None` and leaves the target bytes unmodified: + /// `Err` and leaves the target bytes unmodified: /// /// ``` /// # use zerocopy::IntoBytes; @@ -3001,27 +3047,26 @@ pub unsafe trait IntoBytes { /// /// let write_result = header.write_to(excessive_bytes); /// - /// assert!(write_result.is_none()); + /// assert!(write_result.is_err()); /// assert_eq!(excessive_bytes, [0u8; 128]); /// ``` #[must_use = "callers should check the return value to see if the operation succeeded"] #[inline] - fn write_to(&self, bytes: &mut [u8]) -> Option<()> + fn write_to(&self, bytes: &mut [u8]) -> Result<(), SizeError<&Self, &mut [u8]>> where Self: Immutable, { if bytes.len() != mem::size_of_val(self) { - return None; + return Err(SizeError::new(self)); } - bytes.copy_from_slice(self.as_bytes()); - Some(()) + Ok(()) } /// Writes a copy of `self` to the prefix of `bytes`. /// /// `write_to_prefix` writes `self` to the first `size_of_val(self)` bytes - /// of `bytes`. If `bytes.len() < size_of_val(self)`, it returns `None`. + /// of `bytes`. If `bytes.len() < size_of_val(self)`, it returns `Err`. 
/// /// # Examples /// @@ -3053,7 +3098,7 @@ pub unsafe trait IntoBytes { /// ``` /// /// If insufficient target bytes are provided, `write_to_prefix` returns - /// `None` and leaves the target bytes unmodified: + /// `Err` and leaves the target bytes unmodified: /// /// ``` /// # use zerocopy::IntoBytes; @@ -3062,24 +3107,29 @@ pub unsafe trait IntoBytes { /// /// let write_result = header.write_to_suffix(insufficent_bytes); /// - /// assert!(write_result.is_none()); + /// assert!(write_result.is_err()); /// assert_eq!(insufficent_bytes, [0, 0]); /// ``` #[must_use = "callers should check the return value to see if the operation succeeded"] #[inline] - fn write_to_prefix(&self, bytes: &mut [u8]) -> Option<()> + fn write_to_prefix(&self, bytes: &mut [u8]) -> Result<(), SizeError<&Self, &mut [u8]>> where Self: Immutable, { let size = mem::size_of_val(self); - bytes.get_mut(..size)?.copy_from_slice(self.as_bytes()); - Some(()) + match bytes.get_mut(..size) { + Some(bytes) => { + bytes.copy_from_slice(self.as_bytes()); + Ok(()) + } + None => Err(SizeError::new(self)), + } } /// Writes a copy of `self` to the suffix of `bytes`. /// /// `write_to_suffix` writes `self` to the last `size_of_val(self)` bytes of - /// `bytes`. If `bytes.len() < size_of_val(self)`, it returns `None`. + /// `bytes`. If `bytes.len() < size_of_val(self)`, it returns `Err`. 
/// /// # Examples /// @@ -3113,12 +3163,12 @@ pub unsafe trait IntoBytes { /// /// let write_result = header.write_to_suffix(insufficent_bytes); /// - /// assert!(write_result.is_none()); + /// assert!(write_result.is_err()); /// assert_eq!(insufficent_bytes, [0, 0]); /// ``` /// /// If insufficient target bytes are provided, `write_to_suffix` returns - /// `None` and leaves the target bytes unmodified: + /// `Err` and leaves the target bytes unmodified: /// /// ``` /// # use zerocopy::IntoBytes; @@ -3127,21 +3177,31 @@ pub unsafe trait IntoBytes { /// /// let write_result = header.write_to_suffix(insufficent_bytes); /// - /// assert!(write_result.is_none()); + /// assert!(write_result.is_err()); /// assert_eq!(insufficent_bytes, [0, 0]); /// ``` #[must_use = "callers should check the return value to see if the operation succeeded"] #[inline] - fn write_to_suffix(&self, bytes: &mut [u8]) -> Option<()> + fn write_to_suffix(&self, bytes: &mut [u8]) -> Result<(), SizeError<&Self, &mut [u8]>> where Self: Immutable, { - let start = bytes.len().checked_sub(mem::size_of_val(self))?; - // get_mut() should never return None here. We use ? rather than - // .unwrap() because in the event the branch is not optimized away, - // returning None is generally lighter-weight than panicking. - bytes.get_mut(start..)?.copy_from_slice(self.as_bytes()); - Some(()) + let start = if let Some(start) = bytes.len().checked_sub(mem::size_of_val(self)) { + start + } else { + return Err(SizeError::new(self)); + }; + let bytes = if let Some(bytes) = bytes.get_mut(start..) { + bytes + } else { + // get_mut() should never return None here. We return a `SizeError` + // rather than .unwrap() because in the event the branch is not + // optimized away, returning a value is generally lighter-weight + // than panicking. 
+ return Err(SizeError::new(self)); + }; + bytes.copy_from_slice(self.as_bytes()); + Ok(()) } #[deprecated(since = "0.8.0", note = "`IntoBytes::as_bytes_mut` was renamed to `as_mut_bytes`")] @@ -4654,7 +4714,7 @@ macro_rules! include_value { /// /// impl UdpPacket { /// pub fn parse(bytes: B) -> Option> { -/// let (header, body) = Ref::new_unaligned_from_prefix(bytes)?; +/// let (header, body) = Ref::new_unaligned_from_prefix(bytes).ok()?; /// Some(UdpPacket { header, body }) /// } /// @@ -4664,7 +4724,7 @@ macro_rules! include_value { /// } /// /// impl UdpPacket { -/// pub fn set_src_port(&mut self, src_port: [u8; 2]) { +/// pub fn with_src_port(&mut self, src_port: [u8; 2]) { /// self.header.src_port = src_port; /// } /// } @@ -4697,12 +4757,15 @@ where B: ByteSlice, { #[must_use = "has no side effects"] - fn new_sized(bytes: B) -> Option> { - if bytes.len() != mem::size_of::() || !util::aligned_to::<_, T>(bytes.deref()) { - return None; + fn new_sized(bytes: B) -> Result, CastError> { + if bytes.len() != mem::size_of::() { + return Err(SizeError::new(bytes).into()); + } + if !util::aligned_to::<_, T>(bytes.deref()) { + return Err(AlignmentError::new(bytes).into()); } // INVARIANTS: We just validated size and alignment. 
- Some(Ref(bytes, PhantomData)) + Ok(Ref(bytes, PhantomData)) } } @@ -4711,26 +4774,35 @@ where B: SplitByteSlice, { #[must_use = "has no side effects"] - fn new_sized_from_prefix(bytes: B) -> Option<(Ref, B)> { - if bytes.len() < mem::size_of::() || !util::aligned_to::<_, T>(bytes.deref()) { - return None; + fn new_sized_from_prefix(bytes: B) -> Result<(Ref, B), CastError> { + if bytes.len() < mem::size_of::() { + return Err(SizeError::new(bytes).into()); + } + if !util::aligned_to::<_, T>(bytes.deref()) { + return Err(AlignmentError::new(bytes).into()); } - let (bytes, suffix) = try_split_at(bytes, mem::size_of::())?; + let (bytes, suffix) = + try_split_at(bytes, mem::size_of::()).map_err(|b| SizeError::new(b).into())?; // INVARIANTS: We just validated alignment and that `bytes` is at least // as large as `T`. `try_split_at(bytes, mem::size_of::())?` ensures // that the new `bytes` is exactly the size of `T`. By safety // postcondition on `SplitByteSlice::try_split_at` we can rely on // `try_split_at` to produce the correct `bytes` and `suffix`. 
- Some((Ref(bytes, PhantomData), suffix)) + Ok((Ref(bytes, PhantomData), suffix)) } #[must_use = "has no side effects"] - fn new_sized_from_suffix(bytes: B) -> Option<(B, Ref)> { + fn new_sized_from_suffix(bytes: B) -> Result<(B, Ref), CastError> { let bytes_len = bytes.len(); - let split_at = bytes_len.checked_sub(mem::size_of::())?; - let (prefix, bytes) = try_split_at(bytes, split_at)?; + let split_at = if let Some(split_at) = bytes_len.checked_sub(mem::size_of::()) { + split_at + } else { + return Err(SizeError::new(bytes).into()); + }; + let (prefix, bytes) = + try_split_at(bytes, split_at).map_err(|b| SizeError::new(b).into())?; if !util::aligned_to::<_, T>(bytes.deref()) { - return None; + return Err(AlignmentError::new(bytes).into()); } // INVARIANTS: Since `split_at` is defined as `bytes_len - // size_of::()`, the `bytes` which results from `let (prefix, bytes) @@ -4738,7 +4810,7 @@ where // constructing `bytes`, we validate that it has the proper alignment. // By safety postcondition on `SplitByteSlice::try_split_at` we can rely // on `try_split_at` to produce the correct `prefix` and `bytes`. - Some((prefix, Ref(bytes, PhantomData))) + Ok((prefix, Ref(bytes, PhantomData))) } } @@ -4774,11 +4846,13 @@ where /// ``` #[must_use = "has no side effects"] #[inline] - pub fn new(bytes: B) -> Option> { + pub fn new(bytes: B) -> Result, CastError> { util::assert_dst_is_not_zst::(); - let _ = Ptr::from_ref(bytes.deref()).try_cast_into_no_leftover::()?; + if let Err(e) = Ptr::from_ref(bytes.deref()).try_cast_into_no_leftover::() { + return Err(e.with_src(()).with_src(bytes)); + } // INVARIANTS: `try_cast_into_no_leftover` validates size and alignment. 
- Some(Ref(bytes, PhantomData)) + Ok(Ref(bytes, PhantomData)) } } @@ -4816,16 +4890,23 @@ where /// ``` #[must_use = "has no side effects"] #[inline] - pub fn new_from_prefix(bytes: B) -> Option<(Ref, B)> { + pub fn new_from_prefix(bytes: B) -> Result<(Ref, B), CastError> { util::assert_dst_is_not_zst::(); - let (_, split_at) = Ptr::from_ref(bytes.deref()).try_cast_into::(CastType::Prefix)?; - let (bytes, suffix) = try_split_at(bytes, split_at)?; + let split_at = match Ptr::from_ref(bytes.deref()).try_cast_into::(CastType::Prefix) { + Ok((_, split_at)) => split_at, + Err(e) => { + return Err(e.with_src(()).with_src(bytes)); + } + }; + + let (bytes, suffix) = + try_split_at(bytes, split_at).map_err(|b| SizeError::new(b).into())?; // INVARIANTS: `try_cast_into` validates size and alignment, and returns // a `split_at` that indicates how many bytes of `bytes` correspond to a // valid `T`. By safety postcondition on `SplitByteSlice::try_split_at` // we can rely on `try_split_at` to produce the correct `bytes` and // `suffix`. - Some((Ref(bytes, PhantomData), suffix)) + Ok((Ref(bytes, PhantomData), suffix)) } /// Constructs a new `Ref` from the suffix of a byte slice. 
@@ -4858,16 +4939,24 @@ where /// ``` #[must_use = "has no side effects"] #[inline] - pub fn new_from_suffix(bytes: B) -> Option<(B, Ref)> { + pub fn new_from_suffix(bytes: B) -> Result<(B, Ref), CastError> { util::assert_dst_is_not_zst::(); - let (_, split_at) = Ptr::from_ref(bytes.deref()).try_cast_into::(CastType::Suffix)?; - let (prefix, bytes) = try_split_at(bytes, split_at)?; + let split_at = match Ptr::from_ref(bytes.deref()).try_cast_into::(CastType::Suffix) { + Ok((_, split_at)) => split_at, + Err(e) => { + let e = e.with_src(()); + return Err(e.with_src(bytes)); + } + }; + + let (prefix, bytes) = + try_split_at(bytes, split_at).map_err(|b| SizeError::new(b).into())?; // INVARIANTS: `try_cast_into` validates size and alignment, and returns // a `try_split_at` that indicates how many bytes of `bytes` correspond // to a valid `T`. By safety postcondition on // `SplitByteSlice::try_split_at` we can rely on `try_split_at` to // produce the correct `prefix` and `bytes`. - Some((prefix, Ref(bytes, PhantomData))) + Ok((prefix, Ref(bytes, PhantomData))) } } @@ -4880,14 +4969,17 @@ where // update references to this name in `#[deprecated]` attributes elsewhere. #[doc(hidden)] #[inline] - pub fn with_trailing_elements_from_prefix(bytes: B, count: usize) -> Option<(Ref, B)> { + pub fn with_trailing_elements_from_prefix( + bytes: B, + count: usize, + ) -> Result<(Ref, B), CastError> { util::assert_dst_is_not_zst::(); let expected_len = match count.size_for_metadata(T::LAYOUT) { Some(len) => len, - None => return None, + None => return Err(SizeError::new(bytes).into()), }; if bytes.len() < expected_len { - return None; + return Err(SizeError::new(bytes).into()); } let (prefix, bytes) = bytes.split_at(expected_len); Self::new(prefix).map(move |l| (l, bytes)) @@ -4903,13 +4995,20 @@ where // update references to this name in `#[deprecated]` attributes elsewhere. 
#[doc(hidden)] #[inline] - pub fn with_trailing_elements_from_suffix(bytes: B, count: usize) -> Option<(B, Ref)> { + pub fn with_trailing_elements_from_suffix( + bytes: B, + count: usize, + ) -> Result<(B, Ref), CastError> { util::assert_dst_is_not_zst::(); let expected_len = match count.size_for_metadata(T::LAYOUT) { Some(len) => len, - None => return None, + None => return Err(SizeError::new(bytes).into()), + }; + let split_at = if let Some(split_at) = bytes.len().checked_sub(expected_len) { + split_at + } else { + return Err(SizeError::new(bytes).into()); }; - let split_at = bytes.len().checked_sub(expected_len)?; let (bytes, suffix) = bytes.split_at(split_at); Self::new(suffix).map(move |l| (bytes, l)) } @@ -4946,9 +5045,14 @@ where /// ``` #[must_use = "has no side effects"] #[inline(always)] - pub fn new_unaligned(bytes: B) -> Option> { + pub fn new_unaligned(bytes: B) -> Result, SizeError> { util::assert_dst_is_not_zst::(); - Ref::new(bytes) + match Ref::new(bytes) { + Ok(dst) => Ok(dst), + Err(CastError::Size(e)) => Err(e), + Err(CastError::Alignment(_)) => unreachable!(), + Err(CastError::Validity(i)) => match i {}, + } } } @@ -4986,9 +5090,13 @@ where /// ``` #[must_use = "has no side effects"] #[inline(always)] - pub fn new_unaligned_from_prefix(bytes: B) -> Option<(Ref, B)> { + pub fn new_unaligned_from_prefix(bytes: B) -> Result<(Ref, B), SizeError> { util::assert_dst_is_not_zst::(); - Ref::new_from_prefix(bytes) + Ref::new_from_prefix(bytes).map_err(|e| match e { + CastError::Size(e) => e, + CastError::Alignment(_) => unreachable!(), + CastError::Validity(i) => match i {}, + }) } /// Constructs a new `Ref` from the suffix of a byte slice for a type with @@ -5020,9 +5128,13 @@ where /// ``` #[must_use = "has no side effects"] #[inline(always)] - pub fn new_unaligned_from_suffix(bytes: B) -> Option<(B, Ref)> { + pub fn new_unaligned_from_suffix(bytes: B) -> Result<(B, Ref), SizeError> { util::assert_dst_is_not_zst::(); - Ref::new_from_suffix(bytes) + 
Ref::new_from_suffix(bytes).map_err(|e| match e { + CastError::Size(e) => e, + CastError::Alignment(_) => unreachable!(), + CastError::Validity(i) => match i {}, + }) } } @@ -5038,7 +5150,7 @@ where pub fn with_trailing_elements_unaligned_from_prefix( bytes: B, count: usize, - ) -> Option<(Ref, B)> { + ) -> Result<(Ref, B), CastError> { util::assert_dst_is_not_zst::(); Self::with_trailing_elements_from_prefix(bytes, count) } @@ -5056,7 +5168,7 @@ where pub fn with_trailing_elements_unaligned_from_suffix( bytes: B, count: usize, - ) -> Option<(B, Ref)> { + ) -> Result<(B, Ref), CastError> { util::assert_dst_is_not_zst::(); Self::with_trailing_elements_from_suffix(bytes, count) } @@ -5329,7 +5441,7 @@ pub unsafe trait SplitByteSlice: ByteSlice { #[must_use] #[inline] fn split_at(self, mid: usize) -> (Self, Self) { - if let Some(splits) = try_split_at(self, mid) { + if let Ok(splits) = try_split_at(self, mid) { splits } else { panic!("mid > len") @@ -5349,16 +5461,15 @@ pub unsafe trait SplitByteSlice: ByteSlice { /// Attempts to split the slice at the midpoint. /// -/// `x.try_split_at(mid)` returns `Some((x[..mid], x[mid..]))` if `mid <= -/// x.deref().len()` and otherwise returns `None`. +/// `x.try_split_at(mid)` returns `Ok((x[..mid], x[mid..]))` if `mid <= +/// x.deref().len()` and otherwise returns `Err(x)`. /// /// # Safety /// /// Unsafe code may rely on this function correctly implementing the above /// functionality. -#[must_use] #[inline] -fn try_split_at(slice: S, mid: usize) -> Option<(S, S)> +fn try_split_at(slice: S, mid: usize) -> Result<(S, S), S> where S: SplitByteSlice, { @@ -5369,9 +5480,9 @@ where // dereference to a byte slice of the same address and length. Thus, we // can be sure that the above precondition remains satisfied through the // call to `split_at_unchecked`. 
- unsafe { Some(slice.split_at_unchecked(mid)) } + unsafe { Ok(slice.split_at_unchecked(mid)) } } else { - None + Err(slice) } } @@ -5754,7 +5865,7 @@ mod alloc_support { pub use alloc_support::*; #[cfg(test)] -#[allow(clippy::unreadable_literal)] +#[allow(clippy::assertions_on_result_states, clippy::unreadable_literal)] mod tests { use core::convert::TryInto as _; @@ -6106,16 +6217,26 @@ mod tests { const ODDS: [usize; 8] = [1, 3, 5, 7, 9, 11, 13, 15]; // base_size is too big for the memory region. - test!(layout(((1..8) | ((1..8), (1..8))), _).validate(_, [0], _), Ok(None)); - test!(layout(((2..8) | ((2..8), (2..8))), _).validate(_, [1], _), Ok(None)); + test!( + layout(((1..8) | ((1..8), (1..8))), _).validate([0], [0], _), + Ok(Err(MetadataCastError::Size)) + ); + test!( + layout(((2..8) | ((2..8), (2..8))), _).validate([0], [1], Prefix), + Ok(Err(MetadataCastError::Size)) + ); + test!( + layout(((2..8) | ((2..8), (2..8))), _).validate([0x1000_0000 - 1], [1], Suffix), + Ok(Err(MetadataCastError::Size)) + ); // addr is unaligned for prefix cast - test!(layout(_, [2]).validate(ODDS, _, Prefix), Ok(None)); - test!(layout(_, [2]).validate(ODDS, _, Prefix), Ok(None)); + test!(layout(_, [2]).validate(ODDS, _, Prefix), Ok(Err(MetadataCastError::Alignment))); + test!(layout(_, [2]).validate(ODDS, _, Prefix), Ok(Err(MetadataCastError::Alignment))); // addr is aligned, but end of buffer is unaligned for suffix cast - test!(layout(_, [2]).validate(EVENS, ODDS, Suffix), Ok(None)); - test!(layout(_, [2]).validate(EVENS, ODDS, Suffix), Ok(None)); + test!(layout(_, [2]).validate(EVENS, ODDS, Suffix), Ok(Err(MetadataCastError::Alignment))); + test!(layout(_, [2]).validate(EVENS, ODDS, Suffix), Ok(Err(MetadataCastError::Alignment))); // Unfortunately, these constants cannot easily be used in the // implementation of `validate_cast_and_convert_metadata`, since @@ -6153,7 +6274,7 @@ mod tests { fn validate_behavior( (layout, addr, bytes_len, cast_type): (DstLayout, usize, usize, 
CastType), ) { - if let Some((elems, split_at)) = + if let Ok((elems, split_at)) = layout.validate_cast_and_convert_metadata(addr, bytes_len, cast_type) { let (size_info, align) = (layout.size_info, layout.align); @@ -7062,54 +7183,57 @@ mod tests { // Test `FromBytes::{read_from, read_from_prefix, read_from_suffix}`. - assert_eq!(u64::read_from(&VAL_BYTES[..]), Some(VAL)); + assert_eq!(u64::read_from(&VAL_BYTES[..]), Ok(VAL)); // The first 8 bytes are from `VAL_BYTES` and the second 8 bytes are all // zeros. let bytes_with_prefix: [u8; 16] = transmute!([VAL_BYTES, [0; 8]]); - assert_eq!(u64::read_from_prefix(&bytes_with_prefix[..]), Some(VAL)); - assert_eq!(u64::read_from_suffix(&bytes_with_prefix[..]), Some(0)); + assert_eq!(u64::read_from_prefix(&bytes_with_prefix[..]), Ok(VAL)); + assert_eq!(u64::read_from_suffix(&bytes_with_prefix[..]), Ok(0)); // The first 8 bytes are all zeros and the second 8 bytes are from // `VAL_BYTES` let bytes_with_suffix: [u8; 16] = transmute!([[0; 8], VAL_BYTES]); - assert_eq!(u64::read_from_prefix(&bytes_with_suffix[..]), Some(0)); - assert_eq!(u64::read_from_suffix(&bytes_with_suffix[..]), Some(VAL)); + assert_eq!(u64::read_from_prefix(&bytes_with_suffix[..]), Ok(0)); + assert_eq!(u64::read_from_suffix(&bytes_with_suffix[..]), Ok(VAL)); // Test `IntoBytes::{write_to, write_to_prefix, write_to_suffix}`. 
let mut bytes = [0u8; 8]; - assert_eq!(VAL.write_to(&mut bytes[..]), Some(())); + assert_eq!(VAL.write_to(&mut bytes[..]), Ok(())); assert_eq!(bytes, VAL_BYTES); let mut bytes = [0u8; 16]; - assert_eq!(VAL.write_to_prefix(&mut bytes[..]), Some(())); + assert_eq!(VAL.write_to_prefix(&mut bytes[..]), Ok(())); let want: [u8; 16] = transmute!([VAL_BYTES, [0; 8]]); assert_eq!(bytes, want); let mut bytes = [0u8; 16]; - assert_eq!(VAL.write_to_suffix(&mut bytes[..]), Some(())); + assert_eq!(VAL.write_to_suffix(&mut bytes[..]), Ok(())); let want: [u8; 16] = transmute!([[0; 8], VAL_BYTES]); assert_eq!(bytes, want); } #[test] fn test_try_from_bytes_try_read_from() { - assert_eq!(::try_read_from(&[0]), Some(false)); - assert_eq!(::try_read_from(&[1]), Some(true)); + assert_eq!(::try_read_from(&[0]), Ok(false)); + assert_eq!(::try_read_from(&[1]), Ok(true)); // If we don't pass enough bytes, it fails. - assert_eq!(::try_read_from(&[]), None); + assert!(matches!(::try_read_from(&[]), Err(TryReadError::Size(_)))); // If we pass too many bytes, it fails. - assert_eq!(::try_read_from(&[0, 0]), None); + assert!(matches!(::try_read_from(&[0, 0]), Err(TryReadError::Size(_)))); // If we pass an invalid value, it fails. - assert_eq!(::try_read_from(&[2]), None); + assert!(matches!( + ::try_read_from(&[2]), + Err(TryReadError::Validity(_)) + )); // Reading from a misaligned buffer should still succeed. Since `AU64`'s // alignment is 8, and since we read from two adjacent addresses one // byte apart, it is guaranteed that at least one of them (though // possibly both) will be misaligned. let bytes: [u8; 9] = [0, 0, 0, 0, 0, 0, 0, 0, 0]; - assert_eq!(::try_read_from(&bytes[..8]), Some(AU64(0))); - assert_eq!(::try_read_from(&bytes[1..9]), Some(AU64(0))); + assert_eq!(::try_read_from(&bytes[..8]), Ok(AU64(0))); + assert_eq!(::try_read_from(&bytes[1..9]), Ok(AU64(0))); } #[test] @@ -7555,36 +7679,36 @@ mod tests { // Fail because the buffer is too large. 
let mut buf = Align::<[u8; 16], AU64>::default(); // `buf.t` should be aligned to 8, so only the length check should fail. - assert!(AU64::ref_from(&buf.t[..]).is_none()); - assert!(AU64::mut_from(&mut buf.t[..]).is_none()); - assert!(<[u8; 8]>::ref_from(&buf.t[..]).is_none()); - assert!(<[u8; 8]>::mut_from(&mut buf.t[..]).is_none()); + assert!(AU64::ref_from(&buf.t[..]).is_err()); + assert!(AU64::mut_from(&mut buf.t[..]).is_err()); + assert!(<[u8; 8]>::ref_from(&buf.t[..]).is_err()); + assert!(<[u8; 8]>::mut_from(&mut buf.t[..]).is_err()); // Fail because the buffer is too small. let mut buf = Align::<[u8; 4], AU64>::default(); - assert!(AU64::ref_from(&buf.t[..]).is_none()); - assert!(AU64::mut_from(&mut buf.t[..]).is_none()); - assert!(<[u8; 8]>::ref_from(&buf.t[..]).is_none()); - assert!(<[u8; 8]>::mut_from(&mut buf.t[..]).is_none()); - assert!(AU64::ref_from_prefix(&buf.t[..]).is_none()); - assert!(AU64::mut_from_prefix(&mut buf.t[..]).is_none()); - assert!(AU64::ref_from_suffix(&buf.t[..]).is_none()); - assert!(AU64::mut_from_suffix(&mut buf.t[..]).is_none()); - assert!(<[u8; 8]>::ref_from_prefix(&buf.t[..]).is_none()); - assert!(<[u8; 8]>::mut_from_prefix(&mut buf.t[..]).is_none()); - assert!(<[u8; 8]>::ref_from_suffix(&buf.t[..]).is_none()); - assert!(<[u8; 8]>::mut_from_suffix(&mut buf.t[..]).is_none()); + assert!(AU64::ref_from(&buf.t[..]).is_err()); + assert!(AU64::mut_from(&mut buf.t[..]).is_err()); + assert!(<[u8; 8]>::ref_from(&buf.t[..]).is_err()); + assert!(<[u8; 8]>::mut_from(&mut buf.t[..]).is_err()); + assert!(AU64::ref_from_prefix(&buf.t[..]).is_err()); + assert!(AU64::mut_from_prefix(&mut buf.t[..]).is_err()); + assert!(AU64::ref_from_suffix(&buf.t[..]).is_err()); + assert!(AU64::mut_from_suffix(&mut buf.t[..]).is_err()); + assert!(<[u8; 8]>::ref_from_prefix(&buf.t[..]).is_err()); + assert!(<[u8; 8]>::mut_from_prefix(&mut buf.t[..]).is_err()); + assert!(<[u8; 8]>::ref_from_suffix(&buf.t[..]).is_err()); + assert!(<[u8; 8]>::mut_from_suffix(&mut 
buf.t[..]).is_err()); // Fail because the alignment is insufficient. let mut buf = Align::<[u8; 13], AU64>::default(); - assert!(AU64::ref_from(&buf.t[1..]).is_none()); - assert!(AU64::mut_from(&mut buf.t[1..]).is_none()); - assert!(AU64::ref_from(&buf.t[1..]).is_none()); - assert!(AU64::mut_from(&mut buf.t[1..]).is_none()); - assert!(AU64::ref_from_prefix(&buf.t[1..]).is_none()); - assert!(AU64::mut_from_prefix(&mut buf.t[1..]).is_none()); - assert!(AU64::ref_from_suffix(&buf.t[..]).is_none()); - assert!(AU64::mut_from_suffix(&mut buf.t[..]).is_none()); + assert!(AU64::ref_from(&buf.t[1..]).is_err()); + assert!(AU64::mut_from(&mut buf.t[1..]).is_err()); + assert!(AU64::ref_from(&buf.t[1..]).is_err()); + assert!(AU64::mut_from(&mut buf.t[1..]).is_err()); + assert!(AU64::ref_from_prefix(&buf.t[1..]).is_err()); + assert!(AU64::mut_from_prefix(&mut buf.t[1..]).is_err()); + assert!(AU64::ref_from_suffix(&buf.t[..]).is_err()); + assert!(AU64::mut_from_suffix(&mut buf.t[..]).is_err()); } #[test] @@ -7595,38 +7719,38 @@ mod tests { // A buffer with an alignment of 8. let buf = Align::<[u8; 16], AU64>::default(); // `buf.t` should be aligned to 8, so only the length check should fail. - assert!(Ref::<_, AU64>::new(&buf.t[..]).is_none()); - assert!(Ref::<_, [u8; 8]>::new_unaligned(&buf.t[..]).is_none()); + assert!(Ref::<_, AU64>::new(&buf.t[..]).is_err()); + assert!(Ref::<_, [u8; 8]>::new_unaligned(&buf.t[..]).is_err()); // Fail because the buffer is too small. // A buffer with an alignment of 8. let buf = Align::<[u8; 4], AU64>::default(); // `buf.t` should be aligned to 8, so only the length check should fail. 
- assert!(Ref::<_, AU64>::new(&buf.t[..]).is_none()); - assert!(Ref::<_, [u8; 8]>::new_unaligned(&buf.t[..]).is_none()); - assert!(Ref::<_, AU64>::new_from_prefix(&buf.t[..]).is_none()); - assert!(Ref::<_, AU64>::new_from_suffix(&buf.t[..]).is_none()); - assert!(Ref::<_, [u8; 8]>::new_unaligned_from_prefix(&buf.t[..]).is_none()); - assert!(Ref::<_, [u8; 8]>::new_unaligned_from_suffix(&buf.t[..]).is_none()); + assert!(Ref::<_, AU64>::new(&buf.t[..]).is_err()); + assert!(Ref::<_, [u8; 8]>::new_unaligned(&buf.t[..]).is_err()); + assert!(Ref::<_, AU64>::new_from_prefix(&buf.t[..]).is_err()); + assert!(Ref::<_, AU64>::new_from_suffix(&buf.t[..]).is_err()); + assert!(Ref::<_, [u8; 8]>::new_unaligned_from_prefix(&buf.t[..]).is_err()); + assert!(Ref::<_, [u8; 8]>::new_unaligned_from_suffix(&buf.t[..]).is_err()); // Fail because the length is not a multiple of the element size. let buf = Align::<[u8; 12], AU64>::default(); // `buf.t` has length 12, but element size is 8. - assert!(Ref::<_, [AU64]>::new(&buf.t[..]).is_none()); - assert!(Ref::<_, [[u8; 8]]>::new_unaligned(&buf.t[..]).is_none()); + assert!(Ref::<_, [AU64]>::new(&buf.t[..]).is_err()); + assert!(Ref::<_, [[u8; 8]]>::new_unaligned(&buf.t[..]).is_err()); // Fail because the buffer is too short. let buf = Align::<[u8; 12], AU64>::default(); // `buf.t` has length 12, but the element size is 8 (and we're expecting // two of them). 
- assert!(Ref::<_, [AU64]>::with_trailing_elements_from_prefix(&buf.t[..], 2).is_none()); - assert!(Ref::<_, [AU64]>::with_trailing_elements_from_suffix(&buf.t[..], 2).is_none()); + assert!(Ref::<_, [AU64]>::with_trailing_elements_from_prefix(&buf.t[..], 2).is_err()); + assert!(Ref::<_, [AU64]>::with_trailing_elements_from_suffix(&buf.t[..], 2).is_err()); assert!(Ref::<_, [[u8; 8]]>::with_trailing_elements_unaligned_from_prefix(&buf.t[..], 2) - .is_none()); + .is_err()); assert!(Ref::<_, [[u8; 8]]>::with_trailing_elements_unaligned_from_suffix(&buf.t[..], 2) - .is_none()); + .is_err()); // Fail because the alignment is insufficient. @@ -7635,33 +7759,33 @@ mod tests { let buf = Align::<[u8; 13], AU64>::default(); // Slicing from 1, we get a buffer with size 12 (so the length check // should succeed) but an alignment of only 1, which is insufficient. - assert!(Ref::<_, AU64>::new(&buf.t[1..]).is_none()); - assert!(Ref::<_, AU64>::new_from_prefix(&buf.t[1..]).is_none()); - assert!(Ref::<_, [AU64]>::new(&buf.t[1..]).is_none()); - assert!(Ref::<_, [AU64]>::with_trailing_elements_from_prefix(&buf.t[1..], 1).is_none()); - assert!(Ref::<_, [AU64]>::with_trailing_elements_from_suffix(&buf.t[1..], 1).is_none()); + assert!(Ref::<_, AU64>::new(&buf.t[1..]).is_err()); + assert!(Ref::<_, AU64>::new_from_prefix(&buf.t[1..]).is_err()); + assert!(Ref::<_, [AU64]>::new(&buf.t[1..]).is_err()); + assert!(Ref::<_, [AU64]>::with_trailing_elements_from_prefix(&buf.t[1..], 1).is_err()); + assert!(Ref::<_, [AU64]>::with_trailing_elements_from_suffix(&buf.t[1..], 1).is_err()); // Slicing is unnecessary here because `new_from_suffix` uses the suffix // of the slice, which has odd alignment. - assert!(Ref::<_, AU64>::new_from_suffix(&buf.t[..]).is_none()); + assert!(Ref::<_, AU64>::new_from_suffix(&buf.t[..]).is_err()); // Fail due to arithmetic overflow. 
let buf = Align::<[u8; 16], AU64>::default(); let unreasonable_len = usize::MAX / mem::size_of::() + 1; assert!(Ref::<_, [AU64]>::with_trailing_elements_from_prefix(&buf.t[..], unreasonable_len) - .is_none()); + .is_err()); assert!(Ref::<_, [AU64]>::with_trailing_elements_from_suffix(&buf.t[..], unreasonable_len) - .is_none()); + .is_err()); assert!(Ref::<_, [[u8; 8]]>::with_trailing_elements_unaligned_from_prefix( &buf.t[..], unreasonable_len ) - .is_none()); + .is_err()); assert!(Ref::<_, [[u8; 8]]>::with_trailing_elements_unaligned_from_suffix( &buf.t[..], unreasonable_len ) - .is_none()); + .is_err()); } #[test] @@ -7690,43 +7814,43 @@ mod tests { t.as_mut_bytes()[0] ^= 0xFF; // `write_to` rejects slices that are too small or too large. - assert_eq!(t.write_to(&mut vec![0; N - 1][..]), None); - assert_eq!(t.write_to(&mut vec![0; N + 1][..]), None); + assert!(t.write_to(&mut vec![0; N - 1][..]).is_err()); + assert!(t.write_to(&mut vec![0; N + 1][..]).is_err()); // `write_to` works as expected. let mut bytes = [0; N]; - assert_eq!(t.write_to(&mut bytes[..]), Some(())); + assert_eq!(t.write_to(&mut bytes[..]), Ok(())); assert_eq!(bytes, t.as_bytes()); // `write_to_prefix` rejects slices that are too small. - assert_eq!(t.write_to_prefix(&mut vec![0; N - 1][..]), None); + assert!(t.write_to_prefix(&mut vec![0; N - 1][..]).is_err()); // `write_to_prefix` works with exact-sized slices. let mut bytes = [0; N]; - assert_eq!(t.write_to_prefix(&mut bytes[..]), Some(())); + assert_eq!(t.write_to_prefix(&mut bytes[..]), Ok(())); assert_eq!(bytes, t.as_bytes()); // `write_to_prefix` works with too-large slices, and any bytes past // the prefix aren't modified. 
let mut too_many_bytes = vec![0; N + 1]; too_many_bytes[N] = 123; - assert_eq!(t.write_to_prefix(&mut too_many_bytes[..]), Some(())); + assert_eq!(t.write_to_prefix(&mut too_many_bytes[..]), Ok(())); assert_eq!(&too_many_bytes[..N], t.as_bytes()); assert_eq!(too_many_bytes[N], 123); // `write_to_suffix` rejects slices that are too small. - assert_eq!(t.write_to_suffix(&mut vec![0; N - 1][..]), None); + assert!(t.write_to_suffix(&mut vec![0; N - 1][..]).is_err()); // `write_to_suffix` works with exact-sized slices. let mut bytes = [0; N]; - assert_eq!(t.write_to_suffix(&mut bytes[..]), Some(())); + assert_eq!(t.write_to_suffix(&mut bytes[..]), Ok(())); assert_eq!(bytes, t.as_bytes()); // `write_to_suffix` works with too-large slices, and any bytes // before the suffix aren't modified. let mut too_many_bytes = vec![0; N + 1]; too_many_bytes[0] = 123; - assert_eq!(t.write_to_suffix(&mut too_many_bytes[..]), Some(())); + assert_eq!(t.write_to_suffix(&mut too_many_bytes[..]), Ok(())); assert_eq!(&too_many_bytes[1..], t.as_bytes()); assert_eq!(too_many_bytes[0], 123); } @@ -8070,7 +8194,7 @@ mod tests { &self, bytes: &'bytes [u8], ) -> Option> { - Some(T::try_ref_from(bytes)) + Some(T::try_ref_from(bytes).ok()) } #[allow(clippy::needless_lifetimes)] @@ -8078,7 +8202,7 @@ mod tests { &self, bytes: &'bytes mut [u8], ) -> Option> { - Some(T::try_mut_from(bytes)) + Some(T::try_mut_from(bytes).ok()) } } @@ -8088,7 +8212,7 @@ mod tests { impl TestTryReadFrom for AutorefWrapper { fn test_try_read_from(&self, bytes: &[u8]) -> Option> { - Some(T::try_read_from(bytes)) + Some(T::try_read_from(bytes).ok()) } } diff --git a/src/pointer/ptr.rs b/src/pointer/ptr.rs index 37ac90f7b2..54e993f80e 100644 --- a/src/pointer/ptr.rs +++ b/src/pointer/ptr.rs @@ -618,7 +618,7 @@ mod _conversions { /// State transitions between invariants. 
mod _transitions { use super::*; - use crate::TryFromBytes; + use crate::{TryFromBytes, ValidityError}; impl<'a, T, I> Ptr<'a, T, I> where @@ -845,10 +845,15 @@ mod _transitions { /// /// This method will panic if /// [`T::is_bit_valid`][TryFromBytes::is_bit_valid] panics. + /// + /// # Safety + /// + /// On error, unsafe code may rely on this method's returned + /// `ValidityError` containing `self`. #[inline] pub(crate) fn try_into_valid( mut self, - ) -> Option> + ) -> Result, ValidityError> where T: TryFromBytes, I::Aliasing: AtLeast, @@ -860,9 +865,9 @@ mod _transitions { if T::is_bit_valid(self.reborrow().forget_exclusive().forget_aligned()) { // SAFETY: If `T::is_bit_valid`, code may assume that `self` // contains a bit-valid instance of `Self`. - Some(unsafe { self.assume_valid() }) + Ok(unsafe { self.assume_valid() }) } else { - None + Err(ValidityError::new(self)) } } @@ -893,7 +898,7 @@ mod _transitions { /// Casts of the referent type. mod _casts { use super::*; - use crate::PointerMetadata; + use crate::{layout::MetadataCastError, AlignmentError, CastError, PointerMetadata, SizeError}; impl<'a, T, I> Ptr<'a, T, I> where @@ -1111,22 +1116,30 @@ mod _casts { /// - If this is a suffix cast, `ptr` refers to the byte range /// `[split_at, self.len())` in `self`. pub(crate) fn try_cast_into( - &self, + self, cast_type: CastType, - ) -> Option<(Ptr<'a, U, (I::Aliasing, Aligned, Initialized)>, usize)> { + ) -> Result<(Ptr<'a, U, (I::Aliasing, Aligned, Initialized)>, usize), CastError> + { crate::util::assert_dst_is_not_zst::(); - // PANICS: By invariant, the byte range addressed by `self.ptr` does // not wrap around the address space. This implies that the sum of // the address (represented as a `usize`) and length do not overflow // `usize`, as required by `validate_cast_and_convert_metadata`. // Thus, this call to `validate_cast_and_convert_metadata` will only // panic if `U` is a DST whose trailing slice element is zero-sized. 
- let (elems, split_at) = U::LAYOUT.validate_cast_and_convert_metadata( + let maybe_metadata = U::LAYOUT.validate_cast_and_convert_metadata( AsAddress::addr(self.as_non_null().as_ptr()), self.len(), cast_type, - )?; + ); + + let (elems, split_at) = match maybe_metadata { + Ok((elems, split_at)) => (elems, split_at), + Err(MetadataCastError::Alignment) => { + return Err(CastError::Alignment(AlignmentError::new(self))) + } + Err(MetadataCastError::Size) => return Err(CastError::Size(SizeError::new(self))), + }; let offset = match cast_type { CastType::Prefix => 0, @@ -1196,7 +1209,7 @@ mod _casts { // invariant on Ptr<'a, T, I>, preserved through the cast by the // bound `U: Immutable`. // 10. See 9. - Some((unsafe { Ptr::new(ptr) }, split_at)) + Ok((unsafe { Ptr::new(ptr) }, split_at)) } /// Attempts to cast `self` into a `U`, failing if all of the bytes of @@ -1212,14 +1225,32 @@ mod _casts { #[allow(unused)] #[inline(always)] pub(crate) fn try_cast_into_no_leftover( - &self, - ) -> Option> { + self, + ) -> Result, CastError> { + let len = self.len(); // TODO(#67): Remove this allow. See NonNulSlicelExt for more // details. #[allow(unstable_name_collisions)] match self.try_cast_into(CastType::Prefix) { - Some((slf, split_at)) if split_at == self.len() => Some(slf), - Some(_) | None => None, + Ok((slf, split_at)) => { + if split_at == len { + Ok(slf) + } else { + // Undo the cast so we can return the original bytes. + let slf = slf.as_bytes(); + // Restore the initial invariants of `self`. + // + // SAFETY: The referent type of `slf` is now equal to + // that of `self`, but the invariants nominally differ. + // Since `slf` and `self` refer to the same memory and + // no actions have been taken that would violate the + // original invariants on `self`, it is sound to apply + // the invariants of `self` onto `slf`. 
+ let slf = unsafe { slf.assume_invariants() }; + Err(CastError::Size(SizeError::<_, U>::new(slf))) + } + } + Err(err) => Err(err), } } } @@ -1522,7 +1553,7 @@ mod tests { } for cast_type in [CastType::Prefix, CastType::Suffix] { - if let Some((slf, split_at)) = + if let Ok((slf, split_at)) = Ptr::from_ref(bytes).try_cast_into::(cast_type) { // SAFETY: All bytes in `bytes` have been @@ -1535,7 +1566,7 @@ mod tests { } } - if let Some(slf) = Ptr::from_ref(bytes).try_cast_into_no_leftover::() { + if let Ok(slf) = Ptr::from_ref(bytes).try_cast_into_no_leftover::() { // SAFETY: All bytes in `bytes` have been // initialized. let len = unsafe { validate_and_get_len(slf) }; diff --git a/zerocopy-derive/tests/enum_try_from_bytes.rs b/zerocopy-derive/tests/enum_try_from_bytes.rs index 2d03d305d7..2ad228ba3b 100644 --- a/zerocopy-derive/tests/enum_try_from_bytes.rs +++ b/zerocopy-derive/tests/enum_try_from_bytes.rs @@ -22,10 +22,10 @@ util_assert_impl_all!(Foo: imp::TryFromBytes); #[test] fn test_foo() { - imp::assert_eq!(::try_read_from(&[0]), imp::Some(Foo::A)); - imp::assert_eq!(::try_read_from(&[]), imp::None); - imp::assert_eq!(::try_read_from(&[1]), imp::None); - imp::assert_eq!(::try_read_from(&[0, 0]), imp::None); + imp::assert_eq!(::try_read_from(&[0]), imp::Ok(Foo::A)); + imp::assert!(::try_read_from(&[]).is_err()); + imp::assert!(::try_read_from(&[1]).is_err()); + imp::assert!(::try_read_from(&[0, 0]).is_err()); } #[derive(Eq, PartialEq, Debug, imp::KnownLayout, imp::Immutable, imp::TryFromBytes)] @@ -38,11 +38,11 @@ util_assert_impl_all!(Bar: imp::TryFromBytes); #[test] fn test_bar() { - imp::assert_eq!(::try_read_from(&[0, 0]), imp::Some(Bar::A)); - imp::assert_eq!(::try_read_from(&[]), imp::None); - imp::assert_eq!(::try_read_from(&[0]), imp::None); - imp::assert_eq!(::try_read_from(&[0, 1]), imp::None); - imp::assert_eq!(::try_read_from(&[0, 0, 0]), imp::None); + imp::assert_eq!(::try_read_from(&[0, 0]), imp::Ok(Bar::A)); + 
imp::assert!(::try_read_from(&[]).is_err()); + imp::assert!(::try_read_from(&[0]).is_err()); + imp::assert!(::try_read_from(&[0, 1]).is_err()); + imp::assert!(::try_read_from(&[0, 0, 0]).is_err()); } #[derive(Eq, PartialEq, Debug, imp::KnownLayout, imp::Immutable, imp::TryFromBytes)] @@ -58,17 +58,17 @@ util_assert_impl_all!(Baz: imp::TryFromBytes); fn test_baz() { imp::assert_eq!( ::try_read_from(imp::IntoBytes::as_bytes(&1u32)), - imp::Some(Baz::A) + imp::Ok(Baz::A) ); imp::assert_eq!( ::try_read_from(imp::IntoBytes::as_bytes(&0u32)), - imp::Some(Baz::B) + imp::Ok(Baz::B) ); - imp::assert_eq!(::try_read_from(&[]), imp::None); - imp::assert_eq!(::try_read_from(&[0]), imp::None); - imp::assert_eq!(::try_read_from(&[0, 0]), imp::None); - imp::assert_eq!(::try_read_from(&[0, 0, 0]), imp::None); - imp::assert_eq!(::try_read_from(&[0, 0, 0, 0, 0]), imp::None); + imp::assert!(::try_read_from(&[]).is_err()); + imp::assert!(::try_read_from(&[0]).is_err()); + imp::assert!(::try_read_from(&[0, 0]).is_err()); + imp::assert!(::try_read_from(&[0, 0, 0]).is_err()); + imp::assert!(::try_read_from(&[0, 0, 0, 0, 0]).is_err()); } // Test hygiene - make sure that `i8` being shadowed doesn't cause problems for @@ -92,23 +92,23 @@ util_assert_impl_all!(Blah: imp::TryFromBytes); fn test_blah() { imp::assert_eq!( ::try_read_from(imp::IntoBytes::as_bytes(&1i8)), - imp::Some(Blah::A) + imp::Ok(Blah::A) ); imp::assert_eq!( ::try_read_from(imp::IntoBytes::as_bytes(&0i8)), - imp::Some(Blah::B) + imp::Ok(Blah::B) ); imp::assert_eq!( ::try_read_from(imp::IntoBytes::as_bytes(&3i8)), - imp::Some(Blah::C) + imp::Ok(Blah::C) ); imp::assert_eq!( ::try_read_from(imp::IntoBytes::as_bytes(&6i8)), - imp::Some(Blah::D) + imp::Ok(Blah::D) ); - imp::assert_eq!(::try_read_from(&[]), imp::None); - imp::assert_eq!(::try_read_from(&[4]), imp::None); - imp::assert_eq!(::try_read_from(&[0, 0]), imp::None); + imp::assert!(::try_read_from(&[]).is_err()); + imp::assert!(::try_read_from(&[4]).is_err()); + 
imp::assert!(::try_read_from(&[0, 0]).is_err()); } #[derive( @@ -127,21 +127,20 @@ fn test_fieldless_but_not_unit_only() { let disc: [u8; SIZE] = ::zerocopy::transmute!(FieldlessButNotUnitOnly::A); imp::assert_eq!( ::try_read_from(&disc[..]), - imp::Some(FieldlessButNotUnitOnly::A) + imp::Ok(FieldlessButNotUnitOnly::A) ); let disc: [u8; SIZE] = ::zerocopy::transmute!(FieldlessButNotUnitOnly::B()); imp::assert_eq!( ::try_read_from(&disc[..]), - imp::Some(FieldlessButNotUnitOnly::B()) + imp::Ok(FieldlessButNotUnitOnly::B()) ); let disc: [u8; SIZE] = ::zerocopy::transmute!(FieldlessButNotUnitOnly::C {}); imp::assert_eq!( ::try_read_from(&disc[..]), - imp::Some(FieldlessButNotUnitOnly::C {}) + imp::Ok(FieldlessButNotUnitOnly::C {}) ); - imp::assert_eq!( - ::try_read_from(&[0xFF; SIZE][..]), - imp::None + imp::assert!( + ::try_read_from(&[0xFF; SIZE][..]).is_err() ); } @@ -161,20 +160,19 @@ fn test_weird_discriminants() { let disc: [u8; SIZE] = ::zerocopy::transmute!(WeirdDiscriminants::A); imp::assert_eq!( ::try_read_from(&disc[..]), - imp::Some(WeirdDiscriminants::A) + imp::Ok(WeirdDiscriminants::A) ); let disc: [u8; SIZE] = ::zerocopy::transmute!(WeirdDiscriminants::B); imp::assert_eq!( ::try_read_from(&disc[..]), - imp::Some(WeirdDiscriminants::B) + imp::Ok(WeirdDiscriminants::B) ); let disc: [u8; SIZE] = ::zerocopy::transmute!(WeirdDiscriminants::C); imp::assert_eq!( ::try_read_from(&disc[..]), - imp::Some(WeirdDiscriminants::C) + imp::Ok(WeirdDiscriminants::C) ); - imp::assert_eq!( - ::try_read_from(&[0xFF; SIZE][..]), - imp::None + imp::assert!( + ::try_read_from(&[0xFF; SIZE][..]).is_err() ); } diff --git a/zerocopy-derive/tests/struct_try_from_bytes.rs b/zerocopy-derive/tests/struct_try_from_bytes.rs index e36f7df208..9423ef7a6f 100644 --- a/zerocopy-derive/tests/struct_try_from_bytes.rs +++ b/zerocopy-derive/tests/struct_try_from_bytes.rs @@ -167,7 +167,7 @@ struct CPacked { fn c_packed() { let candidate = &[42u8, 0xFF, 0xFF, 0xFF, 0xFF]; let converted = 
::try_ref_from(candidate); - imp::assert_eq!(converted, imp::Some(&CPacked { a: 42, b: u32::MAX })); + imp::assert_eq!(converted, imp::Ok(&CPacked { a: 42, b: u32::MAX })); } #[derive(imp::TryFromBytes, imp::KnownLayout, imp::Immutable)] @@ -188,7 +188,7 @@ struct CPackedUnsized { fn c_packed_unsized() { let candidate = &[42u8, 0xFF, 0xFF, 0xFF, 0xFF]; let converted = ::try_ref_from(candidate); - imp::assert!(converted.is_some()); + imp::assert!(converted.is_ok()); } #[derive(imp::TryFromBytes)] @@ -209,13 +209,13 @@ struct PackedUnsized { fn packed_unsized() { let candidate = &[42u8, 0xFF, 0xFF, 0xFF, 0xFF]; let converted = ::try_ref_from(candidate); - imp::assert!(converted.is_some()); + imp::assert!(converted.is_ok()); let candidate = &[42u8, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF]; let converted = ::try_ref_from(candidate); - imp::assert!(converted.is_none()); + imp::assert!(converted.is_err()); let candidate = &[42u8, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF]; let converted = ::try_ref_from(candidate); - imp::assert!(converted.is_some()); + imp::assert!(converted.is_ok()); }