diff --git a/library/alloc/src/collections/vec_deque/iter.rs b/library/alloc/src/collections/vec_deque/iter.rs
index d3dbd10c863fb..c5775c2bd6545 100644
--- a/library/alloc/src/collections/vec_deque/iter.rs
+++ b/library/alloc/src/collections/vec_deque/iter.rs
@@ -144,6 +144,7 @@ impl<'a, T> Iterator for Iter<'a, T> {
     }
 
     #[inline]
+    #[core::contracts::requires(idx < self.len())]
     unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item {
         // Safety: The TrustedRandomAccess contract requires that callers only pass an index
         // that is in bounds.
diff --git a/library/alloc/src/collections/vec_deque/iter_mut.rs b/library/alloc/src/collections/vec_deque/iter_mut.rs
index 0c5f06e752b7b..6710af20b8341 100644
--- a/library/alloc/src/collections/vec_deque/iter_mut.rs
+++ b/library/alloc/src/collections/vec_deque/iter_mut.rs
@@ -208,6 +208,8 @@ impl<'a, T> Iterator for IterMut<'a, T> {
     }
 
     #[inline]
+    #[allow(unused_parens)]
+    #[core::contracts::requires(idx < self.len())]
     unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item {
         // Safety: The TrustedRandomAccess contract requires that callers only pass an index
         // that is in bounds.
diff --git a/library/alloc/src/lib.rs b/library/alloc/src/lib.rs
index fd54a375f3ea9..01c575dda0bcf 100644
--- a/library/alloc/src/lib.rs
+++ b/library/alloc/src/lib.rs
@@ -106,6 +106,7 @@
 #![feature(const_default)]
 #![feature(const_eval_select)]
 #![feature(const_heap)]
+#![feature(contracts)]
 #![feature(core_intrinsics)]
 #![feature(deprecated_suggestion)]
 #![feature(deref_pure_trait)]
diff --git a/library/alloc/src/vec/into_iter.rs b/library/alloc/src/vec/into_iter.rs
index 358bdeacae790..f58e722c8c62d 100644
--- a/library/alloc/src/vec/into_iter.rs
+++ b/library/alloc/src/vec/into_iter.rs
@@ -360,6 +360,7 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
         R::from_output(accum)
     }
 
+    #[core::contracts::requires(i < self.len())]
     unsafe fn __iterator_get_unchecked(&mut self, i: usize) -> Self::Item
     where
         Self: TrustedRandomAccessNoCoerce,
diff --git a/library/alloctests/lib.rs b/library/alloctests/lib.rs
index 0201c8752210c..085118f6c743a 100644
--- a/library/alloctests/lib.rs
+++ b/library/alloctests/lib.rs
@@ -11,6 +11,8 @@
 #![allow(rustdoc::redundant_explicit_links)]
 #![warn(rustdoc::unescaped_backticks)]
 #![deny(ffi_unwind_calls)]
+// permit use of experimental feature contracts
+#![allow(incomplete_features)]
 //
 // Library features:
 // tidy-alphabetical-start
@@ -20,6 +22,7 @@
 #![feature(assert_matches)]
 #![feature(char_internals)]
 #![feature(char_max_len)]
+#![feature(contracts)]
 #![feature(core_intrinsics)]
 #![feature(exact_size_is_empty)]
 #![feature(extend_one)]
diff --git a/library/core/src/alloc/layout.rs b/library/core/src/alloc/layout.rs
index 1f37c978fecfd..6e205bf1dee56 100644
--- a/library/core/src/alloc/layout.rs
+++ b/library/core/src/alloc/layout.rs
@@ -66,6 +66,14 @@ impl Layout {
     #[stable(feature = "alloc_layout", since = "1.28.0")]
     #[rustc_const_stable(feature = "const_alloc_layout_size_align", since = "1.50.0")]
     #[inline]
+    #[rustc_allow_const_fn_unstable(contracts)]
+    #[core::contracts::ensures(
+        move |result: &Result<Layout, LayoutError>|
+        result.is_err() || (
+            align.is_power_of_two() &&
+            size <= isize::MAX as usize - (align - 1) &&
+            result.as_ref().unwrap().size() == size &&
+            result.as_ref().unwrap().align() == align))]
     pub const fn from_size_align(size: usize, align: usize) -> Result<Self, LayoutError> {
         if Layout::is_size_align_valid(size, align) {
             // SAFETY: Layout::is_size_align_valid checks the preconditions for this call.
@@ -127,6 +135,10 @@ impl Layout {
     #[must_use]
     #[inline]
     #[track_caller]
+    #[rustc_allow_const_fn_unstable(contracts)]
+    #[core::contracts::requires(Layout::from_size_align(size, align).is_ok())]
+    #[core::contracts::ensures(
+        move |result: &Self| result.size() == size && result.align() == align)]
     pub const unsafe fn from_size_align_unchecked(size: usize, align: usize) -> Self {
         assert_unsafe_precondition!(
             check_library_ub,
@@ -167,6 +179,10 @@ impl Layout {
     #[rustc_const_stable(feature = "alloc_layout_const_new", since = "1.42.0")]
     #[must_use]
     #[inline]
+    #[rustc_allow_const_fn_unstable(contracts)]
+    #[core::contracts::ensures(
+        |result: &Self|
+        result.size() == mem::size_of::<T>() && result.align() == mem::align_of::<T>())]
     pub const fn new<T>() -> Self {
         let (size, align) = size_align::<T>();
         // SAFETY: if the type is instantiated, rustc already ensures that its
@@ -182,6 +198,10 @@ impl Layout {
     #[rustc_const_stable(feature = "const_alloc_layout", since = "1.85.0")]
     #[must_use]
     #[inline]
+    #[rustc_allow_const_fn_unstable(contracts)]
+    #[core::contracts::requires(mem::align_of_val(t).is_power_of_two())]
+    // FIXME: requires `&self` to be `'static`
+    // #[core::contracts::ensures(move |result: &Self| result.align() == mem::align_of_val(t))]
     pub const fn for_value<T: ?Sized>(t: &T) -> Self {
         let (size, align) = (size_of_val(t), align_of_val(t));
         // SAFETY: see rationale in `new` for why this is using the unsafe variant
@@ -217,6 +237,8 @@ impl Layout {
     /// [extern type]: ../../unstable-book/language-features/extern-types.html
     #[unstable(feature = "layout_for_ptr", issue = "69835")]
     #[must_use]
+    #[rustc_allow_const_fn_unstable(contracts)]
+    #[core::contracts::ensures(|result: &Self| result.align().is_power_of_two())]
     pub const unsafe fn for_value_raw<T: ?Sized>(t: *const T) -> Self {
         // SAFETY: we pass along the prerequisites of these functions to the caller
         let (size, align) = unsafe { (mem::size_of_val_raw(t), mem::align_of_val_raw(t)) };
@@ -233,6 +255,8 @@ impl Layout {
     #[unstable(feature = "alloc_layout_extra", issue = "55724")]
     #[must_use]
     #[inline]
+    #[rustc_allow_const_fn_unstable(contracts)]
+    #[core::contracts::ensures(|result: &NonNull<u8>| result.is_aligned())]
     pub const fn dangling(&self) -> NonNull<u8> {
         NonNull::without_provenance(self.align.as_nonzero())
     }
@@ -254,6 +278,12 @@ impl Layout {
     #[stable(feature = "alloc_layout_manipulation", since = "1.44.0")]
     #[rustc_const_stable(feature = "const_alloc_layout", since = "1.85.0")]
     #[inline]
+    #[rustc_allow_const_fn_unstable(contracts)]
+    #[core::contracts::ensures(
+        move |result: &Result<Layout, LayoutError>|
+        result.is_err() || (
+            result.as_ref().unwrap().align() >= align &&
+            result.as_ref().unwrap().align().is_power_of_two()))]
     pub const fn align_to(&self, align: usize) -> Result<Self, LayoutError> {
         if let Some(align) = Alignment::new(align) {
             Layout::from_size_alignment(self.size, Alignment::max(self.align, align))
@@ -282,6 +312,8 @@ impl Layout {
     #[must_use = "this returns the padding needed, \
                   without modifying the `Layout`"]
     #[inline]
+    #[rustc_allow_const_fn_unstable(contracts)]
+    #[core::contracts::ensures(move |result| *result <= align)]
     pub const fn padding_needed_for(&self, align: usize) -> usize {
         // FIXME: Can we just change the type on this to `Alignment`?
         let Some(align) = Alignment::new(align) else { return usize::MAX };
@@ -330,6 +362,14 @@ impl Layout {
     #[must_use = "this returns a new `Layout`, \
                   without modifying the original"]
     #[inline]
+    // FIXME: requires `&self` to be `'static`
+    // #[rustc_allow_const_fn_unstable(contracts)]
+    // #[core::contracts::ensures(
+    //     move |result: &Layout|
+    //     result.size() >= self.size() &&
+    //     result.align() == self.align() &&
+    //     result.size() % result.align() == 0 &&
+    //     self.size() + self.padding_needed_for(self.align()) == result.size())]
     pub const fn pad_to_align(&self) -> Layout {
         // This cannot overflow. Quoting from the invariant of Layout:
         // > `size`, when rounded up to the nearest multiple of `align`,
@@ -370,6 +410,12 @@ impl Layout {
     /// ```
     #[unstable(feature = "alloc_layout_extra", issue = "55724")]
     #[inline]
+    #[rustc_allow_const_fn_unstable(contracts)]
+    #[core::contracts::ensures(
+        move |result: &Result<(Self, usize), LayoutError>|
+        result.is_err() || (
+            (n == 0 || result.as_ref().unwrap().0.size() % n == 0) &&
+            result.as_ref().unwrap().0.size() == n * result.as_ref().unwrap().1))]
     pub const fn repeat(&self, n: usize) -> Result<(Self, usize), LayoutError> {
         let padded = self.pad_to_align();
         if let Ok(repeated) = padded.repeat_packed(n) {
@@ -427,6 +473,15 @@ impl Layout {
     #[stable(feature = "alloc_layout_manipulation", since = "1.44.0")]
     #[rustc_const_stable(feature = "const_alloc_layout", since = "1.85.0")]
     #[inline]
+    // FIXME: requires `&self` to be `'static`
+    // #[rustc_allow_const_fn_unstable(contracts)]
+    // #[core::contracts::ensures(
+    //     move |result: &Result<(Self, usize), LayoutError>|
+    //     result.is_err() || (
+    //         result.as_ref().unwrap().0.align() == cmp::max(self.align(), next.align()) &&
+    //         result.as_ref().unwrap().0.size() >= self.size() + next.size() &&
+    //         result.as_ref().unwrap().1 >= self.size() &&
+    //         result.as_ref().unwrap().1 <= result.as_ref().unwrap().0.size()))]
     pub const fn extend(&self, next: Self) -> Result<(Self, usize), LayoutError> {
         let new_align = Alignment::max(self.align, next.align);
         let offset = self.size_rounded_up_to_custom_align(next.align);
@@ -458,6 +513,13 @@ impl Layout {
     /// On arithmetic overflow, returns `LayoutError`.
     #[unstable(feature = "alloc_layout_extra", issue = "55724")]
     #[inline]
+    // FIXME: requires `&self` to be `'static`
+    // #[rustc_allow_const_fn_unstable(contracts)]
+    // #[core::contracts::ensures(
+    //     move |result: &Result<Layout, LayoutError>|
+    //     result.is_err() || (
+    //         result.as_ref().unwrap().size() == n * self.size() &&
+    //         result.as_ref().unwrap().align() == self.align()))]
     pub const fn repeat_packed(&self, n: usize) -> Result<Self, LayoutError> {
         if let Some(size) = self.size.checked_mul(n) {
             // The safe constructor is called here to enforce the isize size limit.
@@ -475,6 +537,13 @@ impl Layout {
     /// On arithmetic overflow, returns `LayoutError`.
     #[unstable(feature = "alloc_layout_extra", issue = "55724")]
     #[inline]
+    // FIXME: requires `&self` to be `'static`
+    // #[rustc_allow_const_fn_unstable(contracts)]
+    // #[core::contracts::ensures(
+    //     move |result: &Result<Layout, LayoutError>|
+    //     result.is_err() || (
+    //         result.as_ref().unwrap().size() == self.size() + next.size() &&
+    //         result.as_ref().unwrap().align() == self.align()))]
     pub const fn extend_packed(&self, next: Self) -> Result<Self, LayoutError> {
         // SAFETY: each `size` is at most `isize::MAX == usize::MAX/2`, so the
         // sum is at most `usize::MAX/2*2 == usize::MAX - 1`, and cannot overflow.
@@ -490,6 +559,12 @@ impl Layout {
     #[stable(feature = "alloc_layout_manipulation", since = "1.44.0")]
     #[rustc_const_stable(feature = "const_alloc_layout", since = "1.85.0")]
     #[inline]
+    #[rustc_allow_const_fn_unstable(contracts)]
+    #[core::contracts::ensures(
+        move |result: &Result<Layout, LayoutError>|
+        result.is_err() || (
+            result.as_ref().unwrap().size() == n * mem::size_of::<T>() &&
+            result.as_ref().unwrap().align() == mem::align_of::<T>()))]
     pub const fn array<T>(n: usize) -> Result<Self, LayoutError> {
         // Reduce the amount of code we need to monomorphize per `T`.
         return inner(T::LAYOUT, n);
diff --git a/library/core/src/array/iter.rs b/library/core/src/array/iter.rs
index 1c1f4d78c03fd..77276c543545a 100644
--- a/library/core/src/array/iter.rs
+++ b/library/core/src/array/iter.rs
@@ -138,6 +138,8 @@ impl<T, const N: usize> IntoIter<T, N> {
     /// ```
     #[unstable(feature = "array_into_iter_constructors", issue = "91583")]
     #[inline]
+    #[rustc_allow_const_fn_unstable(contracts)]
+    #[core::contracts::requires(initialized.start <= initialized.end && initialized.end <= N)]
     pub const unsafe fn new_unchecked(
         buffer: [MaybeUninit<T>; N],
         initialized: Range<usize>,
@@ -279,6 +281,7 @@ impl<T, const N: usize> Iterator for IntoIter<T, N> {
     }
 
     #[inline]
+    #[core::contracts::requires(idx < self.len())]
     unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item {
         // SAFETY: The caller must provide an idx that is in bound of the remainder.
         let elem_ref = unsafe { self.as_mut_slice().get_unchecked_mut(idx) };
diff --git a/library/core/src/ascii/ascii_char.rs b/library/core/src/ascii/ascii_char.rs
index d77fafed2039b..bbed0adf9fbc4 100644
--- a/library/core/src/ascii/ascii_char.rs
+++ b/library/core/src/ascii/ascii_char.rs
@@ -458,6 +458,10 @@ impl AsciiChar {
     /// or returns `None` if it's too large.
     #[unstable(feature = "ascii_char", issue = "110998")]
     #[inline]
+    #[rustc_allow_const_fn_unstable(contracts)]
+    #[core::contracts::ensures(
+        move |result: &Option<AsciiChar>|
+        (b <= 127) == (result.is_some() && result.unwrap() as u8 == b))]
     pub const fn from_u8(b: u8) -> Option<Self> {
         if b <= 127 {
             // SAFETY: Just checked that `b` is in-range
@@ -475,6 +479,9 @@ impl AsciiChar {
     /// `b` must be in `0..=127`, or else this is UB.
     #[unstable(feature = "ascii_char", issue = "110998")]
     #[inline]
+    #[rustc_allow_const_fn_unstable(contracts)]
+    #[core::contracts::requires(b <= 127)]
+    #[core::contracts::ensures(move |result: &Self| *result as u8 == b)]
     pub const unsafe fn from_u8_unchecked(b: u8) -> Self {
         // SAFETY: Our safety precondition is that `b` is in-range.
         unsafe { transmute(b) }
@@ -513,6 +520,11 @@ impl AsciiChar {
     #[unstable(feature = "ascii_char", issue = "110998")]
     #[inline]
     #[track_caller]
+    // Only `d < 64` is required for safety as described above, but we use `d < 10` as in the
+    // `assert_unsafe_precondition` inside. See https://github.com/rust-lang/rust/pull/129374 for
+    // some context about the discrepancy.
+    #[rustc_allow_const_fn_unstable(contracts)]
+    #[core::contracts::requires(d < 10)]
     pub const unsafe fn digit_unchecked(d: u8) -> Self {
         assert_unsafe_precondition!(
             check_library_ub,
@@ -532,6 +544,8 @@ impl AsciiChar {
     /// Gets this ASCII character as a byte.
#[unstable(feature = "ascii_char", issue = "110998")] #[inline] + #[rustc_allow_const_fn_unstable(contracts)] + #[core::contracts::ensures(|result: &u8| *result <= 127)] pub const fn to_u8(self) -> u8 { self as u8 } diff --git a/library/core/src/char/convert.rs b/library/core/src/char/convert.rs index 6380f42d320c6..a64efbff5e332 100644 --- a/library/core/src/char/convert.rs +++ b/library/core/src/char/convert.rs @@ -23,6 +23,9 @@ pub(super) const fn from_u32(i: u32) -> Option { #[must_use] #[allow(unnecessary_transmutes)] #[track_caller] +#[rustc_allow_const_fn_unstable(contracts)] +#[core::contracts::requires(char_try_from_u32(i).is_ok())] +#[core::contracts::ensures(move |result: &char| *result as u32 == i)] pub(super) const unsafe fn from_u32_unchecked(i: u32) -> char { // SAFETY: the caller must guarantee that `i` is a valid char value. unsafe { diff --git a/library/core/src/char/mod.rs b/library/core/src/char/mod.rs index 82a3f6f916be3..576d849b03b94 100644 --- a/library/core/src/char/mod.rs +++ b/library/core/src/char/mod.rs @@ -138,6 +138,8 @@ pub const fn from_u32(i: u32) -> Option { #[rustc_const_stable(feature = "const_char_from_u32_unchecked", since = "1.81.0")] #[must_use] #[inline] +#[rustc_allow_const_fn_unstable(contracts)] +#[core::contracts::requires(i <= 0x10FFFF && (i < 0xD800 || i > 0xDFFF))] pub const unsafe fn from_u32_unchecked(i: u32) -> char { // SAFETY: the safety contract must be upheld by the caller. unsafe { self::convert::from_u32_unchecked(i) } @@ -399,6 +401,7 @@ macro_rules! casemappingiter_impls { self.0.advance_by(n) } + #[core::contracts::requires(idx < self.0.len())] unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item { // SAFETY: just forwarding requirements to caller unsafe { self.0.__iterator_get_unchecked(idx) } @@ -533,6 +536,7 @@ impl Iterator for CaseMappingIter { self.0.advance_by(n) } + #[core::contracts::requires(idx < self.len())] unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item { // SAFETY: just forwarding requirements to caller unsafe { self.0.__iterator_get_unchecked(idx) } diff --git a/library/core/src/ffi/c_str.rs b/library/core/src/ffi/c_str.rs index 09d9b160700ca..359cdb83e14cb 100644 --- a/library/core/src/ffi/c_str.rs +++ b/library/core/src/ffi/c_str.rs @@ -250,6 +250,13 @@ impl CStr { #[must_use] #[stable(feature = "rust1", since = "1.0.0")] #[rustc_const_stable(feature = "const_cstr_from_ptr", since = "1.81.0")] + #[rustc_allow_const_fn_unstable(contracts)] + #[core::contracts::requires(!ptr.is_null())] + #[core::contracts::ensures( + |result: &&CStr| + !result.inner.is_empty() && + result.inner[result.inner.len() - 1] == 0 && + !result.inner[..result.inner.len() - 1].contains(&0))] pub const unsafe fn from_ptr<'a>(ptr: *const c_char) -> &'a CStr { // SAFETY: The caller has provided a pointer that points to a valid C // string with a NUL terminator less than `isize::MAX` from `ptr`. 
@@ -385,6 +392,14 @@ impl CStr {
     #[stable(feature = "cstr_from_bytes", since = "1.10.0")]
     #[rustc_const_stable(feature = "const_cstr_unchecked", since = "1.59.0")]
     #[rustc_allow_const_fn_unstable(const_eval_select)]
+    #[rustc_allow_const_fn_unstable(contracts)]
+    #[core::contracts::requires(
+        !bytes.is_empty() && bytes[bytes.len() - 1] == 0 && !bytes[..bytes.len()-1].contains(&0))]
+    #[core::contracts::ensures(
+        |result: &&CStr|
+        !result.inner.is_empty() &&
+        result.inner[result.inner.len() - 1] == 0 &&
+        !result.inner[..result.inner.len() - 1].contains(&0))]
     pub const unsafe fn from_bytes_with_nul_unchecked(bytes: &[u8]) -> &CStr {
         const_eval_select!(
             @capture { bytes: &[u8] } -> &CStr:
@@ -723,6 +738,12 @@ impl const AsRef<CStr> for CStr {
 #[inline]
 #[unstable(feature = "cstr_internals", issue = "none")]
 #[rustc_allow_const_fn_unstable(const_eval_select)]
+#[rustc_allow_const_fn_unstable(contracts)]
+#[core::contracts::ensures(
+    move |&result|
+    result < isize::MAX as usize &&
+    // SAFETY: result is within isize::MAX
+    unsafe { *ptr.add(result) } == 0)]
 const unsafe fn strlen(ptr: *const c_char) -> usize {
     const_eval_select!(
         @capture { s: *const c_char = ptr } -> usize:
diff --git a/library/core/src/intrinsics/fallback.rs b/library/core/src/intrinsics/fallback.rs
index 932537f2581f8..aa1e836f08bef 100644
--- a/library/core/src/intrinsics/fallback.rs
+++ b/library/core/src/intrinsics/fallback.rs
@@ -130,6 +130,7 @@ macro_rules! impl_disjoint_bitor {
         impl const DisjointBitOr for $t {
             #[cfg_attr(miri, track_caller)]
             #[inline]
+            #[core::contracts::requires((self & other) == zero!($t))]
             unsafe fn disjoint_bitor(self, other: Self) -> Self {
                 // Note that the assume here is required for UB detection in Miri!
 
diff --git a/library/core/src/intrinsics/mod.rs b/library/core/src/intrinsics/mod.rs
index 4cee77fda4fba..d0748b05177da 100644
--- a/library/core/src/intrinsics/mod.rs
+++ b/library/core/src/intrinsics/mod.rs
@@ -2563,6 +2563,8 @@ pub const fn is_val_statically_known<T: Copy>(_arg: T) -> bool {
 #[inline]
 #[rustc_intrinsic]
 #[rustc_intrinsic_const_stable_indirect]
+#[rustc_allow_const_fn_unstable(contracts)]
+#[core::contracts::requires(x.addr() != y.addr() || core::mem::size_of::<T>() == 0)]
 pub const unsafe fn typed_swap_nonoverlapping<T>(x: *mut T, y: *mut T) {
     // SAFETY: The caller provided single non-overlapping items behind
     // pointers, so swapping them with `count: 1` is fine.
diff --git a/library/core/src/iter/adapters/cloned.rs b/library/core/src/iter/adapters/cloned.rs
index aea6d64281aec..7129c192bc495 100644
--- a/library/core/src/iter/adapters/cloned.rs
+++ b/library/core/src/iter/adapters/cloned.rs
@@ -61,6 +61,7 @@ where
         self.it.map(T::clone).fold(init, f)
     }
 
+    #[core::contracts::requires(idx < self.it.size_hint().0)]
     unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> T
     where
         Self: TrustedRandomAccessNoCoerce,
diff --git a/library/core/src/iter/adapters/copied.rs b/library/core/src/iter/adapters/copied.rs
index 23e4e25ab5388..1307ce9892067 100644
--- a/library/core/src/iter/adapters/copied.rs
+++ b/library/core/src/iter/adapters/copied.rs
@@ -92,6 +92,7 @@ where
         self.it.advance_by(n)
     }
 
+    #[core::contracts::requires(idx < self.it.size_hint().0)]
     unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> T
     where
         Self: TrustedRandomAccessNoCoerce,
diff --git a/library/core/src/iter/adapters/enumerate.rs b/library/core/src/iter/adapters/enumerate.rs
index f7b9f0b7a5e9d..3fd8111343a7f 100644
--- a/library/core/src/iter/adapters/enumerate.rs
+++ b/library/core/src/iter/adapters/enumerate.rs
@@ -160,6 +160,7 @@ where
 
     #[rustc_inherit_overflow_checks]
     #[inline]
+    #[core::contracts::requires(idx < self.iter.size_hint().0)]
     unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> <Self as Iterator>::Item
     where
         Self: TrustedRandomAccessNoCoerce,
diff --git a/library/core/src/iter/adapters/fuse.rs b/library/core/src/iter/adapters/fuse.rs
index 0072a95e8dfe0..0efa551e5f762 100644
--- a/library/core/src/iter/adapters/fuse.rs
+++ b/library/core/src/iter/adapters/fuse.rs
@@ -109,6 +109,8 @@ where
     }
 
     #[inline]
+    #[core::contracts::requires(
+        self.iter.is_some() && idx < self.iter.as_ref().unwrap().size_hint().0)]
     unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item
     where
         Self: TrustedRandomAccessNoCoerce,
diff --git a/library/core/src/iter/adapters/map.rs b/library/core/src/iter/adapters/map.rs
index 007c2d5acc2d0..7323b8defb96e 100644
--- a/library/core/src/iter/adapters/map.rs
+++ b/library/core/src/iter/adapters/map.rs
@@ -129,6 +129,7 @@ where
     }
 
     #[inline]
+    #[core::contracts::requires(idx < self.iter.size_hint().0)]
     unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> B
     where
         Self: TrustedRandomAccessNoCoerce,
diff --git a/library/core/src/iter/adapters/skip.rs b/library/core/src/iter/adapters/skip.rs
index 55c4a7f14fbd6..a35185fa3aeb3 100644
--- a/library/core/src/iter/adapters/skip.rs
+++ b/library/core/src/iter/adapters/skip.rs
@@ -158,6 +158,7 @@ where
     }
 
     #[doc(hidden)]
+    #[core::contracts::requires(idx + self.n < self.iter.size_hint().0)]
     unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item
     where
         Self: TrustedRandomAccessNoCoerce,
diff --git a/library/core/src/iter/adapters/zip.rs b/library/core/src/iter/adapters/zip.rs
index c5e199c30821d..1c8f690b4da01 100644
--- a/library/core/src/iter/adapters/zip.rs
+++ b/library/core/src/iter/adapters/zip.rs
@@ -104,6 +104,7 @@ where
     }
 
     #[inline]
+    #[core::contracts::requires(idx < self.size_hint().0)]
     unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item
     where
         Self: TrustedRandomAccessNoCoerce,
diff --git a/library/core/src/iter/range.rs b/library/core/src/iter/range.rs
index 9e43d5688cecc..9c173be389d1f 100644
--- a/library/core/src/iter/range.rs
+++ b/library/core/src/iter/range.rs
@@ -184,12 +184,14 @@ pub trait Step: Clone + PartialOrd + Sized {
 // than the signed::MAX value. Therefore `as` casting to the signed type would be incorrect.
 macro_rules! step_signed_methods {
     ($unsigned: ty) => {
+        #[core::contracts::requires(start.checked_add_unsigned(n as $unsigned).is_some())]
         #[inline]
         unsafe fn forward_unchecked(start: Self, n: usize) -> Self {
             // SAFETY: the caller has to guarantee that `start + n` doesn't overflow.
             unsafe { start.checked_add_unsigned(n as $unsigned).unwrap_unchecked() }
         }
 
+        #[core::contracts::requires(start.checked_sub_unsigned(n as $unsigned).is_some())]
         #[inline]
         unsafe fn backward_unchecked(start: Self, n: usize) -> Self {
             // SAFETY: the caller has to guarantee that `start - n` doesn't overflow.
@@ -200,12 +202,14 @@ macro_rules! step_signed_methods {
 
 macro_rules! step_unsigned_methods {
     () => {
+        #[core::contracts::requires(start.checked_add(n as Self).is_some())]
         #[inline]
         unsafe fn forward_unchecked(start: Self, n: usize) -> Self {
             // SAFETY: the caller has to guarantee that `start + n` doesn't overflow.
             unsafe { start.unchecked_add(n as Self) }
         }
 
+        #[core::contracts::requires(start >= (n as Self))]
         #[inline]
         unsafe fn backward_unchecked(start: Self, n: usize) -> Self {
             // SAFETY: the caller has to guarantee that `start - n` doesn't overflow.
@@ -495,6 +499,11 @@ impl Step for char {
         Some(unsafe { char::from_u32_unchecked(res) })
     }
 
+    #[core::contracts::requires(
+        (start as u32).checked_add(count as u32).is_some_and(|dist|
+            (start as u32) >= 0xD800 ||
+            dist < 0xD800 ||
+            dist.checked_add(0x800).is_some()))]
     #[inline]
     unsafe fn forward_unchecked(start: char, count: usize) -> char {
         let start = start as u32;
@@ -511,6 +520,11 @@ impl Step for char {
         unsafe { char::from_u32_unchecked(res) }
     }
 
+    #[core::contracts::requires(
+        (start as u32).checked_sub(count as u32).is_some_and(|dist|
+            (start as u32) < 0xE000 ||
+            dist >= 0xE000 ||
+            dist.checked_sub(0x800).is_some()))]
     #[inline]
     unsafe fn backward_unchecked(start: char, count: usize) -> char {
         let start = start as u32;
@@ -549,6 +563,7 @@ impl Step for AsciiChar {
         Some(unsafe { AsciiChar::from_u8_unchecked(end) })
     }
 
+    #[core::contracts::requires(count < 256 && start.to_u8().checked_add(count as u8).is_some())]
     #[inline]
     unsafe fn forward_unchecked(start: AsciiChar, count: usize) -> AsciiChar {
         // SAFETY: Caller asserts that result is a valid ASCII character,
@@ -559,6 +574,7 @@ impl Step for AsciiChar {
         unsafe { AsciiChar::from_u8_unchecked(end) }
     }
 
+    #[core::contracts::requires(count < 256 && start.to_u8().checked_sub(count as u8).is_some())]
     #[inline]
     unsafe fn backward_unchecked(start: AsciiChar, count: usize) -> AsciiChar {
         // SAFETY: Caller asserts that result is a valid ASCII character,
@@ -587,6 +603,7 @@ impl Step for Ipv4Addr {
         u32::backward_checked(start.to_bits(), count).map(Ipv4Addr::from_bits)
     }
 
+    #[core::contracts::requires(start.to_bits().checked_add(count as u32).is_some())]
     #[inline]
     unsafe fn forward_unchecked(start: Ipv4Addr, count: usize) -> Ipv4Addr {
         // SAFETY: Since u32 and Ipv4Addr are losslessly convertible,
@@ -594,6 +611,7 @@ impl Step for Ipv4Addr {
         Ipv4Addr::from_bits(unsafe { u32::forward_unchecked(start.to_bits(), count) })
     }
 
+    #[core::contracts::requires(start.to_bits().checked_sub(count as u32).is_some())]
     #[inline]
     unsafe fn backward_unchecked(start: Ipv4Addr, count: usize) -> Ipv4Addr {
         // SAFETY: Since u32 and Ipv4Addr are losslessly convertible,
@@ -619,6 +637,7 @@ impl Step for Ipv6Addr {
         u128::backward_checked(start.to_bits(), count).map(Ipv6Addr::from_bits)
     }
 
+    #[core::contracts::requires(start.to_bits().checked_add(count as u128).is_some())]
     #[inline]
     unsafe fn forward_unchecked(start: Ipv6Addr, count: usize) -> Ipv6Addr {
         // SAFETY: Since u128 and Ipv6Addr are losslessly convertible,
@@ -626,6 +645,7 @@ impl Step for Ipv6Addr {
         Ipv6Addr::from_bits(unsafe { u128::forward_unchecked(start.to_bits(), count) })
     }
 
+    #[core::contracts::requires(start.to_bits().checked_sub(count as u128).is_some())]
     #[inline]
     unsafe fn backward_unchecked(start: Ipv6Addr, count: usize) -> Ipv6Addr {
         // SAFETY: Since u128 and Ipv6Addr are losslessly convertible,
@@ -905,6 +925,7 @@ impl<A: Step> Iterator for ops::Range<A> {
     }
 
     #[inline]
+    #[core::contracts::requires(idx < self.size_hint().0)]
     unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item
     where
         Self: TrustedRandomAccessNoCoerce,
diff --git a/library/core/src/iter/traits/iterator.rs b/library/core/src/iter/traits/iterator.rs
index 695f8d1e195e9..4e709210ab98f 100644
--- a/library/core/src/iter/traits/iterator.rs
+++ b/library/core/src/iter/traits/iterator.rs
@@ -1710,6 +1710,7 @@ pub trait Iterator {
     /// ```
    #[inline]
     #[unstable(feature = "iter_map_windows", reason = "recently added", issue = "87155")]
+    #[core::contracts::requires(N > 0)]
     fn map_windows<F, R, const N: usize>(self, f: F) -> MapWindows<Self, F, N>
     where
         Self: Sized,
diff --git a/library/core/src/lib.rs b/library/core/src/lib.rs
index e213e1d91a75d..260b846872842 100644
--- a/library/core/src/lib.rs
+++ b/library/core/src/lib.rs
@@ -107,6 +107,7 @@
 #![feature(const_destruct)]
 #![feature(const_eval_select)]
 #![feature(const_select_unpredictable)]
+#![feature(contracts)]
 #![feature(core_intrinsics)]
 #![feature(coverage_attribute)]
 #![feature(disjoint_bitor)]
diff --git a/library/core/src/num/f128.rs b/library/core/src/num/f128.rs
index e7101537b298f..b69579ad1b30e 100644
--- a/library/core/src/num/f128.rs
+++ b/library/core/src/num/f128.rs
@@ -875,6 +875,7 @@ impl f128 {
     #[inline]
     #[unstable(feature = "f128", issue = "116909")]
     #[must_use = "this returns the result of the operation, without modifying the original"]
+    #[core::contracts::requires(self.is_finite())]
     pub unsafe fn to_int_unchecked<Int>(self) -> Int
     where
         Self: FloatToInt<Int>,
diff --git a/library/core/src/num/f16.rs b/library/core/src/num/f16.rs
index aa8342a22ad58..37171da33795b 100644
--- a/library/core/src/num/f16.rs
+++ b/library/core/src/num/f16.rs
@@ -862,6 +862,7 @@ impl f16 {
     #[inline]
     #[unstable(feature = "f16", issue = "116909")]
     #[must_use = "this returns the result of the operation, without modifying the original"]
+    #[core::contracts::requires(self.is_finite())]
     pub unsafe fn to_int_unchecked<Int>(self) -> Int
     where
         Self: FloatToInt<Int>,
diff --git a/library/core/src/num/f32.rs b/library/core/src/num/f32.rs
index 3070e1dedbe43..81d7cbe5c54a3 100644
--- a/library/core/src/num/f32.rs
+++ b/library/core/src/num/f32.rs
@@ -1066,6 +1066,7 @@ impl f32 {
                   without modifying the original"]
     #[stable(feature = "float_approx_unchecked_to", since = "1.44.0")]
     #[inline]
+    #[core::contracts::requires(self.is_finite())]
     pub unsafe fn to_int_unchecked<Int>(self) -> Int
     where
         Self: FloatToInt<Int>,
diff --git a/library/core/src/num/f64.rs b/library/core/src/num/f64.rs
index dc8ccc551b2da..ec49d6f9311c5 100644
--- a/library/core/src/num/f64.rs
+++ b/library/core/src/num/f64.rs
@@ -1065,6 +1065,7 @@ impl f64 {
                   without modifying the original"]
     #[stable(feature = "float_approx_unchecked_to", since = "1.44.0")]
     #[inline]
+    #[core::contracts::requires(self.is_finite())]
     pub unsafe fn to_int_unchecked<Int>(self) -> Int
     where
         Self: FloatToInt<Int>,
diff --git a/library/core/src/num/int_macros.rs b/library/core/src/num/int_macros.rs
index c3460a6409069..24019eef2dd14 100644
--- a/library/core/src/num/int_macros.rs
+++ b/library/core/src/num/int_macros.rs
@@ -554,6 +554,8 @@ macro_rules! int_impl {
                       without modifying the original"]
         #[inline(always)]
         #[track_caller]
+        #[rustc_allow_const_fn_unstable(contracts)]
+        #[core::contracts::requires(!self.overflowing_add(rhs).1)]
         pub const unsafe fn unchecked_add(self, rhs: Self) -> Self {
             assert_unsafe_precondition!(
                 check_language_ub,
@@ -694,6 +696,8 @@ macro_rules! int_impl {
                       without modifying the original"]
         #[inline(always)]
         #[track_caller]
+        #[rustc_allow_const_fn_unstable(contracts)]
+        #[core::contracts::requires(!self.overflowing_sub(rhs).1)]
         pub const unsafe fn unchecked_sub(self, rhs: Self) -> Self {
             assert_unsafe_precondition!(
                 check_language_ub,
@@ -834,6 +838,8 @@ macro_rules! int_impl {
                       without modifying the original"]
         #[inline(always)]
         #[track_caller]
+        #[rustc_allow_const_fn_unstable(contracts)]
+        #[core::contracts::requires(!self.overflowing_mul(rhs).1)]
         pub const unsafe fn unchecked_mul(self, rhs: Self) -> Self {
             assert_unsafe_precondition!(
                 check_language_ub,
@@ -1252,6 +1258,9 @@ macro_rules! int_impl {
                       without modifying the original"]
         #[inline(always)]
         #[track_caller]
+        #[rustc_allow_const_fn_unstable(contracts)]
+        #[core::contracts::requires(self != <$SelfT>::MIN)]
+        #[core::contracts::ensures(move |result| *result == -self)]
         pub const unsafe fn unchecked_neg(self) -> Self {
             assert_unsafe_precondition!(
                 check_language_ub,
@@ -1372,6 +1381,8 @@ macro_rules! int_impl {
                       without modifying the original"]
         #[inline(always)]
         #[track_caller]
+        #[rustc_allow_const_fn_unstable(contracts)]
+        #[core::contracts::requires(rhs < <$ActualT>::BITS)]
         pub const unsafe fn unchecked_shl(self, rhs: u32) -> Self {
             assert_unsafe_precondition!(
                 check_language_ub,
@@ -1547,6 +1558,8 @@ macro_rules! int_impl {
                       without modifying the original"]
         #[inline(always)]
         #[track_caller]
+        #[rustc_allow_const_fn_unstable(contracts)]
+        #[core::contracts::requires(rhs < <$ActualT>::BITS)]
         pub const unsafe fn unchecked_shr(self, rhs: u32) -> Self {
             assert_unsafe_precondition!(
                 check_language_ub,
@@ -2278,6 +2291,8 @@ macro_rules! int_impl {
         #[must_use = "this returns the result of the operation, \
                       without modifying the original"]
         #[inline(always)]
+        #[rustc_allow_const_fn_unstable(contracts)]
+        #[core::contracts::ensures(move |result| *result == self << (rhs & (Self::BITS - 1)))]
         pub const fn wrapping_shl(self, rhs: u32) -> Self {
             // SAFETY: the masking by the bitsize of the type ensures that we do not shift
             // out of bounds
@@ -2305,6 +2320,8 @@ macro_rules! int_impl {
         #[must_use = "this returns the result of the operation, \
                       without modifying the original"]
         #[inline(always)]
+        #[rustc_allow_const_fn_unstable(contracts)]
+        #[core::contracts::ensures(move |result| *result == self >> (rhs & (Self::BITS - 1)))]
         pub const fn wrapping_shr(self, rhs: u32) -> Self {
             // SAFETY: the masking by the bitsize of the type ensures that we do not shift
             // out of bounds
diff --git a/library/core/src/num/mod.rs b/library/core/src/num/mod.rs
index 35141dfeb3a6d..5b6ecc643c584 100644
--- a/library/core/src/num/mod.rs
+++ b/library/core/src/num/mod.rs
@@ -505,6 +505,8 @@ impl u8 {
     #[must_use]
     #[unstable(feature = "ascii_char", issue = "110998")]
     #[inline]
+    #[rustc_allow_const_fn_unstable(contracts)]
+    #[core::contracts::requires(self.is_ascii())]
     pub const unsafe fn as_ascii_unchecked(&self) -> ascii::Char {
         assert_unsafe_precondition!(
             check_library_ub,
diff --git a/library/core/src/num/niche_types.rs b/library/core/src/num/niche_types.rs
index 9ac0eb72bdcbc..d15f1dbdfb3e8 100644
--- a/library/core/src/num/niche_types.rs
+++ b/library/core/src/num/niche_types.rs
@@ -33,6 +33,12 @@ macro_rules! define_valid_range_type {
 
         impl $name {
             #[inline]
+            #[rustc_allow_const_fn_unstable(contracts)]
+            #[core::contracts::ensures(
+                |result: &Option<$name>|
+                result.is_none() || (
+                    (result.unwrap().as_inner() as $uint) >= ($low as $uint) &&
+                    (result.unwrap().as_inner() as $uint) <= ($high as $uint)))]
             pub const fn new(val: $int) -> Option<Self> {
                 if (val as $uint) >= ($low as $uint) && (val as $uint) <= ($high as $uint) {
                     // SAFETY: just checked the inclusive range
@@ -49,12 +55,19 @@ macro_rules! define_valid_range_type {
             /// Immediate language UB if `val` is not within the valid range for this
             /// type, as it violates the validity invariant.
             #[inline]
+            #[rustc_allow_const_fn_unstable(contracts)]
+            #[core::contracts::requires(
+                (val as $uint) >= ($low as $uint) && (val as $uint) <= ($high as $uint))]
             pub const unsafe fn new_unchecked(val: $int) -> Self {
                 // SAFETY: Caller promised that `val` is within the valid range.
                 unsafe { $name(val) }
             }
 
             #[inline]
+            #[rustc_allow_const_fn_unstable(contracts)]
+            #[core::contracts::ensures(
+                |result|
+                (*result as $uint) >= ($low as $uint) && (*result as $uint) <= ($high as $uint))]
             pub const fn as_inner(self) -> $int {
                 // SAFETY: This is a transparent wrapper, so unwrapping it is sound
                 // (Not using `.0` due to MCP#807.)
diff --git a/library/core/src/num/nonzero.rs b/library/core/src/num/nonzero.rs
index efb0665b7f461..22234d472c248 100644
--- a/library/core/src/num/nonzero.rs
+++ b/library/core/src/num/nonzero.rs
@@ -411,6 +411,13 @@ where
     #[must_use]
     #[inline]
     #[track_caller]
+    #[rustc_allow_const_fn_unstable(contracts)]
+    #[core::contracts::requires({
+        let size = core::mem::size_of::<T>();
+        let ptr = &n as *const T as *const u8;
+        // SAFETY: to be confirmed
+        let slice = unsafe { core::slice::from_raw_parts(ptr, size) };
+        !slice.iter().all(|&byte| byte == 0) })]
     pub const unsafe fn new_unchecked(n: T) -> Self {
         match Self::new(n) {
             Some(n) => n,
@@ -452,6 +459,12 @@ where
     #[must_use]
     #[inline]
     #[track_caller]
+    #[core::contracts::requires({
+        let size = core::mem::size_of::<T>();
+        let ptr = n as *const T as *const u8;
+        // SAFETY: to be confirmed
+        let slice = unsafe { core::slice::from_raw_parts(ptr, size) };
+        !slice.iter().all(|&byte| byte == 0) })]
     pub unsafe fn from_mut_unchecked(n: &mut T) -> &mut Self {
         match Self::from_mut(n) {
             Some(n) => n,
@@ -771,6 +784,8 @@ macro_rules! nonzero_integer {
         #[must_use = "this returns the result of the operation, \
                       without modifying the original"]
         #[inline(always)]
+        #[rustc_allow_const_fn_unstable(contracts)]
+        #[core::contracts::ensures(|result: &NonZero<u32>| result.get() > 0)]
         pub const fn count_ones(self) -> NonZero<u32> {
             // SAFETY:
             // `self` is non-zero, which means it has at least one bit set, which means
@@ -802,6 +817,10 @@ macro_rules! nonzero_integer {
         #[must_use = "this returns the result of the operation, \
                       without modifying the original"]
         #[inline(always)]
+        #[rustc_allow_const_fn_unstable(contracts)]
+        #[core::contracts::requires(let old : NonZero<$Int> = self; true)]
+        #[core::contracts::ensures(
+            move |result: &NonZero<$Int>| result.rotate_right(n).get() == old.get())]
         pub const fn rotate_left(self, n: u32) -> Self {
             let result = self.get().rotate_left(n);
             // SAFETY: Rotating bits preserves the property int > 0.
@@ -833,6 +852,10 @@ macro_rules! nonzero_integer {
         #[must_use = "this returns the result of the operation, \
                       without modifying the original"]
         #[inline(always)]
+        #[rustc_allow_const_fn_unstable(contracts)]
+        #[core::contracts::requires(let old : NonZero<$Int> = self; true)]
+        #[core::contracts::ensures(
+            move |result: &NonZero<$Int>| result.rotate_left(n).get() == old.get())]
         pub const fn rotate_right(self, n: u32) -> Self {
             let result = self.get().rotate_right(n);
             // SAFETY: Rotating bits preserves the property int > 0.
@@ -1144,6 +1167,11 @@ macro_rules! nonzero_integer {
         #[must_use = "this returns the result of the operation, \
                       without modifying the original"]
         #[inline]
+        #[rustc_allow_const_fn_unstable(contracts)]
+        #[core::contracts::requires(self.get().checked_mul(other.get()).is_some())]
+        #[core::contracts::ensures(
+            move |result: &Self|
+            self.get().checked_mul(other.get()).is_some_and(|product| product == result.get()))]
         pub const unsafe fn unchecked_mul(self, other: Self) -> Self {
             // SAFETY: The caller ensures there is no overflow.
             unsafe { Self::new_unchecked(self.get().unchecked_mul(other.get())) }
@@ -1552,6 +1580,11 @@ macro_rules! nonzero_integer_signedness_dependent_methods {
         #[must_use = "this returns the result of the operation, \
                       without modifying the original"]
         #[inline]
+        #[rustc_allow_const_fn_unstable(contracts)]
+        #[core::contracts::requires(self.get().checked_add(other).is_some())]
+        #[core::contracts::ensures(
+            move |result: &Self|
+            self.get().checked_add(other).is_some_and(|sum| sum == result.get()))]
         pub const unsafe fn unchecked_add(self, other: $Int) -> Self {
             // SAFETY: The caller ensures there is no overflow.
             unsafe { Self::new_unchecked(self.get().unchecked_add(other)) }
diff --git a/library/core/src/num/uint_macros.rs b/library/core/src/num/uint_macros.rs
index b5b768cf677aa..c35d0d4e3f5e9 100644
--- a/library/core/src/num/uint_macros.rs
+++ b/library/core/src/num/uint_macros.rs
@@ -702,6 +702,8 @@ macro_rules! uint_impl {
                       without modifying the original"]
         #[inline(always)]
         #[track_caller]
+        #[rustc_allow_const_fn_unstable(contracts)]
+        #[core::contracts::requires(!self.overflowing_add(rhs).1)]
         pub const unsafe fn unchecked_add(self, rhs: Self) -> Self {
             assert_unsafe_precondition!(
                 check_language_ub,
@@ -881,6 +883,8 @@ macro_rules! uint_impl {
                       without modifying the original"]
         #[inline(always)]
         #[track_caller]
+        #[rustc_allow_const_fn_unstable(contracts)]
+        #[core::contracts::requires(!self.overflowing_sub(rhs).1)]
         pub const unsafe fn unchecked_sub(self, rhs: Self) -> Self {
             assert_unsafe_precondition!(
                 check_language_ub,
@@ -1090,6 +1094,8 @@ macro_rules! uint_impl {
                       without modifying the original"]
         #[inline(always)]
         #[track_caller]
+        #[rustc_allow_const_fn_unstable(contracts)]
+        #[core::contracts::requires(!self.overflowing_mul(rhs).1)]
         pub const unsafe fn unchecked_mul(self, rhs: Self) -> Self {
             assert_unsafe_precondition!(
                 check_language_ub,
@@ -1449,6 +1455,7 @@ macro_rules! uint_impl {
         #[unstable(feature = "disjoint_bitor", issue = "135758")]
         #[rustc_const_unstable(feature = "disjoint_bitor", issue = "135758")]
         #[inline]
+        #[core::contracts::requires((self & other) == 0)]
         pub const unsafe fn unchecked_disjoint_bitor(self, other: Self) -> Self {
             assert_unsafe_precondition!(
                 check_language_ub,
@@ -1780,6 +1787,8 @@ macro_rules! uint_impl {
                       without modifying the original"]
         #[inline(always)]
         #[track_caller]
+        #[rustc_allow_const_fn_unstable(contracts)]
+        #[core::contracts::requires(rhs < <$ActualT>::BITS)]
         pub const unsafe fn unchecked_shl(self, rhs: u32) -> Self {
             assert_unsafe_precondition!(
                 check_language_ub,
@@ -1952,6 +1961,8 @@ macro_rules! uint_impl {
                       without modifying the original"]
         #[inline(always)]
         #[track_caller]
+        #[rustc_allow_const_fn_unstable(contracts)]
+        #[core::contracts::requires(rhs < <$ActualT>::BITS)]
         pub const unsafe fn unchecked_shr(self, rhs: u32) -> Self {
             assert_unsafe_precondition!(
                 check_language_ub,
@@ -2534,6 +2545,9 @@ macro_rules! uint_impl {
         #[must_use = "this returns the result of the operation, \
                       without modifying the original"]
         #[inline(always)]
+        #[rustc_allow_const_fn_unstable(contracts)]
+        #[core::contracts::ensures(
+            move |result: &Self| *result == self << (rhs & (Self::BITS - 1)))]
         pub const fn wrapping_shl(self, rhs: u32) -> Self {
             // SAFETY: the masking by the bitsize of the type ensures that we do not shift
             // out of bounds
@@ -2564,6 +2578,8 @@ macro_rules! uint_impl {
         #[must_use = "this returns the result of the operation, \
                       without modifying the original"]
         #[inline(always)]
+        #[rustc_allow_const_fn_unstable(contracts)]
+        #[core::contracts::ensures(move |result| *result == self >> (rhs & (Self::BITS - 1)))]
         pub const fn wrapping_shr(self, rhs: u32) -> Self {
             // SAFETY: the masking by the bitsize of the type ensures that we do not shift
             // out of bounds
diff --git a/library/core/src/ops/index_range.rs b/library/core/src/ops/index_range.rs
index 507fa9460bea6..0bd71171a01ab 100644
--- a/library/core/src/ops/index_range.rs
+++ b/library/core/src/ops/index_range.rs
@@ -20,6 +20,8 @@ impl IndexRange {
     /// - `start <= end`
     #[inline]
     #[track_caller]
+    #[rustc_allow_const_fn_unstable(contracts)]
+    #[core::contracts::requires(start <= end)]
     pub(crate) const unsafe fn new_unchecked(start: usize, end: usize) -> Self {
         ub_checks::assert_unsafe_precondition!(
             check_library_ub,
@@ -54,6 +56,7 @@ impl IndexRange {
     /// # Safety
     /// - Can only be called when `start < end`, aka when `len > 0`.
     #[inline]
+    #[core::contracts::requires(self.start < self.end)]
     unsafe fn next_unchecked(&mut self) -> usize {
         debug_assert!(self.start < self.end);
 
@@ -66,6 +69,7 @@ impl IndexRange {
     /// # Safety
     /// - Can only be called when `start < end`, aka when `len > 0`.
     #[inline]
+    #[core::contracts::requires(self.start < self.end)]
     unsafe fn next_back_unchecked(&mut self) -> usize {
         debug_assert!(self.start < self.end);
 
diff --git a/library/core/src/ptr/const_ptr.rs b/library/core/src/ptr/const_ptr.rs
index 451092709443b..4d96122d1d606 100644
--- a/library/core/src/ptr/const_ptr.rs
+++ b/library/core/src/ptr/const_ptr.rs
@@ -346,6 +346,12 @@ impl<T: ?Sized> *const T {
     #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
     #[inline(always)]
     #[track_caller]
+    #[rustc_allow_const_fn_unstable(contracts)]
+    #[core::contracts::requires(
+        // Precondition 1: the computed offset `count * size_of::<T>()` does not overflow `isize`.
+        // Precondition 2: adding the computed offset to `self` does not cause overflow.
+        count.checked_mul(core::mem::size_of::<T>() as isize).is_some_and(
+            |computed_offset| (self as isize).checked_add(computed_offset).is_some()))]
     pub const unsafe fn offset(self, count: isize) -> *const T
     where
         T: Sized,
@@ -609,6 +615,16 @@ impl<T: ?Sized> *const T {
     #[rustc_const_stable(feature = "const_ptr_offset_from", since = "1.65.0")]
     #[inline]
     #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
+    #[rustc_allow_const_fn_unstable(contracts)]
+    #[core::contracts::requires(
+        // Ensures subtracting `origin` from `self` doesn't overflow
+        (self as isize).checked_sub(origin as isize).is_some() &&
+        // Ensure the distance between `self` and `origin` is aligned to `T`
+        (self as isize - origin as isize) % (mem::size_of::<T>() as isize) == 0)]
+    // FIXME: requires `T` to be `'static`
+    // #[core::contracts::ensures(
+    //     move |result|
+    //     *result == (self as isize - origin as isize) / (mem::size_of::<T>() as isize))]
     pub const unsafe fn offset_from(self, origin: *const T) -> isize
     where
         T: Sized,
@@ -826,6 +842,14 @@ impl<T: ?Sized> *const T {
     #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
     #[inline(always)]
     #[track_caller]
+    #[rustc_allow_const_fn_unstable(contracts)]
+    #[core::contracts::requires(
+        // Precondition 1: the computed offset `count * size_of::<T>()` does not overflow `isize`.
+        // Precondition 2: adding the computed offset to `self` does not cause overflow.
+        count.checked_mul(core::mem::size_of::<T>()).is_some_and(
+            |computed_offset|
+            computed_offset <= isize::MAX as usize &&
+            (self as isize).checked_add(computed_offset as isize).is_some()))]
     pub const unsafe fn add(self, count: usize) -> Self
     where
         T: Sized,
@@ -932,6 +956,14 @@ impl<T: ?Sized> *const T {
     #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
     #[inline(always)]
     #[track_caller]
+    #[rustc_allow_const_fn_unstable(contracts)]
+    #[core::contracts::requires(
+        // Precondition 1: the computed offset `count * size_of::<T>()` does not overflow `isize`.
+        // Precondition 2: subtracting the computed offset from `self` does not cause overflow.
+        count.checked_mul(core::mem::size_of::<T>()).is_some_and(
+            |computed_offset|
+            computed_offset <= isize::MAX as usize &&
+            (self as isize).checked_sub(computed_offset as isize).is_some()))]
     pub const unsafe fn sub(self, count: usize) -> Self
     where
         T: Sized,
diff --git a/library/core/src/ptr/mod.rs b/library/core/src/ptr/mod.rs
index fd067d19fcd98..d045666990ae2 100644
--- a/library/core/src/ptr/mod.rs
+++ b/library/core/src/ptr/mod.rs
@@ -2224,6 +2224,41 @@ pub unsafe fn write_volatile<T>(dst: *mut T, src: T) {
 ///
 /// Any questions go to @nagisa.
 #[allow(ptr_to_integer_transmute_in_consts)]
+#[core::contracts::requires(a.is_power_of_two())]
+// FIXME: requires `T` to be `'static`
+// #[core::contracts::ensures(move |result| {
+//     let stride = mem::size_of::<T>();
+//     // ZSTs
+//     if stride == 0 {
+//         if p.addr() % a == 0 {
+//             return *result == 0;
+//         } else {
+//             return *result == usize::MAX;
+//         }
+//     }
+//
+//     // In this case, the pointer cannot be aligned
+//     if (a % stride == 0) && (p.addr() % stride != 0) {
+//         return *result == usize::MAX;
+//     }
+//
+//     // Checking if the answer should indeed be usize::MAX when a % stride != 0
+//     // requires computing gcd(a, stride), which could be done using cttz as the implementation
+//     // does.
+//     if a % stride != 0 && *result == usize::MAX {
+//         return true;
+//     }
+//
+//     // If we reach this case, either:
+//     // - a % stride == 0 and p.addr() % stride == 0, so it is definitely possible to align the
+//     //   pointer
+//     // - a % stride != 0 and result != usize::MAX, so align_offset is claiming that it's possible
+//     //   to align the pointer
+//     // Check that applying the returned result does indeed produce an aligned address
+//     let product = usize::wrapping_mul(*result, stride);
+//     let new_addr = usize::wrapping_add(product, p.addr());
+//     *result != usize::MAX && new_addr % a == 0
+// })]
 pub(crate) unsafe fn align_offset<T: Sized>(p: *const T, a: usize) -> usize {
     // FIXME(#75598): Direct use of these intrinsics improves codegen significantly at opt-level <=
     // 1, where the method versions of these operations are not inlined.
@@ -2241,6 +2276,9 @@ pub(crate) unsafe fn align_offset<T: Sized>(p: *const T, a: usize) -> usize {
     ///
     /// Implementation of this function shall not panic. Ever.
     #[inline]
+    #[rustc_allow_const_fn_unstable(contracts)]
+    #[core::contracts::requires(m.is_power_of_two() && x < m && x % 2 != 0)]
+    #[core::contracts::ensures(move |result| wrapping_mul(*result, x) % m == 1)]
     const unsafe fn mod_inv(x: usize, m: usize) -> usize {
         /// Multiplicative modular inverse table modulo 2⁴ = 16.
         ///
diff --git a/library/core/src/ptr/mut_ptr.rs b/library/core/src/ptr/mut_ptr.rs
index 24ee92bdd6e1b..361a5e3f5980f 100644
--- a/library/core/src/ptr/mut_ptr.rs
+++ b/library/core/src/ptr/mut_ptr.rs
@@ -349,6 +349,14 @@ impl<T: ?Sized> *mut T {
     #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
     #[inline(always)]
     #[track_caller]
+    // Note: It is the caller's responsibility to ensure that `self` is non-null and properly aligned.
+    // These conditions are not verified as part of the preconditions.
+    #[rustc_allow_const_fn_unstable(contracts)]
+    #[core::contracts::requires(
+        // Precondition 1: the computed offset `count * size_of::<T>()` does not overflow `isize`.
+        // Precondition 2: adding the computed offset to `self` does not cause overflow.
+        count.checked_mul(core::mem::size_of::<T>() as isize).is_some_and(
+            |computed_offset| (self as isize).checked_add(computed_offset).is_some()))]
     pub const unsafe fn offset(self, count: isize) -> *mut T
     where
         T: Sized,
@@ -790,6 +798,17 @@ impl<T: ?Sized> *mut T {
     #[rustc_const_stable(feature = "const_ptr_offset_from", since = "1.65.0")]
     #[inline(always)]
     #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
+    #[rustc_allow_const_fn_unstable(contracts)]
+    #[core::contracts::requires(
+        // Ensuring that subtracting 'origin' from 'self' doesn't result in an overflow
+        (self as isize).checked_sub(origin as isize).is_some() &&
+        // Ensuring that the distance between 'self' and 'origin' is aligned to `T`
+        (self as isize - origin as isize) % (mem::size_of::<T>() as isize) == 0)]
+    // FIXME: requires `T` to be `'static`
+    // #[core::contracts::ensures(
+    //     move |result|
+    //     core::mem::size_of::<T>() == 0 ||
+    //     (*result == (self as isize - origin as isize) / (mem::size_of::<T>() as isize)))]
     pub const unsafe fn offset_from(self, origin: *const T) -> isize
     where
         T: Sized,
@@ -924,6 +943,16 @@ impl<T: ?Sized> *mut T {
     #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
     #[inline(always)]
     #[track_caller]
+    // Note: It is the caller's responsibility to ensure that `self` is non-null and properly
+    // aligned. These conditions are not verified as part of the preconditions.
+    #[rustc_allow_const_fn_unstable(contracts)]
+    #[core::contracts::requires(
+        // Precondition 1: the computed offset `count * size_of::<T>()` does not overflow `isize`.
+        // Precondition 2: adding the computed offset to `self` does not cause overflow.
+        count.checked_mul(core::mem::size_of::<T>()).is_some_and(
+            |computed_offset|
+            computed_offset <= isize::MAX as usize &&
+            (self as isize).checked_add(computed_offset as isize).is_some()))]
     pub const unsafe fn add(self, count: usize) -> Self
     where
         T: Sized,
@@ -1030,6 +1059,16 @@ impl<T: ?Sized> *mut T {
     #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
     #[inline(always)]
     #[track_caller]
+    // Note: It is the caller's responsibility to ensure that `self` is non-null and properly
+    // aligned. These conditions are not verified as part of the preconditions.
+    #[rustc_allow_const_fn_unstable(contracts)]
+    #[core::contracts::requires(
+        // Precondition 1: the computed offset `count * size_of::<T>()` does not overflow `isize`.
+        // Precondition 2: subtracting the computed offset from `self` does not cause overflow.
+        count.checked_mul(core::mem::size_of::<T>()).is_some_and(
+            |computed_offset|
+            computed_offset <= isize::MAX as usize &&
+            (self as isize).checked_sub(computed_offset as isize).is_some()))]
     pub const unsafe fn sub(self, count: usize) -> Self
     where
         T: Sized,
diff --git a/library/core/src/ptr/non_null.rs b/library/core/src/ptr/non_null.rs
index a762e969b52dc..115a1c85d69b2 100644
--- a/library/core/src/ptr/non_null.rs
+++ b/library/core/src/ptr/non_null.rs
@@ -127,6 +127,9 @@ impl<T: Sized> NonNull<T> {
     #[rustc_const_stable(feature = "const_nonnull_dangling", since = "1.36.0")]
     #[must_use]
     #[inline]
+    #[rustc_allow_const_fn_unstable(contracts)]
+    #[core::contracts::ensures(
+        |result: &NonNull<T>| !result.pointer.is_null() && result.pointer.is_aligned())]
     pub const fn dangling() -> Self {
         let align = crate::ptr::Alignment::of::<T>();
         NonNull::without_provenance(align.as_nonzero())
@@ -165,6 +168,10 @@ impl<T: Sized> NonNull<T> {
     #[inline]
     #[must_use]
     #[unstable(feature = "ptr_as_uninit", issue = "75402")]
+    // FIXME: requires `T` to be `'static`
+    // #[rustc_allow_const_fn_unstable(contracts)]
+    // #[core::contracts::ensures(
+    //     move |result: &&'a MaybeUninit<T>| core::ptr::eq(*result, self.cast().as_ptr()))]
     pub const unsafe fn as_uninit_ref<'a>(self) -> &'a MaybeUninit<T> {
         // SAFETY: the caller must guarantee that `self` meets all the
         // requirements for a reference.
@@ -188,6 +195,10 @@ impl<T: Sized> NonNull<T> {
     #[inline]
     #[must_use]
     #[unstable(feature = "ptr_as_uninit", issue = "75402")]
+    // FIXME: requires `T` to be `'static`
+    // #[rustc_allow_const_fn_unstable(contracts)]
+    // #[core::contracts::ensures(
+    //     move |result: &&mut MaybeUninit<T>| core::ptr::eq(*result, self.cast().as_ptr()))]
     pub const unsafe fn as_uninit_mut<'a>(self) -> &'a mut MaybeUninit<T> {
         // SAFETY: the caller must guarantee that `self` meets all the
         // requirements for a reference.
@@ -230,6 +241,10 @@ impl<T: ?Sized> NonNull<T> {
     #[rustc_const_stable(feature = "const_nonnull_new_unchecked", since = "1.25.0")]
     #[inline]
     #[track_caller]
+    #[rustc_allow_const_fn_unstable(contracts)]
+    #[core::contracts::requires(!ptr.is_null())]
+    // FIXME: requires `T` to be `'static`
+    // #[core::contracts::ensures(move |result: &Self| result.as_ptr() == ptr)]
     pub const unsafe fn new_unchecked(ptr: *mut T) -> Self {
         // SAFETY: the caller must guarantee that `ptr` is non-null.
         unsafe {
@@ -266,6 +281,12 @@ impl<T: ?Sized> NonNull<T> {
     #[stable(feature = "nonnull", since = "1.25.0")]
     #[rustc_const_stable(feature = "const_nonnull_new", since = "1.85.0")]
     #[inline]
+    // FIXME: requires `T` to be `'static`
+    // #[rustc_allow_const_fn_unstable(contracts)]
+    // #[core::contracts::ensures(
+    //     move |result: &Option<Self>|
+    //     result.is_some() == !ptr.is_null() &&
+    //     (result.is_none() || result.expect("ptr is null!").as_ptr() == ptr))]
     pub const fn new(ptr: *mut T) -> Option<Self> {
         if !ptr.is_null() {
             // SAFETY: The pointer is already checked and is not null
@@ -301,6 +322,8 @@ impl<T: ?Sized> NonNull<T> {
     /// [`std::ptr::from_raw_parts`]: crate::ptr::from_raw_parts
     #[unstable(feature = "ptr_metadata", issue = "81513")]
     #[inline]
+    #[rustc_allow_const_fn_unstable(contracts)]
+    #[core::contracts::ensures(|result: &NonNull<T>| !result.pointer.is_null())]
     pub const fn from_raw_parts(
         data_pointer: NonNull<()>,
         metadata: <T as super::Pointee>::Metadata,
@@ -318,6 +341,12 @@ impl<T: ?Sized> NonNull<T> {
     #[must_use = "this returns the result of the operation, \
                   without modifying the original"]
     #[inline]
+    // FIXME: requires `T` to be `'static`
+    // #[rustc_allow_const_fn_unstable(contracts)]
+    // #[core::contracts::ensures(
+    //     move |(data_ptr, _): &(NonNull<()>, <T as super::Pointee>::Metadata)|
+    //     !data_ptr.as_ptr().is_null() &&
+    //     self.as_ptr() as *const () == data_ptr.as_ptr() as *const ())]
     pub const fn to_raw_parts(self) -> (NonNull<()>, <T as super::Pointee>::Metadata) {
         (self.cast(), super::metadata(self.as_ptr()))
     }
@@ -330,6 +359,9 @@ impl<T: ?Sized> NonNull<T> {
     #[must_use]
     #[inline]
     #[stable(feature = "strict_provenance", since = "1.84.0")]
+    // FIXME: requires `T` to be `'static`
+    // #[core::contracts::ensures(
+    //     move |result: &NonZero<usize>| result.get() == self.as_ptr() as *const() as usize)]
     pub fn addr(self) -> NonZero<usize> {
         // SAFETY: The pointer is guaranteed by the type to be non-null,
         // meaning that the address will be non-zero.
@@ -358,6 +390,7 @@ impl<T: ?Sized> NonNull<T> {
     #[must_use]
     #[inline]
     #[stable(feature = "strict_provenance", since = "1.84.0")]
+    #[core::contracts::ensures(move |result: &Self| result.addr() == addr)]
     pub fn with_addr(self, addr: NonZero<usize>) -> Self {
         // SAFETY: The result of `ptr::from::with_addr` is non-null because `addr` is guaranteed to be non-zero.
         unsafe { NonNull::new_unchecked(self.as_ptr().with_addr(addr.get()) as *mut _) }
@@ -398,6 +431,9 @@ impl<T: ?Sized> NonNull<T> {
     #[rustc_never_returns_null_ptr]
     #[must_use]
     #[inline(always)]
+    // FIXME: requires `T` to be `'static`
+    // #[rustc_allow_const_fn_unstable(contracts)]
+    // #[core::contracts::ensures(move |result: &*mut T| *result == self.pointer as *mut T)]
     pub const fn as_ptr(self) -> *mut T {
         // This is a transmute for the same reasons as `NonZero::get`.
 
@@ -437,6 +473,9 @@ impl<T: ?Sized> NonNull<T> {
     #[rustc_const_stable(feature = "const_nonnull_as_ref", since = "1.73.0")]
     #[must_use]
     #[inline(always)]
+    // FIXME: requires `T` to be `'static`
+    // #[rustc_allow_const_fn_unstable(contracts)]
+    // #[core::contracts::ensures(move |result: &&T| core::ptr::eq(*result, self.as_ptr()))]
     pub const unsafe fn as_ref<'a>(&self) -> &'a T {
         // SAFETY: the caller must guarantee that `self` meets all the
         // requirements for a reference.
@@ -475,6 +514,9 @@ impl<T: ?Sized> NonNull<T> {
     #[rustc_const_stable(feature = "const_ptr_as_ref", since = "1.83.0")]
     #[must_use]
     #[inline(always)]
+    // FIXME: requires `T` to be `'static`
+    // #[rustc_allow_const_fn_unstable(contracts)]
+    // #[core::contracts::ensures(|result: &&'a mut T| core::ptr::eq(*result, self.as_ptr()))]
     pub const unsafe fn as_mut<'a>(&mut self) -> &'a mut T {
         // SAFETY: the caller must guarantee that `self` meets all the
         // requirements for a mutable reference.
@@ -499,6 +541,10 @@ impl<T: ?Sized> NonNull<T> {
     #[must_use = "this returns the result of the operation, \
                   without modifying the original"]
     #[inline]
+    // FIXME: requires `T` to be `'static`
+    // #[rustc_allow_const_fn_unstable(contracts)]
+    // #[core::contracts::ensures(
+    //     move |result: &NonNull<U>| result.as_ptr().addr() == self.as_ptr().addr())]
     pub const fn cast<U>(self) -> NonNull<U> {
         // SAFETY: `self` is a `NonNull` pointer which is necessarily non-null
         unsafe { NonNull { pointer: self.as_ptr() as *mut U } }
@@ -572,6 +618,14 @@ impl<T: ?Sized> NonNull<T> {
     #[must_use = "returns a new pointer rather than modifying its argument"]
     #[stable(feature = "non_null_convenience", since = "1.80.0")]
     #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
+    #[rustc_allow_const_fn_unstable(contracts)]
+    #[core::contracts::requires(
+        count.checked_mul(core::mem::size_of::<T>() as isize).is_some() &&
+        (self.as_ptr() as isize).checked_add(
+            count.wrapping_mul(core::mem::size_of::<T>() as isize)).is_some())]
+    // FIXME: requires `T` to be `'static`
+    // #[core::contracts::ensures(
+    //     move |result: &Self| result.as_ptr() == self.as_ptr().wrapping_offset(count))]
     pub const unsafe fn offset(self, count: isize) -> Self
     where
         T: Sized,
@@ -598,6 +652,11 @@ impl<T: ?Sized> NonNull<T> {
     #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
     #[stable(feature = "non_null_convenience", since = "1.80.0")]
     #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
+    #[rustc_allow_const_fn_unstable(contracts)]
+    #[core::contracts::requires((self.as_ptr().addr() as isize).checked_add(count).is_some())]
+    // FIXME: requires `T` to be `'static`
+    // #[core::contracts::ensures(
+    //     move |result: &Self| result.as_ptr() == self.as_ptr().wrapping_byte_offset(count))]
     pub const unsafe fn byte_offset(self, count: isize) -> Self {
         // SAFETY: the caller must uphold the safety contract for `offset` and `byte_offset` has
         // the same safety contract.
@@ -648,6 +707,15 @@ impl<T: ?Sized> NonNull<T> {
     #[must_use = "returns a new pointer rather than modifying its argument"]
     #[stable(feature = "non_null_convenience", since = "1.80.0")]
     #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
+    #[rustc_allow_const_fn_unstable(contracts)]
+    #[core::contracts::requires(
+        count.checked_mul(core::mem::size_of::<T>()).is_some() &&
+        count * core::mem::size_of::<T>() <= isize::MAX as usize &&
+        (self.pointer as isize).checked_add(
+            count as isize * core::mem::size_of::<T>() as isize).is_some())]
+    // FIXME: requires `T` to be `'static`
+    // #[core::contracts::ensures(
+    //     move |result: &Self| result.as_ptr() == unsafe { self.as_ptr().offset(count as isize) })]
     pub const unsafe fn add(self, count: usize) -> Self
     where
         T: Sized,
@@ -725,6 +793,14 @@ impl<T: ?Sized> NonNull<T> {
     #[must_use = "returns a new pointer rather than modifying its argument"]
     #[stable(feature = "non_null_convenience", since = "1.80.0")]
     #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
+    #[rustc_allow_const_fn_unstable(contracts)]
+    #[core::contracts::requires(
+        count.checked_mul(core::mem::size_of::<T>()).is_some() &&
+        count * core::mem::size_of::<T>() <= isize::MAX as usize)]
+    // FIXME: requires `T` to be `'static`
+    // #[core::contracts::ensures(
+    //     move |result: &NonNull<T>|
+    //         result.as_ptr() == unsafe { self.as_ptr().offset(-(count as isize)) })]
     pub const unsafe fn sub(self, count: usize) -> Self
     where
         T: Sized,
@@ -854,6 +930,17 @@ impl<T: ?Sized> NonNull<T> {
     #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
     #[stable(feature = "non_null_convenience", since = "1.80.0")]
     #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
+    #[rustc_allow_const_fn_unstable(contracts)]
+    #[core::contracts::requires(
+        self.as_ptr().addr().checked_sub(origin.as_ptr().addr()).is_some_and(
+            |distance| distance % core::mem::size_of::<T>() == 0) ||
+        origin.as_ptr().addr().checked_sub(self.as_ptr().addr()).is_some_and(
+            |distance| distance % core::mem::size_of::<T>() == 0))]
+    // FIXME: requires `T` to be `'static`
+    // #[core::contracts::ensures(
+    //     move |result: &isize|
+    //         *result == (self.as_ptr() as isize - origin.as_ptr() as isize) /
+    //             core::mem::size_of::<T>() as isize)]
     pub const unsafe fn offset_from(self, origin: NonNull<T>) -> isize
     where
         T: Sized,
@@ -875,6 +962,12 @@ impl<T: ?Sized> NonNull<T> {
     #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
     #[stable(feature = "non_null_convenience", since = "1.80.0")]
     #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
+    // FIXME: requires `T` to be `'static`
+    // #[rustc_allow_const_fn_unstable(contracts)]
+    // #[core::contracts::ensures(
+    //     move |result: &isize|
+    //         *result ==
+    //             unsafe { (self.as_ptr() as *const u8).offset_from(origin.as_ptr() as *const u8) })]
     pub const unsafe fn byte_offset_from<U: ?Sized>(self, origin: NonNull<U>) -> isize {
         // SAFETY: the caller must uphold the safety contract for `byte_offset_from`.
         unsafe { self.as_ptr().byte_offset_from(origin.as_ptr()) }
@@ -945,6 +1038,15 @@ impl<T: ?Sized> NonNull<T> {
     #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
     #[stable(feature = "ptr_sub_ptr", since = "1.87.0")]
     #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "1.87.0")]
+    #[rustc_allow_const_fn_unstable(contracts)]
+    #[core::contracts::requires(
+        self.as_ptr().addr().checked_sub(subtracted.as_ptr().addr()).is_some() &&
+        (self.as_ptr().addr()) >= (subtracted.as_ptr().addr()) &&
+        (self.as_ptr().addr() - subtracted.as_ptr().addr()) % core::mem::size_of::<T>() == 0)]
+    // FIXME: requires `T` to be `'static`
+    // #[core::contracts::ensures(
+    //     move |result: &usize|
+    //         *result == unsafe { self.as_ptr().offset_from(subtracted.as_ptr()) } as usize)]
     pub const unsafe fn offset_from_unsigned(self, subtracted: NonNull<T>) -> usize
     where
         T: Sized,
@@ -1043,6 +1145,10 @@ impl<T: ?Sized> NonNull<T> {
     #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
     #[stable(feature = "non_null_convenience", since = "1.80.0")]
     #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
+    #[rustc_allow_const_fn_unstable(contracts)]
+    #[core::contracts::requires(
+        count.checked_mul(core::mem::size_of::<T>()).map_or_else(
+            || false, |size| size <= isize::MAX as usize))]
     pub const unsafe fn copy_to(self, dest: NonNull<T>, count: usize)
     where
         T: Sized,
@@ -1063,6 +1169,10 @@ impl<T: ?Sized> NonNull<T> {
     #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
     #[stable(feature = "non_null_convenience", since = "1.80.0")]
     #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
+    #[rustc_allow_const_fn_unstable(contracts)]
+    #[core::contracts::requires(
+        count.checked_mul(core::mem::size_of::<T>()).map_or_else(
+            || false, |size| size <= isize::MAX as usize))]
     pub const unsafe fn copy_to_nonoverlapping(self, dest: NonNull<T>, count: usize)
     where
         T: Sized,
@@ -1083,6 +1193,10 @@ impl<T: ?Sized> NonNull<T> {
     #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
     #[stable(feature = "non_null_convenience", since = "1.80.0")]
     #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
+    #[rustc_allow_const_fn_unstable(contracts)]
+    #[core::contracts::requires(
+        count.checked_mul(core::mem::size_of::<T>()).map_or_else(
+            || false, |size| size <= isize::MAX as usize))]
     pub const unsafe fn copy_from(self, src: NonNull<T>, count: usize)
     where
         T: Sized,
@@ -1103,6 +1217,10 @@ impl<T: ?Sized> NonNull<T> {
     #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
     #[stable(feature = "non_null_convenience", since = "1.80.0")]
     #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
+    #[rustc_allow_const_fn_unstable(contracts)]
+    #[core::contracts::requires(
+        count.checked_mul(core::mem::size_of::<T>()).map_or_else(
+            || false, |size| size <= isize::MAX as usize))]
     pub const unsafe fn copy_from_nonoverlapping(self, src: NonNull<T>, count: usize)
     where
         T: Sized,
@@ -1156,6 +1274,10 @@ impl<T: ?Sized> NonNull<T> {
     #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
     #[stable(feature = "non_null_convenience", since = "1.80.0")]
     #[rustc_const_stable(feature = "const_ptr_write", since = "1.83.0")]
+    #[rustc_allow_const_fn_unstable(contracts)]
+    #[core::contracts::requires(
+        count.checked_mul(core::mem::size_of::<T>() as usize).is_some_and(
+            |byte_count| byte_count.wrapping_add(self.as_ptr() as usize) <= isize::MAX as usize))]
     pub const unsafe fn write_bytes(self, val: u8, count: usize)
     where
         T: Sized,
@@ -1289,6 +1411,38 @@ impl<T: ?Sized> NonNull<T> {
     #[inline]
     #[must_use]
     #[stable(feature = "non_null_convenience", since = "1.80.0")]
+    // FIXME: requires `T` to be `'static`
+    // #[core::contracts::ensures(move |result| {
+    //     // Post-condition reference: https://github.com/model-checking/verify-rust-std/pull/69/files
+    //     let stride = crate::mem::size_of::<T>();
+    //     // ZSTs
+    //     if stride == 0 {
+    //         if self.pointer.addr() % align == 0 {
+    //             return *result == 0;
+    //         } else {
+    //             return *result == usize::MAX;
+    //         }
+    //     }
+    //     // In this case, the pointer cannot be aligned
+    //     if (align % stride == 0) && (self.pointer.addr() % stride != 0) {
+    //         return *result == usize::MAX;
+    //     }
+    //     // Checking if the answer should indeed be usize::MAX when a % stride != 0 requires
+    //     // computing gcd(align, stride), which could be done using cttz as the implementation of
+    //     // ptr::align_offset does.
+    //     if align % stride != 0 && *result == usize::MAX {
+    //         return true;
+    //     }
+    //     // If we reach this case, either:
+    //     //  - align % stride == 0 and self.pointer.addr() % stride == 0, so it is definitely
+    //     //    possible to align the pointer
+    //     //  - align % stride != 0 and result != usize::MAX, so align_offset is claiming that it's
+    //     //    possible to align the pointer
+    //     // Check that applying the returned result does indeed produce an aligned address
+    //     let product = usize::wrapping_mul(*result, stride);
+    //     let new_addr = usize::wrapping_add(product, self.pointer.addr());
+    //     *result != usize::MAX && new_addr % align == 0
+    // })]
     pub fn align_offset(self, align: usize) -> usize
     where
         T: Sized,
@@ -1323,6 +1477,9 @@ impl<T: ?Sized> NonNull<T> {
     #[inline]
     #[must_use]
     #[stable(feature = "pointer_is_aligned", since = "1.79.0")]
+    // FIXME: requires `T` to be `'static`
+    // #[core::contracts::ensures(
+    //     move |result: &bool| *result == (self.as_ptr().addr() % core::mem::align_of::<T>() == 0))]
     pub fn is_aligned(self) -> bool
     where
         T: Sized,
@@ -1363,6 +1520,9 @@ impl<T: ?Sized> NonNull<T> {
     #[inline]
     #[must_use]
     #[unstable(feature = "pointer_is_aligned_to", issue = "96284")]
+    #[core::contracts::requires(align.is_power_of_two())]
+    // FIXME: requires `T` to be `'static`
+    // #[core::contracts::ensures(move |result: &bool| *result == (self.as_ptr().addr() % align == 0))]
     pub fn is_aligned_to(self, align: usize) -> bool {
         self.as_ptr().is_aligned_to(align)
     }
@@ -1416,6 +1576,13 @@ impl<T> NonNull<[T]> {
     #[rustc_const_stable(feature = "const_slice_from_raw_parts_mut", since = "1.83.0")]
     #[must_use]
     #[inline]
+    // FIXME: requires `T` to be `'static`
+    // #[rustc_allow_const_fn_unstable(contracts)]
+    // #[core::contracts::ensures(
+    //     move |result: &NonNull<[T]>|
+    //         !result.pointer.is_null() &&
+    //         result.pointer as *const T == data.pointer &&
+    //         unsafe { result.as_ref() }.len() == len)]
     pub const fn slice_from_raw_parts(data: NonNull<T>, len: usize) -> Self {
         // SAFETY: `data` is a `NonNull` pointer which is necessarily non-null
         unsafe { Self::new_unchecked(super::slice_from_raw_parts_mut(data.as_ptr(), len)) }
@@ -1476,6 +1643,10 @@ impl<T> NonNull<[T]> {
     #[inline]
     #[must_use]
     #[unstable(feature = "slice_ptr_get", issue = "74265")]
+    #[rustc_allow_const_fn_unstable(contracts)]
+    // FIXME: requires `T` to be `'static`
+    // #[core::contracts::ensures(
+    //     move |result: &NonNull<T>| result.as_ptr().addr() == self.as_ptr().addr())]
     pub const fn as_non_null_ptr(self) -> NonNull<T> {
         self.cast()
     }
@@ -1495,6 +1666,9 @@
     #[must_use]
     #[unstable(feature = "slice_ptr_get", issue = "74265")]
     #[rustc_never_returns_null_ptr]
+    // FIXME: requires `T` to be `'static`
+    // #[rustc_allow_const_fn_unstable(contracts)]
+    // #[core::contracts::ensures(move |result: &*mut T| *result == self.pointer as *mut T)]
     pub const fn as_mut_ptr(self) -> *mut T {
         self.as_non_null_ptr().as_ptr()
     }
@@ -1539,6 +1713,18 @@ impl<T> NonNull<[T]> {
     #[inline]
     #[must_use]
     #[unstable(feature = "ptr_as_uninit", issue = "75402")]
+    #[rustc_allow_const_fn_unstable(contracts)]
+    #[core::contracts::requires(
+        // Ensure the pointer is properly aligned
+        self.as_ptr().cast::<T>().align_offset(core::mem::align_of::<T>()) == 0 &&
+        // Ensure the slice size does not exceed isize::MAX
+        self.len().checked_mul(core::mem::size_of::<T>()).is_some() &&
+        self.len() * core::mem::size_of::<T>() <= isize::MAX as usize &&
+        self.as_ptr().addr().checked_add(self.len() * core::mem::size_of::<T>()).is_some())]
+    // FIXME: requires `T` to be `'static`
+    // #[core::contracts::ensures(
+    //     move |result: &&[MaybeUninit<T>]|
+    //         result.len() == self.len() && core::ptr::eq(result.as_ptr(), self.cast().as_ptr()))]
     pub const unsafe fn as_uninit_slice<'a>(self) -> &'a [MaybeUninit<T>] {
         // SAFETY: the caller must uphold the safety contract for `as_uninit_slice`.
         unsafe { slice::from_raw_parts(self.cast().as_ptr(), self.len()) }
@@ -1603,6 +1789,18 @@ impl<T> NonNull<[T]> {
     #[inline]
     #[must_use]
     #[unstable(feature = "ptr_as_uninit", issue = "75402")]
+    #[rustc_allow_const_fn_unstable(contracts)]
+    #[core::contracts::requires(
+        // Ensure the pointer is properly aligned
+        self.as_ptr().cast::<T>().align_offset(core::mem::align_of::<T>()) == 0 &&
+        // Ensure the slice size does not exceed isize::MAX
+        self.len().checked_mul(core::mem::size_of::<T>()).is_some() &&
+        self.len() * core::mem::size_of::<T>() <= isize::MAX as usize &&
+        self.as_ptr().addr().checked_add(self.len() * core::mem::size_of::<T>()).is_some())]
+    // FIXME: requires `T` to be `'static`
+    // #[core::contracts::ensures(
+    //     move |result: &&mut [MaybeUninit<T>]|
+    //         result.len() == self.len() && core::ptr::eq(result.as_ptr(), self.cast().as_ptr()))]
     pub const unsafe fn as_uninit_slice_mut<'a>(self) -> &'a mut [MaybeUninit<T>] {
         // SAFETY: the caller must uphold the safety contract for `as_uninit_slice_mut`.
         unsafe { slice::from_raw_parts_mut(self.cast().as_ptr(), self.len()) }
diff --git a/library/core/src/ptr/unique.rs b/library/core/src/ptr/unique.rs
index cdc8b6cc936df..16702c06aaca3 100644
--- a/library/core/src/ptr/unique.rs
+++ b/library/core/src/ptr/unique.rs
@@ -83,6 +83,10 @@ impl<T: ?Sized> Unique<T> {
     ///
     /// `ptr` must be non-null.
     #[inline]
+    #[rustc_allow_const_fn_unstable(contracts)]
+    #[core::contracts::requires(!ptr.is_null())]
+    // FIXME: requires `T` to be `'static`
+    // #[core::contracts::ensures(move |result: &Unique<T>| result.as_ptr() == ptr)]
     pub const unsafe fn new_unchecked(ptr: *mut T) -> Self {
         // SAFETY: the caller must guarantee that `ptr` is non-null.
         unsafe { Unique { pointer: NonNull::new_unchecked(ptr), _marker: PhantomData } }
@@ -90,6 +94,11 @@ impl<T: ?Sized> Unique<T> {

     /// Creates a new `Unique` if `ptr` is non-null.
     #[inline]
+    #[rustc_allow_const_fn_unstable(contracts)]
+    // FIXME: requires `T` to be `'static`
+    // #[core::contracts::ensures(
+    //     move |result: &Option<Unique<T>>|
+    //         result.is_none() == ptr.is_null() && (result.is_none() || result.unwrap().as_ptr() == ptr))]
     pub const fn new(ptr: *mut T) -> Option<Self> {
         if let Some(pointer) = NonNull::new(ptr) {
             Some(Unique { pointer, _marker: PhantomData })
@@ -107,6 +116,8 @@ impl<T: ?Sized> Unique<T> {
     /// Acquires the underlying `*mut` pointer.
     #[must_use = "`self` will be dropped if the result is not used"]
     #[inline]
+    #[rustc_allow_const_fn_unstable(contracts)]
+    #[core::contracts::ensures(|result: &*mut T| !result.is_null())]
     pub const fn as_ptr(self) -> *mut T {
         self.pointer.as_ptr()
     }
@@ -114,6 +125,9 @@ impl<T: ?Sized> Unique<T> {
     /// Acquires the underlying `*mut` pointer.
     #[must_use = "`self` will be dropped if the result is not used"]
     #[inline]
+    // FIXME: requires `T` to be `'static`
+    // #[rustc_allow_const_fn_unstable(contracts)]
+    // #[core::contracts::ensures(move |result: &NonNull<T>| result.as_ptr() == self.pointer.as_ptr())]
     pub const fn as_non_null_ptr(self) -> NonNull<T> {
         self.pointer
     }
diff --git a/library/core/src/range/iter.rs b/library/core/src/range/iter.rs
index 24efd4a204a5f..2c6a262d3683e 100644
--- a/library/core/src/range/iter.rs
+++ b/library/core/src/range/iter.rs
@@ -104,6 +104,7 @@ impl<A: Step> Iterator for IterRange<A> {
     }

     #[inline]
+    #[core::contracts::requires(idx < self.size_hint().0)]
     unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item
     where
         Self: TrustedRandomAccessNoCoerce,
diff --git a/library/core/src/slice/iter.rs b/library/core/src/slice/iter.rs
index 7053ae86e732f..0a59ac68ca6b5 100644
--- a/library/core/src/slice/iter.rs
+++ b/library/core/src/slice/iter.rs
@@ -1403,6 +1403,7 @@ impl<'a, T> Iterator for Windows<'a, T> {
         }
     }

+    #[core::contracts::requires(idx < self.len())]
     unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item {
         // SAFETY: since the caller guarantees that `i` is in bounds,
         // which means that `i` cannot overflow an `isize`, and the
@@ -1560,6 +1561,7 @@ impl<'a, T> Iterator for Chunks<'a, T> {
         }
     }

+    #[core::contracts::requires(idx < self.len())]
     unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item {
         let start = idx * self.chunk_size;
         // SAFETY: the caller guarantees that `i` is in bounds,
@@ -1749,6 +1751,7 @@ impl<'a, T> Iterator for ChunksMut<'a, T> {
         }
     }

+    #[core::contracts::requires(idx < self.len())]
     unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item {
         let start = idx * self.chunk_size;
         // SAFETY: see comments for `Chunks::__iterator_get_unchecked` and `self.v`.
@@ -1947,6 +1950,7 @@ impl<'a, T> Iterator for ChunksExact<'a, T> {
         self.next_back()
     }

+    #[core::contracts::requires(idx < self.len())]
     unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item {
         let start = idx * self.chunk_size;
         // SAFETY: mostly identical to `Chunks::__iterator_get_unchecked`.
@@ -2108,6 +2112,7 @@ impl<'a, T> Iterator for ChunksExactMut<'a, T> {
         self.next_back()
     }

+    #[core::contracts::requires(idx < self.len())]
     unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item {
         let start = idx * self.chunk_size;
         // SAFETY: see comments for `Chunks::__iterator_get_unchecked` and `self.v`.
@@ -2389,6 +2394,7 @@ impl<'a, T> Iterator for RChunks<'a, T> { } } + #[core::contracts::requires(idx < self.len())] unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item { let end = self.v.len() - idx * self.chunk_size; let start = match end.checked_sub(self.chunk_size) { @@ -2569,6 +2575,7 @@ impl<'a, T> Iterator for RChunksMut<'a, T> { } } + #[core::contracts::requires(idx < self.len())] unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item { let end = self.v.len() - idx * self.chunk_size; let start = match end.checked_sub(self.chunk_size) { @@ -2762,6 +2769,7 @@ impl<'a, T> Iterator for RChunksExact<'a, T> { self.next_back() } + #[core::contracts::requires(idx < self.len())] unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item { let end = self.v.len() - idx * self.chunk_size; let start = end - self.chunk_size; @@ -2928,6 +2936,7 @@ impl<'a, T> Iterator for RChunksExactMut<'a, T> { self.next_back() } + #[core::contracts::requires(idx < self.len())] unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item { let end = self.v.len() - idx * self.chunk_size; let start = end - self.chunk_size; diff --git a/library/core/src/slice/iter/macros.rs b/library/core/src/slice/iter/macros.rs index 7c1ed3fe8a246..311e32b72212d 100644 --- a/library/core/src/slice/iter/macros.rs +++ b/library/core/src/slice/iter/macros.rs @@ -77,6 +77,7 @@ macro_rules! iterator { /// /// The iterator must not be empty #[inline] + #[core::contracts::requires(!is_empty!(self))] unsafe fn next_back_unchecked(&mut self) -> $elem { // SAFETY: the caller promised it's not empty, so // the offsetting is in-bounds and there's an element to return. @@ -96,6 +97,7 @@ macro_rules! iterator { // returning the old start. // Unsafe because the offset must not exceed `self.len()`. #[inline(always)] + #[core::contracts::requires(offset <= len!(self))] unsafe fn post_inc_start(&mut self, offset: usize) -> NonNull { let old = self.ptr; @@ -115,6 +117,7 @@ macro_rules! iterator { // returning the new end. // Unsafe because the offset must not exceed `self.len()`. #[inline(always)] + #[core::contracts::requires(offset <= len!(self))] unsafe fn pre_dec_end(&mut self, offset: usize) -> NonNull { if_zst!(mut self, // SAFETY: By our precondition, `offset` can be at most the @@ -392,6 +395,7 @@ macro_rules! iterator { } #[inline] + #[core::contracts::requires(idx < len!(self))] unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item { // SAFETY: the caller must guarantee that `i` is in bounds of // the underlying slice, so `i` cannot overflow an `isize`, and @@ -460,6 +464,7 @@ macro_rules! iterator { impl<'a, T> UncheckedIterator for $name<'a, T> { #[inline] + #[core::contracts::requires(!is_empty!(self))] unsafe fn next_unchecked(&mut self) -> $elem { // SAFETY: The caller promised there's at least one more item. 
unsafe { diff --git a/library/core/src/slice/mod.rs b/library/core/src/slice/mod.rs index 0248688733952..b8b1279fe2815 100644 --- a/library/core/src/slice/mod.rs +++ b/library/core/src/slice/mod.rs @@ -4053,6 +4053,25 @@ impl [T] { /// ``` #[stable(feature = "slice_align_to", since = "1.30.0")] #[must_use] + // FIXME: requires `&self` to be `'static` + // #[core::contracts::ensures( + // move |(prefix, middle, suffix): &(&[T], &[U], &[T])| + // // The following clause guarantees that middle is of maximum size within self If U or T are + // // ZSTs, then middle has size zero, so we adapt the check in that case + // (((U::IS_ZST || T::IS_ZST) && prefix.len() == self.len()) || ( + // prefix.len() * size_of::() < align_of::() && + // suffix.len() * size_of::() < size_of::() + // )) && + // // Either align_to just returns self in the prefix, or the 3 returned slices should be + // // sequential, contiguous, and have same total length as self + // prefix.as_ptr() == self.as_ptr() && ( + // prefix.len() == self.len() || ( + // unsafe { prefix.as_ptr().add(prefix.len()) } as *const u8 == + // middle.as_ptr() as *const u8 && + // unsafe { middle.as_ptr().add(middle.len()) } as *const u8 == + // suffix.as_ptr() as *const u8 && + // unsafe { suffix.as_ptr().add(suffix.len()) } == + // unsafe { self.as_ptr().add(self.len()) })))] pub unsafe fn align_to(&self) -> (&[T], &[U], &[T]) { // Note that most of this function will be constant-evaluated, if U::IS_ZST || T::IS_ZST { diff --git a/library/core/src/str/iter.rs b/library/core/src/str/iter.rs index d2985d8a18669..fa0bb1bd1db8a 100644 --- a/library/core/src/str/iter.rs +++ b/library/core/src/str/iter.rs @@ -356,6 +356,7 @@ impl Iterator for Bytes<'_> { } #[inline] + #[core::contracts::requires(idx < self.0.len())] unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> u8 { // SAFETY: the caller must uphold the safety contract // for `Iterator::__iterator_get_unchecked`. 
diff --git a/library/core/src/str/pattern.rs b/library/core/src/str/pattern.rs index e116b13838323..7c6851f899920 100644 --- a/library/core/src/str/pattern.rs +++ b/library/core/src/str/pattern.rs @@ -1921,6 +1921,7 @@ fn simd_contains(needle: &str, haystack: &str) -> Option { all(target_arch = "loongarch64", target_feature = "lsx") ))] #[inline] +#[core::contracts::requires(x.len() == y.len())] unsafe fn small_slice_eq(x: &[u8], y: &[u8]) -> bool { debug_assert_eq!(x.len(), y.len()); // This function is adapted from diff --git a/library/core/src/time.rs b/library/core/src/time.rs index f721fcd6156cf..05af55fc1a461 100644 --- a/library/core/src/time.rs +++ b/library/core/src/time.rs @@ -191,6 +191,8 @@ impl Duration { #[inline] #[must_use] #[rustc_const_stable(feature = "duration_consts_2", since = "1.58.0")] + #[rustc_allow_const_fn_unstable(contracts)] + #[core::contracts::ensures(|duration: &Duration| duration.nanos.as_inner() < NANOS_PER_SEC)] pub const fn new(secs: u64, nanos: u32) -> Duration { if nanos < NANOS_PER_SEC { // SAFETY: nanos < NANOS_PER_SEC, therefore nanos is within the valid range @@ -221,6 +223,10 @@ impl Duration { #[must_use] #[inline] #[rustc_const_stable(feature = "duration_consts", since = "1.32.0")] + #[rustc_allow_const_fn_unstable(contracts)] + #[core::contracts::ensures( + move |duration: &Duration| + duration.nanos.as_inner() < NANOS_PER_SEC && duration.secs == secs)] pub const fn from_secs(secs: u64) -> Duration { Duration { secs, nanos: Nanoseconds::ZERO } } @@ -241,6 +247,8 @@ impl Duration { #[must_use] #[inline] #[rustc_const_stable(feature = "duration_consts", since = "1.32.0")] + #[rustc_allow_const_fn_unstable(contracts)] + #[core::contracts::ensures(|duration: &Duration| duration.nanos.as_inner() < NANOS_PER_SEC)] pub const fn from_millis(millis: u64) -> Duration { let secs = millis / MILLIS_PER_SEC; let subsec_millis = (millis % MILLIS_PER_SEC) as u32; @@ -267,6 +275,8 @@ impl Duration { #[must_use] #[inline] #[rustc_const_stable(feature = "duration_consts", since = "1.32.0")] + #[rustc_allow_const_fn_unstable(contracts)] + #[core::contracts::ensures(|duration: &Duration| duration.nanos.as_inner() < NANOS_PER_SEC)] pub const fn from_micros(micros: u64) -> Duration { let secs = micros / MICROS_PER_SEC; let subsec_micros = (micros % MICROS_PER_SEC) as u32; @@ -298,6 +308,8 @@ impl Duration { #[must_use] #[inline] #[rustc_const_stable(feature = "duration_consts", since = "1.32.0")] + #[rustc_allow_const_fn_unstable(contracts)] + #[core::contracts::ensures(|duration: &Duration| duration.nanos.as_inner() < NANOS_PER_SEC)] pub const fn from_nanos(nanos: u64) -> Duration { const NANOS_PER_SEC: u64 = self::NANOS_PER_SEC as u64; let secs = nanos / NANOS_PER_SEC; @@ -504,6 +516,9 @@ impl Duration { #[rustc_const_stable(feature = "duration_consts", since = "1.32.0")] #[must_use] #[inline] + // FIXME: requires `&self` to be `'static` + // #[rustc_allow_const_fn_unstable(contracts)] + // #[core::contracts::ensures(move |secs: &u64| *secs == self.secs)] pub const fn as_secs(&self) -> u64 { self.secs } @@ -527,6 +542,9 @@ impl Duration { #[rustc_const_stable(feature = "duration_consts", since = "1.32.0")] #[must_use] #[inline] + // FIXME: requires `&self` to be `'static` + // #[rustc_allow_const_fn_unstable(contracts)] + // #[core::contracts::ensures(move |ms| *ms == self.nanos.as_inner() / NANOS_PER_MILLI)] pub const fn subsec_millis(&self) -> u32 { self.nanos.as_inner() / NANOS_PER_MILLI } @@ -550,6 +568,9 @@ impl Duration { #[rustc_const_stable(feature = 
"duration_consts", since = "1.32.0")] #[must_use] #[inline] + // FIXME: requires `&self` to be `'static` + // #[rustc_allow_const_fn_unstable(contracts)] + // #[core::contracts::ensures(|ms| *ms == self.nanos.as_inner() / NANOS_PER_MICRO)] pub const fn subsec_micros(&self) -> u32 { self.nanos.as_inner() / NANOS_PER_MICRO } @@ -573,6 +594,9 @@ impl Duration { #[rustc_const_stable(feature = "duration_consts", since = "1.32.0")] #[must_use] #[inline] + // FIXME: requires `&self` to be `'static` + // #[rustc_allow_const_fn_unstable(contracts)] + // #[core::contracts::ensures(|nanos| *nanos == self.nanos.as_inner())] pub const fn subsec_nanos(&self) -> u32 { self.nanos.as_inner() } @@ -591,6 +615,12 @@ impl Duration { #[rustc_const_stable(feature = "duration_as_u128", since = "1.33.0")] #[must_use] #[inline] + // FIXME: requires `&self` to be `'static` + // #[rustc_allow_const_fn_unstable(contracts)] + // #[core::contracts::ensures( + // |ms| + // *ms == self.secs as u128 * MILLIS_PER_SEC as u128 + + // (self.nanos.as_inner() / NANOS_PER_MILLI) as u128)] pub const fn as_millis(&self) -> u128 { self.secs as u128 * MILLIS_PER_SEC as u128 + (self.nanos.as_inner() / NANOS_PER_MILLI) as u128 @@ -610,6 +640,12 @@ impl Duration { #[rustc_const_stable(feature = "duration_as_u128", since = "1.33.0")] #[must_use] #[inline] + // FIXME: requires `&self` to be `'static` + // #[rustc_allow_const_fn_unstable(contracts)] + // #[core::contracts::ensures( + // |ms| + // *ms == self.secs as u128 * MICROS_PER_SEC as u128 + + // (self.nanos.as_inner() / NANOS_PER_MICRO) as u128)] pub const fn as_micros(&self) -> u128 { self.secs as u128 * MICROS_PER_SEC as u128 + (self.nanos.as_inner() / NANOS_PER_MICRO) as u128 @@ -668,6 +704,10 @@ impl Duration { without modifying the original"] #[inline] #[rustc_const_stable(feature = "duration_consts_2", since = "1.58.0")] + #[rustc_allow_const_fn_unstable(contracts)] + #[core::contracts::ensures( + |duration: &Option| + duration.is_none() || duration.unwrap().nanos.as_inner() < NANOS_PER_SEC)] pub const fn checked_add(self, rhs: Duration) -> Option { if let Some(mut secs) = self.secs.checked_add(rhs.secs) { let mut nanos = self.nanos.as_inner() + rhs.nanos.as_inner(); @@ -726,6 +766,10 @@ impl Duration { without modifying the original"] #[inline] #[rustc_const_stable(feature = "duration_consts_2", since = "1.58.0")] + #[rustc_allow_const_fn_unstable(contracts)] + #[core::contracts::ensures( + |duration: &Option| + duration.is_none() || duration.unwrap().nanos.as_inner() < NANOS_PER_SEC)] pub const fn checked_sub(self, rhs: Duration) -> Option { if let Some(mut secs) = self.secs.checked_sub(rhs.secs) { let nanos = if self.nanos.as_inner() >= rhs.nanos.as_inner() { @@ -782,6 +826,10 @@ impl Duration { without modifying the original"] #[inline] #[rustc_const_stable(feature = "duration_consts_2", since = "1.58.0")] + #[rustc_allow_const_fn_unstable(contracts)] + #[core::contracts::ensures( + |duration: &Option| + duration.is_none() || duration.unwrap().nanos.as_inner() < NANOS_PER_SEC)] pub const fn checked_mul(self, rhs: u32) -> Option { // Multiply nanoseconds as u64, because it cannot overflow that way. 
         let total_nanos = self.nanos.as_inner() as u64 * rhs as u64;
@@ -838,6 +886,10 @@ impl Duration {
                                           without modifying the original"]
     #[inline]
     #[rustc_const_stable(feature = "duration_consts_2", since = "1.58.0")]
+    #[rustc_allow_const_fn_unstable(contracts)]
+    #[core::contracts::ensures(
+        move |duration: &Option<Duration>|
+            rhs == 0 || duration.unwrap().nanos.as_inner() < NANOS_PER_SEC)]
     pub const fn checked_div(self, rhs: u32) -> Option<Duration> {
         if rhs != 0 {
             let (secs, extra_secs) = (self.secs / (rhs as u64), self.secs % (rhs as u64));
diff --git a/library/std/src/alloc.rs b/library/std/src/alloc.rs
index daa25c5a50dd6..550d4cd00414a 100644
--- a/library/std/src/alloc.rs
+++ b/library/std/src/alloc.rs
@@ -151,6 +151,11 @@ impl System {

     // SAFETY: Same as `Allocator::grow`
     #[inline]
+    #[core::contracts::requires(
+        new_layout.size() >= old_layout.size() &&
+        ptr.as_ptr().is_aligned_to(old_layout.align()) &&
+        (old_layout.size() == 0 || old_layout.align() != 0) &&
+        (new_layout.size() == 0 || new_layout.align() != 0))]
     unsafe fn grow_impl(
         &self,
         ptr: NonNull<u8>,
@@ -213,6 +218,7 @@ unsafe impl Allocator for System {
     }

     #[inline]
+    #[core::contracts::requires(layout.size() != 0)]
     unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
         if layout.size() != 0 {
             // SAFETY: `layout` is non-zero in size,
@@ -222,6 +228,7 @@ unsafe impl Allocator for System {
     }

     #[inline]
+    #[core::contracts::requires(new_layout.size() >= old_layout.size())]
     unsafe fn grow(
         &self,
         ptr: NonNull<u8>,
@@ -233,6 +240,7 @@ unsafe impl Allocator for System {
     }

     #[inline]
+    #[core::contracts::requires(new_layout.size() >= old_layout.size())]
     unsafe fn grow_zeroed(
         &self,
         ptr: NonNull<u8>,
@@ -244,6 +252,7 @@ unsafe impl Allocator for System {
     }

     #[inline]
+    #[core::contracts::requires(new_layout.size() <= old_layout.size())]
     unsafe fn shrink(
         &self,
         ptr: NonNull<u8>,
@@ -395,6 +404,7 @@ pub mod __default_lib_allocator {
     // ABI

     #[rustc_std_internal_symbol]
+    #[core::contracts::requires(align.is_power_of_two())]
     pub unsafe extern "C" fn __rdl_alloc(size: usize, align: usize) -> *mut u8 {
         // SAFETY: see the guarantees expected by `Layout::from_size_align` and
         // `GlobalAlloc::alloc`.
@@ -405,6 +415,7 @@ pub mod __default_lib_allocator {
     }

     #[rustc_std_internal_symbol]
+    #[core::contracts::requires(align.is_power_of_two())]
     pub unsafe extern "C" fn __rdl_dealloc(ptr: *mut u8, size: usize, align: usize) {
         // SAFETY: see the guarantees expected by `Layout::from_size_align` and
         // `GlobalAlloc::dealloc`.
@@ -412,6 +423,7 @@ pub mod __default_lib_allocator {
     }

     #[rustc_std_internal_symbol]
+    #[core::contracts::requires(align.is_power_of_two())]
     pub unsafe extern "C" fn __rdl_realloc(
         ptr: *mut u8,
         old_size: usize,
@@ -427,6 +439,7 @@ pub mod __default_lib_allocator {
     }

     #[rustc_std_internal_symbol]
+    #[core::contracts::requires(align.is_power_of_two())]
     pub unsafe extern "C" fn __rdl_alloc_zeroed(size: usize, align: usize) -> *mut u8 {
         // SAFETY: see the guarantees expected by `Layout::from_size_align` and
         // `GlobalAlloc::alloc_zeroed`.
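For orientation, a minimal, self-contained sketch of how the attributes added throughout this patch behave on an ordinary function. It is illustrative only: the function and its bounds are invented for this note, and it assumes a nightly toolchain with the unstable `contracts` feature and contract checking enabled (e.g. `-Zcontract-checks=yes`); with checking disabled the attributes should compile away to no-ops.

    #![feature(contracts)]
    // `contracts` is still an incomplete feature, so silence the lint,
    // as the library crates touched by this patch do.
    #![allow(incomplete_features)]

    /// Hypothetical helper used only to demonstrate the attribute syntax.
    #[core::contracts::requires(len > 0)]
    #[core::contracts::ensures(move |result: &usize| *result < len)]
    fn clamp_index(idx: usize, len: usize) -> usize {
        if idx < len { idx } else { len - 1 }
    }

    fn main() {
        // With contract checks enabled, `clamp_index(3, 0)` would fail the
        // `len > 0` precondition before reaching the body.
        assert_eq!(clamp_index(7, 4), 3);
    }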
diff --git a/library/std/src/lib.rs b/library/std/src/lib.rs index a8c50cec01e0b..02d7a8594ffcc 100644 --- a/library/std/src/lib.rs +++ b/library/std/src/lib.rs @@ -253,6 +253,8 @@ #![deny(ffi_unwind_calls)] // std may use features in a platform-specific way #![allow(unused_features)] +// permit use of experimental feature contracts +#![allow(incomplete_features)] // // Features: #![cfg_attr(test, feature(internal_output_capture, print_internals, update_panic_count, rt))] @@ -278,6 +280,7 @@ #![feature(cfi_encoding)] #![feature(char_max_len)] #![feature(const_trait_impl)] +#![feature(contracts)] #![feature(core_float_math)] #![feature(decl_macro)] #![feature(deprecated_suggestion)] diff --git a/library/std/src/sync/mpmc/context.rs b/library/std/src/sync/mpmc/context.rs index 6b2f4cb6ffd29..d5789c51bf5c1 100644 --- a/library/std/src/sync/mpmc/context.rs +++ b/library/std/src/sync/mpmc/context.rs @@ -116,6 +116,7 @@ impl Context { /// # Safety /// This may only be called from the thread this `Context` belongs to. #[inline] + #[core::contracts::requires(self.thread_id() == current_thread_id())] pub unsafe fn wait_until(&self, deadline: Option) -> Selected { loop { // Check whether an operation has been selected. diff --git a/src/tools/miri/tests/pass/shims/time-with-isolation.stdout b/src/tools/miri/tests/pass/shims/time-with-isolation.stdout index 6058735f005fd..5ca24c544bba9 100644 --- a/src/tools/miri/tests/pass/shims/time-with-isolation.stdout +++ b/src/tools/miri/tests/pass/shims/time-with-isolation.stdout @@ -1,2 +1,2 @@ -The loop took around 1350ms +The loop took around 1450ms (It's fine for this number to change when you `--bless` this test.) diff --git a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.32bit.panic-abort.diff b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.32bit.panic-abort.diff index 2c89670dcf7d7..9b499f910265c 100644 --- a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.32bit.panic-abort.diff +++ b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.32bit.panic-abort.diff @@ -18,21 +18,29 @@ scope 5 (inlined NonNull::<[bool; 0]>::dangling) { let mut _6: std::num::NonZero; scope 6 { - scope 8 (inlined std::ptr::Alignment::as_nonzero) { - } - scope 9 (inlined NonNull::<[bool; 0]>::without_provenance) { - let _7: *const [bool; 0]; - scope 10 { + scope 7 { + scope 9 { } - scope 11 (inlined NonZero::::get) { + } + scope 8 { + scope 12 (inlined std::ptr::Alignment::as_nonzero) { } - scope 12 (inlined std::ptr::without_provenance::<[bool; 0]>) { - scope 13 (inlined without_provenance_mut::<[bool; 0]>) { + scope 13 (inlined NonNull::<[bool; 0]>::without_provenance) { + let _7: *const [bool; 0]; + scope 14 { + } + scope 15 (inlined NonZero::::get) { + } + scope 16 (inlined std::ptr::without_provenance::<[bool; 0]>) { + scope 17 (inlined without_provenance_mut::<[bool; 0]>) { + } } } } + scope 11 (inlined std::ptr::Alignment::of::<[bool; 0]>) { + } } - scope 7 (inlined std::ptr::Alignment::of::<[bool; 0]>) { + scope 10 (inlined core::contracts::build_check_ensures::, {closure@NonNull<[bool; 0]>::dangling::{closure#0}}>) { } } } diff --git a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.32bit.panic-unwind.diff b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.32bit.panic-unwind.diff index 8fecfe224cc69..ffbe1b77a5b27 100644 --- 
a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.32bit.panic-unwind.diff +++ b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.32bit.panic-unwind.diff @@ -18,21 +18,29 @@ scope 5 (inlined NonNull::<[bool; 0]>::dangling) { let mut _6: std::num::NonZero; scope 6 { - scope 8 (inlined std::ptr::Alignment::as_nonzero) { - } - scope 9 (inlined NonNull::<[bool; 0]>::without_provenance) { - let _7: *const [bool; 0]; - scope 10 { + scope 7 { + scope 9 { } - scope 11 (inlined NonZero::::get) { + } + scope 8 { + scope 12 (inlined std::ptr::Alignment::as_nonzero) { } - scope 12 (inlined std::ptr::without_provenance::<[bool; 0]>) { - scope 13 (inlined without_provenance_mut::<[bool; 0]>) { + scope 13 (inlined NonNull::<[bool; 0]>::without_provenance) { + let _7: *const [bool; 0]; + scope 14 { + } + scope 15 (inlined NonZero::::get) { + } + scope 16 (inlined std::ptr::without_provenance::<[bool; 0]>) { + scope 17 (inlined without_provenance_mut::<[bool; 0]>) { + } } } } + scope 11 (inlined std::ptr::Alignment::of::<[bool; 0]>) { + } } - scope 7 (inlined std::ptr::Alignment::of::<[bool; 0]>) { + scope 10 (inlined core::contracts::build_check_ensures::, {closure@NonNull<[bool; 0]>::dangling::{closure#0}}>) { } } } diff --git a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.64bit.panic-abort.diff b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.64bit.panic-abort.diff index 976ea252c2f89..f0f6e9d03816e 100644 --- a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.64bit.panic-abort.diff +++ b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.64bit.panic-abort.diff @@ -18,21 +18,29 @@ scope 5 (inlined NonNull::<[bool; 0]>::dangling) { let mut _6: std::num::NonZero; scope 6 { - scope 8 (inlined std::ptr::Alignment::as_nonzero) { - } - scope 9 (inlined NonNull::<[bool; 0]>::without_provenance) { - let _7: *const [bool; 0]; - scope 10 { + scope 7 { + scope 9 { } - scope 11 (inlined NonZero::::get) { + } + scope 8 { + scope 12 (inlined std::ptr::Alignment::as_nonzero) { } - scope 12 (inlined std::ptr::without_provenance::<[bool; 0]>) { - scope 13 (inlined without_provenance_mut::<[bool; 0]>) { + scope 13 (inlined NonNull::<[bool; 0]>::without_provenance) { + let _7: *const [bool; 0]; + scope 14 { + } + scope 15 (inlined NonZero::::get) { + } + scope 16 (inlined std::ptr::without_provenance::<[bool; 0]>) { + scope 17 (inlined without_provenance_mut::<[bool; 0]>) { + } } } } + scope 11 (inlined std::ptr::Alignment::of::<[bool; 0]>) { + } } - scope 7 (inlined std::ptr::Alignment::of::<[bool; 0]>) { + scope 10 (inlined core::contracts::build_check_ensures::, {closure@NonNull<[bool; 0]>::dangling::{closure#0}}>) { } } } diff --git a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.64bit.panic-unwind.diff b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.64bit.panic-unwind.diff index 6c59f5e3e2e86..54124312f82fa 100644 --- a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.64bit.panic-unwind.diff +++ b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.64bit.panic-unwind.diff @@ -18,21 +18,29 @@ scope 5 (inlined NonNull::<[bool; 0]>::dangling) { let mut _6: std::num::NonZero; scope 6 { - scope 8 (inlined std::ptr::Alignment::as_nonzero) { - } - scope 9 (inlined NonNull::<[bool; 0]>::without_provenance) { - let _7: *const [bool; 0]; - scope 
10 { + scope 7 { + scope 9 { } - scope 11 (inlined NonZero::::get) { + } + scope 8 { + scope 12 (inlined std::ptr::Alignment::as_nonzero) { } - scope 12 (inlined std::ptr::without_provenance::<[bool; 0]>) { - scope 13 (inlined without_provenance_mut::<[bool; 0]>) { + scope 13 (inlined NonNull::<[bool; 0]>::without_provenance) { + let _7: *const [bool; 0]; + scope 14 { + } + scope 15 (inlined NonZero::::get) { + } + scope 16 (inlined std::ptr::without_provenance::<[bool; 0]>) { + scope 17 (inlined without_provenance_mut::<[bool; 0]>) { + } } } } + scope 11 (inlined std::ptr::Alignment::of::<[bool; 0]>) { + } } - scope 7 (inlined std::ptr::Alignment::of::<[bool; 0]>) { + scope 10 (inlined core::contracts::build_check_ensures::, {closure@NonNull<[bool; 0]>::dangling::{closure#0}}>) { } } } diff --git a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.32bit.panic-abort.diff b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.32bit.panic-abort.diff index 1f9cf6d6aca83..f54afb6f139ea 100644 --- a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.32bit.panic-abort.diff +++ b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.32bit.panic-abort.diff @@ -18,21 +18,29 @@ scope 5 (inlined NonNull::<[bool; 0]>::dangling) { let mut _6: std::num::NonZero; scope 6 { - scope 8 (inlined std::ptr::Alignment::as_nonzero) { - } - scope 9 (inlined NonNull::<[bool; 0]>::without_provenance) { - let _7: *const [bool; 0]; - scope 10 { + scope 7 { + scope 9 { } - scope 11 (inlined NonZero::::get) { + } + scope 8 { + scope 12 (inlined std::ptr::Alignment::as_nonzero) { } - scope 12 (inlined std::ptr::without_provenance::<[bool; 0]>) { - scope 13 (inlined without_provenance_mut::<[bool; 0]>) { + scope 13 (inlined NonNull::<[bool; 0]>::without_provenance) { + let _7: *const [bool; 0]; + scope 14 { + } + scope 15 (inlined NonZero::::get) { + } + scope 16 (inlined std::ptr::without_provenance::<[bool; 0]>) { + scope 17 (inlined without_provenance_mut::<[bool; 0]>) { + } } } } + scope 11 (inlined std::ptr::Alignment::of::<[bool; 0]>) { + } } - scope 7 (inlined std::ptr::Alignment::of::<[bool; 0]>) { + scope 10 (inlined core::contracts::build_check_ensures::, {closure@NonNull<[bool; 0]>::dangling::{closure#0}}>) { } } } diff --git a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.32bit.panic-unwind.diff b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.32bit.panic-unwind.diff index a8760285fac11..8418071f44140 100644 --- a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.32bit.panic-unwind.diff +++ b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.32bit.panic-unwind.diff @@ -18,21 +18,29 @@ scope 5 (inlined NonNull::<[bool; 0]>::dangling) { let mut _6: std::num::NonZero; scope 6 { - scope 8 (inlined std::ptr::Alignment::as_nonzero) { - } - scope 9 (inlined NonNull::<[bool; 0]>::without_provenance) { - let _7: *const [bool; 0]; - scope 10 { + scope 7 { + scope 9 { } - scope 11 (inlined NonZero::::get) { + } + scope 8 { + scope 12 (inlined std::ptr::Alignment::as_nonzero) { } - scope 12 (inlined std::ptr::without_provenance::<[bool; 0]>) { - scope 13 (inlined without_provenance_mut::<[bool; 0]>) { + scope 13 (inlined NonNull::<[bool; 0]>::without_provenance) { + let _7: *const [bool; 0]; + scope 14 { + } + scope 15 (inlined NonZero::::get) { + } + scope 16 (inlined std::ptr::without_provenance::<[bool; 0]>) { + scope 17 (inlined without_provenance_mut::<[bool; 0]>) { + } } } } + scope 11 (inlined 
std::ptr::Alignment::of::<[bool; 0]>) { + } } - scope 7 (inlined std::ptr::Alignment::of::<[bool; 0]>) { + scope 10 (inlined core::contracts::build_check_ensures::, {closure@NonNull<[bool; 0]>::dangling::{closure#0}}>) { } } } diff --git a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.64bit.panic-abort.diff b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.64bit.panic-abort.diff index c398ae70a1a3e..8f11248e753b4 100644 --- a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.64bit.panic-abort.diff +++ b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.64bit.panic-abort.diff @@ -18,21 +18,29 @@ scope 5 (inlined NonNull::<[bool; 0]>::dangling) { let mut _6: std::num::NonZero; scope 6 { - scope 8 (inlined std::ptr::Alignment::as_nonzero) { - } - scope 9 (inlined NonNull::<[bool; 0]>::without_provenance) { - let _7: *const [bool; 0]; - scope 10 { + scope 7 { + scope 9 { } - scope 11 (inlined NonZero::::get) { + } + scope 8 { + scope 12 (inlined std::ptr::Alignment::as_nonzero) { } - scope 12 (inlined std::ptr::without_provenance::<[bool; 0]>) { - scope 13 (inlined without_provenance_mut::<[bool; 0]>) { + scope 13 (inlined NonNull::<[bool; 0]>::without_provenance) { + let _7: *const [bool; 0]; + scope 14 { + } + scope 15 (inlined NonZero::::get) { + } + scope 16 (inlined std::ptr::without_provenance::<[bool; 0]>) { + scope 17 (inlined without_provenance_mut::<[bool; 0]>) { + } } } } + scope 11 (inlined std::ptr::Alignment::of::<[bool; 0]>) { + } } - scope 7 (inlined std::ptr::Alignment::of::<[bool; 0]>) { + scope 10 (inlined core::contracts::build_check_ensures::, {closure@NonNull<[bool; 0]>::dangling::{closure#0}}>) { } } } diff --git a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.64bit.panic-unwind.diff b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.64bit.panic-unwind.diff index 02934c02587d2..04a1733d917dc 100644 --- a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.64bit.panic-unwind.diff +++ b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.64bit.panic-unwind.diff @@ -18,21 +18,29 @@ scope 5 (inlined NonNull::<[bool; 0]>::dangling) { let mut _6: std::num::NonZero; scope 6 { - scope 8 (inlined std::ptr::Alignment::as_nonzero) { - } - scope 9 (inlined NonNull::<[bool; 0]>::without_provenance) { - let _7: *const [bool; 0]; - scope 10 { + scope 7 { + scope 9 { } - scope 11 (inlined NonZero::::get) { + } + scope 8 { + scope 12 (inlined std::ptr::Alignment::as_nonzero) { } - scope 12 (inlined std::ptr::without_provenance::<[bool; 0]>) { - scope 13 (inlined without_provenance_mut::<[bool; 0]>) { + scope 13 (inlined NonNull::<[bool; 0]>::without_provenance) { + let _7: *const [bool; 0]; + scope 14 { + } + scope 15 (inlined NonZero::::get) { + } + scope 16 (inlined std::ptr::without_provenance::<[bool; 0]>) { + scope 17 (inlined without_provenance_mut::<[bool; 0]>) { + } } } } + scope 11 (inlined std::ptr::Alignment::of::<[bool; 0]>) { + } } - scope 7 (inlined std::ptr::Alignment::of::<[bool; 0]>) { + scope 10 (inlined core::contracts::build_check_ensures::, {closure@NonNull<[bool; 0]>::dangling::{closure#0}}>) { } } } diff --git a/tests/mir-opt/dont_reset_cast_kind_without_updating_operand.test.GVN.32bit.panic-abort.diff b/tests/mir-opt/dont_reset_cast_kind_without_updating_operand.test.GVN.32bit.panic-abort.diff index 38beb81e1ead2..033bf9ed763e5 100644 --- a/tests/mir-opt/dont_reset_cast_kind_without_updating_operand.test.GVN.32bit.panic-abort.diff +++ 
b/tests/mir-opt/dont_reset_cast_kind_without_updating_operand.test.GVN.32bit.panic-abort.diff @@ -13,7 +13,7 @@ let mut _11: *const (); let mut _16: usize; let mut _17: usize; - let mut _27: usize; + let mut _30: usize; scope 1 { debug vp_ctx => _1; let _5: *const (); @@ -26,12 +26,12 @@ scope 4 { debug _x => _8; } - scope 18 (inlined foo) { - let mut _28: *const [()]; + scope 22 (inlined foo) { + let mut _31: *const [()]; } } - scope 16 (inlined slice_from_raw_parts::<()>) { - scope 17 (inlined std::ptr::from_raw_parts::<[()], ()>) { + scope 20 (inlined slice_from_raw_parts::<()>) { + scope 21 (inlined std::ptr::from_raw_parts::<[()], ()>) { } } } @@ -49,25 +49,36 @@ scope 7 { let _21: std::ptr::NonNull<[u8]>; scope 8 { - scope 11 (inlined NonNull::<[u8]>::as_mut_ptr) { - scope 12 (inlined NonNull::<[u8]>::as_non_null_ptr) { - scope 13 (inlined NonNull::<[u8]>::cast::) { - let mut _26: *mut [u8]; - scope 14 (inlined NonNull::<[u8]>::as_ptr) { + scope 15 (inlined NonNull::<[u8]>::as_mut_ptr) { + scope 16 (inlined NonNull::<[u8]>::as_non_null_ptr) { + scope 17 (inlined NonNull::<[u8]>::cast::) { + let mut _27: *mut [u8]; + scope 18 (inlined NonNull::<[u8]>::as_ptr) { } } } - scope 15 (inlined NonNull::::as_ptr) { + scope 19 (inlined NonNull::::as_ptr) { } } } - scope 10 (inlined ::allocate) { + scope 14 (inlined ::allocate) { } } scope 9 (inlined #[track_caller] Layout::from_size_align_unchecked) { - let mut _23: bool; - let _24: (); - let mut _25: std::ptr::Alignment; + let _23: std::option::Option<{closure@std::alloc::Layout::from_size_align_unchecked::{closure#1}}>; + let mut _24: bool; + let _25: (); + let mut _26: std::ptr::Alignment; + let mut _28: usize; + let mut _29: usize; + scope 10 { + scope 11 { + scope 12 { + } + } + } + scope 13 (inlined core::contracts::build_check_ensures::) { + } } } } @@ -94,10 +105,12 @@ StorageLive(_20); StorageLive(_21); StorageLive(_22); - StorageLive(_24); + StorageLive(_25); StorageLive(_23); - _23 = UbChecks(); - switchInt(move _23) -> [0: bb6, otherwise: bb5]; + _23 = Option::<{closure@Layout::from_size_align_unchecked::{closure#1}}>::None; + StorageLive(_24); + _24 = UbChecks(); + switchInt(move _24) -> [0: bb6, otherwise: bb5]; } bb1: { @@ -117,14 +130,14 @@ bb4: { _21 = copy ((_19 as Ok).0: std::ptr::NonNull<[u8]>); -- StorageLive(_26); +- StorageLive(_27); + nop; - _26 = copy _21 as *mut [u8] (Transmute); - _12 = copy _26 as *mut u8 (PtrToPtr); -- StorageDead(_26); + _27 = copy _21 as *mut [u8] (Transmute); + _12 = copy _27 as *mut u8 (PtrToPtr); +- StorageDead(_27); + nop; StorageDead(_19); - StorageDead(_24); + StorageDead(_25); StorageDead(_22); StorageDead(_21); StorageDead(_20); @@ -132,7 +145,7 @@ StorageDead(_17); StorageDead(_16); - _13 = copy _12 as *const () (PtrToPtr); -+ _13 = copy _26 as *const () (PtrToPtr); ++ _13 = copy _27 as *const () (PtrToPtr); _14 = NonNull::<()> { pointer: copy _13 }; _15 = Unique::<()> { pointer: copy _14, _marker: const PhantomData::<()> }; _3 = Box::<()>(move _15, const std::alloc::Global); @@ -157,21 +170,21 @@ + nop; StorageLive(_7); _7 = copy _5; - StorageLive(_27); - _27 = const 1_usize; -- _6 = *const [()] from (copy _7, copy _27); + StorageLive(_30); + _30 = const 1_usize; +- _6 = *const [()] from (copy _7, copy _30); + _6 = *const [()] from (copy _5, const 1_usize); - StorageDead(_27); + StorageDead(_30); StorageDead(_7); StorageLive(_8); StorageLive(_9); _9 = copy _6; - StorageLive(_28); -- _28 = copy _9; + StorageLive(_31); +- _31 = copy _9; - _8 = copy _9 as *mut () (PtrToPtr); -+ _28 = copy 
_6; ++ _31 = copy _6; + _8 = copy _5 as *mut () (PtrToPtr); - StorageDead(_28); + StorageDead(_31); StorageDead(_9); _0 = const (); StorageDead(_8); @@ -183,18 +196,19 @@ } bb5: { -- _24 = Layout::from_size_align_unchecked::precondition_check(copy _16, copy _17) -> [return: bb6, unwind unreachable]; -+ _24 = Layout::from_size_align_unchecked::precondition_check(const 0_usize, const 1_usize) -> [return: bb6, unwind unreachable]; +- _25 = Layout::from_size_align_unchecked::precondition_check(copy _16, copy _17) -> [return: bb6, unwind unreachable]; ++ _25 = Layout::from_size_align_unchecked::precondition_check(const 0_usize, const 1_usize) -> [return: bb6, unwind unreachable]; } bb6: { - StorageDead(_23); - StorageLive(_25); -- _25 = copy _17 as std::ptr::Alignment (Transmute); -- _18 = Layout { size: copy _16, align: move _25 }; -+ _25 = const std::ptr::Alignment(std::ptr::alignment::AlignmentEnum::_Align1Shl0); + StorageDead(_24); + StorageLive(_26); +- _26 = copy _17 as std::ptr::Alignment (Transmute); +- _18 = Layout { size: copy _16, align: move _26 }; ++ _26 = const std::ptr::Alignment(std::ptr::alignment::AlignmentEnum::_Align1Shl0); + _18 = const Layout {{ size: 0_usize, align: std::ptr::Alignment(std::ptr::alignment::AlignmentEnum::_Align1Shl0) }}; - StorageDead(_25); + StorageDead(_26); + StorageDead(_23); StorageLive(_19); - _19 = std::alloc::Global::alloc_impl(const alloc::alloc::exchange_malloc::promoted[0], copy _18, const false) -> [return: bb7, unwind unreachable]; + _19 = std::alloc::Global::alloc_impl(const alloc::alloc::exchange_malloc::promoted[0], const Layout {{ size: 0_usize, align: std::ptr::Alignment(std::ptr::alignment::AlignmentEnum::_Align1Shl0) }}, const false) -> [return: bb7, unwind unreachable]; diff --git a/tests/mir-opt/dont_reset_cast_kind_without_updating_operand.test.GVN.64bit.panic-abort.diff b/tests/mir-opt/dont_reset_cast_kind_without_updating_operand.test.GVN.64bit.panic-abort.diff index 047579cdb5094..fefe5b3a4c74f 100644 --- a/tests/mir-opt/dont_reset_cast_kind_without_updating_operand.test.GVN.64bit.panic-abort.diff +++ b/tests/mir-opt/dont_reset_cast_kind_without_updating_operand.test.GVN.64bit.panic-abort.diff @@ -13,7 +13,7 @@ let mut _11: *const (); let mut _16: usize; let mut _17: usize; - let mut _27: usize; + let mut _30: usize; scope 1 { debug vp_ctx => _1; let _5: *const (); @@ -26,12 +26,12 @@ scope 4 { debug _x => _8; } - scope 18 (inlined foo) { - let mut _28: *const [()]; + scope 22 (inlined foo) { + let mut _31: *const [()]; } } - scope 16 (inlined slice_from_raw_parts::<()>) { - scope 17 (inlined std::ptr::from_raw_parts::<[()], ()>) { + scope 20 (inlined slice_from_raw_parts::<()>) { + scope 21 (inlined std::ptr::from_raw_parts::<[()], ()>) { } } } @@ -49,25 +49,36 @@ scope 7 { let _21: std::ptr::NonNull<[u8]>; scope 8 { - scope 11 (inlined NonNull::<[u8]>::as_mut_ptr) { - scope 12 (inlined NonNull::<[u8]>::as_non_null_ptr) { - scope 13 (inlined NonNull::<[u8]>::cast::) { - let mut _26: *mut [u8]; - scope 14 (inlined NonNull::<[u8]>::as_ptr) { + scope 15 (inlined NonNull::<[u8]>::as_mut_ptr) { + scope 16 (inlined NonNull::<[u8]>::as_non_null_ptr) { + scope 17 (inlined NonNull::<[u8]>::cast::) { + let mut _27: *mut [u8]; + scope 18 (inlined NonNull::<[u8]>::as_ptr) { } } } - scope 15 (inlined NonNull::::as_ptr) { + scope 19 (inlined NonNull::::as_ptr) { } } } - scope 10 (inlined ::allocate) { + scope 14 (inlined ::allocate) { } } scope 9 (inlined #[track_caller] Layout::from_size_align_unchecked) { - let mut _23: bool; - let _24: (); 
- let mut _25: std::ptr::Alignment;
+ let _23: std::option::Option<{closure@std::alloc::Layout::from_size_align_unchecked::{closure#1}}>;
+ let mut _24: bool;
+ let _25: ();
+ let mut _26: std::ptr::Alignment;
+ let mut _28: usize;
+ let mut _29: usize;
+ scope 10 {
+ scope 11 {
+ scope 12 {
+ }
+ }
+ }
+ scope 13 (inlined core::contracts::build_check_ensures::) {
+ }
 }
 }
 }
@@ -94,10 +105,12 @@
 StorageLive(_20);
 StorageLive(_21);
 StorageLive(_22);
- StorageLive(_24);
+ StorageLive(_25);
 StorageLive(_23);
- _23 = UbChecks();
- switchInt(move _23) -> [0: bb6, otherwise: bb5];
+ _23 = Option::<{closure@Layout::from_size_align_unchecked::{closure#1}}>::None;
+ StorageLive(_24);
+ _24 = UbChecks();
+ switchInt(move _24) -> [0: bb6, otherwise: bb5];
 }

 bb1: {
@@ -117,14 +130,14 @@
 bb4: {
 _21 = copy ((_19 as Ok).0: std::ptr::NonNull<[u8]>);
-- StorageLive(_26);
+- StorageLive(_27);
 + nop;
- _26 = copy _21 as *mut [u8] (Transmute);
- _12 = copy _26 as *mut u8 (PtrToPtr);
-- StorageDead(_26);
+ _27 = copy _21 as *mut [u8] (Transmute);
+ _12 = copy _27 as *mut u8 (PtrToPtr);
+- StorageDead(_27);
 + nop;
 StorageDead(_19);
- StorageDead(_24);
+ StorageDead(_25);
 StorageDead(_22);
 StorageDead(_21);
 StorageDead(_20);
@@ -132,7 +145,7 @@
 StorageDead(_17);
 StorageDead(_16);
- _13 = copy _12 as *const () (PtrToPtr);
++ _13 = copy _27 as *const () (PtrToPtr);
 _14 = NonNull::<()> { pointer: copy _13 };
 _15 = Unique::<()> { pointer: copy _14, _marker: const PhantomData::<()> };
 _3 = Box::<()>(move _15, const std::alloc::Global);
@@ -157,21 +170,21 @@
 + nop;
 StorageLive(_7);
 _7 = copy _5;
- StorageLive(_27);
- _27 = const 1_usize;
-- _6 = *const [()] from (copy _7, copy _27);
+ StorageLive(_30);
+ _30 = const 1_usize;
+- _6 = *const [()] from (copy _7, copy _30);
 + _6 = *const [()] from (copy _5, const 1_usize);
- StorageDead(_27);
+ StorageDead(_30);
 StorageDead(_7);
 StorageLive(_8);
 StorageLive(_9);
 _9 = copy _6;
- StorageLive(_28);
-- _28 = copy _9;
- _8 = copy _9 as *mut () (PtrToPtr);
+ StorageLive(_31);
+- _31 = copy _9;
 + _8 = copy _5 as *mut () (PtrToPtr);
- StorageDead(_28);
+ StorageDead(_31);
 StorageDead(_9);
 _0 = const ();
 StorageDead(_8);
@@ -183,18 +196,19 @@
 }

 bb5: {
-- _24 = Layout::from_size_align_unchecked::precondition_check(copy _16, copy _17) -> [return: bb6, unwind unreachable];
-+ _24 = Layout::from_size_align_unchecked::precondition_check(const 0_usize, const 1_usize) -> [return: bb6, unwind unreachable];
+- _25 = Layout::from_size_align_unchecked::precondition_check(copy _16, copy _17) -> [return: bb6, unwind unreachable];
++ _25 = Layout::from_size_align_unchecked::precondition_check(const 0_usize, const 1_usize) -> [return: bb6, unwind unreachable];
 }

 bb6: {
- StorageDead(_23);
- StorageLive(_25);
-- _25 = copy _17 as std::ptr::Alignment (Transmute);
-- _18 = Layout { size: copy _16, align: move _25 };
-+ _25 = const std::ptr::Alignment(std::ptr::alignment::AlignmentEnum::_Align1Shl0);
+ StorageDead(_24);
+ StorageLive(_26);
+- _26 = copy _17 as std::ptr::Alignment (Transmute);
+- _18 = Layout { size: copy _16, align: move _26 };
++ _26 = const std::ptr::Alignment(std::ptr::alignment::AlignmentEnum::_Align1Shl0);
 + _18 = const Layout {{ size: 0_usize, align: std::ptr::Alignment(std::ptr::alignment::AlignmentEnum::_Align1Shl0) }};
- StorageDead(_25);
+ StorageDead(_26);
+ StorageDead(_23);
 StorageLive(_19);
- _19 = std::alloc::Global::alloc_impl(const alloc::alloc::exchange_malloc::promoted[0], copy _18, const false) -> [return: bb7, unwind unreachable];
+ _19 = std::alloc::Global::alloc_impl(const alloc::alloc::exchange_malloc::promoted[0], const Layout {{ size: 0_usize, align: std::ptr::Alignment(std::ptr::alignment::AlignmentEnum::_Align1Shl0) }}, const false) -> [return: bb7, unwind unreachable];
diff --git a/tests/mir-opt/gvn_ptr_eq_with_constant.main.GVN.diff b/tests/mir-opt/gvn_ptr_eq_with_constant.main.GVN.diff
index f56af33ea603f..7bc2a7836ad81 100644
--- a/tests/mir-opt/gvn_ptr_eq_with_constant.main.GVN.diff
+++ b/tests/mir-opt/gvn_ptr_eq_with_constant.main.GVN.diff
@@ -9,30 +9,38 @@
 scope 2 (inlined NonNull::::dangling) {
 let mut _3: std::num::NonZero;
 scope 3 {
- scope 5 (inlined std::ptr::Alignment::as_nonzero) {
- }
- scope 6 (inlined NonNull::::without_provenance) {
- scope 7 {
+ scope 4 {
+ scope 6 {
 }
- scope 8 (inlined NonZero::::get) {
+ }
+ scope 5 {
+ scope 9 (inlined std::ptr::Alignment::as_nonzero) {
 }
- scope 9 (inlined std::ptr::without_provenance::) {
- scope 10 (inlined without_provenance_mut::) {
+ scope 10 (inlined NonNull::::without_provenance) {
+ scope 11 {
+ }
+ scope 12 (inlined NonZero::::get) {
+ }
+ scope 13 (inlined std::ptr::without_provenance::) {
+ scope 14 (inlined without_provenance_mut::) {
+ }
 }
 }
 }
+ scope 8 (inlined std::ptr::Alignment::of::) {
+ }
 }
- scope 4 (inlined std::ptr::Alignment::of::) {
+ scope 7 (inlined core::contracts::build_check_ensures::, {closure@NonNull::dangling::{closure#0}}>) {
 }
 }
- scope 11 (inlined NonNull::::as_ptr) {
+ scope 15 (inlined NonNull::::as_ptr) {
 }
 }
- scope 12 (inlined Foo::::cmp_ptr) {
+ scope 16 (inlined Foo::::cmp_ptr) {
 let mut _4: *const u8;
 let mut _5: *mut u8;
 let mut _6: *const u8;
- scope 13 (inlined std::ptr::eq::) {
+ scope 17 (inlined std::ptr::eq::) {
 }
 }
diff --git a/tests/mir-opt/pre-codegen/drop_boxed_slice.generic_in_place.PreCodegen.after.32bit.panic-abort.mir b/tests/mir-opt/pre-codegen/drop_boxed_slice.generic_in_place.PreCodegen.after.32bit.panic-abort.mir
index 791d6b71a6f78..7eeb387875542 100644
--- a/tests/mir-opt/pre-codegen/drop_boxed_slice.generic_in_place.PreCodegen.after.32bit.panic-abort.mir
+++ b/tests/mir-opt/pre-codegen/drop_boxed_slice.generic_in_place.PreCodegen.after.32bit.panic-abort.mir
@@ -12,50 +12,74 @@ fn generic_in_place(_1: *mut Box<[T]>) -> () {
 scope 3 {
 let _8: std::ptr::alignment::AlignmentEnum;
 scope 4 {
- scope 12 (inlined Layout::size) {
+ scope 11 (inlined Layout::size) {
 }
- scope 13 (inlined Unique::<[T]>::cast::) {
- scope 14 (inlined NonNull::<[T]>::cast::) {
- scope 15 (inlined NonNull::<[T]>::as_ptr) {
+ scope 12 (inlined Unique::<[T]>::cast::) {
+ scope 13 (inlined NonNull::<[T]>::cast::) {
+ scope 14 (inlined NonNull::<[T]>::as_ptr) {
 }
 }
 }
- scope 16 (inlined as From>>::from) {
- scope 17 (inlined Unique::::as_non_null_ptr) {
+ scope 15 (inlined as From>>::from) {
+ scope 16 (inlined Unique::::as_non_null_ptr) {
 }
 }
- scope 18 (inlined ::deallocate) {
+ scope 30 (inlined ::deallocate) {
 let mut _9: *mut u8;
- scope 19 (inlined Layout::size) {
+ scope 31 (inlined Layout::size) {
 }
- scope 20 (inlined NonNull::::as_ptr) {
+ scope 32 (inlined NonNull::::as_ptr) {
 }
- scope 21 (inlined std::alloc::dealloc) {
+ scope 33 (inlined std::alloc::dealloc) {
 let mut _10: usize;
- scope 22 (inlined Layout::size) {
+ scope 34 (inlined Layout::size) {
 }
- scope 23 (inlined Layout::align) {
- scope 24 (inlined std::ptr::Alignment::as_usize) {
+ scope 35 (inlined Layout::align) {
+ scope 36 (inlined std::ptr::Alignment::as_usize) {
 }
 }
 }
 }
 }
 scope 5 (inlined Unique::<[T]>::as_ptr) {
- scope 6 (inlined NonNull::<[T]>::as_ptr) {
+ scope 6 {
+ scope 7 {
+ scope 8 {
+ }
+ }
+ scope 10 (inlined NonNull::<[T]>::as_ptr) {
+ }
+ }
+ scope 9 (inlined core::contracts::build_check_ensures::<*mut [T], {closure@Unique<[T]>::as_ptr::{closure#0}}>) {
 }
 }
- scope 7 (inlined Layout::for_value_raw::<[T]>) {
+ scope 17 (inlined Layout::for_value_raw::<[T]>) {
 let mut _5: usize;
 let mut _6: usize;
- scope 8 {
- scope 11 (inlined #[track_caller] Layout::from_size_align_unchecked) {
- let mut _7: std::ptr::Alignment;
+ scope 18 {
+ scope 19 {
+ scope 21 {
+ }
+ }
+ scope 20 {
+ scope 25 (inlined #[track_caller] Layout::from_size_align_unchecked) {
+ let mut _7: std::ptr::Alignment;
+ scope 26 {
+ scope 27 {
+ scope 28 {
+ }
+ }
+ }
+ scope 29 (inlined core::contracts::build_check_ensures::) {
+ }
+ }
+ }
+ scope 23 (inlined size_of_val_raw::<[T]>) {
+ }
+ scope 24 (inlined align_of_val_raw::<[T]>) {
 }
 }
- scope 9 (inlined size_of_val_raw::<[T]>) {
- }
- scope 10 (inlined align_of_val_raw::<[T]>) {
+ scope 22 (inlined core::contracts::build_check_ensures::::{closure#0}}>) {
 }
 }
 }
diff --git a/tests/mir-opt/pre-codegen/drop_boxed_slice.generic_in_place.PreCodegen.after.32bit.panic-unwind.mir b/tests/mir-opt/pre-codegen/drop_boxed_slice.generic_in_place.PreCodegen.after.32bit.panic-unwind.mir
index 791d6b71a6f78..7eeb387875542 100644
--- a/tests/mir-opt/pre-codegen/drop_boxed_slice.generic_in_place.PreCodegen.after.32bit.panic-unwind.mir
+++ b/tests/mir-opt/pre-codegen/drop_boxed_slice.generic_in_place.PreCodegen.after.32bit.panic-unwind.mir
@@ -12,50 +12,74 @@ fn generic_in_place(_1: *mut Box<[T]>) -> () {
 scope 3 {
 let _8: std::ptr::alignment::AlignmentEnum;
 scope 4 {
- scope 12 (inlined Layout::size) {
+ scope 11 (inlined Layout::size) {
 }
- scope 13 (inlined Unique::<[T]>::cast::) {
- scope 14 (inlined NonNull::<[T]>::cast::) {
- scope 15 (inlined NonNull::<[T]>::as_ptr) {
+ scope 12 (inlined Unique::<[T]>::cast::) {
+ scope 13 (inlined NonNull::<[T]>::cast::) {
+ scope 14 (inlined NonNull::<[T]>::as_ptr) {
 }
 }
 }
- scope 16 (inlined as From>>::from) {
- scope 17 (inlined Unique::::as_non_null_ptr) {
+ scope 15 (inlined as From>>::from) {
+ scope 16 (inlined Unique::::as_non_null_ptr) {
 }
 }
- scope 18 (inlined ::deallocate) {
+ scope 30 (inlined ::deallocate) {
 let mut _9: *mut u8;
- scope 19 (inlined Layout::size) {
+ scope 31 (inlined Layout::size) {
 }
- scope 20 (inlined NonNull::::as_ptr) {
+ scope 32 (inlined NonNull::::as_ptr) {
 }
- scope 21 (inlined std::alloc::dealloc) {
+ scope 33 (inlined std::alloc::dealloc) {
 let mut _10: usize;
- scope 22 (inlined Layout::size) {
+ scope 34 (inlined Layout::size) {
 }
- scope 23 (inlined Layout::align) {
- scope 24 (inlined std::ptr::Alignment::as_usize) {
+ scope 35 (inlined Layout::align) {
+ scope 36 (inlined std::ptr::Alignment::as_usize) {
 }
 }
 }
 }
 }
 scope 5 (inlined Unique::<[T]>::as_ptr) {
- scope 6 (inlined NonNull::<[T]>::as_ptr) {
+ scope 6 {
+ scope 7 {
+ scope 8 {
+ }
+ }
+ scope 10 (inlined NonNull::<[T]>::as_ptr) {
+ }
+ }
+ scope 9 (inlined core::contracts::build_check_ensures::<*mut [T], {closure@Unique<[T]>::as_ptr::{closure#0}}>) {
 }
 }
- scope 7 (inlined Layout::for_value_raw::<[T]>) {
+ scope 17 (inlined Layout::for_value_raw::<[T]>) {
 let mut _5: usize;
 let mut _6: usize;
- scope 8 {
- scope 11 (inlined #[track_caller] Layout::from_size_align_unchecked) {
- let mut _7: std::ptr::Alignment;
+ scope 18 {
+ scope 19 {
+ scope 21 {
+ }
+ }
+ scope 20 {
+ scope 25 (inlined #[track_caller] Layout::from_size_align_unchecked) {
+ let mut _7: std::ptr::Alignment;
+ scope 26 {
+ scope 27 {
+ scope 28 {
+ }
+ }
+ }
+ scope 29 (inlined core::contracts::build_check_ensures::) {
+ }
+ }
+ }
+ scope 23 (inlined size_of_val_raw::<[T]>) {
+ }
+ scope 24 (inlined align_of_val_raw::<[T]>) {
 }
 }
- scope 9 (inlined size_of_val_raw::<[T]>) {
- }
- scope 10 (inlined align_of_val_raw::<[T]>) {
+ scope 22 (inlined core::contracts::build_check_ensures::::{closure#0}}>) {
 }
 }
 }
diff --git a/tests/mir-opt/pre-codegen/drop_boxed_slice.generic_in_place.PreCodegen.after.64bit.panic-abort.mir b/tests/mir-opt/pre-codegen/drop_boxed_slice.generic_in_place.PreCodegen.after.64bit.panic-abort.mir
index 791d6b71a6f78..7eeb387875542 100644
--- a/tests/mir-opt/pre-codegen/drop_boxed_slice.generic_in_place.PreCodegen.after.64bit.panic-abort.mir
+++ b/tests/mir-opt/pre-codegen/drop_boxed_slice.generic_in_place.PreCodegen.after.64bit.panic-abort.mir
@@ -12,50 +12,74 @@ fn generic_in_place(_1: *mut Box<[T]>) -> () {
 scope 3 {
 let _8: std::ptr::alignment::AlignmentEnum;
 scope 4 {
- scope 12 (inlined Layout::size) {
+ scope 11 (inlined Layout::size) {
 }
- scope 13 (inlined Unique::<[T]>::cast::) {
- scope 14 (inlined NonNull::<[T]>::cast::) {
- scope 15 (inlined NonNull::<[T]>::as_ptr) {
+ scope 12 (inlined Unique::<[T]>::cast::) {
+ scope 13 (inlined NonNull::<[T]>::cast::) {
+ scope 14 (inlined NonNull::<[T]>::as_ptr) {
 }
 }
 }
- scope 16 (inlined as From>>::from) {
- scope 17 (inlined Unique::::as_non_null_ptr) {
+ scope 15 (inlined as From>>::from) {
+ scope 16 (inlined Unique::::as_non_null_ptr) {
 }
 }
- scope 18 (inlined ::deallocate) {
+ scope 30 (inlined ::deallocate) {
 let mut _9: *mut u8;
- scope 19 (inlined Layout::size) {
+ scope 31 (inlined Layout::size) {
 }
- scope 20 (inlined NonNull::::as_ptr) {
+ scope 32 (inlined NonNull::::as_ptr) {
 }
- scope 21 (inlined std::alloc::dealloc) {
+ scope 33 (inlined std::alloc::dealloc) {
 let mut _10: usize;
- scope 22 (inlined Layout::size) {
+ scope 34 (inlined Layout::size) {
 }
- scope 23 (inlined Layout::align) {
- scope 24 (inlined std::ptr::Alignment::as_usize) {
+ scope 35 (inlined Layout::align) {
+ scope 36 (inlined std::ptr::Alignment::as_usize) {
 }
 }
 }
 }
 }
 scope 5 (inlined Unique::<[T]>::as_ptr) {
- scope 6 (inlined NonNull::<[T]>::as_ptr) {
+ scope 6 {
+ scope 7 {
+ scope 8 {
+ }
+ }
+ scope 10 (inlined NonNull::<[T]>::as_ptr) {
+ }
+ }
+ scope 9 (inlined core::contracts::build_check_ensures::<*mut [T], {closure@Unique<[T]>::as_ptr::{closure#0}}>) {
 }
 }
- scope 7 (inlined Layout::for_value_raw::<[T]>) {
+ scope 17 (inlined Layout::for_value_raw::<[T]>) {
 let mut _5: usize;
 let mut _6: usize;
- scope 8 {
- scope 11 (inlined #[track_caller] Layout::from_size_align_unchecked) {
- let mut _7: std::ptr::Alignment;
+ scope 18 {
+ scope 19 {
+ scope 21 {
+ }
+ }
+ scope 20 {
+ scope 25 (inlined #[track_caller] Layout::from_size_align_unchecked) {
+ let mut _7: std::ptr::Alignment;
+ scope 26 {
+ scope 27 {
+ scope 28 {
+ }
+ }
+ }
+ scope 29 (inlined core::contracts::build_check_ensures::) {
+ }
+ }
+ }
+ scope 23 (inlined size_of_val_raw::<[T]>) {
+ }
+ scope 24 (inlined align_of_val_raw::<[T]>) {
 }
 }
- scope 9 (inlined size_of_val_raw::<[T]>) {
- }
- scope 10 (inlined align_of_val_raw::<[T]>) {
+ scope 22 (inlined core::contracts::build_check_ensures::::{closure#0}}>) {
 }
 }
 }
diff --git a/tests/mir-opt/pre-codegen/drop_boxed_slice.generic_in_place.PreCodegen.after.64bit.panic-unwind.mir b/tests/mir-opt/pre-codegen/drop_boxed_slice.generic_in_place.PreCodegen.after.64bit.panic-unwind.mir
index 791d6b71a6f78..7eeb387875542 100644
--- a/tests/mir-opt/pre-codegen/drop_boxed_slice.generic_in_place.PreCodegen.after.64bit.panic-unwind.mir
+++ b/tests/mir-opt/pre-codegen/drop_boxed_slice.generic_in_place.PreCodegen.after.64bit.panic-unwind.mir
@@ -12,50 +12,74 @@ fn generic_in_place(_1: *mut Box<[T]>) -> () {
 scope 3 {
 let _8: std::ptr::alignment::AlignmentEnum;
 scope 4 {
- scope 12 (inlined Layout::size) {
+ scope 11 (inlined Layout::size) {
 }
- scope 13 (inlined Unique::<[T]>::cast::) {
- scope 14 (inlined NonNull::<[T]>::cast::) {
- scope 15 (inlined NonNull::<[T]>::as_ptr) {
+ scope 12 (inlined Unique::<[T]>::cast::) {
+ scope 13 (inlined NonNull::<[T]>::cast::) {
+ scope 14 (inlined NonNull::<[T]>::as_ptr) {
 }
 }
 }
- scope 16 (inlined as From>>::from) {
- scope 17 (inlined Unique::::as_non_null_ptr) {
+ scope 15 (inlined as From>>::from) {
+ scope 16 (inlined Unique::::as_non_null_ptr) {
 }
 }
- scope 18 (inlined ::deallocate) {
+ scope 30 (inlined ::deallocate) {
 let mut _9: *mut u8;
- scope 19 (inlined Layout::size) {
+ scope 31 (inlined Layout::size) {
 }
- scope 20 (inlined NonNull::::as_ptr) {
+ scope 32 (inlined NonNull::::as_ptr) {
 }
- scope 21 (inlined std::alloc::dealloc) {
+ scope 33 (inlined std::alloc::dealloc) {
 let mut _10: usize;
- scope 22 (inlined Layout::size) {
+ scope 34 (inlined Layout::size) {
 }
- scope 23 (inlined Layout::align) {
- scope 24 (inlined std::ptr::Alignment::as_usize) {
+ scope 35 (inlined Layout::align) {
+ scope 36 (inlined std::ptr::Alignment::as_usize) {
 }
 }
 }
 }
 }
 scope 5 (inlined Unique::<[T]>::as_ptr) {
- scope 6 (inlined NonNull::<[T]>::as_ptr) {
+ scope 6 {
+ scope 7 {
+ scope 8 {
+ }
+ }
+ scope 10 (inlined NonNull::<[T]>::as_ptr) {
+ }
+ }
+ scope 9 (inlined core::contracts::build_check_ensures::<*mut [T], {closure@Unique<[T]>::as_ptr::{closure#0}}>) {
 }
 }
- scope 7 (inlined Layout::for_value_raw::<[T]>) {
+ scope 17 (inlined Layout::for_value_raw::<[T]>) {
 let mut _5: usize;
 let mut _6: usize;
- scope 8 {
- scope 11 (inlined #[track_caller] Layout::from_size_align_unchecked) {
- let mut _7: std::ptr::Alignment;
+ scope 18 {
+ scope 19 {
+ scope 21 {
+ }
+ }
+ scope 20 {
+ scope 25 (inlined #[track_caller] Layout::from_size_align_unchecked) {
+ let mut _7: std::ptr::Alignment;
+ scope 26 {
+ scope 27 {
+ scope 28 {
+ }
+ }
+ }
+ scope 29 (inlined core::contracts::build_check_ensures::) {
+ }
+ }
+ }
+ scope 23 (inlined size_of_val_raw::<[T]>) {
+ }
+ scope 24 (inlined align_of_val_raw::<[T]>) {
 }
 }
- scope 9 (inlined size_of_val_raw::<[T]>) {
- }
- scope 10 (inlined align_of_val_raw::<[T]>) {
+ scope 22 (inlined core::contracts::build_check_ensures::::{closure#0}}>) {
 }
 }
 }
diff --git a/tests/mir-opt/pre-codegen/loops.vec_move.PreCodegen.after.mir b/tests/mir-opt/pre-codegen/loops.vec_move.PreCodegen.after.mir
index 4260ec3eaedf1..b27120851f587 100644
--- a/tests/mir-opt/pre-codegen/loops.vec_move.PreCodegen.after.mir
+++ b/tests/mir-opt/pre-codegen/loops.vec_move.PreCodegen.after.mir
@@ -64,6 +64,14 @@ fn vec_move(_1: Vec) -> () {
 let mut _20: core::num::niche_types::UsizeNoHighBit;
 scope 43 (inlined core::num::niche_types::UsizeNoHighBit::as_inner) {
 debug self => _20;
+ scope 44 {
+ scope 45 {
+ scope 46 {
+ }
+ }
+ }
+ scope 47 (inlined core::contracts::build_check_ensures::) {
+ }
 }
 }
 }
diff --git a/tests/mir-opt/pre-codegen/slice_iter.reverse_loop.PreCodegen.after.panic-abort.mir b/tests/mir-opt/pre-codegen/slice_iter.reverse_loop.PreCodegen.after.panic-abort.mir
index 3009be3f9dc67..eaa07115fee6e 100644
--- a/tests/mir-opt/pre-codegen/slice_iter.reverse_loop.PreCodegen.after.panic-abort.mir
+++ b/tests/mir-opt/pre-codegen/slice_iter.reverse_loop.PreCodegen.after.panic-abort.mir
@@ -60,15 +60,23 @@ fn reverse_loop(_1: &[T], _2: impl Fn(&T)) -> () {
 scope 32 {
 scope 35 (inlined NonNull::::sub) {
 scope 36 (inlined #[track_caller] core::num::::unchecked_neg) {
- scope 37 (inlined core::ub_checks::check_language_ub) {
- scope 38 (inlined core::ub_checks::check_language_ub::runtime) {
+ scope 37 {
+ scope 38 {
+ scope 39 {
+ }
 }
+ scope 41 (inlined core::ub_checks::check_language_ub) {
+ scope 42 (inlined core::ub_checks::check_language_ub::runtime) {
+ }
+ }
+ }
+ scope 40 (inlined core::contracts::build_check_ensures::::unchecked_neg::{closure#1}}>) {
 }
 }
- scope 39 (inlined NonNull::::offset) {
+ scope 43 (inlined NonNull::::offset) {
 let mut _24: *const T;
 let mut _25: *const T;
- scope 40 (inlined NonNull::::as_ptr) {
+ scope 44 (inlined NonNull::::as_ptr) {
 }
 }
 }
@@ -79,11 +87,11 @@ fn reverse_loop(_1: &[T], _2: impl Fn(&T)) -> () {
 }
 }
 }
- scope 41 (inlined NonNull::::as_ref::<'_>) {
+ scope 45 (inlined NonNull::::as_ref::<'_>) {
 let _31: *const T;
- scope 42 (inlined NonNull::::as_ptr) {
+ scope 46 (inlined NonNull::::as_ptr) {
 }
- scope 43 (inlined std::ptr::mut_ptr::::cast_const) {
+ scope 47 (inlined std::ptr::mut_ptr::::cast_const) {
 }
 }
 }
diff --git a/tests/mir-opt/pre-codegen/slice_iter.reverse_loop.PreCodegen.after.panic-unwind.mir b/tests/mir-opt/pre-codegen/slice_iter.reverse_loop.PreCodegen.after.panic-unwind.mir
index e40bff5ea3504..32d5152a3cc36 100644
--- a/tests/mir-opt/pre-codegen/slice_iter.reverse_loop.PreCodegen.after.panic-unwind.mir
+++ b/tests/mir-opt/pre-codegen/slice_iter.reverse_loop.PreCodegen.after.panic-unwind.mir
@@ -60,15 +60,23 @@ fn reverse_loop(_1: &[T], _2: impl Fn(&T)) -> () {
 scope 32 {
 scope 35 (inlined NonNull::::sub) {
 scope 36 (inlined #[track_caller] core::num::::unchecked_neg) {
- scope 37 (inlined core::ub_checks::check_language_ub) {
- scope 38 (inlined core::ub_checks::check_language_ub::runtime) {
+ scope 37 {
+ scope 38 {
+ scope 39 {
+ }
 }
+ scope 41 (inlined core::ub_checks::check_language_ub) {
+ scope 42 (inlined core::ub_checks::check_language_ub::runtime) {
+ }
+ }
+ }
+ scope 40 (inlined core::contracts::build_check_ensures::::unchecked_neg::{closure#1}}>) {
 }
 }
- scope 39 (inlined NonNull::::offset) {
+ scope 43 (inlined NonNull::::offset) {
 let mut _24: *const T;
 let mut _25: *const T;
- scope 40 (inlined NonNull::::as_ptr) {
+ scope 44 (inlined NonNull::::as_ptr) {
 }
 }
 }
@@ -79,11 +87,11 @@ fn reverse_loop(_1: &[T], _2: impl Fn(&T)) -> () {
 }
 }
 }
- scope 41 (inlined NonNull::::as_ref::<'_>) {
+ scope 45 (inlined NonNull::::as_ref::<'_>) {
 let _31: *const T;
- scope 42 (inlined NonNull::::as_ptr) {
+ scope 46 (inlined NonNull::::as_ptr) {
 }
- scope 43 (inlined std::ptr::mut_ptr::::cast_const) {
+ scope 47 (inlined std::ptr::mut_ptr::::cast_const) {
 }
 }
 }
diff --git a/tests/mir-opt/pre-codegen/slice_iter.slice_iter_mut_next_back.PreCodegen.after.panic-abort.mir b/tests/mir-opt/pre-codegen/slice_iter.slice_iter_mut_next_back.PreCodegen.after.panic-abort.mir
index 62b738c36bf4b..453912a8643c7 100644
--- a/tests/mir-opt/pre-codegen/slice_iter.slice_iter_mut_next_back.PreCodegen.after.panic-abort.mir
+++ b/tests/mir-opt/pre-codegen/slice_iter.slice_iter_mut_next_back.PreCodegen.after.panic-abort.mir
@@ -45,15 +45,23 @@ fn slice_iter_mut_next_back(_1: &mut std::slice::IterMut<'_, T>) -> Option<&mut
 scope 14 {
 scope 17 (inlined NonNull::::sub) {
 scope 18 (inlined #[track_caller] core::num::::unchecked_neg) {
- scope 19 (inlined core::ub_checks::check_language_ub) {
- scope 20 (inlined core::ub_checks::check_language_ub::runtime) {
+ scope 19 {
+ scope 20 {
+ scope 21 {
+ }
 }
+ scope 23 (inlined core::ub_checks::check_language_ub) {
+ scope 24 (inlined core::ub_checks::check_language_ub::runtime) {
+ }
+ }
+ }
+ scope 22 (inlined core::contracts::build_check_ensures::::unchecked_neg::{closure#1}}>) {
 }
 }
- scope 21 (inlined NonNull::::offset) {
+ scope 25 (inlined NonNull::::offset) {
 let mut _13: *const T;
 let mut _14: *const T;
- scope 22 (inlined NonNull::::as_ptr) {
+ scope 26 (inlined NonNull::::as_ptr) {
 }
 }
 }
@@ -64,9 +72,9 @@ fn slice_iter_mut_next_back(_1: &mut std::slice::IterMut<'_, T>) -> Option<&mut
 }
 }
 }
- scope 23 (inlined NonNull::::as_mut::<'_>) {
+ scope 27 (inlined NonNull::::as_mut::<'_>) {
 let mut _20: *mut T;
- scope 24 (inlined NonNull::::as_ptr) {
+ scope 28 (inlined NonNull::::as_ptr) {
 }
 }
 }
diff --git a/tests/mir-opt/pre-codegen/slice_iter.slice_iter_mut_next_back.PreCodegen.after.panic-unwind.mir b/tests/mir-opt/pre-codegen/slice_iter.slice_iter_mut_next_back.PreCodegen.after.panic-unwind.mir
index 62b738c36bf4b..453912a8643c7 100644
--- a/tests/mir-opt/pre-codegen/slice_iter.slice_iter_mut_next_back.PreCodegen.after.panic-unwind.mir
+++ b/tests/mir-opt/pre-codegen/slice_iter.slice_iter_mut_next_back.PreCodegen.after.panic-unwind.mir
@@ -45,15 +45,23 @@ fn slice_iter_mut_next_back(_1: &mut std::slice::IterMut<'_, T>) -> Option<&mut
 scope 14 {
 scope 17 (inlined NonNull::::sub) {
 scope 18 (inlined #[track_caller] core::num::::unchecked_neg) {
- scope 19 (inlined core::ub_checks::check_language_ub) {
- scope 20 (inlined core::ub_checks::check_language_ub::runtime) {
+ scope 19 {
+ scope 20 {
+ scope 21 {
+ }
 }
+ scope 23 (inlined core::ub_checks::check_language_ub) {
+ scope 24 (inlined core::ub_checks::check_language_ub::runtime) {
+ }
+ }
+ }
+ scope 22 (inlined core::contracts::build_check_ensures::::unchecked_neg::{closure#1}}>) {
 }
 }
- scope 21 (inlined NonNull::::offset) {
+ scope 25 (inlined NonNull::::offset) {
 let mut _13: *const T;
 let mut _14: *const T;
- scope 22 (inlined NonNull::::as_ptr) {
+ scope 26 (inlined NonNull::::as_ptr) {
 }
 }
 }
@@ -64,9 +72,9 @@ fn slice_iter_mut_next_back(_1: &mut std::slice::IterMut<'_, T>) -> Option<&mut
 }
 }
 }
- scope 23 (inlined NonNull::::as_mut::<'_>) {
+ scope 27 (inlined NonNull::::as_mut::<'_>) {
 let mut _20: *mut T;
- scope 24 (inlined NonNull::::as_ptr) {
+ scope 28 (inlined NonNull::::as_ptr) {
 }
 }
 }
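Note: the `core::contracts::build_check_ensures` scopes, the renumbered inlined scopes, and the extra locals in the MIR expectations above are the visible effect of the contract attributes added to `core` and `alloc` in this change. As a rough, hedged illustration only (not part of this diff; `checked_div`, `dividend`, and `divisor` are invented names), a `requires`/`ensures` pair on an ordinary function might look like the sketch below, assuming a nightly toolchain with the experimental contracts feature enabled:

// Illustrative sketch only; not taken from this change.
#![feature(contracts)]
#![allow(incomplete_features)]

// Hypothetical function used for illustration: when contract checking is
// compiled in, the `requires` expression is evaluated on entry and the
// `ensures` closure is applied to the return value.
#[core::contracts::requires(divisor != 0)]
#[core::contracts::ensures(move |result: &u32| *result <= dividend)]
fn checked_div(dividend: u32, divisor: u32) -> u32 {
    dividend / divisor
}

fn main() {
    assert_eq!(checked_div(10, 2), 5);
}

That lowering is what the dumps above reflect: an `Option<{closure@...}>` local holding the postcondition closure and a call to the generated `precondition_check` function, which shifts the numbering of the surrounding locals and scopes.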