diff --git a/compiler/rustc_arena/src/lib.rs b/compiler/rustc_arena/src/lib.rs index 5cb79d9eea53b..bdbc59821de2f 100644 --- a/compiler/rustc_arena/src/lib.rs +++ b/compiler/rustc_arena/src/lib.rs @@ -95,7 +95,7 @@ impl ArenaChunk { unsafe { if mem::size_of::() == 0 { // A pointer as large as possible for zero-sized elements. - ptr::invalid_mut(!0) + ptr::without_provenance_mut(!0) } else { self.start().add(self.storage.len()) } diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs index 2cc38d90ffe4d..c3d0019be3975 100644 --- a/library/alloc/src/rc.rs +++ b/library/alloc/src/rc.rs @@ -2804,7 +2804,9 @@ impl Weak { #[must_use] pub const fn new() -> Weak { Weak { - ptr: unsafe { NonNull::new_unchecked(ptr::invalid_mut::>(usize::MAX)) }, + ptr: unsafe { + NonNull::new_unchecked(ptr::without_provenance_mut::>(usize::MAX)) + }, alloc: Global, } } @@ -2829,7 +2831,9 @@ impl Weak { #[unstable(feature = "allocator_api", issue = "32838")] pub fn new_in(alloc: A) -> Weak { Weak { - ptr: unsafe { NonNull::new_unchecked(ptr::invalid_mut::>(usize::MAX)) }, + ptr: unsafe { + NonNull::new_unchecked(ptr::without_provenance_mut::>(usize::MAX)) + }, alloc, } } diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs index e1211da4c6176..524aa35e04529 100644 --- a/library/alloc/src/sync.rs +++ b/library/alloc/src/sync.rs @@ -2555,7 +2555,9 @@ impl Weak { #[must_use] pub const fn new() -> Weak { Weak { - ptr: unsafe { NonNull::new_unchecked(ptr::invalid_mut::>(usize::MAX)) }, + ptr: unsafe { + NonNull::new_unchecked(ptr::without_provenance_mut::>(usize::MAX)) + }, alloc: Global, } } @@ -2583,7 +2585,9 @@ impl Weak { #[unstable(feature = "allocator_api", issue = "32838")] pub fn new_in(alloc: A) -> Weak { Weak { - ptr: unsafe { NonNull::new_unchecked(ptr::invalid_mut::>(usize::MAX)) }, + ptr: unsafe { + NonNull::new_unchecked(ptr::without_provenance_mut::>(usize::MAX)) + }, alloc, } } diff --git a/library/alloc/tests/fmt.rs b/library/alloc/tests/fmt.rs index 04da95bbb83ed..379e09ab69a3c 100644 --- a/library/alloc/tests/fmt.rs +++ b/library/alloc/tests/fmt.rs @@ -77,14 +77,14 @@ fn test_format_macro_interface() { t!(format!("{}", "foo"), "foo"); t!(format!("{}", "foo".to_string()), "foo"); if cfg!(target_pointer_width = "32") { - t!(format!("{:#p}", ptr::invalid::(0x1234)), "0x00001234"); - t!(format!("{:#p}", ptr::invalid_mut::(0x1234)), "0x00001234"); + t!(format!("{:#p}", ptr::without_provenance::(0x1234)), "0x00001234"); + t!(format!("{:#p}", ptr::without_provenance_mut::(0x1234)), "0x00001234"); } else { - t!(format!("{:#p}", ptr::invalid::(0x1234)), "0x0000000000001234"); - t!(format!("{:#p}", ptr::invalid_mut::(0x1234)), "0x0000000000001234"); + t!(format!("{:#p}", ptr::without_provenance::(0x1234)), "0x0000000000001234"); + t!(format!("{:#p}", ptr::without_provenance_mut::(0x1234)), "0x0000000000001234"); } - t!(format!("{:p}", ptr::invalid::(0x1234)), "0x1234"); - t!(format!("{:p}", ptr::invalid_mut::(0x1234)), "0x1234"); + t!(format!("{:p}", ptr::without_provenance::(0x1234)), "0x1234"); + t!(format!("{:p}", ptr::without_provenance_mut::(0x1234)), "0x1234"); t!(format!("{A:x}"), "aloha"); t!(format!("{B:X}"), "adios"); t!(format!("foo {} ☃☃☃☃☃☃", "bar"), "foo bar ☃☃☃☃☃☃"); @@ -208,7 +208,7 @@ fn test_format_macro_interface() { { let val = usize::MAX; let exp = format!("{val:#x}"); - t!(format!("{:p}", std::ptr::invalid::(val)), exp); + t!(format!("{:p}", std::ptr::without_provenance::(val)), exp); } // Escaping diff --git a/library/alloc/tests/vec.rs b/library/alloc/tests/vec.rs 
index 04bb20e96b792..15ee4d6520523 100644 --- a/library/alloc/tests/vec.rs +++ b/library/alloc/tests/vec.rs @@ -2575,7 +2575,7 @@ fn test_box_zero_allocator() { assert!(state.0.insert(addr)); state.1 += 1; std::println!("allocating {addr}"); - std::ptr::invalid_mut(addr) + std::ptr::without_provenance_mut(addr) } else { unsafe { std::alloc::alloc(layout) } }; diff --git a/library/core/src/alloc/layout.rs b/library/core/src/alloc/layout.rs index 9ef0a7d760840..2a02870e30be9 100644 --- a/library/core/src/alloc/layout.rs +++ b/library/core/src/alloc/layout.rs @@ -215,7 +215,7 @@ impl Layout { #[inline] pub const fn dangling(&self) -> NonNull { // SAFETY: align is guaranteed to be non-zero - unsafe { NonNull::new_unchecked(crate::ptr::invalid_mut::(self.align())) } + unsafe { NonNull::new_unchecked(crate::ptr::without_provenance_mut::(self.align())) } } /// Creates a layout describing the record that can hold a value diff --git a/library/core/src/intrinsics.rs b/library/core/src/intrinsics.rs index 4a1187561b398..bec0948c5ed3f 100644 --- a/library/core/src/intrinsics.rs +++ b/library/core/src/intrinsics.rs @@ -1155,7 +1155,7 @@ extern "rust-intrinsic" { /// /// Transmuting pointers *to* integers in a `const` context is [undefined behavior][ub], /// unless the pointer was originally created *from* an integer. - /// (That includes this function specifically, integer-to-pointer casts, and helpers like [`invalid`][crate::ptr::invalid], + /// (That includes this function specifically, integer-to-pointer casts, and helpers like [`invalid`][crate::ptr::dangling], /// but also semantically-equivalent conversions such as punning through `repr(C)` union fields.) /// Any attempt to use the resulting value for integer operations will abort const-evaluation. /// (And even outside `const`, such transmutation is touching on many unspecified aspects of the diff --git a/library/core/src/ptr/const_ptr.rs b/library/core/src/ptr/const_ptr.rs index c5e3df07a1cf7..85a56d37ab75c 100644 --- a/library/core/src/ptr/const_ptr.rs +++ b/library/core/src/ptr/const_ptr.rs @@ -181,7 +181,7 @@ impl *const T { /// /// This is similar to `self as usize`, which semantically discards *provenance* and /// *address-space* information. However, unlike `self as usize`, casting the returned address - /// back to a pointer yields [`invalid`][], which is undefined behavior to dereference. To + /// back to a pointer yields a [pointer without provenance][without_provenance], which is undefined behavior to dereference. To /// properly restore the lost information and obtain a dereferenceable pointer, use /// [`with_addr`][pointer::with_addr] or [`map_addr`][pointer::map_addr]. /// diff --git a/library/core/src/ptr/mod.rs b/library/core/src/ptr/mod.rs index 2bd14f357d80c..90b3341f0ad4d 100644 --- a/library/core/src/ptr/mod.rs +++ b/library/core/src/ptr/mod.rs @@ -4,13 +4,13 @@ //! //! # Safety //! -//! Many functions in this module take raw pointers as arguments and read from -//! or write to them. For this to be safe, these pointers must be *valid*. -//! Whether a pointer is valid depends on the operation it is used for -//! (read or write), and the extent of the memory that is accessed (i.e., -//! how many bytes are read/written). Most functions use `*mut T` and `*const T` -//! to access only a single value, in which case the documentation omits the size -//! and implicitly assumes it to be `size_of::()` bytes. +//! Many functions in this module take raw pointers as arguments and read from or write to them. For +//! 
this to be safe, these pointers must be *valid* for the given access. Whether a pointer is valid +//! depends on the operation it is used for (read or write), and the extent of the memory that is +//! accessed (i.e., how many bytes are read/written) -- it makes no sense to ask "is this pointer +//! valid"; one has to ask "is this pointer valid for a given access". Most functions use `*mut T` +//! and `*const T` to access only a single value, in which case the documentation omits the size and +//! implicitly assumes it to be `size_of::()` bytes. //! //! The precise rules for validity are not determined yet. The guarantees that are //! provided at this point are very minimal: @@ -26,7 +26,7 @@ //! some memory happens to exist at that address and gets deallocated. This corresponds to writing //! your own allocator: allocating zero-sized objects is not very hard. The canonical way to //! obtain a pointer that is valid for zero-sized accesses is [`NonNull::dangling`]. -//FIXME: mention `ptr::invalid` above, once it is stable. +//FIXME: mention `ptr::dangling` above, once it is stable. //! * All accesses performed by functions in this module are *non-atomic* in the sense //! of [atomic operations] used to synchronize between threads. This means it is //! undefined behavior to perform two concurrent accesses to the same location from different @@ -44,6 +44,10 @@ //! information, see the [book] as well as the section in the reference devoted //! to [undefined behavior][ub]. //! +//! We say that a pointer is "dangling" if it is not valid for any non-zero-sized accesses. This +//! means out-of-bounds pointers, pointers to freed memory, null pointers, and pointers created with +//! [`NonNull::dangling`] are all dangling. +//! //! ## Alignment //! //! Valid raw pointers as defined above are not necessarily properly aligned (where @@ -167,6 +171,7 @@ //! * The **address-space** it is part of (e.g. "data" vs "code" in WASM). //! * The **address** it points to, which can be represented by a `usize`. //! * The **provenance** it has, defining the memory it has permission to access. +//! Provenance can be absent, in which case the pointer does not have permission to access any memory. //! //! Under Strict Provenance, a usize *cannot* accurately represent a pointer, and converting from //! a pointer to a usize is generally an operation which *only* extracts the address. It is @@ -270,11 +275,12 @@ //! //! But it *is* still sound to: //! -//! * Create an invalid pointer from just an address (see [`ptr::invalid`][]). This can -//! be used for sentinel values like `null` *or* to represent a tagged pointer that will -//! never be dereferenceable. In general, it is always sound for an integer to pretend -//! to be a pointer "for fun" as long as you don't use operations on it which require -//! it to be valid (offset, read, write, etc). +//! * Create a pointer without provenance from just an address (see [`ptr::dangling`][]). Such a +//! pointer cannot be used for memory accesses (except for zero-sized accesses). This can still be +//! useful for sentinel values like `null` *or* to represent a tagged pointer that will never be +//! dereferenceable. In general, it is always sound for an integer to pretend to be a pointer "for +//! fun" as long as you don't use operations on it which require it to be valid (non-zero-sized +//! offset, read, write, etc). //! //! * Forge an allocation of size zero at any sufficiently aligned non-null address. //! i.e. 
the usual "ZSTs are fake, do what you want" rules apply *but* this only applies @@ -283,7 +289,7 @@ //! that allocation and it will still get invalidated if the allocation gets deallocated. //! In the future we may introduce an API to make such a forged allocation explicit. //! -//! * [`wrapping_offset`][] a pointer outside its provenance. This includes invalid pointers +//! * [`wrapping_offset`][] a pointer outside its provenance. This includes pointers //! which have "no" provenance. Unfortunately there may be practical limits on this for a //! particular platform, and it's an open question as to how to specify this (if at all). //! Notably, [CHERI][] relies on a compression scheme that can't handle a @@ -294,7 +300,7 @@ //! generous (think kilobytes, not bytes). //! //! * Compare arbitrary pointers by address. Addresses *are* just integers and so there is -//! always a coherent answer, even if the pointers are invalid or from different +//! always a coherent answer, even if the pointers are dangling or from different //! address-spaces/provenances. Of course, comparing addresses from different address-spaces //! is generally going to be *meaningless*, but so is comparing Kilograms to Meters, and Rust //! doesn't prevent that either. Similarly, if you get "lucky" and notice that a pointer @@ -367,7 +373,7 @@ //! [`with_addr`]: pointer::with_addr //! [`map_addr`]: pointer::map_addr //! [`addr`]: pointer::addr -//! [`ptr::invalid`]: core::ptr::invalid +//! [`ptr::dangling`]: core::ptr::dangling //! [`expose_addr`]: pointer::expose_addr //! [`from_exposed_addr`]: from_exposed_addr //! [Miri]: https://github.com/rust-lang/miri @@ -537,7 +543,7 @@ pub unsafe fn drop_in_place(to_drop: *mut T) { #[rustc_allow_const_fn_unstable(ptr_metadata)] #[rustc_diagnostic_item = "ptr_null"] pub const fn null() -> *const T { - from_raw_parts(invalid(0), ()) + from_raw_parts(without_provenance(0), ()) } /// Creates a null mutable raw pointer. @@ -563,32 +569,26 @@ pub const fn null() -> *const T { #[rustc_allow_const_fn_unstable(ptr_metadata)] #[rustc_diagnostic_item = "ptr_null_mut"] pub const fn null_mut() -> *mut T { - from_raw_parts_mut(invalid_mut(0), ()) + from_raw_parts_mut(without_provenance_mut(0), ()) } -/// Creates an invalid pointer with the given address. +/// Creates a pointer with the given address and no provenance. +/// +/// Without provenance, this pointer is not associated with any actual allocation. Such a +/// no-provenance pointer may be used for zero-sized memory accesses (if suitably aligned), but +/// non-zero-sized memory accesses with a no-provenance pointer are UB. No-provenance pointers are +/// little more than a usize address in disguise. /// /// This is different from `addr as *const T`, which creates a pointer that picks up a previously /// exposed provenance. See [`from_exposed_addr`] for more details on that operation. /// -/// The module's top-level documentation discusses the precise meaning of an "invalid" -/// pointer but essentially this expresses that the pointer is not associated -/// with any actual allocation and is little more than a usize address in disguise. -/// -/// This pointer will have no provenance associated with it and is therefore -/// UB to read/write/offset. This mostly exists to facilitate things -/// like `ptr::null` and `NonNull::dangling` which make invalid pointers. -/// -/// (Standard "Zero-Sized-Types get to cheat and lie" caveats apply, although it -/// may be desirable to give them their own API just to make that 100% clear.) 
-/// /// This API and its claimed semantics are part of the Strict Provenance experiment, /// see the [module documentation][crate::ptr] for details. #[inline(always)] #[must_use] #[rustc_const_stable(feature = "stable_things_using_strict_provenance", since = "1.61.0")] #[unstable(feature = "strict_provenance", issue = "95228")] -pub const fn invalid(addr: usize) -> *const T { +pub const fn without_provenance(addr: usize) -> *const T { // FIXME(strict_provenance_magic): I am magic and should be a compiler intrinsic. // We use transmute rather than a cast so tools like Miri can tell that this // is *not* the same as from_exposed_addr. @@ -597,21 +597,32 @@ pub const fn invalid(addr: usize) -> *const T { unsafe { mem::transmute(addr) } } -/// Creates an invalid mutable pointer with the given address. +/// Creates a new pointer that is dangling, but well-aligned. /// -/// This is different from `addr as *mut T`, which creates a pointer that picks up a previously -/// exposed provenance. See [`from_exposed_addr_mut`] for more details on that operation. +/// This is useful for initializing types which lazily allocate, like +/// `Vec::new` does. /// -/// The module's top-level documentation discusses the precise meaning of an "invalid" -/// pointer but essentially this expresses that the pointer is not associated -/// with any actual allocation and is little more than a usize address in disguise. +/// Note that the pointer value may potentially represent a valid pointer to +/// a `T`, which means this must not be used as a "not yet initialized" +/// sentinel value. Types that lazily allocate must track initialization by +/// some other means. +#[inline(always)] +#[must_use] +#[rustc_const_stable(feature = "stable_things_using_strict_provenance", since = "1.61.0")] +#[unstable(feature = "strict_provenance", issue = "95228")] +pub const fn dangling() -> *const T { + without_provenance(mem::align_of::()) +} + +/// Creates a pointer with the given address and no provenance. /// -/// This pointer will have no provenance associated with it and is therefore -/// UB to read/write/offset. This mostly exists to facilitate things -/// like `ptr::null` and `NonNull::dangling` which make invalid pointers. +/// Without provenance, this pointer is not associated with any actual allocation. Such a +/// no-provenance pointer may be used for zero-sized memory accesses (if suitably aligned), but +/// non-zero-sized memory accesses with a no-provenance pointer are UB. No-provenance pointers are +/// little more than a usize address in disguise. /// -/// (Standard "Zero-Sized-Types get to cheat and lie" caveats apply, although it -/// may be desirable to give them their own API just to make that 100% clear.) +/// This is different from `addr as *mut T`, which creates a pointer that picks up a previously +/// exposed provenance. See [`from_exposed_addr_mut`] for more details on that operation. /// /// This API and its claimed semantics are part of the Strict Provenance experiment, /// see the [module documentation][crate::ptr] for details. @@ -619,7 +630,7 @@ pub const fn invalid(addr: usize) -> *const T { #[must_use] #[rustc_const_stable(feature = "stable_things_using_strict_provenance", since = "1.61.0")] #[unstable(feature = "strict_provenance", issue = "95228")] -pub const fn invalid_mut(addr: usize) -> *mut T { +pub const fn without_provenance_mut(addr: usize) -> *mut T { // FIXME(strict_provenance_magic): I am magic and should be a compiler intrinsic. 
// We use transmute rather than a cast so tools like Miri can tell that this // is *not* the same as from_exposed_addr. @@ -628,6 +639,23 @@ pub const fn invalid_mut(addr: usize) -> *mut T { unsafe { mem::transmute(addr) } } +/// Creates a new pointer that is dangling, but well-aligned. +/// +/// This is useful for initializing types which lazily allocate, like +/// `Vec::new` does. +/// +/// Note that the pointer value may potentially represent a valid pointer to +/// a `T`, which means this must not be used as a "not yet initialized" +/// sentinel value. Types that lazily allocate must track initialization by +/// some other means. +#[inline(always)] +#[must_use] +#[rustc_const_stable(feature = "stable_things_using_strict_provenance", since = "1.61.0")] +#[unstable(feature = "strict_provenance", issue = "95228")] +pub const fn dangling_mut() -> *mut T { + without_provenance_mut(mem::align_of::()) +} + /// Convert an address back to a pointer, picking up a previously 'exposed' provenance. /// /// This is a more rigorously specified alternative to `addr as *const T`. The provenance of the diff --git a/library/core/src/ptr/mut_ptr.rs b/library/core/src/ptr/mut_ptr.rs index 376673d67c10b..28ba26f5c16c4 100644 --- a/library/core/src/ptr/mut_ptr.rs +++ b/library/core/src/ptr/mut_ptr.rs @@ -188,9 +188,10 @@ impl *mut T { /// /// This is similar to `self as usize`, which semantically discards *provenance* and /// *address-space* information. However, unlike `self as usize`, casting the returned address - /// back to a pointer yields [`invalid`][], which is undefined behavior to dereference. To - /// properly restore the lost information and obtain a dereferenceable pointer, use - /// [`with_addr`][pointer::with_addr] or [`map_addr`][pointer::map_addr]. + /// back to a pointer yields yields a [pointer without provenance][without_provenance_mut], which is undefined + /// behavior to dereference. To properly restore the lost information and obtain a + /// dereferenceable pointer, use [`with_addr`][pointer::with_addr] or + /// [`map_addr`][pointer::map_addr]. /// /// If using those APIs is not possible because there is no way to preserve a pointer with the /// required provenance, then Strict Provenance might not be for you. Use pointer-integer casts diff --git a/library/core/src/ptr/non_null.rs b/library/core/src/ptr/non_null.rs index 16e903439936d..098ec23385567 100644 --- a/library/core/src/ptr/non_null.rs +++ b/library/core/src/ptr/non_null.rs @@ -4,8 +4,7 @@ use crate::hash; use crate::intrinsics; use crate::intrinsics::assert_unsafe_precondition; use crate::marker::Unsize; -use crate::mem::SizedTypeProperties; -use crate::mem::{self, MaybeUninit}; +use crate::mem::{MaybeUninit, SizedTypeProperties}; use crate::num::{NonZero, NonZeroUsize}; use crate::ops::{CoerceUnsized, DispatchFromDyn}; use crate::ptr; @@ -114,7 +113,7 @@ impl NonNull { // to a *mut T. Therefore, `ptr` is not null and the conditions for // calling new_unchecked() are respected. 
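A small sketch (not part of the patch) of the intended use of the new `dangling_mut`, assuming nightly with `strict_provenance`; `LazyBuf` is a made-up type for illustration only.

#![feature(strict_provenance)]
use std::{mem, ptr};

// Hypothetical lazily-allocating container, in the spirit of `Vec::new`.
struct LazyBuf {
    data: *mut u64,
    len: usize, // initialization is tracked here, never via the pointer value
}

impl LazyBuf {
    fn new() -> Self {
        // Well-aligned for u64 but dangling: nothing has been allocated yet.
        LazyBuf { data: ptr::dangling_mut::<u64>(), len: 0 }
    }
}

fn main() {
    let buf = LazyBuf::new();
    assert_eq!(buf.data.addr(), mem::align_of::<u64>());
    assert_eq!(buf.len, 0);
}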
unsafe { - let ptr = crate::ptr::invalid_mut::(mem::align_of::()); + let ptr = crate::ptr::dangling_mut::(); NonNull::new_unchecked(ptr) } } diff --git a/library/core/src/slice/iter.rs b/library/core/src/slice/iter.rs index 617b385a960c4..d7d4f90c1a538 100644 --- a/library/core/src/slice/iter.rs +++ b/library/core/src/slice/iter.rs @@ -12,7 +12,7 @@ use crate::iter::{ use crate::marker::PhantomData; use crate::mem::{self, SizedTypeProperties}; use crate::num::NonZero; -use crate::ptr::{self, invalid, invalid_mut, NonNull}; +use crate::ptr::{self, without_provenance, without_provenance_mut, NonNull}; use super::{from_raw_parts, from_raw_parts_mut}; @@ -67,7 +67,7 @@ pub struct Iter<'a, T: 'a> { ptr: NonNull, /// For non-ZSTs, the non-null pointer to the past-the-end element. /// - /// For ZSTs, this is `ptr::invalid(len)`. + /// For ZSTs, this is `ptr::dangling(len)`. end_or_len: *const T, _marker: PhantomData<&'a T>, } @@ -91,7 +91,8 @@ impl<'a, T> Iter<'a, T> { let ptr: NonNull = NonNull::from(slice).cast(); // SAFETY: Similar to `IterMut::new`. unsafe { - let end_or_len = if T::IS_ZST { invalid(len) } else { ptr.as_ptr().add(len) }; + let end_or_len = + if T::IS_ZST { without_provenance(len) } else { ptr.as_ptr().add(len) }; Self { ptr, end_or_len, _marker: PhantomData } } @@ -189,7 +190,7 @@ pub struct IterMut<'a, T: 'a> { ptr: NonNull, /// For non-ZSTs, the non-null pointer to the past-the-end element. /// - /// For ZSTs, this is `ptr::invalid_mut(len)`. + /// For ZSTs, this is `ptr::without_provenance_mut(len)`. end_or_len: *mut T, _marker: PhantomData<&'a mut T>, } @@ -228,7 +229,8 @@ impl<'a, T> IterMut<'a, T> { // See the `next_unchecked!` and `is_empty!` macros as well as the // `post_inc_start` method for more information. unsafe { - let end_or_len = if T::IS_ZST { invalid_mut(len) } else { ptr.as_ptr().add(len) }; + let end_or_len = + if T::IS_ZST { without_provenance_mut(len) } else { ptr.as_ptr().add(len) }; Self { ptr, end_or_len, _marker: PhantomData } } diff --git a/library/core/src/sync/atomic.rs b/library/core/src/sync/atomic.rs index e9a0d9e1d287c..45193c11e1d6b 100644 --- a/library/core/src/sync/atomic.rs +++ b/library/core/src/sync/atomic.rs @@ -1842,7 +1842,7 @@ impl AtomicPtr { #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces pub fn fetch_byte_add(&self, val: usize, order: Ordering) -> *mut T { // SAFETY: data races are prevented by atomic intrinsics. - unsafe { atomic_add(self.p.get(), core::ptr::invalid_mut(val), order).cast() } + unsafe { atomic_add(self.p.get(), core::ptr::without_provenance_mut(val), order).cast() } } /// Offsets the pointer's address by subtracting `val` *bytes*, returning the @@ -1867,7 +1867,7 @@ impl AtomicPtr { /// #![feature(strict_provenance_atomic_ptr, strict_provenance)] /// use core::sync::atomic::{AtomicPtr, Ordering}; /// - /// let atom = AtomicPtr::::new(core::ptr::invalid_mut(1)); + /// let atom = AtomicPtr::::new(core::ptr::without_provenance_mut(1)); /// assert_eq!(atom.fetch_byte_sub(1, Ordering::Relaxed).addr(), 1); /// assert_eq!(atom.load(Ordering::Relaxed).addr(), 0); /// ``` @@ -1877,7 +1877,7 @@ impl AtomicPtr { #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces pub fn fetch_byte_sub(&self, val: usize, order: Ordering) -> *mut T { // SAFETY: data races are prevented by atomic intrinsics. 
- unsafe { atomic_sub(self.p.get(), core::ptr::invalid_mut(val), order).cast() } + unsafe { atomic_sub(self.p.get(), core::ptr::without_provenance_mut(val), order).cast() } } /// Performs a bitwise "or" operation on the address of the current pointer, @@ -1928,7 +1928,7 @@ impl AtomicPtr { #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces pub fn fetch_or(&self, val: usize, order: Ordering) -> *mut T { // SAFETY: data races are prevented by atomic intrinsics. - unsafe { atomic_or(self.p.get(), core::ptr::invalid_mut(val), order).cast() } + unsafe { atomic_or(self.p.get(), core::ptr::without_provenance_mut(val), order).cast() } } /// Performs a bitwise "and" operation on the address of the current @@ -1978,7 +1978,7 @@ impl AtomicPtr { #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces pub fn fetch_and(&self, val: usize, order: Ordering) -> *mut T { // SAFETY: data races are prevented by atomic intrinsics. - unsafe { atomic_and(self.p.get(), core::ptr::invalid_mut(val), order).cast() } + unsafe { atomic_and(self.p.get(), core::ptr::without_provenance_mut(val), order).cast() } } /// Performs a bitwise "xor" operation on the address of the current @@ -2026,7 +2026,7 @@ impl AtomicPtr { #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces pub fn fetch_xor(&self, val: usize, order: Ordering) -> *mut T { // SAFETY: data races are prevented by atomic intrinsics. - unsafe { atomic_xor(self.p.get(), core::ptr::invalid_mut(val), order).cast() } + unsafe { atomic_xor(self.p.get(), core::ptr::without_provenance_mut(val), order).cast() } } /// Returns a mutable pointer to the underlying pointer. diff --git a/library/core/tests/alloc.rs b/library/core/tests/alloc.rs index 3ceaeadcec6c3..b88f1821cd77c 100644 --- a/library/core/tests/alloc.rs +++ b/library/core/tests/alloc.rs @@ -10,7 +10,7 @@ fn const_unchecked_layout() { const DANGLING: NonNull = LAYOUT.dangling(); assert_eq!(LAYOUT.size(), SIZE); assert_eq!(LAYOUT.align(), ALIGN); - assert_eq!(Some(DANGLING), NonNull::new(ptr::invalid_mut(ALIGN))); + assert_eq!(Some(DANGLING), NonNull::new(ptr::without_provenance_mut(ALIGN))); } #[test] diff --git a/library/core/tests/hash/mod.rs b/library/core/tests/hash/mod.rs index 3b9351457a959..bdd1c2579ded8 100644 --- a/library/core/tests/hash/mod.rs +++ b/library/core/tests/hash/mod.rs @@ -87,10 +87,10 @@ fn test_writer_hasher() { let cs: Rc<[u8]> = Rc::new([1, 2, 3]); assert_eq!(hash(&cs), 9); - let ptr = ptr::invalid::(5_usize); + let ptr = ptr::without_provenance::(5_usize); assert_eq!(hash(&ptr), 5); - let ptr = ptr::invalid_mut::(5_usize); + let ptr = ptr::without_provenance_mut::(5_usize); assert_eq!(hash(&ptr), 5); if cfg!(miri) { diff --git a/library/core/tests/ptr.rs b/library/core/tests/ptr.rs index b3f7dfa1fb9c7..659fbd255c168 100644 --- a/library/core/tests/ptr.rs +++ b/library/core/tests/ptr.rs @@ -350,9 +350,9 @@ fn align_offset_zst() { // all, because no amount of elements will align the pointer. let mut p = 1; while p < 1024 { - assert_eq!(ptr::invalid::<()>(p).align_offset(p), 0); + assert_eq!(ptr::without_provenance::<()>(p).align_offset(p), 0); if p != 1 { - assert_eq!(ptr::invalid::<()>(p + 1).align_offset(p), !0); + assert_eq!(ptr::without_provenance::<()>(p + 1).align_offset(p), !0); } p = (p + 1).next_power_of_two(); } @@ -365,9 +365,9 @@ fn align_offset_zst_const() { // all, because no amount of elements will align the pointer. 
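The `AtomicPtr` bit-operations touched earlier in this patch act on the address while keeping provenance, which is what makes pointer tagging expressible under strict provenance. A hedged sketch, assuming nightly with the unstable `strict_provenance` and `strict_provenance_atomic_ptr` features:

#![feature(strict_provenance, strict_provenance_atomic_ptr)]
use std::sync::atomic::{AtomicPtr, Ordering};

fn main() {
    let mut value = 0u64; // 8-byte aligned, so the low three address bits are free
    let atom = AtomicPtr::new(&mut value as *mut u64);

    // Set a tag bit in the address; the stored pointer keeps its provenance.
    atom.fetch_or(0b1, Ordering::Relaxed);
    let tagged = atom.load(Ordering::Relaxed);
    assert_eq!(tagged.addr() & 0b111, 0b1);

    // Strip the tag with `map_addr` (provenance-preserving), then dereference.
    let untagged = tagged.map_addr(|a| a & !0b111);
    assert_eq!(unsafe { *untagged }, 0);
}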
let mut p = 1; while p < 1024 { - assert!(ptr::invalid::<()>(p).align_offset(p) == 0); + assert!(ptr::without_provenance::<()>(p).align_offset(p) == 0); if p != 1 { - assert!(ptr::invalid::<()>(p + 1).align_offset(p) == !0); + assert!(ptr::without_provenance::<()>(p + 1).align_offset(p) == !0); } p = (p + 1).next_power_of_two(); } @@ -384,7 +384,7 @@ fn align_offset_stride_one() { let expected = ptr % align; let offset = if expected == 0 { 0 } else { align - expected }; assert_eq!( - ptr::invalid::(ptr).align_offset(align), + ptr::without_provenance::(ptr).align_offset(align), offset, "ptr = {}, align = {}, size = 1", ptr, @@ -406,7 +406,7 @@ fn align_offset_stride_one_const() { while ptr < 2 * align { let expected = ptr % align; let offset = if expected == 0 { 0 } else { align - expected }; - assert!(ptr::invalid::(ptr).align_offset(align) == offset); + assert!(ptr::without_provenance::(ptr).align_offset(align) == offset); ptr += 1; } align = (align + 1).next_power_of_two(); @@ -452,30 +452,30 @@ fn align_offset_various_strides() { unsafe { #[repr(packed)] struct A3(#[allow(dead_code)] u16, #[allow(dead_code)] u8); - x |= test_stride::(ptr::invalid::(ptr), align); + x |= test_stride::(ptr::without_provenance::(ptr), align); struct A4(#[allow(dead_code)] u32); - x |= test_stride::(ptr::invalid::(ptr), align); + x |= test_stride::(ptr::without_provenance::(ptr), align); #[repr(packed)] struct A5(#[allow(dead_code)] u32, #[allow(dead_code)] u8); - x |= test_stride::(ptr::invalid::(ptr), align); + x |= test_stride::(ptr::without_provenance::(ptr), align); #[repr(packed)] struct A6(#[allow(dead_code)] u32, #[allow(dead_code)] u16); - x |= test_stride::(ptr::invalid::(ptr), align); + x |= test_stride::(ptr::without_provenance::(ptr), align); #[repr(packed)] struct A7(#[allow(dead_code)] u32, #[allow(dead_code)] u16, #[allow(dead_code)] u8); - x |= test_stride::(ptr::invalid::(ptr), align); + x |= test_stride::(ptr::without_provenance::(ptr), align); #[repr(packed)] struct A8(#[allow(dead_code)] u32, #[allow(dead_code)] u32); - x |= test_stride::(ptr::invalid::(ptr), align); + x |= test_stride::(ptr::without_provenance::(ptr), align); #[repr(packed)] struct A9(#[allow(dead_code)] u32, #[allow(dead_code)] u32, #[allow(dead_code)] u8); - x |= test_stride::(ptr::invalid::(ptr), align); + x |= test_stride::(ptr::without_provenance::(ptr), align); #[repr(packed)] struct A10( @@ -483,10 +483,10 @@ fn align_offset_various_strides() { #[allow(dead_code)] u32, #[allow(dead_code)] u16, ); - x |= test_stride::(ptr::invalid::(ptr), align); + x |= test_stride::(ptr::without_provenance::(ptr), align); - x |= test_stride::(ptr::invalid::(ptr), align); - x |= test_stride::(ptr::invalid::(ptr), align); + x |= test_stride::(ptr::without_provenance::(ptr), align); + x |= test_stride::(ptr::without_provenance::(ptr), align); } } align = (align + 1).next_power_of_two(); @@ -522,18 +522,18 @@ fn align_offset_various_strides_const() { unsafe { #[repr(packed)] struct A3(#[allow(dead_code)] u16, #[allow(dead_code)] u8); - test_stride::(ptr::invalid::(ptr), ptr, align); + test_stride::(ptr::without_provenance::(ptr), ptr, align); struct A4(#[allow(dead_code)] u32); - test_stride::(ptr::invalid::(ptr), ptr, align); + test_stride::(ptr::without_provenance::(ptr), ptr, align); #[repr(packed)] struct A5(#[allow(dead_code)] u32, #[allow(dead_code)] u8); - test_stride::(ptr::invalid::(ptr), ptr, align); + test_stride::(ptr::without_provenance::(ptr), ptr, align); #[repr(packed)] struct A6(#[allow(dead_code)] u32, 
#[allow(dead_code)] u16); - test_stride::(ptr::invalid::(ptr), ptr, align); + test_stride::(ptr::without_provenance::(ptr), ptr, align); #[repr(packed)] struct A7( @@ -541,11 +541,11 @@ fn align_offset_various_strides_const() { #[allow(dead_code)] u16, #[allow(dead_code)] u8, ); - test_stride::(ptr::invalid::(ptr), ptr, align); + test_stride::(ptr::without_provenance::(ptr), ptr, align); #[repr(packed)] struct A8(#[allow(dead_code)] u32, #[allow(dead_code)] u32); - test_stride::(ptr::invalid::(ptr), ptr, align); + test_stride::(ptr::without_provenance::(ptr), ptr, align); #[repr(packed)] struct A9( @@ -553,7 +553,7 @@ fn align_offset_various_strides_const() { #[allow(dead_code)] u32, #[allow(dead_code)] u8, ); - test_stride::(ptr::invalid::(ptr), ptr, align); + test_stride::(ptr::without_provenance::(ptr), ptr, align); #[repr(packed)] struct A10( @@ -561,10 +561,10 @@ fn align_offset_various_strides_const() { #[allow(dead_code)] u32, #[allow(dead_code)] u16, ); - test_stride::(ptr::invalid::(ptr), ptr, align); + test_stride::(ptr::without_provenance::(ptr), ptr, align); - test_stride::(ptr::invalid::(ptr), ptr, align); - test_stride::(ptr::invalid::(ptr), ptr, align); + test_stride::(ptr::without_provenance::(ptr), ptr, align); + test_stride::(ptr::without_provenance::(ptr), ptr, align); } ptr += 1; } @@ -689,7 +689,7 @@ fn align_offset_issue_103361() { #[cfg(target_pointer_width = "16")] const SIZE: usize = 1 << 13; struct HugeSize(#[allow(dead_code)] [u8; SIZE - 1]); - let _ = ptr::invalid::(SIZE).align_offset(SIZE); + let _ = ptr::without_provenance::(SIZE).align_offset(SIZE); } #[test] @@ -703,9 +703,9 @@ fn align_offset_issue_103361_const() { struct HugeSize(#[allow(dead_code)] [u8; SIZE - 1]); const { - assert!(ptr::invalid::(SIZE - 1).align_offset(SIZE) == SIZE - 1); - assert!(ptr::invalid::(SIZE).align_offset(SIZE) == 0); - assert!(ptr::invalid::(SIZE + 1).align_offset(SIZE) == 1); + assert!(ptr::without_provenance::(SIZE - 1).align_offset(SIZE) == SIZE - 1); + assert!(ptr::without_provenance::(SIZE).align_offset(SIZE) == 0); + assert!(ptr::without_provenance::(SIZE + 1).align_offset(SIZE) == 1); } } diff --git a/library/core/tests/waker.rs b/library/core/tests/waker.rs index 38a3a0adad98e..2c66e0d7ad3a4 100644 --- a/library/core/tests/waker.rs +++ b/library/core/tests/waker.rs @@ -3,7 +3,7 @@ use std::task::{RawWaker, RawWakerVTable, Waker}; #[test] fn test_waker_getters() { - let raw_waker = RawWaker::new(ptr::invalid_mut(42usize), &WAKER_VTABLE); + let raw_waker = RawWaker::new(ptr::without_provenance_mut(42usize), &WAKER_VTABLE); assert_eq!(raw_waker.data() as usize, 42); assert!(ptr::eq(raw_waker.vtable(), &WAKER_VTABLE)); @@ -15,7 +15,7 @@ fn test_waker_getters() { } static WAKER_VTABLE: RawWakerVTable = RawWakerVTable::new( - |data| RawWaker::new(ptr::invalid_mut(data as usize + 1), &WAKER_VTABLE), + |data| RawWaker::new(ptr::without_provenance_mut(data as usize + 1), &WAKER_VTABLE), |_| {}, |_| {}, |_| {}, diff --git a/library/std/src/backtrace.rs b/library/std/src/backtrace.rs index 835e35eac34f6..475b3e7eb9312 100644 --- a/library/std/src/backtrace.rs +++ b/library/std/src/backtrace.rs @@ -467,7 +467,7 @@ impl RawFrame { match self { RawFrame::Actual(frame) => frame.ip(), #[cfg(test)] - RawFrame::Fake => crate::ptr::invalid_mut(1), + RawFrame::Fake => crate::ptr::without_provenance_mut(1), } } } diff --git a/library/std/src/io/error/repr_bitpacked.rs b/library/std/src/io/error/repr_bitpacked.rs index db175659770b5..c053e047b1a63 100644 --- 
a/library/std/src/io/error/repr_bitpacked.rs +++ b/library/std/src/io/error/repr_bitpacked.rs @@ -174,7 +174,10 @@ impl Repr { pub(super) fn new_os(code: RawOsError) -> Self { let utagged = ((code as usize) << 32) | TAG_OS; // Safety: `TAG_OS` is not zero, so the result of the `|` is not 0. - let res = Self(unsafe { NonNull::new_unchecked(ptr::invalid_mut(utagged)) }, PhantomData); + let res = Self( + unsafe { NonNull::new_unchecked(ptr::without_provenance_mut(utagged)) }, + PhantomData, + ); // quickly smoke-check we encoded the right thing (This generally will // only run in std's tests, unless the user uses -Zbuild-std) debug_assert!( @@ -188,7 +191,10 @@ impl Repr { pub(super) fn new_simple(kind: ErrorKind) -> Self { let utagged = ((kind as usize) << 32) | TAG_SIMPLE; // Safety: `TAG_SIMPLE` is not zero, so the result of the `|` is not 0. - let res = Self(unsafe { NonNull::new_unchecked(ptr::invalid_mut(utagged)) }, PhantomData); + let res = Self( + unsafe { NonNull::new_unchecked(ptr::without_provenance_mut(utagged)) }, + PhantomData, + ); // quickly smoke-check we encoded the right thing (This generally will // only run in std's tests, unless the user uses -Zbuild-std) debug_assert!( diff --git a/library/std/src/sys/locks/rwlock/queue.rs b/library/std/src/sys/locks/rwlock/queue.rs index 0f02a98dfdd49..dce966086b8ff 100644 --- a/library/std/src/sys/locks/rwlock/queue.rs +++ b/library/std/src/sys/locks/rwlock/queue.rs @@ -110,7 +110,7 @@ use crate::cell::OnceCell; use crate::hint::spin_loop; use crate::mem; -use crate::ptr::{self, invalid_mut, null_mut, NonNull}; +use crate::ptr::{self, null_mut, without_provenance_mut, NonNull}; use crate::sync::atomic::{ AtomicBool, AtomicPtr, Ordering::{AcqRel, Acquire, Relaxed, Release}, @@ -126,7 +126,7 @@ const SPIN_COUNT: usize = 7; type State = *mut (); type AtomicState = AtomicPtr<()>; -const UNLOCKED: State = invalid_mut(0); +const UNLOCKED: State = without_provenance_mut(0); const LOCKED: usize = 1; const QUEUED: usize = 2; const QUEUE_LOCKED: usize = 4; @@ -144,7 +144,7 @@ fn write_lock(state: State) -> Option { #[inline] fn read_lock(state: State) -> Option { if state.addr() & QUEUED == 0 && state.addr() != LOCKED { - Some(invalid_mut(state.addr().checked_add(SINGLE)? | LOCKED)) + Some(without_provenance_mut(state.addr().checked_add(SINGLE)? 
| LOCKED)) } else { None } @@ -405,7 +405,7 @@ impl RwLock { match self.state.fetch_update(Release, Acquire, |state| { if state.addr() & QUEUED == 0 { let count = state.addr() - (SINGLE | LOCKED); - Some(if count > 0 { invalid_mut(count | LOCKED) } else { UNLOCKED }) + Some(if count > 0 { without_provenance_mut(count | LOCKED) } else { UNLOCKED }) } else { None } @@ -444,7 +444,7 @@ impl RwLock { #[inline] pub unsafe fn write_unlock(&self) { if let Err(state) = - self.state.compare_exchange(invalid_mut(LOCKED), UNLOCKED, Release, Relaxed) + self.state.compare_exchange(without_provenance_mut(LOCKED), UNLOCKED, Release, Relaxed) { // SAFETY: // Since other threads cannot acquire the lock, the state can only diff --git a/library/std/src/sys/pal/common/thread_local/os_local.rs b/library/std/src/sys/pal/common/thread_local/os_local.rs index 7cf291921228b..3edffd7e4437c 100644 --- a/library/std/src/sys/pal/common/thread_local/os_local.rs +++ b/library/std/src/sys/pal/common/thread_local/os_local.rs @@ -176,7 +176,7 @@ unsafe extern "C" fn destroy_value(ptr: *mut u8) { if let Err(_) = panic::catch_unwind(|| unsafe { let ptr = Box::from_raw(ptr as *mut Value); let key = ptr.key; - key.os.set(ptr::invalid_mut(1)); + key.os.set(ptr::without_provenance_mut(1)); drop(ptr); key.os.set(ptr::null_mut()); }) { diff --git a/library/std/src/sys/pal/unix/futex.rs b/library/std/src/sys/pal/unix/futex.rs index d310be6c7a1eb..26161a9af79d8 100644 --- a/library/std/src/sys/pal/unix/futex.rs +++ b/library/std/src/sys/pal/unix/futex.rs @@ -53,7 +53,7 @@ pub fn futex_wait(futex: &AtomicU32, expected: u32, timeout: Option) - futex as *const AtomicU32 as *mut _, libc::UMTX_OP_WAIT_UINT_PRIVATE, expected as libc::c_ulong, - crate::ptr::invalid_mut(umtx_timeout_size), + crate::ptr::without_provenance_mut(umtx_timeout_size), umtx_timeout_ptr as *mut _, ) } else if #[cfg(any(target_os = "linux", target_os = "android"))] { diff --git a/library/std/src/sys/pal/unix/thread_parking/netbsd.rs b/library/std/src/sys/pal/unix/thread_parking/netbsd.rs index 3be08122138ab..5eeb37f87634b 100644 --- a/library/std/src/sys/pal/unix/thread_parking/netbsd.rs +++ b/library/std/src/sys/pal/unix/thread_parking/netbsd.rs @@ -25,7 +25,7 @@ pub fn current() -> ThreadId { #[inline] pub fn park(hint: usize) { unsafe { - ___lwp_park60(0, 0, ptr::null_mut(), 0, ptr::invalid(hint), ptr::null()); + ___lwp_park60(0, 0, ptr::null_mut(), 0, ptr::without_provenance(hint), ptr::null()); } } @@ -40,13 +40,20 @@ pub fn park_timeout(dur: Duration, hint: usize) { // Timeout needs to be mutable since it is modified on NetBSD 9.0 and // above. 
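The futex and thread-parking call sites above pass plain integers (a timeout size, a wakeup hint) through pointer-typed OS parameters; with the rename this is spelled `without_provenance`. A hedged sketch of the pattern, assuming nightly with `strict_provenance`; `fake_park` stands in for the real syscall and is made up for illustration.

#![feature(strict_provenance)]
use std::ffi::c_void;
use std::ptr;

// Stand-in for an OS interface that carries an integer in a pointer-typed argument.
fn fake_park(hint: *const c_void) -> usize {
    // The callee only ever looks at the address, never dereferences it.
    hint.addr()
}

fn main() {
    let hint: usize = 42;
    assert_eq!(fake_park(ptr::without_provenance(hint)), 42);
}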
unsafe { - ___lwp_park60(CLOCK_MONOTONIC, 0, &mut timeout, 0, ptr::invalid(hint), ptr::null()); + ___lwp_park60( + CLOCK_MONOTONIC, + 0, + &mut timeout, + 0, + ptr::without_provenance(hint), + ptr::null(), + ); } } #[inline] pub fn unpark(tid: ThreadId, hint: usize) { unsafe { - _lwp_unpark(tid, ptr::invalid(hint)); + _lwp_unpark(tid, ptr::without_provenance(hint)); } } diff --git a/library/std/src/sys/pal/unix/weak.rs b/library/std/src/sys/pal/unix/weak.rs index 61088ff16eddf..48cc8633e93d2 100644 --- a/library/std/src/sys/pal/unix/weak.rs +++ b/library/std/src/sys/pal/unix/weak.rs @@ -80,7 +80,11 @@ pub(crate) struct DlsymWeak { impl DlsymWeak { pub(crate) const fn new(name: &'static str) -> Self { - DlsymWeak { name, func: AtomicPtr::new(ptr::invalid_mut(1)), _marker: PhantomData } + DlsymWeak { + name, + func: AtomicPtr::new(ptr::without_provenance_mut(1)), + _marker: PhantomData, + } } #[inline] diff --git a/library/std/src/sys/pal/windows/c.rs b/library/std/src/sys/pal/windows/c.rs index 1a59ac9a9cadf..6b12d7db8b03a 100644 --- a/library/std/src/sys/pal/windows/c.rs +++ b/library/std/src/sys/pal/windows/c.rs @@ -47,7 +47,7 @@ pub use FD_SET as fd_set; pub use LINGER as linger; pub use TIMEVAL as timeval; -pub const INVALID_HANDLE_VALUE: HANDLE = ::core::ptr::invalid_mut(-1i32 as _); +pub const INVALID_HANDLE_VALUE: HANDLE = ::core::ptr::without_provenance_mut(-1i32 as _); // https://learn.microsoft.com/en-us/cpp/c-runtime-library/exit-success-exit-failure?view=msvc-170 pub const EXIT_SUCCESS: u32 = 0; diff --git a/library/std/src/sys/pal/windows/os.rs b/library/std/src/sys/pal/windows/os.rs index 73cb2db8b79e5..374c9845ea4bb 100644 --- a/library/std/src/sys/pal/windows/os.rs +++ b/library/std/src/sys/pal/windows/os.rs @@ -327,7 +327,7 @@ fn home_dir_crt() -> Option { super::fill_utf16_buf( |buf, mut sz| { match c::GetUserProfileDirectoryW( - ptr::invalid_mut(CURRENT_PROCESS_TOKEN), + ptr::without_provenance_mut(CURRENT_PROCESS_TOKEN), buf, &mut sz, ) { diff --git a/library/std/src/sys/pal/windows/thread_local_key/tests.rs b/library/std/src/sys/pal/windows/thread_local_key/tests.rs index c739f0caf3ec0..4119f99096842 100644 --- a/library/std/src/sys/pal/windows/thread_local_key/tests.rs +++ b/library/std/src/sys/pal/windows/thread_local_key/tests.rs @@ -13,8 +13,8 @@ fn smoke() { unsafe { assert!(K1.get().is_null()); assert!(K2.get().is_null()); - K1.set(ptr::invalid_mut(1)); - K2.set(ptr::invalid_mut(2)); + K1.set(ptr::without_provenance_mut(1)); + K2.set(ptr::without_provenance_mut(2)); assert_eq!(K1.get() as usize, 1); assert_eq!(K2.get() as usize, 2); } diff --git a/library/std/src/sys/pal/windows/thread_parking.rs b/library/std/src/sys/pal/windows/thread_parking.rs index eb9167cd8552b..343b530b15ef9 100644 --- a/library/std/src/sys/pal/windows/thread_parking.rs +++ b/library/std/src/sys/pal/windows/thread_parking.rs @@ -220,7 +220,7 @@ impl Parker { } fn keyed_event_handle() -> c::HANDLE { - const INVALID: c::HANDLE = ptr::invalid_mut(!0); + const INVALID: c::HANDLE = ptr::without_provenance_mut(!0); static HANDLE: AtomicPtr = AtomicPtr::new(INVALID); match HANDLE.load(Relaxed) { INVALID => { diff --git a/library/std/src/sys_common/backtrace.rs b/library/std/src/sys_common/backtrace.rs index adfe721cfa9ad..67711dbd5bc75 100644 --- a/library/std/src/sys_common/backtrace.rs +++ b/library/std/src/sys_common/backtrace.rs @@ -218,7 +218,7 @@ pub fn output_filename( #[cfg(all(target_vendor = "fortanix", target_env = "sgx"))] pub fn set_image_base() { let image_base = 
crate::os::fortanix_sgx::mem::image_base(); - backtrace_rs::set_image_base(crate::ptr::invalid_mut(image_base as _)); + backtrace_rs::set_image_base(crate::ptr::without_provenance_mut(image_base as _)); } #[cfg(not(all(target_vendor = "fortanix", target_env = "sgx")))] diff --git a/library/std/src/sys_common/once/queue.rs b/library/std/src/sys_common/once/queue.rs index def0bcd6fac44..3cc1df113e3f1 100644 --- a/library/std/src/sys_common/once/queue.rs +++ b/library/std/src/sys_common/once/queue.rs @@ -110,7 +110,7 @@ impl Once { #[inline] #[rustc_const_stable(feature = "const_once_new", since = "1.32.0")] pub const fn new() -> Once { - Once { state_and_queue: AtomicPtr::new(ptr::invalid_mut(INCOMPLETE)) } + Once { state_and_queue: AtomicPtr::new(ptr::without_provenance_mut(INCOMPLETE)) } } #[inline] @@ -158,7 +158,7 @@ impl Once { // Try to register this thread as the one RUNNING. let exchange_result = self.state_and_queue.compare_exchange( state_and_queue, - ptr::invalid_mut(RUNNING), + ptr::without_provenance_mut(RUNNING), Ordering::Acquire, Ordering::Acquire, ); @@ -170,14 +170,14 @@ impl Once { // wake them up on drop. let mut waiter_queue = WaiterQueue { state_and_queue: &self.state_and_queue, - set_state_on_drop_to: ptr::invalid_mut(POISONED), + set_state_on_drop_to: ptr::without_provenance_mut(POISONED), }; // Run the initialization function, letting it know if we're // poisoned or not. let init_state = public::OnceState { inner: OnceState { poisoned: state_and_queue.addr() == POISONED, - set_state_on_drop_to: Cell::new(ptr::invalid_mut(COMPLETE)), + set_state_on_drop_to: Cell::new(ptr::without_provenance_mut(COMPLETE)), }, }; init(&init_state); @@ -289,6 +289,6 @@ impl OnceState { #[inline] pub fn poison(&self) { - self.set_state_on_drop_to.set(ptr::invalid_mut(POISONED)); + self.set_state_on_drop_to.set(ptr::without_provenance_mut(POISONED)); } } diff --git a/library/std/src/sys_common/thread_local_key/tests.rs b/library/std/src/sys_common/thread_local_key/tests.rs index 6a44c65d91869..48bed31af517c 100644 --- a/library/std/src/sys_common/thread_local_key/tests.rs +++ b/library/std/src/sys_common/thread_local_key/tests.rs @@ -9,8 +9,8 @@ fn statik() { unsafe { assert!(K1.get().is_null()); assert!(K2.get().is_null()); - K1.set(ptr::invalid_mut(1)); - K2.set(ptr::invalid_mut(2)); + K1.set(ptr::without_provenance_mut(1)); + K2.set(ptr::without_provenance_mut(2)); assert_eq!(K1.get() as usize, 1); assert_eq!(K2.get() as usize, 2); } diff --git a/src/tools/miri/tests/fail/dangling_pointers/deref_dangling_box.rs b/src/tools/miri/tests/fail/dangling_pointers/deref_dangling_box.rs index d2823672ade2f..fa40f942b8f2f 100644 --- a/src/tools/miri/tests/fail/dangling_pointers/deref_dangling_box.rs +++ b/src/tools/miri/tests/fail/dangling_pointers/deref_dangling_box.rs @@ -8,7 +8,7 @@ use std::ptr::{self, addr_of_mut}; // (This test relies on the `deref_copy` pass that lowers `**ptr` to materialize the intermediate pointer.) fn main() { - let mut inner = ptr::invalid::(24); + let mut inner = ptr::without_provenance::(24); let outer = addr_of_mut!(inner).cast::>(); // Now `outer` is a pointer to a dangling reference. // Deref'ing that should be UB. 
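Several call sites in this patch (the queue `RwLock`, `Once`, `DlsymWeak`) keep small integer states inside an `AtomicPtr` by giving them no provenance. A minimal sketch of that pattern, assuming nightly with `strict_provenance`; the state constants are made up for illustration.

#![feature(strict_provenance)]
use std::ptr;
use std::sync::atomic::{AtomicPtr, Ordering};

const EMPTY: usize = 0;
const INITIALIZING: usize = 1;

fn main() {
    // While it holds a sentinel, the pointer is only ever compared by address.
    let state: AtomicPtr<()> = AtomicPtr::new(ptr::without_provenance_mut(EMPTY));
    let prev = state
        .compare_exchange(
            ptr::without_provenance_mut(EMPTY),
            ptr::without_provenance_mut(INITIALIZING),
            Ordering::Acquire,
            Ordering::Acquire,
        )
        .expect("state was EMPTY");
    assert_eq!(prev.addr(), EMPTY);
    assert_eq!(state.load(Ordering::Relaxed).addr(), INITIALIZING);
}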
diff --git a/src/tools/miri/tests/fail/dangling_pointers/deref_dangling_ref.rs b/src/tools/miri/tests/fail/dangling_pointers/deref_dangling_ref.rs index b62e041d70c6a..036ef2580a87f 100644 --- a/src/tools/miri/tests/fail/dangling_pointers/deref_dangling_ref.rs +++ b/src/tools/miri/tests/fail/dangling_pointers/deref_dangling_ref.rs @@ -8,7 +8,7 @@ use std::ptr::{self, addr_of_mut}; // (This test relies on the `deref_copy` pass that lowers `**ptr` to materialize the intermediate pointer.) fn main() { - let mut inner = ptr::invalid::(24); + let mut inner = ptr::without_provenance::(24); let outer = addr_of_mut!(inner).cast::<&'static mut i32>(); // Now `outer` is a pointer to a dangling reference. // Deref'ing that should be UB. diff --git a/src/tools/miri/tests/fail/provenance/ptr_invalid.rs b/src/tools/miri/tests/fail/provenance/ptr_invalid.rs index 5d44928d1d24d..730859684a0ce 100644 --- a/src/tools/miri/tests/fail/provenance/ptr_invalid.rs +++ b/src/tools/miri/tests/fail/provenance/ptr_invalid.rs @@ -1,9 +1,9 @@ #![feature(strict_provenance, exposed_provenance)] -// Ensure that a `ptr::invalid` ptr is truly invalid. +// Ensure that a `ptr::without_provenance` ptr is truly invalid. fn main() { let x = 42; let xptr = &x as *const i32; - let xptr_invalid = std::ptr::invalid::(xptr.expose_addr()); + let xptr_invalid = std::ptr::without_provenance::(xptr.expose_addr()); let _val = unsafe { *xptr_invalid }; //~ ERROR: is a dangling pointer } diff --git a/src/tools/miri/tests/fail/provenance/ptr_invalid_offset.rs b/src/tools/miri/tests/fail/provenance/ptr_invalid_offset.rs index 91ba18f768055..c8be521ef823d 100644 --- a/src/tools/miri/tests/fail/provenance/ptr_invalid_offset.rs +++ b/src/tools/miri/tests/fail/provenance/ptr_invalid_offset.rs @@ -4,7 +4,7 @@ fn main() { let x = 22; let ptr = &x as *const _ as *const u8; - let roundtrip = std::ptr::invalid::(ptr as usize); + let roundtrip = std::ptr::without_provenance::(ptr as usize); // Not even offsetting this is allowed. let _ = unsafe { roundtrip.offset(1) }; //~ERROR: is a dangling pointer } diff --git a/src/tools/miri/tests/pass-dep/shims/mmap.rs b/src/tools/miri/tests/pass-dep/shims/mmap.rs index 7bbb9dd53cb87..0cbe8d942946d 100644 --- a/src/tools/miri/tests/pass-dep/shims/mmap.rs +++ b/src/tools/miri/tests/pass-dep/shims/mmap.rs @@ -71,7 +71,7 @@ fn test_mmap( let ptr = unsafe { mmap( - ptr::invalid_mut(page_size * 64), + ptr::without_provenance_mut(page_size * 64), page_size, libc::PROT_READ | libc::PROT_WRITE, // We don't support MAP_FIXED @@ -114,13 +114,13 @@ fn test_mmap( assert_eq!(ptr, libc::MAP_FAILED); // We report an error when trying to munmap an address which is not a multiple of the page size - let res = unsafe { libc::munmap(ptr::invalid_mut(1), page_size) }; + let res = unsafe { libc::munmap(ptr::without_provenance_mut(1), page_size) }; assert_eq!(res, -1); assert_eq!(Error::last_os_error().raw_os_error().unwrap(), libc::EINVAL); // We report an error when trying to munmap a length that cannot be rounded up to a multiple of // the page size. 
- let res = unsafe { libc::munmap(ptr::invalid_mut(page_size), usize::MAX - 1) }; + let res = unsafe { libc::munmap(ptr::without_provenance_mut(page_size), usize::MAX - 1) }; assert_eq!(res, -1); assert_eq!(Error::last_os_error().raw_os_error().unwrap(), libc::EINVAL); } @@ -156,7 +156,7 @@ fn test_mremap() { // Test all of our error conditions // Not aligned let ptr = - unsafe { libc::mremap(ptr::invalid_mut(1), page_size, page_size, libc::MREMAP_MAYMOVE) }; + unsafe { libc::mremap(ptr::without_provenance_mut(1), page_size, page_size, libc::MREMAP_MAYMOVE) }; assert_eq!(ptr, libc::MAP_FAILED); assert_eq!(Error::last_os_error().raw_os_error().unwrap(), libc::EINVAL); diff --git a/src/tools/miri/tests/pass-dep/shims/posix_memalign.rs b/src/tools/miri/tests/pass-dep/shims/posix_memalign.rs index 9bd8a00d68dcd..5cf62995fbee2 100644 --- a/src/tools/miri/tests/pass-dep/shims/posix_memalign.rs +++ b/src/tools/miri/tests/pass-dep/shims/posix_memalign.rs @@ -58,7 +58,7 @@ fn main() { // Non-power of 2 align unsafe { - let mut ptr: *mut libc::c_void = ptr::invalid_mut(0x1234567); + let mut ptr: *mut libc::c_void = ptr::without_provenance_mut(0x1234567); let align = 15; let size = 8; assert_eq!(libc::posix_memalign(&mut ptr, align, size), libc::EINVAL); @@ -70,7 +70,7 @@ fn main() { // Too small align (smaller than ptr) unsafe { - let mut ptr: *mut libc::c_void = ptr::invalid_mut(0x1234567); + let mut ptr: *mut libc::c_void = ptr::without_provenance_mut(0x1234567); let align = std::mem::size_of::() / 2; let size = 8; assert_eq!(libc::posix_memalign(&mut ptr, align, size), libc::EINVAL); diff --git a/src/tools/miri/tests/pass/align_offset_symbolic.rs b/src/tools/miri/tests/pass/align_offset_symbolic.rs index ac28c63e08110..c32fa2c8f9bda 100644 --- a/src/tools/miri/tests/pass/align_offset_symbolic.rs +++ b/src/tools/miri/tests/pass/align_offset_symbolic.rs @@ -100,7 +100,7 @@ fn huge_align() { #[cfg(target_pointer_width = "16")] const SIZE: usize = 1 << 13; struct HugeSize(#[allow(dead_code)] [u8; SIZE - 1]); - let _ = std::ptr::invalid::(SIZE).align_offset(SIZE); + let _ = std::ptr::without_provenance::(SIZE).align_offset(SIZE); } // This shows that we cannot store the promised alignment info in `AllocExtra`, diff --git a/src/tools/miri/tests/pass/atomic.rs b/src/tools/miri/tests/pass/atomic.rs index 60b8ff87b59e3..dfdc9b42f81fc 100644 --- a/src/tools/miri/tests/pass/atomic.rs +++ b/src/tools/miri/tests/pass/atomic.rs @@ -137,7 +137,7 @@ fn atomic_ptr() { let ptr = AtomicPtr::::new(ptr::null_mut()); assert!(ptr.load(Relaxed).addr() == 0); - ptr.store(ptr::invalid_mut(13), SeqCst); + ptr.store(ptr::without_provenance_mut(13), SeqCst); assert!(ptr.swap(x, Relaxed).addr() == 13); unsafe { assert!(*ptr.load(Acquire) == 0) }; @@ -145,7 +145,7 @@ fn atomic_ptr() { assert_eq!( ptr.compare_exchange( (&mut 0 as *mut i32).with_addr(x.addr()), - ptr::invalid_mut(0), + ptr::without_provenance_mut(0), SeqCst, SeqCst ) @@ -156,7 +156,7 @@ fn atomic_ptr() { assert_eq!( ptr.compare_exchange( (&mut 0 as *mut i32).with_addr(x.addr()), - ptr::invalid_mut(0), + ptr::without_provenance_mut(0), SeqCst, SeqCst ) diff --git a/src/tools/miri/tests/pass/ptr_raw.rs b/src/tools/miri/tests/pass/ptr_raw.rs index 11c3455a9ca51..dcf13d97ce3d1 100644 --- a/src/tools/miri/tests/pass/ptr_raw.rs +++ b/src/tools/miri/tests/pass/ptr_raw.rs @@ -35,12 +35,12 @@ fn assign_overlapping() { fn deref_invalid() { unsafe { // `addr_of!(*ptr)` is never UB. 
- let _val = addr_of!(*ptr::invalid::(0)); - let _val = addr_of!(*ptr::invalid::(1)); // not aligned + let _val = addr_of!(*ptr::without_provenance::(0)); + let _val = addr_of!(*ptr::without_provenance::(1)); // not aligned // Similarly, just mentioning the place is fine. - let _ = *ptr::invalid::(0); - let _ = *ptr::invalid::(1); + let _ = *ptr::without_provenance::(0); + let _ = *ptr::without_provenance::(1); } } diff --git a/src/tools/miri/tests/pass/slices.rs b/src/tools/miri/tests/pass/slices.rs index a99e921150b31..d30ca96ea41cc 100644 --- a/src/tools/miri/tests/pass/slices.rs +++ b/src/tools/miri/tests/pass/slices.rs @@ -29,7 +29,7 @@ fn slice_of_zst() { // In a slice of zero-size elements the pointer is meaningless. // Ensure iteration still works even if the pointer is at the end of the address space. - let slice: &[()] = unsafe { slice::from_raw_parts(ptr::invalid(-5isize as usize), 10) }; + let slice: &[()] = unsafe { slice::from_raw_parts(ptr::without_provenance(-5isize as usize), 10) }; assert_eq!(slice.len(), 10); assert_eq!(slice.iter().count(), 10); @@ -43,7 +43,7 @@ fn slice_of_zst() { // Test mutable iterators as well let slice: &mut [()] = - unsafe { slice::from_raw_parts_mut(ptr::invalid_mut(-5isize as usize), 10) }; + unsafe { slice::from_raw_parts_mut(ptr::without_provenance_mut(-5isize as usize), 10) }; assert_eq!(slice.len(), 10); assert_eq!(slice.iter_mut().count(), 10); @@ -263,7 +263,7 @@ fn test_for_invalidated_pointers() { fn large_raw_slice() { let size = isize::MAX as usize; // Creating a raw slice of size isize::MAX and asking for its size is okay. - let s = std::ptr::slice_from_raw_parts(ptr::invalid::(1), size); + let s = std::ptr::slice_from_raw_parts(ptr::without_provenance::(1), size); assert_eq!(size, unsafe { std::mem::size_of_val_raw(s) }); } diff --git a/src/tools/miri/tests/pass/underscore_pattern.rs b/src/tools/miri/tests/pass/underscore_pattern.rs index b0e85bc1bb038..f0afe5589546e 100644 --- a/src/tools/miri/tests/pass/underscore_pattern.rs +++ b/src/tools/miri/tests/pass/underscore_pattern.rs @@ -38,7 +38,7 @@ fn invalid_match() { fn dangling_let() { unsafe { - let ptr = ptr::invalid::(0x40); + let ptr = ptr::without_provenance::(0x40); let _ = *ptr; } } @@ -54,7 +54,7 @@ fn invalid_let() { // Adding a type annotation used to change how MIR is generated, make sure we cover both cases. 
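As the Miri tests above check, merely forming a place from a no-provenance pointer (for example via `addr_of!`) performs no access and is allowed; only an actual load or store needs provenance. A hedged sketch, assuming nightly with `strict_provenance`:

#![feature(strict_provenance)]
use std::ptr::{self, addr_of};

fn main() {
    let p = ptr::without_provenance::<u32>(0x40);
    // Taking the raw address of the place `*p` never reads from it.
    let q: *const u32 = unsafe { addr_of!(*p) };
    assert_eq!(q.addr(), 0x40);
    // An actual read, `unsafe { *p }`, would be UB here.
}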
 fn dangling_let_type_annotation() {
     unsafe {
-        let ptr = ptr::invalid::<bool>(0x40);
+        let ptr = ptr::without_provenance::<bool>(0x40);
         let _: bool = *ptr;
     }
 }
diff --git a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.32bit.panic-abort.diff b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.32bit.panic-abort.diff
index 80191a21f4feb..e6b8d5e6c21bc 100644
--- a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.32bit.panic-abort.diff
+++ b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.32bit.panic-abort.diff
@@ -17,26 +17,28 @@
           scope 4 (inlined Unique::<[bool; 0]>::dangling) {
               let mut _5: std::ptr::NonNull<[bool; 0]>;
               scope 5 (inlined NonNull::<[bool; 0]>::dangling) {
-                  let mut _7: usize;
                   scope 6 {
                       let _6: *mut [bool; 0];
                       scope 7 {
                           debug ptr => _6;
-                          scope 11 (inlined NonNull::<[bool; 0]>::new_unchecked) {
+                          scope 12 (inlined NonNull::<[bool; 0]>::new_unchecked) {
                               debug ptr => _6;
                               let mut _8: bool;
                               let _9: ();
                               let mut _10: *mut ();
                               let mut _11: *const [bool; 0];
-                              scope 12 {
+                              scope 13 {
                               }
                           }
                       }
-                      scope 8 (inlined align_of::<[bool; 0]>) {
-                      }
-                      scope 9 (inlined invalid_mut::<[bool; 0]>) {
-                          debug addr => _7;
-                          scope 10 {
+                      scope 8 (inlined dangling_mut::<[bool; 0]>) {
+                          let mut _7: usize;
+                          scope 9 (inlined align_of::<[bool; 0]>) {
+                          }
+                          scope 10 (inlined without_provenance_mut::<[bool; 0]>) {
+                              debug addr => _7;
+                              scope 11 {
+                              }
                           }
                       }
                   }
diff --git a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.32bit.panic-unwind.diff b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.32bit.panic-unwind.diff
index ed878978e4bf7..bd74591018bc7 100644
--- a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.32bit.panic-unwind.diff
+++ b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.32bit.panic-unwind.diff
@@ -17,26 +17,28 @@
           scope 4 (inlined Unique::<[bool; 0]>::dangling) {
               let mut _5: std::ptr::NonNull<[bool; 0]>;
               scope 5 (inlined NonNull::<[bool; 0]>::dangling) {
-                  let mut _7: usize;
                   scope 6 {
                       let _6: *mut [bool; 0];
                       scope 7 {
                           debug ptr => _6;
-                          scope 11 (inlined NonNull::<[bool; 0]>::new_unchecked) {
+                          scope 12 (inlined NonNull::<[bool; 0]>::new_unchecked) {
                               debug ptr => _6;
                               let mut _8: bool;
                               let _9: ();
                               let mut _10: *mut ();
                               let mut _11: *const [bool; 0];
-                              scope 12 {
+                              scope 13 {
                               }
                           }
                       }
-                      scope 8 (inlined align_of::<[bool; 0]>) {
-                      }
-                      scope 9 (inlined invalid_mut::<[bool; 0]>) {
-                          debug addr => _7;
-                          scope 10 {
+                      scope 8 (inlined dangling_mut::<[bool; 0]>) {
+                          let mut _7: usize;
+                          scope 9 (inlined align_of::<[bool; 0]>) {
+                          }
+                          scope 10 (inlined without_provenance_mut::<[bool; 0]>) {
+                              debug addr => _7;
+                              scope 11 {
+                              }
                           }
                       }
                   }
diff --git a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.64bit.panic-abort.diff b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.64bit.panic-abort.diff
index a61902501bf9f..fdbb0b2df03ae 100644
--- a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.64bit.panic-abort.diff
+++ b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.64bit.panic-abort.diff
@@ -17,26 +17,28 @@
           scope 4 (inlined Unique::<[bool; 0]>::dangling) {
               let mut _5: std::ptr::NonNull<[bool; 0]>;
               scope 5 (inlined NonNull::<[bool; 0]>::dangling) {
-                  let mut _7: usize;
                   scope 6 {
                       let _6: *mut [bool; 0];
                       scope 7 {
                           debug ptr => _6;
-                          scope 11 (inlined NonNull::<[bool; 0]>::new_unchecked) {
+                          scope 12 (inlined NonNull::<[bool; 0]>::new_unchecked) {
                               debug ptr => _6;
                               let mut _8: bool;
                               let _9: ();
                               let mut _10: *mut ();
                               let mut _11: *const [bool; 0];
-                              scope 12 {
+                              scope 13 {
                               }
                           }
                       }
-                      scope 8 (inlined align_of::<[bool; 0]>) {
-                      }
-                      scope 9 (inlined invalid_mut::<[bool; 0]>) {
-                          debug addr => _7;
-                          scope 10 {
+                      scope 8 (inlined dangling_mut::<[bool; 0]>) {
+                          let mut _7: usize;
+                          scope 9 (inlined align_of::<[bool; 0]>) {
+                          }
+                          scope 10 (inlined without_provenance_mut::<[bool; 0]>) {
+                              debug addr => _7;
+                              scope 11 {
+                              }
                           }
                       }
                   }
diff --git a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.64bit.panic-unwind.diff b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.64bit.panic-unwind.diff
index fca7fe89b4a73..d6b5984b81dd6 100644
--- a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.64bit.panic-unwind.diff
+++ b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.DataflowConstProp.64bit.panic-unwind.diff
@@ -17,26 +17,28 @@
           scope 4 (inlined Unique::<[bool; 0]>::dangling) {
               let mut _5: std::ptr::NonNull<[bool; 0]>;
               scope 5 (inlined NonNull::<[bool; 0]>::dangling) {
-                  let mut _7: usize;
                   scope 6 {
                       let _6: *mut [bool; 0];
                       scope 7 {
                           debug ptr => _6;
-                          scope 11 (inlined NonNull::<[bool; 0]>::new_unchecked) {
+                          scope 12 (inlined NonNull::<[bool; 0]>::new_unchecked) {
                               debug ptr => _6;
                               let mut _8: bool;
                               let _9: ();
                               let mut _10: *mut ();
                               let mut _11: *const [bool; 0];
-                              scope 12 {
+                              scope 13 {
                               }
                           }
                       }
-                      scope 8 (inlined align_of::<[bool; 0]>) {
-                      }
-                      scope 9 (inlined invalid_mut::<[bool; 0]>) {
-                          debug addr => _7;
-                          scope 10 {
+                      scope 8 (inlined dangling_mut::<[bool; 0]>) {
+                          let mut _7: usize;
+                          scope 9 (inlined align_of::<[bool; 0]>) {
+                          }
+                          scope 10 (inlined without_provenance_mut::<[bool; 0]>) {
+                              debug addr => _7;
+                              scope 11 {
+                              }
                           }
                       }
                   }
diff --git a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.32bit.panic-abort.diff b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.32bit.panic-abort.diff
index 0ced2e4deed1d..c7445aaee6c55 100644
--- a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.32bit.panic-abort.diff
+++ b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.32bit.panic-abort.diff
@@ -17,26 +17,28 @@
           scope 4 (inlined Unique::<[bool; 0]>::dangling) {
               let mut _5: std::ptr::NonNull<[bool; 0]>;
               scope 5 (inlined NonNull::<[bool; 0]>::dangling) {
-                  let mut _7: usize;
                   scope 6 {
                       let _6: *mut [bool; 0];
                       scope 7 {
                           debug ptr => _6;
-                          scope 11 (inlined NonNull::<[bool; 0]>::new_unchecked) {
+                          scope 12 (inlined NonNull::<[bool; 0]>::new_unchecked) {
                               debug ptr => _6;
                               let mut _8: bool;
                               let _9: ();
                               let mut _10: *mut ();
                               let mut _11: *const [bool; 0];
-                              scope 12 {
+                              scope 13 {
                               }
                           }
                       }
-                      scope 8 (inlined align_of::<[bool; 0]>) {
-                      }
-                      scope 9 (inlined invalid_mut::<[bool; 0]>) {
-                          debug addr => _7;
-                          scope 10 {
+                      scope 8 (inlined dangling_mut::<[bool; 0]>) {
+                          let mut _7: usize;
+                          scope 9 (inlined align_of::<[bool; 0]>) {
+                          }
+                          scope 10 (inlined without_provenance_mut::<[bool; 0]>) {
+                              debug addr => _7;
+                              scope 11 {
+                              }
                           }
                       }
                   }
diff --git a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.32bit.panic-unwind.diff b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.32bit.panic-unwind.diff
index e17d76a6d9540..b8e961bc08750 100644
--- a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.32bit.panic-unwind.diff
+++ b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.32bit.panic-unwind.diff
@@ -17,26 +17,28 @@
           scope 4 (inlined Unique::<[bool; 0]>::dangling) {
               let mut _5: std::ptr::NonNull<[bool; 0]>;
               scope 5 (inlined NonNull::<[bool; 0]>::dangling) {
-                  let mut _7: usize;
                   scope 6 {
                       let _6: *mut [bool; 0];
                       scope 7 {
                           debug ptr => _6;
-                          scope 11 (inlined NonNull::<[bool; 0]>::new_unchecked) {
+                          scope 12 (inlined NonNull::<[bool; 0]>::new_unchecked) {
                               debug ptr => _6;
                               let mut _8: bool;
                               let _9: ();
                               let mut _10: *mut ();
                               let mut _11: *const [bool; 0];
-                              scope 12 {
+                              scope 13 {
                               }
                           }
                       }
-                      scope 8 (inlined align_of::<[bool; 0]>) {
-                      }
-                      scope 9 (inlined invalid_mut::<[bool; 0]>) {
-                          debug addr => _7;
-                          scope 10 {
+                      scope 8 (inlined dangling_mut::<[bool; 0]>) {
+                          let mut _7: usize;
+                          scope 9 (inlined align_of::<[bool; 0]>) {
+                          }
+                          scope 10 (inlined without_provenance_mut::<[bool; 0]>) {
+                              debug addr => _7;
+                              scope 11 {
+                              }
                           }
                       }
                   }
diff --git a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.64bit.panic-abort.diff b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.64bit.panic-abort.diff
index ff68b3c2d55d6..9678db90d0590 100644
--- a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.64bit.panic-abort.diff
+++ b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.64bit.panic-abort.diff
@@ -17,26 +17,28 @@
           scope 4 (inlined Unique::<[bool; 0]>::dangling) {
               let mut _5: std::ptr::NonNull<[bool; 0]>;
               scope 5 (inlined NonNull::<[bool; 0]>::dangling) {
-                  let mut _7: usize;
                   scope 6 {
                       let _6: *mut [bool; 0];
                       scope 7 {
                           debug ptr => _6;
-                          scope 11 (inlined NonNull::<[bool; 0]>::new_unchecked) {
+                          scope 12 (inlined NonNull::<[bool; 0]>::new_unchecked) {
                               debug ptr => _6;
                               let mut _8: bool;
                               let _9: ();
                               let mut _10: *mut ();
                               let mut _11: *const [bool; 0];
-                              scope 12 {
+                              scope 13 {
                               }
                           }
                       }
-                      scope 8 (inlined align_of::<[bool; 0]>) {
-                      }
-                      scope 9 (inlined invalid_mut::<[bool; 0]>) {
-                          debug addr => _7;
-                          scope 10 {
+                      scope 8 (inlined dangling_mut::<[bool; 0]>) {
+                          let mut _7: usize;
+                          scope 9 (inlined align_of::<[bool; 0]>) {
+                          }
+                          scope 10 (inlined without_provenance_mut::<[bool; 0]>) {
+                              debug addr => _7;
+                              scope 11 {
+                              }
                           }
                       }
                   }
diff --git a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.64bit.panic-unwind.diff b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.64bit.panic-unwind.diff
index de951e57fb9f2..8aa6c9c23e9a4 100644
--- a/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.64bit.panic-unwind.diff
+++ b/tests/mir-opt/dataflow-const-prop/default_boxed_slice.main.GVN.64bit.panic-unwind.diff
@@ -17,26 +17,28 @@
           scope 4 (inlined Unique::<[bool; 0]>::dangling) {
               let mut _5: std::ptr::NonNull<[bool; 0]>;
               scope 5 (inlined NonNull::<[bool; 0]>::dangling) {
-                  let mut _7: usize;
                   scope 6 {
                       let _6: *mut [bool; 0];
                       scope 7 {
                           debug ptr => _6;
-                          scope 11 (inlined NonNull::<[bool; 0]>::new_unchecked) {
+                          scope 12 (inlined NonNull::<[bool; 0]>::new_unchecked) {
                               debug ptr => _6;
                               let mut _8: bool;
                               let _9: ();
                               let mut _10: *mut ();
                               let mut _11: *const [bool; 0];
-                              scope 12 {
+                              scope 13 {
                               }
                           }
                       }
-                      scope 8 (inlined align_of::<[bool; 0]>) {
-                      }
-                      scope 9 (inlined invalid_mut::<[bool; 0]>) {
-                          debug addr => _7;
-                          scope 10 {
+                      scope 8 (inlined dangling_mut::<[bool; 0]>) {
+                          let mut _7: usize;
+                          scope 9 (inlined align_of::<[bool; 0]>) {
+                          }
+                          scope 10 (inlined without_provenance_mut::<[bool; 0]>) {
+                              debug addr => _7;
+                              scope 11 {
+                              }
                           }
                       }
                   }
diff --git a/tests/mir-opt/pre-codegen/slice_iter.enumerated_loop.PreCodegen.after.panic-abort.mir b/tests/mir-opt/pre-codegen/slice_iter.enumerated_loop.PreCodegen.after.panic-abort.mir
index 05b01404b69ad..1eda1ac13658b 100644
--- a/tests/mir-opt/pre-codegen/slice_iter.enumerated_loop.PreCodegen.after.panic-abort.mir
+++ b/tests/mir-opt/pre-codegen/slice_iter.enumerated_loop.PreCodegen.after.panic-abort.mir
@@ -42,7 +42,7 @@ fn enumerated_loop(_1: &[T], _2: impl Fn(usize, &T)) -> () {
      scope 8 {
          debug end_or_len => _11;
      }
-     scope 14 (inlined invalid::<T>) {
+     scope 14 (inlined without_provenance::<T>) {
          debug addr => _3;
          scope 15 {
          }
diff --git a/tests/mir-opt/pre-codegen/slice_iter.enumerated_loop.PreCodegen.after.panic-unwind.mir b/tests/mir-opt/pre-codegen/slice_iter.enumerated_loop.PreCodegen.after.panic-unwind.mir
index 1fb29f5c662a2..3cd79654facd9 100644
--- a/tests/mir-opt/pre-codegen/slice_iter.enumerated_loop.PreCodegen.after.panic-unwind.mir
+++ b/tests/mir-opt/pre-codegen/slice_iter.enumerated_loop.PreCodegen.after.panic-unwind.mir
@@ -42,7 +42,7 @@ fn enumerated_loop(_1: &[T], _2: impl Fn(usize, &T)) -> () {
      scope 8 {
          debug end_or_len => _11;
      }
-     scope 14 (inlined invalid::<T>) {
+     scope 14 (inlined without_provenance::<T>) {
          debug addr => _3;
          scope 15 {
          }
diff --git a/tests/mir-opt/pre-codegen/slice_iter.forward_loop.PreCodegen.after.panic-abort.mir b/tests/mir-opt/pre-codegen/slice_iter.forward_loop.PreCodegen.after.panic-abort.mir
index 2e63030aa5eab..a6995bbcbe3b9 100644
--- a/tests/mir-opt/pre-codegen/slice_iter.forward_loop.PreCodegen.after.panic-abort.mir
+++ b/tests/mir-opt/pre-codegen/slice_iter.forward_loop.PreCodegen.after.panic-abort.mir
@@ -39,7 +39,7 @@ fn forward_loop(_1: &[T], _2: impl Fn(&T)) -> () {
      scope 8 {
          debug end_or_len => _11;
      }
-     scope 14 (inlined invalid::<T>) {
+     scope 14 (inlined without_provenance::<T>) {
          debug addr => _3;
          scope 15 {
          }
diff --git a/tests/mir-opt/pre-codegen/slice_iter.forward_loop.PreCodegen.after.panic-unwind.mir b/tests/mir-opt/pre-codegen/slice_iter.forward_loop.PreCodegen.after.panic-unwind.mir
index b6b6b6972e97e..039b7e1aa4770 100644
--- a/tests/mir-opt/pre-codegen/slice_iter.forward_loop.PreCodegen.after.panic-unwind.mir
+++ b/tests/mir-opt/pre-codegen/slice_iter.forward_loop.PreCodegen.after.panic-unwind.mir
@@ -39,7 +39,7 @@ fn forward_loop(_1: &[T], _2: impl Fn(&T)) -> () {
      scope 8 {
          debug end_or_len => _11;
      }
-     scope 14 (inlined invalid::<T>) {
+     scope 14 (inlined without_provenance::<T>) {
          debug addr => _3;
          scope 15 {
          }
diff --git a/tests/mir-opt/pre-codegen/slice_iter.reverse_loop.PreCodegen.after.panic-abort.mir b/tests/mir-opt/pre-codegen/slice_iter.reverse_loop.PreCodegen.after.panic-abort.mir
index a78e46a0b787e..2465c2381ada7 100644
--- a/tests/mir-opt/pre-codegen/slice_iter.reverse_loop.PreCodegen.after.panic-abort.mir
+++ b/tests/mir-opt/pre-codegen/slice_iter.reverse_loop.PreCodegen.after.panic-abort.mir
@@ -44,7 +44,7 @@ fn reverse_loop(_1: &[T], _2: impl Fn(&T)) -> () {
      scope 8 {
          debug end_or_len => _11;
      }
-     scope 14 (inlined invalid::<T>) {
+     scope 14 (inlined without_provenance::<T>) {
          debug addr => _3;
          scope 15 {
          }
diff --git a/tests/mir-opt/pre-codegen/slice_iter.reverse_loop.PreCodegen.after.panic-unwind.mir b/tests/mir-opt/pre-codegen/slice_iter.reverse_loop.PreCodegen.after.panic-unwind.mir
index 4e54a23e81998..c5219ac3390c1 100644
--- a/tests/mir-opt/pre-codegen/slice_iter.reverse_loop.PreCodegen.after.panic-unwind.mir
+++ b/tests/mir-opt/pre-codegen/slice_iter.reverse_loop.PreCodegen.after.panic-unwind.mir
@@ -44,7 +44,7 @@ fn reverse_loop(_1: &[T], _2: impl Fn(&T)) -> () {
      scope 8 {
          debug end_or_len => _11;
      }
-     scope 14 (inlined invalid::<T>) {
+     scope 14 (inlined without_provenance::<T>) {
          debug addr => _3;
          scope 15 {
          }
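Editorial note, not part of the patch: the renamed helpers that the test expectations above exercise behave as in the following minimal Rust sketch. It reuses the 0x40 address from the dangling-pointer test and assumes a toolchain where ptr::without_provenance, ptr::dangling_mut, and pointer::addr are available (at the time of this change they were unstable behind #![feature(strict_provenance)]).

use std::mem::align_of;
use std::ptr::{self, NonNull};

fn main() {
    // `without_provenance` turns a bare address into a pointer that carries no
    // provenance: it can be compared, inspected, or stored as a sentinel, but
    // it must never be dereferenced.
    let sentinel: *const u8 = ptr::without_provenance(0x40);
    assert_eq!(sentinel.addr(), 0x40);

    // `dangling_mut` is the well-aligned, non-null placeholder that the
    // NonNull::<T>::dangling() MIR above now inlines: its address is exactly
    // the type's alignment, so it is valid for NonNull but not for reads.
    let placeholder: *mut u64 = ptr::dangling_mut();
    assert_eq!(placeholder.addr(), align_of::<u64>());
    assert!(NonNull::new(placeholder).is_some());
}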
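The slice_iter expectations above appear to correspond to the zero-sized-element arm of the slice iterators, where the end_or_len slot stores a remaining count as a provenance-free pointer rather than a real end pointer. A hedged sketch of that pattern follows; ZstIter and remaining are illustrative names, not the standard library's types.

use std::ptr;

struct ZstIter<T> {
    // For zero-sized element types the "end pointer" slot just smuggles the
    // remaining count through a pointer without provenance.
    end_or_len: *const T,
}

impl<T> ZstIter<T> {
    fn new(len: usize) -> Self {
        assert_eq!(std::mem::size_of::<T>(), 0, "sketch only covers ZSTs");
        ZstIter { end_or_len: ptr::without_provenance(len) }
    }

    fn remaining(&self) -> usize {
        // The address is only ever read back as a count; the pointer is never
        // dereferenced, so the missing provenance is harmless.
        self.end_or_len.addr()
    }
}

fn main() {
    let it = ZstIter::<()>::new(3);
    assert_eq!(it.remaining(), 3);
}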