From 6c65e34a64aec6bfdc3d45266b6f2adc889f87ee Mon Sep 17 00:00:00 2001
From: Scott McMurray
Date: Fri, 17 Mar 2023 02:10:38 -0700
Subject: [PATCH] Less `as *` in library/core

`cast`/`cast_const`/`cast_mut` have been stable and const-stable for a
while, so let's use them instead of `as`, for clarity about what each
cast is doing and to make the `as` casts that do anything else stand
out. After all, if it had existed back then, using `.cast::<T>()`
instead of `as *mut T` would have helped catch the soundness bug back
in .

I'm working on a lint to enforce this, which is how I found all these
cases :)

Old zulip conversation about moving things off `as` where feasible: .
---
 library/core/src/array/mod.rs             |  4 ++--
 library/core/src/cell.rs                  |  2 +-
 library/core/src/hash/mod.rs              |  2 +-
 library/core/src/iter/adapters/copied.rs  |  4 ++--
 library/core/src/mem/maybe_uninit.rs      | 19 ++++++++++---------
 library/core/src/mem/mod.rs               |  4 ++--
 library/core/src/ptr/const_ptr.rs         |  8 ++++----
 library/core/src/ptr/mod.rs               |  4 ++--
 library/core/src/ptr/mut_ptr.rs           | 16 ++++++++--------
 library/core/src/slice/ascii.rs           |  6 +++---
 library/core/src/slice/cmp.rs             |  4 ++--
 library/core/src/slice/iter.rs            |  2 +-
 library/core/src/slice/mod.rs             | 12 ++++++------
 library/core/src/slice/rotate.rs          |  2 +-
 library/core/src/str/pattern.rs           |  8 ++++----
 library/core/src/str/validations.rs       |  2 +-
 library/core/src/sync/atomic.rs           | 10 +++++-----
 17 files changed, 55 insertions(+), 54 deletions(-)

diff --git a/library/core/src/array/mod.rs b/library/core/src/array/mod.rs
index 1643842d60756..f716adec178f0 100644
--- a/library/core/src/array/mod.rs
+++ b/library/core/src/array/mod.rs
@@ -253,7 +253,7 @@ impl<'a, T, const N: usize> TryFrom<&'a [T]> for &'a [T; N] {

     fn try_from(slice: &[T]) -> Result<&[T; N], TryFromSliceError> {
         if slice.len() == N {
-            let ptr = slice.as_ptr() as *const [T; N];
+            let ptr = slice.as_ptr().cast::<[T; N]>();
             // SAFETY: ok because we just checked that the length fits
             unsafe { Ok(&*ptr) }
         } else {
@@ -280,7 +280,7 @@ impl<'a, T, const N: usize> TryFrom<&'a mut [T]> for &'a mut [T; N] {

     fn try_from(slice: &mut [T]) -> Result<&mut [T; N], TryFromSliceError> {
         if slice.len() == N {
-            let ptr = slice.as_mut_ptr() as *mut [T; N];
+            let ptr = slice.as_mut_ptr().cast::<[T; N]>();
             // SAFETY: ok because we just checked that the length fits
             unsafe { Ok(&mut *ptr) }
         } else {
diff --git a/library/core/src/cell.rs b/library/core/src/cell.rs
index 9d1720acf3663..8ac3a2f000998 100644
--- a/library/core/src/cell.rs
+++ b/library/core/src/cell.rs
@@ -624,7 +624,7 @@ impl<T, const N: usize> Cell<[T; N]> {
     #[unstable(feature = "as_array_of_cells", issue = "88248")]
     pub fn as_array_of_cells(&self) -> &[Cell<T>; N] {
         // SAFETY: `Cell` has the same memory layout as `T`.
-        unsafe { &*(self as *const Cell<[T; N]> as *const [Cell<T>; N]) }
+        unsafe { &*ptr::from_ref(self).cast::<[Cell<T>; N]>() }
     }
 }

diff --git a/library/core/src/hash/mod.rs b/library/core/src/hash/mod.rs
index 4e7bae7bcb05a..0fdc0d6f3a434 100644
--- a/library/core/src/hash/mod.rs
+++ b/library/core/src/hash/mod.rs
@@ -835,7 +835,7 @@ mod impls {
             #[inline]
             fn hash_slice<H: Hasher>(data: &[$ty], state: &mut H) {
                 let newlen = mem::size_of_val(data);
-                let ptr = data.as_ptr() as *const u8;
+                let ptr = data.as_ptr().cast::<u8>();
                 // SAFETY: `ptr` is valid and aligned, as this macro is only used
                 // for numeric primitives which have no padding. The new slice only
                 // spans across `data` and is never mutated, and its total size is the
diff --git a/library/core/src/iter/adapters/copied.rs b/library/core/src/iter/adapters/copied.rs
index 62d3afb81603d..d84b4057e1ea8 100644
--- a/library/core/src/iter/adapters/copied.rs
+++ b/library/core/src/iter/adapters/copied.rs
@@ -223,7 +223,7 @@ where
         unsafe {
             ptr::copy_nonoverlapping(
                 self.as_ref().as_ptr(),
-                raw_array.as_mut_ptr() as *mut T,
+                raw_array.as_mut_ptr().cast::<T>(),
                 len,
             );
             let _ = self.advance_by(len);
@@ -234,7 +234,7 @@ where
        // SAFETY: `len` is larger than the array size. Copy a fixed amount here to fully initialize
        // the array.
        unsafe {
-            ptr::copy_nonoverlapping(self.as_ref().as_ptr(), raw_array.as_mut_ptr() as *mut T, N);
+            ptr::copy_nonoverlapping(self.as_ref().as_ptr(), raw_array.as_mut_ptr().cast::<T>(), N);
            let _ = self.advance_by(N);
            Ok(MaybeUninit::array_assume_init(raw_array))
        }
diff --git a/library/core/src/mem/maybe_uninit.rs b/library/core/src/mem/maybe_uninit.rs
index 3f491836551dc..9e4f56176b42b 100644
--- a/library/core/src/mem/maybe_uninit.rs
+++ b/library/core/src/mem/maybe_uninit.rs
@@ -527,7 +527,8 @@ impl<T> MaybeUninit<T> {
     #[inline(always)]
     pub const fn as_ptr(&self) -> *const T {
         // `MaybeUninit` and `ManuallyDrop` are both `repr(transparent)` so we can cast the pointer.
-        self as *const _ as *const T
+        // FIXME: consider `ptr::from_ref` once that's const-stable
+        ptr::addr_of!(*self).cast::<T>()
     }

     /// Gets a mutable pointer to the contained value. Reading from this pointer or turning it
@@ -566,7 +567,7 @@ impl<T> MaybeUninit<T> {
     #[inline(always)]
     pub const fn as_mut_ptr(&mut self) -> *mut T {
         // `MaybeUninit` and `ManuallyDrop` are both `repr(transparent)` so we can cast the pointer.
-        self as *mut _ as *mut T
+        ptr::from_mut(self).cast::<T>()
     }

     /// Extracts the value from the `MaybeUninit` container. This is a great way
@@ -947,7 +948,7 @@ impl<T> MaybeUninit<T> {
         // And thus the conversion is safe
         let ret = unsafe {
             intrinsics::assert_inhabited::<[T; N]>();
-            (&array as *const _ as *const [T; N]).read()
+            ptr::from_ref(&array).cast::<[T; N]>().read()
         };

         // FIXME: required to avoid `~const Destruct` bound
@@ -1002,7 +1003,7 @@ impl<T> MaybeUninit<T> {
     #[rustc_const_unstable(feature = "maybe_uninit_slice", issue = "63569")]
     #[inline(always)]
     pub const fn slice_as_ptr(this: &[MaybeUninit<T>]) -> *const T {
-        this.as_ptr() as *const T
+        this.as_ptr().cast::<T>()
     }

     /// Gets a mutable pointer to the first element of the array.
@@ -1010,7 +1011,7 @@ impl<T> MaybeUninit<T> {
     #[rustc_const_unstable(feature = "maybe_uninit_slice", issue = "63569")]
     #[inline(always)]
     pub const fn slice_as_mut_ptr(this: &mut [MaybeUninit<T>]) -> *mut T {
-        this.as_mut_ptr() as *mut T
+        this.as_mut_ptr().cast::<T>()
     }

     /// Copies the elements from `src` to `this`, returning a mutable reference to the now initialized contents of `this`.
@@ -1182,7 +1183,7 @@ impl<T> MaybeUninit<T> {
     pub fn as_bytes(&self) -> &[MaybeUninit<u8>] {
         // SAFETY: MaybeUninit is always valid, even for padding bytes
         unsafe {
-            slice::from_raw_parts(self.as_ptr() as *const MaybeUninit<u8>, mem::size_of::<T>())
+            slice::from_raw_parts(self.as_ptr().cast::<MaybeUninit<u8>>(), mem::size_of::<T>())
         }
     }

@@ -1214,7 +1215,7 @@ impl<T> MaybeUninit<T> {
         // SAFETY: MaybeUninit is always valid, even for padding bytes
         unsafe {
             slice::from_raw_parts_mut(
-                self.as_mut_ptr() as *mut MaybeUninit<u8>,
+                self.as_mut_ptr().cast::<MaybeUninit<u8>>(),
                 mem::size_of::<T>(),
             )
         }
@@ -1244,7 +1245,7 @@ impl<T> MaybeUninit<T> {
         // SAFETY: MaybeUninit is always valid, even for padding bytes
         unsafe {
             slice::from_raw_parts(
-                this.as_ptr() as *const MaybeUninit<u8>,
+                this.as_ptr().cast::<MaybeUninit<u8>>(),
                 this.len() * mem::size_of::<T>(),
             )
         }
@@ -1277,7 +1278,7 @@ impl<T> MaybeUninit<T> {
         // SAFETY: MaybeUninit is always valid, even for padding bytes
         unsafe {
             slice::from_raw_parts_mut(
-                this.as_mut_ptr() as *mut MaybeUninit<u8>,
+                this.as_mut_ptr().cast::<MaybeUninit<u8>>(),
                 this.len() * mem::size_of::<T>(),
             )
         }
diff --git a/library/core/src/mem/mod.rs b/library/core/src/mem/mod.rs
index a67df7ed557a1..23e709ae1eb0e 100644
--- a/library/core/src/mem/mod.rs
+++ b/library/core/src/mem/mod.rs
@@ -1054,12 +1054,12 @@ pub const unsafe fn transmute_copy<Src, Dst>(src: &Src) -> Dst {
     if align_of::<Dst>() > align_of::<Src>() {
         // SAFETY: `src` is a reference which is guaranteed to be valid for reads.
         // The caller must guarantee that the actual transmutation is safe.
-        unsafe { ptr::read_unaligned(src as *const Src as *const Dst) }
+        unsafe { ptr::from_ref(src).cast::<Dst>().read_unaligned() }
     } else {
         // SAFETY: `src` is a reference which is guaranteed to be valid for reads.
         // We just checked that `src as *const Dst` was properly aligned.
         // The caller must guarantee that the actual transmutation is safe.
-        unsafe { ptr::read(src as *const Src as *const Dst) }
+        unsafe { ptr::from_ref(src).cast::<Dst>().read() }
     }
 }

diff --git a/library/core/src/ptr/const_ptr.rs b/library/core/src/ptr/const_ptr.rs
index 839afc57f85d2..90d8894d28980 100644
--- a/library/core/src/ptr/const_ptr.rs
+++ b/library/core/src/ptr/const_ptr.rs
@@ -48,7 +48,7 @@ impl<T: ?Sized> *const T {
         }

         // SAFETY: The two versions are equivalent at runtime.
-        unsafe { const_eval_select((self as *const u8,), const_impl, runtime_impl) }
+        unsafe { const_eval_select((self.cast::<u8>(),), const_impl, runtime_impl) }
     }

     /// Casts to a pointer of another type.
@@ -95,7 +95,7 @@ impl<T: ?Sized> *const T {
     where
         U: ?Sized,
     {
-        from_raw_parts::<U>(self as *const (), metadata(meta))
+        from_raw_parts::<U>(self.cast::<()>(), metadata(meta))
     }

     /// Changes constness without changing the type.
@@ -400,7 +400,7 @@ impl<T: ?Sized> *const T {
     {
         // SAFETY: the caller must guarantee that `self` meets all the
         // requirements for a reference.
-        if self.is_null() { None } else { Some(unsafe { &*(self as *const MaybeUninit<T>) }) }
+        if self.is_null() { None } else { Some(unsafe { &*self.cast::<MaybeUninit<T>>() }) }
     }

     /// Calculates the offset from a pointer.
@@ -822,7 +822,7 @@ impl<T: ?Sized> *const T {
     where
         T: Sized,
     {
-        match intrinsics::ptr_guaranteed_cmp(self as _, other as _) {
+        match intrinsics::ptr_guaranteed_cmp(self, other) {
             2 => None,
             other => Some(other == 1),
         }
diff --git a/library/core/src/ptr/mod.rs b/library/core/src/ptr/mod.rs
index 5884a8ca30807..4d4c0111e083c 100644
--- a/library/core/src/ptr/mod.rs
+++ b/library/core/src/ptr/mod.rs
@@ -1278,7 +1278,7 @@ pub const unsafe fn read_unaligned<T>(src: *const T) -> T {
     // Also, since we just wrote a valid value into `tmp`, it is guaranteed
     // to be properly initialized.
     unsafe {
-        copy_nonoverlapping(src as *const u8, tmp.as_mut_ptr() as *mut u8, mem::size_of::<T>());
+        copy_nonoverlapping(src.cast::<u8>(), tmp.as_mut_ptr().cast::<u8>(), mem::size_of::<T>());
         tmp.assume_init()
     }
 }
@@ -1473,7 +1473,7 @@ pub const unsafe fn write_unaligned<T>(dst: *mut T, src: T) {
     // `dst` cannot overlap `src` because the caller has mutable access
     // to `dst` while `src` is owned by this function.
     unsafe {
-        copy_nonoverlapping(&src as *const T as *const u8, dst as *mut u8, mem::size_of::<T>());
+        copy_nonoverlapping(addr_of!(src).cast::<u8>(), dst.cast::<u8>(), mem::size_of::<T>());
         // We are calling the intrinsic directly to avoid function calls in the generated code.
         intrinsics::forget(src);
     }
diff --git a/library/core/src/ptr/mut_ptr.rs b/library/core/src/ptr/mut_ptr.rs
index ece5244e9a99c..6062caa736034 100644
--- a/library/core/src/ptr/mut_ptr.rs
+++ b/library/core/src/ptr/mut_ptr.rs
@@ -412,7 +412,7 @@ impl<T: ?Sized> *mut T {
     {
         // SAFETY: the caller must guarantee that `self` meets all the
         // requirements for a reference.
-        if self.is_null() { None } else { Some(unsafe { &*(self as *const MaybeUninit<T>) }) }
+        unsafe { self.cast_const().as_uninit_ref() }
     }

     /// Calculates the offset from a pointer.
@@ -476,7 +476,7 @@ impl<T: ?Sized> *mut T {
         // SAFETY: the caller must uphold the safety contract for `offset`.
         // The obtained pointer is valid for writes since the caller must
         // guarantee that it points to the same allocated object as `self`.
-        unsafe { intrinsics::offset(self, count) as *mut T }
+        unsafe { intrinsics::offset(self, count).cast_mut() }
     }

     /// Calculates the offset from a pointer in bytes.
@@ -555,7 +555,7 @@ impl<T: ?Sized> *mut T {
         T: Sized,
     {
         // SAFETY: the `arith_offset` intrinsic has no prerequisites to be called.
-        unsafe { intrinsics::arith_offset(self, count) as *mut T }
+        unsafe { intrinsics::arith_offset(self, count).cast_mut() }
     }

     /// Calculates the offset from a pointer in bytes using wrapping arithmetic.
@@ -717,7 +717,7 @@ impl<T: ?Sized> *mut T {
     {
         // SAFETY: the caller must guarantee that `self` meets all the
         // requirements for a reference.
-        if self.is_null() { None } else { Some(unsafe { &mut *(self as *mut MaybeUninit<T>) }) }
+        if self.is_null() { None } else { Some(unsafe { &mut *self.cast::<MaybeUninit<T>>() }) }
     }

     /// Returns whether two pointers are guaranteed to be equal.
@@ -744,7 +744,7 @@ impl<T: ?Sized> *mut T {
     where
         T: Sized,
     {
-        (self as *const T).guaranteed_eq(other as _)
+        self.cast_const().guaranteed_eq(other.cast_const())
     }

     /// Returns whether two pointers are guaranteed to be inequal.
@@ -771,7 +771,7 @@ impl<T: ?Sized> *mut T {
     where
         T: Sized,
     {
-        (self as *const T).guaranteed_ne(other as _)
+        self.cast_const().guaranteed_ne(other.cast_const())
     }

     /// Calculates the distance between two pointers. The returned value is in
@@ -864,7 +864,7 @@ impl<T: ?Sized> *mut T {
         T: Sized,
     {
         // SAFETY: the caller must uphold the safety contract for `offset_from`.
-        unsafe { (self as *const T).offset_from(origin) }
+        unsafe { self.cast_const().offset_from(origin) }
     }

     /// Calculates the distance between two pointers. The returned value is in
@@ -955,7 +955,7 @@ impl<T: ?Sized> *mut T {
         T: Sized,
     {
         // SAFETY: the caller must uphold the safety contract for `sub_ptr`.
-        unsafe { (self as *const T).sub_ptr(origin) }
+        unsafe { self.cast_const().sub_ptr(origin) }
     }

     /// Calculates the offset from a pointer (convenience for `.offset(count as isize)`).
diff --git a/library/core/src/slice/ascii.rs b/library/core/src/slice/ascii.rs
index 5e5399acc1b0f..3cd58ceafc9ef 100644
--- a/library/core/src/slice/ascii.rs
+++ b/library/core/src/slice/ascii.rs
@@ -271,7 +271,7 @@ fn is_ascii(s: &[u8]) -> bool {
     let start = s.as_ptr();

     // SAFETY: We verify `len < USIZE_SIZE` above.
-    let first_word = unsafe { (start as *const usize).read_unaligned() };
+    let first_word = unsafe { start.cast::<usize>().read_unaligned() };

     if contains_nonascii(first_word) {
         return false;
@@ -283,7 +283,7 @@ fn is_ascii(s: &[u8]) -> bool {

     // SAFETY: word_ptr is the (properly aligned) usize ptr we use to read the
     // middle chunk of the slice.
-    let mut word_ptr = unsafe { start.add(offset_to_aligned) as *const usize };
+    let mut word_ptr = unsafe { start.add(offset_to_aligned).cast::<usize>() };

     // `byte_pos` is the byte index of `word_ptr`, used for loop end checks.
     let mut byte_pos = offset_to_aligned;
@@ -322,7 +322,7 @@ fn is_ascii(s: &[u8]) -> bool {
     debug_assert!(byte_pos <= len && len - byte_pos <= USIZE_SIZE);

     // SAFETY: This relies on `len >= USIZE_SIZE`, which we check at the start.
-    let last_word = unsafe { (start.add(len - USIZE_SIZE) as *const usize).read_unaligned() };
+    let last_word = unsafe { start.add(len - USIZE_SIZE).cast::<usize>().read_unaligned() };

     !contains_nonascii(last_word)
 }
diff --git a/library/core/src/slice/cmp.rs b/library/core/src/slice/cmp.rs
index 7601dd3c75608..d8ebfb1b5d5f5 100644
--- a/library/core/src/slice/cmp.rs
+++ b/library/core/src/slice/cmp.rs
@@ -88,7 +88,7 @@ where
         // The two slices have been checked to have the same size above.
         unsafe {
             let size = mem::size_of_val(self);
-            memcmp(self.as_ptr() as *const u8, other.as_ptr() as *const u8, size) == 0
+            memcmp(self.as_ptr().cast::<u8>(), other.as_ptr().cast::<u8>(), size) == 0
         }
     }
 }
@@ -231,7 +231,7 @@ impl SliceContains for i8 {
         // as `*const u8` is safe. The `x.as_ptr()` comes from a reference and is thus guaranteed
        // to be valid for reads for the length of the slice `x.len()`, which cannot be larger
        // than `isize::MAX`. The returned slice is never mutated.
-        let bytes: &[u8] = unsafe { from_raw_parts(x.as_ptr() as *const u8, x.len()) };
+        let bytes: &[u8] = unsafe { from_raw_parts(x.as_ptr().cast::<u8>(), x.len()) };
         memchr::memchr(byte, bytes).is_some()
     }
 }
diff --git a/library/core/src/slice/iter.rs b/library/core/src/slice/iter.rs
index c4317799bcc68..10bb4f1b27f07 100644
--- a/library/core/src/slice/iter.rs
+++ b/library/core/src/slice/iter.rs
@@ -90,7 +90,7 @@ impl<'a, T> Iter<'a, T> {
             let end =
                 if T::IS_ZST { ptr.wrapping_byte_add(slice.len()) } else { ptr.add(slice.len()) };

-            Self { ptr: NonNull::new_unchecked(ptr as *mut T), end, _marker: PhantomData }
+            Self { ptr: NonNull::new_unchecked(ptr.cast_mut()), end, _marker: PhantomData }
         }
     }
diff --git a/library/core/src/slice/mod.rs b/library/core/src/slice/mod.rs
index d319b2bc37fdd..317348167fbeb 100644
--- a/library/core/src/slice/mod.rs
+++ b/library/core/src/slice/mod.rs
@@ -1800,7 +1800,7 @@ impl<T> [T] {
     pub fn split_array_ref<const N: usize>(&self) -> (&[T; N], &[T]) {
         let (a, b) = self.split_at(N);
         // SAFETY: a points to [T; N]? Yes it's [T] of length N (checked by split_at)
-        unsafe { (&*(a.as_ptr() as *const [T; N]), b) }
+        unsafe { (&*a.as_ptr().cast::<[T; N]>(), b) }
     }

     /// Divides one mutable slice into an array and a remainder slice at an index.
@@ -1833,7 +1833,7 @@ impl<T> [T] {
     pub fn split_array_mut<const N: usize>(&mut self) -> (&mut [T; N], &mut [T]) {
         let (a, b) = self.split_at_mut(N);
         // SAFETY: a points to [T; N]? Yes it's [T] of length N (checked by split_at_mut)
-        unsafe { (&mut *(a.as_mut_ptr() as *mut [T; N]), b) }
+        unsafe { (&mut *a.as_mut_ptr().cast::<[T; N]>(), b) }
     }

     /// Divides one slice into an array and a remainder slice at an index from
@@ -1879,7 +1879,7 @@ impl<T> [T] {
         assert!(N <= self.len());
         let (a, b) = self.split_at(self.len() - N);
         // SAFETY: b points to [T; N]? Yes it's [T] of length N (checked by split_at)
-        unsafe { (a, &*(b.as_ptr() as *const [T; N])) }
+        unsafe { (a, &*b.as_ptr().cast::<[T; N]>()) }
     }

     /// Divides one mutable slice into an array and a remainder slice at an
@@ -1913,7 +1913,7 @@ impl<T> [T] {
         assert!(N <= self.len());
         let (a, b) = self.split_at_mut(self.len() - N);
         // SAFETY: b points to [T; N]? Yes it's [T] of length N (checked by split_at_mut)
-        unsafe { (a, &mut *(b.as_mut_ptr() as *mut [T; N])) }
+        unsafe { (a, &mut *b.as_mut_ptr().cast::<[T; N]>()) }
     }

     /// Returns an iterator over subslices separated by elements that match
@@ -3582,7 +3582,7 @@ impl<T> [T] {
         unsafe {
             (
                 left,
-                from_raw_parts(rest.as_ptr() as *const U, us_len),
+                from_raw_parts(rest.as_ptr().cast::<U>(), us_len),
                 from_raw_parts(rest.as_ptr().add(rest.len() - ts_len), ts_len),
             )
         }
@@ -3652,7 +3652,7 @@ impl<T> [T] {
         unsafe {
             (
                 left,
-                from_raw_parts_mut(mut_ptr as *mut U, us_len),
+                from_raw_parts_mut(mut_ptr.cast::<U>(), us_len),
                 from_raw_parts_mut(mut_ptr.add(rest_len - ts_len), ts_len),
             )
         }
diff --git a/library/core/src/slice/rotate.rs b/library/core/src/slice/rotate.rs
index fa8c238f8e7a2..55da952155443 100644
--- a/library/core/src/slice/rotate.rs
+++ b/library/core/src/slice/rotate.rs
@@ -162,7 +162,7 @@ pub unsafe fn ptr_rotate<T>(mut left: usize, mut mid: *mut T, mut right: usize)
         // Algorithm 2
         // The `[T; 0]` here is to ensure this is appropriately aligned for T
         let mut rawarray = MaybeUninit::<(BufType, [T; 0])>::uninit();
-        let buf = rawarray.as_mut_ptr() as *mut T;
+        let buf = rawarray.as_mut_ptr().cast::<T>();
         // SAFETY: `mid-left <= mid-left+right < mid+right`
         let dim = unsafe { mid.sub(left).add(right) };
         if left <= right {
diff --git a/library/core/src/str/pattern.rs b/library/core/src/str/pattern.rs
index 19da6d2fbecbc..1725ef3d35b92 100644
--- a/library/core/src/str/pattern.rs
+++ b/library/core/src/str/pattern.rs
@@ -1905,16 +1905,16 @@ unsafe fn small_slice_eq(x: &[u8], y: &[u8]) -> bool {
         let (mut px, mut py) = (x.as_ptr(), y.as_ptr());
         let (pxend, pyend) = (px.add(x.len() - 4), py.add(y.len() - 4));
         while px < pxend {
-            let vx = (px as *const u32).read_unaligned();
-            let vy = (py as *const u32).read_unaligned();
+            let vx = px.cast::<u32>().read_unaligned();
+            let vy = py.cast::<u32>().read_unaligned();
             if vx != vy {
                 return false;
             }
             px = px.add(4);
             py = py.add(4);
         }
-        let vx = (pxend as *const u32).read_unaligned();
-        let vy = (pyend as *const u32).read_unaligned();
+        let vx = pxend.cast::<u32>().read_unaligned();
+        let vy = pyend.cast::<u32>().read_unaligned();
         vx == vy
     }
 }
diff --git a/library/core/src/str/validations.rs b/library/core/src/str/validations.rs
index 2acef432f2063..419395007af71 100644
--- a/library/core/src/str/validations.rs
+++ b/library/core/src/str/validations.rs
@@ -218,7 +218,7 @@ pub(super) const fn run_utf8_validation(v: &[u8]) -> Result<(), Utf8Error> {
                 // always aligned with a `usize` so it's safe to dereference
                 // both `block` and `block.add(1)`.
                 unsafe {
-                    let block = ptr.add(index) as *const usize;
+                    let block = ptr.add(index).cast::<usize>();
                     // break if there is a nonascii byte
                     let zu = contains_nonascii(*block);
                     let zv = contains_nonascii(*block.add(1));
diff --git a/library/core/src/sync/atomic.rs b/library/core/src/sync/atomic.rs
index 2f6b1c74da08e..1685670956b90 100644
--- a/library/core/src/sync/atomic.rs
+++ b/library/core/src/sync/atomic.rs
@@ -126,8 +126,8 @@ use self::Ordering::*;

 use crate::cell::UnsafeCell;
-use crate::fmt;
 use crate::intrinsics;
+use crate::{fmt, ptr};

 use crate::hint::spin_loop;
@@ -368,7 +368,7 @@ impl AtomicBool {
     #[stable(feature = "atomic_access", since = "1.15.0")]
     pub fn get_mut(&mut self) -> &mut bool {
         // SAFETY: the mutable reference guarantees unique ownership.
-        unsafe { &mut *(self.v.get() as *mut bool) }
+        unsafe { &mut *self.v.get().cast::<bool>() }
     }

     /// Get atomic access to a `&mut bool`.
@@ -390,7 +390,7 @@ impl AtomicBool {
     pub fn from_mut(v: &mut bool) -> &mut Self {
         // SAFETY: the mutable reference guarantees unique ownership, and
         // alignment of both `bool` and `Self` is 1.
-        unsafe { &mut *(v as *mut bool as *mut Self) }
+        unsafe { &mut *ptr::from_mut(v).cast::<Self>() }
     }

     /// Get non-atomic access to a `&mut [AtomicBool]` slice.
@@ -1154,7 +1154,7 @@ impl<T> AtomicPtr<T> {
         // - the mutable reference guarantees unique ownership.
         // - the alignment of `*mut T` and `Self` is the same on all platforms
         //   supported by rust, as verified above.
-        unsafe { &mut *(v as *mut *mut T as *mut Self) }
+        unsafe { &mut *ptr::from_mut(v).cast::<Self>() }
     }

     /// Get non-atomic access to a `&mut [AtomicPtr]` slice.
@@ -2147,7 +2147,7 @@ macro_rules! atomic_int {
             // - the mutable reference guarantees unique ownership.
             // - the alignment of `$int_type` and `Self` is the
             //   same, as promised by $cfg_align and verified above.
-            unsafe { &mut *(v as *mut $int_type as *mut Self) }
+            unsafe { &mut *ptr::from_mut(v).cast::<Self>() }
         }

         #[doc = concat!("Get non-atomic access to a `&mut [", stringify!($atomic_type), "]` slice")]
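
Note (not part of the patch): a minimal sketch of the motivation the commit
message describes. `Header` and `demo` are made-up names; the point is that an
`as` pointer cast can change the pointee type and the constness in one silent
step, while `cast` and `cast_mut` each change exactly one thing, so an
unintended conversion stands out at the call site:

    #[repr(C)]
    struct Header {
        len: u32,
    }

    fn demo(h: &Header) {
        let p = h as *const Header;

        // `as` does several things at once: this single cast changes both the
        // pointee type (Header -> u32) and the mutability (*const -> *mut),
        // and it keeps compiling even if `p`'s type later changes.
        let _q = p as *mut u32;

        // With the dedicated methods, each conversion is a separate, visible
        // step, and `cast` alone can never flip constness:
        let _q: *mut u32 = p.cast::<u32>().cast_mut();
    }

    fn main() {
        demo(&Header { len: 7 });
    }

On recent Rust, `ptr::from_ref(h)` likewise replaces the remaining
`h as *const Header`, which is the same substitution this patch applies with
`ptr::from_ref`/`ptr::from_mut` throughout library/core.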