diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs
index c245b42c3e880..e491d4d91aa9c 100644
--- a/library/alloc/src/rc.rs
+++ b/library/alloc/src/rc.rs
@@ -365,6 +365,12 @@ impl<T: ?Sized, A: Allocator> Rc<T, A> {
         unsafe { self.ptr.as_ref() }
     }
 
+    #[inline]
+    fn into_inner_with_allocator(this: Self) -> (NonNull<RcBox<T>>, A) {
+        let this = mem::ManuallyDrop::new(this);
+        (this.ptr, unsafe { ptr::read(&this.alloc) })
+    }
+
     #[inline]
     unsafe fn from_inner_in(ptr: NonNull<RcBox<T>>, alloc: A) -> Self {
         Self { ptr, phantom: PhantomData, alloc }
@@ -661,16 +667,6 @@ impl<T> Rc<T> {
 }
 
 impl<T, A: Allocator> Rc<T, A> {
-    /// Returns a reference to the underlying allocator.
-    ///
-    /// Note: this is an associated function, which means that you have
-    /// to call it as `Rc::allocator(&r)` instead of `r.allocator()`. This
-    /// is so that there is no conflict with a method on the inner type.
-    #[inline]
-    #[unstable(feature = "allocator_api", issue = "32838")]
-    pub fn allocator(this: &Self) -> &A {
-        &this.alloc
-    }
     /// Constructs a new `Rc` in the provided allocator.
     ///
     /// # Examples
@@ -1145,12 +1141,9 @@ impl<T, A: Allocator> Rc<mem::MaybeUninit<T>, A> {
     /// ```
     #[unstable(feature = "new_uninit", issue = "63291")]
     #[inline]
-    pub unsafe fn assume_init(self) -> Rc<T, A>
-    where
-        A: Clone,
-    {
-        let md_self = mem::ManuallyDrop::new(self);
-        unsafe { Rc::from_inner_in(md_self.ptr.cast(), md_self.alloc.clone()) }
+    pub unsafe fn assume_init(self) -> Rc<T, A> {
+        let (ptr, alloc) = Self::into_inner_with_allocator(self);
+        unsafe { Rc::from_inner_in(ptr.cast(), alloc) }
     }
 }
 
@@ -1189,12 +1182,9 @@ impl<T, A: Allocator> Rc<[mem::MaybeUninit<T>], A> {
     /// ```
     #[unstable(feature = "new_uninit", issue = "63291")]
     #[inline]
-    pub unsafe fn assume_init(self) -> Rc<[T], A>
-    where
-        A: Clone,
-    {
-        let md_self = mem::ManuallyDrop::new(self);
-        unsafe { Rc::from_ptr_in(md_self.ptr.as_ptr() as _, md_self.alloc.clone()) }
+    pub unsafe fn assume_init(self) -> Rc<[T], A> {
+        let (ptr, alloc) = Self::into_inner_with_allocator(self);
+        unsafe { Rc::from_ptr_in(ptr.as_ptr() as _, alloc) }
     }
 }
 
@@ -1333,6 +1323,17 @@ impl<T: ?Sized> Rc<T> {
 }
 
 impl<T: ?Sized, A: Allocator> Rc<T, A> {
+    /// Returns a reference to the underlying allocator.
+    ///
+    /// Note: this is an associated function, which means that you have
+    /// to call it as `Rc::allocator(&r)` instead of `r.allocator()`. This
+    /// is so that there is no conflict with a method on the inner type.
+    #[inline]
+    #[unstable(feature = "allocator_api", issue = "32838")]
+    pub fn allocator(this: &Self) -> &A {
+        &this.alloc
+    }
+
     /// Consumes the `Rc`, returning the wrapped pointer.
     ///
     /// To avoid a memory leak the pointer must be converted back to an `Rc` using
@@ -1356,6 +1357,33 @@ impl<T: ?Sized, A: Allocator> Rc<T, A> {
         ptr
     }
 
+    /// Consumes the `Rc`, returning the wrapped pointer and allocator.
+    ///
+    /// To avoid a memory leak the pointer must be converted back to an `Rc` using
+    /// [`Rc::from_raw_in`].
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(allocator_api)]
+    /// use std::rc::Rc;
+    /// use std::alloc::System;
+    ///
+    /// let x = Rc::new_in("hello".to_owned(), System);
+    /// let (ptr, alloc) = Rc::into_raw_with_allocator(x);
+    /// assert_eq!(unsafe { &*ptr }, "hello");
+    /// let x = unsafe { Rc::from_raw_in(ptr, alloc) };
+    /// assert_eq!(&*x, "hello");
+    /// ```
+    #[unstable(feature = "allocator_api", issue = "32838")]
+    pub fn into_raw_with_allocator(this: Self) -> (*const T, A) {
+        let this = mem::ManuallyDrop::new(this);
+        let ptr = Self::as_ptr(&this);
+        // Safety: `this` is ManuallyDrop so the allocator will not be double-dropped
+        let alloc = unsafe { ptr::read(Self::allocator(&this)) };
+        (ptr, alloc)
+    }
+
     /// Provides a raw pointer to the data.
     ///
     /// The counts are not affected in any way and the `Rc` is not consumed. The pointer is valid
@@ -1809,7 +1837,9 @@ impl<T: Clone, A: Allocator + Clone> Rc<T, A> {
         // reference to the allocation.
         unsafe { &mut this.ptr.as_mut().value }
     }
+}
 
+impl<T: Clone, A: Allocator> Rc<T, A> {
     /// If we have the only reference to `T` then unwrap it. Otherwise, clone `T` and return the
     /// clone.
     ///
@@ -1845,7 +1875,7 @@ impl<T: Clone, A: Allocator + Clone> Rc<T, A> {
     }
 }
 
-impl<A: Allocator + Clone> Rc<dyn Any, A> {
+impl<A: Allocator> Rc<dyn Any, A> {
     /// Attempt to downcast the `Rc` to a concrete type.
     ///
     /// # Examples
@@ -1868,12 +1898,8 @@ impl<A: Allocator + Clone> Rc<dyn Any, A> {
     #[stable(feature = "rc_downcast", since = "1.29.0")]
     pub fn downcast<T: Any>(self) -> Result<Rc<T, A>, Self> {
         if (*self).is::<T>() {
-            unsafe {
-                let ptr = self.ptr.cast::<RcBox<T>>();
-                let alloc = self.alloc.clone();
-                forget(self);
-                Ok(Rc::from_inner_in(ptr, alloc))
-            }
+            let (ptr, alloc) = Rc::into_inner_with_allocator(self);
+            unsafe { Ok(Rc::from_inner_in(ptr.cast(), alloc)) }
         } else {
             Err(self)
         }
@@ -1908,12 +1934,8 @@ impl<A: Allocator + Clone> Rc<dyn Any, A> {
     #[inline]
     #[unstable(feature = "downcast_unchecked", issue = "90850")]
    pub unsafe fn downcast_unchecked<T: Any>(self) -> Rc<T, A> {
-        unsafe {
-            let ptr = self.ptr.cast::<RcBox<T>>();
-            let alloc = self.alloc.clone();
-            mem::forget(self);
-            Rc::from_inner_in(ptr, alloc)
-        }
+        let (ptr, alloc) = Rc::into_inner_with_allocator(self);
+        unsafe { Rc::from_inner_in(ptr.cast(), alloc) }
     }
 }
 
@@ -2661,12 +2683,13 @@ impl From<Rc<str>> for Rc<[u8]> {
 }
 
 #[stable(feature = "boxed_slice_try_from", since = "1.43.0")]
-impl<T, const N: usize> TryFrom<Rc<[T]>> for Rc<[T; N]> {
-    type Error = Rc<[T]>;
+impl<T, A: Allocator, const N: usize> TryFrom<Rc<[T], A>> for Rc<[T; N], A> {
+    type Error = Rc<[T], A>;
 
-    fn try_from(boxed_slice: Rc<[T]>) -> Result<Self, Self::Error> {
+    fn try_from(boxed_slice: Rc<[T], A>) -> Result<Self, Self::Error> {
         if boxed_slice.len() == N {
-            Ok(unsafe { Rc::from_raw(Rc::into_raw(boxed_slice) as *mut [T; N]) })
+            let (ptr, alloc) = Rc::into_inner_with_allocator(boxed_slice);
+            Ok(unsafe { Rc::from_inner_in(ptr.cast(), alloc) })
         } else {
             Err(boxed_slice)
         }
@@ -2923,6 +2946,13 @@ impl<T: ?Sized> Weak<T> {
 }
 
 impl<T: ?Sized, A: Allocator> Weak<T, A> {
+    /// Returns a reference to the underlying allocator.
+    #[inline]
+    #[unstable(feature = "allocator_api", issue = "32838")]
+    pub fn allocator(&self) -> &A {
+        &self.alloc
+    }
+
     /// Returns a raw pointer to the object `T` pointed to by this `Weak`.
     ///
     /// The pointer is valid only if there are some strong references. The pointer may be dangling,
@@ -3000,11 +3030,11 @@ impl<T: ?Sized, A: Allocator> Weak<T, A> {
         result
     }
 
-    /// Consumes the `Weak` and turns it into a raw pointer.
+    /// Consumes the `Weak`, returning the wrapped pointer and allocator.
     ///
     /// This converts the weak pointer into a raw pointer, while still preserving the ownership of
     /// one weak reference (the weak count is not modified by this operation). It can be turned
-    /// back into the `Weak` with [`from_raw`].
+    /// back into the `Weak` with [`from_raw_in`].
     ///
     /// The same restrictions of accessing the target of the pointer as with
     /// [`as_ptr`] apply.
@@ -3012,27 +3042,30 @@ impl<T: ?Sized, A: Allocator> Weak<T, A> {
     ///
     /// # Examples
     ///
     /// ```
+    /// #![feature(allocator_api)]
     /// use std::rc::{Rc, Weak};
+    /// use std::alloc::System;
     ///
-    /// let strong = Rc::new("hello".to_owned());
+    /// let strong = Rc::new_in("hello".to_owned(), System);
     /// let weak = Rc::downgrade(&strong);
-    /// let raw = weak.into_raw();
+    /// let (raw, alloc) = weak.into_raw_with_allocator();
     ///
     /// assert_eq!(1, Rc::weak_count(&strong));
     /// assert_eq!("hello", unsafe { &*raw });
     ///
-    /// drop(unsafe { Weak::from_raw(raw) });
+    /// drop(unsafe { Weak::from_raw_in(raw, alloc) });
     /// assert_eq!(0, Rc::weak_count(&strong));
     /// ```
     ///
-    /// [`from_raw`]: Weak::from_raw
+    /// [`from_raw_in`]: Weak::from_raw_in
     /// [`as_ptr`]: Weak::as_ptr
     #[inline]
     #[unstable(feature = "allocator_api", issue = "32838")]
-    pub fn into_raw_and_alloc(self) -> (*const T, A) {
-        let rc = mem::ManuallyDrop::new(self);
-        let result = rc.as_ptr();
-        let alloc = unsafe { ptr::read(&rc.alloc) };
+    pub fn into_raw_with_allocator(self) -> (*const T, A) {
+        let this = mem::ManuallyDrop::new(self);
+        let result = this.as_ptr();
+        // Safety: `this` is ManuallyDrop so the allocator will not be double-dropped
+        let alloc = unsafe { ptr::read(this.allocator()) };
         (result, alloc)
     }
diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs
index 297a273d274bf..43cd3f6f92578 100644
--- a/library/alloc/src/sync.rs
+++ b/library/alloc/src/sync.rs
@@ -280,8 +280,8 @@ impl<T: ?Sized> Arc<T> {
 
 impl<T: ?Sized, A: Allocator> Arc<T, A> {
     #[inline]
-    fn internal_into_inner_with_allocator(self) -> (NonNull<ArcInner<T>>, A) {
-        let this = mem::ManuallyDrop::new(self);
+    fn into_inner_with_allocator(this: Self) -> (NonNull<ArcInner<T>>, A) {
+        let this = mem::ManuallyDrop::new(this);
         (this.ptr, unsafe { ptr::read(&this.alloc) })
     }
 
@@ -683,16 +683,6 @@ impl<T> Arc<T> {
 }
 
 impl<T, A: Allocator> Arc<T, A> {
-    /// Returns a reference to the underlying allocator.
-    ///
-    /// Note: this is an associated function, which means that you have
-    /// to call it as `Arc::allocator(&a)` instead of `a.allocator()`. This
-    /// is so that there is no conflict with a method on the inner type.
-    #[inline]
-    #[unstable(feature = "allocator_api", issue = "32838")]
-    pub fn allocator(this: &Self) -> &A {
-        &this.alloc
-    }
     /// Constructs a new `Arc` in the provided allocator.
     ///
     /// # Examples
@@ -1290,7 +1280,7 @@ impl<T, A: Allocator> Arc<mem::MaybeUninit<T>, A> {
     #[must_use = "`self` will be dropped if the result is not used"]
     #[inline]
     pub unsafe fn assume_init(self) -> Arc<T, A> {
-        let (ptr, alloc) = self.internal_into_inner_with_allocator();
+        let (ptr, alloc) = Arc::into_inner_with_allocator(self);
         unsafe { Arc::from_inner_in(ptr.cast(), alloc) }
     }
 }
@@ -1332,7 +1322,7 @@ impl<T, A: Allocator> Arc<[mem::MaybeUninit<T>], A> {
     #[must_use = "`self` will be dropped if the result is not used"]
     #[inline]
     pub unsafe fn assume_init(self) -> Arc<[T], A> {
-        let (ptr, alloc) = self.internal_into_inner_with_allocator();
+        let (ptr, alloc) = Arc::into_inner_with_allocator(self);
         unsafe { Arc::from_ptr_in(ptr.as_ptr() as _, alloc) }
     }
 }
@@ -1473,6 +1463,17 @@ impl<T: ?Sized> Arc<T> {
 }
 
 impl<T: ?Sized, A: Allocator> Arc<T, A> {
+    /// Returns a reference to the underlying allocator.
+    ///
+    /// Note: this is an associated function, which means that you have
+    /// to call it as `Arc::allocator(&a)` instead of `a.allocator()`. This
+    /// is so that there is no conflict with a method on the inner type.
+    #[inline]
+    #[unstable(feature = "allocator_api", issue = "32838")]
+    pub fn allocator(this: &Self) -> &A {
+        &this.alloc
+    }
+
     /// Consumes the `Arc`, returning the wrapped pointer.
     ///
     /// To avoid a memory leak the pointer must be converted back to an `Arc` using
@@ -1496,6 +1497,34 @@ impl<T: ?Sized, A: Allocator> Arc<T, A> {
         ptr
     }
 
+    /// Consumes the `Arc`, returning the wrapped pointer and allocator.
+    ///
+    /// To avoid a memory leak the pointer must be converted back to an `Arc` using
+    /// [`Arc::from_raw_in`].
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(allocator_api)]
+    /// use std::sync::Arc;
+    /// use std::alloc::System;
+    ///
+    /// let x = Arc::new_in("hello".to_owned(), System);
+    /// let (ptr, alloc) = Arc::into_raw_with_allocator(x);
+    /// assert_eq!(unsafe { &*ptr }, "hello");
+    /// let x = unsafe { Arc::from_raw_in(ptr, alloc) };
+    /// assert_eq!(&*x, "hello");
+    /// ```
+    #[must_use = "losing the pointer will leak memory"]
+    #[unstable(feature = "allocator_api", issue = "32838")]
+    pub fn into_raw_with_allocator(this: Self) -> (*const T, A) {
+        let this = mem::ManuallyDrop::new(this);
+        let ptr = Self::as_ptr(&this);
+        // Safety: `this` is ManuallyDrop so the allocator will not be double-dropped
+        let alloc = unsafe { ptr::read(Self::allocator(&this)) };
+        (ptr, alloc)
+    }
+
     /// Provides a raw pointer to the data.
     ///
     /// The counts are not affected in any way and the `Arc` is not consumed. The pointer is valid for
@@ -2227,7 +2256,9 @@ impl<T: Clone, A: Allocator + Clone> Arc<T, A> {
         // either unique to begin with, or became one upon cloning the contents.
         unsafe { Self::get_mut_unchecked(this) }
     }
+}
 
+impl<T: Clone, A: Allocator> Arc<T, A> {
     /// If we have the only reference to `T` then unwrap it. Otherwise, clone `T` and return the
     /// clone.
     ///
@@ -2498,10 +2529,8 @@ impl<A: Allocator> Arc<dyn Any + Send + Sync, A> {
         T: Any + Send + Sync,
     {
         if (*self).is::<T>() {
-            unsafe {
-                let (ptr, alloc) = self.internal_into_inner_with_allocator();
-                Ok(Arc::from_inner_in(ptr.cast(), alloc))
-            }
+            let (ptr, alloc) = Arc::into_inner_with_allocator(self);
+            unsafe { Ok(Arc::from_inner_in(ptr.cast(), alloc)) }
         } else {
             Err(self)
         }
@@ -2539,10 +2568,8 @@ impl<A: Allocator> Arc<dyn Any + Send + Sync, A> {
     where
         T: Any + Send + Sync,
     {
-        unsafe {
-            let (ptr, alloc) = self.internal_into_inner_with_allocator();
-            Arc::from_inner_in(ptr.cast(), alloc)
-        }
+        let (ptr, alloc) = Arc::into_inner_with_allocator(self);
+        unsafe { Arc::from_inner_in(ptr.cast(), alloc) }
     }
 }
 
@@ -2661,6 +2688,13 @@ impl<T: ?Sized> Weak<T> {
 }
 
 impl<T: ?Sized, A: Allocator> Weak<T, A> {
+    /// Returns a reference to the underlying allocator.
+    #[inline]
+    #[unstable(feature = "allocator_api", issue = "32838")]
+    pub fn allocator(&self) -> &A {
+        &self.alloc
+    }
+
     /// Returns a raw pointer to the object `T` pointed to by this `Weak`.
     ///
     /// The pointer is valid only if there are some strong references. The pointer may be dangling,
@@ -2738,6 +2772,45 @@ impl<T: ?Sized, A: Allocator> Weak<T, A> {
         result
     }
 
+    /// Consumes the `Weak`, returning the wrapped pointer and allocator.
+    ///
+    /// This converts the weak pointer into a raw pointer, while still preserving the ownership of
+    /// one weak reference (the weak count is not modified by this operation). It can be turned
+    /// back into the `Weak` with [`from_raw_in`].
+    ///
+    /// The same restrictions of accessing the target of the pointer as with
+    /// [`as_ptr`] apply.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(allocator_api)]
+    /// use std::sync::{Arc, Weak};
+    /// use std::alloc::System;
+    ///
+    /// let strong = Arc::new_in("hello".to_owned(), System);
+    /// let weak = Arc::downgrade(&strong);
+    /// let (raw, alloc) = weak.into_raw_with_allocator();
+    ///
+    /// assert_eq!(1, Arc::weak_count(&strong));
+    /// assert_eq!("hello", unsafe { &*raw });
+    ///
+    /// drop(unsafe { Weak::from_raw_in(raw, alloc) });
+    /// assert_eq!(0, Arc::weak_count(&strong));
+    /// ```
+    ///
+    /// [`from_raw_in`]: Weak::from_raw_in
+    /// [`as_ptr`]: Weak::as_ptr
+    #[must_use = "losing the pointer will leak memory"]
+    #[unstable(feature = "allocator_api", issue = "32838")]
+    pub fn into_raw_with_allocator(self) -> (*const T, A) {
+        let this = mem::ManuallyDrop::new(self);
+        let result = this.as_ptr();
+        // Safety: `this` is ManuallyDrop so the allocator will not be double-dropped
+        let alloc = unsafe { ptr::read(Self::allocator(&this)) };
+        (result, alloc)
+    }
+
     /// Converts a raw pointer previously created by [`into_raw`] back into `Weak` in the provided
     /// allocator.
     ///
@@ -3426,7 +3499,7 @@ impl<T: ?Sized, A: Allocator> From<Box<T, A>> for Arc<T, A> {
 
 #[cfg(not(no_global_oom_handling))]
 #[stable(feature = "shared_from_slice", since = "1.21.0")]
-impl<T, A: Allocator + Clone> From<Vec<T, A>> for Arc<[T], A> {
+impl<T, A: Allocator> From<Vec<T, A>> for Arc<[T], A> {
     /// Allocate a reference-counted slice and move `v`'s items into it.
     ///
     /// # Example
@@ -3506,7 +3579,7 @@ impl<T, A: Allocator, const N: usize> TryFrom<Arc<[T], A>> for Arc<[T; N], A> {
 
     fn try_from(boxed_slice: Arc<[T], A>) -> Result<Self, Self::Error> {
         if boxed_slice.len() == N {
-            let (ptr, alloc) = boxed_slice.internal_into_inner_with_allocator();
+            let (ptr, alloc) = Arc::into_inner_with_allocator(boxed_slice);
             Ok(unsafe { Arc::from_inner_in(ptr.cast(), alloc) })
         } else {
             Err(boxed_slice)