diff --git a/library/alloc/src/lib.rs b/library/alloc/src/lib.rs index 3bc376482e998..5fa9d920f0238 100644 --- a/library/alloc/src/lib.rs +++ b/library/alloc/src/lib.rs @@ -76,6 +76,7 @@ #![cfg_attr(test, feature(test))] #![cfg_attr(test, feature(new_uninit))] #![feature(allocator_api)] +#![feature(allocator_api_internals)] #![feature(array_chunks)] #![feature(array_methods)] #![feature(array_windows)] diff --git a/library/alloc/src/raw_vec.rs b/library/alloc/src/raw_vec.rs index 2e2c9b76bd4ba..623c0b06af873 100644 --- a/library/alloc/src/raw_vec.rs +++ b/library/alloc/src/raw_vec.rs @@ -1,6 +1,8 @@ #![unstable(feature = "raw_vec_internals", reason = "implementation detail", issue = "none")] #![doc(hidden)] +#[cfg(not(no_global_oom_handling))] +use core::alloc::helper::AllocInit; use core::alloc::LayoutError; use core::cmp; use core::intrinsics; @@ -18,14 +20,6 @@ use crate::collections::TryReserveError::{self, *}; #[cfg(test)] mod tests; -#[cfg(not(no_global_oom_handling))] -enum AllocInit { - /// The contents of the new memory are uninitialized. - Uninitialized, - /// The new memory is guaranteed to be zeroed. - Zeroed, -} - /// A low-level utility for more ergonomically allocating, reallocating, and deallocating /// a buffer of memory on the heap without having to worry about all the corner cases /// involved. This type is excellent for building your own data structures like Vec and VecDeque. diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs index 800952f7a5ece..399c0fbbeef6a 100644 --- a/library/alloc/src/rc.rs +++ b/library/alloc/src/rc.rs @@ -242,11 +242,12 @@ #![stable(feature = "rust1", since = "1.0.0")] -#[cfg(not(test))] +#[cfg(all(not(test), not(no_global_oom_handling)))] use crate::boxed::Box; -#[cfg(test)] +#[cfg(all(test, not(no_global_oom_handling)))] use std::boxed::Box; +use core::alloc::helper::PrefixAllocator; use core::any::Any; use core::borrow; use core::cell::Cell; @@ -258,19 +259,16 @@ use core::intrinsics::abort; #[cfg(not(no_global_oom_handling))] use core::iter; use core::marker::{self, PhantomData, Unpin, Unsize}; -#[cfg(not(no_global_oom_handling))] -use core::mem::size_of_val; -use core::mem::{self, align_of_val_raw, forget}; +use core::mem; use core::ops::{CoerceUnsized, Deref, DispatchFromDyn, Receiver}; +#[cfg(not(no_global_oom_handling))] use core::pin::Pin; use core::ptr::{self, NonNull}; #[cfg(not(no_global_oom_handling))] use core::slice::from_raw_parts_mut; #[cfg(not(no_global_oom_handling))] -use crate::alloc::handle_alloc_error; -#[cfg(not(no_global_oom_handling))] -use crate::alloc::{box_free, WriteCloneIntoRaw}; +use crate::alloc::{box_free, handle_alloc_error, WriteCloneIntoRaw}; use crate::alloc::{AllocError, Allocator, Global, Layout}; use crate::borrow::{Cow, ToOwned}; #[cfg(not(no_global_oom_handling))] @@ -281,16 +279,84 @@ use crate::vec::Vec; #[cfg(test)] mod tests; -// This is repr(C) to future-proof against possible field-reordering, which -// would interfere with otherwise safe [into|from]_raw() of transmutable -// inner types. -#[repr(C)] -struct RcBox { +/// Metadata for `Rc` and `Weak` to be allocated as prefix. 
+#[unstable(feature = "allocator_api_internals", issue = "none")] +#[derive(Debug, Clone)] +#[doc(hidden)] +pub struct RcMetadata { strong: Cell, weak: Cell, - value: T, } +impl RcMetadata { + // There is an implicit weak pointer owned by all the strong + // pointers, which ensures that the weak destructor never frees + // the allocation while the strong destructor is running, even + // if the weak pointer is stored inside the strong one. + #[inline] + fn new_strong() -> Self { + Self { strong: Cell::new(1), weak: Cell::new(1) } + } + + #[inline] + fn new_weak() -> Self { + Self { strong: Cell::new(0), weak: Cell::new(1) } + } + + #[inline] + fn strong(&self) -> usize { + self.strong.get() + } + + #[inline] + fn inc_strong(&self) { + let strong = self.strong(); + + // We want to abort on overflow instead of dropping the value. + // The reference count will never be zero when this is called; + // nevertheless, we insert an abort here to hint LLVM at + // an otherwise missed optimization. + if strong == 0 || strong == usize::MAX { + abort(); + } + self.strong.set(strong + 1); + } + + #[inline] + fn dec_strong(&self) { + self.strong.set(self.strong() - 1); + } + + #[inline] + fn weak(&self) -> usize { + self.weak.get() + } + + #[inline] + fn inc_weak(&self) { + let weak = self.weak(); + + // We want to abort on overflow instead of dropping the value. + // The reference count will never be zero when this is called; + // nevertheless, we insert an abort here to hint LLVM at + // an otherwise missed optimization. + if weak == 0 || weak == usize::MAX { + abort(); + } + self.weak.set(weak + 1); + } + + #[inline] + fn dec_weak(&self) { + self.weak.set(self.weak() - 1); + } +} + +/// Allocator used for `Rc` and `Weak`. +#[unstable(feature = "allocator_api_internals", issue = "none")] +#[doc(hidden)] +pub type RcAllocator = PrefixAllocator; + /// A single-threaded reference-counting pointer. 'Rc' stands for 'Reference /// Counted'. /// @@ -304,8 +370,9 @@ struct RcBox { #[cfg_attr(not(test), rustc_diagnostic_item = "Rc")] #[stable(feature = "rust1", since = "1.0.0")] pub struct Rc { - ptr: NonNull>, - phantom: PhantomData>, + ptr: NonNull, + alloc: RcAllocator, + _marker: PhantomData, } #[stable(feature = "rust1", since = "1.0.0")] @@ -320,19 +387,20 @@ impl, U: ?Sized> CoerceUnsized> for Rc {} impl, U: ?Sized> DispatchFromDyn> for Rc {} impl Rc { - #[inline(always)] - fn inner(&self) -> &RcBox { - // This unsafety is ok because while this Rc is alive we're guaranteed - // that the inner pointer is valid. - unsafe { self.ptr.as_ref() } - } + #[inline] + fn metadata_ptr(ptr: NonNull) -> NonNull { + // SAFETY: since the only unsized types possible are slices, trait objects, + // and extern types, the input safety requirement is currently enough to + // satisfy the requirements of for_value_raw; this is an implementation + // detail of the language that may not be relied upon outside of std. 
+ let align = unsafe { mem::align_of_val_raw(ptr.as_ptr()) }; - fn from_inner(ptr: NonNull>) -> Self { - Self { ptr, phantom: PhantomData } + unsafe { RcAllocator::::prefix(ptr.cast(), align) } } - unsafe fn from_ptr(ptr: *mut RcBox) -> Self { - Self::from_inner(unsafe { NonNull::new_unchecked(ptr) }) + #[inline(always)] + fn metadata(&self) -> &RcMetadata { + unsafe { Self::metadata_ptr(self.ptr).as_ref() } } } @@ -346,15 +414,15 @@ impl Rc { /// /// let five = Rc::new(5); /// ``` + #[cfg(not(no_global_oom_handling))] #[stable(feature = "rust1", since = "1.0.0")] pub fn new(value: T) -> Rc { - // There is an implicit weak pointer owned by all the strong - // pointers, which ensures that the weak destructor never frees - // the allocation while the strong destructor is running, even - // if the weak pointer is stored inside the strong one. - Self::from_inner( - Box::leak(box RcBox { strong: Cell::new(1), weak: Cell::new(1), value }).into(), - ) + let alloc = RcAllocator::new(Global); + let ptr = allocate(&alloc, Layout::new::(), RcMetadata::new_strong()).cast::(); + unsafe { + ptr.as_ptr().write(value); + Self::from_non_null(ptr, alloc) + } } /// Constructs a new `Rc` using a weak reference to itself. Attempting @@ -381,20 +449,18 @@ impl Rc { /// } /// } /// ``` + #[inline] + #[cfg(not(no_global_oom_handling))] #[unstable(feature = "arc_new_cyclic", issue = "75861")] pub fn new_cyclic(data_fn: impl FnOnce(&Weak) -> T) -> Rc { // Construct the inner in the "uninitialized" state with a single // weak reference. - let uninit_ptr: NonNull<_> = Box::leak(box RcBox { - strong: Cell::new(0), - weak: Cell::new(1), - value: mem::MaybeUninit::::uninit(), - }) - .into(); + let alloc = RcAllocator::new(Global); + let ptr = allocate(&alloc, Layout::new::(), RcMetadata::new_weak()).cast::(); - let init_ptr: NonNull> = uninit_ptr.cast(); - - let weak = Weak { ptr: init_ptr }; + // Strong references should collectively own a shared weak reference, + // so don't run the destructor for our old weak reference. + let weak = mem::ManuallyDrop::new(Weak { ptr, alloc }); // It's important we don't give up ownership of the weak pointer, or // else the memory might be freed by the time `data_fn` returns. If @@ -402,23 +468,15 @@ impl Rc { // weak pointer for ourselves, but this would result in additional // updates to the weak reference count which might not be necessary // otherwise. - let data = data_fn(&weak); - unsafe { - let inner = init_ptr.as_ptr(); - ptr::write(ptr::addr_of_mut!((*inner).value), data); - - let prev_value = (*inner).strong.get(); - debug_assert_eq!(prev_value, 0, "No prior strong references should exist"); - (*inner).strong.set(1); + ptr.as_ptr().write(data_fn(&weak)); } - let strong = Rc::from_inner(init_ptr); + let meta = unsafe { Self::metadata_ptr(ptr).as_ref() }; + debug_assert_eq!(meta.strong.get(), 0, "No prior strong references should exist"); + meta.strong.set(1); - // Strong references should collectively own a shared weak reference, - // so don't run the destructor for our old weak reference. - mem::forget(weak); - strong + unsafe { Self::from_non_null(ptr, weak.alloc) } } /// Constructs a new `Rc` with uninitialized contents. 
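For orientation, here is a small sketch of the layout arithmetic behind the prefix approach used above: the reference counts live in a metadata block placed immediately before the value, and the metadata address can be recovered from the value address. This is only an illustration with a stand-in `Meta` type; the real `PrefixAllocator`/`RcAllocator::prefix` helpers are internal (`allocator_api_internals`) and their exact API is not reproduced here.

```rust
use std::alloc::Layout;
use std::cell::Cell;

// Stand-in for the reference-count prefix; the real type above is `RcMetadata`.
#[allow(dead_code)]
struct Meta {
    strong: Cell<usize>,
    weak: Cell<usize>,
}

fn main() {
    // Layout of "metadata, then value", padded so the value keeps its alignment.
    let value_layout = Layout::new::<[u64; 3]>();
    let (combined, value_offset) = Layout::new::<Meta>().extend(value_layout).unwrap();
    let combined = combined.pad_to_align();

    // Given a pointer to the value, the metadata sits `value_offset` bytes before it;
    // that back-calculation is the kind of work `metadata_ptr` delegates to the allocator.
    println!("allocate {:?}, value starts at byte offset {}", combined, value_offset);
}
```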
@@ -445,13 +503,9 @@ impl Rc { #[cfg(not(no_global_oom_handling))] #[unstable(feature = "new_uninit", issue = "63291")] pub fn new_uninit() -> Rc> { - unsafe { - Rc::from_ptr(Rc::allocate_for_layout( - Layout::new::(), - |layout| Global.allocate(layout), - |mem| mem as *mut RcBox>, - )) - } + let alloc = RcAllocator::new(Global); + let ptr = allocate(&alloc, Layout::new::(), RcMetadata::new_strong()).cast(); + unsafe { Rc::from_non_null(ptr, alloc) } } /// Constructs a new `Rc` with uninitialized contents, with the memory @@ -477,13 +531,9 @@ impl Rc { #[cfg(not(no_global_oom_handling))] #[unstable(feature = "new_uninit", issue = "63291")] pub fn new_zeroed() -> Rc> { - unsafe { - Rc::from_ptr(Rc::allocate_for_layout( - Layout::new::(), - |layout| Global.allocate_zeroed(layout), - |mem| mem as *mut RcBox>, - )) - } + let alloc = RcAllocator::new(Global); + let ptr = allocate_zeroed(&alloc, Layout::new::(), RcMetadata::new_strong()).cast(); + unsafe { Rc::from_non_null(ptr, alloc) } } /// Constructs a new `Rc`, returning an error if the allocation fails @@ -497,16 +547,15 @@ impl Rc { /// let five = Rc::try_new(5); /// # Ok::<(), std::alloc::AllocError>(()) /// ``` + #[inline] #[unstable(feature = "allocator_api", issue = "32838")] pub fn try_new(value: T) -> Result, AllocError> { - // There is an implicit weak pointer owned by all the strong - // pointers, which ensures that the weak destructor never frees - // the allocation while the strong destructor is running, even - // if the weak pointer is stored inside the strong one. - Ok(Self::from_inner( - Box::leak(Box::try_new(RcBox { strong: Cell::new(1), weak: Cell::new(1), value })?) - .into(), - )) + let alloc = RcAllocator::new(Global); + let ptr = try_allocate(&alloc, Layout::new::(), RcMetadata::new_strong())?.cast::(); + unsafe { + ptr.as_ptr().write(value); + Ok(Self::from_non_null(ptr, alloc)) + } } /// Constructs a new `Rc` with uninitialized contents, returning an error if the allocation fails @@ -534,13 +583,9 @@ impl Rc { #[unstable(feature = "allocator_api", issue = "32838")] // #[unstable(feature = "new_uninit", issue = "63291")] pub fn try_new_uninit() -> Result>, AllocError> { - unsafe { - Ok(Rc::from_ptr(Rc::try_allocate_for_layout( - Layout::new::(), - |layout| Global.allocate(layout), - |mem| mem as *mut RcBox>, - )?)) - } + let alloc = RcAllocator::new(Global); + let ptr = try_allocate(&alloc, Layout::new::(), RcMetadata::new_strong())?; + unsafe { Ok(Rc::from_non_null(ptr.cast(), alloc)) } } /// Constructs a new `Rc` with uninitialized contents, with the memory @@ -567,16 +612,15 @@ impl Rc { #[unstable(feature = "allocator_api", issue = "32838")] //#[unstable(feature = "new_uninit", issue = "63291")] pub fn try_new_zeroed() -> Result>, AllocError> { - unsafe { - Ok(Rc::from_ptr(Rc::try_allocate_for_layout( - Layout::new::(), - |layout| Global.allocate_zeroed(layout), - |mem| mem as *mut RcBox>, - )?)) - } + let alloc = RcAllocator::new(Global); + let ptr = try_allocate_zeroed(&alloc, Layout::new::(), RcMetadata::new_strong())?; + unsafe { Ok(Rc::from_non_null(ptr.cast(), alloc)) } } + /// Constructs a new `Pin>`. If `T` does not implement `Unpin`, then /// `value` will be pinned in memory and unable to be moved. 
+ #[inline] + #[cfg(not(no_global_oom_handling))] #[stable(feature = "pin", since = "1.33.0")] pub fn pin(value: T) -> Pin> { unsafe { Pin::new_unchecked(Rc::new(value)) } @@ -605,16 +649,16 @@ impl Rc { #[stable(feature = "rc_unique", since = "1.4.0")] pub fn try_unwrap(this: Self) -> Result { if Rc::strong_count(&this) == 1 { + let this = mem::ManuallyDrop::new(this); unsafe { - let val = ptr::read(&*this); // copy the contained object + let val = ptr::read(&**this); // copy the contained object // Indicate to Weaks that they can't be promoted by decrementing // the strong count, and then remove the implicit "strong weak" // pointer while also handling drop logic by just crafting a // fake Weak. - this.inner().dec_strong(); - let _weak = Weak { ptr: this.ptr }; - forget(this); + this.metadata().dec_strong(); + let _weak = Weak { ptr: this.ptr, alloc: this.alloc }; Ok(val) } } else { @@ -650,7 +694,12 @@ impl Rc<[T]> { #[cfg(not(no_global_oom_handling))] #[unstable(feature = "new_uninit", issue = "63291")] pub fn new_uninit_slice(len: usize) -> Rc<[mem::MaybeUninit]> { - unsafe { Rc::from_ptr(Rc::allocate_for_slice(len)) } + let alloc = RcAllocator::new(Global); + let ptr = NonNull::slice_from_raw_parts( + allocate(&alloc, Layout::array::(len).unwrap(), RcMetadata::new_strong()).cast(), + len, + ); + unsafe { Rc::from_non_null(ptr, alloc) } } /// Constructs a new reference-counted slice with uninitialized contents, with the memory being @@ -676,16 +725,13 @@ impl Rc<[T]> { #[cfg(not(no_global_oom_handling))] #[unstable(feature = "new_uninit", issue = "63291")] pub fn new_zeroed_slice(len: usize) -> Rc<[mem::MaybeUninit]> { - unsafe { - Rc::from_ptr(Rc::allocate_for_layout( - Layout::array::(len).unwrap(), - |layout| Global.allocate_zeroed(layout), - |mem| { - ptr::slice_from_raw_parts_mut(mem as *mut T, len) - as *mut RcBox<[mem::MaybeUninit]> - }, - )) - } + let alloc = RcAllocator::new(Global); + let ptr = NonNull::slice_from_raw_parts( + allocate_zeroed(&alloc, Layout::array::(len).unwrap(), RcMetadata::new_strong()) + .cast(), + len, + ); + unsafe { Rc::from_non_null(ptr, alloc) } } } @@ -721,10 +767,11 @@ impl Rc> { /// /// assert_eq!(*five, 5) /// ``` - #[unstable(feature = "new_uninit", issue = "63291")] #[inline] + #[unstable(feature = "new_uninit", issue = "63291")] pub unsafe fn assume_init(self) -> Rc { - Rc::from_inner(mem::ManuallyDrop::new(self).ptr.cast()) + let this = mem::ManuallyDrop::new(self); + unsafe { Rc::from_non_null(this.ptr.cast(), this.alloc) } } } @@ -762,10 +809,15 @@ impl Rc<[mem::MaybeUninit]> { /// /// assert_eq!(*values, [1, 2, 3]) /// ``` - #[unstable(feature = "new_uninit", issue = "63291")] #[inline] + #[unstable(feature = "new_uninit", issue = "63291")] pub unsafe fn assume_init(self) -> Rc<[T]> { - unsafe { Rc::from_ptr(mem::ManuallyDrop::new(self).ptr.as_ptr() as _) } + let this = mem::ManuallyDrop::new(self); + let len = this.ptr.len(); + let ptr = unsafe { + NonNull::slice_from_raw_parts(NonNull::new_unchecked(this.ptr.as_mut_ptr().cast()), len) + }; + unsafe { Rc::from_non_null(ptr, this.alloc) } } } @@ -786,11 +838,11 @@ impl Rc { /// let x_ptr = Rc::into_raw(x); /// assert_eq!(unsafe { &*x_ptr }, "hello"); /// ``` + #[inline] #[stable(feature = "rc_raw", since = "1.17.0")] pub fn into_raw(this: Self) -> *const T { - let ptr = Self::as_ptr(&this); - mem::forget(this); - ptr + let this = mem::ManuallyDrop::new(this); + Self::as_ptr(&this) } /// Provides a raw pointer to the data. 
@@ -809,14 +861,10 @@ impl Rc { /// assert_eq!(x_ptr, Rc::as_ptr(&y)); /// assert_eq!(unsafe { &*x_ptr }, "hello"); /// ``` + #[inline(always)] #[stable(feature = "weak_into_raw", since = "1.45.0")] pub fn as_ptr(this: &Self) -> *const T { - let ptr: *mut RcBox = NonNull::as_ptr(this.ptr); - - // SAFETY: This cannot go through Deref::deref or Rc::inner because - // this is required to retain raw/mut provenance such that e.g. `get_mut` can - // write through the pointer after the Rc is recovered through `from_raw`. - unsafe { ptr::addr_of_mut!((*ptr).value) } + this.ptr.as_ptr() } /// Constructs an `Rc` from a raw pointer. @@ -856,15 +904,17 @@ impl Rc { /// /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling! /// ``` + #[inline] #[stable(feature = "rc_raw", since = "1.17.0")] pub unsafe fn from_raw(ptr: *const T) -> Self { - let offset = unsafe { data_offset(ptr) }; - - // Reverse the offset to find the original RcBox. - let rc_ptr = - unsafe { (ptr as *mut RcBox).set_ptr_value((ptr as *mut u8).offset(-offset)) }; + unsafe { + Self::from_non_null(NonNull::new_unchecked(ptr as *mut T), RcAllocator::new(Global)) + } + } - unsafe { Self::from_ptr(rc_ptr) } + #[inline(always)] + unsafe fn from_non_null(ptr: NonNull, alloc: RcAllocator) -> Self { + Self { ptr, alloc, _marker: PhantomData } } /// Creates a new [`Weak`] pointer to this allocation. @@ -878,12 +928,13 @@ impl Rc { /// /// let weak_five = Rc::downgrade(&five); /// ``` + #[inline] #[stable(feature = "rc_weak", since = "1.4.0")] pub fn downgrade(this: &Self) -> Weak { - this.inner().inc_weak(); + this.metadata().inc_weak(); // Make sure we do not create a dangling Weak debug_assert!(!is_dangling(this.ptr.as_ptr())); - Weak { ptr: this.ptr } + Weak { ptr: this.ptr, alloc: this.alloc.clone() } } /// Gets the number of [`Weak`] pointers to this allocation. @@ -901,7 +952,7 @@ impl Rc { #[inline] #[stable(feature = "rc_counts", since = "1.15.0")] pub fn weak_count(this: &Self) -> usize { - this.inner().weak() - 1 + this.metadata().weak() - 1 } /// Gets the number of strong (`Rc`) pointers to this allocation. @@ -919,7 +970,7 @@ impl Rc { #[inline] #[stable(feature = "rc_counts", since = "1.15.0")] pub fn strong_count(this: &Self) -> usize { - this.inner().strong() + this.metadata().strong() } /// Increments the strong reference count on the `Rc` associated with the @@ -950,7 +1001,8 @@ impl Rc { #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")] pub unsafe fn increment_strong_count(ptr: *const T) { // Retain Rc, but don't touch refcount by wrapping in ManuallyDrop - let rc = unsafe { mem::ManuallyDrop::new(Rc::::from_raw(ptr)) }; + let rc = unsafe { mem::ManuallyDrop::new(Self::from_raw(ptr)) }; + debug_assert_ne!(Self::strong_count(&rc), 0, "the strong count must be at least 1"); // Now increase refcount, but don't drop new refcount either let _rc_clone: mem::ManuallyDrop<_> = rc.clone(); } @@ -986,14 +1038,14 @@ impl Rc { #[inline] #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")] pub unsafe fn decrement_strong_count(ptr: *const T) { - unsafe { mem::drop(Rc::from_raw(ptr)) }; + unsafe { mem::drop(Self::from_raw(ptr)) }; } /// Returns `true` if there are no other `Rc` or [`Weak`] pointers to /// this allocation. 
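One consequence of storing the counts as a prefix, visible in the `as_ptr`/`into_raw`/`from_raw` changes above, is that the raw pointer handed out is simply the value address, so no `data_offset` arithmetic is needed to get back to the allocation. The documented raw-pointer contract itself is unchanged; a quick check on stable Rust:

```rust
use std::rc::Rc;

fn main() {
    let rc = Rc::new(String::from("hello"));

    // `as_ptr` and `into_raw` hand out the address of the value itself.
    let p = Rc::as_ptr(&rc);
    let raw = Rc::into_raw(rc);
    assert_eq!(p, raw);

    // Reconstructing the Rc from the same pointer restores ownership;
    // the reference count was never touched in between.
    let rc = unsafe { Rc::from_raw(raw) };
    assert_eq!(*rc, "hello");
    assert_eq!(Rc::strong_count(&rc), 1);
}
```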
#[inline] fn is_unique(this: &Self) -> bool { - Rc::weak_count(this) == 0 && Rc::strong_count(this) == 1 + Self::weak_count(this) == 0 && Self::strong_count(this) == 1 } /// Returns a mutable reference into the given `Rc`, if there are @@ -1023,7 +1075,7 @@ impl Rc { #[inline] #[stable(feature = "rc_unique", since = "1.4.0")] pub fn get_mut(this: &mut Self) -> Option<&mut T> { - if Rc::is_unique(this) { unsafe { Some(Rc::get_mut_unchecked(this)) } } else { None } + if Self::is_unique(this) { unsafe { Some(Self::get_mut_unchecked(this)) } } else { None } } /// Returns a mutable reference into the given `Rc`, @@ -1058,11 +1110,9 @@ impl Rc { pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T { // We are careful to *not* create a reference covering the "count" fields, as // this would conflict with accesses to the reference counts (e.g. by `Weak`). - unsafe { &mut (*this.ptr.as_ptr()).value } + unsafe { this.ptr.as_mut() } } - #[inline] - #[stable(feature = "ptr_eq", since = "1.17.0")] /// Returns `true` if the two `Rc`s point to the same allocation /// (in a vein similar to [`ptr::eq`]). /// @@ -1080,8 +1130,10 @@ impl Rc { /// ``` /// /// [`ptr::eq`]: core::ptr::eq + #[inline] + #[stable(feature = "ptr_eq", since = "1.17.0")] pub fn ptr_eq(this: &Self, other: &Self) -> bool { - this.ptr.as_ptr() == other.ptr.as_ptr() + this.ptr == other.ptr } } @@ -1138,7 +1190,7 @@ impl Rc { #[inline] #[stable(feature = "rc_unique", since = "1.4.0")] pub fn make_mut(this: &mut Self) -> &mut T { - if Rc::strong_count(this) != 1 { + if Self::strong_count(this) != 1 { // Gotta clone the data, there are other Rcs. // Pre-allocate memory to allow writing the cloned value directly. let mut rc = Self::new_uninit(); @@ -1154,10 +1206,10 @@ impl Rc { let data = Rc::get_mut_unchecked(&mut rc); data.as_mut_ptr().copy_from_nonoverlapping(&**this, 1); - this.inner().dec_strong(); + this.metadata().dec_strong(); // Remove implicit strong-weak ref (no need to craft a fake // Weak here -- we know other Weaks can clean up for us) - this.inner().dec_weak(); + this.metadata().dec_weak(); ptr::write(this, rc.assume_init()); } } @@ -1166,13 +1218,11 @@ impl Rc { // reference count is guaranteed to be 1 at this point, and we required // the `Rc` itself to be `mut`, so we're returning the only possible // reference to the allocation. - unsafe { &mut this.ptr.as_mut().value } + unsafe { Self::get_mut_unchecked(this) } } } impl Rc { - #[inline] - #[stable(feature = "rc_downcast", since = "1.29.0")] /// Attempt to downcast the `Rc` to a concrete type. /// /// # Examples @@ -1191,11 +1241,12 @@ impl Rc { /// print_if_string(Rc::new(my_string)); /// print_if_string(Rc::new(0i8)); /// ``` + #[inline] + #[stable(feature = "rc_downcast", since = "1.29.0")] pub fn downcast(self) -> Result, Rc> { if (*self).is::() { - let ptr = self.ptr.cast::>(); - forget(self); - Ok(Rc::from_inner(ptr)) + let this = mem::ManuallyDrop::new(self); + unsafe { Ok(Rc::from_non_null(this.ptr.cast(), this.alloc)) } } else { Err(self) } @@ -1203,120 +1254,46 @@ impl Rc { } impl Rc { - /// Allocates an `RcBox` with sufficient space for - /// a possibly-unsized inner value where the value has the layout provided. - /// - /// The function `mem_to_rcbox` is called with the data pointer - /// and must return back a (potentially fat)-pointer for the `RcBox`. 
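`make_mut` above keeps its clone-on-write behaviour (clone into `new_uninit`, then swap); only the count bookkeeping moved into the metadata prefix. As a reminder of the observable semantics:

```rust
use std::rc::Rc;

fn main() {
    let mut a = Rc::new(vec![1, 2, 3]);
    let b = Rc::clone(&a);

    // Two strong refs exist, so `make_mut` clones the data before handing out `&mut`.
    Rc::make_mut(&mut a).push(4);
    assert_eq!(*a, [1, 2, 3, 4]);
    assert_eq!(*b, [1, 2, 3]); // `b` still sees the original

    // Now `a` is unique, so no further clone happens.
    assert_eq!(Rc::strong_count(&a), 1);
    Rc::make_mut(&mut a).push(5);
    assert_eq!(*a, [1, 2, 3, 4, 5]);
}
```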
- #[cfg(not(no_global_oom_handling))] - unsafe fn allocate_for_layout( - value_layout: Layout, - allocate: impl FnOnce(Layout) -> Result, AllocError>, - mem_to_rcbox: impl FnOnce(*mut u8) -> *mut RcBox, - ) -> *mut RcBox { - // Calculate layout using the given value layout. - // Previously, layout was calculated on the expression - // `&*(ptr as *const RcBox)`, but this created a misaligned - // reference (see #54908). - let layout = Layout::new::>().extend(value_layout).unwrap().0.pad_to_align(); - unsafe { - Rc::try_allocate_for_layout(value_layout, allocate, mem_to_rcbox) - .unwrap_or_else(|_| handle_alloc_error(layout)) - } - } - - /// Allocates an `RcBox` with sufficient space for - /// a possibly-unsized inner value where the value has the layout provided, - /// returning an error if allocation fails. - /// - /// The function `mem_to_rcbox` is called with the data pointer - /// and must return back a (potentially fat)-pointer for the `RcBox`. - #[inline] - unsafe fn try_allocate_for_layout( - value_layout: Layout, - allocate: impl FnOnce(Layout) -> Result, AllocError>, - mem_to_rcbox: impl FnOnce(*mut u8) -> *mut RcBox, - ) -> Result<*mut RcBox, AllocError> { - // Calculate layout using the given value layout. - // Previously, layout was calculated on the expression - // `&*(ptr as *const RcBox)`, but this created a misaligned - // reference (see #54908). - let layout = Layout::new::>().extend(value_layout).unwrap().0.pad_to_align(); - - // Allocate for the layout. - let ptr = allocate(layout)?; - - // Initialize the RcBox - let inner = mem_to_rcbox(ptr.as_non_null_ptr().as_ptr()); - unsafe { - debug_assert_eq!(Layout::for_value(&*inner), layout); - - ptr::write(&mut (*inner).strong, Cell::new(1)); - ptr::write(&mut (*inner).weak, Cell::new(1)); - } - - Ok(inner) - } - - /// Allocates an `RcBox` with sufficient space for an unsized inner value - #[cfg(not(no_global_oom_handling))] - unsafe fn allocate_for_ptr(ptr: *const T) -> *mut RcBox { - // Allocate for the `RcBox` using the given value. - unsafe { - Self::allocate_for_layout( - Layout::for_value(&*ptr), - |layout| Global.allocate(layout), - |mem| (ptr as *mut RcBox).set_ptr_value(mem), - ) - } - } - #[cfg(not(no_global_oom_handling))] fn from_box(v: Box) -> Rc { unsafe { let (box_unique, alloc) = Box::into_unique(v); let bptr = box_unique.as_ptr(); + let rc_alloc = RcAllocator::new(alloc); - let value_size = size_of_val(&*bptr); - let ptr = Self::allocate_for_ptr(bptr); + let layout = Layout::for_value(&*bptr); + let mem = allocate(&rc_alloc, layout, RcMetadata::new_strong()).cast(); + let ptr = NonNull::new_unchecked(bptr.set_ptr_value(mem.as_ptr())); // Copy value as bytes ptr::copy_nonoverlapping( bptr as *const T as *const u8, - &mut (*ptr).value as *mut _ as *mut u8, - value_size, + ptr.as_ptr() as *mut u8, + layout.size(), ); // Free the allocation without dropping its contents - box_free(box_unique, alloc); + box_free(box_unique, &rc_alloc.parent); - Self::from_ptr(ptr) + Self::from_non_null(ptr, rc_alloc) } } } impl Rc<[T]> { - /// Allocates an `RcBox<[T]>` with the given length. 
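`from_box` above relocates the boxed value into the prefix allocation by copying its bytes and then freeing the box without running the destructor. A minimal stand-alone sketch of that move-by-memcpy pattern for a sized, non-zero-sized `T`, using only `std::alloc` (the `relocate` helper is illustrative, not part of this PR):

```rust
use std::alloc::{alloc, dealloc, handle_alloc_error, Layout};
use std::ptr::{self, NonNull};

/// Move the value out of `b` into a fresh heap allocation by byte copy.
/// Sketch only: assumes `T` is not a ZST and leaves freeing of the
/// returned allocation to the caller.
unsafe fn relocate<T>(b: Box<T>) -> NonNull<T> {
    let layout = Layout::new::<T>();
    assert!(layout.size() != 0, "sketch handles non-ZST only");

    let src = Box::into_raw(b);
    let dst = alloc(layout) as *mut T;
    let dst = NonNull::new(dst).unwrap_or_else(|| handle_alloc_error(layout));

    // Copy the value as raw bytes; ownership of the `T` conceptually moves to `dst`.
    ptr::copy_nonoverlapping(src as *const u8, dst.as_ptr() as *mut u8, layout.size());

    // Free the box's storage *without* dropping `T` (its bytes now live in `dst`).
    dealloc(src as *mut u8, layout);
    dst
}

fn main() {
    let p = unsafe { relocate(Box::new(String::from("moved"))) };
    let s = unsafe { ptr::read(p.as_ptr()) }; // take the value back out
    assert_eq!(s, "moved");
    unsafe { dealloc(p.as_ptr() as *mut u8, Layout::new::<String>()) };
}
```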
- #[cfg(not(no_global_oom_handling))] - unsafe fn allocate_for_slice(len: usize) -> *mut RcBox<[T]> { - unsafe { - Self::allocate_for_layout( - Layout::array::(len).unwrap(), - |layout| Global.allocate(layout), - |mem| ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut RcBox<[T]>, - ) - } - } - /// Copy elements from slice into newly allocated Rc<\[T\]> /// /// Unsafe because the caller must either take ownership or bind `T: Copy` #[cfg(not(no_global_oom_handling))] unsafe fn copy_from_slice(v: &[T]) -> Rc<[T]> { + let alloc = RcAllocator::new(Global); + let ptr = NonNull::slice_from_raw_parts( + allocate(&alloc, Layout::array::(v.len()).unwrap(), RcMetadata::new_strong()).cast(), + v.len(), + ); unsafe { - let ptr = Self::allocate_for_slice(v.len()); - ptr::copy_nonoverlapping(v.as_ptr(), &mut (*ptr).value as *mut [T] as *mut T, v.len()); - Self::from_ptr(ptr) + ptr::copy_nonoverlapping(v.as_ptr(), ptr.as_non_null_ptr().as_ptr(), v.len()); + Self::from_non_null(ptr, alloc) } } @@ -1328,44 +1305,44 @@ impl Rc<[T]> { // Panic guard while cloning T elements. // In the event of a panic, elements that have been written // into the new RcBox will be dropped, then the memory freed. - struct Guard { - mem: NonNull, - elems: *mut T, + struct Guard { + ptr: NonNull, layout: Layout, n_elems: usize, + alloc: A, } - impl Drop for Guard { + impl Drop for Guard { fn drop(&mut self) { unsafe { - let slice = from_raw_parts_mut(self.elems, self.n_elems); + let slice = from_raw_parts_mut(self.ptr.as_ptr(), self.n_elems); ptr::drop_in_place(slice); - Global.deallocate(self.mem, self.layout); + self.alloc.deallocate(self.ptr.cast(), self.layout); } } } unsafe { - let ptr = Self::allocate_for_slice(len); - - let mem = ptr as *mut _ as *mut u8; - let layout = Layout::for_value(&*ptr); - - // Pointer to first element - let elems = &mut (*ptr).value as *mut [T] as *mut T; + let alloc = RcAllocator::new(Global); + let layout = Layout::array::(len).unwrap(); + let ptr = NonNull::slice_from_raw_parts( + allocate(&alloc, layout, RcMetadata::new_strong()).cast(), + len, + ); - let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 }; + let mut guard = + Guard { ptr: ptr.as_non_null_ptr(), layout, n_elems: 0, alloc: alloc.by_ref() }; for (i, item) in iter.enumerate() { - ptr::write(elems.add(i), item); + ptr::write(ptr.get_unchecked_mut(i).as_ptr(), item); guard.n_elems += 1; } // All clear. Forget the guard so it doesn't free the new RcBox. - forget(guard); + mem::forget(guard); - Self::from_ptr(ptr) + Self::from_non_null(ptr, alloc) } } } @@ -1397,7 +1374,7 @@ impl Deref for Rc { #[inline(always)] fn deref(&self) -> &T { - &self.inner().value + unsafe { self.ptr.as_ref() } } } @@ -1432,18 +1409,25 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc { /// drop(foo2); // Prints "dropped!" /// ``` fn drop(&mut self) { - unsafe { - self.inner().dec_strong(); - if self.inner().strong() == 0 { - // destroy the contained object + let metadata = self.metadata(); + + metadata.dec_strong(); + if metadata.strong() == 0 { + // destroy the contained object + unsafe { ptr::drop_in_place(Self::get_mut_unchecked(self)); + } - // remove the implicit "strong weak" pointer now that we've - // destroyed the contents. 
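The `Guard` used by `from_iter_exact` above is a drop guard for panic safety: if producing an element panics, the elements already written are dropped and the buffer is released rather than leaked or left half-initialized. A simplified, allocator-free version of the same idea, assuming the iterator yields exactly `buf.len()` items:

```rust
use std::mem::{self, MaybeUninit};
use std::ptr;

/// Fill `buf` from `iter`; on panic, drop whatever was already initialized.
fn fill_exact<T>(buf: &mut [MaybeUninit<T>], iter: impl Iterator<Item = T>) {
    struct Guard<'a, T> {
        slots: &'a mut [MaybeUninit<T>],
        initialized: usize,
    }

    impl<T> Drop for Guard<'_, T> {
        fn drop(&mut self) {
            // Runs only on panic: drop the prefix that was written so far.
            unsafe {
                let first = self.slots.as_mut_ptr() as *mut T;
                ptr::drop_in_place(ptr::slice_from_raw_parts_mut(first, self.initialized));
            }
        }
    }

    let len = buf.len();
    let mut guard = Guard { slots: buf, initialized: 0 };
    for (i, item) in iter.take(len).enumerate() {
        guard.slots[i] = MaybeUninit::new(item);
        guard.initialized += 1;
    }
    assert_eq!(guard.initialized, len, "iterator was shorter than promised");

    // Success: the caller now owns the initialized elements, so defuse the guard.
    mem::forget(guard);
}

fn main() {
    let mut buf: [MaybeUninit<String>; 3] = unsafe { MaybeUninit::uninit().assume_init() };
    fill_exact(&mut buf, (0..3).map(|i| i.to_string()));
    let values = buf.map(|slot| unsafe { slot.assume_init() });
    assert_eq!(values, ["0", "1", "2"]);
}
```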
- self.inner().dec_weak(); + // Due to the borrow checker, we have to read the metadata again + let metadata = self.metadata(); - if self.inner().weak() == 0 { - Global.deallocate(self.ptr.cast(), Layout::for_value(self.ptr.as_ref())); + // remove the implicit "strong weak" pointer now that we've + // destroyed the contents. + metadata.dec_weak(); + if metadata.weak() == 0 { + unsafe { + let layout = Layout::for_value_raw(self.ptr.as_ptr()); + self.alloc.deallocate(self.ptr.cast(), layout); } } } @@ -1468,11 +1452,12 @@ impl Clone for Rc { /// ``` #[inline] fn clone(&self) -> Rc { - self.inner().inc_strong(); - Self::from_inner(self.ptr) + self.metadata().inc_strong(); + unsafe { Self::from_raw(self.ptr.as_ptr()) } } } +#[cfg(not(no_global_oom_handling))] #[stable(feature = "rust1", since = "1.0.0")] impl Default for Rc { /// Creates a new `Rc`, with the `Default` value for `T`. @@ -1731,6 +1716,7 @@ impl fmt::Pointer for Rc { } } +#[cfg(not(no_global_oom_handling))] #[stable(feature = "from_for_ptrs", since = "1.6.0")] impl From for Rc { /// Converts a generic type `T` into a `Rc` @@ -1993,9 +1979,10 @@ pub struct Weak { // but it is not necessarily a valid pointer. // `Weak::new` sets this to `usize::MAX` so that it doesn’t need // to allocate space on the heap. That's not a value a real pointer - // will ever have because RcBox has alignment at least 2. + // will ever have because `RcMetadata` has alignment at least 2. // This is only possible when `T: Sized`; unsized `T` never dangle. - ptr: NonNull>, + ptr: NonNull, + alloc: RcAllocator, } #[stable(feature = "rc_weak", since = "1.4.0")] @@ -2009,6 +1996,13 @@ impl, U: ?Sized> CoerceUnsized> for Weak {} #[unstable(feature = "dispatch_from_dyn", issue = "none")] impl, U: ?Sized> DispatchFromDyn> for Weak {} +impl Weak { + #[inline] + fn metadata(&self) -> Option<&RcMetadata> { + (!is_dangling(self.ptr.as_ptr())).then(|| unsafe { Rc::metadata_ptr(self.ptr).as_ref() }) + } +} + impl Weak { /// Constructs a new `Weak`, without allocating any memory. /// Calling [`upgrade`] on the return value always gives [`None`]. @@ -2025,20 +2019,16 @@ impl Weak { /// ``` #[stable(feature = "downgraded_weak", since = "1.10.0")] pub fn new() -> Weak { - Weak { ptr: NonNull::new(usize::MAX as *mut RcBox).expect("MAX is not 0") } + Weak { + ptr: NonNull::new(usize::MAX as *mut T).expect("MAX is not 0"), + alloc: RcAllocator::new(Global), + } } } +#[inline] pub(crate) fn is_dangling(ptr: *mut T) -> bool { - let address = ptr as *mut () as usize; - address == usize::MAX -} - -/// Helper type to allow accessing the reference counts without -/// making any assertions about the data field. -struct WeakInner<'a> { - weak: &'a Cell, - strong: &'a Cell, + ptr as *mut () as usize == usize::MAX } impl Weak { @@ -2067,20 +2057,10 @@ impl Weak { /// ``` /// /// [`null`]: core::ptr::null + #[inline(always)] #[stable(feature = "rc_as_ptr", since = "1.45.0")] pub fn as_ptr(&self) -> *const T { - let ptr: *mut RcBox = NonNull::as_ptr(self.ptr); - - if is_dangling(ptr) { - // If the pointer is dangling, we return the sentinel directly. This cannot be - // a valid payload address, as the payload is at least as aligned as RcBox (usize). - ptr as *const T - } else { - // SAFETY: if is_dangling returns false, then the pointer is dereferencable. - // The payload may be dropped at this point, and we have to maintain provenance, - // so use raw pointer manipulation. 
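As the comment on `Weak` above notes, `Weak::new` does not allocate; it stores the sentinel address `usize::MAX`, which can never belong to a real allocation because the metadata prefix has alignment at least 2. A tiny sketch of that sentinel scheme for a sized `T`:

```rust
use std::ptr::NonNull;

// A `Weak` created by `Weak::new` has nothing to point at, so it stores a
// sentinel address instead of allocating.
fn dangling_sentinel<T>() -> NonNull<T> {
    NonNull::new(usize::MAX as *mut T).expect("MAX is not 0")
}

// Mirrors the `is_dangling` check above, simplified to sized `T`.
fn is_dangling<T>(ptr: *const T) -> bool {
    ptr as usize == usize::MAX
}

fn main() {
    let p = dangling_sentinel::<u32>();
    assert!(is_dangling(p.as_ptr()));
}
```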
- unsafe { ptr::addr_of_mut!((*ptr).value) } - } + self.ptr.as_ptr() } /// Consumes the `Weak` and turns it into a raw pointer. @@ -2110,6 +2090,7 @@ impl Weak { /// /// [`from_raw`]: Weak::from_raw /// [`as_ptr`]: Weak::as_ptr + #[inline] #[stable(feature = "weak_into_raw", since = "1.45.0")] pub fn into_raw(self) -> *const T { let result = self.as_ptr(); @@ -2159,24 +2140,18 @@ impl Weak { /// [`into_raw`]: Weak::into_raw /// [`upgrade`]: Weak::upgrade /// [`new`]: Weak::new + #[inline] #[stable(feature = "weak_into_raw", since = "1.45.0")] pub unsafe fn from_raw(ptr: *const T) -> Self { - // See Weak::as_ptr for context on how the input pointer is derived. - - let ptr = if is_dangling(ptr as *mut T) { - // This is a dangling Weak. - ptr as *mut RcBox - } else { - // Otherwise, we're guaranteed the pointer came from a nondangling Weak. - // SAFETY: data_offset is safe to call, as ptr references a real (potentially dropped) T. - let offset = unsafe { data_offset(ptr) }; - // Thus, we reverse the offset to get the whole RcBox. - // SAFETY: the pointer originated from a Weak, so this offset is safe. - unsafe { (ptr as *mut RcBox).set_ptr_value((ptr as *mut u8).offset(-offset)) } - }; + unsafe { + Self::from_non_null(NonNull::new_unchecked(ptr as *mut T), RcAllocator::new(Global)) + } + } - // SAFETY: we now have recovered the original Weak pointer, so can create the Weak. - Weak { ptr: unsafe { NonNull::new_unchecked(ptr) } } + /// Constructs a `Weak` from a raw pointer. + #[inline(always)] + unsafe fn from_non_null(ptr: NonNull, alloc: RcAllocator) -> Self { + Self { ptr, alloc } } /// Attempts to upgrade the `Weak` pointer to an [`Rc`], delaying @@ -2204,13 +2179,11 @@ impl Weak { /// ``` #[stable(feature = "rc_weak", since = "1.4.0")] pub fn upgrade(&self) -> Option> { - let inner = self.inner()?; - if inner.strong() == 0 { - None - } else { - inner.inc_strong(); - Some(Rc::from_inner(self.ptr)) - } + let meta = self.metadata()?; + (meta.strong() != 0).then(|| { + meta.inc_strong(); + unsafe { Rc::from_raw(self.ptr.as_ptr()) } + }) } /// Gets the number of strong (`Rc`) pointers pointing to this allocation. @@ -2218,7 +2191,7 @@ impl Weak { /// If `self` was created using [`Weak::new`], this will return 0. #[stable(feature = "weak_counts", since = "1.41.0")] pub fn strong_count(&self) -> usize { - if let Some(inner) = self.inner() { inner.strong() } else { 0 } + self.metadata().map(|meta| meta.strong()).unwrap_or(0) } /// Gets the number of `Weak` pointers pointing to this allocation. @@ -2226,10 +2199,10 @@ impl Weak { /// If no strong pointers remain, this will return zero. #[stable(feature = "weak_counts", since = "1.41.0")] pub fn weak_count(&self) -> usize { - self.inner() - .map(|inner| { - if inner.strong() > 0 { - inner.weak() - 1 // subtract the implicit weak ptr + self.metadata() + .map(|meta| { + if meta.strong() > 0 { + meta.weak() - 1 // subtract the implicit weak ptr } else { 0 } @@ -2237,23 +2210,6 @@ impl Weak { .unwrap_or(0) } - /// Returns `None` when the pointer is dangling and there is no allocated `RcBox`, - /// (i.e., when this `Weak` was created by `Weak::new`). - #[inline] - fn inner(&self) -> Option> { - if is_dangling(self.ptr.as_ptr()) { - None - } else { - // We are careful to *not* create a reference covering the "data" field, as - // the field may be mutated concurrently (for example, if the last `Rc` - // is dropped, the data field will be dropped in-place). 
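`Weak::upgrade` above now reads the counts through `metadata()`, but its observable behaviour is unchanged: it succeeds only while at least one strong reference is alive. On stable Rust:

```rust
use std::rc::Rc;

fn main() {
    let strong = Rc::new(5);
    let weak = Rc::downgrade(&strong);

    assert_eq!(Rc::strong_count(&strong), 1);
    assert_eq!(Rc::weak_count(&strong), 1);
    assert_eq!(*weak.upgrade().unwrap(), 5);

    drop(strong);

    // No strong references remain, so the value is gone and upgrade fails,
    // even though the weak reference still keeps the metadata reachable.
    assert!(weak.upgrade().is_none());
    assert_eq!(weak.strong_count(), 0);
}
```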
- Some(unsafe { - let ptr = self.ptr.as_ptr(); - WeakInner { strong: &(*ptr).strong, weak: &(*ptr).weak } - }) - } - } - /// Returns `true` if the two `Weak`s point to the same allocation (similar to /// [`ptr::eq`]), or if both don't point to any allocation /// (because they were created with `Weak::new()`). @@ -2295,10 +2251,10 @@ impl Weak { /// ``` /// /// [`ptr::eq`]: core::ptr::eq - #[inline] + #[inline(always)] #[stable(feature = "weak_ptr_eq", since = "1.39.0")] pub fn ptr_eq(&self, other: &Self) -> bool { - self.ptr.as_ptr() == other.ptr.as_ptr() + self.ptr == other.ptr } } @@ -2329,14 +2285,15 @@ impl Drop for Weak { /// assert!(other_weak_foo.upgrade().is_none()); /// ``` fn drop(&mut self) { - let inner = if let Some(inner) = self.inner() { inner } else { return }; + let meta = if let Some(meta) = self.metadata() { meta } else { return }; - inner.dec_weak(); + meta.dec_weak(); // the weak count starts at 1, and will only go to zero if all // the strong pointers have disappeared. - if inner.weak() == 0 { + if meta.weak() == 0 { unsafe { - Global.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr())); + let layout = Layout::for_value_raw(self.ptr.as_ptr()); + self.alloc.deallocate(self.ptr.cast(), layout); } } } @@ -2357,10 +2314,10 @@ impl Clone for Weak { /// ``` #[inline] fn clone(&self) -> Weak { - if let Some(inner) = self.inner() { - inner.inc_weak() + if let Some(meta) = self.metadata() { + meta.inc_weak() } - Weak { ptr: self.ptr } + Weak { ptr: self.ptr, alloc: RcAllocator::new(Global) } } } @@ -2392,91 +2349,38 @@ impl Default for Weak { } } -// NOTE: We checked_add here to deal with mem::forget safely. In particular -// if you mem::forget Rcs (or Weaks), the ref-count can overflow, and then -// you can free the allocation while outstanding Rcs (or Weaks) exist. -// We abort because this is such a degenerate scenario that we don't care about -// what happens -- no real program should ever experience this. -// -// This should have negligible overhead since you don't actually need to -// clone these much in Rust thanks to ownership and move-semantics. - -#[doc(hidden)] -trait RcInnerPtr { - fn weak_ref(&self) -> &Cell; - fn strong_ref(&self) -> &Cell; - - #[inline] - fn strong(&self) -> usize { - self.strong_ref().get() - } - - #[inline] - fn inc_strong(&self) { - let strong = self.strong(); - - // We want to abort on overflow instead of dropping the value. - // The reference count will never be zero when this is called; - // nevertheless, we insert an abort here to hint LLVM at - // an otherwise missed optimization. - if strong == 0 || strong == usize::MAX { - abort(); - } - self.strong_ref().set(strong + 1); - } - - #[inline] - fn dec_strong(&self) { - self.strong_ref().set(self.strong() - 1); - } - - #[inline] - fn weak(&self) -> usize { - self.weak_ref().get() - } - - #[inline] - fn inc_weak(&self) { - let weak = self.weak(); - - // We want to abort on overflow instead of dropping the value. - // The reference count will never be zero when this is called; - // nevertheless, we insert an abort here to hint LLVM at - // an otherwise missed optimization. 
- if weak == 0 || weak == usize::MAX { - abort(); - } - self.weak_ref().set(weak + 1); - } - - #[inline] - fn dec_weak(&self) { - self.weak_ref().set(self.weak() - 1); - } +/// Dediated function for allocating to prevent generating a function for every `T` +#[cfg(not(no_global_oom_handling))] +fn allocate(alloc: &RcAllocator, layout: Layout, metadata: RcMetadata) -> NonNull { + try_allocate(alloc, layout, metadata).unwrap_or_else(|_| handle_alloc_error(layout)) } -impl RcInnerPtr for RcBox { - #[inline(always)] - fn weak_ref(&self) -> &Cell { - &self.weak - } - - #[inline(always)] - fn strong_ref(&self) -> &Cell { - &self.strong - } +/// Dediated function for allocating to prevent generating a function for every `T` +#[cfg(not(no_global_oom_handling))] +fn allocate_zeroed( + alloc: &RcAllocator, + layout: Layout, + metadata: RcMetadata, +) -> NonNull { + try_allocate_zeroed(alloc, layout, metadata).unwrap_or_else(|_| handle_alloc_error(layout)) } -impl<'a> RcInnerPtr for WeakInner<'a> { - #[inline(always)] - fn weak_ref(&self) -> &Cell { - self.weak - } +/// Dediated function for allocating to prevent generating a function for every `T` +fn try_allocate( + alloc: &RcAllocator, + layout: Layout, + metadata: RcMetadata, +) -> Result, AllocError> { + alloc.allocate_with_prefix(layout, metadata).map(NonNull::as_non_null_ptr) +} - #[inline(always)] - fn strong_ref(&self) -> &Cell { - self.strong - } +/// Dediated function for allocating to prevent generating a function for every `T` +fn try_allocate_zeroed( + alloc: &RcAllocator, + layout: Layout, + metadata: RcMetadata, +) -> Result, AllocError> { + alloc.allocate_zeroed_with_prefix(layout, metadata).map(NonNull::as_non_null_ptr) } #[stable(feature = "rust1", since = "1.0.0")] @@ -2495,25 +2399,3 @@ impl AsRef for Rc { #[stable(feature = "pin", since = "1.33.0")] impl Unpin for Rc {} - -/// Get the offset within an `RcBox` for the payload behind a pointer. -/// -/// # Safety -/// -/// The pointer must point to (and have valid metadata for) a previously -/// valid instance of T, but the T is allowed to be dropped. -unsafe fn data_offset(ptr: *const T) -> isize { - // Align the unsized value to the end of the RcBox. - // Because RcBox is repr(C), it will always be the last field in memory. - // SAFETY: since the only unsized types possible are slices, trait objects, - // and extern types, the input safety requirement is currently enough to - // satisfy the requirements of align_of_val_raw; this is an implementation - // detail of the language that may not be relied upon outside of std. - unsafe { data_offset_align(align_of_val_raw(ptr)) } -} - -#[inline] -fn data_offset_align(align: usize) -> isize { - let layout = Layout::new::>(); - (layout.size() + layout.padding_needed_for(align)) as isize -} diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs index 17927f5f5fdc4..6eb9c7148f4ae 100644 --- a/library/alloc/src/sync.rs +++ b/library/alloc/src/sync.rs @@ -4,6 +4,7 @@ //! //! See the [`Arc`][Arc] documentation for more details. 
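The free `allocate`/`try_allocate` functions above ("Dediated" in their doc comments is a typo for "Dedicated") pull the allocation path out of the generic `impl` blocks so it is compiled once rather than once per `T`. A sketch of that monomorphization-trimming pattern with plain `std::alloc` (the function names here are illustrative):

```rust
use std::alloc::{alloc, dealloc, handle_alloc_error, Layout};
use std::ptr::NonNull;

// Non-generic: one copy of this code in the final binary, shared by every `T`.
fn allocate_raw(layout: Layout) -> NonNull<u8> {
    assert!(layout.size() != 0, "sketch handles non-zero-sized layouts only");
    let ptr = unsafe { alloc(layout) };
    NonNull::new(ptr).unwrap_or_else(|| handle_alloc_error(layout))
}

// Generic: a thin wrapper, so the per-`T` code is just a `Layout` computation and a cast.
fn allocate_for<T>() -> NonNull<T> {
    allocate_raw(Layout::new::<T>()).cast()
}

fn main() {
    let p = allocate_for::<u64>();
    unsafe {
        p.as_ptr().write(7);
        assert_eq!(*p.as_ptr(), 7);
        dealloc(p.as_ptr() as *mut u8, Layout::new::<u64>());
    }
}
```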
+use core::alloc::helper::PrefixAllocator; use core::any::Any; use core::borrow; use core::cmp::Ordering; @@ -15,10 +16,9 @@ use core::intrinsics::abort; #[cfg(not(no_global_oom_handling))] use core::iter; use core::marker::{PhantomData, Unpin, Unsize}; -#[cfg(not(no_global_oom_handling))] -use core::mem::size_of_val; -use core::mem::{self, align_of_val_raw}; +use core::mem; use core::ops::{CoerceUnsized, Deref, DispatchFromDyn, Receiver}; +#[cfg(not(no_global_oom_handling))] use core::pin::Pin; use core::ptr::{self, NonNull}; #[cfg(not(no_global_oom_handling))] @@ -32,6 +32,7 @@ use crate::alloc::handle_alloc_error; use crate::alloc::{box_free, WriteCloneIntoRaw}; use crate::alloc::{AllocError, Allocator, Global, Layout}; use crate::borrow::{Cow, ToOwned}; +#[cfg(not(no_global_oom_handling))] use crate::boxed::Box; use crate::rc::is_dangling; #[cfg(not(no_global_oom_handling))] @@ -65,6 +66,36 @@ macro_rules! acquire { }; } +/// Metadata for `Arc` and `Weak` to be allocated as prefix. +#[unstable(feature = "allocator_api_internals", issue = "none")] +#[derive(Debug)] +#[doc(hidden)] +pub struct ArcMetadata { + strong: atomic::AtomicUsize, + + // the value usize::MAX acts as a sentinel for temporarily "locking" the + // ability to upgrade weak pointers or downgrade strong ones; this is used + // to avoid races in `make_mut` and `get_mut`. + weak: atomic::AtomicUsize, +} + +impl ArcMetadata { + #[inline] + fn new_strong() -> Self { + Self { strong: atomic::AtomicUsize::new(1), weak: atomic::AtomicUsize::new(1) } + } + + #[inline] + fn new_weak() -> Self { + Self { strong: atomic::AtomicUsize::new(0), weak: atomic::AtomicUsize::new(1) } + } +} + +/// Allocator used for `Arc` and `Weak`. +#[unstable(feature = "allocator_api_internals", issue = "none")] +#[doc(hidden)] +pub type ArcAllocator = PrefixAllocator; + /// A thread-safe reference-counting pointer. 'Arc' stands for 'Atomically /// Reference Counted'. /// @@ -231,8 +262,9 @@ macro_rules! acquire { #[cfg_attr(not(test), rustc_diagnostic_item = "Arc")] #[stable(feature = "rust1", since = "1.0.0")] pub struct Arc { - ptr: NonNull>, - phantom: PhantomData>, + ptr: NonNull, + alloc: ArcAllocator, + _marker: PhantomData, } #[stable(feature = "rust1", since = "1.0.0")] @@ -247,12 +279,20 @@ impl, U: ?Sized> CoerceUnsized> for Arc {} impl, U: ?Sized> DispatchFromDyn> for Arc {} impl Arc { - fn from_inner(ptr: NonNull>) -> Self { - Self { ptr, phantom: PhantomData } + #[inline] + fn metadata_ptr(ptr: NonNull) -> NonNull { + // SAFETY: since the only unsized types possible are slices, trait objects, + // and extern types, the input safety requirement is currently enough to + // satisfy the requirements of for_value_raw; this is an implementation + // detail of the language that may not be relied upon outside of std. + let align = unsafe { mem::align_of_val_raw(ptr.as_ptr()) }; + + unsafe { ArcAllocator::::prefix(ptr.cast(), align) } } - unsafe fn from_ptr(ptr: *mut ArcInner) -> Self { - unsafe { Self::from_inner(NonNull::new_unchecked(ptr)) } + #[inline(always)] + fn metadata(&self) -> &ArcMetadata { + unsafe { Self::metadata_ptr(self.ptr).as_ref() } } } @@ -284,7 +324,8 @@ pub struct Weak { // to allocate space on the heap. That's not a value a real pointer // will ever have because RcBox has alignment at least 2. // This is only possible when `T: Sized`; unsized `T` never dangle. 
- ptr: NonNull>, + ptr: NonNull, + alloc: ArcAllocator, } #[stable(feature = "arc_weak", since = "1.4.0")] @@ -304,24 +345,13 @@ impl fmt::Debug for Weak { } } -// This is repr(C) to future-proof against possible field-reordering, which -// would interfere with otherwise safe [into|from]_raw() of transmutable -// inner types. -#[repr(C)] -struct ArcInner { - strong: atomic::AtomicUsize, - - // the value usize::MAX acts as a sentinel for temporarily "locking" the - // ability to upgrade weak pointers or downgrade strong ones; this is used - // to avoid races in `make_mut` and `get_mut`. - weak: atomic::AtomicUsize, - - data: T, +impl Weak { + #[inline] + fn metadata(&self) -> Option<&ArcMetadata> { + (!is_dangling(self.ptr.as_ptr())).then(|| unsafe { Arc::metadata_ptr(self.ptr).as_ref() }) + } } -unsafe impl Send for ArcInner {} -unsafe impl Sync for ArcInner {} - impl Arc { /// Constructs a new `Arc`. /// @@ -333,16 +363,15 @@ impl Arc { /// let five = Arc::new(5); /// ``` #[inline] + #[cfg(not(no_global_oom_handling))] #[stable(feature = "rust1", since = "1.0.0")] - pub fn new(data: T) -> Arc { - // Start the weak pointer count as 1 which is the weak pointer that's - // held by all the strong pointers (kinda), see std/rc.rs for more info - let x: Box<_> = box ArcInner { - strong: atomic::AtomicUsize::new(1), - weak: atomic::AtomicUsize::new(1), - data, - }; - Self::from_inner(Box::leak(x).into()) + pub fn new(value: T) -> Self { + let alloc = ArcAllocator::new(Global); + let ptr = allocate(&alloc, Layout::new::(), ArcMetadata::new_strong()).cast::(); + unsafe { + ptr.as_ptr().write(value); + Self::from_non_null(ptr, alloc) + } } /// Constructs a new `Arc` using a weak reference to itself. Attempting @@ -366,19 +395,17 @@ impl Arc { /// }); /// ``` #[inline] + #[cfg(not(no_global_oom_handling))] #[unstable(feature = "arc_new_cyclic", issue = "75861")] - pub fn new_cyclic(data_fn: impl FnOnce(&Weak) -> T) -> Arc { + pub fn new_cyclic(data_fn: impl FnOnce(&Weak) -> T) -> Self { // Construct the inner in the "uninitialized" state with a single // weak reference. - let uninit_ptr: NonNull<_> = Box::leak(box ArcInner { - strong: atomic::AtomicUsize::new(0), - weak: atomic::AtomicUsize::new(1), - data: mem::MaybeUninit::::uninit(), - }) - .into(); - let init_ptr: NonNull> = uninit_ptr.cast(); + let alloc = ArcAllocator::new(Global); + let ptr = allocate(&alloc, Layout::new::(), ArcMetadata::new_weak()).cast::(); - let weak = Weak { ptr: init_ptr }; + // Strong references should collectively own a shared weak reference, + // so don't run the destructor for our old weak reference. + let weak = mem::ManuallyDrop::new(Weak { ptr, alloc }); // It's important we don't give up ownership of the weak pointer, or // else the memory might be freed by the time `data_fn` returns. If @@ -386,36 +413,16 @@ impl Arc { // weak pointer for ourselves, but this would result in additional // updates to the weak reference count which might not be necessary // otherwise. - let data = data_fn(&weak); - - // Now we can properly initialize the inner value and turn our weak - // reference into a strong reference. unsafe { - let inner = init_ptr.as_ptr(); - ptr::write(ptr::addr_of_mut!((*inner).data), data); - - // The above write to the data field must be visible to any threads which - // observe a non-zero strong count. Therefore we need at least "Release" ordering - // in order to synchronize with the `compare_exchange_weak` in `Weak::upgrade`. - // - // "Acquire" ordering is not required. 
When considering the possible behaviours - // of `data_fn` we only need to look at what it could do with a reference to a - // non-upgradeable `Weak`: - // - It can *clone* the `Weak`, increasing the weak reference count. - // - It can drop those clones, decreasing the weak reference count (but never to zero). - // - // These side effects do not impact us in any way, and no other side effects are - // possible with safe code alone. - let prev_value = (*inner).strong.fetch_add(1, Release); - debug_assert_eq!(prev_value, 0, "No prior strong references should exist"); + ptr.as_ptr().write(data_fn(&weak)); } - let strong = Arc::from_inner(init_ptr); + // SAFETY: `ptr` was just allocated with the allocator with the alignment of `T` + let meta = unsafe { Self::metadata_ptr(ptr).as_ref() }; + let prev_value = meta.strong.fetch_add(1, Release); + debug_assert_eq!(prev_value, 0, "No prior strong references should exist"); - // Strong references should collectively own a shared weak reference, - // so don't run the destructor for our old weak reference. - mem::forget(weak); - strong + unsafe { Self::from_non_null(ptr, weak.alloc) } } /// Constructs a new `Arc` with uninitialized contents. @@ -439,16 +446,13 @@ impl Arc { /// /// assert_eq!(*five, 5) /// ``` + #[inline] #[cfg(not(no_global_oom_handling))] #[unstable(feature = "new_uninit", issue = "63291")] pub fn new_uninit() -> Arc> { - unsafe { - Arc::from_ptr(Arc::allocate_for_layout( - Layout::new::(), - |layout| Global.allocate(layout), - |mem| mem as *mut ArcInner>, - )) - } + let alloc = ArcAllocator::new(Global); + let ptr = allocate(&alloc, Layout::new::(), ArcMetadata::new_strong()).cast(); + unsafe { Arc::from_non_null(ptr, alloc) } } /// Constructs a new `Arc` with uninitialized contents, with the memory @@ -471,22 +475,21 @@ impl Arc { /// ``` /// /// [zeroed]: ../../std/mem/union.MaybeUninit.html#method.zeroed + #[inline] #[cfg(not(no_global_oom_handling))] #[unstable(feature = "new_uninit", issue = "63291")] pub fn new_zeroed() -> Arc> { - unsafe { - Arc::from_ptr(Arc::allocate_for_layout( - Layout::new::(), - |layout| Global.allocate_zeroed(layout), - |mem| mem as *mut ArcInner>, - )) - } + let alloc = ArcAllocator::new(Global); + let ptr = allocate_zeroed(&alloc, Layout::new::(), ArcMetadata::new_strong()).cast(); + unsafe { Arc::from_non_null(ptr, alloc) } } /// Constructs a new `Pin>`. If `T` does not implement `Unpin`, then /// `data` will be pinned in memory and unable to be moved. 
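The removed comment above and the `fetch_add(1, Release)` in `Arc::new_cyclic` concern the usual atomic-refcount ordering discipline. The sketch below shows that general discipline (a Relaxed increment when cloning, a Release decrement plus an Acquire synchronization before destroying the value); it is a stand-alone illustration, not the code of this diff:

```rust
use std::sync::atomic::{fence, AtomicUsize, Ordering::{Acquire, Relaxed, Release}};

/// A bare strong counter illustrating the conventional ordering choices.
struct StrongCount(AtomicUsize);

impl StrongCount {
    fn new() -> Self {
        Self(AtomicUsize::new(1))
    }

    /// Cloning only needs `Relaxed`: the new reference is derived from an
    /// existing one, so no extra synchronization is required here.
    fn increment(&self) {
        self.0.fetch_add(1, Relaxed);
    }

    /// Returns `true` when the caller just released the last reference and may
    /// destroy the shared value. `Release` on the decrement plus an `Acquire`
    /// fence makes every earlier use of the value happen-before the destruction.
    fn decrement(&self) -> bool {
        if self.0.fetch_sub(1, Release) == 1 {
            fence(Acquire);
            true
        } else {
            false
        }
    }
}

fn main() {
    let count = StrongCount::new();
    count.increment();
    assert!(!count.decrement());
    assert!(count.decrement()); // last reference: safe to drop the payload now
}
```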
+ #[inline] + #[cfg(not(no_global_oom_handling))] #[stable(feature = "pin", since = "1.33.0")] - pub fn pin(data: T) -> Pin> { + pub fn pin(data: T) -> Pin { unsafe { Pin::new_unchecked(Arc::new(data)) } } @@ -501,17 +504,15 @@ impl Arc { /// let five = Arc::try_new(5)?; /// # Ok::<(), std::alloc::AllocError>(()) /// ``` - #[unstable(feature = "allocator_api", issue = "32838")] #[inline] - pub fn try_new(data: T) -> Result, AllocError> { - // Start the weak pointer count as 1 which is the weak pointer that's - // held by all the strong pointers (kinda), see std/rc.rs for more info - let x: Box<_> = Box::try_new(ArcInner { - strong: atomic::AtomicUsize::new(1), - weak: atomic::AtomicUsize::new(1), - data, - })?; - Ok(Self::from_inner(Box::leak(x).into())) + #[unstable(feature = "allocator_api", issue = "32838")] + pub fn try_new(value: T) -> Result { + let alloc = ArcAllocator::new(Global); + let ptr = try_allocate(&alloc, Layout::new::(), ArcMetadata::new_strong())?.cast::(); + unsafe { + ptr.as_ptr().write(value); + Ok(Self::from_non_null(ptr, alloc)) + } } /// Constructs a new `Arc` with uninitialized contents, returning an error @@ -540,13 +541,9 @@ impl Arc { #[unstable(feature = "allocator_api", issue = "32838")] // #[unstable(feature = "new_uninit", issue = "63291")] pub fn try_new_uninit() -> Result>, AllocError> { - unsafe { - Ok(Arc::from_ptr(Arc::try_allocate_for_layout( - Layout::new::(), - |layout| Global.allocate(layout), - |mem| mem as *mut ArcInner>, - )?)) - } + let alloc = ArcAllocator::new(Global); + let ptr = try_allocate(&alloc, Layout::new::(), ArcMetadata::new_strong())?.cast(); + unsafe { Ok(Arc::from_non_null(ptr, alloc)) } } /// Constructs a new `Arc` with uninitialized contents, with the memory @@ -573,13 +570,10 @@ impl Arc { #[unstable(feature = "allocator_api", issue = "32838")] // #[unstable(feature = "new_uninit", issue = "63291")] pub fn try_new_zeroed() -> Result>, AllocError> { - unsafe { - Ok(Arc::from_ptr(Arc::try_allocate_for_layout( - Layout::new::(), - |layout| Global.allocate_zeroed(layout), - |mem| mem as *mut ArcInner>, - )?)) - } + let alloc = ArcAllocator::new(Global); + let ptr = + try_allocate_zeroed(&alloc, Layout::new::(), ArcMetadata::new_strong())?.cast(); + unsafe { Ok(Arc::from_non_null(ptr, alloc)) } } /// Returns the inner value, if the `Arc` has exactly one strong reference. 
/// @@ -603,20 +597,19 @@ impl Arc { #[inline] #[stable(feature = "arc_unique", since = "1.4.0")] pub fn try_unwrap(this: Self) -> Result { - if this.inner().strong.compare_exchange(1, 0, Relaxed, Relaxed).is_err() { + if this.metadata().strong.compare_exchange(1, 0, Relaxed, Relaxed).is_err() { return Err(this); } - acquire!(this.inner().strong); + acquire!(this.metadata().strong); + let this = mem::ManuallyDrop::new(this); unsafe { - let elem = ptr::read(&this.ptr.as_ref().data); + let val = this.ptr.as_ptr().read(); // Make a weak pointer to clean up the implicit strong-weak reference - let _weak = Weak { ptr: this.ptr }; - mem::forget(this); - - Ok(elem) + let _weak = Weak { ptr: this.ptr, alloc: this.alloc }; + Ok(val) } } } @@ -648,7 +641,12 @@ impl Arc<[T]> { #[cfg(not(no_global_oom_handling))] #[unstable(feature = "new_uninit", issue = "63291")] pub fn new_uninit_slice(len: usize) -> Arc<[mem::MaybeUninit]> { - unsafe { Arc::from_ptr(Arc::allocate_for_slice(len)) } + let alloc = ArcAllocator::new(Global); + let ptr = NonNull::slice_from_raw_parts( + allocate(&alloc, Layout::array::(len).unwrap(), ArcMetadata::new_strong()).cast(), + len, + ); + unsafe { Arc::from_non_null(ptr, alloc) } } /// Constructs a new atomically reference-counted slice with uninitialized contents, with the memory being @@ -674,16 +672,13 @@ impl Arc<[T]> { #[cfg(not(no_global_oom_handling))] #[unstable(feature = "new_uninit", issue = "63291")] pub fn new_zeroed_slice(len: usize) -> Arc<[mem::MaybeUninit]> { - unsafe { - Arc::from_ptr(Arc::allocate_for_layout( - Layout::array::(len).unwrap(), - |layout| Global.allocate_zeroed(layout), - |mem| { - ptr::slice_from_raw_parts_mut(mem as *mut T, len) - as *mut ArcInner<[mem::MaybeUninit]> - }, - )) - } + let alloc = ArcAllocator::new(Global); + let ptr = NonNull::slice_from_raw_parts( + allocate_zeroed(&alloc, Layout::array::(len).unwrap(), ArcMetadata::new_strong()) + .cast(), + len, + ); + unsafe { Arc::from_non_null(ptr, alloc) } } } @@ -719,10 +714,11 @@ impl Arc> { /// /// assert_eq!(*five, 5) /// ``` - #[unstable(feature = "new_uninit", issue = "63291")] #[inline] + #[unstable(feature = "new_uninit", issue = "63291")] pub unsafe fn assume_init(self) -> Arc { - Arc::from_inner(mem::ManuallyDrop::new(self).ptr.cast()) + let this = mem::ManuallyDrop::new(self); + unsafe { Arc::from_non_null(this.ptr.cast(), this.alloc) } } } @@ -760,10 +756,15 @@ impl Arc<[mem::MaybeUninit]> { /// /// assert_eq!(*values, [1, 2, 3]) /// ``` - #[unstable(feature = "new_uninit", issue = "63291")] #[inline] + #[unstable(feature = "new_uninit", issue = "63291")] pub unsafe fn assume_init(self) -> Arc<[T]> { - unsafe { Arc::from_ptr(mem::ManuallyDrop::new(self).ptr.as_ptr() as _) } + let this = mem::ManuallyDrop::new(self); + let len = this.ptr.len(); + let ptr = unsafe { + NonNull::slice_from_raw_parts(NonNull::new_unchecked(this.ptr.as_mut_ptr().cast()), len) + }; + unsafe { Arc::from_non_null(ptr, this.alloc) } } } @@ -782,11 +783,11 @@ impl Arc { /// let x_ptr = Arc::into_raw(x); /// assert_eq!(unsafe { &*x_ptr }, "hello"); /// ``` + #[inline] #[stable(feature = "rc_raw", since = "1.17.0")] pub fn into_raw(this: Self) -> *const T { - let ptr = Self::as_ptr(&this); - mem::forget(this); - ptr + let this = mem::ManuallyDrop::new(this); + Self::as_ptr(&this) } /// Provides a raw pointer to the data. 
@@ -805,14 +806,10 @@ impl Arc { /// assert_eq!(x_ptr, Arc::as_ptr(&y)); /// assert_eq!(unsafe { &*x_ptr }, "hello"); /// ``` + #[inline(always)] #[stable(feature = "rc_as_ptr", since = "1.45.0")] pub fn as_ptr(this: &Self) -> *const T { - let ptr: *mut ArcInner = NonNull::as_ptr(this.ptr); - - // SAFETY: This cannot go through Deref::deref or RcBoxPtr::inner because - // this is required to retain raw/mut provenance such that e.g. `get_mut` can - // write through the pointer after the Rc is recovered through `from_raw`. - unsafe { ptr::addr_of_mut!((*ptr).data) } + this.ptr.as_ptr() } /// Constructs an `Arc` from a raw pointer. @@ -852,18 +849,19 @@ impl Arc { /// /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling! /// ``` + #[inline] #[stable(feature = "rc_raw", since = "1.17.0")] pub unsafe fn from_raw(ptr: *const T) -> Self { unsafe { - let offset = data_offset(ptr); - - // Reverse the offset to find the original ArcInner. - let arc_ptr = (ptr as *mut ArcInner).set_ptr_value((ptr as *mut u8).offset(-offset)); - - Self::from_ptr(arc_ptr) + Self::from_non_null(NonNull::new_unchecked(ptr as *mut T), ArcAllocator::new(Global)) } } + #[inline(always)] + unsafe fn from_non_null(ptr: NonNull, alloc: ArcAllocator) -> Self { + Self { ptr, alloc, _marker: PhantomData } + } + /// Creates a new [`Weak`] pointer to this allocation. /// /// # Examples @@ -879,13 +877,13 @@ impl Arc { pub fn downgrade(this: &Self) -> Weak { // This Relaxed is OK because we're checking the value in the CAS // below. - let mut cur = this.inner().weak.load(Relaxed); + let mut cur = this.metadata().weak.load(Relaxed); loop { // check if the weak counter is currently "locked"; if so, spin. if cur == usize::MAX { hint::spin_loop(); - cur = this.inner().weak.load(Relaxed); + cur = this.metadata().weak.load(Relaxed); continue; } @@ -896,11 +894,11 @@ impl Arc { // Unlike with Clone(), we need this to be an Acquire read to // synchronize with the write coming from `is_unique`, so that the // events prior to that write happen before this read. - match this.inner().weak.compare_exchange_weak(cur, cur + 1, Acquire, Relaxed) { + match this.metadata().weak.compare_exchange_weak(cur, cur + 1, Acquire, Relaxed) { Ok(_) => { // Make sure we do not create a dangling Weak debug_assert!(!is_dangling(this.ptr.as_ptr())); - return Weak { ptr: this.ptr }; + return Weak { ptr: this.ptr, alloc: this.alloc.clone() }; } Err(old) => cur = old, } @@ -930,7 +928,7 @@ impl Arc { #[inline] #[stable(feature = "arc_counts", since = "1.15.0")] pub fn weak_count(this: &Self) -> usize { - let cnt = this.inner().weak.load(SeqCst); + let cnt = this.metadata().weak.load(SeqCst); // If the weak count is currently locked, the value of the // count was 0 just before taking the lock. if cnt == usize::MAX { 0 } else { cnt - 1 } @@ -959,7 +957,7 @@ impl Arc { #[inline] #[stable(feature = "arc_counts", since = "1.15.0")] pub fn strong_count(this: &Self) -> usize { - this.inner().strong.load(SeqCst) + this.metadata().strong.load(SeqCst) } /// Increments the strong reference count on the `Arc` associated with the @@ -1033,16 +1031,6 @@ impl Arc { unsafe { mem::drop(Arc::from_raw(ptr)) }; } - #[inline] - fn inner(&self) -> &ArcInner { - // This unsafety is ok because while this arc is alive we're guaranteed - // that the inner pointer is valid. 
Furthermore, we know that the - // `ArcInner` structure itself is `Sync` because the inner data is - // `Sync` as well, so we're ok loaning out an immutable pointer to these - // contents. - unsafe { self.ptr.as_ref() } - } - // Non-inlined part of `drop`. #[inline(never)] unsafe fn drop_slow(&mut self) { @@ -1051,7 +1039,7 @@ impl Arc { unsafe { ptr::drop_in_place(Self::get_mut_unchecked(self)) }; // Drop the weak ref collectively held by all strong references - drop(Weak { ptr: self.ptr }); + drop(Weak { ptr: self.ptr, alloc: ArcAllocator::new(Global) }); } #[inline] @@ -1074,125 +1062,52 @@ impl Arc { /// /// [`ptr::eq`]: core::ptr::eq pub fn ptr_eq(this: &Self, other: &Self) -> bool { - this.ptr.as_ptr() == other.ptr.as_ptr() + this.ptr == other.ptr } } impl Arc { - /// Allocates an `ArcInner` with sufficient space for - /// a possibly-unsized inner value where the value has the layout provided. - /// - /// The function `mem_to_arcinner` is called with the data pointer - /// and must return back a (potentially fat)-pointer for the `ArcInner`. - #[cfg(not(no_global_oom_handling))] - unsafe fn allocate_for_layout( - value_layout: Layout, - allocate: impl FnOnce(Layout) -> Result, AllocError>, - mem_to_arcinner: impl FnOnce(*mut u8) -> *mut ArcInner, - ) -> *mut ArcInner { - // Calculate layout using the given value layout. - // Previously, layout was calculated on the expression - // `&*(ptr as *const ArcInner)`, but this created a misaligned - // reference (see #54908). - let layout = Layout::new::>().extend(value_layout).unwrap().0.pad_to_align(); - unsafe { - Arc::try_allocate_for_layout(value_layout, allocate, mem_to_arcinner) - .unwrap_or_else(|_| handle_alloc_error(layout)) - } - } - - /// Allocates an `ArcInner` with sufficient space for - /// a possibly-unsized inner value where the value has the layout provided, - /// returning an error if allocation fails. - /// - /// The function `mem_to_arcinner` is called with the data pointer - /// and must return back a (potentially fat)-pointer for the `ArcInner`. - unsafe fn try_allocate_for_layout( - value_layout: Layout, - allocate: impl FnOnce(Layout) -> Result, AllocError>, - mem_to_arcinner: impl FnOnce(*mut u8) -> *mut ArcInner, - ) -> Result<*mut ArcInner, AllocError> { - // Calculate layout using the given value layout. - // Previously, layout was calculated on the expression - // `&*(ptr as *const ArcInner)`, but this created a misaligned - // reference (see #54908). - let layout = Layout::new::>().extend(value_layout).unwrap().0.pad_to_align(); - - let ptr = allocate(layout)?; - - // Initialize the ArcInner - let inner = mem_to_arcinner(ptr.as_non_null_ptr().as_ptr()); - debug_assert_eq!(unsafe { Layout::for_value(&*inner) }, layout); - - unsafe { - ptr::write(&mut (*inner).strong, atomic::AtomicUsize::new(1)); - ptr::write(&mut (*inner).weak, atomic::AtomicUsize::new(1)); - } - - Ok(inner) - } - - /// Allocates an `ArcInner` with sufficient space for an unsized inner value. - #[cfg(not(no_global_oom_handling))] - unsafe fn allocate_for_ptr(ptr: *const T) -> *mut ArcInner { - // Allocate for the `ArcInner` using the given value. 
- unsafe { - Self::allocate_for_layout( - Layout::for_value(&*ptr), - |layout| Global.allocate(layout), - |mem| (ptr as *mut ArcInner).set_ptr_value(mem) as *mut ArcInner, - ) - } - } - #[cfg(not(no_global_oom_handling))] fn from_box(v: Box) -> Arc { unsafe { let (box_unique, alloc) = Box::into_unique(v); let bptr = box_unique.as_ptr(); + let arc_alloc = ArcAllocator::new(alloc); - let value_size = size_of_val(&*bptr); - let ptr = Self::allocate_for_ptr(bptr); + let layout = Layout::for_value(&*bptr); + let mem = allocate(&arc_alloc, layout, ArcMetadata::new_strong()).cast(); + let ptr = NonNull::new_unchecked(bptr.set_ptr_value(mem.as_ptr())); // Copy value as bytes ptr::copy_nonoverlapping( bptr as *const T as *const u8, - &mut (*ptr).data as *mut _ as *mut u8, - value_size, + ptr.as_ptr() as *mut u8, + layout.size(), ); // Free the allocation without dropping its contents - box_free(box_unique, alloc); + box_free(box_unique, &arc_alloc.parent); - Self::from_ptr(ptr) + Self::from_non_null(ptr, arc_alloc) } } } impl Arc<[T]> { - /// Allocates an `ArcInner<[T]>` with the given length. - #[cfg(not(no_global_oom_handling))] - unsafe fn allocate_for_slice(len: usize) -> *mut ArcInner<[T]> { - unsafe { - Self::allocate_for_layout( - Layout::array::(len).unwrap(), - |layout| Global.allocate(layout), - |mem| ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut ArcInner<[T]>, - ) - } - } - /// Copy elements from slice into newly allocated Arc<\[T\]> /// /// Unsafe because the caller must either take ownership or bind `T: Copy`. #[cfg(not(no_global_oom_handling))] unsafe fn copy_from_slice(v: &[T]) -> Arc<[T]> { + let alloc = ArcAllocator::new(Global); + let ptr = NonNull::slice_from_raw_parts( + allocate(&alloc, Layout::array::(v.len()).unwrap(), ArcMetadata::new_strong()) + .cast(), + v.len(), + ); unsafe { - let ptr = Self::allocate_for_slice(v.len()); - - ptr::copy_nonoverlapping(v.as_ptr(), &mut (*ptr).data as *mut [T] as *mut T, v.len()); - - Self::from_ptr(ptr) + ptr::copy_nonoverlapping(v.as_ptr(), ptr.as_non_null_ptr().as_ptr(), v.len()); + Self::from_non_null(ptr, alloc) } } @@ -1204,44 +1119,44 @@ impl Arc<[T]> { // Panic guard while cloning T elements. // In the event of a panic, elements that have been written // into the new ArcInner will be dropped, then the memory freed. 
- struct Guard { - mem: NonNull, - elems: *mut T, + struct Guard { + ptr: NonNull, layout: Layout, n_elems: usize, + alloc: A, } - impl Drop for Guard { + impl Drop for Guard { fn drop(&mut self) { unsafe { - let slice = from_raw_parts_mut(self.elems, self.n_elems); + let slice = from_raw_parts_mut(self.ptr.as_ptr(), self.n_elems); ptr::drop_in_place(slice); - Global.deallocate(self.mem, self.layout); + self.alloc.deallocate(self.ptr.cast(), self.layout); } } } unsafe { - let ptr = Self::allocate_for_slice(len); - - let mem = ptr as *mut _ as *mut u8; - let layout = Layout::for_value(&*ptr); - - // Pointer to first element - let elems = &mut (*ptr).data as *mut [T] as *mut T; + let alloc = ArcAllocator::new(Global); + let layout = Layout::array::(len).unwrap(); + let ptr = NonNull::slice_from_raw_parts( + allocate(&alloc, layout, ArcMetadata::new_strong()).cast(), + len, + ); - let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 }; + let mut guard = + Guard { ptr: ptr.as_non_null_ptr(), layout, n_elems: 0, alloc: alloc.by_ref() }; for (i, item) in iter.enumerate() { - ptr::write(elems.add(i), item); + ptr::write(ptr.get_unchecked_mut(i).as_ptr(), item); guard.n_elems += 1; } - // All clear. Forget the guard so it doesn't free the new ArcInner. + // All clear. Forget the guard so it doesn't free the new Arc allocation. mem::forget(guard); - Self::from_ptr(ptr) + Self::from_non_null(ptr, alloc) } } } @@ -1297,7 +1212,7 @@ impl Clone for Arc { // another must already provide any required synchronization. // // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html) - let old_size = self.inner().strong.fetch_add(1, Relaxed); + let old_size = self.metadata().strong.fetch_add(1, Relaxed); // However we need to guard against massive refcounts in case someone // is `mem::forget`ing Arcs. If we don't do this the count can overflow @@ -1312,7 +1227,7 @@ impl Clone for Arc { abort(); } - Self::from_inner(self.ptr) + unsafe { Self::from_raw(self.ptr.as_ptr()) } } } @@ -1322,7 +1237,7 @@ impl Deref for Arc { #[inline] fn deref(&self) -> &T { - &self.inner().data + unsafe { self.ptr.as_ref() } } } @@ -1374,7 +1289,7 @@ impl Arc { // before release writes (i.e., decrements) to `strong`. Since we hold a // weak count, there's no chance the ArcInner itself could be // deallocated. - if this.inner().strong.compare_exchange(1, 0, Acquire, Relaxed).is_err() { + if this.metadata().strong.compare_exchange(1, 0, Acquire, Relaxed).is_err() { // Another strong pointer exists, so we must clone. // Pre-allocate memory to allow writing the cloned value directly. let mut arc = Self::new_uninit(); unsafe { let data = Arc::get_mut_unchecked(&mut arc); (**this).write_clone_into_raw(data.as_mut_ptr()); *this = arc.assume_init(); } - } else if this.inner().weak.load(Relaxed) != 1 { + } else if this.metadata().weak.load(Relaxed) != 1 { // Relaxed suffices in the above because this is fundamentally an // optimization: we are always racing with weak pointers being // dropped. Worst case, we end up allocated a new Arc unnecessarily. @@ -1398,7 +1313,7 @@ impl Arc { // Materialize our own implicit weak pointer, so that it can clean // up the ArcInner as needed. - let _weak = Weak { ptr: this.ptr }; + let _weak = Weak { ptr: this.ptr, alloc: this.alloc.clone() }; // Can just steal the data, all that's left is Weaks let mut arc = Self::new_uninit(); unsafe { let data = Arc::get_mut_unchecked(&mut arc); data.as_mut_ptr().copy_from_nonoverlapping(&**this, 1); ptr::write(this, arc.assume_init()); } } else { // We were the sole reference of either kind; bump back up the // strong ref count.
- this.inner().strong.store(1, Release); + this.metadata().strong.store(1, Release); } // As with `get_mut()`, the unsafety is ok because our reference was @@ -1491,7 +1406,7 @@ impl Arc { pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T { // We are careful to *not* create a reference covering the "count" fields, as // this would alias with concurrent access to the reference counts (e.g. by `Weak`). - unsafe { &mut (*this.ptr.as_ptr()).data } + unsafe { this.ptr.as_mut() } } /// Determine whether this is the unique reference (including weak refs) to @@ -1506,16 +1421,16 @@ impl Arc { // writes to `strong` (in particular in `Weak::upgrade`) prior to decrements // of the `weak` count (via `Weak::drop`, which uses release). If the upgraded // weak ref was never dropped, the CAS here will fail so we do not care to synchronize. - if self.inner().weak.compare_exchange(1, usize::MAX, Acquire, Relaxed).is_ok() { + if self.metadata().weak.compare_exchange(1, usize::MAX, Acquire, Relaxed).is_ok() { // This needs to be an `Acquire` to synchronize with the decrement of the `strong` // counter in `drop` -- the only access that happens when any but the last reference // is being dropped. - let unique = self.inner().strong.load(Acquire) == 1; + let unique = self.metadata().strong.load(Acquire) == 1; // The release write here synchronizes with a read in `downgrade`, // effectively preventing the above read of `strong` from happening // after the write. - self.inner().weak.store(1, Release); // release the lock + self.metadata().weak.store(1, Release); // release the lock unique } else { false @@ -1555,7 +1470,7 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Arc { // Because `fetch_sub` is already atomic, we do not need to synchronize // with other threads unless we are going to delete the object. This // same logic applies to the below `fetch_sub` to the `weak` count. - if self.inner().strong.fetch_sub(1, Release) != 1 { + if self.metadata().strong.fetch_sub(1, Release) != 1 { return; } @@ -1587,7 +1502,7 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Arc { // // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html) // [2]: (https://github.com/rust-lang/rust/pull/41714) - acquire!(self.inner().strong); + acquire!(self.metadata().strong); unsafe { self.drop_slow(); @@ -1621,9 +1536,8 @@ impl Arc { T: Any + Send + Sync + 'static, { if (*self).is::() { - let ptr = self.ptr.cast::>(); - mem::forget(self); - Ok(Arc::from_inner(ptr)) + let this = mem::ManuallyDrop::new(self); + unsafe { Ok(Arc::from_non_null(this.ptr.cast(), this.alloc)) } } else { Err(self) } @@ -1646,17 +1560,13 @@ impl Weak { /// ``` #[stable(feature = "downgraded_weak", since = "1.10.0")] pub fn new() -> Weak { - Weak { ptr: NonNull::new(usize::MAX as *mut ArcInner).expect("MAX is not 0") } + Weak { + ptr: NonNull::new(usize::MAX as *mut T).expect("MAX is not 0"), + alloc: ArcAllocator::new(Global), + } } } -/// Helper type to allow accessing the reference counts without -/// making any assertions about the data field. -struct WeakInner<'a> { - weak: &'a atomic::AtomicUsize, - strong: &'a atomic::AtomicUsize, -} - impl Weak { /// Returns a raw pointer to the object `T` pointed to by this `Weak`. /// @@ -1685,18 +1595,7 @@ impl Weak { /// [`null`]: core::ptr::null #[stable(feature = "weak_into_raw", since = "1.45.0")] pub fn as_ptr(&self) -> *const T { - let ptr: *mut ArcInner = NonNull::as_ptr(self.ptr); - - if is_dangling(ptr) { - // If the pointer is dangling, we return the sentinel directly. 
This cannot be - // a valid payload address, as the payload is at least as aligned as ArcInner (usize). - ptr as *const T - } else { - // SAFETY: if is_dangling returns false, then the pointer is dereferencable. - // The payload may be dropped at this point, and we have to maintain provenance, - // so use raw pointer manipulation. - unsafe { ptr::addr_of_mut!((*ptr).data) } - } + self.ptr.as_ptr() } /// Consumes the `Weak` and turns it into a raw pointer. @@ -1775,24 +1674,17 @@ impl Weak { /// [`into_raw`]: Weak::into_raw /// [`upgrade`]: Weak::upgrade /// [`forget`]: std::mem::forget + #[inline] #[stable(feature = "weak_into_raw", since = "1.45.0")] pub unsafe fn from_raw(ptr: *const T) -> Self { - // See Weak::as_ptr for context on how the input pointer is derived. - - let ptr = if is_dangling(ptr as *mut T) { - // This is a dangling Weak. - ptr as *mut ArcInner - } else { - // Otherwise, we're guaranteed the pointer came from a nondangling Weak. - // SAFETY: data_offset is safe to call, as ptr references a real (potentially dropped) T. - let offset = unsafe { data_offset(ptr) }; - // Thus, we reverse the offset to get the whole RcBox. - // SAFETY: the pointer originated from a Weak, so this offset is safe. - unsafe { (ptr as *mut ArcInner).set_ptr_value((ptr as *mut u8).offset(-offset)) } - }; + unsafe { + Self::from_non_null(NonNull::new_unchecked(ptr as *mut T), ArcAllocator::new(Global)) + } + } - // SAFETY: we now have recovered the original Weak pointer, so can create the Weak. - Weak { ptr: unsafe { NonNull::new_unchecked(ptr) } } + #[inline(always)] + unsafe fn from_non_null(ptr: NonNull, alloc: ArcAllocator) -> Self { + Self { ptr, alloc } } } @@ -1825,13 +1717,13 @@ impl Weak { // We use a CAS loop to increment the strong count instead of a // fetch_add as this function should never take the reference count // from zero to one. - let inner = self.inner()?; + let meta = self.metadata()?; // Relaxed load because any write of 0 that we can observe // leaves the field in a permanently zero state (so a // "stale" read of 0 is fine), and any other value is // confirmed via the CAS below. - let mut n = inner.strong.load(Relaxed); + let mut n = meta.strong.load(Relaxed); loop { if n == 0 { @@ -1847,8 +1739,8 @@ impl Weak { // Acquire is necessary for the success case to synchronise with `Arc::new_cyclic`, when the inner // value can be initialized after `Weak` references have already been created. In that case, we // expect to observe the fully initialized value. - match inner.strong.compare_exchange_weak(n, n + 1, Acquire, Relaxed) { - Ok(_) => return Some(Arc::from_inner(self.ptr)), // null checked above + match meta.strong.compare_exchange_weak(n, n + 1, Acquire, Relaxed) { + Ok(_) => unsafe { return Some(Arc::from_raw(self.ptr.as_ptr())) }, // null checked above Err(old) => n = old, } } @@ -1859,7 +1751,7 @@ impl Weak { /// If `self` was created using [`Weak::new`], this will return 0. #[stable(feature = "weak_counts", since = "1.41.0")] pub fn strong_count(&self) -> usize { - if let Some(inner) = self.inner() { inner.strong.load(SeqCst) } else { 0 } + self.metadata().map(|meta| meta.strong.load(SeqCst)).unwrap_or(0) } /// Gets an approximation of the number of `Weak` pointers pointing to this @@ -1875,10 +1767,10 @@ impl Weak { /// `Weak`s pointing to the same allocation. 
#[stable(feature = "weak_counts", since = "1.41.0")] pub fn weak_count(&self) -> usize { - self.inner() - .map(|inner| { - let weak = inner.weak.load(SeqCst); - let strong = inner.strong.load(SeqCst); + self.metadata() + .map(|meta| { + let weak = meta.weak.load(SeqCst); + let strong = meta.strong.load(SeqCst); if strong == 0 { 0 } else { @@ -1893,23 +1785,6 @@ impl Weak { .unwrap_or(0) } - /// Returns `None` when the pointer is dangling and there is no allocated `ArcInner`, - /// (i.e., when this `Weak` was created by `Weak::new`). - #[inline] - fn inner(&self) -> Option> { - if is_dangling(self.ptr.as_ptr()) { - None - } else { - // We are careful to *not* create a reference covering the "data" field, as - // the field may be mutated concurrently (for example, if the last `Arc` - // is dropped, the data field will be dropped in-place). - Some(unsafe { - let ptr = self.ptr.as_ptr(); - WeakInner { strong: &(*ptr).strong, weak: &(*ptr).weak } - }) - } - } - /// Returns `true` if the two `Weak`s point to the same allocation (similar to /// [`ptr::eq`]), or if both don't point to any allocation /// (because they were created with `Weak::new()`). @@ -1954,7 +1829,7 @@ impl Weak { #[inline] #[stable(feature = "weak_ptr_eq", since = "1.39.0")] pub fn ptr_eq(&self, other: &Self) -> bool { - self.ptr.as_ptr() == other.ptr.as_ptr() + self.ptr == other.ptr } } @@ -1973,23 +1848,23 @@ impl Clone for Weak { /// ``` #[inline] fn clone(&self) -> Weak { - let inner = if let Some(inner) = self.inner() { - inner + let meta = if let Some(meta) = self.metadata() { + meta } else { - return Weak { ptr: self.ptr }; + return Weak { ptr: self.ptr, alloc: self.alloc.clone() }; }; // See comments in Arc::clone() for why this is relaxed. This can use a // fetch_add (ignoring the lock) because the weak count is only locked // where are *no other* weak pointers in existence. (So we can't be // running this code in that case). - let old_size = inner.weak.fetch_add(1, Relaxed); + let old_size = meta.weak.fetch_add(1, Relaxed); // See comments in Arc::clone() for why we do this (for mem::forget). if old_size > MAX_REFCOUNT { abort(); } - Weak { ptr: self.ptr } + Weak { ptr: self.ptr, alloc: self.alloc.clone() } } } @@ -2049,11 +1924,13 @@ impl Drop for Weak { // weak count can only be locked if there was precisely one weak ref, // meaning that drop could only subsequently run ON that remaining weak // ref, which can only happen after the lock is released. - let inner = if let Some(inner) = self.inner() { inner } else { return }; + let meta = if let Some(meta) = self.metadata() { meta } else { return }; - if inner.weak.fetch_sub(1, Release) == 1 { - acquire!(inner.weak); - unsafe { Global.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr())) } + if meta.weak.fetch_sub(1, Release) == 1 { + acquire!(meta.weak); + unsafe { + self.alloc.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr())) + } } } } @@ -2274,6 +2151,7 @@ impl fmt::Pointer for Arc { } } +#[cfg(not(no_global_oom_handling))] #[stable(feature = "rust1", since = "1.0.0")] impl Default for Arc { /// Creates a new `Arc`, with the `Default` value for `T`. 
@@ -2298,6 +2176,7 @@ impl Hash for Arc { } } +#[cfg(not(no_global_oom_handling))] #[stable(feature = "from_for_ptrs", since = "1.6.0")] impl From for Arc { fn from(t: T) -> Self { @@ -2520,6 +2399,40 @@ impl> ToArcSlice for I { } } +/// Dedicated function for allocating to prevent generating a function for every `T` +#[cfg(not(no_global_oom_handling))] +fn allocate(alloc: &ArcAllocator, layout: Layout, metadata: ArcMetadata) -> NonNull { + try_allocate(alloc, layout, metadata).unwrap_or_else(|_| handle_alloc_error(layout)) +} + +/// Dedicated function for allocating to prevent generating a function for every `T` +#[cfg(not(no_global_oom_handling))] +fn allocate_zeroed( + alloc: &ArcAllocator, + layout: Layout, + metadata: ArcMetadata, +) -> NonNull { + try_allocate_zeroed(alloc, layout, metadata).unwrap_or_else(|_| handle_alloc_error(layout)) +} + +/// Dedicated function for allocating to prevent generating a function for every `T` +fn try_allocate( + alloc: &ArcAllocator, + layout: Layout, + metadata: ArcMetadata, +) -> Result, AllocError> { + alloc.allocate_with_prefix(layout, metadata).map(NonNull::as_non_null_ptr) +} + +/// Dedicated function for allocating to prevent generating a function for every `T` +fn try_allocate_zeroed( + alloc: &ArcAllocator, + layout: Layout, + metadata: ArcMetadata, +) -> Result, AllocError> { + alloc.allocate_zeroed_with_prefix(layout, metadata).map(NonNull::as_non_null_ptr) +} + #[stable(feature = "rust1", since = "1.0.0")] impl borrow::Borrow for Arc { fn borrow(&self) -> &T { @@ -2536,25 +2449,3 @@ impl AsRef for Arc { #[stable(feature = "pin", since = "1.33.0")] impl Unpin for Arc {} - -/// Get the offset within an `ArcInner` for the payload behind a pointer. -/// -/// # Safety -/// -/// The pointer must point to (and have valid metadata for) a previously -/// valid instance of T, but the T is allowed to be dropped. -unsafe fn data_offset(ptr: *const T) -> isize { - // Align the unsized value to the end of the ArcInner. - // Because RcBox is repr(C), it will always be the last field in memory. - // SAFETY: since the only unsized types possible are slices, trait objects, - // and extern types, the input safety requirement is currently enough to - // satisfy the requirements of align_of_val_raw; this is an implementation - // detail of the language that may not be relied upon outside of std. - unsafe { data_offset_align(align_of_val_raw(ptr)) } -} - -#[inline] -fn data_offset_align(align: usize) -> isize { - let layout = Layout::new::>(); - (layout.size() + layout.padding_needed_for(align)) as isize -} diff --git a/library/core/src/alloc/helper.rs b/library/core/src/alloc/helper.rs new file mode 100644 index 0000000000000..d7fdd72503a8c --- /dev/null +++ b/library/core/src/alloc/helper.rs @@ -0,0 +1,219 @@ +#![unstable(feature = "allocator_api_internals", issue = "none")] +#![doc(hidden)] + +use crate::{ + alloc::{AllocError, Allocator, Layout}, + fmt, + marker::PhantomData, + ptr::NonNull, +}; + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum AllocInit { + /// The contents of the new memory are uninitialized. + Uninitialized, + /// The new memory is guaranteed to be zeroed. + Zeroed, +} + +/// An allocator that requests some extra memory from the parent allocator for storing a prefix and/or a suffix. +/// +/// The alignment of the memory block is the maximum of the alignment of `Prefix` and the requested +/// alignment. This may introduce an unused padding between `Prefix` and the returned memory.
+/// +/// To get a pointer to the prefix, [`prefix()`] may be called. +/// +/// [`prefix()`]: Self::prefix +/// +/// Consider +/// +/// ```rust,ignore (not real code) +/// #[repr(C)] +/// struct Struct { +/// t: T, +/// data: Data, +/// } +/// ``` +/// +/// where `Data` is a type with layout `layout`. +/// +/// When this allocator creates an allocation for layout `layout`, the pointer can be +/// offset by `-offsetof(Struct, data)` and the resulting pointer points to an allocation +/// of `A` for `Layout::new::()`. +pub struct PrefixAllocator { + /// The parent allocator to be used as backend + pub parent: Alloc, + _prefix: PhantomData<*const Prefix>, +} + +impl fmt::Debug for PrefixAllocator { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("PrefixAllocator").field("parent", &self.parent).finish() + } +} + +impl Default for PrefixAllocator { + fn default() -> Self { + Self::new(Alloc::default()) + } +} + +impl Clone for PrefixAllocator { + fn clone(&self) -> Self { + Self::new(self.parent.clone()) + } +} + +impl Copy for PrefixAllocator {} + +impl PartialEq for PrefixAllocator { + fn eq(&self, other: &Self) -> bool { + self.parent.eq(&other.parent) + } +} + +impl Eq for PrefixAllocator {} + +unsafe impl Send for PrefixAllocator {} +unsafe impl Sync for PrefixAllocator {} +impl Unpin for PrefixAllocator {} + +impl PrefixAllocator { + pub const fn new(parent: Alloc) -> Self { + Self { parent, _prefix: PhantomData } + } + + /// Returns the offset between the `Prefix` and the stored data. + #[inline] + pub fn prefix_offset(layout: Layout) -> usize { + let prefix_layout = Layout::new::(); + prefix_layout.size() + prefix_layout.padding_needed_for(layout.align()) + } + + /// Returns a pointer to the prefix for an allocated pointer and the alignment it was allocated with. + /// + /// # Safety + /// + /// * `ptr` must denote a block of memory *[currently allocated]* via this allocator, and + /// * `align` has to be the alignment used for allocating `ptr`. + /// + /// [currently allocated]: https://doc.rust-lang.org/nightly/core/alloc/trait.AllocRef.html#currently-allocated-memory + #[inline] + pub unsafe fn prefix(ptr: NonNull, align: usize) -> NonNull { + let prefix_layout = Layout::new::(); + + let offset = prefix_layout.size() + prefix_layout.padding_needed_for(align); + + // SAFETY: `ptr` was allocated with this allocator thus, `ptr - offset` points to the + // prefix and is non-null. + unsafe { NonNull::new_unchecked(ptr.as_ptr().sub(offset).cast()) } + } + + fn create_ptr(ptr: NonNull<[u8]>, offset_prefix: usize) -> NonNull<[u8]> { + let len = ptr.len() - offset_prefix; + + // SAFETY: `prefix_offset` is smaller (and not equal to) `ptr` as the same function for calculating `prefix_offset` is used when allocating.
+ let ptr = unsafe { NonNull::new_unchecked(ptr.as_mut_ptr().add(offset_prefix)) }; + + NonNull::slice_from_raw_parts(ptr, len) + } + + #[inline] + fn alloc_impl( + layout: Layout, + alloc: impl FnOnce(Layout) -> Result, AllocError>, + ) -> Result, AllocError> { + let (layout, offset_prefix) = + Layout::new::().extend(layout).map_err(|_| AllocError)?; + + Ok(Self::create_ptr(alloc(layout)?, offset_prefix)) + } + + /// Behaves like `allocate` but also writes the `prefix` + pub fn allocate_with_prefix( + &self, + layout: Layout, + prefix: Prefix, + ) -> Result, AllocError> + where + Alloc: Allocator, + { + let (layout, offset_prefix) = + Layout::new::().extend(layout).map_err(|_| AllocError)?; + + let memory = self.parent.allocate(layout)?; + // SAFETY: memory was just allocated with the layout of `Prefix` + unsafe { memory.as_mut_ptr().cast::().write(prefix) }; + Ok(Self::create_ptr(memory, offset_prefix)) + } + + /// Behaves like `allocate_zeroed` but also writes the `prefix` + pub fn allocate_zeroed_with_prefix( + &self, + layout: Layout, + prefix: Prefix, + ) -> Result, AllocError> + where + Alloc: Allocator, + { + let (layout, offset_prefix) = + Layout::new::().extend(layout).map_err(|_| AllocError)?; + + let memory = self.parent.allocate_zeroed(layout)?; + // SAFETY: memory was just allocated with the layout of `Prefix` + unsafe { memory.as_mut_ptr().cast::().write(prefix) }; + Ok(Self::create_ptr(memory, offset_prefix)) + } +} + +unsafe impl Allocator for PrefixAllocator +where + Alloc: Allocator, +{ + fn allocate(&self, layout: Layout) -> Result, AllocError> { + Self::alloc_impl(layout, |l| self.parent.allocate(l)) + } + + fn allocate_zeroed(&self, layout: Layout) -> Result, AllocError> { + Self::alloc_impl(layout, |l| self.parent.allocate_zeroed(l)) + } + + unsafe fn grow( + &self, + _ptr: NonNull, + _old_layout: Layout, + _new_layout: Layout, + ) -> Result, AllocError> { + // For (A)Rc it's not needed. When implementing please take care, if the alignment changes. + unimplemented!("PrefixAllocator currently does not implement growing."); + } + + unsafe fn grow_zeroed( + &self, + _ptr: NonNull, + _old_layout: Layout, + _new_layout: Layout, + ) -> Result, AllocError> { + // For (A)Rc it's not needed. When implementing please take care, if the alignment changes. + unimplemented!("PrefixAllocator currently does not implement growing."); + } + + unsafe fn shrink( + &self, + _ptr: NonNull, + _old_layout: Layout, + _new_layout: Layout, + ) -> Result, AllocError> { + // For (A)Rc it's not needed. When implementing please take care, if the alignment changes. + unimplemented!("PrefixAllocator currently does not implement shrinking."); + } + + unsafe fn deallocate(&self, ptr: NonNull, layout: Layout) { + let (layout, prefix_offset) = Layout::new::().extend(layout).unwrap(); + // SAFETY: `prefix_offset` is smaller (and not equal to) `ptr` as the same function for calculating `prefix_offset` is used when allocating. 
+ unsafe { + let base_ptr = NonNull::new_unchecked(ptr.as_ptr().sub(prefix_offset)); + self.parent.deallocate(base_ptr, layout) + }; + } +} diff --git a/library/core/src/alloc/mod.rs b/library/core/src/alloc/mod.rs index 06a761531b676..136f09214e404 100644 --- a/library/core/src/alloc/mod.rs +++ b/library/core/src/alloc/mod.rs @@ -2,6 +2,8 @@ #![stable(feature = "alloc_module", since = "1.28.0")] +pub mod helper; + mod global; mod layout; diff --git a/library/core/tests/alloc.rs b/library/core/tests/alloc/const_unchecked_layout.rs similarity index 100% rename from library/core/tests/alloc.rs rename to library/core/tests/alloc/const_unchecked_layout.rs diff --git a/library/core/tests/alloc/mod.rs b/library/core/tests/alloc/mod.rs new file mode 100644 index 0000000000000..de57d828edac5 --- /dev/null +++ b/library/core/tests/alloc/mod.rs @@ -0,0 +1,185 @@ +use core::alloc::{AllocError, Allocator, Layout}; +use core::ptr::NonNull; +use std::{ + collections::HashMap, + sync::{Mutex, PoisonError}, +}; + +mod const_unchecked_layout; +mod prefix; + +#[derive(Default)] +/// Implements `Allocator` and checks its safety conditions. +struct Tracker { + alloc: A, + map: Mutex, (usize, Layout)>>, +} + +impl Tracker { + fn new(alloc: A) -> Self { + Self { alloc, map: Default::default() } + } + + fn after_alloc(&self, layout: Layout, result: Result, AllocError>) { + if let Ok(ptr) = result { + self.map + .lock() + .unwrap_or_else(PoisonError::into_inner) + .insert(ptr.as_non_null_ptr(), (ptr.len(), layout)); + } + } + + fn before_dealloc(&self, ptr: NonNull, layout: Layout) { + let lock = self.map.lock().unwrap_or_else(PoisonError::into_inner); + let (size, old_layout) = lock + .get(&ptr) + .expect("`ptr` must denote a block of memory currently allocated via this allocator"); + assert_eq!( + layout.align(), + old_layout.align(), + "`layout` must fit that block of memory. Expected alignment of {}, got {}", + old_layout.align(), + layout.align() + ); + if layout.size() < old_layout.size() || layout.size() > *size { + if *size == old_layout.size() { + panic!( + "`layout` must fit that block of memory. Expected size of {}, got {}", + old_layout.size(), + layout.size() + ) + } else { + panic!( + "`layout` must fit that block of memory.
Expected size between {}..={}, \ + got {}", + old_layout.size(), + size, + layout.size() + ) + } + } + } + + fn after_dealloc(&self, ptr: NonNull, _layout: Layout) { + self.map.lock().unwrap_or_else(PoisonError::into_inner).remove(&ptr); + } + + fn before_grow(&self, ptr: NonNull, old_layout: Layout, new_layout: Layout) { + assert!( + new_layout.size() >= old_layout.size(), + "`new_layout.size()` must be greater than or equal to `old_layout.size()`, expected {} >= {}", + new_layout.size(), + old_layout.size() + ); + self.before_dealloc(ptr, old_layout) + } + + #[track_caller] + fn after_grow( + &self, + ptr: NonNull, + old_layout: Layout, + new_layout: Layout, + result: Result, AllocError>, + ) { + if result.is_ok() { + self.after_dealloc(ptr, old_layout); + self.after_alloc(new_layout, result); + } + } + + fn before_shrink(&self, ptr: NonNull, old_layout: Layout, new_layout: Layout) { + assert!( + new_layout.size() <= old_layout.size(), + "`new_layout.size()` must be smaller than or equal to `old_layout.size()`, expected {} <= {}", + new_layout.size(), + old_layout.size() + ); + self.before_dealloc(ptr, old_layout) + } + + #[track_caller] + fn after_shrink( + &self, + ptr: NonNull, + old_layout: Layout, + new_layout: Layout, + result: Result, AllocError>, + ) { + if result.is_ok() { + self.after_dealloc(ptr, old_layout); + self.after_alloc(new_layout, result); + } + } +} + +unsafe impl Allocator for Tracker { + #[track_caller] + fn allocate(&self, layout: Layout) -> Result, std::alloc::AllocError> { + let result = self.alloc.allocate(layout); + self.after_alloc(layout, result); + result + } + + #[track_caller] + fn allocate_zeroed(&self, layout: Layout) -> Result, std::alloc::AllocError> { + let result = self.alloc.allocate_zeroed(layout); + self.after_alloc(layout, result); + result + } + + #[track_caller] + unsafe fn deallocate(&self, ptr: NonNull, layout: Layout) { + self.before_dealloc(ptr, layout); + unsafe { self.alloc.deallocate(ptr, layout) } + self.after_dealloc(ptr, layout); + } + + #[track_caller] + unsafe fn grow( + &self, + ptr: NonNull, + old_layout: Layout, + new_layout: Layout, + ) -> Result, std::alloc::AllocError> { + self.before_grow(ptr, old_layout, new_layout); + let result = unsafe { self.alloc.grow(ptr, old_layout, new_layout) }; + self.after_grow(ptr, old_layout, new_layout, result); + result + } + + #[track_caller] + unsafe fn grow_zeroed( + &self, + ptr: NonNull, + old_layout: Layout, + new_layout: Layout, + ) -> Result, std::alloc::AllocError> { + self.before_grow(ptr, old_layout, new_layout); + let result = unsafe { self.alloc.grow_zeroed(ptr, old_layout, new_layout) }; + self.after_grow(ptr, old_layout, new_layout, result); + result + } + + #[track_caller] + unsafe fn shrink( + &self, + ptr: NonNull, + old_layout: Layout, + new_layout: Layout, + ) -> Result, std::alloc::AllocError> { + self.before_shrink(ptr, old_layout, new_layout); + let result = unsafe { self.alloc.shrink(ptr, old_layout, new_layout) }; + self.after_shrink(ptr, old_layout, new_layout, result); + result + } +} + +impl Drop for Tracker { + fn drop(&mut self) { + let lock = self.map.lock().unwrap_or_else(PoisonError::into_inner); + if !lock.is_empty() { + panic!("Missing deallocations {:#?}", lock); + } + } +} diff --git a/library/core/tests/alloc/prefix.rs b/library/core/tests/alloc/prefix.rs new file mode 100644 index 0000000000000..76a6999863123 --- /dev/null +++ b/library/core/tests/alloc/prefix.rs @@ -0,0 +1,75 @@ +use core::alloc::helper::PrefixAllocator; +use core::alloc::{Allocator,
Layout}; +use core::any::type_name; +use std::alloc::System; + +use super::Tracker; + +fn test_prefix() { + unsafe { + let layout = Layout::new::(); + let prefix_offset = PrefixAllocator::::prefix_offset(layout); + assert_eq!( + prefix_offset, + Layout::new::().extend(layout).unwrap().1, + "Invalid prefix offset for PrefixAllocator<_, {}> with Layout<{}>.", + type_name::(), + type_name::(), + ); + + let alloc = + Tracker::new(PrefixAllocator::, Prefix>::new(Tracker::new(System))); + let memory = alloc.allocate(layout).unwrap_or_else(|_| { + panic!( + "Could not allocate {} bytes for PrefixAllocator<_, {}> with Layout<{}>.", + layout.size(), + type_name::(), + type_name::() + ) + }); + + assert_eq!( + PrefixAllocator::::prefix(memory.as_non_null_ptr(), layout.align()) + .cast() + .as_ptr(), + memory.as_mut_ptr().sub(prefix_offset), + "Invalid prefix location for PrefixAllocator<_, {}> with Layout<{}>.", + type_name::(), + type_name::(), + ); + + alloc.deallocate(memory.as_non_null_ptr(), layout); + } +} + +#[repr(align(1024))] +#[derive(Debug, Copy, Clone, PartialEq)] +struct AlignTo1024 { + a: T, +} + +#[repr(align(64))] +#[derive(Debug, Copy, Clone, PartialEq)] +struct AlignTo64; + +#[test] +fn test() { + macro_rules! test_ty { + ($($ty:ty),*) => { test_ty!(@2 $($ty),*; ($($ty),*)) }; + (@2 $($tyl:ty),*; $tyr:tt) => { $(test_ty!(@3 $tyl; $tyr);)* }; + (@3 $tyl:ty; ($($tyr:ty),*)) => { $(test_prefix::<$tyl, $tyr>();)* }; + } + // call test_pair::() for every combination of these types + test_ty!( + (), + u8, + u16, + u32, + u64, + u128, + AlignTo64, + AlignTo1024, + AlignTo1024, + AlignTo1024 + ); +} diff --git a/library/core/tests/lib.rs b/library/core/tests/lib.rs index 16bce6e35fe83..86a2f468e7733 100644 --- a/library/core/tests/lib.rs +++ b/library/core/tests/lib.rs @@ -1,4 +1,6 @@ #![feature(alloc_layout_extra)] +#![feature(allocator_api)] +#![feature(allocator_api_internals)] #![feature(array_chunks)] #![feature(array_methods)] #![feature(array_map)] @@ -47,6 +49,8 @@ #![feature(try_trait_v2)] #![feature(slice_internals)] #![feature(slice_partition_dedup)] +#![feature(slice_ptr_len)] +#![feature(slice_ptr_get)] #![feature(int_error_matching)] #![feature(iter_advance_by)] #![feature(iter_partition_in_place)] diff --git a/src/etc/gdb_providers.py b/src/etc/gdb_providers.py index f0ce13b269c59..490b02cf4e031 100644 --- a/src/etc/gdb_providers.py +++ b/src/etc/gdb_providers.py @@ -142,12 +142,20 @@ def display_hint(): class StdRcProvider: def __init__(self, valobj, is_atomic=False): + def size_rounded_up(size, align): + return (size + align - 1) & ~(align - 1) + self.valobj = valobj self.is_atomic = is_atomic self.ptr = unwrap_unique_or_non_null(valobj["ptr"]) - self.value = self.ptr["data" if is_atomic else "value"] - self.strong = self.ptr["strong"]["v" if is_atomic else "value"]["value"] - self.weak = self.ptr["weak"]["v" if is_atomic else "value"]["value"] - 1 + self.value = self.ptr.dereference() + + metadata_type = self.valobj["alloc"].type.template_argument(1) + offset = size_rounded_up(metadata_type.sizeof, self.value.type.sizeof) + u8_pointer = gdb.lookup_type("u8").pointer() + self.metadata = (self.ptr.cast(u8_pointer) - offset).cast(metadata_type.pointer()) + self.strong = self.metadata["strong"]["v" if is_atomic else "value"]["value"] + self.weak = self.metadata["weak"]["v" if is_atomic else "value"]["value"] - 1 def to_string(self): if self.is_atomic: diff --git a/src/etc/lldb_providers.py b/src/etc/lldb_providers.py index 86dcc335e3cbf..fa8e4effc9919 100644 --- 
a/src/etc/lldb_providers.py +++ b/src/etc/lldb_providers.py @@ -582,33 +582,35 @@ def StdRcSummaryProvider(valobj, dict): class StdRcSyntheticProvider: """Pretty-printer for alloc::rc::Rc and alloc::sync::Arc - struct Rc { ptr: NonNull>, ... } + struct Rc { ptr: NonNull, alloc: PrefixAllocator<_, RcMetadata>, ... } rust 1.31.1: struct NonNull { pointer: NonZero<*const T> } rust 1.33.0: struct NonNull { pointer: *const T } struct NonZero(T) - struct RcBox { strong: Cell, weak: Cell, value: T } + struct RcMetadata { strong: Cell, weak: Cell } struct Cell { value: UnsafeCell } struct UnsafeCell { value: T } - struct Arc { ptr: NonNull>, ... } - struct ArcInner { strong: atomic::AtomicUsize, weak: atomic::AtomicUsize, data: T } + struct Arc { ptr: NonNull, alloc: PrefixAllocator<_, ArcMetadata>, ... } + struct ArcMetadata{ strong: atomic::AtomicUsize, weak: atomic::AtomicUsize } struct AtomicUsize { v: UnsafeCell } """ def __init__(self, valobj, dict, is_atomic=False): + def size_rounded_up(size, align): + return (size + align - 1) & ~(align - 1) + # type: (SBValue, dict, bool) -> StdRcSyntheticProvider self.valobj = valobj + self.value_builder = ValueBuilder(valobj) + self.is_atomic = is_atomic self.ptr = unwrap_unique_or_non_null(self.valobj.GetChildMemberWithName("ptr")) - self.value = self.ptr.GetChildMemberWithName("data" if is_atomic else "value") - - self.strong = self.ptr.GetChildMemberWithName("strong").GetChildAtIndex( - 0).GetChildMemberWithName("value") - self.weak = self.ptr.GetChildMemberWithName("weak").GetChildAtIndex( - 0).GetChildMemberWithName("value") - - self.value_builder = ValueBuilder(valobj) + metadata_type = self.valobj.GetChildMemberWithName("alloc").type.template_args[1] + align = self.ptr.type.GetPointeeType().GetByteSize() + offset = size_rounded_up(metadata_type.size, align) + meta_address = self.ptr.GetValueAsUnsigned() - offset + self.meta = self.valobj.CreateValueFromAddress("meta", meta_address, metadata_type) self.update() @@ -630,7 +632,8 @@ def get_child_index(self, name): def get_child_at_index(self, index): # type: (int) -> SBValue if index == 0: - return self.value + value = self.ptr.Dereference() + return self.valobj.CreateValueFromData("value", value.data, value.type) if index == 1: return self.value_builder.from_uint("strong", self.strong_count) if index == 2: @@ -640,8 +643,16 @@ def get_child_at_index(self, index): def update(self): # type: () -> None - self.strong_count = self.strong.GetValueAsUnsigned() - self.weak_count = self.weak.GetValueAsUnsigned() - 1 + if self.is_atomic: + self.strong_count = self.meta.GetChildMemberWithName("strong").GetChildAtIndex(0)\ + .GetChildMemberWithName("value").GetValueAsUnsigned() + self.weak_count = self.meta.GetChildMemberWithName("weak").GetChildAtIndex(0)\ + .GetChildMemberWithName("value").GetValueAsUnsigned() - 1 + else: + self.strong_count = self.meta.GetChildMemberWithName("strong")\ + .GetChildMemberWithName("value").GetValueAsUnsigned() + self.weak_count = self.meta.GetChildMemberWithName("weak")\ + .GetChildMemberWithName("value").GetValueAsUnsigned() - 1 def has_children(self): # type: () -> bool diff --git a/src/test/debuginfo/rc_arc.rs b/src/test/debuginfo/rc_arc.rs index 87bc79ea79437..062668209bbc8 100644 --- a/src/test/debuginfo/rc_arc.rs +++ b/src/test/debuginfo/rc_arc.rs @@ -20,7 +20,7 @@ // lldb-command:print r // lldb-check:[...]$0 = strong=2, weak=1 { value = 42 } // lldb-command:print a -// lldb-check:[...]$1 = strong=2, weak=1 { data = 42 } +// lldb-check:[...]$1 = strong=2, weak=1 { 
value = 42 } use std::rc::Rc; use std::sync::Arc;
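
For reference, a minimal, standalone sketch of how the metadata prefix offset used throughout this patch falls out of `Layout::extend`, assuming a hypothetical `Meta` stand-in for `ArcMetadata` and using only stable `std::alloc::Layout` APIs (this is not part of the patch; the GDB/LLDB providers above perform the same pointer-minus-offset computation):

use std::alloc::Layout;

// Hypothetical stand-in for ArcMetadata: two reference counters.
#[repr(C)]
struct Meta {
    strong: usize,
    weak: usize,
}

fn main() {
    // Layout of the value stored behind the Arc.
    let value_layout = Layout::new::<[u64; 3]>();

    // `extend` appends the value layout to the metadata layout and returns the
    // offset of the value inside the combined block; this mirrors what the
    // prefix allocator computes when it hands out the value pointer.
    let (combined, value_offset) = Layout::new::<Meta>().extend(value_layout).unwrap();
    let combined = combined.pad_to_align();

    // Given a pointer to the value, the metadata lives `value_offset` bytes
    // before it, so the counters can be reached without a named box type.
    println!("combined = {:?}, value offset = {}", combined, value_offset);
}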