Skip to content

Commit 42a4f37

Browse files
committed
Auto merge of #41773 - frewsxcv:rollup, r=frewsxcv
Rollup of 9 pull requests - Successful merges: #41064, #41307, #41512, #41582, #41678, #41722, #41734, #41761, #41763 - Failed merges: none
2 parents f420965 + 5bed9dc commit 42a4f37

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

75 files changed

+1490
-950
lines changed

src/doc/nomicon

src/liballoc/arc.rs

+13-18
Original file line numberDiff line numberDiff line change
@@ -277,8 +277,7 @@ impl<T> Arc<T> {
277277
atomic::fence(Acquire);
278278

279279
unsafe {
280-
let ptr = *this.ptr;
281-
let elem = ptr::read(&(*ptr).data);
280+
let elem = ptr::read(&this.ptr.as_ref().data);
282281

283282
// Make a weak pointer to clean up the implicit strong-weak reference
284283
let _weak = Weak { ptr: this.ptr };
@@ -306,7 +305,7 @@ impl<T> Arc<T> {
306305
/// ```
307306
#[stable(feature = "rc_raw", since = "1.17.0")]
308307
pub fn into_raw(this: Self) -> *const T {
309-
let ptr = unsafe { &(**this.ptr).data as *const _ };
308+
let ptr: *const T = &*this;
310309
mem::forget(this);
311310
ptr
312311
}
@@ -345,7 +344,7 @@ impl<T> Arc<T> {
345344
// `data` field from the pointer.
346345
let ptr = (ptr as *const u8).offset(-offset_of!(ArcInner<T>, data));
347346
Arc {
348-
ptr: Shared::new(ptr as *const _),
347+
ptr: Shared::new(ptr as *mut u8 as *mut _),
349348
}
350349
}
351350
}
@@ -452,17 +451,17 @@ impl<T: ?Sized> Arc<T> {
452451
// `ArcInner` structure itself is `Sync` because the inner data is
453452
// `Sync` as well, so we're ok loaning out an immutable pointer to these
454453
// contents.
455-
unsafe { &**self.ptr }
454+
unsafe { self.ptr.as_ref() }
456455
}
457456

458457
// Non-inlined part of `drop`.
459458
#[inline(never)]
460459
unsafe fn drop_slow(&mut self) {
461-
let ptr = self.ptr.as_mut_ptr();
460+
let ptr = self.ptr.as_ptr();
462461

463462
// Destroy the data at this time, even though we may not free the box
464463
// allocation itself (there may still be weak pointers lying around).
465-
ptr::drop_in_place(&mut (*ptr).data);
464+
ptr::drop_in_place(&mut self.ptr.as_mut().data);
466465

467466
if self.inner().weak.fetch_sub(1, Release) == 1 {
468467
atomic::fence(Acquire);
@@ -488,9 +487,7 @@ impl<T: ?Sized> Arc<T> {
488487
/// assert!(!Arc::ptr_eq(&five, &other_five));
489488
/// ```
490489
pub fn ptr_eq(this: &Self, other: &Self) -> bool {
491-
let this_ptr: *const ArcInner<T> = *this.ptr;
492-
let other_ptr: *const ArcInner<T> = *other.ptr;
493-
this_ptr == other_ptr
490+
this.ptr.as_ptr() == other.ptr.as_ptr()
494491
}
495492
}
496493

@@ -621,7 +618,7 @@ impl<T: Clone> Arc<T> {
621618
// here (due to zeroing) because data is no longer accessed by
622619
// other threads (due to there being no more strong refs at this
623620
// point).
624-
let mut swap = Arc::new(ptr::read(&(**weak.ptr).data));
621+
let mut swap = Arc::new(ptr::read(&weak.ptr.as_ref().data));
625622
mem::swap(this, &mut swap);
626623
mem::forget(swap);
627624
}
@@ -634,8 +631,7 @@ impl<T: Clone> Arc<T> {
634631
// As with `get_mut()`, the unsafety is ok because our reference was
635632
// either unique to begin with, or became one upon cloning the contents.
636633
unsafe {
637-
let inner = &mut *this.ptr.as_mut_ptr();
638-
&mut inner.data
634+
&mut this.ptr.as_mut().data
639635
}
640636
}
641637
}
@@ -677,8 +673,7 @@ impl<T: ?Sized> Arc<T> {
677673
// the Arc itself to be `mut`, so we're returning the only possible
678674
// reference to the inner data.
679675
unsafe {
680-
let inner = &mut *this.ptr.as_mut_ptr();
681-
Some(&mut inner.data)
676+
Some(&mut this.ptr.as_mut().data)
682677
}
683678
} else {
684679
None
@@ -878,7 +873,7 @@ impl<T: ?Sized> Weak<T> {
878873
#[inline]
879874
fn inner(&self) -> &ArcInner<T> {
880875
// See comments above for why this is "safe"
881-
unsafe { &**self.ptr }
876+
unsafe { self.ptr.as_ref() }
882877
}
883878
}
884879

@@ -962,7 +957,7 @@ impl<T: ?Sized> Drop for Weak<T> {
962957
/// assert!(other_weak_foo.upgrade().is_none());
963958
/// ```
964959
fn drop(&mut self) {
965-
let ptr = *self.ptr;
960+
let ptr = self.ptr.as_ptr();
966961

967962
// If we find out that we were the last weak pointer, then its time to
968963
// deallocate the data entirely. See the discussion in Arc::drop() about
@@ -1143,7 +1138,7 @@ impl<T: ?Sized + fmt::Debug> fmt::Debug for Arc<T> {
11431138
#[stable(feature = "rust1", since = "1.0.0")]
11441139
impl<T: ?Sized> fmt::Pointer for Arc<T> {
11451140
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
1146-
fmt::Pointer::fmt(&*self.ptr, f)
1141+
fmt::Pointer::fmt(&self.ptr, f)
11471142
}
11481143
}
11491144

src/liballoc/boxed.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -156,7 +156,7 @@ fn make_place<T>() -> IntermediateBox<T> {
156156
let align = mem::align_of::<T>();
157157

158158
let p = if size == 0 {
159-
heap::EMPTY as *mut u8
159+
mem::align_of::<T>() as *mut u8
160160
} else {
161161
let p = unsafe { heap::allocate(size, align) };
162162
if p.is_null() {

src/liballoc/heap.rs

+4-2
Original file line numberDiff line numberDiff line change
@@ -138,7 +138,9 @@ pub fn usable_size(size: usize, align: usize) -> usize {
138138
///
139139
/// This preserves the non-null invariant for types like `Box<T>`. The address
140140
/// may overlap with non-zero-size memory allocations.
141-
pub const EMPTY: *mut () = 0x1 as *mut ();
141+
#[rustc_deprecated(since = "1.19", reason = "Use Unique/Shared::empty() instead")]
142+
#[unstable(feature = "heap_api", issue = "27700")]
143+
pub const EMPTY: *mut () = 1 as *mut ();
142144

143145
/// The allocator for unique pointers.
144146
// This function must not unwind. If it does, MIR trans will fail.
@@ -147,7 +149,7 @@ pub const EMPTY: *mut () = 0x1 as *mut ();
147149
#[inline]
148150
unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
149151
if size == 0 {
150-
EMPTY as *mut u8
152+
align as *mut u8
151153
} else {
152154
let ptr = allocate(size, align);
153155
if ptr.is_null() {

src/liballoc/raw_vec.rs

+13-15
Original file line numberDiff line numberDiff line change
@@ -22,13 +22,13 @@ use core::cmp;
2222
/// involved. This type is excellent for building your own data structures like Vec and VecDeque.
2323
/// In particular:
2424
///
25-
/// * Produces heap::EMPTY on zero-sized types
26-
/// * Produces heap::EMPTY on zero-length allocations
25+
/// * Produces Unique::empty() on zero-sized types
26+
/// * Produces Unique::empty() on zero-length allocations
2727
/// * Catches all overflows in capacity computations (promotes them to "capacity overflow" panics)
2828
/// * Guards against 32-bit systems allocating more than isize::MAX bytes
2929
/// * Guards against overflowing your length
3030
/// * Aborts on OOM
31-
/// * Avoids freeing heap::EMPTY
31+
/// * Avoids freeing Unique::empty()
3232
/// * Contains a ptr::Unique and thus endows the user with all related benefits
3333
///
3434
/// This type does not in anyway inspect the memory that it manages. When dropped it *will*
@@ -55,15 +55,13 @@ impl<T> RawVec<T> {
5555
/// it makes a RawVec with capacity `usize::MAX`. Useful for implementing
5656
/// delayed allocation.
5757
pub fn new() -> Self {
58-
unsafe {
59-
// !0 is usize::MAX. This branch should be stripped at compile time.
60-
let cap = if mem::size_of::<T>() == 0 { !0 } else { 0 };
58+
// !0 is usize::MAX. This branch should be stripped at compile time.
59+
let cap = if mem::size_of::<T>() == 0 { !0 } else { 0 };
6160

62-
// heap::EMPTY doubles as "unallocated" and "zero-sized allocation"
63-
RawVec {
64-
ptr: Unique::new(heap::EMPTY as *mut T),
65-
cap: cap,
66-
}
61+
// Unique::empty() doubles as "unallocated" and "zero-sized allocation"
62+
RawVec {
63+
ptr: Unique::empty(),
64+
cap: cap,
6765
}
6866
}
6967

@@ -101,7 +99,7 @@ impl<T> RawVec<T> {
10199

102100
// handles ZSTs and `cap = 0` alike
103101
let ptr = if alloc_size == 0 {
104-
heap::EMPTY as *mut u8
102+
mem::align_of::<T>() as *mut u8
105103
} else {
106104
let align = mem::align_of::<T>();
107105
let ptr = if zeroed {
@@ -148,10 +146,10 @@ impl<T> RawVec<T> {
148146

149147
impl<T> RawVec<T> {
150148
/// Gets a raw pointer to the start of the allocation. Note that this is
151-
/// heap::EMPTY if `cap = 0` or T is zero-sized. In the former case, you must
149+
/// Unique::empty() if `cap = 0` or T is zero-sized. In the former case, you must
152150
/// be careful.
153151
pub fn ptr(&self) -> *mut T {
154-
*self.ptr
152+
self.ptr.as_ptr()
155153
}
156154

157155
/// Gets the capacity of the allocation.
@@ -563,7 +561,7 @@ unsafe impl<#[may_dangle] T> Drop for RawVec<T> {
563561

564562
let num_bytes = elem_size * self.cap;
565563
unsafe {
566-
heap::deallocate(*self.ptr as *mut _, num_bytes, align);
564+
heap::deallocate(self.ptr() as *mut u8, num_bytes, align);
567565
}
568566
}
569567
}

src/liballoc/rc.rs

+22-28
Original file line numberDiff line numberDiff line change
@@ -230,7 +230,7 @@ use core::cell::Cell;
230230
use core::cmp::Ordering;
231231
use core::fmt;
232232
use core::hash::{Hash, Hasher};
233-
use core::intrinsics::{abort, assume};
233+
use core::intrinsics::abort;
234234
use core::marker;
235235
use core::marker::Unsize;
236236
use core::mem::{self, align_of_val, forget, size_of, size_of_val, uninitialized};
@@ -358,7 +358,7 @@ impl<T> Rc<T> {
358358
/// ```
359359
#[stable(feature = "rc_raw", since = "1.17.0")]
360360
pub fn into_raw(this: Self) -> *const T {
361-
let ptr = unsafe { &mut (*this.ptr.as_mut_ptr()).value as *const _ };
361+
let ptr: *const T = &*this;
362362
mem::forget(this);
363363
ptr
364364
}
@@ -395,7 +395,11 @@ impl<T> Rc<T> {
395395
pub unsafe fn from_raw(ptr: *const T) -> Self {
396396
// To find the corresponding pointer to the `RcBox` we need to subtract the offset of the
397397
// `value` field from the pointer.
398-
Rc { ptr: Shared::new((ptr as *const u8).offset(-offset_of!(RcBox<T>, value)) as *const _) }
398+
399+
let ptr = (ptr as *const u8).offset(-offset_of!(RcBox<T>, value));
400+
Rc {
401+
ptr: Shared::new(ptr as *mut u8 as *mut _)
402+
}
399403
}
400404
}
401405

@@ -451,7 +455,7 @@ impl<T> Rc<[T]> {
451455
// Free the original allocation without freeing its (moved) contents.
452456
box_free(Box::into_raw(value));
453457

454-
Rc { ptr: Shared::new(ptr as *const _) }
458+
Rc { ptr: Shared::new(ptr as *mut _) }
455459
}
456460
}
457461
}
@@ -553,8 +557,9 @@ impl<T: ?Sized> Rc<T> {
553557
#[stable(feature = "rc_unique", since = "1.4.0")]
554558
pub fn get_mut(this: &mut Self) -> Option<&mut T> {
555559
if Rc::is_unique(this) {
556-
let inner = unsafe { &mut *this.ptr.as_mut_ptr() };
557-
Some(&mut inner.value)
560+
unsafe {
561+
Some(&mut this.ptr.as_mut().value)
562+
}
558563
} else {
559564
None
560565
}
@@ -578,9 +583,7 @@ impl<T: ?Sized> Rc<T> {
578583
/// assert!(!Rc::ptr_eq(&five, &other_five));
579584
/// ```
580585
pub fn ptr_eq(this: &Self, other: &Self) -> bool {
581-
let this_ptr: *const RcBox<T> = *this.ptr;
582-
let other_ptr: *const RcBox<T> = *other.ptr;
583-
this_ptr == other_ptr
586+
this.ptr.as_ptr() == other.ptr.as_ptr()
584587
}
585588
}
586589

@@ -623,7 +626,7 @@ impl<T: Clone> Rc<T> {
623626
} else if Rc::weak_count(this) != 0 {
624627
// Can just steal the data, all that's left is Weaks
625628
unsafe {
626-
let mut swap = Rc::new(ptr::read(&(**this.ptr).value));
629+
let mut swap = Rc::new(ptr::read(&this.ptr.as_ref().value));
627630
mem::swap(this, &mut swap);
628631
swap.dec_strong();
629632
// Remove implicit strong-weak ref (no need to craft a fake
@@ -637,8 +640,9 @@ impl<T: Clone> Rc<T> {
637640
// reference count is guaranteed to be 1 at this point, and we required
638641
// the `Rc<T>` itself to be `mut`, so we're returning the only possible
639642
// reference to the inner value.
640-
let inner = unsafe { &mut *this.ptr.as_mut_ptr() };
641-
&mut inner.value
643+
unsafe {
644+
&mut this.ptr.as_mut().value
645+
}
642646
}
643647
}
644648

@@ -683,12 +687,12 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc<T> {
683687
/// ```
684688
fn drop(&mut self) {
685689
unsafe {
686-
let ptr = self.ptr.as_mut_ptr();
690+
let ptr = self.ptr.as_ptr();
687691

688692
self.dec_strong();
689693
if self.strong() == 0 {
690694
// destroy the contained object
691-
ptr::drop_in_place(&mut (*ptr).value);
695+
ptr::drop_in_place(self.ptr.as_mut());
692696

693697
// remove the implicit "strong weak" pointer now that we've
694698
// destroyed the contents.
@@ -925,7 +929,7 @@ impl<T: ?Sized + fmt::Debug> fmt::Debug for Rc<T> {
925929
#[stable(feature = "rust1", since = "1.0.0")]
926930
impl<T: ?Sized> fmt::Pointer for Rc<T> {
927931
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
928-
fmt::Pointer::fmt(&*self.ptr, f)
932+
fmt::Pointer::fmt(&self.ptr, f)
929933
}
930934
}
931935

@@ -1067,7 +1071,7 @@ impl<T: ?Sized> Drop for Weak<T> {
10671071
/// ```
10681072
fn drop(&mut self) {
10691073
unsafe {
1070-
let ptr = *self.ptr;
1074+
let ptr = self.ptr.as_ptr();
10711075

10721076
self.dec_weak();
10731077
// the weak count starts at 1, and will only go to zero if all
@@ -1175,12 +1179,7 @@ impl<T: ?Sized> RcBoxPtr<T> for Rc<T> {
11751179
#[inline(always)]
11761180
fn inner(&self) -> &RcBox<T> {
11771181
unsafe {
1178-
// Safe to assume this here, as if it weren't true, we'd be breaking
1179-
// the contract anyway.
1180-
// This allows the null check to be elided in the destructor if we
1181-
// manipulated the reference count in the same function.
1182-
assume(!(*(&self.ptr as *const _ as *const *const ())).is_null());
1183-
&(**self.ptr)
1182+
self.ptr.as_ref()
11841183
}
11851184
}
11861185
}
@@ -1189,12 +1188,7 @@ impl<T: ?Sized> RcBoxPtr<T> for Weak<T> {
11891188
#[inline(always)]
11901189
fn inner(&self) -> &RcBox<T> {
11911190
unsafe {
1192-
// Safe to assume this here, as if it weren't true, we'd be breaking
1193-
// the contract anyway.
1194-
// This allows the null check to be elided in the destructor if we
1195-
// manipulated the reference count in the same function.
1196-
assume(!(*(&self.ptr as *const _ as *const *const ())).is_null());
1197-
&(**self.ptr)
1191+
self.ptr.as_ref()
11981192
}
11991193
}
12001194
}

src/libarena/lib.rs

+1-3
Original file line numberDiff line numberDiff line change
@@ -31,7 +31,6 @@
3131
#![feature(alloc)]
3232
#![feature(core_intrinsics)]
3333
#![feature(dropck_eyepatch)]
34-
#![feature(heap_api)]
3534
#![feature(generic_param_attrs)]
3635
#![feature(staged_api)]
3736
#![cfg_attr(test, feature(test))]
@@ -48,7 +47,6 @@ use std::mem;
4847
use std::ptr;
4948
use std::slice;
5049

51-
use alloc::heap;
5250
use alloc::raw_vec::RawVec;
5351

5452
/// An arena that can hold objects of only one type.
@@ -140,7 +138,7 @@ impl<T> TypedArena<T> {
140138
unsafe {
141139
if mem::size_of::<T>() == 0 {
142140
self.ptr.set(intrinsics::arith_offset(self.ptr.get() as *mut u8, 1) as *mut T);
143-
let ptr = heap::EMPTY as *mut T;
141+
let ptr = mem::align_of::<T>() as *mut T;
144142
// Don't drop the object. This `write` is equivalent to `forget`.
145143
ptr::write(ptr, object);
146144
&mut *ptr

0 commit comments

Comments
 (0)