
Commit bb306b0

Auto merge of #59536 - Zoxc:the-arena, r=<try>

Introduce an arena type which may be used to allocate a list of types with destructors. You can also specify that you want deserializers for `&'tcx [T]` and `&'tcx T` for a type in the list, which will allocate those using the arena.

Based on #59517 and #59533. Look at the last commit for the interesting changes.

An alternative to #56448.

cc @michaelwoerister @eddyb
r? @oli-obk

2 parents befeeb7 + 114a000
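For readers skimming the diff, here is a minimal usage sketch of the `alloc_from_iter` entry point added below. It is hypothetical, not code from this commit: the `arena` crate import, the construction via `Default`, and the values are illustrative assumptions.

    // Hypothetical usage sketch; assumes the in-tree `arena` crate.
    use arena::TypedArena;

    fn main() {
        let arena: TypedArena<u32> = TypedArena::default();
        // (0..8) reports an exact size_hint of (8, Some(8)), so the
        // elements are written directly into arena-owned memory.
        let squares: &mut [u32] = arena.alloc_from_iter((0..8u32).map(|i| i * i));
        assert_eq!(squares[3], 9);
    }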

File tree: 73 files changed (+1689, -2354 lines)


Cargo.lock (+1)

@@ -54,6 +54,7 @@ name = "arena"
 version = "0.0.0"
 dependencies = [
  "rustc_data_structures 0.0.0",
+ "smallvec 0.6.7 (registry+https://github.com/rust-lang/crates.io-index)",
 ]

 [[package]]

src/libarena/Cargo.toml (+1)

@@ -11,3 +11,4 @@ crate-type = ["dylib"]

 [dependencies]
 rustc_data_structures = { path = "../librustc_data_structures" }
+smallvec = { version = "0.6.7", features = ["union", "may_dangle"] }

src/libarena/lib.rs (+128, -16)

@@ -23,7 +23,9 @@

 extern crate alloc;

+use rustc_data_structures::cold_path;
 use rustc_data_structures::sync::MTLock;
+use smallvec::SmallVec;

 use std::cell::{Cell, RefCell};
 use std::cmp;
@@ -55,13 +57,16 @@ pub struct TypedArena<T> {
 struct TypedArenaChunk<T> {
     /// The raw storage for the arena chunk.
     storage: RawVec<T>,
+    /// The number of valid entries in the chunk.
+    entries: usize,
 }

 impl<T> TypedArenaChunk<T> {
     #[inline]
     unsafe fn new(capacity: usize) -> TypedArenaChunk<T> {
         TypedArenaChunk {
             storage: RawVec::with_capacity(capacity),
+            entries: 0,
         }
     }

@@ -149,6 +154,27 @@ impl<T> TypedArena<T> {
         }
     }

+    #[inline]
+    fn can_allocate(&self, len: usize) -> bool {
+        let available_capacity_bytes = self.end.get() as usize - self.ptr.get() as usize;
+        let at_least_bytes = len.checked_mul(mem::size_of::<T>()).unwrap();
+        available_capacity_bytes >= at_least_bytes
+    }
+
+    #[inline]
+    unsafe fn alloc_raw_slice(&self, len: usize) -> *mut T {
+        assert!(mem::size_of::<T>() != 0);
+        assert!(len != 0);
+
+        if !self.can_allocate(len) {
+            self.grow(len);
+        }
+
+        let start_ptr = self.ptr.get();
+        self.ptr.set(start_ptr.add(len));
+        start_ptr
+    }
+
     /// Allocates a slice of objects that are copied into the `TypedArena`, returning a mutable
     /// reference to it. Will panic if passed a zero-sized type.
     ///
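For orientation, a small self-contained sketch (hypothetical, not part of the commit) of the arithmetic behind `can_allocate`: the free space is the byte distance between the bump pointer and the chunk end, and the request size is computed with `checked_mul` so an overflowing request panics rather than wrapping.

    // Hypothetical sketch of the capacity check in `can_allocate`.
    fn fits(available_bytes: usize, len: usize, elem_size: usize) -> bool {
        // Mirrors `len.checked_mul(mem::size_of::<T>()).unwrap()`:
        // overflow panics instead of silently wrapping.
        available_bytes >= len.checked_mul(elem_size).unwrap()
    }

    fn main() {
        // With 64 free bytes and 8-byte elements, 8 entries fit exactly...
        assert!(fits(64, 8, 8));
        // ...while a 9th would force `grow` to allocate a new chunk.
        assert!(!fits(64, 9, 8));
    }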
@@ -161,21 +187,63 @@ impl<T> TypedArena<T> {
     where
         T: Copy,
     {
+        unsafe {
+            let len = slice.len();
+            let start_ptr = self.alloc_raw_slice(len);
+            slice.as_ptr().copy_to_nonoverlapping(start_ptr, len);
+            slice::from_raw_parts_mut(start_ptr, len)
+        }
+    }
+
+    #[inline]
+    pub fn alloc_from_iter<I: IntoIterator<Item = T>>(&self, iter: I) -> &mut [T] {
         assert!(mem::size_of::<T>() != 0);
-        assert!(slice.len() != 0);
+        let mut iter = iter.into_iter();
+        let size_hint = iter.size_hint();

-        let available_capacity_bytes = self.end.get() as usize - self.ptr.get() as usize;
-        let at_least_bytes = slice.len() * mem::size_of::<T>();
-        if available_capacity_bytes < at_least_bytes {
-            self.grow(slice.len());
-        }
+        match size_hint {
+            (min, Some(max)) if min == max => {
+                if min == 0 {
+                    return &mut [];
+                }

-        unsafe {
-            let start_ptr = self.ptr.get();
-            let arena_slice = slice::from_raw_parts_mut(start_ptr, slice.len());
-            self.ptr.set(start_ptr.add(arena_slice.len()));
-            arena_slice.copy_from_slice(slice);
-            arena_slice
+                if !self.can_allocate(min) {
+                    self.grow(min);
+                }
+
+                let slice = self.ptr.get();
+
+                unsafe {
+                    let mut ptr = self.ptr.get();
+                    for _ in 0..min {
+                        // Write into uninitialized memory.
+                        ptr::write(ptr, iter.next().unwrap());
+                        // Advance the pointer.
+                        ptr = ptr.offset(1);
+                        // Update the pointer per iteration so if `iter.next()` panics
+                        // we destroy the correct amount.
+                        self.ptr.set(ptr);
+                    }
+                    slice::from_raw_parts_mut(slice, min)
+                }
+            }
+            _ => {
+                cold_path(move || -> &mut [T] {
+                    let mut vec: SmallVec<[_; 8]> = iter.collect();
+                    if vec.is_empty() {
+                        return &mut [];
+                    }
+                    // Move the content to the arena by copying it and then
+                    // forgetting the content of the SmallVec.
+                    unsafe {
+                        let len = vec.len();
+                        let start_ptr = self.alloc_raw_slice(len);
+                        vec.as_ptr().copy_to_nonoverlapping(start_ptr, len);
+                        mem::forget(vec.drain());
+                        slice::from_raw_parts_mut(start_ptr, len)
+                    }
+                })
+            }
         }
     }

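A note on the two paths in `alloc_from_iter` above: an iterator whose `size_hint` lower and upper bounds agree is written straight into uninitialized arena memory, while anything else is first collected into a `SmallVec` on the cold path and then copied over. A hedged sketch of iterators that land on each path (assuming an `arena: &TypedArena<u32>` is in scope; the values are illustrative):

    // size_hint = (4, Some(4)): exact, so the in-place fast path is taken.
    let fast = arena.alloc_from_iter(0..4u32);

    // `filter` can only report (0, Some(8)): min != max, so this allocation
    // is collected into a SmallVec first and then copied into the arena.
    let cold = arena.alloc_from_iter((0..8u32).filter(|i| i % 2 == 0));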
@@ -189,6 +257,7 @@ impl<T> TypedArena<T> {
         if let Some(last_chunk) = chunks.last_mut() {
             let used_bytes = self.ptr.get() as usize - last_chunk.start() as usize;
             let currently_used_cap = used_bytes / mem::size_of::<T>();
+            last_chunk.entries = currently_used_cap;
             if last_chunk.storage.reserve_in_place(currently_used_cap, n) {
                 self.end.set(last_chunk.end());
                 return;
@@ -222,8 +291,7 @@ impl<T> TypedArena<T> {
             let len = chunks_borrow.len();
             // If `T` is ZST, code below has no effect.
             for mut chunk in chunks_borrow.drain(..len-1) {
-                let cap = chunk.storage.cap();
-                chunk.destroy(cap);
+                chunk.destroy(chunk.entries);
             }
         }
     }
@@ -265,8 +333,7 @@ unsafe impl<#[may_dangle] T> Drop for TypedArena<T> {
             self.clear_last_chunk(&mut last_chunk);
             // The last chunk will be dropped. Destroy all other chunks.
             for chunk in chunks_borrow.iter_mut() {
-                let cap = chunk.storage.cap();
-                chunk.destroy(cap);
+                chunk.destroy(chunk.entries);
             }
         }
         // RawVec handles deallocation of `last_chunk` and `self.chunks`.
@@ -410,6 +477,51 @@ impl DroplessArena {
             arena_slice
         }
     }
+
+    #[inline]
+    pub fn alloc_from_iter<T, I: IntoIterator<Item = T>>(&self, iter: I) -> &mut [T] {
+        let mut iter = iter.into_iter();
+        assert!(mem::size_of::<T>() != 0);
+        assert!(!mem::needs_drop::<T>());
+
+        let size_hint = iter.size_hint();
+
+        match size_hint {
+            (min, Some(max)) if min == max => {
+                if min == 0 {
+                    return &mut []
+                }
+                let size = min.checked_mul(mem::size_of::<T>()).unwrap();
+                let mem = self.alloc_raw(size, mem::align_of::<T>()) as *mut _ as *mut T;
+                unsafe {
+                    for i in 0..min {
+                        ptr::write(mem.offset(i as isize), iter.next().unwrap())
+                    }
+                    slice::from_raw_parts_mut(mem, min)
+                }
+            }
+            (_, _) => {
+                cold_path(move || -> &mut [T] {
+                    let mut vec: SmallVec<[_; 8]> = iter.collect();
+                    if vec.is_empty() {
+                        return &mut [];
+                    }
+                    // Move the content to the arena by copying it and then
+                    // forgetting the content of the SmallVec.
+                    unsafe {
+                        let len = vec.len();
+                        let start_ptr = self.alloc_raw(
+                            len * mem::size_of::<T>(),
+                            mem::align_of::<T>()
+                        ) as *mut _ as *mut T;
+                        vec.as_ptr().copy_to_nonoverlapping(start_ptr, len);
+                        mem::forget(vec.drain());
+                        slice::from_raw_parts_mut(start_ptr, len)
+                    }
+                })
+            }
+        }
+    }
 }

 #[derive(Default)]
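The `DroplessArena` counterpart above is the same scheme without destructor tracking: the `assert!(!mem::needs_drop::<T>())` guard restricts it to types with no drop glue, so its chunks never need an `entries` count. A minimal hedged sketch (construction via `Default` is an assumption from the surrounding crate; the values are illustrative):

    // Hypothetical usage sketch; assumes the in-tree `arena` crate.
    use arena::DroplessArena;

    fn main() {
        let arena = DroplessArena::default();
        // u64 has no destructor, so the `needs_drop` assertion passes.
        let nums: &mut [u64] = arena.alloc_from_iter((0..4u64).map(|i| i * 10));
        assert_eq!(nums, &[0, 10, 20, 30]);
        // A type with drop glue, e.g. String, would panic at the
        // `!mem::needs_drop::<T>()` assertion instead.
    }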
