Skip to content

Commit 67b1d2a

Browse files
committed
Optimize DroplessArena arena allocation
1 parent 475be26 commit 67b1d2a

File tree

1 file changed

+55
-15
lines changed
  • compiler/rustc_arena/src

1 file changed

+55
-15
lines changed

compiler/rustc_arena/src/lib.rs

+55-15
Original file line numberDiff line numberDiff line change
@@ -11,6 +11,7 @@
1111
html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/",
1212
test(no_crate_inject, attr(deny(warnings)))
1313
)]
14+
#![feature(core_intrinsics)]
1415
#![feature(dropck_eyepatch)]
1516
#![feature(new_uninit)]
1617
#![feature(maybe_uninit_slice)]
@@ -30,11 +31,11 @@ use smallvec::SmallVec;
3031

3132
use std::alloc::Layout;
3233
use std::cell::{Cell, RefCell};
33-
use std::cmp;
3434
use std::marker::PhantomData;
3535
use std::mem::{self, MaybeUninit};
3636
use std::ptr::{self, NonNull};
3737
use std::slice;
38+
use std::{cmp, intrinsics};
3839

3940
#[inline(never)]
4041
#[cold]
@@ -363,6 +364,20 @@ unsafe impl<#[may_dangle] T> Drop for TypedArena<T> {
363364

364365
unsafe impl<T: Send> Send for TypedArena<T> {}
365366

367+
#[inline(always)]
fn align_down(val: usize, align: usize) -> usize {
    // Only power-of-two alignments are supported; for those the remainder
    // below is exactly the low bits `val & (align - 1)`.
    assert!(align.is_power_of_two());
    // Drop the remainder to round `val` down to a multiple of `align`.
    val - (val % align)
}
372+
373+
#[inline(always)]
fn align(val: usize, align: usize) -> usize {
    assert!(align.is_power_of_two());
    // Round up to the next multiple of `align`: step past the boundary,
    // then clear the low bits with the mask.
    // NOTE(review): `val + mask` can overflow for `val` near `usize::MAX`;
    // callers are assumed to pass in-range sizes — confirm.
    let mask = align - 1;
    (val + mask) & !mask
}
378+
379+
/// Alignment that `DroplessArena` keeps its `end` pointer at between
/// allocations, so word-aligned requests (the common case) need no extra
/// per-allocation alignment work.
const DROPLESS_ALIGNMENT: usize = mem::align_of::<usize>();
380+
366381
/// An arena that can hold objects of multiple different types that impl `Copy`
367382
/// and/or satisfy `!mem::needs_drop`.
368383
pub struct DroplessArena {
@@ -395,8 +410,6 @@ impl Default for DroplessArena {
395410
}
396411

397412
impl DroplessArena {
398-
#[inline(never)]
399-
#[cold]
400413
fn grow(&self, additional: usize) {
401414
unsafe {
402415
let mut chunks = self.chunks.borrow_mut();
@@ -418,11 +431,30 @@ impl DroplessArena {
418431

419432
let mut chunk = ArenaChunk::new(new_cap);
420433
self.start.set(chunk.start());
421-
self.end.set(chunk.end());
434+
435+
// Round the chunk's end address down to DROPLESS_ALIGNMENT
436+
let end = align_down(chunk.end().addr(), DROPLESS_ALIGNMENT);
437+
// Make sure the aligned end does not fall below the chunk's `start`
438+
let end = cmp::max(chunk.start().addr(), end);
439+
self.end.set(chunk.end().with_addr(end));
440+
422441
chunks.push(chunk);
423442
}
424443
}
425444

445+
/// Cold slow path of `alloc_raw`: allocate a new chunk with room for at
/// least `layout.size()` bytes, then retry the allocation.
///
/// `#[inline(never)]` + `#[cold]` keep this out of line so the hot path in
/// `alloc_raw` stays small enough to inline at call sites.
#[inline(never)]
#[cold]
fn grow_and_alloc_raw(&self, layout: Layout) -> *mut u8 {
    // On allocation failure `grow` panics or aborts, so the retry below
    // only runs with fresh capacity available.
    self.grow(layout.size());
    // Retry through the public entry point; if the new chunk is somehow
    // still too small this grows again rather than failing.
    self.alloc_raw(layout)
}
451+
452+
#[inline(never)]
453+
#[cold]
454+
fn grow_and_alloc<T>(&self) -> *mut u8 {
455+
self.grow_and_alloc_raw(Layout::new::<T>())
456+
}
457+
426458
/// Allocates a byte slice with specified layout from the current memory
427459
/// chunk. Returns `None` if there is no free space left to satisfy the
428460
/// request.
@@ -432,10 +464,13 @@ impl DroplessArena {
432464
let old_end = self.end.get();
433465
let end = old_end.addr();
434466

435-
let align = layout.align();
436-
let bytes = layout.size();
467+
// Align allocated bytes so that `self.end` stays aligned to DROPLESS_ALIGNMENT
468+
let bytes = align(layout.size(), DROPLESS_ALIGNMENT);
437469

438-
let new_end = end.checked_sub(bytes)? & !(align - 1);
470+
// Tell LLVM that `end` is aligned to DROPLESS_ALIGNMENT
471+
unsafe { intrinsics::assume(end == align_down(end, DROPLESS_ALIGNMENT)) };
472+
473+
let new_end = align_down(end.checked_sub(bytes)?, layout.align());
439474
if start <= new_end {
440475
let new_end = old_end.with_addr(new_end);
441476
self.end.set(new_end);
@@ -448,21 +483,26 @@ impl DroplessArena {
448483
#[inline]
449484
pub fn alloc_raw(&self, layout: Layout) -> *mut u8 {
450485
assert!(layout.size() != 0);
451-
loop {
452-
if let Some(a) = self.alloc_raw_without_grow(layout) {
453-
break a;
454-
}
455-
// No free space left. Allocate a new chunk to satisfy the request.
456-
// On failure the grow will panic or abort.
457-
self.grow(layout.size());
486+
if let Some(a) = self.alloc_raw_without_grow(layout) {
487+
return a;
458488
}
489+
// No free space left. Allocate a new chunk to satisfy the request.
490+
// On failure the grow will panic or abort.
491+
self.grow_and_alloc_raw(layout)
459492
}
460493

461494
#[inline]
462495
pub fn alloc<T>(&self, object: T) -> &mut T {
463496
assert!(!mem::needs_drop::<T>());
497+
assert!(mem::size_of::<T>() != 0);
464498

465-
let mem = self.alloc_raw(Layout::for_value::<T>(&object)) as *mut T;
499+
let mem = if let Some(a) = self.alloc_raw_without_grow(Layout::for_value::<T>(&object)) {
500+
a
501+
} else {
502+
// No free space left. Allocate a new chunk to satisfy the request.
503+
// On failure the grow will panic or abort.
504+
self.grow_and_alloc::<T>()
505+
} as *mut T;
466506

467507
unsafe {
468508
// Write into uninitialized memory.

0 commit comments

Comments
 (0)