     html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/",
     test(no_crate_inject, attr(deny(warnings)))
 )]
+#![feature(core_intrinsics)]
 #![feature(dropck_eyepatch)]
 #![feature(new_uninit)]
 #![feature(maybe_uninit_slice)]
@@ -30,11 +31,11 @@ use smallvec::SmallVec;
 
 use std::alloc::Layout;
 use std::cell::{Cell, RefCell};
-use std::cmp;
 use std::marker::PhantomData;
 use std::mem::{self, MaybeUninit};
 use std::ptr::{self, NonNull};
 use std::slice;
+use std::{cmp, intrinsics};
 
 #[inline(never)]
 #[cold]
@@ -363,6 +364,20 @@ unsafe impl<#[may_dangle] T> Drop for TypedArena<T> {
 
 unsafe impl<T: Send> Send for TypedArena<T> {}
 
+#[inline(always)]
+fn align_down(val: usize, align: usize) -> usize {
+    assert!(align.is_power_of_two());
+    val & !(align - 1)
+}
+
+#[inline(always)]
+fn align(val: usize, align: usize) -> usize {
+    assert!(align.is_power_of_two());
+    (val + align - 1) & !(align - 1)
+}
+
+const DROPLESS_ALIGNMENT: usize = mem::align_of::<usize>();
+
 /// An arena that can hold objects of multiple different types that impl `Copy`
 /// and/or satisfy `!mem::needs_drop`.
 pub struct DroplessArena {
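
Aside: the new `align_down`/`align` helpers are the usual power-of-two rounding idioms, and the masking only works because the assert guarantees `align` is a power of two. A minimal standalone check of the two helpers (the concrete values are illustrative, not taken from the arena):

fn align_down(val: usize, align: usize) -> usize {
    assert!(align.is_power_of_two());
    // Clearing the low bits rounds down to a multiple of `align`.
    val & !(align - 1)
}

fn align(val: usize, align: usize) -> usize {
    assert!(align.is_power_of_two());
    // Adding `align - 1` before clearing rounds up instead.
    (val + align - 1) & !(align - 1)
}

fn main() {
    assert_eq!(align_down(13, 8), 8);
    assert_eq!(align(13, 8), 16);
    assert_eq!(align(16, 8), 16); // already-aligned values are unchanged
}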
@@ -395,8 +410,6 @@ impl Default for DroplessArena {
 }
 
 impl DroplessArena {
-    #[inline(never)]
-    #[cold]
     fn grow(&self, additional: usize) {
         unsafe {
             let mut chunks = self.chunks.borrow_mut();
@@ -418,11 +431,30 @@ impl DroplessArena {
 
             let mut chunk = ArenaChunk::new(new_cap);
             self.start.set(chunk.start());
-            self.end.set(chunk.end());
+
+            // Align the end to DROPLESS_ALIGNMENT
+            let end = align_down(chunk.end().addr(), DROPLESS_ALIGNMENT);
+            // Make sure we don't go past `start`
+            let end = cmp::max(chunk.start().addr(), end);
+            self.end.set(chunk.end().with_addr(end));
+
             chunks.push(chunk);
         }
     }
 
+    #[inline(never)]
+    #[cold]
+    fn grow_and_alloc_raw(&self, layout: Layout) -> *mut u8 {
+        self.grow(layout.size());
+        self.alloc_raw(layout)
+    }
+
+    #[inline(never)]
+    #[cold]
+    fn grow_and_alloc<T>(&self) -> *mut u8 {
+        self.grow_and_alloc_raw(Layout::new::<T>())
+    }
+
     /// Allocates a byte slice with specified layout from the current memory
     /// chunk. Returns `None` if there is no free space left to satisfy the
     /// request.
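
Note on the `cmp::max` in `grow` above: rounding the chunk's end down to DROPLESS_ALIGNMENT could, for a very small chunk, land before `start`, so the result is clamped. A sketch of those two lines with made-up addresses (not taken from the arena):

fn main() {
    let (start, chunk_end) = (0x1003usize, 0x1005usize); // hypothetical tiny chunk
    let end = chunk_end & !(8 - 1);      // align_down(0x1005, 8) == 0x1000, before `start`
    let end = std::cmp::max(start, end); // so clamp back up to `start`
    assert_eq!(end, 0x1003);
}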
@@ -432,10 +464,13 @@ impl DroplessArena {
         let old_end = self.end.get();
         let end = old_end.addr();
 
-        let align = layout.align();
-        let bytes = layout.size();
+        // Align allocated bytes so that `self.end` stays aligned to DROPLESS_ALIGNMENT
+        let bytes = align(layout.size(), DROPLESS_ALIGNMENT);
 
-        let new_end = end.checked_sub(bytes)? & !(align - 1);
+        // Tell LLVM that `end` is aligned to DROPLESS_ALIGNMENT
+        unsafe { intrinsics::assume(end == align_down(end, DROPLESS_ALIGNMENT)) };
+
+        let new_end = align_down(end.checked_sub(bytes)?, layout.align());
         if start <= new_end {
             let new_end = old_end.with_addr(new_end);
             self.end.set(new_end);
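
Taken together, the changes to `alloc_raw_without_grow` keep `self.end` a multiple of DROPLESS_ALIGNMENT across allocations: the requested size is rounded up to that alignment before bumping downwards, and the `assume` tells LLVM about that invariant (presumably so the later rounding can be folded away for layouts whose alignment is at most DROPLESS_ALIGNMENT). A rough standalone model of the arithmetic, assuming DROPLESS_ALIGNMENT is 8 (a 64-bit target) and using plain integers in place of the arena's pointers:

const DROPLESS_ALIGNMENT: usize = 8; // assumed: mem::align_of::<usize>() on a 64-bit target

fn align_down(val: usize, align: usize) -> usize {
    val & !(align - 1)
}

fn align(val: usize, align: usize) -> usize {
    (val + align - 1) & !(align - 1)
}

// Model of the bump-downwards step: consume a whole multiple of
// DROPLESS_ALIGNMENT so that the next `end` starts out aligned.
fn bump_down(start: usize, end: usize, size: usize, layout_align: usize) -> Option<usize> {
    let bytes = align(size, DROPLESS_ALIGNMENT);
    let new_end = align_down(end.checked_sub(bytes)?, layout_align);
    (start <= new_end).then_some(new_end)
}

fn main() {
    // A 5-byte request still moves `end` by 8 bytes, so the returned
    // address is usize-aligned for the next caller.
    assert_eq!(bump_down(0x1000, 0x2000, 5, 1), Some(0x1ff8));
    // Not enough room: the caller falls through to the cold grow path.
    assert_eq!(bump_down(0x1000, 0x1004, 16, 8), None);
}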
@@ -448,21 +483,26 @@ impl DroplessArena {
     #[inline]
     pub fn alloc_raw(&self, layout: Layout) -> *mut u8 {
         assert!(layout.size() != 0);
-        loop {
-            if let Some(a) = self.alloc_raw_without_grow(layout) {
-                break a;
-            }
-            // No free space left. Allocate a new chunk to satisfy the request.
-            // On failure the grow will panic or abort.
-            self.grow(layout.size());
+        if let Some(a) = self.alloc_raw_without_grow(layout) {
+            return a;
         }
+        // No free space left. Allocate a new chunk to satisfy the request.
+        // On failure the grow will panic or abort.
+        self.grow_and_alloc_raw(layout)
     }
 
     #[inline]
     pub fn alloc<T>(&self, object: T) -> &mut T {
         assert!(!mem::needs_drop::<T>());
+        assert!(mem::size_of::<T>() != 0);
 
-        let mem = self.alloc_raw(Layout::for_value::<T>(&object)) as *mut T;
+        let mem = if let Some(a) = self.alloc_raw_without_grow(Layout::for_value::<T>(&object)) {
+            a
+        } else {
+            // No free space left. Allocate a new chunk to satisfy the request.
+            // On failure the grow will panic or abort.
+            self.grow_and_alloc::<T>()
+        } as *mut T;
 
         unsafe {
             // Write into uninitialized memory.
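
The `alloc_raw`/`alloc` restructuring follows a common fast-path/slow-path split: the inlined hot path only tries the current chunk, and growing is pushed into separate `#[inline(never)] #[cold]` helpers so callers' code stays small. A hypothetical, self-contained illustration of that shape (the names `Bump`, `try_fast` and the 4096-byte "chunk" are made up for the sketch):

use std::cell::Cell;

struct Bump {
    // Bytes left in the current pretend chunk.
    left: Cell<usize>,
}

impl Bump {
    #[inline]
    fn alloc(&self, n: usize) -> usize {
        if let Some(offset) = self.try_fast(n) {
            return offset; // hot path, kept small so it inlines well
        }
        self.grow_and_alloc(n) // cold path, out of line
    }

    #[inline]
    fn try_fast(&self, n: usize) -> Option<usize> {
        let left = self.left.get().checked_sub(n)?;
        self.left.set(left);
        Some(left)
    }

    #[inline(never)]
    #[cold]
    fn grow_and_alloc(&self, n: usize) -> usize {
        // Pretend a fresh chunk arrived, then retry; the real `grow`
        // sizes the new chunk so the retried allocation fits.
        self.left.set(self.left.get() + 4096);
        self.alloc(n)
    }
}

fn main() {
    let b = Bump { left: Cell::new(8) };
    assert_eq!(b.alloc(8), 0);     // fits in the current chunk
    assert_eq!(b.alloc(16), 4080); // triggers the cold grow path
}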