extern crate alloc;

+use rustc_data_structures::cold_path;
use rustc_data_structures::sync::MTLock;
+use smallvec::SmallVec;

use std::cell::{Cell, RefCell};
use std::cmp;
@@ -55,13 +57,16 @@ pub struct TypedArena<T> {
struct TypedArenaChunk<T> {
    /// The raw storage for the arena chunk.
    storage: RawVec<T>,
+    /// The number of valid entries in the chunk.
+    entries: usize,
}

impl<T> TypedArenaChunk<T> {
    #[inline]
    unsafe fn new(capacity: usize) -> TypedArenaChunk<T> {
        TypedArenaChunk {
            storage: RawVec::with_capacity(capacity),
+            entries: 0,
        }
    }
@@ -149,6 +154,27 @@ impl<T> TypedArena<T> {
        }
    }

+    #[inline]
+    fn can_allocate(&self, len: usize) -> bool {
+        let available_capacity_bytes = self.end.get() as usize - self.ptr.get() as usize;
+        let at_least_bytes = len.checked_mul(mem::size_of::<T>()).unwrap();
+        available_capacity_bytes >= at_least_bytes
+    }
+
+    #[inline]
+    unsafe fn alloc_raw_slice(&self, len: usize) -> *mut T {
+        assert!(mem::size_of::<T>() != 0);
+        assert!(len != 0);
+
+        if !self.can_allocate(len) {
+            self.grow(len);
+        }
+
+        let start_ptr = self.ptr.get();
+        self.ptr.set(start_ptr.add(len));
+        start_ptr
+    }
+
    /// Allocates a slice of objects that are copied into the `TypedArena`, returning a mutable
    /// reference to it. Will panic if passed zero-sized types.
    ///
@@ -161,21 +187,63 @@ impl<T> TypedArena<T> {
    where
        T: Copy,
    {
+        unsafe {
+            let len = slice.len();
+            let start_ptr = self.alloc_raw_slice(len);
+            slice.as_ptr().copy_to_nonoverlapping(start_ptr, len);
+            slice::from_raw_parts_mut(start_ptr, len)
+        }
+    }
+
+    #[inline]
+    pub fn alloc_from_iter<I: IntoIterator<Item = T>>(&self, iter: I) -> &mut [T] {
        assert!(mem::size_of::<T>() != 0);
-        assert!(slice.len() != 0);
+        let mut iter = iter.into_iter();
+        let size_hint = iter.size_hint();

-        let available_capacity_bytes = self.end.get() as usize - self.ptr.get() as usize;
-        let at_least_bytes = slice.len() * mem::size_of::<T>();
-        if available_capacity_bytes < at_least_bytes {
-            self.grow(slice.len());
-        }
+        match size_hint {
+            (min, Some(max)) if min == max => {
+                if min == 0 {
+                    return &mut [];
+                }

-        unsafe {
-            let start_ptr = self.ptr.get();
-            let arena_slice = slice::from_raw_parts_mut(start_ptr, slice.len());
-            self.ptr.set(start_ptr.add(arena_slice.len()));
-            arena_slice.copy_from_slice(slice);
-            arena_slice
+                if !self.can_allocate(min) {
+                    self.grow(min);
+                }
+
+                let slice = self.ptr.get();
+
+                unsafe {
+                    let mut ptr = self.ptr.get();
+                    for _ in 0..min {
+                        // Write into uninitialized memory.
+                        ptr::write(ptr, iter.next().unwrap());
+                        // Advance the pointer.
+                        ptr = ptr.offset(1);
+                        // Update the pointer per iteration so if `iter.next()` panics
+                        // we destroy the correct amount.
+                        self.ptr.set(ptr);
+                    }
+                    slice::from_raw_parts_mut(slice, min)
+                }
+            }
+            _ => {
+                cold_path(move || -> &mut [T] {
+                    let mut vec: SmallVec<[_; 8]> = iter.collect();
+                    if vec.is_empty() {
+                        return &mut [];
+                    }
+                    // Move the content to the arena by copying it and then forgetting
+                    // the content of the SmallVec.
+                    unsafe {
+                        let len = vec.len();
+                        let start_ptr = self.alloc_raw_slice(len);
+                        vec.as_ptr().copy_to_nonoverlapping(start_ptr, len);
+                        mem::forget(vec.drain());
+                        slice::from_raw_parts_mut(start_ptr, len)
+                    }
+                })
+            }
        }
    }
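// --- Illustrative usage sketch (editor's note, not part of the patch). It shows how the
// --- new `TypedArena::alloc_from_iter` behaves on the two `size_hint` branches above.
// --- Assumptions: the crate is available as `arena` and `TypedArena` implements `Default`
// --- as it does elsewhere in this file; names below are hypothetical.
fn typed_arena_alloc_from_iter_demo() {
    let arena: arena::TypedArena<u32> = Default::default();
    // Exact size_hint (3, Some(3)): the fast path writes each element straight
    // into the current chunk, advancing `self.ptr` per element.
    let tripled = arena.alloc_from_iter((1u32..=3).map(|x| x * 3));
    assert_eq!(&tripled[..], &[3, 6, 9][..]);
    // Inexact size_hint (a filter): the cold path collects into a SmallVec first,
    // then copies the elements into the arena via `alloc_raw_slice`.
    let evens = arena.alloc_from_iter((0u32..10).filter(|x| x % 2 == 0));
    assert_eq!(evens.len(), 5);
}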
@@ -189,6 +257,7 @@ impl<T> TypedArena<T> {
            if let Some(last_chunk) = chunks.last_mut() {
                let used_bytes = self.ptr.get() as usize - last_chunk.start() as usize;
                let currently_used_cap = used_bytes / mem::size_of::<T>();
+                last_chunk.entries = currently_used_cap;
                if last_chunk.storage.reserve_in_place(currently_used_cap, n) {
                    self.end.set(last_chunk.end());
                    return;
@@ -222,8 +291,7 @@ impl<T> TypedArena<T> {
                let len = chunks_borrow.len();
                // If `T` is ZST, code below has no effect.
                for mut chunk in chunks_borrow.drain(..len-1) {
-                    let cap = chunk.storage.cap();
-                    chunk.destroy(cap);
+                    chunk.destroy(chunk.entries);
                }
            }
        }
@@ -265,8 +333,7 @@ unsafe impl<#[may_dangle] T> Drop for TypedArena<T> {
                self.clear_last_chunk(&mut last_chunk);
                // The last chunk will be dropped. Destroy all other chunks.
                for chunk in chunks_borrow.iter_mut() {
-                    let cap = chunk.storage.cap();
-                    chunk.destroy(cap);
+                    chunk.destroy(chunk.entries);
                }
            }
            // RawVec handles deallocation of `last_chunk` and `self.chunks`.
@@ -410,6 +477,51 @@ impl DroplessArena {
            arena_slice
        }
    }
+
+    #[inline]
+    pub fn alloc_from_iter<T, I: IntoIterator<Item = T>>(&self, iter: I) -> &mut [T] {
+        let mut iter = iter.into_iter();
+        assert!(mem::size_of::<T>() != 0);
+        assert!(!mem::needs_drop::<T>());
+
+        let size_hint = iter.size_hint();
+
+        match size_hint {
+            (min, Some(max)) if min == max => {
+                if min == 0 {
+                    return &mut []
+                }
+                let size = min.checked_mul(mem::size_of::<T>()).unwrap();
+                let mem = self.alloc_raw(size, mem::align_of::<T>()) as *mut _ as *mut T;
+                unsafe {
+                    for i in 0..min {
+                        ptr::write(mem.offset(i as isize), iter.next().unwrap())
+                    }
+                    slice::from_raw_parts_mut(mem, min)
+                }
+            }
+            (_, _) => {
+                cold_path(move || -> &mut [T] {
+                    let mut vec: SmallVec<[_; 8]> = iter.collect();
+                    if vec.is_empty() {
+                        return &mut [];
+                    }
+                    // Move the content to the arena by copying it and then forgetting
+                    // the content of the SmallVec.
+                    unsafe {
+                        let len = vec.len();
+                        let start_ptr = self.alloc_raw(
+                            len * mem::size_of::<T>(),
+                            mem::align_of::<T>()
+                        ) as *mut _ as *mut T;
+                        vec.as_ptr().copy_to_nonoverlapping(start_ptr, len);
+                        mem::forget(vec.drain());
+                        slice::from_raw_parts_mut(start_ptr, len)
+                    }
+                })
+            }
+        }
+    }
}

#[derive(Default)]
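// --- Illustrative usage sketch (editor's note, not part of the patch). It shows the
// --- corresponding `DroplessArena::alloc_from_iter` path added above. Assumptions: the
// --- crate is available as `arena` and `DroplessArena` implements `Default` as elsewhere
// --- in this file; names below are hypothetical.
fn dropless_arena_alloc_from_iter_demo() {
    let arena: arena::DroplessArena = Default::default();
    // `T` must not need drop: the `assert!(!mem::needs_drop::<T>())` above enforces this
    // at runtime, so a plain Copy type such as u32 is used here.
    let squares = arena.alloc_from_iter((1u32..=4).map(|x| x * x));
    assert_eq!(&squares[..], &[1, 4, 9, 16][..]);
    // An iterator without an exact size_hint takes the SmallVec cold path before the
    // elements are copied into memory returned by `alloc_raw`.
    let odds = arena.alloc_from_iter((0u32..10).filter(|x| x % 2 == 1));
    assert_eq!(odds.len(), 5);
}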