@@ -172,8 +172,8 @@ impl<T, const N: usize> IterExt<T> for std::array::IntoIter<T, N> {
             return &mut [];
         }
         // Move the content to the arena by copying and then forgetting it.
+        let start_ptr = arena.alloc_raw_slice(len);
         unsafe {
-            let start_ptr = arena.alloc_raw_slice(len);
             self.as_slice().as_ptr().copy_to_nonoverlapping(start_ptr, len);
             mem::forget(self);
             slice::from_raw_parts_mut(start_ptr, len)
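This hunk, and the two that follow for `Vec<T>` and `SmallVec<A>`, hoist the `alloc_raw_slice` call out of the `unsafe` block: the reservation becomes a safe call (see the `TypedArena` hunk further down), leaving only the raw copy and `from_raw_parts_mut` under `unsafe`. The underlying "copy, then neuter the source" move pattern looks like this in isolation; `demo_move` is a hypothetical stand-in with a boxed buffer playing the role of the arena chunk, and the sketch assumes a toolchain where `Box::new_uninit_slice` is stable:

```rust
use std::mem::MaybeUninit;

// Hypothetical helper illustrating the move-by-copy pattern above.
fn demo_move<T>(mut v: Vec<T>) -> Box<[T]> {
    let len = v.len();
    let mut buf: Box<[MaybeUninit<T>]> = Box::new_uninit_slice(len);
    unsafe {
        // Copy the elements bitwise into the destination buffer...
        v.as_ptr().copy_to_nonoverlapping(buf.as_mut_ptr() as *mut T, len);
        // ...then mark the source empty so dropping it frees only its
        // backing allocation, not the elements we just moved out.
        v.set_len(0);
        buf.assume_init()
    }
}

fn main() {
    let moved = demo_move(vec![String::from("a"), String::from("b")]);
    assert_eq!(&*moved[0], "a");
}
```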
@@ -189,8 +189,8 @@ impl<T> IterExt<T> for Vec<T> {
             return &mut [];
         }
         // Move the content to the arena by copying and then forgetting it.
+        let start_ptr = arena.alloc_raw_slice(len);
         unsafe {
-            let start_ptr = arena.alloc_raw_slice(len);
             self.as_ptr().copy_to_nonoverlapping(start_ptr, len);
             self.set_len(0);
             slice::from_raw_parts_mut(start_ptr, len)
@@ -206,8 +206,8 @@ impl<A: smallvec::Array> IterExt<A::Item> for SmallVec<A> {
             return &mut [];
         }
         // Move the content to the arena by copying and then forgetting it.
+        let start_ptr = arena.alloc_raw_slice(len);
         unsafe {
-            let start_ptr = arena.alloc_raw_slice(len);
             self.as_ptr().copy_to_nonoverlapping(start_ptr, len);
             self.set_len(0);
             slice::from_raw_parts_mut(start_ptr, len)
@@ -250,25 +250,20 @@ impl<T> TypedArena<T> {
         available_bytes >= additional_bytes
     }

-    /// Ensures there's enough space in the current chunk to fit `len` objects.
     #[inline]
-    fn ensure_capacity(&self, additional: usize) {
-        if !self.can_allocate(additional) {
-            self.grow(additional);
-            debug_assert!(self.can_allocate(additional));
-        }
-    }
-
-    #[inline]
-    unsafe fn alloc_raw_slice(&self, len: usize) -> *mut T {
+    fn alloc_raw_slice(&self, len: usize) -> *mut T {
         assert!(mem::size_of::<T>() != 0);
         assert!(len != 0);

-        self.ensure_capacity(len);
+        // Ensure the current chunk can fit `len` objects.
+        if !self.can_allocate(len) {
+            self.grow(len);
+            debug_assert!(self.can_allocate(len));
+        }

         let start_ptr = self.ptr.get();
-        // SAFETY: `self.ensure_capacity` makes sure that there is enough space
-        // for `len` elements.
+        // SAFETY: `can_allocate`/`grow` ensures that there is enough space for
+        // `len` elements.
         unsafe { self.ptr.set(start_ptr.add(len)) };
         start_ptr
     }
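Folding `ensure_capacity` into `alloc_raw_slice` lets the whole function drop its `unsafe` qualifier, which works because handing out a raw pointer is not itself unsafe in Rust; the obligation moves to the dereference site. A minimal sketch of that principle (names here are illustrative, not from rustc_arena):

```rust
// Producing a raw pointer is safe; only using it requires `unsafe`.
fn make_raw(v: &mut Vec<u8>) -> *mut u8 {
    v.as_mut_ptr() // no dereference happens here
}

fn main() {
    let mut v = vec![0u8; 4];
    let p = make_raw(&mut v);
    unsafe {
        // The `unsafe` obligation lives at the write, not at the creation.
        p.write(42);
    }
    assert_eq!(v[0], 42);
}
```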
@@ -407,6 +402,8 @@ impl Default for DroplessArena {
     #[inline]
     fn default() -> DroplessArena {
         DroplessArena {
+            // We set both `start` and `end` to 0 so that the first call to
+            // alloc() will trigger a grow().
            start: Cell::new(ptr::null_mut()),
            end: Cell::new(ptr::null_mut()),
            chunks: Default::default(),
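The new comment documents an existing invariant: with `start` and `end` both null (address 0), the bump computation in the rewritten `alloc_raw` below can never succeed, so the first allocation always falls through to `grow`. In plain address arithmetic:

```rust
fn main() {
    let end: usize = 0;    // the address stored in `self.end` when fresh
    let bytes: usize = 16; // an aligned request size
    // `checked_sub` underflows, so the fast path is skipped and
    // `grow(layout)` runs instead.
    assert!(end.checked_sub(bytes).is_none());
}
```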
@@ -415,9 +412,11 @@ impl Default for DroplessArena {
 }

 impl DroplessArena {
+    #[inline(never)]
+    #[cold]
     fn grow(&self, layout: Layout) {
         // Add some padding so we can align `self.end` while
-        // stilling fitting in a `layout` allocation.
+        // still fitting in a `layout` allocation.
         let additional = layout.size() + cmp::max(DROPLESS_ALIGNMENT, layout.align()) - 1;

         unsafe {
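Marking `grow` itself `#[inline(never)]` and `#[cold]` (previously those attributes sat on the `grow_and_alloc*` wrappers removed further down) keeps the refill path out of the inlined allocation fast path. The same hot/cold split in miniature, with hypothetical names:

```rust
#[inline(never)]
#[cold]
fn slow_refill(buf: &mut Vec<u8>) {
    // Rarely executed: keep it out of line so callers stay small.
    buf.reserve(4096);
}

#[inline]
fn push_byte(buf: &mut Vec<u8>, b: u8) {
    if buf.len() == buf.capacity() {
        slow_refill(buf); // cold branch, almost never taken
    }
    buf.push(b);
}

fn main() {
    let mut buf = Vec::new();
    push_byte(&mut buf, 1);
    assert_eq!(buf, [1]);
}
```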
@@ -441,7 +440,7 @@ impl DroplessArena {
             let mut chunk = ArenaChunk::new(align_up(new_cap, PAGE));
             self.start.set(chunk.start());

-            // Align the end to DROPLESS_ALIGNMENT
+            // Align the end to DROPLESS_ALIGNMENT.
             let end = align_down(chunk.end().addr(), DROPLESS_ALIGNMENT);

             // Make sure we don't go past `start`. This should not happen since the allocation
@@ -454,69 +453,48 @@ impl DroplessArena {
         }
     }

-    #[inline(never)]
-    #[cold]
-    fn grow_and_alloc_raw(&self, layout: Layout) -> *mut u8 {
-        self.grow(layout);
-        self.alloc_raw_without_grow(layout).unwrap()
-    }
-
-    #[inline(never)]
-    #[cold]
-    fn grow_and_alloc<T>(&self) -> *mut u8 {
-        self.grow_and_alloc_raw(Layout::new::<T>())
-    }
-
-    /// Allocates a byte slice with specified layout from the current memory
-    /// chunk. Returns `None` if there is no free space left to satisfy the
-    /// request.
-    #[inline]
-    fn alloc_raw_without_grow(&self, layout: Layout) -> Option<*mut u8> {
-        let start = self.start.get().addr();
-        let old_end = self.end.get();
-        let end = old_end.addr();
-
-        // Align allocated bytes so that `self.end` stays aligned to DROPLESS_ALIGNMENT
-        let bytes = align_up(layout.size(), DROPLESS_ALIGNMENT);
-
-        // Tell LLVM that `end` is aligned to DROPLESS_ALIGNMENT
-        unsafe { intrinsics::assume(end == align_down(end, DROPLESS_ALIGNMENT)) };
-
-        let new_end = align_down(end.checked_sub(bytes)?, layout.align());
-        if start <= new_end {
-            let new_end = old_end.with_addr(new_end);
-            // `new_end` is aligned to DROPLESS_ALIGNMENT as `align_down` preserves alignment
-            // as both `end` and `bytes` are already aligned to DROPLESS_ALIGNMENT.
-            self.end.set(new_end);
-            Some(new_end)
-        } else {
-            None
-        }
-    }
-
     #[inline]
     pub fn alloc_raw(&self, layout: Layout) -> *mut u8 {
         assert!(layout.size() != 0);
-        if let Some(a) = self.alloc_raw_without_grow(layout) {
-            return a;
+
+        // This loop executes once or twice: if allocation fails the first
+        // time, the `grow` ensures it will succeed the second time.
+        loop {
+            let start = self.start.get().addr();
+            let old_end = self.end.get();
+            let end = old_end.addr();
+
+            // Align allocated bytes so that `self.end` stays aligned to
+            // DROPLESS_ALIGNMENT.
+            let bytes = align_up(layout.size(), DROPLESS_ALIGNMENT);
+
+            // Tell LLVM that `end` is aligned to DROPLESS_ALIGNMENT.
+            unsafe { intrinsics::assume(end == align_down(end, DROPLESS_ALIGNMENT)) };
+
+            if let Some(sub) = end.checked_sub(bytes) {
+                let new_end = align_down(sub, layout.align());
+                if start <= new_end {
+                    let new_end = old_end.with_addr(new_end);
+                    // `new_end` is aligned to DROPLESS_ALIGNMENT as `align_down`
+                    // preserves alignment as both `end` and `bytes` are already
+                    // aligned to DROPLESS_ALIGNMENT.
+                    self.end.set(new_end);
+                    return new_end;
+                }
+            }
+
+            // No free space left. Allocate a new chunk to satisfy the request.
+            // On failure the grow will panic or abort.
+            self.grow(layout);
         }
-        // No free space left. Allocate a new chunk to satisfy the request.
-        // On failure the grow will panic or abort.
-        self.grow_and_alloc_raw(layout)
     }

     #[inline]
     pub fn alloc<T>(&self, object: T) -> &mut T {
         assert!(!mem::needs_drop::<T>());
         assert!(mem::size_of::<T>() != 0);

-        let mem = if let Some(a) = self.alloc_raw_without_grow(Layout::for_value::<T>(&object)) {
-            a
-        } else {
-            // No free space left. Allocate a new chunk to satisfy the request.
-            // On failure the grow will panic or abort.
-            self.grow_and_alloc::<T>()
-        } as *mut T;
+        let mem = self.alloc_raw(Layout::new::<T>()) as *mut T;

         unsafe {
             // Write into uninitialized memory.
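The loop folds the old `alloc_raw_without_grow`/`grow_and_alloc_raw` pair into one body: the fast path either returns or falls through to `grow`, and the retry after a grow is guaranteed to succeed. `alloc` now funnels through `alloc_raw` rather than duplicating the fast/slow split; `Layout::for_value::<T>(&object)` and `Layout::new::<T>()` are equivalent for a sized, by-value `T`. The bump arithmetic leans on the file's `align_up`/`align_down` helpers, which for power-of-two alignments are conventionally written like this (a sketch, not the exact rustc_arena source):

```rust
fn align_up(val: usize, align: usize) -> usize {
    debug_assert!(align.is_power_of_two());
    (val + align - 1) & !(align - 1)
}

fn align_down(val: usize, align: usize) -> usize {
    debug_assert!(align.is_power_of_two());
    val & !(align - 1)
}

fn main() {
    // Rounding a 13-byte request up keeps `end` 8-aligned after the bump.
    assert_eq!(align_up(13, 8), 16);
    // Rounding the new end down satisfies the layout's own alignment.
    assert_eq!(align_down(13, 8), 8);
}
```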
@@ -713,10 +691,10 @@ pub macro declare_arena([$($a:tt $name:ident: $ty:ty,)*]) {
         }

         #[allow(clippy::mut_from_ref)]
-        pub fn alloc_from_iter<'a, T: ArenaAllocatable<'tcx, C>, C>(
-            &'a self,
+        pub fn alloc_from_iter<T: ArenaAllocatable<'tcx, C>, C>(
+            &self,
             iter: impl ::std::iter::IntoIterator<Item = T>,
-        ) -> &'a mut [T] {
+        ) -> &mut [T] {
             T::allocate_from_iter(self, iter)
         }
     }
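The final hunk drops the explicit `'a` from the macro-generated `alloc_from_iter`: lifetime elision already ties the returned `&mut [T]` to `&self`, so the two spellings are equivalent. In miniature, with illustrative names:

```rust
struct Holder(Vec<u32>);

impl Holder {
    // Explicit lifetime, as before the change...
    fn explicit<'a>(&'a mut self) -> &'a mut [u32] {
        &mut self.0
    }
    // ...and the elided form the diff switches to; identical to the
    // borrow checker.
    fn elided(&mut self) -> &mut [u32] {
        &mut self.0
    }
}

fn main() {
    let mut h = Holder(vec![1, 2, 3]);
    h.explicit()[0] = 10;
    h.elided()[1] = 20;
    assert_eq!(h.0, [10, 20, 3]);
}
```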