@@ -11,6 +11,7 @@
     html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/",
     test(no_crate_inject, attr(deny(warnings)))
 )]
+#![feature(core_intrinsics)]
 #![feature(dropck_eyepatch)]
 #![feature(new_uninit)]
 #![feature(maybe_uninit_slice)]
@@ -30,11 +31,11 @@ use smallvec::SmallVec;
 
 use std::alloc::Layout;
 use std::cell::{Cell, RefCell};
-use std::cmp;
 use std::marker::PhantomData;
 use std::mem::{self, MaybeUninit};
 use std::ptr::{self, NonNull};
 use std::slice;
+use std::{cmp, intrinsics};
 
 #[inline(never)]
 #[cold]
@@ -363,6 +364,22 @@ unsafe impl<#[may_dangle] T> Drop for TypedArena<T> {
 
 unsafe impl<T: Send> Send for TypedArena<T> {}
 
+#[inline(always)]
+fn align_down(val: usize, align: usize) -> usize {
+    debug_assert!(align.is_power_of_two());
+    val & !(align - 1)
+}
+
+#[inline(always)]
+fn align_up(val: usize, align: usize) -> usize {
+    debug_assert!(align.is_power_of_two());
+    (val + align - 1) & !(align - 1)
+}
+
+// Pointer alignment is common in compiler types, so keep `DroplessArena` aligned to them
+// to optimize away alignment code.
+const DROPLESS_ALIGNMENT: usize = mem::align_of::<usize>();
+
 /// An arena that can hold objects of multiple different types that impl `Copy`
 /// and/or satisfy `!mem::needs_drop`.
 pub struct DroplessArena {
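The two helpers added here are the standard power-of-two rounding tricks: clearing the low bits rounds down, and biasing by `align - 1` first turns that round-down into a round-up. A minimal standalone sketch (the `main` and its sample values are illustrative, not part of the patch):

fn align_down(val: usize, align: usize) -> usize {
    debug_assert!(align.is_power_of_two());
    // `align - 1` has only the low bits set; clearing them rounds down.
    val & !(align - 1)
}

fn align_up(val: usize, align: usize) -> usize {
    debug_assert!(align.is_power_of_two());
    // Biasing by `align - 1` before rounding down yields a round-up.
    (val + align - 1) & !(align - 1)
}

fn main() {
    assert_eq!(align_down(13, 8), 8);
    assert_eq!(align_up(13, 8), 16);
    assert_eq!(align_up(16, 8), 16); // already-aligned values are unchanged
}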
@@ -375,6 +392,8 @@ pub struct DroplessArena {
     /// start. (This is slightly simpler and faster than allocating upwards,
     /// see <https://fitzgeraldnick.com/2019/11/01/always-bump-downwards.html>.)
     /// When this pointer crosses the start pointer, a new chunk is allocated.
+    ///
+    /// This is kept aligned to DROPLESS_ALIGNMENT.
     end: Cell<*mut u8>,
 
     /// A vector of arena chunks.
@@ -395,9 +414,11 @@ impl Default for DroplessArena {
 }
 
 impl DroplessArena {
-    #[inline(never)]
-    #[cold]
-    fn grow(&self, additional: usize) {
+    fn grow(&self, layout: Layout) {
+        // Add some padding so we can align `self.end` while
+        // still fitting in a `layout` allocation.
+        let additional = layout.size() + cmp::max(DROPLESS_ALIGNMENT, layout.align()) - 1;
+
         unsafe {
             let mut chunks = self.chunks.borrow_mut();
             let mut new_cap;
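The `additional` computation reserves `max(DROPLESS_ALIGNMENT, layout.align()) - 1` bytes of slack, the classic over-allocate-then-align trick: with `align - 1` spare bytes, some suitably aligned address inside the buffer always leaves room for `layout.size()` bytes. A hedged standalone illustration (the `Vec` buffer here is a stand-in for the real chunk allocation):

use std::alloc::Layout;
use std::cmp;

const DROPLESS_ALIGNMENT: usize = std::mem::align_of::<usize>();

fn main() {
    let layout = Layout::new::<u64>();
    let additional = layout.size() + cmp::max(DROPLESS_ALIGNMENT, layout.align()) - 1;

    // Wherever the buffer happens to start, rounding its base address up to
    // `layout.align()` still leaves `layout.size()` bytes before its end.
    let buf = vec![0u8; additional];
    let base = buf.as_ptr() as usize;
    let aligned = (base + layout.align() - 1) & !(layout.align() - 1);
    assert!(aligned + layout.size() <= base + additional);
    println!("base = {base:#x}, aligned start = {aligned:#x}");
}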
@@ -416,13 +437,35 @@ impl DroplessArena {
             // Also ensure that this chunk can fit `additional`.
             new_cap = cmp::max(additional, new_cap);
 
-            let mut chunk = ArenaChunk::new(new_cap);
+            let mut chunk = ArenaChunk::new(align_up(new_cap, PAGE));
             self.start.set(chunk.start());
-            self.end.set(chunk.end());
+
+            // Align the end to DROPLESS_ALIGNMENT
+            let end = align_down(chunk.end().addr(), DROPLESS_ALIGNMENT);
+
+            // Make sure we don't go past `start`. This should not happen since the allocation
+            // should be at least DROPLESS_ALIGNMENT - 1 bytes.
+            debug_assert!(chunk.start().addr() <= end);
+
+            self.end.set(chunk.end().with_addr(end));
+
             chunks.push(chunk);
         }
     }
 
+    #[inline(never)]
+    #[cold]
+    fn grow_and_alloc_raw(&self, layout: Layout) -> *mut u8 {
+        self.grow(layout);
+        self.alloc_raw_without_grow(layout).unwrap()
+    }
+
+    #[inline(never)]
+    #[cold]
+    fn grow_and_alloc<T>(&self) -> *mut u8 {
+        self.grow_and_alloc_raw(Layout::new::<T>())
+    }
+
     /// Allocates a byte slice with specified layout from the current memory
     /// chunk. Returns `None` if there is no free space left to satisfy the
     /// request.
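The `grow_and_alloc*` wrappers exist so the hot entry points below shrink to one check plus a call: the refill-and-retry logic is outlined behind `#[inline(never)] #[cold]`, which keeps the fast path small enough to inline and tells LLVM which branch is rare. A generic sketch of that pattern, using a hypothetical `TinyStack` type that is not part of rustc_arena:

struct TinyStack {
    items: Vec<u32>,
    limit: usize,
}

impl TinyStack {
    #[inline]
    fn push(&mut self, v: u32) {
        if self.items.len() < self.limit {
            // Hot path: small and branch-predictable, easy to inline.
            self.items.push(v);
            return;
        }
        self.push_slow(v);
    }

    // The rare path lives in its own never-inlined, cold function so it does
    // not bloat callers that inline `push`.
    #[inline(never)]
    #[cold]
    fn push_slow(&mut self, v: u32) {
        self.limit = (self.limit * 2).max(1);
        self.items.push(v);
    }
}

fn main() {
    let mut s = TinyStack { items: Vec::new(), limit: 1 };
    s.push(1); // fast path
    s.push(2); // slow path: raises `limit`, then pushes
    assert_eq!(s.items, [1, 2]);
}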
@@ -432,12 +475,17 @@ impl DroplessArena {
         let old_end = self.end.get();
         let end = old_end.addr();
 
-        let align = layout.align();
-        let bytes = layout.size();
+        // Align allocated bytes so that `self.end` stays aligned to DROPLESS_ALIGNMENT
+        let bytes = align_up(layout.size(), DROPLESS_ALIGNMENT);
+
+        // Tell LLVM that `end` is aligned to DROPLESS_ALIGNMENT
+        unsafe { intrinsics::assume(end == align_down(end, DROPLESS_ALIGNMENT)) };
 
-        let new_end = end.checked_sub(bytes)? & !(align - 1);
+        let new_end = align_down(end.checked_sub(bytes)?, layout.align());
         if start <= new_end {
             let new_end = old_end.with_addr(new_end);
+            // `new_end` is aligned to DROPLESS_ALIGNMENT as `align_down` preserves alignment
+            // as both `end` and `bytes` are already aligned to DROPLESS_ALIGNMENT.
             self.end.set(new_end);
             Some(new_end)
         } else {
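This fast path bumps downwards, as the doc comment on `end` notes: subtract the already-rounded size, then mask to align, one branch in total. A self-contained sketch operating on plain integer addresses (the `Bump` type is hypothetical; the real arena manipulates pointers via `with_addr`):

use std::alloc::Layout;
use std::cell::Cell;

const DROPLESS_ALIGNMENT: usize = std::mem::align_of::<usize>();

fn align_down(val: usize, align: usize) -> usize {
    val & !(align - 1)
}

fn align_up(val: usize, align: usize) -> usize {
    (val + align - 1) & !(align - 1)
}

struct Bump {
    start: usize,
    end: Cell<usize>, // kept aligned to DROPLESS_ALIGNMENT
}

impl Bump {
    fn alloc_addr(&self, layout: Layout) -> Option<usize> {
        // Round the size up so `end` stays DROPLESS_ALIGNMENT-aligned.
        let bytes = align_up(layout.size(), DROPLESS_ALIGNMENT);
        let new_end = align_down(self.end.get().checked_sub(bytes)?, layout.align());
        if self.start <= new_end {
            self.end.set(new_end);
            Some(new_end)
        } else {
            None // out of space; the caller would grow a new chunk here
        }
    }
}

fn main() {
    // A pretend chunk covering addresses [0x1000, 0x1040).
    let arena = Bump { start: 0x1000, end: Cell::new(0x1040) };
    let a = arena.alloc_addr(Layout::new::<u64>()).unwrap();
    let b = arena.alloc_addr(Layout::new::<u32>()).unwrap();
    assert_eq!(a, 0x1038); // the first allocation sits at the top of the chunk
    assert!(b < a); // the end pointer only ever moves down
}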
@@ -448,21 +496,26 @@ impl DroplessArena {
     #[inline]
     pub fn alloc_raw(&self, layout: Layout) -> *mut u8 {
         assert!(layout.size() != 0);
-        loop {
-            if let Some(a) = self.alloc_raw_without_grow(layout) {
-                break a;
-            }
-            // No free space left. Allocate a new chunk to satisfy the request.
-            // On failure the grow will panic or abort.
-            self.grow(layout.size());
+        if let Some(a) = self.alloc_raw_without_grow(layout) {
+            return a;
         }
+        // No free space left. Allocate a new chunk to satisfy the request.
+        // On failure the grow will panic or abort.
+        self.grow_and_alloc_raw(layout)
     }
 
     #[inline]
     pub fn alloc<T>(&self, object: T) -> &mut T {
         assert!(!mem::needs_drop::<T>());
+        assert!(mem::size_of::<T>() != 0);
 
-        let mem = self.alloc_raw(Layout::for_value::<T>(&object)) as *mut T;
+        let mem = if let Some(a) = self.alloc_raw_without_grow(Layout::for_value::<T>(&object)) {
+            a
+        } else {
+            // No free space left. Allocate a new chunk to satisfy the request.
+            // On failure the grow will panic or abort.
+            self.grow_and_alloc::<T>()
+        } as *mut T;
 
         unsafe {
            // Write into uninitialized memory.
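For context, a sketch of how these public entry points are exercised (assuming this crate as a dependency; `u32` is `Copy`, so it satisfies the `!mem::needs_drop` assert in `alloc`):

use rustc_arena::DroplessArena;

fn main() {
    let arena = DroplessArena::default();
    // The first call goes through the cold `grow_and_alloc` slow path; once a
    // chunk exists, later calls take the inlined bump-down fast path.
    let x: &mut u32 = arena.alloc(42);
    *x += 1;
    assert_eq!(*x, 43);
}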