@@ -85,7 +85,7 @@ unsafe fn _start(_: usize, _: usize) -> ! {
 }
 #[cfg(feature = "panic-unwind")]
 fn run_main() -> ! {
-    init_global_allocator();
+    unsafe { allocator::init_global_allocator() }
     let code = match experimental::panic::catch_unwind(run) {
         Ok(()) => 0,
         Err(_) => 101,
@@ -423,32 +423,49 @@ fn run() {
     println!("elapsed: {:?}", now.elapsed().unwrap());
 }

-// linked_list_allocator's LockedHeap uses spinning_top, but it doesn't compatible
-// with targets without atomic CAS. Implement our own LockedHeap by using spin,
-// which supports portable-atomic.
 #[cfg(feature = "panic-unwind")]
-#[global_allocator]
-static ALLOCATOR: LockedHeap = LockedHeap(spin::Mutex::new(linked_list_allocator::Heap::empty()));
-#[cfg(feature = "panic-unwind")]
-#[inline(always)]
-fn init_global_allocator() {
-    use core::mem::MaybeUninit;
-    const HEAP_SIZE: usize = 1024;
-    static mut HEAP_MEM: [MaybeUninit<u8>; HEAP_SIZE] = [MaybeUninit::uninit(); HEAP_SIZE];
-    unsafe { ALLOCATOR.0.lock().init(HEAP_MEM.as_mut_ptr().cast::<u8>(), HEAP_SIZE) }
-}
-#[cfg(feature = "panic-unwind")]
-struct LockedHeap(spin::Mutex<linked_list_allocator::Heap>);
-#[cfg(feature = "panic-unwind")]
-unsafe impl core::alloc::GlobalAlloc for LockedHeap {
-    unsafe fn alloc(&self, layout: core::alloc::Layout) -> *mut u8 {
-        self.0
-            .lock()
-            .allocate_first_fit(layout)
-            .ok()
-            .map_or(core::ptr::null_mut(), |allocation| allocation.as_ptr())
+mod allocator {
+    use core::{cell::UnsafeCell, mem::MaybeUninit};
+    // linked_list_allocator's LockedHeap uses spinning_top, but it isn't compatible
+    // with targets without atomic CAS. Implement our own LockedHeap by using spin,
+    // which supports portable-atomic.
+    #[global_allocator]
+    static ALLOCATOR: LockedHeap =
+        LockedHeap(spin::Mutex::new(linked_list_allocator::Heap::empty()));
+    #[inline(always)]
+    pub unsafe fn init_global_allocator() {
+        const HEAP_SIZE: usize = 1024;
+        static HEAP_MEM: SyncUnsafeCell<[MaybeUninit<u8>; HEAP_SIZE]> =
+            SyncUnsafeCell::new([MaybeUninit::uninit(); HEAP_SIZE]);
+        unsafe { ALLOCATOR.0.lock().init(HEAP_MEM.get().cast::<u8>(), HEAP_SIZE) }
+    }
+    struct LockedHeap(spin::Mutex<linked_list_allocator::Heap>);
+    unsafe impl core::alloc::GlobalAlloc for LockedHeap {
+        unsafe fn alloc(&self, layout: core::alloc::Layout) -> *mut u8 {
+            self.0
+                .lock()
+                .allocate_first_fit(layout)
+                .ok()
+                .map_or(core::ptr::null_mut(), |allocation| allocation.as_ptr())
+        }
+        unsafe fn dealloc(&self, ptr: *mut u8, layout: core::alloc::Layout) {
+            unsafe { self.0.lock().deallocate(core::ptr::NonNull::new_unchecked(ptr), layout) }
+        }
+    }
+    // See https://github.com/rust-lang/rust/issues/53639
+    #[repr(transparent)]
+    struct SyncUnsafeCell<T: ?Sized> {
+        value: UnsafeCell<T>,
     }
-    unsafe fn dealloc(&self, ptr: *mut u8, layout: core::alloc::Layout) {
-        unsafe { self.0.lock().deallocate(core::ptr::NonNull::new_unchecked(ptr), layout) }
+    unsafe impl<T: ?Sized> Sync for SyncUnsafeCell<T> {}
+    impl<T> SyncUnsafeCell<T> {
+        #[inline]
+        const fn new(value: T) -> Self {
+            Self { value: UnsafeCell::new(value) }
+        }
+        #[inline]
+        const fn get(&self) -> *mut T {
+            self.value.get()
+        }
     }
 }
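
Side note (not part of the diff): besides moving the allocator into a module, the new `init_global_allocator` stores the heap buffer in a `SyncUnsafeCell` wrapper instead of a `static mut`, so the initializer only ever derives a raw pointer from a shared static rather than taking a reference to a mutable static (the concern behind the rust-lang/rust#53639 link in the new code). Below is a minimal standalone sketch of that pattern; the names `BUF`, the 16-byte size, and the `main`/`println!` usage are made up for illustration and are not part of the commit.

```rust
// Standalone sketch of the SyncUnsafeCell pattern used in the diff above.
// The static is a plain (non-mut) static, so the only way to touch the buffer
// is through a raw pointer obtained from `get()`; no `&mut` to a mutable
// static is ever created.
use core::cell::UnsafeCell;
use core::mem::MaybeUninit;

#[repr(transparent)]
struct SyncUnsafeCell<T: ?Sized> {
    value: UnsafeCell<T>,
}

// SAFETY: users of the cell are responsible for synchronizing access,
// exactly as they would be with a `static mut`.
unsafe impl<T: ?Sized> Sync for SyncUnsafeCell<T> {}

impl<T> SyncUnsafeCell<T> {
    const fn new(value: T) -> Self {
        Self { value: UnsafeCell::new(value) }
    }
    const fn get(&self) -> *mut T {
        self.value.get()
    }
}

// Hypothetical buffer standing in for HEAP_MEM.
static BUF: SyncUnsafeCell<[MaybeUninit<u8>; 16]> =
    SyncUnsafeCell::new([MaybeUninit::uninit(); 16]);

fn main() {
    // Derive a raw pointer from the shared static and use it unsafely,
    // mirroring how init_global_allocator hands the buffer to the heap.
    let ptr: *mut u8 = BUF.get().cast::<u8>();
    unsafe {
        ptr.write(42);
        println!("first byte: {}", ptr.read());
    }
}
```

The `unsafe impl Sync` is the load-bearing piece: it lets the wrapper live in a `static` while leaving all synchronization obligations to the caller, which is the same contract `static mut` imposed, minus the reference-to-mutable-static footgun.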