@@ -11,11 +11,6 @@ use core::{
 pub struct GcAllocator;

 use crate::boehm;
-#[cfg(feature = "rustgc")]
-use crate::specializer;
-
-#[cfg(feature = "rustgc")]
-pub(crate) static ALLOCATOR: GcAllocator = GcAllocator;

 unsafe impl GlobalAlloc for GcAllocator {
     unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
@@ -33,39 +28,89 @@ unsafe impl GlobalAlloc for GcAllocator {
         boehm::GC_realloc(ptr, new_size) as *mut u8
     }

-    #[cfg(feature = "rustgc_internal")]
+    #[cfg(feature = "rustgc")]
     unsafe fn alloc_precise(&self, layout: Layout, bitmap: usize, bitmap_size: usize) -> *mut u8 {
         let gc_descr = boehm::GC_make_descriptor(&bitmap, bitmap_size);
         boehm::GC_malloc_explicitly_typed(layout.size(), gc_descr) as *mut u8
     }

-    #[cfg(feature = "rustgc_internal")]
+    #[cfg(feature = "rustgc")]
     fn alloc_conservative(&self, layout: Layout) -> *mut u8 {
         unsafe { boehm::GC_malloc(layout.size()) as *mut u8 }
     }

-    #[cfg(feature = "rustgc_internal")]
-    unsafe fn alloc_atomic(&self, layout: Layout) -> *mut u8 {
+    #[cfg(feature = "rustgc")]
+    unsafe fn alloc_untraceable(&self, layout: Layout) -> *mut u8 {
         boehm::GC_malloc_atomic(layout.size()) as *mut u8
     }
 }

 unsafe impl Allocator for GcAllocator {
     fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
-        let ptr = unsafe { boehm::GC_malloc(layout.size()) } as *mut u8;
-        assert!(!ptr.is_null());
-        let ptr = unsafe { NonNull::new_unchecked(ptr) };
-        Ok(NonNull::slice_from_raw_parts(ptr, layout.size()))
+        unsafe {
+            let ptr = boehm::GC_malloc(layout.size()) as *mut u8;
+            let ptr = NonNull::new_unchecked(ptr);
+            Ok(NonNull::slice_from_raw_parts(ptr, layout.size()))
+        }
     }

     unsafe fn deallocate(&self, _: NonNull<u8>, _: Layout) {}
+
+    #[cfg(feature = "rustgc")]
+    fn alloc_untraceable(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
+        unsafe {
+            let ptr = boehm::GC_malloc_atomic(layout.size()) as *mut u8;
+            let ptr = NonNull::new_unchecked(ptr);
+            Ok(NonNull::slice_from_raw_parts(ptr, layout.size()))
+        }
+    }
+
+    #[cfg(feature = "rustgc")]
+    fn alloc_conservative(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
+        unsafe {
+            let ptr = boehm::GC_malloc(layout.size()) as *mut u8;
+            let ptr = NonNull::new_unchecked(ptr);
+            Ok(NonNull::slice_from_raw_parts(ptr, layout.size()))
+        }
+    }
+
+    #[cfg(feature = "rustgc")]
+    fn alloc_precise(
+        &self,
+        layout: Layout,
+        bitmap: usize,
+        bitmap_size: usize,
+    ) -> Result<NonNull<[u8]>, AllocError> {
+        unsafe {
+            let gc_descr = boehm::GC_make_descriptor(&bitmap as *const usize, bitmap_size);
+            let ptr = boehm::GC_malloc_explicitly_typed(layout.size(), gc_descr);
+            let ptr = NonNull::new_unchecked(ptr);
+            Ok(NonNull::slice_from_raw_parts(ptr, layout.size()))
+        }
+    }
 }

 impl GcAllocator {
-    #[cfg(feature = "rustgc_internal")]
+    /// Allocate `T` such that it is optimized for marking.
+    #[cfg(feature = "rustgc")]
     pub fn maybe_optimised_alloc<T>(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
-        let sp = specializer::AllocationSpecializer::new();
-        sp.maybe_optimised_alloc::<T>(layout)
+        assert_eq!(Layout::new::<T>(), layout);
+
+        if !::std::gc::needs_tracing::<T>() {
+            return Allocator::alloc_untraceable(self, layout);
+        }
+
+        if ::std::gc::can_trace_precisely::<T>() {
+            let trace = unsafe { ::std::gc::gc_layout::<T>() };
+            return Allocator::alloc_precise(
+                self,
+                layout,
+                trace.bitmap as usize,
+                trace.size as usize,
+            );
+        }
+
+        Allocator::alloc_conservative(self, layout)
     }

     pub fn force_gc() {