Skip to content

Commit f9a310b

Browse files
Merge #39
39: Simplify optimized allocation r=ltratt a=jacob-hughes We can now use the `std::gc::needs_tracing` and `std::gc::can_trace_precisely` intrinsics to make allocation decisions. These intrinsics are const expressions, so have no branching cost. This lets us remove the complicated specialization stuff. Co-authored-by: Jake Hughes <[email protected]>
2 parents 8e71db2 + afd8e03 commit f9a310b

File tree

3 files changed

+61
-264
lines changed

3 files changed

+61
-264
lines changed

src/allocator.rs

Lines changed: 61 additions & 16 deletions
Original file line number | Diff line number | Diff line change
@@ -11,11 +11,6 @@ use core::{
1111
pub struct GcAllocator;
1212

1313
use crate::boehm;
14-
#[cfg(feature = "rustgc")]
15-
use crate::specializer;
16-
17-
#[cfg(feature = "rustgc")]
18-
pub(crate) static ALLOCATOR: GcAllocator = GcAllocator;
1914

2015
unsafe impl GlobalAlloc for GcAllocator {
2116
unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
@@ -33,39 +28,89 @@ unsafe impl GlobalAlloc for GcAllocator {
3328
boehm::GC_realloc(ptr, new_size) as *mut u8
3429
}
3530

36-
#[cfg(feature = "rustgc_internal")]
31+
#[cfg(feature = "rustgc")]
3732
unsafe fn alloc_precise(&self, layout: Layout, bitmap: usize, bitmap_size: usize) -> *mut u8 {
3833
let gc_descr = boehm::GC_make_descriptor(&bitmap, bitmap_size);
3934
boehm::GC_malloc_explicitly_typed(layout.size(), gc_descr) as *mut u8
4035
}
4136

42-
#[cfg(feature = "rustgc_internal")]
37+
#[cfg(feature = "rustgc")]
4338
fn alloc_conservative(&self, layout: Layout) -> *mut u8 {
4439
unsafe { boehm::GC_malloc(layout.size()) as *mut u8 }
4540
}
4641

47-
#[cfg(feature = "rustgc_internal")]
48-
unsafe fn alloc_atomic(&self, layout: Layout) -> *mut u8 {
42+
#[cfg(feature = "rustgc")]
43+
unsafe fn alloc_untraceable(&self, layout: Layout) -> *mut u8 {
4944
boehm::GC_malloc_atomic(layout.size()) as *mut u8
5045
}
5146
}
5247

5348
unsafe impl Allocator for GcAllocator {
5449
fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
55-
let ptr = unsafe { boehm::GC_malloc(layout.size()) } as *mut u8;
56-
assert!(!ptr.is_null());
57-
let ptr = unsafe { NonNull::new_unchecked(ptr) };
58-
Ok(NonNull::slice_from_raw_parts(ptr, layout.size()))
50+
unsafe {
51+
let ptr = boehm::GC_malloc(layout.size()) as *mut u8;
52+
let ptr = NonNull::new_unchecked(ptr);
53+
Ok(NonNull::slice_from_raw_parts(ptr, layout.size()))
54+
}
5955
}
6056

6157
unsafe fn deallocate(&self, _: NonNull<u8>, _: Layout) {}
58+
59+
#[cfg(feature = "rustgc")]
60+
fn alloc_untraceable(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
61+
unsafe {
62+
let ptr = boehm::GC_malloc_atomic(layout.size()) as *mut u8;
63+
let ptr = NonNull::new_unchecked(ptr);
64+
Ok(NonNull::slice_from_raw_parts(ptr, layout.size()))
65+
}
66+
}
67+
68+
#[cfg(feature = "rustgc")]
69+
fn alloc_conservative(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
70+
unsafe {
71+
let ptr = boehm::GC_malloc(layout.size()) as *mut u8;
72+
let ptr = NonNull::new_unchecked(ptr);
73+
Ok(NonNull::slice_from_raw_parts(ptr, layout.size()))
74+
}
75+
}
76+
77+
#[cfg(feature = "rustgc")]
78+
fn alloc_precise(
79+
&self,
80+
layout: Layout,
81+
bitmap: usize,
82+
bitmap_size: usize,
83+
) -> Result<NonNull<[u8]>, AllocError> {
84+
unsafe {
85+
let gc_descr = boehm::GC_make_descriptor(&bitmap as *const usize, bitmap_size);
86+
let ptr = boehm::GC_malloc_explicitly_typed(layout.size(), gc_descr);
87+
let ptr = NonNull::new_unchecked(ptr);
88+
Ok(NonNull::slice_from_raw_parts(ptr, layout.size()))
89+
}
90+
}
6291
}
6392

6493
impl GcAllocator {
65-
#[cfg(feature = "rustgc_internal")]
94+
/// Allocate `T` such that it is optimized for marking.
95+
#[cfg(feature = "rustgc")]
6696
pub fn maybe_optimised_alloc<T>(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
67-
let sp = specializer::AllocationSpecializer::new();
68-
sp.maybe_optimised_alloc::<T>(layout)
97+
assert_eq!(Layout::new::<T>(), layout);
98+
99+
if !::std::gc::needs_tracing::<T>() {
100+
return Allocator::alloc_untraceable(self, layout);
101+
}
102+
103+
if ::std::gc::can_trace_precisely::<T>() {
104+
let trace = unsafe { ::std::gc::gc_layout::<T>() };
105+
return Allocator::alloc_precise(
106+
self,
107+
layout,
108+
trace.bitmap as usize,
109+
trace.size as usize,
110+
);
111+
}
112+
113+
Allocator::alloc_conservative(self, layout)
69114
}
70115

71116
pub fn force_gc() {

src/lib.rs

Lines changed: 0 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -4,7 +4,6 @@
44
#![feature(alloc_layout_extra)]
55
#![feature(arbitrary_self_types)]
66
#![feature(dispatch_from_dyn)]
7-
#![feature(specialization)]
87
#![feature(nonnull_slice_from_raw_parts)]
98
#![feature(raw_vec_internals)]
109
#![feature(const_fn)]
@@ -22,8 +21,6 @@ pub mod stats;
2221

2322
mod allocator;
2423
mod boehm;
25-
#[cfg(feature = "rustgc")]
26-
mod specializer;
2724

2825
pub use allocator::GcAllocator;
2926
pub use gc::Gc;

src/specializer.rs

Lines changed: 0 additions & 245 deletions
This file was deleted.

0 commit comments

Comments (0)