 // license that can be found in the LICENSE file or at
 // https://opensource.org/licenses/MIT.
 
-use alloc::alloc::{GlobalAlloc, Layout};
-use core::cell::Cell;
+use core::cell::UnsafeCell;
+use core::sync::atomic::{AtomicUsize, Ordering};
 
-static mut HEAP: [u8; 4 * 1024 * 1024] = [0_u8; 4 * 1024 * 1024];
+/// The allocator works in terms of an owned region
+/// of memory. We call this a Heap.
+pub(crate) trait Heap {
+    fn as_mut_ptr(&mut self) -> *mut u8;
+    fn len(&self) -> usize;
+}
 
-pub(crate) struct BumpAlloc<'a> {
-    heap: Cell<&'a mut [u8]>,
+/// A SliceHeap is a heap created by destructuring
+/// the elements of a mutable slice.
+pub(crate) struct SliceHeap {
+    heap: *mut u8,
+    len: usize,
 }
 
-impl<'a> BumpAlloc<'a> {
-    pub fn new(arena: &'a mut [u8]) -> BumpAlloc<'a> {
-        BumpAlloc { heap: Cell::new(arena) }
+impl SliceHeap {
+    pub fn new(arena: &mut [u8]) -> SliceHeap {
+        SliceHeap { heap: arena.as_mut_ptr(), len: arena.len() }
+    }
+}
+impl Heap for SliceHeap {
+    fn as_mut_ptr(&mut self) -> *mut u8 {
+        self.heap
+    }
+    fn len(&self) -> usize {
+        self.len
     }
 }
 
-unsafe impl GlobalAlloc for BumpAlloc<'_> {
-    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
-        let heap = self.heap.take();
-        let ptr = heap.as_mut_ptr();
-        let offset = ptr.align_offset(layout.align());
-        if offset > heap.len() || offset + layout.size() > heap.len() {
-            return core::ptr::null_mut();
-        }
-        let ptr = ptr.wrapping_add(offset);
-        let heap = &mut heap[offset + layout.size()..];
-        self.heap.replace(heap);
-        ptr
+/// A Bump Allocator takes ownership of an object of
+/// some type that implements Heap, and maintains a
+/// cursor into that object. The cursor denotes the
+/// point between allocated and unallocated memory in
+/// the underlying Heap.
+pub(crate) struct BumpAlloc<T: Heap> {
+    arena: UnsafeCell<T>,
+    cursor: AtomicUsize,
+}
+
+impl<T: Heap> BumpAlloc<T> {
+    pub(crate) const fn new(arena: T) -> BumpAlloc<T> {
+        BumpAlloc { arena: UnsafeCell::new(arena), cursor: AtomicUsize::new(0) }
+    }
+
+    /// Allocates the given number of bytes with the given
+    /// alignment. Returns `None` if the allocation cannot
+    /// be satisfied, otherwise returns `Some` of a mutable
+    /// slice referring to the allocated memory.
+    pub(crate) fn alloc_bytes(&self, align: usize, size: usize) -> Option<&mut [u8]> {
+        let heap = unsafe { &mut *self.arena.get() };
+        let base = heap.as_mut_ptr();
+        let mut offset = 0;
+        self.cursor
+            .fetch_update(Ordering::Relaxed, Ordering::Relaxed, |current| {
+                let ptr = base.wrapping_add(current);
+                let adjust = ptr.align_offset(align);
+                offset = current.checked_add(adjust).expect("alignment overflow");
+                let next = offset.checked_add(size).expect("size overflow");
+                (next <= heap.len()).then_some(next)
+            })
+            .ok()?;
+        let ptr = base.wrapping_add(offset);
+        Some(unsafe { core::slice::from_raw_parts_mut(ptr, size) })
     }
-    unsafe fn dealloc(&self, _ptr: *mut u8, _layout: Layout) {}
 }
 
-#[global_allocator]
-static mut BUMP_ALLOCATOR: BumpAlloc<'static> = BumpAlloc { heap: Cell::new(unsafe { &mut HEAP }) };
+mod global {
+    use super::{BumpAlloc, Heap};
+    use alloc::alloc::{GlobalAlloc, Layout};
+    use core::ptr;
+
+    const GLOBAL_HEAP_SIZE: usize = 4 * 1024 * 1024;
+
+    /// A GlobalHeap is an aligned wrapper around an
+    /// owned buffer that implements the Heap trait.
+    #[repr(C, align(4096))]
+    struct GlobalHeap([u8; GLOBAL_HEAP_SIZE]);
+
+    impl Heap for GlobalHeap {
+        fn as_mut_ptr(&mut self) -> *mut u8 {
+            self.0.as_mut_ptr()
+        }
+        fn len(&self) -> usize {
+            GLOBAL_HEAP_SIZE
+        }
+    }
+
+    unsafe impl<T: Heap> GlobalAlloc for BumpAlloc<T> {
+        unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
+            self.alloc_bytes(layout.align(), layout.size())
+                .map_or(ptr::null_mut(), |p| p.as_mut_ptr())
+        }
+        unsafe fn dealloc(&self, _ptr: *mut u8, _layout: Layout) {}
+    }
+
+    #[global_allocator]
+    static mut BUMP_ALLOCATOR: BumpAlloc<GlobalHeap> =
+        BumpAlloc::new(GlobalHeap([0u8; GLOBAL_HEAP_SIZE]));
+}
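
For reference, a minimal usage sketch (not part of the commit) of the crate-internal API introduced above: a SliceHeap wraps a caller-owned buffer, BumpAlloc::new takes ownership of it, and alloc_bytes hands out aligned sub-slices until the arena is exhausted. The function name, buffer name, and sizes are illustrative only.

// Illustrative usage only; assumes it lives inside the same crate,
// since SliceHeap, BumpAlloc, and alloc_bytes are pub(crate).
fn bump_alloc_demo() {
    // A small caller-owned arena backing the allocator.
    let mut backing = [0u8; 1024];
    let allocator = BumpAlloc::new(SliceHeap::new(&mut backing));

    // Request 16 bytes aligned to 8; the cursor first advances past
    // whatever padding is needed to reach that alignment.
    if let Some(buf) = allocator.alloc_bytes(8, 16) {
        assert_eq!(buf.len(), 16);
        assert_eq!(buf.as_ptr() as usize % 8, 0);
    }

    // A request larger than what remains in the arena yields None;
    // nothing is ever freed, so the cursor only moves forward.
    assert!(allocator.alloc_bytes(1, 4096).is_none());
}

Because the cursor is an AtomicUsize advanced with fetch_update, concurrent callers contend only on the compare-and-swap: a caller whose reservation no longer fits gets None back rather than corrupting the arena, and the GlobalAlloc impl in mod global simply maps that None to a null pointer.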