
Commit f6ab74b

Remove alloc::Opaque and use *mut u8 as pointer type for GlobalAlloc

glandium authored and SimonSapin committed
1 parent 0b7c9e7 · commit f6ab74b

File tree: 17 files changed (+130 / -175 lines)
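In short, every `*mut Opaque` / `NonNull<Opaque>` in the allocator API becomes `*mut u8` / `NonNull<u8>`, and callers that used `NonNull::as_opaque()` switch to the existing `NonNull::cast()`. A paraphrased sketch of the `GlobalAlloc` surface after this commit (not the verbatim trait definition; the real item is `core::alloc::GlobalAlloc`, where `realloc` and `alloc_zeroed` have default bodies and attributes are omitted here):

```rust
use core::alloc::Layout;

// Paraphrased trait surface after this commit; `GlobalAllocSketch` is a
// stand-in name, not the real `core::alloc::GlobalAlloc` item.
pub unsafe trait GlobalAllocSketch {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8;      // was: -> *mut Opaque
    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout); // was: ptr: *mut Opaque
    unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8;
    unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8;
}
```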

Diff for: src/doc/unstable-book/src/language-features/global-allocator.md (+3 / -3)

````diff
@@ -29,17 +29,17 @@ looks like:
 ```rust
 #![feature(global_allocator, allocator_api, heap_api)]
 
-use std::alloc::{GlobalAlloc, System, Layout, Opaque};
+use std::alloc::{GlobalAlloc, System, Layout};
 use std::ptr::NonNull;
 
 struct MyAllocator;
 
 unsafe impl GlobalAlloc for MyAllocator {
-    unsafe fn alloc(&self, layout: Layout) -> *mut Opaque {
+    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
         System.alloc(layout)
     }
 
-    unsafe fn dealloc(&self, ptr: *mut Opaque, layout: Layout) {
+    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
         System.dealloc(ptr, layout)
     }
 }
````
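For context, after this change the complete example in the unstable book would read roughly as follows. Only the three marked lines above are touched by this commit; the `#[global_allocator]` static and `main` shown here are an assumption about the unchanged remainder of the book example.

```rust
#![feature(global_allocator, allocator_api, heap_api)]

use std::alloc::{GlobalAlloc, System, Layout};

struct MyAllocator;

unsafe impl GlobalAlloc for MyAllocator {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        System.alloc(layout)
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        System.dealloc(ptr, layout)
    }
}

// Assumed to match the rest of the book example, which this hunk does not touch.
#[global_allocator]
static GLOBAL: MyAllocator = MyAllocator;

fn main() {
    // This vector's buffer is allocated through MyAllocator, which forwards to System.
    let v = vec![1u8, 2, 3];
    assert_eq!(v.len(), 3);
}
```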

Diff for: src/liballoc/alloc.rs (+15 / -18)

```diff
@@ -51,52 +51,49 @@ pub const Heap: Global = Global;
 
 unsafe impl GlobalAlloc for Global {
     #[inline]
-    unsafe fn alloc(&self, layout: Layout) -> *mut Opaque {
-        let ptr = __rust_alloc(layout.size(), layout.align());
-        ptr as *mut Opaque
+    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
+        __rust_alloc(layout.size(), layout.align())
     }
 
     #[inline]
-    unsafe fn dealloc(&self, ptr: *mut Opaque, layout: Layout) {
-        __rust_dealloc(ptr as *mut u8, layout.size(), layout.align())
+    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
+        __rust_dealloc(ptr, layout.size(), layout.align())
     }
 
     #[inline]
-    unsafe fn realloc(&self, ptr: *mut Opaque, layout: Layout, new_size: usize) -> *mut Opaque {
-        let ptr = __rust_realloc(ptr as *mut u8, layout.size(), layout.align(), new_size);
-        ptr as *mut Opaque
+    unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
+        __rust_realloc(ptr, layout.size(), layout.align(), new_size)
     }
 
     #[inline]
-    unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut Opaque {
-        let ptr = __rust_alloc_zeroed(layout.size(), layout.align());
-        ptr as *mut Opaque
+    unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8 {
+        __rust_alloc_zeroed(layout.size(), layout.align())
     }
 }
 
 unsafe impl Alloc for Global {
     #[inline]
-    unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<Opaque>, AllocErr> {
+    unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<u8>, AllocErr> {
         NonNull::new(GlobalAlloc::alloc(self, layout)).ok_or(AllocErr)
     }
 
     #[inline]
-    unsafe fn dealloc(&mut self, ptr: NonNull<Opaque>, layout: Layout) {
+    unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
         GlobalAlloc::dealloc(self, ptr.as_ptr(), layout)
     }
 
     #[inline]
     unsafe fn realloc(&mut self,
-                      ptr: NonNull<Opaque>,
+                      ptr: NonNull<u8>,
                       layout: Layout,
                       new_size: usize)
-                      -> Result<NonNull<Opaque>, AllocErr>
+                      -> Result<NonNull<u8>, AllocErr>
     {
         NonNull::new(GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size)).ok_or(AllocErr)
     }
 
     #[inline]
-    unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<NonNull<Opaque>, AllocErr> {
+    unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<NonNull<u8>, AllocErr> {
         NonNull::new(GlobalAlloc::alloc_zeroed(self, layout)).ok_or(AllocErr)
     }
 }
@@ -113,7 +110,7 @@ unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
     let layout = Layout::from_size_align_unchecked(size, align);
     let ptr = Global.alloc(layout);
     if !ptr.is_null() {
-        ptr as *mut u8
+        ptr
     } else {
         oom(layout)
     }
@@ -129,7 +126,7 @@ pub(crate) unsafe fn box_free<T: ?Sized>(ptr: Unique<T>) {
     // We do not allocate for Box<T> when T is ZST, so deallocation is also not necessary.
     if size != 0 {
         let layout = Layout::from_size_align_unchecked(size, align);
-        Global.dealloc(ptr as *mut Opaque, layout);
+        Global.dealloc(ptr as *mut u8, layout);
     }
 }
 
```
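The practical effect inside liballoc is that the `as *mut u8` / `as *mut Opaque` round-trips disappear: `Global.alloc` now hands back a byte pointer that can be used and freed directly, exactly as `exchange_malloc` above does. A minimal usage sketch under the same assumptions as this file (nightly-only `Global`, `Layout`, and `oom` in scope; the function name is made up for illustration):

```rust
unsafe fn alloc_sixteen_zeroed_bytes() {
    let layout = Layout::from_size_align(16, 8).unwrap();

    // Resolves to `GlobalAlloc::alloc`, returning `*mut u8` (previously `*mut Opaque`).
    let ptr = Global.alloc(layout);
    if ptr.is_null() {
        oom(layout);
    }

    // No `as *mut u8` cast is needed before touching the bytes.
    ptr.write_bytes(0, layout.size());

    Global.dealloc(ptr, layout);
}
```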

Diff for: src/liballoc/arc.rs (+3 / -3)

```diff
@@ -519,7 +519,7 @@ impl<T: ?Sized> Arc<T> {
 
         if self.inner().weak.fetch_sub(1, Release) == 1 {
             atomic::fence(Acquire);
-            Global.dealloc(self.ptr.as_opaque(), Layout::for_value(self.ptr.as_ref()))
+            Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()))
         }
     }
 
@@ -639,7 +639,7 @@ impl<T: Clone> ArcFromSlice<T> for Arc<[T]> {
                     let slice = from_raw_parts_mut(self.elems, self.n_elems);
                     ptr::drop_in_place(slice);
 
-                    Global.dealloc(self.mem.as_opaque(), self.layout.clone());
+                    Global.dealloc(self.mem.cast(), self.layout.clone());
                 }
             }
         }
@@ -1196,7 +1196,7 @@ impl<T: ?Sized> Drop for Weak<T> {
         if self.inner().weak.fetch_sub(1, Release) == 1 {
             atomic::fence(Acquire);
             unsafe {
-                Global.dealloc(self.ptr.as_opaque(), Layout::for_value(self.ptr.as_ref()))
+                Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()))
             }
         }
     }
```
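The recurring pattern in arc.rs (and in the btree and rc changes below) is purely mechanical: the now-removed `NonNull::as_opaque()` is replaced by the existing `NonNull::cast()`, which reinterprets a `NonNull<T>` as a pointer to another (sized) type, here `NonNull<u8>` for `dealloc`. A tiny illustration with a made-up payload type standing in for `ArcInner<T>`:

```rust
use std::ptr::NonNull;

struct Inner { _refcount: usize, _value: u64 } // hypothetical stand-in for ArcInner<T>

fn as_byte_ptr(p: NonNull<Inner>) -> NonNull<u8> {
    // Before this commit: p.as_opaque() -> NonNull<Opaque>
    // After this commit:  p.cast()      -> NonNull<u8>
    p.cast::<u8>()
}
```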

Diff for: src/liballoc/btree/node.rs (+5 / -5)

```diff
@@ -287,7 +287,7 @@ impl<K, V> Root<K, V> {
         self.as_mut().as_leaf_mut().parent = ptr::null();
 
         unsafe {
-            Global.dealloc(NonNull::from(top).as_opaque(), Layout::new::<InternalNode<K, V>>());
+            Global.dealloc(NonNull::from(top).cast(), Layout::new::<InternalNode<K, V>>());
         }
     }
 }
@@ -478,7 +478,7 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::Leaf> {
         debug_assert!(!self.is_shared_root());
         let node = self.node;
         let ret = self.ascend().ok();
-        Global.dealloc(node.as_opaque(), Layout::new::<LeafNode<K, V>>());
+        Global.dealloc(node.cast(), Layout::new::<LeafNode<K, V>>());
         ret
     }
 }
@@ -499,7 +499,7 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::Internal> {
     > {
         let node = self.node;
         let ret = self.ascend().ok();
-        Global.dealloc(node.as_opaque(), Layout::new::<InternalNode<K, V>>());
+        Global.dealloc(node.cast(), Layout::new::<InternalNode<K, V>>());
         ret
     }
 }
@@ -1321,12 +1321,12 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::
             }
 
             Global.dealloc(
-                right_node.node.as_opaque(),
+                right_node.node.cast(),
                 Layout::new::<InternalNode<K, V>>(),
             );
         } else {
             Global.dealloc(
-                right_node.node.as_opaque(),
+                right_node.node.cast(),
                 Layout::new::<LeafNode<K, V>>(),
            );
         }
```

Diff for: src/liballoc/heap.rs (+6 / -6)

```diff
@@ -10,7 +10,7 @@
 
 #![allow(deprecated)]
 
-pub use alloc::{Layout, AllocErr, CannotReallocInPlace, Opaque};
+pub use alloc::{Layout, AllocErr, CannotReallocInPlace};
 use core::alloc::Alloc as CoreAlloc;
 use core::ptr::NonNull;
 
@@ -54,7 +54,7 @@ unsafe impl<T> Alloc for T where T: CoreAlloc {
     }
 
     unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
-        let ptr = NonNull::new_unchecked(ptr as *mut Opaque);
+        let ptr = NonNull::new_unchecked(ptr);
         CoreAlloc::dealloc(self, ptr, layout)
     }
 
@@ -70,7 +70,7 @@
                       ptr: *mut u8,
                       layout: Layout,
                       new_layout: Layout) -> Result<*mut u8, AllocErr> {
-        let ptr = NonNull::new_unchecked(ptr as *mut Opaque);
+        let ptr = NonNull::new_unchecked(ptr);
         CoreAlloc::realloc(self, ptr, layout, new_layout.size()).map(|ptr| ptr.cast().as_ptr())
     }
 
@@ -87,7 +87,7 @@
                              ptr: *mut u8,
                              layout: Layout,
                              new_layout: Layout) -> Result<Excess, AllocErr> {
-        let ptr = NonNull::new_unchecked(ptr as *mut Opaque);
+        let ptr = NonNull::new_unchecked(ptr);
         CoreAlloc::realloc_excess(self, ptr, layout, new_layout.size())
             .map(|e| Excess(e.0 .cast().as_ptr(), e.1))
     }
@@ -96,15 +96,15 @@
                             ptr: *mut u8,
                             layout: Layout,
                             new_layout: Layout) -> Result<(), CannotReallocInPlace> {
-        let ptr = NonNull::new_unchecked(ptr as *mut Opaque);
+        let ptr = NonNull::new_unchecked(ptr);
         CoreAlloc::grow_in_place(self, ptr, layout, new_layout.size())
     }
 
     unsafe fn shrink_in_place(&mut self,
                               ptr: *mut u8,
                               layout: Layout,
                               new_layout: Layout) -> Result<(), CannotReallocInPlace> {
-        let ptr = NonNull::new_unchecked(ptr as *mut Opaque);
+        let ptr = NonNull::new_unchecked(ptr);
         CoreAlloc::shrink_in_place(self, ptr, layout, new_layout.size())
     }
 }
```

Diff for: src/liballoc/raw_vec.rs (+9 / -10)

```diff
@@ -93,7 +93,7 @@ impl<T, A: Alloc> RawVec<T, A> {
 
         // handles ZSTs and `cap = 0` alike
         let ptr = if alloc_size == 0 {
-            NonNull::<T>::dangling().as_opaque()
+            NonNull::<T>::dangling().cast()
         } else {
             let align = mem::align_of::<T>();
             let layout = Layout::from_size_align(alloc_size, align).unwrap();
@@ -314,7 +314,7 @@
                     let new_cap = 2 * self.cap;
                     let new_size = new_cap * elem_size;
                     alloc_guard(new_size).unwrap_or_else(|_| capacity_overflow());
-                    let ptr_res = self.a.realloc(NonNull::from(self.ptr).as_opaque(),
+                    let ptr_res = self.a.realloc(NonNull::from(self.ptr).cast(),
                                                  cur,
                                                  new_size);
                     match ptr_res {
@@ -373,7 +373,7 @@
             let new_cap = 2 * self.cap;
             let new_size = new_cap * elem_size;
             alloc_guard(new_size).unwrap_or_else(|_| capacity_overflow());
-            match self.a.grow_in_place(NonNull::from(self.ptr).as_opaque(), old_layout, new_size) {
+            match self.a.grow_in_place(NonNull::from(self.ptr).cast(), old_layout, new_size) {
                 Ok(_) => {
                     // We can't directly divide `size`.
                     self.cap = new_cap;
@@ -546,7 +546,7 @@
             // FIXME: may crash and burn on over-reserve
             alloc_guard(new_layout.size()).unwrap_or_else(|_| capacity_overflow());
             match self.a.grow_in_place(
-                NonNull::from(self.ptr).as_opaque(), old_layout, new_layout.size(),
+                NonNull::from(self.ptr).cast(), old_layout, new_layout.size(),
             ) {
                 Ok(_) => {
                     self.cap = new_cap;
@@ -607,7 +607,7 @@
             let new_size = elem_size * amount;
             let align = mem::align_of::<T>();
             let old_layout = Layout::from_size_align_unchecked(old_size, align);
-            match self.a.realloc(NonNull::from(self.ptr).as_opaque(),
+            match self.a.realloc(NonNull::from(self.ptr).cast(),
                                  old_layout,
                                  new_size) {
                 Ok(p) => self.ptr = p.cast().into(),
@@ -667,7 +667,7 @@
         let res = match self.current_layout() {
             Some(layout) => {
                 debug_assert!(new_layout.align() == layout.align());
-                self.a.realloc(NonNull::from(self.ptr).as_opaque(), layout, new_layout.size())
+                self.a.realloc(NonNull::from(self.ptr).cast(), layout, new_layout.size())
             }
             None => self.a.alloc(new_layout),
         };
@@ -710,7 +710,7 @@
         let elem_size = mem::size_of::<T>();
         if elem_size != 0 {
             if let Some(layout) = self.current_layout() {
-                self.a.dealloc(NonNull::from(self.ptr).as_opaque(), layout);
+                self.a.dealloc(NonNull::from(self.ptr).cast(), layout);
             }
         }
     }
@@ -753,7 +753,6 @@ fn capacity_overflow() -> ! {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use alloc::Opaque;
 
     #[test]
     fn allocator_param() {
@@ -773,7 +772,7 @@ mod tests {
         // before allocation attempts start failing.
         struct BoundedAlloc { fuel: usize }
         unsafe impl Alloc for BoundedAlloc {
-            unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<Opaque>, AllocErr> {
+            unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<u8>, AllocErr> {
                 let size = layout.size();
                 if size > self.fuel {
                     return Err(AllocErr);
@@ -783,7 +782,7 @@
                     err @ Err(_) => err,
                 }
             }
-            unsafe fn dealloc(&mut self, ptr: NonNull<Opaque>, layout: Layout) {
+            unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
                 Global.dealloc(ptr, layout)
             }
         }
```
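Downstream implementors of the unstable `Alloc` trait migrate the same way as the `BoundedAlloc` test above: the required methods now speak `NonNull<u8>` instead of `NonNull<Opaque>`. A minimal sketch of such an impl on a nightly of that era (crate-level `#![feature(allocator_api)]` and the `std::alloc` paths of that nightly are assumed; `PassThrough` is a made-up example type that just forwards to `Global`):

```rust
use std::alloc::{Alloc, AllocErr, Global, Layout};
use std::ptr::NonNull;

struct PassThrough;

unsafe impl Alloc for PassThrough {
    // Previously: Result<NonNull<Opaque>, AllocErr>
    unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<u8>, AllocErr> {
        Alloc::alloc(&mut Global, layout)
    }

    // Previously: ptr: NonNull<Opaque>
    unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
        Alloc::dealloc(&mut Global, ptr, layout)
    }
}
```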

Diff for: src/liballoc/rc.rs (+5 / -5)

```diff
@@ -259,7 +259,7 @@ use core::ops::CoerceUnsized;
 use core::ptr::{self, NonNull};
 use core::convert::From;
 
-use alloc::{Global, Alloc, Layout, Opaque, box_free, oom};
+use alloc::{Global, Alloc, Layout, box_free, oom};
 use string::String;
 use vec::Vec;
 
@@ -732,7 +732,7 @@ impl<T: Clone> RcFromSlice<T> for Rc<[T]> {
             // In the event of a panic, elements that have been written
             // into the new RcBox will be dropped, then the memory freed.
             struct Guard<T> {
-                mem: NonNull<Opaque>,
+                mem: NonNull<u8>,
                 elems: *mut T,
                 layout: Layout,
                 n_elems: usize,
@@ -755,7 +755,7 @@
             let v_ptr = v as *const [T];
             let ptr = Self::allocate_for_ptr(v_ptr);
 
-            let mem = ptr as *mut _ as *mut Opaque;
+            let mem = ptr as *mut _ as *mut u8;
             let layout = Layout::for_value(&*ptr);
 
             // Pointer to first element
@@ -839,7 +839,7 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc<T> {
                 self.dec_weak();
 
                 if self.weak() == 0 {
-                    Global.dealloc(self.ptr.as_opaque(), Layout::for_value(self.ptr.as_ref()));
+                    Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()));
                 }
             }
         }
@@ -1263,7 +1263,7 @@ impl<T: ?Sized> Drop for Weak<T> {
             // the weak count starts at 1, and will only go to zero if all
             // the strong pointers have disappeared.
             if self.weak() == 0 {
-                Global.dealloc(self.ptr.as_opaque(), Layout::for_value(self.ptr.as_ref()));
+                Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()));
             }
         }
     }
```

0 commit comments
