
Commit 25fb12b

auto merge of rust-lang#19765 : luqmana/rust/nonzero-lang-item, r=nikomatsakis
This extends the nullable-enum optimization to traverse beyond just the first level to find possible fields to use as the discriminant. It now works through structs, tuples, and fixed-size arrays. This also introduces a new lang item, NonZero, which you can use to wrap raw pointers or integral types to indicate to rustc that the underlying value is known to never be 0/NULL. We then use it in Vec, Rc, and Arc so that they also benefit from the nullable-enum optimization. As per rust-lang/rfcs#499, NonZero is not exposed via the `libstd` facade.

```
x86_64 Linux:

T                  size_of::<T>()    Option<T> (Before)    Option<T> (After)
-----------------------------------------------------------------------------
Vec<int>                 24                  32                   24
String                   24                  32                   24
Rc<int>                   8                  16                    8
Arc<int>                  8                  16                    8
[Box<int>, ..2]          16                  24                   16
(String, uint)           32                  40                   32
```

Fixes rust-lang#19419.
Fixes rust-lang#13194.
Fixes rust-lang#9378.
Fixes rust-lang#7576.
2 parents 03a1188 + 766a719
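The size table in the message is easy to reproduce on a current toolchain. Below is a minimal sketch using today's `std::ptr::NonNull` and `std::num::NonZeroUsize` as stand-ins for the `core::nonzero::NonZero` lang item this commit adds; the modern types trigger the same niche ("nullable enum") optimization:

```rust
use std::mem::size_of;
use std::num::NonZeroUsize;
use std::ptr::NonNull;
use std::rc::Rc;
use std::sync::Arc;

fn main() {
    // A wrapped value can never be 0/NULL, so `None` can be encoded as the
    // all-zero bit pattern and the Option adds no space.
    assert_eq!(size_of::<Option<NonNull<u8>>>(), size_of::<NonNull<u8>>());
    assert_eq!(size_of::<Option<NonZeroUsize>>(), size_of::<usize>());

    // The types patched in this commit benefit the same way.
    assert_eq!(size_of::<Option<Vec<i32>>>(), size_of::<Vec<i32>>());
    assert_eq!(size_of::<Option<String>>(), size_of::<String>());
    assert_eq!(size_of::<Option<Rc<i32>>>(), size_of::<Rc<i32>>());
    assert_eq!(size_of::<Option<Arc<i32>>>(), size_of::<Arc<i32>>());

    // The deeper traversal through structs, tuples, and fixed-size arrays
    // covers the last two rows of the table above.
    assert_eq!(size_of::<Option<(String, usize)>>(), size_of::<(String, usize)>());
    assert_eq!(size_of::<Option<[Box<i32>; 2]>>(), size_of::<[Box<i32>; 2]>());
}
```

Every assertion relies on the compiler finding a field whose value can never be all-zero bits, so `None` costs no extra space.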

13 files changed (+370 −155 lines)


src/liballoc/arc.rs (+15 −12)
```diff
@@ -76,11 +76,11 @@ use core::default::Default;
 use core::kinds::{Sync, Send};
 use core::mem::{min_align_of, size_of, drop};
 use core::mem;
+use core::nonzero::NonZero;
 use core::ops::{Drop, Deref};
 use core::option::Option;
 use core::option::Option::{Some, None};
-use core::ptr::RawPtr;
-use core::ptr;
+use core::ptr::{mod, RawPtr};
 use heap::deallocate;
 
 /// An atomically reference counted wrapper for shared state.
@@ -114,7 +114,7 @@ use heap::deallocate;
 pub struct Arc<T> {
     // FIXME #12808: strange name to try to avoid interfering with
     // field accesses of the contained type via Deref
-    _ptr: *mut ArcInner<T>,
+    _ptr: NonZero<*mut ArcInner<T>>,
 }
 
 unsafe impl<T: Sync + Send> Send for Arc<T> { }
@@ -130,7 +130,7 @@ unsafe impl<T: Sync + Send> Sync for Arc<T> { }
 pub struct Weak<T> {
     // FIXME #12808: strange name to try to avoid interfering with
     // field accesses of the contained type via Deref
-    _ptr: *mut ArcInner<T>,
+    _ptr: NonZero<*mut ArcInner<T>>,
 }
 
 unsafe impl<T: Sync + Send> Send for Weak<T> { }
@@ -165,7 +165,7 @@ impl<T> Arc<T> {
             weak: atomic::AtomicUint::new(1),
             data: data,
         };
-        Arc { _ptr: unsafe { mem::transmute(x) } }
+        Arc { _ptr: unsafe { NonZero::new(mem::transmute(x)) } }
     }
 
     /// Downgrades the `Arc<T>` to a `Weak<T>` reference.
@@ -194,7 +194,7 @@ impl<T> Arc<T> {
         // pointer is valid. Furthermore, we know that the `ArcInner` structure itself is `Sync`
         // because the inner data is `Sync` as well, so we're ok loaning out an immutable pointer
         // to these contents.
-        unsafe { &*self._ptr }
+        unsafe { &**self._ptr }
     }
 }
 
@@ -281,7 +281,7 @@ impl<T: Send + Sync + Clone> Arc<T> {
         // pointer that will ever be returned to T. Our reference count is guaranteed to be 1 at
         // this point, and we required the Arc itself to be `mut`, so we're returning the only
         // possible reference to the inner data.
-        let inner = unsafe { &mut *self._ptr };
+        let inner = unsafe { &mut **self._ptr };
         &mut inner.data
     }
 }
@@ -316,7 +316,8 @@ impl<T: Sync + Send> Drop for Arc<T> {
     fn drop(&mut self) {
         // This structure has #[unsafe_no_drop_flag], so this drop glue may run more than once (but
         // it is guaranteed to be zeroed after the first if it's run more than once)
-        if self._ptr.is_null() { return }
+        let ptr = *self._ptr;
+        if ptr.is_null() { return }
 
         // Because `fetch_sub` is already atomic, we do not need to synchronize with other threads
         // unless we are going to delete the object. This same logic applies to the below
@@ -346,7 +347,7 @@ impl<T: Sync + Send> Drop for Arc<T> {
 
         if self.inner().weak.fetch_sub(1, atomic::Release) == 1 {
             atomic::fence(atomic::Acquire);
-            unsafe { deallocate(self._ptr as *mut u8, size_of::<ArcInner<T>>(),
+            unsafe { deallocate(ptr as *mut u8, size_of::<ArcInner<T>>(),
                                 min_align_of::<ArcInner<T>>()) }
         }
     }
@@ -386,7 +387,7 @@ impl<T: Sync + Send> Weak<T> {
     #[inline]
     fn inner(&self) -> &ArcInner<T> {
         // See comments above for why this is "safe"
-        unsafe { &*self._ptr }
+        unsafe { &**self._ptr }
     }
 }
 
@@ -442,14 +443,16 @@ impl<T: Sync + Send> Drop for Weak<T> {
     /// } // implicit drop
     /// ```
     fn drop(&mut self) {
+        let ptr = *self._ptr;
+
         // see comments above for why this check is here
-        if self._ptr.is_null() { return }
+        if ptr.is_null() { return }
 
         // If we find out that we were the last weak pointer, then its time to deallocate the data
         // entirely. See the discussion in Arc::drop() about the memory orderings
         if self.inner().weak.fetch_sub(1, atomic::Release) == 1 {
             atomic::fence(atomic::Acquire);
-            unsafe { deallocate(self._ptr as *mut u8, size_of::<ArcInner<T>>(),
+            unsafe { deallocate(ptr as *mut u8, size_of::<ArcInner<T>>(),
                                 min_align_of::<ArcInner<T>>()) }
         }
     }
```
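The recurring `&**self._ptr` in the hunks above falls out of `_ptr` now being `NonZero<*mut ArcInner<T>>`: one dereference goes through the wrapper, a second through the raw pointer. A self-contained sketch with a hypothetical wrapper of the same shape (the `Inner` and `Handle` names are illustrative, not from the diff; the real `NonZero` was a `#[lang]` item with a `Zeroable` bound):

```rust
use std::ops::Deref;

// Hypothetical stand-in with roughly the shape of 2014's core::nonzero::NonZero.
struct NonZero<T>(T);

impl<T> NonZero<T> {
    // Caller must guarantee the wrapped value is never 0/NULL.
    unsafe fn new(inner: T) -> NonZero<T> {
        NonZero(inner)
    }
}

impl<T> Deref for NonZero<T> {
    type Target = T;
    fn deref(&self) -> &T {
        &self.0
    }
}

struct Inner {
    data: i32,
}

struct Handle {
    _ptr: NonZero<*mut Inner>,
}

impl Handle {
    fn inner(&self) -> &Inner {
        // The first `*` goes through NonZero's Deref to reach the raw
        // pointer; the second `*` dereferences the raw pointer itself,
        // hence `&**`.
        unsafe { &**self._ptr }
    }
}

fn main() {
    let raw = Box::into_raw(Box::new(Inner { data: 7 }));
    let h = Handle { _ptr: unsafe { NonZero::new(raw) } };
    assert_eq!(h.inner().data, 7);
    unsafe { drop(Box::from_raw(raw)) } // reclaim the heap allocation
}
```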

src/liballoc/rc.rs (+17 −15)
```diff
@@ -150,11 +150,11 @@ use core::fmt;
 use core::hash::{mod, Hash};
 use core::kinds::marker;
 use core::mem::{transmute, min_align_of, size_of, forget};
+use core::nonzero::NonZero;
 use core::ops::{Deref, Drop};
 use core::option::Option;
 use core::option::Option::{Some, None};
-use core::ptr;
-use core::ptr::RawPtr;
+use core::ptr::{mod, RawPtr};
 use core::result::Result;
 use core::result::Result::{Ok, Err};
 
@@ -174,7 +174,7 @@ struct RcBox<T> {
 pub struct Rc<T> {
     // FIXME #12808: strange names to try to avoid interfering with field accesses of the contained
     // type via Deref
-    _ptr: *mut RcBox<T>,
+    _ptr: NonZero<*mut RcBox<T>>,
     _nosend: marker::NoSend,
     _noshare: marker::NoSync
 }
@@ -196,11 +196,11 @@ impl<T> Rc<T> {
             // there is an implicit weak pointer owned by all the strong pointers, which
             // ensures that the weak destructor never frees the allocation while the strong
             // destructor is running, even if the weak pointer is stored inside the strong one.
-            _ptr: transmute(box RcBox {
+            _ptr: NonZero::new(transmute(box RcBox {
                 value: value,
                 strong: Cell::new(1),
                 weak: Cell::new(1)
-            }),
+            })),
             _nosend: marker::NoSend,
             _noshare: marker::NoSync
         }
@@ -281,7 +281,7 @@ pub fn try_unwrap<T>(rc: Rc<T>) -> Result<T, Rc<T>> {
             let val = ptr::read(&*rc); // copy the contained object
             // destruct the box and skip our Drop
             // we can ignore the refcounts because we know we're unique
-            deallocate(rc._ptr as *mut u8, size_of::<RcBox<T>>(),
+            deallocate(*rc._ptr as *mut u8, size_of::<RcBox<T>>(),
                        min_align_of::<RcBox<T>>());
             forget(rc);
             Ok(val)
@@ -311,7 +311,7 @@ pub fn try_unwrap<T>(rc: Rc<T>) -> Result<T, Rc<T>> {
 #[experimental]
 pub fn get_mut<'a, T>(rc: &'a mut Rc<T>) -> Option<&'a mut T> {
     if is_unique(rc) {
-        let inner = unsafe { &mut *rc._ptr };
+        let inner = unsafe { &mut **rc._ptr };
         Some(&mut inner.value)
     } else {
         None
@@ -343,7 +343,7 @@ impl<T: Clone> Rc<T> {
         // pointer that will ever be returned to T. Our reference count is guaranteed to be 1 at
         // this point, and we required the `Rc<T>` itself to be `mut`, so we're returning the only
         // possible reference to the inner value.
-        let inner = unsafe { &mut *self._ptr };
+        let inner = unsafe { &mut **self._ptr };
         &mut inner.value
     }
 }
@@ -391,7 +391,8 @@ impl<T> Drop for Rc<T> {
     /// ```
     fn drop(&mut self) {
         unsafe {
-            if !self._ptr.is_null() {
+            let ptr = *self._ptr;
+            if !ptr.is_null() {
                 self.dec_strong();
                 if self.strong() == 0 {
                     ptr::read(&**self); // destroy the contained object
@@ -401,7 +402,7 @@ impl<T> Drop for Rc<T> {
                 self.dec_weak();
 
                 if self.weak() == 0 {
-                    deallocate(self._ptr as *mut u8, size_of::<RcBox<T>>(),
+                    deallocate(ptr as *mut u8, size_of::<RcBox<T>>(),
                                min_align_of::<RcBox<T>>())
                 }
             }
@@ -618,7 +619,7 @@ impl<T: fmt::Show> fmt::Show for Rc<T> {
 pub struct Weak<T> {
     // FIXME #12808: strange names to try to avoid interfering with
     // field accesses of the contained type via Deref
-    _ptr: *mut RcBox<T>,
+    _ptr: NonZero<*mut RcBox<T>>,
     _nosend: marker::NoSend,
     _noshare: marker::NoSync
 }
@@ -682,12 +683,13 @@ impl<T> Drop for Weak<T> {
     /// ```
     fn drop(&mut self) {
         unsafe {
-            if !self._ptr.is_null() {
+            let ptr = *self._ptr;
+            if !ptr.is_null() {
                 self.dec_weak();
                 // the weak count starts at 1, and will only go to zero if all the strong pointers
                 // have disappeared.
                 if self.weak() == 0 {
-                    deallocate(self._ptr as *mut u8, size_of::<RcBox<T>>(),
+                    deallocate(ptr as *mut u8, size_of::<RcBox<T>>(),
                                min_align_of::<RcBox<T>>())
                 }
             }
@@ -742,12 +744,12 @@ trait RcBoxPtr<T> {
 
 impl<T> RcBoxPtr<T> for Rc<T> {
     #[inline(always)]
-    fn inner(&self) -> &RcBox<T> { unsafe { &(*self._ptr) } }
+    fn inner(&self) -> &RcBox<T> { unsafe { &(**self._ptr) } }
 }
 
 impl<T> RcBoxPtr<T> for Weak<T> {
     #[inline(always)]
-    fn inner(&self) -> &RcBox<T> { unsafe { &(*self._ptr) } }
+    fn inner(&self) -> &RcBox<T> { unsafe { &(**self._ptr) } }
 }
 
 #[cfg(test)]
```
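Both `Drop` impls above copy the pointer out first (`let ptr = *self._ptr;`) and null-check the copy, even though `NonZero` claims the value is never NULL. The reason is the `#[unsafe_no_drop_flag]` behavior noted in the arc.rs hunk: drop glue could run again on a struct that had already been zeroed, so the pointer really could be NULL at drop time. A sketch that only mimics that copy-then-check shape, reusing the hypothetical wrapper from the earlier sketch (the attribute itself was removed from Rust long ago, so nothing here can actually be zeroed twice):

```rust
use std::ops::Deref;

struct NonZero<T>(T); // same hypothetical stand-in as in the earlier sketch

impl<T> Deref for NonZero<T> {
    type Target = T;
    fn deref(&self) -> &T {
        &self.0
    }
}

struct Inner {
    data: i32,
}

struct Handle {
    _ptr: NonZero<*mut Inner>,
}

impl Drop for Handle {
    fn drop(&mut self) {
        // Copy the raw pointer out of the wrapper once, then reuse the copy
        // for both the guard and the deallocation, as the patched drops do.
        let ptr = *self._ptr;
        // Under 2014's #[unsafe_no_drop_flag], the whole struct could be
        // zeroed after a first drop, so the "non-zero" pointer could be NULL
        // here; this check mirrors the shape of the code in the diff.
        if ptr.is_null() {
            return;
        }
        unsafe { drop(Box::from_raw(ptr)) } // free the boxed Inner
    }
}

fn main() {
    let raw = Box::into_raw(Box::new(Inner { data: 7 }));
    let h = Handle { _ptr: NonZero(raw) };
    assert_eq!(unsafe { (*raw).data }, 7);
    drop(h); // Drop runs once and frees the allocation
}
```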
